hexsha
stringlengths 40
40
| size
int64 2
1.02M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
245
| max_stars_repo_name
stringlengths 6
130
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
245
| max_issues_repo_name
stringlengths 6
130
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
245
| max_forks_repo_name
stringlengths 6
130
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
1.02M
| avg_line_length
float64 1
417k
| max_line_length
int64 1
987k
| alphanum_fraction
float64 0
1
| content_no_comment
stringlengths 0
1.01M
| is_comment_constant_removed
bool 1
class | is_sharp_comment_removed
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f70c6c4e1ef4c0a61876b8d442053117c763928f
| 3,809
|
py
|
Python
|
api/management/commands/import-translated-strings.py
|
IFRCGo/ifrcgo-api
|
c1c3e0cf1076ab48d03db6aaf7a00f8485ca9e1a
|
[
"MIT"
] | 11
|
2018-06-11T06:05:12.000Z
|
2022-03-25T09:31:44.000Z
|
api/management/commands/import-translated-strings.py
|
IFRCGo/ifrcgo-api
|
c1c3e0cf1076ab48d03db6aaf7a00f8485ca9e1a
|
[
"MIT"
] | 498
|
2017-11-07T21:20:13.000Z
|
2022-03-31T14:37:18.000Z
|
api/management/commands/import-translated-strings.py
|
IFRCGo/ifrcgo-api
|
c1c3e0cf1076ab48d03db6aaf7a00f8485ca9e1a
|
[
"MIT"
] | 6
|
2018-04-11T13:29:50.000Z
|
2020-07-16T16:52:11.000Z
|
import os
import csv
from django.core.management.base import BaseCommand
from django.db import transaction
from api.models import Action, Country, DisasterType, SituationReportType
from api.logger import logger
class Command(BaseCommand):
    help = 'Import translated strings from a CSV. Either use the --table and --field params \
or else the CSV has to be named like "tablename__fieldname.csv" (ex. api_country__name.csv). \
Delimiter should be ";". Field order: original, fr, es, ar (ex. name, name_fr, name_es, name_ar)'
    missing_args_message = "Filename is missing."

    # Table name -> (model class, label used in log messages). Consolidates the
    # four duplicated if/elif branches into one data-driven lookup.
    MODELS = {
        'api_country': (Country, 'Country'),
        'api_action': (Action, 'Action'),
        'api_disastertype': (DisasterType, 'DisasterType'),
        'api_situationreporttype': (SituationReportType, 'SituationReportType'),
    }

    def add_arguments(self, parser):
        parser.add_argument('filename', nargs='+', type=str)
        parser.add_argument(
            '-t',
            '--table',
            type=str,
            help='Database table name of the translated strings'
        )
        parser.add_argument(
            '-f',
            '--field',
            type=str,
            help='Database field name of the translated strings'
        )

    @transaction.atomic
    def handle(self, *args, **kwargs):
        ''' Example CSV header: name; name_fr; name_es; name_ar '''
        filename = kwargs['filename'][0]
        # os.path.split() [0] is the folder, [1] is the filename
        basename = os.path.split(filename)[1]
        tablename = kwargs['table'] or basename.split('__')[0]
        # [:-4] strips the trailing '.csv'. (The previous [:4] kept only the
        # first four characters, which only worked for 4-letter fields like
        # 'name' by coincidence.)
        fieldname = kwargs['field'] or basename.split('__')[1][:-4]
        try:
            model, label = self.MODELS[tablename]
        except KeyError:
            logger.error(f'Unknown table name: {tablename}')
            return
        with open(filename, 'r') as f:
            reader = csv.reader(f, delimiter=';')
            # Header row, e.g.: name; name_fr; name_es; name_ar
            fieldnames = next(reader)
            for tr in reader:
                # **{fieldname: tr[0]} filters on the original string,
                # e.g. name=tr[0]
                records = model.objects.filter(**{fieldname: tr[0]})
                if records:
                    # Write the fr/es/ar translations into their columns
                    records.update(**{
                        fieldnames[1]: tr[1],
                        fieldnames[2]: tr[2],
                        fieldnames[3]: tr[3]
                    })
                else:
                    logger.info(f'No {label} in GO DB with the string: {tr[0]}')
        print('done!')
| 42.322222
| 109
| 0.476503
|
import os
import csv
from django.core.management.base import BaseCommand
from django.db import transaction
from api.models import Action, Country, DisasterType, SituationReportType
from api.logger import logger
class Command(BaseCommand):
    help = 'Import translated strings from a CSV. Either use the --table and --field params \
or else the CSV has to be named like "tablename__fieldname.csv" (ex. api_country__name.csv). \
Delimiter should be ";". Field order: original, fr, es, ar (ex. name, name_fr, name_es, name_ar)'
    missing_args_message = "Filename is missing."

    # Table name -> (model class, label used in log messages). Consolidates the
    # four duplicated if/elif branches into one data-driven lookup.
    MODELS = {
        'api_country': (Country, 'Country'),
        'api_action': (Action, 'Action'),
        'api_disastertype': (DisasterType, 'DisasterType'),
        'api_situationreporttype': (SituationReportType, 'SituationReportType'),
    }

    def add_arguments(self, parser):
        parser.add_argument('filename', nargs='+', type=str)
        parser.add_argument(
            '-t',
            '--table',
            type=str,
            help='Database table name of the translated strings'
        )
        parser.add_argument(
            '-f',
            '--field',
            type=str,
            help='Database field name of the translated strings'
        )

    @transaction.atomic
    def handle(self, *args, **kwargs):
        '''Import the CSV named in kwargs into the matching model's columns.'''
        filename = kwargs['filename'][0]
        # os.path.split() [0] is the folder, [1] is the filename
        basename = os.path.split(filename)[1]
        tablename = kwargs['table'] or basename.split('__')[0]
        # [:-4] strips the trailing '.csv'. (The previous [:4] kept only the
        # first four characters, which only worked for 4-letter fields like
        # 'name' by coincidence.)
        fieldname = kwargs['field'] or basename.split('__')[1][:-4]
        try:
            model, label = self.MODELS[tablename]
        except KeyError:
            logger.error(f'Unknown table name: {tablename}')
            return
        with open(filename, 'r') as f:
            reader = csv.reader(f, delimiter=';')
            # Header row, e.g.: name; name_fr; name_es; name_ar
            fieldnames = next(reader)
            for tr in reader:
                # **{fieldname: tr[0]} filters on the original string,
                # e.g. name=tr[0]
                records = model.objects.filter(**{fieldname: tr[0]})
                if records:
                    # Write the fr/es/ar translations into their columns
                    records.update(**{
                        fieldnames[1]: tr[1],
                        fieldnames[2]: tr[2],
                        fieldnames[3]: tr[3]
                    })
                else:
                    logger.info(f'No {label} in GO DB with the string: {tr[0]}')
        print('done!')
| true
| true
|
f70c6ce47b25347b86e85cda31ba0052a32de4eb
| 417
|
py
|
Python
|
robot/components/shooter.py
|
frc1418/robotpy-slides
|
88cad003ce8c8936ebbd749946e2b1014024426d
|
[
"MIT"
] | null | null | null |
robot/components/shooter.py
|
frc1418/robotpy-slides
|
88cad003ce8c8936ebbd749946e2b1014024426d
|
[
"MIT"
] | null | null | null |
robot/components/shooter.py
|
frc1418/robotpy-slides
|
88cad003ce8c8936ebbd749946e2b1014024426d
|
[
"MIT"
] | 1
|
2018-12-07T17:10:57.000Z
|
2018-12-07T17:10:57.000Z
|
from ctre import WPI_TalonSRX
class Shooter:
    """Robot shooter component: holds a demanded output level and pushes it
    to the motor controller each control cycle."""

    # Motor controller; injected by the robot framework — TODO confirm
    # (magicbot-style annotation injection, presumably).
    motor: WPI_TalonSRX

    def __init__(self):
        # Demanded output (0 = stopped, 1 = full power).
        self.ref_velocity = 0

    def enable(self):
        """Demand full output on the next execute() cycle."""
        self.ref_velocity = 1

    def disable(self):
        """Demand zero output on the next execute() cycle."""
        self.ref_velocity = 0

    def ready(self):
        """Return True once the measured quadrature velocity exceeds 4500."""
        return 4500 < self.motor.getQuadratureVelocity()

    def execute(self):
        """Periodic update: write the demanded percent output to the motor."""
        self.motor.set(WPI_TalonSRX.ControlMode.PercentOutput, self.ref_velocity)
| 20.85
| 81
| 0.666667
|
from ctre import WPI_TalonSRX
class Shooter:
    """Robot shooter component driven by a single Talon SRX motor."""
    # Motor controller; injected by the robot framework — TODO confirm
    # (magicbot-style annotation injection, presumably).
    motor: WPI_TalonSRX
    def __init__(self):
        # Demanded output (0 = stopped, 1 = full power).
        self.ref_velocity = 0
    def enable(self):
        # Demand full output on the next execute() cycle.
        self.ref_velocity = 1
    def disable(self):
        # Demand zero output on the next execute() cycle.
        self.ref_velocity = 0
    def ready(self):
        # Shooter counts as up-to-speed once measured velocity exceeds 4500.
        return self.motor.getQuadratureVelocity() > 4500
    def execute(self):
        # Periodic update: write the demanded percent output to the motor.
        self.motor.set(WPI_TalonSRX.ControlMode.PercentOutput, self.ref_velocity)
| true
| true
|
f70c6d412a98b13eb64ed6bf8a3330b33bfa02ed
| 10,891
|
py
|
Python
|
fasterRCNNtrain/loss_and_gen.py
|
TangZhenchaoTZC/Keras-mask-detection
|
325679d06a12a90b2552ed7d447298a23e3b9d57
|
[
"MIT"
] | 3
|
2020-05-26T15:13:06.000Z
|
2020-05-27T02:57:27.000Z
|
fasterRCNNtrain/loss_and_gen.py
|
TangZhenchaoTZC/Keras-mask-detection
|
325679d06a12a90b2552ed7d447298a23e3b9d57
|
[
"MIT"
] | null | null | null |
fasterRCNNtrain/loss_and_gen.py
|
TangZhenchaoTZC/Keras-mask-detection
|
325679d06a12a90b2552ed7d447298a23e3b9d57
|
[
"MIT"
] | null | null | null |
"""fasterRCNN训练的损失函数与数据生成器"""
from keras.applications.imagenet_utils import preprocess_input
from keras import backend as K
import keras
import tensorflow as tf
import numpy as np
from random import shuffle
import random
from PIL import Image
from keras.objectives import categorical_crossentropy
from matplotlib.colors import rgb_to_hsv, hsv_to_rgb
import sys
sys.path.append("..")
from net import RPN as RPN
def rand(a=0, b=1):
    """Return a uniform random float in the half-open interval [a, b)."""
    span = b - a
    return a + span * np.random.rand()
def cls_loss(ratio=3):
    """Build the RPN objectness (classification) loss.

    The background term is weighted by `ratio` relative to the object term;
    anchors flagged -1 in the last channel are ignored entirely.
    """
    def _cls_loss(y_true, y_pred):
        # y_true [batch_size, num_anchor, num_classes+1]
        # y_pred [batch_size, num_anchor, num_classes]
        labels = y_true
        anchor_state = y_true[:, :, -1]  # -1: ignore, 0: background, 1: object
        classification = y_pred
        # Select the anchors that contain an object
        indices_for_object = tf.where(keras.backend.equal(anchor_state, 1))
        labels_for_object = tf.gather_nd(labels, indices_for_object)
        classification_for_object = tf.gather_nd(classification, indices_for_object)
        cls_loss_for_object = keras.backend.binary_crossentropy(labels_for_object, classification_for_object)
        # Select the anchors that are actually background
        indices_for_back = tf.where(keras.backend.equal(anchor_state, 0))
        labels_for_back = tf.gather_nd(labels, indices_for_back)
        classification_for_back = tf.gather_nd(classification, indices_for_back)
        # Per-anchor background cross-entropy
        cls_loss_for_back = keras.backend.binary_crossentropy(labels_for_back, classification_for_back)
        # Normalizers: number of positive / negative samples, clamped to >= 1
        normalizer_pos = tf.where(keras.backend.equal(anchor_state, 1))
        normalizer_pos = keras.backend.cast(keras.backend.shape(normalizer_pos)[0], keras.backend.floatx())
        normalizer_pos = keras.backend.maximum(keras.backend.cast_to_floatx(1.0), normalizer_pos)
        normalizer_neg = tf.where(keras.backend.equal(anchor_state, 0))
        normalizer_neg = keras.backend.cast(keras.backend.shape(normalizer_neg)[0], keras.backend.floatx())
        normalizer_neg = keras.backend.maximum(keras.backend.cast_to_floatx(1.0), normalizer_neg)
        # Divide each summed loss by its sample count
        cls_loss_for_object = keras.backend.sum(cls_loss_for_object) / normalizer_pos
        cls_loss_for_back = ratio * keras.backend.sum(cls_loss_for_back) / normalizer_neg
        # Total loss
        loss = cls_loss_for_object + cls_loss_for_back
        return loss
    return _cls_loss
def smooth_l1(sigma=1.0):
    """Build the smooth-L1 regression loss over positive anchors only."""
    sigma_squared = sigma ** 2
    def _smooth_l1(y_true, y_pred):
        # y_true [batch_size, num_anchor, 4+1]
        # y_pred [batch_size, num_anchor, 4]
        regression = y_pred
        regression_target = y_true[:, :, :-1]
        anchor_state = y_true[:, :, -1]
        # Keep only the positive samples (anchor_state == 1)
        indices = tf.where(keras.backend.equal(anchor_state, 1))
        regression = tf.gather_nd(regression, indices)
        regression_target = tf.gather_nd(regression_target, indices)
        # Compute smooth L1 loss:
        # f(x) = 0.5 * (sigma * x)^2          if |x| < 1 / sigma / sigma
        #        |x| - 0.5 / sigma / sigma    otherwise
        regression_diff = regression - regression_target
        regression_diff = keras.backend.abs(regression_diff)
        regression_loss = tf.where(
            keras.backend.less(regression_diff, 1.0 / sigma_squared),
            0.5 * sigma_squared * keras.backend.pow(regression_diff, 2),
            regression_diff - 0.5 / sigma_squared
        )
        # Normalize by the number of positive anchors (at least 1)
        normalizer = keras.backend.maximum(1, keras.backend.shape(indices)[0])
        normalizer = keras.backend.cast(normalizer, dtype=keras.backend.floatx())
        loss = keras.backend.sum(regression_loss) / normalizer
        return loss
    return _smooth_l1
def class_loss_regr(num_classes):
    """Smooth-L1 regression loss for the classifier head.

    y_true packs a mask in the first 4*num_classes entries and the
    regression targets in the last 4*num_classes entries.
    """
    epsilon = 1e-4
    def class_loss_regr_fixed_num(y_true, y_pred):
        x = y_true[:, :, 4 * num_classes:] - y_pred
        x_abs = K.abs(x)
        # x_bool is 1 where |x| <= 1 (quadratic region), else 0 (linear region)
        x_bool = K.cast(K.less_equal(x_abs, 1.0), 'float32')
        loss = 4 * K.sum(
            y_true[:, :, :4 * num_classes] * (x_bool * (0.5 * x * x) + (1 - x_bool) * (x_abs - 0.5))) / K.sum(
            epsilon + y_true[:, :, :4 * num_classes])
        return loss
    return class_loss_regr_fixed_num
def class_loss_cls(y_true, y_pred):
    """Categorical cross-entropy for the classifier head (first batch entry only)."""
    return K.mean(categorical_crossentropy(y_true[0, :, :], y_pred[0, :, :]))
def get_new_img_size(width, height, img_min_side=600):
    """Scale (width, height) so the shorter side equals img_min_side.

    Returns (resized_width, resized_height) as ints, preserving aspect ratio.
    """
    if width <= height:
        scale = float(img_min_side) / width
        return int(img_min_side), int(scale * height)
    scale = float(img_min_side) / height
    return int(scale * width), int(img_min_side)
def get_img_output_length(width, height):
    """Return the backbone feature-map (width, height) for a given input size.

    Each dimension passes through four stride-2 stages with the listed
    (kernel, padding) pairs; e.g. 600 -> 38.
    """
    def get_output_length(input_length):
        length = input_length
        # (kernel size, padding) of each stride-2 stage
        for kernel, pad in ((7, 3), (3, 1), (1, 0), (1, 0)):
            length = (length + 2 * pad - kernel) // 2 + 1
        return length
    return get_output_length(width), get_output_length(height)
class Generator(object):
    """Training-data generator for the faster R-CNN RPN stage."""
    def __init__(self, bbox_util, train_lines, num_classes, solid, solid_shape=[600, 600]):
        # NOTE(review): mutable default `solid_shape` is shared across calls;
        # it is only read here, but confirm before mutating it anywhere.
        self.bbox_util = bbox_util
        self.train_lines = train_lines
        self.train_batches = len(train_lines)
        self.num_classes = num_classes
        self.solid = solid
        # Fixed training image size (600, 600) used when solid is True
        self.solid_shape = solid_shape
    def get_random_data(self, annotation_line, jitter=.3, hue=.1, sat=1.5, val=1.5):
        """Data augmentation to improve model robustness: random resize,
        placement, flip and HSV distortion of one annotated image."""
        line = annotation_line.split()
        image = Image.open(line[0])
        iw, ih = image.size
        # When solid=True the training image is force-resized to solid_shape
        if self.solid:
            w, h = self.solid_shape
        else:
            w, h = get_new_img_size(iw, ih)
        box = np.array([np.array(list(map(int, box.split(',')))) for box in line[1:]])
        # resize image with jittered aspect ratio and random scale
        new_ar = w / h * rand(1 - jitter, 1 + jitter) / rand(1 - jitter, 1 + jitter)
        scale = rand(.25, 2)
        if new_ar < 1:
            nh = int(scale * h)
            nw = int(nh * new_ar)
        else:
            nw = int(scale * w)
            nh = int(nw / new_ar)
        image = image.resize((nw, nh), Image.BICUBIC)
        # place image at a random offset on a gray canvas
        dx = int(rand(0, w - nw))
        dy = int(rand(0, h - nh))
        new_image = Image.new('RGB', (w, h), (128, 128, 128))
        new_image.paste(image, (dx, dy))
        image = new_image
        # flip image or not
        flip = rand() < .5
        if flip: image = image.transpose(Image.FLIP_LEFT_RIGHT)
        # distort image in HSV space
        hue = rand(-hue, hue)
        sat = rand(1, sat) if rand() < .5 else 1 / rand(1, sat)
        val = rand(1, val) if rand() < .5 else 1 / rand(1, val)
        x = rgb_to_hsv(np.array(image) / 255.)
        x[..., 0] += hue
        x[..., 0][x[..., 0] > 1] -= 1
        x[..., 0][x[..., 0] < 0] += 1
        x[..., 1] *= sat
        x[..., 2] *= val
        x[x > 1] = 1
        x[x < 0] = 0
        image_data = hsv_to_rgb(x) * 255  # numpy array scaled back to 0..255
        # correct boxes to match the resize/placement/flip above
        box_data = np.zeros((len(box), 5))
        if len(box) > 0:
            np.random.shuffle(box)
            box[:, [0, 2]] = box[:, [0, 2]] * nw / iw + dx
            box[:, [1, 3]] = box[:, [1, 3]] * nh / ih + dy
            if flip: box[:, [0, 2]] = w - box[:, [2, 0]]
            box[:, 0:2][box[:, 0:2] < 0] = 0
            box[:, 2][box[:, 2] > w] = w
            box[:, 3][box[:, 3] > h] = h
            box_w = box[:, 2] - box[:, 0]
            box_h = box[:, 3] - box[:, 1]
            box = box[np.logical_and(box_w > 1, box_h > 1)]  # discard invalid box
            box_data = np.zeros((len(box), 5))
            box_data[:len(box)] = box
        if len(box) == 0:
            return image_data, []
        if (box_data[:, :4] > 0).any():
            return image_data, box_data
        else:
            return image_data, []
    def generate(self):
        """Endless generator yielding (preprocessed image, RPN targets, gt boxes)."""
        while True:
            # Shuffle the annotation lines (e.g. 2007_train.txt) each epoch
            shuffle(self.train_lines)
            lines = self.train_lines
            for annotation_line in lines:
                # Augment each line (one image): vary lighting, placement etc.
                # img is the augmented image, y holds the target boxes
                img, y = self.get_random_data(annotation_line)
                height, width, _ = np.shape(img)
                # Skip images with no targets
                if len(y) == 0:
                    continue
                # Normalize the box coordinates to [0, 1]
                boxes = np.array(y[:, :4], dtype=np.float32)
                boxes[:, 0] = boxes[:, 0] / width
                boxes[:, 1] = boxes[:, 1] / height
                boxes[:, 2] = boxes[:, 2] / width
                boxes[:, 3] = boxes[:, 3] / height
                box_heights = boxes[:, 3] - boxes[:, 1]
                box_widths = boxes[:, 2] - boxes[:, 0]
                # Skip boxes mislabelled with non-positive extents
                if (box_heights <= 0).any() or (box_widths <= 0).any():
                    continue
                y[:, :4] = boxes[:, :4]
                # Build the anchors (38*38*9 of them for a 600x600 input)
                anchors = RPN.create_anchor(get_img_output_length(width, height), width, height)
                # Match anchors to ground truth: positives overlap a real box,
                # negatives are background
                assignment = self.bbox_util.assign_boxes(y, anchors)
                # Training normally samples 128 positives and 128 negatives
                num_regions = 256
                classification = assignment[:, 4]
                regression = assignment[:, :]
                mask_pos = classification[:] > 0
                num_pos = len(classification[mask_pos])
                # If there are more than 128 positives, ignore the excess
                # NOTE(review): fancy-indexed assignment like
                # classification[mask_pos][val_locs] = -1 writes to a copy —
                # confirm whether these two lines take effect as intended.
                if num_pos > num_regions / 2:
                    val_locs = random.sample(range(num_pos), int(num_pos - num_regions / 2))
                    classification[mask_pos][val_locs] = -1
                    regression[mask_pos][val_locs, -1] = -1
                mask_neg = classification[:] == 0
                num_neg = len(classification[mask_neg])
                # Also ignore excess negatives to balance positive/negative counts
                if len(classification[mask_neg]) + num_pos > num_regions:
                    val_locs = random.sample(range(num_neg), int(num_neg - num_pos))
                    classification[mask_neg][val_locs] = -1
                classification = np.reshape(classification, [-1, 1])
                regression = np.reshape(regression, [-1, 5])
                tmp_inp = np.array(img)
                tmp_targets = [np.expand_dims(np.array(classification, dtype=np.float32), 0),
                               np.expand_dims(np.array(regression, dtype=np.float32), 0)]
                # 1. preprocess the image  2. the training targets  3. the gt boxes
                yield preprocess_input(np.expand_dims(tmp_inp, 0)), tmp_targets, np.expand_dims(y, 0)
| 37.044218
| 110
| 0.570104
|
from keras.applications.imagenet_utils import preprocess_input
from keras import backend as K
import keras
import tensorflow as tf
import numpy as np
from random import shuffle
import random
from PIL import Image
from keras.objectives import categorical_crossentropy
from matplotlib.colors import rgb_to_hsv, hsv_to_rgb
import sys
sys.path.append("..")
from net import RPN as RPN
def rand(a=0, b=1):
    """Return a uniform random float in the half-open interval [a, b)."""
    span = b - a
    return a + span * np.random.rand()
def cls_loss(ratio=3):
    """Build the RPN objectness loss; `ratio` weights the background term."""
    def _cls_loss(y_true, y_pred):
        labels = y_true
        # Last channel: -1 = ignore, 0 = background, 1 = object
        anchor_state = y_true[:, :, -1]
        classification = y_pred
        # Cross-entropy over anchors that contain an object
        indices_for_object = tf.where(keras.backend.equal(anchor_state, 1))
        labels_for_object = tf.gather_nd(labels, indices_for_object)
        classification_for_object = tf.gather_nd(classification, indices_for_object)
        cls_loss_for_object = keras.backend.binary_crossentropy(labels_for_object, classification_for_object)
        # Cross-entropy over background anchors
        indices_for_back = tf.where(keras.backend.equal(anchor_state, 0))
        labels_for_back = tf.gather_nd(labels, indices_for_back)
        classification_for_back = tf.gather_nd(classification, indices_for_back)
        cls_loss_for_back = keras.backend.binary_crossentropy(labels_for_back, classification_for_back)
        # Normalizers: positive / negative sample counts, clamped to >= 1
        normalizer_pos = tf.where(keras.backend.equal(anchor_state, 1))
        normalizer_pos = keras.backend.cast(keras.backend.shape(normalizer_pos)[0], keras.backend.floatx())
        normalizer_pos = keras.backend.maximum(keras.backend.cast_to_floatx(1.0), normalizer_pos)
        normalizer_neg = tf.where(keras.backend.equal(anchor_state, 0))
        normalizer_neg = keras.backend.cast(keras.backend.shape(normalizer_neg)[0], keras.backend.floatx())
        normalizer_neg = keras.backend.maximum(keras.backend.cast_to_floatx(1.0), normalizer_neg)
        # Mean object loss plus ratio-weighted mean background loss
        cls_loss_for_object = keras.backend.sum(cls_loss_for_object) / normalizer_pos
        cls_loss_for_back = ratio * keras.backend.sum(cls_loss_for_back) / normalizer_neg
        loss = cls_loss_for_object + cls_loss_for_back
        return loss
    return _cls_loss
def smooth_l1(sigma=1.0):
    """Build the smooth-L1 regression loss over positive anchors only."""
    sigma_squared = sigma ** 2
    def _smooth_l1(y_true, y_pred):
        regression = y_pred
        # y_true carries the 4 regression targets plus an anchor-state channel
        regression_target = y_true[:, :, :-1]
        anchor_state = y_true[:, :, -1]
        # Keep only the positive samples (anchor_state == 1)
        indices = tf.where(keras.backend.equal(anchor_state, 1))
        regression = tf.gather_nd(regression, indices)
        regression_target = tf.gather_nd(regression_target, indices)
        # Smooth L1: quadratic below 1/sigma^2, linear above
        regression_diff = regression - regression_target
        regression_diff = keras.backend.abs(regression_diff)
        regression_loss = tf.where(
            keras.backend.less(regression_diff, 1.0 / sigma_squared),
            0.5 * sigma_squared * keras.backend.pow(regression_diff, 2),
            regression_diff - 0.5 / sigma_squared
        )
        # Normalize by the number of positive anchors (at least 1)
        normalizer = keras.backend.maximum(1, keras.backend.shape(indices)[0])
        normalizer = keras.backend.cast(normalizer, dtype=keras.backend.floatx())
        loss = keras.backend.sum(regression_loss) / normalizer
        return loss
    return _smooth_l1
def class_loss_regr(num_classes):
    """Smooth-L1 regression loss for the classifier head.

    y_true packs a mask in the first 4*num_classes entries and the
    regression targets in the last 4*num_classes entries.
    """
    epsilon = 1e-4
    def class_loss_regr_fixed_num(y_true, y_pred):
        x = y_true[:, :, 4 * num_classes:] - y_pred
        x_abs = K.abs(x)
        # x_bool is 1 where |x| <= 1 (quadratic region), else 0 (linear region)
        x_bool = K.cast(K.less_equal(x_abs, 1.0), 'float32')
        loss = 4 * K.sum(
            y_true[:, :, :4 * num_classes] * (x_bool * (0.5 * x * x) + (1 - x_bool) * (x_abs - 0.5))) / K.sum(
            epsilon + y_true[:, :, :4 * num_classes])
        return loss
    return class_loss_regr_fixed_num
def class_loss_cls(y_true, y_pred):
    """Categorical cross-entropy for the classifier head (first batch entry only)."""
    return K.mean(categorical_crossentropy(y_true[0, :, :], y_pred[0, :, :]))
def get_new_img_size(width, height, img_min_side=600):
    """Scale (width, height) so the shorter side equals img_min_side.

    Returns (resized_width, resized_height) as ints, preserving aspect ratio.
    """
    if width <= height:
        scale = float(img_min_side) / width
        return int(img_min_side), int(scale * height)
    scale = float(img_min_side) / height
    return int(scale * width), int(img_min_side)
def get_img_output_length(width, height):
    """Return the backbone feature-map (width, height) for a given input size.

    Each dimension passes through four stride-2 stages with the listed
    (kernel, padding) pairs; e.g. 600 -> 38.
    """
    def get_output_length(input_length):
        length = input_length
        # (kernel size, padding) of each stride-2 stage
        for kernel, pad in ((7, 3), (3, 1), (1, 0), (1, 0)):
            length = (length + 2 * pad - kernel) // 2 + 1
        return length
    return get_output_length(width), get_output_length(height)
class Generator(object):
    """Training-data generator for the faster R-CNN RPN stage."""
    def __init__(self, bbox_util, train_lines, num_classes, solid, solid_shape=[600, 600]):
        # NOTE(review): mutable default `solid_shape` is shared across calls;
        # it is only read here, but confirm before mutating it anywhere.
        self.bbox_util = bbox_util
        self.train_lines = train_lines
        self.train_batches = len(train_lines)
        self.num_classes = num_classes
        self.solid = solid
        # Fixed training image size used when solid is True
        self.solid_shape = solid_shape
    def get_random_data(self, annotation_line, jitter=.3, hue=.1, sat=1.5, val=1.5):
        """Load one annotation line and return an augmented (image, boxes) pair."""
        line = annotation_line.split()
        image = Image.open(line[0])
        iw, ih = image.size
        # Force-resize to solid_shape when solid=True, else scale min side to 600
        if self.solid:
            w, h = self.solid_shape
        else:
            w, h = get_new_img_size(iw, ih)
        box = np.array([np.array(list(map(int, box.split(',')))) for box in line[1:]])
        # Random aspect-ratio jitter and scale
        new_ar = w / h * rand(1 - jitter, 1 + jitter) / rand(1 - jitter, 1 + jitter)
        scale = rand(.25, 2)
        if new_ar < 1:
            nh = int(scale * h)
            nw = int(nh * new_ar)
        else:
            nw = int(scale * w)
            nh = int(nw / new_ar)
        image = image.resize((nw, nh), Image.BICUBIC)
        # Paste at a random offset on a gray canvas
        dx = int(rand(0, w - nw))
        dy = int(rand(0, h - nh))
        new_image = Image.new('RGB', (w, h), (128, 128, 128))
        new_image.paste(image, (dx, dy))
        image = new_image
        # Random horizontal flip
        flip = rand() < .5
        if flip: image = image.transpose(Image.FLIP_LEFT_RIGHT)
        # Random HSV distortion
        hue = rand(-hue, hue)
        sat = rand(1, sat) if rand() < .5 else 1 / rand(1, sat)
        val = rand(1, val) if rand() < .5 else 1 / rand(1, val)
        x = rgb_to_hsv(np.array(image) / 255.)
        x[..., 0] += hue
        x[..., 0][x[..., 0] > 1] -= 1
        x[..., 0][x[..., 0] < 0] += 1
        x[..., 1] *= sat
        x[..., 2] *= val
        x[x > 1] = 1
        x[x < 0] = 0
        image_data = hsv_to_rgb(x) * 255
        # Transform the boxes to match the resize/offset/flip, clip to the
        # canvas, and drop degenerate boxes
        box_data = np.zeros((len(box), 5))
        if len(box) > 0:
            np.random.shuffle(box)
            box[:, [0, 2]] = box[:, [0, 2]] * nw / iw + dx
            box[:, [1, 3]] = box[:, [1, 3]] * nh / ih + dy
            if flip: box[:, [0, 2]] = w - box[:, [2, 0]]
            box[:, 0:2][box[:, 0:2] < 0] = 0
            box[:, 2][box[:, 2] > w] = w
            box[:, 3][box[:, 3] > h] = h
            box_w = box[:, 2] - box[:, 0]
            box_h = box[:, 3] - box[:, 1]
            box = box[np.logical_and(box_w > 1, box_h > 1)]
            box_data = np.zeros((len(box), 5))
            box_data[:len(box)] = box
        if len(box) == 0:
            return image_data, []
        if (box_data[:, :4] > 0).any():
            return image_data, box_data
        else:
            return image_data, []
    def generate(self):
        """Endless generator yielding (preprocessed image, RPN targets, gt boxes)."""
        while True:
            shuffle(self.train_lines)
            lines = self.train_lines
            for annotation_line in lines:
                img, y = self.get_random_data(annotation_line)
                height, width, _ = np.shape(img)
                # Skip images with no targets
                if len(y) == 0:
                    continue
                # Normalize box coordinates to [0, 1]
                boxes = np.array(y[:, :4], dtype=np.float32)
                boxes[:, 0] = boxes[:, 0] / width
                boxes[:, 1] = boxes[:, 1] / height
                boxes[:, 2] = boxes[:, 2] / width
                boxes[:, 3] = boxes[:, 3] / height
                box_heights = boxes[:, 3] - boxes[:, 1]
                box_widths = boxes[:, 2] - boxes[:, 0]
                # Skip mislabelled boxes with non-positive extents
                if (box_heights <= 0).any() or (box_widths <= 0).any():
                    continue
                y[:, :4] = boxes[:, :4]
                # Build anchors and match them to the ground-truth boxes
                anchors = RPN.create_anchor(get_img_output_length(width, height), width, height)
                assignment = self.bbox_util.assign_boxes(y, anchors)
                # Cap the number of sampled anchors; excess ones are set to -1
                # (ignored).  NOTE(review): chained fancy indexing like
                # classification[mask_pos][val_locs] = -1 assigns into a copy —
                # confirm whether this takes effect as intended.
                num_regions = 256
                classification = assignment[:, 4]
                regression = assignment[:, :]
                mask_pos = classification[:] > 0
                num_pos = len(classification[mask_pos])
                if num_pos > num_regions / 2:
                    val_locs = random.sample(range(num_pos), int(num_pos - num_regions / 2))
                    classification[mask_pos][val_locs] = -1
                    regression[mask_pos][val_locs, -1] = -1
                mask_neg = classification[:] == 0
                num_neg = len(classification[mask_neg])
                if len(classification[mask_neg]) + num_pos > num_regions:
                    val_locs = random.sample(range(num_neg), int(num_neg - num_pos))
                    classification[mask_neg][val_locs] = -1
                classification = np.reshape(classification, [-1, 1])
                regression = np.reshape(regression, [-1, 5])
                tmp_inp = np.array(img)
                tmp_targets = [np.expand_dims(np.array(classification, dtype=np.float32), 0),
                               np.expand_dims(np.array(regression, dtype=np.float32), 0)]
                yield preprocess_input(np.expand_dims(tmp_inp, 0)), tmp_targets, np.expand_dims(y, 0)
| true
| true
|
f70c6da1e309192d26af697878f3e74a962a2a3a
| 582
|
py
|
Python
|
setup.py
|
mballance/bmk
|
8a7b0fa7f4d05fce186cfbd07af71e78fb83c912
|
[
"Apache-2.0"
] | 2
|
2021-12-14T23:49:13.000Z
|
2022-01-13T17:44:29.000Z
|
setup.py
|
mballance/bmk
|
8a7b0fa7f4d05fce186cfbd07af71e78fb83c912
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
mballance/bmk
|
8a7b0fa7f4d05fce186cfbd07af71e78fb83c912
|
[
"Apache-2.0"
] | null | null | null |
import os
from setuptools import setup
#data_files = []
#directories = glob.glob('src/share')
# Package metadata for the bmk library.  The version is supplied by
# setuptools_scm (derived from the git tag at build time).
setup(
    name = "bmk",
    packages=['bmk'],
    package_dir = {'' : 'src'},
    # Ship the data files under src/bmk/share with the package
    package_data = {'bmk' : ['share/*']},
    author = "Matthew Ballance",
    author_email = "matt.ballance@gmail.com",
    description = ("Provides a core classes for use by memory-oriented BFMs"),
    license = "Apache 2.0",
    keywords = ["Python", "CocoTB", "embedded software"],
    url = "https://github.com/mballance/bmk",
    setup_requires=[
        'setuptools_scm',
    ],
    install_requires=[
        "pyhvl-rpc",
    ],
)
| 21.555556
| 76
| 0.634021
|
import os
from setuptools import setup
# Package metadata for the bmk library.  The version is supplied by
# setuptools_scm (derived from the git tag at build time).
setup(
    name = "bmk",
    packages=['bmk'],
    package_dir = {'' : 'src'},
    # Ship the data files under src/bmk/share with the package
    package_data = {'bmk' : ['share/*']},
    author = "Matthew Ballance",
    author_email = "matt.ballance@gmail.com",
    description = ("Provides a core classes for use by memory-oriented BFMs"),
    license = "Apache 2.0",
    keywords = ["Python", "CocoTB", "embedded software"],
    url = "https://github.com/mballance/bmk",
    setup_requires=[
        'setuptools_scm',
    ],
    install_requires=[
        "pyhvl-rpc",
    ],
)
| true
| true
|
f70c6e7d656ca8d754306a3a4bd70f95bb91c35e
| 20,203
|
py
|
Python
|
tests/handlers/test_presence.py
|
Cadair/synapse
|
466866a1d9dd1fcf82348a36c0532cb0c6614767
|
[
"Apache-2.0"
] | 1
|
2019-09-14T03:24:03.000Z
|
2019-09-14T03:24:03.000Z
|
tests/handlers/test_presence.py
|
Cadair/synapse
|
466866a1d9dd1fcf82348a36c0532cb0c6614767
|
[
"Apache-2.0"
] | 4
|
2020-03-04T23:47:05.000Z
|
2021-12-09T21:41:44.000Z
|
tests/handlers/test_presence.py
|
Cadair/synapse
|
466866a1d9dd1fcf82348a36c0532cb0c6614767
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mock import Mock, call
from signedjson.key import generate_signing_key
from synapse.api.constants import EventTypes, Membership, PresenceState
from synapse.events import room_version_to_event_format
from synapse.events.builder import EventBuilder
from synapse.handlers.presence import (
FEDERATION_PING_INTERVAL,
FEDERATION_TIMEOUT,
IDLE_TIMER,
LAST_ACTIVE_GRANULARITY,
SYNC_ONLINE_TIMEOUT,
handle_timeout,
handle_update,
)
from synapse.rest.client.v1 import room
from synapse.storage.presence import UserPresenceState
from synapse.types import UserID, get_domain_from_id
from tests import unittest
class PresenceUpdateTestCase(unittest.TestCase):
    def test_offline_to_online(self):
        """Offline -> online: the update is persisted, the user becomes
        currently_active, and the idle, sync-timeout and last-active timers
        are all scheduled."""
        wheel_timer = Mock()
        user_id = "@foo:bar"
        now = 5000000
        prev_state = UserPresenceState.default(user_id)
        new_state = prev_state.copy_and_replace(
            state=PresenceState.ONLINE, last_active_ts=now
        )
        state, persist_and_notify, federation_ping = handle_update(
            prev_state, new_state, is_mine=True, wheel_timer=wheel_timer, now=now
        )
        self.assertTrue(persist_and_notify)
        self.assertTrue(state.currently_active)
        self.assertEquals(new_state.state, state.state)
        self.assertEquals(new_state.status_msg, state.status_msg)
        self.assertEquals(state.last_federation_update_ts, now)
        # All three wheel-timer callbacks must be scheduled
        self.assertEquals(wheel_timer.insert.call_count, 3)
        wheel_timer.insert.assert_has_calls(
            [
                call(now=now, obj=user_id, then=new_state.last_active_ts + IDLE_TIMER),
                call(
                    now=now,
                    obj=user_id,
                    then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT,
                ),
                call(
                    now=now,
                    obj=user_id,
                    then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY,
                ),
            ],
            any_order=True,
        )
    def test_online_to_online(self):
        """Online -> online with unchanged activity: nothing is persisted, but
        federation is pinged and all three timers are (re)scheduled."""
        wheel_timer = Mock()
        user_id = "@foo:bar"
        now = 5000000
        prev_state = UserPresenceState.default(user_id)
        prev_state = prev_state.copy_and_replace(
            state=PresenceState.ONLINE, last_active_ts=now, currently_active=True
        )
        new_state = prev_state.copy_and_replace(
            state=PresenceState.ONLINE, last_active_ts=now
        )
        state, persist_and_notify, federation_ping = handle_update(
            prev_state, new_state, is_mine=True, wheel_timer=wheel_timer, now=now
        )
        self.assertFalse(persist_and_notify)
        self.assertTrue(federation_ping)
        self.assertTrue(state.currently_active)
        self.assertEquals(new_state.state, state.state)
        self.assertEquals(new_state.status_msg, state.status_msg)
        self.assertEquals(state.last_federation_update_ts, now)
        # All three wheel-timer callbacks must be scheduled
        self.assertEquals(wheel_timer.insert.call_count, 3)
        wheel_timer.insert.assert_has_calls(
            [
                call(now=now, obj=user_id, then=new_state.last_active_ts + IDLE_TIMER),
                call(
                    now=now,
                    obj=user_id,
                    then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT,
                ),
                call(
                    now=now,
                    obj=user_id,
                    then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY,
                ),
            ],
            any_order=True,
        )
    def test_online_to_online_last_active_noop(self):
        """Online -> online where the user was already currently_active and
        last_active_ts moves forward: not persisted, federation pinged, all
        three timers scheduled."""
        wheel_timer = Mock()
        user_id = "@foo:bar"
        now = 5000000
        prev_state = UserPresenceState.default(user_id)
        prev_state = prev_state.copy_and_replace(
            state=PresenceState.ONLINE,
            # Previous activity is just past the granularity window
            last_active_ts=now - LAST_ACTIVE_GRANULARITY - 10,
            currently_active=True,
        )
        new_state = prev_state.copy_and_replace(
            state=PresenceState.ONLINE, last_active_ts=now
        )
        state, persist_and_notify, federation_ping = handle_update(
            prev_state, new_state, is_mine=True, wheel_timer=wheel_timer, now=now
        )
        self.assertFalse(persist_and_notify)
        self.assertTrue(federation_ping)
        self.assertTrue(state.currently_active)
        self.assertEquals(new_state.state, state.state)
        self.assertEquals(new_state.status_msg, state.status_msg)
        self.assertEquals(state.last_federation_update_ts, now)
        # All three wheel-timer callbacks must be scheduled
        self.assertEquals(wheel_timer.insert.call_count, 3)
        wheel_timer.insert.assert_has_calls(
            [
                call(now=now, obj=user_id, then=new_state.last_active_ts + IDLE_TIMER),
                call(
                    now=now,
                    obj=user_id,
                    then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT,
                ),
                call(
                    now=now,
                    obj=user_id,
                    then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY,
                ),
            ],
            any_order=True,
        )
def test_online_to_online_last_active(self):
wheel_timer = Mock()
user_id = "@foo:bar"
now = 5000000
prev_state = UserPresenceState.default(user_id)
prev_state = prev_state.copy_and_replace(
state=PresenceState.ONLINE,
last_active_ts=now - LAST_ACTIVE_GRANULARITY - 1,
currently_active=True,
)
new_state = prev_state.copy_and_replace(state=PresenceState.ONLINE)
state, persist_and_notify, federation_ping = handle_update(
prev_state, new_state, is_mine=True, wheel_timer=wheel_timer, now=now
)
self.assertTrue(persist_and_notify)
self.assertFalse(state.currently_active)
self.assertEquals(new_state.state, state.state)
self.assertEquals(new_state.status_msg, state.status_msg)
self.assertEquals(state.last_federation_update_ts, now)
self.assertEquals(wheel_timer.insert.call_count, 2)
wheel_timer.insert.assert_has_calls(
[
call(now=now, obj=user_id, then=new_state.last_active_ts + IDLE_TIMER),
call(
now=now,
obj=user_id,
then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT,
),
],
any_order=True,
)
def test_remote_ping_timer(self):
wheel_timer = Mock()
user_id = "@foo:bar"
now = 5000000
prev_state = UserPresenceState.default(user_id)
prev_state = prev_state.copy_and_replace(
state=PresenceState.ONLINE, last_active_ts=now
)
new_state = prev_state.copy_and_replace(state=PresenceState.ONLINE)
state, persist_and_notify, federation_ping = handle_update(
prev_state, new_state, is_mine=False, wheel_timer=wheel_timer, now=now
)
self.assertFalse(persist_and_notify)
self.assertFalse(federation_ping)
self.assertFalse(state.currently_active)
self.assertEquals(new_state.state, state.state)
self.assertEquals(new_state.status_msg, state.status_msg)
self.assertEquals(wheel_timer.insert.call_count, 1)
wheel_timer.insert.assert_has_calls(
[
call(
now=now,
obj=user_id,
then=new_state.last_federation_update_ts + FEDERATION_TIMEOUT,
)
],
any_order=True,
)
def test_online_to_offline(self):
wheel_timer = Mock()
user_id = "@foo:bar"
now = 5000000
prev_state = UserPresenceState.default(user_id)
prev_state = prev_state.copy_and_replace(
state=PresenceState.ONLINE, last_active_ts=now, currently_active=True
)
new_state = prev_state.copy_and_replace(state=PresenceState.OFFLINE)
state, persist_and_notify, federation_ping = handle_update(
prev_state, new_state, is_mine=True, wheel_timer=wheel_timer, now=now
)
self.assertTrue(persist_and_notify)
self.assertEquals(new_state.state, state.state)
self.assertEquals(state.last_federation_update_ts, now)
self.assertEquals(wheel_timer.insert.call_count, 0)
def test_online_to_idle(self):
wheel_timer = Mock()
user_id = "@foo:bar"
now = 5000000
prev_state = UserPresenceState.default(user_id)
prev_state = prev_state.copy_and_replace(
state=PresenceState.ONLINE, last_active_ts=now, currently_active=True
)
new_state = prev_state.copy_and_replace(state=PresenceState.UNAVAILABLE)
state, persist_and_notify, federation_ping = handle_update(
prev_state, new_state, is_mine=True, wheel_timer=wheel_timer, now=now
)
self.assertTrue(persist_and_notify)
self.assertEquals(new_state.state, state.state)
self.assertEquals(state.last_federation_update_ts, now)
self.assertEquals(new_state.state, state.state)
self.assertEquals(new_state.status_msg, state.status_msg)
self.assertEquals(wheel_timer.insert.call_count, 1)
wheel_timer.insert.assert_has_calls(
[
call(
now=now,
obj=user_id,
then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT,
)
],
any_order=True,
)
class PresenceTimeoutTestCase(unittest.TestCase):
    """Tests for handle_timeout, which expires/refreshes stale presence."""

    def test_idle_timer(self):
        """A user inactive for longer than IDLE_TIMER becomes UNAVAILABLE."""
        user_id = "@foo:bar"
        now = 5000000

        state = UserPresenceState.default(user_id)
        state = state.copy_and_replace(
            state=PresenceState.ONLINE,
            last_active_ts=now - IDLE_TIMER - 1,
            last_user_sync_ts=now,
        )

        new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now)

        self.assertIsNotNone(new_state)
        self.assertEquals(new_state.state, PresenceState.UNAVAILABLE)

    def test_sync_timeout(self):
        """A user whose last sync exceeds SYNC_ONLINE_TIMEOUT goes OFFLINE."""
        user_id = "@foo:bar"
        now = 5000000

        state = UserPresenceState.default(user_id)
        state = state.copy_and_replace(
            state=PresenceState.ONLINE,
            last_active_ts=0,
            last_user_sync_ts=now - SYNC_ONLINE_TIMEOUT - 1,
        )

        new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now)

        self.assertIsNotNone(new_state)
        self.assertEquals(new_state.state, PresenceState.OFFLINE)

    def test_sync_online(self):
        """A user past the sync timeout but still actively syncing stays ONLINE."""
        user_id = "@foo:bar"
        now = 5000000

        state = UserPresenceState.default(user_id)
        state = state.copy_and_replace(
            state=PresenceState.ONLINE,
            last_active_ts=now - SYNC_ONLINE_TIMEOUT - 1,
            last_user_sync_ts=now - SYNC_ONLINE_TIMEOUT - 1,
        )

        new_state = handle_timeout(
            state, is_mine=True, syncing_user_ids={user_id}, now=now
        )

        self.assertIsNotNone(new_state)
        self.assertEquals(new_state.state, PresenceState.ONLINE)

    def test_federation_ping(self):
        """A stale federation update timestamp triggers a state to re-send,
        with the state itself otherwise unchanged."""
        user_id = "@foo:bar"
        now = 5000000

        state = UserPresenceState.default(user_id)
        state = state.copy_and_replace(
            state=PresenceState.ONLINE,
            last_active_ts=now,
            last_user_sync_ts=now,
            last_federation_update_ts=now - FEDERATION_PING_INTERVAL - 1,
        )

        new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now)

        self.assertIsNotNone(new_state)
        # NOTE(review): this previously asserted `new_state == new_state`, a
        # tautology that could never fail; compare against the input instead.
        self.assertEquals(state, new_state)

    def test_no_timeout(self):
        """A fully up-to-date state yields no change (None)."""
        user_id = "@foo:bar"
        now = 5000000

        state = UserPresenceState.default(user_id)
        state = state.copy_and_replace(
            state=PresenceState.ONLINE,
            last_active_ts=now,
            last_user_sync_ts=now,
            last_federation_update_ts=now,
        )

        new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now)

        self.assertIsNone(new_state)

    def test_federation_timeout(self):
        """A remote user not heard from within FEDERATION_TIMEOUT goes OFFLINE."""
        user_id = "@foo:bar"
        now = 5000000

        state = UserPresenceState.default(user_id)
        state = state.copy_and_replace(
            state=PresenceState.ONLINE,
            last_active_ts=now,
            last_user_sync_ts=now,
            last_federation_update_ts=now - FEDERATION_TIMEOUT - 1,
        )

        new_state = handle_timeout(
            state, is_mine=False, syncing_user_ids=set(), now=now
        )

        self.assertIsNotNone(new_state)
        self.assertEquals(new_state.state, PresenceState.OFFLINE)

    def test_last_active(self):
        """A stale last_active_ts alone returns the state unchanged."""
        user_id = "@foo:bar"
        now = 5000000

        state = UserPresenceState.default(user_id)
        state = state.copy_and_replace(
            state=PresenceState.ONLINE,
            last_active_ts=now - LAST_ACTIVE_GRANULARITY - 1,
            last_user_sync_ts=now,
            last_federation_update_ts=now,
        )

        new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now)

        self.assertIsNotNone(new_state)
        self.assertEquals(state, new_state)
class PresenceJoinTestCase(unittest.HomeserverTestCase):
    """Tests remote servers get told about presence of users in the room when
    they join and when new local users join.
    """

    user_id = "@test:server"

    servlets = [room.register_servlets]

    def make_homeserver(self, reactor, clock):
        # Stub out the federation sender so the tests can assert on what
        # presence traffic would have been sent.
        hs = self.setup_test_homeserver(
            "server", http_client=None, federation_sender=Mock()
        )
        return hs

    def prepare(self, reactor, clock, hs):
        self.federation_sender = hs.get_federation_sender()
        self.event_builder_factory = hs.get_event_builder_factory()
        self.federation_handler = hs.get_handlers().federation_handler
        self.presence_handler = hs.get_presence_handler()

        self.store = hs.get_datastore()
        self.state = hs.get_state_handler()
        self.auth = hs.get_auth()

        # We don't actually check signatures in tests, so lets just create a
        # random key to use.
        self.random_signing_key = generate_signing_key("ver")

    def test_remote_joins(self):
        """A remote server joining a room is told the existing (online)
        presence of local room members, exactly once."""
        # We advance time to something that isn't 0, as we use 0 as a special
        # value.
        self.reactor.advance(1000000000000)

        # Create a room with two local users
        room_id = self.helper.create_room_as(self.user_id)
        self.helper.join(room_id, "@test2:server")

        # Mark test2 as online, test will be offline with a last_active of 0
        self.presence_handler.set_state(
            UserID.from_string("@test2:server"), {"presence": PresenceState.ONLINE}
        )
        self.reactor.pump([0])  # Wait for presence updates to be handled

        #
        # Test that a new server gets told about existing presence
        #

        self.federation_sender.reset_mock()

        # Add a new remote server to the room
        self._add_new_user(room_id, "@alice:server2")

        # We shouldn't have sent out any local presence *updates*
        self.federation_sender.send_presence.assert_not_called()

        # When new server is joined we send it the local users presence states.
        # We expect to only see user @test2:server, as @test:server is offline
        # and has a zero last_active_ts
        expected_state = self.get_success(
            self.presence_handler.current_state_for_user("@test2:server")
        )
        self.assertEqual(expected_state.state, PresenceState.ONLINE)
        self.federation_sender.send_presence_to_destinations.assert_called_once_with(
            destinations=["server2"], states=[expected_state]
        )

        #
        # Test that only the new server gets sent presence and not existing servers
        #

        self.federation_sender.reset_mock()
        self._add_new_user(room_id, "@bob:server3")

        self.federation_sender.send_presence.assert_not_called()
        self.federation_sender.send_presence_to_destinations.assert_called_once_with(
            destinations=["server3"], states=[expected_state]
        )

    def test_remote_gets_presence_when_local_user_joins(self):
        """When a local user joins a room, every remote server in the room is
        told that user's presence."""
        # We advance time to something that isn't 0, as we use 0 as a special
        # value.
        self.reactor.advance(1000000000000)

        # Create a room with one local users
        room_id = self.helper.create_room_as(self.user_id)

        # Mark test as online
        self.presence_handler.set_state(
            UserID.from_string("@test:server"), {"presence": PresenceState.ONLINE}
        )

        # Mark test2 as online, test will be offline with a last_active of 0.
        # Note we don't join them to the room yet
        self.presence_handler.set_state(
            UserID.from_string("@test2:server"), {"presence": PresenceState.ONLINE}
        )

        # Add servers to the room
        self._add_new_user(room_id, "@alice:server2")
        self._add_new_user(room_id, "@bob:server3")

        self.reactor.pump([0])  # Wait for presence updates to be handled

        #
        # Test that when a local join happens remote servers get told about it
        #

        self.federation_sender.reset_mock()

        # Join local user to room
        self.helper.join(room_id, "@test2:server")

        self.reactor.pump([0])  # Wait for presence updates to be handled

        # We shouldn't have sent out any local presence *updates*
        self.federation_sender.send_presence.assert_not_called()

        # We expect to only send test2 presence to server2 and server3
        expected_state = self.get_success(
            self.presence_handler.current_state_for_user("@test2:server")
        )
        self.assertEqual(expected_state.state, PresenceState.ONLINE)
        self.federation_sender.send_presence_to_destinations.assert_called_once_with(
            destinations=set(("server2", "server3")), states=[expected_state]
        )

    def _add_new_user(self, room_id, user_id):
        """Add new user to the room by creating an event and poking the federation API.
        """

        hostname = get_domain_from_id(user_id)

        room_version = self.get_success(self.store.get_room_version(room_id))

        # Build a membership event as if it originated from the remote server;
        # signatures aren't checked in tests, so the random key suffices.
        builder = EventBuilder(
            state=self.state,
            auth=self.auth,
            store=self.store,
            clock=self.clock,
            hostname=hostname,
            signing_key=self.random_signing_key,
            format_version=room_version_to_event_format(room_version),
            room_id=room_id,
            type=EventTypes.Member,
            sender=user_id,
            state_key=user_id,
            content={"membership": Membership.JOIN},
        )

        prev_event_ids = self.get_success(
            self.store.get_latest_event_ids_in_room(room_id)
        )

        event = self.get_success(builder.build(prev_event_ids))

        self.get_success(self.federation_handler.on_receive_pdu(hostname, event))

        # Check that it was successfully persisted.  (The original code
        # fetched the event twice; one redundant call has been removed.)
        self.get_success(self.store.get_event(event.event_id))
| 34.832759
| 88
| 0.641093
|
from mock import Mock, call
from signedjson.key import generate_signing_key
from synapse.api.constants import EventTypes, Membership, PresenceState
from synapse.events import room_version_to_event_format
from synapse.events.builder import EventBuilder
from synapse.handlers.presence import (
FEDERATION_PING_INTERVAL,
FEDERATION_TIMEOUT,
IDLE_TIMER,
LAST_ACTIVE_GRANULARITY,
SYNC_ONLINE_TIMEOUT,
handle_timeout,
handle_update,
)
from synapse.rest.client.v1 import room
from synapse.storage.presence import UserPresenceState
from synapse.types import UserID, get_domain_from_id
from tests import unittest
class PresenceUpdateTestCase(unittest.TestCase):
def test_offline_to_online(self):
wheel_timer = Mock()
user_id = "@foo:bar"
now = 5000000
prev_state = UserPresenceState.default(user_id)
new_state = prev_state.copy_and_replace(
state=PresenceState.ONLINE, last_active_ts=now
)
state, persist_and_notify, federation_ping = handle_update(
prev_state, new_state, is_mine=True, wheel_timer=wheel_timer, now=now
)
self.assertTrue(persist_and_notify)
self.assertTrue(state.currently_active)
self.assertEquals(new_state.state, state.state)
self.assertEquals(new_state.status_msg, state.status_msg)
self.assertEquals(state.last_federation_update_ts, now)
self.assertEquals(wheel_timer.insert.call_count, 3)
wheel_timer.insert.assert_has_calls(
[
call(now=now, obj=user_id, then=new_state.last_active_ts + IDLE_TIMER),
call(
now=now,
obj=user_id,
then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT,
),
call(
now=now,
obj=user_id,
then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY,
),
],
any_order=True,
)
def test_online_to_online(self):
wheel_timer = Mock()
user_id = "@foo:bar"
now = 5000000
prev_state = UserPresenceState.default(user_id)
prev_state = prev_state.copy_and_replace(
state=PresenceState.ONLINE, last_active_ts=now, currently_active=True
)
new_state = prev_state.copy_and_replace(
state=PresenceState.ONLINE, last_active_ts=now
)
state, persist_and_notify, federation_ping = handle_update(
prev_state, new_state, is_mine=True, wheel_timer=wheel_timer, now=now
)
self.assertFalse(persist_and_notify)
self.assertTrue(federation_ping)
self.assertTrue(state.currently_active)
self.assertEquals(new_state.state, state.state)
self.assertEquals(new_state.status_msg, state.status_msg)
self.assertEquals(state.last_federation_update_ts, now)
self.assertEquals(wheel_timer.insert.call_count, 3)
wheel_timer.insert.assert_has_calls(
[
call(now=now, obj=user_id, then=new_state.last_active_ts + IDLE_TIMER),
call(
now=now,
obj=user_id,
then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT,
),
call(
now=now,
obj=user_id,
then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY,
),
],
any_order=True,
)
def test_online_to_online_last_active_noop(self):
wheel_timer = Mock()
user_id = "@foo:bar"
now = 5000000
prev_state = UserPresenceState.default(user_id)
prev_state = prev_state.copy_and_replace(
state=PresenceState.ONLINE,
last_active_ts=now - LAST_ACTIVE_GRANULARITY - 10,
currently_active=True,
)
new_state = prev_state.copy_and_replace(
state=PresenceState.ONLINE, last_active_ts=now
)
state, persist_and_notify, federation_ping = handle_update(
prev_state, new_state, is_mine=True, wheel_timer=wheel_timer, now=now
)
self.assertFalse(persist_and_notify)
self.assertTrue(federation_ping)
self.assertTrue(state.currently_active)
self.assertEquals(new_state.state, state.state)
self.assertEquals(new_state.status_msg, state.status_msg)
self.assertEquals(state.last_federation_update_ts, now)
self.assertEquals(wheel_timer.insert.call_count, 3)
wheel_timer.insert.assert_has_calls(
[
call(now=now, obj=user_id, then=new_state.last_active_ts + IDLE_TIMER),
call(
now=now,
obj=user_id,
then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT,
),
call(
now=now,
obj=user_id,
then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY,
),
],
any_order=True,
)
def test_online_to_online_last_active(self):
wheel_timer = Mock()
user_id = "@foo:bar"
now = 5000000
prev_state = UserPresenceState.default(user_id)
prev_state = prev_state.copy_and_replace(
state=PresenceState.ONLINE,
last_active_ts=now - LAST_ACTIVE_GRANULARITY - 1,
currently_active=True,
)
new_state = prev_state.copy_and_replace(state=PresenceState.ONLINE)
state, persist_and_notify, federation_ping = handle_update(
prev_state, new_state, is_mine=True, wheel_timer=wheel_timer, now=now
)
self.assertTrue(persist_and_notify)
self.assertFalse(state.currently_active)
self.assertEquals(new_state.state, state.state)
self.assertEquals(new_state.status_msg, state.status_msg)
self.assertEquals(state.last_federation_update_ts, now)
self.assertEquals(wheel_timer.insert.call_count, 2)
wheel_timer.insert.assert_has_calls(
[
call(now=now, obj=user_id, then=new_state.last_active_ts + IDLE_TIMER),
call(
now=now,
obj=user_id,
then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT,
),
],
any_order=True,
)
def test_remote_ping_timer(self):
wheel_timer = Mock()
user_id = "@foo:bar"
now = 5000000
prev_state = UserPresenceState.default(user_id)
prev_state = prev_state.copy_and_replace(
state=PresenceState.ONLINE, last_active_ts=now
)
new_state = prev_state.copy_and_replace(state=PresenceState.ONLINE)
state, persist_and_notify, federation_ping = handle_update(
prev_state, new_state, is_mine=False, wheel_timer=wheel_timer, now=now
)
self.assertFalse(persist_and_notify)
self.assertFalse(federation_ping)
self.assertFalse(state.currently_active)
self.assertEquals(new_state.state, state.state)
self.assertEquals(new_state.status_msg, state.status_msg)
self.assertEquals(wheel_timer.insert.call_count, 1)
wheel_timer.insert.assert_has_calls(
[
call(
now=now,
obj=user_id,
then=new_state.last_federation_update_ts + FEDERATION_TIMEOUT,
)
],
any_order=True,
)
def test_online_to_offline(self):
wheel_timer = Mock()
user_id = "@foo:bar"
now = 5000000
prev_state = UserPresenceState.default(user_id)
prev_state = prev_state.copy_and_replace(
state=PresenceState.ONLINE, last_active_ts=now, currently_active=True
)
new_state = prev_state.copy_and_replace(state=PresenceState.OFFLINE)
state, persist_and_notify, federation_ping = handle_update(
prev_state, new_state, is_mine=True, wheel_timer=wheel_timer, now=now
)
self.assertTrue(persist_and_notify)
self.assertEquals(new_state.state, state.state)
self.assertEquals(state.last_federation_update_ts, now)
self.assertEquals(wheel_timer.insert.call_count, 0)
def test_online_to_idle(self):
wheel_timer = Mock()
user_id = "@foo:bar"
now = 5000000
prev_state = UserPresenceState.default(user_id)
prev_state = prev_state.copy_and_replace(
state=PresenceState.ONLINE, last_active_ts=now, currently_active=True
)
new_state = prev_state.copy_and_replace(state=PresenceState.UNAVAILABLE)
state, persist_and_notify, federation_ping = handle_update(
prev_state, new_state, is_mine=True, wheel_timer=wheel_timer, now=now
)
self.assertTrue(persist_and_notify)
self.assertEquals(new_state.state, state.state)
self.assertEquals(state.last_federation_update_ts, now)
self.assertEquals(new_state.state, state.state)
self.assertEquals(new_state.status_msg, state.status_msg)
self.assertEquals(wheel_timer.insert.call_count, 1)
wheel_timer.insert.assert_has_calls(
[
call(
now=now,
obj=user_id,
then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT,
)
],
any_order=True,
)
class PresenceTimeoutTestCase(unittest.TestCase):
def test_idle_timer(self):
user_id = "@foo:bar"
now = 5000000
state = UserPresenceState.default(user_id)
state = state.copy_and_replace(
state=PresenceState.ONLINE,
last_active_ts=now - IDLE_TIMER - 1,
last_user_sync_ts=now,
)
new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now)
self.assertIsNotNone(new_state)
self.assertEquals(new_state.state, PresenceState.UNAVAILABLE)
def test_sync_timeout(self):
user_id = "@foo:bar"
now = 5000000
state = UserPresenceState.default(user_id)
state = state.copy_and_replace(
state=PresenceState.ONLINE,
last_active_ts=0,
last_user_sync_ts=now - SYNC_ONLINE_TIMEOUT - 1,
)
new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now)
self.assertIsNotNone(new_state)
self.assertEquals(new_state.state, PresenceState.OFFLINE)
def test_sync_online(self):
user_id = "@foo:bar"
now = 5000000
state = UserPresenceState.default(user_id)
state = state.copy_and_replace(
state=PresenceState.ONLINE,
last_active_ts=now - SYNC_ONLINE_TIMEOUT - 1,
last_user_sync_ts=now - SYNC_ONLINE_TIMEOUT - 1,
)
new_state = handle_timeout(
state, is_mine=True, syncing_user_ids=set([user_id]), now=now
)
self.assertIsNotNone(new_state)
self.assertEquals(new_state.state, PresenceState.ONLINE)
def test_federation_ping(self):
user_id = "@foo:bar"
now = 5000000
state = UserPresenceState.default(user_id)
state = state.copy_and_replace(
state=PresenceState.ONLINE,
last_active_ts=now,
last_user_sync_ts=now,
last_federation_update_ts=now - FEDERATION_PING_INTERVAL - 1,
)
new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now)
self.assertIsNotNone(new_state)
self.assertEquals(new_state, new_state)
def test_no_timeout(self):
user_id = "@foo:bar"
now = 5000000
state = UserPresenceState.default(user_id)
state = state.copy_and_replace(
state=PresenceState.ONLINE,
last_active_ts=now,
last_user_sync_ts=now,
last_federation_update_ts=now,
)
new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now)
self.assertIsNone(new_state)
def test_federation_timeout(self):
user_id = "@foo:bar"
now = 5000000
state = UserPresenceState.default(user_id)
state = state.copy_and_replace(
state=PresenceState.ONLINE,
last_active_ts=now,
last_user_sync_ts=now,
last_federation_update_ts=now - FEDERATION_TIMEOUT - 1,
)
new_state = handle_timeout(
state, is_mine=False, syncing_user_ids=set(), now=now
)
self.assertIsNotNone(new_state)
self.assertEquals(new_state.state, PresenceState.OFFLINE)
def test_last_active(self):
user_id = "@foo:bar"
now = 5000000
state = UserPresenceState.default(user_id)
state = state.copy_and_replace(
state=PresenceState.ONLINE,
last_active_ts=now - LAST_ACTIVE_GRANULARITY - 1,
last_user_sync_ts=now,
last_federation_update_ts=now,
)
new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now)
self.assertIsNotNone(new_state)
self.assertEquals(state, new_state)
class PresenceJoinTestCase(unittest.HomeserverTestCase):
user_id = "@test:server"
servlets = [room.register_servlets]
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver(
"server", http_client=None, federation_sender=Mock()
)
return hs
def prepare(self, reactor, clock, hs):
self.federation_sender = hs.get_federation_sender()
self.event_builder_factory = hs.get_event_builder_factory()
self.federation_handler = hs.get_handlers().federation_handler
self.presence_handler = hs.get_presence_handler()
self.store = hs.get_datastore()
self.state = hs.get_state_handler()
self.auth = hs.get_auth()
# random key to use.
self.random_signing_key = generate_signing_key("ver")
def test_remote_joins(self):
# We advance time to something that isn't 0, as we use 0 as a special
self.reactor.advance(1000000000000)
room_id = self.helper.create_room_as(self.user_id)
self.helper.join(room_id, "@test2:server")
self.presence_handler.set_state(
UserID.from_string("@test2:server"), {"presence": PresenceState.ONLINE}
)
self.reactor.pump([0])
self.federation_sender.reset_mock()
self._add_new_user(room_id, "@alice:server2")
self.federation_sender.send_presence.assert_not_called()
# When new server is joined we send it the local users presence states.
# We expect to only see user @test2:server, as @test:server is offline
# and has a zero last_active_ts
expected_state = self.get_success(
self.presence_handler.current_state_for_user("@test2:server")
)
self.assertEqual(expected_state.state, PresenceState.ONLINE)
self.federation_sender.send_presence_to_destinations.assert_called_once_with(
destinations=["server2"], states=[expected_state]
)
#
# Test that only the new server gets sent presence and not existing servers
#
self.federation_sender.reset_mock()
self._add_new_user(room_id, "@bob:server3")
self.federation_sender.send_presence.assert_not_called()
self.federation_sender.send_presence_to_destinations.assert_called_once_with(
destinations=["server3"], states=[expected_state]
)
def test_remote_gets_presence_when_local_user_joins(self):
# We advance time to something that isn't 0, as we use 0 as a special
self.reactor.advance(1000000000000)
room_id = self.helper.create_room_as(self.user_id)
self.presence_handler.set_state(
UserID.from_string("@test:server"), {"presence": PresenceState.ONLINE}
)
self.presence_handler.set_state(
UserID.from_string("@test2:server"), {"presence": PresenceState.ONLINE}
)
# Add servers to the room
self._add_new_user(room_id, "@alice:server2")
self._add_new_user(room_id, "@bob:server3")
self.reactor.pump([0]) # Wait for presence updates to be handled
#
# Test that when a local join happens remote servers get told about it
#
self.federation_sender.reset_mock()
# Join local user to room
self.helper.join(room_id, "@test2:server")
self.reactor.pump([0]) # Wait for presence updates to be handled
# We shouldn't have sent out any local presence *updates*
self.federation_sender.send_presence.assert_not_called()
expected_state = self.get_success(
self.presence_handler.current_state_for_user("@test2:server")
)
self.assertEqual(expected_state.state, PresenceState.ONLINE)
self.federation_sender.send_presence_to_destinations.assert_called_once_with(
destinations=set(("server2", "server3")), states=[expected_state]
)
def _add_new_user(self, room_id, user_id):
hostname = get_domain_from_id(user_id)
room_version = self.get_success(self.store.get_room_version(room_id))
builder = EventBuilder(
state=self.state,
auth=self.auth,
store=self.store,
clock=self.clock,
hostname=hostname,
signing_key=self.random_signing_key,
format_version=room_version_to_event_format(room_version),
room_id=room_id,
type=EventTypes.Member,
sender=user_id,
state_key=user_id,
content={"membership": Membership.JOIN},
)
prev_event_ids = self.get_success(
self.store.get_latest_event_ids_in_room(room_id)
)
event = self.get_success(builder.build(prev_event_ids))
self.get_success(self.federation_handler.on_receive_pdu(hostname, event))
self.get_success(self.store.get_event(event.event_id))
self.get_success(self.store.get_event(event.event_id))
| true
| true
|
f70c6f0cdd25513b9e91b30b7f68b3fe31ad9b82
| 24,984
|
py
|
Python
|
openhtf/util/conf.py
|
ahaberlach/openhtf
|
5b63f12830a1020d32eeccc63f1759d4031345fc
|
[
"Apache-2.0"
] | null | null | null |
openhtf/util/conf.py
|
ahaberlach/openhtf
|
5b63f12830a1020d32eeccc63f1759d4031345fc
|
[
"Apache-2.0"
] | null | null | null |
openhtf/util/conf.py
|
ahaberlach/openhtf
|
5b63f12830a1020d32eeccc63f1759d4031345fc
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2014 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interface to OpenHTF configuration files.
As a matter of convention, OpenHTF configuration files should contain values
which are specific to an individual station (not station type). This is
intended to provide a means to decouple deployment of test code from
station-specific configuration or calibration.
Examples of the types of values commonly found in the configuration are
physical port names, IP addresses, calibrated light/sound levels, etc.
Configuration values should not be used to determine test flow, or to control
debug output.
Config keys must be declared as in the following example, where default_value
and description are optional:
from openhtf.util import conf
conf.declare('antimatter_intermix_constant',
default_value=3.14159,
description='Intermix constant calibrated for our warp core.')
Declared keys can be accessed directly as attributes of the conf module. To
avoid naming conflicts, configuration keys must begin with a lowercase letter.
They may also be accessed by treating the conf module as a dictionary, but this
method is discouraged and should only be used in favor of getattr().
from openhtf.util import conf
warp_core.SetIntermixConstant(conf.antimatter_intermix_constant)
# An example of when you might use dict-like access.
for idx in range(5):
warp_core.SetDilithiumRatio(idx, conf['dilthium_ratio_%s' % idx])
Another common mechanism for obtaining configuration values is to use the
conf.inject_positional_args decorator:
from openhtf.util import conf
@conf.inject_positional_args
def ModifyThePhaseVariance(antimatter_intermix_constant, phase_variance):
return antimatter_intermix_constant * phase_variance
# antimatter_intermix_constant will be taken from the configuration value.
x = ModifyThePhaseVariance(phase_variance=2.71828)
Decorating a function with conf.inject_positional_args forces all other
arguments to be passed by keyword in order to avoid ambiguity in the values of
positional args. Values passed via keyword that also exist in the config will
override config values and log a warning message. Keyword args in the function
declaration will not be overridden (because it would be ambiguous which default
to use), and any overlap in keyword arg names and config keys will result in a
warning message.
If the configuration key is declared but no default_value is provided and no
value has been loaded, then no value will be passed, and a TypeError will be
raised unless the value is passed via keyword. Essentially, if `keyword_arg in
conf` evaluates to True, then that keyword arg will be provided from the
configuration unless overridden in the kwargs passed to the function. Otherwise
keyword_arg must be passed via kwargs at function invocation time.
The conf module supports 'in' checks, where `key in conf` will evaluate to True
if conf[key] would successfully provide a value. That is, if either a value
has been loaded or a default_value was declared.
Configuration values may be loaded directly or from a yaml or json file. If no
configuration is loaded, default values will still be accessible. Loading a
configuration always overrides default values, but only overrides previously
loaded values if _override=True (default) for the load* method used. Some
examples of how to load a configuration:
from openhtf.util import conf
conf.declare('antimatter_intermix_constant')
conf.declare('phase_variance')
conf.load(antimatter_intermix_constant=3.14,
phase_variance=2.718)
conf.load_from_dict({
'antimatter_intermix_constant': 3.14,
'phase_variance': 2.718,
})
conf.load_from_file('config.json')
conf.load_from_file('config.yaml')
Note that any of the load* methods here accept an _override keyword argument
that defaults to True, but may be set False to prevent overriding previously
loaded values. Regardless of whether _override is True or False, a message
will be logged indicating how the duplicate value was handled.
conf.load_from_file() attempts to parse the filename given as JSON and as YAML,
if neither succeeds, an exception will be raised. In either case, the value
parsed must be a dictionary mapping configuration key to value. Complex
configuration values are discouraged; they should be kept to single values or
lists of values when possible.
Lastly, configuration values may also be provided via the --config-value flag,
but this is discouraged, and should only be used for debugging purposes.
Configuration values loaded via commandline flags, either --config-file or
--config-value, are not checked against Declarations. This allows for using
configuration files that are supersets of required configuration. Declarations
are *always* checked upon configuration value access, however, so you still
must declare any keys you wish to use.
Loaded configuration values may be purged via the reset() method, but this
should only be used for testing purposes. This will reset the configuration
state to what it was before any load* methods were called (defaults loaded
and flag values used, either directly or from --config-file).
A recommended alternative to using reset() is the @save_and_restore decorator,
which allows you to decorate a function or method so that during execution
of the decorated callable, configuration values are altered (and restored
after execution of that callable). For example:
conf.load(foo='foo')
@conf.save_and_restore(foo='bar')
def do_stuff():
print 'foo has value: ', conf.foo
print 'foo before call: ', conf.foo
do_stuff()
print 'foo after call: ', conf.foo
This example prints:
foo before call: foo
foo has value: bar
foo after call: foo
This is useful primarily for unittest methods (see util/test.py for specific
examples of unittest usages). Note that config overrides may be specified at
decoration time, but do not have to be:
@conf.save_and_restore
def do_stuff():
conf.foo = 'bar'
This is also valid. The entire configuration is restored to the state it had
upon excution of the decorated callable, regardless of which keys are updated
in the decorator or in the decorated callable.
"""
import argparse
import functools
import inspect
import logging
import sys
import threading
import yaml
import mutablerecords
from . import argv
from . import threads
# If provided, --config-file will cause the given file to be load()ed when the
# conf module is initially imported.
ARG_PARSER = argv.ModuleParser()
ARG_PARSER.add_argument(
'--config-file', type=argparse.FileType('r'),
help='File from which to load configuration values.')
ARG_PARSER.add_argument(
'--config-value', action='append', default=[],
help='Allows specifying a configuration key=value on the command line. '
'The format should be --config-value=key=value. This value will override '
'any loaded value, and will be a string.')
class Configuration(object): # pylint: disable=too-many-instance-attributes
"""A singleton class to replace the 'conf' module.
This class provides the configuration interface described in the module
docstring. All attribuets/methods must not begin with a lowercase letter so
as to avoid naming conflicts with configuration keys.
"""
class ConfigurationInvalidError(Exception):
"""Indicates the configuration format was invalid or couldn't be read."""
class KeyAlreadyDeclaredError(Exception):
"""Indicates that a configuration key was already declared."""
class UndeclaredKeyError(Exception):
"""Indicates that a key was required but not predeclared."""
class InvalidKeyError(Exception):
"""Raised when an invalid key is declared or accessed."""
class UnsetKeyError(Exception):
"""Raised when a key value is requested but we have no value for it."""
# pylint: disable=invalid-name,bad-super-call
class Declaration(mutablerecords.Record(
'Declaration', ['name'], {
'description': None, 'default_value': None, 'has_default': False})):
"""Record type encapsulating information about a config declaration."""
def __init__(self, *args, **kwargs):
super(type(self), self).__init__(*args, **kwargs)
# Track this separately to allow for None as a default value, override
# any value that was passed in explicitly - don't do that.
self.has_default = 'default_value' in kwargs
# pylint: enable=invalid-name,bad-super-call
__slots__ = ('_logger', '_lock', '_modules', '_declarations',
'_flag_values', '_flags', '_loaded_values', 'ARG_PARSER',
'__name__')
def __init__(self, logger, lock, parser, **kwargs):
"""Initializes the configuration state.
We have to pull everything we need from global scope into here because we
will be swapping out the module with this instance and will lose any global
references.
Args:
logger: Logger to use for logging messages within this class.
lock: Threading.lock to use for locking access to config values.
**kwargs: Modules we need to access within this class.
"""
self._logger = logger
self._lock = lock
self._modules = kwargs
self._declarations = {}
self.ARG_PARSER = parser
# Parse just the flags we care about, since this happens at import time.
self._flags, _ = parser.parse_known_args()
self._flag_values = {}
# Populate flag_values from flags now.
self.load_flag_values()
# Initialize self._loaded_values and load from --config-file if it's set.
self.reset()
def load_flag_values(self, flags=None):
"""Load flag values given from command line flags.
Args:
flags: An argparse Namespace containing the command line flags.
"""
if flags is None:
flags = self._flags
for keyval in flags.config_value:
k,v = keyval.split('=', 1)
v = self._modules['yaml'].load(v) if isinstance(v, str) else v
self._flag_values.setdefault(k, v)
@staticmethod
def _is_valid_key(key):
"""Return True if key is a valid configuration key."""
return key and key[0].islower()
def __setattr__(self, attr, value):
"""Provide a useful error when attempting to set a value via setattr()."""
if self._is_valid_key(attr):
raise AttributeError("Can't set conf values by attribute, use load()")
# __slots__ is defined above, so this will raise an AttributeError if the
# attribute isn't one we expect; this limits the number of ways to abuse the
# conf module singleton instance. Also note that we can't use super()
# normally here because of the sys.modules swap (Configuration is no longer
# defined, and evaluates to None if used here).
# pylint: disable=bad-super-call
super(type(self), self).__setattr__(attr, value)
# Don't use synchronized on this one, because __getitem__ handles it.
def __getattr__(self, attr): # pylint: disable=invalid-name
"""Get a config value via attribute access."""
if self._is_valid_key(attr):
return self[attr]
# Config keys all begin with a lowercase letter, so treat this normally.
raise AttributeError("'%s' object has no attribute '%s'" %
(type(self).__name__, attr))
@threads.synchronized
def __getitem__(self, item): # pylint: disable=invalid-name
"""Get a config value via item access.
Order of precedence is:
- Value provided via --config-value flag.
- Value loaded via load*() methods.
- Default value as declared with conf.declare()
Args:
item: Config key name to get.
"""
if item not in self._declarations:
raise self.UndeclaredKeyError('Configuration key not declared', item)
if item in self._flag_values:
if item in self._loaded_values:
self._logger.warning(
'Overriding loaded value for %s (%s) with flag value: %s',
item, self._loaded_values[item], self._flag_values[item])
return self._flag_values[item]
if item in self._loaded_values:
return self._loaded_values[item]
if self._declarations[item].has_default:
return self._declarations[item].default_value
raise self.UnsetKeyError(
'Configuration value not set and has no default', item)
@threads.synchronized
def __contains__(self, name): # pylint: disable=invalid-name
"""True if we have a value for name."""
return (name in self._declarations and
(self._declarations[name].has_default or
name in self._loaded_values or
name in self._flag_values))
@threads.synchronized
def declare(self, name, description=None, **kwargs):
"""Declare a configuration key with the given name.
Args:
name: Configuration key to declare, must not have been already declared.
description: If provided, use this as the description for this key.
**kwargs: Other kwargs to pass to the Declaration, only default_value
is currently supported.
"""
if not self._is_valid_key(name):
raise self.InvalidKeyError(
'Invalid key name, must begin with a lowercase letter', name)
if name in self._declarations:
raise self.KeyAlreadyDeclaredError(
'Configuration key already declared', name)
self._declarations[name] = self.Declaration(
name, description=description, **kwargs)
@threads.synchronized
def reset(self):
"""Reset the loaded state of the configuration to what it was at import.
Note that this does *not* reset values set by commandline flags or loaded
from --config-file (in fact, any values loaded from --config-file that have
been overridden are reset to their value from --config-file).
"""
# Populate loaded_values with values from --config-file, if it was given.
self._loaded_values = {}
if self._flags.config_file is not None:
self.load_from_file(self._flags.config_file, _allow_undeclared=True)
def load_from_file(self, yamlfile, _override=True, _allow_undeclared=False):
"""Loads the configuration from a file.
Parsed contents must be a single dict mapping config key to value.
Args:
yamlfile: The opened file object to load configuration from.
See load_from_dict() for other args' descriptions.
Raises:
ConfigurationInvalidError: If configuration file can't be read, or can't
be parsed as either YAML (or JSON, which is a subset of YAML).
"""
self._logger.info('Loading configuration from file: %s', yamlfile)
try:
parsed_yaml = self._modules['yaml'].safe_load(yamlfile.read())
except self._modules['yaml'].YAMLError as exception:
raise self.ConfigurationInvalidError(
'Failed to load from %s as YAML' % yamlfile, exception)
if not isinstance(parsed_yaml, dict):
# Parsed YAML, but it's not a dict.
raise self.ConfigurationInvalidError(
'YAML parsed, but wrong type, should be dict', parsed_yaml)
self._logger.debug('Configuration loaded from file: %s', parsed_yaml)
self.load_from_dict(
parsed_yaml, _override=_override, _allow_undeclared=_allow_undeclared)
def load(self, _override=True, _allow_undeclared=False, **kwargs):
"""load configuration values from kwargs, see load_from_dict()."""
self.load_from_dict(
kwargs, _override=_override, _allow_undeclared=_allow_undeclared)
@threads.synchronized
def load_from_dict(self, dictionary, _override=True, _allow_undeclared=False):
"""Loads the config with values from a dictionary instead of a file.
This is meant for testing and bin purposes and shouldn't be used in most
applications.
Args:
dictionary: The dictionary containing config keys/values to update.
_override: If True, new values will override previous values.
_allow_undeclared: If True, silently load undeclared keys, otherwise
warn and ignore the value. Typically used for loading config
files before declarations have been evaluated.
"""
undeclared_keys = []
for key, value in dictionary.items():
# Warn in this case. We raise if you try to access a config key that
# hasn't been declared, but we don't raise here so that you can use
# configuration files that are supersets of required configuration for
# any particular test station.
if key not in self._declarations and not _allow_undeclared:
undeclared_keys.append(key)
continue
if key in self._loaded_values:
if _override:
self._logger.info(
'Overriding previously loaded value for %s (%s) with value: %s',
key, self._loaded_values[key], value)
else:
self._logger.info(
'Ignoring new value (%s), keeping previous value for %s: %s',
value, key, self._loaded_values[key])
continue
self._loaded_values[key] = value
if undeclared_keys:
self._logger.warning('Ignoring undeclared configuration keys: %s',
undeclared_keys)
@threads.synchronized
def _asdict(self):
"""Create a dictionary snapshot of the current config values."""
# Start with any default values we have, and override with loaded values,
# and then override with flag values.
retval = {key: self._declarations[key].default_value for
key in self._declarations if self._declarations[key].has_default}
retval.update(self._loaded_values)
# Only update keys that are declared so we don't allow injecting
# un-declared keys via commandline flags.
for key, value in self._flag_values.items():
if key in self._declarations:
retval[key] = value
return retval
@property
def help_text(self):
"""Return a string with all config keys and their descriptions."""
result = []
for name in sorted(self._declarations.keys()):
result.append(name)
result.append('-' * len(name))
decl = self._declarations[name]
if decl.description:
result.append(decl.description.strip())
else:
result.append('(no description found)')
if decl.has_default:
result.append('')
quotes = '"' if type(decl.default_value) is str else ''
result.append(' default_value={quotes}{val}{quotes}'.format(
quotes=quotes, val=decl.default_value))
result.append('')
result.append('')
return '\n'.join(result)
def save_and_restore(self, _func=None, **config_values):
"""Decorator for saving conf state and restoring it after a function.
This decorator is primarily for use in tests, where conf keys may be updated
for individual test cases, but those values need to be reverted after the
test case is done.
Examples:
conf.declare('my_conf_key')
@conf.save_and_restore
def MyTestFunc():
conf.load(my_conf_key='baz')
SomeFuncUnderTestThatUsesMyConfKey()
conf.load(my_conf_key='foo')
MyTestFunc()
print conf.my_conf_key # Prints 'foo', *NOT* 'baz'
# Without the save_and_restore decorator, MyTestFunc() would have had the
# side effect of altering the conf value of 'my_conf_key' to 'baz'.
# Config keys can also be initialized for the context inline at decoration
# time. This is the same as setting them at the beginning of the
# function, but is a little clearer syntax if you know ahead of time what
# config keys and values you need to set.
@conf.save_and_restore(my_conf_key='baz')
def MyOtherTestFunc():
print conf.my_conf_key # Prints 'baz'
MyOtherTestFunc()
print conf.my_conf_key # Prints 'foo' again, for the same reason.
Args:
_func: The function to wrap. The returned wrapper will invoke the
function and restore the config to the state it was in at invokation.
**config_values: Config keys can be set inline at decoration time, see
examples. Note that config keys can't begin with underscore, so
there can be no name collision with _func.
Returns:
Wrapper to replace _func, as per Python decorator semantics.
"""
functools = self._modules['functools'] # pylint: disable=redefined-outer-name
if not _func:
return functools.partial(self.save_and_restore, **config_values)
@functools.wraps(_func)
def _saving_wrapper(*args, **kwargs):
saved_config = dict(self._loaded_values)
try:
self.load_from_dict(config_values)
return _func(*args, **kwargs)
finally:
self._loaded_values = saved_config # pylint: disable=attribute-defined-outside-init
return _saving_wrapper
def inject_positional_args(self, method):
"""Decorator for injecting positional arguments from the configuration.
This decorator wraps the given method, so that any positional arguments are
passed with corresponding values from the configuration. The name of the
positional argument must match the configuration key.
Keyword arguments are *NEVER* modified, even if their names match
configuration keys. Avoid naming keyword args names that are also
configuration keys to avoid confusion.
Additional positional arguments may be used that do not appear in the
configuration, but those arguments *MUST* be specified as keyword arguments
upon invokation of the method. This is to avoid ambiguity in which
positional arguments are getting which values.
Args:
method: The method to wrap.
Returns:
A wrapper that, when invoked, will call the wrapped method, passing in
configuration values for positional arguments.
"""
inspect = self._modules['inspect']
argspec = inspect.getargspec(method)
# Index in argspec.args of the first keyword argument. This index is a
# negative number if there are any kwargs, or 0 if there are no kwargs.
keyword_arg_index = -1 * len(argspec.defaults or [])
arg_names = argspec.args[:keyword_arg_index or None]
kwarg_names = argspec.args[len(arg_names):]
functools = self._modules['functools'] # pylint: disable=redefined-outer-name
# Create the actual method wrapper, all we do is update kwargs. Note we
# don't pass any *args through because there can't be any - we've filled
# them all in with values from the configuration. Any positional args that
# are missing from the configuration *must* be explicitly specified as
# kwargs.
@functools.wraps(method)
def method_wrapper(**kwargs):
"""Wrapper that pulls values from openhtf.util.conf."""
# Check for keyword args with names that are in the config so we can warn.
for kwarg in kwarg_names:
if kwarg in self:
self._logger.warning('Keyword arg %s not set from configuration, but '
'is a configuration key', kwarg)
# Set positional args from configuration values.
final_kwargs = {name: self[name] for name in arg_names if name in self}
for overridden in set(kwargs) & set(final_kwargs):
self._logger.warning('Overriding configuration value for kwarg %s (%s) '
'with provided kwarg value: %s', overridden,
self[overridden], kwargs[overridden])
final_kwargs.update(kwargs)
if inspect.ismethod(method):
name = '%s.%s' % (method.__self__.__class__.__name__, method.__name__)
else:
name = method.__name__
self._logger.debug('Invoking %s with %s', name, final_kwargs)
return method(**final_kwargs)
# We have to check for a 'self' parameter explicitly because Python doesn't
# pass it as a keyword arg, it passes it as the first positional arg.
if argspec.args[0] == 'self':
@functools.wraps(method)
def self_wrapper(self, **kwargs): # pylint: disable=invalid-name
"""Wrapper that pulls values from openhtf.util.conf."""
kwargs['self'] = self
return method_wrapper(**kwargs)
return self_wrapper
return method_wrapper
# Swap out the module for a singleton instance of Configuration so we can
# provide __getattr__ and __getitem__ functionality at the module level.
sys.modules[__name__] = Configuration(
logging.getLogger(__name__), threading.RLock(), ARG_PARSER,
functools=functools, inspect=inspect, yaml=yaml)
| 40.957377
| 91
| 0.716459
|
import argparse
import functools
import inspect
import logging
import sys
import threading
import yaml
import mutablerecords
from . import argv
from . import threads
ARG_PARSER = argv.ModuleParser()
ARG_PARSER.add_argument(
'--config-file', type=argparse.FileType('r'),
help='File from which to load configuration values.')
ARG_PARSER.add_argument(
'--config-value', action='append', default=[],
help='Allows specifying a configuration key=value on the command line. '
'The format should be --config-value=key=value. This value will override '
'any loaded value, and will be a string.')
class Configuration(object):
class ConfigurationInvalidError(Exception):
class KeyAlreadyDeclaredError(Exception):
class UndeclaredKeyError(Exception):
class InvalidKeyError(Exception):
class UnsetKeyError(Exception):
class Declaration(mutablerecords.Record(
'Declaration', ['name'], {
'description': None, 'default_value': None, 'has_default': False})):
def __init__(self, *args, **kwargs):
super(type(self), self).__init__(*args, **kwargs)
self.has_default = 'default_value' in kwargs
# pylint: enable=invalid-name,bad-super-call
__slots__ = ('_logger', '_lock', '_modules', '_declarations',
'_flag_values', '_flags', '_loaded_values', 'ARG_PARSER',
'__name__')
def __init__(self, logger, lock, parser, **kwargs):
self._logger = logger
self._lock = lock
self._modules = kwargs
self._declarations = {}
self.ARG_PARSER = parser
# Parse just the flags we care about, since this happens at import time.
self._flags, _ = parser.parse_known_args()
self._flag_values = {}
# Populate flag_values from flags now.
self.load_flag_values()
# Initialize self._loaded_values and load from --config-file if it's set.
self.reset()
def load_flag_values(self, flags=None):
if flags is None:
flags = self._flags
for keyval in flags.config_value:
k,v = keyval.split('=', 1)
v = self._modules['yaml'].load(v) if isinstance(v, str) else v
self._flag_values.setdefault(k, v)
@staticmethod
def _is_valid_key(key):
return key and key[0].islower()
def __setattr__(self, attr, value):
if self._is_valid_key(attr):
raise AttributeError("Can't set conf values by attribute, use load()")
# __slots__ is defined above, so this will raise an AttributeError if the
# attribute isn't one we expect; this limits the number of ways to abuse the
# normally here because of the sys.modules swap (Configuration is no longer
# defined, and evaluates to None if used here).
# pylint: disable=bad-super-call
super(type(self), self).__setattr__(attr, value)
# Don't use synchronized on this one, because __getitem__ handles it.
def __getattr__(self, attr):
if self._is_valid_key(attr):
return self[attr]
raise AttributeError("'%s' object has no attribute '%s'" %
(type(self).__name__, attr))
@threads.synchronized
def __getitem__(self, item):
if item not in self._declarations:
raise self.UndeclaredKeyError('Configuration key not declared', item)
if item in self._flag_values:
if item in self._loaded_values:
self._logger.warning(
'Overriding loaded value for %s (%s) with flag value: %s',
item, self._loaded_values[item], self._flag_values[item])
return self._flag_values[item]
if item in self._loaded_values:
return self._loaded_values[item]
if self._declarations[item].has_default:
return self._declarations[item].default_value
raise self.UnsetKeyError(
'Configuration value not set and has no default', item)
@threads.synchronized
def __contains__(self, name):
return (name in self._declarations and
(self._declarations[name].has_default or
name in self._loaded_values or
name in self._flag_values))
@threads.synchronized
def declare(self, name, description=None, **kwargs):
if not self._is_valid_key(name):
raise self.InvalidKeyError(
'Invalid key name, must begin with a lowercase letter', name)
if name in self._declarations:
raise self.KeyAlreadyDeclaredError(
'Configuration key already declared', name)
self._declarations[name] = self.Declaration(
name, description=description, **kwargs)
@threads.synchronized
def reset(self):
self._loaded_values = {}
if self._flags.config_file is not None:
self.load_from_file(self._flags.config_file, _allow_undeclared=True)
def load_from_file(self, yamlfile, _override=True, _allow_undeclared=False):
self._logger.info('Loading configuration from file: %s', yamlfile)
try:
parsed_yaml = self._modules['yaml'].safe_load(yamlfile.read())
except self._modules['yaml'].YAMLError as exception:
raise self.ConfigurationInvalidError(
'Failed to load from %s as YAML' % yamlfile, exception)
if not isinstance(parsed_yaml, dict):
raise self.ConfigurationInvalidError(
'YAML parsed, but wrong type, should be dict', parsed_yaml)
self._logger.debug('Configuration loaded from file: %s', parsed_yaml)
self.load_from_dict(
parsed_yaml, _override=_override, _allow_undeclared=_allow_undeclared)
def load(self, _override=True, _allow_undeclared=False, **kwargs):
self.load_from_dict(
kwargs, _override=_override, _allow_undeclared=_allow_undeclared)
@threads.synchronized
def load_from_dict(self, dictionary, _override=True, _allow_undeclared=False):
undeclared_keys = []
for key, value in dictionary.items():
# Warn in this case. We raise if you try to access a config key that
# hasn't been declared, but we don't raise here so that you can use
# configuration files that are supersets of required configuration for
# any particular test station.
if key not in self._declarations and not _allow_undeclared:
undeclared_keys.append(key)
continue
if key in self._loaded_values:
if _override:
self._logger.info(
'Overriding previously loaded value for %s (%s) with value: %s',
key, self._loaded_values[key], value)
else:
self._logger.info(
'Ignoring new value (%s), keeping previous value for %s: %s',
value, key, self._loaded_values[key])
continue
self._loaded_values[key] = value
if undeclared_keys:
self._logger.warning('Ignoring undeclared configuration keys: %s',
undeclared_keys)
@threads.synchronized
def _asdict(self):
# Start with any default values we have, and override with loaded values,
# and then override with flag values.
retval = {key: self._declarations[key].default_value for
key in self._declarations if self._declarations[key].has_default}
retval.update(self._loaded_values)
# Only update keys that are declared so we don't allow injecting
for key, value in self._flag_values.items():
if key in self._declarations:
retval[key] = value
return retval
@property
def help_text(self):
result = []
for name in sorted(self._declarations.keys()):
result.append(name)
result.append('-' * len(name))
decl = self._declarations[name]
if decl.description:
result.append(decl.description.strip())
else:
result.append('(no description found)')
if decl.has_default:
result.append('')
quotes = '"' if type(decl.default_value) is str else ''
result.append(' default_value={quotes}{val}{quotes}'.format(
quotes=quotes, val=decl.default_value))
result.append('')
result.append('')
return '\n'.join(result)
def save_and_restore(self, _func=None, **config_values):
functools = self._modules['functools'] # pylint: disable=redefined-outer-name
if not _func:
return functools.partial(self.save_and_restore, **config_values)
@functools.wraps(_func)
def _saving_wrapper(*args, **kwargs):
saved_config = dict(self._loaded_values)
try:
self.load_from_dict(config_values)
return _func(*args, **kwargs)
finally:
self._loaded_values = saved_config # pylint: disable=attribute-defined-outside-init
return _saving_wrapper
def inject_positional_args(self, method):
inspect = self._modules['inspect']
argspec = inspect.getargspec(method)
# Index in argspec.args of the first keyword argument. This index is a
# negative number if there are any kwargs, or 0 if there are no kwargs.
keyword_arg_index = -1 * len(argspec.defaults or [])
arg_names = argspec.args[:keyword_arg_index or None]
kwarg_names = argspec.args[len(arg_names):]
functools = self._modules['functools'] # pylint: disable=redefined-outer-name
# Create the actual method wrapper, all we do is update kwargs. Note we
# don't pass any *args through because there can't be any - we've filled
# them all in with values from the configuration. Any positional args that
# are missing from the configuration *must* be explicitly specified as
# kwargs.
@functools.wraps(method)
def method_wrapper(**kwargs):
# Check for keyword args with names that are in the config so we can warn.
for kwarg in kwarg_names:
if kwarg in self:
self._logger.warning('Keyword arg %s not set from configuration, but '
'is a configuration key', kwarg)
# Set positional args from configuration values.
final_kwargs = {name: self[name] for name in arg_names if name in self}
for overridden in set(kwargs) & set(final_kwargs):
self._logger.warning('Overriding configuration value for kwarg %s (%s) '
'with provided kwarg value: %s', overridden,
self[overridden], kwargs[overridden])
final_kwargs.update(kwargs)
if inspect.ismethod(method):
name = '%s.%s' % (method.__self__.__class__.__name__, method.__name__)
else:
name = method.__name__
self._logger.debug('Invoking %s with %s', name, final_kwargs)
return method(**final_kwargs)
# We have to check for a 'self' parameter explicitly because Python doesn't
# pass it as a keyword arg, it passes it as the first positional arg.
if argspec.args[0] == 'self':
@functools.wraps(method)
def self_wrapper(self, **kwargs): # pylint: disable=invalid-name
kwargs['self'] = self
return method_wrapper(**kwargs)
return self_wrapper
return method_wrapper
# Swap out the module for a singleton instance of Configuration so we can
# provide __getattr__ and __getitem__ functionality at the module level.
sys.modules[__name__] = Configuration(
logging.getLogger(__name__), threading.RLock(), ARG_PARSER,
functools=functools, inspect=inspect, yaml=yaml)
| true
| true
|
f70c71864f7ad76dc086116e24ff7f441070652a
| 218
|
py
|
Python
|
tests/models.py
|
wearespindle/django-form-utils
|
fd371a23a08dfecfae4f2c0724cd643abcc80dad
|
[
"BSD-3-Clause"
] | null | null | null |
tests/models.py
|
wearespindle/django-form-utils
|
fd371a23a08dfecfae4f2c0724cd643abcc80dad
|
[
"BSD-3-Clause"
] | null | null | null |
tests/models.py
|
wearespindle/django-form-utils
|
fd371a23a08dfecfae4f2c0724cd643abcc80dad
|
[
"BSD-3-Clause"
] | null | null | null |
from django.db import models
class Person(models.Model):
age = models.IntegerField()
name = models.CharField(max_length=100)
class Document(models.Model):
myfile = models.FileField(upload_to="uploads")
| 19.818182
| 50
| 0.733945
|
from django.db import models
class Person(models.Model):
age = models.IntegerField()
name = models.CharField(max_length=100)
class Document(models.Model):
myfile = models.FileField(upload_to="uploads")
| true
| true
|
f70c71c7542da78519981b7abb3df189758ddcd1
| 2,555
|
py
|
Python
|
Simulation/macd_signal_line.py
|
SEPHIRONOVA/TradingDataAnalyzer
|
314cb5bc5f5327ceb16d0ce4e283694eb3f16e99
|
[
"MIT"
] | null | null | null |
Simulation/macd_signal_line.py
|
SEPHIRONOVA/TradingDataAnalyzer
|
314cb5bc5f5327ceb16d0ce4e283694eb3f16e99
|
[
"MIT"
] | null | null | null |
Simulation/macd_signal_line.py
|
SEPHIRONOVA/TradingDataAnalyzer
|
314cb5bc5f5327ceb16d0ce4e283694eb3f16e99
|
[
"MIT"
] | null | null | null |
from Simulation.calculation_status import CalculationStatus
from Simulation.sign_function import SignFunction
from Simulation.mcad import Mcad
from Simulation.market_snapshot import MarketSnapshot
from Simulation.stock_snapshot_helper import StockSnapshotHelper
from Simulation.visualization_data import VisualizationData
from Simulation.exponential_moving_average import ExponentialMovingAverage
__author__ = 'Raymond & Albert'
class McadSignalLine:
def __init__(self, total_capital, num_stocks):
self.transaction_amount = total_capital / num_stocks
self.mcads = []
self.old_mcads = []
self.mcad_EMA = []
self.old_mcad_EMA = []
self.visualization_data = VisualizationData()
for count in range(num_stocks):
self.mcads.append(Mcad())
for count in range(num_stocks):
self.old_mcads.append(CalculationStatus.Invalid)
for count in range(num_stocks):
self.mcad_EMA.append()
for count in range(num_stocks):
self.old_mcad_EMA.append(CalculationStatus.Invalid)
def notify(self, market_snapshot: MarketSnapshot):
decisions = []
for i, stock_snapshot in enumerate(market_snapshot.stock_snapshots):
stock_snapshot_helper = StockSnapshotHelper(stock_snapshot)
mid_price = stock_snapshot_helper.get_mid_price()
curr_mcad = self.mcads[i].evaluate(mid_price)
self.visualization_data.add_price(stock_snapshot.ticker, mid_price)
del_mcad = 0
if curr_mcad == CalculationStatus.Invalid:
self.visualization_data.add_mcad(stock_snapshot.ticker, 0)
continue
else:
self.visualization_data.add_mcad(stock_snapshot.ticker, curr_mcad)
self.mcad_EMA = ExponentialMovingAverage(9)
if self.old_mcads[i] == CalculationStatus.Invalid:
self.old_mcads[i] = curr_mcad
continue
curr_mcad_EMA = self.mcad_EMA.evaluate(curr_mcad)
if curr_mcad_EMA == CalculationStatus.Invalid:
self.old_mcad_EMA[i] = curr_mcad_EMA
continue
else:
del_mcad = SignFunction.evaluate(curr_mcad-curr_mcad_EMA) - SignFunction.evaluate(self.old_mcads[i]-curr_mcad_EMA)
self.old_mcad_EMA[i] = curr_mcad_EMA
if del_mcad > 0:
decisions.append((stock_snapshot.ticker, -self.transaction_amount))
elif del_mcad < 0:
decisions.append((stock_snapshot.ticker, self.transaction_amount))
return decisions
def reset(self):
for mcad in self.mcads:
mcad.reset()
self.old_mcads = [CalculationStatus.Invalid for old_mcad in self.old_mcads]
visualization_data_holder = self.visualization_data
self.visualization_data = VisualizationData()
return visualization_data_holder
| 31.158537
| 118
| 0.78317
|
from Simulation.calculation_status import CalculationStatus
from Simulation.sign_function import SignFunction
from Simulation.mcad import Mcad
from Simulation.market_snapshot import MarketSnapshot
from Simulation.stock_snapshot_helper import StockSnapshotHelper
from Simulation.visualization_data import VisualizationData
from Simulation.exponential_moving_average import ExponentialMovingAverage
__author__ = 'Raymond & Albert'
class McadSignalLine:
    """Trading strategy that trades MACD / signal-line crossovers.

    One Mcad (MACD) tracker and one 9-period EMA (the signal line) are kept
    per stock.  A sign change of (MACD - signal) between consecutive ticks
    produces a trade of a fixed capital slice.

    NOTE(review): CalculationStatus and SignFunction are referenced here but
    not imported in this file's visible header -- confirm they are brought
    into scope elsewhere (presumably from the Simulation package).
    """

    def __init__(self, total_capital, num_stocks):
        # Fixed capital committed per trade, split evenly across stocks.
        self.transaction_amount = total_capital / num_stocks
        self.mcads = [Mcad() for _ in range(num_stocks)]
        self.old_mcads = [CalculationStatus.Invalid for _ in range(num_stocks)]
        # BUG FIX: the original called self.mcad_EMA.append() with no
        # argument (a TypeError at construction time); the intent was one
        # 9-period EMA signal line per stock, mirroring self.mcads.
        self.mcad_EMA = [ExponentialMovingAverage(9) for _ in range(num_stocks)]
        self.old_mcad_EMA = [CalculationStatus.Invalid for _ in range(num_stocks)]
        self.visualization_data = VisualizationData()

    def notify(self, market_snapshot: MarketSnapshot):
        """Process one market snapshot; return a list of (ticker, amount)."""
        decisions = []
        for i, stock_snapshot in enumerate(market_snapshot.stock_snapshots):
            stock_snapshot_helper = StockSnapshotHelper(stock_snapshot)
            mid_price = stock_snapshot_helper.get_mid_price()
            curr_mcad = self.mcads[i].evaluate(mid_price)
            self.visualization_data.add_price(stock_snapshot.ticker, mid_price)
            if curr_mcad == CalculationStatus.Invalid:
                # MACD warm-up: log a placeholder value and wait for data.
                self.visualization_data.add_mcad(stock_snapshot.ticker, 0)
                continue
            self.visualization_data.add_mcad(stock_snapshot.ticker, curr_mcad)
            if self.old_mcads[i] == CalculationStatus.Invalid:
                # First valid MACD value: nothing to compare against yet.
                self.old_mcads[i] = curr_mcad
                continue
            # BUG FIX: the original rebound self.mcad_EMA to a brand-new
            # ExponentialMovingAverage(9) on every tick, so the signal line
            # never accumulated history; use the per-stock EMA instead.
            curr_mcad_EMA = self.mcad_EMA[i].evaluate(curr_mcad)
            if curr_mcad_EMA == CalculationStatus.Invalid:
                # Signal-line warm-up; remember the latest values and wait.
                self.old_mcad_EMA[i] = curr_mcad_EMA
                self.old_mcads[i] = curr_mcad
                continue
            # Crossover detector: sign of (MACD - signal) now vs previously.
            del_mcad = (SignFunction.evaluate(curr_mcad - curr_mcad_EMA)
                        - SignFunction.evaluate(self.old_mcads[i] - curr_mcad_EMA))
            self.old_mcad_EMA[i] = curr_mcad_EMA
            # BUG FIX: refresh the stored MACD so consecutive ticks are
            # compared (the original never updated old_mcads on this path,
            # so every tick was compared against the very first MACD value).
            self.old_mcads[i] = curr_mcad
            if del_mcad > 0:
                # Upward crossover -> negative amount (sign convention kept
                # from the original code; presumably a sell -- confirm).
                decisions.append((stock_snapshot.ticker, -self.transaction_amount))
            elif del_mcad < 0:
                # Downward crossover -> positive amount (presumably a buy).
                decisions.append((stock_snapshot.ticker, self.transaction_amount))
        return decisions

    def reset(self):
        """Reset all per-stock state and hand back the collected viz data."""
        for mcad in self.mcads:
            mcad.reset()
        self.old_mcads = [CalculationStatus.Invalid for _ in self.old_mcads]
        # Also rebuild the signal-line EMAs so a fresh run starts cold,
        # consistent with resetting the MACD trackers above.
        self.mcad_EMA = [ExponentialMovingAverage(9) for _ in self.mcad_EMA]
        self.old_mcad_EMA = [CalculationStatus.Invalid for _ in self.old_mcad_EMA]
        visualization_data_holder = self.visualization_data
        self.visualization_data = VisualizationData()
        return visualization_data_holder
| true
| true
|
f70c7296b40140065feebfd9c748b2d158481e4a
| 3,542
|
py
|
Python
|
configs/recognition/tsm_custom/tsm_r50_1x1x8_100e_customdataset_fight_rgb.py
|
ZJU-lishuang/mmaction2
|
dc46505319968eff0123eb5abb811969042377c5
|
[
"Apache-2.0"
] | 2
|
2021-12-07T09:09:10.000Z
|
2022-01-26T01:52:07.000Z
|
configs/recognition/tsm_custom/tsm_r50_1x1x8_100e_customdataset_fight_rgb.py
|
ZJU-lishuang/mmaction2
|
dc46505319968eff0123eb5abb811969042377c5
|
[
"Apache-2.0"
] | null | null | null |
configs/recognition/tsm_custom/tsm_r50_1x1x8_100e_customdataset_fight_rgb.py
|
ZJU-lishuang/mmaction2
|
dc46505319968eff0123eb5abb811969042377c5
|
[
"Apache-2.0"
] | null | null | null |
# Inherit model, schedule and runtime defaults from the mmaction2 base configs.
_base_ = [
    '../../_base_/models/tsm_r50.py', '../../_base_/schedules/sgd_tsm_100e.py',
    '../../_base_/default_runtime.py'
]
# model settings: ImageNet-pretrained ResNet-50 backbone with a 2-class head
# (fight / no-fight).
model = dict(backbone=dict(pretrained='weight/resnet50-19c8e357.pth'),cls_head=dict(num_classes=2))
# Log every iteration, to both text and TensorBoard.
log_config = dict(
    interval=1,
    hooks=[
        dict(type='TextLoggerHook'),
        dict(type='TensorboardLoggerHook'),
    ])
optimizer = dict(
    type='SGD',
    constructor='TSMOptimizerConstructor',
    paramwise_cfg=dict(fc_lr5=True),
    lr=0.0025,  # this lr is tuned for training on 1 GPU
    momentum=0.9,
    weight_decay=0.0001)
# dataset settings
dataset_type = 'RawframeDataset'
data_root = '/home/jovyan/data-vol-1/fight_recognition/fight-detection-rawframes'
data_root_val = '/home/jovyan/data-vol-1/fight_recognition/fight-detection-rawframes'
ann_file_train = '/home/jovyan/data-vol-1/fight_recognition/train_videofolder.txt'
ann_file_val = '/home/jovyan/data-vol-1/fight_recognition/val_videofolder.txt'
# NOTE(review): the test split reuses the validation annotation list.
ann_file_test = '/home/jovyan/data-vol-1/fight_recognition/val_videofolder.txt'
img_norm_cfg = dict(
    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False)
train_pipeline = [
    # clip_len is the length (in frames) of a single sampled clip; 8 clips
    # are drawn per video (TSM's 1x1x8 sampling scheme).
    dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8),
    dict(type='RawFrameDecode'),
    dict(type='Resize', scale=(-1, 256)),
    dict(
        type='MultiScaleCrop',
        input_size=224,
        scales=(1, 0.875, 0.75, 0.66),
        random_crop=False,
        max_wh_scale_gap=1,
        num_fixed_crops=13),
    dict(type='Resize', scale=(224, 224), keep_ratio=False),
    dict(type='Flip', flip_ratio=0.5),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='FormatShape', input_format='NCHW'),
    dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
    dict(type='ToTensor', keys=['imgs', 'label'])
]
# Validation pipeline: deterministic center crop, no flip, no random crop.
val_pipeline = [
    dict(
        type='SampleFrames',
        clip_len=1,
        frame_interval=1,
        num_clips=8,
        test_mode=True),
    dict(type='RawFrameDecode'),
    dict(type='Resize', scale=(-1, 256)),
    dict(type='CenterCrop', crop_size=224),
    dict(type='Flip', flip_ratio=0),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='FormatShape', input_format='NCHW'),
    dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
    dict(type='ToTensor', keys=['imgs'])
]
# Test pipeline is identical to validation.
test_pipeline = [
    dict(
        type='SampleFrames',
        clip_len=1,
        frame_interval=1,
        num_clips=8,
        test_mode=True),
    dict(type='RawFrameDecode'),
    dict(type='Resize', scale=(-1, 256)),
    dict(type='CenterCrop', crop_size=224),
    dict(type='Flip', flip_ratio=0),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='FormatShape', input_format='NCHW'),
    dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
    dict(type='ToTensor', keys=['imgs'])
]
data = dict(
    videos_per_gpu=8,
    workers_per_gpu=4,
    train=dict(
        type=dataset_type,
        ann_file=ann_file_train,
        data_prefix=data_root,
        pipeline=train_pipeline),
    val=dict(
        type=dataset_type,
        ann_file=ann_file_val,
        data_prefix=data_root_val,
        pipeline=val_pipeline),
    test=dict(
        type=dataset_type,
        ann_file=ann_file_test,
        data_prefix=data_root_val,
        pipeline=test_pipeline))
# Evaluate (and checkpoint, below) every 5 epochs.
evaluation = dict(
    interval=5, metrics=['top_k_accuracy', 'mean_class_accuracy'])
# runtime settings
checkpoint_config = dict(interval=5)
work_dir = './work_dirs/tsm_r50_1x1x8_100e_customdataset_rgb/'
| 32.796296
| 99
| 0.657256
|
_base_ = [
'../../_base_/models/tsm_r50.py', '../../_base_/schedules/sgd_tsm_100e.py',
'../../_base_/default_runtime.py'
]
model = dict(backbone=dict(pretrained='weight/resnet50-19c8e357.pth'),cls_head=dict(num_classes=2))
log_config = dict(
interval=1,
hooks=[
dict(type='TextLoggerHook'),
dict(type='TensorboardLoggerHook'),
])
optimizer = dict(
type='SGD',
constructor='TSMOptimizerConstructor',
paramwise_cfg=dict(fc_lr5=True),
lr=0.0025,
momentum=0.9,
weight_decay=0.0001)
dataset_type = 'RawframeDataset'
data_root = '/home/jovyan/data-vol-1/fight_recognition/fight-detection-rawframes'
data_root_val = '/home/jovyan/data-vol-1/fight_recognition/fight-detection-rawframes'
ann_file_train = '/home/jovyan/data-vol-1/fight_recognition/train_videofolder.txt'
ann_file_val = '/home/jovyan/data-vol-1/fight_recognition/val_videofolder.txt'
ann_file_test = '/home/jovyan/data-vol-1/fight_recognition/val_videofolder.txt'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False)
train_pipeline = [
dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8),
dict(type='RawFrameDecode'),
dict(type='Resize', scale=(-1, 256)),
dict(
type='MultiScaleCrop',
input_size=224,
scales=(1, 0.875, 0.75, 0.66),
random_crop=False,
max_wh_scale_gap=1,
num_fixed_crops=13),
dict(type='Resize', scale=(224, 224), keep_ratio=False),
dict(type='Flip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='FormatShape', input_format='NCHW'),
dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
dict(type='ToTensor', keys=['imgs', 'label'])
]
val_pipeline = [
dict(
type='SampleFrames',
clip_len=1,
frame_interval=1,
num_clips=8,
test_mode=True),
dict(type='RawFrameDecode'),
dict(type='Resize', scale=(-1, 256)),
dict(type='CenterCrop', crop_size=224),
dict(type='Flip', flip_ratio=0),
dict(type='Normalize', **img_norm_cfg),
dict(type='FormatShape', input_format='NCHW'),
dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
dict(type='ToTensor', keys=['imgs'])
]
test_pipeline = [
dict(
type='SampleFrames',
clip_len=1,
frame_interval=1,
num_clips=8,
test_mode=True),
dict(type='RawFrameDecode'),
dict(type='Resize', scale=(-1, 256)),
dict(type='CenterCrop', crop_size=224),
dict(type='Flip', flip_ratio=0),
dict(type='Normalize', **img_norm_cfg),
dict(type='FormatShape', input_format='NCHW'),
dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
dict(type='ToTensor', keys=['imgs'])
]
data = dict(
videos_per_gpu=8,
workers_per_gpu=4,
train=dict(
type=dataset_type,
ann_file=ann_file_train,
data_prefix=data_root,
pipeline=train_pipeline),
val=dict(
type=dataset_type,
ann_file=ann_file_val,
data_prefix=data_root_val,
pipeline=val_pipeline),
test=dict(
type=dataset_type,
ann_file=ann_file_test,
data_prefix=data_root_val,
pipeline=test_pipeline))
evaluation = dict(
interval=5, metrics=['top_k_accuracy', 'mean_class_accuracy'])
checkpoint_config = dict(interval=5)
work_dir = './work_dirs/tsm_r50_1x1x8_100e_customdataset_rgb/'
| true
| true
|
f70c72ccc07d7ac3423857be8b2839f7788df7dd
| 3,998
|
py
|
Python
|
models/intro/vertical.py
|
caelanhadley/NNFSIP
|
da048af5ded549db7464b206b255104900b40ab8
|
[
"MIT"
] | null | null | null |
models/intro/vertical.py
|
caelanhadley/NNFSIP
|
da048af5ded549db7464b206b255104900b40ab8
|
[
"MIT"
] | null | null | null |
models/intro/vertical.py
|
caelanhadley/NNFSIP
|
da048af5ded549db7464b206b255104900b40ab8
|
[
"MIT"
] | null | null | null |
# Visual sanity check of the synthetic "vertical" dataset from nnfs.
import matplotlib.pyplot as plt
import nnfs
from nnfs.datasets import vertical_data
# nnfs.init() -- presumably fixes the random seed/dtype for reproducibility;
# confirm against the nnfs package docs.
nnfs.init()
# 3 classes x 100 samples of 2-D points; y holds integer class labels.
X, y = vertical_data(samples=100, classes=3)
plt.scatter(X[:, 0], X[:, 1], c=y, s=40, cmap='brg')
# NOTE(review): plt.show() blocks until the plot window is closed.
plt.show()
# NOTE(review): the imports and nnfs.init() below duplicate the ones above --
# this file reads like two scripts concatenated; confirm before cleaning up.
import numpy as np
import nnfs
import matplotlib.pyplot as plt
nnfs.init()
class Layer_Dense:
    """A fully connected layer: output = inputs . weights + biases."""

    def __init__(self, n_inputs, n_neurons):
        # Small random weights (scaled by 0.01) keep initial activations
        # near zero; biases start at zero, one row broadcast over the batch.
        self.weights = 0.01 * np.random.randn(n_inputs, n_neurons)
        self.biases = np.zeros((1, n_neurons))

    def forward(self, inputs):
        """Compute the affine transform and store it in ``self.output``."""
        weighted = np.dot(inputs, self.weights)
        self.output = weighted + self.biases
class Activation_ReLU:
    """Rectified linear unit: clamps negative inputs to zero."""

    def forward(self, inputs):
        # Element-wise max against 0; result stored for the next layer.
        self.output = np.maximum(inputs, 0)
class Activation_Softmax:
    """Row-wise softmax over the class axis (axis=1)."""

    def forward(self, inputs):
        # Subtract the per-row max before exponentiating so large logits
        # cannot overflow np.exp.
        shifted = inputs - np.max(inputs, axis=1, keepdims=True)
        exp_shifted = np.exp(shifted)
        self.output = exp_shifted / exp_shifted.sum(axis=1, keepdims=True)
class Loss:
    """Base class for loss functions.

    Subclasses implement ``forward(output, y)`` returning per-sample losses;
    ``calculate`` reduces those to a single mean loss for the batch.
    """

    def calculate(self, output, y):
        """Return the mean of the per-sample losses."""
        sample_losses = self.forward(output, y)
        return np.mean(sample_losses)


class Loss_CatagoricalCrossEntropy(Loss):
    """Categorical cross-entropy loss.

    (The 'Catagorical' misspelling is kept so existing callers still work.)
    """

    def forward(self, y_pred, y_true):
        """Return the per-sample negative log-likelihoods.

        y_pred: (n_samples, n_classes) predicted class probabilities.
        y_true: either integer class labels of shape (n_samples,) or a
            one-hot matrix of shape (n_samples, n_classes).

        Raises:
            ValueError: if y_true is neither 1-D nor 2-D.  (The original
                fell through to a confusing NameError in that case.)
        """
        samples = len(y_pred)
        # Clip to avoid log(0); clip both sides so the mean is not dragged
        # toward any value.
        y_pred_clipped = np.clip(y_pred, 1e-7, 1 - 1e-7)
        if len(y_true.shape) == 1:
            # Sparse integer labels: take the confidence of the true class.
            correct_confidences = y_pred_clipped[range(samples), y_true]
        elif len(y_true.shape) == 2:
            # One-hot labels: mask out everything but the true class.
            correct_confidences = np.sum(y_pred_clipped * y_true, axis=1)
        else:
            # BUG FIX: explicit error instead of an undefined-variable crash.
            raise ValueError(
                "y_true must be 1-D class labels or a 2-D one-hot matrix")
        return -np.log(correct_confidences)
# Model: 2 input features -> 3 hidden units (ReLU) -> 3 classes (softmax).
dense1 = Layer_Dense(2,3)
activation1 = Activation_ReLU()
dense2 = Layer_Dense(3, 3)
activation2 = Activation_Softmax()
loss_function = Loss_CatagoricalCrossEntropy()
# Helper variables for the random-search loop below.
lowest_loss = 9999999 # some initial value, larger than any plausible loss
best_dense1_weights = dense1.weights.copy()
best_dense1_biases = dense1.biases.copy()
best_dense2_weights = dense2.weights.copy()
best_dense2_biases = dense2.biases.copy()
# Naive random search: perturb the parameters, keep the perturbation only
# if it lowers the loss, otherwise restore the best-known parameters.
for iteration in range(10000):
    # Randomly *perturb* (+=) the current weights and biases for this
    # iteration -- note this is not a fresh random set each time.
    dense1.weights += 0.05 * np.random.randn(2, 3)
    dense1.biases += 0.05 * np.random.randn(1, 3)
    dense2.weights += 0.05 * np.random.randn(3, 3)
    dense2.biases += 0.05 * np.random.randn(1, 3)
    # Forward pass of the full training set through the network.
    dense1.forward(X)
    activation1.forward(dense1.output)
    dense2.forward(activation1.output)
    activation2.forward(dense2.output)
    # Loss of the softmax output against the integer labels y.
    loss = loss_function.calculate(activation2.output, y)
    # Accuracy: fraction of samples whose argmax class matches the label.
    predictions = np.argmax(activation2.output, axis=1)
    accuracy = np.mean(predictions==y)
    # If loss improved - report and snapshot the parameters.
    if loss < lowest_loss:
        print('New set of weights found, iteration:', iteration,
              'loss:', loss, 'acc:', accuracy)
        best_dense1_weights = dense1.weights.copy()
        best_dense1_biases = dense1.biases.copy()
        best_dense2_weights = dense2.weights.copy()
        best_dense2_biases = dense2.biases.copy()
        lowest_loss = loss
    # Otherwise revert to the best parameters seen so far.
    else:
        dense1.weights = best_dense1_weights.copy()
        dense1.biases = best_dense1_biases.copy()
        dense2.weights = best_dense2_weights.copy()
        dense2.biases = best_dense2_biases.copy()
| 35.070175
| 75
| 0.688844
|
import matplotlib.pyplot as plt
import nnfs
from nnfs.datasets import vertical_data
nnfs.init()
X, y = vertical_data(samples=100, classes=3)
plt.scatter(X[:, 0], X[:, 1], c=y, s=40, cmap='brg')
plt.show()
import numpy as np
import nnfs
import matplotlib.pyplot as plt
nnfs.init()
class Layer_Dense:
def __init__(self, n_inputs, n_neurons):
self.weights = 0.01 * np.random.randn(n_inputs, n_neurons)
self.biases = np.zeros((1,n_neurons))
def forward(self, inputs):
self.output = np.dot(inputs, self.weights) + self.biases
class Activation_ReLU:
def forward(self, inputs):
self.output = np.maximum(0,inputs)
class Activation_Softmax:
def forward(self, inputs):
exp_values= np.exp(inputs - np.max(inputs, axis=1, keepdims=True))
normalized = exp_values / np.sum(exp_values, axis=1, keepdims=True)
self.output = normalized
class Loss:
def calculate(self, output, y):
sample_losses = self.forward(output, y)
data_loss = np.mean(sample_losses)
return data_loss
class Loss_CatagoricalCrossEntropy(Loss):
def forward(self, y_pred, y_true):
samples = len(y_pred)
y_pred_clipped = np.clip(y_pred, 1e-7, 1-1e-7)
if len(y_true.shape) == 1:
correct_confidences = y_pred_clipped[range(samples), y_true]
elif len(y_true.shape) == 2:
correct_confidences = np.sum(y_pred_clipped * y_true, axis=1)
negative_log_likelyhoods = -np.log(correct_confidences)
return negative_log_likelyhoods
dense1 = Layer_Dense(2,3)
activation1 = Activation_ReLU()
dense2 = Layer_Dense(3, 3)
activation2 = Activation_Softmax()
loss_function = Loss_CatagoricalCrossEntropy()
lowest_loss = 9999999
best_dense1_weights = dense1.weights.copy()
best_dense1_biases = dense1.biases.copy()
best_dense2_weights = dense2.weights.copy()
best_dense2_biases = dense2.biases.copy()
for iteration in range(10000):
dense1.weights += 0.05 * np.random.randn(2, 3)
dense1.biases += 0.05 * np.random.randn(1, 3)
dense2.weights += 0.05 * np.random.randn(3, 3)
dense2.biases += 0.05 * np.random.randn(1, 3)
dense1.forward(X)
activation1.forward(dense1.output)
dense2.forward(activation1.output)
activation2.forward(dense2.output)
loss = loss_function.calculate(activation2.output, y)
predictions = np.argmax(activation2.output, axis=1)
accuracy = np.mean(predictions==y)
if loss < lowest_loss:
print('New set of weights found, iteration:', iteration,
'loss:', loss, 'acc:', accuracy)
best_dense1_weights = dense1.weights.copy()
best_dense1_biases = dense1.biases.copy()
best_dense2_weights = dense2.weights.copy()
best_dense2_biases = dense2.biases.copy()
lowest_loss = loss
else:
dense1.weights = best_dense1_weights.copy()
dense1.biases = best_dense1_biases.copy()
dense2.weights = best_dense2_weights.copy()
dense2.biases = best_dense2_biases.copy()
| true
| true
|
f70c7348c73dc5db09b2f4dab2f4a6cba757570c
| 1,427
|
py
|
Python
|
ldb/console.py
|
UnTenseUnJury/LbelDB.py
|
1a326c372ece83dc62bf5a72f4e0f22a131f1f12
|
[
"Unlicense"
] | 10
|
2020-08-24T03:43:45.000Z
|
2020-10-12T05:13:55.000Z
|
ldb/console.py
|
UnTenseUnJury/LbelDB.py
|
1a326c372ece83dc62bf5a72f4e0f22a131f1f12
|
[
"Unlicense"
] | null | null | null |
ldb/console.py
|
UnTenseUnJury/LbelDB.py
|
1a326c372ece83dc62bf5a72f4e0f22a131f1f12
|
[
"Unlicense"
] | 3
|
2020-08-24T12:20:10.000Z
|
2021-03-07T04:51:41.000Z
|
import ldb
# console based do not edit
# Interactive REPL: read a command line, split on spaces (cmd[0] is the
# verb, cmd[1:] the string arguments) and dispatch to the matching ldb call.
while True:
    cmd = str(input("LDB > "))
    cmd = cmd.split(" ")
    if cmd[0].lower() == "exit":
        break
    elif cmd[0].lower() == "init":
        ldb.init()
    elif cmd[0].lower() == "create":
        ldb.create(list(cmd[1:]))
    elif cmd[0].lower() == "view":
        ldb.view()
    elif cmd[0].lower() == "add_c":
        ldb.add_c(list(cmd[1:]))
    elif cmd[0].lower() == "add_r":
        ldb.add_r(list(cmd[1:]))
    elif cmd[0].lower() == "clear_r":
        ldb.clear_r(cmd[1:])
    elif cmd[0].lower() == "clear_c":
        ldb.clear_c(cmd[1:])
    elif cmd[0].lower() == "clearall":
        ldb.clearall()
    elif cmd[0].lower() == "store":
        ldb.store()
    elif cmd[0].lower() == "retrieve":
        ldb.retrieve()
    elif cmd[0].lower() == "genid":
        ldb.genid()
    elif cmd[0].lower() == "return_r":
        print(ldb.return_r(cmd[1:]))
    elif cmd[0].lower() == "return_c":
        print(ldb.return_c(int(cmd[1])))
    elif cmd[0].lower() == "update_r":
        # update_r/update_c take an integer index followed by the new values.
        ldb.update_r(int(cmd[1]), cmd[2:])
    elif cmd[0].lower() == "update_c":
        ldb.update_c(int(cmd[1]), cmd[2:])
    elif cmd[0].lower() == "update_ri":
        ldb.update_ri(int(cmd[1]), int(cmd[2]), cmd[3:])
    elif cmd[0].lower() == "sort_col":
        # NOTE(review): with no direction argument (len(cmd) == 2) this
        # passes True -- confirm that is the intended default direction.
        ldb.sort_col(int(cmd[1]), True if len(cmd) == 2 else False)
    elif cmd[0].lower() == "find":
        ldb.find(cmd[1])
| 31.021739
| 67
| 0.512964
|
import ldb
while True:
cmd = str(input("LDB > "))
cmd = cmd.split(" ")
if cmd[0].lower() == "exit":
break
elif cmd[0].lower() == "init":
ldb.init()
elif cmd[0].lower() == "create":
ldb.create(list(cmd[1:]))
elif cmd[0].lower() == "view":
ldb.view()
elif cmd[0].lower() == "add_c":
ldb.add_c(list(cmd[1:]))
elif cmd[0].lower() == "add_r":
ldb.add_r(list(cmd[1:]))
elif cmd[0].lower() == "clear_r":
ldb.clear_r(cmd[1:])
elif cmd[0].lower() == "clear_c":
ldb.clear_c(cmd[1:])
elif cmd[0].lower() == "clearall":
ldb.clearall()
elif cmd[0].lower() == "store":
ldb.store()
elif cmd[0].lower() == "retrieve":
ldb.retrieve()
elif cmd[0].lower() == "genid":
ldb.genid()
elif cmd[0].lower() == "return_r":
print(ldb.return_r(cmd[1:]))
elif cmd[0].lower() == "return_c":
print(ldb.return_c(int(cmd[1])))
elif cmd[0].lower() == "update_r":
ldb.update_r(int(cmd[1]), cmd[2:])
elif cmd[0].lower() == "update_c":
ldb.update_c(int(cmd[1]), cmd[2:])
elif cmd[0].lower() == "update_ri":
ldb.update_ri(int(cmd[1]), int(cmd[2]), cmd[3:])
elif cmd[0].lower() == "sort_col":
ldb.sort_col(int(cmd[1]), True if len(cmd) == 2 else False)
elif cmd[0].lower() == "find":
ldb.find(cmd[1])
| true
| true
|
f70c739c58e9ad732cfb1c2ec6a32a42c0f46699
| 25,377
|
py
|
Python
|
jax/experimental/jax2tf/tests/primitives_test.py
|
tudorcebere/jax
|
dcaa28c624ef3402529786a5aa6dd3891f8779b0
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-04-01T06:42:49.000Z
|
2021-04-01T06:42:49.000Z
|
jax/experimental/jax2tf/tests/primitives_test.py
|
jhkang/jax
|
b1d0f87648f73b06091ea3929a52b5d572391088
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
jax/experimental/jax2tf/tests/primitives_test.py
|
jhkang/jax
|
b1d0f87648f73b06091ea3929a52b5d572391088
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for JAX primitive coverage."""
import unittest
from absl.testing import absltest
from absl.testing import parameterized
from functools import partial
import jax
from jax import dtypes
from jax import lax
from jax import numpy as jnp
from jax import test_util as jtu
from jax.config import config
from jax.experimental import jax2tf
from jax.experimental.jax2tf.tests import tf_test_util
from jax.interpreters import xla
import numpy as np
import tensorflow as tf # type: ignore[import]
config.parse_flags_with_absl()
# Import after parsing flags
from jax.experimental.jax2tf.tests import primitive_harness
# NOTE(review): REDUCE and INDEX are not referenced anywhere in this part of
# the file; confirm they are used further down before removing them.
REDUCE = (
    jnp.all,
    jnp.any,
    jnp.max,
    jnp.min,
    jnp.prod,
    jnp.sum,
)
INDEX = (
    jax.ops.index_add,
    jax.ops.index_max,
    jax.ops.index_min,
    jax.ops.index_mul,
    jax.ops.index_update,
)
class JaxPrimitiveTest(tf_test_util.JaxToTfTestCase):
def test_primitive_coverage(self):
"""Fail if there are JAX primitives that are not implemented."""
# Harvest primitives from XLA translation tables
all_primitives = (set(xla.translations)
| set(xla.backend_specific_translations['cpu'])
| set(xla.backend_specific_translations['gpu'])
| set(xla.backend_specific_translations['tpu'])
| set(xla.initial_style_translations)
| set(xla.parallel_translations))
tf_impl = set(jax.experimental.jax2tf.jax2tf.tf_impl)
tf_not_yet_impl = set(jax.experimental.jax2tf.jax2tf.tf_not_yet_impl)
all_primitives = tuple(sorted(all_primitives, key=str))
for p in all_primitives:
# TODO: remove tie_in once omnistaging is on by default
if p.name == "axis_index" or p.name == "tie_in":
continue
if p in tf_not_yet_impl:
self.assertNotIn(p, tf_impl) # Should not be in both tf_impl and tf_not_yet_impl
else:
self.assertIn(p, tf_impl)
@parameterized.named_parameters(
dict(testcase_name=f"_{f_jax.__name__}",
f_jax=f_jax)
for f_jax in [jnp.add, jnp.subtract, jnp.multiply, jnp.divide,
jnp.less, jnp.less_equal, jnp.equal, jnp.greater,
jnp.greater_equal, jnp.not_equal, jnp.maximum,
jnp.minimum])
def test_type_promotion(self, f_jax=jnp.add):
# We only test a few types here, as tensorflow does not support many
# types like uint* or bool in binary ops.
types = [dtypes.bfloat16, np.int32, np.int64, np.float32]
for x_dtype in types:
for y_dtype in types:
x = np.array([1, 2], dtype=x_dtype)
y = np.array([3, 4], dtype=y_dtype)
self.ConvertAndCompare(f_jax, x, y)
def test_concat(self):
values = [np.array([1, 2], dtype=np.float32),
np.array([1, 2], dtype=np.int32),
np.array([1, 2], dtype=np.int8)]
f_jax = jax.jit(lambda x: jnp.concatenate(x, axis=0))
self.ConvertAndCompare(f_jax, values)
@primitive_harness.parameterized(primitive_harness.lax_pad)
def test_pad(self, harness: primitive_harness.Harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
  @primitive_harness.parameterized(primitive_harness.lax_top_k)
  def test_top_k(self, harness: primitive_harness.Harness):
    """lax.top_k: valid harnesses convert; invalid k values raise in JAX."""
    if (harness.params["k"] > harness.params["shape"][-1] or
        harness.params["k"] < 0):
      # k out of range must be rejected by JAX itself (no conversion run).
      with self.assertRaisesRegex(ValueError, "k argument to top_k must be"):
        harness.dyn_fun(*harness.dyn_args_maker(self.rng()))
    elif harness.params["dtype"] in jtu.dtypes.complex:
      # TODO(necula): fix top_k complex bug on TPU
      if jtu.device_under_test() == "tpu":
        raise unittest.SkipTest("top_k complex on TPU raises different error")
      with self.assertRaisesRegex(RuntimeError, "Unimplemented: complex comparison"):
        harness.dyn_fun(*harness.dyn_args_maker(self.rng()))
    # TODO: TF and JAX sort [inf, nan] differently.
    elif harness.name.startswith("nan_"):
      raise unittest.SkipTest("inconsistent [nan, inf] sorting")
    else:
      self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
  @primitive_harness.parameterized(primitive_harness.lax_sort)
  def test_sort(self, harness: primitive_harness.Harness):
    """lax.sort converts correctly, modulo known GPU/TPU gaps."""
    if (jtu.device_under_test() == "gpu" and
        len(harness.arg_descriptors) == 4 and
        not harness.params["is_stable"]):
      # TODO: fix the TF GPU test
      raise unittest.SkipTest("GPU tests are running TF on CPU")
    if jtu.device_under_test() == "tpu" and harness.params["dtype"] in jtu.dtypes.complex:
      raise unittest.SkipTest("JAX sort is not implemented on TPU for complex")
    self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
  @primitive_harness.parameterized(primitive_harness.lax_fft)
  @jtu.skip_on_flag("jax_skip_slow_tests", True)
  def test_fft(self, harness: primitive_harness.Harness):
    """lax.fft: ranks 1-3 convert; higher ranks and multi-dim TPU FFT raise."""
    if len(harness.params["fft_lengths"]) > 3:
      # Rank > 3 is rejected by JAX itself.
      with self.assertRaisesRegex(RuntimeError, "FFT only supports ranks 1-3"):
        harness.dyn_fun(*harness.dyn_args_maker(self.rng()))
    elif (jtu.device_under_test() == "tpu" and
          len(harness.params["fft_lengths"]) > 1):
      # TODO(b/140351181): FFT is mostly unimplemented on TPU, even for JAX
      with self.assertRaisesRegex(RuntimeError,
                                  "only 1D FFT is currently supported."):
        harness.dyn_fun(*harness.dyn_args_maker(self.rng()))
    else:
      tol = None
      if jtu.device_under_test() == "gpu":
        # NOTE(review): this branch keys the loose tolerance on a *boolean*
        # dtype, which looks suspicious for an FFT -- confirm intent.
        if harness.params["dtype"] in jtu.dtypes.boolean:
          tol = 0.01
        else:
          tol = 1e-3
      self.ConvertAndCompare(harness.dyn_fun,
                             *harness.dyn_args_maker(self.rng()),
                             atol=tol, rtol=tol)
@primitive_harness.parameterized(primitive_harness.lax_linalg_qr)
def test_qr(self, harness: primitive_harness.Harness):
# See jax.lib.lapack.geqrf for the list of compatible types
dtype = harness.params["dtype"]
dut = jtu.device_under_test()
# These cases are not implemented in JAX
if dtype in (jtu.dtypes.all_integer + [jnp.bfloat16]):
unimplemented_jax = True
elif dtype is np.complex64 and dut == "tpu":
unimplemented_jax = True
elif dtype is np.float16 and dut in ("cpu", "gpu"):
unimplemented_jax = True
else:
unimplemented_jax = False
if unimplemented_jax:
raise unittest.SkipTest(f"QR not implemented in JAX for {dtype} on {dut}")
# TODO: see https://github.com/google/jax/pull/3775#issuecomment-659407824.
# - for now, the performance of the HLO QR implementation called when
# compiling with TF is expected to have worse performance than the
# custom calls made in JAX.
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()),
atol=1e-5, rtol=1e-5)
  @primitive_harness.parameterized(primitive_harness.lax_linalg_svd)
  @jtu.skip_on_flag("jax_skip_slow_tests", True)
  def test_svd(self, harness: primitive_harness.Harness):
    """SVD: JAX and converted-TF results compared via operand reconstruction.

    The factors are compared by rebuilding the operand from (s, u, v) --
    presumably because SVD factors are unique only up to signs/ordering;
    confirm against the reconstruction reference linked below.
    """
    if harness.params["dtype"] in [np.float16, dtypes.bfloat16]:
      if jtu.device_under_test() != "tpu":
        # Does not work in JAX
        with self.assertRaisesRegex(NotImplementedError, "Unsupported dtype"):
          harness.dyn_fun(*harness.dyn_args_maker(self.rng()))
        return
    if harness.params["dtype"] in [np.complex64, np.complex128]:
      if jtu.device_under_test() == "tpu":
        # TODO: on JAX on TPU there is no SVD implementation for complex
        with self.assertRaisesRegex(RuntimeError,
                                    "Binary op compare with different element types"):
          harness.dyn_fun(*harness.dyn_args_maker(self.rng()))
        return

    def _custom_assert(r_jax, r_tf, atol=1e-6, rtol=1e-6):
      def _reconstruct_operand(result, is_tf: bool):
        # Reconstructing operand as documented in numpy.linalg.svd (see
        # https://numpy.org/doc/stable/reference/generated/numpy.linalg.svd.html)
        s, u, v = result
        if is_tf:
          # TF returns EagerTensors; convert to numpy for the math below.
          s = s.numpy()
          u = u.numpy()
          v = v.numpy()
        U = u[..., :s.shape[-1]]
        V = v[..., :s.shape[-1], :]
        S = s[..., None, :]
        # Also return the factor shapes so shape mismatches are detected.
        return jnp.matmul(U * S, V), s.shape, u.shape, v.shape

      if harness.params["compute_uv"]:
        r_jax_reconstructed = _reconstruct_operand(r_jax, False)
        r_tf_reconstructed = _reconstruct_operand(r_tf, True)
        self.assertAllClose(r_jax_reconstructed, r_tf_reconstructed,
                            atol=atol, rtol=rtol)
      else:
        # Without u/v only the singular values are compared directly.
        self.assertAllClose(r_jax, r_tf, atol=atol, rtol=rtol)

    tol = 1e-4
    custom_assert = partial(_custom_assert, atol=tol, rtol=tol)
    self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()),
                           atol=tol, rtol=tol,
                           custom_assert=custom_assert,
                           always_custom_assert=True)
@primitive_harness.parameterized(primitive_harness.lax_select_and_gather_add)
@jtu.ignore_warning(category=UserWarning,
message="Using reduced precision for gradient.*")
def test_select_and_gather_add(self, harness: primitive_harness.Harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_reduce_window)
def test_reduce_window(self, harness: primitive_harness.Harness):
dtype = harness.params['dtype']
if (jtu.device_under_test() == 'tpu' and dtype is np.complex64):
raise unittest.SkipTest(
'TODO: JAX reduce_window on TPU does not handle complex64'
)
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
  @primitive_harness.parameterized(primitive_harness.lax_unary_elementwise)
  def test_unary_elementwise(self, harness: primitive_harness.Harness):
    """Unary element-wise ops convert correctly.

    digamma and erf_inv get custom asserts because JAX and TF disagree
    (NaN vs inf) on the mathematically undefined inputs.
    """
    dtype = harness.params["dtype"]
    lax_name = harness.params["lax_name"]
    arg, = harness.dyn_args_maker(self.rng())
    custom_assert = None
    if lax_name == "digamma":
      # TODO(necula): fix bug with digamma/(f32|f16) on TPU
      if dtype in [np.float16, np.float32] and jtu.device_under_test() == "tpu":
        raise unittest.SkipTest("TODO: fix bug: nan vs not-nan")

      # In the bfloat16 case, TF and lax both return NaN in undefined cases.
      if not dtype is dtypes.bfloat16:
        # digamma is not defined at 0 and -1
        def custom_assert(result_jax, result_tf):
          # lax.digamma returns NaN and tf.math.digamma returns inf
          special_cases = (arg == 0.) | (arg == -1.)
          nr_special_cases = np.count_nonzero(special_cases)
          self.assertAllClose(np.full((nr_special_cases,), dtype(np.nan)),
                              result_jax[special_cases])
          self.assertAllClose(np.full((nr_special_cases,), dtype(np.inf)),
                              result_tf[special_cases])
          # non-special cases are equal
          self.assertAllClose(result_jax[~ special_cases],
                              result_tf[~ special_cases])
    if lax_name == "erf_inv":
      # TODO(necula): fix erf_inv bug on TPU
      if jtu.device_under_test() == "tpu":
        raise unittest.SkipTest("erf_inv bug on TPU: nan vs non-nan")
      # TODO: investigate: in the (b)float16 cases, TF and lax both return the
      # same result in undefined cases.
      if not dtype in [np.float16, dtypes.bfloat16]:
        # erf_inv is not defined for arg <= -1 or arg >= 1
        def custom_assert(result_jax, result_tf):  # noqa: F811
          # for arg < -1 or arg > 1
          # lax.erf_inv returns NaN; tf.math.erf_inv return +/- inf
          special_cases = (arg < -1.) | (arg > 1.)
          nr_special_cases = np.count_nonzero(special_cases)
          self.assertAllClose(np.full((nr_special_cases,), dtype(np.nan),
                                      dtype=dtype),
                              result_jax[special_cases])
          # TF's sign of inf matches the sign of the out-of-range input.
          signs = np.where(arg[special_cases] < 0., -1., 1.)
          self.assertAllClose(np.full((nr_special_cases,),
                                      signs * dtype(np.inf), dtype=dtype),
                              result_tf[special_cases])
          # non-special cases are equal
          self.assertAllClose(result_jax[~ special_cases],
                              result_tf[~ special_cases])
    atol = None
    if jtu.device_under_test() == "gpu":
      # TODO(necula): revisit once we fix the GPU tests
      atol = 1e-3
    self.ConvertAndCompare(harness.dyn_fun, arg, custom_assert=custom_assert,
                           atol=atol)
@primitive_harness.parameterized(primitive_harness.lax_bitwise_not)
def test_bitwise_not(self, harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_population_count)
def test_population_count(self, harness: primitive_harness.Harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_add_mul)
def test_add_mul(self, harness: primitive_harness.Harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_min_max)
def test_min_max(self, harness: primitive_harness.Harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
  @primitive_harness.parameterized(primitive_harness.lax_binary_elementwise)
  def test_binary_elementwise(self, harness):
    """Binary element-wise ops convert correctly.

    igamma/igammac get custom asserts because JAX and TF disagree on the
    mathematically undefined inputs (first argument <= 0).
    """
    tol = None
    lax_name, dtype = harness.params["lax_name"], harness.params["dtype"]
    if lax_name in ("igamma", "igammac"):
      # TODO(necula): fix bug with igamma/f16
      if dtype in [np.float16, dtypes.bfloat16]:
        raise unittest.SkipTest("TODO: igamma(c) unsupported with (b)float16 in JAX")
      # TODO(necula): fix bug with igamma/f32 on TPU
      if dtype is np.float32 and jtu.device_under_test() == "tpu":
        raise unittest.SkipTest("TODO: fix bug: nan vs not-nan")
    arg1, arg2 = harness.dyn_args_maker(self.rng())
    custom_assert = None
    if lax_name == "igamma":
      # igamma is not defined when the first argument is <=0
      def custom_assert(result_jax, result_tf):
        # lax.igamma returns NaN when arg1 == arg2 == 0; tf.math.igamma returns 0
        special_cases = (arg1 == 0.) & (arg2 == 0.)
        nr_special_cases = np.count_nonzero(special_cases)
        self.assertAllClose(np.full((nr_special_cases,), np.nan, dtype=dtype),
                            result_jax[special_cases])
        self.assertAllClose(np.full((nr_special_cases,), 0., dtype=dtype),
                            result_tf[special_cases])
        # non-special cases are equal
        self.assertAllClose(result_jax[~ special_cases],
                            result_tf[~ special_cases])
    if lax_name == "igammac":
      # On GPU, tolerance also needs to be adjusted in compiled mode
      if dtype == np.float64 and jtu.device_under_test() == 'gpu':
        tol = 1e-14
      # igammac is not defined when the first argument is <=0
      def custom_assert(result_jax, result_tf):  # noqa: F811
        # lax.igammac returns 1. when arg1 <= 0; tf.math.igammac returns NaN
        special_cases = (arg1 <= 0.) | (arg2 <= 0)
        nr_special_cases = np.count_nonzero(special_cases)
        self.assertAllClose(np.full((nr_special_cases,), 1., dtype=dtype),
                            result_jax[special_cases])
        self.assertAllClose(np.full((nr_special_cases,), np.nan, dtype=dtype),
                            result_tf[special_cases])
        # On CPU, tolerance only needs to be adjusted in eager & graph modes
        # (note: this rebinds the enclosing `tol` local inside the closure).
        tol = None
        if dtype == np.float64:
          tol = 1e-14
        # non-special cases are equal
        self.assertAllClose(result_jax[~ special_cases],
                            result_tf[~ special_cases], atol=tol, rtol=tol)
    self.ConvertAndCompare(harness.dyn_fun, arg1, arg2,
                           custom_assert=custom_assert, atol=tol, rtol=tol)
@primitive_harness.parameterized(primitive_harness.lax_binary_elementwise_logical)
def test_binary_elementwise_logical(self, harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_betainc)
def test_betainc(self, harness: primitive_harness.Harness):
dtype = harness.params["dtype"]
# TODO: https://www.tensorflow.org/api_docs/python/tf/math/betainc only
# supports float32/64 tests.
# TODO(bchetioui): investigate why the test actually fails in JAX.
if dtype in [np.float16, dtypes.bfloat16]:
raise unittest.SkipTest("(b)float16 not implemented in TF")
tol = None
if dtype is np.float64:
tol = 1e-14
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()),
atol=tol, rtol=tol)
# TODO(necula): combine tests that are identical except for the harness
# wait until we get more experience with using harnesses.
@primitive_harness.parameterized(primitive_harness.lax_shift_left)
def test_shift_left(self, harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_shift_right_logical)
def test_shift_right_logical(self, harness):
if jtu.device_under_test() == "tpu" and harness.params["dtype"] in [np.int8, np.int16]:
raise unittest.SkipTest("TODO: silent error for negative inputs")
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_shift_right_arithmetic)
def test_shift_right_arithmetic(self, harness):
if jtu.device_under_test() == "tpu" and harness.params["dtype"] in [np.uint8, np.uint16]:
raise unittest.SkipTest("TODO: silent error for negative inputs")
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_slice)
def test_slice(self, harness):
# JAX.slice rejects negative indices; check, and skip jax2tf
if any(si < 0 or si >= sh or li < 0 or li > sh
for sh, si, li in zip(harness.params["shape"],
harness.params["start_indices"],
harness.params["limit_indices"])):
with self.assertRaisesRegex(TypeError, ""):
harness.dyn_fun(*harness.dyn_args_maker(self.rng()))
else:
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_dynamic_slice)
def test_dynamic_slice(self, harness):
# JAX.dynamic_slice rejects slice sizes too big; check this, and skip jax2tf
args = harness.dyn_args_maker(self.rng())
if any(li - si < 0 or li - si >= sh
for sh, si, li in zip(harness.params["shape"],
harness.params["start_indices"],
harness.params["limit_indices"])):
with self.assertRaisesRegex(TypeError, ""):
harness.dyn_fun(*args)
return
self.ConvertAndCompare(harness.dyn_fun, *args)
@primitive_harness.parameterized(primitive_harness.lax_dynamic_update_slice)
def test_dynamic_update_slice(self, harness):
# JAX.dynamic_update_slice rejects update slices too big; check, and skip jax2tf
if any(ush > sh
for sh, ush in zip(harness.params["shape"],
harness.params["update_shape"])):
with self.assertRaisesRegex(TypeError, ""):
harness.dyn_fun(*harness.dyn_args_maker(self.rng()))
else:
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_squeeze)
def test_squeeze(self, harness: primitive_harness.Harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_conv_general_dilated)
def test_conv_general_dilated(self, harness: primitive_harness.Harness):
if jtu.device_under_test() == "gpu":
raise unittest.SkipTest("TODO: test failures on GPU")
tol = None
# TODO(bchetioui): significant discrepancies in some float16 cases.
if harness.params["dtype"] is np.float16:
tol = 1.
# TODO(bchetioui): slight occasional discrepancy in float32 cases.
elif harness.params["dtype"] is np.float32:
tol = 1e-5
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()),
atol=tol, rtol=tol)
@primitive_harness.parameterized(primitive_harness.lax_gather)
def test_gather(self, harness: primitive_harness.Harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_scatter)
def test_scatter(self, harness: primitive_harness.Harness):
f_name = harness.params['f_lax'].__name__
dtype = harness.params['dtype']
if jtu.device_under_test() == 'tpu':
if dtype is np.complex64 and f_name in ['scatter_min', 'scatter_max']:
raise unittest.SkipTest(f"TODO: complex {f_name} on TPU fails in JAX")
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
def test_boolean_gather(self):
values = np.array([[True, True], [False, True], [False, False]],
dtype=np.bool_)
indices = np.array([0, 1], dtype=np.int32)
for axis in [0, 1]:
f_jax = jax.jit(lambda v, i: jnp.take(v, i, axis=axis)) # pylint: disable=cell-var-from-loop
self.ConvertAndCompare(f_jax, values, indices)
def test_gather_rank_change(self):
params = jnp.array([[1.0, 1.5, 2.0], [2.0, 2.5, 3.0], [3.0, 3.5, 4.0]])
indices = jnp.array([[1, 1, 2], [0, 1, 0]])
f_jax = jax.jit(lambda i: params[i])
self.ConvertAndCompare(f_jax, indices)
@parameterized.named_parameters(jtu.cases_from_list(
dict(testcase_name=f"_{f_jax.__name__}",
f_jax=f_jax)
for f_jax in REDUCE))
def test_reduce_ops_with_numerical_input(self, f_jax):
values = np.array([1, 2, 3], dtype=np.float32)
self.ConvertAndCompare(f_jax, values)
@parameterized.named_parameters(jtu.cases_from_list(
dict(testcase_name=f"_{f_jax.__name__}",
f_jax=f_jax)
for f_jax in (jnp.cumsum, jnp.cumprod)))
def test_cumulated_ops(self, f_jax):
values = np.array([1, 2, 3], dtype=np.float32)
self.ConvertAndCompare(f_jax, values)
@parameterized.named_parameters(jtu.cases_from_list(
dict(testcase_name=f"_{op.__name__}",
op=op)
for op in INDEX))
def test_scatter_static(self, op):
values = np.ones((5, 6), dtype=np.float32)
update = np.float32(6.)
f_jax = jax.jit(lambda v, u: op(v, jax.ops.index[::2, 3:], u))
self.ConvertAndCompare(f_jax, values, update)
@parameterized.named_parameters(jtu.cases_from_list(
dict(testcase_name=f"_{f_jax.__name__}",
f_jax=f_jax)
for f_jax in REDUCE))
def test_reduce_ops_with_boolean_input(self, f_jax):
values = np.array([True, False, True], dtype=np.bool_)
self.ConvertAndCompare(f_jax, values)
@primitive_harness.parameterized(primitive_harness.random_gamma)
def test_random_gamma(self, harness: primitive_harness.Harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()),
rtol=1e-5)
@primitive_harness.parameterized(primitive_harness.random_split)
def test_random_split(self, harness: primitive_harness.Harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
def test_zeros_like(self):
v = np.float32(2.)
f_jax = jax.ad_util.zeros_like_jaxval
self.ConvertAndCompare(f_jax, v)
def test_stop_gradient(self):
f = jax2tf.convert(lax.stop_gradient)
self.assertEqual(f(tf.ones([])), 1.)
  # Regression test: bfloat16 constants must survive jax2tf conversion
  # (https://github.com/google/jax/issues/3942).
  def test_bfloat16_constant(self):
    """Converted functions keep bfloat16 scalar and array constants intact."""
    def jax_fn_scalar(x):
      # Cast the input, then scale by a Python scalar constant.
      x = x.astype(jnp.bfloat16)
      x *= 2.
      return x
    def jax_fn_array(x):
      # Cast the input, then scale elementwise by a bfloat16 array constant.
      x = x.astype(jnp.bfloat16)
      x *= np.array([1.5, 2.5, 3.5], jnp.bfloat16)
      return x
    tf_fn_scalar = jax2tf.convert(jax_fn_scalar)
    self.assertAllClose(tf_fn_scalar(1.375).numpy(), jnp.bfloat16(2.750))
    tf_fn_array = jax2tf.convert(jax_fn_array)
    self.assertAllClose(tf_fn_array(np.array([3, 4, 5])),
                        np.array([4.5, 10, 17.5], jnp.bfloat16))
if __name__ == "__main__":
absltest.main(testLoader=jtu.JaxTestLoader())
| 44.287958
| 99
| 0.675021
|
import unittest
from absl.testing import absltest
from absl.testing import parameterized
from functools import partial
import jax
from jax import dtypes
from jax import lax
from jax import numpy as jnp
from jax import test_util as jtu
from jax.config import config
from jax.experimental import jax2tf
from jax.experimental.jax2tf.tests import tf_test_util
from jax.interpreters import xla
import numpy as np
import tensorflow as tf
config.parse_flags_with_absl()
from jax.experimental.jax2tf.tests import primitive_harness
REDUCE = (
jnp.all,
jnp.any,
jnp.max,
jnp.min,
jnp.prod,
jnp.sum,
)
INDEX = (
jax.ops.index_add,
jax.ops.index_max,
jax.ops.index_min,
jax.ops.index_mul,
jax.ops.index_update,
)
class JaxPrimitiveTest(tf_test_util.JaxToTfTestCase):
def test_primitive_coverage(self):
all_primitives = (set(xla.translations)
| set(xla.backend_specific_translations['cpu'])
| set(xla.backend_specific_translations['gpu'])
| set(xla.backend_specific_translations['tpu'])
| set(xla.initial_style_translations)
| set(xla.parallel_translations))
tf_impl = set(jax.experimental.jax2tf.jax2tf.tf_impl)
tf_not_yet_impl = set(jax.experimental.jax2tf.jax2tf.tf_not_yet_impl)
all_primitives = tuple(sorted(all_primitives, key=str))
for p in all_primitives:
if p.name == "axis_index" or p.name == "tie_in":
continue
if p in tf_not_yet_impl:
self.assertNotIn(p, tf_impl)
else:
self.assertIn(p, tf_impl)
@parameterized.named_parameters(
dict(testcase_name=f"_{f_jax.__name__}",
f_jax=f_jax)
for f_jax in [jnp.add, jnp.subtract, jnp.multiply, jnp.divide,
jnp.less, jnp.less_equal, jnp.equal, jnp.greater,
jnp.greater_equal, jnp.not_equal, jnp.maximum,
jnp.minimum])
def test_type_promotion(self, f_jax=jnp.add):
types = [dtypes.bfloat16, np.int32, np.int64, np.float32]
for x_dtype in types:
for y_dtype in types:
x = np.array([1, 2], dtype=x_dtype)
y = np.array([3, 4], dtype=y_dtype)
self.ConvertAndCompare(f_jax, x, y)
def test_concat(self):
values = [np.array([1, 2], dtype=np.float32),
np.array([1, 2], dtype=np.int32),
np.array([1, 2], dtype=np.int8)]
f_jax = jax.jit(lambda x: jnp.concatenate(x, axis=0))
self.ConvertAndCompare(f_jax, values)
@primitive_harness.parameterized(primitive_harness.lax_pad)
def test_pad(self, harness: primitive_harness.Harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_top_k)
def test_top_k(self, harness: primitive_harness.Harness):
if (harness.params["k"] > harness.params["shape"][-1] or
harness.params["k"] < 0):
with self.assertRaisesRegex(ValueError, "k argument to top_k must be"):
harness.dyn_fun(*harness.dyn_args_maker(self.rng()))
elif harness.params["dtype"] in jtu.dtypes.complex:
if jtu.device_under_test() == "tpu":
raise unittest.SkipTest("top_k complex on TPU raises different error")
with self.assertRaisesRegex(RuntimeError, "Unimplemented: complex comparison"):
harness.dyn_fun(*harness.dyn_args_maker(self.rng()))
elif harness.name.startswith("nan_"):
raise unittest.SkipTest("inconsistent [nan, inf] sorting")
else:
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_sort)
def test_sort(self, harness: primitive_harness.Harness):
if (jtu.device_under_test() == "gpu" and
len(harness.arg_descriptors) == 4 and
not harness.params["is_stable"]):
raise unittest.SkipTest("GPU tests are running TF on CPU")
if jtu.device_under_test() == "tpu" and harness.params["dtype"] in jtu.dtypes.complex:
raise unittest.SkipTest("JAX sort is not implemented on TPU for complex")
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_fft)
@jtu.skip_on_flag("jax_skip_slow_tests", True)
def test_fft(self, harness: primitive_harness.Harness):
if len(harness.params["fft_lengths"]) > 3:
with self.assertRaisesRegex(RuntimeError, "FFT only supports ranks 1-3"):
harness.dyn_fun(*harness.dyn_args_maker(self.rng()))
elif (jtu.device_under_test() == "tpu" and
len(harness.params["fft_lengths"]) > 1):
with self.assertRaisesRegex(RuntimeError,
"only 1D FFT is currently supported."):
harness.dyn_fun(*harness.dyn_args_maker(self.rng()))
else:
tol = None
if jtu.device_under_test() == "gpu":
if harness.params["dtype"] in jtu.dtypes.boolean:
tol = 0.01
else:
tol = 1e-3
self.ConvertAndCompare(harness.dyn_fun,
*harness.dyn_args_maker(self.rng()),
atol=tol, rtol=tol)
@primitive_harness.parameterized(primitive_harness.lax_linalg_qr)
def test_qr(self, harness: primitive_harness.Harness):
dtype = harness.params["dtype"]
dut = jtu.device_under_test()
if dtype in (jtu.dtypes.all_integer + [jnp.bfloat16]):
unimplemented_jax = True
elif dtype is np.complex64 and dut == "tpu":
unimplemented_jax = True
elif dtype is np.float16 and dut in ("cpu", "gpu"):
unimplemented_jax = True
else:
unimplemented_jax = False
if unimplemented_jax:
raise unittest.SkipTest(f"QR not implemented in JAX for {dtype} on {dut}")
.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()),
atol=1e-5, rtol=1e-5)
@primitive_harness.parameterized(primitive_harness.lax_linalg_svd)
@jtu.skip_on_flag("jax_skip_slow_tests", True)
def test_svd(self, harness: primitive_harness.Harness):
if harness.params["dtype"] in [np.float16, dtypes.bfloat16]:
if jtu.device_under_test() != "tpu":
with self.assertRaisesRegex(NotImplementedError, "Unsupported dtype"):
harness.dyn_fun(*harness.dyn_args_maker(self.rng()))
return
if harness.params["dtype"] in [np.complex64, np.complex128]:
if jtu.device_under_test() == "tpu":
with self.assertRaisesRegex(RuntimeError,
"Binary op compare with different element types"):
harness.dyn_fun(*harness.dyn_args_maker(self.rng()))
return
def _custom_assert(r_jax, r_tf, atol=1e-6, rtol=1e-6):
def _reconstruct_operand(result, is_tf: bool):
s, u, v = result
if is_tf:
s = s.numpy()
u = u.numpy()
v = v.numpy()
U = u[..., :s.shape[-1]]
V = v[..., :s.shape[-1], :]
S = s[..., None, :]
return jnp.matmul(U * S, V), s.shape, u.shape, v.shape
if harness.params["compute_uv"]:
r_jax_reconstructed = _reconstruct_operand(r_jax, False)
r_tf_reconstructed = _reconstruct_operand(r_tf, True)
self.assertAllClose(r_jax_reconstructed, r_tf_reconstructed,
atol=atol, rtol=rtol)
else:
self.assertAllClose(r_jax, r_tf, atol=atol, rtol=rtol)
tol = 1e-4
custom_assert = partial(_custom_assert, atol=tol, rtol=tol)
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()),
atol=tol, rtol=tol,
custom_assert=custom_assert,
always_custom_assert=True)
@primitive_harness.parameterized(primitive_harness.lax_select_and_gather_add)
@jtu.ignore_warning(category=UserWarning,
message="Using reduced precision for gradient.*")
def test_select_and_gather_add(self, harness: primitive_harness.Harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_reduce_window)
def test_reduce_window(self, harness: primitive_harness.Harness):
dtype = harness.params['dtype']
if (jtu.device_under_test() == 'tpu' and dtype is np.complex64):
raise unittest.SkipTest(
'TODO: JAX reduce_window on TPU does not handle complex64'
)
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_unary_elementwise)
def test_unary_elementwise(self, harness: primitive_harness.Harness):
dtype = harness.params["dtype"]
lax_name = harness.params["lax_name"]
arg, = harness.dyn_args_maker(self.rng())
custom_assert = None
if lax_name == "digamma":
if dtype in [np.float16, np.float32] and jtu.device_under_test() == "tpu":
raise unittest.SkipTest("TODO: fix bug: nan vs not-nan")
if not dtype is dtypes.bfloat16:
def custom_assert(result_jax, result_tf):
special_cases = (arg == 0.) | (arg == -1.)
nr_special_cases = np.count_nonzero(special_cases)
self.assertAllClose(np.full((nr_special_cases,), dtype(np.nan)),
result_jax[special_cases])
self.assertAllClose(np.full((nr_special_cases,), dtype(np.inf)),
result_tf[special_cases])
self.assertAllClose(result_jax[~ special_cases],
result_tf[~ special_cases])
if lax_name == "erf_inv":
if jtu.device_under_test() == "tpu":
raise unittest.SkipTest("erf_inv bug on TPU: nan vs non-nan")
if not dtype in [np.float16, dtypes.bfloat16]:
def custom_assert(result_jax, result_tf):
special_cases = (arg < -1.) | (arg > 1.)
nr_special_cases = np.count_nonzero(special_cases)
self.assertAllClose(np.full((nr_special_cases,), dtype(np.nan),
dtype=dtype),
result_jax[special_cases])
signs = np.where(arg[special_cases] < 0., -1., 1.)
self.assertAllClose(np.full((nr_special_cases,),
signs * dtype(np.inf), dtype=dtype),
result_tf[special_cases])
self.assertAllClose(result_jax[~ special_cases],
result_tf[~ special_cases])
atol = None
if jtu.device_under_test() == "gpu":
atol = 1e-3
self.ConvertAndCompare(harness.dyn_fun, arg, custom_assert=custom_assert,
atol=atol)
@primitive_harness.parameterized(primitive_harness.lax_bitwise_not)
def test_bitwise_not(self, harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_population_count)
def test_population_count(self, harness: primitive_harness.Harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_add_mul)
def test_add_mul(self, harness: primitive_harness.Harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_min_max)
def test_min_max(self, harness: primitive_harness.Harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_binary_elementwise)
def test_binary_elementwise(self, harness):
tol = None
lax_name, dtype = harness.params["lax_name"], harness.params["dtype"]
if lax_name in ("igamma", "igammac"):
if dtype in [np.float16, dtypes.bfloat16]:
raise unittest.SkipTest("TODO: igamma(c) unsupported with (b)float16 in JAX")
if dtype is np.float32 and jtu.device_under_test() == "tpu":
raise unittest.SkipTest("TODO: fix bug: nan vs not-nan")
arg1, arg2 = harness.dyn_args_maker(self.rng())
custom_assert = None
if lax_name == "igamma":
def custom_assert(result_jax, result_tf):
special_cases = (arg1 == 0.) & (arg2 == 0.)
nr_special_cases = np.count_nonzero(special_cases)
self.assertAllClose(np.full((nr_special_cases,), np.nan, dtype=dtype),
result_jax[special_cases])
self.assertAllClose(np.full((nr_special_cases,), 0., dtype=dtype),
result_tf[special_cases])
self.assertAllClose(result_jax[~ special_cases],
result_tf[~ special_cases])
if lax_name == "igammac":
if dtype == np.float64 and jtu.device_under_test() == 'gpu':
tol = 1e-14
def custom_assert(result_jax, result_tf):
special_cases = (arg1 <= 0.) | (arg2 <= 0)
nr_special_cases = np.count_nonzero(special_cases)
self.assertAllClose(np.full((nr_special_cases,), 1., dtype=dtype),
result_jax[special_cases])
self.assertAllClose(np.full((nr_special_cases,), np.nan, dtype=dtype),
result_tf[special_cases])
tol = None
if dtype == np.float64:
tol = 1e-14
self.assertAllClose(result_jax[~ special_cases],
result_tf[~ special_cases], atol=tol, rtol=tol)
self.ConvertAndCompare(harness.dyn_fun, arg1, arg2,
custom_assert=custom_assert, atol=tol, rtol=tol)
@primitive_harness.parameterized(primitive_harness.lax_binary_elementwise_logical)
def test_binary_elementwise_logical(self, harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_betainc)
def test_betainc(self, harness: primitive_harness.Harness):
dtype = harness.params["dtype"]
if dtype in [np.float16, dtypes.bfloat16]:
raise unittest.SkipTest("(b)float16 not implemented in TF")
tol = None
if dtype is np.float64:
tol = 1e-14
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()),
atol=tol, rtol=tol)
@primitive_harness.parameterized(primitive_harness.lax_shift_left)
def test_shift_left(self, harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_shift_right_logical)
def test_shift_right_logical(self, harness):
if jtu.device_under_test() == "tpu" and harness.params["dtype"] in [np.int8, np.int16]:
raise unittest.SkipTest("TODO: silent error for negative inputs")
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_shift_right_arithmetic)
def test_shift_right_arithmetic(self, harness):
if jtu.device_under_test() == "tpu" and harness.params["dtype"] in [np.uint8, np.uint16]:
raise unittest.SkipTest("TODO: silent error for negative inputs")
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_slice)
def test_slice(self, harness):
if any(si < 0 or si >= sh or li < 0 or li > sh
for sh, si, li in zip(harness.params["shape"],
harness.params["start_indices"],
harness.params["limit_indices"])):
with self.assertRaisesRegex(TypeError, ""):
harness.dyn_fun(*harness.dyn_args_maker(self.rng()))
else:
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_dynamic_slice)
def test_dynamic_slice(self, harness):
args = harness.dyn_args_maker(self.rng())
if any(li - si < 0 or li - si >= sh
for sh, si, li in zip(harness.params["shape"],
harness.params["start_indices"],
harness.params["limit_indices"])):
with self.assertRaisesRegex(TypeError, ""):
harness.dyn_fun(*args)
return
self.ConvertAndCompare(harness.dyn_fun, *args)
@primitive_harness.parameterized(primitive_harness.lax_dynamic_update_slice)
def test_dynamic_update_slice(self, harness):
if any(ush > sh
for sh, ush in zip(harness.params["shape"],
harness.params["update_shape"])):
with self.assertRaisesRegex(TypeError, ""):
harness.dyn_fun(*harness.dyn_args_maker(self.rng()))
else:
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_squeeze)
def test_squeeze(self, harness: primitive_harness.Harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_conv_general_dilated)
def test_conv_general_dilated(self, harness: primitive_harness.Harness):
if jtu.device_under_test() == "gpu":
raise unittest.SkipTest("TODO: test failures on GPU")
tol = None
if harness.params["dtype"] is np.float16:
tol = 1.
elif harness.params["dtype"] is np.float32:
tol = 1e-5
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()),
atol=tol, rtol=tol)
@primitive_harness.parameterized(primitive_harness.lax_gather)
def test_gather(self, harness: primitive_harness.Harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
@primitive_harness.parameterized(primitive_harness.lax_scatter)
def test_scatter(self, harness: primitive_harness.Harness):
f_name = harness.params['f_lax'].__name__
dtype = harness.params['dtype']
if jtu.device_under_test() == 'tpu':
if dtype is np.complex64 and f_name in ['scatter_min', 'scatter_max']:
raise unittest.SkipTest(f"TODO: complex {f_name} on TPU fails in JAX")
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
def test_boolean_gather(self):
values = np.array([[True, True], [False, True], [False, False]],
dtype=np.bool_)
indices = np.array([0, 1], dtype=np.int32)
for axis in [0, 1]:
f_jax = jax.jit(lambda v, i: jnp.take(v, i, axis=axis))
self.ConvertAndCompare(f_jax, values, indices)
def test_gather_rank_change(self):
params = jnp.array([[1.0, 1.5, 2.0], [2.0, 2.5, 3.0], [3.0, 3.5, 4.0]])
indices = jnp.array([[1, 1, 2], [0, 1, 0]])
f_jax = jax.jit(lambda i: params[i])
self.ConvertAndCompare(f_jax, indices)
@parameterized.named_parameters(jtu.cases_from_list(
dict(testcase_name=f"_{f_jax.__name__}",
f_jax=f_jax)
for f_jax in REDUCE))
def test_reduce_ops_with_numerical_input(self, f_jax):
values = np.array([1, 2, 3], dtype=np.float32)
self.ConvertAndCompare(f_jax, values)
@parameterized.named_parameters(jtu.cases_from_list(
dict(testcase_name=f"_{f_jax.__name__}",
f_jax=f_jax)
for f_jax in (jnp.cumsum, jnp.cumprod)))
def test_cumulated_ops(self, f_jax):
values = np.array([1, 2, 3], dtype=np.float32)
self.ConvertAndCompare(f_jax, values)
@parameterized.named_parameters(jtu.cases_from_list(
dict(testcase_name=f"_{op.__name__}",
op=op)
for op in INDEX))
def test_scatter_static(self, op):
values = np.ones((5, 6), dtype=np.float32)
update = np.float32(6.)
f_jax = jax.jit(lambda v, u: op(v, jax.ops.index[::2, 3:], u))
self.ConvertAndCompare(f_jax, values, update)
@parameterized.named_parameters(jtu.cases_from_list(
dict(testcase_name=f"_{f_jax.__name__}",
f_jax=f_jax)
for f_jax in REDUCE))
def test_reduce_ops_with_boolean_input(self, f_jax):
values = np.array([True, False, True], dtype=np.bool_)
self.ConvertAndCompare(f_jax, values)
@primitive_harness.parameterized(primitive_harness.random_gamma)
def test_random_gamma(self, harness: primitive_harness.Harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()),
rtol=1e-5)
@primitive_harness.parameterized(primitive_harness.random_split)
def test_random_split(self, harness: primitive_harness.Harness):
self.ConvertAndCompare(harness.dyn_fun, *harness.dyn_args_maker(self.rng()))
def test_zeros_like(self):
v = np.float32(2.)
f_jax = jax.ad_util.zeros_like_jaxval
self.ConvertAndCompare(f_jax, v)
def test_stop_gradient(self):
f = jax2tf.convert(lax.stop_gradient)
self.assertEqual(f(tf.ones([])), 1.)
def test_bfloat16_constant(self):
def jax_fn_scalar(x):
x = x.astype(jnp.bfloat16)
x *= 2.
return x
def jax_fn_array(x):
x = x.astype(jnp.bfloat16)
x *= np.array([1.5, 2.5, 3.5], jnp.bfloat16)
return x
tf_fn_scalar = jax2tf.convert(jax_fn_scalar)
self.assertAllClose(tf_fn_scalar(1.375).numpy(), jnp.bfloat16(2.750))
tf_fn_array = jax2tf.convert(jax_fn_array)
self.assertAllClose(tf_fn_array(np.array([3, 4, 5])),
np.array([4.5, 10, 17.5], jnp.bfloat16))
if __name__ == "__main__":
absltest.main(testLoader=jtu.JaxTestLoader())
| true
| true
|
f70c7544d6c9e8095e95d0629b94384bc1cbe35b
| 14,176
|
py
|
Python
|
tensorflow/python/eager/ops_test.py
|
sorhus/tensorflow
|
99de1826646c8d354259187fc9c2330b794c1ac4
|
[
"Apache-2.0"
] | 8
|
2019-08-29T06:16:53.000Z
|
2021-08-21T21:19:10.000Z
|
tensorflow/python/eager/ops_test.py
|
sorhus/tensorflow
|
99de1826646c8d354259187fc9c2330b794c1ac4
|
[
"Apache-2.0"
] | null | null | null |
tensorflow/python/eager/ops_test.py
|
sorhus/tensorflow
|
99de1826646c8d354259187fc9c2330b794c1ac4
|
[
"Apache-2.0"
] | 7
|
2019-05-31T02:57:03.000Z
|
2020-08-09T20:15:25.000Z
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for operations in eager execution."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.eager import context
from tensorflow.python.eager import execute
from tensorflow.python.eager import test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.layers import core
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import sparse_ops
class OpsTest(test_util.TensorFlowTestCase):
def testExecuteBasic(self):
three = constant_op.constant(3)
five = constant_op.constant(5)
product = three * five
self.assertAllEqual(15, product)
def testMatMulGPU(self):
if not context.context().num_gpus():
self.skipTest('No GPUs found')
three = constant_op.constant([[3.]]).gpu()
five = constant_op.constant([[5.]]).gpu()
product = math_ops.matmul(three, five)
self.assertEqual([[15.0]], product.numpy())
def testExecuteStringAttr(self):
three = constant_op.constant(3.0)
checked_three = array_ops.check_numerics(three,
message='just checking')
self.assertEqual([[3]], checked_three.numpy())
def testExecuteFloatAttr(self):
three = constant_op.constant(3.0)
almost_three = constant_op.constant(2.8)
almost_equal = math_ops.approximate_equal(
three, almost_three, tolerance=0.3)
self.assertTrue(almost_equal)
def testExecuteIntAttr(self):
three = constant_op.constant(3)
four = constant_op.constant(4)
total = math_ops.add_n([three, four])
self.assertAllEqual(7, total)
def testExecuteBoolAttr(self):
three = constant_op.constant([[3]])
five = constant_op.constant([[5]])
product = math_ops.matmul(three, five, transpose_a=True)
self.assertAllEqual([[15]], product)
def testExecuteOneListOutput(self):
split_dim = constant_op.constant(1)
value = constant_op.constant([[0, 1, 2], [3, 4, 5]])
x1, x2, x3 = array_ops.split(value, 3, axis=split_dim)
self.assertAllEqual([[0], [3]], x1)
self.assertAllEqual([[1], [4]], x2)
self.assertAllEqual([[2], [5]], x3)
def testGraphMode(self):
graph = ops.Graph()
with graph.as_default(), context.graph_mode():
array_ops.placeholder(dtypes.int32)
self.assertEqual(1, len(graph.get_operations()))
# See comments on handling of int32 tensors on GPU in
# EagerTensor.__init__.
def testInt32CPUDefault(self):
if not context.context().num_gpus():
self.skipTest('No GPUs found')
with context.device('/gpu:0'):
r = constant_op.constant(1) + constant_op.constant(2)
self.assertAllEqual(r, 3)
def testExecuteListOutputLen1(self):
split_dim = constant_op.constant(1)
value = constant_op.constant([[0, 1, 2], [3, 4, 5]])
result = array_ops.split(value, 1, axis=split_dim)
self.assertTrue(isinstance(result, list))
self.assertEqual(1, len(result))
self.assertAllEqual([[0, 1, 2], [3, 4, 5]], result[0])
def testExecuteListOutputLen0(self):
empty = constant_op.constant([], dtype=dtypes.int32)
result = array_ops.unstack(empty, 0)
self.assertTrue(isinstance(result, list))
self.assertEqual(0, len(result))
def testExecuteMultipleNonListOutput(self):
x = constant_op.constant([1, 2, 3, 4, 5, 6])
y = constant_op.constant([1, 3, 5])
result = array_ops.listdiff(x, y)
out, idx = result
self.assertTrue(out is result.out)
self.assertTrue(idx is result.idx)
self.assertAllEqual([2, 4, 6], out)
self.assertAllEqual([1, 3, 5], idx)
def testExecuteMultipleListOutput(self):
split_dim = constant_op.constant(1, dtype=dtypes.int64)
indices = constant_op.constant([[0, 2], [0, 4], [0, 5], [1, 0], [1, 1]],
dtype=dtypes.int64)
values = constant_op.constant([2, 3, 5, 7, 11])
shape = constant_op.constant([2, 7], dtype=dtypes.int64)
result = sparse_ops.gen_sparse_ops.sparse_split(
split_dim,
indices,
values,
shape,
num_split=2)
output_indices, output_values, output_shape = result
self.assertEqual(2, len(output_indices))
self.assertEqual(2, len(output_values))
self.assertEqual(2, len(output_shape))
self.assertEqual(output_indices, result.output_indices)
self.assertEqual(output_values, result.output_values)
self.assertEqual(output_shape, result.output_shape)
self.assertAllEqual([[0, 2], [1, 0], [1, 1]], output_indices[0])
self.assertAllEqual([[0, 0], [0, 1]], output_indices[1])
self.assertAllEqual([2, 7, 11], output_values[0])
self.assertAllEqual([3, 5], output_values[1])
self.assertAllEqual([2, 4], output_shape[0])
self.assertAllEqual([2, 3], output_shape[1])
# TODO(josh11b): Test an op that has multiple outputs, some but not
# all of which are lists. Examples: barrier_take_many (currently
# unsupported since it uses a type list) or sdca_optimizer (I don't
# have an example of legal inputs & outputs).
def testComposition(self):
x = constant_op.constant(1, dtype=dtypes.int32)
three_x = x + x + x
self.assertEquals(dtypes.int32, three_x.dtype)
self.assertAllEqual(3, three_x)
def testOperatorOverrides(self):
# TODO(henrytan): test with negative number.
a = constant_op.constant([1])
b = constant_op.constant([2])
self.assertAllEqual((-a), [-1])
self.assertAllEqual(abs(b), [2])
self.assertAllEqual((a + b), [3])
self.assertAllEqual((a - b), [-1])
self.assertAllEqual((a * b), [2])
self.assertAllEqual((a * a), [1])
self.assertAllEqual((a**b), [1])
self.assertAllEqual((a / b), [1 / 2])
self.assertAllEqual((a / a), [1])
self.assertAllEqual((a % b), [1])
self.assertAllEqual((a < b), [True])
self.assertAllEqual((a <= b), [True])
self.assertAllEqual((a > b), [False])
self.assertAllEqual((a >= b), [False])
self.assertAllEqual((a == b), False)
self.assertAllEqual((a != b), True)
self.assertAllEqual(1, a[constant_op.constant(0)])
def test_basic_slice(self):
npt = np.arange(1, 19, dtype=np.float32).reshape(3, 2, 3)
t = constant_op.constant(npt)
self.assertAllEqual(npt[:, :, :], t[:, :, :])
self.assertAllEqual(npt[::, ::, ::], t[::, ::, ::])
self.assertAllEqual(npt[::1, ::1, ::1], t[::1, ::1, ::1])
self.assertAllEqual(npt[::1, ::5, ::2], t[::1, ::5, ::2])
self.assertAllEqual(npt[::-1, :, :], t[::-1, :, :])
self.assertAllEqual(npt[:, ::-1, :], t[:, ::-1, :])
self.assertAllEqual(npt[:, :, ::-1], t[:, :, ::-1])
self.assertAllEqual(npt[-2::-1, :, ::1], t[-2::-1, :, ::1])
self.assertAllEqual(npt[-2::-1, :, ::2], t[-2::-1, :, ::2])
def testDegenerateSlices(self):
npt = np.arange(1, 19, dtype=np.float32).reshape(3, 2, 3)
t = constant_op.constant(npt)
# degenerate by offering a forward interval with a negative stride
self.assertAllEqual(npt[0:-1:-1, :, :], t[0:-1:-1, :, :])
# degenerate with a reverse interval with a positive stride
self.assertAllEqual(npt[-1:0, :, :], t[-1:0, :, :])
# empty interval in every dimension
self.assertAllEqual(npt[-1:0, 2:2, 2:3:-1], t[-1:0, 2:2, 2:3:-1])
def testEllipsis(self):
npt = np.array(
[[[[[1, 2], [3, 4], [5, 6]]], [[[7, 8], [9, 10], [11, 12]]]]])
t = constant_op.constant(npt)
self.assertAllEqual(npt[0:], t[0:])
# implicit ellipsis
self.assertAllEqual(npt[0:, ...], t[0:, ...])
# ellipsis alone
self.assertAllEqual(npt[...], t[...])
# ellipsis at end
self.assertAllEqual(npt[0:1, ...], t[0:1, ...])
# ellipsis at begin
self.assertAllEqual(npt[..., 0:1], t[..., 0:1])
# ellipsis at middle
self.assertAllEqual(npt[0:1, ..., 0:1], t[0:1, ..., 0:1])
def testShrink(self):
npt = np.array([[[[[1, 2, 4, 5], [5, 6, 7, 8], [9, 10, 11, 12]]],
[[[13, 14, 15, 16], [17, 18, 19, 20], [21, 22, 23, 24]]]]])
t = constant_op.constant(npt)
self.assertAllEqual(npt[:, :, :, :, 3], t[:, :, :, :, 3])
self.assertAllEqual(npt[..., 3], t[..., 3])
self.assertAllEqual(npt[:, 0], t[:, 0])
self.assertAllEqual(npt[:, :, 0], t[:, :, 0])
def testOpWithInputsOnDifferentDevices(self):
if not context.context().num_gpus():
self.skipTest('No GPUs found')
# The GPU kernel for the Reshape op requires that the
# shape input be on CPU.
value = constant_op.constant([1., 2.]).gpu()
shape = constant_op.constant([2, 1])
reshaped = array_ops.reshape(value, shape)
self.assertAllEqual([[1], [2]], reshaped.cpu())
def testInt64(self):
# Fill requires the first input to be an int32 tensor.
self.assertAllEqual(
[1.0, 1.0],
array_ops.fill(constant_op.constant([2], dtype=dtypes.int64),
constant_op.constant(1)))
def testOutputOnHostMemory(self):
if not context.context().num_gpus():
self.skipTest('No GPUs found')
# The Shape op kernel on GPU places the output in host memory.
value = constant_op.constant([1.]).gpu()
shape = array_ops.shape(value)
self.assertEqual([1], shape.numpy())
def testSilentCopy(self):
if not context.context().num_gpus():
self.skipTest('No GPUs found')
# Temporarily replace the context
# pylint: disable=protected-access
del context._context
try:
context._context = context.Context(
device_policy=context.DEVICE_PLACEMENT_SILENT)
cpu_tensor = constant_op.constant(1.0)
gpu_tensor = cpu_tensor.gpu()
self.assertAllEqual(cpu_tensor + gpu_tensor, 2.0)
finally:
del context._context
context._context = context.Context()
# pylint: enable=protected-access
def testSoftPlacement(self):
if not context.context().num_gpus():
self.skipTest('No GPUs found')
# Temporarily replace the context
# pylint: disable=protected-access
del context._context
try:
context._context = context.Context(
device_policy=context.DEVICE_PLACEMENT_SILENT,
config=config_pb2.ConfigProto(allow_soft_placement=True))
cpu_tensor = constant_op.constant(1.0)
result = cpu_tensor + cpu_tensor
self.assertEqual(result.device,
'/job:localhost/replica:0/task:0/device:GPU:0')
finally:
del context._context
context._context = context.Context()
# pylint: enable=protected-access
def testRandomUniform(self):
scalar_shape = constant_op.constant([], dtype=dtypes.int32)
x = random_ops.random_uniform(scalar_shape)
self.assertEquals(0, x.shape.ndims)
self.assertEquals(dtypes.float32, x.dtype)
x = random_ops.random_uniform(
scalar_shape, minval=constant_op.constant(5.),
maxval=constant_op.constant(6.))
self.assertLess(x, 6)
self.assertGreaterEqual(x, 5)
def testArgsToMatchingEagerDefault(self):
# Uses default
ctx = context.context()
t, r = execute.args_to_matching_eager([[3, 4]], ctx, dtypes.int32)
self.assertEquals(t, dtypes.int32)
self.assertEquals(r[0].dtype, dtypes.int32)
t, r = execute.args_to_matching_eager([[3, 4]], ctx, dtypes.int64)
self.assertEquals(t, dtypes.int64)
self.assertEquals(r[0].dtype, dtypes.int64)
# Doesn't use default
t, r = execute.args_to_matching_eager(
[['string', 'arg']], ctx, dtypes.int32)
self.assertEquals(t, dtypes.string)
self.assertEquals(r[0].dtype, dtypes.string)
def testFlattenLayer(self):
flatten_layer = core.Flatten()
x = constant_op.constant([[[-10, -20], [-30, -40]], [[10, 20], [30, 40]]])
y = flatten_layer(x)
self.assertAllEqual([[-10, -20, -30, -40], [10, 20, 30, 40]], y)
def testIdentity(self):
self.assertAllEqual(2, array_ops.identity(2))
def testIdentityOnVariable(self):
if not context.context().num_gpus():
self.skipTest('No GPUs found')
with context.device('/gpu:0'):
v = resource_variable_ops.ResourceVariable(True)
self.assertAllEqual(True, array_ops.identity(v))
def testIncompatibleSetShape(self):
x = constant_op.constant(1)
with self.assertRaises(ValueError):
x.set_shape((1, 2))
def testCompatibleSetShape(self):
x = constant_op.constant([[1, 2]])
x.set_shape(tensor_shape.TensorShape([None, 2]))
self.assertEqual(x.get_shape(), (1, 2))
def testCastScalarToPrimitiveTypes(self):
x = constant_op.constant(1.3)
self.assertIsInstance(int(x), int)
self.assertEqual(int(x), 1)
self.assertIsInstance(float(x), float)
self.assertAllClose(float(x), 1.3)
def testCastNonScalarToPrimitiveTypesFails(self):
x = constant_op.constant([1.3, 2])
with self.assertRaises(TypeError):
int(x)
with self.assertRaises(TypeError):
float(x)
def testFormatString(self):
x = constant_op.constant(3.1415)
self.assertEqual('3.14', '{:.2f}'.format(x))
def testNoOpIsNone(self):
self.assertTrue(control_flow_ops.no_op() is None)
if __name__ == '__main__':
test.main()
| 37.109948
| 80
| 0.657308
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.eager import context
from tensorflow.python.eager import execute
from tensorflow.python.eager import test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.layers import core
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import sparse_ops
class OpsTest(test_util.TensorFlowTestCase):
def testExecuteBasic(self):
three = constant_op.constant(3)
five = constant_op.constant(5)
product = three * five
self.assertAllEqual(15, product)
def testMatMulGPU(self):
if not context.context().num_gpus():
self.skipTest('No GPUs found')
three = constant_op.constant([[3.]]).gpu()
five = constant_op.constant([[5.]]).gpu()
product = math_ops.matmul(three, five)
self.assertEqual([[15.0]], product.numpy())
def testExecuteStringAttr(self):
three = constant_op.constant(3.0)
checked_three = array_ops.check_numerics(three,
message='just checking')
self.assertEqual([[3]], checked_three.numpy())
def testExecuteFloatAttr(self):
three = constant_op.constant(3.0)
almost_three = constant_op.constant(2.8)
almost_equal = math_ops.approximate_equal(
three, almost_three, tolerance=0.3)
self.assertTrue(almost_equal)
def testExecuteIntAttr(self):
three = constant_op.constant(3)
four = constant_op.constant(4)
total = math_ops.add_n([three, four])
self.assertAllEqual(7, total)
def testExecuteBoolAttr(self):
three = constant_op.constant([[3]])
five = constant_op.constant([[5]])
product = math_ops.matmul(three, five, transpose_a=True)
self.assertAllEqual([[15]], product)
def testExecuteOneListOutput(self):
split_dim = constant_op.constant(1)
value = constant_op.constant([[0, 1, 2], [3, 4, 5]])
x1, x2, x3 = array_ops.split(value, 3, axis=split_dim)
self.assertAllEqual([[0], [3]], x1)
self.assertAllEqual([[1], [4]], x2)
self.assertAllEqual([[2], [5]], x3)
def testGraphMode(self):
graph = ops.Graph()
with graph.as_default(), context.graph_mode():
array_ops.placeholder(dtypes.int32)
self.assertEqual(1, len(graph.get_operations()))
def testInt32CPUDefault(self):
if not context.context().num_gpus():
self.skipTest('No GPUs found')
with context.device('/gpu:0'):
r = constant_op.constant(1) + constant_op.constant(2)
self.assertAllEqual(r, 3)
def testExecuteListOutputLen1(self):
split_dim = constant_op.constant(1)
value = constant_op.constant([[0, 1, 2], [3, 4, 5]])
result = array_ops.split(value, 1, axis=split_dim)
self.assertTrue(isinstance(result, list))
self.assertEqual(1, len(result))
self.assertAllEqual([[0, 1, 2], [3, 4, 5]], result[0])
def testExecuteListOutputLen0(self):
empty = constant_op.constant([], dtype=dtypes.int32)
result = array_ops.unstack(empty, 0)
self.assertTrue(isinstance(result, list))
self.assertEqual(0, len(result))
def testExecuteMultipleNonListOutput(self):
x = constant_op.constant([1, 2, 3, 4, 5, 6])
y = constant_op.constant([1, 3, 5])
result = array_ops.listdiff(x, y)
out, idx = result
self.assertTrue(out is result.out)
self.assertTrue(idx is result.idx)
self.assertAllEqual([2, 4, 6], out)
self.assertAllEqual([1, 3, 5], idx)
def testExecuteMultipleListOutput(self):
split_dim = constant_op.constant(1, dtype=dtypes.int64)
indices = constant_op.constant([[0, 2], [0, 4], [0, 5], [1, 0], [1, 1]],
dtype=dtypes.int64)
values = constant_op.constant([2, 3, 5, 7, 11])
shape = constant_op.constant([2, 7], dtype=dtypes.int64)
result = sparse_ops.gen_sparse_ops.sparse_split(
split_dim,
indices,
values,
shape,
num_split=2)
output_indices, output_values, output_shape = result
self.assertEqual(2, len(output_indices))
self.assertEqual(2, len(output_values))
self.assertEqual(2, len(output_shape))
self.assertEqual(output_indices, result.output_indices)
self.assertEqual(output_values, result.output_values)
self.assertEqual(output_shape, result.output_shape)
self.assertAllEqual([[0, 2], [1, 0], [1, 1]], output_indices[0])
self.assertAllEqual([[0, 0], [0, 1]], output_indices[1])
self.assertAllEqual([2, 7, 11], output_values[0])
self.assertAllEqual([3, 5], output_values[1])
self.assertAllEqual([2, 4], output_shape[0])
self.assertAllEqual([2, 3], output_shape[1])
# have an example of legal inputs & outputs).
def testComposition(self):
x = constant_op.constant(1, dtype=dtypes.int32)
three_x = x + x + x
self.assertEquals(dtypes.int32, three_x.dtype)
self.assertAllEqual(3, three_x)
def testOperatorOverrides(self):
# TODO(henrytan): test with negative number.
a = constant_op.constant([1])
b = constant_op.constant([2])
self.assertAllEqual((-a), [-1])
self.assertAllEqual(abs(b), [2])
self.assertAllEqual((a + b), [3])
self.assertAllEqual((a - b), [-1])
self.assertAllEqual((a * b), [2])
self.assertAllEqual((a * a), [1])
self.assertAllEqual((a**b), [1])
self.assertAllEqual((a / b), [1 / 2])
self.assertAllEqual((a / a), [1])
self.assertAllEqual((a % b), [1])
self.assertAllEqual((a < b), [True])
self.assertAllEqual((a <= b), [True])
self.assertAllEqual((a > b), [False])
self.assertAllEqual((a >= b), [False])
self.assertAllEqual((a == b), False)
self.assertAllEqual((a != b), True)
self.assertAllEqual(1, a[constant_op.constant(0)])
def test_basic_slice(self):
npt = np.arange(1, 19, dtype=np.float32).reshape(3, 2, 3)
t = constant_op.constant(npt)
self.assertAllEqual(npt[:, :, :], t[:, :, :])
self.assertAllEqual(npt[::, ::, ::], t[::, ::, ::])
self.assertAllEqual(npt[::1, ::1, ::1], t[::1, ::1, ::1])
self.assertAllEqual(npt[::1, ::5, ::2], t[::1, ::5, ::2])
self.assertAllEqual(npt[::-1, :, :], t[::-1, :, :])
self.assertAllEqual(npt[:, ::-1, :], t[:, ::-1, :])
self.assertAllEqual(npt[:, :, ::-1], t[:, :, ::-1])
self.assertAllEqual(npt[-2::-1, :, ::1], t[-2::-1, :, ::1])
self.assertAllEqual(npt[-2::-1, :, ::2], t[-2::-1, :, ::2])
def testDegenerateSlices(self):
npt = np.arange(1, 19, dtype=np.float32).reshape(3, 2, 3)
t = constant_op.constant(npt)
# degenerate by offering a forward interval with a negative stride
self.assertAllEqual(npt[0:-1:-1, :, :], t[0:-1:-1, :, :])
# degenerate with a reverse interval with a positive stride
self.assertAllEqual(npt[-1:0, :, :], t[-1:0, :, :])
# empty interval in every dimension
self.assertAllEqual(npt[-1:0, 2:2, 2:3:-1], t[-1:0, 2:2, 2:3:-1])
def testEllipsis(self):
npt = np.array(
[[[[[1, 2], [3, 4], [5, 6]]], [[[7, 8], [9, 10], [11, 12]]]]])
t = constant_op.constant(npt)
self.assertAllEqual(npt[0:], t[0:])
# implicit ellipsis
self.assertAllEqual(npt[0:, ...], t[0:, ...])
# ellipsis alone
self.assertAllEqual(npt[...], t[...])
# ellipsis at end
self.assertAllEqual(npt[0:1, ...], t[0:1, ...])
# ellipsis at begin
self.assertAllEqual(npt[..., 0:1], t[..., 0:1])
# ellipsis at middle
self.assertAllEqual(npt[0:1, ..., 0:1], t[0:1, ..., 0:1])
def testShrink(self):
npt = np.array([[[[[1, 2, 4, 5], [5, 6, 7, 8], [9, 10, 11, 12]]],
[[[13, 14, 15, 16], [17, 18, 19, 20], [21, 22, 23, 24]]]]])
t = constant_op.constant(npt)
self.assertAllEqual(npt[:, :, :, :, 3], t[:, :, :, :, 3])
self.assertAllEqual(npt[..., 3], t[..., 3])
self.assertAllEqual(npt[:, 0], t[:, 0])
self.assertAllEqual(npt[:, :, 0], t[:, :, 0])
def testOpWithInputsOnDifferentDevices(self):
if not context.context().num_gpus():
self.skipTest('No GPUs found')
# The GPU kernel for the Reshape op requires that the
# shape input be on CPU.
value = constant_op.constant([1., 2.]).gpu()
shape = constant_op.constant([2, 1])
reshaped = array_ops.reshape(value, shape)
self.assertAllEqual([[1], [2]], reshaped.cpu())
def testInt64(self):
# Fill requires the first input to be an int32 tensor.
self.assertAllEqual(
[1.0, 1.0],
array_ops.fill(constant_op.constant([2], dtype=dtypes.int64),
constant_op.constant(1)))
def testOutputOnHostMemory(self):
if not context.context().num_gpus():
self.skipTest('No GPUs found')
# The Shape op kernel on GPU places the output in host memory.
value = constant_op.constant([1.]).gpu()
shape = array_ops.shape(value)
self.assertEqual([1], shape.numpy())
def testSilentCopy(self):
if not context.context().num_gpus():
self.skipTest('No GPUs found')
# Temporarily replace the context
# pylint: disable=protected-access
del context._context
try:
context._context = context.Context(
device_policy=context.DEVICE_PLACEMENT_SILENT)
cpu_tensor = constant_op.constant(1.0)
gpu_tensor = cpu_tensor.gpu()
self.assertAllEqual(cpu_tensor + gpu_tensor, 2.0)
finally:
del context._context
context._context = context.Context()
# pylint: enable=protected-access
def testSoftPlacement(self):
if not context.context().num_gpus():
self.skipTest('No GPUs found')
# Temporarily replace the context
# pylint: disable=protected-access
del context._context
try:
context._context = context.Context(
device_policy=context.DEVICE_PLACEMENT_SILENT,
config=config_pb2.ConfigProto(allow_soft_placement=True))
cpu_tensor = constant_op.constant(1.0)
result = cpu_tensor + cpu_tensor
self.assertEqual(result.device,
'/job:localhost/replica:0/task:0/device:GPU:0')
finally:
del context._context
context._context = context.Context()
# pylint: enable=protected-access
def testRandomUniform(self):
scalar_shape = constant_op.constant([], dtype=dtypes.int32)
x = random_ops.random_uniform(scalar_shape)
self.assertEquals(0, x.shape.ndims)
self.assertEquals(dtypes.float32, x.dtype)
x = random_ops.random_uniform(
scalar_shape, minval=constant_op.constant(5.),
maxval=constant_op.constant(6.))
self.assertLess(x, 6)
self.assertGreaterEqual(x, 5)
def testArgsToMatchingEagerDefault(self):
# Uses default
ctx = context.context()
t, r = execute.args_to_matching_eager([[3, 4]], ctx, dtypes.int32)
self.assertEquals(t, dtypes.int32)
self.assertEquals(r[0].dtype, dtypes.int32)
t, r = execute.args_to_matching_eager([[3, 4]], ctx, dtypes.int64)
self.assertEquals(t, dtypes.int64)
self.assertEquals(r[0].dtype, dtypes.int64)
# Doesn't use default
t, r = execute.args_to_matching_eager(
[['string', 'arg']], ctx, dtypes.int32)
self.assertEquals(t, dtypes.string)
self.assertEquals(r[0].dtype, dtypes.string)
def testFlattenLayer(self):
flatten_layer = core.Flatten()
x = constant_op.constant([[[-10, -20], [-30, -40]], [[10, 20], [30, 40]]])
y = flatten_layer(x)
self.assertAllEqual([[-10, -20, -30, -40], [10, 20, 30, 40]], y)
def testIdentity(self):
self.assertAllEqual(2, array_ops.identity(2))
def testIdentityOnVariable(self):
if not context.context().num_gpus():
self.skipTest('No GPUs found')
with context.device('/gpu:0'):
v = resource_variable_ops.ResourceVariable(True)
self.assertAllEqual(True, array_ops.identity(v))
def testIncompatibleSetShape(self):
x = constant_op.constant(1)
with self.assertRaises(ValueError):
x.set_shape((1, 2))
def testCompatibleSetShape(self):
x = constant_op.constant([[1, 2]])
x.set_shape(tensor_shape.TensorShape([None, 2]))
self.assertEqual(x.get_shape(), (1, 2))
def testCastScalarToPrimitiveTypes(self):
x = constant_op.constant(1.3)
self.assertIsInstance(int(x), int)
self.assertEqual(int(x), 1)
self.assertIsInstance(float(x), float)
self.assertAllClose(float(x), 1.3)
def testCastNonScalarToPrimitiveTypesFails(self):
x = constant_op.constant([1.3, 2])
with self.assertRaises(TypeError):
int(x)
with self.assertRaises(TypeError):
float(x)
def testFormatString(self):
x = constant_op.constant(3.1415)
self.assertEqual('3.14', '{:.2f}'.format(x))
def testNoOpIsNone(self):
self.assertTrue(control_flow_ops.no_op() is None)
if __name__ == '__main__':
test.main()
| true
| true
|
f70c7574cc07577b99f8a4f4fe68633e07e3f8f5
| 5,126
|
py
|
Python
|
docs/conf.py
|
Addepar/dataflake.fakeldap
|
6ef1b3b9b8d7198a132b7dcce83d5a855db9a577
|
[
"ZPL-2.1"
] | null | null | null |
docs/conf.py
|
Addepar/dataflake.fakeldap
|
6ef1b3b9b8d7198a132b7dcce83d5a855db9a577
|
[
"ZPL-2.1"
] | 6
|
2017-12-12T00:52:22.000Z
|
2018-02-08T15:47:42.000Z
|
docs/conf.py
|
Addepar/dataflake.fakeldap
|
6ef1b3b9b8d7198a132b7dcce83d5a855db9a577
|
[
"ZPL-2.1"
] | 1
|
2020-05-13T11:29:59.000Z
|
2020-05-13T11:29:59.000Z
|
# -*- coding: utf-8 -*-
#
# dataflake.fakeldap documentation build configuration file, created by
# sphinx-quickstart on Sat May 27 10:35:35 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import datetime
import os
import sys
parent = os.path.dirname(os.path.dirname(__file__))
parent_dir = os.path.abspath(parent)
with open(os.path.join(parent_dir, 'version.txt'), 'r') as version_file:
pkg_version = version_file.read().strip()
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
#templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'dataflake.fakeldap'
copyright = u'2010-%i, Jens Vagelpohl' % datetime.datetime.now().year
author = u'Jens Vagelpohl'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = pkg_version.replace(u'.dev0', u'')
# The full version, including alpha/beta/rc tags.
release = pkg_version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'classic'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'dataflakefakeldapdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'dataflakefakeldap.tex', u'dataflake.fakeldap Documentation',
u'Jens Vagelpohl', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'dataflakefakeldap', u'dataflake.fakeldap Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'dataflakefakeldap', u'dataflake.fakeldap Documentation',
author, 'dataflakefakeldap', 'One line description of project.',
'Miscellaneous'),
]
| 31.066667
| 79
| 0.690012
|
import datetime
import os
import sys
parent = os.path.dirname(os.path.dirname(__file__))
parent_dir = os.path.abspath(parent)
with open(os.path.join(parent_dir, 'version.txt'), 'r') as version_file:
pkg_version = version_file.read().strip()
extensions = []
source_suffix = '.rst'
master_doc = 'index'
project = u'dataflake.fakeldap'
copyright = u'2010-%i, Jens Vagelpohl' % datetime.datetime.now().year
author = u'Jens Vagelpohl'
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = pkg_version.replace(u'.dev0', u'')
# The full version, including alpha/beta/rc tags.
release = pkg_version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'classic'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'dataflakefakeldapdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'dataflakefakeldap.tex', u'dataflake.fakeldap Documentation',
u'Jens Vagelpohl', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'dataflakefakeldap', u'dataflake.fakeldap Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'dataflakefakeldap', u'dataflake.fakeldap Documentation',
author, 'dataflakefakeldap', 'One line description of project.',
'Miscellaneous'),
]
| true
| true
|
f70c7786464b6515cb46daab41e11d2e6ccad74d
| 1,468
|
py
|
Python
|
MusicP_Kit/Librosa1.py
|
alexissoto1/AudioNerd.py
|
107b7eb8a141994798eb10075e8591491ca9b471
|
[
"MIT"
] | null | null | null |
MusicP_Kit/Librosa1.py
|
alexissoto1/AudioNerd.py
|
107b7eb8a141994798eb10075e8591491ca9b471
|
[
"MIT"
] | null | null | null |
MusicP_Kit/Librosa1.py
|
alexissoto1/AudioNerd.py
|
107b7eb8a141994798eb10075e8591491ca9b471
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 15 10:14:35 2018
@author: alexissoto
"""
'''
Music Production Kit
'''
import librosa as lb
# Input audio file processed by every helper below.
# NOTE(review): lb.output.write_wav was removed in librosa >= 0.8 — confirm
# the pinned librosa version, or migrate to soundfile.write.
song = "Fantasia_Impromptu.m4a" #Input file
def TempoChange():
    """Write a copy of the first 30 s of `song` played at double speed.

    Output: 'Double fast.wav' in the working directory.
    """
    y, sr = lb.load(song, duration = 30)
    # NOTE(review): tempo/beat_frames are computed but never used here.
    tempo, beat_frames = lb.beat.beat_track(y = y, sr = sr)
    stretch = lb.effects.time_stretch(y, 2.0) #2.0 for double fast. 0.5 for half
    lb.output.write_wav('Double fast.wav', stretch, sr = sr) #Your output here.
def Harmonics():
    """Extract the harmonic component of `song` (first 30 s) to a WAV file."""
    y, sr = lb.load(song, duration = 30)
    y_harmonic = lb.effects.harmonic(y)
    lb.output.write_wav('Just harmonic content.wav', y_harmonic, sr=sr)
def Percussive():
    """Extract the percussive component of `song` (first 30 s) to a WAV file."""
    y, sr = lb.load(song, duration = 30)
    y_percussive = lb.effects.percussive(y)
    lb.output.write_wav('Just percusive content.wav', y_percussive, sr = sr)
def Both():
    """Split `song` into harmonic/percussive parts; write the percussive one.

    margin=(1.0, 5.0) makes the percussive separation more aggressive than
    the harmonic one. Only the percussive part is written out.
    """
    y, sr = lb.load(song, duration= 30)
    y_harmonic, y_percussive = lb.effects.hpss(y, margin = (1.0, 5.0))
    lb.output.write_wav('Percussive.wav', y_percussive, sr = sr)
def Steps():
    """Pitch-shift `song` (first 30 s) up two semitones and write it out."""
    y, sr = lb.load(song, duration = 30)
    y_third = lb.effects.pitch_shift(y, sr, n_steps = 2)
    lb.output.write_wav('Major second.wav', y_third, sr=sr)
def Tempo():
    """Estimate and print the tempo (BPM) of the first 30 s of `song`."""
    y, sr = lb.load(song, duration = 30)
    envelope = lb.onset.onset_strength(y, sr = sr)
    tempo = lb.beat.tempo(onset_envelope = envelope, sr=sr)
    print('Tempo is %d BPM' % tempo)
| 19.064935
| 80
| 0.629428
|
import librosa as lb
song = "Fantasia_Impromptu.m4a"
def TempoChange():
y, sr = lb.load(song, duration = 30)
tempo, beat_frames = lb.beat.beat_track(y = y, sr = sr)
stretch = lb.effects.time_stretch(y, 2.0)
lb.output.write_wav('Double fast.wav', stretch, sr = sr)
def Harmonics():
y, sr = lb.load(song, duration = 30)
y_harmonic = lb.effects.harmonic(y)
lb.output.write_wav('Just harmonic content.wav', y_harmonic, sr=sr)
def Percussive():
y, sr = lb.load(song, duration = 30)
y_percussive = lb.effects.percussive(y)
lb.output.write_wav('Just percusive content.wav', y_percussive, sr = sr)
def Both():
y, sr = lb.load(song, duration= 30)
y_harmonic, y_percussive = lb.effects.hpss(y, margin = (1.0, 5.0))
lb.output.write_wav('Percussive.wav', y_percussive, sr = sr)
def Steps():
y, sr = lb.load(song, duration = 30)
y_third = lb.effects.pitch_shift(y, sr, n_steps = 2)
lb.output.write_wav('Major second.wav', y_third, sr=sr)
def Tempo():
y, sr = lb.load(song, duration = 30)
envelope = lb.onset.onset_strength(y, sr = sr)
tempo = lb.beat.tempo(onset_envelope = envelope, sr=sr)
print('Tempo is %d BPM' % tempo)
| true
| true
|
f70c77dce55c5afe74d3faf888bccf4b6e67116c
| 6,274
|
py
|
Python
|
A2C/utils.py
|
CommanderCero/RL_Algorithms
|
fd8172e0075247b682a1dca752306147fa2ed3ba
|
[
"Apache-2.0"
] | 1
|
2021-10-06T14:45:41.000Z
|
2021-10-06T14:45:41.000Z
|
A2C/utils.py
|
CommanderCero/RL_Algorithms
|
fd8172e0075247b682a1dca752306147fa2ed3ba
|
[
"Apache-2.0"
] | null | null | null |
A2C/utils.py
|
CommanderCero/RL_Algorithms
|
fd8172e0075247b682a1dca752306147fa2ed3ba
|
[
"Apache-2.0"
] | null | null | null |
import scipy.signal as signal
import torch
import torch.nn as nn
import numpy as np
import models
import gym
import wandb
def create_feedforward(sizes, activation=nn.ReLU):
    """Build a fully-connected network as an ``nn.Sequential``.

    For each consecutive pair in ``sizes`` a ``nn.Linear(in, out)`` layer is
    added, with an ``activation()`` module between layers — but none after
    the final linear layer.

    :param sizes: iterable of layer widths, e.g. ``[obs_dim, 64, act_dim]``.
    :param activation: activation module class instantiated between layers.
    :return: ``nn.Sequential`` chaining the linear and activation modules.
    """
    pairs = list(zip(sizes[:-1], sizes[1:]))
    modules = []
    for idx, (fan_in, fan_out) in enumerate(pairs):
        modules.append(nn.Linear(fan_in, fan_out))
        # Skip the activation after the last linear layer so the network
        # emits raw (unsquashed) outputs.
        if idx != len(pairs) - 1:
            modules.append(activation())
    return nn.Sequential(*modules)
def get_shape(shape):
    """Normalize a shape argument, mapping ``None`` to the empty tuple.

    Lets scalar action spaces (``shape is None``) be splatted into
    ``np.empty((size, *shape))`` without a special case at the call site.
    """
    return () if shape is None else shape
def discounted_cumsum(rewards, reward_decay):
    """Compute discounted suffix sums: out[t] = r[t] + decay * out[t+1].

    Implemented with an IIR filter, as suggested in
    https://stackoverflow.com/questions/47970683/vectorize-a-numpy-discount-calculation:
    running ``y[n] = x[n] + reward_decay * y[n-1]`` over the reversed reward
    sequence produces the discounted returns-to-go, which are then reversed
    back into chronological order.
    """
    reversed_rewards = rewards[::-1]
    suffix_sums = signal.lfilter([1], [1, -reward_decay], x=reversed_rewards)
    return suffix_sums[::-1]
class TrajectoryBuffer:
    """Fixed-capacity storage for the transitions of a single environment.

    Rewards are converted into discounted returns each time `end_trajectory`
    closes the currently open trajectory.
    """

    def __init__(self, observation_shape, action_shape, size, reward_decay=0.99):
        self.max_size = size
        self.trajectory_start = 0
        self.pos = 0
        self.reward_decay = reward_decay
        # Pre-allocated storage, one row per transition.
        self.observations = np.empty((size, *observation_shape), dtype=np.float32)
        self.actions = np.empty((size, *get_shape(action_shape)), dtype=np.float32)
        self.rewards = np.empty((size,), dtype=np.float32)
        self.returns = np.empty((size,), dtype=np.float32)
        self.dones = np.empty((size,), dtype=np.float32)

    def store(self, observation, action, reward, done):
        """Append one transition; asserts that the buffer still has room."""
        assert self.pos < self.max_size, "Buffer Overflow"
        idx = self.pos
        self.observations[idx] = observation
        self.actions[idx] = action
        self.rewards[idx] = reward
        self.dones[idx] = done
        self.pos = idx + 1

    def end_trajectory(self, value=0):
        """Close the open trajectory; `value` bootstraps the return past the last step."""
        span = slice(self.trajectory_start, self.pos)
        tail_rewards = np.append(self.rewards[span], value)
        # Discount including the bootstrap value, then drop the extra entry again.
        self.returns[span] = discounted_cumsum(tail_rewards, self.reward_decay)[:-1]
        self.trajectory_start = self.pos

    def get_data(self):
        """Return everything stored so far as a dict of torch tensors."""
        span = slice(0, self.pos)
        arrays = {
            "observations": self.observations[span],
            "actions": self.actions[span],
            "rewards": self.rewards[span],
            "returns": self.returns[span],
            "dones": self.dones[span],
        }
        return {name: torch.from_numpy(arr) for name, arr in arrays.items()}

    def clear(self):
        """Reset the write position; old entries are overwritten lazily."""
        self.pos = 0
        self.trajectory_start = 0
class VecTrajectoryBuffer:
    """Transition storage for a vectorized environment with several parallel copies."""

    def __init__(self, observation_shape, action_shape, num_envs, size, reward_decay=0.99):
        self.max_size = size
        self.pos = 0
        self.reward_decay = reward_decay
        # Per-environment index of the first step of the currently open trajectory.
        self.traj_starts = np.zeros((num_envs,), dtype=int)
        self.observations = np.empty((size, num_envs, *observation_shape), dtype=np.float32)
        self.actions = np.empty((size, num_envs, *get_shape(action_shape)), dtype=np.float32)
        self.rewards = np.empty((size, num_envs), dtype=np.float32)
        self.returns = np.empty((size, num_envs), dtype=np.float32)
        self.dones = np.empty((size, num_envs), dtype=np.float32)

    def store(self, observations, actions, rewards, dones):
        """Append one step for every environment; closes trajectories that just ended."""
        assert self.pos < self.max_size, "Buffer Overflow"
        step = self.pos
        self.observations[step] = observations
        self.actions[step] = actions
        self.rewards[step] = rewards
        self.dones[step] = dones
        self.pos = step + 1
        for env_index, done in enumerate(dones):
            if done:
                self._end_trajectory(env_index)

    def end_trajectory(self, values):
        """Close every open trajectory; `values[i]` bootstraps environment i's return."""
        for env_index, value in enumerate(values):
            self._end_trajectory(env_index, value)

    def _end_trajectory(self, env_index, value=0):
        # Turn one environment's open reward segment into discounted returns.
        span = slice(self.traj_starts[env_index], self.pos)
        tail_rewards = np.append(self.rewards[span, env_index], value)
        self.returns[span, env_index] = discounted_cumsum(tail_rewards, self.reward_decay)[:-1]
        self.traj_starts[env_index] = self.pos

    def get_data(self, device=torch.device('cpu')):
        """Return stored data flattened over environments as tensors on `device`."""
        span = slice(0, self.pos)
        arrays = {
            "observations": self.observations[span],
            "actions": self.actions[span],
            "rewards": self.rewards[span],
            "returns": self.returns[span],
            "dones": self.dones[span],
        }
        return {name: torch.from_numpy(self._remove_env_axis(arr)).to(device)
                for name, arr in arrays.items()}

    def clear(self):
        """Reset the buffer for all environments."""
        self.pos = 0
        self.traj_starts.fill(0)

    def _remove_env_axis(self, array):
        """Collapse (size, num_envs, ...) into (size * num_envs, ...).

        The axes are swapped first so that each environment's trajectory stays
        contiguous after flattening.
        """
        size, num_envs, *rest = array.shape
        return array.swapaxes(0, 1).reshape((size * num_envs, *rest))
def play(model: models.Policy, env: gym.Env, repeats=10, device=torch.device('cpu')):
    """Roll out the policy in `env` for `repeats` rendered episodes, then close the env."""
    for _ in range(repeats):
        observation = env.reset()
        finished = False
        while not finished:
            batch = torch.FloatTensor([observation]).to(device)
            chosen_action = model.get_actions(batch)[0]
            observation, reward, finished, _ = env.step(chosen_action)
            env.render()
    env.close()
def capture_video(model: models.Policy, env: gym.Env, fps=30, device=torch.device('cpu')):
    """Play one episode and return it as a wandb.Video annotated with return and length."""
    recorded_frames = []
    total_reward = 0
    steps = 0
    observation = env.reset()
    finished = False
    while not finished:
        batch = torch.FloatTensor([observation]).to(device)
        chosen_action = model.get_actions(batch)[0]
        observation, reward, finished, _ = env.step(chosen_action)
        recorded_frames.append(np.array(env.render("rgb_array")))
        total_reward += reward
        steps += 1
    # wandb expects channels-first video: (Time, Channels, Width, Height).
    video = np.moveaxis(np.array(recorded_frames), 3, 1)
    return wandb.Video(video, caption=f"RewardSum={total_reward}; EpisodeLength={steps}", fps=fps)
| 36.265896
| 104
| 0.603762
|
import scipy.signal as signal
import torch
import torch.nn as nn
import numpy as np
import models
import gym
import wandb
def create_feedforward(sizes, activation=nn.ReLU):
    """Build an MLP from layer widths `sizes`; `activation` goes after every layer but the last."""
    layers = []
    for i in range(len(sizes) - 1):
        layers.append(nn.Linear(sizes[i], sizes[i+1]))
        if i < len(sizes) - 2:
            layers.append(activation())
    return nn.Sequential(*layers)
def get_shape(shape):
    """Return `shape` unchanged, or the empty tuple when it is None."""
    if shape is None:
        return ()
    return shape
def discounted_cumsum(rewards, reward_decay):
    """Discounted cumulative sum via lfilter: out[i] = sum_k reward_decay**k * rewards[i+k]."""
    return signal.lfilter([1], [1, -reward_decay], x=rewards[::-1])[::-1]
class TrajectoryBuffer:
    """Fixed-size storage for the transitions of a single environment.

    Rewards are turned into discounted returns when `end_trajectory` is called.
    """
    def __init__(self, observation_shape, action_shape, size, reward_decay=0.99):
        # size: maximum number of transitions; reward_decay: discount factor.
        self.max_size = size
        self.trajectory_start = 0
        self.pos = 0
        self.reward_decay = reward_decay
        # Pre-allocated arrays, one row per stored transition.
        self.observations = np.empty((size, *observation_shape), dtype=np.float32)
        self.actions = np.empty((size, *get_shape(action_shape)), dtype=np.float32)
        self.rewards = np.empty((size,), dtype=np.float32)
        self.returns = np.empty((size,), dtype=np.float32)
        self.dones = np.empty((size,), dtype=np.float32)
    def store(self, observation, action, reward, done):
        """Append one transition; asserts there is room left."""
        assert self.pos < self.max_size, "Buffer Overflow"
        self.observations[self.pos] = observation
        self.actions[self.pos] = action
        self.rewards[self.pos] = reward
        self.dones[self.pos] = done
        self.pos += 1
    def end_trajectory(self, value=0):
        """Close the open trajectory; `value` bootstraps the return of the final state."""
        sl = slice(self.trajectory_start, self.pos)
        rewards = self.rewards[sl]
        # Append the bootstrap value, discount, then drop the extra entry again.
        rewards = np.append(rewards, value)
        self.returns[sl] = discounted_cumsum(rewards, self.reward_decay)[:-1]
        self.trajectory_start = self.pos
    def get_data(self):
        """Return all stored data as a dict of torch tensors (from_numpy shares the arrays)."""
        sl = slice(0, self.pos)
        data = dict(
            observations=self.observations[sl],
            actions=self.actions[sl],
            rewards=self.rewards[sl],
            returns=self.returns[sl],
            dones=self.dones[sl]
        )
        return {key : torch.from_numpy(value) for key, value in data.items()}
    def clear(self):
        """Reset the buffer; old entries are overwritten by later stores."""
        self.pos = 0
        self.trajectory_start = 0
class VecTrajectoryBuffer:
    """Transition storage for a vectorized environment with `num_envs` parallel copies."""
    def __init__(self, observation_shape, action_shape, num_envs, size, reward_decay=0.99):
        self.max_size = size
        self.pos = 0
        self.reward_decay = reward_decay
        # Per-environment start index of the currently open trajectory.
        self.traj_starts = np.zeros((num_envs,), dtype=int)
        self.observations = np.empty((size, num_envs, *observation_shape), dtype=np.float32)
        self.actions = np.empty((size, num_envs, *get_shape(action_shape)), dtype=np.float32)
        self.rewards = np.empty((size, num_envs), dtype=np.float32)
        self.returns = np.empty((size, num_envs), dtype=np.float32)
        self.dones = np.empty((size, num_envs), dtype=np.float32)
    def store(self, observations, actions, rewards, dones):
        """Append one step for every environment and close trajectories that just ended."""
        assert self.pos < self.max_size, "Buffer Overflow"
        self.observations[self.pos] = observations
        self.actions[self.pos] = actions
        self.rewards[self.pos] = rewards
        self.dones[self.pos] = dones
        self.pos += 1
        # Finish the trajectory of every environment whose episode ended this step.
        for env_index, done in enumerate(dones):
            if done:
                self._end_trajectory(env_index)
    def end_trajectory(self, values):
        """Close all open trajectories; `values[i]` bootstraps environment i's last state."""
        for env_index, value in enumerate(values):
            self._end_trajectory(env_index, value)
    def _end_trajectory(self, env_index, value=0):
        # Compute discounted returns for one environment's open trajectory.
        sl = slice(self.traj_starts[env_index], self.pos)
        rewards = self.rewards[sl, env_index]
        rewards = np.append(rewards, value)
        self.returns[sl, env_index] = discounted_cumsum(rewards, self.reward_decay)[:-1]
        self.traj_starts[env_index] = self.pos
    def get_data(self, device=torch.device('cpu')):
        """Return stored data flattened over environments as a dict of tensors on `device`."""
        sl = slice(0, self.pos)
        data = dict(
            observations=self._remove_env_axis(self.observations[sl]),
            actions=self._remove_env_axis(self.actions[sl]),
            rewards=self._remove_env_axis(self.rewards[sl]),
            returns=self._remove_env_axis(self.returns[sl]),
            dones=self._remove_env_axis(self.dones[sl])
        )
        return {key : torch.from_numpy(value).to(device) for key, value in data.items()}
    def clear(self):
        """Reset the buffer for all environments."""
        self.pos = 0
        self.traj_starts.fill(0)
    def _remove_env_axis(self, array):
        # array.shape = (size, num_envs, ...)
        shape = array.shape
        # Swap size with num_envs so reshaping keeps each trajectory contiguous.
        array = array.swapaxes(0, 1)
        # Flatten
        new_shape = (shape[0] * shape[1], *shape[2:])
        array = array.reshape(new_shape)
        return array
def play(model: models.Policy, env: gym.Env, repeats=10, device=torch.device('cpu')):
    """Run `repeats` rendered episodes in `env` using the policy's actions, then close it."""
    for _ in range(repeats):
        state = env.reset()
        done = False
        while not done:
            inp = torch.FloatTensor([state]).to(device)
            action = model.get_actions(inp)[0]
            state, reward, done, _ = env.step(action)
            env.render()
    env.close()
def capture_video(model: models.Policy, env: gym.Env, fps=30, device=torch.device('cpu')):
    """Play one episode and return it as a wandb.Video annotated with return and length."""
    frames = []
    reward_sum = 0
    step_count = 0
    state = env.reset()
    done = False
    while not done:
        inp = torch.FloatTensor([state]).to(device)
        action = model.get_actions(inp)[0]
        state, reward, done, _ = env.step(action)
        frames.append(np.array(env.render("rgb_array")))
        reward_sum += reward
        step_count += 1
    frames = np.array(frames) # (Time, Width, Height, Channels)
    frames = np.moveaxis(frames, 3, 1) # (Time, Channels, Width, Height) as wandb expects
    return wandb.Video(frames, caption=f"RewardSum={reward_sum}; EpisodeLength={step_count}", fps=fps)
| true
| true
|
f70c783d327d05400be9c870d3de601d76a9901d
| 3,408
|
py
|
Python
|
src/capsulesEM_V1/capsules/nets.py
|
LeanderLXZ/oracle-recognition
|
c82976333d4a72218b06fffc94192238d95fcf9e
|
[
"Apache-2.0"
] | 1
|
2022-02-08T09:33:16.000Z
|
2022-02-08T09:33:16.000Z
|
src/capsulesEM_V1/capsules/nets.py
|
LeanderLXZ/oracle-recognition
|
c82976333d4a72218b06fffc94192238d95fcf9e
|
[
"Apache-2.0"
] | null | null | null |
src/capsulesEM_V1/capsules/nets.py
|
LeanderLXZ/oracle-recognition
|
c82976333d4a72218b06fffc94192238d95fcf9e
|
[
"Apache-2.0"
] | null | null | null |
"""An implementation of matrix capsules with EM routing.
"""
import tensorflow as tf
from core import _conv2d_wrapper, capsules_init, capsules_conv, capsules_fc
slim = tf.contrib.slim
# ------------------------------------------------------------------------------#
# -------------------------------- capsules net --------------------------------#
# ------------------------------------------------------------------------------#
def capsules_v0(inputs, num_classes, iterations, name='CapsuleEM-V0'):
  """Build the network from `Matrix Capsules with EM Routing`.

  inputs -> 5x5 conv (stride 2) -> primary capsules -> two 3x3 capsule convs
  -> class capsules. Returns (poses, activations) of the class capsules.
  """
  with tf.variable_scope(name) as scope:
    # Plain convolution: [N, H, W, 1] -> [N, OH, OW, 32].
    conv1 = _conv2d_wrapper(
      inputs, shape=[5, 5, 1, 32], strides=[1, 2, 2, 1], padding='SAME', add_bias=True, activation_fn=tf.nn.relu, name='conv1'
    )
    # Primary capsules: 1x1 conv producing 32 capsule types with 4x4 poses.
    primary_caps = capsules_init(
      conv1, shape=[1, 1, 32, 32], strides=[1, 1, 1, 1], padding='VALID', pose_shape=[4, 4], name='capsule_init'
    )
    # Two capsule convolutions with EM routing: stride 2, then stride 1.
    conv_caps1 = capsules_conv(
      primary_caps, shape=[3, 3, 32, 32], strides=[1, 2, 2, 1], iterations=iterations, name='capsule_conv1'
    )
    conv_caps2 = capsules_conv(
      conv_caps1, shape=[3, 3, 32, 32], strides=[1, 1, 1, 1], iterations=iterations, name='capsule_conv2'
    )
    # Fully-connected class capsules: one capsule per output class.
    poses, activations = capsules_fc(
      conv_caps2, num_classes, iterations=iterations, name='capsule_fc'
    )
  return poses, activations
# ------------------------------------------------------------------------------#
# ------------------------------------ loss ------------------------------------#
# ------------------------------------------------------------------------------#
def spread_loss(labels, activations, margin, name):
  """Add the spread loss between the target-class activation and all other activations.

  :param labels: [N, O] one-hot labels, where O is the number of output classes.
  :param activations: [N, O] class-capsule activations.
  :param margin: scalar margin, scheduled from 0.2 to 0.9 during training.
  :param name: variable-scope name.
  :return: the scalar spread loss (also registered via tf.losses.add_loss).
  """
  activations_shape = activations.get_shape().as_list()
  with tf.variable_scope(name) as scope:
    # Split activations into the target class ([N, 1]) and the rest ([N, O - 1]).
    a_target = tf.reshape(
      tf.boolean_mask(activations, tf.equal(labels, 1)), [activations_shape[0], 1]
    )
    a_others = tf.reshape(
      tf.boolean_mask(activations, tf.equal(labels, 0)), [activations_shape[0], activations_shape[1] - 1]
    )
    # Squared hinge on the margin between target and each wrong class, summed over all.
    gap_mit = tf.reduce_sum(tf.square(tf.nn.relu(margin - (a_target - a_others))))
    tf.losses.add_loss(gap_mit)
    return gap_mit
# ------------------------------------------------------------------------------#
| 33.742574
| 135
| 0.54196
|
import tensorflow as tf
from core import _conv2d_wrapper, capsules_init, capsules_conv, capsules_fc
slim = tf.contrib.slim
def capsules_v0(inputs, num_classes, iterations, name='CapsuleEM-V0'):
  """Matrix-capsule network with EM routing; returns (poses, activations) of the class capsules."""
  with tf.variable_scope(name) as scope:
    # Regular 5x5 conv, stride 2: [N, H, W, 1] -> [N, OH, OW, 32].
    nets = _conv2d_wrapper(
      inputs, shape=[5, 5, 1, 32], strides=[1, 2, 2, 1], padding='SAME', add_bias=True, activation_fn=tf.nn.relu, name='conv1'
    )
    # Primary capsules: 1x1 conv producing 32 capsule types with 4x4 poses.
    nets = capsules_init(
      nets, shape=[1, 1, 32, 32], strides=[1, 1, 1, 1], padding='VALID', pose_shape=[4, 4], name='capsule_init'
    )
    # Capsule convolution with EM routing, stride 2.
    nets = capsules_conv(
      nets, shape=[3, 3, 32, 32], strides=[1, 2, 2, 1], iterations=iterations, name='capsule_conv1'
    )
    # Capsule convolution with EM routing, stride 1.
    nets = capsules_conv(
      nets, shape=[3, 3, 32, 32], strides=[1, 1, 1, 1], iterations=iterations, name='capsule_conv2'
    )
    # Fully-connected class capsules (shared view-transform matrix within each channel).
    nets = capsules_fc(
      nets, num_classes, iterations=iterations, name='capsule_fc'
    )
    poses, activations = nets
  return poses, activations
def spread_loss(labels, activations, margin, name):
  """Spread loss between the target-class activation and all other activations.

  labels: [N, O] one-hot; activations: [N, O]; margin: scalar scheduled
  from 0.2 to 0.9 during training. The loss is registered via tf.losses.
  """
  activations_shape = activations.get_shape().as_list()
  with tf.variable_scope(name) as scope:
    mask_t = tf.equal(labels, 1)  # target-class positions
    mask_i = tf.equal(labels, 0)  # all other class positions
    activations_t = tf.reshape(
      tf.boolean_mask(activations, mask_t), [activations_shape[0], 1]
    )
    activations_i = tf.reshape(
      tf.boolean_mask(activations, mask_i), [activations_shape[0], activations_shape[1] - 1]
    )
    # Sum over batch and classes of relu(margin - (a_target - a_other))^2.
    gap_mit = tf.reduce_sum(
      tf.square(
        tf.nn.relu(
          margin - (activations_t - activations_i)
        )
      )
    )
    tf.losses.add_loss(gap_mit)
    return gap_mit
| true
| true
|
f70c79439c30f64e1f79aeb6fa3efae3b7615004
| 7,227
|
py
|
Python
|
tests/test_get_pr_info.py
|
yamap55/pull_request_info_compile
|
afea4148dd7d44bb9cf939373455f9edb747bf7b
|
[
"MIT"
] | null | null | null |
tests/test_get_pr_info.py
|
yamap55/pull_request_info_compile
|
afea4148dd7d44bb9cf939373455f9edb747bf7b
|
[
"MIT"
] | 4
|
2021-01-02T15:14:19.000Z
|
2021-01-10T13:54:13.000Z
|
tests/test_get_pr_info.py
|
yamap55/pull_request_info_compile
|
afea4148dd7d44bb9cf939373455f9edb747bf7b
|
[
"MIT"
] | null | null | null |
import re
from textwrap import dedent
from unittest import mock
from unittest.mock import PropertyMock
import pytest
from get_pr_info import extract_target_section, get_pr_number_from_commit_message, get_pr_summary
from github import BadCredentialsException, UnknownObjectException
class TestGetPrNumberFromCommitMessage:
    """Tests for get_pr_number_from_commit_message."""
    @pytest.fixture(autouse=True)
    def setUp(self):
        # Pattern capturing the digits that follow a '#', e.g. "#123".
        self.pattern = re.compile(r"#(\d*)")
    def test_not_match(self):
        # No PR number in the message -> 0.
        actual = get_pr_number_from_commit_message("abcdefg", self.pattern)
        expected = 0
        assert actual == expected
    def test_match(self):
        actual = get_pr_number_from_commit_message("ab#123cd", self.pattern)
        expected = 123
        assert actual == expected
    def test_multi_match(self):
        # Only the first occurrence is used.
        actual = get_pr_number_from_commit_message("ab#123cd#456ef", self.pattern)
        expected = 123
        assert actual == expected
    def test_second_line_match(self):
        # When the PR number appears only on the second line of the commit
        # message, no number is extracted.
        actual = get_pr_number_from_commit_message("abc\ncd#123ef", self.pattern)
        expected = 0
        assert actual == expected
    def test_blank_str(self):
        actual = get_pr_number_from_commit_message("", self.pattern)
        expected = 0
        assert actual == expected
class TestGetPrInfo:
    """Tests for get_pr_summary; all GitHub API access is mocked."""
    @pytest.fixture(autouse=True)
    def setUp(self):
        # Mock the call chain Github() -> get_repo() -> get_pull().
        mock_client = mock.Mock()
        mock_repo = mock.Mock()
        mock_pull = mock.Mock()
        with mock.patch("get_pr_info.Github") as mock_github:
            mock_github.return_value = mock_client
            mock_client.get_repo.return_value = mock_repo
            mock_repo.get_pull.return_value = mock_pull
            self.mock_github = mock_github
            self.mock_client = mock_client
            self.mock_repo = mock_repo
            self.mock_pull = mock_pull
            yield
    def test_nomal(self):
        # Happy path: the PR body is returned unchanged.
        mock_body = PropertyMock(return_value="PR_INFO")
        type(self.mock_pull).body = mock_body
        actual = get_pr_summary(999, "GITHUB_TOKEN", "REPOSITORY_NAME")
        expected = "PR_INFO"
        assert actual == expected
        self.mock_github.assert_called_once_with("GITHUB_TOKEN")
        self.mock_client.get_repo.assert_called_once_with("REPOSITORY_NAME")
        self.mock_repo.get_pull.assert_called_once_with(999)
        assert mock_body.call_count == 1
    def test_bad_credential(self):
        # Case: the token is invalid.
        # The exception is raised from a mock because we do not want to hit the real API.
        # As a pure unit test this proves little, but it documents the expected behavior.
        # NOTE: even with a bad token, constructing the Github instance succeeds; the
        # error is only raised on the first actual GitHub operation.
        self.mock_client.get_repo.side_effect = BadCredentialsException(
            401,
            data={
                "message": "Bad credentials",
                "documentation_url": "https://docs.github.com/rest",
            },
        )
        with pytest.raises(BadCredentialsException):
            get_pr_summary(999, "GITHUB_TOKEN", "REPOSITORY_NAME")
        self.mock_github.assert_called_once_with("GITHUB_TOKEN")
        self.mock_client.get_repo.assert_called_once_with("REPOSITORY_NAME")
        self.mock_repo.get_pull.assert_not_called()
    def test_not_exists_repository(self):
        # Case: the repository does not exist.
        # The exception is raised from a mock because we do not want to hit the real API.
        # As a pure unit test this proves little, but it documents the expected behavior.
        self.mock_client.get_repo.side_effect = UnknownObjectException(
            404,
            data={
                "message": "Not Found",
                "documentation_url": "https://docs.github.com/rest/reference/repos#get-a-repository",  # noqa
            },
        )
        with pytest.raises(UnknownObjectException):
            get_pr_summary(999, "GITHUB_TOKEN", "REPOSITORY_NAME")
        self.mock_github.assert_called_once_with("GITHUB_TOKEN")
        self.mock_client.get_repo.assert_called_once_with("REPOSITORY_NAME")
        self.mock_repo.get_pull.assert_not_called()
    def test_not_exists_pr_number(self):
        # Case: the PR number does not exist.
        # The exception is raised from a mock because we do not want to hit the real API.
        # As a pure unit test this proves little, but it documents the expected behavior.
        self.mock_repo.get_pull.side_effect = UnknownObjectException(
            404,
            data={
                "message": "Not Found",
                "documentation_url": "https://docs.github.com/rest/reference/pulls#get-a-pull-request",  # noqa
            },
        )
        with pytest.raises(UnknownObjectException):
            get_pr_summary(999, "GITHUB_TOKEN", "REPOSITORY_NAME")
        self.mock_github.assert_called_once_with("GITHUB_TOKEN")
        self.mock_client.get_repo.assert_called_once_with("REPOSITORY_NAME")
        self.mock_repo.get_pull.assert_called_once_with(999)
class TestGetIntegrationTestPoint:
    """Tests for extract_target_section (pulls one markdown section out of a PR summary)."""
    def test_nomal(self):
        # The target section is the last one, so it is returned through to the
        # end of the summary (without the trailing newline).
        summary = dedent(
            """\
            ## 概要
            - 現状(As is)
              - こうなんです
            - 理想(To be)
              - こうなりたい
            - 問題(Problem)
              - こまってる
            - 解決・やったこと(Action)
              - これをやった
            ## 結合テスト観点
            - 対応概要
              - こうやった
            - 観点
              - こういうこと1
                - 条件: こうしてほしい2
              - こういうこと2
                - 条件: こうしてほしい2
            - 担当
              - API yamap55
            """
        )
        actual = extract_target_section(summary, "## 結合テスト観点")
        expected = dedent(
            """\
            ## 結合テスト観点
            - 対応概要
              - こうやった
            - 観点
              - こういうこと1
                - 条件: こうしてほしい2
              - こういうこと2
                - 条件: こうしてほしい2
            - 担当
              - API yamap55"""
        )
        assert actual == expected
    def test_not_exists_target_section(self):
        # An empty summary yields an empty result.
        summary = ""
        actual = extract_target_section(summary, "## 結合テスト観点")
        expected = ""
        assert actual == expected
    def test_another_section_at_the_end(self):
        # Case: another section exists after the target section; extraction
        # stops at the next section heading.
        summary = dedent(
            """\
            ## 概要
            - 現状(As is)
              - こうなんです
            - 理想(To be)
              - こうなりたい
            - 問題(Problem)
              - こまってる
            - 解決・やったこと(Action)
              - これをやった
            ## 結合テスト観点
            - 対応概要
              - こうやった
            - 観点
              - こういうこと1
                - 条件: こうしてほしい2
              - こういうこと2
                - 条件: こうしてほしい2
            - 担当
              - API yamap55
            ## 対象外セクション
            - 対象外です
            """
        )
        actual = extract_target_section(summary, "## 結合テスト観点")
        expected = dedent(
            """\
            ## 結合テスト観点
            - 対応概要
              - こうやった
            - 観点
              - こういうこと1
                - 条件: こうしてほしい2
              - こういうこと2
                - 条件: こうしてほしい2
            - 担当
              - API yamap55
            """
        )
        assert actual == expected
    @pytest.mark.parametrize("line_separator", ["\n", "\r\n", "\r"])
    def test_various_line_separator(self, line_separator):
        # The extractor must handle LF, CRLF, and CR line endings alike.
        summary = f"## HOGE{line_separator}## TARGET_ROW{line_separator}## HUGA"
        actual = extract_target_section(summary, "## TARGET_ROW")
        expected = "## TARGET_ROW"
        assert actual == expected
| 29.497959
| 111
| 0.57133
|
import re
from textwrap import dedent
from unittest import mock
from unittest.mock import PropertyMock
import pytest
from get_pr_info import extract_target_section, get_pr_number_from_commit_message, get_pr_summary
from github import BadCredentialsException, UnknownObjectException
class TestGetPrNumberFromCommitMessage:
    """Unit tests for get_pr_number_from_commit_message."""
    @pytest.fixture(autouse=True)
    def setUp(self):
        # Regex capturing the digits after a '#' marker, e.g. "#123".
        self.pattern = re.compile(r"#(\d*)")
    def test_not_match(self):
        actual = get_pr_number_from_commit_message("abcdefg", self.pattern)
        expected = 0
        assert actual == expected
    def test_match(self):
        actual = get_pr_number_from_commit_message("ab#123cd", self.pattern)
        expected = 123
        assert actual == expected
    def test_multi_match(self):
        # The first occurrence wins.
        actual = get_pr_number_from_commit_message("ab#123cd#456ef", self.pattern)
        expected = 123
        assert actual == expected
    def test_second_line_match(self):
        # A PR number on the second line of the message is not picked up.
        actual = get_pr_number_from_commit_message("abc\ncd#123ef", self.pattern)
        expected = 0
        assert actual == expected
    def test_blank_str(self):
        actual = get_pr_number_from_commit_message("", self.pattern)
        expected = 0
        assert actual == expected
class TestGetPrInfo:
    """Unit tests for get_pr_summary with the GitHub client fully mocked."""
    @pytest.fixture(autouse=True)
    def setUp(self):
        # Wire up the mocked chain Github() -> get_repo() -> get_pull().
        mock_client = mock.Mock()
        mock_repo = mock.Mock()
        mock_pull = mock.Mock()
        with mock.patch("get_pr_info.Github") as mock_github:
            mock_github.return_value = mock_client
            mock_client.get_repo.return_value = mock_repo
            mock_repo.get_pull.return_value = mock_pull
            self.mock_github = mock_github
            self.mock_client = mock_client
            self.mock_repo = mock_repo
            self.mock_pull = mock_pull
            yield
    def test_nomal(self):
        # Happy path: the PR body is returned as-is.
        mock_body = PropertyMock(return_value="PR_INFO")
        type(self.mock_pull).body = mock_body
        actual = get_pr_summary(999, "GITHUB_TOKEN", "REPOSITORY_NAME")
        expected = "PR_INFO"
        assert actual == expected
        self.mock_github.assert_called_once_with("GITHUB_TOKEN")
        self.mock_client.get_repo.assert_called_once_with("REPOSITORY_NAME")
        self.mock_repo.get_pull.assert_called_once_with(999)
        assert mock_body.call_count == 1
    def test_bad_credential(self):
        # Invalid token: the mocked get_repo raises; documents the expected
        # behavior without touching the real API.
        self.mock_client.get_repo.side_effect = BadCredentialsException(
            401,
            data={
                "message": "Bad credentials",
                "documentation_url": "https://docs.github.com/rest",
            },
        )
        with pytest.raises(BadCredentialsException):
            get_pr_summary(999, "GITHUB_TOKEN", "REPOSITORY_NAME")
        self.mock_github.assert_called_once_with("GITHUB_TOKEN")
        self.mock_client.get_repo.assert_called_once_with("REPOSITORY_NAME")
        self.mock_repo.get_pull.assert_not_called()
    def test_not_exists_repository(self):
        # Missing repository: get_repo raises 404; get_pull must never be reached.
        self.mock_client.get_repo.side_effect = UnknownObjectException(
            404,
            data={
                "message": "Not Found",
                "documentation_url": "https://docs.github.com/rest/reference/repos#get-a-repository",
            },
        )
        with pytest.raises(UnknownObjectException):
            get_pr_summary(999, "GITHUB_TOKEN", "REPOSITORY_NAME")
        self.mock_github.assert_called_once_with("GITHUB_TOKEN")
        self.mock_client.get_repo.assert_called_once_with("REPOSITORY_NAME")
        self.mock_repo.get_pull.assert_not_called()
    def test_not_exists_pr_number(self):
        # Missing PR number: get_pull raises 404 after the repo was resolved.
        self.mock_repo.get_pull.side_effect = UnknownObjectException(
            404,
            data={
                "message": "Not Found",
                "documentation_url": "https://docs.github.com/rest/reference/pulls#get-a-pull-request",
            },
        )
        with pytest.raises(UnknownObjectException):
            get_pr_summary(999, "GITHUB_TOKEN", "REPOSITORY_NAME")
        self.mock_github.assert_called_once_with("GITHUB_TOKEN")
        self.mock_client.get_repo.assert_called_once_with("REPOSITORY_NAME")
        self.mock_repo.get_pull.assert_called_once_with(999)
class TestGetIntegrationTestPoint:
    """Unit tests for extract_target_section."""
    def test_nomal(self):
        # Target section is last: returned up to the end, minus the final newline.
        summary = dedent(
            """\
            ## 概要
            - 現状(As is)
              - こうなんです
            - 理想(To be)
              - こうなりたい
            - 問題(Problem)
              - こまってる
            - 解決・やったこと(Action)
              - これをやった
            ## 結合テスト観点
            - 対応概要
              - こうやった
            - 観点
              - こういうこと1
                - 条件: こうしてほしい2
              - こういうこと2
                - 条件: こうしてほしい2
            - 担当
              - API yamap55
            """
        )
        actual = extract_target_section(summary, "## 結合テスト観点")
        expected = dedent(
            """\
            ## 結合テスト観点
            - 対応概要
              - こうやった
            - 観点
              - こういうこと1
                - 条件: こうしてほしい2
              - こういうこと2
                - 条件: こうしてほしい2
            - 担当
              - API yamap55"""
        )
        assert actual == expected
    def test_not_exists_target_section(self):
        # Empty input -> empty output.
        summary = ""
        actual = extract_target_section(summary, "## 結合テスト観点")
        expected = ""
        assert actual == expected
    def test_another_section_at_the_end(self):
        # A later section follows the target: extraction stops at its heading.
        summary = dedent(
            """\
            ## 概要
            - 現状(As is)
              - こうなんです
            - 理想(To be)
              - こうなりたい
            - 問題(Problem)
              - こまってる
            - 解決・やったこと(Action)
              - これをやった
            ## 結合テスト観点
            - 対応概要
              - こうやった
            - 観点
              - こういうこと1
                - 条件: こうしてほしい2
              - こういうこと2
                - 条件: こうしてほしい2
            - 担当
              - API yamap55
            ## 対象外セクション
            - 対象外です
            """
        )
        actual = extract_target_section(summary, "## 結合テスト観点")
        expected = dedent(
            """\
            ## 結合テスト観点
            - 対応概要
              - こうやった
            - 観点
              - こういうこと1
                - 条件: こうしてほしい2
              - こういうこと2
                - 条件: こうしてほしい2
            - 担当
              - API yamap55
            """
        )
        assert actual == expected
    @pytest.mark.parametrize("line_separator", ["\n", "\r\n", "\r"])
    def test_various_line_separator(self, line_separator):
        # LF, CRLF, and CR separators must all be handled.
        summary = f"## HOGE{line_separator}## TARGET_ROW{line_separator}## HUGA"
        actual = extract_target_section(summary, "## TARGET_ROW")
        expected = "## TARGET_ROW"
        assert actual == expected
| true
| true
|
f70c7bc02c5bc102e0195b41a552537b1676c819
| 9,914
|
py
|
Python
|
2.MOA-prediction/4.model_viz/scripts/nbconverted/0.blend_test_predictions.py
|
broadinstitute/lincs-profiling-comparison
|
075c3bc60eeb3934fc42c30bae6aeed8cda1cd6d
|
[
"BSD-3-Clause"
] | 1
|
2021-07-20T07:47:02.000Z
|
2021-07-20T07:47:02.000Z
|
2.MOA-prediction/4.model_viz/scripts/nbconverted/0.blend_test_predictions.py
|
broadinstitute/lincs-profiling-comparison
|
075c3bc60eeb3934fc42c30bae6aeed8cda1cd6d
|
[
"BSD-3-Clause"
] | 19
|
2020-10-24T20:55:27.000Z
|
2021-08-13T16:26:30.000Z
|
2.MOA-prediction/4.model_viz/scripts/nbconverted/0.blend_test_predictions.py
|
broadinstitute/lincs-profiling-comparison
|
075c3bc60eeb3934fc42c30bae6aeed8cda1cd6d
|
[
"BSD-3-Clause"
] | 3
|
2020-10-24T18:14:07.000Z
|
2021-06-24T17:36:25.000Z
|
#!/usr/bin/env python
# coding: utf-8
# ### - Ensemble/Blend the 4 model predictions into a single prediction
# In[1]:
import os
import datetime
from time import time
import pathlib
import pandas as pd
import numpy as np
from collections import defaultdict
from collections import Counter
# In[2]:
from sklearn.metrics import precision_recall_curve,average_precision_score
from sklearn.metrics import log_loss, roc_curve
from sklearn.metrics import auc,roc_auc_score
# In[3]:
from numba import njit
from scipy.optimize import minimize, fsolve
# In[4]:
# The two options here are "" and "_subsample"
file_indicator = ""
data_dir = pathlib.Path("../2.data_split/model_data")
# In[5]:
cp_test = pathlib.Path(f"{data_dir}/cp/test_lvl4_data{file_indicator}.csv.gz")
L1000_test = pathlib.Path(f"{data_dir}/L1/test_lvl4_data.csv.gz")
cp_L1000_test = pathlib.Path(f"{data_dir}/merged/test_lvl4_data.csv.gz")
# In[6]:
model_preds_dir = '../L1000_CP_model_predictions/'
# In[7]:
df_cp_test = pd.read_csv(cp_test, compression='gzip',low_memory = False)
df_L1000_test = pd.read_csv(L1000_test, compression='gzip',low_memory = False)
df_cp_L1000_test = pd.read_csv(cp_L1000_test, compression='gzip',low_memory = False)
# In[8]:
df_cp_L1000_test.shape
# In[9]:
##resnet
df_cp_resnet_test = pd.read_csv(os.path.join(model_preds_dir, f'cp_test_preds_resnet{file_indicator}.csv'))
df_L1000_resnet_test = pd.read_csv(os.path.join(model_preds_dir, 'L1000_test_preds_resnet.csv'))
df_cp_L1000_resnet_test = pd.read_csv(os.path.join(model_preds_dir, 'cp_L1000_test_preds_resnet.csv'))
# In[10]:
print(df_cp_L1000_resnet_test.shape)
df_cp_L1000_resnet_test.head()
# In[11]:
##1-d cnn
df_cp_cnn_test = pd.read_csv(os.path.join(model_preds_dir, f'cp_test_preds_1dcnn{file_indicator}.csv'))
df_L1000_cnn_test = pd.read_csv(os.path.join(model_preds_dir, 'L1000_test_preds_1dcnn.csv'))
df_cp_L1000_cnn_test = pd.read_csv(os.path.join(model_preds_dir, 'cp_L1000_test_preds_1dcnn.csv'))
# In[12]:
print(df_cp_L1000_cnn_test.shape)
df_cp_L1000_cnn_test.head()
# In[13]:
##tabnet
df_cp_tabnet_test = pd.read_csv(os.path.join(model_preds_dir, f'cp_test_preds_tabnet{file_indicator}.csv'))
df_L1000_tabnet_test = pd.read_csv(os.path.join(model_preds_dir, 'L1000_test_preds_tabnet.csv'))
df_cp_L1000_tabnet_test = pd.read_csv(os.path.join(model_preds_dir, 'cp_L1000_test_preds_tabnet.csv'))
# In[14]:
df_cp_L1000_tabnet_test.shape
# In[15]:
##stagedNN
df_cp_simplenn_test = pd.read_csv(os.path.join(model_preds_dir, f'cp_test_preds_simplenn{file_indicator}.csv'))
df_L1000_simplenn_test = pd.read_csv(os.path.join(model_preds_dir, 'L1000_test_preds_simplenn.csv'))
df_cp_L1000_simplenn_test = pd.read_csv(os.path.join(model_preds_dir, 'cp_L1000_test_preds_simplenn.csv'))
# In[16]:
df_cp_L1000_simplenn_test.shape
# In[17]:
df_cp_tst_targets = df_cp_test[df_cp_cnn_test.columns]
df_L1000_tst_targets = df_L1000_test[df_L1000_cnn_test.columns]
df_cp_L1000_tst_targets = df_cp_L1000_test[df_cp_L1000_cnn_test.columns]
# In[18]:
df_cp_tst_targets.shape
# In[19]:
df_L1000_tst_targets.shape
# In[20]:
df_cp_L1000_tst_targets.shape
# #### - Resnet, 1d-cnn, Tabnet, Simplenn --> 4 model predictions
# In[21]:
# CPMP's logloss from https://www.kaggle.com/c/lish-moa/discussion/183010
def log_loss_numpy(y_true, y_pred):
    """Mean binary cross-entropy over all (sample, class) entries, with clipping.

    CPMP's logloss from https://www.kaggle.com/c/lish-moa/discussion/183010
    """
    labels = np.asarray(y_true).ravel()
    preds = np.clip(np.asarray(y_pred).ravel(), 1e-15, 1 - 1e-15)
    per_entry = np.where(labels == 1, -np.log(preds), -np.log(1 - preds))
    return per_entry.mean()
def func_numpy_metric(weights, oof, y_true):
    """Objective for weight optimisation: log loss of the weighted model blend."""
    blended = np.tensordot(weights, oof, axes = ((0), (0)))
    return log_loss_numpy(y_true, blended)
def grad_func(weights, oof, y_true):
    """Analytic gradient of the blended log loss w.r.t. the blend weights.

    weights: (M,) blend weights; oof: (M, N, C) per-model predictions;
    y_true: (N, C) binary labels. Returns an (M,) gradient vector.
    """
    oof_clip = np.clip(oof, 1e-15, 1 - 1e-15)
    gradients = np.zeros(oof.shape[0])
    for i in range(oof.shape[0]):
        # a: labels, b: model i's predictions, c: weighted sum of all other models.
        a, b, c = y_true, oof_clip[i], np.zeros((oof.shape[1], oof.shape[2]))
        for j in range(oof.shape[0]):
            if j != i:
                c += weights[j] * oof_clip[j]
        # Mean over all (sample, class) entries of d(logloss)/d(weights[i]).
        gradients[i] = -np.mean((-a*b+(b**2)*weights[i]+b*c)/((b**2)*(weights[i]**2)+2*b*c*weights[i]-b*weights[i]+(c**2)-c))
    return gradients
@njit
def grad_func_jit(weights, oof, y_true):
    """Numba-compiled variant of grad_func (same math, same shapes).

    Uses np.minimum/np.maximum instead of np.clip — presumably for numba
    nopython compatibility; confirm before changing.
    """
    oof_clip = np.minimum(1 - 1e-15, np.maximum(oof, 1e-15))
    gradients = np.zeros(oof.shape[0])
    for i in range(oof.shape[0]):
        # a: labels, b: model i's predictions, c: weighted sum of all other models.
        a, b, c = y_true, oof_clip[i], np.zeros((oof.shape[1], oof.shape[2]))
        for j in range(oof.shape[0]):
            if j != i:
                c += weights[j] * oof_clip[j]
        gradients[i] = -np.mean((-a*b+(b**2)*weights[i]+b*c)/((b**2)*(weights[i]**2)+2*b*c*weights[i]-b*weights[i]+(c**2)-c))
    return gradients
# In[22]:
cp_model_preds = [df_cp_cnn_test, df_cp_resnet_test, df_cp_tabnet_test, df_cp_simplenn_test]
L1000_model_preds = [df_L1000_cnn_test, df_L1000_resnet_test, df_L1000_tabnet_test, df_L1000_simplenn_test]
cp_L1000_model_preds = [df_cp_L1000_cnn_test, df_cp_L1000_resnet_test, df_cp_L1000_tabnet_test, df_cp_L1000_simplenn_test]
# In[23]:
models_name = ['1d-Cnn', 'Resnet', 'Tabnet', 'SimpleNN']
def get_optmized_blended_weights(model_oofs, df_targets, num_of_models = 4, models_name = models_name):
    """
    Assign a blend weight to each model by minimising the log loss of the
    weighted blend against the actual MOA (mechanism of action) test labels.
    Prints each model's individual loss, the uniform-blend loss, and the
    optimised loss; returns (stacked per-model predictions, optimised weights).
    for more info: https://www.kaggle.com/gogo827jz/optimise-blending-weights-with-bonus-0/notebook
    """
    # Stack the per-model prediction frames into one (M, N, C) array.
    model_oof_preds = np.zeros((num_of_models, df_targets.shape[0], df_targets.shape[1]))
    for idx in range(num_of_models):
        model_oof_preds[idx] = model_oofs[idx].values
        score_oof = log_loss_numpy(df_targets, model_oof_preds[idx])
        print(f'{idx} {models_name[idx]}, Test loss:\t', score_oof)
    tol = 1e-10
    # Start from the uniform blend; weights constrained to [0, 1] and sum to 1.
    init_guess = [1 / model_oof_preds.shape[0]] * model_oof_preds.shape[0]
    bnds = [(0, 1) for _ in range(model_oof_preds.shape[0])]
    cons = {
        'type': 'eq',
        'fun': lambda x: np.sum(x) - 1,
        'jac': lambda x: [1] * len(x)
    }
    print('Inital Blend OOF:', func_numpy_metric(init_guess, model_oof_preds, df_targets.values))
    start_time = time()
    res_scipy = minimize(fun = func_numpy_metric, x0 = init_guess,
                         args=(model_oof_preds, df_targets.values),
                         method = 'SLSQP', ##L-BFGS-B ##SLSQP
                         jac = grad_func_jit, # grad_func
                         bounds = bnds, constraints = cons, tol = tol)
    print(f'[{str(datetime.timedelta(seconds = time() - start_time))[2:7]}] Optimised Blend OOF:', res_scipy.fun)
    print('Optimised Weights:', res_scipy.x)
    return model_oof_preds, res_scipy.x
# In[24]:
_, L1000_model_weights = get_optmized_blended_weights(L1000_model_preds, df_L1000_tst_targets,)
# In[25]:
_, cp_model_weights = get_optmized_blended_weights(cp_model_preds, df_cp_tst_targets,)
# In[26]:
_, cp_L1000_model_weights = get_optmized_blended_weights(cp_L1000_model_preds, df_cp_L1000_tst_targets)
# In[27]:
def model_eval_results(df_tst, df_tst_y, df_preds):
    """
    Print evaluation metrics for the test-set predictions: log loss,
    ROC AUC score and PR-AUC/average precision, restricted to the MOA
    classes that actually occur in the test data.
    """
    eval_metrics = ['log loss', 'ROC AUC score', 'PR-AUC/Average_precision_score',]
    # Split multi-label "a|b|c" MOA strings into individual class names
    val_moas = []
    for moa_list in df_tst['moa'].unique():
        val_moas.extend(moa_list.split('|'))
    print('-' * 10, 'Test data prediction results', '-' * 10)
    print(f'{eval_metrics[0]}:', log_loss(np.ravel(df_tst_y), np.ravel(df_preds)))
    print(f'{eval_metrics[1]}:', roc_auc_score(df_tst_y[val_moas], df_preds[val_moas], average='macro'))
    print(f'{eval_metrics[2]}:', average_precision_score(df_tst_y[val_moas], df_preds[val_moas], average="micro"))
# In[28]:
##[1.57502187e-01,1.15142271e-16,0.00000000e+00,8.42497813e-01] <-- modify the model weights
df_L1000_blend = pd.DataFrame(np.zeros(df_L1000_cnn_test.shape), columns = df_L1000_cnn_test.columns)
df_L1000_blend = df_L1000_cnn_test*0.45 + df_L1000_resnet_test*0.05 + df_L1000_tabnet_test*0.05 + df_L1000_simplenn_test*0.45
# In[29]:
0.45+(0.05*2)+0.45
# In[30]:
model_eval_results(df_L1000_test, df_L1000_tst_targets, df_L1000_blend)
# In[31]:
##[4.29598527e-01 3.27312317e-01 2.43089156e-01 5.42101086e-18] <-- modify the model weights
df_cp_blend = pd.DataFrame(np.zeros(df_cp_cnn_test.shape), columns = df_cp_cnn_test.columns)
df_cp_blend = df_cp_cnn_test*0.35 + df_cp_resnet_test*0.35 + df_cp_tabnet_test*0.25 + df_cp_simplenn_test*0.05
# In[32]:
0.35+0.35+0.25+0.05
# In[33]:
model_eval_results(df_cp_test, df_cp_tst_targets, df_cp_blend)
# In[34]:
##[0.28574384 0.09796798 0.06528908 0.5509991 ] <-- modify the model weights
df_cp_L1000_blend = pd.DataFrame(np.zeros(df_cp_L1000_cnn_test.shape), columns = df_cp_L1000_cnn_test.columns)
df_cp_L1000_blend = df_cp_L1000_cnn_test*0.30 + df_cp_L1000_resnet_test*0.20 + df_cp_L1000_tabnet_test*0.15 + df_cp_L1000_simplenn_test*0.35
# In[35]:
0.30+0.20+0.15+0.35
# In[36]:
model_eval_results(df_cp_L1000_test, df_cp_L1000_tst_targets, df_cp_L1000_blend)
# In[37]:
def save_to_csv(df, path, file_name, compress=None):
    """
    Save a dataframe as CSV under ``path``, creating the directory first
    if it does not exist.

    df        -- pandas DataFrame to write
    path      -- target directory
    file_name -- CSV file name inside ``path``
    compress  -- optional pandas ``compression`` argument (e.g. 'gzip')
    """
    # makedirs(..., exist_ok=True) also creates missing parent directories
    # and avoids the check-then-create race of os.path.exists() + os.mkdir().
    os.makedirs(path, exist_ok=True)
    df.to_csv(os.path.join(path, file_name), index=False, compression=compress)
# In[38]:
save_to_csv(df_cp_blend, model_preds_dir, f'cp_test_preds_blend{file_indicator}.csv')
save_to_csv(df_L1000_blend, model_preds_dir, 'L1000_test_preds_blend.csv')
save_to_csv(df_cp_L1000_blend, model_preds_dir, 'cp_L1000_test_preds_blend.csv')
| 27.38674
| 140
| 0.719387
|
ort precision_recall_curve,average_precision_score
from sklearn.metrics import log_loss, roc_curve
from sklearn.metrics import auc,roc_auc_score
from numba import njit
from scipy.optimize import minimize, fsolve
file_indicator = ""
data_dir = pathlib.Path("../2.data_split/model_data")
cp_test = pathlib.Path(f"{data_dir}/cp/test_lvl4_data{file_indicator}.csv.gz")
L1000_test = pathlib.Path(f"{data_dir}/L1/test_lvl4_data.csv.gz")
cp_L1000_test = pathlib.Path(f"{data_dir}/merged/test_lvl4_data.csv.gz")
model_preds_dir = '../L1000_CP_model_predictions/'
df_cp_test = pd.read_csv(cp_test, compression='gzip',low_memory = False)
df_L1000_test = pd.read_csv(L1000_test, compression='gzip',low_memory = False)
df_cp_L1000_test = pd.read_csv(cp_L1000_test, compression='gzip',low_memory = False)
df_cp_L1000_test.shape
resnet_test = pd.read_csv(os.path.join(model_preds_dir, f'cp_test_preds_resnet{file_indicator}.csv'))
df_L1000_resnet_test = pd.read_csv(os.path.join(model_preds_dir, 'L1000_test_preds_resnet.csv'))
df_cp_L1000_resnet_test = pd.read_csv(os.path.join(model_preds_dir, 'cp_L1000_test_preds_resnet.csv'))
print(df_cp_L1000_resnet_test.shape)
df_cp_L1000_resnet_test.head()
nn_test = pd.read_csv(os.path.join(model_preds_dir, f'cp_test_preds_1dcnn{file_indicator}.csv'))
df_L1000_cnn_test = pd.read_csv(os.path.join(model_preds_dir, 'L1000_test_preds_1dcnn.csv'))
df_cp_L1000_cnn_test = pd.read_csv(os.path.join(model_preds_dir, 'cp_L1000_test_preds_1dcnn.csv'))
print(df_cp_L1000_cnn_test.shape)
df_cp_L1000_cnn_test.head()
tabnet_test = pd.read_csv(os.path.join(model_preds_dir, f'cp_test_preds_tabnet{file_indicator}.csv'))
df_L1000_tabnet_test = pd.read_csv(os.path.join(model_preds_dir, 'L1000_test_preds_tabnet.csv'))
df_cp_L1000_tabnet_test = pd.read_csv(os.path.join(model_preds_dir, 'cp_L1000_test_preds_tabnet.csv'))
df_cp_L1000_tabnet_test.shape
mplenn_test = pd.read_csv(os.path.join(model_preds_dir, f'cp_test_preds_simplenn{file_indicator}.csv'))
df_L1000_simplenn_test = pd.read_csv(os.path.join(model_preds_dir, 'L1000_test_preds_simplenn.csv'))
df_cp_L1000_simplenn_test = pd.read_csv(os.path.join(model_preds_dir, 'cp_L1000_test_preds_simplenn.csv'))
df_cp_L1000_simplenn_test.shape
df_cp_tst_targets = df_cp_test[df_cp_cnn_test.columns]
df_L1000_tst_targets = df_L1000_test[df_L1000_cnn_test.columns]
df_cp_L1000_tst_targets = df_cp_L1000_test[df_cp_L1000_cnn_test.columns]
df_cp_tst_targets.shape
df_L1000_tst_targets.shape
df_cp_L1000_tst_targets.shape
_pred))
return loss.mean()
def func_numpy_metric(weights, oof, y_true):
    """Optimiser objective: log-loss of the weighted blend of model predictions."""
    # Contract the model axis: blend[s, t] = sum_m weights[m] * oof[m, s, t]
    blended = np.tensordot(weights, oof, axes = ((0), (0)))
    return log_loss_numpy(y_true, blended)
def grad_func(weights, oof, y_true):
    """
    Analytic gradient of the blended log-loss with respect to the blending
    weights (pure-numpy counterpart of the numba-compiled ``grad_func_jit``).
    """
    clipped = np.clip(oof, 1e-15, 1 - 1e-15)
    n_models = oof.shape[0]
    grad = np.zeros(n_models)
    for i in range(n_models):
        w = weights[i]
        a = y_true
        b = clipped[i]
        # Weighted sum of every *other* model's predictions
        c = np.zeros((oof.shape[1], oof.shape[2]))
        for j in range(n_models):
            if j != i:
                c += weights[j] * clipped[j]
        grad[i] = -np.mean((-a*b+(b**2)*w+b*c)/((b**2)*(w**2)+2*b*c*w-b*w+(c**2)-c))
    return grad
@njit
def grad_func_jit(weights, oof, y_true):
    """
    Numba-compiled analytic gradient of the blended log-loss w.r.t. the
    blending weights; same maths as ``grad_func`` but uses
    minimum/maximum instead of np.clip for numba compatibility.
    """
    # Clip predictions away from 0/1 so the log-loss derivative stays finite
    oof_clip = np.minimum(1 - 1e-15, np.maximum(oof, 1e-15))
    gradients = np.zeros(oof.shape[0])
    for i in range(oof.shape[0]):
        # a: true labels, b: model i's predictions,
        # c: weighted sum of all *other* models' predictions
        a, b, c = y_true, oof_clip[i], np.zeros((oof.shape[1], oof.shape[2]))
        for j in range(oof.shape[0]):
            if j != i:
                c += weights[j] * oof_clip[j]
        gradients[i] = -np.mean((-a*b+(b**2)*weights[i]+b*c)/((b**2)*(weights[i]**2)+2*b*c*weights[i]-b*weights[i]+(c**2)-c))
    return gradients
# In[22]:
cp_model_preds = [df_cp_cnn_test, df_cp_resnet_test, df_cp_tabnet_test, df_cp_simplenn_test]
L1000_model_preds = [df_L1000_cnn_test, df_L1000_resnet_test, df_L1000_tabnet_test, df_L1000_simplenn_test]
cp_L1000_model_preds = [df_cp_L1000_cnn_test, df_cp_L1000_resnet_test, df_cp_L1000_tabnet_test, df_cp_L1000_simplenn_test]
# In[23]:
models_name = ['1d-Cnn', 'Resnet', 'Tabnet', 'SimpleNN']
def get_optmized_blended_weights(model_oofs, df_targets, num_of_models = 4, models_name = models_name):
model_oof_preds = np.zeros((num_of_models, df_targets.shape[0], df_targets.shape[1]))
for idx in range(num_of_models):
model_oof_preds[idx] = model_oofs[idx].values
score_oof = log_loss_numpy(df_targets, model_oof_preds[idx])
print(f'{idx} {models_name[idx]}, Test loss:\t', score_oof)
tol = 1e-10
init_guess = [1 / model_oof_preds.shape[0]] * model_oof_preds.shape[0]
bnds = [(0, 1) for _ in range(model_oof_preds.shape[0])]
cons = {
'type': 'eq',
'fun': lambda x: np.sum(x) - 1,
'jac': lambda x: [1] * len(x)
}
print('Inital Blend OOF:', func_numpy_metric(init_guess, model_oof_preds, df_targets.values))
start_time = time()
res_scipy = minimize(fun = func_numpy_metric, x0 = init_guess,
args=(model_oof_preds, df_targets.values),
method = 'SLSQP', ##L-BFGS-B ##SLSQP
jac = grad_func_jit, # grad_func
bounds = bnds, constraints = cons, tol = tol)
print(f'[{str(datetime.timedelta(seconds = time() - start_time))[2:7]}] Optimised Blend OOF:', res_scipy.fun)
print('Optimised Weights:', res_scipy.x)
return model_oof_preds, res_scipy.x
# In[24]:
_, L1000_model_weights = get_optmized_blended_weights(L1000_model_preds, df_L1000_tst_targets,)
# In[25]:
_, cp_model_weights = get_optmized_blended_weights(cp_model_preds, df_cp_tst_targets,)
# In[26]:
_, cp_L1000_model_weights = get_optmized_blended_weights(cp_L1000_model_preds, df_cp_L1000_tst_targets)
# In[27]:
def model_eval_results(df_tst, df_tst_y, df_preds):
eval_metrics = ['log loss', 'ROC AUC score', 'PR-AUC/Average_precision_score',]
moa_class_list = df_tst['moa'].unique()
val_moas = [moa for moa_list in moa_class_list for moa in moa_list.split('|')]
print('-' * 10, 'Test data prediction results', '-' * 10)
print(f'{eval_metrics[0]}:', log_loss(np.ravel(df_tst_y), np.ravel(df_preds)))
print(f'{eval_metrics[1]}:', roc_auc_score(df_tst_y[val_moas],df_preds[val_moas], average='macro'))
print(f'{eval_metrics[2]}:', average_precision_score(df_tst_y[val_moas], df_preds[val_moas], average="micro"))
# In[28]:
##[1.57502187e-01,1.15142271e-16,0.00000000e+00,8.42497813e-01] <-- modify the model weights
df_L1000_blend = pd.DataFrame(np.zeros(df_L1000_cnn_test.shape), columns = df_L1000_cnn_test.columns)
df_L1000_blend = df_L1000_cnn_test*0.45 + df_L1000_resnet_test*0.05 + df_L1000_tabnet_test*0.05 + df_L1000_simplenn_test*0.45
# In[29]:
0.45+(0.05*2)+0.45
# In[30]:
model_eval_results(df_L1000_test, df_L1000_tst_targets, df_L1000_blend)
# In[31]:
##[4.29598527e-01 3.27312317e-01 2.43089156e-01 5.42101086e-18] <-- modify the model weights
df_cp_blend = pd.DataFrame(np.zeros(df_cp_cnn_test.shape), columns = df_cp_cnn_test.columns)
df_cp_blend = df_cp_cnn_test*0.35 + df_cp_resnet_test*0.35 + df_cp_tabnet_test*0.25 + df_cp_simplenn_test*0.05
# In[32]:
0.35+0.35+0.25+0.05
# In[33]:
model_eval_results(df_cp_test, df_cp_tst_targets, df_cp_blend)
# In[34]:
##[0.28574384 0.09796798 0.06528908 0.5509991 ] <-- modify the model weights
df_cp_L1000_blend = pd.DataFrame(np.zeros(df_cp_L1000_cnn_test.shape), columns = df_cp_L1000_cnn_test.columns)
df_cp_L1000_blend = df_cp_L1000_cnn_test*0.30 + df_cp_L1000_resnet_test*0.20 + df_cp_L1000_tabnet_test*0.15 + df_cp_L1000_simplenn_test*0.35
# In[35]:
0.30+0.20+0.15+0.35
# In[36]:
model_eval_results(df_cp_L1000_test, df_cp_L1000_tst_targets, df_cp_L1000_blend)
# In[37]:
def save_to_csv(df, path, file_name, compress=None):
    """Save a dataframe as CSV under ``path``, creating the directory if needed."""
    # makedirs with exist_ok=True also creates missing parent directories
    # and avoids the check-then-create race of os.path.exists() + os.mkdir().
    os.makedirs(path, exist_ok=True)
    df.to_csv(os.path.join(path, file_name), index=False, compression=compress)
# In[38]:
save_to_csv(df_cp_blend, model_preds_dir, f'cp_test_preds_blend{file_indicator}.csv')
save_to_csv(df_L1000_blend, model_preds_dir, 'L1000_test_preds_blend.csv')
save_to_csv(df_cp_L1000_blend, model_preds_dir, 'cp_L1000_test_preds_blend.csv')
| true
| true
|
f70c7bc286d3778bb921c66dc8890c7d4ea43042
| 178
|
py
|
Python
|
pyltr/__init__.py
|
Haiga/pyltr
|
65e704e540fa104ccdf8143e61911ef57b70cf84
|
[
"BSD-3-Clause"
] | 432
|
2015-09-17T19:36:33.000Z
|
2022-03-26T17:06:48.000Z
|
pyltr/__init__.py
|
Haiga/pyltr
|
65e704e540fa104ccdf8143e61911ef57b70cf84
|
[
"BSD-3-Clause"
] | 24
|
2016-08-04T03:23:07.000Z
|
2021-10-16T13:17:06.000Z
|
pyltr/__init__.py
|
Haiga/pyltr
|
65e704e540fa104ccdf8143e61911ef57b70cf84
|
[
"BSD-3-Clause"
] | 112
|
2016-01-16T01:49:35.000Z
|
2022-03-21T06:01:40.000Z
|
"""
Base module for pyltr.
We import all packages since it's not particularly expensive.
"""
from . import data
from . import metrics
from . import models
from . import util
| 13.692308
| 61
| 0.730337
|
from . import data
from . import metrics
from . import models
from . import util
| true
| true
|
f70c7bef02e009abc9db3e52e9872978d79a7cc5
| 30,886
|
py
|
Python
|
modules/s3db/evr.py
|
mswdresden/AsylumEden
|
a68ee08f9f7031974ec12ec327d00c5d975a740a
|
[
"MIT"
] | 1
|
2017-07-22T18:49:34.000Z
|
2017-07-22T18:49:34.000Z
|
modules/s3db/evr.py
|
vpccalderara/sahana
|
6eb3f9798879dfa51bbe5d2b84829b1402671499
|
[
"MIT"
] | null | null | null |
modules/s3db/evr.py
|
vpccalderara/sahana
|
6eb3f9798879dfa51bbe5d2b84829b1402671499
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
""" Sahana Eden Evacuees Registry Model
@copyright: 2015-2017 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ("S3EVRCaseModel",
"evr_rheader",
"evr_AddGroupMembers",
)
from gluon import *
from ..s3 import *
# =============================================================================
class S3EVRCaseModel(S3Model):
names = ("evr_case",
"evr_medical_details",
)
def model(self):
T = current.T
settings = current.deployment_settings
define_table = self.define_table
person_id = self.pr_person_id
# ---------------------------------------------------------------------
# Case Data
#
enable_evr_organisation = settings.get_evr_link_to_organisation()
organisation_label = settings.get_hrm_organisation_label()
org_organisation_represent = self.org_OrganisationRepresent()
org_widget = S3HierarchyWidget(lookup="org_organisation",
represent=org_organisation_represent,
multiple=False,
leafonly=False,)
tablename = "evr_case"
define_table(tablename,
person_id(ondelete = "CASCADE"),
self.org_organisation_id(
empty = not settings.get_hrm_org_required(),
label = organisation_label,
requires = self.org_organisation_requires(required=True),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Designed Organisation"),
T("Organisation designed to take care of evacuee"))),
widget = org_widget,
readable = enable_evr_organisation,
writable = enable_evr_organisation,
),
Field("fiscal_code", "string", length=16,
label = T("Fiscal Code"),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Fiscal Code"),
T("Insert the fiscal code with no spaces")
)
),
),
s3_comments(),
*s3_meta_fields())
# If fiscal code is present, it's unique
# fiscal_code = db.evr_case.fiscal_code
# fiscal_code.requires = IS_EMPTY_OR(
# IS_NOT_IN_DB(db(db.evr_case.deleted != True),
# fiscal_code),
# null=''
# )
self.configure(tablename,
onaccept = self.evr_case_onaccept,
)
# ---------------------------------------------------------------------
# Medical Details
#
# @todo: use string-codes for option fields for better
# maintainability/interoperability
#
evr_therapy_opts = {1: T("Vital Long-Term Medication"),
2: T("Dialysis"),
3: T("Chronic Oxygen Supply"),
4: T("Intermittend Ventilator Support"),
5: T("Ventilator Dependend"),
6: T("Cardiac Assist Device"),
}
evr_allergy_opts = {1: T("Drug"),
2: T("Food"),
3: T("Olive Tree"),
4: T("Grass"),
5: T("Dust"),
6: T("Other"),
}
evr_disability_opts = {1: T("Visually Impaired"),
2: T("Blind"),
3: T("Hearing-Impaired"),
4: T("Deaf"),
5: T("Deaf-Mute"),
6: T("Deaf-Blind"),
7: T("Aphasic"),
8: T("Mobility-Impaired"),
9: T("Paralysed"),
10: T("Amputated"),
11: T("Other Physical Disability"),
12: T("Mentally Disabled"),
}
evr_aids_appliances_opts = {1: ("Guide Dog"),
2: ("Wheelchair"),
3: ("Walking stick"),
4: ("Crutch"),
5: ("Tripod"),
6: ("Artificial limb"),
7: ("Catheter"),
8: ("Sanity Napkin"),
}
def med_multiopt_field(fieldname, options, label=None):
""" Simple generator for option fields """
return Field(fieldname, "list:integer",
label = label,
represent = S3Represent(options = options,
multiple = True),
requires = IS_IN_SET(options, multiple = True),
widget = S3MultiSelectWidget(filter = False,
selectedList = 3,
noneSelectedText = "Select",
)
)
evr_source_opts = {1: "Self",
2: "Mother",
3: "Father",
4: "Uncle",
5: "Grandfather",
6: "Grandmother",
7: "Official",
8: "Attendant",
9: "Neighbour",
10: "Teacher",
11: "Priest",
12: "Other",
}
tablename = "evr_medical_details"
define_table(tablename,
person_id(),
med_multiopt_field("therapy",
evr_therapy_opts,
label = T("Therapy"),
),
Field("therapy_comment"),
Field("pregnancy", "boolean",
label = T("Pregnancy"),
),
med_multiopt_field("allergy",
evr_allergy_opts,
label = T("Allergies"),
),
Field("diet",
label = T("Food intolerance"),
),
med_multiopt_field("disability",
evr_disability_opts,
label = T("Disabilities"),
),
Field("self_sufficient", "boolean",
label = T("Self-Sufficient"),
),
med_multiopt_field("aids_appliances",
evr_aids_appliances_opts,
label = T("Aids and Appliances"),
),
Field("declared_by_name",
label = T("Declared by (Name)"),
),
Field("declared_by_relationship", "integer",
label = T("Declared by (Relationship)"),
represent=S3Represent(options=evr_source_opts),
requires = IS_IN_SET(evr_source_opts,
zero=None),
),
Field("declared_by_phone",
label = T("Declared by (Phone)"),
requires = IS_NULL_OR(IS_PHONE_NUMBER()),
),
Field("declared_by_email",
label = T("Declared by (Email)"),
requires = IS_NULL_OR(IS_EMAIL()),
),
Field("has_attendant", "boolean",
label = T("Has Attendant"),
),
Field("attendant_name",
label = T("Attendant (Name)"),
),
Field("attendant_phone",
label = T("Attendant (Phone)"),
requires = IS_NULL_OR(IS_PHONE_NUMBER()),
),
Field("attendant_email",
label = T("Attendant (Email)"),
requires = IS_NULL_OR(IS_EMAIL()),
),
s3_comments(),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Socio-economic Background
#
tablename = "evr_background"
define_table(tablename,
person_id(),
Field("legal_measure",
label = T("Legal measure / Home warrant"),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Legal measure / Home warrant"),
T("Evacuee subject to special or legal measures/penalities")
)
),
),
Field("diet_restrictions",
label = T("Food Restrictions")
),
Field("social_welfare",
label = T("Social Welfare"),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Social Welfare"),
T("Evacuee subject to Social Welfare")
)
),
),
Field("interpreter",
label = T("Interpreter / Cultural Mediator Required"),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Interpreter / Cultural Mediator"),
T("Specific language interpreter and/or cultural mediator required")
)
),
),
Field("home_help", "boolean",
label = T("Home Help"),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Home Help"),
T("Evacuee requiring dedicated assistance at home")
)
),
),
Field("distance_from_shelter", "integer",
label = T("Working Distance from Shelter (km)")
),
Field("job_lost_by_event", "boolean",
label = T("Job lost by event")
),
Field("domestic_animal", "boolean",
label = T("With Domestic Animals")
),
Field("car_available", "boolean",
label = T("Car available")
),
s3_comments(),
*s3_meta_fields())
# -------------------------------------------------------------------------
@staticmethod
def evr_case_onaccept(form):
"""
After DB I/O, check the correctness of fiscal code (ITALY)
@ToDo: The function should be made a deployment_setting when anyone else wishes to use this module
"""
# Initialization
fiscal_code = form.vars.fiscal_code
if fiscal_code == "" or fiscal_code == None:
return
fiscal_code = fiscal_code.upper()
MALE = 3
CONSONANTS = "BCDFGHJKLMNPQRSTVWXYZ"
VOWELS = "AEIOU"
MONTHS = "ABCDEHLMPRST"
T = current.T
ptable = current.s3db.pr_person
query = (form.vars.person_id == ptable.id)
row = current.db(query).select(ptable.first_name,
ptable.last_name,
ptable.date_of_birth,
ptable.gender,
limitby = (0, 1)
).first()
name = row.first_name.upper()
surname = row.last_name.upper()
date_of_birth = row.date_of_birth
year = date_of_birth.year
month = date_of_birth.month
day = date_of_birth.day
gender = row.gender
# Check surname
cons = ""
for c in surname:
if c in CONSONANTS:
cons += c
vow = ""
for c in surname:
if c in VOWELS:
vow += c
chars = cons + vow
if len(chars) < 3:
chars += ["X", "X"]
if fiscal_code[:3] != chars[0:3].upper():
current.response.warning = T("Warning: fiscal code isn't \
consistent with personal data")
return
# Check name
cons = ""
for c in name:
if c in CONSONANTS:
cons += c
if len(cons) > 3:
chars = cons[0] + cons[2] + cons[3]
else:
vow = ""
for c in name:
if c in VOWELS:
vow += c
chars = cons + vow
if len(chars) < 3:
chars += ["X", "X"]
if fiscal_code[3:6] != chars[0:3].upper():
current.response.warning = T("Warning: fiscal code isn't \
consistent with personal data")
return
# Check date of birth and gender
year = str(year)[2:4] # Convert to string and take only the last two elements
if fiscal_code[6:8] != year or \
fiscal_code[8] != MONTHS[month - 1]:
current.response.warning = T("Warning: fiscal code isn't \
consistent with personal data")
return
if gender == MALE:
birthday_in_cf = fiscal_code[9:11]
if not birthday_in_cf.isdigit():
current.response.warning = T("Warning: fiscal code isn't \
consistent with personal data")
return
else:
birthday_in_cf = int(birthday_in_cf)
if birthday_in_cf != day:
current.response.warning = T("Warning: fiscal code isn't \
consistent with personal data")
return
else: # if gender == FEMALE
if fiscal_code[9:11] != str(day + 40):
current.response.warning = T("Warning: fiscal code isn't \
consistent with personal data")
return
return
# =============================================================================
def evr_rheader(r):
    """
        EVR Resource Headers: builds the tabbed resource header shown
        above person and group views in the Evacuees Registry.

        @param r: the S3Request
        @return: the rheader DIV for interactive (HTML) views of an
                 existing record, otherwise None
    """
    T = current.T
    settings = current.deployment_settings
    # rheaders only make sense for interactive views of existing records
    if r.representation != "html" or not r.record:
        return None
    resourcename = r.name
    rheader_fields = None
    if resourcename == "person":
        tabs = [(T("Person"), None),
                (T("Addresses"), "address"),
                (T("Contact Data"), "contacts"),
                (T("Groups"), "group_membership"),
                # these can be hidden since inline in the main form,
                # but can enabled to verify the functionality:
                #(T("Identity Documents"), "identity"),
                #(T("Case Details"), "case"),
                (T("Images"), "image"),
                (T("Medical Information"), "medical_details"),
                (T("Socio-Economic Background"), "background"),
                ]
        # Optional tabs controlled by deployment settings / active modules
        if settings.get_evr_show_physical_description():
            tabs.append((T("Physical Description"), "physical_description"))
        if settings.has_module("cr"):
            tabs.append((T("Shelter Registration"), "shelter_registration"))
        rheader_fields = [["first_name", "last_name"],
                          ["date_of_birth"],
                          ]
        # Show profile picture in rheader
        itable = current.s3db.pr_image
        query = (itable.pe_id == r.record.pe_id) & \
                (itable.profile == True)
        image = current.db(query).select(itable.image,
                                         limitby=(0, 1)).first()
        if image:
            image = itable.image.represent(image.image)
        else:
            # No profile image: placeholder linking to the image-upload form
            image = A(IMG(_src=URL(c="static", f="img", args="blank-user.gif"),
                          _height=60,
                          _title=T("No image available")),
                      _class="th",
                      _href=URL(f="person", args=[r.id, "image", "create"]),
                      )
        return DIV(DIV(image, _style="float:left"),
                   S3ResourceHeader(rheader_fields, tabs)(r))
    elif resourcename == "group":
        tabs = [("Group Details", None),
                (T("Contact Data"), "contact"),
                (T("Members"), "group_membership"),
                ]
        # Show "Add Members" tab only when we action it explicitly
        # (=> from action-button in the group members list)
        if r.method == "add_members":
            tabs.append((T("Add Members"), "add_members"))
        rheader_fields = [["name"],
                          ["description"],
                          ]
        return S3ResourceHeader(rheader_fields, tabs)(r)
    return None
# =============================================================================
class evr_AddGroupMembers(S3Method):
"""
Custom method to select multiple persons from a filtered list
and add them to a group
"""
# -------------------------------------------------------------------------
    def apply_method(self, r, **attr):
        """
            Entry point for REST controller: dispatches GET/POST requests
            in html or aadata format to add_members(), rejecting anything
            else.

            @param r: the S3Request
            @param attr: dictionary of parameters for the method handler
            @return: output object to send to the view
        """
        # Add button "Add Members" to members tab
        if r.http in ("GET", "POST"):
            # NOTE(review): due to operator precedence this reads as
            # (html and r.id) or aadata — presumably intended, since aadata
            # (Ajax) requests carry no record id; confirm.
            if r.representation == "html" and r.id or \
               r.representation == "aadata":
                return self.add_members(r, **attr)
            else:
                # Unsupported representation
                r.error(415, current.ERROR.BAD_FORMAT)
        else:
            # Unsupported HTTP verb
            r.error(405, current.ERROR.BAD_METHOD)
# -------------------------------------------------------------------------
def add_members(self, r, **attr):
"""
Add-members action: renders a filtered multi-select datatable
form, and creates group_memberships on POST
@param r: the S3Request
@param attr: dictionary of parameters for the method handler
@return: output object to send to the view
"""
T = current.T
db = current.db
s3db = current.s3db
unaffiliated = ((S3FieldSelector("group_membership.id") == None) & \
(S3FieldSelector("case.id") != None))
if r.http == "POST":
# Form submission
group_id = r.id
added = 0
post_vars = r.post_vars
if all([name in post_vars
for name in ("add", "selected", "mode")]):
# Get selection
selected = post_vars.selected
if selected:
selected = selected.split(",")
else:
selected = []
# Handle exclusion filter
if post_vars.mode == "Exclusive":
if "filterURL" in post_vars:
filters = S3URLQuery.parse_url(post_vars.filterURL)
else:
filters = None
query = unaffiliated & \
(~(S3FieldSelector("id").belongs(selected)))
resource = s3db.resource("pr_person",
filter=query,
vars=filters)
rows = resource.select(["id"], as_rows=True)
selected = [str(row.id) for row in rows]
# Avoid duplicates
gtable = s3db.pr_group_membership
query = (gtable.group_id == group_id) & \
(gtable.person_id.belongs(selected)) & \
(gtable.deleted != True)
rows = db(query).select(gtable.person_id)
skip = set(row.person_id for row in rows)
# Add new group members
for record_id in selected:
try:
person_id = int(record_id.strip())
except ValueError:
continue
if person_id in skip:
continue
gtable.insert(group_id = group_id,
person_id = person_id,
)
added += 1
# Confirmation message (in session because we redirect)
session = current.session
if not selected:
session.warning = T("No Persons Selected!")
else:
session.confirmation = T("%(number)s Members added to Group") % \
dict(number=added)
# Go back to list of existing group members
redirect(r.url(method = "",
id = group_id,
component = "group_membership"))
else:
resource = s3db.resource("pr_person", vars=r.get_vars)
resource.add_filter(unaffiliated)
get_config = resource.get_config
# Filter widgets
filter_widgets = get_config("filter_widgets", [])
filter_widgets.append(S3DateFilter("created_on",
label = T("Registered on"),
)
)
# List fields
list_fields = ["id",
"first_name",
"last_name",
"gender",
"date_of_birth",
]
response = current.response
# Data table boundaries
get_vars = self.request.get_vars
if "displayStart" in get_vars:
start = int(get_vars["displayStart"])
else:
start = None
if "pageLength" in get_vars:
display_length = int(get_vars["pageLength"])
else:
display_length = response.s3.ROWSPERPAGE
limit = 4 * display_length
# Apply datatable filter and sorting
totalrows = resource.count()
filter, orderby, left = resource.datatable_filter(list_fields, get_vars)
if not orderby:
# Most recently created records on top
orderby = "pr_person.created_on desc"
resource.add_filter(filter)
# Retrieve the data
data = resource.select(list_fields,
start=start,
limit=limit,
orderby=orderby,
left=left,
count=True,
represent=True)
filteredrows = data["numrows"]
# Generate the datatable
dt = S3DataTable(data["rfields"], data["rows"])
dt_id = "datatable"
# Bulk Action
dt_bulk_actions = [(T("Add as Group Members"), "add")]
if r.representation == "html":
# Page load
# Custom open-button, no delete-option
resource.configure(deletable = False)
open_url = URL(f = "person", args = ["[id]"])
S3CRUD.action_buttons(r,
deletable = False,
read_url = open_url,
update_url = open_url)
# Need no export formats (as this is a form)
response.s3.no_formats = True
# Data table (items)
items = dt.html(totalrows,
filteredrows,
dt_id,
dt_ajax_url=URL(c="evr",
f="group",
args=["add_members"],
vars={},
extension="aadata",
),
dt_bulk_actions=dt_bulk_actions,
dt_pageLength=display_length,
dt_pagination="true",
dt_searching="false",
)
resource.configure(deletable = False)
# Filter form
if filter_widgets:
# Where to retrieve filtered data from:
_vars = resource.crud._remove_filters(r.get_vars)
filter_submit_url = r.url(vars=_vars)
# Where to retrieve updated filter options from:
filter_ajax_url = URL(f="person",
args=["filter.options"],
vars={},
)
# Define filter form
filter_clear = get_config("filter_clear", True)
filter_submit = get_config("filter_submit", True)
filter_form = S3FilterForm(filter_widgets,
clear=filter_clear,
submit=filter_submit,
ajax=True,
url=filter_submit_url,
ajaxurl=filter_ajax_url,
_class="filter-form",
_id="datatable-filter-form",
)
# Render filter form
fresource = s3db.resource(resource.tablename)
alias = resource.alias if r.component else None
ff = filter_form.html(fresource,
r.get_vars,
target="datatable",
alias=alias)
else:
ff = ""
output = dict(items = items,
title = T("Add Members to Group"),
addheader = "%s:" % T("Select People to add them to the Group"),
list_filter_form = ff,
)
response.view = "list_filter.html"
return output
else:
# Ajax refresh
if "draw" in get_vars:
echo = int(get_vars.draw)
else:
echo = None
items = dt.json(totalrows,
filteredrows,
dt_id,
echo,
dt_bulk_actions=dt_bulk_actions,
)
response.headers["Content-Type"] = "application/json"
return items
# END =========================================================================
| 41.737838
| 127
| 0.400117
|
__all__ = ("S3EVRCaseModel",
"evr_rheader",
"evr_AddGroupMembers",
)
from gluon import *
from ..s3 import *
class S3EVRCaseModel(S3Model):
names = ("evr_case",
"evr_medical_details",
)
def model(self):
T = current.T
settings = current.deployment_settings
define_table = self.define_table
person_id = self.pr_person_id
enable_evr_organisation = settings.get_evr_link_to_organisation()
organisation_label = settings.get_hrm_organisation_label()
org_organisation_represent = self.org_OrganisationRepresent()
org_widget = S3HierarchyWidget(lookup="org_organisation",
represent=org_organisation_represent,
multiple=False,
leafonly=False,)
tablename = "evr_case"
define_table(tablename,
person_id(ondelete = "CASCADE"),
self.org_organisation_id(
empty = not settings.get_hrm_org_required(),
label = organisation_label,
requires = self.org_organisation_requires(required=True),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Designed Organisation"),
T("Organisation designed to take care of evacuee"))),
widget = org_widget,
readable = enable_evr_organisation,
writable = enable_evr_organisation,
),
Field("fiscal_code", "string", length=16,
label = T("Fiscal Code"),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Fiscal Code"),
T("Insert the fiscal code with no spaces")
)
),
),
s3_comments(),
*s3_meta_fields())
# fiscal_code = db.evr_case.fiscal_code
# fiscal_code.requires = IS_EMPTY_OR(
# IS_NOT_IN_DB(db(db.evr_case.deleted != True),
# fiscal_code),
# null=''
# )
self.configure(tablename,
onaccept = self.evr_case_onaccept,
)
# ---------------------------------------------------------------------
# Medical Details
#
# @todo: use string-codes for option fields for better
# maintainability/interoperability
#
evr_therapy_opts = {1: T("Vital Long-Term Medication"),
2: T("Dialysis"),
3: T("Chronic Oxygen Supply"),
4: T("Intermittend Ventilator Support"),
5: T("Ventilator Dependend"),
6: T("Cardiac Assist Device"),
}
evr_allergy_opts = {1: T("Drug"),
2: T("Food"),
3: T("Olive Tree"),
4: T("Grass"),
5: T("Dust"),
6: T("Other"),
}
evr_disability_opts = {1: T("Visually Impaired"),
2: T("Blind"),
3: T("Hearing-Impaired"),
4: T("Deaf"),
5: T("Deaf-Mute"),
6: T("Deaf-Blind"),
7: T("Aphasic"),
8: T("Mobility-Impaired"),
9: T("Paralysed"),
10: T("Amputated"),
11: T("Other Physical Disability"),
12: T("Mentally Disabled"),
}
evr_aids_appliances_opts = {1: ("Guide Dog"),
2: ("Wheelchair"),
3: ("Walking stick"),
4: ("Crutch"),
5: ("Tripod"),
6: ("Artificial limb"),
7: ("Catheter"),
8: ("Sanity Napkin"),
}
def med_multiopt_field(fieldname, options, label=None):
return Field(fieldname, "list:integer",
label = label,
represent = S3Represent(options = options,
multiple = True),
requires = IS_IN_SET(options, multiple = True),
widget = S3MultiSelectWidget(filter = False,
selectedList = 3,
noneSelectedText = "Select",
)
)
evr_source_opts = {1: "Self",
2: "Mother",
3: "Father",
4: "Uncle",
5: "Grandfather",
6: "Grandmother",
7: "Official",
8: "Attendant",
9: "Neighbour",
10: "Teacher",
11: "Priest",
12: "Other",
}
tablename = "evr_medical_details"
define_table(tablename,
person_id(),
med_multiopt_field("therapy",
evr_therapy_opts,
label = T("Therapy"),
),
Field("therapy_comment"),
Field("pregnancy", "boolean",
label = T("Pregnancy"),
),
med_multiopt_field("allergy",
evr_allergy_opts,
label = T("Allergies"),
),
Field("diet",
label = T("Food intolerance"),
),
med_multiopt_field("disability",
evr_disability_opts,
label = T("Disabilities"),
),
Field("self_sufficient", "boolean",
label = T("Self-Sufficient"),
),
med_multiopt_field("aids_appliances",
evr_aids_appliances_opts,
label = T("Aids and Appliances"),
),
Field("declared_by_name",
label = T("Declared by (Name)"),
),
Field("declared_by_relationship", "integer",
label = T("Declared by (Relationship)"),
represent=S3Represent(options=evr_source_opts),
requires = IS_IN_SET(evr_source_opts,
zero=None),
),
Field("declared_by_phone",
label = T("Declared by (Phone)"),
requires = IS_NULL_OR(IS_PHONE_NUMBER()),
),
Field("declared_by_email",
label = T("Declared by (Email)"),
requires = IS_NULL_OR(IS_EMAIL()),
),
Field("has_attendant", "boolean",
label = T("Has Attendant"),
),
Field("attendant_name",
label = T("Attendant (Name)"),
),
Field("attendant_phone",
label = T("Attendant (Phone)"),
requires = IS_NULL_OR(IS_PHONE_NUMBER()),
),
Field("attendant_email",
label = T("Attendant (Email)"),
requires = IS_NULL_OR(IS_EMAIL()),
),
s3_comments(),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Socio-economic Background
#
tablename = "evr_background"
define_table(tablename,
person_id(),
Field("legal_measure",
label = T("Legal measure / Home warrant"),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Legal measure / Home warrant"),
T("Evacuee subject to special or legal measures/penalities")
)
),
),
Field("diet_restrictions",
label = T("Food Restrictions")
),
Field("social_welfare",
label = T("Social Welfare"),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Social Welfare"),
T("Evacuee subject to Social Welfare")
)
),
),
Field("interpreter",
label = T("Interpreter / Cultural Mediator Required"),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Interpreter / Cultural Mediator"),
T("Specific language interpreter and/or cultural mediator required")
)
),
),
Field("home_help", "boolean",
label = T("Home Help"),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Home Help"),
T("Evacuee requiring dedicated assistance at home")
)
),
),
Field("distance_from_shelter", "integer",
label = T("Working Distance from Shelter (km)")
),
Field("job_lost_by_event", "boolean",
label = T("Job lost by event")
),
Field("domestic_animal", "boolean",
label = T("With Domestic Animals")
),
Field("car_available", "boolean",
label = T("Car available")
),
s3_comments(),
*s3_meta_fields())
# -------------------------------------------------------------------------
@staticmethod
def evr_case_onaccept(form):
# Initialization
fiscal_code = form.vars.fiscal_code
if fiscal_code == "" or fiscal_code == None:
return
fiscal_code = fiscal_code.upper()
MALE = 3
CONSONANTS = "BCDFGHJKLMNPQRSTVWXYZ"
VOWELS = "AEIOU"
MONTHS = "ABCDEHLMPRST"
T = current.T
ptable = current.s3db.pr_person
query = (form.vars.person_id == ptable.id)
row = current.db(query).select(ptable.first_name,
ptable.last_name,
ptable.date_of_birth,
ptable.gender,
limitby = (0, 1)
).first()
name = row.first_name.upper()
surname = row.last_name.upper()
date_of_birth = row.date_of_birth
year = date_of_birth.year
month = date_of_birth.month
day = date_of_birth.day
gender = row.gender
# Check surname
cons = ""
for c in surname:
if c in CONSONANTS:
cons += c
vow = ""
for c in surname:
if c in VOWELS:
vow += c
chars = cons + vow
if len(chars) < 3:
chars += ["X", "X"]
if fiscal_code[:3] != chars[0:3].upper():
current.response.warning = T("Warning: fiscal code isn't \
consistent with personal data")
return
cons = ""
for c in name:
if c in CONSONANTS:
cons += c
if len(cons) > 3:
chars = cons[0] + cons[2] + cons[3]
else:
vow = ""
for c in name:
if c in VOWELS:
vow += c
chars = cons + vow
if len(chars) < 3:
chars += ["X", "X"]
if fiscal_code[3:6] != chars[0:3].upper():
current.response.warning = T("Warning: fiscal code isn't \
consistent with personal data")
return
# Check date of birth and gender
year = str(year)[2:4] # Convert to string and take only the last two elements
if fiscal_code[6:8] != year or \
fiscal_code[8] != MONTHS[month - 1]:
current.response.warning = T("Warning: fiscal code isn't \
consistent with personal data")
return
if gender == MALE:
birthday_in_cf = fiscal_code[9:11]
if not birthday_in_cf.isdigit():
current.response.warning = T("Warning: fiscal code isn't \
consistent with personal data")
return
else:
birthday_in_cf = int(birthday_in_cf)
if birthday_in_cf != day:
current.response.warning = T("Warning: fiscal code isn't \
consistent with personal data")
return
else:
if fiscal_code[9:11] != str(day + 40):
current.response.warning = T("Warning: fiscal code isn't \
consistent with personal data")
return
return
# =============================================================================
def evr_rheader(r):
T = current.T
settings = current.deployment_settings
if r.representation != "html" or not r.record:
return None
resourcename = r.name
rheader_fields = None
if resourcename == "person":
tabs = [(T("Person"), None),
(T("Addresses"), "address"),
(T("Contact Data"), "contacts"),
(T("Groups"), "group_membership"),
# these can be hidden since inline in the main form,
# but can enabled to verify the functionality:
#(T("Identity Documents"), "identity"),
#(T("Case Details"), "case"),
(T("Images"), "image"),
(T("Medical Information"), "medical_details"),
(T("Socio-Economic Background"), "background"),
]
if settings.get_evr_show_physical_description():
tabs.append((T("Physical Description"), "physical_description"))
if settings.has_module("cr"):
tabs.append((T("Shelter Registration"), "shelter_registration"))
rheader_fields = [["first_name", "last_name"],
["date_of_birth"],
]
# Show profile picture in rheader
itable = current.s3db.pr_image
query = (itable.pe_id == r.record.pe_id) & \
(itable.profile == True)
image = current.db(query).select(itable.image,
limitby=(0, 1)).first()
if image:
image = itable.image.represent(image.image)
else:
image = A(IMG(_src=URL(c="static", f="img", args="blank-user.gif"),
_height=60,
_title=T("No image available")),
_class="th",
_href=URL(f="person", args=[r.id, "image", "create"]),
)
return DIV(DIV(image, _style="float:left"),
S3ResourceHeader(rheader_fields, tabs)(r))
elif resourcename == "group":
tabs = [("Group Details", None),
(T("Contact Data"), "contact"),
(T("Members"), "group_membership"),
]
# Show "Add Members" tab only when we action it explicitly
# (=> from action-button in the group members list)
if r.method == "add_members":
tabs.append((T("Add Members"), "add_members"))
rheader_fields = [["name"],
["description"],
]
return S3ResourceHeader(rheader_fields, tabs)(r)
return None
# =============================================================================
class evr_AddGroupMembers(S3Method):
# -------------------------------------------------------------------------
def apply_method(self, r, **attr):
# Add button "Add Members" to members tab
if r.http in ("GET", "POST"):
if r.representation == "html" and r.id or \
r.representation == "aadata":
return self.add_members(r, **attr)
else:
r.error(415, current.ERROR.BAD_FORMAT)
else:
r.error(405, current.ERROR.BAD_METHOD)
# -------------------------------------------------------------------------
def add_members(self, r, **attr):
T = current.T
db = current.db
s3db = current.s3db
unaffiliated = ((S3FieldSelector("group_membership.id") == None) & \
(S3FieldSelector("case.id") != None))
if r.http == "POST":
# Form submission
group_id = r.id
added = 0
post_vars = r.post_vars
if all([name in post_vars
for name in ("add", "selected", "mode")]):
# Get selection
selected = post_vars.selected
if selected:
selected = selected.split(",")
else:
selected = []
# Handle exclusion filter
if post_vars.mode == "Exclusive":
if "filterURL" in post_vars:
filters = S3URLQuery.parse_url(post_vars.filterURL)
else:
filters = None
query = unaffiliated & \
(~(S3FieldSelector("id").belongs(selected)))
resource = s3db.resource("pr_person",
filter=query,
vars=filters)
rows = resource.select(["id"], as_rows=True)
selected = [str(row.id) for row in rows]
# Avoid duplicates
gtable = s3db.pr_group_membership
query = (gtable.group_id == group_id) & \
(gtable.person_id.belongs(selected)) & \
(gtable.deleted != True)
rows = db(query).select(gtable.person_id)
skip = set(row.person_id for row in rows)
# Add new group members
for record_id in selected:
try:
person_id = int(record_id.strip())
except ValueError:
continue
if person_id in skip:
continue
gtable.insert(group_id = group_id,
person_id = person_id,
)
added += 1
# Confirmation message (in session because we redirect)
session = current.session
if not selected:
session.warning = T("No Persons Selected!")
else:
session.confirmation = T("%(number)s Members added to Group") % \
dict(number=added)
# Go back to list of existing group members
redirect(r.url(method = "",
id = group_id,
component = "group_membership"))
else:
resource = s3db.resource("pr_person", vars=r.get_vars)
resource.add_filter(unaffiliated)
get_config = resource.get_config
# Filter widgets
filter_widgets = get_config("filter_widgets", [])
filter_widgets.append(S3DateFilter("created_on",
label = T("Registered on"),
)
)
# List fields
list_fields = ["id",
"first_name",
"last_name",
"gender",
"date_of_birth",
]
response = current.response
# Data table boundaries
get_vars = self.request.get_vars
if "displayStart" in get_vars:
start = int(get_vars["displayStart"])
else:
start = None
if "pageLength" in get_vars:
display_length = int(get_vars["pageLength"])
else:
display_length = response.s3.ROWSPERPAGE
limit = 4 * display_length
# Apply datatable filter and sorting
totalrows = resource.count()
filter, orderby, left = resource.datatable_filter(list_fields, get_vars)
if not orderby:
# Most recently created records on top
orderby = "pr_person.created_on desc"
resource.add_filter(filter)
# Retrieve the data
data = resource.select(list_fields,
start=start,
limit=limit,
orderby=orderby,
left=left,
count=True,
represent=True)
filteredrows = data["numrows"]
# Generate the datatable
dt = S3DataTable(data["rfields"], data["rows"])
dt_id = "datatable"
# Bulk Action
dt_bulk_actions = [(T("Add as Group Members"), "add")]
if r.representation == "html":
# Page load
# Custom open-button, no delete-option
resource.configure(deletable = False)
open_url = URL(f = "person", args = ["[id]"])
S3CRUD.action_buttons(r,
deletable = False,
read_url = open_url,
update_url = open_url)
# Need no export formats (as this is a form)
response.s3.no_formats = True
# Data table (items)
items = dt.html(totalrows,
filteredrows,
dt_id,
dt_ajax_url=URL(c="evr",
f="group",
args=["add_members"],
vars={},
extension="aadata",
),
dt_bulk_actions=dt_bulk_actions,
dt_pageLength=display_length,
dt_pagination="true",
dt_searching="false",
)
resource.configure(deletable = False)
# Filter form
if filter_widgets:
# Where to retrieve filtered data from:
_vars = resource.crud._remove_filters(r.get_vars)
filter_submit_url = r.url(vars=_vars)
# Where to retrieve updated filter options from:
filter_ajax_url = URL(f="person",
args=["filter.options"],
vars={},
)
# Define filter form
filter_clear = get_config("filter_clear", True)
filter_submit = get_config("filter_submit", True)
filter_form = S3FilterForm(filter_widgets,
clear=filter_clear,
submit=filter_submit,
ajax=True,
url=filter_submit_url,
ajaxurl=filter_ajax_url,
_class="filter-form",
_id="datatable-filter-form",
)
# Render filter form
fresource = s3db.resource(resource.tablename)
alias = resource.alias if r.component else None
ff = filter_form.html(fresource,
r.get_vars,
target="datatable",
alias=alias)
else:
ff = ""
output = dict(items = items,
title = T("Add Members to Group"),
addheader = "%s:" % T("Select People to add them to the Group"),
list_filter_form = ff,
)
response.view = "list_filter.html"
return output
else:
# Ajax refresh
if "draw" in get_vars:
echo = int(get_vars.draw)
else:
echo = None
items = dt.json(totalrows,
filteredrows,
dt_id,
echo,
dt_bulk_actions=dt_bulk_actions,
)
response.headers["Content-Type"] = "application/json"
return items
# END =========================================================================
| true
| true
|
f70c7c8d3e72df63f8d1547bb698e8f8588b13ff
| 3,574
|
py
|
Python
|
rindr/settings.py
|
claird160/rindr
|
0ab9d77edf6258ab8f304fd4f1c5f92d96ff7a60
|
[
"MIT"
] | null | null | null |
rindr/settings.py
|
claird160/rindr
|
0ab9d77edf6258ab8f304fd4f1c5f92d96ff7a60
|
[
"MIT"
] | null | null | null |
rindr/settings.py
|
claird160/rindr
|
0ab9d77edf6258ab8f304fd4f1c5f92d96ff7a60
|
[
"MIT"
] | null | null | null |
"""
Django settings for rindr project.
Generated by 'django-admin startproject' using Django 3.2.8.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
from decouple import config
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config("SECRET_KEY")
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'type',
'ticket',
'django_bootstrap5',
'jquery',
'dashboard',
'mathfilters',
'BI'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'rindr.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'rindr.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
#DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': BASE_DIR / 'db.sqlite3',
# }
#}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'rindr',
'USER': 'rindr',
'PASSWORD': 'freya',
'HOST': '10.100.102.161',
'PORT': ''
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
LOGIN_URL="/login"
| 24.648276
| 91
| 0.678232
|
from pathlib import Path
from decouple import config
BASE_DIR = Path(__file__).resolve().parent.parent
SECRET_KEY = config("SECRET_KEY")
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'type',
'ticket',
'django_bootstrap5',
'jquery',
'dashboard',
'mathfilters',
'BI'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'rindr.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'rindr.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
#DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': BASE_DIR / 'db.sqlite3',
# }
#}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'rindr',
'USER': 'rindr',
'PASSWORD': 'freya',
'HOST': '10.100.102.161',
'PORT': ''
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
LOGIN_URL="/login"
| true
| true
|
f70c7cead4990040f286a584fa949ea7edd561a9
| 1,347
|
py
|
Python
|
apysc/_display/flip_interface_helper.py
|
ynsnf/apysc
|
b10ffaf76ec6beb187477d0a744fca00e3efc3fb
|
[
"MIT"
] | 16
|
2021-04-16T02:01:29.000Z
|
2022-01-01T08:53:49.000Z
|
apysc/_display/flip_interface_helper.py
|
ynsnf/apysc
|
b10ffaf76ec6beb187477d0a744fca00e3efc3fb
|
[
"MIT"
] | 613
|
2021-03-24T03:37:38.000Z
|
2022-03-26T10:58:37.000Z
|
apysc/_display/flip_interface_helper.py
|
simon-ritchie/apyscript
|
c319f8ab2f1f5f7fad8d2a8b4fc06e7195476279
|
[
"MIT"
] | 2
|
2021-06-20T07:32:58.000Z
|
2021-12-26T08:22:11.000Z
|
"""The helper module for the flip interfaces.
"""
from enum import Enum
from apysc._type.boolean import Boolean
class Axis(Enum):
X = 'x'
Y = 'y'
def make_flip_update_expression(
*, before_value: Boolean, after_value: Boolean,
axis: Axis, interface_variable_name: str) -> str:
"""
Make a flipping value updating expression.
Parameters
----------
before_value : Boolean
Before updating flipping value.
after_value : Boolean
After updating flipping value.
axis : Axis
X or y axis value.
interface_variable_name : str
Interface instance variable name.
Returns
-------
expression : str
Made expression string.
"""
from apysc._type import value_util
before_value_str: str = value_util.get_value_str_for_expression(
value=before_value)
after_value_str: str = value_util.get_value_str_for_expression(
value=after_value)
expression: str = (
f'if ({before_value_str}) {{'
f'\n {interface_variable_name}.flip("{axis.value}");'
'\n}'
f'\nif ({after_value_str}) {{'
f'\n {interface_variable_name}.flip("{axis.value}");'
'\n}'
f'\n{before_value_str} = {after_value_str};'
)
return expression
| 26.411765
| 69
| 0.604306
|
from enum import Enum
from apysc._type.boolean import Boolean
class Axis(Enum):
X = 'x'
Y = 'y'
def make_flip_update_expression(
*, before_value: Boolean, after_value: Boolean,
axis: Axis, interface_variable_name: str) -> str:
from apysc._type import value_util
before_value_str: str = value_util.get_value_str_for_expression(
value=before_value)
after_value_str: str = value_util.get_value_str_for_expression(
value=after_value)
expression: str = (
f'if ({before_value_str}) {{'
f'\n {interface_variable_name}.flip("{axis.value}");'
'\n}'
f'\nif ({after_value_str}) {{'
f'\n {interface_variable_name}.flip("{axis.value}");'
'\n}'
f'\n{before_value_str} = {after_value_str};'
)
return expression
| true
| true
|
f70c7d5536cf9053acba7375cc7748accbada4ba
| 248
|
py
|
Python
|
pythonclub/club/admin.py
|
janainfanger/itc240-webapp-1
|
0e9f9b0e12d82bf49ed1996ef2ed4ea21d3adc23
|
[
"Apache-2.0"
] | null | null | null |
pythonclub/club/admin.py
|
janainfanger/itc240-webapp-1
|
0e9f9b0e12d82bf49ed1996ef2ed4ea21d3adc23
|
[
"Apache-2.0"
] | null | null | null |
pythonclub/club/admin.py
|
janainfanger/itc240-webapp-1
|
0e9f9b0e12d82bf49ed1996ef2ed4ea21d3adc23
|
[
"Apache-2.0"
] | null | null | null |
from django.contrib import admin
from .models import Meeting, MeetingMinutes, Resource, Event
# Register your models here.
admin.site.register(Meeting)
admin.site.register(MeetingMinutes)
admin.site.register(Resource)
admin.site.register(Event)
| 22.545455
| 60
| 0.814516
|
from django.contrib import admin
from .models import Meeting, MeetingMinutes, Resource, Event
admin.site.register(Meeting)
admin.site.register(MeetingMinutes)
admin.site.register(Resource)
admin.site.register(Event)
| true
| true
|
f70c7d6e65fb0d26ccd64aba1732de80b1421d06
| 418,199
|
py
|
Python
|
cfgov/hmda/resources/loan_file_metadata.py
|
higs4281/cfgov-refresh
|
a02b193fb2373d443265c21845adf8a196e05675
|
[
"CC0-1.0"
] | null | null | null |
cfgov/hmda/resources/loan_file_metadata.py
|
higs4281/cfgov-refresh
|
a02b193fb2373d443265c21845adf8a196e05675
|
[
"CC0-1.0"
] | null | null | null |
cfgov/hmda/resources/loan_file_metadata.py
|
higs4281/cfgov-refresh
|
a02b193fb2373d443265c21845adf8a196e05675
|
[
"CC0-1.0"
] | null | null | null |
# flake8: noqa E501
from hmda.models.hmda_data_file import HmdaDataFile
# Access this using HMDA_DATA_FILES[geo][field_descriptions][records]
LOAN_FILE_METADATA = {
'nationwide': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '7036352', '482.83 MB'),
'2007': HmdaDataFile('hmda_2007_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '7201366', '453.04 MB'),
'2017': HmdaDataFile('hmda_2017_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '5986659', '369.82 MB'),
'2015': HmdaDataFile('hmda_2015_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '6113423', '485.63 MB'),
'2014': HmdaDataFile('hmda_2014_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '4832425', '323.43 MB'),
'2008': HmdaDataFile('hmda_2008_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '5526941', '330.65 MB'),
'2009': HmdaDataFile('hmda_2009_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '7783986', '467.08 MB'),
'2011': HmdaDataFile('hmda_2011_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '5946435', '399.41 MB'),
'2010': HmdaDataFile('hmda_2010_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '6764902', '455.07 MB'),
'2013': HmdaDataFile('hmda_2013_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '7126202', '492.02 MB'),
'2012': HmdaDataFile('hmda_2012_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '8298882', '573.78 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nationwide_all-records_labels.zip', '16332987', '1.2 GB'),
'2007': HmdaDataFile('hmda_2007_nationwide_all-records_labels.zip', '26605695', '1.72 GB'),
'2017': HmdaDataFile('hmda_2017_nationwide_all-records_labels.zip', '14285496', '986 MB'),
'2015': HmdaDataFile('hmda_2015_nationwide_all-records_labels.zip', '14374184', '1.21 GB'),
'2014': HmdaDataFile('hmda_2014_nationwide_all-records_labels.zip', '12049341', '862.92 MB'),
'2008': HmdaDataFile('hmda_2008_nationwide_all-records_labels.zip', '17391570', '1.06 GB'),
'2009': HmdaDataFile('hmda_2009_nationwide_all-records_labels.zip', '19493491', '1.29 GB'),
'2011': HmdaDataFile('hmda_2011_nationwide_all-records_labels.zip', '14873415', '1.08 GB'),
'2010': HmdaDataFile('hmda_2010_nationwide_all-records_labels.zip', '16348557', '1.19 GB'),
'2013': HmdaDataFile('hmda_2013_nationwide_all-records_labels.zip', '17016159', '1.27 GB'),
'2012': HmdaDataFile('hmda_2012_nationwide_all-records_labels.zip', '18691551', '1.4 GB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nationwide_originated-records_labels.zip', '8377907', '457.12 MB'),
'2007': HmdaDataFile('hmda_2007_nationwide_originated-records_labels.zip', '10441545', '528.7 MB'),
'2017': HmdaDataFile('hmda_2017_nationwide_originated-records_labels.zip', '7339057', '247.2 MB'),
'2015': HmdaDataFile('hmda_2015_nationwide_originated-records_labels.zip', '7404258', '461.08 MB'),
'2014': HmdaDataFile('hmda_2014_nationwide_originated-records_labels.zip', '6039826', '331.36 MB'),
'2008': HmdaDataFile('hmda_2008_nationwide_originated-records_labels.zip', '7177262', '360.36 MB'),
'2009': HmdaDataFile('hmda_2009_nationwide_originated-records_labels.zip', '8950936', '416.67 MB'),
'2011': HmdaDataFile('hmda_2011_nationwide_originated-records_labels.zip', '7095262', '381.6 MB'),
'2010': HmdaDataFile('hmda_2010_nationwide_originated-records_labels.zip', '7863337', '419.27 MB'),
'2013': HmdaDataFile('hmda_2013_nationwide_originated-records_labels.zip', '8706657', '476.47 MB'),
'2012': HmdaDataFile('hmda_2012_nationwide_originated-records_labels.zip', '9783966', '529.5 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '7036352', '165.84 MB'),
'2007': HmdaDataFile('hmda_2007_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '7201366', '141.73 MB'),
'2017': HmdaDataFile('hmda_2017_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '5986659', '77.47 MB'),
'2015': HmdaDataFile('hmda_2015_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '6113423', '144.37 MB'),
'2014': HmdaDataFile('hmda_2014_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '4787867', '113.54 MB'),
'2008': HmdaDataFile('hmda_2008_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '5526941', '107.3 MB'),
'2009': HmdaDataFile('hmda_2009_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '7783986', '140.57 MB'),
'2011': HmdaDataFile('hmda_2011_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '5946435', '132 MB'),
'2010': HmdaDataFile('hmda_2010_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '6764902', '149.36 MB'),
'2013': HmdaDataFile('hmda_2013_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '7126202', '166.47 MB'),
'2012': HmdaDataFile('hmda_2012_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '8298882', '189.65 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nationwide_all-records_codes.zip', '16332987', '384.11 MB'),
'2007': HmdaDataFile('hmda_2007_nationwide_all-records_codes.zip', '26605695', '461.15 MB'),
'2017': HmdaDataFile('hmda_2017_nationwide_all-records_codes.zip', '14285496', '182.02 MB'),
'2015': HmdaDataFile('hmda_2015_nationwide_all-records_codes.zip', '14374184', '337.27 MB'),
'2014': HmdaDataFile('hmda_2014_nationwide_all-records_codes.zip', '11875464', '278.4 MB'),
'2008': HmdaDataFile('hmda_2008_nationwide_all-records_codes.zip', '17391570', '309.22 MB'),
'2009': HmdaDataFile('hmda_2009_nationwide_all-records_codes.zip', '19493491', '331.31 MB'),
'2011': HmdaDataFile('hmda_2011_nationwide_all-records_codes.zip', '14873415', '335.22 MB'),
'2010': HmdaDataFile('hmda_2010_nationwide_all-records_codes.zip', '16348557', '367.78 MB'),
'2013': HmdaDataFile('hmda_2013_nationwide_all-records_codes.zip', '17016159', '400.19 MB'),
'2012': HmdaDataFile('hmda_2012_nationwide_all-records_codes.zip', '18691551', '434.69 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nationwide_originated-records_codes.zip', '8377907', '196.62 MB'),
'2007': HmdaDataFile('hmda_2007_nationwide_originated-records_codes.zip', '10441545', '199.55 MB'),
'2017': HmdaDataFile('hmda_2017_nationwide_originated-records_codes.zip', '7339057', '94.95 MB'),
'2015': HmdaDataFile('hmda_2015_nationwide_originated-records_codes.zip', '7404258', '173.96 MB'),
'2014': HmdaDataFile('hmda_2014_nationwide_originated-records_codes.zip', '5979766', '140.82 MB'),
'2008': HmdaDataFile('hmda_2008_nationwide_originated-records_codes.zip', '7177262', '137.98 MB'),
'2009': HmdaDataFile('hmda_2009_nationwide_originated-records_codes.zip', '8950936', '162.04 MB'),
'2011': HmdaDataFile('hmda_2011_nationwide_originated-records_codes.zip', '7095262', '157.4 MB'),
'2010': HmdaDataFile('hmda_2010_nationwide_originated-records_codes.zip', '7863337', '173.45 MB'),
'2013': HmdaDataFile('hmda_2013_nationwide_originated-records_codes.zip', '8706657', '203.33 MB'),
'2012': HmdaDataFile('hmda_2012_nationwide_originated-records_codes.zip', '9783966', '224.11 MB')
}
}
},
'va': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '216152', '11.27 MB'),
'2007': HmdaDataFile('hmda_2007_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '228323', '11.15 MB'),
'2017': HmdaDataFile('hmda_2017_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '175737', '5.58 MB'),
'2015': HmdaDataFile('hmda_2015_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '191048', '11.13 MB'),
'2014': HmdaDataFile('hmda_2014_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '147744', '7.63 MB'),
'2008': HmdaDataFile('hmda_2008_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '186185', '8.91 MB'),
'2009': HmdaDataFile('hmda_2009_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '273787', '12.14 MB'),
'2011': HmdaDataFile('hmda_2011_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '205670', '10.53 MB'),
'2010': HmdaDataFile('hmda_2010_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '228664', '11.48 MB'),
'2013': HmdaDataFile('hmda_2013_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '244599', '12.77 MB'),
'2012': HmdaDataFile('hmda_2012_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '288436', '14.9 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_va_all-records_labels.zip', '494057', '27.35 MB'),
'2007': HmdaDataFile('hmda_2007_va_all-records_labels.zip', '784919', '38.17 MB'),
'2017': HmdaDataFile('hmda_2017_va_all-records_labels.zip', '411507', '14.48 MB'),
'2015': HmdaDataFile('hmda_2015_va_all-records_labels.zip', '445447', '27.97 MB'),
'2014': HmdaDataFile('hmda_2014_va_all-records_labels.zip', '365572', '20.34 MB'),
'2008': HmdaDataFile('hmda_2008_va_all-records_labels.zip', '539572', '26.49 MB'),
'2009': HmdaDataFile('hmda_2009_va_all-records_labels.zip', '637212', '29.34 MB'),
'2011': HmdaDataFile('hmda_2011_va_all-records_labels.zip', '482943', '27.06 MB'),
'2010': HmdaDataFile('hmda_2010_va_all-records_labels.zip', '517819', '28.54 MB'),
'2013': HmdaDataFile('hmda_2013_va_all-records_labels.zip', '563167', '31.8 MB'),
'2012': HmdaDataFile('hmda_2012_va_all-records_labels.zip', '634102', '35.59 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_va_originated-records_labels.zip', '252237', '13.42 MB'),
'2007': HmdaDataFile('hmda_2007_va_originated-records_labels.zip', '327766', '16.15 MB'),
'2017': HmdaDataFile('hmda_2017_va_originated-records_labels.zip', '211218', '6.89 MB'),
'2015': HmdaDataFile('hmda_2015_va_originated-records_labels.zip', '227837', '13.49 MB'),
'2014': HmdaDataFile('hmda_2014_va_originated-records_labels.zip', '183729', '9.73 MB'),
'2008': HmdaDataFile('hmda_2008_va_originated-records_labels.zip', '234813', '11.47 MB'),
'2009': HmdaDataFile('hmda_2009_va_originated-records_labels.zip', '308658', '13.99 MB'),
'2011': HmdaDataFile('hmda_2011_va_originated-records_labels.zip', '239310', '12.46 MB'),
'2010': HmdaDataFile('hmda_2010_va_originated-records_labels.zip', '260214', '13.38 MB'),
'2013': HmdaDataFile('hmda_2013_va_originated-records_labels.zip', '294145', '15.54 MB'),
'2012': HmdaDataFile('hmda_2012_va_originated-records_labels.zip', '334770', '17.51 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '216152', '7.74 MB'),
'2007': HmdaDataFile('hmda_2007_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '228323', '7.71 MB'),
'2017': HmdaDataFile('hmda_2017_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '175737', '3.89 MB'),
'2015': HmdaDataFile('hmda_2015_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '191048', '7.65 MB'),
'2014': HmdaDataFile('hmda_2014_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '147744', '5.26 MB'),
'2008': HmdaDataFile('hmda_2008_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '186185', '6.21 MB'),
'2009': HmdaDataFile('hmda_2009_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '273787', '8.61 MB'),
'2011': HmdaDataFile('hmda_2011_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '205670', '7.08 MB'),
'2010': HmdaDataFile('hmda_2010_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '228664', '7.72 MB'),
'2013': HmdaDataFile('hmda_2013_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '244599', '8.81 MB'),
'2012': HmdaDataFile('hmda_2012_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '288436', '10.21 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_va_all-records_codes.zip', '494057', '18.2 MB'),
'2007': HmdaDataFile('hmda_2007_va_all-records_codes.zip', '784919', '25.76 MB'),
'2017': HmdaDataFile('hmda_2017_va_all-records_codes.zip', '411507', '9.31 MB'),
'2015': HmdaDataFile('hmda_2015_va_all-records_codes.zip', '445447', '18.51 MB'),
'2014': HmdaDataFile('hmda_2014_va_all-records_codes.zip', '365572', '13.57 MB'),
'2008': HmdaDataFile('hmda_2008_va_all-records_codes.zip', '539572', '17.99 MB'),
'2009': HmdaDataFile('hmda_2009_va_all-records_codes.zip', '637212', '20.26 MB'),
'2011': HmdaDataFile('hmda_2011_va_all-records_codes.zip', '482943', '17.83 MB'),
'2010': HmdaDataFile('hmda_2010_va_all-records_codes.zip', '517819', '18.81 MB'),
'2013': HmdaDataFile('hmda_2013_va_all-records_codes.zip', '563167', '21.36 MB'),
'2012': HmdaDataFile('hmda_2012_va_all-records_codes.zip', '634102', '23.86 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_va_originated-records_codes.zip', '252237', '9.19 MB'),
'2007': HmdaDataFile('hmda_2007_va_originated-records_codes.zip', '327766', '11.21 MB'),
'2017': HmdaDataFile('hmda_2017_va_originated-records_codes.zip', '211218', '4.75 MB'),
'2015': HmdaDataFile('hmda_2015_va_originated-records_codes.zip', '227837', '9.22 MB'),
'2014': HmdaDataFile('hmda_2014_va_originated-records_codes.zip', '183729', '6.67 MB'),
'2008': HmdaDataFile('hmda_2008_va_originated-records_codes.zip', '234813', '7.98 MB'),
'2009': HmdaDataFile('hmda_2009_va_originated-records_codes.zip', '308658', '9.9 MB'),
'2011': HmdaDataFile('hmda_2011_va_originated-records_codes.zip', '239310', '8.34 MB'),
'2010': HmdaDataFile('hmda_2010_va_originated-records_codes.zip', '260214', '8.98 MB'),
'2013': HmdaDataFile('hmda_2013_va_originated-records_codes.zip', '294145', '10.63 MB'),
'2012': HmdaDataFile('hmda_2012_va_originated-records_codes.zip', '334770', '11.93 MB')
}
}
},
'co': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '228866', '11.12 MB'),
'2007': HmdaDataFile('hmda_2007_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '144805', '6.71 MB'),
'2017': HmdaDataFile('hmda_2017_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '182654', '5.82 MB'),
'2015': HmdaDataFile('hmda_2015_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '194123', '10.51 MB'),
'2014': HmdaDataFile('hmda_2014_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '139220', '6.92 MB'),
'2008': HmdaDataFile('hmda_2008_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '128542', '5.81 MB'),
'2009': HmdaDataFile('hmda_2009_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '209511', '8.79 MB'),
'2011': HmdaDataFile('hmda_2011_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '149880', '7.07 MB'),
'2010': HmdaDataFile('hmda_2010_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '180911', '8.7 MB'),
'2013': HmdaDataFile('hmda_2013_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '192627', '9.59 MB'),
'2012': HmdaDataFile('hmda_2012_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '222498', '10.92 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_co_all-records_labels.zip', '483436', '25.25 MB'),
'2007': HmdaDataFile('hmda_2007_co_all-records_labels.zip', '537363', '25.41 MB'),
'2017': HmdaDataFile('hmda_2017_co_all-records_labels.zip', '404517', '14.76 MB'),
'2015': HmdaDataFile('hmda_2015_co_all-records_labels.zip', '409511', '23.35 MB'),
'2014': HmdaDataFile('hmda_2014_co_all-records_labels.zip', '313445', '16.62 MB'),
'2008': HmdaDataFile('hmda_2008_co_all-records_labels.zip', '370468', '17.59 MB'),
'2009': HmdaDataFile('hmda_2009_co_all-records_labels.zip', '492317', '21.47 MB'),
'2011': HmdaDataFile('hmda_2011_co_all-records_labels.zip', '366969', '18.87 MB'),
'2010': HmdaDataFile('hmda_2010_co_all-records_labels.zip', '413027', '21.58 MB'),
'2013': HmdaDataFile('hmda_2013_co_all-records_labels.zip', '427952', '22.81 MB'),
'2012': HmdaDataFile('hmda_2012_co_all-records_labels.zip', '474846', '24.96 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_co_originated-records_labels.zip', '263402', '13.07 MB'),
'2007': HmdaDataFile('hmda_2007_co_originated-records_labels.zip', '218842', '10.18 MB'),
'2017': HmdaDataFile('hmda_2017_co_originated-records_labels.zip', '216848', '7.17 MB'),
'2015': HmdaDataFile('hmda_2015_co_originated-records_labels.zip', '227578', '12.54 MB'),
'2014': HmdaDataFile('hmda_2014_co_originated-records_labels.zip', '169959', '8.6 MB'),
'2008': HmdaDataFile('hmda_2008_co_originated-records_labels.zip', '162244', '7.43 MB'),
'2009': HmdaDataFile('hmda_2009_co_originated-records_labels.zip', '236219', '9.99 MB'),
'2011': HmdaDataFile('hmda_2011_co_originated-records_labels.zip', '179323', '8.69 MB'),
'2010': HmdaDataFile('hmda_2010_co_originated-records_labels.zip', '207951', '10.35 MB'),
'2013': HmdaDataFile('hmda_2013_co_originated-records_labels.zip', '235157', '11.95 MB'),
'2012': HmdaDataFile('hmda_2012_co_originated-records_labels.zip', '263229', '13.2 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '228866', '7.81 MB'),
'2007': HmdaDataFile('hmda_2007_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '144805', '4.75 MB'),
'2017': HmdaDataFile('hmda_2017_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '182654', '4.07 MB'),
'2015': HmdaDataFile('hmda_2015_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '194123', '7.52 MB'),
'2014': HmdaDataFile('hmda_2014_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '139220', '4.9 MB'),
'2008': HmdaDataFile('hmda_2008_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '128542', '4.11 MB'),
'2009': HmdaDataFile('hmda_2009_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '209511', '6.34 MB'),
'2011': HmdaDataFile('hmda_2011_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '149880', '4.88 MB'),
'2010': HmdaDataFile('hmda_2010_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '180911', '6.03 MB'),
'2013': HmdaDataFile('hmda_2013_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '192627', '6.74 MB'),
'2012': HmdaDataFile('hmda_2012_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '222498', '7.6 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_co_all-records_codes.zip', '483436', '17.2 MB'),
'2007': HmdaDataFile('hmda_2007_co_all-records_codes.zip', '537363', '17.72 MB'),
'2017': HmdaDataFile('hmda_2017_co_all-records_codes.zip', '404517', '9.9 MB'),
'2015': HmdaDataFile('hmda_2015_co_all-records_codes.zip', '409511', '16.01 MB'),
'2014': HmdaDataFile('hmda_2014_co_all-records_codes.zip', '313445', '11.43 MB'),
'2008': HmdaDataFile('hmda_2008_co_all-records_codes.zip', '370468', '12.3 MB'),
'2009': HmdaDataFile('hmda_2009_co_all-records_codes.zip', '492317', '15.15 MB'),
'2011': HmdaDataFile('hmda_2011_co_all-records_codes.zip', '366969', '12.63 MB'),
'2010': HmdaDataFile('hmda_2010_co_all-records_codes.zip', '413027', '14.47 MB'),
'2013': HmdaDataFile('hmda_2013_co_all-records_codes.zip', '427952', '15.58 MB'),
'2012': HmdaDataFile('hmda_2012_co_all-records_codes.zip', '474846', '16.87 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_co_originated-records_codes.zip', '263402', '9.15 MB'),
'2007': HmdaDataFile('hmda_2007_co_originated-records_codes.zip', '218842', '7.18 MB'),
'2017': HmdaDataFile('hmda_2017_co_originated-records_codes.zip', '216848', '4.99 MB'),
'2015': HmdaDataFile('hmda_2015_co_originated-records_codes.zip', '227578', '8.91 MB'),
'2014': HmdaDataFile('hmda_2014_co_originated-records_codes.zip', '169959', '6.05 MB'),
'2008': HmdaDataFile('hmda_2008_co_originated-records_codes.zip', '162244', '5.23 MB'),
'2009': HmdaDataFile('hmda_2009_co_originated-records_codes.zip', '236219', '7.16 MB'),
'2011': HmdaDataFile('hmda_2011_co_originated-records_codes.zip', '179323', '5.95 MB'),
'2010': HmdaDataFile('hmda_2010_co_originated-records_codes.zip', '207951', '7.15 MB'),
'2013': HmdaDataFile('hmda_2013_co_originated-records_codes.zip', '235157', '8.37 MB'),
'2012': HmdaDataFile('hmda_2012_co_originated-records_codes.zip', '263229', '9.16 MB')
}
}
},
'vi': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2007': HmdaDataFile('hmda_2007_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2017': HmdaDataFile('hmda_2017_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '15', '1.55 KB'),
'2015': HmdaDataFile('hmda_2015_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2014': HmdaDataFile('hmda_2014_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2008': HmdaDataFile('hmda_2008_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2009': HmdaDataFile('hmda_2009_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2011': HmdaDataFile('hmda_2011_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2010': HmdaDataFile('hmda_2010_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2013': HmdaDataFile('hmda_2013_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2012': HmdaDataFile('hmda_2012_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_vi_all-records_labels.zip', '0', '581 bytes'),
'2007': HmdaDataFile('hmda_2007_vi_all-records_labels.zip', '0', '581 bytes'),
'2017': HmdaDataFile('hmda_2017_vi_all-records_labels.zip', '47', '2.39 KB'),
'2015': HmdaDataFile('hmda_2015_vi_all-records_labels.zip', '0', '581 bytes'),
'2014': HmdaDataFile('hmda_2014_vi_all-records_labels.zip', '0', '581 bytes'),
'2008': HmdaDataFile('hmda_2008_vi_all-records_labels.zip', '0', '581 bytes'),
'2009': HmdaDataFile('hmda_2009_vi_all-records_labels.zip', '0', '581 bytes'),
'2011': HmdaDataFile('hmda_2011_vi_all-records_labels.zip', '0', '581 bytes'),
'2010': HmdaDataFile('hmda_2010_vi_all-records_labels.zip', '0', '581 bytes'),
'2013': HmdaDataFile('hmda_2013_vi_all-records_labels.zip', '0', '581 bytes'),
'2012': HmdaDataFile('hmda_2012_vi_all-records_labels.zip', '0', '581 bytes')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_vi_originated-records_labels.zip', '0', '595 bytes'),
'2007': HmdaDataFile('hmda_2007_vi_originated-records_labels.zip', '0', '595 bytes'),
'2017': HmdaDataFile('hmda_2017_vi_originated-records_labels.zip', '23', '1.73 KB'),
'2015': HmdaDataFile('hmda_2015_vi_originated-records_labels.zip', '0', '595 bytes'),
'2014': HmdaDataFile('hmda_2014_vi_originated-records_labels.zip', '0', '595 bytes'),
'2008': HmdaDataFile('hmda_2008_vi_originated-records_labels.zip', '0', '595 bytes'),
'2009': HmdaDataFile('hmda_2009_vi_originated-records_labels.zip', '0', '595 bytes'),
'2011': HmdaDataFile('hmda_2011_vi_originated-records_labels.zip', '0', '595 bytes'),
'2010': HmdaDataFile('hmda_2010_vi_originated-records_labels.zip', '0', '595 bytes'),
'2013': HmdaDataFile('hmda_2013_vi_originated-records_labels.zip', '0', '595 bytes'),
'2012': HmdaDataFile('hmda_2012_vi_originated-records_labels.zip', '0', '595 bytes')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2007': HmdaDataFile('hmda_2007_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2017': HmdaDataFile('hmda_2017_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '15', '940 bytes'),
'2015': HmdaDataFile('hmda_2015_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2014': HmdaDataFile('hmda_2014_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2008': HmdaDataFile('hmda_2008_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2009': HmdaDataFile('hmda_2009_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2011': HmdaDataFile('hmda_2011_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2010': HmdaDataFile('hmda_2010_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2013': HmdaDataFile('hmda_2013_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2012': HmdaDataFile('hmda_2012_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_vi_all-records_codes.zip', '0', '474 bytes'),
'2007': HmdaDataFile('hmda_2007_vi_all-records_codes.zip', '0', '474 bytes'),
'2017': HmdaDataFile('hmda_2017_vi_all-records_codes.zip', '47', '1.37 KB'),
'2015': HmdaDataFile('hmda_2015_vi_all-records_codes.zip', '0', '474 bytes'),
'2014': HmdaDataFile('hmda_2014_vi_all-records_codes.zip', '0', '474 bytes'),
'2008': HmdaDataFile('hmda_2008_vi_all-records_codes.zip', '0', '474 bytes'),
'2009': HmdaDataFile('hmda_2009_vi_all-records_codes.zip', '0', '474 bytes'),
'2011': HmdaDataFile('hmda_2011_vi_all-records_codes.zip', '0', '474 bytes'),
'2010': HmdaDataFile('hmda_2010_vi_all-records_codes.zip', '0', '474 bytes'),
'2013': HmdaDataFile('hmda_2013_vi_all-records_codes.zip', '0', '474 bytes'),
'2012': HmdaDataFile('hmda_2012_vi_all-records_codes.zip', '0', '474 bytes')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_vi_originated-records_codes.zip', '0', '488 bytes'),
'2007': HmdaDataFile('hmda_2007_vi_originated-records_codes.zip', '0', '488 bytes'),
'2017': HmdaDataFile('hmda_2017_vi_originated-records_codes.zip', '23', '1.03 KB'),
'2015': HmdaDataFile('hmda_2015_vi_originated-records_codes.zip', '0', '488 bytes'),
'2014': HmdaDataFile('hmda_2014_vi_originated-records_codes.zip', '0', '488 bytes'),
'2008': HmdaDataFile('hmda_2008_vi_originated-records_codes.zip', '0', '488 bytes'),
'2009': HmdaDataFile('hmda_2009_vi_originated-records_codes.zip', '0', '488 bytes'),
'2011': HmdaDataFile('hmda_2011_vi_originated-records_codes.zip', '0', '488 bytes'),
'2010': HmdaDataFile('hmda_2010_vi_originated-records_codes.zip', '0', '488 bytes'),
'2013': HmdaDataFile('hmda_2013_vi_originated-records_codes.zip', '0', '488 bytes'),
'2012': HmdaDataFile('hmda_2012_vi_originated-records_codes.zip', '0', '488 bytes')
}
}
},
'ak': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '15356', '685.22 KB'),
'2007': HmdaDataFile('hmda_2007_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '16758', '627.93 KB'),
'2017': HmdaDataFile('hmda_2017_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '12579', '350.04 KB'),
'2015': HmdaDataFile('hmda_2015_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '14511', '637.91 KB'),
'2014': HmdaDataFile('hmda_2014_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '12147', '530.18 KB'),
'2008': HmdaDataFile('hmda_2008_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '15576', '536.13 KB'),
'2009': HmdaDataFile('hmda_2009_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '23301', '812.74 KB'),
'2011': HmdaDataFile('hmda_2011_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '16974', '682.85 KB'),
'2010': HmdaDataFile('hmda_2010_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '18636', '750.13 KB'),
'2013': HmdaDataFile('hmda_2013_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '17337', '684.52 KB'),
'2012': HmdaDataFile('hmda_2012_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '22064', '872.42 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ak_all-records_labels.zip', '36105', '1.77 MB'),
'2007': HmdaDataFile('hmda_2007_ak_all-records_labels.zip', '48143', '2.05 MB'),
'2017': HmdaDataFile('hmda_2017_ak_all-records_labels.zip', '28632', '904.87 KB'),
'2015': HmdaDataFile('hmda_2015_ak_all-records_labels.zip', '33421', '1.59 MB'),
'2014': HmdaDataFile('hmda_2014_ak_all-records_labels.zip', '26499', '1.29 MB'),
'2008': HmdaDataFile('hmda_2008_ak_all-records_labels.zip', '36410', '1.47 MB'),
'2009': HmdaDataFile('hmda_2009_ak_all-records_labels.zip', '51821', '2.02 MB'),
'2011': HmdaDataFile('hmda_2011_ak_all-records_labels.zip', '36900', '1.62 MB'),
'2010': HmdaDataFile('hmda_2010_ak_all-records_labels.zip', '41203', '1.81 MB'),
'2013': HmdaDataFile('hmda_2013_ak_all-records_labels.zip', '39394', '1.81 MB'),
'2012': HmdaDataFile('hmda_2012_ak_all-records_labels.zip', '46691', '2.09 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ak_originated-records_labels.zip', '17503', '792.19 KB'),
'2007': HmdaDataFile('hmda_2007_ak_originated-records_labels.zip', '21167', '801.83 KB'),
'2017': HmdaDataFile('hmda_2017_ak_originated-records_labels.zip', '14430', '400.6 KB'),
'2015': HmdaDataFile('hmda_2015_ak_originated-records_labels.zip', '16680', '749.38 KB'),
'2014': HmdaDataFile('hmda_2014_ak_originated-records_labels.zip', '14272', '633.57 KB'),
'2008': HmdaDataFile('hmda_2008_ak_originated-records_labels.zip', '17485', '614.3 KB'),
'2009': HmdaDataFile('hmda_2009_ak_originated-records_labels.zip', '24987', '878.94 KB'),
'2011': HmdaDataFile('hmda_2011_ak_originated-records_labels.zip', '19236', '785.43 KB'),
'2010': HmdaDataFile('hmda_2010_ak_originated-records_labels.zip', '20697', '847.87 KB'),
'2013': HmdaDataFile('hmda_2013_ak_originated-records_labels.zip', '20363', '840.47 KB'),
'2012': HmdaDataFile('hmda_2012_ak_originated-records_labels.zip', '24887', '1.02 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '15356', '426.06 KB'),
'2007': HmdaDataFile('hmda_2007_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '16758', '411.75 KB'),
'2017': HmdaDataFile('hmda_2017_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '12579', '240.69 KB'),
'2015': HmdaDataFile('hmda_2015_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '14511', '394.05 KB'),
'2014': HmdaDataFile('hmda_2014_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '12147', '329.2 KB'),
'2008': HmdaDataFile('hmda_2008_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '15576', '364.56 KB'),
'2009': HmdaDataFile('hmda_2009_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '23301', '544.64 KB'),
'2011': HmdaDataFile('hmda_2011_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '16974', '426.2 KB'),
'2010': HmdaDataFile('hmda_2010_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '18636', '464.07 KB'),
'2013': HmdaDataFile('hmda_2013_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '17337', '417.9 KB'),
'2012': HmdaDataFile('hmda_2012_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '22064', '530.98 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ak_all-records_codes.zip', '36105', '1.07 MB'),
'2007': HmdaDataFile('hmda_2007_ak_all-records_codes.zip', '48143', '1.24 MB'),
'2017': HmdaDataFile('hmda_2017_ak_all-records_codes.zip', '28632', '571.83 KB'),
'2015': HmdaDataFile('hmda_2015_ak_all-records_codes.zip', '33421', '951.9 KB'),
'2014': HmdaDataFile('hmda_2014_ak_all-records_codes.zip', '26499', '778.42 KB'),
'2008': HmdaDataFile('hmda_2008_ak_all-records_codes.zip', '36410', '899.28 KB'),
'2009': HmdaDataFile('hmda_2009_ak_all-records_codes.zip', '51821', '1.29 MB'),
'2011': HmdaDataFile('hmda_2011_ak_all-records_codes.zip', '36900', '992.55 KB'),
'2010': HmdaDataFile('hmda_2010_ak_all-records_codes.zip', '41203', '1.1 MB'),
'2013': HmdaDataFile('hmda_2013_ak_all-records_codes.zip', '39394', '1.01 MB'),
'2012': HmdaDataFile('hmda_2012_ak_all-records_codes.zip', '46691', '1.17 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ak_originated-records_codes.zip', '17503', '490.92 KB'),
'2007': HmdaDataFile('hmda_2007_ak_originated-records_codes.zip', '21167', '527.65 KB'),
'2017': HmdaDataFile('hmda_2017_ak_originated-records_codes.zip', '14430', '275.46 KB'),
'2015': HmdaDataFile('hmda_2015_ak_originated-records_codes.zip', '16680', '458.98 KB'),
'2014': HmdaDataFile('hmda_2014_ak_originated-records_codes.zip', '14272', '392.19 KB'),
'2008': HmdaDataFile('hmda_2008_ak_originated-records_codes.zip', '17485', '413.46 KB'),
'2009': HmdaDataFile('hmda_2009_ak_originated-records_codes.zip', '24987', '583.23 KB'),
'2011': HmdaDataFile('hmda_2011_ak_originated-records_codes.zip', '19236', '488.12 KB'),
'2010': HmdaDataFile('hmda_2010_ak_originated-records_codes.zip', '20697', '518.32 KB'),
'2013': HmdaDataFile('hmda_2013_ak_originated-records_codes.zip', '20363', '514.87 KB'),
'2012': HmdaDataFile('hmda_2012_ak_originated-records_codes.zip', '24887', '617.44 KB')
}
}
},
'al': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '86522', '4.16 MB'),
'2007': HmdaDataFile('hmda_2007_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '106195', '4.88 MB'),
'2017': HmdaDataFile('hmda_2017_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '80115', '2.33 MB'),
'2015': HmdaDataFile('hmda_2015_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '75252', '4.13 MB'),
'2014': HmdaDataFile('hmda_2014_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '63808', '3.06 MB'),
'2008': HmdaDataFile('hmda_2008_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '89932', '4.1 MB'),
'2009': HmdaDataFile('hmda_2009_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '103427', '4.47 MB'),
'2011': HmdaDataFile('hmda_2011_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '75467', '3.54 MB'),
'2010': HmdaDataFile('hmda_2010_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '87005', '3.98 MB'),
'2013': HmdaDataFile('hmda_2013_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '93236', '4.39 MB'),
'2012': HmdaDataFile('hmda_2012_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '99531', '4.7 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_al_all-records_labels.zip', '226918', '11.95 MB'),
'2007': HmdaDataFile('hmda_2007_al_all-records_labels.zip', '367991', '17.47 MB'),
'2017': HmdaDataFile('hmda_2017_al_all-records_labels.zip', '209002', '7.08 MB'),
'2015': HmdaDataFile('hmda_2015_al_all-records_labels.zip', '205039', '11.99 MB'),
'2014': HmdaDataFile('hmda_2014_al_all-records_labels.zip', '182825', '9.57 MB'),
'2008': HmdaDataFile('hmda_2008_al_all-records_labels.zip', '286567', '13.72 MB'),
'2009': HmdaDataFile('hmda_2009_al_all-records_labels.zip', '294820', '13.87 MB'),
'2011': HmdaDataFile('hmda_2011_al_all-records_labels.zip', '228420', '11.89 MB'),
'2010': HmdaDataFile('hmda_2010_al_all-records_labels.zip', '249347', '12.77 MB'),
'2013': HmdaDataFile('hmda_2013_al_all-records_labels.zip', '253915', '13.16 MB'),
'2012': HmdaDataFile('hmda_2012_al_all-records_labels.zip', '264313', '13.66 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_al_originated-records_labels.zip', '109870', '5.39 MB'),
'2007': HmdaDataFile('hmda_2007_al_originated-records_labels.zip', '153334', '7.1 MB'),
'2017': HmdaDataFile('hmda_2017_al_originated-records_labels.zip', '103096', '3.05 MB'),
'2015': HmdaDataFile('hmda_2015_al_originated-records_labels.zip', '98097', '5.48 MB'),
'2014': HmdaDataFile('hmda_2014_al_originated-records_labels.zip', '85899', '4.21 MB'),
'2008': HmdaDataFile('hmda_2008_al_originated-records_labels.zip', '119306', '5.51 MB'),
'2009': HmdaDataFile('hmda_2009_al_originated-records_labels.zip', '126063', '5.57 MB'),
'2011': HmdaDataFile('hmda_2011_al_originated-records_labels.zip', '97761', '4.65 MB'),
'2010': HmdaDataFile('hmda_2010_al_originated-records_labels.zip', '106706', '4.95 MB'),
'2013': HmdaDataFile('hmda_2013_al_originated-records_labels.zip', '118638', '5.68 MB'),
'2012': HmdaDataFile('hmda_2012_al_originated-records_labels.zip', '123170', '5.93 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '86522', '2.77 MB'),
'2007': HmdaDataFile('hmda_2007_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '106195', '3.34 MB'),
'2017': HmdaDataFile('hmda_2017_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '80115', '1.67 MB'),
'2015': HmdaDataFile('hmda_2015_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '75252', '2.81 MB'),
'2014': HmdaDataFile('hmda_2014_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '63808', '2.06 MB'),
'2008': HmdaDataFile('hmda_2008_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '89932', '2.83 MB'),
'2009': HmdaDataFile('hmda_2009_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '103427', '3.06 MB'),
'2011': HmdaDataFile('hmda_2011_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '75467', '2.35 MB'),
'2010': HmdaDataFile('hmda_2010_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '87005', '2.64 MB'),
'2013': HmdaDataFile('hmda_2013_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '93236', '2.92 MB'),
'2012': HmdaDataFile('hmda_2012_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '99531', '3.15 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_al_all-records_codes.zip', '226918', '7.69 MB'),
'2007': HmdaDataFile('hmda_2007_al_all-records_codes.zip', '367991', '11.49 MB'),
'2017': HmdaDataFile('hmda_2017_al_all-records_codes.zip', '209002', '4.77 MB'),
'2015': HmdaDataFile('hmda_2015_al_all-records_codes.zip', '205039', '7.82 MB'),
'2014': HmdaDataFile('hmda_2014_al_all-records_codes.zip', '182825', '6.25 MB'),
'2008': HmdaDataFile('hmda_2008_al_all-records_codes.zip', '286567', '9.04 MB'),
'2009': HmdaDataFile('hmda_2009_al_all-records_codes.zip', '294820', '9.23 MB'),
'2011': HmdaDataFile('hmda_2011_al_all-records_codes.zip', '228420', '7.78 MB'),
'2010': HmdaDataFile('hmda_2010_al_all-records_codes.zip', '249347', '8.38 MB'),
'2013': HmdaDataFile('hmda_2013_al_all-records_codes.zip', '253915', '8.52 MB'),
'2012': HmdaDataFile('hmda_2012_al_all-records_codes.zip', '264313', '9.06 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_al_originated-records_codes.zip', '109870', '3.56 MB'),
'2007': HmdaDataFile('hmda_2007_al_originated-records_codes.zip', '153334', '4.85 MB'),
'2017': HmdaDataFile('hmda_2017_al_originated-records_codes.zip', '103096', '2.14 MB'),
'2015': HmdaDataFile('hmda_2015_al_originated-records_codes.zip', '98097', '3.71 MB'),
'2014': HmdaDataFile('hmda_2014_al_originated-records_codes.zip', '85899', '2.8 MB'),
'2008': HmdaDataFile('hmda_2008_al_originated-records_codes.zip', '119306', '3.78 MB'),
'2009': HmdaDataFile('hmda_2009_al_originated-records_codes.zip', '126063', '3.8 MB'),
'2011': HmdaDataFile('hmda_2011_al_originated-records_codes.zip', '97761', '3.07 MB'),
'2010': HmdaDataFile('hmda_2010_al_originated-records_codes.zip', '106706', '3.27 MB'),
'2013': HmdaDataFile('hmda_2013_al_originated-records_codes.zip', '118638', '3.76 MB'),
'2012': HmdaDataFile('hmda_2012_al_originated-records_codes.zip', '123170', '3.94 MB')
}
}
},
'ar': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '49025', '2.38 MB'),
'2007': HmdaDataFile('hmda_2007_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '55026', '2.46 MB'),
'2017': HmdaDataFile('hmda_2017_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '45327', '1.49 MB'),
'2015': HmdaDataFile('hmda_2015_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '42845', '2.26 MB'),
'2014': HmdaDataFile('hmda_2014_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '37601', '1.77 MB'),
'2008': HmdaDataFile('hmda_2008_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '49034', '2.14 MB'),
'2009': HmdaDataFile('hmda_2009_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '61531', '2.65 MB'),
'2011': HmdaDataFile('hmda_2011_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '46801', '2.1 MB'),
'2010': HmdaDataFile('hmda_2010_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '54493', '2.37 MB'),
'2013': HmdaDataFile('hmda_2013_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '54893', '2.56 MB'),
'2012': HmdaDataFile('hmda_2012_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '61834', '2.8 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ar_all-records_labels.zip', '131352', '6.97 MB'),
'2007': HmdaDataFile('hmda_2007_ar_all-records_labels.zip', '179146', '8.6 MB'),
'2017': HmdaDataFile('hmda_2017_ar_all-records_labels.zip', '125392', '4.6 MB'),
'2015': HmdaDataFile('hmda_2015_ar_all-records_labels.zip', '118384', '6.72 MB'),
'2014': HmdaDataFile('hmda_2014_ar_all-records_labels.zip', '108526', '5.63 MB'),
'2008': HmdaDataFile('hmda_2008_ar_all-records_labels.zip', '141191', '6.71 MB'),
'2009': HmdaDataFile('hmda_2009_ar_all-records_labels.zip', '159208', '7.41 MB'),
'2011': HmdaDataFile('hmda_2011_ar_all-records_labels.zip', '127757', '6.44 MB'),
'2010': HmdaDataFile('hmda_2010_ar_all-records_labels.zip', '142441', '7.13 MB'),
'2013': HmdaDataFile('hmda_2013_ar_all-records_labels.zip', '146285', '7.61 MB'),
'2012': HmdaDataFile('hmda_2012_ar_all-records_labels.zip', '154830', '7.85 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ar_originated-records_labels.zip', '65762', '3.27 MB'),
'2007': HmdaDataFile('hmda_2007_ar_originated-records_labels.zip', '83327', '3.71 MB'),
'2017': HmdaDataFile('hmda_2017_ar_originated-records_labels.zip', '62259', '2.05 MB'),
'2015': HmdaDataFile('hmda_2015_ar_originated-records_labels.zip', '59384', '3.2 MB'),
'2014': HmdaDataFile('hmda_2014_ar_originated-records_labels.zip', '52994', '2.56 MB'),
'2008': HmdaDataFile('hmda_2008_ar_originated-records_labels.zip', '71395', '3.11 MB'),
'2009': HmdaDataFile('hmda_2009_ar_originated-records_labels.zip', '78016', '3.4 MB'),
'2011': HmdaDataFile('hmda_2011_ar_originated-records_labels.zip', '62549', '2.86 MB'),
'2010': HmdaDataFile('hmda_2010_ar_originated-records_labels.zip', '69315', '3.08 MB'),
'2013': HmdaDataFile('hmda_2013_ar_originated-records_labels.zip', '73125', '3.48 MB'),
'2012': HmdaDataFile('hmda_2012_ar_originated-records_labels.zip', '79283', '3.68 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '49025', '1.56 MB'),
'2007': HmdaDataFile('hmda_2007_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '55026', '1.62 MB'),
'2017': HmdaDataFile('hmda_2017_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '45327', '1.04 MB'),
'2015': HmdaDataFile('hmda_2015_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '42845', '1.51 MB'),
'2014': HmdaDataFile('hmda_2014_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '37601', '1.16 MB'),
'2008': HmdaDataFile('hmda_2008_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '49034', '1.41 MB'),
'2009': HmdaDataFile('hmda_2009_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '61531', '1.81 MB'),
'2011': HmdaDataFile('hmda_2011_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '46801', '1.35 MB'),
'2010': HmdaDataFile('hmda_2010_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '54493', '1.53 MB'),
'2013': HmdaDataFile('hmda_2013_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '54893', '1.65 MB'),
'2012': HmdaDataFile('hmda_2012_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '61834', '1.81 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ar_all-records_codes.zip', '131352', '4.44 MB'),
'2007': HmdaDataFile('hmda_2007_ar_all-records_codes.zip', '179146', '5.43 MB'),
'2017': HmdaDataFile('hmda_2017_ar_all-records_codes.zip', '125392', '3.01 MB'),
'2015': HmdaDataFile('hmda_2015_ar_all-records_codes.zip', '118384', '4.33 MB'),
'2014': HmdaDataFile('hmda_2014_ar_all-records_codes.zip', '108526', '3.58 MB'),
'2008': HmdaDataFile('hmda_2008_ar_all-records_codes.zip', '141191', '4.14 MB'),
'2009': HmdaDataFile('hmda_2009_ar_all-records_codes.zip', '159208', '4.87 MB'),
'2011': HmdaDataFile('hmda_2011_ar_all-records_codes.zip', '127757', '4.13 MB'),
'2010': HmdaDataFile('hmda_2010_ar_all-records_codes.zip', '142441', '4.51 MB'),
'2013': HmdaDataFile('hmda_2013_ar_all-records_codes.zip', '146285', '4.87 MB'),
'2012': HmdaDataFile('hmda_2012_ar_all-records_codes.zip', '154830', '5.04 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ar_originated-records_codes.zip', '65762', '2.1 MB'),
'2007': HmdaDataFile('hmda_2007_ar_originated-records_codes.zip', '83327', '2.43 MB'),
'2017': HmdaDataFile('hmda_2017_ar_originated-records_codes.zip', '62259', '1.41 MB'),
'2015': HmdaDataFile('hmda_2015_ar_originated-records_codes.zip', '59384', '2.11 MB'),
'2014': HmdaDataFile('hmda_2014_ar_originated-records_codes.zip', '52994', '1.65 MB'),
'2008': HmdaDataFile('hmda_2008_ar_originated-records_codes.zip', '71395', '2.03 MB'),
'2009': HmdaDataFile('hmda_2009_ar_originated-records_codes.zip', '78016', '2.3 MB'),
'2011': HmdaDataFile('hmda_2011_ar_originated-records_codes.zip', '62549', '1.82 MB'),
'2010': HmdaDataFile('hmda_2010_ar_originated-records_codes.zip', '69315', '1.96 MB'),
'2013': HmdaDataFile('hmda_2013_ar_originated-records_codes.zip', '73125', '2.21 MB'),
'2012': HmdaDataFile('hmda_2012_ar_originated-records_codes.zip', '79283', '2.34 MB')
}
}
},
'vt': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '11252', '465.67 KB'),
'2007': HmdaDataFile('hmda_2007_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '13430', '571.04 KB'),
'2017': HmdaDataFile('hmda_2017_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '9250', '291.43 KB'),
'2015': HmdaDataFile('hmda_2015_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '10215', '455.27 KB'),
'2014': HmdaDataFile('hmda_2014_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '8361', '346.05 KB'),
'2008': HmdaDataFile('hmda_2008_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '12079', '491.28 KB'),
'2009': HmdaDataFile('hmda_2009_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '21026', '782.79 KB'),
'2011': HmdaDataFile('hmda_2011_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '13512', '578.18 KB'),
'2010': HmdaDataFile('hmda_2010_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '15651', '620.42 KB'),
'2013': HmdaDataFile('hmda_2013_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '14400', '594.85 KB'),
'2012': HmdaDataFile('hmda_2012_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '17914', '703.52 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_vt_all-records_labels.zip', '25971', '1.19 MB'),
'2007': HmdaDataFile('hmda_2007_vt_all-records_labels.zip', '43664', '1.95 MB'),
'2017': HmdaDataFile('hmda_2017_vt_all-records_labels.zip', '22335', '765.5 KB'),
'2015': HmdaDataFile('hmda_2015_vt_all-records_labels.zip', '24028', '1.19 MB'),
'2014': HmdaDataFile('hmda_2014_vt_all-records_labels.zip', '20816', '953.84 KB'),
'2008': HmdaDataFile('hmda_2008_vt_all-records_labels.zip', '33040', '1.44 MB'),
'2009': HmdaDataFile('hmda_2009_vt_all-records_labels.zip', '46532', '1.87 MB'),
'2011': HmdaDataFile('hmda_2011_vt_all-records_labels.zip', '32687', '1.56 MB'),
'2010': HmdaDataFile('hmda_2010_vt_all-records_labels.zip', '36637', '1.61 MB'),
'2013': HmdaDataFile('hmda_2013_vt_all-records_labels.zip', '32790', '1.49 MB'),
'2012': HmdaDataFile('hmda_2012_vt_all-records_labels.zip', '37869', '1.66 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_vt_originated-records_labels.zip', '15176', '645.84 KB'),
'2007': HmdaDataFile('hmda_2007_vt_originated-records_labels.zip', '20755', '882.15 KB'),
'2017': HmdaDataFile('hmda_2017_vt_originated-records_labels.zip', '13325', '425.68 KB'),
'2015': HmdaDataFile('hmda_2015_vt_originated-records_labels.zip', '14053', '645.96 KB'),
'2014': HmdaDataFile('hmda_2014_vt_originated-records_labels.zip', '11893', '504.52 KB'),
'2008': HmdaDataFile('hmda_2008_vt_originated-records_labels.zip', '17432', '722.72 KB'),
'2009': HmdaDataFile('hmda_2009_vt_originated-records_labels.zip', '25699', '980.48 KB'),
'2011': HmdaDataFile('hmda_2011_vt_originated-records_labels.zip', '17791', '786.1 KB'),
'2010': HmdaDataFile('hmda_2010_vt_originated-records_labels.zip', '19808', '804.9 KB'),
'2013': HmdaDataFile('hmda_2013_vt_originated-records_labels.zip', '19293', '814.7 KB'),
'2012': HmdaDataFile('hmda_2012_vt_originated-records_labels.zip', '22745', '914.05 KB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '11252', '290.62 KB'),
'2007': HmdaDataFile('hmda_2007_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '13430', '365.86 KB'),
'2017': HmdaDataFile('hmda_2017_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '9250', '198.27 KB'),
'2015': HmdaDataFile('hmda_2015_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '10215', '285.97 KB'),
'2014': HmdaDataFile('hmda_2014_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '8361', '216.87 KB'),
'2008': HmdaDataFile('hmda_2008_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '12079', '314.55 KB'),
'2009': HmdaDataFile('hmda_2009_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '21026', '510.88 KB'),
'2011': HmdaDataFile('hmda_2011_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '13512', '361.01 KB'),
'2010': HmdaDataFile('hmda_2010_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '15651', '386.89 KB'),
'2013': HmdaDataFile('hmda_2013_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '14400', '367.68 KB'),
'2012': HmdaDataFile('hmda_2012_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '17914', '433.81 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_vt_all-records_codes.zip', '25971', '716.41 KB'),
'2007': HmdaDataFile('hmda_2007_vt_all-records_codes.zip', '43664', '1.22 MB'),
'2017': HmdaDataFile('hmda_2017_vt_all-records_codes.zip', '22335', '486.85 KB'),
'2015': HmdaDataFile('hmda_2015_vt_all-records_codes.zip', '24028', '719.81 KB'),
'2014': HmdaDataFile('hmda_2014_vt_all-records_codes.zip', '20816', '576.12 KB'),
'2008': HmdaDataFile('hmda_2008_vt_all-records_codes.zip', '33040', '900.48 KB'),
'2009': HmdaDataFile('hmda_2009_vt_all-records_codes.zip', '46532', '1.2 MB'),
'2011': HmdaDataFile('hmda_2011_vt_all-records_codes.zip', '32687', '942.34 KB'),
'2010': HmdaDataFile('hmda_2010_vt_all-records_codes.zip', '36637', '971.16 KB'),
'2013': HmdaDataFile('hmda_2013_vt_all-records_codes.zip', '32790', '897.96 KB'),
'2012': HmdaDataFile('hmda_2012_vt_all-records_codes.zip', '37869', '998.22 KB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_vt_originated-records_codes.zip', '15176', '399.34 KB'),
'2007': HmdaDataFile('hmda_2007_vt_originated-records_codes.zip', '20755', '567.25 KB'),
'2017': HmdaDataFile('hmda_2017_vt_originated-records_codes.zip', '13325', '283.84 KB'),
'2015': HmdaDataFile('hmda_2015_vt_originated-records_codes.zip', '14053', '403.29 KB'),
'2014': HmdaDataFile('hmda_2014_vt_originated-records_codes.zip', '11893', '311.95 KB'),
'2008': HmdaDataFile('hmda_2008_vt_originated-records_codes.zip', '17432', '462.77 KB'),
'2009': HmdaDataFile('hmda_2009_vt_originated-records_codes.zip', '25699', '638.61 KB'),
'2011': HmdaDataFile('hmda_2011_vt_originated-records_codes.zip', '17791', '486.47 KB'),
'2010': HmdaDataFile('hmda_2010_vt_originated-records_codes.zip', '19808', '497.91 KB'),
'2013': HmdaDataFile('hmda_2013_vt_originated-records_codes.zip', '19293', '501.93 KB'),
'2012': HmdaDataFile('hmda_2012_vt_originated-records_codes.zip', '22745', '561.59 KB')
}
}
},
'il': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '278035', '14.02 MB'),
'2007': HmdaDataFile('hmda_2007_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '346807', '16.76 MB'),
'2017': HmdaDataFile('hmda_2017_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '225445', '7.03 MB'),
'2015': HmdaDataFile('hmda_2015_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '245110', '13.66 MB'),
'2014': HmdaDataFile('hmda_2014_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '193991', '9.89 MB'),
'2008': HmdaDataFile('hmda_2008_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '278003', '13.04 MB'),
'2009': HmdaDataFile('hmda_2009_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '396141', '17.05 MB'),
'2011': HmdaDataFile('hmda_2011_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '282861', '13.82 MB'),
'2010': HmdaDataFile('hmda_2010_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '343779', '16.76 MB'),
'2013': HmdaDataFile('hmda_2013_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '295909', '14.97 MB'),
'2012': HmdaDataFile('hmda_2012_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '378550', '18.82 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_il_all-records_labels.zip', '583019', '31.48 MB'),
'2007': HmdaDataFile('hmda_2007_il_all-records_labels.zip', '1117310', '53.26 MB'),
'2017': HmdaDataFile('hmda_2017_il_all-records_labels.zip', '502511', '18.63 MB'),
'2015': HmdaDataFile('hmda_2015_il_all-records_labels.zip', '517360', '30.86 MB'),
'2014': HmdaDataFile('hmda_2014_il_all-records_labels.zip', '437239', '23.79 MB'),
'2008': HmdaDataFile('hmda_2008_il_all-records_labels.zip', '761632', '36.35 MB'),
'2009': HmdaDataFile('hmda_2009_il_all-records_labels.zip', '849782', '37.88 MB'),
'2011': HmdaDataFile('hmda_2011_il_all-records_labels.zip', '620832', '33.1 MB'),
'2010': HmdaDataFile('hmda_2010_il_all-records_labels.zip', '716356', '37.81 MB'),
'2013': HmdaDataFile('hmda_2013_il_all-records_labels.zip', '637258', '34.91 MB'),
'2012': HmdaDataFile('hmda_2012_il_all-records_labels.zip', '754118', '40.71 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_il_originated-records_labels.zip', '317597', '16.17 MB'),
'2007': HmdaDataFile('hmda_2007_il_originated-records_labels.zip', '470592', '22.68 MB'),
'2017': HmdaDataFile('hmda_2017_il_originated-records_labels.zip', '265490', '8.51 MB'),
'2015': HmdaDataFile('hmda_2015_il_originated-records_labels.zip', '284551', '16.06 MB'),
'2014': HmdaDataFile('hmda_2014_il_originated-records_labels.zip', '232557', '11.98 MB'),
'2008': HmdaDataFile('hmda_2008_il_originated-records_labels.zip', '339543', '16.11 MB'),
'2009': HmdaDataFile('hmda_2009_il_originated-records_labels.zip', '432707', '18.86 MB'),
'2011': HmdaDataFile('hmda_2011_il_originated-records_labels.zip', '319004', '15.78 MB'),
'2010': HmdaDataFile('hmda_2010_il_originated-records_labels.zip', '378335', '18.65 MB'),
'2013': HmdaDataFile('hmda_2013_il_originated-records_labels.zip', '344172', '17.61 MB'),
'2012': HmdaDataFile('hmda_2012_il_originated-records_labels.zip', '424748', '21.38 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '278035', '10.15 MB'),
'2007': HmdaDataFile('hmda_2007_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '346807', '12.31 MB'),
'2017': HmdaDataFile('hmda_2017_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '225445', '5.06 MB'),
'2015': HmdaDataFile('hmda_2015_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '245110', '9.83 MB'),
'2014': HmdaDataFile('hmda_2014_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '193991', '7.12 MB'),
'2008': HmdaDataFile('hmda_2008_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '278003', '9.54 MB'),
'2009': HmdaDataFile('hmda_2009_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '396141', '12.55 MB'),
'2011': HmdaDataFile('hmda_2011_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '282861', '9.82 MB'),
'2010': HmdaDataFile('hmda_2010_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '343779', '11.91 MB'),
'2013': HmdaDataFile('hmda_2013_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '295909', '10.83 MB'),
'2012': HmdaDataFile('hmda_2012_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '378550', '13.52 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_il_all-records_codes.zip', '583019', '22.25 MB'),
'2007': HmdaDataFile('hmda_2007_il_all-records_codes.zip', '1117310', '37.59 MB'),
'2017': HmdaDataFile('hmda_2017_il_all-records_codes.zip', '502511', '12.69 MB'),
'2015': HmdaDataFile('hmda_2015_il_all-records_codes.zip', '517360', '21.34 MB'),
'2014': HmdaDataFile('hmda_2014_il_all-records_codes.zip', '437239', '16.64 MB'),
'2008': HmdaDataFile('hmda_2008_il_all-records_codes.zip', '761632', '25.64 MB'),
'2009': HmdaDataFile('hmda_2009_il_all-records_codes.zip', '849782', '26.91 MB'),
'2011': HmdaDataFile('hmda_2011_il_all-records_codes.zip', '620832', '22.91 MB'),
'2010': HmdaDataFile('hmda_2010_il_all-records_codes.zip', '716356', '26.12 MB'),
'2013': HmdaDataFile('hmda_2013_il_all-records_codes.zip', '637258', '24.68 MB'),
'2012': HmdaDataFile('hmda_2012_il_all-records_codes.zip', '754118', '28.69 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_il_originated-records_codes.zip', '317597', '11.62 MB'),
'2007': HmdaDataFile('hmda_2007_il_originated-records_codes.zip', '470592', '16.53 MB'),
'2017': HmdaDataFile('hmda_2017_il_originated-records_codes.zip', '265490', '6.03 MB'),
'2015': HmdaDataFile('hmda_2015_il_originated-records_codes.zip', '284551', '11.44 MB'),
'2014': HmdaDataFile('hmda_2014_il_originated-records_codes.zip', '232557', '8.54 MB'),
'2008': HmdaDataFile('hmda_2008_il_originated-records_codes.zip', '339543', '11.68 MB'),
'2009': HmdaDataFile('hmda_2009_il_originated-records_codes.zip', '432707', '13.79 MB'),
'2011': HmdaDataFile('hmda_2011_il_originated-records_codes.zip', '319004', '11.14 MB'),
'2010': HmdaDataFile('hmda_2010_il_originated-records_codes.zip', '378335', '13.17 MB'),
'2013': HmdaDataFile('hmda_2013_il_originated-records_codes.zip', '344172', '12.64 MB'),
'2012': HmdaDataFile('hmda_2012_il_originated-records_codes.zip', '424748', '15.26 MB')
}
}
},
'ga': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '225258', '11.78 MB'),
'2007': HmdaDataFile('hmda_2007_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '236346', '11.17 MB'),
'2017': HmdaDataFile('hmda_2017_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '203948', '6.37 MB'),
'2015': HmdaDataFile('hmda_2015_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '193285', '11.11 MB'),
'2014': HmdaDataFile('hmda_2014_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '147432', '7.81 MB'),
'2008': HmdaDataFile('hmda_2008_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '179658', '8.42 MB'),
'2009': HmdaDataFile('hmda_2009_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '220141', '9.84 MB'),
'2011': HmdaDataFile('hmda_2011_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '165891', '8.32 MB'),
'2010': HmdaDataFile('hmda_2010_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '171421', '8.49 MB'),
'2013': HmdaDataFile('hmda_2013_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '210048', '10.77 MB'),
'2012': HmdaDataFile('hmda_2012_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '229259', '11.67 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ga_all-records_labels.zip', '547637', '30.4 MB'),
'2007': HmdaDataFile('hmda_2007_ga_all-records_labels.zip', '899812', '43.49 MB'),
'2017': HmdaDataFile('hmda_2017_ga_all-records_labels.zip', '501310', '18.01 MB'),
'2015': HmdaDataFile('hmda_2015_ga_all-records_labels.zip', '478359', '29.23 MB'),
'2014': HmdaDataFile('hmda_2014_ga_all-records_labels.zip', '391231', '22.06 MB'),
'2008': HmdaDataFile('hmda_2008_ga_all-records_labels.zip', '583802', '28.24 MB'),
'2009': HmdaDataFile('hmda_2009_ga_all-records_labels.zip', '612188', '28.44 MB'),
'2011': HmdaDataFile('hmda_2011_ga_all-records_labels.zip', '444258', '24.51 MB'),
'2010': HmdaDataFile('hmda_2010_ga_all-records_labels.zip', '466839', '25.16 MB'),
'2013': HmdaDataFile('hmda_2013_ga_all-records_labels.zip', '537898', '29.53 MB'),
'2012': HmdaDataFile('hmda_2012_ga_all-records_labels.zip', '559464', '30.51 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ga_originated-records_labels.zip', '264802', '14.07 MB'),
'2007': HmdaDataFile('hmda_2007_ga_originated-records_labels.zip', '352181', '16.55 MB'),
'2017': HmdaDataFile('hmda_2017_ga_originated-records_labels.zip', '244731', '7.97 MB'),
'2015': HmdaDataFile('hmda_2015_ga_originated-records_labels.zip', '232822', '13.6 MB'),
'2014': HmdaDataFile('hmda_2014_ga_originated-records_labels.zip', '185375', '10 MB'),
'2008': HmdaDataFile('hmda_2008_ga_originated-records_labels.zip', '244230', '11.53 MB'),
'2009': HmdaDataFile('hmda_2009_ga_originated-records_labels.zip', '261989', '11.88 MB'),
'2011': HmdaDataFile('hmda_2011_ga_originated-records_labels.zip', '204582', '10.57 MB'),
'2010': HmdaDataFile('hmda_2010_ga_originated-records_labels.zip', '208728', '10.42 MB'),
'2013': HmdaDataFile('hmda_2013_ga_originated-records_labels.zip', '262544', '13.64 MB'),
'2012': HmdaDataFile('hmda_2012_ga_originated-records_labels.zip', '277607', '14.27 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '225258', '8.23 MB'),
'2007': HmdaDataFile('hmda_2007_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '236346', '7.88 MB'),
'2017': HmdaDataFile('hmda_2017_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '203948', '4.43 MB'),
'2015': HmdaDataFile('hmda_2015_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '193285', '7.71 MB'),
'2014': HmdaDataFile('hmda_2014_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '147432', '5.46 MB'),
'2008': HmdaDataFile('hmda_2008_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '179658', '5.94 MB'),
'2009': HmdaDataFile('hmda_2009_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '220141', '7.02 MB'),
'2011': HmdaDataFile('hmda_2011_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '165891', '5.63 MB'),
'2010': HmdaDataFile('hmda_2010_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '171421', '5.76 MB'),
'2013': HmdaDataFile('hmda_2013_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '210048', '7.54 MB'),
'2012': HmdaDataFile('hmda_2012_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '229259', '8.16 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ga_all-records_codes.zip', '547637', '20.69 MB'),
'2007': HmdaDataFile('hmda_2007_ga_all-records_codes.zip', '899812', '29.71 MB'),
'2017': HmdaDataFile('hmda_2017_ga_all-records_codes.zip', '501310', '11.97 MB'),
'2015': HmdaDataFile('hmda_2015_ga_all-records_codes.zip', '478359', '19.47 MB'),
'2014': HmdaDataFile('hmda_2014_ga_all-records_codes.zip', '391231', '14.94 MB'),
'2008': HmdaDataFile('hmda_2008_ga_all-records_codes.zip', '583802', '19.31 MB'),
'2009': HmdaDataFile('hmda_2009_ga_all-records_codes.zip', '612188', '19.6 MB'),
'2011': HmdaDataFile('hmda_2011_ga_all-records_codes.zip', '444258', '16.2 MB'),
'2010': HmdaDataFile('hmda_2010_ga_all-records_codes.zip', '466839', '16.61 MB'),
'2013': HmdaDataFile('hmda_2013_ga_all-records_codes.zip', '537898', '20.11 MB'),
'2012': HmdaDataFile('hmda_2012_ga_all-records_codes.zip', '559464', '20.8 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ga_originated-records_codes.zip', '264802', '9.77 MB'),
'2007': HmdaDataFile('hmda_2007_ga_originated-records_codes.zip', '352181', '11.59 MB'),
'2017': HmdaDataFile('hmda_2017_ga_originated-records_codes.zip', '244731', '5.49 MB'),
'2015': HmdaDataFile('hmda_2015_ga_originated-records_codes.zip', '232822', '9.35 MB'),
'2014': HmdaDataFile('hmda_2014_ga_originated-records_codes.zip', '185375', '6.91 MB'),
'2008': HmdaDataFile('hmda_2008_ga_originated-records_codes.zip', '244230', '8.04 MB'),
'2009': HmdaDataFile('hmda_2009_ga_originated-records_codes.zip', '261989', '8.4 MB'),
'2011': HmdaDataFile('hmda_2011_ga_originated-records_codes.zip', '204582', '7.11 MB'),
'2010': HmdaDataFile('hmda_2010_ga_originated-records_codes.zip', '208728', '7.02 MB'),
'2013': HmdaDataFile('hmda_2013_ga_originated-records_codes.zip', '262544', '9.46 MB'),
'2012': HmdaDataFile('hmda_2012_ga_originated-records_codes.zip', '277607', '9.89 MB')
}
}
},
'in': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '149979', '7.55 MB'),
'2007': HmdaDataFile('hmda_2007_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '145394', '6.8 MB'),
'2017': HmdaDataFile('hmda_2017_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '135246', '4.03 MB'),
'2015': HmdaDataFile('hmda_2015_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '134522', '7.59 MB'),
'2014': HmdaDataFile('hmda_2014_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '108789', '5.36 MB'),
'2008': HmdaDataFile('hmda_2008_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '124555', '5.73 MB'),
'2009': HmdaDataFile('hmda_2009_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '187145', '8.03 MB'),
'2011': HmdaDataFile('hmda_2011_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '141707', '6.73 MB'),
'2010': HmdaDataFile('hmda_2010_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '161225', '7.29 MB'),
'2013': HmdaDataFile('hmda_2013_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '159649', '7.66 MB'),
'2012': HmdaDataFile('hmda_2012_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '188614', '8.83 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_in_all-records_labels.zip', '319123', '17.64 MB'),
'2007': HmdaDataFile('hmda_2007_in_all-records_labels.zip', '474561', '24.38 MB'),
'2017': HmdaDataFile('hmda_2017_in_all-records_labels.zip', '292152', '10.14 MB'),
'2015': HmdaDataFile('hmda_2015_in_all-records_labels.zip', '288746', '17.61 MB'),
'2014': HmdaDataFile('hmda_2014_in_all-records_labels.zip', '248347', '13.51 MB'),
'2008': HmdaDataFile('hmda_2008_in_all-records_labels.zip', '348681', '17.43 MB'),
'2009': HmdaDataFile('hmda_2009_in_all-records_labels.zip', '421392', '19.89 MB'),
'2011': HmdaDataFile('hmda_2011_in_all-records_labels.zip', '322061', '17.24 MB'),
'2010': HmdaDataFile('hmda_2010_in_all-records_labels.zip', '359860', '18.65 MB'),
'2013': HmdaDataFile('hmda_2013_in_all-records_labels.zip', '344116', '18.48 MB'),
'2012': HmdaDataFile('hmda_2012_in_all-records_labels.zip', '385267', '20.36 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_in_originated-records_labels.zip', '172307', '8.79 MB'),
'2007': HmdaDataFile('hmda_2007_in_originated-records_labels.zip', '199213', '9.38 MB'),
'2017': HmdaDataFile('hmda_2017_in_originated-records_labels.zip', '158693', '4.9 MB'),
'2015': HmdaDataFile('hmda_2015_in_originated-records_labels.zip', '156109', '8.92 MB'),
'2014': HmdaDataFile('hmda_2014_in_originated-records_labels.zip', '130131', '6.52 MB'),
'2008': HmdaDataFile('hmda_2008_in_originated-records_labels.zip', '155308', '7.25 MB'),
'2009': HmdaDataFile('hmda_2009_in_originated-records_labels.zip', '207593', '9.02 MB'),
'2011': HmdaDataFile('hmda_2011_in_originated-records_labels.zip', '160424', '7.77 MB'),
'2010': HmdaDataFile('hmda_2010_in_originated-records_labels.zip', '179820', '8.27 MB'),
'2013': HmdaDataFile('hmda_2013_in_originated-records_labels.zip', '184428', '8.97 MB'),
'2012': HmdaDataFile('hmda_2012_in_originated-records_labels.zip', '210891', '10.05 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '149979', '5.05 MB'),
'2007': HmdaDataFile('hmda_2007_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '145394', '4.63 MB'),
'2017': HmdaDataFile('hmda_2017_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '135246', '2.85 MB'),
'2015': HmdaDataFile('hmda_2015_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '134522', '5.11 MB'),
'2014': HmdaDataFile('hmda_2014_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '108789', '3.62 MB'),
'2008': HmdaDataFile('hmda_2008_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '124555', '3.9 MB'),
'2009': HmdaDataFile('hmda_2009_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '187145', '5.67 MB'),
'2011': HmdaDataFile('hmda_2011_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '141707', '4.43 MB'),
'2010': HmdaDataFile('hmda_2010_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '161225', '4.82 MB'),
'2013': HmdaDataFile('hmda_2013_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '159649', '5.11 MB'),
'2012': HmdaDataFile('hmda_2012_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '188614', '5.87 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_in_all-records_codes.zip', '319123', '11.54 MB'),
'2007': HmdaDataFile('hmda_2007_in_all-records_codes.zip', '474561', '16.44 MB'),
'2017': HmdaDataFile('hmda_2017_in_all-records_codes.zip', '292152', '6.68 MB'),
'2015': HmdaDataFile('hmda_2015_in_all-records_codes.zip', '288746', '11.46 MB'),
'2014': HmdaDataFile('hmda_2014_in_all-records_codes.zip', '248347', '8.9 MB'),
'2008': HmdaDataFile('hmda_2008_in_all-records_codes.zip', '348681', '11.68 MB'),
'2009': HmdaDataFile('hmda_2009_in_all-records_codes.zip', '421392', '13.64 MB'),
'2011': HmdaDataFile('hmda_2011_in_all-records_codes.zip', '322061', '11.1 MB'),
'2010': HmdaDataFile('hmda_2010_in_all-records_codes.zip', '359860', '12.06 MB'),
'2013': HmdaDataFile('hmda_2013_in_all-records_codes.zip', '344116', '12.12 MB'),
'2012': HmdaDataFile('hmda_2012_in_all-records_codes.zip', '385267', '13.31 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_in_originated-records_codes.zip', '172307', '5.84 MB'),
'2007': HmdaDataFile('hmda_2007_in_originated-records_codes.zip', '199213', '6.61 MB'),
'2017': HmdaDataFile('hmda_2017_in_originated-records_codes.zip', '158693', '3.43 MB'),
'2015': HmdaDataFile('hmda_2015_in_originated-records_codes.zip', '156109', '5.98 MB'),
'2014': HmdaDataFile('hmda_2014_in_originated-records_codes.zip', '130131', '4.37 MB'),
'2008': HmdaDataFile('hmda_2008_in_originated-records_codes.zip', '155308', '4.94 MB'),
'2009': HmdaDataFile('hmda_2009_in_originated-records_codes.zip', '207593', '6.34 MB'),
'2011': HmdaDataFile('hmda_2011_in_originated-records_codes.zip', '160424', '5.09 MB'),
'2010': HmdaDataFile('hmda_2010_in_originated-records_codes.zip', '179820', '5.44 MB'),
'2013': HmdaDataFile('hmda_2013_in_originated-records_codes.zip', '184428', '5.94 MB'),
'2012': HmdaDataFile('hmda_2012_in_originated-records_codes.zip', '210891', '6.64 MB')
}
}
},
'ia': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '71680', '3.32 MB'),
'2007': HmdaDataFile('hmda_2007_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '65703', '3.02 MB'),
'2017': HmdaDataFile('hmda_2017_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '60347', '1.83 MB'),
'2015': HmdaDataFile('hmda_2015_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '63773', '3.32 MB'),
'2014': HmdaDataFile('hmda_2014_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '52784', '2.39 MB'),
'2008': HmdaDataFile('hmda_2008_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '60097', '2.72 MB'),
'2009': HmdaDataFile('hmda_2009_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '96858', '3.85 MB'),
'2011': HmdaDataFile('hmda_2011_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '72565', '3.17 MB'),
'2010': HmdaDataFile('hmda_2010_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '84923', '3.79 MB'),
'2013': HmdaDataFile('hmda_2013_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '76061', '3.38 MB'),
'2012': HmdaDataFile('hmda_2012_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '93906', '4.12 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ia_all-records_labels.zip', '149227', '7.39 MB'),
'2007': HmdaDataFile('hmda_2007_ia_all-records_labels.zip', '197991', '9.31 MB'),
'2017': HmdaDataFile('hmda_2017_ia_all-records_labels.zip', '127446', '4.12 MB'),
'2015': HmdaDataFile('hmda_2015_ia_all-records_labels.zip', '136795', '7.68 MB'),
'2014': HmdaDataFile('hmda_2014_ia_all-records_labels.zip', '115594', '5.62 MB'),
'2008': HmdaDataFile('hmda_2008_ia_all-records_labels.zip', '157339', '7.52 MB'),
'2009': HmdaDataFile('hmda_2009_ia_all-records_labels.zip', '200497', '8.54 MB'),
'2011': HmdaDataFile('hmda_2011_ia_all-records_labels.zip', '150683', '7.18 MB'),
'2010': HmdaDataFile('hmda_2010_ia_all-records_labels.zip', '172100', '8.41 MB'),
'2013': HmdaDataFile('hmda_2013_ia_all-records_labels.zip', '160707', '7.67 MB'),
'2012': HmdaDataFile('hmda_2012_ia_all-records_labels.zip', '181237', '8.53 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ia_originated-records_labels.zip', '90932', '4.27 MB'),
'2007': HmdaDataFile('hmda_2007_ia_originated-records_labels.zip', '93343', '4.31 MB'),
'2017': HmdaDataFile('hmda_2017_ia_originated-records_labels.zip', '79977', '2.43 MB'),
'2015': HmdaDataFile('hmda_2015_ia_originated-records_labels.zip', '83214', '4.37 MB'),
'2014': HmdaDataFile('hmda_2014_ia_originated-records_labels.zip', '71413', '3.26 MB'),
'2008': HmdaDataFile('hmda_2008_ia_originated-records_labels.zip', '78965', '3.63 MB'),
'2009': HmdaDataFile('hmda_2009_ia_originated-records_labels.zip', '112522', '4.57 MB'),
'2011': HmdaDataFile('hmda_2011_ia_originated-records_labels.zip', '87178', '3.88 MB'),
'2010': HmdaDataFile('hmda_2010_ia_originated-records_labels.zip', '100132', '4.56 MB'),
'2013': HmdaDataFile('hmda_2013_ia_originated-records_labels.zip', '95886', '4.34 MB'),
'2012': HmdaDataFile('hmda_2012_ia_originated-records_labels.zip', '111610', '4.98 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '71680', '2.17 MB'),
'2007': HmdaDataFile('hmda_2007_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '65703', '2.02 MB'),
'2017': HmdaDataFile('hmda_2017_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '60347', '1.29 MB'),
'2015': HmdaDataFile('hmda_2015_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '63773', '2.2 MB'),
'2014': HmdaDataFile('hmda_2014_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '52784', '1.56 MB'),
'2008': HmdaDataFile('hmda_2008_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '60097', '1.82 MB'),
'2009': HmdaDataFile('hmda_2009_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '96858', '2.64 MB'),
'2011': HmdaDataFile('hmda_2011_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '72565', '2.05 MB'),
'2010': HmdaDataFile('hmda_2010_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '84923', '2.46 MB'),
'2013': HmdaDataFile('hmda_2013_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '76061', '2.18 MB'),
'2012': HmdaDataFile('hmda_2012_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '93906', '2.66 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ia_all-records_codes.zip', '149227', '4.65 MB'),
'2007': HmdaDataFile('hmda_2007_ia_all-records_codes.zip', '197991', '6.1 MB'),
'2017': HmdaDataFile('hmda_2017_ia_all-records_codes.zip', '127446', '2.7 MB'),
'2015': HmdaDataFile('hmda_2015_ia_all-records_codes.zip', '136795', '4.9 MB'),
'2014': HmdaDataFile('hmda_2014_ia_all-records_codes.zip', '115594', '3.51 MB'),
'2008': HmdaDataFile('hmda_2008_ia_all-records_codes.zip', '157339', '4.92 MB'),
'2009': HmdaDataFile('hmda_2009_ia_all-records_codes.zip', '200497', '5.67 MB'),
'2011': HmdaDataFile('hmda_2011_ia_all-records_codes.zip', '150683', '4.49 MB'),
'2010': HmdaDataFile('hmda_2010_ia_all-records_codes.zip', '172100', '5.28 MB'),
'2013': HmdaDataFile('hmda_2013_ia_all-records_codes.zip', '160707', '4.75 MB'),
'2012': HmdaDataFile('hmda_2012_ia_all-records_codes.zip', '181237', '5.31 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ia_originated-records_codes.zip', '90932', '2.74 MB'),
'2007': HmdaDataFile('hmda_2007_ia_originated-records_codes.zip', '93343', '2.86 MB'),
'2017': HmdaDataFile('hmda_2017_ia_originated-records_codes.zip', '79977', '1.67 MB'),
'2015': HmdaDataFile('hmda_2015_ia_originated-records_codes.zip', '83214', '2.85 MB'),
'2014': HmdaDataFile('hmda_2014_ia_originated-records_codes.zip', '71413', '2.08 MB'),
'2008': HmdaDataFile('hmda_2008_ia_originated-records_codes.zip', '78965', '2.41 MB'),
'2009': HmdaDataFile('hmda_2009_ia_originated-records_codes.zip', '112522', '3.11 MB'),
'2011': HmdaDataFile('hmda_2011_ia_originated-records_codes.zip', '87178', '2.48 MB'),
'2010': HmdaDataFile('hmda_2010_ia_originated-records_codes.zip', '100132', '2.93 MB'),
'2013': HmdaDataFile('hmda_2013_ia_originated-records_codes.zip', '95886', '2.76 MB'),
'2012': HmdaDataFile('hmda_2012_ia_originated-records_codes.zip', '111610', '3.18 MB')
}
}
},
'az': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '201543', '9.98 MB'),
'2007': HmdaDataFile('hmda_2007_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '198791', '8.9 MB'),
'2017': HmdaDataFile('hmda_2017_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '177849', '5.45 MB'),
'2015': HmdaDataFile('hmda_2015_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '166410', '8.87 MB'),
'2014': HmdaDataFile('hmda_2014_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '124580', '6.3 MB'),
'2008': HmdaDataFile('hmda_2008_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '129343', '5.73 MB'),
'2009': HmdaDataFile('hmda_2009_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '161393', '6.71 MB'),
'2011': HmdaDataFile('hmda_2011_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '114583', '5.41 MB'),
'2010': HmdaDataFile('hmda_2010_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '132138', '6.21 MB'),
'2013': HmdaDataFile('hmda_2013_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '176268', '8.6 MB'),
'2012': HmdaDataFile('hmda_2012_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '197491', '9.59 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_az_all-records_labels.zip', '478386', '24.75 MB'),
'2007': HmdaDataFile('hmda_2007_az_all-records_labels.zip', '803675', '35.11 MB'),
'2017': HmdaDataFile('hmda_2017_az_all-records_labels.zip', '428411', '14.46 MB'),
'2015': HmdaDataFile('hmda_2015_az_all-records_labels.zip', '391879', '22 MB'),
'2014': HmdaDataFile('hmda_2014_az_all-records_labels.zip', '317345', '16.74 MB'),
'2008': HmdaDataFile('hmda_2008_az_all-records_labels.zip', '425680', '19.23 MB'),
'2009': HmdaDataFile('hmda_2009_az_all-records_labels.zip', '441291', '18.88 MB'),
'2011': HmdaDataFile('hmda_2011_az_all-records_labels.zip', '313348', '16.05 MB'),
'2010': HmdaDataFile('hmda_2010_az_all-records_labels.zip', '350571', '17.93 MB'),
'2013': HmdaDataFile('hmda_2013_az_all-records_labels.zip', '428383', '22.29 MB'),
'2012': HmdaDataFile('hmda_2012_az_all-records_labels.zip', '458365', '23.72 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_az_originated-records_labels.zip', '236688', '11.85 MB'),
'2007': HmdaDataFile('hmda_2007_az_originated-records_labels.zip', '292272', '13.05 MB'),
'2017': HmdaDataFile('hmda_2017_az_originated-records_labels.zip', '213547', '6.63 MB'),
'2015': HmdaDataFile('hmda_2015_az_originated-records_labels.zip', '198908', '10.8 MB'),
'2014': HmdaDataFile('hmda_2014_az_originated-records_labels.zip', '155001', '7.95 MB'),
'2008': HmdaDataFile('hmda_2008_az_originated-records_labels.zip', '165011', '7.37 MB'),
'2009': HmdaDataFile('hmda_2009_az_originated-records_labels.zip', '190609', '7.98 MB'),
'2011': HmdaDataFile('hmda_2011_az_originated-records_labels.zip', '146231', '7.09 MB'),
'2010': HmdaDataFile('hmda_2010_az_originated-records_labels.zip', '160055', '7.73 MB'),
'2013': HmdaDataFile('hmda_2013_az_originated-records_labels.zip', '224986', '11.14 MB'),
'2012': HmdaDataFile('hmda_2012_az_originated-records_labels.zip', '247572', '12.19 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '201543', '7.26 MB'),
'2007': HmdaDataFile('hmda_2007_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '198791', '6.42 MB'),
'2017': HmdaDataFile('hmda_2017_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '177849', '3.82 MB'),
'2015': HmdaDataFile('hmda_2015_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '166410', '6.44 MB'),
'2014': HmdaDataFile('hmda_2014_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '124580', '4.59 MB'),
'2008': HmdaDataFile('hmda_2008_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '129343', '4.15 MB'),
'2009': HmdaDataFile('hmda_2009_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '161393', '4.89 MB'),
'2011': HmdaDataFile('hmda_2011_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '114583', '3.79 MB'),
'2010': HmdaDataFile('hmda_2010_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '132138', '4.33 MB'),
'2013': HmdaDataFile('hmda_2013_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '176268', '6.24 MB'),
'2012': HmdaDataFile('hmda_2012_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '197491', '6.92 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_az_all-records_codes.zip', '478386', '17.45 MB'),
'2007': HmdaDataFile('hmda_2007_az_all-records_codes.zip', '803675', '24.35 MB'),
'2017': HmdaDataFile('hmda_2017_az_all-records_codes.zip', '428411', '9.68 MB'),
'2015': HmdaDataFile('hmda_2015_az_all-records_codes.zip', '391879', '15.26 MB'),
'2014': HmdaDataFile('hmda_2014_az_all-records_codes.zip', '317345', '11.78 MB'),
'2008': HmdaDataFile('hmda_2008_az_all-records_codes.zip', '425680', '13.42 MB'),
'2009': HmdaDataFile('hmda_2009_az_all-records_codes.zip', '441291', '13.49 MB'),
'2011': HmdaDataFile('hmda_2011_az_all-records_codes.zip', '313348', '10.91 MB'),
'2010': HmdaDataFile('hmda_2010_az_all-records_codes.zip', '350571', '12.17 MB'),
'2013': HmdaDataFile('hmda_2013_az_all-records_codes.zip', '428383', '15.68 MB'),
'2012': HmdaDataFile('hmda_2012_az_all-records_codes.zip', '458365', '16.67 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_az_originated-records_codes.zip', '236688', '8.56 MB'),
'2007': HmdaDataFile('hmda_2007_az_originated-records_codes.zip', '292272', '9.39 MB'),
'2017': HmdaDataFile('hmda_2017_az_originated-records_codes.zip', '213547', '4.61 MB'),
'2015': HmdaDataFile('hmda_2015_az_originated-records_codes.zip', '198908', '7.79 MB'),
'2014': HmdaDataFile('hmda_2014_az_originated-records_codes.zip', '155001', '5.74 MB'),
'2008': HmdaDataFile('hmda_2008_az_originated-records_codes.zip', '165011', '5.32 MB'),
'2009': HmdaDataFile('hmda_2009_az_originated-records_codes.zip', '190609', '5.81 MB'),
'2011': HmdaDataFile('hmda_2011_az_originated-records_codes.zip', '146231', '4.94 MB'),
'2010': HmdaDataFile('hmda_2010_az_originated-records_codes.zip', '160055', '5.38 MB'),
'2013': HmdaDataFile('hmda_2013_az_originated-records_codes.zip', '224986', '8.02 MB'),
'2012': HmdaDataFile('hmda_2012_az_originated-records_codes.zip', '247572', '8.75 MB')
}
}
},
'id': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '49270', '2.03 MB'),
'2007': HmdaDataFile('hmda_2007_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '45546', '1.84 MB'),
'2017': HmdaDataFile('hmda_2017_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '43374', '1.27 MB'),
'2015': HmdaDataFile('hmda_2015_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '41252', '1.97 MB'),
'2014': HmdaDataFile('hmda_2014_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '30539', '1.29 MB'),
'2008': HmdaDataFile('hmda_2008_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '36308', '1.49 MB'),
'2009': HmdaDataFile('hmda_2009_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '49056', '1.87 MB'),
'2011': HmdaDataFile('hmda_2011_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '30665', '1.26 MB'),
'2010': HmdaDataFile('hmda_2010_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '37182', '1.47 MB'),
'2013': HmdaDataFile('hmda_2013_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '41770', '1.75 MB'),
'2012': HmdaDataFile('hmda_2012_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '46607', '1.91 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_id_all-records_labels.zip', '103880', '4.67 MB'),
'2007': HmdaDataFile('hmda_2007_id_all-records_labels.zip', '156706', '6.54 MB'),
'2017': HmdaDataFile('hmda_2017_id_all-records_labels.zip', '92755', '3.01 MB'),
'2015': HmdaDataFile('hmda_2015_id_all-records_labels.zip', '89063', '4.72 MB'),
'2014': HmdaDataFile('hmda_2014_id_all-records_labels.zip', '70222', '3.27 MB'),
'2008': HmdaDataFile('hmda_2008_id_all-records_labels.zip', '108689', '4.65 MB'),
'2009': HmdaDataFile('hmda_2009_id_all-records_labels.zip', '125244', '5.05 MB'),
'2011': HmdaDataFile('hmda_2011_id_all-records_labels.zip', '77672', '3.5 MB'),
'2010': HmdaDataFile('hmda_2010_id_all-records_labels.zip', '94170', '4.16 MB'),
'2013': HmdaDataFile('hmda_2013_id_all-records_labels.zip', '97051', '4.49 MB'),
'2012': HmdaDataFile('hmda_2012_id_all-records_labels.zip', '103766', '4.64 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_id_originated-records_labels.zip', '59151', '2.48 MB'),
'2007': HmdaDataFile('hmda_2007_id_originated-records_labels.zip', '67396', '2.74 MB'),
'2017': HmdaDataFile('hmda_2017_id_originated-records_labels.zip', '54102', '1.62 MB'),
'2015': HmdaDataFile('hmda_2015_id_originated-records_labels.zip', '50554', '2.46 MB'),
'2014': HmdaDataFile('hmda_2014_id_originated-records_labels.zip', '38605', '1.67 MB'),
'2008': HmdaDataFile('hmda_2008_id_originated-records_labels.zip', '46531', '1.93 MB'),
'2009': HmdaDataFile('hmda_2009_id_originated-records_labels.zip', '56985', '2.2 MB'),
'2011': HmdaDataFile('hmda_2011_id_originated-records_labels.zip', '37943', '1.6 MB'),
'2010': HmdaDataFile('hmda_2010_id_originated-records_labels.zip', '44663', '1.79 MB'),
'2013': HmdaDataFile('hmda_2013_id_originated-records_labels.zip', '53109', '2.27 MB'),
'2012': HmdaDataFile('hmda_2012_id_originated-records_labels.zip', '57188', '2.39 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '49270', '1.31 MB'),
'2007': HmdaDataFile('hmda_2007_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '45546', '1.22 MB'),
'2017': HmdaDataFile('hmda_2017_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '43374', '882.08 KB'),
'2015': HmdaDataFile('hmda_2015_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '41252', '1.28 MB'),
'2014': HmdaDataFile('hmda_2014_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '30539', '834.4 KB'),
'2008': HmdaDataFile('hmda_2008_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '36308', '1 MB'),
'2009': HmdaDataFile('hmda_2009_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '49056', '1.27 MB'),
'2011': HmdaDataFile('hmda_2011_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '30665', '802.44 KB'),
'2010': HmdaDataFile('hmda_2010_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '37182', '944.45 KB'),
'2013': HmdaDataFile('hmda_2013_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '41770', '1.12 MB'),
'2012': HmdaDataFile('hmda_2012_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '46607', '1.21 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_id_all-records_codes.zip', '103880', '2.88 MB'),
'2007': HmdaDataFile('hmda_2007_id_all-records_codes.zip', '156706', '4.26 MB'),
'2017': HmdaDataFile('hmda_2017_id_all-records_codes.zip', '92755', '1.96 MB'),
'2015': HmdaDataFile('hmda_2015_id_all-records_codes.zip', '89063', '2.95 MB'),
'2014': HmdaDataFile('hmda_2014_id_all-records_codes.zip', '70222', '2.02 MB'),
'2008': HmdaDataFile('hmda_2008_id_all-records_codes.zip', '108689', '3.03 MB'),
'2009': HmdaDataFile('hmda_2009_id_all-records_codes.zip', '125244', '3.33 MB'),
'2011': HmdaDataFile('hmda_2011_id_all-records_codes.zip', '77672', '2.17 MB'),
'2010': HmdaDataFile('hmda_2010_id_all-records_codes.zip', '94170', '2.57 MB'),
'2013': HmdaDataFile('hmda_2013_id_all-records_codes.zip', '97051', '2.76 MB'),
'2012': HmdaDataFile('hmda_2012_id_all-records_codes.zip', '103766', '2.85 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_id_originated-records_codes.zip', '59151', '1.59 MB'),
'2007': HmdaDataFile('hmda_2007_id_originated-records_codes.zip', '67396', '1.83 MB'),
'2017': HmdaDataFile('hmda_2017_id_originated-records_codes.zip', '54102', '1.11 MB'),
'2015': HmdaDataFile('hmda_2015_id_originated-records_codes.zip', '50554', '1.59 MB'),
'2014': HmdaDataFile('hmda_2014_id_originated-records_codes.zip', '38605', '1.07 MB'),
'2008': HmdaDataFile('hmda_2008_id_originated-records_codes.zip', '46531', '1.29 MB'),
'2009': HmdaDataFile('hmda_2009_id_originated-records_codes.zip', '56985', '1.49 MB'),
'2011': HmdaDataFile('hmda_2011_id_originated-records_codes.zip', '37943', '1.02 MB'),
'2010': HmdaDataFile('hmda_2010_id_originated-records_codes.zip', '44663', '1.14 MB'),
'2013': HmdaDataFile('hmda_2013_id_originated-records_codes.zip', '53109', '1.45 MB'),
'2012': HmdaDataFile('hmda_2012_id_originated-records_codes.zip', '57188', '1.51 MB')
}
}
},
'ct': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '66025', '3.2 MB'),
'2007': HmdaDataFile('hmda_2007_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '89152', '4.26 MB'),
'2017': HmdaDataFile('hmda_2017_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '57398', '1.83 MB'),
'2015': HmdaDataFile('hmda_2015_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '59656', '3.12 MB'),
'2014': HmdaDataFile('hmda_2014_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '49553', '2.54 MB'),
'2008': HmdaDataFile('hmda_2008_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '68658', '3.22 MB'),
'2009': HmdaDataFile('hmda_2009_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '104310', '4.56 MB'),
'2011': HmdaDataFile('hmda_2011_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '78978', '3.93 MB'),
'2010': HmdaDataFile('hmda_2010_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '93797', '4.54 MB'),
'2013': HmdaDataFile('hmda_2013_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '87390', '4.18 MB'),
'2012': HmdaDataFile('hmda_2012_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '105049', '4.95 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ct_all-records_labels.zip', '146885', '7.73 MB'),
'2007': HmdaDataFile('hmda_2007_ct_all-records_labels.zip', '301760', '15.1 MB'),
'2017': HmdaDataFile('hmda_2017_ct_all-records_labels.zip', '129401', '4.77 MB'),
'2015': HmdaDataFile('hmda_2015_ct_all-records_labels.zip', '132491', '7.44 MB'),
'2014': HmdaDataFile('hmda_2014_ct_all-records_labels.zip', '114931', '6.31 MB'),
'2008': HmdaDataFile('hmda_2008_ct_all-records_labels.zip', '193168', '9.76 MB'),
'2009': HmdaDataFile('hmda_2009_ct_all-records_labels.zip', '235997', '11.17 MB'),
'2011': HmdaDataFile('hmda_2011_ct_all-records_labels.zip', '177460', '9.5 MB'),
'2010': HmdaDataFile('hmda_2010_ct_all-records_labels.zip', '204936', '10.79 MB'),
'2013': HmdaDataFile('hmda_2013_ct_all-records_labels.zip', '187158', '9.86 MB'),
'2012': HmdaDataFile('hmda_2012_ct_all-records_labels.zip', '214191', '11.09 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ct_originated-records_labels.zip', '75239', '3.73 MB'),
'2007': HmdaDataFile('hmda_2007_ct_originated-records_labels.zip', '126259', '6.09 MB'),
'2017': HmdaDataFile('hmda_2017_ct_originated-records_labels.zip', '67000', '2.19 MB'),
'2015': HmdaDataFile('hmda_2015_ct_originated-records_labels.zip', '68865', '3.65 MB'),
'2014': HmdaDataFile('hmda_2014_ct_originated-records_labels.zip', '58456', '3.04 MB'),
'2008': HmdaDataFile('hmda_2008_ct_originated-records_labels.zip', '84484', '4.01 MB'),
'2009': HmdaDataFile('hmda_2009_ct_originated-records_labels.zip', '113317', '5.04 MB'),
'2011': HmdaDataFile('hmda_2011_ct_originated-records_labels.zip', '87795', '4.47 MB'),
'2010': HmdaDataFile('hmda_2010_ct_originated-records_labels.zip', '102545', '5.08 MB'),
'2013': HmdaDataFile('hmda_2013_ct_originated-records_labels.zip', '98739', '4.8 MB'),
'2012': HmdaDataFile('hmda_2012_ct_originated-records_labels.zip', '115361', '5.49 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '66025', '2.19 MB'),
'2007': HmdaDataFile('hmda_2007_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '89152', '2.98 MB'),
'2017': HmdaDataFile('hmda_2017_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '57398', '1.3 MB'),
'2015': HmdaDataFile('hmda_2015_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '59656', '2.2 MB'),
'2014': HmdaDataFile('hmda_2014_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '49553', '1.74 MB'),
'2008': HmdaDataFile('hmda_2008_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '68658', '2.25 MB'),
'2009': HmdaDataFile('hmda_2009_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '104310', '3.22 MB'),
'2011': HmdaDataFile('hmda_2011_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '78978', '2.66 MB'),
'2010': HmdaDataFile('hmda_2010_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '93797', '3.09 MB'),
'2013': HmdaDataFile('hmda_2013_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '87390', '2.84 MB'),
'2012': HmdaDataFile('hmda_2012_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '105049', '3.31 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ct_all-records_codes.zip', '146885', '5.17 MB'),
'2007': HmdaDataFile('hmda_2007_ct_all-records_codes.zip', '301760', '10.45 MB'),
'2017': HmdaDataFile('hmda_2017_ct_all-records_codes.zip', '129401', '3.24 MB'),
'2015': HmdaDataFile('hmda_2015_ct_all-records_codes.zip', '132491', '5.05 MB'),
'2014': HmdaDataFile('hmda_2014_ct_all-records_codes.zip', '114931', '4.22 MB'),
'2008': HmdaDataFile('hmda_2008_ct_all-records_codes.zip', '193168', '6.73 MB'),
'2009': HmdaDataFile('hmda_2009_ct_all-records_codes.zip', '235997', '7.73 MB'),
'2011': HmdaDataFile('hmda_2011_ct_all-records_codes.zip', '177460', '6.28 MB'),
'2010': HmdaDataFile('hmda_2010_ct_all-records_codes.zip', '204936', '7.18 MB'),
'2013': HmdaDataFile('hmda_2013_ct_all-records_codes.zip', '187158', '6.56 MB'),
'2012': HmdaDataFile('hmda_2012_ct_all-records_codes.zip', '214191', '7.31 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ct_originated-records_codes.zip', '75239', '2.54 MB'),
'2007': HmdaDataFile('hmda_2007_ct_originated-records_codes.zip', '126259', '4.28 MB'),
'2017': HmdaDataFile('hmda_2017_ct_originated-records_codes.zip', '67000', '1.55 MB'),
'2015': HmdaDataFile('hmda_2015_ct_originated-records_codes.zip', '68865', '2.57 MB'),
'2014': HmdaDataFile('hmda_2014_ct_originated-records_codes.zip', '58456', '2.07 MB'),
'2008': HmdaDataFile('hmda_2008_ct_originated-records_codes.zip', '84484', '2.79 MB'),
'2009': HmdaDataFile('hmda_2009_ct_originated-records_codes.zip', '113317', '3.55 MB'),
'2011': HmdaDataFile('hmda_2011_ct_originated-records_codes.zip', '87795', '3.04 MB'),
'2010': HmdaDataFile('hmda_2010_ct_originated-records_codes.zip', '102545', '3.46 MB'),
'2013': HmdaDataFile('hmda_2013_ct_originated-records_codes.zip', '98739', '3.24 MB'),
'2012': HmdaDataFile('hmda_2012_ct_originated-records_codes.zip', '115361', '3.66 MB')
}
}
},
'nh': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '32293', '1.45 MB'),
'2007': HmdaDataFile('hmda_2007_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '30786', '1.35 MB'),
'2017': HmdaDataFile('hmda_2017_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '28246', '825.86 KB'),
'2015': HmdaDataFile('hmda_2015_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '27470', '1.33 MB'),
'2014': HmdaDataFile('hmda_2014_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '21418', '953.08 KB'),
'2008': HmdaDataFile('hmda_2008_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '25186', '1.1 MB'),
'2009': HmdaDataFile('hmda_2009_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '40099', '1.65 MB'),
'2011': HmdaDataFile('hmda_2011_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '30214', '1.28 MB'),
'2010': HmdaDataFile('hmda_2010_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '35463', '1.49 MB'),
'2013': HmdaDataFile('hmda_2013_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '34908', '1.55 MB'),
'2012': HmdaDataFile('hmda_2012_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '41909', '1.8 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nh_all-records_labels.zip', '72628', '3.51 MB'),
'2007': HmdaDataFile('hmda_2007_nh_all-records_labels.zip', '116500', '5.13 MB'),
'2017': HmdaDataFile('hmda_2017_nh_all-records_labels.zip', '65016', '2 MB'),
'2015': HmdaDataFile('hmda_2015_nh_all-records_labels.zip', '63482', '3.36 MB'),
'2014': HmdaDataFile('hmda_2014_nh_all-records_labels.zip', '53373', '2.55 MB'),
'2008': HmdaDataFile('hmda_2008_nh_all-records_labels.zip', '78591', '3.53 MB'),
'2009': HmdaDataFile('hmda_2009_nh_all-records_labels.zip', '100680', '4.29 MB'),
'2011': HmdaDataFile('hmda_2011_nh_all-records_labels.zip', '75090', '3.48 MB'),
'2010': HmdaDataFile('hmda_2010_nh_all-records_labels.zip', '85990', '3.93 MB'),
'2013': HmdaDataFile('hmda_2013_nh_all-records_labels.zip', '80737', '3.88 MB'),
'2012': HmdaDataFile('hmda_2012_nh_all-records_labels.zip', '92574', '4.33 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nh_originated-records_labels.zip', '38262', '1.76 MB'),
'2007': HmdaDataFile('hmda_2007_nh_originated-records_labels.zip', '46139', '2.02 MB'),
'2017': HmdaDataFile('hmda_2017_nh_originated-records_labels.zip', '34290', '1.02 MB'),
'2015': HmdaDataFile('hmda_2015_nh_originated-records_labels.zip', '33120', '1.64 MB'),
'2014': HmdaDataFile('hmda_2014_nh_originated-records_labels.zip', '26589', '1.21 MB'),
'2008': HmdaDataFile('hmda_2008_nh_originated-records_labels.zip', '32850', '1.46 MB'),
'2009': HmdaDataFile('hmda_2009_nh_originated-records_labels.zip', '45790', '1.91 MB'),
'2011': HmdaDataFile('hmda_2011_nh_originated-records_labels.zip', '35321', '1.53 MB'),
'2010': HmdaDataFile('hmda_2010_nh_originated-records_labels.zip', '40696', '1.75 MB'),
'2013': HmdaDataFile('hmda_2013_nh_originated-records_labels.zip', '41589', '1.88 MB'),
'2012': HmdaDataFile('hmda_2012_nh_originated-records_labels.zip', '48098', '2.12 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '32293', '929.81 KB'),
'2007': HmdaDataFile('hmda_2007_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '30786', '870.93 KB'),
'2017': HmdaDataFile('hmda_2017_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '28246', '579.48 KB'),
'2015': HmdaDataFile('hmda_2015_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '27470', '880.79 KB'),
'2014': HmdaDataFile('hmda_2014_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '21418', '605.7 KB'),
'2008': HmdaDataFile('hmda_2008_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '25186', '724.38 KB'),
'2009': HmdaDataFile('hmda_2009_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '40099', '1.1 MB'),
'2011': HmdaDataFile('hmda_2011_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '30214', '801.84 KB'),
'2010': HmdaDataFile('hmda_2010_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '35463', '934.66 KB'),
'2013': HmdaDataFile('hmda_2013_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '34908', '991.18 KB'),
'2012': HmdaDataFile('hmda_2012_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '41909', '1.14 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nh_all-records_codes.zip', '72628', '2.16 MB'),
'2007': HmdaDataFile('hmda_2007_nh_all-records_codes.zip', '116500', '3.28 MB'),
'2017': HmdaDataFile('hmda_2017_nh_all-records_codes.zip', '65016', '1.3 MB'),
'2015': HmdaDataFile('hmda_2015_nh_all-records_codes.zip', '63482', '2.15 MB'),
'2014': HmdaDataFile('hmda_2014_nh_all-records_codes.zip', '53373', '1.56 MB'),
'2008': HmdaDataFile('hmda_2008_nh_all-records_codes.zip', '78591', '2.27 MB'),
'2009': HmdaDataFile('hmda_2009_nh_all-records_codes.zip', '100680', '2.81 MB'),
'2011': HmdaDataFile('hmda_2011_nh_all-records_codes.zip', '75090', '2.12 MB'),
'2010': HmdaDataFile('hmda_2010_nh_all-records_codes.zip', '85990', '2.4 MB'),
'2013': HmdaDataFile('hmda_2013_nh_all-records_codes.zip', '80737', '2.4 MB'),
'2012': HmdaDataFile('hmda_2012_nh_all-records_codes.zip', '92574', '2.66 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nh_originated-records_codes.zip', '38262', '1.12 MB'),
'2007': HmdaDataFile('hmda_2007_nh_originated-records_codes.zip', '46139', '1.31 MB'),
'2017': HmdaDataFile('hmda_2017_nh_originated-records_codes.zip', '34290', '702.18 KB'),
'2015': HmdaDataFile('hmda_2015_nh_originated-records_codes.zip', '33120', '1.08 MB'),
'2014': HmdaDataFile('hmda_2014_nh_originated-records_codes.zip', '26589', '760.91 KB'),
'2008': HmdaDataFile('hmda_2008_nh_originated-records_codes.zip', '32850', '959.54 KB'),
'2009': HmdaDataFile('hmda_2009_nh_originated-records_codes.zip', '45790', '1.27 MB'),
'2011': HmdaDataFile('hmda_2011_nh_originated-records_codes.zip', '35321', '955.91 KB'),
'2010': HmdaDataFile('hmda_2010_nh_originated-records_codes.zip', '40696', '1.09 MB'),
'2013': HmdaDataFile('hmda_2013_nh_originated-records_codes.zip', '41589', '1.2 MB'),
'2012': HmdaDataFile('hmda_2012_nh_originated-records_codes.zip', '48098', '1.33 MB')
}
}
},
'nj': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '168437', '8.44 MB'),
'2007': HmdaDataFile('hmda_2007_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '222771', '10.95 MB'),
'2017': HmdaDataFile('hmda_2017_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '141365', '4.57 MB'),
'2015': HmdaDataFile('hmda_2015_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '147544', '8.22 MB'),
'2014': HmdaDataFile('hmda_2014_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '113481', '5.78 MB'),
'2008': HmdaDataFile('hmda_2008_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '166597', '8.06 MB'),
'2009': HmdaDataFile('hmda_2009_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '247580', '11.18 MB'),
'2011': HmdaDataFile('hmda_2011_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '184520', '9.28 MB'),
'2010': HmdaDataFile('hmda_2010_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '206564', '10.4 MB'),
'2013': HmdaDataFile('hmda_2013_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '194532', '9.73 MB'),
'2012': HmdaDataFile('hmda_2012_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '237169', '11.87 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nj_all-records_labels.zip', '399389', '21.26 MB'),
'2007': HmdaDataFile('hmda_2007_nj_all-records_labels.zip', '808103', '39.11 MB'),
'2017': HmdaDataFile('hmda_2017_nj_all-records_labels.zip', '349563', '12.38 MB'),
'2015': HmdaDataFile('hmda_2015_nj_all-records_labels.zip', '354746', '21.29 MB'),
'2014': HmdaDataFile('hmda_2014_nj_all-records_labels.zip', '289377', '15.65 MB'),
'2008': HmdaDataFile('hmda_2008_nj_all-records_labels.zip', '514816', '25.05 MB'),
'2009': HmdaDataFile('hmda_2009_nj_all-records_labels.zip', '613066', '28.18 MB'),
'2011': HmdaDataFile('hmda_2011_nj_all-records_labels.zip', '451221', '24.28 MB'),
'2010': HmdaDataFile('hmda_2010_nj_all-records_labels.zip', '499489', '27 MB'),
'2013': HmdaDataFile('hmda_2013_nj_all-records_labels.zip', '460264', '24.64 MB'),
'2012': HmdaDataFile('hmda_2012_nj_all-records_labels.zip', '541802', '29.13 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nj_originated-records_labels.zip', '194913', '9.86 MB'),
'2007': HmdaDataFile('hmda_2007_nj_originated-records_labels.zip', '317276', '15.57 MB'),
'2017': HmdaDataFile('hmda_2017_nj_originated-records_labels.zip', '169196', '5.6 MB'),
'2015': HmdaDataFile('hmda_2015_nj_originated-records_labels.zip', '171685', '9.68 MB'),
'2014': HmdaDataFile('hmda_2014_nj_originated-records_labels.zip', '135675', '6.98 MB'),
'2008': HmdaDataFile('hmda_2008_nj_originated-records_labels.zip', '208721', '10.17 MB'),
'2009': HmdaDataFile('hmda_2009_nj_originated-records_labels.zip', '274489', '12.53 MB'),
'2011': HmdaDataFile('hmda_2011_nj_originated-records_labels.zip', '210948', '10.71 MB'),
'2010': HmdaDataFile('hmda_2010_nj_originated-records_labels.zip', '232001', '11.8 MB'),
'2013': HmdaDataFile('hmda_2013_nj_originated-records_labels.zip', '226181', '11.43 MB'),
'2012': HmdaDataFile('hmda_2012_nj_originated-records_labels.zip', '269377', '13.64 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '168437', '6.08 MB'),
'2007': HmdaDataFile('hmda_2007_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '222771', '7.81 MB'),
'2017': HmdaDataFile('hmda_2017_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '141365', '3.22 MB'),
'2015': HmdaDataFile('hmda_2015_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '147544', '5.93 MB'),
'2014': HmdaDataFile('hmda_2014_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '113481', '4.17 MB'),
'2008': HmdaDataFile('hmda_2008_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '166597', '5.77 MB'),
'2009': HmdaDataFile('hmda_2009_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '247580', '8.06 MB'),
'2011': HmdaDataFile('hmda_2011_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '184520', '6.49 MB'),
'2010': HmdaDataFile('hmda_2010_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '206564', '7.36 MB'),
'2013': HmdaDataFile('hmda_2013_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '194532', '6.89 MB'),
'2012': HmdaDataFile('hmda_2012_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '237169', '8.4 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nj_all-records_codes.zip', '399389', '14.78 MB'),
'2007': HmdaDataFile('hmda_2007_nj_all-records_codes.zip', '808103', '27.04 MB'),
'2017': HmdaDataFile('hmda_2017_nj_all-records_codes.zip', '349563', '8.1 MB'),
'2015': HmdaDataFile('hmda_2015_nj_all-records_codes.zip', '354746', '14.72 MB'),
'2014': HmdaDataFile('hmda_2014_nj_all-records_codes.zip', '289377', '10.83 MB'),
'2008': HmdaDataFile('hmda_2008_nj_all-records_codes.zip', '514816', '17.43 MB'),
'2009': HmdaDataFile('hmda_2009_nj_all-records_codes.zip', '613066', '19.76 MB'),
'2011': HmdaDataFile('hmda_2011_nj_all-records_codes.zip', '451221', '16.47 MB'),
'2010': HmdaDataFile('hmda_2010_nj_all-records_codes.zip', '499489', '18.44 MB'),
'2013': HmdaDataFile('hmda_2013_nj_all-records_codes.zip', '460264', '16.86 MB'),
'2012': HmdaDataFile('hmda_2012_nj_all-records_codes.zip', '541802', '19.93 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nj_originated-records_codes.zip', '194913', '7.06 MB'),
'2007': HmdaDataFile('hmda_2007_nj_originated-records_codes.zip', '317276', '11.05 MB'),
'2017': HmdaDataFile('hmda_2017_nj_originated-records_codes.zip', '169196', '3.91 MB'),
'2015': HmdaDataFile('hmda_2015_nj_originated-records_codes.zip', '171685', '6.93 MB'),
'2014': HmdaDataFile('hmda_2014_nj_originated-records_codes.zip', '135675', '5 MB'),
'2008': HmdaDataFile('hmda_2008_nj_originated-records_codes.zip', '208721', '7.24 MB'),
'2009': HmdaDataFile('hmda_2009_nj_originated-records_codes.zip', '274489', '9.01 MB'),
'2011': HmdaDataFile('hmda_2011_nj_originated-records_codes.zip', '210948', '7.44 MB'),
'2010': HmdaDataFile('hmda_2010_nj_originated-records_codes.zip', '232001', '8.28 MB'),
'2013': HmdaDataFile('hmda_2013_nj_originated-records_codes.zip', '226181', '8.05 MB'),
'2012': HmdaDataFile('hmda_2012_nj_originated-records_codes.zip', '269377', '9.59 MB')
}
}
},
'nm': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '32653', '1.47 MB'),
'2007': HmdaDataFile('hmda_2007_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '47002', '2.07 MB'),
'2017': HmdaDataFile('hmda_2017_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '28982', '924.59 KB'),
'2015': HmdaDataFile('hmda_2015_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '29038', '1.45 MB'),
'2014': HmdaDataFile('hmda_2014_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '24491', '1.13 MB'),
'2008': HmdaDataFile('hmda_2008_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '35421', '1.54 MB'),
'2009': HmdaDataFile('hmda_2009_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '45147', '1.83 MB'),
'2011': HmdaDataFile('hmda_2011_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '31586', '1.35 MB'),
'2010': HmdaDataFile('hmda_2010_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '36691', '1.68 MB'),
'2013': HmdaDataFile('hmda_2013_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '38141', '1.77 MB'),
'2012': HmdaDataFile('hmda_2012_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '42629', '1.91 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nm_all-records_labels.zip', '89390', '4.48 MB'),
'2007': HmdaDataFile('hmda_2007_nm_all-records_labels.zip', '178911', '8.19 MB'),
'2017': HmdaDataFile('hmda_2017_nm_all-records_labels.zip', '80423', '2.86 MB'),
'2015': HmdaDataFile('hmda_2015_nm_all-records_labels.zip', '79236', '4.37 MB'),
'2014': HmdaDataFile('hmda_2014_nm_all-records_labels.zip', '71841', '3.67 MB'),
'2008': HmdaDataFile('hmda_2008_nm_all-records_labels.zip', '114678', '5.3 MB'),
'2009': HmdaDataFile('hmda_2009_nm_all-records_labels.zip', '123495', '5.37 MB'),
'2011': HmdaDataFile('hmda_2011_nm_all-records_labels.zip', '90785', '4.39 MB'),
'2010': HmdaDataFile('hmda_2010_nm_all-records_labels.zip', '104045', '5.33 MB'),
'2013': HmdaDataFile('hmda_2013_nm_all-records_labels.zip', '102217', '5.23 MB'),
'2012': HmdaDataFile('hmda_2012_nm_all-records_labels.zip', '108074', '5.4 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nm_originated-records_labels.zip', '41334', '1.93 MB'),
'2007': HmdaDataFile('hmda_2007_nm_originated-records_labels.zip', '69876', '3.15 MB'),
'2017': HmdaDataFile('hmda_2017_nm_originated-records_labels.zip', '37465', '1.24 MB'),
'2015': HmdaDataFile('hmda_2015_nm_originated-records_labels.zip', '37572', '1.93 MB'),
'2014': HmdaDataFile('hmda_2014_nm_originated-records_labels.zip', '32547', '1.57 MB'),
'2008': HmdaDataFile('hmda_2008_nm_originated-records_labels.zip', '46512', '2.09 MB'),
'2009': HmdaDataFile('hmda_2009_nm_originated-records_labels.zip', '54007', '2.27 MB'),
'2011': HmdaDataFile('hmda_2011_nm_originated-records_labels.zip', '39979', '1.8 MB'),
'2010': HmdaDataFile('hmda_2010_nm_originated-records_labels.zip', '45261', '2.12 MB'),
'2013': HmdaDataFile('hmda_2013_nm_originated-records_labels.zip', '49389', '2.34 MB'),
'2012': HmdaDataFile('hmda_2012_nm_originated-records_labels.zip', '53038', '2.41 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '32653', '980.97 KB'),
'2007': HmdaDataFile('hmda_2007_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '47002', '1.42 MB'),
'2017': HmdaDataFile('hmda_2017_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '28982', '644.64 KB'),
'2015': HmdaDataFile('hmda_2015_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '29038', '989.53 KB'),
'2014': HmdaDataFile('hmda_2014_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '24491', '759.74 KB'),
'2008': HmdaDataFile('hmda_2008_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '35421', '1.05 MB'),
'2009': HmdaDataFile('hmda_2009_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '45147', '1.26 MB'),
'2011': HmdaDataFile('hmda_2011_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '31586', '892.04 KB'),
'2010': HmdaDataFile('hmda_2010_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '36691', '1.12 MB'),
'2013': HmdaDataFile('hmda_2013_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '38141', '1.18 MB'),
'2012': HmdaDataFile('hmda_2012_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '42629', '1.26 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nm_all-records_codes.zip', '89390', '2.89 MB'),
'2007': HmdaDataFile('hmda_2007_nm_all-records_codes.zip', '178911', '5.48 MB'),
'2017': HmdaDataFile('hmda_2017_nm_all-records_codes.zip', '80423', '1.88 MB'),
'2015': HmdaDataFile('hmda_2015_nm_all-records_codes.zip', '79236', '2.87 MB'),
'2014': HmdaDataFile('hmda_2014_nm_all-records_codes.zip', '71841', '2.38 MB'),
'2008': HmdaDataFile('hmda_2008_nm_all-records_codes.zip', '114678', '3.55 MB'),
'2009': HmdaDataFile('hmda_2009_nm_all-records_codes.zip', '123495', '3.6 MB'),
'2011': HmdaDataFile('hmda_2011_nm_all-records_codes.zip', '90785', '2.81 MB'),
'2010': HmdaDataFile('hmda_2010_nm_all-records_codes.zip', '104045', '3.42 MB'),
'2013': HmdaDataFile('hmda_2013_nm_all-records_codes.zip', '102217', '3.39 MB'),
'2012': HmdaDataFile('hmda_2012_nm_all-records_codes.zip', '108074', '3.48 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nm_originated-records_codes.zip', '41334', '1.3 MB'),
'2007': HmdaDataFile('hmda_2007_nm_originated-records_codes.zip', '69876', '2.15 MB'),
'2017': HmdaDataFile('hmda_2017_nm_originated-records_codes.zip', '37465', '864.54 KB'),
'2015': HmdaDataFile('hmda_2015_nm_originated-records_codes.zip', '37572', '1.32 MB'),
'2014': HmdaDataFile('hmda_2014_nm_originated-records_codes.zip', '32547', '1.05 MB'),
'2008': HmdaDataFile('hmda_2008_nm_originated-records_codes.zip', '46512', '1.43 MB'),
'2009': HmdaDataFile('hmda_2009_nm_originated-records_codes.zip', '54007', '1.56 MB'),
'2011': HmdaDataFile('hmda_2011_nm_originated-records_codes.zip', '39979', '1.19 MB'),
'2010': HmdaDataFile('hmda_2010_nm_originated-records_codes.zip', '45261', '1.4 MB'),
'2013': HmdaDataFile('hmda_2013_nm_originated-records_codes.zip', '49389', '1.56 MB'),
'2012': HmdaDataFile('hmda_2012_nm_originated-records_codes.zip', '53038', '1.59 MB')
}
}
},
'tx': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '513200', '26.59 MB'),
'2007': HmdaDataFile('hmda_2007_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '454756', '22.34 MB'),
'2017': HmdaDataFile('hmda_2017_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '457884', '15.35 MB'),
'2015': HmdaDataFile('hmda_2015_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '460596', '26.85 MB'),
'2014': HmdaDataFile('hmda_2014_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '391921', '20.38 MB'),
'2008': HmdaDataFile('hmda_2008_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '364311', '17.57 MB'),
'2009': HmdaDataFile('hmda_2009_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '448222', '20.5 MB'),
'2011': HmdaDataFile('hmda_2011_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '391184', '19.56 MB'),
'2010': HmdaDataFile('hmda_2010_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '405050', '20.17 MB'),
'2013': HmdaDataFile('hmda_2013_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '503627', '26.39 MB'),
'2012': HmdaDataFile('hmda_2012_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '502655', '26.28 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_tx_all-records_labels.zip', '1266767', '70.12 MB'),
'2007': HmdaDataFile('hmda_2007_tx_all-records_labels.zip', '1723576', '83.63 MB'),
'2017': HmdaDataFile('hmda_2017_tx_all-records_labels.zip', '1148206', '42.77 MB'),
'2015': HmdaDataFile('hmda_2015_tx_all-records_labels.zip', '1139573', '71.35 MB'),
'2014': HmdaDataFile('hmda_2014_tx_all-records_labels.zip', '1011598', '55.94 MB'),
'2008': HmdaDataFile('hmda_2008_tx_all-records_labels.zip', '1204457', '58.59 MB'),
'2009': HmdaDataFile('hmda_2009_tx_all-records_labels.zip', '1242037', '57.86 MB'),
'2011': HmdaDataFile('hmda_2011_tx_all-records_labels.zip', '1038591', '56.64 MB'),
'2010': HmdaDataFile('hmda_2010_tx_all-records_labels.zip', '1063486', '58.04 MB'),
'2013': HmdaDataFile('hmda_2013_tx_all-records_labels.zip', '1254738', '70.8 MB'),
'2012': HmdaDataFile('hmda_2012_tx_all-records_labels.zip', '1221801', '69.13 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_tx_originated-records_labels.zip', '613325', '32.57 MB'),
'2007': HmdaDataFile('hmda_2007_tx_originated-records_labels.zip', '653817', '32.47 MB'),
'2017': HmdaDataFile('hmda_2017_tx_originated-records_labels.zip', '559492', '19.3 MB'),
'2015': HmdaDataFile('hmda_2015_tx_originated-records_labels.zip', '557266', '33.01 MB'),
'2014': HmdaDataFile('hmda_2014_tx_originated-records_labels.zip', '484747', '25.75 MB'),
'2008': HmdaDataFile('hmda_2008_tx_originated-records_labels.zip', '473701', '23.41 MB'),
'2009': HmdaDataFile('hmda_2009_tx_originated-records_labels.zip', '520422', '24.41 MB'),
'2011': HmdaDataFile('hmda_2011_tx_originated-records_labels.zip', '466338', '24 MB'),
'2010': HmdaDataFile('hmda_2010_tx_originated-records_labels.zip', '476566', '24.45 MB'),
'2013': HmdaDataFile('hmda_2013_tx_originated-records_labels.zip', '611180', '32.38 MB'),
'2012': HmdaDataFile('hmda_2012_tx_originated-records_labels.zip', '594151', '31.5 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '513200', '18.9 MB'),
'2007': HmdaDataFile('hmda_2007_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '454756', '16.07 MB'),
'2017': HmdaDataFile('hmda_2017_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '457884', '10.67 MB'),
'2015': HmdaDataFile('hmda_2015_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '460596', '19.03 MB'),
'2014': HmdaDataFile('hmda_2014_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '391921', '14.47 MB'),
'2008': HmdaDataFile('hmda_2008_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '364311', '12.62 MB'),
'2009': HmdaDataFile('hmda_2009_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '448222', '14.81 MB'),
'2011': HmdaDataFile('hmda_2011_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '391184', '13.52 MB'),
'2010': HmdaDataFile('hmda_2010_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '405050', '13.93 MB'),
'2013': HmdaDataFile('hmda_2013_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '503627', '18.83 MB'),
'2012': HmdaDataFile('hmda_2012_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '502655', '18.62 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_tx_all-records_codes.zip', '1266767', '48.14 MB'),
'2007': HmdaDataFile('hmda_2007_tx_all-records_codes.zip', '1723576', '58.17 MB'),
'2017': HmdaDataFile('hmda_2017_tx_all-records_codes.zip', '1148206', '27.41 MB'),
'2015': HmdaDataFile('hmda_2015_tx_all-records_codes.zip', '1139573', '48.5 MB'),
'2014': HmdaDataFile('hmda_2014_tx_all-records_codes.zip', '1011598', '38.26 MB'),
'2008': HmdaDataFile('hmda_2008_tx_all-records_codes.zip', '1204457', '40.77 MB'),
'2009': HmdaDataFile('hmda_2009_tx_all-records_codes.zip', '1242037', '40.49 MB'),
'2011': HmdaDataFile('hmda_2011_tx_all-records_codes.zip', '1038591', '38.13 MB'),
'2010': HmdaDataFile('hmda_2010_tx_all-records_codes.zip', '1063486', '39.1 MB'),
'2013': HmdaDataFile('hmda_2013_tx_all-records_codes.zip', '1254738', '48.99 MB'),
'2012': HmdaDataFile('hmda_2012_tx_all-records_codes.zip', '1221801', '47.68 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_tx_originated-records_codes.zip', '613325', '23.08 MB'),
'2007': HmdaDataFile('hmda_2007_tx_originated-records_codes.zip', '653817', '23.29 MB'),
'2017': HmdaDataFile('hmda_2017_tx_originated-records_codes.zip', '559492', '13.3 MB'),
'2015': HmdaDataFile('hmda_2015_tx_originated-records_codes.zip', '557266', '23.23 MB'),
'2014': HmdaDataFile('hmda_2014_tx_originated-records_codes.zip', '484747', '18.19 MB'),
'2008': HmdaDataFile('hmda_2008_tx_originated-records_codes.zip', '473701', '16.77 MB'),
'2009': HmdaDataFile('hmda_2009_tx_originated-records_codes.zip', '520422', '17.57 MB'),
'2011': HmdaDataFile('hmda_2011_tx_originated-records_codes.zip', '466338', '16.56 MB'),
'2010': HmdaDataFile('hmda_2010_tx_originated-records_codes.zip', '476566', '16.85 MB'),
'2013': HmdaDataFile('hmda_2013_tx_originated-records_codes.zip', '611180', '22.91 MB'),
'2012': HmdaDataFile('hmda_2012_tx_originated-records_codes.zip', '594151', '22.16 MB')
}
}
},
'la': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '72627', '3.46 MB'),
'2007': HmdaDataFile('hmda_2007_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '86710', '4.14 MB'),
'2017': HmdaDataFile('hmda_2017_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '61729', '1.89 MB'),
'2015': HmdaDataFile('hmda_2015_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '67942', '3.69 MB'),
'2014': HmdaDataFile('hmda_2014_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '59398', '2.86 MB'),
'2008': HmdaDataFile('hmda_2008_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '69191', '3.26 MB'),
'2009': HmdaDataFile('hmda_2009_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '81760', '3.74 MB'),
'2011': HmdaDataFile('hmda_2011_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '69735', '3.2 MB'),
'2010': HmdaDataFile('hmda_2010_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '75645', '3.55 MB'),
'2013': HmdaDataFile('hmda_2013_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '77713', '3.68 MB'),
'2012': HmdaDataFile('hmda_2012_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '86148', '4.02 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_la_all-records_labels.zip', '195937', '10.16 MB'),
'2007': HmdaDataFile('hmda_2007_la_all-records_labels.zip', '300738', '14.74 MB'),
'2017': HmdaDataFile('hmda_2017_la_all-records_labels.zip', '173079', '6.06 MB'),
'2015': HmdaDataFile('hmda_2015_la_all-records_labels.zip', '180533', '10.55 MB'),
'2014': HmdaDataFile('hmda_2014_la_all-records_labels.zip', '170514', '8.9 MB'),
'2008': HmdaDataFile('hmda_2008_la_all-records_labels.zip', '221773', '11.04 MB'),
'2009': HmdaDataFile('hmda_2009_la_all-records_labels.zip', '231242', '11.32 MB'),
'2011': HmdaDataFile('hmda_2011_la_all-records_labels.zip', '210644', '10.99 MB'),
'2010': HmdaDataFile('hmda_2010_la_all-records_labels.zip', '214403', '11.27 MB'),
'2013': HmdaDataFile('hmda_2013_la_all-records_labels.zip', '219546', '11.6 MB'),
'2012': HmdaDataFile('hmda_2012_la_all-records_labels.zip', '231946', '12.12 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_la_originated-records_labels.zip', '94261', '4.64 MB'),
'2007': HmdaDataFile('hmda_2007_la_originated-records_labels.zip', '126278', '6.16 MB'),
'2017': HmdaDataFile('hmda_2017_la_originated-records_labels.zip', '83323', '2.62 MB'),
'2015': HmdaDataFile('hmda_2015_la_originated-records_labels.zip', '89327', '4.95 MB'),
'2014': HmdaDataFile('hmda_2014_la_originated-records_labels.zip', '81230', '4.06 MB'),
'2008': HmdaDataFile('hmda_2008_la_originated-records_labels.zip', '97778', '4.77 MB'),
'2009': HmdaDataFile('hmda_2009_la_originated-records_labels.zip', '103928', '4.89 MB'),
'2011': HmdaDataFile('hmda_2011_la_originated-records_labels.zip', '90349', '4.38 MB'),
'2010': HmdaDataFile('hmda_2010_la_originated-records_labels.zip', '96150', '4.64 MB'),
'2013': HmdaDataFile('hmda_2013_la_originated-records_labels.zip', '101849', '4.89 MB'),
'2012': HmdaDataFile('hmda_2012_la_originated-records_labels.zip', '107636', '5.1 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '72627', '2.33 MB'),
'2007': HmdaDataFile('hmda_2007_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '86710', '2.79 MB'),
'2017': HmdaDataFile('hmda_2017_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '61729', '1.36 MB'),
'2015': HmdaDataFile('hmda_2015_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '67942', '2.53 MB'),
'2014': HmdaDataFile('hmda_2014_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '59398', '1.93 MB'),
'2008': HmdaDataFile('hmda_2008_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '69191', '2.23 MB'),
'2009': HmdaDataFile('hmda_2009_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '81760', '2.59 MB'),
'2011': HmdaDataFile('hmda_2011_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '69735', '2.12 MB'),
'2010': HmdaDataFile('hmda_2010_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '75645', '2.36 MB'),
'2013': HmdaDataFile('hmda_2013_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '77713', '2.44 MB'),
'2012': HmdaDataFile('hmda_2012_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '86148', '2.66 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_la_all-records_codes.zip', '195937', '6.65 MB'),
'2007': HmdaDataFile('hmda_2007_la_all-records_codes.zip', '300738', '9.84 MB'),
'2017': HmdaDataFile('hmda_2017_la_all-records_codes.zip', '173079', '4 MB'),
'2015': HmdaDataFile('hmda_2015_la_all-records_codes.zip', '180533', '6.97 MB'),
'2014': HmdaDataFile('hmda_2014_la_all-records_codes.zip', '170514', '5.81 MB'),
'2008': HmdaDataFile('hmda_2008_la_all-records_codes.zip', '221773', '7.43 MB'),
'2009': HmdaDataFile('hmda_2009_la_all-records_codes.zip', '231242', '7.74 MB'),
'2011': HmdaDataFile('hmda_2011_la_all-records_codes.zip', '210644', '7.15 MB'),
'2010': HmdaDataFile('hmda_2010_la_all-records_codes.zip', '214403', '7.36 MB'),
'2013': HmdaDataFile('hmda_2013_la_all-records_codes.zip', '219546', '7.57 MB'),
'2012': HmdaDataFile('hmda_2012_la_all-records_codes.zip', '231946', '7.91 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_la_originated-records_codes.zip', '94261', '3.12 MB'),
'2007': HmdaDataFile('hmda_2007_la_originated-records_codes.zip', '126278', '4.18 MB'),
'2017': HmdaDataFile('hmda_2017_la_originated-records_codes.zip', '83323', '1.86 MB'),
'2015': HmdaDataFile('hmda_2015_la_originated-records_codes.zip', '89327', '3.37 MB'),
'2014': HmdaDataFile('hmda_2014_la_originated-records_codes.zip', '81230', '2.72 MB'),
'2008': HmdaDataFile('hmda_2008_la_originated-records_codes.zip', '97778', '3.29 MB'),
'2009': HmdaDataFile('hmda_2009_la_originated-records_codes.zip', '103928', '3.38 MB'),
'2011': HmdaDataFile('hmda_2011_la_originated-records_codes.zip', '90349', '2.9 MB'),
'2010': HmdaDataFile('hmda_2010_la_originated-records_codes.zip', '96150', '3.06 MB'),
'2013': HmdaDataFile('hmda_2013_la_originated-records_codes.zip', '101849', '3.22 MB'),
'2012': HmdaDataFile('hmda_2012_la_originated-records_codes.zip', '107636', '3.34 MB')
}
}
},
'wa': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '225231', '11.2 MB'),
'2007': HmdaDataFile('hmda_2007_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '215619', '10.08 MB'),
'2017': HmdaDataFile('hmda_2017_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '191532', '6.03 MB'),
'2015': HmdaDataFile('hmda_2015_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '183367', '10.31 MB'),
'2014': HmdaDataFile('hmda_2014_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '133913', '6.66 MB'),
'2008': HmdaDataFile('hmda_2008_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '165363', '7.57 MB'),
'2009': HmdaDataFile('hmda_2009_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '255387', '10.98 MB'),
'2011': HmdaDataFile('hmda_2011_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '174449', '8.42 MB'),
'2010': HmdaDataFile('hmda_2010_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '211008', '10.2 MB'),
'2013': HmdaDataFile('hmda_2013_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '201071', '10.04 MB'),
'2012': HmdaDataFile('hmda_2012_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '245095', '11.94 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wa_all-records_labels.zip', '466566', '24.68 MB'),
'2007': HmdaDataFile('hmda_2007_wa_all-records_labels.zip', '722481', '33.39 MB'),
'2017': HmdaDataFile('hmda_2017_wa_all-records_labels.zip', '402196', '13.8 MB'),
'2015': HmdaDataFile('hmda_2015_wa_all-records_labels.zip', '387805', '23.58 MB'),
'2014': HmdaDataFile('hmda_2014_wa_all-records_labels.zip', '311425', '16.4 MB'),
'2008': HmdaDataFile('hmda_2008_wa_all-records_labels.zip', '485622', '22.64 MB'),
'2009': HmdaDataFile('hmda_2009_wa_all-records_labels.zip', '590758', '25.93 MB'),
'2011': HmdaDataFile('hmda_2011_wa_all-records_labels.zip', '406149', '20.95 MB'),
'2010': HmdaDataFile('hmda_2010_wa_all-records_labels.zip', '473922', '24.62 MB'),
'2013': HmdaDataFile('hmda_2013_wa_all-records_labels.zip', '448753', '23.93 MB'),
'2012': HmdaDataFile('hmda_2012_wa_all-records_labels.zip', '519479', '27.08 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wa_originated-records_labels.zip', '263712', '13.26 MB'),
'2007': HmdaDataFile('hmda_2007_wa_originated-records_labels.zip', '310267', '14.49 MB'),
'2017': HmdaDataFile('hmda_2017_wa_originated-records_labels.zip', '230076', '7.35 MB'),
'2015': HmdaDataFile('hmda_2015_wa_originated-records_labels.zip', '218772', '12.48 MB'),
'2014': HmdaDataFile('hmda_2014_wa_originated-records_labels.zip', '166279', '8.37 MB'),
'2008': HmdaDataFile('hmda_2008_wa_originated-records_labels.zip', '207841', '9.62 MB'),
'2009': HmdaDataFile('hmda_2009_wa_originated-records_labels.zip', '286416', '12.41 MB'),
'2011': HmdaDataFile('hmda_2011_wa_originated-records_labels.zip', '205550', '10.07 MB'),
'2010': HmdaDataFile('hmda_2010_wa_originated-records_labels.zip', '241128', '11.8 MB'),
'2013': HmdaDataFile('hmda_2013_wa_originated-records_labels.zip', '246580', '12.48 MB'),
'2012': HmdaDataFile('hmda_2012_wa_originated-records_labels.zip', '287748', '14.21 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '225231', '7.74 MB'),
'2007': HmdaDataFile('hmda_2007_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '215619', '7.08 MB'),
'2017': HmdaDataFile('hmda_2017_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '191532', '4.12 MB'),
'2015': HmdaDataFile('hmda_2015_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '183367', '7.25 MB'),
'2014': HmdaDataFile('hmda_2014_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '133913', '4.63 MB'),
'2008': HmdaDataFile('hmda_2008_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '165363', '5.29 MB'),
'2009': HmdaDataFile('hmda_2009_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '255387', '7.8 MB'),
'2011': HmdaDataFile('hmda_2011_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '174449', '5.8 MB'),
'2010': HmdaDataFile('hmda_2010_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '211008', '6.97 MB'),
'2013': HmdaDataFile('hmda_2013_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '201071', '6.95 MB'),
'2012': HmdaDataFile('hmda_2012_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '245095', '8.25 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wa_all-records_codes.zip', '466566', '16.4 MB'),
'2007': HmdaDataFile('hmda_2007_wa_all-records_codes.zip', '722481', '22.74 MB'),
'2017': HmdaDataFile('hmda_2017_wa_all-records_codes.zip', '402196', '8.75 MB'),
'2015': HmdaDataFile('hmda_2015_wa_all-records_codes.zip', '387805', '15.87 MB'),
'2014': HmdaDataFile('hmda_2014_wa_all-records_codes.zip', '311425', '10.92 MB'),
'2008': HmdaDataFile('hmda_2008_wa_all-records_codes.zip', '485622', '15.32 MB'),
'2009': HmdaDataFile('hmda_2009_wa_all-records_codes.zip', '590758', '17.9 MB'),
'2011': HmdaDataFile('hmda_2011_wa_all-records_codes.zip', '406149', '13.93 MB'),
'2010': HmdaDataFile('hmda_2010_wa_all-records_codes.zip', '473922', '16.26 MB'),
'2013': HmdaDataFile('hmda_2013_wa_all-records_codes.zip', '448753', '15.96 MB'),
'2012': HmdaDataFile('hmda_2012_wa_all-records_codes.zip', '519479', '18.1 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wa_originated-records_codes.zip', '263712', '9.1 MB'),
'2007': HmdaDataFile('hmda_2007_wa_originated-records_codes.zip', '310267', '10.15 MB'),
'2017': HmdaDataFile('hmda_2017_wa_originated-records_codes.zip', '230076', '4.96 MB'),
'2015': HmdaDataFile('hmda_2015_wa_originated-records_codes.zip', '218772', '8.71 MB'),
'2014': HmdaDataFile('hmda_2014_wa_originated-records_codes.zip', '166279', '5.77 MB'),
'2008': HmdaDataFile('hmda_2008_wa_originated-records_codes.zip', '207841', '6.67 MB'),
'2009': HmdaDataFile('hmda_2009_wa_originated-records_codes.zip', '286416', '8.79 MB'),
'2011': HmdaDataFile('hmda_2011_wa_originated-records_codes.zip', '205550', '6.9 MB'),
'2010': HmdaDataFile('hmda_2010_wa_originated-records_codes.zip', '241128', '8.02 MB'),
'2013': HmdaDataFile('hmda_2013_wa_originated-records_codes.zip', '246580', '8.58 MB'),
'2012': HmdaDataFile('hmda_2012_wa_originated-records_codes.zip', '287748', '9.76 MB')
}
}
},
'nc': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '220677', '10.93 MB'),
'2007': HmdaDataFile('hmda_2007_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '228163', '10.68 MB'),
'2017': HmdaDataFile('hmda_2017_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '196986', '6.12 MB'),
'2015': HmdaDataFile('hmda_2015_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '190281', '11.14 MB'),
'2014': HmdaDataFile('hmda_2014_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '154486', '7.77 MB'),
'2008': HmdaDataFile('hmda_2008_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '202793', '9.24 MB'),
'2009': HmdaDataFile('hmda_2009_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '263921', '11.18 MB'),
'2011': HmdaDataFile('hmda_2011_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '189738', '9.12 MB'),
'2010': HmdaDataFile('hmda_2010_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '211447', '10.31 MB'),
'2013': HmdaDataFile('hmda_2013_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '224534', '11.35 MB'),
'2012': HmdaDataFile('hmda_2012_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '240943', '12.17 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nc_all-records_labels.zip', '519897', '27.86 MB'),
'2007': HmdaDataFile('hmda_2007_nc_all-records_labels.zip', '779619', '36.44 MB'),
'2017': HmdaDataFile('hmda_2017_nc_all-records_labels.zip', '464109', '16.24 MB'),
'2015': HmdaDataFile('hmda_2015_nc_all-records_labels.zip', '457002', '29.06 MB'),
'2014': HmdaDataFile('hmda_2014_nc_all-records_labels.zip', '392549', '21.12 MB'),
'2008': HmdaDataFile('hmda_2008_nc_all-records_labels.zip', '575937', '26.69 MB'),
'2009': HmdaDataFile('hmda_2009_nc_all-records_labels.zip', '617968', '27.09 MB'),
'2011': HmdaDataFile('hmda_2011_nc_all-records_labels.zip', '476288', '24.93 MB'),
'2010': HmdaDataFile('hmda_2010_nc_all-records_labels.zip', '511912', '26.98 MB'),
'2013': HmdaDataFile('hmda_2013_nc_all-records_labels.zip', '562524', '30.51 MB'),
'2012': HmdaDataFile('hmda_2012_nc_all-records_labels.zip', '578793', '31.47 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nc_originated-records_labels.zip', '262765', '13.32 MB'),
'2007': HmdaDataFile('hmda_2007_nc_originated-records_labels.zip', '337640', '15.84 MB'),
'2017': HmdaDataFile('hmda_2017_nc_originated-records_labels.zip', '240128', '7.69 MB'),
'2015': HmdaDataFile('hmda_2015_nc_originated-records_labels.zip', '231114', '13.82 MB'),
'2014': HmdaDataFile('hmda_2014_nc_originated-records_labels.zip', '192473', '9.85 MB'),
'2008': HmdaDataFile('hmda_2008_nc_originated-records_labels.zip', '260693', '12.04 MB'),
'2009': HmdaDataFile('hmda_2009_nc_originated-records_labels.zip', '303410', '13.01 MB'),
'2011': HmdaDataFile('hmda_2011_nc_originated-records_labels.zip', '227079', '11.18 MB'),
'2010': HmdaDataFile('hmda_2010_nc_originated-records_labels.zip', '248590', '12.36 MB'),
'2013': HmdaDataFile('hmda_2013_nc_originated-records_labels.zip', '278062', '14.32 MB'),
'2012': HmdaDataFile('hmda_2012_nc_originated-records_labels.zip', '289407', '14.88 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '220677', '7.49 MB'),
'2007': HmdaDataFile('hmda_2007_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '228163', '7.37 MB'),
'2017': HmdaDataFile('hmda_2017_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '196986', '4.24 MB'),
'2015': HmdaDataFile('hmda_2015_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '190281', '7.64 MB'),
'2014': HmdaDataFile('hmda_2014_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '154486', '5.35 MB'),
'2008': HmdaDataFile('hmda_2008_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '202793', '6.4 MB'),
'2009': HmdaDataFile('hmda_2009_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '263921', '7.94 MB'),
'2011': HmdaDataFile('hmda_2011_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '189738', '6.03 MB'),
'2010': HmdaDataFile('hmda_2010_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '211447', '6.9 MB'),
'2013': HmdaDataFile('hmda_2013_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '224534', '7.74 MB'),
'2012': HmdaDataFile('hmda_2012_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '240943', '8.33 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nc_all-records_codes.zip', '519897', '18.52 MB'),
'2007': HmdaDataFile('hmda_2007_nc_all-records_codes.zip', '779619', '24.55 MB'),
'2017': HmdaDataFile('hmda_2017_nc_all-records_codes.zip', '464109', '10.37 MB'),
'2015': HmdaDataFile('hmda_2015_nc_all-records_codes.zip', '457002', '19.13 MB'),
'2014': HmdaDataFile('hmda_2014_nc_all-records_codes.zip', '392549', '13.94 MB'),
'2008': HmdaDataFile('hmda_2008_nc_all-records_codes.zip', '575937', '17.94 MB'),
'2009': HmdaDataFile('hmda_2009_nc_all-records_codes.zip', '617968', '18.62 MB'),
'2011': HmdaDataFile('hmda_2011_nc_all-records_codes.zip', '476288', '15.9 MB'),
'2010': HmdaDataFile('hmda_2010_nc_all-records_codes.zip', '511912', '17.45 MB'),
'2013': HmdaDataFile('hmda_2013_nc_all-records_codes.zip', '562524', '20.11 MB'),
'2012': HmdaDataFile('hmda_2012_nc_all-records_codes.zip', '578793', '20.81 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nc_originated-records_codes.zip', '262765', '9.11 MB'),
'2007': HmdaDataFile('hmda_2007_nc_originated-records_codes.zip', '337640', '10.93 MB'),
'2017': HmdaDataFile('hmda_2017_nc_originated-records_codes.zip', '240128', '5.27 MB'),
'2015': HmdaDataFile('hmda_2015_nc_originated-records_codes.zip', '231114', '9.42 MB'),
'2014': HmdaDataFile('hmda_2014_nc_originated-records_codes.zip', '192473', '6.73 MB'),
'2008': HmdaDataFile('hmda_2008_nc_originated-records_codes.zip', '260693', '8.29 MB'),
'2009': HmdaDataFile('hmda_2009_nc_originated-records_codes.zip', '303410', '9.19 MB'),
'2011': HmdaDataFile('hmda_2011_nc_originated-records_codes.zip', '227079', '7.35 MB'),
'2010': HmdaDataFile('hmda_2010_nc_originated-records_codes.zip', '248590', '8.23 MB'),
'2013': HmdaDataFile('hmda_2013_nc_originated-records_codes.zip', '278062', '9.71 MB'),
'2012': HmdaDataFile('hmda_2012_nc_originated-records_codes.zip', '289407', '10.13 MB')
}
}
},
'nd': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '15775', '603.58 KB'),
'2007': HmdaDataFile('hmda_2007_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '11613', '456.02 KB'),
'2017': HmdaDataFile('hmda_2017_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '12661', '336.26 KB'),
'2015': HmdaDataFile('hmda_2015_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '15228', '660.94 KB'),
'2014': HmdaDataFile('hmda_2014_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '12935', '513.04 KB'),
'2008': HmdaDataFile('hmda_2008_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '12079', '485.38 KB'),
'2009': HmdaDataFile('hmda_2009_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '17245', '600.63 KB'),
'2011': HmdaDataFile('hmda_2011_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '14852', '543.35 KB'),
'2010': HmdaDataFile('hmda_2010_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '16629', '606.86 KB'),
'2013': HmdaDataFile('hmda_2013_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '17550', '663.49 KB'),
'2012': HmdaDataFile('hmda_2012_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '19995', '730.61 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nd_all-records_labels.zip', '32670', '1.35 MB'),
'2007': HmdaDataFile('hmda_2007_nd_all-records_labels.zip', '32081', '1.34 MB'),
'2017': HmdaDataFile('hmda_2017_nd_all-records_labels.zip', '25521', '733.11 KB'),
'2015': HmdaDataFile('hmda_2015_nd_all-records_labels.zip', '31382', '1.48 MB'),
'2014': HmdaDataFile('hmda_2014_nd_all-records_labels.zip', '27698', '1.18 MB'),
'2008': HmdaDataFile('hmda_2008_nd_all-records_labels.zip', '28946', '1.25 MB'),
'2009': HmdaDataFile('hmda_2009_nd_all-records_labels.zip', '35789', '1.37 MB'),
'2011': HmdaDataFile('hmda_2011_nd_all-records_labels.zip', '30234', '1.22 MB'),
'2010': HmdaDataFile('hmda_2010_nd_all-records_labels.zip', '32754', '1.34 MB'),
'2013': HmdaDataFile('hmda_2013_nd_all-records_labels.zip', '35576', '1.48 MB'),
'2012': HmdaDataFile('hmda_2012_nd_all-records_labels.zip', '37581', '1.53 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nd_originated-records_labels.zip', '19580', '772.47 KB'),
'2007': HmdaDataFile('hmda_2007_nd_originated-records_labels.zip', '17673', '689.98 KB'),
'2017': HmdaDataFile('hmda_2017_nd_originated-records_labels.zip', '16016', '429.87 KB'),
'2015': HmdaDataFile('hmda_2015_nd_originated-records_labels.zip', '19045', '847.76 KB'),
'2014': HmdaDataFile('hmda_2014_nd_originated-records_labels.zip', '17180', '693.97 KB'),
'2008': HmdaDataFile('hmda_2008_nd_originated-records_labels.zip', '16806', '683.54 KB'),
'2009': HmdaDataFile('hmda_2009_nd_originated-records_labels.zip', '21080', '749.43 KB'),
'2011': HmdaDataFile('hmda_2011_nd_originated-records_labels.zip', '18548', '698.61 KB'),
'2010': HmdaDataFile('hmda_2010_nd_originated-records_labels.zip', '20218', '761.83 KB'),
'2013': HmdaDataFile('hmda_2013_nd_originated-records_labels.zip', '22042', '860.22 KB'),
'2012': HmdaDataFile('hmda_2012_nd_originated-records_labels.zip', '24096', '905.02 KB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '15775', '388.61 KB'),
'2007': HmdaDataFile('hmda_2007_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '11613', '292.47 KB'),
'2017': HmdaDataFile('hmda_2017_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '12661', '238.72 KB'),
'2015': HmdaDataFile('hmda_2015_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '15228', '425.66 KB'),
'2014': HmdaDataFile('hmda_2014_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '12935', '330.05 KB'),
'2008': HmdaDataFile('hmda_2008_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '12079', '313.83 KB'),
'2009': HmdaDataFile('hmda_2009_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '17245', '399.68 KB'),
'2011': HmdaDataFile('hmda_2011_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '14852', '348.81 KB'),
'2010': HmdaDataFile('hmda_2010_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '16629', '390.2 KB'),
'2013': HmdaDataFile('hmda_2013_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '17550', '426.9 KB'),
'2012': HmdaDataFile('hmda_2012_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '19995', '472.27 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nd_all-records_codes.zip', '32670', '832.59 KB'),
'2007': HmdaDataFile('hmda_2007_nd_all-records_codes.zip', '32081', '834.25 KB'),
'2017': HmdaDataFile('hmda_2017_nd_all-records_codes.zip', '25521', '487.38 KB'),
'2015': HmdaDataFile('hmda_2015_nd_all-records_codes.zip', '31382', '919.39 KB'),
'2014': HmdaDataFile('hmda_2014_nd_all-records_codes.zip', '27698', '726.15 KB'),
'2008': HmdaDataFile('hmda_2008_nd_all-records_codes.zip', '28946', '781.35 KB'),
'2009': HmdaDataFile('hmda_2009_nd_all-records_codes.zip', '35789', '876.82 KB'),
'2011': HmdaDataFile('hmda_2011_nd_all-records_codes.zip', '30234', '748.63 KB'),
'2010': HmdaDataFile('hmda_2010_nd_all-records_codes.zip', '32754', '826.34 KB'),
'2013': HmdaDataFile('hmda_2013_nd_all-records_codes.zip', '35576', '908.14 KB'),
'2012': HmdaDataFile('hmda_2012_nd_all-records_codes.zip', '37581', '943.93 KB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nd_originated-records_codes.zip', '19580', '486.75 KB'),
'2007': HmdaDataFile('hmda_2007_nd_originated-records_codes.zip', '17673', '439.38 KB'),
'2017': HmdaDataFile('hmda_2017_nd_originated-records_codes.zip', '16016', '299.25 KB'),
'2015': HmdaDataFile('hmda_2015_nd_originated-records_codes.zip', '19045', '537.29 KB'),
'2014': HmdaDataFile('hmda_2014_nd_originated-records_codes.zip', '17180', '437.05 KB'),
'2008': HmdaDataFile('hmda_2008_nd_originated-records_codes.zip', '16806', '438.4 KB'),
'2009': HmdaDataFile('hmda_2009_nd_originated-records_codes.zip', '21080', '495.22 KB'),
'2011': HmdaDataFile('hmda_2011_nd_originated-records_codes.zip', '18548', '440.47 KB'),
'2010': HmdaDataFile('hmda_2010_nd_originated-records_codes.zip', '20218', '482.84 KB'),
'2013': HmdaDataFile('hmda_2013_nd_originated-records_codes.zip', '22042', '542.74 KB'),
'2012': HmdaDataFile('hmda_2012_nd_originated-records_codes.zip', '24096', '576.32 KB')
}
}
},
'ne': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '42823', '1.96 MB'),
'2007': HmdaDataFile('hmda_2007_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '35882', '1.57 MB'),
'2017': HmdaDataFile('hmda_2017_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '34951', '1.05 MB'),
'2015': HmdaDataFile('hmda_2015_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '38670', '1.94 MB'),
'2014': HmdaDataFile('hmda_2014_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '30830', '1.5 MB'),
'2008': HmdaDataFile('hmda_2008_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '33590', '1.44 MB'),
'2009': HmdaDataFile('hmda_2009_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '56987', '2.26 MB'),
'2011': HmdaDataFile('hmda_2011_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '42269', '1.83 MB'),
'2010': HmdaDataFile('hmda_2010_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '51870', '2.25 MB'),
'2013': HmdaDataFile('hmda_2013_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '45909', '2.1 MB'),
'2012': HmdaDataFile('hmda_2012_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '57432', '2.6 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ne_all-records_labels.zip', '89068', '4.35 MB'),
'2007': HmdaDataFile('hmda_2007_ne_all-records_labels.zip', '112752', '5.1 MB'),
'2017': HmdaDataFile('hmda_2017_ne_all-records_labels.zip', '74966', '2.4 MB'),
'2015': HmdaDataFile('hmda_2015_ne_all-records_labels.zip', '82331', '4.47 MB'),
'2014': HmdaDataFile('hmda_2014_ne_all-records_labels.zip', '68559', '3.58 MB'),
'2008': HmdaDataFile('hmda_2008_ne_all-records_labels.zip', '88586', '3.98 MB'),
'2009': HmdaDataFile('hmda_2009_ne_all-records_labels.zip', '117158', '4.99 MB'),
'2011': HmdaDataFile('hmda_2011_ne_all-records_labels.zip', '90603', '4.29 MB'),
'2010': HmdaDataFile('hmda_2010_ne_all-records_labels.zip', '105043', '4.96 MB'),
'2013': HmdaDataFile('hmda_2013_ne_all-records_labels.zip', '97423', '4.78 MB'),
'2012': HmdaDataFile('hmda_2012_ne_all-records_labels.zip', '113860', '5.56 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ne_originated-records_labels.zip', '52019', '2.41 MB'),
'2007': HmdaDataFile('hmda_2007_ne_originated-records_labels.zip', '51923', '2.26 MB'),
'2017': HmdaDataFile('hmda_2017_ne_originated-records_labels.zip', '43786', '1.32 MB'),
'2015': HmdaDataFile('hmda_2015_ne_originated-records_labels.zip', '48157', '2.46 MB'),
'2014': HmdaDataFile('hmda_2014_ne_originated-records_labels.zip', '39960', '1.98 MB'),
'2008': HmdaDataFile('hmda_2008_ne_originated-records_labels.zip', '44333', '1.92 MB'),
'2009': HmdaDataFile('hmda_2009_ne_originated-records_labels.zip', '65475', '2.65 MB'),
'2011': HmdaDataFile('hmda_2011_ne_originated-records_labels.zip', '50258', '2.23 MB'),
'2010': HmdaDataFile('hmda_2010_ne_originated-records_labels.zip', '59744', '2.64 MB'),
'2013': HmdaDataFile('hmda_2013_ne_originated-records_labels.zip', '56003', '2.6 MB'),
'2012': HmdaDataFile('hmda_2012_ne_originated-records_labels.zip', '67081', '3.09 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '42823', '1.28 MB'),
'2007': HmdaDataFile('hmda_2007_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '35882', '1.06 MB'),
'2017': HmdaDataFile('hmda_2017_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '34951', '753.37 KB'),
'2015': HmdaDataFile('hmda_2015_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '38670', '1.3 MB'),
'2014': HmdaDataFile('hmda_2014_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '30830', '988.96 KB'),
'2008': HmdaDataFile('hmda_2008_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '33590', '983.97 KB'),
'2009': HmdaDataFile('hmda_2009_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '56987', '1.58 MB'),
'2011': HmdaDataFile('hmda_2011_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '42269', '1.2 MB'),
'2010': HmdaDataFile('hmda_2010_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '51870', '1.48 MB'),
'2013': HmdaDataFile('hmda_2013_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '45909', '1.37 MB'),
'2012': HmdaDataFile('hmda_2012_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '57432', '1.7 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ne_all-records_codes.zip', '89068', '2.76 MB'),
'2007': HmdaDataFile('hmda_2007_ne_all-records_codes.zip', '112752', '3.37 MB'),
'2017': HmdaDataFile('hmda_2017_ne_all-records_codes.zip', '74966', '1.6 MB'),
'2015': HmdaDataFile('hmda_2015_ne_all-records_codes.zip', '82331', '2.91 MB'),
'2014': HmdaDataFile('hmda_2014_ne_all-records_codes.zip', '68559', '2.27 MB'),
'2008': HmdaDataFile('hmda_2008_ne_all-records_codes.zip', '88586', '2.64 MB'),
'2009': HmdaDataFile('hmda_2009_ne_all-records_codes.zip', '117158', '3.39 MB'),
'2011': HmdaDataFile('hmda_2011_ne_all-records_codes.zip', '90603', '2.71 MB'),
'2010': HmdaDataFile('hmda_2010_ne_all-records_codes.zip', '105043', '3.16 MB'),
'2013': HmdaDataFile('hmda_2013_ne_all-records_codes.zip', '97423', '3.04 MB'),
'2012': HmdaDataFile('hmda_2012_ne_all-records_codes.zip', '113860', '3.54 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ne_originated-records_codes.zip', '52019', '1.56 MB'),
'2007': HmdaDataFile('hmda_2007_ne_originated-records_codes.zip', '51923', '1.52 MB'),
'2017': HmdaDataFile('hmda_2017_ne_originated-records_codes.zip', '43786', '932.71 KB'),
'2015': HmdaDataFile('hmda_2015_ne_originated-records_codes.zip', '48157', '1.64 MB'),
'2014': HmdaDataFile('hmda_2014_ne_originated-records_codes.zip', '39960', '1.29 MB'),
'2008': HmdaDataFile('hmda_2008_ne_originated-records_codes.zip', '44333', '1.3 MB'),
'2009': HmdaDataFile('hmda_2009_ne_originated-records_codes.zip', '65475', '1.84 MB'),
'2011': HmdaDataFile('hmda_2011_ne_originated-records_codes.zip', '50258', '1.45 MB'),
'2010': HmdaDataFile('hmda_2010_ne_originated-records_codes.zip', '59744', '1.72 MB'),
'2013': HmdaDataFile('hmda_2013_ne_originated-records_codes.zip', '56003', '1.69 MB'),
'2012': HmdaDataFile('hmda_2012_ne_originated-records_codes.zip', '67081', '2.01 MB')
}
}
},
'tn': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '144529', '7.15 MB'),
'2007': HmdaDataFile('hmda_2007_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '151937', '7.17 MB'),
'2017': HmdaDataFile('hmda_2017_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '133331', '4.11 MB'),
'2015': HmdaDataFile('hmda_2015_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '126852', '7.13 MB'),
'2014': HmdaDataFile('hmda_2014_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '103024', '5.17 MB'),
'2008': HmdaDataFile('hmda_2008_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '123580', '5.68 MB'),
'2009': HmdaDataFile('hmda_2009_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '160077', '6.99 MB'),
'2011': HmdaDataFile('hmda_2011_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '113228', '5.37 MB'),
'2010': HmdaDataFile('hmda_2010_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '128508', '6.09 MB'),
'2013': HmdaDataFile('hmda_2013_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '141210', '6.95 MB'),
'2012': HmdaDataFile('hmda_2012_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '151750', '7.48 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_tn_all-records_labels.zip', '350490', '18.5 MB'),
'2007': HmdaDataFile('hmda_2007_tn_all-records_labels.zip', '512117', '24.54 MB'),
'2017': HmdaDataFile('hmda_2017_tn_all-records_labels.zip', '326416', '11.11 MB'),
'2015': HmdaDataFile('hmda_2015_tn_all-records_labels.zip', '305114', '18.47 MB'),
'2014': HmdaDataFile('hmda_2014_tn_all-records_labels.zip', '265214', '14.16 MB'),
'2008': HmdaDataFile('hmda_2008_tn_all-records_labels.zip', '365839', '17.39 MB'),
'2009': HmdaDataFile('hmda_2009_tn_all-records_labels.zip', '406028', '18.65 MB'),
'2011': HmdaDataFile('hmda_2011_tn_all-records_labels.zip', '304377', '16.01 MB'),
'2010': HmdaDataFile('hmda_2010_tn_all-records_labels.zip', '335917', '17.65 MB'),
'2013': HmdaDataFile('hmda_2013_tn_all-records_labels.zip', '358454', '19.31 MB'),
'2012': HmdaDataFile('hmda_2012_tn_all-records_labels.zip', '373362', '20.16 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_tn_originated-records_labels.zip', '174965', '8.84 MB'),
'2007': HmdaDataFile('hmda_2007_tn_originated-records_labels.zip', '217392', '10.46 MB'),
'2017': HmdaDataFile('hmda_2017_tn_originated-records_labels.zip', '164577', '5.23 MB'),
'2015': HmdaDataFile('hmda_2015_tn_originated-records_labels.zip', '155616', '8.89 MB'),
'2014': HmdaDataFile('hmda_2014_tn_originated-records_labels.zip', '131171', '6.76 MB'),
'2008': HmdaDataFile('hmda_2008_tn_originated-records_labels.zip', '163188', '7.73 MB'),
'2009': HmdaDataFile('hmda_2009_tn_originated-records_labels.zip', '187776', '8.43 MB'),
'2011': HmdaDataFile('hmda_2011_tn_originated-records_labels.zip', '137943', '6.74 MB'),
'2010': HmdaDataFile('hmda_2010_tn_originated-records_labels.zip', '153282', '7.46 MB'),
'2013': HmdaDataFile('hmda_2013_tn_originated-records_labels.zip', '172612', '8.61 MB'),
'2012': HmdaDataFile('hmda_2012_tn_originated-records_labels.zip', '180686', '9.04 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '144529', '4.86 MB'),
'2007': HmdaDataFile('hmda_2007_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '151937', '4.92 MB'),
'2017': HmdaDataFile('hmda_2017_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '133331', '2.88 MB'),
'2015': HmdaDataFile('hmda_2015_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '126852', '4.92 MB'),
'2014': HmdaDataFile('hmda_2014_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '103024', '3.53 MB'),
'2008': HmdaDataFile('hmda_2008_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '123580', '3.92 MB'),
'2009': HmdaDataFile('hmda_2009_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '160077', '4.89 MB'),
'2011': HmdaDataFile('hmda_2011_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '113228', '3.56 MB'),
'2010': HmdaDataFile('hmda_2010_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '128508', '4.04 MB'),
'2013': HmdaDataFile('hmda_2013_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '141210', '4.68 MB'),
'2012': HmdaDataFile('hmda_2012_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '151750', '5.03 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_tn_all-records_codes.zip', '350490', '12.18 MB'),
'2007': HmdaDataFile('hmda_2007_tn_all-records_codes.zip', '512117', '16.43 MB'),
'2017': HmdaDataFile('hmda_2017_tn_all-records_codes.zip', '326416', '7.18 MB'),
'2015': HmdaDataFile('hmda_2015_tn_all-records_codes.zip', '305114', '12.28 MB'),
'2014': HmdaDataFile('hmda_2014_tn_all-records_codes.zip', '265214', '9.33 MB'),
'2008': HmdaDataFile('hmda_2008_tn_all-records_codes.zip', '365839', '11.7 MB'),
'2009': HmdaDataFile('hmda_2009_tn_all-records_codes.zip', '406028', '12.76 MB'),
'2011': HmdaDataFile('hmda_2011_tn_all-records_codes.zip', '304377', '10.33 MB'),
'2010': HmdaDataFile('hmda_2010_tn_all-records_codes.zip', '335917', '11.45 MB'),
'2013': HmdaDataFile('hmda_2013_tn_all-records_codes.zip', '358454', '12.68 MB'),
'2012': HmdaDataFile('hmda_2012_tn_all-records_codes.zip', '373362', '13.3 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_tn_originated-records_codes.zip', '174965', '6 MB'),
'2007': HmdaDataFile('hmda_2007_tn_originated-records_codes.zip', '217392', '7.22 MB'),
'2017': HmdaDataFile('hmda_2017_tn_originated-records_codes.zip', '164577', '3.62 MB'),
'2015': HmdaDataFile('hmda_2015_tn_originated-records_codes.zip', '155616', '6.1 MB'),
'2014': HmdaDataFile('hmda_2014_tn_originated-records_codes.zip', '131171', '4.61 MB'),
'2008': HmdaDataFile('hmda_2008_tn_originated-records_codes.zip', '163188', '5.35 MB'),
'2009': HmdaDataFile('hmda_2009_tn_originated-records_codes.zip', '187776', '5.9 MB'),
'2011': HmdaDataFile('hmda_2011_tn_originated-records_codes.zip', '137943', '4.45 MB'),
'2010': HmdaDataFile('hmda_2010_tn_originated-records_codes.zip', '153282', '4.93 MB'),
'2013': HmdaDataFile('hmda_2013_tn_originated-records_codes.zip', '172612', '5.75 MB'),
'2012': HmdaDataFile('hmda_2012_tn_originated-records_codes.zip', '180686', '6.04 MB')
}
}
},
'ny': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '199204', '10.43 MB'),
'2007': HmdaDataFile('hmda_2007_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '281690', '14.06 MB'),
'2017': HmdaDataFile('hmda_2017_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '187337', '6.57 MB'),
'2015': HmdaDataFile('hmda_2015_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '182621', '10.44 MB'),
'2014': HmdaDataFile('hmda_2014_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '156647', '8.24 MB'),
'2008': HmdaDataFile('hmda_2008_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '195917', '9.68 MB'),
'2009': HmdaDataFile('hmda_2009_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '245556', '11.46 MB'),
'2011': HmdaDataFile('hmda_2011_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '201157', '10.32 MB'),
'2010': HmdaDataFile('hmda_2010_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '217296', '11.01 MB'),
'2013': HmdaDataFile('hmda_2013_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '225700', '11.8 MB'),
'2012': HmdaDataFile('hmda_2012_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '242701', '12.39 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ny_all-records_labels.zip', '477313', '26.86 MB'),
'2007': HmdaDataFile('hmda_2007_ny_all-records_labels.zip', '1009451', '51.35 MB'),
'2017': HmdaDataFile('hmda_2017_ny_all-records_labels.zip', '446902', '17.47 MB'),
'2015': HmdaDataFile('hmda_2015_ny_all-records_labels.zip', '439654', '27.13 MB'),
'2014': HmdaDataFile('hmda_2014_ny_all-records_labels.zip', '389279', '22.13 MB'),
'2008': HmdaDataFile('hmda_2008_ny_all-records_labels.zip', '644647', '33.21 MB'),
'2009': HmdaDataFile('hmda_2009_ny_all-records_labels.zip', '645487', '31.84 MB'),
'2011': HmdaDataFile('hmda_2011_ny_all-records_labels.zip', '503733', '28.17 MB'),
'2010': HmdaDataFile('hmda_2010_ny_all-records_labels.zip', '529869', '29.46 MB'),
'2013': HmdaDataFile('hmda_2013_ny_all-records_labels.zip', '539217', '30.46 MB'),
'2012': HmdaDataFile('hmda_2012_ny_all-records_labels.zip', '566980', '31.53 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ny_originated-records_labels.zip', '246292', '13.15 MB'),
'2007': HmdaDataFile('hmda_2007_ny_originated-records_labels.zip', '398639', '20.13 MB'),
'2017': HmdaDataFile('hmda_2017_ny_originated-records_labels.zip', '236499', '8.45 MB'),
'2015': HmdaDataFile('hmda_2015_ny_originated-records_labels.zip', '228054', '13.23 MB'),
'2014': HmdaDataFile('hmda_2014_ny_originated-records_labels.zip', '198817', '10.63 MB'),
'2008': HmdaDataFile('hmda_2008_ny_originated-records_labels.zip', '252826', '12.72 MB'),
'2009': HmdaDataFile('hmda_2009_ny_originated-records_labels.zip', '281652', '13.38 MB'),
'2011': HmdaDataFile('hmda_2011_ny_originated-records_labels.zip', '238554', '12.42 MB'),
'2010': HmdaDataFile('hmda_2010_ny_originated-records_labels.zip', '252729', '13.07 MB'),
'2013': HmdaDataFile('hmda_2013_ny_originated-records_labels.zip', '271829', '14.42 MB'),
'2012': HmdaDataFile('hmda_2012_ny_originated-records_labels.zip', '285106', '14.71 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '199204', '7.47 MB'),
'2007': HmdaDataFile('hmda_2007_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '281690', '10.14 MB'),
'2017': HmdaDataFile('hmda_2017_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '187337', '4.66 MB'),
'2015': HmdaDataFile('hmda_2015_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '182621', '7.46 MB'),
'2014': HmdaDataFile('hmda_2014_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '156647', '5.87 MB'),
'2008': HmdaDataFile('hmda_2008_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '195917', '6.98 MB'),
'2009': HmdaDataFile('hmda_2009_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '245556', '8.36 MB'),
'2011': HmdaDataFile('hmda_2011_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '201157', '7.36 MB'),
'2010': HmdaDataFile('hmda_2010_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '217296', '7.85 MB'),
'2013': HmdaDataFile('hmda_2013_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '225700', '8.44 MB'),
'2012': HmdaDataFile('hmda_2012_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '242701', '8.82 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ny_all-records_codes.zip', '477313', '18.61 MB'),
'2007': HmdaDataFile('hmda_2007_ny_all-records_codes.zip', '1009451', '35.76 MB'),
'2017': HmdaDataFile('hmda_2017_ny_all-records_codes.zip', '446902', '11.54 MB'),
'2015': HmdaDataFile('hmda_2015_ny_all-records_codes.zip', '439654', '18.57 MB'),
'2014': HmdaDataFile('hmda_2014_ny_all-records_codes.zip', '389279', '15.25 MB'),
'2008': HmdaDataFile('hmda_2008_ny_all-records_codes.zip', '644647', '23.18 MB'),
'2009': HmdaDataFile('hmda_2009_ny_all-records_codes.zip', '645487', '22.45 MB'),
'2011': HmdaDataFile('hmda_2011_ny_all-records_codes.zip', '503733', '19.55 MB'),
'2010': HmdaDataFile('hmda_2010_ny_all-records_codes.zip', '529869', '20.47 MB'),
'2013': HmdaDataFile('hmda_2013_ny_all-records_codes.zip', '539217', '21.23 MB'),
'2012': HmdaDataFile('hmda_2012_ny_all-records_codes.zip', '566980', '21.92 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ny_originated-records_codes.zip', '246292', '9.34 MB'),
'2007': HmdaDataFile('hmda_2007_ny_originated-records_codes.zip', '398639', '14.43 MB'),
'2017': HmdaDataFile('hmda_2017_ny_originated-records_codes.zip', '236499', '5.9 MB'),
'2015': HmdaDataFile('hmda_2015_ny_originated-records_codes.zip', '228054', '9.33 MB'),
'2014': HmdaDataFile('hmda_2014_ny_originated-records_codes.zip', '198817', '7.49 MB'),
'2008': HmdaDataFile('hmda_2008_ny_originated-records_codes.zip', '252826', '9.14 MB'),
'2009': HmdaDataFile('hmda_2009_ny_originated-records_codes.zip', '281652', '9.73 MB'),
'2011': HmdaDataFile('hmda_2011_ny_originated-records_codes.zip', '238554', '8.78 MB'),
'2010': HmdaDataFile('hmda_2010_ny_originated-records_codes.zip', '252729', '9.27 MB'),
'2013': HmdaDataFile('hmda_2013_ny_originated-records_codes.zip', '271829', '10.23 MB'),
'2012': HmdaDataFile('hmda_2012_ny_originated-records_codes.zip', '285106', '10.38 MB')
}
}
},
'pa': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '226492', '11.67 MB'),
'2007': HmdaDataFile('hmda_2007_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '276621', '13.86 MB'),
'2017': HmdaDataFile('hmda_2017_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '200046', '6.74 MB'),
'2015': HmdaDataFile('hmda_2015_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '206884', '12.08 MB'),
'2014': HmdaDataFile('hmda_2014_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '175312', '8.94 MB'),
'2008': HmdaDataFile('hmda_2008_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '230119', '11.38 MB'),
'2009': HmdaDataFile('hmda_2009_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '318871', '14.7 MB'),
'2011': HmdaDataFile('hmda_2011_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '247649', '12.37 MB'),
'2010': HmdaDataFile('hmda_2010_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '280178', '14.07 MB'),
'2013': HmdaDataFile('hmda_2013_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '269808', '13.79 MB'),
'2012': HmdaDataFile('hmda_2012_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '313974', '15.99 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_pa_all-records_labels.zip', '526005', '29.04 MB'),
'2007': HmdaDataFile('hmda_2007_pa_all-records_labels.zip', '992904', '49.57 MB'),
'2017': HmdaDataFile('hmda_2017_pa_all-records_labels.zip', '473757', '17.91 MB'),
'2015': HmdaDataFile('hmda_2015_pa_all-records_labels.zip', '481331', '30.25 MB'),
'2014': HmdaDataFile('hmda_2014_pa_all-records_labels.zip', '427665', '23.41 MB'),
'2008': HmdaDataFile('hmda_2008_pa_all-records_labels.zip', '713995', '35.8 MB'),
'2009': HmdaDataFile('hmda_2009_pa_all-records_labels.zip', '768245', '37 MB'),
'2011': HmdaDataFile('hmda_2011_pa_all-records_labels.zip', '593240', '32.54 MB'),
'2010': HmdaDataFile('hmda_2010_pa_all-records_labels.zip', '659124', '36.22 MB'),
'2013': HmdaDataFile('hmda_2013_pa_all-records_labels.zip', '619770', '34.49 MB'),
'2012': HmdaDataFile('hmda_2012_pa_all-records_labels.zip', '695500', '38.56 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_pa_originated-records_labels.zip', '273334', '14.35 MB'),
'2007': HmdaDataFile('hmda_2007_pa_originated-records_labels.zip', '427955', '21.49 MB'),
'2017': HmdaDataFile('hmda_2017_pa_originated-records_labels.zip', '249620', '8.62 MB'),
'2015': HmdaDataFile('hmda_2015_pa_originated-records_labels.zip', '254361', '15.02 MB'),
'2014': HmdaDataFile('hmda_2014_pa_originated-records_labels.zip', '222765', '11.62 MB'),
'2008': HmdaDataFile('hmda_2008_pa_originated-records_labels.zip', '322031', '16.08 MB'),
'2009': HmdaDataFile('hmda_2009_pa_originated-records_labels.zip', '374957', '17.61 MB'),
'2011': HmdaDataFile('hmda_2011_pa_originated-records_labels.zip', '297874', '15.19 MB'),
'2010': HmdaDataFile('hmda_2010_pa_originated-records_labels.zip', '334150', '17.16 MB'),
'2013': HmdaDataFile('hmda_2013_pa_originated-records_labels.zip', '328425', '16.99 MB'),
'2012': HmdaDataFile('hmda_2012_pa_originated-records_labels.zip', '369571', '19.04 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '226492', '8.16 MB'),
'2007': HmdaDataFile('hmda_2007_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '276621', '9.82 MB'),
'2017': HmdaDataFile('hmda_2017_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '200046', '4.79 MB'),
'2015': HmdaDataFile('hmda_2015_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '206884', '8.4 MB'),
'2014': HmdaDataFile('hmda_2014_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '175312', '6.24 MB'),
'2008': HmdaDataFile('hmda_2008_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '230119', '8.1 MB'),
'2009': HmdaDataFile('hmda_2009_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '318871', '10.64 MB'),
'2011': HmdaDataFile('hmda_2011_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '247649', '8.65 MB'),
'2010': HmdaDataFile('hmda_2010_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '280178', '9.85 MB'),
'2013': HmdaDataFile('hmda_2013_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '269808', '9.72 MB'),
'2012': HmdaDataFile('hmda_2012_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '313974', '11.2 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_pa_all-records_codes.zip', '526005', '19.65 MB'),
'2007': HmdaDataFile('hmda_2007_pa_all-records_codes.zip', '992904', '34.13 MB'),
'2017': HmdaDataFile('hmda_2017_pa_all-records_codes.zip', '473757', '11.77 MB'),
'2015': HmdaDataFile('hmda_2015_pa_all-records_codes.zip', '481331', '20.19 MB'),
'2014': HmdaDataFile('hmda_2014_pa_all-records_codes.zip', '427665', '15.79 MB'),
'2008': HmdaDataFile('hmda_2008_pa_all-records_codes.zip', '713995', '24.78 MB'),
'2009': HmdaDataFile('hmda_2009_pa_all-records_codes.zip', '768245', '25.9 MB'),
'2011': HmdaDataFile('hmda_2011_pa_all-records_codes.zip', '593240', '22.19 MB'),
'2010': HmdaDataFile('hmda_2010_pa_all-records_codes.zip', '659124', '24.67 MB'),
'2013': HmdaDataFile('hmda_2013_pa_all-records_codes.zip', '619770', '23.66 MB'),
'2012': HmdaDataFile('hmda_2012_pa_all-records_codes.zip', '695500', '26.36 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_pa_originated-records_codes.zip', '273334', '9.99 MB'),
'2007': HmdaDataFile('hmda_2007_pa_originated-records_codes.zip', '427955', '15.19 MB'),
'2017': HmdaDataFile('hmda_2017_pa_originated-records_codes.zip', '249620', '6.04 MB'),
'2015': HmdaDataFile('hmda_2015_pa_originated-records_codes.zip', '254361', '10.34 MB'),
'2014': HmdaDataFile('hmda_2014_pa_originated-records_codes.zip', '222765', '8.07 MB'),
'2008': HmdaDataFile('hmda_2008_pa_originated-records_codes.zip', '322031', '11.45 MB'),
'2009': HmdaDataFile('hmda_2009_pa_originated-records_codes.zip', '374957', '12.74 MB'),
'2011': HmdaDataFile('hmda_2011_pa_originated-records_codes.zip', '297874', '10.53 MB'),
'2010': HmdaDataFile('hmda_2010_pa_originated-records_codes.zip', '334150', '11.94 MB'),
'2013': HmdaDataFile('hmda_2013_pa_originated-records_codes.zip', '328425', '11.84 MB'),
'2012': HmdaDataFile('hmda_2012_pa_originated-records_codes.zip', '369571', '13.21 MB')
}
}
},
'ca': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '1007181', '54.33 MB'),
'2007': HmdaDataFile('hmda_2007_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '870274', '44.14 MB'),
'2017': HmdaDataFile('hmda_2017_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '721751', '25.58 MB'),
'2015': HmdaDataFile('hmda_2015_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '840549', '50.22 MB'),
'2014': HmdaDataFile('hmda_2014_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '617503', '32.89 MB'),
'2008': HmdaDataFile('hmda_2008_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '540095', '26.82 MB'),
'2009': HmdaDataFile('hmda_2009_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '871218', '40.28 MB'),
'2011': HmdaDataFile('hmda_2011_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '777800', '41.39 MB'),
'2010': HmdaDataFile('hmda_2010_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '867778', '45.96 MB'),
'2013': HmdaDataFile('hmda_2013_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '943566', '51.22 MB'),
'2012': HmdaDataFile('hmda_2012_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '1179705', '63.7 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ca_all-records_labels.zip', '2235971', '128.67 MB'),
'2007': HmdaDataFile('hmda_2007_ca_all-records_labels.zip', '3425570', '178.89 MB'),
'2017': HmdaDataFile('hmda_2017_ca_all-records_labels.zip', '1714459', '68.82 MB'),
'2015': HmdaDataFile('hmda_2015_ca_all-records_labels.zip', '1878495', '120.02 MB'),
'2014': HmdaDataFile('hmda_2014_ca_all-records_labels.zip', '1436457', '81.73 MB'),
'2008': HmdaDataFile('hmda_2008_ca_all-records_labels.zip', '1843875', '91.09 MB'),
'2009': HmdaDataFile('hmda_2009_ca_all-records_labels.zip', '2186032', '102.95 MB'),
'2011': HmdaDataFile('hmda_2011_ca_all-records_labels.zip', '1914815', '108.47 MB'),
'2010': HmdaDataFile('hmda_2010_ca_all-records_labels.zip', '2007593', '113.6 MB'),
'2013': HmdaDataFile('hmda_2013_ca_all-records_labels.zip', '2161214', '124.84 MB'),
'2012': HmdaDataFile('hmda_2012_ca_all-records_labels.zip', '2541978', '146.32 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ca_originated-records_labels.zip', '1172541', '63.86 MB'),
'2007': HmdaDataFile('hmda_2007_ca_originated-records_labels.zip', '1233502', '62.47 MB'),
'2017': HmdaDataFile('hmda_2017_ca_originated-records_labels.zip', '877753', '31.88 MB'),
'2015': HmdaDataFile('hmda_2015_ca_originated-records_labels.zip', '993335', '60.03 MB'),
'2014': HmdaDataFile('hmda_2014_ca_originated-records_labels.zip', '750422', '40.44 MB'),
'2008': HmdaDataFile('hmda_2008_ca_originated-records_labels.zip', '672822', '33.65 MB'),
'2009': HmdaDataFile('hmda_2009_ca_originated-records_labels.zip', '972974', '45.3 MB'),
'2011': HmdaDataFile('hmda_2011_ca_originated-records_labels.zip', '917070', '49.28 MB'),
'2010': HmdaDataFile('hmda_2010_ca_originated-records_labels.zip', '980348', '52.43 MB'),
'2013': HmdaDataFile('hmda_2013_ca_originated-records_labels.zip', '1153965', '63.32 MB'),
'2012': HmdaDataFile('hmda_2012_ca_originated-records_labels.zip', '1391720', '75.92 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '1007181', '40 MB'),
'2007': HmdaDataFile('hmda_2007_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '870274', '32.16 MB'),
'2017': HmdaDataFile('hmda_2017_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '721751', '17.18 MB'),
'2015': HmdaDataFile('hmda_2015_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '840549', '35.92 MB'),
'2014': HmdaDataFile('hmda_2014_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '617503', '24.08 MB'),
'2008': HmdaDataFile('hmda_2008_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '540095', '19.47 MB'),
'2009': HmdaDataFile('hmda_2009_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '871218', '29.59 MB'),
'2011': HmdaDataFile('hmda_2011_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '777800', '30.2 MB'),
'2010': HmdaDataFile('hmda_2010_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '867778', '33.54 MB'),
'2013': HmdaDataFile('hmda_2013_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '943566', '37.62 MB'),
'2012': HmdaDataFile('hmda_2012_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '1179705', '46.88 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ca_all-records_codes.zip', '2235971', '91.67 MB'),
'2007': HmdaDataFile('hmda_2007_ca_all-records_codes.zip', '3425570', '117.94 MB'),
'2017': HmdaDataFile('hmda_2017_ca_all-records_codes.zip', '1714459', '42.19 MB'),
'2015': HmdaDataFile('hmda_2015_ca_all-records_codes.zip', '1878495', '82.3 MB'),
'2014': HmdaDataFile('hmda_2014_ca_all-records_codes.zip', '1436457', '57.96 MB'),
'2008': HmdaDataFile('hmda_2008_ca_all-records_codes.zip', '1843875', '63.68 MB'),
'2009': HmdaDataFile('hmda_2009_ca_all-records_codes.zip', '2186032', '72.66 MB'),
'2011': HmdaDataFile('hmda_2011_ca_all-records_codes.zip', '1914815', '76.92 MB'),
'2010': HmdaDataFile('hmda_2010_ca_all-records_codes.zip', '2007593', '80.32 MB'),
'2013': HmdaDataFile('hmda_2013_ca_all-records_codes.zip', '2161214', '88.6 MB'),
'2012': HmdaDataFile('hmda_2012_ca_all-records_codes.zip', '2541978', '104.3 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ca_originated-records_codes.zip', '1172541', '46.78 MB'),
'2007': HmdaDataFile('hmda_2007_ca_originated-records_codes.zip', '1233502', '45.28 MB'),
'2017': HmdaDataFile('hmda_2017_ca_originated-records_codes.zip', '877753', '21.13 MB'),
'2015': HmdaDataFile('hmda_2015_ca_originated-records_codes.zip', '993335', '42.66 MB'),
'2014': HmdaDataFile('hmda_2014_ca_originated-records_codes.zip', '750422', '29.45 MB'),
'2008': HmdaDataFile('hmda_2008_ca_originated-records_codes.zip', '672822', '24.32 MB'),
'2009': HmdaDataFile('hmda_2009_ca_originated-records_codes.zip', '972974', '33.15 MB'),
'2011': HmdaDataFile('hmda_2011_ca_originated-records_codes.zip', '917070', '35.8 MB'),
'2010': HmdaDataFile('hmda_2010_ca_originated-records_codes.zip', '980348', '38.11 MB'),
'2013': HmdaDataFile('hmda_2013_ca_originated-records_codes.zip', '1153965', '46.26 MB'),
'2012': HmdaDataFile('hmda_2012_ca_originated-records_codes.zip', '1391720', '55.58 MB')
}
}
},
'nv': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '84481', '3.96 MB'),
'2007': HmdaDataFile('hmda_2007_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '73747', '3.21 MB'),
'2017': HmdaDataFile('hmda_2017_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '76783', '2.28 MB'),
'2015': HmdaDataFile('hmda_2015_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '69214', '3.49 MB'),
'2014': HmdaDataFile('hmda_2014_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '49799', '2.4 MB'),
'2008': HmdaDataFile('hmda_2008_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '48287', '1.93 MB'),
'2009': HmdaDataFile('hmda_2009_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '59437', '2.29 MB'),
'2011': HmdaDataFile('hmda_2011_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '39765', '1.65 MB'),
'2010': HmdaDataFile('hmda_2010_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '47126', '2.01 MB'),
'2013': HmdaDataFile('hmda_2013_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '62574', '2.95 MB'),
'2012': HmdaDataFile('hmda_2012_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '67783', '3.09 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nv_all-records_labels.zip', '196764', '9.88 MB'),
'2007': HmdaDataFile('hmda_2007_nv_all-records_labels.zip', '326985', '14.53 MB'),
'2017': HmdaDataFile('hmda_2017_nv_all-records_labels.zip', '178587', '5.91 MB'),
'2015': HmdaDataFile('hmda_2015_nv_all-records_labels.zip', '158259', '8.69 MB'),
'2014': HmdaDataFile('hmda_2014_nv_all-records_labels.zip', '119744', '6.27 MB'),
'2008': HmdaDataFile('hmda_2008_nv_all-records_labels.zip', '163606', '6.93 MB'),
'2009': HmdaDataFile('hmda_2009_nv_all-records_labels.zip', '168658', '6.88 MB'),
'2011': HmdaDataFile('hmda_2011_nv_all-records_labels.zip', '112858', '5.27 MB'),
'2010': HmdaDataFile('hmda_2010_nv_all-records_labels.zip', '126313', '5.98 MB'),
'2013': HmdaDataFile('hmda_2013_nv_all-records_labels.zip', '151453', '7.8 MB'),
'2012': HmdaDataFile('hmda_2012_nv_all-records_labels.zip', '161343', '8.12 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nv_originated-records_labels.zip', '97926', '4.69 MB'),
'2007': HmdaDataFile('hmda_2007_nv_originated-records_labels.zip', '113248', '5 MB'),
'2017': HmdaDataFile('hmda_2017_nv_originated-records_labels.zip', '91540', '2.8 MB'),
'2015': HmdaDataFile('hmda_2015_nv_originated-records_labels.zip', '81624', '4.21 MB'),
'2014': HmdaDataFile('hmda_2014_nv_originated-records_labels.zip', '61757', '3.04 MB'),
'2008': HmdaDataFile('hmda_2008_nv_originated-records_labels.zip', '62592', '2.58 MB'),
'2009': HmdaDataFile('hmda_2009_nv_originated-records_labels.zip', '70049', '2.77 MB'),
'2011': HmdaDataFile('hmda_2011_nv_originated-records_labels.zip', '51445', '2.24 MB'),
'2010': HmdaDataFile('hmda_2010_nv_originated-records_labels.zip', '57173', '2.51 MB'),
'2013': HmdaDataFile('hmda_2013_nv_originated-records_labels.zip', '82011', '3.96 MB'),
'2012': HmdaDataFile('hmda_2012_nv_originated-records_labels.zip', '86528', '4.04 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '84481', '2.78 MB'),
'2007': HmdaDataFile('hmda_2007_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '73747', '2.26 MB'),
'2017': HmdaDataFile('hmda_2017_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '76783', '1.59 MB'),
'2015': HmdaDataFile('hmda_2015_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '69214', '2.48 MB'),
'2014': HmdaDataFile('hmda_2014_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '49799', '1.68 MB'),
'2008': HmdaDataFile('hmda_2008_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '48287', '1.36 MB'),
'2009': HmdaDataFile('hmda_2009_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '59437', '1.63 MB'),
'2011': HmdaDataFile('hmda_2011_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '39765', '1.13 MB'),
'2010': HmdaDataFile('hmda_2010_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '47126', '1.37 MB'),
'2013': HmdaDataFile('hmda_2013_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '62574', '2.07 MB'),
'2012': HmdaDataFile('hmda_2012_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '67783', '2.17 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nv_all-records_codes.zip', '196764', '6.71 MB'),
'2007': HmdaDataFile('hmda_2007_nv_all-records_codes.zip', '326985', '9.91 MB'),
'2017': HmdaDataFile('hmda_2017_nv_all-records_codes.zip', '178587', '3.8 MB'),
'2015': HmdaDataFile('hmda_2015_nv_all-records_codes.zip', '158259', '5.91 MB'),
'2014': HmdaDataFile('hmda_2014_nv_all-records_codes.zip', '119744', '4.26 MB'),
'2008': HmdaDataFile('hmda_2008_nv_all-records_codes.zip', '163606', '4.74 MB'),
'2009': HmdaDataFile('hmda_2009_nv_all-records_codes.zip', '168658', '4.74 MB'),
'2011': HmdaDataFile('hmda_2011_nv_all-records_codes.zip', '112858', '3.48 MB'),
'2010': HmdaDataFile('hmda_2010_nv_all-records_codes.zip', '126313', '3.93 MB'),
'2013': HmdaDataFile('hmda_2013_nv_all-records_codes.zip', '151453', '5.33 MB'),
'2012': HmdaDataFile('hmda_2012_nv_all-records_codes.zip', '161343', '5.54 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nv_originated-records_codes.zip', '97926', '3.28 MB'),
'2007': HmdaDataFile('hmda_2007_nv_originated-records_codes.zip', '113248', '3.54 MB'),
'2017': HmdaDataFile('hmda_2017_nv_originated-records_codes.zip', '91540', '1.93 MB'),
'2015': HmdaDataFile('hmda_2015_nv_originated-records_codes.zip', '81624', '2.98 MB'),
'2014': HmdaDataFile('hmda_2014_nv_originated-records_codes.zip', '61757', '2.13 MB'),
'2008': HmdaDataFile('hmda_2008_nv_originated-records_codes.zip', '62592', '1.82 MB'),
'2009': HmdaDataFile('hmda_2009_nv_originated-records_codes.zip', '70049', '1.98 MB'),
'2011': HmdaDataFile('hmda_2011_nv_originated-records_codes.zip', '51445', '1.52 MB'),
'2010': HmdaDataFile('hmda_2010_nv_originated-records_codes.zip', '57173', '1.7 MB'),
'2013': HmdaDataFile('hmda_2013_nv_originated-records_codes.zip', '82011', '2.79 MB'),
'2012': HmdaDataFile('hmda_2012_nv_originated-records_codes.zip', '86528', '2.82 MB')
}
}
},
'pr': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '16754', '783.1 KB'),
'2007': HmdaDataFile('hmda_2007_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '35822', '1.56 MB'),
'2017': HmdaDataFile('hmda_2017_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '12257', '361.87 KB'),
'2015': HmdaDataFile('hmda_2015_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '18782', '893.63 KB'),
'2014': HmdaDataFile('hmda_2014_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '20640', '942.76 KB'),
'2008': HmdaDataFile('hmda_2008_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '36283', '1.5 MB'),
'2009': HmdaDataFile('hmda_2009_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '34669', '1.37 MB'),
'2011': HmdaDataFile('hmda_2011_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '25832', '1.11 MB'),
'2010': HmdaDataFile('hmda_2010_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '25342', '1.08 MB'),
'2013': HmdaDataFile('hmda_2013_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '30144', '1.35 MB'),
'2012': HmdaDataFile('hmda_2012_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '31083', '1.38 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_pr_all-records_labels.zip', '55699', '2.78 MB'),
'2007': HmdaDataFile('hmda_2007_pr_all-records_labels.zip', '141905', '6.23 MB'),
'2017': HmdaDataFile('hmda_2017_pr_all-records_labels.zip', '41775', '1.3 MB'),
'2015': HmdaDataFile('hmda_2015_pr_all-records_labels.zip', '58798', '2.92 MB'),
'2014': HmdaDataFile('hmda_2014_pr_all-records_labels.zip', '69716', '3.45 MB'),
'2008': HmdaDataFile('hmda_2008_pr_all-records_labels.zip', '121121', '5.16 MB'),
'2009': HmdaDataFile('hmda_2009_pr_all-records_labels.zip', '117907', '4.79 MB'),
'2011': HmdaDataFile('hmda_2011_pr_all-records_labels.zip', '85316', '3.93 MB'),
'2010': HmdaDataFile('hmda_2010_pr_all-records_labels.zip', '83046', '3.88 MB'),
'2013': HmdaDataFile('hmda_2013_pr_all-records_labels.zip', '96530', '4.63 MB'),
'2012': HmdaDataFile('hmda_2012_pr_all-records_labels.zip', '87162', '4.17 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_pr_originated-records_labels.zip', '26124', '1.23 MB'),
'2007': HmdaDataFile('hmda_2007_pr_originated-records_labels.zip', '57036', '2.51 MB'),
'2017': HmdaDataFile('hmda_2017_pr_originated-records_labels.zip', '19395', '576.04 KB'),
'2015': HmdaDataFile('hmda_2015_pr_originated-records_labels.zip', '28616', '1.37 MB'),
'2014': HmdaDataFile('hmda_2014_pr_originated-records_labels.zip', '30528', '1.42 MB'),
'2008': HmdaDataFile('hmda_2008_pr_originated-records_labels.zip', '50632', '2.14 MB'),
'2009': HmdaDataFile('hmda_2009_pr_originated-records_labels.zip', '46237', '1.86 MB'),
'2011': HmdaDataFile('hmda_2011_pr_originated-records_labels.zip', '37660', '1.66 MB'),
'2010': HmdaDataFile('hmda_2010_pr_originated-records_labels.zip', '35121', '1.55 MB'),
'2013': HmdaDataFile('hmda_2013_pr_originated-records_labels.zip', '40075', '1.8 MB'),
'2012': HmdaDataFile('hmda_2012_pr_originated-records_labels.zip', '40616', '1.81 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '16754', '536.87 KB'),
'2007': HmdaDataFile('hmda_2007_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '35822', '1.08 MB'),
'2017': HmdaDataFile('hmda_2017_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '12257', '255.16 KB'),
'2015': HmdaDataFile('hmda_2015_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '18782', '614.78 KB'),
'2014': HmdaDataFile('hmda_2014_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '20640', '647.5 KB'),
'2008': HmdaDataFile('hmda_2008_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '36283', '1.05 MB'),
'2009': HmdaDataFile('hmda_2009_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '34669', '973.6 KB'),
'2011': HmdaDataFile('hmda_2011_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '25832', '756.02 KB'),
'2010': HmdaDataFile('hmda_2010_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '25342', '737.14 KB'),
'2013': HmdaDataFile('hmda_2013_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '30144', '925.82 KB'),
'2012': HmdaDataFile('hmda_2012_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '31083', '942.8 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_pr_all-records_codes.zip', '55699', '1.82 MB'),
'2007': HmdaDataFile('hmda_2007_pr_all-records_codes.zip', '141905', '4.18 MB'),
'2017': HmdaDataFile('hmda_2017_pr_all-records_codes.zip', '41775', '873.01 KB'),
'2015': HmdaDataFile('hmda_2015_pr_all-records_codes.zip', '58798', '1.94 MB'),
'2014': HmdaDataFile('hmda_2014_pr_all-records_codes.zip', '69716', '2.27 MB'),
'2008': HmdaDataFile('hmda_2008_pr_all-records_codes.zip', '121121', '3.48 MB'),
'2009': HmdaDataFile('hmda_2009_pr_all-records_codes.zip', '117907', '3.28 MB'),
'2011': HmdaDataFile('hmda_2011_pr_all-records_codes.zip', '85316', '2.56 MB'),
'2010': HmdaDataFile('hmda_2010_pr_all-records_codes.zip', '83046', '2.54 MB'),
'2013': HmdaDataFile('hmda_2013_pr_all-records_codes.zip', '96530', '3.04 MB'),
'2012': HmdaDataFile('hmda_2012_pr_all-records_codes.zip', '87162', '2.75 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_pr_originated-records_codes.zip', '26124', '835.09 KB'),
'2007': HmdaDataFile('hmda_2007_pr_originated-records_codes.zip', '57036', '1.73 MB'),
'2017': HmdaDataFile('hmda_2017_pr_originated-records_codes.zip', '19395', '406.77 KB'),
'2015': HmdaDataFile('hmda_2015_pr_originated-records_codes.zip', '28616', '942.61 KB'),
'2014': HmdaDataFile('hmda_2014_pr_originated-records_codes.zip', '30528', '963.45 KB'),
'2008': HmdaDataFile('hmda_2008_pr_originated-records_codes.zip', '50632', '1.49 MB'),
'2009': HmdaDataFile('hmda_2009_pr_originated-records_codes.zip', '46237', '1.32 MB'),
'2011': HmdaDataFile('hmda_2011_pr_originated-records_codes.zip', '37660', '1.13 MB'),
'2010': HmdaDataFile('hmda_2010_pr_originated-records_codes.zip', '35121', '1.05 MB'),
'2013': HmdaDataFile('hmda_2013_pr_originated-records_codes.zip', '40075', '1.23 MB'),
'2012': HmdaDataFile('hmda_2012_pr_originated-records_codes.zip', '40616', '1.24 MB')
}
}
},
'de': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '21457', '907.27 KB'),
'2007': HmdaDataFile('hmda_2007_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '24785', '1.05 MB'),
'2017': HmdaDataFile('hmda_2017_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '18891', '540.64 KB'),
'2015': HmdaDataFile('hmda_2015_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '18365', '848.09 KB'),
'2014': HmdaDataFile('hmda_2014_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '14206', '642.68 KB'),
'2008': HmdaDataFile('hmda_2008_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '19350', '807.78 KB'),
'2009': HmdaDataFile('hmda_2009_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '26431', '1.05 MB'),
'2011': HmdaDataFile('hmda_2011_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '18557', '764.88 KB'),
'2010': HmdaDataFile('hmda_2010_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '21244', '862.31 KB'),
'2013': HmdaDataFile('hmda_2013_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '22603', '952.26 KB'),
'2012': HmdaDataFile('hmda_2012_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '25372', '1.06 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_de_all-records_labels.zip', '56002', '2.64 MB'),
'2007': HmdaDataFile('hmda_2007_de_all-records_labels.zip', '102001', '4.48 MB'),
'2017': HmdaDataFile('hmda_2017_de_all-records_labels.zip', '49695', '1.64 MB'),
'2015': HmdaDataFile('hmda_2015_de_all-records_labels.zip', '48176', '2.44 MB'),
'2014': HmdaDataFile('hmda_2014_de_all-records_labels.zip', '39784', '2 MB'),
'2008': HmdaDataFile('hmda_2008_de_all-records_labels.zip', '68856', '3.06 MB'),
'2009': HmdaDataFile('hmda_2009_de_all-records_labels.zip', '72398', '3.14 MB'),
'2011': HmdaDataFile('hmda_2011_de_all-records_labels.zip', '52039', '2.43 MB'),
'2010': HmdaDataFile('hmda_2010_de_all-records_labels.zip', '57559', '2.66 MB'),
'2013': HmdaDataFile('hmda_2013_de_all-records_labels.zip', '59314', '2.83 MB'),
'2012': HmdaDataFile('hmda_2012_de_all-records_labels.zip', '65069', '3.06 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_de_originated-records_labels.zip', '27296', '1.18 MB'),
'2007': HmdaDataFile('hmda_2007_de_originated-records_labels.zip', '40053', '1.69 MB'),
'2017': HmdaDataFile('hmda_2017_de_originated-records_labels.zip', '24719', '718.05 KB'),
'2015': HmdaDataFile('hmda_2015_de_originated-records_labels.zip', '24062', '1.14 MB'),
'2014': HmdaDataFile('hmda_2014_de_originated-records_labels.zip', '19181', '882.64 KB'),
'2008': HmdaDataFile('hmda_2008_de_originated-records_labels.zip', '27619', '1.17 MB'),
'2009': HmdaDataFile('hmda_2009_de_originated-records_labels.zip', '32995', '1.31 MB'),
'2011': HmdaDataFile('hmda_2011_de_originated-records_labels.zip', '24377', '1.01 MB'),
'2010': HmdaDataFile('hmda_2010_de_originated-records_labels.zip', '26997', '1.1 MB'),
'2013': HmdaDataFile('hmda_2013_de_originated-records_labels.zip', '30167', '1.28 MB'),
'2012': HmdaDataFile('hmda_2012_de_originated-records_labels.zip', '32868', '1.38 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '21457', '600.87 KB'),
'2007': HmdaDataFile('hmda_2007_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '24785', '699.02 KB'),
'2017': HmdaDataFile('hmda_2017_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '18891', '376.86 KB'),
'2015': HmdaDataFile('hmda_2015_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '18365', '563.01 KB'),
'2014': HmdaDataFile('hmda_2014_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '14206', '424.11 KB'),
'2008': HmdaDataFile('hmda_2008_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '19350', '544.41 KB'),
'2009': HmdaDataFile('hmda_2009_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '26431', '709.46 KB'),
'2011': HmdaDataFile('hmda_2011_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '18557', '497.81 KB'),
'2010': HmdaDataFile('hmda_2010_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '21244', '559.01 KB'),
'2013': HmdaDataFile('hmda_2013_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '22603', '621.51 KB'),
'2012': HmdaDataFile('hmda_2012_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '25372', '689 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_de_all-records_codes.zip', '56002', '1.68 MB'),
'2007': HmdaDataFile('hmda_2007_de_all-records_codes.zip', '102001', '2.91 MB'),
'2017': HmdaDataFile('hmda_2017_de_all-records_codes.zip', '49695', '1.09 MB'),
'2015': HmdaDataFile('hmda_2015_de_all-records_codes.zip', '48176', '1.54 MB'),
'2014': HmdaDataFile('hmda_2014_de_all-records_codes.zip', '39784', '1.26 MB'),
'2008': HmdaDataFile('hmda_2008_de_all-records_codes.zip', '68856', '2 MB'),
'2009': HmdaDataFile('hmda_2009_de_all-records_codes.zip', '72398', '2.07 MB'),
'2011': HmdaDataFile('hmda_2011_de_all-records_codes.zip', '52039', '1.52 MB'),
'2010': HmdaDataFile('hmda_2010_de_all-records_codes.zip', '57559', '1.66 MB'),
'2013': HmdaDataFile('hmda_2013_de_all-records_codes.zip', '59314', '1.77 MB'),
'2012': HmdaDataFile('hmda_2012_de_all-records_codes.zip', '65069', '1.92 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_de_originated-records_codes.zip', '27296', '769.27 KB'),
'2007': HmdaDataFile('hmda_2007_de_originated-records_codes.zip', '40053', '1.12 MB'),
'2017': HmdaDataFile('hmda_2017_de_originated-records_codes.zip', '24719', '494.64 KB'),
'2015': HmdaDataFile('hmda_2015_de_originated-records_codes.zip', '24062', '748.52 KB'),
'2014': HmdaDataFile('hmda_2014_de_originated-records_codes.zip', '19181', '575.97 KB'),
'2008': HmdaDataFile('hmda_2008_de_originated-records_codes.zip', '27619', '775.49 KB'),
'2009': HmdaDataFile('hmda_2009_de_originated-records_codes.zip', '32995', '882.98 KB'),
'2011': HmdaDataFile('hmda_2011_de_originated-records_codes.zip', '24377', '652.39 KB'),
'2010': HmdaDataFile('hmda_2010_de_originated-records_codes.zip', '26997', '709.17 KB'),
'2013': HmdaDataFile('hmda_2013_de_originated-records_codes.zip', '30167', '829.22 KB'),
'2012': HmdaDataFile('hmda_2012_de_originated-records_codes.zip', '32868', '897.42 KB')
}
}
},
'dc': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '15924', '720.67 KB'),
'2007': HmdaDataFile('hmda_2007_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '16676', '696.13 KB'),
'2017': HmdaDataFile('hmda_2017_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '12419', '384.38 KB'),
'2015': HmdaDataFile('hmda_2015_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '14290', '659.64 KB'),
'2014': HmdaDataFile('hmda_2014_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '11399', '518.92 KB'),
'2008': HmdaDataFile('hmda_2008_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '12148', '506.83 KB'),
'2009': HmdaDataFile('hmda_2009_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '18262', '731.02 KB'),
'2011': HmdaDataFile('hmda_2011_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '15280', '677.01 KB'),
'2010': HmdaDataFile('hmda_2010_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '16225', '712.21 KB'),
'2013': HmdaDataFile('hmda_2013_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '17499', '785.67 KB'),
'2012': HmdaDataFile('hmda_2012_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '20716', '908.46 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_dc_all-records_labels.zip', '38399', '1.87 MB'),
'2007': HmdaDataFile('hmda_2007_dc_all-records_labels.zip', '53480', '2.43 MB'),
'2017': HmdaDataFile('hmda_2017_dc_all-records_labels.zip', '30927', '1.07 MB'),
'2015': HmdaDataFile('hmda_2015_dc_all-records_labels.zip', '34958', '1.77 MB'),
'2014': HmdaDataFile('hmda_2014_dc_all-records_labels.zip', '28672', '1.42 MB'),
'2008': HmdaDataFile('hmda_2008_dc_all-records_labels.zip', '33505', '1.55 MB'),
'2009': HmdaDataFile('hmda_2009_dc_all-records_labels.zip', '43540', '1.9 MB'),
'2011': HmdaDataFile('hmda_2011_dc_all-records_labels.zip', '37657', '1.81 MB'),
'2010': HmdaDataFile('hmda_2010_dc_all-records_labels.zip', '38173', '1.84 MB'),
'2013': HmdaDataFile('hmda_2013_dc_all-records_labels.zip', '43020', '2.11 MB'),
'2012': HmdaDataFile('hmda_2012_dc_all-records_labels.zip', '48621', '2.34 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_dc_originated-records_labels.zip', '19324', '891.94 KB'),
'2007': HmdaDataFile('hmda_2007_dc_originated-records_labels.zip', '23948', '998.05 KB'),
'2017': HmdaDataFile('hmda_2017_dc_originated-records_labels.zip', '15414', '483 KB'),
'2015': HmdaDataFile('hmda_2015_dc_originated-records_labels.zip', '17821', '839.5 KB'),
'2014': HmdaDataFile('hmda_2014_dc_originated-records_labels.zip', '14547', '676.21 KB'),
'2008': HmdaDataFile('hmda_2008_dc_originated-records_labels.zip', '15212', '643.03 KB'),
'2009': HmdaDataFile('hmda_2009_dc_originated-records_labels.zip', '20287', '823.05 KB'),
'2011': HmdaDataFile('hmda_2011_dc_originated-records_labels.zip', '18123', '817.1 KB'),
'2010': HmdaDataFile('hmda_2010_dc_originated-records_labels.zip', '18511', '828.68 KB'),
'2013': HmdaDataFile('hmda_2013_dc_originated-records_labels.zip', '21877', '1 MB'),
'2012': HmdaDataFile('hmda_2012_dc_originated-records_labels.zip', '24842', '1.12 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '15924', '457.4 KB'),
'2007': HmdaDataFile('hmda_2007_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '16676', '454.07 KB'),
'2017': HmdaDataFile('hmda_2017_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '12419', '259.55 KB'),
'2015': HmdaDataFile('hmda_2015_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '14290', '415.87 KB'),
'2014': HmdaDataFile('hmda_2014_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '11399', '327.64 KB'),
'2008': HmdaDataFile('hmda_2008_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '12148', '331.88 KB'),
'2009': HmdaDataFile('hmda_2009_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '18262', '485.74 KB'),
'2011': HmdaDataFile('hmda_2011_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '15280', '423.44 KB'),
'2010': HmdaDataFile('hmda_2010_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '16225', '446.48 KB'),
'2013': HmdaDataFile('hmda_2013_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '17499', '493.75 KB'),
'2012': HmdaDataFile('hmda_2012_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '20716', '567.98 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_dc_all-records_codes.zip', '38399', '1.15 MB'),
'2007': HmdaDataFile('hmda_2007_dc_all-records_codes.zip', '53480', '1.55 MB'),
'2017': HmdaDataFile('hmda_2017_dc_all-records_codes.zip', '30927', '685.42 KB'),
'2015': HmdaDataFile('hmda_2015_dc_all-records_codes.zip', '34958', '1.07 MB'),
'2014': HmdaDataFile('hmda_2014_dc_all-records_codes.zip', '28672', '862.7 KB'),
'2008': HmdaDataFile('hmda_2008_dc_all-records_codes.zip', '33505', '982.73 KB'),
'2009': HmdaDataFile('hmda_2009_dc_all-records_codes.zip', '43540', '1.21 MB'),
'2011': HmdaDataFile('hmda_2011_dc_all-records_codes.zip', '37657', '1.1 MB'),
'2010': HmdaDataFile('hmda_2010_dc_all-records_codes.zip', '38173', '1.11 MB'),
'2013': HmdaDataFile('hmda_2013_dc_all-records_codes.zip', '43020', '1.28 MB'),
'2012': HmdaDataFile('hmda_2012_dc_all-records_codes.zip', '48621', '1.42 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_dc_originated-records_codes.zip', '19324', '561.87 KB'),
'2007': HmdaDataFile('hmda_2007_dc_originated-records_codes.zip', '23948', '647.59 KB'),
'2017': HmdaDataFile('hmda_2017_dc_originated-records_codes.zip', '15414', '321.38 KB'),
'2015': HmdaDataFile('hmda_2015_dc_originated-records_codes.zip', '17821', '524.36 KB'),
'2014': HmdaDataFile('hmda_2014_dc_originated-records_codes.zip', '14547', '423.92 KB'),
'2008': HmdaDataFile('hmda_2008_dc_originated-records_codes.zip', '15212', '416.59 KB'),
'2009': HmdaDataFile('hmda_2009_dc_originated-records_codes.zip', '20287', '542.45 KB'),
'2011': HmdaDataFile('hmda_2011_dc_originated-records_codes.zip', '18123', '507.21 KB'),
'2010': HmdaDataFile('hmda_2010_dc_originated-records_codes.zip', '18511', '514.35 KB'),
'2013': HmdaDataFile('hmda_2013_dc_originated-records_codes.zip', '21877', '626.77 KB'),
'2012': HmdaDataFile('hmda_2012_dc_originated-records_codes.zip', '24842', '691.5 KB')
}
}
},
'wi': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '141247', '6.63 MB'),
'2007': HmdaDataFile('hmda_2007_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '140939', '6.7 MB'),
'2017': HmdaDataFile('hmda_2017_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '118149', '3.66 MB'),
'2015': HmdaDataFile('hmda_2015_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '126112', '6.94 MB'),
'2014': HmdaDataFile('hmda_2014_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '99338', '5.08 MB'),
'2008': HmdaDataFile('hmda_2008_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '140787', '6.42 MB'),
'2009': HmdaDataFile('hmda_2009_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '233978', '9.45 MB'),
'2011': HmdaDataFile('hmda_2011_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '159084', '7.03 MB'),
'2010': HmdaDataFile('hmda_2010_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '197382', '8.86 MB'),
'2013': HmdaDataFile('hmda_2013_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '155945', '7.35 MB'),
'2012': HmdaDataFile('hmda_2012_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '219594', '10.08 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wi_all-records_labels.zip', '277224', '14.04 MB'),
'2007': HmdaDataFile('hmda_2007_wi_all-records_labels.zip', '460622', '22.04 MB'),
'2017': HmdaDataFile('hmda_2017_wi_all-records_labels.zip', '237542', '8.25 MB'),
'2015': HmdaDataFile('hmda_2015_wi_all-records_labels.zip', '250077', '14.91 MB'),
'2014': HmdaDataFile('hmda_2014_wi_all-records_labels.zip', '207239', '11.5 MB'),
'2008': HmdaDataFile('hmda_2008_wi_all-records_labels.zip', '359119', '16.92 MB'),
'2009': HmdaDataFile('hmda_2009_wi_all-records_labels.zip', '475760', '20.37 MB'),
'2011': HmdaDataFile('hmda_2011_wi_all-records_labels.zip', '324321', '15.85 MB'),
'2010': HmdaDataFile('hmda_2010_wi_all-records_labels.zip', '394638', '19.35 MB'),
'2013': HmdaDataFile('hmda_2013_wi_all-records_labels.zip', '306118', '15.66 MB'),
'2012': HmdaDataFile('hmda_2012_wi_all-records_labels.zip', '398029', '19.93 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wi_originated-records_labels.zip', '168678', '8.06 MB'),
'2007': HmdaDataFile('hmda_2007_wi_originated-records_labels.zip', '211916', '10.01 MB'),
'2017': HmdaDataFile('hmda_2017_wi_originated-records_labels.zip', '146251', '4.59 MB'),
'2015': HmdaDataFile('hmda_2015_wi_originated-records_labels.zip', '153515', '8.58 MB'),
'2014': HmdaDataFile('hmda_2014_wi_originated-records_labels.zip', '124916', '6.51 MB'),
'2008': HmdaDataFile('hmda_2008_wi_originated-records_labels.zip', '187234', '8.58 MB'),
'2009': HmdaDataFile('hmda_2009_wi_originated-records_labels.zip', '270190', '11.1 MB'),
'2011': HmdaDataFile('hmda_2011_wi_originated-records_labels.zip', '188288', '8.56 MB'),
'2010': HmdaDataFile('hmda_2010_wi_originated-records_labels.zip', '228537', '10.45 MB'),
'2013': HmdaDataFile('hmda_2013_wi_originated-records_labels.zip', '188524', '9.07 MB'),
'2012': HmdaDataFile('hmda_2012_wi_originated-records_labels.zip', '253728', '11.88 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '141247', '4.45 MB'),
'2007': HmdaDataFile('hmda_2007_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '140939', '4.6 MB'),
'2017': HmdaDataFile('hmda_2017_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '118149', '2.59 MB'),
'2015': HmdaDataFile('hmda_2015_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '126112', '4.69 MB'),
'2014': HmdaDataFile('hmda_2014_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '99338', '3.39 MB'),
'2008': HmdaDataFile('hmda_2008_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '140787', '4.45 MB'),
'2009': HmdaDataFile('hmda_2009_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '233978', '6.72 MB'),
'2011': HmdaDataFile('hmda_2011_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '159084', '4.64 MB'),
'2010': HmdaDataFile('hmda_2010_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '197382', '5.93 MB'),
'2013': HmdaDataFile('hmda_2013_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '155945', '4.93 MB'),
'2012': HmdaDataFile('hmda_2012_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '219594', '6.71 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wi_all-records_codes.zip', '277224', '9.08 MB'),
'2007': HmdaDataFile('hmda_2007_wi_all-records_codes.zip', '460622', '14.82 MB'),
'2017': HmdaDataFile('hmda_2017_wi_all-records_codes.zip', '237542', '5.41 MB'),
'2015': HmdaDataFile('hmda_2015_wi_all-records_codes.zip', '250077', '9.7 MB'),
'2014': HmdaDataFile('hmda_2014_wi_all-records_codes.zip', '207239', '7.39 MB'),
'2008': HmdaDataFile('hmda_2008_wi_all-records_codes.zip', '359119', '11.47 MB'),
'2009': HmdaDataFile('hmda_2009_wi_all-records_codes.zip', '475760', '14.08 MB'),
'2011': HmdaDataFile('hmda_2011_wi_all-records_codes.zip', '324321', '10.14 MB'),
'2010': HmdaDataFile('hmda_2010_wi_all-records_codes.zip', '394638', '12.55 MB'),
'2013': HmdaDataFile('hmda_2013_wi_all-records_codes.zip', '306118', '10.17 MB'),
'2012': HmdaDataFile('hmda_2012_wi_all-records_codes.zip', '398029', '12.88 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wi_originated-records_codes.zip', '168678', '5.37 MB'),
'2007': HmdaDataFile('hmda_2007_wi_originated-records_codes.zip', '211916', '6.85 MB'),
'2017': HmdaDataFile('hmda_2017_wi_originated-records_codes.zip', '146251', '3.19 MB'),
'2015': HmdaDataFile('hmda_2015_wi_originated-records_codes.zip', '153515', '5.74 MB'),
'2014': HmdaDataFile('hmda_2014_wi_originated-records_codes.zip', '124916', '4.3 MB'),
'2008': HmdaDataFile('hmda_2008_wi_originated-records_codes.zip', '187234', '5.9 MB'),
'2009': HmdaDataFile('hmda_2009_wi_originated-records_codes.zip', '270190', '7.86 MB'),
'2011': HmdaDataFile('hmda_2011_wi_originated-records_codes.zip', '188288', '5.6 MB'),
'2010': HmdaDataFile('hmda_2010_wi_originated-records_codes.zip', '228537', '6.95 MB'),
'2013': HmdaDataFile('hmda_2013_wi_originated-records_codes.zip', '188524', '6.03 MB'),
'2012': HmdaDataFile('hmda_2012_wi_originated-records_codes.zip', '253728', '7.85 MB')
}
}
},
'wv': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '23752', '1.15 MB'),
'2007': HmdaDataFile('hmda_2007_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '32932', '1.43 MB'),
'2017': HmdaDataFile('hmda_2017_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '22635', '775.09 KB'),
'2015': HmdaDataFile('hmda_2015_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '22216', '1.16 MB'),
'2014': HmdaDataFile('hmda_2014_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '20952', '925.27 KB'),
'2008': HmdaDataFile('hmda_2008_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '27892', '1.25 MB'),
'2009': HmdaDataFile('hmda_2009_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '31766', '1.38 MB'),
'2011': HmdaDataFile('hmda_2011_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '25662', '1.09 MB'),
'2010': HmdaDataFile('hmda_2010_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '26690', '1.13 MB'),
'2013': HmdaDataFile('hmda_2013_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '29841', '1.37 MB'),
'2012': HmdaDataFile('hmda_2012_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '30400', '1.36 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wv_all-records_labels.zip', '59932', '3.14 MB'),
'2007': HmdaDataFile('hmda_2007_wv_all-records_labels.zip', '108639', '4.82 MB'),
'2017': HmdaDataFile('hmda_2017_wv_all-records_labels.zip', '56407', '2.02 MB'),
'2015': HmdaDataFile('hmda_2015_wv_all-records_labels.zip', '56189', '3.18 MB'),
'2014': HmdaDataFile('hmda_2014_wv_all-records_labels.zip', '53804', '2.59 MB'),
'2008': HmdaDataFile('hmda_2008_wv_all-records_labels.zip', '82256', '3.81 MB'),
'2009': HmdaDataFile('hmda_2009_wv_all-records_labels.zip', '78726', '3.62 MB'),
'2011': HmdaDataFile('hmda_2011_wv_all-records_labels.zip', '65054', '3.02 MB'),
'2010': HmdaDataFile('hmda_2010_wv_all-records_labels.zip', '67127', '3.14 MB'),
'2013': HmdaDataFile('hmda_2013_wv_all-records_labels.zip', '71730', '3.58 MB'),
'2012': HmdaDataFile('hmda_2012_wv_all-records_labels.zip', '71668', '3.48 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wv_originated-records_labels.zip', '31084', '1.54 MB'),
'2007': HmdaDataFile('hmda_2007_wv_originated-records_labels.zip', '49942', '2.16 MB'),
'2017': HmdaDataFile('hmda_2017_wv_originated-records_labels.zip', '29490', '1.02 MB'),
'2015': HmdaDataFile('hmda_2015_wv_originated-records_labels.zip', '29892', '1.59 MB'),
'2014': HmdaDataFile('hmda_2014_wv_originated-records_labels.zip', '28288', '1.28 MB'),
'2008': HmdaDataFile('hmda_2008_wv_originated-records_labels.zip', '39173', '1.77 MB'),
'2009': HmdaDataFile('hmda_2009_wv_originated-records_labels.zip', '40090', '1.78 MB'),
'2011': HmdaDataFile('hmda_2011_wv_originated-records_labels.zip', '33089', '1.44 MB'),
'2010': HmdaDataFile('hmda_2010_wv_originated-records_labels.zip', '34262', '1.49 MB'),
'2013': HmdaDataFile('hmda_2013_wv_originated-records_labels.zip', '38630', '1.82 MB'),
'2012': HmdaDataFile('hmda_2012_wv_originated-records_labels.zip', '38585', '1.77 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '23752', '731.44 KB'),
'2007': HmdaDataFile('hmda_2007_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '32932', '927.05 KB'),
'2017': HmdaDataFile('hmda_2017_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '22635', '535.47 KB'),
'2015': HmdaDataFile('hmda_2015_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '22216', '750.99 KB'),
'2014': HmdaDataFile('hmda_2014_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '20952', '586.49 KB'),
'2008': HmdaDataFile('hmda_2008_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '27892', '805.27 KB'),
'2009': HmdaDataFile('hmda_2009_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '31766', '906.21 KB'),
'2011': HmdaDataFile('hmda_2011_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '25662', '682.61 KB'),
'2010': HmdaDataFile('hmda_2010_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '26690', '718.48 KB'),
'2013': HmdaDataFile('hmda_2013_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '29841', '869.85 KB'),
'2012': HmdaDataFile('hmda_2012_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '30400', '859.14 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wv_all-records_codes.zip', '59932', '1.9 MB'),
'2007': HmdaDataFile('hmda_2007_wv_all-records_codes.zip', '108639', '3.05 MB'),
'2017': HmdaDataFile('hmda_2017_wv_all-records_codes.zip', '56407', '1.32 MB'),
'2015': HmdaDataFile('hmda_2015_wv_all-records_codes.zip', '56189', '1.97 MB'),
'2014': HmdaDataFile('hmda_2014_wv_all-records_codes.zip', '53804', '1.57 MB'),
'2008': HmdaDataFile('hmda_2008_wv_all-records_codes.zip', '82256', '2.4 MB'),
'2009': HmdaDataFile('hmda_2009_wv_all-records_codes.zip', '78726', '2.31 MB'),
'2011': HmdaDataFile('hmda_2011_wv_all-records_codes.zip', '65054', '1.83 MB'),
'2010': HmdaDataFile('hmda_2010_wv_all-records_codes.zip', '67127', '1.91 MB'),
'2013': HmdaDataFile('hmda_2013_wv_all-records_codes.zip', '71730', '2.18 MB'),
'2012': HmdaDataFile('hmda_2012_wv_all-records_codes.zip', '71668', '2.12 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wv_originated-records_codes.zip', '31084', '966.37 KB'),
'2007': HmdaDataFile('hmda_2007_wv_originated-records_codes.zip', '49942', '1.39 MB'),
'2017': HmdaDataFile('hmda_2017_wv_originated-records_codes.zip', '29490', '694.72 KB'),
'2015': HmdaDataFile('hmda_2015_wv_originated-records_codes.zip', '29892', '1.02 MB'),
'2014': HmdaDataFile('hmda_2014_wv_originated-records_codes.zip', '28288', '806.54 KB'),
'2008': HmdaDataFile('hmda_2008_wv_originated-records_codes.zip', '39173', '1.14 MB'),
'2009': HmdaDataFile('hmda_2009_wv_originated-records_codes.zip', '40090', '1.16 MB'),
'2011': HmdaDataFile('hmda_2011_wv_originated-records_codes.zip', '33089', '896.24 KB'),
'2010': HmdaDataFile('hmda_2010_wv_originated-records_codes.zip', '34262', '933.86 KB'),
'2013': HmdaDataFile('hmda_2013_wv_originated-records_codes.zip', '38630', '1.14 MB'),
'2012': HmdaDataFile('hmda_2012_wv_originated-records_codes.zip', '38585', '1.11 MB')
}
}
},
'hi': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '25790', '1.11 MB'),
'2007': HmdaDataFile('hmda_2007_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '26443', '1.11 MB'),
'2017': HmdaDataFile('hmda_2017_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '20146', '641.16 KB'),
'2015': HmdaDataFile('hmda_2015_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '21679', '993.8 KB'),
'2014': HmdaDataFile('hmda_2014_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '16100', '691.11 KB'),
'2008': HmdaDataFile('hmda_2008_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '19577', '808.1 KB'),
'2009': HmdaDataFile('hmda_2009_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '31476', '1.18 MB'),
'2011': HmdaDataFile('hmda_2011_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '22003', '927.28 KB'),
'2010': HmdaDataFile('hmda_2010_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '24636', '1.02 MB'),
'2013': HmdaDataFile('hmda_2013_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '26347', '1.13 MB'),
'2012': HmdaDataFile('hmda_2012_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '31689', '1.34 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_hi_all-records_labels.zip', '57857', '2.66 MB'),
'2007': HmdaDataFile('hmda_2007_hi_all-records_labels.zip', '97609', '4.13 MB'),
'2017': HmdaDataFile('hmda_2017_hi_all-records_labels.zip', '44868', '1.52 MB'),
'2015': HmdaDataFile('hmda_2015_hi_all-records_labels.zip', '48255', '2.42 MB'),
'2014': HmdaDataFile('hmda_2014_hi_all-records_labels.zip', '39152', '1.81 MB'),
'2008': HmdaDataFile('hmda_2008_hi_all-records_labels.zip', '58044', '2.51 MB'),
'2009': HmdaDataFile('hmda_2009_hi_all-records_labels.zip', '72505', '2.88 MB'),
'2011': HmdaDataFile('hmda_2011_hi_all-records_labels.zip', '52036', '2.39 MB'),
'2010': HmdaDataFile('hmda_2010_hi_all-records_labels.zip', '57360', '2.61 MB'),
'2013': HmdaDataFile('hmda_2013_hi_all-records_labels.zip', '62718', '2.93 MB'),
'2012': HmdaDataFile('hmda_2012_hi_all-records_labels.zip', '69807', '3.22 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_hi_originated-records_labels.zip', '32822', '1.42 MB'),
'2007': HmdaDataFile('hmda_2007_hi_originated-records_labels.zip', '39270', '1.64 MB'),
'2017': HmdaDataFile('hmda_2017_hi_originated-records_labels.zip', '26553', '842.99 KB'),
'2015': HmdaDataFile('hmda_2015_hi_originated-records_labels.zip', '28778', '1.34 MB'),
'2014': HmdaDataFile('hmda_2014_hi_originated-records_labels.zip', '22377', '977.23 KB'),
'2008': HmdaDataFile('hmda_2008_hi_originated-records_labels.zip', '25770', '1.07 MB'),
'2009': HmdaDataFile('hmda_2009_hi_originated-records_labels.zip', '36594', '1.37 MB'),
'2011': HmdaDataFile('hmda_2011_hi_originated-records_labels.zip', '27526', '1.18 MB'),
'2010': HmdaDataFile('hmda_2010_hi_originated-records_labels.zip', '29807', '1.25 MB'),
'2013': HmdaDataFile('hmda_2013_hi_originated-records_labels.zip', '36581', '1.6 MB'),
'2012': HmdaDataFile('hmda_2012_hi_originated-records_labels.zip', '40668', '1.75 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '25790', '747.28 KB'),
'2007': HmdaDataFile('hmda_2007_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '26443', '769.57 KB'),
'2017': HmdaDataFile('hmda_2017_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '20146', '442.43 KB'),
'2015': HmdaDataFile('hmda_2015_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '21679', '682.34 KB'),
'2014': HmdaDataFile('hmda_2014_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '16100', '467.14 KB'),
'2008': HmdaDataFile('hmda_2008_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '19577', '565.45 KB'),
'2009': HmdaDataFile('hmda_2009_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '31476', '829.34 KB'),
'2011': HmdaDataFile('hmda_2011_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '22003', '617.15 KB'),
'2010': HmdaDataFile('hmda_2010_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '24636', '682.86 KB'),
'2013': HmdaDataFile('hmda_2013_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '26347', '767.75 KB'),
'2012': HmdaDataFile('hmda_2012_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '31689', '898.68 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_hi_all-records_codes.zip', '57857', '1.73 MB'),
'2007': HmdaDataFile('hmda_2007_hi_all-records_codes.zip', '97609', '2.81 MB'),
'2017': HmdaDataFile('hmda_2017_hi_all-records_codes.zip', '44868', '991.92 KB'),
'2015': HmdaDataFile('hmda_2015_hi_all-records_codes.zip', '48255', '1.59 MB'),
'2014': HmdaDataFile('hmda_2014_hi_all-records_codes.zip', '39152', '1.17 MB'),
'2008': HmdaDataFile('hmda_2008_hi_all-records_codes.zip', '58044', '1.7 MB'),
'2009': HmdaDataFile('hmda_2009_hi_all-records_codes.zip', '72505', '1.97 MB'),
'2011': HmdaDataFile('hmda_2011_hi_all-records_codes.zip', '52036', '1.54 MB'),
'2010': HmdaDataFile('hmda_2010_hi_all-records_codes.zip', '57360', '1.69 MB'),
'2013': HmdaDataFile('hmda_2013_hi_all-records_codes.zip', '62718', '1.9 MB'),
'2012': HmdaDataFile('hmda_2012_hi_all-records_codes.zip', '69807', '2.09 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_hi_originated-records_codes.zip', '32822', '955.73 KB'),
'2007': HmdaDataFile('hmda_2007_hi_originated-records_codes.zip', '39270', '1.15 MB'),
'2017': HmdaDataFile('hmda_2017_hi_originated-records_codes.zip', '26553', '577.19 KB'),
'2015': HmdaDataFile('hmda_2015_hi_originated-records_codes.zip', '28778', '909.22 KB'),
'2014': HmdaDataFile('hmda_2014_hi_originated-records_codes.zip', '22377', '650.88 KB'),
'2008': HmdaDataFile('hmda_2008_hi_originated-records_codes.zip', '25770', '741.44 KB'),
'2009': HmdaDataFile('hmda_2009_hi_originated-records_codes.zip', '36594', '946.61 KB'),
'2011': HmdaDataFile('hmda_2011_hi_originated-records_codes.zip', '27526', '777.03 KB'),
'2010': HmdaDataFile('hmda_2010_hi_originated-records_codes.zip', '29807', '833.67 KB'),
'2013': HmdaDataFile('hmda_2013_hi_originated-records_codes.zip', '36581', '1.06 MB'),
'2012': HmdaDataFile('hmda_2012_hi_originated-records_codes.zip', '40668', '1.17 MB')
}
}
},
'ok': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '61711', '2.99 MB'),
'2007': HmdaDataFile('hmda_2007_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '71771', '3.36 MB'),
'2017': HmdaDataFile('hmda_2017_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '56021', '1.71 MB'),
'2015': HmdaDataFile('hmda_2015_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '57409', '3.04 MB'),
'2014': HmdaDataFile('hmda_2014_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '53820', '2.56 MB'),
'2008': HmdaDataFile('hmda_2008_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '64839', '3.04 MB'),
'2009': HmdaDataFile('hmda_2009_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '83962', '3.72 MB'),
'2011': HmdaDataFile('hmda_2011_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '62083', '2.92 MB'),
'2010': HmdaDataFile('hmda_2010_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '68205', '3.24 MB'),
'2013': HmdaDataFile('hmda_2013_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '68275', '3.28 MB'),
'2012': HmdaDataFile('hmda_2012_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '77297', '3.66 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ok_all-records_labels.zip', '165463', '8.55 MB'),
'2007': HmdaDataFile('hmda_2007_ok_all-records_labels.zip', '250763', '12.15 MB'),
'2017': HmdaDataFile('hmda_2017_ok_all-records_labels.zip', '150838', '5.09 MB'),
'2015': HmdaDataFile('hmda_2015_ok_all-records_labels.zip', '152804', '8.67 MB'),
'2014': HmdaDataFile('hmda_2014_ok_all-records_labels.zip', '146824', '7.59 MB'),
'2008': HmdaDataFile('hmda_2008_ok_all-records_labels.zip', '194552', '9.58 MB'),
'2009': HmdaDataFile('hmda_2009_ok_all-records_labels.zip', '219393', '10.29 MB'),
'2011': HmdaDataFile('hmda_2011_ok_all-records_labels.zip', '163798', '8.6 MB'),
'2010': HmdaDataFile('hmda_2010_ok_all-records_labels.zip', '177023', '9.25 MB'),
'2013': HmdaDataFile('hmda_2013_ok_all-records_labels.zip', '180860', '9.5 MB'),
'2012': HmdaDataFile('hmda_2012_ok_all-records_labels.zip', '189005', '9.82 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ok_originated-records_labels.zip', '84730', '4.21 MB'),
'2007': HmdaDataFile('hmda_2007_ok_originated-records_labels.zip', '107739', '5.16 MB'),
'2017': HmdaDataFile('hmda_2017_ok_originated-records_labels.zip', '78302', '2.43 MB'),
'2015': HmdaDataFile('hmda_2015_ok_originated-records_labels.zip', '79611', '4.27 MB'),
'2014': HmdaDataFile('hmda_2014_ok_originated-records_labels.zip', '76088', '3.74 MB'),
'2008': HmdaDataFile('hmda_2008_ok_originated-records_labels.zip', '90679', '4.34 MB'),
'2009': HmdaDataFile('hmda_2009_ok_originated-records_labels.zip', '105102', '4.8 MB'),
'2011': HmdaDataFile('hmda_2011_ok_originated-records_labels.zip', '81562', '3.94 MB'),
'2010': HmdaDataFile('hmda_2010_ok_originated-records_labels.zip', '87420', '4.24 MB'),
'2013': HmdaDataFile('hmda_2013_ok_originated-records_labels.zip', '91830', '4.49 MB'),
'2012': HmdaDataFile('hmda_2012_ok_originated-records_labels.zip', '98582', '4.73 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '61711', '2.07 MB'),
'2007': HmdaDataFile('hmda_2007_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '71771', '2.36 MB'),
'2017': HmdaDataFile('hmda_2017_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '56021', '1.24 MB'),
'2015': HmdaDataFile('hmda_2015_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '57409', '2.11 MB'),
'2014': HmdaDataFile('hmda_2014_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '53820', '1.76 MB'),
'2008': HmdaDataFile('hmda_2008_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '64839', '2.14 MB'),
'2009': HmdaDataFile('hmda_2009_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '83962', '2.65 MB'),
'2011': HmdaDataFile('hmda_2011_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '62083', '1.97 MB'),
'2010': HmdaDataFile('hmda_2010_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '68205', '2.2 MB'),
'2013': HmdaDataFile('hmda_2013_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '68275', '2.25 MB'),
'2012': HmdaDataFile('hmda_2012_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '77297', '2.49 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ok_all-records_codes.zip', '165463', '5.67 MB'),
'2007': HmdaDataFile('hmda_2007_ok_all-records_codes.zip', '250763', '8.32 MB'),
'2017': HmdaDataFile('hmda_2017_ok_all-records_codes.zip', '150838', '3.38 MB'),
'2015': HmdaDataFile('hmda_2015_ok_all-records_codes.zip', '152804', '5.81 MB'),
'2014': HmdaDataFile('hmda_2014_ok_all-records_codes.zip', '146824', '5.04 MB'),
'2008': HmdaDataFile('hmda_2008_ok_all-records_codes.zip', '194552', '6.58 MB'),
'2009': HmdaDataFile('hmda_2009_ok_all-records_codes.zip', '219393', '7.12 MB'),
'2011': HmdaDataFile('hmda_2011_ok_all-records_codes.zip', '163798', '5.66 MB'),
'2010': HmdaDataFile('hmda_2010_ok_all-records_codes.zip', '177023', '6.13 MB'),
'2013': HmdaDataFile('hmda_2013_ok_all-records_codes.zip', '180860', '6.33 MB'),
'2012': HmdaDataFile('hmda_2012_ok_all-records_codes.zip', '189005', '6.53 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ok_originated-records_codes.zip', '84730', '2.89 MB'),
'2007': HmdaDataFile('hmda_2007_ok_originated-records_codes.zip', '107739', '3.62 MB'),
'2017': HmdaDataFile('hmda_2017_ok_originated-records_codes.zip', '78302', '1.73 MB'),
'2015': HmdaDataFile('hmda_2015_ok_originated-records_codes.zip', '79611', '2.95 MB'),
'2014': HmdaDataFile('hmda_2014_ok_originated-records_codes.zip', '76088', '2.55 MB'),
'2008': HmdaDataFile('hmda_2008_ok_originated-records_codes.zip', '90679', '3.04 MB'),
'2009': HmdaDataFile('hmda_2009_ok_originated-records_codes.zip', '105102', '3.39 MB'),
'2011': HmdaDataFile('hmda_2011_ok_originated-records_codes.zip', '81562', '2.64 MB'),
'2010': HmdaDataFile('hmda_2010_ok_originated-records_codes.zip', '87420', '2.87 MB'),
'2013': HmdaDataFile('hmda_2013_ok_originated-records_codes.zip', '91830', '3.05 MB'),
'2012': HmdaDataFile('hmda_2012_ok_originated-records_codes.zip', '98582', '3.19 MB')
}
}
},
'fl': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '415338', '21.43 MB'),
'2007': HmdaDataFile('hmda_2007_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '501111', '25.12 MB'),
'2017': HmdaDataFile('hmda_2017_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '396688', '13.19 MB'),
'2015': HmdaDataFile('hmda_2015_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '347883', '21.05 MB'),
'2014': HmdaDataFile('hmda_2014_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '272904', '13.99 MB'),
'2008': HmdaDataFile('hmda_2008_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '258117', '12.67 MB'),
'2009': HmdaDataFile('hmda_2009_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '263772', '12.4 MB'),
'2011': HmdaDataFile('hmda_2011_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '231239', '11.59 MB'),
'2010': HmdaDataFile('hmda_2010_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '240644', '12.01 MB'),
'2013': HmdaDataFile('hmda_2013_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '365896', '19.08 MB'),
'2012': HmdaDataFile('hmda_2012_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '373288', '19.2 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_fl_all-records_labels.zip', '1043942', '58 MB'),
'2007': HmdaDataFile('hmda_2007_fl_all-records_labels.zip', '2006660', '102.81 MB'),
'2017': HmdaDataFile('hmda_2017_fl_all-records_labels.zip', '1018763', '38.28 MB'),
'2015': HmdaDataFile('hmda_2015_fl_all-records_labels.zip', '893206', '57.89 MB'),
'2014': HmdaDataFile('hmda_2014_fl_all-records_labels.zip', '732825', '40.79 MB'),
'2008': HmdaDataFile('hmda_2008_fl_all-records_labels.zip', '962944', '49.47 MB'),
'2009': HmdaDataFile('hmda_2009_fl_all-records_labels.zip', '806975', '40.26 MB'),
'2011': HmdaDataFile('hmda_2011_fl_all-records_labels.zip', '647776', '35.66 MB'),
'2010': HmdaDataFile('hmda_2010_fl_all-records_labels.zip', '675688', '37.38 MB'),
'2013': HmdaDataFile('hmda_2013_fl_all-records_labels.zip', '948672', '53.81 MB'),
'2012': HmdaDataFile('hmda_2012_fl_all-records_labels.zip', '919923', '51.66 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_fl_originated-records_labels.zip', '506394', '26.46 MB'),
'2007': HmdaDataFile('hmda_2007_fl_originated-records_labels.zip', '735174', '36.57 MB'),
'2017': HmdaDataFile('hmda_2017_fl_originated-records_labels.zip', '492702', '16.61 MB'),
'2015': HmdaDataFile('hmda_2015_fl_originated-records_labels.zip', '434779', '26.78 MB'),
'2014': HmdaDataFile('hmda_2014_fl_originated-records_labels.zip', '349696', '18.14 MB'),
'2008': HmdaDataFile('hmda_2008_fl_originated-records_labels.zip', '344859', '17 MB'),
'2009': HmdaDataFile('hmda_2009_fl_originated-records_labels.zip', '318689', '15.06 MB'),
'2011': HmdaDataFile('hmda_2011_fl_originated-records_labels.zip', '288632', '14.64 MB'),
'2010': HmdaDataFile('hmda_2010_fl_originated-records_labels.zip', '292476', '14.8 MB'),
'2013': HmdaDataFile('hmda_2013_fl_originated-records_labels.zip', '467201', '24.7 MB'),
'2012': HmdaDataFile('hmda_2012_fl_originated-records_labels.zip', '462049', '24.13 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '415338', '15.17 MB'),
'2007': HmdaDataFile('hmda_2007_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '501111', '17.95 MB'),
'2017': HmdaDataFile('hmda_2017_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '396688', '9.14 MB'),
'2015': HmdaDataFile('hmda_2015_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '347883', '14.62 MB'),
'2014': HmdaDataFile('hmda_2014_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '272904', '9.87 MB'),
'2008': HmdaDataFile('hmda_2008_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '258117', '8.93 MB'),
'2009': HmdaDataFile('hmda_2009_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '263772', '8.88 MB'),
'2011': HmdaDataFile('hmda_2011_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '231239', '8 MB'),
'2010': HmdaDataFile('hmda_2010_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '240644', '8.28 MB'),
'2013': HmdaDataFile('hmda_2013_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '365896', '13.41 MB'),
'2012': HmdaDataFile('hmda_2012_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '373288', '13.48 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_fl_all-records_codes.zip', '1043942', '40.03 MB'),
'2007': HmdaDataFile('hmda_2007_fl_all-records_codes.zip', '2006660', '71.9 MB'),
'2017': HmdaDataFile('hmda_2017_fl_all-records_codes.zip', '1018763', '24.91 MB'),
'2015': HmdaDataFile('hmda_2015_fl_all-records_codes.zip', '893206', '38.26 MB'),
'2014': HmdaDataFile('hmda_2014_fl_all-records_codes.zip', '732825', '28.16 MB'),
'2008': HmdaDataFile('hmda_2008_fl_all-records_codes.zip', '962944', '34.41 MB'),
'2009': HmdaDataFile('hmda_2009_fl_all-records_codes.zip', '806975', '28.11 MB'),
'2011': HmdaDataFile('hmda_2011_fl_all-records_codes.zip', '647776', '23.83 MB'),
'2010': HmdaDataFile('hmda_2010_fl_all-records_codes.zip', '675688', '24.98 MB'),
'2013': HmdaDataFile('hmda_2013_fl_all-records_codes.zip', '948672', '36.68 MB'),
'2012': HmdaDataFile('hmda_2012_fl_all-records_codes.zip', '919923', '35.16 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_fl_originated-records_codes.zip', '506394', '18.65 MB'),
'2007': HmdaDataFile('hmda_2007_fl_originated-records_codes.zip', '735174', '26.09 MB'),
'2017': HmdaDataFile('hmda_2017_fl_originated-records_codes.zip', '492702', '11.34 MB'),
'2015': HmdaDataFile('hmda_2015_fl_originated-records_codes.zip', '434779', '18.42 MB'),
'2014': HmdaDataFile('hmda_2014_fl_originated-records_codes.zip', '349696', '12.69 MB'),
'2008': HmdaDataFile('hmda_2008_fl_originated-records_codes.zip', '344859', '11.94 MB'),
'2009': HmdaDataFile('hmda_2009_fl_originated-records_codes.zip', '318689', '10.73 MB'),
'2011': HmdaDataFile('hmda_2011_fl_originated-records_codes.zip', '288632', '10.03 MB'),
'2010': HmdaDataFile('hmda_2010_fl_originated-records_codes.zip', '292476', '10.16 MB'),
'2013': HmdaDataFile('hmda_2013_fl_originated-records_codes.zip', '467201', '17.24 MB'),
'2012': HmdaDataFile('hmda_2012_fl_originated-records_codes.zip', '462049', '16.84 MB')
}
}
},
'wy': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '12342', '483.6 KB'),
'2007': HmdaDataFile('hmda_2007_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '13857', '521.55 KB'),
'2017': HmdaDataFile('hmda_2017_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '10510', '270.62 KB'),
'2015': HmdaDataFile('hmda_2015_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '11880', '503.22 KB'),
'2014': HmdaDataFile('hmda_2014_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '10422', '425.8 KB'),
'2008': HmdaDataFile('hmda_2008_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '12387', '474.69 KB'),
'2009': HmdaDataFile('hmda_2009_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '16912', '594.41 KB'),
'2011': HmdaDataFile('hmda_2011_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '11423', '470.67 KB'),
'2010': HmdaDataFile('hmda_2010_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '12694', '489.69 KB'),
'2013': HmdaDataFile('hmda_2013_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '14178', '537.95 KB'),
'2012': HmdaDataFile('hmda_2012_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '15410', '568.4 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wy_all-records_labels.zip', '30163', '1.29 MB'),
'2007': HmdaDataFile('hmda_2007_wy_all-records_labels.zip', '48234', '1.94 MB'),
'2017': HmdaDataFile('hmda_2017_wy_all-records_labels.zip', '26154', '743.51 KB'),
'2015': HmdaDataFile('hmda_2015_wy_all-records_labels.zip', '28641', '1.34 MB'),
'2014': HmdaDataFile('hmda_2014_wy_all-records_labels.zip', '25049', '1.13 MB'),
'2008': HmdaDataFile('hmda_2008_wy_all-records_labels.zip', '35748', '1.45 MB'),
'2009': HmdaDataFile('hmda_2009_wy_all-records_labels.zip', '41659', '1.59 MB'),
'2011': HmdaDataFile('hmda_2011_wy_all-records_labels.zip', '28465', '1.31 MB'),
'2010': HmdaDataFile('hmda_2010_wy_all-records_labels.zip', '32035', '1.37 MB'),
'2013': HmdaDataFile('hmda_2013_wy_all-records_labels.zip', '32956', '1.37 MB'),
'2012': HmdaDataFile('hmda_2012_wy_all-records_labels.zip', '34092', '1.4 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wy_originated-records_labels.zip', '15484', '616.35 KB'),
'2007': HmdaDataFile('hmda_2007_wy_originated-records_labels.zip', '21052', '815.34 KB'),
'2017': HmdaDataFile('hmda_2017_wy_originated-records_labels.zip', '13702', '356.27 KB'),
'2015': HmdaDataFile('hmda_2015_wy_originated-records_labels.zip', '15107', '651.6 KB'),
'2014': HmdaDataFile('hmda_2014_wy_originated-records_labels.zip', '13556', '566.41 KB'),
'2008': HmdaDataFile('hmda_2008_wy_originated-records_labels.zip', '16892', '658.73 KB'),
'2009': HmdaDataFile('hmda_2009_wy_originated-records_labels.zip', '20290', '727.57 KB'),
'2011': HmdaDataFile('hmda_2011_wy_originated-records_labels.zip', '14507', '613.35 KB'),
'2010': HmdaDataFile('hmda_2010_wy_originated-records_labels.zip', '15602', '616.08 KB'),
'2013': HmdaDataFile('hmda_2013_wy_originated-records_labels.zip', '17900', '691.83 KB'),
'2012': HmdaDataFile('hmda_2012_wy_originated-records_labels.zip', '19114', '723.04 KB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '12342', '312.27 KB'),
'2007': HmdaDataFile('hmda_2007_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '13857', '338.82 KB'),
'2017': HmdaDataFile('hmda_2017_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '10510', '187.36 KB'),
'2015': HmdaDataFile('hmda_2015_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '11880', '319.17 KB'),
'2014': HmdaDataFile('hmda_2014_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '10422', '275.68 KB'),
'2008': HmdaDataFile('hmda_2008_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '12387', '309.95 KB'),
'2009': HmdaDataFile('hmda_2009_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '16912', '397.25 KB'),
'2011': HmdaDataFile('hmda_2011_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '11423', '302.39 KB'),
'2010': HmdaDataFile('hmda_2010_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '12694', '313.52 KB'),
'2013': HmdaDataFile('hmda_2013_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '14178', '348.1 KB'),
'2012': HmdaDataFile('hmda_2012_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '15410', '368.87 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wy_all-records_codes.zip', '30163', '802.03 KB'),
'2007': HmdaDataFile('hmda_2007_wy_all-records_codes.zip', '48234', '1.23 MB'),
'2017': HmdaDataFile('hmda_2017_wy_all-records_codes.zip', '26154', '481.64 KB'),
'2015': HmdaDataFile('hmda_2015_wy_all-records_codes.zip', '28641', '812.32 KB'),
'2014': HmdaDataFile('hmda_2014_wy_all-records_codes.zip', '25049', '695.65 KB'),
'2008': HmdaDataFile('hmda_2008_wy_all-records_codes.zip', '35748', '923.02 KB'),
'2009': HmdaDataFile('hmda_2009_wy_all-records_codes.zip', '41659', '1.02 MB'),
'2011': HmdaDataFile('hmda_2011_wy_all-records_codes.zip', '28465', '807.3 KB'),
'2010': HmdaDataFile('hmda_2010_wy_all-records_codes.zip', '32035', '844.63 KB'),
'2013': HmdaDataFile('hmda_2013_wy_all-records_codes.zip', '32956', '857.37 KB'),
'2012': HmdaDataFile('hmda_2012_wy_all-records_codes.zip', '34092', '878.55 KB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wy_originated-records_codes.zip', '15484', '395.6 KB'),
'2007': HmdaDataFile('hmda_2007_wy_originated-records_codes.zip', '21052', '528.23 KB'),
'2017': HmdaDataFile('hmda_2017_wy_originated-records_codes.zip', '13702', '246.43 KB'),
'2015': HmdaDataFile('hmda_2015_wy_originated-records_codes.zip', '15107', '410.78 KB'),
'2014': HmdaDataFile('hmda_2014_wy_originated-records_codes.zip', '13556', '363.31 KB'),
'2008': HmdaDataFile('hmda_2008_wy_originated-records_codes.zip', '16892', '430.54 KB'),
'2009': HmdaDataFile('hmda_2009_wy_originated-records_codes.zip', '20290', '484.44 KB'),
'2011': HmdaDataFile('hmda_2011_wy_originated-records_codes.zip', '14507', '390.89 KB'),
'2010': HmdaDataFile('hmda_2010_wy_originated-records_codes.zip', '15602', '393.56 KB'),
'2013': HmdaDataFile('hmda_2013_wy_originated-records_codes.zip', '17900', '445.73 KB'),
'2012': HmdaDataFile('hmda_2012_wy_originated-records_codes.zip', '19114', '467.43 KB')
}
}
},
'me': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '25576', '1.12 MB'),
'2007': HmdaDataFile('hmda_2007_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '28469', '1.25 MB'),
'2017': HmdaDataFile('hmda_2017_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '23089', '660.63 KB'),
'2015': HmdaDataFile('hmda_2015_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '22073', '1.06 MB'),
'2014': HmdaDataFile('hmda_2014_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '18699', '819.03 KB'),
'2008': HmdaDataFile('hmda_2008_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '23318', '1.01 MB'),
'2009': HmdaDataFile('hmda_2009_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '35977', '1.45 MB'),
'2011': HmdaDataFile('hmda_2011_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '24858', '1.05 MB'),
'2010': HmdaDataFile('hmda_2010_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '29685', '1.3 MB'),
'2013': HmdaDataFile('hmda_2013_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '28892', '1.25 MB'),
'2012': HmdaDataFile('hmda_2012_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '32810', '1.4 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_me_all-records_labels.zip', '64142', '3.02 MB'),
'2007': HmdaDataFile('hmda_2007_me_all-records_labels.zip', '102877', '4.79 MB'),
'2017': HmdaDataFile('hmda_2017_me_all-records_labels.zip', '58188', '1.77 MB'),
'2015': HmdaDataFile('hmda_2015_me_all-records_labels.zip', '56450', '2.96 MB'),
'2014': HmdaDataFile('hmda_2014_me_all-records_labels.zip', '50231', '2.36 MB'),
'2008': HmdaDataFile('hmda_2008_me_all-records_labels.zip', '74281', '3.56 MB'),
'2009': HmdaDataFile('hmda_2009_me_all-records_labels.zip', '88765', '4 MB'),
'2011': HmdaDataFile('hmda_2011_me_all-records_labels.zip', '65903', '3.22 MB'),
'2010': HmdaDataFile('hmda_2010_me_all-records_labels.zip', '75911', '3.76 MB'),
'2013': HmdaDataFile('hmda_2013_me_all-records_labels.zip', '71008', '3.35 MB'),
'2012': HmdaDataFile('hmda_2012_me_all-records_labels.zip', '79176', '3.7 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_me_originated-records_labels.zip', '33761', '1.53 MB'),
'2007': HmdaDataFile('hmda_2007_me_originated-records_labels.zip', '44333', '2.01 MB'),
'2017': HmdaDataFile('hmda_2017_me_originated-records_labels.zip', '31503', '929.28 KB'),
'2015': HmdaDataFile('hmda_2015_me_originated-records_labels.zip', '29602', '1.45 MB'),
'2014': HmdaDataFile('hmda_2014_me_originated-records_labels.zip', '25962', '1.19 MB'),
'2008': HmdaDataFile('hmda_2008_me_originated-records_labels.zip', '34638', '1.54 MB'),
'2009': HmdaDataFile('hmda_2009_me_originated-records_labels.zip', '45092', '1.88 MB'),
'2011': HmdaDataFile('hmda_2011_me_originated-records_labels.zip', '32181', '1.43 MB'),
'2010': HmdaDataFile('hmda_2010_me_originated-records_labels.zip', '37475', '1.73 MB'),
'2013': HmdaDataFile('hmda_2013_me_originated-records_labels.zip', '37720', '1.67 MB'),
'2012': HmdaDataFile('hmda_2012_me_originated-records_labels.zip', '41515', '1.82 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '25576', '725.36 KB'),
'2007': HmdaDataFile('hmda_2007_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '28469', '820.97 KB'),
'2017': HmdaDataFile('hmda_2017_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '23089', '468.44 KB'),
'2015': HmdaDataFile('hmda_2015_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '22073', '702.08 KB'),
'2014': HmdaDataFile('hmda_2014_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '18699', '527.54 KB'),
'2008': HmdaDataFile('hmda_2008_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '23318', '660.77 KB'),
'2009': HmdaDataFile('hmda_2009_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '35977', '975.94 KB'),
'2011': HmdaDataFile('hmda_2011_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '24858', '674.23 KB'),
'2010': HmdaDataFile('hmda_2010_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '29685', '841.65 KB'),
'2013': HmdaDataFile('hmda_2013_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '28892', '797.28 KB'),
'2012': HmdaDataFile('hmda_2012_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '32810', '894.15 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_me_all-records_codes.zip', '64142', '1.89 MB'),
'2007': HmdaDataFile('hmda_2007_me_all-records_codes.zip', '102877', '3.1 MB'),
'2017': HmdaDataFile('hmda_2017_me_all-records_codes.zip', '58188', '1.16 MB'),
'2015': HmdaDataFile('hmda_2015_me_all-records_codes.zip', '56450', '1.88 MB'),
'2014': HmdaDataFile('hmda_2014_me_all-records_codes.zip', '50231', '1.46 MB'),
'2008': HmdaDataFile('hmda_2008_me_all-records_codes.zip', '74281', '2.32 MB'),
'2009': HmdaDataFile('hmda_2009_me_all-records_codes.zip', '88765', '2.65 MB'),
'2011': HmdaDataFile('hmda_2011_me_all-records_codes.zip', '65903', '2.01 MB'),
'2010': HmdaDataFile('hmda_2010_me_all-records_codes.zip', '75911', '2.35 MB'),
'2013': HmdaDataFile('hmda_2013_me_all-records_codes.zip', '71008', '2.07 MB'),
'2012': HmdaDataFile('hmda_2012_me_all-records_codes.zip', '79176', '2.29 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_me_originated-records_codes.zip', '33761', '987 KB'),
'2007': HmdaDataFile('hmda_2007_me_originated-records_codes.zip', '44333', '1.33 MB'),
'2017': HmdaDataFile('hmda_2017_me_originated-records_codes.zip', '31503', '654.38 KB'),
'2015': HmdaDataFile('hmda_2015_me_originated-records_codes.zip', '29602', '954.33 KB'),
'2014': HmdaDataFile('hmda_2014_me_originated-records_codes.zip', '25962', '765.27 KB'),
'2008': HmdaDataFile('hmda_2008_me_originated-records_codes.zip', '34638', '1.02 MB'),
'2009': HmdaDataFile('hmda_2009_me_originated-records_codes.zip', '45092', '1.26 MB'),
'2011': HmdaDataFile('hmda_2011_me_originated-records_codes.zip', '32181', '909.63 KB'),
'2010': HmdaDataFile('hmda_2010_me_originated-records_codes.zip', '37475', '1.11 MB'),
'2013': HmdaDataFile('hmda_2013_me_originated-records_codes.zip', '37720', '1.06 MB'),
'2012': HmdaDataFile('hmda_2012_me_originated-records_codes.zip', '41515', '1.15 MB')
}
}
},
'md': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '151314', '7.51 MB'),
'2007': HmdaDataFile('hmda_2007_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '189965', '9.14 MB'),
'2017': HmdaDataFile('hmda_2017_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '124206', '4 MB'),
'2015': HmdaDataFile('hmda_2015_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '131786', '7.26 MB'),
'2014': HmdaDataFile('hmda_2014_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '98965', '5.23 MB'),
'2008': HmdaDataFile('hmda_2008_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '133536', '6.34 MB'),
'2009': HmdaDataFile('hmda_2009_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '193122', '8.53 MB'),
'2011': HmdaDataFile('hmda_2011_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '142588', '7.11 MB'),
'2010': HmdaDataFile('hmda_2010_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '166872', '8.22 MB'),
'2013': HmdaDataFile('hmda_2013_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '161491', '8.05 MB'),
'2012': HmdaDataFile('hmda_2012_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '195908', '9.85 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_md_all-records_labels.zip', '358958', '19.06 MB'),
'2007': HmdaDataFile('hmda_2007_md_all-records_labels.zip', '656616', '31.96 MB'),
'2017': HmdaDataFile('hmda_2017_md_all-records_labels.zip', '301879', '10.75 MB'),
'2015': HmdaDataFile('hmda_2015_md_all-records_labels.zip', '316012', '18.68 MB'),
'2014': HmdaDataFile('hmda_2014_md_all-records_labels.zip', '247561', '14.04 MB'),
'2008': HmdaDataFile('hmda_2008_md_all-records_labels.zip', '393039', '19.26 MB'),
'2009': HmdaDataFile('hmda_2009_md_all-records_labels.zip', '467697', '21.73 MB'),
'2011': HmdaDataFile('hmda_2011_md_all-records_labels.zip', '347645', '18.89 MB'),
'2010': HmdaDataFile('hmda_2010_md_all-records_labels.zip', '385128', '20.7 MB'),
'2013': HmdaDataFile('hmda_2013_md_all-records_labels.zip', '385383', '20.95 MB'),
'2012': HmdaDataFile('hmda_2012_md_all-records_labels.zip', '439566', '23.91 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_md_originated-records_labels.zip', '171556', '8.64 MB'),
'2007': HmdaDataFile('hmda_2007_md_originated-records_labels.zip', '261984', '12.73 MB'),
'2017': HmdaDataFile('hmda_2017_md_originated-records_labels.zip', '144610', '4.78 MB'),
'2015': HmdaDataFile('hmda_2015_md_originated-records_labels.zip', '152541', '8.53 MB'),
'2014': HmdaDataFile('hmda_2014_md_originated-records_labels.zip', '118429', '6.33 MB'),
'2008': HmdaDataFile('hmda_2008_md_originated-records_labels.zip', '162234', '7.83 MB'),
'2009': HmdaDataFile('hmda_2009_md_originated-records_labels.zip', '210794', '9.48 MB'),
'2011': HmdaDataFile('hmda_2011_md_originated-records_labels.zip', '159707', '8.13 MB'),
'2010': HmdaDataFile('hmda_2010_md_originated-records_labels.zip', '182102', '9.04 MB'),
'2013': HmdaDataFile('hmda_2013_md_originated-records_labels.zip', '187825', '9.46 MB'),
'2012': HmdaDataFile('hmda_2012_md_originated-records_labels.zip', '219387', '11.11 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '151314', '5.32 MB'),
'2007': HmdaDataFile('hmda_2007_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '189965', '6.47 MB'),
'2017': HmdaDataFile('hmda_2017_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '124206', '2.79 MB'),
'2015': HmdaDataFile('hmda_2015_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '131786', '5.19 MB'),
'2014': HmdaDataFile('hmda_2014_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '98965', '3.67 MB'),
'2008': HmdaDataFile('hmda_2008_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '133536', '4.46 MB'),
'2009': HmdaDataFile('hmda_2009_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '193122', '6.08 MB'),
'2011': HmdaDataFile('hmda_2011_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '142588', '4.91 MB'),
'2010': HmdaDataFile('hmda_2010_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '166872', '5.7 MB'),
'2013': HmdaDataFile('hmda_2013_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '161491', '5.68 MB'),
'2012': HmdaDataFile('hmda_2012_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '195908', '6.88 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_md_all-records_codes.zip', '358958', '13.18 MB'),
'2007': HmdaDataFile('hmda_2007_md_all-records_codes.zip', '656616', '22.08 MB'),
'2017': HmdaDataFile('hmda_2017_md_all-records_codes.zip', '301879', '6.99 MB'),
'2015': HmdaDataFile('hmda_2015_md_all-records_codes.zip', '316012', '12.83 MB'),
'2014': HmdaDataFile('hmda_2014_md_all-records_codes.zip', '247561', '9.61 MB'),
'2008': HmdaDataFile('hmda_2008_md_all-records_codes.zip', '393039', '13.28 MB'),
'2009': HmdaDataFile('hmda_2009_md_all-records_codes.zip', '467697', '15.2 MB'),
'2011': HmdaDataFile('hmda_2011_md_all-records_codes.zip', '347645', '12.83 MB'),
'2010': HmdaDataFile('hmda_2010_md_all-records_codes.zip', '385128', '14.08 MB'),
'2013': HmdaDataFile('hmda_2013_md_all-records_codes.zip', '385383', '14.56 MB'),
'2012': HmdaDataFile('hmda_2012_md_all-records_codes.zip', '439566', '16.52 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_md_originated-records_codes.zip', '171556', '6.1 MB'),
'2007': HmdaDataFile('hmda_2007_md_originated-records_codes.zip', '261984', '9.04 MB'),
'2017': HmdaDataFile('hmda_2017_md_originated-records_codes.zip', '144610', '3.3 MB'),
'2015': HmdaDataFile('hmda_2015_md_originated-records_codes.zip', '152541', '6.06 MB'),
'2014': HmdaDataFile('hmda_2014_md_originated-records_codes.zip', '118429', '4.41 MB'),
'2008': HmdaDataFile('hmda_2008_md_originated-records_codes.zip', '162234', '5.51 MB'),
'2009': HmdaDataFile('hmda_2009_md_originated-records_codes.zip', '210794', '6.75 MB'),
'2011': HmdaDataFile('hmda_2011_md_originated-records_codes.zip', '159707', '5.59 MB'),
'2010': HmdaDataFile('hmda_2010_md_originated-records_codes.zip', '182102', '6.24 MB'),
'2013': HmdaDataFile('hmda_2013_md_originated-records_codes.zip', '187825', '6.64 MB'),
'2012': HmdaDataFile('hmda_2012_md_originated-records_codes.zip', '219387', '7.7 MB')
}
}
},
'ma': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '168663', '8.43 MB'),
'2007': HmdaDataFile('hmda_2007_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '148532', '7.1 MB'),
'2017': HmdaDataFile('hmda_2017_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '129891', '4.16 MB'),
'2015': HmdaDataFile('hmda_2015_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '144027', '7.64 MB'),
'2014': HmdaDataFile('hmda_2014_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '109057', '5.41 MB'),
'2008': HmdaDataFile('hmda_2008_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '126855', '6.11 MB'),
'2009': HmdaDataFile('hmda_2009_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '229420', '10.04 MB'),
'2011': HmdaDataFile('hmda_2011_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '177890', '8.46 MB'),
'2010': HmdaDataFile('hmda_2010_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '212589', '10.01 MB'),
'2013': HmdaDataFile('hmda_2013_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '186856', '9.2 MB'),
'2012': HmdaDataFile('hmda_2012_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '252396', '12.13 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ma_all-records_labels.zip', '350131', '18.93 MB'),
'2007': HmdaDataFile('hmda_2007_ma_all-records_labels.zip', '507509', '24.85 MB'),
'2017': HmdaDataFile('hmda_2017_ma_all-records_labels.zip', '282546', '10.31 MB'),
'2015': HmdaDataFile('hmda_2015_ma_all-records_labels.zip', '299991', '17.01 MB'),
'2014': HmdaDataFile('hmda_2014_ma_all-records_labels.zip', '246533', '13.15 MB'),
'2008': HmdaDataFile('hmda_2008_ma_all-records_labels.zip', '337077', '17.11 MB'),
'2009': HmdaDataFile('hmda_2009_ma_all-records_labels.zip', '493549', '22.81 MB'),
'2011': HmdaDataFile('hmda_2011_ma_all-records_labels.zip', '400586', '20.86 MB'),
'2010': HmdaDataFile('hmda_2010_ma_all-records_labels.zip', '458768', '23.77 MB'),
'2013': HmdaDataFile('hmda_2013_ma_all-records_labels.zip', '405166', '21.77 MB'),
'2012': HmdaDataFile('hmda_2012_ma_all-records_labels.zip', '516205', '27.13 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ma_originated-records_labels.zip', '201756', '10.26 MB'),
'2007': HmdaDataFile('hmda_2007_ma_originated-records_labels.zip', '214170', '10.35 MB'),
'2017': HmdaDataFile('hmda_2017_ma_originated-records_labels.zip', '162474', '5.34 MB'),
'2015': HmdaDataFile('hmda_2015_ma_originated-records_labels.zip', '173355', '9.34 MB'),
'2014': HmdaDataFile('hmda_2014_ma_originated-records_labels.zip', '137873', '6.99 MB'),
'2008': HmdaDataFile('hmda_2008_ma_originated-records_labels.zip', '159312', '7.79 MB'),
'2009': HmdaDataFile('hmda_2009_ma_originated-records_labels.zip', '255679', '11.37 MB'),
'2011': HmdaDataFile('hmda_2011_ma_originated-records_labels.zip', '205164', '9.93 MB'),
'2010': HmdaDataFile('hmda_2010_ma_originated-records_labels.zip', '239023', '11.5 MB'),
'2013': HmdaDataFile('hmda_2013_ma_originated-records_labels.zip', '224809', '11.32 MB'),
'2012': HmdaDataFile('hmda_2012_ma_originated-records_labels.zip', '288584', '14 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '168663', '5.88 MB'),
'2007': HmdaDataFile('hmda_2007_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '148532', '5 MB'),
'2017': HmdaDataFile('hmda_2017_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '129891', '2.95 MB'),
'2015': HmdaDataFile('hmda_2015_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '144027', '5.53 MB'),
'2014': HmdaDataFile('hmda_2014_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '109057', '3.83 MB'),
'2008': HmdaDataFile('hmda_2008_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '126855', '4.32 MB'),
'2009': HmdaDataFile('hmda_2009_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '229420', '7.29 MB'),
'2011': HmdaDataFile('hmda_2011_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '177890', '5.8 MB'),
'2010': HmdaDataFile('hmda_2010_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '212589', '6.88 MB'),
'2013': HmdaDataFile('hmda_2013_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '186856', '6.35 MB'),
'2012': HmdaDataFile('hmda_2012_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '252396', '8.35 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ma_all-records_codes.zip', '350131', '13.03 MB'),
'2007': HmdaDataFile('hmda_2007_ma_all-records_codes.zip', '507509', '17.13 MB'),
'2017': HmdaDataFile('hmda_2017_ma_all-records_codes.zip', '282546', '6.92 MB'),
'2015': HmdaDataFile('hmda_2015_ma_all-records_codes.zip', '299991', '11.87 MB'),
'2014': HmdaDataFile('hmda_2014_ma_all-records_codes.zip', '246533', '9.05 MB'),
'2008': HmdaDataFile('hmda_2008_ma_all-records_codes.zip', '337077', '11.88 MB'),
'2009': HmdaDataFile('hmda_2009_ma_all-records_codes.zip', '493549', '16.23 MB'),
'2011': HmdaDataFile('hmda_2011_ma_all-records_codes.zip', '400586', '14.19 MB'),
'2010': HmdaDataFile('hmda_2010_ma_all-records_codes.zip', '458768', '16.27 MB'),
'2013': HmdaDataFile('hmda_2013_ma_all-records_codes.zip', '405166', '14.9 MB'),
'2012': HmdaDataFile('hmda_2012_ma_all-records_codes.zip', '516205', '18.65 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ma_originated-records_codes.zip', '201756', '7.12 MB'),
'2007': HmdaDataFile('hmda_2007_ma_originated-records_codes.zip', '214170', '7.34 MB'),
'2017': HmdaDataFile('hmda_2017_ma_originated-records_codes.zip', '162474', '3.73 MB'),
'2015': HmdaDataFile('hmda_2015_ma_originated-records_codes.zip', '173355', '6.7 MB'),
'2014': HmdaDataFile('hmda_2014_ma_originated-records_codes.zip', '137873', '4.9 MB'),
'2008': HmdaDataFile('hmda_2008_ma_originated-records_codes.zip', '159312', '5.51 MB'),
'2009': HmdaDataFile('hmda_2009_ma_originated-records_codes.zip', '255679', '8.23 MB'),
'2011': HmdaDataFile('hmda_2011_ma_originated-records_codes.zip', '205164', '6.77 MB'),
'2010': HmdaDataFile('hmda_2010_ma_originated-records_codes.zip', '239023', '7.91 MB'),
'2013': HmdaDataFile('hmda_2013_ma_originated-records_codes.zip', '224809', '7.79 MB'),
'2012': HmdaDataFile('hmda_2012_ma_originated-records_codes.zip', '288584', '9.58 MB')
}
}
},
'oh': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '232207', '11.88 MB'),
'2007': HmdaDataFile('hmda_2007_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '224598', '11.06 MB'),
'2017': HmdaDataFile('hmda_2017_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '208362', '6.69 MB'),
'2015': HmdaDataFile('hmda_2015_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '204738', '11.72 MB'),
'2014': HmdaDataFile('hmda_2014_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '174084', '8.93 MB'),
'2008': HmdaDataFile('hmda_2008_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '186092', '9.03 MB'),
'2009': HmdaDataFile('hmda_2009_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '282249', '12.44 MB'),
'2011': HmdaDataFile('hmda_2011_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '219789', '10.95 MB'),
'2010': HmdaDataFile('hmda_2010_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '258240', '12.72 MB'),
'2013': HmdaDataFile('hmda_2013_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '267654', '13.76 MB'),
'2012': HmdaDataFile('hmda_2012_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '302970', '15.14 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_oh_all-records_labels.zip', '493271', '27.33 MB'),
'2007': HmdaDataFile('hmda_2007_oh_all-records_labels.zip', '774401', '39.17 MB'),
'2017': HmdaDataFile('hmda_2017_oh_all-records_labels.zip', '448269', '16.35 MB'),
'2015': HmdaDataFile('hmda_2015_oh_all-records_labels.zip', '439676', '27.18 MB'),
'2014': HmdaDataFile('hmda_2014_oh_all-records_labels.zip', '394459', '21.93 MB'),
'2008': HmdaDataFile('hmda_2008_oh_all-records_labels.zip', '533639', '27.2 MB'),
'2009': HmdaDataFile('hmda_2009_oh_all-records_labels.zip', '624555', '29.69 MB'),
'2011': HmdaDataFile('hmda_2011_oh_all-records_labels.zip', '489066', '26.92 MB'),
'2010': HmdaDataFile('hmda_2010_oh_all-records_labels.zip', '555119', '30.3 MB'),
'2013': HmdaDataFile('hmda_2013_oh_all-records_labels.zip', '578940', '32.56 MB'),
'2012': HmdaDataFile('hmda_2012_oh_all-records_labels.zip', '618867', '34.01 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_oh_originated-records_labels.zip', '262449', '13.71 MB'),
'2007': HmdaDataFile('hmda_2007_oh_originated-records_labels.zip', '313279', '15.77 MB'),
'2017': HmdaDataFile('hmda_2017_oh_originated-records_labels.zip', '241167', '7.96 MB'),
'2015': HmdaDataFile('hmda_2015_oh_originated-records_labels.zip', '235058', '13.65 MB'),
'2014': HmdaDataFile('hmda_2014_oh_originated-records_labels.zip', '203927', '10.71 MB'),
'2008': HmdaDataFile('hmda_2008_oh_originated-records_labels.zip', '231697', '11.5 MB'),
'2009': HmdaDataFile('hmda_2009_oh_originated-records_labels.zip', '309496', '13.92 MB'),
'2011': HmdaDataFile('hmda_2011_oh_originated-records_labels.zip', '245688', '12.6 MB'),
'2010': HmdaDataFile('hmda_2010_oh_originated-records_labels.zip', '283698', '14.38 MB'),
'2013': HmdaDataFile('hmda_2013_oh_originated-records_labels.zip', '306698', '15.95 MB'),
'2012': HmdaDataFile('hmda_2012_oh_originated-records_labels.zip', '336141', '16.99 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '232207', '8.28 MB'),
'2007': HmdaDataFile('hmda_2007_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '224598', '7.85 MB'),
'2017': HmdaDataFile('hmda_2017_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '208362', '4.84 MB'),
'2015': HmdaDataFile('hmda_2015_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '204738', '8.2 MB'),
'2014': HmdaDataFile('hmda_2014_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '174084', '6.26 MB'),
'2008': HmdaDataFile('hmda_2008_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '186092', '6.43 MB'),
'2009': HmdaDataFile('hmda_2009_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '282249', '9.02 MB'),
'2011': HmdaDataFile('hmda_2011_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '219789', '7.49 MB'),
'2010': HmdaDataFile('hmda_2010_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '258240', '8.73 MB'),
'2013': HmdaDataFile('hmda_2013_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '267654', '9.51 MB'),
'2012': HmdaDataFile('hmda_2012_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '302970', '10.48 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_oh_all-records_codes.zip', '493271', '18.52 MB'),
'2007': HmdaDataFile('hmda_2007_oh_all-records_codes.zip', '774401', '27.06 MB'),
'2017': HmdaDataFile('hmda_2017_oh_all-records_codes.zip', '448269', '10.89 MB'),
'2015': HmdaDataFile('hmda_2015_oh_all-records_codes.zip', '439676', '18.32 MB'),
'2014': HmdaDataFile('hmda_2014_oh_all-records_codes.zip', '394459', '14.89 MB'),
'2008': HmdaDataFile('hmda_2008_oh_all-records_codes.zip', '533639', '18.82 MB'),
'2009': HmdaDataFile('hmda_2009_oh_all-records_codes.zip', '624555', '20.88 MB'),
'2011': HmdaDataFile('hmda_2011_oh_all-records_codes.zip', '489066', '17.97 MB'),
'2010': HmdaDataFile('hmda_2010_oh_all-records_codes.zip', '555119', '20.35 MB'),
'2013': HmdaDataFile('hmda_2013_oh_all-records_codes.zip', '578940', '22.01 MB'),
'2012': HmdaDataFile('hmda_2012_oh_all-records_codes.zip', '618867', '23.02 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_oh_originated-records_codes.zip', '262449', '9.52 MB'),
'2007': HmdaDataFile('hmda_2007_oh_originated-records_codes.zip', '313279', '11.21 MB'),
'2017': HmdaDataFile('hmda_2017_oh_originated-records_codes.zip', '241167', '5.68 MB'),
'2015': HmdaDataFile('hmda_2015_oh_originated-records_codes.zip', '235058', '9.5 MB'),
'2014': HmdaDataFile('hmda_2014_oh_originated-records_codes.zip', '203927', '7.49 MB'),
'2008': HmdaDataFile('hmda_2008_oh_originated-records_codes.zip', '231697', '8.17 MB'),
'2009': HmdaDataFile('hmda_2009_oh_originated-records_codes.zip', '309496', '10.06 MB'),
'2011': HmdaDataFile('hmda_2011_oh_originated-records_codes.zip', '245688', '8.63 MB'),
'2010': HmdaDataFile('hmda_2010_oh_originated-records_codes.zip', '283698', '9.88 MB'),
'2013': HmdaDataFile('hmda_2013_oh_originated-records_codes.zip', '306698', '10.96 MB'),
'2012': HmdaDataFile('hmda_2012_oh_originated-records_codes.zip', '336141', '11.68 MB')
}
}
},
'ut': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '106374', '4.72 MB'),
'2007': HmdaDataFile('hmda_2007_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '94558', '4.04 MB'),
'2017': HmdaDataFile('hmda_2017_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '87920', '2.62 MB'),
'2015': HmdaDataFile('hmda_2015_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '88686', '4.42 MB'),
'2014': HmdaDataFile('hmda_2014_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '62864', '2.8 MB'),
'2008': HmdaDataFile('hmda_2008_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '83133', '3.43 MB'),
'2009': HmdaDataFile('hmda_2009_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '117282', '4.64 MB'),
'2011': HmdaDataFile('hmda_2011_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '66630', '2.88 MB'),
'2010': HmdaDataFile('hmda_2010_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '90322', '3.86 MB'),
'2013': HmdaDataFile('hmda_2013_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '82759', '3.71 MB'),
'2012': HmdaDataFile('hmda_2012_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '108573', '4.87 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ut_all-records_labels.zip', '227871', '11.21 MB'),
'2007': HmdaDataFile('hmda_2007_ut_all-records_labels.zip', '319327', '14.16 MB'),
'2017': HmdaDataFile('hmda_2017_ut_all-records_labels.zip', '198425', '6.62 MB'),
'2015': HmdaDataFile('hmda_2015_ut_all-records_labels.zip', '192509', '10.37 MB'),
'2014': HmdaDataFile('hmda_2014_ut_all-records_labels.zip', '144848', '7.1 MB'),
'2008': HmdaDataFile('hmda_2008_ut_all-records_labels.zip', '226654', '9.81 MB'),
'2009': HmdaDataFile('hmda_2009_ut_all-records_labels.zip', '279791', '11.41 MB'),
'2011': HmdaDataFile('hmda_2011_ut_all-records_labels.zip', '166439', '7.98 MB'),
'2010': HmdaDataFile('hmda_2010_ut_all-records_labels.zip', '212181', '10.08 MB'),
'2013': HmdaDataFile('hmda_2013_ut_all-records_labels.zip', '192653', '9.53 MB'),
'2012': HmdaDataFile('hmda_2012_ut_all-records_labels.zip', '230544', '11.39 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ut_originated-records_labels.zip', '126325', '5.77 MB'),
'2007': HmdaDataFile('hmda_2007_ut_originated-records_labels.zip', '136628', '5.96 MB'),
'2017': HmdaDataFile('hmda_2017_ut_originated-records_labels.zip', '108018', '3.29 MB'),
'2015': HmdaDataFile('hmda_2015_ut_originated-records_labels.zip', '105929', '5.36 MB'),
'2014': HmdaDataFile('hmda_2014_ut_originated-records_labels.zip', '76563', '3.49 MB'),
'2008': HmdaDataFile('hmda_2008_ut_originated-records_labels.zip', '98555', '4.16 MB'),
'2009': HmdaDataFile('hmda_2009_ut_originated-records_labels.zip', '127680', '5.13 MB'),
'2011': HmdaDataFile('hmda_2011_ut_originated-records_labels.zip', '77763', '3.46 MB'),
'2010': HmdaDataFile('hmda_2010_ut_originated-records_labels.zip', '101108', '4.4 MB'),
'2013': HmdaDataFile('hmda_2013_ut_originated-records_labels.zip', '99424', '4.53 MB'),
'2012': HmdaDataFile('hmda_2012_ut_originated-records_labels.zip', '125331', '5.73 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '106374', '3.15 MB'),
'2007': HmdaDataFile('hmda_2007_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '94558', '2.78 MB'),
'2017': HmdaDataFile('hmda_2017_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '87920', '1.85 MB'),
'2015': HmdaDataFile('hmda_2015_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '88686', '3.08 MB'),
'2014': HmdaDataFile('hmda_2014_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '62864', '1.88 MB'),
'2008': HmdaDataFile('hmda_2008_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '83133', '2.4 MB'),
'2009': HmdaDataFile('hmda_2009_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '117282', '3.31 MB'),
'2011': HmdaDataFile('hmda_2011_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '66630', '1.92 MB'),
'2010': HmdaDataFile('hmda_2010_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '90322', '2.56 MB'),
'2013': HmdaDataFile('hmda_2013_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '82759', '2.48 MB'),
'2012': HmdaDataFile('hmda_2012_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '108573', '3.28 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ut_all-records_codes.zip', '227871', '7.32 MB'),
'2007': HmdaDataFile('hmda_2007_ut_all-records_codes.zip', '319327', '9.65 MB'),
'2017': HmdaDataFile('hmda_2017_ut_all-records_codes.zip', '198425', '4.41 MB'),
'2015': HmdaDataFile('hmda_2015_ut_all-records_codes.zip', '192509', '6.96 MB'),
'2014': HmdaDataFile('hmda_2014_ut_all-records_codes.zip', '144848', '4.66 MB'),
'2008': HmdaDataFile('hmda_2008_ut_all-records_codes.zip', '226654', '6.76 MB'),
'2009': HmdaDataFile('hmda_2009_ut_all-records_codes.zip', '279791', '7.94 MB'),
'2011': HmdaDataFile('hmda_2011_ut_all-records_codes.zip', '166439', '5.23 MB'),
'2010': HmdaDataFile('hmda_2010_ut_all-records_codes.zip', '212181', '6.6 MB'),
'2013': HmdaDataFile('hmda_2013_ut_all-records_codes.zip', '192653', '6.29 MB'),
'2012': HmdaDataFile('hmda_2012_ut_all-records_codes.zip', '230544', '7.57 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ut_originated-records_codes.zip', '126325', '3.86 MB'),
'2007': HmdaDataFile('hmda_2007_ut_originated-records_codes.zip', '136628', '4.14 MB'),
'2017': HmdaDataFile('hmda_2017_ut_originated-records_codes.zip', '108018', '2.31 MB'),
'2015': HmdaDataFile('hmda_2015_ut_originated-records_codes.zip', '105929', '3.72 MB'),
'2014': HmdaDataFile('hmda_2014_ut_originated-records_codes.zip', '76563', '2.34 MB'),
'2008': HmdaDataFile('hmda_2008_ut_originated-records_codes.zip', '98555', '2.91 MB'),
'2009': HmdaDataFile('hmda_2009_ut_originated-records_codes.zip', '127680', '3.64 MB'),
'2011': HmdaDataFile('hmda_2011_ut_originated-records_codes.zip', '77763', '2.3 MB'),
'2010': HmdaDataFile('hmda_2010_ut_originated-records_codes.zip', '101108', '2.91 MB'),
'2013': HmdaDataFile('hmda_2013_ut_originated-records_codes.zip', '99424', '3.03 MB'),
'2012': HmdaDataFile('hmda_2012_ut_originated-records_codes.zip', '125331', '3.84 MB')
}
}
},
'mo': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '139247', '6.75 MB'),
'2007': HmdaDataFile('hmda_2007_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '154268', '7.4 MB'),
'2017': HmdaDataFile('hmda_2017_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '118901', '3.83 MB'),
'2015': HmdaDataFile('hmda_2015_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '121654', '6.63 MB'),
'2014': HmdaDataFile('hmda_2014_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '95718', '4.8 MB'),
'2008': HmdaDataFile('hmda_2008_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '132777', '6.15 MB'),
'2009': HmdaDataFile('hmda_2009_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '186579', '8.07 MB'),
'2011': HmdaDataFile('hmda_2011_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '130134', '6.22 MB'),
'2010': HmdaDataFile('hmda_2010_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '158585', '7.57 MB'),
'2013': HmdaDataFile('hmda_2013_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '146638', '7.2 MB'),
'2012': HmdaDataFile('hmda_2012_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '175614', '8.39 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mo_all-records_labels.zip', '312237', '16.09 MB'),
'2007': HmdaDataFile('hmda_2007_mo_all-records_labels.zip', '531617', '25.33 MB'),
'2017': HmdaDataFile('hmda_2017_mo_all-records_labels.zip', '277843', '9.61 MB'),
'2015': HmdaDataFile('hmda_2015_mo_all-records_labels.zip', '276661', '16.28 MB'),
'2014': HmdaDataFile('hmda_2014_mo_all-records_labels.zip', '232023', '12.42 MB'),
'2008': HmdaDataFile('hmda_2008_mo_all-records_labels.zip', '379587', '17.83 MB'),
'2009': HmdaDataFile('hmda_2009_mo_all-records_labels.zip', '447918', '19.75 MB'),
'2011': HmdaDataFile('hmda_2011_mo_all-records_labels.zip', '309645', '15.9 MB'),
'2010': HmdaDataFile('hmda_2010_mo_all-records_labels.zip', '360738', '18.59 MB'),
'2013': HmdaDataFile('hmda_2013_mo_all-records_labels.zip', '347186', '18.19 MB'),
'2012': HmdaDataFile('hmda_2012_mo_all-records_labels.zip', '384551', '19.69 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mo_originated-records_labels.zip', '165943', '8.21 MB'),
'2007': HmdaDataFile('hmda_2007_mo_originated-records_labels.zip', '218490', '10.7 MB'),
'2017': HmdaDataFile('hmda_2017_mo_originated-records_labels.zip', '145419', '4.81 MB'),
'2015': HmdaDataFile('hmda_2015_mo_originated-records_labels.zip', '147519', '8.15 MB'),
'2014': HmdaDataFile('hmda_2014_mo_originated-records_labels.zip', '120463', '6.22 MB'),
'2008': HmdaDataFile('hmda_2008_mo_originated-records_labels.zip', '169405', '8.03 MB'),
'2009': HmdaDataFile('hmda_2009_mo_originated-records_labels.zip', '211924', '9.41 MB'),
'2011': HmdaDataFile('hmda_2011_mo_originated-records_labels.zip', '154134', '7.55 MB'),
'2010': HmdaDataFile('hmda_2010_mo_originated-records_labels.zip', '182546', '8.84 MB'),
'2013': HmdaDataFile('hmda_2013_mo_originated-records_labels.zip', '178486', '8.86 MB'),
'2012': HmdaDataFile('hmda_2012_mo_originated-records_labels.zip', '204935', '9.93 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '139247', '4.66 MB'),
'2007': HmdaDataFile('hmda_2007_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '154268', '5.21 MB'),
'2017': HmdaDataFile('hmda_2017_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '118901', '2.73 MB'),
'2015': HmdaDataFile('hmda_2015_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '121654', '4.64 MB'),
'2014': HmdaDataFile('hmda_2014_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '95718', '3.31 MB'),
'2008': HmdaDataFile('hmda_2008_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '132777', '4.34 MB'),
'2009': HmdaDataFile('hmda_2009_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '186579', '5.81 MB'),
'2011': HmdaDataFile('hmda_2011_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '130134', '4.23 MB'),
'2010': HmdaDataFile('hmda_2010_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '158585', '5.17 MB'),
'2013': HmdaDataFile('hmda_2013_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '146638', '4.98 MB'),
'2012': HmdaDataFile('hmda_2012_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '175614', '5.75 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mo_all-records_codes.zip', '312237', '10.71 MB'),
'2007': HmdaDataFile('hmda_2007_mo_all-records_codes.zip', '531617', '17.33 MB'),
'2017': HmdaDataFile('hmda_2017_mo_all-records_codes.zip', '277843', '6.33 MB'),
'2015': HmdaDataFile('hmda_2015_mo_all-records_codes.zip', '276661', '10.94 MB'),
'2014': HmdaDataFile('hmda_2014_mo_all-records_codes.zip', '232023', '8.21 MB'),
'2008': HmdaDataFile('hmda_2008_mo_all-records_codes.zip', '379587', '12.23 MB'),
'2009': HmdaDataFile('hmda_2009_mo_all-records_codes.zip', '447918', '13.74 MB'),
'2011': HmdaDataFile('hmda_2011_mo_all-records_codes.zip', '309645', '10.46 MB'),
'2010': HmdaDataFile('hmda_2010_mo_all-records_codes.zip', '360738', '12.3 MB'),
'2013': HmdaDataFile('hmda_2013_mo_all-records_codes.zip', '347186', '12.14 MB'),
'2012': HmdaDataFile('hmda_2012_mo_all-records_codes.zip', '384551', '13.07 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mo_originated-records_codes.zip', '165943', '5.62 MB'),
'2007': HmdaDataFile('hmda_2007_mo_originated-records_codes.zip', '218490', '7.52 MB'),
'2017': HmdaDataFile('hmda_2017_mo_originated-records_codes.zip', '145419', '3.37 MB'),
'2015': HmdaDataFile('hmda_2015_mo_originated-records_codes.zip', '147519', '5.65 MB'),
'2014': HmdaDataFile('hmda_2014_mo_originated-records_codes.zip', '120463', '4.26 MB'),
'2008': HmdaDataFile('hmda_2008_mo_originated-records_codes.zip', '169405', '5.64 MB'),
'2009': HmdaDataFile('hmda_2009_mo_originated-records_codes.zip', '211924', '6.73 MB'),
'2011': HmdaDataFile('hmda_2011_mo_originated-records_codes.zip', '154134', '5.1 MB'),
'2010': HmdaDataFile('hmda_2010_mo_originated-records_codes.zip', '182546', '5.99 MB'),
'2013': HmdaDataFile('hmda_2013_mo_originated-records_codes.zip', '178486', '6.06 MB'),
'2012': HmdaDataFile('hmda_2012_mo_originated-records_codes.zip', '204935', '6.74 MB')
}
}
},
'mn': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '150929', '7.76 MB'),
'2007': HmdaDataFile('hmda_2007_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '114958', '5.46 MB'),
'2017': HmdaDataFile('hmda_2017_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '127113', '3.8 MB'),
'2015': HmdaDataFile('hmda_2015_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '135112', '7.39 MB'),
'2014': HmdaDataFile('hmda_2014_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '100659', '5.19 MB'),
'2008': HmdaDataFile('hmda_2008_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '101137', '4.61 MB'),
'2009': HmdaDataFile('hmda_2009_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '172464', '7.33 MB'),
'2011': HmdaDataFile('hmda_2011_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '130723', '6.42 MB'),
'2010': HmdaDataFile('hmda_2010_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '155626', '7.58 MB'),
'2013': HmdaDataFile('hmda_2013_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '158942', '7.98 MB'),
'2012': HmdaDataFile('hmda_2012_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '195258', '9.69 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mn_all-records_labels.zip', '308571', '16.84 MB'),
'2007': HmdaDataFile('hmda_2007_mn_all-records_labels.zip', '396721', '18.97 MB'),
'2017': HmdaDataFile('hmda_2017_mn_all-records_labels.zip', '269551', '8.68 MB'),
'2015': HmdaDataFile('hmda_2015_mn_all-records_labels.zip', '280012', '16.44 MB'),
'2014': HmdaDataFile('hmda_2014_mn_all-records_labels.zip', '220146', '12.02 MB'),
'2008': HmdaDataFile('hmda_2008_mn_all-records_labels.zip', '272913', '12.87 MB'),
'2009': HmdaDataFile('hmda_2009_mn_all-records_labels.zip', '379860', '16.84 MB'),
'2011': HmdaDataFile('hmda_2011_mn_all-records_labels.zip', '282982', '14.91 MB'),
'2010': HmdaDataFile('hmda_2010_mn_all-records_labels.zip', '332542', '17.42 MB'),
'2013': HmdaDataFile('hmda_2013_mn_all-records_labels.zip', '332594', '17.76 MB'),
'2012': HmdaDataFile('hmda_2012_mn_all-records_labels.zip', '382934', '20.31 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mn_originated-records_labels.zip', '176922', '9.23 MB'),
'2007': HmdaDataFile('hmda_2007_mn_originated-records_labels.zip', '171217', '8.27 MB'),
'2017': HmdaDataFile('hmda_2017_mn_originated-records_labels.zip', '154164', '4.74 MB'),
'2015': HmdaDataFile('hmda_2015_mn_originated-records_labels.zip', '160605', '8.94 MB'),
'2014': HmdaDataFile('hmda_2014_mn_originated-records_labels.zip', '123374', '6.47 MB'),
'2008': HmdaDataFile('hmda_2008_mn_originated-records_labels.zip', '130815', '6.14 MB'),
'2009': HmdaDataFile('hmda_2009_mn_originated-records_labels.zip', '195958', '8.53 MB'),
'2011': HmdaDataFile('hmda_2011_mn_originated-records_labels.zip', '151782', '7.55 MB'),
'2010': HmdaDataFile('hmda_2010_mn_originated-records_labels.zip', '177556', '8.84 MB'),
'2013': HmdaDataFile('hmda_2013_mn_originated-records_labels.zip', '187475', '9.52 MB'),
'2012': HmdaDataFile('hmda_2012_mn_originated-records_labels.zip', '222116', '11.23 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '150929', '5.39 MB'),
'2007': HmdaDataFile('hmda_2007_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '114958', '3.87 MB'),
'2017': HmdaDataFile('hmda_2017_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '127113', '2.69 MB'),
'2015': HmdaDataFile('hmda_2015_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '135112', '5.14 MB'),
'2014': HmdaDataFile('hmda_2014_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '100659', '3.62 MB'),
'2008': HmdaDataFile('hmda_2008_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '101137', '3.28 MB'),
'2009': HmdaDataFile('hmda_2009_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '172464', '5.3 MB'),
'2011': HmdaDataFile('hmda_2011_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '130723', '4.45 MB'),
'2010': HmdaDataFile('hmda_2010_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '155626', '5.29 MB'),
'2013': HmdaDataFile('hmda_2013_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '158942', '5.58 MB'),
'2012': HmdaDataFile('hmda_2012_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '195258', '6.78 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mn_all-records_codes.zip', '308571', '11.31 MB'),
'2007': HmdaDataFile('hmda_2007_mn_all-records_codes.zip', '396721', '13.04 MB'),
'2017': HmdaDataFile('hmda_2017_mn_all-records_codes.zip', '269551', '5.62 MB'),
'2015': HmdaDataFile('hmda_2015_mn_all-records_codes.zip', '280012', '10.99 MB'),
'2014': HmdaDataFile('hmda_2014_mn_all-records_codes.zip', '220146', '8.1 MB'),
'2008': HmdaDataFile('hmda_2008_mn_all-records_codes.zip', '272913', '8.89 MB'),
'2009': HmdaDataFile('hmda_2009_mn_all-records_codes.zip', '379860', '11.74 MB'),
'2011': HmdaDataFile('hmda_2011_mn_all-records_codes.zip', '282982', '10.01 MB'),
'2010': HmdaDataFile('hmda_2010_mn_all-records_codes.zip', '332542', '11.73 MB'),
'2013': HmdaDataFile('hmda_2013_mn_all-records_codes.zip', '332594', '12.03 MB'),
'2012': HmdaDataFile('hmda_2012_mn_all-records_codes.zip', '382934', '13.81 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mn_originated-records_codes.zip', '176922', '6.37 MB'),
'2007': HmdaDataFile('hmda_2007_mn_originated-records_codes.zip', '171217', '5.84 MB'),
'2017': HmdaDataFile('hmda_2017_mn_originated-records_codes.zip', '154164', '3.32 MB'),
'2015': HmdaDataFile('hmda_2015_mn_originated-records_codes.zip', '160605', '6.15 MB'),
'2014': HmdaDataFile('hmda_2014_mn_originated-records_codes.zip', '123374', '4.48 MB'),
'2008': HmdaDataFile('hmda_2008_mn_originated-records_codes.zip', '130815', '4.36 MB'),
'2009': HmdaDataFile('hmda_2009_mn_originated-records_codes.zip', '195958', '6.13 MB'),
'2011': HmdaDataFile('hmda_2011_mn_originated-records_codes.zip', '151782', '5.2 MB'),
'2010': HmdaDataFile('hmda_2010_mn_originated-records_codes.zip', '177556', '6.12 MB'),
'2013': HmdaDataFile('hmda_2013_mn_originated-records_codes.zip', '187475', '6.6 MB'),
'2012': HmdaDataFile('hmda_2012_mn_originated-records_codes.zip', '222116', '7.82 MB')
}
}
},
'mi': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '225509', '11.38 MB'),
'2007': HmdaDataFile('hmda_2007_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '202641', '10.01 MB'),
'2017': HmdaDataFile('hmda_2017_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '200696', '6.21 MB'),
'2015': HmdaDataFile('hmda_2015_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '198320', '11.13 MB'),
'2014': HmdaDataFile('hmda_2014_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '162424', '8.1 MB'),
'2008': HmdaDataFile('hmda_2008_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '149102', '7.25 MB'),
'2009': HmdaDataFile('hmda_2009_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '194235', '8.77 MB'),
'2011': HmdaDataFile('hmda_2011_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '168948', '8.4 MB'),
'2010': HmdaDataFile('hmda_2010_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '175445', '8.52 MB'),
'2013': HmdaDataFile('hmda_2013_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '239554', '11.94 MB'),
'2012': HmdaDataFile('hmda_2012_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '274203', '13.44 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mi_all-records_labels.zip', '470652', '25.31 MB'),
'2007': HmdaDataFile('hmda_2007_mi_all-records_labels.zip', '780713', '38.5 MB'),
'2017': HmdaDataFile('hmda_2017_mi_all-records_labels.zip', '437181', '14.82 MB'),
'2015': HmdaDataFile('hmda_2015_mi_all-records_labels.zip', '424672', '25.71 MB'),
'2014': HmdaDataFile('hmda_2014_mi_all-records_labels.zip', '361546', '19.18 MB'),
'2008': HmdaDataFile('hmda_2008_mi_all-records_labels.zip', '472702', '23.46 MB'),
'2009': HmdaDataFile('hmda_2009_mi_all-records_labels.zip', '504304', '23.62 MB'),
'2011': HmdaDataFile('hmda_2011_mi_all-records_labels.zip', '396764', '21.31 MB'),
'2010': HmdaDataFile('hmda_2010_mi_all-records_labels.zip', '419300', '22.02 MB'),
'2013': HmdaDataFile('hmda_2013_mi_all-records_labels.zip', '521030', '27.85 MB'),
'2012': HmdaDataFile('hmda_2012_mi_all-records_labels.zip', '573645', '30.22 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mi_originated-records_labels.zip', '262757', '13.58 MB'),
'2007': HmdaDataFile('hmda_2007_mi_originated-records_labels.zip', '294627', '14.81 MB'),
'2017': HmdaDataFile('hmda_2017_mi_originated-records_labels.zip', '241945', '7.74 MB'),
'2015': HmdaDataFile('hmda_2015_mi_originated-records_labels.zip', '233520', '13.34 MB'),
'2014': HmdaDataFile('hmda_2014_mi_originated-records_labels.zip', '194126', '9.91 MB'),
'2008': HmdaDataFile('hmda_2008_mi_originated-records_labels.zip', '191860', '9.52 MB'),
'2009': HmdaDataFile('hmda_2009_mi_originated-records_labels.zip', '224166', '10.41 MB'),
'2011': HmdaDataFile('hmda_2011_mi_originated-records_labels.zip', '196360', '10.06 MB'),
'2010': HmdaDataFile('hmda_2010_mi_originated-records_labels.zip', '202252', '10.19 MB'),
'2013': HmdaDataFile('hmda_2013_mi_originated-records_labels.zip', '280253', '14.18 MB'),
'2012': HmdaDataFile('hmda_2012_mi_originated-records_labels.zip', '312194', '15.55 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '225509', '7.89 MB'),
'2007': HmdaDataFile('hmda_2007_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '202641', '7.07 MB'),
'2017': HmdaDataFile('hmda_2017_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '200696', '4.39 MB'),
'2015': HmdaDataFile('hmda_2015_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '198320', '7.82 MB'),
'2014': HmdaDataFile('hmda_2014_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '162424', '5.66 MB'),
'2008': HmdaDataFile('hmda_2008_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '149102', '5.08 MB'),
'2009': HmdaDataFile('hmda_2009_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '194235', '6.2 MB'),
'2011': HmdaDataFile('hmda_2011_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '168948', '5.72 MB'),
'2010': HmdaDataFile('hmda_2010_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '175445', '5.8 MB'),
'2013': HmdaDataFile('hmda_2013_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '239554', '8.26 MB'),
'2012': HmdaDataFile('hmda_2012_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '274203', '9.28 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mi_all-records_codes.zip', '470652', '16.94 MB'),
'2007': HmdaDataFile('hmda_2007_mi_all-records_codes.zip', '780713', '26.45 MB'),
'2017': HmdaDataFile('hmda_2017_mi_all-records_codes.zip', '437181', '9.61 MB'),
'2015': HmdaDataFile('hmda_2015_mi_all-records_codes.zip', '424672', '17.31 MB'),
'2014': HmdaDataFile('hmda_2014_mi_all-records_codes.zip', '361546', '12.87 MB'),
'2008': HmdaDataFile('hmda_2008_mi_all-records_codes.zip', '472702', '16.04 MB'),
'2009': HmdaDataFile('hmda_2009_mi_all-records_codes.zip', '504304', '16.25 MB'),
'2011': HmdaDataFile('hmda_2011_mi_all-records_codes.zip', '396764', '14.07 MB'),
'2010': HmdaDataFile('hmda_2010_mi_all-records_codes.zip', '419300', '14.52 MB'),
'2013': HmdaDataFile('hmda_2013_mi_all-records_codes.zip', '521030', '18.59 MB'),
'2012': HmdaDataFile('hmda_2012_mi_all-records_codes.zip', '573645', '20.21 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mi_originated-records_codes.zip', '262757', '9.39 MB'),
'2007': HmdaDataFile('hmda_2007_mi_originated-records_codes.zip', '294627', '10.53 MB'),
'2017': HmdaDataFile('hmda_2017_mi_originated-records_codes.zip', '241945', '5.41 MB'),
'2015': HmdaDataFile('hmda_2015_mi_originated-records_codes.zip', '233520', '9.29 MB'),
'2014': HmdaDataFile('hmda_2014_mi_originated-records_codes.zip', '194126', '6.88 MB'),
'2008': HmdaDataFile('hmda_2008_mi_originated-records_codes.zip', '191860', '6.67 MB'),
'2009': HmdaDataFile('hmda_2009_mi_originated-records_codes.zip', '224166', '7.37 MB'),
'2011': HmdaDataFile('hmda_2011_mi_originated-records_codes.zip', '196360', '6.83 MB'),
'2010': HmdaDataFile('hmda_2010_mi_originated-records_codes.zip', '202252', '6.96 MB'),
'2013': HmdaDataFile('hmda_2013_mi_originated-records_codes.zip', '280253', '9.73 MB'),
'2012': HmdaDataFile('hmda_2012_mi_originated-records_codes.zip', '312194', '10.66 MB')
}
}
},
'ri': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '22014', '1 MB'),
'2007': HmdaDataFile('hmda_2007_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '24278', '1.09 MB'),
'2017': HmdaDataFile('hmda_2017_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '19545', '600.94 KB'),
'2015': HmdaDataFile('hmda_2015_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '19238', '896.87 KB'),
'2014': HmdaDataFile('hmda_2014_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '14179', '687.6 KB'),
'2008': HmdaDataFile('hmda_2008_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '18140', '802.68 KB'),
'2009': HmdaDataFile('hmda_2009_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '29449', '1.2 MB'),
'2011': HmdaDataFile('hmda_2011_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '20369', '903.7 KB'),
'2010': HmdaDataFile('hmda_2010_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '24263', '1.09 MB'),
'2013': HmdaDataFile('hmda_2013_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '24398', '1.11 MB'),
'2012': HmdaDataFile('hmda_2012_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '28863', '1.3 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ri_all-records_labels.zip', '49775', '2.45 MB'),
'2007': HmdaDataFile('hmda_2007_ri_all-records_labels.zip', '88662', '4.02 MB'),
'2017': HmdaDataFile('hmda_2017_ri_all-records_labels.zip', '44038', '1.51 MB'),
'2015': HmdaDataFile('hmda_2015_ri_all-records_labels.zip', '43611', '2.22 MB'),
'2014': HmdaDataFile('hmda_2014_ri_all-records_labels.zip', '33941', '1.8 MB'),
'2008': HmdaDataFile('hmda_2008_ri_all-records_labels.zip', '51710', '2.4 MB'),
'2009': HmdaDataFile('hmda_2009_ri_all-records_labels.zip', '64057', '2.81 MB'),
'2011': HmdaDataFile('hmda_2011_ri_all-records_labels.zip', '48785', '2.39 MB'),
'2010': HmdaDataFile('hmda_2010_ri_all-records_labels.zip', '55842', '2.75 MB'),
'2013': HmdaDataFile('hmda_2013_ri_all-records_labels.zip', '52960', '2.62 MB'),
'2012': HmdaDataFile('hmda_2012_ri_all-records_labels.zip', '61996', '3.04 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ri_originated-records_labels.zip', '27005', '1.25 MB'),
'2007': HmdaDataFile('hmda_2007_ri_originated-records_labels.zip', '36923', '1.65 MB'),
'2017': HmdaDataFile('hmda_2017_ri_originated-records_labels.zip', '24598', '766.74 KB'),
'2015': HmdaDataFile('hmda_2015_ri_originated-records_labels.zip', '23923', '1.13 MB'),
'2014': HmdaDataFile('hmda_2014_ri_originated-records_labels.zip', '18005', '891.11 KB'),
'2008': HmdaDataFile('hmda_2008_ri_originated-records_labels.zip', '23931', '1.07 MB'),
'2009': HmdaDataFile('hmda_2009_ri_originated-records_labels.zip', '33549', '1.38 MB'),
'2011': HmdaDataFile('hmda_2011_ri_originated-records_labels.zip', '24337', '1.11 MB'),
'2010': HmdaDataFile('hmda_2010_ri_originated-records_labels.zip', '28439', '1.3 MB'),
'2013': HmdaDataFile('hmda_2013_ri_originated-records_labels.zip', '29485', '1.37 MB'),
'2012': HmdaDataFile('hmda_2012_ri_originated-records_labels.zip', '33911', '1.55 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '22014', '662.47 KB'),
'2007': HmdaDataFile('hmda_2007_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '24278', '740.23 KB'),
'2017': HmdaDataFile('hmda_2017_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '19545', '422.08 KB'),
'2015': HmdaDataFile('hmda_2015_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '19238', '594.54 KB'),
'2014': HmdaDataFile('hmda_2014_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '14179', '455.38 KB'),
'2008': HmdaDataFile('hmda_2008_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '18140', '535.59 KB'),
'2009': HmdaDataFile('hmda_2009_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '29449', '820.78 KB'),
'2011': HmdaDataFile('hmda_2011_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '20369', '581.77 KB'),
'2010': HmdaDataFile('hmda_2010_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '24263', '698.2 KB'),
'2013': HmdaDataFile('hmda_2013_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '24398', '725.42 KB'),
'2012': HmdaDataFile('hmda_2012_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '28863', '844.23 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ri_all-records_codes.zip', '49775', '1.57 MB'),
'2007': HmdaDataFile('hmda_2007_ri_all-records_codes.zip', '88662', '2.64 MB'),
'2017': HmdaDataFile('hmda_2017_ri_all-records_codes.zip', '44038', '997.28 KB'),
'2015': HmdaDataFile('hmda_2015_ri_all-records_codes.zip', '43611', '1.42 MB'),
'2014': HmdaDataFile('hmda_2014_ri_all-records_codes.zip', '33941', '1.14 MB'),
'2008': HmdaDataFile('hmda_2008_ri_all-records_codes.zip', '51710', '1.58 MB'),
'2009': HmdaDataFile('hmda_2009_ri_all-records_codes.zip', '64057', '1.89 MB'),
'2011': HmdaDataFile('hmda_2011_ri_all-records_codes.zip', '48785', '1.49 MB'),
'2010': HmdaDataFile('hmda_2010_ri_all-records_codes.zip', '55842', '1.73 MB'),
'2013': HmdaDataFile('hmda_2013_ri_all-records_codes.zip', '52960', '1.66 MB'),
'2012': HmdaDataFile('hmda_2012_ri_all-records_codes.zip', '61996', '1.93 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ri_originated-records_codes.zip', '27005', '818.97 KB'),
'2007': HmdaDataFile('hmda_2007_ri_originated-records_codes.zip', '36923', '1.12 MB'),
'2017': HmdaDataFile('hmda_2017_ri_originated-records_codes.zip', '24598', '532.25 KB'),
'2015': HmdaDataFile('hmda_2015_ri_originated-records_codes.zip', '23923', '744.4 KB'),
'2014': HmdaDataFile('hmda_2014_ri_originated-records_codes.zip', '18005', '586.22 KB'),
'2008': HmdaDataFile('hmda_2008_ri_originated-records_codes.zip', '23931', '716.77 KB'),
'2009': HmdaDataFile('hmda_2009_ri_originated-records_codes.zip', '33549', '938.52 KB'),
'2011': HmdaDataFile('hmda_2011_ri_originated-records_codes.zip', '24337', '705.92 KB'),
'2010': HmdaDataFile('hmda_2010_ri_originated-records_codes.zip', '28439', '835.63 KB'),
'2013': HmdaDataFile('hmda_2013_ri_originated-records_codes.zip', '29485', '889.58 KB'),
'2012': HmdaDataFile('hmda_2012_ri_originated-records_codes.zip', '33911', '1.01 MB')
}
}
},
'ks': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '57549', '2.68 MB'),
'2007': HmdaDataFile('hmda_2007_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '62611', '2.93 MB'),
'2017': HmdaDataFile('hmda_2017_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '50202', '1.64 MB'),
'2015': HmdaDataFile('hmda_2015_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '52685', '2.73 MB'),
'2014': HmdaDataFile('hmda_2014_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '43216', '2.18 MB'),
'2008': HmdaDataFile('hmda_2008_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '56265', '2.59 MB'),
'2009': HmdaDataFile('hmda_2009_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '78703', '3.32 MB'),
'2011': HmdaDataFile('hmda_2011_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '56542', '2.6 MB'),
'2010': HmdaDataFile('hmda_2010_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '67768', '3.07 MB'),
'2013': HmdaDataFile('hmda_2013_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '62047', '2.98 MB'),
'2012': HmdaDataFile('hmda_2012_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '73455', '3.41 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ks_all-records_labels.zip', '124922', '6.39 MB'),
'2007': HmdaDataFile('hmda_2007_ks_all-records_labels.zip', '194856', '9.65 MB'),
'2017': HmdaDataFile('hmda_2017_ks_all-records_labels.zip', '107563', '3.99 MB'),
'2015': HmdaDataFile('hmda_2015_ks_all-records_labels.zip', '113367', '6.35 MB'),
'2014': HmdaDataFile('hmda_2014_ks_all-records_labels.zip', '96245', '5.33 MB'),
'2008': HmdaDataFile('hmda_2008_ks_all-records_labels.zip', '146968', '7.27 MB'),
'2009': HmdaDataFile('hmda_2009_ks_all-records_labels.zip', '175095', '8.11 MB'),
'2011': HmdaDataFile('hmda_2011_ks_all-records_labels.zip', '126214', '6.44 MB'),
'2010': HmdaDataFile('hmda_2010_ks_all-records_labels.zip', '149351', '7.61 MB'),
'2013': HmdaDataFile('hmda_2013_ks_all-records_labels.zip', '134547', '7.17 MB'),
'2012': HmdaDataFile('hmda_2012_ks_all-records_labels.zip', '149627', '7.77 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ks_originated-records_labels.zip', '69335', '3.31 MB'),
'2007': HmdaDataFile('hmda_2007_ks_originated-records_labels.zip', '86578', '4.16 MB'),
'2017': HmdaDataFile('hmda_2017_ks_originated-records_labels.zip', '61197', '2.05 MB'),
'2015': HmdaDataFile('hmda_2015_ks_originated-records_labels.zip', '63448', '3.34 MB'),
'2014': HmdaDataFile('hmda_2014_ks_originated-records_labels.zip', '53984', '2.79 MB'),
'2008': HmdaDataFile('hmda_2008_ks_originated-records_labels.zip', '72280', '3.42 MB'),
'2009': HmdaDataFile('hmda_2009_ks_originated-records_labels.zip', '90077', '3.93 MB'),
'2011': HmdaDataFile('hmda_2011_ks_originated-records_labels.zip', '66876', '3.14 MB'),
'2010': HmdaDataFile('hmda_2010_ks_originated-records_labels.zip', '78256', '3.64 MB'),
'2013': HmdaDataFile('hmda_2013_ks_originated-records_labels.zip', '74582', '3.64 MB'),
'2012': HmdaDataFile('hmda_2012_ks_originated-records_labels.zip', '84924', '4.01 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '57549', '1.78 MB'),
'2007': HmdaDataFile('hmda_2007_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '62611', '2.02 MB'),
'2017': HmdaDataFile('hmda_2017_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '50202', '1.17 MB'),
'2015': HmdaDataFile('hmda_2015_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '52685', '1.86 MB'),
'2014': HmdaDataFile('hmda_2014_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '43216', '1.46 MB'),
'2008': HmdaDataFile('hmda_2008_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '56265', '1.79 MB'),
'2009': HmdaDataFile('hmda_2009_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '78703', '2.28 MB'),
'2011': HmdaDataFile('hmda_2011_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '56542', '1.71 MB'),
'2010': HmdaDataFile('hmda_2010_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '67768', '2.02 MB'),
'2013': HmdaDataFile('hmda_2013_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '62047', '1.98 MB'),
'2012': HmdaDataFile('hmda_2012_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '73455', '2.25 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ks_all-records_codes.zip', '124922', '4.16 MB'),
'2007': HmdaDataFile('hmda_2007_ks_all-records_codes.zip', '194856', '6.5 MB'),
'2017': HmdaDataFile('hmda_2017_ks_all-records_codes.zip', '107563', '2.69 MB'),
'2015': HmdaDataFile('hmda_2015_ks_all-records_codes.zip', '113367', '4.18 MB'),
'2014': HmdaDataFile('hmda_2014_ks_all-records_codes.zip', '96245', '3.49 MB'),
'2008': HmdaDataFile('hmda_2008_ks_all-records_codes.zip', '146968', '4.88 MB'),
'2009': HmdaDataFile('hmda_2009_ks_all-records_codes.zip', '175095', '5.5 MB'),
'2011': HmdaDataFile('hmda_2011_ks_all-records_codes.zip', '126214', '4.14 MB'),
'2010': HmdaDataFile('hmda_2010_ks_all-records_codes.zip', '149351', '4.91 MB'),
'2013': HmdaDataFile('hmda_2013_ks_all-records_codes.zip', '134547', '4.67 MB'),
'2012': HmdaDataFile('hmda_2012_ks_all-records_codes.zip', '149627', '5.06 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ks_originated-records_codes.zip', '69335', '2.18 MB'),
'2007': HmdaDataFile('hmda_2007_ks_originated-records_codes.zip', '86578', '2.85 MB'),
'2017': HmdaDataFile('hmda_2017_ks_originated-records_codes.zip', '61197', '1.44 MB'),
'2015': HmdaDataFile('hmda_2015_ks_originated-records_codes.zip', '63448', '2.26 MB'),
'2014': HmdaDataFile('hmda_2014_ks_originated-records_codes.zip', '53984', '1.86 MB'),
'2008': HmdaDataFile('hmda_2008_ks_originated-records_codes.zip', '72280', '2.34 MB'),
'2009': HmdaDataFile('hmda_2009_ks_originated-records_codes.zip', '90077', '2.69 MB'),
'2011': HmdaDataFile('hmda_2011_ks_originated-records_codes.zip', '66876', '2.05 MB'),
'2010': HmdaDataFile('hmda_2010_ks_originated-records_codes.zip', '78256', '2.38 MB'),
'2013': HmdaDataFile('hmda_2013_ks_originated-records_codes.zip', '74582', '2.39 MB'),
'2012': HmdaDataFile('hmda_2012_ks_originated-records_codes.zip', '84924', '2.62 MB')
}
}
},
'mt': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '21366', '875.05 KB'),
'2007': HmdaDataFile('hmda_2007_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '20722', '828.42 KB'),
'2017': HmdaDataFile('hmda_2017_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '18983', '556.44 KB'),
'2015': HmdaDataFile('hmda_2015_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '19758', '888.25 KB'),
'2014': HmdaDataFile('hmda_2014_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '15841', '699.12 KB'),
'2008': HmdaDataFile('hmda_2008_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '19303', '733.23 KB'),
'2009': HmdaDataFile('hmda_2009_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '30035', '1.08 MB'),
'2011': HmdaDataFile('hmda_2011_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '18104', '695.47 KB'),
'2010': HmdaDataFile('hmda_2010_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '21854', '845.34 KB'),
'2013': HmdaDataFile('hmda_2013_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '23631', '959.77 KB'),
'2012': HmdaDataFile('hmda_2012_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '25225', '1 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mt_all-records_labels.zip', '50537', '2.27 MB'),
'2007': HmdaDataFile('hmda_2007_mt_all-records_labels.zip', '72952', '3 MB'),
'2017': HmdaDataFile('hmda_2017_mt_all-records_labels.zip', '45597', '1.44 MB'),
'2015': HmdaDataFile('hmda_2015_mt_all-records_labels.zip', '46419', '2.32 MB'),
'2014': HmdaDataFile('hmda_2014_mt_all-records_labels.zip', '38476', '1.85 MB'),
'2008': HmdaDataFile('hmda_2008_mt_all-records_labels.zip', '55795', '2.26 MB'),
'2009': HmdaDataFile('hmda_2009_mt_all-records_labels.zip', '72632', '2.81 MB'),
'2011': HmdaDataFile('hmda_2011_mt_all-records_labels.zip', '45730', '1.97 MB'),
'2010': HmdaDataFile('hmda_2010_mt_all-records_labels.zip', '54464', '2.34 MB'),
'2013': HmdaDataFile('hmda_2013_mt_all-records_labels.zip', '56588', '2.52 MB'),
'2012': HmdaDataFile('hmda_2012_mt_all-records_labels.zip', '57320', '2.52 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mt_originated-records_labels.zip', '26863', '1.12 MB'),
'2007': HmdaDataFile('hmda_2007_mt_originated-records_labels.zip', '31811', '1.27 MB'),
'2017': HmdaDataFile('hmda_2017_mt_originated-records_labels.zip', '24577', '727.45 KB'),
'2015': HmdaDataFile('hmda_2015_mt_originated-records_labels.zip', '25376', '1.17 MB'),
'2014': HmdaDataFile('hmda_2014_mt_originated-records_labels.zip', '21159', '958.54 KB'),
'2008': HmdaDataFile('hmda_2008_mt_originated-records_labels.zip', '26278', '1.02 MB'),
'2009': HmdaDataFile('hmda_2009_mt_originated-records_labels.zip', '36202', '1.33 MB'),
'2011': HmdaDataFile('hmda_2011_mt_originated-records_labels.zip', '23529', '929.24 KB'),
'2010': HmdaDataFile('hmda_2010_mt_originated-records_labels.zip', '27263', '1.08 MB'),
'2013': HmdaDataFile('hmda_2013_mt_originated-records_labels.zip', '31007', '1.28 MB'),
'2012': HmdaDataFile('hmda_2012_mt_originated-records_labels.zip', '31452', '1.28 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '21366', '564.56 KB'),
'2007': HmdaDataFile('hmda_2007_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '20722', '538.06 KB'),
'2017': HmdaDataFile('hmda_2017_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '18983', '389.92 KB'),
'2015': HmdaDataFile('hmda_2015_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '19758', '575.24 KB'),
'2014': HmdaDataFile('hmda_2014_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '15841', '453.36 KB'),
'2008': HmdaDataFile('hmda_2008_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '19303', '479.86 KB'),
'2009': HmdaDataFile('hmda_2009_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '30035', '726.19 KB'),
'2011': HmdaDataFile('hmda_2011_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '18104', '448.42 KB'),
'2010': HmdaDataFile('hmda_2010_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '21854', '543.67 KB'),
'2013': HmdaDataFile('hmda_2013_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '23631', '616.38 KB'),
'2012': HmdaDataFile('hmda_2012_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '25225', '642.31 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mt_all-records_codes.zip', '50537', '1.41 MB'),
'2007': HmdaDataFile('hmda_2007_mt_all-records_codes.zip', '72952', '1.92 MB'),
'2017': HmdaDataFile('hmda_2017_mt_all-records_codes.zip', '45597', '947.96 KB'),
'2015': HmdaDataFile('hmda_2015_mt_all-records_codes.zip', '46419', '1.45 MB'),
'2014': HmdaDataFile('hmda_2014_mt_all-records_codes.zip', '38476', '1.16 MB'),
'2008': HmdaDataFile('hmda_2008_mt_all-records_codes.zip', '55795', '1.44 MB'),
'2009': HmdaDataFile('hmda_2009_mt_all-records_codes.zip', '72632', '1.84 MB'),
'2011': HmdaDataFile('hmda_2011_mt_all-records_codes.zip', '45730', '1.22 MB'),
'2010': HmdaDataFile('hmda_2010_mt_all-records_codes.zip', '54464', '1.45 MB'),
'2013': HmdaDataFile('hmda_2013_mt_all-records_codes.zip', '56588', '1.56 MB'),
'2012': HmdaDataFile('hmda_2012_mt_all-records_codes.zip', '57320', '1.57 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mt_originated-records_codes.zip', '26863', '722 KB'),
'2007': HmdaDataFile('hmda_2007_mt_originated-records_codes.zip', '31811', '828.36 KB'),
'2017': HmdaDataFile('hmda_2017_mt_originated-records_codes.zip', '24577', '501.64 KB'),
'2015': HmdaDataFile('hmda_2015_mt_originated-records_codes.zip', '25376', '750.19 KB'),
'2014': HmdaDataFile('hmda_2014_mt_originated-records_codes.zip', '21159', '615.26 KB'),
'2008': HmdaDataFile('hmda_2008_mt_originated-records_codes.zip', '26278', '663.11 KB'),
'2009': HmdaDataFile('hmda_2009_mt_originated-records_codes.zip', '36202', '887.48 KB'),
'2011': HmdaDataFile('hmda_2011_mt_originated-records_codes.zip', '23529', '592.46 KB'),
'2010': HmdaDataFile('hmda_2010_mt_originated-records_codes.zip', '27263', '689.27 KB'),
'2013': HmdaDataFile('hmda_2013_mt_originated-records_codes.zip', '31007', '815.5 KB'),
'2012': HmdaDataFile('hmda_2012_mt_originated-records_codes.zip', '31452', '815.03 KB')
}
}
},
'ms': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '37426', '1.74 MB'),
'2007': HmdaDataFile('hmda_2007_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '50509', '2.25 MB'),
'2017': HmdaDataFile('hmda_2017_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '36762', '1.08 MB'),
'2015': HmdaDataFile('hmda_2015_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '33087', '1.71 MB'),
'2014': HmdaDataFile('hmda_2014_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '29718', '1.39 MB'),
'2008': HmdaDataFile('hmda_2008_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '41719', '1.93 MB'),
'2009': HmdaDataFile('hmda_2009_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '46880', '1.96 MB'),
'2011': HmdaDataFile('hmda_2011_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '34477', '1.51 MB'),
'2010': HmdaDataFile('hmda_2010_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '39727', '1.77 MB'),
'2013': HmdaDataFile('hmda_2013_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '40639', '1.88 MB'),
'2012': HmdaDataFile('hmda_2012_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '43832', '1.98 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ms_all-records_labels.zip', '107199', '5.36 MB'),
'2007': HmdaDataFile('hmda_2007_ms_all-records_labels.zip', '173130', '7.83 MB'),
'2017': HmdaDataFile('hmda_2017_ms_all-records_labels.zip', '101384', '3.15 MB'),
'2015': HmdaDataFile('hmda_2015_ms_all-records_labels.zip', '95532', '5.35 MB'),
'2014': HmdaDataFile('hmda_2014_ms_all-records_labels.zip', '89193', '4.45 MB'),
'2008': HmdaDataFile('hmda_2008_ms_all-records_labels.zip', '136596', '6.41 MB'),
'2009': HmdaDataFile('hmda_2009_ms_all-records_labels.zip', '137988', '6 MB'),
'2011': HmdaDataFile('hmda_2011_ms_all-records_labels.zip', '106833', '5.1 MB'),
'2010': HmdaDataFile('hmda_2010_ms_all-records_labels.zip', '120079', '5.8 MB'),
'2013': HmdaDataFile('hmda_2013_ms_all-records_labels.zip', '115511', '5.75 MB'),
'2012': HmdaDataFile('hmda_2012_ms_all-records_labels.zip', '119816', '5.82 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ms_originated-records_labels.zip', '51684', '2.47 MB'),
'2007': HmdaDataFile('hmda_2007_ms_originated-records_labels.zip', '75757', '3.36 MB'),
'2017': HmdaDataFile('hmda_2017_ms_originated-records_labels.zip', '51038', '1.5 MB'),
'2015': HmdaDataFile('hmda_2015_ms_originated-records_labels.zip', '47435', '2.5 MB'),
'2014': HmdaDataFile('hmda_2014_ms_originated-records_labels.zip', '44303', '2.12 MB'),
'2008': HmdaDataFile('hmda_2008_ms_originated-records_labels.zip', '63435', '2.92 MB'),
'2009': HmdaDataFile('hmda_2009_ms_originated-records_labels.zip', '64926', '2.75 MB'),
'2011': HmdaDataFile('hmda_2011_ms_originated-records_labels.zip', '49962', '2.26 MB'),
'2010': HmdaDataFile('hmda_2010_ms_originated-records_labels.zip', '55509', '2.53 MB'),
'2013': HmdaDataFile('hmda_2013_ms_originated-records_labels.zip', '57688', '2.73 MB'),
'2012': HmdaDataFile('hmda_2012_ms_originated-records_labels.zip', '59972', '2.76 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '37426', '1.15 MB'),
'2007': HmdaDataFile('hmda_2007_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '50509', '1.5 MB'),
'2017': HmdaDataFile('hmda_2017_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '36762', '770.12 KB'),
'2015': HmdaDataFile('hmda_2015_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '33087', '1.14 MB'),
'2014': HmdaDataFile('hmda_2014_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '29718', '914.92 KB'),
'2008': HmdaDataFile('hmda_2008_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '41719', '1.3 MB'),
'2009': HmdaDataFile('hmda_2009_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '46880', '1.32 MB'),
'2011': HmdaDataFile('hmda_2011_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '34477', '986.29 KB'),
'2010': HmdaDataFile('hmda_2010_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '39727', '1.16 MB'),
'2013': HmdaDataFile('hmda_2013_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '40639', '1.23 MB'),
'2012': HmdaDataFile('hmda_2012_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '43832', '1.29 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ms_all-records_codes.zip', '107199', '3.37 MB'),
'2007': HmdaDataFile('hmda_2007_ms_all-records_codes.zip', '173130', '5.1 MB'),
'2017': HmdaDataFile('hmda_2017_ms_all-records_codes.zip', '101384', '2.07 MB'),
'2015': HmdaDataFile('hmda_2015_ms_all-records_codes.zip', '95532', '3.43 MB'),
'2014': HmdaDataFile('hmda_2014_ms_all-records_codes.zip', '89193', '2.8 MB'),
'2008': HmdaDataFile('hmda_2008_ms_all-records_codes.zip', '136596', '4.22 MB'),
'2009': HmdaDataFile('hmda_2009_ms_all-records_codes.zip', '137988', '3.96 MB'),
'2011': HmdaDataFile('hmda_2011_ms_all-records_codes.zip', '106833', '3.19 MB'),
'2010': HmdaDataFile('hmda_2010_ms_all-records_codes.zip', '120079', '3.65 MB'),
'2013': HmdaDataFile('hmda_2013_ms_all-records_codes.zip', '115511', '3.61 MB'),
'2012': HmdaDataFile('hmda_2012_ms_all-records_codes.zip', '119816', '3.68 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ms_originated-records_codes.zip', '51684', '1.61 MB'),
'2007': HmdaDataFile('hmda_2007_ms_originated-records_codes.zip', '75757', '2.24 MB'),
'2017': HmdaDataFile('hmda_2017_ms_originated-records_codes.zip', '51038', '1.05 MB'),
'2015': HmdaDataFile('hmda_2015_ms_originated-records_codes.zip', '47435', '1.65 MB'),
'2014': HmdaDataFile('hmda_2014_ms_originated-records_codes.zip', '44303', '1.37 MB'),
'2008': HmdaDataFile('hmda_2008_ms_originated-records_codes.zip', '63435', '1.96 MB'),
'2009': HmdaDataFile('hmda_2009_ms_originated-records_codes.zip', '64926', '1.84 MB'),
'2011': HmdaDataFile('hmda_2011_ms_originated-records_codes.zip', '49962', '1.46 MB'),
'2010': HmdaDataFile('hmda_2010_ms_originated-records_codes.zip', '55509', '1.64 MB'),
'2013': HmdaDataFile('hmda_2013_ms_originated-records_codes.zip', '57688', '1.76 MB'),
'2012': HmdaDataFile('hmda_2012_ms_originated-records_codes.zip', '59972', '1.79 MB')
}
}
},
'sc': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '108076', '5.05 MB'),
'2007': HmdaDataFile('hmda_2007_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '106873', '4.81 MB'),
'2017': HmdaDataFile('hmda_2017_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '100333', '3.01 MB'),
'2015': HmdaDataFile('hmda_2015_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '92167', '5.02 MB'),
'2014': HmdaDataFile('hmda_2014_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '73174', '3.73 MB'),
'2008': HmdaDataFile('hmda_2008_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '86677', '3.98 MB'),
'2009': HmdaDataFile('hmda_2009_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '110931', '4.77 MB'),
'2011': HmdaDataFile('hmda_2011_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '78145', '3.46 MB'),
'2010': HmdaDataFile('hmda_2010_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '85858', '3.92 MB'),
'2013': HmdaDataFile('hmda_2013_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '100643', '4.76 MB'),
'2012': HmdaDataFile('hmda_2012_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '106107', '5 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_sc_all-records_labels.zip', '257644', '13.17 MB'),
'2007': HmdaDataFile('hmda_2007_sc_all-records_labels.zip', '383001', '17.86 MB'),
'2017': HmdaDataFile('hmda_2017_sc_all-records_labels.zip', '242772', '8.25 MB'),
'2015': HmdaDataFile('hmda_2015_sc_all-records_labels.zip', '225542', '13.41 MB'),
'2014': HmdaDataFile('hmda_2014_sc_all-records_labels.zip', '191197', '10.66 MB'),
'2008': HmdaDataFile('hmda_2008_sc_all-records_labels.zip', '271908', '13.26 MB'),
'2009': HmdaDataFile('hmda_2009_sc_all-records_labels.zip', '291014', '13.32 MB'),
'2011': HmdaDataFile('hmda_2011_sc_all-records_labels.zip', '218369', '11.01 MB'),
'2010': HmdaDataFile('hmda_2010_sc_all-records_labels.zip', '235957', '12.07 MB'),
'2013': HmdaDataFile('hmda_2013_sc_all-records_labels.zip', '259782', '13.73 MB'),
'2012': HmdaDataFile('hmda_2012_sc_all-records_labels.zip', '267040', '14.05 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_sc_originated-records_labels.zip', '131134', '6.36 MB'),
'2007': HmdaDataFile('hmda_2007_sc_originated-records_labels.zip', '161777', '7.5 MB'),
'2017': HmdaDataFile('hmda_2017_sc_originated-records_labels.zip', '123971', '3.87 MB'),
'2015': HmdaDataFile('hmda_2015_sc_originated-records_labels.zip', '114336', '6.37 MB'),
'2014': HmdaDataFile('hmda_2014_sc_originated-records_labels.zip', '93412', '4.86 MB'),
'2008': HmdaDataFile('hmda_2008_sc_originated-records_labels.zip', '118458', '5.61 MB'),
'2009': HmdaDataFile('hmda_2009_sc_originated-records_labels.zip', '133057', '5.91 MB'),
'2011': HmdaDataFile('hmda_2011_sc_originated-records_labels.zip', '97461', '4.61 MB'),
'2010': HmdaDataFile('hmda_2010_sc_originated-records_labels.zip', '105626', '4.87 MB'),
'2013': HmdaDataFile('hmda_2013_sc_originated-records_labels.zip', '127479', '6.16 MB'),
'2012': HmdaDataFile('hmda_2012_sc_originated-records_labels.zip', '130453', '6.21 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '108076', '3.37 MB'),
'2007': HmdaDataFile('hmda_2007_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '106873', '3.2 MB'),
'2017': HmdaDataFile('hmda_2017_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '100333', '2.11 MB'),
'2015': HmdaDataFile('hmda_2015_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '92167', '3.46 MB'),
'2014': HmdaDataFile('hmda_2014_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '73174', '2.49 MB'),
'2008': HmdaDataFile('hmda_2008_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '86677', '2.67 MB'),
'2009': HmdaDataFile('hmda_2009_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '110931', '3.27 MB'),
'2011': HmdaDataFile('hmda_2011_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '78145', '2.24 MB'),
'2010': HmdaDataFile('hmda_2010_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '85858', '2.58 MB'),
'2013': HmdaDataFile('hmda_2013_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '100643', '3.16 MB'),
'2012': HmdaDataFile('hmda_2012_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '106107', '3.31 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_sc_all-records_codes.zip', '257644', '8.51 MB'),
'2007': HmdaDataFile('hmda_2007_sc_all-records_codes.zip', '383001', '11.71 MB'),
'2017': HmdaDataFile('hmda_2017_sc_all-records_codes.zip', '242772', '5.31 MB'),
'2015': HmdaDataFile('hmda_2015_sc_all-records_codes.zip', '225542', '8.88 MB'),
'2014': HmdaDataFile('hmda_2014_sc_all-records_codes.zip', '191197', '6.91 MB'),
'2008': HmdaDataFile('hmda_2008_sc_all-records_codes.zip', '271908', '8.77 MB'),
'2009': HmdaDataFile('hmda_2009_sc_all-records_codes.zip', '291014', '8.92 MB'),
'2011': HmdaDataFile('hmda_2011_sc_all-records_codes.zip', '218369', '6.92 MB'),
'2010': HmdaDataFile('hmda_2010_sc_all-records_codes.zip', '235957', '7.67 MB'),
'2013': HmdaDataFile('hmda_2013_sc_all-records_codes.zip', '259782', '8.91 MB'),
'2012': HmdaDataFile('hmda_2012_sc_all-records_codes.zip', '267040', '9.12 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_sc_originated-records_codes.zip', '131134', '4.24 MB'),
'2007': HmdaDataFile('hmda_2007_sc_originated-records_codes.zip', '161777', '5.03 MB'),
'2017': HmdaDataFile('hmda_2017_sc_originated-records_codes.zip', '123971', '2.67 MB'),
'2015': HmdaDataFile('hmda_2015_sc_originated-records_codes.zip', '114336', '4.36 MB'),
'2014': HmdaDataFile('hmda_2014_sc_originated-records_codes.zip', '93412', '3.22 MB'),
'2008': HmdaDataFile('hmda_2008_sc_originated-records_codes.zip', '118458', '3.78 MB'),
'2009': HmdaDataFile('hmda_2009_sc_originated-records_codes.zip', '133057', '4.05 MB'),
'2011': HmdaDataFile('hmda_2011_sc_originated-records_codes.zip', '97461', '2.99 MB'),
'2010': HmdaDataFile('hmda_2010_sc_originated-records_codes.zip', '105626', '3.17 MB'),
'2013': HmdaDataFile('hmda_2013_sc_originated-records_codes.zip', '127479', '4.06 MB'),
'2012': HmdaDataFile('hmda_2012_sc_originated-records_codes.zip', '130453', '4.07 MB')
}
}
},
'ky': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '81819', '3.99 MB'),
'2007': HmdaDataFile('hmda_2007_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '82043', '3.84 MB'),
'2017': HmdaDataFile('hmda_2017_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '72775', '2.34 MB'),
'2015': HmdaDataFile('hmda_2015_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '71716', '3.98 MB'),
'2014': HmdaDataFile('hmda_2014_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '59329', '3.1 MB'),
'2008': HmdaDataFile('hmda_2008_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '73163', '3.45 MB'),
'2009': HmdaDataFile('hmda_2009_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '103950', '4.54 MB'),
'2011': HmdaDataFile('hmda_2011_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '80580', '3.74 MB'),
'2010': HmdaDataFile('hmda_2010_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '91441', '4.28 MB'),
'2013': HmdaDataFile('hmda_2013_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '89932', '4.39 MB'),
'2012': HmdaDataFile('hmda_2012_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '105634', '5.06 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ky_all-records_labels.zip', '185587', '10.06 MB'),
'2007': HmdaDataFile('hmda_2007_ky_all-records_labels.zip', '285560', '14.05 MB'),
'2017': HmdaDataFile('hmda_2017_ky_all-records_labels.zip', '173004', '6.52 MB'),
'2015': HmdaDataFile('hmda_2015_ky_all-records_labels.zip', '167714', '10.04 MB'),
'2014': HmdaDataFile('hmda_2014_ky_all-records_labels.zip', '149317', '8.6 MB'),
'2008': HmdaDataFile('hmda_2008_ky_all-records_labels.zip', '215096', '10.84 MB'),
'2009': HmdaDataFile('hmda_2009_ky_all-records_labels.zip', '246427', '11.78 MB'),
'2011': HmdaDataFile('hmda_2011_ky_all-records_labels.zip', '203934', '10.77 MB'),
'2010': HmdaDataFile('hmda_2010_ky_all-records_labels.zip', '222486', '11.61 MB'),
'2013': HmdaDataFile('hmda_2013_ky_all-records_labels.zip', '215281', '11.75 MB'),
'2012': HmdaDataFile('hmda_2012_ky_all-records_labels.zip', '239015', '12.85 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ky_originated-records_labels.zip', '100105', '5.04 MB'),
'2007': HmdaDataFile('hmda_2007_ky_originated-records_labels.zip', '121278', '5.84 MB'),
'2017': HmdaDataFile('hmda_2017_ky_originated-records_labels.zip', '91096', '3.01 MB'),
'2015': HmdaDataFile('hmda_2015_ky_originated-records_labels.zip', '88714', '5.02 MB'),
'2014': HmdaDataFile('hmda_2014_ky_originated-records_labels.zip', '76520', '4.06 MB'),
'2008': HmdaDataFile('hmda_2008_ky_originated-records_labels.zip', '99394', '4.77 MB'),
'2009': HmdaDataFile('hmda_2009_ky_originated-records_labels.zip', '123485', '5.57 MB'),
'2011': HmdaDataFile('hmda_2011_ky_originated-records_labels.zip', '98794', '4.7 MB'),
'2010': HmdaDataFile('hmda_2010_ky_originated-records_labels.zip', '109716', '5.22 MB'),
'2013': HmdaDataFile('hmda_2013_ky_originated-records_labels.zip', '110912', '5.58 MB'),
'2012': HmdaDataFile('hmda_2012_ky_originated-records_labels.zip', '125050', '6.13 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '81819', '2.64 MB'),
'2007': HmdaDataFile('hmda_2007_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '82043', '2.59 MB'),
'2017': HmdaDataFile('hmda_2017_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '72775', '1.68 MB'),
'2015': HmdaDataFile('hmda_2015_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '71716', '2.68 MB'),
'2014': HmdaDataFile('hmda_2014_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '59329', '2.06 MB'),
'2008': HmdaDataFile('hmda_2008_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '73163', '2.34 MB'),
'2009': HmdaDataFile('hmda_2009_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '103950', '3.13 MB'),
'2011': HmdaDataFile('hmda_2011_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '80580', '2.43 MB'),
'2010': HmdaDataFile('hmda_2010_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '91441', '2.81 MB'),
'2013': HmdaDataFile('hmda_2013_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '89932', '2.91 MB'),
'2012': HmdaDataFile('hmda_2012_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '105634', '3.33 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ky_all-records_codes.zip', '185587', '6.56 MB'),
'2007': HmdaDataFile('hmda_2007_ky_all-records_codes.zip', '285560', '9.35 MB'),
'2017': HmdaDataFile('hmda_2017_ky_all-records_codes.zip', '173004', '4.32 MB'),
'2015': HmdaDataFile('hmda_2015_ky_all-records_codes.zip', '167714', '6.5 MB'),
'2014': HmdaDataFile('hmda_2014_ky_all-records_codes.zip', '149317', '5.56 MB'),
'2008': HmdaDataFile('hmda_2008_ky_all-records_codes.zip', '215096', '7.24 MB'),
'2009': HmdaDataFile('hmda_2009_ky_all-records_codes.zip', '246427', '7.96 MB'),
'2011': HmdaDataFile('hmda_2011_ky_all-records_codes.zip', '203934', '6.87 MB'),
'2010': HmdaDataFile('hmda_2010_ky_all-records_codes.zip', '222486', '7.44 MB'),
'2013': HmdaDataFile('hmda_2013_ky_all-records_codes.zip', '215281', '7.63 MB'),
'2012': HmdaDataFile('hmda_2012_ky_all-records_codes.zip', '239015', '8.34 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ky_originated-records_codes.zip', '100105', '3.34 MB'),
'2007': HmdaDataFile('hmda_2007_ky_originated-records_codes.zip', '121278', '3.97 MB'),
'2017': HmdaDataFile('hmda_2017_ky_originated-records_codes.zip', '91096', '2.12 MB'),
'2015': HmdaDataFile('hmda_2015_ky_originated-records_codes.zip', '88714', '3.36 MB'),
'2014': HmdaDataFile('hmda_2014_ky_originated-records_codes.zip', '76520', '2.67 MB'),
'2008': HmdaDataFile('hmda_2008_ky_originated-records_codes.zip', '99394', '3.22 MB'),
'2009': HmdaDataFile('hmda_2009_ky_originated-records_codes.zip', '123485', '3.84 MB'),
'2011': HmdaDataFile('hmda_2011_ky_originated-records_codes.zip', '98794', '3.04 MB'),
'2010': HmdaDataFile('hmda_2010_ky_originated-records_codes.zip', '109716', '3.39 MB'),
'2013': HmdaDataFile('hmda_2013_ky_originated-records_codes.zip', '110912', '3.68 MB'),
'2012': HmdaDataFile('hmda_2012_ky_originated-records_codes.zip', '125050', '4.01 MB')
}
}
},
'or': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '114167', '5.46 MB'),
'2007': HmdaDataFile('hmda_2007_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '101187', '4.59 MB'),
'2017': HmdaDataFile('hmda_2017_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '95129', '2.88 MB'),
'2015': HmdaDataFile('hmda_2015_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '97022', '5.17 MB'),
'2014': HmdaDataFile('hmda_2014_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '69787', '3.39 MB'),
'2008': HmdaDataFile('hmda_2008_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '77187', '3.56 MB'),
'2009': HmdaDataFile('hmda_2009_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '118063', '5.04 MB'),
'2011': HmdaDataFile('hmda_2011_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '80824', '3.93 MB'),
'2010': HmdaDataFile('hmda_2010_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '102166', '4.84 MB'),
'2013': HmdaDataFile('hmda_2013_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '102103', '4.92 MB'),
'2012': HmdaDataFile('hmda_2012_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '119486', '5.65 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_or_all-records_labels.zip', '249739', '12.75 MB'),
'2007': HmdaDataFile('hmda_2007_or_all-records_labels.zip', '376732', '17.18 MB'),
'2017': HmdaDataFile('hmda_2017_or_all-records_labels.zip', '211344', '7.17 MB'),
'2015': HmdaDataFile('hmda_2015_or_all-records_labels.zip', '214365', '12.36 MB'),
'2014': HmdaDataFile('hmda_2014_or_all-records_labels.zip', '168582', '8.75 MB'),
'2008': HmdaDataFile('hmda_2008_or_all-records_labels.zip', '251125', '12 MB'),
'2009': HmdaDataFile('hmda_2009_or_all-records_labels.zip', '300552', '13.38 MB'),
'2011': HmdaDataFile('hmda_2011_or_all-records_labels.zip', '204085', '10.92 MB'),
'2010': HmdaDataFile('hmda_2010_or_all-records_labels.zip', '244799', '12.74 MB'),
'2013': HmdaDataFile('hmda_2013_or_all-records_labels.zip', '240614', '12.69 MB'),
'2012': HmdaDataFile('hmda_2012_or_all-records_labels.zip', '269285', '14.02 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_or_originated-records_labels.zip', '136083', '6.58 MB'),
'2007': HmdaDataFile('hmda_2007_or_originated-records_labels.zip', '151933', '7.06 MB'),
'2017': HmdaDataFile('hmda_2017_or_originated-records_labels.zip', '116700', '3.65 MB'),
'2015': HmdaDataFile('hmda_2015_or_originated-records_labels.zip', '117674', '6.39 MB'),
'2014': HmdaDataFile('hmda_2014_or_originated-records_labels.zip', '87626', '4.34 MB'),
'2008': HmdaDataFile('hmda_2008_or_originated-records_labels.zip', '97998', '4.65 MB'),
'2009': HmdaDataFile('hmda_2009_or_originated-records_labels.zip', '134377', '5.88 MB'),
'2011': HmdaDataFile('hmda_2011_or_originated-records_labels.zip', '98243', '4.9 MB'),
'2010': HmdaDataFile('hmda_2010_or_originated-records_labels.zip', '118373', '5.68 MB'),
'2013': HmdaDataFile('hmda_2013_or_originated-records_labels.zip', '128622', '6.28 MB'),
'2012': HmdaDataFile('hmda_2012_or_originated-records_labels.zip', '144891', '7.02 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '114167', '3.73 MB'),
'2007': HmdaDataFile('hmda_2007_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '101187', '3.17 MB'),
'2017': HmdaDataFile('hmda_2017_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '95129', '1.97 MB'),
'2015': HmdaDataFile('hmda_2015_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '97022', '3.6 MB'),
'2014': HmdaDataFile('hmda_2014_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '69787', '2.33 MB'),
'2008': HmdaDataFile('hmda_2008_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '77187', '2.47 MB'),
'2009': HmdaDataFile('hmda_2009_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '118063', '3.58 MB'),
'2011': HmdaDataFile('hmda_2011_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '80824', '2.65 MB'),
'2010': HmdaDataFile('hmda_2010_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '102166', '3.28 MB'),
'2013': HmdaDataFile('hmda_2013_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '102103', '3.39 MB'),
'2012': HmdaDataFile('hmda_2012_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '119486', '3.83 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_or_all-records_codes.zip', '249739', '8.44 MB'),
'2007': HmdaDataFile('hmda_2007_or_all-records_codes.zip', '376732', '11.64 MB'),
'2017': HmdaDataFile('hmda_2017_or_all-records_codes.zip', '211344', '4.61 MB'),
'2015': HmdaDataFile('hmda_2015_or_all-records_codes.zip', '214365', '8.29 MB'),
'2014': HmdaDataFile('hmda_2014_or_all-records_codes.zip', '168582', '5.8 MB'),
'2008': HmdaDataFile('hmda_2008_or_all-records_codes.zip', '251125', '8.16 MB'),
'2009': HmdaDataFile('hmda_2009_or_all-records_codes.zip', '300552', '9.29 MB'),
'2011': HmdaDataFile('hmda_2011_or_all-records_codes.zip', '204085', '7.17 MB'),
'2010': HmdaDataFile('hmda_2010_or_all-records_codes.zip', '244799', '8.44 MB'),
'2013': HmdaDataFile('hmda_2013_or_all-records_codes.zip', '240614', '8.5 MB'),
'2012': HmdaDataFile('hmda_2012_or_all-records_codes.zip', '269285', '9.32 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_or_originated-records_codes.zip', '136083', '4.48 MB'),
'2007': HmdaDataFile('hmda_2007_or_originated-records_codes.zip', '151933', '4.92 MB'),
'2017': HmdaDataFile('hmda_2017_or_originated-records_codes.zip', '116700', '2.48 MB'),
'2015': HmdaDataFile('hmda_2015_or_originated-records_codes.zip', '117674', '4.43 MB'),
'2014': HmdaDataFile('hmda_2014_or_originated-records_codes.zip', '87626', '2.98 MB'),
'2008': HmdaDataFile('hmda_2008_or_originated-records_codes.zip', '97998', '3.24 MB'),
'2009': HmdaDataFile('hmda_2009_or_originated-records_codes.zip', '134377', '4.19 MB'),
'2011': HmdaDataFile('hmda_2011_or_originated-records_codes.zip', '98243', '3.29 MB'),
'2010': HmdaDataFile('hmda_2010_or_originated-records_codes.zip', '118373', '3.84 MB'),
'2013': HmdaDataFile('hmda_2013_or_originated-records_codes.zip', '128622', '4.29 MB'),
'2012': HmdaDataFile('hmda_2012_or_originated-records_codes.zip', '144891', '4.75 MB')
}
}
},
'sd': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '17608', '715.46 KB'),
'2007': HmdaDataFile('hmda_2007_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '15984', '612.26 KB'),
'2017': HmdaDataFile('hmda_2017_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '15341', '394.73 KB'),
'2015': HmdaDataFile('hmda_2015_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '15835', '706.73 KB'),
'2014': HmdaDataFile('hmda_2014_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '12839', '526.79 KB'),
'2008': HmdaDataFile('hmda_2008_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '16258', '623.5 KB'),
'2009': HmdaDataFile('hmda_2009_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '24417', '866.74 KB'),
'2011': HmdaDataFile('hmda_2011_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '17828', '666.34 KB'),
'2010': HmdaDataFile('hmda_2010_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '20738', '786.61 KB'),
'2013': HmdaDataFile('hmda_2013_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '19523', '775.49 KB'),
'2012': HmdaDataFile('hmda_2012_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '24459', '981.4 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_sd_all-records_labels.zip', '37648', '1.67 MB'),
'2007': HmdaDataFile('hmda_2007_sd_all-records_labels.zip', '47432', '1.97 MB'),
'2017': HmdaDataFile('hmda_2017_sd_all-records_labels.zip', '33167', '980.45 KB'),
'2015': HmdaDataFile('hmda_2015_sd_all-records_labels.zip', '35072', '1.72 MB'),
'2014': HmdaDataFile('hmda_2014_sd_all-records_labels.zip', '29763', '1.37 MB'),
'2008': HmdaDataFile('hmda_2008_sd_all-records_labels.zip', '41213', '1.78 MB'),
'2009': HmdaDataFile('hmda_2009_sd_all-records_labels.zip', '53033', '2.13 MB'),
'2011': HmdaDataFile('hmda_2011_sd_all-records_labels.zip', '38426', '1.67 MB'),
'2010': HmdaDataFile('hmda_2010_sd_all-records_labels.zip', '45150', '1.96 MB'),
'2013': HmdaDataFile('hmda_2013_sd_all-records_labels.zip', '43401', '1.95 MB'),
'2012': HmdaDataFile('hmda_2012_sd_all-records_labels.zip', '48753', '2.2 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_sd_originated-records_labels.zip', '20937', '887.21 KB'),
'2007': HmdaDataFile('hmda_2007_sd_originated-records_labels.zip', '24091', '970 KB'),
'2017': HmdaDataFile('hmda_2017_sd_originated-records_labels.zip', '18871', '517.46 KB'),
'2015': HmdaDataFile('hmda_2015_sd_originated-records_labels.zip', '19418', '890.97 KB'),
'2014': HmdaDataFile('hmda_2014_sd_originated-records_labels.zip', '16136', '696.36 KB'),
'2008': HmdaDataFile('hmda_2008_sd_originated-records_labels.zip', '22824', '912.18 KB'),
'2009': HmdaDataFile('hmda_2009_sd_originated-records_labels.zip', '29867', '1.11 MB'),
'2011': HmdaDataFile('hmda_2011_sd_originated-records_labels.zip', '21818', '862.15 KB'),
'2010': HmdaDataFile('hmda_2010_sd_originated-records_labels.zip', '25068', '980.72 KB'),
'2013': HmdaDataFile('hmda_2013_sd_originated-records_labels.zip', '23684', '969.63 KB'),
'2012': HmdaDataFile('hmda_2012_sd_originated-records_labels.zip', '28651', '1.18 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '17608', '452.93 KB'),
'2007': HmdaDataFile('hmda_2007_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '15984', '392.92 KB'),
'2017': HmdaDataFile('hmda_2017_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '15341', '275.01 KB'),
'2015': HmdaDataFile('hmda_2015_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '15835', '442.1 KB'),
'2014': HmdaDataFile('hmda_2014_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '12839', '334.62 KB'),
'2008': HmdaDataFile('hmda_2008_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '16258', '411.49 KB'),
'2009': HmdaDataFile('hmda_2009_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '24417', '586.34 KB'),
'2011': HmdaDataFile('hmda_2011_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '17828', '429.4 KB'),
'2010': HmdaDataFile('hmda_2010_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '20738', '504.37 KB'),
'2013': HmdaDataFile('hmda_2013_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '19523', '492.06 KB'),
'2012': HmdaDataFile('hmda_2012_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '24459', '623.71 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_sd_all-records_codes.zip', '37648', '1.02 MB'),
'2007': HmdaDataFile('hmda_2007_sd_all-records_codes.zip', '47432', '1.24 MB'),
'2017': HmdaDataFile('hmda_2017_sd_all-records_codes.zip', '33167', '657.41 KB'),
'2015': HmdaDataFile('hmda_2015_sd_all-records_codes.zip', '35072', '1.04 MB'),
'2014': HmdaDataFile('hmda_2014_sd_all-records_codes.zip', '29763', '835.84 KB'),
'2008': HmdaDataFile('hmda_2008_sd_all-records_codes.zip', '41213', '1.13 MB'),
'2009': HmdaDataFile('hmda_2009_sd_all-records_codes.zip', '53033', '1.39 MB'),
'2011': HmdaDataFile('hmda_2011_sd_all-records_codes.zip', '38426', '1.03 MB'),
'2010': HmdaDataFile('hmda_2010_sd_all-records_codes.zip', '45150', '1.22 MB'),
'2013': HmdaDataFile('hmda_2013_sd_all-records_codes.zip', '43401', '1.19 MB'),
'2012': HmdaDataFile('hmda_2012_sd_all-records_codes.zip', '48753', '1.35 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_sd_originated-records_codes.zip', '20937', '557.49 KB'),
'2007': HmdaDataFile('hmda_2007_sd_originated-records_codes.zip', '24091', '623.6 KB'),
'2017': HmdaDataFile('hmda_2017_sd_originated-records_codes.zip', '18871', '357.8 KB'),
'2015': HmdaDataFile('hmda_2015_sd_originated-records_codes.zip', '19418', '553.25 KB'),
'2014': HmdaDataFile('hmda_2014_sd_originated-records_codes.zip', '16136', '436.32 KB'),
'2008': HmdaDataFile('hmda_2008_sd_originated-records_codes.zip', '22824', '595.53 KB'),
'2009': HmdaDataFile('hmda_2009_sd_originated-records_codes.zip', '29867', '746.74 KB'),
'2011': HmdaDataFile('hmda_2011_sd_originated-records_codes.zip', '21818', '549.43 KB'),
'2010': HmdaDataFile('hmda_2010_sd_originated-records_codes.zip', '25068', '623.74 KB'),
'2013': HmdaDataFile('hmda_2013_sd_originated-records_codes.zip', '23684', '608.21 KB'),
'2012': HmdaDataFile('hmda_2012_sd_originated-records_codes.zip', '28651', '744.18 KB')
}
}
}
}
| 92.033231
| 141
| 0.623882
|
from hmda.models.hmda_data_file import HmdaDataFile
LOAN_FILE_METADATA = {
'nationwide': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '7036352', '482.83 MB'),
'2007': HmdaDataFile('hmda_2007_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '7201366', '453.04 MB'),
'2017': HmdaDataFile('hmda_2017_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '5986659', '369.82 MB'),
'2015': HmdaDataFile('hmda_2015_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '6113423', '485.63 MB'),
'2014': HmdaDataFile('hmda_2014_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '4832425', '323.43 MB'),
'2008': HmdaDataFile('hmda_2008_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '5526941', '330.65 MB'),
'2009': HmdaDataFile('hmda_2009_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '7783986', '467.08 MB'),
'2011': HmdaDataFile('hmda_2011_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '5946435', '399.41 MB'),
'2010': HmdaDataFile('hmda_2010_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '6764902', '455.07 MB'),
'2013': HmdaDataFile('hmda_2013_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '7126202', '492.02 MB'),
'2012': HmdaDataFile('hmda_2012_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '8298882', '573.78 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nationwide_all-records_labels.zip', '16332987', '1.2 GB'),
'2007': HmdaDataFile('hmda_2007_nationwide_all-records_labels.zip', '26605695', '1.72 GB'),
'2017': HmdaDataFile('hmda_2017_nationwide_all-records_labels.zip', '14285496', '986 MB'),
'2015': HmdaDataFile('hmda_2015_nationwide_all-records_labels.zip', '14374184', '1.21 GB'),
'2014': HmdaDataFile('hmda_2014_nationwide_all-records_labels.zip', '12049341', '862.92 MB'),
'2008': HmdaDataFile('hmda_2008_nationwide_all-records_labels.zip', '17391570', '1.06 GB'),
'2009': HmdaDataFile('hmda_2009_nationwide_all-records_labels.zip', '19493491', '1.29 GB'),
'2011': HmdaDataFile('hmda_2011_nationwide_all-records_labels.zip', '14873415', '1.08 GB'),
'2010': HmdaDataFile('hmda_2010_nationwide_all-records_labels.zip', '16348557', '1.19 GB'),
'2013': HmdaDataFile('hmda_2013_nationwide_all-records_labels.zip', '17016159', '1.27 GB'),
'2012': HmdaDataFile('hmda_2012_nationwide_all-records_labels.zip', '18691551', '1.4 GB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nationwide_originated-records_labels.zip', '8377907', '457.12 MB'),
'2007': HmdaDataFile('hmda_2007_nationwide_originated-records_labels.zip', '10441545', '528.7 MB'),
'2017': HmdaDataFile('hmda_2017_nationwide_originated-records_labels.zip', '7339057', '247.2 MB'),
'2015': HmdaDataFile('hmda_2015_nationwide_originated-records_labels.zip', '7404258', '461.08 MB'),
'2014': HmdaDataFile('hmda_2014_nationwide_originated-records_labels.zip', '6039826', '331.36 MB'),
'2008': HmdaDataFile('hmda_2008_nationwide_originated-records_labels.zip', '7177262', '360.36 MB'),
'2009': HmdaDataFile('hmda_2009_nationwide_originated-records_labels.zip', '8950936', '416.67 MB'),
'2011': HmdaDataFile('hmda_2011_nationwide_originated-records_labels.zip', '7095262', '381.6 MB'),
'2010': HmdaDataFile('hmda_2010_nationwide_originated-records_labels.zip', '7863337', '419.27 MB'),
'2013': HmdaDataFile('hmda_2013_nationwide_originated-records_labels.zip', '8706657', '476.47 MB'),
'2012': HmdaDataFile('hmda_2012_nationwide_originated-records_labels.zip', '9783966', '529.5 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '7036352', '165.84 MB'),
'2007': HmdaDataFile('hmda_2007_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '7201366', '141.73 MB'),
'2017': HmdaDataFile('hmda_2017_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '5986659', '77.47 MB'),
'2015': HmdaDataFile('hmda_2015_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '6113423', '144.37 MB'),
'2014': HmdaDataFile('hmda_2014_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '4787867', '113.54 MB'),
'2008': HmdaDataFile('hmda_2008_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '5526941', '107.3 MB'),
'2009': HmdaDataFile('hmda_2009_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '7783986', '140.57 MB'),
'2011': HmdaDataFile('hmda_2011_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '5946435', '132 MB'),
'2010': HmdaDataFile('hmda_2010_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '6764902', '149.36 MB'),
'2013': HmdaDataFile('hmda_2013_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '7126202', '166.47 MB'),
'2012': HmdaDataFile('hmda_2012_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '8298882', '189.65 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nationwide_all-records_codes.zip', '16332987', '384.11 MB'),
'2007': HmdaDataFile('hmda_2007_nationwide_all-records_codes.zip', '26605695', '461.15 MB'),
'2017': HmdaDataFile('hmda_2017_nationwide_all-records_codes.zip', '14285496', '182.02 MB'),
'2015': HmdaDataFile('hmda_2015_nationwide_all-records_codes.zip', '14374184', '337.27 MB'),
'2014': HmdaDataFile('hmda_2014_nationwide_all-records_codes.zip', '11875464', '278.4 MB'),
'2008': HmdaDataFile('hmda_2008_nationwide_all-records_codes.zip', '17391570', '309.22 MB'),
'2009': HmdaDataFile('hmda_2009_nationwide_all-records_codes.zip', '19493491', '331.31 MB'),
'2011': HmdaDataFile('hmda_2011_nationwide_all-records_codes.zip', '14873415', '335.22 MB'),
'2010': HmdaDataFile('hmda_2010_nationwide_all-records_codes.zip', '16348557', '367.78 MB'),
'2013': HmdaDataFile('hmda_2013_nationwide_all-records_codes.zip', '17016159', '400.19 MB'),
'2012': HmdaDataFile('hmda_2012_nationwide_all-records_codes.zip', '18691551', '434.69 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nationwide_originated-records_codes.zip', '8377907', '196.62 MB'),
'2007': HmdaDataFile('hmda_2007_nationwide_originated-records_codes.zip', '10441545', '199.55 MB'),
'2017': HmdaDataFile('hmda_2017_nationwide_originated-records_codes.zip', '7339057', '94.95 MB'),
'2015': HmdaDataFile('hmda_2015_nationwide_originated-records_codes.zip', '7404258', '173.96 MB'),
'2014': HmdaDataFile('hmda_2014_nationwide_originated-records_codes.zip', '5979766', '140.82 MB'),
'2008': HmdaDataFile('hmda_2008_nationwide_originated-records_codes.zip', '7177262', '137.98 MB'),
'2009': HmdaDataFile('hmda_2009_nationwide_originated-records_codes.zip', '8950936', '162.04 MB'),
'2011': HmdaDataFile('hmda_2011_nationwide_originated-records_codes.zip', '7095262', '157.4 MB'),
'2010': HmdaDataFile('hmda_2010_nationwide_originated-records_codes.zip', '7863337', '173.45 MB'),
'2013': HmdaDataFile('hmda_2013_nationwide_originated-records_codes.zip', '8706657', '203.33 MB'),
'2012': HmdaDataFile('hmda_2012_nationwide_originated-records_codes.zip', '9783966', '224.11 MB')
}
}
},
'va': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '216152', '11.27 MB'),
'2007': HmdaDataFile('hmda_2007_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '228323', '11.15 MB'),
'2017': HmdaDataFile('hmda_2017_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '175737', '5.58 MB'),
'2015': HmdaDataFile('hmda_2015_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '191048', '11.13 MB'),
'2014': HmdaDataFile('hmda_2014_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '147744', '7.63 MB'),
'2008': HmdaDataFile('hmda_2008_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '186185', '8.91 MB'),
'2009': HmdaDataFile('hmda_2009_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '273787', '12.14 MB'),
'2011': HmdaDataFile('hmda_2011_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '205670', '10.53 MB'),
'2010': HmdaDataFile('hmda_2010_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '228664', '11.48 MB'),
'2013': HmdaDataFile('hmda_2013_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '244599', '12.77 MB'),
'2012': HmdaDataFile('hmda_2012_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '288436', '14.9 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_va_all-records_labels.zip', '494057', '27.35 MB'),
'2007': HmdaDataFile('hmda_2007_va_all-records_labels.zip', '784919', '38.17 MB'),
'2017': HmdaDataFile('hmda_2017_va_all-records_labels.zip', '411507', '14.48 MB'),
'2015': HmdaDataFile('hmda_2015_va_all-records_labels.zip', '445447', '27.97 MB'),
'2014': HmdaDataFile('hmda_2014_va_all-records_labels.zip', '365572', '20.34 MB'),
'2008': HmdaDataFile('hmda_2008_va_all-records_labels.zip', '539572', '26.49 MB'),
'2009': HmdaDataFile('hmda_2009_va_all-records_labels.zip', '637212', '29.34 MB'),
'2011': HmdaDataFile('hmda_2011_va_all-records_labels.zip', '482943', '27.06 MB'),
'2010': HmdaDataFile('hmda_2010_va_all-records_labels.zip', '517819', '28.54 MB'),
'2013': HmdaDataFile('hmda_2013_va_all-records_labels.zip', '563167', '31.8 MB'),
'2012': HmdaDataFile('hmda_2012_va_all-records_labels.zip', '634102', '35.59 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_va_originated-records_labels.zip', '252237', '13.42 MB'),
'2007': HmdaDataFile('hmda_2007_va_originated-records_labels.zip', '327766', '16.15 MB'),
'2017': HmdaDataFile('hmda_2017_va_originated-records_labels.zip', '211218', '6.89 MB'),
'2015': HmdaDataFile('hmda_2015_va_originated-records_labels.zip', '227837', '13.49 MB'),
'2014': HmdaDataFile('hmda_2014_va_originated-records_labels.zip', '183729', '9.73 MB'),
'2008': HmdaDataFile('hmda_2008_va_originated-records_labels.zip', '234813', '11.47 MB'),
'2009': HmdaDataFile('hmda_2009_va_originated-records_labels.zip', '308658', '13.99 MB'),
'2011': HmdaDataFile('hmda_2011_va_originated-records_labels.zip', '239310', '12.46 MB'),
'2010': HmdaDataFile('hmda_2010_va_originated-records_labels.zip', '260214', '13.38 MB'),
'2013': HmdaDataFile('hmda_2013_va_originated-records_labels.zip', '294145', '15.54 MB'),
'2012': HmdaDataFile('hmda_2012_va_originated-records_labels.zip', '334770', '17.51 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '216152', '7.74 MB'),
'2007': HmdaDataFile('hmda_2007_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '228323', '7.71 MB'),
'2017': HmdaDataFile('hmda_2017_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '175737', '3.89 MB'),
'2015': HmdaDataFile('hmda_2015_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '191048', '7.65 MB'),
'2014': HmdaDataFile('hmda_2014_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '147744', '5.26 MB'),
'2008': HmdaDataFile('hmda_2008_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '186185', '6.21 MB'),
'2009': HmdaDataFile('hmda_2009_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '273787', '8.61 MB'),
'2011': HmdaDataFile('hmda_2011_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '205670', '7.08 MB'),
'2010': HmdaDataFile('hmda_2010_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '228664', '7.72 MB'),
'2013': HmdaDataFile('hmda_2013_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '244599', '8.81 MB'),
'2012': HmdaDataFile('hmda_2012_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '288436', '10.21 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_va_all-records_codes.zip', '494057', '18.2 MB'),
'2007': HmdaDataFile('hmda_2007_va_all-records_codes.zip', '784919', '25.76 MB'),
'2017': HmdaDataFile('hmda_2017_va_all-records_codes.zip', '411507', '9.31 MB'),
'2015': HmdaDataFile('hmda_2015_va_all-records_codes.zip', '445447', '18.51 MB'),
'2014': HmdaDataFile('hmda_2014_va_all-records_codes.zip', '365572', '13.57 MB'),
'2008': HmdaDataFile('hmda_2008_va_all-records_codes.zip', '539572', '17.99 MB'),
'2009': HmdaDataFile('hmda_2009_va_all-records_codes.zip', '637212', '20.26 MB'),
'2011': HmdaDataFile('hmda_2011_va_all-records_codes.zip', '482943', '17.83 MB'),
'2010': HmdaDataFile('hmda_2010_va_all-records_codes.zip', '517819', '18.81 MB'),
'2013': HmdaDataFile('hmda_2013_va_all-records_codes.zip', '563167', '21.36 MB'),
'2012': HmdaDataFile('hmda_2012_va_all-records_codes.zip', '634102', '23.86 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_va_originated-records_codes.zip', '252237', '9.19 MB'),
'2007': HmdaDataFile('hmda_2007_va_originated-records_codes.zip', '327766', '11.21 MB'),
'2017': HmdaDataFile('hmda_2017_va_originated-records_codes.zip', '211218', '4.75 MB'),
'2015': HmdaDataFile('hmda_2015_va_originated-records_codes.zip', '227837', '9.22 MB'),
'2014': HmdaDataFile('hmda_2014_va_originated-records_codes.zip', '183729', '6.67 MB'),
'2008': HmdaDataFile('hmda_2008_va_originated-records_codes.zip', '234813', '7.98 MB'),
'2009': HmdaDataFile('hmda_2009_va_originated-records_codes.zip', '308658', '9.9 MB'),
'2011': HmdaDataFile('hmda_2011_va_originated-records_codes.zip', '239310', '8.34 MB'),
'2010': HmdaDataFile('hmda_2010_va_originated-records_codes.zip', '260214', '8.98 MB'),
'2013': HmdaDataFile('hmda_2013_va_originated-records_codes.zip', '294145', '10.63 MB'),
'2012': HmdaDataFile('hmda_2012_va_originated-records_codes.zip', '334770', '11.93 MB')
}
}
},
'co': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '228866', '11.12 MB'),
'2007': HmdaDataFile('hmda_2007_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '144805', '6.71 MB'),
'2017': HmdaDataFile('hmda_2017_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '182654', '5.82 MB'),
'2015': HmdaDataFile('hmda_2015_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '194123', '10.51 MB'),
'2014': HmdaDataFile('hmda_2014_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '139220', '6.92 MB'),
'2008': HmdaDataFile('hmda_2008_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '128542', '5.81 MB'),
'2009': HmdaDataFile('hmda_2009_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '209511', '8.79 MB'),
'2011': HmdaDataFile('hmda_2011_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '149880', '7.07 MB'),
'2010': HmdaDataFile('hmda_2010_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '180911', '8.7 MB'),
'2013': HmdaDataFile('hmda_2013_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '192627', '9.59 MB'),
'2012': HmdaDataFile('hmda_2012_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '222498', '10.92 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_co_all-records_labels.zip', '483436', '25.25 MB'),
'2007': HmdaDataFile('hmda_2007_co_all-records_labels.zip', '537363', '25.41 MB'),
'2017': HmdaDataFile('hmda_2017_co_all-records_labels.zip', '404517', '14.76 MB'),
'2015': HmdaDataFile('hmda_2015_co_all-records_labels.zip', '409511', '23.35 MB'),
'2014': HmdaDataFile('hmda_2014_co_all-records_labels.zip', '313445', '16.62 MB'),
'2008': HmdaDataFile('hmda_2008_co_all-records_labels.zip', '370468', '17.59 MB'),
'2009': HmdaDataFile('hmda_2009_co_all-records_labels.zip', '492317', '21.47 MB'),
'2011': HmdaDataFile('hmda_2011_co_all-records_labels.zip', '366969', '18.87 MB'),
'2010': HmdaDataFile('hmda_2010_co_all-records_labels.zip', '413027', '21.58 MB'),
'2013': HmdaDataFile('hmda_2013_co_all-records_labels.zip', '427952', '22.81 MB'),
'2012': HmdaDataFile('hmda_2012_co_all-records_labels.zip', '474846', '24.96 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_co_originated-records_labels.zip', '263402', '13.07 MB'),
'2007': HmdaDataFile('hmda_2007_co_originated-records_labels.zip', '218842', '10.18 MB'),
'2017': HmdaDataFile('hmda_2017_co_originated-records_labels.zip', '216848', '7.17 MB'),
'2015': HmdaDataFile('hmda_2015_co_originated-records_labels.zip', '227578', '12.54 MB'),
'2014': HmdaDataFile('hmda_2014_co_originated-records_labels.zip', '169959', '8.6 MB'),
'2008': HmdaDataFile('hmda_2008_co_originated-records_labels.zip', '162244', '7.43 MB'),
'2009': HmdaDataFile('hmda_2009_co_originated-records_labels.zip', '236219', '9.99 MB'),
'2011': HmdaDataFile('hmda_2011_co_originated-records_labels.zip', '179323', '8.69 MB'),
'2010': HmdaDataFile('hmda_2010_co_originated-records_labels.zip', '207951', '10.35 MB'),
'2013': HmdaDataFile('hmda_2013_co_originated-records_labels.zip', '235157', '11.95 MB'),
'2012': HmdaDataFile('hmda_2012_co_originated-records_labels.zip', '263229', '13.2 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '228866', '7.81 MB'),
'2007': HmdaDataFile('hmda_2007_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '144805', '4.75 MB'),
'2017': HmdaDataFile('hmda_2017_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '182654', '4.07 MB'),
'2015': HmdaDataFile('hmda_2015_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '194123', '7.52 MB'),
'2014': HmdaDataFile('hmda_2014_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '139220', '4.9 MB'),
'2008': HmdaDataFile('hmda_2008_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '128542', '4.11 MB'),
'2009': HmdaDataFile('hmda_2009_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '209511', '6.34 MB'),
'2011': HmdaDataFile('hmda_2011_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '149880', '4.88 MB'),
'2010': HmdaDataFile('hmda_2010_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '180911', '6.03 MB'),
'2013': HmdaDataFile('hmda_2013_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '192627', '6.74 MB'),
'2012': HmdaDataFile('hmda_2012_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '222498', '7.6 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_co_all-records_codes.zip', '483436', '17.2 MB'),
'2007': HmdaDataFile('hmda_2007_co_all-records_codes.zip', '537363', '17.72 MB'),
'2017': HmdaDataFile('hmda_2017_co_all-records_codes.zip', '404517', '9.9 MB'),
'2015': HmdaDataFile('hmda_2015_co_all-records_codes.zip', '409511', '16.01 MB'),
'2014': HmdaDataFile('hmda_2014_co_all-records_codes.zip', '313445', '11.43 MB'),
'2008': HmdaDataFile('hmda_2008_co_all-records_codes.zip', '370468', '12.3 MB'),
'2009': HmdaDataFile('hmda_2009_co_all-records_codes.zip', '492317', '15.15 MB'),
'2011': HmdaDataFile('hmda_2011_co_all-records_codes.zip', '366969', '12.63 MB'),
'2010': HmdaDataFile('hmda_2010_co_all-records_codes.zip', '413027', '14.47 MB'),
'2013': HmdaDataFile('hmda_2013_co_all-records_codes.zip', '427952', '15.58 MB'),
'2012': HmdaDataFile('hmda_2012_co_all-records_codes.zip', '474846', '16.87 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_co_originated-records_codes.zip', '263402', '9.15 MB'),
'2007': HmdaDataFile('hmda_2007_co_originated-records_codes.zip', '218842', '7.18 MB'),
'2017': HmdaDataFile('hmda_2017_co_originated-records_codes.zip', '216848', '4.99 MB'),
'2015': HmdaDataFile('hmda_2015_co_originated-records_codes.zip', '227578', '8.91 MB'),
'2014': HmdaDataFile('hmda_2014_co_originated-records_codes.zip', '169959', '6.05 MB'),
'2008': HmdaDataFile('hmda_2008_co_originated-records_codes.zip', '162244', '5.23 MB'),
'2009': HmdaDataFile('hmda_2009_co_originated-records_codes.zip', '236219', '7.16 MB'),
'2011': HmdaDataFile('hmda_2011_co_originated-records_codes.zip', '179323', '5.95 MB'),
'2010': HmdaDataFile('hmda_2010_co_originated-records_codes.zip', '207951', '7.15 MB'),
'2013': HmdaDataFile('hmda_2013_co_originated-records_codes.zip', '235157', '8.37 MB'),
'2012': HmdaDataFile('hmda_2012_co_originated-records_codes.zip', '263229', '9.16 MB')
}
}
},
'vi': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2007': HmdaDataFile('hmda_2007_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2017': HmdaDataFile('hmda_2017_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '15', '1.55 KB'),
'2015': HmdaDataFile('hmda_2015_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2014': HmdaDataFile('hmda_2014_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2008': HmdaDataFile('hmda_2008_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2009': HmdaDataFile('hmda_2009_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2011': HmdaDataFile('hmda_2011_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2010': HmdaDataFile('hmda_2010_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2013': HmdaDataFile('hmda_2013_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2012': HmdaDataFile('hmda_2012_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_vi_all-records_labels.zip', '0', '581 bytes'),
'2007': HmdaDataFile('hmda_2007_vi_all-records_labels.zip', '0', '581 bytes'),
'2017': HmdaDataFile('hmda_2017_vi_all-records_labels.zip', '47', '2.39 KB'),
'2015': HmdaDataFile('hmda_2015_vi_all-records_labels.zip', '0', '581 bytes'),
'2014': HmdaDataFile('hmda_2014_vi_all-records_labels.zip', '0', '581 bytes'),
'2008': HmdaDataFile('hmda_2008_vi_all-records_labels.zip', '0', '581 bytes'),
'2009': HmdaDataFile('hmda_2009_vi_all-records_labels.zip', '0', '581 bytes'),
'2011': HmdaDataFile('hmda_2011_vi_all-records_labels.zip', '0', '581 bytes'),
'2010': HmdaDataFile('hmda_2010_vi_all-records_labels.zip', '0', '581 bytes'),
'2013': HmdaDataFile('hmda_2013_vi_all-records_labels.zip', '0', '581 bytes'),
'2012': HmdaDataFile('hmda_2012_vi_all-records_labels.zip', '0', '581 bytes')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_vi_originated-records_labels.zip', '0', '595 bytes'),
'2007': HmdaDataFile('hmda_2007_vi_originated-records_labels.zip', '0', '595 bytes'),
'2017': HmdaDataFile('hmda_2017_vi_originated-records_labels.zip', '23', '1.73 KB'),
'2015': HmdaDataFile('hmda_2015_vi_originated-records_labels.zip', '0', '595 bytes'),
'2014': HmdaDataFile('hmda_2014_vi_originated-records_labels.zip', '0', '595 bytes'),
'2008': HmdaDataFile('hmda_2008_vi_originated-records_labels.zip', '0', '595 bytes'),
'2009': HmdaDataFile('hmda_2009_vi_originated-records_labels.zip', '0', '595 bytes'),
'2011': HmdaDataFile('hmda_2011_vi_originated-records_labels.zip', '0', '595 bytes'),
'2010': HmdaDataFile('hmda_2010_vi_originated-records_labels.zip', '0', '595 bytes'),
'2013': HmdaDataFile('hmda_2013_vi_originated-records_labels.zip', '0', '595 bytes'),
'2012': HmdaDataFile('hmda_2012_vi_originated-records_labels.zip', '0', '595 bytes')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2007': HmdaDataFile('hmda_2007_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2017': HmdaDataFile('hmda_2017_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '15', '940 bytes'),
'2015': HmdaDataFile('hmda_2015_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2014': HmdaDataFile('hmda_2014_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2008': HmdaDataFile('hmda_2008_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2009': HmdaDataFile('hmda_2009_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2011': HmdaDataFile('hmda_2011_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2010': HmdaDataFile('hmda_2010_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2013': HmdaDataFile('hmda_2013_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2012': HmdaDataFile('hmda_2012_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_vi_all-records_codes.zip', '0', '474 bytes'),
'2007': HmdaDataFile('hmda_2007_vi_all-records_codes.zip', '0', '474 bytes'),
'2017': HmdaDataFile('hmda_2017_vi_all-records_codes.zip', '47', '1.37 KB'),
'2015': HmdaDataFile('hmda_2015_vi_all-records_codes.zip', '0', '474 bytes'),
'2014': HmdaDataFile('hmda_2014_vi_all-records_codes.zip', '0', '474 bytes'),
'2008': HmdaDataFile('hmda_2008_vi_all-records_codes.zip', '0', '474 bytes'),
'2009': HmdaDataFile('hmda_2009_vi_all-records_codes.zip', '0', '474 bytes'),
'2011': HmdaDataFile('hmda_2011_vi_all-records_codes.zip', '0', '474 bytes'),
'2010': HmdaDataFile('hmda_2010_vi_all-records_codes.zip', '0', '474 bytes'),
'2013': HmdaDataFile('hmda_2013_vi_all-records_codes.zip', '0', '474 bytes'),
'2012': HmdaDataFile('hmda_2012_vi_all-records_codes.zip', '0', '474 bytes')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_vi_originated-records_codes.zip', '0', '488 bytes'),
'2007': HmdaDataFile('hmda_2007_vi_originated-records_codes.zip', '0', '488 bytes'),
'2017': HmdaDataFile('hmda_2017_vi_originated-records_codes.zip', '23', '1.03 KB'),
'2015': HmdaDataFile('hmda_2015_vi_originated-records_codes.zip', '0', '488 bytes'),
'2014': HmdaDataFile('hmda_2014_vi_originated-records_codes.zip', '0', '488 bytes'),
'2008': HmdaDataFile('hmda_2008_vi_originated-records_codes.zip', '0', '488 bytes'),
'2009': HmdaDataFile('hmda_2009_vi_originated-records_codes.zip', '0', '488 bytes'),
'2011': HmdaDataFile('hmda_2011_vi_originated-records_codes.zip', '0', '488 bytes'),
'2010': HmdaDataFile('hmda_2010_vi_originated-records_codes.zip', '0', '488 bytes'),
'2013': HmdaDataFile('hmda_2013_vi_originated-records_codes.zip', '0', '488 bytes'),
'2012': HmdaDataFile('hmda_2012_vi_originated-records_codes.zip', '0', '488 bytes')
}
}
},
'ak': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '15356', '685.22 KB'),
'2007': HmdaDataFile('hmda_2007_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '16758', '627.93 KB'),
'2017': HmdaDataFile('hmda_2017_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '12579', '350.04 KB'),
'2015': HmdaDataFile('hmda_2015_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '14511', '637.91 KB'),
'2014': HmdaDataFile('hmda_2014_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '12147', '530.18 KB'),
'2008': HmdaDataFile('hmda_2008_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '15576', '536.13 KB'),
'2009': HmdaDataFile('hmda_2009_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '23301', '812.74 KB'),
'2011': HmdaDataFile('hmda_2011_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '16974', '682.85 KB'),
'2010': HmdaDataFile('hmda_2010_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '18636', '750.13 KB'),
'2013': HmdaDataFile('hmda_2013_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '17337', '684.52 KB'),
'2012': HmdaDataFile('hmda_2012_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '22064', '872.42 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ak_all-records_labels.zip', '36105', '1.77 MB'),
'2007': HmdaDataFile('hmda_2007_ak_all-records_labels.zip', '48143', '2.05 MB'),
'2017': HmdaDataFile('hmda_2017_ak_all-records_labels.zip', '28632', '904.87 KB'),
'2015': HmdaDataFile('hmda_2015_ak_all-records_labels.zip', '33421', '1.59 MB'),
'2014': HmdaDataFile('hmda_2014_ak_all-records_labels.zip', '26499', '1.29 MB'),
'2008': HmdaDataFile('hmda_2008_ak_all-records_labels.zip', '36410', '1.47 MB'),
'2009': HmdaDataFile('hmda_2009_ak_all-records_labels.zip', '51821', '2.02 MB'),
'2011': HmdaDataFile('hmda_2011_ak_all-records_labels.zip', '36900', '1.62 MB'),
'2010': HmdaDataFile('hmda_2010_ak_all-records_labels.zip', '41203', '1.81 MB'),
'2013': HmdaDataFile('hmda_2013_ak_all-records_labels.zip', '39394', '1.81 MB'),
'2012': HmdaDataFile('hmda_2012_ak_all-records_labels.zip', '46691', '2.09 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ak_originated-records_labels.zip', '17503', '792.19 KB'),
'2007': HmdaDataFile('hmda_2007_ak_originated-records_labels.zip', '21167', '801.83 KB'),
'2017': HmdaDataFile('hmda_2017_ak_originated-records_labels.zip', '14430', '400.6 KB'),
'2015': HmdaDataFile('hmda_2015_ak_originated-records_labels.zip', '16680', '749.38 KB'),
'2014': HmdaDataFile('hmda_2014_ak_originated-records_labels.zip', '14272', '633.57 KB'),
'2008': HmdaDataFile('hmda_2008_ak_originated-records_labels.zip', '17485', '614.3 KB'),
'2009': HmdaDataFile('hmda_2009_ak_originated-records_labels.zip', '24987', '878.94 KB'),
'2011': HmdaDataFile('hmda_2011_ak_originated-records_labels.zip', '19236', '785.43 KB'),
'2010': HmdaDataFile('hmda_2010_ak_originated-records_labels.zip', '20697', '847.87 KB'),
'2013': HmdaDataFile('hmda_2013_ak_originated-records_labels.zip', '20363', '840.47 KB'),
'2012': HmdaDataFile('hmda_2012_ak_originated-records_labels.zip', '24887', '1.02 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '15356', '426.06 KB'),
'2007': HmdaDataFile('hmda_2007_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '16758', '411.75 KB'),
'2017': HmdaDataFile('hmda_2017_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '12579', '240.69 KB'),
'2015': HmdaDataFile('hmda_2015_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '14511', '394.05 KB'),
'2014': HmdaDataFile('hmda_2014_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '12147', '329.2 KB'),
'2008': HmdaDataFile('hmda_2008_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '15576', '364.56 KB'),
'2009': HmdaDataFile('hmda_2009_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '23301', '544.64 KB'),
'2011': HmdaDataFile('hmda_2011_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '16974', '426.2 KB'),
'2010': HmdaDataFile('hmda_2010_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '18636', '464.07 KB'),
'2013': HmdaDataFile('hmda_2013_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '17337', '417.9 KB'),
'2012': HmdaDataFile('hmda_2012_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '22064', '530.98 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ak_all-records_codes.zip', '36105', '1.07 MB'),
'2007': HmdaDataFile('hmda_2007_ak_all-records_codes.zip', '48143', '1.24 MB'),
'2017': HmdaDataFile('hmda_2017_ak_all-records_codes.zip', '28632', '571.83 KB'),
'2015': HmdaDataFile('hmda_2015_ak_all-records_codes.zip', '33421', '951.9 KB'),
'2014': HmdaDataFile('hmda_2014_ak_all-records_codes.zip', '26499', '778.42 KB'),
'2008': HmdaDataFile('hmda_2008_ak_all-records_codes.zip', '36410', '899.28 KB'),
'2009': HmdaDataFile('hmda_2009_ak_all-records_codes.zip', '51821', '1.29 MB'),
'2011': HmdaDataFile('hmda_2011_ak_all-records_codes.zip', '36900', '992.55 KB'),
'2010': HmdaDataFile('hmda_2010_ak_all-records_codes.zip', '41203', '1.1 MB'),
'2013': HmdaDataFile('hmda_2013_ak_all-records_codes.zip', '39394', '1.01 MB'),
'2012': HmdaDataFile('hmda_2012_ak_all-records_codes.zip', '46691', '1.17 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ak_originated-records_codes.zip', '17503', '490.92 KB'),
'2007': HmdaDataFile('hmda_2007_ak_originated-records_codes.zip', '21167', '527.65 KB'),
'2017': HmdaDataFile('hmda_2017_ak_originated-records_codes.zip', '14430', '275.46 KB'),
'2015': HmdaDataFile('hmda_2015_ak_originated-records_codes.zip', '16680', '458.98 KB'),
'2014': HmdaDataFile('hmda_2014_ak_originated-records_codes.zip', '14272', '392.19 KB'),
'2008': HmdaDataFile('hmda_2008_ak_originated-records_codes.zip', '17485', '413.46 KB'),
'2009': HmdaDataFile('hmda_2009_ak_originated-records_codes.zip', '24987', '583.23 KB'),
'2011': HmdaDataFile('hmda_2011_ak_originated-records_codes.zip', '19236', '488.12 KB'),
'2010': HmdaDataFile('hmda_2010_ak_originated-records_codes.zip', '20697', '518.32 KB'),
'2013': HmdaDataFile('hmda_2013_ak_originated-records_codes.zip', '20363', '514.87 KB'),
'2012': HmdaDataFile('hmda_2012_ak_originated-records_codes.zip', '24887', '617.44 KB')
}
}
},
'al': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '86522', '4.16 MB'),
'2007': HmdaDataFile('hmda_2007_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '106195', '4.88 MB'),
'2017': HmdaDataFile('hmda_2017_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '80115', '2.33 MB'),
'2015': HmdaDataFile('hmda_2015_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '75252', '4.13 MB'),
'2014': HmdaDataFile('hmda_2014_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '63808', '3.06 MB'),
'2008': HmdaDataFile('hmda_2008_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '89932', '4.1 MB'),
'2009': HmdaDataFile('hmda_2009_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '103427', '4.47 MB'),
'2011': HmdaDataFile('hmda_2011_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '75467', '3.54 MB'),
'2010': HmdaDataFile('hmda_2010_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '87005', '3.98 MB'),
'2013': HmdaDataFile('hmda_2013_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '93236', '4.39 MB'),
'2012': HmdaDataFile('hmda_2012_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '99531', '4.7 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_al_all-records_labels.zip', '226918', '11.95 MB'),
'2007': HmdaDataFile('hmda_2007_al_all-records_labels.zip', '367991', '17.47 MB'),
'2017': HmdaDataFile('hmda_2017_al_all-records_labels.zip', '209002', '7.08 MB'),
'2015': HmdaDataFile('hmda_2015_al_all-records_labels.zip', '205039', '11.99 MB'),
'2014': HmdaDataFile('hmda_2014_al_all-records_labels.zip', '182825', '9.57 MB'),
'2008': HmdaDataFile('hmda_2008_al_all-records_labels.zip', '286567', '13.72 MB'),
'2009': HmdaDataFile('hmda_2009_al_all-records_labels.zip', '294820', '13.87 MB'),
'2011': HmdaDataFile('hmda_2011_al_all-records_labels.zip', '228420', '11.89 MB'),
'2010': HmdaDataFile('hmda_2010_al_all-records_labels.zip', '249347', '12.77 MB'),
'2013': HmdaDataFile('hmda_2013_al_all-records_labels.zip', '253915', '13.16 MB'),
'2012': HmdaDataFile('hmda_2012_al_all-records_labels.zip', '264313', '13.66 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_al_originated-records_labels.zip', '109870', '5.39 MB'),
'2007': HmdaDataFile('hmda_2007_al_originated-records_labels.zip', '153334', '7.1 MB'),
'2017': HmdaDataFile('hmda_2017_al_originated-records_labels.zip', '103096', '3.05 MB'),
'2015': HmdaDataFile('hmda_2015_al_originated-records_labels.zip', '98097', '5.48 MB'),
'2014': HmdaDataFile('hmda_2014_al_originated-records_labels.zip', '85899', '4.21 MB'),
'2008': HmdaDataFile('hmda_2008_al_originated-records_labels.zip', '119306', '5.51 MB'),
'2009': HmdaDataFile('hmda_2009_al_originated-records_labels.zip', '126063', '5.57 MB'),
'2011': HmdaDataFile('hmda_2011_al_originated-records_labels.zip', '97761', '4.65 MB'),
'2010': HmdaDataFile('hmda_2010_al_originated-records_labels.zip', '106706', '4.95 MB'),
'2013': HmdaDataFile('hmda_2013_al_originated-records_labels.zip', '118638', '5.68 MB'),
'2012': HmdaDataFile('hmda_2012_al_originated-records_labels.zip', '123170', '5.93 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '86522', '2.77 MB'),
'2007': HmdaDataFile('hmda_2007_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '106195', '3.34 MB'),
'2017': HmdaDataFile('hmda_2017_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '80115', '1.67 MB'),
'2015': HmdaDataFile('hmda_2015_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '75252', '2.81 MB'),
'2014': HmdaDataFile('hmda_2014_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '63808', '2.06 MB'),
'2008': HmdaDataFile('hmda_2008_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '89932', '2.83 MB'),
'2009': HmdaDataFile('hmda_2009_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '103427', '3.06 MB'),
'2011': HmdaDataFile('hmda_2011_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '75467', '2.35 MB'),
'2010': HmdaDataFile('hmda_2010_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '87005', '2.64 MB'),
'2013': HmdaDataFile('hmda_2013_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '93236', '2.92 MB'),
'2012': HmdaDataFile('hmda_2012_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '99531', '3.15 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_al_all-records_codes.zip', '226918', '7.69 MB'),
'2007': HmdaDataFile('hmda_2007_al_all-records_codes.zip', '367991', '11.49 MB'),
'2017': HmdaDataFile('hmda_2017_al_all-records_codes.zip', '209002', '4.77 MB'),
'2015': HmdaDataFile('hmda_2015_al_all-records_codes.zip', '205039', '7.82 MB'),
'2014': HmdaDataFile('hmda_2014_al_all-records_codes.zip', '182825', '6.25 MB'),
'2008': HmdaDataFile('hmda_2008_al_all-records_codes.zip', '286567', '9.04 MB'),
'2009': HmdaDataFile('hmda_2009_al_all-records_codes.zip', '294820', '9.23 MB'),
'2011': HmdaDataFile('hmda_2011_al_all-records_codes.zip', '228420', '7.78 MB'),
'2010': HmdaDataFile('hmda_2010_al_all-records_codes.zip', '249347', '8.38 MB'),
'2013': HmdaDataFile('hmda_2013_al_all-records_codes.zip', '253915', '8.52 MB'),
'2012': HmdaDataFile('hmda_2012_al_all-records_codes.zip', '264313', '9.06 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_al_originated-records_codes.zip', '109870', '3.56 MB'),
'2007': HmdaDataFile('hmda_2007_al_originated-records_codes.zip', '153334', '4.85 MB'),
'2017': HmdaDataFile('hmda_2017_al_originated-records_codes.zip', '103096', '2.14 MB'),
'2015': HmdaDataFile('hmda_2015_al_originated-records_codes.zip', '98097', '3.71 MB'),
'2014': HmdaDataFile('hmda_2014_al_originated-records_codes.zip', '85899', '2.8 MB'),
'2008': HmdaDataFile('hmda_2008_al_originated-records_codes.zip', '119306', '3.78 MB'),
'2009': HmdaDataFile('hmda_2009_al_originated-records_codes.zip', '126063', '3.8 MB'),
'2011': HmdaDataFile('hmda_2011_al_originated-records_codes.zip', '97761', '3.07 MB'),
'2010': HmdaDataFile('hmda_2010_al_originated-records_codes.zip', '106706', '3.27 MB'),
'2013': HmdaDataFile('hmda_2013_al_originated-records_codes.zip', '118638', '3.76 MB'),
'2012': HmdaDataFile('hmda_2012_al_originated-records_codes.zip', '123170', '3.94 MB')
}
}
},
'ar': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '49025', '2.38 MB'),
'2007': HmdaDataFile('hmda_2007_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '55026', '2.46 MB'),
'2017': HmdaDataFile('hmda_2017_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '45327', '1.49 MB'),
'2015': HmdaDataFile('hmda_2015_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '42845', '2.26 MB'),
'2014': HmdaDataFile('hmda_2014_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '37601', '1.77 MB'),
'2008': HmdaDataFile('hmda_2008_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '49034', '2.14 MB'),
'2009': HmdaDataFile('hmda_2009_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '61531', '2.65 MB'),
'2011': HmdaDataFile('hmda_2011_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '46801', '2.1 MB'),
'2010': HmdaDataFile('hmda_2010_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '54493', '2.37 MB'),
'2013': HmdaDataFile('hmda_2013_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '54893', '2.56 MB'),
'2012': HmdaDataFile('hmda_2012_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '61834', '2.8 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ar_all-records_labels.zip', '131352', '6.97 MB'),
'2007': HmdaDataFile('hmda_2007_ar_all-records_labels.zip', '179146', '8.6 MB'),
'2017': HmdaDataFile('hmda_2017_ar_all-records_labels.zip', '125392', '4.6 MB'),
'2015': HmdaDataFile('hmda_2015_ar_all-records_labels.zip', '118384', '6.72 MB'),
'2014': HmdaDataFile('hmda_2014_ar_all-records_labels.zip', '108526', '5.63 MB'),
'2008': HmdaDataFile('hmda_2008_ar_all-records_labels.zip', '141191', '6.71 MB'),
'2009': HmdaDataFile('hmda_2009_ar_all-records_labels.zip', '159208', '7.41 MB'),
'2011': HmdaDataFile('hmda_2011_ar_all-records_labels.zip', '127757', '6.44 MB'),
'2010': HmdaDataFile('hmda_2010_ar_all-records_labels.zip', '142441', '7.13 MB'),
'2013': HmdaDataFile('hmda_2013_ar_all-records_labels.zip', '146285', '7.61 MB'),
'2012': HmdaDataFile('hmda_2012_ar_all-records_labels.zip', '154830', '7.85 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ar_originated-records_labels.zip', '65762', '3.27 MB'),
'2007': HmdaDataFile('hmda_2007_ar_originated-records_labels.zip', '83327', '3.71 MB'),
'2017': HmdaDataFile('hmda_2017_ar_originated-records_labels.zip', '62259', '2.05 MB'),
'2015': HmdaDataFile('hmda_2015_ar_originated-records_labels.zip', '59384', '3.2 MB'),
'2014': HmdaDataFile('hmda_2014_ar_originated-records_labels.zip', '52994', '2.56 MB'),
'2008': HmdaDataFile('hmda_2008_ar_originated-records_labels.zip', '71395', '3.11 MB'),
'2009': HmdaDataFile('hmda_2009_ar_originated-records_labels.zip', '78016', '3.4 MB'),
'2011': HmdaDataFile('hmda_2011_ar_originated-records_labels.zip', '62549', '2.86 MB'),
'2010': HmdaDataFile('hmda_2010_ar_originated-records_labels.zip', '69315', '3.08 MB'),
'2013': HmdaDataFile('hmda_2013_ar_originated-records_labels.zip', '73125', '3.48 MB'),
'2012': HmdaDataFile('hmda_2012_ar_originated-records_labels.zip', '79283', '3.68 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '49025', '1.56 MB'),
'2007': HmdaDataFile('hmda_2007_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '55026', '1.62 MB'),
'2017': HmdaDataFile('hmda_2017_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '45327', '1.04 MB'),
'2015': HmdaDataFile('hmda_2015_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '42845', '1.51 MB'),
'2014': HmdaDataFile('hmda_2014_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '37601', '1.16 MB'),
'2008': HmdaDataFile('hmda_2008_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '49034', '1.41 MB'),
'2009': HmdaDataFile('hmda_2009_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '61531', '1.81 MB'),
'2011': HmdaDataFile('hmda_2011_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '46801', '1.35 MB'),
'2010': HmdaDataFile('hmda_2010_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '54493', '1.53 MB'),
'2013': HmdaDataFile('hmda_2013_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '54893', '1.65 MB'),
'2012': HmdaDataFile('hmda_2012_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '61834', '1.81 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ar_all-records_codes.zip', '131352', '4.44 MB'),
'2007': HmdaDataFile('hmda_2007_ar_all-records_codes.zip', '179146', '5.43 MB'),
'2017': HmdaDataFile('hmda_2017_ar_all-records_codes.zip', '125392', '3.01 MB'),
'2015': HmdaDataFile('hmda_2015_ar_all-records_codes.zip', '118384', '4.33 MB'),
'2014': HmdaDataFile('hmda_2014_ar_all-records_codes.zip', '108526', '3.58 MB'),
'2008': HmdaDataFile('hmda_2008_ar_all-records_codes.zip', '141191', '4.14 MB'),
'2009': HmdaDataFile('hmda_2009_ar_all-records_codes.zip', '159208', '4.87 MB'),
'2011': HmdaDataFile('hmda_2011_ar_all-records_codes.zip', '127757', '4.13 MB'),
'2010': HmdaDataFile('hmda_2010_ar_all-records_codes.zip', '142441', '4.51 MB'),
'2013': HmdaDataFile('hmda_2013_ar_all-records_codes.zip', '146285', '4.87 MB'),
'2012': HmdaDataFile('hmda_2012_ar_all-records_codes.zip', '154830', '5.04 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ar_originated-records_codes.zip', '65762', '2.1 MB'),
'2007': HmdaDataFile('hmda_2007_ar_originated-records_codes.zip', '83327', '2.43 MB'),
'2017': HmdaDataFile('hmda_2017_ar_originated-records_codes.zip', '62259', '1.41 MB'),
'2015': HmdaDataFile('hmda_2015_ar_originated-records_codes.zip', '59384', '2.11 MB'),
'2014': HmdaDataFile('hmda_2014_ar_originated-records_codes.zip', '52994', '1.65 MB'),
'2008': HmdaDataFile('hmda_2008_ar_originated-records_codes.zip', '71395', '2.03 MB'),
'2009': HmdaDataFile('hmda_2009_ar_originated-records_codes.zip', '78016', '2.3 MB'),
'2011': HmdaDataFile('hmda_2011_ar_originated-records_codes.zip', '62549', '1.82 MB'),
'2010': HmdaDataFile('hmda_2010_ar_originated-records_codes.zip', '69315', '1.96 MB'),
'2013': HmdaDataFile('hmda_2013_ar_originated-records_codes.zip', '73125', '2.21 MB'),
'2012': HmdaDataFile('hmda_2012_ar_originated-records_codes.zip', '79283', '2.34 MB')
}
}
},
'vt': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '11252', '465.67 KB'),
'2007': HmdaDataFile('hmda_2007_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '13430', '571.04 KB'),
'2017': HmdaDataFile('hmda_2017_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '9250', '291.43 KB'),
'2015': HmdaDataFile('hmda_2015_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '10215', '455.27 KB'),
'2014': HmdaDataFile('hmda_2014_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '8361', '346.05 KB'),
'2008': HmdaDataFile('hmda_2008_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '12079', '491.28 KB'),
'2009': HmdaDataFile('hmda_2009_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '21026', '782.79 KB'),
'2011': HmdaDataFile('hmda_2011_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '13512', '578.18 KB'),
'2010': HmdaDataFile('hmda_2010_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '15651', '620.42 KB'),
'2013': HmdaDataFile('hmda_2013_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '14400', '594.85 KB'),
'2012': HmdaDataFile('hmda_2012_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '17914', '703.52 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_vt_all-records_labels.zip', '25971', '1.19 MB'),
'2007': HmdaDataFile('hmda_2007_vt_all-records_labels.zip', '43664', '1.95 MB'),
'2017': HmdaDataFile('hmda_2017_vt_all-records_labels.zip', '22335', '765.5 KB'),
'2015': HmdaDataFile('hmda_2015_vt_all-records_labels.zip', '24028', '1.19 MB'),
'2014': HmdaDataFile('hmda_2014_vt_all-records_labels.zip', '20816', '953.84 KB'),
'2008': HmdaDataFile('hmda_2008_vt_all-records_labels.zip', '33040', '1.44 MB'),
'2009': HmdaDataFile('hmda_2009_vt_all-records_labels.zip', '46532', '1.87 MB'),
'2011': HmdaDataFile('hmda_2011_vt_all-records_labels.zip', '32687', '1.56 MB'),
'2010': HmdaDataFile('hmda_2010_vt_all-records_labels.zip', '36637', '1.61 MB'),
'2013': HmdaDataFile('hmda_2013_vt_all-records_labels.zip', '32790', '1.49 MB'),
'2012': HmdaDataFile('hmda_2012_vt_all-records_labels.zip', '37869', '1.66 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_vt_originated-records_labels.zip', '15176', '645.84 KB'),
'2007': HmdaDataFile('hmda_2007_vt_originated-records_labels.zip', '20755', '882.15 KB'),
'2017': HmdaDataFile('hmda_2017_vt_originated-records_labels.zip', '13325', '425.68 KB'),
'2015': HmdaDataFile('hmda_2015_vt_originated-records_labels.zip', '14053', '645.96 KB'),
'2014': HmdaDataFile('hmda_2014_vt_originated-records_labels.zip', '11893', '504.52 KB'),
'2008': HmdaDataFile('hmda_2008_vt_originated-records_labels.zip', '17432', '722.72 KB'),
'2009': HmdaDataFile('hmda_2009_vt_originated-records_labels.zip', '25699', '980.48 KB'),
'2011': HmdaDataFile('hmda_2011_vt_originated-records_labels.zip', '17791', '786.1 KB'),
'2010': HmdaDataFile('hmda_2010_vt_originated-records_labels.zip', '19808', '804.9 KB'),
'2013': HmdaDataFile('hmda_2013_vt_originated-records_labels.zip', '19293', '814.7 KB'),
'2012': HmdaDataFile('hmda_2012_vt_originated-records_labels.zip', '22745', '914.05 KB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '11252', '290.62 KB'),
'2007': HmdaDataFile('hmda_2007_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '13430', '365.86 KB'),
'2017': HmdaDataFile('hmda_2017_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '9250', '198.27 KB'),
'2015': HmdaDataFile('hmda_2015_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '10215', '285.97 KB'),
'2014': HmdaDataFile('hmda_2014_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '8361', '216.87 KB'),
'2008': HmdaDataFile('hmda_2008_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '12079', '314.55 KB'),
'2009': HmdaDataFile('hmda_2009_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '21026', '510.88 KB'),
'2011': HmdaDataFile('hmda_2011_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '13512', '361.01 KB'),
'2010': HmdaDataFile('hmda_2010_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '15651', '386.89 KB'),
'2013': HmdaDataFile('hmda_2013_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '14400', '367.68 KB'),
'2012': HmdaDataFile('hmda_2012_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '17914', '433.81 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_vt_all-records_codes.zip', '25971', '716.41 KB'),
'2007': HmdaDataFile('hmda_2007_vt_all-records_codes.zip', '43664', '1.22 MB'),
'2017': HmdaDataFile('hmda_2017_vt_all-records_codes.zip', '22335', '486.85 KB'),
'2015': HmdaDataFile('hmda_2015_vt_all-records_codes.zip', '24028', '719.81 KB'),
'2014': HmdaDataFile('hmda_2014_vt_all-records_codes.zip', '20816', '576.12 KB'),
'2008': HmdaDataFile('hmda_2008_vt_all-records_codes.zip', '33040', '900.48 KB'),
'2009': HmdaDataFile('hmda_2009_vt_all-records_codes.zip', '46532', '1.2 MB'),
'2011': HmdaDataFile('hmda_2011_vt_all-records_codes.zip', '32687', '942.34 KB'),
'2010': HmdaDataFile('hmda_2010_vt_all-records_codes.zip', '36637', '971.16 KB'),
'2013': HmdaDataFile('hmda_2013_vt_all-records_codes.zip', '32790', '897.96 KB'),
'2012': HmdaDataFile('hmda_2012_vt_all-records_codes.zip', '37869', '998.22 KB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_vt_originated-records_codes.zip', '15176', '399.34 KB'),
'2007': HmdaDataFile('hmda_2007_vt_originated-records_codes.zip', '20755', '567.25 KB'),
'2017': HmdaDataFile('hmda_2017_vt_originated-records_codes.zip', '13325', '283.84 KB'),
'2015': HmdaDataFile('hmda_2015_vt_originated-records_codes.zip', '14053', '403.29 KB'),
'2014': HmdaDataFile('hmda_2014_vt_originated-records_codes.zip', '11893', '311.95 KB'),
'2008': HmdaDataFile('hmda_2008_vt_originated-records_codes.zip', '17432', '462.77 KB'),
'2009': HmdaDataFile('hmda_2009_vt_originated-records_codes.zip', '25699', '638.61 KB'),
'2011': HmdaDataFile('hmda_2011_vt_originated-records_codes.zip', '17791', '486.47 KB'),
'2010': HmdaDataFile('hmda_2010_vt_originated-records_codes.zip', '19808', '497.91 KB'),
'2013': HmdaDataFile('hmda_2013_vt_originated-records_codes.zip', '19293', '501.93 KB'),
'2012': HmdaDataFile('hmda_2012_vt_originated-records_codes.zip', '22745', '561.59 KB')
}
}
},
'il': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '278035', '14.02 MB'),
'2007': HmdaDataFile('hmda_2007_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '346807', '16.76 MB'),
'2017': HmdaDataFile('hmda_2017_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '225445', '7.03 MB'),
'2015': HmdaDataFile('hmda_2015_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '245110', '13.66 MB'),
'2014': HmdaDataFile('hmda_2014_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '193991', '9.89 MB'),
'2008': HmdaDataFile('hmda_2008_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '278003', '13.04 MB'),
'2009': HmdaDataFile('hmda_2009_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '396141', '17.05 MB'),
'2011': HmdaDataFile('hmda_2011_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '282861', '13.82 MB'),
'2010': HmdaDataFile('hmda_2010_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '343779', '16.76 MB'),
'2013': HmdaDataFile('hmda_2013_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '295909', '14.97 MB'),
'2012': HmdaDataFile('hmda_2012_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '378550', '18.82 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_il_all-records_labels.zip', '583019', '31.48 MB'),
'2007': HmdaDataFile('hmda_2007_il_all-records_labels.zip', '1117310', '53.26 MB'),
'2017': HmdaDataFile('hmda_2017_il_all-records_labels.zip', '502511', '18.63 MB'),
'2015': HmdaDataFile('hmda_2015_il_all-records_labels.zip', '517360', '30.86 MB'),
'2014': HmdaDataFile('hmda_2014_il_all-records_labels.zip', '437239', '23.79 MB'),
'2008': HmdaDataFile('hmda_2008_il_all-records_labels.zip', '761632', '36.35 MB'),
'2009': HmdaDataFile('hmda_2009_il_all-records_labels.zip', '849782', '37.88 MB'),
'2011': HmdaDataFile('hmda_2011_il_all-records_labels.zip', '620832', '33.1 MB'),
'2010': HmdaDataFile('hmda_2010_il_all-records_labels.zip', '716356', '37.81 MB'),
'2013': HmdaDataFile('hmda_2013_il_all-records_labels.zip', '637258', '34.91 MB'),
'2012': HmdaDataFile('hmda_2012_il_all-records_labels.zip', '754118', '40.71 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_il_originated-records_labels.zip', '317597', '16.17 MB'),
'2007': HmdaDataFile('hmda_2007_il_originated-records_labels.zip', '470592', '22.68 MB'),
'2017': HmdaDataFile('hmda_2017_il_originated-records_labels.zip', '265490', '8.51 MB'),
'2015': HmdaDataFile('hmda_2015_il_originated-records_labels.zip', '284551', '16.06 MB'),
'2014': HmdaDataFile('hmda_2014_il_originated-records_labels.zip', '232557', '11.98 MB'),
'2008': HmdaDataFile('hmda_2008_il_originated-records_labels.zip', '339543', '16.11 MB'),
'2009': HmdaDataFile('hmda_2009_il_originated-records_labels.zip', '432707', '18.86 MB'),
'2011': HmdaDataFile('hmda_2011_il_originated-records_labels.zip', '319004', '15.78 MB'),
'2010': HmdaDataFile('hmda_2010_il_originated-records_labels.zip', '378335', '18.65 MB'),
'2013': HmdaDataFile('hmda_2013_il_originated-records_labels.zip', '344172', '17.61 MB'),
'2012': HmdaDataFile('hmda_2012_il_originated-records_labels.zip', '424748', '21.38 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '278035', '10.15 MB'),
'2007': HmdaDataFile('hmda_2007_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '346807', '12.31 MB'),
'2017': HmdaDataFile('hmda_2017_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '225445', '5.06 MB'),
'2015': HmdaDataFile('hmda_2015_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '245110', '9.83 MB'),
'2014': HmdaDataFile('hmda_2014_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '193991', '7.12 MB'),
'2008': HmdaDataFile('hmda_2008_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '278003', '9.54 MB'),
'2009': HmdaDataFile('hmda_2009_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '396141', '12.55 MB'),
'2011': HmdaDataFile('hmda_2011_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '282861', '9.82 MB'),
'2010': HmdaDataFile('hmda_2010_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '343779', '11.91 MB'),
'2013': HmdaDataFile('hmda_2013_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '295909', '10.83 MB'),
'2012': HmdaDataFile('hmda_2012_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '378550', '13.52 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_il_all-records_codes.zip', '583019', '22.25 MB'),
'2007': HmdaDataFile('hmda_2007_il_all-records_codes.zip', '1117310', '37.59 MB'),
'2017': HmdaDataFile('hmda_2017_il_all-records_codes.zip', '502511', '12.69 MB'),
'2015': HmdaDataFile('hmda_2015_il_all-records_codes.zip', '517360', '21.34 MB'),
'2014': HmdaDataFile('hmda_2014_il_all-records_codes.zip', '437239', '16.64 MB'),
'2008': HmdaDataFile('hmda_2008_il_all-records_codes.zip', '761632', '25.64 MB'),
'2009': HmdaDataFile('hmda_2009_il_all-records_codes.zip', '849782', '26.91 MB'),
'2011': HmdaDataFile('hmda_2011_il_all-records_codes.zip', '620832', '22.91 MB'),
'2010': HmdaDataFile('hmda_2010_il_all-records_codes.zip', '716356', '26.12 MB'),
'2013': HmdaDataFile('hmda_2013_il_all-records_codes.zip', '637258', '24.68 MB'),
'2012': HmdaDataFile('hmda_2012_il_all-records_codes.zip', '754118', '28.69 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_il_originated-records_codes.zip', '317597', '11.62 MB'),
'2007': HmdaDataFile('hmda_2007_il_originated-records_codes.zip', '470592', '16.53 MB'),
'2017': HmdaDataFile('hmda_2017_il_originated-records_codes.zip', '265490', '6.03 MB'),
'2015': HmdaDataFile('hmda_2015_il_originated-records_codes.zip', '284551', '11.44 MB'),
'2014': HmdaDataFile('hmda_2014_il_originated-records_codes.zip', '232557', '8.54 MB'),
'2008': HmdaDataFile('hmda_2008_il_originated-records_codes.zip', '339543', '11.68 MB'),
'2009': HmdaDataFile('hmda_2009_il_originated-records_codes.zip', '432707', '13.79 MB'),
'2011': HmdaDataFile('hmda_2011_il_originated-records_codes.zip', '319004', '11.14 MB'),
'2010': HmdaDataFile('hmda_2010_il_originated-records_codes.zip', '378335', '13.17 MB'),
'2013': HmdaDataFile('hmda_2013_il_originated-records_codes.zip', '344172', '12.64 MB'),
'2012': HmdaDataFile('hmda_2012_il_originated-records_codes.zip', '424748', '15.26 MB')
}
}
},
'ga': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '225258', '11.78 MB'),
'2007': HmdaDataFile('hmda_2007_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '236346', '11.17 MB'),
'2017': HmdaDataFile('hmda_2017_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '203948', '6.37 MB'),
'2015': HmdaDataFile('hmda_2015_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '193285', '11.11 MB'),
'2014': HmdaDataFile('hmda_2014_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '147432', '7.81 MB'),
'2008': HmdaDataFile('hmda_2008_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '179658', '8.42 MB'),
'2009': HmdaDataFile('hmda_2009_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '220141', '9.84 MB'),
'2011': HmdaDataFile('hmda_2011_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '165891', '8.32 MB'),
'2010': HmdaDataFile('hmda_2010_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '171421', '8.49 MB'),
'2013': HmdaDataFile('hmda_2013_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '210048', '10.77 MB'),
'2012': HmdaDataFile('hmda_2012_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '229259', '11.67 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ga_all-records_labels.zip', '547637', '30.4 MB'),
'2007': HmdaDataFile('hmda_2007_ga_all-records_labels.zip', '899812', '43.49 MB'),
'2017': HmdaDataFile('hmda_2017_ga_all-records_labels.zip', '501310', '18.01 MB'),
'2015': HmdaDataFile('hmda_2015_ga_all-records_labels.zip', '478359', '29.23 MB'),
'2014': HmdaDataFile('hmda_2014_ga_all-records_labels.zip', '391231', '22.06 MB'),
'2008': HmdaDataFile('hmda_2008_ga_all-records_labels.zip', '583802', '28.24 MB'),
'2009': HmdaDataFile('hmda_2009_ga_all-records_labels.zip', '612188', '28.44 MB'),
'2011': HmdaDataFile('hmda_2011_ga_all-records_labels.zip', '444258', '24.51 MB'),
'2010': HmdaDataFile('hmda_2010_ga_all-records_labels.zip', '466839', '25.16 MB'),
'2013': HmdaDataFile('hmda_2013_ga_all-records_labels.zip', '537898', '29.53 MB'),
'2012': HmdaDataFile('hmda_2012_ga_all-records_labels.zip', '559464', '30.51 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ga_originated-records_labels.zip', '264802', '14.07 MB'),
'2007': HmdaDataFile('hmda_2007_ga_originated-records_labels.zip', '352181', '16.55 MB'),
'2017': HmdaDataFile('hmda_2017_ga_originated-records_labels.zip', '244731', '7.97 MB'),
'2015': HmdaDataFile('hmda_2015_ga_originated-records_labels.zip', '232822', '13.6 MB'),
'2014': HmdaDataFile('hmda_2014_ga_originated-records_labels.zip', '185375', '10 MB'),
'2008': HmdaDataFile('hmda_2008_ga_originated-records_labels.zip', '244230', '11.53 MB'),
'2009': HmdaDataFile('hmda_2009_ga_originated-records_labels.zip', '261989', '11.88 MB'),
'2011': HmdaDataFile('hmda_2011_ga_originated-records_labels.zip', '204582', '10.57 MB'),
'2010': HmdaDataFile('hmda_2010_ga_originated-records_labels.zip', '208728', '10.42 MB'),
'2013': HmdaDataFile('hmda_2013_ga_originated-records_labels.zip', '262544', '13.64 MB'),
'2012': HmdaDataFile('hmda_2012_ga_originated-records_labels.zip', '277607', '14.27 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '225258', '8.23 MB'),
'2007': HmdaDataFile('hmda_2007_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '236346', '7.88 MB'),
'2017': HmdaDataFile('hmda_2017_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '203948', '4.43 MB'),
'2015': HmdaDataFile('hmda_2015_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '193285', '7.71 MB'),
'2014': HmdaDataFile('hmda_2014_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '147432', '5.46 MB'),
'2008': HmdaDataFile('hmda_2008_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '179658', '5.94 MB'),
'2009': HmdaDataFile('hmda_2009_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '220141', '7.02 MB'),
'2011': HmdaDataFile('hmda_2011_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '165891', '5.63 MB'),
'2010': HmdaDataFile('hmda_2010_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '171421', '5.76 MB'),
'2013': HmdaDataFile('hmda_2013_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '210048', '7.54 MB'),
'2012': HmdaDataFile('hmda_2012_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '229259', '8.16 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ga_all-records_codes.zip', '547637', '20.69 MB'),
'2007': HmdaDataFile('hmda_2007_ga_all-records_codes.zip', '899812', '29.71 MB'),
'2017': HmdaDataFile('hmda_2017_ga_all-records_codes.zip', '501310', '11.97 MB'),
'2015': HmdaDataFile('hmda_2015_ga_all-records_codes.zip', '478359', '19.47 MB'),
'2014': HmdaDataFile('hmda_2014_ga_all-records_codes.zip', '391231', '14.94 MB'),
'2008': HmdaDataFile('hmda_2008_ga_all-records_codes.zip', '583802', '19.31 MB'),
'2009': HmdaDataFile('hmda_2009_ga_all-records_codes.zip', '612188', '19.6 MB'),
'2011': HmdaDataFile('hmda_2011_ga_all-records_codes.zip', '444258', '16.2 MB'),
'2010': HmdaDataFile('hmda_2010_ga_all-records_codes.zip', '466839', '16.61 MB'),
'2013': HmdaDataFile('hmda_2013_ga_all-records_codes.zip', '537898', '20.11 MB'),
'2012': HmdaDataFile('hmda_2012_ga_all-records_codes.zip', '559464', '20.8 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ga_originated-records_codes.zip', '264802', '9.77 MB'),
'2007': HmdaDataFile('hmda_2007_ga_originated-records_codes.zip', '352181', '11.59 MB'),
'2017': HmdaDataFile('hmda_2017_ga_originated-records_codes.zip', '244731', '5.49 MB'),
'2015': HmdaDataFile('hmda_2015_ga_originated-records_codes.zip', '232822', '9.35 MB'),
'2014': HmdaDataFile('hmda_2014_ga_originated-records_codes.zip', '185375', '6.91 MB'),
'2008': HmdaDataFile('hmda_2008_ga_originated-records_codes.zip', '244230', '8.04 MB'),
'2009': HmdaDataFile('hmda_2009_ga_originated-records_codes.zip', '261989', '8.4 MB'),
'2011': HmdaDataFile('hmda_2011_ga_originated-records_codes.zip', '204582', '7.11 MB'),
'2010': HmdaDataFile('hmda_2010_ga_originated-records_codes.zip', '208728', '7.02 MB'),
'2013': HmdaDataFile('hmda_2013_ga_originated-records_codes.zip', '262544', '9.46 MB'),
'2012': HmdaDataFile('hmda_2012_ga_originated-records_codes.zip', '277607', '9.89 MB')
}
}
},
'in': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '149979', '7.55 MB'),
'2007': HmdaDataFile('hmda_2007_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '145394', '6.8 MB'),
'2017': HmdaDataFile('hmda_2017_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '135246', '4.03 MB'),
'2015': HmdaDataFile('hmda_2015_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '134522', '7.59 MB'),
'2014': HmdaDataFile('hmda_2014_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '108789', '5.36 MB'),
'2008': HmdaDataFile('hmda_2008_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '124555', '5.73 MB'),
'2009': HmdaDataFile('hmda_2009_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '187145', '8.03 MB'),
'2011': HmdaDataFile('hmda_2011_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '141707', '6.73 MB'),
'2010': HmdaDataFile('hmda_2010_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '161225', '7.29 MB'),
'2013': HmdaDataFile('hmda_2013_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '159649', '7.66 MB'),
'2012': HmdaDataFile('hmda_2012_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '188614', '8.83 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_in_all-records_labels.zip', '319123', '17.64 MB'),
'2007': HmdaDataFile('hmda_2007_in_all-records_labels.zip', '474561', '24.38 MB'),
'2017': HmdaDataFile('hmda_2017_in_all-records_labels.zip', '292152', '10.14 MB'),
'2015': HmdaDataFile('hmda_2015_in_all-records_labels.zip', '288746', '17.61 MB'),
'2014': HmdaDataFile('hmda_2014_in_all-records_labels.zip', '248347', '13.51 MB'),
'2008': HmdaDataFile('hmda_2008_in_all-records_labels.zip', '348681', '17.43 MB'),
'2009': HmdaDataFile('hmda_2009_in_all-records_labels.zip', '421392', '19.89 MB'),
'2011': HmdaDataFile('hmda_2011_in_all-records_labels.zip', '322061', '17.24 MB'),
'2010': HmdaDataFile('hmda_2010_in_all-records_labels.zip', '359860', '18.65 MB'),
'2013': HmdaDataFile('hmda_2013_in_all-records_labels.zip', '344116', '18.48 MB'),
'2012': HmdaDataFile('hmda_2012_in_all-records_labels.zip', '385267', '20.36 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_in_originated-records_labels.zip', '172307', '8.79 MB'),
'2007': HmdaDataFile('hmda_2007_in_originated-records_labels.zip', '199213', '9.38 MB'),
'2017': HmdaDataFile('hmda_2017_in_originated-records_labels.zip', '158693', '4.9 MB'),
'2015': HmdaDataFile('hmda_2015_in_originated-records_labels.zip', '156109', '8.92 MB'),
'2014': HmdaDataFile('hmda_2014_in_originated-records_labels.zip', '130131', '6.52 MB'),
'2008': HmdaDataFile('hmda_2008_in_originated-records_labels.zip', '155308', '7.25 MB'),
'2009': HmdaDataFile('hmda_2009_in_originated-records_labels.zip', '207593', '9.02 MB'),
'2011': HmdaDataFile('hmda_2011_in_originated-records_labels.zip', '160424', '7.77 MB'),
'2010': HmdaDataFile('hmda_2010_in_originated-records_labels.zip', '179820', '8.27 MB'),
'2013': HmdaDataFile('hmda_2013_in_originated-records_labels.zip', '184428', '8.97 MB'),
'2012': HmdaDataFile('hmda_2012_in_originated-records_labels.zip', '210891', '10.05 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '149979', '5.05 MB'),
'2007': HmdaDataFile('hmda_2007_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '145394', '4.63 MB'),
'2017': HmdaDataFile('hmda_2017_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '135246', '2.85 MB'),
'2015': HmdaDataFile('hmda_2015_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '134522', '5.11 MB'),
'2014': HmdaDataFile('hmda_2014_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '108789', '3.62 MB'),
'2008': HmdaDataFile('hmda_2008_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '124555', '3.9 MB'),
'2009': HmdaDataFile('hmda_2009_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '187145', '5.67 MB'),
'2011': HmdaDataFile('hmda_2011_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '141707', '4.43 MB'),
'2010': HmdaDataFile('hmda_2010_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '161225', '4.82 MB'),
'2013': HmdaDataFile('hmda_2013_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '159649', '5.11 MB'),
'2012': HmdaDataFile('hmda_2012_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '188614', '5.87 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_in_all-records_codes.zip', '319123', '11.54 MB'),
'2007': HmdaDataFile('hmda_2007_in_all-records_codes.zip', '474561', '16.44 MB'),
'2017': HmdaDataFile('hmda_2017_in_all-records_codes.zip', '292152', '6.68 MB'),
'2015': HmdaDataFile('hmda_2015_in_all-records_codes.zip', '288746', '11.46 MB'),
'2014': HmdaDataFile('hmda_2014_in_all-records_codes.zip', '248347', '8.9 MB'),
'2008': HmdaDataFile('hmda_2008_in_all-records_codes.zip', '348681', '11.68 MB'),
'2009': HmdaDataFile('hmda_2009_in_all-records_codes.zip', '421392', '13.64 MB'),
'2011': HmdaDataFile('hmda_2011_in_all-records_codes.zip', '322061', '11.1 MB'),
'2010': HmdaDataFile('hmda_2010_in_all-records_codes.zip', '359860', '12.06 MB'),
'2013': HmdaDataFile('hmda_2013_in_all-records_codes.zip', '344116', '12.12 MB'),
'2012': HmdaDataFile('hmda_2012_in_all-records_codes.zip', '385267', '13.31 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_in_originated-records_codes.zip', '172307', '5.84 MB'),
'2007': HmdaDataFile('hmda_2007_in_originated-records_codes.zip', '199213', '6.61 MB'),
'2017': HmdaDataFile('hmda_2017_in_originated-records_codes.zip', '158693', '3.43 MB'),
'2015': HmdaDataFile('hmda_2015_in_originated-records_codes.zip', '156109', '5.98 MB'),
'2014': HmdaDataFile('hmda_2014_in_originated-records_codes.zip', '130131', '4.37 MB'),
'2008': HmdaDataFile('hmda_2008_in_originated-records_codes.zip', '155308', '4.94 MB'),
'2009': HmdaDataFile('hmda_2009_in_originated-records_codes.zip', '207593', '6.34 MB'),
'2011': HmdaDataFile('hmda_2011_in_originated-records_codes.zip', '160424', '5.09 MB'),
'2010': HmdaDataFile('hmda_2010_in_originated-records_codes.zip', '179820', '5.44 MB'),
'2013': HmdaDataFile('hmda_2013_in_originated-records_codes.zip', '184428', '5.94 MB'),
'2012': HmdaDataFile('hmda_2012_in_originated-records_codes.zip', '210891', '6.64 MB')
}
}
},
'ia': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '71680', '3.32 MB'),
'2007': HmdaDataFile('hmda_2007_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '65703', '3.02 MB'),
'2017': HmdaDataFile('hmda_2017_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '60347', '1.83 MB'),
'2015': HmdaDataFile('hmda_2015_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '63773', '3.32 MB'),
'2014': HmdaDataFile('hmda_2014_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '52784', '2.39 MB'),
'2008': HmdaDataFile('hmda_2008_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '60097', '2.72 MB'),
'2009': HmdaDataFile('hmda_2009_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '96858', '3.85 MB'),
'2011': HmdaDataFile('hmda_2011_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '72565', '3.17 MB'),
'2010': HmdaDataFile('hmda_2010_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '84923', '3.79 MB'),
'2013': HmdaDataFile('hmda_2013_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '76061', '3.38 MB'),
'2012': HmdaDataFile('hmda_2012_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '93906', '4.12 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ia_all-records_labels.zip', '149227', '7.39 MB'),
'2007': HmdaDataFile('hmda_2007_ia_all-records_labels.zip', '197991', '9.31 MB'),
'2017': HmdaDataFile('hmda_2017_ia_all-records_labels.zip', '127446', '4.12 MB'),
'2015': HmdaDataFile('hmda_2015_ia_all-records_labels.zip', '136795', '7.68 MB'),
'2014': HmdaDataFile('hmda_2014_ia_all-records_labels.zip', '115594', '5.62 MB'),
'2008': HmdaDataFile('hmda_2008_ia_all-records_labels.zip', '157339', '7.52 MB'),
'2009': HmdaDataFile('hmda_2009_ia_all-records_labels.zip', '200497', '8.54 MB'),
'2011': HmdaDataFile('hmda_2011_ia_all-records_labels.zip', '150683', '7.18 MB'),
'2010': HmdaDataFile('hmda_2010_ia_all-records_labels.zip', '172100', '8.41 MB'),
'2013': HmdaDataFile('hmda_2013_ia_all-records_labels.zip', '160707', '7.67 MB'),
'2012': HmdaDataFile('hmda_2012_ia_all-records_labels.zip', '181237', '8.53 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ia_originated-records_labels.zip', '90932', '4.27 MB'),
'2007': HmdaDataFile('hmda_2007_ia_originated-records_labels.zip', '93343', '4.31 MB'),
'2017': HmdaDataFile('hmda_2017_ia_originated-records_labels.zip', '79977', '2.43 MB'),
'2015': HmdaDataFile('hmda_2015_ia_originated-records_labels.zip', '83214', '4.37 MB'),
'2014': HmdaDataFile('hmda_2014_ia_originated-records_labels.zip', '71413', '3.26 MB'),
'2008': HmdaDataFile('hmda_2008_ia_originated-records_labels.zip', '78965', '3.63 MB'),
'2009': HmdaDataFile('hmda_2009_ia_originated-records_labels.zip', '112522', '4.57 MB'),
'2011': HmdaDataFile('hmda_2011_ia_originated-records_labels.zip', '87178', '3.88 MB'),
'2010': HmdaDataFile('hmda_2010_ia_originated-records_labels.zip', '100132', '4.56 MB'),
'2013': HmdaDataFile('hmda_2013_ia_originated-records_labels.zip', '95886', '4.34 MB'),
'2012': HmdaDataFile('hmda_2012_ia_originated-records_labels.zip', '111610', '4.98 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '71680', '2.17 MB'),
'2007': HmdaDataFile('hmda_2007_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '65703', '2.02 MB'),
'2017': HmdaDataFile('hmda_2017_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '60347', '1.29 MB'),
'2015': HmdaDataFile('hmda_2015_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '63773', '2.2 MB'),
'2014': HmdaDataFile('hmda_2014_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '52784', '1.56 MB'),
'2008': HmdaDataFile('hmda_2008_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '60097', '1.82 MB'),
'2009': HmdaDataFile('hmda_2009_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '96858', '2.64 MB'),
'2011': HmdaDataFile('hmda_2011_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '72565', '2.05 MB'),
'2010': HmdaDataFile('hmda_2010_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '84923', '2.46 MB'),
'2013': HmdaDataFile('hmda_2013_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '76061', '2.18 MB'),
'2012': HmdaDataFile('hmda_2012_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '93906', '2.66 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ia_all-records_codes.zip', '149227', '4.65 MB'),
'2007': HmdaDataFile('hmda_2007_ia_all-records_codes.zip', '197991', '6.1 MB'),
'2017': HmdaDataFile('hmda_2017_ia_all-records_codes.zip', '127446', '2.7 MB'),
'2015': HmdaDataFile('hmda_2015_ia_all-records_codes.zip', '136795', '4.9 MB'),
'2014': HmdaDataFile('hmda_2014_ia_all-records_codes.zip', '115594', '3.51 MB'),
'2008': HmdaDataFile('hmda_2008_ia_all-records_codes.zip', '157339', '4.92 MB'),
'2009': HmdaDataFile('hmda_2009_ia_all-records_codes.zip', '200497', '5.67 MB'),
'2011': HmdaDataFile('hmda_2011_ia_all-records_codes.zip', '150683', '4.49 MB'),
'2010': HmdaDataFile('hmda_2010_ia_all-records_codes.zip', '172100', '5.28 MB'),
'2013': HmdaDataFile('hmda_2013_ia_all-records_codes.zip', '160707', '4.75 MB'),
'2012': HmdaDataFile('hmda_2012_ia_all-records_codes.zip', '181237', '5.31 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ia_originated-records_codes.zip', '90932', '2.74 MB'),
'2007': HmdaDataFile('hmda_2007_ia_originated-records_codes.zip', '93343', '2.86 MB'),
'2017': HmdaDataFile('hmda_2017_ia_originated-records_codes.zip', '79977', '1.67 MB'),
'2015': HmdaDataFile('hmda_2015_ia_originated-records_codes.zip', '83214', '2.85 MB'),
'2014': HmdaDataFile('hmda_2014_ia_originated-records_codes.zip', '71413', '2.08 MB'),
'2008': HmdaDataFile('hmda_2008_ia_originated-records_codes.zip', '78965', '2.41 MB'),
'2009': HmdaDataFile('hmda_2009_ia_originated-records_codes.zip', '112522', '3.11 MB'),
'2011': HmdaDataFile('hmda_2011_ia_originated-records_codes.zip', '87178', '2.48 MB'),
'2010': HmdaDataFile('hmda_2010_ia_originated-records_codes.zip', '100132', '2.93 MB'),
'2013': HmdaDataFile('hmda_2013_ia_originated-records_codes.zip', '95886', '2.76 MB'),
'2012': HmdaDataFile('hmda_2012_ia_originated-records_codes.zip', '111610', '3.18 MB')
}
}
},
'az': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '201543', '9.98 MB'),
'2007': HmdaDataFile('hmda_2007_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '198791', '8.9 MB'),
'2017': HmdaDataFile('hmda_2017_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '177849', '5.45 MB'),
'2015': HmdaDataFile('hmda_2015_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '166410', '8.87 MB'),
'2014': HmdaDataFile('hmda_2014_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '124580', '6.3 MB'),
'2008': HmdaDataFile('hmda_2008_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '129343', '5.73 MB'),
'2009': HmdaDataFile('hmda_2009_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '161393', '6.71 MB'),
'2011': HmdaDataFile('hmda_2011_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '114583', '5.41 MB'),
'2010': HmdaDataFile('hmda_2010_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '132138', '6.21 MB'),
'2013': HmdaDataFile('hmda_2013_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '176268', '8.6 MB'),
'2012': HmdaDataFile('hmda_2012_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '197491', '9.59 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_az_all-records_labels.zip', '478386', '24.75 MB'),
'2007': HmdaDataFile('hmda_2007_az_all-records_labels.zip', '803675', '35.11 MB'),
'2017': HmdaDataFile('hmda_2017_az_all-records_labels.zip', '428411', '14.46 MB'),
'2015': HmdaDataFile('hmda_2015_az_all-records_labels.zip', '391879', '22 MB'),
'2014': HmdaDataFile('hmda_2014_az_all-records_labels.zip', '317345', '16.74 MB'),
'2008': HmdaDataFile('hmda_2008_az_all-records_labels.zip', '425680', '19.23 MB'),
'2009': HmdaDataFile('hmda_2009_az_all-records_labels.zip', '441291', '18.88 MB'),
'2011': HmdaDataFile('hmda_2011_az_all-records_labels.zip', '313348', '16.05 MB'),
'2010': HmdaDataFile('hmda_2010_az_all-records_labels.zip', '350571', '17.93 MB'),
'2013': HmdaDataFile('hmda_2013_az_all-records_labels.zip', '428383', '22.29 MB'),
'2012': HmdaDataFile('hmda_2012_az_all-records_labels.zip', '458365', '23.72 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_az_originated-records_labels.zip', '236688', '11.85 MB'),
'2007': HmdaDataFile('hmda_2007_az_originated-records_labels.zip', '292272', '13.05 MB'),
'2017': HmdaDataFile('hmda_2017_az_originated-records_labels.zip', '213547', '6.63 MB'),
'2015': HmdaDataFile('hmda_2015_az_originated-records_labels.zip', '198908', '10.8 MB'),
'2014': HmdaDataFile('hmda_2014_az_originated-records_labels.zip', '155001', '7.95 MB'),
'2008': HmdaDataFile('hmda_2008_az_originated-records_labels.zip', '165011', '7.37 MB'),
'2009': HmdaDataFile('hmda_2009_az_originated-records_labels.zip', '190609', '7.98 MB'),
'2011': HmdaDataFile('hmda_2011_az_originated-records_labels.zip', '146231', '7.09 MB'),
'2010': HmdaDataFile('hmda_2010_az_originated-records_labels.zip', '160055', '7.73 MB'),
'2013': HmdaDataFile('hmda_2013_az_originated-records_labels.zip', '224986', '11.14 MB'),
'2012': HmdaDataFile('hmda_2012_az_originated-records_labels.zip', '247572', '12.19 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '201543', '7.26 MB'),
'2007': HmdaDataFile('hmda_2007_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '198791', '6.42 MB'),
'2017': HmdaDataFile('hmda_2017_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '177849', '3.82 MB'),
'2015': HmdaDataFile('hmda_2015_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '166410', '6.44 MB'),
'2014': HmdaDataFile('hmda_2014_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '124580', '4.59 MB'),
'2008': HmdaDataFile('hmda_2008_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '129343', '4.15 MB'),
'2009': HmdaDataFile('hmda_2009_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '161393', '4.89 MB'),
'2011': HmdaDataFile('hmda_2011_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '114583', '3.79 MB'),
'2010': HmdaDataFile('hmda_2010_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '132138', '4.33 MB'),
'2013': HmdaDataFile('hmda_2013_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '176268', '6.24 MB'),
'2012': HmdaDataFile('hmda_2012_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '197491', '6.92 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_az_all-records_codes.zip', '478386', '17.45 MB'),
'2007': HmdaDataFile('hmda_2007_az_all-records_codes.zip', '803675', '24.35 MB'),
'2017': HmdaDataFile('hmda_2017_az_all-records_codes.zip', '428411', '9.68 MB'),
'2015': HmdaDataFile('hmda_2015_az_all-records_codes.zip', '391879', '15.26 MB'),
'2014': HmdaDataFile('hmda_2014_az_all-records_codes.zip', '317345', '11.78 MB'),
'2008': HmdaDataFile('hmda_2008_az_all-records_codes.zip', '425680', '13.42 MB'),
'2009': HmdaDataFile('hmda_2009_az_all-records_codes.zip', '441291', '13.49 MB'),
'2011': HmdaDataFile('hmda_2011_az_all-records_codes.zip', '313348', '10.91 MB'),
'2010': HmdaDataFile('hmda_2010_az_all-records_codes.zip', '350571', '12.17 MB'),
'2013': HmdaDataFile('hmda_2013_az_all-records_codes.zip', '428383', '15.68 MB'),
'2012': HmdaDataFile('hmda_2012_az_all-records_codes.zip', '458365', '16.67 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_az_originated-records_codes.zip', '236688', '8.56 MB'),
'2007': HmdaDataFile('hmda_2007_az_originated-records_codes.zip', '292272', '9.39 MB'),
'2017': HmdaDataFile('hmda_2017_az_originated-records_codes.zip', '213547', '4.61 MB'),
'2015': HmdaDataFile('hmda_2015_az_originated-records_codes.zip', '198908', '7.79 MB'),
'2014': HmdaDataFile('hmda_2014_az_originated-records_codes.zip', '155001', '5.74 MB'),
'2008': HmdaDataFile('hmda_2008_az_originated-records_codes.zip', '165011', '5.32 MB'),
'2009': HmdaDataFile('hmda_2009_az_originated-records_codes.zip', '190609', '5.81 MB'),
'2011': HmdaDataFile('hmda_2011_az_originated-records_codes.zip', '146231', '4.94 MB'),
'2010': HmdaDataFile('hmda_2010_az_originated-records_codes.zip', '160055', '5.38 MB'),
'2013': HmdaDataFile('hmda_2013_az_originated-records_codes.zip', '224986', '8.02 MB'),
'2012': HmdaDataFile('hmda_2012_az_originated-records_codes.zip', '247572', '8.75 MB')
}
}
},
'id': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '49270', '2.03 MB'),
'2007': HmdaDataFile('hmda_2007_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '45546', '1.84 MB'),
'2017': HmdaDataFile('hmda_2017_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '43374', '1.27 MB'),
'2015': HmdaDataFile('hmda_2015_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '41252', '1.97 MB'),
'2014': HmdaDataFile('hmda_2014_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '30539', '1.29 MB'),
'2008': HmdaDataFile('hmda_2008_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '36308', '1.49 MB'),
'2009': HmdaDataFile('hmda_2009_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '49056', '1.87 MB'),
'2011': HmdaDataFile('hmda_2011_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '30665', '1.26 MB'),
'2010': HmdaDataFile('hmda_2010_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '37182', '1.47 MB'),
'2013': HmdaDataFile('hmda_2013_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '41770', '1.75 MB'),
'2012': HmdaDataFile('hmda_2012_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '46607', '1.91 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_id_all-records_labels.zip', '103880', '4.67 MB'),
'2007': HmdaDataFile('hmda_2007_id_all-records_labels.zip', '156706', '6.54 MB'),
'2017': HmdaDataFile('hmda_2017_id_all-records_labels.zip', '92755', '3.01 MB'),
'2015': HmdaDataFile('hmda_2015_id_all-records_labels.zip', '89063', '4.72 MB'),
'2014': HmdaDataFile('hmda_2014_id_all-records_labels.zip', '70222', '3.27 MB'),
'2008': HmdaDataFile('hmda_2008_id_all-records_labels.zip', '108689', '4.65 MB'),
'2009': HmdaDataFile('hmda_2009_id_all-records_labels.zip', '125244', '5.05 MB'),
'2011': HmdaDataFile('hmda_2011_id_all-records_labels.zip', '77672', '3.5 MB'),
'2010': HmdaDataFile('hmda_2010_id_all-records_labels.zip', '94170', '4.16 MB'),
'2013': HmdaDataFile('hmda_2013_id_all-records_labels.zip', '97051', '4.49 MB'),
'2012': HmdaDataFile('hmda_2012_id_all-records_labels.zip', '103766', '4.64 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_id_originated-records_labels.zip', '59151', '2.48 MB'),
'2007': HmdaDataFile('hmda_2007_id_originated-records_labels.zip', '67396', '2.74 MB'),
'2017': HmdaDataFile('hmda_2017_id_originated-records_labels.zip', '54102', '1.62 MB'),
'2015': HmdaDataFile('hmda_2015_id_originated-records_labels.zip', '50554', '2.46 MB'),
'2014': HmdaDataFile('hmda_2014_id_originated-records_labels.zip', '38605', '1.67 MB'),
'2008': HmdaDataFile('hmda_2008_id_originated-records_labels.zip', '46531', '1.93 MB'),
'2009': HmdaDataFile('hmda_2009_id_originated-records_labels.zip', '56985', '2.2 MB'),
'2011': HmdaDataFile('hmda_2011_id_originated-records_labels.zip', '37943', '1.6 MB'),
'2010': HmdaDataFile('hmda_2010_id_originated-records_labels.zip', '44663', '1.79 MB'),
'2013': HmdaDataFile('hmda_2013_id_originated-records_labels.zip', '53109', '2.27 MB'),
'2012': HmdaDataFile('hmda_2012_id_originated-records_labels.zip', '57188', '2.39 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '49270', '1.31 MB'),
'2007': HmdaDataFile('hmda_2007_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '45546', '1.22 MB'),
'2017': HmdaDataFile('hmda_2017_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '43374', '882.08 KB'),
'2015': HmdaDataFile('hmda_2015_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '41252', '1.28 MB'),
'2014': HmdaDataFile('hmda_2014_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '30539', '834.4 KB'),
'2008': HmdaDataFile('hmda_2008_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '36308', '1 MB'),
'2009': HmdaDataFile('hmda_2009_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '49056', '1.27 MB'),
'2011': HmdaDataFile('hmda_2011_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '30665', '802.44 KB'),
'2010': HmdaDataFile('hmda_2010_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '37182', '944.45 KB'),
'2013': HmdaDataFile('hmda_2013_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '41770', '1.12 MB'),
'2012': HmdaDataFile('hmda_2012_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '46607', '1.21 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_id_all-records_codes.zip', '103880', '2.88 MB'),
'2007': HmdaDataFile('hmda_2007_id_all-records_codes.zip', '156706', '4.26 MB'),
'2017': HmdaDataFile('hmda_2017_id_all-records_codes.zip', '92755', '1.96 MB'),
'2015': HmdaDataFile('hmda_2015_id_all-records_codes.zip', '89063', '2.95 MB'),
'2014': HmdaDataFile('hmda_2014_id_all-records_codes.zip', '70222', '2.02 MB'),
'2008': HmdaDataFile('hmda_2008_id_all-records_codes.zip', '108689', '3.03 MB'),
'2009': HmdaDataFile('hmda_2009_id_all-records_codes.zip', '125244', '3.33 MB'),
'2011': HmdaDataFile('hmda_2011_id_all-records_codes.zip', '77672', '2.17 MB'),
'2010': HmdaDataFile('hmda_2010_id_all-records_codes.zip', '94170', '2.57 MB'),
'2013': HmdaDataFile('hmda_2013_id_all-records_codes.zip', '97051', '2.76 MB'),
'2012': HmdaDataFile('hmda_2012_id_all-records_codes.zip', '103766', '2.85 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_id_originated-records_codes.zip', '59151', '1.59 MB'),
'2007': HmdaDataFile('hmda_2007_id_originated-records_codes.zip', '67396', '1.83 MB'),
'2017': HmdaDataFile('hmda_2017_id_originated-records_codes.zip', '54102', '1.11 MB'),
'2015': HmdaDataFile('hmda_2015_id_originated-records_codes.zip', '50554', '1.59 MB'),
'2014': HmdaDataFile('hmda_2014_id_originated-records_codes.zip', '38605', '1.07 MB'),
'2008': HmdaDataFile('hmda_2008_id_originated-records_codes.zip', '46531', '1.29 MB'),
'2009': HmdaDataFile('hmda_2009_id_originated-records_codes.zip', '56985', '1.49 MB'),
'2011': HmdaDataFile('hmda_2011_id_originated-records_codes.zip', '37943', '1.02 MB'),
'2010': HmdaDataFile('hmda_2010_id_originated-records_codes.zip', '44663', '1.14 MB'),
'2013': HmdaDataFile('hmda_2013_id_originated-records_codes.zip', '53109', '1.45 MB'),
'2012': HmdaDataFile('hmda_2012_id_originated-records_codes.zip', '57188', '1.51 MB')
}
}
},
'ct': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '66025', '3.2 MB'),
'2007': HmdaDataFile('hmda_2007_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '89152', '4.26 MB'),
'2017': HmdaDataFile('hmda_2017_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '57398', '1.83 MB'),
'2015': HmdaDataFile('hmda_2015_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '59656', '3.12 MB'),
'2014': HmdaDataFile('hmda_2014_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '49553', '2.54 MB'),
'2008': HmdaDataFile('hmda_2008_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '68658', '3.22 MB'),
'2009': HmdaDataFile('hmda_2009_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '104310', '4.56 MB'),
'2011': HmdaDataFile('hmda_2011_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '78978', '3.93 MB'),
'2010': HmdaDataFile('hmda_2010_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '93797', '4.54 MB'),
'2013': HmdaDataFile('hmda_2013_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '87390', '4.18 MB'),
'2012': HmdaDataFile('hmda_2012_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '105049', '4.95 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ct_all-records_labels.zip', '146885', '7.73 MB'),
'2007': HmdaDataFile('hmda_2007_ct_all-records_labels.zip', '301760', '15.1 MB'),
'2017': HmdaDataFile('hmda_2017_ct_all-records_labels.zip', '129401', '4.77 MB'),
'2015': HmdaDataFile('hmda_2015_ct_all-records_labels.zip', '132491', '7.44 MB'),
'2014': HmdaDataFile('hmda_2014_ct_all-records_labels.zip', '114931', '6.31 MB'),
'2008': HmdaDataFile('hmda_2008_ct_all-records_labels.zip', '193168', '9.76 MB'),
'2009': HmdaDataFile('hmda_2009_ct_all-records_labels.zip', '235997', '11.17 MB'),
'2011': HmdaDataFile('hmda_2011_ct_all-records_labels.zip', '177460', '9.5 MB'),
'2010': HmdaDataFile('hmda_2010_ct_all-records_labels.zip', '204936', '10.79 MB'),
'2013': HmdaDataFile('hmda_2013_ct_all-records_labels.zip', '187158', '9.86 MB'),
'2012': HmdaDataFile('hmda_2012_ct_all-records_labels.zip', '214191', '11.09 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ct_originated-records_labels.zip', '75239', '3.73 MB'),
'2007': HmdaDataFile('hmda_2007_ct_originated-records_labels.zip', '126259', '6.09 MB'),
'2017': HmdaDataFile('hmda_2017_ct_originated-records_labels.zip', '67000', '2.19 MB'),
'2015': HmdaDataFile('hmda_2015_ct_originated-records_labels.zip', '68865', '3.65 MB'),
'2014': HmdaDataFile('hmda_2014_ct_originated-records_labels.zip', '58456', '3.04 MB'),
'2008': HmdaDataFile('hmda_2008_ct_originated-records_labels.zip', '84484', '4.01 MB'),
'2009': HmdaDataFile('hmda_2009_ct_originated-records_labels.zip', '113317', '5.04 MB'),
'2011': HmdaDataFile('hmda_2011_ct_originated-records_labels.zip', '87795', '4.47 MB'),
'2010': HmdaDataFile('hmda_2010_ct_originated-records_labels.zip', '102545', '5.08 MB'),
'2013': HmdaDataFile('hmda_2013_ct_originated-records_labels.zip', '98739', '4.8 MB'),
'2012': HmdaDataFile('hmda_2012_ct_originated-records_labels.zip', '115361', '5.49 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '66025', '2.19 MB'),
'2007': HmdaDataFile('hmda_2007_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '89152', '2.98 MB'),
'2017': HmdaDataFile('hmda_2017_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '57398', '1.3 MB'),
'2015': HmdaDataFile('hmda_2015_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '59656', '2.2 MB'),
'2014': HmdaDataFile('hmda_2014_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '49553', '1.74 MB'),
'2008': HmdaDataFile('hmda_2008_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '68658', '2.25 MB'),
'2009': HmdaDataFile('hmda_2009_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '104310', '3.22 MB'),
'2011': HmdaDataFile('hmda_2011_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '78978', '2.66 MB'),
'2010': HmdaDataFile('hmda_2010_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '93797', '3.09 MB'),
'2013': HmdaDataFile('hmda_2013_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '87390', '2.84 MB'),
'2012': HmdaDataFile('hmda_2012_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '105049', '3.31 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ct_all-records_codes.zip', '146885', '5.17 MB'),
'2007': HmdaDataFile('hmda_2007_ct_all-records_codes.zip', '301760', '10.45 MB'),
'2017': HmdaDataFile('hmda_2017_ct_all-records_codes.zip', '129401', '3.24 MB'),
'2015': HmdaDataFile('hmda_2015_ct_all-records_codes.zip', '132491', '5.05 MB'),
'2014': HmdaDataFile('hmda_2014_ct_all-records_codes.zip', '114931', '4.22 MB'),
'2008': HmdaDataFile('hmda_2008_ct_all-records_codes.zip', '193168', '6.73 MB'),
'2009': HmdaDataFile('hmda_2009_ct_all-records_codes.zip', '235997', '7.73 MB'),
'2011': HmdaDataFile('hmda_2011_ct_all-records_codes.zip', '177460', '6.28 MB'),
'2010': HmdaDataFile('hmda_2010_ct_all-records_codes.zip', '204936', '7.18 MB'),
'2013': HmdaDataFile('hmda_2013_ct_all-records_codes.zip', '187158', '6.56 MB'),
'2012': HmdaDataFile('hmda_2012_ct_all-records_codes.zip', '214191', '7.31 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ct_originated-records_codes.zip', '75239', '2.54 MB'),
'2007': HmdaDataFile('hmda_2007_ct_originated-records_codes.zip', '126259', '4.28 MB'),
'2017': HmdaDataFile('hmda_2017_ct_originated-records_codes.zip', '67000', '1.55 MB'),
'2015': HmdaDataFile('hmda_2015_ct_originated-records_codes.zip', '68865', '2.57 MB'),
'2014': HmdaDataFile('hmda_2014_ct_originated-records_codes.zip', '58456', '2.07 MB'),
'2008': HmdaDataFile('hmda_2008_ct_originated-records_codes.zip', '84484', '2.79 MB'),
'2009': HmdaDataFile('hmda_2009_ct_originated-records_codes.zip', '113317', '3.55 MB'),
'2011': HmdaDataFile('hmda_2011_ct_originated-records_codes.zip', '87795', '3.04 MB'),
'2010': HmdaDataFile('hmda_2010_ct_originated-records_codes.zip', '102545', '3.46 MB'),
'2013': HmdaDataFile('hmda_2013_ct_originated-records_codes.zip', '98739', '3.24 MB'),
'2012': HmdaDataFile('hmda_2012_ct_originated-records_codes.zip', '115361', '3.66 MB')
}
}
},
'nh': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '32293', '1.45 MB'),
'2007': HmdaDataFile('hmda_2007_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '30786', '1.35 MB'),
'2017': HmdaDataFile('hmda_2017_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '28246', '825.86 KB'),
'2015': HmdaDataFile('hmda_2015_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '27470', '1.33 MB'),
'2014': HmdaDataFile('hmda_2014_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '21418', '953.08 KB'),
'2008': HmdaDataFile('hmda_2008_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '25186', '1.1 MB'),
'2009': HmdaDataFile('hmda_2009_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '40099', '1.65 MB'),
'2011': HmdaDataFile('hmda_2011_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '30214', '1.28 MB'),
'2010': HmdaDataFile('hmda_2010_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '35463', '1.49 MB'),
'2013': HmdaDataFile('hmda_2013_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '34908', '1.55 MB'),
'2012': HmdaDataFile('hmda_2012_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '41909', '1.8 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nh_all-records_labels.zip', '72628', '3.51 MB'),
'2007': HmdaDataFile('hmda_2007_nh_all-records_labels.zip', '116500', '5.13 MB'),
'2017': HmdaDataFile('hmda_2017_nh_all-records_labels.zip', '65016', '2 MB'),
'2015': HmdaDataFile('hmda_2015_nh_all-records_labels.zip', '63482', '3.36 MB'),
'2014': HmdaDataFile('hmda_2014_nh_all-records_labels.zip', '53373', '2.55 MB'),
'2008': HmdaDataFile('hmda_2008_nh_all-records_labels.zip', '78591', '3.53 MB'),
'2009': HmdaDataFile('hmda_2009_nh_all-records_labels.zip', '100680', '4.29 MB'),
'2011': HmdaDataFile('hmda_2011_nh_all-records_labels.zip', '75090', '3.48 MB'),
'2010': HmdaDataFile('hmda_2010_nh_all-records_labels.zip', '85990', '3.93 MB'),
'2013': HmdaDataFile('hmda_2013_nh_all-records_labels.zip', '80737', '3.88 MB'),
'2012': HmdaDataFile('hmda_2012_nh_all-records_labels.zip', '92574', '4.33 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nh_originated-records_labels.zip', '38262', '1.76 MB'),
'2007': HmdaDataFile('hmda_2007_nh_originated-records_labels.zip', '46139', '2.02 MB'),
'2017': HmdaDataFile('hmda_2017_nh_originated-records_labels.zip', '34290', '1.02 MB'),
'2015': HmdaDataFile('hmda_2015_nh_originated-records_labels.zip', '33120', '1.64 MB'),
'2014': HmdaDataFile('hmda_2014_nh_originated-records_labels.zip', '26589', '1.21 MB'),
'2008': HmdaDataFile('hmda_2008_nh_originated-records_labels.zip', '32850', '1.46 MB'),
'2009': HmdaDataFile('hmda_2009_nh_originated-records_labels.zip', '45790', '1.91 MB'),
'2011': HmdaDataFile('hmda_2011_nh_originated-records_labels.zip', '35321', '1.53 MB'),
'2010': HmdaDataFile('hmda_2010_nh_originated-records_labels.zip', '40696', '1.75 MB'),
'2013': HmdaDataFile('hmda_2013_nh_originated-records_labels.zip', '41589', '1.88 MB'),
'2012': HmdaDataFile('hmda_2012_nh_originated-records_labels.zip', '48098', '2.12 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '32293', '929.81 KB'),
'2007': HmdaDataFile('hmda_2007_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '30786', '870.93 KB'),
'2017': HmdaDataFile('hmda_2017_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '28246', '579.48 KB'),
'2015': HmdaDataFile('hmda_2015_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '27470', '880.79 KB'),
'2014': HmdaDataFile('hmda_2014_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '21418', '605.7 KB'),
'2008': HmdaDataFile('hmda_2008_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '25186', '724.38 KB'),
'2009': HmdaDataFile('hmda_2009_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '40099', '1.1 MB'),
'2011': HmdaDataFile('hmda_2011_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '30214', '801.84 KB'),
'2010': HmdaDataFile('hmda_2010_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '35463', '934.66 KB'),
'2013': HmdaDataFile('hmda_2013_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '34908', '991.18 KB'),
'2012': HmdaDataFile('hmda_2012_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '41909', '1.14 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nh_all-records_codes.zip', '72628', '2.16 MB'),
'2007': HmdaDataFile('hmda_2007_nh_all-records_codes.zip', '116500', '3.28 MB'),
'2017': HmdaDataFile('hmda_2017_nh_all-records_codes.zip', '65016', '1.3 MB'),
'2015': HmdaDataFile('hmda_2015_nh_all-records_codes.zip', '63482', '2.15 MB'),
'2014': HmdaDataFile('hmda_2014_nh_all-records_codes.zip', '53373', '1.56 MB'),
'2008': HmdaDataFile('hmda_2008_nh_all-records_codes.zip', '78591', '2.27 MB'),
'2009': HmdaDataFile('hmda_2009_nh_all-records_codes.zip', '100680', '2.81 MB'),
'2011': HmdaDataFile('hmda_2011_nh_all-records_codes.zip', '75090', '2.12 MB'),
'2010': HmdaDataFile('hmda_2010_nh_all-records_codes.zip', '85990', '2.4 MB'),
'2013': HmdaDataFile('hmda_2013_nh_all-records_codes.zip', '80737', '2.4 MB'),
'2012': HmdaDataFile('hmda_2012_nh_all-records_codes.zip', '92574', '2.66 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nh_originated-records_codes.zip', '38262', '1.12 MB'),
'2007': HmdaDataFile('hmda_2007_nh_originated-records_codes.zip', '46139', '1.31 MB'),
'2017': HmdaDataFile('hmda_2017_nh_originated-records_codes.zip', '34290', '702.18 KB'),
'2015': HmdaDataFile('hmda_2015_nh_originated-records_codes.zip', '33120', '1.08 MB'),
'2014': HmdaDataFile('hmda_2014_nh_originated-records_codes.zip', '26589', '760.91 KB'),
'2008': HmdaDataFile('hmda_2008_nh_originated-records_codes.zip', '32850', '959.54 KB'),
'2009': HmdaDataFile('hmda_2009_nh_originated-records_codes.zip', '45790', '1.27 MB'),
'2011': HmdaDataFile('hmda_2011_nh_originated-records_codes.zip', '35321', '955.91 KB'),
'2010': HmdaDataFile('hmda_2010_nh_originated-records_codes.zip', '40696', '1.09 MB'),
'2013': HmdaDataFile('hmda_2013_nh_originated-records_codes.zip', '41589', '1.2 MB'),
'2012': HmdaDataFile('hmda_2012_nh_originated-records_codes.zip', '48098', '1.33 MB')
}
}
},
'nj': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '168437', '8.44 MB'),
'2007': HmdaDataFile('hmda_2007_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '222771', '10.95 MB'),
'2017': HmdaDataFile('hmda_2017_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '141365', '4.57 MB'),
'2015': HmdaDataFile('hmda_2015_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '147544', '8.22 MB'),
'2014': HmdaDataFile('hmda_2014_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '113481', '5.78 MB'),
'2008': HmdaDataFile('hmda_2008_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '166597', '8.06 MB'),
'2009': HmdaDataFile('hmda_2009_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '247580', '11.18 MB'),
'2011': HmdaDataFile('hmda_2011_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '184520', '9.28 MB'),
'2010': HmdaDataFile('hmda_2010_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '206564', '10.4 MB'),
'2013': HmdaDataFile('hmda_2013_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '194532', '9.73 MB'),
'2012': HmdaDataFile('hmda_2012_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '237169', '11.87 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nj_all-records_labels.zip', '399389', '21.26 MB'),
'2007': HmdaDataFile('hmda_2007_nj_all-records_labels.zip', '808103', '39.11 MB'),
'2017': HmdaDataFile('hmda_2017_nj_all-records_labels.zip', '349563', '12.38 MB'),
'2015': HmdaDataFile('hmda_2015_nj_all-records_labels.zip', '354746', '21.29 MB'),
'2014': HmdaDataFile('hmda_2014_nj_all-records_labels.zip', '289377', '15.65 MB'),
'2008': HmdaDataFile('hmda_2008_nj_all-records_labels.zip', '514816', '25.05 MB'),
'2009': HmdaDataFile('hmda_2009_nj_all-records_labels.zip', '613066', '28.18 MB'),
'2011': HmdaDataFile('hmda_2011_nj_all-records_labels.zip', '451221', '24.28 MB'),
'2010': HmdaDataFile('hmda_2010_nj_all-records_labels.zip', '499489', '27 MB'),
'2013': HmdaDataFile('hmda_2013_nj_all-records_labels.zip', '460264', '24.64 MB'),
'2012': HmdaDataFile('hmda_2012_nj_all-records_labels.zip', '541802', '29.13 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nj_originated-records_labels.zip', '194913', '9.86 MB'),
'2007': HmdaDataFile('hmda_2007_nj_originated-records_labels.zip', '317276', '15.57 MB'),
'2017': HmdaDataFile('hmda_2017_nj_originated-records_labels.zip', '169196', '5.6 MB'),
'2015': HmdaDataFile('hmda_2015_nj_originated-records_labels.zip', '171685', '9.68 MB'),
'2014': HmdaDataFile('hmda_2014_nj_originated-records_labels.zip', '135675', '6.98 MB'),
'2008': HmdaDataFile('hmda_2008_nj_originated-records_labels.zip', '208721', '10.17 MB'),
'2009': HmdaDataFile('hmda_2009_nj_originated-records_labels.zip', '274489', '12.53 MB'),
'2011': HmdaDataFile('hmda_2011_nj_originated-records_labels.zip', '210948', '10.71 MB'),
'2010': HmdaDataFile('hmda_2010_nj_originated-records_labels.zip', '232001', '11.8 MB'),
'2013': HmdaDataFile('hmda_2013_nj_originated-records_labels.zip', '226181', '11.43 MB'),
'2012': HmdaDataFile('hmda_2012_nj_originated-records_labels.zip', '269377', '13.64 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '168437', '6.08 MB'),
'2007': HmdaDataFile('hmda_2007_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '222771', '7.81 MB'),
'2017': HmdaDataFile('hmda_2017_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '141365', '3.22 MB'),
'2015': HmdaDataFile('hmda_2015_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '147544', '5.93 MB'),
'2014': HmdaDataFile('hmda_2014_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '113481', '4.17 MB'),
'2008': HmdaDataFile('hmda_2008_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '166597', '5.77 MB'),
'2009': HmdaDataFile('hmda_2009_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '247580', '8.06 MB'),
'2011': HmdaDataFile('hmda_2011_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '184520', '6.49 MB'),
'2010': HmdaDataFile('hmda_2010_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '206564', '7.36 MB'),
'2013': HmdaDataFile('hmda_2013_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '194532', '6.89 MB'),
'2012': HmdaDataFile('hmda_2012_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '237169', '8.4 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nj_all-records_codes.zip', '399389', '14.78 MB'),
'2007': HmdaDataFile('hmda_2007_nj_all-records_codes.zip', '808103', '27.04 MB'),
'2017': HmdaDataFile('hmda_2017_nj_all-records_codes.zip', '349563', '8.1 MB'),
'2015': HmdaDataFile('hmda_2015_nj_all-records_codes.zip', '354746', '14.72 MB'),
'2014': HmdaDataFile('hmda_2014_nj_all-records_codes.zip', '289377', '10.83 MB'),
'2008': HmdaDataFile('hmda_2008_nj_all-records_codes.zip', '514816', '17.43 MB'),
'2009': HmdaDataFile('hmda_2009_nj_all-records_codes.zip', '613066', '19.76 MB'),
'2011': HmdaDataFile('hmda_2011_nj_all-records_codes.zip', '451221', '16.47 MB'),
'2010': HmdaDataFile('hmda_2010_nj_all-records_codes.zip', '499489', '18.44 MB'),
'2013': HmdaDataFile('hmda_2013_nj_all-records_codes.zip', '460264', '16.86 MB'),
'2012': HmdaDataFile('hmda_2012_nj_all-records_codes.zip', '541802', '19.93 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nj_originated-records_codes.zip', '194913', '7.06 MB'),
'2007': HmdaDataFile('hmda_2007_nj_originated-records_codes.zip', '317276', '11.05 MB'),
'2017': HmdaDataFile('hmda_2017_nj_originated-records_codes.zip', '169196', '3.91 MB'),
'2015': HmdaDataFile('hmda_2015_nj_originated-records_codes.zip', '171685', '6.93 MB'),
'2014': HmdaDataFile('hmda_2014_nj_originated-records_codes.zip', '135675', '5 MB'),
'2008': HmdaDataFile('hmda_2008_nj_originated-records_codes.zip', '208721', '7.24 MB'),
'2009': HmdaDataFile('hmda_2009_nj_originated-records_codes.zip', '274489', '9.01 MB'),
'2011': HmdaDataFile('hmda_2011_nj_originated-records_codes.zip', '210948', '7.44 MB'),
'2010': HmdaDataFile('hmda_2010_nj_originated-records_codes.zip', '232001', '8.28 MB'),
'2013': HmdaDataFile('hmda_2013_nj_originated-records_codes.zip', '226181', '8.05 MB'),
'2012': HmdaDataFile('hmda_2012_nj_originated-records_codes.zip', '269377', '9.59 MB')
}
}
},
'nm': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '32653', '1.47 MB'),
'2007': HmdaDataFile('hmda_2007_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '47002', '2.07 MB'),
'2017': HmdaDataFile('hmda_2017_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '28982', '924.59 KB'),
'2015': HmdaDataFile('hmda_2015_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '29038', '1.45 MB'),
'2014': HmdaDataFile('hmda_2014_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '24491', '1.13 MB'),
'2008': HmdaDataFile('hmda_2008_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '35421', '1.54 MB'),
'2009': HmdaDataFile('hmda_2009_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '45147', '1.83 MB'),
'2011': HmdaDataFile('hmda_2011_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '31586', '1.35 MB'),
'2010': HmdaDataFile('hmda_2010_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '36691', '1.68 MB'),
'2013': HmdaDataFile('hmda_2013_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '38141', '1.77 MB'),
'2012': HmdaDataFile('hmda_2012_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '42629', '1.91 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nm_all-records_labels.zip', '89390', '4.48 MB'),
'2007': HmdaDataFile('hmda_2007_nm_all-records_labels.zip', '178911', '8.19 MB'),
'2017': HmdaDataFile('hmda_2017_nm_all-records_labels.zip', '80423', '2.86 MB'),
'2015': HmdaDataFile('hmda_2015_nm_all-records_labels.zip', '79236', '4.37 MB'),
'2014': HmdaDataFile('hmda_2014_nm_all-records_labels.zip', '71841', '3.67 MB'),
'2008': HmdaDataFile('hmda_2008_nm_all-records_labels.zip', '114678', '5.3 MB'),
'2009': HmdaDataFile('hmda_2009_nm_all-records_labels.zip', '123495', '5.37 MB'),
'2011': HmdaDataFile('hmda_2011_nm_all-records_labels.zip', '90785', '4.39 MB'),
'2010': HmdaDataFile('hmda_2010_nm_all-records_labels.zip', '104045', '5.33 MB'),
'2013': HmdaDataFile('hmda_2013_nm_all-records_labels.zip', '102217', '5.23 MB'),
'2012': HmdaDataFile('hmda_2012_nm_all-records_labels.zip', '108074', '5.4 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nm_originated-records_labels.zip', '41334', '1.93 MB'),
'2007': HmdaDataFile('hmda_2007_nm_originated-records_labels.zip', '69876', '3.15 MB'),
'2017': HmdaDataFile('hmda_2017_nm_originated-records_labels.zip', '37465', '1.24 MB'),
'2015': HmdaDataFile('hmda_2015_nm_originated-records_labels.zip', '37572', '1.93 MB'),
'2014': HmdaDataFile('hmda_2014_nm_originated-records_labels.zip', '32547', '1.57 MB'),
'2008': HmdaDataFile('hmda_2008_nm_originated-records_labels.zip', '46512', '2.09 MB'),
'2009': HmdaDataFile('hmda_2009_nm_originated-records_labels.zip', '54007', '2.27 MB'),
'2011': HmdaDataFile('hmda_2011_nm_originated-records_labels.zip', '39979', '1.8 MB'),
'2010': HmdaDataFile('hmda_2010_nm_originated-records_labels.zip', '45261', '2.12 MB'),
'2013': HmdaDataFile('hmda_2013_nm_originated-records_labels.zip', '49389', '2.34 MB'),
'2012': HmdaDataFile('hmda_2012_nm_originated-records_labels.zip', '53038', '2.41 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '32653', '980.97 KB'),
'2007': HmdaDataFile('hmda_2007_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '47002', '1.42 MB'),
'2017': HmdaDataFile('hmda_2017_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '28982', '644.64 KB'),
'2015': HmdaDataFile('hmda_2015_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '29038', '989.53 KB'),
'2014': HmdaDataFile('hmda_2014_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '24491', '759.74 KB'),
'2008': HmdaDataFile('hmda_2008_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '35421', '1.05 MB'),
'2009': HmdaDataFile('hmda_2009_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '45147', '1.26 MB'),
'2011': HmdaDataFile('hmda_2011_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '31586', '892.04 KB'),
'2010': HmdaDataFile('hmda_2010_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '36691', '1.12 MB'),
'2013': HmdaDataFile('hmda_2013_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '38141', '1.18 MB'),
'2012': HmdaDataFile('hmda_2012_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '42629', '1.26 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nm_all-records_codes.zip', '89390', '2.89 MB'),
'2007': HmdaDataFile('hmda_2007_nm_all-records_codes.zip', '178911', '5.48 MB'),
'2017': HmdaDataFile('hmda_2017_nm_all-records_codes.zip', '80423', '1.88 MB'),
'2015': HmdaDataFile('hmda_2015_nm_all-records_codes.zip', '79236', '2.87 MB'),
'2014': HmdaDataFile('hmda_2014_nm_all-records_codes.zip', '71841', '2.38 MB'),
'2008': HmdaDataFile('hmda_2008_nm_all-records_codes.zip', '114678', '3.55 MB'),
'2009': HmdaDataFile('hmda_2009_nm_all-records_codes.zip', '123495', '3.6 MB'),
'2011': HmdaDataFile('hmda_2011_nm_all-records_codes.zip', '90785', '2.81 MB'),
'2010': HmdaDataFile('hmda_2010_nm_all-records_codes.zip', '104045', '3.42 MB'),
'2013': HmdaDataFile('hmda_2013_nm_all-records_codes.zip', '102217', '3.39 MB'),
'2012': HmdaDataFile('hmda_2012_nm_all-records_codes.zip', '108074', '3.48 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nm_originated-records_codes.zip', '41334', '1.3 MB'),
'2007': HmdaDataFile('hmda_2007_nm_originated-records_codes.zip', '69876', '2.15 MB'),
'2017': HmdaDataFile('hmda_2017_nm_originated-records_codes.zip', '37465', '864.54 KB'),
'2015': HmdaDataFile('hmda_2015_nm_originated-records_codes.zip', '37572', '1.32 MB'),
'2014': HmdaDataFile('hmda_2014_nm_originated-records_codes.zip', '32547', '1.05 MB'),
'2008': HmdaDataFile('hmda_2008_nm_originated-records_codes.zip', '46512', '1.43 MB'),
'2009': HmdaDataFile('hmda_2009_nm_originated-records_codes.zip', '54007', '1.56 MB'),
'2011': HmdaDataFile('hmda_2011_nm_originated-records_codes.zip', '39979', '1.19 MB'),
'2010': HmdaDataFile('hmda_2010_nm_originated-records_codes.zip', '45261', '1.4 MB'),
'2013': HmdaDataFile('hmda_2013_nm_originated-records_codes.zip', '49389', '1.56 MB'),
'2012': HmdaDataFile('hmda_2012_nm_originated-records_codes.zip', '53038', '1.59 MB')
}
}
},
'tx': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '513200', '26.59 MB'),
'2007': HmdaDataFile('hmda_2007_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '454756', '22.34 MB'),
'2017': HmdaDataFile('hmda_2017_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '457884', '15.35 MB'),
'2015': HmdaDataFile('hmda_2015_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '460596', '26.85 MB'),
'2014': HmdaDataFile('hmda_2014_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '391921', '20.38 MB'),
'2008': HmdaDataFile('hmda_2008_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '364311', '17.57 MB'),
'2009': HmdaDataFile('hmda_2009_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '448222', '20.5 MB'),
'2011': HmdaDataFile('hmda_2011_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '391184', '19.56 MB'),
'2010': HmdaDataFile('hmda_2010_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '405050', '20.17 MB'),
'2013': HmdaDataFile('hmda_2013_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '503627', '26.39 MB'),
'2012': HmdaDataFile('hmda_2012_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '502655', '26.28 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_tx_all-records_labels.zip', '1266767', '70.12 MB'),
'2007': HmdaDataFile('hmda_2007_tx_all-records_labels.zip', '1723576', '83.63 MB'),
'2017': HmdaDataFile('hmda_2017_tx_all-records_labels.zip', '1148206', '42.77 MB'),
'2015': HmdaDataFile('hmda_2015_tx_all-records_labels.zip', '1139573', '71.35 MB'),
'2014': HmdaDataFile('hmda_2014_tx_all-records_labels.zip', '1011598', '55.94 MB'),
'2008': HmdaDataFile('hmda_2008_tx_all-records_labels.zip', '1204457', '58.59 MB'),
'2009': HmdaDataFile('hmda_2009_tx_all-records_labels.zip', '1242037', '57.86 MB'),
'2011': HmdaDataFile('hmda_2011_tx_all-records_labels.zip', '1038591', '56.64 MB'),
'2010': HmdaDataFile('hmda_2010_tx_all-records_labels.zip', '1063486', '58.04 MB'),
'2013': HmdaDataFile('hmda_2013_tx_all-records_labels.zip', '1254738', '70.8 MB'),
'2012': HmdaDataFile('hmda_2012_tx_all-records_labels.zip', '1221801', '69.13 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_tx_originated-records_labels.zip', '613325', '32.57 MB'),
'2007': HmdaDataFile('hmda_2007_tx_originated-records_labels.zip', '653817', '32.47 MB'),
'2017': HmdaDataFile('hmda_2017_tx_originated-records_labels.zip', '559492', '19.3 MB'),
'2015': HmdaDataFile('hmda_2015_tx_originated-records_labels.zip', '557266', '33.01 MB'),
'2014': HmdaDataFile('hmda_2014_tx_originated-records_labels.zip', '484747', '25.75 MB'),
'2008': HmdaDataFile('hmda_2008_tx_originated-records_labels.zip', '473701', '23.41 MB'),
'2009': HmdaDataFile('hmda_2009_tx_originated-records_labels.zip', '520422', '24.41 MB'),
'2011': HmdaDataFile('hmda_2011_tx_originated-records_labels.zip', '466338', '24 MB'),
'2010': HmdaDataFile('hmda_2010_tx_originated-records_labels.zip', '476566', '24.45 MB'),
'2013': HmdaDataFile('hmda_2013_tx_originated-records_labels.zip', '611180', '32.38 MB'),
'2012': HmdaDataFile('hmda_2012_tx_originated-records_labels.zip', '594151', '31.5 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '513200', '18.9 MB'),
'2007': HmdaDataFile('hmda_2007_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '454756', '16.07 MB'),
'2017': HmdaDataFile('hmda_2017_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '457884', '10.67 MB'),
'2015': HmdaDataFile('hmda_2015_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '460596', '19.03 MB'),
'2014': HmdaDataFile('hmda_2014_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '391921', '14.47 MB'),
'2008': HmdaDataFile('hmda_2008_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '364311', '12.62 MB'),
'2009': HmdaDataFile('hmda_2009_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '448222', '14.81 MB'),
'2011': HmdaDataFile('hmda_2011_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '391184', '13.52 MB'),
'2010': HmdaDataFile('hmda_2010_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '405050', '13.93 MB'),
'2013': HmdaDataFile('hmda_2013_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '503627', '18.83 MB'),
'2012': HmdaDataFile('hmda_2012_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '502655', '18.62 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_tx_all-records_codes.zip', '1266767', '48.14 MB'),
'2007': HmdaDataFile('hmda_2007_tx_all-records_codes.zip', '1723576', '58.17 MB'),
'2017': HmdaDataFile('hmda_2017_tx_all-records_codes.zip', '1148206', '27.41 MB'),
'2015': HmdaDataFile('hmda_2015_tx_all-records_codes.zip', '1139573', '48.5 MB'),
'2014': HmdaDataFile('hmda_2014_tx_all-records_codes.zip', '1011598', '38.26 MB'),
'2008': HmdaDataFile('hmda_2008_tx_all-records_codes.zip', '1204457', '40.77 MB'),
'2009': HmdaDataFile('hmda_2009_tx_all-records_codes.zip', '1242037', '40.49 MB'),
'2011': HmdaDataFile('hmda_2011_tx_all-records_codes.zip', '1038591', '38.13 MB'),
'2010': HmdaDataFile('hmda_2010_tx_all-records_codes.zip', '1063486', '39.1 MB'),
'2013': HmdaDataFile('hmda_2013_tx_all-records_codes.zip', '1254738', '48.99 MB'),
'2012': HmdaDataFile('hmda_2012_tx_all-records_codes.zip', '1221801', '47.68 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_tx_originated-records_codes.zip', '613325', '23.08 MB'),
'2007': HmdaDataFile('hmda_2007_tx_originated-records_codes.zip', '653817', '23.29 MB'),
'2017': HmdaDataFile('hmda_2017_tx_originated-records_codes.zip', '559492', '13.3 MB'),
'2015': HmdaDataFile('hmda_2015_tx_originated-records_codes.zip', '557266', '23.23 MB'),
'2014': HmdaDataFile('hmda_2014_tx_originated-records_codes.zip', '484747', '18.19 MB'),
'2008': HmdaDataFile('hmda_2008_tx_originated-records_codes.zip', '473701', '16.77 MB'),
'2009': HmdaDataFile('hmda_2009_tx_originated-records_codes.zip', '520422', '17.57 MB'),
'2011': HmdaDataFile('hmda_2011_tx_originated-records_codes.zip', '466338', '16.56 MB'),
'2010': HmdaDataFile('hmda_2010_tx_originated-records_codes.zip', '476566', '16.85 MB'),
'2013': HmdaDataFile('hmda_2013_tx_originated-records_codes.zip', '611180', '22.91 MB'),
'2012': HmdaDataFile('hmda_2012_tx_originated-records_codes.zip', '594151', '22.16 MB')
}
}
},
'la': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '72627', '3.46 MB'),
'2007': HmdaDataFile('hmda_2007_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '86710', '4.14 MB'),
'2017': HmdaDataFile('hmda_2017_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '61729', '1.89 MB'),
'2015': HmdaDataFile('hmda_2015_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '67942', '3.69 MB'),
'2014': HmdaDataFile('hmda_2014_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '59398', '2.86 MB'),
'2008': HmdaDataFile('hmda_2008_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '69191', '3.26 MB'),
'2009': HmdaDataFile('hmda_2009_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '81760', '3.74 MB'),
'2011': HmdaDataFile('hmda_2011_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '69735', '3.2 MB'),
'2010': HmdaDataFile('hmda_2010_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '75645', '3.55 MB'),
'2013': HmdaDataFile('hmda_2013_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '77713', '3.68 MB'),
'2012': HmdaDataFile('hmda_2012_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '86148', '4.02 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_la_all-records_labels.zip', '195937', '10.16 MB'),
'2007': HmdaDataFile('hmda_2007_la_all-records_labels.zip', '300738', '14.74 MB'),
'2017': HmdaDataFile('hmda_2017_la_all-records_labels.zip', '173079', '6.06 MB'),
'2015': HmdaDataFile('hmda_2015_la_all-records_labels.zip', '180533', '10.55 MB'),
'2014': HmdaDataFile('hmda_2014_la_all-records_labels.zip', '170514', '8.9 MB'),
'2008': HmdaDataFile('hmda_2008_la_all-records_labels.zip', '221773', '11.04 MB'),
'2009': HmdaDataFile('hmda_2009_la_all-records_labels.zip', '231242', '11.32 MB'),
'2011': HmdaDataFile('hmda_2011_la_all-records_labels.zip', '210644', '10.99 MB'),
'2010': HmdaDataFile('hmda_2010_la_all-records_labels.zip', '214403', '11.27 MB'),
'2013': HmdaDataFile('hmda_2013_la_all-records_labels.zip', '219546', '11.6 MB'),
'2012': HmdaDataFile('hmda_2012_la_all-records_labels.zip', '231946', '12.12 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_la_originated-records_labels.zip', '94261', '4.64 MB'),
'2007': HmdaDataFile('hmda_2007_la_originated-records_labels.zip', '126278', '6.16 MB'),
'2017': HmdaDataFile('hmda_2017_la_originated-records_labels.zip', '83323', '2.62 MB'),
'2015': HmdaDataFile('hmda_2015_la_originated-records_labels.zip', '89327', '4.95 MB'),
'2014': HmdaDataFile('hmda_2014_la_originated-records_labels.zip', '81230', '4.06 MB'),
'2008': HmdaDataFile('hmda_2008_la_originated-records_labels.zip', '97778', '4.77 MB'),
'2009': HmdaDataFile('hmda_2009_la_originated-records_labels.zip', '103928', '4.89 MB'),
'2011': HmdaDataFile('hmda_2011_la_originated-records_labels.zip', '90349', '4.38 MB'),
'2010': HmdaDataFile('hmda_2010_la_originated-records_labels.zip', '96150', '4.64 MB'),
'2013': HmdaDataFile('hmda_2013_la_originated-records_labels.zip', '101849', '4.89 MB'),
'2012': HmdaDataFile('hmda_2012_la_originated-records_labels.zip', '107636', '5.1 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '72627', '2.33 MB'),
'2007': HmdaDataFile('hmda_2007_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '86710', '2.79 MB'),
'2017': HmdaDataFile('hmda_2017_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '61729', '1.36 MB'),
'2015': HmdaDataFile('hmda_2015_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '67942', '2.53 MB'),
'2014': HmdaDataFile('hmda_2014_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '59398', '1.93 MB'),
'2008': HmdaDataFile('hmda_2008_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '69191', '2.23 MB'),
'2009': HmdaDataFile('hmda_2009_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '81760', '2.59 MB'),
'2011': HmdaDataFile('hmda_2011_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '69735', '2.12 MB'),
'2010': HmdaDataFile('hmda_2010_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '75645', '2.36 MB'),
'2013': HmdaDataFile('hmda_2013_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '77713', '2.44 MB'),
'2012': HmdaDataFile('hmda_2012_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '86148', '2.66 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_la_all-records_codes.zip', '195937', '6.65 MB'),
'2007': HmdaDataFile('hmda_2007_la_all-records_codes.zip', '300738', '9.84 MB'),
'2017': HmdaDataFile('hmda_2017_la_all-records_codes.zip', '173079', '4 MB'),
'2015': HmdaDataFile('hmda_2015_la_all-records_codes.zip', '180533', '6.97 MB'),
'2014': HmdaDataFile('hmda_2014_la_all-records_codes.zip', '170514', '5.81 MB'),
'2008': HmdaDataFile('hmda_2008_la_all-records_codes.zip', '221773', '7.43 MB'),
'2009': HmdaDataFile('hmda_2009_la_all-records_codes.zip', '231242', '7.74 MB'),
'2011': HmdaDataFile('hmda_2011_la_all-records_codes.zip', '210644', '7.15 MB'),
'2010': HmdaDataFile('hmda_2010_la_all-records_codes.zip', '214403', '7.36 MB'),
'2013': HmdaDataFile('hmda_2013_la_all-records_codes.zip', '219546', '7.57 MB'),
'2012': HmdaDataFile('hmda_2012_la_all-records_codes.zip', '231946', '7.91 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_la_originated-records_codes.zip', '94261', '3.12 MB'),
'2007': HmdaDataFile('hmda_2007_la_originated-records_codes.zip', '126278', '4.18 MB'),
'2017': HmdaDataFile('hmda_2017_la_originated-records_codes.zip', '83323', '1.86 MB'),
'2015': HmdaDataFile('hmda_2015_la_originated-records_codes.zip', '89327', '3.37 MB'),
'2014': HmdaDataFile('hmda_2014_la_originated-records_codes.zip', '81230', '2.72 MB'),
'2008': HmdaDataFile('hmda_2008_la_originated-records_codes.zip', '97778', '3.29 MB'),
'2009': HmdaDataFile('hmda_2009_la_originated-records_codes.zip', '103928', '3.38 MB'),
'2011': HmdaDataFile('hmda_2011_la_originated-records_codes.zip', '90349', '2.9 MB'),
'2010': HmdaDataFile('hmda_2010_la_originated-records_codes.zip', '96150', '3.06 MB'),
'2013': HmdaDataFile('hmda_2013_la_originated-records_codes.zip', '101849', '3.22 MB'),
'2012': HmdaDataFile('hmda_2012_la_originated-records_codes.zip', '107636', '3.34 MB')
}
}
},
'wa': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '225231', '11.2 MB'),
'2007': HmdaDataFile('hmda_2007_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '215619', '10.08 MB'),
'2017': HmdaDataFile('hmda_2017_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '191532', '6.03 MB'),
'2015': HmdaDataFile('hmda_2015_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '183367', '10.31 MB'),
'2014': HmdaDataFile('hmda_2014_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '133913', '6.66 MB'),
'2008': HmdaDataFile('hmda_2008_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '165363', '7.57 MB'),
'2009': HmdaDataFile('hmda_2009_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '255387', '10.98 MB'),
'2011': HmdaDataFile('hmda_2011_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '174449', '8.42 MB'),
'2010': HmdaDataFile('hmda_2010_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '211008', '10.2 MB'),
'2013': HmdaDataFile('hmda_2013_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '201071', '10.04 MB'),
'2012': HmdaDataFile('hmda_2012_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '245095', '11.94 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wa_all-records_labels.zip', '466566', '24.68 MB'),
'2007': HmdaDataFile('hmda_2007_wa_all-records_labels.zip', '722481', '33.39 MB'),
'2017': HmdaDataFile('hmda_2017_wa_all-records_labels.zip', '402196', '13.8 MB'),
'2015': HmdaDataFile('hmda_2015_wa_all-records_labels.zip', '387805', '23.58 MB'),
'2014': HmdaDataFile('hmda_2014_wa_all-records_labels.zip', '311425', '16.4 MB'),
'2008': HmdaDataFile('hmda_2008_wa_all-records_labels.zip', '485622', '22.64 MB'),
'2009': HmdaDataFile('hmda_2009_wa_all-records_labels.zip', '590758', '25.93 MB'),
'2011': HmdaDataFile('hmda_2011_wa_all-records_labels.zip', '406149', '20.95 MB'),
'2010': HmdaDataFile('hmda_2010_wa_all-records_labels.zip', '473922', '24.62 MB'),
'2013': HmdaDataFile('hmda_2013_wa_all-records_labels.zip', '448753', '23.93 MB'),
'2012': HmdaDataFile('hmda_2012_wa_all-records_labels.zip', '519479', '27.08 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wa_originated-records_labels.zip', '263712', '13.26 MB'),
'2007': HmdaDataFile('hmda_2007_wa_originated-records_labels.zip', '310267', '14.49 MB'),
'2017': HmdaDataFile('hmda_2017_wa_originated-records_labels.zip', '230076', '7.35 MB'),
'2015': HmdaDataFile('hmda_2015_wa_originated-records_labels.zip', '218772', '12.48 MB'),
'2014': HmdaDataFile('hmda_2014_wa_originated-records_labels.zip', '166279', '8.37 MB'),
'2008': HmdaDataFile('hmda_2008_wa_originated-records_labels.zip', '207841', '9.62 MB'),
'2009': HmdaDataFile('hmda_2009_wa_originated-records_labels.zip', '286416', '12.41 MB'),
'2011': HmdaDataFile('hmda_2011_wa_originated-records_labels.zip', '205550', '10.07 MB'),
'2010': HmdaDataFile('hmda_2010_wa_originated-records_labels.zip', '241128', '11.8 MB'),
'2013': HmdaDataFile('hmda_2013_wa_originated-records_labels.zip', '246580', '12.48 MB'),
'2012': HmdaDataFile('hmda_2012_wa_originated-records_labels.zip', '287748', '14.21 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '225231', '7.74 MB'),
'2007': HmdaDataFile('hmda_2007_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '215619', '7.08 MB'),
'2017': HmdaDataFile('hmda_2017_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '191532', '4.12 MB'),
'2015': HmdaDataFile('hmda_2015_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '183367', '7.25 MB'),
'2014': HmdaDataFile('hmda_2014_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '133913', '4.63 MB'),
'2008': HmdaDataFile('hmda_2008_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '165363', '5.29 MB'),
'2009': HmdaDataFile('hmda_2009_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '255387', '7.8 MB'),
'2011': HmdaDataFile('hmda_2011_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '174449', '5.8 MB'),
'2010': HmdaDataFile('hmda_2010_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '211008', '6.97 MB'),
'2013': HmdaDataFile('hmda_2013_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '201071', '6.95 MB'),
'2012': HmdaDataFile('hmda_2012_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '245095', '8.25 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wa_all-records_codes.zip', '466566', '16.4 MB'),
'2007': HmdaDataFile('hmda_2007_wa_all-records_codes.zip', '722481', '22.74 MB'),
'2017': HmdaDataFile('hmda_2017_wa_all-records_codes.zip', '402196', '8.75 MB'),
'2015': HmdaDataFile('hmda_2015_wa_all-records_codes.zip', '387805', '15.87 MB'),
'2014': HmdaDataFile('hmda_2014_wa_all-records_codes.zip', '311425', '10.92 MB'),
'2008': HmdaDataFile('hmda_2008_wa_all-records_codes.zip', '485622', '15.32 MB'),
'2009': HmdaDataFile('hmda_2009_wa_all-records_codes.zip', '590758', '17.9 MB'),
'2011': HmdaDataFile('hmda_2011_wa_all-records_codes.zip', '406149', '13.93 MB'),
'2010': HmdaDataFile('hmda_2010_wa_all-records_codes.zip', '473922', '16.26 MB'),
'2013': HmdaDataFile('hmda_2013_wa_all-records_codes.zip', '448753', '15.96 MB'),
'2012': HmdaDataFile('hmda_2012_wa_all-records_codes.zip', '519479', '18.1 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wa_originated-records_codes.zip', '263712', '9.1 MB'),
'2007': HmdaDataFile('hmda_2007_wa_originated-records_codes.zip', '310267', '10.15 MB'),
'2017': HmdaDataFile('hmda_2017_wa_originated-records_codes.zip', '230076', '4.96 MB'),
'2015': HmdaDataFile('hmda_2015_wa_originated-records_codes.zip', '218772', '8.71 MB'),
'2014': HmdaDataFile('hmda_2014_wa_originated-records_codes.zip', '166279', '5.77 MB'),
'2008': HmdaDataFile('hmda_2008_wa_originated-records_codes.zip', '207841', '6.67 MB'),
'2009': HmdaDataFile('hmda_2009_wa_originated-records_codes.zip', '286416', '8.79 MB'),
'2011': HmdaDataFile('hmda_2011_wa_originated-records_codes.zip', '205550', '6.9 MB'),
'2010': HmdaDataFile('hmda_2010_wa_originated-records_codes.zip', '241128', '8.02 MB'),
'2013': HmdaDataFile('hmda_2013_wa_originated-records_codes.zip', '246580', '8.58 MB'),
'2012': HmdaDataFile('hmda_2012_wa_originated-records_codes.zip', '287748', '9.76 MB')
}
}
},
'nc': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '220677', '10.93 MB'),
'2007': HmdaDataFile('hmda_2007_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '228163', '10.68 MB'),
'2017': HmdaDataFile('hmda_2017_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '196986', '6.12 MB'),
'2015': HmdaDataFile('hmda_2015_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '190281', '11.14 MB'),
'2014': HmdaDataFile('hmda_2014_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '154486', '7.77 MB'),
'2008': HmdaDataFile('hmda_2008_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '202793', '9.24 MB'),
'2009': HmdaDataFile('hmda_2009_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '263921', '11.18 MB'),
'2011': HmdaDataFile('hmda_2011_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '189738', '9.12 MB'),
'2010': HmdaDataFile('hmda_2010_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '211447', '10.31 MB'),
'2013': HmdaDataFile('hmda_2013_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '224534', '11.35 MB'),
'2012': HmdaDataFile('hmda_2012_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '240943', '12.17 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nc_all-records_labels.zip', '519897', '27.86 MB'),
'2007': HmdaDataFile('hmda_2007_nc_all-records_labels.zip', '779619', '36.44 MB'),
'2017': HmdaDataFile('hmda_2017_nc_all-records_labels.zip', '464109', '16.24 MB'),
'2015': HmdaDataFile('hmda_2015_nc_all-records_labels.zip', '457002', '29.06 MB'),
'2014': HmdaDataFile('hmda_2014_nc_all-records_labels.zip', '392549', '21.12 MB'),
'2008': HmdaDataFile('hmda_2008_nc_all-records_labels.zip', '575937', '26.69 MB'),
'2009': HmdaDataFile('hmda_2009_nc_all-records_labels.zip', '617968', '27.09 MB'),
'2011': HmdaDataFile('hmda_2011_nc_all-records_labels.zip', '476288', '24.93 MB'),
'2010': HmdaDataFile('hmda_2010_nc_all-records_labels.zip', '511912', '26.98 MB'),
'2013': HmdaDataFile('hmda_2013_nc_all-records_labels.zip', '562524', '30.51 MB'),
'2012': HmdaDataFile('hmda_2012_nc_all-records_labels.zip', '578793', '31.47 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nc_originated-records_labels.zip', '262765', '13.32 MB'),
'2007': HmdaDataFile('hmda_2007_nc_originated-records_labels.zip', '337640', '15.84 MB'),
'2017': HmdaDataFile('hmda_2017_nc_originated-records_labels.zip', '240128', '7.69 MB'),
'2015': HmdaDataFile('hmda_2015_nc_originated-records_labels.zip', '231114', '13.82 MB'),
'2014': HmdaDataFile('hmda_2014_nc_originated-records_labels.zip', '192473', '9.85 MB'),
'2008': HmdaDataFile('hmda_2008_nc_originated-records_labels.zip', '260693', '12.04 MB'),
'2009': HmdaDataFile('hmda_2009_nc_originated-records_labels.zip', '303410', '13.01 MB'),
'2011': HmdaDataFile('hmda_2011_nc_originated-records_labels.zip', '227079', '11.18 MB'),
'2010': HmdaDataFile('hmda_2010_nc_originated-records_labels.zip', '248590', '12.36 MB'),
'2013': HmdaDataFile('hmda_2013_nc_originated-records_labels.zip', '278062', '14.32 MB'),
'2012': HmdaDataFile('hmda_2012_nc_originated-records_labels.zip', '289407', '14.88 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '220677', '7.49 MB'),
'2007': HmdaDataFile('hmda_2007_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '228163', '7.37 MB'),
'2017': HmdaDataFile('hmda_2017_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '196986', '4.24 MB'),
'2015': HmdaDataFile('hmda_2015_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '190281', '7.64 MB'),
'2014': HmdaDataFile('hmda_2014_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '154486', '5.35 MB'),
'2008': HmdaDataFile('hmda_2008_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '202793', '6.4 MB'),
'2009': HmdaDataFile('hmda_2009_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '263921', '7.94 MB'),
'2011': HmdaDataFile('hmda_2011_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '189738', '6.03 MB'),
'2010': HmdaDataFile('hmda_2010_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '211447', '6.9 MB'),
'2013': HmdaDataFile('hmda_2013_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '224534', '7.74 MB'),
'2012': HmdaDataFile('hmda_2012_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '240943', '8.33 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nc_all-records_codes.zip', '519897', '18.52 MB'),
'2007': HmdaDataFile('hmda_2007_nc_all-records_codes.zip', '779619', '24.55 MB'),
'2017': HmdaDataFile('hmda_2017_nc_all-records_codes.zip', '464109', '10.37 MB'),
'2015': HmdaDataFile('hmda_2015_nc_all-records_codes.zip', '457002', '19.13 MB'),
'2014': HmdaDataFile('hmda_2014_nc_all-records_codes.zip', '392549', '13.94 MB'),
'2008': HmdaDataFile('hmda_2008_nc_all-records_codes.zip', '575937', '17.94 MB'),
'2009': HmdaDataFile('hmda_2009_nc_all-records_codes.zip', '617968', '18.62 MB'),
'2011': HmdaDataFile('hmda_2011_nc_all-records_codes.zip', '476288', '15.9 MB'),
'2010': HmdaDataFile('hmda_2010_nc_all-records_codes.zip', '511912', '17.45 MB'),
'2013': HmdaDataFile('hmda_2013_nc_all-records_codes.zip', '562524', '20.11 MB'),
'2012': HmdaDataFile('hmda_2012_nc_all-records_codes.zip', '578793', '20.81 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nc_originated-records_codes.zip', '262765', '9.11 MB'),
'2007': HmdaDataFile('hmda_2007_nc_originated-records_codes.zip', '337640', '10.93 MB'),
'2017': HmdaDataFile('hmda_2017_nc_originated-records_codes.zip', '240128', '5.27 MB'),
'2015': HmdaDataFile('hmda_2015_nc_originated-records_codes.zip', '231114', '9.42 MB'),
'2014': HmdaDataFile('hmda_2014_nc_originated-records_codes.zip', '192473', '6.73 MB'),
'2008': HmdaDataFile('hmda_2008_nc_originated-records_codes.zip', '260693', '8.29 MB'),
'2009': HmdaDataFile('hmda_2009_nc_originated-records_codes.zip', '303410', '9.19 MB'),
'2011': HmdaDataFile('hmda_2011_nc_originated-records_codes.zip', '227079', '7.35 MB'),
'2010': HmdaDataFile('hmda_2010_nc_originated-records_codes.zip', '248590', '8.23 MB'),
'2013': HmdaDataFile('hmda_2013_nc_originated-records_codes.zip', '278062', '9.71 MB'),
'2012': HmdaDataFile('hmda_2012_nc_originated-records_codes.zip', '289407', '10.13 MB')
}
}
},
'nd': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '15775', '603.58 KB'),
'2007': HmdaDataFile('hmda_2007_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '11613', '456.02 KB'),
'2017': HmdaDataFile('hmda_2017_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '12661', '336.26 KB'),
'2015': HmdaDataFile('hmda_2015_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '15228', '660.94 KB'),
'2014': HmdaDataFile('hmda_2014_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '12935', '513.04 KB'),
'2008': HmdaDataFile('hmda_2008_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '12079', '485.38 KB'),
'2009': HmdaDataFile('hmda_2009_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '17245', '600.63 KB'),
'2011': HmdaDataFile('hmda_2011_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '14852', '543.35 KB'),
'2010': HmdaDataFile('hmda_2010_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '16629', '606.86 KB'),
'2013': HmdaDataFile('hmda_2013_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '17550', '663.49 KB'),
'2012': HmdaDataFile('hmda_2012_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '19995', '730.61 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nd_all-records_labels.zip', '32670', '1.35 MB'),
'2007': HmdaDataFile('hmda_2007_nd_all-records_labels.zip', '32081', '1.34 MB'),
'2017': HmdaDataFile('hmda_2017_nd_all-records_labels.zip', '25521', '733.11 KB'),
'2015': HmdaDataFile('hmda_2015_nd_all-records_labels.zip', '31382', '1.48 MB'),
'2014': HmdaDataFile('hmda_2014_nd_all-records_labels.zip', '27698', '1.18 MB'),
'2008': HmdaDataFile('hmda_2008_nd_all-records_labels.zip', '28946', '1.25 MB'),
'2009': HmdaDataFile('hmda_2009_nd_all-records_labels.zip', '35789', '1.37 MB'),
'2011': HmdaDataFile('hmda_2011_nd_all-records_labels.zip', '30234', '1.22 MB'),
'2010': HmdaDataFile('hmda_2010_nd_all-records_labels.zip', '32754', '1.34 MB'),
'2013': HmdaDataFile('hmda_2013_nd_all-records_labels.zip', '35576', '1.48 MB'),
'2012': HmdaDataFile('hmda_2012_nd_all-records_labels.zip', '37581', '1.53 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nd_originated-records_labels.zip', '19580', '772.47 KB'),
'2007': HmdaDataFile('hmda_2007_nd_originated-records_labels.zip', '17673', '689.98 KB'),
'2017': HmdaDataFile('hmda_2017_nd_originated-records_labels.zip', '16016', '429.87 KB'),
'2015': HmdaDataFile('hmda_2015_nd_originated-records_labels.zip', '19045', '847.76 KB'),
'2014': HmdaDataFile('hmda_2014_nd_originated-records_labels.zip', '17180', '693.97 KB'),
'2008': HmdaDataFile('hmda_2008_nd_originated-records_labels.zip', '16806', '683.54 KB'),
'2009': HmdaDataFile('hmda_2009_nd_originated-records_labels.zip', '21080', '749.43 KB'),
'2011': HmdaDataFile('hmda_2011_nd_originated-records_labels.zip', '18548', '698.61 KB'),
'2010': HmdaDataFile('hmda_2010_nd_originated-records_labels.zip', '20218', '761.83 KB'),
'2013': HmdaDataFile('hmda_2013_nd_originated-records_labels.zip', '22042', '860.22 KB'),
'2012': HmdaDataFile('hmda_2012_nd_originated-records_labels.zip', '24096', '905.02 KB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '15775', '388.61 KB'),
'2007': HmdaDataFile('hmda_2007_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '11613', '292.47 KB'),
'2017': HmdaDataFile('hmda_2017_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '12661', '238.72 KB'),
'2015': HmdaDataFile('hmda_2015_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '15228', '425.66 KB'),
'2014': HmdaDataFile('hmda_2014_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '12935', '330.05 KB'),
'2008': HmdaDataFile('hmda_2008_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '12079', '313.83 KB'),
'2009': HmdaDataFile('hmda_2009_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '17245', '399.68 KB'),
'2011': HmdaDataFile('hmda_2011_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '14852', '348.81 KB'),
'2010': HmdaDataFile('hmda_2010_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '16629', '390.2 KB'),
'2013': HmdaDataFile('hmda_2013_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '17550', '426.9 KB'),
'2012': HmdaDataFile('hmda_2012_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '19995', '472.27 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nd_all-records_codes.zip', '32670', '832.59 KB'),
'2007': HmdaDataFile('hmda_2007_nd_all-records_codes.zip', '32081', '834.25 KB'),
'2017': HmdaDataFile('hmda_2017_nd_all-records_codes.zip', '25521', '487.38 KB'),
'2015': HmdaDataFile('hmda_2015_nd_all-records_codes.zip', '31382', '919.39 KB'),
'2014': HmdaDataFile('hmda_2014_nd_all-records_codes.zip', '27698', '726.15 KB'),
'2008': HmdaDataFile('hmda_2008_nd_all-records_codes.zip', '28946', '781.35 KB'),
'2009': HmdaDataFile('hmda_2009_nd_all-records_codes.zip', '35789', '876.82 KB'),
'2011': HmdaDataFile('hmda_2011_nd_all-records_codes.zip', '30234', '748.63 KB'),
'2010': HmdaDataFile('hmda_2010_nd_all-records_codes.zip', '32754', '826.34 KB'),
'2013': HmdaDataFile('hmda_2013_nd_all-records_codes.zip', '35576', '908.14 KB'),
'2012': HmdaDataFile('hmda_2012_nd_all-records_codes.zip', '37581', '943.93 KB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nd_originated-records_codes.zip', '19580', '486.75 KB'),
'2007': HmdaDataFile('hmda_2007_nd_originated-records_codes.zip', '17673', '439.38 KB'),
'2017': HmdaDataFile('hmda_2017_nd_originated-records_codes.zip', '16016', '299.25 KB'),
'2015': HmdaDataFile('hmda_2015_nd_originated-records_codes.zip', '19045', '537.29 KB'),
'2014': HmdaDataFile('hmda_2014_nd_originated-records_codes.zip', '17180', '437.05 KB'),
'2008': HmdaDataFile('hmda_2008_nd_originated-records_codes.zip', '16806', '438.4 KB'),
'2009': HmdaDataFile('hmda_2009_nd_originated-records_codes.zip', '21080', '495.22 KB'),
'2011': HmdaDataFile('hmda_2011_nd_originated-records_codes.zip', '18548', '440.47 KB'),
'2010': HmdaDataFile('hmda_2010_nd_originated-records_codes.zip', '20218', '482.84 KB'),
'2013': HmdaDataFile('hmda_2013_nd_originated-records_codes.zip', '22042', '542.74 KB'),
'2012': HmdaDataFile('hmda_2012_nd_originated-records_codes.zip', '24096', '576.32 KB')
}
}
},
'ne': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '42823', '1.96 MB'),
'2007': HmdaDataFile('hmda_2007_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '35882', '1.57 MB'),
'2017': HmdaDataFile('hmda_2017_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '34951', '1.05 MB'),
'2015': HmdaDataFile('hmda_2015_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '38670', '1.94 MB'),
'2014': HmdaDataFile('hmda_2014_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '30830', '1.5 MB'),
'2008': HmdaDataFile('hmda_2008_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '33590', '1.44 MB'),
'2009': HmdaDataFile('hmda_2009_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '56987', '2.26 MB'),
'2011': HmdaDataFile('hmda_2011_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '42269', '1.83 MB'),
'2010': HmdaDataFile('hmda_2010_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '51870', '2.25 MB'),
'2013': HmdaDataFile('hmda_2013_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '45909', '2.1 MB'),
'2012': HmdaDataFile('hmda_2012_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '57432', '2.6 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ne_all-records_labels.zip', '89068', '4.35 MB'),
'2007': HmdaDataFile('hmda_2007_ne_all-records_labels.zip', '112752', '5.1 MB'),
'2017': HmdaDataFile('hmda_2017_ne_all-records_labels.zip', '74966', '2.4 MB'),
'2015': HmdaDataFile('hmda_2015_ne_all-records_labels.zip', '82331', '4.47 MB'),
'2014': HmdaDataFile('hmda_2014_ne_all-records_labels.zip', '68559', '3.58 MB'),
'2008': HmdaDataFile('hmda_2008_ne_all-records_labels.zip', '88586', '3.98 MB'),
'2009': HmdaDataFile('hmda_2009_ne_all-records_labels.zip', '117158', '4.99 MB'),
'2011': HmdaDataFile('hmda_2011_ne_all-records_labels.zip', '90603', '4.29 MB'),
'2010': HmdaDataFile('hmda_2010_ne_all-records_labels.zip', '105043', '4.96 MB'),
'2013': HmdaDataFile('hmda_2013_ne_all-records_labels.zip', '97423', '4.78 MB'),
'2012': HmdaDataFile('hmda_2012_ne_all-records_labels.zip', '113860', '5.56 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ne_originated-records_labels.zip', '52019', '2.41 MB'),
'2007': HmdaDataFile('hmda_2007_ne_originated-records_labels.zip', '51923', '2.26 MB'),
'2017': HmdaDataFile('hmda_2017_ne_originated-records_labels.zip', '43786', '1.32 MB'),
'2015': HmdaDataFile('hmda_2015_ne_originated-records_labels.zip', '48157', '2.46 MB'),
'2014': HmdaDataFile('hmda_2014_ne_originated-records_labels.zip', '39960', '1.98 MB'),
'2008': HmdaDataFile('hmda_2008_ne_originated-records_labels.zip', '44333', '1.92 MB'),
'2009': HmdaDataFile('hmda_2009_ne_originated-records_labels.zip', '65475', '2.65 MB'),
'2011': HmdaDataFile('hmda_2011_ne_originated-records_labels.zip', '50258', '2.23 MB'),
'2010': HmdaDataFile('hmda_2010_ne_originated-records_labels.zip', '59744', '2.64 MB'),
'2013': HmdaDataFile('hmda_2013_ne_originated-records_labels.zip', '56003', '2.6 MB'),
'2012': HmdaDataFile('hmda_2012_ne_originated-records_labels.zip', '67081', '3.09 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '42823', '1.28 MB'),
'2007': HmdaDataFile('hmda_2007_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '35882', '1.06 MB'),
'2017': HmdaDataFile('hmda_2017_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '34951', '753.37 KB'),
'2015': HmdaDataFile('hmda_2015_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '38670', '1.3 MB'),
'2014': HmdaDataFile('hmda_2014_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '30830', '988.96 KB'),
'2008': HmdaDataFile('hmda_2008_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '33590', '983.97 KB'),
'2009': HmdaDataFile('hmda_2009_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '56987', '1.58 MB'),
'2011': HmdaDataFile('hmda_2011_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '42269', '1.2 MB'),
'2010': HmdaDataFile('hmda_2010_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '51870', '1.48 MB'),
'2013': HmdaDataFile('hmda_2013_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '45909', '1.37 MB'),
'2012': HmdaDataFile('hmda_2012_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '57432', '1.7 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ne_all-records_codes.zip', '89068', '2.76 MB'),
'2007': HmdaDataFile('hmda_2007_ne_all-records_codes.zip', '112752', '3.37 MB'),
'2017': HmdaDataFile('hmda_2017_ne_all-records_codes.zip', '74966', '1.6 MB'),
'2015': HmdaDataFile('hmda_2015_ne_all-records_codes.zip', '82331', '2.91 MB'),
'2014': HmdaDataFile('hmda_2014_ne_all-records_codes.zip', '68559', '2.27 MB'),
'2008': HmdaDataFile('hmda_2008_ne_all-records_codes.zip', '88586', '2.64 MB'),
'2009': HmdaDataFile('hmda_2009_ne_all-records_codes.zip', '117158', '3.39 MB'),
'2011': HmdaDataFile('hmda_2011_ne_all-records_codes.zip', '90603', '2.71 MB'),
'2010': HmdaDataFile('hmda_2010_ne_all-records_codes.zip', '105043', '3.16 MB'),
'2013': HmdaDataFile('hmda_2013_ne_all-records_codes.zip', '97423', '3.04 MB'),
'2012': HmdaDataFile('hmda_2012_ne_all-records_codes.zip', '113860', '3.54 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ne_originated-records_codes.zip', '52019', '1.56 MB'),
'2007': HmdaDataFile('hmda_2007_ne_originated-records_codes.zip', '51923', '1.52 MB'),
'2017': HmdaDataFile('hmda_2017_ne_originated-records_codes.zip', '43786', '932.71 KB'),
'2015': HmdaDataFile('hmda_2015_ne_originated-records_codes.zip', '48157', '1.64 MB'),
'2014': HmdaDataFile('hmda_2014_ne_originated-records_codes.zip', '39960', '1.29 MB'),
'2008': HmdaDataFile('hmda_2008_ne_originated-records_codes.zip', '44333', '1.3 MB'),
'2009': HmdaDataFile('hmda_2009_ne_originated-records_codes.zip', '65475', '1.84 MB'),
'2011': HmdaDataFile('hmda_2011_ne_originated-records_codes.zip', '50258', '1.45 MB'),
'2010': HmdaDataFile('hmda_2010_ne_originated-records_codes.zip', '59744', '1.72 MB'),
'2013': HmdaDataFile('hmda_2013_ne_originated-records_codes.zip', '56003', '1.69 MB'),
'2012': HmdaDataFile('hmda_2012_ne_originated-records_codes.zip', '67081', '2.01 MB')
}
}
},
'tn': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '144529', '7.15 MB'),
'2007': HmdaDataFile('hmda_2007_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '151937', '7.17 MB'),
'2017': HmdaDataFile('hmda_2017_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '133331', '4.11 MB'),
'2015': HmdaDataFile('hmda_2015_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '126852', '7.13 MB'),
'2014': HmdaDataFile('hmda_2014_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '103024', '5.17 MB'),
'2008': HmdaDataFile('hmda_2008_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '123580', '5.68 MB'),
'2009': HmdaDataFile('hmda_2009_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '160077', '6.99 MB'),
'2011': HmdaDataFile('hmda_2011_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '113228', '5.37 MB'),
'2010': HmdaDataFile('hmda_2010_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '128508', '6.09 MB'),
'2013': HmdaDataFile('hmda_2013_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '141210', '6.95 MB'),
'2012': HmdaDataFile('hmda_2012_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '151750', '7.48 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_tn_all-records_labels.zip', '350490', '18.5 MB'),
'2007': HmdaDataFile('hmda_2007_tn_all-records_labels.zip', '512117', '24.54 MB'),
'2017': HmdaDataFile('hmda_2017_tn_all-records_labels.zip', '326416', '11.11 MB'),
'2015': HmdaDataFile('hmda_2015_tn_all-records_labels.zip', '305114', '18.47 MB'),
'2014': HmdaDataFile('hmda_2014_tn_all-records_labels.zip', '265214', '14.16 MB'),
'2008': HmdaDataFile('hmda_2008_tn_all-records_labels.zip', '365839', '17.39 MB'),
'2009': HmdaDataFile('hmda_2009_tn_all-records_labels.zip', '406028', '18.65 MB'),
'2011': HmdaDataFile('hmda_2011_tn_all-records_labels.zip', '304377', '16.01 MB'),
'2010': HmdaDataFile('hmda_2010_tn_all-records_labels.zip', '335917', '17.65 MB'),
'2013': HmdaDataFile('hmda_2013_tn_all-records_labels.zip', '358454', '19.31 MB'),
'2012': HmdaDataFile('hmda_2012_tn_all-records_labels.zip', '373362', '20.16 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_tn_originated-records_labels.zip', '174965', '8.84 MB'),
'2007': HmdaDataFile('hmda_2007_tn_originated-records_labels.zip', '217392', '10.46 MB'),
'2017': HmdaDataFile('hmda_2017_tn_originated-records_labels.zip', '164577', '5.23 MB'),
'2015': HmdaDataFile('hmda_2015_tn_originated-records_labels.zip', '155616', '8.89 MB'),
'2014': HmdaDataFile('hmda_2014_tn_originated-records_labels.zip', '131171', '6.76 MB'),
'2008': HmdaDataFile('hmda_2008_tn_originated-records_labels.zip', '163188', '7.73 MB'),
'2009': HmdaDataFile('hmda_2009_tn_originated-records_labels.zip', '187776', '8.43 MB'),
'2011': HmdaDataFile('hmda_2011_tn_originated-records_labels.zip', '137943', '6.74 MB'),
'2010': HmdaDataFile('hmda_2010_tn_originated-records_labels.zip', '153282', '7.46 MB'),
'2013': HmdaDataFile('hmda_2013_tn_originated-records_labels.zip', '172612', '8.61 MB'),
'2012': HmdaDataFile('hmda_2012_tn_originated-records_labels.zip', '180686', '9.04 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '144529', '4.86 MB'),
'2007': HmdaDataFile('hmda_2007_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '151937', '4.92 MB'),
'2017': HmdaDataFile('hmda_2017_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '133331', '2.88 MB'),
'2015': HmdaDataFile('hmda_2015_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '126852', '4.92 MB'),
'2014': HmdaDataFile('hmda_2014_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '103024', '3.53 MB'),
'2008': HmdaDataFile('hmda_2008_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '123580', '3.92 MB'),
'2009': HmdaDataFile('hmda_2009_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '160077', '4.89 MB'),
'2011': HmdaDataFile('hmda_2011_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '113228', '3.56 MB'),
'2010': HmdaDataFile('hmda_2010_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '128508', '4.04 MB'),
'2013': HmdaDataFile('hmda_2013_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '141210', '4.68 MB'),
'2012': HmdaDataFile('hmda_2012_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '151750', '5.03 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_tn_all-records_codes.zip', '350490', '12.18 MB'),
'2007': HmdaDataFile('hmda_2007_tn_all-records_codes.zip', '512117', '16.43 MB'),
'2017': HmdaDataFile('hmda_2017_tn_all-records_codes.zip', '326416', '7.18 MB'),
'2015': HmdaDataFile('hmda_2015_tn_all-records_codes.zip', '305114', '12.28 MB'),
'2014': HmdaDataFile('hmda_2014_tn_all-records_codes.zip', '265214', '9.33 MB'),
'2008': HmdaDataFile('hmda_2008_tn_all-records_codes.zip', '365839', '11.7 MB'),
'2009': HmdaDataFile('hmda_2009_tn_all-records_codes.zip', '406028', '12.76 MB'),
'2011': HmdaDataFile('hmda_2011_tn_all-records_codes.zip', '304377', '10.33 MB'),
'2010': HmdaDataFile('hmda_2010_tn_all-records_codes.zip', '335917', '11.45 MB'),
'2013': HmdaDataFile('hmda_2013_tn_all-records_codes.zip', '358454', '12.68 MB'),
'2012': HmdaDataFile('hmda_2012_tn_all-records_codes.zip', '373362', '13.3 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_tn_originated-records_codes.zip', '174965', '6 MB'),
'2007': HmdaDataFile('hmda_2007_tn_originated-records_codes.zip', '217392', '7.22 MB'),
'2017': HmdaDataFile('hmda_2017_tn_originated-records_codes.zip', '164577', '3.62 MB'),
'2015': HmdaDataFile('hmda_2015_tn_originated-records_codes.zip', '155616', '6.1 MB'),
'2014': HmdaDataFile('hmda_2014_tn_originated-records_codes.zip', '131171', '4.61 MB'),
'2008': HmdaDataFile('hmda_2008_tn_originated-records_codes.zip', '163188', '5.35 MB'),
'2009': HmdaDataFile('hmda_2009_tn_originated-records_codes.zip', '187776', '5.9 MB'),
'2011': HmdaDataFile('hmda_2011_tn_originated-records_codes.zip', '137943', '4.45 MB'),
'2010': HmdaDataFile('hmda_2010_tn_originated-records_codes.zip', '153282', '4.93 MB'),
'2013': HmdaDataFile('hmda_2013_tn_originated-records_codes.zip', '172612', '5.75 MB'),
'2012': HmdaDataFile('hmda_2012_tn_originated-records_codes.zip', '180686', '6.04 MB')
}
}
},
'ny': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '199204', '10.43 MB'),
'2007': HmdaDataFile('hmda_2007_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '281690', '14.06 MB'),
'2017': HmdaDataFile('hmda_2017_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '187337', '6.57 MB'),
'2015': HmdaDataFile('hmda_2015_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '182621', '10.44 MB'),
'2014': HmdaDataFile('hmda_2014_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '156647', '8.24 MB'),
'2008': HmdaDataFile('hmda_2008_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '195917', '9.68 MB'),
'2009': HmdaDataFile('hmda_2009_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '245556', '11.46 MB'),
'2011': HmdaDataFile('hmda_2011_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '201157', '10.32 MB'),
'2010': HmdaDataFile('hmda_2010_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '217296', '11.01 MB'),
'2013': HmdaDataFile('hmda_2013_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '225700', '11.8 MB'),
'2012': HmdaDataFile('hmda_2012_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '242701', '12.39 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ny_all-records_labels.zip', '477313', '26.86 MB'),
'2007': HmdaDataFile('hmda_2007_ny_all-records_labels.zip', '1009451', '51.35 MB'),
'2017': HmdaDataFile('hmda_2017_ny_all-records_labels.zip', '446902', '17.47 MB'),
'2015': HmdaDataFile('hmda_2015_ny_all-records_labels.zip', '439654', '27.13 MB'),
'2014': HmdaDataFile('hmda_2014_ny_all-records_labels.zip', '389279', '22.13 MB'),
'2008': HmdaDataFile('hmda_2008_ny_all-records_labels.zip', '644647', '33.21 MB'),
'2009': HmdaDataFile('hmda_2009_ny_all-records_labels.zip', '645487', '31.84 MB'),
'2011': HmdaDataFile('hmda_2011_ny_all-records_labels.zip', '503733', '28.17 MB'),
'2010': HmdaDataFile('hmda_2010_ny_all-records_labels.zip', '529869', '29.46 MB'),
'2013': HmdaDataFile('hmda_2013_ny_all-records_labels.zip', '539217', '30.46 MB'),
'2012': HmdaDataFile('hmda_2012_ny_all-records_labels.zip', '566980', '31.53 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ny_originated-records_labels.zip', '246292', '13.15 MB'),
'2007': HmdaDataFile('hmda_2007_ny_originated-records_labels.zip', '398639', '20.13 MB'),
'2017': HmdaDataFile('hmda_2017_ny_originated-records_labels.zip', '236499', '8.45 MB'),
'2015': HmdaDataFile('hmda_2015_ny_originated-records_labels.zip', '228054', '13.23 MB'),
'2014': HmdaDataFile('hmda_2014_ny_originated-records_labels.zip', '198817', '10.63 MB'),
'2008': HmdaDataFile('hmda_2008_ny_originated-records_labels.zip', '252826', '12.72 MB'),
'2009': HmdaDataFile('hmda_2009_ny_originated-records_labels.zip', '281652', '13.38 MB'),
'2011': HmdaDataFile('hmda_2011_ny_originated-records_labels.zip', '238554', '12.42 MB'),
'2010': HmdaDataFile('hmda_2010_ny_originated-records_labels.zip', '252729', '13.07 MB'),
'2013': HmdaDataFile('hmda_2013_ny_originated-records_labels.zip', '271829', '14.42 MB'),
'2012': HmdaDataFile('hmda_2012_ny_originated-records_labels.zip', '285106', '14.71 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '199204', '7.47 MB'),
'2007': HmdaDataFile('hmda_2007_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '281690', '10.14 MB'),
'2017': HmdaDataFile('hmda_2017_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '187337', '4.66 MB'),
'2015': HmdaDataFile('hmda_2015_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '182621', '7.46 MB'),
'2014': HmdaDataFile('hmda_2014_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '156647', '5.87 MB'),
'2008': HmdaDataFile('hmda_2008_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '195917', '6.98 MB'),
'2009': HmdaDataFile('hmda_2009_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '245556', '8.36 MB'),
'2011': HmdaDataFile('hmda_2011_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '201157', '7.36 MB'),
'2010': HmdaDataFile('hmda_2010_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '217296', '7.85 MB'),
'2013': HmdaDataFile('hmda_2013_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '225700', '8.44 MB'),
'2012': HmdaDataFile('hmda_2012_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '242701', '8.82 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ny_all-records_codes.zip', '477313', '18.61 MB'),
'2007': HmdaDataFile('hmda_2007_ny_all-records_codes.zip', '1009451', '35.76 MB'),
'2017': HmdaDataFile('hmda_2017_ny_all-records_codes.zip', '446902', '11.54 MB'),
'2015': HmdaDataFile('hmda_2015_ny_all-records_codes.zip', '439654', '18.57 MB'),
'2014': HmdaDataFile('hmda_2014_ny_all-records_codes.zip', '389279', '15.25 MB'),
'2008': HmdaDataFile('hmda_2008_ny_all-records_codes.zip', '644647', '23.18 MB'),
'2009': HmdaDataFile('hmda_2009_ny_all-records_codes.zip', '645487', '22.45 MB'),
'2011': HmdaDataFile('hmda_2011_ny_all-records_codes.zip', '503733', '19.55 MB'),
'2010': HmdaDataFile('hmda_2010_ny_all-records_codes.zip', '529869', '20.47 MB'),
'2013': HmdaDataFile('hmda_2013_ny_all-records_codes.zip', '539217', '21.23 MB'),
'2012': HmdaDataFile('hmda_2012_ny_all-records_codes.zip', '566980', '21.92 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ny_originated-records_codes.zip', '246292', '9.34 MB'),
'2007': HmdaDataFile('hmda_2007_ny_originated-records_codes.zip', '398639', '14.43 MB'),
'2017': HmdaDataFile('hmda_2017_ny_originated-records_codes.zip', '236499', '5.9 MB'),
'2015': HmdaDataFile('hmda_2015_ny_originated-records_codes.zip', '228054', '9.33 MB'),
'2014': HmdaDataFile('hmda_2014_ny_originated-records_codes.zip', '198817', '7.49 MB'),
'2008': HmdaDataFile('hmda_2008_ny_originated-records_codes.zip', '252826', '9.14 MB'),
'2009': HmdaDataFile('hmda_2009_ny_originated-records_codes.zip', '281652', '9.73 MB'),
'2011': HmdaDataFile('hmda_2011_ny_originated-records_codes.zip', '238554', '8.78 MB'),
'2010': HmdaDataFile('hmda_2010_ny_originated-records_codes.zip', '252729', '9.27 MB'),
'2013': HmdaDataFile('hmda_2013_ny_originated-records_codes.zip', '271829', '10.23 MB'),
'2012': HmdaDataFile('hmda_2012_ny_originated-records_codes.zip', '285106', '10.38 MB')
}
}
},
'pa': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '226492', '11.67 MB'),
'2007': HmdaDataFile('hmda_2007_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '276621', '13.86 MB'),
'2017': HmdaDataFile('hmda_2017_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '200046', '6.74 MB'),
'2015': HmdaDataFile('hmda_2015_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '206884', '12.08 MB'),
'2014': HmdaDataFile('hmda_2014_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '175312', '8.94 MB'),
'2008': HmdaDataFile('hmda_2008_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '230119', '11.38 MB'),
'2009': HmdaDataFile('hmda_2009_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '318871', '14.7 MB'),
'2011': HmdaDataFile('hmda_2011_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '247649', '12.37 MB'),
'2010': HmdaDataFile('hmda_2010_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '280178', '14.07 MB'),
'2013': HmdaDataFile('hmda_2013_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '269808', '13.79 MB'),
'2012': HmdaDataFile('hmda_2012_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '313974', '15.99 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_pa_all-records_labels.zip', '526005', '29.04 MB'),
'2007': HmdaDataFile('hmda_2007_pa_all-records_labels.zip', '992904', '49.57 MB'),
'2017': HmdaDataFile('hmda_2017_pa_all-records_labels.zip', '473757', '17.91 MB'),
'2015': HmdaDataFile('hmda_2015_pa_all-records_labels.zip', '481331', '30.25 MB'),
'2014': HmdaDataFile('hmda_2014_pa_all-records_labels.zip', '427665', '23.41 MB'),
'2008': HmdaDataFile('hmda_2008_pa_all-records_labels.zip', '713995', '35.8 MB'),
'2009': HmdaDataFile('hmda_2009_pa_all-records_labels.zip', '768245', '37 MB'),
'2011': HmdaDataFile('hmda_2011_pa_all-records_labels.zip', '593240', '32.54 MB'),
'2010': HmdaDataFile('hmda_2010_pa_all-records_labels.zip', '659124', '36.22 MB'),
'2013': HmdaDataFile('hmda_2013_pa_all-records_labels.zip', '619770', '34.49 MB'),
'2012': HmdaDataFile('hmda_2012_pa_all-records_labels.zip', '695500', '38.56 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_pa_originated-records_labels.zip', '273334', '14.35 MB'),
'2007': HmdaDataFile('hmda_2007_pa_originated-records_labels.zip', '427955', '21.49 MB'),
'2017': HmdaDataFile('hmda_2017_pa_originated-records_labels.zip', '249620', '8.62 MB'),
'2015': HmdaDataFile('hmda_2015_pa_originated-records_labels.zip', '254361', '15.02 MB'),
'2014': HmdaDataFile('hmda_2014_pa_originated-records_labels.zip', '222765', '11.62 MB'),
'2008': HmdaDataFile('hmda_2008_pa_originated-records_labels.zip', '322031', '16.08 MB'),
'2009': HmdaDataFile('hmda_2009_pa_originated-records_labels.zip', '374957', '17.61 MB'),
'2011': HmdaDataFile('hmda_2011_pa_originated-records_labels.zip', '297874', '15.19 MB'),
'2010': HmdaDataFile('hmda_2010_pa_originated-records_labels.zip', '334150', '17.16 MB'),
'2013': HmdaDataFile('hmda_2013_pa_originated-records_labels.zip', '328425', '16.99 MB'),
'2012': HmdaDataFile('hmda_2012_pa_originated-records_labels.zip', '369571', '19.04 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '226492', '8.16 MB'),
'2007': HmdaDataFile('hmda_2007_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '276621', '9.82 MB'),
'2017': HmdaDataFile('hmda_2017_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '200046', '4.79 MB'),
'2015': HmdaDataFile('hmda_2015_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '206884', '8.4 MB'),
'2014': HmdaDataFile('hmda_2014_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '175312', '6.24 MB'),
'2008': HmdaDataFile('hmda_2008_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '230119', '8.1 MB'),
'2009': HmdaDataFile('hmda_2009_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '318871', '10.64 MB'),
'2011': HmdaDataFile('hmda_2011_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '247649', '8.65 MB'),
'2010': HmdaDataFile('hmda_2010_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '280178', '9.85 MB'),
'2013': HmdaDataFile('hmda_2013_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '269808', '9.72 MB'),
'2012': HmdaDataFile('hmda_2012_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '313974', '11.2 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_pa_all-records_codes.zip', '526005', '19.65 MB'),
'2007': HmdaDataFile('hmda_2007_pa_all-records_codes.zip', '992904', '34.13 MB'),
'2017': HmdaDataFile('hmda_2017_pa_all-records_codes.zip', '473757', '11.77 MB'),
'2015': HmdaDataFile('hmda_2015_pa_all-records_codes.zip', '481331', '20.19 MB'),
'2014': HmdaDataFile('hmda_2014_pa_all-records_codes.zip', '427665', '15.79 MB'),
'2008': HmdaDataFile('hmda_2008_pa_all-records_codes.zip', '713995', '24.78 MB'),
'2009': HmdaDataFile('hmda_2009_pa_all-records_codes.zip', '768245', '25.9 MB'),
'2011': HmdaDataFile('hmda_2011_pa_all-records_codes.zip', '593240', '22.19 MB'),
'2010': HmdaDataFile('hmda_2010_pa_all-records_codes.zip', '659124', '24.67 MB'),
'2013': HmdaDataFile('hmda_2013_pa_all-records_codes.zip', '619770', '23.66 MB'),
'2012': HmdaDataFile('hmda_2012_pa_all-records_codes.zip', '695500', '26.36 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_pa_originated-records_codes.zip', '273334', '9.99 MB'),
'2007': HmdaDataFile('hmda_2007_pa_originated-records_codes.zip', '427955', '15.19 MB'),
'2017': HmdaDataFile('hmda_2017_pa_originated-records_codes.zip', '249620', '6.04 MB'),
'2015': HmdaDataFile('hmda_2015_pa_originated-records_codes.zip', '254361', '10.34 MB'),
'2014': HmdaDataFile('hmda_2014_pa_originated-records_codes.zip', '222765', '8.07 MB'),
'2008': HmdaDataFile('hmda_2008_pa_originated-records_codes.zip', '322031', '11.45 MB'),
'2009': HmdaDataFile('hmda_2009_pa_originated-records_codes.zip', '374957', '12.74 MB'),
'2011': HmdaDataFile('hmda_2011_pa_originated-records_codes.zip', '297874', '10.53 MB'),
'2010': HmdaDataFile('hmda_2010_pa_originated-records_codes.zip', '334150', '11.94 MB'),
'2013': HmdaDataFile('hmda_2013_pa_originated-records_codes.zip', '328425', '11.84 MB'),
'2012': HmdaDataFile('hmda_2012_pa_originated-records_codes.zip', '369571', '13.21 MB')
}
}
},
'ca': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '1007181', '54.33 MB'),
'2007': HmdaDataFile('hmda_2007_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '870274', '44.14 MB'),
'2017': HmdaDataFile('hmda_2017_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '721751', '25.58 MB'),
'2015': HmdaDataFile('hmda_2015_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '840549', '50.22 MB'),
'2014': HmdaDataFile('hmda_2014_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '617503', '32.89 MB'),
'2008': HmdaDataFile('hmda_2008_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '540095', '26.82 MB'),
'2009': HmdaDataFile('hmda_2009_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '871218', '40.28 MB'),
'2011': HmdaDataFile('hmda_2011_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '777800', '41.39 MB'),
'2010': HmdaDataFile('hmda_2010_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '867778', '45.96 MB'),
'2013': HmdaDataFile('hmda_2013_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '943566', '51.22 MB'),
'2012': HmdaDataFile('hmda_2012_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '1179705', '63.7 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ca_all-records_labels.zip', '2235971', '128.67 MB'),
'2007': HmdaDataFile('hmda_2007_ca_all-records_labels.zip', '3425570', '178.89 MB'),
'2017': HmdaDataFile('hmda_2017_ca_all-records_labels.zip', '1714459', '68.82 MB'),
'2015': HmdaDataFile('hmda_2015_ca_all-records_labels.zip', '1878495', '120.02 MB'),
'2014': HmdaDataFile('hmda_2014_ca_all-records_labels.zip', '1436457', '81.73 MB'),
'2008': HmdaDataFile('hmda_2008_ca_all-records_labels.zip', '1843875', '91.09 MB'),
'2009': HmdaDataFile('hmda_2009_ca_all-records_labels.zip', '2186032', '102.95 MB'),
'2011': HmdaDataFile('hmda_2011_ca_all-records_labels.zip', '1914815', '108.47 MB'),
'2010': HmdaDataFile('hmda_2010_ca_all-records_labels.zip', '2007593', '113.6 MB'),
'2013': HmdaDataFile('hmda_2013_ca_all-records_labels.zip', '2161214', '124.84 MB'),
'2012': HmdaDataFile('hmda_2012_ca_all-records_labels.zip', '2541978', '146.32 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ca_originated-records_labels.zip', '1172541', '63.86 MB'),
'2007': HmdaDataFile('hmda_2007_ca_originated-records_labels.zip', '1233502', '62.47 MB'),
'2017': HmdaDataFile('hmda_2017_ca_originated-records_labels.zip', '877753', '31.88 MB'),
'2015': HmdaDataFile('hmda_2015_ca_originated-records_labels.zip', '993335', '60.03 MB'),
'2014': HmdaDataFile('hmda_2014_ca_originated-records_labels.zip', '750422', '40.44 MB'),
'2008': HmdaDataFile('hmda_2008_ca_originated-records_labels.zip', '672822', '33.65 MB'),
'2009': HmdaDataFile('hmda_2009_ca_originated-records_labels.zip', '972974', '45.3 MB'),
'2011': HmdaDataFile('hmda_2011_ca_originated-records_labels.zip', '917070', '49.28 MB'),
'2010': HmdaDataFile('hmda_2010_ca_originated-records_labels.zip', '980348', '52.43 MB'),
'2013': HmdaDataFile('hmda_2013_ca_originated-records_labels.zip', '1153965', '63.32 MB'),
'2012': HmdaDataFile('hmda_2012_ca_originated-records_labels.zip', '1391720', '75.92 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '1007181', '40 MB'),
'2007': HmdaDataFile('hmda_2007_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '870274', '32.16 MB'),
'2017': HmdaDataFile('hmda_2017_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '721751', '17.18 MB'),
'2015': HmdaDataFile('hmda_2015_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '840549', '35.92 MB'),
'2014': HmdaDataFile('hmda_2014_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '617503', '24.08 MB'),
'2008': HmdaDataFile('hmda_2008_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '540095', '19.47 MB'),
'2009': HmdaDataFile('hmda_2009_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '871218', '29.59 MB'),
'2011': HmdaDataFile('hmda_2011_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '777800', '30.2 MB'),
'2010': HmdaDataFile('hmda_2010_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '867778', '33.54 MB'),
'2013': HmdaDataFile('hmda_2013_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '943566', '37.62 MB'),
'2012': HmdaDataFile('hmda_2012_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '1179705', '46.88 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ca_all-records_codes.zip', '2235971', '91.67 MB'),
'2007': HmdaDataFile('hmda_2007_ca_all-records_codes.zip', '3425570', '117.94 MB'),
'2017': HmdaDataFile('hmda_2017_ca_all-records_codes.zip', '1714459', '42.19 MB'),
'2015': HmdaDataFile('hmda_2015_ca_all-records_codes.zip', '1878495', '82.3 MB'),
'2014': HmdaDataFile('hmda_2014_ca_all-records_codes.zip', '1436457', '57.96 MB'),
'2008': HmdaDataFile('hmda_2008_ca_all-records_codes.zip', '1843875', '63.68 MB'),
'2009': HmdaDataFile('hmda_2009_ca_all-records_codes.zip', '2186032', '72.66 MB'),
'2011': HmdaDataFile('hmda_2011_ca_all-records_codes.zip', '1914815', '76.92 MB'),
'2010': HmdaDataFile('hmda_2010_ca_all-records_codes.zip', '2007593', '80.32 MB'),
'2013': HmdaDataFile('hmda_2013_ca_all-records_codes.zip', '2161214', '88.6 MB'),
'2012': HmdaDataFile('hmda_2012_ca_all-records_codes.zip', '2541978', '104.3 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ca_originated-records_codes.zip', '1172541', '46.78 MB'),
'2007': HmdaDataFile('hmda_2007_ca_originated-records_codes.zip', '1233502', '45.28 MB'),
'2017': HmdaDataFile('hmda_2017_ca_originated-records_codes.zip', '877753', '21.13 MB'),
'2015': HmdaDataFile('hmda_2015_ca_originated-records_codes.zip', '993335', '42.66 MB'),
'2014': HmdaDataFile('hmda_2014_ca_originated-records_codes.zip', '750422', '29.45 MB'),
'2008': HmdaDataFile('hmda_2008_ca_originated-records_codes.zip', '672822', '24.32 MB'),
'2009': HmdaDataFile('hmda_2009_ca_originated-records_codes.zip', '972974', '33.15 MB'),
'2011': HmdaDataFile('hmda_2011_ca_originated-records_codes.zip', '917070', '35.8 MB'),
'2010': HmdaDataFile('hmda_2010_ca_originated-records_codes.zip', '980348', '38.11 MB'),
'2013': HmdaDataFile('hmda_2013_ca_originated-records_codes.zip', '1153965', '46.26 MB'),
'2012': HmdaDataFile('hmda_2012_ca_originated-records_codes.zip', '1391720', '55.58 MB')
}
}
},
'nv': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '84481', '3.96 MB'),
'2007': HmdaDataFile('hmda_2007_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '73747', '3.21 MB'),
'2017': HmdaDataFile('hmda_2017_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '76783', '2.28 MB'),
'2015': HmdaDataFile('hmda_2015_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '69214', '3.49 MB'),
'2014': HmdaDataFile('hmda_2014_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '49799', '2.4 MB'),
'2008': HmdaDataFile('hmda_2008_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '48287', '1.93 MB'),
'2009': HmdaDataFile('hmda_2009_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '59437', '2.29 MB'),
'2011': HmdaDataFile('hmda_2011_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '39765', '1.65 MB'),
'2010': HmdaDataFile('hmda_2010_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '47126', '2.01 MB'),
'2013': HmdaDataFile('hmda_2013_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '62574', '2.95 MB'),
'2012': HmdaDataFile('hmda_2012_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '67783', '3.09 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nv_all-records_labels.zip', '196764', '9.88 MB'),
'2007': HmdaDataFile('hmda_2007_nv_all-records_labels.zip', '326985', '14.53 MB'),
'2017': HmdaDataFile('hmda_2017_nv_all-records_labels.zip', '178587', '5.91 MB'),
'2015': HmdaDataFile('hmda_2015_nv_all-records_labels.zip', '158259', '8.69 MB'),
'2014': HmdaDataFile('hmda_2014_nv_all-records_labels.zip', '119744', '6.27 MB'),
'2008': HmdaDataFile('hmda_2008_nv_all-records_labels.zip', '163606', '6.93 MB'),
'2009': HmdaDataFile('hmda_2009_nv_all-records_labels.zip', '168658', '6.88 MB'),
'2011': HmdaDataFile('hmda_2011_nv_all-records_labels.zip', '112858', '5.27 MB'),
'2010': HmdaDataFile('hmda_2010_nv_all-records_labels.zip', '126313', '5.98 MB'),
'2013': HmdaDataFile('hmda_2013_nv_all-records_labels.zip', '151453', '7.8 MB'),
'2012': HmdaDataFile('hmda_2012_nv_all-records_labels.zip', '161343', '8.12 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nv_originated-records_labels.zip', '97926', '4.69 MB'),
'2007': HmdaDataFile('hmda_2007_nv_originated-records_labels.zip', '113248', '5 MB'),
'2017': HmdaDataFile('hmda_2017_nv_originated-records_labels.zip', '91540', '2.8 MB'),
'2015': HmdaDataFile('hmda_2015_nv_originated-records_labels.zip', '81624', '4.21 MB'),
'2014': HmdaDataFile('hmda_2014_nv_originated-records_labels.zip', '61757', '3.04 MB'),
'2008': HmdaDataFile('hmda_2008_nv_originated-records_labels.zip', '62592', '2.58 MB'),
'2009': HmdaDataFile('hmda_2009_nv_originated-records_labels.zip', '70049', '2.77 MB'),
'2011': HmdaDataFile('hmda_2011_nv_originated-records_labels.zip', '51445', '2.24 MB'),
'2010': HmdaDataFile('hmda_2010_nv_originated-records_labels.zip', '57173', '2.51 MB'),
'2013': HmdaDataFile('hmda_2013_nv_originated-records_labels.zip', '82011', '3.96 MB'),
'2012': HmdaDataFile('hmda_2012_nv_originated-records_labels.zip', '86528', '4.04 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '84481', '2.78 MB'),
'2007': HmdaDataFile('hmda_2007_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '73747', '2.26 MB'),
'2017': HmdaDataFile('hmda_2017_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '76783', '1.59 MB'),
'2015': HmdaDataFile('hmda_2015_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '69214', '2.48 MB'),
'2014': HmdaDataFile('hmda_2014_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '49799', '1.68 MB'),
'2008': HmdaDataFile('hmda_2008_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '48287', '1.36 MB'),
'2009': HmdaDataFile('hmda_2009_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '59437', '1.63 MB'),
'2011': HmdaDataFile('hmda_2011_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '39765', '1.13 MB'),
'2010': HmdaDataFile('hmda_2010_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '47126', '1.37 MB'),
'2013': HmdaDataFile('hmda_2013_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '62574', '2.07 MB'),
'2012': HmdaDataFile('hmda_2012_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '67783', '2.17 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nv_all-records_codes.zip', '196764', '6.71 MB'),
'2007': HmdaDataFile('hmda_2007_nv_all-records_codes.zip', '326985', '9.91 MB'),
'2017': HmdaDataFile('hmda_2017_nv_all-records_codes.zip', '178587', '3.8 MB'),
'2015': HmdaDataFile('hmda_2015_nv_all-records_codes.zip', '158259', '5.91 MB'),
'2014': HmdaDataFile('hmda_2014_nv_all-records_codes.zip', '119744', '4.26 MB'),
'2008': HmdaDataFile('hmda_2008_nv_all-records_codes.zip', '163606', '4.74 MB'),
'2009': HmdaDataFile('hmda_2009_nv_all-records_codes.zip', '168658', '4.74 MB'),
'2011': HmdaDataFile('hmda_2011_nv_all-records_codes.zip', '112858', '3.48 MB'),
'2010': HmdaDataFile('hmda_2010_nv_all-records_codes.zip', '126313', '3.93 MB'),
'2013': HmdaDataFile('hmda_2013_nv_all-records_codes.zip', '151453', '5.33 MB'),
'2012': HmdaDataFile('hmda_2012_nv_all-records_codes.zip', '161343', '5.54 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nv_originated-records_codes.zip', '97926', '3.28 MB'),
'2007': HmdaDataFile('hmda_2007_nv_originated-records_codes.zip', '113248', '3.54 MB'),
'2017': HmdaDataFile('hmda_2017_nv_originated-records_codes.zip', '91540', '1.93 MB'),
'2015': HmdaDataFile('hmda_2015_nv_originated-records_codes.zip', '81624', '2.98 MB'),
'2014': HmdaDataFile('hmda_2014_nv_originated-records_codes.zip', '61757', '2.13 MB'),
'2008': HmdaDataFile('hmda_2008_nv_originated-records_codes.zip', '62592', '1.82 MB'),
'2009': HmdaDataFile('hmda_2009_nv_originated-records_codes.zip', '70049', '1.98 MB'),
'2011': HmdaDataFile('hmda_2011_nv_originated-records_codes.zip', '51445', '1.52 MB'),
'2010': HmdaDataFile('hmda_2010_nv_originated-records_codes.zip', '57173', '1.7 MB'),
'2013': HmdaDataFile('hmda_2013_nv_originated-records_codes.zip', '82011', '2.79 MB'),
'2012': HmdaDataFile('hmda_2012_nv_originated-records_codes.zip', '86528', '2.82 MB')
}
}
},
'pr': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '16754', '783.1 KB'),
'2007': HmdaDataFile('hmda_2007_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '35822', '1.56 MB'),
'2017': HmdaDataFile('hmda_2017_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '12257', '361.87 KB'),
'2015': HmdaDataFile('hmda_2015_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '18782', '893.63 KB'),
'2014': HmdaDataFile('hmda_2014_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '20640', '942.76 KB'),
'2008': HmdaDataFile('hmda_2008_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '36283', '1.5 MB'),
'2009': HmdaDataFile('hmda_2009_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '34669', '1.37 MB'),
'2011': HmdaDataFile('hmda_2011_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '25832', '1.11 MB'),
'2010': HmdaDataFile('hmda_2010_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '25342', '1.08 MB'),
'2013': HmdaDataFile('hmda_2013_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '30144', '1.35 MB'),
'2012': HmdaDataFile('hmda_2012_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '31083', '1.38 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_pr_all-records_labels.zip', '55699', '2.78 MB'),
'2007': HmdaDataFile('hmda_2007_pr_all-records_labels.zip', '141905', '6.23 MB'),
'2017': HmdaDataFile('hmda_2017_pr_all-records_labels.zip', '41775', '1.3 MB'),
'2015': HmdaDataFile('hmda_2015_pr_all-records_labels.zip', '58798', '2.92 MB'),
'2014': HmdaDataFile('hmda_2014_pr_all-records_labels.zip', '69716', '3.45 MB'),
'2008': HmdaDataFile('hmda_2008_pr_all-records_labels.zip', '121121', '5.16 MB'),
'2009': HmdaDataFile('hmda_2009_pr_all-records_labels.zip', '117907', '4.79 MB'),
'2011': HmdaDataFile('hmda_2011_pr_all-records_labels.zip', '85316', '3.93 MB'),
'2010': HmdaDataFile('hmda_2010_pr_all-records_labels.zip', '83046', '3.88 MB'),
'2013': HmdaDataFile('hmda_2013_pr_all-records_labels.zip', '96530', '4.63 MB'),
'2012': HmdaDataFile('hmda_2012_pr_all-records_labels.zip', '87162', '4.17 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_pr_originated-records_labels.zip', '26124', '1.23 MB'),
'2007': HmdaDataFile('hmda_2007_pr_originated-records_labels.zip', '57036', '2.51 MB'),
'2017': HmdaDataFile('hmda_2017_pr_originated-records_labels.zip', '19395', '576.04 KB'),
'2015': HmdaDataFile('hmda_2015_pr_originated-records_labels.zip', '28616', '1.37 MB'),
'2014': HmdaDataFile('hmda_2014_pr_originated-records_labels.zip', '30528', '1.42 MB'),
'2008': HmdaDataFile('hmda_2008_pr_originated-records_labels.zip', '50632', '2.14 MB'),
'2009': HmdaDataFile('hmda_2009_pr_originated-records_labels.zip', '46237', '1.86 MB'),
'2011': HmdaDataFile('hmda_2011_pr_originated-records_labels.zip', '37660', '1.66 MB'),
'2010': HmdaDataFile('hmda_2010_pr_originated-records_labels.zip', '35121', '1.55 MB'),
'2013': HmdaDataFile('hmda_2013_pr_originated-records_labels.zip', '40075', '1.8 MB'),
'2012': HmdaDataFile('hmda_2012_pr_originated-records_labels.zip', '40616', '1.81 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '16754', '536.87 KB'),
'2007': HmdaDataFile('hmda_2007_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '35822', '1.08 MB'),
'2017': HmdaDataFile('hmda_2017_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '12257', '255.16 KB'),
'2015': HmdaDataFile('hmda_2015_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '18782', '614.78 KB'),
'2014': HmdaDataFile('hmda_2014_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '20640', '647.5 KB'),
'2008': HmdaDataFile('hmda_2008_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '36283', '1.05 MB'),
'2009': HmdaDataFile('hmda_2009_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '34669', '973.6 KB'),
'2011': HmdaDataFile('hmda_2011_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '25832', '756.02 KB'),
'2010': HmdaDataFile('hmda_2010_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '25342', '737.14 KB'),
'2013': HmdaDataFile('hmda_2013_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '30144', '925.82 KB'),
'2012': HmdaDataFile('hmda_2012_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '31083', '942.8 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_pr_all-records_codes.zip', '55699', '1.82 MB'),
'2007': HmdaDataFile('hmda_2007_pr_all-records_codes.zip', '141905', '4.18 MB'),
'2017': HmdaDataFile('hmda_2017_pr_all-records_codes.zip', '41775', '873.01 KB'),
'2015': HmdaDataFile('hmda_2015_pr_all-records_codes.zip', '58798', '1.94 MB'),
'2014': HmdaDataFile('hmda_2014_pr_all-records_codes.zip', '69716', '2.27 MB'),
'2008': HmdaDataFile('hmda_2008_pr_all-records_codes.zip', '121121', '3.48 MB'),
'2009': HmdaDataFile('hmda_2009_pr_all-records_codes.zip', '117907', '3.28 MB'),
'2011': HmdaDataFile('hmda_2011_pr_all-records_codes.zip', '85316', '2.56 MB'),
'2010': HmdaDataFile('hmda_2010_pr_all-records_codes.zip', '83046', '2.54 MB'),
'2013': HmdaDataFile('hmda_2013_pr_all-records_codes.zip', '96530', '3.04 MB'),
'2012': HmdaDataFile('hmda_2012_pr_all-records_codes.zip', '87162', '2.75 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_pr_originated-records_codes.zip', '26124', '835.09 KB'),
'2007': HmdaDataFile('hmda_2007_pr_originated-records_codes.zip', '57036', '1.73 MB'),
'2017': HmdaDataFile('hmda_2017_pr_originated-records_codes.zip', '19395', '406.77 KB'),
'2015': HmdaDataFile('hmda_2015_pr_originated-records_codes.zip', '28616', '942.61 KB'),
'2014': HmdaDataFile('hmda_2014_pr_originated-records_codes.zip', '30528', '963.45 KB'),
'2008': HmdaDataFile('hmda_2008_pr_originated-records_codes.zip', '50632', '1.49 MB'),
'2009': HmdaDataFile('hmda_2009_pr_originated-records_codes.zip', '46237', '1.32 MB'),
'2011': HmdaDataFile('hmda_2011_pr_originated-records_codes.zip', '37660', '1.13 MB'),
'2010': HmdaDataFile('hmda_2010_pr_originated-records_codes.zip', '35121', '1.05 MB'),
'2013': HmdaDataFile('hmda_2013_pr_originated-records_codes.zip', '40075', '1.23 MB'),
'2012': HmdaDataFile('hmda_2012_pr_originated-records_codes.zip', '40616', '1.24 MB')
}
}
},
'de': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '21457', '907.27 KB'),
'2007': HmdaDataFile('hmda_2007_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '24785', '1.05 MB'),
'2017': HmdaDataFile('hmda_2017_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '18891', '540.64 KB'),
'2015': HmdaDataFile('hmda_2015_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '18365', '848.09 KB'),
'2014': HmdaDataFile('hmda_2014_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '14206', '642.68 KB'),
'2008': HmdaDataFile('hmda_2008_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '19350', '807.78 KB'),
'2009': HmdaDataFile('hmda_2009_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '26431', '1.05 MB'),
'2011': HmdaDataFile('hmda_2011_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '18557', '764.88 KB'),
'2010': HmdaDataFile('hmda_2010_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '21244', '862.31 KB'),
'2013': HmdaDataFile('hmda_2013_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '22603', '952.26 KB'),
'2012': HmdaDataFile('hmda_2012_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '25372', '1.06 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_de_all-records_labels.zip', '56002', '2.64 MB'),
'2007': HmdaDataFile('hmda_2007_de_all-records_labels.zip', '102001', '4.48 MB'),
'2017': HmdaDataFile('hmda_2017_de_all-records_labels.zip', '49695', '1.64 MB'),
'2015': HmdaDataFile('hmda_2015_de_all-records_labels.zip', '48176', '2.44 MB'),
'2014': HmdaDataFile('hmda_2014_de_all-records_labels.zip', '39784', '2 MB'),
'2008': HmdaDataFile('hmda_2008_de_all-records_labels.zip', '68856', '3.06 MB'),
'2009': HmdaDataFile('hmda_2009_de_all-records_labels.zip', '72398', '3.14 MB'),
'2011': HmdaDataFile('hmda_2011_de_all-records_labels.zip', '52039', '2.43 MB'),
'2010': HmdaDataFile('hmda_2010_de_all-records_labels.zip', '57559', '2.66 MB'),
'2013': HmdaDataFile('hmda_2013_de_all-records_labels.zip', '59314', '2.83 MB'),
'2012': HmdaDataFile('hmda_2012_de_all-records_labels.zip', '65069', '3.06 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_de_originated-records_labels.zip', '27296', '1.18 MB'),
'2007': HmdaDataFile('hmda_2007_de_originated-records_labels.zip', '40053', '1.69 MB'),
'2017': HmdaDataFile('hmda_2017_de_originated-records_labels.zip', '24719', '718.05 KB'),
'2015': HmdaDataFile('hmda_2015_de_originated-records_labels.zip', '24062', '1.14 MB'),
'2014': HmdaDataFile('hmda_2014_de_originated-records_labels.zip', '19181', '882.64 KB'),
'2008': HmdaDataFile('hmda_2008_de_originated-records_labels.zip', '27619', '1.17 MB'),
'2009': HmdaDataFile('hmda_2009_de_originated-records_labels.zip', '32995', '1.31 MB'),
'2011': HmdaDataFile('hmda_2011_de_originated-records_labels.zip', '24377', '1.01 MB'),
'2010': HmdaDataFile('hmda_2010_de_originated-records_labels.zip', '26997', '1.1 MB'),
'2013': HmdaDataFile('hmda_2013_de_originated-records_labels.zip', '30167', '1.28 MB'),
'2012': HmdaDataFile('hmda_2012_de_originated-records_labels.zip', '32868', '1.38 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '21457', '600.87 KB'),
'2007': HmdaDataFile('hmda_2007_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '24785', '699.02 KB'),
'2017': HmdaDataFile('hmda_2017_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '18891', '376.86 KB'),
'2015': HmdaDataFile('hmda_2015_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '18365', '563.01 KB'),
'2014': HmdaDataFile('hmda_2014_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '14206', '424.11 KB'),
'2008': HmdaDataFile('hmda_2008_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '19350', '544.41 KB'),
'2009': HmdaDataFile('hmda_2009_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '26431', '709.46 KB'),
'2011': HmdaDataFile('hmda_2011_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '18557', '497.81 KB'),
'2010': HmdaDataFile('hmda_2010_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '21244', '559.01 KB'),
'2013': HmdaDataFile('hmda_2013_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '22603', '621.51 KB'),
'2012': HmdaDataFile('hmda_2012_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '25372', '689 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_de_all-records_codes.zip', '56002', '1.68 MB'),
'2007': HmdaDataFile('hmda_2007_de_all-records_codes.zip', '102001', '2.91 MB'),
'2017': HmdaDataFile('hmda_2017_de_all-records_codes.zip', '49695', '1.09 MB'),
'2015': HmdaDataFile('hmda_2015_de_all-records_codes.zip', '48176', '1.54 MB'),
'2014': HmdaDataFile('hmda_2014_de_all-records_codes.zip', '39784', '1.26 MB'),
'2008': HmdaDataFile('hmda_2008_de_all-records_codes.zip', '68856', '2 MB'),
'2009': HmdaDataFile('hmda_2009_de_all-records_codes.zip', '72398', '2.07 MB'),
'2011': HmdaDataFile('hmda_2011_de_all-records_codes.zip', '52039', '1.52 MB'),
'2010': HmdaDataFile('hmda_2010_de_all-records_codes.zip', '57559', '1.66 MB'),
'2013': HmdaDataFile('hmda_2013_de_all-records_codes.zip', '59314', '1.77 MB'),
'2012': HmdaDataFile('hmda_2012_de_all-records_codes.zip', '65069', '1.92 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_de_originated-records_codes.zip', '27296', '769.27 KB'),
'2007': HmdaDataFile('hmda_2007_de_originated-records_codes.zip', '40053', '1.12 MB'),
'2017': HmdaDataFile('hmda_2017_de_originated-records_codes.zip', '24719', '494.64 KB'),
'2015': HmdaDataFile('hmda_2015_de_originated-records_codes.zip', '24062', '748.52 KB'),
'2014': HmdaDataFile('hmda_2014_de_originated-records_codes.zip', '19181', '575.97 KB'),
'2008': HmdaDataFile('hmda_2008_de_originated-records_codes.zip', '27619', '775.49 KB'),
'2009': HmdaDataFile('hmda_2009_de_originated-records_codes.zip', '32995', '882.98 KB'),
'2011': HmdaDataFile('hmda_2011_de_originated-records_codes.zip', '24377', '652.39 KB'),
'2010': HmdaDataFile('hmda_2010_de_originated-records_codes.zip', '26997', '709.17 KB'),
'2013': HmdaDataFile('hmda_2013_de_originated-records_codes.zip', '30167', '829.22 KB'),
'2012': HmdaDataFile('hmda_2012_de_originated-records_codes.zip', '32868', '897.42 KB')
}
}
},
'dc': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '15924', '720.67 KB'),
'2007': HmdaDataFile('hmda_2007_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '16676', '696.13 KB'),
'2017': HmdaDataFile('hmda_2017_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '12419', '384.38 KB'),
'2015': HmdaDataFile('hmda_2015_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '14290', '659.64 KB'),
'2014': HmdaDataFile('hmda_2014_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '11399', '518.92 KB'),
'2008': HmdaDataFile('hmda_2008_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '12148', '506.83 KB'),
'2009': HmdaDataFile('hmda_2009_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '18262', '731.02 KB'),
'2011': HmdaDataFile('hmda_2011_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '15280', '677.01 KB'),
'2010': HmdaDataFile('hmda_2010_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '16225', '712.21 KB'),
'2013': HmdaDataFile('hmda_2013_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '17499', '785.67 KB'),
'2012': HmdaDataFile('hmda_2012_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '20716', '908.46 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_dc_all-records_labels.zip', '38399', '1.87 MB'),
'2007': HmdaDataFile('hmda_2007_dc_all-records_labels.zip', '53480', '2.43 MB'),
'2017': HmdaDataFile('hmda_2017_dc_all-records_labels.zip', '30927', '1.07 MB'),
'2015': HmdaDataFile('hmda_2015_dc_all-records_labels.zip', '34958', '1.77 MB'),
'2014': HmdaDataFile('hmda_2014_dc_all-records_labels.zip', '28672', '1.42 MB'),
'2008': HmdaDataFile('hmda_2008_dc_all-records_labels.zip', '33505', '1.55 MB'),
'2009': HmdaDataFile('hmda_2009_dc_all-records_labels.zip', '43540', '1.9 MB'),
'2011': HmdaDataFile('hmda_2011_dc_all-records_labels.zip', '37657', '1.81 MB'),
'2010': HmdaDataFile('hmda_2010_dc_all-records_labels.zip', '38173', '1.84 MB'),
'2013': HmdaDataFile('hmda_2013_dc_all-records_labels.zip', '43020', '2.11 MB'),
'2012': HmdaDataFile('hmda_2012_dc_all-records_labels.zip', '48621', '2.34 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_dc_originated-records_labels.zip', '19324', '891.94 KB'),
'2007': HmdaDataFile('hmda_2007_dc_originated-records_labels.zip', '23948', '998.05 KB'),
'2017': HmdaDataFile('hmda_2017_dc_originated-records_labels.zip', '15414', '483 KB'),
'2015': HmdaDataFile('hmda_2015_dc_originated-records_labels.zip', '17821', '839.5 KB'),
'2014': HmdaDataFile('hmda_2014_dc_originated-records_labels.zip', '14547', '676.21 KB'),
'2008': HmdaDataFile('hmda_2008_dc_originated-records_labels.zip', '15212', '643.03 KB'),
'2009': HmdaDataFile('hmda_2009_dc_originated-records_labels.zip', '20287', '823.05 KB'),
'2011': HmdaDataFile('hmda_2011_dc_originated-records_labels.zip', '18123', '817.1 KB'),
'2010': HmdaDataFile('hmda_2010_dc_originated-records_labels.zip', '18511', '828.68 KB'),
'2013': HmdaDataFile('hmda_2013_dc_originated-records_labels.zip', '21877', '1 MB'),
'2012': HmdaDataFile('hmda_2012_dc_originated-records_labels.zip', '24842', '1.12 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '15924', '457.4 KB'),
'2007': HmdaDataFile('hmda_2007_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '16676', '454.07 KB'),
'2017': HmdaDataFile('hmda_2017_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '12419', '259.55 KB'),
'2015': HmdaDataFile('hmda_2015_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '14290', '415.87 KB'),
'2014': HmdaDataFile('hmda_2014_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '11399', '327.64 KB'),
'2008': HmdaDataFile('hmda_2008_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '12148', '331.88 KB'),
'2009': HmdaDataFile('hmda_2009_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '18262', '485.74 KB'),
'2011': HmdaDataFile('hmda_2011_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '15280', '423.44 KB'),
'2010': HmdaDataFile('hmda_2010_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '16225', '446.48 KB'),
'2013': HmdaDataFile('hmda_2013_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '17499', '493.75 KB'),
'2012': HmdaDataFile('hmda_2012_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '20716', '567.98 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_dc_all-records_codes.zip', '38399', '1.15 MB'),
'2007': HmdaDataFile('hmda_2007_dc_all-records_codes.zip', '53480', '1.55 MB'),
'2017': HmdaDataFile('hmda_2017_dc_all-records_codes.zip', '30927', '685.42 KB'),
'2015': HmdaDataFile('hmda_2015_dc_all-records_codes.zip', '34958', '1.07 MB'),
'2014': HmdaDataFile('hmda_2014_dc_all-records_codes.zip', '28672', '862.7 KB'),
'2008': HmdaDataFile('hmda_2008_dc_all-records_codes.zip', '33505', '982.73 KB'),
'2009': HmdaDataFile('hmda_2009_dc_all-records_codes.zip', '43540', '1.21 MB'),
'2011': HmdaDataFile('hmda_2011_dc_all-records_codes.zip', '37657', '1.1 MB'),
'2010': HmdaDataFile('hmda_2010_dc_all-records_codes.zip', '38173', '1.11 MB'),
'2013': HmdaDataFile('hmda_2013_dc_all-records_codes.zip', '43020', '1.28 MB'),
'2012': HmdaDataFile('hmda_2012_dc_all-records_codes.zip', '48621', '1.42 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_dc_originated-records_codes.zip', '19324', '561.87 KB'),
'2007': HmdaDataFile('hmda_2007_dc_originated-records_codes.zip', '23948', '647.59 KB'),
'2017': HmdaDataFile('hmda_2017_dc_originated-records_codes.zip', '15414', '321.38 KB'),
'2015': HmdaDataFile('hmda_2015_dc_originated-records_codes.zip', '17821', '524.36 KB'),
'2014': HmdaDataFile('hmda_2014_dc_originated-records_codes.zip', '14547', '423.92 KB'),
'2008': HmdaDataFile('hmda_2008_dc_originated-records_codes.zip', '15212', '416.59 KB'),
'2009': HmdaDataFile('hmda_2009_dc_originated-records_codes.zip', '20287', '542.45 KB'),
'2011': HmdaDataFile('hmda_2011_dc_originated-records_codes.zip', '18123', '507.21 KB'),
'2010': HmdaDataFile('hmda_2010_dc_originated-records_codes.zip', '18511', '514.35 KB'),
'2013': HmdaDataFile('hmda_2013_dc_originated-records_codes.zip', '21877', '626.77 KB'),
'2012': HmdaDataFile('hmda_2012_dc_originated-records_codes.zip', '24842', '691.5 KB')
}
}
},
'wi': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '141247', '6.63 MB'),
'2007': HmdaDataFile('hmda_2007_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '140939', '6.7 MB'),
'2017': HmdaDataFile('hmda_2017_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '118149', '3.66 MB'),
'2015': HmdaDataFile('hmda_2015_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '126112', '6.94 MB'),
'2014': HmdaDataFile('hmda_2014_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '99338', '5.08 MB'),
'2008': HmdaDataFile('hmda_2008_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '140787', '6.42 MB'),
'2009': HmdaDataFile('hmda_2009_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '233978', '9.45 MB'),
'2011': HmdaDataFile('hmda_2011_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '159084', '7.03 MB'),
'2010': HmdaDataFile('hmda_2010_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '197382', '8.86 MB'),
'2013': HmdaDataFile('hmda_2013_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '155945', '7.35 MB'),
'2012': HmdaDataFile('hmda_2012_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '219594', '10.08 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wi_all-records_labels.zip', '277224', '14.04 MB'),
'2007': HmdaDataFile('hmda_2007_wi_all-records_labels.zip', '460622', '22.04 MB'),
'2017': HmdaDataFile('hmda_2017_wi_all-records_labels.zip', '237542', '8.25 MB'),
'2015': HmdaDataFile('hmda_2015_wi_all-records_labels.zip', '250077', '14.91 MB'),
'2014': HmdaDataFile('hmda_2014_wi_all-records_labels.zip', '207239', '11.5 MB'),
'2008': HmdaDataFile('hmda_2008_wi_all-records_labels.zip', '359119', '16.92 MB'),
'2009': HmdaDataFile('hmda_2009_wi_all-records_labels.zip', '475760', '20.37 MB'),
'2011': HmdaDataFile('hmda_2011_wi_all-records_labels.zip', '324321', '15.85 MB'),
'2010': HmdaDataFile('hmda_2010_wi_all-records_labels.zip', '394638', '19.35 MB'),
'2013': HmdaDataFile('hmda_2013_wi_all-records_labels.zip', '306118', '15.66 MB'),
'2012': HmdaDataFile('hmda_2012_wi_all-records_labels.zip', '398029', '19.93 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wi_originated-records_labels.zip', '168678', '8.06 MB'),
'2007': HmdaDataFile('hmda_2007_wi_originated-records_labels.zip', '211916', '10.01 MB'),
'2017': HmdaDataFile('hmda_2017_wi_originated-records_labels.zip', '146251', '4.59 MB'),
'2015': HmdaDataFile('hmda_2015_wi_originated-records_labels.zip', '153515', '8.58 MB'),
'2014': HmdaDataFile('hmda_2014_wi_originated-records_labels.zip', '124916', '6.51 MB'),
'2008': HmdaDataFile('hmda_2008_wi_originated-records_labels.zip', '187234', '8.58 MB'),
'2009': HmdaDataFile('hmda_2009_wi_originated-records_labels.zip', '270190', '11.1 MB'),
'2011': HmdaDataFile('hmda_2011_wi_originated-records_labels.zip', '188288', '8.56 MB'),
'2010': HmdaDataFile('hmda_2010_wi_originated-records_labels.zip', '228537', '10.45 MB'),
'2013': HmdaDataFile('hmda_2013_wi_originated-records_labels.zip', '188524', '9.07 MB'),
'2012': HmdaDataFile('hmda_2012_wi_originated-records_labels.zip', '253728', '11.88 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '141247', '4.45 MB'),
'2007': HmdaDataFile('hmda_2007_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '140939', '4.6 MB'),
'2017': HmdaDataFile('hmda_2017_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '118149', '2.59 MB'),
'2015': HmdaDataFile('hmda_2015_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '126112', '4.69 MB'),
'2014': HmdaDataFile('hmda_2014_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '99338', '3.39 MB'),
'2008': HmdaDataFile('hmda_2008_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '140787', '4.45 MB'),
'2009': HmdaDataFile('hmda_2009_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '233978', '6.72 MB'),
'2011': HmdaDataFile('hmda_2011_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '159084', '4.64 MB'),
'2010': HmdaDataFile('hmda_2010_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '197382', '5.93 MB'),
'2013': HmdaDataFile('hmda_2013_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '155945', '4.93 MB'),
'2012': HmdaDataFile('hmda_2012_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '219594', '6.71 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wi_all-records_codes.zip', '277224', '9.08 MB'),
'2007': HmdaDataFile('hmda_2007_wi_all-records_codes.zip', '460622', '14.82 MB'),
'2017': HmdaDataFile('hmda_2017_wi_all-records_codes.zip', '237542', '5.41 MB'),
'2015': HmdaDataFile('hmda_2015_wi_all-records_codes.zip', '250077', '9.7 MB'),
'2014': HmdaDataFile('hmda_2014_wi_all-records_codes.zip', '207239', '7.39 MB'),
'2008': HmdaDataFile('hmda_2008_wi_all-records_codes.zip', '359119', '11.47 MB'),
'2009': HmdaDataFile('hmda_2009_wi_all-records_codes.zip', '475760', '14.08 MB'),
'2011': HmdaDataFile('hmda_2011_wi_all-records_codes.zip', '324321', '10.14 MB'),
'2010': HmdaDataFile('hmda_2010_wi_all-records_codes.zip', '394638', '12.55 MB'),
'2013': HmdaDataFile('hmda_2013_wi_all-records_codes.zip', '306118', '10.17 MB'),
'2012': HmdaDataFile('hmda_2012_wi_all-records_codes.zip', '398029', '12.88 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wi_originated-records_codes.zip', '168678', '5.37 MB'),
'2007': HmdaDataFile('hmda_2007_wi_originated-records_codes.zip', '211916', '6.85 MB'),
'2017': HmdaDataFile('hmda_2017_wi_originated-records_codes.zip', '146251', '3.19 MB'),
'2015': HmdaDataFile('hmda_2015_wi_originated-records_codes.zip', '153515', '5.74 MB'),
'2014': HmdaDataFile('hmda_2014_wi_originated-records_codes.zip', '124916', '4.3 MB'),
'2008': HmdaDataFile('hmda_2008_wi_originated-records_codes.zip', '187234', '5.9 MB'),
'2009': HmdaDataFile('hmda_2009_wi_originated-records_codes.zip', '270190', '7.86 MB'),
'2011': HmdaDataFile('hmda_2011_wi_originated-records_codes.zip', '188288', '5.6 MB'),
'2010': HmdaDataFile('hmda_2010_wi_originated-records_codes.zip', '228537', '6.95 MB'),
'2013': HmdaDataFile('hmda_2013_wi_originated-records_codes.zip', '188524', '6.03 MB'),
'2012': HmdaDataFile('hmda_2012_wi_originated-records_codes.zip', '253728', '7.85 MB')
}
}
},
'wv': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '23752', '1.15 MB'),
'2007': HmdaDataFile('hmda_2007_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '32932', '1.43 MB'),
'2017': HmdaDataFile('hmda_2017_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '22635', '775.09 KB'),
'2015': HmdaDataFile('hmda_2015_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '22216', '1.16 MB'),
'2014': HmdaDataFile('hmda_2014_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '20952', '925.27 KB'),
'2008': HmdaDataFile('hmda_2008_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '27892', '1.25 MB'),
'2009': HmdaDataFile('hmda_2009_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '31766', '1.38 MB'),
'2011': HmdaDataFile('hmda_2011_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '25662', '1.09 MB'),
'2010': HmdaDataFile('hmda_2010_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '26690', '1.13 MB'),
'2013': HmdaDataFile('hmda_2013_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '29841', '1.37 MB'),
'2012': HmdaDataFile('hmda_2012_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '30400', '1.36 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wv_all-records_labels.zip', '59932', '3.14 MB'),
'2007': HmdaDataFile('hmda_2007_wv_all-records_labels.zip', '108639', '4.82 MB'),
'2017': HmdaDataFile('hmda_2017_wv_all-records_labels.zip', '56407', '2.02 MB'),
'2015': HmdaDataFile('hmda_2015_wv_all-records_labels.zip', '56189', '3.18 MB'),
'2014': HmdaDataFile('hmda_2014_wv_all-records_labels.zip', '53804', '2.59 MB'),
'2008': HmdaDataFile('hmda_2008_wv_all-records_labels.zip', '82256', '3.81 MB'),
'2009': HmdaDataFile('hmda_2009_wv_all-records_labels.zip', '78726', '3.62 MB'),
'2011': HmdaDataFile('hmda_2011_wv_all-records_labels.zip', '65054', '3.02 MB'),
'2010': HmdaDataFile('hmda_2010_wv_all-records_labels.zip', '67127', '3.14 MB'),
'2013': HmdaDataFile('hmda_2013_wv_all-records_labels.zip', '71730', '3.58 MB'),
'2012': HmdaDataFile('hmda_2012_wv_all-records_labels.zip', '71668', '3.48 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wv_originated-records_labels.zip', '31084', '1.54 MB'),
'2007': HmdaDataFile('hmda_2007_wv_originated-records_labels.zip', '49942', '2.16 MB'),
'2017': HmdaDataFile('hmda_2017_wv_originated-records_labels.zip', '29490', '1.02 MB'),
'2015': HmdaDataFile('hmda_2015_wv_originated-records_labels.zip', '29892', '1.59 MB'),
'2014': HmdaDataFile('hmda_2014_wv_originated-records_labels.zip', '28288', '1.28 MB'),
'2008': HmdaDataFile('hmda_2008_wv_originated-records_labels.zip', '39173', '1.77 MB'),
'2009': HmdaDataFile('hmda_2009_wv_originated-records_labels.zip', '40090', '1.78 MB'),
'2011': HmdaDataFile('hmda_2011_wv_originated-records_labels.zip', '33089', '1.44 MB'),
'2010': HmdaDataFile('hmda_2010_wv_originated-records_labels.zip', '34262', '1.49 MB'),
'2013': HmdaDataFile('hmda_2013_wv_originated-records_labels.zip', '38630', '1.82 MB'),
'2012': HmdaDataFile('hmda_2012_wv_originated-records_labels.zip', '38585', '1.77 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '23752', '731.44 KB'),
'2007': HmdaDataFile('hmda_2007_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '32932', '927.05 KB'),
'2017': HmdaDataFile('hmda_2017_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '22635', '535.47 KB'),
'2015': HmdaDataFile('hmda_2015_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '22216', '750.99 KB'),
'2014': HmdaDataFile('hmda_2014_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '20952', '586.49 KB'),
'2008': HmdaDataFile('hmda_2008_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '27892', '805.27 KB'),
'2009': HmdaDataFile('hmda_2009_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '31766', '906.21 KB'),
'2011': HmdaDataFile('hmda_2011_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '25662', '682.61 KB'),
'2010': HmdaDataFile('hmda_2010_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '26690', '718.48 KB'),
'2013': HmdaDataFile('hmda_2013_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '29841', '869.85 KB'),
'2012': HmdaDataFile('hmda_2012_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '30400', '859.14 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wv_all-records_codes.zip', '59932', '1.9 MB'),
'2007': HmdaDataFile('hmda_2007_wv_all-records_codes.zip', '108639', '3.05 MB'),
'2017': HmdaDataFile('hmda_2017_wv_all-records_codes.zip', '56407', '1.32 MB'),
'2015': HmdaDataFile('hmda_2015_wv_all-records_codes.zip', '56189', '1.97 MB'),
'2014': HmdaDataFile('hmda_2014_wv_all-records_codes.zip', '53804', '1.57 MB'),
'2008': HmdaDataFile('hmda_2008_wv_all-records_codes.zip', '82256', '2.4 MB'),
'2009': HmdaDataFile('hmda_2009_wv_all-records_codes.zip', '78726', '2.31 MB'),
'2011': HmdaDataFile('hmda_2011_wv_all-records_codes.zip', '65054', '1.83 MB'),
'2010': HmdaDataFile('hmda_2010_wv_all-records_codes.zip', '67127', '1.91 MB'),
'2013': HmdaDataFile('hmda_2013_wv_all-records_codes.zip', '71730', '2.18 MB'),
'2012': HmdaDataFile('hmda_2012_wv_all-records_codes.zip', '71668', '2.12 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wv_originated-records_codes.zip', '31084', '966.37 KB'),
'2007': HmdaDataFile('hmda_2007_wv_originated-records_codes.zip', '49942', '1.39 MB'),
'2017': HmdaDataFile('hmda_2017_wv_originated-records_codes.zip', '29490', '694.72 KB'),
'2015': HmdaDataFile('hmda_2015_wv_originated-records_codes.zip', '29892', '1.02 MB'),
'2014': HmdaDataFile('hmda_2014_wv_originated-records_codes.zip', '28288', '806.54 KB'),
'2008': HmdaDataFile('hmda_2008_wv_originated-records_codes.zip', '39173', '1.14 MB'),
'2009': HmdaDataFile('hmda_2009_wv_originated-records_codes.zip', '40090', '1.16 MB'),
'2011': HmdaDataFile('hmda_2011_wv_originated-records_codes.zip', '33089', '896.24 KB'),
'2010': HmdaDataFile('hmda_2010_wv_originated-records_codes.zip', '34262', '933.86 KB'),
'2013': HmdaDataFile('hmda_2013_wv_originated-records_codes.zip', '38630', '1.14 MB'),
'2012': HmdaDataFile('hmda_2012_wv_originated-records_codes.zip', '38585', '1.11 MB')
}
}
},
'hi': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '25790', '1.11 MB'),
'2007': HmdaDataFile('hmda_2007_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '26443', '1.11 MB'),
'2017': HmdaDataFile('hmda_2017_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '20146', '641.16 KB'),
'2015': HmdaDataFile('hmda_2015_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '21679', '993.8 KB'),
'2014': HmdaDataFile('hmda_2014_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '16100', '691.11 KB'),
'2008': HmdaDataFile('hmda_2008_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '19577', '808.1 KB'),
'2009': HmdaDataFile('hmda_2009_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '31476', '1.18 MB'),
'2011': HmdaDataFile('hmda_2011_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '22003', '927.28 KB'),
'2010': HmdaDataFile('hmda_2010_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '24636', '1.02 MB'),
'2013': HmdaDataFile('hmda_2013_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '26347', '1.13 MB'),
'2012': HmdaDataFile('hmda_2012_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '31689', '1.34 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_hi_all-records_labels.zip', '57857', '2.66 MB'),
'2007': HmdaDataFile('hmda_2007_hi_all-records_labels.zip', '97609', '4.13 MB'),
'2017': HmdaDataFile('hmda_2017_hi_all-records_labels.zip', '44868', '1.52 MB'),
'2015': HmdaDataFile('hmda_2015_hi_all-records_labels.zip', '48255', '2.42 MB'),
'2014': HmdaDataFile('hmda_2014_hi_all-records_labels.zip', '39152', '1.81 MB'),
'2008': HmdaDataFile('hmda_2008_hi_all-records_labels.zip', '58044', '2.51 MB'),
'2009': HmdaDataFile('hmda_2009_hi_all-records_labels.zip', '72505', '2.88 MB'),
'2011': HmdaDataFile('hmda_2011_hi_all-records_labels.zip', '52036', '2.39 MB'),
'2010': HmdaDataFile('hmda_2010_hi_all-records_labels.zip', '57360', '2.61 MB'),
'2013': HmdaDataFile('hmda_2013_hi_all-records_labels.zip', '62718', '2.93 MB'),
'2012': HmdaDataFile('hmda_2012_hi_all-records_labels.zip', '69807', '3.22 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_hi_originated-records_labels.zip', '32822', '1.42 MB'),
'2007': HmdaDataFile('hmda_2007_hi_originated-records_labels.zip', '39270', '1.64 MB'),
'2017': HmdaDataFile('hmda_2017_hi_originated-records_labels.zip', '26553', '842.99 KB'),
'2015': HmdaDataFile('hmda_2015_hi_originated-records_labels.zip', '28778', '1.34 MB'),
'2014': HmdaDataFile('hmda_2014_hi_originated-records_labels.zip', '22377', '977.23 KB'),
'2008': HmdaDataFile('hmda_2008_hi_originated-records_labels.zip', '25770', '1.07 MB'),
'2009': HmdaDataFile('hmda_2009_hi_originated-records_labels.zip', '36594', '1.37 MB'),
'2011': HmdaDataFile('hmda_2011_hi_originated-records_labels.zip', '27526', '1.18 MB'),
'2010': HmdaDataFile('hmda_2010_hi_originated-records_labels.zip', '29807', '1.25 MB'),
'2013': HmdaDataFile('hmda_2013_hi_originated-records_labels.zip', '36581', '1.6 MB'),
'2012': HmdaDataFile('hmda_2012_hi_originated-records_labels.zip', '40668', '1.75 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '25790', '747.28 KB'),
'2007': HmdaDataFile('hmda_2007_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '26443', '769.57 KB'),
'2017': HmdaDataFile('hmda_2017_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '20146', '442.43 KB'),
'2015': HmdaDataFile('hmda_2015_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '21679', '682.34 KB'),
'2014': HmdaDataFile('hmda_2014_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '16100', '467.14 KB'),
'2008': HmdaDataFile('hmda_2008_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '19577', '565.45 KB'),
'2009': HmdaDataFile('hmda_2009_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '31476', '829.34 KB'),
'2011': HmdaDataFile('hmda_2011_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '22003', '617.15 KB'),
'2010': HmdaDataFile('hmda_2010_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '24636', '682.86 KB'),
'2013': HmdaDataFile('hmda_2013_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '26347', '767.75 KB'),
'2012': HmdaDataFile('hmda_2012_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '31689', '898.68 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_hi_all-records_codes.zip', '57857', '1.73 MB'),
'2007': HmdaDataFile('hmda_2007_hi_all-records_codes.zip', '97609', '2.81 MB'),
'2017': HmdaDataFile('hmda_2017_hi_all-records_codes.zip', '44868', '991.92 KB'),
'2015': HmdaDataFile('hmda_2015_hi_all-records_codes.zip', '48255', '1.59 MB'),
'2014': HmdaDataFile('hmda_2014_hi_all-records_codes.zip', '39152', '1.17 MB'),
'2008': HmdaDataFile('hmda_2008_hi_all-records_codes.zip', '58044', '1.7 MB'),
'2009': HmdaDataFile('hmda_2009_hi_all-records_codes.zip', '72505', '1.97 MB'),
'2011': HmdaDataFile('hmda_2011_hi_all-records_codes.zip', '52036', '1.54 MB'),
'2010': HmdaDataFile('hmda_2010_hi_all-records_codes.zip', '57360', '1.69 MB'),
'2013': HmdaDataFile('hmda_2013_hi_all-records_codes.zip', '62718', '1.9 MB'),
'2012': HmdaDataFile('hmda_2012_hi_all-records_codes.zip', '69807', '2.09 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_hi_originated-records_codes.zip', '32822', '955.73 KB'),
'2007': HmdaDataFile('hmda_2007_hi_originated-records_codes.zip', '39270', '1.15 MB'),
'2017': HmdaDataFile('hmda_2017_hi_originated-records_codes.zip', '26553', '577.19 KB'),
'2015': HmdaDataFile('hmda_2015_hi_originated-records_codes.zip', '28778', '909.22 KB'),
'2014': HmdaDataFile('hmda_2014_hi_originated-records_codes.zip', '22377', '650.88 KB'),
'2008': HmdaDataFile('hmda_2008_hi_originated-records_codes.zip', '25770', '741.44 KB'),
'2009': HmdaDataFile('hmda_2009_hi_originated-records_codes.zip', '36594', '946.61 KB'),
'2011': HmdaDataFile('hmda_2011_hi_originated-records_codes.zip', '27526', '777.03 KB'),
'2010': HmdaDataFile('hmda_2010_hi_originated-records_codes.zip', '29807', '833.67 KB'),
'2013': HmdaDataFile('hmda_2013_hi_originated-records_codes.zip', '36581', '1.06 MB'),
'2012': HmdaDataFile('hmda_2012_hi_originated-records_codes.zip', '40668', '1.17 MB')
}
}
},
'ok': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '61711', '2.99 MB'),
'2007': HmdaDataFile('hmda_2007_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '71771', '3.36 MB'),
'2017': HmdaDataFile('hmda_2017_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '56021', '1.71 MB'),
'2015': HmdaDataFile('hmda_2015_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '57409', '3.04 MB'),
'2014': HmdaDataFile('hmda_2014_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '53820', '2.56 MB'),
'2008': HmdaDataFile('hmda_2008_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '64839', '3.04 MB'),
'2009': HmdaDataFile('hmda_2009_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '83962', '3.72 MB'),
'2011': HmdaDataFile('hmda_2011_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '62083', '2.92 MB'),
'2010': HmdaDataFile('hmda_2010_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '68205', '3.24 MB'),
'2013': HmdaDataFile('hmda_2013_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '68275', '3.28 MB'),
'2012': HmdaDataFile('hmda_2012_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '77297', '3.66 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ok_all-records_labels.zip', '165463', '8.55 MB'),
'2007': HmdaDataFile('hmda_2007_ok_all-records_labels.zip', '250763', '12.15 MB'),
'2017': HmdaDataFile('hmda_2017_ok_all-records_labels.zip', '150838', '5.09 MB'),
'2015': HmdaDataFile('hmda_2015_ok_all-records_labels.zip', '152804', '8.67 MB'),
'2014': HmdaDataFile('hmda_2014_ok_all-records_labels.zip', '146824', '7.59 MB'),
'2008': HmdaDataFile('hmda_2008_ok_all-records_labels.zip', '194552', '9.58 MB'),
'2009': HmdaDataFile('hmda_2009_ok_all-records_labels.zip', '219393', '10.29 MB'),
'2011': HmdaDataFile('hmda_2011_ok_all-records_labels.zip', '163798', '8.6 MB'),
'2010': HmdaDataFile('hmda_2010_ok_all-records_labels.zip', '177023', '9.25 MB'),
'2013': HmdaDataFile('hmda_2013_ok_all-records_labels.zip', '180860', '9.5 MB'),
'2012': HmdaDataFile('hmda_2012_ok_all-records_labels.zip', '189005', '9.82 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ok_originated-records_labels.zip', '84730', '4.21 MB'),
'2007': HmdaDataFile('hmda_2007_ok_originated-records_labels.zip', '107739', '5.16 MB'),
'2017': HmdaDataFile('hmda_2017_ok_originated-records_labels.zip', '78302', '2.43 MB'),
'2015': HmdaDataFile('hmda_2015_ok_originated-records_labels.zip', '79611', '4.27 MB'),
'2014': HmdaDataFile('hmda_2014_ok_originated-records_labels.zip', '76088', '3.74 MB'),
'2008': HmdaDataFile('hmda_2008_ok_originated-records_labels.zip', '90679', '4.34 MB'),
'2009': HmdaDataFile('hmda_2009_ok_originated-records_labels.zip', '105102', '4.8 MB'),
'2011': HmdaDataFile('hmda_2011_ok_originated-records_labels.zip', '81562', '3.94 MB'),
'2010': HmdaDataFile('hmda_2010_ok_originated-records_labels.zip', '87420', '4.24 MB'),
'2013': HmdaDataFile('hmda_2013_ok_originated-records_labels.zip', '91830', '4.49 MB'),
'2012': HmdaDataFile('hmda_2012_ok_originated-records_labels.zip', '98582', '4.73 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '61711', '2.07 MB'),
'2007': HmdaDataFile('hmda_2007_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '71771', '2.36 MB'),
'2017': HmdaDataFile('hmda_2017_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '56021', '1.24 MB'),
'2015': HmdaDataFile('hmda_2015_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '57409', '2.11 MB'),
'2014': HmdaDataFile('hmda_2014_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '53820', '1.76 MB'),
'2008': HmdaDataFile('hmda_2008_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '64839', '2.14 MB'),
'2009': HmdaDataFile('hmda_2009_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '83962', '2.65 MB'),
'2011': HmdaDataFile('hmda_2011_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '62083', '1.97 MB'),
'2010': HmdaDataFile('hmda_2010_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '68205', '2.2 MB'),
'2013': HmdaDataFile('hmda_2013_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '68275', '2.25 MB'),
'2012': HmdaDataFile('hmda_2012_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '77297', '2.49 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ok_all-records_codes.zip', '165463', '5.67 MB'),
'2007': HmdaDataFile('hmda_2007_ok_all-records_codes.zip', '250763', '8.32 MB'),
'2017': HmdaDataFile('hmda_2017_ok_all-records_codes.zip', '150838', '3.38 MB'),
'2015': HmdaDataFile('hmda_2015_ok_all-records_codes.zip', '152804', '5.81 MB'),
'2014': HmdaDataFile('hmda_2014_ok_all-records_codes.zip', '146824', '5.04 MB'),
'2008': HmdaDataFile('hmda_2008_ok_all-records_codes.zip', '194552', '6.58 MB'),
'2009': HmdaDataFile('hmda_2009_ok_all-records_codes.zip', '219393', '7.12 MB'),
'2011': HmdaDataFile('hmda_2011_ok_all-records_codes.zip', '163798', '5.66 MB'),
'2010': HmdaDataFile('hmda_2010_ok_all-records_codes.zip', '177023', '6.13 MB'),
'2013': HmdaDataFile('hmda_2013_ok_all-records_codes.zip', '180860', '6.33 MB'),
'2012': HmdaDataFile('hmda_2012_ok_all-records_codes.zip', '189005', '6.53 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ok_originated-records_codes.zip', '84730', '2.89 MB'),
'2007': HmdaDataFile('hmda_2007_ok_originated-records_codes.zip', '107739', '3.62 MB'),
'2017': HmdaDataFile('hmda_2017_ok_originated-records_codes.zip', '78302', '1.73 MB'),
'2015': HmdaDataFile('hmda_2015_ok_originated-records_codes.zip', '79611', '2.95 MB'),
'2014': HmdaDataFile('hmda_2014_ok_originated-records_codes.zip', '76088', '2.55 MB'),
'2008': HmdaDataFile('hmda_2008_ok_originated-records_codes.zip', '90679', '3.04 MB'),
'2009': HmdaDataFile('hmda_2009_ok_originated-records_codes.zip', '105102', '3.39 MB'),
'2011': HmdaDataFile('hmda_2011_ok_originated-records_codes.zip', '81562', '2.64 MB'),
'2010': HmdaDataFile('hmda_2010_ok_originated-records_codes.zip', '87420', '2.87 MB'),
'2013': HmdaDataFile('hmda_2013_ok_originated-records_codes.zip', '91830', '3.05 MB'),
'2012': HmdaDataFile('hmda_2012_ok_originated-records_codes.zip', '98582', '3.19 MB')
}
}
},
'fl': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '415338', '21.43 MB'),
'2007': HmdaDataFile('hmda_2007_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '501111', '25.12 MB'),
'2017': HmdaDataFile('hmda_2017_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '396688', '13.19 MB'),
'2015': HmdaDataFile('hmda_2015_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '347883', '21.05 MB'),
'2014': HmdaDataFile('hmda_2014_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '272904', '13.99 MB'),
'2008': HmdaDataFile('hmda_2008_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '258117', '12.67 MB'),
'2009': HmdaDataFile('hmda_2009_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '263772', '12.4 MB'),
'2011': HmdaDataFile('hmda_2011_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '231239', '11.59 MB'),
'2010': HmdaDataFile('hmda_2010_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '240644', '12.01 MB'),
'2013': HmdaDataFile('hmda_2013_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '365896', '19.08 MB'),
'2012': HmdaDataFile('hmda_2012_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '373288', '19.2 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_fl_all-records_labels.zip', '1043942', '58 MB'),
'2007': HmdaDataFile('hmda_2007_fl_all-records_labels.zip', '2006660', '102.81 MB'),
'2017': HmdaDataFile('hmda_2017_fl_all-records_labels.zip', '1018763', '38.28 MB'),
'2015': HmdaDataFile('hmda_2015_fl_all-records_labels.zip', '893206', '57.89 MB'),
'2014': HmdaDataFile('hmda_2014_fl_all-records_labels.zip', '732825', '40.79 MB'),
'2008': HmdaDataFile('hmda_2008_fl_all-records_labels.zip', '962944', '49.47 MB'),
'2009': HmdaDataFile('hmda_2009_fl_all-records_labels.zip', '806975', '40.26 MB'),
'2011': HmdaDataFile('hmda_2011_fl_all-records_labels.zip', '647776', '35.66 MB'),
'2010': HmdaDataFile('hmda_2010_fl_all-records_labels.zip', '675688', '37.38 MB'),
'2013': HmdaDataFile('hmda_2013_fl_all-records_labels.zip', '948672', '53.81 MB'),
'2012': HmdaDataFile('hmda_2012_fl_all-records_labels.zip', '919923', '51.66 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_fl_originated-records_labels.zip', '506394', '26.46 MB'),
'2007': HmdaDataFile('hmda_2007_fl_originated-records_labels.zip', '735174', '36.57 MB'),
'2017': HmdaDataFile('hmda_2017_fl_originated-records_labels.zip', '492702', '16.61 MB'),
'2015': HmdaDataFile('hmda_2015_fl_originated-records_labels.zip', '434779', '26.78 MB'),
'2014': HmdaDataFile('hmda_2014_fl_originated-records_labels.zip', '349696', '18.14 MB'),
'2008': HmdaDataFile('hmda_2008_fl_originated-records_labels.zip', '344859', '17 MB'),
'2009': HmdaDataFile('hmda_2009_fl_originated-records_labels.zip', '318689', '15.06 MB'),
'2011': HmdaDataFile('hmda_2011_fl_originated-records_labels.zip', '288632', '14.64 MB'),
'2010': HmdaDataFile('hmda_2010_fl_originated-records_labels.zip', '292476', '14.8 MB'),
'2013': HmdaDataFile('hmda_2013_fl_originated-records_labels.zip', '467201', '24.7 MB'),
'2012': HmdaDataFile('hmda_2012_fl_originated-records_labels.zip', '462049', '24.13 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '415338', '15.17 MB'),
'2007': HmdaDataFile('hmda_2007_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '501111', '17.95 MB'),
'2017': HmdaDataFile('hmda_2017_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '396688', '9.14 MB'),
'2015': HmdaDataFile('hmda_2015_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '347883', '14.62 MB'),
'2014': HmdaDataFile('hmda_2014_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '272904', '9.87 MB'),
'2008': HmdaDataFile('hmda_2008_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '258117', '8.93 MB'),
'2009': HmdaDataFile('hmda_2009_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '263772', '8.88 MB'),
'2011': HmdaDataFile('hmda_2011_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '231239', '8 MB'),
'2010': HmdaDataFile('hmda_2010_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '240644', '8.28 MB'),
'2013': HmdaDataFile('hmda_2013_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '365896', '13.41 MB'),
'2012': HmdaDataFile('hmda_2012_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '373288', '13.48 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_fl_all-records_codes.zip', '1043942', '40.03 MB'),
'2007': HmdaDataFile('hmda_2007_fl_all-records_codes.zip', '2006660', '71.9 MB'),
'2017': HmdaDataFile('hmda_2017_fl_all-records_codes.zip', '1018763', '24.91 MB'),
'2015': HmdaDataFile('hmda_2015_fl_all-records_codes.zip', '893206', '38.26 MB'),
'2014': HmdaDataFile('hmda_2014_fl_all-records_codes.zip', '732825', '28.16 MB'),
'2008': HmdaDataFile('hmda_2008_fl_all-records_codes.zip', '962944', '34.41 MB'),
'2009': HmdaDataFile('hmda_2009_fl_all-records_codes.zip', '806975', '28.11 MB'),
'2011': HmdaDataFile('hmda_2011_fl_all-records_codes.zip', '647776', '23.83 MB'),
'2010': HmdaDataFile('hmda_2010_fl_all-records_codes.zip', '675688', '24.98 MB'),
'2013': HmdaDataFile('hmda_2013_fl_all-records_codes.zip', '948672', '36.68 MB'),
'2012': HmdaDataFile('hmda_2012_fl_all-records_codes.zip', '919923', '35.16 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_fl_originated-records_codes.zip', '506394', '18.65 MB'),
'2007': HmdaDataFile('hmda_2007_fl_originated-records_codes.zip', '735174', '26.09 MB'),
'2017': HmdaDataFile('hmda_2017_fl_originated-records_codes.zip', '492702', '11.34 MB'),
'2015': HmdaDataFile('hmda_2015_fl_originated-records_codes.zip', '434779', '18.42 MB'),
'2014': HmdaDataFile('hmda_2014_fl_originated-records_codes.zip', '349696', '12.69 MB'),
'2008': HmdaDataFile('hmda_2008_fl_originated-records_codes.zip', '344859', '11.94 MB'),
'2009': HmdaDataFile('hmda_2009_fl_originated-records_codes.zip', '318689', '10.73 MB'),
'2011': HmdaDataFile('hmda_2011_fl_originated-records_codes.zip', '288632', '10.03 MB'),
'2010': HmdaDataFile('hmda_2010_fl_originated-records_codes.zip', '292476', '10.16 MB'),
'2013': HmdaDataFile('hmda_2013_fl_originated-records_codes.zip', '467201', '17.24 MB'),
'2012': HmdaDataFile('hmda_2012_fl_originated-records_codes.zip', '462049', '16.84 MB')
}
}
},
'wy': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '12342', '483.6 KB'),
'2007': HmdaDataFile('hmda_2007_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '13857', '521.55 KB'),
'2017': HmdaDataFile('hmda_2017_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '10510', '270.62 KB'),
'2015': HmdaDataFile('hmda_2015_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '11880', '503.22 KB'),
'2014': HmdaDataFile('hmda_2014_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '10422', '425.8 KB'),
'2008': HmdaDataFile('hmda_2008_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '12387', '474.69 KB'),
'2009': HmdaDataFile('hmda_2009_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '16912', '594.41 KB'),
'2011': HmdaDataFile('hmda_2011_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '11423', '470.67 KB'),
'2010': HmdaDataFile('hmda_2010_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '12694', '489.69 KB'),
'2013': HmdaDataFile('hmda_2013_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '14178', '537.95 KB'),
'2012': HmdaDataFile('hmda_2012_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '15410', '568.4 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wy_all-records_labels.zip', '30163', '1.29 MB'),
'2007': HmdaDataFile('hmda_2007_wy_all-records_labels.zip', '48234', '1.94 MB'),
'2017': HmdaDataFile('hmda_2017_wy_all-records_labels.zip', '26154', '743.51 KB'),
'2015': HmdaDataFile('hmda_2015_wy_all-records_labels.zip', '28641', '1.34 MB'),
'2014': HmdaDataFile('hmda_2014_wy_all-records_labels.zip', '25049', '1.13 MB'),
'2008': HmdaDataFile('hmda_2008_wy_all-records_labels.zip', '35748', '1.45 MB'),
'2009': HmdaDataFile('hmda_2009_wy_all-records_labels.zip', '41659', '1.59 MB'),
'2011': HmdaDataFile('hmda_2011_wy_all-records_labels.zip', '28465', '1.31 MB'),
'2010': HmdaDataFile('hmda_2010_wy_all-records_labels.zip', '32035', '1.37 MB'),
'2013': HmdaDataFile('hmda_2013_wy_all-records_labels.zip', '32956', '1.37 MB'),
'2012': HmdaDataFile('hmda_2012_wy_all-records_labels.zip', '34092', '1.4 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wy_originated-records_labels.zip', '15484', '616.35 KB'),
'2007': HmdaDataFile('hmda_2007_wy_originated-records_labels.zip', '21052', '815.34 KB'),
'2017': HmdaDataFile('hmda_2017_wy_originated-records_labels.zip', '13702', '356.27 KB'),
'2015': HmdaDataFile('hmda_2015_wy_originated-records_labels.zip', '15107', '651.6 KB'),
'2014': HmdaDataFile('hmda_2014_wy_originated-records_labels.zip', '13556', '566.41 KB'),
'2008': HmdaDataFile('hmda_2008_wy_originated-records_labels.zip', '16892', '658.73 KB'),
'2009': HmdaDataFile('hmda_2009_wy_originated-records_labels.zip', '20290', '727.57 KB'),
'2011': HmdaDataFile('hmda_2011_wy_originated-records_labels.zip', '14507', '613.35 KB'),
'2010': HmdaDataFile('hmda_2010_wy_originated-records_labels.zip', '15602', '616.08 KB'),
'2013': HmdaDataFile('hmda_2013_wy_originated-records_labels.zip', '17900', '691.83 KB'),
'2012': HmdaDataFile('hmda_2012_wy_originated-records_labels.zip', '19114', '723.04 KB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '12342', '312.27 KB'),
'2007': HmdaDataFile('hmda_2007_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '13857', '338.82 KB'),
'2017': HmdaDataFile('hmda_2017_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '10510', '187.36 KB'),
'2015': HmdaDataFile('hmda_2015_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '11880', '319.17 KB'),
'2014': HmdaDataFile('hmda_2014_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '10422', '275.68 KB'),
'2008': HmdaDataFile('hmda_2008_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '12387', '309.95 KB'),
'2009': HmdaDataFile('hmda_2009_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '16912', '397.25 KB'),
'2011': HmdaDataFile('hmda_2011_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '11423', '302.39 KB'),
'2010': HmdaDataFile('hmda_2010_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '12694', '313.52 KB'),
'2013': HmdaDataFile('hmda_2013_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '14178', '348.1 KB'),
'2012': HmdaDataFile('hmda_2012_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '15410', '368.87 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wy_all-records_codes.zip', '30163', '802.03 KB'),
'2007': HmdaDataFile('hmda_2007_wy_all-records_codes.zip', '48234', '1.23 MB'),
'2017': HmdaDataFile('hmda_2017_wy_all-records_codes.zip', '26154', '481.64 KB'),
'2015': HmdaDataFile('hmda_2015_wy_all-records_codes.zip', '28641', '812.32 KB'),
'2014': HmdaDataFile('hmda_2014_wy_all-records_codes.zip', '25049', '695.65 KB'),
'2008': HmdaDataFile('hmda_2008_wy_all-records_codes.zip', '35748', '923.02 KB'),
'2009': HmdaDataFile('hmda_2009_wy_all-records_codes.zip', '41659', '1.02 MB'),
'2011': HmdaDataFile('hmda_2011_wy_all-records_codes.zip', '28465', '807.3 KB'),
'2010': HmdaDataFile('hmda_2010_wy_all-records_codes.zip', '32035', '844.63 KB'),
'2013': HmdaDataFile('hmda_2013_wy_all-records_codes.zip', '32956', '857.37 KB'),
'2012': HmdaDataFile('hmda_2012_wy_all-records_codes.zip', '34092', '878.55 KB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wy_originated-records_codes.zip', '15484', '395.6 KB'),
'2007': HmdaDataFile('hmda_2007_wy_originated-records_codes.zip', '21052', '528.23 KB'),
'2017': HmdaDataFile('hmda_2017_wy_originated-records_codes.zip', '13702', '246.43 KB'),
'2015': HmdaDataFile('hmda_2015_wy_originated-records_codes.zip', '15107', '410.78 KB'),
'2014': HmdaDataFile('hmda_2014_wy_originated-records_codes.zip', '13556', '363.31 KB'),
'2008': HmdaDataFile('hmda_2008_wy_originated-records_codes.zip', '16892', '430.54 KB'),
'2009': HmdaDataFile('hmda_2009_wy_originated-records_codes.zip', '20290', '484.44 KB'),
'2011': HmdaDataFile('hmda_2011_wy_originated-records_codes.zip', '14507', '390.89 KB'),
'2010': HmdaDataFile('hmda_2010_wy_originated-records_codes.zip', '15602', '393.56 KB'),
'2013': HmdaDataFile('hmda_2013_wy_originated-records_codes.zip', '17900', '445.73 KB'),
'2012': HmdaDataFile('hmda_2012_wy_originated-records_codes.zip', '19114', '467.43 KB')
}
}
},
'me': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '25576', '1.12 MB'),
'2007': HmdaDataFile('hmda_2007_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '28469', '1.25 MB'),
'2017': HmdaDataFile('hmda_2017_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '23089', '660.63 KB'),
'2015': HmdaDataFile('hmda_2015_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '22073', '1.06 MB'),
'2014': HmdaDataFile('hmda_2014_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '18699', '819.03 KB'),
'2008': HmdaDataFile('hmda_2008_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '23318', '1.01 MB'),
'2009': HmdaDataFile('hmda_2009_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '35977', '1.45 MB'),
'2011': HmdaDataFile('hmda_2011_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '24858', '1.05 MB'),
'2010': HmdaDataFile('hmda_2010_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '29685', '1.3 MB'),
'2013': HmdaDataFile('hmda_2013_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '28892', '1.25 MB'),
'2012': HmdaDataFile('hmda_2012_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '32810', '1.4 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_me_all-records_labels.zip', '64142', '3.02 MB'),
'2007': HmdaDataFile('hmda_2007_me_all-records_labels.zip', '102877', '4.79 MB'),
'2017': HmdaDataFile('hmda_2017_me_all-records_labels.zip', '58188', '1.77 MB'),
'2015': HmdaDataFile('hmda_2015_me_all-records_labels.zip', '56450', '2.96 MB'),
'2014': HmdaDataFile('hmda_2014_me_all-records_labels.zip', '50231', '2.36 MB'),
'2008': HmdaDataFile('hmda_2008_me_all-records_labels.zip', '74281', '3.56 MB'),
'2009': HmdaDataFile('hmda_2009_me_all-records_labels.zip', '88765', '4 MB'),
'2011': HmdaDataFile('hmda_2011_me_all-records_labels.zip', '65903', '3.22 MB'),
'2010': HmdaDataFile('hmda_2010_me_all-records_labels.zip', '75911', '3.76 MB'),
'2013': HmdaDataFile('hmda_2013_me_all-records_labels.zip', '71008', '3.35 MB'),
'2012': HmdaDataFile('hmda_2012_me_all-records_labels.zip', '79176', '3.7 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_me_originated-records_labels.zip', '33761', '1.53 MB'),
'2007': HmdaDataFile('hmda_2007_me_originated-records_labels.zip', '44333', '2.01 MB'),
'2017': HmdaDataFile('hmda_2017_me_originated-records_labels.zip', '31503', '929.28 KB'),
'2015': HmdaDataFile('hmda_2015_me_originated-records_labels.zip', '29602', '1.45 MB'),
'2014': HmdaDataFile('hmda_2014_me_originated-records_labels.zip', '25962', '1.19 MB'),
'2008': HmdaDataFile('hmda_2008_me_originated-records_labels.zip', '34638', '1.54 MB'),
'2009': HmdaDataFile('hmda_2009_me_originated-records_labels.zip', '45092', '1.88 MB'),
'2011': HmdaDataFile('hmda_2011_me_originated-records_labels.zip', '32181', '1.43 MB'),
'2010': HmdaDataFile('hmda_2010_me_originated-records_labels.zip', '37475', '1.73 MB'),
'2013': HmdaDataFile('hmda_2013_me_originated-records_labels.zip', '37720', '1.67 MB'),
'2012': HmdaDataFile('hmda_2012_me_originated-records_labels.zip', '41515', '1.82 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '25576', '725.36 KB'),
'2007': HmdaDataFile('hmda_2007_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '28469', '820.97 KB'),
'2017': HmdaDataFile('hmda_2017_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '23089', '468.44 KB'),
'2015': HmdaDataFile('hmda_2015_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '22073', '702.08 KB'),
'2014': HmdaDataFile('hmda_2014_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '18699', '527.54 KB'),
'2008': HmdaDataFile('hmda_2008_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '23318', '660.77 KB'),
'2009': HmdaDataFile('hmda_2009_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '35977', '975.94 KB'),
'2011': HmdaDataFile('hmda_2011_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '24858', '674.23 KB'),
'2010': HmdaDataFile('hmda_2010_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '29685', '841.65 KB'),
'2013': HmdaDataFile('hmda_2013_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '28892', '797.28 KB'),
'2012': HmdaDataFile('hmda_2012_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '32810', '894.15 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_me_all-records_codes.zip', '64142', '1.89 MB'),
'2007': HmdaDataFile('hmda_2007_me_all-records_codes.zip', '102877', '3.1 MB'),
'2017': HmdaDataFile('hmda_2017_me_all-records_codes.zip', '58188', '1.16 MB'),
'2015': HmdaDataFile('hmda_2015_me_all-records_codes.zip', '56450', '1.88 MB'),
'2014': HmdaDataFile('hmda_2014_me_all-records_codes.zip', '50231', '1.46 MB'),
'2008': HmdaDataFile('hmda_2008_me_all-records_codes.zip', '74281', '2.32 MB'),
'2009': HmdaDataFile('hmda_2009_me_all-records_codes.zip', '88765', '2.65 MB'),
'2011': HmdaDataFile('hmda_2011_me_all-records_codes.zip', '65903', '2.01 MB'),
'2010': HmdaDataFile('hmda_2010_me_all-records_codes.zip', '75911', '2.35 MB'),
'2013': HmdaDataFile('hmda_2013_me_all-records_codes.zip', '71008', '2.07 MB'),
'2012': HmdaDataFile('hmda_2012_me_all-records_codes.zip', '79176', '2.29 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_me_originated-records_codes.zip', '33761', '987 KB'),
'2007': HmdaDataFile('hmda_2007_me_originated-records_codes.zip', '44333', '1.33 MB'),
'2017': HmdaDataFile('hmda_2017_me_originated-records_codes.zip', '31503', '654.38 KB'),
'2015': HmdaDataFile('hmda_2015_me_originated-records_codes.zip', '29602', '954.33 KB'),
'2014': HmdaDataFile('hmda_2014_me_originated-records_codes.zip', '25962', '765.27 KB'),
'2008': HmdaDataFile('hmda_2008_me_originated-records_codes.zip', '34638', '1.02 MB'),
'2009': HmdaDataFile('hmda_2009_me_originated-records_codes.zip', '45092', '1.26 MB'),
'2011': HmdaDataFile('hmda_2011_me_originated-records_codes.zip', '32181', '909.63 KB'),
'2010': HmdaDataFile('hmda_2010_me_originated-records_codes.zip', '37475', '1.11 MB'),
'2013': HmdaDataFile('hmda_2013_me_originated-records_codes.zip', '37720', '1.06 MB'),
'2012': HmdaDataFile('hmda_2012_me_originated-records_codes.zip', '41515', '1.15 MB')
}
}
},
'md': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '151314', '7.51 MB'),
'2007': HmdaDataFile('hmda_2007_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '189965', '9.14 MB'),
'2017': HmdaDataFile('hmda_2017_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '124206', '4 MB'),
'2015': HmdaDataFile('hmda_2015_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '131786', '7.26 MB'),
'2014': HmdaDataFile('hmda_2014_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '98965', '5.23 MB'),
'2008': HmdaDataFile('hmda_2008_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '133536', '6.34 MB'),
'2009': HmdaDataFile('hmda_2009_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '193122', '8.53 MB'),
'2011': HmdaDataFile('hmda_2011_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '142588', '7.11 MB'),
'2010': HmdaDataFile('hmda_2010_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '166872', '8.22 MB'),
'2013': HmdaDataFile('hmda_2013_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '161491', '8.05 MB'),
'2012': HmdaDataFile('hmda_2012_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '195908', '9.85 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_md_all-records_labels.zip', '358958', '19.06 MB'),
'2007': HmdaDataFile('hmda_2007_md_all-records_labels.zip', '656616', '31.96 MB'),
'2017': HmdaDataFile('hmda_2017_md_all-records_labels.zip', '301879', '10.75 MB'),
'2015': HmdaDataFile('hmda_2015_md_all-records_labels.zip', '316012', '18.68 MB'),
'2014': HmdaDataFile('hmda_2014_md_all-records_labels.zip', '247561', '14.04 MB'),
'2008': HmdaDataFile('hmda_2008_md_all-records_labels.zip', '393039', '19.26 MB'),
'2009': HmdaDataFile('hmda_2009_md_all-records_labels.zip', '467697', '21.73 MB'),
'2011': HmdaDataFile('hmda_2011_md_all-records_labels.zip', '347645', '18.89 MB'),
'2010': HmdaDataFile('hmda_2010_md_all-records_labels.zip', '385128', '20.7 MB'),
'2013': HmdaDataFile('hmda_2013_md_all-records_labels.zip', '385383', '20.95 MB'),
'2012': HmdaDataFile('hmda_2012_md_all-records_labels.zip', '439566', '23.91 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_md_originated-records_labels.zip', '171556', '8.64 MB'),
'2007': HmdaDataFile('hmda_2007_md_originated-records_labels.zip', '261984', '12.73 MB'),
'2017': HmdaDataFile('hmda_2017_md_originated-records_labels.zip', '144610', '4.78 MB'),
'2015': HmdaDataFile('hmda_2015_md_originated-records_labels.zip', '152541', '8.53 MB'),
'2014': HmdaDataFile('hmda_2014_md_originated-records_labels.zip', '118429', '6.33 MB'),
'2008': HmdaDataFile('hmda_2008_md_originated-records_labels.zip', '162234', '7.83 MB'),
'2009': HmdaDataFile('hmda_2009_md_originated-records_labels.zip', '210794', '9.48 MB'),
'2011': HmdaDataFile('hmda_2011_md_originated-records_labels.zip', '159707', '8.13 MB'),
'2010': HmdaDataFile('hmda_2010_md_originated-records_labels.zip', '182102', '9.04 MB'),
'2013': HmdaDataFile('hmda_2013_md_originated-records_labels.zip', '187825', '9.46 MB'),
'2012': HmdaDataFile('hmda_2012_md_originated-records_labels.zip', '219387', '11.11 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '151314', '5.32 MB'),
'2007': HmdaDataFile('hmda_2007_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '189965', '6.47 MB'),
'2017': HmdaDataFile('hmda_2017_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '124206', '2.79 MB'),
'2015': HmdaDataFile('hmda_2015_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '131786', '5.19 MB'),
'2014': HmdaDataFile('hmda_2014_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '98965', '3.67 MB'),
'2008': HmdaDataFile('hmda_2008_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '133536', '4.46 MB'),
'2009': HmdaDataFile('hmda_2009_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '193122', '6.08 MB'),
'2011': HmdaDataFile('hmda_2011_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '142588', '4.91 MB'),
'2010': HmdaDataFile('hmda_2010_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '166872', '5.7 MB'),
'2013': HmdaDataFile('hmda_2013_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '161491', '5.68 MB'),
'2012': HmdaDataFile('hmda_2012_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '195908', '6.88 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_md_all-records_codes.zip', '358958', '13.18 MB'),
'2007': HmdaDataFile('hmda_2007_md_all-records_codes.zip', '656616', '22.08 MB'),
'2017': HmdaDataFile('hmda_2017_md_all-records_codes.zip', '301879', '6.99 MB'),
'2015': HmdaDataFile('hmda_2015_md_all-records_codes.zip', '316012', '12.83 MB'),
'2014': HmdaDataFile('hmda_2014_md_all-records_codes.zip', '247561', '9.61 MB'),
'2008': HmdaDataFile('hmda_2008_md_all-records_codes.zip', '393039', '13.28 MB'),
'2009': HmdaDataFile('hmda_2009_md_all-records_codes.zip', '467697', '15.2 MB'),
'2011': HmdaDataFile('hmda_2011_md_all-records_codes.zip', '347645', '12.83 MB'),
'2010': HmdaDataFile('hmda_2010_md_all-records_codes.zip', '385128', '14.08 MB'),
'2013': HmdaDataFile('hmda_2013_md_all-records_codes.zip', '385383', '14.56 MB'),
'2012': HmdaDataFile('hmda_2012_md_all-records_codes.zip', '439566', '16.52 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_md_originated-records_codes.zip', '171556', '6.1 MB'),
'2007': HmdaDataFile('hmda_2007_md_originated-records_codes.zip', '261984', '9.04 MB'),
'2017': HmdaDataFile('hmda_2017_md_originated-records_codes.zip', '144610', '3.3 MB'),
'2015': HmdaDataFile('hmda_2015_md_originated-records_codes.zip', '152541', '6.06 MB'),
'2014': HmdaDataFile('hmda_2014_md_originated-records_codes.zip', '118429', '4.41 MB'),
'2008': HmdaDataFile('hmda_2008_md_originated-records_codes.zip', '162234', '5.51 MB'),
'2009': HmdaDataFile('hmda_2009_md_originated-records_codes.zip', '210794', '6.75 MB'),
'2011': HmdaDataFile('hmda_2011_md_originated-records_codes.zip', '159707', '5.59 MB'),
'2010': HmdaDataFile('hmda_2010_md_originated-records_codes.zip', '182102', '6.24 MB'),
'2013': HmdaDataFile('hmda_2013_md_originated-records_codes.zip', '187825', '6.64 MB'),
'2012': HmdaDataFile('hmda_2012_md_originated-records_codes.zip', '219387', '7.7 MB')
}
}
},
'ma': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '168663', '8.43 MB'),
'2007': HmdaDataFile('hmda_2007_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '148532', '7.1 MB'),
'2017': HmdaDataFile('hmda_2017_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '129891', '4.16 MB'),
'2015': HmdaDataFile('hmda_2015_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '144027', '7.64 MB'),
'2014': HmdaDataFile('hmda_2014_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '109057', '5.41 MB'),
'2008': HmdaDataFile('hmda_2008_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '126855', '6.11 MB'),
'2009': HmdaDataFile('hmda_2009_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '229420', '10.04 MB'),
'2011': HmdaDataFile('hmda_2011_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '177890', '8.46 MB'),
'2010': HmdaDataFile('hmda_2010_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '212589', '10.01 MB'),
'2013': HmdaDataFile('hmda_2013_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '186856', '9.2 MB'),
'2012': HmdaDataFile('hmda_2012_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '252396', '12.13 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ma_all-records_labels.zip', '350131', '18.93 MB'),
'2007': HmdaDataFile('hmda_2007_ma_all-records_labels.zip', '507509', '24.85 MB'),
'2017': HmdaDataFile('hmda_2017_ma_all-records_labels.zip', '282546', '10.31 MB'),
'2015': HmdaDataFile('hmda_2015_ma_all-records_labels.zip', '299991', '17.01 MB'),
'2014': HmdaDataFile('hmda_2014_ma_all-records_labels.zip', '246533', '13.15 MB'),
'2008': HmdaDataFile('hmda_2008_ma_all-records_labels.zip', '337077', '17.11 MB'),
'2009': HmdaDataFile('hmda_2009_ma_all-records_labels.zip', '493549', '22.81 MB'),
'2011': HmdaDataFile('hmda_2011_ma_all-records_labels.zip', '400586', '20.86 MB'),
'2010': HmdaDataFile('hmda_2010_ma_all-records_labels.zip', '458768', '23.77 MB'),
'2013': HmdaDataFile('hmda_2013_ma_all-records_labels.zip', '405166', '21.77 MB'),
'2012': HmdaDataFile('hmda_2012_ma_all-records_labels.zip', '516205', '27.13 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ma_originated-records_labels.zip', '201756', '10.26 MB'),
'2007': HmdaDataFile('hmda_2007_ma_originated-records_labels.zip', '214170', '10.35 MB'),
'2017': HmdaDataFile('hmda_2017_ma_originated-records_labels.zip', '162474', '5.34 MB'),
'2015': HmdaDataFile('hmda_2015_ma_originated-records_labels.zip', '173355', '9.34 MB'),
'2014': HmdaDataFile('hmda_2014_ma_originated-records_labels.zip', '137873', '6.99 MB'),
'2008': HmdaDataFile('hmda_2008_ma_originated-records_labels.zip', '159312', '7.79 MB'),
'2009': HmdaDataFile('hmda_2009_ma_originated-records_labels.zip', '255679', '11.37 MB'),
'2011': HmdaDataFile('hmda_2011_ma_originated-records_labels.zip', '205164', '9.93 MB'),
'2010': HmdaDataFile('hmda_2010_ma_originated-records_labels.zip', '239023', '11.5 MB'),
'2013': HmdaDataFile('hmda_2013_ma_originated-records_labels.zip', '224809', '11.32 MB'),
'2012': HmdaDataFile('hmda_2012_ma_originated-records_labels.zip', '288584', '14 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '168663', '5.88 MB'),
'2007': HmdaDataFile('hmda_2007_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '148532', '5 MB'),
'2017': HmdaDataFile('hmda_2017_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '129891', '2.95 MB'),
'2015': HmdaDataFile('hmda_2015_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '144027', '5.53 MB'),
'2014': HmdaDataFile('hmda_2014_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '109057', '3.83 MB'),
'2008': HmdaDataFile('hmda_2008_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '126855', '4.32 MB'),
'2009': HmdaDataFile('hmda_2009_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '229420', '7.29 MB'),
'2011': HmdaDataFile('hmda_2011_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '177890', '5.8 MB'),
'2010': HmdaDataFile('hmda_2010_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '212589', '6.88 MB'),
'2013': HmdaDataFile('hmda_2013_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '186856', '6.35 MB'),
'2012': HmdaDataFile('hmda_2012_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '252396', '8.35 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ma_all-records_codes.zip', '350131', '13.03 MB'),
'2007': HmdaDataFile('hmda_2007_ma_all-records_codes.zip', '507509', '17.13 MB'),
'2017': HmdaDataFile('hmda_2017_ma_all-records_codes.zip', '282546', '6.92 MB'),
'2015': HmdaDataFile('hmda_2015_ma_all-records_codes.zip', '299991', '11.87 MB'),
'2014': HmdaDataFile('hmda_2014_ma_all-records_codes.zip', '246533', '9.05 MB'),
'2008': HmdaDataFile('hmda_2008_ma_all-records_codes.zip', '337077', '11.88 MB'),
'2009': HmdaDataFile('hmda_2009_ma_all-records_codes.zip', '493549', '16.23 MB'),
'2011': HmdaDataFile('hmda_2011_ma_all-records_codes.zip', '400586', '14.19 MB'),
'2010': HmdaDataFile('hmda_2010_ma_all-records_codes.zip', '458768', '16.27 MB'),
'2013': HmdaDataFile('hmda_2013_ma_all-records_codes.zip', '405166', '14.9 MB'),
'2012': HmdaDataFile('hmda_2012_ma_all-records_codes.zip', '516205', '18.65 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ma_originated-records_codes.zip', '201756', '7.12 MB'),
'2007': HmdaDataFile('hmda_2007_ma_originated-records_codes.zip', '214170', '7.34 MB'),
'2017': HmdaDataFile('hmda_2017_ma_originated-records_codes.zip', '162474', '3.73 MB'),
'2015': HmdaDataFile('hmda_2015_ma_originated-records_codes.zip', '173355', '6.7 MB'),
'2014': HmdaDataFile('hmda_2014_ma_originated-records_codes.zip', '137873', '4.9 MB'),
'2008': HmdaDataFile('hmda_2008_ma_originated-records_codes.zip', '159312', '5.51 MB'),
'2009': HmdaDataFile('hmda_2009_ma_originated-records_codes.zip', '255679', '8.23 MB'),
'2011': HmdaDataFile('hmda_2011_ma_originated-records_codes.zip', '205164', '6.77 MB'),
'2010': HmdaDataFile('hmda_2010_ma_originated-records_codes.zip', '239023', '7.91 MB'),
'2013': HmdaDataFile('hmda_2013_ma_originated-records_codes.zip', '224809', '7.79 MB'),
'2012': HmdaDataFile('hmda_2012_ma_originated-records_codes.zip', '288584', '9.58 MB')
}
}
},
'oh': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '232207', '11.88 MB'),
'2007': HmdaDataFile('hmda_2007_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '224598', '11.06 MB'),
'2017': HmdaDataFile('hmda_2017_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '208362', '6.69 MB'),
'2015': HmdaDataFile('hmda_2015_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '204738', '11.72 MB'),
'2014': HmdaDataFile('hmda_2014_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '174084', '8.93 MB'),
'2008': HmdaDataFile('hmda_2008_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '186092', '9.03 MB'),
'2009': HmdaDataFile('hmda_2009_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '282249', '12.44 MB'),
'2011': HmdaDataFile('hmda_2011_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '219789', '10.95 MB'),
'2010': HmdaDataFile('hmda_2010_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '258240', '12.72 MB'),
'2013': HmdaDataFile('hmda_2013_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '267654', '13.76 MB'),
'2012': HmdaDataFile('hmda_2012_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '302970', '15.14 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_oh_all-records_labels.zip', '493271', '27.33 MB'),
'2007': HmdaDataFile('hmda_2007_oh_all-records_labels.zip', '774401', '39.17 MB'),
'2017': HmdaDataFile('hmda_2017_oh_all-records_labels.zip', '448269', '16.35 MB'),
'2015': HmdaDataFile('hmda_2015_oh_all-records_labels.zip', '439676', '27.18 MB'),
'2014': HmdaDataFile('hmda_2014_oh_all-records_labels.zip', '394459', '21.93 MB'),
'2008': HmdaDataFile('hmda_2008_oh_all-records_labels.zip', '533639', '27.2 MB'),
'2009': HmdaDataFile('hmda_2009_oh_all-records_labels.zip', '624555', '29.69 MB'),
'2011': HmdaDataFile('hmda_2011_oh_all-records_labels.zip', '489066', '26.92 MB'),
'2010': HmdaDataFile('hmda_2010_oh_all-records_labels.zip', '555119', '30.3 MB'),
'2013': HmdaDataFile('hmda_2013_oh_all-records_labels.zip', '578940', '32.56 MB'),
'2012': HmdaDataFile('hmda_2012_oh_all-records_labels.zip', '618867', '34.01 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_oh_originated-records_labels.zip', '262449', '13.71 MB'),
'2007': HmdaDataFile('hmda_2007_oh_originated-records_labels.zip', '313279', '15.77 MB'),
'2017': HmdaDataFile('hmda_2017_oh_originated-records_labels.zip', '241167', '7.96 MB'),
'2015': HmdaDataFile('hmda_2015_oh_originated-records_labels.zip', '235058', '13.65 MB'),
'2014': HmdaDataFile('hmda_2014_oh_originated-records_labels.zip', '203927', '10.71 MB'),
'2008': HmdaDataFile('hmda_2008_oh_originated-records_labels.zip', '231697', '11.5 MB'),
'2009': HmdaDataFile('hmda_2009_oh_originated-records_labels.zip', '309496', '13.92 MB'),
'2011': HmdaDataFile('hmda_2011_oh_originated-records_labels.zip', '245688', '12.6 MB'),
'2010': HmdaDataFile('hmda_2010_oh_originated-records_labels.zip', '283698', '14.38 MB'),
'2013': HmdaDataFile('hmda_2013_oh_originated-records_labels.zip', '306698', '15.95 MB'),
'2012': HmdaDataFile('hmda_2012_oh_originated-records_labels.zip', '336141', '16.99 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '232207', '8.28 MB'),
'2007': HmdaDataFile('hmda_2007_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '224598', '7.85 MB'),
'2017': HmdaDataFile('hmda_2017_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '208362', '4.84 MB'),
'2015': HmdaDataFile('hmda_2015_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '204738', '8.2 MB'),
'2014': HmdaDataFile('hmda_2014_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '174084', '6.26 MB'),
'2008': HmdaDataFile('hmda_2008_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '186092', '6.43 MB'),
'2009': HmdaDataFile('hmda_2009_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '282249', '9.02 MB'),
'2011': HmdaDataFile('hmda_2011_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '219789', '7.49 MB'),
'2010': HmdaDataFile('hmda_2010_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '258240', '8.73 MB'),
'2013': HmdaDataFile('hmda_2013_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '267654', '9.51 MB'),
'2012': HmdaDataFile('hmda_2012_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '302970', '10.48 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_oh_all-records_codes.zip', '493271', '18.52 MB'),
'2007': HmdaDataFile('hmda_2007_oh_all-records_codes.zip', '774401', '27.06 MB'),
'2017': HmdaDataFile('hmda_2017_oh_all-records_codes.zip', '448269', '10.89 MB'),
'2015': HmdaDataFile('hmda_2015_oh_all-records_codes.zip', '439676', '18.32 MB'),
'2014': HmdaDataFile('hmda_2014_oh_all-records_codes.zip', '394459', '14.89 MB'),
'2008': HmdaDataFile('hmda_2008_oh_all-records_codes.zip', '533639', '18.82 MB'),
'2009': HmdaDataFile('hmda_2009_oh_all-records_codes.zip', '624555', '20.88 MB'),
'2011': HmdaDataFile('hmda_2011_oh_all-records_codes.zip', '489066', '17.97 MB'),
'2010': HmdaDataFile('hmda_2010_oh_all-records_codes.zip', '555119', '20.35 MB'),
'2013': HmdaDataFile('hmda_2013_oh_all-records_codes.zip', '578940', '22.01 MB'),
'2012': HmdaDataFile('hmda_2012_oh_all-records_codes.zip', '618867', '23.02 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_oh_originated-records_codes.zip', '262449', '9.52 MB'),
'2007': HmdaDataFile('hmda_2007_oh_originated-records_codes.zip', '313279', '11.21 MB'),
'2017': HmdaDataFile('hmda_2017_oh_originated-records_codes.zip', '241167', '5.68 MB'),
'2015': HmdaDataFile('hmda_2015_oh_originated-records_codes.zip', '235058', '9.5 MB'),
'2014': HmdaDataFile('hmda_2014_oh_originated-records_codes.zip', '203927', '7.49 MB'),
'2008': HmdaDataFile('hmda_2008_oh_originated-records_codes.zip', '231697', '8.17 MB'),
'2009': HmdaDataFile('hmda_2009_oh_originated-records_codes.zip', '309496', '10.06 MB'),
'2011': HmdaDataFile('hmda_2011_oh_originated-records_codes.zip', '245688', '8.63 MB'),
'2010': HmdaDataFile('hmda_2010_oh_originated-records_codes.zip', '283698', '9.88 MB'),
'2013': HmdaDataFile('hmda_2013_oh_originated-records_codes.zip', '306698', '10.96 MB'),
'2012': HmdaDataFile('hmda_2012_oh_originated-records_codes.zip', '336141', '11.68 MB')
}
}
},
'ut': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '106374', '4.72 MB'),
'2007': HmdaDataFile('hmda_2007_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '94558', '4.04 MB'),
'2017': HmdaDataFile('hmda_2017_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '87920', '2.62 MB'),
'2015': HmdaDataFile('hmda_2015_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '88686', '4.42 MB'),
'2014': HmdaDataFile('hmda_2014_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '62864', '2.8 MB'),
'2008': HmdaDataFile('hmda_2008_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '83133', '3.43 MB'),
'2009': HmdaDataFile('hmda_2009_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '117282', '4.64 MB'),
'2011': HmdaDataFile('hmda_2011_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '66630', '2.88 MB'),
'2010': HmdaDataFile('hmda_2010_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '90322', '3.86 MB'),
'2013': HmdaDataFile('hmda_2013_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '82759', '3.71 MB'),
'2012': HmdaDataFile('hmda_2012_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '108573', '4.87 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ut_all-records_labels.zip', '227871', '11.21 MB'),
'2007': HmdaDataFile('hmda_2007_ut_all-records_labels.zip', '319327', '14.16 MB'),
'2017': HmdaDataFile('hmda_2017_ut_all-records_labels.zip', '198425', '6.62 MB'),
'2015': HmdaDataFile('hmda_2015_ut_all-records_labels.zip', '192509', '10.37 MB'),
'2014': HmdaDataFile('hmda_2014_ut_all-records_labels.zip', '144848', '7.1 MB'),
'2008': HmdaDataFile('hmda_2008_ut_all-records_labels.zip', '226654', '9.81 MB'),
'2009': HmdaDataFile('hmda_2009_ut_all-records_labels.zip', '279791', '11.41 MB'),
'2011': HmdaDataFile('hmda_2011_ut_all-records_labels.zip', '166439', '7.98 MB'),
'2010': HmdaDataFile('hmda_2010_ut_all-records_labels.zip', '212181', '10.08 MB'),
'2013': HmdaDataFile('hmda_2013_ut_all-records_labels.zip', '192653', '9.53 MB'),
'2012': HmdaDataFile('hmda_2012_ut_all-records_labels.zip', '230544', '11.39 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ut_originated-records_labels.zip', '126325', '5.77 MB'),
'2007': HmdaDataFile('hmda_2007_ut_originated-records_labels.zip', '136628', '5.96 MB'),
'2017': HmdaDataFile('hmda_2017_ut_originated-records_labels.zip', '108018', '3.29 MB'),
'2015': HmdaDataFile('hmda_2015_ut_originated-records_labels.zip', '105929', '5.36 MB'),
'2014': HmdaDataFile('hmda_2014_ut_originated-records_labels.zip', '76563', '3.49 MB'),
'2008': HmdaDataFile('hmda_2008_ut_originated-records_labels.zip', '98555', '4.16 MB'),
'2009': HmdaDataFile('hmda_2009_ut_originated-records_labels.zip', '127680', '5.13 MB'),
'2011': HmdaDataFile('hmda_2011_ut_originated-records_labels.zip', '77763', '3.46 MB'),
'2010': HmdaDataFile('hmda_2010_ut_originated-records_labels.zip', '101108', '4.4 MB'),
'2013': HmdaDataFile('hmda_2013_ut_originated-records_labels.zip', '99424', '4.53 MB'),
'2012': HmdaDataFile('hmda_2012_ut_originated-records_labels.zip', '125331', '5.73 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '106374', '3.15 MB'),
'2007': HmdaDataFile('hmda_2007_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '94558', '2.78 MB'),
'2017': HmdaDataFile('hmda_2017_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '87920', '1.85 MB'),
'2015': HmdaDataFile('hmda_2015_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '88686', '3.08 MB'),
'2014': HmdaDataFile('hmda_2014_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '62864', '1.88 MB'),
'2008': HmdaDataFile('hmda_2008_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '83133', '2.4 MB'),
'2009': HmdaDataFile('hmda_2009_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '117282', '3.31 MB'),
'2011': HmdaDataFile('hmda_2011_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '66630', '1.92 MB'),
'2010': HmdaDataFile('hmda_2010_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '90322', '2.56 MB'),
'2013': HmdaDataFile('hmda_2013_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '82759', '2.48 MB'),
'2012': HmdaDataFile('hmda_2012_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '108573', '3.28 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ut_all-records_codes.zip', '227871', '7.32 MB'),
'2007': HmdaDataFile('hmda_2007_ut_all-records_codes.zip', '319327', '9.65 MB'),
'2017': HmdaDataFile('hmda_2017_ut_all-records_codes.zip', '198425', '4.41 MB'),
'2015': HmdaDataFile('hmda_2015_ut_all-records_codes.zip', '192509', '6.96 MB'),
'2014': HmdaDataFile('hmda_2014_ut_all-records_codes.zip', '144848', '4.66 MB'),
'2008': HmdaDataFile('hmda_2008_ut_all-records_codes.zip', '226654', '6.76 MB'),
'2009': HmdaDataFile('hmda_2009_ut_all-records_codes.zip', '279791', '7.94 MB'),
'2011': HmdaDataFile('hmda_2011_ut_all-records_codes.zip', '166439', '5.23 MB'),
'2010': HmdaDataFile('hmda_2010_ut_all-records_codes.zip', '212181', '6.6 MB'),
'2013': HmdaDataFile('hmda_2013_ut_all-records_codes.zip', '192653', '6.29 MB'),
'2012': HmdaDataFile('hmda_2012_ut_all-records_codes.zip', '230544', '7.57 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ut_originated-records_codes.zip', '126325', '3.86 MB'),
'2007': HmdaDataFile('hmda_2007_ut_originated-records_codes.zip', '136628', '4.14 MB'),
'2017': HmdaDataFile('hmda_2017_ut_originated-records_codes.zip', '108018', '2.31 MB'),
'2015': HmdaDataFile('hmda_2015_ut_originated-records_codes.zip', '105929', '3.72 MB'),
'2014': HmdaDataFile('hmda_2014_ut_originated-records_codes.zip', '76563', '2.34 MB'),
'2008': HmdaDataFile('hmda_2008_ut_originated-records_codes.zip', '98555', '2.91 MB'),
'2009': HmdaDataFile('hmda_2009_ut_originated-records_codes.zip', '127680', '3.64 MB'),
'2011': HmdaDataFile('hmda_2011_ut_originated-records_codes.zip', '77763', '2.3 MB'),
'2010': HmdaDataFile('hmda_2010_ut_originated-records_codes.zip', '101108', '2.91 MB'),
'2013': HmdaDataFile('hmda_2013_ut_originated-records_codes.zip', '99424', '3.03 MB'),
'2012': HmdaDataFile('hmda_2012_ut_originated-records_codes.zip', '125331', '3.84 MB')
}
}
},
'mo': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '139247', '6.75 MB'),
'2007': HmdaDataFile('hmda_2007_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '154268', '7.4 MB'),
'2017': HmdaDataFile('hmda_2017_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '118901', '3.83 MB'),
'2015': HmdaDataFile('hmda_2015_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '121654', '6.63 MB'),
'2014': HmdaDataFile('hmda_2014_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '95718', '4.8 MB'),
'2008': HmdaDataFile('hmda_2008_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '132777', '6.15 MB'),
'2009': HmdaDataFile('hmda_2009_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '186579', '8.07 MB'),
'2011': HmdaDataFile('hmda_2011_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '130134', '6.22 MB'),
'2010': HmdaDataFile('hmda_2010_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '158585', '7.57 MB'),
'2013': HmdaDataFile('hmda_2013_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '146638', '7.2 MB'),
'2012': HmdaDataFile('hmda_2012_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '175614', '8.39 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mo_all-records_labels.zip', '312237', '16.09 MB'),
'2007': HmdaDataFile('hmda_2007_mo_all-records_labels.zip', '531617', '25.33 MB'),
'2017': HmdaDataFile('hmda_2017_mo_all-records_labels.zip', '277843', '9.61 MB'),
'2015': HmdaDataFile('hmda_2015_mo_all-records_labels.zip', '276661', '16.28 MB'),
'2014': HmdaDataFile('hmda_2014_mo_all-records_labels.zip', '232023', '12.42 MB'),
'2008': HmdaDataFile('hmda_2008_mo_all-records_labels.zip', '379587', '17.83 MB'),
'2009': HmdaDataFile('hmda_2009_mo_all-records_labels.zip', '447918', '19.75 MB'),
'2011': HmdaDataFile('hmda_2011_mo_all-records_labels.zip', '309645', '15.9 MB'),
'2010': HmdaDataFile('hmda_2010_mo_all-records_labels.zip', '360738', '18.59 MB'),
'2013': HmdaDataFile('hmda_2013_mo_all-records_labels.zip', '347186', '18.19 MB'),
'2012': HmdaDataFile('hmda_2012_mo_all-records_labels.zip', '384551', '19.69 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mo_originated-records_labels.zip', '165943', '8.21 MB'),
'2007': HmdaDataFile('hmda_2007_mo_originated-records_labels.zip', '218490', '10.7 MB'),
'2017': HmdaDataFile('hmda_2017_mo_originated-records_labels.zip', '145419', '4.81 MB'),
'2015': HmdaDataFile('hmda_2015_mo_originated-records_labels.zip', '147519', '8.15 MB'),
'2014': HmdaDataFile('hmda_2014_mo_originated-records_labels.zip', '120463', '6.22 MB'),
'2008': HmdaDataFile('hmda_2008_mo_originated-records_labels.zip', '169405', '8.03 MB'),
'2009': HmdaDataFile('hmda_2009_mo_originated-records_labels.zip', '211924', '9.41 MB'),
'2011': HmdaDataFile('hmda_2011_mo_originated-records_labels.zip', '154134', '7.55 MB'),
'2010': HmdaDataFile('hmda_2010_mo_originated-records_labels.zip', '182546', '8.84 MB'),
'2013': HmdaDataFile('hmda_2013_mo_originated-records_labels.zip', '178486', '8.86 MB'),
'2012': HmdaDataFile('hmda_2012_mo_originated-records_labels.zip', '204935', '9.93 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '139247', '4.66 MB'),
'2007': HmdaDataFile('hmda_2007_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '154268', '5.21 MB'),
'2017': HmdaDataFile('hmda_2017_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '118901', '2.73 MB'),
'2015': HmdaDataFile('hmda_2015_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '121654', '4.64 MB'),
'2014': HmdaDataFile('hmda_2014_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '95718', '3.31 MB'),
'2008': HmdaDataFile('hmda_2008_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '132777', '4.34 MB'),
'2009': HmdaDataFile('hmda_2009_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '186579', '5.81 MB'),
'2011': HmdaDataFile('hmda_2011_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '130134', '4.23 MB'),
'2010': HmdaDataFile('hmda_2010_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '158585', '5.17 MB'),
'2013': HmdaDataFile('hmda_2013_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '146638', '4.98 MB'),
'2012': HmdaDataFile('hmda_2012_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '175614', '5.75 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mo_all-records_codes.zip', '312237', '10.71 MB'),
'2007': HmdaDataFile('hmda_2007_mo_all-records_codes.zip', '531617', '17.33 MB'),
'2017': HmdaDataFile('hmda_2017_mo_all-records_codes.zip', '277843', '6.33 MB'),
'2015': HmdaDataFile('hmda_2015_mo_all-records_codes.zip', '276661', '10.94 MB'),
'2014': HmdaDataFile('hmda_2014_mo_all-records_codes.zip', '232023', '8.21 MB'),
'2008': HmdaDataFile('hmda_2008_mo_all-records_codes.zip', '379587', '12.23 MB'),
'2009': HmdaDataFile('hmda_2009_mo_all-records_codes.zip', '447918', '13.74 MB'),
'2011': HmdaDataFile('hmda_2011_mo_all-records_codes.zip', '309645', '10.46 MB'),
'2010': HmdaDataFile('hmda_2010_mo_all-records_codes.zip', '360738', '12.3 MB'),
'2013': HmdaDataFile('hmda_2013_mo_all-records_codes.zip', '347186', '12.14 MB'),
'2012': HmdaDataFile('hmda_2012_mo_all-records_codes.zip', '384551', '13.07 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mo_originated-records_codes.zip', '165943', '5.62 MB'),
'2007': HmdaDataFile('hmda_2007_mo_originated-records_codes.zip', '218490', '7.52 MB'),
'2017': HmdaDataFile('hmda_2017_mo_originated-records_codes.zip', '145419', '3.37 MB'),
'2015': HmdaDataFile('hmda_2015_mo_originated-records_codes.zip', '147519', '5.65 MB'),
'2014': HmdaDataFile('hmda_2014_mo_originated-records_codes.zip', '120463', '4.26 MB'),
'2008': HmdaDataFile('hmda_2008_mo_originated-records_codes.zip', '169405', '5.64 MB'),
'2009': HmdaDataFile('hmda_2009_mo_originated-records_codes.zip', '211924', '6.73 MB'),
'2011': HmdaDataFile('hmda_2011_mo_originated-records_codes.zip', '154134', '5.1 MB'),
'2010': HmdaDataFile('hmda_2010_mo_originated-records_codes.zip', '182546', '5.99 MB'),
'2013': HmdaDataFile('hmda_2013_mo_originated-records_codes.zip', '178486', '6.06 MB'),
'2012': HmdaDataFile('hmda_2012_mo_originated-records_codes.zip', '204935', '6.74 MB')
}
}
},
'mn': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '150929', '7.76 MB'),
'2007': HmdaDataFile('hmda_2007_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '114958', '5.46 MB'),
'2017': HmdaDataFile('hmda_2017_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '127113', '3.8 MB'),
'2015': HmdaDataFile('hmda_2015_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '135112', '7.39 MB'),
'2014': HmdaDataFile('hmda_2014_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '100659', '5.19 MB'),
'2008': HmdaDataFile('hmda_2008_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '101137', '4.61 MB'),
'2009': HmdaDataFile('hmda_2009_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '172464', '7.33 MB'),
'2011': HmdaDataFile('hmda_2011_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '130723', '6.42 MB'),
'2010': HmdaDataFile('hmda_2010_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '155626', '7.58 MB'),
'2013': HmdaDataFile('hmda_2013_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '158942', '7.98 MB'),
'2012': HmdaDataFile('hmda_2012_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '195258', '9.69 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mn_all-records_labels.zip', '308571', '16.84 MB'),
'2007': HmdaDataFile('hmda_2007_mn_all-records_labels.zip', '396721', '18.97 MB'),
'2017': HmdaDataFile('hmda_2017_mn_all-records_labels.zip', '269551', '8.68 MB'),
'2015': HmdaDataFile('hmda_2015_mn_all-records_labels.zip', '280012', '16.44 MB'),
'2014': HmdaDataFile('hmda_2014_mn_all-records_labels.zip', '220146', '12.02 MB'),
'2008': HmdaDataFile('hmda_2008_mn_all-records_labels.zip', '272913', '12.87 MB'),
'2009': HmdaDataFile('hmda_2009_mn_all-records_labels.zip', '379860', '16.84 MB'),
'2011': HmdaDataFile('hmda_2011_mn_all-records_labels.zip', '282982', '14.91 MB'),
'2010': HmdaDataFile('hmda_2010_mn_all-records_labels.zip', '332542', '17.42 MB'),
'2013': HmdaDataFile('hmda_2013_mn_all-records_labels.zip', '332594', '17.76 MB'),
'2012': HmdaDataFile('hmda_2012_mn_all-records_labels.zip', '382934', '20.31 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mn_originated-records_labels.zip', '176922', '9.23 MB'),
'2007': HmdaDataFile('hmda_2007_mn_originated-records_labels.zip', '171217', '8.27 MB'),
'2017': HmdaDataFile('hmda_2017_mn_originated-records_labels.zip', '154164', '4.74 MB'),
'2015': HmdaDataFile('hmda_2015_mn_originated-records_labels.zip', '160605', '8.94 MB'),
'2014': HmdaDataFile('hmda_2014_mn_originated-records_labels.zip', '123374', '6.47 MB'),
'2008': HmdaDataFile('hmda_2008_mn_originated-records_labels.zip', '130815', '6.14 MB'),
'2009': HmdaDataFile('hmda_2009_mn_originated-records_labels.zip', '195958', '8.53 MB'),
'2011': HmdaDataFile('hmda_2011_mn_originated-records_labels.zip', '151782', '7.55 MB'),
'2010': HmdaDataFile('hmda_2010_mn_originated-records_labels.zip', '177556', '8.84 MB'),
'2013': HmdaDataFile('hmda_2013_mn_originated-records_labels.zip', '187475', '9.52 MB'),
'2012': HmdaDataFile('hmda_2012_mn_originated-records_labels.zip', '222116', '11.23 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '150929', '5.39 MB'),
'2007': HmdaDataFile('hmda_2007_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '114958', '3.87 MB'),
'2017': HmdaDataFile('hmda_2017_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '127113', '2.69 MB'),
'2015': HmdaDataFile('hmda_2015_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '135112', '5.14 MB'),
'2014': HmdaDataFile('hmda_2014_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '100659', '3.62 MB'),
'2008': HmdaDataFile('hmda_2008_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '101137', '3.28 MB'),
'2009': HmdaDataFile('hmda_2009_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '172464', '5.3 MB'),
'2011': HmdaDataFile('hmda_2011_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '130723', '4.45 MB'),
'2010': HmdaDataFile('hmda_2010_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '155626', '5.29 MB'),
'2013': HmdaDataFile('hmda_2013_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '158942', '5.58 MB'),
'2012': HmdaDataFile('hmda_2012_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '195258', '6.78 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mn_all-records_codes.zip', '308571', '11.31 MB'),
'2007': HmdaDataFile('hmda_2007_mn_all-records_codes.zip', '396721', '13.04 MB'),
'2017': HmdaDataFile('hmda_2017_mn_all-records_codes.zip', '269551', '5.62 MB'),
'2015': HmdaDataFile('hmda_2015_mn_all-records_codes.zip', '280012', '10.99 MB'),
'2014': HmdaDataFile('hmda_2014_mn_all-records_codes.zip', '220146', '8.1 MB'),
'2008': HmdaDataFile('hmda_2008_mn_all-records_codes.zip', '272913', '8.89 MB'),
'2009': HmdaDataFile('hmda_2009_mn_all-records_codes.zip', '379860', '11.74 MB'),
'2011': HmdaDataFile('hmda_2011_mn_all-records_codes.zip', '282982', '10.01 MB'),
'2010': HmdaDataFile('hmda_2010_mn_all-records_codes.zip', '332542', '11.73 MB'),
'2013': HmdaDataFile('hmda_2013_mn_all-records_codes.zip', '332594', '12.03 MB'),
'2012': HmdaDataFile('hmda_2012_mn_all-records_codes.zip', '382934', '13.81 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mn_originated-records_codes.zip', '176922', '6.37 MB'),
'2007': HmdaDataFile('hmda_2007_mn_originated-records_codes.zip', '171217', '5.84 MB'),
'2017': HmdaDataFile('hmda_2017_mn_originated-records_codes.zip', '154164', '3.32 MB'),
'2015': HmdaDataFile('hmda_2015_mn_originated-records_codes.zip', '160605', '6.15 MB'),
'2014': HmdaDataFile('hmda_2014_mn_originated-records_codes.zip', '123374', '4.48 MB'),
'2008': HmdaDataFile('hmda_2008_mn_originated-records_codes.zip', '130815', '4.36 MB'),
'2009': HmdaDataFile('hmda_2009_mn_originated-records_codes.zip', '195958', '6.13 MB'),
'2011': HmdaDataFile('hmda_2011_mn_originated-records_codes.zip', '151782', '5.2 MB'),
'2010': HmdaDataFile('hmda_2010_mn_originated-records_codes.zip', '177556', '6.12 MB'),
'2013': HmdaDataFile('hmda_2013_mn_originated-records_codes.zip', '187475', '6.6 MB'),
'2012': HmdaDataFile('hmda_2012_mn_originated-records_codes.zip', '222116', '7.82 MB')
}
}
},
'mi': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '225509', '11.38 MB'),
'2007': HmdaDataFile('hmda_2007_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '202641', '10.01 MB'),
'2017': HmdaDataFile('hmda_2017_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '200696', '6.21 MB'),
'2015': HmdaDataFile('hmda_2015_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '198320', '11.13 MB'),
'2014': HmdaDataFile('hmda_2014_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '162424', '8.1 MB'),
'2008': HmdaDataFile('hmda_2008_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '149102', '7.25 MB'),
'2009': HmdaDataFile('hmda_2009_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '194235', '8.77 MB'),
'2011': HmdaDataFile('hmda_2011_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '168948', '8.4 MB'),
'2010': HmdaDataFile('hmda_2010_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '175445', '8.52 MB'),
'2013': HmdaDataFile('hmda_2013_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '239554', '11.94 MB'),
'2012': HmdaDataFile('hmda_2012_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '274203', '13.44 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mi_all-records_labels.zip', '470652', '25.31 MB'),
'2007': HmdaDataFile('hmda_2007_mi_all-records_labels.zip', '780713', '38.5 MB'),
'2017': HmdaDataFile('hmda_2017_mi_all-records_labels.zip', '437181', '14.82 MB'),
'2015': HmdaDataFile('hmda_2015_mi_all-records_labels.zip', '424672', '25.71 MB'),
'2014': HmdaDataFile('hmda_2014_mi_all-records_labels.zip', '361546', '19.18 MB'),
'2008': HmdaDataFile('hmda_2008_mi_all-records_labels.zip', '472702', '23.46 MB'),
'2009': HmdaDataFile('hmda_2009_mi_all-records_labels.zip', '504304', '23.62 MB'),
'2011': HmdaDataFile('hmda_2011_mi_all-records_labels.zip', '396764', '21.31 MB'),
'2010': HmdaDataFile('hmda_2010_mi_all-records_labels.zip', '419300', '22.02 MB'),
'2013': HmdaDataFile('hmda_2013_mi_all-records_labels.zip', '521030', '27.85 MB'),
'2012': HmdaDataFile('hmda_2012_mi_all-records_labels.zip', '573645', '30.22 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mi_originated-records_labels.zip', '262757', '13.58 MB'),
'2007': HmdaDataFile('hmda_2007_mi_originated-records_labels.zip', '294627', '14.81 MB'),
'2017': HmdaDataFile('hmda_2017_mi_originated-records_labels.zip', '241945', '7.74 MB'),
'2015': HmdaDataFile('hmda_2015_mi_originated-records_labels.zip', '233520', '13.34 MB'),
'2014': HmdaDataFile('hmda_2014_mi_originated-records_labels.zip', '194126', '9.91 MB'),
'2008': HmdaDataFile('hmda_2008_mi_originated-records_labels.zip', '191860', '9.52 MB'),
'2009': HmdaDataFile('hmda_2009_mi_originated-records_labels.zip', '224166', '10.41 MB'),
'2011': HmdaDataFile('hmda_2011_mi_originated-records_labels.zip', '196360', '10.06 MB'),
'2010': HmdaDataFile('hmda_2010_mi_originated-records_labels.zip', '202252', '10.19 MB'),
'2013': HmdaDataFile('hmda_2013_mi_originated-records_labels.zip', '280253', '14.18 MB'),
'2012': HmdaDataFile('hmda_2012_mi_originated-records_labels.zip', '312194', '15.55 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '225509', '7.89 MB'),
'2007': HmdaDataFile('hmda_2007_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '202641', '7.07 MB'),
'2017': HmdaDataFile('hmda_2017_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '200696', '4.39 MB'),
'2015': HmdaDataFile('hmda_2015_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '198320', '7.82 MB'),
'2014': HmdaDataFile('hmda_2014_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '162424', '5.66 MB'),
'2008': HmdaDataFile('hmda_2008_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '149102', '5.08 MB'),
'2009': HmdaDataFile('hmda_2009_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '194235', '6.2 MB'),
'2011': HmdaDataFile('hmda_2011_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '168948', '5.72 MB'),
'2010': HmdaDataFile('hmda_2010_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '175445', '5.8 MB'),
'2013': HmdaDataFile('hmda_2013_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '239554', '8.26 MB'),
'2012': HmdaDataFile('hmda_2012_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '274203', '9.28 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mi_all-records_codes.zip', '470652', '16.94 MB'),
'2007': HmdaDataFile('hmda_2007_mi_all-records_codes.zip', '780713', '26.45 MB'),
'2017': HmdaDataFile('hmda_2017_mi_all-records_codes.zip', '437181', '9.61 MB'),
'2015': HmdaDataFile('hmda_2015_mi_all-records_codes.zip', '424672', '17.31 MB'),
'2014': HmdaDataFile('hmda_2014_mi_all-records_codes.zip', '361546', '12.87 MB'),
'2008': HmdaDataFile('hmda_2008_mi_all-records_codes.zip', '472702', '16.04 MB'),
'2009': HmdaDataFile('hmda_2009_mi_all-records_codes.zip', '504304', '16.25 MB'),
'2011': HmdaDataFile('hmda_2011_mi_all-records_codes.zip', '396764', '14.07 MB'),
'2010': HmdaDataFile('hmda_2010_mi_all-records_codes.zip', '419300', '14.52 MB'),
'2013': HmdaDataFile('hmda_2013_mi_all-records_codes.zip', '521030', '18.59 MB'),
'2012': HmdaDataFile('hmda_2012_mi_all-records_codes.zip', '573645', '20.21 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mi_originated-records_codes.zip', '262757', '9.39 MB'),
'2007': HmdaDataFile('hmda_2007_mi_originated-records_codes.zip', '294627', '10.53 MB'),
'2017': HmdaDataFile('hmda_2017_mi_originated-records_codes.zip', '241945', '5.41 MB'),
'2015': HmdaDataFile('hmda_2015_mi_originated-records_codes.zip', '233520', '9.29 MB'),
'2014': HmdaDataFile('hmda_2014_mi_originated-records_codes.zip', '194126', '6.88 MB'),
'2008': HmdaDataFile('hmda_2008_mi_originated-records_codes.zip', '191860', '6.67 MB'),
'2009': HmdaDataFile('hmda_2009_mi_originated-records_codes.zip', '224166', '7.37 MB'),
'2011': HmdaDataFile('hmda_2011_mi_originated-records_codes.zip', '196360', '6.83 MB'),
'2010': HmdaDataFile('hmda_2010_mi_originated-records_codes.zip', '202252', '6.96 MB'),
'2013': HmdaDataFile('hmda_2013_mi_originated-records_codes.zip', '280253', '9.73 MB'),
'2012': HmdaDataFile('hmda_2012_mi_originated-records_codes.zip', '312194', '10.66 MB')
}
}
},
'ri': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '22014', '1 MB'),
'2007': HmdaDataFile('hmda_2007_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '24278', '1.09 MB'),
'2017': HmdaDataFile('hmda_2017_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '19545', '600.94 KB'),
'2015': HmdaDataFile('hmda_2015_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '19238', '896.87 KB'),
'2014': HmdaDataFile('hmda_2014_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '14179', '687.6 KB'),
'2008': HmdaDataFile('hmda_2008_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '18140', '802.68 KB'),
'2009': HmdaDataFile('hmda_2009_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '29449', '1.2 MB'),
'2011': HmdaDataFile('hmda_2011_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '20369', '903.7 KB'),
'2010': HmdaDataFile('hmda_2010_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '24263', '1.09 MB'),
'2013': HmdaDataFile('hmda_2013_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '24398', '1.11 MB'),
'2012': HmdaDataFile('hmda_2012_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '28863', '1.3 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ri_all-records_labels.zip', '49775', '2.45 MB'),
'2007': HmdaDataFile('hmda_2007_ri_all-records_labels.zip', '88662', '4.02 MB'),
'2017': HmdaDataFile('hmda_2017_ri_all-records_labels.zip', '44038', '1.51 MB'),
'2015': HmdaDataFile('hmda_2015_ri_all-records_labels.zip', '43611', '2.22 MB'),
'2014': HmdaDataFile('hmda_2014_ri_all-records_labels.zip', '33941', '1.8 MB'),
'2008': HmdaDataFile('hmda_2008_ri_all-records_labels.zip', '51710', '2.4 MB'),
'2009': HmdaDataFile('hmda_2009_ri_all-records_labels.zip', '64057', '2.81 MB'),
'2011': HmdaDataFile('hmda_2011_ri_all-records_labels.zip', '48785', '2.39 MB'),
'2010': HmdaDataFile('hmda_2010_ri_all-records_labels.zip', '55842', '2.75 MB'),
'2013': HmdaDataFile('hmda_2013_ri_all-records_labels.zip', '52960', '2.62 MB'),
'2012': HmdaDataFile('hmda_2012_ri_all-records_labels.zip', '61996', '3.04 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ri_originated-records_labels.zip', '27005', '1.25 MB'),
'2007': HmdaDataFile('hmda_2007_ri_originated-records_labels.zip', '36923', '1.65 MB'),
'2017': HmdaDataFile('hmda_2017_ri_originated-records_labels.zip', '24598', '766.74 KB'),
'2015': HmdaDataFile('hmda_2015_ri_originated-records_labels.zip', '23923', '1.13 MB'),
'2014': HmdaDataFile('hmda_2014_ri_originated-records_labels.zip', '18005', '891.11 KB'),
'2008': HmdaDataFile('hmda_2008_ri_originated-records_labels.zip', '23931', '1.07 MB'),
'2009': HmdaDataFile('hmda_2009_ri_originated-records_labels.zip', '33549', '1.38 MB'),
'2011': HmdaDataFile('hmda_2011_ri_originated-records_labels.zip', '24337', '1.11 MB'),
'2010': HmdaDataFile('hmda_2010_ri_originated-records_labels.zip', '28439', '1.3 MB'),
'2013': HmdaDataFile('hmda_2013_ri_originated-records_labels.zip', '29485', '1.37 MB'),
'2012': HmdaDataFile('hmda_2012_ri_originated-records_labels.zip', '33911', '1.55 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '22014', '662.47 KB'),
'2007': HmdaDataFile('hmda_2007_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '24278', '740.23 KB'),
'2017': HmdaDataFile('hmda_2017_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '19545', '422.08 KB'),
'2015': HmdaDataFile('hmda_2015_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '19238', '594.54 KB'),
'2014': HmdaDataFile('hmda_2014_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '14179', '455.38 KB'),
'2008': HmdaDataFile('hmda_2008_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '18140', '535.59 KB'),
'2009': HmdaDataFile('hmda_2009_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '29449', '820.78 KB'),
'2011': HmdaDataFile('hmda_2011_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '20369', '581.77 KB'),
'2010': HmdaDataFile('hmda_2010_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '24263', '698.2 KB'),
'2013': HmdaDataFile('hmda_2013_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '24398', '725.42 KB'),
'2012': HmdaDataFile('hmda_2012_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '28863', '844.23 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ri_all-records_codes.zip', '49775', '1.57 MB'),
'2007': HmdaDataFile('hmda_2007_ri_all-records_codes.zip', '88662', '2.64 MB'),
'2017': HmdaDataFile('hmda_2017_ri_all-records_codes.zip', '44038', '997.28 KB'),
'2015': HmdaDataFile('hmda_2015_ri_all-records_codes.zip', '43611', '1.42 MB'),
'2014': HmdaDataFile('hmda_2014_ri_all-records_codes.zip', '33941', '1.14 MB'),
'2008': HmdaDataFile('hmda_2008_ri_all-records_codes.zip', '51710', '1.58 MB'),
'2009': HmdaDataFile('hmda_2009_ri_all-records_codes.zip', '64057', '1.89 MB'),
'2011': HmdaDataFile('hmda_2011_ri_all-records_codes.zip', '48785', '1.49 MB'),
'2010': HmdaDataFile('hmda_2010_ri_all-records_codes.zip', '55842', '1.73 MB'),
'2013': HmdaDataFile('hmda_2013_ri_all-records_codes.zip', '52960', '1.66 MB'),
'2012': HmdaDataFile('hmda_2012_ri_all-records_codes.zip', '61996', '1.93 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ri_originated-records_codes.zip', '27005', '818.97 KB'),
'2007': HmdaDataFile('hmda_2007_ri_originated-records_codes.zip', '36923', '1.12 MB'),
'2017': HmdaDataFile('hmda_2017_ri_originated-records_codes.zip', '24598', '532.25 KB'),
'2015': HmdaDataFile('hmda_2015_ri_originated-records_codes.zip', '23923', '744.4 KB'),
'2014': HmdaDataFile('hmda_2014_ri_originated-records_codes.zip', '18005', '586.22 KB'),
'2008': HmdaDataFile('hmda_2008_ri_originated-records_codes.zip', '23931', '716.77 KB'),
'2009': HmdaDataFile('hmda_2009_ri_originated-records_codes.zip', '33549', '938.52 KB'),
'2011': HmdaDataFile('hmda_2011_ri_originated-records_codes.zip', '24337', '705.92 KB'),
'2010': HmdaDataFile('hmda_2010_ri_originated-records_codes.zip', '28439', '835.63 KB'),
'2013': HmdaDataFile('hmda_2013_ri_originated-records_codes.zip', '29485', '889.58 KB'),
'2012': HmdaDataFile('hmda_2012_ri_originated-records_codes.zip', '33911', '1.01 MB')
}
}
},
'ks': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '57549', '2.68 MB'),
'2007': HmdaDataFile('hmda_2007_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '62611', '2.93 MB'),
'2017': HmdaDataFile('hmda_2017_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '50202', '1.64 MB'),
'2015': HmdaDataFile('hmda_2015_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '52685', '2.73 MB'),
'2014': HmdaDataFile('hmda_2014_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '43216', '2.18 MB'),
'2008': HmdaDataFile('hmda_2008_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '56265', '2.59 MB'),
'2009': HmdaDataFile('hmda_2009_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '78703', '3.32 MB'),
'2011': HmdaDataFile('hmda_2011_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '56542', '2.6 MB'),
'2010': HmdaDataFile('hmda_2010_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '67768', '3.07 MB'),
'2013': HmdaDataFile('hmda_2013_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '62047', '2.98 MB'),
'2012': HmdaDataFile('hmda_2012_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '73455', '3.41 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ks_all-records_labels.zip', '124922', '6.39 MB'),
'2007': HmdaDataFile('hmda_2007_ks_all-records_labels.zip', '194856', '9.65 MB'),
'2017': HmdaDataFile('hmda_2017_ks_all-records_labels.zip', '107563', '3.99 MB'),
'2015': HmdaDataFile('hmda_2015_ks_all-records_labels.zip', '113367', '6.35 MB'),
'2014': HmdaDataFile('hmda_2014_ks_all-records_labels.zip', '96245', '5.33 MB'),
'2008': HmdaDataFile('hmda_2008_ks_all-records_labels.zip', '146968', '7.27 MB'),
'2009': HmdaDataFile('hmda_2009_ks_all-records_labels.zip', '175095', '8.11 MB'),
'2011': HmdaDataFile('hmda_2011_ks_all-records_labels.zip', '126214', '6.44 MB'),
'2010': HmdaDataFile('hmda_2010_ks_all-records_labels.zip', '149351', '7.61 MB'),
'2013': HmdaDataFile('hmda_2013_ks_all-records_labels.zip', '134547', '7.17 MB'),
'2012': HmdaDataFile('hmda_2012_ks_all-records_labels.zip', '149627', '7.77 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ks_originated-records_labels.zip', '69335', '3.31 MB'),
'2007': HmdaDataFile('hmda_2007_ks_originated-records_labels.zip', '86578', '4.16 MB'),
'2017': HmdaDataFile('hmda_2017_ks_originated-records_labels.zip', '61197', '2.05 MB'),
'2015': HmdaDataFile('hmda_2015_ks_originated-records_labels.zip', '63448', '3.34 MB'),
'2014': HmdaDataFile('hmda_2014_ks_originated-records_labels.zip', '53984', '2.79 MB'),
'2008': HmdaDataFile('hmda_2008_ks_originated-records_labels.zip', '72280', '3.42 MB'),
'2009': HmdaDataFile('hmda_2009_ks_originated-records_labels.zip', '90077', '3.93 MB'),
'2011': HmdaDataFile('hmda_2011_ks_originated-records_labels.zip', '66876', '3.14 MB'),
'2010': HmdaDataFile('hmda_2010_ks_originated-records_labels.zip', '78256', '3.64 MB'),
'2013': HmdaDataFile('hmda_2013_ks_originated-records_labels.zip', '74582', '3.64 MB'),
'2012': HmdaDataFile('hmda_2012_ks_originated-records_labels.zip', '84924', '4.01 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '57549', '1.78 MB'),
'2007': HmdaDataFile('hmda_2007_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '62611', '2.02 MB'),
'2017': HmdaDataFile('hmda_2017_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '50202', '1.17 MB'),
'2015': HmdaDataFile('hmda_2015_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '52685', '1.86 MB'),
'2014': HmdaDataFile('hmda_2014_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '43216', '1.46 MB'),
'2008': HmdaDataFile('hmda_2008_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '56265', '1.79 MB'),
'2009': HmdaDataFile('hmda_2009_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '78703', '2.28 MB'),
'2011': HmdaDataFile('hmda_2011_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '56542', '1.71 MB'),
'2010': HmdaDataFile('hmda_2010_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '67768', '2.02 MB'),
'2013': HmdaDataFile('hmda_2013_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '62047', '1.98 MB'),
'2012': HmdaDataFile('hmda_2012_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '73455', '2.25 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ks_all-records_codes.zip', '124922', '4.16 MB'),
'2007': HmdaDataFile('hmda_2007_ks_all-records_codes.zip', '194856', '6.5 MB'),
'2017': HmdaDataFile('hmda_2017_ks_all-records_codes.zip', '107563', '2.69 MB'),
'2015': HmdaDataFile('hmda_2015_ks_all-records_codes.zip', '113367', '4.18 MB'),
'2014': HmdaDataFile('hmda_2014_ks_all-records_codes.zip', '96245', '3.49 MB'),
'2008': HmdaDataFile('hmda_2008_ks_all-records_codes.zip', '146968', '4.88 MB'),
'2009': HmdaDataFile('hmda_2009_ks_all-records_codes.zip', '175095', '5.5 MB'),
'2011': HmdaDataFile('hmda_2011_ks_all-records_codes.zip', '126214', '4.14 MB'),
'2010': HmdaDataFile('hmda_2010_ks_all-records_codes.zip', '149351', '4.91 MB'),
'2013': HmdaDataFile('hmda_2013_ks_all-records_codes.zip', '134547', '4.67 MB'),
'2012': HmdaDataFile('hmda_2012_ks_all-records_codes.zip', '149627', '5.06 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ks_originated-records_codes.zip', '69335', '2.18 MB'),
'2007': HmdaDataFile('hmda_2007_ks_originated-records_codes.zip', '86578', '2.85 MB'),
'2017': HmdaDataFile('hmda_2017_ks_originated-records_codes.zip', '61197', '1.44 MB'),
'2015': HmdaDataFile('hmda_2015_ks_originated-records_codes.zip', '63448', '2.26 MB'),
'2014': HmdaDataFile('hmda_2014_ks_originated-records_codes.zip', '53984', '1.86 MB'),
'2008': HmdaDataFile('hmda_2008_ks_originated-records_codes.zip', '72280', '2.34 MB'),
'2009': HmdaDataFile('hmda_2009_ks_originated-records_codes.zip', '90077', '2.69 MB'),
'2011': HmdaDataFile('hmda_2011_ks_originated-records_codes.zip', '66876', '2.05 MB'),
'2010': HmdaDataFile('hmda_2010_ks_originated-records_codes.zip', '78256', '2.38 MB'),
'2013': HmdaDataFile('hmda_2013_ks_originated-records_codes.zip', '74582', '2.39 MB'),
'2012': HmdaDataFile('hmda_2012_ks_originated-records_codes.zip', '84924', '2.62 MB')
}
}
},
'mt': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '21366', '875.05 KB'),
'2007': HmdaDataFile('hmda_2007_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '20722', '828.42 KB'),
'2017': HmdaDataFile('hmda_2017_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '18983', '556.44 KB'),
'2015': HmdaDataFile('hmda_2015_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '19758', '888.25 KB'),
'2014': HmdaDataFile('hmda_2014_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '15841', '699.12 KB'),
'2008': HmdaDataFile('hmda_2008_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '19303', '733.23 KB'),
'2009': HmdaDataFile('hmda_2009_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '30035', '1.08 MB'),
'2011': HmdaDataFile('hmda_2011_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '18104', '695.47 KB'),
'2010': HmdaDataFile('hmda_2010_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '21854', '845.34 KB'),
'2013': HmdaDataFile('hmda_2013_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '23631', '959.77 KB'),
'2012': HmdaDataFile('hmda_2012_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '25225', '1 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mt_all-records_labels.zip', '50537', '2.27 MB'),
'2007': HmdaDataFile('hmda_2007_mt_all-records_labels.zip', '72952', '3 MB'),
'2017': HmdaDataFile('hmda_2017_mt_all-records_labels.zip', '45597', '1.44 MB'),
'2015': HmdaDataFile('hmda_2015_mt_all-records_labels.zip', '46419', '2.32 MB'),
'2014': HmdaDataFile('hmda_2014_mt_all-records_labels.zip', '38476', '1.85 MB'),
'2008': HmdaDataFile('hmda_2008_mt_all-records_labels.zip', '55795', '2.26 MB'),
'2009': HmdaDataFile('hmda_2009_mt_all-records_labels.zip', '72632', '2.81 MB'),
'2011': HmdaDataFile('hmda_2011_mt_all-records_labels.zip', '45730', '1.97 MB'),
'2010': HmdaDataFile('hmda_2010_mt_all-records_labels.zip', '54464', '2.34 MB'),
'2013': HmdaDataFile('hmda_2013_mt_all-records_labels.zip', '56588', '2.52 MB'),
'2012': HmdaDataFile('hmda_2012_mt_all-records_labels.zip', '57320', '2.52 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mt_originated-records_labels.zip', '26863', '1.12 MB'),
'2007': HmdaDataFile('hmda_2007_mt_originated-records_labels.zip', '31811', '1.27 MB'),
'2017': HmdaDataFile('hmda_2017_mt_originated-records_labels.zip', '24577', '727.45 KB'),
'2015': HmdaDataFile('hmda_2015_mt_originated-records_labels.zip', '25376', '1.17 MB'),
'2014': HmdaDataFile('hmda_2014_mt_originated-records_labels.zip', '21159', '958.54 KB'),
'2008': HmdaDataFile('hmda_2008_mt_originated-records_labels.zip', '26278', '1.02 MB'),
'2009': HmdaDataFile('hmda_2009_mt_originated-records_labels.zip', '36202', '1.33 MB'),
'2011': HmdaDataFile('hmda_2011_mt_originated-records_labels.zip', '23529', '929.24 KB'),
'2010': HmdaDataFile('hmda_2010_mt_originated-records_labels.zip', '27263', '1.08 MB'),
'2013': HmdaDataFile('hmda_2013_mt_originated-records_labels.zip', '31007', '1.28 MB'),
'2012': HmdaDataFile('hmda_2012_mt_originated-records_labels.zip', '31452', '1.28 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '21366', '564.56 KB'),
'2007': HmdaDataFile('hmda_2007_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '20722', '538.06 KB'),
'2017': HmdaDataFile('hmda_2017_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '18983', '389.92 KB'),
'2015': HmdaDataFile('hmda_2015_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '19758', '575.24 KB'),
'2014': HmdaDataFile('hmda_2014_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '15841', '453.36 KB'),
'2008': HmdaDataFile('hmda_2008_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '19303', '479.86 KB'),
'2009': HmdaDataFile('hmda_2009_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '30035', '726.19 KB'),
'2011': HmdaDataFile('hmda_2011_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '18104', '448.42 KB'),
'2010': HmdaDataFile('hmda_2010_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '21854', '543.67 KB'),
'2013': HmdaDataFile('hmda_2013_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '23631', '616.38 KB'),
'2012': HmdaDataFile('hmda_2012_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '25225', '642.31 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mt_all-records_codes.zip', '50537', '1.41 MB'),
'2007': HmdaDataFile('hmda_2007_mt_all-records_codes.zip', '72952', '1.92 MB'),
'2017': HmdaDataFile('hmda_2017_mt_all-records_codes.zip', '45597', '947.96 KB'),
'2015': HmdaDataFile('hmda_2015_mt_all-records_codes.zip', '46419', '1.45 MB'),
'2014': HmdaDataFile('hmda_2014_mt_all-records_codes.zip', '38476', '1.16 MB'),
'2008': HmdaDataFile('hmda_2008_mt_all-records_codes.zip', '55795', '1.44 MB'),
'2009': HmdaDataFile('hmda_2009_mt_all-records_codes.zip', '72632', '1.84 MB'),
'2011': HmdaDataFile('hmda_2011_mt_all-records_codes.zip', '45730', '1.22 MB'),
'2010': HmdaDataFile('hmda_2010_mt_all-records_codes.zip', '54464', '1.45 MB'),
'2013': HmdaDataFile('hmda_2013_mt_all-records_codes.zip', '56588', '1.56 MB'),
'2012': HmdaDataFile('hmda_2012_mt_all-records_codes.zip', '57320', '1.57 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mt_originated-records_codes.zip', '26863', '722 KB'),
'2007': HmdaDataFile('hmda_2007_mt_originated-records_codes.zip', '31811', '828.36 KB'),
'2017': HmdaDataFile('hmda_2017_mt_originated-records_codes.zip', '24577', '501.64 KB'),
'2015': HmdaDataFile('hmda_2015_mt_originated-records_codes.zip', '25376', '750.19 KB'),
'2014': HmdaDataFile('hmda_2014_mt_originated-records_codes.zip', '21159', '615.26 KB'),
'2008': HmdaDataFile('hmda_2008_mt_originated-records_codes.zip', '26278', '663.11 KB'),
'2009': HmdaDataFile('hmda_2009_mt_originated-records_codes.zip', '36202', '887.48 KB'),
'2011': HmdaDataFile('hmda_2011_mt_originated-records_codes.zip', '23529', '592.46 KB'),
'2010': HmdaDataFile('hmda_2010_mt_originated-records_codes.zip', '27263', '689.27 KB'),
'2013': HmdaDataFile('hmda_2013_mt_originated-records_codes.zip', '31007', '815.5 KB'),
'2012': HmdaDataFile('hmda_2012_mt_originated-records_codes.zip', '31452', '815.03 KB')
}
}
},
'ms': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '37426', '1.74 MB'),
'2007': HmdaDataFile('hmda_2007_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '50509', '2.25 MB'),
'2017': HmdaDataFile('hmda_2017_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '36762', '1.08 MB'),
'2015': HmdaDataFile('hmda_2015_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '33087', '1.71 MB'),
'2014': HmdaDataFile('hmda_2014_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '29718', '1.39 MB'),
'2008': HmdaDataFile('hmda_2008_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '41719', '1.93 MB'),
'2009': HmdaDataFile('hmda_2009_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '46880', '1.96 MB'),
'2011': HmdaDataFile('hmda_2011_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '34477', '1.51 MB'),
'2010': HmdaDataFile('hmda_2010_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '39727', '1.77 MB'),
'2013': HmdaDataFile('hmda_2013_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '40639', '1.88 MB'),
'2012': HmdaDataFile('hmda_2012_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '43832', '1.98 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ms_all-records_labels.zip', '107199', '5.36 MB'),
'2007': HmdaDataFile('hmda_2007_ms_all-records_labels.zip', '173130', '7.83 MB'),
'2017': HmdaDataFile('hmda_2017_ms_all-records_labels.zip', '101384', '3.15 MB'),
'2015': HmdaDataFile('hmda_2015_ms_all-records_labels.zip', '95532', '5.35 MB'),
'2014': HmdaDataFile('hmda_2014_ms_all-records_labels.zip', '89193', '4.45 MB'),
'2008': HmdaDataFile('hmda_2008_ms_all-records_labels.zip', '136596', '6.41 MB'),
'2009': HmdaDataFile('hmda_2009_ms_all-records_labels.zip', '137988', '6 MB'),
'2011': HmdaDataFile('hmda_2011_ms_all-records_labels.zip', '106833', '5.1 MB'),
'2010': HmdaDataFile('hmda_2010_ms_all-records_labels.zip', '120079', '5.8 MB'),
'2013': HmdaDataFile('hmda_2013_ms_all-records_labels.zip', '115511', '5.75 MB'),
'2012': HmdaDataFile('hmda_2012_ms_all-records_labels.zip', '119816', '5.82 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ms_originated-records_labels.zip', '51684', '2.47 MB'),
'2007': HmdaDataFile('hmda_2007_ms_originated-records_labels.zip', '75757', '3.36 MB'),
'2017': HmdaDataFile('hmda_2017_ms_originated-records_labels.zip', '51038', '1.5 MB'),
'2015': HmdaDataFile('hmda_2015_ms_originated-records_labels.zip', '47435', '2.5 MB'),
'2014': HmdaDataFile('hmda_2014_ms_originated-records_labels.zip', '44303', '2.12 MB'),
'2008': HmdaDataFile('hmda_2008_ms_originated-records_labels.zip', '63435', '2.92 MB'),
'2009': HmdaDataFile('hmda_2009_ms_originated-records_labels.zip', '64926', '2.75 MB'),
'2011': HmdaDataFile('hmda_2011_ms_originated-records_labels.zip', '49962', '2.26 MB'),
'2010': HmdaDataFile('hmda_2010_ms_originated-records_labels.zip', '55509', '2.53 MB'),
'2013': HmdaDataFile('hmda_2013_ms_originated-records_labels.zip', '57688', '2.73 MB'),
'2012': HmdaDataFile('hmda_2012_ms_originated-records_labels.zip', '59972', '2.76 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '37426', '1.15 MB'),
'2007': HmdaDataFile('hmda_2007_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '50509', '1.5 MB'),
'2017': HmdaDataFile('hmda_2017_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '36762', '770.12 KB'),
'2015': HmdaDataFile('hmda_2015_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '33087', '1.14 MB'),
'2014': HmdaDataFile('hmda_2014_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '29718', '914.92 KB'),
'2008': HmdaDataFile('hmda_2008_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '41719', '1.3 MB'),
'2009': HmdaDataFile('hmda_2009_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '46880', '1.32 MB'),
'2011': HmdaDataFile('hmda_2011_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '34477', '986.29 KB'),
'2010': HmdaDataFile('hmda_2010_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '39727', '1.16 MB'),
'2013': HmdaDataFile('hmda_2013_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '40639', '1.23 MB'),
'2012': HmdaDataFile('hmda_2012_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '43832', '1.29 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ms_all-records_codes.zip', '107199', '3.37 MB'),
'2007': HmdaDataFile('hmda_2007_ms_all-records_codes.zip', '173130', '5.1 MB'),
'2017': HmdaDataFile('hmda_2017_ms_all-records_codes.zip', '101384', '2.07 MB'),
'2015': HmdaDataFile('hmda_2015_ms_all-records_codes.zip', '95532', '3.43 MB'),
'2014': HmdaDataFile('hmda_2014_ms_all-records_codes.zip', '89193', '2.8 MB'),
'2008': HmdaDataFile('hmda_2008_ms_all-records_codes.zip', '136596', '4.22 MB'),
'2009': HmdaDataFile('hmda_2009_ms_all-records_codes.zip', '137988', '3.96 MB'),
'2011': HmdaDataFile('hmda_2011_ms_all-records_codes.zip', '106833', '3.19 MB'),
'2010': HmdaDataFile('hmda_2010_ms_all-records_codes.zip', '120079', '3.65 MB'),
'2013': HmdaDataFile('hmda_2013_ms_all-records_codes.zip', '115511', '3.61 MB'),
'2012': HmdaDataFile('hmda_2012_ms_all-records_codes.zip', '119816', '3.68 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ms_originated-records_codes.zip', '51684', '1.61 MB'),
'2007': HmdaDataFile('hmda_2007_ms_originated-records_codes.zip', '75757', '2.24 MB'),
'2017': HmdaDataFile('hmda_2017_ms_originated-records_codes.zip', '51038', '1.05 MB'),
'2015': HmdaDataFile('hmda_2015_ms_originated-records_codes.zip', '47435', '1.65 MB'),
'2014': HmdaDataFile('hmda_2014_ms_originated-records_codes.zip', '44303', '1.37 MB'),
'2008': HmdaDataFile('hmda_2008_ms_originated-records_codes.zip', '63435', '1.96 MB'),
'2009': HmdaDataFile('hmda_2009_ms_originated-records_codes.zip', '64926', '1.84 MB'),
'2011': HmdaDataFile('hmda_2011_ms_originated-records_codes.zip', '49962', '1.46 MB'),
'2010': HmdaDataFile('hmda_2010_ms_originated-records_codes.zip', '55509', '1.64 MB'),
'2013': HmdaDataFile('hmda_2013_ms_originated-records_codes.zip', '57688', '1.76 MB'),
'2012': HmdaDataFile('hmda_2012_ms_originated-records_codes.zip', '59972', '1.79 MB')
}
}
},
'sc': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '108076', '5.05 MB'),
'2007': HmdaDataFile('hmda_2007_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '106873', '4.81 MB'),
'2017': HmdaDataFile('hmda_2017_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '100333', '3.01 MB'),
'2015': HmdaDataFile('hmda_2015_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '92167', '5.02 MB'),
'2014': HmdaDataFile('hmda_2014_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '73174', '3.73 MB'),
'2008': HmdaDataFile('hmda_2008_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '86677', '3.98 MB'),
'2009': HmdaDataFile('hmda_2009_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '110931', '4.77 MB'),
'2011': HmdaDataFile('hmda_2011_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '78145', '3.46 MB'),
'2010': HmdaDataFile('hmda_2010_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '85858', '3.92 MB'),
'2013': HmdaDataFile('hmda_2013_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '100643', '4.76 MB'),
'2012': HmdaDataFile('hmda_2012_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '106107', '5 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_sc_all-records_labels.zip', '257644', '13.17 MB'),
'2007': HmdaDataFile('hmda_2007_sc_all-records_labels.zip', '383001', '17.86 MB'),
'2017': HmdaDataFile('hmda_2017_sc_all-records_labels.zip', '242772', '8.25 MB'),
'2015': HmdaDataFile('hmda_2015_sc_all-records_labels.zip', '225542', '13.41 MB'),
'2014': HmdaDataFile('hmda_2014_sc_all-records_labels.zip', '191197', '10.66 MB'),
'2008': HmdaDataFile('hmda_2008_sc_all-records_labels.zip', '271908', '13.26 MB'),
'2009': HmdaDataFile('hmda_2009_sc_all-records_labels.zip', '291014', '13.32 MB'),
'2011': HmdaDataFile('hmda_2011_sc_all-records_labels.zip', '218369', '11.01 MB'),
'2010': HmdaDataFile('hmda_2010_sc_all-records_labels.zip', '235957', '12.07 MB'),
'2013': HmdaDataFile('hmda_2013_sc_all-records_labels.zip', '259782', '13.73 MB'),
'2012': HmdaDataFile('hmda_2012_sc_all-records_labels.zip', '267040', '14.05 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_sc_originated-records_labels.zip', '131134', '6.36 MB'),
'2007': HmdaDataFile('hmda_2007_sc_originated-records_labels.zip', '161777', '7.5 MB'),
'2017': HmdaDataFile('hmda_2017_sc_originated-records_labels.zip', '123971', '3.87 MB'),
'2015': HmdaDataFile('hmda_2015_sc_originated-records_labels.zip', '114336', '6.37 MB'),
'2014': HmdaDataFile('hmda_2014_sc_originated-records_labels.zip', '93412', '4.86 MB'),
'2008': HmdaDataFile('hmda_2008_sc_originated-records_labels.zip', '118458', '5.61 MB'),
'2009': HmdaDataFile('hmda_2009_sc_originated-records_labels.zip', '133057', '5.91 MB'),
'2011': HmdaDataFile('hmda_2011_sc_originated-records_labels.zip', '97461', '4.61 MB'),
'2010': HmdaDataFile('hmda_2010_sc_originated-records_labels.zip', '105626', '4.87 MB'),
'2013': HmdaDataFile('hmda_2013_sc_originated-records_labels.zip', '127479', '6.16 MB'),
'2012': HmdaDataFile('hmda_2012_sc_originated-records_labels.zip', '130453', '6.21 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '108076', '3.37 MB'),
'2007': HmdaDataFile('hmda_2007_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '106873', '3.2 MB'),
'2017': HmdaDataFile('hmda_2017_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '100333', '2.11 MB'),
'2015': HmdaDataFile('hmda_2015_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '92167', '3.46 MB'),
'2014': HmdaDataFile('hmda_2014_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '73174', '2.49 MB'),
'2008': HmdaDataFile('hmda_2008_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '86677', '2.67 MB'),
'2009': HmdaDataFile('hmda_2009_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '110931', '3.27 MB'),
'2011': HmdaDataFile('hmda_2011_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '78145', '2.24 MB'),
'2010': HmdaDataFile('hmda_2010_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '85858', '2.58 MB'),
'2013': HmdaDataFile('hmda_2013_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '100643', '3.16 MB'),
'2012': HmdaDataFile('hmda_2012_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '106107', '3.31 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_sc_all-records_codes.zip', '257644', '8.51 MB'),
'2007': HmdaDataFile('hmda_2007_sc_all-records_codes.zip', '383001', '11.71 MB'),
'2017': HmdaDataFile('hmda_2017_sc_all-records_codes.zip', '242772', '5.31 MB'),
'2015': HmdaDataFile('hmda_2015_sc_all-records_codes.zip', '225542', '8.88 MB'),
'2014': HmdaDataFile('hmda_2014_sc_all-records_codes.zip', '191197', '6.91 MB'),
'2008': HmdaDataFile('hmda_2008_sc_all-records_codes.zip', '271908', '8.77 MB'),
'2009': HmdaDataFile('hmda_2009_sc_all-records_codes.zip', '291014', '8.92 MB'),
'2011': HmdaDataFile('hmda_2011_sc_all-records_codes.zip', '218369', '6.92 MB'),
'2010': HmdaDataFile('hmda_2010_sc_all-records_codes.zip', '235957', '7.67 MB'),
'2013': HmdaDataFile('hmda_2013_sc_all-records_codes.zip', '259782', '8.91 MB'),
'2012': HmdaDataFile('hmda_2012_sc_all-records_codes.zip', '267040', '9.12 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_sc_originated-records_codes.zip', '131134', '4.24 MB'),
'2007': HmdaDataFile('hmda_2007_sc_originated-records_codes.zip', '161777', '5.03 MB'),
'2017': HmdaDataFile('hmda_2017_sc_originated-records_codes.zip', '123971', '2.67 MB'),
'2015': HmdaDataFile('hmda_2015_sc_originated-records_codes.zip', '114336', '4.36 MB'),
'2014': HmdaDataFile('hmda_2014_sc_originated-records_codes.zip', '93412', '3.22 MB'),
'2008': HmdaDataFile('hmda_2008_sc_originated-records_codes.zip', '118458', '3.78 MB'),
'2009': HmdaDataFile('hmda_2009_sc_originated-records_codes.zip', '133057', '4.05 MB'),
'2011': HmdaDataFile('hmda_2011_sc_originated-records_codes.zip', '97461', '2.99 MB'),
'2010': HmdaDataFile('hmda_2010_sc_originated-records_codes.zip', '105626', '3.17 MB'),
'2013': HmdaDataFile('hmda_2013_sc_originated-records_codes.zip', '127479', '4.06 MB'),
'2012': HmdaDataFile('hmda_2012_sc_originated-records_codes.zip', '130453', '4.07 MB')
}
}
},
'ky': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '81819', '3.99 MB'),
'2007': HmdaDataFile('hmda_2007_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '82043', '3.84 MB'),
'2017': HmdaDataFile('hmda_2017_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '72775', '2.34 MB'),
'2015': HmdaDataFile('hmda_2015_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '71716', '3.98 MB'),
'2014': HmdaDataFile('hmda_2014_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '59329', '3.1 MB'),
'2008': HmdaDataFile('hmda_2008_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '73163', '3.45 MB'),
'2009': HmdaDataFile('hmda_2009_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '103950', '4.54 MB'),
'2011': HmdaDataFile('hmda_2011_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '80580', '3.74 MB'),
'2010': HmdaDataFile('hmda_2010_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '91441', '4.28 MB'),
'2013': HmdaDataFile('hmda_2013_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '89932', '4.39 MB'),
'2012': HmdaDataFile('hmda_2012_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '105634', '5.06 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ky_all-records_labels.zip', '185587', '10.06 MB'),
'2007': HmdaDataFile('hmda_2007_ky_all-records_labels.zip', '285560', '14.05 MB'),
'2017': HmdaDataFile('hmda_2017_ky_all-records_labels.zip', '173004', '6.52 MB'),
'2015': HmdaDataFile('hmda_2015_ky_all-records_labels.zip', '167714', '10.04 MB'),
'2014': HmdaDataFile('hmda_2014_ky_all-records_labels.zip', '149317', '8.6 MB'),
'2008': HmdaDataFile('hmda_2008_ky_all-records_labels.zip', '215096', '10.84 MB'),
'2009': HmdaDataFile('hmda_2009_ky_all-records_labels.zip', '246427', '11.78 MB'),
'2011': HmdaDataFile('hmda_2011_ky_all-records_labels.zip', '203934', '10.77 MB'),
'2010': HmdaDataFile('hmda_2010_ky_all-records_labels.zip', '222486', '11.61 MB'),
'2013': HmdaDataFile('hmda_2013_ky_all-records_labels.zip', '215281', '11.75 MB'),
'2012': HmdaDataFile('hmda_2012_ky_all-records_labels.zip', '239015', '12.85 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ky_originated-records_labels.zip', '100105', '5.04 MB'),
'2007': HmdaDataFile('hmda_2007_ky_originated-records_labels.zip', '121278', '5.84 MB'),
'2017': HmdaDataFile('hmda_2017_ky_originated-records_labels.zip', '91096', '3.01 MB'),
'2015': HmdaDataFile('hmda_2015_ky_originated-records_labels.zip', '88714', '5.02 MB'),
'2014': HmdaDataFile('hmda_2014_ky_originated-records_labels.zip', '76520', '4.06 MB'),
'2008': HmdaDataFile('hmda_2008_ky_originated-records_labels.zip', '99394', '4.77 MB'),
'2009': HmdaDataFile('hmda_2009_ky_originated-records_labels.zip', '123485', '5.57 MB'),
'2011': HmdaDataFile('hmda_2011_ky_originated-records_labels.zip', '98794', '4.7 MB'),
'2010': HmdaDataFile('hmda_2010_ky_originated-records_labels.zip', '109716', '5.22 MB'),
'2013': HmdaDataFile('hmda_2013_ky_originated-records_labels.zip', '110912', '5.58 MB'),
'2012': HmdaDataFile('hmda_2012_ky_originated-records_labels.zip', '125050', '6.13 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '81819', '2.64 MB'),
'2007': HmdaDataFile('hmda_2007_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '82043', '2.59 MB'),
'2017': HmdaDataFile('hmda_2017_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '72775', '1.68 MB'),
'2015': HmdaDataFile('hmda_2015_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '71716', '2.68 MB'),
'2014': HmdaDataFile('hmda_2014_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '59329', '2.06 MB'),
'2008': HmdaDataFile('hmda_2008_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '73163', '2.34 MB'),
'2009': HmdaDataFile('hmda_2009_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '103950', '3.13 MB'),
'2011': HmdaDataFile('hmda_2011_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '80580', '2.43 MB'),
'2010': HmdaDataFile('hmda_2010_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '91441', '2.81 MB'),
'2013': HmdaDataFile('hmda_2013_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '89932', '2.91 MB'),
'2012': HmdaDataFile('hmda_2012_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '105634', '3.33 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ky_all-records_codes.zip', '185587', '6.56 MB'),
'2007': HmdaDataFile('hmda_2007_ky_all-records_codes.zip', '285560', '9.35 MB'),
'2017': HmdaDataFile('hmda_2017_ky_all-records_codes.zip', '173004', '4.32 MB'),
'2015': HmdaDataFile('hmda_2015_ky_all-records_codes.zip', '167714', '6.5 MB'),
'2014': HmdaDataFile('hmda_2014_ky_all-records_codes.zip', '149317', '5.56 MB'),
'2008': HmdaDataFile('hmda_2008_ky_all-records_codes.zip', '215096', '7.24 MB'),
'2009': HmdaDataFile('hmda_2009_ky_all-records_codes.zip', '246427', '7.96 MB'),
'2011': HmdaDataFile('hmda_2011_ky_all-records_codes.zip', '203934', '6.87 MB'),
'2010': HmdaDataFile('hmda_2010_ky_all-records_codes.zip', '222486', '7.44 MB'),
'2013': HmdaDataFile('hmda_2013_ky_all-records_codes.zip', '215281', '7.63 MB'),
'2012': HmdaDataFile('hmda_2012_ky_all-records_codes.zip', '239015', '8.34 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ky_originated-records_codes.zip', '100105', '3.34 MB'),
'2007': HmdaDataFile('hmda_2007_ky_originated-records_codes.zip', '121278', '3.97 MB'),
'2017': HmdaDataFile('hmda_2017_ky_originated-records_codes.zip', '91096', '2.12 MB'),
'2015': HmdaDataFile('hmda_2015_ky_originated-records_codes.zip', '88714', '3.36 MB'),
'2014': HmdaDataFile('hmda_2014_ky_originated-records_codes.zip', '76520', '2.67 MB'),
'2008': HmdaDataFile('hmda_2008_ky_originated-records_codes.zip', '99394', '3.22 MB'),
'2009': HmdaDataFile('hmda_2009_ky_originated-records_codes.zip', '123485', '3.84 MB'),
'2011': HmdaDataFile('hmda_2011_ky_originated-records_codes.zip', '98794', '3.04 MB'),
'2010': HmdaDataFile('hmda_2010_ky_originated-records_codes.zip', '109716', '3.39 MB'),
'2013': HmdaDataFile('hmda_2013_ky_originated-records_codes.zip', '110912', '3.68 MB'),
'2012': HmdaDataFile('hmda_2012_ky_originated-records_codes.zip', '125050', '4.01 MB')
}
}
},
'or': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '114167', '5.46 MB'),
'2007': HmdaDataFile('hmda_2007_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '101187', '4.59 MB'),
'2017': HmdaDataFile('hmda_2017_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '95129', '2.88 MB'),
'2015': HmdaDataFile('hmda_2015_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '97022', '5.17 MB'),
'2014': HmdaDataFile('hmda_2014_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '69787', '3.39 MB'),
'2008': HmdaDataFile('hmda_2008_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '77187', '3.56 MB'),
'2009': HmdaDataFile('hmda_2009_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '118063', '5.04 MB'),
'2011': HmdaDataFile('hmda_2011_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '80824', '3.93 MB'),
'2010': HmdaDataFile('hmda_2010_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '102166', '4.84 MB'),
'2013': HmdaDataFile('hmda_2013_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '102103', '4.92 MB'),
'2012': HmdaDataFile('hmda_2012_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '119486', '5.65 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_or_all-records_labels.zip', '249739', '12.75 MB'),
'2007': HmdaDataFile('hmda_2007_or_all-records_labels.zip', '376732', '17.18 MB'),
'2017': HmdaDataFile('hmda_2017_or_all-records_labels.zip', '211344', '7.17 MB'),
'2015': HmdaDataFile('hmda_2015_or_all-records_labels.zip', '214365', '12.36 MB'),
'2014': HmdaDataFile('hmda_2014_or_all-records_labels.zip', '168582', '8.75 MB'),
'2008': HmdaDataFile('hmda_2008_or_all-records_labels.zip', '251125', '12 MB'),
'2009': HmdaDataFile('hmda_2009_or_all-records_labels.zip', '300552', '13.38 MB'),
'2011': HmdaDataFile('hmda_2011_or_all-records_labels.zip', '204085', '10.92 MB'),
'2010': HmdaDataFile('hmda_2010_or_all-records_labels.zip', '244799', '12.74 MB'),
'2013': HmdaDataFile('hmda_2013_or_all-records_labels.zip', '240614', '12.69 MB'),
'2012': HmdaDataFile('hmda_2012_or_all-records_labels.zip', '269285', '14.02 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_or_originated-records_labels.zip', '136083', '6.58 MB'),
'2007': HmdaDataFile('hmda_2007_or_originated-records_labels.zip', '151933', '7.06 MB'),
'2017': HmdaDataFile('hmda_2017_or_originated-records_labels.zip', '116700', '3.65 MB'),
'2015': HmdaDataFile('hmda_2015_or_originated-records_labels.zip', '117674', '6.39 MB'),
'2014': HmdaDataFile('hmda_2014_or_originated-records_labels.zip', '87626', '4.34 MB'),
'2008': HmdaDataFile('hmda_2008_or_originated-records_labels.zip', '97998', '4.65 MB'),
'2009': HmdaDataFile('hmda_2009_or_originated-records_labels.zip', '134377', '5.88 MB'),
'2011': HmdaDataFile('hmda_2011_or_originated-records_labels.zip', '98243', '4.9 MB'),
'2010': HmdaDataFile('hmda_2010_or_originated-records_labels.zip', '118373', '5.68 MB'),
'2013': HmdaDataFile('hmda_2013_or_originated-records_labels.zip', '128622', '6.28 MB'),
'2012': HmdaDataFile('hmda_2012_or_originated-records_labels.zip', '144891', '7.02 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '114167', '3.73 MB'),
'2007': HmdaDataFile('hmda_2007_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '101187', '3.17 MB'),
'2017': HmdaDataFile('hmda_2017_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '95129', '1.97 MB'),
'2015': HmdaDataFile('hmda_2015_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '97022', '3.6 MB'),
'2014': HmdaDataFile('hmda_2014_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '69787', '2.33 MB'),
'2008': HmdaDataFile('hmda_2008_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '77187', '2.47 MB'),
'2009': HmdaDataFile('hmda_2009_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '118063', '3.58 MB'),
'2011': HmdaDataFile('hmda_2011_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '80824', '2.65 MB'),
'2010': HmdaDataFile('hmda_2010_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '102166', '3.28 MB'),
'2013': HmdaDataFile('hmda_2013_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '102103', '3.39 MB'),
'2012': HmdaDataFile('hmda_2012_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '119486', '3.83 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_or_all-records_codes.zip', '249739', '8.44 MB'),
'2007': HmdaDataFile('hmda_2007_or_all-records_codes.zip', '376732', '11.64 MB'),
'2017': HmdaDataFile('hmda_2017_or_all-records_codes.zip', '211344', '4.61 MB'),
'2015': HmdaDataFile('hmda_2015_or_all-records_codes.zip', '214365', '8.29 MB'),
'2014': HmdaDataFile('hmda_2014_or_all-records_codes.zip', '168582', '5.8 MB'),
'2008': HmdaDataFile('hmda_2008_or_all-records_codes.zip', '251125', '8.16 MB'),
'2009': HmdaDataFile('hmda_2009_or_all-records_codes.zip', '300552', '9.29 MB'),
'2011': HmdaDataFile('hmda_2011_or_all-records_codes.zip', '204085', '7.17 MB'),
'2010': HmdaDataFile('hmda_2010_or_all-records_codes.zip', '244799', '8.44 MB'),
'2013': HmdaDataFile('hmda_2013_or_all-records_codes.zip', '240614', '8.5 MB'),
'2012': HmdaDataFile('hmda_2012_or_all-records_codes.zip', '269285', '9.32 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_or_originated-records_codes.zip', '136083', '4.48 MB'),
'2007': HmdaDataFile('hmda_2007_or_originated-records_codes.zip', '151933', '4.92 MB'),
'2017': HmdaDataFile('hmda_2017_or_originated-records_codes.zip', '116700', '2.48 MB'),
'2015': HmdaDataFile('hmda_2015_or_originated-records_codes.zip', '117674', '4.43 MB'),
'2014': HmdaDataFile('hmda_2014_or_originated-records_codes.zip', '87626', '2.98 MB'),
'2008': HmdaDataFile('hmda_2008_or_originated-records_codes.zip', '97998', '3.24 MB'),
'2009': HmdaDataFile('hmda_2009_or_originated-records_codes.zip', '134377', '4.19 MB'),
'2011': HmdaDataFile('hmda_2011_or_originated-records_codes.zip', '98243', '3.29 MB'),
'2010': HmdaDataFile('hmda_2010_or_originated-records_codes.zip', '118373', '3.84 MB'),
'2013': HmdaDataFile('hmda_2013_or_originated-records_codes.zip', '128622', '4.29 MB'),
'2012': HmdaDataFile('hmda_2012_or_originated-records_codes.zip', '144891', '4.75 MB')
}
}
},
'sd': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '17608', '715.46 KB'),
'2007': HmdaDataFile('hmda_2007_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '15984', '612.26 KB'),
'2017': HmdaDataFile('hmda_2017_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '15341', '394.73 KB'),
'2015': HmdaDataFile('hmda_2015_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '15835', '706.73 KB'),
'2014': HmdaDataFile('hmda_2014_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '12839', '526.79 KB'),
'2008': HmdaDataFile('hmda_2008_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '16258', '623.5 KB'),
'2009': HmdaDataFile('hmda_2009_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '24417', '866.74 KB'),
'2011': HmdaDataFile('hmda_2011_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '17828', '666.34 KB'),
'2010': HmdaDataFile('hmda_2010_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '20738', '786.61 KB'),
'2013': HmdaDataFile('hmda_2013_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '19523', '775.49 KB'),
'2012': HmdaDataFile('hmda_2012_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '24459', '981.4 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_sd_all-records_labels.zip', '37648', '1.67 MB'),
'2007': HmdaDataFile('hmda_2007_sd_all-records_labels.zip', '47432', '1.97 MB'),
'2017': HmdaDataFile('hmda_2017_sd_all-records_labels.zip', '33167', '980.45 KB'),
'2015': HmdaDataFile('hmda_2015_sd_all-records_labels.zip', '35072', '1.72 MB'),
'2014': HmdaDataFile('hmda_2014_sd_all-records_labels.zip', '29763', '1.37 MB'),
'2008': HmdaDataFile('hmda_2008_sd_all-records_labels.zip', '41213', '1.78 MB'),
'2009': HmdaDataFile('hmda_2009_sd_all-records_labels.zip', '53033', '2.13 MB'),
'2011': HmdaDataFile('hmda_2011_sd_all-records_labels.zip', '38426', '1.67 MB'),
'2010': HmdaDataFile('hmda_2010_sd_all-records_labels.zip', '45150', '1.96 MB'),
'2013': HmdaDataFile('hmda_2013_sd_all-records_labels.zip', '43401', '1.95 MB'),
'2012': HmdaDataFile('hmda_2012_sd_all-records_labels.zip', '48753', '2.2 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_sd_originated-records_labels.zip', '20937', '887.21 KB'),
'2007': HmdaDataFile('hmda_2007_sd_originated-records_labels.zip', '24091', '970 KB'),
'2017': HmdaDataFile('hmda_2017_sd_originated-records_labels.zip', '18871', '517.46 KB'),
'2015': HmdaDataFile('hmda_2015_sd_originated-records_labels.zip', '19418', '890.97 KB'),
'2014': HmdaDataFile('hmda_2014_sd_originated-records_labels.zip', '16136', '696.36 KB'),
'2008': HmdaDataFile('hmda_2008_sd_originated-records_labels.zip', '22824', '912.18 KB'),
'2009': HmdaDataFile('hmda_2009_sd_originated-records_labels.zip', '29867', '1.11 MB'),
'2011': HmdaDataFile('hmda_2011_sd_originated-records_labels.zip', '21818', '862.15 KB'),
'2010': HmdaDataFile('hmda_2010_sd_originated-records_labels.zip', '25068', '980.72 KB'),
'2013': HmdaDataFile('hmda_2013_sd_originated-records_labels.zip', '23684', '969.63 KB'),
'2012': HmdaDataFile('hmda_2012_sd_originated-records_labels.zip', '28651', '1.18 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '17608', '452.93 KB'),
'2007': HmdaDataFile('hmda_2007_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '15984', '392.92 KB'),
'2017': HmdaDataFile('hmda_2017_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '15341', '275.01 KB'),
'2015': HmdaDataFile('hmda_2015_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '15835', '442.1 KB'),
'2014': HmdaDataFile('hmda_2014_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '12839', '334.62 KB'),
'2008': HmdaDataFile('hmda_2008_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '16258', '411.49 KB'),
'2009': HmdaDataFile('hmda_2009_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '24417', '586.34 KB'),
'2011': HmdaDataFile('hmda_2011_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '17828', '429.4 KB'),
'2010': HmdaDataFile('hmda_2010_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '20738', '504.37 KB'),
'2013': HmdaDataFile('hmda_2013_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '19523', '492.06 KB'),
'2012': HmdaDataFile('hmda_2012_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '24459', '623.71 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_sd_all-records_codes.zip', '37648', '1.02 MB'),
'2007': HmdaDataFile('hmda_2007_sd_all-records_codes.zip', '47432', '1.24 MB'),
'2017': HmdaDataFile('hmda_2017_sd_all-records_codes.zip', '33167', '657.41 KB'),
'2015': HmdaDataFile('hmda_2015_sd_all-records_codes.zip', '35072', '1.04 MB'),
'2014': HmdaDataFile('hmda_2014_sd_all-records_codes.zip', '29763', '835.84 KB'),
'2008': HmdaDataFile('hmda_2008_sd_all-records_codes.zip', '41213', '1.13 MB'),
'2009': HmdaDataFile('hmda_2009_sd_all-records_codes.zip', '53033', '1.39 MB'),
'2011': HmdaDataFile('hmda_2011_sd_all-records_codes.zip', '38426', '1.03 MB'),
'2010': HmdaDataFile('hmda_2010_sd_all-records_codes.zip', '45150', '1.22 MB'),
'2013': HmdaDataFile('hmda_2013_sd_all-records_codes.zip', '43401', '1.19 MB'),
'2012': HmdaDataFile('hmda_2012_sd_all-records_codes.zip', '48753', '1.35 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_sd_originated-records_codes.zip', '20937', '557.49 KB'),
'2007': HmdaDataFile('hmda_2007_sd_originated-records_codes.zip', '24091', '623.6 KB'),
'2017': HmdaDataFile('hmda_2017_sd_originated-records_codes.zip', '18871', '357.8 KB'),
'2015': HmdaDataFile('hmda_2015_sd_originated-records_codes.zip', '19418', '553.25 KB'),
'2014': HmdaDataFile('hmda_2014_sd_originated-records_codes.zip', '16136', '436.32 KB'),
'2008': HmdaDataFile('hmda_2008_sd_originated-records_codes.zip', '22824', '595.53 KB'),
'2009': HmdaDataFile('hmda_2009_sd_originated-records_codes.zip', '29867', '746.74 KB'),
'2011': HmdaDataFile('hmda_2011_sd_originated-records_codes.zip', '21818', '549.43 KB'),
'2010': HmdaDataFile('hmda_2010_sd_originated-records_codes.zip', '25068', '623.74 KB'),
'2013': HmdaDataFile('hmda_2013_sd_originated-records_codes.zip', '23684', '608.21 KB'),
'2012': HmdaDataFile('hmda_2012_sd_originated-records_codes.zip', '28651', '744.18 KB')
}
}
}
}
| true
| true
|
f70c7d7cbcef3d7c08dc59ed660a624713cbabbe
| 2,285
|
py
|
Python
|
contenido_gc.py
|
euribates/biopython
|
dcd4e1cde413d52adb91ac874de2f2bdd8d214e5
|
[
"MIT"
] | null | null | null |
contenido_gc.py
|
euribates/biopython
|
dcd4e1cde413d52adb91ac874de2f2bdd8d214e5
|
[
"MIT"
] | null | null | null |
contenido_gc.py
|
euribates/biopython
|
dcd4e1cde413d52adb91ac874de2f2bdd8d214e5
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# CALCULATION OF GC CONTENT (GENES)
import os
import logging
import argparse
import fastalib
from fileutils import safe_filename
def get_option_args():
    """Parse command-line options for the GC-content script.

    Returns the argparse namespace with:
      path -- directory to scan for FASTA (.faa) files.
      tron -- True when activity tracing was requested.

    Side effect: configures the root logger (INFO when --tron is given,
    ERROR otherwise).
    """
    args_parser = argparse.ArgumentParser(
        description='Found GC in FASTA files of a directory',
    )
    args_parser.add_argument(
        'path',
        help='Path where look for FASTA files, use . to use current working dir',
    )
    # 'store_true' is the idiomatic form of store_const/const=True/default=False
    # and yields exactly the same namespace value.
    args_parser.add_argument(
        '--tron',
        dest='tron',
        action='store_true',
        help='Show trace of activity (Disabled by default)',
    )
    args = args_parser.parse_args()
    logging.basicConfig(
        level=logging.INFO if args.tron else logging.ERROR,
        format='%(asctime)s %(levelname)s %(message)s',
    )
    return args
if __name__ == '__main__':
    args = get_option_args()
    # Only plain FASTA amino-acid files (.faa) in the requested directory.
    genomes = [fn for fn in os.listdir(args.path) if fn.endswith(".faa")]
    # Counter hoisted out of the loop: it previously reset per genome, so the
    # final summary only reported outputs for the last file processed.
    num_outputs = 0
    for genome in genomes:
        logging.info('Processing {}'.format(genome))
        full_name = os.path.join(args.path, genome)
        data = fastalib.read_fasta_file(full_name)
        logging.info('Generating output files')
        for key in data:
            lines = data[key]
            # One file with the raw sequence lines for this record.
            seq_filename = safe_filename('result_id_{}.fasta'.format(key))
            with open(seq_filename, 'w') as f1:
                for line in lines:
                    f1.write('{}\n'.format(line))
            num_outputs += 1
            # One file with the per-nucleotide counts and the GC proportion.
            g, a, t, c = fastalib.count_nucleotydes_gatc(lines)
            stats_filename = safe_filename('result_GC_{}.fasta'.format(key))
            with open(stats_filename, 'w') as f2:
                f2.write('Guanine: {:d}\n'.format(g))
                f2.write('Adenine: {:d}\n'.format(a))
                f2.write('Thymine: {:d}\n'.format(t))
                f2.write('Cytosine: {:d}\n'.format(c))
                total = a + c + g + t
                # Guard: an empty/non-nucleotide record previously raised
                # ZeroDivisionError here.
                p = round(float(c + g) / total, 9) if total else 0.0
                f2.write('CG proportion: {:9f}\n'.format(p))
            num_outputs += 1
    # The explicit `if args.tron` guards were removed: get_option_args()
    # already sets the log level to ERROR when tracing is off, so these
    # INFO records are suppressed either way.
    logging.info('Finished: files processed {}, generated {}'.format(
        len(genomes),
        num_outputs,
    ))
| 32.642857
| 81
| 0.557549
|
import os
import logging
import argparse
import fastalib
from fileutils import safe_filename
def get_option_args():
    """Build the CLI parser, parse argv, configure logging and return options."""
    parser = argparse.ArgumentParser(
        description='Found GC in FASTA files of a directory',
    )
    parser.add_argument(
        'path',
        help='Path where look for FASTA files, use . to use current working dir',
    )
    parser.add_argument(
        '--tron',
        dest='tron',
        action='store_const',
        const=True,
        default=False,
        help='Show trace of activity (Disabled by default)',
    )
    options = parser.parse_args()
    # Tracing enabled -> chatty INFO log; otherwise only errors get through.
    if options.tron:
        log_level = logging.INFO
    else:
        log_level = logging.ERROR
    logging.basicConfig(
        level=log_level,
        format='%(asctime)s %(levelname)s %(message)s',
    )
    return options
if __name__ == '__main__':
    args = get_option_args()
    # Collect the FASTA amino-acid files found in the target directory.
    fasta_files = [entry for entry in os.listdir(args.path) if entry.endswith(".faa")]
    for fasta_file in fasta_files:
        if args.tron:
            logging.info('Processing {}'.format(fasta_file))
        records = fastalib.read_fasta_file(os.path.join(args.path, fasta_file))
        if args.tron:
            logging.info('Generating output files')
        num_outputs = 0
        for identifier, record_lines in records.items():
            # Dump the raw sequence lines for this record.
            seq_out = safe_filename('result_id_{}.fasta'.format(identifier))
            with open(seq_out, 'w') as seq_handle:
                for record_line in record_lines:
                    seq_handle.write('{}\n'.format(record_line))
            num_outputs += 1
            # Dump the nucleotide counts plus the GC proportion.
            g, a, t, c = fastalib.count_nucleotydes_gatc(record_lines)
            gc_out = safe_filename('result_GC_{}.fasta'.format(identifier))
            with open(gc_out, 'w') as gc_handle:
                gc_handle.write('Guanine: {:d}\n'.format(g))
                gc_handle.write('Adenine: {:d}\n'.format(a))
                gc_handle.write('Thymine: {:d}\n'.format(t))
                gc_handle.write('Cytosine: {:d}\n'.format(c))
                ratio = round(float(c + g) / (a + c + g + t), 9)
                gc_handle.write('CG proportion: {:9f}\n'.format(ratio))
            num_outputs += 1
    if args.tron:
        logging.info('Finished: files processed {}, generated {}'.format(
            len(fasta_files),
            num_outputs,
        ))
| true
| true
|
f70c7e01657553cf9cb3359901dd1b4a4e60dc7a
| 339,392
|
py
|
Python
|
pkgs/ops-pkg/src/genie/libs/ops/bgp/iosxr/yang/tests/bgp_output.py
|
miott/genielibs
|
6464642cdd67aa2367bdbb12561af4bb060e5e62
|
[
"Apache-2.0"
] | 94
|
2018-04-30T20:29:15.000Z
|
2022-03-29T13:40:31.000Z
|
pkgs/ops-pkg/src/genie/libs/ops/bgp/iosxr/yang/tests/bgp_output.py
|
miott/genielibs
|
6464642cdd67aa2367bdbb12561af4bb060e5e62
|
[
"Apache-2.0"
] | 67
|
2018-12-06T21:08:09.000Z
|
2022-03-29T18:00:46.000Z
|
pkgs/ops-pkg/src/genie/libs/ops/bgp/iosxr/yang/tests/bgp_output.py
|
miott/genielibs
|
6464642cdd67aa2367bdbb12561af4bb060e5e62
|
[
"Apache-2.0"
] | 49
|
2018-06-29T18:59:03.000Z
|
2022-03-10T02:07:59.000Z
|
'''
BGP Genie Ops Object Outputs for IOSXR.
'''
import xml.etree.ElementTree as ET
class BgpOutput(object):
############################################################################
# BGP INFO
############################################################################
ShowBgpInstances = {
"instance": {
"test1": {
"num_vrfs": 0,
"instance_id": 1,
"placed_grp": "bgp2_1",
"bgp_id": 333
},
"default": {
"num_vrfs": 2,
"instance_id": 3,
"address_families": [
"ipv4 unicast",
"vpnv4 unicast",
"ipv6 unicast",
"vpnv6 unicast"
],
"placed_grp": "bgp4_1",
"bgp_id": 100
},
"test": {
"num_vrfs": 0,
"instance_id": 0,
"placed_grp": "v4_routing",
"bgp_id": 333
},
"test2": {
"num_vrfs": 0,
"instance_id": 2,
"placed_grp": "bgp3_1",
"bgp_id": 333}}}
ShowPlacementProgramAll = {
'program':
{'rcp_fs':
{'instance':
{'default':
{'active': '0/0/CPU0',
'active_state': 'RUNNING',
'group': 'central-services',
'jid': '1168',
'standby': 'NONE',
'standby_state': 'NOT_SPAWNED'}}},
'bgp':
{'instance':
{'default':
{'active': '0/0/CPU0',
'active_state': 'RUNNING',
'group': 'v4-routing',
'jid': '1018',
'standby': 'NONE',
'standby_state': 'NOT_SPAWNED'}}},
'ospf':
{'instance':
{'1':
{'active': '0/0/CPU0',
'active_state': 'RUNNING',
'group': 'v4-routing',
'jid': '1018',
'standby': 'NONE',
'standby_state': 'NOT_SPAWNED'}}},
'statsd_manager_g':
{'instance':
{'default':
{'active': '0/0/CPU0',
'active_state': 'RUNNING',
'group': 'netmgmt',
'jid': '1141',
'standby': 'NONE',
'standby_state': 'NOT_SPAWNED'}}},
'pim':
{'instance':
{'default':
{'active': '0/0/CPU0',
'active_state': 'RUNNING',
'group': 'mcast-routing',
'jid': '1158',
'standby': 'NONE',
'standby_state': 'NOT_SPAWNED'}}},
'ipv6_local':
{'instance':
{'default':
{'active': '0/0/CPU0',
'active_state': 'RUNNING',
'group': 'v6-routing',
'jid': '1156',
'standby': 'NONE',
'standby_state': 'NOT_SPAWNED'}}}}
}
ShowBgpInstanceSessionGroupConfiguration = {
"default": {
"peer_session": {
"SG": {
"remote_as": 333,
"fall_over_bfd": True,
"password_text": "094F471A1A0A464058",
"holdtime": 30,
"transport_connection_mode": "active-only",
"ebgp_multihop_max_hop": 254,
"local_replace_as": True,
"ps_minimum_holdtime": 3,
"keepalive_interval": 10,
"shutdown": True,
"local_dual_as": True,
"local_no_prepend": True,
"ebgp_multihop_enable": True,
"suppress_four_byte_as_capability": True,
"local_as_as_no": 200,
"description": "SG_group",
"update_source": 'loopback0',
"disable_connected_check": True
}
}
}
}
ShowBgpInstanceAfGroupConfiguration = {
"instance": {
"default": {
"pp_name": {
"af_group": {
"address_family": "ipv4 unicast",
"default_originate": True,
"default_originate_route_map": "allpass",
"maximum_prefix_max_prefix_no": 429,
"maximum_prefix_threshold": 75,
"maximum_prefix_restart": 35,
"next_hop_self": True,
"route_map_name_in": "allpass",
"route_map_name_out": "allpass",
"route_reflector_client": True,
"send_community": "both",
"send_comm_ebgp": True,
"send_ext_comm_ebgp": True,
"soo": "100:1",
"soft_reconfiguration": "inbound always",
"allowas_in_as_number": 10,
"allowas_in": True,
"as_override": True
}
}
}
}
}
# =====================
# Process Detail Output
# =====================
# 'all all all'
ProcessAllOutput = '''\
RP/0/RSP1/CPU0:PE1#show bgp instance all all all process detail
BGP instance 0: 'default'
=========================
BGP Process Information:
BGP is operating in STANDALONE mode
Autonomous System number format: ASPLAIN
Autonomous System: 100
Router ID: 10.4.1.1 (manually configured)
Default Cluster ID: 10.4.1.1
Active Cluster IDs: 10.4.1.1
Fast external fallover enabled
Platform RLIMIT max: 2281701376 bytes
Maximum limit for BMP buffer size: 435 MB
Default value for BMP buffer size: 326 MB
Current limit for BMP buffer size: 326 MB
Current utilization of BMP buffer limit: 0 B
Neighbor logging is enabled
Enforce first AS enabled
Default local preference: 100
Default keepalive: 60
Non-stop routing is enabled
Update delay: 120
Generic scan interval: 60
BGP Speaker process: 3, Node: node0_RSP1_CPU0
Restart count: 32
Total Nbrs Estab/Cfg
Default VRFs: 1 0/3
Non-Default VRFs: 2 0/4
Sent Received
Updates: 0 0
Notifications: 0 0
Number Memory Used
Attributes: 0 0
AS Paths: 0 0
Communities: 0 0
Large Communities: 0 0
Extended communities: 0 0
PMSI Tunnel attr: 0 0
RIBRNH Tunnel attr: 0 0
PPMP attr: 0 0
Tunnel Encap attr: 0 0
PE distinguisher labels: 0 0
Route Reflector Entries: 0 0
Nexthop Entries: 27 10800
Alloc Free
Pool 200: 0 0
Pool 300: 1 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5000: 0 0
Pool 20000: 0 0
Message logging pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 500: 0 0
Pool 2200: 0 0
Pool 4500: 0 0
BMP pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 300: 0 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5500: 0 0
Pool 6500: 0 0
Pool 7500: 0 0
Pool 8500: 0 0
Pool 10000: 0 0
Pool 20000: 0 0
Address family: VPNv4 Unicast
Dampening is not enabled
Client reflection is not enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 3
Table version synced to RIB: 3
Table version acked by RIB: 0
RIB has not converged: version 0
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 3
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Maximum supported label-stack depth:
For IPv4 Nexthop: 0
For IPv6 Nexthop: 0
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.887 3 3 18
Aug 18 12:00:08.885 0 3 4
Total triggers: 3
Import Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.887 3 3 18
Aug 18 12:00:08.885 0 3 18
Total triggers: 3
RIB Thread Aug 18 12:00:08.887 3 3 8
Aug 18 12:00:08.885 1 3 8
Aug 18 12:00:08.882 1 3 6
Total triggers: 3
Update Thread Aug 18 12:00:11.883 3 3 8
Aug 18 12:00:08.887 3 3 8
Aug 18 12:00:08.887 3 3 18
Total triggers: 3
Allocated Freed
Remote Prefixes: 0 0
Remote Paths: 0 0
Remote Path-elems: 0 0
Local Prefixes: 0 0
Local Paths: 0 0
Number Mem Used
Remote Prefixes: 0 0
Remote Paths: 0 0
Remote Path-elems: 0 0
Remote RDs: 0 0
Local Prefixes: 0 0
Local Paths: 0 0
Local RDs: 2 160
Total Prefixes: 0 0
Total Paths: 0 0
Total Path-elems: 0 0
Imported Paths: 0 0
Total RDs: 2 160
Address family: VPNv6 Unicast
Dampening is not enabled
Client reflection is enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 3
Table version synced to RIB: 3
Table version acked by RIB: 0
RIB has not converged: version 0
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 3
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Maximum supported label-stack depth:
For IPv4 Nexthop: 0
For IPv6 Nexthop: 0
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.890 3 3 19
Aug 18 12:00:08.882 0 3 4
Total triggers: 3
Import Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.890 3 3 19
Aug 18 12:00:08.882 0 3 19
Total triggers: 3
RIB Thread Aug 18 12:00:08.890 3 3 8
Aug 18 12:00:08.883 1 3 8
Aug 18 12:00:08.882 1 3 6
Total triggers: 3
Update Thread Aug 18 12:00:11.883 3 3 8
Aug 18 12:00:08.890 3 3 8
Aug 18 12:00:08.890 3 3 19
Total triggers: 3
Allocated Freed
Remote Prefixes: 0 0
Remote Paths: 0 0
Remote Path-elems: 0 0
Local Prefixes: 0 0
Local Paths: 0 0
Number Mem Used
Remote Prefixes: 0 0
Remote Paths: 0 0
Remote Path-elems: 0 0
Remote RDs: 0 0
Local Prefixes: 0 0
Local Paths: 0 0
Local RDs: 2 160
Total Prefixes: 0 0
Total Paths: 0 0
Total Path-elems: 0 0
Imported Paths: 0 0
Total RDs: 2 160
Address family: IPv4 Unicast
Dampening is not enabled
Client reflection is enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 2
Table version synced to RIB: 2
Table version acked by RIB: 2
IGP notification: IGPs notified
RIB has converged: version 4
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 2
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Total triggers: 0
Import Thread Aug 18 12:00:11.883 2 2 3
Aug 18 12:00:08.885 2 2 18
Aug 18 12:00:08.881 0 2 18
Total triggers: 3
RIB Thread Aug 18 12:00:11.883 2 2 3
Aug 18 12:00:08.885 2 2 4
Aug 18 12:00:08.885 2 2 18
Aug 18 12:00:08.882 1 2 4
Aug 18 12:00:08.881 1 2 6
Total triggers: 5
Update Thread Aug 18 12:00:11.883 2 2 3
Aug 18 12:00:08.885 2 2 4
Aug 18 12:00:08.885 2 2 18
Aug 18 12:00:08.884 2 2 18
Aug 18 11:55:08.888 1 2 3
Aug 18 11:55:08.883 1 2 9
Total triggers: 6
Allocated Freed
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
Number Mem Used
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
BMP Prefixes: 0 0
BMP Paths: 0 0
Address family: IPv6 Unicast
Dampening is not enabled
Client reflection is enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 2
Table version synced to RIB: 2
Table version acked by RIB: 2
RIB has converged: version 2
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 2
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Total triggers: 0
Import Thread Aug 18 12:00:11.883 2 2 3
Aug 18 12:00:08.886 2 2 19
Aug 18 12:00:08.882 0 2 19
Total triggers: 3
RIB Thread Aug 18 12:00:11.883 2 2 3
Aug 18 12:00:08.886 2 2 19
Aug 18 12:00:08.882 1 2 4
Aug 18 12:00:08.882 1 2 6
Total triggers: 4
Update Thread Aug 18 12:00:11.883 2 2 3
Aug 18 12:00:08.886 2 2 19
Aug 18 12:00:08.886 2 2 19
Aug 18 12:00:08.882 1 2 4
Aug 18 11:55:08.888 1 2 3
Aug 18 11:55:08.883 1 2 9
Total triggers: 6
Allocated Freed
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
Number Mem Used
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
BMP Prefixes: 0 0
BMP Paths: 0 0
'''
# 'all vrf all ipv4 unicast'
ProcessIpv4Output = '''\
RP/0/RSP1/CPU0:PE1#show bgp instance all vrf all ipv4 unicast process detail
BGP instance 0: 'default'
=========================
VRF: VRF1
---------
BGP Process Information: VRF VRF1
BGP Route Distinguisher: 200:1
BGP is operating in STANDALONE mode
Autonomous System number format: ASPLAIN
Autonomous System: 100
Router ID: 10.229.11.11 (manually configured)
Default Cluster ID: 10.4.1.1
Active Cluster IDs: 10.4.1.1
Fast external fallover enabled
Platform RLIMIT max: 2281701376 bytes
Maximum limit for BMP buffer size: 435 MB
Default value for BMP buffer size: 326 MB
Current limit for BMP buffer size: 326 MB
Current utilization of BMP buffer limit: 0 B
Neighbor logging is enabled
Enforce first AS enabled
iBGP to IGP redistribution enabled
Default local preference: 100
Default keepalive: 60
Non-stop routing is enabled
Update delay: 120
Generic scan interval: 60
BGP Speaker process: 3, Node: node0_RSP1_CPU0
Restart count: 32
Total Nbrs Estab/Cfg
Default VRFs: 1 0/3
Non-Default VRFs: 2 0/4
This VRF: 0/2
Sent Received
Updates: 0 0
Notifications: 0 0
Number Memory Used
Attributes: 0 0
AS Paths: 0 0
Communities: 0 0
Large Communities: 0 0
Extended communities: 0 0
PMSI Tunnel attr: 0 0
RIBRNH Tunnel attr: 0 0
PPMP attr: 0 0
Tunnel Encap attr: 0 0
PE distinguisher labels: 0 0
Route Reflector Entries: 0 0
Nexthop Entries: 27 10800
Alloc Free
Pool 200: 0 0
Pool 300: 1 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5000: 0 0
Pool 20000: 0 0
Message logging pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 500: 0 0
Pool 2200: 0 0
Pool 4500: 0 0
BMP pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 300: 0 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5500: 0 0
Pool 6500: 0 0
Pool 7500: 0 0
Pool 8500: 0 0
Pool 10000: 0 0
Pool 20000: 0 0
VRF VRF1 Address family: IPv4 Unicast
Dampening is not enabled
Client reflection is not enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 3
Table version synced to RIB: 3
Table version acked by RIB: 3
IGP notification: IGPs notified
RIB has converged: version 2
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 3
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.887 3 3 18
Aug 18 12:00:08.885 0 3 4
Total triggers: 3
Import Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.887 3 3 18
Aug 18 12:00:08.885 0 3 18
Total triggers: 3
RIB Thread Aug 18 12:00:08.887 3 3 8
Aug 18 12:00:08.885 1 3 8
Aug 18 12:00:08.882 1 3 6
Total triggers: 3
Update Thread Aug 18 12:00:11.883 3 3 8
Aug 18 12:00:08.887 3 3 8
Aug 18 12:00:08.887 3 3 18
Total triggers: 3
Allocated Freed
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
Number Mem Used
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
BMP Prefixes: 0 0
BMP Paths: 0 0
VRF: VRF2
---------
BGP Process Information: VRF VRF2
BGP Route Distinguisher: 200:2
BGP is operating in STANDALONE mode
Autonomous System number format: ASPLAIN
Autonomous System: 100
Router ID: 10.229.11.11 (manually configured)
Default Cluster ID: 10.4.1.1
Active Cluster IDs: 10.4.1.1
Fast external fallover enabled
Platform RLIMIT max: 2281701376 bytes
Maximum limit for BMP buffer size: 435 MB
Default value for BMP buffer size: 326 MB
Current limit for BMP buffer size: 326 MB
Current utilization of BMP buffer limit: 0 B
Neighbor logging is enabled
Enforce first AS enabled
iBGP to IGP redistribution enabled
Default local preference: 100
Default keepalive: 60
Non-stop routing is enabled
Update delay: 120
Generic scan interval: 60
BGP Speaker process: 3, Node: node0_RSP1_CPU0
Restart count: 32
Total Nbrs Estab/Cfg
Default VRFs: 1 0/3
Non-Default VRFs: 2 0/4
This VRF: 0/2
Sent Received
Updates: 0 0
Notifications: 0 0
Number Memory Used
Attributes: 0 0
AS Paths: 0 0
Communities: 0 0
Large Communities: 0 0
Extended communities: 0 0
PMSI Tunnel attr: 0 0
RIBRNH Tunnel attr: 0 0
PPMP attr: 0 0
Tunnel Encap attr: 0 0
PE distinguisher labels: 0 0
Route Reflector Entries: 0 0
Nexthop Entries: 27 10800
Alloc Free
Pool 200: 0 0
Pool 300: 1 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5000: 0 0
Pool 20000: 0 0
Message logging pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 500: 0 0
Pool 2200: 0 0
Pool 4500: 0 0
BMP pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 300: 0 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5500: 0 0
Pool 6500: 0 0
Pool 7500: 0 0
Pool 8500: 0 0
Pool 10000: 0 0
Pool 20000: 0 0
VRF VRF2 Address family: IPv4 Unicast
Dampening is not enabled
Client reflection is not enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 3
Table version synced to RIB: 3
Table version acked by RIB: 3
IGP notification: IGPs notified
RIB has converged: version 2
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 3
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.887 3 3 18
Aug 18 12:00:08.885 0 3 4
Total triggers: 3
Import Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.887 3 3 18
Aug 18 12:00:08.885 0 3 18
Total triggers: 3
RIB Thread Aug 18 12:00:08.887 3 3 8
Aug 18 12:00:08.885 1 3 8
Aug 18 12:00:08.882 1 3 6
Total triggers: 3
Update Thread Aug 18 12:00:11.883 3 3 8
Aug 18 12:00:08.887 3 3 8
Aug 18 12:00:08.887 3 3 18
Total triggers: 3
Allocated Freed
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
Number Mem Used
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
BMP Prefixes: 0 0
BMP Paths: 0 0
'''
# 'all vrf all ipv6 unicast'
ProcessIpv6Output = '''\
RP/0/RSP1/CPU0:PE1#show bgp instance all vrf all ipv6 unicast process detail
BGP instance 0: 'default'
=========================
VRF: VRF1
---------
BGP Process Information: VRF VRF1
BGP Route Distinguisher: 200:1
BGP is operating in STANDALONE mode
Autonomous System number format: ASPLAIN
Autonomous System: 100
Router ID: 10.229.11.11 (manually configured)
Default Cluster ID: 10.4.1.1
Active Cluster IDs: 10.4.1.1
Fast external fallover enabled
Platform RLIMIT max: 2281701376 bytes
Maximum limit for BMP buffer size: 435 MB
Default value for BMP buffer size: 326 MB
Current limit for BMP buffer size: 326 MB
Current utilization of BMP buffer limit: 0 B
Neighbor logging is enabled
Enforce first AS enabled
iBGP to IGP redistribution enabled
Default local preference: 100
Default keepalive: 60
Non-stop routing is enabled
Update delay: 120
Generic scan interval: 60
BGP Speaker process: 3, Node: node0_RSP1_CPU0
Restart count: 32
Total Nbrs Estab/Cfg
Default VRFs: 1 0/3
Non-Default VRFs: 2 0/4
This VRF: 0/2
Sent Received
Updates: 0 0
Notifications: 0 0
Number Memory Used
Attributes: 0 0
AS Paths: 0 0
Communities: 0 0
Large Communities: 0 0
Extended communities: 0 0
PMSI Tunnel attr: 0 0
RIBRNH Tunnel attr: 0 0
PPMP attr: 0 0
Tunnel Encap attr: 0 0
PE distinguisher labels: 0 0
Route Reflector Entries: 0 0
Nexthop Entries: 27 10800
Alloc Free
Pool 200: 0 0
Pool 300: 1 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5000: 0 0
Pool 20000: 0 0
Message logging pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 500: 0 0
Pool 2200: 0 0
Pool 4500: 0 0
BMP pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 300: 0 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5500: 0 0
Pool 6500: 0 0
Pool 7500: 0 0
Pool 8500: 0 0
Pool 10000: 0 0
Pool 20000: 0 0
VRF VRF1 Address family: IPv6 Unicast
Dampening is not enabled
Client reflection is not enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 3
Table version synced to RIB: 3
Table version acked by RIB: 3
RIB has converged: version 2
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 3
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.890 3 3 19
Aug 18 12:00:08.882 0 3 4
Total triggers: 3
Import Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.890 3 3 19
Aug 18 12:00:08.882 0 3 19
Total triggers: 3
RIB Thread Aug 18 12:00:08.890 3 3 8
Aug 18 12:00:08.883 1 3 8
Aug 18 12:00:08.882 1 3 6
Total triggers: 3
Update Thread Aug 18 12:00:11.883 3 3 8
Aug 18 12:00:08.890 3 3 8
Aug 18 12:00:08.890 3 3 19
Total triggers: 3
Allocated Freed
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
Number Mem Used
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
BMP Prefixes: 0 0
BMP Paths: 0 0
VRF: VRF2
---------
BGP Process Information: VRF VRF2
BGP Route Distinguisher: 200:2
BGP is operating in STANDALONE mode
Autonomous System number format: ASPLAIN
Autonomous System: 100
Router ID: 10.229.11.11 (manually configured)
Default Cluster ID: 10.4.1.1
Active Cluster IDs: 10.4.1.1
Fast external fallover enabled
Platform RLIMIT max: 2281701376 bytes
Maximum limit for BMP buffer size: 435 MB
Default value for BMP buffer size: 326 MB
Current limit for BMP buffer size: 326 MB
Current utilization of BMP buffer limit: 0 B
Neighbor logging is enabled
Enforce first AS enabled
iBGP to IGP redistribution enabled
Default local preference: 100
Default keepalive: 60
Non-stop routing is enabled
Update delay: 120
Generic scan interval: 60
BGP Speaker process: 3, Node: node0_RSP1_CPU0
Restart count: 32
Total Nbrs Estab/Cfg
Default VRFs: 1 0/3
Non-Default VRFs: 2 0/4
This VRF: 0/2
Sent Received
Updates: 0 0
Notifications: 0 0
Number Memory Used
Attributes: 0 0
AS Paths: 0 0
Communities: 0 0
Large Communities: 0 0
Extended communities: 0 0
PMSI Tunnel attr: 0 0
RIBRNH Tunnel attr: 0 0
PPMP attr: 0 0
Tunnel Encap attr: 0 0
PE distinguisher labels: 0 0
Route Reflector Entries: 0 0
Nexthop Entries: 27 10800
Alloc Free
Pool 200: 0 0
Pool 300: 1 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5000: 0 0
Pool 20000: 0 0
Message logging pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 500: 0 0
Pool 2200: 0 0
Pool 4500: 0 0
BMP pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 300: 0 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5500: 0 0
Pool 6500: 0 0
Pool 7500: 0 0
Pool 8500: 0 0
Pool 10000: 0 0
Pool 20000: 0 0
VRF VRF2 Address family: IPv6 Unicast
Dampening is not enabled
Client reflection is not enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 3
Table version synced to RIB: 3
Table version acked by RIB: 3
RIB has converged: version 2
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 3
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.890 3 3 19
Aug 18 12:00:08.882 0 3 4
Total triggers: 3
Import Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.890 3 3 19
Aug 18 12:00:08.882 0 3 19
Total triggers: 3
RIB Thread Aug 18 12:00:08.890 3 3 8
Aug 18 12:00:08.883 1 3 8
Aug 18 12:00:08.882 1 3 6
Total triggers: 3
Update Thread Aug 18 12:00:11.883 3 3 8
Aug 18 12:00:08.890 3 3 8
Aug 18 12:00:08.890 3 3 19
Total triggers: 3
Allocated Freed
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
Number Mem Used
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
BMP Prefixes: 0 0
BMP Paths: 0 0
'''
# =======================
# Neighbors Detail Output
# =======================
# 'all all all'
NeighborsAllOutput = '''\
RP/0/RSP1/CPU0:PE1#show bgp instance all all all neighbors detail
BGP instance 0: 'default'
=========================
BGP neighbor is 10.16.2.2
Remote AS 100, local AS 100, internal link
Remote router ID 0.0.0.0
Speaker ID 3
BGP state = Idle (No route to multi-hop neighbor)
NSR State: None
Last read 00:00:00, Last read before reset 00:00:00
Hold time is 180, keepalive interval is 60 seconds
Configured hold time: 180, keepalive: 60, min acceptable hold time: 3
Last write 00:00:00, attempted 0, written 0
Second last write 00:00:00, attempted 0, written 0
Last write before reset 00:00:00, attempted 0, written 0
Second last write before reset 00:00:00, attempted 0, written 0
Last write pulse rcvd not set last full not set pulse count 0
Last write pulse rcvd before reset 00:00:00
Socket not armed for io, not armed for read, not armed for write
Last write thread event before reset 00:00:00, second last 00:00:00
Last KA expiry before reset 00:00:00, second last 00:00:00
Last KA error before reset 00:00:00, KA not sent 00:00:00
Last KA start before reset 00:00:00, second last 00:00:00
Precedence: internet
Non-stop routing is enabled
Entered Neighbor NSR TCP mode:
TCP Initial Sync : ---
TCP Initial Sync Phase Two : ---
TCP Initial Sync Done : ---
Multi-protocol capability not received
Message stats:
InQ depth: 0, OutQ depth: 0
Last_Sent Sent Last_Rcvd Rcvd
Open: --- 0 --- 0
Notification: --- 0 --- 0
Update: --- 0 --- 0
Keepalive: --- 0 --- 0
Route_Refresh: --- 0 --- 0
Total: 0 0
Minimum time between advertisement runs is 0 secs
Inbound message logging enabled, 3 messages buffered
Outbound message logging enabled, 3 messages buffered
For Address Family: VPNv4 Unicast
BGP neighbor version 0
Update group: 3.1 Filter-group: 3.0 No Refresh request being processed
Route refresh request: received 0, sent 0
0 accepted prefixes, 0 are bestpaths
Exact no. of prefixes denied : 0.
Cumulative no. of prefixes denied: 0.
Prefix advertised 0, suppressed 0, withdrawn 0
Maximum prefixes allowed 2097152
Threshold for warning message 75%, restart interval 0 min
AIGP is enabled
An EoR was not received during read-only mode
Last ack version 1, Last synced ack version 0
Outstanding version objects: current 0, max 0
Additional-paths operation: None
Send Multicast Attributes
For Address Family: VPNv6 Unicast
BGP neighbor version 0
Update group: 3.1 Filter-group: 3.0 No Refresh request being processed
Route refresh request: received 0, sent 0
0 accepted prefixes, 0 are bestpaths
Exact no. of prefixes denied : 0.
Cumulative no. of prefixes denied: 0.
Prefix advertised 0, suppressed 0, withdrawn 0
Maximum prefixes allowed 1048576
Threshold for warning message 75%, restart interval 0 min
AIGP is enabled
An EoR was not received during read-only mode
Last ack version 1, Last synced ack version 0
Outstanding version objects: current 0, max 0
Additional-paths operation: None
Send Multicast Attributes
Connections established 0; dropped 0
Local host: 0.0.0.0, Local port: 0, IF Handle: 0x00000000
Foreign host: 10.16.2.2, Foreign port: 0
Last reset 00:00:00
'''
# Golden CLI output for:
#   'show bgp instance all vrf all ipv4 unicast neighbors detail'
# captured from an IOS-XR device. Used verbatim as a mocked device response
# in parser unit tests -- the exact bytes are the test contract, do not
# reformat or re-wrap the string body.
# 'all vrf all ipv4 unicast'
NeighborsIpv4Output = '''\
RP/0/RSP1/CPU0:PE1#show bgp instance all vrf all ipv4 unicast neighbors detail
BGP instance 0: 'default'
=========================
VRF: VRF1
---------
BGP neighbor is 10.1.5.5, vrf VRF1
Remote AS 200, local AS 100, external link
Remote router ID 0.0.0.0
Speaker ID 3
BGP state = Idle (No best local address found)
NSR State: None
Last read 00:00:00, Last read before reset 00:00:00
Hold time is 180, keepalive interval is 60 seconds
Configured hold time: 180, keepalive: 60, min acceptable hold time: 3
Last write 00:00:00, attempted 0, written 0
Second last write 00:00:00, attempted 0, written 0
Last write before reset 00:00:00, attempted 0, written 0
Second last write before reset 00:00:00, attempted 0, written 0
Last write pulse rcvd not set last full not set pulse count 0
Last write pulse rcvd before reset 00:00:00
Socket not armed for io, not armed for read, not armed for write
Last write thread event before reset 00:00:00, second last 00:00:00
Last KA expiry before reset 00:00:00, second last 00:00:00
Last KA error before reset 00:00:00, KA not sent 00:00:00
Last KA start before reset 00:00:00, second last 00:00:00
Precedence: internet
Non-stop routing is enabled
Entered Neighbor NSR TCP mode:
TCP Initial Sync : ---
TCP Initial Sync Phase Two : ---
TCP Initial Sync Done : ---
Enforcing first AS is enabled
Multi-protocol capability not received
Message stats:
InQ depth: 0, OutQ depth: 0
Last_Sent Sent Last_Rcvd Rcvd
Open: --- 0 --- 0
Notification: --- 0 --- 0
Update: --- 0 --- 0
Keepalive: --- 0 --- 0
Route_Refresh: --- 0 --- 0
Total: 0 0
Minimum time between advertisement runs is 0 secs
Inbound message logging enabled, 3 messages buffered
Outbound message logging enabled, 3 messages buffered
For Address Family: IPv4 Unicast
BGP neighbor version 0
Update group: 3.1 Filter-group: 3.0 No Refresh request being processed
Route refresh request: received 0, sent 0
Policy for incoming advertisements is all-pass
Policy for outgoing advertisements is all-pass
0 accepted prefixes, 0 are bestpaths
Exact no. of prefixes denied : 0.
Cumulative no. of prefixes denied: 0.
Prefix advertised 0, suppressed 0, withdrawn 0
Maximum prefixes allowed 1048576
Threshold for warning message 75%, restart interval 0 min
An EoR was not received during read-only mode
Last ack version 1, Last synced ack version 0
Outstanding version objects: current 0, max 0
Additional-paths operation: None
Advertise routes with local-label via Unicast SAFI
Connections established 0; dropped 0
Local host: 0.0.0.0, Local port: 0, IF Handle: 0x00000000
Foreign host: 10.1.5.5, Foreign port: 0
Last reset 00:00:00
External BGP neighbor not directly connected.
VRF: VRF2
---------
BGP neighbor is 10.186.5.5, vrf VRF2
Remote AS 200, local AS 100, external link
Remote router ID 0.0.0.0
Speaker ID 3
BGP state = Idle (No best local address found)
NSR State: None
Last read 00:00:00, Last read before reset 00:00:00
Hold time is 180, keepalive interval is 60 seconds
Configured hold time: 180, keepalive: 60, min acceptable hold time: 3
Last write 00:00:00, attempted 0, written 0
Second last write 00:00:00, attempted 0, written 0
Last write before reset 00:00:00, attempted 0, written 0
Second last write before reset 00:00:00, attempted 0, written 0
Last write pulse rcvd not set last full not set pulse count 0
Last write pulse rcvd before reset 00:00:00
Socket not armed for io, not armed for read, not armed for write
Last write thread event before reset 00:00:00, second last 00:00:00
Last KA expiry before reset 00:00:00, second last 00:00:00
Last KA error before reset 00:00:00, KA not sent 00:00:00
Last KA start before reset 00:00:00, second last 00:00:00
Precedence: internet
Non-stop routing is enabled
Entered Neighbor NSR TCP mode:
TCP Initial Sync : ---
TCP Initial Sync Phase Two : ---
TCP Initial Sync Done : ---
Enforcing first AS is enabled
Multi-protocol capability not received
Message stats:
InQ depth: 0, OutQ depth: 0
Last_Sent Sent Last_Rcvd Rcvd
Open: --- 0 --- 0
Notification: --- 0 --- 0
Update: --- 0 --- 0
Keepalive: --- 0 --- 0
Route_Refresh: --- 0 --- 0
Total: 0 0
Minimum time between advertisement runs is 0 secs
Inbound message logging enabled, 3 messages buffered
Outbound message logging enabled, 3 messages buffered
For Address Family: IPv4 Unicast
BGP neighbor version 0
Update group: 3.1 Filter-group: 3.0 No Refresh request being processed
Inbound soft reconfiguration allowed (override route-refresh)
Route refresh request: received 0, sent 0
Policy for incoming advertisements is all-pass
Policy for outgoing advertisements is all-pass
0 accepted prefixes, 0 are bestpaths
Exact no. of prefixes denied : 0.
Cumulative no. of prefixes denied: 0.
Prefix advertised 0, suppressed 0, withdrawn 0
Maximum prefixes allowed 495
Threshold for warning message 75%, restart interval 0 min
An EoR was not received during read-only mode
Last ack version 1, Last synced ack version 0
Outstanding version objects: current 0, max 0
Additional-paths operation: None
Advertise routes with local-label via Unicast SAFI
Connections established 0; dropped 0
Local host: 0.0.0.0, Local port: 0, IF Handle: 0x00000000
Foreign host: 10.186.5.5, Foreign port: 0
Last reset 00:00:00
External BGP neighbor not directly connected.
'''
# Golden CLI output for:
#   'show bgp instance all vrf all ipv6 unicast neighbors detail'
# (IOS-XR). IPv6 counterpart of NeighborsIpv4Output: two idle eBGP
# neighbors, one per VRF. Exact bytes are the test contract.
# 'all vrf all ipv6 unicast'
NeighborsIpv6Output = '''\
RP/0/RSP1/CPU0:PE1#show bgp instance all vrf all ipv6 unicast neighbors detail
BGP instance 0: 'default'
=========================
VRF: VRF1
---------
BGP neighbor is 2001:db8:1:5::5, vrf VRF1
Remote AS 200, local AS 100, external link
Remote router ID 0.0.0.0
Speaker ID 3
BGP state = Idle (No best local address found)
NSR State: None
Last read 00:00:00, Last read before reset 00:00:00
Hold time is 180, keepalive interval is 60 seconds
Configured hold time: 180, keepalive: 60, min acceptable hold time: 3
Last write 00:00:00, attempted 0, written 0
Second last write 00:00:00, attempted 0, written 0
Last write before reset 00:00:00, attempted 0, written 0
Second last write before reset 00:00:00, attempted 0, written 0
Last write pulse rcvd not set last full not set pulse count 0
Last write pulse rcvd before reset 00:00:00
Socket not armed for io, not armed for read, not armed for write
Last write thread event before reset 00:00:00, second last 00:00:00
Last KA expiry before reset 00:00:00, second last 00:00:00
Last KA error before reset 00:00:00, KA not sent 00:00:00
Last KA start before reset 00:00:00, second last 00:00:00
Precedence: internet
Non-stop routing is enabled
Entered Neighbor NSR TCP mode:
TCP Initial Sync : ---
TCP Initial Sync Phase Two : ---
TCP Initial Sync Done : ---
Enforcing first AS is enabled
Multi-protocol capability not received
Message stats:
InQ depth: 0, OutQ depth: 0
Last_Sent Sent Last_Rcvd Rcvd
Open: --- 0 --- 0
Notification: --- 0 --- 0
Update: --- 0 --- 0
Keepalive: --- 0 --- 0
Route_Refresh: --- 0 --- 0
Total: 0 0
Minimum time between advertisement runs is 0 secs
Inbound message logging enabled, 3 messages buffered
Outbound message logging enabled, 3 messages buffered
For Address Family: IPv6 Unicast
BGP neighbor version 0
Update group: 3.1 Filter-group: 3.0 No Refresh request being processed
Route refresh request: received 0, sent 0
Policy for incoming advertisements is all-pass
Policy for outgoing advertisements is all-pass
0 accepted prefixes, 0 are bestpaths
Exact no. of prefixes denied : 0.
Cumulative no. of prefixes denied: 0.
Prefix advertised 0, suppressed 0, withdrawn 0
Maximum prefixes allowed 524288
Threshold for warning message 75%, restart interval 0 min
An EoR was not received during read-only mode
Last ack version 1, Last synced ack version 0
Outstanding version objects: current 0, max 0
Additional-paths operation: None
Advertise routes with local-label via Unicast SAFI
Connections established 0; dropped 0
Local host: ::, Local port: 0, IF Handle: 0x00000000
Foreign host: 2001:db8:1:5::5, Foreign port: 0
Last reset 00:00:00
External BGP neighbor not directly connected.
VRF: VRF2
---------
BGP neighbor is 2001:db8:20:1:5::5, vrf VRF2
Remote AS 200, local AS 100, external link
Remote router ID 0.0.0.0
Speaker ID 3
BGP state = Idle (No best local address found)
NSR State: None
Last read 00:00:00, Last read before reset 00:00:00
Hold time is 180, keepalive interval is 60 seconds
Configured hold time: 180, keepalive: 60, min acceptable hold time: 3
Last write 00:00:00, attempted 0, written 0
Second last write 00:00:00, attempted 0, written 0
Last write before reset 00:00:00, attempted 0, written 0
Second last write before reset 00:00:00, attempted 0, written 0
Last write pulse rcvd not set last full not set pulse count 0
Last write pulse rcvd before reset 00:00:00
Socket not armed for io, not armed for read, not armed for write
Last write thread event before reset 00:00:00, second last 00:00:00
Last KA expiry before reset 00:00:00, second last 00:00:00
Last KA error before reset 00:00:00, KA not sent 00:00:00
Last KA start before reset 00:00:00, second last 00:00:00
Precedence: internet
Non-stop routing is enabled
Entered Neighbor NSR TCP mode:
TCP Initial Sync : ---
TCP Initial Sync Phase Two : ---
TCP Initial Sync Done : ---
Enforcing first AS is enabled
Multi-protocol capability not received
Message stats:
InQ depth: 0, OutQ depth: 0
Last_Sent Sent Last_Rcvd Rcvd
Open: --- 0 --- 0
Notification: --- 0 --- 0
Update: --- 0 --- 0
Keepalive: --- 0 --- 0
Route_Refresh: --- 0 --- 0
Total: 0 0
Minimum time between advertisement runs is 0 secs
Inbound message logging enabled, 3 messages buffered
Outbound message logging enabled, 3 messages buffered
For Address Family: IPv6 Unicast
BGP neighbor version 0
Update group: 3.1 Filter-group: 3.0 No Refresh request being processed
Route refresh request: received 0, sent 0
Policy for incoming advertisements is all-pass
Policy for outgoing advertisements is all-pass
0 accepted prefixes, 0 are bestpaths
Exact no. of prefixes denied : 0.
Cumulative no. of prefixes denied: 0.
Prefix advertised 0, suppressed 0, withdrawn 0
Maximum prefixes allowed 524288
Threshold for warning message 75%, restart interval 0 min
An EoR was not received during read-only mode
Last ack version 1, Last synced ack version 0
Outstanding version objects: current 0, max 0
Additional-paths operation: None
Advertise routes with local-label via Unicast SAFI
Connections established 0; dropped 0
Local host: ::, Local port: 0, IF Handle: 0x00000000
Foreign host: 2001:db8:20:1:5::5, Foreign port: 0
Last reset 00:00:00
External BGP neighbor not directly connected.
'''
############################################################################
# BGP TABLE
############################################################################
# =============
# AllAll Output
# =============
# Golden CLI output for 'show bgp instance all all all' (IOS-XR):
# full VPNv4/VPNv6 unicast tables across all route distinguishers.
# Exact bytes (including continuation lines for long IPv6 next hops)
# are the test contract -- do not reformat.
# 'all all all'
InstanceAllOutput = '''\
BGP instance 0: 'default'
=========================
Address Family: VPNv4 Unicast
-----------------------------
BGP router identifier 10.4.1.1, local AS number 100
BGP generic scan interval 60 secs
Non-stop routing is enabled
BGP table state: Active
Table ID: 0x0 RD version: 0
BGP main routing table version 47
BGP NSR Initial initsync version 5 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP scan interval 60 secs
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 200:1 (default for vrf VRF1)
*> 10.1.1.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.2.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.3.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.4.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.5.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*>i10.205.1.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.2.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.3.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.4.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.5.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.169.1.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.2.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.3.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.4.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.5.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 200:2 (default for vrf VRF2)
*> 10.1.1.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.2.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.3.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.4.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.5.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*>i10.205.1.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.2.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.3.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.4.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.5.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.169.1.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.2.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.3.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.4.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.5.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 300:1
*>i10.169.1.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.2.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.3.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.4.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.5.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 400:1
*>i10.205.1.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.2.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.3.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.4.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.5.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
Processed 40 prefixes, 55 paths
Address Family: VPNv6 Unicast
-----------------------------
BGP router identifier 10.4.1.1, local AS number 100
BGP generic scan interval 60 secs
Non-stop routing is enabled
BGP table state: Active
Table ID: 0x0 RD version: 0
BGP main routing table version 32
BGP NSR Initial initsync version 5 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP scan interval 60 secs
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 200:1 (default for vrf VRF1)
*> 2001:db8:cdc9:121::/64 2001:db8:1:5::5 2219 0 200 33299 51178 47751 {27016} e
* 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:144::/64 2001:db8:1:5::5 2219 0 200 33299 51178 47751 {27016} e
* 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:169::/64 2001:db8:1:5::5 2219 0 200 33299 51178 47751 {27016} e
* 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:190::/64 2001:db8:1:5::5 2219 0 200 33299 51178 47751 {27016} e
* 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:1b9::/64 2001:db8:1:5::5 2219 0 200 33299 51178 47751 {27016} e
* 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*>i2001:db8:31b9:121::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:144::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:169::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:190::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:1b9::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 200:2 (default for vrf VRF2)
*> 2001:db8:cdc9:121::/64 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:144::/64 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:169::/64 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:190::/64 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:1b9::/64 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*>i2001:db8:31b9:121::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:144::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:169::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:190::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:1b9::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 300:1
*>i2001:db8:31b9:121::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:144::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:169::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:190::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:1b9::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Processed 25 prefixes, 35 paths
'''
# Golden CLI output for 'show bgp instance all vrf all ipv4 unicast'
# (IOS-XR): per-VRF IPv4 unicast BGP table for VRF1 and VRF2.
# Exact bytes are the test contract.
# 'all vrf all ipv4 unicast'
InstanceIpv4Output = '''\
BGP instance 0: 'default'
=========================
VRF: VRF1
---------
BGP VRF VRF1, state: Active
BGP Route Distinguisher: 200:1
VRF ID: 0x60000001
BGP router identifier 10.229.11.11, local AS number 100
Non-stop routing is enabled
BGP table state: Active
Table ID: 0xe0000010 RD version: 47
BGP main routing table version 47
BGP NSR Initial initsync version 5 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 200:1 (default for vrf VRF1)
*> 10.1.1.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.2.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.3.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.4.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.5.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*>i10.205.1.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.2.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.3.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.4.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.5.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.169.1.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.2.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.3.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.4.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.5.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Processed 15 prefixes, 20 paths
VRF: VRF2
---------
BGP VRF VRF2, state: Active
BGP Route Distinguisher: 200:2
VRF ID: 0x60000002
BGP router identifier 10.229.11.11, local AS number 100
Non-stop routing is enabled
BGP table state: Active
Table ID: 0xe0000011 RD version: 47
BGP main routing table version 47
BGP NSR Initial initsync version 5 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 200:2 (default for vrf VRF2)
*> 10.1.1.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.2.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.3.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.4.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.5.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*>i10.205.1.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.2.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.3.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.4.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.5.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.169.1.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.2.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.3.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.4.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.5.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Processed 15 prefixes, 15 paths
'''
# Golden CLI output for 'show bgp instance all vrf all ipv6 unicast'.
# Intentionally empty: this fixture exercises the parser/test path for
# a device that returns no output for the command.
# 'all vrf all ipv6 unicast'
InstanceIpv6Output = '''\
'''
############################################################################
# BGP ROUTES PER PEER
############################################################################
# ==============
# Summary Output
# ==============
# Golden CLI output for 'show bgp instance all all all summary' (IOS-XR):
# VPNv4/VPNv6 unicast summaries for neighbor 10.16.2.2. Exact bytes are
# the test contract.
# 'all all all'
SummaryAllOutput = '''\
BGP instance 0: 'default'
=========================
Address Family: VPNv4 Unicast
-----------------------------
BGP router identifier 10.4.1.1, local AS number 100
BGP generic scan interval 60 secs
Non-stop routing is enabled
BGP table state: Active
Table ID: 0x0 RD version: 0
BGP main routing table version 47
BGP NSR Initial initsync version 5 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP scan interval 60 secs
BGP is operating in STANDALONE mode.
Process RcvTblVer bRIB/RIB LabelVer ImportVer SendTblVer StandbyVer
Speaker 47 47 47 47 47 0
Neighbor Spk AS MsgRcvd MsgSent TblVer InQ OutQ Up/Down St/PfxRcd
10.16.2.2 0 100 11875 11874 47 0 0 1w1d 10
Address Family: VPNv6 Unicast
-----------------------------
BGP router identifier 10.4.1.1, local AS number 100
BGP generic scan interval 60 secs
Non-stop routing is enabled
BGP table state: Active
Table ID: 0x0 RD version: 0
BGP main routing table version 32
BGP NSR Initial initsync version 5 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP scan interval 60 secs
BGP is operating in STANDALONE mode.
Process RcvTblVer bRIB/RIB LabelVer ImportVer SendTblVer StandbyVer
Speaker 32 32 32 32 32 0
Neighbor Spk AS MsgRcvd MsgSent TblVer InQ OutQ Up/Down St/PfxRcd
10.16.2.2 0 100 11875 11874 32 0 0 1w1d 5
'''
# Golden CLI output for 'show bgp instance all vrf all ipv4 unicast summary':
# single VRF (VRF1) with one established IPv4 neighbor. Exact bytes are
# the test contract.
# 'all vrf all ipv4'
SummaryIpv4Output = '''\
BGP instance 0: 'default'
=========================
VRF: VRF1
---------
BGP VRF VRF1, state: Active
BGP Route Distinguisher: 200:1
VRF ID: 0x60000001
BGP router identifier 10.229.11.11, local AS number 100
Non-stop routing is enabled
BGP table state: Active
Table ID: 0xe0000010 RD version: 47
BGP main routing table version 47
BGP NSR Initial initsync version 5 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP is operating in STANDALONE mode.
Process RcvTblVer bRIB/RIB LabelVer ImportVer SendTblVer StandbyVer
Speaker 47 47 47 47 47 0
Neighbor Spk AS MsgRcvd MsgSent TblVer InQ OutQ Up/Down St/PfxRcd
10.1.5.5 0 200 11858 11864 47 0 0 1w1d 5
'''
# Golden CLI output for 'show bgp instance all vrf all ipv6 unicast summary':
# VRF2 with one IPv6 neighbor. Note the neighbor address is long enough
# that the device wraps its summary row onto two lines -- the parser must
# handle that, so the wrap is preserved verbatim.
# 'all vrf all ipv6'
SummaryIpv6Output = '''\
BGP instance 0: 'default'
=========================
VRF: VRF2
---------
BGP VRF VRF2, state: Active
BGP Route Distinguisher: 300:1
VRF ID: 0x50000002
BGP router identifier 10.229.11.11, local AS number 100
Non-stop routing is enabled
BGP table state: Active
Table ID: 0xe0000010 RD version: 47
BGP main routing table version 47
BGP NSR Initial initsync version 5 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP is operating in STANDALONE mode.
Process RcvTblVer bRIB/RIB LabelVer ImportVer SendTblVer StandbyVer
Speaker 47 47 47 47 47 0
Neighbor Spk AS MsgRcvd MsgSent TblVer InQ OutQ Up/Down St/PfxRcd
2001:db8:20:1:5::5
0 200 11858 11864 47 0 0 1w1d 5
'''
# ========================
# Advertised Routes Output
# ========================
# Golden CLI output for
# 'show bgp instance all all all neighbors <x> advertised-routes':
# VPNv4 and VPNv6 advertised prefixes. Long IPv6 next hops wrap onto a
# continuation line; preserved verbatim for the parser.
# 'all all all'
AdvertisedAllOutput = '''\
BGP instance 0: 'default'
=========================
Address Family: VPNv4 Unicast
-----------------------------
Network Next Hop From AS Path
Route Distinguisher: 200:1
10.1.1.0/24 10.4.1.1 10.186.5.5 200 33299 51178 47751 {27016}e
10.1.2.0/24 10.4.1.1 10.186.5.5 200 33299 51178 47751 {27016}e
Processed 2 prefixes, 2 paths
Address Family: VPNv6 Unicast
-----------------------------
Network Next Hop From AS Path
Route Distinguisher: 200:1
2001:db8:cdc9:121::/64 10.4.1.1 2001:db8:20:1:5::5
200 33299 51178 47751 {27017}e
2001:db8:cdc9:144::/64 10.4.1.1 2001:db8:20:1:5::5
200 33299 51178 47751 {27016}e
Processed 2 prefixes, 2 paths
'''
# ======================
# Received Routes Output
# ======================
# Golden CLI output for
# 'show bgp instance all all all neighbors <x> routes received':
# VPNv4/VPNv6 routes received from the neighbor, per RD. Exact bytes are
# the test contract.
# 'all all all'
ReceivedAllOutput = '''\
BGP instance 0: 'default'
=========================
Address Family: VPNv4 Unicast
-----------------------------
BGP router identifier 10.4.1.1, local AS number 100
BGP generic scan interval 60 secs
Non-stop routing is enabled
BGP table state: Active
Table ID: 0x0 RD version: 0
BGP main routing table version 43
BGP NSR Initial initsync version 11 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP scan interval 60 secs
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 300:1
* i10.169.1.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 400:1
* i10.9.2.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
Processed 10 prefixes, 10 paths
Address Family: VPNv6 Unicast
-----------------------------
BGP router identifier 10.4.1.1, local AS number 100
BGP generic scan interval 60 secs
Non-stop routing is enabled
BGP table state: Active
Table ID: 0x0 RD version: 0
BGP main routing table version 43
BGP NSR Initial initsync version 11 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP scan interval 60 secs
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 300:1
* i2001:db8:31b9:121::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i2001:db8:31b9:1b9::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 400:1
* i2001:db8:a69:484::/64 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
* i2001:db8:a69:4c9::/64 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
Processed 10 prefixes, 10 paths
'''
# Golden CLI output for
# 'show bgp instance all vrf all ipv4 unicast neighbors <x> routes received'
# (VRF1). Exact bytes are the test contract.
# 'all vrf all ipv4 unicast'
ReceivedIpv4Output = '''\
BGP instance 0: 'default'
=========================
VRF: VRF1
---------
BGP VRF VRF1, state: Active
BGP Route Distinguisher: 200:2
VRF ID: 0x60000002
BGP router identifier 10.229.11.11, local AS number 100
Non-stop routing is enabled
BGP table state: Active
Table ID: 0xe0000011 RD version: 63
BGP main routing table version 63
BGP NSR Initial initsync version 11 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 200:1 (default for vrf VRF2)
* 10.1.1.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.1.2.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
Processed 2 prefixes, 2 paths
'''
# Golden CLI output for
# 'show bgp instance all vrf all ipv6 unicast neighbors <x> routes received'
# (VRF2). Exact bytes are the test contract.
# 'all vrf all ipv6 unicast'
ReceivedIpv6Output = '''\
BGP instance 0: 'default'
=========================
VRF: VRF2
---------
BGP VRF VRF2, state: Active
BGP Route Distinguisher: 200:3
VRF ID: 0x50000006
BGP router identifier 10.229.11.11, local AS number 100
Non-stop routing is enabled
BGP table state: Active
Table ID: 0xe0000011 RD version: 63
BGP main routing table version 63
BGP NSR Initial initsync version 11 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 200:2 (default for vrf VRF1)
* 10.34.1.0/24 10.196.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.34.2.0/24 10.196.5.5 2219 0 200 33299 51178 47751 {27016} e
Processed 2 prefixes, 2 paths
'''
# =============
# Routes Output
# =============
# Golden CLI output for 'show bgp instance all all all neighbors <x> routes':
# accepted VPNv4/VPNv6 routes from the neighbor, per RD. Exact bytes are
# the test contract.
# 'all all all'
RoutesAllOutput = '''\
BGP instance 0: 'default'
=========================
Address Family: VPNv4 Unicast
-----------------------------
BGP router identifier 10.4.1.1, local AS number 100
BGP generic scan interval 60 secs
Non-stop routing is enabled
BGP table state: Active
Table ID: 0x0 RD version: 0
BGP main routing table version 43
BGP NSR Initial initsync version 11 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP scan interval 60 secs
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 300:1
* i10.169.1.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 400:1
* i10.9.2.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
Processed 2 prefixes, 2 paths
Address Family: VPNv6 Unicast
-----------------------------
BGP router identifier 10.4.1.1, local AS number 100
BGP generic scan interval 60 secs
Non-stop routing is enabled
BGP table state: Active
Table ID: 0x0 RD version: 0
BGP main routing table version 43
BGP NSR Initial initsync version 11 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP scan interval 60 secs
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 300:1
* i2001:db8:31b9:121::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i2001:db8:31b9:144::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 400:1
* i2001:db8:a69:484::/64 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
Processed 3 prefixes, 3 paths
'''
# ==============
# BGP Ops Output
# ==============
# Expected parsed structure ("ops" view) that the BGP info learn/parse step
# should produce from the mocked CLI outputs above. Compared verbatim in
# tests -- keys and values here are the expected result, not configuration,
# so do not "correct" values without re-verifying against parser output.
BgpInfo = {
'instance':
{'default':
{'bgp_id': 100,
# Expected per-af-group peer-policy attributes.
'peer_policy':
{'af_group':
{'allowas_in': True,
'allowas_in_as_number': 10,
'as_override': True,
'default_originate': True,
'default_originate_route_map': 'allpass',
'maximum_prefix_max_prefix_no': 429,
'maximum_prefix_restart': 35,
'maximum_prefix_threshold': 75,
'next_hop_self': True,
'route_map_name_in': 'allpass',
'route_map_name_out': 'allpass',
'route_reflector_client': True,
'send_community': 'both',
# Adjacent string literals concatenate to 'inbound always'.
'soft_reconfiguration': 'inbound '
'always',
'soo': '100:1'}},
# Expected per-session-group peer-session attributes.
'peer_session':
{'SG':
{'description': 'SG_group',
'disable_connected_check': True,
'ebgp_multihop_enable': True,
'ebgp_multihop_max_hop': 254,
'fall_over_bfd': True,
'holdtime': 30,
'keepalive_interval': 10,
'local_as_as_no': 200,
'local_dual_as': True,
'local_no_prepend': True,
'local_replace_as': True,
'password_text': '094F471A1A0A464058',
'remote_as': 333,
'shutdown': True,
'suppress_four_byte_as_capability': True,
'transport_connection_mode': 'active-only',
'update_source': 'loopback0'}},
'protocol_state': 'RUNNING',
'vrf':
{'default':
{'neighbor':
{'10.16.2.2':
{'bgp_neighbor_counters':
{'messages':
{'received':
{'notifications': 0,
'updates': 0},
'sent':
{'notifications': 0,
'updates': 0}}},
'bgp_session_transport':
{'transport':
# NOTE(review): foreign_host/foreign_port values look swapped
# (host '0', port '10.16.2.2'). This mirrors what the parser
# currently emits -- confirm against the ops model before
# changing either the fixture or the parser.
{'foreign_host': '0',
'foreign_port': '10.16.2.2',
'local_host': 'Loopback0',
'local_port': '0'}},
'holdtime': 180,
'remote_as': 100,
'session_state': 'idle'}},
'router_id': '10.4.1.1'}}},
# Additional minimal instances exercising multi-instance parsing.
'test':
{'bgp_id': 333},
'test1':
{'bgp_id': 333},
'test2':
{'bgp_id': 333}}}
# Expected parsed BGP table (golden fixture).
#
# The original literal spelled every per-prefix path entry out by hand
# (~1300 lines). The table is extremely regular — the same handful of
# path entries repeated over prefix ranges — so it is built here from
# small private factories instead. The resulting dict is value-identical
# to the hand-written literal.


def _bgp_path_entry(as_number, status_codes, next_hop, locprf=None):
    """One per-index path entry of the BGP table.

    as_number    -- leading AS of the AS path, as a string (e.g. '200')
    status_codes -- status-codes column (e.g. '*>', '*i', '*>i')
    next_hop     -- next-hop address string
    locprf       -- LOCAL_PREF column; key omitted entirely when None
    """
    entry = {
        'metric': '2219',
        'next_hop': next_hop,
        'origin_codes': 'e',
        'path': as_number + ' 33299 51178 47751 {27016}',
        'status_codes': status_codes,
        'weight': '0',
    }
    if locprf is not None:
        entry['locprf'] = locprf
    return entry


def _indexed(*entries):
    """Wrap path entries into the {'index': {1: ..., 2: ...}} shape."""
    return {'index': {pos: entry for pos, entry in enumerate(entries, start=1)}}


def _rd_block(route_distinguisher, default_vrf, prefixes):
    """One 'vpnvX unicast RD a:b' address-family sub-dict."""
    return {
        'default_vrf': default_vrf,
        'prefixes': prefixes,
        'route_distinguisher': route_distinguisher,
    }


def _v4_prefixes_rd_200_1():
    """RD 200:1 (vrf1): 10.1.x with two paths each, plus iBGP 10.205.x / 10.169.x."""
    prefixes = {}
    for i in range(1, 6):
        prefixes['10.1.%d.0/24' % i] = _indexed(
            _bgp_path_entry('200', '*>', '10.1.5.5'),
            _bgp_path_entry('200', '*', '10.186.5.5'))
        prefixes['10.205.%d.0/24' % i] = _indexed(
            _bgp_path_entry('400', '*>i', '10.64.4.4', locprf='100'))
        prefixes['10.169.%d.0/24' % i] = _indexed(
            _bgp_path_entry('300', '*>i', '10.64.4.4', locprf='100'))
    return prefixes


def _v4_prefixes_rd_200_2():
    """RD 200:2 (vrf2): 10.1.x with a single path, plus iBGP 10.205.x / 10.169.x."""
    prefixes = {}
    for i in range(1, 6):
        prefixes['10.1.%d.0/24' % i] = _indexed(
            _bgp_path_entry('200', '*>', '10.186.5.5'))
        prefixes['10.205.%d.0/24' % i] = _indexed(
            _bgp_path_entry('400', '*>i', '10.64.4.4', locprf='100'))
        prefixes['10.169.%d.0/24' % i] = _indexed(
            _bgp_path_entry('300', '*>i', '10.64.4.4', locprf='100'))
    return prefixes


def _v4_prefixes_dual(network, as_number):
    """RD 300:1 / 400:1 style: best ('*>i') plus non-best ('*i') iBGP path per prefix."""
    return {
        '%s.%d.0/24' % (network, i): _indexed(
            _bgp_path_entry(as_number, '*>i', '10.64.4.4', locprf='100'),
            _bgp_path_entry(as_number, '*i', '10.64.4.4', locprf='100'))
        for i in range(1, 6)
    }


# The five /64 host-part suffixes used by every IPv6 prefix range.
_V6_SUFFIXES = ('144', '169', '190', '1b9', '121')


def _v6_prefixes_rd_200_x(cdc9_next_hop):
    """vpnv6 RD 200:1 / 200:2: local cdc9 prefixes plus iBGP 31b9 prefixes."""
    prefixes = {}
    for suffix in _V6_SUFFIXES:
        prefixes['2001:db8:cdc9:%s::/64' % suffix] = _indexed(
            _bgp_path_entry('200', '*>', cdc9_next_hop))
        prefixes['2001:db8:31b9:%s::/64' % suffix] = _indexed(
            _bgp_path_entry('300', '*>i', '10.64.4.4', locprf='100'))
    return prefixes


def _v6_prefixes_rd_300_1():
    """vpnv6 RD 300:1: 31b9 prefixes, best plus non-best iBGP path each."""
    return {
        '2001:db8:31b9:%s::/64' % suffix: _indexed(
            _bgp_path_entry('300', '*>i', '10.64.4.4', locprf='100'),
            _bgp_path_entry('300', '*i', '10.64.4.4', locprf='100'))
        for suffix in _V6_SUFFIXES
    }


BgpTable = {
    'instance': {
        'default': {
            'vrf': {
                'VRF1': {
                    'address_family': {
                        'vpnv4 unicast': {'bgp_table_version': 47,
                                          'local_as': 100},
                        'vpnv4 unicast RD 200:1':
                            _rd_block('200:1', 'vrf1', _v4_prefixes_rd_200_1()),
                    },
                },
                'VRF2': {
                    'address_family': {
                        'vpnv4 unicast': {'bgp_table_version': 47,
                                          'local_as': 100},
                        'vpnv4 unicast RD 200:2':
                            _rd_block('200:2', 'vrf2', _v4_prefixes_rd_200_2()),
                    },
                },
                # The default VRF sees every RD, including the two
                # non-imported ones (default_vrf 'none').
                'default': {
                    'address_family': {
                        'vpnv4 unicast': {'bgp_table_version': 47,
                                          'local_as': 100},
                        'vpnv4 unicast RD 200:1':
                            _rd_block('200:1', 'vrf1', _v4_prefixes_rd_200_1()),
                        'vpnv4 unicast RD 200:2':
                            _rd_block('200:2', 'vrf2', _v4_prefixes_rd_200_2()),
                        'vpnv4 unicast RD 300:1':
                            _rd_block('300:1', 'none',
                                      _v4_prefixes_dual('10.169', '300')),
                        'vpnv4 unicast RD 400:1':
                            _rd_block('400:1', 'none',
                                      _v4_prefixes_dual('10.205', '400')),
                        'vpnv6 unicast': {'bgp_table_version': 32,
                                          'local_as': 100},
                        'vpnv6 unicast RD 200:1':
                            _rd_block('200:1', 'vrf1',
                                      _v6_prefixes_rd_200_x('2001:db8:1:5::5')),
                        'vpnv6 unicast RD 200:2':
                            _rd_block('200:2', 'vrf2',
                                      _v6_prefixes_rd_200_x('2001:db8:20:1:5::5')),
                        'vpnv6 unicast RD 300:1':
                            _rd_block('300:1', 'none', _v6_prefixes_rd_300_1()),
                    },
                },
            },
        },
    },
}
BgpRoutesPerPeer = {
'instance': {'default': {'vrf': {'VRF1': {'neighbor': {'10.1.5.5': {'address_family': {'vpnv4 unicast': {'input_queue': 0,
'msg_rcvd': 11858,
'msg_sent': 11864,
'output_queue': 0,
'route_distinguisher': '200:1',
'state_pfxrcd': '5',
'tbl_ver': 47,
'up_down': '1w1d'}},
'remote_as': 200}}},
'VRF2': {'neighbor': {'2001:db8:20:1:5::5': {'address_family': {'vpnv6 unicast': {'input_queue': 0,
'msg_rcvd': 11858,
'msg_sent': 11864,
'output_queue': 0,
'route_distinguisher': '300:1',
'state_pfxrcd': '5',
'tbl_ver': 47,
'up_down': '1w1d'}},
'remote_as': 200}}},
'default': {'neighbor': {'10.16.2.2': {'address_family': {'vpnv4 unicast': {'input_queue': 0,
'msg_rcvd': 11875,
'msg_sent': 11874,
'output_queue': 0,
'state_pfxrcd': '10',
'tbl_ver': 47,
'up_down': '1w1d'},
'vpnv4 unicast RD 200:1': {'advertised': {'10.1.1.0/24': {'index': {1: {'froms': '10.186.5.5',
'next_hop': '10.4.1.1',
'origin_code': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}'}}},
'10.1.2.0/24': {'index': {1: {'froms': '10.186.5.5',
'next_hop': '10.4.1.1',
'origin_code': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}'}}}}},
'vpnv4 unicast RD 300:1': {'received_routes': {'10.169.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}},
'routes': {'10.169.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}}},
'vpnv4 unicast RD 400:1': {'received_routes': {'10.9.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}},
'routes': {'10.9.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}}},
'vpnv6 unicast': {'input_queue': 0,
'msg_rcvd': 11875,
'msg_sent': 11874,
'output_queue': 0,
'state_pfxrcd': '5',
'tbl_ver': 32,
'up_down': '1w1d'},
'vpnv6 unicast RD 200:1': {'advertised': {'2001:db8:cdc9:144::/64': {'index': {1: {'froms': '2001:db8:20:1:5::5',
'next_hop': '10.4.1.1',
'origin_code': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}'}}},
'2001:db8:cdc9:121::/64': {'index': {1: {'froms': '2001:db8:20:1:5::5',
'next_hop': '10.4.1.1',
'origin_code': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27017}'}}}}},
'vpnv6 unicast RD 300:1': {'received_routes': {'2001:db8:31b9:1b9::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'2001:db8:31b9:121::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}},
'routes': {'2001:db8:31b9:144::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'2001:db8:31b9:121::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}}},
'vpnv6 unicast RD 400:1': {'received_routes': {'2001:db8:a69:4c9::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'2001:db8:a69:484::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}},
'routes': {'2001:db8:a69:484::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}}}},
'remote_as': 100}}}}}}}
# ============
# GET OPER RPC
# ============
class etree_holder():
    """Holder for the canned GET-oper RPC reply used by these tests.

    The XML below is a static openconfig-bgp operational snapshot (global
    AS 100 / router-id 10.4.1.1, two L3VPN AFI-SAFIs, and one idle
    neighbor 10.16.2.2).  Parsing happens in __init__, so every
    instantiation yields a fresh Element exposed as ``data_ele``.
    """

    def __init__(self):
        # ET is expected to be xml.etree.ElementTree; fromstring returns
        # the root <data> Element of the canned reply.
        self.data_ele = ET.fromstring('''
<data>
<bgp xmlns="http://openconfig.net/yang/bgp">
<global>
<config>
<as>100</as>
<router-id>10.4.1.1</router-id>
</config>
<state>
<as>100</as>
<router-id>10.4.1.1</router-id>
<total-paths>0</total-paths>
<total-prefixes>0</total-prefixes>
</state>
<afi-safis>
<afi-safi>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv4-unicast</afi-safi-name>
<config>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv4-unicast</afi-safi-name>
<enabled>true</enabled>
</config>
<state>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv4-unicast</afi-safi-name>
<enabled>true</enabled>
<total-paths>0</total-paths>
<total-prefixes>0</total-prefixes>
</state>
</afi-safi>
<afi-safi>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv6-unicast</afi-safi-name>
<config>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv6-unicast</afi-safi-name>
<enabled>true</enabled>
</config>
<state>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv6-unicast</afi-safi-name>
<enabled>true</enabled>
<total-paths>0</total-paths>
<total-prefixes>0</total-prefixes>
</state>
</afi-safi>
</afi-safis>
</global>
<neighbors>
<neighbor>
<neighbor-address>10.16.2.2</neighbor-address>
<config>
<neighbor-address>10.16.2.2</neighbor-address>
<peer-as>100</peer-as>
</config>
<state>
<neighbor-address>10.16.2.2</neighbor-address>
<peer-as>100</peer-as>
<queues>
<input>0</input>
<output>0</output>
</queues>
<session-state>IDLE</session-state>
<messages>
<sent>
<NOTIFICATION>0</NOTIFICATION>
<UPDATE>0</UPDATE>
</sent>
<received>
<NOTIFICATION>0</NOTIFICATION>
<UPDATE>0</UPDATE>
</received>
</messages>
</state>
<transport>
<config>
<local-address>Loopback0</local-address>
</config>
<state>
<local-address>Loopback0</local-address>
<local-port>0</local-port>
<remote-address>10.16.2.2</remote-address>
<remote-port>0</remote-port>
</state>
</transport>
<afi-safis>
<afi-safi>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv4-unicast</afi-safi-name>
<config>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv4-unicast</afi-safi-name>
<enabled>true</enabled>
</config>
<state>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv4-unicast</afi-safi-name>
<enabled>true</enabled>
<active>false</active>
<prefixes>
<received>0</received>
<sent>0</sent>
</prefixes>
</state>
</afi-safi>
<afi-safi>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv6-unicast</afi-safi-name>
<config>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv6-unicast</afi-safi-name>
<enabled>true</enabled>
</config>
<state>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv6-unicast</afi-safi-name>
<enabled>true</enabled>
<active>false</active>
<prefixes>
<received>0</received>
<sent>0</sent>
</prefixes>
</state>
</afi-safi>
</afi-safis>
<timers>
<state>
<negotiated-hold-time>180</negotiated-hold-time>
</state>
</timers>
<graceful-restart>
<state>
<peer-restart-time>120</peer-restart-time>
</state>
</graceful-restart>
</neighbor>
</neighbors>
</bgp>
</data>
''')
yang_output = etree_holder()
| 87.53985
| 203
| 0.183849
|
import xml.etree.ElementTree as ET
class BgpOutput(object):
===
BGP Process Information:
BGP is operating in STANDALONE mode
Autonomous System number format: ASPLAIN
Autonomous System: 100
Router ID: 10.4.1.1 (manually configured)
Default Cluster ID: 10.4.1.1
Active Cluster IDs: 10.4.1.1
Fast external fallover enabled
Platform RLIMIT max: 2281701376 bytes
Maximum limit for BMP buffer size: 435 MB
Default value for BMP buffer size: 326 MB
Current limit for BMP buffer size: 326 MB
Current utilization of BMP buffer limit: 0 B
Neighbor logging is enabled
Enforce first AS enabled
Default local preference: 100
Default keepalive: 60
Non-stop routing is enabled
Update delay: 120
Generic scan interval: 60
BGP Speaker process: 3, Node: node0_RSP1_CPU0
Restart count: 32
Total Nbrs Estab/Cfg
Default VRFs: 1 0/3
Non-Default VRFs: 2 0/4
Sent Received
Updates: 0 0
Notifications: 0 0
Number Memory Used
Attributes: 0 0
AS Paths: 0 0
Communities: 0 0
Large Communities: 0 0
Extended communities: 0 0
PMSI Tunnel attr: 0 0
RIBRNH Tunnel attr: 0 0
PPMP attr: 0 0
Tunnel Encap attr: 0 0
PE distinguisher labels: 0 0
Route Reflector Entries: 0 0
Nexthop Entries: 27 10800
Alloc Free
Pool 200: 0 0
Pool 300: 1 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5000: 0 0
Pool 20000: 0 0
Message logging pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 500: 0 0
Pool 2200: 0 0
Pool 4500: 0 0
BMP pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 300: 0 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5500: 0 0
Pool 6500: 0 0
Pool 7500: 0 0
Pool 8500: 0 0
Pool 10000: 0 0
Pool 20000: 0 0
Address family: VPNv4 Unicast
Dampening is not enabled
Client reflection is not enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 3
Table version synced to RIB: 3
Table version acked by RIB: 0
RIB has not converged: version 0
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 3
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Maximum supported label-stack depth:
For IPv4 Nexthop: 0
For IPv6 Nexthop: 0
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.887 3 3 18
Aug 18 12:00:08.885 0 3 4
Total triggers: 3
Import Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.887 3 3 18
Aug 18 12:00:08.885 0 3 18
Total triggers: 3
RIB Thread Aug 18 12:00:08.887 3 3 8
Aug 18 12:00:08.885 1 3 8
Aug 18 12:00:08.882 1 3 6
Total triggers: 3
Update Thread Aug 18 12:00:11.883 3 3 8
Aug 18 12:00:08.887 3 3 8
Aug 18 12:00:08.887 3 3 18
Total triggers: 3
Allocated Freed
Remote Prefixes: 0 0
Remote Paths: 0 0
Remote Path-elems: 0 0
Local Prefixes: 0 0
Local Paths: 0 0
Number Mem Used
Remote Prefixes: 0 0
Remote Paths: 0 0
Remote Path-elems: 0 0
Remote RDs: 0 0
Local Prefixes: 0 0
Local Paths: 0 0
Local RDs: 2 160
Total Prefixes: 0 0
Total Paths: 0 0
Total Path-elems: 0 0
Imported Paths: 0 0
Total RDs: 2 160
Address family: VPNv6 Unicast
Dampening is not enabled
Client reflection is enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 3
Table version synced to RIB: 3
Table version acked by RIB: 0
RIB has not converged: version 0
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 3
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Maximum supported label-stack depth:
For IPv4 Nexthop: 0
For IPv6 Nexthop: 0
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.890 3 3 19
Aug 18 12:00:08.882 0 3 4
Total triggers: 3
Import Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.890 3 3 19
Aug 18 12:00:08.882 0 3 19
Total triggers: 3
RIB Thread Aug 18 12:00:08.890 3 3 8
Aug 18 12:00:08.883 1 3 8
Aug 18 12:00:08.882 1 3 6
Total triggers: 3
Update Thread Aug 18 12:00:11.883 3 3 8
Aug 18 12:00:08.890 3 3 8
Aug 18 12:00:08.890 3 3 19
Total triggers: 3
Allocated Freed
Remote Prefixes: 0 0
Remote Paths: 0 0
Remote Path-elems: 0 0
Local Prefixes: 0 0
Local Paths: 0 0
Number Mem Used
Remote Prefixes: 0 0
Remote Paths: 0 0
Remote Path-elems: 0 0
Remote RDs: 0 0
Local Prefixes: 0 0
Local Paths: 0 0
Local RDs: 2 160
Total Prefixes: 0 0
Total Paths: 0 0
Total Path-elems: 0 0
Imported Paths: 0 0
Total RDs: 2 160
Address family: IPv4 Unicast
Dampening is not enabled
Client reflection is enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 2
Table version synced to RIB: 2
Table version acked by RIB: 2
IGP notification: IGPs notified
RIB has converged: version 4
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 2
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Total triggers: 0
Import Thread Aug 18 12:00:11.883 2 2 3
Aug 18 12:00:08.885 2 2 18
Aug 18 12:00:08.881 0 2 18
Total triggers: 3
RIB Thread Aug 18 12:00:11.883 2 2 3
Aug 18 12:00:08.885 2 2 4
Aug 18 12:00:08.885 2 2 18
Aug 18 12:00:08.882 1 2 4
Aug 18 12:00:08.881 1 2 6
Total triggers: 5
Update Thread Aug 18 12:00:11.883 2 2 3
Aug 18 12:00:08.885 2 2 4
Aug 18 12:00:08.885 2 2 18
Aug 18 12:00:08.884 2 2 18
Aug 18 11:55:08.888 1 2 3
Aug 18 11:55:08.883 1 2 9
Total triggers: 6
Allocated Freed
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
Number Mem Used
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
BMP Prefixes: 0 0
BMP Paths: 0 0
Address family: IPv6 Unicast
Dampening is not enabled
Client reflection is enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 2
Table version synced to RIB: 2
Table version acked by RIB: 2
RIB has converged: version 2
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 2
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Total triggers: 0
Import Thread Aug 18 12:00:11.883 2 2 3
Aug 18 12:00:08.886 2 2 19
Aug 18 12:00:08.882 0 2 19
Total triggers: 3
RIB Thread Aug 18 12:00:11.883 2 2 3
Aug 18 12:00:08.886 2 2 19
Aug 18 12:00:08.882 1 2 4
Aug 18 12:00:08.882 1 2 6
Total triggers: 4
Update Thread Aug 18 12:00:11.883 2 2 3
Aug 18 12:00:08.886 2 2 19
Aug 18 12:00:08.886 2 2 19
Aug 18 12:00:08.882 1 2 4
Aug 18 11:55:08.888 1 2 3
Aug 18 11:55:08.883 1 2 9
Total triggers: 6
Allocated Freed
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
Number Mem Used
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
BMP Prefixes: 0 0
BMP Paths: 0 0
'''
ProcessIpv4Output = '''\
RP/0/RSP1/CPU0:PE1#show bgp instance all vrf all ipv4 unicast process detail
BGP instance 0: 'default'
=========================
VRF: VRF1
---------
BGP Process Information: VRF VRF1
BGP Route Distinguisher: 200:1
BGP is operating in STANDALONE mode
Autonomous System number format: ASPLAIN
Autonomous System: 100
Router ID: 10.229.11.11 (manually configured)
Default Cluster ID: 10.4.1.1
Active Cluster IDs: 10.4.1.1
Fast external fallover enabled
Platform RLIMIT max: 2281701376 bytes
Maximum limit for BMP buffer size: 435 MB
Default value for BMP buffer size: 326 MB
Current limit for BMP buffer size: 326 MB
Current utilization of BMP buffer limit: 0 B
Neighbor logging is enabled
Enforce first AS enabled
iBGP to IGP redistribution enabled
Default local preference: 100
Default keepalive: 60
Non-stop routing is enabled
Update delay: 120
Generic scan interval: 60
BGP Speaker process: 3, Node: node0_RSP1_CPU0
Restart count: 32
Total Nbrs Estab/Cfg
Default VRFs: 1 0/3
Non-Default VRFs: 2 0/4
This VRF: 0/2
Sent Received
Updates: 0 0
Notifications: 0 0
Number Memory Used
Attributes: 0 0
AS Paths: 0 0
Communities: 0 0
Large Communities: 0 0
Extended communities: 0 0
PMSI Tunnel attr: 0 0
RIBRNH Tunnel attr: 0 0
PPMP attr: 0 0
Tunnel Encap attr: 0 0
PE distinguisher labels: 0 0
Route Reflector Entries: 0 0
Nexthop Entries: 27 10800
Alloc Free
Pool 200: 0 0
Pool 300: 1 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5000: 0 0
Pool 20000: 0 0
Message logging pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 500: 0 0
Pool 2200: 0 0
Pool 4500: 0 0
BMP pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 300: 0 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5500: 0 0
Pool 6500: 0 0
Pool 7500: 0 0
Pool 8500: 0 0
Pool 10000: 0 0
Pool 20000: 0 0
VRF VRF1 Address family: IPv4 Unicast
Dampening is not enabled
Client reflection is not enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 3
Table version synced to RIB: 3
Table version acked by RIB: 3
IGP notification: IGPs notified
RIB has converged: version 2
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 3
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.887 3 3 18
Aug 18 12:00:08.885 0 3 4
Total triggers: 3
Import Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.887 3 3 18
Aug 18 12:00:08.885 0 3 18
Total triggers: 3
RIB Thread Aug 18 12:00:08.887 3 3 8
Aug 18 12:00:08.885 1 3 8
Aug 18 12:00:08.882 1 3 6
Total triggers: 3
Update Thread Aug 18 12:00:11.883 3 3 8
Aug 18 12:00:08.887 3 3 8
Aug 18 12:00:08.887 3 3 18
Total triggers: 3
Allocated Freed
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
Number Mem Used
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
BMP Prefixes: 0 0
BMP Paths: 0 0
VRF: VRF2
---------
BGP Process Information: VRF VRF2
BGP Route Distinguisher: 200:2
BGP is operating in STANDALONE mode
Autonomous System number format: ASPLAIN
Autonomous System: 100
Router ID: 10.229.11.11 (manually configured)
Default Cluster ID: 10.4.1.1
Active Cluster IDs: 10.4.1.1
Fast external fallover enabled
Platform RLIMIT max: 2281701376 bytes
Maximum limit for BMP buffer size: 435 MB
Default value for BMP buffer size: 326 MB
Current limit for BMP buffer size: 326 MB
Current utilization of BMP buffer limit: 0 B
Neighbor logging is enabled
Enforce first AS enabled
iBGP to IGP redistribution enabled
Default local preference: 100
Default keepalive: 60
Non-stop routing is enabled
Update delay: 120
Generic scan interval: 60
BGP Speaker process: 3, Node: node0_RSP1_CPU0
Restart count: 32
Total Nbrs Estab/Cfg
Default VRFs: 1 0/3
Non-Default VRFs: 2 0/4
This VRF: 0/2
Sent Received
Updates: 0 0
Notifications: 0 0
Number Memory Used
Attributes: 0 0
AS Paths: 0 0
Communities: 0 0
Large Communities: 0 0
Extended communities: 0 0
PMSI Tunnel attr: 0 0
RIBRNH Tunnel attr: 0 0
PPMP attr: 0 0
Tunnel Encap attr: 0 0
PE distinguisher labels: 0 0
Route Reflector Entries: 0 0
Nexthop Entries: 27 10800
Alloc Free
Pool 200: 0 0
Pool 300: 1 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5000: 0 0
Pool 20000: 0 0
Message logging pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 500: 0 0
Pool 2200: 0 0
Pool 4500: 0 0
BMP pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 300: 0 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5500: 0 0
Pool 6500: 0 0
Pool 7500: 0 0
Pool 8500: 0 0
Pool 10000: 0 0
Pool 20000: 0 0
VRF VRF2 Address family: IPv4 Unicast
Dampening is not enabled
Client reflection is not enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 3
Table version synced to RIB: 3
Table version acked by RIB: 3
IGP notification: IGPs notified
RIB has converged: version 2
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 3
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.887 3 3 18
Aug 18 12:00:08.885 0 3 4
Total triggers: 3
Import Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.887 3 3 18
Aug 18 12:00:08.885 0 3 18
Total triggers: 3
RIB Thread Aug 18 12:00:08.887 3 3 8
Aug 18 12:00:08.885 1 3 8
Aug 18 12:00:08.882 1 3 6
Total triggers: 3
Update Thread Aug 18 12:00:11.883 3 3 8
Aug 18 12:00:08.887 3 3 8
Aug 18 12:00:08.887 3 3 18
Total triggers: 3
Allocated Freed
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
Number Mem Used
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
BMP Prefixes: 0 0
BMP Paths: 0 0
'''
ProcessIpv6Output = '''\
RP/0/RSP1/CPU0:PE1#show bgp instance all vrf all ipv6 unicast process detail
BGP instance 0: 'default'
=========================
VRF: VRF1
---------
BGP Process Information: VRF VRF1
BGP Route Distinguisher: 200:1
BGP is operating in STANDALONE mode
Autonomous System number format: ASPLAIN
Autonomous System: 100
Router ID: 10.229.11.11 (manually configured)
Default Cluster ID: 10.4.1.1
Active Cluster IDs: 10.4.1.1
Fast external fallover enabled
Platform RLIMIT max: 2281701376 bytes
Maximum limit for BMP buffer size: 435 MB
Default value for BMP buffer size: 326 MB
Current limit for BMP buffer size: 326 MB
Current utilization of BMP buffer limit: 0 B
Neighbor logging is enabled
Enforce first AS enabled
iBGP to IGP redistribution enabled
Default local preference: 100
Default keepalive: 60
Non-stop routing is enabled
Update delay: 120
Generic scan interval: 60
BGP Speaker process: 3, Node: node0_RSP1_CPU0
Restart count: 32
Total Nbrs Estab/Cfg
Default VRFs: 1 0/3
Non-Default VRFs: 2 0/4
This VRF: 0/2
Sent Received
Updates: 0 0
Notifications: 0 0
Number Memory Used
Attributes: 0 0
AS Paths: 0 0
Communities: 0 0
Large Communities: 0 0
Extended communities: 0 0
PMSI Tunnel attr: 0 0
RIBRNH Tunnel attr: 0 0
PPMP attr: 0 0
Tunnel Encap attr: 0 0
PE distinguisher labels: 0 0
Route Reflector Entries: 0 0
Nexthop Entries: 27 10800
Alloc Free
Pool 200: 0 0
Pool 300: 1 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5000: 0 0
Pool 20000: 0 0
Message logging pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 500: 0 0
Pool 2200: 0 0
Pool 4500: 0 0
BMP pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 300: 0 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5500: 0 0
Pool 6500: 0 0
Pool 7500: 0 0
Pool 8500: 0 0
Pool 10000: 0 0
Pool 20000: 0 0
VRF VRF1 Address family: IPv6 Unicast
Dampening is not enabled
Client reflection is not enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 3
Table version synced to RIB: 3
Table version acked by RIB: 3
RIB has converged: version 2
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 3
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.890 3 3 19
Aug 18 12:00:08.882 0 3 4
Total triggers: 3
Import Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.890 3 3 19
Aug 18 12:00:08.882 0 3 19
Total triggers: 3
RIB Thread Aug 18 12:00:08.890 3 3 8
Aug 18 12:00:08.883 1 3 8
Aug 18 12:00:08.882 1 3 6
Total triggers: 3
Update Thread Aug 18 12:00:11.883 3 3 8
Aug 18 12:00:08.890 3 3 8
Aug 18 12:00:08.890 3 3 19
Total triggers: 3
Allocated Freed
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
Number Mem Used
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
BMP Prefixes: 0 0
BMP Paths: 0 0
VRF: VRF2
---------
BGP Process Information: VRF VRF2
BGP Route Distinguisher: 200:2
BGP is operating in STANDALONE mode
Autonomous System number format: ASPLAIN
Autonomous System: 100
Router ID: 10.229.11.11 (manually configured)
Default Cluster ID: 10.4.1.1
Active Cluster IDs: 10.4.1.1
Fast external fallover enabled
Platform RLIMIT max: 2281701376 bytes
Maximum limit for BMP buffer size: 435 MB
Default value for BMP buffer size: 326 MB
Current limit for BMP buffer size: 326 MB
Current utilization of BMP buffer limit: 0 B
Neighbor logging is enabled
Enforce first AS enabled
iBGP to IGP redistribution enabled
Default local preference: 100
Default keepalive: 60
Non-stop routing is enabled
Update delay: 120
Generic scan interval: 60
BGP Speaker process: 3, Node: node0_RSP1_CPU0
Restart count: 32
Total Nbrs Estab/Cfg
Default VRFs: 1 0/3
Non-Default VRFs: 2 0/4
This VRF: 0/2
Sent Received
Updates: 0 0
Notifications: 0 0
Number Memory Used
Attributes: 0 0
AS Paths: 0 0
Communities: 0 0
Large Communities: 0 0
Extended communities: 0 0
PMSI Tunnel attr: 0 0
RIBRNH Tunnel attr: 0 0
PPMP attr: 0 0
Tunnel Encap attr: 0 0
PE distinguisher labels: 0 0
Route Reflector Entries: 0 0
Nexthop Entries: 27 10800
Alloc Free
Pool 200: 0 0
Pool 300: 1 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5000: 0 0
Pool 20000: 0 0
Message logging pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 500: 0 0
Pool 2200: 0 0
Pool 4500: 0 0
BMP pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 300: 0 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5500: 0 0
Pool 6500: 0 0
Pool 7500: 0 0
Pool 8500: 0 0
Pool 10000: 0 0
Pool 20000: 0 0
VRF VRF2 Address family: IPv6 Unicast
Dampening is not enabled
Client reflection is not enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 3
Table version synced to RIB: 3
Table version acked by RIB: 3
RIB has converged: version 2
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 3
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.890 3 3 19
Aug 18 12:00:08.882 0 3 4
Total triggers: 3
Import Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.890 3 3 19
Aug 18 12:00:08.882 0 3 19
Total triggers: 3
RIB Thread Aug 18 12:00:08.890 3 3 8
Aug 18 12:00:08.883 1 3 8
Aug 18 12:00:08.882 1 3 6
Total triggers: 3
Update Thread Aug 18 12:00:11.883 3 3 8
Aug 18 12:00:08.890 3 3 8
Aug 18 12:00:08.890 3 3 19
Total triggers: 3
Allocated Freed
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
Number Mem Used
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
BMP Prefixes: 0 0
BMP Paths: 0 0
'''
NeighborsAllOutput = '''\
RP/0/RSP1/CPU0:PE1#show bgp instance all all all neighbors detail
BGP instance 0: 'default'
=========================
BGP neighbor is 10.16.2.2
Remote AS 100, local AS 100, internal link
Remote router ID 0.0.0.0
Speaker ID 3
BGP state = Idle (No route to multi-hop neighbor)
NSR State: None
Last read 00:00:00, Last read before reset 00:00:00
Hold time is 180, keepalive interval is 60 seconds
Configured hold time: 180, keepalive: 60, min acceptable hold time: 3
Last write 00:00:00, attempted 0, written 0
Second last write 00:00:00, attempted 0, written 0
Last write before reset 00:00:00, attempted 0, written 0
Second last write before reset 00:00:00, attempted 0, written 0
Last write pulse rcvd not set last full not set pulse count 0
Last write pulse rcvd before reset 00:00:00
Socket not armed for io, not armed for read, not armed for write
Last write thread event before reset 00:00:00, second last 00:00:00
Last KA expiry before reset 00:00:00, second last 00:00:00
Last KA error before reset 00:00:00, KA not sent 00:00:00
Last KA start before reset 00:00:00, second last 00:00:00
Precedence: internet
Non-stop routing is enabled
Entered Neighbor NSR TCP mode:
TCP Initial Sync : ---
TCP Initial Sync Phase Two : ---
TCP Initial Sync Done : ---
Multi-protocol capability not received
Message stats:
InQ depth: 0, OutQ depth: 0
Last_Sent Sent Last_Rcvd Rcvd
Open: --- 0 --- 0
Notification: --- 0 --- 0
Update: --- 0 --- 0
Keepalive: --- 0 --- 0
Route_Refresh: --- 0 --- 0
Total: 0 0
Minimum time between advertisement runs is 0 secs
Inbound message logging enabled, 3 messages buffered
Outbound message logging enabled, 3 messages buffered
For Address Family: VPNv4 Unicast
BGP neighbor version 0
Update group: 3.1 Filter-group: 3.0 No Refresh request being processed
Route refresh request: received 0, sent 0
0 accepted prefixes, 0 are bestpaths
Exact no. of prefixes denied : 0.
Cumulative no. of prefixes denied: 0.
Prefix advertised 0, suppressed 0, withdrawn 0
Maximum prefixes allowed 2097152
Threshold for warning message 75%, restart interval 0 min
AIGP is enabled
An EoR was not received during read-only mode
Last ack version 1, Last synced ack version 0
Outstanding version objects: current 0, max 0
Additional-paths operation: None
Send Multicast Attributes
For Address Family: VPNv6 Unicast
BGP neighbor version 0
Update group: 3.1 Filter-group: 3.0 No Refresh request being processed
Route refresh request: received 0, sent 0
0 accepted prefixes, 0 are bestpaths
Exact no. of prefixes denied : 0.
Cumulative no. of prefixes denied: 0.
Prefix advertised 0, suppressed 0, withdrawn 0
Maximum prefixes allowed 1048576
Threshold for warning message 75%, restart interval 0 min
AIGP is enabled
An EoR was not received during read-only mode
Last ack version 1, Last synced ack version 0
Outstanding version objects: current 0, max 0
Additional-paths operation: None
Send Multicast Attributes
Connections established 0; dropped 0
Local host: 0.0.0.0, Local port: 0, IF Handle: 0x00000000
Foreign host: 10.16.2.2, Foreign port: 0
Last reset 00:00:00
'''
NeighborsIpv4Output = '''\
RP/0/RSP1/CPU0:PE1#show bgp instance all vrf all ipv4 unicast neighbors detail
BGP instance 0: 'default'
=========================
VRF: VRF1
---------
BGP neighbor is 10.1.5.5, vrf VRF1
Remote AS 200, local AS 100, external link
Remote router ID 0.0.0.0
Speaker ID 3
BGP state = Idle (No best local address found)
NSR State: None
Last read 00:00:00, Last read before reset 00:00:00
Hold time is 180, keepalive interval is 60 seconds
Configured hold time: 180, keepalive: 60, min acceptable hold time: 3
Last write 00:00:00, attempted 0, written 0
Second last write 00:00:00, attempted 0, written 0
Last write before reset 00:00:00, attempted 0, written 0
Second last write before reset 00:00:00, attempted 0, written 0
Last write pulse rcvd not set last full not set pulse count 0
Last write pulse rcvd before reset 00:00:00
Socket not armed for io, not armed for read, not armed for write
Last write thread event before reset 00:00:00, second last 00:00:00
Last KA expiry before reset 00:00:00, second last 00:00:00
Last KA error before reset 00:00:00, KA not sent 00:00:00
Last KA start before reset 00:00:00, second last 00:00:00
Precedence: internet
Non-stop routing is enabled
Entered Neighbor NSR TCP mode:
TCP Initial Sync : ---
TCP Initial Sync Phase Two : ---
TCP Initial Sync Done : ---
Enforcing first AS is enabled
Multi-protocol capability not received
Message stats:
InQ depth: 0, OutQ depth: 0
Last_Sent Sent Last_Rcvd Rcvd
Open: --- 0 --- 0
Notification: --- 0 --- 0
Update: --- 0 --- 0
Keepalive: --- 0 --- 0
Route_Refresh: --- 0 --- 0
Total: 0 0
Minimum time between advertisement runs is 0 secs
Inbound message logging enabled, 3 messages buffered
Outbound message logging enabled, 3 messages buffered
For Address Family: IPv4 Unicast
BGP neighbor version 0
Update group: 3.1 Filter-group: 3.0 No Refresh request being processed
Route refresh request: received 0, sent 0
Policy for incoming advertisements is all-pass
Policy for outgoing advertisements is all-pass
0 accepted prefixes, 0 are bestpaths
Exact no. of prefixes denied : 0.
Cumulative no. of prefixes denied: 0.
Prefix advertised 0, suppressed 0, withdrawn 0
Maximum prefixes allowed 1048576
Threshold for warning message 75%, restart interval 0 min
An EoR was not received during read-only mode
Last ack version 1, Last synced ack version 0
Outstanding version objects: current 0, max 0
Additional-paths operation: None
Advertise routes with local-label via Unicast SAFI
Connections established 0; dropped 0
Local host: 0.0.0.0, Local port: 0, IF Handle: 0x00000000
Foreign host: 10.1.5.5, Foreign port: 0
Last reset 00:00:00
External BGP neighbor not directly connected.
VRF: VRF2
---------
BGP neighbor is 10.186.5.5, vrf VRF2
Remote AS 200, local AS 100, external link
Remote router ID 0.0.0.0
Speaker ID 3
BGP state = Idle (No best local address found)
NSR State: None
Last read 00:00:00, Last read before reset 00:00:00
Hold time is 180, keepalive interval is 60 seconds
Configured hold time: 180, keepalive: 60, min acceptable hold time: 3
Last write 00:00:00, attempted 0, written 0
Second last write 00:00:00, attempted 0, written 0
Last write before reset 00:00:00, attempted 0, written 0
Second last write before reset 00:00:00, attempted 0, written 0
Last write pulse rcvd not set last full not set pulse count 0
Last write pulse rcvd before reset 00:00:00
Socket not armed for io, not armed for read, not armed for write
Last write thread event before reset 00:00:00, second last 00:00:00
Last KA expiry before reset 00:00:00, second last 00:00:00
Last KA error before reset 00:00:00, KA not sent 00:00:00
Last KA start before reset 00:00:00, second last 00:00:00
Precedence: internet
Non-stop routing is enabled
Entered Neighbor NSR TCP mode:
TCP Initial Sync : ---
TCP Initial Sync Phase Two : ---
TCP Initial Sync Done : ---
Enforcing first AS is enabled
Multi-protocol capability not received
Message stats:
InQ depth: 0, OutQ depth: 0
Last_Sent Sent Last_Rcvd Rcvd
Open: --- 0 --- 0
Notification: --- 0 --- 0
Update: --- 0 --- 0
Keepalive: --- 0 --- 0
Route_Refresh: --- 0 --- 0
Total: 0 0
Minimum time between advertisement runs is 0 secs
Inbound message logging enabled, 3 messages buffered
Outbound message logging enabled, 3 messages buffered
For Address Family: IPv4 Unicast
BGP neighbor version 0
Update group: 3.1 Filter-group: 3.0 No Refresh request being processed
Inbound soft reconfiguration allowed (override route-refresh)
Route refresh request: received 0, sent 0
Policy for incoming advertisements is all-pass
Policy for outgoing advertisements is all-pass
0 accepted prefixes, 0 are bestpaths
Exact no. of prefixes denied : 0.
Cumulative no. of prefixes denied: 0.
Prefix advertised 0, suppressed 0, withdrawn 0
Maximum prefixes allowed 495
Threshold for warning message 75%, restart interval 0 min
An EoR was not received during read-only mode
Last ack version 1, Last synced ack version 0
Outstanding version objects: current 0, max 0
Additional-paths operation: None
Advertise routes with local-label via Unicast SAFI
Connections established 0; dropped 0
Local host: 0.0.0.0, Local port: 0, IF Handle: 0x00000000
Foreign host: 10.186.5.5, Foreign port: 0
Last reset 00:00:00
External BGP neighbor not directly connected.
'''
# Fixture: raw device output of 'show bgp instance all vrf all ipv6 unicast
# neighbors detail' (command visible on the first output line). Two idle eBGP
# neighbors, one in VRF1 and one in VRF2. Must stay byte-identical — parser
# tests compare against this text verbatim.
NeighborsIpv6Output = '''\
RP/0/RSP1/CPU0:PE1#show bgp instance all vrf all ipv6 unicast neighbors detail
BGP instance 0: 'default'
=========================
VRF: VRF1
---------
BGP neighbor is 2001:db8:1:5::5, vrf VRF1
Remote AS 200, local AS 100, external link
Remote router ID 0.0.0.0
Speaker ID 3
BGP state = Idle (No best local address found)
NSR State: None
Last read 00:00:00, Last read before reset 00:00:00
Hold time is 180, keepalive interval is 60 seconds
Configured hold time: 180, keepalive: 60, min acceptable hold time: 3
Last write 00:00:00, attempted 0, written 0
Second last write 00:00:00, attempted 0, written 0
Last write before reset 00:00:00, attempted 0, written 0
Second last write before reset 00:00:00, attempted 0, written 0
Last write pulse rcvd not set last full not set pulse count 0
Last write pulse rcvd before reset 00:00:00
Socket not armed for io, not armed for read, not armed for write
Last write thread event before reset 00:00:00, second last 00:00:00
Last KA expiry before reset 00:00:00, second last 00:00:00
Last KA error before reset 00:00:00, KA not sent 00:00:00
Last KA start before reset 00:00:00, second last 00:00:00
Precedence: internet
Non-stop routing is enabled
Entered Neighbor NSR TCP mode:
TCP Initial Sync : ---
TCP Initial Sync Phase Two : ---
TCP Initial Sync Done : ---
Enforcing first AS is enabled
Multi-protocol capability not received
Message stats:
InQ depth: 0, OutQ depth: 0
Last_Sent Sent Last_Rcvd Rcvd
Open: --- 0 --- 0
Notification: --- 0 --- 0
Update: --- 0 --- 0
Keepalive: --- 0 --- 0
Route_Refresh: --- 0 --- 0
Total: 0 0
Minimum time between advertisement runs is 0 secs
Inbound message logging enabled, 3 messages buffered
Outbound message logging enabled, 3 messages buffered
For Address Family: IPv6 Unicast
BGP neighbor version 0
Update group: 3.1 Filter-group: 3.0 No Refresh request being processed
Route refresh request: received 0, sent 0
Policy for incoming advertisements is all-pass
Policy for outgoing advertisements is all-pass
0 accepted prefixes, 0 are bestpaths
Exact no. of prefixes denied : 0.
Cumulative no. of prefixes denied: 0.
Prefix advertised 0, suppressed 0, withdrawn 0
Maximum prefixes allowed 524288
Threshold for warning message 75%, restart interval 0 min
An EoR was not received during read-only mode
Last ack version 1, Last synced ack version 0
Outstanding version objects: current 0, max 0
Additional-paths operation: None
Advertise routes with local-label via Unicast SAFI
Connections established 0; dropped 0
Local host: ::, Local port: 0, IF Handle: 0x00000000
Foreign host: 2001:db8:1:5::5, Foreign port: 0
Last reset 00:00:00
External BGP neighbor not directly connected.
VRF: VRF2
---------
BGP neighbor is 2001:db8:20:1:5::5, vrf VRF2
Remote AS 200, local AS 100, external link
Remote router ID 0.0.0.0
Speaker ID 3
BGP state = Idle (No best local address found)
NSR State: None
Last read 00:00:00, Last read before reset 00:00:00
Hold time is 180, keepalive interval is 60 seconds
Configured hold time: 180, keepalive: 60, min acceptable hold time: 3
Last write 00:00:00, attempted 0, written 0
Second last write 00:00:00, attempted 0, written 0
Last write before reset 00:00:00, attempted 0, written 0
Second last write before reset 00:00:00, attempted 0, written 0
Last write pulse rcvd not set last full not set pulse count 0
Last write pulse rcvd before reset 00:00:00
Socket not armed for io, not armed for read, not armed for write
Last write thread event before reset 00:00:00, second last 00:00:00
Last KA expiry before reset 00:00:00, second last 00:00:00
Last KA error before reset 00:00:00, KA not sent 00:00:00
Last KA start before reset 00:00:00, second last 00:00:00
Precedence: internet
Non-stop routing is enabled
Entered Neighbor NSR TCP mode:
TCP Initial Sync : ---
TCP Initial Sync Phase Two : ---
TCP Initial Sync Done : ---
Enforcing first AS is enabled
Multi-protocol capability not received
Message stats:
InQ depth: 0, OutQ depth: 0
Last_Sent Sent Last_Rcvd Rcvd
Open: --- 0 --- 0
Notification: --- 0 --- 0
Update: --- 0 --- 0
Keepalive: --- 0 --- 0
Route_Refresh: --- 0 --- 0
Total: 0 0
Minimum time between advertisement runs is 0 secs
Inbound message logging enabled, 3 messages buffered
Outbound message logging enabled, 3 messages buffered
For Address Family: IPv6 Unicast
BGP neighbor version 0
Update group: 3.1 Filter-group: 3.0 No Refresh request being processed
Route refresh request: received 0, sent 0
Policy for incoming advertisements is all-pass
Policy for outgoing advertisements is all-pass
0 accepted prefixes, 0 are bestpaths
Exact no. of prefixes denied : 0.
Cumulative no. of prefixes denied: 0.
Prefix advertised 0, suppressed 0, withdrawn 0
Maximum prefixes allowed 524288
Threshold for warning message 75%, restart interval 0 min
An EoR was not received during read-only mode
Last ack version 1, Last synced ack version 0
Outstanding version objects: current 0, max 0
Additional-paths operation: None
Advertise routes with local-label via Unicast SAFI
Connections established 0; dropped 0
Local host: ::, Local port: 0, IF Handle: 0x00000000
Foreign host: 2001:db8:20:1:5::5, Foreign port: 0
Last reset 00:00:00
External BGP neighbor not directly connected.
'''
10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 400:1
*>i10.205.1.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.2.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.3.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.4.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.5.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
Processed 40 prefixes, 55 paths
Address Family: VPNv6 Unicast
-----------------------------
BGP router identifier 10.4.1.1, local AS number 100
BGP generic scan interval 60 secs
Non-stop routing is enabled
BGP table state: Active
Table ID: 0x0 RD version: 0
BGP main routing table version 32
BGP NSR Initial initsync version 5 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP scan interval 60 secs
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 200:1 (default for vrf VRF1)
*> 2001:db8:cdc9:121::/64 2001:db8:1:5::5 2219 0 200 33299 51178 47751 {27016} e
* 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:144::/64 2001:db8:1:5::5 2219 0 200 33299 51178 47751 {27016} e
* 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:169::/64 2001:db8:1:5::5 2219 0 200 33299 51178 47751 {27016} e
* 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:190::/64 2001:db8:1:5::5 2219 0 200 33299 51178 47751 {27016} e
* 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:1b9::/64 2001:db8:1:5::5 2219 0 200 33299 51178 47751 {27016} e
* 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*>i2001:db8:31b9:121::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:144::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:169::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:190::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:1b9::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 200:2 (default for vrf VRF2)
*> 2001:db8:cdc9:121::/64 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:144::/64 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:169::/64 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:190::/64 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:1b9::/64 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*>i2001:db8:31b9:121::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:144::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:169::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:190::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:1b9::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 300:1
*>i2001:db8:31b9:121::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:144::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:169::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:190::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:1b9::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Processed 25 prefixes, 35 paths
'''
# Fixture: raw IPv4 per-VRF BGP table output for instance 'default' — VRF1
# (RD 200:1, 15 prefixes / 20 paths) and VRF2 (RD 200:2, 15 prefixes /
# 15 paths). Must stay byte-identical — parser tests compare against this
# text verbatim.
InstanceIpv4Output = '''\
BGP instance 0: 'default'
=========================
VRF: VRF1
---------
BGP VRF VRF1, state: Active
BGP Route Distinguisher: 200:1
VRF ID: 0x60000001
BGP router identifier 10.229.11.11, local AS number 100
Non-stop routing is enabled
BGP table state: Active
Table ID: 0xe0000010 RD version: 47
BGP main routing table version 47
BGP NSR Initial initsync version 5 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 200:1 (default for vrf VRF1)
*> 10.1.1.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.2.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.3.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.4.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.5.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*>i10.205.1.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.2.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.3.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.4.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.5.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.169.1.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.2.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.3.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.4.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.5.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Processed 15 prefixes, 20 paths
VRF: VRF2
---------
BGP VRF VRF2, state: Active
BGP Route Distinguisher: 200:2
VRF ID: 0x60000002
BGP router identifier 10.229.11.11, local AS number 100
Non-stop routing is enabled
BGP table state: Active
Table ID: 0xe0000011 RD version: 47
BGP main routing table version 47
BGP NSR Initial initsync version 5 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 200:2 (default for vrf VRF2)
*> 10.1.1.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.2.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.3.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.4.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.5.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*>i10.205.1.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.2.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.3.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.4.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.5.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.169.1.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.2.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.3.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.4.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.5.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Processed 15 prefixes, 15 paths
'''
# Intentionally empty fixture: this test case supplies no IPv6 instance
# output. (The original spelled this as '''\<newline>''' — same empty value.)
InstanceIpv6Output = ''
- EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 300:1
* i10.169.1.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 400:1
* i10.9.2.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
Processed 10 prefixes, 10 paths
Address Family: VPNv6 Unicast
-----------------------------
BGP router identifier 10.4.1.1, local AS number 100
BGP generic scan interval 60 secs
Non-stop routing is enabled
BGP table state: Active
Table ID: 0x0 RD version: 0
BGP main routing table version 43
BGP NSR Initial initsync version 11 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP scan interval 60 secs
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 300:1
* i2001:db8:31b9:121::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i2001:db8:31b9:1b9::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 400:1
* i2001:db8:a69:484::/64 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
* i2001:db8:a69:4c9::/64 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
Processed 10 prefixes, 10 paths
'''
# Fixture: raw IPv4 received-routes output for VRF1 (2 prefixes, 2 paths).
# NOTE(review): the body's RD line reads '200:1 (default for vrf VRF2)'
# under a VRF1 header — appears deliberate test data; do not "fix".
# Must stay byte-identical — parser tests compare against this text verbatim.
ReceivedIpv4Output = '''\
BGP instance 0: 'default'
=========================
VRF: VRF1
---------
BGP VRF VRF1, state: Active
BGP Route Distinguisher: 200:2
VRF ID: 0x60000002
BGP router identifier 10.229.11.11, local AS number 100
Non-stop routing is enabled
BGP table state: Active
Table ID: 0xe0000011 RD version: 63
BGP main routing table version 63
BGP NSR Initial initsync version 11 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 200:1 (default for vrf VRF2)
* 10.1.1.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.1.2.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
Processed 2 prefixes, 2 paths
'''
# Fixture: raw IPv6-case received-routes output for VRF2 (2 prefixes,
# 2 paths). Must stay byte-identical — parser tests compare against this
# text verbatim.
ReceivedIpv6Output = '''\
BGP instance 0: 'default'
=========================
VRF: VRF2
---------
BGP VRF VRF2, state: Active
BGP Route Distinguisher: 200:3
VRF ID: 0x50000006
BGP router identifier 10.229.11.11, local AS number 100
Non-stop routing is enabled
BGP table state: Active
Table ID: 0xe0000011 RD version: 63
BGP main routing table version 63
BGP NSR Initial initsync version 11 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 200:2 (default for vrf VRF1)
* 10.34.1.0/24 10.196.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.34.2.0/24 10.196.5.5 2219 0 200 33299 51178 47751 {27016} e
Processed 2 prefixes, 2 paths
'''
# Fixture: raw all-routes output covering both VPNv4 Unicast (RDs 300:1 and
# 400:1, 2 prefixes / 2 paths) and VPNv6 Unicast (RDs 300:1 and 400:1,
# 3 prefixes / 3 paths). Must stay byte-identical — parser tests compare
# against this text verbatim.
RoutesAllOutput = '''\
BGP instance 0: 'default'
=========================
Address Family: VPNv4 Unicast
-----------------------------
BGP router identifier 10.4.1.1, local AS number 100
BGP generic scan interval 60 secs
Non-stop routing is enabled
BGP table state: Active
Table ID: 0x0 RD version: 0
BGP main routing table version 43
BGP NSR Initial initsync version 11 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP scan interval 60 secs
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 300:1
* i10.169.1.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 400:1
* i10.9.2.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
Processed 2 prefixes, 2 paths
Address Family: VPNv6 Unicast
-----------------------------
BGP router identifier 10.4.1.1, local AS number 100
BGP generic scan interval 60 secs
Non-stop routing is enabled
BGP table state: Active
Table ID: 0x0 RD version: 0
BGP main routing table version 43
BGP NSR Initial initsync version 11 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP scan interval 60 secs
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 300:1
* i2001:db8:31b9:121::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i2001:db8:31b9:144::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 400:1
* i2001:db8:a69:484::/64 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
Processed 3 prefixes, 3 paths
'''
# Expected-value fixture: parsed BGP instance data. One fully populated
# 'default' instance (peer-policy template 'af_group', peer-session template
# 'SG', a single idle neighbor 10.16.2.2) plus three stub instances
# 'test'/'test1'/'test2'. Values are unchanged from the original literal;
# only the formatting differs (uniform 4-space indentation instead of the
# pprint-style layout, and the implicitly concatenated 'inbound ' 'always'
# written as one literal).
BgpInfo = {
    'instance': {
        'default': {
            'bgp_id': 100,
            'peer_policy': {
                'af_group': {
                    'allowas_in': True,
                    'allowas_in_as_number': 10,
                    'as_override': True,
                    'default_originate': True,
                    'default_originate_route_map': 'allpass',
                    'maximum_prefix_max_prefix_no': 429,
                    'maximum_prefix_restart': 35,
                    'maximum_prefix_threshold': 75,
                    'next_hop_self': True,
                    'route_map_name_in': 'allpass',
                    'route_map_name_out': 'allpass',
                    'route_reflector_client': True,
                    'send_community': 'both',
                    'soft_reconfiguration': 'inbound always',
                    'soo': '100:1',
                },
            },
            'peer_session': {
                'SG': {
                    'description': 'SG_group',
                    'disable_connected_check': True,
                    'ebgp_multihop_enable': True,
                    'ebgp_multihop_max_hop': 254,
                    'fall_over_bfd': True,
                    'holdtime': 30,
                    'keepalive_interval': 10,
                    'local_as_as_no': 200,
                    'local_dual_as': True,
                    'local_no_prepend': True,
                    'local_replace_as': True,
                    'password_text': '094F471A1A0A464058',
                    'remote_as': 333,
                    'shutdown': True,
                    'suppress_four_byte_as_capability': True,
                    'transport_connection_mode': 'active-only',
                    'update_source': 'loopback0',
                },
            },
            'protocol_state': 'RUNNING',
            'vrf': {
                'default': {
                    'neighbor': {
                        '10.16.2.2': {
                            'bgp_neighbor_counters': {
                                'messages': {
                                    'received': {'notifications': 0,
                                                 'updates': 0},
                                    'sent': {'notifications': 0,
                                             'updates': 0},
                                },
                            },
                            'bgp_session_transport': {
                                'transport': {
                                    'foreign_host': '0',
                                    'foreign_port': '10.16.2.2',
                                    'local_host': 'Loopback0',
                                    'local_port': '0',
                                },
                            },
                            'holdtime': 180,
                            'remote_as': 100,
                            'session_state': 'idle',
                        },
                    },
                    'router_id': '10.4.1.1',
                },
            },
        },
        'test': {'bgp_id': 333},
        'test1': {'bgp_id': 333},
        'test2': {'bgp_id': 333},
    },
}
BgpTable = {
'instance':
{'default':
{'vrf':
{'VRF1':
{'address_family':
{'vpnv4 unicast':
{'bgp_table_version': 47,
'local_as': 100},
'vpnv4 unicast RD 200:1':
{'default_vrf': 'vrf1',
'prefixes':
{'10.1.1.0/24':
{'index':
{1:
{'metric': '2219',
'next_hop': '10.1.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'},
2: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*',
'weight': '0'}}},
'10.1.2.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.1.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'},
2: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*',
'weight': '0'}}},
'10.1.3.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.1.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'},
2: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*',
'weight': '0'}}},
'10.1.4.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.1.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'},
2: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*',
'weight': '0'}}},
'10.1.5.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.1.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'},
2: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*',
'weight': '0'}}},
'10.205.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.3.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.4.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.5.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.3.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.4.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.5.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}}},
'route_distinguisher': '200:1'}}},
'VRF2': {'address_family': {'vpnv4 unicast': {'bgp_table_version': 47,
'local_as': 100},
'vpnv4 unicast RD 200:2': {'default_vrf': 'vrf2',
'prefixes': {'10.1.1.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'10.1.2.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'10.1.3.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'10.1.4.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'10.1.5.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'10.205.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.3.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.4.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.5.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.3.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.4.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.5.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}}},
'route_distinguisher': '200:2'}}},
'default': {'address_family': {'vpnv4 unicast': {'bgp_table_version': 47,
'local_as': 100},
'vpnv4 unicast RD 200:1': {'default_vrf': 'vrf1',
'prefixes': {'10.1.1.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.1.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'},
2: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*',
'weight': '0'}}},
'10.1.2.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.1.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'},
2: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*',
'weight': '0'}}},
'10.1.3.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.1.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'},
2: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*',
'weight': '0'}}},
'10.1.4.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.1.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'},
2: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*',
'weight': '0'}}},
'10.1.5.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.1.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'},
2: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*',
'weight': '0'}}},
'10.205.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.3.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.4.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.5.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.3.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.4.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.5.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}}},
'route_distinguisher': '200:1'},
'vpnv4 unicast RD 200:2': {'default_vrf': 'vrf2',
'prefixes': {'10.1.1.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'10.1.2.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'10.1.3.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'10.1.4.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'10.1.5.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'10.205.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.3.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.4.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.5.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.3.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.4.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.5.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}}},
'route_distinguisher': '200:2'},
'vpnv4 unicast RD 300:1': {'default_vrf': 'none',
'prefixes': {'10.169.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'10.169.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'10.169.3.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'10.169.4.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'10.169.5.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}},
'route_distinguisher': '300:1'},
'vpnv4 unicast RD 400:1': {'default_vrf': 'none',
'prefixes': {'10.205.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'10.205.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'10.205.3.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'10.205.4.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'10.205.5.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}},
'route_distinguisher': '400:1'},
'vpnv6 unicast': {'bgp_table_version': 32,
'local_as': 100},
'vpnv6 unicast RD 200:1': {'default_vrf': 'vrf1',
'prefixes': {'2001:db8:cdc9:144::/64': {'index': {1: {'metric': '2219',
'next_hop': '2001:db8:1:5::5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'2001:db8:cdc9:169::/64': {'index': {1: {'metric': '2219',
'next_hop': '2001:db8:1:5::5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'2001:db8:cdc9:190::/64': {'index': {1: {'metric': '2219',
'next_hop': '2001:db8:1:5::5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'2001:db8:cdc9:1b9::/64': {'index': {1: {'metric': '2219',
'next_hop': '2001:db8:1:5::5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'2001:db8:cdc9:121::/64': {'index': {1: {'metric': '2219',
'next_hop': '2001:db8:1:5::5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'2001:db8:31b9:144::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'2001:db8:31b9:169::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'2001:db8:31b9:190::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'2001:db8:31b9:1b9::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'2001:db8:31b9:121::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}}},
'route_distinguisher': '200:1'},
'vpnv6 unicast RD 200:2': {'default_vrf': 'vrf2',
'prefixes': {'2001:db8:cdc9:144::/64': {'index': {1: {'metric': '2219',
'next_hop': '2001:db8:20:1:5::5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'2001:db8:cdc9:169::/64': {'index': {1: {'metric': '2219',
'next_hop': '2001:db8:20:1:5::5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'2001:db8:cdc9:190::/64': {'index': {1: {'metric': '2219',
'next_hop': '2001:db8:20:1:5::5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'2001:db8:cdc9:1b9::/64': {'index': {1: {'metric': '2219',
'next_hop': '2001:db8:20:1:5::5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'2001:db8:cdc9:121::/64': {'index': {1: {'metric': '2219',
'next_hop': '2001:db8:20:1:5::5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'2001:db8:31b9:144::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'2001:db8:31b9:169::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'2001:db8:31b9:190::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'2001:db8:31b9:1b9::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'2001:db8:31b9:121::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}}},
'route_distinguisher': '200:2'},
'vpnv6 unicast RD 300:1': {'default_vrf': 'none',
'prefixes': {'2001:db8:31b9:144::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'2001:db8:31b9:169::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'2001:db8:31b9:190::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'2001:db8:31b9:1b9::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'2001:db8:31b9:121::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}},
'route_distinguisher': '300:1'}}}}}}}
# Expected (golden) parser output: per-neighbor BGP route information keyed by
# instance -> vrf -> neighbor -> address_family.  Per-AFI entries carry summary
# counters (msg_rcvd/msg_sent/queues/tbl_ver) while the "RD x:y" entries carry
# 'advertised', 'received_routes' and 'routes' prefix tables.
BgpRoutesPerPeer = {
'instance': {'default': {'vrf': {'VRF1': {'neighbor': {'10.1.5.5': {'address_family': {'vpnv4 unicast': {'input_queue': 0,
'msg_rcvd': 11858,
'msg_sent': 11864,
'output_queue': 0,
'route_distinguisher': '200:1',
'state_pfxrcd': '5',
'tbl_ver': 47,
'up_down': '1w1d'}},
'remote_as': 200}}},
'VRF2': {'neighbor': {'2001:db8:20:1:5::5': {'address_family': {'vpnv6 unicast': {'input_queue': 0,
'msg_rcvd': 11858,
'msg_sent': 11864,
'output_queue': 0,
'route_distinguisher': '300:1',
'state_pfxrcd': '5',
'tbl_ver': 47,
'up_down': '1w1d'}},
'remote_as': 200}}},
'default': {'neighbor': {'10.16.2.2': {'address_family': {'vpnv4 unicast': {'input_queue': 0,
'msg_rcvd': 11875,
'msg_sent': 11874,
'output_queue': 0,
'state_pfxrcd': '10',
'tbl_ver': 47,
'up_down': '1w1d'},
'vpnv4 unicast RD 200:1': {'advertised': {'10.1.1.0/24': {'index': {1: {'froms': '10.186.5.5',
'next_hop': '10.4.1.1',
'origin_code': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}'}}},
'10.1.2.0/24': {'index': {1: {'froms': '10.186.5.5',
'next_hop': '10.4.1.1',
'origin_code': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}'}}}}},
'vpnv4 unicast RD 300:1': {'received_routes': {'10.169.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}},
'routes': {'10.169.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}}},
'vpnv4 unicast RD 400:1': {'received_routes': {'10.9.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}},
'routes': {'10.9.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}}},
'vpnv6 unicast': {'input_queue': 0,
'msg_rcvd': 11875,
'msg_sent': 11874,
'output_queue': 0,
'state_pfxrcd': '5',
'tbl_ver': 32,
'up_down': '1w1d'},
'vpnv6 unicast RD 200:1': {'advertised': {'2001:db8:cdc9:144::/64': {'index': {1: {'froms': '2001:db8:20:1:5::5',
'next_hop': '10.4.1.1',
'origin_code': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}'}}},
'2001:db8:cdc9:121::/64': {'index': {1: {'froms': '2001:db8:20:1:5::5',
'next_hop': '10.4.1.1',
'origin_code': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27017}'}}}}},
'vpnv6 unicast RD 300:1': {'received_routes': {'2001:db8:31b9:1b9::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'2001:db8:31b9:121::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}},
'routes': {'2001:db8:31b9:144::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'2001:db8:31b9:121::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}}},
'vpnv6 unicast RD 400:1': {'received_routes': {'2001:db8:a69:4c9::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'2001:db8:a69:484::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}},
'routes': {'2001:db8:a69:484::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}}}},
'remote_as': 100}}}}}}}
class etree_holder():
    """Fixture object exposing a pre-parsed openconfig-bgp XML reply.

    The parsed ElementTree root is available as the ``data_ele`` attribute,
    mimicking the shape of a NETCONF reply object so parsers/tests can
    consume it directly.
    """
    def __init__(self):
        # Golden XML payload: openconfig-bgp <global> config/state plus a
        # single neighbor (10.16.2.2) with the l3vpn-ipv4-unicast and
        # l3vpn-ipv6-unicast AFI-SAFIs.  The literal must stay exactly
        # as-is; tests compare against its parsed content.
        self.data_ele = ET.fromstring('''
        <data>
        <bgp xmlns="http://openconfig.net/yang/bgp">
         <global>
          <config>
           <as>100</as>
           <router-id>10.4.1.1</router-id>
          </config>
          <state>
           <as>100</as>
           <router-id>10.4.1.1</router-id>
           <total-paths>0</total-paths>
           <total-prefixes>0</total-prefixes>
          </state>
          <afi-safis>
           <afi-safi>
            <afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv4-unicast</afi-safi-name>
            <config>
             <afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv4-unicast</afi-safi-name>
             <enabled>true</enabled>
            </config>
            <state>
             <afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv4-unicast</afi-safi-name>
             <enabled>true</enabled>
             <total-paths>0</total-paths>
             <total-prefixes>0</total-prefixes>
            </state>
           </afi-safi>
           <afi-safi>
            <afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv6-unicast</afi-safi-name>
            <config>
             <afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv6-unicast</afi-safi-name>
             <enabled>true</enabled>
            </config>
            <state>
             <afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv6-unicast</afi-safi-name>
             <enabled>true</enabled>
             <total-paths>0</total-paths>
             <total-prefixes>0</total-prefixes>
            </state>
           </afi-safi>
          </afi-safis>
         </global>
         <neighbors>
          <neighbor>
           <neighbor-address>10.16.2.2</neighbor-address>
           <config>
            <neighbor-address>10.16.2.2</neighbor-address>
            <peer-as>100</peer-as>
           </config>
           <state>
            <neighbor-address>10.16.2.2</neighbor-address>
            <peer-as>100</peer-as>
            <queues>
             <input>0</input>
             <output>0</output>
            </queues>
            <session-state>IDLE</session-state>
            <messages>
             <sent>
              <NOTIFICATION>0</NOTIFICATION>
              <UPDATE>0</UPDATE>
             </sent>
             <received>
              <NOTIFICATION>0</NOTIFICATION>
              <UPDATE>0</UPDATE>
             </received>
            </messages>
           </state>
           <transport>
            <config>
             <local-address>Loopback0</local-address>
            </config>
            <state>
             <local-address>Loopback0</local-address>
             <local-port>0</local-port>
             <remote-address>10.16.2.2</remote-address>
             <remote-port>0</remote-port>
            </state>
           </transport>
           <afi-safis>
            <afi-safi>
             <afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv4-unicast</afi-safi-name>
             <config>
              <afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv4-unicast</afi-safi-name>
              <enabled>true</enabled>
             </config>
             <state>
              <afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv4-unicast</afi-safi-name>
              <enabled>true</enabled>
              <active>false</active>
              <prefixes>
               <received>0</received>
               <sent>0</sent>
              </prefixes>
             </state>
            </afi-safi>
            <afi-safi>
             <afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv6-unicast</afi-safi-name>
             <config>
              <afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv6-unicast</afi-safi-name>
              <enabled>true</enabled>
             </config>
             <state>
              <afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv6-unicast</afi-safi-name>
              <enabled>true</enabled>
              <active>false</active>
              <prefixes>
               <received>0</received>
               <sent>0</sent>
              </prefixes>
             </state>
            </afi-safi>
           </afi-safis>
           <timers>
            <state>
             <negotiated-hold-time>180</negotiated-hold-time>
            </state>
           </timers>
           <graceful-restart>
            <state>
             <peer-restart-time>120</peer-restart-time>
            </state>
           </graceful-restart>
          </neighbor>
         </neighbors>
        </bgp>
        </data>
        ''')
# Module-level fixture instance; consumers read the parsed tree via
# `yang_output.data_ele`.
yang_output = etree_holder()
| true
| true
|
f70c7f1ef25ad37c74319931832954fc4f96fb85
| 21,207
|
py
|
Python
|
mapexport.py
|
muffinjets/LADXR
|
bbd82a5b7bac015561bb6a4cfe1c5fa017f827f5
|
[
"MIT"
] | 13
|
2020-09-13T16:50:28.000Z
|
2022-03-22T20:49:54.000Z
|
mapexport.py
|
muffinjets/LADXR
|
bbd82a5b7bac015561bb6a4cfe1c5fa017f827f5
|
[
"MIT"
] | 10
|
2020-06-27T12:34:38.000Z
|
2022-01-03T12:15:42.000Z
|
mapexport.py
|
muffinjets/LADXR
|
bbd82a5b7bac015561bb6a4cfe1c5fa017f827f5
|
[
"MIT"
] | 18
|
2020-05-29T17:48:04.000Z
|
2022-02-08T03:36:08.000Z
|
import PIL.Image, PIL.ImageDraw
from roomEditor import RoomEditor, ObjectHorizontal, ObjectVertical, ObjectWarp
class RenderedMap:
    """A 10x8 grid of metatile ids representing one rendered room.

    The room starts filled with a uniform floor tile; objects from the
    ROM's room definition are then placed on top.  "Macro" object ids
    (doors, houses, entrances, ...) are expanded into their individual
    metatiles by placeObject().
    """
    # Bit flags for addWalls(); combine with |.
    WALL_UP = 0x01
    WALL_DOWN = 0x02
    WALL_LEFT = 0x04
    WALL_RIGHT = 0x08
    def __init__(self, floor_object, overworld=False):
        """Fill the whole 10x8 grid with *floor_object*.

        :param floor_object: metatile id used for every cell initially.
        :param overworld: selects the overworld macro-expansion rules in
            placeObject() (dungeon rules otherwise).
        """
        # Maps (x, y) -> metatile id.
        self.objects = {}
        self.overworld = overworld
        for y in range(8):
            for x in range(10):
                self.objects[(x, y)] = floor_object
    def addWalls(self, flags):
        """Draw border wall metatiles along the edges selected by *flags*."""
        for x in range(0, 10):
            if flags & RenderedMap.WALL_UP:
                self.placeObject(x, 0, 0x21)
            if flags & RenderedMap.WALL_DOWN:
                self.placeObject(x, 7, 0x22)
        for y in range(0, 8):
            if flags & RenderedMap.WALL_LEFT:
                self.placeObject(0, y, 0x23)
            if flags & RenderedMap.WALL_RIGHT:
                self.placeObject(9, y, 0x24)
        # Corner pieces where two selected walls meet.
        if flags & RenderedMap.WALL_LEFT and flags & RenderedMap.WALL_UP:
            self.placeObject(0, 0, 0x25)
        if flags & RenderedMap.WALL_RIGHT and flags & RenderedMap.WALL_UP:
            self.placeObject(9, 0, 0x26)
        if flags & RenderedMap.WALL_LEFT and flags & RenderedMap.WALL_DOWN:
            self.placeObject(0, 7, 0x27)
        if flags & RenderedMap.WALL_RIGHT and flags & RenderedMap.WALL_DOWN:
            self.placeObject(9, 7, 0x28)
    def placeObject(self, x, y, type_id):
        """Place object *type_id* at (x, y), expanding multi-tile macros.

        High object ids (>= 0xF5 on the overworld, >= 0xEC in dungeons)
        are macros expanded recursively into plain metatile ids; anything
        else is written directly into the grid (coordinates wrap via & 15).
        """
        if self.overworld:
            # NOTE(review): 0xF5 appears to be a 2x2 macro whose corner
            # tiles blend with what is already at each cell -- confirm
            # against the game's object definitions.
            if type_id == 0xF5:
                if self.getObject(x, y) in (0x28, 0x83, 0x90):
                    self.placeObject(x, y, 0x29)
                else:
                    self.placeObject(x, y, 0x25)
                if self.getObject(x + 1, y) in (0x27, 0x82, 0x90):
                    self.placeObject(x + 1, y, 0x2A)
                else:
                    self.placeObject(x + 1, y, 0x26)
                if self.getObject(x, y + 1) in (0x26, 0x2A):
                    self.placeObject(x, y + 1, 0x2A)
                elif self.getObject(x, y + 1) == 0x90:
                    self.placeObject(x, y + 1, 0x82)
                else:
                    self.placeObject(x, y + 1, 0x27)
                if self.getObject(x + 1, y + 1) in (0x25, 0x29):
                    self.placeObject(x + 1, y + 1, 0x29)
                elif self.getObject(x + 1, y + 1) == 0x90:
                    self.placeObject(x + 1, y + 1, 0x83)
                else:
                    self.placeObject(x + 1, y + 1, 0x28)
            elif type_id == 0xF6: # two door house
                self.placeObject(x + 0, y, 0x55)
                self.placeObject(x + 1, y, 0x5A)
                self.placeObject(x + 2, y, 0x5A)
                self.placeObject(x + 3, y, 0x5A)
                self.placeObject(x + 4, y, 0x56)
                self.placeObject(x + 0, y + 1, 0x57)
                self.placeObject(x + 1, y + 1, 0x59)
                self.placeObject(x + 2, y + 1, 0x59)
                self.placeObject(x + 3, y + 1, 0x59)
                self.placeObject(x + 4, y + 1, 0x58)
                self.placeObject(x + 0, y + 2, 0x5B)
                self.placeObject(x + 1, y + 2, 0xE2)
                self.placeObject(x + 2, y + 2, 0x5B)
                self.placeObject(x + 3, y + 2, 0xE2)
                self.placeObject(x + 4, y + 2, 0x5B)
            elif type_id == 0xF7: # large house
                self.placeObject(x + 0, y, 0x55)
                self.placeObject(x + 1, y, 0x5A)
                self.placeObject(x + 2, y, 0x56)
                self.placeObject(x + 0, y + 1, 0x57)
                self.placeObject(x + 1, y + 1, 0x59)
                self.placeObject(x + 2, y + 1, 0x58)
                self.placeObject(x + 0, y + 2, 0x5B)
                self.placeObject(x + 1, y + 2, 0xE2)
                self.placeObject(x + 2, y + 2, 0x5B)
            elif type_id == 0xF8: # catfish
                self.placeObject(x + 0, y, 0xB6)
                self.placeObject(x + 1, y, 0xB7)
                self.placeObject(x + 2, y, 0x66)
                self.placeObject(x + 0, y + 1, 0x67)
                self.placeObject(x + 1, y + 1, 0xE3)
                self.placeObject(x + 2, y + 1, 0x68)
            elif type_id == 0xF9: # palace door
                self.placeObject(x + 0, y, 0xA4)
                self.placeObject(x + 1, y, 0xA5)
                self.placeObject(x + 2, y, 0xA6)
                self.placeObject(x + 0, y + 1, 0xA7)
                self.placeObject(x + 1, y + 1, 0xE3)
                self.placeObject(x + 2, y + 1, 0xA8)
            elif type_id == 0xFA: # stone pig head
                self.placeObject(x + 0, y, 0xBB)
                self.placeObject(x + 1, y, 0xBC)
                self.placeObject(x + 0, y + 1, 0xBD)
                self.placeObject(x + 1, y + 1, 0xBE)
            elif type_id == 0xFB: # palmtree
                # At the right edge (x == 15) only the right half is drawn,
                # shifted down one row.
                if x == 15:
                    self.placeObject(x + 1, y + 1, 0xB7)
                    self.placeObject(x + 1, y + 2, 0xCE)
                else:
                    self.placeObject(x + 0, y, 0xB6)
                    self.placeObject(x + 0, y + 1, 0xCD)
                    self.placeObject(x + 1, y + 0, 0xB7)
                    self.placeObject(x + 1, y + 1, 0xCE)
            elif type_id == 0xFC: # square "hill with hole" (seen near lvl4 entrance)
                self.placeObject(x + 0, y, 0x2B)
                self.placeObject(x + 1, y, 0x2C)
                self.placeObject(x + 2, y, 0x2D)
                self.placeObject(x + 0, y + 1, 0x37)
                self.placeObject(x + 1, y + 1, 0xE8)
                self.placeObject(x + 2, y + 1, 0x38)
                self.placeObject(x - 1, y + 2, 0x0A)
                self.placeObject(x + 0, y + 2, 0x33)
                self.placeObject(x + 1, y + 2, 0x2F)
                self.placeObject(x + 2, y + 2, 0x34)
                self.placeObject(x + 0, y + 3, 0x0A)
                self.placeObject(x + 1, y + 3, 0x0A)
                self.placeObject(x + 2, y + 3, 0x0A)
                self.placeObject(x + 3, y + 3, 0x0A)
            elif type_id == 0xFD: # small house
                self.placeObject(x + 0, y, 0x52)
                self.placeObject(x + 1, y, 0x52)
                self.placeObject(x + 2, y, 0x52)
                self.placeObject(x + 0, y + 1, 0x5B)
                self.placeObject(x + 1, y + 1, 0xE2)
                self.placeObject(x + 2, y + 1, 0x5B)
            else:
                # Plain metatile; coordinates wrap so off-grid macro
                # spill-over stays inside the dict.
                self.objects[(x & 15), (y & 15)] = type_id
        else:
            # Dungeon macros: each door type expands into a horizontal or
            # vertical pair of metatiles.
            if type_id == 0xEC: # key door
                self.placeObject(x, y, 0x2D)
                self.placeObject(x + 1, y, 0x2E)
            elif type_id == 0xED:
                self.placeObject(x, y, 0x2F)
                self.placeObject(x + 1, y, 0x30)
            elif type_id == 0xEE:
                self.placeObject(x, y, 0x31)
                self.placeObject(x, y + 1, 0x32)
            elif type_id == 0xEF:
                self.placeObject(x, y, 0x33)
                self.placeObject(x, y + 1, 0x34)
            elif type_id == 0xF0: # closed door
                self.placeObject(x, y, 0x35)
                self.placeObject(x + 1, y, 0x36)
            elif type_id == 0xF1:
                self.placeObject(x, y, 0x37)
                self.placeObject(x + 1, y, 0x38)
            elif type_id == 0xF2:
                self.placeObject(x, y, 0x39)
                self.placeObject(x, y + 1, 0x3A)
            elif type_id == 0xF3:
                self.placeObject(x, y, 0x3B)
                self.placeObject(x, y + 1, 0x3C)
            elif type_id == 0xF4: # open door
                self.placeObject(x, y, 0x43)
                self.placeObject(x + 1, y, 0x44)
            elif type_id == 0xF5:
                self.placeObject(x, y, 0x8C)
                self.placeObject(x + 1, y, 0x08)
            elif type_id == 0xF6:
                self.placeObject(x, y, 0x09)
                self.placeObject(x, y + 1, 0x0A)
            elif type_id == 0xF7:
                self.placeObject(x, y, 0x0B)
                self.placeObject(x, y + 1, 0x0C)
            elif type_id == 0xF8: # boss door
                self.placeObject(x, y, 0xA4)
                self.placeObject(x + 1, y, 0xA5)
            elif type_id == 0xF9: # stairs door
                self.placeObject(x, y, 0xAF)
                self.placeObject(x + 1, y, 0xB0)
            elif type_id == 0xFA: # flipwall
                self.placeObject(x, y, 0xB1)
                self.placeObject(x + 1, y, 0xB2)
            elif type_id == 0xFB: # one way arrow
                self.placeObject(x, y, 0x45)
                self.placeObject(x + 1, y, 0x46)
            elif type_id == 0xFC: # entrance
                self.placeObject(x + 0, y, 0xB3)
                self.placeObject(x + 1, y, 0xB4)
                self.placeObject(x + 2, y, 0xB4)
                self.placeObject(x + 3, y, 0xB5)
                self.placeObject(x + 0, y + 1, 0xB6)
                self.placeObject(x + 1, y + 1, 0xB7)
                self.placeObject(x + 2, y + 1, 0xB8)
                self.placeObject(x + 3, y + 1, 0xB9)
                self.placeObject(x + 0, y + 2, 0xBA)
                self.placeObject(x + 1, y + 2, 0xBB)
                self.placeObject(x + 2, y + 2, 0xBC)
                self.placeObject(x + 3, y + 2, 0xBD)
            elif type_id == 0xFD: # entrance
                self.placeObject(x, y, 0xC1)
                self.placeObject(x + 1, y, 0xC2)
            else:
                self.objects[(x & 15), (y & 15)] = type_id
    def getObject(self, x, y):
        """Return the metatile id at (x, y) (wrapped with & 15), or None."""
        return self.objects.get(((x & 15), (y & 15)), None)
class MapExport:
    def __init__(self, rom):
        """Render every map in *rom* and write PNGs plus a test.html report.

        Produces overworld.png, dungeon_<n>.png for each dungeon map,
        caves1.png/caves2.png for the remaining indoor rooms, and a set of
        metatile sheets, all referenced from test.html.
        """
        self.__rom = rom
        # 8x8 tile banks used to compose metatiles, keyed by bank number.
        self.__tiles = {
            0x0C: self.getTiles(0x0C),
            0x0D: self.getTiles(0x0D),
            0x0F: self.getTiles(0x0F),
            0x12: self.getTiles(0x12),
        }
        # room number -> (x, y, dungeon index) for rooms placed on a dungeon map.
        self.__room_map_info = {}
        # NOTE(review): file handle is closed manually at the end; a `with`
        # block would be safer if an exception occurs mid-export.
        f = open("test.html", "wt")
        # Overworld: 16x16 grid of rooms, each room 20x16 tiles of 8x8 pixels.
        result = PIL.Image.new("L", (16 * 20 * 8, 16 * 16 * 8))
        for n in range(0x100):
            x = n % 0x10
            y = n // 0x10
            result.paste(self.exportRoom(n), (x * 20 * 8, y * 16 * 8))
        result.save("overworld.png")
        f.write("<img src='overworld.png'><br><br>")
        # Metatile sheets: main set, one per alternate sub-tileset, and one
        # per animation frame set.
        self.exportMetaTiles(f, "metatiles_main.png", 0x0F, 0, lambda n: n >= 32 and (n < 0x6C or n >= 0x70))
        for n in (0x1A, 0x1C, 0x1E, 0x20, 0x22, 0x24, 0x26, 0x28, 0x2A, 0x2C, 0x2E, 0x30, 0x32, 0x34, 0x36, 0x38, 0x3A, 0x3C, 0x3E):
            self.exportMetaTiles(f, "metatiles_%02x.png" % (n), n, 0, lambda n: n < 32)
        for n in range(2, 17):
            self.exportMetaTiles(f, "metatiles_anim_%02x.png" % (n), 0x0F, n, lambda n: n >= 0x6C and n < 0x70)
        # Dungeon maps: each map is an 8x8 layout of room numbers stored in
        # bank 0x14; maps 6+ and the color dungeon (11) live in higher room banks.
        for n in (0,1,2,3,4,5,6,7, 10, 11):
            addr = 0x0220 + n * 8 * 8
            result = PIL.Image.new("L", (8 * 20 * 8, 8 * 16 * 8))
            for y in range(8):
                for x in range(8):
                    room = rom.banks[0x14][addr] + 0x100
                    if n > 5:
                        room += 0x100
                    if n == 11:
                        room += 0x100
                    addr += 1
                    if (room & 0xFF) == 0 and (n != 11 or x != 1 or y != 3): # ignore room nr 0, except on a very specific spot in the color dungeon.
                        continue
                    self.__room_map_info[room] = (x, y, n)
                    result.paste(self.exportRoom(room), (x * 20 * 8, y * 16 * 8))
            result.save("dungeon_%d.png" % (n))
            f.write("<img src='dungeon_%d.png'><br><br>" % (n))
        # Remaining indoor rooms not claimed by a dungeon map, bank 0x100...
        result = PIL.Image.new("L", (16 * 20 * 8, 16 * 16 * 8))
        for n in range(0x100):
            if n + 0x100 in self.__room_map_info:
                continue
            x = n % 0x10
            y = n // 0x10
            result.paste(self.exportRoom(n + 0x100), (x * 20 * 8, y * 16 * 8))
        result.save("caves1.png")
        f.write("<img src='caves1.png'><br><br>")
        # ...and bank 0x200.  NOTE(review): range(0x0FF) skips room 0x2FF,
        # unlike the 0x100 loop above -- presumably that room does not
        # exist; confirm before "fixing".
        result = PIL.Image.new("L", (16 * 20 * 8, 16 * 16 * 8))
        for n in range(0x0FF):
            if n + 0x200 in self.__room_map_info:
                continue
            x = n % 0x10
            y = n // 0x10
            result.paste(self.exportRoom(n + 0x200), (x * 20 * 8, y * 16 * 8))
        result.save("caves2.png")
        f.write("<img src='caves2.png'>")
        f.close()
def exportMetaTiles(self, f, name, main_set, animation_set, condition_func):
condition = lambda n: condition_func(n) and (n < 0x80 or n >= 0xF0)
metatile_info_offset = self.__rom.banks[0x1A].find(b'\x7C\x7C\x7C\x7C\x7D\x7D\x7D\x7D')
metatile_info = self.__rom.banks[0x1A][metatile_info_offset:metatile_info_offset + 0x100 * 4]
result = PIL.Image.new("L", (16 * 16, 16 * 16))
sub_tileset_offset = main_set * 0x10
tilemap = self.__tiles[0x0f][sub_tileset_offset:sub_tileset_offset+0x20]
tilemap += self.__tiles[0x0c][0x120:0x180]
tilemap += self.__tiles[0x0c][0x080:0x100]
addr = (0x000, 0x000, 0x2B0, 0x2C0, 0x2D0, 0x2E0, 0x2F0, 0x2D0, 0x300, 0x310, 0x320, 0x2A0, 0x330, 0x350, 0x360, 0x340, 0x370)[animation_set]
tilemap[0x6C:0x70] = self.__tiles[0x0c][addr:addr+4]
for x in range(16):
for y in range(16):
obj = x + y * 16
if condition(metatile_info[obj*4+0]):
result.paste(tilemap[metatile_info[obj*4+0]], (x*16+0, y*16+0))
if condition(metatile_info[obj*4+1]):
result.paste(tilemap[metatile_info[obj*4+1]], (x*16+8, y*16+0))
if condition(metatile_info[obj*4+2]):
result.paste(tilemap[metatile_info[obj*4+2]], (x*16+0, y*16+8))
if condition(metatile_info[obj*4+3]):
result.paste(tilemap[metatile_info[obj*4+3]], (x*16+8, y*16+8))
result.save(name)
f.write("%s<br><img src='%s'><br><br>" % (name, name))
    def exportRoom(self, room_nr):
        """Render one room (overworld if room_nr < 0x100, else indoor) to a PIL image.

        Replays the room object list into a RenderedMap, expands the 10x8
        object grid to 20x16 8x8 tiles, then draws entities and warp info.
        NOTE(review): indentation reconstructed from control flow — confirm
        nesting (especially the indoor tileset/animation handling) against
        the original source.
        """
        re = RoomEditor(self.__rom, room_nr)
        # Metatile -> 4 tile-index table, found by signature in the relevant bank.
        if room_nr < 0x100:
            tile_info_offset = self.__rom.banks[0x1A].find(b'\x7C\x7C\x7C\x7C\x7D\x7D\x7D\x7D')
            tile_info = self.__rom.banks[0x1A][tile_info_offset:tile_info_offset + 0x100 * 4]
        else:
            tile_info_offset = self.__rom.banks[0x08].find(b'\x7F\x7F\x7F\x7F\x7E\x7E\x7E\x7E')
            tile_info = self.__rom.banks[0x08][tile_info_offset:tile_info_offset+0x100*4]
        if room_nr >= 0x100:
            # Indoors: low nibble of floor_object is the floor tile.
            rendered_map = RenderedMap(re.floor_object & 0x0F)
        else:
            rendered_map = RenderedMap(re.floor_object, True)

        def objHSize(type_id):
            # Horizontal step per repeat; 0xF5 spans two columns.
            if type_id == 0xF5:
                return 2
            return 1

        def objVSize(type_id):
            # Vertical step per repeat; 0xF5 spans two rows.
            if type_id == 0xF5:
                return 2
            return 1

        # Indoors: high nibble of floor_object selects which walls to draw.
        if room_nr >= 0x100:
            if re.floor_object & 0xF0 == 0x00:
                rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_RIGHT | RenderedMap.WALL_UP | RenderedMap.WALL_DOWN)
            if re.floor_object & 0xF0 == 0x10:
                rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_RIGHT | RenderedMap.WALL_DOWN)
            if re.floor_object & 0xF0 == 0x20:
                rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_UP | RenderedMap.WALL_DOWN)
            if re.floor_object & 0xF0 == 0x30:
                rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_RIGHT | RenderedMap.WALL_UP)
            if re.floor_object & 0xF0 == 0x40:
                rendered_map.addWalls(RenderedMap.WALL_RIGHT | RenderedMap.WALL_UP | RenderedMap.WALL_DOWN)
            if re.floor_object & 0xF0 == 0x50:
                rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_DOWN)
            if re.floor_object & 0xF0 == 0x60:
                rendered_map.addWalls(RenderedMap.WALL_RIGHT | RenderedMap.WALL_DOWN)
            if re.floor_object & 0xF0 == 0x70:
                rendered_map.addWalls(RenderedMap.WALL_RIGHT | RenderedMap.WALL_UP)
            if re.floor_object & 0xF0 == 0x80:
                rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_UP)
        # Replay the object list; warps are drawn later as text.
        for obj in re.objects:
            if isinstance(obj, ObjectWarp):
                pass
            elif isinstance(obj, ObjectHorizontal):
                for n in range(0, obj.count):
                    rendered_map.placeObject(obj.x + n * objHSize(obj.type_id), obj.y, obj.type_id)
            elif isinstance(obj, ObjectVertical):
                for n in range(0, obj.count):
                    rendered_map.placeObject(obj.x, obj.y + n * objVSize(obj.type_id), obj.type_id)
            else:
                rendered_map.placeObject(obj.x, obj.y, obj.type_id)
        # Expand the 10x8 object grid into a 20x16 grid of 8x8 tile indices.
        tiles = [0] * 20 * 16
        for y in range(8):
            for x in range(10):
                obj = rendered_map.objects[(x, y)]
                tiles[x*2 + y*2*20] = tile_info[obj*4]
                tiles[x*2+1 + y*2*20] = tile_info[obj*4+1]
                tiles[x*2 + (y*2+1)*20] = tile_info[obj*4+2]
                tiles[x*2+1 + (y*2+1)*20] = tile_info[obj*4+3]
        # Build the 0x100-entry tile index -> image map for this room.
        if room_nr < 0x100:
            sub_tileset_offset = self.__rom.banks[0x20][0x2E73 + (room_nr & 0x0F) // 2 + ((room_nr >> 5) * 8)] << 4
            tilemap = self.__tiles[0x0f][sub_tileset_offset:sub_tileset_offset+0x20]
            tilemap += self.__tiles[0x0c][0x120:0x180]
            tilemap += self.__tiles[0x0c][0x080:0x100]
        else:
            # TODO: The whole indoor tileset loading seems complex...
            tileset_nr = self.__rom.banks[0x20][0x2eB3 + room_nr - 0x100]
            tilemap = [None] * 0x100
            tilemap[0x20:0x80] = self.__tiles[0x0D][0x000:0x060]
            if tileset_nr != 0xFF:
                tilemap[0x00:0x10] = self.__tiles[0x0D][0x100 + tileset_nr * 0x10:0x110 + tileset_nr * 0x10]
            tilemap[0x10:0x20] = self.__tiles[0x0D][0x210:0x220]
            tilemap[0xF0:0x100] = self.__tiles[0x12][0x380:0x390]
        # Animated tiles occupy slots 0x6C..0x6F; pick the frame source address.
        if re.animation_id == 2:
            addr = 0x2B0
        elif re.animation_id == 3:
            addr = 0x2C0
        elif re.animation_id == 4:
            addr = 0x2D0
        elif re.animation_id == 5:
            addr = 0x2E0
        elif re.animation_id == 6:
            addr = 0x2F0
        elif re.animation_id == 7:
            addr = 0x2D0
        elif re.animation_id == 8:
            addr = 0x300
        elif re.animation_id == 9:
            addr = 0x310
        elif re.animation_id == 10:
            addr = 0x320
        elif re.animation_id == 11:
            addr = 0x2A0
        elif re.animation_id == 12:
            addr = 0x330
        elif re.animation_id == 13:
            addr = 0x350
        elif re.animation_id == 14:
            addr = 0x360
        elif re.animation_id == 15:
            addr = 0x340
        elif re.animation_id == 16:
            addr = 0x370
        else:
            # Unknown animation id: log it and blank the animated slots.
            print(hex(room_nr), re.animation_id)
            addr = 0x000
        tilemap[0x6C:0x70] = self.__tiles[0x0c][addr:addr+4]
        assert len(tilemap) == 0x100
        result = PIL.Image.new('L', (8 * 20, 8 * 16))
        draw = PIL.ImageDraw.Draw(result)
        for y in range(16):
            for x in range(20):
                tile = tilemap[tiles[x+y*20]]
                if tile is not None:
                    result.paste(tile, (x * 8, y * 8))
        # Collect positions of warp-capable objects (doors, holes, stairs).
        warp_pos = []
        for y in range(8):
            for x in range(10):
                if rendered_map.objects[(x, y)] in (0xE1, 0xE2, 0xE3, 0xBA, 0xD5, 0xA8, 0xBE, 0xCB):
                    warp_pos.append((x, y))
        # Overlay each entity as a box labelled with its type id.
        for x, y, type_id in re.entities:
            draw.rectangle([(x * 16, y * 16), (x * 16 + 15, y * 16 + 15)], outline=0)
            draw.text((x * 16 + 3, y * 16 + 2), "%02X" % (type_id))
        # List warp targets as text down the left edge.
        y = 8
        for obj in re.objects:
            if isinstance(obj, ObjectWarp):
                draw.text((8, y), "W%d:%02x:%03x:%d,%d" % (obj.warp_type, obj.map_nr, obj.room, obj.target_x, obj.target_y))
                y += 16
        return result
def getTiles(self, bank_nr):
bank = self.__rom.banks[bank_nr]
buffer = bytearray(b'\x00' * 16 * 16)
result = []
for n in range(0, len(bank), 16):
for y in range(8):
a = bank[n + y * 2]
b = bank[n + y * 2 + 1]
for x in range(8):
v = 0x3F
if not a & (0x80 >> x):
v |= 0x40
if not b & (0x80 >> x):
v |= 0x80
buffer[x+y*8] = v
result.append(PIL.Image.frombytes('L', (8, 8), bytes(buffer)))
return result
| 46.002169
| 151
| 0.486443
|
import PIL.Image, PIL.ImageDraw
from roomEditor import RoomEditor, ObjectHorizontal, ObjectVertical, ObjectWarp
class RenderedMap:
    """A 10x8 grid of object (metatile) ids for one room.

    Starts filled with the floor object; placeObject replays the room's
    object list, expanding composite types (0xEC..0xFD) into their base
    tiles the same way the game's room decoder does.
    NOTE(review): indentation reconstructed from control flow — confirm
    nesting against the original source.
    """
    # Bit flags accepted by addWalls().
    WALL_UP = 0x01
    WALL_DOWN = 0x02
    WALL_LEFT = 0x04
    WALL_RIGHT = 0x08

    def __init__(self, floor_object, overworld=False):
        # (x, y) -> object id; every cell starts as the floor object.
        self.objects = {}
        self.overworld = overworld
        for y in range(8):
            for x in range(10):
                self.objects[(x, y)] = floor_object

    def addWalls(self, flags):
        """Place wall tiles along each flagged edge, plus matching corners."""
        for x in range(0, 10):
            if flags & RenderedMap.WALL_UP:
                self.placeObject(x, 0, 0x21)
            if flags & RenderedMap.WALL_DOWN:
                self.placeObject(x, 7, 0x22)
        for y in range(0, 8):
            if flags & RenderedMap.WALL_LEFT:
                self.placeObject(0, y, 0x23)
            if flags & RenderedMap.WALL_RIGHT:
                self.placeObject(9, y, 0x24)
        # Corner tiles where two flagged walls meet.
        if flags & RenderedMap.WALL_LEFT and flags & RenderedMap.WALL_UP:
            self.placeObject(0, 0, 0x25)
        if flags & RenderedMap.WALL_RIGHT and flags & RenderedMap.WALL_UP:
            self.placeObject(9, 0, 0x26)
        if flags & RenderedMap.WALL_LEFT and flags & RenderedMap.WALL_DOWN:
            self.placeObject(0, 7, 0x27)
        if flags & RenderedMap.WALL_RIGHT and flags & RenderedMap.WALL_DOWN:
            self.placeObject(9, 7, 0x28)

    def placeObject(self, x, y, type_id):
        """Place an object id at (x, y), expanding composite overworld/indoor
        types into their constituent tiles via recursive placement."""
        if self.overworld:
            if type_id == 0xF5:
                # 2x2 "open door" expansion; corner choice depends on what is
                # already at each cell (blends with existing roof tiles).
                if self.getObject(x, y) in (0x28, 0x83, 0x90):
                    self.placeObject(x, y, 0x29)
                else:
                    self.placeObject(x, y, 0x25)
                if self.getObject(x + 1, y) in (0x27, 0x82, 0x90):
                    self.placeObject(x + 1, y, 0x2A)
                else:
                    self.placeObject(x + 1, y, 0x26)
                if self.getObject(x, y + 1) in (0x26, 0x2A):
                    self.placeObject(x, y + 1, 0x2A)
                elif self.getObject(x, y + 1) == 0x90:
                    self.placeObject(x, y + 1, 0x82)
                else:
                    self.placeObject(x, y + 1, 0x27)
                if self.getObject(x + 1, y + 1) in (0x25, 0x29):
                    self.placeObject(x + 1, y + 1, 0x29)
                elif self.getObject(x + 1, y + 1) == 0x90:
                    self.placeObject(x + 1, y + 1, 0x83)
                else:
                    self.placeObject(x + 1, y + 1, 0x28)
            elif type_id == 0xF6:
                # 5x3 building with door.
                self.placeObject(x + 0, y, 0x55)
                self.placeObject(x + 1, y, 0x5A)
                self.placeObject(x + 2, y, 0x5A)
                self.placeObject(x + 3, y, 0x5A)
                self.placeObject(x + 4, y, 0x56)
                self.placeObject(x + 0, y + 1, 0x57)
                self.placeObject(x + 1, y + 1, 0x59)
                self.placeObject(x + 2, y + 1, 0x59)
                self.placeObject(x + 3, y + 1, 0x59)
                self.placeObject(x + 4, y + 1, 0x58)
                self.placeObject(x + 0, y + 2, 0x5B)
                self.placeObject(x + 1, y + 2, 0xE2)
                self.placeObject(x + 2, y + 2, 0x5B)
                self.placeObject(x + 3, y + 2, 0xE2)
                self.placeObject(x + 4, y + 2, 0x5B)
            elif type_id == 0xF7:
                # 3x3 building.
                self.placeObject(x + 0, y, 0x55)
                self.placeObject(x + 1, y, 0x5A)
                self.placeObject(x + 2, y, 0x56)
                self.placeObject(x + 0, y + 1, 0x57)
                self.placeObject(x + 1, y + 1, 0x59)
                self.placeObject(x + 2, y + 1, 0x58)
                self.placeObject(x + 0, y + 2, 0x5B)
                self.placeObject(x + 1, y + 2, 0xE2)
                self.placeObject(x + 2, y + 2, 0x5B)
            elif type_id == 0xF8:
                # 3x2 structure with door.
                self.placeObject(x + 0, y, 0xB6)
                self.placeObject(x + 1, y, 0xB7)
                self.placeObject(x + 2, y, 0x66)
                self.placeObject(x + 0, y + 1, 0x67)
                self.placeObject(x + 1, y + 1, 0xE3)
                self.placeObject(x + 2, y + 1, 0x68)
            elif type_id == 0xF9:
                # 3x2 structure with door.
                self.placeObject(x + 0, y, 0xA4)
                self.placeObject(x + 1, y, 0xA5)
                self.placeObject(x + 2, y, 0xA6)
                self.placeObject(x + 0, y + 1, 0xA7)
                self.placeObject(x + 1, y + 1, 0xE3)
                self.placeObject(x + 2, y + 1, 0xA8)
            elif type_id == 0xFA:
                # 2x2 structure.
                self.placeObject(x + 0, y, 0xBB)
                self.placeObject(x + 1, y, 0xBC)
                self.placeObject(x + 0, y + 1, 0xBD)
                self.placeObject(x + 1, y + 1, 0xBE)
            elif type_id == 0xFB:
                # 2x2 structure; clipped variant at the right map edge.
                if x == 15:
                    self.placeObject(x + 1, y + 1, 0xB7)
                    self.placeObject(x + 1, y + 2, 0xCE)
                else:
                    self.placeObject(x + 0, y, 0xB6)
                    self.placeObject(x + 0, y + 1, 0xCD)
                    self.placeObject(x + 1, y + 0, 0xB7)
                    self.placeObject(x + 1, y + 1, 0xCE)
            elif type_id == 0xFC:
                # Large 4-row structure with surrounding ground tiles.
                self.placeObject(x + 0, y, 0x2B)
                self.placeObject(x + 1, y, 0x2C)
                self.placeObject(x + 2, y, 0x2D)
                self.placeObject(x + 0, y + 1, 0x37)
                self.placeObject(x + 1, y + 1, 0xE8)
                self.placeObject(x + 2, y + 1, 0x38)
                self.placeObject(x - 1, y + 2, 0x0A)
                self.placeObject(x + 0, y + 2, 0x33)
                self.placeObject(x + 1, y + 2, 0x2F)
                self.placeObject(x + 2, y + 2, 0x34)
                self.placeObject(x + 0, y + 3, 0x0A)
                self.placeObject(x + 1, y + 3, 0x0A)
                self.placeObject(x + 2, y + 3, 0x0A)
                self.placeObject(x + 3, y + 3, 0x0A)
            elif type_id == 0xFD:
                # 3x2 structure with door.
                self.placeObject(x + 0, y, 0x52)
                self.placeObject(x + 1, y, 0x52)
                self.placeObject(x + 2, y, 0x52)
                self.placeObject(x + 0, y + 1, 0x5B)
                self.placeObject(x + 1, y + 1, 0xE2)
                self.placeObject(x + 2, y + 1, 0x5B)
            else:
                # Base tile: store directly (coordinates wrap at 16).
                self.objects[(x & 15), (y & 15)] = type_id
        else:
            # Indoor composite types: mostly 1x2 / 2x1 pairs.
            if type_id == 0xEC:
                self.placeObject(x, y, 0x2D)
                self.placeObject(x + 1, y, 0x2E)
            elif type_id == 0xED:
                self.placeObject(x, y, 0x2F)
                self.placeObject(x + 1, y, 0x30)
            elif type_id == 0xEE:
                self.placeObject(x, y, 0x31)
                self.placeObject(x, y + 1, 0x32)
            elif type_id == 0xEF:
                self.placeObject(x, y, 0x33)
                self.placeObject(x, y + 1, 0x34)
            elif type_id == 0xF0:
                self.placeObject(x, y, 0x35)
                self.placeObject(x + 1, y, 0x36)
            elif type_id == 0xF1:
                self.placeObject(x, y, 0x37)
                self.placeObject(x + 1, y, 0x38)
            elif type_id == 0xF2:
                self.placeObject(x, y, 0x39)
                self.placeObject(x, y + 1, 0x3A)
            elif type_id == 0xF3:
                self.placeObject(x, y, 0x3B)
                self.placeObject(x, y + 1, 0x3C)
            elif type_id == 0xF4:
                self.placeObject(x, y, 0x43)
                self.placeObject(x + 1, y, 0x44)
            elif type_id == 0xF5:
                self.placeObject(x, y, 0x8C)
                self.placeObject(x + 1, y, 0x08)
            elif type_id == 0xF6:
                self.placeObject(x, y, 0x09)
                self.placeObject(x, y + 1, 0x0A)
            elif type_id == 0xF7:
                self.placeObject(x, y, 0x0B)
                self.placeObject(x, y + 1, 0x0C)
            elif type_id == 0xF8:
                self.placeObject(x, y, 0xA4)
                self.placeObject(x + 1, y, 0xA5)
            elif type_id == 0xF9:
                self.placeObject(x, y, 0xAF)
                self.placeObject(x + 1, y, 0xB0)
            elif type_id == 0xFA:
                self.placeObject(x, y, 0xB1)
                self.placeObject(x + 1, y, 0xB2)
            elif type_id == 0xFB:
                self.placeObject(x, y, 0x45)
                self.placeObject(x + 1, y, 0x46)
            elif type_id == 0xFC:
                # 4x3 indoor structure.
                self.placeObject(x + 0, y, 0xB3)
                self.placeObject(x + 1, y, 0xB4)
                self.placeObject(x + 2, y, 0xB4)
                self.placeObject(x + 3, y, 0xB5)
                self.placeObject(x + 0, y + 1, 0xB6)
                self.placeObject(x + 1, y + 1, 0xB7)
                self.placeObject(x + 2, y + 1, 0xB8)
                self.placeObject(x + 3, y + 1, 0xB9)
                self.placeObject(x + 0, y + 2, 0xBA)
                self.placeObject(x + 1, y + 2, 0xBB)
                self.placeObject(x + 2, y + 2, 0xBC)
                self.placeObject(x + 3, y + 2, 0xBD)
            elif type_id == 0xFD:
                self.placeObject(x, y, 0xC1)
                self.placeObject(x + 1, y, 0xC2)
            else:
                # Base tile: store directly (coordinates wrap at 16).
                self.objects[(x & 15), (y & 15)] = type_id

    def getObject(self, x, y):
        """Return the object id at (x, y) with 16-wrap, or None if unset."""
        return self.objects.get(((x & 15), (y & 15)), None)
class MapExport:
    """Render every room of the ROM to PNG files plus a test.html index.

    All rendering happens in __init__: overworld sheet, metatile sheets,
    dungeon maps, and two 'caves' sheets for remaining indoor rooms.
    NOTE(review): indentation reconstructed from control flow — confirm
    nesting against the original source.
    """

    def __init__(self, rom):
        self.__rom = rom
        # Pre-decoded 8x8 tile images per ROM graphics bank.
        self.__tiles = {
            0x0C: self.getTiles(0x0C),
            0x0D: self.getTiles(0x0D),
            0x0F: self.getTiles(0x0F),
            0x12: self.getTiles(0x12),
        }
        # room number -> (x, y, dungeon index) for rooms placed on a dungeon map.
        self.__room_map_info = {}
        f = open("test.html", "wt")
        # 16x16 grid of all 256 overworld rooms.
        result = PIL.Image.new("L", (16 * 20 * 8, 16 * 16 * 8))
        for n in range(0x100):
            x = n % 0x10
            y = n // 0x10
            result.paste(self.exportRoom(n), (x * 20 * 8, y * 16 * 8))
        result.save("overworld.png")
        f.write("<img src='overworld.png'><br><br>")
        self.exportMetaTiles(f, "metatiles_main.png", 0x0F, 0, lambda n: n >= 32 and (n < 0x6C or n >= 0x70))
        for n in (0x1A, 0x1C, 0x1E, 0x20, 0x22, 0x24, 0x26, 0x28, 0x2A, 0x2C, 0x2E, 0x30, 0x32, 0x34, 0x36, 0x38, 0x3A, 0x3C, 0x3E):
            self.exportMetaTiles(f, "metatiles_%02x.png" % (n), n, 0, lambda n: n < 32)
        for n in range(2, 17):
            self.exportMetaTiles(f, "metatiles_anim_%02x.png" % (n), 0x0F, n, lambda n: n >= 0x6C and n < 0x70)
        # Dungeon maps: 8x8 room grids read from bank 0x14.
        for n in (0,1,2,3,4,5,6,7, 10, 11):
            addr = 0x0220 + n * 8 * 8
            result = PIL.Image.new("L", (8 * 20 * 8, 8 * 16 * 8))
            for y in range(8):
                for x in range(8):
                    room = rom.banks[0x14][addr] + 0x100
                    # Later map sets index higher room banks.
                    if n > 5:
                        room += 0x100
                    if n == 11:
                        room += 0x100
                    addr += 1
                    # Skip empty slots (except one special room in map 11).
                    if (room & 0xFF) == 0 and (n != 11 or x != 1 or y != 3):
                        continue
                    self.__room_map_info[room] = (x, y, n)
                    result.paste(self.exportRoom(room), (x * 20 * 8, y * 16 * 8))
            result.save("dungeon_%d.png" % (n))
            f.write("<img src='dungeon_%d.png'><br><br>" % (n))
        # Indoor rooms 0x100..0x1FF not already placed on a dungeon map.
        result = PIL.Image.new("L", (16 * 20 * 8, 16 * 16 * 8))
        for n in range(0x100):
            if n + 0x100 in self.__room_map_info:
                continue
            x = n % 0x10
            y = n // 0x10
            result.paste(self.exportRoom(n + 0x100), (x * 20 * 8, y * 16 * 8))
        result.save("caves1.png")
        f.write("<img src='caves1.png'><br><br>")
        # Indoor rooms 0x200..0x2FE not already placed on a dungeon map.
        result = PIL.Image.new("L", (16 * 20 * 8, 16 * 16 * 8))
        for n in range(0x0FF):
            if n + 0x200 in self.__room_map_info:
                continue
            x = n % 0x10
            y = n // 0x10
            result.paste(self.exportRoom(n + 0x200), (x * 20 * 8, y * 16 * 8))
        result.save("caves2.png")
        f.write("<img src='caves2.png'>")
        f.close()

    def exportMetaTiles(self, f, name, main_set, animation_set, condition_func):
        """Render a 16x16 sheet of all 256 metatiles to `name` and link it in `f`.

        main_set selects the sub-tileset; animation_set selects frame data
        for tile slots 0x6C..0x6F; condition_func filters drawn tile indices.
        """
        # Extra filter: only draw tile indices outside 0x80..0xEF.
        condition = lambda n: condition_func(n) and (n < 0x80 or n >= 0xF0)
        # Metatile -> 4 tile-index table, located by an 8-byte signature.
        metatile_info_offset = self.__rom.banks[0x1A].find(b'\x7C\x7C\x7C\x7C\x7D\x7D\x7D\x7D')
        metatile_info = self.__rom.banks[0x1A][metatile_info_offset:metatile_info_offset + 0x100 * 4]
        result = PIL.Image.new("L", (16 * 16, 16 * 16))
        sub_tileset_offset = main_set * 0x10
        tilemap = self.__tiles[0x0f][sub_tileset_offset:sub_tileset_offset+0x20]
        tilemap += self.__tiles[0x0c][0x120:0x180]
        tilemap += self.__tiles[0x0c][0x080:0x100]
        # Per-animation-set source address for the 4 animated tiles.
        addr = (0x000, 0x000, 0x2B0, 0x2C0, 0x2D0, 0x2E0, 0x2F0, 0x2D0, 0x300, 0x310, 0x320, 0x2A0, 0x330, 0x350, 0x360, 0x340, 0x370)[animation_set]
        tilemap[0x6C:0x70] = self.__tiles[0x0c][addr:addr+4]
        for x in range(16):
            for y in range(16):
                obj = x + y * 16
                # Each metatile is four 8x8 tiles (TL, TR, BL, BR).
                if condition(metatile_info[obj*4+0]):
                    result.paste(tilemap[metatile_info[obj*4+0]], (x*16+0, y*16+0))
                if condition(metatile_info[obj*4+1]):
                    result.paste(tilemap[metatile_info[obj*4+1]], (x*16+8, y*16+0))
                if condition(metatile_info[obj*4+2]):
                    result.paste(tilemap[metatile_info[obj*4+2]], (x*16+0, y*16+8))
                if condition(metatile_info[obj*4+3]):
                    result.paste(tilemap[metatile_info[obj*4+3]], (x*16+8, y*16+8))
        result.save(name)
        f.write("%s<br><img src='%s'><br><br>" % (name, name))

    def exportRoom(self, room_nr):
        """Render one room (overworld if room_nr < 0x100, else indoor) to a PIL image."""
        re = RoomEditor(self.__rom, room_nr)
        # Metatile -> 4 tile-index table, found by signature in the relevant bank.
        if room_nr < 0x100:
            tile_info_offset = self.__rom.banks[0x1A].find(b'\x7C\x7C\x7C\x7C\x7D\x7D\x7D\x7D')
            tile_info = self.__rom.banks[0x1A][tile_info_offset:tile_info_offset + 0x100 * 4]
        else:
            tile_info_offset = self.__rom.banks[0x08].find(b'\x7F\x7F\x7F\x7F\x7E\x7E\x7E\x7E')
            tile_info = self.__rom.banks[0x08][tile_info_offset:tile_info_offset+0x100*4]
        if room_nr >= 0x100:
            # Indoors: low nibble of floor_object is the floor tile.
            rendered_map = RenderedMap(re.floor_object & 0x0F)
        else:
            rendered_map = RenderedMap(re.floor_object, True)

        def objHSize(type_id):
            # Horizontal step per repeat; 0xF5 spans two columns.
            if type_id == 0xF5:
                return 2
            return 1

        def objVSize(type_id):
            # Vertical step per repeat; 0xF5 spans two rows.
            if type_id == 0xF5:
                return 2
            return 1

        # Indoors: high nibble of floor_object selects which walls to draw.
        if room_nr >= 0x100:
            if re.floor_object & 0xF0 == 0x00:
                rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_RIGHT | RenderedMap.WALL_UP | RenderedMap.WALL_DOWN)
            if re.floor_object & 0xF0 == 0x10:
                rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_RIGHT | RenderedMap.WALL_DOWN)
            if re.floor_object & 0xF0 == 0x20:
                rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_UP | RenderedMap.WALL_DOWN)
            if re.floor_object & 0xF0 == 0x30:
                rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_RIGHT | RenderedMap.WALL_UP)
            if re.floor_object & 0xF0 == 0x40:
                rendered_map.addWalls(RenderedMap.WALL_RIGHT | RenderedMap.WALL_UP | RenderedMap.WALL_DOWN)
            if re.floor_object & 0xF0 == 0x50:
                rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_DOWN)
            if re.floor_object & 0xF0 == 0x60:
                rendered_map.addWalls(RenderedMap.WALL_RIGHT | RenderedMap.WALL_DOWN)
            if re.floor_object & 0xF0 == 0x70:
                rendered_map.addWalls(RenderedMap.WALL_RIGHT | RenderedMap.WALL_UP)
            if re.floor_object & 0xF0 == 0x80:
                rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_UP)
        # Replay the object list; warps are drawn later as text.
        for obj in re.objects:
            if isinstance(obj, ObjectWarp):
                pass
            elif isinstance(obj, ObjectHorizontal):
                for n in range(0, obj.count):
                    rendered_map.placeObject(obj.x + n * objHSize(obj.type_id), obj.y, obj.type_id)
            elif isinstance(obj, ObjectVertical):
                for n in range(0, obj.count):
                    rendered_map.placeObject(obj.x, obj.y + n * objVSize(obj.type_id), obj.type_id)
            else:
                rendered_map.placeObject(obj.x, obj.y, obj.type_id)
        # Expand the 10x8 object grid into a 20x16 grid of 8x8 tile indices.
        tiles = [0] * 20 * 16
        for y in range(8):
            for x in range(10):
                obj = rendered_map.objects[(x, y)]
                tiles[x*2 + y*2*20] = tile_info[obj*4]
                tiles[x*2+1 + y*2*20] = tile_info[obj*4+1]
                tiles[x*2 + (y*2+1)*20] = tile_info[obj*4+2]
                tiles[x*2+1 + (y*2+1)*20] = tile_info[obj*4+3]
        # Build the 0x100-entry tile index -> image map for this room.
        if room_nr < 0x100:
            sub_tileset_offset = self.__rom.banks[0x20][0x2E73 + (room_nr & 0x0F) // 2 + ((room_nr >> 5) * 8)] << 4
            tilemap = self.__tiles[0x0f][sub_tileset_offset:sub_tileset_offset+0x20]
            tilemap += self.__tiles[0x0c][0x120:0x180]
            tilemap += self.__tiles[0x0c][0x080:0x100]
        else:
            # Indoor tileset loading; unknown slots stay None and are skipped.
            tileset_nr = self.__rom.banks[0x20][0x2eB3 + room_nr - 0x100]
            tilemap = [None] * 0x100
            tilemap[0x20:0x80] = self.__tiles[0x0D][0x000:0x060]
            if tileset_nr != 0xFF:
                tilemap[0x00:0x10] = self.__tiles[0x0D][0x100 + tileset_nr * 0x10:0x110 + tileset_nr * 0x10]
            tilemap[0x10:0x20] = self.__tiles[0x0D][0x210:0x220]
            tilemap[0xF0:0x100] = self.__tiles[0x12][0x380:0x390]
        # Animated tiles occupy slots 0x6C..0x6F; pick the frame source address.
        if re.animation_id == 2:
            addr = 0x2B0
        elif re.animation_id == 3:
            addr = 0x2C0
        elif re.animation_id == 4:
            addr = 0x2D0
        elif re.animation_id == 5:
            addr = 0x2E0
        elif re.animation_id == 6:
            addr = 0x2F0
        elif re.animation_id == 7:
            addr = 0x2D0
        elif re.animation_id == 8:
            addr = 0x300
        elif re.animation_id == 9:
            addr = 0x310
        elif re.animation_id == 10:
            addr = 0x320
        elif re.animation_id == 11:
            addr = 0x2A0
        elif re.animation_id == 12:
            addr = 0x330
        elif re.animation_id == 13:
            addr = 0x350
        elif re.animation_id == 14:
            addr = 0x360
        elif re.animation_id == 15:
            addr = 0x340
        elif re.animation_id == 16:
            addr = 0x370
        else:
            # Unknown animation id: log it and blank the animated slots.
            print(hex(room_nr), re.animation_id)
            addr = 0x000
        tilemap[0x6C:0x70] = self.__tiles[0x0c][addr:addr+4]
        assert len(tilemap) == 0x100
        result = PIL.Image.new('L', (8 * 20, 8 * 16))
        draw = PIL.ImageDraw.Draw(result)
        for y in range(16):
            for x in range(20):
                tile = tilemap[tiles[x+y*20]]
                if tile is not None:
                    result.paste(tile, (x * 8, y * 8))
        # Collect positions of warp-capable objects (doors, holes, stairs).
        warp_pos = []
        for y in range(8):
            for x in range(10):
                if rendered_map.objects[(x, y)] in (0xE1, 0xE2, 0xE3, 0xBA, 0xD5, 0xA8, 0xBE, 0xCB):
                    warp_pos.append((x, y))
        # Overlay each entity as a box labelled with its type id.
        for x, y, type_id in re.entities:
            draw.rectangle([(x * 16, y * 16), (x * 16 + 15, y * 16 + 15)], outline=0)
            draw.text((x * 16 + 3, y * 16 + 2), "%02X" % (type_id))
        # List warp targets as text down the left edge.
        y = 8
        for obj in re.objects:
            if isinstance(obj, ObjectWarp):
                draw.text((8, y), "W%d:%02x:%03x:%d,%d" % (obj.warp_type, obj.map_nr, obj.room, obj.target_x, obj.target_y))
                y += 16
        return result

    def getTiles(self, bank_nr):
        """Decode a ROM graphics bank (2bpp, 16 bytes/tile) into 8x8 PIL images."""
        bank = self.__rom.banks[bank_nr]
        buffer = bytearray(b'\x00' * 16 * 16)
        result = []
        for n in range(0, len(bank), 16):
            # Each tile row is two bitplane bytes: low plane, then high plane.
            for y in range(8):
                a = bank[n + y * 2]
                b = bank[n + y * 2 + 1]
                for x in range(8):
                    v = 0x3F
                    if not a & (0x80 >> x):
                        v |= 0x40
                    if not b & (0x80 >> x):
                        v |= 0x80
                    buffer[x+y*8] = v
            result.append(PIL.Image.frombytes('L', (8, 8), bytes(buffer)))
        return result
| true
| true
|
f70c7f2258ce588444cf46d6c8affc4c9555203e
| 3,585
|
py
|
Python
|
python/paddle/fluid/layers/ops.py
|
skylarch/Paddle
|
d58d8df6f5f7aa6fd2f0780f87475055db57a80d
|
[
"Apache-2.0"
] | null | null | null |
python/paddle/fluid/layers/ops.py
|
skylarch/Paddle
|
d58d8df6f5f7aa6fd2f0780f87475055db57a80d
|
[
"Apache-2.0"
] | null | null | null |
python/paddle/fluid/layers/ops.py
|
skylarch/Paddle
|
d58d8df6f5f7aa6fd2f0780f87475055db57a80d
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .layer_function_generator import generate_layer_fn
# Activation ops exposed as auto-generated layer functions.
__activations__ = [
    'sigmoid',
    'logsigmoid',
    'exp',
    'tanh',
    'tanh_shrink',
    'softshrink',
    'sqrt',
    'abs',
    'ceil',
    'floor',
    'cos',
    'sin',
    'round',
    'reciprocal',
    'square',
    'softplus',
    'softsign',
    'brelu',
    'leaky_relu',
    'soft_relu',
    'elu',
    'relu6',
    'pow',
    'stanh',
    'hard_sigmoid',
    'swish',
]

# Public ops of this module; each name is bound below to a generated layer fn.
__all__ = [
    'mean',
    'mul',
    'scale',
    'sigmoid_cross_entropy_with_logits',
    'elementwise_add',
    'elementwise_div',
    'elementwise_sub',
    'elementwise_mul',
    'elementwise_max',
    'elementwise_min',
    'elementwise_pow',
    'clip',
    'clip_by_norm',
    'logical_and',
    'logical_or',
    'logical_xor',
    'logical_not',
    'uniform_random_batch_size_like',
    'gaussian_random',
    'gaussian_random_batch_size_like',
    'scatter',
    'sum',
    'slice',
    'shape',
    'maxout',
] + __activations__

# Generate and publish a module-level function for every listed op;
# set() removes any duplicate names.
for _OP in set(__all__):
    globals()[_OP] = generate_layer_fn(_OP)
__all__ += ["uniform_random"]
_uniform_random_ = generate_layer_fn('uniform_random')
def uniform_random(shape, dtype=None, min=None, max=None, seed=None):
kwargs = dict()
for name in locals():
val = locals()[name]
if val is not None:
kwargs[name] = val
return _uniform_random_(**kwargs)
uniform_random.__doc__ = _uniform_random_.__doc__ + """
Examples:
>>> result = fluid.layers.uniform_random(shape=[32, 784])
"""
__all__ += ['hard_shrink']

_hard_shrink_ = generate_layer_fn('hard_shrink')


def hard_shrink(x, threshold=None):
    """Forward only the non-None arguments to the generated hard_shrink layer."""
    # Snapshot before creating other locals; iterating locals() directly
    # raises "dictionary changed size during iteration" on Python 3.
    args = dict(locals())
    kwargs = {name: val for name, val in args.items() if val is not None}
    return _hard_shrink_(**kwargs)


hard_shrink.__doc__ = _hard_shrink_.__doc__ + """
Examples:
    >>> data = fluid.layers.data(name="input", shape=[784])
    >>> result = fluid.layers.hard_shrink(x=data, threshold=0.3)
"""
__all__ += ['cumsum']

_cum_sum_ = generate_layer_fn('cumsum')


def cumsum(x, axis=None, exclusive=None, reverse=None):
    """Forward only the non-None arguments to the generated cumsum layer."""
    # Snapshot before creating other locals; iterating locals() directly
    # raises "dictionary changed size during iteration" on Python 3.
    args = dict(locals())
    kwargs = {name: val for name, val in args.items() if val is not None}
    return _cum_sum_(**kwargs)


cumsum.__doc__ = _cum_sum_.__doc__ + """
Examples:
    >>> data = fluid.layers.data(name="input", shape=[32, 784])
    >>> result = fluid.layers.cumsum(data, axis=0)
"""
__all__ += ['thresholded_relu']

_thresholded_relu_ = generate_layer_fn('thresholded_relu')


def thresholded_relu(x, threshold=None):
    """Forward only the non-None arguments to the generated thresholded_relu layer."""
    # Snapshot before creating other locals; iterating locals() directly
    # raises "dictionary changed size during iteration" on Python 3.
    args = dict(locals())
    kwargs = {name: val for name, val in args.items() if val is not None}
    # Bug fix: the result was computed but never returned, so this wrapper
    # always yielded None.
    return _thresholded_relu_(**kwargs)


thresholded_relu.__doc__ = _thresholded_relu_.__doc__ + """
Examples:
    >>> data = fluid.layers.data(name="input", shape=[1])
    >>> result = fluid.layers.thresholded_relu(data, threshold=0.4)
"""
| 22.40625
| 74
| 0.641004
|
from .layer_function_generator import generate_layer_fn
# Activation ops exposed as auto-generated layer functions.
__activations__ = [
    'sigmoid',
    'logsigmoid',
    'exp',
    'tanh',
    'tanh_shrink',
    'softshrink',
    'sqrt',
    'abs',
    'ceil',
    'floor',
    'cos',
    'sin',
    'round',
    'reciprocal',
    'square',
    'softplus',
    'softsign',
    'brelu',
    'leaky_relu',
    'soft_relu',
    'elu',
    'relu6',
    'pow',
    'stanh',
    'hard_sigmoid',
    'swish',
]

# Public ops of this module; each name is bound below to a generated layer fn.
__all__ = [
    'mean',
    'mul',
    'scale',
    'sigmoid_cross_entropy_with_logits',
    'elementwise_add',
    'elementwise_div',
    'elementwise_sub',
    'elementwise_mul',
    'elementwise_max',
    'elementwise_min',
    'elementwise_pow',
    'clip',
    'clip_by_norm',
    'logical_and',
    'logical_or',
    'logical_xor',
    'logical_not',
    'uniform_random_batch_size_like',
    'gaussian_random',
    'gaussian_random_batch_size_like',
    'scatter',
    'sum',
    'slice',
    'shape',
    'maxout',
] + __activations__

# Generate and publish a module-level function for every listed op;
# set() removes any duplicate names.
for _OP in set(__all__):
    globals()[_OP] = generate_layer_fn(_OP)
__all__ += ["uniform_random"]
_uniform_random_ = generate_layer_fn('uniform_random')
def uniform_random(shape, dtype=None, min=None, max=None, seed=None):
kwargs = dict()
for name in locals():
val = locals()[name]
if val is not None:
kwargs[name] = val
return _uniform_random_(**kwargs)
uniform_random.__doc__ = _uniform_random_.__doc__ + """
Examples:
>>> result = fluid.layers.uniform_random(shape=[32, 784])
"""
__all__ += ['hard_shrink']

_hard_shrink_ = generate_layer_fn('hard_shrink')


def hard_shrink(x, threshold=None):
    """Forward only the non-None arguments to the generated hard_shrink layer."""
    # Snapshot before creating other locals; iterating locals() directly
    # raises "dictionary changed size during iteration" on Python 3.
    args = dict(locals())
    kwargs = {name: val for name, val in args.items() if val is not None}
    return _hard_shrink_(**kwargs)


hard_shrink.__doc__ = _hard_shrink_.__doc__ + """
Examples:
    >>> data = fluid.layers.data(name="input", shape=[784])
    >>> result = fluid.layers.hard_shrink(x=data, threshold=0.3)
"""
__all__ += ['cumsum']

_cum_sum_ = generate_layer_fn('cumsum')


def cumsum(x, axis=None, exclusive=None, reverse=None):
    """Forward only the non-None arguments to the generated cumsum layer."""
    # Snapshot before creating other locals; iterating locals() directly
    # raises "dictionary changed size during iteration" on Python 3.
    args = dict(locals())
    kwargs = {name: val for name, val in args.items() if val is not None}
    return _cum_sum_(**kwargs)


cumsum.__doc__ = _cum_sum_.__doc__ + """
Examples:
    >>> data = fluid.layers.data(name="input", shape=[32, 784])
    >>> result = fluid.layers.cumsum(data, axis=0)
"""
__all__ += ['thresholded_relu']

_thresholded_relu_ = generate_layer_fn('thresholded_relu')


def thresholded_relu(x, threshold=None):
    """Forward only the non-None arguments to the generated thresholded_relu layer."""
    # Snapshot before creating other locals; iterating locals() directly
    # raises "dictionary changed size during iteration" on Python 3.
    args = dict(locals())
    kwargs = {name: val for name, val in args.items() if val is not None}
    # Bug fix: the result was computed but never returned, so this wrapper
    # always yielded None.
    return _thresholded_relu_(**kwargs)


thresholded_relu.__doc__ = _thresholded_relu_.__doc__ + """
Examples:
    >>> data = fluid.layers.data(name="input", shape=[1])
    >>> result = fluid.layers.thresholded_relu(data, threshold=0.4)
"""
| true
| true
|
f70c80c259fb8d5441fc2b9fa843bb40401a27f5
| 1,435
|
py
|
Python
|
app/scripts/check_files.py
|
PromoFaux/plex-utills
|
570e2e4525b992978780b6a195df94c674c94ac3
|
[
"MIT"
] | 179
|
2020-02-27T01:09:32.000Z
|
2022-03-28T21:56:20.000Z
|
app/scripts/check_files.py
|
PromoFaux/plex-utills
|
570e2e4525b992978780b6a195df94c674c94ac3
|
[
"MIT"
] | 94
|
2020-03-03T03:22:42.000Z
|
2022-03-28T20:13:22.000Z
|
app/scripts/check_files.py
|
PromoFaux/plex-utills
|
570e2e4525b992978780b6a195df94c674c94ac3
|
[
"MIT"
] | 36
|
2020-02-28T13:58:54.000Z
|
2022-03-26T10:04:25.000Z
|
#!/usr/bin/python
"""Scan recently modified, large films in a Plex library for stream errors.

Reads /config/config.ini, walks each film's directory, and runs ffmpeg's
error-only decode pass over files changed within CHECK_FILES_HISTORY days
that are larger than ~100 MB.
"""
import os
import sys
import time
from configparser import ConfigParser
from plexapi.server import PlexServer
import re

config_object = ConfigParser()
config_object.read("/config/config.ini")
server = config_object["PLEXSERVER"]
options = config_object["OPTIONS"]

baseurl = server["PLEX_URL"]
token = server["TOKEN"]
plex = PlexServer(baseurl, token)
plexlibrary = server["FILMSLIBRARY"]
films = plex.library.section(plexlibrary)
ppath = server["PLEXPATH"]    # path prefix as the Plex server sees it
mpath = server["MOUNTEDPATH"]  # the same prefix as mounted locally
xdays = int(options["CHECK_FILES_HISTORY"])  # only check files this recent
xsize = 100000000  # skip small files (artwork, subtitles, samples)
now = time.time()

for film in films.search():
    # Map the Plex-side path to the local mount.
    # NOTE(review): ppath is used as a regex pattern here — regex
    # metacharacters in the configured path would misbehave.
    film_dir = os.path.dirname(re.sub(ppath, mpath, film.media[0].parts[0].file))
    for root, dirs, files in os.walk(film_dir):
        for name in files:
            filename = os.path.join(root, name)
            st = os.stat(filename)  # was shadowing builtin `dir` + double stat
            if st.st_mtime > now - (xdays * 86400) and st.st_size > xsize:
                print('checking', film.title)
                # NOTE(review): filename is interpolated into a shell
                # command; a quote in a file name breaks (or injects into)
                # the command.  Prefer subprocess.run with a list argv.
                command = "ffmpeg -v error -i \"" + filename + "\" -c:v rawvideo -map 0:1 -f null - 2>&1"
                output = os.popen(command).read()
                print(output)
                if output.lower().find('error') == -1:
                    print(film.title, 'is OK!')
                else:
                    print('Oh Bugger!', filename, 'is completely buggered')
| 35
| 110
| 0.583275
|
"""Scan recently modified, large films in a Plex library for stream errors.

Reads /config/config.ini, walks each film's directory, and runs ffmpeg's
error-only decode pass over files changed within CHECK_FILES_HISTORY days
that are larger than ~100 MB.
"""
import os
import sys
import time
from configparser import ConfigParser
from plexapi.server import PlexServer
import re

config_object = ConfigParser()
config_object.read("/config/config.ini")
server = config_object["PLEXSERVER"]
options = config_object["OPTIONS"]

baseurl = server["PLEX_URL"]
token = server["TOKEN"]
plex = PlexServer(baseurl, token)
plexlibrary = server["FILMSLIBRARY"]
films = plex.library.section(plexlibrary)
ppath = server["PLEXPATH"]    # path prefix as the Plex server sees it
mpath = server["MOUNTEDPATH"]  # the same prefix as mounted locally
xdays = int(options["CHECK_FILES_HISTORY"])  # only check files this recent
xsize = 100000000  # skip small files (artwork, subtitles, samples)
now = time.time()

for film in films.search():
    # Map the Plex-side path to the local mount.
    # NOTE(review): ppath is used as a regex pattern here — regex
    # metacharacters in the configured path would misbehave.
    film_dir = os.path.dirname(re.sub(ppath, mpath, film.media[0].parts[0].file))
    for root, dirs, files in os.walk(film_dir):
        for name in files:
            filename = os.path.join(root, name)
            st = os.stat(filename)  # was shadowing builtin `dir` + double stat
            if st.st_mtime > now - (xdays * 86400) and st.st_size > xsize:
                print('checking', film.title)
                # NOTE(review): filename is interpolated into a shell
                # command; a quote in a file name breaks (or injects into)
                # the command.  Prefer subprocess.run with a list argv.
                command = "ffmpeg -v error -i \"" + filename + "\" -c:v rawvideo -map 0:1 -f null - 2>&1"
                output = os.popen(command).read()
                print(output)
                if output.lower().find('error') == -1:
                    print(film.title, 'is OK!')
                else:
                    print('Oh Bugger!', filename, 'is completely buggered')
| true
| true
|
f70c82ccc483cb64421eca8eaf429e62c0793614
| 7,628
|
py
|
Python
|
mint/daemon/keychain_server.py
|
MintNetwork/mint-blockchain
|
65ec05a015a07664ed25f83efa736065a17f7d7a
|
[
"Apache-2.0"
] | 12
|
2021-08-18T20:53:31.000Z
|
2022-03-15T21:45:13.000Z
|
mint/daemon/keychain_server.py
|
MintNetwork/mint-blockchain
|
65ec05a015a07664ed25f83efa736065a17f7d7a
|
[
"Apache-2.0"
] | 34
|
2021-08-18T19:12:11.000Z
|
2022-01-06T17:15:34.000Z
|
mint/daemon/keychain_server.py
|
MintNetwork/mint-blockchain
|
65ec05a015a07664ed25f83efa736065a17f7d7a
|
[
"Apache-2.0"
] | 7
|
2021-08-18T20:53:34.000Z
|
2022-03-15T08:37:40.000Z
|
import logging
from blspy import PrivateKey
from mint.cmds.init_funcs import check_keys
from mint.util.keychain import Keychain
from pathlib import Path
from typing import Any, Dict, List, Optional, cast
# Command names the KeychainServer dispatches to handlers in handle_command().
keychain_commands = [
    "add_private_key",
    "check_keys",
    "delete_all_keys",
    "delete_key_by_fingerprint",
    "get_all_private_keys",
    "get_first_private_key",
    "get_key_for_fingerprint",
]

log = logging.getLogger(__name__)

# Error identifiers returned in the "error" field of failed responses.
KEYCHAIN_ERR_KEYERROR = "key error"
KEYCHAIN_ERR_LOCKED = "keyring is locked"
KEYCHAIN_ERR_NO_KEYS = "no keys present"
KEYCHAIN_ERR_MALFORMED_REQUEST = "malformed request"
class KeychainServer:
"""
Implements a remote keychain service for clients to perform key operations on
"""
    def __init__(self):
        # Keychain backed by the default user/service strings.
        self._default_keychain = Keychain()
        # "<user><service>" -> Keychain for non-default partitions,
        # populated lazily by get_keychain_for_request().
        self._alt_keychains = {}
def get_keychain_for_request(self, request: Dict[str, Any]):
"""
Keychain instances can have user and service strings associated with them.
The keychain backends ultimately point to the same data stores, but the user
and service strings are used to partition those data stores. We attempt to
maintain a mapping of user/service pairs to their corresponding Keychain.
"""
keychain = None
user = request.get("kc_user", self._default_keychain.user)
service = request.get("kc_service", self._default_keychain.service)
if user == self._default_keychain.user and service == self._default_keychain.service:
keychain = self._default_keychain
else:
key = (user or "unnamed") + (service or "")
if key in self._alt_keychains:
keychain = self._alt_keychains[key]
else:
keychain = Keychain(user=user, service=service)
self._alt_keychains[key] = keychain
return keychain
async def handle_command(self, command, data) -> Dict[str, Any]:
if command == "add_private_key":
return await self.add_private_key(cast(Dict[str, Any], data))
elif command == "check_keys":
return await self.check_keys(cast(Dict[str, Any], data))
elif command == "delete_all_keys":
return await self.delete_all_keys(cast(Dict[str, Any], data))
elif command == "delete_key_by_fingerprint":
return await self.delete_key_by_fingerprint(cast(Dict[str, Any], data))
elif command == "get_all_private_keys":
return await self.get_all_private_keys(cast(Dict[str, Any], data))
elif command == "get_first_private_key":
return await self.get_first_private_key(cast(Dict[str, Any], data))
elif command == "get_key_for_fingerprint":
return await self.get_key_for_fingerprint(cast(Dict[str, Any], data))
return {}
async def add_private_key(self, request: Dict[str, Any]) -> Dict[str, Any]:
if self.get_keychain_for_request(request).is_keyring_locked():
return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
mnemonic = request.get("mnemonic", None)
passphrase = request.get("passphrase", None)
if mnemonic is None or passphrase is None:
return {
"success": False,
"error": KEYCHAIN_ERR_MALFORMED_REQUEST,
"error_details": {"message": "missing mnemonic and/or passphrase"},
}
try:
self.get_keychain_for_request(request).add_private_key(mnemonic, passphrase)
except KeyError as e:
return {
"success": False,
"error": KEYCHAIN_ERR_KEYERROR,
"error_details": {"message": f"The word '{e.args[0]}' is incorrect.'", "word": e.args[0]},
}
return {"success": True}
async def check_keys(self, request: Dict[str, Any]) -> Dict[str, Any]:
if self.get_keychain_for_request(request).is_keyring_locked():
return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
root_path = request.get("root_path", None)
if root_path is None:
return {
"success": False,
"error": KEYCHAIN_ERR_MALFORMED_REQUEST,
"error_details": {"message": "missing root_path"},
}
check_keys(Path(root_path))
return {"success": True}
async def delete_all_keys(self, request: Dict[str, Any]) -> Dict[str, Any]:
if self.get_keychain_for_request(request).is_keyring_locked():
return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
self.get_keychain_for_request(request).delete_all_keys()
return {"success": True}
async def delete_key_by_fingerprint(self, request: Dict[str, Any]) -> Dict[str, Any]:
if self.get_keychain_for_request(request).is_keyring_locked():
return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
fingerprint = request.get("fingerprint", None)
if fingerprint is None:
return {
"success": False,
"error": KEYCHAIN_ERR_MALFORMED_REQUEST,
"error_details": {"message": "missing fingerprint"},
}
self.get_keychain_for_request(request).delete_key_by_fingerprint(fingerprint)
return {"success": True}
async def get_all_private_keys(self, request: Dict[str, Any]) -> Dict[str, Any]:
all_keys: List[Dict[str, Any]] = []
if self.get_keychain_for_request(request).is_keyring_locked():
return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
private_keys = self.get_keychain_for_request(request).get_all_private_keys()
for sk, entropy in private_keys:
all_keys.append({"pk": bytes(sk.get_g1()).hex(), "entropy": entropy.hex()})
return {"success": True, "private_keys": all_keys}
async def get_first_private_key(self, request: Dict[str, Any]) -> Dict[str, Any]:
key: Dict[str, Any] = {}
if self.get_keychain_for_request(request).is_keyring_locked():
return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
sk_ent = self.get_keychain_for_request(request).get_first_private_key()
if sk_ent is None:
return {"success": False, "error": KEYCHAIN_ERR_NO_KEYS}
pk_str = bytes(sk_ent[0].get_g1()).hex()
ent_str = sk_ent[1].hex()
key = {"pk": pk_str, "entropy": ent_str}
return {"success": True, "private_key": key}
async def get_key_for_fingerprint(self, request: Dict[str, Any]) -> Dict[str, Any]:
if self.get_keychain_for_request(request).is_keyring_locked():
return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
private_keys = self.get_keychain_for_request(request).get_all_private_keys()
if len(private_keys) == 0:
return {"success": False, "error": KEYCHAIN_ERR_NO_KEYS}
fingerprint = request.get("fingerprint", None)
private_key: Optional[PrivateKey] = None
entropy: Optional[bytes] = None
if fingerprint is not None:
for sk, entropy in private_keys:
if sk.get_g1().get_fingerprint() == fingerprint:
private_key = sk
break
else:
private_key, entropy = private_keys[0]
if not private_key or not entropy:
return {"success": False, "error": KEYCHAIN_ERR_NO_KEYS}
else:
return {"success": True, "pk": bytes(private_key.get_g1()).hex(), "entropy": entropy.hex()}
| 40.574468
| 106
| 0.635947
|
import logging
from blspy import PrivateKey
from mint.cmds.init_funcs import check_keys
from mint.util.keychain import Keychain
from pathlib import Path
from typing import Any, Dict, List, Optional, cast
keychain_commands = [
"add_private_key",
"check_keys",
"delete_all_keys",
"delete_key_by_fingerprint",
"get_all_private_keys",
"get_first_private_key",
"get_key_for_fingerprint",
]
log = logging.getLogger(__name__)
KEYCHAIN_ERR_KEYERROR = "key error"
KEYCHAIN_ERR_LOCKED = "keyring is locked"
KEYCHAIN_ERR_NO_KEYS = "no keys present"
KEYCHAIN_ERR_MALFORMED_REQUEST = "malformed request"
class KeychainServer:
def __init__(self):
self._default_keychain = Keychain()
self._alt_keychains = {}
def get_keychain_for_request(self, request: Dict[str, Any]):
keychain = None
user = request.get("kc_user", self._default_keychain.user)
service = request.get("kc_service", self._default_keychain.service)
if user == self._default_keychain.user and service == self._default_keychain.service:
keychain = self._default_keychain
else:
key = (user or "unnamed") + (service or "")
if key in self._alt_keychains:
keychain = self._alt_keychains[key]
else:
keychain = Keychain(user=user, service=service)
self._alt_keychains[key] = keychain
return keychain
async def handle_command(self, command, data) -> Dict[str, Any]:
if command == "add_private_key":
return await self.add_private_key(cast(Dict[str, Any], data))
elif command == "check_keys":
return await self.check_keys(cast(Dict[str, Any], data))
elif command == "delete_all_keys":
return await self.delete_all_keys(cast(Dict[str, Any], data))
elif command == "delete_key_by_fingerprint":
return await self.delete_key_by_fingerprint(cast(Dict[str, Any], data))
elif command == "get_all_private_keys":
return await self.get_all_private_keys(cast(Dict[str, Any], data))
elif command == "get_first_private_key":
return await self.get_first_private_key(cast(Dict[str, Any], data))
elif command == "get_key_for_fingerprint":
return await self.get_key_for_fingerprint(cast(Dict[str, Any], data))
return {}
async def add_private_key(self, request: Dict[str, Any]) -> Dict[str, Any]:
if self.get_keychain_for_request(request).is_keyring_locked():
return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
mnemonic = request.get("mnemonic", None)
passphrase = request.get("passphrase", None)
if mnemonic is None or passphrase is None:
return {
"success": False,
"error": KEYCHAIN_ERR_MALFORMED_REQUEST,
"error_details": {"message": "missing mnemonic and/or passphrase"},
}
try:
self.get_keychain_for_request(request).add_private_key(mnemonic, passphrase)
except KeyError as e:
return {
"success": False,
"error": KEYCHAIN_ERR_KEYERROR,
"error_details": {"message": f"The word '{e.args[0]}' is incorrect.'", "word": e.args[0]},
}
return {"success": True}
async def check_keys(self, request: Dict[str, Any]) -> Dict[str, Any]:
if self.get_keychain_for_request(request).is_keyring_locked():
return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
root_path = request.get("root_path", None)
if root_path is None:
return {
"success": False,
"error": KEYCHAIN_ERR_MALFORMED_REQUEST,
"error_details": {"message": "missing root_path"},
}
check_keys(Path(root_path))
return {"success": True}
async def delete_all_keys(self, request: Dict[str, Any]) -> Dict[str, Any]:
if self.get_keychain_for_request(request).is_keyring_locked():
return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
self.get_keychain_for_request(request).delete_all_keys()
return {"success": True}
async def delete_key_by_fingerprint(self, request: Dict[str, Any]) -> Dict[str, Any]:
if self.get_keychain_for_request(request).is_keyring_locked():
return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
fingerprint = request.get("fingerprint", None)
if fingerprint is None:
return {
"success": False,
"error": KEYCHAIN_ERR_MALFORMED_REQUEST,
"error_details": {"message": "missing fingerprint"},
}
self.get_keychain_for_request(request).delete_key_by_fingerprint(fingerprint)
return {"success": True}
async def get_all_private_keys(self, request: Dict[str, Any]) -> Dict[str, Any]:
all_keys: List[Dict[str, Any]] = []
if self.get_keychain_for_request(request).is_keyring_locked():
return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
private_keys = self.get_keychain_for_request(request).get_all_private_keys()
for sk, entropy in private_keys:
all_keys.append({"pk": bytes(sk.get_g1()).hex(), "entropy": entropy.hex()})
return {"success": True, "private_keys": all_keys}
async def get_first_private_key(self, request: Dict[str, Any]) -> Dict[str, Any]:
key: Dict[str, Any] = {}
if self.get_keychain_for_request(request).is_keyring_locked():
return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
sk_ent = self.get_keychain_for_request(request).get_first_private_key()
if sk_ent is None:
return {"success": False, "error": KEYCHAIN_ERR_NO_KEYS}
pk_str = bytes(sk_ent[0].get_g1()).hex()
ent_str = sk_ent[1].hex()
key = {"pk": pk_str, "entropy": ent_str}
return {"success": True, "private_key": key}
async def get_key_for_fingerprint(self, request: Dict[str, Any]) -> Dict[str, Any]:
if self.get_keychain_for_request(request).is_keyring_locked():
return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
private_keys = self.get_keychain_for_request(request).get_all_private_keys()
if len(private_keys) == 0:
return {"success": False, "error": KEYCHAIN_ERR_NO_KEYS}
fingerprint = request.get("fingerprint", None)
private_key: Optional[PrivateKey] = None
entropy: Optional[bytes] = None
if fingerprint is not None:
for sk, entropy in private_keys:
if sk.get_g1().get_fingerprint() == fingerprint:
private_key = sk
break
else:
private_key, entropy = private_keys[0]
if not private_key or not entropy:
return {"success": False, "error": KEYCHAIN_ERR_NO_KEYS}
else:
return {"success": True, "pk": bytes(private_key.get_g1()).hex(), "entropy": entropy.hex()}
| true
| true
|
f70c82dd8e7f66cb39383a76d97c579bef26798c
| 1,279
|
py
|
Python
|
Preprocessing.py
|
maciej-gajewski/gotta-catch-ai
|
510d593351266e929173aa5e06aaad5df3350bcd
|
[
"MIT"
] | null | null | null |
Preprocessing.py
|
maciej-gajewski/gotta-catch-ai
|
510d593351266e929173aa5e06aaad5df3350bcd
|
[
"MIT"
] | null | null | null |
Preprocessing.py
|
maciej-gajewski/gotta-catch-ai
|
510d593351266e929173aa5e06aaad5df3350bcd
|
[
"MIT"
] | null | null | null |
# Importing packages
import shutil
import os
import csv

# Preprocessing first dataset: images are grouped in one directory per
# Pokemon, so every file in that directory is copied into its type bucket.
file = "./Pokemon/data/pokemon_types_names.csv"
with open(file, 'r') as f:
    reader = csv.reader(f)
    next(reader, None)  # Skip the header
    pkm_dict1 = {rows[1]: rows[2] for rows in reader}

for pkm_name, pkm_type in pkm_dict1.items():
    source = './Pokemon/pokemon-generation-one/{pkm_name}/'.format(pkm_name=pkm_name)
    dest = './Pokemon/dataset/{pkm_type}'.format(pkm_type=pkm_type)
    # Create the destination once per Pokemon instead of re-checking for every
    # single image file; exist_ok avoids the exists()/makedirs race as well.
    os.makedirs(dest, exist_ok=True)
    for image_name in os.listdir(source):
        shutil.copy(source + image_name, dest)

# Preprocessing second dataset: one image per Pokemon, keyed by pokedex number.
file = "./Pokemon/data/pokemon_types.csv"
with open(file, 'r') as f:
    reader = csv.reader(f)
    next(reader, None)  # Skip the header
    pkm_dict2 = {rows[0]: rows[1] for rows in reader}

for pkm_no, pkm_type in pkm_dict2.items():
    source = './Pokemon/pokemon-images-dataset/{pkm_no}.png'.format(pkm_no=pkm_no)
    dest = './Pokemon/dataset/{pkm_type}'.format(pkm_type=pkm_type)
    os.makedirs(dest, exist_ok=True)
    shutil.copy(source, dest)
| 33.657895
| 94
| 0.602033
|
import shutil
import os
import csv
file = "./Pokemon/data/pokemon_types_names.csv"
with open(file,'r') as f:
reader = csv.reader(f)
next(reader,None)
pkm_dict1 = {rows[1]:rows[2] for rows in reader}
for pkm_name, pkm_type in pkm_dict1.items():
source = './Pokemon/pokemon-generation-one/{pkm_name}/'.format(pkm_name=pkm_name)
dest = './Pokemon/dataset/{pkm_type}'.format(pkm_type=pkm_type)
for f in os.listdir(source):
if not os.path.exists(dest):
os.makedirs(dest)
shutil.copy(source+f, dest)
file = "./Pokemon/data/pokemon_types.csv"
with open(file,'r') as f:
reader = csv.reader(f)
next(reader,None)
pkm_dict2 = {rows[0]:rows[1] for rows in reader}
for pkm_no, pkm_type in pkm_dict2.items():
source = './Pokemon/pokemon-images-dataset/{pkm_no}.png'.format(pkm_no=pkm_no)
dest = './Pokemon/dataset/{pkm_type}'.format(pkm_type=pkm_type)
if not os.path.exists(dest):
os.makedirs(dest)
shutil.copy(source, dest)
| true
| true
|
f70c83141455e50f059168c60684b7ee1d9db2aa
| 13,702
|
py
|
Python
|
eagr/client/client_side_middleware.py
|
kensho-technologies/eagr
|
fc4a2ec21d22ebc753e194e5644fbbad1ae44dda
|
[
"Apache-2.0"
] | 14
|
2020-03-05T20:25:04.000Z
|
2021-12-18T02:38:33.000Z
|
eagr/client/client_side_middleware.py
|
kensho-technologies/eagr
|
fc4a2ec21d22ebc753e194e5644fbbad1ae44dda
|
[
"Apache-2.0"
] | 4
|
2020-05-21T15:02:57.000Z
|
2020-12-03T12:54:20.000Z
|
eagr/client/client_side_middleware.py
|
kensho-technologies/eagr
|
fc4a2ec21d22ebc753e194e5644fbbad1ae44dda
|
[
"Apache-2.0"
] | 1
|
2020-08-07T23:13:31.000Z
|
2020-08-07T23:13:31.000Z
|
# Copyright 2020-present Kensho Technologies, LLC.
"""Implementing client-side grpc interceptors"""
import functools
import json
import backoff
import grpc
import prometheus_client
CLIENTSIDE_METRICS_HISTO = prometheus_client.Histogram(
"clientside_grpc_endpoint",
"Response time histogram for grpc endpoints from the client-side",
labelnames=("client_name", "server_name", "service", "endpoint"),
)
CLIENTSIDE_ERROR_COUNTER = prometheus_client.Counter(
"clientside_grpc_endpoint_error",
"Clientside exception counts for grpc methods",
labelnames=("client_name", "server_name", "service", "endpoint", "exception"),
)
GRPC_RENDEZVOUS_ERROR = "_Rendezvous"
def get_service_and_method_from_url(method_url):
    """Split a grpc method url ("/pkg.Service/Method") into label strings.

    Dots are replaced with underscores so the parts are usable as
    prometheus metric names and/or label values.

    Args:
        method_url: string of the exact form "/<service>/<method>"

    Returns:
        tuple(service_name, method_name)

    Raises:
        AssertionError: if the url is not exactly "/<service>/<method>"
    """
    prefix, _, remainder = method_url.partition("/")
    service, slash, method = remainder.partition("/")
    bad_shape = prefix != "" or slash != "/" or service == "" or method == "" or "/" in method
    if bad_shape:
        raise AssertionError("Invalid method name: {}".format(method_url))
    return service.replace(".", "_"), method.replace(".", "_")
class GRPCClientGeneralInterceptor(
    grpc.UnaryUnaryClientInterceptor,
    grpc.StreamUnaryClientInterceptor,
    grpc.UnaryStreamClientInterceptor,
    grpc.StreamStreamClientInterceptor,
):
    """Client-side interceptor that decorates rpcs of every arity."""

    def __init__(self, decorator_fn):
        """Store the factory used to build a decorator per intercepted call."""
        super(GRPCClientGeneralInterceptor, self).__init__()
        self._decorator_fn = decorator_fn

    def _intercept_call(self, continuation, client_call_details, request_or_iterator):
        """Wrap the continuation if the factory yields a decorator for this call."""
        metadata = _get_metadata_map_from_client_details(client_call_details)
        decorator = self._decorator_fn(client_call_details.method, metadata)
        handler = decorator(continuation) if decorator else continuation
        return handler(client_call_details, request_or_iterator)

    def intercept_unary_unary(self, continuation, client_call_details, request):
        """Intercept unary-unary rpcs."""
        return self._intercept_call(continuation, client_call_details, request)

    def intercept_stream_unary(self, continuation, client_call_details, request_iterator):
        """Intercept stream-unary rpcs."""
        return self._intercept_call(continuation, client_call_details, request_iterator)

    def intercept_unary_stream(self, continuation, client_call_details, request):
        """Intercept unary-stream rpcs."""
        return self._intercept_call(continuation, client_call_details, request)

    def intercept_stream_stream(self, continuation, client_call_details, request_iterator):
        """Intercept stream-stream rpcs."""
        return self._intercept_call(continuation, client_call_details, request_iterator)
class GRPCClientUnaryOutputInterceptor(
    grpc.UnaryUnaryClientInterceptor, grpc.StreamUnaryClientInterceptor
):
    """Client-side interceptor that decorates only unary-output rpcs."""

    def __init__(self, decorator_fn):
        """Store the factory used to build a decorator per intercepted call."""
        super(GRPCClientUnaryOutputInterceptor, self).__init__()
        self._decorator_fn = decorator_fn

    def _intercept_call(self, continuation, client_call_details, request_or_iterator):
        """Wrap the continuation if the factory yields a decorator for this call."""
        metadata = _get_metadata_map_from_client_details(client_call_details)
        decorator = self._decorator_fn(client_call_details.method, metadata)
        handler = decorator(continuation) if decorator else continuation
        return handler(client_call_details, request_or_iterator)

    def intercept_unary_unary(self, continuation, client_call_details, request):
        """Intercept unary-unary rpcs."""
        return self._intercept_call(continuation, client_call_details, request)

    def intercept_stream_unary(self, continuation, client_call_details, request_iterator):
        """Intercept stream-unary rpcs."""
        return self._intercept_call(continuation, client_call_details, request_iterator)
class GRPCClientMiddleware(object):
    """Base class for GRPC client-side middleware.

    Subclasses must provide:

        def get_decorator(self, method_name, metadata)

    which receives the string rpc method name and a dict of leading rpc
    metadata, and returns a decorator for the underlying rpc callable
    (or a falsy value to leave the call undecorated).

    Additionally:
    __init__ is guaranteed to be called before the server is started, and
    get_interceptors() is called to retrieve all GRPC interceptors needed
    by the middleware; subclasses may extend it to add interceptors.
    """

    def __init__(self, client_label, server_label, interceptor_class):
        """Record both labels and the interceptor class to instantiate."""
        super(GRPCClientMiddleware, self).__init__()
        self._client_label = client_label
        self._server_label = server_label
        self._interceptor_class = interceptor_class

    @property
    def server_label(self):
        """Label identifying the server side of the channel."""
        return self._server_label

    @property
    def client_label(self):
        """Label identifying the client side of the channel."""
        return self._client_label

    def get_interceptors(self):
        """Build the interceptor list backed by this middleware's decorator."""
        return [self._interceptor_class(self.get_decorator)]
class ClientSideMetricsMiddleware(GRPCClientMiddleware):
    """Middleware recording client-side response-time histograms."""

    def __init__(self, client_label, server_label):
        """Wire the general (all-arity) interceptor up with the given labels."""
        super(ClientSideMetricsMiddleware, self).__init__(
            client_label, server_label, GRPCClientGeneralInterceptor
        )

    class Timer(object):
        """Decorator timing a wrapped call with a prometheus histogram."""

        def __init__(self, histogram):
            """Keep the (already labelled) histogram to time against."""
            self._histogram = histogram

        def __call__(self, fn):
            """Return fn wrapped in the histogram's timing context."""

            @functools.wraps(fn)
            def wrap(request, context):
                """Run the call inside the histogram timer."""
                with self._histogram.time():
                    return fn(request, context)

            return wrap

    def get_decorator(self, method_name, _):
        """Build a Timer labelled for the given rpc method."""
        service_label, endpoint_label = get_service_and_method_from_url(method_name)
        labelled_histogram = CLIENTSIDE_METRICS_HISTO.labels(
            client_name=self.client_label,
            server_name=self.server_label,
            service=service_label,
            endpoint=endpoint_label,
        )
        return self.Timer(labelled_histogram)
class ClientSideExceptionCountMiddleware(GRPCClientMiddleware):
    """Middleware counting client-side rpc exceptions (unary outputs only)."""

    def __init__(self, client_label, server_label):
        """Wire the unary-output interceptor up with the given labels."""
        super(ClientSideExceptionCountMiddleware, self).__init__(
            client_label, server_label, GRPCClientUnaryOutputInterceptor
        )

    class Counter(object):
        """Decorator incrementing an exception counter around a call."""

        def __init__(self, counter, client_name, server_name, service, endpoint):
            """Keep the counter plus the label values to report under."""
            self._counter = counter
            self._client_name = client_name
            self._server_name = server_name
            self._service = service
            self._endpoint = endpoint

        def __call__(self, fn):
            """Return fn wrapped with exception counting."""

            @functools.wraps(fn)
            def wrap(request, context):
                """Invoke fn and record any exception carried by its result."""
                outcome = fn(request, context)
                exc = outcome.exception()
                if exc:
                    # A Rendezvous error carries a grpc status code worth surfacing,
                    # e.g. "_Rendezvous: <StatusCode.DEADLINE_EXCEEDED: 4>". All codes
                    # at https://grpc.github.io/grpc/python/grpc.html#grpc-status-code
                    if type(exc).__name__ == GRPC_RENDEZVOUS_ERROR:
                        exception = GRPC_RENDEZVOUS_ERROR + ": " + repr(exc.code())
                    else:
                        # No status-code guarantee for other errors--report type only.
                        exception = type(exc).__name__
                    self._counter.labels(
                        client_name=self._client_name,
                        server_name=self._server_name,
                        service=self._service,
                        endpoint=self._endpoint,
                        exception=exception,
                    ).inc()
                return outcome

            return wrap

    def get_decorator(self, method_name, _):
        """Build a Counter labelled for the given rpc method."""
        service_label, endpoint_label = get_service_and_method_from_url(method_name)
        return self.Counter(
            CLIENTSIDE_ERROR_COUNTER,
            self.client_label,
            self.server_label,
            service_label,
            endpoint_label,
        )
class ClientExceptionTranslationMiddlewareUnaryOutput(GRPCClientMiddleware):
    """Middleware translating grpc errors into application exceptions."""

    def __init__(self, client_label, server_label, code_to_exception_class_func):
        """Keep the error-code -> exception-class mapping function."""
        super(ClientExceptionTranslationMiddlewareUnaryOutput, self).__init__(
            client_label, server_label, GRPCClientUnaryOutputInterceptor
        )
        self._code_to_exception_class_func = code_to_exception_class_func

    class Translator(object):
        """Decorator converting failed rpc results into raised exceptions."""

        def __init__(self, code_to_exception_class_func):
            """Keep the error-code -> exception-class mapping function."""
            self._code_to_exception_class_func = code_to_exception_class_func

        def __call__(self, fn):
            """Return fn wrapped with error translation."""

            @functools.wraps(fn)
            def wrap(request, context):
                """Run fn; re-raise a non-OK outcome as a translated exception."""
                try:
                    outcome = fn(request, context)
                    if outcome.code() is grpc.StatusCode.OK:
                        return outcome
                    # NOTE(review): raising the call object itself assumes a
                    # failed grpc call doubles as an RpcError so it is caught
                    # by the handler below — confirm against the grpc version.
                    raise outcome
                except grpc.RpcError as exc:
                    raise_exception_from_grpc_exception(self._code_to_exception_class_func, exc)

            return wrap

    def get_decorator(self, method_name, _):
        """Build a Translator bound to the configured mapping."""
        return self.Translator(self._code_to_exception_class_func)
class ClientRetryingMiddlewareUnaryOutput(GRPCClientMiddleware):
    """Middleware retrying failed unary-output rpcs with exponential backoff."""

    def __init__(self, client_label, server_label, exceptions_to_retry, max_retries):
        """Keep the retryable exception types and the retry budget."""
        super(ClientRetryingMiddlewareUnaryOutput, self).__init__(
            client_label, server_label, GRPCClientUnaryOutputInterceptor
        )
        self._exceptions_to_retry = exceptions_to_retry
        self._max_retries = max_retries

    class Retrier(object):
        """Decorator applying backoff.on_exception to a wrapped call."""

        def __init__(self, exceptions_to_retry, max_retries):
            """Keep the retryable exception types and the retry budget."""
            self._exceptions_to_retry = exceptions_to_retry
            self._max_retries = max_retries

        def __call__(self, fn):
            """Return fn wrapped in exponential-backoff retries."""
            retry = backoff.on_exception(
                backoff.expo, self._exceptions_to_retry, self._max_retries
            )
            return retry(fn)

    def get_decorator(self, method_name, _):
        """Build a Retrier with the configured policy."""
        return self.Retrier(self._exceptions_to_retry, self._max_retries)
def raise_exception_from_grpc_exception(code_to_exception_class_func, exc):
    """Re-raise a grpc error as the closest application-level exception.

    Well-known grpc status codes map directly to builtin exceptions.
    Otherwise the server-supplied "error_code"/"error_details" trailing
    metadata is fed through code_to_exception_class_func; if no mapping
    applies, the original exc is re-raised unchanged.
    """
    builtin_by_status = {
        grpc.StatusCode.DEADLINE_EXCEEDED: TimeoutError,
        grpc.StatusCode.UNIMPLEMENTED: NotImplementedError,
        grpc.StatusCode.UNAVAILABLE: ConnectionRefusedError,
    }
    builtin_exception = builtin_by_status.get(exc.code())
    if builtin_exception is not None:
        raise builtin_exception()
    code = None
    details = "[]"  # Details are expected to be json-deserializable
    for key, value in exc.trailing_metadata():
        if key == "error_code":
            try:
                code = int(value)
            except (TypeError, ValueError):
                # Non-numeric error_code: leave code as None.
                pass
        elif key == "error_details":
            details = value
    if code_to_exception_class_func:
        exception_class = code_to_exception_class_func(code)
        if exception_class:
            exception_args = json.loads(details)
            raise exception_class(*exception_args)
    raise exc
def _get_metadata_map_from_client_details(client_call_details):
"""Get metadata key->value map from client_call_details"""
metadata = {metadatum[0]: metadatum[1] for metadatum in (client_call_details.metadata or [])}
return metadata
| 38.488764
| 100
| 0.66837
|
import functools
import json
import backoff
import grpc
import prometheus_client
CLIENTSIDE_METRICS_HISTO = prometheus_client.Histogram(
"clientside_grpc_endpoint",
"Response time histogram for grpc endpoints from the client-side",
labelnames=("client_name", "server_name", "service", "endpoint"),
)
CLIENTSIDE_ERROR_COUNTER = prometheus_client.Counter(
"clientside_grpc_endpoint_error",
"Clientside exception counts for grpc methods",
labelnames=("client_name", "server_name", "service", "endpoint", "exception"),
)
GRPC_RENDEZVOUS_ERROR = "_Rendezvous"
def get_service_and_method_from_url(method_url):
name_parts = method_url.split("/")
if len(name_parts) != 3 or name_parts[0] != "" or name_parts[1] == "" or name_parts[2] == "":
raise AssertionError("Invalid method name: {}".format(method_url))
return (name_parts[1].replace(".", "_"), name_parts[2].replace(".", "_"))
class GRPCClientGeneralInterceptor(
grpc.UnaryUnaryClientInterceptor,
grpc.StreamUnaryClientInterceptor,
grpc.UnaryStreamClientInterceptor,
grpc.StreamStreamClientInterceptor,
):
def __init__(self, decorator_fn):
super(GRPCClientGeneralInterceptor, self).__init__()
self._decorator_fn = decorator_fn
def _intercept_call(self, continuation, client_call_details, request_or_iterator):
metadata = _get_metadata_map_from_client_details(client_call_details)
decorator = self._decorator_fn(client_call_details.method, metadata)
if not decorator:
handler = continuation
else:
handler = decorator(continuation)
return handler(client_call_details, request_or_iterator)
def intercept_unary_unary(self, continuation, client_call_details, request):
return self._intercept_call(continuation, client_call_details, request)
def intercept_stream_unary(self, continuation, client_call_details, request_iterator):
return self._intercept_call(continuation, client_call_details, request_iterator)
def intercept_unary_stream(self, continuation, client_call_details, request):
return self._intercept_call(continuation, client_call_details, request)
def intercept_stream_stream(self, continuation, client_call_details, request_iterator):
return self._intercept_call(continuation, client_call_details, request_iterator)
class GRPCClientUnaryOutputInterceptor(
grpc.UnaryUnaryClientInterceptor, grpc.StreamUnaryClientInterceptor
):
def __init__(self, decorator_fn):
super(GRPCClientUnaryOutputInterceptor, self).__init__()
self._decorator_fn = decorator_fn
def _intercept_call(self, continuation, client_call_details, request_or_iterator):
metadata = _get_metadata_map_from_client_details(client_call_details)
decorator = self._decorator_fn(client_call_details.method, metadata)
if not decorator:
handler = continuation
else:
handler = decorator(continuation)
return handler(client_call_details, request_or_iterator)
def intercept_unary_unary(self, continuation, client_call_details, request):
return self._intercept_call(continuation, client_call_details, request)
def intercept_stream_unary(self, continuation, client_call_details, request_iterator):
return self._intercept_call(continuation, client_call_details, request_iterator)
class GRPCClientMiddleware(object):
def __init__(self, client_label, server_label, interceptor_class):
super(GRPCClientMiddleware, self).__init__()
self._server_label = server_label
self._client_label = client_label
self._interceptor_class = interceptor_class
@property
def server_label(self):
return self._server_label
@property
def client_label(self):
return self._client_label
def get_interceptors(self):
return [self._interceptor_class(self.get_decorator)]
class ClientSideMetricsMiddleware(GRPCClientMiddleware):
def __init__(self, client_label, server_label):
super(ClientSideMetricsMiddleware, self).__init__(
client_label, server_label, GRPCClientGeneralInterceptor
)
class Timer(object):
def __init__(self, histogram):
self._histogram = histogram
def __call__(self, fn):
@functools.wraps(fn)
def wrap(request, context):
with self._histogram.time():
return fn(request, context)
return wrap
def get_decorator(self, method_name, _):
service_label, endpoint_label = get_service_and_method_from_url(method_name)
return self.Timer(
CLIENTSIDE_METRICS_HISTO.labels(
client_name=self.client_label,
server_name=self.server_label,
service=service_label,
endpoint=endpoint_label,
)
)
class ClientSideExceptionCountMiddleware(GRPCClientMiddleware):
def __init__(self, client_label, server_label):
super(ClientSideExceptionCountMiddleware, self).__init__(
client_label, server_label, GRPCClientUnaryOutputInterceptor
)
class Counter(object):
def __init__(self, counter, client_name, server_name, service, endpoint):
self._counter = counter
self._client_name = client_name
self._server_name = server_name
self._service = service
self._endpoint = endpoint
def __call__(self, fn):
@functools.wraps(fn)
def wrap(request, context):
r = fn(request, context)
if r.exception():
if type(r.exception()).__name__ == GRPC_RENDEZVOUS_ERROR:
exception = GRPC_RENDEZVOUS_ERROR + ": " + repr(r.exception().code())
else:
exception = type(r.exception()).__name__
self._counter.labels(
client_name=self._client_name,
server_name=self._server_name,
service=self._service,
endpoint=self._endpoint,
exception=exception,
).inc()
return r
return wrap
def get_decorator(self, method_name, _):
service_label, endpoint_label = get_service_and_method_from_url(method_name)
return self.Counter(
CLIENTSIDE_ERROR_COUNTER,
self.client_label,
self.server_label,
service_label,
endpoint_label,
)
class ClientExceptionTranslationMiddlewareUnaryOutput(GRPCClientMiddleware):
def __init__(self, client_label, server_label, code_to_exception_class_func):
super(ClientExceptionTranslationMiddlewareUnaryOutput, self).__init__(
client_label, server_label, GRPCClientUnaryOutputInterceptor
)
self._code_to_exception_class_func = code_to_exception_class_func
class Translator(object):
def __init__(self, code_to_exception_class_func):
self._code_to_exception_class_func = code_to_exception_class_func
def __call__(self, fn):
@functools.wraps(fn)
def wrap(request, context):
try:
result = fn(request, context)
if result.code() is grpc.StatusCode.OK:
return result
else:
raise result
except grpc.RpcError as exc:
raise_exception_from_grpc_exception(self._code_to_exception_class_func, exc)
return wrap
def get_decorator(self, method_name, _):
return self.Translator(self._code_to_exception_class_func)
class ClientRetryingMiddlewareUnaryOutput(GRPCClientMiddleware):
def __init__(self, client_label, server_label, exceptions_to_retry, max_retries):
super(ClientRetryingMiddlewareUnaryOutput, self).__init__(
client_label, server_label, GRPCClientUnaryOutputInterceptor
)
self._exceptions_to_retry = exceptions_to_retry
self._max_retries = max_retries
class Retrier(object):
def __init__(self, exceptions_to_retry, max_retries):
self._exceptions_to_retry = exceptions_to_retry
self._max_retries = max_retries
def __call__(self, fn):
return backoff.on_exception(backoff.expo, self._exceptions_to_retry, self._max_retries)(
fn
)
def get_decorator(self, method_name, _):
return self.Retrier(self._exceptions_to_retry, self._max_retries)
def raise_exception_from_grpc_exception(code_to_exception_class_func, exc):
code = None
details = "[]"
if exc.code() == grpc.StatusCode.DEADLINE_EXCEEDED:
raise TimeoutError()
elif exc.code() == grpc.StatusCode.UNIMPLEMENTED:
raise NotImplementedError()
elif exc.code() == grpc.StatusCode.UNAVAILABLE:
raise ConnectionRefusedError()
for key, value in exc.trailing_metadata():
if key == "error_code":
try:
code = int(value)
except (TypeError, ValueError):
pass
elif key == "error_details":
details = value
if code_to_exception_class_func:
exception_class = code_to_exception_class_func(code)
if exception_class:
exception_args = json.loads(details)
raise exception_class(*exception_args)
raise exc
def _get_metadata_map_from_client_details(client_call_details):
metadata = {metadatum[0]: metadatum[1] for metadatum in (client_call_details.metadata or [])}
return metadata
| true
| true
|
f70c835e6e69c7e01344ba1da7beafadb5e0f723
| 8,288
|
py
|
Python
|
rest-service/manager_rest/rest/rest_utils.py
|
yeshess/cloudify-manager
|
04dd199ce7df54355b87e9594f9db9fb1582924b
|
[
"Apache-2.0"
] | null | null | null |
rest-service/manager_rest/rest/rest_utils.py
|
yeshess/cloudify-manager
|
04dd199ce7df54355b87e9594f9db9fb1582924b
|
[
"Apache-2.0"
] | null | null | null |
rest-service/manager_rest/rest/rest_utils.py
|
yeshess/cloudify-manager
|
04dd199ce7df54355b87e9594f9db9fb1582924b
|
[
"Apache-2.0"
] | null | null | null |
#########
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
import copy
import urllib
import subprocess
from flask import current_app
from string import ascii_letters
from flask import request, make_response
from flask_restful.reqparse import Argument
from flask_restful.reqparse import RequestParser
from contextlib import contextmanager
from manager_rest import manager_exceptions, config
from manager_rest.constants import REST_SERVICE_NAME
from manager_rest.storage.models_states import VisibilityState
try:
from cloudify_premium.ha import node_status
except ImportError:
node_status = {'initialized': False}
states_except_private = copy.deepcopy(VisibilityState.STATES)
states_except_private.remove('private')
VISIBILITY_EXCEPT_PRIVATE = states_except_private
@contextmanager
def skip_nested_marshalling():
    """Context manager that temporarily disables response marshalling.

    Sets a flag attribute on the current flask request object; consumers
    check that flag to skip marshalling of nested responses.  The removal
    is done in a ``finally`` block so the flag does not leak onto the
    request when the wrapped block raises.
    """
    request.__skip_marshalling = True
    try:
        yield
    finally:
        delattr(request, '__skip_marshalling')
def get_json_and_verify_params(params=None):
    """Parse the JSON request body and validate the expected parameters.

    ``params`` is either a list of required parameter names, or a dict
    mapping each name to an options dict supporting the keys
    ``optional`` (bool) and ``type`` (class or tuple of classes).

    :raises UnsupportedContentTypeError: if the request is not JSON.
    :raises BadParametersError: on a missing or mistyped parameter.
    :return: the decoded request body dict.
    """
    params = params or []
    if request.content_type != 'application/json':
        raise manager_exceptions.UnsupportedContentTypeError(
            'Content type must be application/json')
    request_dict = request.json
    params_is_dict = isinstance(params, dict)
    for name in params:
        # Per-parameter options are only available in the dict form.
        options = params[name] if params_is_dict else {}
        if name not in request_dict:
            if options.get('optional', False):
                continue
            raise manager_exceptions.BadParametersError(
                'Missing {0} in json request body'.format(name))
        expected_type = options.get('type', None)
        if expected_type and not isinstance(request_dict[name], expected_type):
            raise manager_exceptions.BadParametersError(
                '{0} parameter is expected to be of type {1} but is of type '
                '{2}'.format(name,
                             expected_type.__name__,
                             type(request_dict[name]).__name__))
    return request_dict
def get_args_and_verify_arguments(arguments):
    """Parse the given flask-restful arguments from the query string.

    Each argument's location is forced to 'args' (the URL query string)
    before parsing.
    """
    parser = RequestParser()
    for arg in arguments:
        arg.location = 'args'
        parser.args.append(arg)
    return parser.parse_args()
def verify_and_convert_bool(attribute_name, str_bool):
    """Coerce ``str_bool`` to a bool.

    Accepts an actual bool, or the strings 'true'/'false' in any casing.

    :param attribute_name: name used in the error message.
    :raises BadParametersError: for any other value.
    """
    if isinstance(str_bool, bool):
        return str_bool
    lowered = str_bool.lower()
    if lowered == 'true':
        return True
    elif lowered == 'false':
        return False
    raise manager_exceptions.BadParametersError(
        '{0} must be <true/false>, got {1}'.format(attribute_name, str_bool))
def convert_to_int(value):
    """Convert ``value`` to int, wrapping conversion failures.

    The handler is narrowed to TypeError/ValueError — the only exceptions
    ``int()`` raises for bad input — so unrelated failures are no longer
    masked by the previous blanket ``except Exception``.

    :raises BadParametersError: when the value is not a valid integer.
    """
    try:
        return int(value)
    except (TypeError, ValueError):
        raise manager_exceptions.BadParametersError(
            'invalid parameter, should be int, got: {0}'.format(value))
def make_streaming_response(res_id, res_path, content_length, archive_type):
    """Build a flask response that delegates the actual file transfer.

    The X-Accel-Redirect header instructs the fronting nginx to stream
    ``res_path`` itself, so the application never has to serve the bytes.
    """
    response = make_response()
    header_items = [
        ('Content-Description', 'File Transfer'),
        ('Cache-Control', 'no-cache'),
        ('Content-Type', 'application/octet-stream'),
        ('Content-Disposition',
         'attachment; filename={0}.{1}'.format(res_id, archive_type)),
        ('Content-Length', content_length),
        ('X-Accel-Redirect', res_path),
        ('X-Accel-Buffering', 'yes'),
    ]
    for name, value in header_items:
        response.headers[name] = value
    return response
def set_restart_task(delay=1):
    """Schedule an asynchronous restart of the REST service.

    Spawns a detached shell process that sleeps for ``delay`` seconds and
    then restarts the service via systemctl, allowing the current request
    to finish before the process goes down.

    :param delay: seconds to wait before restarting.
    """
    current_app.logger.info('Restarting the rest service')
    # NOTE(review): shell=True with string interpolation — safe only while
    # ``delay`` is a trusted number; confirm callers never pass user input.
    cmd = 'sleep {0}; sudo systemctl restart {1}' \
        .format(delay, REST_SERVICE_NAME)
    subprocess.Popen(cmd, shell=True)
def validate_inputs(input_dict):
    """Validate a mapping of argument names to string values.

    Each value must be non-empty, at most 256 characters, contain only
    URL-safe characters (letters, digits, '-', '.', '_') and begin with a
    letter.  Note: ``iteritems``/``urllib.quote`` indicate this module
    targets Python 2.

    :param input_dict: mapping of argument name -> string value.
    :raises BadParametersError: on the first value that fails a check.
    """
    for input_name, input_value in input_dict.iteritems():
        # Prefix reused in all error messages for this argument.
        prefix = 'The `{0}` argument'.format(input_name)
        if not input_value:
            raise manager_exceptions.BadParametersError(
                '{0} is empty'.format(prefix)
            )
        if len(input_value) > 256:
            raise manager_exceptions.BadParametersError(
                '{0} is too long. Maximum allowed length is 256 '
                'characters'.format(prefix)
            )
        # urllib.quote changes all chars except alphanumeric chars and _-.
        # So if quoting changed anything, an illegal character was present.
        quoted_value = urllib.quote(input_value, safe='')
        if quoted_value != input_value:
            raise manager_exceptions.BadParametersError(
                '{0} contains illegal characters. Only letters, digits and the'
                ' characters "-", "." and "_" are allowed'.format(prefix)
            )
        if input_value[0] not in ascii_letters:
            raise manager_exceptions.BadParametersError(
                '{0} must begin with a letter'.format(prefix)
            )
def validate_and_decode_password(password):
    """Validate that the password is 5-256 characters and return it.

    :raises BadParametersError: when the password is empty, too long or
        too short.
    """
    if not password:
        raise manager_exceptions.BadParametersError('The password is empty')
    length = len(password)
    if length > 256:
        raise manager_exceptions.BadParametersError(
            'The password is too long. Maximum allowed length is 256 '
            'characters'
        )
    if length < 5:
        raise manager_exceptions.BadParametersError(
            'The password is too short. Minimum allowed length is 5 '
            'characters'
        )
    return password
def is_clustered():
    # True when the premium HA cluster is initialized; falls back to the
    # stub dict (always falsy) when cloudify_premium is not installed.
    return node_status.get('initialized')
def verify_role(role_name, is_system_role=False):
    """Make sure that role name is present in the system.

    :param role_name: Role name to validate against database content.
    :param is_system_role: True if system_role, False if tenant_role
    :raises: BadParametersError when role is not found in the system or is
             not from the right type
    """
    expected_role_type = 'system_role' if is_system_role else 'tenant_role'
    all_roles = config.instance.authorization_roles
    # Look the role up by name.
    role = None
    for candidate in all_roles:
        if candidate['name'] == role_name:
            role = candidate
            break
    if role is None:
        # Unknown role: report which roles would have been acceptable.
        valid_roles = [
            candidate['name']
            for candidate in all_roles
            if candidate['type'] in (expected_role_type, 'any')
        ]
        raise manager_exceptions.BadParametersError(
            'Invalid role: `{0}`. Valid {1} roles are: {2}'
            .format(role_name, expected_role_type, valid_roles)
        )
    # Known role, but of the wrong kind (system vs tenant).
    if role['type'] not in (expected_role_type, 'any'):
        raise manager_exceptions.BadParametersError(
            'Role `{0}` is a {1} and cannot be assigned as a {2}'
            .format(role_name, role['type'], expected_role_type)
        )
def request_use_all_tenants():
    # Read the `_all_tenants` query-string flag (defaults to False) and
    # coerce it to a bool, rejecting anything other than true/false.
    return verify_and_convert_bool('all_tenants',
                                   request.args.get('_all_tenants', False))
def get_visibility_parameter(optional=False,
                             is_argument=False,
                             valid_values=VISIBILITY_EXCEPT_PRIVATE):
    """Extract and validate the `visibility` parameter of a request.

    :param optional: whether the parameter may be absent (JSON-body mode).
    :param is_argument: read from the query string instead of the JSON body.
    :param valid_values: allowed visibility values (private excluded by
        default).
    :raises BadParametersError: when a value is given but not valid.
    :return: the visibility value, or None when absent.
    """
    if is_argument:
        # Query-string mode; `unicode` indicates Python-2 targeting.
        args = get_args_and_verify_arguments(
            [Argument('visibility', type=unicode, default=None)]
        )
        visibility = args.visibility
    else:
        # JSON-body mode.
        request_dict = get_json_and_verify_params({
            'visibility': {'optional': optional, 'type': unicode}
        })
        visibility = request_dict.get('visibility', None)
    if visibility is not None and visibility not in valid_values:
        raise manager_exceptions.BadParametersError(
            "Invalid visibility: `{0}`. Valid visibility's values are: {1}"
            .format(visibility, valid_values)
        )
    return visibility
| 33.554656
| 79
| 0.664696
|
llib
import subprocess
from flask import current_app
from string import ascii_letters
from flask import request, make_response
from flask_restful.reqparse import Argument
from flask_restful.reqparse import RequestParser
from contextlib import contextmanager
from manager_rest import manager_exceptions, config
from manager_rest.constants import REST_SERVICE_NAME
from manager_rest.storage.models_states import VisibilityState
try:
from cloudify_premium.ha import node_status
except ImportError:
node_status = {'initialized': False}
states_except_private = copy.deepcopy(VisibilityState.STATES)
states_except_private.remove('private')
VISIBILITY_EXCEPT_PRIVATE = states_except_private
@contextmanager
def skip_nested_marshalling():
request.__skip_marshalling = True
yield
delattr(request, '__skip_marshalling')
def get_json_and_verify_params(params=None):
params = params or []
if request.content_type != 'application/json':
raise manager_exceptions.UnsupportedContentTypeError(
'Content type must be application/json')
request_dict = request.json
is_params_dict = isinstance(params, dict)
def is_optional(param_name):
return is_params_dict and params[param_name].get('optional', False)
def check_type(param_name):
return is_params_dict and params[param_name].get('type', None)
for param in params:
if param not in request_dict:
if is_optional(param):
continue
raise manager_exceptions.BadParametersError(
'Missing {0} in json request body'.format(param))
param_type = check_type(param)
if param_type and not isinstance(request_dict[param], param_type):
raise manager_exceptions.BadParametersError(
'{0} parameter is expected to be of type {1} but is of type '
'{2}'.format(param,
param_type.__name__,
type(request_dict[param]).__name__))
return request_dict
def get_args_and_verify_arguments(arguments):
request_parser = RequestParser()
for argument in arguments:
argument.location = 'args'
request_parser.args.append(argument)
return request_parser.parse_args()
def verify_and_convert_bool(attribute_name, str_bool):
if isinstance(str_bool, bool):
return str_bool
if str_bool.lower() == 'true':
return True
if str_bool.lower() == 'false':
return False
raise manager_exceptions.BadParametersError(
'{0} must be <true/false>, got {1}'.format(attribute_name, str_bool))
def convert_to_int(value):
try:
return int(value)
except Exception:
raise manager_exceptions.BadParametersError(
'invalid parameter, should be int, got: {0}'.format(value))
def make_streaming_response(res_id, res_path, content_length, archive_type):
response = make_response()
response.headers['Content-Description'] = 'File Transfer'
response.headers['Cache-Control'] = 'no-cache'
response.headers['Content-Type'] = 'application/octet-stream'
response.headers['Content-Disposition'] = \
'attachment; filename={0}.{1}'.format(res_id, archive_type)
response.headers['Content-Length'] = content_length
response.headers['X-Accel-Redirect'] = res_path
response.headers['X-Accel-Buffering'] = 'yes'
return response
def set_restart_task(delay=1):
current_app.logger.info('Restarting the rest service')
cmd = 'sleep {0}; sudo systemctl restart {1}' \
.format(delay, REST_SERVICE_NAME)
subprocess.Popen(cmd, shell=True)
def validate_inputs(input_dict):
for input_name, input_value in input_dict.iteritems():
prefix = 'The `{0}` argument'.format(input_name)
if not input_value:
raise manager_exceptions.BadParametersError(
'{0} is empty'.format(prefix)
)
if len(input_value) > 256:
raise manager_exceptions.BadParametersError(
'{0} is too long. Maximum allowed length is 256 '
'characters'.format(prefix)
)
quoted_value = urllib.quote(input_value, safe='')
if quoted_value != input_value:
raise manager_exceptions.BadParametersError(
'{0} contains illegal characters. Only letters, digits and the'
' characters "-", "." and "_" are allowed'.format(prefix)
)
if input_value[0] not in ascii_letters:
raise manager_exceptions.BadParametersError(
'{0} must begin with a letter'.format(prefix)
)
def validate_and_decode_password(password):
if not password:
raise manager_exceptions.BadParametersError('The password is empty')
if len(password) > 256:
raise manager_exceptions.BadParametersError(
'The password is too long. Maximum allowed length is 256 '
'characters'
)
if len(password) < 5:
raise manager_exceptions.BadParametersError(
'The password is too short. Minimum allowed length is 5 '
'characters'
)
return password
def is_clustered():
return node_status.get('initialized')
def verify_role(role_name, is_system_role=False):
expected_role_type = 'system_role' if is_system_role else 'tenant_role'
role = next(
(
r
for r in config.instance.authorization_roles
if r['name'] == role_name
),
None
)
if role is None:
valid_roles = [
r['name']
for r in config.instance.authorization_roles
if r['type'] in (expected_role_type, 'any')
]
raise manager_exceptions.BadParametersError(
'Invalid role: `{0}`. Valid {1} roles are: {2}'
.format(role_name, expected_role_type, valid_roles)
)
if role['type'] not in (expected_role_type, 'any'):
raise manager_exceptions.BadParametersError(
'Role `{0}` is a {1} and cannot be assigned as a {2}'
.format(role_name, role['type'], expected_role_type)
)
def request_use_all_tenants():
return verify_and_convert_bool('all_tenants',
request.args.get('_all_tenants', False))
def get_visibility_parameter(optional=False,
is_argument=False,
valid_values=VISIBILITY_EXCEPT_PRIVATE):
if is_argument:
args = get_args_and_verify_arguments(
[Argument('visibility', type=unicode, default=None)]
)
visibility = args.visibility
else:
request_dict = get_json_and_verify_params({
'visibility': {'optional': optional, 'type': unicode}
})
visibility = request_dict.get('visibility', None)
if visibility is not None and visibility not in valid_values:
raise manager_exceptions.BadParametersError(
"Invalid visibility: `{0}`. Valid visibility's values are: {1}"
.format(visibility, valid_values)
)
return visibility
| true
| true
|
f70c847bebf0a1aa589b994ffb769592faf8f965
| 1,156
|
py
|
Python
|
plans/fixed_ensemble_vggish_linear_4.py
|
dbis-uibk/MediaEval2021
|
14d754d9cea36415090aaa115db81f5ace465964
|
[
"BSD-2-Clause"
] | 1
|
2022-03-31T07:28:12.000Z
|
2022-03-31T07:28:12.000Z
|
plans/fixed_ensemble_vggish_linear_4.py
|
dbis-uibk/MediaEval2021
|
14d754d9cea36415090aaa115db81f5ace465964
|
[
"BSD-2-Clause"
] | null | null | null |
plans/fixed_ensemble_vggish_linear_4.py
|
dbis-uibk/MediaEval2021
|
14d754d9cea36415090aaa115db81f5ace465964
|
[
"BSD-2-Clause"
] | null | null | null |
"""Ensemble plan manually split by type moode/theme."""
import json
from dbispipeline.evaluators import FixedSplitEvaluator
from dbispipeline.evaluators import ModelCallbackWrapper
import numpy as np
from sklearn.pipeline import Pipeline
from mediaeval2021 import common
from mediaeval2021.dataloaders.melspectrograms import MelSpectPickleLoader
from mediaeval2021.models.ensemble import Ensemble
from mediaeval2021.models.wrapper import TorchWrapper
# Pre-computed mel-spectrogram features shared by all ensemble members.
dataloader = MelSpectPickleLoader('data/mediaeval2020/melspect_1366.pickle')
# The 56 labels are split into four contiguous groups of 14; each group is
# predicted by its own base estimator inside the ensemble.
label_splits = [
    np.arange(0, 14, 1),
    np.arange(14, 28, 1),
    np.arange(28, 42, 1),
    np.arange(42, 56, 1),
]
# Pipeline with a single step: an ensemble of CNN wrappers, one per split.
pipeline = Pipeline([
    ('model',
     Ensemble(
         base_estimator=TorchWrapper(
             model_name='CNN',
             dataloader=dataloader,
             batch_size=64,
         ),
         label_splits=label_splits,
         epochs=100,
     )),
])
# Evaluate on the project's fixed split and store the model's predictions.
evaluator = ModelCallbackWrapper(
    FixedSplitEvaluator(**common.fixed_split_params()),
    lambda model: common.store_prediction(model, dataloader),
)
# Pretty-print the evaluation results as JSON.
result_handlers = [
    lambda results: print(json.dumps(results, indent=4)),
]
| 26.272727
| 76
| 0.718858
|
import json
from dbispipeline.evaluators import FixedSplitEvaluator
from dbispipeline.evaluators import ModelCallbackWrapper
import numpy as np
from sklearn.pipeline import Pipeline
from mediaeval2021 import common
from mediaeval2021.dataloaders.melspectrograms import MelSpectPickleLoader
from mediaeval2021.models.ensemble import Ensemble
from mediaeval2021.models.wrapper import TorchWrapper
dataloader = MelSpectPickleLoader('data/mediaeval2020/melspect_1366.pickle')
label_splits = [
np.arange(0, 14, 1),
np.arange(14, 28, 1),
np.arange(28, 42, 1),
np.arange(42, 56, 1),
]
pipeline = Pipeline([
('model',
Ensemble(
base_estimator=TorchWrapper(
model_name='CNN',
dataloader=dataloader,
batch_size=64,
),
label_splits=label_splits,
epochs=100,
)),
])
evaluator = ModelCallbackWrapper(
FixedSplitEvaluator(**common.fixed_split_params()),
lambda model: common.store_prediction(model, dataloader),
)
result_handlers = [
lambda results: print(json.dumps(results, indent=4)),
]
| true
| true
|
f70c854ed6d40c821fd014e478a79120bd10049b
| 2,932
|
py
|
Python
|
lldb/packages/Python/lldbsuite/test/functionalities/gdb_remote_client/TestGDBRemoteLoad.py
|
medismailben/llvm-project
|
e334a839032fe500c3bba22bf976ab7af13ce1c1
|
[
"Apache-2.0"
] | 765
|
2015-12-03T16:44:59.000Z
|
2022-03-07T12:41:10.000Z
|
packages/Python/lldbsuite/test/functionalities/gdb_remote_client/TestGDBRemoteLoad.py
|
DalavanCloud/lldb
|
e913eaf2468290fb94c767d474d611b41a84dd69
|
[
"Apache-2.0"
] | 1,815
|
2015-12-11T23:56:05.000Z
|
2020-01-10T19:28:43.000Z
|
packages/Python/lldbsuite/test/functionalities/gdb_remote_client/TestGDBRemoteLoad.py
|
DalavanCloud/lldb
|
e913eaf2468290fb94c767d474d611b41a84dd69
|
[
"Apache-2.0"
] | 284
|
2015-12-03T16:47:25.000Z
|
2022-03-12T05:39:48.000Z
|
import lldb
from lldbsuite.test.lldbtest import *
from lldbsuite.test.decorators import *
from gdbclientutils import *
class TestGDBRemoteLoad(GDBRemoteTestBase):
    """Tests for loading object files via the gdb-remote protocol."""

    def setUp(self):
        # Remember the selected platform so it can be restored after the
        # test, since connecting may switch it.
        super(TestGDBRemoteLoad, self).setUp()
        self._initial_platform = lldb.DBG.GetSelectedPlatform()

    def tearDown(self):
        # Restore the platform saved in setUp.
        lldb.DBG.SetSelectedPlatform(self._initial_platform)
        super(TestGDBRemoteLoad, self).tearDown()

    def test_module_load_address(self):
        """Test that setting the load address of a module uses virtual addresses"""
        target = self.createTarget("a.yaml")
        process = self.connect(target)
        module = target.GetModuleAtIndex(0)
        self.assertTrue(module.IsValid())
        self.assertTrue(target.SetModuleLoadAddress(module, 0).Success())
        # 0x2001 should resolve inside the .data section of a.yaml.
        address = target.ResolveLoadAddress(0x2001)
        self.assertTrue(address.IsValid())
        self.assertEqual(".data", address.GetSection().GetName())

    def test_ram_load(self):
        """Test loading an object file to a target's ram"""
        target = self.createTarget("a.yaml")
        process = self.connect(target)
        self.dbg.HandleCommand("target modules load -l -s0")
        # Plain RAM loads are expected to use M (write memory) packets.
        self.assertPacketLogContains([
            "M1000,4:c3c3c3c3",
            "M1004,2:3232"
        ])

    @skipIfXmlSupportMissing
    def test_flash_load(self):
        """Test loading an object file to a target's flash memory"""
        class Responder(MockGDBServerResponder):
            # Mock server advertising a memory map with a flash region, so
            # the load is expected to go through vFlash* packets instead
            # of plain memory writes.
            def qSupported(self, client_supported):
                return "PacketSize=3fff;QStartNoAckMode+;qXfer:memory-map:read+"
            def qXferRead(self, obj, annex, offset, length):
                if obj == "memory-map":
                    return (self.MEMORY_MAP[offset:offset + length],
                            offset + length < len(self.MEMORY_MAP))
                return None, False
            def other(self, packet):
                # Accept all flash-programming packets.
                if packet[0:11] == "vFlashErase":
                    return "OK"
                if packet[0:11] == "vFlashWrite":
                    return "OK"
                if packet == "vFlashDone":
                    return "OK"
                return ""
            MEMORY_MAP = """<?xml version="1.0"?>
<memory-map>
  <memory type="ram" start="0x0" length="0x1000"/>
  <memory type="flash" start="0x1000" length="0x1000">
    <property name="blocksize">0x100</property>
  </memory>
  <memory type="ram" start="0x2000" length="0x1D400"/>
</memory-map>
"""
        self.server.responder = Responder()
        target = self.createTarget("a.yaml")
        process = self.connect(target)
        self.dbg.HandleCommand("target modules load -l -s0")
        self.assertPacketLogContains([
            "vFlashErase:1000,100",
            "vFlashWrite:1000:\xc3\xc3\xc3\xc3",
            "vFlashWrite:1004:\x32\x32",
            "vFlashDone"
        ])
| 36.197531
| 83
| 0.594134
|
import lldb
from lldbsuite.test.lldbtest import *
from lldbsuite.test.decorators import *
from gdbclientutils import *
class TestGDBRemoteLoad(GDBRemoteTestBase):
def setUp(self):
super(TestGDBRemoteLoad, self).setUp()
self._initial_platform = lldb.DBG.GetSelectedPlatform()
def tearDown(self):
lldb.DBG.SetSelectedPlatform(self._initial_platform)
super(TestGDBRemoteLoad, self).tearDown()
def test_module_load_address(self):
target = self.createTarget("a.yaml")
process = self.connect(target)
module = target.GetModuleAtIndex(0)
self.assertTrue(module.IsValid())
self.assertTrue(target.SetModuleLoadAddress(module, 0).Success())
address = target.ResolveLoadAddress(0x2001)
self.assertTrue(address.IsValid())
self.assertEqual(".data", address.GetSection().GetName())
def test_ram_load(self):
target = self.createTarget("a.yaml")
process = self.connect(target)
self.dbg.HandleCommand("target modules load -l -s0")
self.assertPacketLogContains([
"M1000,4:c3c3c3c3",
"M1004,2:3232"
])
@skipIfXmlSupportMissing
def test_flash_load(self):
class Responder(MockGDBServerResponder):
def qSupported(self, client_supported):
return "PacketSize=3fff;QStartNoAckMode+;qXfer:memory-map:read+"
def qXferRead(self, obj, annex, offset, length):
if obj == "memory-map":
return (self.MEMORY_MAP[offset:offset + length],
offset + length < len(self.MEMORY_MAP))
return None, False
def other(self, packet):
if packet[0:11] == "vFlashErase":
return "OK"
if packet[0:11] == "vFlashWrite":
return "OK"
if packet == "vFlashDone":
return "OK"
return ""
MEMORY_MAP = """<?xml version="1.0"?>
<memory-map>
<memory type="ram" start="0x0" length="0x1000"/>
<memory type="flash" start="0x1000" length="0x1000">
<property name="blocksize">0x100</property>
</memory>
<memory type="ram" start="0x2000" length="0x1D400"/>
</memory-map>
"""
self.server.responder = Responder()
target = self.createTarget("a.yaml")
process = self.connect(target)
self.dbg.HandleCommand("target modules load -l -s0")
self.assertPacketLogContains([
"vFlashErase:1000,100",
"vFlashWrite:1000:\xc3\xc3\xc3\xc3",
"vFlashWrite:1004:\x32\x32",
"vFlashDone"
])
| true
| true
|
f70c8689b3fa9172e75204bc9e12eafd938d8501
| 1,112
|
py
|
Python
|
load-database/createbulk.py
|
jeantardelli/data-engineering-with-python
|
d547a20ff1464c945fd6c0b6a1a805fd0b4b227a
|
[
"MIT"
] | 6
|
2020-12-01T20:30:25.000Z
|
2022-02-02T17:28:49.000Z
|
load-database/createbulk.py
|
jeantardelli/data-engineering-with-python
|
d547a20ff1464c945fd6c0b6a1a805fd0b4b227a
|
[
"MIT"
] | null | null | null |
load-database/createbulk.py
|
jeantardelli/data-engineering-with-python
|
d547a20ff1464c945fd6c0b6a1a805fd0b4b227a
|
[
"MIT"
] | 2
|
2021-06-10T15:16:42.000Z
|
2022-03-22T20:36:09.000Z
|
"""createbulk
This module illustrates how to connect to MySQL and load bulk data in python
using the mysql-connector-python library.
"""
import mysql.connector
from faker import Faker
# Create a connection to MySQL; credentials come from the option file.
db = mysql.connector.connect(option_files="../sql-user/my.ini")
cursor = db.cursor()
# Create Faker instance for generating synthetic person records.
fake = Faker()
# Define the query template and the parameters to submit with it
sql = """INSERT INTO
dataengineering.people (name, age, street, city, state, zip, lng, lat)
VALUES
(%(name)s, %(age)s, %(street)s, %(city)s, %(state)s, %(zip)s, %(lng)s, %(lat)s);
"""
# One parameter dict per fake record; 1000 rows total.
params = [
    {'name': fake.name(),
     'age': fake.random_int(min=18, max=80, step=1),
     'street': fake.street_address(),
     'city': fake.city(),
     'state': fake.state(),
     'zip': fake.zipcode(),
     'lng': fake.longitude(),
     'lat': fake.latitude(),
    }
    for _ in range(1000)
]
# Execute queries in one batched round trip, then commit and clean up.
cursor.executemany(sql, params)
print("Row count: {0}".format(cursor.rowcount))
db.commit()
db.close()
| 25.860465
| 93
| 0.604317
|
import mysql.connector
from faker import Faker
db = mysql.connector.connect(option_files="../sql-user/my.ini")
cursor = db.cursor()
fake = Faker()
sql = """INSERT INTO
dataengineering.people (name, age, street, city, state, zip, lng, lat)
VALUES
(%(name)s, %(age)s, %(street)s, %(city)s, %(state)s, %(zip)s, %(lng)s, %(lat)s);
"""
params = [
{'name': fake.name(),
'age': fake.random_int(min=18, max=80, step=1),
'street': fake.street_address(),
'city': fake.city(),
'state': fake.state(),
'zip': fake.zipcode(),
'lng': fake.longitude(),
'lat': fake.latitude(),
}
for _ in range(1000)
]
cursor.executemany(sql, params)
print("Row count: {0}".format(cursor.rowcount))
db.commit()
db.close()
| true
| true
|
f70c876b372d9d78e012d9a9197fbb06a087e3dc
| 5,847
|
py
|
Python
|
using_microservices/workflows/example3/allies_prediction_workflow.py
|
MD-Studio/MDStudio_examples
|
16919769ee4d69e598683d192e98649a6345bd0b
|
[
"Apache-2.0"
] | null | null | null |
using_microservices/workflows/example3/allies_prediction_workflow.py
|
MD-Studio/MDStudio_examples
|
16919769ee4d69e598683d192e98649a6345bd0b
|
[
"Apache-2.0"
] | null | null | null |
using_microservices/workflows/example3/allies_prediction_workflow.py
|
MD-Studio/MDStudio_examples
|
16919769ee4d69e598683d192e98649a6345bd0b
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
import pickle
from autobahn.twisted.util import sleep
from mdstudio.deferred.chainable import chainable
from mdstudio.component.session import ComponentSession
from mdstudio.runner import main
from mdstudio_workflow import Workflow
class LIEPredictionWorkflow(ComponentSession):
    """
    This workflow will perform a binding affinity prediction for CYP 1A2 with
    applicability domain analysis using the Linear Interaction Energy (LIE)
    method as described in:

    Capoferri L, Verkade-Vreeker MCA, Buitenhuis D, Commandeur JNM, Pastor M,
    Vermeulen NPE, et al. (2015) "Linear Interaction Energy Based Prediction
    of Cytochrome P450 1A2 Binding Affinities with Reliability Estimation."
    PLoS ONE 10(11): e0142232. https://doi.org/10.1371/journal.pone.0142232

    The workflow uses data from the pre-calibrated CYP1A2 model created using
    the eTOX ALLIES Linear Interaction Energy pipeline (liemodel parameter).
    Pre-calculated molecular dynamics trajectory LIE energy values are
    available for bound and unbound ligand cases (bound_trajectory,
    unbound_trajectory respectively)
    """
    def authorize_request(self, uri, claims):
        """
        Microservice specific authorization method.

        Will always be called when the service first tries to register with the
        broker. It returns True (= authorized) by default.
        """
        return True

    @chainable
    def on_run(self):
        # Ligand to make prediction for
        ligand = 'O1[C@@H](CCC1=O)CCC'
        ligand_format = 'smi'
        liemodel = os.path.join(os.getcwd(), '1A2_model')

        # CYP1A2 pre-calibrated model.  Open in binary mode (required by
        # pickle) and with a context manager so the file handle is closed —
        # the previous `pickle.load(open(path))` leaked the handle.
        modelpicklefile = os.path.join(liemodel, 'params.pkl')
        with open(modelpicklefile, 'rb') as pickled_model:
            modelfile = pickle.load(pickled_model)
        unbound_trajectory = os.path.join(os.getcwd(), "unbound_trajectory.ene")
        bound_trajectory = [os.path.join(os.getcwd(), "bound_trajectory.ene")]
        decompose_files = [os.path.join(os.getcwd(), "decompose_dataframe.ene")]

        # Build Workflow
        wf = Workflow(project_dir='./lie_prediction')
        wf.task_runner = self

        # STAGE 5. PYLIE FILTERING, AD ANALYSIS AND BINDING-AFFINITY PREDICTION
        # Collect Gromacs bound and unbound MD energy trajectories in a dataframe
        t18 = wf.add_task('Create mdframe',
                          task_type='WampTask',
                          uri='mdgroup.lie_pylie.endpoint.collect_energy_trajectories')
        t18.set_input(unbound_trajectory=unbound_trajectory,
                      bound_trajectory=bound_trajectory,
                      lie_vdw_header="Ligand-Ligenv-vdw",
                      lie_ele_header="Ligand-Ligenv-ele")

        # Determine stable regions in MDFrame and filter
        t19 = wf.add_task('Detect stable regions',
                          task_type='WampTask',
                          uri='mdgroup.lie_pylie.endpoint.filter_stable_trajectory')
        t19.set_input(do_plot=True,
                      minlength=45,
                      workdir='/tmp/mdstudio/lie_pylie')
        wf.connect_task(t18.nid, t19.nid, 'mdframe')

        # Extract average LIE energy values from the trajectory
        t20 = wf.add_task('LIE averages',
                          task_type='WampTask',
                          uri='mdgroup.lie_pylie.endpoint.calculate_lie_average')
        wf.connect_task(t19.nid, t20.nid, filtered_mdframe='mdframe')

        # Calculate dG using pre-calibrated model parameters
        t21 = wf.add_task('Calc dG',
                          task_type='WampTask',
                          uri='mdgroup.lie_pylie.endpoint.liedeltag')
        t21.set_input(alpha_beta_gamma=modelfile['LIE']['params'])
        wf.connect_task(t20.nid, t21.nid, 'averaged', averaged='dataframe')

        # Applicability domain: 1. Tanimoto similarity with training set
        t22 = wf.add_task('AD1 tanimoto simmilarity',
                          task_type='WampTask',
                          uri='mdgroup.mdstudio_structures.endpoint.chemical_similarity')
        t22.set_input(test_set=[ligand], mol_format=ligand_format, reference_set=modelfile['AD']['Tanimoto']['smi'],
                      ci_cutoff=modelfile['AD']['Tanimoto']['Furthest'])
        wf.connect_task(t18.nid, t22.nid)

        # Applicability domain: 2. residue decomposition
        t23 = wf.add_task('AD2 residue decomposition',
                          task_type='WampTask',
                          uri='mdgroup.lie_pylie.endpoint.adan_residue_decomp',
                          inline_files=False)
        t23.set_input(model_pkl=modelpicklefile, decompose_files=decompose_files)
        wf.connect_task(t18.nid, t23.nid)

        # Applicability domain: 3. deltaG energy range
        t24 = wf.add_task('AD3 dene yrange',
                          task_type='WampTask',
                          uri='mdgroup.lie_pylie.endpoint.adan_dene_yrange')
        t24.set_input(ymin=modelfile['AD']['Yrange']['min'],
                      ymax=modelfile['AD']['Yrange']['max'])
        wf.connect_task(t21.nid, t24.nid, 'liedeltag_file', liedeltag_file='dataframe')

        # Applicability domain: 4. deltaG energy distribution
        t25 = wf.add_task('AD4 dene distribution',
                          task_type='WampTask',
                          uri='mdgroup.lie_pylie.endpoint.adan_dene')
        t25.set_input(model_pkl=modelpicklefile,
                      center=list(modelfile['AD']['Dene']['Xmean']),
                      ci_cutoff=modelfile['AD']['Dene']['Maxdist'])
        wf.connect_task(t21.nid, t25.nid, 'liedeltag_file', liedeltag_file='dataframe')

        wf.run()
        while wf.is_running:
            yield sleep(1)
if __name__ == "__main__":
main(LIEPredictionWorkflow, auto_reconnect=False, daily_log=False)
| 43.962406
| 116
| 0.635198
|
import os
import pickle
from autobahn.twisted.util import sleep
from mdstudio.deferred.chainable import chainable
from mdstudio.component.session import ComponentSession
from mdstudio.runner import main
from mdstudio_workflow import Workflow
class LIEPredictionWorkflow(ComponentSession):
def authorize_request(self, uri, claims):
return True
@chainable
def on_run(self):
ligand = 'O1[C@@H](CCC1=O)CCC'
ligand_format = 'smi'
liemodel = os.path.join(os.getcwd(), '1A2_model')
modelpicklefile = os.path.join(liemodel, 'params.pkl')
modelfile = pickle.load(open(modelpicklefile))
unbound_trajectory = os.path.join(os.getcwd(), "unbound_trajectory.ene")
bound_trajectory = [os.path.join(os.getcwd(), "bound_trajectory.ene")]
decompose_files = [os.path.join(os.getcwd(), "decompose_dataframe.ene")]
wf = Workflow(project_dir='./lie_prediction')
wf.task_runner = self
t18 = wf.add_task('Create mdframe',
task_type='WampTask',
uri='mdgroup.lie_pylie.endpoint.collect_energy_trajectories')
t18.set_input(unbound_trajectory=unbound_trajectory,
bound_trajectory=bound_trajectory,
lie_vdw_header="Ligand-Ligenv-vdw",
lie_ele_header="Ligand-Ligenv-ele")
t19 = wf.add_task('Detect stable regions',
task_type='WampTask',
uri='mdgroup.lie_pylie.endpoint.filter_stable_trajectory')
t19.set_input(do_plot=True,
minlength=45,
workdir='/tmp/mdstudio/lie_pylie')
wf.connect_task(t18.nid, t19.nid, 'mdframe')
t20 = wf.add_task('LIE averages',
task_type='WampTask',
uri='mdgroup.lie_pylie.endpoint.calculate_lie_average')
wf.connect_task(t19.nid, t20.nid, filtered_mdframe='mdframe')
t21 = wf.add_task('Calc dG',
task_type='WampTask',
uri='mdgroup.lie_pylie.endpoint.liedeltag')
t21.set_input(alpha_beta_gamma=modelfile['LIE']['params'])
wf.connect_task(t20.nid, t21.nid, 'averaged', averaged='dataframe')
t22 = wf.add_task('AD1 tanimoto simmilarity',
task_type='WampTask',
uri='mdgroup.mdstudio_structures.endpoint.chemical_similarity')
t22.set_input(test_set=[ligand], mol_format=ligand_format, reference_set=modelfile['AD']['Tanimoto']['smi'],
ci_cutoff=modelfile['AD']['Tanimoto']['Furthest'])
wf.connect_task(t18.nid, t22.nid)
t23 = wf.add_task('AD2 residue decomposition',
task_type='WampTask',
uri='mdgroup.lie_pylie.endpoint.adan_residue_decomp',
inline_files=False)
t23.set_input(model_pkl=modelpicklefile, decompose_files=decompose_files)
wf.connect_task(t18.nid, t23.nid)
t24 = wf.add_task('AD3 dene yrange',
task_type='WampTask',
uri='mdgroup.lie_pylie.endpoint.adan_dene_yrange')
t24.set_input(ymin=modelfile['AD']['Yrange']['min'],
ymax=modelfile['AD']['Yrange']['max'])
wf.connect_task(t21.nid, t24.nid, 'liedeltag_file', liedeltag_file='dataframe')
t25 = wf.add_task('AD4 dene distribution',
task_type='WampTask',
uri='mdgroup.lie_pylie.endpoint.adan_dene')
t25.set_input(model_pkl=modelpicklefile,
center=list(modelfile['AD']['Dene']['Xmean']),
ci_cutoff=modelfile['AD']['Dene']['Maxdist'])
wf.connect_task(t21.nid, t25.nid, 'liedeltag_file', liedeltag_file='dataframe')
wf.run()
while wf.is_running:
yield sleep(1)
if __name__ == "__main__":
main(LIEPredictionWorkflow, auto_reconnect=False, daily_log=False)
| true
| true
|
f70c878b02f3fda19aefdfd9b267bced09c2c057
| 1,909
|
py
|
Python
|
tests/crystallography/test_direct_metric_tensor.py
|
drix00/ElectronDiffraction
|
9dc258d90d0b73745b904b1bb6e1e3e794403a27
|
[
"Apache-2.0"
] | null | null | null |
tests/crystallography/test_direct_metric_tensor.py
|
drix00/ElectronDiffraction
|
9dc258d90d0b73745b904b1bb6e1e3e794403a27
|
[
"Apache-2.0"
] | null | null | null |
tests/crystallography/test_direct_metric_tensor.py
|
drix00/ElectronDiffraction
|
9dc258d90d0b73745b904b1bb6e1e3e794403a27
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
.. py:currentmodule:: test_direct_metric_tensor
:synopsis: Tests for the module :py:mod:`direct_metric_tensor`
.. moduleauthor:: Hendrix Demers <hendrix.demers@mail.mcgill.ca>
Tests for the module :py:mod:`direct_metric_tensor`.
"""
###############################################################################
# Copyright 2017 Hendrix Demers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# Standard library modules.
import unittest
# Third party modules.
# Local modules.
# Project modules.
import electrondiffraction.crystallography.direct_metric_tensor as direct_metric_tensor
# Globals and constants variables.
class Test_direct_metric_tensor(unittest.TestCase):
    """
    TestCase class for the module :py:mod:`direct_metric_tensor`.
    """

    def setUp(self):
        """Set up fixtures before each test method."""
        unittest.TestCase.setUp(self)

    def tearDown(self):
        """Tear down fixtures after each test method."""
        unittest.TestCase.tearDown(self)

    def testSkeleton(self):
        """
        First test to check if the testcase is working with the testing framework.
        """
        # self.assert_ was a deprecated alias, removed in Python 3.12;
        # assertTrue is the supported spelling.
        self.assertTrue(True)
if __name__ == '__main__':  # pragma: no cover
    # nose is unmaintained and no longer works on modern Python;
    # the stdlib unittest runner discovers and runs the same tests.
    unittest.main()
| 25.797297
| 87
| 0.632268
| true
| true
|
|
f70c87fbcd1a5396c99536a27f7e3ca06a99465c
| 916
|
py
|
Python
|
setup.py
|
forslund/librespot-python
|
7a340b1b20889e1afae47aa0f433a0893f4290f1
|
[
"Apache-2.0"
] | 64
|
2021-02-24T06:46:34.000Z
|
2022-03-29T11:33:46.000Z
|
setup.py
|
forslund/librespot-python
|
7a340b1b20889e1afae47aa0f433a0893f4290f1
|
[
"Apache-2.0"
] | 16
|
2021-04-24T12:25:30.000Z
|
2022-02-19T00:02:44.000Z
|
setup.py
|
forslund/librespot-python
|
7a340b1b20889e1afae47aa0f433a0893f4290f1
|
[
"Apache-2.0"
] | 22
|
2021-04-05T23:57:14.000Z
|
2022-03-10T04:45:08.000Z
|
import setuptools

# Read the README once with an explicit encoding and close the handle.
# The original ``open("README.md").read()`` leaked the file object and
# relied on the platform default encoding, which breaks on non-UTF-8
# locales (e.g. Windows cp1252).
with open("README.md", encoding="utf-8") as readme:
    long_description = readme.read()

# Package metadata and build configuration for the librespot distribution.
setuptools.setup(name="librespot",
                 version="0.0.1",
                 description="Open Source Spotify Client",
                 long_description=long_description,
                 long_description_content_type="text/markdown",
                 author="kokarare1212",
                 url="https://github.com/kokarare1212/librespot-python",
                 license="Apache-2.0",
                 # Discover all packages under the project root.
                 packages=setuptools.find_packages("."),
                 install_requires=[
                     "defusedxml", "protobuf", "pycryptodomex", "pyogg",
                     "requests", "websocket-client", "zeroconf"
                 ],
                 classifiers=[
                     "Development Status :: 1 - Planning",
                     "License :: OSI Approved :: Apache Software License",
                     "Topic :: Multimedia :: Sound/Audio"
                 ])
| 43.619048
| 74
| 0.494541
|
import setuptools
setuptools.setup(name="librespot",
version="0.0.1",
description="Open Source Spotify Client",
long_description=open("README.md").read(),
long_description_content_type="text/markdown",
author="kokarare1212",
url="https://github.com/kokarare1212/librespot-python",
license="Apache-2.0",
packages=setuptools.find_packages("."),
install_requires=[
"defusedxml", "protobuf", "pycryptodomex", "pyogg",
"requests", "websocket-client", "zeroconf"
],
classifiers=[
"Development Status :: 1 - Planning",
"License :: OSI Approved :: Apache Software License",
"Topic :: Multimedia :: Sound/Audio"
])
| true
| true
|
f70c8a2e1832207b44ba61a052a45f916f0afa0d
| 18,912
|
py
|
Python
|
regain/covariance/time_graphical_lasso_.py
|
veronicatozzo/regain
|
5eaa9685eb34afa77abaf80a4e5764444bc95dd7
|
[
"BSD-3-Clause"
] | null | null | null |
regain/covariance/time_graphical_lasso_.py
|
veronicatozzo/regain
|
5eaa9685eb34afa77abaf80a4e5764444bc95dd7
|
[
"BSD-3-Clause"
] | null | null | null |
regain/covariance/time_graphical_lasso_.py
|
veronicatozzo/regain
|
5eaa9685eb34afa77abaf80a4e5764444bc95dd7
|
[
"BSD-3-Clause"
] | null | null | null |
# BSD 3-Clause License
# Copyright (c) 2017, Federico T.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Sparse inverse covariance selection over time via ADMM.
More information can be found in the paper linked at:
https://arxiv.org/abs/1703.01958
"""
from __future__ import division
import warnings
import numpy as np
from scipy import linalg
from six.moves import map, range, zip
from sklearn.covariance import empirical_covariance, log_likelihood
from sklearn.utils.extmath import squared_norm
from sklearn.utils.validation import check_X_y
from regain.covariance.graphical_lasso_ import GraphicalLasso, logl
from regain.norm import l1_od_norm
from regain.prox import prox_logdet, soft_thresholding
from regain.update_rules import update_rho
from regain.utils import convergence, error_norm_time
from regain.validation import check_norm_prox
def loss(S, K, n_samples=None):
    """Negative log-likelihood summed over all time points.

    Each time point contributes ``-n_i * logl(S_i, K_i)``.  When the
    per-time sample counts are not provided, every point gets weight 1.
    """
    if n_samples is None:
        n_samples = np.ones(S.shape[0])
    total = 0
    for emp_cov, precision, ni in zip(S, K, n_samples):
        total -= ni * logl(emp_cov, precision)
    return total
def objective(n_samples, S, K, Z_0, Z_1, Z_2, alpha, beta, psi):
    """Objective function for time-varying graphical lasso.

    Sum of the data-fit loss, the l1 off-diagonal sparsity penalty on
    each Z_0 block, and the temporal-consistency penalty psi on the
    consecutive differences Z_2 - Z_1.  Both alpha and beta may be
    scalars or per-time arrays.
    """
    obj = loss(S, K, n_samples=n_samples)

    # Sparsity penalty on the off-diagonal entries.
    if isinstance(alpha, np.ndarray):
        for a, z in zip(alpha, Z_0):
            obj += l1_od_norm(a * z)
    else:
        obj += alpha * sum(l1_od_norm(z) for z in Z_0)

    # Temporal consistency penalty between consecutive time points.
    if isinstance(beta, np.ndarray):
        for b, m in zip(beta, map(psi, Z_2 - Z_1)):
            obj += b[0][0] * m
    else:
        obj += beta * sum(psi(diff) for diff in Z_2 - Z_1)

    return obj
def init_precision(emp_cov, mode='empirical'):
    """Initialise the precision matrices for the solver.

    Parameters
    ----------
    emp_cov : ndarray, shape (n_times, n_features, n_features)
        Empirical covariance for each time point.
    mode : {'empirical', 'zeros'} or ndarray, default 'empirical'
        'empirical' inverts a slightly shrunk copy of each empirical
        covariance; 'zeros' returns zero matrices; an ndarray is used
        as a precomputed initialisation (a copy is returned).

    Returns
    -------
    K : ndarray, shape (n_times, n_features, n_features)
        Initial precision matrices.

    Raises
    ------
    ValueError
        If `mode` is not one of the supported options.  (The original
        fell through to a confusing NameError on unknown modes.)
    """
    if isinstance(mode, np.ndarray):
        return mode.copy()
    if mode == 'empirical':
        n_times, _, n_features = emp_cov.shape
        covariance_ = emp_cov.copy()
        # Shrink off-diagonal entries to keep the matrix well-conditioned.
        covariance_ *= 0.95
        K = np.empty_like(emp_cov)
        for i, (c, e) in enumerate(zip(covariance_, emp_cov)):
            # Restore the original diagonal before pseudo-inverting.
            c.flat[::n_features + 1] = e.flat[::n_features + 1]
            K[i] = linalg.pinvh(c)
    elif mode == 'zeros':
        K = np.zeros_like(emp_cov)
    else:
        raise ValueError(
            "Unknown initialisation mode %r; expected 'empirical', "
            "'zeros' or an ndarray." % (mode,))
    return K
def time_graphical_lasso(
        emp_cov, alpha=0.01, rho=1, beta=1, max_iter=100, n_samples=None,
        verbose=False, psi='laplacian', tol=1e-4, rtol=1e-4,
        return_history=False, return_n_iter=True, mode='admm',
        compute_objective=True, stop_at=None, stop_when=1e-4,
        update_rho_options=None, init='empirical'):
    """Time-varying graphical lasso solver.

    Solves the following problem via ADMM:
        min sum_{i=1}^T -n_i log_likelihood(S_i, K_i) + alpha*||K_i||_{od,1}
        + beta sum_{i=2}^T Psi(K_i - K_{i-1})

    where S_i = (1/n_i) X_i^T X_i is the empirical covariance of data
    matrix X (training observations by features).

    Parameters
    ----------
    emp_cov : ndarray, shape (n_times, n_features, n_features)
        Empirical covariance of data for each time point.
    alpha, beta : float, optional
        Regularisation parameters (sparsity and temporal consistency).
    rho : float, optional
        Augmented Lagrangian parameter.
    max_iter : int, optional
        Maximum number of iterations.
    n_samples : ndarray
        Number of samples available for each time point.
    tol : float, optional
        Absolute tolerance for convergence.
    rtol : float, optional
        Relative tolerance for convergence.
    return_history : bool, optional
        Return the history of computed values.
    return_n_iter : bool, optional
        Return the number of iteration before convergence.
    verbose : bool, default False
        Print info at each iteration.
    update_rho_options : dict, optional
        Arguments for the rho update.
        See regain.update_rules.update_rho function for more information.
    compute_objective : bool, default True
        Choose to compute the objective value.
    init : {'empirical', 'zero', ndarray}
        Choose how to initialize the precision matrix, with the inverse
        empirical covariance, zero matrix or precomputed.

    Returns
    -------
    K : numpy.array, 3-dimensional (T x d x d)
        Solution to the problem for each time t=1...T .
    history : list
        If return_history, then also a structure that contains the
        objective value, the primal and dual residual norms, and tolerances
        for the primal and dual residual norms at each iteration.
    """
    psi, prox_psi, psi_node_penalty = check_norm_prox(psi)
    # Consensus variables Z_* and scaled dual variables U_*:
    # Z_0 tracks K itself, Z_1/Z_2 track consecutive pairs (K_t, K_{t+1}).
    Z_0 = init_precision(emp_cov, mode=init)
    Z_1 = Z_0.copy()[:-1]  # np.zeros_like(emp_cov)[:-1]
    Z_2 = Z_0.copy()[1:]  # np.zeros_like(emp_cov)[1:]
    U_0 = np.zeros_like(Z_0)
    U_1 = np.zeros_like(Z_1)
    U_2 = np.zeros_like(Z_2)
    Z_0_old = np.zeros_like(Z_0)
    Z_1_old = np.zeros_like(Z_1)
    Z_2_old = np.zeros_like(Z_2)
    # divisor for consensus variables, accounting for two less matrices
    # (the first and last time points each appear in one fewer pair).
    divisor = np.full(emp_cov.shape[0], 3, dtype=float)
    divisor[0] -= 1
    divisor[-1] -= 1
    if n_samples is None:
        n_samples = np.ones(emp_cov.shape[0])
    checks = [
        convergence(
            obj=objective(
                n_samples, emp_cov, Z_0, Z_0, Z_1, Z_2, alpha, beta, psi))
    ]
    for iteration_ in range(max_iter):
        # update K: average the consensus terms, symmetrise, then apply
        # the proximal operator of the log-det loss per time point.
        A = Z_0 - U_0
        A[:-1] += Z_1 - U_1
        A[1:] += Z_2 - U_2
        A /= divisor[:, None, None]
        # soft_thresholding_ = partial(soft_thresholding, lamda=alpha / rho)
        # K = np.array(map(soft_thresholding_, A))
        A += A.transpose(0, 2, 1)
        A /= 2.
        A *= -rho * divisor[:, None, None] / n_samples[:, None, None]
        A += emp_cov
        K = np.array(
            [
                prox_logdet(a, lamda=ni / (rho * div))
                for a, div, ni in zip(A, divisor, n_samples)
            ])
        # update Z_0: symmetrised soft-thresholding enforces sparsity.
        A = K + U_0
        A += A.transpose(0, 2, 1)
        A /= 2.
        Z_0 = soft_thresholding(A, lamda=alpha / rho)
        # other Zs: enforce temporal consistency between consecutive K's.
        A_1 = K[:-1] + U_1
        A_2 = K[1:] + U_2
        if not psi_node_penalty:
            prox_e = prox_psi(A_2 - A_1, lamda=2. * beta / rho)
            Z_1 = .5 * (A_1 + A_2 - prox_e)
            Z_2 = .5 * (A_1 + A_2 + prox_e)
        else:
            Z_1, Z_2 = prox_psi(
                np.concatenate((A_1, A_2), axis=1), lamda=.5 * beta / rho,
                rho=rho, tol=tol, rtol=rtol, max_iter=max_iter)
        # update residuals (scaled dual ascent step)
        U_0 += K - Z_0
        U_1 += K[:-1] - Z_1
        U_2 += K[1:] - Z_2
        # diagnostics, reporting, termination checks
        rnorm = np.sqrt(
            squared_norm(K - Z_0) + squared_norm(K[:-1] - Z_1) +
            squared_norm(K[1:] - Z_2))
        snorm = rho * np.sqrt(
            squared_norm(Z_0 - Z_0_old) + squared_norm(Z_1 - Z_1_old) +
            squared_norm(Z_2 - Z_2_old))
        obj = objective(
            n_samples, emp_cov, Z_0, K, Z_1, Z_2, alpha, beta, psi) \
            if compute_objective else np.nan
        # if np.isinf(obj):
        #     Z_0 = Z_0_old
        #     break
        check = convergence(
            obj=obj,
            rnorm=rnorm,
            snorm=snorm,
            e_pri=np.sqrt(K.size + 2 * Z_1.size) * tol + rtol * max(
                np.sqrt(
                    squared_norm(Z_0) + squared_norm(Z_1) + squared_norm(Z_2)),
                np.sqrt(
                    squared_norm(K) + squared_norm(K[:-1]) +
                    squared_norm(K[1:]))),
            e_dual=np.sqrt(K.size + 2 * Z_1.size) * tol + rtol * rho *
            np.sqrt(squared_norm(U_0) + squared_norm(U_1) + squared_norm(U_2)),
            # precision=Z_0.copy()
        )
        Z_0_old = Z_0.copy()
        Z_1_old = Z_1.copy()
        Z_2_old = Z_2.copy()
        if verbose:
            print(
                "obj: %.4f, rnorm: %.4f, snorm: %.4f,"
                "eps_pri: %.4f, eps_dual: %.4f" % check[:5])
        checks.append(check)
        if stop_at is not None:
            if abs(check.obj - stop_at) / abs(stop_at) < stop_when:
                break
        if check.rnorm <= check.e_pri and check.snorm <= check.e_dual:
            break
        rho_new = update_rho(
            rho, rnorm, snorm, iteration=iteration_,
            **(update_rho_options or {}))
        # scaled dual variables should be also rescaled
        U_0 *= rho / rho_new
        U_1 *= rho / rho_new
        U_2 *= rho / rho_new
        rho = rho_new
        # assert is_pos_def(Z_0)
    else:
        # for-else: reached only when the loop exhausted max_iter
        # without hitting a break (i.e. no convergence).
        warnings.warn("Objective did not converge.")
    covariance_ = np.array([linalg.pinvh(x) for x in Z_0])
    return_list = [Z_0, covariance_]
    if return_history:
        return_list.append(checks)
    if return_n_iter:
        return_list.append(iteration_ + 1)
    return return_list
class TimeGraphicalLasso(GraphicalLasso):
    """Sparse inverse covariance estimation with an l1-penalized estimator.

    Scikit-learn style wrapper around :func:`time_graphical_lasso`.

    Parameters
    ----------
    alpha : positive float, default 0.01
        Regularization parameter for precision matrix. The higher alpha,
        the more regularization, the sparser the inverse covariance.
    beta : positive float, default 1
        Regularization parameter to constrain precision matrices in time.
        The higher beta, the more regularization,
        and consecutive precision matrices in time are more similar.
    psi : {'laplacian', 'l1', 'l2', 'linf', 'node'}, default 'laplacian'
        Type of norm to enforce for consecutive precision matrices in time.
    mode : str, default 'admm'
        Solver mode, forwarded to the base class.
    rho : positive float, default 1
        Augmented Lagrangian parameter.
    tol : positive float, default 1e-4
        Absolute tolerance to declare convergence.
    rtol : positive float, default 1e-4
        Relative tolerance to declare convergence.
    max_iter : integer, default 100
        The maximum number of iterations.
    verbose : boolean, default False
        If verbose is True, the objective function, rnorm and snorm are
        printed at each iteration.
    assume_centered : boolean, default False
        If True, data are not centered before computation.
        Useful when working with data whose mean is almost, but not exactly
        zero.
        If False, data are centered before computation.
    return_history : boolean, default False
        If True, keep the per-iteration convergence history in `history_`.
    update_rho_options : dict, default None
        Options for the update of rho. See `update_rho` function for details.
    compute_objective : boolean, default True
        Choose if compute the objective function during iterations
        (only useful if `verbose=True`).
    stop_at, stop_when : float, optional
        Early-stopping target objective value and relative tolerance,
        forwarded to the solver.
    suppress_warn_list : boolean, default False
        Stored on the estimator; not used by this class directly.
    init : {'empirical', 'zeros', ndarray}, default 'empirical'
        How to initialise the inverse covariance matrix. Default is take
        the empirical covariance and inverting it.

    Attributes
    ----------
    covariance_ : array-like, shape (n_times, n_features, n_features)
        Estimated covariance matrix
    precision_ : array-like, shape (n_times, n_features, n_features)
        Estimated precision matrix.
    n_iter_ : int
        Number of iterations run.
    """

    def __init__(
            self, alpha=0.01, beta=1., mode='admm', rho=1., tol=1e-4,
            rtol=1e-4, psi='laplacian', max_iter=100, verbose=False,
            assume_centered=False, return_history=False,
            update_rho_options=None, compute_objective=True, stop_at=None,
            stop_when=1e-4, suppress_warn_list=False, init='empirical'):
        super(TimeGraphicalLasso, self).__init__(
            alpha=alpha, rho=rho, tol=tol, rtol=rtol, max_iter=max_iter,
            verbose=verbose, assume_centered=assume_centered, mode=mode,
            update_rho_options=update_rho_options,
            compute_objective=compute_objective, init=init)
        self.beta = beta
        self.psi = psi
        self.return_history = return_history
        self.stop_at = stop_at
        self.stop_when = stop_when
        self.suppress_warn_list = suppress_warn_list

    def get_observed_precision(self):
        """Getter for the observed precision matrix.

        Returns
        -------
        precision_ : array-like,
            The precision matrix associated to the current covariance object.
        """
        return self.get_precision()

    def _fit(self, emp_cov, n_samples):
        """Fit the TimeGraphicalLasso model to X.

        Parameters
        ----------
        emp_cov : ndarray, shape (n_time, n_features, n_features)
            Empirical covariance of data.
        n_samples : ndarray, shape (n_time,)
            Number of samples for each time point.
        """
        out = time_graphical_lasso(
            emp_cov, alpha=self.alpha, rho=self.rho, beta=self.beta,
            mode=self.mode, n_samples=n_samples, tol=self.tol, rtol=self.rtol,
            psi=self.psi, max_iter=self.max_iter, verbose=self.verbose,
            return_n_iter=True, return_history=self.return_history,
            update_rho_options=self.update_rho_options,
            compute_objective=self.compute_objective, stop_at=self.stop_at,
            stop_when=self.stop_when, init=self.init)
        if self.return_history:
            self.precision_, self.covariance_, self.history_, self.n_iter_ = \
                out
        else:
            self.precision_, self.covariance_, self.n_iter_ = out
        return self

    def fit(self, X, y):
        """Fit the TimeGraphicalLasso model to X.

        Parameters
        ----------
        X : ndarray, shape = (n_samples * n_times, n_dimensions)
            Data matrix.
        y : ndarray, shape = (n_times,)
            Indicate the temporal belonging of each sample.
        """
        # Covariance does not make sense for a single feature
        X, y = check_X_y(
            X, y, accept_sparse=False, dtype=np.float64, order="C",
            ensure_min_features=2, estimator=self)
        n_dimensions = X.shape[1]
        self.classes_, n_samples = np.unique(y, return_counts=True)
        n_times = self.classes_.size
        # n_samples = np.array([x.shape[0] for x in X])
        if self.assume_centered:
            self.location_ = np.zeros((n_times, n_dimensions))
        else:
            # per-time-point sample means, used later by score()
            self.location_ = np.array(
                [X[y == cl].mean(0) for cl in self.classes_])
        emp_cov = np.array(
            [
                empirical_covariance(
                    X[y == cl], assume_centered=self.assume_centered)
                for cl in self.classes_
            ])
        return self._fit(emp_cov, n_samples)

    def score(self, X, y):
        """Computes the log-likelihood of a Gaussian data set with
        `self.covariance_` as an estimator of its covariance matrix.

        Parameters
        ----------
        X : array-like, shape = (n_samples, n_features)
            Test data of which we compute the likelihood, where n_samples is
            the number of samples and n_features is the number of features.
            X is assumed to be drawn from the same distribution than
            the data used in fit (including centering).
        y : array-like, shape = (n_samples,)
            Class of samples.

        Returns
        -------
        res : float
            The likelihood of the data set with `self.covariance_` as an
            estimator of its covariance matrix.
        """
        # Covariance does not make sense for a single feature
        X, y = check_X_y(
            X, y, accept_sparse=False, dtype=np.float64, order="C",
            ensure_min_features=2, estimator=self)
        # compute empirical covariance of the test set,
        # centred with the per-time means learned during fit
        test_cov = np.array(
            [
                empirical_covariance(
                    X[y == cl] - self.location_[i], assume_centered=True)
                for i, cl in enumerate(self.classes_)
            ])
        res = sum(
            X[y == cl].shape[0] * log_likelihood(S, K) for S, K, cl in zip(
                test_cov, self.get_observed_precision(), self.classes_))
        return res

    def error_norm(
            self, comp_cov, norm='frobenius', scaling=True, squared=True):
        """Compute the Mean Squared Error between two covariance estimators.
        (In the sense of the Frobenius norm).

        Parameters
        ----------
        comp_cov : array-like, shape = [n_features, n_features]
            The covariance to compare with.
        norm : str
            The type of norm used to compute the error. Available error types:
            - 'frobenius' (default): sqrt(tr(A^t.A))
            - 'spectral': sqrt(max(eigenvalues(A^t.A))
            where A is the error ``(comp_cov - self.covariance_)``.
        scaling : bool
            If True (default), the squared error norm is divided by n_features.
            If False, the squared error norm is not rescaled.
        squared : bool
            Whether to compute the squared error norm or the error norm.
            If True (default), the squared error norm is returned.
            If False, the error norm is returned.

        Returns
        -------
        The Mean Squared Error (in the sense of the Frobenius norm) between
        `self` and `comp_cov` covariance estimators.
        """
        return error_norm_time(
            self.covariance_, comp_cov, norm=norm, scaling=scaling,
            squared=squared)
| 35.750473
| 80
| 0.62003
|
from __future__ import division
import warnings
import numpy as np
from scipy import linalg
from six.moves import map, range, zip
from sklearn.covariance import empirical_covariance, log_likelihood
from sklearn.utils.extmath import squared_norm
from sklearn.utils.validation import check_X_y
from regain.covariance.graphical_lasso_ import GraphicalLasso, logl
from regain.norm import l1_od_norm
from regain.prox import prox_logdet, soft_thresholding
from regain.update_rules import update_rho
from regain.utils import convergence, error_norm_time
from regain.validation import check_norm_prox
def loss(S, K, n_samples=None):
if n_samples is None:
n_samples = np.ones(S.shape[0])
return sum(
-ni * logl(emp_cov, precision)
for emp_cov, precision, ni in zip(S, K, n_samples))
def objective(n_samples, S, K, Z_0, Z_1, Z_2, alpha, beta, psi):
obj = loss(S, K, n_samples=n_samples)
if isinstance(alpha, np.ndarray):
obj += sum(l1_od_norm(a * z) for a, z in zip(alpha, Z_0))
else:
obj += alpha * sum(map(l1_od_norm, Z_0))
if isinstance(beta, np.ndarray):
obj += sum(b[0][0] * m for b, m in zip(beta, map(psi, Z_2 - Z_1)))
else:
obj += beta * sum(map(psi, Z_2 - Z_1))
return obj
def init_precision(emp_cov, mode='empirical'):
if isinstance(mode, np.ndarray):
return mode.copy()
if mode == 'empirical':
n_times, _, n_features = emp_cov.shape
covariance_ = emp_cov.copy()
covariance_ *= 0.95
K = np.empty_like(emp_cov)
for i, (c, e) in enumerate(zip(covariance_, emp_cov)):
c.flat[::n_features + 1] = e.flat[::n_features + 1]
K[i] = linalg.pinvh(c)
elif mode == 'zeros':
K = np.zeros_like(emp_cov)
return K
def time_graphical_lasso(
emp_cov, alpha=0.01, rho=1, beta=1, max_iter=100, n_samples=None,
verbose=False, psi='laplacian', tol=1e-4, rtol=1e-4,
return_history=False, return_n_iter=True, mode='admm',
compute_objective=True, stop_at=None, stop_when=1e-4,
update_rho_options=None, init='empirical'):
psi, prox_psi, psi_node_penalty = check_norm_prox(psi)
Z_0 = init_precision(emp_cov, mode=init)
Z_1 = Z_0.copy()[:-1]
Z_2 = Z_0.copy()[1:]
U_0 = np.zeros_like(Z_0)
U_1 = np.zeros_like(Z_1)
U_2 = np.zeros_like(Z_2)
Z_0_old = np.zeros_like(Z_0)
Z_1_old = np.zeros_like(Z_1)
Z_2_old = np.zeros_like(Z_2)
divisor = np.full(emp_cov.shape[0], 3, dtype=float)
divisor[0] -= 1
divisor[-1] -= 1
if n_samples is None:
n_samples = np.ones(emp_cov.shape[0])
checks = [
convergence(
obj=objective(
n_samples, emp_cov, Z_0, Z_0, Z_1, Z_2, alpha, beta, psi))
]
for iteration_ in range(max_iter):
A = Z_0 - U_0
A[:-1] += Z_1 - U_1
A[1:] += Z_2 - U_2
A /= divisor[:, None, None]
A += A.transpose(0, 2, 1)
A /= 2.
A *= -rho * divisor[:, None, None] / n_samples[:, None, None]
A += emp_cov
K = np.array(
[
prox_logdet(a, lamda=ni / (rho * div))
for a, div, ni in zip(A, divisor, n_samples)
])
A = K + U_0
A += A.transpose(0, 2, 1)
A /= 2.
Z_0 = soft_thresholding(A, lamda=alpha / rho)
A_1 = K[:-1] + U_1
A_2 = K[1:] + U_2
if not psi_node_penalty:
prox_e = prox_psi(A_2 - A_1, lamda=2. * beta / rho)
Z_1 = .5 * (A_1 + A_2 - prox_e)
Z_2 = .5 * (A_1 + A_2 + prox_e)
else:
Z_1, Z_2 = prox_psi(
np.concatenate((A_1, A_2), axis=1), lamda=.5 * beta / rho,
rho=rho, tol=tol, rtol=rtol, max_iter=max_iter)
U_0 += K - Z_0
U_1 += K[:-1] - Z_1
U_2 += K[1:] - Z_2
rnorm = np.sqrt(
squared_norm(K - Z_0) + squared_norm(K[:-1] - Z_1) +
squared_norm(K[1:] - Z_2))
snorm = rho * np.sqrt(
squared_norm(Z_0 - Z_0_old) + squared_norm(Z_1 - Z_1_old) +
squared_norm(Z_2 - Z_2_old))
obj = objective(
n_samples, emp_cov, Z_0, K, Z_1, Z_2, alpha, beta, psi) \
if compute_objective else np.nan
check = convergence(
obj=obj,
rnorm=rnorm,
snorm=snorm,
e_pri=np.sqrt(K.size + 2 * Z_1.size) * tol + rtol * max(
np.sqrt(
squared_norm(Z_0) + squared_norm(Z_1) + squared_norm(Z_2)),
np.sqrt(
squared_norm(K) + squared_norm(K[:-1]) +
squared_norm(K[1:]))),
e_dual=np.sqrt(K.size + 2 * Z_1.size) * tol + rtol * rho *
np.sqrt(squared_norm(U_0) + squared_norm(U_1) + squared_norm(U_2)),
)
Z_0_old = Z_0.copy()
Z_1_old = Z_1.copy()
Z_2_old = Z_2.copy()
if verbose:
print(
"obj: %.4f, rnorm: %.4f, snorm: %.4f,"
"eps_pri: %.4f, eps_dual: %.4f" % check[:5])
checks.append(check)
if stop_at is not None:
if abs(check.obj - stop_at) / abs(stop_at) < stop_when:
break
if check.rnorm <= check.e_pri and check.snorm <= check.e_dual:
break
rho_new = update_rho(
rho, rnorm, snorm, iteration=iteration_,
**(update_rho_options or {}))
U_0 *= rho / rho_new
U_1 *= rho / rho_new
U_2 *= rho / rho_new
rho = rho_new
else:
warnings.warn("Objective did not converge.")
covariance_ = np.array([linalg.pinvh(x) for x in Z_0])
return_list = [Z_0, covariance_]
if return_history:
return_list.append(checks)
if return_n_iter:
return_list.append(iteration_ + 1)
return return_list
class TimeGraphicalLasso(GraphicalLasso):
def __init__(
self, alpha=0.01, beta=1., mode='admm', rho=1., tol=1e-4,
rtol=1e-4, psi='laplacian', max_iter=100, verbose=False,
assume_centered=False, return_history=False,
update_rho_options=None, compute_objective=True, stop_at=None,
stop_when=1e-4, suppress_warn_list=False, init='empirical'):
super(TimeGraphicalLasso, self).__init__(
alpha=alpha, rho=rho, tol=tol, rtol=rtol, max_iter=max_iter,
verbose=verbose, assume_centered=assume_centered, mode=mode,
update_rho_options=update_rho_options,
compute_objective=compute_objective, init=init)
self.beta = beta
self.psi = psi
self.return_history = return_history
self.stop_at = stop_at
self.stop_when = stop_when
self.suppress_warn_list = suppress_warn_list
def get_observed_precision(self):
return self.get_precision()
def _fit(self, emp_cov, n_samples):
out = time_graphical_lasso(
emp_cov, alpha=self.alpha, rho=self.rho, beta=self.beta,
mode=self.mode, n_samples=n_samples, tol=self.tol, rtol=self.rtol,
psi=self.psi, max_iter=self.max_iter, verbose=self.verbose,
return_n_iter=True, return_history=self.return_history,
update_rho_options=self.update_rho_options,
compute_objective=self.compute_objective, stop_at=self.stop_at,
stop_when=self.stop_when, init=self.init)
if self.return_history:
self.precision_, self.covariance_, self.history_, self.n_iter_ = \
out
else:
self.precision_, self.covariance_, self.n_iter_ = out
return self
def fit(self, X, y):
X, y = check_X_y(
X, y, accept_sparse=False, dtype=np.float64, order="C",
ensure_min_features=2, estimator=self)
n_dimensions = X.shape[1]
self.classes_, n_samples = np.unique(y, return_counts=True)
n_times = self.classes_.size
if self.assume_centered:
self.location_ = np.zeros((n_times, n_dimensions))
else:
self.location_ = np.array(
[X[y == cl].mean(0) for cl in self.classes_])
emp_cov = np.array(
[
empirical_covariance(
X[y == cl], assume_centered=self.assume_centered)
for cl in self.classes_
])
return self._fit(emp_cov, n_samples)
def score(self, X, y):
X, y = check_X_y(
X, y, accept_sparse=False, dtype=np.float64, order="C",
ensure_min_features=2, estimator=self)
test_cov = np.array(
[
empirical_covariance(
X[y == cl] - self.location_[i], assume_centered=True)
for i, cl in enumerate(self.classes_)
])
res = sum(
X[y == cl].shape[0] * log_likelihood(S, K) for S, K, cl in zip(
test_cov, self.get_observed_precision(), self.classes_))
return res
def error_norm(
self, comp_cov, norm='frobenius', scaling=True, squared=True):
return error_norm_time(
self.covariance_, comp_cov, norm=norm, scaling=scaling,
squared=squared)
| true
| true
|
f70c8a58a8c830bc91bf765992a9befc181edfe7
| 808
|
py
|
Python
|
src/causal2020/graphs/rum.py
|
hassanobeid1994/tr_b_causal_2020
|
1ffaeb7dcefccf5e1f24c459e9a2f140b2a052a5
|
[
"MIT"
] | null | null | null |
src/causal2020/graphs/rum.py
|
hassanobeid1994/tr_b_causal_2020
|
1ffaeb7dcefccf5e1f24c459e9a2f140b2a052a5
|
[
"MIT"
] | 89
|
2020-02-10T02:52:11.000Z
|
2020-06-23T03:50:27.000Z
|
src/causal2020/graphs/rum.py
|
hassan-obeid/tr_b_causal_2020
|
1ffaeb7dcefccf5e1f24c459e9a2f140b2a052a5
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Causal graph for the random utility model in Ben-Akiva et al. (2002).
References
----------
Ben-Akiva, Moshe, Joan Walker, Adriana T. Bernardino, Dinesh A. Gopinath,
Taka Morikawa, and Amalia Polydoropoulou. "Integration of choice and latent
variable models." Perpetual motion: Travel behaviour research opportunities and
application challenges (2002): 431-470.
"""
import graphviz

# Directed graph encoding the X -> U -> C chain of the random utility model.
RUM_GRAPH = graphviz.Digraph("Random Utility Maximization")

# Observed variables are drawn as boxes, unobserved ones as ellipses.
_NODE_SPECS = (
    ("X", "Explanatory Variables", "box"),
    ("U", "Utility", "ellipse"),
    ("C", "Choice", "box"),
)
for _node_id, _label, _shape in _NODE_SPECS:
    RUM_GRAPH.node(_node_id, _label, shape=_shape)

# Explanatory variables determine utility, which determines the choice.
for _tail, _head in (("X", "U"), ("U", "C")):
    RUM_GRAPH.edge(_tail, _head)
| 32.32
| 79
| 0.736386
|
import graphviz
RUM_GRAPH = graphviz.Digraph("Random Utility Maximization")
RUM_GRAPH.node("X", "Explanatory Variables", shape="box")
RUM_GRAPH.node("U", "Utility", shape="ellipse")
RUM_GRAPH.node("C", "Choice", shape="box")
RUM_GRAPH.edge("X", "U")
RUM_GRAPH.edge("U", "C")
| true
| true
|
f70c8ae86d1ee49a88424559123d137100d760ac
| 4,726
|
py
|
Python
|
evalml/tests/component_tests/test_prophet_regressor.py
|
peterataylor/evalml
|
917f07845c4a319bb08c7aaa8df9e09623df11c8
|
[
"BSD-3-Clause"
] | null | null | null |
evalml/tests/component_tests/test_prophet_regressor.py
|
peterataylor/evalml
|
917f07845c4a319bb08c7aaa8df9e09623df11c8
|
[
"BSD-3-Clause"
] | null | null | null |
evalml/tests/component_tests/test_prophet_regressor.py
|
peterataylor/evalml
|
917f07845c4a319bb08c7aaa8df9e09623df11c8
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
import pandas as pd
import pytest
from pytest import importorskip
from evalml.model_family import ModelFamily
from evalml.pipelines.components import ProphetRegressor
from evalml.problem_types import ProblemTypes
prophet = importorskip("prophet", reason="Skipping test because prophet not installed")
def test_model_family():
    """ProphetRegressor should advertise the PROPHET model family."""
    family = ProphetRegressor.model_family
    assert family == ModelFamily.PROPHET
def test_cmdstanpy_backend():
    """Prophet should honour an explicitly requested CMDSTANPY backend."""
    model = prophet.Prophet(stan_backend="CMDSTANPY")
    backend_type = model.stan_backend.get_type()
    assert backend_type == "CMDSTANPY"
def test_problem_types():
    """Time-series regression is the only supported problem type."""
    supported = set(ProphetRegressor.supported_problem_types)
    assert supported == {ProblemTypes.TIME_SERIES_REGRESSION}
def test_init_with_other_params():
    """Constructor keyword arguments should be reflected in ``parameters``."""
    regressor = ProphetRegressor(
        daily_seasonality=True,
        mcmc_samples=5,
        interval_width=0.8,
        uncertainty_samples=0,
    )
    expected = {
        "changepoint_prior_scale": 0.05,
        "daily_seasonality": True,
        "date_index": None,
        "holidays_prior_scale": 10,
        "interval_width": 0.8,
        "mcmc_samples": 5,
        "seasonality_mode": "additive",
        "seasonality_prior_scale": 10,
        "uncertainty_samples": 0,
        "stan_backend": "CMDSTANPY",
    }
    assert regressor.parameters == expected
def test_feature_importance(ts_data):
    """Prophet exposes no per-feature importances; expect a zero vector."""
    X, y = ts_data
    clf = ProphetRegressor(uncertainty_samples=False, changepoint_prior_scale=2.0)
    clf.fit(X, y)
    # The original line computed the comparison but never asserted it, so
    # the test could not fail; enforce the intended check.
    np.testing.assert_array_equal(clf.feature_importance, np.zeros(1))
def test_get_params(ts_data):
    """Default construction should expose the documented default parameters."""
    regressor = ProphetRegressor()
    expected = {
        "changepoint_prior_scale": 0.05,
        "date_index": None,
        "seasonality_prior_scale": 10,
        "holidays_prior_scale": 10,
        "seasonality_mode": "additive",
        "stan_backend": "CMDSTANPY",
    }
    assert regressor.get_params() == expected
def test_fit_predict_ts_with_X_index(ts_data):
    """Predictions should match a raw prophet fit when X has a datetime index."""
    X, y = ts_data
    assert isinstance(X.index, pd.DatetimeIndex)
    p_clf = prophet.Prophet(uncertainty_samples=False, changepoint_prior_scale=2.0)
    prophet_df = ProphetRegressor.build_prophet_df(X=X, y=y, date_column="ds")
    p_clf.fit(prophet_df)
    y_pred_p = p_clf.predict(prophet_df)["yhat"]
    clf = ProphetRegressor(uncertainty_samples=False, changepoint_prior_scale=2.0)
    clf.fit(X, y)
    y_pred = clf.predict(X)
    # The original discarded np.array_equal's return value, so this test
    # could never fail; assert agreement (allclose tolerates float noise
    # between the two independent fits).
    np.testing.assert_allclose(y_pred_p.values, y_pred.values)
def test_fit_predict_ts_with_y_index(ts_data):
    """Predictions should match a raw prophet fit when y carries the index."""
    X, y = ts_data
    X = X.reset_index(drop=True)
    assert isinstance(y.index, pd.DatetimeIndex)
    p_clf = prophet.Prophet(uncertainty_samples=False, changepoint_prior_scale=2.0)
    prophet_df = ProphetRegressor.build_prophet_df(X=X, y=y, date_column="ds")
    p_clf.fit(prophet_df)
    y_pred_p = p_clf.predict(prophet_df)["yhat"]
    clf = ProphetRegressor(uncertainty_samples=False, changepoint_prior_scale=2.0)
    clf.fit(X, y)
    y_pred = clf.predict(X, y)
    # The original discarded np.array_equal's return value, so this test
    # could never fail; enforce the comparison.
    np.testing.assert_allclose(y_pred_p.values, y_pred.values)
def test_fit_predict_ts_no_X(ts_data):
    """Fitting with X=None should behave like prophet on the y series alone."""
    y = pd.Series(
        range(1, 32), name="dates", index=pd.date_range("2020-10-01", "2020-10-31")
    )
    p_clf = prophet.Prophet(uncertainty_samples=False, changepoint_prior_scale=2.0)
    prophet_df = ProphetRegressor.build_prophet_df(
        X=pd.DataFrame(), y=y, date_column="ds"
    )
    p_clf.fit(prophet_df)
    y_pred_p = p_clf.predict(prophet_df)["yhat"]
    clf = ProphetRegressor(uncertainty_samples=False, changepoint_prior_scale=2.0)
    clf.fit(X=None, y=y)
    y_pred = clf.predict(X=None, y=y)
    # The original discarded np.array_equal's return value, so this test
    # could never fail; enforce the comparison.
    np.testing.assert_allclose(y_pred_p.values, y_pred.values)
def test_fit_predict_date_col(ts_data):
    """A named date column should be used as the prophet time axis."""
    X = pd.DataFrame(
        {
            "features": range(100),
            "these_dates": pd.date_range("1/1/21", periods=100),
            "more_dates": pd.date_range("7/4/1987", periods=100),
        }
    )
    y = pd.Series(np.random.randint(1, 5, 100), name="y")
    clf = ProphetRegressor(
        date_index="these_dates", uncertainty_samples=False, changepoint_prior_scale=2.0
    )
    clf.fit(X, y)
    y_pred = clf.predict(X)
    p_clf = prophet.Prophet(uncertainty_samples=False, changepoint_prior_scale=2.0)
    prophet_df = ProphetRegressor.build_prophet_df(X=X, y=y, date_column="these_dates")
    p_clf.fit(prophet_df)
    y_pred_p = p_clf.predict(prophet_df)["yhat"]
    # The original discarded np.array_equal's return value, so this test
    # could never fail; enforce the comparison.
    np.testing.assert_allclose(y_pred_p.values, y_pred.values)
def test_fit_predict_no_date_col_or_index(ts_data):
    """Fitting without any datetime column or index must raise a ValueError."""
    X, y = ts_data
    X = X.reset_index(drop=True)
    y = y.reset_index(drop=True)
    # Sanity check: the fixture's datetime index really was dropped.
    assert not isinstance(X.index, pd.DatetimeIndex)
    assert not isinstance(y.index, pd.DatetimeIndex)
    clf = ProphetRegressor()
    with pytest.raises(
        ValueError,
        match="Prophet estimator requires input data X to have a datetime column",
    ):
        clf.fit(X, y)
| 29.72327
| 88
| 0.692975
|
import numpy as np
import pandas as pd
import pytest
from pytest import importorskip
from evalml.model_family import ModelFamily
from evalml.pipelines.components import ProphetRegressor
from evalml.problem_types import ProblemTypes
prophet = importorskip("prophet", reason="Skipping test because prophet not installed")
def test_model_family():
    """ProphetRegressor reports the PROPHET model family."""
    assert ProphetRegressor.model_family == ModelFamily.PROPHET
def test_cmdstanpy_backend():
    """prophet must accept and report the CMDSTANPY stan backend."""
    m = prophet.Prophet(stan_backend="CMDSTANPY")
    assert m.stan_backend.get_type() == "CMDSTANPY"
def test_problem_types():
    """ProphetRegressor supports only time-series regression."""
    assert set(ProphetRegressor.supported_problem_types) == {
        ProblemTypes.TIME_SERIES_REGRESSION
    }
def test_init_with_other_params():
    """Extra keyword arguments are merged into the component's parameters dict."""
    clf = ProphetRegressor(
        daily_seasonality=True,
        mcmc_samples=5,
        interval_width=0.8,
        uncertainty_samples=0,
    )
    # Defaults plus the overrides passed above must all be present.
    assert clf.parameters == {
        "changepoint_prior_scale": 0.05,
        "daily_seasonality": True,
        "date_index": None,
        "holidays_prior_scale": 10,
        "interval_width": 0.8,
        "mcmc_samples": 5,
        "seasonality_mode": "additive",
        "seasonality_prior_scale": 10,
        "uncertainty_samples": 0,
        "stan_backend": "CMDSTANPY",
    }
def test_feature_importance(ts_data):
    """Prophet exposes no per-feature importances; a zero placeholder is expected."""
    X, y = ts_data
    clf = ProphetRegressor(uncertainty_samples=False, changepoint_prior_scale=2.0)
    clf.fit(X, y)
    # BUG FIX: the original line computed ``clf.feature_importance == np.zeros(1)``
    # and discarded the result, so the test asserted nothing.
    assert np.array_equal(clf.feature_importance, np.zeros(1))
def test_get_params(ts_data):
    """get_params returns only the user-facing defaults (no fit-time extras)."""
    clf = ProphetRegressor()
    assert clf.get_params() == {
        "changepoint_prior_scale": 0.05,
        "date_index": None,
        "seasonality_prior_scale": 10,
        "holidays_prior_scale": 10,
        "seasonality_mode": "additive",
        "stan_backend": "CMDSTANPY",
    }
def test_fit_predict_ts_with_X_index(ts_data):
    """Fit/predict must match raw prophet when ``X`` carries the datetime index."""
    X, y = ts_data
    assert isinstance(X.index, pd.DatetimeIndex)
    # Reference model: fit plain prophet on the same prepared frame.
    p_clf = prophet.Prophet(uncertainty_samples=False, changepoint_prior_scale=2.0)
    prophet_df = ProphetRegressor.build_prophet_df(X=X, y=y, date_column="ds")
    p_clf.fit(prophet_df)
    y_pred_p = p_clf.predict(prophet_df)["yhat"]
    clf = ProphetRegressor(uncertainty_samples=False, changepoint_prior_scale=2.0)
    clf.fit(X, y)
    y_pred = clf.predict(X)
    # BUG FIX: previously the comparison result was discarded; assert it.
    assert np.array_equal(y_pred_p.values, y_pred.values)
def test_fit_predict_ts_with_y_index(ts_data):
    """Fit/predict must match raw prophet when only ``y`` carries the datetime index."""
    X, y = ts_data
    X = X.reset_index(drop=True)
    assert isinstance(y.index, pd.DatetimeIndex)
    p_clf = prophet.Prophet(uncertainty_samples=False, changepoint_prior_scale=2.0)
    prophet_df = ProphetRegressor.build_prophet_df(X=X, y=y, date_column="ds")
    p_clf.fit(prophet_df)
    y_pred_p = p_clf.predict(prophet_df)["yhat"]
    clf = ProphetRegressor(uncertainty_samples=False, changepoint_prior_scale=2.0)
    clf.fit(X, y)
    y_pred = clf.predict(X, y)
    # BUG FIX: previously the comparison result was discarded; assert it.
    assert np.array_equal(y_pred_p.values, y_pred.values)
def test_fit_predict_ts_no_X(ts_data):
    """Fit/predict with X=None must match raw prophet fed an empty feature frame."""
    y = pd.Series(
        range(1, 32), name="dates", index=pd.date_range("2020-10-01", "2020-10-31")
    )
    p_clf = prophet.Prophet(uncertainty_samples=False, changepoint_prior_scale=2.0)
    prophet_df = ProphetRegressor.build_prophet_df(
        X=pd.DataFrame(), y=y, date_column="ds"
    )
    p_clf.fit(prophet_df)
    y_pred_p = p_clf.predict(prophet_df)["yhat"]
    clf = ProphetRegressor(uncertainty_samples=False, changepoint_prior_scale=2.0)
    clf.fit(X=None, y=y)
    y_pred = clf.predict(X=None, y=y)
    # BUG FIX: previously the comparison result was discarded; assert it.
    assert np.array_equal(y_pred_p.values, y_pred.values)
def test_fit_predict_date_col(ts_data):
    """A named date column (``date_index``) must drive the prophet fit."""
    X = pd.DataFrame(
        {
            "features": range(100),
            "these_dates": pd.date_range("1/1/21", periods=100),
            "more_dates": pd.date_range("7/4/1987", periods=100),
        }
    )
    y = pd.Series(np.random.randint(1, 5, 100), name="y")
    clf = ProphetRegressor(
        date_index="these_dates", uncertainty_samples=False, changepoint_prior_scale=2.0
    )
    clf.fit(X, y)
    y_pred = clf.predict(X)
    p_clf = prophet.Prophet(uncertainty_samples=False, changepoint_prior_scale=2.0)
    prophet_df = ProphetRegressor.build_prophet_df(X=X, y=y, date_column="these_dates")
    p_clf.fit(prophet_df)
    y_pred_p = p_clf.predict(prophet_df)["yhat"]
    # BUG FIX: previously the comparison result was discarded; assert it.
    assert np.array_equal(y_pred_p.values, y_pred.values)
def test_fit_predict_no_date_col_or_index(ts_data):
    """Fitting without any datetime column or index must raise a ValueError."""
    X, y = ts_data
    X = X.reset_index(drop=True)
    y = y.reset_index(drop=True)
    # Sanity check: the fixture's datetime index really was dropped.
    assert not isinstance(X.index, pd.DatetimeIndex)
    assert not isinstance(y.index, pd.DatetimeIndex)
    clf = ProphetRegressor()
    with pytest.raises(
        ValueError,
        match="Prophet estimator requires input data X to have a datetime column",
    ):
        clf.fit(X, y)
| true
| true
|
f70c8b0953fb7f16415b90aca7c15aa492645bd2
| 2,990
|
py
|
Python
|
test/SConsignFile/use-dbhash.py
|
moroten/scons
|
20927b42ed4f0cb87f51287fa3b4b6cf915afcf8
|
[
"MIT"
] | 1
|
2017-01-28T15:39:07.000Z
|
2017-01-28T15:39:07.000Z
|
test/SConsignFile/use-dbhash.py
|
moroten/scons
|
20927b42ed4f0cb87f51287fa3b4b6cf915afcf8
|
[
"MIT"
] | 4
|
2019-04-11T16:27:45.000Z
|
2019-04-11T23:56:30.000Z
|
test/SConsignFile/use-dbhash.py
|
moroten/scons
|
20927b42ed4f0cb87f51287fa3b4b6cf915afcf8
|
[
"MIT"
] | 2
|
2018-01-16T11:29:16.000Z
|
2020-05-13T16:48:26.000Z
|
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Verify SConsignFile() when used with dbhash.
"""
import TestSCons
_python_ = TestSCons._python_
test = TestSCons.TestSCons()
# NOTE(review): the availability probe imports ``dbm.bsd`` but the SConstruct
# below imports the legacy ``dbhash`` module — presumably a py2/py3 straddle;
# confirm against the TestSCons harness before changing either name.
try:
    import dbm.bsd
except ImportError:
    test.skip_test('No dbhash in this version of Python; skipping test.\n')
test.subdir('subdir')
# A trivial "copy source to target" builder script used by the SConstruct.
test.write('build.py', r"""
import sys
contents = open(sys.argv[2], 'rb').read()
file = open(sys.argv[1], 'wb')
file.write(contents)
file.close()
sys.exit(0)
""")
# SConstruct routes the signature database through an explicit dbhash module.
test.write('SConstruct', """
import sys
import dbhash
SConsignFile('.sconsign', dbhash)
B = Builder(action = r'%(_python_)s build.py $TARGETS $SOURCES')
env = Environment(BUILDERS = { 'B' : B })
env.B(target = 'f1.out', source = 'f1.in')
env.B(target = 'f2.out', source = 'f2.in')
env.B(target = 'subdir/f3.out', source = 'subdir/f3.in')
env.B(target = 'subdir/f4.out', source = 'subdir/f4.in')
""" % locals())
test.write('f1.in', "f1.in\n")
test.write('f2.in', "f2.in\n")
test.write(['subdir', 'f3.in'], "subdir/f3.in\n")
test.write(['subdir', 'f4.in'], "subdir/f4.in\n")
test.run()
# A single .sconsign file must exist at the top; no .dblite fallback and no
# per-directory signature files.
test.must_exist(test.workpath('.sconsign'))
test.must_not_exist(test.workpath('.sconsign.dblite'))
test.must_not_exist(test.workpath('subdir', '.sconsign'))
test.must_not_exist(test.workpath('subdir', '.sconsign.dblite'))
test.must_match('f1.out', "f1.in\n")
test.must_match('f2.out', "f2.in\n")
test.must_match(['subdir', 'f3.out'], "subdir/f3.in\n")
test.must_match(['subdir', 'f4.out'], "subdir/f4.in\n")
# Rebuild: everything is up to date and the signature layout is unchanged.
test.up_to_date(arguments = '.')
test.must_exist(test.workpath('.sconsign'))
test.must_not_exist(test.workpath('.sconsign.dblite'))
test.must_not_exist(test.workpath('subdir', '.sconsign'))
test.must_not_exist(test.workpath('subdir', '.sconsign.dblite'))
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| 30.824742
| 75
| 0.721405
|
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
# Verify SConsignFile() when used with an explicit dbhash database module.
import TestSCons
_python_ = TestSCons.TestSCons()._python_ if False else TestSCons._python_
test = TestSCons.TestSCons()
try:
    import dbm.bsd
except ImportError:
    test.skip_test('No dbhash in this version of Python; skipping test.\n')
test.subdir('subdir')
# Trivial "copy source to target" builder used by the SConstruct below.
test.write('build.py', r"""
import sys
contents = open(sys.argv[2], 'rb').read()
file = open(sys.argv[1], 'wb')
file.write(contents)
file.close()
sys.exit(0)
""")
test.write('SConstruct', """
import sys
import dbhash
SConsignFile('.sconsign', dbhash)
B = Builder(action = r'%(_python_)s build.py $TARGETS $SOURCES')
env = Environment(BUILDERS = { 'B' : B })
env.B(target = 'f1.out', source = 'f1.in')
env.B(target = 'f2.out', source = 'f2.in')
env.B(target = 'subdir/f3.out', source = 'subdir/f3.in')
env.B(target = 'subdir/f4.out', source = 'subdir/f4.in')
""" % locals())
test.write('f1.in', "f1.in\n")
test.write('f2.in', "f2.in\n")
test.write(['subdir', 'f3.in'], "subdir/f3.in\n")
test.write(['subdir', 'f4.in'], "subdir/f4.in\n")
test.run()
# One top-level .sconsign database; no .dblite fallback, no per-dir files.
test.must_exist(test.workpath('.sconsign'))
test.must_not_exist(test.workpath('.sconsign.dblite'))
test.must_not_exist(test.workpath('subdir', '.sconsign'))
test.must_not_exist(test.workpath('subdir', '.sconsign.dblite'))
test.must_match('f1.out', "f1.in\n")
test.must_match('f2.out', "f2.in\n")
test.must_match(['subdir', 'f3.out'], "subdir/f3.in\n")
test.must_match(['subdir', 'f4.out'], "subdir/f4.in\n")
# Rebuild: up to date, signature layout unchanged.
test.up_to_date(arguments = '.')
test.must_exist(test.workpath('.sconsign'))
test.must_not_exist(test.workpath('.sconsign.dblite'))
test.must_not_exist(test.workpath('subdir', '.sconsign'))
test.must_not_exist(test.workpath('subdir', '.sconsign.dblite'))
test.pass_test()
| true
| true
|
f70c8b3a87ae45f179806ab4700928fb6bc1cd69
| 1,782
|
py
|
Python
|
pyabc/distance/__init__.py
|
Pat-Laub/pyABC
|
f23f0ff8d430a8ce0a0c8253b45e19add9121992
|
[
"BSD-3-Clause"
] | null | null | null |
pyabc/distance/__init__.py
|
Pat-Laub/pyABC
|
f23f0ff8d430a8ce0a0c8253b45e19add9121992
|
[
"BSD-3-Clause"
] | null | null | null |
pyabc/distance/__init__.py
|
Pat-Laub/pyABC
|
f23f0ff8d430a8ce0a0c8253b45e19add9121992
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Distance functions
==================
Distance functions measure closeness of observed and sampled data. This
module implements various commonly used distance functions for ABC, featuring
a few advanced concepts.
For custom distance functions, either pass a plain function to ABCSMC or
subclass the pyabc.Distance class.
"""
from .base import (
Distance,
NoDistance,
IdentityFakeDistance,
AcceptAllDistance,
SimpleFunctionDistance,
to_distance)
from .distance import (
PNormDistance,
AdaptivePNormDistance,
ZScoreDistance,
PCADistance,
MinMaxDistance,
PercentileDistance,
RangeEstimatorDistance,
DistanceWithMeasureList)
from .scales import (
median_absolute_deviation,
mean_absolute_deviation,
standard_deviation,
bias,
root_mean_square_deviation,
median_absolute_deviation_to_observation,
mean_absolute_deviation_to_observation,
combined_median_absolute_deviation,
combined_mean_absolute_deviation,
standard_deviation_to_observation)
__all__ = [
# base
"Distance",
"NoDistance",
"IdentityFakeDistance",
"AcceptAllDistance",
"SimpleFunctionDistance",
"to_distance",
# distance
"PNormDistance",
"AdaptivePNormDistance",
"ZScoreDistance",
"PCADistance",
"MinMaxDistance",
"PercentileDistance",
"RangeEstimatorDistance",
"DistanceWithMeasureList",
# scales
"median_absolute_deviation",
"mean_absolute_deviation",
"standard_deviation",
"bias",
"root_mean_square_deviation",
"median_absolute_deviation_to_observation",
"mean_absolute_deviation_to_observation",
"combined_median_absolute_deviation",
"combined_mean_absolute_deviation",
"standard_deviation_to_observation"
]
| 25.098592
| 77
| 0.741302
|
# Distance functions for ABC: re-export the public API of the subpackage.
# Base classes and adapters for user-supplied callables.
from .base import (
    Distance,
    NoDistance,
    IdentityFakeDistance,
    AcceptAllDistance,
    SimpleFunctionDistance,
    to_distance)
# Concrete distance implementations.
from .distance import (
    PNormDistance,
    AdaptivePNormDistance,
    ZScoreDistance,
    PCADistance,
    MinMaxDistance,
    PercentileDistance,
    RangeEstimatorDistance,
    DistanceWithMeasureList)
# Scale functions used by the adaptive distances to weight summary statistics.
from .scales import (
    median_absolute_deviation,
    mean_absolute_deviation,
    standard_deviation,
    bias,
    root_mean_square_deviation,
    median_absolute_deviation_to_observation,
    mean_absolute_deviation_to_observation,
    combined_median_absolute_deviation,
    combined_mean_absolute_deviation,
    standard_deviation_to_observation)
__all__ = [
    "Distance",
    "NoDistance",
    "IdentityFakeDistance",
    "AcceptAllDistance",
    "SimpleFunctionDistance",
    "to_distance",
    "PNormDistance",
    "AdaptivePNormDistance",
    "ZScoreDistance",
    "PCADistance",
    "MinMaxDistance",
    "PercentileDistance",
    "RangeEstimatorDistance",
    "DistanceWithMeasureList",
    "median_absolute_deviation",
    "mean_absolute_deviation",
    "standard_deviation",
    "bias",
    "root_mean_square_deviation",
    "median_absolute_deviation_to_observation",
    "mean_absolute_deviation_to_observation",
    "combined_median_absolute_deviation",
    "combined_mean_absolute_deviation",
    "standard_deviation_to_observation"
]
| true
| true
|
f70c8be35cb4236006856c868141ae41ff613cea
| 4,097
|
py
|
Python
|
src/chembl_beaker/beaker/core_apps/D2Coords/views.py
|
chembl/chembl_beaker
|
0ea3a8efc9220fce5334cb37ac2239e7189b44ef
|
[
"Apache-2.0"
] | 28
|
2015-08-12T07:22:48.000Z
|
2022-02-26T03:50:27.000Z
|
src/chembl_beaker/beaker/core_apps/D2Coords/views.py
|
chembl/chembl_beaker
|
0ea3a8efc9220fce5334cb37ac2239e7189b44ef
|
[
"Apache-2.0"
] | 18
|
2015-03-03T12:03:12.000Z
|
2021-06-16T11:03:05.000Z
|
src/chembl_beaker/beaker/core_apps/D2Coords/views.py
|
chembl/chembl_beaker
|
0ea3a8efc9220fce5334cb37ac2239e7189b44ef
|
[
"Apache-2.0"
] | 13
|
2015-02-16T12:35:01.000Z
|
2021-08-02T09:40:30.000Z
|
__author__ = 'efelix'
# ----------------------------------------------------------------------------------------------------------------------
from rdkit.Chem import AllChem
from beaker import app
from bottle import request
from beaker.core_apps.D2Coords.impl import _ctab22D, _smiles22D, _is3D
from beaker.utils.io import _parseFlag
# ----------------------------------------------------------------------------------------------------------------------
def ctab22DView(data, params):
    """Parse request flags and delegate 2D coordinate generation to ``_ctab22D``."""
    options = {
        'loadMol': _parseFlag(params.get('loadMol', True)),
        'useRDKitChemistry': _parseFlag(params.get('useRDKitChemistry', False)),
    }
    return _ctab22D(data, **options)
# ----------------------------------------------------------------------------------------------------------------------
def is3DView(data, params):
    """Parse request flags and delegate the 3D-coordinate check to ``_is3D``."""
    options = {
        'loadMol': _parseFlag(params.get('loadMol', True)),
        'useRDKitChemistry': _parseFlag(params.get('useRDKitChemistry', False)),
    }
    return _is3D(data, **options)
# ----------------------------------------------------------------------------------------------------------------------
@app.route('/ctab22D', method=['OPTIONS', 'POST'], name="ctab22D")
def ctab22D():
    """
    Generate 2D coordinates for a molecule using Schrodinger's coordgen.
    CTAB is either single molfile or SDF file.
    cURL examples:
        curl -X POST --data-binary @no_coords.mol ${BEAKER_ROOT_URL}ctab22D
        curl -X POST -F "file=@no_coords.mol" ${BEAKER_ROOT_URL}ctab22D
    """
    # Accept the payload either as a multipart upload or as the raw body.
    if len(request.files):
        payload = list(request.files.values())[0].file.read()
    else:
        payload = request.body.read()
    return ctab22DView(payload, request.params)
# ----------------------------------------------------------------------------------------------------------------------
def smiles22DView(data, params):
    """Parse SMILES-file options from *params* and delegate to ``_smiles22D``."""
    kwargs = {
        'computeCoords': False,
        'delimiter': params.get('delimiter', ' '),
        'smilesColumn': int(params.get('smilesColumn', 0)),
        'nameColumn': int(params.get('nameColumn', 1)),
        'sanitize': _parseFlag(params.get('sanitize', True)),
    }
    # Auto-detect a header line only when the caller did not say explicitly.
    if params.get('titleLine') is None and not data.startswith(b'SMILES Name'):
        kwargs['titleLine'] = False
    else:
        kwargs['titleLine'] = _parseFlag(params.get('titleLine', True))
    return _smiles22D(data, **kwargs)
# ----------------------------------------------------------------------------------------------------------------------
@app.route('/smiles22D', method=['OPTIONS', 'POST'], name="smiles22D")
def smiles22D():
    """
    Generate 2D coordinates from SMILES using Schrodinger's coordgen.
    CTAB is either single molfile or SDF file.
    cURL examples:
        curl -X POST --data-binary @aspirin_with_header.smi ${BEAKER_ROOT_URL}smiles22D
        curl -X POST -F "file=@aspirin_with_header.smi" ${BEAKER_ROOT_URL}smiles22D
        curl -X POST --data-binary @aspirin_no_header.smi ${BEAKER_ROOT_URL}smiles22D
        curl -X POST -F "file=@aspirin_no_header.smi" ${BEAKER_ROOT_URL}smiles22D
    """
    # Accept the payload either as a multipart upload or as the raw body.
    if len(request.files):
        payload = list(request.files.values())[0].file.read()
    else:
        payload = request.body.read()
    return smiles22DView(payload, request.params)
# ----------------------------------------------------------------------------------------------------------------------
@app.route('/is3D', method=['OPTIONS', 'POST'], name="is3D")
def is3D():
    """
    Check if molecule has any 3D coordinate.
    CTAB is either single molfile or SDF file.
    cURL examples:
        curl -X POST --data-binary @aspirin_with_header.smi ${BEAKER_ROOT_URL}is3D
        curl -X POST -F "file=@aspirin_with_header.smi" ${BEAKER_ROOT_URL}is3D
        curl -X POST --data-binary @aspirin_no_header.smi ${BEAKER_ROOT_URL}is3D
        curl -X POST -F "file=@aspirin_no_header.smi" ${BEAKER_ROOT_URL}is3D
    """
    # Accept the payload either as a multipart upload or as the raw body.
    if len(request.files):
        payload = list(request.files.values())[0].file.read()
    else:
        payload = request.body.read()
    return is3DView(payload, request.params)
# ----------------------------------------------------------------------------------------------------------------------
| 39.019048
| 120
| 0.537711
|
__author__ = 'efelix'
from rdkit.Chem import AllChem
from beaker import app
from bottle import request
from beaker.core_apps.D2Coords.impl import _ctab22D, _smiles22D, _is3D
from beaker.utils.io import _parseFlag
def ctab22DView(data, params):
    """Parse request flags and delegate 2D coordinate generation to ``_ctab22D``."""
    kwargs = dict()
    kwargs['loadMol'] = _parseFlag(params.get('loadMol', True))
    kwargs['useRDKitChemistry'] = _parseFlag(params.get('useRDKitChemistry', False))
    return _ctab22D(data, **kwargs)
def is3DView(data, params):
    """Parse request flags and delegate the 3D-coordinate check to ``_is3D``."""
    kwargs = dict()
    kwargs['loadMol'] = _parseFlag(params.get('loadMol', True))
    kwargs['useRDKitChemistry'] = _parseFlag(params.get('useRDKitChemistry', False))
    return _is3D(data, **kwargs)
@app.route('/ctab22D', method=['OPTIONS', 'POST'], name="ctab22D")
def ctab22D():
    """Generate 2D coordinates for an uploaded molfile/SDF payload."""
    # Payload arrives either as a multipart upload or as the raw request body.
    data = list(request.files.values())[0].file.read() if len(request.files) else request.body.read()
    return ctab22DView(data, request.params)
def smiles22DView(data, params):
    """Parse SMILES-file options from *params* and delegate to ``_smiles22D``."""
    kwargs = dict()
    kwargs['computeCoords'] = False
    kwargs['delimiter'] = params.get('delimiter', ' ')
    kwargs['smilesColumn'] = int(params.get('smilesColumn', 0))
    kwargs['nameColumn'] = int(params.get('nameColumn', 1))
    kwargs['sanitize'] = _parseFlag(params.get('sanitize', True))
    # Auto-detect a header line only when the caller did not say explicitly.
    if params.get('titleLine') is None and not data.startswith(b'SMILES Name'):
        kwargs['titleLine'] = False
    else:
        kwargs['titleLine'] = _parseFlag(params.get('titleLine', True))
    return _smiles22D(data, **kwargs)
@app.route('/smiles22D', method=['OPTIONS', 'POST'], name="smiles22D")
def smiles22D():
    """Generate 2D coordinates from an uploaded SMILES payload."""
    # Payload arrives either as a multipart upload or as the raw request body.
    data = list(request.files.values())[0].file.read() if len(request.files) else request.body.read()
    return smiles22DView(data, request.params)
@app.route('/is3D', method=['OPTIONS', 'POST'], name="is3D")
def is3D():
    """Report whether the uploaded molfile/SDF payload has any 3D coordinate."""
    # Payload arrives either as a multipart upload or as the raw request body.
    data = list(request.files.values())[0].file.read() if len(request.files) else request.body.read()
    return is3DView(data, request.params)
| true
| true
|
f70c8c4b1862864e3418e1969f0a535ab12ce6ad
| 2,340
|
py
|
Python
|
development/performance/langevin_error_example.py
|
choderalab/saltswap
|
d30804beb158960a62f94182c694df6dd9130fb8
|
[
"MIT"
] | 3
|
2017-06-30T11:40:20.000Z
|
2021-05-14T02:20:38.000Z
|
development/performance/langevin_error_example.py
|
choderalab/saltswap
|
d30804beb158960a62f94182c694df6dd9130fb8
|
[
"MIT"
] | 19
|
2017-04-27T14:56:51.000Z
|
2019-12-10T14:26:38.000Z
|
development/performance/langevin_error_example.py
|
choderalab/saltswap
|
d30804beb158960a62f94182c694df6dd9130fb8
|
[
"MIT"
] | 2
|
2017-02-01T21:46:18.000Z
|
2018-01-15T18:56:56.000Z
|
# Demonstrate a protocol-work bookkeeping discrepancy in
# ExternalPerturbationLangevinIntegrator when
# NonbondedForce.updateParametersInContext() is called without actually
# perturbing any parameters.
import numpy as np
from simtk import openmm, unit
from simtk.openmm import app
from openmmtools.testsystems import WaterBox
from openmmtools.integrators import ExternalPerturbationLangevinIntegrator
print('OpenMM version: ', openmm.version.full_version)
# Using one CPU thread for reproducible single-core behavior.
import os
os.environ['OPENMM_CPU_THREADS'] = '1'
# Long range method
nonbonded_method = 'CutoffPeriodic'
# Creating a waterbox at constant pressure/temperature.
wbox = WaterBox(box_edge=21.0*unit.angstrom , nonbondedMethod=getattr(app, nonbonded_method))
wbox.system.addForce(openmm.MonteCarloBarostat(1*unit.atmospheres, 300*unit.kelvin))
# Extracting the nonbonded force (index 2 in the WaterBox test system).
non_bonded_force = wbox.system.getForce(2)
# The integrator to perform the equilibrium dynamics
integrator = ExternalPerturbationLangevinIntegrator(temperature=300*unit.kelvin, collision_rate=50.0 / unit.picosecond, timestep=1.0 * unit.femtosecond)
# Creating the context
platform = openmm.Platform.getPlatformByName('CPU')
context = openmm.Context(wbox.system, integrator, platform)
context.setPositions(wbox.positions)
# Running some equilibrium dynamics
integrator.step(100)
# The number of NCMC type iterations and NCMC steps per iteration.
niterations = 20
ncmc_steps = 10
internal_work = np.zeros(niterations)
external_work = np.zeros(niterations)
# Whether to call updateParametersInContext. If True, then assertion below will fail.
update_parameters = True
# A model of NCMC without perturbation but using updateParametersInContext
for i in range(niterations):
    integrator.setGlobalVariableByName('protocol_work',0)
    for s in range(ncmc_steps):
        integrator.step(1)
    # Externally-tracked work: potential energy change across the (null) update.
    initial_external_energy = context.getState(getEnergy=True).getPotentialEnergy() / unit.kilojoule_per_mole
    ###---- Not perturbing the system but updating parameters anyway----###
    if update_parameters:
        non_bonded_force.updateParametersInContext(context)
    final_external_energy = context.getState(getEnergy=True).getPotentialEnergy() / unit.kilojoule_per_mole
    integrator.step(1)
    # Integrator-tracked protocol work for the same iteration.
    internal_work[i] = integrator.getGlobalVariableByName('protocol_work')
    external_work[i] = final_external_energy - initial_external_energy
# The two work accumulators should agree; with update_parameters=True they do not.
assert np.all(np.abs(internal_work - external_work) < 1E-5)
| 39.661017
| 152
| 0.789316
|
# Demonstrate a protocol-work bookkeeping discrepancy in
# ExternalPerturbationLangevinIntegrator when updateParametersInContext()
# is called without actually perturbing any parameters.
import numpy as np
from simtk import openmm, unit
from simtk.openmm import app
from openmmtools.testsystems import WaterBox
from openmmtools.integrators import ExternalPerturbationLangevinIntegrator
print('OpenMM version: ', openmm.version.full_version)
# Force single-threaded CPU execution for reproducibility.
import os
os.environ['OPENMM_CPU_THREADS'] = '1'
nonbonded_method = 'CutoffPeriodic'
wbox = WaterBox(box_edge=21.0*unit.angstrom , nonbondedMethod=getattr(app, nonbonded_method))
wbox.system.addForce(openmm.MonteCarloBarostat(1*unit.atmospheres, 300*unit.kelvin))
# Nonbonded force is at index 2 in the WaterBox test system.
non_bonded_force = wbox.system.getForce(2)
integrator = ExternalPerturbationLangevinIntegrator(temperature=300*unit.kelvin, collision_rate=50.0 / unit.picosecond, timestep=1.0 * unit.femtosecond)
platform = openmm.Platform.getPlatformByName('CPU')
context = openmm.Context(wbox.system, integrator, platform)
context.setPositions(wbox.positions)
# Brief equilibration before measuring work.
integrator.step(100)
niterations = 20
ncmc_steps = 10
internal_work = np.zeros(niterations)
external_work = np.zeros(niterations)
# Whether to call updateParametersInContext; if True the assertion below fails.
update_parameters = True
for i in range(niterations):
    integrator.setGlobalVariableByName('protocol_work',0)
    for s in range(ncmc_steps):
        integrator.step(1)
    # Externally-tracked work: potential energy change across the (null) update.
    initial_external_energy = context.getState(getEnergy=True).getPotentialEnergy() / unit.kilojoule_per_mole
    if update_parameters:
        non_bonded_force.updateParametersInContext(context)
    # BUG FIX: this assignment (and the ``if update_parameters`` block above)
    # had been truncated to a stray ``etEnergy=True)...`` fragment, leaving
    # ``final_external_energy`` undefined; restored from the documented version.
    final_external_energy = context.getState(getEnergy=True).getPotentialEnergy() / unit.kilojoule_per_mole
    integrator.step(1)
    internal_work[i] = integrator.getGlobalVariableByName('protocol_work')
    external_work[i] = final_external_energy - initial_external_energy
assert np.all(np.abs(internal_work - external_work) < 1E-5)
| true
| true
|
f70c8c618feb024a795bc729d00c3537e6ed6965
| 9,087
|
py
|
Python
|
ivy/dot_layout.py
|
AnonyQUIC/EPIC---Ivy
|
316f17ff7d2fae400359ccb58177fbc8bf5c4551
|
[
"MIT"
] | 113
|
2019-05-09T15:37:47.000Z
|
2022-03-14T04:02:01.000Z
|
ivy/dot_layout.py
|
AnonyQUIC/EPIC---Ivy
|
316f17ff7d2fae400359ccb58177fbc8bf5c4551
|
[
"MIT"
] | 58
|
2019-09-03T15:42:29.000Z
|
2021-01-15T02:20:29.000Z
|
ivy/dot_layout.py
|
AnonyQUIC/EPIC---Ivy
|
316f17ff7d2fae400359ccb58177fbc8bf5c4551
|
[
"MIT"
] | 40
|
2016-01-02T19:13:18.000Z
|
2018-10-27T11:38:00.000Z
|
#
# Copyright (c) Microsoft Corporation. All Rights Reserved.
#
"""
Use DOT to layout a graph for cytoscape.js
TODO: add support for middle points in edges
"""
from __future__ import division
from collections import deque, defaultdict
import platform
if True or platform.system() == 'Windows':
from ivy_graphviz import AGraph
else:
from pygraphviz import AGraph
from ivy_utils import topological_sort
import ivy_utils as iu
# import pygraphviz
def cubic_bezier_point(p0, p1, p2, p3, t):
    """
    Evaluate the cubic Bezier curve with control points p0..p3 at parameter t.

    https://en.wikipedia.org/wiki/B%C3%A9zier_curve#Cubic_B.C3.A9zier_curves
    """
    s = 1.0 - t
    # Bernstein basis weights for the four control points.
    weights = (s**3, 3.0 * t * s**2, 3.0 * t**2 * s, t**3)
    controls = (p0, p1, p2, p3)
    return {
        axis: sum(w * p[axis] for w, p in zip(weights, controls))
        for axis in ("x", "y")
    }
def square_distance_to_segment(p, p1, p2):
    """Return the squared distance from point *p* to the segment p1-p2."""
    dx0, dy0 = p["x"] - p1["x"], p["y"] - p1["y"]
    dx1, dy1 = p2["x"] - p1["x"], p2["y"] - p1["y"]
    v0sq = dx0 * dx0 + dy0 * dy0
    v1sq = dx1 * dx1 + dy1 * dy1
    prod = dx0 * dx1 + dy0 * dy1
    v2sq = prod * prod / v1sq
    if prod < 0:
        # Projection falls before p1: nearest point is the p1 endpoint.
        return v0sq
    if v2sq < v1sq:
        # Projection falls inside the segment: Pythagoras on the rejection.
        return v0sq - v2sq
    # Projection falls past p2: nearest point is the p2 endpoint.
    dx3, dy3 = dx0 - dx1, dy0 - dy1
    return dx3 * dx3 + dy3 * dy3
def approximate_cubic_bezier(p0, p1, p2, p3, threshold=1.0, limit=1024):
    """
    Return a series of points whose segments approximate the given
    Bezier curve, subdividing until every chord is within *threshold*
    of the curve or *limit* points have been produced.
    """
    max_sq_error = threshold ** 2
    samples = {0.0: p0, 1.0: p3}  # parameter t -> point on the curve
    pending = deque([(0.0, 1.0)])
    while pending and len(samples) < limit:
        lo, hi = pending.popleft()
        mid = (lo + hi) / 2.0
        candidate = cubic_bezier_point(p0, p1, p2, p3, mid)
        # Keep the midpoint only where the chord lo-hi approximates poorly.
        if square_distance_to_segment(candidate, samples[lo], samples[hi]) > max_sq_error:
            samples[mid] = candidate
            pending.append((lo, mid))
            pending.append((mid, hi))
    return [samples[t] for t in sorted(samples.keys())]
def get_approximation_points(bspline):
    """
    Return a series of points whose segments approximate the given
    bspline (a flat point list, one cubic piece per three points).
    """
    points = []
    for start in range(0, len(bspline) - 3, 3):
        piece = approximate_cubic_bezier(
            bspline[start], bspline[start + 1], bspline[start + 2], bspline[start + 3],
            threshold=4.0,
            limit=100,
        )
        # Drop the piece's final point; it is the next piece's first point.
        points.extend(piece[:-1])
    points.append(bspline[-1])
    return points
def _to_position(st):
    """Parse a dot "x,y" position string, flipping y so the origin is top-left."""
    global y_origin
    parts = st.split(',')
    assert len(parts) == 2, st
    return {
        "x": float(parts[0]),
        "y": y_origin - float(parts[1]),
    }
def _to_edge_position(st):
    """
    Parse a dot splineType edge position string into arrow endpoints,
    the raw b-spline control points, and a polyline approximation.

    http://www.graphviz.org/doc/info/attrs.html#k:splineType
    """
    sp = st.split()
    result = {}
    # Optional endpoint markers: "e,x,y" (arrow head) and "s,x,y" (arrow tail).
    if sp[0].startswith('e,'):
        result["arrowend"] = _to_position(sp[0][2:])
        sp = sp[1:]
    if sp[0].startswith('s,'):
        result["arrowstart"] = _to_position(sp[0][2:])
        sp = sp[1:]
    # Remaining tokens are the b-spline control points.
    result["bspline"] = [_to_position(x) for x in sp]
    result["approxpoints"] = get_approximation_points(result["bspline"])
    return result
def _to_coord_list(st):
    """Create a list of positions from a dot-generated "x1,y1,x2,y2,..." string."""
    nums = st.split(',')
    pairs = [','.join((nums[2 * i], nums[2 * i + 1])) for i in range(len(nums) // 2)]
    # BUG FIX: return a concrete list instead of ``map(...)``. Under Python 3
    # ``map`` yields a lazy, single-use iterator, which breaks callers that
    # store or serialize the coordinates (e.g. the subgraph-box shapes).
    return [_to_position(p) for p in pairs]
def dot_layout(cy_elements,edge_labels=False,subgraph_boxes=False,node_gt=None):
    """
    Get a CyElements object and augment it (in-place) with positions,
    widths, heights, and spline data from a dot based layout.

    edge_labels is true if labels should appear on edges
    subgraph_boxes is true if boxes should be drawn around subgraphs
    node_gt, if given, switches edge orientation to a cluster-based node
    ordering (see below) so inter-cluster edges all point the same way.

    Returns the object.
    """
    elements = cy_elements.elements
    g = AGraph(directed=True, strict=False, forcelabels=True)
    # make transitive relations appear top to bottom
    elements = list(elements)
    nodes_by_id = dict(
        (e["data"]["id"], e)
        for e in elements if e["group"] == "nodes"
    )
    order = [
        (nodes_by_id[e["data"]["source"]], nodes_by_id[e["data"]["target"]])
        for e in elements if
        e["group"] == "edges" and
        "transitive" in e["data"] and
        e["data"]["transitive"]
    ]
    elements = topological_sort(elements, order, lambda e: e["data"]["id"])
    # get the node id's and stable sort them by cluster
    # the idea here is to convert the graph into a dag by sorting
    # the nodes, then reversing the back edges. In particular, we try to make
    # all the edges between two clusters go in the same direction so clustering
    # doesn't result in horizontal edges, which dot renders badly.
    sorted_nodes = [e["data"]["id"] for e in elements if e["group"] == "nodes"]
    sorted_nodes = sorted(enumerate(sorted_nodes),key = lambda x: (nodes_by_id[x[1]]["data"]["cluster"],x[0]))
    sorted_nodes = [y for idx,y in sorted_nodes]
    node_key = dict((id,idx) for idx,id in enumerate(sorted_nodes))
    # NOTE(review): when the caller passes node_gt it is *replaced* by the
    # cluster-order comparison below; only its truthiness is honored — confirm
    # this is the intended contract.
    if node_gt is None:
        node_gt = lambda X,y:False
    else:
        node_gt = lambda x,y: node_key[x] > node_key[y]
    # add nodes to the graph
    for e in elements:
        if e["group"] == "nodes" and e["classes"] != 'non_existing':
            g.add_node(e["data"]["id"], label=e["data"]["label"].replace('\n', '\\n'))
    # TODO: remove this, it's specific to leader_demo
    weight = {
        'reach': 10,
        'le': 10,
        'id': 1,
    }
    constraint = {
        'pending': False,
    }
    # add edges to the graph; back edges (per node_gt) are added reversed with
    # dir='back' so dot lays them out downward but renders the arrow correctly
    for e in elements:
        if e["group"] == "edges":
            kwargs = {'label':e["data"]["label"]} if edge_labels else {}
            if node_gt(e["data"]["source"],e["data"]["target"]):
                g.add_edge(
                    e["data"]["target"],
                    e["data"]["source"],
                    e["data"]["id"],
                    dir = 'back',
                    **kwargs
                )
            else:
                g.add_edge(
                    e["data"]["source"],
                    e["data"]["target"],
                    e["data"]["id"],
                    **kwargs
                )
    # add clusters
    clusters = defaultdict(list)
    for e in elements:
        if e["group"] == "nodes" and e["data"]["cluster"] is not None and e["classes"] != 'non_existing':
            clusters[e["data"]["cluster"]].append(e["data"]["id"])
    for i, k in enumerate(sorted(clusters.keys())):
        g.add_subgraph(
            name='cluster_{}'.format(i),
            nbunch=clusters[k],
            rank='min',
        )
    # now get positions, heights, widths, and bsplines
    g.layout(prog='dot')
    # get the y origin. we want the top left of the graph to be a
    # fixed coordinate (hopefully (0,0)) so the graph doesn't jump when
    # its height changes. Unfortunately, pygraphviz has a bug a gives
    # the wrong bbox, so we compute the max y coord.
    global y_origin
    y_origin = 0.0
    for n in g.nodes():
        top = float(n.attr['pos'].split(',')[1]) + float(n.attr['height'])/2
        if top > y_origin:
            y_origin = top
    if subgraph_boxes:
        for sg in g.subgraphs():
            top = float(sg.graph_attr['bb'].split(',')[3])
            if top > y_origin:
                y_origin = top
    # copy layout results (positions, sizes, splines) back onto the elements
    for e in elements:
        if e["group"] == "nodes" and e["classes"] != 'non_existing':
            attr = g.get_node(e["data"]["id"]).attr
            e["position"] = _to_position(attr['pos'])
            # dot reports sizes in inches; cytoscape wants points (72/inch)
            e["data"]["width"] = 72 * float(attr['width'])
            e["data"]["height"] = 72 * float(attr['height'])
        elif e["group"] == "edges":
            if node_gt(e["data"]["source"],e["data"]["target"]):
                # back edge: fetch the reversed edge and flip its spline so the
                # stored geometry matches the original source->target direction
                attr = g.get_edge(e["data"]["target"], e["data"]["source"], e["data"]["id"]).attr
                pos = attr['pos']
                pe = pos.split()
                ppe = pe[1:]
                ppe.reverse()
                pos = ' '.join([pe[0].replace('s','e')] + ppe)
            else:
                attr = g.get_edge(e["data"]["source"], e["data"]["target"], e["data"]["id"]).attr
                pos = attr['pos']
            e["data"].update(_to_edge_position(pos))
            if edge_labels and e["data"]["label"] != '':
                e["data"]["lp"] = _to_position(attr['lp'])
    if subgraph_boxes:
        for sg in g.subgraphs():
            box = cy_elements.add_shape(sg.name,classes='subgraphs')
            coords = _to_coord_list(sg.graph_attr['bb'])
            box["data"]["coords"] = coords
    return cy_elements
| 31.884211
| 110
| 0.541873
|
from __future__ import division
from collections import deque, defaultdict
import platform
if True or platform.system() == 'Windows':
from ivy_graphviz import AGraph
else:
from pygraphviz import AGraph
from ivy_utils import topological_sort
import ivy_utils as iu
def cubic_bezier_point(p0, p1, p2, p3, t):
    """Evaluate a cubic Bezier curve at parameter t.

    p0..p3 are {"x", "y"} dicts (p0/p3 endpoints, p1/p2 control points);
    returns the curve point at t as a new {"x", "y"} dict.
    """
    u = 1.0 - t
    # Bernstein basis weights for the four control points.
    weights = (u ** 3, 3.0 * t * u ** 2, 3.0 * t ** 2 * u, t ** 3)
    control = (p0, p1, p2, p3)
    return {
        axis: sum(w * p[axis] for w, p in zip(weights, control))
        for axis in ("x", "y")
    }
def square_distance_to_segment(p, p1, p2):
    """Return the squared distance from point p to the segment p1-p2.

    All points are {"x", "y"} dicts.  Squared distances are used so the
    caller can compare against a squared threshold without a sqrt.
    """
    # v0: vector p1 -> p; v1: vector p1 -> p2.
    v0 = (p["x"] - p1["x"], p["y"] - p1["y"])
    v1 = (p2["x"] - p1["x"], p2["y"] - p1["y"])
    v0sq = v0[0] * v0[0] + v0[1] * v0[1]
    v1sq = v1[0] * v1[0] + v1[1] * v1[1]
    if v1sq == 0:
        # Degenerate zero-length segment (p1 == p2): the distance is simply
        # the distance to that point.  Previously this raised
        # ZeroDivisionError via the v2sq computation below.
        return v0sq
    prod = v0[0] * v1[0] + v0[1] * v1[1]
    # v2sq: squared length of the projection of v0 onto v1.
    v2sq = prod * prod / v1sq
    if prod < 0:
        # Projection falls before p1: nearest point is p1.
        return v0sq
    elif v2sq < v1sq:
        # Projection falls inside the segment: Pythagoras.
        return v0sq - v2sq
    else:
        # Projection falls past p2: nearest point is p2.
        v3 = (v0[0] - v1[0], v0[1] - v1[1])
        return v3[0] * v3[0] + v3[1] * v3[1]
def approximate_cubic_bezier(p0, p1, p2, p3, threshold=1.0, limit=1024):
    """Approximate a cubic Bezier curve with a polyline.

    Intervals of the parameter range are refined breadth-first: the
    midpoint of an interval is kept whenever it lies farther than
    `threshold` from the chord between the interval's endpoints.
    Refinement stops once every kept midpoint is close enough to its
    chord or `limit` points have been collected.  Returns the kept
    points sorted by curve parameter.
    """
    threshold_squared = threshold ** 2
    kept = {0.0: p0, 1.0: p3}          # parameter value -> curve point
    pending = deque([(0.0, 1.0)])      # parameter intervals left to examine
    while pending and len(kept) < limit:
        lo, hi = pending.popleft()
        mid = (lo + hi) / 2.0
        candidate = cubic_bezier_point(p0, p1, p2, p3, mid)
        # Subdivide only where the chord is a poor approximation.
        if square_distance_to_segment(candidate, kept[lo], kept[hi]) > threshold_squared:
            kept[mid] = candidate
            pending.extend([(lo, mid), (mid, hi)])
    return [kept[t] for t in sorted(kept)]
def get_approximation_points(bspline):
    """Flatten a piecewise cubic B-spline into a polyline.

    `bspline` is a list of {"x", "y"} points encoding consecutive cubic
    segments; segment k uses indices 3*k .. 3*k+3, so adjacent segments
    share an endpoint.  Each segment is approximated independently and
    the shared endpoints are dropped, except for the very last point.
    """
    polyline = []
    for start in range(0, len(bspline) - 3, 3):
        segment = approximate_cubic_bezier(
            bspline[start], bspline[start + 1],
            bspline[start + 2], bspline[start + 3],
            threshold=4.0,
            limit=100,
        )
        # Drop the segment's last point; it is the next segment's first.
        polyline.extend(segment[:-1])
    polyline.append(bspline[-1])
    return polyline
def _to_position(st):
    """Convert a graphviz "x,y" position string into a cytoscape position.

    Graphviz uses a y-up coordinate system; the y coordinate is flipped
    against the module-global `y_origin` (set by dot_layout to the top of
    the laid-out graph) so that y grows downward from 0.
    """
    global y_origin
    parts = st.split(',')
    assert len(parts) == 2, st
    x_str, y_str = parts
    return {"x": float(x_str), "y": y_origin - float(y_str)}
def _to_edge_position(st):
    """Parse a graphviz edge `pos` attribute string.

    The attribute is a space-separated point list, optionally prefixed by
    an arrow-head endpoint token ("e,x,y") and/or an arrow-tail start
    token ("s,x,y"), followed by the bspline control points.  Returns a
    dict with optional "arrowend"/"arrowstart" entries plus "bspline"
    (control points) and "approxpoints" (a flattened polyline).
    """
    tokens = st.split()
    result = {}
    # Graphviz emits the optional 'e,' token before the optional 's,' one.
    for prefix, key in (('e,', "arrowend"), ('s,', "arrowstart")):
        if tokens[0].startswith(prefix):
            result[key] = _to_position(tokens[0][len(prefix):])
            tokens = tokens[1:]
    spline = [_to_position(tok) for tok in tokens]
    result["bspline"] = spline
    result["approxpoints"] = get_approximation_points(spline)
    return result
def _to_coord_list(st):
    """Convert a graphviz bounding-box string "x0,y0,x1,y1,..." into a
    list of {"x", "y"} positions.

    Returns a real list: the previous version returned `map(...)`, which
    under Python 3 is a lazy, single-use iterator — iterating or
    serializing the stored coords a second time would silently yield
    nothing.
    """
    nums = st.split(',')
    pairs = [','.join((nums[2*i], nums[2*i+1])) for i in range(len(nums)//2)]
    return [_to_position(p) for p in pairs]
def dot_layout(cy_elements,edge_labels=False,subgraph_boxes=False,node_gt=None):
    """Lay out `cy_elements` with graphviz's "dot" engine.

    Node positions/sizes, edge bsplines (and optionally edge label
    positions and cluster bounding boxes) are written back into the
    element dicts; `cy_elements` is also returned.

    cy_elements: collection of cytoscape-style node/edge element dicts.
    edge_labels: if True, pass labels to dot and record each non-empty
        edge label's position under "lp".
    subgraph_boxes: if True, add one shape element per cluster bounding box.
    node_gt: orientation flag for back edges.  NOTE(review): when supplied
        it is immediately replaced by an order derived from the sort below;
        the callable passed in is never itself invoked — only its
        None/non-None status matters.  Confirm with callers.
    """
    elements = cy_elements.elements
    g = AGraph(directed=True, strict=False, forcelabels=True)
    elements = list(elements)
    # Index nodes by id for the edge-ordering pass below.
    nodes_by_id = dict(
        (e["data"]["id"], e)
        for e in elements if e["group"] == "nodes"
    )
    # Pairs (source, target) of edges marked "transitive" constrain the
    # topological order of the elements.
    order = [
        (nodes_by_id[e["data"]["source"]], nodes_by_id[e["data"]["target"]])
        for e in elements if
        e["group"] == "edges" and
        "transitive" in e["data"] and
        e["data"]["transitive"]
    ]
    elements = topological_sort(elements, order, lambda e: e["data"]["id"])
    # the idea here is to convert the graph into a dag by sorting
    # the nodes, then reversing the back edges. In particular, we try to make
    # all the edges between two clusters go in the same direction so clustering
    # doesn't result in horizontal edges, which dot renders badly.
    sorted_nodes = [e["data"]["id"] for e in elements if e["group"] == "nodes"]
    sorted_nodes = sorted(enumerate(sorted_nodes),key = lambda x: (nodes_by_id[x[1]]["data"]["cluster"],x[0]))
    sorted_nodes = [y for idx,y in sorted_nodes]
    node_key = dict((id,idx) for idx,id in enumerate(sorted_nodes))
    # node_gt(x, y) decides whether edge x->y is a "back edge" to reverse.
    if node_gt is None:
        node_gt = lambda X,y:False
    else:
        node_gt = lambda x,y: node_key[x] > node_key[y]
    # Add the (existing) nodes to the graphviz graph.
    for e in elements:
        if e["group"] == "nodes" and e["classes"] != 'non_existing':
            g.add_node(e["data"]["id"], label=e["data"]["label"].replace('\n', '\\n'))
    weight = {
        'reach': 10,
        'le': 10,
        'id': 1,
    }
    constraint = {
        'pending': False,
    }
    # add edges to the graph
    for e in elements:
        if e["group"] == "edges":
            # kwargs = {'weight': weight.get(e["data"]["obj"], 0)},
            kwargs = {'label':e["data"]["label"]} if edge_labels else {}
            if node_gt(e["data"]["source"],e["data"]["target"]):
                # Back edge: insert reversed with dir='back' so dot sees a
                # DAG but still draws the arrowhead at the true target.
                g.add_edge(
                    e["data"]["target"],
                    e["data"]["source"],
                    e["data"]["id"],
                    dir = 'back',
                    **kwargs
                    #constraint=constraint.get(e["data"]["obj"], True),
                )
            else:
                g.add_edge(
                    e["data"]["source"],
                    e["data"]["target"],
                    e["data"]["id"],
                    **kwargs
                    #constraint=constraint.get(e["data"]["obj"], True),
                )
    # add clusters
    clusters = defaultdict(list)
    for e in elements:
        if e["group"] == "nodes" and e["data"]["cluster"] is not None and e["classes"] != 'non_existing':
            clusters[e["data"]["cluster"]].append(e["data"]["id"])
    for i, k in enumerate(sorted(clusters.keys())):
        g.add_subgraph(
            name='cluster_{}'.format(i),
            nbunch=clusters[k],
            rank='min',
        )
    # now get positions, heights, widths, and bsplines
    g.layout(prog='dot')
    # get the y origin. we want the top left of the graph to be a
    # fixed coordinate (hopefully (0,0)) so the graph doesn't jump when
    # its height changes: y_origin is the max y over nodes (and cluster
    # boxes), and _to_position flips every y against it.
    global y_origin
    y_origin = 0.0
    for n in g.nodes():
        top = float(n.attr['pos'].split(',')[1]) + float(n.attr['height'])/2
        if top > y_origin:
            y_origin = top
    if subgraph_boxes:
        for sg in g.subgraphs():
            top = float(sg.graph_attr['bb'].split(',')[3])
            if top > y_origin:
                y_origin = top
    # Copy positions/sizes/splines back into the elements.
    for e in elements:
        if e["group"] == "nodes" and e["classes"] != 'non_existing':
            attr = g.get_node(e["data"]["id"]).attr
            e["position"] = _to_position(attr['pos'])
            # graphviz sizes are in inches; cytoscape wants points (72/inch).
            e["data"]["width"] = 72 * float(attr['width'])
            e["data"]["height"] = 72 * float(attr['height'])
        elif e["group"] == "edges":
            if node_gt(e["data"]["source"],e["data"]["target"]):
                # Reversed back edge: fetch the spline and flip it so the
                # stored points run source -> target again.  The 's,' start
                # token becomes an 'e,' end token.
                attr = g.get_edge(e["data"]["target"], e["data"]["source"], e["data"]["id"]).attr
                pos = attr['pos']
                pe = pos.split()
                ppe = pe[1:]
                ppe.reverse()
                pos = ' '.join([pe[0].replace('s','e')] + ppe)
            else:
                attr = g.get_edge(e["data"]["source"], e["data"]["target"], e["data"]["id"]).attr
                pos = attr['pos']
            e["data"].update(_to_edge_position(pos))
            if edge_labels and e["data"]["label"] != '':
                e["data"]["lp"] = _to_position(attr['lp'])
    if subgraph_boxes:
        for sg in g.subgraphs():
            box = cy_elements.add_shape(sg.name,classes='subgraphs')
            coords = _to_coord_list(sg.graph_attr['bb'])
            box["data"]["coords"] = coords
    return cy_elements
| true
| true
|
f70c8c9b8ac7368a5b5eb84ab7f548195df88c4b
| 713
|
py
|
Python
|
app/server/test/testWordService.py
|
hsadler/zentype2
|
08694727d65531b2c7bd0cea97f53c5a270d0f51
|
[
"MIT"
] | null | null | null |
app/server/test/testWordService.py
|
hsadler/zentype2
|
08694727d65531b2c7bd0cea97f53c5a270d0f51
|
[
"MIT"
] | null | null | null |
app/server/test/testWordService.py
|
hsadler/zentype2
|
08694727d65531b2c7bd0cea97f53c5a270d0f51
|
[
"MIT"
] | null | null | null |
# Ad-hoc manual test script: fetch a filtered random word list from the
# Word service and pretty-print the results.  Project modules are
# resolved relative to the parent directory, so run it from this folder.
import sys
sys.path.append('..')
from data_object.word_data_object import WordDataObject
from service.word import Word
from service.language import Language
from utils.print import ppp
# random word list options
language = Language(Language.ENGLISH)
# Rank/length filters: each is an inclusive {'min', 'max'} range.
qwerty_difficulty_rank = {
    'min': 0,
    'max': 10000
}
frequency_rank = {
    'min': 0,
    'max': 10000
}
length = {
    'min': 0,
    'max': 100
}
# Only words containing this substring; at most `limit` results.
substring = 'ba'
limit = 10
word_list = Word.get_random_list(
    language=language,
    qwerty_difficulty_rank=qwerty_difficulty_rank,
    frequency_rank=frequency_rank,
    length=length,
    substring=substring,
    limit=limit
)
# Print the raw list, each word as a dict, and the result count.
ppp(word_list)
for wordDO in word_list:
    ppp(wordDO.to_dict())
ppp("length: {0}".format(len(word_list)))
| 16.97619
| 55
| 0.746143
|
# Ad-hoc manual test script: fetch a filtered random word list from the
# Word service and pretty-print the results.  Project modules are
# resolved relative to the parent directory, so run it from this folder.
import sys
sys.path.append('..')
from data_object.word_data_object import WordDataObject
from service.word import Word
from service.language import Language
from utils.print import ppp
# Random word list options; each rank/length filter is an inclusive
# {'min', 'max'} range.
language = Language(Language.ENGLISH)
qwerty_difficulty_rank = {
    'min': 0,
    'max': 10000
}
frequency_rank = {
    'min': 0,
    'max': 10000
}
length = {
    'min': 0,
    'max': 100
}
# Only words containing this substring; at most `limit` results.
substring = 'ba'
limit = 10
word_list = Word.get_random_list(
    language=language,
    qwerty_difficulty_rank=qwerty_difficulty_rank,
    frequency_rank=frequency_rank,
    length=length,
    substring=substring,
    limit=limit
)
# Print the raw list, each word as a dict, and the result count.
ppp(word_list)
for wordDO in word_list:
    ppp(wordDO.to_dict())
ppp("length: {0}".format(len(word_list)))
| true
| true
|
f70c8ca1e6f009c0c1bb2b221e2877a82815c4ef
| 33,141
|
py
|
Python
|
tests/sessions_tests/tests.py
|
jmcdono362/django
|
2014db50f4522243dba3190c640f64cf124d5b68
|
[
"PSF-2.0",
"BSD-3-Clause"
] | 2
|
2021-03-13T21:16:42.000Z
|
2022-01-12T00:29:14.000Z
|
tests/sessions_tests/tests.py
|
jmcdono362/django
|
2014db50f4522243dba3190c640f64cf124d5b68
|
[
"PSF-2.0",
"BSD-3-Clause"
] | 11
|
2020-03-24T15:46:05.000Z
|
2022-03-11T23:20:58.000Z
|
tests/sessions_tests/tests.py
|
jmcdono362/django
|
2014db50f4522243dba3190c640f64cf124d5b68
|
[
"PSF-2.0",
"BSD-3-Clause"
] | 2
|
2018-01-08T08:14:29.000Z
|
2020-11-04T08:46:29.000Z
|
import base64
import os
import shutil
import string
import tempfile
import unittest
from datetime import timedelta
from http import cookies
from django.conf import settings
from django.contrib.sessions.backends.base import UpdateError
from django.contrib.sessions.backends.cache import SessionStore as CacheSession
from django.contrib.sessions.backends.cached_db import (
SessionStore as CacheDBSession,
)
from django.contrib.sessions.backends.db import SessionStore as DatabaseSession
from django.contrib.sessions.backends.file import SessionStore as FileSession
from django.contrib.sessions.backends.signed_cookies import (
SessionStore as CookieSession,
)
from django.contrib.sessions.exceptions import InvalidSessionKey
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.sessions.models import Session
from django.contrib.sessions.serializers import (
JSONSerializer, PickleSerializer,
)
from django.core import management
from django.core.cache import caches
from django.core.cache.backends.base import InvalidCacheBackendError
from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation
from django.http import HttpResponse
from django.test import (
RequestFactory, TestCase, ignore_warnings, override_settings,
)
from django.test.utils import patch_logger
from django.utils import timezone
from .models import SessionStore as CustomDatabaseSession
class SessionTestsMixin:
    """Backend-independent behavioral tests for session stores.

    This does not inherit from TestCase to avoid any tests being run with
    this class, which wouldn't work, and to allow different TestCase
    subclasses to be used.  Concrete subclasses set `backend` to the
    SessionStore class under test.
    """
    backend = None  # subclasses must specify
    def setUp(self):
        self.session = self.backend()
    def tearDown(self):
        # NB: be careful to delete any sessions created; stale sessions fill up
        # the /tmp (with some backends) and eventually overwhelm it after lots
        # of runs (think buildbots)
        self.session.delete()
    def test_new_session(self):
        self.assertIs(self.session.modified, False)
        self.assertIs(self.session.accessed, False)
    def test_get_empty(self):
        self.assertIsNone(self.session.get('cat'))
    def test_store(self):
        self.session['cat'] = "dog"
        self.assertIs(self.session.modified, True)
        self.assertEqual(self.session.pop('cat'), 'dog')
    def test_pop(self):
        self.session['some key'] = 'exists'
        # Need to reset these to pretend we haven't accessed it.
        # (Fix: these previously set self.accessed / self.modified on the
        # TestCase itself, which left the session's flags untouched and
        # made the reset a no-op.)
        self.session.accessed = False
        self.session.modified = False
        self.assertEqual(self.session.pop('some key'), 'exists')
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, True)
        self.assertIsNone(self.session.get('some key'))
    def test_pop_default(self):
        self.assertEqual(self.session.pop('some key', 'does not exist'),
                         'does not exist')
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, False)
    def test_pop_default_named_argument(self):
        self.assertEqual(self.session.pop('some key', default='does not exist'), 'does not exist')
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, False)
    def test_pop_no_default_keyerror_raised(self):
        with self.assertRaises(KeyError):
            self.session.pop('some key')
    def test_setdefault(self):
        self.assertEqual(self.session.setdefault('foo', 'bar'), 'bar')
        self.assertEqual(self.session.setdefault('foo', 'baz'), 'bar')
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, True)
    def test_update(self):
        self.session.update({'update key': 1})
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, True)
        self.assertEqual(self.session.get('update key', None), 1)
    def test_has_key(self):
        self.session['some key'] = 1
        self.session.modified = False
        self.session.accessed = False
        self.assertIn('some key', self.session)
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, False)
    def test_values(self):
        self.assertEqual(list(self.session.values()), [])
        self.assertIs(self.session.accessed, True)
        self.session['some key'] = 1
        self.session.modified = False
        self.session.accessed = False
        self.assertEqual(list(self.session.values()), [1])
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, False)
    def test_keys(self):
        self.session['x'] = 1
        self.session.modified = False
        self.session.accessed = False
        self.assertEqual(list(self.session.keys()), ['x'])
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, False)
    def test_items(self):
        self.session['x'] = 1
        self.session.modified = False
        self.session.accessed = False
        self.assertEqual(list(self.session.items()), [('x', 1)])
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, False)
    def test_clear(self):
        self.session['x'] = 1
        self.session.modified = False
        self.session.accessed = False
        self.assertEqual(list(self.session.items()), [('x', 1)])
        self.session.clear()
        self.assertEqual(list(self.session.items()), [])
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, True)
    def test_save(self):
        self.session.save()
        self.assertIs(self.session.exists(self.session.session_key), True)
    def test_delete(self):
        self.session.save()
        self.session.delete(self.session.session_key)
        self.assertIs(self.session.exists(self.session.session_key), False)
    def test_flush(self):
        self.session['foo'] = 'bar'
        self.session.save()
        prev_key = self.session.session_key
        self.session.flush()
        self.assertIs(self.session.exists(prev_key), False)
        self.assertNotEqual(self.session.session_key, prev_key)
        self.assertIsNone(self.session.session_key)
        self.assertIs(self.session.modified, True)
        self.assertIs(self.session.accessed, True)
    def test_cycle(self):
        self.session['a'], self.session['b'] = 'c', 'd'
        self.session.save()
        prev_key = self.session.session_key
        prev_data = list(self.session.items())
        self.session.cycle_key()
        self.assertIs(self.session.exists(prev_key), False)
        self.assertNotEqual(self.session.session_key, prev_key)
        self.assertEqual(list(self.session.items()), prev_data)
    def test_cycle_with_no_session_cache(self):
        self.session['a'], self.session['b'] = 'c', 'd'
        self.session.save()
        prev_data = self.session.items()
        self.session = self.backend(self.session.session_key)
        self.assertIs(hasattr(self.session, '_session_cache'), False)
        self.session.cycle_key()
        self.assertCountEqual(self.session.items(), prev_data)
    def test_save_doesnt_clear_data(self):
        self.session['a'] = 'b'
        self.session.save()
        self.assertEqual(self.session['a'], 'b')
    def test_invalid_key(self):
        # Submitting an invalid session key (either by guessing, or if the db has
        # removed the key) results in a new key being generated.
        try:
            session = self.backend('1')
            session.save()
            self.assertNotEqual(session.session_key, '1')
            self.assertIsNone(session.get('cat'))
            session.delete()
        finally:
            # Some backends leave a stale cache entry for the invalid
            # session key; make sure that entry is manually deleted
            session.delete('1')
    def test_session_key_empty_string_invalid(self):
        """Falsey values (Such as an empty string) are rejected."""
        self.session._session_key = ''
        self.assertIsNone(self.session.session_key)
    def test_session_key_too_short_invalid(self):
        """Strings shorter than 8 characters are rejected."""
        self.session._session_key = '1234567'
        self.assertIsNone(self.session.session_key)
    def test_session_key_valid_string_saved(self):
        """Strings of length 8 and up are accepted and stored."""
        self.session._session_key = '12345678'
        self.assertEqual(self.session.session_key, '12345678')
    def test_session_key_is_read_only(self):
        def set_session_key(session):
            session.session_key = session._get_new_session_key()
        with self.assertRaises(AttributeError):
            set_session_key(self.session)
    # Custom session expiry
    def test_default_expiry(self):
        # A normal session has a max age equal to settings
        self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
        # So does a custom session with an idle expiration time of 0 (but it'll
        # expire at browser close)
        self.session.set_expiry(0)
        self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
    def test_custom_expiry_seconds(self):
        modification = timezone.now()
        self.session.set_expiry(10)
        date = self.session.get_expiry_date(modification=modification)
        self.assertEqual(date, modification + timedelta(seconds=10))
        age = self.session.get_expiry_age(modification=modification)
        self.assertEqual(age, 10)
    def test_custom_expiry_timedelta(self):
        modification = timezone.now()
        # Mock timezone.now, because set_expiry calls it on this code path.
        original_now = timezone.now
        try:
            timezone.now = lambda: modification
            self.session.set_expiry(timedelta(seconds=10))
        finally:
            timezone.now = original_now
        date = self.session.get_expiry_date(modification=modification)
        self.assertEqual(date, modification + timedelta(seconds=10))
        age = self.session.get_expiry_age(modification=modification)
        self.assertEqual(age, 10)
    def test_custom_expiry_datetime(self):
        modification = timezone.now()
        self.session.set_expiry(modification + timedelta(seconds=10))
        date = self.session.get_expiry_date(modification=modification)
        self.assertEqual(date, modification + timedelta(seconds=10))
        age = self.session.get_expiry_age(modification=modification)
        self.assertEqual(age, 10)
    def test_custom_expiry_reset(self):
        self.session.set_expiry(None)
        self.session.set_expiry(10)
        self.session.set_expiry(None)
        self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
    def test_get_expire_at_browser_close(self):
        # Tests get_expire_at_browser_close with different settings and different
        # set_expiry calls
        with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=False):
            self.session.set_expiry(10)
            self.assertIs(self.session.get_expire_at_browser_close(), False)
            self.session.set_expiry(0)
            self.assertIs(self.session.get_expire_at_browser_close(), True)
            self.session.set_expiry(None)
            self.assertIs(self.session.get_expire_at_browser_close(), False)
        with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=True):
            self.session.set_expiry(10)
            self.assertIs(self.session.get_expire_at_browser_close(), False)
            self.session.set_expiry(0)
            self.assertIs(self.session.get_expire_at_browser_close(), True)
            self.session.set_expiry(None)
            self.assertIs(self.session.get_expire_at_browser_close(), True)
    def test_decode(self):
        # Ensure we can decode what we encode
        data = {'a test key': 'a test value'}
        encoded = self.session.encode(data)
        self.assertEqual(self.session.decode(encoded), data)
    def test_decode_failure_logged_to_security(self):
        bad_encode = base64.b64encode(b'flaskdj:alkdjf')
        with patch_logger('django.security.SuspiciousSession', 'warning') as calls:
            self.assertEqual({}, self.session.decode(bad_encode))
        # check that the failed decode is logged
        self.assertEqual(len(calls), 1)
        self.assertIn('corrupted', calls[0])
    def test_actual_expiry(self):
        # this doesn't work with JSONSerializer (serializing timedelta)
        with override_settings(SESSION_SERIALIZER='django.contrib.sessions.serializers.PickleSerializer'):
            self.session = self.backend()  # reinitialize after overriding settings
            # Regression test for #19200
            old_session_key = None
            new_session_key = None
            try:
                self.session['foo'] = 'bar'
                self.session.set_expiry(-timedelta(seconds=10))
                self.session.save()
                old_session_key = self.session.session_key
                # With an expiry date in the past, the session expires instantly.
                new_session = self.backend(self.session.session_key)
                new_session_key = new_session.session_key
                self.assertNotIn('foo', new_session)
            finally:
                self.session.delete(old_session_key)
                self.session.delete(new_session_key)
    def test_session_load_does_not_create_record(self):
        """
        Loading an unknown session key does not create a session record.
        Creating session records on load is a DOS vulnerability.
        """
        session = self.backend('someunknownkey')
        session.load()
        self.assertIsNone(session.session_key)
        self.assertIs(session.exists(session.session_key), False)
        # provided unknown key was cycled, not reused
        self.assertNotEqual(session.session_key, 'someunknownkey')
    def test_session_save_does_not_resurrect_session_logged_out_in_other_context(self):
        """
        Sessions shouldn't be resurrected by a concurrent request.
        """
        # Create new session.
        s1 = self.backend()
        s1['test_data'] = 'value1'
        s1.save(must_create=True)
        # Logout in another context.
        s2 = self.backend(s1.session_key)
        s2.delete()
        # Modify session in first context.
        s1['test_data'] = 'value2'
        with self.assertRaises(UpdateError):
            # This should throw an exception as the session is deleted, not
            # resurrect the session.
            s1.save()
        self.assertEqual(s1.load(), {})
class DatabaseSessionTests(SessionTestsMixin, TestCase):
    """Run the common session tests against the database-backed store."""
    backend = DatabaseSession
    session_engine = 'django.contrib.sessions.backends.db'
    @property
    def model(self):
        # Model class is resolved through the backend so subclasses with a
        # custom session model (see CustomDatabaseSessionTests) reuse these
        # tests unchanged.
        return self.backend.get_model_class()
    def test_session_str(self):
        "Session repr should be the session key."
        self.session['x'] = 1
        self.session.save()
        session_key = self.session.session_key
        s = self.model.objects.get(session_key=session_key)
        self.assertEqual(str(s), session_key)
    def test_session_get_decoded(self):
        """
        Test we can use Session.get_decoded to retrieve data stored
        in normal way
        """
        self.session['x'] = 1
        self.session.save()
        s = self.model.objects.get(session_key=self.session.session_key)
        self.assertEqual(s.get_decoded(), {'x': 1})
    def test_sessionmanager_save(self):
        """
        Test SessionManager.save method
        """
        # Create a session
        self.session['y'] = 1
        self.session.save()
        s = self.model.objects.get(session_key=self.session.session_key)
        # Change it
        self.model.objects.save(s.session_key, {'y': 2}, s.expire_date)
        # Clear cache, so that it will be retrieved from DB
        del self.session._session_cache
        self.assertEqual(self.session['y'], 2)
    def test_clearsessions_command(self):
        """
        Test clearsessions command for clearing expired sessions.
        """
        self.assertEqual(0, self.model.objects.count())
        # One object in the future
        self.session['foo'] = 'bar'
        self.session.set_expiry(3600)
        self.session.save()
        # One object in the past
        other_session = self.backend()
        other_session['foo'] = 'bar'
        other_session.set_expiry(-3600)
        other_session.save()
        # Two sessions are in the database before clearsessions...
        self.assertEqual(2, self.model.objects.count())
        with override_settings(SESSION_ENGINE=self.session_engine):
            management.call_command('clearsessions')
        # ... and one is deleted.
        self.assertEqual(1, self.model.objects.count())
# Re-run the full database-backend suite with USE_TZ enabled to exercise
# timezone-aware expiry-date handling.
@override_settings(USE_TZ=True)
class DatabaseSessionWithTimeZoneTests(DatabaseSessionTests):
    pass
class CustomDatabaseSessionTests(DatabaseSessionTests):
    """Database session tests using the project-local custom session model."""
    backend = CustomDatabaseSession
    session_engine = 'sessions_tests.models'
    def test_extra_session_field(self):
        # Set the account ID to be picked up by a custom session storage
        # and saved to a custom session model database column.
        self.session['_auth_user_id'] = 42
        self.session.save()
        # Make sure that the customized create_model_instance() was called.
        s = self.model.objects.get(session_key=self.session.session_key)
        self.assertEqual(s.account_id, 42)
        # Make the session "anonymous".
        self.session.pop('_auth_user_id')
        self.session.save()
        # Make sure that save() on an existing session did the right job.
        s = self.model.objects.get(session_key=self.session.session_key)
        self.assertIsNone(s.account_id)
class CacheDBSessionTests(SessionTestsMixin, TestCase):
    """Common session tests against the write-through cached-db backend."""
    backend = CacheDBSession
    def test_exists_searches_cache_first(self):
        self.session.save()
        with self.assertNumQueries(0):
            self.assertIs(self.session.exists(self.session.session_key), True)
    # Some backends might issue a warning
    @ignore_warnings(module="django.core.cache.backends.base")
    def test_load_overlong_key(self):
        self.session._session_key = (string.ascii_letters + string.digits) * 20
        self.assertEqual(self.session.load(), {})
    @override_settings(SESSION_CACHE_ALIAS='sessions')
    def test_non_default_cache(self):
        # 21000 - CacheDB backend should respect SESSION_CACHE_ALIAS.
        with self.assertRaises(InvalidCacheBackendError):
            self.backend()
# Re-run the cached-db suite with USE_TZ enabled to exercise
# timezone-aware expiry-date handling.
@override_settings(USE_TZ=True)
class CacheDBSessionWithTimeZoneTests(CacheDBSessionTests):
    pass
# Don't need DB flushing for these tests, so can use unittest.TestCase as base class
class FileSessionTests(SessionTestsMixin, unittest.TestCase):
    """Common session tests against the file-based backend.

    Uses plain unittest.TestCase since no database flushing is needed.
    """
    backend = FileSession
    def setUp(self):
        # Do file session tests in an isolated directory, and kill it after we're done.
        self.original_session_file_path = settings.SESSION_FILE_PATH
        self.temp_session_store = settings.SESSION_FILE_PATH = tempfile.mkdtemp()
        # Reset the file session backend's internal caches
        if hasattr(self.backend, '_storage_path'):
            del self.backend._storage_path
        super().setUp()
    def tearDown(self):
        super().tearDown()
        settings.SESSION_FILE_PATH = self.original_session_file_path
        shutil.rmtree(self.temp_session_store)
    @override_settings(
        SESSION_FILE_PATH="/if/this/directory/exists/you/have/a/weird/computer")
    def test_configuration_check(self):
        del self.backend._storage_path
        # Make sure the file backend checks for a good storage dir
        with self.assertRaises(ImproperlyConfigured):
            self.backend()
    def test_invalid_key_backslash(self):
        # Ensure we don't allow directory-traversal.
        # This is tested directly on _key_to_file, as load() will swallow
        # a SuspiciousOperation in the same way as an IOError - by creating
        # a new session, making it unclear whether the slashes were detected.
        with self.assertRaises(InvalidSessionKey):
            self.backend()._key_to_file("a\\b\\c")
    def test_invalid_key_forwardslash(self):
        # Ensure we don't allow directory-traversal
        with self.assertRaises(InvalidSessionKey):
            self.backend()._key_to_file("a/b/c")
    @override_settings(
        SESSION_ENGINE="django.contrib.sessions.backends.file",
        SESSION_COOKIE_AGE=0,
    )
    def test_clearsessions_command(self):
        """
        Test clearsessions command for clearing expired sessions.
        """
        storage_path = self.backend._get_storage_path()
        file_prefix = settings.SESSION_COOKIE_NAME
        def count_sessions():
            # Count session files in the isolated storage directory.
            return len([
                session_file for session_file in os.listdir(storage_path)
                if session_file.startswith(file_prefix)
            ])
        self.assertEqual(0, count_sessions())
        # One object in the future
        self.session['foo'] = 'bar'
        self.session.set_expiry(3600)
        self.session.save()
        # One object in the past
        other_session = self.backend()
        other_session['foo'] = 'bar'
        other_session.set_expiry(-3600)
        other_session.save()
        # One object in the present without an expiry (should be deleted since
        # its modification time + SESSION_COOKIE_AGE will be in the past when
        # clearsessions runs).
        other_session2 = self.backend()
        other_session2['foo'] = 'bar'
        other_session2.save()
        # Three sessions are in the filesystem before clearsessions...
        self.assertEqual(3, count_sessions())
        management.call_command('clearsessions')
        # ... and two are deleted.
        self.assertEqual(1, count_sessions())
class CacheSessionTests(SessionTestsMixin, unittest.TestCase):
    """Common session tests against the cache-only backend."""
    backend = CacheSession
    # Some backends might issue a warning
    @ignore_warnings(module="django.core.cache.backends.base")
    def test_load_overlong_key(self):
        self.session._session_key = (string.ascii_letters + string.digits) * 20
        self.assertEqual(self.session.load(), {})
    def test_default_cache(self):
        self.session.save()
        self.assertIsNotNone(caches['default'].get(self.session.cache_key))
    @override_settings(CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
        },
        'sessions': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
            'LOCATION': 'session',
        },
    }, SESSION_CACHE_ALIAS='sessions')
    def test_non_default_cache(self):
        # Re-initialize the session backend to make use of overridden settings.
        self.session = self.backend()
        self.session.save()
        # Writes must land in the aliased cache, not 'default'.
        self.assertIsNone(caches['default'].get(self.session.cache_key))
        self.assertIsNotNone(caches['sessions'].get(self.session.cache_key))
    def test_create_and_save(self):
        self.session = self.backend()
        self.session.create()
        self.session.save()
        self.assertIsNotNone(caches['default'].get(self.session.cache_key))
class SessionMiddlewareTests(TestCase):
@override_settings(SESSION_COOKIE_SECURE=True)
def test_secure_session_cookie(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
# Simulate a request the modifies the session
middleware.process_request(request)
request.session['hello'] = 'world'
# Handle the response through the middleware
response = middleware.process_response(request, response)
self.assertIs(response.cookies[settings.SESSION_COOKIE_NAME]['secure'], True)
@override_settings(SESSION_COOKIE_HTTPONLY=True)
def test_httponly_session_cookie(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
# Simulate a request the modifies the session
middleware.process_request(request)
request.session['hello'] = 'world'
# Handle the response through the middleware
response = middleware.process_response(request, response)
self.assertIs(response.cookies[settings.SESSION_COOKIE_NAME]['httponly'], True)
self.assertIn(
cookies.Morsel._reserved['httponly'],
str(response.cookies[settings.SESSION_COOKIE_NAME])
)
@override_settings(SESSION_COOKIE_HTTPONLY=False)
def test_no_httponly_session_cookie(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
# Simulate a request the modifies the session
middleware.process_request(request)
request.session['hello'] = 'world'
# Handle the response through the middleware
response = middleware.process_response(request, response)
self.assertEqual(response.cookies[settings.SESSION_COOKIE_NAME]['httponly'], '')
self.assertNotIn(
cookies.Morsel._reserved['httponly'],
str(response.cookies[settings.SESSION_COOKIE_NAME])
)
def test_session_save_on_500(self):
request = RequestFactory().get('/')
response = HttpResponse('Horrible error')
response.status_code = 500
middleware = SessionMiddleware()
# Simulate a request the modifies the session
middleware.process_request(request)
request.session['hello'] = 'world'
# Handle the response through the middleware
response = middleware.process_response(request, response)
# The value wasn't saved above.
self.assertNotIn('hello', request.session.load())
def test_session_update_error_redirect(self):
path = '/foo/'
request = RequestFactory().get(path)
response = HttpResponse()
middleware = SessionMiddleware()
request.session = DatabaseSession()
request.session.save(must_create=True)
request.session.delete()
msg = (
"The request's session was deleted before the request completed. "
"The user may have logged out in a concurrent request, for example."
)
with self.assertRaisesMessage(SuspiciousOperation, msg):
# Handle the response through the middleware. It will try to save
# the deleted session which will cause an UpdateError that's caught
# and raised as a SuspiciousOperation.
middleware.process_response(request, response)
def test_session_delete_on_end(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
# Before deleting, there has to be an existing cookie
request.COOKIES[settings.SESSION_COOKIE_NAME] = 'abc'
# Simulate a request that ends the session
middleware.process_request(request)
request.session.flush()
# Handle the response through the middleware
response = middleware.process_response(request, response)
# The cookie was deleted, not recreated.
# A deleted cookie header looks like:
# Set-Cookie: sessionid=; expires=Thu, 01 Jan 1970 00:00:00 GMT; Max-Age=0; Path=/
self.assertEqual(
'Set-Cookie: {}=""; expires=Thu, 01 Jan 1970 00:00:00 GMT; '
'Max-Age=0; Path=/'.format(
settings.SESSION_COOKIE_NAME,
),
str(response.cookies[settings.SESSION_COOKIE_NAME])
)
    @override_settings(SESSION_COOKIE_DOMAIN='.example.local', SESSION_COOKIE_PATH='/example/')
    def test_session_delete_on_end_with_custom_domain_and_path(self):
        """
        The cookie-deletion header must carry the same Domain and Path the
        cookie was set with, otherwise browsers won't actually delete it.
        """
        request = RequestFactory().get('/')
        response = HttpResponse('Session test')
        middleware = SessionMiddleware()

        # Before deleting, there has to be an existing cookie
        request.COOKIES[settings.SESSION_COOKIE_NAME] = 'abc'

        # Simulate a request that ends the session
        middleware.process_request(request)
        request.session.flush()

        # Handle the response through the middleware
        response = middleware.process_response(request, response)

        # The cookie was deleted, not recreated.
        # A deleted cookie header with a custom domain and path looks like:
        #  Set-Cookie: sessionid=; Domain=.example.local;
        #              expires=Thu, 01 Jan 1970 00:00:00 GMT; Max-Age=0;
        #              Path=/example/
        self.assertEqual(
            'Set-Cookie: {}=""; Domain=.example.local; expires=Thu, '
            '01 Jan 1970 00:00:00 GMT; Max-Age=0; Path=/example/'.format(
                settings.SESSION_COOKIE_NAME,
            ),
            str(response.cookies[settings.SESSION_COOKIE_NAME])
        )
    def test_flush_empty_without_session_cookie_doesnt_set_cookie(self):
        """
        Flushing a session that never had a cookie must not set one on the
        response, though the Vary header is still added because the session
        was accessed.
        """
        request = RequestFactory().get('/')
        response = HttpResponse('Session test')
        middleware = SessionMiddleware()

        # Simulate a request that ends the session
        middleware.process_request(request)
        request.session.flush()

        # Handle the response through the middleware
        response = middleware.process_response(request, response)

        # A cookie should not be set.
        self.assertEqual(response.cookies, {})
        # The session is accessed so "Vary: Cookie" should be set.
        self.assertEqual(response['Vary'], 'Cookie')
    def test_empty_session_saved(self):
        """
        If a session is emptied of data but still has a key, it should still
        be updated.
        """
        request = RequestFactory().get('/')
        response = HttpResponse('Session test')
        middleware = SessionMiddleware()

        # Set a session key and some data.
        middleware.process_request(request)
        request.session['foo'] = 'bar'
        # Handle the response through the middleware.
        response = middleware.process_response(request, response)
        self.assertEqual(tuple(request.session.items()), (('foo', 'bar'),))
        # A cookie should be set, along with Vary: Cookie.
        self.assertIn(
            'Set-Cookie: sessionid=%s' % request.session.session_key,
            str(response.cookies)
        )
        self.assertEqual(response['Vary'], 'Cookie')

        # Empty the session data.
        del request.session['foo']
        # Handle the response through the middleware (fresh response object,
        # same request/session, same middleware instance).
        response = HttpResponse('Session test')
        response = middleware.process_response(request, response)
        self.assertEqual(dict(request.session.values()), {})
        # The stored record for the same key must now decode to empty data.
        session = Session.objects.get(session_key=request.session.session_key)
        self.assertEqual(session.get_decoded(), {})
        # While the session is empty, it hasn't been flushed so a cookie should
        # still be set, along with Vary: Cookie.
        self.assertGreater(len(request.session.session_key), 8)
        self.assertIn(
            'Set-Cookie: sessionid=%s' % request.session.session_key,
            str(response.cookies)
        )
        self.assertEqual(response['Vary'], 'Cookie')
# Don't need DB flushing for these tests, so can use unittest.TestCase as base class
class CookieSessionTests(SessionTestsMixin, unittest.TestCase):
    """
    Runs the shared SessionTestsMixin suite against the signed-cookie
    backend, overriding/skipping the cases that assume a server-side store.
    """

    # Backend under test; consumed by SessionTestsMixin.setUp().
    backend = CookieSession

    def test_save(self):
        """
        This test tested exists() in the other session backends, but that
        doesn't make sense for us.
        """
        pass

    def test_cycle(self):
        """
        This test tested cycle_key() which would create a new session
        key for the same session data. But we can't invalidate previously
        signed cookies (other than letting them expire naturally) so
        testing for this behavior is meaningless.
        """
        pass

    @unittest.expectedFailure
    def test_actual_expiry(self):
        # The cookie backend doesn't handle non-default expiry dates, see #19201
        super().test_actual_expiry()

    def test_unpickling_exception(self):
        # signed_cookies backend should handle unpickle exceptions gracefully
        # by creating a new session
        self.assertEqual(self.session.serializer, JSONSerializer)
        self.session.save()
        self.session.serializer = PickleSerializer
        self.session.load()

    @unittest.skip("Cookie backend doesn't have an external store to create records in.")
    def test_session_load_does_not_create_record(self):
        pass

    @unittest.skip("CookieSession is stored in the client and there is no way to query it.")
    def test_session_save_does_not_resurrect_session_logged_out_in_other_context(self):
        pass
| 38.093103
| 106
| 0.666667
|
import base64
import os
import shutil
import string
import tempfile
import unittest
from datetime import timedelta
from http import cookies
from django.conf import settings
from django.contrib.sessions.backends.base import UpdateError
from django.contrib.sessions.backends.cache import SessionStore as CacheSession
from django.contrib.sessions.backends.cached_db import (
SessionStore as CacheDBSession,
)
from django.contrib.sessions.backends.db import SessionStore as DatabaseSession
from django.contrib.sessions.backends.file import SessionStore as FileSession
from django.contrib.sessions.backends.signed_cookies import (
SessionStore as CookieSession,
)
from django.contrib.sessions.exceptions import InvalidSessionKey
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.sessions.models import Session
from django.contrib.sessions.serializers import (
JSONSerializer, PickleSerializer,
)
from django.core import management
from django.core.cache import caches
from django.core.cache.backends.base import InvalidCacheBackendError
from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation
from django.http import HttpResponse
from django.test import (
RequestFactory, TestCase, ignore_warnings, override_settings,
)
from django.test.utils import patch_logger
from django.utils import timezone
from .models import SessionStore as CustomDatabaseSession
class SessionTestsMixin:
# be used.
backend = None # subclasses must specify
def setUp(self):
self.session = self.backend()
def tearDown(self):
# NB: be careful to delete any sessions created; stale sessions fill up
# the /tmp (with some backends) and eventually overwhelm it after lots
# of runs (think buildbots)
self.session.delete()
def test_new_session(self):
self.assertIs(self.session.modified, False)
self.assertIs(self.session.accessed, False)
def test_get_empty(self):
self.assertIsNone(self.session.get('cat'))
def test_store(self):
self.session['cat'] = "dog"
self.assertIs(self.session.modified, True)
self.assertEqual(self.session.pop('cat'), 'dog')
def test_pop(self):
self.session['some key'] = 'exists'
# Need to reset these to pretend we haven't accessed it:
self.accessed = False
self.modified = False
self.assertEqual(self.session.pop('some key'), 'exists')
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, True)
self.assertIsNone(self.session.get('some key'))
def test_pop_default(self):
self.assertEqual(self.session.pop('some key', 'does not exist'),
'does not exist')
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_pop_default_named_argument(self):
self.assertEqual(self.session.pop('some key', default='does not exist'), 'does not exist')
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_pop_no_default_keyerror_raised(self):
with self.assertRaises(KeyError):
self.session.pop('some key')
def test_setdefault(self):
self.assertEqual(self.session.setdefault('foo', 'bar'), 'bar')
self.assertEqual(self.session.setdefault('foo', 'baz'), 'bar')
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, True)
def test_update(self):
self.session.update({'update key': 1})
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, True)
self.assertEqual(self.session.get('update key', None), 1)
def test_has_key(self):
self.session['some key'] = 1
self.session.modified = False
self.session.accessed = False
self.assertIn('some key', self.session)
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_values(self):
self.assertEqual(list(self.session.values()), [])
self.assertIs(self.session.accessed, True)
self.session['some key'] = 1
self.session.modified = False
self.session.accessed = False
self.assertEqual(list(self.session.values()), [1])
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_keys(self):
self.session['x'] = 1
self.session.modified = False
self.session.accessed = False
self.assertEqual(list(self.session.keys()), ['x'])
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_items(self):
self.session['x'] = 1
self.session.modified = False
self.session.accessed = False
self.assertEqual(list(self.session.items()), [('x', 1)])
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_clear(self):
self.session['x'] = 1
self.session.modified = False
self.session.accessed = False
self.assertEqual(list(self.session.items()), [('x', 1)])
self.session.clear()
self.assertEqual(list(self.session.items()), [])
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, True)
def test_save(self):
self.session.save()
self.assertIs(self.session.exists(self.session.session_key), True)
def test_delete(self):
self.session.save()
self.session.delete(self.session.session_key)
self.assertIs(self.session.exists(self.session.session_key), False)
def test_flush(self):
self.session['foo'] = 'bar'
self.session.save()
prev_key = self.session.session_key
self.session.flush()
self.assertIs(self.session.exists(prev_key), False)
self.assertNotEqual(self.session.session_key, prev_key)
self.assertIsNone(self.session.session_key)
self.assertIs(self.session.modified, True)
self.assertIs(self.session.accessed, True)
def test_cycle(self):
self.session['a'], self.session['b'] = 'c', 'd'
self.session.save()
prev_key = self.session.session_key
prev_data = list(self.session.items())
self.session.cycle_key()
self.assertIs(self.session.exists(prev_key), False)
self.assertNotEqual(self.session.session_key, prev_key)
self.assertEqual(list(self.session.items()), prev_data)
def test_cycle_with_no_session_cache(self):
self.session['a'], self.session['b'] = 'c', 'd'
self.session.save()
prev_data = self.session.items()
self.session = self.backend(self.session.session_key)
self.assertIs(hasattr(self.session, '_session_cache'), False)
self.session.cycle_key()
self.assertCountEqual(self.session.items(), prev_data)
def test_save_doesnt_clear_data(self):
self.session['a'] = 'b'
self.session.save()
self.assertEqual(self.session['a'], 'b')
def test_invalid_key(self):
try:
session = self.backend('1')
session.save()
self.assertNotEqual(session.session_key, '1')
self.assertIsNone(session.get('cat'))
session.delete()
finally:
session.delete('1')
def test_session_key_empty_string_invalid(self):
self.session._session_key = ''
self.assertIsNone(self.session.session_key)
def test_session_key_too_short_invalid(self):
self.session._session_key = '1234567'
self.assertIsNone(self.session.session_key)
def test_session_key_valid_string_saved(self):
self.session._session_key = '12345678'
self.assertEqual(self.session.session_key, '12345678')
def test_session_key_is_read_only(self):
def set_session_key(session):
session.session_key = session._get_new_session_key()
with self.assertRaises(AttributeError):
set_session_key(self.session)
def test_default_expiry(self):
self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
# expire at browser close)
self.session.set_expiry(0)
self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
def test_custom_expiry_seconds(self):
modification = timezone.now()
self.session.set_expiry(10)
date = self.session.get_expiry_date(modification=modification)
self.assertEqual(date, modification + timedelta(seconds=10))
age = self.session.get_expiry_age(modification=modification)
self.assertEqual(age, 10)
def test_custom_expiry_timedelta(self):
modification = timezone.now()
# Mock timezone.now, because set_expiry calls it on this code path.
original_now = timezone.now
try:
timezone.now = lambda: modification
self.session.set_expiry(timedelta(seconds=10))
finally:
timezone.now = original_now
date = self.session.get_expiry_date(modification=modification)
self.assertEqual(date, modification + timedelta(seconds=10))
age = self.session.get_expiry_age(modification=modification)
self.assertEqual(age, 10)
def test_custom_expiry_datetime(self):
modification = timezone.now()
self.session.set_expiry(modification + timedelta(seconds=10))
date = self.session.get_expiry_date(modification=modification)
self.assertEqual(date, modification + timedelta(seconds=10))
age = self.session.get_expiry_age(modification=modification)
self.assertEqual(age, 10)
def test_custom_expiry_reset(self):
self.session.set_expiry(None)
self.session.set_expiry(10)
self.session.set_expiry(None)
self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
def test_get_expire_at_browser_close(self):
# Tests get_expire_at_browser_close with different settings and different
# set_expiry calls
with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=False):
self.session.set_expiry(10)
self.assertIs(self.session.get_expire_at_browser_close(), False)
self.session.set_expiry(0)
self.assertIs(self.session.get_expire_at_browser_close(), True)
self.session.set_expiry(None)
self.assertIs(self.session.get_expire_at_browser_close(), False)
with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=True):
self.session.set_expiry(10)
self.assertIs(self.session.get_expire_at_browser_close(), False)
self.session.set_expiry(0)
self.assertIs(self.session.get_expire_at_browser_close(), True)
self.session.set_expiry(None)
self.assertIs(self.session.get_expire_at_browser_close(), True)
def test_decode(self):
# Ensure we can decode what we encode
data = {'a test key': 'a test value'}
encoded = self.session.encode(data)
self.assertEqual(self.session.decode(encoded), data)
def test_decode_failure_logged_to_security(self):
bad_encode = base64.b64encode(b'flaskdj:alkdjf')
with patch_logger('django.security.SuspiciousSession', 'warning') as calls:
self.assertEqual({}, self.session.decode(bad_encode))
# check that the failed decode is logged
self.assertEqual(len(calls), 1)
self.assertIn('corrupted', calls[0])
def test_actual_expiry(self):
# this doesn't work with JSONSerializer (serializing timedelta)
with override_settings(SESSION_SERIALIZER='django.contrib.sessions.serializers.PickleSerializer'):
self.session = self.backend()
old_session_key = None
new_session_key = None
try:
self.session['foo'] = 'bar'
self.session.set_expiry(-timedelta(seconds=10))
self.session.save()
old_session_key = self.session.session_key
new_session = self.backend(self.session.session_key)
new_session_key = new_session.session_key
self.assertNotIn('foo', new_session)
finally:
self.session.delete(old_session_key)
self.session.delete(new_session_key)
def test_session_load_does_not_create_record(self):
session = self.backend('someunknownkey')
session.load()
self.assertIsNone(session.session_key)
self.assertIs(session.exists(session.session_key), False)
self.assertNotEqual(session.session_key, 'someunknownkey')
def test_session_save_does_not_resurrect_session_logged_out_in_other_context(self):
s1 = self.backend()
s1['test_data'] = 'value1'
s1.save(must_create=True)
s2 = self.backend(s1.session_key)
s2.delete()
s1['test_data'] = 'value2'
with self.assertRaises(UpdateError):
s1.save()
self.assertEqual(s1.load(), {})
class DatabaseSessionTests(SessionTestsMixin, TestCase):
backend = DatabaseSession
session_engine = 'django.contrib.sessions.backends.db'
@property
def model(self):
return self.backend.get_model_class()
def test_session_str(self):
self.session['x'] = 1
self.session.save()
session_key = self.session.session_key
s = self.model.objects.get(session_key=session_key)
self.assertEqual(str(s), session_key)
def test_session_get_decoded(self):
self.session['x'] = 1
self.session.save()
s = self.model.objects.get(session_key=self.session.session_key)
self.assertEqual(s.get_decoded(), {'x': 1})
def test_sessionmanager_save(self):
self.session['y'] = 1
self.session.save()
s = self.model.objects.get(session_key=self.session.session_key)
self.model.objects.save(s.session_key, {'y': 2}, s.expire_date)
del self.session._session_cache
self.assertEqual(self.session['y'], 2)
def test_clearsessions_command(self):
self.assertEqual(0, self.model.objects.count())
self.session['foo'] = 'bar'
self.session.set_expiry(3600)
self.session.save()
other_session = self.backend()
other_session['foo'] = 'bar'
other_session.set_expiry(-3600)
other_session.save()
self.assertEqual(2, self.model.objects.count())
with override_settings(SESSION_ENGINE=self.session_engine):
management.call_command('clearsessions')
self.assertEqual(1, self.model.objects.count())
@override_settings(USE_TZ=True)
class DatabaseSessionWithTimeZoneTests(DatabaseSessionTests):
pass
class CustomDatabaseSessionTests(DatabaseSessionTests):
backend = CustomDatabaseSession
session_engine = 'sessions_tests.models'
def test_extra_session_field(self):
self.session['_auth_user_id'] = 42
self.session.save()
s = self.model.objects.get(session_key=self.session.session_key)
self.assertEqual(s.account_id, 42)
self.session.pop('_auth_user_id')
self.session.save()
s = self.model.objects.get(session_key=self.session.session_key)
self.assertIsNone(s.account_id)
class CacheDBSessionTests(SessionTestsMixin, TestCase):
backend = CacheDBSession
def test_exists_searches_cache_first(self):
self.session.save()
with self.assertNumQueries(0):
self.assertIs(self.session.exists(self.session.session_key), True)
@ignore_warnings(module="django.core.cache.backends.base")
def test_load_overlong_key(self):
self.session._session_key = (string.ascii_letters + string.digits) * 20
self.assertEqual(self.session.load(), {})
@override_settings(SESSION_CACHE_ALIAS='sessions')
def test_non_default_cache(self):
with self.assertRaises(InvalidCacheBackendError):
self.backend()
@override_settings(USE_TZ=True)
class CacheDBSessionWithTimeZoneTests(CacheDBSessionTests):
pass
class FileSessionTests(SessionTestsMixin, unittest.TestCase):
backend = FileSession
def setUp(self):
# Do file session tests in an isolated directory, and kill it after we're done.
self.original_session_file_path = settings.SESSION_FILE_PATH
self.temp_session_store = settings.SESSION_FILE_PATH = tempfile.mkdtemp()
if hasattr(self.backend, '_storage_path'):
del self.backend._storage_path
super().setUp()
def tearDown(self):
super().tearDown()
settings.SESSION_FILE_PATH = self.original_session_file_path
shutil.rmtree(self.temp_session_store)
@override_settings(
SESSION_FILE_PATH="/if/this/directory/exists/you/have/a/weird/computer")
def test_configuration_check(self):
del self.backend._storage_path
# Make sure the file backend checks for a good storage dir
with self.assertRaises(ImproperlyConfigured):
self.backend()
def test_invalid_key_backslash(self):
# Ensure we don't allow directory-traversal.
with self.assertRaises(InvalidSessionKey):
self.backend()._key_to_file("a\\b\\c")
def test_invalid_key_forwardslash(self):
with self.assertRaises(InvalidSessionKey):
self.backend()._key_to_file("a/b/c")
@override_settings(
SESSION_ENGINE="django.contrib.sessions.backends.file",
SESSION_COOKIE_AGE=0,
)
def test_clearsessions_command(self):
storage_path = self.backend._get_storage_path()
file_prefix = settings.SESSION_COOKIE_NAME
def count_sessions():
return len([
session_file for session_file in os.listdir(storage_path)
if session_file.startswith(file_prefix)
])
self.assertEqual(0, count_sessions())
# One object in the future
self.session['foo'] = 'bar'
self.session.set_expiry(3600)
self.session.save()
# One object in the past
other_session = self.backend()
other_session['foo'] = 'bar'
other_session.set_expiry(-3600)
other_session.save()
# One object in the present without an expiry (should be deleted since
# its modification time + SESSION_COOKIE_AGE will be in the past when
# clearsessions runs).
other_session2 = self.backend()
other_session2['foo'] = 'bar'
other_session2.save()
# Three sessions are in the filesystem before clearsessions...
self.assertEqual(3, count_sessions())
management.call_command('clearsessions')
# ... and two are deleted.
self.assertEqual(1, count_sessions())
class CacheSessionTests(SessionTestsMixin, unittest.TestCase):
backend = CacheSession
# Some backends might issue a warning
@ignore_warnings(module="django.core.cache.backends.base")
def test_load_overlong_key(self):
self.session._session_key = (string.ascii_letters + string.digits) * 20
self.assertEqual(self.session.load(), {})
def test_default_cache(self):
self.session.save()
self.assertIsNotNone(caches['default'].get(self.session.cache_key))
@override_settings(CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
},
'sessions': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'session',
},
}, SESSION_CACHE_ALIAS='sessions')
def test_non_default_cache(self):
# Re-initialize the session backend to make use of overridden settings.
self.session = self.backend()
self.session.save()
self.assertIsNone(caches['default'].get(self.session.cache_key))
self.assertIsNotNone(caches['sessions'].get(self.session.cache_key))
def test_create_and_save(self):
self.session = self.backend()
self.session.create()
self.session.save()
self.assertIsNotNone(caches['default'].get(self.session.cache_key))
class SessionMiddlewareTests(TestCase):
@override_settings(SESSION_COOKIE_SECURE=True)
def test_secure_session_cookie(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
# Simulate a request the modifies the session
middleware.process_request(request)
request.session['hello'] = 'world'
# Handle the response through the middleware
response = middleware.process_response(request, response)
self.assertIs(response.cookies[settings.SESSION_COOKIE_NAME]['secure'], True)
@override_settings(SESSION_COOKIE_HTTPONLY=True)
def test_httponly_session_cookie(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
# Simulate a request the modifies the session
middleware.process_request(request)
request.session['hello'] = 'world'
# Handle the response through the middleware
response = middleware.process_response(request, response)
self.assertIs(response.cookies[settings.SESSION_COOKIE_NAME]['httponly'], True)
self.assertIn(
cookies.Morsel._reserved['httponly'],
str(response.cookies[settings.SESSION_COOKIE_NAME])
)
@override_settings(SESSION_COOKIE_HTTPONLY=False)
def test_no_httponly_session_cookie(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
# Simulate a request the modifies the session
middleware.process_request(request)
request.session['hello'] = 'world'
# Handle the response through the middleware
response = middleware.process_response(request, response)
self.assertEqual(response.cookies[settings.SESSION_COOKIE_NAME]['httponly'], '')
self.assertNotIn(
cookies.Morsel._reserved['httponly'],
str(response.cookies[settings.SESSION_COOKIE_NAME])
)
def test_session_save_on_500(self):
request = RequestFactory().get('/')
response = HttpResponse('Horrible error')
response.status_code = 500
middleware = SessionMiddleware()
# Simulate a request the modifies the session
middleware.process_request(request)
request.session['hello'] = 'world'
# Handle the response through the middleware
response = middleware.process_response(request, response)
# The value wasn't saved above.
self.assertNotIn('hello', request.session.load())
def test_session_update_error_redirect(self):
path = '/foo/'
request = RequestFactory().get(path)
response = HttpResponse()
middleware = SessionMiddleware()
request.session = DatabaseSession()
request.session.save(must_create=True)
request.session.delete()
msg = (
"The request's session was deleted before the request completed. "
"The user may have logged out in a concurrent request, for example."
)
with self.assertRaisesMessage(SuspiciousOperation, msg):
# Handle the response through the middleware. It will try to save
# the deleted session which will cause an UpdateError that's caught
middleware.process_response(request, response)
def test_session_delete_on_end(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
request.COOKIES[settings.SESSION_COOKIE_NAME] = 'abc'
middleware.process_request(request)
request.session.flush()
response = middleware.process_response(request, response)
self.assertEqual(
'Set-Cookie: {}=""; expires=Thu, 01 Jan 1970 00:00:00 GMT; '
'Max-Age=0; Path=/'.format(
settings.SESSION_COOKIE_NAME,
),
str(response.cookies[settings.SESSION_COOKIE_NAME])
)
@override_settings(SESSION_COOKIE_DOMAIN='.example.local', SESSION_COOKIE_PATH='/example/')
def test_session_delete_on_end_with_custom_domain_and_path(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
request.COOKIES[settings.SESSION_COOKIE_NAME] = 'abc'
middleware.process_request(request)
request.session.flush()
response = middleware.process_response(request, response)
self.assertEqual(
'Set-Cookie: {}=""; Domain=.example.local; expires=Thu, '
'01 Jan 1970 00:00:00 GMT; Max-Age=0; Path=/example/'.format(
settings.SESSION_COOKIE_NAME,
),
str(response.cookies[settings.SESSION_COOKIE_NAME])
)
def test_flush_empty_without_session_cookie_doesnt_set_cookie(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
middleware.process_request(request)
request.session.flush()
response = middleware.process_response(request, response)
self.assertEqual(response.cookies, {})
self.assertEqual(response['Vary'], 'Cookie')
def test_empty_session_saved(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
middleware.process_request(request)
request.session['foo'] = 'bar'
response = middleware.process_response(request, response)
self.assertEqual(tuple(request.session.items()), (('foo', 'bar'),))
self.assertIn(
'Set-Cookie: sessionid=%s' % request.session.session_key,
str(response.cookies)
)
self.assertEqual(response['Vary'], 'Cookie')
del request.session['foo']
response = HttpResponse('Session test')
response = middleware.process_response(request, response)
self.assertEqual(dict(request.session.values()), {})
session = Session.objects.get(session_key=request.session.session_key)
self.assertEqual(session.get_decoded(), {})
# still be set, along with Vary: Cookie.
self.assertGreater(len(request.session.session_key), 8)
self.assertIn(
'Set-Cookie: sessionid=%s' % request.session.session_key,
str(response.cookies)
)
self.assertEqual(response['Vary'], 'Cookie')
# Don't need DB flushing for these tests, so can use unittest.TestCase as base class
class CookieSessionTests(SessionTestsMixin, unittest.TestCase):
backend = CookieSession
def test_save(self):
pass
def test_cycle(self):
pass
@unittest.expectedFailure
def test_actual_expiry(self):
super().test_actual_expiry()
def test_unpickling_exception(self):
# signed_cookies backend should handle unpickle exceptions gracefully
# by creating a new session
self.assertEqual(self.session.serializer, JSONSerializer)
self.session.save()
self.session.serializer = PickleSerializer
self.session.load()
@unittest.skip("Cookie backend doesn't have an external store to create records in.")
def test_session_load_does_not_create_record(self):
pass
@unittest.skip("CookieSession is stored in the client and there is no way to query it.")
def test_session_save_does_not_resurrect_session_logged_out_in_other_context(self):
pass
| true
| true
|
f70c8cb3a96a40a4af96607a654cf48df272e02d
| 2,492
|
py
|
Python
|
build/compiler/nds32le-elf-mculib-v3/nds32le-elf/lib/libstdc++.a-gdb.py
|
mu340881/alios_Integrate
|
a9f98898e13725423bd2379b2d766e2414c40e99
|
[
"Apache-2.0"
] | null | null | null |
build/compiler/nds32le-elf-mculib-v3/nds32le-elf/lib/libstdc++.a-gdb.py
|
mu340881/alios_Integrate
|
a9f98898e13725423bd2379b2d766e2414c40e99
|
[
"Apache-2.0"
] | null | null | null |
build/compiler/nds32le-elf-mculib-v3/nds32le-elf/lib/libstdc++.a-gdb.py
|
mu340881/alios_Integrate
|
a9f98898e13725423bd2379b2d766e2414c40e99
|
[
"Apache-2.0"
] | 2
|
2019-10-22T09:26:23.000Z
|
2019-12-20T07:28:20.000Z
|
# -*- python -*-
# Copyright (C) 2009-2014 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import gdb
import os
import os.path
pythondir = '/NOBACKUP/sqa10/sqa/build-bspv410/build-toolchain/build-2016-04-13/toolchain/nds32le-elf-mculib-v3/share/gcc-4.9.3/python'
libdir = '/NOBACKUP/sqa10/sqa/build-bspv410/build-toolchain/build-2016-04-13/toolchain/nds32le-elf-mculib-v3/nds32le-elf/lib'
# This file might be loaded when there is no current objfile.  This
# can happen if the user loads it manually.  In this case we don't
# update sys.path; instead we just hope the user managed to do that
# beforehand.
if gdb.current_objfile () is not None:
    # Update module path.  We want to find the relative path from libdir
    # to pythondir, and then we want to apply that relative path to the
    # directory holding the objfile with which this file is associated.
    # This preserves relocatability of the gcc tree.

    # Do a simple normalization that removes duplicate separators.
    pythondir = os.path.normpath (pythondir)
    libdir = os.path.normpath (libdir)
    prefix = os.path.commonprefix ([libdir, pythondir])
    # In some bizarre configuration we might have found a match in the
    # middle of a directory name.  Also guard against an empty common
    # prefix, which would otherwise raise IndexError on prefix[-1].
    if prefix and prefix[-1] != '/':
        prefix = os.path.dirname (prefix) + '/'

    # Strip off the prefix.
    pythondir = pythondir[len (prefix):]
    libdir = libdir[len (prefix):]

    # Compute the ".."s needed to get from libdir to the prefix.
    dotdots = ('..' + os.sep) * len (libdir.split (os.sep))

    objfile = gdb.current_objfile ().filename
    dir_ = os.path.join (os.path.dirname (objfile), dotdots, pythondir)

    # Idiomatic membership test (was "not dir_ in sys.path").
    if dir_ not in sys.path:
        sys.path.insert(0, dir_)

# Load the pretty-printers.
from libstdcxx.v6.printers import register_libstdcxx_printers
register_libstdcxx_printers (gdb.current_objfile ())
| 40.852459
| 135
| 0.726726
|
import sys
import gdb
import os
import os.path
pythondir = '/NOBACKUP/sqa10/sqa/build-bspv410/build-toolchain/build-2016-04-13/toolchain/nds32le-elf-mculib-v3/share/gcc-4.9.3/python'
libdir = '/NOBACKUP/sqa10/sqa/build-bspv410/build-toolchain/build-2016-04-13/toolchain/nds32le-elf-mculib-v3/nds32le-elf/lib'
# update sys.path; instead we just hope the user managed to do that
# beforehand.
if gdb.current_objfile () is not None:
# Update module path. We want to find the relative path from libdir
# to pythondir, and then we want to apply that relative path to the
# directory holding the objfile with which this file is associated.
# This preserves relocatability of the gcc tree.
# Do a simple normalization that removes duplicate separators.
pythondir = os.path.normpath (pythondir)
libdir = os.path.normpath (libdir)
prefix = os.path.commonprefix ([libdir, pythondir])
# In some bizarre configuration we might have found a match in the
# middle of a directory name.
if prefix[-1] != '/':
prefix = os.path.dirname (prefix) + '/'
# Strip off the prefix.
pythondir = pythondir[len (prefix):]
libdir = libdir[len (prefix):]
# Compute the ".."s needed to get from libdir to the prefix.
dotdots = ('..' + os.sep) * len (libdir.split (os.sep))
objfile = gdb.current_objfile ().filename
dir_ = os.path.join (os.path.dirname (objfile), dotdots, pythondir)
if not dir_ in sys.path:
sys.path.insert(0, dir_)
# Load the pretty-printers.
from libstdcxx.v6.printers import register_libstdcxx_printers
register_libstdcxx_printers (gdb.current_objfile ())
| true
| true
|
f70c8e189b6fa80ed3959d3b92567217e2bb43d0
| 692
|
py
|
Python
|
test_example.py
|
kaapitoshka/AntonStudy
|
d42a643e2f0a78e5257d3d2b876258e1e765db04
|
[
"Apache-2.0"
] | null | null | null |
test_example.py
|
kaapitoshka/AntonStudy
|
d42a643e2f0a78e5257d3d2b876258e1e765db04
|
[
"Apache-2.0"
] | null | null | null |
test_example.py
|
kaapitoshka/AntonStudy
|
d42a643e2f0a78e5257d3d2b876258e1e765db04
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import time
@pytest.fixture(scope="session")
def driver(request):
wd=webdriver.Chrome()
request.addfinalizer(wd.quit)
return wd
def test_admin_login(driver):
driver.get("http://localhost/litecart/admin")
driver.find_element_by_name("username").send_keys("admin")
driver.find_element_by_name("password").send_keys("123456")
driver.find_element_by_name("login").click()
WebDriverWait(driver,10).until(EC.title_is("My Store"))
print('Поставим sleep перед закрытием браузера на 5 секунд для отладки')
time.sleep(5)
| 32.952381
| 73
| 0.799133
|
import pytest
from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import time
@pytest.fixture(scope="session")
def driver(request):
wd=webdriver.Chrome()
request.addfinalizer(wd.quit)
return wd
def test_admin_login(driver):
driver.get("http://localhost/litecart/admin")
driver.find_element_by_name("username").send_keys("admin")
driver.find_element_by_name("password").send_keys("123456")
driver.find_element_by_name("login").click()
WebDriverWait(driver,10).until(EC.title_is("My Store"))
print('Поставим sleep перед закрытием браузера на 5 секунд для отладки')
time.sleep(5)
| true
| true
|
f70c8e39be15cb53affbe77a0dfe1db21f9d708e
| 2,092
|
py
|
Python
|
mammoth/style_reader/html_path_reader.py
|
tsaltena/python-mammoth
|
6746d5f17377327d9947a10a1e8101f8810122e2
|
[
"BSD-2-Clause"
] | null | null | null |
mammoth/style_reader/html_path_reader.py
|
tsaltena/python-mammoth
|
6746d5f17377327d9947a10a1e8101f8810122e2
|
[
"BSD-2-Clause"
] | null | null | null |
mammoth/style_reader/html_path_reader.py
|
tsaltena/python-mammoth
|
6746d5f17377327d9947a10a1e8101f8810122e2
|
[
"BSD-2-Clause"
] | null | null | null |
from parsimonious.grammar import Grammar
from .. import html_paths
def read_html_path(string):
path_node = _grammar.parse(string)
return read_html_path_node(path_node)
def read_html_path_node(path_node):
if path_node.children[0].expr_name == "ignore":
return html_paths.ignore
elif path_node.children[0].children:
return _read_html_path_elements_node(path_node.children[0].children[0])
else:
return html_paths.empty
def _read_html_path_elements_node(path_node):
elements = [
_read_element_node(child)
for child in _repeated_children_with_separator(path_node, has_whitespace=True)
]
return html_paths.path(elements)
def _read_element_node(node):
tag_names = _read_tag_names_node(node.children[0])
class_names = _read_class_names_node(node.children[1])
fresh = _read_fresh_node(node.children[2])
return html_paths.element(tag_names, class_names=class_names, fresh=fresh)
def _read_tag_names_node(node):
return [
child.text
for child in _repeated_children_with_separator(node, has_whitespace=False)
]
def _read_class_names_node(class_names_node):
return [
_read_class_name_node(node)
for node in class_names_node.children
]
def _read_class_name_node(node):
return node.children[1].text
def _read_fresh_node(node):
return len(node.children) > 0
def _repeated_children_with_separator(node, has_whitespace):
yield node.children[0]
if has_whitespace:
sequence_node_index = 3
else:
sequence_node_index = 1
sequence_node = node.children[1]
for child in sequence_node.children:
yield child.children[sequence_node_index]
grammar_text = r"""
html_path = ignore / html_path_elements?
ignore = "!"
html_path_elements = element (whitespace* ">" whitespace* element)*
element = tag_names class_name* fresh?
tag_names = identifier ("|" identifier)*
class_name = "." identifier
fresh = ":fresh"
identifier = ~"[A-Z0-9]+"i
whitespace = ~"\s"*
"""
_grammar = Grammar(grammar_text)
| 22.255319
| 86
| 0.721319
|
from parsimonious.grammar import Grammar
from .. import html_paths
def read_html_path(string):
path_node = _grammar.parse(string)
return read_html_path_node(path_node)
def read_html_path_node(path_node):
if path_node.children[0].expr_name == "ignore":
return html_paths.ignore
elif path_node.children[0].children:
return _read_html_path_elements_node(path_node.children[0].children[0])
else:
return html_paths.empty
def _read_html_path_elements_node(path_node):
elements = [
_read_element_node(child)
for child in _repeated_children_with_separator(path_node, has_whitespace=True)
]
return html_paths.path(elements)
def _read_element_node(node):
tag_names = _read_tag_names_node(node.children[0])
class_names = _read_class_names_node(node.children[1])
fresh = _read_fresh_node(node.children[2])
return html_paths.element(tag_names, class_names=class_names, fresh=fresh)
def _read_tag_names_node(node):
return [
child.text
for child in _repeated_children_with_separator(node, has_whitespace=False)
]
def _read_class_names_node(class_names_node):
return [
_read_class_name_node(node)
for node in class_names_node.children
]
def _read_class_name_node(node):
return node.children[1].text
def _read_fresh_node(node):
return len(node.children) > 0
def _repeated_children_with_separator(node, has_whitespace):
yield node.children[0]
if has_whitespace:
sequence_node_index = 3
else:
sequence_node_index = 1
sequence_node = node.children[1]
for child in sequence_node.children:
yield child.children[sequence_node_index]
grammar_text = r"""
html_path = ignore / html_path_elements?
ignore = "!"
html_path_elements = element (whitespace* ">" whitespace* element)*
element = tag_names class_name* fresh?
tag_names = identifier ("|" identifier)*
class_name = "." identifier
fresh = ":fresh"
identifier = ~"[A-Z0-9]+"i
whitespace = ~"\s"*
"""
_grammar = Grammar(grammar_text)
| true
| true
|
f70c8f55fb8d45c12d48988d6f13b08b0ec2fcfd
| 3,899
|
py
|
Python
|
glit/repo.py
|
vandmo/glit
|
d29b685c2a836e6fe52068f8e5c89393a47cc23f
|
[
"Apache-2.0"
] | 2
|
2018-10-31T06:41:05.000Z
|
2018-10-31T07:50:28.000Z
|
glit/repo.py
|
vandmo/plit
|
d29b685c2a836e6fe52068f8e5c89393a47cc23f
|
[
"Apache-2.0"
] | null | null | null |
glit/repo.py
|
vandmo/plit
|
d29b685c2a836e6fe52068f8e5c89393a47cc23f
|
[
"Apache-2.0"
] | null | null | null |
from . import git
import os
from .utils import errordie, mkpath, msg
def _trim(lines):
stripped = [line.strip() for line in lines]
return [line for line in stripped if line and not line.startswith('#')]
def _git_destname(repository):
git_folder = repository.rsplit('/', 1)[1]
if git_folder.endswith('.git'):
return git_folder[:-4]
else:
return git_folder
class Repo(object):
def __init__(self, repository, prefix):
self._repository = repository
self._prefix = prefix
@classmethod
def parse(cls, line):
parts = line.split(' ', 1)
if len(parts) == 2:
repository = parts[1]
prefix = parts[0]
else:
errordie('Invalid repository file line: {}'.format(line))
return cls(repository, prefix)
def _group_folder(self, folder):
if self._prefix:
return os.path.join(folder, self._prefix)
else:
return folder
def clone(self, folder):
group_folder = self._group_folder(folder)
mkpath(group_folder)
git_folder = _git_destname(self._repository)
destination = os.path.join(group_folder, git_folder)
if os.path.exists(destination):
msg('IN %s SKIPPING %s' % (self._prefix, git_folder))
return
msg('IN %s CLONING %s' % (self._prefix, git_folder))
git.clone_or_die(self._repository, destination)
def fast_forward(self, folder):
group_folder = self._group_folder(folder)
git_folder = _git_destname(self._repository)
destination = os.path.join(group_folder, git_folder)
if not os.path.exists(destination):
errordie('Can\'t fast forward missing repository: {}'.format(destination))
msg('IN %s FAST FORWARDING %s' % (self._prefix, git_folder))
git.fast_forward_or_die(destination)
def as_line(self):
if self._prefix:
return '{} {}'.format(self._prefix, self._repository)
else:
return '. {}'.format(self._repository)
def __eq__(self, other):
if isinstance(other, Repo):
return (
self._repository == other._repository
and
self._prefix == other._prefix
)
return False
def __ne__(self, other):
return not self.__eq__(other)
def __str__(self):
return self.as_line()
class ReposFile(object):
def __init__(self, stored_file):
self._stored_file = stored_file
self._repositories = [
Repo.parse(line)
for line in _trim(stored_file.readlines())]
def clone(self, folder):
for repo in self._repositories:
repo.clone(folder)
def fast_forward(self, folder):
for repo in self._repositories:
repo.fast_forward(folder)
def add(self, repo):
for existing in self._repositories:
if repo == existing:
errordie('Duplicate entry {}'.format(repo))
self._repositories.append(repo)
def save(self):
lines = [repo.as_line()+'\n' for repo in self._repositories]
self._stored_file.writelines(sorted(lines))
class RepoSet(object):
def __init__(self, name, folder, stored_file):
self._name = name
self._reposfile = ReposFile(stored_file)
self._folder = folder
def clone(self):
msg('CLONING SET {}'.format(self._name))
self._reposfile.clone(self._folder)
def fast_forward(self):
msg('FAST FORWARD IN SET {}'.format(self._name))
self._reposfile.fast_forward(self._folder)
def add_and_clone(self, repository, prefix):
msg('IN SET {}'.format(self._name))
repo = Repo(repository=repository, prefix=prefix)
self._reposfile.add(repo)
self._reposfile.save()
repo.clone(self._folder)
| 30.460938
| 86
| 0.611182
|
from . import git
import os
from .utils import errordie, mkpath, msg
def _trim(lines):
stripped = [line.strip() for line in lines]
return [line for line in stripped if line and not line.startswith('#')]
def _git_destname(repository):
git_folder = repository.rsplit('/', 1)[1]
if git_folder.endswith('.git'):
return git_folder[:-4]
else:
return git_folder
class Repo(object):
def __init__(self, repository, prefix):
self._repository = repository
self._prefix = prefix
@classmethod
def parse(cls, line):
parts = line.split(' ', 1)
if len(parts) == 2:
repository = parts[1]
prefix = parts[0]
else:
errordie('Invalid repository file line: {}'.format(line))
return cls(repository, prefix)
def _group_folder(self, folder):
if self._prefix:
return os.path.join(folder, self._prefix)
else:
return folder
def clone(self, folder):
group_folder = self._group_folder(folder)
mkpath(group_folder)
git_folder = _git_destname(self._repository)
destination = os.path.join(group_folder, git_folder)
if os.path.exists(destination):
msg('IN %s SKIPPING %s' % (self._prefix, git_folder))
return
msg('IN %s CLONING %s' % (self._prefix, git_folder))
git.clone_or_die(self._repository, destination)
def fast_forward(self, folder):
group_folder = self._group_folder(folder)
git_folder = _git_destname(self._repository)
destination = os.path.join(group_folder, git_folder)
if not os.path.exists(destination):
errordie('Can\'t fast forward missing repository: {}'.format(destination))
msg('IN %s FAST FORWARDING %s' % (self._prefix, git_folder))
git.fast_forward_or_die(destination)
def as_line(self):
if self._prefix:
return '{} {}'.format(self._prefix, self._repository)
else:
return '. {}'.format(self._repository)
def __eq__(self, other):
if isinstance(other, Repo):
return (
self._repository == other._repository
and
self._prefix == other._prefix
)
return False
def __ne__(self, other):
return not self.__eq__(other)
def __str__(self):
return self.as_line()
class ReposFile(object):
def __init__(self, stored_file):
self._stored_file = stored_file
self._repositories = [
Repo.parse(line)
for line in _trim(stored_file.readlines())]
def clone(self, folder):
for repo in self._repositories:
repo.clone(folder)
def fast_forward(self, folder):
for repo in self._repositories:
repo.fast_forward(folder)
def add(self, repo):
for existing in self._repositories:
if repo == existing:
errordie('Duplicate entry {}'.format(repo))
self._repositories.append(repo)
def save(self):
lines = [repo.as_line()+'\n' for repo in self._repositories]
self._stored_file.writelines(sorted(lines))
class RepoSet(object):
def __init__(self, name, folder, stored_file):
self._name = name
self._reposfile = ReposFile(stored_file)
self._folder = folder
def clone(self):
msg('CLONING SET {}'.format(self._name))
self._reposfile.clone(self._folder)
def fast_forward(self):
msg('FAST FORWARD IN SET {}'.format(self._name))
self._reposfile.fast_forward(self._folder)
def add_and_clone(self, repository, prefix):
msg('IN SET {}'.format(self._name))
repo = Repo(repository=repository, prefix=prefix)
self._reposfile.add(repo)
self._reposfile.save()
repo.clone(self._folder)
| true
| true
|
f70c90492de0b23222eb9e58cbb472345cf3c226
| 1,621
|
py
|
Python
|
azure-batch/azure/batch/models/task_id_range.py
|
HydAu/AzureSDKForPython
|
5cbe34e9e0b8ea1faacc9f205633ccc0b885c0f3
|
[
"Apache-2.0"
] | null | null | null |
azure-batch/azure/batch/models/task_id_range.py
|
HydAu/AzureSDKForPython
|
5cbe34e9e0b8ea1faacc9f205633ccc0b885c0f3
|
[
"Apache-2.0"
] | null | null | null |
azure-batch/azure/batch/models/task_id_range.py
|
HydAu/AzureSDKForPython
|
5cbe34e9e0b8ea1faacc9f205633ccc0b885c0f3
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft and contributors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class TaskIdRange(Model):
"""
A range of task ids that a task can depend on. All tasks with ids in the
range must complete successfully before the dependent task can be
scheduled.
:param start: The first task id in the range.
:type start: int
:param end: The last task id in the range.
:type end: int
"""
_validation = {
'start': {'required': True},
'end': {'required': True},
}
_attribute_map = {
'start': {'key': 'start', 'type': 'int'},
'end': {'key': 'end', 'type': 'int'},
}
def __init__(self, start, end):
self.start = start
self.end = end
| 32.42
| 76
| 0.611968
|
from msrest.serialization import Model
class TaskIdRange(Model):
_validation = {
'start': {'required': True},
'end': {'required': True},
}
_attribute_map = {
'start': {'key': 'start', 'type': 'int'},
'end': {'key': 'end', 'type': 'int'},
}
def __init__(self, start, end):
self.start = start
self.end = end
| true
| true
|
f70c906712870c5ce6e99203c651af3b92bc28a8
| 32,190
|
py
|
Python
|
admsInput/admsInputDataRetriever.py
|
mdhillmancmcl/TheWorldAvatar-CMCL-Fork
|
011aee78c016b76762eaf511c78fabe3f98189f4
|
[
"MIT"
] | 21
|
2021-03-08T01:58:25.000Z
|
2022-03-09T15:46:16.000Z
|
admsInput/admsInputDataRetriever.py
|
mdhillmancmcl/TheWorldAvatar-CMCL-Fork
|
011aee78c016b76762eaf511c78fabe3f98189f4
|
[
"MIT"
] | 63
|
2021-05-04T15:05:30.000Z
|
2022-03-23T14:32:29.000Z
|
admsInput/admsInputDataRetriever.py
|
mdhillmancmcl/TheWorldAvatar-CMCL-Fork
|
011aee78c016b76762eaf511c78fabe3f98189f4
|
[
"MIT"
] | 15
|
2021-03-08T07:52:03.000Z
|
2022-03-29T04:46:20.000Z
|
'''
module that retreives and pack adms input info
'''
import rdflib
from pyproj import Proj, transform
import requests
import math
import sys
import os
import rdflib.plugins.sparql.results.jsonresults as jsresult
from collections import namedtuple
from admsSrc import admsSrc
from admsPolygon import Polygon
import cobbling
class admsInputDataRetriever(object):
BDN = namedtuple('BDN', ['BldNumBuildings','BldName','BldType','BldX','BldY','BldHeight', 'BldLength', 'BldWidth', 'BldAngle'])
OPT = namedtuple('OPT', ['OptNumOutputs','OptPolName','OptInclude','OptShortOrLong', 'OptSamplingTime','OptSamplingTimeUnits','OptCondition','OptNumPercentiles','OptNumExceedences','OptPercentiles','OptExceedences','OptUnits','OptGroupsOrSource','OptAllSources','OptNumGroups','OptIncludedGroups','OptIncludedSource','OptCreateComprehensiveFile'])
def __init__(self, topnode, bdnnode=None, range=None, pollutants =['so2'], srcLimit = 5, bdnLimit = 25, filterSrc = False):
'''constructor
inputs:
range - user input range {'xmin', 'xmax', 'ymin', 'ymax'}, actual range is the min(user range, region envelope(e.g. jurongisland))
topnode - uri of topnode to begin search within tree struct,
filtersrc - true if use all children under topnode as src, false to use topnode as src directly
bdnnode - top/colleciton node of building
pollutants: pollutant to test
srcLimit: limit of src number, actual number might fewer
bdnLimit: limit of bdn number, actual number might fewer
'''
self.address = None
self.pollutants = pollutants
self.topnode = topnode
self.bdnnode = bdnnode
self.srcLimit = srcLimit
self.bdnLimit = bdnLimit
self.filterSrc = False
self.range = self.getRange(range)
print(self.range)
def getRange(self, userrange):
'''
Define range from topnode info and user give parameters
returns (xRange, yRange)
'''
if not self.filterSrc:
return ((userrange['xmin'], userrange['xmax']), (userrange['ymin'],userrange['ymax']))
self.connectDB(self.topnode)#connect to db
qx = self.query(
"""
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX owl: <http://www.w3.org/2002/07/owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
SELECT ?value
WHERE {
?co space_and_time_extended:hasProjectedCoordinate_x ?upper.
?upper sys:hasValue ?v.
?v sys:numericalValue ?value .
}
""")
qy = self.query(
"""
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX owl: <http://www.w3.org/2002/07/owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
SELECT ?value
WHERE {
?co space_and_time_extended:hasProjectedCoordinate_y ?upper.
?upper sys:hasValue ?v.
?v sys:numericalValue ?value .
}
""")
#extract bounds data
xs = tuple(row['value'] for row in qx)
ys = tuple(row['value'] for row in qy)
xRange = (xs[0].toPython(),xs[1].toPython()) if xs[0]<xs[1] else (xs[1].toPython(),xs[0].toPython())
yRange = (ys[0].toPython(),ys[1].toPython()) if ys[0]<ys[1] else (ys[1].toPython(),ys[0].toPython())
#todo: provide gis speci number in future and do conversion if needed
#if user specified range, compare
if userrange is not None:
xRange = (min(xRange[0], userrange['xmin']), max(xRange[1], userrange['xmax']))
yRange = (min(yRange[0], userrange['ymin']), max(yRange[1], userrange['ymax']))
print('xrange: {} - {}', *xRange)
print('yrange: {} - {}', *yRange)
return(xRange, yRange)
def filterSource(self):
'''filter the source from tree starting from topnode, within the range and with user set content
returns: list of source uris
'''
xRange, yRange = self.range
self.connectDB(self.topnode)#connect to db
#query for children uris from topnode
#todo: future kb: type check :emission type
qChildren = self.query(
"""
PREFIX Eco-industrialPark: <http://www.theworldavatar.com/OntoEIP/Eco-industrialPark.owl#>
SELECT ?child
WHERE {{
?o Eco-industrialPark:hasIRI ?child .
}}
LIMIT {0}
""".format(self.srcLimit)
)
###query each children to get coordinates
uris = list(row["child"].strip() for row in qChildren)
###todo: add this test file, delete in future
#uris.append("http://www.theworldavatar.com/TankID_1574.owl#TankID_1574")
filtered = []
print(uris)
for uri in uris:
print("connecting: {:s}".format(uri))
self.connectDB(uri)
qstr ='''
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX owl: <http://www.w3.org/2002/07/owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
PREFIX material: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/material/material.owl#>
SELECT DISTINCT ?x ?y ?content
WHERE {{
<{0!s}> space_and_time_extended:hasGISCoordinateSystem ?o.
?o space_and_time_extended:hasProjectedCoordinate_x ?xe.
?xe sys:hasValue ?vx.
?vx sys:numericalValue ?x .
<{0!s}> sys:hasContent ?contentE .
?contentE material:intrinsicCharacteristics ?chemsp.
?chemsp sys:containsDirectly ?content.
OPTIONAL{{
?o space_and_time_extended:hasProjectedCoordinate_y ?ye.
?ye sys:hasValue ?vy.
?vy sys:numericalValue ?y .
}}
}}
'''.format(uri)
#print (qstr)
coordQresults = self.query(qstr)
##filter children within range
for row in coordQresults:
x,y,content = float(row['x'].toPython()), float(row['y'].toPython()), row['content'].toPython()
#print("{},{},{}".format(x, y, content))
if x - xRange[0]>0 and x - xRange[1] < 0 and y - yRange[0] > 0 and y - yRange[1]<0 and content in self.pollutants:
filtered.append(uri)
print('add to filtered {}'.format(uri))
break
return filtered
def getSrcData(self):
'''get all sourced data :
returns: data object
'''
filtered = None
if not self.filterSrc:
filtered = self.filterSource()
else:
filtered = self.topnode
s = set()#make a set of substance to query later
result = []
for uri in filtered:
print("connecting: {:s}".format(uri))
self.connectDB(uri)
qdata = self.query(
"""
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
PREFIX plant:<http://www.theworldavatar.com/OntoCAPE/OntoCAPE/chemical_process_system/CPS_realization/plant.owl#>
PREFIX topology:<http://www.theworldavatar.com/OntoCAPE/meta_model/topology/topology.owl#>
PREFIX behavior: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/chemical_process_system/CPS_behavior/behavior.owl#>
PREFIX chemical_process_system:<http://www.theworldavatar.com/OntoCAPE/OntoCAPE/chemical_process_system/chemical_process_system.owl#>
PREFIX phase_system:<http://www.theworldavatar.com/OntoCAPE/OntoCAPE/material/phase_system/phase_system.owl#>
PREFIX material: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/material/material.owl#>
SELECT ?o ?height ?diameter ?content ?x ?y ?velocity ?massflow ?temp
WHERE {{
?o plant:hasHeight ?he.
?he sys:numericalValue ?height .
?o plant:hasInsideDiameter ?de .
?de sys:numericalValue ?diameter.
?o sys:hasContent ?contentE .
?contentE material:intrinsicCharacteristics ?chemsp.
?chemsp sys:containsDirectly ?content.
?contentE material:thermodynamicBehavior ?phase.
?phase phase_system:has_temperature ?tempE.
?tempE sys:hasValue ?vte.
?vte sys:numericalValue ?temp .
?o space_and_time_extended:hasGISCoordinateSystem ?coe .
?coe space_and_time_extended:hasProjectedCoordinate_x ?xe.
?xe sys:hasValue ?xv.
?xv sys:numericalValue ?x.
?coe space_and_time_extended:hasProjectedCoordinate_y ?ye.
?ye sys:hasValue ?yv.
?yv sys:numericalValue ?y.
?stream topology:leaves ?o.
?stream chemical_process_system:refersToGeneralizedAmount ?ga.
?ga sys:hasSubsystem ?ma.
?ma sys:hasProperty ?ve.
?ve a behavior:Velocity .
?ve sys:hasValue ?vv.
?vv sys:numericalValue ?velocity.
?ma sys:hasProperty ?me.
?me a behavior:ConvectiveMassFlowrate .
?me sys:hasValue ?mv.
?mv sys:numericalValue ?massflow.
}}
LIMIT 1
""")
for row in qdata:
s.add(row['content'].toPython())
result.append(row.asdict())
print("FILTERED :")
print(result)
#use this if need query substance separately
#query substance for substance related data
cMap = {}
#hard coding for now before I get a better solusion
self.connectDB("http://www.theworldavatar.com/OntoCAPE/OntoCAPE/material/substance/substance.owl")
template = """
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
PREFIX sub:<http://www.theworldavatar.com/OntoCAPE/OntoCAPE/material/substance/substance.owl#>
PREFIX behavior: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/chemical_process_system/CPS_behavior/behavior.owl#>
PREFIX phase_system:<http://www.theworldavatar.com/OntoCAPE/OntoCAPE/material/phase_system/phase_system.owl#>
SELECT DISTINCT ?o ?hc ?d ?mw
WHERE {{
<{0}> behavior:hasHeatCapacityRatio ?hce. #heatcapacity ratio
?hce sys:hasValue ?hcv .
?hcv sys:numericalValue ?hc.
<{0}> phase_system:has_density ?de . #density
?de sys:hasValue ?dv .
?dv sys:numericalValue ?d.
<{0}> sys:hasProperty ?mwe. #molecular weight
?mwe a sub:MolecularWeight .
?mwe sys:hasValue ?mwv .
?mwv sys:numericalValue ?mw.
}}
LIMIT 1
"""
for sub in s:
print (sub)
print (template.format(sub))
sdata = self.query(template.format(sub))
for row in sdata:
cMap[sub] = row
break
packed = []
for src in result:
subData = cMap[src['content'].toPython()].asdict()
newSrc = admsSrc(SrcName = src['o'].toPython(), SrcHeight = src['height'].toPython(), SrcDiameter = src['diameter'].toPython(),SrcVertVeloc = src['velocity'].toPython(), SrcPolEmissionRate = src['massflow'].toPython(), SrcPollutants = self.polIRI2Name(src['content'].toPython()),SrcTemperature = src['temp'].toPython(), SrcX1 = src['x'].toPython(), SrcY1 = src['y'].toPython(), SrcMolWeight = subData['mw'].toPython(), SrcDensity = subData['d'].toPython(), SrcSpecHeatCap = subData['hc'].toPython())
packed.append(newSrc)
return packed
def getBdnData(self):
self.connectDB(self.bdnnode, connectType = 'endpoint')
bdns = self.filterBdnEnvelope()
if len(bdns) is 0: #range is smaller than any envelope,
#then we have to filter indi buildings
#todo: in this case, should we filter by calculated cnetroid, or a crude one with ground x,y? i'd go with x, y first。。。
bdns = self.filterBdns(bdns)
if len(bdns) is 0:
raise Exception('no bdn within range')
print ('Found {0} bdn within range , they are '.format(len(bdns)))
result = list((zip(*[self.getMetrics(bld) for bld in bdns])))
print (result)
newBdn = self.BDN(len(bdns), *result)
return newBdn
def filterBdnEnvelope(self):
'''
Get all buildings within range by comparing range with envelope
return list of building url
'''
xRange, yRange = self.range
qb =self.query('''
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX citygml: <http://www.theworldavatar.com/CityGMLOntology.owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
SELECT distinct ?bdn
WHERE{{
?cityM a citygml:CityModelType.
?cityM citygml:boundedBy ?envelope .
?envelope a citygml:EnvelopeType. # get all envelopes
?envelope citygml:upperCornerPoint ?upoint. # get bounds of envelope
?upoint space_and_time_extended:hasGISCoordinateSystem ?uco.
?uco space_and_time_extended:hasProjectedCoordinate_x ?uxe.
?uxe sys:hasValue ?uxv.
?uxv sys:numericalValue ?ux.
?uco space_and_time_extended:hasProjectedCoordinate_y ?uye.
?uye sys:hasValue ?uyv.
?uyv sys:numericalValue ?uy.
?envelope citygml:lowerCornerPoint ?lpoint.
?lpoint space_and_time_extended:hasGISCoordinateSystem ?lco.
?lco space_and_time_extended:hasProjectedCoordinate_x ?lxe.
?lxe sys:hasValue ?lxv.
?lxv sys:numericalValue ?lx.
?lco space_and_time_extended:hasProjectedCoordinate_y ?lye.
?lye sys:hasValue ?lyv.
?lyv sys:numericalValue ?ly.
?cityM citygml:cityObjectMember ?bdn . #get bdn belongs to filterd envelope
Filter(xsd:double(?ly) > "{1}"^^xsd:double && xsd:double(?uy) < "{2}"^^xsd:double && xsd:double(?lx) > "{3}"^^xsd:double && xsd:double(?ux) < "{4}"^^xsd:double) #filter envelope within range
}}
LIMIT {0} #limit of building num
'''.format(self.bdnLimit, *yRange, *xRange))
########todo: in future delete stud data
return tuple(row['bdn'] for row in qb)
#todo
def filterBdns(self, bdns):
'''
filter individual building to see if they are within range
get all uris where every x and y in its ground is within range(maybe count ?)
'''
xRange, yRange = self.range
qstr = '''
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
PREFIX citygml:<http://www.theworldavatar.com/CityGMLOntology.owl#>
#PREFIX citygml:<file:/D:/citygmllearn/citygmlhandmade.owl#>
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
SELECT distinct ?bdn
WHERE {{
{{ #case1:building has no parts
?bdn a citygml:BuildingType.
?bdn citygml:boundedBy ?g. #building boundBy surface
?g a citygml:GroundSurfaceType. # surface is a ground
?g citygml:lod2MultiSurface ?ms. #ground has lod2multisurface ms
?ms citygml:surfaceMember ?pol. #ms has member polygon
?pol citygml:exterior ?lring. # polygon exterior is linear ring
?lring sys:contains ?po. # linear ring consists of points
?po space_and_time_extended:hasGISCoordinateSystem ?co. #point has coordinate system cs
?co space_and_time_extended:hasProjectedCoordinate_x ?xe. #[extract cs to get x,y,z value]
?xe sys:hasValue ?xv.
?xv sys:numericalValue ?x.
?co space_and_time_extended:hasProjectedCoordinate_y ?ye.
?ye sys:hasValue ?yv.
?yv sys:numericalValue ?y.
}} UNION {{ #case 2:
?bdn a citygml:BuildingType. #bdns that consists of part
?bdn citygml:consistsOfBuildingPart ?part.
?part a citygml:BuildingPartType.
?part citygml:boundedBy ?g.
?g a citygml:GroundSurfaceType.
?g citygml:lod2MultiSurface ?ms.
?ms citygml:surfaceMember ?pol.
?pol citygml:exterior ?lring.
?lring sys:contains ?po.
?po space_and_time_extended:hasGISCoordinateSystem ?co.
?co space_and_time_extended:hasProjectedCoordinate_x ?xe.
?xe sys:hasValue ?xv.
?xv sys:numericalValue ?x.
?co space_and_time_extended:hasProjectedCoordinate_y ?ye.
?ye sys:hasValue ?yv.
?yv sys:numericalValue ?y.
}}
filter(xsd:double(?y) > "{1}"^^xsd:double && xsd:double(?y) < "{2}"^^xsd:double && xsd:double(?x) > "{3}"^^xsd:double && xsd:double(?x) < "{4}"^^xsd:double)
}}
LIMIT {0} #limit of building num
'''.format(self.bdnLimit, *yRange, *xRange)
qre = self.query(qstr)
return tuple(row['bdn'] for row in qre)
def getBdnVertices(self, nodeuri):
    '''Collect the ground-polygon vertices and the vertical extent of one
    building (or building part) from the currently connected graph.

    inputs:
        nodeuri - IRI of the building / building-part node
    returns: tuple (vertices, zlimit) where
        vertices - list of (x, y) ground points converted epsg:28992 -> epsg:32648
        zlimit   - the (min ground z, max roof z) pair run through the same
                   converter.  NOTE(review): the converter is an x/y projection
                   transform; feeding it two z values looks suspicious - confirm
                   the intended behaviour.
    '''
    #todo: modify query to get raw data,then pass to converter
    # all ground-surface vertex coordinates (x, y, z) for this node
    qData= self.query('''
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
PREFIX citygml:<http://www.theworldavatar.com/CityGMLOntology.owl#>
SELECT ?x ?y ?z
WHERE {{
<{0}> citygml:boundedBy ?g.   #building/part IRI boundBy surface
?g a citygml:GroundSurfaceType.  # surface is a ground
?g citygml:lod2MultiSurface ?ms. #ground has lod2multisurface ms
?ms citygml:surfaceMember ?pol.  #ms  has member polygon
?pol citygml:exterior ?lring.    # polygon exterior is linear ring
?lring sys:contains ?po.           # linear ring consists of points
?po space_and_time_extended:hasGISCoordinateSystem ?co. #point has coordinate system cs
?co space_and_time_extended:hasProjectedCoordinate_x ?xe. #[extract cs to get x,y,z value]
?xe sys:hasValue ?xv.
?xv sys:numericalValue ?x.
?co space_and_time_extended:hasProjectedCoordinate_y ?ye.
?ye sys:hasValue ?yv.
?yv sys:numericalValue ?y.
?co space_and_time_extended:hasProjectedCoordinate_z ?ze.
?ze sys:hasValue ?zv.
?zv sys:numericalValue ?z.
}}
'''.format(nodeuri))
    #query for roof max and ground min
    qHeight = self.query("""
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
PREFIX citygml:<http://www.theworldavatar.com/CityGMLOntology.owl#>
SELECT  (MIN(?z) AS ?min) (MAX(?zr) AS ?max) #select min of ground z values, max of roof z values
WHERE {{
<{0}> citygml:boundedBy ?g.   #building/part IRI boundBy surface
?g a citygml:GroundSurfaceType.  # surface is a GROUND
?g citygml:lod2MultiSurface ?ms. #[select all coordi z value for ground surface]
?ms citygml:surfaceMember ?pol.
?pol citygml:exterior ?lring.
?lring sys:contains ?po.
?po space_and_time_extended:hasGISCoordinateSystem ?co.
?co space_and_time_extended:hasProjectedCoordinate_z ?ze.
?ze sys:hasValue ?zv.
?zv sys:numericalValue ?z.
<{0}> citygml:boundedBy ?gr.   #building/part IRI boundBy surface
?gr a citygml:RoofSurfaceType.  # surface is a ROOF
?gr citygml:lod2MultiSurface ?msr. #[select all coordi z value for roof surface]
?msr citygml:surfaceMember ?polr.
?polr citygml:exterior ?lringr.
?lringr sys:contains ?por.
?por space_and_time_extended:hasGISCoordinateSystem ?cor.
?cor space_and_time_extended:hasProjectedCoordinate_z ?zer.
?zer sys:hasValue ?zvr.
?zvr sys:numericalValue ?zr.
}} GROUP BY ?g   # group by each ground IRI
""".format(nodeuri))
    # define coordi convert function : building kb ---> adms
    Bdn2ADMSCoordC = defineCoordConvert('epsg:28992','epsg:32648')
    #float(row['min'].toPython()), float(row['max'].toPython())
    # exactly one aggregated row is expected; [0] takes it
    zlimit = tuple( Bdn2ADMSCoordC(float(row['min'].toPython()), float(row['max'].toPython())) for row in qHeight )[0]
    return ( list(Bdn2ADMSCoordC(float(row['x'].toPython()), float(row['y'].toPython())) for row in qData), zlimit)
def getMetrics(self, nodeuri):
    '''Derive the ADMS building record for one building node.

    inputs:
        nodeuri - IRI of the building node
    returns: tuple matching the BDN fields after the count:
        (name, type, centroid x, centroid y, height, length, width, angle)
    '''
    base = None
    if self.hasBuildingPart(nodeuri):
        print('{0} has building part'.format(nodeuri))
        #get list of building part
        # NOTE(review): ?b is not bound to nodeuri, so this returns every
        # building part in the currently connected graph - presumably the
        # graph holds only this one building; confirm before reusing.
        bparts = list(row['p'] for row in self.query(
            ''' PREFIX citygml:<http://www.theworldavatar.com/CityGMLOntology.owl#>
SELECT ?p
WHERE
{
?b citygml:consistsOfBuildingPart ?p.
}'''))
        #get metrics for each part
        polygons = tuple( Polygon(*self.getBdnVertices(uri)) for uri in bparts)
        #get centroid for pols
        base = Polygon.combineBaseMulti(polygons)
    else: # no building part
        print('{0} NOT has building part'.format(nodeuri))
        verticesNHeight = self.getBdnVertices(nodeuri)
        #print(verticesHeight)
        base = Polygon(*verticesNHeight)
    #todo pack return result
    #('BldName','BldType','BldX','BldY','BldHeight', 'BldLength', 'BldWidth', 'BldAngle')
    print ((nodeuri.toPython(), base.type, base.centroid[0], base.centroid[1], base.height, base.length, base.width, base.angle) )
    #todo: coordinate coversion for centroid!!!
    return (uri2name(nodeuri.toPython()), base.type, base.centroid[0], base.centroid[1], base.height, base.length, base.width, base.angle)
    #calulate centro
    #choose shape
    #calculate angle
    #probably for the best if we construct a type of polygon instead?
def hasBuildingPart(self, nodeuri):
    '''Return the boolean result of an ASK query: does this building node
    own at least one citygml BuildingPart in the connected graph?
    '''
    #self.connectDB(nodeuri)
    print('checking if building part for :{0}'.format(nodeuri))
    askQuery = '''
PREFIX citygml:<http://www.theworldavatar.com/CityGMLOntology.owl#>
ASK
{{
<{0}> citygml:consistsOfBuildingPart ?p
}}
'''.format(nodeuri)
    # the ASK result iterates as a single boolean row; unpack it strictly
    answer, = tuple(self.query(askQuery))
    return answer
def coreBdn2Src(self):
    '''Assign the nearest building (the "main building") to every source.

    For each source in self.rawSrc, finds the entry of self.rawBdn whose
    (BldX, BldY) centroid has the smallest squared distance to the source
    location and records it via src.setMainBuilding().

    Raises:
        Exception: when no building could be matched (only possible when
            self.rawBdn is empty).
    '''
    #compare src coords to each bdn
    for src in self.rawSrc:
        print('find closed bdn for src: '+src.SrcName+" with x: "+str(src.SrcX1) +" y: "+str(src.SrcY1))
        # the sentinel sys.maxsize makes the redundant first-iteration flag
        # of the old implementation unnecessary
        closest, dClosest = None, sys.maxsize
        for i in range(len(self.rawBdn.BldX)):
            #print('bdn x: ' +str( self.rawBdn.BldX[i]))
            # squared distance is enough for comparison - skip the sqrt
            dx, dy = self.rawBdn.BldX[i] - src.SrcX1, self.rawBdn.BldY[i] - src.SrcY1
            d = dx * dx + dy * dy
            if d < dClosest:
                closest = self.rawBdn.BldName[i]
                dClosest = d
                print('new smallest distance: '+str(dClosest))
        if closest is not None:
            src.setMainBuilding(closest)
        else: #err handling, something is wrong if no closest building is found, just throw it
            raise Exception('Dear lord, no closed building found for src: '+src.SrcName)
def getOpt(self, PolNames, SrcNames):
    '''Build the ADMS OPT (output options) record for the given pollutant
    names and source names, using this retriever's fixed defaults.
    '''
    n = len(PolNames)
    # NOTE(review): the units list is fixed at length 4 and the percentile /
    # exceedence arrays at length 80 regardless of the pollutant count -
    # presumably dictated by the ADMS file format; confirm before changing.
    return self.OPT(n, PolNames,
                    [1] * n, [0] * n, [1] * n, [3] * n,
                    [0] * n, [0] * n, [0] * n,
                    [0] * 80, [0] * 80, ['ug/m3'] * 4,
                    1, 0, 1, "Grouptank001", SrcNames, 0)
def polIRI2Name(self, polIRI):
    '''Translate a substance IRI into the short pollutant name ADMS expects.

    inputs:
        polIRI - full substance IRI string
    returns: string - ADMS pollutant name (e.g. 'Cl2')
    Raises:
        Exception: when the IRI has no mapping defined here.
    '''
    substances = {'http://www.theworldavatar.com/OntoCAPE/OntoCAPE/material/substance/substance.owl#chlorine':'Cl2'}
    # membership test on the dict itself instead of .keys() - same result, idiomatic
    if polIRI in substances:
        print('Found: '+ substances[polIRI])
        return substances[polIRI]
    print ('Not found !!!!')
    raise Exception('This substance is not defined!!!!')
def getWeather(self):
    '''Produce the meteorology input for ADMS.

    Delegates to the cobbling script, which writes the .met file directly,
    then returns the absolute path of that file so the caller can point the
    apl at it.
    '''
    metFile = r"test.met"
    cobbling.run(meteo_data = metFile)
    return os.path.realpath(metFile)
def get(self):
    '''Main entry point: gather every piece of input data ADMS needs.

    returns: dict with keys 'Src' (sources), 'Bdn' (buildings),
        'Opt' (output options) and 'Met' (path of the .met file)
    Raises:
        Exception: when no emission source could be found.
    '''
    # sources first - everything else depends on them
    self.rawSrc = self.getSrcData()
    ##todo: think about this case: when nothing is found ,where should we handle it?
    ##then nothing should be written, and catch this exception and mute it in main function
    if self.rawSrc is None:
        raise Exception("No src in found to requiries")
    # buildings within range
    self.rawBdn = self.getBdnData()
    #print('raw building: ')
    #print(self.rawBdn)
    optData = self.getOpt(self.pollutants, [s.SrcName for s in self.rawSrc])
    # pair each source with its nearest building (mutates self.rawSrc entries)
    self.coreBdn2Src()
    #for debuging, in future,define this for data type, i dont think it auto rpint for objet
    for src in self.rawSrc:
        print(src)
    metFile = self.getWeather()
    return {'Src': self.rawSrc, 'Bdn': self.rawBdn, 'Opt': optData, 'Met': metFile}
def queryEndpoint(self, str):
    '''Run a SPARQL query against the remote endpoint stored in self.address.

    inputs:
        str - SPARQL query text.  NOTE(review): parameter name shadows the
              builtin str; renaming would change the keyword interface, so
              it is left as-is.
    returns: rdflib JSONResult parsed from the endpoint's JSON response
    '''
    print('requesting @ '+self.address+" with query:")
    #print(str)
    resp = requests.get(self.address, params = {'query':str}, timeout = 1500, headers = {'user-agent': 'my-app/0.0.1'})
    print(resp.json())
    qres = jsresult.JSONResult(resp.json())#json decoded
    print(qres)
    return qres
def queryLocalGraph(self, str):
    '''Run a SPARQL query against the locally parsed rdflib graph and
    return its result object.
    '''
    return self.g.query(str)
def Uri2Local(uri):
    '''replace a uri to local address
    inputs:
        uri - uri to be changed
    returns: string - local address

    NOTE(review): defined without a self parameter, yet remote2local calls
    self.Uri2Local(address) - if this is a class member that call would bind
    the instance to uri and fail; confirm whether it is meant to be a
    module-level helper.  Also relies on a config module that is not among
    the visible imports - verify config.root is importable here.
    '''
    return uri.replace("http://www.jparksimulator.com",config.root ).replace("http://www.theworldavatar.com",config.root)
def remote2local(self, func):
    '''Decorator: rewrite the address argument to its local equivalent
    (via self.Uri2Local) before invoking func.

    inputs:
        func - callable taking (self, address)
    returns: the wrapped callable
    '''
    def functionWrapper(self, address):
        address = self.Uri2Local(address)
        # bug fix: propagate func's return value - it was silently dropped
        # before, so every wrapped call returned None
        return func(self, address)
    return functionWrapper
def connectDB(self, address, connectType = 'parse'):
    '''connect to db anyhow (we use rdflib graph parse now)

    inputs:
        address     - graph IRI / file location to connect to
        connectType - 'parse' to load the graph locally with rdflib,
                      'endpoint' to query a remote SPARQL endpoint
    Side effects: sets self.address, self.query (and self.g for 'parse').
    Raises:
        Exception: for an unknown connectType.
    '''
    def connectDBActual( address):
        '''
        Actual method to connect to db
        '''
        #obsolete: use rdflib locally
        self.address = address
        # bug fix: compare strings with '==' - 'is' relied on CPython
        # interning and is a SyntaxWarning on modern Pythons
        if connectType == 'parse':
            self.g = rdflib.Graph()#comment out in future
            self.g.parse(address)#comment out in future
    self.qmethodMap = {'parse': self.queryLocalGraph, 'endpoint':self.queryEndpoint}
    # NOTE(review): self.address is only reassigned inside connectDBActual,
    # so this compares against the previous address - confirm intent
    if not sameGraph(address, self.address):
        print ('parsing graph: '+ address)
    if connectType not in self.qmethodMap:
        # bug fix: was lowercase 'exception', which would itself raise a
        # NameError instead of the intended error
        raise Exception('db connection method not defined')
    #self.connectType = connectType
    self.query = self.qmethodMap[connectType]
    connectDBActual(address)
def sameGraph(uri1, uri2):
    '''True when both URIs point at the same graph, i.e. they are identical
    up to the first '#'; two None values also count as the same.
    '''
    def root(uri):
        return None if uri is None else uri.split('#')[0]
    return root(uri1) == root(uri2)
def defineCoordConvert(inCode, outCode):
    '''Build a coordinate-conversion function between two CRS codes.

    inputs:
        inCode  - source CRS, e.g. 'epsg:28992'
        outCode - target CRS, e.g. 'epsg:32648'
    returns: function (x, y) -> converted coordinate tuple
    '''
    # NOTE(review): Proj(init=...) is the pyproj 1.x style and is deprecated
    # in pyproj 2+ in favour of Transformer.from_crs - verify the installed
    # pyproj version before upgrading.
    inProj = Proj(init=inCode)
    outProj = Proj(init=outCode)
    def coordConvert(x,y):
        return transform(inProj, outProj, x, y)
    return coordConvert
def uri2name(uri):
    '''Extract the local (fragment) name from a URI.

    inputs:
        uri - URI string expected to contain a '#'
    returns: string - the part after the first '#'
    Raises:
        IndexError: if the URI contains no '#'.
    '''
    # removed the unused 'base' local variable
    return uri.split('#')[1]
| 44.217033
| 511
| 0.564461
|
import rdflib
from pyproj import Proj, transform
import requests
import math
import sys
import os
import rdflib.plugins.sparql.results.jsonresults as jsresult
from collections import namedtuple
from admsSrc import admsSrc
from admsPolygon import Polygon
import cobbling
class admsInputDataRetriever(object):
BDN = namedtuple('BDN', ['BldNumBuildings','BldName','BldType','BldX','BldY','BldHeight', 'BldLength', 'BldWidth', 'BldAngle'])
OPT = namedtuple('OPT', ['OptNumOutputs','OptPolName','OptInclude','OptShortOrLong', 'OptSamplingTime','OptSamplingTimeUnits','OptCondition','OptNumPercentiles','OptNumExceedences','OptPercentiles','OptExceedences','OptUnits','OptGroupsOrSource','OptAllSources','OptNumGroups','OptIncludedGroups','OptIncludedSource','OptCreateComprehensiveFile'])
def __init__(self, topnode, bdnnode=None, range=None, pollutants =['so2'], srcLimit = 5, bdnLimit = 25, filterSrc = False):
self.address = None
self.pollutants = pollutants
self.topnode = topnode
self.bdnnode = bdnnode
self.srcLimit = srcLimit
self.bdnLimit = bdnLimit
self.filterSrc = False
self.range = self.getRange(range)
print(self.range)
def getRange(self, userrange):
if not self.filterSrc:
return ((userrange['xmin'], userrange['xmax']), (userrange['ymin'],userrange['ymax']))
self.connectDB(self.topnode)
qx = self.query(
"""
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX owl: <http://www.w3.org/2002/07/owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
SELECT ?value
WHERE {
?co space_and_time_extended:hasProjectedCoordinate_x ?upper.
?upper sys:hasValue ?v.
?v sys:numericalValue ?value .
}
""")
qy = self.query(
"""
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX owl: <http://www.w3.org/2002/07/owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
SELECT ?value
WHERE {
?co space_and_time_extended:hasProjectedCoordinate_y ?upper.
?upper sys:hasValue ?v.
?v sys:numericalValue ?value .
}
""")
xs = tuple(row['value'] for row in qx)
ys = tuple(row['value'] for row in qy)
xRange = (xs[0].toPython(),xs[1].toPython()) if xs[0]<xs[1] else (xs[1].toPython(),xs[0].toPython())
yRange = (ys[0].toPython(),ys[1].toPython()) if ys[0]<ys[1] else (ys[1].toPython(),ys[0].toPython())
if userrange is not None:
xRange = (min(xRange[0], userrange['xmin']), max(xRange[1], userrange['xmax']))
yRange = (min(yRange[0], userrange['ymin']), max(yRange[1], userrange['ymax']))
print('xrange: {} - {}', *xRange)
print('yrange: {} - {}', *yRange)
return(xRange, yRange)
def filterSource(self):
xRange, yRange = self.range
self.connectDB(self.topnode)
qChildren = self.query(
"""
PREFIX Eco-industrialPark: <http://www.theworldavatar.com/OntoEIP/Eco-industrialPark.owl#>
SELECT ?child
WHERE {{
?o Eco-industrialPark:hasIRI ?child .
}}
LIMIT {0}
""".format(self.srcLimit)
)
t("connecting: {:s}".format(uri))
self.connectDB(uri)
qstr ='''
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX owl: <http://www.w3.org/2002/07/owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
PREFIX material: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/material/material.owl#>
SELECT DISTINCT ?x ?y ?content
WHERE {{
<{0!s}> space_and_time_extended:hasGISCoordinateSystem ?o.
?o space_and_time_extended:hasProjectedCoordinate_x ?xe.
?xe sys:hasValue ?vx.
?vx sys:numericalValue ?x .
<{0!s}> sys:hasContent ?contentE .
?contentE material:intrinsicCharacteristics ?chemsp.
?chemsp sys:containsDirectly ?content.
OPTIONAL{{
?o space_and_time_extended:hasProjectedCoordinate_y ?ye.
?ye sys:hasValue ?vy.
?vy sys:numericalValue ?y .
}}
}}
'''.format(uri)
coordQresults = self.query(qstr)
Qresults:
x,y,content = float(row['x'].toPython()), float(row['y'].toPython()), row['content'].toPython()
if x - xRange[0]>0 and x - xRange[1] < 0 and y - yRange[0] > 0 and y - yRange[1]<0 and content in self.pollutants:
filtered.append(uri)
print('add to filtered {}'.format(uri))
break
return filtered
def getSrcData(self):
filtered = None
if not self.filterSrc:
filtered = self.filterSource()
else:
filtered = self.topnode
s = set()
result = []
for uri in filtered:
print("connecting: {:s}".format(uri))
self.connectDB(uri)
qdata = self.query(
"""
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
PREFIX plant:<http://www.theworldavatar.com/OntoCAPE/OntoCAPE/chemical_process_system/CPS_realization/plant.owl#>
PREFIX topology:<http://www.theworldavatar.com/OntoCAPE/meta_model/topology/topology.owl#>
PREFIX behavior: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/chemical_process_system/CPS_behavior/behavior.owl#>
PREFIX chemical_process_system:<http://www.theworldavatar.com/OntoCAPE/OntoCAPE/chemical_process_system/chemical_process_system.owl#>
PREFIX phase_system:<http://www.theworldavatar.com/OntoCAPE/OntoCAPE/material/phase_system/phase_system.owl#>
PREFIX material: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/material/material.owl#>
SELECT ?o ?height ?diameter ?content ?x ?y ?velocity ?massflow ?temp
WHERE {{
?o plant:hasHeight ?he.
?he sys:numericalValue ?height .
?o plant:hasInsideDiameter ?de .
?de sys:numericalValue ?diameter.
?o sys:hasContent ?contentE .
?contentE material:intrinsicCharacteristics ?chemsp.
?chemsp sys:containsDirectly ?content.
?contentE material:thermodynamicBehavior ?phase.
?phase phase_system:has_temperature ?tempE.
?tempE sys:hasValue ?vte.
?vte sys:numericalValue ?temp .
?o space_and_time_extended:hasGISCoordinateSystem ?coe .
?coe space_and_time_extended:hasProjectedCoordinate_x ?xe.
?xe sys:hasValue ?xv.
?xv sys:numericalValue ?x.
?coe space_and_time_extended:hasProjectedCoordinate_y ?ye.
?ye sys:hasValue ?yv.
?yv sys:numericalValue ?y.
?stream topology:leaves ?o.
?stream chemical_process_system:refersToGeneralizedAmount ?ga.
?ga sys:hasSubsystem ?ma.
?ma sys:hasProperty ?ve.
?ve a behavior:Velocity .
?ve sys:hasValue ?vv.
?vv sys:numericalValue ?velocity.
?ma sys:hasProperty ?me.
?me a behavior:ConvectiveMassFlowrate .
?me sys:hasValue ?mv.
?mv sys:numericalValue ?massflow.
}}
LIMIT 1
""")
for row in qdata:
s.add(row['content'].toPython())
result.append(row.asdict())
print("FILTERED :")
print(result)
cMap = {}
self.connectDB("http://www.theworldavatar.com/OntoCAPE/OntoCAPE/material/substance/substance.owl")
template = """
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
PREFIX sub:<http://www.theworldavatar.com/OntoCAPE/OntoCAPE/material/substance/substance.owl#>
PREFIX behavior: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/chemical_process_system/CPS_behavior/behavior.owl#>
PREFIX phase_system:<http://www.theworldavatar.com/OntoCAPE/OntoCAPE/material/phase_system/phase_system.owl#>
SELECT DISTINCT ?o ?hc ?d ?mw
WHERE {{
<{0}> behavior:hasHeatCapacityRatio ?hce. #heatcapacity ratio
?hce sys:hasValue ?hcv .
?hcv sys:numericalValue ?hc.
<{0}> phase_system:has_density ?de . #density
?de sys:hasValue ?dv .
?dv sys:numericalValue ?d.
<{0}> sys:hasProperty ?mwe. #molecular weight
?mwe a sub:MolecularWeight .
?mwe sys:hasValue ?mwv .
?mwv sys:numericalValue ?mw.
}}
LIMIT 1
"""
for sub in s:
print (sub)
print (template.format(sub))
sdata = self.query(template.format(sub))
for row in sdata:
cMap[sub] = row
break
packed = []
for src in result:
subData = cMap[src['content'].toPython()].asdict()
newSrc = admsSrc(SrcName = src['o'].toPython(), SrcHeight = src['height'].toPython(), SrcDiameter = src['diameter'].toPython(),SrcVertVeloc = src['velocity'].toPython(), SrcPolEmissionRate = src['massflow'].toPython(), SrcPollutants = self.polIRI2Name(src['content'].toPython()),SrcTemperature = src['temp'].toPython(), SrcX1 = src['x'].toPython(), SrcY1 = src['y'].toPython(), SrcMolWeight = subData['mw'].toPython(), SrcDensity = subData['d'].toPython(), SrcSpecHeatCap = subData['hc'].toPython())
packed.append(newSrc)
return packed
def getBdnData(self):
self.connectDB(self.bdnnode, connectType = 'endpoint')
bdns = self.filterBdnEnvelope()
if len(bdns) is 0:
bdns = self.filterBdns(bdns)
if len(bdns) is 0:
raise Exception('no bdn within range')
print ('Found {0} bdn within range , they are '.format(len(bdns)))
result = list((zip(*[self.getMetrics(bld) for bld in bdns])))
print (result)
newBdn = self.BDN(len(bdns), *result)
return newBdn
def filterBdnEnvelope(self):
xRange, yRange = self.range
qb =self.query('''
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX citygml: <http://www.theworldavatar.com/CityGMLOntology.owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
SELECT distinct ?bdn
WHERE{{
?cityM a citygml:CityModelType.
?cityM citygml:boundedBy ?envelope .
?envelope a citygml:EnvelopeType. # get all envelopes
?envelope citygml:upperCornerPoint ?upoint. # get bounds of envelope
?upoint space_and_time_extended:hasGISCoordinateSystem ?uco.
?uco space_and_time_extended:hasProjectedCoordinate_x ?uxe.
?uxe sys:hasValue ?uxv.
?uxv sys:numericalValue ?ux.
?uco space_and_time_extended:hasProjectedCoordinate_y ?uye.
?uye sys:hasValue ?uyv.
?uyv sys:numericalValue ?uy.
?envelope citygml:lowerCornerPoint ?lpoint.
?lpoint space_and_time_extended:hasGISCoordinateSystem ?lco.
?lco space_and_time_extended:hasProjectedCoordinate_x ?lxe.
?lxe sys:hasValue ?lxv.
?lxv sys:numericalValue ?lx.
?lco space_and_time_extended:hasProjectedCoordinate_y ?lye.
?lye sys:hasValue ?lyv.
?lyv sys:numericalValue ?ly.
?cityM citygml:cityObjectMember ?bdn . #get bdn belongs to filterd envelope
Filter(xsd:double(?ly) > "{1}"^^xsd:double && xsd:double(?uy) < "{2}"^^xsd:double && xsd:double(?lx) > "{3}"^^xsd:double && xsd:double(?ux) < "{4}"^^xsd:double) #filter envelope within range
}}
LIMIT {0} #limit of building num
'''.format(self.bdnLimit, *yRange, *xRange))
########todo: in future delete stud data
return tuple(row['bdn'] for row in qb)
#todo
def filterBdns(self, bdns):
xRange, yRange = self.range
qstr = '''
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
PREFIX citygml:<http://www.theworldavatar.com/CityGMLOntology.owl#>
#PREFIX citygml:<file:/D:/citygmllearn/citygmlhandmade.owl#>
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
SELECT distinct ?bdn
WHERE {{
{{ #case1:building has no parts
?bdn a citygml:BuildingType.
?bdn citygml:boundedBy ?g. #building boundBy surface
?g a citygml:GroundSurfaceType. # surface is a ground
?g citygml:lod2MultiSurface ?ms. #ground has lod2multisurface ms
?ms citygml:surfaceMember ?pol. #ms has member polygon
?pol citygml:exterior ?lring. # polygon exterior is linear ring
?lring sys:contains ?po. # linear ring consists of points
?po space_and_time_extended:hasGISCoordinateSystem ?co. #point has coordinate system cs
?co space_and_time_extended:hasProjectedCoordinate_x ?xe. #[extract cs to get x,y,z value]
?xe sys:hasValue ?xv.
?xv sys:numericalValue ?x.
?co space_and_time_extended:hasProjectedCoordinate_y ?ye.
?ye sys:hasValue ?yv.
?yv sys:numericalValue ?y.
}} UNION {{ #case 2:
?bdn a citygml:BuildingType. #bdns that consists of part
?bdn citygml:consistsOfBuildingPart ?part.
?part a citygml:BuildingPartType.
?part citygml:boundedBy ?g.
?g a citygml:GroundSurfaceType.
?g citygml:lod2MultiSurface ?ms.
?ms citygml:surfaceMember ?pol.
?pol citygml:exterior ?lring.
?lring sys:contains ?po.
?po space_and_time_extended:hasGISCoordinateSystem ?co.
?co space_and_time_extended:hasProjectedCoordinate_x ?xe.
?xe sys:hasValue ?xv.
?xv sys:numericalValue ?x.
?co space_and_time_extended:hasProjectedCoordinate_y ?ye.
?ye sys:hasValue ?yv.
?yv sys:numericalValue ?y.
}}
filter(xsd:double(?y) > "{1}"^^xsd:double && xsd:double(?y) < "{2}"^^xsd:double && xsd:double(?x) > "{3}"^^xsd:double && xsd:double(?x) < "{4}"^^xsd:double)
}}
LIMIT {0} #limit of building num
'''.format(self.bdnLimit, *yRange, *xRange)
qre = self.query(qstr)
return tuple(row['bdn'] for row in qre)
def getBdnVertices(self, nodeuri):
#todo: modify query to get raw data,then pass to converter
qData= self.query('''
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
PREFIX citygml:<http://www.theworldavatar.com/CityGMLOntology.owl#>
SELECT ?x ?y ?z
WHERE {{
<{0}> citygml:boundedBy ?g. #building/part IRI boundBy surface
?g a citygml:GroundSurfaceType. # surface is a ground
?g citygml:lod2MultiSurface ?ms. #ground has lod2multisurface ms
?ms citygml:surfaceMember ?pol. #ms has member polygon
?pol citygml:exterior ?lring. # polygon exterior is linear ring
?lring sys:contains ?po. # linear ring consists of points
?po space_and_time_extended:hasGISCoordinateSystem ?co. #point has coordinate system cs
?co space_and_time_extended:hasProjectedCoordinate_x ?xe. #[extract cs to get x,y,z value]
?xe sys:hasValue ?xv.
?xv sys:numericalValue ?x.
?co space_and_time_extended:hasProjectedCoordinate_y ?ye.
?ye sys:hasValue ?yv.
?yv sys:numericalValue ?y.
?co space_and_time_extended:hasProjectedCoordinate_z ?ze.
?ze sys:hasValue ?zv.
?zv sys:numericalValue ?z.
}}
'''.format(nodeuri))
#query for roof max and ground min
qHeight = self.query("""
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
PREFIX citygml:<http://www.theworldavatar.com/CityGMLOntology.owl#>
SELECT (MIN(?z) AS ?min) (MAX(?zr) AS ?max) #select min of ground z values, max of roof z values
WHERE {{
<{0}> citygml:boundedBy ?g. #building/part IRI boundBy surface
?g a citygml:GroundSurfaceType. # surface is a GROUND
?g citygml:lod2MultiSurface ?ms. #[select all coordi z value for ground surface]
?ms citygml:surfaceMember ?pol.
?pol citygml:exterior ?lring.
?lring sys:contains ?po.
?po space_and_time_extended:hasGISCoordinateSystem ?co.
?co space_and_time_extended:hasProjectedCoordinate_z ?ze.
?ze sys:hasValue ?zv.
?zv sys:numericalValue ?z.
<{0}> citygml:boundedBy ?gr. #building/part IRI boundBy surface
?gr a citygml:RoofSurfaceType. # surface is a ROOF
?gr citygml:lod2MultiSurface ?msr. #[select all coordi z value for roof surface]
?msr citygml:surfaceMember ?polr.
?polr citygml:exterior ?lringr.
?lringr sys:contains ?por.
?por space_and_time_extended:hasGISCoordinateSystem ?cor.
?cor space_and_time_extended:hasProjectedCoordinate_z ?zer.
?zer sys:hasValue ?zvr.
?zvr sys:numericalValue ?zr.
}} GROUP BY ?g # group by each ground IRI
""".format(nodeuri))
# define coordi convert function : building kb ---> adms
Bdn2ADMSCoordC = defineCoordConvert('epsg:28992','epsg:32648')
#float(row['min'].toPython()), float(row['max'].toPython())
zlimit = tuple( Bdn2ADMSCoordC(float(row['min'].toPython()), float(row['max'].toPython())) for row in qHeight )[0]
return ( list(Bdn2ADMSCoordC(float(row['x'].toPython()), float(row['y'].toPython())) for row in qData), zlimit)
def getMetrics(self, nodeuri):
base = None
if self.hasBuildingPart(nodeuri):
print('{0} has building part'.format(nodeuri))
#get list of building part
bparts = list(row['p'] for row in self.query(
''' PREFIX citygml:<http://www.theworldavatar.com/CityGMLOntology.owl#>
SELECT ?p
WHERE
{
?b citygml:consistsOfBuildingPart ?p.
}'''))
#get metrics for each part
polygons = tuple( Polygon(*self.getBdnVertices(uri)) for uri in bparts)
#get centroid for pols
base = Polygon.combineBaseMulti(polygons)
else: # no building part
print('{0} NOT has building part'.format(nodeuri))
verticesNHeight = self.getBdnVertices(nodeuri)
#print(verticesHeight)
base = Polygon(*verticesNHeight)
#todo pack return result
#('BldName','BldType','BldX','BldY','BldHeight', 'BldLength', 'BldWidth', 'BldAngle')
print ((nodeuri.toPython(), base.type, base.centroid[0], base.centroid[1], base.height, base.length, base.width, base.angle) )
#todo: coordinate coversion for centroid!!!
return (uri2name(nodeuri.toPython()), base.type, base.centroid[0], base.centroid[1], base.height, base.length, base.width, base.angle)
#calulate centro
#choose shape
#calculate angle
#probably for the best if we construct a type of polygon instead?
def hasBuildingPart(self, nodeuri):
#self.connectDB(nodeuri)
print('checking if building part for :{0}'.format(nodeuri))
qData = self.query('''
PREFIX citygml:<http://www.theworldavatar.com/CityGMLOntology.owl#>
ASK
{{
<{0}> citygml:consistsOfBuildingPart ?p
}}
'''.format(nodeuri))
qData, = tuple(qData)
return qData
def coreBdn2Src(self):
#compare src coords to each bdn
for src in self.rawSrc:
closed, dClosed, first = None, sys.maxsize, True
print('find closed bdn for src: '+src.SrcName+" with x: "+str(src.SrcX1) +" y: "+str(src.SrcY1))
for i in range(len(self.rawBdn.BldX)):
#print('bdn x: ' +str( self.rawBdn.BldX[i]))
dx, dy = self.rawBdn.BldX[i] - src.SrcX1, self.rawBdn.BldY[i] - src.SrcY1
d = dx * dx + dy * dy
if first:
dClosed = d
closed = self.rawBdn.BldName[i]
first = False
#print('d:{0} dclosed:{1}'.format(d, dClosed))
if d - dClosed < 0:
closed = self.rawBdn.BldName[i]
dClosed = d
print('new smallest distance: '+str(dClosed))
if closed is not None:
src.setMainBuilding(closed)
else: #err handling, something is wrong if no closed building is found, just throw it
raise Exception('Dear lord, no closed buildinf found for src: '+src.SrcName)
def getOpt(self, PolNames, SrcNames):
numPol = len(PolNames)
return self.OPT(numPol,PolNames, [1]*numPol,[0]*numPol,[1]*numPol,[3]*numPol,[0]*numPol,[0]*numPol,[0]*numPol,[0]*80,[0]*80,['ug/m3']*4,1,0,1,"Grouptank001",SrcNames,0)
def polIRI2Name(self, polIRI):
substances = {'http://www.theworldavatar.com/OntoCAPE/OntoCAPE/material/substance/substance.owl
if polIRI in substances.keys():
print('Found: '+ substances[polIRI])
return substances[polIRI]
else:
print ('Not found !!!!')
raise Exception('This substance is not defined!!!!')
def getWeather(self):
#finish writing met
metLoc= r"test.met"
cobbling.run(meteo_data = metLoc)
#pointing to met in apl
return os.path.realpath(metLoc)
def get(self):
#get all src data
self.rawSrc = self.getSrcData()
##todo: think about this case: when nothing is found ,where should we handle it?
##then nothing should be written, and catch this exception and mute it in main function
if self.rawSrc is None:
raise Exception("No src in found to requiries")
#get all building data
self.rawBdn = self.getBdnData()
#print('raw building: ')
#print(self.rawBdn)
rawOpt = self.getOpt(self.pollutants, [s.SrcName for s in self.rawSrc])
self.coreBdn2Src()
#for debuging, in future,define this for data type, i dont think it auto rpint for objet
for src in self.rawSrc:
print(src)
met = self.getWeather()
return {'Src': self.rawSrc, 'Bdn': self.rawBdn, 'Opt': rawOpt, 'Met': met}
def queryEndpoint(self, str):
print('requesting @ '+self.address+" with query:")
#print(str)
resp = requests.get(self.address, params = {'query':str}, timeout = 1500, headers = {'user-agent': 'my-app/0.0.1'})
print(resp.json())
qres = jsresult.JSONResult(resp.json())#json decoded
print(qres)
return qres
def queryLocalGraph(self, str):
qres = self.g.query(str)
return qres
def Uri2Local(uri):
return uri.replace("http://www.jparksimulator.com",config.root ).replace("http://www.theworldavatar.com",config.root)
def remote2local(self, func):
def functionWrapper(self, address):
address = self.Uri2Local(address)
func(self, address)
return functionWrapper
def connectDB(self, address, connectType = 'parse'):
def connectDBActual( address):
#obsolete: use rdflib locally
self.address = address
if connectType is 'parse':
self.g = rdflib.Graph()#comment out in future
self.g.parse(address)#comment out in future
self.qmethodMap = {'parse': self.queryLocalGraph, 'endpoint':self.queryEndpoint}
if not sameGraph(address, self.address):
print ('parsing graph: '+ address)
if connectType not in self.qmethodMap:
raise exception('db connection method not defined')
#self.connectType = connectType
self.query = self.qmethodMap[connectType]
connectDBActual(address)
def sameGraph(uri1, uri2):
def trimloc(uri):
if uri is None:
return None
else:
return uri.split('
return trimloc(uri1) == trimloc(uri2)
def defineCoordConvert(inCode, outCode):
inProj = Proj(init=inCode)
outProj = Proj(init=outCode)
def coordConvert(x,y):
return transform(inProj, outProj, x, y)
return coordConvert
def uri2name(uri):
base = 'http://www.theworldavatar.com/'
return uri.split('
| true
| true
|
f70c91590d30de58fe09b0711b50eb211f5a6ef8
| 36,807
|
py
|
Python
|
oscar/lib/python3.6/site-packages/_pytest/config/__init__.py
|
bcruz97/django-oscar
|
df3dc1d99e70765a6c95a67f5b076644cd482852
|
[
"BSD-3-Clause"
] | 445
|
2019-01-26T13:50:26.000Z
|
2022-03-18T05:17:38.000Z
|
Library/lib/python3.7/site-packages/_pytest/config/__init__.py
|
gengyong/Carnets
|
8930a14f69360d4db115a85ff9e0f6efa80fa2e7
|
[
"BSD-3-Clause"
] | 242
|
2019-01-29T15:48:27.000Z
|
2022-03-31T22:09:21.000Z
|
Library/lib/python3.7/site-packages/_pytest/config/__init__.py
|
gengyong/Carnets
|
8930a14f69360d4db115a85ff9e0f6efa80fa2e7
|
[
"BSD-3-Clause"
] | 31
|
2019-03-10T09:51:27.000Z
|
2022-02-14T23:11:12.000Z
|
""" command line options, ini-file and conftest.py processing. """
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import copy
import inspect
import os
import shlex
import sys
import types
import warnings
import py
import six
from pkg_resources import parse_version
from pluggy import HookimplMarker
from pluggy import HookspecMarker
from pluggy import PluginManager
import _pytest._code
import _pytest.assertion
import _pytest.hookspec # the extension point definitions
from .exceptions import PrintHelp
from .exceptions import UsageError
from .findpaths import determine_setup
from .findpaths import exists
from _pytest._code import ExceptionInfo
from _pytest._code import filter_traceback
from _pytest.compat import lru_cache
from _pytest.compat import safe_str
from _pytest.outcomes import Skipped
hookimpl = HookimplMarker("pytest")
hookspec = HookspecMarker("pytest")
class ConftestImportFailure(Exception):
    """Raised when importing a conftest.py fails.

    Carries the offending path and the exc_info triple so callers can
    render a useful report.
    """

    def __init__(self, path, excinfo):
        # py2/py3-compatible super call; args stays (path, excinfo)
        super(ConftestImportFailure, self).__init__(path, excinfo)
        self.path = path
        self.excinfo = excinfo
def main(args=None, plugins=None):
    """ return exit code, after performing an in-process test run.

    :arg args: list of command line arguments.

    :arg plugins: list of plugin objects to be auto-registered during
                  initialization.
    """
    # imported lazily to avoid an import cycle with _pytest.main
    from _pytest.main import EXIT_USAGEERROR

    try:
        try:
            # build the Config (parses args, loads plugins and conftests)
            config = _prepareconfig(args, plugins)
        except ConftestImportFailure as e:
            # a conftest failed to import: print a short red traceback
            # to stderr and return exit code 4
            exc_info = ExceptionInfo(e.excinfo)
            tw = py.io.TerminalWriter(sys.stderr)
            tw.line(
                "ImportError while loading conftest '{e.path}'.".format(e=e), red=True
            )
            # hide pytest-internal frames from the reported traceback
            exc_info.traceback = exc_info.traceback.filter(filter_traceback)
            exc_repr = (
                exc_info.getrepr(style="short", chain=False)
                if exc_info.traceback
                else exc_info.exconly()
            )
            formatted_tb = safe_str(exc_repr)
            for line in formatted_tb.splitlines():
                tw.line(line.rstrip(), red=True)
            return 4
        else:
            try:
                # run the actual test session; its return value is the exit code
                return config.hook.pytest_cmdline_main(config=config)
            finally:
                # always run unconfigure hooks, even when the session raises
                config._ensure_unconfigure()
    except UsageError as e:
        # bad command line / ini usage: report each message and return the
        # dedicated usage-error exit code
        tw = py.io.TerminalWriter(sys.stderr)
        for msg in e.args:
            tw.line("ERROR: {}\n".format(msg), red=True)
        return EXIT_USAGEERROR
class cmdline(object):  # compatibility namespace
    # Deprecated alias kept so ``pytest.cmdline.main(...)`` keeps working.
    main = staticmethod(main)
def filename_arg(path, optname):
    """Argparse type validator ensuring *path* is not a directory.

    :path: candidate filename
    :optname: option name, interpolated into the error message
    :raises UsageError: if *path* points at a directory
    """
    if not os.path.isdir(path):
        return path
    raise UsageError("{} must be a filename, given: {}".format(optname, path))
def directory_arg(path, optname):
    """Argparse type validator requiring *path* to be an existing directory.

    :path: candidate directory path
    :optname: option name, interpolated into the error message
    :raises UsageError: if *path* is not an existing directory
    """
    if os.path.isdir(path):
        return path
    raise UsageError("{} must be a directory, given: {}".format(optname, path))
# Plugins shipped with pytest itself and imported for every run; they are
# registered under their basename but actually live under the ``_pytest.``
# package prefix (see PytestPluginManager.import_plugin).
default_plugins = (
    "mark",
    "main",
    "terminal",
    "runner",
    "python",
    "fixtures",
    "debugging",
    "unittest",
    "capture",
    "skipping",
    "tmpdir",
    "monkeypatch",
    "recwarn",
    "pastebin",
    "helpconfig",
    "nose",
    "assertion",
    "junitxml",
    "resultlog",
    "doctest",
    "cacheprovider",
    "freeze_support",
    "setuponly",
    "setupplan",
    "stepwise",
    "warnings",
    "logging",
)
# "pytester" is builtin but not loaded by default; it is only importable.
builtin_plugins = set(default_plugins)
builtin_plugins.add("pytester")
def get_config():
    """Return a fresh Config instance with all default plugins imported."""
    # subsequent calls to main will create a fresh instance
    pluginmanager = PytestPluginManager()
    config = Config(pluginmanager)
    for spec in default_plugins:
        pluginmanager.import_plugin(spec)
    return config
def get_plugin_manager():
    """
    Obtain a new instance of the
    :py:class:`_pytest.config.PytestPluginManager`, with default plugins
    already loaded.
    This function can be used by integration with other tools, like hooking
    into pytest to run tests into an IDE.
    """
    return get_config().pluginmanager
def _prepareconfig(args=None, plugins=None):
    """Build a Config from *args*, register *plugins*, and run cmdline parsing.

    *args* may be None (use sys.argv), a py.path.local, a list/tuple, or a
    (deprecated) command line string which is shlex-split.
    """
    warning = None
    if args is None:
        args = sys.argv[1:]
    elif isinstance(args, py.path.local):
        args = [str(args)]
    elif not isinstance(args, (tuple, list)):
        if not isinstance(args, str):
            raise ValueError("not a string or argument list: %r" % (args,))
        # passing a plain command line string is deprecated
        args = shlex.split(args, posix=sys.platform != "win32")
        from _pytest import deprecated
        warning = deprecated.MAIN_STR_ARGS
    config = get_config()
    pluginmanager = config.pluginmanager
    try:
        if plugins:
            for plugin in plugins:
                if isinstance(plugin, six.string_types):
                    pluginmanager.consider_pluginarg(plugin)
                else:
                    pluginmanager.register(plugin)
        if warning:
            from _pytest.warnings import _issue_config_warning
            _issue_config_warning(warning, config=config, stacklevel=4)
        return pluginmanager.hook.pytest_cmdline_parse(
            pluginmanager=pluginmanager, args=args
        )
    except BaseException:
        # unwind cleanly so pytest_unconfigure and cleanups still run
        config._ensure_unconfigure()
        raise
class PytestPluginManager(PluginManager):
    """
    Overwrites :py:class:`pluggy.PluginManager <pluggy.PluginManager>` to add pytest-specific
    functionality:

    * loading plugins from the command line, ``PYTEST_PLUGINS`` env variable and
      ``pytest_plugins`` global variables found in plugins being loaded;
    * ``conftest.py`` loading during start-up;
    """
    def __init__(self):
        """Set up hookspecs, conftest bookkeeping, and optional debug tracing."""
        super(PytestPluginManager, self).__init__("pytest")
        self._conftest_plugins = set()
        # state related to local conftest plugins
        self._dirpath2confmods = {}
        self._conftestpath2mod = {}
        self._confcutdir = None
        self._noconftest = False
        self._duplicatepaths = set()
        self.add_hookspecs(_pytest.hookspec)
        self.register(self)
        if os.environ.get("PYTEST_DEBUG"):
            err = sys.stderr
            encoding = getattr(err, "encoding", "utf8")
            try:
                err = py.io.dupfile(err, encoding=encoding)
            except Exception:
                pass
            self.trace.root.setwriter(err.write)
            self.enable_tracing()
        # Config._consider_importhook will set a real object if required.
        self.rewrite_hook = _pytest.assertion.DummyRewriteHook()
        # Used to know when we are importing conftests after the pytest_configure stage
        self._configured = False
    def addhooks(self, module_or_class):
        """
        .. deprecated:: 2.8
            Use :py:meth:`pluggy.PluginManager.add_hookspecs <PluginManager.add_hookspecs>`
            instead.
        """
        warning = dict(
            code="I2",
            fslocation=_pytest._code.getfslineno(sys._getframe(1)),
            nodeid=None,
            message="use pluginmanager.add_hookspecs instead of "
            "deprecated addhooks() method.",
        )
        self._warn(warning)
        return self.add_hookspecs(module_or_class)
    def parse_hookimpl_opts(self, plugin, name):
        """Return hookimpl options for *name* on *plugin*, or None if not a hook."""
        # pytest hooks are always prefixed with pytest_
        # so we avoid accessing possibly non-readable attributes
        # (see issue #1073)
        if not name.startswith("pytest_"):
            return
        # ignore some historic special names which can not be hooks anyway
        if name == "pytest_plugins" or name.startswith("pytest_funcarg__"):
            return
        method = getattr(plugin, name)
        opts = super(PytestPluginManager, self).parse_hookimpl_opts(plugin, name)
        # consider only actual functions for hooks (#3775)
        if not inspect.isroutine(method):
            return
        # collect unmarked hooks as long as they have the `pytest_' prefix
        if opts is None and name.startswith("pytest_"):
            opts = {}
        if opts is not None:
            for name in ("tryfirst", "trylast", "optionalhook", "hookwrapper"):
                opts.setdefault(name, hasattr(method, name))
        return opts
    def parse_hookspec_opts(self, module_or_class, name):
        """Return hookspec options, treating any ``pytest_``-prefixed name as a spec."""
        opts = super(PytestPluginManager, self).parse_hookspec_opts(
            module_or_class, name
        )
        if opts is None:
            method = getattr(module_or_class, name)
            if name.startswith("pytest_"):
                opts = {
                    "firstresult": hasattr(method, "firstresult"),
                    "historic": hasattr(method, "historic"),
                }
        return opts
    def register(self, plugin, name=None):
        """Register *plugin*, refusing plugins that were merged into the core."""
        if name in ["pytest_catchlog", "pytest_capturelog"]:
            self._warn(
                "{} plugin has been merged into the core, "
                "please remove it from your requirements.".format(
                    name.replace("_", "-")
                )
            )
            return
        ret = super(PytestPluginManager, self).register(plugin, name)
        if ret:
            self.hook.pytest_plugin_registered.call_historic(
                kwargs=dict(plugin=plugin, manager=self)
            )
            if isinstance(plugin, types.ModuleType):
                self.consider_module(plugin)
        return ret
    def getplugin(self, name):
        # support deprecated naming because plugins (xdist e.g.) use it
        return self.get_plugin(name)
    def hasplugin(self, name):
        """Return True if the plugin with the given name is registered."""
        return bool(self.get_plugin(name))
    def pytest_configure(self, config):
        """Register legacy tryfirst/trylast markers; flag configuration as done."""
        # XXX now that the pluginmanager exposes hookimpl(tryfirst...)
        # we should remove tryfirst/trylast as markers
        config.addinivalue_line(
            "markers",
            "tryfirst: mark a hook implementation function such that the "
            "plugin machinery will try to call it first/as early as possible.",
        )
        config.addinivalue_line(
            "markers",
            "trylast: mark a hook implementation function such that the "
            "plugin machinery will try to call it last/as late as possible.",
        )
        self._configured = True
    def _warn(self, message):
        """Emit a pytest_logwarning hook call; *message* is a str or a kwargs dict."""
        kwargs = (
            message
            if isinstance(message, dict)
            else {"code": "I1", "message": message, "fslocation": None, "nodeid": None}
        )
        self.hook.pytest_logwarning.call_historic(kwargs=kwargs)
    #
    # internal API for local conftest plugin handling
    #
    def _set_initial_conftests(self, namespace):
        """ load initial conftest files given a preparsed "namespace".
        As conftest files may add their own command line options
        which have arguments ('--my-opt somepath') we might get some
        false positives. All builtin and 3rd party plugins will have
        been loaded, however, so common options will not confuse our logic
        here.
        """
        current = py.path.local()
        self._confcutdir = (
            current.join(namespace.confcutdir, abs=True)
            if namespace.confcutdir
            else None
        )
        self._noconftest = namespace.noconftest
        self._using_pyargs = namespace.pyargs
        testpaths = namespace.file_or_dir
        foundanchor = False
        for path in testpaths:
            path = str(path)
            # remove node-id syntax
            i = path.find("::")
            if i != -1:
                path = path[:i]
            anchor = current.join(path, abs=1)
            if exists(anchor):  # we found some file object
                self._try_load_conftest(anchor)
                foundanchor = True
        if not foundanchor:
            self._try_load_conftest(current)
    def _try_load_conftest(self, anchor):
        """Load conftests for *anchor* and for its immediate test* subdirectories."""
        self._getconftestmodules(anchor)
        # let's also consider test* subdirs
        if anchor.check(dir=1):
            for x in anchor.listdir("test*"):
                if x.check(dir=1):
                    self._getconftestmodules(x)
    @lru_cache(maxsize=128)
    def _getconftestmodules(self, path):
        """Return the list of conftest modules relevant for *path* (cached)."""
        if self._noconftest:
            return []
        if path.isfile():
            directory = path.dirpath()
        else:
            directory = path
        if six.PY2:  # py2 is not using lru_cache.
            try:
                return self._dirpath2confmods[directory]
            except KeyError:
                pass
        # XXX these days we may rather want to use config.rootdir
        # and allow users to opt into looking into the rootdir parent
        # directories instead of requiring to specify confcutdir
        clist = []
        for parent in directory.realpath().parts():
            # skip everything above --confcutdir
            if self._confcutdir and self._confcutdir.relto(parent):
                continue
            conftestpath = parent.join("conftest.py")
            if conftestpath.isfile():
                mod = self._importconftest(conftestpath)
                clist.append(mod)
        self._dirpath2confmods[directory] = clist
        return clist
    def _rget_with_confmod(self, name, path):
        """Return (module, value) for *name* from the closest conftest defining it."""
        modules = self._getconftestmodules(path)
        for mod in reversed(modules):
            try:
                return mod, getattr(mod, name)
            except AttributeError:
                continue
        raise KeyError(name)
    def _importconftest(self, conftestpath):
        """Import (and cache) the conftest module at *conftestpath*.

        Wraps any import error in ConftestImportFailure and registers the
        imported module as a plugin.
        """
        try:
            return self._conftestpath2mod[conftestpath]
        except KeyError:
            pkgpath = conftestpath.pypkgpath()
            if pkgpath is None:
                _ensure_removed_sysmodule(conftestpath.purebasename)
            try:
                mod = conftestpath.pyimport()
                if (
                    hasattr(mod, "pytest_plugins")
                    and self._configured
                    and not self._using_pyargs
                ):
                    from _pytest.deprecated import (
                        PYTEST_PLUGINS_FROM_NON_TOP_LEVEL_CONFTEST
                    )
                    warnings.warn_explicit(
                        PYTEST_PLUGINS_FROM_NON_TOP_LEVEL_CONFTEST,
                        category=None,
                        filename=str(conftestpath),
                        lineno=0,
                    )
            except Exception:
                raise ConftestImportFailure(conftestpath, sys.exc_info())
            self._conftest_plugins.add(mod)
            self._conftestpath2mod[conftestpath] = mod
            dirpath = conftestpath.dirpath()
            # update already-computed conftest lists that this module affects
            if dirpath in self._dirpath2confmods:
                for path, mods in self._dirpath2confmods.items():
                    if path and path.relto(dirpath) or path == dirpath:
                        assert mod not in mods
                        mods.append(mod)
            self.trace("loaded conftestmodule %r" % (mod))
            self.consider_conftest(mod)
            return mod
    #
    # API for bootstrapping plugin loading
    #
    #
    def consider_preparse(self, args):
        """Scan *args* for ``-p name`` pairs and consider each named plugin."""
        for opt1, opt2 in zip(args, args[1:]):
            if opt1 == "-p":
                self.consider_pluginarg(opt2)
    def consider_pluginarg(self, arg):
        """Import the plugin named by *arg*, or block it for a ``no:name`` spec."""
        if arg.startswith("no:"):
            name = arg[3:]
            # PR #4304 : remove stepwise if cacheprovider is blocked
            if name == "cacheprovider":
                self.set_blocked("stepwise")
                self.set_blocked("pytest_stepwise")
            self.set_blocked(name)
            if not name.startswith("pytest_"):
                self.set_blocked("pytest_" + name)
        else:
            self.import_plugin(arg)
    def consider_conftest(self, conftestmodule):
        """Register a conftest module as a plugin under its file name."""
        self.register(conftestmodule, name=conftestmodule.__file__)
    def consider_env(self):
        """Import plugins named in the PYTEST_PLUGINS environment variable."""
        self._import_plugin_specs(os.environ.get("PYTEST_PLUGINS"))
    def consider_module(self, mod):
        """Import plugins named in *mod*'s ``pytest_plugins`` attribute."""
        self._import_plugin_specs(getattr(mod, "pytest_plugins", []))
    def _import_plugin_specs(self, spec):
        """Normalize *spec* into plugin names and import each one."""
        plugins = _get_plugin_specs_as_list(spec)
        for import_spec in plugins:
            self.import_plugin(import_spec)
    def import_plugin(self, modname):
        """Import and register plugin module *modname* unless blocked or known."""
        # most often modname refers to builtin modules, e.g. "pytester",
        # "terminal" or "capture". Those plugins are registered under their
        # basename for historic purposes but must be imported with the
        # _pytest prefix.
        assert isinstance(modname, (six.text_type, str)), (
            "module name as text required, got %r" % modname
        )
        modname = str(modname)
        if self.is_blocked(modname) or self.get_plugin(modname) is not None:
            return
        if modname in builtin_plugins:
            importspec = "_pytest." + modname
        else:
            importspec = modname
        self.rewrite_hook.mark_rewrite(importspec)
        try:
            __import__(importspec)
        except ImportError as e:
            # re-raise with the plugin name included, keeping the traceback
            new_exc_type = ImportError
            new_exc_message = 'Error importing plugin "%s": %s' % (
                modname,
                safe_str(e.args[0]),
            )
            new_exc = new_exc_type(new_exc_message)
            six.reraise(new_exc_type, new_exc, sys.exc_info()[2])
        except Skipped as e:
            self._warn("skipped plugin %r: %s" % ((modname, e.msg)))
        else:
            mod = sys.modules[importspec]
            self.register(mod, modname)
def _get_plugin_specs_as_list(specs):
    """Normalize a plugin spec into a list of plugin names.

    *specs* may be None (yields []), a comma-separated string, or a
    list/tuple of names; anything else raises UsageError.
    """
    if specs is None:
        return []
    if isinstance(specs, str):
        # an empty string means "no plugins"
        return specs.split(",") if specs else []
    if isinstance(specs, (list, tuple)):
        return list(specs)
    raise UsageError(
        "Plugin specs must be a ','-separated string or a "
        "list/tuple of strings for plugin names. Given: %r" % specs
    )
def _ensure_removed_sysmodule(modname):
    """Drop *modname* from sys.modules if present; a no-op when absent."""
    sys.modules.pop(modname, None)
class Notset(object):
    """Sentinel type whose single instance means "no value was provided"."""
    def __repr__(self):
        return "<NOTSET>"
# Singleton sentinel, used e.g. as the default for Config.getoption().
notset = Notset()
def _iter_rewritable_modules(package_files):
    """Yield module/package names eligible for assertion rewriting.

    Top-level modules ("plugin.py") yield their stem; single-level
    packages ("plugin/__init__.py") yield the directory name. Deeper
    paths are ignored.
    """
    for fname in package_files:
        if "/" not in fname and fname.endswith(".py"):
            # a top-level module such as "plugin.py"
            yield os.path.splitext(fname)[0]
        elif fname.count("/") == 1 and fname.endswith("__init__.py"):
            # a single-level package such as "plugin/__init__.py"
            yield os.path.dirname(fname)
class Config(object):
    """ access to configuration values, pluginmanager and plugin hooks. """
    def __init__(self, pluginmanager):
        #: access to command line option as attributes.
        #: (deprecated), use :py:func:`getoption() <_pytest.config.Config.getoption>` instead
        self.option = argparse.Namespace()
        from .argparsing import Parser, FILE_OR_DIR
        _a = FILE_OR_DIR
        self._parser = Parser(
            usage="%%(prog)s [options] [%s] [%s] [...]" % (_a, _a),
            processopt=self._processopt,
        )
        #: a pluginmanager instance
        self.pluginmanager = pluginmanager
        self.trace = self.pluginmanager.trace.root.get("config")
        self.hook = self.pluginmanager.hook
        self._inicache = {}
        self._override_ini = ()
        self._opt2dest = {}
        self._cleanup = []
        self._warn = self.pluginmanager._warn
        self.pluginmanager.register(self, "pytestconfig")
        self._configured = False
        # legacy pytest_namespace support: attach contributed names to pytest
        def do_setns(dic):
            import pytest
            setns(pytest, dic)
        self.hook.pytest_namespace.call_historic(do_setns, {})
        self.hook.pytest_addoption.call_historic(kwargs=dict(parser=self._parser))
    def add_cleanup(self, func):
        """ Add a function to be called when the config object gets out of
        use (usually coinciding with pytest_unconfigure)."""
        self._cleanup.append(func)
    def _do_configure(self):
        """Fire pytest_configure exactly once for this Config."""
        assert not self._configured
        self._configured = True
        self.hook.pytest_configure.call_historic(kwargs=dict(config=self))
    def _ensure_unconfigure(self):
        """Fire pytest_unconfigure (if configured) and run registered cleanups."""
        if self._configured:
            self._configured = False
            self.hook.pytest_unconfigure(config=self)
            self.hook.pytest_configure._call_history = []
        while self._cleanup:
            fin = self._cleanup.pop()
            fin()
    def warn(self, code, message, fslocation=None, nodeid=None):
        """
        .. deprecated:: 3.8
            Use :py:func:`warnings.warn` or :py:func:`warnings.warn_explicit` directly instead.
        Generate a warning for this test session.
        """
        from _pytest.warning_types import RemovedInPytest4Warning
        # NOTE(review): upstream uses ``>= 2`` here; with an exact 2-tuple this
        # falls back to "unknown file" -- confirm whether that is intended.
        if isinstance(fslocation, (tuple, list)) and len(fslocation) > 2:
            filename, lineno = fslocation[:2]
        else:
            filename = "unknown file"
            lineno = 0
        msg = "config.warn has been deprecated, use warnings.warn instead"
        if nodeid:
            msg = "{}: {}".format(nodeid, msg)
        warnings.warn_explicit(
            RemovedInPytest4Warning(msg),
            category=None,
            filename=filename,
            lineno=lineno,
        )
        self.hook.pytest_logwarning.call_historic(
            kwargs=dict(
                code=code, message=message, fslocation=fslocation, nodeid=nodeid
            )
        )
    def get_terminal_writer(self):
        """Return the TerminalWriter used by the terminal reporter plugin."""
        return self.pluginmanager.get_plugin("terminalreporter")._tw
    def pytest_cmdline_parse(self, pluginmanager, args):
        """Default pytest_cmdline_parse implementation: parse and return self."""
        # REF1 assert self == pluginmanager.config, (self, pluginmanager.config)
        self.parse(args)
        return self
    def notify_exception(self, excinfo, option=None):
        """Report an internal error via pytest_internalerror, else to stderr."""
        if option and option.fulltrace:
            style = "long"
        else:
            style = "native"
        excrepr = excinfo.getrepr(
            funcargs=True, showlocals=getattr(option, "showlocals", False), style=style
        )
        res = self.hook.pytest_internalerror(excrepr=excrepr, excinfo=excinfo)
        if not any(res):
            for line in str(excrepr).split("\n"):
                sys.stderr.write("INTERNALERROR> %s\n" % line)
                sys.stderr.flush()
    def cwd_relative_nodeid(self, nodeid):
        # nodeid's are relative to the rootpath, compute relative to cwd
        if self.invocation_dir != self.rootdir:
            fullpath = self.rootdir.join(nodeid)
            nodeid = self.invocation_dir.bestrelpath(fullpath)
        return nodeid
    @classmethod
    def fromdictargs(cls, option_dict, args):
        """ constructor useable for subprocesses. """
        config = get_config()
        config.option.__dict__.update(option_dict)
        config.parse(args, addopts=False)
        for x in config.option.plugins:
            config.pluginmanager.consider_pluginarg(x)
        return config
    def _processopt(self, opt):
        """Record option-name-to-dest mapping and seed defaults on self.option."""
        for name in opt._short_opts + opt._long_opts:
            self._opt2dest[name] = opt.dest
        if hasattr(opt, "default") and opt.dest:
            if not hasattr(self.option, opt.dest):
                setattr(self.option, opt.dest, opt.default)
    @hookimpl(trylast=True)
    def pytest_load_initial_conftests(self, early_config):
        """Default (trylast) hook: load conftests from the preparsed namespace."""
        self.pluginmanager._set_initial_conftests(early_config.known_args_namespace)
    def _initini(self, args):
        """Determine rootdir/inifile and register the core ini options."""
        ns, unknown_args = self._parser.parse_known_and_unknown_args(
            args, namespace=copy.copy(self.option)
        )
        r = determine_setup(
            ns.inifilename,
            ns.file_or_dir + unknown_args,
            rootdir_cmd_arg=ns.rootdir or None,
            config=self,
        )
        self.rootdir, self.inifile, self.inicfg = r
        self._parser.extra_info["rootdir"] = self.rootdir
        self._parser.extra_info["inifile"] = self.inifile
        self.invocation_dir = py.path.local()
        self._parser.addini("addopts", "extra command line options", "args")
        self._parser.addini("minversion", "minimally required pytest version")
        self._override_ini = ns.override_ini or ()
    def _consider_importhook(self, args):
        """Install the PEP 302 import hook if using assertion rewriting.
        Needs to parse the --assert=<mode> option from the commandline
        and find all the installed plugins to mark them for rewriting
        by the importhook.
        """
        ns, unknown_args = self._parser.parse_known_and_unknown_args(args)
        mode = ns.assertmode
        if mode == "rewrite":
            try:
                hook = _pytest.assertion.install_importhook(self)
            except SystemError:
                mode = "plain"
            else:
                self._mark_plugins_for_rewrite(hook)
        _warn_about_missing_assertion(mode)
    def _mark_plugins_for_rewrite(self, hook):
        """
        Given an importhook, mark for rewrite any top-level
        modules or packages in the distribution package for
        all pytest plugins.
        """
        import pkg_resources
        self.pluginmanager.rewrite_hook = hook
        if os.environ.get("PYTEST_DISABLE_PLUGIN_AUTOLOAD"):
            # We don't autoload from setuptools entry points, no need to continue.
            return
        # 'RECORD' available for plugins installed normally (pip install)
        # 'SOURCES.txt' available for plugins installed in dev mode (pip install -e)
        # for installed plugins 'SOURCES.txt' returns an empty list, and vice-versa
        # so it shouldn't be an issue
        metadata_files = "RECORD", "SOURCES.txt"
        package_files = (
            entry.split(",")[0]
            for entrypoint in pkg_resources.iter_entry_points("pytest11")
            for metadata in metadata_files
            for entry in entrypoint.dist._get_metadata(metadata)
        )
        for name in _iter_rewritable_modules(package_files):
            hook.mark_rewrite(name)
    def _validate_args(self, args):
        """Validate known args."""
        self._parser.parse_known_and_unknown_args(
            args, namespace=copy.copy(self.option)
        )
        return args
    def _preparse(self, args, addopts=True):
        """Early parsing: env/ini addopts, import hook, plugins, conftests."""
        if addopts:
            env_addopts = os.environ.get("PYTEST_ADDOPTS", "")
            if len(env_addopts):
                args[:] = self._validate_args(shlex.split(env_addopts)) + args
        self._initini(args)
        if addopts:
            args[:] = self._validate_args(self.getini("addopts")) + args
        self._checkversion()
        self._consider_importhook(args)
        self.pluginmanager.consider_preparse(args)
        if not os.environ.get("PYTEST_DISABLE_PLUGIN_AUTOLOAD"):
            # autoload plugins from setuptools entry points unless disabled
            # via the PYTEST_DISABLE_PLUGIN_AUTOLOAD environment variable
            self.pluginmanager.load_setuptools_entrypoints("pytest11")
        self.pluginmanager.consider_env()
        self.known_args_namespace = ns = self._parser.parse_known_args(
            args, namespace=copy.copy(self.option)
        )
        if self.known_args_namespace.confcutdir is None and self.inifile:
            confcutdir = py.path.local(self.inifile).dirname
            self.known_args_namespace.confcutdir = confcutdir
        try:
            self.hook.pytest_load_initial_conftests(
                early_config=self, args=args, parser=self._parser
            )
        except ConftestImportFailure:
            e = sys.exc_info()[1]
            if ns.help or ns.version:
                # we don't want to prevent --help/--version to work
                # so just let it pass and print a warning at the end
                self._warn("could not load initial conftests (%s)\n" % e.path)
            else:
                raise
    def _checkversion(self):
        """Abort if the ini file's ``minversion`` exceeds this pytest version."""
        import pytest
        minver = self.inicfg.get("minversion", None)
        if minver:
            if parse_version(minver) > parse_version(pytest.__version__):
                raise pytest.UsageError(
                    "%s:%d: requires pytest-%s, actual pytest-%s'"
                    % (
                        self.inicfg.config.path,
                        self.inicfg.lineof("minversion"),
                        minver,
                        pytest.__version__,
                    )
                )
    def parse(self, args, addopts=True):
        # parse given cmdline arguments into this config object.
        assert not hasattr(
            self, "args"
        ), "can only parse cmdline args at most once per Config object"
        self._origargs = args
        self.hook.pytest_addhooks.call_historic(
            kwargs=dict(pluginmanager=self.pluginmanager)
        )
        self._preparse(args, addopts=addopts)
        # XXX deprecated hook:
        self.hook.pytest_cmdline_preparse(config=self, args=args)
        self._parser.after_preparse = True
        try:
            args = self._parser.parse_setoption(
                args, self.option, namespace=self.option
            )
            if not args:
                # no paths given: fall back to ini testpaths, then to cwd
                if self.invocation_dir == self.rootdir:
                    args = self.getini("testpaths")
                if not args:
                    args = [str(self.invocation_dir)]
            self.args = args
        except PrintHelp:
            pass
    def addinivalue_line(self, name, line):
        """ add a line to an ini-file option. The option must have been
        declared but might not yet be set in which case the line becomes the
        the first line in its value. """
        x = self.getini(name)
        assert isinstance(x, list)
        x.append(line)  # modifies the cached list inline
    def getini(self, name):
        """ return configuration value from an :ref:`ini file <inifiles>`. If the
        specified name hasn't been registered through a prior
        :py:func:`parser.addini <_pytest.config.Parser.addini>`
        call (usually from a plugin), a ValueError is raised. """
        try:
            return self._inicache[name]
        except KeyError:
            self._inicache[name] = val = self._getini(name)
            return val
    def _getini(self, name):
        """Compute the value for ini option *name*, honoring -o overrides."""
        try:
            description, type, default = self._parser._inidict[name]
        except KeyError:
            raise ValueError("unknown configuration value: %r" % (name,))
        value = self._get_override_ini_value(name)
        if value is None:
            try:
                value = self.inicfg[name]
            except KeyError:
                if default is not None:
                    return default
                if type is None:
                    return ""
                return []
        if type == "pathlist":
            # paths are resolved relative to the ini file's directory
            dp = py.path.local(self.inicfg.config.path).dirpath()
            values = []
            for relpath in shlex.split(value):
                values.append(dp.join(relpath, abs=True))
            return values
        elif type == "args":
            return shlex.split(value)
        elif type == "linelist":
            return [t for t in map(lambda x: x.strip(), value.split("\n")) if t]
        elif type == "bool":
            return bool(_strtobool(value.strip()))
        else:
            assert type is None
            return value
    def _getconftest_pathlist(self, name, path):
        """Return *name* from the closest conftest as absolute paths, or None."""
        try:
            mod, relroots = self.pluginmanager._rget_with_confmod(name, path)
        except KeyError:
            return None
        modpath = py.path.local(mod.__file__).dirpath()
        values = []
        for relroot in relroots:
            if not isinstance(relroot, py.path.local):
                relroot = relroot.replace("/", py.path.local.sep)
                relroot = modpath.join(relroot, abs=True)
            values.append(relroot)
        return values
    def _get_override_ini_value(self, name):
        value = None
        # override_ini is a list of "ini=value" options
        # always use the last item if multiple values are set for same ini-name,
        # e.g. -o foo=bar1 -o foo=bar2 will set foo to bar2
        for ini_config in self._override_ini:
            try:
                key, user_ini_value = ini_config.split("=", 1)
            except ValueError:
                raise UsageError("-o/--override-ini expects option=value style.")
            else:
                if key == name:
                    value = user_ini_value
        return value
    def getoption(self, name, default=notset, skip=False):
        """ return command line option value.
        :arg name: name of the option. You may also specify
            the literal ``--OPT`` option instead of the "dest" option name.
        :arg default: default value if no option of that name exists.
        :arg skip: if True raise pytest.skip if option does not exists
            or has a None value.
        """
        name = self._opt2dest.get(name, name)
        try:
            val = getattr(self.option, name)
            if val is None and skip:
                raise AttributeError(name)
            return val
        except AttributeError:
            if default is not notset:
                return default
            if skip:
                import pytest
                pytest.skip("no %r option found" % (name,))
            raise ValueError("no option named %r" % (name,))
    def getvalue(self, name, path=None):
        """ (deprecated, use getoption()) """
        return self.getoption(name)
    def getvalueorskip(self, name, path=None):
        """ (deprecated, use getoption(skip=True)) """
        return self.getoption(name, skip=True)
def _assertion_supported():
    """Return True when ``assert`` statements are executed (i.e. not ``-O``)."""
    try:
        assert False
    except AssertionError:
        return True
    # assert was optimized away: the statement above did not raise
    return False
def _warn_about_missing_assertion(mode):
    """Write a warning to stderr if assert statements are not executed.

    With *mode* "plain" the warning is about silently-passing tests; any
    other mode warns that plain asserts outside rewritten modules are
    ignored. No-op when assertions are active.
    """
    if _assertion_supported():
        return
    if mode == "plain":
        sys.stderr.write(
            "WARNING: ASSERTIONS ARE NOT EXECUTED"
            " and FAILING TESTS WILL PASS. Are you"
            " using python -O?"
        )
    else:
        sys.stderr.write(
            "WARNING: assertions not in test modules or"
            " plugins will be ignored"
            " because assert statements are not executed "
            "by the underlying Python interpreter "
            "(are you using python -O?)\n"
        )
def setns(obj, dic):
    """Recursively attach *dic*'s entries onto *obj* (legacy pytest_namespace).

    Nested dicts become submodules registered as ``pytest.<name>``; every
    attached name is also mirrored directly on the ``pytest`` module.
    """
    import pytest
    for name, value in dic.items():
        if isinstance(value, dict):
            mod = getattr(obj, name, None)
            if mod is None:
                modname = "pytest.%s" % name
                mod = types.ModuleType(modname)
                sys.modules[modname] = mod
                mod.__all__ = []
                setattr(obj, name, mod)
                obj.__all__.append(name)
            setns(mod, value)
        else:
            setattr(obj, name, value)
            obj.__all__.append(name)
            # if obj != pytest:
            #     pytest.__all__.append(name)
            setattr(pytest, name, value)
def create_terminal_writer(config, *args, **kwargs):
    """Create a TerminalWriter instance configured according to the options
    in the config object. Every code which requires a TerminalWriter object
    and has access to a config object should use this function.
    """
    tw = py.io.TerminalWriter(*args, **kwargs)
    color = config.option.color
    if color == "yes":
        tw.hasmarkup = True
    elif color == "no":
        tw.hasmarkup = False
    # color == "auto": keep TerminalWriter's own detection
    return tw
def _strtobool(val):
    """Convert a string representation of truth to 1 (true) or 0 (false).

    True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
    are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError for
    anything else.
    .. note:: copied from distutils.util
    """
    lowered = val.lower()
    if lowered in ("y", "yes", "t", "true", "on", "1"):
        return 1
    if lowered in ("n", "no", "f", "false", "off", "0"):
        return 0
    raise ValueError("invalid truth value %r" % (val,))
| 34.690858
| 97
| 0.594914
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import copy
import inspect
import os
import shlex
import sys
import types
import warnings
import py
import six
from pkg_resources import parse_version
from pluggy import HookimplMarker
from pluggy import HookspecMarker
from pluggy import PluginManager
import _pytest._code
import _pytest.assertion
import _pytest.hookspec
from .exceptions import PrintHelp
from .exceptions import UsageError
from .findpaths import determine_setup
from .findpaths import exists
from _pytest._code import ExceptionInfo
from _pytest._code import filter_traceback
from _pytest.compat import lru_cache
from _pytest.compat import safe_str
from _pytest.outcomes import Skipped
# Decorators for declaring hook implementations and specifications
# in the "pytest" pluggy project namespace.
hookimpl = HookimplMarker("pytest")
hookspec = HookspecMarker("pytest")
class ConftestImportFailure(Exception):
    """Raised when importing a ``conftest.py`` fails; wraps path + exc_info."""
    def __init__(self, path, excinfo):
        Exception.__init__(self, path, excinfo)
        self.path = path
        self.excinfo = excinfo
def main(args=None, plugins=None):
    """Perform an in-process test run and return the exit code.

    :arg args: list of command line arguments.
    :arg plugins: list of plugin objects to be auto-registered during
        initialization.
    """
    from _pytest.main import EXIT_USAGEERROR
    try:
        try:
            config = _prepareconfig(args, plugins)
        except ConftestImportFailure as e:
            # conftest import failed before a Config existed: render the
            # filtered traceback ourselves and exit with code 4
            exc_info = ExceptionInfo(e.excinfo)
            tw = py.io.TerminalWriter(sys.stderr)
            tw.line(
                "ImportError while loading conftest '{e.path}'.".format(e=e), red=True
            )
            exc_info.traceback = exc_info.traceback.filter(filter_traceback)
            exc_repr = (
                exc_info.getrepr(style="short", chain=False)
                if exc_info.traceback
                else exc_info.exconly()
            )
            formatted_tb = safe_str(exc_repr)
            for line in formatted_tb.splitlines():
                tw.line(line.rstrip(), red=True)
            return 4
        else:
            try:
                return config.hook.pytest_cmdline_main(config=config)
            finally:
                # always unconfigure, even when cmdline_main raises
                config._ensure_unconfigure()
    except UsageError as e:
        tw = py.io.TerminalWriter(sys.stderr)
        for msg in e.args:
            tw.line("ERROR: {}\n".format(msg), red=True)
        return EXIT_USAGEERROR
class cmdline(object):
    # compatibility namespace: deprecated alias for :func:`main`
    main = staticmethod(main)
def filename_arg(path, optname):
    """Argparse type validator for filename arguments.

    :path: path of filename
    :optname: name of the option
    """
    if os.path.isdir(path):
        raise UsageError("{} must be a filename, given: {}".format(optname, path))
    return path
def directory_arg(path, optname):
    """Argparse type validator for directory arguments.

    :path: path of directory
    :optname: name of the option
    """
    if not os.path.isdir(path):
        raise UsageError("{} must be a directory, given: {}".format(optname, path))
    return path
# Plugins shipped with pytest itself; registered under their basename
# but imported with the ``_pytest.`` prefix (see import_plugin).
default_plugins = (
    "mark",
    "main",
    "terminal",
    "runner",
    "python",
    "fixtures",
    "debugging",
    "unittest",
    "capture",
    "skipping",
    "tmpdir",
    "monkeypatch",
    "recwarn",
    "pastebin",
    "helpconfig",
    "nose",
    "assertion",
    "junitxml",
    "resultlog",
    "doctest",
    "cacheprovider",
    "freeze_support",
    "setuponly",
    "setupplan",
    "stepwise",
    "warnings",
    "logging",
)
# "pytester" is builtin but not auto-loaded
builtin_plugins = set(default_plugins)
builtin_plugins.add("pytester")
def get_config():
    """Return a fresh Config instance with all default plugins imported."""
    pluginmanager = PytestPluginManager()
    config = Config(pluginmanager)
    for spec in default_plugins:
        pluginmanager.import_plugin(spec)
    return config
def get_plugin_manager():
    """Return a new PytestPluginManager with default plugins already loaded.

    Useful for integrating pytest with other tools (e.g. IDE test runners).
    """
    return get_config().pluginmanager
def _prepareconfig(args=None, plugins=None):
    """Build a Config from *args*, register *plugins*, and run cmdline parsing.

    *args* may be None (use sys.argv), a py.path.local, a list/tuple, or a
    (deprecated) command line string which is shlex-split.
    """
    warning = None
    if args is None:
        args = sys.argv[1:]
    elif isinstance(args, py.path.local):
        args = [str(args)]
    elif not isinstance(args, (tuple, list)):
        if not isinstance(args, str):
            raise ValueError("not a string or argument list: %r" % (args,))
        # passing a plain command line string is deprecated
        args = shlex.split(args, posix=sys.platform != "win32")
        from _pytest import deprecated
        warning = deprecated.MAIN_STR_ARGS
    config = get_config()
    pluginmanager = config.pluginmanager
    try:
        if plugins:
            for plugin in plugins:
                if isinstance(plugin, six.string_types):
                    pluginmanager.consider_pluginarg(plugin)
                else:
                    pluginmanager.register(plugin)
        if warning:
            from _pytest.warnings import _issue_config_warning
            _issue_config_warning(warning, config=config, stacklevel=4)
        return pluginmanager.hook.pytest_cmdline_parse(
            pluginmanager=pluginmanager, args=args
        )
    except BaseException:
        # unwind cleanly so pytest_unconfigure and cleanups still run
        config._ensure_unconfigure()
        raise
class PytestPluginManager(PluginManager):
def __init__(self):
super(PytestPluginManager, self).__init__("pytest")
self._conftest_plugins = set()
self._dirpath2confmods = {}
self._conftestpath2mod = {}
self._confcutdir = None
self._noconftest = False
self._duplicatepaths = set()
self.add_hookspecs(_pytest.hookspec)
self.register(self)
if os.environ.get("PYTEST_DEBUG"):
err = sys.stderr
encoding = getattr(err, "encoding", "utf8")
try:
err = py.io.dupfile(err, encoding=encoding)
except Exception:
pass
self.trace.root.setwriter(err.write)
self.enable_tracing()
self.rewrite_hook = _pytest.assertion.DummyRewriteHook()
self._configured = False
def addhooks(self, module_or_class):
warning = dict(
code="I2",
fslocation=_pytest._code.getfslineno(sys._getframe(1)),
nodeid=None,
message="use pluginmanager.add_hookspecs instead of "
"deprecated addhooks() method.",
)
self._warn(warning)
return self.add_hookspecs(module_or_class)
def parse_hookimpl_opts(self, plugin, name):
if not name.startswith("pytest_"):
return
if name == "pytest_plugins" or name.startswith("pytest_funcarg__"):
return
method = getattr(plugin, name)
opts = super(PytestPluginManager, self).parse_hookimpl_opts(plugin, name)
if not inspect.isroutine(method):
return
if opts is None and name.startswith("pytest_"):
opts = {}
if opts is not None:
for name in ("tryfirst", "trylast", "optionalhook", "hookwrapper"):
opts.setdefault(name, hasattr(method, name))
return opts
def parse_hookspec_opts(self, module_or_class, name):
opts = super(PytestPluginManager, self).parse_hookspec_opts(
module_or_class, name
)
if opts is None:
method = getattr(module_or_class, name)
if name.startswith("pytest_"):
opts = {
"firstresult": hasattr(method, "firstresult"),
"historic": hasattr(method, "historic"),
}
return opts
def register(self, plugin, name=None):
if name in ["pytest_catchlog", "pytest_capturelog"]:
self._warn(
"{} plugin has been merged into the core, "
"please remove it from your requirements.".format(
name.replace("_", "-")
)
)
return
ret = super(PytestPluginManager, self).register(plugin, name)
if ret:
self.hook.pytest_plugin_registered.call_historic(
kwargs=dict(plugin=plugin, manager=self)
)
if isinstance(plugin, types.ModuleType):
self.consider_module(plugin)
return ret
def getplugin(self, name):
# support deprecated naming because plugins (xdist e.g.) use it
return self.get_plugin(name)
def hasplugin(self, name):
return bool(self.get_plugin(name))
def pytest_configure(self, config):
# XXX now that the pluginmanager exposes hookimpl(tryfirst...)
# we should remove tryfirst/trylast as markers
config.addinivalue_line(
"markers",
"tryfirst: mark a hook implementation function such that the "
"plugin machinery will try to call it first/as early as possible.",
)
config.addinivalue_line(
"markers",
"trylast: mark a hook implementation function such that the "
"plugin machinery will try to call it last/as late as possible.",
)
self._configured = True
def _warn(self, message):
kwargs = (
message
if isinstance(message, dict)
else {"code": "I1", "message": message, "fslocation": None, "nodeid": None}
)
self.hook.pytest_logwarning.call_historic(kwargs=kwargs)
#
# internal API for local conftest plugin handling
#
    def _set_initial_conftests(self, namespace):
        """Load initial conftest files ahead of command line parsing.

        Walks the positional file/dir arguments (with any ``::`` node-id
        suffix stripped), loading the conftest chain for every argument that
        points at an existing path; if none does, the current directory's
        conftest chain is loaded instead.
        """
        current = py.path.local()
        self._confcutdir = (
            current.join(namespace.confcutdir, abs=True)
            if namespace.confcutdir
            else None
        )
        self._noconftest = namespace.noconftest
        self._using_pyargs = namespace.pyargs
        testpaths = namespace.file_or_dir
        foundanchor = False
        for path in testpaths:
            path = str(path)
            # remove node-id syntax
            i = path.find("::")
            if i != -1:
                path = path[:i]
            anchor = current.join(path, abs=1)
            if exists(anchor):  # we found some file object
                self._try_load_conftest(anchor)
                foundanchor = True
        if not foundanchor:
            self._try_load_conftest(current)
def _try_load_conftest(self, anchor):
self._getconftestmodules(anchor)
# let's also consider test* subdirs
if anchor.check(dir=1):
for x in anchor.listdir("test*"):
if x.check(dir=1):
self._getconftestmodules(x)
    @lru_cache(maxsize=128)
    def _getconftestmodules(self, path):
        """Return the list of conftest modules relevant for *path*.

        Conftests are collected from the filesystem root down to *path*'s
        directory, skipping everything above ``--confcutdir``.

        NOTE(review): ``lru_cache`` on an instance method keys on ``self`` and
        keeps the manager alive for the cache's lifetime (flake8-bugbear B019)
        — presumably acceptable for this effectively-singleton object; verify.
        """
        if self._noconftest:
            return []
        if path.isfile():
            directory = path.dirpath()
        else:
            directory = path
        if six.PY2:
            # on py2 the manual per-directory cache is consulted as well
            try:
                return self._dirpath2confmods[directory]
            except KeyError:
                pass
        # walk root -> directory, importing conftest.py files along the way
        clist = []
        for parent in directory.realpath().parts():
            if self._confcutdir and self._confcutdir.relto(parent):
                continue
            conftestpath = parent.join("conftest.py")
            if conftestpath.isfile():
                mod = self._importconftest(conftestpath)
                clist.append(mod)
        self._dirpath2confmods[directory] = clist
        return clist
def _rget_with_confmod(self, name, path):
modules = self._getconftestmodules(path)
for mod in reversed(modules):
try:
return mod, getattr(mod, name)
except AttributeError:
continue
raise KeyError(name)
    def _importconftest(self, conftestpath):
        """Import (exactly once) and register the conftest at *conftestpath*.

        Import errors are wrapped in ``ConftestImportFailure``.  Newly
        imported modules are spliced into the cached per-directory conftest
        lists and announced via :meth:`consider_conftest`.
        """
        try:
            return self._conftestpath2mod[conftestpath]
        except KeyError:
            pkgpath = conftestpath.pypkgpath()
            if pkgpath is None:
                # not part of a package: make sure a stale same-named module
                # does not shadow the conftest we are about to import
                _ensure_removed_sysmodule(conftestpath.purebasename)
            try:
                mod = conftestpath.pyimport()
                if (
                    hasattr(mod, "pytest_plugins")
                    and self._configured
                    and not self._using_pyargs
                ):
                    from _pytest.deprecated import (
                        PYTEST_PLUGINS_FROM_NON_TOP_LEVEL_CONFTEST
                    )
                    warnings.warn_explicit(
                        PYTEST_PLUGINS_FROM_NON_TOP_LEVEL_CONFTEST,
                        category=None,
                        filename=str(conftestpath),
                        lineno=0,
                    )
            except Exception:
                raise ConftestImportFailure(conftestpath, sys.exc_info())
            self._conftest_plugins.add(mod)
            self._conftestpath2mod[conftestpath] = mod
            dirpath = conftestpath.dirpath()
            if dirpath in self._dirpath2confmods:
                # late addition: append the module to the cached list of this
                # directory and of every cached subdirectory below it
                for path, mods in self._dirpath2confmods.items():
                    if path and path.relto(dirpath) or path == dirpath:
                        assert mod not in mods
                        mods.append(mod)
            self.trace("loaded conftestmodule %r" % (mod))
            self.consider_conftest(mod)
            return mod
def consider_preparse(self, args):
for opt1, opt2 in zip(args, args[1:]):
if opt1 == "-p":
self.consider_pluginarg(opt2)
def consider_pluginarg(self, arg):
if arg.startswith("no:"):
name = arg[3:]
self.set_blocked("stepwise")
self.set_blocked("pytest_stepwise")
self.set_blocked(name)
if not name.startswith("pytest_"):
self.set_blocked("pytest_" + name)
else:
self.import_plugin(arg)
def consider_conftest(self, conftestmodule):
self.register(conftestmodule, name=conftestmodule.__file__)
def consider_env(self):
self._import_plugin_specs(os.environ.get("PYTEST_PLUGINS"))
def consider_module(self, mod):
self._import_plugin_specs(getattr(mod, "pytest_plugins", []))
def _import_plugin_specs(self, spec):
plugins = _get_plugin_specs_as_list(spec)
for import_spec in plugins:
self.import_plugin(import_spec)
    def import_plugin(self, modname):
        """Import and register the plugin *modname* unless blocked or loaded.

        Builtin plugin names are resolved inside the ``_pytest`` package.
        ``ImportError`` is re-raised with a friendlier message (original
        traceback preserved) and a ``Skipped`` plugin only produces a warning.
        """
        assert isinstance(modname, (six.text_type, str)), (
            "module name as text required, got %r" % modname
        )
        modname = str(modname)
        if self.is_blocked(modname) or self.get_plugin(modname) is not None:
            return
        if modname in builtin_plugins:
            importspec = "_pytest." + modname
        else:
            importspec = modname
        # make sure the plugin itself gets assertion-rewritten
        self.rewrite_hook.mark_rewrite(importspec)
        try:
            __import__(importspec)
        except ImportError as e:
            new_exc_type = ImportError
            new_exc_message = 'Error importing plugin "%s": %s' % (
                modname,
                safe_str(e.args[0]),
            )
            new_exc = new_exc_type(new_exc_message)
            # re-raise with the original traceback attached
            six.reraise(new_exc_type, new_exc, sys.exc_info()[2])
        except Skipped as e:
            self._warn("skipped plugin %r: %s" % ((modname, e.msg)))
        else:
            mod = sys.modules[importspec]
            self.register(mod, modname)
def _get_plugin_specs_as_list(specs):
if specs is not None:
if isinstance(specs, str):
specs = specs.split(",") if specs else []
if not isinstance(specs, (list, tuple)):
raise UsageError(
"Plugin specs must be a ','-separated string or a "
"list/tuple of strings for plugin names. Given: %r" % specs
)
return list(specs)
return []
def _ensure_removed_sysmodule(modname):
try:
del sys.modules[modname]
except KeyError:
pass
class Notset(object):
    """Sentinel type whose single instance marks "no value provided"."""
    def __repr__(self):
        return "<NOTSET>"
# module-wide sentinel instance, distinguishable from any real value
notset = Notset()
def _iter_rewritable_modules(package_files):
for fn in package_files:
is_simple_module = "/" not in fn and fn.endswith(".py")
is_package = fn.count("/") == 1 and fn.endswith("__init__.py")
if is_simple_module:
module_name, _ = os.path.splitext(fn)
yield module_name
elif is_package:
package_name = os.path.dirname(fn)
yield package_name
class Config(object):
    """Pytest configuration object.

    Parses the command line and ini files, exposes option values via
    :meth:`getoption`/:meth:`getini` and gives access to the plugin manager
    and hook relay.
    """
    def __init__(self, pluginmanager):
        #: argparse namespace holding all parsed option values
        self.option = argparse.Namespace()
        from .argparsing import Parser, FILE_OR_DIR
        _a = FILE_OR_DIR
        self._parser = Parser(
            usage="%%(prog)s [options] [%s] [%s] [...]" % (_a, _a),
            processopt=self._processopt,
        )
        self.pluginmanager = pluginmanager
        self.trace = self.pluginmanager.trace.root.get("config")
        self.hook = self.pluginmanager.hook
        self._inicache = {}  # cache for getini() lookups
        self._override_ini = ()  # -o/--override-ini "key=value" strings
        self._opt2dest = {}  # option string -> argparse dest name
        self._cleanup = []  # teardown callables, run LIFO on unconfigure
        self._warn = self.pluginmanager._warn
        self.pluginmanager.register(self, "pytestconfig")
        self._configured = False
        def do_setns(dic):
            # historic hook callback: graft pytest_namespace contributions
            # onto the ``pytest`` module
            import pytest
            setns(pytest, dic)
        self.hook.pytest_namespace.call_historic(do_setns, {})
        self.hook.pytest_addoption.call_historic(kwargs=dict(parser=self._parser))
    def add_cleanup(self, func):
        """Register *func* to run when this config object is torn down."""
        self._cleanup.append(func)
    def _do_configure(self):
        """Fire the historic ``pytest_configure`` hook; may run only once."""
        assert not self._configured
        self._configured = True
        self.hook.pytest_configure.call_historic(kwargs=dict(config=self))
    def _ensure_unconfigure(self):
        """Undo :meth:`_do_configure` if needed and run all cleanups."""
        if self._configured:
            self._configured = False
            self.hook.pytest_unconfigure(config=self)
            # clear the historic record so a later configure re-fires cleanly
            self.hook.pytest_configure._call_history = []
        while self._cleanup:
            fin = self._cleanup.pop()
            fin()
    def warn(self, code, message, fslocation=None, nodeid=None):
        """Deprecated: emit a config warning (use ``warnings.warn`` instead)."""
        from _pytest.warning_types import RemovedInPytest4Warning
        # NOTE(review): a 2-element (filename, lineno) location falls into the
        # "unknown file" branch because of the ``> 2`` comparison — confirm
        # whether ``>= 2`` was intended here.
        if isinstance(fslocation, (tuple, list)) and len(fslocation) > 2:
            filename, lineno = fslocation[:2]
        else:
            filename = "unknown file"
            lineno = 0
        msg = "config.warn has been deprecated, use warnings.warn instead"
        if nodeid:
            msg = "{}: {}".format(nodeid, msg)
        warnings.warn_explicit(
            RemovedInPytest4Warning(msg),
            category=None,
            filename=filename,
            lineno=lineno,
        )
        self.hook.pytest_logwarning.call_historic(
            kwargs=dict(
                code=code, message=message, fslocation=fslocation, nodeid=nodeid
            )
        )
    def get_terminal_writer(self):
        """Return the terminal writer owned by the terminalreporter plugin."""
        return self.pluginmanager.get_plugin("terminalreporter")._tw
    def pytest_cmdline_parse(self, pluginmanager, args):
        """Hook implementation: parse *args* and return this config object."""
        self.parse(args)
        return self
    def notify_exception(self, excinfo, option=None):
        """Report an internal error via hook or INTERNALERROR> stderr lines."""
        if option and option.fulltrace:
            style = "long"
        else:
            style = "native"
        excrepr = excinfo.getrepr(
            funcargs=True, showlocals=getattr(option, "showlocals", False), style=style
        )
        res = self.hook.pytest_internalerror(excrepr=excrepr, excinfo=excinfo)
        if not any(res):
            # no plugin handled it: dump the traceback ourselves
            for line in str(excrepr).split("\n"):
                sys.stderr.write("INTERNALERROR> %s\n" % line)
                sys.stderr.flush()
    def cwd_relative_nodeid(self, nodeid):
        """Translate a rootdir-relative *nodeid* to be invocation-dir relative."""
        if self.invocation_dir != self.rootdir:
            fullpath = self.rootdir.join(nodeid)
            nodeid = self.invocation_dir.bestrelpath(fullpath)
        return nodeid
    @classmethod
    def fromdictargs(cls, option_dict, args):
        """Alternate constructor (used by subprocesses): seed options from a dict."""
        config = get_config()
        config.option.__dict__.update(option_dict)
        config.parse(args, addopts=False)
        for x in config.option.plugins:
            config.pluginmanager.consider_pluginarg(x)
        return config
    def _processopt(self, opt):
        # record option-string -> dest mapping and seed the default value
        for name in opt._short_opts + opt._long_opts:
            self._opt2dest[name] = opt.dest
        if hasattr(opt, "default") and opt.dest:
            if not hasattr(self.option, opt.dest):
                setattr(self.option, opt.dest, opt.default)
    @hookimpl(trylast=True)
    def pytest_load_initial_conftests(self, early_config):
        """Hook implementation: load conftests for the initial arguments."""
        self.pluginmanager._set_initial_conftests(early_config.known_args_namespace)
    def _initini(self, args):
        """Determine rootdir/inifile from *args* and register core ini options."""
        ns, unknown_args = self._parser.parse_known_and_unknown_args(
            args, namespace=copy.copy(self.option)
        )
        r = determine_setup(
            ns.inifilename,
            ns.file_or_dir + unknown_args,
            rootdir_cmd_arg=ns.rootdir or None,
            config=self,
        )
        self.rootdir, self.inifile, self.inicfg = r
        self._parser.extra_info["rootdir"] = self.rootdir
        self._parser.extra_info["inifile"] = self.inifile
        self.invocation_dir = py.path.local()
        self._parser.addini("addopts", "extra command line options", "args")
        self._parser.addini("minversion", "minimally required pytest version")
        self._override_ini = ns.override_ini or ()
    def _consider_importhook(self, args):
        """Install the assertion-rewrite import hook when ``--assert=rewrite``.

        Parsed early so that plugins imported afterwards get rewritten too;
        falls back to plain asserts when installing the hook fails.
        """
        ns, unknown_args = self._parser.parse_known_and_unknown_args(args)
        mode = ns.assertmode
        if mode == "rewrite":
            try:
                hook = _pytest.assertion.install_importhook(self)
            except SystemError:
                mode = "plain"
            else:
                self._mark_plugins_for_rewrite(hook)
        _warn_about_missing_assertion(mode)
    def _mark_plugins_for_rewrite(self, hook):
        """Mark top-level modules of all installed pytest plugins for rewrite."""
        import pkg_resources
        self.pluginmanager.rewrite_hook = hook
        if os.environ.get("PYTEST_DISABLE_PLUGIN_AUTOLOAD"):
            # We don't autoload from setuptools entry points, no need to continue.
            return
        # 'RECORD' exists for pip-installed packages, 'SOURCES.txt' for
        # editable installs
        metadata_files = "RECORD", "SOURCES.txt"
        package_files = (
            entry.split(",")[0]
            for entrypoint in pkg_resources.iter_entry_points("pytest11")
            for metadata in metadata_files
            for entry in entrypoint.dist._get_metadata(metadata)
        )
        for name in _iter_rewritable_modules(package_files):
            hook.mark_rewrite(name)
    def _validate_args(self, args):
        """Parse *args* for validation side effects only; return them unchanged."""
        self._parser.parse_known_and_unknown_args(
            args, namespace=copy.copy(self.option)
        )
        return args
    def _preparse(self, args, addopts=True):
        """Early parsing pass: ini discovery, plugin loading, initial conftests."""
        if addopts:
            env_addopts = os.environ.get("PYTEST_ADDOPTS", "")
            if len(env_addopts):
                args[:] = self._validate_args(shlex.split(env_addopts)) + args
        self._initini(args)
        if addopts:
            args[:] = self._validate_args(self.getini("addopts")) + args
        self._checkversion()
        self._consider_importhook(args)
        self.pluginmanager.consider_preparse(args)
        if not os.environ.get("PYTEST_DISABLE_PLUGIN_AUTOLOAD"):
            # setuptools plugins are autoloaded unless the user opted out via
            # the PYTEST_DISABLE_PLUGIN_AUTOLOAD environment variable
            self.pluginmanager.load_setuptools_entrypoints("pytest11")
        self.pluginmanager.consider_env()
        self.known_args_namespace = ns = self._parser.parse_known_args(
            args, namespace=copy.copy(self.option)
        )
        if self.known_args_namespace.confcutdir is None and self.inifile:
            # default --confcutdir to the directory containing the inifile
            confcutdir = py.path.local(self.inifile).dirname
            self.known_args_namespace.confcutdir = confcutdir
        try:
            self.hook.pytest_load_initial_conftests(
                early_config=self, args=args, parser=self._parser
            )
        except ConftestImportFailure:
            e = sys.exc_info()[1]
            if ns.help or ns.version:
                # help/version output is still wanted: let it pass and only
                # print a warning at the end
                self._warn("could not load initial conftests (%s)\n" % e.path)
            else:
                raise
    def _checkversion(self):
        """Enforce the ``minversion`` ini setting, if configured."""
        import pytest
        minver = self.inicfg.get("minversion", None)
        if minver:
            if parse_version(minver) > parse_version(pytest.__version__):
                # NOTE(review): the message below ends with a stray apostrophe
                raise pytest.UsageError(
                    "%s:%d: requires pytest-%s, actual pytest-%s'"
                    % (
                        self.inicfg.config.path,
                        self.inicfg.lineof("minversion"),
                        minver,
                        pytest.__version__,
                    )
                )
    def parse(self, args, addopts=True):
        """Parse the command line into this config; callable at most once."""
        assert not hasattr(
            self, "args"
        ), "can only parse cmdline args at most once per Config object"
        self._origargs = args
        self.hook.pytest_addhooks.call_historic(
            kwargs=dict(pluginmanager=self.pluginmanager)
        )
        self._preparse(args, addopts=addopts)
        self.hook.pytest_cmdline_preparse(config=self, args=args)
        self._parser.after_preparse = True
        try:
            args = self._parser.parse_setoption(
                args, self.option, namespace=self.option
            )
            if not args:
                # no positional args: fall back to the ``testpaths`` ini
                # value, then to the invocation directory itself
                if self.invocation_dir == self.rootdir:
                    args = self.getini("testpaths")
                if not args:
                    args = [str(self.invocation_dir)]
            self.args = args
        except PrintHelp:
            pass
    def addinivalue_line(self, name, line):
        """Append *line* to a line-list type ini value (mutates the cache)."""
        x = self.getini(name)
        assert isinstance(x, list)
        x.append(line)
    def getini(self, name):
        """Return the (cached) configuration value for ini option *name*."""
        try:
            return self._inicache[name]
        except KeyError:
            self._inicache[name] = val = self._getini(name)
            return val
    def _getini(self, name):
        """Uncached ini lookup: resolve *name* according to its registered type."""
        try:
            description, type, default = self._parser._inidict[name]
        except KeyError:
            raise ValueError("unknown configuration value: %r" % (name,))
        # -o/--override-ini takes precedence over the ini file itself
        value = self._get_override_ini_value(name)
        if value is None:
            try:
                value = self.inicfg[name]
            except KeyError:
                if default is not None:
                    return default
                if type is None:
                    return ""
                return []
        if type == "pathlist":
            # resolve each entry relative to the ini file's directory
            dp = py.path.local(self.inicfg.config.path).dirpath()
            values = []
            for relpath in shlex.split(value):
                values.append(dp.join(relpath, abs=True))
            return values
        elif type == "args":
            return shlex.split(value)
        elif type == "linelist":
            return [t for t in map(lambda x: x.strip(), value.split("\n")) if t]
        elif type == "bool":
            return bool(_strtobool(value.strip()))
        else:
            assert type is None
            return value
    def _getconftest_pathlist(self, name, path):
        """Resolve a conftest-defined path list relative to its conftest file."""
        try:
            mod, relroots = self.pluginmanager._rget_with_confmod(name, path)
        except KeyError:
            return None
        modpath = py.path.local(mod.__file__).dirpath()
        values = []
        for relroot in relroots:
            if not isinstance(relroot, py.path.local):
                relroot = relroot.replace("/", py.path.local.sep)
                relroot = modpath.join(relroot, abs=True)
            values.append(relroot)
        return values
    def _get_override_ini_value(self, name):
        # override_ini is a list of "ini=value" options; the last matching
        # assignment wins
        value = None
        for ini_config in self._override_ini:
            try:
                key, user_ini_value = ini_config.split("=", 1)
            except ValueError:
                raise UsageError("-o/--override-ini expects option=value style.")
            else:
                if key == name:
                    value = user_ini_value
        return value
    def getoption(self, name, default=notset, skip=False):
        """Return a command line option value.

        :param name: option dest name or the literal ``--flag`` spelling.
        :param default: value returned when the option does not exist.
        :param skip: when True, skip the test instead of raising ValueError.
        """
        name = self._opt2dest.get(name, name)
        try:
            val = getattr(self.option, name)
            if val is None and skip:
                raise AttributeError(name)
            return val
        except AttributeError:
            if default is not notset:
                return default
            if skip:
                import pytest
                pytest.skip("no %r option found" % (name,))
            raise ValueError("no option named %r" % (name,))
    def getvalue(self, name, path=None):
        """Deprecated: use :meth:`getoption` instead (*path* is ignored)."""
        return self.getoption(name)
    def getvalueorskip(self, name, path=None):
        """Deprecated: use ``getoption(skip=True)`` instead (*path* is ignored)."""
        return self.getoption(name, skip=True)
def _assertion_supported():
try:
assert False
except AssertionError:
return True
else:
return False
def _warn_about_missing_assertion(mode):
    """Write a stderr warning when the interpreter strips assert statements."""
    if _assertion_supported():
        return
    if mode == "plain":
        warning = (
            "WARNING: ASSERTIONS ARE NOT EXECUTED"
            " and FAILING TESTS WILL PASS. Are you"
            " using python -O?"
        )
    else:
        warning = (
            "WARNING: assertions not in test modules or"
            " plugins will be ignored"
            " because assert statements are not executed "
            "by the underlying Python interpreter "
            "(are you using python -O?)\n"
        )
    sys.stderr.write(warning)
def setns(obj, dic):
    """Recursively attach *dic*'s entries to *obj* and mirror them on pytest.

    Dict values become synthetic ``pytest.<name>`` submodules (registered in
    ``sys.modules``); other values are set as plain attributes, appended to
    ``obj.__all__`` and also copied onto the ``pytest`` module itself.
    """
    import pytest
    for name, value in dic.items():
        if not isinstance(value, dict):
            setattr(obj, name, value)
            obj.__all__.append(name)
            setattr(pytest, name, value)
            continue
        mod = getattr(obj, name, None)
        if mod is None:
            modname = "pytest.%s" % name
            mod = types.ModuleType(modname)
            sys.modules[modname] = mod
            mod.__all__ = []
            setattr(obj, name, mod)
            obj.__all__.append(name)
        setns(mod, value)
def create_terminal_writer(config, *args, **kwargs):
    """Build a py.io TerminalWriter, honoring the ``--color`` option."""
    tw = py.io.TerminalWriter(*args, **kwargs)
    color = config.option.color
    if color == "yes":
        tw.hasmarkup = True
    if color == "no":
        tw.hasmarkup = False
    return tw
def _strtobool(val):
val = val.lower()
if val in ("y", "yes", "t", "true", "on", "1"):
return 1
elif val in ("n", "no", "f", "false", "off", "0"):
return 0
else:
raise ValueError("invalid truth value %r" % (val,))
| true
| true
|
f70c9167149b0106f5be74ae34f60b2a00dc61dc
| 45
|
py
|
Python
|
tests/__init__.py
|
9kin/codeforces-dl
|
70edc1b8942f4a12ef052e0ae6e4331d34be6a71
|
[
"MIT"
] | 5
|
2020-10-08T05:17:58.000Z
|
2021-05-16T17:42:04.000Z
|
tests/__init__.py
|
9kin/cfdl
|
70edc1b8942f4a12ef052e0ae6e4331d34be6a71
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
9kin/cfdl
|
70edc1b8942f4a12ef052e0ae6e4331d34be6a71
|
[
"MIT"
] | null | null | null |
"""Unit test package for codeforces2html."""
| 22.5
| 44
| 0.733333
| true
| true
|
|
f70c9195412787b5affe6c6964b92b855e385044
| 16,268
|
py
|
Python
|
btfxwss/connection.py
|
robertkarklinsh/btfxwss
|
c3520e1da3bda89f762f871c6069d37142491f67
|
[
"MIT"
] | null | null | null |
btfxwss/connection.py
|
robertkarklinsh/btfxwss
|
c3520e1da3bda89f762f871c6069d37142491f67
|
[
"MIT"
] | null | null | null |
btfxwss/connection.py
|
robertkarklinsh/btfxwss
|
c3520e1da3bda89f762f871c6069d37142491f67
|
[
"MIT"
] | null | null | null |
# Import Built-Ins
import logging
import json
import time
import ssl
import hashlib
import hmac
from multiprocessing import Queue
from threading import Thread, Event, Timer
from collections import OrderedDict
# Import Third-Party
import websocket
# Import Homebrew
# Init Logging Facilities
log = logging.getLogger(__name__)
class WebSocketConnection(Thread):
    """Websocket Connection Thread
    Inspired heavily by ekulyk's PythonPusherClient Connection Class
    https://github.com/ekulyk/PythonPusherClient/blob/master/pusherclient/connection.py
    It handles all low-level system messages, such a reconnects, pausing of
    activity and continuing of activity.
    """
    def __init__(self, *args, url=None, timeout=None, sslopt=None,
                 reconnect_interval=None, log_level=None, **kwargs):
        """Initialize a WebSocketConnection Instance.
        :param args: args for Thread.__init__()
        :param url: websocket address, defaults to v2 websocket.
        :param timeout: timeout for connection; defaults to 10s
        :param reconnect_interval: interval at which to try reconnecting;
            defaults to 10s.
        :param log_level: logging level for the connection Logger. Defaults to
            logging.INFO.
        :param kwargs: kwargs for Thread.__init__()
        """
        # Queue used to pass data up to BTFX client
        self.q = Queue()
        # Connection Settings
        self.socket = None
        self.url = url if url else 'wss://api.bitfinex.com/ws/2'
        self.sslopt = sslopt if sslopt else {}
        # Dict to store all subscribe commands for reconnects
        self.channel_configs = OrderedDict()
        # Connection Handling Attributes
        self.connected = Event()
        self.disconnect_called = Event()
        self.reconnect_required = Event()
        self.reconnect_interval = reconnect_interval if reconnect_interval else 10
        self.paused = Event()
        # Setup Timer attributes
        # Tracks API Connection & Responses
        self.ping_timer = None
        self.ping_interval = 120
        # Tracks Websocket Connection
        self.connection_timer = None
        self.connection_timeout = timeout if timeout else 10
        # Tracks responses from send_ping()
        self.pong_timer = None
        self.pong_received = False
        self.pong_timeout = 30
        self.log = logging.getLogger(self.__module__)
        if log_level == logging.DEBUG:
            websocket.enableTrace(True)
        self.log.setLevel(level=log_level if log_level else logging.INFO)
        # Call init of Thread and pass remaining args and kwargs
        # NOTE(review): *args/**kwargs are documented as being forwarded but
        # are not actually passed to Thread.__init__ — confirm intent.
        Thread.__init__(self)
        self.daemon = True
    def disconnect(self):
        """Disconnects from the websocket connection and joins the Thread.
        :return:
        """
        self.log.debug("disconnect(): Disconnecting from API..")
        self.reconnect_required.clear()
        self.disconnect_called.set()
        if self.socket:
            self.socket.close()
        # NOTE(review): join() with a 1s timeout may return before the
        # connection thread has actually terminated.
        self.join(timeout=1)
    def reconnect(self):
        """Issues a reconnection by setting the reconnect_required event.
        :return:
        """
        # Reconnect attempt at self.reconnect_interval
        self.log.debug("reconnect(): Initialzion reconnect sequence..")
        self.connected.clear()
        self.reconnect_required.set()
        # closing the socket makes run_forever() return, which triggers the
        # retry loop in _connect()
        if self.socket:
            self.socket.close()
    def _connect(self):
        """Creates a websocket connection.
        :return:
        """
        self.log.debug("_connect(): Initializing Connection..")
        self.socket = websocket.WebSocketApp(
            self.url,
            on_open=self._on_open,
            on_message=self._on_message,
            on_error=self._on_error,
            on_close=self._on_close
        )
        # fall back to the system CA bundle unless the caller supplied one
        if 'ca_certs' not in self.sslopt.keys():
            ssl_defaults = ssl.get_default_verify_paths()
            self.sslopt['ca_certs'] = ssl_defaults.cafile
        self.log.debug("_connect(): Starting Connection..")
        self.socket.run_forever(sslopt=self.sslopt)
        while self.reconnect_required.is_set():
            if not self.disconnect_called.is_set():
                self.log.info("Attempting to connect again in %s seconds."
                              % self.reconnect_interval)
                # NOTE(review): self.state is assigned here but never read
                # anywhere else in this class — confirm it is used externally.
                self.state = "unavailable"
                time.sleep(self.reconnect_interval)
                # We need to set this flag since closing the socket will
                # set it to False
                self.socket.keep_running = True
                self.socket.run_forever(sslopt=self.sslopt)
    def run(self):
        """Main method of Thread.
        :return:
        """
        self.log.debug("run(): Starting up..")
        self._connect()
    def _on_message(self, ws, message):
        """Handles and passes received data to the appropriate handlers.
        :return:
        """
        self._stop_timers()
        raw, received_at = message, time.time()
        self.log.debug("_on_message(): Received new message %s at %s",
                       raw, received_at)
        try:
            data = json.loads(raw)
        except json.JSONDecodeError:
            # Something wrong with this data, log and discard
            return
        # Handle data
        if isinstance(data, dict):
            # This is a system message
            self._system_handler(data, received_at)
        else:
            # This is a list of data
            if data[1] == 'hb':
                self._heartbeat_handler()
            else:
                self._data_handler(data, received_at)
        # We've received data, reset timers
        self._start_timers()
    def _on_close(self, ws, *args):
        # websocket-client callback: connection went away
        self.log.info("Connection closed")
        self.connected.clear()
        self._stop_timers()
    def _on_open(self, ws):
        # websocket-client callback: connection established
        self.log.info("Connection opened")
        self.connected.set()
        self.send_ping()
        self._start_timers()
        if self.reconnect_required.is_set():
            self.log.info("_on_open(): Connection reconnected, re-subscribing..")
            self._resubscribe(soft=False)
    def _on_error(self, ws, error):
        # websocket-client callback: flag the connection for reconnection
        self.log.info("Connection Error - %s", error)
        self.reconnect_required.set()
        self.connected.clear()
    def _stop_timers(self):
        """Stops ping, pong and connection timers.
        :return:
        """
        if self.ping_timer:
            self.ping_timer.cancel()
        if self.connection_timer:
            self.connection_timer.cancel()
        if self.pong_timer:
            self.pong_timer.cancel()
        self.log.debug("_stop_timers(): Timers stopped.")
    def _start_timers(self):
        """Resets and starts timers for API data and connection.
        :return:
        """
        self.log.debug("_start_timers(): Resetting timers..")
        self._stop_timers()
        # Sends a ping at ping_interval to see if API still responding
        self.ping_timer = Timer(self.ping_interval, self.send_ping)
        self.ping_timer.start()
        # Automatically reconnect if we didnt receive data
        self.connection_timer = Timer(self.connection_timeout,
                                      self._connection_timed_out)
        self.connection_timer.start()
    def send_ping(self):
        """Sends a ping message to the API and starts pong timers.
        :return:
        """
        self.log.debug("send_ping(): Sending ping to API..")
        self.socket.send(json.dumps({'event': 'ping'}))
        self.pong_timer = Timer(self.pong_timeout, self._check_pong)
        self.pong_timer.start()
    def _check_pong(self):
        """Checks if a Pong message was received.
        :return:
        """
        self.pong_timer.cancel()
        if self.pong_received:
            self.log.debug("_check_pong(): Pong received in time.")
            self.pong_received = False
        else:
            # reconnect
            self.log.debug("_check_pong(): Pong not received in time."
                           "Issuing reconnect..")
            self.reconnect()
    def send(self, api_key=None, secret=None, list_data=None, auth=False, **kwargs):
        """Sends the given Payload to the API via the websocket connection.
        :param api_key: API key, only used when ``auth`` is True
        :param secret: API secret, only used when ``auth`` is True
        :param list_data: list payload, sent as-is when given
        :param auth: if True, build and send an authentication payload
        :param kwargs: payload parameters as key=value pairs
        :return:
        """
        if auth:
            # HMAC-SHA384 signature over 'AUTH' + nonce, per Bitfinex auth spec
            nonce = str(int(time.time() * 10000000))
            auth_string = 'AUTH' + nonce
            auth_sig = hmac.new(secret.encode(), auth_string.encode(),
                                hashlib.sha384).hexdigest()
            payload = {'event': 'auth', 'apiKey': api_key, 'authSig': auth_sig,
                       'authPayload': auth_string, 'authNonce': nonce}
            payload = json.dumps(payload)
        elif list_data:
            payload = json.dumps(list_data)
        else:
            payload = json.dumps(kwargs)
        self.log.debug("send(): Sending payload to API: %s", payload)
        try:
            self.socket.send(payload)
        except websocket.WebSocketConnectionClosedException:
            self.log.error("send(): Did not send out payload %s - client not connected. ", kwargs)
    def pass_to_client(self, event, data, *args):
        """Passes data up to the client via a Queue().
        :param event:
        :param data:
        :param args:
        :return:
        """
        self.q.put((event, data, *args))
    def _connection_timed_out(self):
        """Issues a reconnection if the connection timed out.
        :return:
        """
        self.log.debug("_connection_timed_out(): Fired! Issuing reconnect..")
        self.reconnect()
    def _pause(self):
        """Pauses the connection.
        :return:
        """
        self.log.debug("_pause(): Setting paused() Flag!")
        self.paused.set()
    def _unpause(self):
        """Unpauses the connection.
        Send a message up to client that he should re-subscribe to all
        channels.
        :return:
        """
        self.log.debug("_unpause(): Clearing paused() Flag!")
        self.paused.clear()
        self.log.debug("_unpause(): Re-subscribing softly..")
        self._resubscribe(soft=True)
    def _heartbeat_handler(self):
        """Handles heartbeat messages.
        :return:
        """
        # Restart our timers since we received some data
        self.log.debug("_heartbeat_handler(): Received a heart beat "
                       "from connection!")
        self._start_timers()
    def _pong_handler(self):
        """Handle a pong response.
        :return:
        """
        # We received a Pong response to our Ping!
        self.log.debug("_pong_handler(): Received a Pong message!")
        self.pong_received = True
    def _system_handler(self, data, ts):
        """Distributes system messages to the appropriate handler.
        System messages include everything that arrives as a dict,
        or a list containing a heartbeat.
        :param data:
        :param ts:
        :return:
        """
        self.log.debug("_system_handler(): Received a system message: %s", data)
        # Unpack the data
        event = data.pop('event')
        if event == 'pong':
            self.log.debug("_system_handler(): Distributing %s to _pong_handler..",
                           data)
            self._pong_handler()
        elif event == 'info':
            self.log.debug("_system_handler(): Distributing %s to _info_handler..",
                           data)
            self._info_handler(data)
        elif event == 'error':
            self.log.debug("_system_handler(): Distributing %s to _error_handler..",
                           data)
            self._error_handler(data)
        elif event in ('subscribed', 'unsubscribed', 'conf', 'auth', 'unauth'):
            self.log.debug("_system_handler(): Distributing %s to "
                           "_response_handler..", data)
            self._response_handler(event, data, ts)
        else:
            self.log.error("Unhandled event: %s, data: %s", event, data)
    def _response_handler(self, event, data, ts):
        """Handles responses to (un)subscribe and conf commands.
        Passes data up to client.
        :param data:
        :param ts:
        :return:
        """
        self.log.debug("_response_handler(): Passing %s to client..", data)
        self.pass_to_client(event, data, ts)
    def _info_handler(self, data):
        """
        Handle INFO messages from the API and issues relevant actions.
        :param data:
        """
        info_message = {'20051': 'Stop/Restart websocket server '
                                 '(please try to reconnect)',
                        '20060': 'Refreshing data from the trading engine; '
                                 'please pause any acivity.',
                        '20061': 'Done refreshing data from the trading engine.'
                                 ' Re-subscription advised.'}
        # map each info code to the action it should trigger
        codes = {'20051': self.reconnect, '20060': self._pause,
                 '20061': self._unpause}
        if 'version' in data:
            self.log.info("API version: %i", data['version'])
            return
        try:
            self.log.info(info_message[data['code']])
            codes[data['code']]()
        except KeyError as e:
            self.log.exception(e)
            self.log.error("Unknown Info code %s!", data['code'])
            raise
    def _error_handler(self, data):
        """
        Handle Error messages and log them accordingly.
        :param data:
        """
        errors = {10000: 'Unknown event',
                  10001: 'Generic error',
                  10008: 'Concurrency error',
                  10020: 'Request parameters error',
                  10050: 'Configuration setup failed',
                  10100: 'Failed authentication',
                  10111: 'Error in authentication request payload',
                  10112: 'Error in authentication request signature',
                  10113: 'Error in authentication request encryption',
                  10114: 'Error in authentication request nonce',
                  10200: 'Error in un-authentication request',
                  10300: 'Subscription Failed (generic)',
                  10301: 'Already Subscribed',
                  10302: 'Unknown channel',
                  10400: 'Subscription Failed (generic)',
                  10401: 'Not subscribed',
                  11000: 'Not ready, try again later',
                  20000: 'User is invalid!'
                  }
        try:
            self.log.error(errors[data['code']])
        except KeyError:
            # NOTE(review): this log message says "Reconnecting.." but no
            # reconnect is actually issued here — confirm intended behavior.
            self.log.error("Received unknown error Code in message %s! "
                           "Reconnecting..", data)
    def _data_handler(self, data, ts):
        """Handles data messages by passing them up to the client.
        :param data:
        :param ts:
        :return:
        """
        # Pass the data up to the Client
        self.log.debug("_data_handler(): Passing %s to client..",
                       data)
        self.pass_to_client('data', data, ts)
    def _resubscribe(self, soft=False):
        """Resubscribes to all channels found in self.channel_configs.
        :param soft: if True, unsubscribes first.
        :return: None
        """
        q_list = []
        while True:
            try:
                # soft restart pops newest-first, hard restart oldest-first
                identifier, q = self.channel_configs.popitem(last=True if soft else False)
            except KeyError:
                break
            q_list.append((identifier, q.copy()))
            if identifier == 'auth':
                self.send(**q, auth=True)
                continue
            if soft:
                q['event'] = 'unsubscribe'
            self.send(**q)
        # Resubscribe for soft start.
        if soft:
            for identifier, q in reversed(q_list):
                self.channel_configs[identifier] = q
                self.send(**q)
        else:
            for identifier, q in q_list:
                self.channel_configs[identifier] = q
| 33.61157
| 98
| 0.57149
|
import logging
import json
import time
import ssl
import hashlib
import hmac
from multiprocessing import Queue
from threading import Thread, Event, Timer
from collections import OrderedDict
import websocket
log = logging.getLogger(__name__)
class WebSocketConnection(Thread):
def __init__(self, *args, url=None, timeout=None, sslopt=None,
reconnect_interval=None, log_level=None, **kwargs):
self.q = Queue()
self.socket = None
self.url = url if url else 'wss://api.bitfinex.com/ws/2'
self.sslopt = sslopt if sslopt else {}
self.channel_configs = OrderedDict()
self.connected = Event()
self.disconnect_called = Event()
self.reconnect_required = Event()
self.reconnect_interval = reconnect_interval if reconnect_interval else 10
self.paused = Event()
self.ping_timer = None
self.ping_interval = 120
self.connection_timer = None
self.connection_timeout = timeout if timeout else 10
self.pong_timer = None
self.pong_received = False
self.pong_timeout = 30
self.log = logging.getLogger(self.__module__)
if log_level == logging.DEBUG:
websocket.enableTrace(True)
self.log.setLevel(level=log_level if log_level else logging.INFO)
Thread.__init__(self)
self.daemon = True
def disconnect(self):
self.log.debug("disconnect(): Disconnecting from API..")
self.reconnect_required.clear()
self.disconnect_called.set()
if self.socket:
self.socket.close()
self.join(timeout=1)
def reconnect(self):
self.log.debug("reconnect(): Initialzion reconnect sequence..")
self.connected.clear()
self.reconnect_required.set()
if self.socket:
self.socket.close()
def _connect(self):
self.log.debug("_connect(): Initializing Connection..")
self.socket = websocket.WebSocketApp(
self.url,
on_open=self._on_open,
on_message=self._on_message,
on_error=self._on_error,
on_close=self._on_close
)
if 'ca_certs' not in self.sslopt.keys():
ssl_defaults = ssl.get_default_verify_paths()
self.sslopt['ca_certs'] = ssl_defaults.cafile
self.log.debug("_connect(): Starting Connection..")
self.socket.run_forever(sslopt=self.sslopt)
while self.reconnect_required.is_set():
if not self.disconnect_called.is_set():
self.log.info("Attempting to connect again in %s seconds."
% self.reconnect_interval)
self.state = "unavailable"
time.sleep(self.reconnect_interval)
self.socket.keep_running = True
self.socket.run_forever(sslopt=self.sslopt)
def run(self):
self.log.debug("run(): Starting up..")
self._connect()
def _on_message(self, ws, message):
self._stop_timers()
raw, received_at = message, time.time()
self.log.debug("_on_message(): Received new message %s at %s",
raw, received_at)
try:
data = json.loads(raw)
except json.JSONDecodeError:
return
if isinstance(data, dict):
self._system_handler(data, received_at)
else:
if data[1] == 'hb':
self._heartbeat_handler()
else:
self._data_handler(data, received_at)
self._start_timers()
    def _on_close(self, ws, *args):
        """Websocket callback: mark the client disconnected and stop watchdogs."""
        self.log.info("Connection closed")
        self.connected.clear()
        self._stop_timers()
    def _on_open(self, ws):
        """Websocket callback: mark connected, arm watchdogs, re-subscribe if needed."""
        self.log.info("Connection opened")
        self.connected.set()
        self.send_ping()
        self._start_timers()
        if self.reconnect_required.is_set():
            self.log.info("_on_open(): Connection reconnected, re-subscribing..")
            # Hard re-subscribe: replay the stored subscription requests.
            self._resubscribe(soft=False)
    def _on_error(self, ws, error):
        """Websocket callback: flag the connection for automatic reconnection."""
        self.log.info("Connection Error - %s", error)
        self.reconnect_required.set()
        self.connected.clear()
def _stop_timers(self):
if self.ping_timer:
self.ping_timer.cancel()
if self.connection_timer:
self.connection_timer.cancel()
if self.pong_timer:
self.pong_timer.cancel()
self.log.debug("_stop_timers(): Timers stopped.")
    def _start_timers(self):
        """(Re)arm the ping and connection watchdog timers."""
        self.log.debug("_start_timers(): Resetting timers..")
        self._stop_timers()
        # Sends a ping at ping_interval to see if the API is still responding.
        self.ping_timer = Timer(self.ping_interval, self.send_ping)
        self.ping_timer.start()
        # Automatically reconnect if we didn't receive data in time.
        self.connection_timer = Timer(self.connection_timeout,
                                      self._connection_timed_out)
        self.connection_timer.start()
    def send_ping(self):
        """Send a ping event and arm the pong watchdog timer."""
        self.log.debug("send_ping(): Sending ping to API..")
        self.socket.send(json.dumps({'event': 'ping'}))
        self.pong_timer = Timer(self.pong_timeout, self._check_pong)
        self.pong_timer.start()
def _check_pong(self):
self.pong_timer.cancel()
if self.pong_received:
self.log.debug("_check_pong(): Pong received in time.")
self.pong_received = False
else:
# reconnect
self.log.debug("_check_pong(): Pong not received in time."
"Issuing reconnect..")
self.reconnect()
def send(self, api_key=None, secret=None, list_data=None, auth=False, **kwargs):
if auth:
nonce = str(int(time.time() * 10000000))
auth_string = 'AUTH' + nonce
auth_sig = hmac.new(secret.encode(), auth_string.encode(),
hashlib.sha384).hexdigest()
payload = {'event': 'auth', 'apiKey': api_key, 'authSig': auth_sig,
'authPayload': auth_string, 'authNonce': nonce}
payload = json.dumps(payload)
elif list_data:
payload = json.dumps(list_data)
else:
payload = json.dumps(kwargs)
self.log.debug("send(): Sending payload to API: %s", payload)
try:
self.socket.send(payload)
except websocket.WebSocketConnectionClosedException:
self.log.error("send(): Did not send out payload %s - client not connected. ", kwargs)
    def pass_to_client(self, event, data, *args):
        """Forward an event and its payload to the consumer queue."""
        self.q.put((event, data, *args))
    def _connection_timed_out(self):
        """Connection watchdog callback: no data seen in time, reconnect."""
        self.log.debug("_connection_timed_out(): Fired! Issuing reconnect..")
        self.reconnect()
    def _pause(self):
        """Set the paused flag (API code 20060: trading engine refreshing)."""
        self.log.debug("_pause(): Setting paused() Flag!")
        self.paused.set()
    def _unpause(self):
        """Clear the paused flag and softly re-subscribe all channels."""
        self.log.debug("_unpause(): Clearing paused() Flag!")
        self.paused.clear()
        self.log.debug("_unpause(): Re-subscribing softly..")
        self._resubscribe(soft=True)
    def _heartbeat_handler(self):
        """Handle an 'hb' channel message by re-arming the watchdog timers."""
        # Restart our timers since we received some data.
        self.log.debug("_heartbeat_handler(): Received a heart beat "
                       "from connection!")
        self._start_timers()
    def _pong_handler(self):
        """Record that a Pong response to our Ping was received."""
        self.log.debug("_pong_handler(): Received a Pong message!")
        self.pong_received = True
    def _system_handler(self, data, ts):
        """Dispatch a system (dict) message to the matching handler.

        Note: pops 'event' from data, so the dict is mutated in place.
        """
        self.log.debug("_system_handler(): Received a system message: %s", data)
        # Unpack the data
        event = data.pop('event')
        if event == 'pong':
            self.log.debug("_system_handler(): Distributing %s to _pong_handler..",
                           data)
            self._pong_handler()
        elif event == 'info':
            self.log.debug("_system_handler(): Distributing %s to _info_handler..",
                           data)
            self._info_handler(data)
        elif event == 'error':
            self.log.debug("_system_handler(): Distributing %s to _error_handler..",
                           data)
            self._error_handler(data)
        elif event in ('subscribed', 'unsubscribed', 'conf', 'auth', 'unauth'):
            self.log.debug("_system_handler(): Distributing %s to "
                           "_response_handler..", data)
            self._response_handler(event, data, ts)
        else:
            self.log.error("Unhandled event: %s, data: %s", event, data)
    def _response_handler(self, event, data, ts):
        """Forward subscription/auth/conf responses to the consumer queue."""
        self.log.debug("_response_handler(): Passing %s to client..", data)
        self.pass_to_client(event, data, ts)
    def _info_handler(self, data):
        """React to an 'info' event: log it and run the mapped action.

        20051 -> reconnect, 20060 -> pause, 20061 -> unpause.  A 'version'
        info message is only logged.  Unknown codes are logged and re-raised.
        NOTE(review): the lookup keys are strings; if the API delivers
        numeric codes in data['code'], the lookup would raise — confirm
        against the wire format.
        """
        info_message = {'20051': 'Stop/Restart websocket server '
                                 '(please try to reconnect)',
                        '20060': 'Refreshing data from the trading engine; '
                                 'please pause any acivity.',
                        '20061': 'Done refreshing data from the trading engine.'
                                 ' Re-subscription advised.'}
        codes = {'20051': self.reconnect, '20060': self._pause,
                 '20061': self._unpause}
        if 'version' in data:
            self.log.info("API version: %i", data['version'])
            return
        try:
            self.log.info(info_message[data['code']])
            codes[data['code']]()
        except KeyError as e:
            self.log.exception(e)
            self.log.error("Unknown Info code %s!", data['code'])
            raise
def _error_handler(self, data):
errors = {10000: 'Unknown event',
10001: 'Generic error',
10008: 'Concurrency error',
10020: 'Request parameters error',
10050: 'Configuration setup failed',
10100: 'Failed authentication',
10111: 'Error in authentication request payload',
10112: 'Error in authentication request signature',
10113: 'Error in authentication request encryption',
10114: 'Error in authentication request nonce',
10200: 'Error in un-authentication request',
10300: 'Subscription Failed (generic)',
10301: 'Already Subscribed',
10302: 'Unknown channel',
10400: 'Subscription Failed (generic)',
10401: 'Not subscribed',
11000: 'Not ready, try again later',
20000: 'User is invalid!'
}
try:
self.log.error(errors[data['code']])
except KeyError:
self.log.error("Received unknown error Code in message %s! "
"Reconnecting..", data)
    def _data_handler(self, data, ts):
        """Forward a channel data message (with receive timestamp) to the client."""
        # Pass the data up to the Client
        self.log.debug("_data_handler(): Passing %s to client..",
                       data)
        self.pass_to_client('data', data, ts)
    def _resubscribe(self, soft=False):
        """Re-send subscription requests after a reconnect or engine refresh.

        Pops every stored channel config and re-sends it.  With soft=True an
        'unsubscribe' is sent first, then the configs are re-registered and
        re-sent (newest-first pop, reversed replay preserves order).  With
        soft=False the stored requests are simply replayed and re-registered.
        The 'auth' entry is always re-sent via the auth path and never
        unsubscribed.
        """
        q_list = []
        while True:
            try:
                # soft: pop newest-first (LIFO); hard: oldest-first (FIFO).
                identifier, q = self.channel_configs.popitem(last=True if soft else False)
            except KeyError:
                break
            q_list.append((identifier, q.copy()))
            if identifier == 'auth':
                self.send(**q, auth=True)
                continue
            if soft:
                q['event'] = 'unsubscribe'
            self.send(**q)
        # Resubscribe for soft start.
        if soft:
            for identifier, q in reversed(q_list):
                self.channel_configs[identifier] = q
                self.send(**q)
        else:
            for identifier, q in q_list:
                self.channel_configs[identifier] = q
| true
| true
|
f70c91cd387a0dc3aa4646a03addc469d8d04475
| 9,364
|
py
|
Python
|
src/ripplenet_data_loader.py
|
andreeaiana/geneg_benchmarking
|
0b53989c79b8e3771c144c0332fd36587dfe0f4d
|
[
"MIT"
] | 1
|
2021-12-08T12:02:56.000Z
|
2021-12-08T12:02:56.000Z
|
src/ripplenet_data_loader.py
|
andreeaiana/geneg_benchmarking
|
0b53989c79b8e3771c144c0332fd36587dfe0f4d
|
[
"MIT"
] | null | null | null |
src/ripplenet_data_loader.py
|
andreeaiana/geneg_benchmarking
|
0b53989c79b8e3771c144c0332fd36587dfe0f4d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# DISCLAIMER
# This code file is forked and adapted from https://github.com/tezignlab/RippleNet-TF2/blob/master/tools/load_data.py, which is under an MIT license.
""" Utilities for data loading for RippleNet. """
# import libraries
import os
import numpy as np
from collections import defaultdict
from pathlib import Path
from typing import Dict, List, Tuple
# import custom code
from src.util.logger import setup_logging
from src.util.caching import create_cache, load_cache
from src.config import FILENAME_RATINGS_FINAL_TXT, FILENAME_RATINGS_FINAL_NPY, FILENAME_KG_FINAL_TXT, FILENAME_KG_FINAL_NPY, FILENAME_TRAIN_RATINGS, FILENAME_USER_HISTORY_DICT
from src.config import FILENAME_TEST_RATINGS, FILENAME_TEST_RATINGS_RANDOM, FILENAME_TEST_RATINGS_NO_TFIDF, FILENAME_TEST_RATINGS_NO_WORD2VEC, FILENAME_TEST_RATINGS_NO_TRANSFORMER
class LoadData:
    """Loads ratings, the knowledge graph, and per-user ripple sets for RippleNet."""

    def __init__(self, args):
        # args must provide: test_set, n_hop, n_memory (read by methods below).
        self.args = args
        self.logger = setup_logging(name=__file__, log_level='info')
    def load_data(self) -> Tuple[np.ndarray, np.ndarray, int, int, Dict[int, List[Tuple[int, int, int]]]]:
        """
        Loads and returns the data needed in RippleNet.
        Returns:
            - :obj:`np.ndarray`:
                Training set of ratings.
            - :obj:`np.ndarray`:
                Test set of ratings.
            - :obj:`int`:
                Number of entities.
            - :obj:`int`:
                Number of relations.
            - :obj:`Dict[int, List[Tuple[int, int, int]]]`:
                Ripple sets of each user.
        """
        train_data, test_data, user_history_dict = self.load_rating()
        n_entity, n_relation, kg = self.load_kg()
        ripple_set = self.get_ripple_set(kg, user_history_dict)
        return train_data, test_data, n_entity, n_relation, ripple_set
    def get_test_file(self, test_set_type: str) -> Path:
        """
        Retrieves the filepath of a test set given its type.
        Args:
            test_set_type (:obj:`str`):
                The type of test set.
        Returns:
            :obj:`Path`:
                The filepath of the test set.
        """
        test_set_type2file = {
            'complete': FILENAME_TEST_RATINGS,
            'random': FILENAME_TEST_RATINGS_RANDOM,
            'no_tfidf_ratings': FILENAME_TEST_RATINGS_NO_TFIDF,
            'no_word2vec_ratings': FILENAME_TEST_RATINGS_NO_WORD2VEC,
            'no_transformer_ratings': FILENAME_TEST_RATINGS_NO_TRANSFORMER
            }
        return test_set_type2file[test_set_type]
    def load_rating(self) -> Tuple[np.ndarray, np.ndarray, Dict[int, List[int]]]:
        """
        It loads the training and test data, and the user history, if they exist.
        Otherwise, it loads the user ratings, processes them to construct the training and test sets, and user history, and caches them to disk.
        Returns:
            - :obj:`np.ndarray`:
                Training set of ratings.
            - :obj:`np.ndarray`:
                Test set of ratings.
            - :obj:`Dict[int, List[int]]`:
                User history dictionary.
        """
        self.logger.info('Reading rating file.')
        test_file = self.get_test_file(self.args.test_set)
        if os.path.exists(FILENAME_TRAIN_RATINGS) and os.path.exists(test_file) and os.path.exists(FILENAME_USER_HISTORY_DICT):
            self.logger.info('Loading training and test data.')
            train_data = np.load(FILENAME_TRAIN_RATINGS)
            test_data = np.load(test_file)
            user_history_dict = load_cache(FILENAME_USER_HISTORY_DICT)
            self.logger.info(f'Size training data: {train_data.shape}.')
            self.logger.info(f'Size test data: {test_data.shape}.')
        else:
            # Read rating file
            if os.path.exists(FILENAME_RATINGS_FINAL_NPY):
                rating_np = np.load(FILENAME_RATINGS_FINAL_NPY)
            else:
                rating_np = np.loadtxt(FILENAME_RATINGS_FINAL_TXT, dtype=np.int32)
                np.save(FILENAME_RATINGS_FINAL_NPY, rating_np)
            # Split dataset
            self.logger.info('Splitting dataset.')
            test_ratio = 0.2
            n_ratings = rating_np.shape[0]
            # NOTE(review): unseeded random split — reruns produce different splits.
            test_indices = np.random.choice(n_ratings, size=int(n_ratings * test_ratio), replace=False)
            train_indices = set(range(n_ratings)) - set(test_indices)
            # Traverse training data, only keeping the users with positive ratings
            user_history_dict = dict()
            for i in train_indices:
                user = rating_np[i][0]
                item = rating_np[i][1]
                rating = rating_np[i][2]
                if rating == 1:
                    if user not in user_history_dict:
                        user_history_dict[user] = []
                    user_history_dict[user].append(item)
            train_indices = [i for i in train_indices if rating_np[i][0] in user_history_dict]
            test_indices = [i for i in test_indices if rating_np[i][0] in user_history_dict]
            train_data = rating_np[train_indices]
            test_data = rating_np[test_indices]
            self.logger.info(f'Size training data: {train_data.shape}.')
            self.logger.info(f'Size test data: {test_data.shape}.')
            # Cache test and train data
            # NOTE(review): the rebuilt test set is always saved to
            # FILENAME_TEST_RATINGS even when args.test_set selects a
            # different file — confirm this is intended.
            np.save(FILENAME_TRAIN_RATINGS, train_data)
            np.save(FILENAME_TEST_RATINGS, test_data)
            create_cache(user_history_dict, FILENAME_USER_HISTORY_DICT)
        self.logger.info('Finished.\n')
        return train_data, test_data, user_history_dict
    def load_kg(self) -> Tuple[int, int, Dict[int, List[Tuple[int, int]]]]:
        """
        Loads the knowledge graph if already cached as :obj:`np.ndarray`, otherwise it constructs it from the text file.
        Returns:
            - :obj:`int`:
                Number of entities.
            - :obj:`int`:
                Number of relations.
            - :obj:`Dict[int, List[Tuple[int, int]]]`:
                The knowledge graph as a dictionary which maps each head entity to a tuple of the form (tail, relation).
        """
        self.logger.info('Reading KG file.')
        # Reading KG file
        if os.path.exists(FILENAME_KG_FINAL_NPY):
            kg_np = np.load(FILENAME_KG_FINAL_NPY)
        else:
            kg_np = np.loadtxt(FILENAME_KG_FINAL_TXT, dtype=np.int32)
            np.save(FILENAME_KG_FINAL_NPY, kg_np)
        # Entities appear as heads (col 0) or tails (col 2); relations are col 1.
        n_entity = len(set(kg_np[:, 0]) | set(kg_np[:, 2]))
        n_relation = len(set(kg_np[:, 1]))
        self.logger.info('Constructing knowledge graph.')
        kg = defaultdict(list)
        for head, relation, tail in kg_np:
            kg[head].append((tail, relation))
        self.logger.info('Finished.\n')
        return n_entity, n_relation, kg
    def get_ripple_set(self, kg: Dict[int, List[Tuple[int, int]]], user_history_dict: Dict[int, List[int]]) -> Dict[int, List[Tuple[int, int, int]]]:
        """
        Creates the ripple set for each user.
        Args:
            kg (:obj:`Dict[int, List[Tuple[int, int]]]`):
                The knowledge graph as a dictionary which maps each head entity to a tuple of the form (tail, relation).
            user_history_dict (:obj:`Dict[int, List[int]]`):
                User history dictionary.
        Returns:
            :obj:`Dict[int, List[Tuple[int, int, int]]]`:
                Ripple sets of each user.
        """
        self.logger.info('Constructing ripple set.')
        # user -> [(hop_0_heads, hop_0_relations, hop_0_tails), (hop_1_heads, hop_1_relations, hop_1_tails), ...]
        ripple_set = defaultdict(list)
        for user in user_history_dict:
            for h in range(self.args.n_hop):
                memories_h = []
                memories_r = []
                memories_t = []
                if h == 0:
                    tails_of_last_hop = user_history_dict[user]
                else:
                    tails_of_last_hop = ripple_set[user][-1][2]
                for entity in tails_of_last_hop:
                    for tail_and_relation in kg[entity]:
                        memories_h.append(entity)
                        memories_r.append(tail_and_relation[1])
                        memories_t.append(tail_and_relation[0])
                """
                If the current ripple set of the given user is empty, we simply copy the ripple set of the last hop here
                This won't happen for h = 0, because only the items that appear in the KG have been selected.
                """
                if len(memories_h) == 0:
                    ripple_set[user].append(ripple_set[user][-1])
                else:
                    # Sample a fixed-size 1-hop memory for each user
                    replace = len(memories_h) < self.args.n_memory
                    indices = np.random.choice(len(memories_h), size=self.args.n_memory, replace=replace)
                    memories_h = [memories_h[i] for i in indices]
                    memories_r = [memories_r[i] for i in indices]
                    memories_t = [memories_t[i] for i in indices]
                    ripple_set[user].append((memories_h, memories_r, memories_t))
        self.logger.info('Finished.\n')
        return ripple_set
| 41.617778
| 179
| 0.596967
|
import os
import numpy as np
from collections import defaultdict
from pathlib import Path
from typing import Dict, List, Tuple
from src.util.logger import setup_logging
from src.util.caching import create_cache, load_cache
from src.config import FILENAME_RATINGS_FINAL_TXT, FILENAME_RATINGS_FINAL_NPY, FILENAME_KG_FINAL_TXT, FILENAME_KG_FINAL_NPY, FILENAME_TRAIN_RATINGS, FILENAME_USER_HISTORY_DICT
from src.config import FILENAME_TEST_RATINGS, FILENAME_TEST_RATINGS_RANDOM, FILENAME_TEST_RATINGS_NO_TFIDF, FILENAME_TEST_RATINGS_NO_WORD2VEC, FILENAME_TEST_RATINGS_NO_TRANSFORMER
class LoadData:
    """Loads ratings, the knowledge graph, and per-user ripple sets for RippleNet."""

    def __init__(self, args):
        # args must provide: test_set, n_hop, n_memory (read by methods below).
        self.args = args
        self.logger = setup_logging(name=__file__, log_level='info')
    def load_data(self) -> Tuple[np.ndarray, np.ndarray, int, int, Dict[int, List[Tuple[int, int, int]]]]:
        """Return (train_data, test_data, n_entity, n_relation, ripple_set)."""
        train_data, test_data, user_history_dict = self.load_rating()
        n_entity, n_relation, kg = self.load_kg()
        ripple_set = self.get_ripple_set(kg, user_history_dict)
        return train_data, test_data, n_entity, n_relation, ripple_set
    def get_test_file(self, test_set_type: str) -> Path:
        """Map a test-set type name to its configured filepath (KeyError if unknown)."""
        test_set_type2file = {
            'complete': FILENAME_TEST_RATINGS,
            'random': FILENAME_TEST_RATINGS_RANDOM,
            'no_tfidf_ratings': FILENAME_TEST_RATINGS_NO_TFIDF,
            'no_word2vec_ratings': FILENAME_TEST_RATINGS_NO_WORD2VEC,
            'no_transformer_ratings': FILENAME_TEST_RATINGS_NO_TRANSFORMER
            }
        return test_set_type2file[test_set_type]
    def load_rating(self) -> Tuple[np.ndarray, np.ndarray, Dict[int, List[int]]]:
        """Load cached train/test ratings and user history, or build and cache them."""
        self.logger.info('Reading rating file.')
        test_file = self.get_test_file(self.args.test_set)
        if os.path.exists(FILENAME_TRAIN_RATINGS) and os.path.exists(test_file) and os.path.exists(FILENAME_USER_HISTORY_DICT):
            # Fast path: everything already cached on disk.
            self.logger.info('Loading training and test data.')
            train_data = np.load(FILENAME_TRAIN_RATINGS)
            test_data = np.load(test_file)
            user_history_dict = load_cache(FILENAME_USER_HISTORY_DICT)
            self.logger.info(f'Size training data: {train_data.shape}.')
            self.logger.info(f'Size test data: {test_data.shape}.')
        else:
            # Load raw ratings (npy cache preferred over txt).
            if os.path.exists(FILENAME_RATINGS_FINAL_NPY):
                rating_np = np.load(FILENAME_RATINGS_FINAL_NPY)
            else:
                rating_np = np.loadtxt(FILENAME_RATINGS_FINAL_TXT, dtype=np.int32)
                np.save(FILENAME_RATINGS_FINAL_NPY, rating_np)
            # Random 80/20 train/test split (unseeded — not reproducible).
            self.logger.info('Splitting dataset.')
            test_ratio = 0.2
            n_ratings = rating_np.shape[0]
            test_indices = np.random.choice(n_ratings, size=int(n_ratings * test_ratio), replace=False)
            train_indices = set(range(n_ratings)) - set(test_indices)
            # Keep only users that have at least one positive (rating == 1) item.
            user_history_dict = dict()
            for i in train_indices:
                user = rating_np[i][0]
                item = rating_np[i][1]
                rating = rating_np[i][2]
                if rating == 1:
                    if user not in user_history_dict:
                        user_history_dict[user] = []
                    user_history_dict[user].append(item)
            train_indices = [i for i in train_indices if rating_np[i][0] in user_history_dict]
            test_indices = [i for i in test_indices if rating_np[i][0] in user_history_dict]
            train_data = rating_np[train_indices]
            test_data = rating_np[test_indices]
            self.logger.info(f'Size training data: {train_data.shape}.')
            self.logger.info(f'Size test data: {test_data.shape}.')
            # NOTE(review): the rebuilt test set is always saved to
            # FILENAME_TEST_RATINGS even when args.test_set selects a
            # different file — confirm this is intended.
            np.save(FILENAME_TRAIN_RATINGS, train_data)
            np.save(FILENAME_TEST_RATINGS, test_data)
            create_cache(user_history_dict, FILENAME_USER_HISTORY_DICT)
        self.logger.info('Finished.\n')
        return train_data, test_data, user_history_dict
    def load_kg(self) -> Tuple[int, int, Dict[int, List[Tuple[int, int]]]]:
        """Load the KG triples and return (n_entity, n_relation, head -> [(tail, relation)])."""
        self.logger.info('Reading KG file.')
        if os.path.exists(FILENAME_KG_FINAL_NPY):
            kg_np = np.load(FILENAME_KG_FINAL_NPY)
        else:
            kg_np = np.loadtxt(FILENAME_KG_FINAL_TXT, dtype=np.int32)
            np.save(FILENAME_KG_FINAL_NPY, kg_np)
        # Entities appear as heads (col 0) or tails (col 2); relations are col 1.
        n_entity = len(set(kg_np[:, 0]) | set(kg_np[:, 2]))
        n_relation = len(set(kg_np[:, 1]))
        self.logger.info('Constructing knowledge graph.')
        kg = defaultdict(list)
        for head, relation, tail in kg_np:
            kg[head].append((tail, relation))
        self.logger.info('Finished.\n')
        return n_entity, n_relation, kg
    def get_ripple_set(self, kg: Dict[int, List[Tuple[int, int]]], user_history_dict: Dict[int, List[int]]) -> Dict[int, List[Tuple[int, int, int]]]:
        """Build, per user, n_hop lists of (heads, relations, tails) memories of size n_memory."""
        self.logger.info('Constructing ripple set.')
        ripple_set = defaultdict(list)
        for user in user_history_dict:
            for h in range(self.args.n_hop):
                memories_h = []
                memories_r = []
                memories_t = []
                # Hop 0 seeds from the user's positive items; later hops from prior tails.
                if h == 0:
                    tails_of_last_hop = user_history_dict[user]
                else:
                    tails_of_last_hop = ripple_set[user][-1][2]
                for entity in tails_of_last_hop:
                    for tail_and_relation in kg[entity]:
                        memories_h.append(entity)
                        memories_r.append(tail_and_relation[1])
                        memories_t.append(tail_and_relation[0])
                if len(memories_h) == 0:
                    # No outgoing edges this hop: repeat the previous hop's set.
                    ripple_set[user].append(ripple_set[user][-1])
                else:
                    # Sample a fixed-size memory; sample with replacement when short.
                    replace = len(memories_h) < self.args.n_memory
                    indices = np.random.choice(len(memories_h), size=self.args.n_memory, replace=replace)
                    memories_h = [memories_h[i] for i in indices]
                    memories_r = [memories_r[i] for i in indices]
                    memories_t = [memories_t[i] for i in indices]
                    ripple_set[user].append((memories_h, memories_r, memories_t))
        self.logger.info('Finished.\n')
        return ripple_set
| true
| true
|
f70c9372d1e08c227c451f6995f0db4f740bf2b7
| 757
|
py
|
Python
|
scripts/name_creator.py
|
ankitbhatia/word-mastermind
|
51529b04e6e1bb150c867e0f6e44f36131c33189
|
[
"MIT"
] | null | null | null |
scripts/name_creator.py
|
ankitbhatia/word-mastermind
|
51529b04e6e1bb150c867e0f6e44f36131c33189
|
[
"MIT"
] | null | null | null |
scripts/name_creator.py
|
ankitbhatia/word-mastermind
|
51529b04e6e1bb150c867e0f6e44f36131c33189
|
[
"MIT"
] | null | null | null |
"""Aggregate name frequencies from data/*.txt and write all 5-letter names."""
import csv
import glob

import pandas as pd

files = glob.glob("data/*.txt")
names = {}
for file in files:
    with open(file) as csvfile:
        reader = csv.reader(csvfile, delimiter=',')
        # Each row: name, sex, frequency.
        for row in reader:
            name = row[0]
            sex = row[1]
            # BUG FIX: csv yields strings, so the old "+=" concatenated the
            # digit strings (e.g. '12' + '34' -> '1234') instead of summing.
            number = int(row[2])
            if name not in names:
                names[name] = {'freq': number, 'sex': sex, 'count': len(name)}
            else:
                # Accumulate frequency across files; keeps the first-seen sex.
                names[name]['freq'] += number
df = pd.DataFrame.from_dict(names, orient='index')
# Write one 5-letter name per line.
with open("dict/names", 'w') as f:
    for v in df[df['count'] == 5].index.values:
        f.write(v + '\n')
| 20.459459
| 51
| 0.483487
|
"""Aggregate name frequencies from data/*.txt and write all 5-letter names."""
import csv
import glob

import pandas as pd

files = glob.glob("data/*.txt")
names = {}
for file in files:
    with open(file) as csvfile:
        reader = csv.reader(csvfile, delimiter=',')
        # Each row: name, sex, frequency.
        for row in reader:
            name = row[0]
            sex = row[1]
            # BUG FIX: csv yields strings, so the old "+=" concatenated the
            # digit strings (e.g. '12' + '34' -> '1234') instead of summing.
            number = int(row[2])
            if name not in names:
                names[name] = {'freq': number, 'sex': sex, 'count': len(name)}
            else:
                # Accumulate frequency across files; keeps the first-seen sex.
                names[name]['freq'] += number
df = pd.DataFrame.from_dict(names, orient='index')
# Write one 5-letter name per line.
with open("dict/names", 'w') as f:
    for v in df[df['count'] == 5].index.values:
        f.write(v + '\n')
| true
| true
|
f70c93d1e096e9a92827acda3d4d66f0256c4a0c
| 3,117
|
py
|
Python
|
segmentation_models_pytorch/losses/focal.py
|
themozel/segmentation_models_pytorch
|
999c7c5cb77faa55793488db7a358ac9fe320b30
|
[
"MIT"
] | 1
|
2021-07-24T11:57:45.000Z
|
2021-07-24T11:57:45.000Z
|
segmentation_models_pytorch/losses/focal.py
|
wstchhwp/segmentation_models.pytorch
|
dcd19d676bdfbf73fc140d5b98d780f449b0a2f8
|
[
"MIT"
] | null | null | null |
segmentation_models_pytorch/losses/focal.py
|
wstchhwp/segmentation_models.pytorch
|
dcd19d676bdfbf73fc140d5b98d780f449b0a2f8
|
[
"MIT"
] | null | null | null |
from typing import Optional
from functools import partial
import torch
from torch.nn.modules.loss import _Loss
from ._functional import focal_loss_with_logits
from .constants import BINARY_MODE, MULTICLASS_MODE, MULTILABEL_MODE
__all__ = ["FocalLoss"]
class FocalLoss(_Loss):
    """Focal loss for binary, multiclass, and multilabel segmentation targets."""

    def __init__(
        self,
        mode: str,
        alpha: Optional[float] = None,
        gamma: Optional[float] = 2.,
        ignore_index: Optional[int] = None,
        reduction: Optional[str] = "mean",
        normalized: bool = False,
        reduced_threshold: Optional[float] = None,
    ):
        """Compute Focal loss
        Args:
            mode: Loss mode 'binary', 'multiclass' or 'multilabel'
            alpha: Prior probability of having positive value in target.
            gamma: Power factor for dampening weight (focal strength).
            ignore_index: If not None, targets may contain values to be ignored.
                Target values equal to ignore_index will be ignored from loss computation.
            normalized: Compute normalized focal loss (https://arxiv.org/pdf/1909.07829.pdf).
            reduced_threshold: Switch to reduced focal loss. Note, when using this mode you should use `reduction="sum"`.
        Shape
             - **y_pred** - torch.Tensor of shape (N, C, H, W)
             - **y_true** - torch.Tensor of shape (N, H, W) or (N, C, H, W)
        Reference
            https://github.com/BloodAxe/pytorch-toolbelt
        """
        assert mode in {BINARY_MODE, MULTILABEL_MODE, MULTICLASS_MODE}
        super().__init__()
        self.mode = mode
        self.ignore_index = ignore_index
        # Bind all loss hyperparameters once; forward only supplies tensors.
        self.focal_loss_fn = partial(
            focal_loss_with_logits,
            alpha=alpha,
            gamma=gamma,
            reduced_threshold=reduced_threshold,
            reduction=reduction,
            normalized=normalized,
        )
    def forward(self, y_pred: torch.Tensor, y_true: torch.Tensor) -> torch.Tensor:
        """Compute the focal loss between logits y_pred and targets y_true."""
        if self.mode in {BINARY_MODE, MULTILABEL_MODE}:
            # Flatten everything and compute a single binary focal loss.
            y_true = y_true.view(-1)
            y_pred = y_pred.view(-1)
            if self.ignore_index is not None:
                # Filter predictions with ignore label from loss computation
                not_ignored = y_true != self.ignore_index
                y_pred = y_pred[not_ignored]
                y_true = y_true[not_ignored]
            loss = self.focal_loss_fn(y_pred, y_true)
        elif self.mode == MULTICLASS_MODE:
            # One-vs-rest: accumulate a binary focal loss per class channel.
            num_classes = y_pred.size(1)
            loss = 0
            # Filter anchors with -1 label from loss computation
            if self.ignore_index is not None:
                not_ignored = y_true != self.ignore_index
            for cls in range(num_classes):
                cls_y_true = (y_true == cls).long()
                cls_y_pred = y_pred[:, cls, ...]
                if self.ignore_index is not None:
                    cls_y_true = cls_y_true[not_ignored]
                    cls_y_pred = cls_y_pred[not_ignored]
                loss += self.focal_loss_fn(cls_y_pred, cls_y_true)
        return loss
| 34.252747
| 121
| 0.59769
|
from typing import Optional
from functools import partial
import torch
from torch.nn.modules.loss import _Loss
from ._functional import focal_loss_with_logits
from .constants import BINARY_MODE, MULTICLASS_MODE, MULTILABEL_MODE
__all__ = ["FocalLoss"]
class FocalLoss(_Loss):
    """Focal loss for binary, multiclass, and multilabel segmentation targets."""

    def __init__(
        self,
        mode: str,
        alpha: Optional[float] = None,
        gamma: Optional[float] = 2.,
        ignore_index: Optional[int] = None,
        reduction: Optional[str] = "mean",
        normalized: bool = False,
        reduced_threshold: Optional[float] = None,
    ):
        """Configure the focal loss.

        Args:
            mode: one of BINARY_MODE, MULTICLASS_MODE, MULTILABEL_MODE.
            alpha: prior probability of the positive class.
            gamma: focusing power factor.
            ignore_index: target value excluded from the loss when not None.
            reduction: reduction passed to focal_loss_with_logits.
            normalized: compute normalized focal loss.
            reduced_threshold: switch to reduced focal loss.
        """
        assert mode in {BINARY_MODE, MULTILABEL_MODE, MULTICLASS_MODE}
        super().__init__()
        self.mode = mode
        self.ignore_index = ignore_index
        # Bind all loss hyperparameters once; forward only supplies tensors.
        self.focal_loss_fn = partial(
            focal_loss_with_logits,
            alpha=alpha,
            gamma=gamma,
            reduced_threshold=reduced_threshold,
            reduction=reduction,
            normalized=normalized,
        )
    def forward(self, y_pred: torch.Tensor, y_true: torch.Tensor) -> torch.Tensor:
        """Compute the focal loss between logits y_pred and targets y_true."""
        if self.mode in {BINARY_MODE, MULTILABEL_MODE}:
            # Flatten everything and compute a single binary focal loss.
            y_true = y_true.view(-1)
            y_pred = y_pred.view(-1)
            if self.ignore_index is not None:
                # Drop positions carrying the ignore label.
                not_ignored = y_true != self.ignore_index
                y_pred = y_pred[not_ignored]
                y_true = y_true[not_ignored]
            loss = self.focal_loss_fn(y_pred, y_true)
        elif self.mode == MULTICLASS_MODE:
            # One-vs-rest: accumulate a binary focal loss per class channel.
            num_classes = y_pred.size(1)
            loss = 0
            if self.ignore_index is not None:
                not_ignored = y_true != self.ignore_index
            for cls in range(num_classes):
                cls_y_true = (y_true == cls).long()
                cls_y_pred = y_pred[:, cls, ...]
                if self.ignore_index is not None:
                    cls_y_true = cls_y_true[not_ignored]
                    cls_y_pred = cls_y_pred[not_ignored]
                loss += self.focal_loss_fn(cls_y_pred, cls_y_true)
        return loss
| true
| true
|
f70c9446510fd3a913edf73e84b550857396d62e
| 153
|
py
|
Python
|
src/process/infrastructure/cqrs/CommandQueryBase.py
|
PythonDataIntegrator/pythondataintegrator
|
6167778c36c2295e36199ac0d4d256a4a0c28d7a
|
[
"MIT"
] | 14
|
2020-12-19T15:06:13.000Z
|
2022-01-12T19:52:17.000Z
|
src/api/infrastructure/cqrs/CommandQueryBase.py
|
PythonDataIntegrator/pythondataintegrator
|
6167778c36c2295e36199ac0d4d256a4a0c28d7a
|
[
"MIT"
] | 43
|
2021-01-06T22:05:22.000Z
|
2022-03-10T10:30:30.000Z
|
src/process/infrastructure/cqrs/CommandQueryBase.py
|
PythonDataIntegrator/pythondataintegrator
|
6167778c36c2295e36199ac0d4d256a4a0c28d7a
|
[
"MIT"
] | 4
|
2020-12-18T23:10:09.000Z
|
2021-04-02T13:03:12.000Z
|
from typing import Generic, TypeVar
# Covariant type parameter for command/query payload types.
CQ = TypeVar('CQ', covariant=True)


class CommandQueryBase(Generic[CQ]):
    """Generic base marker class for CQRS command/query objects."""

    def __init__(self) -> None:
        # BUG FIX: __init__ was annotated "-> CQ"; initializers always return
        # None, and type checkers reject any other return annotation (PEP 484).
        pass
| 19.125
| 36
| 0.686275
|
from typing import Generic, TypeVar
# Covariant type parameter for command/query payload types.
CQ = TypeVar('CQ', covariant=True)


class CommandQueryBase(Generic[CQ]):
    """Generic base marker class for CQRS command/query objects."""

    def __init__(self) -> None:
        # BUG FIX: __init__ was annotated "-> CQ"; initializers always return
        # None, and type checkers reject any other return annotation (PEP 484).
        pass
| true
| true
|
f70c95990672c947172bdd2639482942d6a67997
| 7,096
|
py
|
Python
|
py/kubeflow/testing/go-license-tools/get_github_repo.py
|
ChanYiLin/testing
|
fab6c2782d18c3439b2699df7d1d7da154393e06
|
[
"Apache-2.0"
] | null | null | null |
py/kubeflow/testing/go-license-tools/get_github_repo.py
|
ChanYiLin/testing
|
fab6c2782d18c3439b2699df7d1d7da154393e06
|
[
"Apache-2.0"
] | 451
|
2021-01-22T12:27:17.000Z
|
2022-03-30T02:08:22.000Z
|
py/kubeflow/testing/go-license-tools/get_github_repo.py
|
ChanYiLin/testing
|
fab6c2782d18c3439b2699df7d1d7da154393e06
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import requests
import sys
import traceback
from bs4 import BeautifulSoup as Soup
# CLI definition: resolve github "org/repo" identifiers for a list of go
# import paths.  NOTE: parse_args() runs at import time, so importing this
# module consumes sys.argv.
parser = argparse.ArgumentParser(
    description='Get github repo from go import path.')
parser.add_argument(
    'go_dependency_list_file',
    nargs='?',
    default='dep.txt',
    help=
    'File path of a golang dependency list file, one line has a dependency name. '
    +'(default: %(default)s)',
)
parser.add_argument(
    '-o',
    '--output',
    dest='output_file',
    nargs='?',
    default='repo.txt',
    help=
    'Output file with one line per resolved github repo. Format: org/repo. (default: %(default)s)',
)
parser.add_argument(
    '--manual-dep-repo-mapping',
    dest='manual_dep_repo_mapping_file',
    nargs='?',
    default='dep_repo.manual.csv',
    help=
    'Optional dependency to repo mapping maintained manually for dependencies we cannot '
    +'automatically resolve. Format: each line has dependency import name and its github repo '
    +'separated by comma. Like, "upper.io/db.v3,upper/db". Note: github/upper/db is the repo. '
    +'(default: %(default)s)'
)
args = parser.parse_args()
# URL building blocks shared by the resolver functions below.
protocol = 'https://'
godoc_base = 'godoc.org/'
github_base = 'github.com/'
gopkg_base = 'gopkg.in/'
def github_link_to_repo(repo):
    """Collapse a github path to its first two segments ("org/repo").

    Logs to stderr when a subfolder path was stripped, and asserts that the
    result has exactly the "org/repo" shape.
    """
    segments = repo.split('/')
    if len(segments) > 2:
        print('repo {} has subfolder'.format(repo), file=sys.stderr)
        repo = '/'.join(segments[:2])
    assert len(repo.split(
        '/')) == 2, 'repo name should be org/repo, but is {}'.format(repo)
    return repo
def get_github_repo(url):
    """Normalize a github url to an "org/repo" string.

    Strips the https:// prefix and the github.com/ host, collapses any
    subfolder path, and drops a trailing slash.  Raises when the url does
    not point at github.
    """
    stripped = url[len(protocol):] if url.startswith(protocol) else url
    if not stripped.startswith(github_base):
        raise Exception('Package url is not github: {}'.format(stripped))
    github_repo = github_link_to_repo(stripped[len(github_base):])
    if github_repo.endswith('/'):
        github_repo = github_repo[:-1]
    return github_repo
def fetch_github_uri_from_godoc(url):
    '''
    Tries to resolve github repo from godoc website.
    Implementation: Godoc is a standard way for a lot of golang libraries to
    host its documentation. Godoc page usually has a link on top left with
    github repo url. This function crawls godoc page for the library and finds
    the github url there. If the link there isn't a github url, it throws an
    exception.
    '''
    full_url = protocol + godoc_base + url
    print('fetching godoc {}'.format(full_url), file=sys.stderr)
    response = requests.get(full_url)
    assert response.ok, 'it failed with {} {}'.format(response.status_code,
                                                      response.reason)
    # The project nav bar holds the package name and the source-repo link.
    soup = Soup(response.text, features="html.parser")
    navs = soup.select('#x-projnav')
    if len(navs) != 1:
        raise Exception(
            '#x-projnav should occur exactly once, but {} found for {}'.format(len(navs), url))
    nav = navs[0]
    # Sanity check: the page really documents the package we asked for.
    package_name = nav.select_one('span').contents[0]
    assert package_name == url, 'fetched package name should be the same'
    # The first anchor in the nav is the source repository link.
    link = nav.select_one('a').attrs.get('href')
    return get_github_repo(link)
def fetch_gopkg_uri(url):
    '''
    Tries to resolve github repo for gopkg libraries.
    Implementation: gopkg library page has a button with text 'Source code', its
    url is usually the corresponding github repo. Throws an exception if the url
    found is not github.
    '''
    response = requests.get(protocol + url)
    assert response.ok, 'fetching {} failed with {} {}'.format(
        url, response.status_code, response.reason)
    soup = Soup(response.text, features="html.parser")
    # Anchor whose visible text contains 'Source Code' links to the repo.
    def is_source_code_link(link):
        return link.getText().find('Source Code') >= 0
    source_code_links = list(filter(is_source_code_link, soup.select('a')))
    assert len(
        source_code_links) == 1, 'Expect exactly one source code link found'
    link = source_code_links[0].attrs.get('href')
    return get_github_repo(link)
def get_github_repo_for_dep(dep):
    """Resolve the github "org/repo" for a go import path.

    Resolution order: gopkg.in pages, direct github urls, then the godoc
    page crawler as a fallback.
    """
    print('Fetching github uri for {}'.format(dep), file=sys.stderr)
    if dep.startswith(gopkg_base):
        print('Try fetching {} from gopkg'.format(dep), file=sys.stderr)
        return fetch_gopkg_uri(dep)
    if dep.startswith(github_base):
        print('{} is already github'.format(dep), file=sys.stderr)
        return get_github_repo(dep)
    print('Try fetching {} repo from godoc'.format(dep), file=sys.stderr)
    return fetch_github_uri_from_godoc(dep)
def main():
    """Resolve a github repo for every dependency listed in the input file.

    Reads one import path per line from ``args.go_dependency_list_file``,
    writes one resolved ``org/repo`` per line to ``args.output_file`` and
    reports progress/failures on stderr. A missing manual mapping file is
    tolerated; a single unresolvable dependency does not abort the run.
    """
    with open(args.go_dependency_list_file,
              'r') as dep_file, open(args.output_file, 'w') as output_file:
        # Manually maintained dep -> repo overrides; the file is optional.
        mappings = {}
        try:
            with open(args.manual_dep_repo_mapping_file, 'r') as dep_repo_mapping_file:
                for line in dep_repo_mapping_file:
                    # Each line is "<import path>,<org/repo>".
                    mapping = line.strip().split(',')
                    assert len(mapping) == 2
                    [dep, repo] = mapping
                    mappings[dep] = repo
        except Exception:  # pylint: disable=broad-except
            # Fall back to fully automatic resolution.
            print('ignore manual_dep_repo_mapping_file', file=sys.stderr)
        deps = [line.strip() for line in dep_file]
        # Several import paths can map to one repo; write each repo once.
        repo_seen = set()
        dep_succeeded = []
        # Dependencies that we couldn't resolve their github repos.
        dep_failed = []
        for dep in deps:
            try:
                # Get dep's repo from manually maintained mapping first.
                repo = mappings.get(dep)
                if repo is not None:
                    print('repo of {} is already configured to {}'.format(dep, repo), file=sys.stderr)
                else:
                    # Try to resolve if not found
                    repo = get_github_repo_for_dep(dep)
                if repo in repo_seen:
                    print('repo {} is seen more than once'.format(repo), file=sys.stderr)
                else:
                    repo_seen.add(repo)
                    print(repo, file=output_file)
                dep_succeeded.append(dep)
            except Exception as e:  # pylint: disable=broad-except
                # Record the failure and keep processing the rest.
                print('[failed]', e, file=sys.stderr)
                traceback.print_exc(file=sys.stderr)
                dep_failed.append(dep)
        print()
        print((
            'Successfully resolved github repo for {} dependencies and saved to {}. '
            + 'Failed to resolve {} dependencies.'
        ).format(len(dep_succeeded), args.output_file, len(dep_failed)),
            file=sys.stderr)
        if dep_failed:
            print('We failed to resolve the following dependencies:', file=sys.stderr)
            for dep in dep_failed:
                print(dep, file=sys.stderr)


if __name__ == '__main__':
    main()
| 33.471698
| 97
| 0.681229
|
import argparse
import requests
import sys
import traceback
from bs4 import BeautifulSoup as Soup
parser = argparse.ArgumentParser(
description='Get github repo from go import path.')
parser.add_argument(
'go_dependency_list_file',
nargs='?',
default='dep.txt',
help=
'File path of a golang dependency list file, one line has a dependency name. '
+'(default: %(default)s)',
)
parser.add_argument(
'-o',
'--output',
dest='output_file',
nargs='?',
default='repo.txt',
help=
'Output file with one line per resolved github repo. Format: org/repo. (default: %(default)s)',
)
parser.add_argument(
'--manual-dep-repo-mapping',
dest='manual_dep_repo_mapping_file',
nargs='?',
default='dep_repo.manual.csv',
help=
'Optional dependency to repo mapping maintained manually for dependencies we cannot '
+'automatically resolve. Format: each line has dependency import name and its github repo '
+'separated by comma. Like, "upper.io/db.v3,upper/db". Note: github/upper/db is the repo. '
+'(default: %(default)s)'
)
args = parser.parse_args()
protocol = 'https://'
godoc_base = 'godoc.org/'
github_base = 'github.com/'
gopkg_base = 'gopkg.in/'
def github_link_to_repo(repo):
    """Trim a github path down to its ``org/repo`` prefix.

    A path with extra components (a subfolder inside the repo) is reduced
    to its first two segments, with a note on stderr; a bare ``org/repo``
    is returned unchanged. Asserts the result has exactly two segments.
    """
    segments = repo.split('/')
    if len(segments) > 2:
        print('repo {} has subfolder'.format(repo), file=sys.stderr)
        repo = '/'.join(segments[:2])
    assert len(repo.split('/')) == 2, 'repo name should be org/repo, but is {}'.format(repo)
    return repo
def get_github_repo(url):
    """Extract the ``org/repo`` name from a github url.

    Accepts urls with or without the ``https://`` prefix and strips any
    trailing slash. Raises for any url that is not on github.com.
    """
    stripped = url[len(protocol):] if url.startswith(protocol) else url
    if not stripped.startswith(github_base):
        raise Exception('Package url is not github: {}'.format(stripped))
    name = github_link_to_repo(stripped[len(github_base):])
    return name[:-1] if name[-1] == '/' else name
def fetch_github_uri_from_godoc(url):
    """Look up a package on godoc.org and return its github ``org/repo``.

    Scrapes the ``#x-projnav`` breadcrumb of the godoc page: its <span>
    holds the package import path and its <a> links to the source repo.
    Raises if the page layout is unexpected or the repo is not on github.
    """
    full_url = protocol + godoc_base + url
    print('fetching godoc {}'.format(full_url), file=sys.stderr)
    response = requests.get(full_url)
    assert response.ok, 'it failed with {} {}'.format(response.status_code,
                                                     response.reason)
    soup = Soup(response.text, features="html.parser")
    navs = soup.select('#x-projnav')
    if len(navs) != 1:
        raise Exception(
            '#x-projnav should occur exactly once, but {} found for {}'.format(len(navs), url))
    nav = navs[0]
    # Sanity check: the breadcrumb should name the package we asked for.
    package_name = nav.select_one('span').contents[0]
    assert package_name == url, 'fetched package name should be the same'
    link = nav.select_one('a').attrs.get('href')
    return get_github_repo(link)
def fetch_gopkg_uri(url):
    """Resolve the github repo for a gopkg.in library.

    The gopkg.in page exposes a single 'Source Code' button whose href is
    usually the corresponding github repo. Raises if there is not exactly
    one such link or if it does not point at github.
    """
    response = requests.get(protocol + url)
    assert response.ok, 'fetching {} failed with {} {}'.format(
        url, response.status_code, response.reason)
    soup = Soup(response.text, features="html.parser")

    def is_source_code_link(link):
        # Anchor text containing 'Source Code' marks the repo link.
        return link.getText().find('Source Code') >= 0

    source_code_links = list(filter(is_source_code_link, soup.select('a')))
    assert len(
        source_code_links) == 1, 'Expect exactly one source code link found'
    link = source_code_links[0].attrs.get('href')
    return get_github_repo(link)
def get_github_repo_for_dep(dep):
    """Resolve the github repo of one dependency import path.

    Tries, in order: the gopkg.in website for gopkg paths, direct parsing
    for github.com paths, and the godoc website for everything else.
    """
    print('Fetching github uri for {}'.format(dep), file=sys.stderr)
    repo = None
    if dep.startswith(gopkg_base):
        print('Try fetching {} from gopkg'.format(dep), file=sys.stderr)
        repo = fetch_gopkg_uri(dep)
    elif dep.startswith(github_base):
        print('{} is already github'.format(dep), file=sys.stderr)
        repo = get_github_repo(dep)
    else:
        print('Try fetching {} repo from godoc'.format(dep), file=sys.stderr)
        repo = fetch_github_uri_from_godoc(dep)
    return repo
def main():
    """Resolve a github repo for every dependency listed in the input file.

    Reads one import path per line from ``args.go_dependency_list_file``,
    writes one resolved ``org/repo`` per line to ``args.output_file`` and
    reports progress/failures on stderr. A missing manual mapping file is
    tolerated; a single unresolvable dependency does not abort the run.
    """
    with open(args.go_dependency_list_file,
              'r') as dep_file, open(args.output_file, 'w') as output_file:
        # Manually maintained dep -> repo overrides; the file is optional.
        mappings = {}
        try:
            with open(args.manual_dep_repo_mapping_file, 'r') as dep_repo_mapping_file:
                for line in dep_repo_mapping_file:
                    # Each line is "<import path>,<org/repo>".
                    mapping = line.strip().split(',')
                    assert len(mapping) == 2
                    [dep, repo] = mapping
                    mappings[dep] = repo
        except Exception:
            # Fall back to fully automatic resolution.
            print('ignore manual_dep_repo_mapping_file', file=sys.stderr)
        deps = [line.strip() for line in dep_file]
        # Several import paths can map to one repo; write each repo once.
        repo_seen = set()
        dep_succeeded = []
        # Dependencies whose github repo could not be resolved.
        dep_failed = []
        for dep in deps:
            try:
                # Prefer the manually maintained mapping when present.
                repo = mappings.get(dep)
                if repo is not None:
                    print('repo of {} is already configured to {}'.format(dep, repo), file=sys.stderr)
                else:
                    repo = get_github_repo_for_dep(dep)
                if repo in repo_seen:
                    print('repo {} is seen more than once'.format(repo), file=sys.stderr)
                else:
                    repo_seen.add(repo)
                    print(repo, file=output_file)
                dep_succeeded.append(dep)
            except Exception as e:
                # Record the failure and keep processing the rest.
                print('[failed]', e, file=sys.stderr)
                traceback.print_exc(file=sys.stderr)
                dep_failed.append(dep)
        print()
        print((
            'Successfully resolved github repo for {} dependencies and saved to {}. '
            + 'Failed to resolve {} dependencies.'
        ).format(len(dep_succeeded), args.output_file, len(dep_failed)),
            file=sys.stderr)
        if dep_failed:
            print('We failed to resolve the following dependencies:', file=sys.stderr)
            for dep in dep_failed:
                print(dep, file=sys.stderr)


if __name__ == '__main__':
    main()
| true
| true
|
f70c971c7ebd42d8e73a05dcfba1ec8f9432a06c
| 342
|
py
|
Python
|
tests/modeltests/model_package/models/article.py
|
kix/django
|
5262a288df07daa050a0e17669c3f103f47a8640
|
[
"BSD-3-Clause"
] | 790
|
2015-01-03T02:13:39.000Z
|
2020-05-10T19:53:57.000Z
|
AppServer/lib/django-1.5/tests/modeltests/model_package/models/article.py
|
nlake44/appscale
|
6944af660ca4cb772c9b6c2332ab28e5ef4d849f
|
[
"Apache-2.0"
] | 1,361
|
2015-01-08T23:09:40.000Z
|
2020-04-14T00:03:04.000Z
|
AppServer/lib/django-1.5/tests/modeltests/model_package/models/article.py
|
nlake44/appscale
|
6944af660ca4cb772c9b6c2332ab28e5ef4d849f
|
[
"Apache-2.0"
] | 155
|
2015-01-08T22:59:31.000Z
|
2020-04-08T08:01:53.000Z
|
from django.contrib.sites.models import Site
from django.db import models
class Article(models.Model):
    """Article for the model_package tests, linked to sites and publications."""

    sites = models.ManyToManyField(Site)
    headline = models.CharField(max_length=100)
    # Fix: dropped null=True — it has no effect on a ManyToManyField and
    # Django flags it (system check fields.W340). blank=True alone keeps
    # the relation optional at the form level, so behavior is unchanged.
    publications = models.ManyToManyField("model_package.Publication", blank=True)

    class Meta:
        app_label = 'model_package'
| 28.5
| 94
| 0.748538
|
from django.contrib.sites.models import Site
from django.db import models
class Article(models.Model):
    """Article for the model_package tests, linked to sites and publications."""

    sites = models.ManyToManyField(Site)
    headline = models.CharField(max_length=100)
    # NOTE(review): null=True has no effect on ManyToManyField (Django
    # system check fields.W340) — confirm it is intentional here.
    publications = models.ManyToManyField("model_package.Publication", null=True, blank=True,)

    class Meta:
        app_label = 'model_package'
| true
| true
|
f70c9769bada922ea3842bb81a05492d1121bf42
| 5,473
|
py
|
Python
|
data_utils/ModelNetDataLoader.py
|
ChambinLee/Pointnet_Pointnet2_pytorch
|
c5612493ce3bbdbb18a65eefc0dc8d90e09da74d
|
[
"MIT"
] | null | null | null |
data_utils/ModelNetDataLoader.py
|
ChambinLee/Pointnet_Pointnet2_pytorch
|
c5612493ce3bbdbb18a65eefc0dc8d90e09da74d
|
[
"MIT"
] | null | null | null |
data_utils/ModelNetDataLoader.py
|
ChambinLee/Pointnet_Pointnet2_pytorch
|
c5612493ce3bbdbb18a65eefc0dc8d90e09da74d
|
[
"MIT"
] | null | null | null |
'''
@author: Xu Yan
@file: ModelNet.py
@time: 2021/3/19 15:51
'''
import os
import numpy as np
import warnings
import pickle
from tqdm import tqdm
from torch.utils.data import Dataset
warnings.filterwarnings('ignore')
def pc_normalize(pc):
    """Center a point cloud on its centroid and scale it into the unit sphere.

    pc: (N, C) array whose rows are points (xyz in the first columns).
    Returns an array of the same shape with zero mean and a maximum
    point norm of 1.
    """
    centered = pc - pc.mean(axis=0)
    radius = np.sqrt((centered ** 2).sum(axis=1)).max()
    return centered / radius
def farthest_point_sample(point, npoint):
    """Iterative farthest-point sampling of a point cloud.

    Input:
        point: pointcloud data, [N, D]; the first three columns are xyz
        npoint: number of samples
    Return:
        the [npoint, D] rows of ``point`` picked greedily so that each new
        point maximizes the distance to the already-selected set. The
        first point is drawn at random from numpy's global RNG.
    """
    num_points = point.shape[0]
    coords = point[:, :3]
    chosen = np.zeros((npoint,))
    # Squared distance from every point to its nearest chosen point so far.
    nearest_sq = np.ones((num_points,)) * 1e10
    current = np.random.randint(0, num_points)
    for step in range(npoint):
        chosen[step] = current
        delta = coords - coords[current, :]
        sq_dist = np.sum(delta ** 2, -1)
        closer = sq_dist < nearest_sq
        nearest_sq[closer] = sq_dist[closer]
        # Next pick: the point currently farthest from the chosen set.
        current = np.argmax(nearest_sq, -1)
    return point[chosen.astype(np.int32)]
class ModelNetDataLoader(Dataset):
    """ModelNet10/40 point-cloud dataset over the *_normal_resampled layout.

    ``root`` must hold modelnet{10,40}_shape_names.txt plus train/test
    index files and one folder of txt point files per class. ``args``
    supplies num_point, use_uniform_sample, use_normals and num_category.
    With ``process_data`` all clouds are sampled once and cached to a
    pickle under ``root``, then reloaded on later runs.
    """

    def __init__(self, root, args, split='train', process_data=False):
        self.root = root
        self.npoints = args.num_point
        self.process_data = process_data
        self.uniform = args.use_uniform_sample
        self.use_normals = args.use_normals
        self.num_category = args.num_category

        # Category names come from the shape-name index of the chosen set.
        if self.num_category == 10:
            self.catfile = os.path.join(self.root, 'modelnet10_shape_names.txt')
        else:
            self.catfile = os.path.join(self.root, 'modelnet40_shape_names.txt')

        self.cat = [line.rstrip() for line in open(self.catfile)]
        # Map class name -> integer label.
        self.classes = dict(zip(self.cat, range(len(self.cat))))

        shape_ids = {}
        if self.num_category == 10:
            shape_ids['train'] = [line.rstrip() for line in open(os.path.join(self.root, 'modelnet10_train.txt'))]
            shape_ids['test'] = [line.rstrip() for line in open(os.path.join(self.root, 'modelnet10_test.txt'))]
        else:
            shape_ids['train'] = [line.rstrip() for line in open(os.path.join(self.root, 'modelnet40_train.txt'))]
            shape_ids['test'] = [line.rstrip() for line in open(os.path.join(self.root, 'modelnet40_test.txt'))]

        assert (split == 'train' or split == 'test')
        # An id like 'airplane_0001' encodes its class as the name prefix.
        shape_names = ['_'.join(x.split('_')[0:-1]) for x in shape_ids[split]]
        # datapath: list of (class_name, path_to_point_file) tuples.
        self.datapath = [(shape_names[i], os.path.join(self.root, shape_names[i], shape_ids[split][i]) + '.txt') for i
                         in range(len(shape_ids[split]))]
        print('The size of %s data is %d' % (split, len(self.datapath)))

        # Cache file name records the sampling strategy and point count.
        if self.uniform:
            self.save_path = os.path.join(root, 'modelnet%d_%s_%dpts_fps.dat' % (self.num_category, split, self.npoints))
        else:
            self.save_path = os.path.join(root, 'modelnet%d_%s_%dpts.dat' % (self.num_category, split, self.npoints))

        if self.process_data:
            if not os.path.exists(self.save_path):
                print('Processing data %s (only running in the first time)...' % self.save_path)
                self.list_of_points = [None] * len(self.datapath)
                self.list_of_labels = [None] * len(self.datapath)

                for index in tqdm(range(len(self.datapath)), total=len(self.datapath)):
                    fn = self.datapath[index]
                    cls = self.classes[self.datapath[index][0]]
                    cls = np.array([cls]).astype(np.int32)
                    point_set = np.genfromtxt(fn[1], delimiter=',').astype(np.float32)

                    # Keep npoints per cloud: FPS or simple truncation.
                    if self.uniform:
                        point_set = farthest_point_sample(point_set, self.npoints)
                    else:
                        point_set = point_set[0:self.npoints, :]

                    self.list_of_points[index] = point_set
                    self.list_of_labels[index] = cls

                with open(self.save_path, 'wb') as f:
                    pickle.dump([self.list_of_points, self.list_of_labels], f)
            else:
                print('Load processed data from %s...' % self.save_path)
                with open(self.save_path, 'rb') as f:
                    self.list_of_points, self.list_of_labels = pickle.load(f)

    def __len__(self):
        return len(self.datapath)

    def _get_item(self, index):
        # Cached path returns pre-sampled points; otherwise load from txt.
        if self.process_data:
            point_set, label = self.list_of_points[index], self.list_of_labels[index]
        else:
            fn = self.datapath[index]
            cls = self.classes[self.datapath[index][0]]
            label = np.array([cls]).astype(np.int32)
            point_set = np.loadtxt(fn[1], delimiter=',').astype(np.float32)

            if self.uniform:
                point_set = farthest_point_sample(point_set, self.npoints)
            else:
                point_set = point_set[0:self.npoints, :]

        # Normalize xyz in place; optionally drop the normal channels (3:6).
        point_set[:, 0:3] = pc_normalize(point_set[:, 0:3])
        if not self.use_normals:
            point_set = point_set[:, 0:3]

        return point_set, label[0]

    def __getitem__(self, index):
        return self._get_item(index)
if __name__ == '__main__':
    import argparse

    import torch

    # Bug fix: ModelNetDataLoader requires an ``args`` namespace as its
    # second positional argument; the old call omitted it and raised
    # TypeError before any data was touched. These defaults mirror the
    # usual ModelNet training CLI — TODO confirm against the train script.
    demo_args = argparse.Namespace(
        num_point=1024, use_uniform_sample=False, use_normals=False, num_category=40)
    data = ModelNetDataLoader('/data/modelnet40_normal_resampled/', demo_args, split='train')
    loader = torch.utils.data.DataLoader(data, batch_size=12, shuffle=True)
    for point, label in loader:
        print(point.shape)
        print(label.shape)
| 37.231293
| 121
| 0.589439
|
import os
import numpy as np
import warnings
import pickle
from tqdm import tqdm
from torch.utils.data import Dataset
warnings.filterwarnings('ignore')
def pc_normalize(pc):
centroid = np.mean(pc, axis=0)
pc = pc - centroid
m = np.max(np.sqrt(np.sum(pc**2, axis=1)))
pc = pc / m
return pc
def farthest_point_sample(point, npoint):
N, D = point.shape
xyz = point[:,:3]
centroids = np.zeros((npoint,))
distance = np.ones((N,)) * 1e10
farthest = np.random.randint(0, N)
for i in range(npoint):
centroids[i] = farthest
centroid = xyz[farthest, :]
dist = np.sum((xyz - centroid) ** 2, -1)
mask = dist < distance
distance[mask] = dist[mask]
farthest = np.argmax(distance, -1)
point = point[centroids.astype(np.int32)]
return point
class ModelNetDataLoader(Dataset):
def __init__(self, root, args, split='train', process_data=False):
self.root = root
self.npoints = args.num_point
self.process_data = process_data
self.uniform = args.use_uniform_sample
self.use_normals = args.use_normals
self.num_category = args.num_category
if self.num_category == 10:
self.catfile = os.path.join(self.root, 'modelnet10_shape_names.txt')
else:
self.catfile = os.path.join(self.root, 'modelnet40_shape_names.txt')
self.cat = [line.rstrip() for line in open(self.catfile)]
self.classes = dict(zip(self.cat, range(len(self.cat))))
shape_ids = {}
if self.num_category == 10:
shape_ids['train'] = [line.rstrip() for line in open(os.path.join(self.root, 'modelnet10_train.txt'))]
shape_ids['test'] = [line.rstrip() for line in open(os.path.join(self.root, 'modelnet10_test.txt'))]
else:
shape_ids['train'] = [line.rstrip() for line in open(os.path.join(self.root, 'modelnet40_train.txt'))]
shape_ids['test'] = [line.rstrip() for line in open(os.path.join(self.root, 'modelnet40_test.txt'))]
assert (split == 'train' or split == 'test')
shape_names = ['_'.join(x.split('_')[0:-1]) for x in shape_ids[split]]
self.datapath = [(shape_names[i], os.path.join(self.root, shape_names[i], shape_ids[split][i]) + '.txt') for i
in range(len(shape_ids[split]))]
print('The size of %s data is %d' % (split, len(self.datapath)))
if self.uniform:
self.save_path = os.path.join(root, 'modelnet%d_%s_%dpts_fps.dat' % (self.num_category, split, self.npoints))
else:
self.save_path = os.path.join(root, 'modelnet%d_%s_%dpts.dat' % (self.num_category, split, self.npoints))
if self.process_data:
if not os.path.exists(self.save_path):
print('Processing data %s (only running in the first time)...' % self.save_path)
self.list_of_points = [None] * len(self.datapath)
self.list_of_labels = [None] * len(self.datapath)
for index in tqdm(range(len(self.datapath)), total=len(self.datapath)):
fn = self.datapath[index]
cls = self.classes[self.datapath[index][0]]
cls = np.array([cls]).astype(np.int32)
point_set = np.genfromtxt(fn[1], delimiter=',').astype(np.float32)
if self.uniform:
point_set = farthest_point_sample(point_set, self.npoints)
else:
point_set = point_set[0:self.npoints, :]
self.list_of_points[index] = point_set
self.list_of_labels[index] = cls
with open(self.save_path, 'wb') as f:
pickle.dump([self.list_of_points, self.list_of_labels], f)
else:
print('Load processed data from %s...' % self.save_path)
with open(self.save_path, 'rb') as f:
self.list_of_points, self.list_of_labels = pickle.load(f)
def __len__(self):
return len(self.datapath)
def _get_item(self, index):
if self.process_data:
point_set, label = self.list_of_points[index], self.list_of_labels[index]
else:
fn = self.datapath[index]
cls = self.classes[self.datapath[index][0]]
label = np.array([cls]).astype(np.int32)
point_set = np.loadtxt(fn[1], delimiter=',').astype(np.float32)
if self.uniform:
point_set = farthest_point_sample(point_set, self.npoints)
else:
point_set = point_set[0:self.npoints, :]
point_set[:, 0:3] = pc_normalize(point_set[:, 0:3])
if not self.use_normals:
point_set = point_set[:, 0:3]
return point_set, label[0]
def __getitem__(self, index):
return self._get_item(index)
if __name__ == '__main__':
import torch
data = ModelNetDataLoader('/data/modelnet40_normal_resampled/', split='train')
DataLoader = torch.utils.data.DataLoader(data, batch_size=12, shuffle=True)
for point, label in DataLoader:
print(point.shape)
print(label.shape)
| true
| true
|
f70c9894e512e0874e335bee06c8ffd923e1e83d
| 1,400
|
py
|
Python
|
cfgov/sheerlike/management/commands/sheer_index.py
|
atuggle/cfgov-refresh
|
5a9cfd92b460b9be7befb39f5845abf56857aeac
|
[
"CC0-1.0"
] | null | null | null |
cfgov/sheerlike/management/commands/sheer_index.py
|
atuggle/cfgov-refresh
|
5a9cfd92b460b9be7befb39f5845abf56857aeac
|
[
"CC0-1.0"
] | null | null | null |
cfgov/sheerlike/management/commands/sheer_index.py
|
atuggle/cfgov-refresh
|
5a9cfd92b460b9be7befb39f5845abf56857aeac
|
[
"CC0-1.0"
] | null | null | null |
import os.path
from django.conf import settings
from django.core.management.base import BaseCommand
from sheerlike.indexer import index
LOCATION = os.environ.get('SHEER_LOCATION', os.getcwd())
ELASTICSEARCH_HOSTS = settings.SHEER_ELASTICSEARCH_SERVER
ELASTICSEARCH_INDEX = settings.SHEER_ELASTICSEARCH_INDEX
class Command(BaseCommand):
    """Management command that runs the classic 'sheer' Elasticsearch indexer."""

    help = "Run the classic 'sheer' indexer"

    def add_arguments(self, parser):
        # CLI flags mirror the original standalone sheer indexer.
        parser.add_argument('--reindex', '-r', action="store_true",
                            help="Recreate the index and reindex all content.")
        parser.add_argument('--processors', '-p', nargs='*',
                            help='Content processors to index.')
        # Defaults come from Django settings (SHEER_ELASTICSEARCH_*).
        parser.add_argument(
            '--elasticsearch',
            '-e',
            default=ELASTICSEARCH_HOSTS,
            help=("Elasticsearch host:port pairs. Separate hosts with commas. "
                  "Default is localhost:9200. You can also set the "
                  "SHEER_ELASTICSEARCH_HOSTS environment variable."))
        parser.add_argument(
            '--index',
            '-i',
            default=ELASTICSEARCH_INDEX,
            help=("Elasticsearch index name. Default is 'content'. You can "
                  "also set the SHEER_ELASTICSEARCH_INDEX environment "
                  "variable."))

    def handle(self, *args, **options):
        # Delegate to the sheerlike indexer with the parsed options.
        index(args, options)
| 35
| 79
| 0.621429
|
import os.path
from django.conf import settings
from django.core.management.base import BaseCommand
from sheerlike.indexer import index
LOCATION = os.environ.get('SHEER_LOCATION', os.getcwd())
ELASTICSEARCH_HOSTS = settings.SHEER_ELASTICSEARCH_SERVER
ELASTICSEARCH_INDEX = settings.SHEER_ELASTICSEARCH_INDEX
class Command(BaseCommand):
help = "Run the classic 'sheer' indexer"
def add_arguments(self, parser):
parser.add_argument('--reindex', '-r', action="store_true",
help="Recreate the index and reindex all content.")
parser.add_argument('--processors', '-p', nargs='*',
help='Content processors to index.')
parser.add_argument(
'--elasticsearch',
'-e',
default=ELASTICSEARCH_HOSTS,
help=("Elasticsearch host:port pairs. Separate hosts with commas. "
"Default is localhost:9200. You can also set the "
"SHEER_ELASTICSEARCH_HOSTS environment variable."))
parser.add_argument(
'--index',
'-i',
default=ELASTICSEARCH_INDEX,
help=("Elasticsearch index name. Default is 'content'. You can "
"also set the SHEER_ELASTICSEARCH_INDEX environment "
"variable."))
def handle(self, *args, **options):
index(args, options)
| true
| true
|
f70c9a155e354e16ba75b8f392baa5c978202f18
| 2,369
|
py
|
Python
|
mozetl/system_check.py
|
willkg/python_mozetl
|
946fbd50f4c593fabe6b7ff33982c07c52c3d3de
|
[
"MIT"
] | 28
|
2017-05-01T20:06:19.000Z
|
2021-11-14T19:41:51.000Z
|
mozetl/system_check.py
|
willkg/python_mozetl
|
946fbd50f4c593fabe6b7ff33982c07c52c3d3de
|
[
"MIT"
] | 302
|
2017-04-25T17:59:54.000Z
|
2022-03-24T13:19:34.000Z
|
mozetl/system_check.py
|
willkg/python_mozetl
|
946fbd50f4c593fabe6b7ff33982c07c52c3d3de
|
[
"MIT"
] | 36
|
2017-04-25T18:31:37.000Z
|
2022-01-25T02:05:20.000Z
|
""""A system check for testing integration of various libraries with mozetl.
This sub-module will print out relevant version info. It will also read data
from `main_summary` and print basic statistics to verify that the system is
correctly set-up.
"""
import sys
import click
import logging
from datetime import datetime, timedelta
from pyspark.sql import SparkSession
from mozetl.utils import (
format_as_submission_date,
format_spark_path,
stop_session_safely,
)
logging.basicConfig(level=logging.DEBUG)
@click.command()
@click.option("--local/--no-local", default=False)
@click.option(
    "--submission-date-s3",
    type=str,
    # Default: two days ago, formatted as a submission_date_s3 partition.
    default=format_as_submission_date(datetime.now() - timedelta(2)),
)
@click.option("--input-bucket", type=str, default="telemetry-parquet")
@click.option("--input-prefix", type=str, default="main_summary/v4")
@click.option("--output-bucket", type=str, default="telemetry-test-bucket")
@click.option("--output-prefix", type=str, default="mozetl_system_check")
def main(
    local, submission_date_s3, input_bucket, input_prefix, output_bucket, output_prefix
):
    """System check: print version info and, unless --local, read a
    main_summary sample and write summary statistics back to S3."""
    # print argument information
    for k, v in locals().items():
        print("{}: {}".format(k, v))

    print("Python version: {}".format(sys.version_info))

    spark = SparkSession.builder.getOrCreate()
    print("Spark version: {}".format(spark.version))

    # run a basic count over a sample of `main_summary` from 2 days ago
    if not local:
        ds_nodash = submission_date_s3
        input_path = format_spark_path(input_bucket, input_prefix)
        output_path = format_spark_path(output_bucket, output_prefix)
        print(
            "Reading data for {ds_nodash} from {input_path} and writing to {output_path}".format(
                ds_nodash=ds_nodash, input_path=input_path, output_path=output_path
            )
        )
        # Restrict to one sample_id partition to keep the check cheap.
        path = "{}/submission_date_s3={}/sample_id={}".format(input_path, ds_nodash, 1)
        subset = spark.read.parquet(path)
        print("Saw {} documents".format(subset.count()))
        summary = subset.select(
            "memory_mb", "cpu_cores", "subsession_length"
        ).describe()
        summary.show()
        summary.write.parquet(
            output_path + "/submission_date_s3={}/".format(ds_nodash), mode="overwrite"
        )
    stop_session_safely(spark)
    print("Done!")
| 32.452055
| 97
| 0.688476
|
import sys
import click
import logging
from datetime import datetime, timedelta
from pyspark.sql import SparkSession
from mozetl.utils import (
format_as_submission_date,
format_spark_path,
stop_session_safely,
)
logging.basicConfig(level=logging.DEBUG)
@click.command()
@click.option("--local/--no-local", default=False)
@click.option(
"--submission-date-s3",
type=str,
default=format_as_submission_date(datetime.now() - timedelta(2)),
)
@click.option("--input-bucket", type=str, default="telemetry-parquet")
@click.option("--input-prefix", type=str, default="main_summary/v4")
@click.option("--output-bucket", type=str, default="telemetry-test-bucket")
@click.option("--output-prefix", type=str, default="mozetl_system_check")
def main(
local, submission_date_s3, input_bucket, input_prefix, output_bucket, output_prefix
):
for k, v in locals().items():
print("{}: {}".format(k, v))
print("Python version: {}".format(sys.version_info))
spark = SparkSession.builder.getOrCreate()
print("Spark version: {}".format(spark.version))
if not local:
ds_nodash = submission_date_s3
input_path = format_spark_path(input_bucket, input_prefix)
output_path = format_spark_path(output_bucket, output_prefix)
print(
"Reading data for {ds_nodash} from {input_path} and writing to {output_path}".format(
ds_nodash=ds_nodash, input_path=input_path, output_path=output_path
)
)
path = "{}/submission_date_s3={}/sample_id={}".format(input_path, ds_nodash, 1)
subset = spark.read.parquet(path)
print("Saw {} documents".format(subset.count()))
summary = subset.select(
"memory_mb", "cpu_cores", "subsession_length"
).describe()
summary.show()
summary.write.parquet(
output_path + "/submission_date_s3={}/".format(ds_nodash), mode="overwrite"
)
stop_session_safely(spark)
print("Done!")
| true
| true
|
f70c9b165f9aa9483be6ebcddaeaa2406a850498
| 76
|
py
|
Python
|
tests/basic.py
|
brightway-lca/brightway_projects
|
ccd5036620c505c700cf59aafaaa3533dbdd52a4
|
[
"BSD-3-Clause"
] | 4
|
2017-05-23T14:28:05.000Z
|
2018-08-17T14:16:11.000Z
|
tests/basic.py
|
brightway-lca/bw_projects
|
ccd5036620c505c700cf59aafaaa3533dbdd52a4
|
[
"BSD-3-Clause"
] | 3
|
2019-10-12T07:37:45.000Z
|
2019-10-18T19:27:24.000Z
|
tests/basic.py
|
brightway-lca/brightway_projects
|
ccd5036620c505c700cf59aafaaa3533dbdd52a4
|
[
"BSD-3-Clause"
] | null | null | null |
import bw_projects
def test_version():
    """Smoke test: the package must expose a truthy __version__."""
    assert bw_projects.__version__
| 12.666667
| 34
| 0.789474
|
import bw_projects
def test_version():
assert bw_projects.__version__
| true
| true
|
f70c9c710e197f63a9f68c52b505b8836ff9453d
| 3,674
|
py
|
Python
|
server/user/models.py
|
openml/openml.org
|
dadc4f79c159058776500b204977a1062b927d4c
|
[
"BSD-3-Clause"
] | 16
|
2018-10-17T19:35:11.000Z
|
2022-03-31T23:37:00.000Z
|
server/user/models.py
|
PortML/openml.org
|
b526fae6c0ba2df0ccebf60f1dd703368ed394ec
|
[
"BSD-3-Clause"
] | 192
|
2018-10-17T17:31:03.000Z
|
2022-03-27T23:55:51.000Z
|
server/user/models.py
|
PortML/openml.org
|
b526fae6c0ba2df0ccebf60f1dd703368ed394ec
|
[
"BSD-3-Clause"
] | 8
|
2019-04-15T11:47:32.000Z
|
2021-12-15T13:23:54.000Z
|
import datetime
# from sqlalchemy import Column, Integer, String
import hashlib
from server.extensions import Base, argon2, bcrypt
class User(Base):
    """User account mapped onto the autoloaded 'users' table.

    Column attributes (id, username, email, password, activation_code,
    session_hash, ...) are reflected from the database at class-definition
    time rather than declared here.
    """

    __table__ = Base.metadata.tables["users"]
    __table_args__ = {"autoload": True}

    def set_password(self, password):
        """Store ``password`` as an Argon2 hash."""
        self.password = argon2.generate_password_hash(password)

    def check_password(self, passwd):
        """
        Check if the password hash is in Argon2 or Bcrypt (old) format.
        Resets the password hash to Argon2 format if it only matched under
        Bcrypt. Returns True when ``passwd`` matches the stored hash.
        """
        # Bug fix: ``bpass`` was previously assigned only inside the
        # try/except, so a bcrypt check that returned False (rather than
        # raising ValueError) left it undefined and the comparisons below
        # raised UnboundLocalError. Initialize it up front.
        bpass = False
        try:
            if bcrypt.check_password_hash(self.password, passwd):
                bpass = True
        except ValueError as error:
            # Not a bcrypt-format hash (or malformed); fall through.
            print(error)
        if argon2.check_password_hash(self.password, passwd):
            return True
        if bpass:
            # Legacy bcrypt hash matched: transparently upgrade to Argon2.
            self.set_password(passwd)
            return True
        return False

    def update_bio(self, new_bio):
        self.bio = new_bio

    def update_email(self, email):
        self.email = email

    def update_first_name(self, first_name):
        self.first_name = first_name

    def update_last_name(self, last_name):
        self.last_name = last_name

    def update_forgotten_code(self, code):
        self.forgotten_password_code = code

    def update_activation_code(self, code):
        self.activation_code = code

    def update_activation(self):
        # '1' marks the account as active in the legacy schema.
        self.active = "1"
        print("user activated successfully")

    def update_forgotten_time(self, time):
        self.forgotten_password_time = time

    def set_session_hash(self):
        """Derive a new session hash (API key) from the current timestamp.

        NOTE(review): md5 of a second-resolution timestamp is predictable;
        consider ``secrets.token_hex`` if this value acts as a credential.
        Kept as-is to avoid changing the stored-key format here.
        """
        timestamp = datetime.datetime.now()
        timestamp1 = timestamp.strftime("%Y-%m-%d %H:%M:%S")
        md5_digest = hashlib.md5(timestamp1.encode()).hexdigest()
        self.session_hash = md5_digest

    def update_image_address(self, path):
        self.image = path

    def __repr__(self):
        return "<User {}>".format(self.username)
class UserGroups(Base):
    """Join row mapping a user to a group (autoloaded 'users_groups' table)."""

    __table__ = Base.metadata.tables["users_groups"]
    __table_args__ = {"autoload": True}

    def set_group(self):
        # Group id 2 — presumably the default member group; confirm
        # against the groups table.
        self.group_id = 2
        print('group updated')

    def __repr__(self):
        # NOTE(review): the users_groups table may not carry a ``username``
        # column — verify this attribute exists on the autoloaded row.
        return "<User {}>".format(self.username)
| 32.513274
| 81
| 0.655144
|
import datetime
import hashlib
from server.extensions import Base, argon2, bcrypt
class User(Base):
__table__ = Base.metadata.tables["users"]
__table_args__ = {"autoload": True}
rd(self, password):
self.password = argon2.generate_password_hash(password)
def check_password(self, passwd):
try:
if bcrypt.check_password_hash(self.password, passwd):
bpass = True
except ValueError as error:
print(error)
bpass = False
if argon2.check_password_hash(self.password, passwd):
return True
elif not argon2.check_password_hash(self.password, passwd) and not bpass:
return False
elif not argon2.check_password_hash(self.password, passwd) and bpass:
self.set_password(passwd)
return True
def update_bio(self, new_bio):
self.bio = new_bio
def update_email(self, email):
self.email = email
def update_first_name(self, first_name):
self.first_name = first_name
def update_last_name(self, last_name):
self.last_name = last_name
def update_forgotten_code(self, code):
self.forgotten_password_code = code
def update_activation_code(self, code):
self.activation_code = code
def update_activation(self):
self.active = "1"
print("user activated successfully")
def update_forgotten_time(self, time):
self.forgotten_password_time = time
def set_session_hash(self):
timestamp = datetime.datetime.now()
timestamp1 = timestamp.strftime("%Y-%m-%d %H:%M:%S")
md5_digest = hashlib.md5(timestamp1.encode()).hexdigest()
self.session_hash = md5_digest
def update_image_address(self, path):
self.image = path
def __repr__(self):
return "<User {}>".format(self.username)
class UserGroups(Base):
__table__ = Base.metadata.tables["users_groups"]
__table_args__ = {"autoload": True}
def set_group(self):
self.group_id = 2
print('group updated')
def __repr__(self):
return "<User {}>".format(self.username)
| true
| true
|
f70c9ce834362406e19b54ddfbde6250f8533c79
| 6,389
|
py
|
Python
|
gcloud/tests/taskflow3/models/taskflow/test_get_node_detail.py
|
brookylin/bk-sops
|
6c0cf78879849921c4ff6ad6bf3bb82dfdf5b973
|
[
"Apache-2.0"
] | 881
|
2019-03-25T02:45:42.000Z
|
2022-03-30T09:10:49.000Z
|
gcloud/tests/taskflow3/models/taskflow/test_get_node_detail.py
|
m0re-work/bk-sops
|
d03ba8a4ee0781c6daaf0dd38a7369dc82669f7d
|
[
"Apache-2.0"
] | 3,303
|
2019-03-25T04:18:03.000Z
|
2022-03-31T11:52:03.000Z
|
gcloud/tests/taskflow3/models/taskflow/test_get_node_detail.py
|
m0re-work/bk-sops
|
d03ba8a4ee0781c6daaf0dd38a7369dc82669f7d
|
[
"Apache-2.0"
] | 395
|
2019-03-25T02:53:36.000Z
|
2022-03-31T08:37:28.000Z
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from django.test import TestCase
from gcloud import err_code
from gcloud.taskflow3.models import TaskFlowInstance
from gcloud.tests.mock import * # noqa
from gcloud.tests.mock_settings import * # noqa
class GetNodeDetailTestCase(TestCase):
    """Unit tests for TaskFlowInstance.get_node_detail (engine-v2 dispatch)."""

    def test_node_does_not_exist(self):
        # Unknown node id -> request-parameter error; dispatcher never touched.
        taskflow = TaskFlowInstance()
        taskflow.id = 1
        taskflow.has_node = MagicMock(return_value=False)
        detail = taskflow.get_node_detail(node_id="node_id", username="username")
        self.assertFalse(detail["result"])
        self.assertEqual(detail["code"], err_code.REQUEST_PARAM_INVALID.code)

    def test_get_node_data_err(self):
        # A failed get_node_data result is returned to the caller unchanged.
        taskflow = TaskFlowInstance()
        taskflow.id = 1
        taskflow.engine_ver = 2
        taskflow.has_node = MagicMock(return_value=True)
        dispatcher = MagicMock()
        get_node_data_return = {"result": False}
        dispatcher.get_node_data = MagicMock(return_value=get_node_data_return)
        with patch(TASKFLOW_MODEL_NODE_CMD_DISPATCHER, MagicMock(return_value=dispatcher)):
            detail = taskflow.get_node_detail(node_id="node_id", username="username", project_id="project_id")
        self.assertEqual(detail, get_node_data_return)

    def test_get_node_detail_err(self):
        # A failed get_node_detail result is returned to the caller unchanged.
        taskflow = TaskFlowInstance()
        taskflow.id = 1
        taskflow.engine_ver = 2
        taskflow.has_node = MagicMock(return_value=True)
        dispatcher = MagicMock()
        get_node_data_return = {"result": True, "data": {}}
        get_node_detail_return = {"result": False}
        dispatcher.get_node_data = MagicMock(return_value=get_node_data_return)
        dispatcher.get_node_detail = MagicMock(return_value=get_node_detail_return)
        with patch(TASKFLOW_MODEL_NODE_CMD_DISPATCHER, MagicMock(return_value=dispatcher)):
            detail = taskflow.get_node_detail(node_id="node_id", username="username", project_id="project_id")
        self.assertEqual(detail, get_node_detail_return)

    def test_include_data_is_false(self):
        # include_data=False must skip get_node_data entirely.
        taskflow = TaskFlowInstance()
        taskflow.id = 1
        taskflow.engine_ver = 2
        taskflow.has_node = MagicMock(return_value=True)
        dispatcher = MagicMock()
        get_node_data_return = {"result": True, "data": {}}
        get_node_detail_return = {"result": True, "data": {}}
        dispatcher.get_node_data = MagicMock(return_value=get_node_data_return)
        dispatcher.get_node_detail = MagicMock(return_value=get_node_detail_return)
        dispatcher_init = MagicMock(return_value=dispatcher)
        node_id = "node_id"
        username = "username"
        component_code = "component_code"
        subprocess_stack = ["1"]
        loop = 1
        include_data = False
        with patch(TASKFLOW_MODEL_NODE_CMD_DISPATCHER, dispatcher_init):
            detail = taskflow.get_node_detail(
                node_id=node_id,
                username=username,
                component_code=component_code,
                subprocess_stack=subprocess_stack,
                loop=loop,
                include_data=include_data,
            )
        dispatcher_init.assert_called_once_with(engine_ver=taskflow.engine_ver, node_id=node_id, taskflow_id=1)
        dispatcher.get_node_data.assert_not_called()
        dispatcher.get_node_detail.assert_called_once_with(
            username=username,
            component_code=component_code,
            subprocess_stack=subprocess_stack,
            pipeline_instance=taskflow.pipeline_instance,
            loop=loop,
        )
        self.assertEqual(detail, {"code": 0, "data": {}, "message": "", "result": True})

    def test_success(self):
        # Happy path: node data and node detail dicts are merged in the payload.
        taskflow = TaskFlowInstance()
        taskflow.id = 1
        taskflow.engine_ver = 2
        taskflow.has_node = MagicMock(return_value=True)
        dispatcher = MagicMock()
        get_node_data_return = {"result": True, "data": {"data": "data"}}
        get_node_detail_return = {"result": True, "data": {"detail": "detail"}}
        dispatcher.get_node_data = MagicMock(return_value=get_node_data_return)
        dispatcher.get_node_detail = MagicMock(return_value=get_node_detail_return)
        dispatcher_init = MagicMock(return_value=dispatcher)
        node_id = "node_id"
        username = "username"
        component_code = "component_code"
        subprocess_stack = ["1"]
        loop = 1
        include_data = True
        with patch(TASKFLOW_MODEL_NODE_CMD_DISPATCHER, dispatcher_init):
            detail = taskflow.get_node_detail(
                node_id=node_id,
                username=username,
                component_code=component_code,
                subprocess_stack=subprocess_stack,
                loop=loop,
                include_data=include_data,
                project_id="project_id",
            )
        dispatcher_init.assert_called_once_with(engine_ver=taskflow.engine_ver, node_id=node_id, taskflow_id=1)
        dispatcher.get_node_data.assert_called_once_with(
            username=username,
            component_code=component_code,
            subprocess_stack=subprocess_stack,
            pipeline_instance=taskflow.pipeline_instance,
            loop=loop,
            project_id="project_id",
        )
        dispatcher.get_node_detail.assert_called_once_with(
            username=username,
            component_code=component_code,
            subprocess_stack=subprocess_stack,
            pipeline_instance=taskflow.pipeline_instance,
            loop=loop,
        )
        self.assertEqual(
            detail, {"code": 0, "data": {"data": "data", "detail": "detail"}, "message": "", "result": True}
        )
| 42.032895
| 115
| 0.673971
|
from django.test import TestCase
from gcloud import err_code
from gcloud.taskflow3.models import TaskFlowInstance
from gcloud.tests.mock import *
from gcloud.tests.mock_settings import *
class GetNodeDetailTestCase(TestCase):
    """Unit tests for TaskFlowInstance.get_node_detail (engine-v2 dispatch)."""

    def test_node_does_not_exist(self):
        # Unknown node id -> request-parameter error; dispatcher never touched.
        taskflow = TaskFlowInstance()
        taskflow.id = 1
        taskflow.has_node = MagicMock(return_value=False)
        detail = taskflow.get_node_detail(node_id="node_id", username="username")
        self.assertFalse(detail["result"])
        self.assertEqual(detail["code"], err_code.REQUEST_PARAM_INVALID.code)

    def test_get_node_data_err(self):
        # A failed get_node_data result is returned to the caller unchanged.
        taskflow = TaskFlowInstance()
        taskflow.id = 1
        taskflow.engine_ver = 2
        taskflow.has_node = MagicMock(return_value=True)
        dispatcher = MagicMock()
        get_node_data_return = {"result": False}
        dispatcher.get_node_data = MagicMock(return_value=get_node_data_return)
        with patch(TASKFLOW_MODEL_NODE_CMD_DISPATCHER, MagicMock(return_value=dispatcher)):
            detail = taskflow.get_node_detail(node_id="node_id", username="username", project_id="project_id")
        self.assertEqual(detail, get_node_data_return)

    def test_get_node_detail_err(self):
        # A failed get_node_detail result is returned to the caller unchanged.
        taskflow = TaskFlowInstance()
        taskflow.id = 1
        taskflow.engine_ver = 2
        taskflow.has_node = MagicMock(return_value=True)
        dispatcher = MagicMock()
        get_node_data_return = {"result": True, "data": {}}
        get_node_detail_return = {"result": False}
        dispatcher.get_node_data = MagicMock(return_value=get_node_data_return)
        dispatcher.get_node_detail = MagicMock(return_value=get_node_detail_return)
        with patch(TASKFLOW_MODEL_NODE_CMD_DISPATCHER, MagicMock(return_value=dispatcher)):
            detail = taskflow.get_node_detail(node_id="node_id", username="username", project_id="project_id")
        self.assertEqual(detail, get_node_detail_return)

    def test_include_data_is_false(self):
        # include_data=False must skip get_node_data entirely.
        taskflow = TaskFlowInstance()
        taskflow.id = 1
        taskflow.engine_ver = 2
        taskflow.has_node = MagicMock(return_value=True)
        dispatcher = MagicMock()
        get_node_data_return = {"result": True, "data": {}}
        get_node_detail_return = {"result": True, "data": {}}
        dispatcher.get_node_data = MagicMock(return_value=get_node_data_return)
        dispatcher.get_node_detail = MagicMock(return_value=get_node_detail_return)
        dispatcher_init = MagicMock(return_value=dispatcher)
        node_id = "node_id"
        username = "username"
        component_code = "component_code"
        subprocess_stack = ["1"]
        loop = 1
        include_data = False
        with patch(TASKFLOW_MODEL_NODE_CMD_DISPATCHER, dispatcher_init):
            detail = taskflow.get_node_detail(
                node_id=node_id,
                username=username,
                component_code=component_code,
                subprocess_stack=subprocess_stack,
                loop=loop,
                include_data=include_data,
            )
        dispatcher_init.assert_called_once_with(engine_ver=taskflow.engine_ver, node_id=node_id, taskflow_id=1)
        dispatcher.get_node_data.assert_not_called()
        dispatcher.get_node_detail.assert_called_once_with(
            username=username,
            component_code=component_code,
            subprocess_stack=subprocess_stack,
            pipeline_instance=taskflow.pipeline_instance,
            loop=loop,
        )
        self.assertEqual(detail, {"code": 0, "data": {}, "message": "", "result": True})

    def test_success(self):
        # Happy path: node data and node detail dicts are merged in the payload.
        taskflow = TaskFlowInstance()
        taskflow.id = 1
        taskflow.engine_ver = 2
        taskflow.has_node = MagicMock(return_value=True)
        dispatcher = MagicMock()
        get_node_data_return = {"result": True, "data": {"data": "data"}}
        get_node_detail_return = {"result": True, "data": {"detail": "detail"}}
        dispatcher.get_node_data = MagicMock(return_value=get_node_data_return)
        dispatcher.get_node_detail = MagicMock(return_value=get_node_detail_return)
        dispatcher_init = MagicMock(return_value=dispatcher)
        node_id = "node_id"
        username = "username"
        component_code = "component_code"
        subprocess_stack = ["1"]
        loop = 1
        include_data = True
        with patch(TASKFLOW_MODEL_NODE_CMD_DISPATCHER, dispatcher_init):
            detail = taskflow.get_node_detail(
                node_id=node_id,
                username=username,
                component_code=component_code,
                subprocess_stack=subprocess_stack,
                loop=loop,
                include_data=include_data,
                project_id="project_id",
            )
        dispatcher_init.assert_called_once_with(engine_ver=taskflow.engine_ver, node_id=node_id, taskflow_id=1)
        dispatcher.get_node_data.assert_called_once_with(
            username=username,
            component_code=component_code,
            subprocess_stack=subprocess_stack,
            pipeline_instance=taskflow.pipeline_instance,
            loop=loop,
            project_id="project_id",
        )
        dispatcher.get_node_detail.assert_called_once_with(
            username=username,
            component_code=component_code,
            subprocess_stack=subprocess_stack,
            pipeline_instance=taskflow.pipeline_instance,
            loop=loop,
        )
        self.assertEqual(
            detail, {"code": 0, "data": {"data": "data", "detail": "detail"}, "message": "", "result": True}
        )
| true
| true
|
f70c9d011edd06801fbe1eb5c620a18f842e46d5
| 996
|
py
|
Python
|
vlcplayer.py
|
danlyke/squareplay
|
30783579c3e4dc9a61890b78102fa4020f80c6aa
|
[
"MIT"
] | null | null | null |
vlcplayer.py
|
danlyke/squareplay
|
30783579c3e4dc9a61890b78102fa4020f80c6aa
|
[
"MIT"
] | null | null | null |
vlcplayer.py
|
danlyke/squareplay
|
30783579c3e4dc9a61890b78102fa4020f80c6aa
|
[
"MIT"
] | null | null | null |
import vlc
instance = vlc.Instance()
class VLCPlayer:
    """Thin wrapper around a libVLC media player using the Scaletempo
    audio output (pitch-preserving tempo changes).

    Fixes vs. original: guard against ZeroDivisionError in set_position
    when the track length is still unknown (0), and remove stray
    semicolons / non-PEP8 spacing.
    """

    def __init__(self):
        self.player = instance.media_player_new()
        self.player.audio_output_set("Scaletempo")
        self.length = 0  # cached track length in seconds (see get_length)

    def set_position(self, position):
        """Seek to *position* seconds; no-op while the length is unknown."""
        length = self.get_length()
        if length > 0:
            self.player.set_position(position / length)

    def get_position(self):
        """Return playback position as a fraction in [0, 1]."""
        return self.player.get_position()

    def get_length(self):
        # libVLC reports milliseconds; cache and return seconds.
        self.length = self.player.get_length() / 1000
        return self.length

    def is_playing(self):
        return self.player.get_state() == vlc.State.Playing

    def set_tempo(self, tempo):
        """Set the playback rate (1.0 = normal speed)."""
        self.player.set_rate(tempo)

    def get_tempo(self):
        return self.player.get_rate()

    def play(self):
        self.player.play()

    def stop(self):
        self.player.stop()

    def pause(self):
        self.player.pause()

    def load_song(self, filename):
        """Load *filename* as the player's current media."""
        media = instance.media_new(filename)
        self.player.set_media(media)
| 23.714286
| 62
| 0.621486
|
import vlc
instance = vlc.Instance()
class VLCPlayer:
    # Thin wrapper around a libVLC media player with Scaletempo audio output.

    def __init__(self):
        self.player = instance.media_player_new()
        self.player.audio_output_set("Scaletempo")
        self.length = 0  # cached track length in seconds (see get_length)

    def set_position(self,position) :
        # NOTE(review): divides by get_length(); raises ZeroDivisionError
        # while the track length is still unknown (0) — confirm callers guard.
        self.player.set_position(position / self.get_length())

    def get_position(self) :
        # Fractional position in [0, 1] as reported by libVLC.
        return self.player.get_position()

    def get_length(self) :
        # libVLC reports milliseconds; convert to seconds and cache.
        self.length = self.player.get_length() / 1000
        return self.length

    def is_playing(self) :
        return self.player.get_state() == vlc.State.Playing;

    def set_tempo(self,tempo) :
        # Playback rate, 1.0 = normal speed.
        self.player.set_rate(tempo);

    def get_tempo(self) :
        return self.player.get_rate()

    def play(self) :
        self.player.play()

    def stop(self) :
        self.player.stop()

    def pause(self) :
        self.player.pause()

    def load_song(self,filename) :
        # Replace the player's current media with *filename*.
        media = instance.media_new(filename)
        self.player.set_media(media)
| true
| true
|
f70c9d3e13ab3ff94667a701c7583af139e37534
| 894
|
py
|
Python
|
eunice012716/Week2/ch4/4.3/exercise2.py
|
txya900619/Intern-Training
|
76cac20ac988609f313765ebeb72d20da9dcc05e
|
[
"MIT"
] | 1
|
2021-08-24T12:14:46.000Z
|
2021-08-24T12:14:46.000Z
|
eunice012716/Week2/ch4/4.3/exercise2.py
|
txya900619/Intern-Training
|
76cac20ac988609f313765ebeb72d20da9dcc05e
|
[
"MIT"
] | 14
|
2021-07-09T07:48:35.000Z
|
2021-08-19T03:06:31.000Z
|
eunice012716/Week2/ch4/4.3/exercise2.py
|
txya900619/Intern-Training
|
76cac20ac988609f313765ebeb72d20da9dcc05e
|
[
"MIT"
] | 11
|
2021-07-09T07:35:24.000Z
|
2021-08-15T07:19:43.000Z
|
import torch
from torch import nn
from d2l import torch as d2l
# Hyper-parameters and the three activations compared by the experiment.
BATCH_SIZE, LR, NUM_EPOCHS = 256, 0.1, 10
ACTIVATE_FUNCS = [nn.ReLU(), nn.Sigmoid(), nn.Tanh()]


def init_weights(m):
    """Initialise ``nn.Linear`` weights from N(0, 0.01); other modules untouched.

    Fix: use ``isinstance`` instead of the exact-type check
    ``type(m) == nn.Linear`` so that ``nn.Linear`` subclasses are
    initialised as well.
    """
    if isinstance(m, nn.Linear):
        nn.init.normal_(m.weight, std=0.01)
if __name__ == "__main__":
    # Train the same 784-256-10 MLP on Fashion-MNIST once per activation.
    for activation in ACTIVATE_FUNCS:
        net = nn.Sequential(
            nn.Flatten(),
            nn.Linear(784, 256),
            activation,
            nn.Linear(256, 10),
        )
        net.apply(init_weights)
        loss = nn.CrossEntropyLoss()
        trainer = torch.optim.SGD(net.parameters(), lr=LR)
        train_iter, test_iter = d2l.load_data_fashion_mnist(BATCH_SIZE)
        d2l.train_ch3(net, train_iter, test_iter, loss, NUM_EPOCHS, trainer)
        print("Train Accuracy", d2l.evaluate_accuracy(net, train_iter))
        print("Test Accuracy", d2l.evaluate_accuracy(net, test_iter))
| 27.9375
| 76
| 0.624161
|
import torch
from torch import nn
from d2l import torch as d2l
# Hyper-parameters and the three activations compared by the experiment.
BATCH_SIZE, LR, NUM_EPOCHS = 256, 0.1, 10
ACTIVATE_FUNCS = [nn.ReLU(), nn.Sigmoid(), nn.Tanh()]


def init_weights(m):
    # Initialise Linear layers with N(0, 0.01) weights; other modules untouched.
    # NOTE(review): the exact-type check skips nn.Linear subclasses; confirm
    # whether isinstance was intended.
    if type(m) == nn.Linear:
        nn.init.normal_(m.weight, std=0.01)


if __name__ == "__main__":
    # Train the same 784-256-10 MLP on Fashion-MNIST once per activation.
    for i in range(0, 3):
        net = nn.Sequential(
            nn.Flatten(),
            nn.Linear(784, 256),
            ACTIVATE_FUNCS[i],
            nn.Linear(256, 10),
        )
        net.apply(init_weights)
        loss = nn.CrossEntropyLoss()
        trainer = torch.optim.SGD(net.parameters(), lr=LR)
        train_iter, test_iter = d2l.load_data_fashion_mnist(BATCH_SIZE)
        d2l.train_ch3(net, train_iter, test_iter, loss, NUM_EPOCHS, trainer)
        print("Train Accuracy", d2l.evaluate_accuracy(net, train_iter))
        print("Test Accuracy", d2l.evaluate_accuracy(net, test_iter))
| true
| true
|
f70c9d66dcce1ce601c098649b1b6f89a1032d77
| 6,348
|
py
|
Python
|
bot.py
|
DestinyofYeet/antonstechbot
|
b01372431a3a2b51fb83180cf8caa1a168e294ad
|
[
"MIT"
] | 1
|
2021-04-21T09:01:26.000Z
|
2021-04-21T09:01:26.000Z
|
bot.py
|
DestinyofYeet/antonstechbot
|
b01372431a3a2b51fb83180cf8caa1a168e294ad
|
[
"MIT"
] | null | null | null |
bot.py
|
DestinyofYeet/antonstechbot
|
b01372431a3a2b51fb83180cf8caa1a168e294ad
|
[
"MIT"
] | null | null | null |
import asyncio
import json
import random
import subprocess
import discord
import discord.ext
import requests
from discord.ext import commands
import os
from botlibrary.utils import get_variable
from botlibrary import constants
# Wichs Codierung
# ä=ü
# ö=ö
# assign constant variables
constants.assignVariables()
VERSION = constants.VERSION
bot_prefix = constants.bot_prefix
client = commands.Bot(command_prefix=bot_prefix, intents=discord.Intents.all())
def tokenchecker():
    """Smoke-test every configured API key before the bot starts.

    Riot, osu! and ipdata failures let the user choose to continue ("j");
    an invalid Discord bot token always aborts startup.
    Fixes vs. original: removed the dead ``pass`` branches by inverting the
    status checks, and corrected the ipdata failure hint which blamed
    "Osu" (copy-paste error).
    """
    # --- Riot (League of Legends) key ---
    riotapi = constants.lol_token
    base_riot_url = "https://euw1.api.riotgames.com/lol/summoner/v4/summoners/by-name/DCGALAXY?api_key="
    rioturl = base_riot_url + riotapi
    response = requests.get(rioturl)
    if response.status_code != 200:
        print("Der Riot-API Key hat nicht funktioniert :((")
        print(
            "Bitte checke ob der Key in der config.json richtig gesetzt ist und schau auf "
            "https://developer.riotgames.com/api-status/ nach ob es nicht vllt an Riot selber liegt")
        riotnotworkingexe = input("Willst du trotzdem starten? (j/n): ")
        if riotnotworkingexe != "j":
            raise Exception("Der Riot-API Key hat nicht funktioniert.")
    # --- osu! key ---
    osuapi = constants.osu_token
    base_osu_url = "https://osu.ppy.sh/api/get_user_best?u=Aftersh0ock&k="
    osuurl = base_osu_url + osuapi
    osuresponse = requests.get(osuurl)
    if osuresponse.status_code != 200:
        print("Der Osu-API Key hat nicht funktioniert :((")
        print(
            "Bitte checke ob der Key in der config.json richtig gesetzt ist und schau auf https://status.ppy.sh nach ob es nicht vllt an Osu selber liegt")
        osunotworkingexe = input("Willst du trotzdem starten? (j/n): ")
        if osunotworkingexe != "j":
            raise Exception("Der Osu-API Key hat nicht funktioniert.")
    # --- Discord bot token (fatal on failure) ---
    token = constants.bot_token
    headers = {
        "Authorization": "Bot " + token
    }
    response = requests.get('https://discordapp.com/api/v8/auth/login', headers=headers)
    if response.status_code != 200:
        raise Exception("Der Discord Bot Token funktioniert nicht!")
    # --- ipdata key ---
    ipdata = constants.ipdata_token
    baseipurl = "https://api.ipdata.co/8.8.8.8"
    ipurl = baseipurl + "?api-key=" + ipdata
    ipresponse = requests.get(ipurl)
    if ipresponse.status_code != 200:
        print("Der IPData-API Key hat nicht funktioniert :((")
        # Fixed: this hint previously said "an Osu selber liegt".
        print(
            "Bitte checke ob der Key in der config.json richtig gesetzt ist und schau auf https://status.ipdata.co nach ob es nicht vllt an IPData selber liegt")
        ipdatanotworkingexe = input("Willst du trotzdem starten? (j/n): ")
        if ipdatanotworkingexe != "j":
            raise Exception("Der IPData Key hat nicht funktioniert.")
tokenchecker()
@client.event
async def on_ready():
    # Startup banner: version, MySQL-logging state and guild list; then kick
    # off the rotating-status background task.
    print("Yess der bot läuft :)".format(client))  # NOTE(review): .format(client) is a no-op here
    print("Du hast derzeit Release " + str(VERSION) + " installiert")
    print("Du bist eingeloggt als {0.user} auf discord.py Version {1}".format(client, discord.__version__))
    # Presence of config/mysql.json toggles MySQL logging.
    if os.path.exists("config/mysql.json"):
        print("MySQL-Logging ist AKTIVIERT")
    else:
        print("MySQL-Logging ist DEAKTIVIERT")
    print("Der Bot ist zurzeit auf folgenden " + str(len(client.guilds)) + " Servern:")
    for guild in client.guilds:
        print("- " + str(guild.name))
    client.loop.create_task(status_task())
async def status_task():
    # Endless background loop rotating the bot's presence text
    # (three 60s steps plus one 5s step per cycle).
    while True:
        await client.change_presence(activity=discord.Game("https://git.io/antonsbot"),
                                     status=discord.Status.online)
        await asyncio.sleep(60)
        await client.change_presence(
            activity=discord.Game(bot_prefix + "corona auf " + str(len(client.guilds)) + " Servern"))
        await asyncio.sleep(60)
        await client.change_presence(activity=discord.Game("ein heißes Spiel mit der Stiefschwester"))
        await asyncio.sleep(5)
        await client.change_presence(
            activity=discord.Activity(type=discord.ActivityType.watching, name="auf deine Nachrichten"))
        await asyncio.sleep(60)
"""
##############################################################################################################################################################
Ole rewrite paradise
##############################################################################################################################################################
"""
def owner_only(func):
    """Decorator: allow only the bot application owner to run the command.

    Non-owners get an error message in the invoking channel instead.
    Fix: apply ``functools.wraps`` so the wrapped command keeps its name
    and docstring (discord.py and help tooling rely on them).
    """
    import functools

    @functools.wraps(func)
    async def wrapper(self, *args, **kwargs):
        # `ctx` is recovered from the caller's frame by the project helper.
        ctx = get_variable('ctx')
        info = await client.application_info()
        if ctx.author.id == info.owner.id:
            return await func(self, *args, **kwargs)
        await ctx.channel.send("Error, only the bot owner can use this command!")

    return wrapper
@client.command(name="reload")
@owner_only
async def reload_cog(ctx, cogName):
    # Reload = unload then load; both sub-commands report their own errors.
    try:
        await unload_cog(ctx, cogName)
        await load_cog(ctx, cogName)
    except Exception as e:
        await ctx.channel.send(f"Während dem versuch die Erweiterung {cogName} neu zu laden ist etwas schiefgelaufen!")


@client.command(name="unload")
@owner_only
async def unload_cog(ctx, cogName):
    # Unload the named extension from cogs/ and confirm in the channel.
    try:
        client.unload_extension(f"cogs.{cogName}")
        await ctx.channel.send(f"Erfolgreich erweiterung {cogName} entladen!")
    except Exception as e:
        await ctx.channel.send(f"Fehler, entweder ist die erweiterung schong entladen, oder sie wurde nicht gefunden!")


@client.command(name="load")
@owner_only
async def load_cog(ctx, cogName):
    # Load the named extension from cogs/ and confirm in the channel.
    try:
        client.load_extension(f"cogs.{cogName}")
        await ctx.channel.send(f"Erfolgreich erweiterung {cogName} geladen!")
    except Exception as e:
        await ctx.channel.send(f"Fehler, entweder ist die erweiterung schon geladen, oder sie wurde nicht gefunden.")
# Read the bot token from the local config file.
with open('config/config.json', 'r') as f:
    json_stuff = json.load(f)
token = json_stuff["token"]

# load cogs
for filename in os.listdir("./cogs"):
    if filename.endswith(".py"):
        client.load_extension(f"cogs.{filename[:-3]}")

# run bot
client.run(token)
| 35.071823
| 158
| 0.625709
|
import asyncio
import json
import random
import subprocess
import discord
import discord.ext
import requests
from discord.ext import commands
import os
from botlibrary.utils import get_variable
from botlibrary import constants
constants.assignVariables()
VERSION = constants.VERSION
bot_prefix = constants.bot_prefix
client = commands.Bot(command_prefix=bot_prefix, intents=discord.Intents.all())
def tokenchecker():
    # Smoke-tests the Riot, osu!, Discord and ipdata API keys before startup.
    # Optional services let the user continue on failure ("j"); an invalid
    # Discord token always raises.
    riotapi = constants.lol_token
    base_riot_url = "https://euw1.api.riotgames.com/lol/summoner/v4/summoners/by-name/DCGALAXY?api_key="
    rioturl = base_riot_url + riotapi
    response = requests.get(rioturl)
    if response.status_code == 200:
        pass
    else:
        print("Der Riot-API Key hat nicht funktioniert :((")
        print(
            "Bitte checke ob der Key in der config.json richtig gesetzt ist und schau auf "
            "https://developer.riotgames.com/api-status/ nach ob es nicht vllt an Riot selber liegt")
        riotnotworkingexe = input("Willst du trotzdem starten? (j/n): ")
        if riotnotworkingexe == "j":
            pass
        else:
            raise Exception("Der Riot-API Key hat nicht funktioniert.")
    osuapi = constants.osu_token
    base_osu_url = "https://osu.ppy.sh/api/get_user_best?u=Aftersh0ock&k="
    osuurl = base_osu_url + osuapi
    osuresponse = requests.get(osuurl)
    if osuresponse.status_code == 200:
        pass
    else:
        print("Der Osu-API Key hat nicht funktioniert :((")
        print(
            "Bitte checke ob der Key in der config.json richtig gesetzt ist und schau auf https://status.ppy.sh nach ob es nicht vllt an Osu selber liegt")
        osunotworkingexe = input("Willst du trotzdem starten? (j/n): ")
        if osunotworkingexe == "j":
            pass
        else:
            raise Exception("Der Osu-API Key hat nicht funktioniert.")
    token = constants.bot_token
    headers = {
        "Authorization": "Bot " + token
    }
    response = requests.get('https://discordapp.com/api/v8/auth/login', headers=headers)
    if response.status_code == 200:
        pass
    else:
        raise Exception("Der Discord Bot Token funktioniert nicht!")
    ipdata = constants.ipdata_token
    baseipurl = "https://api.ipdata.co/8.8.8.8"
    ipurl = baseipurl + "?api-key=" + ipdata
    ipresponse = requests.get(ipurl)
    if ipresponse.status_code == 200:
        pass
    else:
        print("Der IPData-API Key hat nicht funktioniert :((")
        # NOTE(review): copy-paste error — this hint says "an Osu selber"
        # although it concerns the ipdata key.
        print(
            "Bitte checke ob der Key in der config.json richtig gesetzt ist und schau auf https://status.ipdata.co nach ob es nicht vllt an Osu selber liegt")
        ipdatanotworkingexe = input("Willst du trotzdem starten? (j/n): ")
        if ipdatanotworkingexe == "j":
            pass
        else:
            raise Exception("Der IPData Key hat nicht funktioniert.")
tokenchecker()
@client.event
async def on_ready():
    # Startup banner: version, MySQL-logging state and guild list; then the
    # rotating-status background task.
    print("Yess der bot läuft :)".format(client))  # NOTE(review): .format(client) is a no-op here
    print("Du hast derzeit Release " + str(VERSION) + " installiert")
    print("Du bist eingeloggt als {0.user} auf discord.py Version {1}".format(client, discord.__version__))
    if os.path.exists("config/mysql.json"):
        print("MySQL-Logging ist AKTIVIERT")
    else:
        print("MySQL-Logging ist DEAKTIVIERT")
    print("Der Bot ist zurzeit auf folgenden " + str(len(client.guilds)) + " Servern:")
    for guild in client.guilds:
        print("- " + str(guild.name))
    client.loop.create_task(status_task())


async def status_task():
    # Endless background loop rotating the bot's presence text.
    while True:
        await client.change_presence(activity=discord.Game("https://git.io/antonsbot"),
                                     status=discord.Status.online)
        await asyncio.sleep(60)
        await client.change_presence(
            activity=discord.Game(bot_prefix + "corona auf " + str(len(client.guilds)) + " Servern"))
        await asyncio.sleep(60)
        await client.change_presence(activity=discord.Game("ein heißes Spiel mit der Stiefschwester"))
        await asyncio.sleep(5)
        await client.change_presence(
            activity=discord.Activity(type=discord.ActivityType.watching, name="auf deine Nachrichten"))
        await asyncio.sleep(60)
def owner_only(func):
    # Decorator: restricts the wrapped bot command to the application owner;
    # everyone else gets an error message in the invoking channel.
    async def wrapper(self, *args, **kwargs):
        ctx = get_variable('ctx')  # pulled from the caller's frame by the project helper
        info = await client.application_info()
        if ctx.author.id == info.owner.id:
            return await func(self, *args, **kwargs)
        else:
            await ctx.channel.send("Error, only the bot owner can use this command!")
    return wrapper
@client.command(name="reload")
@owner_only
async def reload_cog(ctx, cogName):
    # Reload = unload then load; both sub-commands report their own errors.
    try:
        await unload_cog(ctx, cogName)
        await load_cog(ctx, cogName)
    except Exception as e:
        await ctx.channel.send(f"Während dem versuch die Erweiterung {cogName} neu zu laden ist etwas schiefgelaufen!")


@client.command(name="unload")
@owner_only
async def unload_cog(ctx, cogName):
    # Unload the named extension from cogs/ and confirm in the channel.
    try:
        client.unload_extension(f"cogs.{cogName}")
        await ctx.channel.send(f"Erfolgreich erweiterung {cogName} entladen!")
    except Exception as e:
        await ctx.channel.send(f"Fehler, entweder ist die erweiterung schong entladen, oder sie wurde nicht gefunden!")


@client.command(name="load")
@owner_only
async def load_cog(ctx, cogName):
    # Load the named extension from cogs/ and confirm in the channel.
    try:
        client.load_extension(f"cogs.{cogName}")
        await ctx.channel.send(f"Erfolgreich erweiterung {cogName} geladen!")
    except Exception as e:
        await ctx.channel.send(f"Fehler, entweder ist die erweiterung schon geladen, oder sie wurde nicht gefunden.")
# Read the bot token from the local config file.
with open('config/config.json', 'r') as f:
    json_stuff = json.load(f)
token = json_stuff["token"]

# Load every cog found under cogs/, then start the bot (blocking).
for filename in os.listdir("./cogs"):
    if filename.endswith(".py"):
        client.load_extension(f"cogs.{filename[:-3]}")

client.run(token)
| true
| true
|
f70c9dbaed52611996e0d139370df5a86dca6d9b
| 485
|
py
|
Python
|
dev/new_task.py
|
mccolm-robotics/Claver-Dispatch
|
84ce0b4b65862c48c892759d66506be9990d4449
|
[
"MIT"
] | null | null | null |
dev/new_task.py
|
mccolm-robotics/Claver-Dispatch
|
84ce0b4b65862c48c892759d66506be9990d4449
|
[
"MIT"
] | null | null | null |
dev/new_task.py
|
mccolm-robotics/Claver-Dispatch
|
84ce0b4b65862c48c892759d66506be9990d4449
|
[
"MIT"
] | null | null | null |
import pika
import sys
# Publish one durable task message to RabbitMQ, then disconnect.
connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()

# durable=True: the queue itself survives a broker restart.
channel.queue_declare(queue='task_queue', durable=True)

message = ' '.join(sys.argv[1:]) or "Hello World!"
channel.basic_publish(
    exchange='',
    routing_key='task_queue',
    body=message,
    properties=pika.BasicProperties(delivery_mode=2),  # make message persistent
)
print(" [x] Sent %r" % message)
connection.close()
| 25.526316
| 55
| 0.713402
|
import pika
import sys
# Publish one durable task message to RabbitMQ, then disconnect.
connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()
# durable=True: the queue itself survives a broker restart.
channel.queue_declare(queue='task_queue', durable=True)
message = ' '.join(sys.argv[1:]) or "Hello World!"
channel.basic_publish(
    exchange='',
    routing_key='task_queue',
    body=message,
    properties=pika.BasicProperties(
        delivery_mode=2,  # persistent message (survives broker restart)
    ))
print(" [x] Sent %r" % message)
connection.close()
| true
| true
|
f70c9e3b50c4675c04919b64426f03ccededdeba
| 118
|
py
|
Python
|
zdevelop/tests/test_example.py
|
illuscio-dev/islelib-py
|
6f4dd27233a7c38f112954673bb683c6790956fd
|
[
"MIT"
] | 1
|
2020-04-16T00:58:39.000Z
|
2020-04-16T00:58:39.000Z
|
zdevelop/tests/test_example.py
|
illuscio-dev/islelib-py
|
6f4dd27233a7c38f112954673bb683c6790956fd
|
[
"MIT"
] | null | null | null |
zdevelop/tests/test_example.py
|
illuscio-dev/islelib-py
|
6f4dd27233a7c38f112954673bb683c6790956fd
|
[
"MIT"
] | 1
|
2021-04-28T22:25:21.000Z
|
2021-04-28T22:25:21.000Z
|
from islelib import __version__
def test_example(example_fixture: int) -> None:
    """Smoke test: the package version metadata imports and is not False."""
    assert __version__ is not False
| 19.666667
| 47
| 0.779661
|
from islelib import __version__
def test_example(example_fixture: int) -> None:
    """Smoke test: the package version metadata imports and is not False."""
    assert __version__ is not False
| true
| true
|
f70c9efd4cf48afe372c5482d16a7c5c6f94e684
| 1,798
|
py
|
Python
|
tests/paz/backend/processor.py
|
niqbal996/paz
|
f27205907367415d5b21f90e1a1d1d1ce598e889
|
[
"MIT"
] | 300
|
2020-10-29T08:02:05.000Z
|
2022-03-30T21:47:32.000Z
|
tests/paz/backend/processor.py
|
albertofernandezvillan/paz
|
9fbd50b993f37e1e807297a29c6044c09967c9cc
|
[
"MIT"
] | 30
|
2020-10-29T12:40:32.000Z
|
2022-03-31T14:06:35.000Z
|
tests/paz/backend/processor.py
|
albertofernandezvillan/paz
|
9fbd50b993f37e1e807297a29c6044c09967c9cc
|
[
"MIT"
] | 62
|
2020-10-29T12:34:13.000Z
|
2022-03-29T05:21:45.000Z
|
from paz.core import Processor
from paz.core import SequentialProcessor
import numpy as np
class ProcessorA(Processor):
    """Shift the boxes down by one; the image passes through unchanged."""

    def __init__(self):
        super(ProcessorA, self).__init__()

    def call(self, image, boxes):
        return image, boxes - 1.0
class ProcessorB(Processor):
    """Shift the boxes down by two; the image passes through unchanged."""

    def __init__(self):
        super(ProcessorB, self).__init__()

    def call(self, image, boxes):
        return image, boxes - 2.0
class ProcessorC(Processor):
    """Scale image values from [0, 255] into [0, 1].

    ``probability`` is accepted for interface compatibility but unused.
    """

    def __init__(self, probability=0.5):
        super(ProcessorC, self).__init__()

    def call(self, image):
        return image / 255.0
class TransformA(SequentialProcessor):
    """Single-step pipeline that only normalises the image."""

    def __init__(self):
        super().__init__()
        self.add(ProcessorC())
class TransformB(SequentialProcessor):
    """Three-step pipeline: boxes are reduced by 1 + 2 + 2 = 5 in total."""

    def __init__(self):
        super().__init__()
        self.add(ProcessorA())
        self.add(ProcessorB())
        self.add(ProcessorB())
class TransformC(SequentialProcessor):
    """Single-step pipeline applying the box shift of ProcessorA."""

    def __init__(self):
        # Fix: the original called super(TransformB, self).__init__(), which
        # raises TypeError because TransformC is not a TransformB subclass.
        super(TransformC, self).__init__()
        self.add(ProcessorA())
def test_arg_in_sequential_processor_input():
    """A single positional argument flows through the one-step pipeline."""
    transformA = TransformA()
    values = transformA(255.0)
    # Fix: np.isclose takes (actual, desired); the original passed the single
    # boolean expression `values == 1.0` as its only argument.
    assert np.isclose(values, 1.0)
def test_kwargs_in_sequential_processor_input():
    """Keyword arguments are routed through the pipeline by name."""
    pipeline = TransformB()
    outputs = pipeline(image=1.0, boxes=2.0)
    assert np.allclose([1.0, -3.0], outputs)
def test_kwargs_invariance_in_sequential_processor_input():
    """The order in which keyword arguments are given must not matter."""
    pipeline = TransformB()
    outputs = pipeline(boxes=2.0, image=1.0)
    assert np.allclose([1.0, -3.0], outputs)
def test_flipped_kwargs_in_sequential_processor_input():
    """Swapped values confirm routing happens by name, not by position."""
    pipeline = TransformB()
    outputs = pipeline(boxes=1.0, image=2.0)
    assert np.allclose([2.0, -4.0], outputs)
| 24.297297
| 59
| 0.674638
|
from paz.core import Processor
from paz.core import SequentialProcessor
import numpy as np
class ProcessorA(Processor):
    # Subtracts 1.0 from the boxes; image passes through unchanged.
    def __init__(self):
        super(ProcessorA, self).__init__()

    def call(self, image, boxes):
        boxes = boxes - 1.0
        return image, boxes


class ProcessorB(Processor):
    # Subtracts 2.0 from the boxes; image passes through unchanged.
    def __init__(self):
        super(ProcessorB, self).__init__()

    def call(self, image, boxes):
        boxes = boxes - 2.0
        return image, boxes


class ProcessorC(Processor):
    # Scales image values from [0, 255] into [0, 1]; `probability` is unused.
    def __init__(self, probability=0.5):
        super(ProcessorC, self).__init__()

    def call(self, image):
        return image / 255.0
class TransformA(SequentialProcessor):
    # Single-step pipeline: normalise the image only.
    def __init__(self):
        super(TransformA, self).__init__()
        self.add(ProcessorC())


class TransformB(SequentialProcessor):
    # Three-step pipeline: boxes reduced by 1 + 2 + 2 = 5 in total.
    def __init__(self):
        super(TransformB, self).__init__()
        self.add(ProcessorA())
        self.add(ProcessorB())
        self.add(ProcessorB())
class TransformC(SequentialProcessor):
    """Single-step pipeline applying the box shift of ProcessorA."""

    def __init__(self):
        # Fix: the original called super(TransformB, self).__init__(), which
        # raises TypeError because TransformC is not a TransformB subclass.
        super(TransformC, self).__init__()
        self.add(ProcessorA())
def test_arg_in_sequential_processor_input():
    """A single positional argument flows through the one-step pipeline."""
    transformA = TransformA()
    values = transformA(255.0)
    # Fix: np.isclose takes (actual, desired); the original passed the single
    # boolean expression `values == 1.0` as its only argument.
    assert np.isclose(values, 1.0)
def test_kwargs_in_sequential_processor_input():
    # Image unchanged; boxes reduced by 5 across the three processors.
    transformB = TransformB()
    values = transformB(image=1.0, boxes=2.0)
    assert np.allclose([1.0, -3.0], values)


def test_kwargs_invariance_in_sequential_processor_input():
    # Keyword order must not matter.
    transformB = TransformB()
    values = transformB(boxes=2.0, image=1.0)
    assert np.allclose([1.0, -3.0], values)


def test_flipped_kwargs_in_sequential_processor_input():
    # Swapped values: routing happens by name, not by position.
    transformB = TransformB()
    values = transformB(boxes=1.0, image=2.0)
    assert np.allclose([2.0, -4.0], values)
| true
| true
|
f70c9fc4d2ad877ebca3e0fda9dbe2a471a56fe1
| 3,642
|
py
|
Python
|
5.2_CUSTOM_LIBRARY/model_analyzer.py
|
pedroMoya/M5_kaggle_uncertainty_share
|
f1dea9af9ec2e29e9bccb21d9b6e3627dff14c6e
|
[
"MIT"
] | null | null | null |
5.2_CUSTOM_LIBRARY/model_analyzer.py
|
pedroMoya/M5_kaggle_uncertainty_share
|
f1dea9af9ec2e29e9bccb21d9b6e3627dff14c6e
|
[
"MIT"
] | null | null | null |
5.2_CUSTOM_LIBRARY/model_analyzer.py
|
pedroMoya/M5_kaggle_uncertainty_share
|
f1dea9af9ec2e29e9bccb21d9b6e3627dff14c6e
|
[
"MIT"
] | null | null | null |
# Model architecture analyzer
import os
import logging
import logging.handlers as handlers
import json
import numpy as np
import tensorflow as tf
# GPU/backend setup: enable on-demand GPU memory growth and force float32.
# NOTE(review): indexing [0] raises IndexError on machines without a GPU.
physical_devices = tf.config.list_physical_devices('GPU')
tf.config.experimental.set_memory_growth(physical_devices[0], enable=True)
tf.keras.backend.set_floatx('float32')
from tensorflow.keras import layers
from tensorflow.keras.experimental import PeepholeLSTMCell
from tensorflow.keras.layers import TimeDistributed
from tensorflow.keras.layers import RepeatVector
from tensorflow.keras import regularizers
from tensorflow.keras import optimizers
from tensorflow.keras import losses, models
from tensorflow.keras import metrics
from tensorflow.keras import callbacks as cb
from tensorflow.keras import backend as kb
from sklearn.metrics import mean_squared_error
from tensorflow.keras.utils import plot_model, model_to_dot
# open local settings
with open('./settings.json') as local_json_file:
local_submodule_settings = json.loads(local_json_file.read())
local_json_file.close()
# log setup
current_script_name = os.path.basename(__file__).split('.')[0]
log_path_filename = ''.join([local_submodule_settings['log_path'], current_script_name, '.log'])
logging.basicConfig(filename=log_path_filename, level=logging.DEBUG,
format='%(asctime)s %(levelname)s %(name)s %(message)s')
logger = logging.getLogger(__name__)
logHandler = handlers.RotatingFileHandler(log_path_filename, maxBytes=10485760, backupCount=5)
logger.addHandler(logHandler)
class model_structure:
    """Inspect a saved Keras model: export its architecture to JSON and
    render PNG/PDF diagrams of an equivalent functional model."""

    def analize(self, local_model_name, local_settings):
        """Load a saved (h5) model, dump its architecture and plot it.

        :param local_model_name: model file name inside ``models_path``.
        :param local_settings: dict with at least the ``'models_path'`` key.
        :return: True on success, False if any load/save/plot step failed.
        """
        try:
            # loading model (h5 format)
            print('trying to open model file (assuming h5 format)')
            local_model = models.load_model(''.join([local_settings['models_path'], local_model_name]))
            # saving architecture in JSON format
            local_model_json = local_model.to_json()
            with open(''.join([local_settings['models_path'], local_model_name,
                               '_analyzed_.json']), 'w') as json_file:
                json_file.write(local_model_json)
                json_file.close()  # redundant: the with-block already closes the file
            # changing for subclassing to functional model
            # Rebuild the (possibly subclassed) model as a functional graph so
            # plot_model can expand its nested layers.
            local_model_json = json.loads(local_model_json)
            print(type(local_model_json))
            local_batch_size = None  # keep the batch dimension dynamic
            # NOTE(review): assumes the serialized config exposes
            # 'build_input_shape' as (batch, time_steps, features) — confirm
            # this holds for the models being analyzed.
            local_time_step_days = local_model_json['config']['build_input_shape'][1]
            local_features = local_model_json['config']['build_input_shape'][2]
            input_layer = layers.Input(batch_shape=(local_batch_size, local_time_step_days, local_features))
            prev_layer = input_layer
            # Re-apply every layer of the loaded model in order to the new input.
            for layer in local_model.layers:
                prev_layer = layer(prev_layer)
            functional_model = models.Model([input_layer], [prev_layer])
            # plotting (exporting to png) the model
            plot_path = ''.join([local_settings['models_path'], local_model_name, '_model.png'])
            # model_to_dot(functional_model, show_shapes=True, show_layer_names=True, rankdir='TB',
            #              expand_nested=True, dpi=96, subgraph=True)
            plot_model(functional_model, to_file=plot_path, show_shapes=True, show_layer_names=True,
                       rankdir='TB', expand_nested=True, dpi=216)
            plot_model(functional_model, to_file=''.join([plot_path, '.pdf']), show_shapes=True, show_layer_names=True,
                       rankdir='TB', expand_nested=True)
        except Exception as e1:
            print('Error reading or saving model')
            print(e1)
            return False
        return True
| 47.921053
| 119
| 0.704009
|
import os
import logging
import logging.handlers as handlers
import json
import numpy as np
import tensorflow as tf
physical_devices = tf.config.list_physical_devices('GPU')
tf.config.experimental.set_memory_growth(physical_devices[0], enable=True)
tf.keras.backend.set_floatx('float32')
from tensorflow.keras import layers
from tensorflow.keras.experimental import PeepholeLSTMCell
from tensorflow.keras.layers import TimeDistributed
from tensorflow.keras.layers import RepeatVector
from tensorflow.keras import regularizers
from tensorflow.keras import optimizers
from tensorflow.keras import losses, models
from tensorflow.keras import metrics
from tensorflow.keras import callbacks as cb
from tensorflow.keras import backend as kb
from sklearn.metrics import mean_squared_error
from tensorflow.keras.utils import plot_model, model_to_dot
with open('./settings.json') as local_json_file:
local_submodule_settings = json.loads(local_json_file.read())
local_json_file.close()
current_script_name = os.path.basename(__file__).split('.')[0]
log_path_filename = ''.join([local_submodule_settings['log_path'], current_script_name, '.log'])
logging.basicConfig(filename=log_path_filename, level=logging.DEBUG,
format='%(asctime)s %(levelname)s %(name)s %(message)s')
logger = logging.getLogger(__name__)
logHandler = handlers.RotatingFileHandler(log_path_filename, maxBytes=10485760, backupCount=5)
logger.addHandler(logHandler)
class model_structure:
def analize(self, local_model_name, local_settings):
try:
print('trying to open model file (assuming h5 format)')
local_model = models.load_model(''.join([local_settings['models_path'], local_model_name]))
local_model_json = local_model.to_json()
with open(''.join([local_settings['models_path'], local_model_name,
'_analyzed_.json']), 'w') as json_file:
json_file.write(local_model_json)
json_file.close()
local_model_json = json.loads(local_model_json)
print(type(local_model_json))
local_batch_size = None
local_time_step_days = local_model_json['config']['build_input_shape'][1]
local_features = local_model_json['config']['build_input_shape'][2]
input_layer = layers.Input(batch_shape=(local_batch_size, local_time_step_days, local_features))
prev_layer = input_layer
for layer in local_model.layers:
prev_layer = layer(prev_layer)
functional_model = models.Model([input_layer], [prev_layer])
plot_path = ''.join([local_settings['models_path'], local_model_name, '_model.png'])
plot_model(functional_model, to_file=plot_path, show_shapes=True, show_layer_names=True,
rankdir='TB', expand_nested=True, dpi=216)
plot_model(functional_model, to_file=''.join([plot_path, '.pdf']), show_shapes=True, show_layer_names=True,
rankdir='TB', expand_nested=True)
except Exception as e1:
print('Error reading or saving model')
print(e1)
return False
return True
| true
| true
|
f70ca09d28978c7b179ba1d709eabf61d6294994
| 1,670
|
py
|
Python
|
panoptes/webapp/config.py
|
lesingerouge/Panoptes
|
4be798b31f659694b83533f8df520d674756afd7
|
[
"MIT"
] | null | null | null |
panoptes/webapp/config.py
|
lesingerouge/Panoptes
|
4be798b31f659694b83533f8df520d674756afd7
|
[
"MIT"
] | null | null | null |
panoptes/webapp/config.py
|
lesingerouge/Panoptes
|
4be798b31f659694b83533f8df520d674756afd7
|
[
"MIT"
] | null | null | null |
# system imports
import os, json
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
    """
    Base configuration shared by both the website and admin applications.

    Holds database, threading, mail, permission and menu settings; subclasses
    override environment-specific values.
    """
    MONGO_HOST = "localhost"
    MONGO_DBNAME = "panoptes"

    THREADS_PER_PAGE = 8

    # Mail settings are read once at class-definition time; a context manager
    # ensures the config file handle is closed (the original json.load(open(...))
    # leaked it).
    with open('mailcfg.json') as _mailcfg_file:
        mailcfg = json.load(_mailcfg_file)
    del _mailcfg_file  # keep the closed handle out of the class namespace
    MAIL_SERVER = mailcfg['MAIL_SERVER']
    MAIL_PORT = mailcfg['MAIL_PORT']
    MAIL_USE_TLS = mailcfg['MAIL_USE_TLS']
    MAIL_USERNAME = mailcfg['MAIL_USERNAME']
    MAIL_PASSWORD = mailcfg['MAIL_PASSWORD']
    MAIL_ADDRESS = mailcfg['MAIL_ADDRESS']

    # Role name -> privilege level (higher number = more access).
    PERMISSIONS = {'admin': 3, 'manager': 2, 'user': 1}

    SERVICE_TYPES = ["mongo", "redis"]
    # Menu entries: (url, endpoint group, display title).
    SERVICES = [('/services/db/' + x, 'services', x.title()) for x in SERVICE_TYPES]
    SERVICES += [('/services/setari/lista', 'services', 'Settings')]
    MENU = [('services.home', 'hardware', 'Hardware', ''),
            ('services.home', 'services', 'Software services', SERVICES),
            ('services.home', 'hardware', 'App', '')]

    @staticmethod
    def init_app(app):
        """Hook for application-specific initialisation; no-op by default."""
        pass
class DevConfig(Config):
    """
    Development configuration: debugging enabled, CSRF protection disabled.
    """
    SECRET_KEY = 'hardtoguesstring'
    DEBUG = True
    # Read the admin account list without leaking the file handle.
    with open('admins.json') as _admins_file:
        ADMINS = json.load(_admins_file)
    del _admins_file  # keep the closed handle out of the class namespace
    CSRF_ENABLED = False
    CSRF_SESSION_KEY = "somethingimpossibletoguess"
class AdminConfig(Config):
    """
    Admin/production configuration: debugging off, CSRF protection enabled.
    """
    SECRET_KEY = 'hardtoguesstring'
    DEBUG = False
    # Read the admin account list without leaking the file handle.
    with open('admins.json') as _admins_file:
        ADMINS = json.load(_admins_file)
    del _admins_file  # keep the closed handle out of the class namespace
    CSRF_ENABLED = True
    CSRF_SESSION_KEY = "somethingimpossibletoguess"
# Configuration registry: environment name -> config class, consumed by the
# application factory when selecting 'dev' or 'admin' mode.
config = {
    'dev': DevConfig,
    'admin':AdminConfig
}
| 22.876712
| 151
| 0.658084
|
import os, json
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
MONGO_HOST = "localhost"
MONGO_DBNAME = "panoptes"
THREADS_PER_PAGE = 8
mailcfg = json.load(open('mailcfg.json'))
MAIL_SERVER = mailcfg['MAIL_SERVER']
MAIL_PORT = mailcfg['MAIL_PORT']
MAIL_USE_TLS = mailcfg['MAIL_USE_TLS']
MAIL_USERNAME = mailcfg['MAIL_USERNAME']
MAIL_PASSWORD = mailcfg['MAIL_PASSWORD']
MAIL_ADDRESS = mailcfg['MAIL_ADDRESS']
PERMISSIONS = {'admin':3, 'manager':2, 'user':1}
SERVICE_TYPES = ["mongo","redis"]
SERVICES = [('/services/db/'+x,'services',x.title()) for x in SERVICE_TYPES]
SERVICES += [('/services/setari/lista','services','Settings')]
MENU = [('services.home','hardware','Hardware',''),('services.home','services','Software services',SERVICES),('services.home','hardware','App','')]
@staticmethod
def init_app(app):
pass
class DevConfig(Config):
SECRET_KEY = 'hardtoguesstring'
DEBUG = True
ADMINS = json.load(open('admins.json'))
CSRF_ENABLED = False
CSRF_SESSION_KEY = "somethingimpossibletoguess"
class AdminConfig(Config):
SECRET_KEY = 'hardtoguesstring'
DEBUG = False
ADMINS = json.load(open('admins.json'))
CSRF_ENABLED = True
CSRF_SESSION_KEY = "somethingimpossibletoguess"
config = {
'dev': DevConfig,
'admin':AdminConfig
}
| true
| true
|
f70ca13496a84f896b25dd21e28f513253f96a73
| 1,375
|
py
|
Python
|
base-LASED/LASED/.ipynb_checkpoints/detuning-checkpoint.py
|
mvpmanish/LASED
|
7793037b3e77ee9205f631d7ff6c511895108400
|
[
"MIT"
] | 7
|
2021-06-07T14:58:01.000Z
|
2022-03-24T18:08:13.000Z
|
base-LASED/build/lib/LASED/detuning.py
|
mvpmanish/LASED
|
7793037b3e77ee9205f631d7ff6c511895108400
|
[
"MIT"
] | 13
|
2021-06-07T14:15:54.000Z
|
2022-03-29T11:06:10.000Z
|
base-LASED/LASED/detuning.py
|
mvpmanish/LASED
|
7793037b3e77ee9205f631d7ff6c511895108400
|
[
"MIT"
] | 2
|
2021-12-09T06:31:23.000Z
|
2022-03-18T17:29:48.000Z
|
'''
Define functions for the detuning of an atomic system
'''
from LASED.state import *
from LASED.constants import *
def delta(e, g):
    """Detuning (angular-frequency difference) between two substates.

    Parameters:
        e (State): State object for the upper substate.
        g (State): State object for the lower substate.

    Returns:
        float: Difference of the states' angular frequencies (Grad/s).
    """
    detuning = e.w - g.w
    return detuning
def angularFreq(wavelength):
    """Convert a wavelength to an angular frequency.

    Parameters:
        wavelength (float): Wavelength in nm.

    Returns:
        float: Angular frequency 2*pi*c/wavelength, scaled to Grad/s.
    """
    # Same evaluation order as 2*PI*C/wavelength*1e-9 to keep float results
    # bit-identical.
    omega = 2 * PI * C / wavelength
    return omega * 1e-9
def dopplerDelta(e, g, w_q, lambda_q, v_z):
    """Laser-to-transition detuning including the first-order Doppler shift.

    Accounts for a fixed atomic velocity component along the laser axis.

    Parameters:
        e (State): State object for the excited state.
        g (State): State object for the ground state.
        w_q (float): Angular frequency of the exciting laser (rad/s).
        lambda_q (float): Wavelength of the exciting laser (m).
        v_z (float): Atom velocity component along the laser direction (m/s).

    Returns:
        float: Detuning between ground and excited states, Doppler-shifted
        by the given atomic velocity.
    """
    doppler_shift = v_z / lambda_q
    return w_q - doppler_shift - e.w + g.w
| 29.891304
| 124
| 0.647273
|
from LASED.state import *
from LASED.constants import *
def delta(e, g):
return e.w - g.w
def angularFreq(wavelength):
return 2*PI*C/wavelength*1e-9
def dopplerDelta(e, g, w_q, lambda_q, v_z):
return w_q - v_z/lambda_q - e.w + g.w
| true
| true
|
f70ca15d9c4d2723eb2b39fff7fab7a631832be1
| 1,602
|
py
|
Python
|
desktop/core/ext-py/lxml/benchmark/bench_xslt.py
|
t3hi3x/hue
|
36d71c1a8dd978b899ef2dc3eef8887b68fd99a8
|
[
"Apache-2.0"
] | 19
|
2015-05-01T19:59:03.000Z
|
2021-12-09T08:03:16.000Z
|
desktop/core/ext-py/lxml/benchmark/bench_xslt.py
|
t3hi3x/hue
|
36d71c1a8dd978b899ef2dc3eef8887b68fd99a8
|
[
"Apache-2.0"
] | 1
|
2018-01-03T15:26:49.000Z
|
2018-01-03T15:26:49.000Z
|
desktop/core/ext-py/lxml/benchmark/bench_xslt.py
|
t3hi3x/hue
|
36d71c1a8dd978b899ef2dc3eef8887b68fd99a8
|
[
"Apache-2.0"
] | 30
|
2015-03-25T19:40:07.000Z
|
2021-05-28T22:59:26.000Z
|
import sys, copy
from itertools import *
from StringIO import StringIO
import benchbase
from benchbase import with_attributes, with_text, onlylib, serialized
############################################################
# Benchmarks
############################################################
class XSLTBenchMark(benchbase.TreeBenchMark):
    """Benchmarks for XSLT transformation (lxml.etree only)."""

    @onlylib('lxe')
    def bench_xslt_extensions_old(self, root):
        # Time an XSLT run that calls back into a Python extension function
        # registered under ('testns', 'child'); applied 10 times to the tree.
        tree = self.etree.XML("""\
<xsl:stylesheet version="1.0"
    xmlns:l="test"
    xmlns:testns="testns"
    xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
  <l:data>TEST</l:data>
  <xsl:template match="/">
    <l:result>
      <xsl:for-each select="*/*">
        <xsl:copy-of select="testns:child(.)"/>
      </xsl:for-each>
    </l:result>
  </xsl:template>
</xsl:stylesheet>
""")
        def return_child(_, elements):
            # Extension function: return the first grandchild of the first
            # node in the node-set.
            return elements[0][0]
        extensions = {('testns', 'child') : return_child}
        transform = self.etree.XSLT(tree, extensions)
        for i in range(10):
            transform(root)

    @onlylib('lxe')
    def bench_xslt_document(self, root):
        # Time an XSLT run that uses document('') to re-read data embedded in
        # the stylesheet itself.
        transform = self.etree.XSLT(self.etree.XML("""\
<xsl:stylesheet version="1.0"
    xmlns:l="test"
    xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
  <l:data>TEST</l:data>
  <xsl:template match="/">
    <l:result>
      <xsl:for-each select="*/*">
        <l:test><xsl:copy-of select="document('')//l:data/text()"/></l:test>
      </xsl:for-each>
    </l:result>
  </xsl:template>
</xsl:stylesheet>
"""))
        transform(root)
if __name__ == '__main__':
    benchbase.main(XSLTBenchMark)  # run the benchmark suite from the command line
| 27.152542
| 76
| 0.574282
|
import sys, copy
from itertools import *
from StringIO import StringIO
import benchbase
from benchbase import with_attributes, with_text, onlylib, serialized
| true
| true
|
f70ca2e741a791f661c4c075fecb809c2656b213
| 28,177
|
py
|
Python
|
unifac/database.py
|
JacekPrzemieniecki/UNIFAC
|
c32702aeffd695a9fd3883cf150500e902df7a97
|
[
"MIT"
] | 6
|
2016-01-04T02:33:13.000Z
|
2021-07-18T08:32:51.000Z
|
unifac/database.py
|
JacekPrzemieniecki/UNIFAC
|
c32702aeffd695a9fd3883cf150500e902df7a97
|
[
"MIT"
] | 1
|
2020-07-27T17:56:23.000Z
|
2020-08-10T06:30:06.000Z
|
unifac/database.py
|
JacekPrzemieniecki/UNIFAC
|
c32702aeffd695a9fd3883cf150500e902df7a97
|
[
"MIT"
] | 6
|
2015-01-02T19:18:16.000Z
|
2021-04-16T08:12:14.000Z
|
'''
Created on 08-10-2012
@author: Jacek Przemieniecki
'''
from . import errors
class Database(object):
    """Read-only accessor over the module-level UNIFAC tables
    (``valency``, ``str_to_id``, ``q_r_data`` and ``params``)."""

    def __init__(self):
        pass

    def get_atom_valency(self, symbol):
        """Return the valency of the atom with the given chemical symbol."""
        return valency[symbol]

    def get_q_r(self, symbol):
        """Return the (q, r) data tuple for the given subgroup symbol."""
        group_id, _ = str_to_id[symbol]
        return q_r_data[group_id]

    def get_parameter(self, symbol1, symbol2):
        """Return the interaction parameter between two subgroup symbols.

        Identical symbols interact with parameter 0.0; a missing table
        entry raises ``errors.ValueNotFound``.
        """
        if symbol1 == symbol2:
            return 0.0
        # str_to_id maps symbol -> (subgroup id, main-group number); the
        # main-group numbers are 1-based while the params table is 0-based.
        row = str_to_id[symbol1][1] - 1
        col = str_to_id[symbol2][1] - 1
        value = params[row][col]
        if value is None:
            raise errors.ValueNotFound()
        return value

    def iterate_strings(self):
        """Yield every known subgroup symbol."""
        for symbol in str_to_id:
            yield symbol
# Maximum number of covalent bonds each supported element can form.
valency = {
    "C": 4,
    "N": 3,
    "O": 2,
    "S": 2,
    "Si": 4,
    "Cl": 1,
    "Br": 1,
    "I": 1,
    "F": 1,
}
### UNIFAC main-group interaction parameters a(i, j): params[i-1][j-1] is the
### parameter for main groups i and j (1-based ids); None means no published
### value. Data from http://www.aim.env.uea.ac.uk/aim/info/UNIFACgroups.html
params = [[0.0, 86.02, 61.13, 76.5, 986.5, 697.2, 1318.0, 1333.0, 476.4, 677.0, 232.1, 507.0, 251.5, 391.5, 255.7, 206.6, 920.7, 287.77, 597.0, 663.5, 35.93, 53.76, 24.9, 104.3, 11.44, 661.5, 543.0, 153.6, 184.4, 354.55, 3025.0, 335.8, 479.5, 298.9, 526.5, 689.0, -4.189, 125.8, 485.3, -2.859, 387.1, -450.4, 252.7, 220.3, -5.869, 390.9, 553.3, 187.0, 216.1, 92.99, None, 808.59, 408.3, 718.01, None, 153.72, ], #1
[-35.36, 0.0, 38.81, 74.15, 524.1, 787.6, 270.6, 526.1, 182.6, 448.8, 37.85, 333.5, 214.5, 240.9, 163.9, 61.11, 749.3, 280.5, 336.9, 318.9, -36.87, 58.55, -13.99, -109.7, 100.1, 357.5, None, 76.302, None, 262.9, None, None, 183.8, 31.14, 179.0, -52.87, -66.46, 359.3, -70.45, 449.4, 48.33, None, None, 86.46, None, 200.2, 268.1, -617.0, 62.56, None, None, 200.94, 219.9, -677.25, None, None, ], #2
[-11.12, 3.446, 0.0, 167.0, 636.1, 637.35, 903.8, 1329.0, 25.77, 347.3, 5.994, 287.1, 32.14, 161.7, 122.8, 90.49, 648.2, -4.449, 212.5, 537.4, -18.81, -144.4, -231.9, 3.0, 187.0, 168.0, 194.9, 52.07, -10.43, -64.69, 210.4, 113.3, 261.3, 154.26, 169.9, 383.9, -259.1, 389.3, 245.6, 22.67, 103.5, -432.3, 238.9, 30.04, -88.11, None, 333.3, None, -59.58, -39.16, None, 360.82, 171.49, 272.33, 22.06, 174.35, ], #3
[-69.7, -113.6, -146.8, 0.0, 803.2, 603.25, 5695.0, 884.9, -52.1, 586.6, 5688.0, 197.8, 213.1, 19.02, -49.29, 23.5, 664.2, 52.8, 6096.0, 872.3, -114.1, -111.0, -80.25, -141.3, -211.0, 3629.0, 4448.0, -9.451, 393.6, 48.49, 4975.0, 259.0, 210.0, -152.55, 4284.0, -119.2, -282.5, 101.4, 5629.0, -245.39, 69.26, 683.3, 355.5, 46.38, None, None, 421.9, None, -203.6, 184.9, None, 233.51, -184.68, 9.63, 795.38, -280.9, ], #4
[156.4, 457.0, 89.6, 25.82, 0.0, -137.1, 353.5, -259.7, 84.0, -203.6, 101.1, 267.8, 28.06, 83.02, 42.7, -323.0, -52.39, 170.0, 6.712, 199.0, 75.62, 65.28, -98.12, 143.1, 123.5, 256.5, 157.1, 488.9, 147.5, -120.5, -318.9, 313.5, 202.1, 727.8, -202.1, 74.27, 225.8, 44.78, -143.9, None, 190.3, -817.7, 202.7, -504.2, 72.96, -382.7, -248.3, None, 104.7, 57.65, None, 215.81, 6.39, None, None, 147.97, ], #5
[16.51, -12.52, -50.0, -44.5, 249.1, 0.0, -181.0, -101.7, 23.39, 306.4, -10.72, 179.7, -128.6, 359.3, -20.98, 53.9, 489.7, 580.5, 53.28, -202.0, -38.32, -102.5, -139.4, -44.76, -28.25, 75.14, 457.88, -31.09, 17.5, -61.76, -119.2, 212.1, 106.3, -119.1, -399.3, -5.224, 33.47, -48.25, -172.4, None, 165.7, None, None, None, -52.1, None, None, 37.63, -59.4, -46.01, None, 150.02, 98.2, None, None, None, ], #6
[300.0, 496.1, 362.3, 377.6, -229.1, 289.6, 0.0, 324.5, -195.4, -116.0, 72.87, 233.87, 540.5, 48.89, 168.0, 304.0, 459.0, 459.0, 112.6, -14.09, 325.4, 370.4, 353.7, 497.5, 133.9, 220.6, 399.5, 887.1, None, 188.0, 12.72, None, 777.1, None, -139.0, 160.8, None, None, 319.0, None, -197.5, -363.8, None, -452.2, None, 835.6, 139.6, None, 407.9, None, None, -255.63, -144.77, None, None, 580.28, ], #7
[275.8, 217.5, 25.34, 244.2, -451.6, -265.2, -601.8, 0.0, -356.1, -271.1, -449.4, -32.52, -162.9, -832.97, None, None, -305.5, -305.5, None, 408.9, None, 517.27, None, 1827.0, 6915.0, None, -413.48, 8484.0, None, None, -687.1, None, None, None, None, None, None, None, None, None, -494.2, None, None, -659.0, None, None, None, None, None, 1005.0, None, None, None, None, None, None, ], #8
[26.76, 42.92, 140.1, 365.8, 164.5, 108.7, 472.5, -133.1, 0.0, -37.36, -213.7, -190.4, -103.6, None, -174.2, -169.0, 6201.0, 7.341, 481.7, 669.4, -191.7, -130.3, -354.6, -39.2, -119.8, 137.5, 548.5, 216.1, -46.28, -163.7, 71.46, 53.59, 245.2, -246.6, -44.58, -63.5, -34.57, None, -61.7, None, -18.8, -588.9, None, None, None, None, 37.54, None, None, -162.6, None, None, -288.94, 91.01, None, 179.74, ], #9
[505.7, 56.3, 23.39, 106.0, 529.0, -340.2, 480.8, -155.6, 128.0, 0.0, -110.3, 766.0, 304.1, None, None, None, None, None, -106.4, 497.5, 751.9, 67.52, -483.7, None, None, None, None, None, None, None, None, 117.0, None, 2.21, None, -339.2, 172.4, None, -268.8, None, -275.5, None, None, None, None, None, None, None, None, None, None, None, 79.71, None, None, None, ], #10
[114.8, 132.1, 85.84, -170.0, 245.4, 249.63, 200.8, -36.72, 372.2, 185.1, 0.0, -241.8, -235.7, None, -73.5, -196.7, 475.5, -0.13, 494.6, 660.2, -34.74, 108.9, -209.7, 54.57, 442.4, -81.13, None, 183.0, None, 202.3, -101.7, 148.3, 18.88, 71.48, 52.08, -28.61, -275.2, None, 85.33, None, 560.2, None, None, None, None, None, 151.8, None, None, None, None, None, 36.34, 446.9, None, None, ], #11
[329.3, 110.4, 18.12, 428.0, 139.4, 227.8, 124.63, -234.25, 385.4, -236.5, 1167.0, 0.0, -234.0, None, None, None, None, -233.4, -47.25, -268.1, None, 31.0, -126.2, 179.7, 24.28, None, None, None, 103.9, None, None, None, 298.13, None, None, None, -11.4, None, 308.9, None, -70.24, None, None, None, None, None, None, None, None, None, None, None, -77.96, None, None, None, ], #12
[83.36, 26.51, 52.13, 65.69, 237.7, 238.4, -314.7, -178.5, 191.1, -7.838, 461.3, 457.3, 0.0, -78.36, 251.5, 5422.3, -46.39, 213.2, -18.51, 664.6, 301.1, 137.8, -154.3, 47.67, 134.8, 95.18, 155.11, 140.9, -8.538, 170.1, -20.11, -149.5, -202.3, -156.57, 128.8, None, 240.2, -273.9, 254.8, -172.51, 417.0, 1338.0, None, None, None, None, None, None, None, None, None, None, 567.0, 102.21, None, None, ], #13
[-30.48, 1.163, -44.85, 296.4, -242.8, -481.7, -330.48, -870.8, None, None, None, None, 222.1, 0.0, -107.2, -41.11, -200.7, None, 358.9, None, -82.92, None, None, -99.81, 30.05, None, None, None, -70.14, None, None, None, None, None, 874.19, None, None, None, -164.0, None, None, -664.4, 275.9, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #14
[65.33, -28.7, -22.31, 223.0, -150.0, -370.3, -448.2, None, 394.6, None, 136.0, None, -56.08, 127.4, 0.0, -189.2, 138.54, 431.49, 147.1, None, None, None, None, 71.23, -18.93, None, None, None, None, None, 939.07, None, None, None, None, None, None, 570.9, -255.22, None, -38.77, 448.1, -1327.0, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #15
[-83.98, -25.38, -223.9, 109.9, 28.6, -406.8, -598.8, None, 225.3, None, 2889.0, None, -194.1, 38.89, 865.9, 0.0, 287.43, None, 1255.1, None, -182.91, -73.85, -352.9, -262.0, -181.9, None, None, None, None, None, None, None, None, None, 243.1, None, None, -196.3, 22.05, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #16
[1139.0, 2000.0, 247.5, 762.8, -17.4, -118.1, -341.6, -253.1, -450.3, None, -294.8, None, 285.36, -15.07, 64.3, -24.46, 0.0, 89.7, -281.6, -396.0, 287.0, -111.0, None, 882.0, 617.5, None, -139.3, None, None, None, 0.1004, None, None, None, None, None, None, None, -334.4, None, -89.42, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #17
[-101.6, -47.63, 31.87, 49.8, -132.3, -378.2, -332.9, -341.6, 29.1, None, 8.87, 554.4, -156.1, None, -207.66, None, 117.4, 0.0, -169.7, -153.7, None, -351.6, -114.7, -205.3, -2.17, None, 2845.0, None, None, None, None, None, -60.78, None, None, None, 160.7, -158.8, None, None, None, None, None, None, None, None, None, None, None, -136.6, None, None, None, 98.82, None, None, ], #18
[24.82, -40.62, -22.97, -138.4, 185.4, 162.6, 242.8, None, -287.5, 224.66, -266.6, 99.37, 38.81, -157.3, -108.5, -446.86, 777.4, 134.3, 0.0, 205.27, 4.933, -152.7, -15.62, -54.86, -4.624, -0.515, None, 230.9, 0.4604, None, 177.5, None, -62.17, -203.0, None, 81.57, -55.77, None, -151.5, None, 120.3, None, None, None, None, None, 16.23, None, None, None, None, None, None, None, None, None, ], #19
[315.3, 1264.0, 62.32, 89.86, -151.0, 339.8, -66.17, -11.0, -297.8, -165.5, -256.3, 193.9, -338.5, None, None, None, 493.8, -313.5, 92.07, 0.0, 13.41, -44.7, 39.63, 183.4, -79.08, None, None, None, None, -208.9, None, 228.4, -95.0, None, -463.6, None, -11.16, None, -228.0, None, -337.0, 169.3, 127.2, None, None, -322.3, None, None, None, None, None, None, 12.55, -60.07, 88.09, None, ], #20
[91.46, 40.25, 4.68, 122.9, 562.2, 529.0, 698.2, None, 286.3, -47.51, 35.38, None, 225.4, 131.2, None, 151.38, 429.7, None, 54.32, 519.1, 0.0, 108.3, 249.2, 62.42, 153.0, 32.73, 86.2, 450.1, 59.02, 65.56, None, 2.22, 344.4, None, None, None, -168.2, None, 6.57, None, 63.67, None, None, None, None, None, None, None, None, None, None, None, -127.9, None, None, None, ], #21
[34.01, -23.5, 121.3, 140.8, 527.6, 669.9, 708.7, 1633.5, 82.86, 190.6, -132.9, 80.99, -197.7, None, None, -141.4, 140.8, 587.3, 258.6, 543.3, -84.53, 0.0, 0.0, 56.33, 223.1, 108.9, None, None, None, 149.56, None, 177.6, 315.9, None, 215.0, None, -91.8, None, -160.28, None, -96.87, None, None, None, None, None, 361.1, None, None, None, None, None, None, None, None, None, ], #22
[36.7, 51.06, 288.5, 69.9, 742.1, 649.1, 826.76, None, 552.1, 242.8, 176.5, 235.6, -20.93, None, None, -293.7, None, 18.98, 74.04, 504.2, -157.1, 0.0, 0.0, -30.1, 192.1, None, None, 116.6, None, -64.38, None, 86.4, 168.8, None, 363.7, None, 111.2, None, None, None, 255.8, None, None, -35.68, None, None, None, 565.9, None, None, None, None, 165.67, None, None, None, ], #23
[-78.45, 160.9, -4.7, 134.7, 856.3, 709.6, 1201.0, 10000.0, 372.0, None, 129.5, 351.9, 113.9, 261.1, 91.13, 316.9, 898.2, 368.5, 492.0, 631.0, 11.8, 17.97, 51.9, 0.0, -75.97, 490.9, 534.7, 132.2, None, 546.7, None, 247.8, 146.6, None, 337.7, 369.5, 187.1, 215.2, 498.6, None, 256.5, None, 233.1, None, None, None, 423.1, 63.95, None, 108.5, None, 585.19, 291.87, 532.73, None, 127.16, ], #24
[106.8, 70.32, -97.27, 402.5, 325.7, 612.8, -274.5, 622.3, 518.4, None, -171.1, 383.3, -25.15, 108.5, 102.2, 2951.0, 334.9, 20.18, 363.5, 993.4, -129.7, -8.309, -0.2266, -248.4, 0.0, 132.7, 2213.0, None, None, None, None, None, 593.4, None, 1337.37, None, None, None, 5143.14, 309.58, -145.1, None, None, -209.7, None, None, 434.1, None, None, None, None, None, None, None, None, 8.48, ], #25
[-32.69, -1.996, 10.38, -97.05, 261.6, 252.6, 417.9, None, -142.6, None, 129.3, None, -94.49, None, None, None, None, None, 0.2827, None, 113.0, -9.639, None, -34.68, 132.9, 0.0, 533.2, 320.2, None, None, 139.8, 304.3, 10.17, -27.7, None, None, 10.76, None, -223.1, None, 248.4, None, None, None, -218.9, None, None, None, None, -4.565, None, None, None, None, None, None, ], #26
[5541.0, None, 1824.0, -127.8, 561.6, 511.29, 360.7, 815.12, -101.5, None, None, None, 220.66, None, None, None, 134.9, 2475.0, None, None, 1971.0, None, None, 514.6, -123.1, -85.12, 0.0, None, None, None, None, 2990.0, -124.0, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 1742.53, ], #27
[-52.65, 16.62, 21.5, 40.68, 609.8, 914.2, 1081.0, 1421.0, 303.7, None, 243.8, None, 112.4, None, None, None, None, None, 335.7, None, -73.09, None, -26.06, -60.71, None, 277.8, None, 0.0, None, None, None, 292.7, None, None, None, None, -47.37, None, None, None, 469.8, None, None, None, None, None, None, None, None, None, None, None, None, 684.78, None, None, ], #28
[-7.481, None, 28.41, 19.56, 461.6, 448.6, None, None, 160.6, None, None, 201.5, 63.71, 106.7, None, None, None, None, 161.0, None, -27.94, None, None, None, None, None, None, None, 0.0, None, None, None, None, None, 31.66, None, None, None, 78.92, None, None, None, None, 1004.0, None, None, None, -18.27, None, None, None, None, None, None, None, None, ], #29
[-25.31, 82.64, 157.3, 128.8, 521.6, 287.0, 23.48, None, 317.5, None, -146.3, None, -87.31, None, None, None, None, None, None, 570.6, -39.46, -116.21, 48.48, -133.16, None, None, None, None, None, 0.0, None, None, None, None, None, None, 262.9, None, None, None, 43.37, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #30
[140.0, None, 221.4, 150.6, 267.6, 240.8, -137.4, 838.4, 135.4, None, 152.0, None, 9.207, None, -213.74, None, 192.3, None, 169.6, None, None, None, None, None, None, 481.3, None, None, None, None, 0.0, None, None, None, -417.2, None, None, None, 302.2, None, 347.8, None, None, -262.0, None, None, -353.5, None, None, None, None, None, None, None, None, None, ], #31
[128.0, None, 58.68, 26.41, 501.3, 431.3, None, None, 138.0, 245.9, 21.92, None, 476.6, None, None, None, None, None, None, 616.6, 179.25, -40.82, 21.76, 48.49, None, 64.28, 2448.0, -27.45, None, None, None, 0.0, 6.37, None, None, None, None, None, None, None, 68.55, None, None, None, None, None, None, None, None, None, None, None, None, 190.81, None, None, ], #32
[-31.52, 174.6, -154.2, 1112.0, 524.9, 494.7, 79.18, None, -142.6, None, 24.37, -92.26, 736.4, None, None, None, None, -42.71, 136.9, 5256.0, -262.3, -174.5, -46.8, 77.55, -185.3, 125.3, 4288.0, None, None, None, None, 37.1, 0.0, None, 32.9, None, -48.33, None, 336.25, None, -195.1, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #33
[-72.88, 41.38, -101.12, 614.52, 68.95, 967.71, None, None, 443.6, -55.87, -111.45, None, 173.77, None, None, None, None, None, 329.1, None, None, None, None, None, None, 174.4, None, None, None, None, None, None, None, 0.0, None, None, 2073.0, None, -119.8, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #34
[50.49, 64.07, -2.504, -143.2, -25.87, 695.0, -240.0, None, 110.4, None, 41.57, None, -93.51, -366.51, None, -257.2, None, None, None, -180.2, None, -215.0, -343.6, -58.43, -334.12, None, None, None, 85.7, None, 535.8, None, -111.2, None, 0.0, None, None, None, -97.71, None, 153.7, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #35
[-165.9, 573.0, -123.6, 397.4, 389.3, 218.8, 386.6, None, 114.55, 354.0, 175.5, None, None, None, None, None, None, None, -42.31, None, None, None, None, -85.15, None, None, None, None, None, None, None, None, None, None, None, 0.0, -208.8, None, -8.804, None, 423.4, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #36
[47.41, 124.2, 395.8, 419.1, 738.9, 528.0, None, None, -40.9, 183.8, 611.3, 134.5, -217.9, None, None, None, None, 281.6, 335.2, 898.2, 383.2, 301.9, -149.8, -134.2, None, 379.4, None, 167.9, None, 82.64, None, None, 322.42, 631.5, None, 837.2, 0.0, None, 255.0, None, 730.8, None, None, None, None, None, None, 2429.0, None, None, None, None, -127.06, None, None, None, ], #37
[-5.132, -131.7, -237.2, -157.3, 649.7, 645.9, None, None, None, None, None, None, 167.1, None, -198.8, 116.5, None, 159.8, None, None, None, None, None, -124.6, None, None, None, None, None, None, None, None, None, None, None, None, None, 0.0, -110.65, -117.2, None, None, None, 26.35, None, None, None, None, None, None, None, None, None, None, None, 117.59, ], #38
[-31.95, 249.0, -133.9, -240.2, 64.16, 172.2, -287.1, None, 97.04, 13.89, -82.12, -116.7, -158.2, 49.7, 10.03, -185.2, 343.7, None, 150.6, -97.77, -55.21, 397.24, None, -186.7, -374.16, 223.6, None, None, -71.0, None, -191.7, None, -176.26, 6.699, 136.6, 5.15, -137.7, 50.06, 0.0, -5.579, 72.31, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 39.84, ], #39
[147.3, 62.4, 140.6, 839.83, None, None, None, None, None, None, None, None, 278.15, None, None, None, None, None, None, None, None, None, None, None, 33.95, None, None, None, None, None, None, None, None, None, None, None, None, 185.6, 55.8, 0.0, None, None, None, None, 111.8, None, None, None, None, None, None, None, None, None, None, None, ], #40
[529.0, 1397.0, 317.6, 615.8, 88.63, 171.0, 284.4, -167.3, 123.4, 577.5, -234.9, 65.37, -247.8, None, 284.5, None, -22.1, None, -61.6, 1179.0, 182.2, 305.4, -193.0, 335.7, 1107.0, -124.7, None, 885.5, None, -64.28, -264.3, 288.1, 627.7, None, -29.34, -53.91, -198.0, None, -28.65, None, 0.0, None, None, None, None, None, None, None, None, None, None, None, None, -100.53, None, None, ], #41
[-34.36, None, 787.9, 191.6, 1913.0, None, 180.2, None, 992.4, None, None, None, 448.5, 961.8, 1464.0, None, None, None, None, 2450.0, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 0.0, -2166.0, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #42
[110.2, None, 234.4, 221.8, 84.85, None, None, None, None, None, None, None, None, -125.2, 1604.0, None, None, None, None, 2496.0, None, None, None, 70.81, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 745.3, 0.0, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #43
[13.89, -16.11, -23.88, 6.214, 796.9, None, 832.2, -234.7, None, None, None, None, None, None, None, None, None, None, None, None, None, None, -196.2, None, 161.5, None, None, None, -274.1, None, 262.0, None, None, None, None, None, -66.31, None, None, None, None, None, None, 0.0, None, None, None, None, None, None, None, None, None, None, None, None, ], #44
[30.74, None, 167.9, None, 794.4, 762.7, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 844.0, None, None, None, None, None, None, None, None, None, None, None, None, None, -32.17, None, None, None, None, 0.0, None, None, None, None, None, None, None, None, None, None, None, ], #45
[27.97, 9.755, None, None, 394.8, None, -509.3, None, None, None, None, None, None, None, None, None, None, None, None, -70.25, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 0.0, None, None, None, None, None, None, None, None, None, None, ], #46
[-11.92, 132.4, -86.88, -19.45, 517.5, None, -205.7, None, 156.4, None, -3.444, None, None, None, None, None, None, None, 119.2, None, None, -194.7, None, 3.163, 7.082, None, None, None, None, None, 515.8, None, None, None, None, None, None, None, None, None, 101.2, None, None, None, None, None, 0.0, None, None, None, None, None, None, None, None, None, ], #47
[39.93, 543.6, None, None, None, 420.0, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, -363.1, -11.3, None, None, None, None, 6.971, None, None, None, None, None, None, None, 148.9, None, None, None, None, None, None, None, None, None, None, 0.0, None, None, None, None, None, None, None, None, ], #48
[-23.61, 161.1, 142.9, 274.1, -61.2, -89.24, -384.3, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 0.0, None, None, None, None, None, None, None, ], #49
[-8.479, None, 23.93, 2.845, 682.5, 597.8, None, 810.5, 278.8, None, None, None, None, None, None, None, None, 221.4, None, None, None, None, None, -79.34, None, 176.3, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #50
[None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #51
[245.21, 384.45, 47.05, 347.13, 72.19, 265.75, 627.39, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 75.04, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #52
[21.49, -2.8, 344.42, 510.32, 244.67, 163.76, 833.21, None, 569.18, -1.25, -38.4, 69.7, -375.6, None, None, None, None, None, None, 600.78, 291.1, None, -286.26, -52.93, None, None, None, None, None, None, None, None, None, None, None, None, 177.12, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #53
[272.82, 569.71, 165.18, 369.89, None, None, None, None, -62.02, None, -229.01, None, -196.59, None, None, None, None, 100.25, None, 472.04, None, None, None, 196.73, None, None, None, 434.32, None, None, None, 313.14, None, None, None, None, None, None, None, None, -244.59, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #54
[None, None, 920.49, 305.77, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 171.94, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #55
[-20.31, None, -106.7, 568.47, 284.28, None, 401.2, None, 106.21, None, None, None, None, None, None, None, None, None, None, None, None, None, None, -108.37, 5.76, None, -272.01, None, None, None, None, None, None, None, None, None, None, 107.84, -33.93, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ]] #56
# str_to_id maps a group symbol to (group_unique_id, main_group_id).
# main_group_id corresponds to the ids listed in the "Group" column at:
# http://www.aim.env.uea.ac.uk/aim/info/UNIFACgroups.html
str_to_id = { 'AC': (11, 3),
'ACBr': (119, 56),
'ACC#N': (118, 55),
'ACCH': (14, 4),
'ACCH2': (13, 4),
'ACCH3': (12, 4),
'ACCl': (54, 25),
'ACF': (71, 38),
'ACH': (10, 3),
'ACN(=O)=O': (58, 27),
'ACNH2': (37, 17),
'ACOH': (18, 8),
'Br': (65, 33),
'C#C': (67, 34),
'C(=O)N(CH2)CH2': (99, 46),
'C(=O)N(CH3)CH2': (98, 46),
'C(=O)N(CH3)CH3': (97, 46),
'C(=O)NH2': (94, 46),
'C(=O)NHCH2': (96, 46),
'C(=O)NHCH3': (95, 46),
'C(=O)OH': (43, 20),
# 'C2H4O2': (101, 47),
# 'C2H5O2': (100, 47),
# 'C4H2S': (108, 50),
# 'C4H3S': (107, 50),
# 'C4H4S': (106, 50),
# 'C5H3N': (40, 18),
# 'C5H4N': (39, 18),
# 'C5H5N': (38, 18),
'C=CCl': (70, 37),
'CCl': (47, 21),
'CCl2': (50, 22),
'CCl2F': (87, 45),
'CCl2F2': (93, 45),
'CCl3': (52, 23),
'CCl3F': (86, 45),
'CCl4': (53, 24),
'CClF2': (90, 45),
'CClF3': (92, 45),
'CF': (76, 40),
'CF2': (75, 40),
'CF3': (74, 40),
'CH': (3, 1),
'CH#C': (66, 34),
'CH(=O)O': (24, 12),
'CH(=O)OH': (44, 20),
'CH0': (4, 1),
'CH0=CH0': (9, 2),
'CH0OCH0': (116, 53),
'CH2': (2, 1),
'CH2=CH': (5, 2),
'CH2=CH0': (7, 2),
'CH2=CHC#N': (69, 36),
'CH2OH0': (26, 13),
'CH2C#N': (42, 19),
'CH2C(=O)O': (23, 11),
'CH2C=O': (20, 9),
'CH2Cl': (45, 21),
'CH2Cl2': (48, 22),
'CH2N(=O)=O': (56, 26),
'CH2NH': (33, 15),
'CH2NH0': (36, 16),
'CH2NH2': (30, 14),
# 'CH2OCH': (112, 53), # these are oxides, not ethers
# 'CH2OCH0': (113, 53),
# 'CH2OCH2': (111, 53),
'CH2S': (103, 48),
'CH2SH': (61, 29),
# 'CH2SuCH': (110, 52),
# 'CH2SuCH2': (109, 52),
'CH3': (1, 1),
'CH3OH0': (25, 13),
'CH3C#N': (41, 19),
'CH3C(=O)O': (22, 11),
'CH3C=O': (19, 9),
'CH3N(=O)=O': (55, 26),
'CH3NH': (32, 15),
'CH3NH0': (35, 16),
'CH3NH2': (29, 14),
'CH3OH': (16, 6),
'CH3S': (102, 48),
'CH3SH': (60, 29),
'CH=CH': (6, 2),
'CH=CH0': (8, 2),
'CHOH0': (27, 13),
'CHCl': (46, 21),
'CHCl2': (49, 22),
'CHCl2F': (88, 45),
'CHCl3': (51, 23),
'CHClF': (89, 45),
'CHClF2': (91, 45),
'CHN(=O)=O': (57, 26),
'CHNH': (34, 15),
'CHNH2': (31, 14),
# 'CHOCH': (114, 53), #these are oxides, not ethers
# 'CHOCH0': (115, 53),
'CHS': (104, 48),
# 'COO': (77, 41),
# 'DMF': (72, 39),
# 'DMSO': (68, 35),
# 'DOH': (63, 31),
# 'HCON(CH2)2': (73, 39),
'I': (64, 32),
# 'MORPH': (105, 49),
# 'NMP': (85, 44),
'O=COC=O': (117, 54),
'OH': (15, 5),
'OH2': (17, 7),
'SCS': (59, 28),
'Si': (81, 42),
'SiH': (80, 42),
'SiH2': (79, 42),
'SiH2O': (82, 43),
'SiH3': (78, 42),
'SiHO': (83, 43),
'SiO': (84, 43),
# 'THF': (28, 13),
# 'furfural': (62, 30)
}
q_r_data = {
1: (0.848, 0.9011),
2: (0.54, 0.6744),
3: (0.228, 0.4469),
4: (0.0, 0.2195),
5: (1.176, 1.3454),
6: (0.867, 1.1167),
7: (0.988, 1.1173),
8: (0.676, 0.8886),
9: (0.485, 0.6605),
10: (0.4, 0.5313),
11: (0.12, 0.3652),
12: (0.968, 1.2663),
13: (0.66, 1.0396),
14: (0.348, 0.8121),
15: (1.2, 1.0),
16: (1.432, 1.4311),
17: (1.4, 0.92),
18: (0.68, 0.8952),
19: (1.448, 1.6724),
20: (1.18, 1.4457),
21: (0.948, 0.998),
22: (1.728, 1.9031),
23: (1.42, 1.6764),
24: (1.188, 1.242),
25: (1.088, 1.145),
26: (0.78, 0.9183),
27: (0.468, 0.6908),
28: (1.1, 0.9183),
29: (1.544, 1.5959),
30: (1.236, 1.3692),
31: (0.924, 1.1417),
32: (1.244, 1.4337),
33: (0.936, 1.207),
34: (0.624, 0.9795),
35: (0.94, 1.1865),
36: (0.632, 0.9597),
37: (0.816, 1.06),
38: (2.113, 2.9993),
39: (1.833, 2.8332),
40: (1.553, 2.667),
41: (1.724, 1.8701),
42: (1.416, 1.6434),
43: (1.224, 1.3013),
44: (1.532, 1.528),
45: (1.264, 1.4654),
46: (0.952, 1.238),
47: (0.724, 1.0106),
48: (1.998, 2.2564),
49: (1.684, 2.0606),
50: (1.448, 1.8016),
51: (2.41, 2.87),
52: (2.184, 2.6401),
53: (2.91, 3.39),
54: (0.844, 1.1562),
55: (1.868, 2.0086),
56: (1.56, 1.7818),
57: (1.248, 1.5544),
58: (1.104, 1.4199),
59: (1.65, 2.057),
60: (1.676, 1.877),
61: (1.368, 1.651),
62: (2.484, 3.168),
63: (2.248, 2.4088),
64: (0.992, 1.264),
65: (0.832, 0.9492),
66: (1.088, 1.292),
67: (0.784, 1.0613),
68: (2.472, 2.8266),
69: (2.052, 2.3144),
70: (0.724, 0.791),
71: (0.524, 0.6948),
72: (2.736, 3.0856),
73: (2.12, 2.6322),
74: (1.38, 1.406),
75: (0.92, 1.0105),
76: (0.46, 0.615),
77: (1.2, 1.38),
78: (1.263, 1.6035),
79: (1.006, 1.4443),
80: (0.749, 1.2853),
81: (0.41, 1.047),
82: (1.062, 1.4838),
83: (0.764, 1.303),
84: (0.466, 1.1044),
85: (3.2, 3.981),
86: (2.644, 3.0356),
87: (1.916, 2.2287),
88: (2.116, 2.406),
89: (1.416, 1.6493),
90: (1.648, 1.8174),
91: (1.828, 1.967),
92: (2.1, 2.1721),
93: (2.376, 2.6243),
94: (1.248, 1.4515),
95: (1.796, 2.1905),
96: (1.488, 1.9637),
97: (2.428, 2.8589),
98: (2.12, 2.6322),
99: (1.812, 2.4054),
100: (1.904, 2.1226),
101: (1.592, 1.8952),
102: (1.368, 1.613),
103: (1.06, 1.3863),
104: (0.748, 1.1589),
105: (2.796, 3.474),
106: (2.14, 2.8569),
107: (1.86, 2.6908),
108: (1.58, 2.5247),
109: (2.12, 2.6869),
110: (1.808, 2.4595),
111: (1.32, 1.5926),
112: (1.008, 1.3652),
113: (0.78, 1.1378),
114: (0.696, 1.1378),
115: (0.468, 0.9103),
116: (0.24, 0.6829),
117: (1.52, 1.7732),
118: (0.996, 1.3342),
119: (0.972, 1.3629)}
| 80.048295
| 419
| 0.555559
|
from . import errors
class Database(object):
    """Lookup facade over the module-level group-contribution tables.

    Thin accessor layer around the ``valency``, ``str_to_id``,
    ``q_r_data`` and ``params`` module globals.
    """

    def __init__(self):
        pass

    def get_atom_valency(self, symbol):
        """Return the bonding valency for an element symbol (e.g. 'C' -> 4)."""
        return valency[symbol]

    def get_q_r(self, symbol):
        """Return the (q, r) pair for a group symbol via its subgroup id."""
        subgroup_id = str_to_id[symbol][0]
        return q_r_data[subgroup_id]

    def get_parameter(self, symbol1, symbol2):
        """Return the interaction parameter between two group symbols.

        Identical symbols interact with 0.0 by definition; a missing
        table entry (None) raises ``errors.ValueNotFound``.
        """
        if symbol1 == symbol2:
            return 0.0
        # Main-group ids in str_to_id are 1-based; params is 0-based.
        row = str_to_id[symbol1][1] - 1
        col = str_to_id[symbol2][1] - 1
        value = params[row][col]
        if value is None:
            raise errors.ValueNotFound()
        return value

    def iterate_strings(self):
        """Yield every known group symbol string."""
        for symbol in str_to_id:
            yield symbol
# Maximum number of bonds each supported element can form
# (read by Database.get_atom_valency).
valency = {"C" : 4,
           "N" : 3,
           "O" : 2,
           "S" : 2,
           "Si" : 4,
           "Cl" : 1,
           "Br" : 1,
           "I" : 1,
           "F" : 1}
7, 597.0, 663.5, 35.93, 53.76, 24.9, 104.3, 11.44, 661.5, 543.0, 153.6, 184.4, 354.55, 3025.0, 335.8, 479.5, 298.9, 526.5, 689.0, -4.189, 125.8, 485.3, -2.859, 387.1, -450.4, 252.7, 220.3, -5.869, 390.9, 553.3, 187.0, 216.1, 92.99, None, 808.59, 408.3, 718.01, None, 153.72, ],
[-35.36, 0.0, 38.81, 74.15, 524.1, 787.6, 270.6, 526.1, 182.6, 448.8, 37.85, 333.5, 214.5, 240.9, 163.9, 61.11, 749.3, 280.5, 336.9, 318.9, -36.87, 58.55, -13.99, -109.7, 100.1, 357.5, None, 76.302, None, 262.9, None, None, 183.8, 31.14, 179.0, -52.87, -66.46, 359.3, -70.45, 449.4, 48.33, None, None, 86.46, None, 200.2, 268.1, -617.0, 62.56, None, None, 200.94, 219.9, -677.25, None, None, ],
[-11.12, 3.446, 0.0, 167.0, 636.1, 637.35, 903.8, 1329.0, 25.77, 347.3, 5.994, 287.1, 32.14, 161.7, 122.8, 90.49, 648.2, -4.449, 212.5, 537.4, -18.81, -144.4, -231.9, 3.0, 187.0, 168.0, 194.9, 52.07, -10.43, -64.69, 210.4, 113.3, 261.3, 154.26, 169.9, 383.9, -259.1, 389.3, 245.6, 22.67, 103.5, -432.3, 238.9, 30.04, -88.11, None, 333.3, None, -59.58, -39.16, None, 360.82, 171.49, 272.33, 22.06, 174.35, ],
[-69.7, -113.6, -146.8, 0.0, 803.2, 603.25, 5695.0, 884.9, -52.1, 586.6, 5688.0, 197.8, 213.1, 19.02, -49.29, 23.5, 664.2, 52.8, 6096.0, 872.3, -114.1, -111.0, -80.25, -141.3, -211.0, 3629.0, 4448.0, -9.451, 393.6, 48.49, 4975.0, 259.0, 210.0, -152.55, 4284.0, -119.2, -282.5, 101.4, 5629.0, -245.39, 69.26, 683.3, 355.5, 46.38, None, None, 421.9, None, -203.6, 184.9, None, 233.51, -184.68, 9.63, 795.38, -280.9, ],
[156.4, 457.0, 89.6, 25.82, 0.0, -137.1, 353.5, -259.7, 84.0, -203.6, 101.1, 267.8, 28.06, 83.02, 42.7, -323.0, -52.39, 170.0, 6.712, 199.0, 75.62, 65.28, -98.12, 143.1, 123.5, 256.5, 157.1, 488.9, 147.5, -120.5, -318.9, 313.5, 202.1, 727.8, -202.1, 74.27, 225.8, 44.78, -143.9, None, 190.3, -817.7, 202.7, -504.2, 72.96, -382.7, -248.3, None, 104.7, 57.65, None, 215.81, 6.39, None, None, 147.97, ],
[16.51, -12.52, -50.0, -44.5, 249.1, 0.0, -181.0, -101.7, 23.39, 306.4, -10.72, 179.7, -128.6, 359.3, -20.98, 53.9, 489.7, 580.5, 53.28, -202.0, -38.32, -102.5, -139.4, -44.76, -28.25, 75.14, 457.88, -31.09, 17.5, -61.76, -119.2, 212.1, 106.3, -119.1, -399.3, -5.224, 33.47, -48.25, -172.4, None, 165.7, None, None, None, -52.1, None, None, 37.63, -59.4, -46.01, None, 150.02, 98.2, None, None, None, ],
[300.0, 496.1, 362.3, 377.6, -229.1, 289.6, 0.0, 324.5, -195.4, -116.0, 72.87, 233.87, 540.5, 48.89, 168.0, 304.0, 459.0, 459.0, 112.6, -14.09, 325.4, 370.4, 353.7, 497.5, 133.9, 220.6, 399.5, 887.1, None, 188.0, 12.72, None, 777.1, None, -139.0, 160.8, None, None, 319.0, None, -197.5, -363.8, None, -452.2, None, 835.6, 139.6, None, 407.9, None, None, -255.63, -144.77, None, None, 580.28, ],
[275.8, 217.5, 25.34, 244.2, -451.6, -265.2, -601.8, 0.0, -356.1, -271.1, -449.4, -32.52, -162.9, -832.97, None, None, -305.5, -305.5, None, 408.9, None, 517.27, None, 1827.0, 6915.0, None, -413.48, 8484.0, None, None, -687.1, None, None, None, None, None, None, None, None, None, -494.2, None, None, -659.0, None, None, None, None, None, 1005.0, None, None, None, None, None, None, ],
[26.76, 42.92, 140.1, 365.8, 164.5, 108.7, 472.5, -133.1, 0.0, -37.36, -213.7, -190.4, -103.6, None, -174.2, -169.0, 6201.0, 7.341, 481.7, 669.4, -191.7, -130.3, -354.6, -39.2, -119.8, 137.5, 548.5, 216.1, -46.28, -163.7, 71.46, 53.59, 245.2, -246.6, -44.58, -63.5, -34.57, None, -61.7, None, -18.8, -588.9, None, None, None, None, 37.54, None, None, -162.6, None, None, -288.94, 91.01, None, 179.74, ],
[505.7, 56.3, 23.39, 106.0, 529.0, -340.2, 480.8, -155.6, 128.0, 0.0, -110.3, 766.0, 304.1, None, None, None, None, None, -106.4, 497.5, 751.9, 67.52, -483.7, None, None, None, None, None, None, None, None, 117.0, None, 2.21, None, -339.2, 172.4, None, -268.8, None, -275.5, None, None, None, None, None, None, None, None, None, None, None, 79.71, None, None, None, ],
[114.8, 132.1, 85.84, -170.0, 245.4, 249.63, 200.8, -36.72, 372.2, 185.1, 0.0, -241.8, -235.7, None, -73.5, -196.7, 475.5, -0.13, 494.6, 660.2, -34.74, 108.9, -209.7, 54.57, 442.4, -81.13, None, 183.0, None, 202.3, -101.7, 148.3, 18.88, 71.48, 52.08, -28.61, -275.2, None, 85.33, None, 560.2, None, None, None, None, None, 151.8, None, None, None, None, None, 36.34, 446.9, None, None, ],
[329.3, 110.4, 18.12, 428.0, 139.4, 227.8, 124.63, -234.25, 385.4, -236.5, 1167.0, 0.0, -234.0, None, None, None, None, -233.4, -47.25, -268.1, None, 31.0, -126.2, 179.7, 24.28, None, None, None, 103.9, None, None, None, 298.13, None, None, None, -11.4, None, 308.9, None, -70.24, None, None, None, None, None, None, None, None, None, None, None, -77.96, None, None, None, ],
[83.36, 26.51, 52.13, 65.69, 237.7, 238.4, -314.7, -178.5, 191.1, -7.838, 461.3, 457.3, 0.0, -78.36, 251.5, 5422.3, -46.39, 213.2, -18.51, 664.6, 301.1, 137.8, -154.3, 47.67, 134.8, 95.18, 155.11, 140.9, -8.538, 170.1, -20.11, -149.5, -202.3, -156.57, 128.8, None, 240.2, -273.9, 254.8, -172.51, 417.0, 1338.0, None, None, None, None, None, None, None, None, None, None, 567.0, 102.21, None, None, ],
[-30.48, 1.163, -44.85, 296.4, -242.8, -481.7, -330.48, -870.8, None, None, None, None, 222.1, 0.0, -107.2, -41.11, -200.7, None, 358.9, None, -82.92, None, None, -99.81, 30.05, None, None, None, -70.14, None, None, None, None, None, 874.19, None, None, None, -164.0, None, None, -664.4, 275.9, None, None, None, None, None, None, None, None, None, None, None, None, None, ],
[65.33, -28.7, -22.31, 223.0, -150.0, -370.3, -448.2, None, 394.6, None, 136.0, None, -56.08, 127.4, 0.0, -189.2, 138.54, 431.49, 147.1, None, None, None, None, 71.23, -18.93, None, None, None, None, None, 939.07, None, None, None, None, None, None, 570.9, -255.22, None, -38.77, 448.1, -1327.0, None, None, None, None, None, None, None, None, None, None, None, None, None, ],
[-83.98, -25.38, -223.9, 109.9, 28.6, -406.8, -598.8, None, 225.3, None, 2889.0, None, -194.1, 38.89, 865.9, 0.0, 287.43, None, 1255.1, None, -182.91, -73.85, -352.9, -262.0, -181.9, None, None, None, None, None, None, None, None, None, 243.1, None, None, -196.3, 22.05, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ],
[1139.0, 2000.0, 247.5, 762.8, -17.4, -118.1, -341.6, -253.1, -450.3, None, -294.8, None, 285.36, -15.07, 64.3, -24.46, 0.0, 89.7, -281.6, -396.0, 287.0, -111.0, None, 882.0, 617.5, None, -139.3, None, None, None, 0.1004, None, None, None, None, None, None, None, -334.4, None, -89.42, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ],
[-101.6, -47.63, 31.87, 49.8, -132.3, -378.2, -332.9, -341.6, 29.1, None, 8.87, 554.4, -156.1, None, -207.66, None, 117.4, 0.0, -169.7, -153.7, None, -351.6, -114.7, -205.3, -2.17, None, 2845.0, None, None, None, None, None, -60.78, None, None, None, 160.7, -158.8, None, None, None, None, None, None, None, None, None, None, None, -136.6, None, None, None, 98.82, None, None, ],
[24.82, -40.62, -22.97, -138.4, 185.4, 162.6, 242.8, None, -287.5, 224.66, -266.6, 99.37, 38.81, -157.3, -108.5, -446.86, 777.4, 134.3, 0.0, 205.27, 4.933, -152.7, -15.62, -54.86, -4.624, -0.515, None, 230.9, 0.4604, None, 177.5, None, -62.17, -203.0, None, 81.57, -55.77, None, -151.5, None, 120.3, None, None, None, None, None, 16.23, None, None, None, None, None, None, None, None, None, ],
[315.3, 1264.0, 62.32, 89.86, -151.0, 339.8, -66.17, -11.0, -297.8, -165.5, -256.3, 193.9, -338.5, None, None, None, 493.8, -313.5, 92.07, 0.0, 13.41, -44.7, 39.63, 183.4, -79.08, None, None, None, None, -208.9, None, 228.4, -95.0, None, -463.6, None, -11.16, None, -228.0, None, -337.0, 169.3, 127.2, None, None, -322.3, None, None, None, None, None, None, 12.55, -60.07, 88.09, None, ],
[91.46, 40.25, 4.68, 122.9, 562.2, 529.0, 698.2, None, 286.3, -47.51, 35.38, None, 225.4, 131.2, None, 151.38, 429.7, None, 54.32, 519.1, 0.0, 108.3, 249.2, 62.42, 153.0, 32.73, 86.2, 450.1, 59.02, 65.56, None, 2.22, 344.4, None, None, None, -168.2, None, 6.57, None, 63.67, None, None, None, None, None, None, None, None, None, None, None, -127.9, None, None, None, ],
[34.01, -23.5, 121.3, 140.8, 527.6, 669.9, 708.7, 1633.5, 82.86, 190.6, -132.9, 80.99, -197.7, None, None, -141.4, 140.8, 587.3, 258.6, 543.3, -84.53, 0.0, 0.0, 56.33, 223.1, 108.9, None, None, None, 149.56, None, 177.6, 315.9, None, 215.0, None, -91.8, None, -160.28, None, -96.87, None, None, None, None, None, 361.1, None, None, None, None, None, None, None, None, None, ],
[36.7, 51.06, 288.5, 69.9, 742.1, 649.1, 826.76, None, 552.1, 242.8, 176.5, 235.6, -20.93, None, None, -293.7, None, 18.98, 74.04, 504.2, -157.1, 0.0, 0.0, -30.1, 192.1, None, None, 116.6, None, -64.38, None, 86.4, 168.8, None, 363.7, None, 111.2, None, None, None, 255.8, None, None, -35.68, None, None, None, 565.9, None, None, None, None, 165.67, None, None, None, ],
[-78.45, 160.9, -4.7, 134.7, 856.3, 709.6, 1201.0, 10000.0, 372.0, None, 129.5, 351.9, 113.9, 261.1, 91.13, 316.9, 898.2, 368.5, 492.0, 631.0, 11.8, 17.97, 51.9, 0.0, -75.97, 490.9, 534.7, 132.2, None, 546.7, None, 247.8, 146.6, None, 337.7, 369.5, 187.1, 215.2, 498.6, None, 256.5, None, 233.1, None, None, None, 423.1, 63.95, None, 108.5, None, 585.19, 291.87, 532.73, None, 127.16, ],
[106.8, 70.32, -97.27, 402.5, 325.7, 612.8, -274.5, 622.3, 518.4, None, -171.1, 383.3, -25.15, 108.5, 102.2, 2951.0, 334.9, 20.18, 363.5, 993.4, -129.7, -8.309, -0.2266, -248.4, 0.0, 132.7, 2213.0, None, None, None, None, None, 593.4, None, 1337.37, None, None, None, 5143.14, 309.58, -145.1, None, None, -209.7, None, None, 434.1, None, None, None, None, None, None, None, None, 8.48, ],
[-32.69, -1.996, 10.38, -97.05, 261.6, 252.6, 417.9, None, -142.6, None, 129.3, None, -94.49, None, None, None, None, None, 0.2827, None, 113.0, -9.639, None, -34.68, 132.9, 0.0, 533.2, 320.2, None, None, 139.8, 304.3, 10.17, -27.7, None, None, 10.76, None, -223.1, None, 248.4, None, None, None, -218.9, None, None, None, None, -4.565, None, None, None, None, None, None, ],
[5541.0, None, 1824.0, -127.8, 561.6, 511.29, 360.7, 815.12, -101.5, None, None, None, 220.66, None, None, None, 134.9, 2475.0, None, None, 1971.0, None, None, 514.6, -123.1, -85.12, 0.0, None, None, None, None, 2990.0, -124.0, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 1742.53, ],
[-52.65, 16.62, 21.5, 40.68, 609.8, 914.2, 1081.0, 1421.0, 303.7, None, 243.8, None, 112.4, None, None, None, None, None, 335.7, None, -73.09, None, -26.06, -60.71, None, 277.8, None, 0.0, None, None, None, 292.7, None, None, None, None, -47.37, None, None, None, 469.8, None, None, None, None, None, None, None, None, None, None, None, None, 684.78, None, None, ],
[-7.481, None, 28.41, 19.56, 461.6, 448.6, None, None, 160.6, None, None, 201.5, 63.71, 106.7, None, None, None, None, 161.0, None, -27.94, None, None, None, None, None, None, None, 0.0, None, None, None, None, None, 31.66, None, None, None, 78.92, None, None, None, None, 1004.0, None, None, None, -18.27, None, None, None, None, None, None, None, None, ],
[-25.31, 82.64, 157.3, 128.8, 521.6, 287.0, 23.48, None, 317.5, None, -146.3, None, -87.31, None, None, None, None, None, None, 570.6, -39.46, -116.21, 48.48, -133.16, None, None, None, None, None, 0.0, None, None, None, None, None, None, 262.9, None, None, None, 43.37, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ],
[140.0, None, 221.4, 150.6, 267.6, 240.8, -137.4, 838.4, 135.4, None, 152.0, None, 9.207, None, -213.74, None, 192.3, None, 169.6, None, None, None, None, None, None, 481.3, None, None, None, None, 0.0, None, None, None, -417.2, None, None, None, 302.2, None, 347.8, None, None, -262.0, None, None, -353.5, None, None, None, None, None, None, None, None, None, ],
[128.0, None, 58.68, 26.41, 501.3, 431.3, None, None, 138.0, 245.9, 21.92, None, 476.6, None, None, None, None, None, None, 616.6, 179.25, -40.82, 21.76, 48.49, None, 64.28, 2448.0, -27.45, None, None, None, 0.0, 6.37, None, None, None, None, None, None, None, 68.55, None, None, None, None, None, None, None, None, None, None, None, None, 190.81, None, None, ],
[-31.52, 174.6, -154.2, 1112.0, 524.9, 494.7, 79.18, None, -142.6, None, 24.37, -92.26, 736.4, None, None, None, None, -42.71, 136.9, 5256.0, -262.3, -174.5, -46.8, 77.55, -185.3, 125.3, 4288.0, None, None, None, None, 37.1, 0.0, None, 32.9, None, -48.33, None, 336.25, None, -195.1, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ],
[-72.88, 41.38, -101.12, 614.52, 68.95, 967.71, None, None, 443.6, -55.87, -111.45, None, 173.77, None, None, None, None, None, 329.1, None, None, None, None, None, None, 174.4, None, None, None, None, None, None, None, 0.0, None, None, 2073.0, None, -119.8, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ],
[50.49, 64.07, -2.504, -143.2, -25.87, 695.0, -240.0, None, 110.4, None, 41.57, None, -93.51, -366.51, None, -257.2, None, None, None, -180.2, None, -215.0, -343.6, -58.43, -334.12, None, None, None, 85.7, None, 535.8, None, -111.2, None, 0.0, None, None, None, -97.71, None, 153.7, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ],
[-165.9, 573.0, -123.6, 397.4, 389.3, 218.8, 386.6, None, 114.55, 354.0, 175.5, None, None, None, None, None, None, None, -42.31, None, None, None, None, -85.15, None, None, None, None, None, None, None, None, None, None, None, 0.0, -208.8, None, -8.804, None, 423.4, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ],
[47.41, 124.2, 395.8, 419.1, 738.9, 528.0, None, None, -40.9, 183.8, 611.3, 134.5, -217.9, None, None, None, None, 281.6, 335.2, 898.2, 383.2, 301.9, -149.8, -134.2, None, 379.4, None, 167.9, None, 82.64, None, None, 322.42, 631.5, None, 837.2, 0.0, None, 255.0, None, 730.8, None, None, None, None, None, None, 2429.0, None, None, None, None, -127.06, None, None, None, ],
[-5.132, -131.7, -237.2, -157.3, 649.7, 645.9, None, None, None, None, None, None, 167.1, None, -198.8, 116.5, None, 159.8, None, None, None, None, None, -124.6, None, None, None, None, None, None, None, None, None, None, None, None, None, 0.0, -110.65, -117.2, None, None, None, 26.35, None, None, None, None, None, None, None, None, None, None, None, 117.59, ],
[-31.95, 249.0, -133.9, -240.2, 64.16, 172.2, -287.1, None, 97.04, 13.89, -82.12, -116.7, -158.2, 49.7, 10.03, -185.2, 343.7, None, 150.6, -97.77, -55.21, 397.24, None, -186.7, -374.16, 223.6, None, None, -71.0, None, -191.7, None, -176.26, 6.699, 136.6, 5.15, -137.7, 50.06, 0.0, -5.579, 72.31, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 39.84, ],
[147.3, 62.4, 140.6, 839.83, None, None, None, None, None, None, None, None, 278.15, None, None, None, None, None, None, None, None, None, None, None, 33.95, None, None, None, None, None, None, None, None, None, None, None, None, 185.6, 55.8, 0.0, None, None, None, None, 111.8, None, None, None, None, None, None, None, None, None, None, None, ],
[529.0, 1397.0, 317.6, 615.8, 88.63, 171.0, 284.4, -167.3, 123.4, 577.5, -234.9, 65.37, -247.8, None, 284.5, None, -22.1, None, -61.6, 1179.0, 182.2, 305.4, -193.0, 335.7, 1107.0, -124.7, None, 885.5, None, -64.28, -264.3, 288.1, 627.7, None, -29.34, -53.91, -198.0, None, -28.65, None, 0.0, None, None, None, None, None, None, None, None, None, None, None, None, -100.53, None, None, ],
[-34.36, None, 787.9, 191.6, 1913.0, None, 180.2, None, 992.4, None, None, None, 448.5, 961.8, 1464.0, None, None, None, None, 2450.0, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 0.0, -2166.0, None, None, None, None, None, None, None, None, None, None, None, None, None, ],
[110.2, None, 234.4, 221.8, 84.85, None, None, None, None, None, None, None, None, -125.2, 1604.0, None, None, None, None, 2496.0, None, None, None, 70.81, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 745.3, 0.0, None, None, None, None, None, None, None, None, None, None, None, None, None, ],
[13.89, -16.11, -23.88, 6.214, 796.9, None, 832.2, -234.7, None, None, None, None, None, None, None, None, None, None, None, None, None, None, -196.2, None, 161.5, None, None, None, -274.1, None, 262.0, None, None, None, None, None, -66.31, None, None, None, None, None, None, 0.0, None, None, None, None, None, None, None, None, None, None, None, None, ],
[30.74, None, 167.9, None, 794.4, 762.7, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 844.0, None, None, None, None, None, None, None, None, None, None, None, None, None, -32.17, None, None, None, None, 0.0, None, None, None, None, None, None, None, None, None, None, None, ],
[27.97, 9.755, None, None, 394.8, None, -509.3, None, None, None, None, None, None, None, None, None, None, None, None, -70.25, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 0.0, None, None, None, None, None, None, None, None, None, None, ],
[-11.92, 132.4, -86.88, -19.45, 517.5, None, -205.7, None, 156.4, None, -3.444, None, None, None, None, None, None, None, 119.2, None, None, -194.7, None, 3.163, 7.082, None, None, None, None, None, 515.8, None, None, None, None, None, None, None, None, None, 101.2, None, None, None, None, None, 0.0, None, None, None, None, None, None, None, None, None, ],
[39.93, 543.6, None, None, None, 420.0, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, -363.1, -11.3, None, None, None, None, 6.971, None, None, None, None, None, None, None, 148.9, None, None, None, None, None, None, None, None, None, None, 0.0, None, None, None, None, None, None, None, None, ],
[-23.61, 161.1, 142.9, 274.1, -61.2, -89.24, -384.3, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 0.0, None, None, None, None, None, None, None, ],
[-8.479, None, 23.93, 2.845, 682.5, 597.8, None, 810.5, 278.8, None, None, None, None, None, None, None, None, 221.4, None, None, None, None, None, -79.34, None, 176.3, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ],
[None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ],
[245.21, 384.45, 47.05, 347.13, 72.19, 265.75, 627.39, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 75.04, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ],
[21.49, -2.8, 344.42, 510.32, 244.67, 163.76, 833.21, None, 569.18, -1.25, -38.4, 69.7, -375.6, None, None, None, None, None, None, 600.78, 291.1, None, -286.26, -52.93, None, None, None, None, None, None, None, None, None, None, None, None, 177.12, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ],
[272.82, 569.71, 165.18, 369.89, None, None, None, None, -62.02, None, -229.01, None, -196.59, None, None, None, None, 100.25, None, 472.04, None, None, None, 196.73, None, None, None, 434.32, None, None, None, 313.14, None, None, None, None, None, None, None, None, -244.59, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ],
[None, None, 920.49, 305.77, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 171.94, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ],
[-20.31, None, -106.7, 568.47, 284.28, None, 401.2, None, 106.21, None, None, None, None, None, None, None, None, None, None, None, None, None, None, -108.37, 5.76, None, -272.01, None, None, None, None, None, None, None, None, None, None, 107.84, -33.93, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ]]
# Maps a group symbol to (subgroup_id, main_group_id):
#   - subgroup_id indexes q_r_data (see Database.get_q_r);
#   - main_group_id is 1-based and indexes params after subtracting 1
#     (see Database.get_parameter).
str_to_id = { 'AC': (11, 3),
              'ACBr': (119, 56),
              'ACC#N': (118, 55),
              'ACCH': (14, 4),
              'ACCH2': (13, 4),
              'ACCH3': (12, 4),
              'ACCl': (54, 25),
              'ACF': (71, 38),
              'ACH': (10, 3),
              'ACN(=O)=O': (58, 27),
              'ACNH2': (37, 17),
              'ACOH': (18, 8),
              'Br': (65, 33),
              'C#C': (67, 34),
              'C(=O)N(CH2)CH2': (99, 46),
              'C(=O)N(CH3)CH2': (98, 46),
              'C(=O)N(CH3)CH3': (97, 46),
              'C(=O)NH2': (94, 46),
              'C(=O)NHCH2': (96, 46),
              'C(=O)NHCH3': (95, 46),
              'C(=O)OH': (43, 20),
              'C=CCl': (70, 37),
              'CCl': (47, 21),
              'CCl2': (50, 22),
              'CCl2F': (87, 45),
              'CCl2F2': (93, 45),
              'CCl3': (52, 23),
              'CCl3F': (86, 45),
              'CCl4': (53, 24),
              'CClF2': (90, 45),
              'CClF3': (92, 45),
              'CF': (76, 40),
              'CF2': (75, 40),
              'CF3': (74, 40),
              'CH': (3, 1),
              'CH#C': (66, 34),
              'CH(=O)O': (24, 12),
              'CH(=O)OH': (44, 20),
              'CH0': (4, 1),
              'CH0=CH0': (9, 2),
              'CH0OCH0': (116, 53),
              'CH2': (2, 1),
              'CH2=CH': (5, 2),
              'CH2=CH0': (7, 2),
              'CH2=CHC#N': (69, 36),
              'CH2OH0': (26, 13),
              'CH2C#N': (42, 19),
              'CH2C(=O)O': (23, 11),
              'CH2C=O': (20, 9),
              'CH2Cl': (45, 21),
              'CH2Cl2': (48, 22),
              'CH2N(=O)=O': (56, 26),
              'CH2NH': (33, 15),
              'CH2NH0': (36, 16),
              'CH2NH2': (30, 14),
              'CH2SH': (61, 29),
              'CH3': (1, 1),
              'CH3OH0': (25, 13),
              'CH3C#N': (41, 19),
              'CH3C(=O)O': (22, 11),
              'CH3C=O': (19, 9),
              'CH3N(=O)=O': (55, 26),
              'CH3NH': (32, 15),
              'CH3NH0': (35, 16),
              'CH3NH2': (29, 14),
              'CH3OH': (16, 6),
              'CH3S': (102, 48),
              'CH3SH': (60, 29),
              'CH=CH': (6, 2),
              'CH=CH0': (8, 2),
              'CHOH0': (27, 13),
              'CHCl': (46, 21),
              'CHCl2': (49, 22),
              'CHCl2F': (88, 45),
              'CHCl3': (51, 23),
              'CHClF': (89, 45),
              'CHClF2': (91, 45),
              'CHN(=O)=O': (57, 26),
              'CHNH': (34, 15),
              'CHNH2': (31, 14),
              'I': (64, 32),
              'O=COC=O': (117, 54),
              'OH': (15, 5),
              'OH2': (17, 7),
              'SCS': (59, 28),
              'Si': (81, 42),
              'SiH': (80, 42),
              'SiH2': (79, 42),
              'SiH2O': (82, 43),
              'SiH3': (78, 42),
              'SiHO': (83, 43),
              'SiO': (84, 43),
              }
# Per-subgroup (q, r) pairs keyed by subgroup id (the first element of the
# str_to_id tuples; see Database.get_q_r).
# NOTE(review): presumably the UNIFAC group surface-area (Q) and volume (R)
# parameters — confirm against the published group tables.
q_r_data = {
        1: (0.848, 0.9011),
        2: (0.54, 0.6744),
        3: (0.228, 0.4469),
        4: (0.0, 0.2195),
        5: (1.176, 1.3454),
        6: (0.867, 1.1167),
        7: (0.988, 1.1173),
        8: (0.676, 0.8886),
        9: (0.485, 0.6605),
        10: (0.4, 0.5313),
        11: (0.12, 0.3652),
        12: (0.968, 1.2663),
        13: (0.66, 1.0396),
        14: (0.348, 0.8121),
        15: (1.2, 1.0),
        16: (1.432, 1.4311),
        17: (1.4, 0.92),
        18: (0.68, 0.8952),
        19: (1.448, 1.6724),
        20: (1.18, 1.4457),
        21: (0.948, 0.998),
        22: (1.728, 1.9031),
        23: (1.42, 1.6764),
        24: (1.188, 1.242),
        25: (1.088, 1.145),
        26: (0.78, 0.9183),
        27: (0.468, 0.6908),
        28: (1.1, 0.9183),
        29: (1.544, 1.5959),
        30: (1.236, 1.3692),
        31: (0.924, 1.1417),
        32: (1.244, 1.4337),
        33: (0.936, 1.207),
        34: (0.624, 0.9795),
        35: (0.94, 1.1865),
        36: (0.632, 0.9597),
        37: (0.816, 1.06),
        38: (2.113, 2.9993),
        39: (1.833, 2.8332),
        40: (1.553, 2.667),
        41: (1.724, 1.8701),
        42: (1.416, 1.6434),
        43: (1.224, 1.3013),
        44: (1.532, 1.528),
        45: (1.264, 1.4654),
        46: (0.952, 1.238),
        47: (0.724, 1.0106),
        48: (1.998, 2.2564),
        49: (1.684, 2.0606),
        50: (1.448, 1.8016),
        51: (2.41, 2.87),
        52: (2.184, 2.6401),
        53: (2.91, 3.39),
        54: (0.844, 1.1562),
        55: (1.868, 2.0086),
        56: (1.56, 1.7818),
        57: (1.248, 1.5544),
        58: (1.104, 1.4199),
        59: (1.65, 2.057),
        60: (1.676, 1.877),
        61: (1.368, 1.651),
        62: (2.484, 3.168),
        63: (2.248, 2.4088),
        64: (0.992, 1.264),
        65: (0.832, 0.9492),
        66: (1.088, 1.292),
        67: (0.784, 1.0613),
        68: (2.472, 2.8266),
        69: (2.052, 2.3144),
        70: (0.724, 0.791),
        71: (0.524, 0.6948),
        72: (2.736, 3.0856),
        73: (2.12, 2.6322),
        74: (1.38, 1.406),
        75: (0.92, 1.0105),
        76: (0.46, 0.615),
        77: (1.2, 1.38),
        78: (1.263, 1.6035),
        79: (1.006, 1.4443),
        80: (0.749, 1.2853),
        81: (0.41, 1.047),
        82: (1.062, 1.4838),
        83: (0.764, 1.303),
        84: (0.466, 1.1044),
        85: (3.2, 3.981),
        86: (2.644, 3.0356),
        87: (1.916, 2.2287),
        88: (2.116, 2.406),
        89: (1.416, 1.6493),
        90: (1.648, 1.8174),
        91: (1.828, 1.967),
        92: (2.1, 2.1721),
        93: (2.376, 2.6243),
        94: (1.248, 1.4515),
        95: (1.796, 2.1905),
        96: (1.488, 1.9637),
        97: (2.428, 2.8589),
        98: (2.12, 2.6322),
        99: (1.812, 2.4054),
        100: (1.904, 2.1226),
        101: (1.592, 1.8952),
        102: (1.368, 1.613),
        103: (1.06, 1.3863),
        104: (0.748, 1.1589),
        105: (2.796, 3.474),
        106: (2.14, 2.8569),
        107: (1.86, 2.6908),
        108: (1.58, 2.5247),
        109: (2.12, 2.6869),
        110: (1.808, 2.4595),
        111: (1.32, 1.5926),
        112: (1.008, 1.3652),
        113: (0.78, 1.1378),
        114: (0.696, 1.1378),
        115: (0.468, 0.9103),
        116: (0.24, 0.6829),
        117: (1.52, 1.7732),
        118: (0.996, 1.3342),
        119: (0.972, 1.3629)}
| true
| true
|
f70ca4380e2d991139e16da6ddfa214ad0a9934f
| 9,327
|
py
|
Python
|
config/efficientnet/efficientnet_b5_fpn_bn_scratch_400_6x.py
|
tingyumao94/groupsoftmax-simpledet
|
feda05ae2261efcbd0e298792cd66dd383b1bdf6
|
[
"Apache-2.0"
] | 153
|
2019-08-26T06:39:32.000Z
|
2022-02-17T03:51:23.000Z
|
config/efficientnet/efficientnet_b5_fpn_bn_scratch_400_6x.py
|
tingyumao94/groupsoftmax-simpledet
|
feda05ae2261efcbd0e298792cd66dd383b1bdf6
|
[
"Apache-2.0"
] | 4
|
2019-09-11T09:49:04.000Z
|
2020-03-04T07:31:42.000Z
|
config/efficientnet/efficientnet_b5_fpn_bn_scratch_400_6x.py
|
tingyumao94/groupsoftmax-simpledet
|
feda05ae2261efcbd0e298792cd66dd383b1bdf6
|
[
"Apache-2.0"
] | 26
|
2019-08-27T13:29:18.000Z
|
2022-02-09T01:42:25.000Z
|
from symbol.builder import FasterRcnn as Detector
from symbol.builder import add_anchor_to_arg
from models.efficientnet.builder import EfficientNetB5FPN as Backbone
from models.FPN.builder import FPNNeck as Neck
from models.FPN.builder import FPNRpnHead as RpnHead
from models.FPN.builder import FPNRoiAlign as RoiExtractor
from models.FPN.builder import FPNBbox2fcHead as BboxHead
from mxnext.complicate import normalizer_factory
def get_config(is_train):
    """Build the full detection configuration for train or test mode.

    Assembles parameter namespaces, the network symbols, the data
    pipeline transforms, and the training metrics for an
    EfficientNet-B5 + FPN Faster R-CNN.

    Args:
        is_train: True for the training configuration (larger batch,
            train symbol, anchor-target transforms), False for test.

    Returns:
        Tuple of (General, KvstoreParam, RpnParam, RoiParam, BboxParam,
        DatasetParam, ModelParam, OptimizeParam, TestParam, transform,
        data_name, label_name, metric_list).
    """
    class General:
        log_frequency = 10
        # Last component of the module name, used to build checkpoint paths.
        name = __name__.rsplit("/")[-1].rsplit(".")[-1]
        batch_image = 8 if is_train else 1
        fp16 = True
        loader_worker = 8

    class KvstoreParam:
        kvstore = "nccl"
        batch_image = General.batch_image
        gpus = [0, 1, 2, 3, 4, 5, 6, 7]
        fp16 = General.fp16

    class NormalizeParam:
        # Synchronized BN across all devices (required when training from scratch).
        normalizer = normalizer_factory(type="localbn", ndev=len(KvstoreParam.gpus))
        # normalizer = normalizer_factory(type="gn")

    class BackboneParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer

    class NeckParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer

    class RpnParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer
        batch_image = General.batch_image
        nnvm_proposal = True
        nnvm_rpn_target = False

        class anchor_generate:
            scale = (4,)
            ratio = (0.5, 1.0, 2.0)
            stride = (4, 8, 16, 32, 64)
            image_anchor = 256
            max_side = 700

        class anchor_assign:
            allowed_border = 0
            pos_thr = 0.7
            neg_thr = 0.3
            min_pos_thr = 0.0
            image_anchor = 256
            pos_fraction = 0.5

        class head:
            conv_channel = 256
            mean = (0, 0, 0, 0)
            std = (1, 1, 1, 1)

        class proposal:
            pre_nms_top_n = 2000 if is_train else 1000
            post_nms_top_n = 2000 if is_train else 1000
            nms_thr = 0.7
            min_bbox_side = 0

        class subsample_proposal:
            proposal_wo_gt = False
            image_roi = 512
            fg_fraction = 0.25
            fg_thr = 0.5
            bg_thr_hi = 0.5
            bg_thr_lo = 0.0

        class bbox_target:
            num_reg_class = 81
            class_agnostic = False
            weight = (1.0, 1.0, 1.0, 1.0)
            mean = (0.0, 0.0, 0.0, 0.0)
            std = (0.1, 0.1, 0.2, 0.2)

    class BboxParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer
        num_class = 1 + 80  # background + 80 COCO classes
        image_roi = 512
        batch_image = General.batch_image

        class regress_target:
            class_agnostic = False
            mean = (0.0, 0.0, 0.0, 0.0)
            std = (0.1, 0.1, 0.2, 0.2)

    class RoiParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer
        out_size = 7
        stride = (4, 8, 16, 32)
        roi_canonical_scale = 224
        roi_canonical_level = 4

    class DatasetParam:
        if is_train:
            image_set = ("coco_train2014", "coco_valminusminival2014")
            total_image = 82783 + 35504
        else:
            image_set = ("coco_minival2014", )
            total_image = 5000

    backbone = Backbone(BackboneParam)
    neck = Neck(NeckParam)
    rpn_head = RpnHead(RpnParam)
    roi_extractor = RoiExtractor(RoiParam)
    bbox_head = BboxHead(BboxParam)
    detector = Detector()

    # Only build the symbols needed for the current mode.
    if is_train:
        train_sym = detector.get_train_symbol(backbone, neck, rpn_head, roi_extractor, bbox_head)
        rpn_test_sym = None
        test_sym = None
    else:
        train_sym = None
        rpn_test_sym = detector.get_rpn_test_symbol(backbone, neck, rpn_head)
        test_sym = detector.get_test_symbol(backbone, neck, rpn_head, roi_extractor, bbox_head)

    class ModelParam:
        train_symbol = train_sym
        test_symbol = test_sym
        rpn_test_symbol = rpn_test_sym

        from_scratch = True
        random = True
        memonger = False
        memonger_until = "stage3_unit21_plus"

        class pretrain:
            prefix = None
            epoch = 0
            fixed_param = []

        def process_weight(sym, arg, aux):
            # Bake the anchors of every FPN level into the arg dict.
            for stride in RpnParam.anchor_generate.stride:
                add_anchor_to_arg(
                    sym, arg, aux, RpnParam.anchor_generate.max_side,
                    stride, RpnParam.anchor_generate.scale,
                    RpnParam.anchor_generate.ratio)

    class OptimizeParam:
        class optimizer:
            type = "sgd"
            # Linear-scaling rule: base lr 0.01 per 8 images, scaled by
            # total batch size (gpus * batch_image).
            lr = 0.01 / 8 * len(KvstoreParam.gpus) * KvstoreParam.batch_image
            momentum = 0.9
            wd = 1e-4
            clip_gradient = None

        class schedule:
            mult = 6
            begin_epoch = 0
            end_epoch = 6 * mult
            if mult <= 2:
                lr_iter = [60000 * mult * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image),
                           80000 * mult * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image)]
            else:
                # follow the setting in Rethinking ImageNet Pre-training
                # reduce the lr in the last 60k and 20k iterations
                lr_iter = [(DatasetParam.total_image * 2 // 16 * end_epoch - 60000) * 16 //
                           (len(KvstoreParam.gpus) * KvstoreParam.batch_image),
                           (DatasetParam.total_image * 2 // 16 * end_epoch - 20000) * 16 //
                           (len(KvstoreParam.gpus) * KvstoreParam.batch_image)]

        class warmup:
            type = "gradual"
            lr = 0
            iter = 500

    class TestParam:
        min_det_score = 0.05
        max_det_per_image = 100

        process_roidb = lambda x: x
        process_output = lambda x, y: x

        class model:
            prefix = "experiments/{}/checkpoint".format(General.name)
            epoch = OptimizeParam.schedule.end_epoch

        class nms:
            type = "nms"
            thr = 0.5

        class coco:
            annotation = "data/coco/annotations/instances_minival2014.json"

    # data processing
    class NormParam:
        mean = tuple(i * 255 for i in (0.485, 0.456, 0.406)) # RGB order
        std = tuple(i * 255 for i in (0.229, 0.224, 0.225))

    # data processing
    class ResizeParam:
        short = 400
        long = 600

    class PadParam:
        short = 400
        long = 600
        max_num_gt = 100

    class AnchorTarget2DParam:
        def __init__(self):
            self.generate = self._generate()

        class _generate:
            def __init__(self):
                self.stride = (4, 8, 16, 32, 64)
                self.short = (100, 50, 25, 13, 7)
                self.long = (150, 75, 38, 19, 10)
            # Fixed: was `(4)` (a plain int, parens are not a tuple);
            # made a 1-tuple to match anchor_generate.scale above.
            scales = (4,)
            aspects = (0.5, 1.0, 2.0)

        class assign:
            allowed_border = 0
            pos_thr = 0.7
            neg_thr = 0.3
            min_pos_thr = 0.0

        class sample:
            image_anchor = 256
            pos_fraction = 0.5

    class RenameParam:
        mapping = dict(image="data")

    from core.detection_input import ReadRoiRecord, Resize2DImageBbox, \
        ConvertImageFromHwcToChw, Flip2DImageBbox, Pad2DImageBbox, \
        RenameRecord, Norm2DImage
    from models.FPN.input import PyramidAnchorTarget2D

    if is_train:
        transform = [
            ReadRoiRecord(None),
            Norm2DImage(NormParam),
            Resize2DImageBbox(ResizeParam),
            Flip2DImageBbox(),
            Pad2DImageBbox(PadParam),
            ConvertImageFromHwcToChw(),
            RenameRecord(RenameParam.mapping)
        ]
        data_name = ["data"]
        label_name = ["gt_bbox", "im_info"]
        if not RpnParam.nnvm_rpn_target:
            # RPN targets computed on CPU in the loader instead of in-graph.
            transform.append(PyramidAnchorTarget2D(AnchorTarget2DParam()))
            label_name += ["rpn_cls_label", "rpn_reg_target", "rpn_reg_weight"]
    else:
        transform = [
            ReadRoiRecord(None),
            Norm2DImage(NormParam),
            Resize2DImageBbox(ResizeParam),
            Pad2DImageBbox(PadParam),
            ConvertImageFromHwcToChw(),
            RenameRecord(RenameParam.mapping)
        ]
        data_name = ["data", "im_info", "im_id", "rec_id"]
        label_name = []

    import core.detection_metric as metric

    rpn_acc_metric = metric.AccWithIgnore(
        "RpnAcc",
        ["rpn_cls_loss_output", "rpn_cls_label_blockgrad_output"],
        []
    )
    rpn_l1_metric = metric.L1(
        "RpnL1",
        ["rpn_reg_loss_output", "rpn_cls_label_blockgrad_output"],
        []
    )
    # for bbox, the label is generated in network so it is an output
    box_acc_metric = metric.AccWithIgnore(
        "RcnnAcc",
        ["bbox_cls_loss_output", "bbox_label_blockgrad_output"],
        []
    )
    box_l1_metric = metric.L1(
        "RcnnL1",
        ["bbox_reg_loss_output", "bbox_label_blockgrad_output"],
        []
    )

    metric_list = [rpn_acc_metric, rpn_l1_metric, box_acc_metric, box_l1_metric]

    return General, KvstoreParam, RpnParam, RoiParam, BboxParam, DatasetParam, \
           ModelParam, OptimizeParam, TestParam, \
           transform, data_name, label_name, metric_list
| 29.703822
| 100
| 0.57221
|
from symbol.builder import FasterRcnn as Detector
from symbol.builder import add_anchor_to_arg
from models.efficientnet.builder import EfficientNetB5FPN as Backbone
from models.FPN.builder import FPNNeck as Neck
from models.FPN.builder import FPNRpnHead as RpnHead
from models.FPN.builder import FPNRoiAlign as RoiExtractor
from models.FPN.builder import FPNBbox2fcHead as BboxHead
from mxnext.complicate import normalizer_factory
def get_config(is_train):
class General:
log_frequency = 10
name = __name__.rsplit("/")[-1].rsplit(".")[-1]
batch_image = 8 if is_train else 1
fp16 = True
loader_worker = 8
class KvstoreParam:
kvstore = "nccl"
batch_image = General.batch_image
gpus = [0, 1, 2, 3, 4, 5, 6, 7]
fp16 = General.fp16
class NormalizeParam:
normalizer = normalizer_factory(type="localbn", ndev=len(KvstoreParam.gpus))
class BackboneParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
class NeckParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
class RpnParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
batch_image = General.batch_image
nnvm_proposal = True
nnvm_rpn_target = False
class anchor_generate:
scale = (4,)
ratio = (0.5, 1.0, 2.0)
stride = (4, 8, 16, 32, 64)
image_anchor = 256
max_side = 700
class anchor_assign:
allowed_border = 0
pos_thr = 0.7
neg_thr = 0.3
min_pos_thr = 0.0
image_anchor = 256
pos_fraction = 0.5
class head:
conv_channel = 256
mean = (0, 0, 0, 0)
std = (1, 1, 1, 1)
class proposal:
pre_nms_top_n = 2000 if is_train else 1000
post_nms_top_n = 2000 if is_train else 1000
nms_thr = 0.7
min_bbox_side = 0
class subsample_proposal:
proposal_wo_gt = False
image_roi = 512
fg_fraction = 0.25
fg_thr = 0.5
bg_thr_hi = 0.5
bg_thr_lo = 0.0
class bbox_target:
num_reg_class = 81
class_agnostic = False
weight = (1.0, 1.0, 1.0, 1.0)
mean = (0.0, 0.0, 0.0, 0.0)
std = (0.1, 0.1, 0.2, 0.2)
class BboxParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
num_class = 1 + 80
image_roi = 512
batch_image = General.batch_image
class regress_target:
class_agnostic = False
mean = (0.0, 0.0, 0.0, 0.0)
std = (0.1, 0.1, 0.2, 0.2)
class RoiParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
out_size = 7
stride = (4, 8, 16, 32)
roi_canonical_scale = 224
roi_canonical_level = 4
class DatasetParam:
if is_train:
image_set = ("coco_train2014", "coco_valminusminival2014")
total_image = 82783 + 35504
else:
image_set = ("coco_minival2014", )
total_image = 5000
backbone = Backbone(BackboneParam)
neck = Neck(NeckParam)
rpn_head = RpnHead(RpnParam)
roi_extractor = RoiExtractor(RoiParam)
bbox_head = BboxHead(BboxParam)
detector = Detector()
if is_train:
train_sym = detector.get_train_symbol(backbone, neck, rpn_head, roi_extractor, bbox_head)
rpn_test_sym = None
test_sym = None
else:
train_sym = None
rpn_test_sym = detector.get_rpn_test_symbol(backbone, neck, rpn_head)
test_sym = detector.get_test_symbol(backbone, neck, rpn_head, roi_extractor, bbox_head)
class ModelParam:
train_symbol = train_sym
test_symbol = test_sym
rpn_test_symbol = rpn_test_sym
from_scratch = True
random = True
memonger = False
memonger_until = "stage3_unit21_plus"
class pretrain:
prefix = None
epoch = 0
fixed_param = []
def process_weight(sym, arg, aux):
for stride in RpnParam.anchor_generate.stride:
add_anchor_to_arg(
sym, arg, aux, RpnParam.anchor_generate.max_side,
stride, RpnParam.anchor_generate.scale,
RpnParam.anchor_generate.ratio)
class OptimizeParam:
class optimizer:
type = "sgd"
lr = 0.01 / 8 * len(KvstoreParam.gpus) * KvstoreParam.batch_image
momentum = 0.9
wd = 1e-4
clip_gradient = None
class schedule:
mult = 6
begin_epoch = 0
end_epoch = 6 * mult
if mult <= 2:
lr_iter = [60000 * mult * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image),
80000 * mult * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image)]
else:
lr_iter = [(DatasetParam.total_image * 2 // 16 * end_epoch - 60000) * 16 //
(len(KvstoreParam.gpus) * KvstoreParam.batch_image),
(DatasetParam.total_image * 2 // 16 * end_epoch - 20000) * 16 //
(len(KvstoreParam.gpus) * KvstoreParam.batch_image)]
class warmup:
type = "gradual"
lr = 0
iter = 500
class TestParam:
min_det_score = 0.05
max_det_per_image = 100
process_roidb = lambda x: x
process_output = lambda x, y: x
class model:
prefix = "experiments/{}/checkpoint".format(General.name)
epoch = OptimizeParam.schedule.end_epoch
class nms:
type = "nms"
thr = 0.5
class coco:
annotation = "data/coco/annotations/instances_minival2014.json"
class NormParam:
mean = tuple(i * 255 for i in (0.485, 0.456, 0.406))
std = tuple(i * 255 for i in (0.229, 0.224, 0.225))
class ResizeParam:
short = 400
long = 600
class PadParam:
short = 400
long = 600
max_num_gt = 100
class AnchorTarget2DParam:
def __init__(self):
self.generate = self._generate()
class _generate:
def __init__(self):
self.stride = (4, 8, 16, 32, 64)
self.short = (100, 50, 25, 13, 7)
self.long = (150, 75, 38, 19, 10)
scales = (4)
aspects = (0.5, 1.0, 2.0)
class assign:
allowed_border = 0
pos_thr = 0.7
neg_thr = 0.3
min_pos_thr = 0.0
class sample:
image_anchor = 256
pos_fraction = 0.5
class RenameParam:
mapping = dict(image="data")
from core.detection_input import ReadRoiRecord, Resize2DImageBbox, \
ConvertImageFromHwcToChw, Flip2DImageBbox, Pad2DImageBbox, \
RenameRecord, Norm2DImage
from models.FPN.input import PyramidAnchorTarget2D
if is_train:
transform = [
ReadRoiRecord(None),
Norm2DImage(NormParam),
Resize2DImageBbox(ResizeParam),
Flip2DImageBbox(),
Pad2DImageBbox(PadParam),
ConvertImageFromHwcToChw(),
RenameRecord(RenameParam.mapping)
]
data_name = ["data"]
label_name = ["gt_bbox", "im_info"]
if not RpnParam.nnvm_rpn_target:
transform.append(PyramidAnchorTarget2D(AnchorTarget2DParam()))
label_name += ["rpn_cls_label", "rpn_reg_target", "rpn_reg_weight"]
else:
transform = [
ReadRoiRecord(None),
Norm2DImage(NormParam),
Resize2DImageBbox(ResizeParam),
Pad2DImageBbox(PadParam),
ConvertImageFromHwcToChw(),
RenameRecord(RenameParam.mapping)
]
data_name = ["data", "im_info", "im_id", "rec_id"]
label_name = []
import core.detection_metric as metric
rpn_acc_metric = metric.AccWithIgnore(
"RpnAcc",
["rpn_cls_loss_output", "rpn_cls_label_blockgrad_output"],
[]
)
rpn_l1_metric = metric.L1(
"RpnL1",
["rpn_reg_loss_output", "rpn_cls_label_blockgrad_output"],
[]
)
box_acc_metric = metric.AccWithIgnore(
"RcnnAcc",
["bbox_cls_loss_output", "bbox_label_blockgrad_output"],
[]
)
box_l1_metric = metric.L1(
"RcnnL1",
["bbox_reg_loss_output", "bbox_label_blockgrad_output"],
[]
)
metric_list = [rpn_acc_metric, rpn_l1_metric, box_acc_metric, box_l1_metric]
return General, KvstoreParam, RpnParam, RoiParam, BboxParam, DatasetParam, \
ModelParam, OptimizeParam, TestParam, \
transform, data_name, label_name, metric_list
| true
| true
|
f70ca4df4d8693543185eb6be9555a14f6817889
| 463
|
py
|
Python
|
flask/server.py
|
graykode/kubernetes-glusterfs-aws
|
b377f731c7e732a1d0c4d64fb067b6ed602a6761
|
[
"MIT"
] | 15
|
2019-04-02T09:20:25.000Z
|
2020-02-15T06:54:23.000Z
|
flask/server.py
|
graykode/kubernetes-glusterfs-aws
|
b377f731c7e732a1d0c4d64fb067b6ed602a6761
|
[
"MIT"
] | null | null | null |
flask/server.py
|
graykode/kubernetes-glusterfs-aws
|
b377f731c7e732a1d0c4d64fb067b6ed602a6761
|
[
"MIT"
] | 4
|
2019-04-03T06:44:59.000Z
|
2021-08-04T07:39:56.000Z
|
#!/usr/bin/env python3
"""Minimal Flask server that serves an upload form and accepts file uploads."""
from flask import Flask, render_template, request
# secure_filename lives in werkzeug.utils; the top-level re-export was removed
# in Werkzeug 1.0, so `from werkzeug import secure_filename` breaks on modern
# versions.
from werkzeug.utils import secure_filename

app = Flask(__name__)


@app.route('/')
def index():
    """Serve the upload form page."""
    return render_template('index.html')


@app.route('/upload', methods=['POST'])
def upload():
    """Save an uploaded file (form field 'file') under a sanitized name."""
    # The route already restricts the method to POST, so no method check needed.
    f = request.files.get('file')
    if f is None or not f.filename:
        # Missing or empty upload: report it instead of crashing on None.
        return 'no file provided', 400
    # secure_filename strips path separators, preventing path traversal.
    f.save(secure_filename(f.filename))
    # The original returned None, which makes Flask raise an error on POST;
    # a view function must return a response.
    return 'file uploaded', 200


if __name__ == '__main__':
    app.run(host='0.0.0.0', debug=False)
| 24.368421
| 49
| 0.663067
|
"""Minimal Flask upload server."""
from flask import Flask, render_template, request
# Import fix: secure_filename was removed from the werkzeug top-level package
# in Werkzeug 1.0; it must come from werkzeug.utils.
from werkzeug.utils import secure_filename

app = Flask(__name__)


@app.route('/')
def index():
    """Serve the upload form page."""
    return render_template('index.html')


@app.route('/upload', methods=['POST'])
def upload():
    """Persist an uploaded file (form field 'file') under a sanitized name."""
    f = request.files.get('file')
    if f is None or not f.filename:
        # Guard against a missing file part instead of raising AttributeError.
        return 'no file provided', 400
    f.save(secure_filename(f.filename))
    # A Flask view must return a response; the original returned None.
    return 'file uploaded', 200


if __name__ == '__main__':
    app.run(host='0.0.0.0', debug=False)
| true
| true
|
f70ca51b7c3abc91b467439c4e0dab5b31d3f255
| 995
|
py
|
Python
|
lanthanum/widgets.py
|
kingstonlabs/django-lanthanum
|
726f714ecd874a9a598d3e12d113a62f24832514
|
[
"BSD-3-Clause"
] | null | null | null |
lanthanum/widgets.py
|
kingstonlabs/django-lanthanum
|
726f714ecd874a9a598d3e12d113a62f24832514
|
[
"BSD-3-Clause"
] | null | null | null |
lanthanum/widgets.py
|
kingstonlabs/django-lanthanum
|
726f714ecd874a9a598d3e12d113a62f24832514
|
[
"BSD-3-Clause"
] | null | null | null |
import copy
import json
from django_admin_json_editor import JSONEditorWidget
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
class JSONEditorWidget(JSONEditorWidget):
    """JSON editor admin widget that serialises schema data with ``json.dumps``.

    Intentionally shadows the imported django_admin_json_editor widget so
    callers pick up this subclass.
    """

    template_name = 'lanthanum/_json_editor_widget.html'

    def render(self, name, value, attrs=None, renderer=None):
        """
        Fix the JSON Editor widget by doing a standard json dump for dict data

        This will not convert booleans to ints like the standard JSON Editor.
        """
        # The schema may be a callable (built per-widget) or a plain mapping.
        schema = self._schema(self) if callable(self._schema) else copy.copy(self._schema)
        schema['title'] = ' '
        schema['options'] = {'collapsed': int(self._collapsed)}
        rendered = render_to_string(
            self.template_name,
            {
                'name': name,
                'schema': json.dumps(schema),
                'data': value,
                'sceditor': int(self._sceditor),
            },
        )
        return mark_safe(rendered)
| 30.151515
| 78
| 0.643216
|
import copy
import json
from django_admin_json_editor import JSONEditorWidget
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
class JSONEditorWidget(JSONEditorWidget):
    # NOTE: intentionally shadows the imported django_admin_json_editor widget.

    template_name = 'lanthanum/_json_editor_widget.html'

    def render(self, name, value, attrs=None, renderer=None):
        """Render the widget, serialising the schema with a standard json dump.

        Unlike the stock JSON Editor widget, this does not convert booleans
        to ints when dumping the schema.
        """
        if callable(self._schema):
            schema = self._schema(self)
        else:
            schema = copy.copy(self._schema)
        # Blank out the title and honour the configured collapsed state.
        schema['title'] = ' '
        schema['options'] = {'collapsed': int(self._collapsed)}
        context = {
            'name': name,
            'schema': json.dumps(schema),
            'data': value,
            'sceditor': int(self._sceditor),
        }
        return mark_safe(render_to_string(self.template_name, context))
| true
| true
|
f70ca6efda6e90267dd0303b0c44eddd0534c01e
| 3,750
|
py
|
Python
|
test/acceptance/integration/environments/get_environment_details/steps.py
|
hmunfru/fiware-paas
|
dd808e986f5463dcbb85370b295404f167838ea1
|
[
"Apache-2.0"
] | null | null | null |
test/acceptance/integration/environments/get_environment_details/steps.py
|
hmunfru/fiware-paas
|
dd808e986f5463dcbb85370b295404f167838ea1
|
[
"Apache-2.0"
] | null | null | null |
test/acceptance/integration/environments/get_environment_details/steps.py
|
hmunfru/fiware-paas
|
dd808e986f5463dcbb85370b295404f167838ea1
|
[
"Apache-2.0"
] | 2
|
2016-08-22T16:03:25.000Z
|
2018-03-05T23:28:55.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2014 Telefonica Investigación y Desarrollo, S.A.U
#
# This file is part of FI-WARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with opensource@tid.es
from lettuce import step, world
from lettuce_tools.dataset_utils.dataset_utils import DatasetUtils
from tools import http
from tools import environment_request
from tools.tier import Tier
from tools.constants import (
    NAME, DESCRIPTION, PRODUCTS, NETWORKS, PAAS, TIER_IMAGE
)

# Helper that expands dataset placeholders found in scenario tables.
dataset_utils = DatasetUtils()


@step(u'the paas manager is up and properly configured')
def the_paas_manager_is_up_and_properly_configured(step):
    """No-op: the PaaS manager is deployed and configured by external means."""
    pass


@step(u'a list of tiers has been defined with data:')
def a_list_of_tiers_has_been_defined_with_data(step):
    """Turn every row of the scenario table into a Tier on ``world.tiers``."""
    world.tiers = []
    for raw_row in step.hashes:
        row_data = dataset_utils.prepare_data(raw_row)
        new_tier = Tier(row_data.get(NAME), world.config[PAAS][TIER_IMAGE])
        new_tier.parse_and_add_products(row_data.get(PRODUCTS))
        new_tier.parse_and_add_networks(row_data.get(NETWORKS))
        world.tiers.append(new_tier)


@step(u'an environment has already been created with data:')
def an_environment_has_already_been_created_with_data(step):
    """Create an environment named/described by the first table row."""
    first_row = dataset_utils.prepare_data(step.hashes[0])
    world.env_requests.add_environment(first_row.get(NAME), first_row.get(DESCRIPTION))


@step(u'an environment has already been created with the previous tiers and data:')
def an_environment_has_already_been_created_with_the_previous_tiers_and_data(step):
    """Create an environment from the first table row plus ``world.tiers``."""
    first_row = dataset_utils.prepare_data(step.hashes[0])
    world.env_requests.add_environment(
        first_row.get(NAME), first_row.get(DESCRIPTION), world.tiers)


@step(u'there is no environment with name "([^"]*)" already created')
def there_is_no_environment_with_name_already_created(step, name):
    """Ensure a clean slate by deleting the environment in case it exists."""
    world.env_requests.delete_environment(name)


@step(u'I request the details of the environment with name "([^"]*)"')
def i_request_the_list_of_existing_environments(step, name):
    """Ask the PaaS manager for the details of one environment."""
    fixed_name = dataset_utils.generate_fixed_length_param(name)
    world.env_requests.get_environment(fixed_name)


@step(u'I receive an? "([^"]*)" response with data:')
def i_receive_a_response_of_type_with_data(step, response_type):
    """Check status code plus name/description from the first table row."""
    expected_status = http.status_codes[response_type]
    first_row = dataset_utils.prepare_data(step.hashes[0])
    environment_request.check_get_environment_response(
        world.response, expected_status,
        first_row.get(NAME), first_row.get(DESCRIPTION))


@step(u'I receive an? "([^"]*)" response with the previous tiers and data:')
def i_receive_a_response_of_type_with_the_previous_tiers_and_data(step, response_type):
    """Check status code, name/description and the previously defined tiers."""
    expected_status = http.status_codes[response_type]
    first_row = dataset_utils.prepare_data(step.hashes[0])
    environment_request.check_get_environment_response(
        world.response, expected_status,
        first_row.get(NAME), first_row.get(DESCRIPTION),
        world.tiers)


@step(u'I receive an? "([^"]*)" response$')
def i_receive_a_response_of_type(step, response_type):
    """Check only the HTTP status code of the stored response."""
    environment_request.check_get_environment_response(
        world.response, http.status_codes[response_type])
| 39.893617
| 90
| 0.7672
|
from lettuce import step, world
from lettuce_tools.dataset_utils.dataset_utils import DatasetUtils
from tools import http
from tools import environment_request
from tools.tier import Tier
from tools.constants import NAME, DESCRIPTION, PRODUCTS, NETWORKS, PAAS,\
    TIER_IMAGE

# Shared helper used to expand dataset placeholders in scenario tables.
dataset_utils = DatasetUtils()


@step(u'the paas manager is up and properly configured')
def the_paas_manager_is_up_and_properly_configured(step):
    # Environment is prepared externally; nothing to do in the step itself.
    pass


@step(u'a list of tiers has been defined with data:')
def a_list_of_tiers_has_been_defined_with_data(step):
    """Build a Tier per scenario-table row and store the list in ``world.tiers``."""
    world.tiers = []
    for row in step.hashes:
        data = dataset_utils.prepare_data(row)
        tier = Tier(data.get(NAME), world.config[PAAS][TIER_IMAGE])
        tier.parse_and_add_products(data.get(PRODUCTS))
        tier.parse_and_add_networks(data.get(NETWORKS))
        world.tiers.append(tier)


@step(u'an environment has already been created with data:')
def an_environment_has_already_been_created_with_data(step):
    """Create an environment from the first table row (name/description)."""
    data = dataset_utils.prepare_data(step.hashes[0])
    world.env_requests.add_environment(data.get(NAME), data.get(DESCRIPTION))


@step(u'an environment has already been created with the previous tiers and data:')
def an_environment_has_already_been_created_with_the_previous_tiers_and_data(step):
    """Create an environment from the first table row plus ``world.tiers``."""
    data = dataset_utils.prepare_data(step.hashes[0])
    world.env_requests.add_environment(data.get(NAME), data.get(DESCRIPTION), world.tiers)


@step(u'there is no environment with name "([^"]*)" already created')
def there_is_no_environment_with_name_already_created(step, name):
    world.env_requests.delete_environment(name)  # Just in case it exists


@step(u'I request the details of the environment with name "([^"]*)"')
def i_request_the_list_of_existing_environments(step, name):
    """Fetch environment details; the helper stores the response on ``world``."""
    name = dataset_utils.generate_fixed_length_param(name)
    world.env_requests.get_environment(name)


@step(u'I receive an? "([^"]*)" response with data:')
def i_receive_a_response_of_type_with_data(step, response_type):
    """Assert status code and name/description from the first table row."""
    status_code = http.status_codes[response_type]
    data = dataset_utils.prepare_data(step.hashes[0])
    environment_request.check_get_environment_response(world.response, status_code,
                                                       data.get(NAME), data.get(DESCRIPTION))


@step(u'I receive an? "([^"]*)" response with the previous tiers and data:')
def i_receive_a_response_of_type_with_the_previous_tiers_and_data(step, response_type):
    """Assert status code, name/description and the tiers defined earlier."""
    status_code = http.status_codes[response_type]
    data = dataset_utils.prepare_data(step.hashes[0])
    environment_request.check_get_environment_response(world.response, status_code,
                                                       data.get(NAME), data.get(DESCRIPTION),
                                                       world.tiers)


@step(u'I receive an? "([^"]*)" response$')
def i_receive_a_response_of_type(step, response_type):
    """Assert only the HTTP status code of the stored response."""
    status_code = http.status_codes[response_type]
    environment_request.check_get_environment_response(world.response, status_code)
| true
| true
|
f70ca702e765b72d170cd703e3c1c68745be1103
| 453
|
py
|
Python
|
setup.py
|
sanyatuning/home-assistant-polymer
|
cc301df57d7249d0cb97a7617880fad27a37d35f
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
sanyatuning/home-assistant-polymer
|
cc301df57d7249d0cb97a7617880fad27a37d35f
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
sanyatuning/home-assistant-polymer
|
cc301df57d7249d0cb97a7617880fad27a37d35f
|
[
"Apache-2.0"
] | null | null | null |
from setuptools import setup, find_packages

# Ship only the prebuilt frontend assets contained in the hass_frontend
# package (and its subpackages).
frontend_packages = find_packages(include=["hass_frontend", "hass_frontend.*"])

setup(
    name="home-assistant-frontend",
    version="20211215.0",
    description="The Home Assistant frontend",
    url="https://github.com/home-assistant/frontend",
    author="The Home Assistant Authors",
    author_email="hello@home-assistant.io",
    license="Apache-2.0",
    packages=frontend_packages,
    include_package_data=True,
    zip_safe=False,
)
| 30.2
| 73
| 0.715232
|
from setuptools import setup, find_packages

# Packaging metadata for the prebuilt Home Assistant frontend assets.
setup(
    name="home-assistant-frontend",
    version="20211215.0",
    description="The Home Assistant frontend",
    url="https://github.com/home-assistant/frontend",
    author="The Home Assistant Authors",
    author_email="hello@home-assistant.io",
    license="Apache-2.0",
    # Include only the hass_frontend package and its subpackages.
    packages=find_packages(include=["hass_frontend", "hass_frontend.*"]),
    include_package_data=True,
    zip_safe=False,
)
| true
| true
|
f70ca7ed9abc143ee5dbd4df547f5da8a385c7ba
| 12,169
|
py
|
Python
|
synthtool/gcp/gapic_bazel.py
|
arithmetic1728/synthtool
|
d022f14edf182e1653a5832cf6d29365f503b26c
|
[
"Apache-2.0"
] | null | null | null |
synthtool/gcp/gapic_bazel.py
|
arithmetic1728/synthtool
|
d022f14edf182e1653a5832cf6d29365f503b26c
|
[
"Apache-2.0"
] | null | null | null |
synthtool/gcp/gapic_bazel.py
|
arithmetic1728/synthtool
|
d022f14edf182e1653a5832cf6d29365f503b26c
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pathlib import Path
from typing import Optional, Union
import os
import shutil
import tempfile
from synthtool import _tracked_paths, metadata, shell
from synthtool.log import logger
from synthtool.sources import git
# Clone URLs for the repositories that hold the API definitions.
GOOGLEAPIS_URL: str = git.make_repo_clone_url("googleapis/googleapis")
GOOGLEAPIS_PRIVATE_URL: str = git.make_repo_clone_url("googleapis/googleapis-private")
DISCOVERY_ARTIFACT_MANAGER_URL: str = git.make_repo_clone_url(
    "googleapis/discovery-artifact-manager"
)
# Optional environment overrides pointing at local checkouts; when set,
# cloning is skipped and the local path is used instead.
LOCAL_GOOGLEAPIS: Optional[str] = os.environ.get("SYNTHTOOL_GOOGLEAPIS")
LOCAL_DISCOVERY_ARTIFACT_MANAGER: Optional[str] = os.environ.get(
    "SYNTHTOOL_DISCOVERY_ARTIFACT_MANAGER"
)
class GAPICBazel:
    """A synthtool component that can produce libraries using bazel build."""

    def __init__(self):
        self._ensure_dependencies_installed()
        # Lazily-populated clone locations for the definition repositories.
        self._googleapis = None
        self._googleapis_private = None
        self._discovery_artifact_manager = None

    def py_library(self, service: str, version: str, **kwargs) -> Path:
        """Generate a Python client library for ``service``/``version``."""
        return self._generate_code(service, version, "python", **kwargs)

    def go_library(self, service: str, version: str, **kwargs) -> Path:
        """Generate a Go client library for ``service``/``version``."""
        return self._generate_code(service, version, "go", **kwargs)

    def node_library(self, service: str, version: str, **kwargs) -> Path:
        """Generate a Node.js client library for ``service``/``version``."""
        return self._generate_code(service, version, "nodejs", **kwargs)

    def csharp_library(self, service: str, version: str, **kwargs) -> Path:
        """Generate a C# client library for ``service``/``version``."""
        return self._generate_code(service, version, "csharp", **kwargs)

    def php_library(self, service: str, version: str, **kwargs) -> Path:
        """Generate a PHP client library for ``service``/``version``."""
        return self._generate_code(service, version, "php", **kwargs)

    def java_library(self, service: str, version: str, **kwargs) -> Path:
        """Generate a Java client library for ``service``/``version``."""
        return self._generate_code(service, version, "java", **kwargs)

    def ruby_library(self, service: str, version: str, **kwargs) -> Path:
        """Generate a Ruby client library for ``service``/``version``."""
        return self._generate_code(service, version, "ruby", **kwargs)

    def _generate_code(
        self,
        service: str,
        version: str,
        language: str,
        *,
        private: bool = False,
        discogapic: bool = False,
        proto_path: Optional[Union[str, Path]] = None,
        output_dir: Optional[Union[str, Path]] = None,
        bazel_target: Optional[str] = None,
        include_protos: bool = False,
        proto_output_path: Optional[Union[str, Path]] = None,
    ):
        """Run the bazel build for one API and unpack the generated tarball.

        Args:
            service: API name (e.g. ``language``).
            version: API version (e.g. ``v1``).
            language: Target language understood by the bazel rules.
            private: Use googleapis-private instead of googleapis.
            discogapic: Use discovery-artifact-manager as the sources repo.
            proto_path: Path to the protos inside the sources repository;
                derived from ``bazel_target`` or ``service``/``version``
                when omitted.
            output_dir: Where to unpack the generated code; a temporary
                directory is created when omitted.
            bazel_target: Explicit bazel target; derived from ``proto_path``
                and per-language naming patterns when omitted.
            include_protos: Also copy the ``*.proto`` files into the output.
            proto_output_path: Where to place the copied protos, relative to
                ``output_dir``; defaults to a language-appropriate location.

        Returns:
            Path to the directory containing the generated code.

        Raises:
            RuntimeError: sources repo unavailable, target underivable, or
                empty output.
            FileNotFoundError: protos or BUILD.bazel missing.
        """
        # Determine which googleapis repo to use
        if discogapic:
            api_definitions_repo = self._clone_discovery_artifact_manager()
            api_definitions_repo_name = "discovery-artifact-manager"
        elif private:
            api_definitions_repo = self._clone_googleapis_private()
            api_definitions_repo_name = "googleapis_private"
        else:
            api_definitions_repo = self._clone_googleapis()
            api_definitions_repo_name = "googleapis"

        # Sanity check: We should have a googleapis repo; if we do not,
        # something went wrong, and we should abort.
        if not api_definitions_repo:
            # FIX: original message read "...repository repositoryis
            # unavailable." (duplicated word + missing space in the
            # implicit string concatenation).
            raise RuntimeError(
                f"Unable to generate {service}, the sources repository "
                "is unavailable."
            )

        # Calculate proto_path if necessary.
        if not bazel_target or include_protos:
            # If bazel_target is not specified explicitly, we will need
            # proto_path to calculate it. If include_protos is True,
            # we will need the proto_path to copy the protos.
            if not proto_path:
                if bazel_target:
                    # Calculate proto_path from the full bazel target, which is
                    # in the format "//proto_path:target_name
                    proto_path = bazel_target.split(":")[0][2:]
                else:
                    # If bazel_target is not specified, assume the protos are
                    # simply under google/cloud, where the most of the protos
                    # usually are.
                    proto_path = f"google/cloud/{service}/{version}"
            protos = Path(proto_path)
            if protos.is_absolute():
                protos = protos.relative_to("/")

        # Determine bazel target based on per-language patterns
        # Java:    google-cloud-{{assembly_name}}-{{version}}-java
        # Go:      gapi-cloud-{{assembly_name}}-{{version}}-go
        # Python:  {{assembly_name}}-{{version}}-py
        # PHP:     google-cloud-{{assembly_name}}-{{version}}-php
        # Node.js: {{assembly_name}}-{{version}}-nodejs
        # Ruby:    google-cloud-{{assembly_name}}-{{version}}-ruby
        # C#:      google-cloud-{{assembly_name}}-{{version}}-csharp
        if not bazel_target:
            # Determine where the protos we are generating actually live.
            # We can sometimes (but not always) determine this from the service
            # and version; in other cases, the user must provide it outright.
            parts = list(protos.parts)
            while len(parts) > 0 and parts[0] != "google":
                parts.pop(0)
            if len(parts) == 0:
                # FIX: added the missing space between the two concatenated
                # message fragments.
                raise RuntimeError(
                    f"Cannot determine bazel_target from proto_path {protos}. "
                    "Please set bazel_target explicitly."
                )
            if language == "python":
                suffix = f"{service}-{version}-py"
            elif language == "nodejs":
                suffix = f"{service}-{version}-nodejs"
            elif language == "go":
                suffix = f"gapi-{'-'.join(parts[1:])}-go"
            else:
                suffix = f"{'-'.join(parts)}-{language}"
            bazel_target = f"//{os.path.sep.join(parts)}:{suffix}"

        # Sanity check: Do we have protos where we think we should?
        if not (api_definitions_repo / protos).exists():
            raise FileNotFoundError(
                f"Unable to find directory for protos: {(api_definitions_repo / protos)}."
            )
        if not tuple((api_definitions_repo / protos).glob("*.proto")):
            raise FileNotFoundError(
                f"Directory {(api_definitions_repo / protos)} exists, but no protos found."
            )
        # BUG FIX: the original tested the truthiness of a Path object, which
        # is always True, so this check never fired; call .exists().
        if not (api_definitions_repo / protos / "BUILD.bazel").exists():
            raise FileNotFoundError(
                f"File {(api_definitions_repo / protos / 'BUILD.bazel')} does not exist."
            )

        # Ensure the desired output directory exists.
        # If none was provided, create a temporary directory.
        if not output_dir:
            output_dir = tempfile.mkdtemp()
        output_dir = Path(output_dir).resolve()

        # Let's build some stuff now.
        cwd = os.getcwd()
        os.chdir(str(api_definitions_repo))

        bazel_run_args = [
            "bazel",
            "--max_idle_secs=240",
            "build",
            bazel_target,
        ]
        logger.debug(f"Generating code for: {bazel_target}.")
        shell.run(bazel_run_args)

        # We've got tar file!
        # its location: bazel-bin/google/cloud/language/v1/language-v1-nodejs.tar.gz
        # bazel_target: //google/cloud/language/v1:language-v1-nodejs
        tar_file = (
            f"bazel-bin{os.path.sep}{bazel_target[2:].replace(':', os.path.sep)}.tar.gz"
        )
        tar_run_args = [
            "tar",
            "-C",
            str(output_dir),
            "--strip-components=1",
            "-xzf",
            tar_file,
        ]
        shell.run(tar_run_args)

        # Get the *.protos files and put them in a protos dir in the output
        if include_protos:
            # Relative glob: cwd is the definitions repo at this point.
            proto_files = protos.glob("**/*.proto")
            # By default, put the protos at the root in a folder named 'protos'.
            # Specific languages can be cased here to put them in a more language
            # appropriate place.
            if not proto_output_path:
                proto_output_path = output_dir / "protos"
                if language == "python":
                    # place protos alongsize the *_pb2.py files
                    proto_output_path = (
                        output_dir / f"google/cloud/{service}_{version}/proto"
                    )
            else:
                proto_output_path = Path(output_dir / proto_output_path)
            os.makedirs(proto_output_path, exist_ok=True)

            for i in proto_files:
                logger.debug(f"Copy: {i} to {proto_output_path / i.name}")
                shutil.copyfile(i, proto_output_path / i.name)
            logger.success(f"Placed proto files into {proto_output_path}.")

        os.chdir(cwd)

        # Sanity check: Does the output location have code in it?
        # If not, complain.
        if not tuple(output_dir.iterdir()):
            raise RuntimeError(
                f"Code generation seemed to succeed, but {output_dir} is empty."
            )

        # Huzzah, it worked.
        logger.success(f"Generated code into {output_dir}.")

        # Record this in the synthtool metadata.
        metadata.add_client_destination(
            source=api_definitions_repo_name,
            api_name=service,
            api_version=version,
            language=language,
            generator="bazel",
        )

        _tracked_paths.add(output_dir)
        return output_dir

    def _clone_googleapis(self):
        """Return a local googleapis checkout, cloning it on first use."""
        if self._googleapis:
            return self._googleapis
        if LOCAL_GOOGLEAPIS:
            self._googleapis = Path(LOCAL_GOOGLEAPIS).expanduser()
            logger.debug(f"Using local googleapis at {self._googleapis}")
        else:
            logger.debug("Cloning googleapis.")
            self._googleapis = git.clone(GOOGLEAPIS_URL)
        return self._googleapis

    def _clone_googleapis_private(self):
        """Return a local googleapis-private checkout, cloning it on first use."""
        if self._googleapis_private:
            return self._googleapis_private
        if LOCAL_GOOGLEAPIS:
            self._googleapis_private = Path(LOCAL_GOOGLEAPIS).expanduser()
            logger.debug(
                f"Using local googleapis at {self._googleapis_private} for googleapis-private"
            )
        else:
            logger.debug("Cloning googleapis-private.")
            self._googleapis_private = git.clone(GOOGLEAPIS_PRIVATE_URL)
        return self._googleapis_private

    def _clone_discovery_artifact_manager(self):
        """Return a local discovery-artifact-manager checkout, cloning on first use."""
        if self._discovery_artifact_manager:
            return self._discovery_artifact_manager
        if LOCAL_DISCOVERY_ARTIFACT_MANAGER:
            self._discovery_artifact_manager = Path(
                LOCAL_DISCOVERY_ARTIFACT_MANAGER
            ).expanduser()
            logger.debug(
                f"Using local discovery_artifact_manager at {self._discovery_artifact_manager} for googleapis-private"
            )
        else:
            logger.debug("Cloning discovery-artifact-manager.")
            self._discovery_artifact_manager = git.clone(DISCOVERY_ARTIFACT_MANAGER_URL)
        return self._discovery_artifact_manager

    def _ensure_dependencies_installed(self):
        """Verify required CLI tools are on PATH; raise listing any missing ones."""
        logger.debug("Ensuring dependencies.")

        dependencies = ["bazel", "zip", "unzip", "tar"]
        failed_dependencies = []
        for dependency in dependencies:
            return_code = shell.run(["which", dependency], check=False).returncode
            if return_code:
                failed_dependencies.append(dependency)
        if failed_dependencies:
            raise EnvironmentError(
                f"Dependencies missing: {', '.join(failed_dependencies)}"
            )
| 39.767974
| 118
| 0.608513
|
from pathlib import Path
from typing import Optional, Union
import os
import shutil
import tempfile
from synthtool import _tracked_paths, metadata, shell
from synthtool.log import logger
from synthtool.sources import git
GOOGLEAPIS_URL: str = git.make_repo_clone_url("googleapis/googleapis")
GOOGLEAPIS_PRIVATE_URL: str = git.make_repo_clone_url("googleapis/googleapis-private")
DISCOVERY_ARTIFACT_MANAGER_URL: str = git.make_repo_clone_url(
"googleapis/discovery-artifact-manager"
)
LOCAL_GOOGLEAPIS: Optional[str] = os.environ.get("SYNTHTOOL_GOOGLEAPIS")
LOCAL_DISCOVERY_ARTIFACT_MANAGER: Optional[str] = os.environ.get(
"SYNTHTOOL_DISCOVERY_ARTIFACT_MANAGER"
)
class GAPICBazel:
def __init__(self):
self._ensure_dependencies_installed()
self._googleapis = None
self._googleapis_private = None
self._discovery_artifact_manager = None
def py_library(self, service: str, version: str, **kwargs) -> Path:
return self._generate_code(service, version, "python", **kwargs)
def go_library(self, service: str, version: str, **kwargs) -> Path:
return self._generate_code(service, version, "go", **kwargs)
def node_library(self, service: str, version: str, **kwargs) -> Path:
return self._generate_code(service, version, "nodejs", **kwargs)
def csharp_library(self, service: str, version: str, **kwargs) -> Path:
return self._generate_code(service, version, "csharp", **kwargs)
def php_library(self, service: str, version: str, **kwargs) -> Path:
return self._generate_code(service, version, "php", **kwargs)
def java_library(self, service: str, version: str, **kwargs) -> Path:
return self._generate_code(service, version, "java", **kwargs)
def ruby_library(self, service: str, version: str, **kwargs) -> Path:
return self._generate_code(service, version, "ruby", **kwargs)
def _generate_code(
self,
service: str,
version: str,
language: str,
*,
private: bool = False,
discogapic: bool = False,
proto_path: Union[str, Path] = None,
output_dir: Union[str, Path] = None,
bazel_target: str = None,
include_protos: bool = False,
proto_output_path: Union[str, Path] = None,
):
if discogapic:
api_definitions_repo = self._clone_discovery_artifact_manager()
api_definitions_repo_name = "discovery-artifact-manager"
elif private:
api_definitions_repo = self._clone_googleapis_private()
api_definitions_repo_name = "googleapis_private"
else:
api_definitions_repo = self._clone_googleapis()
api_definitions_repo_name = "googleapis"
if not api_definitions_repo:
raise RuntimeError(
f"Unable to generate {service}, the sources repository repository"
"is unavailable."
)
if not bazel_target or include_protos:
if not proto_path:
if bazel_target:
proto_path = bazel_target.split(":")[0][2:]
else:
# If bazel_target is not specified, assume the protos are
# simply under google/cloud, where the most of the protos
# usually are.
proto_path = f"google/cloud/{service}/{version}"
protos = Path(proto_path)
if protos.is_absolute():
protos = protos.relative_to("/")
# Determine bazel target based on per-language patterns
# Java: google-cloud-{{assembly_name}}-{{version}}-java
# Go: gapi-cloud-{{assembly_name}}-{{version}}-go
# Python: {{assembly_name}}-{{version}}-py
# PHP: google-cloud-{{assembly_name}}-{{version}}-php
# Node.js: {{assembly_name}}-{{version}}-nodejs
# Ruby: google-cloud-{{assembly_name}}-{{version}}-ruby
# C#: google-cloud-{{assembly_name}}-{{version}}-csharp
if not bazel_target:
# Determine where the protos we are generating actually live.
# We can sometimes (but not always) determine this from the service
# and version; in other cases, the user must provide it outright.
parts = list(protos.parts)
while len(parts) > 0 and parts[0] != "google":
parts.pop(0)
if len(parts) == 0:
raise RuntimeError(
f"Cannot determine bazel_target from proto_path {protos}."
"Please set bazel_target explicitly."
)
if language == "python":
suffix = f"{service}-{version}-py"
elif language == "nodejs":
suffix = f"{service}-{version}-nodejs"
elif language == "go":
suffix = f"gapi-{'-'.join(parts[1:])}-go"
else:
suffix = f"{'-'.join(parts)}-{language}"
bazel_target = f"//{os.path.sep.join(parts)}:{suffix}"
# Sanity check: Do we have protos where we think we should?
if not (api_definitions_repo / protos).exists():
raise FileNotFoundError(
f"Unable to find directory for protos: {(api_definitions_repo / protos)}."
)
if not tuple((api_definitions_repo / protos).glob("*.proto")):
raise FileNotFoundError(
f"Directory {(api_definitions_repo / protos)} exists, but no protos found."
)
if not (api_definitions_repo / protos / "BUILD.bazel"):
raise FileNotFoundError(
f"File {(api_definitions_repo / protos / 'BUILD.bazel')} does not exist."
)
# Ensure the desired output directory exists.
# If none was provided, create a temporary directory.
if not output_dir:
output_dir = tempfile.mkdtemp()
output_dir = Path(output_dir).resolve()
# Let's build some stuff now.
cwd = os.getcwd()
os.chdir(str(api_definitions_repo))
bazel_run_args = [
"bazel",
"--max_idle_secs=240",
"build",
bazel_target,
]
logger.debug(f"Generating code for: {bazel_target}.")
shell.run(bazel_run_args)
# We've got tar file!
# its location: bazel-bin/google/cloud/language/v1/language-v1-nodejs.tar.gz
# bazel_target: //google/cloud/language/v1:language-v1-nodejs
tar_file = (
f"bazel-bin{os.path.sep}{bazel_target[2:].replace(':', os.path.sep)}.tar.gz"
)
tar_run_args = [
"tar",
"-C",
str(output_dir),
"--strip-components=1",
"-xzf",
tar_file,
]
shell.run(tar_run_args)
# Get the *.protos files and put them in a protos dir in the output
if include_protos:
proto_files = protos.glob("**/*.proto")
# By default, put the protos at the root in a folder named 'protos'.
# Specific languages can be cased here to put them in a more language
# appropriate place.
if not proto_output_path:
proto_output_path = output_dir / "protos"
if language == "python":
# place protos alongsize the *_pb2.py files
proto_output_path = (
output_dir / f"google/cloud/{service}_{version}/proto"
)
else:
proto_output_path = Path(output_dir / proto_output_path)
os.makedirs(proto_output_path, exist_ok=True)
for i in proto_files:
logger.debug(f"Copy: {i} to {proto_output_path / i.name}")
shutil.copyfile(i, proto_output_path / i.name)
logger.success(f"Placed proto files into {proto_output_path}.")
os.chdir(cwd)
# Sanity check: Does the output location have code in it?
# If not, complain.
if not tuple(output_dir.iterdir()):
raise RuntimeError(
f"Code generation seemed to succeed, but {output_dir} is empty."
)
# Huzzah, it worked.
logger.success(f"Generated code into {output_dir}.")
# Record this in the synthtool metadata.
metadata.add_client_destination(
source=api_definitions_repo_name,
api_name=service,
api_version=version,
language=language,
generator="bazel",
)
_tracked_paths.add(output_dir)
return output_dir
def _clone_googleapis(self):
if self._googleapis:
return self._googleapis
if LOCAL_GOOGLEAPIS:
self._googleapis = Path(LOCAL_GOOGLEAPIS).expanduser()
logger.debug(f"Using local googleapis at {self._googleapis}")
else:
logger.debug("Cloning googleapis.")
self._googleapis = git.clone(GOOGLEAPIS_URL)
return self._googleapis
def _clone_googleapis_private(self):
if self._googleapis_private:
return self._googleapis_private
if LOCAL_GOOGLEAPIS:
self._googleapis_private = Path(LOCAL_GOOGLEAPIS).expanduser()
logger.debug(
f"Using local googleapis at {self._googleapis_private} for googleapis-private"
)
else:
logger.debug("Cloning googleapis-private.")
self._googleapis_private = git.clone(GOOGLEAPIS_PRIVATE_URL)
return self._googleapis_private
def _clone_discovery_artifact_manager(self):
if self._discovery_artifact_manager:
return self._discovery_artifact_manager
if LOCAL_DISCOVERY_ARTIFACT_MANAGER:
self._discovery_artifact_manager = Path(
LOCAL_DISCOVERY_ARTIFACT_MANAGER
).expanduser()
logger.debug(
f"Using local discovery_artifact_manager at {self._discovery_artifact_manager} for googleapis-private"
)
else:
logger.debug("Cloning discovery-artifact-manager.")
self._discovery_artifact_manager = git.clone(DISCOVERY_ARTIFACT_MANAGER_URL)
return self._discovery_artifact_manager
def _ensure_dependencies_installed(self):
logger.debug("Ensuring dependencies.")
dependencies = ["bazel", "zip", "unzip", "tar"]
failed_dependencies = []
for dependency in dependencies:
return_code = shell.run(["which", dependency], check=False).returncode
if return_code:
failed_dependencies.append(dependency)
if failed_dependencies:
raise EnvironmentError(
f"Dependencies missing: {', '.join(failed_dependencies)}"
)
| true
| true
|
f70ca8667ffe97f1e7850ad20dea532cf5ffb1c1
| 1,197
|
py
|
Python
|
tests/test_easyipc.py
|
luiscarlosgph/easyipc
|
befe03bd2d1bf9f8378bcdf391dbeac8576bd723
|
[
"MIT"
] | 1
|
2020-09-15T13:04:16.000Z
|
2020-09-15T13:04:16.000Z
|
tests/test_easyipc.py
|
luiscarlosgph/easyipc
|
befe03bd2d1bf9f8378bcdf391dbeac8576bd723
|
[
"MIT"
] | null | null | null |
tests/test_easyipc.py
|
luiscarlosgph/easyipc
|
befe03bd2d1bf9f8378bcdf391dbeac8576bd723
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @brief This module has unit tests for the classes of EasyIPC.
# @author Luis C. Garcia-Peraza Herrera (luiscarlos.gph@gmail.com).
# @date 25 June 2020.
import unittest
import os
import sys
import numpy as np
# My imports
import easyipc
class TestEasyIPC(unittest.TestCase):
def test_pipe(self):
data = [np.random.rand(1000, 1000) for i in range(100)]
newpid = os.fork()
if newpid == 0:
client = easyipc.Pipe('hoho')
client.connect()
client.send_whatever({'Hello': 'from the client'})
for i in range(len(data)):
client.send_array(data[i])
else:
server = easyipc.Pipe('hoho')
server.listen()
whatever = None
while whatever is None:
whatever = server.recv_whatever(blocking=False)
self.assertTrue(whatever['Hello'] == 'from the client')
for i in range(len(data)):
data_back = server.recv_array()
self.assertTrue(np.sum(data[i] - data_back) == 0)
if __name__ == '__main__':
unittest.main()
| 28.5
| 67
| 0.567251
|
import unittest
import os
import sys
import numpy as np
import easyipc
class TestEasyIPC(unittest.TestCase):
def test_pipe(self):
data = [np.random.rand(1000, 1000) for i in range(100)]
newpid = os.fork()
if newpid == 0:
client = easyipc.Pipe('hoho')
client.connect()
client.send_whatever({'Hello': 'from the client'})
for i in range(len(data)):
client.send_array(data[i])
else:
server = easyipc.Pipe('hoho')
server.listen()
whatever = None
while whatever is None:
whatever = server.recv_whatever(blocking=False)
self.assertTrue(whatever['Hello'] == 'from the client')
for i in range(len(data)):
data_back = server.recv_array()
self.assertTrue(np.sum(data[i] - data_back) == 0)
if __name__ == '__main__':
unittest.main()
| true
| true
|
f70ca9f3889ccc3449eca2d8d12d6c708481874c
| 6,474
|
py
|
Python
|
tensorflow_model_optimization/python/core/common/keras/compression/algorithm.py
|
arovir01/model-optimization
|
92bfb45da34715eeff8849c2007cf3b734429120
|
[
"Apache-2.0"
] | 1,318
|
2018-10-31T23:57:52.000Z
|
2022-03-30T11:07:40.000Z
|
tensorflow_model_optimization/python/core/common/keras/compression/algorithm.py
|
arovir01/model-optimization
|
92bfb45da34715eeff8849c2007cf3b734429120
|
[
"Apache-2.0"
] | 410
|
2019-05-15T14:11:13.000Z
|
2022-03-31T07:27:07.000Z
|
tensorflow_model_optimization/python/core/common/keras/compression/algorithm.py
|
arovir01/model-optimization
|
92bfb45da34715eeff8849c2007cf3b734429120
|
[
"Apache-2.0"
] | 290
|
2019-05-14T17:42:49.000Z
|
2022-03-28T02:21:45.000Z
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Public APIs for algorithm developer using weight compression API."""
import abc
from typing import List, Any
import dataclasses
import tensorflow as tf
from tensorflow_model_optimization.python.core.common.keras.compression.internal import optimize
@dataclasses.dataclass
class WeightRepr:
args: Any = None
kwargs: Any = None
class WeightCompressor(metaclass=abc.ABCMeta):
"""Interface for weight compression algorithm that acts on a per-layer basis.
This allows both options of either decompressing during inference or
decompressing prior to inference (where compression occurs by applying a
tool such as zip to the model file).
This interface is a purely functional one.
"""
update_ops = [] # type: List
# TODO(tfmot): Consider separate from algorithm API for custom layer supports.
def get_compressible_weights(
self, original_layer: tf.keras.layers.Layer) -> List[tf.Variable]:
"""Define compressible weights for each layer.
Args:
original_layer: tf.keras.layers.Layer representing a layer from the
original model.
Returns:
List of compressible weights for the given layer.
"""
del original_layer
return []
@abc.abstractmethod
def init_training_weights(
self, pretrained_weight: tf.Tensor):
"""Initialize training weights for the compressible weight.
It calls the `add_training_weight` to add a training weight for a given
`pretrained_weight`. A `pretrained_weight` can have multiple training
weights. We initialize the training weights for each compressible
weight by just calling this function for each.
Args:
pretrained_weight: tf.Tensor of a pretrained weight of a layer that will
be compressed eventually.
"""
def add_training_weight(
self, *args, **kwargs):
"""Add a training weight for the compressible weight.
When this method is called from the `init_training_weights`, this adds
training weights for the pretrained_weight that is the input of the
`init_training_weights`.
Args:
*args: Passed through to training_model.add_weight.
**kwargs: Passed through to training_model.add_weight.
"""
weight_repr = WeightRepr(args=args, kwargs=kwargs)
if hasattr(self, 'weight_reprs'):
self.weight_reprs.append(weight_repr)
else:
self.weight_reprs = [weight_repr]
@abc.abstractmethod
def project_training_weights(
self, *training_weights: tf.Tensor) -> tf.Tensor:
"""Define a piece of the forward pass during training.
It operates on a single compressible weight.
The default throws an error when training occurs.
Args:
*training_weights: tf.Tensors representing any variables used during
training, for a single compressible weight, in the order returned in
`init_training_weights`.
Returns:
tf.Tensor to set the compressible weight to.
"""
def init_update_ops(self, tensor_weight_pairs):
self.update_ops = []
self.tensor_weight_pairs = tensor_weight_pairs
def update_training_weight(
self, training_weight: tf.Tensor, value: tf.Tensor):
"""Add training weight assign op to the model update list.
This method is for the case that training weight should update to a
specific value not from the model optimizer. It will throw an error if it
can't find the training weight.
This method should called in project_training_weights. During the training,
We collect all update_training_weight calls and make an UpdateOp for each
call. Finally, we put all these update ops to model.add_update.
Args:
training_weight: tf.Tensor representing a training weight.
value: tf.Tensor representing a value to be assigned to the training
weight.
Raises:
ValueError if it can't find the training weight.
"""
for tensor, weight in self.tensor_weight_pairs:
if training_weight is tensor:
self.update_ops.append(weight.assign(value))
return
raise ValueError('Training weight not found. Please call '
'the update_training_weight with given training '
'weight tensor.')
def get_update_ops(self):
return self.update_ops
def compress_training_weights(
self, *training_weights: tf.Tensor) -> List[tf.Tensor]:
"""Define the operations to compress a single weight’s training form.
'compress_training_weights' can refer to making the weight more amenable to
compression or actually compress the weight.
The default is an identity.
Args:
*training_weights: tf.Tensors representing all variables used during
training, for a single compressible weight, in the order returned in
`init_training_weights`.
Returns:
List of tf.Tensors to set to compressed or more compressible form.
"""
return list(training_weights)
@abc.abstractmethod
def decompress_weights(
self, *compressed_weights: tf.Tensor) -> tf.Tensor:
"""Define the operations to decompress a single weight’s compressed form.
The default is an identity.
Args:
*compressed_weights: tf.Tensors representing a single weight’s compressed
form, coming from what’s returned in `compress`.
Returns:
A tf.Tensor representing the decompressed `compressed_weights`.
"""
def create_layer_for_training(
layer: tf.keras.layers.Layer,
algorithm: WeightCompressor) -> tf.keras.layers.Layer:
return optimize.create_layer_for_training(layer, algorithm)
def create_layer_for_inference(
layer_for_training: tf.keras.layers.Layer,
algorithm: WeightCompressor) -> tf.keras.layers.Layer:
return optimize.create_layer_for_inference(layer_for_training, algorithm)
| 34.620321
| 96
| 0.718103
|
import abc
from typing import List, Any
import dataclasses
import tensorflow as tf
from tensorflow_model_optimization.python.core.common.keras.compression.internal import optimize
@dataclasses.dataclass
class WeightRepr:
args: Any = None
kwargs: Any = None
class WeightCompressor(metaclass=abc.ABCMeta):
update_ops = []
def get_compressible_weights(
self, original_layer: tf.keras.layers.Layer) -> List[tf.Variable]:
del original_layer
return []
@abc.abstractmethod
def init_training_weights(
self, pretrained_weight: tf.Tensor):
def add_training_weight(
self, *args, **kwargs):
weight_repr = WeightRepr(args=args, kwargs=kwargs)
if hasattr(self, 'weight_reprs'):
self.weight_reprs.append(weight_repr)
else:
self.weight_reprs = [weight_repr]
@abc.abstractmethod
def project_training_weights(
self, *training_weights: tf.Tensor) -> tf.Tensor:
def init_update_ops(self, tensor_weight_pairs):
self.update_ops = []
self.tensor_weight_pairs = tensor_weight_pairs
def update_training_weight(
self, training_weight: tf.Tensor, value: tf.Tensor):
for tensor, weight in self.tensor_weight_pairs:
if training_weight is tensor:
self.update_ops.append(weight.assign(value))
return
raise ValueError('Training weight not found. Please call '
'the update_training_weight with given training '
'weight tensor.')
def get_update_ops(self):
return self.update_ops
def compress_training_weights(
self, *training_weights: tf.Tensor) -> List[tf.Tensor]:
return list(training_weights)
@abc.abstractmethod
def decompress_weights(
self, *compressed_weights: tf.Tensor) -> tf.Tensor:
def create_layer_for_training(
layer: tf.keras.layers.Layer,
algorithm: WeightCompressor) -> tf.keras.layers.Layer:
return optimize.create_layer_for_training(layer, algorithm)
def create_layer_for_inference(
layer_for_training: tf.keras.layers.Layer,
algorithm: WeightCompressor) -> tf.keras.layers.Layer:
return optimize.create_layer_for_inference(layer_for_training, algorithm)
| true
| true
|
f70caa3fbef0f01fbdcda9de53ac6b5e5a0d5873
| 2,670
|
py
|
Python
|
causalicp/test/tests_icp.py
|
juangamella/icp
|
80548610a13b6b76515f46f56e0f7f486cf9c1c7
|
[
"BSD-3-Clause"
] | 7
|
2021-07-20T07:25:32.000Z
|
2022-03-09T09:01:39.000Z
|
causalicp/test/tests_icp.py
|
juangamella/icp
|
80548610a13b6b76515f46f56e0f7f486cf9c1c7
|
[
"BSD-3-Clause"
] | null | null | null |
causalicp/test/tests_icp.py
|
juangamella/icp
|
80548610a13b6b76515f46f56e0f7f486cf9c1c7
|
[
"BSD-3-Clause"
] | 1
|
2022-01-10T16:18:57.000Z
|
2022-01-10T16:18:57.000Z
|
# Copyright 2021 Juan L. Gamella
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ---------------------------------------------------------------------
# Unit tests
import unittest
import numpy as np
import copy
# Tested functions
from causalicp.data import _Data
class DataTests(unittest.TestCase):
def setUp(self):
self.p = 20
self.n_obs = [2, 3, 4]
self.N = np.sum(self.n_obs)
self.e = len(self.n_obs)
self.target = 3
XX = []
for i, ne in enumerate(self.n_obs):
X = np.tile(np.ones(self.p), (ne, 1))
X *= (i + 1)
X[:, self.target] *= -1
XX.append(X)
self.XX = XX
def test_basic(self):
data = _Data(self.XX)
self.assertEqual(data.N, self.N)
self.assertTrue((data.n_obs == self.n_obs).all())
self.assertEqual(data.p, self.p)
self.assertEqual(data.e, self.e)
self.assertEqual(data.e, len(self.XX))
def test_memory(self):
# Test that the data is copied into the class
XX = copy.deepcopy(self.XX)
data = _Data(XX)
XX[0][0, 0] = -100
data_pooled = data._pooled_data
self.assertFalse(data_pooled[0, 0] == XX[0][0, 0])
| 36.575342
| 71
| 0.673408
|
import unittest
import numpy as np
import copy
from causalicp.data import _Data
class DataTests(unittest.TestCase):
def setUp(self):
self.p = 20
self.n_obs = [2, 3, 4]
self.N = np.sum(self.n_obs)
self.e = len(self.n_obs)
self.target = 3
XX = []
for i, ne in enumerate(self.n_obs):
X = np.tile(np.ones(self.p), (ne, 1))
X *= (i + 1)
X[:, self.target] *= -1
XX.append(X)
self.XX = XX
def test_basic(self):
data = _Data(self.XX)
self.assertEqual(data.N, self.N)
self.assertTrue((data.n_obs == self.n_obs).all())
self.assertEqual(data.p, self.p)
self.assertEqual(data.e, self.e)
self.assertEqual(data.e, len(self.XX))
def test_memory(self):
XX = copy.deepcopy(self.XX)
data = _Data(XX)
XX[0][0, 0] = -100
data_pooled = data._pooled_data
self.assertFalse(data_pooled[0, 0] == XX[0][0, 0])
| true
| true
|
f70cab40dd4694fa9aa6eaedca1b390eac27f21d
| 7,321
|
py
|
Python
|
nova/tests/model_unittest.py
|
joshuamckenty/yolo-octo-wookie
|
8e078e91d367f3deaf1785c46ee7734dd7907f24
|
[
"Apache-2.0"
] | 1
|
2021-06-09T17:58:53.000Z
|
2021-06-09T17:58:53.000Z
|
nova/tests/model_unittest.py
|
joshuamckenty/yolo-octo-wookie
|
8e078e91d367f3deaf1785c46ee7734dd7907f24
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/model_unittest.py
|
joshuamckenty/yolo-octo-wookie
|
8e078e91d367f3deaf1785c46ee7734dd7907f24
|
[
"Apache-2.0"
] | null | null | null |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import time
from nova import vendor
from twisted.internet import defer
from nova import exception
from nova import flags
from nova import test
from nova import utils
from nova.compute import model
from nova.compute import node
FLAGS = flags.FLAGS
class ModelTestCase(test.TrialTestCase):
def setUp(self):
super(ModelTestCase, self).setUp()
self.flags(fake_libvirt=True,
fake_storage=True,
fake_users=True)
def tearDown(self):
model.Instance('i-test').destroy()
model.Host('testhost').destroy()
model.Daemon('testhost', 'nova-testdaemon').destroy()
def create_instance(self):
inst = model.Instance('i-test')
inst['reservation_id'] = 'r-test'
inst['launch_time'] = '10'
inst['user_id'] = 'fake'
inst['project_id'] = 'fake'
inst['instance_type'] = 'm1.tiny'
inst['node_name'] = FLAGS.node_name
inst['mac_address'] = utils.generate_mac()
inst['ami_launch_index'] = 0
inst.save()
return inst
def create_host(self):
host = model.Host('testhost')
host.save()
return host
def create_daemon(self):
daemon = model.Daemon('testhost', 'nova-testdaemon')
daemon.save()
return daemon
@defer.inlineCallbacks
def test_create_instance(self):
"""store with create_instace, then test that a load finds it"""
instance = yield self.create_instance()
old = yield model.Instance(instance.identifier)
self.assertFalse(old.is_new_record())
@defer.inlineCallbacks
def test_delete_instance(self):
"""create, then destroy, then make sure loads a new record"""
instance = yield self.create_instance()
yield instance.destroy()
newinst = yield model.Instance('i-test')
self.assertTrue(newinst.is_new_record())
@defer.inlineCallbacks
def test_instance_added_to_set(self):
"""create, then check that it is listed for the project"""
instance = yield self.create_instance()
found = False
for x in model.InstanceDirectory().all:
if x.identifier == 'i-test':
found = True
self.assert_(found)
@defer.inlineCallbacks
def test_instance_associates_project(self):
"""create, then check that it is listed for the project"""
instance = yield self.create_instance()
found = False
for x in model.InstanceDirectory().by_project(instance.project):
if x.identifier == 'i-test':
found = True
self.assert_(found)
@defer.inlineCallbacks
def test_host_class_finds_hosts(self):
host = yield self.create_host()
self.assertEqual('testhost', model.Host.lookup('testhost').identifier)
@defer.inlineCallbacks
def test_host_class_doesnt_find_missing_hosts(self):
rv = yield model.Host.lookup('woahnelly')
self.assertEqual(None, rv)
@defer.inlineCallbacks
def test_create_host(self):
"""store with create_host, then test that a load finds it"""
host = yield self.create_host()
old = yield model.Host(host.identifier)
self.assertFalse(old.is_new_record())
@defer.inlineCallbacks
def test_delete_host(self):
"""create, then destroy, then make sure loads a new record"""
instance = yield self.create_host()
yield instance.destroy()
newinst = yield model.Host('testhost')
self.assertTrue(newinst.is_new_record())
@defer.inlineCallbacks
def test_host_added_to_set(self):
"""create, then check that it is included in list"""
instance = yield self.create_host()
found = False
for x in model.Host.all():
if x.identifier == 'testhost':
found = True
self.assert_(found)
@defer.inlineCallbacks
def test_create_daemon_two_args(self):
"""create a daemon with two arguments"""
d = yield self.create_daemon()
d = model.Daemon('testhost', 'nova-testdaemon')
self.assertFalse(d.is_new_record())
@defer.inlineCallbacks
def test_create_daemon_single_arg(self):
"""Create a daemon using the combined host:bin format"""
d = yield model.Daemon("testhost:nova-testdaemon")
d.save()
d = model.Daemon('testhost:nova-testdaemon')
self.assertFalse(d.is_new_record())
@defer.inlineCallbacks
def test_equality_of_daemon_single_and_double_args(self):
"""Create a daemon using the combined host:bin arg, find with 2"""
d = yield model.Daemon("testhost:nova-testdaemon")
d.save()
d = model.Daemon('testhost', 'nova-testdaemon')
self.assertFalse(d.is_new_record())
@defer.inlineCallbacks
def test_equality_daemon_of_double_and_single_args(self):
"""Create a daemon using the combined host:bin arg, find with 2"""
d = yield self.create_daemon()
d = model.Daemon('testhost:nova-testdaemon')
self.assertFalse(d.is_new_record())
@defer.inlineCallbacks
def test_delete_daemon(self):
"""create, then destroy, then make sure loads a new record"""
instance = yield self.create_daemon()
yield instance.destroy()
newinst = yield model.Daemon('testhost', 'nova-testdaemon')
self.assertTrue(newinst.is_new_record())
@defer.inlineCallbacks
def test_daemon_heartbeat(self):
"""Create a daemon, sleep, heartbeat, check for update"""
d = yield self.create_daemon()
ts = d['updated_at']
time.sleep(2)
d.heartbeat()
d2 = model.Daemon('testhost', 'nova-testdaemon')
ts2 = d2['updated_at']
self.assert_(ts2 > ts)
@defer.inlineCallbacks
def test_daemon_added_to_set(self):
"""create, then check that it is included in list"""
instance = yield self.create_daemon()
found = False
for x in model.Daemon.all():
if x.identifier == 'testhost:nova-testdaemon':
found = True
self.assert_(found)
@defer.inlineCallbacks
def test_daemon_associates_host(self):
"""create, then check that it is listed for the host"""
instance = yield self.create_daemon()
found = False
for x in model.Daemon.by_host('testhost'):
if x.identifier == 'testhost:nova-testdaemon':
found = True
self.assertTrue(found)
| 35.028708
| 78
| 0.648272
|
import logging
import time
from nova import vendor
from twisted.internet import defer
from nova import exception
from nova import flags
from nova import test
from nova import utils
from nova.compute import model
from nova.compute import node
FLAGS = flags.FLAGS
class ModelTestCase(test.TrialTestCase):
def setUp(self):
super(ModelTestCase, self).setUp()
self.flags(fake_libvirt=True,
fake_storage=True,
fake_users=True)
def tearDown(self):
model.Instance('i-test').destroy()
model.Host('testhost').destroy()
model.Daemon('testhost', 'nova-testdaemon').destroy()
def create_instance(self):
inst = model.Instance('i-test')
inst['reservation_id'] = 'r-test'
inst['launch_time'] = '10'
inst['user_id'] = 'fake'
inst['project_id'] = 'fake'
inst['instance_type'] = 'm1.tiny'
inst['node_name'] = FLAGS.node_name
inst['mac_address'] = utils.generate_mac()
inst['ami_launch_index'] = 0
inst.save()
return inst
def create_host(self):
host = model.Host('testhost')
host.save()
return host
def create_daemon(self):
daemon = model.Daemon('testhost', 'nova-testdaemon')
daemon.save()
return daemon
@defer.inlineCallbacks
def test_create_instance(self):
instance = yield self.create_instance()
old = yield model.Instance(instance.identifier)
self.assertFalse(old.is_new_record())
@defer.inlineCallbacks
def test_delete_instance(self):
instance = yield self.create_instance()
yield instance.destroy()
newinst = yield model.Instance('i-test')
self.assertTrue(newinst.is_new_record())
@defer.inlineCallbacks
def test_instance_added_to_set(self):
instance = yield self.create_instance()
found = False
for x in model.InstanceDirectory().all:
if x.identifier == 'i-test':
found = True
self.assert_(found)
@defer.inlineCallbacks
def test_instance_associates_project(self):
instance = yield self.create_instance()
found = False
for x in model.InstanceDirectory().by_project(instance.project):
if x.identifier == 'i-test':
found = True
self.assert_(found)
@defer.inlineCallbacks
def test_host_class_finds_hosts(self):
host = yield self.create_host()
self.assertEqual('testhost', model.Host.lookup('testhost').identifier)
@defer.inlineCallbacks
def test_host_class_doesnt_find_missing_hosts(self):
rv = yield model.Host.lookup('woahnelly')
self.assertEqual(None, rv)
@defer.inlineCallbacks
def test_create_host(self):
host = yield self.create_host()
old = yield model.Host(host.identifier)
self.assertFalse(old.is_new_record())
@defer.inlineCallbacks
def test_delete_host(self):
instance = yield self.create_host()
yield instance.destroy()
newinst = yield model.Host('testhost')
self.assertTrue(newinst.is_new_record())
@defer.inlineCallbacks
def test_host_added_to_set(self):
instance = yield self.create_host()
found = False
for x in model.Host.all():
if x.identifier == 'testhost':
found = True
self.assert_(found)
@defer.inlineCallbacks
def test_create_daemon_two_args(self):
d = yield self.create_daemon()
d = model.Daemon('testhost', 'nova-testdaemon')
self.assertFalse(d.is_new_record())
@defer.inlineCallbacks
def test_create_daemon_single_arg(self):
d = yield model.Daemon("testhost:nova-testdaemon")
d.save()
d = model.Daemon('testhost:nova-testdaemon')
self.assertFalse(d.is_new_record())
@defer.inlineCallbacks
def test_equality_of_daemon_single_and_double_args(self):
d = yield model.Daemon("testhost:nova-testdaemon")
d.save()
d = model.Daemon('testhost', 'nova-testdaemon')
self.assertFalse(d.is_new_record())
@defer.inlineCallbacks
def test_equality_daemon_of_double_and_single_args(self):
d = yield self.create_daemon()
d = model.Daemon('testhost:nova-testdaemon')
self.assertFalse(d.is_new_record())
@defer.inlineCallbacks
def test_delete_daemon(self):
instance = yield self.create_daemon()
yield instance.destroy()
newinst = yield model.Daemon('testhost', 'nova-testdaemon')
self.assertTrue(newinst.is_new_record())
@defer.inlineCallbacks
def test_daemon_heartbeat(self):
d = yield self.create_daemon()
ts = d['updated_at']
time.sleep(2)
d.heartbeat()
d2 = model.Daemon('testhost', 'nova-testdaemon')
ts2 = d2['updated_at']
self.assert_(ts2 > ts)
@defer.inlineCallbacks
def test_daemon_added_to_set(self):
instance = yield self.create_daemon()
found = False
for x in model.Daemon.all():
if x.identifier == 'testhost:nova-testdaemon':
found = True
self.assert_(found)
@defer.inlineCallbacks
def test_daemon_associates_host(self):
instance = yield self.create_daemon()
found = False
for x in model.Daemon.by_host('testhost'):
if x.identifier == 'testhost:nova-testdaemon':
found = True
self.assertTrue(found)
| true
| true
|
f70caba48ad101e239a31af848fefd6f6f8a6547
| 20,208
|
py
|
Python
|
src/oci/devops/models/repository_summary.py
|
LaudateCorpus1/oci-python-sdk
|
b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
src/oci/devops/models/repository_summary.py
|
LaudateCorpus1/oci-python-sdk
|
b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
src/oci/devops/models/repository_summary.py
|
LaudateCorpus1/oci-python-sdk
|
b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class RepositorySummary(object):
    """
    Summary of a repository.

    Plain data-transfer object in the generated OCI SDK style:
    ``swagger_types`` declares each attribute's Python type and
    ``attribute_map`` gives the wire (JSON) field name each attribute
    (de)serializes to.  The ``init_model_state_from_kwargs`` decorator copies
    matching keyword arguments onto the instance at construction time.
    """

    def __init__(self, **kwargs):
        """
        Initializes a new RepositorySummary object with values from keyword
        arguments.  One keyword is supported per property defined below:
        id, name, compartment_id, project_id, namespace, project_name,
        description, default_branch, repository_type, ssh_url, http_url,
        mirror_repository_config, time_created, time_updated,
        lifecycle_state, lifecycle_details, freeform_tags, defined_tags,
        system_tags.
        """
        # Declared type of every model attribute (used by the SDK serializer).
        self.swagger_types = {
            'id': 'str',
            'name': 'str',
            'compartment_id': 'str',
            'project_id': 'str',
            'namespace': 'str',
            'project_name': 'str',
            'description': 'str',
            'default_branch': 'str',
            'repository_type': 'str',
            'ssh_url': 'str',
            'http_url': 'str',
            'mirror_repository_config': 'MirrorRepositoryConfig',
            'time_created': 'datetime',
            'time_updated': 'datetime',
            'lifecycle_state': 'str',
            'lifecycle_details': 'str',
            'freeform_tags': 'dict(str, str)',
            'defined_tags': 'dict(str, dict(str, object))',
            'system_tags': 'dict(str, dict(str, object))'
        }

        # Model attribute name -> JSON wire-field name.
        self.attribute_map = {
            'id': 'id',
            'name': 'name',
            'compartment_id': 'compartmentId',
            'project_id': 'projectId',
            'namespace': 'namespace',
            'project_name': 'projectName',
            'description': 'description',
            'default_branch': 'defaultBranch',
            'repository_type': 'repositoryType',
            'ssh_url': 'sshUrl',
            'http_url': 'httpUrl',
            'mirror_repository_config': 'mirrorRepositoryConfig',
            'time_created': 'timeCreated',
            'time_updated': 'timeUpdated',
            'lifecycle_state': 'lifecycleState',
            'lifecycle_details': 'lifecycleDetails',
            'freeform_tags': 'freeformTags',
            'defined_tags': 'definedTags',
            'system_tags': 'systemTags'
        }

        # Backing fields for the pass-through properties below.
        self._id = None
        self._name = None
        self._compartment_id = None
        self._project_id = None
        self._namespace = None
        self._project_name = None
        self._description = None
        self._default_branch = None
        self._repository_type = None
        self._ssh_url = None
        self._http_url = None
        self._mirror_repository_config = None
        self._time_created = None
        self._time_updated = None
        self._lifecycle_state = None
        self._lifecycle_details = None
        self._freeform_tags = None
        self._defined_tags = None
        self._system_tags = None

    @property
    def id(self):
        """str: **[Required]** The OCID of the repository. Unique and immutable."""
        return self._id

    @id.setter
    def id(self, id):
        self._id = id

    @property
    def name(self):
        """str: Unique name of a repository. This value is mutable."""
        return self._name

    @name.setter
    def name(self, name):
        self._name = name

    @property
    def compartment_id(self):
        """str: **[Required]** The OCID of the repository's compartment."""
        return self._compartment_id

    @compartment_id.setter
    def compartment_id(self, compartment_id):
        self._compartment_id = compartment_id

    @property
    def project_id(self):
        """str: **[Required]** The OCID of the DevOps project containing the repository."""
        return self._project_id

    @project_id.setter
    def project_id(self, project_id):
        self._project_id = project_id

    @property
    def namespace(self):
        """str: Tenancy unique namespace."""
        return self._namespace

    @namespace.setter
    def namespace(self, namespace):
        self._namespace = namespace

    @property
    def project_name(self):
        """str: Unique project name in a namespace."""
        return self._project_name

    @project_name.setter
    def project_name(self, project_name):
        self._project_name = project_name

    @property
    def description(self):
        """str: Details of the repository. Avoid entering confidential information."""
        return self._description

    @description.setter
    def description(self, description):
        self._description = description

    @property
    def default_branch(self):
        """str: The default branch of the repository."""
        return self._default_branch

    @default_branch.setter
    def default_branch(self, default_branch):
        self._default_branch = default_branch

    @property
    def repository_type(self):
        """str: Type of repository."""
        return self._repository_type

    @repository_type.setter
    def repository_type(self, repository_type):
        self._repository_type = repository_type

    @property
    def ssh_url(self):
        """str: SSH URL that you use to git clone, pull and push."""
        return self._ssh_url

    @ssh_url.setter
    def ssh_url(self, ssh_url):
        self._ssh_url = ssh_url

    @property
    def http_url(self):
        """str: HTTP URL that you use to git clone, pull and push."""
        return self._http_url

    @http_url.setter
    def http_url(self, http_url):
        self._http_url = http_url

    @property
    def mirror_repository_config(self):
        """oci.devops.models.MirrorRepositoryConfig: Mirror configuration of this repository."""
        return self._mirror_repository_config

    @mirror_repository_config.setter
    def mirror_repository_config(self, mirror_repository_config):
        self._mirror_repository_config = mirror_repository_config

    @property
    def time_created(self):
        """datetime: The time the repository was created (RFC 3339)."""
        return self._time_created

    @time_created.setter
    def time_created(self, time_created):
        self._time_created = time_created

    @property
    def time_updated(self):
        """datetime: The time the repository was updated (RFC 3339)."""
        return self._time_updated

    @time_updated.setter
    def time_updated(self, time_updated):
        self._time_updated = time_updated

    @property
    def lifecycle_state(self):
        """str: The current state of the repository."""
        return self._lifecycle_state

    @lifecycle_state.setter
    def lifecycle_state(self, lifecycle_state):
        self._lifecycle_state = lifecycle_state

    @property
    def lifecycle_details(self):
        """str: Message describing the current state in more detail (e.g. actionable info for Failed state)."""
        return self._lifecycle_details

    @lifecycle_details.setter
    def lifecycle_details(self, lifecycle_details):
        self._lifecycle_details = lifecycle_details

    @property
    def freeform_tags(self):
        """dict(str, str): Free-form tags, e.g. `{\"bar-key\": \"value\"}`."""
        return self._freeform_tags

    @freeform_tags.setter
    def freeform_tags(self, freeform_tags):
        self._freeform_tags = freeform_tags

    @property
    def defined_tags(self):
        """dict(str, dict(str, object)): Defined tags, e.g. `{\"foo-namespace\": {\"bar-key\": \"value\"}}`."""
        return self._defined_tags

    @defined_tags.setter
    def defined_tags(self, defined_tags):
        self._defined_tags = defined_tags

    @property
    def system_tags(self):
        """dict(str, dict(str, object)): System tags, e.g. `{\"orcl-cloud\": {\"free-tier-retained\": \"true\"}}`."""
        return self._system_tags

    @system_tags.setter
    def system_tags(self, system_tags):
        self._system_tags = system_tags

    def __repr__(self):
        return formatted_flat_dict(self)

    def __eq__(self, other):
        """Attribute-wise equality with other RepositorySummary instances.

        Fix: the previous implementation read ``other.__dict__``
        unconditionally, which raised AttributeError when compared with
        objects that have no ``__dict__`` (e.g. ``summary == 5``).
        Returning NotImplemented lets Python fall back to its default
        handling (ultimately False) for foreign types, and still returns
        False for None as before.
        """
        if not isinstance(other, RepositorySummary):
            return NotImplemented
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
| 31.330233
| 245
| 0.636431
|
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class RepositorySummary(object):
    """Summary of a repository (OCI SDK data-transfer object).

    ``swagger_types`` declares each attribute's Python type; ``attribute_map``
    maps attributes to their wire (JSON) field names.  Keyword arguments are
    copied onto the instance by the ``init_model_state_from_kwargs`` decorator.
    """
    def __init__(self, **kwargs):
        # Declared type of every model attribute (used by the SDK serializer).
        self.swagger_types = {
            'id': 'str',
            'name': 'str',
            'compartment_id': 'str',
            'project_id': 'str',
            'namespace': 'str',
            'project_name': 'str',
            'description': 'str',
            'default_branch': 'str',
            'repository_type': 'str',
            'ssh_url': 'str',
            'http_url': 'str',
            'mirror_repository_config': 'MirrorRepositoryConfig',
            'time_created': 'datetime',
            'time_updated': 'datetime',
            'lifecycle_state': 'str',
            'lifecycle_details': 'str',
            'freeform_tags': 'dict(str, str)',
            'defined_tags': 'dict(str, dict(str, object))',
            'system_tags': 'dict(str, dict(str, object))'
        }
        # Model attribute name -> JSON wire-field name.
        self.attribute_map = {
            'id': 'id',
            'name': 'name',
            'compartment_id': 'compartmentId',
            'project_id': 'projectId',
            'namespace': 'namespace',
            'project_name': 'projectName',
            'description': 'description',
            'default_branch': 'defaultBranch',
            'repository_type': 'repositoryType',
            'ssh_url': 'sshUrl',
            'http_url': 'httpUrl',
            'mirror_repository_config': 'mirrorRepositoryConfig',
            'time_created': 'timeCreated',
            'time_updated': 'timeUpdated',
            'lifecycle_state': 'lifecycleState',
            'lifecycle_details': 'lifecycleDetails',
            'freeform_tags': 'freeformTags',
            'defined_tags': 'definedTags',
            'system_tags': 'systemTags'
        }
        # Backing fields for the pass-through properties below.
        self._id = None
        self._name = None
        self._compartment_id = None
        self._project_id = None
        self._namespace = None
        self._project_name = None
        self._description = None
        self._default_branch = None
        self._repository_type = None
        self._ssh_url = None
        self._http_url = None
        self._mirror_repository_config = None
        self._time_created = None
        self._time_updated = None
        self._lifecycle_state = None
        self._lifecycle_details = None
        self._freeform_tags = None
        self._defined_tags = None
        self._system_tags = None
    # --- simple pass-through accessors: no validation or extra logic ---
    @property
    def id(self):
        return self._id
    @id.setter
    def id(self, id):
        self._id = id
    @property
    def name(self):
        return self._name
    @name.setter
    def name(self, name):
        self._name = name
    @property
    def compartment_id(self):
        return self._compartment_id
    @compartment_id.setter
    def compartment_id(self, compartment_id):
        self._compartment_id = compartment_id
    @property
    def project_id(self):
        return self._project_id
    @project_id.setter
    def project_id(self, project_id):
        self._project_id = project_id
    @property
    def namespace(self):
        return self._namespace
    @namespace.setter
    def namespace(self, namespace):
        self._namespace = namespace
    @property
    def project_name(self):
        return self._project_name
    @project_name.setter
    def project_name(self, project_name):
        self._project_name = project_name
    @property
    def description(self):
        return self._description
    @description.setter
    def description(self, description):
        self._description = description
    @property
    def default_branch(self):
        return self._default_branch
    @default_branch.setter
    def default_branch(self, default_branch):
        self._default_branch = default_branch
    @property
    def repository_type(self):
        return self._repository_type
    @repository_type.setter
    def repository_type(self, repository_type):
        self._repository_type = repository_type
    @property
    def ssh_url(self):
        return self._ssh_url
    @ssh_url.setter
    def ssh_url(self, ssh_url):
        self._ssh_url = ssh_url
    @property
    def http_url(self):
        return self._http_url
    @http_url.setter
    def http_url(self, http_url):
        self._http_url = http_url
    @property
    def mirror_repository_config(self):
        return self._mirror_repository_config
    @mirror_repository_config.setter
    def mirror_repository_config(self, mirror_repository_config):
        self._mirror_repository_config = mirror_repository_config
    @property
    def time_created(self):
        return self._time_created
    @time_created.setter
    def time_created(self, time_created):
        self._time_created = time_created
    @property
    def time_updated(self):
        return self._time_updated
    @time_updated.setter
    def time_updated(self, time_updated):
        self._time_updated = time_updated
    @property
    def lifecycle_state(self):
        return self._lifecycle_state
    @lifecycle_state.setter
    def lifecycle_state(self, lifecycle_state):
        self._lifecycle_state = lifecycle_state
    @property
    def lifecycle_details(self):
        return self._lifecycle_details
    @lifecycle_details.setter
    def lifecycle_details(self, lifecycle_details):
        self._lifecycle_details = lifecycle_details
    @property
    def freeform_tags(self):
        return self._freeform_tags
    @freeform_tags.setter
    def freeform_tags(self, freeform_tags):
        self._freeform_tags = freeform_tags
    @property
    def defined_tags(self):
        return self._defined_tags
    @defined_tags.setter
    def defined_tags(self, defined_tags):
        self._defined_tags = defined_tags
    @property
    def system_tags(self):
        return self._system_tags
    @system_tags.setter
    def system_tags(self, system_tags):
        self._system_tags = system_tags
    def __repr__(self):
        return formatted_flat_dict(self)
    def __eq__(self, other):
        if other is None:
            return False
        # NOTE(review): accessing other.__dict__ raises AttributeError for
        # objects without a __dict__ (e.g. int); consider an isinstance check.
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not self == other
| true
| true
|
f70cac565f90e0edf08649c431242a2f214680bd
| 23,932
|
py
|
Python
|
tatk/policy/mdrg/multiwoz/model.py
|
yqzhangthu/tatk
|
4d27e89604a33f19f1c7b8fe5dc92d4ba6c6f10a
|
[
"Apache-2.0"
] | 81
|
2019-03-12T13:40:29.000Z
|
2022-01-17T10:59:21.000Z
|
tatk/policy/mdrg/multiwoz/model.py
|
zqwerty/tatk
|
fafabc45d02ad889f59354acac4e3b1367e7d4bf
|
[
"Apache-2.0"
] | 35
|
2019-03-13T14:05:05.000Z
|
2021-08-25T15:38:14.000Z
|
tatk/policy/mdrg/multiwoz/model.py
|
zqwerty/tatk
|
fafabc45d02ad889f59354acac4e3b1367e7d4bf
|
[
"Apache-2.0"
] | 41
|
2019-03-13T09:40:24.000Z
|
2022-03-07T17:59:07.000Z
|
import json
import math
import operator
import os
import random
from io import open
from queue import PriorityQueue
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import optim
import functools
import tatk.policy.mdrg.multiwoz.default_policy as policy
SOS_token = 0
EOS_token = 1
UNK_token = 2
PAD_token = 3
# Shawn beam search decoding
class BeamSearchNode(object):
    """One hypothesis in the beam-search tree.

    Stores the decoder hidden state, a back-pointer to the previous node,
    the token id chosen at this step, the cumulative log-probability and
    the hypothesis length.
    """

    def __init__(self, h, prevNode, wordid, logp, leng):
        self.h = h
        self.prevNode = prevNode
        self.wordid = wordid
        self.logp = logp
        self.leng = leng

    def eval(self, repeatPenalty, tokenReward, scoreTable, alpha=1.0):
        """Length-normalised score of the hypothesis (higher is better).

        The penalty/reward arguments are accepted for API compatibility but
        the reward term is currently hard-wired to zero.
        """
        reward = 0
        alpha = 1.0
        # normalise by (length - 1); the epsilon avoids division by zero
        denom = float(self.leng - 1 + 1e-6)
        return self.logp / denom + alpha * reward
def init_lstm(cell, gain=1):
    """Initialise an LSTM: orthogonal recurrent weights plus a positive
    forget-gate bias of 1.0 (Jozefowicz et al., 2015)."""
    init_gru(cell, gain)

    for weights in cell.all_weights:
        # all_weights entries are [w_ih, w_hh, b_ih, b_hh]
        ih_bias, hh_bias = weights[2], weights[3]
        n = len(ih_bias)
        # the forget gate occupies the second quarter of each bias vector
        ih_bias[n // 4:n // 2].data.fill_(1.0)
        hh_bias[n // 4:n // 2].data.fill_(1.0)
def init_gru(gru, gain=1):
    """Re-initialise the recurrent (hidden-to-hidden) weights orthogonally,
    one hidden_size x hidden_size gate block at a time."""
    gru.reset_parameters()
    for weights in gru.all_weights:
        hh = weights[1]  # all_weights entries are [w_ih, w_hh, b_ih, b_hh]
        for start in range(0, hh.size(0), gru.hidden_size):
            torch.nn.init.orthogonal_(hh[start:start + gru.hidden_size], gain=gain)
def whatCellType(input_size, hidden_size, cell_type, dropout_rate):
    """Factory for recurrent cells.

    Builds the requested cell type ('rnn', 'gru', 'lstm', 'bigru', 'bilstm'),
    applies the corresponding weight initialisation, and returns it.
    Unknown cell types return None (same as the original implicit fallthrough).
    """
    registry = {
        'rnn': (nn.RNN, init_gru, False),
        'gru': (nn.GRU, init_gru, False),
        'lstm': (nn.LSTM, init_lstm, False),
        'bigru': (nn.GRU, init_gru, True),
        'bilstm': (nn.LSTM, init_lstm, True),
    }
    if cell_type not in registry:
        return None
    ctor, initializer, bidirectional = registry[cell_type]
    kwargs = dict(dropout=dropout_rate, batch_first=False)
    if bidirectional:
        kwargs['bidirectional'] = True
    cell = ctor(input_size, hidden_size, **kwargs)
    initializer(cell)
    return cell
class EncoderRNN(nn.Module):
    """Sentence encoder: embedding lookup followed by an (optionally
    bidirectional) recurrent cell built by whatCellType."""

    def __init__(self, input_size, embedding_size, hidden_size, cell_type, depth, dropout):
        super(EncoderRNN, self).__init__()
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.embed_size = embedding_size
        self.n_layers = depth
        self.dropout = dropout
        self.bidirectional = False
        if 'bi' in cell_type:
            self.bidirectional = True
        # index 3 is the PAD token; its embedding stays zero and is not trained
        padding_idx = 3
        self.embedding = nn.Embedding(input_size, embedding_size, padding_idx=padding_idx)
        self.rnn = whatCellType(embedding_size, hidden_size,
                    cell_type, dropout_rate=self.dropout)

    def forward(self, input_seqs, input_lens, hidden=None):
        """
        forward procedure. **No need for inputs to be sorted**
        :param input_seqs: Variable of [T,B]
        :param hidden:
        :param input_lens: *numpy array* of len for each input sequence
        :return: (outputs [T,B,H], final hidden state), both restored to the
            caller's original batch order
        """
        input_lens = np.asarray(input_lens)
        input_seqs = input_seqs.transpose(0,1)
        #batch_size = input_seqs.size(1)
        embedded = self.embedding(input_seqs)
        embedded = embedded.transpose(0, 1)  # [B,T,E]
        # pack_padded_sequence requires batches sorted by decreasing length,
        # so sort here and remember the permutation needed to undo it later
        sort_idx = np.argsort(-input_lens)
        unsort_idx = torch.LongTensor(np.argsort(sort_idx))
        input_lens = input_lens[sort_idx]
        sort_idx = torch.LongTensor(sort_idx)
        embedded = embedded[sort_idx].transpose(0, 1)  # [T,B,E]
        packed = torch.nn.utils.rnn.pack_padded_sequence(embedded, input_lens)
        outputs, hidden = self.rnn(packed, hidden)
        outputs, _ = torch.nn.utils.rnn.pad_packed_sequence(outputs)
        if self.bidirectional:
            # sum the two directions instead of concatenating, keeping width H
            outputs = outputs[:, :, :self.hidden_size] + outputs[:, :, self.hidden_size:]
        # restore the caller's original batch order
        outputs = outputs.transpose(0, 1)[unsort_idx].transpose(0, 1).contiguous()
        if isinstance(hidden, tuple):
            # LSTM: unsort both the h and c states
            hidden = list(hidden)
            hidden[0] = hidden[0].transpose(0, 1)[unsort_idx].transpose(0, 1).contiguous()
            hidden[1] = hidden[1].transpose(0, 1)[unsort_idx].transpose(0, 1).contiguous()
            hidden = tuple(hidden)
        else:
            hidden = hidden.transpose(0, 1)[unsort_idx].transpose(0, 1).contiguous()
        return outputs, hidden
class Attn(nn.Module):
    """Additive (concat / Bahdanau-style) attention over encoder outputs."""

    def __init__(self, method, hidden_size):
        super(Attn, self).__init__()
        self.method = method
        self.hidden_size = hidden_size
        self.attn = nn.Linear(self.hidden_size * 2, hidden_size)
        self.v = nn.Parameter(torch.rand(hidden_size))
        stdv = 1. / math.sqrt(self.v.size(0))
        self.v.data.normal_(mean=0, std=stdv)

    def forward(self, hidden, encoder_outputs):
        """Return normalised attention weights of shape [B,1,T].

        :param hidden: previous decoder state, [layers*directions,B,H]
        :param encoder_outputs: encoder states, [T,B,H]
        """
        steps = encoder_outputs.size(0)
        # tile the decoder state across every encoder time step: [B,T,H]
        query = hidden.repeat(steps, 1, 1).transpose(0, 1)
        keys = encoder_outputs.transpose(0, 1)  # [T,B,H] -> [B,T,H]
        energies = self.score(query, keys)
        return F.softmax(energies, dim=1).unsqueeze(1)

    def score(self, hidden, encoder_outputs):
        """Additive scoring: v . tanh(W[query; key]) for every time step."""
        joint = torch.cat([hidden, encoder_outputs], 2)          # [B,T,2H]
        energy = torch.tanh(self.attn(joint)).transpose(2, 1)    # [B,H,T]
        v = self.v.repeat(encoder_outputs.data.shape[0], 1).unsqueeze(1)  # [B,1,H]
        return torch.bmm(v, energy).squeeze(1)                   # [B,T]
class SeqAttnDecoderRNN(nn.Module):
    """Decoder with additive attention over the encoder outputs.

    Each step embeds the previous token, attends over the encoder states
    using the current decoder hidden state, and feeds [embedding; context]
    into the recurrent cell before projecting to vocabulary log-probs.
    """

    def __init__(self, embedding_size, hidden_size, output_size, cell_type, dropout_p=0.1, max_length=30):
        super(SeqAttnDecoderRNN, self).__init__()
        # Define parameters
        self.hidden_size = hidden_size
        self.embed_size = embedding_size
        self.output_size = output_size
        self.n_layers = 1
        self.dropout_p = dropout_p

        # Define layers
        self.embedding = nn.Embedding(output_size, embedding_size)
        self.dropout = nn.Dropout(dropout_p)

        if 'bi' in cell_type:  # we dont need bidirectionality in decoding
            cell_type = cell_type.strip('bi')
        # the RNN consumes the concatenation of the embedding and the context
        self.rnn = whatCellType(embedding_size + hidden_size, hidden_size, cell_type, dropout_rate=self.dropout_p)
        self.out = nn.Linear(hidden_size, output_size)

        self.score = nn.Linear(self.hidden_size + self.hidden_size, self.hidden_size)
        self.attn_combine = nn.Linear(embedding_size + hidden_size, embedding_size)

        # attention (additive / concat scoring, cf. the Attn module)
        self.method = 'concat'
        self.attn = nn.Linear(self.hidden_size * 2, hidden_size)
        self.v = nn.Parameter(torch.rand(hidden_size))
        stdv = 1. / math.sqrt(self.v.size(0))
        self.v.data.normal_(mean=0, std=stdv)

    def forward(self, input, hidden, encoder_outputs):
        """One decoding step.

        :param input: previous token ids, [B,1]
        :param hidden: previous decoder state (tensor, or (h, c) for LSTM)
        :param encoder_outputs: encoder states, [T,B,H]
        :return: (log-probabilities over the vocabulary [B,V], new hidden)
        """
        if isinstance(hidden, tuple):
            h_t = hidden[0]  # LSTM: attend with the h state only
        else:
            h_t = hidden
        encoder_outputs = encoder_outputs.transpose(0, 1)
        embedded = self.embedding(input)  # .view(1, 1, -1)
        # embedded = F.dropout(embedded, self.dropout_p)

        # SCORE 3: additive attention energies over all encoder steps
        max_len = encoder_outputs.size(1)
        h_t = h_t.transpose(0, 1)  # [1,B,D] -> [B,1,D]
        h_t = h_t.repeat(1, max_len, 1)  # [B,1,D]  -> [B,T,D]
        energy = self.attn(torch.cat((h_t, encoder_outputs), 2))  # [B,T,2D] -> [B,T,D]
        energy = torch.tanh(energy)
        energy = energy.transpose(2, 1)  # [B,H,T]
        v = self.v.repeat(encoder_outputs.size(0), 1).unsqueeze(1)  # [B,1,H]
        energy = torch.bmm(v, energy)  # [B,1,T]
        attn_weights = F.softmax(energy, dim=2)  # [B,1,T]

        # getting context
        context = torch.bmm(attn_weights, encoder_outputs)  # [B,1,H]

        # context = torch.bmm(attn_weights.unsqueeze(0), encoder_outputs.unsqueeze(0)) #[B,1,H]
        # Combine embedded input word and attended context, run through RNN
        rnn_input = torch.cat((embedded, context), 2)
        rnn_input = rnn_input.transpose(0, 1)
        output, hidden = self.rnn(rnn_input, hidden)
        output = output.squeeze(0)  # (1,B,V)->(B,V)

        output = F.log_softmax(self.out(output), dim=1)
        return output, hidden  # , attn_weights
class DecoderRNN(nn.Module):
    """Vanilla decoder: embedding -> dropout -> RNN -> linear -> log-softmax."""

    def __init__(self, embedding_size, hidden_size, output_size, cell_type, dropout=0.1):
        super(DecoderRNN, self).__init__()
        self.hidden_size = hidden_size
        self.cell_type = cell_type
        # index 3 is the PAD token; its embedding stays zero and is not trained
        self.embedding = nn.Embedding(num_embeddings=output_size,
                                      embedding_dim=embedding_size,
                                      padding_idx=3
                                      )
        if 'bi' in cell_type:  # decoding is always unidirectional
            cell_type = cell_type.strip('bi')
        self.rnn = whatCellType(embedding_size, hidden_size, cell_type, dropout_rate=dropout)
        self.dropout_rate = dropout
        self.out = nn.Linear(hidden_size, output_size)

    def forward(self, input, hidden, not_used):
        """One decoding step; `not_used` keeps the attention-decoder signature."""
        emb = self.embedding(input).transpose(0, 1)  # [B,1] -> [1,B,E]
        emb = F.dropout(emb, self.dropout_rate)
        rnn_out, hidden = self.rnn(emb, hidden)
        logits = self.out(rnn_out.squeeze(0))        # (1,B,H) -> (B,V)
        return F.log_softmax(logits, dim=1), hidden
class Model(nn.Module):
    def __init__(self, args, input_lang_index2word, output_lang_index2word, input_lang_word2index, output_lang_word2index):
        """Sequence-to-sequence response generation model.

        :param args: namespace of hyper-parameters (sizes, cell type, device,
            optimiser settings, beam width, ...)
        :param input_lang_index2word / input_lang_word2index: input vocabulary
        :param output_lang_index2word / output_lang_word2index: output vocabulary
        """
        super(Model, self).__init__()
        self.args = args
        self.max_len = args.max_len

        # vocabulary lookups in both directions
        self.output_lang_index2word = output_lang_index2word
        self.input_lang_index2word = input_lang_index2word

        self.output_lang_word2index = output_lang_word2index
        self.input_lang_word2index = input_lang_word2index

        self.hid_size_enc = args.hid_size_enc
        self.hid_size_dec = args.hid_size_dec
        self.hid_size_pol = args.hid_size_pol

        self.emb_size = args.emb_size
        self.db_size = args.db_size
        self.bs_size = args.bs_size
        self.cell_type = args.cell_type
        if 'bi' in self.cell_type:
            self.num_directions = 2
        else:
            self.num_directions = 1
        self.depth = args.depth
        self.use_attn = args.use_attn
        self.attn_type = args.attention_type

        self.dropout = args.dropout
        self.device = torch.device("cuda" if args.cuda else "cpu")

        self.model_dir = args.model_dir
        self.model_name = args.model_name
        self.teacher_forcing_ratio = args.teacher_ratio
        self.vocab_size = args.vocab_size
        self.epsln = 10E-5


        torch.manual_seed(args.seed)
        self.build_model()
        self.getCount()
        # NOTE(review): the bare except silently disables beam search when
        # beam_width is missing, non-numeric, or <= 0 — intentional fallback.
        try:
            assert self.args.beam_width > 0
            self.beam_search = True
        except:
            self.beam_search = False

        self.global_step = 0
def cuda_(self, var):
return var.cuda() if self.args.cuda else var
def build_model(self):
self.encoder = EncoderRNN(len(self.input_lang_index2word), self.emb_size, self.hid_size_enc,
self.cell_type, self.depth, self.dropout).to(self.device)
self.policy = policy.DefaultPolicy(self.hid_size_pol, self.hid_size_enc, self.db_size, self.bs_size).to(self.device)
if self.use_attn:
if self.attn_type == 'bahdanau':
self.decoder = SeqAttnDecoderRNN(self.emb_size, self.hid_size_dec, len(self.output_lang_index2word), self.cell_type, self.dropout, self.max_len).to(self.device)
else:
self.decoder = DecoderRNN(self.emb_size, self.hid_size_dec, len(self.output_lang_index2word), self.cell_type, self.dropout).to(self.device)
if self.args.mode == 'train':
self.gen_criterion = nn.NLLLoss(ignore_index=3, size_average=True) # logsoftmax is done in decoder part
self.setOptimizers()
def train(self, input_tensor, input_lengths, target_tensor, target_lengths, db_tensor, bs_tensor, dial_name=None):
proba, _, decoded_sent = self.forward(input_tensor, input_lengths, target_tensor, target_lengths, db_tensor, bs_tensor)
proba = proba.view(-1, self.vocab_size)
self.gen_loss = self.gen_criterion(proba, target_tensor.view(-1))
self.loss = self.gen_loss
self.loss.backward()
grad = self.clipGradients()
self.optimizer.step()
self.optimizer.zero_grad()
#self.printGrad()
return self.loss.item(), 0, grad
def setOptimizers(self):
self.optimizer_policy = None
if self.args.optim == 'sgd':
self.optimizer = optim.SGD(lr=self.args.lr_rate, params=filter(lambda x: x.requires_grad, self.parameters()), weight_decay=self.args.l2_norm)
elif self.args.optim == 'adadelta':
self.optimizer = optim.Adadelta(lr=self.args.lr_rate, params=filter(lambda x: x.requires_grad, self.parameters()), weight_decay=self.args.l2_norm)
elif self.args.optim == 'adam':
self.optimizer = optim.Adam(lr=self.args.lr_rate, params=filter(lambda x: x.requires_grad, self.parameters()), weight_decay=self.args.l2_norm)
def forward(self, input_tensor, input_lengths, target_tensor, target_lengths, db_tensor, bs_tensor):
"""Given the user sentence, user belief state and database pointer,
encode the sentence, decide what policy vector construct and
feed it as the first hiddent state to the decoder."""
target_length = target_tensor.size(1)
# for fixed encoding this is zero so it does not contribute
batch_size, seq_len = input_tensor.size()
# ENCODER
encoder_outputs, encoder_hidden = self.encoder(input_tensor, input_lengths)
# POLICY
decoder_hidden = self.policy(encoder_hidden, db_tensor, bs_tensor)
# GENERATOR
# Teacher forcing: Feed the target as the next input
_, target_len = target_tensor.size()
decoder_input = torch.LongTensor([[SOS_token] for _ in range(batch_size)], device=self.device)
proba = torch.zeros(batch_size, target_length, self.vocab_size) # [B,T,V]
for t in range(target_len):
decoder_output, decoder_hidden = self.decoder(decoder_input, decoder_hidden, encoder_outputs)
use_teacher_forcing = True if random.random() < self.args.teacher_ratio else False
if use_teacher_forcing:
decoder_input = target_tensor[:, t].view(-1, 1) # [B,1] Teacher forcing
else:
# Without teacher forcing: use its own predictions as the next input
topv, topi = decoder_output.topk(1)
decoder_input = topi.squeeze().detach() # detach from history as input
proba[:, t, :] = decoder_output
decoded_sent = None
return proba, None, decoded_sent
def predict(self, input_tensor, input_lengths, target_tensor, target_lengths, db_tensor, bs_tensor):
with torch.no_grad():
# ENCODER
encoder_outputs, encoder_hidden = self.encoder(input_tensor, input_lengths)
# POLICY
decoder_hidden = self.policy(encoder_hidden, db_tensor, bs_tensor)
# GENERATION
decoded_words = self.decode(target_tensor, decoder_hidden, encoder_outputs)
return decoded_words, 0
def decode(self, target_tensor, decoder_hidden, encoder_outputs):
decoder_hiddens = decoder_hidden
if self.beam_search: # wenqiang style - sequicity
decoded_sentences = []
for idx in range(target_tensor.size(0)):
if isinstance(decoder_hiddens, tuple): # LSTM case
decoder_hidden = (decoder_hiddens[0][:,idx, :].unsqueeze(0),decoder_hiddens[1][:,idx, :].unsqueeze(0))
else:
decoder_hidden = decoder_hiddens[:, idx, :].unsqueeze(0)
encoder_output = encoder_outputs[:,idx, :].unsqueeze(1)
# Beam start
self.topk = 1
endnodes = [] # stored end nodes
number_required = min((self.topk + 1), self.topk - len(endnodes))
decoder_input = torch.LongTensor([[SOS_token]], device=self.device)
# starting node hidden vector, prevNode, wordid, logp, leng,
node = BeamSearchNode(decoder_hidden, None, decoder_input, 0, 1)
nodes = PriorityQueue() # start the queue
nodes.put((-node.eval(None, None, None, None),
node))
# start beam search
qsize = 1
while True:
# give up when decoding takes too long
if qsize > 2000: break
# fetch the best node
score, n = nodes.get()
decoder_input = n.wordid
decoder_hidden = n.h
if n.wordid.item() == EOS_token and n.prevNode != None: # its not empty
endnodes.append((score, n))
# if reach maximum # of sentences required
if len(endnodes) >= number_required:
break
else:
continue
# decode for one step using decoder
decoder_output, decoder_hidden = self.decoder(decoder_input, decoder_hidden, encoder_output)
log_prob, indexes = torch.topk(decoder_output, self.args.beam_width)
nextnodes = []
for new_k in range(self.args.beam_width):
decoded_t = indexes[0][new_k].view(1, -1)
log_p = log_prob[0][new_k].item()
node = BeamSearchNode(decoder_hidden, n, decoded_t, n.logp + log_p, n.leng + 1)
score = -node.eval(None, None, None, None)
nextnodes.append((score, node))
# put them into queue
for i in range(len(nextnodes)):
score, nn = nextnodes[i]
nodes.put((score, nn))
# increase qsize
qsize += len(nextnodes)
# choose nbest paths, back trace them
if len(endnodes) == 0:
endnodes = [nodes.get() for n in range(self.topk)]
utterances = []
for score, n in sorted(endnodes, key=operator.itemgetter(0)):
utterance = []
utterance.append(n.wordid)
# back trace
while n.prevNode != None:
n = n.prevNode
utterance.append(n.wordid)
utterance = utterance[::-1]
utterances.append(utterance)
decoded_words = utterances[0]
decoded_sentence = [self.output_index2word(str(ind.item())) for ind in decoded_words]
#print(decoded_sentence)
decoded_sentences.append(' '.join(decoded_sentence[1:-1]))
return decoded_sentences
else: # GREEDY DECODING
decoded_sentences = self.greedy_decode(decoder_hidden, encoder_outputs, target_tensor)
return decoded_sentences
def greedy_decode(self, decoder_hidden, encoder_outputs, target_tensor):
decoded_sentences = []
batch_size, seq_len = target_tensor.size()
decoder_input = torch.LongTensor([[SOS_token] for _ in range(batch_size)], device=self.device)
decoded_words = torch.zeros((batch_size, self.max_len))
for t in range(self.max_len):
decoder_output, decoder_hidden = self.decoder(decoder_input, decoder_hidden, encoder_outputs)
topv, topi = decoder_output.data.topk(1) # get candidates
topi = topi.view(-1)
decoded_words[:, t] = topi
decoder_input = topi.detach().view(-1, 1)
for sentence in decoded_words:
sent = []
for ind in sentence:
if self.output_index2word(str(int(ind.item()))) == self.output_index2word(str(EOS_token)):
break
sent.append(self.output_index2word(str(int(ind.item()))))
decoded_sentences.append(' '.join(sent))
return decoded_sentences
def clipGradients(self):
grad = torch.nn.utils.clip_grad_norm_(self.parameters(), self.args.clip)
return grad
def saveModel(self, iter):
print('Saving parameters..')
if not os.path.exists(os.path.join(os.path.dirname(__file__), self.model_dir)):
os.makedirs(os.path.join(os.path.dirname(__file__), self.model_dir))
# print(self.model_dir)
torch.save(self.encoder.state_dict(), os.path.join(os.path.dirname(__file__), self.model_dir + self.model_name + '-' + str(iter) + '.enc'))
torch.save(self.policy.state_dict(), os.path.join(os.path.dirname(__file__), self.model_dir + self.model_name + '-' + str(iter) + '.pol'))
torch.save(self.decoder.state_dict(), os.path.join(os.path.dirname(__file__), self.model_dir + self.model_name + '-' + str(iter) + '.dec'))
with open(os.path.join(os.path.dirname(__file__), self.model_dir + self.model_name + '.config'), 'w') as f:
f.write(json.dumps(vars(self.args), ensure_ascii=False, indent=4))
def loadModel(self, iter=0):
print('Loading parameters of iter %s ' % iter)
self.encoder.load_state_dict(torch.load(os.path.join(os.path.dirname(__file__), self.model_dir + self.model_name + '-' + str(iter) + '.enc')))
self.policy.load_state_dict(torch.load(os.path.join(os.path.dirname(__file__), self.model_dir + self.model_name + '-' + str(iter) + '.pol')))
self.decoder.load_state_dict(torch.load(os.path.join(os.path.dirname(__file__), self.model_dir + self.model_name + '-' + str(iter) + '.dec')))
def input_index2word(self, index):
if index in self.input_lang_index2word.has_key:
return self.input_lang_index2word[index]
else:
raise UserWarning('We are using UNK')
def output_index2word(self, index):
if index in self.output_lang_index2word:
return self.output_lang_index2word[index]
else:
raise UserWarning('We are using UNK')
def input_word2index(self, index):
if index in self.input_lang_word2index:
return self.input_lang_word2index[index]
else:
return 2
def output_word2index(self, index):
if index in self.output_lang_word2index:
return self.output_lang_word2index[index]
else:
return 2
def getCount(self):
learnable_parameters = filter(lambda p: p.requires_grad, self.parameters())
param_cnt = sum([functools.reduce((lambda x, y: x * y), param.shape) for param in learnable_parameters])
print('Model has', param_cnt, ' parameters.')
def printGrad(self):
learnable_parameters = filter(lambda p: p.requires_grad, self.parameters())
for idx, param in enumerate(learnable_parameters):
print(param.grad, param.shape)
| 41.262069
| 176
| 0.617082
|
import json
import math
import operator
import os
import random
from io import open
from queue import PriorityQueue
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import optim
import functools
import tatk.policy.mdrg.multiwoz.default_policy as policy
SOS_token = 0  # start-of-sentence marker
EOS_token = 1  # end-of-sentence marker
UNK_token = 2  # unknown / out-of-vocabulary word
PAD_token = 3  # padding (also used as embedding padding_idx and NLLLoss ignore_index)
class BeamSearchNode(object):
    """One partial hypothesis in beam-search decoding.

    Nodes form a backward-linked list: each stores the decoder hidden state,
    a pointer to its parent hypothesis, the last emitted word id, the
    accumulated log-probability and the hypothesis length. The chain is
    unwound to recover the full sentence once EOS is reached.
    """

    def __init__(self, h, prevNode, wordid, logp, leng):
        self.h = h                # decoder hidden state after emitting `wordid`
        self.prevNode = prevNode  # parent hypothesis (None for the root)
        self.wordid = wordid      # last emitted token id
        self.logp = logp          # cumulative log-probability of the path
        self.leng = leng          # number of tokens along the path

    def eval(self, repeatPenalty, tokenReward, scoreTable, alpha=1.0):
        """Return the length-normalised log-probability of this hypothesis.

        The penalty/reward arguments are accepted only for interface
        compatibility and are ignored: `alpha` is overwritten and the reward
        term is always zero.
        """
        reward = 0
        alpha = 1.0  # deliberately overrides the argument (callers pass None)
        normaliser = float(self.leng - 1 + 1e-6)
        return self.logp / normaliser + alpha * reward
def init_lstm(cell, gain=1):
    """Initialise an LSTM: orthogonal hidden-to-hidden weights (via init_gru)
    plus a forget-gate bias of 1.0, a standard trick that eases gradient flow
    early in training.
    """
    init_gru(cell, gain)
    # PyTorch packs each bias as [input | forget | cell | output] gates, each
    # of size hidden_size, so the forget-gate slice is the second quarter.
    for _, _, bias_ih, bias_hh in cell.all_weights:
        size = len(bias_ih)
        start, end = size // 4, size // 2
        bias_ih[start:end].data.fill_(1.0)
        bias_hh[start:end].data.fill_(1.0)
def init_gru(gru, gain=1):
    """Re-initialise a recurrent cell: run the default reset, then make every
    hidden_size-row block of each hidden-to-hidden weight matrix orthogonal.
    """
    gru.reset_parameters()
    hidden = gru.hidden_size
    # all_weights yields (w_ih, w_hh, b_ih, b_hh) per layer/direction
    for _, hh_weight, _, _ in gru.all_weights:
        for row in range(0, hh_weight.size(0), hidden):
            torch.nn.init.orthogonal_(hh_weight[row:row + hidden], gain=gain)
def whatCellType(input_size, hidden_size, cell_type, dropout_rate):
    """Factory for the recurrent cell named by ``cell_type``.

    Supported names: 'rnn', 'gru', 'lstm', 'bigru', 'bilstm'. Every cell is
    created sequence-first (batch_first=False) and re-initialised with
    orthogonal hidden weights; LSTMs additionally get a forget-gate bias of 1.
    Unknown names yield None, matching the original fall-through behaviour.
    """
    specs = {
        'rnn': (nn.RNN, False, init_gru),
        'gru': (nn.GRU, False, init_gru),
        'lstm': (nn.LSTM, False, init_lstm),
        'bigru': (nn.GRU, True, init_gru),
        'bilstm': (nn.LSTM, True, init_lstm),
    }
    if cell_type not in specs:
        return None
    rnn_cls, bidirectional, initialiser = specs[cell_type]
    kwargs = {'dropout': dropout_rate, 'batch_first': False}
    if bidirectional:
        kwargs['bidirectional'] = True
    cell = rnn_cls(input_size, hidden_size, **kwargs)
    initialiser(cell)
    return cell
class EncoderRNN(nn.Module):
    """RNN sentence encoder with length-sorted packing.

    Embeds a padded batch of token indices, sorts the batch by decreasing
    length so it can be packed for the RNN, then restores the original batch
    order on both the outputs and the final hidden state.
    """

    def __init__(self, input_size, embedding_size, hidden_size, cell_type, depth, dropout):
        super(EncoderRNN, self).__init__()
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.embed_size = embedding_size
        self.n_layers = depth
        self.dropout = dropout
        self.bidirectional = False
        if 'bi' in cell_type:
            self.bidirectional = True
        # index 3 is the PAD token; its embedding row stays zero
        padding_idx = 3
        self.embedding = nn.Embedding(input_size, embedding_size, padding_idx=padding_idx)
        self.rnn = whatCellType(embedding_size, hidden_size,
                                cell_type, dropout_rate=self.dropout)

    def forward(self, input_seqs, input_lens, hidden=None):
        """Encode a padded batch.

        :param input_seqs: LongTensor of token indices, batch-first [B, T]
        :param input_lens: true (unpadded) length of each sequence
        :param hidden: optional initial RNN state
        :return: (outputs [T, B, H], final hidden state), in original batch order
        """
        input_lens = np.asarray(input_lens)
        input_seqs = input_seqs.transpose(0,1)
        embedded = self.embedding(input_seqs)
        embedded = embedded.transpose(0, 1)  # back to batch-first for the sort below
        # pack_padded_sequence requires sequences sorted by decreasing length
        sort_idx = np.argsort(-input_lens)
        unsort_idx = torch.LongTensor(np.argsort(sort_idx))  # inverse permutation
        input_lens = input_lens[sort_idx]
        sort_idx = torch.LongTensor(sort_idx)
        embedded = embedded[sort_idx].transpose(0, 1)  # sorted, time-first for packing
        packed = torch.nn.utils.rnn.pack_padded_sequence(embedded, input_lens)
        outputs, hidden = self.rnn(packed, hidden)
        outputs, _ = torch.nn.utils.rnn.pad_packed_sequence(outputs)
        if self.bidirectional:
            # sum forward and backward halves so the output size stays hidden_size
            outputs = outputs[:, :, :self.hidden_size] + outputs[:, :, self.hidden_size:]
        # undo the length sort on outputs and hidden state
        outputs = outputs.transpose(0, 1)[unsort_idx].transpose(0, 1).contiguous()
        if isinstance(hidden, tuple):  # LSTM carries (h, c)
            hidden = list(hidden)
            hidden[0] = hidden[0].transpose(0, 1)[unsort_idx].transpose(0, 1).contiguous()
            hidden[1] = hidden[1].transpose(0, 1)[unsort_idx].transpose(0, 1).contiguous()
            hidden = tuple(hidden)
        else:
            hidden = hidden.transpose(0, 1)[unsort_idx].transpose(0, 1).contiguous()
        return outputs, hidden
class Attn(nn.Module):
    """Concat (Bahdanau-style) attention over a sequence of encoder outputs."""

    def __init__(self, method, hidden_size):
        super(Attn, self).__init__()
        self.method = method
        self.hidden_size = hidden_size
        # Module/parameter creation order matches the original so a seeded RNG
        # produces identical initial weights.
        self.attn = nn.Linear(self.hidden_size * 2, hidden_size)
        self.v = nn.Parameter(torch.rand(hidden_size))
        stdv = 1. / math.sqrt(self.v.size(0))
        self.v.data.normal_(mean=0, std=stdv)

    def forward(self, hidden, encoder_outputs):
        """Return normalised attention weights of shape (batch, 1, src_len)."""
        src_len = encoder_outputs.size(0)
        query = hidden.repeat(src_len, 1, 1).transpose(0, 1)   # broadcast query over time
        keys = encoder_outputs.transpose(0, 1)                 # batch-first keys
        energies = self.score(query, keys)
        return F.softmax(energies, dim=1).unsqueeze(1)

    def score(self, hidden, encoder_outputs):
        """Concat score: v^T tanh(W [h ; enc]) for every time step."""
        energy = torch.tanh(self.attn(torch.cat([hidden, encoder_outputs], 2)))
        energy = energy.transpose(2, 1)
        batch = encoder_outputs.data.shape[0]
        v = self.v.repeat(batch, 1).unsqueeze(1)
        return torch.bmm(v, energy).squeeze(1)
class SeqAttnDecoderRNN(nn.Module):
    """Decoder with Bahdanau-style (concat) attention over encoder outputs.

    At each step the previous hidden state attends over all encoder outputs;
    the resulting context vector is concatenated with the embedded input
    token and fed to the recurrent cell.
    """

    def __init__(self, embedding_size, hidden_size, output_size, cell_type, dropout_p=0.1, max_length=30):
        super(SeqAttnDecoderRNN, self).__init__()
        self.hidden_size = hidden_size
        self.embed_size = embedding_size
        self.output_size = output_size
        self.n_layers = 1
        self.dropout_p = dropout_p
        self.embedding = nn.Embedding(output_size, embedding_size)
        self.dropout = nn.Dropout(dropout_p)  # NOTE(review): defined but never applied in forward
        if 'bi' in cell_type:
            cell_type = cell_type.strip('bi')  # decoding is always uni-directional
        # the RNN consumes [embedded token ; attention context]
        self.rnn = whatCellType(embedding_size + hidden_size, hidden_size, cell_type, dropout_rate=self.dropout_p)
        self.out = nn.Linear(hidden_size, output_size)
        # attention parameters; `score` and `attn_combine` are not used in forward
        self.score = nn.Linear(self.hidden_size + self.hidden_size, self.hidden_size)
        self.attn_combine = nn.Linear(embedding_size + hidden_size, embedding_size)
        self.method = 'concat'
        self.attn = nn.Linear(self.hidden_size * 2, hidden_size)
        self.v = nn.Parameter(torch.rand(hidden_size))
        stdv = 1. / math.sqrt(self.v.size(0))
        self.v.data.normal_(mean=0, std=stdv)

    def forward(self, input, hidden, encoder_outputs):
        """One decoding step; returns (log-probs over the vocabulary, new hidden state)."""
        if isinstance(hidden, tuple):  # LSTM: use h (not the cell state) as the query
            h_t = hidden[0]
        else:
            h_t = hidden
        encoder_outputs = encoder_outputs.transpose(0, 1)  # batch-first
        embedded = self.embedding(input)
        # broadcast the query over every encoder time step
        max_len = encoder_outputs.size(1)
        h_t = h_t.transpose(0, 1)
        h_t = h_t.repeat(1, max_len, 1)
        # concat attention score: v^T tanh(W [h ; enc])
        energy = self.attn(torch.cat((h_t, encoder_outputs), 2))
        energy = torch.tanh(energy)
        energy = energy.transpose(2, 1)
        v = self.v.repeat(encoder_outputs.size(0), 1).unsqueeze(1)
        energy = torch.bmm(v, energy)
        attn_weights = F.softmax(energy, dim=2)
        context = torch.bmm(attn_weights, encoder_outputs)  # weighted sum of encoder outputs
        rnn_input = torch.cat((embedded, context), 2)
        rnn_input = rnn_input.transpose(0, 1)  # seq-first for the RNN
        output, hidden = self.rnn(rnn_input, hidden)
        output = output.squeeze(0)
        output = F.log_softmax(self.out(output), dim=1)  # log-probs; loss side uses NLLLoss
        return output, hidden
class DecoderRNN(nn.Module):
    """Plain (no-attention) decoder: embed -> dropout -> RNN -> linear -> log-softmax."""

    def __init__(self, embedding_size, hidden_size, output_size, cell_type, dropout=0.1):
        super(DecoderRNN, self).__init__()
        self.hidden_size = hidden_size
        self.cell_type = cell_type
        # index 3 is the PAD token; its embedding row stays zero
        self.embedding = nn.Embedding(num_embeddings=output_size,
                                      embedding_dim=embedding_size,
                                      padding_idx=3)
        # decoding is always uni-directional, so drop any 'bi' prefix/suffix
        rnn_kind = cell_type.strip('bi') if 'bi' in cell_type else cell_type
        self.rnn = whatCellType(embedding_size, hidden_size, rnn_kind, dropout_rate=dropout)
        self.dropout_rate = dropout
        self.out = nn.Linear(hidden_size, output_size)

    def forward(self, input, hidden, not_used):
        """One decoding step; `not_used` keeps the signature aligned with the
        attention decoder. Returns (log-probs [B, V], new hidden state)."""
        step = self.embedding(input).transpose(0, 1)  # [B,1] -> [1,B,D] (seq-first)
        step = F.dropout(step, self.dropout_rate)
        rnn_out, hidden = self.rnn(step, hidden)
        logits = self.out(rnn_out.squeeze(0))
        return F.log_softmax(logits, dim=1), hidden
class Model(nn.Module):
    """Sequence-to-sequence response generator: encoder -> policy -> decoder.

    The encoder reads the user utterance; the policy network fuses the final
    encoder state with the database pointer and belief-state vectors into the
    initial decoder hidden state; the decoder then generates the system
    response token by token (teacher-forced during training, greedy or beam
    search at prediction time).

    NOTE: the ``train`` method below shadows ``nn.Module.train``; it is kept
    unchanged for backward compatibility with existing callers.
    """

    def __init__(self, args, input_lang_index2word, output_lang_index2word, input_lang_word2index, output_lang_word2index):
        super(Model, self).__init__()
        self.args = args
        self.max_len = args.max_len
        # Vocabulary lookup tables; the *_index2word dicts are keyed by
        # *stringified* indices (see input_index2word / output_index2word).
        self.output_lang_index2word = output_lang_index2word
        self.input_lang_index2word = input_lang_index2word
        self.output_lang_word2index = output_lang_word2index
        self.input_lang_word2index = input_lang_word2index
        self.hid_size_enc = args.hid_size_enc
        self.hid_size_dec = args.hid_size_dec
        self.hid_size_pol = args.hid_size_pol
        self.emb_size = args.emb_size
        self.db_size = args.db_size  # database-pointer vector size
        self.bs_size = args.bs_size  # belief-state vector size
        self.cell_type = args.cell_type
        if 'bi' in self.cell_type:
            self.num_directions = 2
        else:
            self.num_directions = 1
        self.depth = args.depth
        self.use_attn = args.use_attn
        self.attn_type = args.attention_type
        self.dropout = args.dropout
        self.device = torch.device("cuda" if args.cuda else "cpu")
        self.model_dir = args.model_dir
        self.model_name = args.model_name
        self.teacher_forcing_ratio = args.teacher_ratio
        self.vocab_size = args.vocab_size
        self.epsln = 10E-5
        torch.manual_seed(args.seed)
        self.build_model()
        self.getCount()
        try:
            assert self.args.beam_width > 0
            self.beam_search = True
        except (AssertionError, AttributeError, TypeError):
            # FIX: was a bare ``except:``. Only a missing, non-numeric or
            # non-positive beam_width should disable beam search.
            self.beam_search = False
        self.global_step = 0

    def cuda_(self, var):
        """Move ``var`` to the GPU when CUDA is enabled; otherwise return it unchanged."""
        return var.cuda() if self.args.cuda else var

    def build_model(self):
        """Instantiate encoder, policy and decoder sub-modules on ``self.device``."""
        self.encoder = EncoderRNN(len(self.input_lang_index2word), self.emb_size, self.hid_size_enc,
                                  self.cell_type, self.depth, self.dropout).to(self.device)
        self.policy = policy.DefaultPolicy(self.hid_size_pol, self.hid_size_enc, self.db_size, self.bs_size).to(self.device)
        # NOTE(review): with use_attn set and attention_type != 'bahdanau' no
        # decoder is created at all — preserved as-is; confirm intended.
        if self.use_attn:
            if self.attn_type == 'bahdanau':
                self.decoder = SeqAttnDecoderRNN(self.emb_size, self.hid_size_dec, len(self.output_lang_index2word), self.cell_type, self.dropout, self.max_len).to(self.device)
        else:
            self.decoder = DecoderRNN(self.emb_size, self.hid_size_dec, len(self.output_lang_index2word), self.cell_type, self.dropout).to(self.device)
        if self.args.mode == 'train':
            # log-softmax is applied inside the decoder, hence NLLLoss here;
            # index 3 (PAD) does not contribute to the loss.
            self.gen_criterion = nn.NLLLoss(ignore_index=3, size_average=True)
            self.setOptimizers()

    def train(self, input_tensor, input_lengths, target_tensor, target_lengths, db_tensor, bs_tensor, dial_name=None):
        """Run one optimization step on a batch.

        Returns (loss value, 0, pre-clip gradient norm). Shadows
        ``nn.Module.train`` — kept for backward compatibility.
        """
        proba, _, decoded_sent = self.forward(input_tensor, input_lengths, target_tensor, target_lengths, db_tensor, bs_tensor)
        proba = proba.view(-1, self.vocab_size)
        self.gen_loss = self.gen_criterion(proba, target_tensor.view(-1))
        self.loss = self.gen_loss
        self.loss.backward()
        grad = self.clipGradients()
        self.optimizer.step()
        self.optimizer.zero_grad()
        return self.loss.item(), 0, grad

    def setOptimizers(self):
        """Create ``self.optimizer`` over trainable parameters per ``args.optim``."""
        self.optimizer_policy = None
        if self.args.optim == 'sgd':
            self.optimizer = optim.SGD(lr=self.args.lr_rate, params=filter(lambda x: x.requires_grad, self.parameters()), weight_decay=self.args.l2_norm)
        elif self.args.optim == 'adadelta':
            self.optimizer = optim.Adadelta(lr=self.args.lr_rate, params=filter(lambda x: x.requires_grad, self.parameters()), weight_decay=self.args.l2_norm)
        elif self.args.optim == 'adam':
            self.optimizer = optim.Adam(lr=self.args.lr_rate, params=filter(lambda x: x.requires_grad, self.parameters()), weight_decay=self.args.l2_norm)

    def forward(self, input_tensor, input_lengths, target_tensor, target_lengths, db_tensor, bs_tensor):
        """Given the user sentence, user belief state and database pointer,
        encode the sentence, build the policy vector and feed it as the first
        hidden state to the decoder.

        Returns (log-probability tensor [B, T, V], None, None).
        """
        target_length = target_tensor.size(1)
        batch_size, seq_len = input_tensor.size()
        # ENCODER
        encoder_outputs, encoder_hidden = self.encoder(input_tensor, input_lengths)
        # POLICY: fuse final encoder state with DB pointer and belief state
        decoder_hidden = self.policy(encoder_hidden, db_tensor, bs_tensor)
        # GENERATOR with scheduled teacher forcing
        _, target_len = target_tensor.size()
        # FIX: the legacy ``torch.LongTensor(..., device=...)`` constructor
        # rejects CUDA devices; ``torch.tensor`` handles both CPU and GPU.
        decoder_input = torch.tensor([[SOS_token] for _ in range(batch_size)], dtype=torch.long, device=self.device)
        # FIX: allocate on the model device so the per-step write works on GPU.
        proba = torch.zeros(batch_size, target_length, self.vocab_size, device=self.device)  # [B,T,V]
        for t in range(target_len):
            decoder_output, decoder_hidden = self.decoder(decoder_input, decoder_hidden, encoder_outputs)
            use_teacher_forcing = True if random.random() < self.args.teacher_ratio else False
            if use_teacher_forcing:
                decoder_input = target_tensor[:, t].view(-1, 1)  # [B,1] teacher forcing
            else:
                # feed the model's own best prediction back in
                topv, topi = decoder_output.topk(1)
                # FIX: keep the [B,1] shape the decoder expects — ``squeeze()``
                # collapsed a batch of one to a 0-d tensor; matches greedy_decode.
                decoder_input = topi.detach().view(-1, 1)
            proba[:, t, :] = decoder_output
        decoded_sent = None
        return proba, None, decoded_sent

    def predict(self, input_tensor, input_lengths, target_tensor, target_lengths, db_tensor, bs_tensor):
        """Decode system responses without gradient tracking; returns (sentences, 0)."""
        with torch.no_grad():
            # ENCODER
            encoder_outputs, encoder_hidden = self.encoder(input_tensor, input_lengths)
            # POLICY
            decoder_hidden = self.policy(encoder_hidden, db_tensor, bs_tensor)
            # GENERATION (beam search or greedy, per self.beam_search)
            decoded_words = self.decode(target_tensor, decoder_hidden, encoder_outputs)
        return decoded_words, 0

    def decode(self, target_tensor, decoder_hidden, encoder_outputs):
        """Decode one sentence per batch item via beam search (or greedy fallback)."""
        decoder_hiddens = decoder_hidden
        if self.beam_search:  # sequicity-style beam search, one batch item at a time
            decoded_sentences = []
            for idx in range(target_tensor.size(0)):
                if isinstance(decoder_hiddens, tuple):  # LSTM keeps (h, c)
                    decoder_hidden = (decoder_hiddens[0][:, idx, :].unsqueeze(0),
                                      decoder_hiddens[1][:, idx, :].unsqueeze(0))
                else:
                    decoder_hidden = decoder_hiddens[:, idx, :].unsqueeze(0)
                encoder_output = encoder_outputs[:, idx, :].unsqueeze(1)
                # Beam start
                self.topk = 1
                endnodes = []  # finished hypotheses (reached EOS)
                number_required = min((self.topk + 1), self.topk - len(endnodes))
                # FIX: legacy ``torch.LongTensor(..., device=...)`` rejects CUDA.
                decoder_input = torch.tensor([[SOS_token]], dtype=torch.long, device=self.device)
                # root node: hidden vector, prevNode, wordid, logp, length
                node = BeamSearchNode(decoder_hidden, None, decoder_input, 0, 1)
                nodes = PriorityQueue()
                # FIX: monotonically increasing tie-breaker — with equal scores
                # the queue would otherwise compare BeamSearchNode objects and
                # raise TypeError.
                counter = 0
                nodes.put((-node.eval(None, None, None, None), counter, node))
                counter += 1
                qsize = 1
                # start beam search
                while True:
                    # give up when decoding takes too long
                    if qsize > 2000:
                        break
                    # fetch the current best node
                    score, _, n = nodes.get()
                    decoder_input = n.wordid
                    decoder_hidden = n.h
                    if n.wordid.item() == EOS_token and n.prevNode is not None:
                        endnodes.append((score, n))
                        # stop once enough finished hypotheses were collected
                        if len(endnodes) >= number_required:
                            break
                        else:
                            continue
                    # expand: decode one step and enqueue the beam_width children
                    decoder_output, decoder_hidden = self.decoder(decoder_input, decoder_hidden, encoder_output)
                    log_prob, indexes = torch.topk(decoder_output, self.args.beam_width)
                    nextnodes = []
                    for new_k in range(self.args.beam_width):
                        decoded_t = indexes[0][new_k].view(1, -1)
                        log_p = log_prob[0][new_k].item()
                        child = BeamSearchNode(decoder_hidden, n, decoded_t, n.logp + log_p, n.leng + 1)
                        child_score = -child.eval(None, None, None, None)
                        nextnodes.append((child_score, child))
                    for child_score, child in nextnodes:  # FIX: no longer shadows the ``nn`` module
                        nodes.put((child_score, counter, child))
                        counter += 1
                    qsize += len(nextnodes)
                # no hypothesis reached EOS: fall back to the best partial ones
                if len(endnodes) == 0:
                    endnodes = [(s, nd) for s, _, nd in (nodes.get() for _ in range(self.topk))]
                utterances = []
                for score, n in sorted(endnodes, key=operator.itemgetter(0)):
                    utterance = [n.wordid]
                    # back-trace the linked list of nodes to the root
                    while n.prevNode is not None:
                        n = n.prevNode
                        utterance.append(n.wordid)
                    utterances.append(utterance[::-1])  # reverse into sentence order
                decoded_words = utterances[0]
                decoded_sentence = [self.output_index2word(str(ind.item())) for ind in decoded_words]
                # strip the SOS/EOS markers at both ends
                decoded_sentences.append(' '.join(decoded_sentence[1:-1]))
            return decoded_sentences
        else:  # GREEDY DECODING
            return self.greedy_decode(decoder_hidden, encoder_outputs, target_tensor)

    def greedy_decode(self, decoder_hidden, encoder_outputs, target_tensor):
        """Greedy argmax decoding for the whole batch; returns a list of strings."""
        decoded_sentences = []
        batch_size, seq_len = target_tensor.size()
        # FIX: legacy ``torch.LongTensor(..., device=...)`` rejects CUDA devices.
        decoder_input = torch.tensor([[SOS_token] for _ in range(batch_size)], dtype=torch.long, device=self.device)
        # FIX: allocate on the model device so the per-step write works on GPU.
        decoded_words = torch.zeros((batch_size, self.max_len), device=self.device)
        for t in range(self.max_len):
            decoder_output, decoder_hidden = self.decoder(decoder_input, decoder_hidden, encoder_outputs)
            topv, topi = decoder_output.data.topk(1)  # best token per batch item
            topi = topi.view(-1)
            decoded_words[:, t] = topi
            decoder_input = topi.detach().view(-1, 1)
        # Convert index rows back to words, truncating at the first EOS.
        for sentence in decoded_words:
            sent = []
            for ind in sentence:
                if self.output_index2word(str(int(ind.item()))) == self.output_index2word(str(EOS_token)):
                    break
                sent.append(self.output_index2word(str(int(ind.item()))))
            decoded_sentences.append(' '.join(sent))
        return decoded_sentences

    def clipGradients(self):
        """Clip the global gradient norm to ``args.clip``; returns the pre-clip norm."""
        grad = torch.nn.utils.clip_grad_norm_(self.parameters(), self.args.clip)
        return grad

    def saveModel(self, iter):
        """Persist encoder/policy/decoder weights and the run config for ``iter``."""
        print('Saving parameters..')
        if not os.path.exists(os.path.join(os.path.dirname(__file__), self.model_dir)):
            os.makedirs(os.path.join(os.path.dirname(__file__), self.model_dir))
        torch.save(self.encoder.state_dict(), os.path.join(os.path.dirname(__file__), self.model_dir + self.model_name + '-' + str(iter) + '.enc'))
        torch.save(self.policy.state_dict(), os.path.join(os.path.dirname(__file__), self.model_dir + self.model_name + '-' + str(iter) + '.pol'))
        torch.save(self.decoder.state_dict(), os.path.join(os.path.dirname(__file__), self.model_dir + self.model_name + '-' + str(iter) + '.dec'))
        with open(os.path.join(os.path.dirname(__file__), self.model_dir + self.model_name + '.config'), 'w') as f:
            f.write(json.dumps(vars(self.args), ensure_ascii=False, indent=4))

    def loadModel(self, iter=0):
        """Load encoder/policy/decoder weights saved by :meth:`saveModel`."""
        print('Loading parameters of iter %s ' % iter)
        self.encoder.load_state_dict(torch.load(os.path.join(os.path.dirname(__file__), self.model_dir + self.model_name + '-' + str(iter) + '.enc')))
        self.policy.load_state_dict(torch.load(os.path.join(os.path.dirname(__file__), self.model_dir + self.model_name + '-' + str(iter) + '.pol')))
        self.decoder.load_state_dict(torch.load(os.path.join(os.path.dirname(__file__), self.model_dir + self.model_name + '-' + str(iter) + '.dec')))

    def input_index2word(self, index):
        """Map an input-vocabulary index (string key) to its word.

        Raises UserWarning for unknown indices.
        """
        # FIX: original tested ``index in self.input_lang_index2word.has_key``,
        # which raises AttributeError on Python 3 dicts.
        if index in self.input_lang_index2word:
            return self.input_lang_index2word[index]
        else:
            raise UserWarning('We are using UNK')

    def output_index2word(self, index):
        """Map an output-vocabulary index (string key) to its word; UserWarning if unknown."""
        if index in self.output_lang_index2word:
            return self.output_lang_index2word[index]
        else:
            raise UserWarning('We are using UNK')

    def input_word2index(self, index):
        """Map an input word to its index; unknown words map to UNK (2)."""
        if index in self.input_lang_word2index:
            return self.input_lang_word2index[index]
        else:
            return 2

    def output_word2index(self, index):
        """Map an output word to its index; unknown words map to UNK (2)."""
        if index in self.output_lang_word2index:
            return self.output_lang_word2index[index]
        else:
            return 2

    def getCount(self):
        """Print the number of trainable parameters of the model."""
        learnable_parameters = filter(lambda p: p.requires_grad, self.parameters())
        param_cnt = sum([functools.reduce((lambda x, y: x * y), param.shape) for param in learnable_parameters])
        print('Model has', param_cnt, ' parameters.')

    def printGrad(self):
        """Debug helper: print the gradient and shape of every trainable parameter."""
        learnable_parameters = filter(lambda p: p.requires_grad, self.parameters())
        for idx, param in enumerate(learnable_parameters):
            print(param.grad, param.shape)
| true
| true
|
f70cacac5ac1a4ae9c31f4001dc3bf77f3eea737
| 56
|
py
|
Python
|
pybitrix24/__init__.py
|
EugeneFadeev/pybitrix24
|
109173f72d1fef45342ded7e8d3b7bf70a555618
|
[
"MIT"
] | null | null | null |
pybitrix24/__init__.py
|
EugeneFadeev/pybitrix24
|
109173f72d1fef45342ded7e8d3b7bf70a555618
|
[
"MIT"
] | null | null | null |
pybitrix24/__init__.py
|
EugeneFadeev/pybitrix24
|
109173f72d1fef45342ded7e8d3b7bf70a555618
|
[
"MIT"
] | null | null | null |
from .pybitrix24 import Bitrix24
__version__ = '0.5.0'
| 14
| 32
| 0.75
|
from .pybitrix24 import Bitrix24
__version__ = '0.5.0'
| true
| true
|
f70cacd241a30e15e79ca9ff8089b49e1a95c652
| 1,814
|
py
|
Python
|
examples/collapse_example.py
|
muyr/dayu_widgets3
|
a319cc719d84c031829893c45b8f20e87cbbabc8
|
[
"MIT"
] | 5
|
2020-10-16T03:46:47.000Z
|
2022-03-21T07:10:37.000Z
|
examples/collapse_example.py
|
muyr/dayu_widgets3
|
a319cc719d84c031829893c45b8f20e87cbbabc8
|
[
"MIT"
] | null | null | null |
examples/collapse_example.py
|
muyr/dayu_widgets3
|
a319cc719d84c031829893c45b8f20e87cbbabc8
|
[
"MIT"
] | 1
|
2022-02-16T14:18:43.000Z
|
2022-02-16T14:18:43.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###################################################################
# Author: Mu yanru
# Date : 2019.3
# Email : muyanru345@163.com
###################################################################
from dayu_widgets3.collapse import MCollapse
from dayu_widgets3.label import MLabel
from dayu_widgets3.qt import *
class CollapseExample(QWidget):
    """Demo window for MCollapse: collapsible sections of biography text."""

    def __init__(self, parent=None):
        super(CollapseExample, self).__init__(parent)
        self._init_ui()

    def _init_ui(self):
        # Biography labels (Chinese text): Steve Jobs, Steve Wozniak, Jonathan Ive.
        label_1 = MLabel(u'史蒂夫·乔布斯(Steve Jobs),1955年2月24日生于美国加利福尼亚州旧金山,美国发明家、企业家、美国苹果公司联合创办人。')
        label_2 = MLabel(
            u'斯蒂夫·盖瑞·沃兹尼亚克(Stephen Gary Wozniak),美国电脑工程师,曾与史蒂夫·乔布斯合伙创立苹果电脑(今之苹果公司)。斯蒂夫·盖瑞·沃兹尼亚克曾就读于美国科罗拉多大学,后转学入美国著名高等学府加州大学伯克利分校(UC Berkeley)并获得电机工程及计算机(EECS)本科学位(1987年)。')
        label_3 = MLabel(
            u'乔纳森·伊夫是一位工业设计师,现任Apple公司设计师兼资深副总裁,英国爵士。他曾参与设计了iPod,iMac,iPhone,iPad等众多苹果产品。除了乔布斯,他是对苹果那些著名的产品最有影响力的人。')
        label_1.setWordWrap(True)
        label_2.setWordWrap(True)
        # NOTE(review): label_3 is configured but never added to a section below.
        label_3.setWordWrap(True)
        # Each dict becomes one collapsible section: title, initial expand state,
        # and the widget shown as the section body.
        section_list = [
            {
                'title': u'史蒂夫乔布斯',
                'expand': True,
                'widget': label_1
            }, {
                'title': u'斯蒂夫·盖瑞·沃兹尼亚克',
                'expand': True,
                'widget': label_2
            }
        ]
        section_group = MCollapse()
        section_group.add_section_list(section_list)
        # The stretch keeps the sections pinned to the top of the window.
        main_lay = QVBoxLayout()
        main_lay.addWidget(section_group)
        main_lay.addStretch()
        self.setLayout(main_lay)
if __name__ == '__main__':
    # Manual demo: launch a Qt application showing the collapse widget with
    # the dayu theme applied.
    import sys
    app = QApplication(sys.argv)
    test = CollapseExample()
    from dayu_widgets3 import dayu_theme
    dayu_theme.apply(test)
    test.show()
    sys.exit(app.exec_())
| 31.824561
| 173
| 0.581036
| true
| true
|
|
f70cacd5d9f463ae535ec327edb728d8d9c8030c
| 12,624
|
py
|
Python
|
python3-alpha/python3-src/Lib/ctypes/test/test_functions.py
|
stormtheh4ck3r/python-for-android
|
b9ea9161392f60566b81482b1e25cd77004d5c45
|
[
"Apache-2.0"
] | 4
|
2016-05-04T07:05:22.000Z
|
2020-09-24T00:21:05.000Z
|
python3-alpha/python3-src/Lib/ctypes/test/test_functions.py
|
stormtheh4ck3r/python-for-android
|
b9ea9161392f60566b81482b1e25cd77004d5c45
|
[
"Apache-2.0"
] | null | null | null |
python3-alpha/python3-src/Lib/ctypes/test/test_functions.py
|
stormtheh4ck3r/python-for-android
|
b9ea9161392f60566b81482b1e25cd77004d5c45
|
[
"Apache-2.0"
] | 1
|
2018-12-12T03:06:17.000Z
|
2018-12-12T03:06:17.000Z
|
"""
Here is probably the place to write the docs, since the test-cases
show how the type behave.
Later...
"""
from ctypes import *
import sys, unittest
try:
WINFUNCTYPE
except NameError:
# fake to enable this test on Linux
WINFUNCTYPE = CFUNCTYPE
import _ctypes_test
dll = CDLL(_ctypes_test.__file__)
if sys.platform == "win32":
windll = WinDLL(_ctypes_test.__file__)
class POINT(Structure):
    # 2-D point with two 32-bit integer coordinates (mirrors the C struct in _ctypes_test).
    _fields_ = [("x", c_int), ("y", c_int)]
class RECT(Structure):
    # Rectangle described by four c_int edge coordinates.
    _fields_ = [("left", c_int), ("top", c_int),
                ("right", c_int), ("bottom", c_int)]
class FunctionTestCase(unittest.TestCase):
    """Exercise foreign-function calls through the _ctypes_test helper DLL:
    argument conversion, result types, callbacks, and struct passing/return.
    """

    def test_mro(self):
        # in Python 2.3, this raises TypeError: MRO conflict among bases classes,
        # in Python 2.2 it works.
        #
        # But in early versions of _ctypes.c, the result of tp_new
        # wasn't checked, and it even crashed Python.
        # Found by Greg Chapman.
        try:
            class X(object, Array):
                _length_ = 5
                _type_ = "i"
        except TypeError:
            pass
        from _ctypes import _Pointer
        try:
            class X(object, _Pointer):
                pass
        except TypeError:
            pass
        from _ctypes import _SimpleCData
        try:
            class X(object, _SimpleCData):
                _type_ = "i"
        except TypeError:
            pass
        try:
            class X(object, Structure):
                _fields_ = []
        except TypeError:
            pass

    def test_wchar_parm(self):
        # Skipped on builds without wchar support.
        try:
            c_wchar
        except NameError:
            return
        f = dll._testfunc_i_bhilfd
        f.argtypes = [c_byte, c_wchar, c_int, c_long, c_float, c_double]
        result = f(1, "x", 3, 4, 5.0, 6.0)
        # 139 = 1 + ord("x") + 3 + 4 + 5 + 6
        self.assertEqual(result, 139)
        self.assertEqual(type(result), int)

    def test_wchar_result(self):
        try:
            c_wchar
        except NameError:
            return
        f = dll._testfunc_i_bhilfd
        f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double]
        f.restype = c_wchar
        result = f(0, 0, 0, 0, 0, 0)
        self.assertEqual(result, '\x00')

    def test_voidresult(self):
        # restype None means the C function returns void; the call yields None.
        f = dll._testfunc_v
        f.restype = None
        f.argtypes = [c_int, c_int, POINTER(c_int)]
        result = c_int()
        self.assertEqual(None, f(1, 2, byref(result)))
        self.assertEqual(result.value, 3)

    def test_intresult(self):
        f = dll._testfunc_i_bhilfd
        f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double]
        f.restype = c_int
        result = f(1, 2, 3, 4, 5.0, 6.0)
        self.assertEqual(result, 21)
        self.assertEqual(type(result), int)
        result = f(-1, -2, -3, -4, -5.0, -6.0)
        self.assertEqual(result, -21)
        self.assertEqual(type(result), int)
        # If we declare the function to return a short,
        # is the high part split off?
        f.restype = c_short
        result = f(1, 2, 3, 4, 5.0, 6.0)
        self.assertEqual(result, 21)
        self.assertEqual(type(result), int)
        result = f(1, 2, 3, 0x10004, 5.0, 6.0)
        self.assertEqual(result, 21)
        self.assertEqual(type(result), int)
        # You cannot assign character format codes as restype any longer
        self.assertRaises(TypeError, setattr, f, "restype", "i")

    def test_floatresult(self):
        f = dll._testfunc_f_bhilfd
        f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double]
        f.restype = c_float
        result = f(1, 2, 3, 4, 5.0, 6.0)
        self.assertEqual(result, 21)
        self.assertEqual(type(result), float)
        result = f(-1, -2, -3, -4, -5.0, -6.0)
        self.assertEqual(result, -21)
        self.assertEqual(type(result), float)

    def test_doubleresult(self):
        f = dll._testfunc_d_bhilfd
        f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double]
        f.restype = c_double
        result = f(1, 2, 3, 4, 5.0, 6.0)
        self.assertEqual(result, 21)
        self.assertEqual(type(result), float)
        result = f(-1, -2, -3, -4, -5.0, -6.0)
        self.assertEqual(result, -21)
        self.assertEqual(type(result), float)

    def test_longdoubleresult(self):
        f = dll._testfunc_D_bhilfD
        f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_longdouble]
        f.restype = c_longdouble
        result = f(1, 2, 3, 4, 5.0, 6.0)
        self.assertEqual(result, 21)
        self.assertEqual(type(result), float)
        result = f(-1, -2, -3, -4, -5.0, -6.0)
        self.assertEqual(result, -21)
        self.assertEqual(type(result), float)

    def test_longlongresult(self):
        try:
            c_longlong
        except NameError:
            return
        f = dll._testfunc_q_bhilfd
        f.restype = c_longlong
        f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double]
        result = f(1, 2, 3, 4, 5.0, 6.0)
        self.assertEqual(result, 21)
        f = dll._testfunc_q_bhilfdq
        f.restype = c_longlong
        f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double, c_longlong]
        result = f(1, 2, 3, 4, 5.0, 6.0, 21)
        self.assertEqual(result, 42)

    def test_stringresult(self):
        f = dll._testfunc_p_p
        f.argtypes = None
        f.restype = c_char_p
        result = f(b"123")
        self.assertEqual(result, b"123")
        # NULL comes back as None through c_char_p.
        result = f(None)
        self.assertEqual(result, None)

    def test_pointers(self):
        f = dll._testfunc_p_p
        f.restype = POINTER(c_int)
        f.argtypes = [POINTER(c_int)]
        # This only works if the value c_int(42) passed to the
        # function is still alive while the pointer (the result) is
        # used.
        v = c_int(42)
        self.assertEqual(pointer(v).contents.value, 42)
        result = f(pointer(v))
        self.assertEqual(type(result), POINTER(c_int))
        self.assertEqual(result.contents.value, 42)
        # This one works...
        result = f(pointer(v))
        self.assertEqual(result.contents.value, v.value)
        p = pointer(c_int(99))
        result = f(p)
        self.assertEqual(result.contents.value, 99)
        arg = byref(v)
        result = f(arg)
        self.assertNotEqual(result.contents, v.value)
        # byref of the wrong underlying type must be rejected by argtypes.
        self.assertRaises(ArgumentError, f, byref(c_short(22)))
        # It is dangerous, however, because you don't control the lifetime
        # of the pointer:
        result = f(byref(c_int(99)))
        self.assertNotEqual(result.contents, 99)

    def test_errors(self):
        # A Structure that does not match the expected parameter type
        # cannot be converted.
        f = dll._testfunc_p_p
        f.restype = c_int
        class X(Structure):
            _fields_ = [("y", c_int)]
        self.assertRaises(TypeError, f, X()) #cannot convert parameter

    ################################################################
    def test_shorts(self):
        f = dll._testfunc_callback_i_if
        args = []
        expected = [262144, 131072, 65536, 32768, 16384, 8192, 4096, 2048,
                    1024, 512, 256, 128, 64, 32, 16, 8, 4, 2, 1]
        def callback(v):
            args.append(v)
            return v
        CallBack = CFUNCTYPE(c_int, c_int)
        cb = CallBack(callback)
        f(2**18, cb)
        self.assertEqual(args, expected)

    ################################################################
    def test_callbacks(self):
        f = dll._testfunc_callback_i_if
        f.restype = c_int
        MyCallback = CFUNCTYPE(c_int, c_int)
        def callback(value):
            #print "called back with", value
            return value
        cb = MyCallback(callback)
        result = f(-10, cb)
        self.assertEqual(result, -18)
        # test with prototype
        f.argtypes = [c_int, MyCallback]
        cb = MyCallback(callback)
        result = f(-10, cb)
        self.assertEqual(result, -18)
        AnotherCallback = WINFUNCTYPE(c_int, c_int, c_int, c_int, c_int)
        # check that the prototype works: we call f with wrong
        # argument types
        cb = AnotherCallback(callback)
        self.assertRaises(ArgumentError, f, -10, cb)

    def test_callbacks_2(self):
        # Can also use simple datatypes as argument type specifiers
        # for the callback function.
        # In this case the call receives an instance of that type
        f = dll._testfunc_callback_i_if
        f.restype = c_int
        MyCallback = CFUNCTYPE(c_int, c_int)
        f.argtypes = [c_int, MyCallback]
        def callback(value):
            #print "called back with", value
            self.assertEqual(type(value), int)
            return value
        cb = MyCallback(callback)
        result = f(-10, cb)
        self.assertEqual(result, -18)

    def test_longlong_callbacks(self):
        f = dll._testfunc_callback_q_qf
        f.restype = c_longlong
        MyCallback = CFUNCTYPE(c_longlong, c_longlong)
        f.argtypes = [c_longlong, MyCallback]
        def callback(value):
            self.assertTrue(isinstance(value, int))
            return value & 0x7FFFFFFF
        cb = MyCallback(callback)
        self.assertEqual(13577625587, f(1000000000000, cb))

    def test_dll_errors(self):
        # BUG FIX: this method was also named `test_errors`, which silently
        # shadowed the parameter-conversion test above so it never ran.
        # Renamed so both tests are discovered and executed.
        self.assertRaises(AttributeError, getattr, dll, "_xxx_yyy")
        self.assertRaises(ValueError, c_int.in_dll, dll, "_xxx_yyy")

    def test_byval(self):
        # without prototype
        ptin = POINT(1, 2)
        ptout = POINT()
        # EXPORT int _testfunc_byval(point in, point *pout)
        result = dll._testfunc_byval(ptin, byref(ptout))
        got = result, ptout.x, ptout.y
        expected = 3, 1, 2
        self.assertEqual(got, expected)
        # with prototype
        ptin = POINT(101, 102)
        ptout = POINT()
        dll._testfunc_byval.argtypes = (POINT, POINTER(POINT))
        dll._testfunc_byval.restype = c_int
        result = dll._testfunc_byval(ptin, byref(ptout))
        got = result, ptout.x, ptout.y
        expected = 203, 101, 102
        self.assertEqual(got, expected)

    def test_struct_return_2H(self):
        class S2H(Structure):
            _fields_ = [("x", c_short),
                        ("y", c_short)]
        dll.ret_2h_func.restype = S2H
        dll.ret_2h_func.argtypes = [S2H]
        inp = S2H(99, 88)
        s2h = dll.ret_2h_func(inp)
        self.assertEqual((s2h.x, s2h.y), (99*2, 88*3))

    if sys.platform == "win32":
        def test_struct_return_2H_stdcall(self):
            class S2H(Structure):
                _fields_ = [("x", c_short),
                            ("y", c_short)]
            windll.s_ret_2h_func.restype = S2H
            windll.s_ret_2h_func.argtypes = [S2H]
            s2h = windll.s_ret_2h_func(S2H(99, 88))
            self.assertEqual((s2h.x, s2h.y), (99*2, 88*3))

    def test_struct_return_8H(self):
        class S8I(Structure):
            _fields_ = [("a", c_int),
                        ("b", c_int),
                        ("c", c_int),
                        ("d", c_int),
                        ("e", c_int),
                        ("f", c_int),
                        ("g", c_int),
                        ("h", c_int)]
        dll.ret_8i_func.restype = S8I
        dll.ret_8i_func.argtypes = [S8I]
        inp = S8I(9, 8, 7, 6, 5, 4, 3, 2)
        s8i = dll.ret_8i_func(inp)
        self.assertEqual((s8i.a, s8i.b, s8i.c, s8i.d, s8i.e, s8i.f, s8i.g, s8i.h),
                         (9*2, 8*3, 7*4, 6*5, 5*6, 4*7, 3*8, 2*9))

    if sys.platform == "win32":
        def test_struct_return_8H_stdcall(self):
            class S8I(Structure):
                _fields_ = [("a", c_int),
                            ("b", c_int),
                            ("c", c_int),
                            ("d", c_int),
                            ("e", c_int),
                            ("f", c_int),
                            ("g", c_int),
                            ("h", c_int)]
            windll.s_ret_8i_func.restype = S8I
            windll.s_ret_8i_func.argtypes = [S8I]
            inp = S8I(9, 8, 7, 6, 5, 4, 3, 2)
            s8i = windll.s_ret_8i_func(inp)
            self.assertEqual((s8i.a, s8i.b, s8i.c, s8i.d, s8i.e, s8i.f, s8i.g, s8i.h),
                             (9*2, 8*3, 7*4, 6*5, 5*6, 4*7, 3*8, 2*9))

    def test_sf1651235(self):
        # see http://www.python.org/sf/1651235
        proto = CFUNCTYPE(c_int, RECT, POINT)
        def callback(*args):
            return 0
        callback = proto(callback)
        self.assertRaises(ArgumentError, lambda: callback((1, 2, 3, 4), POINT()))
if __name__ == '__main__':
unittest.main()
| 30.941176
| 86
| 0.544598
|
from ctypes import *
import sys, unittest
try:
WINFUNCTYPE
except NameError:
WINFUNCTYPE = CFUNCTYPE
import _ctypes_test
dll = CDLL(_ctypes_test.__file__)
if sys.platform == "win32":
windll = WinDLL(_ctypes_test.__file__)
class POINT(Structure):
_fields_ = [("x", c_int), ("y", c_int)]
class RECT(Structure):
_fields_ = [("left", c_int), ("top", c_int),
("right", c_int), ("bottom", c_int)]
class FunctionTestCase(unittest.TestCase):
def test_mro(self):
# Found by Greg Chapman.
try:
class X(object, Array):
_length_ = 5
_type_ = "i"
except TypeError:
pass
from _ctypes import _Pointer
try:
class X(object, _Pointer):
pass
except TypeError:
pass
from _ctypes import _SimpleCData
try:
class X(object, _SimpleCData):
_type_ = "i"
except TypeError:
pass
try:
class X(object, Structure):
_fields_ = []
except TypeError:
pass
def test_wchar_parm(self):
try:
c_wchar
except NameError:
return
f = dll._testfunc_i_bhilfd
f.argtypes = [c_byte, c_wchar, c_int, c_long, c_float, c_double]
result = f(1, "x", 3, 4, 5.0, 6.0)
self.assertEqual(result, 139)
self.assertEqual(type(result), int)
def test_wchar_result(self):
try:
c_wchar
except NameError:
return
f = dll._testfunc_i_bhilfd
f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double]
f.restype = c_wchar
result = f(0, 0, 0, 0, 0, 0)
self.assertEqual(result, '\x00')
def test_voidresult(self):
f = dll._testfunc_v
f.restype = None
f.argtypes = [c_int, c_int, POINTER(c_int)]
result = c_int()
self.assertEqual(None, f(1, 2, byref(result)))
self.assertEqual(result.value, 3)
def test_intresult(self):
f = dll._testfunc_i_bhilfd
f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double]
f.restype = c_int
result = f(1, 2, 3, 4, 5.0, 6.0)
self.assertEqual(result, 21)
self.assertEqual(type(result), int)
result = f(-1, -2, -3, -4, -5.0, -6.0)
self.assertEqual(result, -21)
self.assertEqual(type(result), int)
# If we declare the function to return a short,
# is the high part split off?
f.restype = c_short
result = f(1, 2, 3, 4, 5.0, 6.0)
self.assertEqual(result, 21)
self.assertEqual(type(result), int)
result = f(1, 2, 3, 0x10004, 5.0, 6.0)
self.assertEqual(result, 21)
self.assertEqual(type(result), int)
# You cannot assign character format codes as restype any longer
self.assertRaises(TypeError, setattr, f, "restype", "i")
def test_floatresult(self):
f = dll._testfunc_f_bhilfd
f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double]
f.restype = c_float
result = f(1, 2, 3, 4, 5.0, 6.0)
self.assertEqual(result, 21)
self.assertEqual(type(result), float)
result = f(-1, -2, -3, -4, -5.0, -6.0)
self.assertEqual(result, -21)
self.assertEqual(type(result), float)
def test_doubleresult(self):
f = dll._testfunc_d_bhilfd
f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double]
f.restype = c_double
result = f(1, 2, 3, 4, 5.0, 6.0)
self.assertEqual(result, 21)
self.assertEqual(type(result), float)
result = f(-1, -2, -3, -4, -5.0, -6.0)
self.assertEqual(result, -21)
self.assertEqual(type(result), float)
def test_longdoubleresult(self):
f = dll._testfunc_D_bhilfD
f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_longdouble]
f.restype = c_longdouble
result = f(1, 2, 3, 4, 5.0, 6.0)
self.assertEqual(result, 21)
self.assertEqual(type(result), float)
result = f(-1, -2, -3, -4, -5.0, -6.0)
self.assertEqual(result, -21)
self.assertEqual(type(result), float)
def test_longlongresult(self):
try:
c_longlong
except NameError:
return
f = dll._testfunc_q_bhilfd
f.restype = c_longlong
f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double]
result = f(1, 2, 3, 4, 5.0, 6.0)
self.assertEqual(result, 21)
f = dll._testfunc_q_bhilfdq
f.restype = c_longlong
f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double, c_longlong]
result = f(1, 2, 3, 4, 5.0, 6.0, 21)
self.assertEqual(result, 42)
def test_stringresult(self):
f = dll._testfunc_p_p
f.argtypes = None
f.restype = c_char_p
result = f(b"123")
self.assertEqual(result, b"123")
result = f(None)
self.assertEqual(result, None)
def test_pointers(self):
f = dll._testfunc_p_p
f.restype = POINTER(c_int)
f.argtypes = [POINTER(c_int)]
# This only works if the value c_int(42) passed to the
# function is still alive while the pointer (the result) is
# used.
v = c_int(42)
self.assertEqual(pointer(v).contents.value, 42)
result = f(pointer(v))
self.assertEqual(type(result), POINTER(c_int))
self.assertEqual(result.contents.value, 42)
# This on works...
result = f(pointer(v))
self.assertEqual(result.contents.value, v.value)
p = pointer(c_int(99))
result = f(p)
self.assertEqual(result.contents.value, 99)
arg = byref(v)
result = f(arg)
self.assertNotEqual(result.contents, v.value)
self.assertRaises(ArgumentError, f, byref(c_short(22)))
# It is dangerous, however, because you don't control the lifetime
result = f(byref(c_int(99)))
self.assertNotEqual(result.contents, 99)
def test_errors(self):
f = dll._testfunc_p_p
f.restype = c_int
class X(Structure):
_fields_ = [("y", c_int)]
self.assertRaises(TypeError, f, X())
(s8i.a, s8i.b, s8i.c, s8i.d, s8i.e, s8i.f, s8i.g, s8i.h),
(9*2, 8*3, 7*4, 6*5, 5*6, 4*7, 3*8, 2*9))
if sys.platform == "win32":
def test_struct_return_8H_stdcall(self):
class S8I(Structure):
_fields_ = [("a", c_int),
("b", c_int),
("c", c_int),
("d", c_int),
("e", c_int),
("f", c_int),
("g", c_int),
("h", c_int)]
windll.s_ret_8i_func.restype = S8I
windll.s_ret_8i_func.argtypes = [S8I]
inp = S8I(9, 8, 7, 6, 5, 4, 3, 2)
s8i = windll.s_ret_8i_func(inp)
self.assertEqual((s8i.a, s8i.b, s8i.c, s8i.d, s8i.e, s8i.f, s8i.g, s8i.h),
(9*2, 8*3, 7*4, 6*5, 5*6, 4*7, 3*8, 2*9))
def test_sf1651235(self):
proto = CFUNCTYPE(c_int, RECT, POINT)
def callback(*args):
return 0
callback = proto(callback)
self.assertRaises(ArgumentError, lambda: callback((1, 2, 3, 4), POINT()))
if __name__ == '__main__':
unittest.main()
| true
| true
|
f70cadaad1235cebd9ec3eb5772d33f7d545992f
| 8,946
|
py
|
Python
|
src_old/tests/scripts/iga/2d/poisson_v3.py
|
toddrme2178/pyccel
|
deec37503ab0c5d0bcca1a035f7909f7ce8ef653
|
[
"MIT"
] | null | null | null |
src_old/tests/scripts/iga/2d/poisson_v3.py
|
toddrme2178/pyccel
|
deec37503ab0c5d0bcca1a035f7909f7ce8ef653
|
[
"MIT"
] | null | null | null |
src_old/tests/scripts/iga/2d/poisson_v3.py
|
toddrme2178/pyccel
|
deec37503ab0c5d0bcca1a035f7909f7ce8ef653
|
[
"MIT"
] | null | null | null |
# coding: utf-8
# - run the command:
# > pyccel-quickstart poisson
# this will compile pyccel extensions and install them in $PWD/poisson/usr
# - export the following variables
# > export INCLUDE_DIR=$PWD/poisson/usr/include/poisson
# > export LIB_DIR=$PWD/poisson/usr/lib
# Usage:
# > pyccel poisson_v2.py --include='$INCLUDE_DIR' --libdir='$LIB_DIR' --libs=poisson --no-modules --execute
# Cleaning:
# > rm -f *.mod *.pyccel *.f90 *.o
from pyccelext.math.quadratures import legendre
from pyccelext.math.external.bsp import spl_make_open_knots
from pyccelext.math.external.bsp import spl_compute_spans
from pyccelext.math.external.bsp import spl_construct_grid_from_knots
from pyccelext.math.external.bsp import spl_construct_quadrature_grid
from pyccelext.math.external.bsp import spl_eval_on_grid_splines_ders
# ...
p1 = 2
p2 = 2
n_elements_1 = 8
n_elements_2 = 8
n_elements_1 = n_elements_1 - p1
n_elements_2 = n_elements_2 - p2
# number of derivatives
d1 = 1
d2 = 1
n1 = p1 + n_elements_1
n2 = p2 + n_elements_2
k1 = p1 + 1
k2 = p2 + 1
verbose = False
#verbose = True
# ...
# ...
[u1,w1] = legendre(p1)
# ...
# ...
[u2,w2] = legendre(p2)
# ...
# ...
m1 = n1 + p1 + 1
m2 = n2 + p2 + 1
knots1 = zeros(m1, double)
knots2 = zeros(m2, double)
# call to spl
knots1 = spl_make_open_knots (n1, p1)
# call to spl
knots2 = spl_make_open_knots (n2, p2)
# ...
# ... TODO fix args of zeros
m1 = n_elements_1+1
m2 = n_elements_2+1
grid_1 = zeros(m1, double)
grid_2 = zeros(m2, double)
# call to spl
grid_1 = spl_construct_grid_from_knots(p1, n1, n_elements_1, knots1)
# call to spl
grid_2 = spl_construct_grid_from_knots(p2, n2, n_elements_2, knots2)
# ...
# ... construct the quadrature points grid
points_1 = zeros((k1, n_elements_1), double)
points_2 = zeros((k2, n_elements_2), double)
weights_1 = zeros((k1, n_elements_1), double)
weights_2 = zeros((k2, n_elements_2), double)
# call to spl
[points_1, weights_1] = spl_construct_quadrature_grid(u1, w1, grid_1)
# call to spl
[points_2, weights_2] = spl_construct_quadrature_grid(u2, w2, grid_2)
# ...
# ...
basis_1 = zeros((p1+1, d1+1, k1, n_elements_1), double)
basis_2 = zeros((p2+1, d2+1, k2, n_elements_2), double)
# call to spl
basis_1 = spl_eval_on_grid_splines_ders(n1, p1, d1, knots1, points_1)
# call to spl
basis_2 = spl_eval_on_grid_splines_ders(n2, p2, d2, knots2, points_2)
# ...
# ...
spans_1 = zeros(n_elements_1, int)
spans_2 = zeros(n_elements_2, int)
spans_1 = spl_compute_spans(p1, n1, knots1)
spans_2 = spl_compute_spans(p2, n2, knots2)
# ...
# ...
start_1 = 0
end_1 = n1-1
pad_1 = p1
start_2 = 0
end_2 = n2-1
pad_2 = p2
# ...
# ...
mass = stencil((start_1, start_2), (end_1, end_2), (pad_1, pad_2))
stiffness = stencil((start_1, start_2), (end_1, end_2), (pad_1, pad_2))
rhs = vector((start_1-pad_1, start_2-pad_2), (end_1+pad_1, end_2+pad_2))
# ...
# ... build matrix
for ie1 in range(0, n_elements_1):
for ie2 in range(0, n_elements_2):
i_span_1 = spans_1[ie1]
i_span_2 = spans_2[ie2]
for il_1 in range(0, p1+1):
for jl_1 in range(0, p1+1):
for il_2 in range(0, p2+1):
for jl_2 in range(0, p2+1):
i1 = i_span_1 - p1 - 1 + il_1
j1 = i_span_1 - p1 - 1 + jl_1
i2 = i_span_2 - p2 - 1 + il_2
j2 = i_span_2 - p2 - 1 + jl_2
v_m = 0.0
v_s = 0.0
for g1 in range(0, k1):
for g2 in range(0, k2):
bi_0 = basis_1[il_1, 0, g1, ie1] * basis_2[il_2, 0, g2, ie2]
bi_x = basis_1[il_1, 1, g1, ie1] * basis_2[il_2, 0, g2, ie2]
bi_y = basis_1[il_1, 0, g1, ie1] * basis_2[il_2, 1, g2, ie2]
bj_0 = basis_1[jl_1, 0, g1, ie1] * basis_2[jl_2, 0, g2, ie2]
bj_x = basis_1[jl_1, 1, g1, ie1] * basis_2[jl_2, 0, g2, ie2]
bj_y = basis_1[jl_1, 0, g1, ie1] * basis_2[jl_2, 1, g2, ie2]
wvol = weights_1[g1, ie1] * weights_2[g2, ie2]
v_m += bi_0 * bj_0 * wvol
v_s += (bi_x * bj_x + bi_y * bj_y) * wvol
mass[j1 - i1, j2 - i2, i1, i2] += v_m
stiffness[j1 - i1, j2 - i2, i1, i2] += v_s
# ...
for i1 in range(0, n1):
for i2 in range(0, n2):
for k1 in range(-p1, p1+1):
for k2 in range(-p2, p2+1):
print (i1, i2, k1, k2, mass[k1,k2,i1,i2])
print ('done')
pass
# ... build rhs
for ie1 in range(0, n_elements_1):
for ie2 in range(0, n_elements_2):
i_span_1 = spans_1[ie1]
i_span_2 = spans_2[ie2]
for il_1 in range(0, p1+1):
for il_2 in range(0, p2+1):
i1 = i_span_1 - p1 - 1 + il_1
i2 = i_span_2 - p2 - 1 + il_2
v = 0.0
for g1 in range(0, k1):
for g2 in range(0, k2):
bi_0 = basis_1[il_1, 0, g1, ie1] * basis_2[il_2, 0, g2, ie2]
bi_x = basis_1[il_1, 1, g1, ie1] * basis_2[il_2, 0, g2, ie2]
bi_y = basis_1[il_1, 0, g1, ie1] * basis_2[il_2, 1, g2, ie2]
x1 = points_1[g1, ie1]
x2 = points_2[g2, ie2]
wvol = weights_1[g1, ie1] * weights_2[g2, ie2]
v += bi_0 * x1 * (1.0 - x1) * x2 * (1.0 - x2) * wvol
rhs[i1, i2] += v
# ...
# ... define matrix-vector product
#$ header procedure mv(double [:,:,:,:], double [:,:], double [:,:])
# Stencil matrix-vector product: y <- mat @ x, where mat is stored in banded
# (offset-diagonal) form mat[k1, k2, i1, i2] and x/y are 2D padded vectors.
# NOTE(review): this is pyccel-dialect code; per the `#$ header procedure`
# pragma above, y is an output array and `y = 0.0` presumably zero-fills it
# in the generated code -- in plain CPython it would merely rebind the local.
def mv(mat, x, y):
    y = 0.0
    # Loop over vector entries (i1, i2) and stencil offsets (k1, k2).
    for i1 in range(start_1, end_1+1):
        for i2 in range(start_2, end_2+1):
            for k1 in range(-p1, p1+1):
                for k2 in range(-p2, p2+1):
                    # (j1, j2) indexes the padded neighbour entry of x.
                    j1 = k1+i1
                    j2 = k2+i2
                    y[i1,i2] = y[i1,i2] + mat[k1,k2,i1,i2] * x[j1,j2]
# ...
# ... define dot for 2d arrays
#$ header function vdot(double[:,:], double[:,:]) results(double)
# Accumulate a scalar from two 2D padded vectors (used as the inner product
# in the CG/CR solvers below).
# NOTE(review): the indexing xl[k1,i1] * xr[k2,i2] mixes stencil offsets with
# entry indices instead of the elementwise xl[i1,i2] * xr[i1,i2] one would
# expect for a dot product -- verify against the intended semantics.
def vdot(xl, xr):
    r = 0.0
    for i1 in range(start_1, end_1+1):
        for i2 in range(start_2, end_2+1):
            for k1 in range(-p1, p1+1):
                for k2 in range(-p2, p2+1):
                    r += xl[k1,i1] * xr[k2,i2]
    return r
# ...
# ... CGL performs maxit CG iterations on the linear system Ax = b
# starting from x = x0
#$ header procedure cgl(double [:,:,:,:], double [:,:], double [:,:], int, double)
# Conjugate-Gradient solver for the stencil system mat @ x = b.
# Runs at most maxit iterations starting from x0; on exit (convergence,
# i.e. ||r|| < tol, or iteration limit) the result is written back into x0.
def cgl(mat, b, x0, maxit, tol):
    xk = zeros_like(x0)
    mx = zeros_like(x0)
    p = zeros_like(x0)
    q = zeros_like(x0)
    r = zeros_like(x0)
    xk = x0
    # Initial residual r = b - mat @ x0; first search direction p = r.
    mv(mat, x0, mx)
    r = b - mx
    p = r
    rdr = vdot(r,r)
    for i_iter in range(1, maxit+1):
        # q = mat @ p
        mv(mat, p, q)
        alpha = rdr / vdot (p, q)
        xk = xk + alpha * p
        r = r - alpha * q
        norm_err = sqrt(vdot(r, r))
        print (i_iter, norm_err)
        if norm_err < tol:
            x0 = xk
            break
        # Fletcher-Reeves update of the search direction.
        rdrold = rdr
        rdr = vdot(r, r)
        beta = rdr / rdrold
        p = r + beta * p
    x0 = xk
# ...
# ... CRL performs maxit CG iterations on the linear system Ax = b
# where A is a symmetric positive definite matrix, using CG method
# starting from x = x0
#$ header procedure crl(double [:,:,:,:], double [:,:], double [:,:], int, double)
# Conjugate-Residual solver for mat @ x = b (symmetric positive definite mat).
# Runs at most maxit iterations from x0; result is written back into x0.
# NOTE(review): per the TODO at the call site, this variant converges slowly
# and its use is currently commented out.
def crl(mat, b, x0, maxit, tol):
    xk = zeros_like(x0)
    mx = zeros_like(x0)
    p = zeros_like(x0)
    q = zeros_like(x0)
    r = zeros_like(x0)
    s = zeros_like(x0)
    xk = x0
    # Initial residual r = b - mat @ x0; p = r; q = mat @ p; s = mat @ r.
    mv(mat, x0, mx)
    r = b - mx
    p = r
    mv(mat, p, q)
    s = q
    sdr = vdot(s,r)
    for i_iter in range(1, maxit+1):
        alpha = sdr / vdot (q, q)
        xk = xk + alpha * p
        r = r - alpha * q
        norm_err = sqrt(vdot(r, r))
        print (i_iter, norm_err)
        if norm_err < tol:
            x0 = xk
            break
        # s = mat @ r; update both search direction p and its image q.
        mv(mat, r, s)
        sdrold = sdr
        sdr = vdot(s, r)
        beta = sdr / sdrold
        p = r + beta * p
        q = s + beta * q
    x0 = xk
# ...
# ...
x0 = vector((start_1-pad_1, start_2-pad_2), (end_1+pad_1, end_2+pad_2))
xn = vector((start_1-pad_1, start_2-pad_2), (end_1+pad_1, end_2+pad_2))
y = vector((start_1-pad_1, start_2-pad_2), (end_1+pad_1, end_2+pad_2))
# ...
# ...
n_maxiter = 100
tol = 1.0e-7
xn = 0.0
cgl(mass, rhs, xn, n_maxiter, tol)
# TODO crl is converging slowly. must be investigated
#xn = 0.0
#crl(stiffness, rhs, xn, n_maxiter, tol)
mv(mass, xn, x0)
print ('> residual error = ', max(abs(x0-rhs)))
# ...
del knots1
del grid_1
del points_1
del weights_1
del basis_1
del spans_1
del knots2
del grid_2
del points_2
del weights_2
del basis_2
del spans_2
del mass
del stiffness
del rhs
| 25.78098
| 110
| 0.54013
|
from pyccelext.math.quadratures import legendre
from pyccelext.math.external.bsp import spl_make_open_knots
from pyccelext.math.external.bsp import spl_compute_spans
from pyccelext.math.external.bsp import spl_construct_grid_from_knots
from pyccelext.math.external.bsp import spl_construct_quadrature_grid
from pyccelext.math.external.bsp import spl_eval_on_grid_splines_ders
p1 = 2
p2 = 2
n_elements_1 = 8
n_elements_2 = 8
n_elements_1 = n_elements_1 - p1
n_elements_2 = n_elements_2 - p2
d1 = 1
d2 = 1
n1 = p1 + n_elements_1
n2 = p2 + n_elements_2
k1 = p1 + 1
k2 = p2 + 1
verbose = False
[u1,w1] = legendre(p1)
[u2,w2] = legendre(p2)
m1 = n1 + p1 + 1
m2 = n2 + p2 + 1
knots1 = zeros(m1, double)
knots2 = zeros(m2, double)
knots1 = spl_make_open_knots (n1, p1)
knots2 = spl_make_open_knots (n2, p2)
m1 = n_elements_1+1
m2 = n_elements_2+1
grid_1 = zeros(m1, double)
grid_2 = zeros(m2, double)
grid_1 = spl_construct_grid_from_knots(p1, n1, n_elements_1, knots1)
grid_2 = spl_construct_grid_from_knots(p2, n2, n_elements_2, knots2)
points_1 = zeros((k1, n_elements_1), double)
points_2 = zeros((k2, n_elements_2), double)
weights_1 = zeros((k1, n_elements_1), double)
weights_2 = zeros((k2, n_elements_2), double)
[points_1, weights_1] = spl_construct_quadrature_grid(u1, w1, grid_1)
[points_2, weights_2] = spl_construct_quadrature_grid(u2, w2, grid_2)
basis_1 = zeros((p1+1, d1+1, k1, n_elements_1), double)
basis_2 = zeros((p2+1, d2+1, k2, n_elements_2), double)
basis_1 = spl_eval_on_grid_splines_ders(n1, p1, d1, knots1, points_1)
basis_2 = spl_eval_on_grid_splines_ders(n2, p2, d2, knots2, points_2)
spans_1 = zeros(n_elements_1, int)
spans_2 = zeros(n_elements_2, int)
spans_1 = spl_compute_spans(p1, n1, knots1)
spans_2 = spl_compute_spans(p2, n2, knots2)
start_1 = 0
end_1 = n1-1
pad_1 = p1
start_2 = 0
end_2 = n2-1
pad_2 = p2
mass = stencil((start_1, start_2), (end_1, end_2), (pad_1, pad_2))
stiffness = stencil((start_1, start_2), (end_1, end_2), (pad_1, pad_2))
rhs = vector((start_1-pad_1, start_2-pad_2), (end_1+pad_1, end_2+pad_2))
for ie1 in range(0, n_elements_1):
for ie2 in range(0, n_elements_2):
i_span_1 = spans_1[ie1]
i_span_2 = spans_2[ie2]
for il_1 in range(0, p1+1):
for jl_1 in range(0, p1+1):
for il_2 in range(0, p2+1):
for jl_2 in range(0, p2+1):
i1 = i_span_1 - p1 - 1 + il_1
j1 = i_span_1 - p1 - 1 + jl_1
i2 = i_span_2 - p2 - 1 + il_2
j2 = i_span_2 - p2 - 1 + jl_2
v_m = 0.0
v_s = 0.0
for g1 in range(0, k1):
for g2 in range(0, k2):
bi_0 = basis_1[il_1, 0, g1, ie1] * basis_2[il_2, 0, g2, ie2]
bi_x = basis_1[il_1, 1, g1, ie1] * basis_2[il_2, 0, g2, ie2]
bi_y = basis_1[il_1, 0, g1, ie1] * basis_2[il_2, 1, g2, ie2]
bj_0 = basis_1[jl_1, 0, g1, ie1] * basis_2[jl_2, 0, g2, ie2]
bj_x = basis_1[jl_1, 1, g1, ie1] * basis_2[jl_2, 0, g2, ie2]
bj_y = basis_1[jl_1, 0, g1, ie1] * basis_2[jl_2, 1, g2, ie2]
wvol = weights_1[g1, ie1] * weights_2[g2, ie2]
v_m += bi_0 * bj_0 * wvol
v_s += (bi_x * bj_x + bi_y * bj_y) * wvol
mass[j1 - i1, j2 - i2, i1, i2] += v_m
stiffness[j1 - i1, j2 - i2, i1, i2] += v_s
for i1 in range(0, n1):
for i2 in range(0, n2):
for k1 in range(-p1, p1+1):
for k2 in range(-p2, p2+1):
print (i1, i2, k1, k2, mass[k1,k2,i1,i2])
print ('done')
pass
for ie1 in range(0, n_elements_1):
for ie2 in range(0, n_elements_2):
i_span_1 = spans_1[ie1]
i_span_2 = spans_2[ie2]
for il_1 in range(0, p1+1):
for il_2 in range(0, p2+1):
i1 = i_span_1 - p1 - 1 + il_1
i2 = i_span_2 - p2 - 1 + il_2
v = 0.0
for g1 in range(0, k1):
for g2 in range(0, k2):
bi_0 = basis_1[il_1, 0, g1, ie1] * basis_2[il_2, 0, g2, ie2]
bi_x = basis_1[il_1, 1, g1, ie1] * basis_2[il_2, 0, g2, ie2]
bi_y = basis_1[il_1, 0, g1, ie1] * basis_2[il_2, 1, g2, ie2]
x1 = points_1[g1, ie1]
x2 = points_2[g2, ie2]
wvol = weights_1[g1, ie1] * weights_2[g2, ie2]
v += bi_0 * x1 * (1.0 - x1) * x2 * (1.0 - x2) * wvol
rhs[i1, i2] += v
def mv(mat, x, y):
y = 0.0
for i1 in range(start_1, end_1+1):
for i2 in range(start_2, end_2+1):
for k1 in range(-p1, p1+1):
for k2 in range(-p2, p2+1):
j1 = k1+i1
j2 = k2+i2
y[i1,i2] = y[i1,i2] + mat[k1,k2,i1,i2] * x[j1,j2]
def vdot(xl, xr):
r = 0.0
for i1 in range(start_1, end_1+1):
for i2 in range(start_2, end_2+1):
for k1 in range(-p1, p1+1):
for k2 in range(-p2, p2+1):
r += xl[k1,i1] * xr[k2,i2]
return r
def cgl(mat, b, x0, maxit, tol):
xk = zeros_like(x0)
mx = zeros_like(x0)
p = zeros_like(x0)
q = zeros_like(x0)
r = zeros_like(x0)
xk = x0
mv(mat, x0, mx)
r = b - mx
p = r
rdr = vdot(r,r)
for i_iter in range(1, maxit+1):
mv(mat, p, q)
alpha = rdr / vdot (p, q)
xk = xk + alpha * p
r = r - alpha * q
norm_err = sqrt(vdot(r, r))
print (i_iter, norm_err)
if norm_err < tol:
x0 = xk
break
rdrold = rdr
rdr = vdot(r, r)
beta = rdr / rdrold
p = r + beta * p
x0 = xk
def crl(mat, b, x0, maxit, tol):
xk = zeros_like(x0)
mx = zeros_like(x0)
p = zeros_like(x0)
q = zeros_like(x0)
r = zeros_like(x0)
s = zeros_like(x0)
xk = x0
mv(mat, x0, mx)
r = b - mx
p = r
mv(mat, p, q)
s = q
sdr = vdot(s,r)
for i_iter in range(1, maxit+1):
alpha = sdr / vdot (q, q)
xk = xk + alpha * p
r = r - alpha * q
norm_err = sqrt(vdot(r, r))
print (i_iter, norm_err)
if norm_err < tol:
x0 = xk
break
mv(mat, r, s)
sdrold = sdr
sdr = vdot(s, r)
beta = sdr / sdrold
p = r + beta * p
q = s + beta * q
x0 = xk
x0 = vector((start_1-pad_1, start_2-pad_2), (end_1+pad_1, end_2+pad_2))
xn = vector((start_1-pad_1, start_2-pad_2), (end_1+pad_1, end_2+pad_2))
y = vector((start_1-pad_1, start_2-pad_2), (end_1+pad_1, end_2+pad_2))
n_maxiter = 100
tol = 1.0e-7
xn = 0.0
cgl(mass, rhs, xn, n_maxiter, tol)
mv(mass, xn, x0)
print ('> residual error = ', max(abs(x0-rhs)))
del knots1
del grid_1
del points_1
del weights_1
del basis_1
del spans_1
del knots2
del grid_2
del points_2
del weights_2
del basis_2
del spans_2
del mass
del stiffness
del rhs
| true
| true
|
f70cadcbe24110b3af7282ceb62b0509c18ea21a
| 1,377
|
py
|
Python
|
fanuc_planning/src/scripts/aruco_transforms/world_aruco_id1_tf_listener.py
|
oaghub/FMS
|
f3a4836fcb993903d239935fe05b878f3fe1f874
|
[
"MIT"
] | null | null | null |
fanuc_planning/src/scripts/aruco_transforms/world_aruco_id1_tf_listener.py
|
oaghub/FMS
|
f3a4836fcb993903d239935fe05b878f3fe1f874
|
[
"MIT"
] | null | null | null |
fanuc_planning/src/scripts/aruco_transforms/world_aruco_id1_tf_listener.py
|
oaghub/FMS
|
f3a4836fcb993903d239935fe05b878f3fe1f874
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import roslib
import rospy
import tf
import geometry_msgs.msg
from geometry_msgs.msg import Pose
if __name__ == '__main__':
    rospy.init_node('world_aruco_ID_1_tf_listener')
    listener = tf.TransformListener()
    # Republishes the pose of ArUco marker ID 1 expressed in the "world" frame.
    # Is not a target_pose (only the position in terms of location is used).
    pub_aruco_world_pose = rospy.Publisher("world_to_aruco_ID_1/target_pose", geometry_msgs.msg.Pose, queue_size=1)
    while not rospy.is_shutdown():
        try:
            # rospy.Time(0) asks for the latest available transform.
            (position, orientation) = listener.lookupTransform("world", "camera_aruco_ID_1", rospy.Time(0))
        except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
            # Transform not available (yet).
            # BUG FIX: the original `continue` jumped over the rospy.sleep(1)
            # at the bottom of the loop, so a persistently missing transform
            # made this loop busy-spin at 100% CPU. Sleep before retrying.
            rospy.sleep(1)
            continue
        # Repack the (translation, quaternion) tuples into a Pose message.
        msg = geometry_msgs.msg.Pose()
        msg.position.x = position[0]
        msg.position.y = position[1]
        msg.position.z = position[2]
        msg.orientation.x = orientation[0]
        msg.orientation.y = orientation[1]
        msg.orientation.z = orientation[2]
        msg.orientation.w = orientation[3]
        pub_aruco_world_pose.publish(msg)
        # Publish at roughly 1 Hz.
        rospy.sleep(1)
| 32.785714
| 114
| 0.654321
|
import roslib
import rospy
import tf
import geometry_msgs.msg
from geometry_msgs.msg import Pose
if __name__ == '__main__':
rospy.init_node('world_aruco_ID_1_tf_listener')
listener = tf.TransformListener()
pub_aruco_world_pose = rospy.Publisher("world_to_aruco_ID_1/target_pose", geometry_msgs.msg.Pose,queue_size=1)
msg = geometry_msgs.msg.Pose()
try:
(position,orientation) = listener.lookupTransform("world", "camera_aruco_ID_1", rospy.Time(0))
msg.position.x = position[0]
msg.position.y = position[1]
msg.position.z = position[2]
msg.orientation.x = orientation[0]
msg.orientation.y = orientation[1]
msg.orientation.z = orientation[2]
msg.orientation.w = orientation[3]
pub_aruco_world_pose.publish(msg)
except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
continue
rospy.sleep(1)
| true
| true
|
f70cae20241d08ae3962b49b578a5c3b6f9ba735
| 12,284
|
py
|
Python
|
libp2p/stream_muxer/mplex/mplex.py
|
ShadowJonathan/py-libp2p
|
4d814f05870f31c10927e8f1ee032ca8047b16b9
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
libp2p/stream_muxer/mplex/mplex.py
|
ShadowJonathan/py-libp2p
|
4d814f05870f31c10927e8f1ee032ca8047b16b9
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
libp2p/stream_muxer/mplex/mplex.py
|
ShadowJonathan/py-libp2p
|
4d814f05870f31c10927e8f1ee032ca8047b16b9
|
[
"Apache-2.0",
"MIT"
] | 2
|
2020-10-13T07:43:55.000Z
|
2020-10-27T08:51:53.000Z
|
import asyncio
import logging
from typing import Any # noqa: F401
from typing import Awaitable, Dict, List, Optional, Tuple
from libp2p.exceptions import ParseError
from libp2p.io.exceptions import IncompleteReadError
from libp2p.network.connection.exceptions import RawConnError
from libp2p.peer.id import ID
from libp2p.security.secure_conn_interface import ISecureConn
from libp2p.stream_muxer.abc import IMuxedConn, IMuxedStream
from libp2p.typing import TProtocol
from libp2p.utils import (
decode_uvarint_from_stream,
encode_uvarint,
encode_varint_prefixed,
read_varint_prefixed_bytes,
)
from .constants import HeaderTags
from .datastructures import StreamID
from .exceptions import MplexUnavailable
from .mplex_stream import MplexStream
MPLEX_PROTOCOL_ID = TProtocol("/mplex/6.7.0")
logger = logging.getLogger("libp2p.stream_muxer.mplex.mplex")
class Mplex(IMuxedConn):
"""
reference: https://github.com/libp2p/go-mplex/blob/master/multiplex.go
"""
secured_conn: ISecureConn
peer_id: ID
next_channel_id: int
streams: Dict[StreamID, MplexStream]
streams_lock: asyncio.Lock
new_stream_queue: "asyncio.Queue[IMuxedStream]"
event_shutting_down: asyncio.Event
event_closed: asyncio.Event
_tasks: List["asyncio.Future[Any]"]
    def __init__(self, secured_conn: ISecureConn, peer_id: ID) -> None:
        """
        create a new muxed connection.
        :param secured_conn: an instance of ``ISecureConn`` carrying the raw
            frames for this connection
        :param peer_id: peer_id of peer the connection is to
        """
        self.secured_conn = secured_conn
        self.next_channel_id = 0
        # Set peer_id
        self.peer_id = peer_id
        # Mapping from stream ID -> buffer of messages for that stream
        self.streams = {}
        self.streams_lock = asyncio.Lock()
        self.new_stream_queue = asyncio.Queue()
        self.event_shutting_down = asyncio.Event()
        self.event_closed = asyncio.Event()
        self._tasks = []
        # Kick off reading incoming frames in the background for the whole
        # lifetime of the connection.
        self._tasks.append(asyncio.ensure_future(self.handle_incoming()))
@property
def is_initiator(self) -> bool:
return self.secured_conn.is_initiator
async def close(self) -> None:
"""close the stream muxer and underlying secured connection."""
if self.event_shutting_down.is_set():
return
# Set the `event_shutting_down`, to allow graceful shutdown.
self.event_shutting_down.set()
await self.secured_conn.close()
# Blocked until `close` is finally set.
await self.event_closed.wait()
def is_closed(self) -> bool:
"""
check connection is fully closed.
:return: true if successful
"""
return self.event_closed.is_set()
def _get_next_channel_id(self) -> int:
"""
Get next available stream id.
:return: next available stream id for the connection
"""
next_id = self.next_channel_id
self.next_channel_id += 1
return next_id
async def _initialize_stream(self, stream_id: StreamID, name: str) -> MplexStream:
stream = MplexStream(name, stream_id, self)
async with self.streams_lock:
self.streams[stream_id] = stream
return stream
async def open_stream(self) -> IMuxedStream:
"""
creates a new muxed_stream.
:return: a new ``MplexStream``
"""
channel_id = self._get_next_channel_id()
stream_id = StreamID(channel_id=channel_id, is_initiator=True)
# Default stream name is the `channel_id`
name = str(channel_id)
stream = await self._initialize_stream(stream_id, name)
await self.send_message(HeaderTags.NewStream, name.encode(), stream_id)
return stream
async def _wait_until_shutting_down_or_closed(self, coro: Awaitable[Any]) -> Any:
task_coro = asyncio.ensure_future(coro)
task_wait_closed = asyncio.ensure_future(self.event_closed.wait())
task_wait_shutting_down = asyncio.ensure_future(self.event_shutting_down.wait())
done, pending = await asyncio.wait(
[task_coro, task_wait_closed, task_wait_shutting_down],
return_when=asyncio.FIRST_COMPLETED,
)
for fut in pending:
fut.cancel()
if task_wait_closed in done:
raise MplexUnavailable("Mplex is closed")
if task_wait_shutting_down in done:
raise MplexUnavailable("Mplex is shutting down")
return task_coro.result()
async def accept_stream(self) -> IMuxedStream:
"""accepts a muxed stream opened by the other end."""
return await self._wait_until_shutting_down_or_closed(
self.new_stream_queue.get()
)
async def send_message(
self, flag: HeaderTags, data: Optional[bytes], stream_id: StreamID
) -> int:
"""
sends a message over the connection.
:param header: header to use
:param data: data to send in the message
:param stream_id: stream the message is in
"""
# << by 3, then or with flag
header = encode_uvarint((stream_id.channel_id << 3) | flag.value)
if data is None:
data = b""
_bytes = header + encode_varint_prefixed(data)
return await self._wait_until_shutting_down_or_closed(
self.write_to_stream(_bytes)
)
async def write_to_stream(self, _bytes: bytes) -> int:
"""
writes a byte array to a secured connection.
:param _bytes: byte array to write
:return: length written
"""
await self.secured_conn.write(_bytes)
return len(_bytes)
async def handle_incoming(self) -> None:
"""Read a message off of the secured connection and add it to the
corresponding message buffer."""
while True:
try:
await self._handle_incoming_message()
except MplexUnavailable as e:
logger.debug("mplex unavailable while waiting for incoming: %s", e)
break
# Force context switch
await asyncio.sleep(0)
# If we enter here, it means this connection is shutting down.
# We should clean things up.
await self._cleanup()
async def read_message(self) -> Tuple[int, int, bytes]:
"""
Read a single message off of the secured connection.
:return: stream_id, flag, message contents
"""
# FIXME: No timeout is used in Go implementation.
try:
header = await decode_uvarint_from_stream(self.secured_conn)
message = await asyncio.wait_for(
read_varint_prefixed_bytes(self.secured_conn), timeout=5
)
except (ParseError, RawConnError, IncompleteReadError) as error:
raise MplexUnavailable(
"failed to read messages correctly from the underlying connection"
) from error
except asyncio.TimeoutError as error:
raise MplexUnavailable(
"failed to read more message body within the timeout"
) from error
flag = header & 0x07
channel_id = header >> 3
return channel_id, flag, message
async def _handle_incoming_message(self) -> None:
"""
Read and handle a new incoming message.
:raise MplexUnavailable: `Mplex` encounters fatal error or is shutting down.
"""
channel_id, flag, message = await self._wait_until_shutting_down_or_closed(
self.read_message()
)
stream_id = StreamID(channel_id=channel_id, is_initiator=bool(flag & 1))
if flag == HeaderTags.NewStream.value:
await self._handle_new_stream(stream_id, message)
elif flag in (
HeaderTags.MessageInitiator.value,
HeaderTags.MessageReceiver.value,
):
await self._handle_message(stream_id, message)
elif flag in (HeaderTags.CloseInitiator.value, HeaderTags.CloseReceiver.value):
await self._handle_close(stream_id)
elif flag in (HeaderTags.ResetInitiator.value, HeaderTags.ResetReceiver.value):
await self._handle_reset(stream_id)
else:
# Receives messages with an unknown flag
# TODO: logging
async with self.streams_lock:
if stream_id in self.streams:
stream = self.streams[stream_id]
await stream.reset()
async def _handle_new_stream(self, stream_id: StreamID, message: bytes) -> None:
async with self.streams_lock:
if stream_id in self.streams:
# `NewStream` for the same id is received twice...
raise MplexUnavailable(
f"received NewStream message for existing stream: {stream_id}"
)
mplex_stream = await self._initialize_stream(stream_id, message.decode())
await self._wait_until_shutting_down_or_closed(
self.new_stream_queue.put(mplex_stream)
)
async def _handle_message(self, stream_id: StreamID, message: bytes) -> None:
async with self.streams_lock:
if stream_id not in self.streams:
# We receive a message of the stream `stream_id` which is not accepted
# before. It is abnormal. Possibly disconnect?
# TODO: Warn and emit logs about this.
return
stream = self.streams[stream_id]
async with stream.close_lock:
if stream.event_remote_closed.is_set():
# TODO: Warn "Received data from remote after stream was closed by them. (len = %d)" # noqa: E501
return
await self._wait_until_shutting_down_or_closed(
stream.incoming_data.put(message)
)
async def _handle_close(self, stream_id: StreamID) -> None:
async with self.streams_lock:
if stream_id not in self.streams:
# Ignore unmatched messages for now.
return
stream = self.streams[stream_id]
# NOTE: If remote is already closed, then return: Technically a bug
# on the other side. We should consider killing the connection.
async with stream.close_lock:
if stream.event_remote_closed.is_set():
return
is_local_closed: bool
async with stream.close_lock:
stream.event_remote_closed.set()
is_local_closed = stream.event_local_closed.is_set()
# If local is also closed, both sides are closed. Then, we should clean up
# the entry of this stream, to avoid others from accessing it.
if is_local_closed:
async with self.streams_lock:
self.streams.pop(stream_id, None)
async def _handle_reset(self, stream_id: StreamID) -> None:
async with self.streams_lock:
if stream_id not in self.streams:
# This is *ok*. We forget the stream on reset.
return
stream = self.streams[stream_id]
async with stream.close_lock:
if not stream.event_remote_closed.is_set():
stream.event_reset.set()
stream.event_remote_closed.set()
# If local is not closed, we should close it.
if not stream.event_local_closed.is_set():
stream.event_local_closed.set()
async with self.streams_lock:
self.streams.pop(stream_id, None)
async def _cleanup(self) -> None:
if not self.event_shutting_down.is_set():
self.event_shutting_down.set()
async with self.streams_lock:
for stream in self.streams.values():
async with stream.close_lock:
if not stream.event_remote_closed.is_set():
stream.event_remote_closed.set()
stream.event_reset.set()
stream.event_local_closed.set()
self.streams = None
self.event_closed.set()
| 37
| 114
| 0.633507
|
import asyncio
import logging
from typing import Any
from typing import Awaitable, Dict, List, Optional, Tuple
from libp2p.exceptions import ParseError
from libp2p.io.exceptions import IncompleteReadError
from libp2p.network.connection.exceptions import RawConnError
from libp2p.peer.id import ID
from libp2p.security.secure_conn_interface import ISecureConn
from libp2p.stream_muxer.abc import IMuxedConn, IMuxedStream
from libp2p.typing import TProtocol
from libp2p.utils import (
decode_uvarint_from_stream,
encode_uvarint,
encode_varint_prefixed,
read_varint_prefixed_bytes,
)
from .constants import HeaderTags
from .datastructures import StreamID
from .exceptions import MplexUnavailable
from .mplex_stream import MplexStream
MPLEX_PROTOCOL_ID = TProtocol("/mplex/6.7.0")
logger = logging.getLogger("libp2p.stream_muxer.mplex.mplex")
class Mplex(IMuxedConn):
    """Stream multiplexer ("mplex") over a single secured connection."""

    secured_conn: ISecureConn
    peer_id: ID
    # Next id handed out to locally opened streams.
    next_channel_id: int
    # Live streams; guarded by `streams_lock`.
    streams: Dict[StreamID, MplexStream]
    streams_lock: asyncio.Lock
    # Remote-opened streams awaiting `accept_stream`.
    new_stream_queue: "asyncio.Queue[IMuxedStream]"
    event_shutting_down: asyncio.Event
    event_closed: asyncio.Event
    _tasks: List["asyncio.Future[Any]"]

    def __init__(self, secured_conn: ISecureConn, peer_id: ID) -> None:
        """Create a muxed connection over ``secured_conn`` to ``peer_id``."""
        self.secured_conn = secured_conn
        self.next_channel_id = 0
        self.peer_id = peer_id
        self.streams = {}
        self.streams_lock = asyncio.Lock()
        self.new_stream_queue = asyncio.Queue()
        self.event_shutting_down = asyncio.Event()
        self.event_closed = asyncio.Event()
        self._tasks = []
        # Background task that demultiplexes incoming messages.
        self._tasks.append(asyncio.ensure_future(self.handle_incoming()))

    @property
    def is_initiator(self) -> bool:
        # True iff we dialed the underlying secured connection.
        return self.secured_conn.is_initiator

    async def close(self) -> None:
        """Close the muxer and underlying connection; idempotent."""
        if self.event_shutting_down.is_set():
            return
        self.event_shutting_down.set()
        await self.secured_conn.close()
        # Wait until `_cleanup` has finished.
        await self.event_closed.wait()

    def is_closed(self) -> bool:
        """Return True once the connection is fully closed."""
        return self.event_closed.is_set()

    def _get_next_channel_id(self) -> int:
        """Return the next available stream id for this connection."""
        next_id = self.next_channel_id
        self.next_channel_id += 1
        return next_id

    async def _initialize_stream(self, stream_id: StreamID, name: str) -> MplexStream:
        """Create a stream object and register it under the lock."""
        stream = MplexStream(name, stream_id, self)
        async with self.streams_lock:
            self.streams[stream_id] = stream
        return stream

    async def open_stream(self) -> IMuxedStream:
        """Open a new muxed stream and announce it to the remote side."""
        channel_id = self._get_next_channel_id()
        stream_id = StreamID(channel_id=channel_id, is_initiator=True)
        # Default stream name is the channel id.
        name = str(channel_id)
        stream = await self._initialize_stream(stream_id, name)
        await self.send_message(HeaderTags.NewStream, name.encode(), stream_id)
        return stream

    async def _wait_until_shutting_down_or_closed(self, coro: Awaitable[Any]) -> Any:
        """Await ``coro``, aborting with ``MplexUnavailable`` on shutdown/close."""
        task_coro = asyncio.ensure_future(coro)
        task_wait_closed = asyncio.ensure_future(self.event_closed.wait())
        task_wait_shutting_down = asyncio.ensure_future(self.event_shutting_down.wait())
        done, pending = await asyncio.wait(
            [task_coro, task_wait_closed, task_wait_shutting_down],
            return_when=asyncio.FIRST_COMPLETED,
        )
        # Cancel unfinished waiters so no tasks leak.
        for fut in pending:
            fut.cancel()
        if task_wait_closed in done:
            raise MplexUnavailable("Mplex is closed")
        if task_wait_shutting_down in done:
            raise MplexUnavailable("Mplex is shutting down")
        return task_coro.result()

    async def accept_stream(self) -> IMuxedStream:
        """Accept a muxed stream opened by the other end."""
        return await self._wait_until_shutting_down_or_closed(
            self.new_stream_queue.get()
        )

    async def send_message(
        self, flag: HeaderTags, data: Optional[bytes], stream_id: StreamID
    ) -> int:
        """Send one mplex frame; return the number of bytes written."""
        # Header: channel_id << 3 | 3-bit flag, uvarint-encoded.
        header = encode_uvarint((stream_id.channel_id << 3) | flag.value)
        if data is None:
            data = b""
        _bytes = header + encode_varint_prefixed(data)
        return await self._wait_until_shutting_down_or_closed(
            self.write_to_stream(_bytes)
        )

    async def write_to_stream(self, _bytes: bytes) -> int:
        """Write ``_bytes`` to the secured connection; return the length written."""
        await self.secured_conn.write(_bytes)
        return len(_bytes)

    async def handle_incoming(self) -> None:
        """Loop reading messages off the connection until it becomes unavailable."""
        while True:
            try:
                await self._handle_incoming_message()
            except MplexUnavailable as e:
                logger.debug("mplex unavailable while waiting for incoming: %s", e)
                break
            # Force a context switch so other tasks can run.
            await asyncio.sleep(0)
        # Reaching here means the connection is shutting down; clean up.
        await self._cleanup()

    async def read_message(self) -> Tuple[int, int, bytes]:
        """Read one frame; return (channel_id, flag, message).

        :raise MplexUnavailable: on parse/connection errors or body timeout.
        """
        try:
            header = await decode_uvarint_from_stream(self.secured_conn)
            message = await asyncio.wait_for(
                read_varint_prefixed_bytes(self.secured_conn), timeout=5
            )
        except (ParseError, RawConnError, IncompleteReadError) as error:
            raise MplexUnavailable(
                "failed to read messages correctly from the underlying connection"
            ) from error
        except asyncio.TimeoutError as error:
            raise MplexUnavailable(
                "failed to read more message body within the timeout"
            ) from error
        # Low 3 bits are the flag; the rest is the channel id.
        flag = header & 0x07
        channel_id = header >> 3
        return channel_id, flag, message

    async def _handle_incoming_message(self) -> None:
        """Read one frame and dispatch it by flag.

        :raise MplexUnavailable: on fatal error or while shutting down.
        """
        channel_id, flag, message = await self._wait_until_shutting_down_or_closed(
            self.read_message()
        )
        # Low bit of the flag encodes which side initiated the stream.
        stream_id = StreamID(channel_id=channel_id, is_initiator=bool(flag & 1))
        if flag == HeaderTags.NewStream.value:
            await self._handle_new_stream(stream_id, message)
        elif flag in (
            HeaderTags.MessageInitiator.value,
            HeaderTags.MessageReceiver.value,
        ):
            await self._handle_message(stream_id, message)
        elif flag in (HeaderTags.CloseInitiator.value, HeaderTags.CloseReceiver.value):
            await self._handle_close(stream_id)
        elif flag in (HeaderTags.ResetInitiator.value, HeaderTags.ResetReceiver.value):
            await self._handle_reset(stream_id)
        else:
            # Unknown flag: reset the stream if we know it.
            async with self.streams_lock:
                if stream_id in self.streams:
                    stream = self.streams[stream_id]
                    await stream.reset()

    async def _handle_new_stream(self, stream_id: StreamID, message: bytes) -> None:
        """Register a remote-opened stream; ``message`` carries its name."""
        async with self.streams_lock:
            if stream_id in self.streams:
                # NewStream for an id we already track is a protocol violation.
                raise MplexUnavailable(
                    f"received NewStream message for existing stream: {stream_id}"
                )
        mplex_stream = await self._initialize_stream(stream_id, message.decode())
        await self._wait_until_shutting_down_or_closed(
            self.new_stream_queue.put(mplex_stream)
        )

    async def _handle_message(self, stream_id: StreamID, message: bytes) -> None:
        """Deliver a data frame into the target stream's incoming buffer."""
        async with self.streams_lock:
            if stream_id not in self.streams:
                # Message for a stream we never accepted; drop it.
                return
            stream = self.streams[stream_id]
        async with stream.close_lock:
            if stream.event_remote_closed.is_set():
                # Data after the remote already closed; drop it.
                return
        await self._wait_until_shutting_down_or_closed(
            stream.incoming_data.put(message)
        )

    async def _handle_close(self, stream_id: StreamID) -> None:
        """Mark the remote side closed; drop the entry once both sides are."""
        async with self.streams_lock:
            if stream_id not in self.streams:
                # Unmatched close; ignore.
                return
            stream = self.streams[stream_id]
        # Remote closing twice is technically a bug on the other side.
        async with stream.close_lock:
            if stream.event_remote_closed.is_set():
                return
        is_local_closed: bool
        async with stream.close_lock:
            stream.event_remote_closed.set()
            is_local_closed = stream.event_local_closed.is_set()
        # Both ends closed: remove the entry so nothing else can reach it.
        if is_local_closed:
            async with self.streams_lock:
                self.streams.pop(stream_id, None)

    async def _handle_reset(self, stream_id: StreamID) -> None:
        """Handle a remote reset: close both directions and forget the stream."""
        async with self.streams_lock:
            if stream_id not in self.streams:
                # Unknown stream on reset is fine; nothing to forget.
                return
            stream = self.streams[stream_id]
        async with stream.close_lock:
            if not stream.event_remote_closed.is_set():
                stream.event_reset.set()
                stream.event_remote_closed.set()
            # Close the local side too if it isn't already.
            if not stream.event_local_closed.is_set():
                stream.event_local_closed.set()
        async with self.streams_lock:
            self.streams.pop(stream_id, None)

    async def _cleanup(self) -> None:
        """Reset all remaining streams and signal full closure."""
        if not self.event_shutting_down.is_set():
            self.event_shutting_down.set()
        async with self.streams_lock:
            for stream in self.streams.values():
                async with stream.close_lock:
                    if not stream.event_remote_closed.is_set():
                        stream.event_remote_closed.set()
                        stream.event_reset.set()
                        stream.event_local_closed.set()
            # NOTE(review): `streams` becomes None (not {}) — later access
            # would raise; presumably deliberate post-shutdown poisoning.
            self.streams = None
        # Unblocks `close()` and all shutdown-aware waiters.
        self.event_closed.set()
| true
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.