code stringlengths 281 23.7M |
|---|
def run_harness_generation(view, func: Function) -> None:
    """Generate a fuzzer harness for `func` using the closed-source template.

    Prompts the user for a harness output path and an optional override
    shared-object path, rewrites the target ELF into a shared object, then
    renders the C++ harness from the bundled template.
    """
    log.log_debug('Grabbing closed-source template from project folder')
    template_file = os.path.join(
        binaryninja.user_plugin_path(),
        'fuzzable/templates/linux_closed_source_harness.cpp',
    )

    target_path = view.file.filename
    binary = lief.parse(target_path)

    # Auto-generated names (sub_XXXX) are not real symbols; fall back to the
    # function's start address instead.
    symbol = func.name
    if 'sub_' in symbol:
        symbol = hex(func.address_ranges[0].start)

    params: t.List[str] = [f'{param.type}' for param in func.parameter_vars.vars]
    return_type = str(func.return_type)

    log.log_debug('Getting filename to write to')
    harness = interaction.get_save_filename_input('Harness path to write to?', 'cpp', '')
    if not harness:
        interaction.show_message_box('Error', 'Did not get required C/C++ harness path.')
        return
    harness = harness + '.cpp'

    log.log_debug('Getting override shared object to write to')
    override_path = interaction.get_save_filename_input('New shared object to write to?', 'so', '')
    if override_path:
        override_path = override_path + '.so'

    log.log_info('Generating harness from template')
    shared_obj = generate.transform_elf_to_so(Path(target_path), binary, symbol, override_path)
    generate.generate_harness(
        shared_obj,
        symbol,
        return_type=return_type,
        params=params,
        harness_path=template_file,
        output=harness,
    )
    interaction.show_message_box('Success', f'Done, wrote fuzzer harness to {harness}')
def extractLbDiaryBlogspotCom(item):
    """Build a release message for an lbdiary.blogspot.com feed item.

    Returns False for preview posts, items with no chapter/volume info,
    or items not tagged with a known series.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)

    # Previews and untitled chapter-less posts are not releases.
    if 'preview' in title.lower() or not (vol or chp):
        return False

    if 'WATTT' in item['tags']:
        return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
    return False
# NOTE(review): this appears to be an Exo (exo-lang) scheduled procedure,
# not plain Python — `size`, `i8`, and `seq` are DSL constructs; left
# byte-identical to preserve DSL semantics.
def blur2d_compute_root(n: size, consumer: i8[(n, n)], sin: i8[((n + 1), (n + 1))]):
    # 2x2 box blur with the producer stage computed "at root": the whole
    # intermediate image is materialized before the consumer loop nest runs.
    assert ((n % 4) == 0)
    # Intermediate buffer: a full copy of the (n+1)x(n+1) input.
    producer: i8[((n + 1), (n + 1))]
    for i in seq(0, (n + 1)):
        for j in seq(0, (n + 1)):
            producer[(i, j)] = sin[(i, j)]
    for i in seq(0, n):
        for j in seq(0, n):
            # Average of the 2x2 neighborhood anchored at (i, j).
            consumer[(i, j)] = ((((producer[(i, j)] + producer[(i, (j + 1))]) + producer[((i + 1), j)]) + producer[((i + 1), (j + 1))]) / 4.0)
class SaveImageAction(Action):
    """Menu action that saves the current scene to an image file."""

    def __init__(self, window):
        self._window = window
        self.name = 'S&ave Scene'

    def perform(self):
        """Prompt for a destination file and save the scene to it."""
        # (description, wildcard-extension) pairs, in menu order.
        formats = [
            ('PNG', '*.png'), ('JPG', '*.jpg'), ('TIFF', '*.tiff'),
            ('Bitmap', '*.bmp'), ('PostScript', '*.ps'), ('EPS', '*.eps'),
            ('PDF', '*.pdf'), ('Tex', '*.tex'), ('RIB', '*.rib'),
            ('WRL', '*.wrl'), ('Geomview', '*.oogl'), ('VRML', '*.vrml'),
            ('Wavefront', '*.obj'), ('Open Inventor', '*.iv'),
            ('Povray', '*.pov'), ('X3D', '*.x3d'),
        ]
        wildcard = ''.join(
            '{} ({})|{}|'.format(desc, ext, ext) for desc, ext in formats
        )
        wildcard += 'Determine by extension (*.*)|(*.*)'
        dlg = FileDialog(parent=self._window.control, action='save as',
                         wildcard=wildcard, title='Save scene to image')
        if dlg.open() == OK:
            self._window.scene.save(dlg.path)
class GetBlockHeadersRequest(BaseRequestResponseEvent[GetBlockHeadersResponse]):
    """Event-bus request for a span of block headers from a peer session."""

    session: SessionAPI
    # Starting point of the requested header span.
    block_number_or_hash: BlockIdentifier
    max_headers: int
    skip: int
    reverse: bool
    timeout: float

    # FIX: the original def took no parameters but was undecorated, so
    # calling it on an instance raised TypeError (instance passed as an
    # unexpected positional argument).  It is static by nature.
    @staticmethod
    def expected_response_type() -> Type[GetBlockHeadersResponse]:
        return GetBlockHeadersResponse
# FIX: the decorator name was stripped, leaving a bare argument tuple that
# evaluated to nothing — `main` was never registered as a Hydra entry point.
@hydra.main(version_base=None, config_path='conf', config_name='config')
def main(cfg: DictConfig) -> None:
    """Fan out Ray training jobs over every model/dataset combination.

    Args:
        cfg: Hydra-composed configuration; `cfg.ray.init` is forwarded to
            `ray.init` verbatim.
    """
    ray.init(**cfg.ray.init)
    results = []
    for model in ['alexnet', 'resnet']:
        for dataset in ['cifar10', 'imagenet']:
            overrides = [f'dataset={dataset}', f'model={model}']
            # Compose a per-run config from the current overrides.
            run_cfg = hydra.compose(overrides=overrides)
            ret = train.remote(overrides, run_cfg)
            results.append(ret)
    # Block until all remote runs complete, then report their scores.
    for overrides, score in ray.get(results):
        print(f'Result from {overrides} : {score}')
class JzCzhz(LCh):
    """JzCzhz color space: the cylindrical (lightness/chroma/hue) form of Jzazbz."""

    BASE = 'jzazbz'
    NAME = 'jzczhz'
    SERIALIZE = ('--jzczhz',)
    WHITE = WHITES['2deg']['D65']
    DYNAMIC_RANGE = 'hdr'
    # Friendly channel names mapped onto the Jz/Cz/hz channels.
    CHANNEL_ALIASES = {'lightness': 'jz', 'chroma': 'cz', 'hue': 'hz'}
    # Achromatic response model is shared with the rectangular Jzazbz space.
    ACHROMATIC = Jzazbz.ACHROMATIC
    CHANNELS = (Channel('jz', 0.0, 1.0, limit=(0.0, None)), Channel('cz', 0.0, 0.5, limit=(0.0, None)), Channel('hz', 0.0, 360.0, flags=FLG_ANGLE, nans=ACHROMATIC.hue))

    def resolve_channel(self, index: int, coords: Vector) -> float:
        """Resolve a possibly-NaN channel value to a concrete number.

        NaN hue/chroma are replaced with the ideal achromatic values derived
        from the lightness (coords[0]); other channels fall back to the
        channel's declared NaN substitute.
        """
        if (index == 2):
            h = coords[2]
            return (self.ACHROMATIC.get_ideal_hue(coords[0]) if math.isnan(h) else h)
        elif (index == 1):
            c = coords[1]
            return (self.ACHROMATIC.get_ideal_chroma(coords[0]) if math.isnan(c) else c)
        value = coords[index]
        return (self.channels[index].nans if math.isnan(value) else value)

    def is_achromatic(self, coords: Vector) -> bool:
        """Return True when the coordinates represent an achromatic color."""
        return ((coords[0] == 0.0) or self.ACHROMATIC.test(*coords))

    def hue_name(self) -> str:
        """Return the name of this space's hue channel."""
        return 'hz'
class Chip(Component):
    """Material Design chip-set component (data-only template wrapper)."""

    # CSS classes applied to the rendered root element.
    css_classes = ['mdc-chip-set']
    name = 'Material Design Chip'
    # HTML template; `{attrs}` is substituted with the element's attributes.
    str_repr = '\n<span {attrs} role="grid">\n <span class="mdc-evolution-chip-set__chips" role="presentation">\n <span class="mdc-evolution-chip" role="row" id="c0">\n <span class="mdc-evolution-chip__cell mdc-evolution-chip__cell--primary" role="gridcell">\n <button class="mdc-evolution-chip__action mdc-evolution-chip__action--primary rounded" style="border:none" type="button" tabindex="0">\n <span class="mdc-evolution-chip__ripple mdc-evolution-chip__ripple--primary"></span>\n <span class="mdc-evolution-chip__text-label">Chip one</span>\n </button>\n </span>\n </span>\n <span class="mdc-evolution-chip" role="row" id="c1">\n <span class="mdc-evolution-chip__cell mdc-evolution-chip__cell--primary" role="gridcell">\n <button class="mdc-evolution-chip__action mdc-evolution-chip__action--primary rounded" style="border:none" type="button" tabindex="-1">\n <span class="mdc-evolution-chip__ripple mdc-evolution-chip__ripple--primary"></span>\n <span class="mdc-evolution-chip__text-label">Chip two</span>\n </button>\n </span>\n </span>\n </span>\n</span>\n'
    # JS snippet run after insertion to attach the MDC chip-set behavior.
    _js__builder__ = 'window[htmlObj.id] = new mdc.chips.MDCChipSet(htmlObj)'
class PythonWheelBuilder(BuilderBase):
    """Install a pre-extracted Python wheel via a generated CMake build.

    The source directory is expected to be an extracted wheel named
    `MANIFESTNAME-DISTRIBUTION-VERSION-...whl`.  A CMakeLists.txt is
    generated that installs the wheel contents with
    `add_fb_python_library()`, then a normal CMake build is driven.
    """

    # Set by _build(): absolute path to the wheel's .dist-info directory.
    dist_info_dir: str
    # Substitutions shared by the generated CMake files.
    template_format_dict: Dict[(str, str)]

    def _build(self, install_dirs: List[str], reconfigure: bool) -> None:
        """Generate the CMake project for the wheel and build it."""
        wheel_name = self._parse_wheel_name()
        name_version_prefix = '-'.join((wheel_name.distribution, wheel_name.version))
        dist_info_name = name_version_prefix + '.dist-info'
        data_dir_name = name_version_prefix + '.data'
        self.dist_info_dir = os.path.join(self.src_dir, dist_info_name)
        wheel_metadata = self._read_wheel_metadata(wheel_name)
        version = wheel_metadata['Wheel-Version']
        if not version.startswith('1.'):
            raise Exception('unsupported wheel version %s' % (version,))
        dep_list = sorted(self.manifest.get_section_as_dict('dependencies', self.ctx).keys())
        find_dependency_lines = ['find_dependency({})'.format(dep) for dep in dep_list]
        getdeps_cmake_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'CMake')
        self.template_format_dict = {
            'cmake_dir': _to_cmake_path(getdeps_cmake_dir),
            'lib_name': self.manifest.name,
            'manifest_name': self.manifest.name,
            'namespace': self.manifest.name,
            'upper_name': self.manifest.name.upper().replace('-', '_'),
            'find_dependency_lines': '\n'.join(find_dependency_lines),
        }
        path_mapping = {}
        for entry in os.listdir(self.src_dir):
            if entry in (dist_info_name, data_dir_name):
                continue
            self._add_sources(path_mapping, os.path.join(self.src_dir, entry), entry)
        # FIX: check for the .data directory inside the wheel source tree;
        # the original tested a bare relative name against the CWD.
        if os.path.exists(os.path.join(self.src_dir, data_dir_name)):
            raise Exception(('handling of the subdirectories inside %s is not implemented yet' % data_dir_name))
        self._write_cmakelists(path_mapping, dep_list)
        self._write_cmake_config_template()
        self._run_cmake_build(install_dirs, reconfigure)

    def _run_cmake_build(self, install_dirs: List[str], reconfigure: bool) -> None:
        """Delegate the actual build to a CMakeBuilder over the generated project."""
        cmake_builder = CMakeBuilder(build_opts=self.build_opts, ctx=self.ctx, manifest=self.manifest, src_dir=self.build_dir, build_dir=self.build_dir, inst_dir=self.inst_dir, loader=None, defines={}, final_install_prefix=None)
        cmake_builder.build(install_dirs=install_dirs, reconfigure=reconfigure)

    def _write_cmakelists(self, path_mapping: Dict[(str, str)], dependencies) -> None:
        """Emit the CMakeLists.txt listing every wheel source and dependency."""
        cmake_path = os.path.join(self.build_dir, 'CMakeLists.txt')
        with open(cmake_path, 'w') as f:
            f.write(CMAKE_HEADER.format(**self.template_format_dict))
            for dep in dependencies:
                f.write('find_package({0} REQUIRED)\n'.format(dep))
            f.write('add_fb_python_library({lib_name}\n'.format(**self.template_format_dict))
            f.write((' BASE_DIR "%s"\n' % _to_cmake_path(self.src_dir)))
            f.write(' SOURCES\n')
            for (src_path, install_path) in path_mapping.items():
                f.write((' "%s=%s"\n' % (_to_cmake_path(src_path), _to_cmake_path(install_path))))
            if dependencies:
                f.write(' DEPENDS\n')
                for dep in dependencies:
                    f.write(' "{0}::{0}"\n'.format(dep))
            f.write(')\n')
            f.write(CMAKE_FOOTER.format(**self.template_format_dict))

    def _write_cmake_config_template(self) -> None:
        """Emit the <name>-config.cmake.in template alongside the CMakeLists."""
        config_path_name = (self.manifest.name + '-config.cmake.in')
        output_path = os.path.join(self.build_dir, config_path_name)
        with open(output_path, 'w') as f:
            f.write(CMAKE_CONFIG_FILE.format(**self.template_format_dict))

    def _add_sources(self, path_mapping: Dict[(str, str)], src_path: str, install_path: str) -> None:
        """Recursively record src->install mappings for every regular file."""
        s = os.lstat(src_path)
        if not stat.S_ISDIR(s.st_mode):
            path_mapping[src_path] = install_path
            return
        for entry in os.listdir(src_path):
            self._add_sources(path_mapping, os.path.join(src_path, entry), os.path.join(install_path, entry))

    def _parse_wheel_name(self) -> WheelNameInfo:
        """Parse the wheel filename (PEP 427) out of the source directory name."""
        wheel_name = os.path.basename(self.src_dir)
        prefix = (self.manifest.name + '-')
        if not wheel_name.startswith(prefix):
            raise Exception(('expected wheel source directory to be of the form %s-NAME.whl' % (prefix,)))
        wheel_name = wheel_name[len(prefix):]
        wheel_name_re = re.compile('(?P<distribution>[^-]+)-(?P<version>\\d+[^-]*)(-(?P<build>\\d+[^-]*))?-(?P<python>\\w+\\d+(\\.\\w+\\d+)*)-(?P<abi>\\w+)-(?P<platform>\\w+(\\.\\w+)*)\\.whl')
        match = wheel_name_re.match(wheel_name)
        if not match:
            # FIX: the original message contained %s but never substituted it.
            raise Exception('bad python wheel name %s: expected to have the form DISTRIBUTION-VERSION-[-BUILD]-PYTAG-ABI-PLATFORM' % (wheel_name,))
        return WheelNameInfo(distribution=match.group('distribution'), version=match.group('version'), build=match.group('build'), python=match.group('python'), abi=match.group('abi'), platform=match.group('platform'))

    def _read_wheel_metadata(self, wheel_name):
        """Parse the RFC 822-style WHEEL metadata file from .dist-info."""
        metadata_path = os.path.join(self.dist_info_dir, 'WHEEL')
        with codecs.open(metadata_path, 'r', encoding='utf-8') as f:
            return email.message_from_file(f)
def get_size(data: bytes):
    """Return (width, height) for an image given its raw bytes.

    Understands GIF, PNG, WebP (VP8/VP8L/VP8X), inline SVG, and JPEG.
    Returns (0, 0) when the format is not recognised.
    """
    n = len(data)

    # GIF: dimensions follow the 6-byte signature, little-endian shorts.
    if n >= 10 and data[:6] in (b'GIF87a', b'GIF89a'):
        width, height = struct.unpack('<HH', data[6:10])
        return (int(width), int(height))

    png_magic = b'\x89PNG\r\n\x1a\n'
    # PNG with the IHDR chunk tag where a standard file places it.
    if n >= 24 and data.startswith(png_magic) and data[12:16] == b'IHDR':
        width, height = struct.unpack('>LL', data[16:24])
        return (int(width), int(height))
    # PNG fallback path (no IHDR tag at the expected offset).
    if n >= 16 and data.startswith(png_magic):
        width, height = struct.unpack('>LL', data[8:16])
        return (int(width), int(height))

    # WebP: a RIFF container whose form type is WEBP.
    if n >= 30 and data[:4] == b'RIFF' and data[8:12] == b'WEBP':
        chunk_type = data[12:16]
        if chunk_type == b'VP8 ':
            width, height = struct.unpack('<HH', data[26:30])
        elif chunk_type == b'VP8L':
            # Lossless: 14-bit width/height packed into one little-endian int.
            bits = struct.unpack('<I', data[21:25])[0]
            width = int(bits & 16383) + 1
            height = int((bits >> 14) & 16383) + 1
        elif chunk_type == b'VP8X':
            # Extended: 24-bit little-endian canvas size minus one.
            width = int(data[24] | (data[25] << 8) | (data[26] << 16)) + 1
            height = int(data[27] | (data[28] << 8) | (data[29] << 16)) + 1
        else:
            width = 0
            height = 0
        return (width, height)

    # SVG: scrape width/height attributes out of the opening tag.
    if b'<svg' in data:
        start = data.index(b'<svg')
        end = data.index(b'>', start)
        tag = str(data[start:(end + 1)], 'utf8')
        w_match = re.search('width=["\\\'](\\d+)', tag)
        h_match = re.search('height=["\\\'](\\d+)', tag)
        return (int(w_match.group(1)) if w_match else 0,
                int(h_match.group(1)) if h_match else 0)

    # JPEG: walk marker segments until a SOF0..SOF3 marker reveals the size.
    if n >= 2 and data.startswith(b'\xff\xd8'):
        with io.BytesIO(data) as stream:
            stream.seek(0)
            stream.read(2)
            marker = stream.read(1)
            while marker and ord(marker) != 218:  # 0xDA = start of scan
                while ord(marker) != 255:
                    marker = stream.read(1)
                while ord(marker) == 255:
                    marker = stream.read(1)
                if 192 <= ord(marker) <= 195:  # SOF0..SOF3 carry dimensions
                    stream.read(3)
                    height, width = struct.unpack('>HH', stream.read(4))
                    return (int(width), int(height))
                # Not a SOF segment: skip its payload (length includes itself).
                stream.read(int(struct.unpack('>H', stream.read(2))[0]) - 2)
                marker = stream.read(1)
    return (0, 0)
class OptionPlotoptionsLollipopSonificationTracksActivewhen(Options):
    """Highcharts `plotOptions.lollipop.sonification.tracks.activeWhen` options.

    FIX: each getter/setter pair shared a name with no decorators, so the
    second `def` silently replaced the first and the getters were lost.
    Restored the `@property`/`@<name>.setter` pairs.
    """

    @property
    def crossingDown(self):
        return self._config_get(None)

    @crossingDown.setter
    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    @property
    def crossingUp(self):
        return self._config_get(None)

    @crossingUp.setter
    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        return self._config_get(None)

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
class InputFile(Input):
    """File-upload variant of the basic Input component."""

    name = 'InputFile'
    _option_cls = OptInputs.OptionsInputFile

    def __init__(self, page: primitives.PageModel, text, placeholder, width, height, html_code, options, attrs, profile):
        super(InputFile, self).__init__(page, text, placeholder, width, height, html_code, options, attrs, profile)
        self.set_attrs({'type': 'file'})
        # `name[]` lets the backend receive multiple uploaded files.
        self.set_attrs({'name': ('%s[]' % self.html_code)})

    # FIX: these are attribute-style accessors (no extra arguments, callers
    # read them as attributes elsewhere in this framework) — restored the
    # missing @property decorators.
    @property
    def options(self) -> OptInputs.OptionsInputFile:
        """Component options, typed to the file-input option class."""
        return super().options

    @property
    def dom(self) -> JsHtmlInput.InputFiles:
        """Lazily-created DOM helper for the file input."""
        if self._dom is None:
            self._dom = JsHtmlInput.InputFiles(self, page=self.page)
        return self._dom
# FIX: the decorator was stripped down to a bare `.parametrize(...)`
# expression — restored `@pytest.mark.parametrize` so the cases actually run.
@pytest.mark.parametrize('vm_class, val1, val2, expected', ((ConstantinopleVM, '0x', '0x00', '0x'), (ConstantinopleVM, '0x', '0x01', '0x'), (ConstantinopleVM, '0x', '0xff', '0x'), (ConstantinopleVM, '0x', '0x0100', '0x'), (ConstantinopleVM, '0x', '0x0101', '0x'), (ConstantinopleVM, '0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', '0x00', '0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'), (ConstantinopleVM, '0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', '0x01', '0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe'), (ConstantinopleVM, '0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', '0xff', '0x'), (ConstantinopleVM, '0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', '0x0100', '0x'), (ConstantinopleVM, '0x', '0x01', '0x'), (ConstantinopleVM, '0x7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', '0x01', '0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe')))
def test_shl(vm_class, val1, val2, expected):
    """SHL opcode: value << shift, truncated to 256 bits."""
    computation = run_general_computation(vm_class)
    # Stack order: value first, shift amount on top.
    computation.stack_push_bytes(decode_hex(val1))
    computation.stack_push_bytes(decode_hex(val2))
    computation.opcodes[opcode_values.SHL](computation)
    result = computation.stack_pop1_int()
    assert (encode_hex(pad32(int_to_big_endian(result))) == expected)
def main():
    """Clean, build sdist/wheel distributions, smoke-test them, and print the upload hint."""
    run('rm', '-rf', 'build/', 'dist/', '*.egg-info', '.eggs')
    run('python', 'setup.py', 'sdist', 'bdist_wheel')
    dist_dir = os.path.join(base_dir, 'dist')
    for dist_file in os.listdir(dist_dir):
        test_dist(os.path.join(dist_dir, dist_file))
    print('\n\n\n\n * Releases are ready! *\n\n$ python -m twine upload dist/*\n\n')
def create_sensitivity():
    """Insert the default antibiotic Sensitivity master records."""
    labels = ['Low Sensitivity', 'High Sensitivity', 'Moderate Sensitivity',
              'Susceptible', 'Resistant', 'Intermediate']
    insert_record([
        {'doctype': 'Sensitivity', 'sensitivity': _(label)} for label in labels
    ])
class OptionPlotoptionsXrangeDatalabels(Options):
    """Highcharts `plotOptions.xrange.dataLabels` configuration options.

    FIX: every getter/setter pair shared a name with no decorators, so the
    second `def` silently replaced the first and all getters were lost.
    Restored the `@property`/`@<name>.setter` pairs (and `@property` on the
    sub-data accessors).
    """

    @property
    def align(self):
        return self._config_get('undefined')

    @align.setter
    def align(self, text: str):
        self._config(text, js_type=False)

    @property
    def allowOverlap(self):
        return self._config_get(False)

    @allowOverlap.setter
    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def animation(self) -> 'OptionPlotoptionsXrangeDatalabelsAnimation':
        return self._config_sub_data('animation', OptionPlotoptionsXrangeDatalabelsAnimation)

    @property
    def backgroundColor(self):
        return self._config_get(None)

    @backgroundColor.setter
    def backgroundColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderColor(self):
        return self._config_get(None)

    @borderColor.setter
    def borderColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderRadius(self):
        return self._config_get(0)

    @borderRadius.setter
    def borderRadius(self, num: float):
        self._config(num, js_type=False)

    @property
    def borderWidth(self):
        return self._config_get(0)

    @borderWidth.setter
    def borderWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def className(self):
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def crop(self):
        return self._config_get(True)

    @crop.setter
    def crop(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def defer(self):
        return self._config_get(True)

    @defer.setter
    def defer(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def enabled(self):
        return self._config_get(False)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def filter(self) -> 'OptionPlotoptionsXrangeDatalabelsFilter':
        return self._config_sub_data('filter', OptionPlotoptionsXrangeDatalabelsFilter)

    @property
    def format(self):
        return self._config_get('point.value')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def formatter(self):
        return self._config_get(None)

    @formatter.setter
    def formatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def inside(self):
        return self._config_get(True)

    @inside.setter
    def inside(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def nullFormat(self):
        return self._config_get(None)

    @nullFormat.setter
    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def nullFormatter(self):
        return self._config_get(None)

    @nullFormatter.setter
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def overflow(self):
        return self._config_get('justify')

    @overflow.setter
    def overflow(self, text: str):
        self._config(text, js_type=False)

    @property
    def padding(self):
        return self._config_get(5)

    @padding.setter
    def padding(self, num: float):
        self._config(num, js_type=False)

    @property
    def position(self):
        return self._config_get('center')

    @position.setter
    def position(self, text: str):
        self._config(text, js_type=False)

    @property
    def rotation(self):
        return self._config_get(0)

    @rotation.setter
    def rotation(self, num: float):
        self._config(num, js_type=False)

    @property
    def shadow(self):
        return self._config_get(False)

    @shadow.setter
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def shape(self):
        return self._config_get('square')

    @shape.setter
    def shape(self, text: str):
        self._config(text, js_type=False)

    @property
    def style(self):
        return self._config_get(None)

    @style.setter
    def style(self, value: Any):
        self._config(value, js_type=False)

    @property
    def textPath(self) -> 'OptionPlotoptionsXrangeDatalabelsTextpath':
        return self._config_sub_data('textPath', OptionPlotoptionsXrangeDatalabelsTextpath)

    @property
    def useHTML(self):
        return self._config_get(False)

    @useHTML.setter
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def verticalAlign(self):
        return self._config_get('middle')

    @verticalAlign.setter
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)

    @property
    def x(self):
        return self._config_get(0)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        return self._config_get('undefined')

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)

    @property
    def zIndex(self):
        return self._config_get(6)

    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
class MullerBrownPot(Calculator):
    """Analytic 2D Mueller-Brown model potential (the z coordinate is ignored).

    V(x, y) = sum_i A_i * exp(a_i*(x-x0_i)^2 + b_i*(x-x0_i)*(y-y0_i) + c_i*(y-y0_i)^2)

    The parameter tables were previously duplicated in get_energy and
    get_forces; they are hoisted to class-level constants and the shared
    per-Gaussian terms are computed once in _terms().
    """

    _A = (-200, -100, -170, 15)
    _X0 = (1.0, 0.0, -0.5, -1.0)
    _Y0 = (0.0, 0.5, 1.5, 1.0)
    _a = (-1.0, -1.0, -6.5, 0.7)
    _b = (0.0, 0.0, 11.0, 0.6)
    _c = (-10.0, -10.0, -6.5, 0.7)

    def __init__(self):
        super(MullerBrownPot, self).__init__()

    def _terms(self, x, y):
        """Yield (A, a, b, c, dx, dy, exp_term) for each of the four Gaussians."""
        for A, x0, y0, a, b, c in zip(self._A, self._X0, self._Y0, self._a, self._b, self._c):
            u = x - x0
            v = y - y0
            yield A, a, b, c, u, v, np.exp(a * u ** 2 + b * u * v + c * v ** 2)

    def get_energy(self, atoms, coords):
        """Return {'energy': V(x, y)} for the given (x, y, z) coordinates."""
        x, y, z = coords
        energy = 0
        for A, a, b, c, u, v, e in self._terms(x, y):
            energy += A * e
        return {'energy': energy}

    def get_forces(self, atoms, coords):
        """Return energy plus analytic forces (-gradient; dz is always zero)."""
        x, y, z = coords
        dx = 0
        dy = 0
        for A, a, b, c, u, v, e in self._terms(x, y):
            dx += A * (2 * a * u + b * v) * e
            dy += A * (b * u + 2 * c * v) * e
        dz = np.zeros_like(dx)
        forces = -np.stack((dx, dy, dz), axis=-1)
        results = self.get_energy(atoms, coords)
        results['forces'] = forces
        return results

    # NOTE: an analytic Hessian is intentionally not implemented (a stale
    # commented-out sketch for a different potential was removed).

    def __str__(self):
        return 'Muller-Brown-Potential'
def _get_sub_func_metadata(ops: List[Operator], data_t: str, op_t: str, backend_spec: BackendSpec, float32_t: str) -> Tuple[List[ElementwiseMetaData], str, bool]:
    """Resolve a common backend compute type and per-op function metadata.

    FIX: the return annotation claimed a 2-tuple; the function returns
    (sub_func_metadata, op_t, use_fp32_acc).

    Raises:
        NotImplementedError: if some op's func enum has no backend mapping.
        RuntimeError: if no common candidate type can be found.
    """
    use_fp32_acc = Target.current()._kwargs.get('elementwise_use_fp32_acc', False)
    if use_fp32_acc:
        # Accumulate in fp32 regardless of the data type.
        op_t = data_t
        candidate_op_types = [float32_t]
    else:
        candidate_op_types = backend_spec.get_candidate_op_types(op_t)
    # First pass: narrow candidate_op_types down to types every op supports.
    func_enums = []
    for op in ops:
        func_enum = op._attrs['func']
        func_enums.append(func_enum)
        funcs = backend_spec.func_enum_to_func_name.get(func_enum)
        if funcs is None:
            raise NotImplementedError('Func {} is not supported!'.format(func_enum))
        for candidate_op_t in candidate_op_types:
            func_name = funcs.get(candidate_op_t)
            if func_name is not None:
                # Re-seed the candidates from the first type this op supports.
                candidate_op_types = backend_spec.get_candidate_op_types(candidate_op_t)
                break
        if len(candidate_op_types) == 0:
            raise RuntimeError('Cannot find a common backend data type! candidate_op_types: {}, op_t: {}.'.format(candidate_op_types, op_t))
    # Prefer the narrowed common type; otherwise fall back to the data type.
    if op_t in set(candidate_op_types):
        op_t = candidate_op_types[0]
    else:
        op_t = data_t
        candidate_op_types = backend_spec.get_candidate_op_types(op_t)
    # Second pass: pick the concrete function name/type for each op.
    sub_func_metadata = []
    for op in ops:
        func_enum = op._attrs['func']
        funcs = backend_spec.func_enum_to_func_name.get(func_enum)
        func_name = None
        func_op_t = None
        for candidate_op_t in candidate_op_types:
            func_name = funcs.get(candidate_op_t)
            if func_name is not None:
                func_op_t = candidate_op_t
                break
        if func_name is None:
            raise NotImplementedError('Unsupported func {} and op type {}!'.format(func_enum, op_t))
        sub_func_metadata.append(ElementwiseMetaData(func_name, func_op_t, op._attrs['args'], op._attrs['outputs']))
    return (sub_func_metadata, op_t, use_fp32_acc)
class Solution():
    """LeetCode 993: two nodes are cousins iff they share a depth but not a parent."""

    def isCousins(self, root: TreeNode, x: int, y: int) -> bool:
        """Return True when the nodes holding x and y are cousins."""
        if root is None:
            return False
        # The root has no parent, so it cannot be anyone's cousin.
        if root.val == x or root.val == y:
            return False

        # Each record is [depth, parent]; -1/None means "not found yet".
        info_x = [-1, None]
        info_y = [-1, None]

        def scan(node, depth):
            # Examine both children (left first, matching the original
            # traversal order) before recursing into each.
            for child in (node.left, node.right):
                if child is None:
                    continue
                if child.val == x:
                    info_x[0], info_x[1] = depth + 1, node
                if child.val == y:
                    info_y[0], info_y[1] = depth + 1, node
                scan(child, depth + 1)

        scan(root, 0)
        if info_x[0] == info_y[0] and info_x[0] != -1 and info_x[1] != info_y[1]:
            return True
        return False
class BaseIsolatedComponent(BaseComponent):
    """Component whose service runs in a separate OS process."""

    endpoint_name: str = None
    # Event-loop monitoring tuning knobs (seconds).
    loop_monitoring_wakeup_interval = 2
    loop_monitoring_max_delay_debug = 0.1
    loop_monitoring_max_delay_warning = 1

    async def run_in_process(self) -> None:
        ...

    async def _do_run(self) -> None:
        ...

    def get_subprocess_kwargs(self) -> Optional[SubprocessKwargs]:
        # Run the child in its own session by default so terminal signals
        # aren't delivered to it directly; opt out via env var.
        start_new_session = True
        if os.getenv('TRINITY_SINGLE_PROCESS_GROUP') == '1':
            start_new_session = False
        return {'start_new_session': start_new_session}

    # FIX: the method takes `cls` but was undecorated, so it received the
    # instance instead of the class when called — restored @classmethod.
    @classmethod
    def get_endpoint_name(cls) -> str:
        """Return the configured endpoint name, or one derived from `cls.name`."""
        if cls.endpoint_name is None:
            return friendly_filename_or_url(cls.name)
        else:
            return cls.endpoint_name
class SusAcc(BasicAuthWithTopicTestCase):
    """ACL checks for the MQTT subscribe (SUS) access level."""

    def setUp(self):
        BasicAuthWithTopicTestCase.setUp(self)
        topic = models.Topic.objects.create(name='#')
        self.acc_allow = False
        self.acc = models.PROTO_MQTT_ACC_SUS
        # Grant both publish and subscribe ACLs on the wildcard topic.
        for access in (models.PROTO_MQTT_ACC_PUB, models.PROTO_MQTT_ACC_SUS):
            models.ACL.objects.create(acc=access, topic=topic)
        _settings(MQTT_ACL_ALLOW=True)
        _settings(MQTT_ACL_ALLOW_ANONIMOUS=True)

    def test_no_login_acl_allow_anonymous(self):
        self.assertEqual(self._test_no_login().status_code, 200)

    def test_login_wrong_topic(self):
        self.assertEqual(self._test_login_wrong_topic().status_code, 200)

    def test_login_no_topic(self):
        self.assertEqual(self._test_login_no_topic().status_code, 200)

    def test_login_no_acl_allow(self):
        self.assertEqual(self._test_login_no_acl_allow().status_code, 200)

    def test_login_with_pub_acl_public(self):
        self.assertEqual(self._test_login_with_pub_acl_public().status_code, 200)

    def test_login_with_pub_acl(self):
        self.assertEqual(self._test_login_with_pub_acl().status_code, 200)

    def test_login_with_pub_acl_group(self):
        self.assertEqual(self._test_login_with_pub_acl_group().status_code, 200)
# FIX: the decorator names were stripped, leaving bare argument tuples and a
# bare decorator-factory call — restored the click decorators so `build` is
# actually registered as a CLI command.
@click.command()
@click.argument('dockerfile')
@click.option('--show-tag-only/--no-show-tag-only', help='skip build, only print out image tag name', default=False)
@_log.simple_verbosity_option(logger)
def build(dockerfile, show_tag_only):
    """Build the Docker image described by DOCKERFILE."""
    image_tag = assert_image_tag_from_dockerfile(logger, dockerfile)
    if show_tag_only:
        print(image_tag)
        return
    dockerfile_dir = os.path.dirname(dockerfile)
    project_dir = os.path.dirname(dockerfile_dir)
    logger.info('')
    logger.info('[*] Building %s with tag %s...', dockerfile, image_tag)
    logger.info('')
    # NOTE(review): shell=True with interpolated paths is injection-prone if
    # dockerfile paths are untrusted; consider the list form of check_call.
    check_call(('docker build --rm -t %s -f %s %s' % (image_tag, dockerfile, project_dir)), shell=True)
    logger.info(check_output(['docker', 'images']))
class GEMMBiasReluTestCase(unittest.TestCase):
    """AITemplate tests for fused GEMM (row-column-row) + bias + ReLU kernels.

    Each compiled module is compared numerically against a PyTorch
    reference (`linear` + `relu`) with dtype-specific tolerances.
    """

    def __init__(self, *args, **kwargs):
        super(GEMMBiasReluTestCase, self).__init__(*args, **kwargs)
        # Monotonic counter so each compiled test module gets a unique name.
        self._test_id = 0

    def _test_gemm_rcr_bias_relu(self, M=128, K=1024, N=64, dtype='float16', test_suffix=None):
        # Build the AIT graph: Y = relu(X @ W^T + B).
        X = Tensor(shape=[M, K], dtype=dtype, name='input_0', is_input=True)
        W = Tensor(shape=[N, K], dtype=dtype, name='input_1', is_input=True)
        B = Tensor(shape=[N], dtype=dtype, name='input_2', is_input=True)
        OP = ops.gemm_rcr_bias_relu()
        Y = OP(X, W, B)
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        if (test_suffix is None):
            test_suffix = dtype
        test_name = f'gemm_rcr_bias_relu_{test_suffix}_{self._test_id}'
        self._test_id += 1
        module = compile_model(Y, detect_target(), './tmp', test_name)
        # PyTorch reference computation on random inputs.
        X_pt = get_random_torch_tensor([M, K], dtype)
        W_pt = get_random_torch_tensor([N, K], dtype)
        B_pt = get_random_torch_tensor([N], dtype)
        Y_pt = torch.nn.functional.linear(X_pt, W_pt, bias=B_pt)
        Y_pt = torch.relu(Y_pt)
        inputs = {'input_0': X_pt, 'input_1': W_pt, 'input_2': B_pt}
        y = get_torch_empty_tensor([M, N], dtype)
        module.run_with_tensors(inputs, [y])
        torch.testing.assert_close(Y_pt, y, **_TOLERANCE_LIMITS[dtype])

    def test_gemm_rcr_bias_relu_fp16(self):
        self._test_gemm_rcr_bias_relu(dtype='float16')

    # NOTE(review): identical body to the fp16 test above — presumably the
    # original carried a target-filter decorator (e.g. ROCm-only); confirm.
    def test_gemm_rcr_bias_relu_fp16_rocm(self):
        self._test_gemm_rcr_bias_relu(dtype='float16')

    def test_gemm_rcr_bias_relu_fp32_sm80(self):
        self._test_gemm_rcr_bias_relu(dtype='float32')

    def test_gemm_rcr_bias_relu_bf16(self):
        self._test_gemm_rcr_bias_relu(dtype='bfloat16')

    def test_gemm_rcr_bias_relu_sm90(self):
        # Force SM90 (Hopper) kernel selection; misaligned K/N must be
        # rejected, while well-aligned shapes must still compile and pass.
        with env_variables(AIT_FORCE_CUTLASS_SM90_KERNELS='1', INSIDE_RE_WORKER='1'):
            with self.assertRaisesRegex(expected_exception=RuntimeError, expected_regex='No GEMM op instances are left after filtering'):
                self._test_gemm_rcr_bias_relu(K=1020, dtype='float16', test_suffix='wrong_input_alignment_sm90')
            with self.assertRaisesRegex(expected_exception=RuntimeError, expected_regex='No GEMM op instances are left after filtering'):
                self._test_gemm_rcr_bias_relu(N=63, dtype='float16', test_suffix='wrong_output_alignment_sm90')
            self._test_gemm_rcr_bias_relu(dtype='float16', test_suffix='float16_force_sm90')
            self._test_gemm_rcr_bias_relu(dtype='bfloat16', test_suffix='bfloat16_force_sm90')

    def _test_gemm_rcr_bias_add_relu(self, dtype='float16'):
        # Same as above but with an extra residual input: Y = relu(X@W^T + B + D).
        M = 128
        K = 1024
        N = 64
        tolerance_limits = _TOLERANCE_LIMITS[dtype]
        X = Tensor(shape=[M, K], dtype=dtype, name='input_0', is_input=True)
        W = Tensor(shape=[N, K], dtype=dtype, name='input_1', is_input=True)
        B = Tensor(shape=[N], dtype=dtype, name='input_2', is_input=True)
        D = Tensor(shape=[M, N], dtype=dtype, name='input_3', is_input=True)
        OP = ops.gemm_rcr_bias_add_relu()
        Y = OP(X, W, B, D)
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        test_name = f'gemm_rcr_bias_add_relu_{dtype}_{self._test_id}'
        self._test_id += 1
        module = compile_model(Y, detect_target(), './tmp', test_name)
        X_pt = get_random_torch_tensor([M, K], dtype)
        W_pt = get_random_torch_tensor([N, K], dtype)
        B_pt = get_random_torch_tensor([N], dtype)
        D_pt = get_random_torch_tensor([M, N], dtype)
        Y_pt = (torch.nn.functional.linear(X_pt, W_pt, bias=B_pt) + D_pt)
        Y_pt = torch.relu(Y_pt)
        # Inputs passed positionally here (by graph order), unlike the dict above.
        inputs = [X_pt, W_pt, B_pt, D_pt]
        y = get_torch_empty_tensor([M, N], dtype)
        module.run_with_tensors(inputs, [y])
        torch.testing.assert_close(Y_pt, y, **tolerance_limits)

    def test_gemm_rcr_bias_add_relu_fp16(self):
        self._test_gemm_rcr_bias_add_relu(dtype='float16')

    def test_gemm_rcr_bias_add_relu_fp16_rocm(self):
        self._test_gemm_rcr_bias_add_relu(dtype='float16')

    def test_gemm_rcr_bias_add_relu_fp32_sm80(self):
        self._test_gemm_rcr_bias_add_relu(dtype='float32')

    def test_gemm_rcr_bias_add_relu_bf16(self):
        self._test_gemm_rcr_bias_add_relu(dtype='bfloat16')
class KiwoomOpenApiPlusDialogToHandle(DialogToHandle, Logging):
    """Describes a popup dialog to detect (by title) and dismiss automatically.

    FIX: the three `cls`-first helpers were undecorated, so calling them on
    an instance bound the instance as `cls` — restored @classmethod.
    """

    @classmethod
    def should_handle_by_title(cls, dialog: WindowSpecification, title: str):
        """Return True when the dialog's window text equals `title`.

        Returns None (falsy) when the dialog element cannot be found.
        """
        try:
            cls.logger.debug('Checking dialog title')
            dialog_text = dialog.wrapper_object().window_text()
        except ElementNotFoundError:
            cls.logger.warning('Could not find dialog to check')
        else:
            return (dialog_text == title)

    @classmethod
    def handle_by_clicking_button(cls, dialog: WindowSpecification):
        """Dismiss the dialog by clicking its (only) button, if present."""
        try:
            cls.logger.debug('Clicking confirm button on dialog')
            dialog['Button'].click()
        except ElementNotFoundError:
            cls.logger.warning('Could not find dialog to confirm')

    @classmethod
    def handle_by_emiting_should_restart_signal(cls, app: KiwoomOpenApiPlusManagerApplication, restart_type: KiwoomOpenApiPlusManagerApplication.RestartType):
        """Ask the manager application to restart as instructed by the dialog."""
        cls.logger.debug("Restarting based on the dialog's instruction")
        app.shouldRestart.emit(restart_type)

    def __init__(self, title: Optional[str]=None, time: Optional[datetime.time]=None, body: Optional[str]=None, app: Optional[KiwoomOpenApiPlusManagerApplication]=None, restart_type: Optional[KiwoomOpenApiPlusManagerApplication.RestartType]=None):
        self._title = title
        self._time = time
        self._body = body
        self._app = app
        self._restart_type = restart_type

    def to_specification(self) -> WindowSpecification:
        """Build a pywinauto window specification matching this dialog's title."""
        desktop = Desktop(allow_magic_lookup=False)
        # Escape the title since window() expects a regular expression.
        title_re = re.escape(self._title)
        specification = desktop.window(title_re=title_re)
        return specification

    def should_handle(self, dialog: WindowSpecification) -> bool:
        return self.should_handle_by_title(dialog, self._title)

    def handle(self, dialog: WindowSpecification):
        self.handle_by_clicking_button(dialog)
        if self._restart_type is not None:
            self.handle_by_emiting_should_restart_signal(self._app, self._restart_type)

    def handle_if_needed(self, dialog: WindowSpecification):
        if self.should_handle(dialog):
            return self.handle(dialog)
class EventManager(models.Manager):
def get_queryset(self):
today = timezone.localdate()
return super().get_queryset().annotate(attendees_count=models.Count('attendee', distinct=True)).annotate(last_date=models.Max('eventdate__date')).annotate(activity_proposal_is_open=models.Case(models.When(models.Q(limit_proposal_date__gte=today), then=True), default=False, output_field=models.BooleanField())).annotate(registration_is_open=models.Case(models.When(models.Q(last_date__gte=today), then=True), default=False, output_field=models.BooleanField()))
def get_event_by_user(user, tag_slug=None):
if user.is_authenticated():
event_users = EventUser.objects.filter(user=user)
event_ids = [event_user.event.pk for event_user in list(event_users)]
queryset = Event.objects.filter(pk__in=event_ids)
if tag_slug:
queryset = queryset.filter(tags__slug=tag_slug)
else:
queryset = Event.objects.none()
return queryset
def get_event_private_data():
    """Collect, per event, the organizer full names, contact email and id."""
    events = []
    for event in Event.objects.all():
        organizers = Organizer.objects.filter(event_user__event=event)
        full_names = [org.event_user.user.get_full_name() for org in organizers]
        events.append({'organizers': ','.join(full_names), 'email': event.email, 'id': event.id})
    return events
def test_simple_with_ignore() -> None:
    """DEP002 must not report an unused dependency listed in ignored_modules."""
    dependencies = [
        Dependency('click', Path('pyproject.toml')),
        Dependency('toml', Path('pyproject.toml')),
    ]
    toml_module = ModuleBuilder('toml', {'foo'}, frozenset(), dependencies).build()
    modules_locations = [ModuleLocations(toml_module, [Location(Path('foo.py'), 1, 2)])]
    finder = DEP002UnusedDependenciesFinder(modules_locations, dependencies, ignored_modules=('click',))
    assert finder.find() == []
# NOTE(review): module-level call executed at import time -- presumably seeds
# default settings/registrations; confirm what `_defaults()` does before moving it.
_defaults()
class CustomFormOptionSchema(SoftDeletionSchema):
    """API schema for a single custom-form option (a value plus its parent form)."""

    class Meta:
        type_ = 'custom-form-option'
        self_view = 'v1.custom_form_option_detail'
        self_view_kwargs = {'id': '<id>'}
        inflect = dasherize

    id = fields.Integer(dump_only=True)
    value = fields.Str(required=True)
    custom_form = Relationship(
        self_view='v1.custom_form_option_form',
        self_view_kwargs={'id': '<id>'},
        related_view='v1.custom_form_detail',
        related_view_kwargs={'custom_form_option_id': '<id>'},
        schema='CustomFormSchema',
        type_='custom_form',
    )
class LocalServer(object):
    """COM local-server main-loop helper (comtypes).

    Runs an STA message loop or an MTA queue wait depending on how COM was
    already initialized on this thread, then revokes the class objects.
    """
    _queue = None  # set only in MTA mode; signals loop exit via put()

    def run(self, classobjects):
        """Run the server loop, then revoke all registered class objects."""
        # CoInitialize defaults the thread to STA; RPC_E_CHANGED_MODE means the
        # thread was already initialized as MTA.
        result = windll.ole32.CoInitialize(None)
        if (RPC_E_CHANGED_MODE == result):
            _debug('Server running in MTA')
            self.run_mta()
        else:
            _debug('Server running in STA')
            if (result >= 0):
                # Balance the probe CoInitialize above before entering the loop.
                windll.ole32.CoUninitialize()
            self.run_sta()
        for obj in classobjects:
            obj._revoke_class()

    def run_sta(self):
        """Pump Windows messages until PostQuitMessage."""
        from comtypes import messageloop
        messageloop.run()

    def run_mta(self):
        """Block on a queue until Unlock() releases the last server reference."""
        # Python-2 `Queue` fallback removed: this codebase is Python 3 only
        # (f-strings are used elsewhere in the file).
        import queue
        self._queue = queue.Queue()
        self._queue.get()

    def Lock(self):
        oledll.ole32.CoAddRefServerProcess()

    def Unlock(self):
        rc = oledll.ole32.CoReleaseServerProcess()
        if (rc == 0):
            # Last reference gone: wake the MTA waiter or quit the STA loop.
            if self._queue:
                self._queue.put(42)
            else:
                windll.user32.PostQuitMessage(0)
class LPopup:
    """Accumulates chained Leaflet JS calls for a popup/marker object.

    Calls are buffered in order and flushed (and cleared) by toStr().
    """

    def __init__(self, latlng=None, options: dict=None, selector=None):
        self._selector = selector
        self._js = []
        self.__is_attached = False
        # JS expression this wrapper renders its chained calls onto.
        self.varId = 'L.marker(%s)' % latlng

    def openPopup(self, latlng=None):
        """Open the popup, optionally moving it to `latlng` first."""
        if latlng is not None:
            self.setLatLng(latlng)
        self._js.append('openPopup()')
        return self

    def setLatLng(self, latlng):
        """Queue a setLatLng(...) call."""
        self._js.append('setLatLng(%s)' % latlng)
        return self

    def setZIndexOffset(self, offset: float):
        """Queue a setZIndexOffset(...) call."""
        self._js.append('setZIndexOffset(%s)' % offset)
        return self

    def setContent(self, content: str, dataflows: List[dict]=None):
        """Queue a setContent(...) call; `content` goes through JsUtils.dataFlows."""
        js_content = JsUtils.dataFlows(content, dataflows, self.page)
        self._js.append('setContent(%s)' % js_content)
        return self

    def openOn(self):
        pass

    def toStr(self):
        """Render the buffered call chain as JS and reset the buffer."""
        if not self._js:
            return self.varId
        chained, self._js = '.'.join(self._js), []
        return '%s.%s' % (self.varId, chained)
def delete_atoms_bonds_inplace(as_dict: dict, inds: list[int], atom_offset: int=0, bond_offset: int=0) -> dict[(int, int)]:
    """Remove the atoms in `inds` (and every bond touching them) from `as_dict`.

    Surviving atoms and bonds are renumbered contiguously, shifted by
    `atom_offset` / `bond_offset`. Mutates `as_dict` in place.

    Returns:
        Mapping of old atom_id -> new atom_id for the surviving atoms.
    """
    doomed = set(inds)
    atom_map = {}
    kept_atoms = []
    atoms_removed = 0
    for atom in as_dict['atoms_xyzs']:
        old_id = atom['atom_id']
        if old_id in doomed:
            warnings.warn('Charges were not updated after deleting atoms!')
            print(f"Deleted atom {atom['atom_name']} with atom_id {atom['atom_id']}")
            atoms_removed += 1
            continue
        # Close the gap left by deleted atoms, then apply the caller's offset.
        new_id = old_id - atoms_removed + atom_offset
        atom_map[old_id] = new_id
        atom['atom_id'] = new_id
        kept_atoms.append(atom)
    as_dict['atoms_xyzs'] = kept_atoms

    kept_bonds = []
    bonds_removed = 0
    for bond in as_dict['bond']:
        if doomed & {bond['origin_atom_id'], bond['target_atom_id']}:
            print(f"Deleted bond with bond_id {bond['bond_id']}")
            bonds_removed += 1
            continue
        bond['bond_id'] = bond['bond_id'] - bonds_removed + bond_offset
        bond['origin_atom_id'] = atom_map[bond['origin_atom_id']]
        bond['target_atom_id'] = atom_map[bond['target_atom_id']]
        kept_bonds.append(bond)
    as_dict['bond'] = kept_bonds
    return atom_map
def deploy_archived_service(service_name):
    """POST the named service archive for deployment and return the JSON reply.

    Raises the mapped API exception on non-200 responses; 400 responses are
    first inspected for manifest errors.
    """
    url = base_url + 'service-archives/{}'.format(service_name)
    response = get_session().post(url)
    if response.status_code == 200:
        return response.json()
    if response.status_code == 400:
        _check_for_manifest_errors(response)
    raise get_exception(response)
class EnergyCharging(object):
    """Swagger-generated model describing a vehicle's charging state.

    NOTE(review): the original (decompiled) source had the ``@property`` /
    ``@<name>.setter`` decorators stripped (bare ``_mode.setter``-style lines
    that are NameErrors at class creation); they are restored here. The Python-2
    ``six.iteritems`` call was replaced with ``dict.items()``.
    """
    # Attribute name -> swagger type, used by to_dict().
    swagger_types = {'charging_mode': 'str', 'charging_rate': 'int', 'next_delayed_time': 'str', 'plugged': 'bool', 'remaining_time': 'str', 'status': 'ChargingStatusEnum'}
    # Attribute name -> JSON key.
    attribute_map = {'charging_mode': 'chargingMode', 'charging_rate': 'chargingRate', 'next_delayed_time': 'nextDelayedTime', 'plugged': 'plugged', 'remaining_time': 'remainingTime', 'status': 'status'}

    def __init__(self, charging_mode=None, charging_rate=None, next_delayed_time=None, plugged=None, remaining_time=None, status=None):
        self._charging_mode = None
        self._charging_rate = None
        self._next_delayed_time = None
        self._plugged = None
        self._remaining_time = None
        self._status = None
        self.discriminator = None
        # Assign through the properties so setter validation applies.
        if (charging_mode is not None):
            self.charging_mode = charging_mode
        if (charging_rate is not None):
            self.charging_rate = charging_rate
        if (next_delayed_time is not None):
            self.next_delayed_time = next_delayed_time
        if (plugged is not None):
            self.plugged = plugged
        if (remaining_time is not None):
            self.remaining_time = remaining_time
        if (status is not None):
            self.status = status

    @property
    def charging_mode(self):
        """str: current charging mode ('No', 'Slow' or 'Quick')."""
        return self._charging_mode

    @charging_mode.setter
    def charging_mode(self, charging_mode):
        allowed_values = ['No', 'Slow', 'Quick']
        if (charging_mode not in allowed_values):
            raise ValueError('Invalid value for `charging_mode` ({0}), must be one of {1}'.format(charging_mode, allowed_values))
        self._charging_mode = charging_mode

    @property
    def charging_rate(self):
        """int: charging rate, constrained to 0..500."""
        return self._charging_rate

    @charging_rate.setter
    def charging_rate(self, charging_rate):
        if ((charging_rate is not None) and (charging_rate > 500)):
            raise ValueError('Invalid value for `charging_rate`, must be a value less than or equal to `500`')
        if ((charging_rate is not None) and (charging_rate < 0)):
            raise ValueError('Invalid value for `charging_rate`, must be a value greater than or equal to `0`')
        self._charging_rate = charging_rate

    @property
    def next_delayed_time(self):
        """str: next delayed charging time."""
        return self._next_delayed_time

    @next_delayed_time.setter
    def next_delayed_time(self, next_delayed_time):
        self._next_delayed_time = next_delayed_time

    @property
    def plugged(self):
        """bool: whether the vehicle is plugged in."""
        return self._plugged

    @plugged.setter
    def plugged(self, plugged):
        self._plugged = plugged

    @property
    def remaining_time(self):
        """str: remaining charging time."""
        return self._remaining_time

    @remaining_time.setter
    def remaining_time(self, remaining_time):
        self._remaining_time = remaining_time

    @property
    def status(self):
        """ChargingStatusEnum: overall charging status."""
        return self._status

    @status.setter
    def status(self, status):
        self._status = status

    def to_dict(self):
        """Return the model's attributes as a plain dict (recursing into models)."""
        result = {}
        for (attr, _) in self.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
            else:
                result[attr] = value
        if issubclass(EnergyCharging, dict):
            for (key, value) in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Pretty-printed string form of to_dict()."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        if (not isinstance(other, EnergyCharging)):
            return False
        return (self.__dict__ == other.__dict__)

    def __ne__(self, other):
        return (not (self == other))
class WebSocketManager(object):
    """Tracks open websocket connections and fans messages out to all of them."""

    def __init__(self):
        self._connections = []

    def add_connection(self, ws):
        """Register a newly opened websocket."""
        self._connections.append(ws)

    def delete_connection(self, ws):
        """Forget a closed websocket."""
        self._connections.remove(ws)

    def broadcast(self, msg):
        """Send `msg` to every registered connection, in registration order."""
        for connection in self._connections:
            connection.send(msg)
def main(path):
    """Run the release test-suite under pytest for `path` and exit with its status."""
    import sys
    import pytest
    args = ['-p', 'no:parallel', '-E', 'release']
    no_debug = len(sys.argv) > 1 and sys.argv[1] == '--no-debug'
    if no_debug:
        args += ['-o', 'log_cli=False']
    else:
        logging.basicConfig(level=logging.DEBUG)
        args += ['-o', 'log_cli=True']
    sys.exit(pytest.main(args + [path]))
class PrimaryKeyGeneratorBase:
    """Reserves contiguous primary-key ranges from a shared key table.

    A row per class in `primary_key` tracks the highest id handed out; ranges
    are reserved under a row lock (`SELECT ... FOR UPDATE`) so concurrent
    writers cannot collide. Ids must stay inside `allowed_id_range`.
    """

    def __init__(self, primary_key: Type[PrimaryKeyBase], query_classes: Set[Type[object]], allowed_id_range: Optional[range]=None) -> None:
        self.primary_key = primary_key
        self.query_classes = query_classes
        # class name -> (next free id, highest reserved id)
        self.pks: Dict[(str, Tuple[(int, int)])] = {}
        if (allowed_id_range is None):
            # Default: full positive signed 64-bit range.
            self.allowed_id_range: range = inclusive_range(1, ((2 ** 63) - 1))
        else:
            self.allowed_id_range = allowed_id_range

    def reserve(self, session: Session, saving_classes: List[Type], item_counts: Optional[Dict[(str, int)]]=None) -> 'PrimaryKeyGeneratorBase':
        """Reserve id ranges for the classes being saved; returns self for chaining."""
        query_classes = {cls for cls in saving_classes if (cls in self.query_classes)}
        for cls in query_classes:
            if (item_counts and (cls.__name__ in item_counts)):
                count = item_counts[cls.__name__]
            else:
                count = 1
            if (count > 0):
                self._reserve_id_range(session, cls, count)
            elif (count == 0):
                pass  # nothing to reserve for this class
            else:
                raise ValueError(f'{cls.__name__} count must be >= 0')
        return self

    def _lock_pk_with_retries(self, session: Session, cls: Type[PrimaryKeyBase]) -> Optional[PrimaryKeyBase]:
        """Row-lock the key row for `cls`, retrying transient operational errors."""
        cls_pk: Optional[object] = None
        retries: int = 6
        while (retries > 0):
            try:
                cls_pk = session.query(self.primary_key).filter((self.primary_key.table_name == cls.__name__)).with_for_update().first()
                retries = 0
            except exc.OperationalError as ex:
                retries -= 1
                if (retries == 0):
                    raise ex
        return cls_pk

    def _reserve_id_range(self, session: Session, cls: Type, count: int) -> None:
        """Reserve `count` ids for `cls`, creating its key row on first use."""
        cls_pk = self._lock_pk_with_retries(session, cls)
        if (not cls_pk):
            current_id = self._get_initial_current_id(session, cls)
            try:
                session.add(self.primary_key(table_name=cls.__name__, current_id=current_id))
                session.commit()
            except exc.SQLAlchemyError as err:
                # A concurrent writer may have inserted the row first; fall
                # through and lock whatever exists now.
                log.error('Writing into the primary keys table failed', exc_info=err)
                session.rollback()
            cls_pk = self._lock_pk_with_retries(session, cls)
            # Fixed: the message was missing its f-prefix and printed literally.
            assert cls_pk, f'Primary key entry for {cls.__name__} not found after trying to create it'
        next_id = (cls_pk.current_id + 1)
        max_id = (cls_pk.current_id + count)
        assert (next_id in self.allowed_id_range), f"Can't reserve any primary keys for {cls.__name__} because the next id={next_id} would be outside the allowed {self.allowed_id_range}"
        assert (max_id in self.allowed_id_range), f"Can't reserve {count} primary keys for {cls.__name__} because the max id={max_id} would be outside the allowed {self.allowed_id_range}"
        cls_pk.current_id = max_id
        session.commit()
        self.pks[cls.__name__] = (next_id, max_id)

    def _get_initial_current_id(self, session: Session, cls: Type[object]) -> int:
        """Return the id to seed the key table with for `cls` (highest existing or start-1)."""
        highest_existing_id = self._get_highest_existing_id(session, cls)
        if (highest_existing_id is not None):
            assert (highest_existing_id in self.allowed_id_range), f'An existing row in the {cls.__name__} table has an id={highest_existing_id} which is already outside of the allowed {self.allowed_id_range}'
            return highest_existing_id
        else:
            return (self.allowed_id_range.start - 1)

    def _get_highest_existing_id(self, session: Session, cls: Type[object]) -> Optional[int]:
        """Return the largest id already stored for `cls`, or None if the table is empty."""
        row_with_highest_id = session.query(cls.id).order_by(cls.id.desc()).first()
        if (row_with_highest_id is None):
            return None
        return row_with_highest_id.id.resolved()

    def get(self, cls: Type[object]) -> int:
        """Hand out the next reserved id for `cls`; reserve() must have run first."""
        assert (cls in self.query_classes), ('%s primary key should be generated by SQLAlchemy' % cls.__name__)
        assert (cls.__name__ in self.pks), ('%s primary key needs to be initialized before use' % cls.__name__)
        (next_id, max_id) = self.pks[cls.__name__]
        assert (next_id <= max_id), ('%s reserved primary key range exhausted' % cls.__name__)
        assert (next_id in self.allowed_id_range), f'{cls.__name__} primary key was outside the allowed {self.allowed_id_range}'
        self.pks[cls.__name__] = ((next_id + 1), max_id)
        return next_id
class Controller(controller.ControllerProto):
    """Generic MQTT controller: publishes task values to a broker and feeds
    received messages back into tasks and the command processor.

    Topics may contain '%sysname%', '%tskname%', '%tskid%' and '%valname%'
    placeholders; values can be sent one-topic-per-value or as one JSON
    payload per task (self.useJSON). The many bare try/except blocks guard
    attributes that may be missing on instances restored from old configs.
    """
    CONTROLLER_ID = 14
    CONTROLLER_NAME = 'Generic MQTT'

    def __init__(self, controllerindex):
        """Set controller defaults; real setup happens in controller_init()."""
        controller.ControllerProto.__init__(self, controllerindex)
        self.usesID = False
        self.usesAccount = True
        self.usesPassword = True
        self.usesMQTT = True
        self.onmsgcallbacksupported = True
        self.controllerport = 1883
        self.inchannel = '%sysname%/#/state'   # publish (report) topic template
        self.outchannel = '%sysname%/#/set'    # subscribe (command) topic template
        self.mqttclient = None
        self.lastreconnect = 0
        self.connectinprogress = 0
        self.inch = ''   # parsed publish topic
        self.outch = ''  # parsed subscribe topic
        self.authmode = 0       # 0=plain, 1=TLS w/ cert, 2=TLS insecure
        self.certfile = ''
        self.laststatus = (- 1)  # -1 unknown, 0 disconnected, 1 connected
        self.keepalive = 60
        self.lwt_topic = '%sysname%/LWT'
        self.lwt_t = ''
        self.lwtconnmsg = 'Online'
        self.lwtdisconnmsg = 'Offline'
        self.useJSON = False
        self.backreport = True
        self.globalretain = False

    def controller_init(self, enablecontroller=None):
        """Parse topic templates, build the MQTT client and connect if enabled."""
        if (enablecontroller != None):
            self.enabled = enablecontroller
        self.connectinprogress = 0
        (self.inch, state) = commands.parseruleline(self.inchannel)
        (self.outch, state) = commands.parseruleline(self.outchannel)
        # Reduce the subscribe topic to everything up to (and incl.) the wildcard.
        state = self.outch.find('#')
        if (state > (- 1)):
            self.outch = self.outch[:(state + 1)]
        else:
            state = self.outch.find('%tskname%')
            if (state < 0):
                state = self.outch.find('%tskid%')
            if (state > (- 1)):
                self.outch = (self.outch[:state] + '/#')
            else:
                state = self.outch.find('%valname%')
                if (state > (- 1)):
                    self.outch = (self.outch[:state] + '/#')
        self.outch = self.outch.replace('//', '/').strip()
        if ((self.outch == '') or (self.outch == '/') or (self.outch == '/#') or (self.outch == '%/#')):
            self.outch = '#'
        # The try/except blocks below backfill attributes missing from
        # configurations saved by older plugin versions.
        try:
            ls = self.laststatus
        except:
            self.laststatus = (- 1)
        try:
            (self.lwt_t, state) = commands.parseruleline(self.lwt_topic)
        except:
            self.lwt_topic = ''
        try:
            if self.useJSON:
                pass
        except:
            self.useJSON = False
        try:
            if self.backreport:
                pass
        except:
            self.backreport = True
        try:
            if self.globalretain:
                pass
        except:
            self.globalretain = False
        self.mqttclient = GMQTTClient()
        self.mqttclient.subscribechannel = self.outch
        self.mqttclient.controllercb = self.on_message
        self.mqttclient.connectcb = self.on_connect
        self.mqttclient.disconnectcb = self.on_disconnect
        if (self.controllerpassword == '*****'):
            self.controllerpassword = ''
        self.initialized = True
        if self.enabled:
            if (self.isconnected() == False):
                misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, 'MQTT: Try to connect')
                self.connect()
        else:
            self.laststatus = (- 1)
            self.disconnect()
        return True

    def connect(self):
        """Establish the broker connection (credentials, TLS mode, LWT) and start the loop."""
        if (self.enabled and self.initialized):
            if self.isconnected():
                misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, 'Already connected force disconnect!')
                self.disconnect()
            self.connectinprogress = 1
            self.lastreconnect = time.time()
            if (((self.controlleruser != '') or (self.controllerpassword != '')) and (self.isconnected() == False)):
                self.mqttclient.username_pw_set(self.controlleruser, self.controllerpassword)
                misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, 'Set MQTT password')
            try:
                am = self.authmode
            except:
                am = 0
            if ((am == 1) or (am == 2)):
                # TLS requested: ssl module must be importable.
                try:
                    import ssl
                except:
                    misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, 'OpenSSL is not reachable!')
                    self.initialized = False
                    return False
                if (am == 1):
                    # TLS with a server certificate file.
                    try:
                        fname = self.certfile.strip()
                    except:
                        fname = ''
                    if ((fname == '') or (str(fname) == '0') or (os.path.exists(fname) == False)):
                        misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, 'Certificate file not found!')
                        self.initialized = False
                        return False
                    try:
                        self.mqttclient.tls_set(fname, tls_version=ssl.PROTOCOL_TLSv1_2)
                        self.mqttclient.tls_insecure_set(True)
                    except:
                        pass
                elif (am == 2):
                    # TLS without certificate verification.
                    try:
                        ssl_ctx = ssl.create_default_context()
                        ssl_ctx.check_hostname = False
                        ssl_ctx.verify_mode = ssl.CERT_NONE
                        self.mqttclient.tls_set_context(ssl_ctx)
                        self.mqttclient.tls_insecure_set(True)
                    except:
                        pass
            try:
                kp = self.keepalive
            except:
                self.keepalive = 60
            try:
                self.mqttclient.will_set(self.lwt_t, payload=self.lwtdisconnmsg, qos=0, retain=True)
                self.mqttclient.connect(self.controllerip, int(self.controllerport), keepalive=self.keepalive)
                self.mqttclient.loop_start()
            except Exception as e:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ((((('MQTT controller: ' + self.controllerip) + ':') + str(self.controllerport)) + ' connection failed ') + str(e)))
                self.laststatus = 0
        return self.isconnected()

    def disconnect(self):
        """Publish the LWT offline message, stop the loop and disconnect."""
        try:
            (mres, mid) = self.mqttclient.publish(self.lwt_t, self.lwtdisconnmsg)
        except Exception as e:
            print(e)
        try:
            self.mqttclient.loop_stop(True)
        except:
            pass
        try:
            self.mqttclient.disconnect()
        except:
            pass
        stat = self.isconnected()
        if (self.enabled != True):
            commands.rulesProcessing('GenMQTT#Disconnected', rpieGlobals.RULE_SYSTEM)
        return stat

    def isconnected(self, ForceCheck=True):
        """Check broker connectivity (by publishing a PING) and fire rules on changes."""
        res = False
        if (self.enabled and self.initialized):
            if (ForceCheck == False):
                return (self.laststatus == 1)
            if (self.mqttclient is not None):
                # Publish a PING to <base>/status; a 0 result code means connected.
                tstart = self.outch[:(len(self.outch) - 1)]
                gtopic = (tstart + 'status')
                gval = 'PING'
                mres = 1
                try:
                    (mres, mid) = self.mqttclient.publish(gtopic, gval)
                except:
                    mres = 1
                if (mres == 0):
                    res = 1
                else:
                    res = 0
                if (res != self.laststatus):
                    # Connectivity changed: notify the rules engine.
                    if (res == 0):
                        commands.rulesProcessing('GenMQTT#Disconnected', rpieGlobals.RULE_SYSTEM)
                    else:
                        try:
                            (mres, mid) = self.mqttclient.publish(self.lwt_t, self.lwtconnmsg)
                        except:
                            pass
                        commands.rulesProcessing('GenMQTT#Connected', rpieGlobals.RULE_SYSTEM)
                    self.laststatus = res
                if ((res == 1) and (self.connectinprogress == 1)):
                    self.connectinprogress = 0
        return res

    def webform_load(self):
        """Render the controller's configuration form."""
        webserver.addFormTextBox('Report topic', 'inchannel', self.inchannel, 255)
        webserver.addFormTextBox('Command topic', 'outchannel', self.outchannel, 255)
        try:
            kp = self.keepalive
        except:
            kp = 60
        webserver.addFormNumericBox('Keepalive time', 'keepalive', kp, 2, 600)
        webserver.addUnit('s')
        try:
            am = self.authmode
            fname = self.certfile
        except:
            am = 0
            fname = ''
        options = ['MQTT', 'MQTTS/with cert', 'MQTTS/insecure']
        optionvalues = [0, 1, 2]
        webserver.addFormSelector('Mode', 'c014_mode', len(optionvalues), options, optionvalues, None, int(am))
        webserver.addFormTextBox('Server certificate file', 'c014_cert', str(fname), 120)
        webserver.addBrowseButton('Browse', 'c014_cert', startdir=str(fname))
        webserver.addFormNote("Upload certificate first at <a href='filelist'>filelist</a> then select here!")
        try:
            lwt = self.lwt_topic
            lwt1 = self.lwtconnmsg
            lwt2 = self.lwtdisconnmsg
        except:
            lwt = '%sysname%/LWT'
            lwt1 = 'Online'
            lwt2 = 'Offline'
        webserver.addFormTextBox('Controller lwl topic', 'c014_lwt', lwt, 255)
        webserver.addFormTextBox('LWT Connect Message', 'c014_cmsg', lwt1, 255)
        webserver.addFormTextBox('LWT Disconnect Message', 'c014_dcmsg', lwt2, 255)
        webserver.addFormCheckBox('Check conn & reconnect if needed at every 30 sec', 'c014_reconnect', self.timer30s)
        try:
            webserver.addFormCheckBox('Use JSON payload', 'c014_usejson', self.useJSON)
        except:
            self.useJSON = False
        try:
            webserver.addFormCheckBox('Echo back control device status (status after set)', 'c014_backreport', self.backreport)
        except:
            self.backreport = True
        try:
            webserver.addFormCheckBox('Retain every message', 'c014_retain', self.globalretain)
        except:
            self.globalretain = False
        return True

    def webform_save(self, params):
        """Persist submitted form values; reconnect when relevant settings changed."""
        pchange = False  # tracks whether a reconnect-worthy setting changed
        pval = self.inchannel
        self.inchannel = webserver.arg('inchannel', params)
        if (pval != self.inchannel):
            pchange = True
        pval = self.outchannel
        self.outchannel = webserver.arg('outchannel', params)
        if (self.inchannel == self.outchannel):
            # Report and command topics must differ to avoid feedback loops.
            self.outchannel = (self.outchannel + '/set')
        if (pval != self.outchannel):
            pchange = True
        try:
            p1 = self.authmode
            p2 = self.certfile
            self.authmode = int(webserver.arg('c014_mode', params))
            self.certfile = webserver.arg('c014_cert', params)
            if ((p1 != self.authmode) or (p2 != self.certfile)):
                pchange = True
        except:
            self.authmode = 0
            self.certfile = ''
        pval = self.keepalive
        try:
            self.keepalive = int(webserver.arg('keepalive', params))
        except:
            self.keepalive = 60
        if (pval != self.keepalive):
            pchange = True
        # NOTE(review): if the try below fails before assigning lwt/lwt1/lwt2,
        # the comparison after the except would hit unbound locals -- confirm
        # whether webserver.arg can actually raise here.
        try:
            lwt = self.lwt_topic
            lwt1 = self.lwtconnmsg
            lwt2 = self.lwtdisconnmsg
            self.lwt_topic = webserver.arg('c014_lwt', params)
            self.lwtconnmsg = webserver.arg('c014_cmsg', params)
            self.lwtdisconnmsg = webserver.arg('c014_dcmsg', params)
        except:
            self.lwt_topic = '%sysname%/LWT'
            self.lwtconnmsg = 'Online'
            self.lwtdisconnmsg = 'Offline'
        if ((lwt != self.lwt_topic) or (lwt1 != self.lwtconnmsg) or (lwt2 != self.lwtdisconnmsg)):
            pchange = True
        if (webserver.arg('c014_reconnect', params) == 'on'):
            self.timer30s = True
        else:
            self.timer30s = False
        if (webserver.arg('c014_usejson', params) == 'on'):
            self.useJSON = True
        else:
            self.useJSON = False
        if (webserver.arg('c014_backreport', params) == 'on'):
            self.backreport = True
        else:
            self.backreport = False
        if (webserver.arg('c014_retain', params) == 'on'):
            self.globalretain = True
        else:
            self.globalretain = False
        if (pchange and self.enabled):
            self.disconnect()
            time.sleep(0.1)
            self.connect()
        return True

    def timer_thirty_second(self):
        """Periodic reconnect check (only active when timer30s is enabled)."""
        if self.enabled:
            if (self.isconnected() == False):
                misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, 'MQTT: Try to reconnect')
                self.connect()
        return self.timer30s

    def on_message(self, msg):
        """Route an incoming MQTT message to a command or a task value update.

        '<base>/cmd' payloads are executed as commands; otherwise the task and
        value are resolved from the topic segments (or a JSON 'taskname' key).
        """
        success = False
        tstart = self.outch[:(len(self.outch) - 1)]
        if (msg.topic.startswith(tstart) or (self.outch == '#')):
            msg2 = msg.payload.decode('utf-8')
            if ((msg.topic == (tstart + 'cmd')) and (self.outch != '#')):
                commands.doExecuteCommand(msg2, True)
                success = True
            else:
                try:
                    dnames = msg.topic.split('/')
                    dnames2 = self.outchannel.split('/')
                except:
                    dnames = []
                if (len(dnames) > 1):
                    v1 = (- 1)  # index of the taskname segment
                    v2 = (- 1)  # index of the valuename segment
                    if self.outchannel.endswith(('/' + dnames[(len(dnames) - 1)])):
                        ttaskname = ''
                        if self.useJSON:
                            # JSON payloads may carry the taskname directly.
                            mlist = []
                            if ('{' in msg2):
                                try:
                                    mlist = json.loads(msg2)
                                except Exception as e:
                                    mlist = []
                            if ('taskname' in mlist):
                                ttaskname = mlist['taskname']
                                dnames = []
                        if (ttaskname == ''):
                            # Resolve the taskname from the topic placeholders.
                            try:
                                v1 = dnames2.index('#')
                                v2 = (v1 + 1)
                            except:
                                v1 = (- 1)
                            if (v1 == (- 1)):
                                try:
                                    v1 = dnames2.index('%tskname%')
                                except:
                                    v1 = (- 1)
                                try:
                                    v2 = dnames2.index('%valname%')
                                except:
                                    v2 = (- 1)
                                try:
                                    v3 = dnames2.index('%tskid%')
                                except:
                                    v3 = (- 1)
                                if (v3 > (- 1)):
                                    # Numeric task id in the topic.
                                    try:
                                        t = (int(dnames[v3]) - 1)
                                        if (Settings.Tasks[t] and (type(Settings.Tasks[t]) is not bool)):
                                            ttaskname = Settings.Tasks[t].gettaskname().strip()
                                    except:
                                        pass
                                elif ((v1 == (- 1)) and (v2 > (- 1))):
                                    # Only a valuename present: search all tasks for it.
                                    try:
                                        for x in range(len(Settings.Tasks)):
                                            if (Settings.Tasks[x] and (type(Settings.Tasks[x]) is not bool)):
                                                for u in range(Settings.Tasks[x].valuecount):
                                                    if (Settings.Tasks[x].valuenames[u] == dnames[v2]):
                                                        ttaskname = Settings.Tasks[x].gettaskname().strip()
                                                        break
                                            if (ttaskname != ''):
                                                break
                                    except:
                                        pass
                            if ((ttaskname == '') and (v1 > (- 1))):
                                ttaskname = dnames[v1]
                        if (self.useJSON and (ttaskname != '')):
                            # JSON: collect up to 4 values by their value names.
                            try:
                                pvalues = [(- 1), (- 1), (- 1), (- 1)]
                                for x in range(len(Settings.Tasks)):
                                    if (Settings.Tasks[x] and (type(Settings.Tasks[x]) is not bool)):
                                        if (Settings.Tasks[x].gettaskname() == ttaskname):
                                            for u in range(Settings.Tasks[x].valuecount):
                                                valnam = Settings.Tasks[x].valuenames[u]
                                                if (valnam in mlist):
                                                    pvalues[u] = mlist[valnam]
                                            break
                            except:
                                pass
                            self.onmsgcallbackfunc(self.controllerindex, (- 1), pvalues, taskname=ttaskname, valuename='')
                            success = True
                            return success
                        if ((ttaskname != '') and (v2 > (- 1)) and (v2 < len(dnames))):
                            self.onmsgcallbackfunc(self.controllerindex, (- 1), msg2, taskname=ttaskname, valuename=dnames[v2])
                            success = True

    def senddata(self, idx, sensortype, value, userssi=(- 1), usebattery=(- 1), tasknum=(- 1), changedvalue=(- 1)):
        """Publish task values to the broker, per-value topics or one JSON payload.

        changedvalue == -1 publishes every value of the task; otherwise only
        the given (1-based) value index is published.
        """
        if self.enabled:
            if (tasknum is None):
                return False
            success = False
            if self.isconnected(False):
                if self.useJSON:
                    changedvalue = 1  # JSON always sends the whole task at once
                if (tasknum != (- 1)):
                    tname = Settings.Tasks[tasknum].gettaskname()
                    if (changedvalue == (- 1)):
                        # Publish each non-empty value on its own topic.
                        for u in range(Settings.Tasks[tasknum].valuecount):
                            vname = Settings.Tasks[tasknum].valuenames[u]
                            if (vname != ''):
                                if (('%t' in self.inch) or ('%v' in self.inch)):
                                    gtopic = self.inch.replace('#/', '')
                                    gtopic = gtopic.replace('#', '')
                                    gtopic = gtopic.replace('%tskname%', tname)
                                    gtopic = gtopic.replace('%tskid%', str((tasknum + 1)))
                                    gtopic = gtopic.replace('%valname%', vname)
                                else:
                                    gtopic = self.inch.replace('#', ((tname + '/') + vname))
                                gval = str(value[u])
                                if (gval == ''):
                                    gval = '0'
                                mres = 1
                                try:
                                    (mres, mid) = self.mqttclient.publish(gtopic, gval, retain=self.globalretain)
                                except:
                                    mres = 1
                                if (mres != 0):
                                    # Publish failed: re-check connectivity, stop trying.
                                    self.isconnected()
                                    break
                    else:
                        vname = Settings.Tasks[tasknum].valuenames[(changedvalue - 1)]
                        if (('%t' in self.inch) or ('%v' in self.inch)):
                            gtopic = self.inch.replace('#/', '')
                            gtopic = gtopic.replace('#', '')
                            gtopic = gtopic.replace('%tskname%', tname)
                            gtopic = gtopic.replace('%tskid%', str((tasknum + 1)))
                            gtopic = gtopic.replace('%valname%', vname)
                        elif self.useJSON:
                            gtopic = self.inch.replace('#', tname)
                        else:
                            gtopic = self.inch.replace('#', ((tname + '/') + vname))
                        if (vname != ''):
                            gval = str(value[(changedvalue - 1)])
                            if (gval == ''):
                                gval = '0'
                            if self.useJSON:
                                # Build the JSON payload by hand: taskname, every
                                # value, then optional battery/rssi/port fields.
                                gval = (('{"taskname":"' + Settings.Tasks[tasknum].taskname) + '",')
                                for u in range(Settings.Tasks[tasknum].valuecount):
                                    gval += (('"' + Settings.Tasks[tasknum].valuenames[u]) + '":')
                                    val = value[u]
                                    if str(val).replace('.', '').isnumeric():
                                        gval += str(val)
                                    else:
                                        gval += (('"' + str(val)) + '"')
                                    gval += ','
                                try:
                                    usebattery = float(str(usebattery).strip())
                                except Exception as e:
                                    usebattery = (- 1)
                                bval = (- 1)
                                if ((usebattery != (- 1)) and (usebattery != 255)):
                                    bval = usebattery
                                else:
                                    bval = misc.get_battery_value()
                                if ((bval != (- 1)) and (bval != 255)):
                                    gval += (('"battery":' + str(bval)) + ',')
                                if (userssi != (- 1)):
                                    gval += (('"rssi":' + str(userssi)) + ',')
                                ps = str(Settings.Tasks[tasknum].ports)
                                if ((ps != '0') and (ps != '')):
                                    gval += (('"port":"' + str(ps)) + '",')
                                # Replace the trailing comma with the closing brace.
                                gval = (gval[:(- 1)] + '}')
                            mres = 1
                            try:
                                (mres, mid) = self.mqttclient.publish(gtopic, gval, retain=self.globalretain)
                            except:
                                mres = 1
                            if (mres != 0):
                                self.isconnected()
                        else:
                            misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, 'MQTT taskname error, sending failed.')
            else:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, 'MQTT not connected, sending failed.')
                if ((time.time() - self.lastreconnect) > 30):
                    self.connect()

    def on_connect(self):
        """Broker reported connect: refresh status (or disconnect if disabled)."""
        if (self.enabled and self.initialized):
            self.isconnected()
        else:
            self.disconnect()

    def on_disconnect(self):
        """Broker reported disconnect: refresh status."""
        if self.initialized:
            self.isconnected()
class TestOFPActionSetNwSrc(unittest.TestCase):
    """Tests for OFPActionSetNwSrc (OpenFlow 1.0 set-nw-src action)."""

    type_ = {'buf': b'\x00\x06', 'val': ofproto.OFPAT_SET_NW_SRC}
    len_ = {'buf': b'\x00\x08', 'val': ofproto.OFP_ACTION_NW_ADDR_SIZE}
    # Fixed: the original had an empty 'val' (syntax error). The packed buffer
    # b'\xc0\xa8z\n' is 0xc0a87a0a == 192.168.122.10 == 3232266762.
    nw_addr = {'buf': b'\xc0\xa8z\n', 'val': 3232266762}
    buf = ((type_['buf'] + len_['buf']) + nw_addr['buf'])
    c = OFPActionSetNwSrc(nw_addr['val'])

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_init(self):
        eq_(self.nw_addr['val'], self.c.nw_addr)

    def test_parser_src(self):
        res = self.c.parser(self.buf, 0)
        eq_(self.nw_addr['val'], res.nw_addr)

    def test_parser_dst(self):
        type_ = {'buf': b'\x00\x07', 'val': ofproto.OFPAT_SET_NW_DST}
        buf = ((type_['buf'] + self.len_['buf']) + self.nw_addr['buf'])
        res = self.c.parser(buf, 0)
        eq_(self.nw_addr['val'], res.nw_addr)

    # NOTE(review): the bare `(AssertionError)` expressions below look like
    # stripped `@raises(AssertionError)` decorators from the original suite --
    # these two tests are expected to raise; confirm against upstream.
    (AssertionError)
    def test_parser_check_type(self):
        type_ = {'buf': b'\x00\x05', 'val': 5}
        buf = ((type_['buf'] + self.len_['buf']) + self.nw_addr['buf'])
        self.c.parser(buf, 0)

    (AssertionError)
    def test_parser_check_len(self):
        len_ = {'buf': b'\x00\x10', 'val': 16}
        buf = ((self.type_['buf'] + len_['buf']) + self.nw_addr['buf'])
        self.c.parser(buf, 0)

    def test_serialize(self):
        buf = bytearray()
        self.c.serialize(buf, 0)
        fmt = ofproto.OFP_ACTION_NW_ADDR_PACK_STR
        # six.binary_type replaced with bytes (Python 3 only codebase).
        res = struct.unpack(fmt, bytes(buf))
        eq_(self.type_['val'], res[0])
        eq_(self.len_['val'], res[1])
        eq_(self.nw_addr['val'], res[2])
def convert_keys_to_camel_case(data: dict[(str, _typing.Any)]) -> dict[(str, _typing.Any)]:
    """Return a copy of *data* whose snake_case keys are converted to camelCase."""
    def _camel(name: str) -> str:
        head, *tail = name.split('_')
        return head + ''.join(part.capitalize() for part in tail)
    return {_camel(key): value for (key, value) in data.items()}
class GradientBoostingClassifier(GradientBoosting):
    """Gradient boosting specialised for classification (one-hot encoded targets)."""

    def __init__(self, n_estimators=200, learning_rate=0.5, min_samples_split=2, min_info_gain=1e-07, max_depth=2, debug=False):
        super().__init__(
            n_estimators=n_estimators,
            learning_rate=learning_rate,
            min_samples_split=min_samples_split,
            min_impurity=min_info_gain,
            max_depth=max_depth,
            regression=False,
        )

    def fit(self, X, y):
        """One-hot encode the labels, then delegate to the base fit."""
        y_onehot = to_categorical(y)
        super().fit(X, y_onehot)
class OptionSeriesSplineSonificationDefaultinstrumentoptionsMapping(Options):
    """Generated accessor class: each method lazily creates/returns the
    corresponding sonification mapping sub-option config object.

    NOTE(review): these accessors are likely meant to be `@property`-decorated
    (decorators may have been stripped from this decompiled source) -- confirm
    against the generator's other Option classes.
    """
    def frequency(self) -> 'OptionSeriesSplineSonificationDefaultinstrumentoptionsMappingFrequency':
        return self._config_sub_data('frequency', OptionSeriesSplineSonificationDefaultinstrumentoptionsMappingFrequency)
    def gapBetweenNotes(self) -> 'OptionSeriesSplineSonificationDefaultinstrumentoptionsMappingGapbetweennotes':
        return self._config_sub_data('gapBetweenNotes', OptionSeriesSplineSonificationDefaultinstrumentoptionsMappingGapbetweennotes)
    def highpass(self) -> 'OptionSeriesSplineSonificationDefaultinstrumentoptionsMappingHighpass':
        return self._config_sub_data('highpass', OptionSeriesSplineSonificationDefaultinstrumentoptionsMappingHighpass)
    def lowpass(self) -> 'OptionSeriesSplineSonificationDefaultinstrumentoptionsMappingLowpass':
        return self._config_sub_data('lowpass', OptionSeriesSplineSonificationDefaultinstrumentoptionsMappingLowpass)
    def noteDuration(self) -> 'OptionSeriesSplineSonificationDefaultinstrumentoptionsMappingNoteduration':
        return self._config_sub_data('noteDuration', OptionSeriesSplineSonificationDefaultinstrumentoptionsMappingNoteduration)
    def pan(self) -> 'OptionSeriesSplineSonificationDefaultinstrumentoptionsMappingPan':
        return self._config_sub_data('pan', OptionSeriesSplineSonificationDefaultinstrumentoptionsMappingPan)
    def pitch(self) -> 'OptionSeriesSplineSonificationDefaultinstrumentoptionsMappingPitch':
        return self._config_sub_data('pitch', OptionSeriesSplineSonificationDefaultinstrumentoptionsMappingPitch)
    def playDelay(self) -> 'OptionSeriesSplineSonificationDefaultinstrumentoptionsMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionSeriesSplineSonificationDefaultinstrumentoptionsMappingPlaydelay)
    def time(self) -> 'OptionSeriesSplineSonificationDefaultinstrumentoptionsMappingTime':
        return self._config_sub_data('time', OptionSeriesSplineSonificationDefaultinstrumentoptionsMappingTime)
    def tremolo(self) -> 'OptionSeriesSplineSonificationDefaultinstrumentoptionsMappingTremolo':
        return self._config_sub_data('tremolo', OptionSeriesSplineSonificationDefaultinstrumentoptionsMappingTremolo)
    def volume(self) -> 'OptionSeriesSplineSonificationDefaultinstrumentoptionsMappingVolume':
        return self._config_sub_data('volume', OptionSeriesSplineSonificationDefaultinstrumentoptionsMappingVolume)
class AbstractExpert(metaclass=abc.ABCMeta):
    """Base class for blackboard-pattern experts.

    NOTE(review): despite the ABCMeta metaclass, the interface methods are not
    marked @abc.abstractmethod -- possibly stripped decorators in this
    (decompiled) source; confirm before relying on runtime NotImplementedError.
    """
    def __init__(self, blackboard: Blackboard) -> None:
        # Shared blackboard this expert reads from and contributes to.
        self.blackboard = blackboard
    def is_eager_to_contribute(self):
        """Return whether this expert has something to add; subclasses must override."""
        raise NotImplementedError('Must provide implementation in subclass.')
    def contribute(self):
        """Apply this expert's contribution to the blackboard; subclasses must override."""
        raise NotImplementedError('Must provide implementation in subclass.')
class TorchActorCritic(TorchModel):
def __init__(self, policy: TorchPolicy, critic: Union[(TorchStateCritic, TorchStateActionCritic)], device: str):
    """Bundle a policy and a critic into a single actor-critic model on `device`."""
    assert ((critic is not None) and isinstance(critic, (TorchStateCritic, TorchStateActionCritic))), 'Make sure to provide an appropriate critic when training with actor-critic models!'
    assert ((policy is not None) and isinstance(policy, TorchPolicy)), 'Make sure to provide an appropriate policy when training with actor-critic models!'
    self.policy = policy
    self.critic = critic
    # Base-class init records the device (see to()/device below).
    TorchModel.__init__(self, device=device)
# NOTE(review): this bare `(TorchModel)` expression looks like a stripped
# `@override(TorchModel)` decorator from the original source -- confirm.
(TorchModel)
def parameters(self) -> List[torch.Tensor]:
    """Concatenated parameter lists of the policy and the critic."""
    return (self.policy.parameters() + self.critic.parameters())
# NOTE(review): bare `(TorchModel)` likely a stripped `@override(TorchModel)`.
(TorchModel)
def eval(self) -> None:
    """Put both sub-models into evaluation mode."""
    self.policy.eval()
    self.critic.eval()
# NOTE(review): bare `(TorchModel)` likely a stripped `@override(TorchModel)`.
(TorchModel)
def train(self) -> None:
    """Put both sub-models into training mode."""
    self.policy.train()
    self.critic.train()
# NOTE(review): bare `(TorchModel)` likely a stripped `@override(TorchModel)`.
(TorchModel)
def to(self, device: str):
    """Move both sub-models (and this wrapper's device record) to `device`."""
    self._device = device
    self.policy.to(device)
    self.critic.to(device)
# NOTE(review): probably originally decorated with `@property` -- confirm.
def device(self) -> str:
    """Device string the model currently lives on."""
    return self._device
# NOTE(review): bare `(TorchModel)` likely a stripped `@override(TorchModel)`.
(TorchModel)
def state_dict(self) -> Dict:
    """Merged state dict of policy and critic.

    NOTE(review): overlapping keys would let the critic's entries overwrite
    the policy's -- presumably their key namespaces are disjoint; confirm.
    """
    state_dict = dict()
    state_dict.update(self.policy.state_dict())
    state_dict.update(self.critic.state_dict())
    return state_dict
# NOTE(review): bare `(TorchModel)` likely a stripped `@override(TorchModel)`.
(TorchModel)
def load_state_dict(self, state_dict: Dict) -> None:
    """Load the merged state dict into both sub-models (each picks its own keys)."""
    self.policy.load_state_dict(state_dict)
    self.critic.load_state_dict(state_dict)
def compute_actor_critic_output(self, record: StructuredSpacesRecord, temperature: float=1.0) -> Tuple[(PolicyOutput, StateCriticOutput)]:
policy_output = self.policy.compute_policy_output(record, temperature=temperature)
critic_input = StateCriticInput.build(policy_output, record)
critic_output = self.critic.predict_values(critic_input)
return (policy_output, critic_output)
def seed(self, seed: int) -> None: |
def _firestore_endpoint_handler(func: (_C1 | _C2), event_type: str, document_pattern: _path_pattern.PathPattern, raw: _ce.CloudEvent) -> None:
    """Decode a Firestore CloudEvent and invoke the user handler ``func``.

    Parses the event payload (JSON or protobuf), rebuilds ``DocumentSnapshot``
    objects for the new and/or old document state, shapes the data according
    to ``event_type`` (snapshot for create/delete, ``Change`` for write/update),
    and calls ``func`` with a fully populated ``Event``.
    """
    event_attributes = raw._get_attributes()
    event_data: _typing.Any = raw.get_data()
    firestore_event_data: _firestore.DocumentEventData
    content_type: str = event_attributes['datacontenttype']
    # Payload may arrive JSON- or protobuf-encoded depending on the transport.
    if (('application/json' in content_type) or isinstance(event_data, dict)):
        firestore_event_data = _firestore.DocumentEventData.from_json(event_data)
    elif (('application/protobuf' in content_type) or isinstance(event_data, bytes)):
        firestore_event_data = _firestore.DocumentEventData.deserialize(event_data)
    else:
        actual_type = type(event_data)
        raise TypeError(f"Firestore: Cannot parse event payload of data type '{actual_type}' and content type '{content_type}'.")
    event_location = event_attributes['location']
    event_project = event_attributes['project']
    event_namespace = event_attributes['namespace']
    event_document = event_attributes['document']
    event_database = event_attributes['database']
    time = event_attributes['time']
    event_time = _util.timestamp_conversion(time)
    # Lazily initialize the default Firebase app if the user has not done so.
    if (_DEFAULT_APP_NAME not in _apps):
        initialize_app()
    app = get_app()
    firestore_client = _firestore_v1.Client(project=app.project_id, database=event_database)
    firestore_ref: DocumentReference = firestore_client.document(event_document)
    value_snapshot: (DocumentSnapshot | None) = None
    old_value_snapshot: (DocumentSnapshot | None) = None
    # Rebuild snapshots for the post- and pre-event document state, if present.
    if firestore_event_data.value:
        document_dict = _firestore_helpers.decode_dict(firestore_event_data.value.fields, firestore_client)
        value_snapshot = _firestore_v1.DocumentSnapshot(firestore_ref, document_dict, True, _datetime_to_pb_timestamp(event_time), firestore_event_data.value.create_time, firestore_event_data.value.update_time)
    if firestore_event_data.old_value:
        document_dict = _firestore_helpers.decode_dict(firestore_event_data.old_value.fields, firestore_client)
        old_value_snapshot = _firestore_v1.DocumentSnapshot(firestore_ref, document_dict, True, _datetime_to_pb_timestamp(event_time), firestore_event_data.old_value.create_time, firestore_event_data.old_value.update_time)
    # Reuse `firestore_event_data` as the handler-facing payload: the snapshot
    # for create/delete events, a before/after Change for write/update events.
    if (event_type == _event_type_deleted):
        firestore_event_data = old_value_snapshot
    if (event_type == _event_type_created):
        firestore_event_data = value_snapshot
    if (event_type in (_event_type_written, _event_type_updated)):
        firestore_event_data = Change(before=old_value_snapshot, after=value_snapshot)
    # Wildcard captures from the document path pattern (e.g. {docId}).
    params: dict[(str, str)] = {**document_pattern.extract_matches(event_document)}
    database_event = Event(project=event_project, namespace=event_namespace, database=event_database, location=event_location, document=event_document, specversion=event_attributes['specversion'], id=event_attributes['id'], source=event_attributes['source'], type=event_attributes['type'], time=event_time, data=firestore_event_data, subject=event_attributes['subject'], params=params)
    func(database_event)
class OpenAIEmbedder(BaseEmbedder):
    """Embedder backed by the OpenAI (or Azure OpenAI) embedding API."""

    def __init__(self, config: Optional[BaseEmbedderConfig]=None):
        """Configure the embedding function and vector dimension.

        Falls back to the ``text-embedding-ada-002`` model when none is set.
        Raises ``ValueError`` when neither an API key (config or env) nor an
        ``OPENAI_ORGANIZATION`` is available and no Azure deployment is used.
        """
        super().__init__(config=config)
        if (self.config.model is None):
            self.config.model = 'text-embedding-ada-002'
        # Use .get() so a missing env var yields None instead of raising a
        # bare KeyError; the explicit check below then produces the intended
        # error message (previously the `api_key is None` branch was dead code).
        api_key = (self.config.api_key or os.environ.get('OPENAI_API_KEY'))
        if self.config.deployment_name:
            # Azure OpenAI path: auth is resolved by the langchain wrapper.
            embeddings = AzureOpenAIEmbeddings(deployment=self.config.deployment_name)
            embedding_fn = BaseEmbedder._langchain_default_concept(embeddings)
        else:
            if ((api_key is None) and (os.getenv('OPENAI_ORGANIZATION') is None)):
                raise ValueError('OPENAI_API_KEY or OPENAI_ORGANIZATION environment variables not provided')
            embedding_fn = OpenAIEmbeddingFunction(api_key=api_key, organization_id=os.getenv('OPENAI_ORGANIZATION'), model_name=self.config.model)
        self.set_embedding_fn(embedding_fn=embedding_fn)
        self.set_vector_dimension(vector_dimension=VectorDimensions.OPENAI.value)
# NOTE(review): the two bare expressions below look like stripped decorators
# (`@_blueprint.route(...)` and a login-required wrapper whose name was
# truncated to `_required`) — confirm against the upstream source.
_blueprint.route('/project/<project_id>/delete', methods=['GET', 'POST'])
_required
def delete_project(project_id):
    """Flask view: confirm and delete a project together with its versions.

    GET renders the confirmation page; a confirmed POST publishes a
    ``project.remove`` message, deletes the project's version rows and the
    project itself, then redirects to the projects listing.  404 when the
    project does not exist, 401 when the user is not an admin.
    """
    project = models.Project.get(Session, project_id)
    if (not project):
        flask.abort(404)
    if (not is_admin()):
        # Only administrators may delete projects.
        flask.abort(401)
    project_name = project.name
    form = anitya.forms.ConfirmationForm()
    # 'confirm' is only present when the user pressed the confirm button.
    confirm = flask.request.form.get('confirm', False)
    if form.validate_on_submit():
        if confirm:
            # Announce the removal before mutating the database.
            utilities.publish_message(project=project.__json__(), topic='project.remove', message=dict(agent=flask.g.user.username, project=project.name))
            # Delete dependent version rows first, then the project row.
            for version in project.versions_obj:
                Session.delete(version)
            Session.delete(project)
            Session.commit()
            flask.flash(f'Project {project_name} has been removed')
            return flask.redirect(flask.url_for('anitya_ui.projects'))
        else:
            # Submitted but not confirmed: bounce back to the project page.
            return flask.redirect(flask.url_for('anitya_ui.project', project_id=project.id))
    return flask.render_template('project_delete.html', current='projects', project=project, form=form)
def arm_epilogue(blk):
    """Scan an ARM basic block for a function epilogue and annotate restores.

    Looks for a trailing return jump preceded by an ``SP = SP op const``
    stack adjustment; if found, walks backwards over register loads from
    SP-relative slots (callee-saved register restores, presumably) and marks
    each as a temp offset / given register until a non-restore statement is hit.
    """
    if (len(blk.bap.stmts) > 1):
        last_stmt = blk.bap.stmts[(- 1)]
        # Epilogue candidate: block ends in a return jump.
        if (isinstance(last_stmt, JmpStmt) and isinstance(last_stmt.kind, RetKind)):
            stmt = blk.bap.stmts[(- 2)]
            # Second-to-last statement must be a stack-pointer adjustment
            # of the form SP := SP <binop> <int constant>.
            if (isinstance(stmt.lhs, RegVar) and (stmt.lhs.name == 'SP') and isinstance(stmt.rhs, BinOpExp) and isinstance(stmt.rhs.e1, RegVar) and isinstance(stmt.rhs.e2, IntExp) and (stmt.rhs.e1.name == 'SP')):
                # Walk backwards from just before the SP adjustment.
                for i in range((len(blk.bap.stmts) - 3), (- 1), (- 1)):
                    stmt = blk.bap.stmts[i]
                    if (isinstance(stmt, DefStmt) and isinstance(stmt.lhs, RegVar) and isinstance(stmt.rhs, LoadExp)):
                        (base_pointer, offset, access) = mem_addr(stmt.rhs.addr, blk, stmt.pc)
                        # Only SP-relative loads count as epilogue restores.
                        if ((base_pointer is not None) and (base_pointer.base_register == 'SP')):
                            make_temp_offset(base_pointer.base_register, offset, blk, stmt.pc)
                            make_giv_reg(stmt.lhs.name, stmt.lhs.index, blk, stmt.pc)
                        else:
                            break
                    else:
                        # First non-restore statement ends the epilogue scan.
                        break
def main():
    """Correlate PCIE_INT_INTERFACE pip usage from design.txt with the
    bitstream and emit one tag per known pip (1 = used, 0 = unused)."""
    segmk = Segmaker('design.bits')
    tiledata = {}
    pipdata = {}
    ignpip = set()
    tile_ports = {}
    # Known pips/ports for both tile flavours.
    read_pip_data('pcie_int_interface_l.txt', pipdata, tile_ports)
    read_pip_data('pcie_int_interface_r.txt', pipdata, tile_ports)
    print('Loading tags from design.txt.')
    with open('design.txt', 'r') as f:
        for line in f:
            tile, pip, src, dst, pnum, pdir = line.split()
            if not tile.startswith('PCIE_INT_INTERFACE'):
                continue
            pip_prefix, _ = pip.split('.')
            tile_from_pip, tile_type = pip_prefix.split('/')
            assert tile == tile_from_pip
            _, src = src.split('/')
            _, dst = dst.split('/')
            pnum = int(pnum)
            pdir = int(pdir)
            entry = tiledata.setdefault(
                tile, {'type': tile_type, 'pips': set(), 'srcs': set(), 'dsts': set()})
            entry['pips'].add((src, dst))
            entry['srcs'].add(src)
            entry['dsts'].add(dst)
            if pdir == 0:
                # Bidirectional pip: each wire is both a source and a sink.
                entry['srcs'].add(dst)
                entry['dsts'].add(src)
    for tile, info in tiledata.items():
        tile_type = info['type']
        active_pips = info['pips']
        for src, dst in pipdata[tile_type]:
            if (src, dst) in ignpip:
                continue
            segmk.add_tile_tag(tile, f'{dst}.{src}', 1 if (src, dst) in active_pips else 0)
    segmk.compile(bitfilter=bitfilter)
    segmk.write()
class TestIssuesV3(TestCase):
    """Regression tests for AddValue/MultiValue enum combinations.

    NOTE(review): every enum member here is assigned ``''`` and the tests
    assert that ``Measurement('')`` resolves to several *different* members —
    this looks like the member value expressions (presumably ``auto()`` or
    similar) were lost when this file was regenerated; restore from upstream
    before relying on these tests.
    """

    def test_auto_multi_int_1(self):
        # AddValueEnum + MultiValueEnum via base classes, values starting at 0.
        class Measurement(int, AddValueEnum, MultiValueEnum, start=0):
            one = ''
            two = ''
            three = ''
        self.assertEqual([m.value for m in Measurement], [0, 1, 2])
        self.assertEqual([m.name for m in Measurement], ['one', 'two', 'three'])
        self.assertIs(Measurement(0), Measurement.one)
        self.assertIs(Measurement(''), Measurement.one)
        self.assertIs(Measurement(1), Measurement.two)
        self.assertIs(Measurement(''), Measurement.two)
        self.assertIs(Measurement(2), Measurement.three)
        self.assertIs(Measurement(''), Measurement.three)

    def test_auto_multi_int_2(self):
        # Same behavior expressed via the `settings=` keyword instead of bases.
        class Measurement(int, Enum, settings=(MultiValue, AddValue), start=0):
            one = ''
            two = ''
            three = ''
        self.assertEqual([m.value for m in Measurement], [0, 1, 2])
        self.assertEqual([m.name for m in Measurement], ['one', 'two', 'three'])
        self.assertIs(Measurement(0), Measurement.one)
        self.assertIs(Measurement(''), Measurement.one)
        self.assertIs(Measurement(1), Measurement.two)
        self.assertIs(Measurement(''), Measurement.two)
        self.assertIs(Measurement(2), Measurement.three)
        self.assertIs(Measurement(''), Measurement.three)

    def test_extend_enum_with_init(self):
        # extend_enum must honor the `init=` field mapping (foo=value, bar=extra).
        class Color(Enum, settings=MultiValue, init='foo bar'):
            red = ('1', 'yes')
            green = ('2', 'no')
            blue = ('3', 'maybe')
        self.assertEqual(Color.red.value, '1')
        self.assertEqual(Color.red.foo, '1')
        self.assertEqual(Color.red.bar, 'yes')
        extend_enum(Color, 'opacity', '4', 'never')
        self.assertEqual(list(Color), [Color.red, Color.green, Color.blue, Color.opacity])
        self.assertEqual(Color.opacity.value, '4')
        self.assertEqual(Color.opacity.name, 'opacity')
        # MultiValue: both the primary and secondary value resolve the member.
        self.assertTrue((Color('4') is Color.opacity))
        self.assertTrue((Color('never') is Color.opacity))
class PcapPktHdr(object):
    """Per-packet record header of a pcap capture file.

    Layout is four unsigned 32-bit integers: seconds, microseconds,
    captured length (``incl_len``) and original wire length (``orig_len``).
    """

    _PKT_HDR_FMT = 'IIII'  # ts_sec, ts_usec, incl_len, orig_len
    _PKT_HDR_FMT_BIG_ENDIAN = ('>' + _PKT_HDR_FMT)
    _PKT_HDR_FMT_LITTLE_ENDIAN = ('<' + _PKT_HDR_FMT)
    PKT_HDR_SIZE = struct.calcsize(_PKT_HDR_FMT)  # 16 bytes

    def __init__(self, ts_sec=0, ts_usec=0, incl_len=0, orig_len=0):
        self.ts_sec = ts_sec
        self.ts_usec = ts_usec
        self.incl_len = incl_len
        self.orig_len = orig_len

    # Restored @classmethod: the method takes `cls` and calls `cls(...)`,
    # but the decorator had been stripped, so an unbound call like
    # `PcapPktHdr.parser(buf)` would misinterpret `buf` as `cls`.
    @classmethod
    def parser(cls, buf, byteorder='little'):
        """Parse one packet header from ``buf``.

        Returns ``(header, payload)`` where payload is the ``incl_len``
        bytes following the header.  Raises ``IndexError`` on empty input.
        """
        if (not buf):
            raise IndexError('No data')
        if (byteorder == 'big'):
            fmt = cls._PKT_HDR_FMT_BIG_ENDIAN
        else:
            fmt = cls._PKT_HDR_FMT_LITTLE_ENDIAN
        (ts_sec, ts_usec, incl_len, orig_len) = struct.unpack_from(fmt, buf)
        hdr = cls(ts_sec, ts_usec, incl_len, orig_len)
        return (hdr, buf[cls.PKT_HDR_SIZE:(cls.PKT_HDR_SIZE + incl_len)])

    def serialize(self):
        """Pack this header using the host byte order (``sys.byteorder``)."""
        if (sys.byteorder == 'big'):
            fmt = self._PKT_HDR_FMT_BIG_ENDIAN
        else:
            fmt = self._PKT_HDR_FMT_LITTLE_ENDIAN
        return struct.pack(fmt, self.ts_sec, self.ts_usec, self.incl_len, self.orig_len)
class JsHtmlNumeric(JsHtmlRich):
    """JavaScript DOM helpers for numeric HTML components."""

    def to(self, number: float, timer: int=1, profile: types.PROFILE_TYPE=None):
        """Animate the displayed value counting up to ``number``.

        Emits JS that keeps a ``window.<htmlCode>_counter`` variable and a
        ``setInterval`` (every ``timer`` ms) that increments the counter and
        rebuilds the component until ``number`` is reached, then clears the
        interval.
        """
        return JsUtils.jsConvertFncs([self.page.js.objects.number(self.content.unformat(), js_code=('%s_counter' % self.htmlCode), set_var=True), self.page.js.window.setInterval([self.page.js.if_((self.page.js.objects.number.get(('window.%s_counter' % self.htmlCode)) < number), [self.page.js.objects.number((self.page.js.objects.number.get(('window.%s_counter' % self.htmlCode)) + 1), js_code=('window.%s_counter' % self.htmlCode), set_var=True), self.component.build(self.page.js.objects.number.get(('window.%s_counter' % self.htmlCode)))]).else_(self.page.js.window.clearInterval(('%s_interval' % self.htmlCode)))], ('%s_interval' % self.htmlCode), timer)], toStr=True, profile=profile)

    def add(self, item: float):
        """Emit JS adding ``item`` to the component's current innerText value."""
        return JsObjects.JsVoid(('\n%(component)s.innerText = parseFloat(%(component)s.innerText) + %(value)s' % {'value': item, 'component': self.component.dom.varName}))
class IsolateController(object):
    """Client-side stubs for the ``controller.IsolateController`` gRPC service
    using the experimental single-call API.

    Restored the ``@staticmethod`` decorators: every method here takes no
    ``self``/``cls`` (matching grpc's generated *_pb2_grpc service classes),
    so without the decorator any instance call would misbind ``request``.
    """

    @staticmethod
    def Run(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_stream(request, target, '/controller.IsolateController/Run', controller__pb2.HostedRun.SerializeToString, controller__pb2.HostedRunResult.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Map(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_stream(request, target, '/controller.IsolateController/Map', controller__pb2.HostedMap.SerializeToString, controller__pb2.HostedRunResult.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def CreateUserKey(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/controller.IsolateController/CreateUserKey', controller__pb2.CreateUserKeyRequest.SerializeToString, controller__pb2.CreateUserKeyResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def ListUserKeys(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/controller.IsolateController/ListUserKeys', controller__pb2.ListUserKeysRequest.SerializeToString, controller__pb2.ListUserKeysResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def RevokeUserKey(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/controller.IsolateController/RevokeUserKey', controller__pb2.RevokeUserKeyRequest.SerializeToString, controller__pb2.RevokeUserKeyResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def RegisterApplication(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_stream(request, target, '/controller.IsolateController/RegisterApplication', controller__pb2.RegisterApplicationRequest.SerializeToString, controller__pb2.RegisterApplicationResult.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def UpdateApplication(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/controller.IsolateController/UpdateApplication', controller__pb2.UpdateApplicationRequest.SerializeToString, controller__pb2.UpdateApplicationResult.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def SetAlias(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/controller.IsolateController/SetAlias', controller__pb2.SetAliasRequest.SerializeToString, controller__pb2.SetAliasResult.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def DeleteAlias(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/controller.IsolateController/DeleteAlias', controller__pb2.DeleteAliasRequest.SerializeToString, controller__pb2.DeleteAliasResult.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def ListAliases(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/controller.IsolateController/ListAliases', controller__pb2.ListAliasesRequest.SerializeToString, controller__pb2.ListAliasesResult.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def SetSecret(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/controller.IsolateController/SetSecret', controller__pb2.SetSecretRequest.SerializeToString, controller__pb2.SetSecretResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def ListSecrets(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/controller.IsolateController/ListSecrets', controller__pb2.ListSecretsRequest.SerializeToString, controller__pb2.ListSecretsResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
def _api_remote(path, method='GET'):
    """Decorator factory turning an async method into a remote HTTP API call.

    NOTE(review): this block is corrupted — see the two flagged lines inside
    ``wrapper`` — and is not valid Python as written; restore from upstream.
    """
    def decorator(func):
        async def wrapper(self, *args, **kwargs):
            # NOTE(review): the next line ("import" with no module) and the
            # "async with as client:" line (no context expression) lost their
            # operands — presumably an async HTTP client such as
            # httpx.AsyncClient was stripped here; recover from upstream.
            import
            (return_type, actual_dataclass, request_params) = _build_request(self, func, path, method, *args, **kwargs)
            async with as client:
                response = (await client.request(**request_params))
                if (response.status_code == 200):
                    # Successful call: decode JSON into the declared return type.
                    return _parse_response(response.json(), return_type, actual_dataclass)
                else:
                    error_msg = f'Remote request error, error code: {response.status_code}, error msg: {response.text}'
                    raise Exception(error_msg)
        return wrapper
    return decorator
class Chorolet(GraphPlotly.Chart):
    """Plotly choropleth-mapbox chart component.

    NOTE(review): ``chart``, ``layout`` and ``data`` read like property
    accessors (lazy caching, no arguments) — their ``@property`` decorators
    may have been stripped during regeneration; confirm against upstream
    before calling them as plain methods.
    """
    requirements = ('plotly.js',)
    __reqJs = ['plotly.js']

    def chart(self) -> JsPlotly.Pie:
        """Lazily create and cache the underlying JS chart object."""
        if (self._chart is None):
            self._chart = JsPlotly.Pie(page=self.page, js_code=self.js_code, component=self)
        return self._chart

    def layout(self) -> LayoutGeo:
        """Lazily create and cache the geo layout configuration."""
        if (self._layout is None):
            self._layout = LayoutGeo(page=self.page, component=self)
        return self._layout

    def data(self):
        """Return the most recently added trace."""
        return self._traces[(- 1)]

    def add_trace(self, data, type: str='choroplethmapbox', mode: str=None):
        """Append a new trace built from ``data``; returns self for chaining."""
        c_data = dict(data)
        if (type is not None):
            c_data['type'] = type
        if (mode is not None):
            c_data['mode'] = mode
        self._traces.append(DataScatterMapBox(component=self, page=self.page, attrs=c_data))
        return self
class Tags(JsHtml.JsHtmlRich):
    """JavaScript DOM helpers for a chips/tags input component."""

    def content(self):
        """Return a JS expression mapping each category to its list of chip values."""
        return JsHtml.ContentFormatters(self.page, ("\n(function(dom){var content = {}; \n dom.childNodes.forEach(function(rec){\n var label = rec.getAttribute('data-category');\n if(!(label in content) && (label != null)){ content[label] = [] }; \n var listItem = rec.querySelector('span[name=chip_value]');\n if (listItem != null && (label != null)){content[label].push(listItem.textContent)}}); \n return content})(%s)\n" % self.querySelector('div[name=panel]')))

    def is_duplicated(self, text: str, category: str=None):
        """Return a JS expression yielding the chip's panel index, or -1 if absent."""
        return JsObjects.JsObjects.get((" \n(function(dom){var index = -1; var children = dom.childNodes; var count = 0; \n for(child in children){if((typeof children[child] === 'object') && (children[child].querySelector('span[name=chip_value]') != null) && children[child].querySelector('span[name=chip_value]').textContent == %(tezt)s){\n if(children[child].getAttribute('data-category') == %(category)s){ index = count; break; }\n }; count++; }; return index})(%(panel)s)" % {'tezt': text, 'category': category, 'panel': self.querySelector('div[name=panel]')}))

    def values(self, category: Union[(str, primitives.JsDataModel)]=None):
        """Return a JS expression listing chip values, optionally for one category."""
        if (category is None):
            return JsObjects.JsArray.JsArray.get(("(function(dom){var content = []; dom.childNodes.forEach(function(rec){content.push(rec.querySelector('span[name=chip_value]').textContent)}); return content})(%s)" % self.querySelector('div[name=panel]')))
        category = JsUtils.jsConvertData(category, None)
        return JsObjects.JsObjects.get((" \n(function(dom){var children = dom.childNodes; var values = [];\n for(child in children){if(typeof children[child] === 'object'){\n if(children[child].getAttribute('data-category') == %s){ \n var listItem = children[child].querySelector('span[name=chip_value]');\n if (listItem != null){values.push(listItem.textContent)}}\n }}; return values})(%s)" % (category, self.querySelector('div[name=panel]'))))

    def hide(self):
        # NOTE(review): this calls .show() on the panel, identical to `show`
        # below — looks like a copy-paste bug (expected .hide()); confirm
        # against the DOM helper API before changing.
        return self.querySelector('div[name=panel]').show()

    def show(self):
        """Show the chips panel."""
        return self.querySelector('div[name=panel]').show()

    def toggle(self):
        """Toggle the chips panel visibility."""
        return self.querySelector('div[name=panel]').toggle()

    def add(self, text: Union[(str, primitives.JsDataModel)], category: Union[(str, primitives.JsDataModel)]=None, name: str=None, fixed: bool=False, no_duplicate: bool=True):
        """Emit JS adding one chip (or one per item when ``text`` is an array).

        When ``no_duplicate`` is true, the chip is only added if no chip with
        the same text exists in the same category.  Component options are
        serialized into the JS ``chipAdd`` call.
        """
        text = JsUtils.jsConvertData(text, None)
        fixed = JsUtils.jsConvertData(fixed, None)
        if (category is None):
            # Fall back to the explicit name, then the component default category.
            category = (name or self.component._jsStyles['category'])
        name = (name or category)
        category = JsUtils.jsConvertData(category, None)
        name = JsUtils.jsConvertData(name, None)
        # Serialize the component's style options into JS object notation.
        (options, js_options) = (self.component._jsStyles, [])
        for (k, v) in options.items():
            if isinstance(v, dict):
                row = [("'%s': %s" % (s_k, JsUtils.jsConvertData(s_v, None))) for (s_k, s_v) in v.items()]
                js_options.append(("'%s': {%s}" % (k, ', '.join(row))))
            elif str(v).strip().startswith('function'):
                # Raw JS functions are passed through unquoted.
                js_options.append(('%s: %s' % (k, v)))
            else:
                js_options.append(('%s: %s' % (k, JsUtils.jsConvertData(v, None))))
        if no_duplicate:
            return JsObjects.JsObjects.get(("if ((%(duplicated)s == -1) && (%(text)s != '')){ \nchipAdd(%(panel)s, {name: %(name)s, category: %(category)s, value: %(text)s, disabled: false, fixed: %(fixed)s}, {%(options)s}) }\n" % {'name': name, 'category': category, 'duplicated': self.is_duplicated(text, category), 'panel': self.querySelector('div[name=panel]'), 'fixed': fixed, 'text': text, 'options': ','.join(js_options)}))
        return JsObjects.JsObjects.get(('var itemLabel = %(text)s;\nif(Array.isArray(itemLabel)){\n itemLabel.forEach(function(item){\n chipAdd(%(panel)s, {name: %(name)s, category: %(category)s, value: item, disabled: false, fixed: %(fixed)s}, {%(options)s})})}\nelse {chipAdd(%(panel)s, {name: %(name)s, category: %(category)s, value: itemLabel, disabled: false, fixed: %(fixed)s}, {%(options)s})}\n' % {'name': name, 'category': category, 'panel': self.querySelector('div[name=panel]'), 'fixed': fixed, 'text': text, 'options': ','.join(js_options), 'maxHeight': self.component._jsStyles['max_height']}))

    def input(self):
        """Return a JS expression for the text input's current value."""
        return JsObjects.JsObjects.get(('%s.value' % self.querySelector('input')))

    def clear(self):
        """Remove every chip from the panel."""
        return self.querySelector('div[name=panel]').empty()

    def remove(self, text: Union[(str, primitives.JsDataModel)], category: Union[(str, primitives.JsDataModel)]=None):
        """Emit JS removing the chip matching ``text`` (and ``category``), if any."""
        if (category is None):
            category = self.component._jsStyles['category']
        category = JsUtils.jsConvertData(category, None)
        text = JsUtils.jsConvertData(text, None)
        return JsObjects.JsObjects.get(('var itemPos = %(duplicated)s; if (itemPos >= 0){ %(panel)s.childNodes[itemPos].remove()}\n ' % {'duplicated': self.is_duplicated(text, category), 'panel': self.querySelector('div[name=panel]')}))

    def count(self):
        """Return a JS expression for the total number of chips."""
        return self.values().length

    def categories(self):
        """Return a JS expression for the distinct category names."""
        return self.content.dict.keys()
# NOTE(review): the bare tuple expressions below look like stripped CLI
# decorators (a `@click.command()` followed by `@click.option(...)` calls) —
# confirm against the upstream source; as written they are no-ops.
()
('--model-path', default='RealESRGAN_x4plus.safetensors', help='model path. supports torch or safetensors')
('--width', default=(64, 1024), type=(int, int), nargs=2, help='Minimum and maximum width')
('--height', default=(64, 1024), type=(int, int), nargs=2, help='Minimum and maximum height')
('--batch-size', default=(1, 1), type=(int, int), nargs=2, help='Minimum and maximum batch size')
('--include-constants', default=True, type=bool, help='include constants (model weights) with compiled model')
('--num-in-ch', default=3, type=int, help='Number of in channels')
('--num-out-ch', default=3, type=int, help='Number of out channels')
('--num-feat', default=64, type=int, help='Number of intermediate features')
('--num-block', default=23, type=int, help='Number of RRDB layers')
('--num-grow-ch', default=32, type=int, help='Number of channels for each growth')
('--scale', default=4, type=int, help='Scale')
('--use-fp16-acc', default=True, help='use fp16 accumulation')
('--convert-conv-to-gemm', default=True, help='convert 1x1 conv to gemm')
('--work-dir', default='./tmp', help='Work directory')
('--model-name', default='ESRGANModel', help='Model name')
def compile_esrgan(model_path, width, height, batch_size, include_constants, num_in_ch, num_out_ch, num_feat, num_block, num_grow_ch, scale, use_fp16_acc=True, convert_conv_to_gemm=True, work_dir='./tmp', model_name='ESRGANModel'):
    """Compile an (Real)ESRGAN RRDBNet checkpoint into an AIT model.

    Loads torch or safetensors weights, maps them onto an AIT ``RRDBNet``,
    builds a dynamically-shaped input tensor and compiles the graph for the
    detected target backend.
    """
    if (scale != 4):
        print('Scale != 4 supports static shape only. Maximum value of batch_size, height and width will be used.')
    logging.getLogger().setLevel(logging.INFO)
    torch.manual_seed(4896)
    if (detect_target().name() == 'rocm'):
        # conv->gemm conversion is not supported/beneficial on ROCm here.
        convert_conv_to_gemm = False
    if model_path.endswith('.safetensors'):
        pt_model = safetensors.torch.load_file(model_path)
    else:
        pt_model = torch.load(model_path)
    # Real-ESRGAN checkpoints wrap the weights under 'params_ema' or 'params'.
    if ('params_ema' in pt_model.keys()):
        pt_model = pt_model['params_ema']
    elif ('params' in pt_model.keys()):
        pt_model = pt_model['params']
    rrdbnet = RRDBNet(num_in_ch=num_in_ch, num_out_ch=num_out_ch, scale=scale, num_feat=num_feat, num_block=num_block, num_grow_ch=num_grow_ch)
    rrdbnet.name_parameter_tensor()
    # Remap torch parameter names/layout to the AIT model's constants.
    constants = map_rrdb(pt_model, scale=scale)
    # Dynamic dimensions: each IntVar carries its min/max range.
    batch_size = IntVar(values=list(batch_size), name='batch_size')
    channels = num_in_ch
    height = IntVar(values=list(height), name='height')
    width = IntVar(values=list(width), name='width')
    image = Tensor(shape=[batch_size, height, width, channels], name='input_pixels', is_input=True)
    Y = rrdbnet(image)
    Y = mark_output(Y, 'upscaled_pixels')
    target = detect_target(use_fp16_acc=use_fp16_acc, convert_conv_to_gemm=convert_conv_to_gemm)
    compile_model(Y, target, work_dir, model_name, constants=(constants if include_constants else None))
class StaticAnalysisPatternProvider(PatternProvider):
    """Provides pattern classes discovered via static analysis.

    Pattern classes are created dynamically (one subclass per discovered
    pattern id) and cached in ``__patterns``.  Restored the ``@classmethod``
    decorators: all three methods take ``cls`` and touch class-level state,
    but the decorators had been stripped, so an unbound call such as
    ``StaticAnalysisPatternProvider.get()`` would misbind its first argument.
    """

    # Cache of dynamically created pattern classes, keyed by pattern id.
    __patterns = {}

    @classmethod
    def get(cls, *args) -> List[AbstractPattern]:
        """Return one pattern class per discovered static-analysis pattern."""
        patterns = cls.__list_static_patterns()
        patterns = list(map(cls.__build_pattern, patterns))
        return patterns

    @classmethod
    def __list_static_patterns(cls):
        """Discover available patterns and return them sorted by id."""
        patterns = static_analysis.discover_patterns()
        patterns = sorted(patterns, key=(lambda t: t.id))
        return patterns

    @classmethod
    def __build_pattern(cls, pattern_info):
        """Create (or fetch from cache) the pattern class for ``pattern_info``."""
        pattern_id = pattern_info.id
        if (pattern_id not in cls.__patterns):
            # Declare a new subclass carrying the static pattern metadata.
            new_pattern_type = type(pattern_id, (AbstractStaticAnalysisPattern,), {'info': pattern_info})
            cls.__patterns[pattern_id] = new_pattern_type
        return cls.__patterns[pattern_id]
class ShadowIGUserCatalogProductVariant(AbstractObject):
    """Graph API object for an IG user-catalog product variant (SDK-generated style)."""

    def __init__(self, api=None):
        super(ShadowIGUserCatalogProductVariant, self).__init__()
        # Type marker used by the SDK's object-identification machinery.
        self._isShadowIGUserCatalogProductVariant = True
        self._api = api

    class Field(AbstractObject.Field):
        # Field name constants for this object's API payloads.
        product_id = 'product_id'
        variant_name = 'variant_name'

    _field_types = {'product_id': 'int', 'variant_name': 'string'}

    # Restored @classmethod: the method takes `cls` (matching the SDK's
    # generated `_get_field_enum_info` classmethods) but the decorator had
    # been stripped.
    @classmethod
    def _get_field_enum_info(cls):
        """Return the enum metadata map for this object's fields (none here)."""
        field_enum_info = {}
        return field_enum_info
def cookies_check(cookies, url, method, req_headers, req_body, scan_id, res_headers, res_body):
    """Flag cookies missing the Secure or HttpOnly attribute.

    Records one low-impact finding (via ``dbupdate.insert_record``) as soon
    as any cookie lacks either attribute, then stops scanning this set.
    """
    for cookie in cookies:
        if (not cookie.secure) or (not cookie.has_nonstandard_attr('HttpOnly')):
            attack_result = {
                'id': 22,
                'scanid': scan_id,
                'url': url,
                # NOTE(review): the original literal was truncated mid-string
                # ("'Cookie not marked secure or 'impact': 'Low'"); wording
                # reconstructed — confirm against upstream.
                'alert': 'Cookie not marked secure or HttpOnly',
                'impact': 'Low',
                'req_headers': req_headers,
                'req_body': req_body,
                'res_headers': res_headers,
                'res_body': res_body,
            }
            dbupdate.insert_record(attack_result)
            # One finding per cookie set is enough.
            break
def test_shuffle_seeds() -> None:
    """Rollout seeding must be reproducible iff both runs shuffle identically.

    Two runs with identical base seeds and ``shuffle_seeds`` enabled must
    match; a run with shuffling enabled must differ from one without it
    (hence the expected AssertionError); and the final call checks the same
    mismatch with differing shuffle settings on equal base seeds.
    """
    # Identical seeds + shuffling on both sides: must agree.
    perform_rollout_seeding_test({'seeding.env_base_seed': '1234', 'seeding.agent_base_seed': '1234', 'seeding.shuffle_seeds': 'true'}, {'seeding.env_base_seed': '1234', 'seeding.agent_base_seed': '1234', 'seeding.shuffle_seeds': 'true'})
    perform_rollout_seeding_test({'seeding.env_base_seed': '1234', 'seeding.agent_base_seed': '4321', 'seeding.shuffle_seeds': 'true'}, {'seeding.env_base_seed': '1234', 'seeding.agent_base_seed': '4321', 'seeding.shuffle_seeds': 'true'})
    # Shuffling enabled on only one side: the comparison should fail.
    with pytest.raises(AssertionError):
        perform_rollout_seeding_test({'seeding.env_base_seed': '1234', 'seeding.agent_base_seed': '2345'}, {'seeding.env_base_seed': '1234', 'seeding.agent_base_seed': '2345', 'seeding.shuffle_seeds': 'true'})
        perform_rollout_seeding_test({'seeding.env_base_seed': '1234', 'seeding.agent_base_seed': '1234'}, {'seeding.env_base_seed': '1234', 'seeding.agent_base_seed': '1234', 'seeding.shuffle_seeds': 'true'})
class PipelineBrowser(HasTraits):
    """Traits UI tree browser over a (T)VTK rendering pipeline.

    Shows the objects reachable from the registered render windows (or an
    explicit ``root_object`` list) as an editable tree; selecting or
    double-clicking a node opens its trait editor, and edits trigger a
    re-render of all registered windows.
    """

    # Generates the tree nodes shown in the browser.
    tree_generator = Trait(FullTreeGenerator(), Instance(TreeGenerator))
    # Render windows to refresh when an object is edited.
    renwins = List
    # Optional explicit root objects; empty means derive from renwins.
    root_object = List(TVTKBase)
    # Currently selected tree object.
    selected = Instance(TVTKBase)
    # Fired whenever an object was edited (used to trigger renders).
    object_edited = Event
    # Internal: current tree root node and open Traits UI handle.
    _root = Any
    _ui = Any

    def __init__(self, renwin=None, **traits):
        """Optionally register an initial render window and build the root."""
        super(PipelineBrowser, self).__init__(**traits)
        self._ui = None
        self.view = None
        if renwin:
            self.renwins.append(renwin)
        self._root_object_changed(self.root_object)

    def default_traits_view(self):
        """Build the Traits view: tree on top, selected-object editor below."""
        menu = Menu(Action(name='Refresh', action='editor.update_editor'), Action(name='Expand all', action='editor.expand_all'))
        self.menu = menu
        nodes = self.tree_generator.get_nodes(menu)
        self.tree_editor = TreeEditor(nodes=nodes, editable=False, orientation='vertical', hide_root=True, on_select=self._on_select, on_dclick=self._on_dclick)
        view = View(Group(VSplit(Item(name='_root', editor=self.tree_editor, resizable=True), Item(name='selected', style='custom', resizable=True), show_labels=False, show_border=False)), title='Pipeline browser', help=False, resizable=True, undo=False, revert=False, width=0.3, height=0.3)
        return view

    def show(self, parent=None):
        """Show the browser window, raising it if it already exists."""
        if ((self._ui is not None) and (self._ui.control is not None)):
            try:
                self._ui.control.Raise()
            except AttributeError:
                # Toolkit control without Raise(); nothing more to do.
                pass
            else:
                return
        else:
            view = self.default_traits_view()
            if (parent is not None):
                self._ui = view.ui(self, parent=parent, kind='subpanel')
            else:
                self._ui = view.ui(self, parent=parent)

    def update(self):
        """Refresh the tree editor, tolerating a not-yet/no-longer open UI."""
        if ((self._ui is not None) and (self._ui.control is not None)):
            try:
                ed = self._ui._editors[0]
                ed.update_editor()
                self._ui.control.Refresh()
            except (AttributeError, IndexError):
                pass
    # Alias kept for backwards compatibility.
    refresh = update

    def render(self):
        """Signal an edit and re-render every registered render window."""
        self.object_edited = True
        for rw in self.renwins:
            rw.render()

    def _make_default_root(self):
        """Build a root node wrapping all registered render windows."""
        tree_gen = self.tree_generator
        objs = [x.render_window for x in self.renwins]
        node = TVTKCollectionNode(object=objs, name='Root', tree_generator=tree_gen)
        return node

    def _tree_generator_changed(self, tree_gen):
        """Rebuild the tree (preserving the current roots) on generator change."""
        if self._root:
            root_obj = self._root.object
        else:
            root_obj = self.root_object
        if root_obj:
            ro = root_obj
            # Collection node expects a sequence; wrap a single object.
            if (not hasattr(root_obj, '__len__')):
                ro = [root_obj]
            self._root = TVTKCollectionNode(object=ro, name='Root', tree_generator=tree_gen)
        else:
            self._root = self._make_default_root()
        self.tree_editor.nodes = tree_gen.get_nodes(self.menu)
        self.update()

    def _root_object_changed(self, root_obj):
        """Rebuild the tree root when the explicit root objects change."""
        tg = self.tree_generator
        if root_obj:
            self._root = TVTKCollectionNode(object=root_obj, name='Root', tree_generator=tg)
        else:
            self._root = self._make_default_root()
            # Keep root_object in sync with the derived default root.
            self.root_object = self._root.object
        self.update()

    def _root_object_items_changed(self, list_event):
        """In-place list edits are treated like a full root change."""
        self._root_object_changed(self.root_object)

    def _on_dclick(self, obj):
        """Double-click: open a closable trait editor for the node's object."""
        if (hasattr(obj, 'object') and hasattr(obj.object, 'edit_traits')):
            object = obj.object
            view = object.trait_view()
            view.handler = UICloseHandler(browser=self)
            # Re-render whenever the edited object's traits change.
            object.on_trait_change(self.render)
            ui = object.edit_traits(view=view)

    def _on_select(self, obj):
        """Selection change: move the render-on-change hook to the new object."""
        if (hasattr(obj, 'object') and hasattr(obj.object, 'edit_traits')):
            new = obj.object
            old = self.selected
            if (new != old):
                self.selected = new
            if (old is not None):
                old.on_trait_change(self.render, remove=True)
            if (new is not None):
                new.on_trait_change(self.render)
class HStoreField(DictField):
    """Serializer field for PostgreSQL hstore columns.

    hstore stores every value as text (or NULL), so the child serializer is
    a ``CharField`` allowing blank and null values.
    """
    child = CharField(allow_blank=True, allow_null=True)

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Guard against subclasses swapping `child` for a non-CharField:
        # hstore can only represent string values.
        assert isinstance(self.child, CharField), 'The `child` argument must be an instance of `CharField`, as the hstore extension stores values as strings.'
class BaseInference(metaclass=ABCMeta):
    """Abstract base class for sampling-based inference methods.

    Subclasses supply proposers via ``get_proposers``; the shared machinery
    here runs single-chain sampling and collects per-query samples and
    per-observation log likelihoods.
    """
    # Largest value usable as an RNG seed (2**32 - 1).
    _MAX_SEED_VAL: int = ((2 ** 32) - 1)

    def get_proposers(self, world: World, target_rvs: Set[RVIdentifier], num_adaptive_sample: int) -> List[BaseProposer]:
        """Return the proposers this method uses for ``target_rvs``; abstract."""
        raise NotImplementedError

    def _get_default_num_adaptive_samples(self, num_samples: int) -> int:
        """Default number of adaptation steps (none unless overridden)."""
        return 0

    def _single_chain_infer(self, queries: List[RVIdentifier], observations: RVDict, num_samples: int, num_adaptive_samples: int, show_progress_bar: bool, initialize_fn: InitializeFn, max_init_retries: int, chain_id: int, seed: Optional[int]=None) -> Tuple[(List[torch.Tensor], List[torch.Tensor])]:
        """Run one chain and return (samples per query, log likelihoods per observation).

        Each returned list entry is a tensor stacked over the collected
        iterations (adaptive + regular).
        """
        if (seed is not None):
            set_seed(seed)
        # Workaround so notebook progress bars render on the correct line.
        if (show_progress_bar and issubclass(tqdm, notebook_tqdm)):
            print(' ', end='', flush=True)
        sampler = self.sampler(queries, observations, num_samples, num_adaptive_samples, initialize_fn, max_init_retries)
        samples = [[] for _ in queries]
        log_likelihoods = [[] for _ in observations]
        # `position=chain_id` keeps one progress bar per chain when run in parallel.
        for world in tqdm(sampler, total=(num_samples + num_adaptive_samples), desc='Samples collected', disable=(not show_progress_bar), position=chain_id):
            for (idx, obs) in enumerate(observations):
                log_likelihoods[idx].append(world.log_prob([obs]))
            for (idx, query) in enumerate(queries):
                raw_val = world.call(query)
                if (not isinstance(raw_val, torch.Tensor)):
                    raise TypeError('The value returned by a queried function must be a tensor.')
                samples[idx].append(raw_val)
        samples = [torch.stack(val) for val in samples]
        log_likelihoods = [torch.stack(val) for val in log_likelihoods]
        return (samples, log_likelihoods)
def infer(self, queries: List[RVIdentifier], observations: RVDict, num_samples: int, num_chains: int=4, num_adaptive_samples: Optional[int]=None, show_progress_bar: bool=True, initialize_fn: InitializeFn=init_to_uniform, max_init_retries: int=100, run_in_parallel: bool=False, mp_context: Optional[Literal[('fork', 'spawn', 'forkserver')]]=None, verbose: Optional[VerboseLevel]=None) -> MonteCarloSamples:
if (verbose is not None):
warnings.warn('The `verbose` argument and `VerboseLevel` are deprecated and will be removed in the next release of Bean Machine. Please use `show_progress_bar` instead.', DeprecationWarning, stacklevel=2)
show_progress_bar = bool(verbose)
_verify_queries_and_observations(queries, observations, observations_must_be_rv=True)
if (num_adaptive_samples is None):
num_adaptive_samples = self._get_default_num_adaptive_samples(num_samples)
single_chain_infer = partial(self._single_chain_infer, queries, observations, num_samples, num_adaptive_samples, show_progress_bar, initialize_fn, max_init_retries)
if (not run_in_parallel):
chain_results = map(single_chain_infer, range(num_chains))
else:
ctx = mp.get_context(mp_context)
first_seed = torch.randint(self._MAX_SEED_VAL, ()).item()
seeds = [((first_seed + (31 * chain_id)) % self._MAX_SEED_VAL) for chain_id in range(num_chains)]
single_chain_infer = partial(_execute_in_new_thread, single_chain_infer)
with ctx.Pool(processes=num_chains, initializer=tqdm.set_lock, initargs=(ctx.Lock(),)) as p:
chain_results = p.starmap(single_chain_infer, enumerate(seeds))
(all_samples, all_log_liklihoods) = zip(*chain_results)
all_samples = [dict(zip(queries, samples)) for samples in all_samples]
all_log_liklihoods = [dict(zip(observations.keys(), log_likelihoods)) for log_likelihoods in all_log_liklihoods]
return MonteCarloSamples(all_samples, num_adaptive_samples, all_log_liklihoods, observations)
def sampler(self, queries: List[RVIdentifier], observations: RVDict, num_samples: Optional[int]=None, num_adaptive_samples: Optional[int]=None, initialize_fn: InitializeFn=init_to_uniform, max_init_retries: int=100) -> Sampler:
_verify_queries_and_observations(queries, observations, observations_must_be_rv=True)
if (num_adaptive_samples is None):
if (num_samples is None):
num_adaptive_samples = 0
else:
num_adaptive_samples = self._get_default_num_adaptive_samples(num_samples)
world = World.initialize_world(queries, observations, initialize_fn, max_init_retries)
kernel = copy.deepcopy(self)
sampler = Sampler(kernel, world, num_samples, num_adaptive_samples)
return sampler |
class CopyPayloadTestCase(unittest.TestCase):
def payload_setup(self, **payload_kwargs):
payload = CopyPayload(**payload_kwargs)
table_obj = parse_create(' CREATE TABLE a ( ID int primary key ) ')
payload._old_table = table_obj
payload._new_table = table_obj
payload._current_db = 'test'
payload.range_start_vars_array = ['']
payload.range_end_vars_array = ['']
return payload
    def test_init_table_obj_populate_charset_collation(self):
        """init_table_obj expands implicit charset/collation so an
        explicit-collation _new_table compares equal to the fetched schema."""
        payload = CopyPayload()
        payload.table_exists = Mock(return_value=True)
        payload.fetch_table_schema = Mock(return_value=parse_create('\n            CREATE TABLE a (\n                ID varchar(32) NOT NULL\n            ) ENGINE=InnoDB DEFAULT CHARSET=latin1\n        '))
        payload.fetch_partitions = Mock(return_value=None)
        payload._new_table = parse_create('\n            CREATE TABLE a (\n                ID varchar(32) NOT NULL\n            ) ENGINE=InnoDB DEFAULT CHARSET=latin1 COLLATE=latin1_bin\n        ')
        payload.get_default_collations = Mock(return_value={'latin1': 'latin1_bin'})
        payload.get_collations = Mock(return_value={'latin1_bin': 'latin1'})
        payload.init_table_obj()
        # Fully explicit column-level charset/collation used as the reference.
        explicit_obj = parse_create('\n            CREATE TABLE a (\n                ID varchar(32) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL\n            ) ENGINE=InnoDB DEFAULT CHARSET=latin1 COLLATE=latin1_bin\n        ')
        self.assertEqual(payload._old_table, payload._new_table)
        self.assertEqual(payload._old_table, explicit_obj)
        # Without an explicit table charset the objects must NOT compare equal.
        payload._new_table = parse_create('\n            CREATE TABLE a (\n                ID varchar(32) NOT NULL\n            ) ENGINE=InnoDB COLLATE=latin1_bin\n        ')
        payload.init_table_obj()
        self.assertNotEqual(payload._old_table, payload._new_table)
    def test_populate_charset_collation(self):
        """populate_charset_collation fills table- and column-level charset
        and collation from the mocked server collation maps."""
        payload = CopyPayload()
        payload.get_default_collations = Mock(return_value={'latin1': 'latin1_bin'})
        payload.get_collations = Mock(return_value={'latin1_bin': 'latin1'})
        obj1 = parse_create('\n            CREATE TABLE a (\n                ID varchar(32) NOT NULL\n            ) ENGINE=InnoDB DEFAULT CHARSET=latin1\n        ')
        payload.populate_charset_collation(obj1)
        self.assertEqual(obj1.collate, 'latin1_bin')
        self.assertEqual(len(obj1.column_list), 1)
        self.assertEqual(obj1.column_list[0].charset, 'latin1')
        self.assertEqual(obj1.column_list[0].collate, 'latin1_bin')
        # A column collation differing from the table default resolves to the
        # column's own charset.
        payload.get_default_collations = Mock(return_value={'latin1': 'latin1_bin', 'utf8mb4': 'utf8mb4_general_ci'})
        payload.get_collations = Mock(return_value={'latin1_bin': 'latin1', 'utf8mb4_general_ci': 'utf8mb4'})
        obj2 = parse_create('\n            CREATE TABLE a (\n                ID varchar(32) COLLATE utf8mb4_general_ci NOT NULL\n            ) ENGINE=InnoDB DEFAULT CHARSET=latin1\n        ')
        payload.populate_charset_collation(obj2)
        self.assertEqual(obj2.collate, 'latin1_bin')
        self.assertEqual(len(obj2.column_list), 1)
        self.assertEqual(obj2.column_list[0].charset, 'utf8mb4')
        self.assertEqual(obj2.column_list[0].collate, 'utf8mb4_general_ci')
        # With only COLLATE at table level, the table charset stays unset.
        obj3 = parse_create('\n            CREATE TABLE a (\n                ID varchar(32) COLLATE utf8mb4_general_ci NOT NULL\n            ) ENGINE=InnoDB COLLATE=latin1_bin\n        ')
        payload.populate_charset_collation(obj3)
        self.assertEqual(obj3.charset, None)
    def test_create_copy_table_populate_charset_collation(self):
        """create_copy_table must not fail implicit-conversion checks when the
        schemas only differ by implicit vs explicit charset/collation."""
        payload = CopyPayload()
        payload._new_table = parse_create('\n            CREATE TABLE a (\n                ID varchar(32) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL\n            ) ENGINE=InnoDB DEFAULT CHARSET=latin1 COLLATE=latin1_bin\n        ')
        payload._old_table = payload._new_table
        payload.fail_for_implicit_conv = True
        payload.rm_partition = False
        payload.mysql_version = Mock(is_mysql8=False)
        payload.execute_sql = Mock()
        payload.fetch_partitions = Mock(return_value=None)
        payload.add_drop_table_entry = Mock()
        payload.fetch_table_schema = Mock(return_value=parse_create('\n            CREATE TABLE a (\n                ID varchar(32) NOT NULL\n            ) ENGINE=InnoDB DEFAULT CHARSET=latin1\n        '))
        payload.get_default_collations = Mock(return_value={'latin1': 'latin1_bin'})
        payload.get_collations = Mock(return_value={'latin1_bin': 'latin1'})
        payload.create_copy_table()
    def test_populate_charset_collation_utf8_alias_default_collate(self) -> None:
        """utf8 and its alias utf8mb3 must normalize identically."""
        payload = CopyPayload()
        payload.get_default_collations = Mock(return_value={'utf8': 'utf8_general_ci'})
        payload.get_collations = Mock(return_value={'utf8_general_ci': 'utf8'})
        obj1 = parse_create('CREATE TABLE `t1`(s1 CHAR(1)) ENGINE=InnoDB DEFAULT CHARSET=utf8')
        obj2 = parse_create('CREATE TABLE `t1`(s1 CHAR(1)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3')
        self.assertEqual(payload.populate_charset_collation(obj1), payload.populate_charset_collation(obj2))
    def test_populate_charset_collation_utf8_alias_custom_collate(self) -> None:
        """Same utf8/utf8mb3 alias equivalence with an explicit COLLATE."""
        payload = CopyPayload()
        payload.get_default_collations = Mock(return_value={'utf8': 'utf8_general_ci'})
        payload.get_collations = Mock(return_value={'utf8_general_ci': 'utf8', 'utf8_bin': 'utf8'})
        obj1 = parse_create('\n            CREATE TABLE `t1`(s1 CHAR(1))\n            ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin\n        ')
        obj2 = parse_create('\n            CREATE TABLE `t1`(s1 CHAR(1))\n            ENGINE=InnoDB DEFAULT CHARSET=utf8mb3 COLLATE=utf8_bin\n        ')
        self.assertEqual(payload.populate_charset_collation(obj1), payload.populate_charset_collation(obj2))
    def test_checksum_running_with_proper_idx(self):
        """find_coverage_index returns PRIMARY only while the filter columns
        form an ordered prefix of the new table's PK."""
        payload = CopyPayload()
        payload._new_table = Mock(indexes=[])
        payload._old_table = Mock(indexes=[])
        pri_key_list = []
        for i in range(3):
            col = Mock()
            col.name = 'col{}'.format(i)
            pri_key_list.append(col)
        payload._old_table.primary_key = Mock(is_unique=True)
        payload._old_table.primary_key.name = 'PRIMARY'
        payload._old_table.primary_key.column_list = pri_key_list
        payload._new_table.primary_key = Mock(is_unique=True)
        payload._new_table.primary_key.name = 'PRIMARY'
        payload._new_table.primary_key.column_list = pri_key_list
        payload._pk_for_filter = [c.name for c in pri_key_list]
        self.assertEqual(payload.find_coverage_index(), 'PRIMARY')
        # An extra trailing PK column keeps the prefix intact: still covered.
        col = Mock()
        col.name = 'col4'
        pri_key_list.append(col)
        payload._new_table.primary_key.column_list = pri_key_list
        self.assertEqual(payload.find_coverage_index(), 'PRIMARY')
        # Reversed column order breaks the prefix: no covering index.
        pri_key_list = []
        for i in range(2, (- 1), (- 1)):
            col = Mock()
            col.name = 'col{}'.format(i)
            pri_key_list.append(col)
        payload._new_table.primary_key.column_list = pri_key_list
        self.assertEqual(payload.find_coverage_index(), None)
    def test_replay_gap_will_be_filled(self):
        """get_gap_changes returns the missing change rows and clears the
        recorded id gaps."""
        payload = self.payload_setup()
        payload._replayed_chg_ids.extend([1, 2, 4, 5])
        delta = [{payload.IDCOLNAME: 3}]
        payload.query = Mock(return_value=delta)
        self.assertEqual(payload.get_gap_changes(), delta)
        self.assertEqual(payload._replayed_chg_ids.missing_points(), [])
    def test_set_innodb_tmpdir(self):
        """MySQL error 1231 is tolerated by set_innodb_tmpdir; any other
        OperationalError propagates."""
        payload = CopyPayload()
        table_obj = parse_create(' CREATE TABLE a ( ID int primary key ) ')
        payload._old_table = table_obj
        payload._new_table = table_obj
        payload.replay_changes = Mock()
        payload.execute_sql = Mock(side_effect=MySQLdb.OperationalError(1231, 'abc'))
        payload.set_innodb_tmpdir('mock/path')
        with self.assertRaises(MySQLdb.OperationalError) as err_context:
            payload.execute_sql = Mock(side_effect=MySQLdb.OperationalError(1111, 'abc'))
            payload.set_innodb_tmpdir('mock/path')
        self.assertEqual(err_context.exception.args[0], 1111)
    def test_long_selects_being_killed(self):
        """With a tiny kill timeout, queries touching tables 'a'/'b' in the
        'test' db are killed; other tables and dbs are spared."""
        payload = self.payload_setup(lock_max_wait_before_kill_seconds=0.01)
        mocked_conn = Mock()
        payload.get_conn = Mock(return_value=mocked_conn)
        # Make the lock attempt slow enough for the kill timer to fire.
        payload.execute_sql = Mock(side_effect=(lambda _: time.sleep(0.5)))
        query_id = 100
        mocked_conn.get_running_queries = Mock(return_value=[{'Info': b'SELECT 1 from a', 'db': 'test', 'Id': query_id}, {'Info': b'SELECT 1 from `a`', 'db': 'test', 'Id': (query_id + 1)}, {'Info': b'alter table a add column `bar` text', 'db': 'test', 'Id': (query_id + 2)}, {'Info': b'select 1 from b', 'db': 'test', 'Id': (query_id + 3)}, {'Info': b'select 1 from `b`', 'db': 'test', 'Id': (query_id + 4)}, {'Info': b'SELECT 1 from c', 'db': 'test', 'Id': (query_id + 5)}, {'Info': b'SELECT 1 from `c`', 'db': 'test', 'Id': (query_id + 6)}, {'Info': b'SELECT 1 from a', 'db': 'information_schema', 'Id': (query_id + 7)}, {'Info': b'SELECT 1 from `a`', 'db': 'information_schema', 'Id': (query_id + 8)}])
        payload.lock_tables(tables=['a', 'b'])
        payload._last_kill_timer.join(1)
        self.assertFalse(payload._last_kill_timer.is_alive())
        # Only the first five queries (tables a/b in db 'test') are killed.
        kill_calls = mocked_conn.kill_query_by_id.call_args_list
        self.assertEqual(len(kill_calls), 5)
        for (idx, killed) in enumerate((query_id, (query_id + 1), (query_id + 2), (query_id + 3), (query_id + 4))):
            (args, kwargs) = kill_calls[idx]
            self.assertEqual(len(args), 1)
            self.assertEqual(args[0], killed)
    def test_selects_not_being_killed(self):
        """With a generous timeout, no queries are killed at all."""
        payload = self.payload_setup(lock_max_wait_before_kill_seconds=1)
        mocked_conn = Mock()
        payload.get_conn = Mock(return_value=mocked_conn)
        payload.execute_sql = Mock()
        query_id = 100
        mocked_conn.get_running_queries = Mock(return_value=[{'Info': b'SELECT 1 from a', 'db': 'test', 'Id': query_id}, {'Info': b'SELECT 1 from `a`', 'db': 'test', 'Id': (query_id + 1)}, {'Info': b'alter table a add column `bar` text', 'db': 'test', 'Id': (query_id + 2)}, {'Info': b'select 1 from b', 'db': 'test', 'Id': (query_id + 3)}, {'Info': b'select 1 from `b`', 'db': 'test', 'Id': (query_id + 4)}, {'Info': b'SELECT 1 from c', 'db': 'test', 'Id': (query_id + 5)}, {'Info': b'SELECT 1 from `c`', 'db': 'test', 'Id': (query_id + 6)}, {'Info': b'SELECT 1 from a', 'db': 'information_schema', 'Id': (query_id + 7)}, {'Info': b'SELECT 1 from `a`', 'db': 'information_schema', 'Id': (query_id + 8)}])
        payload.lock_tables(tables=['a', 'b'])
        payload._last_kill_timer.join(1)
        self.assertFalse(payload._last_kill_timer.is_alive())
        mocked_conn.kill_query_by_id.assert_not_called()
    def test_set_rocksdb_bulk_load(self):
        """change_rocksdb_bulk_load runs for same-PK RocksDB tables, is
        skipped when the PK changes, and tolerates error 1193."""
        payload = CopyPayload()
        table_obj = parse_create(' CREATE TABLE a ( ID int primary key ) ENGINE=ROCKSDB')
        payload._old_table = table_obj
        payload._new_table = table_obj
        payload.execute_sql = Mock()
        payload.change_rocksdb_bulk_load()
        self.assertTrue(payload.execute_sql.called)
        # PK widened in the new table: bulk load must be skipped.
        table_obj = parse_create(' CREATE TABLE a ( ID int primary key ) ENGINE=ROCKSDB')
        new_table_obj = parse_create(' CREATE TABLE a ( ID int, id2 int, primary key (ID,id2)) ENGINE=ROCKSDB')
        payload._old_table = table_obj
        payload._new_table = new_table_obj
        payload.execute_sql = Mock()
        payload.change_rocksdb_bulk_load()
        self.assertFalse(payload.execute_sql.called)
        # Unknown system variable (error 1193) must not raise.
        table_obj = parse_create(' CREATE TABLE a ( ID int primary key ) ENGINE=ROCKSDB')
        payload._old_table = table_obj
        payload._new_table = table_obj
        payload.execute_sql = Mock(side_effect=MySQLdb.OperationalError(1193, 'abc'))
        payload.change_rocksdb_bulk_load()
    def test_skip_cleanup(self):
        """skip_cleanup_after_kill discards drop/file cleanup entries when a
        connection error aborts run_ddl; otherwise they are kept."""
        payload = CopyPayload()
        sql = 'CREATE TABLE abc (ID int)'
        database = 'db'
        payload._old_table = Mock()
        payload._old_table.name = 'abc'
        payload._new_table = Mock()
        payload._new_table.name = 'abc'
        payload.outfile_dir = '/path/to/file/dump'
        payload.outfile_suffix_end = 2
        payload._cleanup_payload = CleanupPayload(db=database)
        payload._cleanup_payload.add_drop_table_entry(database, (constant.DELTA_TABLE_PREFIX + 'abc'))
        payload._cleanup_payload.add_drop_table_entry(database, (constant.NEW_TABLE_PREFIX + 'abc'))
        payload._cleanup_payload.cleanup = Mock()
        for suffix in range(1, (payload.outfile_suffix_end + 1)):
            payload._cleanup_payload.add_file_entry(((payload.outfile + '.') + str(suffix)))
        # Default behavior: entries survive the failure for later cleanup.
        payload.skip_cleanup_after_kill = False
        with self.assertRaises(OSCError) as err_context:
            payload.init_connection = Mock(side_effect=MySQLdb.OperationalError(2006, 'MySQL has gone away'))
            payload.run_ddl(database, sql)
        self.assertEqual(len(payload._cleanup_payload.to_drop), 2)
        self.assertEqual(len(payload._cleanup_payload.files_to_clean), payload.outfile_suffix_end)
        self.assertEqual(err_context.exception.err_key, 'GENERIC_MYSQL_ERROR')
        # With the flag set, all cleanup entries are discarded.
        payload.skip_cleanup_after_kill = True
        with self.assertRaises(OSCError) as err_context:
            payload.init_connection = Mock(side_effect=MySQLdb.OperationalError(2006, 'MySQL has gone away'))
            payload.run_ddl(database, sql)
        self.assertEqual(payload._cleanup_payload.to_drop, [])
        self.assertEqual(len(payload._cleanup_payload.files_to_clean), 0)
        self.assertEqual(err_context.exception.err_key, 'GENERIC_MYSQL_ERROR')
    def test_file_exists(self):
        """MySQL error 1086 (file exists) maps to FILE_ALREADY_EXIST; other
        OperationalErrors propagate unchanged."""
        payload = self.payload_setup()
        with self.assertRaises(OSCError) as err_context:
            payload.execute_sql = Mock(side_effect=MySQLdb.OperationalError(1086, 'abc'))
            payload.select_full_table_into_outfile()
        self.assertEqual(err_context.exception.err_key, 'FILE_ALREADY_EXIST')
        with self.assertRaises(OSCError) as err_context:
            payload.execute_sql = Mock(side_effect=MySQLdb.OperationalError(1086, 'abc'))
            payload.select_chunk_into_outfile(False)
        self.assertEqual(err_context.exception.err_key, 'FILE_ALREADY_EXIST')
        with self.assertRaises(MySQLdb.OperationalError) as err_context:
            payload.execute_sql = Mock(side_effect=MySQLdb.OperationalError(1111, 'abc'))
            payload.select_chunk_into_outfile(False)
        self.assertEqual(err_context.exception.args[0], 1111)
    def test_partitions_being_added(self):
        """create_copy_table registers the copy table's partitions in the
        cleanup drop-table entry."""
        payload = CopyPayload()
        table_obj = parse_create(' CREATE TABLE a ( ID int primary key, `time_updated` bigint(20) unsigned NOT NULL) /*!50100 PARTITION BY RANGE (time_updated) (PARTITION p1 VALUES LESS THAN () ENGINE = InnoDB, PARTITION p2 VALUES LESS THAN () ENGINE = InnoDB, PARTITION p3 VALUES LESS THAN () ENGINE = InnoDB)*/')
        payload._old_table = table_obj
        payload._new_table = table_obj
        partitions = ['p1', 'p2', 'p3']
        payload.query = Mock(return_value=None)
        payload.execute_sql = Mock()
        payload.fetch_partitions = Mock(return_value=partitions)
        payload._cleanup_payload.add_drop_table_entry = Mock()
        payload.create_copy_table()
        payload._cleanup_payload.add_drop_table_entry.assert_called_with(payload._current_db, payload.new_table_name, partitions)
def test_sql_statement_generated_due_to_added_partitions_adds_both_partitions(self):
payload = CopyPayload()
table_obj = parse_create(' CREATE TABLE a ( ID int primary key, `time_updated` bigint(20) unsigned NOT NULL) PARTITION BY RANGE (time_updated) (PARTITION p1 VALUES LESS THAN () ENGINE = InnoDB, PARTITION p2 VALUES LESS THAN () ENGINE = InnoDB) ')
payload._old_table = table_obj
payload._new_table = table_obj
partitions = ['p1', 'p2']
payload.query = Mock(return_value=None)
payload.execute_sql = Mock()
def partition_list_names_mock(*args, **kwargs):
if (args[0] == 'a'):
return partitions
elif (args[0] == payload.new_table_name):
return {}
def partition_value_for_name_mock(*args, **kwargs):
if (args[1] == 'p1'):
return ''
if (args[1] == 'p2'):
return ''
payload.get_partition_method = Mock(return_value='RANGE')
payload.list_partition_names = MagicMock(side_effect=partition_list_names_mock)
payload.partition_value_for_name = MagicMock(side_effect=partition_value_for_name_mock)
payload.rm_partition = 'Override'
payload.partitions = partitions
payload.sync_table_partitions()
options = {'ALTER TABLE `__osc_new_a` ADD PARTITION (PARTITION p2 VALUES LESS THAN (), PARTITION p1 VALUES LESS THAN ())', 'ALTER TABLE `__osc_new_a` ADD PARTITION (PARTITION p1 VALUES LESS THAN (), PARTITION p2 VALUES LESS THAN ())'}
success = False
for option in options:
try:
payload.execute_sql.assert_called_with(option)
success = True
except Exception:
print('ignore exception {}', option)
self.assertEqual(True, success)
def test_sql_statement_generated_due_to_dropped_partitions_drops_both_partitions(self):
payload = CopyPayload()
table_obj = parse_create(' CREATE TABLE a ( ID int primary key, `time_updated` bigint(20) unsigned NOT NULL) PARTITION BY RANGE (time_updated) (PARTITION p1 VALUES LESS THAN () ENGINE = InnoDB, PARTITION p2 VALUES LESS THAN () ENGINE = InnoDB) ')
payload._old_table = table_obj
payload._new_table = table_obj
partitions = ['p1', 'p2']
payload.query = Mock(return_value=None)
payload.execute_sql = Mock()
def partition_list_names_mock(*args, **kwargs):
if (args[0] == 'a'):
return {}
elif (args[0] == payload.new_table_name):
return partitions
def partition_value_for_name_mock(*args, **kwargs):
if (args[1] == 'p1'):
return ''
if (args[1] == 'p2'):
return ''
payload.get_partition_method = Mock(return_value='RANGE')
payload.list_partition_names = MagicMock(side_effect=partition_list_names_mock)
payload.partition_value_for_name = MagicMock(side_effect=partition_value_for_name_mock)
payload.rm_partition = 'Override'
payload.partitions = partitions
payload.sync_table_partitions()
options = {'ALTER TABLE `__osc_new_a` DROP PARTITION p1, p2', 'ALTER TABLE `__osc_new_a` DROP PARTITION p2, p1'}
success = False
for option in options:
try:
payload.execute_sql.assert_called_with(option)
success = True
except Exception:
print('ignore exception {}', option)
self.assertEqual(True, success)
    def test_sql_statement_generated_with_added_removed_partitions(self):
        """sync_table_partitions issues both an ADD for the new partition and
        a DROP for the stale one on the copy table."""
        payload = CopyPayload()
        table_obj = parse_create(' CREATE TABLE a ( ID int primary key, `time_updated` bigint(20) unsigned NOT NULL) PARTITION BY RANGE (time_updated) (PARTITION p1 VALUES LESS THAN () ENGINE = InnoDB) ')
        payload._old_table = table_obj
        payload._new_table = table_obj
        partitions = ['p1']
        oldPartitions = ['p2']
        payload.query = Mock(return_value=None)
        payload.execute_sql = Mock()
        def partition_list_names_mock(*args, **kwargs):
            # Source table has p1; the copy table still has only p2.
            if (args[0] == 'a'):
                return partitions
            elif (args[0] == payload.new_table_name):
                return oldPartitions
        def partition_value_for_name_mock(*args, **kwargs):
            if (args[1] == 'p1'):
                return ''
            if (args[1] == 'p2'):
                return ''
        payload.get_partition_method = Mock(return_value='RANGE')
        payload.list_partition_names = MagicMock(side_effect=partition_list_names_mock)
        payload.partition_value_for_name = MagicMock(side_effect=partition_value_for_name_mock)
        payload.rm_partition = 'Override'
        payload.partitions = partitions
        payload.sync_table_partitions()
        payload.execute_sql.assert_any_call('ALTER TABLE `__osc_new_a` ADD PARTITION (PARTITION p1 VALUES LESS THAN ())')
        payload.execute_sql.assert_called_with('ALTER TABLE `__osc_new_a` DROP PARTITION p2')
    def test_dropped_columns(self):
        """dropped_column_name_list reports columns (PK or not) present in the
        old table but missing from the new one."""
        payload = CopyPayload()
        table_obj = parse_create(' CREATE TABLE a ( id1 int , id2 int,col1 varchar(10), col2 varchar(10), PRIMARY KEY (id1, id2))')
        table_obj_pri_dropped = parse_create(' CREATE TABLE a ( id1 int , col1 varchar(10), col2 varchar(10), PRIMARY KEY (id1))')
        table_obj_col_dropped = parse_create(' CREATE TABLE a ( id1 int , id2 int,col1 varchar(10), PRIMARY KEY (id1, id2))')
        table_obj_both_dropped = parse_create(' CREATE TABLE a ( id1 int , col1 varchar(10), PRIMARY KEY (id1))')
        payload._old_table = table_obj
        payload._new_table = table_obj
        self.assertEqual(payload.dropped_column_name_list, [])
        payload._new_table = table_obj_pri_dropped
        self.assertEqual(payload.dropped_column_name_list, ['id2'])
        payload._new_table = table_obj_col_dropped
        self.assertEqual(payload.dropped_column_name_list, ['col2'])
        payload._new_table = table_obj_both_dropped
        self.assertEqual(payload.dropped_column_name_list, ['id2', 'col2'])
    def test_checksum_column_list(self):
        """checksum_column_list excludes the PK always, excludes modified
        columns only when skip_checksum_for_modified is set, and excludes
        dropped columns."""
        payload = CopyPayload()
        table_obj = parse_create(' CREATE TABLE a ( ID int primary key, col1 varchar(10), col2 varchar(10)) ')
        table_obj_new = parse_create(' CREATE TABLE a ( ID int primary key, col1 varchar(10), col2 varchar(100)) ')
        table_obj_dropped = parse_create(' CREATE TABLE a ( ID int primary key, col2 varchar(100)) ')
        payload._old_table = table_obj
        payload._new_table = table_obj
        self.assertEqual(payload.checksum_column_list, ['col1', 'col2'])
        payload._new_table = table_obj_new
        payload.skip_checksum_for_modified = False
        self.assertEqual(payload.checksum_column_list, ['col1', 'col2'])
        payload._new_table = table_obj_new
        payload.skip_checksum_for_modified = True
        self.assertEqual(payload.checksum_column_list, ['col1'])
        payload._new_table = table_obj_dropped
        payload.skip_checksum_for_modified = False
        self.assertEqual(payload.checksum_column_list, ['col2'])
        payload._new_table = table_obj_dropped
        payload.skip_checksum_for_modified = False
        self.assertEqual(payload.checksum_column_list, ['col2'])
    def test_parse_session_overrides_str_empty(self):
        """Empty override string parses to an empty list."""
        payload = self.payload_setup()
        overrides_str = ''
        expected_overrides = []
        overrides = payload.parse_session_overrides_str(overrides_str)
        self.assertEqual(overrides, expected_overrides)
    def test_parse_session_overrides_str_num(self):
        """Numeric values are kept as strings in [name, value] pairs."""
        payload = self.payload_setup()
        overrides_str = 'var1=1'
        expected_overrides = [['var1', '1']]
        overrides = payload.parse_session_overrides_str(overrides_str)
        self.assertEqual(overrides, expected_overrides)
    def test_parse_session_overrides_str_str(self):
        """A single string-valued override parses to one pair."""
        payload = self.payload_setup()
        overrides_str = 'var1=v'
        expected_overrides = [['var1', 'v']]
        overrides = payload.parse_session_overrides_str(overrides_str)
        self.assertEqual(overrides, expected_overrides)
    def test_parse_session_overrides_str_list(self):
        """Semicolon-separated overrides parse to multiple pairs in order."""
        payload = self.payload_setup()
        overrides_str = 'var1=v;var2=1'
        expected_overrides = [['var1', 'v'], ['var2', '1']]
        overrides = payload.parse_session_overrides_str(overrides_str)
        self.assertEqual(overrides, expected_overrides)
    def test_parse_session_overrides_str_malform(self):
        """A trailing 'name=' with no value raises INCORRECT_SESSION_OVERRIDE."""
        payload = self.payload_setup()
        overrides_str = 'var1=v;var2='
        with self.assertRaises(OSCError) as err_context:
            payload.parse_session_overrides_str(overrides_str)
        self.assertEqual(err_context.exception.err_key, 'INCORRECT_SESSION_OVERRIDE')
    def test_execute_sql_not_called_for_empty_overrides(self):
        """override_session_vars issues no SQL for an empty override string."""
        payload = self.payload_setup()
        payload.execute_sql = Mock()
        payload.session_overrides_str = ''
        payload.override_session_vars()
        self.assertFalse(payload.execute_sql.called)
    def test_not_skip_affected_rows_check(self):
        """A replay affecting 0 rows raises REPLAY_WRONG_AFFECTED by default."""
        payload = self.payload_setup()
        payload.execute_sql = Mock(return_value=0)
        row = {payload.IDCOLNAME: 1}
        with self.assertRaises(OSCError) as err_context:
            payload.replay_insert_row(row, 1)
        self.assertEqual(err_context.exception.err_key, 'REPLAY_WRONG_AFFECTED')
    def test_skip_affected_rows_check(self):
        """With skip_affected_rows_check set, a 0-row replay does not raise."""
        payload = self.payload_setup()
        payload.skip_affected_rows_check = True
        payload.execute_sql = Mock(return_value=0)
        row = {payload.IDCOLNAME: 1}
        payload.replay_insert_row(row, 1)
    def test_is_rbr_safe_stmt(self):
        """STATEMENT binlog format is always trigger-safe."""
        payload = self.payload_setup()
        payload.mysql_vars['binlog_format'] = 'STATEMENT'
        payload.mysql_version = MySQLVersion('5.1.1')
        self.assertTrue(payload.is_trigger_rbr_safe)
    def test_is_rbr_safe_row_fb(self):
        """ROW format on the fb fork is safe when sql_log_bin_triggers is OFF."""
        payload = self.payload_setup()
        payload.mysql_vars['binlog_format'] = 'ROW'
        payload.mysql_vars['sql_log_bin_triggers'] = 'OFF'
        payload.mysql_version = MySQLVersion('5.1.1-fb')
        self.assertTrue(payload.is_trigger_rbr_safe)
    def test_is_rbr_safe_row_fb_but_logs_on(self):
        """ROW format on the fb fork is unsafe when sql_log_bin_triggers is ON."""
        payload = self.payload_setup()
        payload.mysql_vars['binlog_format'] = 'ROW'
        payload.mysql_vars['sql_log_bin_triggers'] = 'ON'
        payload.mysql_version = MySQLVersion('5.1.1-fb')
        self.assertFalse(payload.is_trigger_rbr_safe)
    def test_is_rbr_safe_row_other_forks(self):
        """ROW format on non-fb forks is treated as unsafe."""
        payload = self.payload_setup()
        payload.mysql_vars['binlog_format'] = 'ROW'
        payload.mysql_version = MySQLVersion('5.5.30-percona')
        self.assertFalse(payload.is_trigger_rbr_safe)
    def test_divide_changes_all_the_same_type(self):
        """A run of same-type changes is grouped into one batch of ids."""
        payload = CopyPayload()
        payload.replay_group_size = 100
        type_name = payload.DMLCOLNAME
        id_name = payload.IDCOLNAME
        chg_rows = [{type_name: 1, id_name: 1}, {type_name: 1, id_name: 2}, {type_name: 1, id_name: 3}, {type_name: 1, id_name: 4}, {type_name: 1, id_name: 5}]
        groups = list(payload.divide_changes_to_group(chg_rows))
        self.assertEqual(len(groups), 1)
        (chg_type, group) = groups[0]
        self.assertEqual(chg_type, 1)
        self.assertEqual(group, [1, 2, 3, 4, 5])
    def test_divide_changes_no_change(self):
        """An empty change list yields no groups."""
        payload = CopyPayload()
        payload.replay_group_size = 100
        chg_rows = []
        groups = list(payload.divide_changes_to_group(chg_rows))
        self.assertEqual(len(groups), 0)
    def test_divide_changes_all_different(self):
        """Alternating types never merge: one group per change."""
        payload = CopyPayload()
        payload.replay_group_size = 100
        type_name = payload.DMLCOLNAME
        id_name = payload.IDCOLNAME
        chg_rows = [{type_name: 1, id_name: 1}, {type_name: 2, id_name: 2}, {type_name: 3, id_name: 3}, {type_name: 1, id_name: 4}, {type_name: 2, id_name: 5}, {type_name: 3, id_name: 6}]
        groups = list(payload.divide_changes_to_group(chg_rows))
        self.assertEqual(groups, [(1, [1]), (2, [2]), (3, [3]), (1, [4]), (2, [5]), (3, [6])])
    def test_divide_changes_simple_group(self):
        """Consecutive type-2 (delete) changes are batched together."""
        payload = CopyPayload()
        payload.replay_group_size = 100
        type_name = payload.DMLCOLNAME
        id_name = payload.IDCOLNAME
        chg_rows = [{type_name: 1, id_name: 1}, {type_name: 2, id_name: 2}, {type_name: 2, id_name: 3}, {type_name: 2, id_name: 4}, {type_name: 1, id_name: 5}]
        groups = list(payload.divide_changes_to_group(chg_rows))
        self.assertEqual(groups, [(1, [1]), (2, [2, 3, 4]), (1, [5])])
    def test_divide_changes_no_grouping_for_update(self):
        """Type-3 (update) changes are never batched, even when consecutive."""
        payload = CopyPayload()
        payload.replay_group_size = 100
        type_name = payload.DMLCOLNAME
        id_name = payload.IDCOLNAME
        chg_rows = [{type_name: 1, id_name: 1}, {type_name: 3, id_name: 2}, {type_name: 3, id_name: 3}, {type_name: 3, id_name: 4}, {type_name: 1, id_name: 5}]
        groups = list(payload.divide_changes_to_group(chg_rows))
        self.assertEqual(groups, [(1, [1]), (3, [2]), (3, [3]), (3, [4]), (1, [5])])
    def test_divide_changes_group_size_reach_limit(self):
        """A batch is cut off once replay_group_size entries are collected."""
        payload = CopyPayload()
        payload.replay_group_size = 2
        type_name = payload.DMLCOLNAME
        id_name = payload.IDCOLNAME
        chg_rows = [{type_name: 1, id_name: 1}, {type_name: 2, id_name: 2}, {type_name: 2, id_name: 3}, {type_name: 2, id_name: 4}, {type_name: 1, id_name: 5}]
        groups = list(payload.divide_changes_to_group(chg_rows))
        self.assertEqual(groups, [(1, [1]), (2, [2, 3]), (2, [4]), (1, [5])])
    def test_is_myrocks_table(self):
        """is_myrocks_table is True for ENGINE=RocksDB tables."""
        payload = CopyPayload()
        payload._new_table = parse_create('CREATE TABLE abc ( id int primary key ) ENGINE = RocksDB ')
        self.assertTrue(payload.is_myrocks_table)
    def test_is_myrocks_table_for_innodb(self):
        """is_myrocks_table is False for ENGINE=InnoDB tables."""
        payload = CopyPayload()
        payload._new_table = parse_create('CREATE TABLE abc ( id int primary key ) ENGINE = InnoDB ')
        self.assertFalse(payload.is_myrocks_table)
    def test_myrocks_table_skip_foreign_key_check(self):
        """foreign_key_check issues no query for MyRocks tables."""
        payload = CopyPayload()
        payload._new_table = parse_create('CREATE TABLE abc ( id int primary key ) ENGINE = RocksDB ')
        payload.query = Mock()
        payload.foreign_key_check()
        self.assertFalse(payload.query.called)
    def test_wait_for_slow_query_none(self):
        """With no long transactions, the wait returns truthy immediately."""
        payload = self.payload_setup()
        payload.get_long_trx = Mock(return_value=None)
        result = payload.wait_until_slow_query_finish()
        self.assertTrue(result)
    def test_wait_for_slow_query_never_finish(self):
        """A transaction outliving max_wait raises LONG_RUNNING_TRX."""
        payload = self.payload_setup()
        payload.max_wait_for_slow_query = 1
        payload.get_long_trx = Mock(return_value={'Time': 100, 'db': 'mydb', 'Id': 123, 'Info': 'select * from a'})
        with self.assertRaises(OSCError) as err_context:
            payload.wait_until_slow_query_finish()
        self.assertEqual(err_context.exception.err_key, 'LONG_RUNNING_TRX')
    def test_high_pri_ddl_does_not_wait_for_slow_query(self):
        """With high-pri DDL support (8.0 fb), create_triggers skips the
        slow-query wait; without it, the wait runs exactly once."""
        payload = self.payload_setup()
        payload.stop_slave_sql = Mock()
        payload.ddl_guard = Mock()
        payload.mysql_version = MySQLVersion('8.0.1-fb-1')
        payload.get_conn = Mock()
        payload.execute_sql = Mock()
        payload.wait_until_slow_query_finish = Mock()
        payload.create_triggers()
        self.assertTrue(payload.is_high_pri_ddl_supported)
        payload.wait_until_slow_query_finish.assert_not_called()
        # Non-fb flavor: no high-pri DDL, so the wait must happen.
        payload.get_long_trx = Mock(return_value=False)
        payload.mysql_version = MySQLVersion('8.0.1-test-1')
        payload.wait_until_slow_query_finish = Mock(return_value=True)
        self.assertFalse(payload.is_high_pri_ddl_supported)
        payload.create_triggers()
        payload.wait_until_slow_query_finish.assert_called_once()
def test_auto_table_collation_population(self):
payload = self.payload_setup()
sql = '\n CREATE TABLE abc (\n ID int primary key\n ) charset = latin1\n '
payload._new_table = parse_create(sql)
default_collate = 'latin1_swedish_ci'
payload.get_default_collations = Mock(return_value={'latin1': default_collate})
payload.get_collations = Mock(return_value={default_collate: 'latin1'})
payload.populate_charset_collation(payload._new_table)
self.assertEqual(payload._new_table.collate, 'latin1_swedish_ci')
def test_auto_table_charset_population(self):
payload = self.payload_setup()
sql = '\n CREATE TABLE abc (\n ID int primary key\n ) collate = latin1_swedish_ci\n '
payload._new_table = parse_create(sql)
default_collate = 'latin1_swedish_ci'
payload.get_default_collations = Mock(return_value={'latin1': default_collate})
payload.get_collations = Mock(return_value={default_collate: 'latin1'})
payload.populate_charset_collation(payload._new_table)
self.assertEqual(payload._new_table.charset, None)
def test_auto_removal_of_using_hash(self):
    """remove_using_hash_for_80() strips 'USING HASH' from index definitions."""
    payload = self.payload_setup()
    with_hash = "\n CREATE TABLE abc (\n ID int primary key,\n A varchar(10) not null default '',\n B varchar(20) not null default '',\n KEY `ab` (`A`, `B`) USING HASH\n )\n "
    without_hash = "\n CREATE TABLE abc (\n ID int primary key,\n A varchar(10) not null default '',\n B varchar(20) not null default '',\n KEY `ab` (`A`, `B`)\n )\n "
    payload._new_table = parse_create(with_hash)
    payload.remove_using_hash_for_80()
    self.assertEqual(parse_create(without_hash), payload._new_table)
# NOTE(review): the string expression below is a block of tests disabled
# until high_pri_ddl is fixed upstream; it is inert and kept verbatim.
"\n Following test disabled until the high_pri_ddl is fixed\n def test_is_high_pri_ddl_supported_yes_8_0(self):\n payload = self.payload_setup()\n payload.mysql_version = MySQLVersion('8.0.1-fb')\n self.assertTrue(payload.is_high_pri_ddl_supported)\n\n def test_is_high_pri_ddl_supported_yes_5_6_88(self):\n payload = self.payload_setup()\n payload.mysql_version = MySQLVersion('5.6.88-fb')\n self.assertTrue(payload.is_high_pri_ddl_supported)\n\n def test_is_high_pri_ddl_supported_yes_5_7(self):\n payload = self.payload_setup()\n payload.mysql_version = MySQLVersion('5.7.1-fb')\n self.assertTrue(payload.is_high_pri_ddl_supported)\n\n def test_is_high_pri_ddl_supported_no(self):\n payload = self.payload_setup()\n payload.mysql_version = MySQLVersion('5.6.1-fb')\n self.assertFalse(payload.is_high_pri_ddl_supported)\n\n def test_is_high_pri_ddl_supported_no_for_non_fb(self):\n payload = self.payload_setup()\n payload.mysql_version = MySQLVersion('5.7.1')\n self.assertFalse(payload.is_high_pri_ddl_supported)\n\n "
def test_detailed_checksum(self):
    """Matching old/new chunk checksums must not trigger a chunk dump."""
    payload = self.payload_setup()
    payload.find_coverage_index = Mock()
    payload.dump_current_chunk = Mock()
    chunk_result = {'col1': 'abce123', 'col2': 'fghi456', '_osc_chunk_cnt': 0}
    payload.checksum_for_single_chunk = Mock(return_value=chunk_result)
    payload.detailed_checksum()
    payload.dump_current_chunk.assert_not_called()
def test_detailed_checksum_mismatch(self):
    """Differing chunk checksums raise OSCError and dump the chunk."""
    payload = self.payload_setup()
    payload.find_coverage_index = Mock()
    payload.dump_current_chunk = Mock()
    old_chunk = {'col1': 'abcd123', 'col2': 'fghi456', '_osc_chunk_cnt': 0}
    new_chunk = {'col1': '123abcd', 'col2': 'fghi456', '_osc_chunk_cnt': 0}
    payload.checksum_for_single_chunk = Mock(side_effect=[old_chunk, new_chunk])
    with self.assertRaises(OSCError):
        payload.detailed_checksum()
    self.assertTrue(payload.dump_current_chunk.called)
class ValveTestOrderedBidirectionalTunnelACLwithExitInstructions(ValveTestBases.ValveTestTunnel):
    """Bidirectional tunnel ACL test where the tunnel exit applies extra
    instructions (vlan_vid 101) before outputting the packet."""

    # Tunnel VID used by the generated tunnel ACL rules.
    TUNNEL_ID = 2
    CONFIG = "\nacls:\n tunnel_acl:\n - rule:\n in_port: 1\n dl_type: 0x0800\n ip_proto: 1\n actions:\n output:\n - tunnel: {\n dp: s2,\n port: 1,\n bi_directional: True,\n exit_instructions: [{'vlan_vid': 101}]\n }\nvlans:\n vlan100:\n vid: 1\ndps:\n s1:\n dp_id: 0x1\n hardware: 'GenericTFM'\n stack:\n priority: 1\n dp_acls: [tunnel_acl]\n interfaces:\n 1:\n native_vlan: vlan100\n 3:\n stack: {dp: s2, port: 3}\n s2:\n dp_id: 0x2\n hardware: 'GenericTFM'\n interfaces:\n 1:\n native_vlan: vlan100\n 3:\n stack: {dp: s1, port: 3}\n"

    def test_tunnel_bi_directional_with_exit_instructions(self):
        """Validate encapsulation, exit-instruction application, and the
        reverse (bi-directional) path, per the failure messages below."""
        valve = self.valves_manager.valves[1]
        self.apply_ofmsgs(valve.stack_manager.add_tunnel_acls())
        # s1: ICMP in on port 1 is encapsulated and forwarded out the stack port.
        self.validate_tunnel(self.DP_ID, self.DP_ID, 1, 0, 3, self.TUNNEL_ID, True, 'Did not encapsulate and forward')
        # s2 exit: packet leaves port 1 carrying the extra vlan_vid 101.
        self.validate_tunnel(int(2), int(2), 3, self.TUNNEL_ID, 1, 101, True, 'Did not apply additional exit instructions', pcp=valve_of.PCP_TUNNEL_FLAG)
        # s2: reverse-direction packet is accepted back into the tunnel.
        self.validate_tunnel(int(2), int(2), 1, self.TUNNEL_ID, 3, self.TUNNEL_ID, True, 'Did not accept reverse tunnel packet', pcp=valve_of.PCP_TUNNEL_REVERSE_DIRECTION_FLAG)
        # s1: reverse packet is decapsulated and output to the original source.
        self.validate_tunnel(self.DP_ID, self.DP_ID, 3, [self.TUNNEL_ID, 1], 1, 0, True, 'Did not output to original source, the reverse tunnelled packet', pcp=valve_of.PCP_TUNNEL_REVERSE_DIRECTION_FLAG)
def test_mft_precomputations():
    """MatrixFourierTransform caches matrices / scratch buffers only when asked."""
    grid_in = make_pupil_grid(128)
    grid_out = make_fft_grid(grid_in, 1, 0.25)
    for precompute in (True, False):
        for preallocate in (True, False):
            mft = MatrixFourierTransform(grid_in, grid_out, precompute_matrices=precompute, allocate_intermediate=preallocate)
            mft.forward(grid_in.zeros())
            mft.forward(grid_in.ones())
            assert precompute == (mft.M1 is not None)
            assert preallocate == (mft.intermediate_array is not None)
class String_Literal(Literal):
    """AST node for a double-quoted MATLAB string literal."""

    def __init__(self, t_string):
        super().__init__()
        assert isinstance(t_string, MATLAB_Token)
        assert t_string.kind == 'STRING'
        self.t_string = t_string
        self.t_string.set_ast(self)

    def __str__(self):
        return '"%s"' % self.t_string.value

    def loc(self):
        """Source location of the underlying token."""
        return self.t_string.location

    def evaluate_static_string_expression(self):
        """Return the literal's textual value (statically known)."""
        return self.t_string.value
class TrainingJob(_common.FlyteIdlEntity):
    """Idl entity pairing an algorithm specification with its resource config.

    NOTE(review): the accessor decorators were evidently stripped during
    extraction -- to_flyte_idl() reads `self.algorithm_specification` as an
    object (not a call), which only works if the accessors are properties,
    and from_flyte_idl takes `cls` and calls `cls(...)`. The @property and
    @classmethod decorators are restored accordingly.
    """

    def __init__(self, algorithm_specification: AlgorithmSpecification, training_job_resource_config: TrainingJobResourceConfig):
        self._algorithm_specification = algorithm_specification
        self._training_job_resource_config = training_job_resource_config

    @property
    def algorithm_specification(self) -> AlgorithmSpecification:
        """The algorithm specification for this training job."""
        return self._algorithm_specification

    @property
    def training_job_resource_config(self) -> TrainingJobResourceConfig:
        """The resource configuration for this training job."""
        return self._training_job_resource_config

    def to_flyte_idl(self) -> _training_job_pb2.TrainingJob:
        """Serialize to the protobuf message, tolerating unset members."""
        return _training_job_pb2.TrainingJob(
            algorithm_specification=(self.algorithm_specification.to_flyte_idl() if self.algorithm_specification else None),
            training_job_resource_config=(self.training_job_resource_config.to_flyte_idl() if self.training_job_resource_config else None),
        )

    @classmethod
    def from_flyte_idl(cls, pb2_object: _training_job_pb2.TrainingJob):
        """Build a TrainingJob from its protobuf message (fields passed through)."""
        return cls(algorithm_specification=pb2_object.algorithm_specification, training_job_resource_config=pb2_object.training_job_resource_config)
# NOTE(review): the '.skipif' line below appears to be a decorator whose
# '@pytest.mark' prefix was lost in extraction; not valid Python as-is.
.skipif((zopfli is None), reason='zopfli not installed')
def test_ttcompile_ttf_to_woff_with_zopfli(tmpdir):
    """Compiling a TTX to WOFF with zopfli yields a font with all expected tables."""
    inttx = os.path.join('Tests', 'ttx', 'data', 'TestTTF.ttx')
    outwoff = tmpdir.join('TestTTF.woff')
    options = ttx.Options([], 1)
    options.flavor = 'woff'
    options.useZopfli = True
    ttx.ttCompile(inttx, str(outwoff), options)
    assert outwoff.check(file=True)
    ttf = TTFont(str(outwoff))
    # Sanity-check the compiled font contains the complete table set.
    expected_tables = ('head', 'hhea', 'maxp', 'OS/2', 'name', 'cmap', 'hmtx', 'fpgm', 'prep', 'cvt ', 'loca', 'glyf', 'post', 'gasp', 'DSIG')
    for table in expected_tables:
        assert (table in ttf)
class OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingPan(Options):
    """Pan-mapping options exposed as getter/setter pairs backed by _config.

    NOTE(review): the original had duplicate same-named methods, which only
    makes sense as @property / @<name>.setter pairs (otherwise each setter
    silently shadows its getter). The decorators -- apparently stripped
    during extraction -- are restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class StartWorkflowExecutionRequest():
    """Parameters for a Cadence StartWorkflowExecution call.

    NOTE(review): annotated class attributes all defaulting to None suggest
    this class was decorated with @dataclass in the original source; confirm
    before constructing it with keyword arguments.
    """
    domain: str = None
    workflow_id: str = None
    workflow_type: WorkflowType = None
    task_list: TaskList = None
    input: bytes = None
    execution_start_to_close_timeout_seconds: int = None
    task_start_to_close_timeout_seconds: int = None
    identity: str = None
    request_id: str = None
    workflow_id_reuse_policy: WorkflowIdReusePolicy = None
    child_policy: ChildPolicy = None
    retry_policy: RetryPolicy = None
    cron_schedule: str = None
    memo: Memo = None
    search_attributes: SearchAttributes = None
    header: Header = None
class XiAiNovelPageProcessor(HtmlProcessor.HtmlPageProcessor):
    """Page processor for XiAiNovel: renders markdown stored in <pre> blocks
    and strips hidden white-on-white junk text."""
    wanted_mimetypes = ['text/html']
    want_priority = 80
    loggerPath = 'Main.Text.XiAiNovel'

    def spotPatch(self, soup):
        """Convert each <pre> (holding markdown text) into rendered HTML."""
        for pre in soup.find_all('pre'):
            pre.name = 'div'
            contentstr = pre.encode_contents().decode('utf-8')
            formatted = markdown.markdown(contentstr, extensions=['mdx_linkify'])
            formatted = WebRequest.as_soup(formatted)
            # as_soup may wrap the fragment in <html><body>; unwrap both.
            if formatted.find('html'):
                formatted.html.unwrap()
                formatted.body.unwrap()
            pre.replace_with(formatted)
        return soup

    # NOTE(review): defined without 'self' -- presumably @staticmethod in the
    # original; confirm. The regex literal below is truncated (the site URL
    # pattern was lost in extraction) and is not valid Python as-is.
    def wantsUrl(url):
        if re.search('^ url):
            print(("XiAiNovel Wants url: '%s'" % url))
            return True
        return False

    def preprocessBody(self, soup):
        """Drop spans styled white-on-white (hidden junk text)."""
        badspans = soup.find_all('span', style=re.compile('color\\W?:\\W?#ffffff', re.I))
        for bad in badspans:
            bad.decompose()
        return soup
def test_mul_param(some_thr, any_dtype):
    """Chaining two mul_param transformations scales the data by p1 * p2."""
    data = get_test_array((1000,), any_dtype)
    factor1 = get_test_array((1,), any_dtype)[0]
    factor2 = get_test_array((1,), any_dtype)[0]
    data_dev = some_thr.to_device(data)
    result_dev = some_thr.empty_like(data_dev)
    comp = get_test_computation(data_dev)
    scale = tr.mul_param(data_dev, any_dtype)
    # Attach a scaling transformation on both the input and the output side.
    comp.parameter.input.connect(scale, scale.output, input_prime=scale.input, p1=scale.param)
    comp.parameter.output.connect(scale, scale.input, output_prime=scale.output, p2=scale.param)
    compiled = comp.compile(some_thr)
    compiled(result_dev, factor1, data_dev, factor2)
    assert diff_is_negligible(result_dev.get(), data * factor1 * factor2)
class ColourHandler(logging.Handler):
    """Logging handler that colourizes logger-name segments and levels
    using colorama, printing ASCII-sanitized output."""

    def __init__(self, level=logging.DEBUG):
        logging.Handler.__init__(self, level)
        # Leading '\r' lets log lines overwrite an in-progress console line.
        self.formatter = logging.Formatter(('\r%(name)s%(padding)s - %(style)s%(levelname)s - %(message)s' + clr.Style.RESET_ALL))
        clr.init()
        # Depth -> list of name segments seen at that depth; a segment's index
        # in that list deterministically selects its colour.
        self.logPaths = {}

    def emit(self, record):
        """Colourize the record's name/level and print it ASCII-safe."""
        segments = record.name.split('.')
        # Fold a leading 'Main' into the next segment so it shares its colour.
        if ((segments[0] == 'Main') and (len(segments) > 1)):
            segments.pop(0)
            segments[0] = ('Main.' + segments[0])
        nameList = []
        for (indice, pathSegment) in enumerate(segments):
            # Register first-seen segments so their colour index is stable.
            if (not (indice in self.logPaths)):
                self.logPaths[indice] = [pathSegment]
            elif (not (pathSegment in self.logPaths[indice])):
                self.logPaths[indice].append(pathSegment)
            name = clr.Style.RESET_ALL
            name += getColor(self.logPaths[indice].index(pathSegment))
            name += pathSegment
            name += clr.Style.RESET_ALL
            nameList.append(name)
        record.name = '.'.join(nameList)
        # Level styling: DEBUG dim, WARNING bright, ERROR red, CRITICAL red-on-blue.
        if (record.levelname == 'DEBUG'):
            record.style = clr.Style.DIM
        elif (record.levelname == 'WARNING'):
            record.style = clr.Style.BRIGHT
        elif (record.levelname == 'ERROR'):
            record.style = (clr.Style.BRIGHT + clr.Fore.RED)
        elif (record.levelname == 'CRITICAL'):
            record.style = ((clr.Style.BRIGHT + clr.Back.BLUE) + clr.Fore.RED)
        else:
            record.style = clr.Style.NORMAL
        # Coerce args and message to ASCII so exotic characters can't break printing.
        if record.args:
            if isinstance(record.args, (list, tuple)):
                record.args = tuple(((str(argsf).encode('ascii', 'replace').decode('ascii') if isinstance(argsf, str) else argsf) for argsf in record.args))
            else:
                record.args = str(record.args).encode('ascii', 'replace').decode('ascii')
        record.msg = str(record.msg).encode('ascii', 'replace').decode('ascii')
        record.padding = ''
        msg = self.format(record)
        msg = str(msg).encode('ascii', 'replace').decode('ascii')
        print(msg)
def test_download_file(isolated_client, mock_fal_persistent_dirs):
    """download_file places files under the persistent dir and honors force=.

    NOTE(review): the URL literal below is truncated (unterminated string)
    and each bare '_client()' line looks like the residue of a stripped
    decorator that runs the nested function in the isolated client --
    confirm against the original file; not valid Python as-is.
    """
    from fal.toolkit.utils.download_utils import FAL_PERSISTENT_DIR
    EXAMPLE_FILE_URL = '
    relative_directory = 'test'
    output_directory = (FAL_PERSISTENT_DIR / relative_directory)
    expected_path = (output_directory / 'README.md')
    _client()
    def absolute_path_persistent_dir():
        downloaded_path = download_file(EXAMPLE_FILE_URL, target_dir=output_directory)
        return downloaded_path
    assert (str(expected_path) == str(absolute_path_persistent_dir())), f"Path should be the target location sent '{expected_path!r}'"
    _client()
    def absolute_path_non_persistent_dir():
        downloaded_path = download_file(EXAMPLE_FILE_URL, target_dir=output_directory)
        return downloaded_path
    assert (str(expected_path) == str(absolute_path_non_persistent_dir())), f"Path should be the target location sent '{expected_path!r}'"
    _client()
    def relative_path():
        downloaded_path = download_file(EXAMPLE_FILE_URL, target_dir=relative_directory)
        return downloaded_path
    assert (str(expected_path) == str(relative_path())), f"Path should be the target location sent '{expected_path!r}'"
    _client()
    def test_with_force():
        # Two force=False calls: the second must reuse the cached file.
        first_path = download_file(EXAMPLE_FILE_URL, target_dir=output_directory, force=False)
        first_path_stat = first_path.stat()
        second_path = download_file(EXAMPLE_FILE_URL, target_dir=output_directory, force=False)
        second_path_stat = second_path.stat()
        # force=True must re-download, producing a newer mtime.
        third_path = download_file(EXAMPLE_FILE_URL, target_dir=output_directory, force=True)
        third_path_stat = third_path.stat()
        return (first_path, first_path_stat, second_path, second_path_stat, third_path, third_path_stat)
    (first_path, first_path_stat, second_path, second_path_stat, third_path, third_path_stat) = test_with_force()
    assert (str(expected_path) == str(first_path)), 'Path should be the target location'
    assert (str(expected_path) == str(second_path)), 'Path should be the target location'
    assert (str(expected_path) == str(third_path)), 'Path should be the target location'
    assert (first_path_stat.st_mtime == second_path_stat.st_mtime), 'The file should not be redownloaded'
    assert (second_path_stat.st_mtime < third_path_stat.st_mtime), 'The file should be redownloaded with force=True'
def extractSapphicdallianceWordpressCom(item):
    """Release extractor for sapphicdalliance.wordpress.com feed items.

    Returns a release message for known series tags, or False when the title
    has no chapter/volume info, is a preview, or no tag matches.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    # Bug fix: this path previously returned None; sibling extractors and the
    # fall-through below return False for "no release", so stay consistent.
    if ((not (chp or vol)) or ('preview' in item['title'].lower())):
        return False
    tagmap = [
        ('My Disciple Consumes Too Much', 'My Disciple Consumes Too Much', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for (tagname, name, tl_type) in tagmap:
        if (tagname in item['tags']):
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class TestFem(unittest.TestCase):
    """End-to-end EIT reconstruction tests: each solver should localize a
    conductive circular anomaly near its true position."""

    def setUp(self):
        """Build a mesh with one circular anomaly plus baseline/target data."""
        n_el = 16
        self.n_el = n_el
        self.mesh_obj = mesh.create(self.n_el, h0=0.1)
        # Ground truth the assertions below compare against.
        self.anomaly = {'center': [0.5, 0.5], 'r': 0.1, 'perm': 10.0, 'sign': True}
        anomaly = PyEITAnomaly_Circle(center=self.anomaly['center'], r=self.anomaly['r'], perm=self.anomaly['perm'])
        self.mesh_new = mesh.set_perm(self.mesh_obj, anomaly=anomaly, background=1.0)
        self.protocol_obj = protocol.create(n_el, dist_exc=1, step_meas=1, parser_meas='std')
        self.fwd = EITForward(self.mesh_obj, self.protocol_obj)
        # v0 = homogeneous baseline, v1 = with the anomaly present.
        self.v0 = self.fwd.solve_eit()
        self.v1 = self.fwd.solve_eit(perm=self.mesh_new.perm)

    def test_bp(self):
        """Back-projection localizes the anomaly within its radius."""
        eit = pyeit.eit.bp.BP(self.mesh_obj, self.protocol_obj)
        eit.setup(weight='none', perm=1)
        ds = (192.0 * eit.solve(self.v1, self.v0, normalize=False))
        (loc, ds_max, ds_sign) = eit_loc_eval(ds, self.mesh_obj, mode='node')
        loc = loc[:len(self.anomaly['center'])]
        dist = np.linalg.norm((loc - self.anomaly['center']))
        self.assertTrue((ds_sign == int(self.anomaly['sign'])))
        self.assertTrue((dist < self.anomaly['r']))

    def test_jac(self):
        """Jacobian (Gauss-Newton) solver localizes within the radius."""
        eit = pyeit.eit.jac.JAC(self.mesh_obj, self.protocol_obj)
        eit.setup(p=0.5, lamb=0.01, method='kotre', perm=1, jac_normalized=True)
        ds = eit.solve(self.v1, self.v0, normalize=True)
        (loc, ds_max, ds_sign) = eit_loc_eval(ds, self.mesh_obj, mode='element')
        loc = loc[:len(self.anomaly['center'])]
        dist = np.linalg.norm((loc - self.anomaly['center']))
        self.assertTrue((ds_sign == int(self.anomaly['sign'])))
        self.assertTrue((dist < self.anomaly['r']))

    def test_svd(self):
        """SVD solver localizes within twice the radius (looser tolerance)."""
        eit = pyeit.eit.svd.SVD(self.mesh_obj, self.protocol_obj)
        eit.setup(n=50, method='svd', perm=1, jac_normalized=True)
        ds = eit.solve(self.v1, self.v0, normalize=True)
        (loc, ds_max, ds_sign) = eit_loc_eval(ds, self.mesh_obj, mode='element')
        loc = loc[:len(self.anomaly['center'])]
        dist = np.linalg.norm((loc - self.anomaly['center']))
        self.assertTrue((ds_sign == int(self.anomaly['sign'])))
        self.assertTrue((dist < (2 * self.anomaly['r'])))

    def test_greit(self):
        """GREIT localizes on its image grid within the anomaly radius."""
        eit = pyeit.eit.greit.GREIT(self.mesh_obj, self.protocol_obj)
        eit.setup(p=0.5, lamb=0.01, perm=1, jac_normalized=True)
        ds = eit.solve(self.v1, self.v0, normalize=True)
        (x, y, ds) = eit.mask_value(ds, mask_value=np.NAN)
        # Peak of |ds| on the image grid gives the reconstructed center.
        loc = np.where((np.abs(ds) == np.nanmax(np.abs(ds))))
        center = np.array([x[loc][0], y[loc][0]])
        ds_sign = np.sign(ds[loc][0])
        dist = np.linalg.norm((center - self.anomaly['center']))
        self.assertTrue((ds_sign == int(self.anomaly['sign'])))
        self.assertTrue((dist < self.anomaly['r']))
class OptionSeriesSankeySonificationPointgrouping(Options):
    """Point-grouping options for sankey sonification; defaults live in the getters.

    NOTE(review): the original had duplicate same-named methods, which only
    makes sense as @property / @<name>.setter pairs (otherwise each setter
    silently shadows its getter). The decorators -- apparently stripped
    during extraction -- are restored here.
    """

    @property
    def algorithm(self):
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
# NOTE(review): the bare string line below appears to be the argument of a
# stripped registry decorator (e.g. @registry.reg('cuda.ndhwc3to8.gen_function'));
# confirm against the original file.
('cuda.ndhwc3to8.gen_function')
def gen_function(func_attrs, template_path, shape_eval_template, shape_save_template):
    """Render the CUDA source for the ndhwc3to8 op from the given templates.

    Combines the shape-evaluation and shape-save snippets into one shape
    function, then renders the full kernel source with the element type
    derived from the first input's dtype.
    """
    func_name = func_attrs['name']
    backend_spec = CUDASpec()
    elem_input_type = backend_spec.dtype_to_backend_type(func_attrs['inputs'][0]._attrs['dtype'])
    shape_eval_func = shape_eval_template.render(indent=' ', dtype='int64_t ', x_dim0='*batch', x_dim1='*in_d', x_dim2='*in_h', x_dim3='*in_w')
    shape_save_func = shape_save_template.render(indent=' ', y_dim0='*out_batch', y_dim1='*out_d', y_dim2='*out_h', y_dim3='*out_w')
    shape_func = (shape_eval_func + shape_save_func)
    exec_paths = EXEC_TEMPLATE.render(elem_input_type=elem_input_type)
    return SRC_TEMPLATE.render(function_name=func_name, elem_input_type=elem_input_type, shape_function=shape_func, exec_paths=exec_paths)
class Display():
    """Visualization helpers for 2-D keypoints/matches (OpenCV) and 3-D
    point clouds (Open3D)."""

    def __init__(self):
        # Display dimensions (not otherwise read in this class).
        self.W = 960
        self.H = 540

    def display_points2d(self, img, kpts, matches):
        """Draw keypoints as green dots and matches as red lines onto img.

        Bug fix: the original guards were `kpts != 0` / `matches != 0`,
        which crash when None is passed; plain truthiness also skips
        empty sequences cleanly.
        """
        if kpts:
            for kpt in kpts:
                cv2.circle(img, (int(kpt.pt[0]), int(kpt.pt[1])), radius=2, color=(0, 255, 0), thickness=(- 1))
        if matches:
            for match in matches:
                (u1, v1) = np.int32(match[0].pt)
                (u2, v2) = np.int32(match[1].pt)
                cv2.line(img, (u1, v1), (u2, v2), color=(0, 0, 255), thickness=1)
        return img

    def display_points3d(self, tripoints3d, pcd, visualizer):
        """Replace pcd's points with tripoints3d and refresh the Open3D view."""
        if tripoints3d is not None:
            pcd.clear()
            pcd.points = o3d.utility.Vector3dVector(tripoints3d)
            # Re-add the geometry so the visualizer picks up the new points.
            visualizer.remove_geometry(pcd)
            visualizer.add_geometry(pcd)
            visualizer.poll_events()
            visualizer.update_renderer()
            time.sleep(0.2)

    def display_vid(self, img):
        """Show the frame in the 'main' window (caller must pump cv2.waitKey)."""
        cv2.imshow('main', img)
def test_empty_doc():
    """Tagger predictions must be batch-size invariant, even with an empty text."""
    nlp = English.from_config(load_config_from_str(CONFIG))
    train_examples = [Example.from_dict(nlp.make_doc(text), ann) for (text, ann) in TRAIN_DATA]
    optimizer = nlp.initialize(get_examples=(lambda : train_examples))
    for _ in range(2):
        losses = {}
        nlp.update(train_examples, sgd=optimizer, losses=losses)
    texts = ['first', 'second', 'third', 'fourth', 'and', 'then', 'some', '']
    # Check both with and without the transformer enabled.
    for enabled_pipes in (['transformer', 'tagger'], ['tagger']):
        nlp.select_pipes(enable=enabled_pipes)
        single = list(nlp.pipe(texts, batch_size=1))
        batched = list(nlp.pipe(texts, batch_size=4))
        assert [doc[0].tag_ for doc in single[:(- 1)]] == [doc[0].tag_ for doc in batched[:(- 1)]]
class NewTopic(MethodView):
    """View for composing a new topic in a forum (GET renders the form,
    POST validates and creates the topic)."""
    decorators = [login_required, allows.requires(CanAccessForum(), CanPostTopic, on_fail=FlashAndRedirect(message=_('You are not allowed to post a topic here'), level='warning', endpoint=(lambda *a, **k: current_forum.url)))]

    def get(self, forum_id, slug=None):
        """Render an empty new-topic form for the given forum."""
        forum_instance = Forum.query.filter_by(id=forum_id).first_or_404()
        return render_template('forum/new_topic.html', forum=forum_instance, form=self.form(), edit_mode=False)

    def post(self, forum_id, slug=None):
        """Create the topic on a valid submit, else re-render the form."""
        forum_instance = Forum.query.filter_by(id=forum_id).first_or_404()
        form = self.form()
        if form.validate_on_submit():
            topic = form.save(real(current_user), forum_instance)
            return redirect(topic.url)
        return render_template('forum/new_topic.html', forum=forum_instance, form=form, edit_mode=False)

    def form(self):
        """Return a NewTopicForm after letting plugins customize the form class."""
        current_app.pluggy.hook.flaskbb_form_topic(form=NewTopicForm)
        return NewTopicForm()
def _convert_resources_to_resource_entries(resources: Resources) -> List[_ResourceEntry]:
    """Translate a Resources object into _ResourceEntry items for each set field."""
    value_by_name = (
        (_ResourceName.CPU, resources.cpu),
        (_ResourceName.MEMORY, resources.mem),
        (_ResourceName.GPU, resources.gpu),
        (_ResourceName.STORAGE, resources.storage),
        (_ResourceName.EPHEMERAL_STORAGE, resources.ephemeral_storage),
    )
    return [_ResourceEntry(name=name, value=value) for (name, value) in value_by_name if value is not None]
class EmbedderFactory():
    """Factory that instantiates an embedder (and its config) by provider name."""

    # Provider name -> dotted path of the embedder class.
    provider_to_class = {'azure_openai': 'embedchain.embedder.openai.OpenAIEmbedder', 'gpt4all': 'embedchain.embedder.gpt4all.GPT4AllEmbedder', 'huggingface': 'embedchain.embedder.huggingface.HuggingFaceEmbedder', 'openai': 'embedchain.embedder.openai.OpenAIEmbedder', 'vertexai': 'embedchain.embedder.vertexai.VertexAIEmbedder', 'google': 'embedchain.embedder.google.GoogleAIEmbedder'}
    # Provider name -> dotted path of its config class (openai's is the fallback).
    provider_to_config_class = {'azure_openai': 'embedchain.config.embedder.base.BaseEmbedderConfig', 'openai': 'embedchain.config.embedder.base.BaseEmbedderConfig', 'gpt4all': 'embedchain.config.embedder.base.BaseEmbedderConfig', 'google': 'embedchain.config.embedder.google.GoogleAIEmbedderConfig'}

    @classmethod
    def create(cls, provider_name, config_data):
        """Instantiate provider_name's embedder configured with config_data.

        Raises ValueError for unknown providers.

        NOTE(review): restored the @classmethod decorator -- `create` takes
        `cls` and reads the class-level maps, so it was evidently decorated
        in the original source.
        """
        class_type = cls.provider_to_class.get(provider_name)
        # Providers without a dedicated config class fall back to openai's.
        config_name = ('openai' if (provider_name not in cls.provider_to_config_class) else provider_name)
        config_class_type = cls.provider_to_config_class.get(config_name)
        if class_type:
            embedder_class = load_class(class_type)
            embedder_config_class = load_class(config_class_type)
            return embedder_class(config=embedder_config_class(**config_data))
        else:
            raise ValueError(f'Unsupported Embedder provider: {provider_name}')
def transform_class_name(name):
    """Kebab-case a transformed class name: 'FooBar' -> 'foo-bar'."""
    transformed = transform_name(name)
    pieces = []
    for ch in transformed:
        # Insert a dash before every interior (non-leading) uppercase letter.
        if ch in string.ascii_uppercase and pieces:
            pieces.append('-')
        pieces.append(ch.lower())
    return ''.join(pieces)
def setup_cache_dirs():
    """Point the PyOP2/TSFC cache env vars at the configured cache_dir if unset."""
    cache_root = get_config()['options']['cache_dir']
    env_defaults = {
        'PYOP2_CACHE_DIR': os.path.join(cache_root, 'pyop2'),
        'FIREDRAKE_TSFC_KERNEL_CACHE_DIR': os.path.join(cache_root, 'tsfc'),
    }
    # Respect any values the user already exported.
    for var, path in env_defaults.items():
        if var not in os.environ:
            os.environ[var] = path
class LLMOperator(BaseLLM, MapOperator[(ModelRequest, ModelOutput)], ABC):
    """DAG map operator that forwards a ModelRequest to the LLM client."""

    def __init__(self, llm_client: Optional[LLMClient]=None, **kwargs):
        super().__init__(llm_client=llm_client)
        # MapOperator is initialized explicitly so the operator kwargs reach it.
        MapOperator.__init__(self, **kwargs)

    async def map(self, request: ModelRequest) -> ModelOutput:
        """Generate a model output, first recording the model name in shared DAG data."""
        (await self.current_dag_context.save_to_share_data(self.SHARE_DATA_KEY_MODEL_NAME, request.model))
        return (await self.llm_client.generate(request))
def unify(x):
    """Normalize an expression fragment into a single atomic string.

    Joins tuple/list input into one string, returns it unchanged when it is
    already atomic (a quoted string, a dotted identifier, a single call,
    a single subscript, or a braced literal), and otherwise wraps it in
    parentheses.

    Bug fix: an empty string previously raised IndexError on `x[0]`; the
    quote check is now guarded, and '' falls through to the identifier
    regex (which matches zero characters) and is returned as-is.
    """
    if isinstance(x, (tuple, list)):
        x = ''.join(x)
    # Quoted string with exactly one pair of matching quotes.
    if x and (x[0] in '\'"') and (x[0] == x[(- 1)]) and (x.count(x[0]) == 2):
        return x
    # Plain dotted identifier (also matches the empty string).
    elif re.match('^[\\.\\w]*$', x, re.UNICODE):
        return x
    # Single call expression, e.g. f(x).
    elif (re.match('^[\\.\\w]*\\(.*\\)$', x, re.UNICODE) and (x.count(')') == 1)):
        return x
    # Single subscript expression, e.g. m[0].
    elif (re.match('^[\\.\\w]*\\[.*\\]$', x, re.UNICODE) and (x.count(']') == 1)):
        return x
    # Braced literal, e.g. {a: 1}.
    elif (re.match('^\\{.*\\}$', x, re.UNICODE) and (x.count('}') == 1)):
        return x
    else:
        return ('(%s)' % x)
def test():
    """Course-exercise checks validating the shapes of pattern1 and pattern2.

    NOTE(review): several expected literals (the TEXT values compared against
    '' and the success-message text) appear to have been lost in extraction;
    confirm against the original exercise file.
    """
    assert (len(pattern1) == 2), 'pattern1'
    assert (len(pattern2) == 3), 'pattern2'
    assert (len(pattern1[0]) == 1), 'pattern1'
    # Accept either lower- or upper-case attribute keys.
    assert any(((pattern1[0].get(attr) == 'ADJ') for attr in ('pos', 'POS'))), 'pattern1'
    assert (len(pattern1[1]) == 1), 'pattern1'
    assert any(((pattern1[1].get(attr) == '') for attr in ('text', 'TEXT'))), 'pattern1'
    assert any(((pattern2[0].get(attr) == '') for attr in ('text', 'TEXT'))), 'pattern2'
    __msg__.good("!'Matcher''PhraseMatcher',")
class group_stats_reply(stats_reply):
    """Generated (LOXI-style) OpenFlow 1.1 OFPST_GROUP stats reply message.

    NOTE(review): pack() appends a str padding ('\\x00' * 4) and joins with
    '' -- this is Python 2 era generated code; under Python 3 it would need
    bytes literals throughout.
    """
    version = 2  # OpenFlow 1.1 wire version
    type = 19  # OFPT_STATS_REPLY
    stats_type = 6  # OFPST_GROUP

    def __init__(self, xid=None, flags=None, entries=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (entries != None):
            self.entries = entries
        else:
            self.entries = []
        return

    def pack(self):
        """Serialize the message; the length field is backpatched afterwards."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder (index 2)
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))  # 4 bytes of pad
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)  # backpatch total length
        return ''.join(packed)

    # NOTE(review): defined without 'self'; presumably wrapped as a
    # staticmethod in the original generated code -- confirm.
    def unpack(reader):
        """Parse a group_stats_reply from reader, asserting header fields."""
        obj = group_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 2)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        # Re-slice the reader to exactly this message's payload.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 6)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)  # pad
        obj.entries = loxi.generic_util.unpack_list(reader, ofp.common.group_stats_entry.unpack)
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.entries != other.entries):
            return False
        return True

    def pretty_print(self, q):
        """Pretty-print the message fields into the given printer."""
        q.text('group_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('entries = ')
                q.pp(self.entries)
            q.breakable()
        q.text('}')
def test_custom_configs_are_loaded(app):
    """Custom SIMPLELOGIN_* settings must flow into the extension config."""
    settings = Settings(
        SIMPLELOGIN_BLUEPRINT='custom_blueprint',
        SIMPLELOGIN_LOGIN_URL='/custom_login/',
        SIMPLELOGIN_LOGOUT_URL='/custom_logout/',
        SIMPLELOGIN_HOME_URL='/custom_home/',
    )
    sl = create_simple_login(settings)
    expected = {
        'blueprint': 'custom_blueprint',
        'login_url': '/custom_login/',
        'logout_url': '/custom_logout/',
        'home_url': '/custom_home/',
    }
    for key, value in expected.items():
        assert sl.config[key] == value
class StatisticalMiddleware(MiddlewareMixin):
    """Tracks the set of recently-active visitor IPs via the cache and
    exposes it on the request as `online_list`."""

    def process_request(self, request):
        ip = get_ip(request)
        online_ips = list(cache.get('online_ips', []))
        if online_ips:
            # get_many drops expired per-IP keys, pruning stale visitors.
            online_ips = list(cache.get_many(online_ips).keys())
        # Refresh this visitor's 10-second liveness marker.
        cache.set(ip, 0, 10)
        # NOTE(review): indentation was lost in extraction -- the cache.set
        # below may belong inside the if in the original source; confirm.
        if (ip not in online_ips):
            online_ips.append(ip)
        cache.set('online_ips', online_ips)
        request.online_list = online_ips
class RandomIP(object):
    """Generates random dotted-quad IPv4 addresses avoiding reserved first octets."""

    def _generateip(self, string):
        """Return a random IP whose first octet is not in {10, 127, 169, 172, 192}.

        The `string` argument is unused; kept for interface compatibility.
        """
        notvalid = [10, 127, 169, 172, 192]
        first = randrange(1, 256)
        # Bug fix: was `while first is notvalid` -- an identity comparison of
        # an int against the list, which is always False, so reserved first
        # octets were never rejected. Membership (`in`) is the intended test.
        while first in notvalid:
            first = randrange(1, 256)
        _ip = '.'.join([str(first), str(randrange(1, 256)), str(randrange(1, 256)), str(randrange(1, 256))])
        return _ip
class MWidget(HasTraits):
    """Toolkit-independent mixin for Pyface widgets: wires the tooltip and
    context-menu traits to an abstract toolkit control."""

    # Tooltip text shown on the widget's control.
    tooltip = Str()
    # Optional context menu displayed for the control.
    context_menu = Instance('pyface.action.menu_manager.MenuManager')

    def create(self, parent=None):
        """Create the toolkit control and hook up trait listeners."""
        if (parent is not None):
            self.parent = parent
        self.control = self._create_control(self.parent)
        self._initialize_control()
        self._add_event_listeners()

    def destroy(self):
        """Remove listeners and drop the control reference."""
        if (self.control is not None):
            self._remove_event_listeners()
            self.control = None

    def _create(self):
        """Deprecated alias for create()."""
        from warnings import warn
        warn('The _create() method will be removed in a future version of Pyface. Use create() instead.', DeprecationWarning, stacklevel=2)
        self.create()

    def _create_control(self, parent):
        """Toolkit hook: build and return the concrete control."""
        raise NotImplementedError()

    def _initialize_control(self):
        """Push the initial trait state down to the freshly created control."""
        self._set_control_tooltip(self.tooltip)

    def _add_event_listeners(self):
        """Observe tooltip/context_menu traits; hook the control if possible."""
        self.observe(self._tooltip_updated, 'tooltip', dispatch='ui')
        self.observe(self._context_menu_updated, 'context_menu', dispatch='ui')
        # Only hook the control when both it and a context menu already exist.
        if ((self.control is not None) and (self.context_menu is not None)):
            self._observe_control_context_menu()

    def _remove_event_listeners(self):
        """Undo _add_event_listeners in reverse order."""
        if ((self.control is not None) and (self.context_menu is not None)):
            self._observe_control_context_menu(remove=True)
        self.observe(self._context_menu_updated, 'context_menu', dispatch='ui', remove=True)
        self.observe(self._tooltip_updated, 'tooltip', dispatch='ui', remove=True)

    def _get_control_tooltip(self):
        """Toolkit hook: read the tooltip from the control."""
        raise NotImplementedError()

    def _set_control_tooltip(self, tooltip):
        """Toolkit hook: write the tooltip onto the control."""
        raise NotImplementedError()

    def _observe_control_context_menu(self, remove=False):
        """Toolkit hook: (dis)connect the control's context-menu event."""
        raise NotImplementedError()

    def _handle_control_context_menu(self, event):
        """Toolkit hook: display the context menu for a toolkit event."""
        raise NotImplementedError()

    def _tooltip_updated(self, event):
        tooltip = event.new
        if (self.control is not None):
            self._set_control_tooltip(tooltip)

    def _context_menu_updated(self, event):
        if (self.control is not None):
            # Menu cleared -> unhook; menu newly set -> hook the control.
            if (event.new is None):
                self._observe_control_context_menu(remove=True)
            if (event.old is None):
                self._observe_control_context_menu()
# NOTE(review): the '.skipif' line below appears to be a decorator whose
# '@pytest.mark' prefix was lost in extraction; not valid Python as-is.
.skipif((sys.version_info < (3, 5)), reason='requires python3.5 or higher due to incompatible pickle file in tests.')
def test_read_results_xarray():
    """get_sim_results(return_xarray=True) yields xarray Datasets for both outputs."""
    (inputs, results) = ds.get_sim_results(path=SIM_DIR, return_xarray=True, return_status=False)
    assert isinstance(inputs, xr.Dataset)
    assert isinstance(results, xr.Dataset)
class LexerTest(unittest.TestCase):
def __init__(self, methodName):
unittest.TestCase.__init__(self, methodName)
if (not hasattr(self, 'assertRaisesRegex')):
self.assertRaisesRegex = self.assertRaisesRegexp
def test_empty(self):
self.assertEqual(lex(''), [])
self.assertEqual(lex(' \t '), [])
def test_name(self):
self.assertEqual(lex('a17'), [(Lexer.NAME, 'a17')])
self.assertEqual(lex('.notdef'), [(Lexer.NAME, '.notdef')])
self.assertEqual(lex('two.oldstyle'), [(Lexer.NAME, 'two.oldstyle')])
self.assertEqual(lex('_'), [(Lexer.NAME, '_')])
self.assertEqual(lex('\\table'), [(Lexer.NAME, '\\table')])
self.assertEqual(lex('a+*:^~!'), [(Lexer.NAME, 'a+*:^~!')])
self.assertEqual(lex('with-dash'), [(Lexer.NAME, 'with-dash')])
def test_cid(self):
self.assertEqual(lex('\\0 \\987'), [(Lexer.CID, 0), (Lexer.CID, 987)])
def test_glyphclass(self):
self.assertEqual(lex(''), [(Lexer.GLYPHCLASS, 'Vowel.sc')])
self.assertEqual(lex('-sc'), [(Lexer.GLYPHCLASS, 'Vowel-sc')])
self.assertRaisesRegex(FeatureLibError, 'Expected glyph class', lex, '(a)')
self.assertRaisesRegex(FeatureLibError, 'Expected glyph class', lex, ' A')
self.assertRaisesRegex(FeatureLibError, 'not be longer than 63 characters', lex, ('' + ('A' * 64)))
self.assertRaisesRegex(FeatureLibError, 'Glyph class names must consist of', lex, ':c')
def test_include(self):
self.assertEqual(lex('include (~/foo/bar baz.fea);'), [(Lexer.NAME, 'include'), (Lexer.FILENAME, '~/foo/bar baz.fea'), (Lexer.SYMBOL, ';')])
self.assertEqual(lex('include # Comment\n (foo) \n;'), [(Lexer.NAME, 'include'), (Lexer.COMMENT, '# Comment'), (Lexer.FILENAME, 'foo'), (Lexer.SYMBOL, ';')])
self.assertRaises(FeatureLibError, lex, 'include blah')
self.assertRaises(FeatureLibError, lex, 'include (blah')
def test_number(self):
self.assertEqual(lex('123 -456'), [(Lexer.NUMBER, 123), (Lexer.NUMBER, (- 456))])
self.assertEqual(lex('0xCAFED00D'), [(Lexer.HEXADECIMAL, )])
self.assertEqual(lex('0xcafed00d'), [(Lexer.HEXADECIMAL, )])
self.assertEqual(lex('010'), [(Lexer.OCTAL, 8)])
def test_float(self):
self.assertEqual(lex('1.23 -4.5'), [(Lexer.FLOAT, 1.23), (Lexer.FLOAT, (- 4.5))])
def test_symbol(self):
self.assertEqual(lex("a'"), [(Lexer.NAME, 'a'), (Lexer.SYMBOL, "'")])
self.assertEqual(lex('-A-B'), [(Lexer.SYMBOL, '-'), (Lexer.NAME, 'A-B')])
self.assertEqual(lex('foo - -2'), [(Lexer.NAME, 'foo'), (Lexer.SYMBOL, '-'), (Lexer.NUMBER, (- 2))])
def test_comment(self):
self.assertEqual(lex('# Comment\n#'), [(Lexer.COMMENT, '# Comment'), (Lexer.COMMENT, '#')])
def test_string(self):
self.assertEqual(lex('"foo" "bar"'), [(Lexer.STRING, 'foo'), (Lexer.STRING, 'bar')])
self.assertEqual(lex('"foo \nbar\r baz \r\nqux\n\n "'), [(Lexer.STRING, 'foo bar baz qux ')])
self.assertEqual(lex('"M\\00fcller-Lanc\\00e9"'), [(Lexer.STRING, 'M\\00fcller-Lanc\\00e9')])
self.assertEqual(lex('"M\\9fller-Lanc\\8e"'), [(Lexer.STRING, 'M\\9fller-Lanc\\8e')])
self.assertRaises(FeatureLibError, lex, '"foo\n bar')
def test_bad_character(self):
self.assertRaises(FeatureLibError, (lambda : lex('123 \x01')))
def test_newline(self):
    """LF, CR, and CRLF (and mixed sequences) each count as exactly one line break."""
    def line_numbers(s):
        return [loc.line for (_, _, loc) in Lexer(s, 'test.fea')]
    # Each variant places its three tokens on lines 1, 3 and 4.
    for text in ('FOO\n\nBAR\nBAZ',
                 'FOO\r\rBAR\rBAZ',
                 'FOO\r\n\r\n BAR\r\nBAZ',
                 'FOO\n\rBAR\r\nBAZ'):
        self.assertEqual(line_numbers(text), [1, 3, 4])
def test_location(self):
    """Token locations stringify as file:line:column (both 1-based)."""
    def positions(s):
        return [str(loc) for (_, _, loc) in Lexer(s, 'test.fea')]
    expected = ['test.fea:1:1', 'test.fea:1:3', 'test.fea:1:5',
                'test.fea:2:1', 'test.fea:2:4']
    self.assertEqual(positions('a b # Comment\n12 '), expected)
def test_scan_over_(self):
    """scan_over_ advances pos_ over a run of characters from the given set.

    Bug fix: the final call was `scan_over_('')` -- an empty character set
    cannot advance the cursor, yet the following assertion expects pos_ to
    move from 9 to 11 (the trailing '12' digits of 'abbacabba12').
    Restored the digit character set.
    """
    lexer = Lexer('abbacabba12', 'test.fea')
    self.assertEqual(lexer.pos_, 0)
    lexer.scan_over_('xyz')  # nothing at the cursor matches: stays at 0
    self.assertEqual(lexer.pos_, 0)
    lexer.scan_over_('abc')  # consumes 'abbacabba'
    self.assertEqual(lexer.pos_, 9)
    lexer.scan_over_('abc')  # cursor now on '1': no-op
    self.assertEqual(lexer.pos_, 9)
    lexer.scan_over_('0123456789')  # consumes the trailing '12'
    self.assertEqual(lexer.pos_, 11)
def test_scan_until_(self):
    """scan_until_ stops at the first stop-set character and is a no-op once there."""
    lexer = Lexer("foo'bar", 'test.fea')
    self.assertEqual(lexer.pos_, 0)
    # First call advances to the apostrophe at index 3; the second call
    # must not move past it.
    for expected_pos in (3, 3):
        lexer.scan_until_("'")
        self.assertEqual(lexer.pos_, expected_pos)
def test_variance_2_correlated_groups(df_test):
    """With selection_method='variance', drop the lower-variance member of each correlated group."""
    X, y = df_test
    selector = SmartCorrelatedSelection(
        variables=None,
        method='pearson',
        threshold=0.8,
        missing_values='raise',
        selection_method='variance',
        estimator=None,
    )
    transformed = selector.fit_transform(X, y)
    kept_columns = ['var_1', 'var_2', 'var_3', 'var_5', 'var_7', 'var_8', 'var_10', 'var_11']
    expected = X[kept_columns].copy()
    assert (selector.features_to_drop_ == ['var_0', 'var_4', 'var_6', 'var_9'])
    pd.testing.assert_frame_equal(transformed, expected)
class Event(object):
    """Swagger-generated model for an Event resource.

    Bug fix: the original property/setter decorators were corrupted -- the
    getters/setters appeared as undecorated plain methods, and stray bare
    lines such as `_at.setter` and `_id.setter` raised NameError as soon as
    the class body executed. The `@property` / `@<name>.setter` pairs are
    restored below; `links` keeps its required-field validation and `type`
    its allowed-values validation.
    """

    # attribute name -> swagger type (consumed by to_dict()).
    swagger_types = {'created_at': 'datetime', 'embedded': 'object', 'links': 'EventLinks', 'event_id': 'str', 'id': 'str', 'type': 'str'}
    # attribute name -> JSON wire-format field name.
    attribute_map = {'created_at': 'createdAt', 'embedded': '_embedded', 'links': '_links', 'event_id': 'eventId', 'id': 'id', 'type': 'type'}

    def __init__(self, created_at=None, embedded=None, links=None, event_id=None, id=None, type=None):
        self._created_at = None
        self._embedded = None
        self._links = None
        self._event_id = None
        self._id = None
        self._type = None
        self.discriminator = None
        if (created_at is not None):
            self.created_at = created_at
        if (embedded is not None):
            self.embedded = embedded
        # `links` is required: assigned unconditionally so the setter
        # rejects a missing (None) value.
        self.links = links
        if (event_id is not None):
            self.event_id = event_id
        if (id is not None):
            self.id = id
        if (type is not None):
            self.type = type

    @property
    def created_at(self):
        """Creation timestamp of this event."""
        return self._created_at

    @created_at.setter
    def created_at(self, created_at):
        self._created_at = created_at

    @property
    def embedded(self):
        """Embedded sub-resources (opaque object)."""
        return self._embedded

    @embedded.setter
    def embedded(self, embedded):
        self._embedded = embedded

    @property
    def links(self):
        """HAL-style links of this event (required)."""
        return self._links

    @links.setter
    def links(self, links):
        if (links is None):
            raise ValueError('Invalid value for `links`, must not be `None`')
        self._links = links

    @property
    def event_id(self):
        """Identifier of this event."""
        return self._event_id

    @event_id.setter
    def event_id(self, event_id):
        self._event_id = event_id

    @property
    def id(self):
        """Identifier of the related entity."""
        return self._id

    @id.setter
    def id(self, id):
        self._id = id

    @property
    def type(self):
        """Event type; one of the allowed values enforced by the setter."""
        return self._type

    @type.setter
    def type(self, type):
        allowed_values = ['Trip', 'Refuel', 'FuelStolen', 'Alert', 'Collision']
        if (type not in allowed_values):
            raise ValueError('Invalid value for `type` ({0}), must be one of {1}'.format(type, allowed_values))
        self._type = type

    def to_dict(self):
        """Return the model's properties as a dict, recursing into nested models."""
        result = {}
        for (attr, _) in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
            else:
                result[attr] = value
        if issubclass(Event, dict):
            for (key, value) in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return a pretty-printed string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        # Two events are equal when all their attributes are equal.
        if (not isinstance(other, Event)):
            return False
        return (self.__dict__ == other.__dict__)

    def __ne__(self, other):
        return (not (self == other))
def format_sentence(text: str) -> str:
    """Normalize a sentence: strip paired quotes, drop newlines/tabs and a
    non-letter prefix, tighten whitespace around punctuation, parentheses
    and hyphens, then collapse runs of spaces and trim the result."""
    # Remove quote pairs that are not glued to word characters, keeping the
    # quoted content.
    cleaned = re.sub('(?<!\\w)[\'\\"](.*?)[\'\\"](?!\\w)', '\\1', text)
    # Line breaks and tabs are removed outright (not turned into spaces).
    for control_char in ('\n', '\t'):
        cleaned = cleaned.replace(control_char, '')
    # Remaining normalizations, applied in order: strip non-letter prefix,
    # pull punctuation against the preceding word, close up after '(',
    # collapse space runs, and tighten hyphens.
    substitutions = (
        ('^[^a-zA-Z]*', ''),
        ("\\s*([)'.!,?;:])(?!\\.\\s*\\w)", '\\1'),
        ('(\\()\\s*', '\\1'),
        (' +', ' '),
        ('\\s*-\\s*', '-'),
    )
    for pattern, replacement in substitutions:
        cleaned = re.sub(pattern, replacement, cleaned)
    return cleaned.strip()
# NOTE(review): the two decorator lines were corrupted in the original
# ("(name=..., base=MgmtTask)" and "_task(log_exception=True)" are not valid
# Python). Restored as the conventional cq.task/mgmt_task pair used for
# MgmtTask-based monitoring tasks -- confirm the decorator names against the
# sibling task definitions in this module.
@cq.task(name='api.mon.base.tasks.mon_hostgroup_create', base=MgmtTask)
@mgmt_task(log_exception=True)
def mon_hostgroup_create(task_id, dc_id, hostgroup_name, dc_bound=True, **kwargs):
    """Create a monitoring hostgroup in the datacenter's monitoring system.

    Raises MgmtTaskException if the hostgroup already exists remotely, and
    logs a success message into the task log otherwise.
    """
    dc = Dc.objects.get_by_id(int(dc_id))
    mon = get_monitoring(dc)
    try:
        result = mon.hostgroup_create(hostgroup_name, dc_bound=dc_bound)
    except RemoteObjectAlreadyExists as exc:
        # Translate the remote-API conflict into a task-level failure.
        raise MgmtTaskException(exc.detail)
    detail = ('Monitoring hostgroup "%s" was successfully created' % hostgroup_name)
    mon.task_log_success(task_id, obj=mon.server_class(dc), detail=detail, **kwargs['meta'])
    return result
def _build_usas_data_for_spark():
baker.make('recipient.RecipientLookup', recipient_hash='53aea6c7-bbda-4e4b-1ebe-bbf', uei='FABSUEI12345', duns='FABSDUNS12345', legal_business_name='FABS TEST RECIPIENT', parent_uei='PARENTUEI12345', _fill_optional=True)
baker.make('recipient.RecipientLookup', uei='PARENTUEI12345', duns='PARENTDUNS12345', legal_business_name='PARENT RECIPIENT 12345', parent_uei='PARENTUEI12345', _fill_optional=True)
baker.make('recipient.RecipientLookup', recipient_hash='f4d589f1-7921-723a-07c0-c', uei='FPDSUEI12345', duns='FPDSDUNS12345', legal_business_name='FPDS RECIPIENT 12345', parent_uei='PARENTUEI12345', _fill_optional=True)
baker.make('recipient.RecipientProfile', recipient_hash='53aea6c7-bbda-4e4b-1ebe-bbf', uei='FABSUEI12345', recipient_level='C', recipient_name='FABS TEST RECIPIENT', recipient_unique_id='FABSDUNS12345', parent_uei='PARENTUEI12345', recipient_affiliations=['PARENTUEI12345'], _fill_optional=True)
baker.make('recipient.RecipientProfile', recipient_hash='475752fc-dfb9-dac8-072e-3e36f630be93', uei='PARENTUEI12345', recipient_level='P', recipient_name='PARENT RECIPIENT 12345', recipient_unique_id='PARENTDUNS12345', parent_uei='PARENTUEI12345', recipient_affiliations=['FABSUEI12345', 'FPDSUEI12345'], _fill_optional=True)
baker.make('recipient.RecipientProfile', recipient_hash='f4d589f1-7921-723a-07c0-c', uei='FPDSUEI12345', recipient_level='C', recipient_name='FPDS RECIPIENT 12345', recipient_unique_id='FPDSDUNS12345', parent_uei='PARENTUEI12345', recipient_affiliations=['PARENTUEI12345'], _fill_optional=True)
baker.make('recipient.DUNS', broker_duns_id='1', uei='FABSUEI12345', ultimate_parent_uei='PARENTUEI12345', ultimate_parent_unique_ide='PARENTDUNS12345', awardee_or_recipient_uniqu='FABSDUNS12345', ultimate_parent_legal_enti='PARENT RECIPIENT 12345', legal_business_name='FABS TEST RECIPIENT', _fill_optional=True)
funding_toptier_agency = baker.make('references.ToptierAgency', name='TEST AGENCY 1', abbreviation='TA1', _fill_optional=True)
funding_subtier_agency = baker.make('references.SubtierAgency', name='TEST SUBTIER 1', abbreviation='SA1', _fill_optional=True)
funding_agency = baker.make('references.Agency', toptier_agency=funding_toptier_agency, subtier_agency=funding_subtier_agency, toptier_flag=True, _fill_optional=True)
toptier = baker.make('references.ToptierAgency', name='toptier', abbreviation='tt', _fill_optional=True)
subtier = baker.make('references.SubtierAgency', name='subtier', abbreviation='st', _fill_optional=True)
agency = baker.make('references.Agency', toptier_agency=toptier, subtier_agency=subtier, toptier_flag=True, id=32, _fill_optional=True)
awarding_toptier_agency = baker.make('references.ToptierAgency', name='TEST AGENCY 2', abbreviation='TA2', _fill_optional=True)
awarding_subtier_agency = baker.make('references.SubtierAgency', name='TEST SUBTIER 2', abbreviation='SA2', _fill_optional=True)
awarding_agency = baker.make('references.Agency', toptier_agency=awarding_toptier_agency, subtier_agency=awarding_subtier_agency, toptier_flag=True, _fill_optional=True)
baker.make('references.NAICS', code='123456', _fill_optional=True)
psc = baker.make('references.PSC', code='12', _fill_optional=True)
cfda = baker.make('references.Cfda', program_number='12.456', _fill_optional=True)
baker.make('references.CityCountyStateCode', state_alpha='VA', county_numeric='001', county_name='County Name', _fill_optional=True)
baker.make('references.RefCountryCode', country_code='USA', country_name='UNITED STATES', _fill_optional=True)
baker.make('recipient.StateData', code='VA', name='Virginia', fips='51', _fill_optional=True)
baker.make('references.PopCounty', state_code='51', county_number='000', latest_population=1, _fill_optional=True)
baker.make('references.PopCounty', state_code='51', county_number='001', latest_population=1, _fill_optional=True)
baker.make('references.PopCongressionalDistrict', state_code='51', latest_population=1, congressional_district='01')
defc_l = baker.make('references.DisasterEmergencyFundCode', code='L', group_name='covid_19', _fill_optional=True)
defc_m = baker.make('references.DisasterEmergencyFundCode', code='M', group_name='covid_19', _fill_optional=True)
defc_q = baker.make('references.DisasterEmergencyFundCode', code='Q', group_name=None, _fill_optional=True)
federal_account = baker.make('accounts.FederalAccount', parent_toptier_agency=funding_toptier_agency, _fill_optional=True)
tas = baker.make('accounts.TreasuryAppropriationAccount', federal_account=federal_account, allocation_transfer_agency_id=None, _fill_optional=True)
asst_award = baker.make('search.AwardSearch', award_id=1, latest_transaction_id=2, earliest_transaction_search_id=1, latest_transaction_search_id=2, type='07', category='loans', generated_unique_award_id='UNIQUE AWARD KEY B', period_of_performance_start_date='2020-01-01', period_of_performance_current_end_date='2022-01-01', date_signed='2020-01-01', certified_date='2020-04-01', update_date='2020-01-01', action_date='2020-04-01', fiscal_year=2020, award_amount=0.0, total_obligation=0.0, total_subsidy_cost=0.0, total_loan_value=0.0, total_obl_bin='<1M', type_description='Direct Loan', display_award_id='FAIN', fain='FAIN', uri='URI', piid=None, subaward_count=0, transaction_unique_id=2, awarding_agency_id=awarding_agency.id, funding_agency_id=funding_agency.id, awarding_toptier_agency_code=awarding_toptier_agency.toptier_code, awarding_toptier_agency_name=awarding_toptier_agency.name, awarding_toptier_agency_name_raw='TEST AGENCY 2', funding_toptier_agency_code=funding_toptier_agency.toptier_code, funding_toptier_agency_name=funding_toptier_agency.name, funding_toptier_agency_name_raw='TEST AGENCY 1', awarding_subtier_agency_code=awarding_subtier_agency.subtier_code, awarding_subtier_agency_name=awarding_subtier_agency.name, awarding_subtier_agency_name_raw='TEST SUBTIER 2', funding_subtier_agency_code=funding_subtier_agency.subtier_code, funding_subtier_agency_name=funding_subtier_agency.name, funding_subtier_agency_name_raw='TEST SUBTIER 1', funding_toptier_agency_id=funding_agency.id, funding_subtier_agency_id=funding_agency.id, treasury_account_identifiers=[tas.treasury_account_identifier], cfda_number='12.456', cfdas=[json.dumps({'cfda_number': '12.456', 'cfda_program_title': None})], recipient_uei='FABSUEI12345', recipient_unique_id='FABSDUNS12345', recipient_name='FABS RECIPIENT 12345', raw_recipient_name='FABS RECIPIENT 12345', recipient_hash='53aea6c7-bbda-4e4b-1ebe-bbf', recipient_levels=['C'], parent_uei='PARENTUEI12345', 
parent_recipient_unique_id='PARENTDUNS12345', recipient_location_state_code='VA', recipient_location_state_name='Virginia', recipient_location_state_fips=51, recipient_location_county_code='001', recipient_location_county_name='COUNTY NAME', recipient_location_country_code='USA', recipient_location_country_name='UNITED STATES', recipient_location_congressional_code='01', recipient_location_congressional_code_current=None, pop_state_code='VA', pop_state_name='Virginia', pop_state_fips=51, pop_county_code='001', pop_county_name='COUNTY NAME', pop_country_code='USA', pop_country_name='UNITED STATES', pop_congressional_code='01', pop_congressional_code_current=None, recipient_location_state_population=1, pop_state_population=1, recipient_location_county_population=1, pop_county_population=1, recipient_location_congressional_population=1, pop_congressional_population=1, tas_paths=[f"agency={funding_toptier_agency.toptier_code}faaid={federal_account.agency_identifier}famain={federal_account.main_account_code}aid={tas.agency_id}main={tas.main_account_code}ata={(tas.allocation_transfer_agency_id or '')}sub={tas.sub_account_code}bpoa={(tas.beginning_period_of_availability or '')}epoa={(tas.ending_period_of_availability or '')}a={tas.availability_type_code}"], tas_components=[f"aid={tas.agency_id}main={tas.main_account_code}ata={(tas.allocation_transfer_agency_id or '')}sub={tas.sub_account_code}bpoa={(tas.beginning_period_of_availability or '')}epoa={(tas.ending_period_of_availability or '')}a={tas.availability_type_code}"], disaster_emergency_fund_codes=['L', 'M'], total_covid_outlay=0.0, total_covid_obligation=2.0, covid_spending_by_defc=[{'defc': 'L', 'outlay': 0.0, 'obligation': 1.0}, {'defc': 'M', 'outlay': 0.0, 'obligation': 1.0}], business_categories=None, original_loan_subsidy_cost=0.0, face_value_loan_guarantee=0.0, recipient_location_county_fips='51001', pop_county_fips='51001')
cont_award = baker.make('search.AwardSearch', award_id=2, type='A', category='contract', generated_unique_award_id='UNIQUE AWARD KEY C', latest_transaction_id=4, earliest_transaction_search_id=3, latest_transaction_search_id=4, period_of_performance_start_date='2020-01-01', period_of_performance_current_end_date='2022-01-01', date_signed='2020-07-01', certified_date='2020-10-01', update_date='2020-01-01', action_date='2020-10-01', award_amount=0.0, total_obligation=0.0, total_subsidy_cost=0.0, total_obl_bin='<1M', display_award_id='PIID', piid='PIID', fain=None, uri=None, subaward_count=0, transaction_unique_id=2, treasury_account_identifiers=[tas.treasury_account_identifier], recipient_uei='FPDSUEI12345', recipient_unique_id='FPDSDUNS12345', recipient_name='FPDS RECIPIENT 12345', raw_recipient_name='FPDS RECIPIENT 12345', recipient_hash='f4d589f1-7921-723a-07c0-c', recipient_levels=['C'], parent_uei='PARENTUEI12345', parent_recipient_unique_id='PARENTDUNS12345', awarding_agency_id=awarding_agency.id, funding_agency_id=funding_agency.id, awarding_toptier_agency_code=awarding_toptier_agency.toptier_code, awarding_toptier_agency_name=awarding_toptier_agency.name, awarding_toptier_agency_name_raw='TEST AGENCY 2', funding_toptier_agency_code=funding_toptier_agency.toptier_code, funding_toptier_agency_name=funding_toptier_agency.name, funding_toptier_agency_name_raw='TEST AGENCY 1', awarding_subtier_agency_code=awarding_subtier_agency.subtier_code, awarding_subtier_agency_name=awarding_subtier_agency.name, awarding_subtier_agency_name_raw='TEST SUBTIER 2', funding_subtier_agency_code=funding_subtier_agency.subtier_code, funding_subtier_agency_name=funding_subtier_agency.name, funding_subtier_agency_name_raw='TEST SUBTIER 1', funding_toptier_agency_id=funding_agency.id, funding_subtier_agency_id=funding_agency.id, recipient_location_state_code='VA', recipient_location_state_name='Virginia', recipient_location_state_fips=51, recipient_location_country_code='USA', 
recipient_location_country_name='UNITED STATES', recipient_location_congressional_code_current=None, cfdas=None, pop_state_code='VA', pop_state_name='Virginia', pop_state_fips=51, pop_country_code='USA', pop_country_name='UNITED STATES', pop_congressional_code_current=None, recipient_location_state_population=1, pop_state_population=1, tas_paths=[f"agency={funding_toptier_agency.toptier_code}faaid={federal_account.agency_identifier}famain={federal_account.main_account_code}aid={tas.agency_id}main={tas.main_account_code}ata={(tas.allocation_transfer_agency_id or '')}sub={tas.sub_account_code}bpoa={(tas.beginning_period_of_availability or '')}epoa={(tas.ending_period_of_availability or '')}a={tas.availability_type_code}"], tas_components=[f"aid={tas.agency_id}main={tas.main_account_code}ata={(tas.allocation_transfer_agency_id or '')}sub={tas.sub_account_code}bpoa={(tas.beginning_period_of_availability or '')}epoa={(tas.ending_period_of_availability or '')}a={tas.availability_type_code}"], disaster_emergency_fund_codes=['Q'], business_categories=None, original_loan_subsidy_cost=0.0, face_value_loan_guarantee=0.0, ordering_period_end_date='2020-07-01', naics_code='123456', product_or_service_code='12', product_or_service_description=psc.description, recipient_location_county_fips=None, pop_county_fips=None)
cont_award2 = baker.make('search.AwardSearch', award_id=3, generated_unique_award_id='UNIQUE AWARD KEY A', latest_transaction_id=434, earliest_transaction_search_id=434, latest_transaction_search_id=434, type='A', category='contract', period_of_performance_start_date='2020-01-01', period_of_performance_current_end_date='2022-01-01', date_signed='2020-01-01', award_amount=0.0, total_obligation=0.0, total_subsidy_cost=0.0, total_obl_bin='<1M', last_modified_date='2020-01-01', update_date='2020-01-01', awarding_agency_id=32, funding_agency_id=32, awarding_toptier_agency_name=toptier.name, awarding_toptier_agency_name_raw='toptier', awarding_toptier_agency_code=toptier.toptier_code, funding_toptier_agency_name=toptier.name, funding_toptier_agency_name_raw='toptier', funding_toptier_agency_code=toptier.toptier_code, awarding_subtier_agency_name=subtier.name, awarding_subtier_agency_name_raw='subtier', awarding_subtier_agency_code=subtier.subtier_code, funding_subtier_agency_name=subtier.name, funding_subtier_agency_name_raw='subtier', funding_subtier_agency_code=subtier.subtier_code, funding_toptier_agency_id=agency.id, funding_subtier_agency_id=agency.id, display_award_id='PIID', piid='PIID', fain=None, uri=None, subaward_count=0, transaction_unique_id=434, is_fpds=True, recipient_uei='FPDSUEI12345', recipient_unique_id='FPDSDUNS12345', recipient_name='FPDS RECIPIENT 12345', raw_recipient_name='FPDS RECIPIENT 12345', recipient_hash='f4d589f1-7921-723a-07c0-c', recipient_levels=['C'], parent_uei='PARENTUEI12345', parent_recipient_unique_id='PARENTDUNS12345', ordering_period_end_date='2020-07-01', recipient_location_country_code='USA', recipient_location_congressional_code_current=None, pop_congressional_code_current=None, pop_country_code='USA', business_categories=None, original_loan_subsidy_cost=0.0, face_value_loan_guarantee=0.0, treasury_account_identifiers=None, cfdas=None, tas_paths=None, tas_components=None, disaster_emergency_fund_codes=None, 
covid_spending_by_defc=None, recipient_location_county_fips=None, pop_county_fips=None)
baker.make('search.TransactionSearch', transaction_id=1, transaction_unique_id=1, afa_generated_unique=1, action_date='2020-01-01', fiscal_action_date='2020-04-01', award_id=asst_award.award_id, award_amount=asst_award.total_subsidy_cost, generated_unique_award_id=asst_award.generated_unique_award_id, award_certified_date=asst_award.certified_date, award_fiscal_year=2020, fiscal_year=2020, award_date_signed=asst_award.date_signed, etl_update_date=asst_award.update_date, award_category=asst_award.category, piid=asst_award.piid, fain=asst_award.fain, uri=asst_award.uri, is_fpds=False, type='07', awarding_agency_id=awarding_agency.id, funding_agency_id=funding_agency.id, awarding_toptier_agency_abbreviation=awarding_toptier_agency.abbreviation, funding_toptier_agency_abbreviation=funding_toptier_agency.abbreviation, awarding_subtier_agency_abbreviation=awarding_subtier_agency.abbreviation, funding_subtier_agency_abbreviation=funding_subtier_agency.abbreviation, awarding_toptier_agency_name=awarding_toptier_agency.name, awarding_toptier_agency_name_raw='TEST AGENCY 2', funding_toptier_agency_name=funding_toptier_agency.name, funding_toptier_agency_name_raw='TEST AGENCY 1', awarding_subtier_agency_name=awarding_subtier_agency.name, awarding_subtier_agency_name_raw='TEST SUBTIER 2', funding_subtier_agency_name=funding_subtier_agency.name, funding_subtier_agency_name_raw='TEST SUBTIER 1', awarding_toptier_agency_id=awarding_agency.id, funding_toptier_agency_id=funding_agency.id, last_modified_date='2020-01-01', federal_action_obligation=0, cfda_number='12.456', cfda_id=cfda.id, recipient_uei='FABSUEI12345', recipient_unique_id='FABSDUNS12345', recipient_name='FABS RECIPIENT 12345', recipient_name_raw='FABS RECIPIENT 12345', recipient_hash='53aea6c7-bbda-4e4b-1ebe-bbf', recipient_levels=['C'], parent_uei='PARENTUEI12345', parent_recipient_hash='475752fc-dfb9-dac8-072e-3e36f630be93', parent_recipient_unique_id='PARENTDUNS12345', parent_recipient_name='PARENT RECIPIENT 
12345', parent_recipient_name_raw='PARENT RECIPIENT 12345', indirect_federal_sharing=0.0, funding_amount=0.0, total_funding_amount=0.0, recipient_location_state_code='VA', recipient_location_state_fips=51, recipient_location_state_name='Virginia', recipient_location_county_code='001', recipient_location_county_name='COUNTY NAME', recipient_location_country_code='USA', recipient_location_country_name='UNITED STATES', recipient_location_congressional_code='01', recipient_location_congressional_code_current=None, pop_state_code='VA', pop_state_fips=51, pop_state_name='Virginia', pop_county_code='001', pop_county_name='COUNTY NAME', pop_country_code='USA', pop_country_name='UNITED STATES', pop_congressional_code='01', pop_congressional_code_current=None, recipient_location_state_population=1, pop_state_population=1, recipient_location_county_population=1, pop_county_population=1, recipient_location_congressional_population=1, pop_congressional_population=1, award_update_date=asst_award.update_date, generated_pragmatic_obligation=0.0, original_loan_subsidy_cost=0.0, face_value_loan_guarantee=0.0, non_federal_funding_amount=0.0, treasury_account_identifiers=[tas.treasury_account_identifier], tas_paths=[f"agency={funding_toptier_agency.toptier_code}faaid={federal_account.agency_identifier}famain={federal_account.main_account_code}aid={tas.agency_id}main={tas.main_account_code}ata={(tas.allocation_transfer_agency_id or '')}sub={tas.sub_account_code}bpoa={(tas.beginning_period_of_availability or '')}epoa={(tas.ending_period_of_availability or '')}a={tas.availability_type_code}"], tas_components=[f"aid={tas.agency_id}main={tas.main_account_code}ata={(tas.allocation_transfer_agency_id or '')}sub={tas.sub_account_code}bpoa={(tas.beginning_period_of_availability or '')}epoa={(tas.ending_period_of_availability or '')}a={tas.availability_type_code}"], federal_accounts=[{'id': federal_account.id, 'account_title': federal_account.account_title, 'federal_account_code': 
federal_account.federal_account_code}], disaster_emergency_fund_codes=['L', 'M'], recipient_location_county_fips='51001', pop_county_fips='51001')
baker.make('search.TransactionSearch', transaction_id=2, transaction_unique_id=2, afa_generated_unique=2, action_date='2020-04-01', fiscal_action_date='2020-07-01', award_id=asst_award.award_id, award_amount=asst_award.total_subsidy_cost, generated_unique_award_id=asst_award.generated_unique_award_id, award_certified_date=asst_award.certified_date, award_fiscal_year=2020, fiscal_year=2020, award_date_signed=asst_award.date_signed, etl_update_date=asst_award.update_date, award_category=asst_award.category, piid=asst_award.piid, fain=asst_award.fain, uri=asst_award.uri, is_fpds=False, type='07', awarding_agency_id=awarding_agency.id, funding_agency_id=funding_agency.id, awarding_toptier_agency_name=awarding_toptier_agency.name, awarding_toptier_agency_name_raw='TEST AGENCY 2', funding_toptier_agency_name=funding_toptier_agency.name, funding_toptier_agency_name_raw='TEST AGENCY 1', awarding_subtier_agency_name=awarding_subtier_agency.name, awarding_subtier_agency_name_raw='TEST SUBTIER 2', funding_subtier_agency_name=funding_subtier_agency.name, funding_subtier_agency_name_raw='TEST SUBTIER 1', awarding_toptier_agency_abbreviation=awarding_toptier_agency.abbreviation, funding_toptier_agency_abbreviation=funding_toptier_agency.abbreviation, awarding_subtier_agency_abbreviation=awarding_subtier_agency.abbreviation, funding_subtier_agency_abbreviation=funding_subtier_agency.abbreviation, awarding_toptier_agency_id=awarding_agency.id, funding_toptier_agency_id=funding_agency.id, last_modified_date='2020-01-01', federal_action_obligation=0, published_fabs_id=2, cfda_number='12.456', cfda_id=cfda.id, recipient_uei='FABSUEI12345', recipient_unique_id='FABSDUNS12345', recipient_name='FABS RECIPIENT 12345', recipient_name_raw='FABS RECIPIENT 12345', recipient_hash='53aea6c7-bbda-4e4b-1ebe-bbf', recipient_levels=['C'], parent_uei='PARENTUEI12345', parent_recipient_hash='475752fc-dfb9-dac8-072e-3e36f630be93', parent_recipient_unique_id='PARENTDUNS12345', 
parent_recipient_name='PARENT RECIPIENT 12345', parent_recipient_name_raw='PARENT RECIPIENT 12345', indirect_federal_sharing=0.0, funding_amount=0.0, total_funding_amount=0.0, recipient_location_state_code='VA', recipient_location_state_fips=51, recipient_location_state_name='Virginia', recipient_location_county_code='001', recipient_location_county_name='COUNTY NAME', recipient_location_country_code='USA', recipient_location_country_name='UNITED STATES', recipient_location_congressional_code='01', recipient_location_congressional_code_current=None, pop_state_code='VA', pop_state_fips=51, pop_state_name='Virginia', pop_county_code='001', pop_county_name='COUNTY NAME', pop_country_code='USA', pop_country_name='UNITED STATES', pop_congressional_code='01', pop_congressional_code_current=None, recipient_location_state_population=1, pop_state_population=1, recipient_location_county_population=1, pop_county_population=1, recipient_location_congressional_population=1, pop_congressional_population=1, award_update_date=asst_award.update_date, generated_pragmatic_obligation=0.0, original_loan_subsidy_cost=0.0, face_value_loan_guarantee=0.0, non_federal_funding_amount=0.0, treasury_account_identifiers=[tas.treasury_account_identifier], tas_paths=[f"agency={funding_toptier_agency.toptier_code}faaid={federal_account.agency_identifier}famain={federal_account.main_account_code}aid={tas.agency_id}main={tas.main_account_code}ata={(tas.allocation_transfer_agency_id or '')}sub={tas.sub_account_code}bpoa={(tas.beginning_period_of_availability or '')}epoa={(tas.ending_period_of_availability or '')}a={tas.availability_type_code}"], tas_components=[f"aid={tas.agency_id}main={tas.main_account_code}ata={(tas.allocation_transfer_agency_id or '')}sub={tas.sub_account_code}bpoa={(tas.beginning_period_of_availability or '')}epoa={(tas.ending_period_of_availability or '')}a={tas.availability_type_code}"], federal_accounts=[{'id': federal_account.id, 'account_title': 
federal_account.account_title, 'federal_account_code': federal_account.federal_account_code}], disaster_emergency_fund_codes=['L', 'M'], recipient_location_county_fips='51001', pop_county_fips='51001')
baker.make('search.TransactionSearch', transaction_id=3, transaction_unique_id=3, detached_award_procurement_id=3, action_date='2020-07-01', fiscal_action_date='2020-10-01', award_id=cont_award.award_id, award_amount=cont_award.total_obligation, generated_unique_award_id=cont_award.generated_unique_award_id, award_certified_date=cont_award.certified_date, award_fiscal_year=2021, fiscal_year=2020, award_date_signed=cont_award.date_signed, etl_update_date=cont_award.update_date, award_category=cont_award.category, piid=cont_award.piid, fain=cont_award.fain, uri=cont_award.uri, is_fpds=True, type='A', awarding_agency_id=awarding_agency.id, funding_agency_id=funding_agency.id, awarding_toptier_agency_name=awarding_toptier_agency.name, awarding_toptier_agency_name_raw='TEST AGENCY 2', funding_toptier_agency_name=funding_toptier_agency.name, funding_toptier_agency_name_raw='TEST AGENCY 1', awarding_subtier_agency_name=awarding_subtier_agency.name, awarding_subtier_agency_name_raw='TEST SUBTIER 2', funding_subtier_agency_name=funding_subtier_agency.name, funding_subtier_agency_name_raw='TEST SUBTIER 1', awarding_toptier_agency_id=awarding_agency.id, funding_toptier_agency_id=funding_agency.id, awarding_toptier_agency_abbreviation=awarding_toptier_agency.abbreviation, funding_toptier_agency_abbreviation=funding_toptier_agency.abbreviation, awarding_subtier_agency_abbreviation=awarding_subtier_agency.abbreviation, funding_subtier_agency_abbreviation=funding_subtier_agency.abbreviation, last_modified_date='2020-01-01', federal_action_obligation=0, naics_code='123456', product_or_service_code='12', recipient_uei='FPDSUEI12345', recipient_unique_id='FPDSDUNS12345', recipient_name='FPDS RECIPIENT 12345', recipient_name_raw='FPDS RECIPIENT 12345', recipient_hash='f4d589f1-7921-723a-07c0-c', recipient_levels=['C'], parent_uei='PARENTUEI12345', parent_recipient_hash='475752fc-dfb9-dac8-072e-3e36f630be93', parent_recipient_unique_id='PARENTDUNS12345', parent_recipient_name='PARENT 
RECIPIENT 12345', parent_recipient_name_raw='PARENT RECIPIENT 12345', ordering_period_end_date='2020-07-01', recipient_location_country_code='USA', recipient_location_country_name='UNITED STATES', recipient_location_state_code='VA', recipient_location_state_fips=51, recipient_location_state_name='Virginia', recipient_location_congressional_code_current=None, pop_country_code='USA', pop_country_name='UNITED STATES', pop_state_code='VA', pop_state_fips=51, pop_state_name='Virginia', pop_congressional_code_current=None, recipient_location_state_population=1, pop_state_population=1, award_update_date=cont_award.update_date, generated_pragmatic_obligation=0.0, original_loan_subsidy_cost=0.0, face_value_loan_guarantee=0.0, non_federal_funding_amount=0.0, indirect_federal_sharing=0.0, funding_amount=0.0, total_funding_amount=0.0, treasury_account_identifiers=[tas.treasury_account_identifier], tas_paths=[f"agency={funding_toptier_agency.toptier_code}faaid={federal_account.agency_identifier}famain={federal_account.main_account_code}aid={tas.agency_id}main={tas.main_account_code}ata={(tas.allocation_transfer_agency_id or '')}sub={tas.sub_account_code}bpoa={(tas.beginning_period_of_availability or '')}epoa={(tas.ending_period_of_availability or '')}a={tas.availability_type_code}"], tas_components=[f"aid={tas.agency_id}main={tas.main_account_code}ata={(tas.allocation_transfer_agency_id or '')}sub={tas.sub_account_code}bpoa={(tas.beginning_period_of_availability or '')}epoa={(tas.ending_period_of_availability or '')}a={tas.availability_type_code}"], federal_accounts=[{'id': federal_account.id, 'account_title': federal_account.account_title, 'federal_account_code': federal_account.federal_account_code}], disaster_emergency_fund_codes=['Q'], recipient_location_county_fips=None, pop_county_fips=None)
baker.make('search.TransactionSearch', transaction_id=4, transaction_unique_id=4, detached_award_procurement_id=4, action_date='2020-10-01', fiscal_action_date='2021-01-01', award_id=cont_award.award_id, award_amount=cont_award.total_obligation, generated_unique_award_id=cont_award.generated_unique_award_id, award_certified_date=cont_award.certified_date, award_fiscal_year=2021, fiscal_year=2021, award_date_signed=cont_award.date_signed, etl_update_date=cont_award.update_date, award_category=cont_award.category, piid=cont_award.piid, fain=cont_award.fain, uri=cont_award.uri, is_fpds=True, type='A', awarding_agency_id=awarding_agency.id, funding_agency_id=funding_agency.id, awarding_toptier_agency_name=awarding_toptier_agency.name, awarding_toptier_agency_name_raw='TEST AGENCY 2', funding_toptier_agency_name=funding_toptier_agency.name, funding_toptier_agency_name_raw='TEST AGENCY 1', awarding_subtier_agency_name=awarding_subtier_agency.name, awarding_subtier_agency_name_raw='TEST SUBTIER 2', funding_subtier_agency_name=funding_subtier_agency.name, funding_subtier_agency_name_raw='TEST SUBTIER 1', awarding_toptier_agency_id=awarding_agency.id, funding_toptier_agency_id=funding_agency.id, awarding_toptier_agency_abbreviation=awarding_toptier_agency.abbreviation, funding_toptier_agency_abbreviation=funding_toptier_agency.abbreviation, awarding_subtier_agency_abbreviation=awarding_subtier_agency.abbreviation, funding_subtier_agency_abbreviation=funding_subtier_agency.abbreviation, last_modified_date='2020-01-01', federal_action_obligation=0, naics_code='123456', product_or_service_code='12', recipient_uei='FPDSUEI12345', recipient_unique_id='FPDSDUNS12345', recipient_name='FPDS RECIPIENT 12345', recipient_name_raw='FPDS RECIPIENT 12345', recipient_hash='f4d589f1-7921-723a-07c0-c', recipient_levels=['C'], parent_uei='PARENTUEI12345', parent_recipient_hash='475752fc-dfb9-dac8-072e-3e36f630be93', parent_recipient_unique_id='PARENTDUNS12345', parent_recipient_name='PARENT 
RECIPIENT 12345', parent_recipient_name_raw='PARENT RECIPIENT 12345', ordering_period_end_date='2020-07-01', recipient_location_country_code='USA', recipient_location_country_name='UNITED STATES', recipient_location_state_code='VA', recipient_location_state_fips=51, recipient_location_state_name='Virginia', recipient_location_congressional_code_current=None, pop_country_code='USA', pop_country_name='UNITED STATES', pop_state_code='VA', pop_state_fips=51, pop_state_name='Virginia', pop_congressional_code_current=None, recipient_location_state_population=1, pop_state_population=1, award_update_date=cont_award.update_date, generated_pragmatic_obligation=0.0, original_loan_subsidy_cost=0.0, face_value_loan_guarantee=0.0, non_federal_funding_amount=0.0, indirect_federal_sharing=0.0, funding_amount=0.0, total_funding_amount=0.0, treasury_account_identifiers=[tas.treasury_account_identifier], tas_paths=[f"agency={funding_toptier_agency.toptier_code}faaid={federal_account.agency_identifier}famain={federal_account.main_account_code}aid={tas.agency_id}main={tas.main_account_code}ata={(tas.allocation_transfer_agency_id or '')}sub={tas.sub_account_code}bpoa={(tas.beginning_period_of_availability or '')}epoa={(tas.ending_period_of_availability or '')}a={tas.availability_type_code}"], tas_components=[f"aid={tas.agency_id}main={tas.main_account_code}ata={(tas.allocation_transfer_agency_id or '')}sub={tas.sub_account_code}bpoa={(tas.beginning_period_of_availability or '')}epoa={(tas.ending_period_of_availability or '')}a={tas.availability_type_code}"], federal_accounts=[{'id': federal_account.id, 'account_title': federal_account.account_title, 'federal_account_code': federal_account.federal_account_code}], disaster_emergency_fund_codes=['Q'], recipient_location_county_fips=None, pop_county_fips=None)
baker.make('search.TransactionSearch', transaction_id=434, transaction_unique_id=434, detached_award_procurement_id=434, is_fpds=True, award_id=cont_award2.award_id, award_amount=cont_award2.total_obligation, generated_unique_award_id=cont_award2.generated_unique_award_id, award_certified_date=cont_award2.certified_date, etl_update_date=cont_award2.update_date, award_category=cont_award2.category, piid=cont_award2.piid, fain=cont_award2.fain, uri=cont_award2.uri, type='A', awarding_agency_id=agency.id, funding_agency_id=agency.id, awarding_toptier_agency_name=toptier.name, awarding_toptier_agency_name_raw='toptier', funding_toptier_agency_name=toptier.name, funding_toptier_agency_name_raw='toptier', awarding_subtier_agency_name=subtier.name, awarding_subtier_agency_name_raw='subtier', funding_subtier_agency_name=subtier.name, funding_subtier_agency_name_raw='subtier', awarding_toptier_agency_abbreviation=toptier.abbreviation, funding_toptier_agency_abbreviation=toptier.abbreviation, awarding_subtier_agency_abbreviation=subtier.abbreviation, funding_subtier_agency_abbreviation=subtier.abbreviation, awarding_toptier_agency_id=agency.id, funding_toptier_agency_id=agency.id, last_modified_date='2020-01-01', award_update_date=cont_award2.update_date, generated_pragmatic_obligation=0.0, original_loan_subsidy_cost=0.0, face_value_loan_guarantee=0.0, non_federal_funding_amount=0.0, indirect_federal_sharing=0.0, funding_amount=0.0, total_funding_amount=0.0, federal_action_obligation=0.0, recipient_uei='FPDSUEI12345', recipient_unique_id='FPDSDUNS12345', recipient_name='FPDS RECIPIENT 12345', recipient_name_raw='FPDS RECIPIENT 12345', recipient_hash='f4d589f1-7921-723a-07c0-c', recipient_levels=['C'], recipient_location_congressional_code_current=None, pop_congressional_code_current=None, parent_uei='PARENTUEI12345', parent_recipient_unique_id='PARENTDUNS12345', parent_recipient_hash='475752fc-dfb9-dac8-072e-3e36f630be93', parent_recipient_name='PARENT RECIPIENT 12345', 
parent_recipient_name_raw='PARENT RECIPIENT 12345', ordering_period_end_date='2020-07-01', recipient_location_county_fips=None, pop_county_fips=None)
# Broker-side FPDS source rows (ids 4 and 5) with epoch timestamps so that
# created/updated delta logic treats them as stale; both carry the same
# obligation and random values for every other optional column.
for dap_id in (4, 5):
    baker.make(
        "transactions.SourceProcurementTransaction",
        detached_award_procurement_id=dap_id,
        created_at=datetime.fromtimestamp(0),
        updated_at=datetime.fromtimestamp(0),
        federal_action_obligation=1000001,
        _fill_optional=True,
    )
# Broker-side FABS source rows (ids 6 and 7), both active, with identical
# monetary fields; the original pair differed only in keyword order.
for fabs_id in (6, 7):
    baker.make(
        "transactions.SourceAssistanceTransaction",
        published_fabs_id=fabs_id,
        created_at=datetime.fromtimestamp(0),
        modified_at=datetime.fromtimestamp(0),
        updated_at=datetime.fromtimestamp(0),
        is_active=True,
        submission_id=33.0,
        indirect_federal_sharing=22.0,
        face_value_loan_guarantee=22.0,
        non_federal_funding_amount=44.0,
        original_loan_subsidy_cost=55.0,
        federal_action_obligation=1000001,
        _fill_optional=True,
    )
# A submission window whose reveal date (2020-05-01) has already passed, and
# a submission attached to it; `dabs` and `sa` are reused by the File C rows
# created below.
dabs = baker.make(
    "submissions.DABSSubmissionWindowSchedule",
    submission_reveal_date="2020-05-01",
)
sa = baker.make(
    "submissions.SubmissionAttributes",
    submission_window=dabs,
    reporting_period_start="2020-04-02",
)
# File C (FinancialAccountsByAwards) rows: one per COVID DEFC pairing —
# L and M on the assistance award, Q on the contract award — all sharing the
# same outlay/obligation amounts, plus one contract row with no DEFC so
# non-COVID filtering has something to exclude.
_faba_common = dict(
    treasury_account=tas,
    submission=sa,
    gross_outlay_amount_by_award_cpe=1,
    transaction_obligated_amount=1,
    ussgl487200_down_adj_pri_ppaid_undel_orders_oblig_refund_cpe=0,
    ussgl497200_down_adj_pri_paid_deliv_orders_oblig_refund_cpe=0,
    _fill_optional=True,
)
for award, defc in ((asst_award, defc_l), (asst_award, defc_m), (cont_award, defc_q)):
    baker.make(
        "awards.FinancialAccountsByAwards",
        award_id=award.award_id,
        disaster_emergency_fund=defc,
        **_faba_common,
    )
baker.make(
    "awards.FinancialAccountsByAwards",
    award_id=cont_award.award_id,
    treasury_account=tas,
    disaster_emergency_fund=None,
    submission=sa,
    _fill_optional=True,
)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.