code stringlengths 281 23.7M |
|---|
def test_collect_json_stdout(collect, capsys):
    """`collect --json` must print well-formed JSON for both instruments."""
    # Logic analyzer: one dict entry per channel, EVENTS samples each.
    cli.cmdline(['collect', 'logic_analyzer', '--channels', str(LA_CHANNELS), '--json'])
    decoded = json.loads(capsys.readouterr().out)
    assert len(decoded) == LA_CHANNELS
    assert len(next(iter(decoded.values()))) == EVENTS
    # Oscilloscope: SCOPE_CHANNELS entries plus one extra series
    # (presumably a time axis — confirm against the collect implementation).
    cli.cmdline(['collect', 'oscilloscope', '--channels', str(SCOPE_CHANNELS), '--json'])
    decoded = json.loads(capsys.readouterr().out)
    assert len(decoded) == SCOPE_CHANNELS + 1
    assert len(next(iter(decoded.values()))) == SAMPLES
class OptionPlotoptionsLineSonificationTracksMappingPan(Options):
    """Option group `plotOptions.line.sonification.tracks.mapping.pan`.

    Each option is exposed as a property: reading returns the stored config
    value (None when unset) and assigning writes the value through to the
    underlying configuration (with `js_type=False`).

    NOTE(review): the original defined every getter/setter pair as two plain
    `def`s with the same name, so the second definition silently shadowed the
    first and the getters were unreachable. Restored the intended
    property/setter pairs, matching the generated-options convention.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class TestLabgraphGraphvizAPI(unittest.TestCase):
    """Exercises the labgraph -> graphviz conversion helpers against the Demo graph."""

    def setUp(self) -> None:
        # Fresh Demo graph per test so state cannot leak between cases.
        self.graph: lg.Graph = Demo()

    def test_identify_graph_nodes(self) -> None:
        """The Demo graph decomposes into the expected number of nodes."""
        nodes = identify_graph_nodes(self.graph)
        expected_node_count = 7
        self.assertEqual(expected_node_count, len(nodes))

    def test_out_edge_node_mapper(self) -> None:
        """Each producing stream maps back to the method that emits on it."""
        nodes = identify_graph_nodes(self.graph)
        out_edge_node_map = out_edge_node_mapper(nodes)
        self.assertEqual(4, len(out_edge_node_map))
        self.assertEqual('generate_noise', out_edge_node_map['NOISE_GENERATOR/NOISE_GENERATOR_OUTPUT'].name)
        self.assertEqual('average', out_edge_node_map['ROLLING_AVERAGER/ROLLING_AVERAGER_OUTPUT'].name)
        self.assertEqual('amplify', out_edge_node_map['AMPLIFIER/AMPLIFIER_OUTPUT'].name)
        self.assertEqual('attenuate', out_edge_node_map['ATTENUATOR/ATTENUATOR_OUTPUT'].name)

    def test_in_out_edge_mapper(self) -> None:
        """Every input edge resolves to the upstream output edge feeding it."""
        in_out_edge_map = in_out_edge_mapper(self.graph.__streams__.values())
        self.assertEqual(6, len(in_out_edge_map))
        self.assertEqual('NOISE_GENERATOR/NOISE_GENERATOR_OUTPUT', in_out_edge_map['ROLLING_AVERAGER/ROLLING_AVERAGER_INPUT'])
        self.assertEqual('NOISE_GENERATOR/NOISE_GENERATOR_OUTPUT', in_out_edge_map['AMPLIFIER/AMPLIFIER_INPUT'])
        self.assertEqual('NOISE_GENERATOR/NOISE_GENERATOR_OUTPUT', in_out_edge_map['ATTENUATOR/ATTENUATOR_INPUT'])
        self.assertEqual('ROLLING_AVERAGER/ROLLING_AVERAGER_OUTPUT', in_out_edge_map['SINK/SINK_INPUT_1'])
        self.assertEqual('AMPLIFIER/AMPLIFIER_OUTPUT', in_out_edge_map['SINK/SINK_INPUT_2'])
        self.assertEqual('ATTENUATOR/ATTENUATOR_OUTPUT', in_out_edge_map['SINK/SINK_INPUT_3'])

    def test_connect_to_upstream(self) -> None:
        """Connecting nodes to upstreams preserves the node count."""
        nodes = identify_graph_nodes(self.graph)
        streams = self.graph.__streams__.values()
        nodes = connect_to_upstream(nodes, streams)
        expected_node_count = 7
        self.assertEqual(expected_node_count, len(nodes))

    def test_build_graph(self) -> None:
        """build_graph writes an output file in the requested format."""
        # Output lands next to this test module (side effect on disk).
        self.test_dir: str = pathlib.Path(__file__).parent.absolute()
        nodes = identify_graph_nodes(self.graph)
        nodes = connect_to_upstream(nodes, self.graph.__streams__.values())
        output_dir = f'{self.test_dir}/output'
        output_file_name = f'{output_dir}/test'
        output_file_format = 'svg'
        build_graph('Demo', nodes, output_file_name, output_file_format)
        self.assertTrue(os.path.exists(f'{output_file_name}.{output_file_format}'))

    def test_generate_graphviz_invalid_graph_instance(self) -> None:
        """A non-graph instance is rejected with GenerateGraphvizError."""
        with self.assertRaises(GenerateGraphvizError):
            generate_graphviz(None, 'test.svg')

    def test_generate_graphviz_invalid_output_file(self) -> None:
        """An empty output path is rejected with GenerateGraphvizError."""
        with self.assertRaises(GenerateGraphvizError):
            generate_graphviz(None, '')
class TestExpandArrayPathsToPreserve():
    """Behavioral tests for `_expand_array_paths_to_preserve`.

    The helper maps each dotted array-path prefix to the list of integer
    indices that must be preserved beneath it.
    """

    def test_no_array_paths(self):
        # Pure object paths carry no indices, so nothing is preserved.
        paths = [['A', 'B'], ['A', 'C'], ['A', 'D', 'E', 'F'], ['A', 'G', 'E', 'I']]
        assert _expand_array_paths_to_preserve(paths) == {}

    def test_array_at_deepest_level(self):
        paths = [['A', 'B', 1], ['A', 'B', 19]]
        assert _expand_array_paths_to_preserve(paths) == {'A.B': [1, 19]}

    def test_array_of_objects(self):
        # Duplicate indices under the same prefix collapse to one entry each.
        paths = [['A', 'B', 0, 'C'], ['A', 'B', 0, 'D'], ['A', 'B', 1, 'C']]
        assert _expand_array_paths_to_preserve(paths) == {'A.B': [0, 1]}

    def test_no_paths(self):
        assert _expand_array_paths_to_preserve([]) == {}

    def test_multiple_levels_of_paths(self):
        paths = [['A', 1], ['B', 'C', 2], ['B', 'D', 3], ['C', 'D', 'E', 5], ['D', 1, 'E', 'F', 'G'], ['D', 2, 'E', 'F'], ['E']]
        assert _expand_array_paths_to_preserve(paths) == {'A': [1], 'B.C': [2], 'B.D': [3], 'C.D.E': [5], 'D': [1, 2]}

    def test_multiple_matching_embedded_objects(self):
        # Index order follows first appearance, not numeric order.
        paths = [['J', 3, 'K'], ['J', 1, 'J'], ['J', 3, 'J']]
        assert _expand_array_paths_to_preserve(paths) == {'J': [3, 1]}

    def test_nested_arrays_of_arrays(self):
        # Indices nested under other indices produce dotted numeric prefixes.
        paths = [['F', 0], ['snacks', 0], ['F', 1, 1], ['thread', 1, 'comment'], ['thread', 2, 'comment'], ['F', 1, 2, 0, 1], ['F', 1, 2, 0, 2], ['Non', 'integer']]
        assert _expand_array_paths_to_preserve(paths) == {'F': [0, 1], 'snacks': [0], 'F.1': [1, 2], 'thread': [1, 2], 'F.1.2': [0], 'F.1.2.0': [1, 2]}
class ArtistSortPopupController(OptionsController):
    """Popup controller for choosing the artist-view sort order.

    Presents the localized sort options (name/year/rating), persists the
    choice in the plugin's GSettings, and asks the view manager to re-sort.
    Removed leftover debug `print` statements from the original.
    """

    def __init__(self, plugin, viewmgr):
        super(ArtistSortPopupController, self).__init__()
        self._viewmgr = viewmgr
        self.plugin = plugin
        # Localized label -> settings value for each sort mode.
        cl = CoverLocale()
        cl.switch_locale(cl.Locale.LOCALE_DOMAIN)
        self.values = OrderedDict([(_('Sort by album name'), 'name_artist'), (_('Sort by year'), 'year_artist'), (_('Sort by rating'), 'rating_artist')])
        self.options = list(self.values.keys())
        # Load the persisted sort value, falling back to name sort when the
        # stored value is unknown (e.g. after an upgrade).
        gs = GSetting()
        source_settings = gs.get_setting(gs.Path.PLUGIN)
        value = source_settings[gs.PluginKey.SORT_BY_ARTIST]
        if value not in list(self.values.values()):
            value = 'name_artist'
            source_settings[gs.PluginKey.SORT_BY_ARTIST] = value
        self._spritesheet = None
        self.update_images(False)
        # Reverse-map the stored value back to its localized label.
        self.current_key = list(self.values.keys())[list(self.values.values()).index(value)]

    def update_images(self, *args):
        """(Re)build the spritesheet; a truthy last arg also flags a redraw."""
        self._spritesheet = self.create_spritesheet(self.plugin, self._spritesheet, 'sort_artist')
        if args[-1]:
            self.update_image = True

    def do_action(self):
        """Persist the selected sort mode and notify the current view."""
        sort = self.values[self.current_key]
        gs = GSetting()
        settings = gs.get_setting(gs.Path.PLUGIN)
        settings[gs.PluginKey.SORT_BY_ARTIST] = sort
        self._viewmgr.current_view.get_default_manager().emit('sort', 'artist')

    def get_current_image(self):
        """Return the spritesheet image for the currently selected sort mode."""
        sort = self.values[self.current_key]
        return self._spritesheet[sort]
def test_repr_property_is_working_properly(create_ref_test_data, create_maya_env):
    """A referenced representation version exposes its repr name via `ref.repr`."""
    test_data = create_ref_test_data
    env = create_maya_env
    env.save_as(test_data['asset2_model_take1_v001'])
    reference = env.reference(test_data['repr_version1'])
    assert reference.path == test_data['repr_version1'].absolute_full_path
    assert reference.repr == 'ASS'
def value_at_risk(investment: NUMERIC, mu: FLOAT, sigma: FLOAT, conf_level: FLOAT=0.95) -> FLOAT:
    """Parametric (variance-covariance) value at risk of an investment.

    Computes ``investment * (mu - sigma * z)`` where ``z`` is the normal
    quantile at ``1 - conf_level``.

    Raises:
        ValueError: when ``conf_level`` is not strictly between 0 and 1.
    """
    type_validation(investment=investment, mu=mu, sigma=sigma, conf_level=conf_level)
    if not 0 < conf_level < 1:
        raise ValueError('confidence level is expected to be between 0 and 1.')
    z_score = norm.ppf(1 - conf_level)
    res_value_at_risk: FLOAT = investment * (mu - sigma * z_score)
    return res_value_at_risk
def main(args=None):
    """CLI entry point: append a suffix to each font's family name and save.

    Mutually exclusive outputs: -i/--inplace (overwrite input),
    -d/--output-dir, or -o/--output-file (single input only).
    -R/--rename-files also renames the input file itself and is incompatible
    with -i and -o. Verbosity (-v, -vv) selects the logging level.
    """
    parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('-s', '--suffix', required=True)
    parser.add_argument('input_fonts', metavar='FONTFILE', nargs='+')
    output_group = parser.add_mutually_exclusive_group()
    output_group.add_argument('-i', '--inplace', action='store_true')
    output_group.add_argument('-d', '--output-dir')
    output_group.add_argument('-o', '--output-file')
    parser.add_argument('-R', '--rename-files', action='store_true')
    parser.add_argument('-v', '--verbose', action='count', default=0)
    options = parser.parse_args(args)
    # Map -v count to a logging level: 0 -> WARNING, 1 -> INFO, 2+ -> DEBUG.
    if (not options.verbose):
        level = 'WARNING'
    elif (options.verbose == 1):
        level = 'INFO'
    else:
        level = 'DEBUG'
    logging.basicConfig(level=level, format='%(message)s')
    # Constraints argparse groups cannot express are enforced manually here.
    if (options.output_file and (len(options.input_fonts) > 1)):
        parser.error("argument -o/--output-file can't be used with multiple inputs")
    if (options.rename_files and (options.inplace or options.output_file)):
        parser.error('argument -R not allowed with arguments -i or -o')
    for input_name in options.input_fonts:
        logger.info("Renaming font: '%s'", input_name)
        font = TTFont(input_name)
        font_family_name = add_family_suffix(font, options.suffix)
        if options.inplace:
            output_name = input_name
        elif options.output_file:
            output_name = options.output_file
        else:
            # With -R the source file is renamed first so the derived output
            # name is based on the new family name.
            if options.rename_files:
                input_name = rename_file(input_name, font_family_name, options.suffix)
            output_name = makeOutputFileName(input_name, options.output_dir)
        font.save(output_name)
        logger.info("Saved font: '%s'", output_name)
        font.close()
        # Drop the reference promptly; fonts can hold large tables in memory.
        del font
    logger.info('Done!')
class SymbolHandler(Handler):
    """Lifts binary-view symbols into decompiler Symbol/Constant objects."""

    def __init__(self, lifter: ObserverLifter):
        super().__init__(lifter)
        # Maps the source SymbolType to the lifted symbol class to construct.
        self.SYMBOL_MAP = {SymbolType.FunctionSymbol: FunctionSymbol, SymbolType.ImportAddressSymbol: ImportedFunctionSymbol, SymbolType.ImportedFunctionSymbol: ImportedFunctionSymbol, SymbolType.DataSymbol: Symbol, SymbolType.ImportedDataSymbol: Symbol, SymbolType.ExternalSymbol: ImportedFunctionSymbol, SymbolType.LibraryFunctionSymbol: Symbol}

    def register(self):
        """Register this handler for both core and binaryninja symbol types."""
        self._lifter.HANDLERS.update({CoreSymbol: self.lift_symbol, bSymbol: self.lift_symbol})

    def lift_symbol(self, symbol: CoreSymbol, **kwargs) -> Union[(GlobalVariable, Constant)]:
        """Lift `symbol` to its mapped class, or fall back to a plain Constant
        (of the symbol's address) for unmapped symbol types."""
        if (not (symbol_type := self.SYMBOL_MAP.get(symbol.type, None))):
            warning(f'[Lifter] Can not handle symbols of type {symbol.type}, falling back to constant lifting.')
            return Constant(symbol.address)
        # NOTE(review): short_name[:] copies the string; redundant for Python
        # str (immutable) unless short_name is some mutable proxy -- confirm.
        return symbol_type(self._purge_symbol_name(symbol.short_name[:], symbol.address), symbol.address)

    def _purge_symbol_name(self, name: str, addr: int) -> str:
        """Sanitize a symbol name; replace unusable names with an
        address-derived placeholder ('??'-prefixed or over-long names)."""
        if ((name[:2] == '??') or (len(name) > MAX_SYMBOL_NAME_LENGTH)):
            return (GLOBAL_VARIABLE_PREFIX + f'{hex(addr)}')
        # Single pass: spaces/dots become underscores; quotes/backticks dropped.
        return name.translate({ord(' '): '_', ord("'"): '', ord('.'): '_', ord('`'): ''})
def extract_file_level_comments(message_string):
    """Split a message into its leading comment block and remaining content.

    Returns a ``(file_level_comments, file_content)`` pair of line lists;
    delimiter characters are stripped from the comment lines. When every
    line is a comment, ``file_content`` is empty.
    """
    lines = message_string.splitlines()
    # Index of the first line NOT starting with the delimiter; -1 when all
    # lines are comments.
    index = next((i for (i, v) in enumerate(lines) if (not v.startswith(COMMENT_DELIMITER))), (- 1))
    if (index != (- 1)):
        file_level_comments = lines[:index]
        file_content = lines[index:]
    else:
        file_level_comments = lines[:]
        file_content = []
    # NOTE(review): str.lstrip treats its argument as a SET of characters, not
    # a prefix -- e.g. lstrip('//') also strips any extra leading '/'.
    # Confirm this is intended rather than removeprefix-style behavior.
    file_level_comments = [line.lstrip(COMMENT_DELIMITER) for line in file_level_comments]
    return (file_level_comments, file_content)
def parse_FMT35C(buffer, dex_object, pc_point, offset):
    """Decode a Dalvik 35c-format instruction (e.g. invoke-kind {vC..vG}, ref@BBBB).

    Returns a tuple of (size, mnemonic, register operands..., reference text),
    or (size, 'error .......') when the encoded argument count exceeds 5.
    """
    # Nibble layout of the 35c format: A = argument count, C..G = registers.
    arg_count = int(buffer[1]) >> 4
    reg_g = int(buffer[1]) & 0x0F
    reg_d = int(buffer[4]) >> 4
    reg_c = int(buffer[4]) & 0x0F
    reg_f = int(buffer[5]) >> 4
    reg_e = int(buffer[5]) & 0x0F
    (bbbb,) = struct.unpack_from('H', buffer, 2)
    opcode = int(buffer[0])
    # Opcode 0x24 (filled-new-array) references a string/type id; everything
    # else resolves the method reference.
    if opcode == 36:
        prefix = '%s' % dex_object.getstringbyid(bbbb)
    else:
        prefix = '%s //%s' % (dex_object.getmethodname(bbbb), dex_object.getmethodfullname(bbbb, True))
    decoded = dex_decode[opcode]
    if arg_count > 5:
        return (decoded[4], 'error .......')
    # Registers appear in C, D, E, F, G order; only the first `arg_count` are used.
    operands = tuple(('v%d' % reg) for reg in (reg_c, reg_d, reg_e, reg_f, reg_g)[:arg_count])
    return (decoded[4], decoded[1]) + operands + (prefix,)
class CovarianceModule(nn.Module):
    """Scaled isotropic covariance over (coords, E) inputs.

    Wraps an isotropic covariance function and multiplies by a strictly
    positive scale parameterized on the log scale:
    ``scale_prior * exp(scale_param)``.
    """

    def __init__(self, iso_cov_fn, scale_param, scale_prior):
        super().__init__()
        # iso_cov_fn: isotropic covariance function applied to the quadratic term
        self.iso_cov_fn = iso_cov_fn
        # scale_param: unconstrained (log-scale) learnable parameter
        self.scale_param = scale_param
        # scale_prior: multiplicative prior scale -- presumably positive; confirm
        self.scale_prior = scale_prior

    def get_scale(self):
        """Return the effective scale; exp keeps it strictly positive."""
        return (self.scale_prior * torch.exp(self.scale_param))

    def forward(self, coords, E):
        """Build the scaled covariance matrix for the given inputs.

        NOTE(review): relies on prob_product_quad / prob_product_constant
        supplying the expected-kernel terms -- their semantics are not visible
        in this file, so the shapes of `coords` and `E` are assumed compatible.
        """
        K = self.iso_cov_fn(prob_product_quad(coords, E, coords, E))
        K_scaled = (K * prob_product_constant(E, E))
        K_scaled *= self.get_scale()
        return K_scaled
class Features():
    """Name-based dispatcher for feature-extraction methods.

    Methods are looked up on the instance by string name via `run_func`,
    which lets callers drive extraction from configuration data.
    """

    def __init__(self):
        # Names of generators that have produced at least one row.
        self.nonempty_generators = set()

    def run_func(self, func_name, *args):
        """Invoke the method named `func_name` with `args`.

        Returns the method's result, or False (after printing an error)
        when no attribute of that name exists.
        """
        target = getattr(self, func_name, None)
        if not target:
            print("Error: Not a function name that's been defined")
            return False
        return target(*args)
def get_columns(fields, rows):
    """Project each row of `rows` onto the keys listed in `fields`.

    A key is kept only when the row holds a truthy value for it, so falsy
    values (0, '', None, False) are dropped along with missing keys.
    """
    return [
        {column: record[column] for column in fields if record.get(column, None)}
        for record in rows
    ]
def _pyshark_row_layers(rows_f):
return filter((lambda row: ('layers' in row)), rows_f()) |
# NOTE(review): the four bare expressions below look like the remnants of
# stripped click decorators (@cli.command(), @click.argument('name', ...),
# @click.option('--admin/--no-admin', ...), @click.option('--proven/...')).
# As written they are not valid Python -- restore against the original CLI
# module before relying on this block.
()
('name', required=True)
('--admin/--no-admin', default=False)
('--proven/--no-proven', default=False)
def alter_user(name, admin, proven):
    """Set the admin/proven flags on the user with the given username.

    Prints a message and does nothing when no such user exists; otherwise
    updates both flags and commits the session.
    """
    user = models.User.query.filter((models.User.username == name)).first()
    if (not user):
        print('No user named {0}.'.format(name))
        return
    user.admin = admin
    user.proven = proven
    db.session.add(user)
    db.session.commit()
# NOTE(review): the bare tuple below looks like a stripped pytest decorator
# (@pytest.fixture(scope='session', autouse=True)); as written it is not
# valid Python -- restore against the original conftest before relying on it.
(scope='session', autouse=True)
def _standard_os_environ():
    """Session-scoped fixture: pin the Flask-related env vars to 'unset'.

    Removes FLASK_APP/FLASK_ENV/FLASK_DEBUG/FLASK_RUN_FROM_CLI/
    WERKZEUG_RUN_MAIN for the whole session and yields the (target, key,
    value) triples so dependent fixtures can re-apply them; undoes all
    changes on teardown.
    """
    mp = monkeypatch.MonkeyPatch()
    out = ((os.environ, 'FLASK_APP', monkeypatch.notset), (os.environ, 'FLASK_ENV', monkeypatch.notset), (os.environ, 'FLASK_DEBUG', monkeypatch.notset), (os.environ, 'FLASK_RUN_FROM_CLI', monkeypatch.notset), (os.environ, 'WERKZEUG_RUN_MAIN', monkeypatch.notset))
    for (_, key, value) in out:
        if (value is monkeypatch.notset):
            # notset sentinel means "ensure absent" (no error if missing).
            mp.delenv(key, False)
        else:
            mp.setenv(key, value)
    (yield out)
    mp.undo()
def installdeps(modulename):
    """Kick off dependency installation for `modulename` on a daemon thread.

    Returns True when the worker thread was started, or False when an update
    is already in progress (Settings.UpdateString starting with '!').
    """
    update_marker = Settings.UpdateString
    if update_marker and update_marker[0] == '!':
        misc.addLog(rpieGlobals.LOG_LEVEL_INFO, 'Update already in progress!')
        return False
    worker = threading.Thread(target=installdeps2, args=(modulename,))
    worker.daemon = True
    worker.start()
    return True
def build_diagnositic_cifti_files(tmean_vol, cov_vol, goodvoxels_vol, settings, meshes):
    """Write surface-mapping diagnostic files (mean/cov/goodvoxels) as CIFTI.

    For each hemisphere, projects the temporal-mean and coefficient-of-
    variation volumes to the surface both masked by goodvoxels and unmasked
    ('_all' variants), projects the goodvoxels volume itself, then combines
    left/right metrics per low-res mesh into dense-scalar CIFTI files via
    wb_command.
    """
    logger.info(section_header('Writing Surface Mapping Diagnotic Files'))
    for Hemisphere in ['L', 'R']:
        for map_name in ['mean', 'cov']:
            if (map_name == 'mean'):
                map_vol = tmean_vol
            if (map_name == 'cov'):
                map_vol = cov_vol
            # Masked projection (restricted to goodvoxels, with dilation) ...
            volume_to_surface_plus_resampling(vol_input=map_vol, map_name=map_name, hemisphere=Hemisphere, settings=settings, meshes=meshes, volume_roi=goodvoxels_vol, dilate_factor=settings.dilate_factor)
            # ... and the unmasked '_all' variant for comparison.
            volume_to_surface_plus_resampling(vol_input=map_vol, map_name='{}_all'.format(map_name), hemisphere=Hemisphere, settings=settings, meshes=meshes, volume_roi=None, dilate_factor=None)
        volume_to_surface_plus_resampling(vol_input=goodvoxels_vol, map_name='goodvoxels', hemisphere=Hemisphere, settings=settings, meshes=meshes, volume_roi=None, dilate_factor=None)
        # lowvoxels maps are only produced when low-signal dilation is enabled.
        if settings.dilate_percent_below:
            for low_res_mesh in settings.low_res:
                mask_and_resample(map_name='lowvoxels', subject=settings.subject.id, hemisphere=Hemisphere, src_mesh=meshes['AtlasSpaceNative'], dest_mesh=meshes['{}k_fs_LR'.format(low_res_mesh)], surf_reg_name=settings.surf_reg)
    map_names = ['goodvoxels', 'mean', 'mean_all', 'cov', 'cov_all']
    if settings.dilate_percent_below:
        map_names.append('lowvoxels')
    # Combine the per-hemisphere metric files into one dscalar per map/mesh.
    for map_name in map_names:
        for low_res_mesh in settings.low_res:
            mesh_settings = meshes['{}k_fs_LR'.format(low_res_mesh)]
            run(['wb_command', '-cifti-create-dense-scalar', os.path.join(settings.diagnostics.path, '{}.atlasroi.{}.dscalar.nii'.format(map_name, mesh_settings['meshname'])), '-left-metric', func_gii_file(settings.subject.id, map_name, 'L', mesh_settings), '-roi-left', medial_wall_roi_file(settings.subject.id, 'L', mesh_settings), '-right-metric', func_gii_file(settings.subject.id, map_name, 'R', mesh_settings), '-roi-right', medial_wall_roi_file(settings.subject.id, 'R', mesh_settings)])
def test_hostaliases():
    """hostAliases must land only on the workload where they are configured."""
    expected_alias = {'ip': '127.0.0.1', 'hostnames': ['foo.local', 'bar.local']}
    # Case 1: aliases declared under the daemonset only.
    rendered = helm_template('\ndeployment:\n enabled: true\ndaemonset:\n hostAliases:\n - ip: "127.0.0.1"\n hostnames:\n - "foo.local"\n - "bar.local"\n')
    assert 'hostAliases' not in rendered['deployment'][name]['spec']['template']['spec']
    assert expected_alias in rendered['daemonset'][name]['spec']['template']['spec']['hostAliases']
    # Case 2: aliases declared under the deployment only.
    rendered = helm_template('\ndeployment:\n enabled: true\n hostAliases:\n - ip: "127.0.0.1"\n hostnames:\n - "foo.local"\n - "bar.local"\n')
    assert 'hostAliases' not in rendered['daemonset'][name]['spec']['template']['spec']
    assert expected_alias in rendered['deployment'][name]['spec']['template']['spec']['hostAliases']
class OptionsPadding(Options):
    """Chart padding options (top/left/right/bottom), exposed as properties.

    Reading a side returns the stored config value (None when unset);
    assigning writes the value through to the underlying configuration.

    NOTE(review): the original defined every getter/setter pair as two plain
    `def`s with the same name, so the second definition silently shadowed the
    first and the getters were unreachable. Restored the intended
    property/setter pairs, matching the generated-options convention.
    """

    @property
    def top(self):
        return self._config_get(None)

    @top.setter
    def top(self, num):
        self._config(num)

    @property
    def left(self):
        return self._config_get(None)

    @left.setter
    def left(self, num):
        self._config(num)

    @property
    def right(self):
        return self._config_get(None)

    @right.setter
    def right(self, num):
        self._config(num)

    @property
    def bottom(self):
        return self._config_get(None)

    @bottom.setter
    def bottom(self, num):
        self._config(num)
def getPageRow(url, ignore_cache=False, session=None):
    """Generator (contextmanager-style): fetch `url` and yield the page object.

    Yields the fetched RemoteContentObject on success, or None when the
    download fails with DownloadException; the page is always closed after
    the consumer resumes. Removed leftover debug `print` statements from
    the original.
    """
    page = RemoteContentObject(url, db_session=session)
    try:
        page.fetch(ignore_cache=ignore_cache)
        yield page
    except DownloadException:
        yield None
    finally:
        page.close()
class TelemetryExtensionType(object):
    """Swagger-generated model with no declared properties."""

    # attribute name -> declared swagger type (empty for this model)
    swagger_types = {}
    # attribute name -> JSON key (empty for this model)
    attribute_map = {}

    def __init__(self):
        # Swagger discriminator field; unused for this type.
        self.discriminator = None

    def to_dict(self):
        """Return the model's declared properties as a dict, recursing into
        nested models that expose `to_dict`."""
        def convert(item):
            return item.to_dict() if hasattr(item, 'to_dict') else item

        result = {}
        for (attr, _) in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [convert(element) for element in value]
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {key: convert(val) for (key, val) in value.items()}
            else:
                result[attr] = value
        # When the generated base is dict-like, carry over its own items too.
        if issubclass(TelemetryExtensionType, dict):
            for (key, value) in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Pretty-printed string form of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        if not isinstance(other, TelemetryExtensionType):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
def stop_web(signum, frame):
    """Signal handler: shut down the web sub-process, if one is running."""
    global sub_process
    if sub_process:
        try:
            # Ask politely first (SIGTERM).
            sub_process.terminate()
        except Exception as e:
            logging.error('Fail to terminate process pid: {}, killing the process with SIGKILL'.format(sub_process.pid), exc_info=e)
        finally:
            # NOTE(review): this kill() runs even when terminate() succeeded,
            # although the log message implies SIGKILL is only a fallback --
            # confirm whether the unconditional kill is intended.
            sub_process.kill()
class Chart(MixHtmlState.HtmlOverlayStates, Html.Html):
    """Chartist line-chart HTML component.

    Renders a <div> bound to a `new Chartist.<type>` JS instance and exposes
    helpers to build/update the chart from Python.

    NOTE(review): `options` and `dom` read like accessors and the bare
    `('chartist')` expression before `build` looks like a stripped decorator
    (e.g. @Html.jformatter('chartist')) -- confirm against the original
    framework source.
    """
    name = 'ChartList'
    tag = 'div'
    _option_cls = OptChartist.OptionsChartistLine
    requirements = ('chartist',)
    builder_name = 'EkChartist'
    _chart__type = 'Line'

    def __init__(self, page: primitives.PageModel, width, height, html_code, options, profile):
        # Height component captured before super() consumes the css_attrs.
        self.height = height[0]
        super(Chart, self).__init__(page, [], html_code=html_code, profile=profile, options=options, css_attrs={'width': width, 'height': height})
        self.style.css.margin_top = 10
        # Cached pre-serialized options string used by build(); None until set.
        self.__defined_options = None

    def colors(self, hex_values: list):
        """Apply per-series stroke colors by emitting Chartist CSS overrides.

        Accepts hex strings, names from Colors.defined, or (line, bg) tuples;
        series are matched to colors alphabetically (ct-series-a, -b, ...).
        """
        (line_colors, bg_colors) = ([], [])
        alphabet = list(map(chr, range(97, 123)))
        styles = []
        for h in hex_values:
            if (h.upper() in Colors.defined):
                h = Colors.defined[h.upper()]['hex']
            if (not isinstance(h, tuple)):
                if h.startswith('#'):
                    line_colors.append(h)
                else:
                    line_colors.append(h)
            else:
                line_colors.append(h[0])
        for (i, color) in enumerate(line_colors):
            # Chartist supports at most 26 letter-indexed series classes.
            if (i >= len(alphabet)):
                break
            styles.append(('.ct-series-%s .ct-line, .ct-series-%s .ct-point {stroke: %s;}' % (alphabet[i], alphabet[i], color)))
        self.page.properties.css.add_text(' '.join(styles), 'chartist-colors')

    def options(self) -> OptChartist.OptionsChartistLine:
        """Chart options object (typed view over the component options)."""
        return super().options

    def dom(self) -> JsHtmlCharts.ChartJs:
        """Lazily-created JS DOM wrapper for this chart component."""
        if (self._dom is None):
            self._dom = JsHtmlCharts.Chartist(page=self.page, component=self, js_code=self.js_code)
        return self._dom

    def define(self, options: etypes.JS_DATA_TYPES=None, dataflows: List[dict]=None, component_id: str=None) -> str:
        """Return the JS expression updating the chart's options in place,
        optionally piping them through the given dataflow transforms."""
        self.js_code = component_id
        if (options is None):
            if (dataflows is not None):
                return ("let chartCtx = %(config)s;window['%(chartId)s'].update(null, chartCtx)" % {'config': JsUtils.jsWrap(JsUtils.dataFlows(JsUtils.jsWrap(("window['%s']" % self.js_code)), dataflows, self.page)), 'chartId': self.js_code})
        if (dataflows is not None):
            options = JsUtils.jsWrap(JsUtils.dataFlows(options, dataflows, self.page))
        return ("window['%(chartId)s'].update(null, %(ctx)s)" % {'chartId': self.js_code, 'ctx': JsUtils.jsConvertData(options, None)})

    ('chartist')
    def build(self, data: etypes.JS_DATA_TYPES=None, options: etypes.JS_DATA_TYPES=None, profile: etypes.PROFILE_TYPE=None, component_id: str=None, stop_state: bool=True, dataflows: List[dict]=None):
        """Return the JS that either updates the chart with `data` or, when no
        data is given, instantiates the Chartist object for this component."""
        self.js_code = component_id
        if (data is not None):
            builder_fnc = JsUtils.jsWrap(('%s(%s, %s)' % (self.builder_name, JsUtils.dataFlows(data, dataflows, self.page), (self.__defined_options or self.options.config_js(options).toStr()))), profile).toStr()
            state_expr = ''
            if stop_state:
                state_expr = (';%s' % self.hide_state(component_id))
            return ('%(chartId)s.update(%(builder)s, %(ctx)s);%(state)s' % {'chartId': self.js_code, 'builder': builder_fnc, 'state': state_expr, 'ctx': self.options.config_js(options).toStr()})
        return ('%(chartId)s = new Chartist.%(chartType)s("#"+ %(htmlCode)s, %(ctx)s)' % {'chartId': self.js_code, 'htmlCode': JsUtils.jsConvertData((component_id or self.html_code), None), 'ctx': self.options.config_js(options).toStr(), 'chartType': self._chart__type})

    def __str__(self):
        # Register the builder JS, then render the bare container tag.
        self.page.properties.js.add_builders(self.build())
        return ('<%s %s></%s>' % (self.tag, self.get_attrs(css_class_names=self.style.get_classes()), self.tag))
def find_thread_by_board_name_thread_refno(board_name: str, thread_refno: int) -> Optional[ThreadModel]:
    """Resolve a thread by board name and per-board reference number.

    Checks the cache first; on a miss, queries the database and returns the
    model with board info attached, or None when no such thread exists.
    """
    cached = cache.get(cache_key('thread', board_name, thread_refno))
    if cached:
        return ThreadModel.from_cache(cached)
    with session() as s:
        query = s.query(ThreadOrmModel).filter(
            (ThreadOrmModel.refno == thread_refno),
            (ThreadOrmModel.board_id == BoardOrmModel.id),
            (BoardOrmModel.name == board_name))
        row = query.one_or_none()
        if not row:
            return None
        return ThreadModel.from_orm_model(row, include_board=True)
class AppServiceStreamlitTokenProvider(AbstractTokenProvider):
    """Token provider reading the App Service access token from Streamlit
    request headers, supporting both pre- and post-1.14.0 internals."""

    config_name = 'streamlit'

    def get_token(self) -> 'str | None':
        """Return the access-token header value, or None when unavailable.

        Streamlit moved these internals between releases, so each lookup is
        best-effort: any import/lookup failure falls through to the next API.
        """
        recoverable = (ImportError, KeyError, RuntimeError, ModuleNotFoundError, TypeError)
        try:
            return self.get_streamlit_request_headers()[APP_SERVICE_ACCESS_TOKEN_HEADER]
        except recoverable:
            pass
        try:
            return self.get_streamlit_request_headers_1_14_0()[APP_SERVICE_ACCESS_TOKEN_HEADER]
        except recoverable:
            return None

    def get_streamlit_request_headers(self):
        """Headers via pre-1.14 internals (Server singleton + run context)."""
        from streamlit.scriptrunner.script_run_context import get_script_run_ctx
        from streamlit.server.server import Server
        run_ctx = get_script_run_ctx()
        session_info = Server.get_current()._get_session_info(run_ctx.session_id)
        return session_info.ws.request.headers

    def get_streamlit_request_headers_1_14_0(self) -> CaseInsensitiveDict:
        """Headers via the 1.14.0+ websocket-headers helper."""
        from streamlit.web.server.websocket_headers import _get_websocket_headers
        return CaseInsensitiveDict(_get_websocket_headers())
def attribute_around_constant_cube_slices():
    """Compute all cube attributes within a window bounded by two constant levels."""
    cube_path = EXPATH1 / 'ib_test_cube2.segy'
    upper_level = 1010
    lower_level = 1100
    cube = xtgeo.cube_from_file(cube_path)
    surf_upper = xtgeo.surface_from_cube(cube, upper_level)
    surf_lower = xtgeo.surface_from_cube(cube, lower_level)
    if DEBUG:
        surf_upper.describe()
        surf_lower.describe()
    attr_maps = surf_upper.slice_cube_window(cube, attribute='all', sampling='trilinear', zsurf=surf_upper, other=surf_lower)
    # Persist each attribute surface; describe() only when debugging.
    for attr_name in attr_maps:
        if DEBUG:
            attr_maps[attr_name].describe()
        attr_maps[attr_name].to_file(TMPDIR / ('myfile_constlevels_' + attr_name + '.dat'), fformat='ijxyz')
# NOTE(review): the bare `_chunk_type` below looks like the remnant of a
# stripped registration decorator (e.g. @chunk.register_chunk_type); likewise
# `chunk_type` and `parser` take `cls` and were presumably @classmethods in
# the original -- restore against the upstream SCTP packet library.
_chunk_type
class chunk_data(chunk):
    """SCTP DATA chunk: header plus user payload.

    Wire layout (_PACK_STR '!BBHIHHI'): type, flags (U/B/E bits), length,
    TSN, stream id, stream sequence, payload protocol id, then the payload.
    """
    _PACK_STR = '!BBHIHHI'
    _MIN_LEN = struct.calcsize(_PACK_STR)

    def chunk_type(cls):
        # DATA chunks always carry type TYPE_DATA.
        return TYPE_DATA

    def __init__(self, unordered=0, begin=0, end=0, length=0, tsn=0, sid=0, seq=0, payload_id=0, payload_data=None):
        # The flag arguments must each be a single bit (0 or 1).
        assert (1 == (unordered | 1))
        assert (1 == (begin | 1))
        assert (1 == (end | 1))
        assert (payload_data is not None)
        super(chunk_data, self).__init__(self.chunk_type(), length)
        self.unordered = unordered
        self.begin = begin
        self.end = end
        self.tsn = tsn
        self.sid = sid
        self.seq = seq
        self.payload_id = payload_id
        self.payload_data = payload_data

    def parser(cls, buf):
        """Parse a DATA chunk from `buf`, returning a new instance."""
        (_, flags, length, tsn, sid, seq, payload_id) = struct.unpack_from(cls._PACK_STR, buf)
        # Flags byte: bit 2 = U(nordered), bit 1 = B(egin), bit 0 = E(nd).
        unordered = ((flags >> 2) & 1)
        begin = ((flags >> 1) & 1)
        end = ((flags >> 0) & 1)
        # Payload spans from the end of the fixed header to `length`.
        fmt = ('!%ds' % (length - cls._MIN_LEN))
        (payload_data,) = struct.unpack_from(fmt, buf, cls._MIN_LEN)
        return cls(unordered, begin, end, length, tsn, sid, seq, payload_id, payload_data)

    def serialize(self):
        """Serialize this chunk to bytes; computes and back-patches the length
        field when it was left as 0."""
        flags = (((self.unordered << 2) | (self.begin << 1)) | (self.end << 0))
        buf = bytearray(struct.pack(self._PACK_STR, self.chunk_type(), flags, self.length, self.tsn, self.sid, self.seq, self.payload_id))
        buf.extend(self.payload_data)
        if (0 == self.length):
            self.length = len(buf)
            struct.pack_into('!H', buf, 2, self.length)
        return six.binary_type(buf)
class _SphereItem(QGraphicsEllipseItem, _ActionDelegator):
    """Ellipse item that paints a node as a shaded sphere via a radial gradient."""

    def __init__(self, node):
        self.node = node
        diameter = node.img_style['size']
        radius = diameter / 2.0
        QGraphicsEllipseItem.__init__(self, 0, 0, diameter, diameter)
        _ActionDelegator.__init__(self)
        self.setPen(QPen(QColor(self.node.img_style['fgcolor'])))
        # An off-center focal point produces the highlight that makes the
        # flat ellipse read as a sphere.
        gradient = QRadialGradient(radius, radius, radius, diameter / 3, diameter / 3)
        gradient.setColorAt(0.05, Qt.GlobalColor.white)
        gradient.setColorAt(1, QColor(self.node.img_style['fgcolor']))
        self.setBrush(QBrush(gradient))
class TestValidateDatasetField():
    """Validation rules for DatasetField / FidesMeta combinations.

    NOTE(review): some cases pass `identity=` and others `identify=` to
    FidesMeta -- `identify` looks like a typo that the model may silently
    ignore; confirm the intended keyword against the FidesMeta schema.
    """

    def test_return_all_elements_not_string_field(self):
        # return_all_elements is only meaningful on array types.
        with pytest.raises(ValidationError):
            DatasetField(name='test_field', fides_meta=FidesMeta(references=None, identity='identifiable_field_name', primary_key=False, data_type='string', length=None, return_all_elements=True, read_only=None))

    def test_return_all_elements_on_array_field(self):
        # On an array ('string[]') type, return_all_elements is accepted.
        assert DatasetField(name='test_field', fides_meta=FidesMeta(references=None, identity='identifiable_field_name', primary_key=False, data_type='string[]', length=None, return_all_elements=True, read_only=None))

    def test_data_categories_at_object_level(self):
        # Object-typed fields cannot carry their own data_categories.
        with pytest.raises(ValidationError) as exc:
            DatasetField(name='test_field', data_categories=['user'], fides_meta=FidesMeta(references=None, identify=None, primary_key=False, data_type='object', length=None, return_all_elements=None, read_only=None), fields=[DatasetField(name='nested_field')])
        assert ("Object field 'test_field' cannot have specified data_categories" in str(exc))

    def test_object_field_conflicting_types(self):
        # A scalar data_type is incompatible with having sub-fields.
        with pytest.raises(ValidationError) as exc:
            DatasetField(name='test_field', data_categories=['user'], fides_meta=FidesMeta(references=None, identify=None, primary_key=False, data_type='string', length=None, return_all_elements=None, read_only=None), fields=[DatasetField(name='nested_field')])
        assert ("The data type 'string' on field 'test_field' is not compatible with specified sub-fields." in str(exc))

    def test_data_categories_on_nested_fields(self):
        # data_categories on the nested (leaf) field is valid.
        DatasetField(name='test_field', fides_meta=FidesMeta(references=None, identify=None, primary_key=False, data_type='object', length=None, read_only=None), fields=[DatasetField(name='nested_field', data_categories=['user'])])
class ServiceResponse(ModelComposed):
allowed_values = {('type',): {'VCL': 'vcl', 'WASM': 'wasm'}}
validations = {}
_property
def additional_properties_type():
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type)
_nullable = False
_property
def openapi_types():
lazy_import()
return {'created_at': (datetime, none_type), 'deleted_at': (datetime, none_type), 'updated_at': (datetime, none_type), 'comment': (str, none_type), 'name': (str,), 'customer_id': (str,), 'type': (str,), 'id': (str,), 'publish_key': (str,), 'paused': (bool,), 'versions': ([SchemasVersionResponse],)}
_property
def discriminator():
return None
attribute_map = {'created_at': 'created_at', 'deleted_at': 'deleted_at', 'updated_at': 'updated_at', 'comment': 'comment', 'name': 'name', 'customer_id': 'customer_id', 'type': 'type', 'id': 'id', 'publish_key': 'publish_key', 'paused': 'paused', 'versions': 'versions'}
read_only_vars = {'created_at', 'deleted_at', 'updated_at', 'id'}
_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs):
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
composed_info = validate_get_composed_info(constant_args, kwargs, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
discarded_args = composed_info[3]
for (var_name, var_value) in kwargs.items():
if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
continue
setattr(self, var_name, var_value)
return self
required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances'])
_js_args_to_python_args
def __init__(self, *args, **kwargs):
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
composed_info = validate_get_composed_info(constant_args, kwargs, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
discarded_args = composed_info[3]
for (var_name, var_value) in kwargs.items():
if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
continue
setattr(self, var_name, var_value)
if (var_name in self.read_only_vars):
raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
_property
def _composed_schemas():
    """Return the allOf/anyOf/oneOf component classes of this composed schema.

    lazy_import() is called here (not at module load) because the referenced
    model classes may participate in circular imports.
    """
    lazy_import()
    return {'anyOf': [], 'allOf': [ServiceCreate, ServiceResponseAllOf, Timestamps], 'oneOf': []}
_required
def calendar(request, location_slug):
    """Django view: render the monthly room-usage calendar for one location.

    The ``month``/``year`` query params select the month; get_calendar_dates
    normalizes them into date bounds. Confirmed/approved Uses overlapping the
    window are grouped per room, clipped to the visible month, and handed to
    the template together with summary counts and a rendered GuestCalendar.
    """
    location = get_object_or_404(Location, slug=location_slug)
    today = timezone.localtime(timezone.now())
    month = request.GET.get('month')
    year = request.GET.get('year')
    (start, end, next_month, prev_month, month, year) = get_calendar_dates(month, year)
    report_date = datetime.date(year, month, 1)
    # Uses that overlap [start, end]: exclude ones entirely before or after.
    uses = Use.objects.filter((Q(status='confirmed') | Q(status='approved'))).filter(location=location).exclude(depart__lt=start).exclude(arrive__gt=end).order_by('arrive')
    rooms = Resource.objects.filter(location=location)
    uses_by_room = []
    empty_rooms = 0
    num_rows_in_chart = 0
    for room in rooms:
        num_rows_in_chart += room.max_daily_capacities_between(start, end)
    if (len(uses) == 0):
        any_uses = False
    else:
        any_uses = True
    for room in rooms:
        uses_this_room = []
        uses_list_this_room = list(uses.filter(resource=room))
        if (len(uses_list_this_room) == 0):
            # Rooms without uses are counted and removed from the chart rows.
            empty_rooms += 1
            num_rows_in_chart -= room.max_daily_capacities_between(start, end)
        else:
            for u in uses_list_this_room:
                # Clip each use's displayed range to the visible month window.
                if (u.arrive < start):
                    display_start = start
                else:
                    display_start = u.arrive
                if (u.depart > end):
                    display_end = end
                else:
                    display_end = u.depart
                uses_this_room.append({'use': u, 'display_start': display_start, 'display_end': display_end})
            # NOTE(review): only rooms with uses are appended here — the source
            # dump lost indentation, so confirm this nesting against upstream.
            uses_by_room.append((room, uses_this_room))
    logger.debug('Uses by Room for calendar view:')
    logger.debug(uses_by_room)
    guest_calendar = GuestCalendar(uses, year, month, location).formatmonth(year, month)
    return render(request, 'calendar.html', {'uses': uses, 'uses_by_room': uses_by_room, 'month_start': start, 'month_end': end, 'next_month': next_month, 'prev_month': prev_month, 'rows_in_chart': num_rows_in_chart, 'report_date': report_date, 'location': location, 'empty_rooms': empty_rooms, 'any_uses': any_uses, 'calendar': mark_safe(guest_calendar)})
def extractKrullscansCom(item):
    """Parse a krullscans.com feed item into a release message.

    Returns None for preview posts or items without chapter/volume info,
    a release message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    known_series = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series, kind in known_series:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=kind)
    return False
class ValidatorsTests(TestCase):
    """Tests for qs_exists error handling and BaseUniqueForValidator basics."""

    def test_qs_exists_handles_type_error(self):
        class _RaisesTypeError:
            def exists(self):
                raise TypeError

        assert qs_exists(_RaisesTypeError()) is False

    def test_qs_exists_handles_value_error(self):
        class _RaisesValueError:
            def exists(self):
                raise ValueError

        assert qs_exists(_RaisesValueError()) is False

    def test_qs_exists_handles_data_error(self):
        class _RaisesDataError:
            def exists(self):
                raise DataError

        assert qs_exists(_RaisesDataError()) is False

    def test_validator_raises_error_if_not_all_fields_are_provided(self):
        validator = BaseUniqueForValidator(queryset=object(), field='foo', date_field='bar')
        with pytest.raises(ValidationError):
            # 'bar' is missing from attrs, so enforcement must fail.
            validator.enforce_required_fields({'foo': 'baz'})

    def test_validator_raises_error_when_abstract_method_called(self):
        validator = BaseUniqueForValidator(queryset=object(), field='foo', date_field='bar')
        with pytest.raises(NotImplementedError):
            validator.filter_queryset(attrs=None, queryset=None, field_name='', date_field_name='')

    def test_equality_operator(self):
        queryset = MagicMock()
        first = BaseUniqueForValidator(queryset=queryset, field='foo', date_field='bar')
        second = BaseUniqueForValidator(queryset=queryset, field='foo', date_field='bar')
        assert first == second
        second.date_field = 'bar2'
        assert first != second
class CanAccessForum(Requirement):
    """Fulfilled when the user shares at least one group with the current forum."""

    def fulfill(self, user):
        if not current_forum:
            raise FlaskBBError('Could not load forum data')
        allowed = {group.id for group in current_forum.groups}
        mine = {group.id for group in user.groups}
        return bool(allowed & mine)
@pytest.mark.parametrize('cell, degree', [(c, d) for c in (ReferenceInterval, ReferenceTriangle) for d in range(8)])
def test_tabulate_matrix_size(cell, degree):
    """tabulate() must return a (num_points, num_nodes) shaped array for every degree."""
    # BUG FIX: the element must be built with the parametrised degree, not a
    # hard-coded 2 — otherwise degrees 0..7 all tested the same element.
    # (Also restored the `@pytest.mark` prefix lost from the decorator.)
    fe = LagrangeElement(cell, degree)
    points = np.ones((4, cell.dim))
    shape = fe.tabulate(points).shape
    correct_shape = (4, fe.nodes.shape[0])
    assert (shape == correct_shape), ('tabulate should have returned an array of shape %s, not %s' % (correct_shape, shape))
(help='Generates <airflow-dag-id>_diagrams.py in <output-path> directory which contains the definition to create a diagram. Run this file and you will get a rendered diagram.')
def generate(
    dag_id: Optional[str] = Option(None, '--airflow-dag-id', '-d', help='The dag id from which to generate the diagram. By default it generates for all.'),
    # BUG FIX: the default host URL literal had been corrupted to
    # `Option(' '--airflow-host', ...)` (a syntax error). Restored a default of
    # 'http://localhost:8080' — confirm the exact default URL against upstream.
    host: str = Option('http://localhost:8080', '--airflow-host', '-h', help='The host of the airflow rest api from where to retrieve the dag tasks information.'),
    username: str = Option('admin', '--airflow-username', '-u', help='The username of the airflow rest api.'),
    password: str = Option('admin', '--airflow-password', '-p', help='The password of the airflow rest api.'),
    output_path: Path = Option('.', '--output-path', '-o', help='The path to output the diagrams to.', exists=True, file_okay=False, writable=True),
    mapping_file: Path = Option(None, '--mapping-file', '-m', help='The mapping file to use for static mapping from Airflow task to diagram node. By default no mapping file is being used.', exists=True, dir_okay=False),
    verbose: bool = Option(False, '--verbose', '-v', help='Verbose output i.e. useful for debugging purposes.'),
    label_wrap: Optional[str] = Option(None, '--label-wrap', help='Specify either a number for label width or a separator to indicate when to wrap a label. By default it does not wrap labels.'),
    progress_bar: bool = Option(False, '--progress', help='Specify whether to show a progress bar or not. By default it does not show progress.'),
    from_file: Path = Option(None, '--from-file', '-f', help='The file to read Airflow information from. By default it does not read Airflow info from file.', exists=True, dir_okay=False),
    export_matches: Path = Option(None, '--export-matches', help='Exports matches to file. This file can be used as mapping file. By default it is not being exported.'),
) -> None:
    """Generate ``<dag-id>_diagrams.py`` files describing Airflow DAGs as diagrams.

    Airflow information is either downloaded from the REST API or loaded from
    ``from_file``; an optional mapping file pins tasks to diagram nodes.
    """
    if verbose:
        _enable_debugging()
    if mapping_file:
        mappings = load_mappings(mapping_file)
    else:
        mappings = {}
    if from_file:
        airflow_info = _load(from_file)
    else:
        airflow_info = _download_airflow_info(dag_id, host, username, password, progress_bar)
    _generate_diagram(output_path, label_wrap, progress_bar, export_matches, mappings, airflow_info)
    rprint('[green]Done. ')
class ProductFeedUploadErrorSample(AbstractCrudObject):
    """Generated Facebook Business SDK node for a product-feed upload error sample.

    Auto-generated CRUD object; do not hand-edit field lists.
    """

    def __init__(self, fbid=None, parent_id=None, api=None):
        self._isProductFeedUploadErrorSample = True
        super(ProductFeedUploadErrorSample, self).__init__(fbid, parent_id, api)

    class Field(AbstractObject.Field):
        # Graph API field names for this node.
        id = 'id'
        retailer_id = 'retailer_id'
        row_number = 'row_number'

    def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Read this node via GET; returns a request (batch/pending) or the executed result."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=ProductFeedUploadErrorSample, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            # Queue into the batch; the caller executes the batch later.
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            # Return the unexecuted request for the caller to run.
            return request
        else:
            self.assure_call()
            return request.execute()

    _field_types = {'id': 'string', 'retailer_id': 'string', 'row_number': 'int'}

    # NOTE(review): takes `cls` but carries no decorator — looks like a stripped
    # `@classmethod`; confirm against the SDK generator output.
    def _get_field_enum_info(cls):
        field_enum_info = {}
        return field_enum_info
def extractSteambunlightnovelCom(item):
    """Parse a steambunlightnovel.com feed item into a release message.

    Returns None for preview posts or items without chapter/volume info,
    a release message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    known_series = (
        ("tang yin's adventure in another world", "Tang Yin's Adventure In Another World", 'translated'),
        ("devil's son-in-law", "Devil's Son-in-Law", 'translated'),
        ('the cold reagent keeps a fox as a consort', 'the cold reagent keeps a fox as a consort', 'translated'),
        ('Martial God Space', 'Martial God Space', 'translated'),
        ("Demon's Diary", "Demon's Diary", 'translated'),
        ("the king of hell's genius pampered wife", "the king of hell's genius pampered wife", 'translated'),
        ('Historys Strongest Senior Brother', 'Historys Strongest Senior Brother', 'translated'),
        ('traveler', 'traveler', 'translated'),
        ('traffords trading club', 'traffords trading club', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series, kind in known_series:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=kind)
    return False
def _dp_parser_v2(dps_conf, acls_conf, meters_conf, routers_conf, vlans_conf, meta_dp_state):
    """Parse all DP config stanzas into finalized DP objects.

    Steps: parse each DP (collecting its VLANs), propagate implicitly created
    VLANs to every DP, finalize config and stack topology per DP, then
    sanity-check for duplicate DPIDs and unreferenced routers.

    Raises:
        InvalidConfigError: on any per-DP parse error (with DP context added),
        or via test_config_condition for duplicate DPIDs / unused routers.
    """
    dp_vlans = []
    for (dp_key, dp_conf) in dps_conf.items():
        try:
            (dp, vlans) = _parse_dp(dp_key, dp_conf, acls_conf, meters_conf, routers_conf, vlans_conf)
            dp_vlans.append((dp, vlans))
        except InvalidConfigError as err:
            # Re-raise with the offending DP's name for context.
            raise InvalidConfigError(('DP %s: %s' % (dp_key, err))) from err
    # VLANs created during parsing but never declared in the vlans config.
    implicit_vids = set()
    for (dp, vlans) in dp_vlans:
        implicit_vids.update((set(vlans.keys()) - set(vlans_conf.keys())))
    dps = []
    for (dp, vlans) in dp_vlans:
        # Every DP gets an instance of each implicit VLAN it is missing.
        for vlan_key in implicit_vids:
            if (vlan_key not in vlans):
                vlans[vlan_key] = VLAN(vlan_key, dp.dp_id)
        dp.reset_refs(vlans=vlans)
        dps.append(dp)
    for dp in dps:
        dp.finalize_config(dps)
    for dp in dps:
        dp.resolve_stack_topology(dps, meta_dp_state)
    for dp in dps:
        dp.finalize()
    # No two DPs may share a DPID.
    dpid_refs = set()
    for dp in dps:
        test_config_condition((dp.dp_id in dpid_refs), ('DPID %u is duplicated' % dp.dp_id))
        dpid_refs.add(dp.dp_id)
    # Every configured router must be referenced by at least one DP.
    routers_referenced = set()
    for dp in dps:
        routers_referenced.update(dp.routers.keys())
    for router in routers_conf:
        test_config_condition((router not in routers_referenced), ('router %s configured but not used by any DP' % router))
    return dps
def clip_channels(color: 'Color', nans: bool=True) -> None:
    """Clamp each channel of ``color`` (except the last) into its bounds, in place.

    Hue-like (angle) channels are wrapped via constrain_hue instead of
    clamped; unbounded or NaN channels are left untouched.
    """
    channels = color._space.CHANNELS
    for index, current in enumerate(color[:(- 1)]):
        spec = channels[index]
        if spec.flags & FLG_ANGLE:
            # Angles wrap around rather than clamp.
            color[index] = util.constrain_hue(current)
        elif spec.bound and not math.isnan(current):
            color[index] = alg.clamp(current, spec.low, spec.high)
class GPProject(ManageMembersMixin, Archivable, Document):
    """Gameplan Project document: membership, progress, guest access and activity feed.

    Fixes over the original: ``follow()`` previously tested the *bound method*
    ``self.is_followed`` (always truthy, so the guard never passed and the
    follow record was never created) — it now calls the method; and
    ``get_list_query`` (declared without ``self``) is an explicit staticmethod.
    """

    # Child records removed when a project is deleted / detached on delete.
    on_delete_cascade = ['GP Task', 'GP Discussion', 'GP Project Visit', 'GP Followed Project', 'GP Page', 'GP Pinned Project']
    on_delete_set_null = ['GP Notification']

    @staticmethod
    def get_list_query(query):
        """Restrict listings to public projects or private ones whose team the user belongs to."""
        Project = frappe.qb.DocType('GP Project')
        Member = frappe.qb.DocType('GP Member')
        member_exists = frappe.qb.from_(Member).select(Member.name).where((Member.parenttype == 'GP Team')).where((Member.parent == Project.team)).where((Member.user == frappe.session.user))
        query = query.where(((Project.is_private == 0) | ((Project.is_private == 1) & ExistsCriterion(member_exists))))
        if gameplan.is_guest():
            # Guests only see projects they were explicitly granted access to.
            GuestAccess = frappe.qb.DocType('GP Guest Access')
            project_list = GuestAccess.select(GuestAccess.project).where((GuestAccess.user == frappe.session.user))
            query = query.where(Project.name.isin(project_list))
        return query

    def as_dict(self, *args, **kwargs) -> dict:
        """Serialize the project plus a task-count summary and the user's pinned flag."""
        d = super().as_dict(*args, **kwargs)
        total_tasks = frappe.db.count('GP Task', {'project': self.name})
        completed_tasks = frappe.db.count('GP Task', {'project': self.name, 'is_completed': 1})
        pending_tasks = (total_tasks - completed_tasks)
        overdue_tasks = frappe.db.count('GP Task', {'project': self.name, 'is_completed': 0, 'due_date': ('<', frappe.utils.today())})
        d.summary = {'total_tasks': total_tasks, 'completed_tasks': completed_tasks, 'pending_tasks': pending_tasks, 'overdue_tasks': overdue_tasks}
        d.is_pinned = bool(frappe.db.exists('GP Pinned Project', {'project': self.name, 'user': frappe.session.user}))
        return d

    def before_insert(self):
        """Default a random emoji icon and starter readme; add the creator as owner."""
        if (not self.icon):
            self.icon = get_random_gemoji().emoji
        if (not self.readme):
            self.readme = f'''
            <h3>Welcome to the {self.title} page!</h3>
            <p>You can add a brief introduction about this project, links, resources, and other important information here.</p>
            '''
        self.append('members', {'user': frappe.session.user, 'email': frappe.session.user, 'role': 'Project Owner', 'status': 'Accepted'})

    def before_save(self):
        # Projects belonging to a private team are forced private.
        if frappe.db.get_value('GP Team', self.team, 'is_private'):
            self.is_private = True

    def update_progress(self):
        """Recompute the completion percentage from this project's tasks and persist it."""
        result = frappe.db.get_all('GP Task', filters={'project': self.name}, fields=['sum(is_completed) as completed', 'count(name) as total'])[0]
        if (result.total > 0):
            self.progress = (((result.completed or 0) * 100) / result.total)
            self.save()
            self.reload()

    def delete_group(self, group):
        """Remove a task-state group; refuse if any task still has that status."""
        tasks = frappe.db.count('GP Task', {'project': self.name, 'status': group})
        if (tasks > 0):
            frappe.throw(f'Group {group} cannot be deleted because it has {tasks} tasks')
        for state in self.task_states:
            if (state.status == group):
                self.remove(state)
                self.save()
                break

    def get_activities(self):
        """Return the creation event plus status updates, newest first."""
        activities = []
        activities.append({'type': 'info', 'title': 'Project created', 'date': self.creation, 'user': self.owner})
        status_updates = frappe.db.get_all('Team Project Status Update', {'project': self.name}, ['creation', 'owner', 'content', 'status'], order_by='creation desc')
        for status_update in status_updates:
            activities.append({'type': 'content', 'title': 'Status Update', 'content': status_update.content, 'status': status_update.status, 'date': frappe.utils.get_datetime(status_update.creation), 'user': status_update.owner})
        activities.sort(key=(lambda x: x['date']), reverse=True)
        return activities

    # NOTE(review): the bare `()` expressions below look like remnants of
    # stripped `@frappe.whitelist()` decorators — confirm against upstream.
    ()

    def move_to_team(self, team):
        """Move this project (and its tasks/discussions) to another team."""
        if ((not team) or (self.team == team)):
            return
        self.team = team
        self.save()
        for doctype in ['GP Task', 'GP Discussion']:
            for name in frappe.db.get_all(doctype, {'project': self.name}, pluck='name'):
                doc = frappe.get_doc(doctype, name)
                doc.team = self.team
                doc.save()

    ()

    def invite_guest(self, email):
        """Invite an external guest with access limited to this project."""
        invite_by_email(email, role='Gameplan Guest', projects=[self.name])

    ()

    def remove_guest(self, email):
        """Revoke a guest's access record for this project, if any."""
        name = frappe.db.get_value('GP Guest Access', {'project': self.name, 'user': email})
        if name:
            frappe.delete_doc('GP Guest Access', name)

    ()

    def track_visit(self):
        """Upsert the current user's last-visit timestamp (no-op when read-only)."""
        if frappe.flags.read_only:
            return
        values = {'user': frappe.session.user, 'project': self.name}
        existing = frappe.db.get_value('GP Project Visit', values)
        if existing:
            visit = frappe.get_doc('GP Project Visit', existing)
            visit.last_visit = frappe.utils.now()
            visit.save(ignore_permissions=True)
        else:
            visit = frappe.get_doc(doctype='GP Project Visit')
            visit.update(values)
            visit.last_visit = frappe.utils.now()
            visit.insert(ignore_permissions=True)

    def is_followed(self):
        """Whether the current user follows this project."""
        return bool(frappe.db.exists('GP Followed Project', {'project': self.name, 'user': frappe.session.user}))

    ()

    def follow(self):
        """Start following this project for the current user (idempotent)."""
        # BUG FIX: was `if not self.is_followed:` — a bound method is always
        # truthy, so the follow record was never created.
        if (not self.is_followed()):
            frappe.get_doc(doctype='GP Followed Project', project=self.name).insert(ignore_permissions=True)

    ()

    def unfollow(self):
        """Stop following this project for the current user."""
        follow_id = frappe.db.get_value('GP Followed Project', {'project': self.name, 'user': frappe.session.user})
        frappe.delete_doc('GP Followed Project', follow_id)
def populate_userdir(fargs):
    """Collect candidate web-root directories under one user's home directory.

    ``fargs`` is a ``(userdir, checkmodes)`` tuple (packed for pool.map-style
    callers). Looks for ``public_html`` plus ``sites/<site>/{www,secure-www}``
    and returns every directory that passes validate_directory; failures are
    logged and yield whatever was collected so far.
    """
    predefined = ['www', 'secure-www']
    userdir, checkmodes = fargs
    found = []
    try:
        userdir = os.path.abspath(userdir)
        if not validate_directory(userdir, checkmodes):
            return found
        public_html = userdir + '/public_html'
        if validate_directory(public_html, checkmodes):
            logging.debug('Appending to locations: %s', public_html)
            found.append(public_html)
        sites_root = userdir + '/sites'
        if validate_directory(sites_root, checkmodes):
            for entry in os.scandir(sites_root):
                sitedir = sites_root + '/' + entry.name
                if checkmodes and not check_dir_execution_bit(sitedir):
                    continue
                for subdir in predefined:
                    candidate = sitedir + '/' + subdir
                    if validate_directory(candidate, checkmodes):
                        logging.debug('Appending to locations: %s', candidate)
                        found.append(candidate)
    except Exception:
        # Deliberate best-effort: log the traceback, return what we have.
        logging.error(traceback.format_exc())
    return found
def extractAshenfeatherWordpressCom(item):
    """Parse an ashenfeather.wordpress.com feed item into a release message.

    Matches first by tag, then (for uncategorized posts) by title substring.
    Returns None for previews/untagged chapters, False for no match.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    known_series = (
        ('Sweet Curse! Battle-Android summoned to a Different world!', 'Sweet Curse! Battle-Android summoned to a Different world!', 'translated'),
        ('She was called God, as well as Satan', 'She was called God, as well as Satan', 'translated'),
        ('She was Called Both God & Satan', 'She was called God, as well as Satan', 'translated'),
        ('At the Northern Fort', 'At the Northern Fort', 'translated'),
        ('Girl with the Golden-Cat Eyes', 'Girl with the Golden-Cat Eyes', 'oel'),
        ('One in the Chamber', 'One in the Chamber', 'oel'),
        ('Parabellum', 'Parabellum', 'oel'),
        ('Sky Gardens', 'Sky Gardens', 'oel'),
        ('Manuke FPS', 'Manuke FPS', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series, kind in known_series:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=kind)
    if item['tags'] == ['Uncategorized']:
        # Fall back to title matching for uncategorized posts.
        by_title = (
            ('Flowered Metal | ', 'Flowered Metal', 'translated'),
            ('Master of Dungeon', 'Master of Dungeon', 'oel'),
        )
        title_lower = item['title'].lower()
        for needle, series, kind in by_title:
            if needle.lower() in title_lower:
                return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=kind)
    return False
class TestDefaultFlaskBBAuthProvider(object):
    """Behavioural tests for the default FlaskBB authentication provider."""

    provider = auth.DefaultFlaskBBAuthProvider()

    def test_returns_None_if_user_doesnt_exist(self, Fred):
        assert self.provider.authenticate('', 'lolnope') is None

    def test_returns_None_if_password_doesnt_match(self, Fred):
        assert self.provider.authenticate(Fred.email, 'stillnotit') is None

    def test_returns_user_if_identifer_and_password_match(self, Fred):
        user = self.provider.authenticate(Fred.email, 'fred')
        assert user.username == Fred.username
def test_document_inheritance():
    """MySubDoc must subclass MyDoc/Document and merge fields into one mapping."""
    assert issubclass(MySubDoc, MyDoc)
    assert issubclass(MySubDoc, document.Document)
    assert hasattr(MySubDoc, '_doc_type')
    expected_mapping = {
        'properties': {
            'created_at': {'type': 'date'},
            'name': {'type': 'keyword'},
            'title': {'type': 'keyword'},
            'inner': {'type': 'object', 'properties': {'old_field': {'type': 'text'}}},
        }
    }
    assert MySubDoc._doc_type.mapping.to_dict() == expected_mapping
class Task:
    """Base unit of work that must receive a run index before execution."""

    def set_run_index(self, index_provider: DynamicIndexProvider) -> None:
        # Pull the next index from the provider and remember it.
        self._run_index = index_provider.next()

    def run_index(self) -> int:
        index = getattr(self, '_run_index', None)
        # set_run_index must have been called first.
        assert index is not None
        return index

    def execute(self, args: argparse.Namespace, fal_dbt: FalDbt) -> int:
        raise NotImplementedError
# NOTE(review): the `.parametrize(...)` line below looks like a
# `@pytest.mark.parametrize` decorator whose `@pytest.mark` prefix was lost
# in extraction — confirm against upstream before running.
.parametrize('middleware', [MiddlewareIncompatibleWithWSGI_A(), MiddlewareIncompatibleWithWSGI_B(), MiddlewareIncompatibleWithWSGI_C(), (MiddlewareIncompatibleWithWSGI_C(), MiddlewareIncompatibleWithWSGI_A())])
def test_raise_on_incompatible(middleware):
    """add_middleware must reject middleware lacking the WSGI interface."""
    api = falcon.App()
    with pytest.raises(falcon.CompatibilityError):
        api.add_middleware(middleware)
class InSet(Expression):
    """Query AST node for ``expression in (v1, v2, ...)`` set membership.

    Supports literal-set algebra: AND/OR against other InSet nodes and
    equality comparisons are folded into intersections/unions when all the
    operands involved are literals. String literals compare case-insensitively
    (keys are case-folded).
    """

    __slots__ = ('expression', 'container')
    precedence = Comparison.precedence

    def __init__(self, expression, container):
        self.expression = expression
        self.container = container

    def is_literal(self):
        """True if every member of the container is a Literal node."""
        return all((isinstance(v, Literal) for v in self.container))

    def is_dynamic(self):
        """True if no member of the container is a Literal node."""
        return all(((not isinstance(v, Literal)) for v in self.container))

    def get_literals(self):
        """Map literal values -> their nodes; string keys are case-folded."""
        values = OrderedDict()
        for literal in self.container:
            if (not isinstance(literal, Literal)):
                continue
            k = literal.value
            if isinstance(literal, String):
                # Strings match case-insensitively; keep the first occurrence.
                values.setdefault(fold_case(k), literal)
            else:
                values[k] = literal
        return values

    def __and__(self, other):
        """AND: fold literal sets via intersection / difference where possible."""
        if (isinstance(other, InSet) and (self.expression == other.expression)):
            if (self.is_literal() and other.is_literal()):
                # (x in A) and (x in B)  ->  x in (A & B)
                container1 = self.get_literals()
                container2 = other.get_literals()
                reduced = [v for (k, v) in container1.items() if (k in container2)]
                return InSet(self.expression, reduced).optimize()
        elif isinstance(other, Not):
            if (isinstance(other.term, InSet) and (self.expression == other.term.expression)):
                if (self.is_literal() and other.term.is_literal()):
                    # (x in A) and not (x in B)  ->  x in (A - B)
                    container1 = self.get_literals()
                    container2 = other.term.get_literals()
                    reduced = [v for (k, v) in container1.items() if (k not in container2)]
                    return InSet(self.expression, reduced).optimize()
        elif (isinstance(other, Comparison) and (other.comparator == Comparison.EQ) and (self.expression == other.left)):
            if (self.is_literal() and isinstance(other.right, Literal)):
                # (x in A) and (x == v)  ->  (x in A) and (x in {v})
                return super(InSet, self).__and__(InSet(other.left, [other.right])).optimize()
        elif (isinstance(other, Comparison) and (other.comparator == Comparison.NE) and (self.expression == other.left)):
            if (self.is_literal() and isinstance(other.right, Literal)):
                # (x in A) and (x != v)  ->  (x in A) and not (x in {v})
                return super(InSet, self).__and__((~ InSet(other.left, [other.right]))).optimize()
        return super(InSet, self).__and__(other)

    def __or__(self, other):
        """OR: fold literal sets via union where possible."""
        if (isinstance(other, InSet) and (self.expression == other.expression)):
            if (self.is_literal() and other.is_literal()):
                # (x in A) or (x in B)  ->  x in (A | B)
                container = self.get_literals()
                for (k, v) in other.get_literals().items():
                    container.setdefault(k, v)
                union = [v for v in container.values()]
                return InSet(self.expression, union).optimize()
        elif (isinstance(other, Comparison) and (self.expression == other.left)):
            if (self.is_literal() and isinstance(other.right, Literal)):
                # (x in A) or (x == v)  ->  x in (A | {v})
                return self.__or__(InSet(other.left, [other.right]))
        return super(InSet, self).__or__(other)

    def split_literals(self):
        """Split a mixed container into (literal-set | dynamic-set); no-op if homogeneous."""
        if (self.is_dynamic() or self.is_literal()):
            return self
        literals = InSet(self.expression, [])
        dynamic = InSet(self.expression, [])
        for item in self.container:
            if isinstance(item, Literal):
                literals.container.append(item)
            else:
                dynamic.container.append(item)
        return (literals.optimize() | dynamic.optimize())

    def synonym(self):
        """Equivalent expansion: OR of equality comparisons against each member."""
        return Or([Comparison(self.expression, Comparison.EQ, v) for v in self.container])

    def _render(self, negate=False):
        """Render ``lhs in (...)`` (or ``not in``); wrap long member lists across lines."""
        values = [v.render() for v in self.container]
        expr = self.expression.render(self.precedence)
        operator = ('not in' if negate else 'in')
        if ((len(self.container) > 3) and (sum((len(v) for v in values)) > 40)):
            delim = ',\n'
            return '{lhs} {op} (\n{rhs}\n)'.format(lhs=expr, op=operator, rhs=self.indent(delim.join(values)))
        else:
            delim = ', '
            return '{lhs} {op} ({rhs})'.format(lhs=expr, op=operator, rhs=delim.join(values))
class OptionPlotoptionsScatter3dDatalabels(Options):
    """Generated accessor class for Highcharts ``plotOptions.scatter3d.dataLabels``.

    Each option is exposed as a getter/setter pair of the same name; the
    getter's ``_config_get(...)`` argument is the Highcharts default value.
    NOTE(review): the pairs look like stripped ``@property``/``@<name>.setter``
    decorators — as written the second definition shadows the first; confirm
    against the code generator's output.
    """

    def align(self):
        return self._config_get('center')

    def align(self, text: str):
        self._config(text, js_type=False)

    def allowOverlap(self):
        return self._config_get(False)

    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)

    def animation(self) -> 'OptionPlotoptionsScatter3dDatalabelsAnimation':
        """Sub-options object for label animation."""
        return self._config_sub_data('animation', OptionPlotoptionsScatter3dDatalabelsAnimation)

    def backgroundColor(self):
        return self._config_get(None)

    def backgroundColor(self, text: str):
        self._config(text, js_type=False)

    def borderColor(self):
        return self._config_get(None)

    def borderColor(self, text: str):
        self._config(text, js_type=False)

    def borderRadius(self):
        return self._config_get(0)

    def borderRadius(self, num: float):
        self._config(num, js_type=False)

    def borderWidth(self):
        return self._config_get(0)

    def borderWidth(self, num: float):
        self._config(num, js_type=False)

    def className(self):
        return self._config_get(None)

    def className(self, text: str):
        self._config(text, js_type=False)

    def color(self):
        return self._config_get(None)

    def color(self, text: str):
        self._config(text, js_type=False)

    def crop(self):
        return self._config_get(True)

    def crop(self, flag: bool):
        self._config(flag, js_type=False)

    def defer(self):
        return self._config_get(True)

    def defer(self, flag: bool):
        self._config(flag, js_type=False)

    def enabled(self):
        return self._config_get(False)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def filter(self) -> 'OptionPlotoptionsScatter3dDatalabelsFilter':
        """Sub-options object for the label display filter."""
        return self._config_sub_data('filter', OptionPlotoptionsScatter3dDatalabelsFilter)

    def format(self):
        return self._config_get('point.value')

    def format(self, text: str):
        self._config(text, js_type=False)

    def formatter(self):
        return self._config_get(None)

    def formatter(self, value: Any):
        self._config(value, js_type=False)

    def inside(self):
        return self._config_get(None)

    def inside(self, flag: bool):
        self._config(flag, js_type=False)

    def nullFormat(self):
        return self._config_get(None)

    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)

    def nullFormatter(self):
        return self._config_get(None)

    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    def overflow(self):
        return self._config_get('justify')

    def overflow(self, text: str):
        self._config(text, js_type=False)

    def padding(self):
        return self._config_get(5)

    def padding(self, num: float):
        self._config(num, js_type=False)

    def position(self):
        return self._config_get('center')

    def position(self, text: str):
        self._config(text, js_type=False)

    def rotation(self):
        return self._config_get(0)

    def rotation(self, num: float):
        self._config(num, js_type=False)

    def shadow(self):
        return self._config_get(False)

    def shadow(self, flag: bool):
        self._config(flag, js_type=False)

    def shape(self):
        return self._config_get('square')

    def shape(self, text: str):
        self._config(text, js_type=False)

    def style(self):
        return self._config_get(None)

    def style(self, value: Any):
        self._config(value, js_type=False)

    def textPath(self) -> 'OptionPlotoptionsScatter3dDatalabelsTextpath':
        """Sub-options object for rendering the label along a path."""
        return self._config_sub_data('textPath', OptionPlotoptionsScatter3dDatalabelsTextpath)

    def useHTML(self):
        return self._config_get(False)

    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)

    def verticalAlign(self):
        return self._config_get('bottom')

    def verticalAlign(self, text: str):
        self._config(text, js_type=False)

    def x(self):
        return self._config_get(0)

    def x(self, num: float):
        self._config(num, js_type=False)

    def y(self):
        return self._config_get(0)

    def y(self, num: float):
        self._config(num, js_type=False)

    def zIndex(self):
        return self._config_get(6)

    def zIndex(self, num: float):
        self._config(num, js_type=False)
def _get_solc_version_list() -> Tuple[(List, List)]:
    """Return ``(available, installed)`` solc version lists.

    The available list is fetched once from GitHub and cached in the module
    global AVAILABLE_SOLC_VERSIONS; if GitHub is unreachable, the installed
    versions stand in for it (or a ConnectionError is raised when nothing is
    installed at all).
    """
    global AVAILABLE_SOLC_VERSIONS
    installed = solcx.get_installed_solc_versions()
    if AVAILABLE_SOLC_VERSIONS is None:
        try:
            AVAILABLE_SOLC_VERSIONS = solcx.get_installable_solc_versions()
        except ConnectionError:
            if not installed:
                raise ConnectionError('Solc not installed and cannot connect to GitHub')
            # Offline fallback: treat installed versions as the available set.
            AVAILABLE_SOLC_VERSIONS = installed
    return (AVAILABLE_SOLC_VERSIONS, installed)
class SizeAssessmentTests(unittest.TestCase):
    """Tests for SizeAssessment.size_error on matrix-multiplication nodes."""

    def test_matrix_mult(self):
        """Incompatible operand sizes yield BadMatrixMultiplication; vector/scalar RHS yields no error."""
        bmg = BMGraphBuilder()
        assessor = SizeAssessment(Sizer())
        probs = bmg.add_real_matrix(torch.tensor([[0.5, 0.125, 0.125], [0.0625, 0.0625, 0.875]]))
        # Build a 2x3 tensor of Bernoulli samples, one per probability entry.
        tensor_elements = []
        for row in range(0, 2):
            row_node = bmg.add_natural(row)
            row_prob = bmg.add_column_index(probs, row_node)
            for column in range(0, 3):
                col_index = bmg.add_natural(column)
                prob = bmg.add_vector_index(row_prob, col_index)
                bernoulli = bmg.add_bernoulli(prob)
                sample = bmg.add_sample(bernoulli)
                tensor_elements.append(sample)
        matrix2by3_rhs = bmg.add_tensor(Size([2, 3]), *tensor_elements)
        matrix2by3 = bmg.add_real_matrix(torch.tensor([[0.21, 0.27, 0.3], [0.5, 0.6, 0.1]]))
        matrix1by3 = bmg.add_real_matrix(torch.tensor([[0.1, 0.2, 0.3]]))
        matrix3 = bmg.add_real_matrix(torch.tensor([0.1, 0.2, 0.9]))
        scalar = bmg.add_real(4.5)
        # 2x3 @ 2x3 has mismatched inner dimensions.
        mm_invalid = bmg.add_matrix_multiplication(matrix2by3_rhs, matrix2by3)
        error_size_mismatch = assessor.size_error(mm_invalid, bmg)
        self.assertIsInstance(error_size_mismatch, BadMatrixMultiplication)
        expectation = '\nThe model uses a matrix multiplication () operation unsupported by Bean Machine Graph.\nThe dimensions of the operands are 2x3 and 2x3.\n        '
        self.assertEqual(expectation.strip(), error_size_mismatch.__str__().strip())
        # 2x3 @ 1x3 would need broadcasting, which is not supported.
        broadcast_not_supported_yet = bmg.add_matrix_multiplication(matrix2by3_rhs, matrix1by3)
        error_broadcast_not_supported_yet = assessor.size_error(broadcast_not_supported_yet, bmg)
        expectation = '\nThe model uses a matrix multiplication () operation unsupported by Bean Machine Graph.\nThe dimensions of the operands are 2x3 and 1x3.\n        '
        self.assertEqual(expectation.strip(), error_broadcast_not_supported_yet.__str__().strip())
        # 1-D vector and scalar right-hand sides are acceptable (no error).
        errors = [assessor.size_error(bmg.add_matrix_multiplication(matrix2by3_rhs, mm), bmg) for mm in [matrix3, scalar]]
        for error in errors:
            self.assertIsNone(error)
class IFrame(Widget):
    """Widget that displays an external web page inside an ``<iframe>``.

    Bug fix: the scheme literal in ``_update_url`` had been corrupted to the
    unterminated string ``(' + url)``; restored to ``'http://'`` (and the
    matching text in the ``url`` property doc).
    """

    DEFAULT_MIN_SIZE = (10, 10)

    CSS = '\n    .flx-IFrame {\n        border: none;\n    }\n    '

    url = event.StringProp('', settable=True, doc="\n        The url to show. 'http://' is automatically prepended if the url\n        does not have '://' in it.\n        ")

    def _create_dom(self):
        global document
        return document.createElement('iframe')

    # NOTE(review): the bare ('size') / ('url') expressions below look like
    # remnants of stripped `@event.reaction(...)` decorators — confirm upstream.
    ('size')

    def __on_size(self, *events):
        self.node.width = self.size[0]

    ('url')

    def _update_url(self, *events):
        url = self.url
        if (url and ('://' not in url)):
            # Default to plain HTTP when no scheme is given.
            url = ('http://' + url)
        self.node.src = url
def select():
    """Prompt the player to pick one of their own pieces and return its grid cell.

    Loops until the chosen square holds a piece whose color matches the
    current player (globVar.player); marks that piece as selected.
    """
    print(' Select which piece to move.')
    selecting = True
    while selecting:
        col = utils.r_c('c')  # read a column choice from the player
        row = utils.r_c('r')  # read a row choice from the player
        if (board.Grid(row, col).piece.color != globVar.player):
            # Not the player's piece: show the error and re-prompt.
            Canvas.selectError()
        else:
            board.Grid(row, col).piece.selected = True
            selecting = False
    # NOTE(review): Grid is called again with the last row/col — assumes
    # board.Grid(row, col) returns the same cell object each call; confirm.
    return board.Grid(row, col)
def test_cube_slice_w_ignore_dead_traces_trilinear(tmpdir, generate_plot):
    """Slice a cube with dead traces using trilinear sampling, keeping dead-trace values.

    With deadtraces=False the slice keeps values at dead traces, so no node
    of the resulting surface should be masked.
    """
    cube1 = xtgeo.cube_from_file(XCUB2)
    surf1 = xtgeo.surface_from_cube(cube1, 1000.0)
    # Spot-check cells, including edges of the survey.
    cells = [(18, 12), (20, 2), (0, 4)]
    surf1.slice_cube(cube1, sampling='trilinear', snapxy=True, deadtraces=False)
    if generate_plot:
        plotfile = join(tmpdir, 'slice_tri1.png')
        title = 'Cube with dead traces; trilinear; keep as is at dead traces'
        surf1.quickplot(filename=plotfile, minmax=((- 10000), 10000), title=title)
    for cell in cells:
        (icell, jcell) = cell
        # Sliced surface value should match the top cube layer within tolerance.
        assert (surf1.values[(icell, jcell)] == pytest.approx(cube1.values[(icell, jcell, 0)], abs=0.1))
    # Nothing should be masked when dead traces are kept.
    assert (ma.count_masked(surf1.values) == 0)
class VarGraph(BaseGraph):
    """DAG of cross-class variable references between spock config classes.

    Fixes over the original: the shadowed (dead) first initialization of
    ``node_ref`` in ``_build`` is removed, and the accessors the class itself
    uses as attributes (e.g. ``self.cls_map[...]`` and ``self.ref_2_resolve``
    in ``resolve``) are declared as properties so those subscripts work.
    """

    var_resolver = VarResolver()

    def __init__(self, cls_fields_list: List[Tuple[(_C, Dict)]], input_classes: List):
        """Build the reference graph from (spock class, fields dict) tuples."""
        self._cls_fields_tuple = cls_fields_list
        self._input_classes = input_classes
        (tmp_dag, self.ref_map) = self._build()
        super().__init__(tmp_dag, whoami='Class Variable Reference Graph')

    @property
    def cls_names(self):
        """Set of decorated class names."""
        return {spock_cls.__name__ for (spock_cls, _) in self._cls_fields_tuple}

    @property
    def cls_values(self):
        """Map of class name -> class object."""
        return {spock_cls.__name__: spock_cls for (spock_cls, _) in self._cls_fields_tuple}

    @property
    def cls_map(self):
        """Map of class name -> its (shared, mutable) fields dict."""
        return {spock_cls.__name__: fields for (spock_cls, fields) in self._cls_fields_tuple}

    @property
    def nodes(self):
        """Raw input classes (graph nodes)."""
        return self._input_classes

    @property
    def ref_2_resolve(self) -> Set:
        """Names of classes that contain unresolved variable references."""
        return set(self.ref_map.keys())

    def resolve(self, spock_cls: str, spock_space: Dict) -> Tuple[(Dict, Set)]:
        """Resolve all references inside one class; return (fields, changed-names)."""
        changed_vars = set()
        if (spock_cls in self.ref_2_resolve):
            for ref in self.ref_map[spock_cls]:
                (typed_val, _) = self.var_resolver.resolve(value=self.cls_map[spock_cls][ref['val']], value_type=getattr(self.node_map[spock_cls].__attrs_attrs__, ref['val']).type, ref=ref, spock_space=spock_space)
                # Mutates the shared fields dict held in _cls_fields_tuple.
                self.cls_map[spock_cls][ref['val']] = typed_val
            changed_vars = {n['val'] for n in self.ref_map[spock_cls]}
        return (self.cls_map[spock_cls], changed_vars)

    def _build(self) -> Tuple[(Dict, Dict)]:
        """Construct dependency edges and the per-class reference map.

        Returns (nodes, node_ref): class -> set of dependent classes, and
        class name -> list of reference descriptors.
        """
        nodes = {val: [] for val in self.node_names}
        # BUG FIX: the original also pre-initialized node_ref per node and then
        # immediately rebound it to {}; the dead initialization is removed.
        node_ref = {}
        for (spock_cls, fields) in self._cls_fields_tuple:
            ref_map = []
            for (k, v) in fields.items():
                if isinstance(v, str):
                    if self.var_resolver.detect(v, str):
                        return_list = self.var_resolver.get_regex_match_reference(v)
                        for (typed_ref, _, annotation, match_val) in return_list:
                            (dep_cls, dep_val) = typed_ref.split('.')
                            if (dep_cls not in self.node_names):
                                raise _SpockVarResolverError(f"Reference to missing decorated class -- `{dep_cls}` was not passed as an *arg to SpockBuilder and/or could not be found via lazy evaluation within sys.modules['spock'].backend.config")
                            if (dep_cls != spock_cls.__name__):
                                # Edge: dep_cls is referenced by spock_cls.
                                nodes.get(dep_cls).append(spock_cls)
                            ref_map.append({'val': k, 'class': dep_cls, 'class_val': dep_val, 'matched': match_val})
            if (len(ref_map) > 0):
                node_ref.update({spock_cls.__name__: ref_map})
        nodes = {key: set(val) for (key, val) in nodes.items()}
        return (nodes, node_ref)
class TestClosestValue():
    """Tests for closest_above_value / closest_below_value against a fixed grid."""
    # NOTE(review): the bare `.parametrize(...)` lines below appear to be
    # `@pytest.mark.parametrize` decorators with the `@pytest.mark` prefix lost
    # during extraction -- this block is not valid Python as written.
    .parametrize(('input_value', 'expected_value'), [[0.5, 0.6], [0, 0.2], [0.522, 0.6], [1.15, 1], [0.8, 0.8]])
    def test_above_value(self, input_value, expected_value):
        # Nearest grid value at or above the input.
        v = closest_above_value([0.2, 0.4, 0.6, 0.8, 1], input_value)
        assert (v == expected_value)
    .parametrize(('input_value', 'expected_value'), [[0.5, 0.4], [0, 0.2], [0.522, 0.4], [1.15, 1], [0.8, 0.8]])
    def test_below_value(self, input_value, expected_value):
        # Nearest grid value at or below the input.
        v = closest_below_value([0.2, 0.4, 0.6, 0.8, 1], input_value)
        assert (v == expected_value)
def create_local_queue(executable_script: str, max_submit: int=1, num_realizations: int=10, max_runtime: Optional[int]=None, callback_timeout: Optional['Callable[[int], None]']=None, *, ens_id: Optional[str]=None, ee_uri: Optional[str]=None, ee_cert: Optional[str]=None, ee_token: Optional[str]=None):
    """Build a LOCAL-driver JobQueue with one job per realization.

    Creates the run directory for each realization (failing if it already
    exists), wraps `executable_script` in a JobQueueNode per realization, and
    returns the populated queue.  `ee_*` arguments configure the optional
    ensemble-evaluator connection.
    """
    job_queue = JobQueue(QueueConfig.from_dict({'driver_type': QueueSystem.LOCAL, 'MAX_SUBMIT': max_submit}), ens_id=ens_id, ee_uri=ee_uri, ee_cert=ee_cert, ee_token=ee_token, verify_token=False)
    for iens in range(num_realizations):
        # exist_ok=False: a leftover run path is treated as an error.
        Path(DUMMY_CONFIG['run_path'].format(iens)).mkdir(exist_ok=False)
        job = JobQueueNode(job_script=executable_script, num_cpu=DUMMY_CONFIG['num_cpu'], run_arg=RunArg(str(iens), MagicMock(spec=EnsembleAccessor), 0, 0, DUMMY_CONFIG['run_path'].format(iens), DUMMY_CONFIG['job_name'].format(iens)), max_runtime=max_runtime, callback_timeout=callback_timeout)
        job_queue.add_job(job, iens)
    return job_queue
def test_triplot_3d():
    """Exercise triplot on several 3D meshes and check the legends produced."""
    fig = plt.figure()
    # Cylinder mesh with two colored boundary markers -> legend has two entries.
    axes = fig.add_subplot(2, 2, 1, projection='3d')
    mesh = CylinderMesh(nr=32, nl=4)
    collections = triplot(mesh, axes=axes, boundary_kw={'colors': ['r', 'g']})
    assert collections
    legend = axes.legend()
    assert (len(legend.get_texts()) == 2)
    # Sphere mesh has no boundary -> empty legend.
    axes = fig.add_subplot(2, 2, 2, projection='3d')
    mesh = UnitIcosahedralSphereMesh(3)
    triplot(mesh, axes=axes, interior_kw={'edgecolors': 'white'})
    legend = axes.legend()
    assert (len(legend.get_texts()) == 0)
    # Cubed-sphere: interior styling only, no assertions beyond not raising.
    axes = fig.add_subplot(2, 2, 3, projection='3d')
    mesh = UnitCubedSphereMesh(3)
    interior_kw = {'facecolors': 'tab:orange', 'alpha': 0.5}
    triplot(mesh, axes=axes, interior_kw=interior_kw)
    # Unit cube: six boundary facets, one color each -> legend has six entries.
    axes = fig.add_subplot(2, 2, 4, projection='3d')
    mesh = UnitCubeMesh(3, 3, 3)
    colors = ['red', 'green', 'blue', 'orange', 'yellow', 'purple']
    boundary_kw = {'facecolors': colors, 'alpha': 0.85, 'linewidths': 0.1}
    collections = triplot(mesh, axes=axes, boundary_kw=boundary_kw)
    assert collections
    legend = axes.legend(loc='upper right')
    assert (len(legend.get_texts()) == 6)
class SetCommandResponder(CommandResponderBase):
    """Responder for SNMP SET-request PDUs."""
    SUPPORTED_PDU_TYPES = (rfc1905.SetRequestPDU.tagSet,)
    # SET-specific SMI error mapping: a missing object or instance is reported
    # as 'notWritable' rather than the read-class errors used elsewhere.
    SMI_ERROR_MAP = CommandResponderBase.SMI_ERROR_MAP.copy()
    SMI_ERROR_MAP[pysnmp.smi.error.NoSuchObjectError] = 'notWritable'
    SMI_ERROR_MAP[pysnmp.smi.error.NoSuchInstanceError] = 'notWritable'
    def _getMgmtFun(self, contextName):
        # Dispatch SETs to the write entry point of the context's MIB instrumentation.
        return self.snmpContext.getMibInstrum(contextName).writeMibObjects
class JsHtmlTree(JsHtml.JsHtmlRich):
    """Client-side (JavaScript) DOM helpers for the tree component."""
    def hide(self, i: int=None):
        """Collapse tree branches by clicking open arrow icons.

        :param i: Optional index of a single branch arrow to collapse; when
            omitted, every open branch in the component is collapsed.
        """
        if (i is not None):
            return JsObjects.JsVoid(('\nlet treeItem = document.querySelectorAll("#%(htmlCode)s i[name=item_arrow]")[%(index)s];\nif (treeItem.getAttribute("class") == "%(iconOpen)s"){dom.click();}\n' % {'htmlCode': self.component.html_code, 'iconOpen': self.component.options.icon_open, 'index': i}))
        return JsObjects.JsVoid(('\ndocument.querySelectorAll("#%(htmlCode)s i[name=item_arrow]").forEach( function(dom, k){\n  if(dom.getAttribute("class") == "%(iconOpen)s"){dom.click();}\n})' % {'htmlCode': self.component.html_code, 'iconOpen': self.component.options.icon_open}))
    def expand(self, i: int=None):
        """Expand tree branches by clicking closed arrow icons.

        :param i: Optional index of a single branch arrow to expand; when
            omitted, every closed branch in the component is expanded.
        """
        if (i is not None):
            return JsObjects.JsVoid(('\nlet treeItem = document.querySelectorAll("#%(htmlCode)s i[name=item_arrow]")[%(index)s];\nif (treeItem.getAttribute("class") == "%(iconClose)s"){dom.click();}\n' % {'htmlCode': self.component.html_code, 'iconClose': self.component.options.icon_close, 'index': i}))
        return JsObjects.JsVoid(('\ndocument.querySelectorAll("#%(htmlCode)s i[name=item_arrow]").forEach( function(dom, k){\n  if(dom.getAttribute("class") == "%(iconClose)s"){dom.click();}\n})\n' % {'htmlCode': self.component.html_code, 'iconClose': self.component.options.icon_close}))
    def copy(self):
        """Serialize the visible tree into a nested JSON object and copy it to
        the clipboard via a temporary textarea element."""
        return JsObjects.JsVoid(('\nlet treeData = {}; var curBranch = [];\ndocument.querySelectorAll("#%(htmlCode)s span[name=item_value]").forEach( function(dom, k){\n  let nodeDepth = parseInt(dom.parentNode.parentNode.parentNode.getAttribute("data-depth"))-1;\n  let nodeParent = dom.parentNode.parentNode.parentNode.getAttribute("data-parent");\n  let childNodes = dom.parentNode.querySelector("ul");\n  if (nodeDepth < curBranch.length){\n    curBranch = curBranch.slice(0, nodeDepth)}\n  if(childNodes){\n    if(nodeParent){\n      var curNode = treeData;\n      curBranch.forEach(function(node){curNode = curNode[node]})\n      curNode[dom.innerHTML] = {} \n      curBranch.push(dom.innerHTML)\n    }\n    else{\n      treeData[dom.innerHTML] = {} \n      curBranch = [dom.innerHTML]}\n  } else {\n    var curNode = treeData;\n    curBranch.forEach(function(node){curNode = curNode[node]})\n    curNode[dom.innerHTML] = {} \n    curNode[dom.innerHTML] = dom.innerHTML\n  }\n})\nvar dummy = document.createElement("textarea");\ndocument.body.appendChild(dummy);\ndummy.value = JSON.stringify(treeData);\ndummy.select();\ndocument.execCommand("copy");\ndocument.body.removeChild(dummy);\n' % {'htmlCode': self.component.html_code}))
    def current_path(self):
        """Return the path (list of ancestor labels) of the clicked tree item,
        derived from `data-parent` attributes up to the component root."""
        return JsObjects.JsArray.JsArray.get(("\n(function(src, parentCode){\nlet childParentNode = src.parentNode; let childPath = []; childPath.push(src.outerText);\n  while (childParentNode.id != parentCode){\n    childParentNode = childParentNode.parentNode;\n    if (childParentNode.hasAttribute('data-parent')){\n      childPath.push(childParentNode.getAttribute('data-parent'))\n    };\n  }; return childPath; })(event.srcElement, '%s')" % self.component.html_code))
    def active(self):
        """Alias of current_path(): path of the currently clicked tree item."""
        return JsObjects.JsArray.JsArray.get(("\n(function(src, parentCode){\nlet childParentNode = src.parentNode; let childPath = []; childPath.push(src.outerText);\n  while (childParentNode.id != parentCode){\n    childParentNode = childParentNode.parentNode;\n    if (childParentNode.hasAttribute('data-parent')){\n      childPath.push(childParentNode.getAttribute('data-parent'))\n    };\n  }; return childPath; })(event.srcElement, '%s')" % self.component.html_code))
class HistoricalPriceRecord(collections.namedtuple('HistoricalPriceRecord', ['time', 'open', 'high', 'low', 'close', 'volume'])):
    """OHLCV record with a microsecond epoch timestamp localized to the KRX timezone."""
    __slots__ = ()
    # Korea Exchange trading calendar timezone, used to localize parsed datetimes.
    _krx_timezone = get_calendar('XKRX').tz
    def from_tuple(cls, tup):
        """Build a record from one DataFrame row tuple.

        NOTE(review): the `tup.` attribute accesses and `'' in tup._fields`
        checks below have lost their field names (likely non-ASCII Korean
        column names dropped during extraction) -- this block is not valid
        Python as written; restore the original field names before use.
        NOTE(review): `cls` as first parameter suggests a stripped
        @classmethod decorator -- confirm upstream.
        """
        if ('' in tup._fields):
            # Daily data: date-only column, format YYYYMMDD.
            dt = datetime.datetime.strptime(tup., '%Y%m%d')
            dt = cls._krx_timezone.localize(dt)
            time = (dt.timestamp() * (10 ** 6))
        elif ('' in tup._fields):
            # Intraday data: datetime column, format YYYYMMDDHHMMSS.
            dt = datetime.datetime.strptime(tup., '%Y%m%d%H%M%S')
            dt = cls._krx_timezone.localize(dt)
            time = (dt.timestamp() * (10 ** 6))
        else:
            raise KiwoomOpenApiPlusError('Cannot specify time')
        # abs(): the upstream feed encodes direction via sign; only magnitudes are kept.
        open = abs(float(tup.))
        high = abs(float(tup.))
        low = abs(float(tup.))
        close = abs(float(tup.))
        volume = abs(float(tup.))
        return cls(time, open, high, low, close, volume)
    def records_from_dataframe(cls, df):
        # Iterate rows in reverse so records come out oldest-first.
        return [cls.from_tuple(tup) for tup in df[::(- 1)].itertuples()]
    def dict_records_from_dataframe(cls, df):
        # Same as records_from_dataframe but as plain dicts.
        return [msg._asdict() for msg in cls.records_from_dataframe(df)]
def rectify(self, context, me=None, bm=None, uv_layers=None):
    """Rectify every selected UV island, one island at a time.

    Stores the current UV selection, isolates each island in turn (deselect
    all, select the island's faces), runs `main` on it, then restores the
    original selection.  `me`/`bm`/`uv_layers` default to the active edit-mode
    object's data when not supplied.
    """
    if (me is None):
        me = bpy.context.active_object.data
        bm = bmesh.from_edit_mesh(me)
        uv_layers = bm.loops.layers.uv.verify()
    # Snapshot the selection so it can be restored after per-island processing.
    faces_loops = utilities_uv.selection_store(bm, uv_layers, return_selected_faces_loops=True)
    islands = utilities_uv.getSelectionIslands(bm, uv_layers, selected_faces=set(faces_loops.keys()))
    for island in islands:
        # Isolate this island as the only selection before rectifying it.
        bpy.ops.uv.select_all(action='DESELECT')
        utilities_uv.set_selected_faces(island, bm, uv_layers)
        main(me, bm, uv_layers, island, faces_loops)
    utilities_uv.selection_restore(bm, uv_layers)
class VideoNoteMessageFactory(MessageFactory):
    """Test factory sending a Telegram video note and validating its EFB counterpart."""
    async def send_message(self, client: TelegramClient, chat_id: int, target: Message=None) -> Message:
        # Send the mock clip as a round video note, optionally as a reply.
        return (await client.send_file(chat_id, file='tests/mocks/video_note_0.mp4', video_note=True, reply_to=target))
    def compare_message(self, tg_msg: Message, efb_msg: EFBMessage) -> None:
        """Check the EFB message is a video whose file size matches the Telegram file."""
        assert (efb_msg.type == MsgType.Video)
        assert efb_msg.file
        # seek(0, 2) moves to EOF and returns the byte size of the received file.
        file_size = efb_msg.file.seek(0, 2)
        assert (file_size == tg_msg.file.size)
    async def finalize_message(self, tg_msg: Message, efb_msg: EFBMessage):
        # Release the temp file handle if the comparison left it open.
        if (efb_msg.file and (not efb_msg.file.closed)):
            efb_msg.file.close()
# NOTE(review): the bare `.parametrize(...)` line appears to be a
# `@pytest.mark.parametrize` decorator with its prefix lost during extraction.
.parametrize('test_input, expected', [('test', False), (10, False), (1.0, False), (True, False), ('/does/not/exist', False), ([], False), ({}, False)])
def test_bad_file_type(test_input, expected):
    """Non-path and nonexistent-path inputs must fail the File type check."""
    assert (config.File.type_check(test_input) == expected)
    assert (str(config.File) == 'existing file')
def extractStarrynightnovelsWordpressCom(item):
    """Map a starrynightnovels release post to a release message.

    Returns None for previews or posts without volume/chapter info, a release
    message for recognized tags or title prefixes, and False otherwise.
    """
    title = item['title']
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(title)
    if not (chp or vol) or 'preview' in title.lower():
        return None
    # Known tags, checked in order; first match wins.
    tag_rules = [
        ('Shini Yasui Koshaku Reijo to Shichi-nin no Kikoshi', 'Shini Yasui Koshaku Reijo to Shichi-nin no Kikoshi', 'translated'),
        ("Stepbrother's Diary", 'Lean Tuna and Her Stepbrothers Plan to Become a Fatty Tuna', 'translated'),
        ('MaguToro', 'Lean Tuna and Her Stepbrothers Plan to Become a Fatty Tuna', 'translated'),
        ('Lewd Game', 'I Decided to Participate in a Lewd Game in My Dream', 'translated'),
        ('summoned hero', 'I Summoned the Hero, to the Present Age', 'translated'),
        ('Seven Nobles', "Duke's Daughter who is Liable to Die and the Seven Nobles", 'translated'),
        ('Erica', "Duke's Daughter who is Liable to Die and the Seven Nobles", 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    tags = item['tags']
    for (tag, series, tl_type) in tag_rules:
        if tag in tags:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    # Fallback: recognize known abbreviations at the start of the title.
    prefix_rules = [
        ('IDWBV ', 'I Dont Want to Become a Villainess, So I Aim at Becoming a Perfect Lady Together with the Prince!', 'translated'),
    ]
    for (prefix, series, tl_type) in prefix_rules:
        if title.lower().startswith(prefix.lower()):
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesVariablepieSonificationTracks(Options):
    """Highcharts `series.variablepie.sonification.tracks` option wrapper.

    NOTE(review): each getter/setter pair below shares a name -- upstream these
    are almost certainly @property / @name.setter pairs whose decorators were
    stripped during extraction; as written, the setter shadows the getter.
    """
    def activeWhen(self) -> 'OptionSeriesVariablepieSonificationTracksActivewhen':
        # Sub-options controlling when the track is active.
        return self._config_sub_data('activeWhen', OptionSeriesVariablepieSonificationTracksActivewhen)
    def instrument(self):
        # Instrument preset name; Highcharts default is 'piano'.
        return self._config_get('piano')
    def instrument(self, text: str):
        self._config(text, js_type=False)
    def mapping(self) -> 'OptionSeriesVariablepieSonificationTracksMapping':
        # Sub-options mapping data properties to audio parameters.
        return self._config_sub_data('mapping', OptionSeriesVariablepieSonificationTracksMapping)
    def midiName(self):
        # Track name used in exported MIDI.
        return self._config_get(None)
    def midiName(self, text: str):
        self._config(text, js_type=False)
    def pointGrouping(self) -> 'OptionSeriesVariablepieSonificationTracksPointgrouping':
        # Sub-options for grouping points when there are more points than time allows.
        return self._config_sub_data('pointGrouping', OptionSeriesVariablepieSonificationTracksPointgrouping)
    def roundToMusicalNotes(self):
        # Whether pitch values snap to musical notes; defaults to True.
        return self._config_get(True)
    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)
    def showPlayMarker(self):
        # Whether the play marker is shown while sonifying; defaults to True.
        return self._config_get(True)
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)
    def type(self):
        # Track type; defaults to 'instrument'.
        return self._config_get('instrument')
    def type(self, text: str):
        self._config(text, js_type=False)
class TraitListNodeType(NodeType):
    """Tree node type for a list trait of a specific object class and trait name."""
    # Class the list's owning object must be an instance of.
    klass = Any()
    # Label shown for the node.
    text = Str()
    # Name of the trait the list must be bound to.
    trait_name = Str()
    def is_type_for(self, node):
        """Match lists that carry an `object` of `klass` and are named `trait_name`."""
        is_type_for = (isinstance(node, list) and hasattr(node, 'object') and isinstance(node.object, self.klass) and (node.name == self.trait_name))
        return is_type_for
    def allows_children(self, node):
        # A list node can always contain children.
        return True
    def has_children(self, node):
        return (len(node) > 0)
    def get_children(self, node):
        # The list's elements are its children.
        return node
    def get_text(self, node):
        return self.text
def test_save_as_sets_the_render_file_name_for_shots(create_test_data, create_maya_env):
    """save_as() must configure defaultRenderGlobals.imageFilePrefix for shot tasks."""
    data = create_test_data
    maya_env = create_maya_env
    version1 = Version(task=data['task6'])
    version1.extension = '.ma'
    version1.update_paths()
    maya_env.save_as(version1)
    # Expected template uses Maya's <RenderLayer>/<RenderPass> tokens verbatim.
    expected_path = 'renders/{take_name}/v{version_number:03d}/<RenderLayer>/{version_nice_name}_v{version_number:03d}_<RenderLayer>_<RenderPass>'.format(version_path=version1.absolute_path, take_name=version1.take_name, project_code=version1.task.project.code, version_nice_name=version1.nice_name, version_number=version1.version_number)
    dRG = pm.PyNode('defaultRenderGlobals')
    assert (expected_path == dRG.getAttr('imageFilePrefix'))
def _fix_argv(argv, sys_path, main_module, *, platform=sys.platform, executable=sys.executable, get_executable=_get_executable, get_main_module_name=main_module_name):
    """Return argv with argv[0] rewritten into a human-readable command.

    The keyword-only parameters exist for dependency injection in tests.
    """
    if not sys_path[0]:
        # Empty sys.path[0] means the program was started as `python -m pkg`.
        module_name = get_main_module_name(main_module)
        if module_name is not None:
            argv = argv[:]
            argv[0] = f"{(get_executable(executable) or 'python')} -m {module_name}"
    elif platform.startswith('win'):
        # On Windows, prepend the interpreter name to the script path.
        argv = argv[:]
        argv[0] = f"{(get_executable(executable) or 'python')} {argv[0]}"
    else:
        # Elsewhere, normalize argv[0] itself to an executable name.
        resolved = get_executable(argv[0])
        argv = argv[:]
        argv[0] = resolved
    return argv
class Requester():
    """Thin wrapper around `requests` that sends each call with a random User-Agent."""
    # Pool of user-agent strings; `userAgents` is a newline-separated blob defined elsewhere.
    USER_AGENTS = userAgents.split('\n')
    def get(self, url, _proxies=None):
        """HTTP GET `url`; `_proxies` maps scheme -> proxy URL (optional)."""
        # Fix: avoid a mutable default argument; `None` behaves like the old `{}`.
        return requests.get(url, headers={'User-Agent': random.choice(self.USER_AGENTS)}, proxies=(_proxies or {}))
    def head(self, url, _proxies=None):
        """HTTP HEAD `url`; `_proxies` maps scheme -> proxy URL (optional)."""
        return requests.head(url, headers={'User-Agent': random.choice(self.USER_AGENTS)}, proxies=(_proxies or {}))
def create_imports() -> str:
    """Build the JS import preamble injected into generated MDX pages.

    Returns one import statement per component, each on its own line, followed
    by a trailing blank line.
    """
    component_paths = {
        'LinkButtons': '../../../../website/src/components/LinkButtons.jsx',
        'CellOutput': '../../../../website/src/components/CellOutput.jsx',
        '{BokehFigure, PlotlyFigure}': '../../../../website/src/components/Plotting.jsx',
    }
    statements = [f'import {name} from "{path}";' for (name, path) in component_paths.items()]
    return '\n'.join(statements) + '\n\n'
class SyncPQServer(SyncServer):
    """Synchronous FL server using product-quantization (PQ) compression on the channel."""
    def __init__(self, *, global_model: IFLModel, channel: Optional[ProductQuantizationChannel]=None, **kwargs):
        init_self_cfg(self, component_class=__class__, config_class=SyncPQServerConfig, **kwargs)
        super().__init__(global_model=global_model, channel=channel, **kwargs)
        # PQ compression requires a PQ-aware channel.
        if (not isinstance(self._channel, ProductQuantizationChannel)):
            # NOTE(review): the comma makes this a two-argument TypeError rather
            # than one concatenated message -- likely a missing string concat.
            raise TypeError('SyncPQServer expects channel of type ProductQuantizationChannel,', f' {type(self._channel)} given.')
        # Per-parameter centroids seeded from the last aggregated model.
        self._seed_centroids: Dict[(str, Tensor)] = {}
    def global_pq_centroids(self):
        # NOTE(review): used without a call in receive_update_from_client --
        # likely a stripped @property decorator; confirm upstream.
        return self._seed_centroids
    def update_seed_centroids(self, aggregated_model: nn.Module):
        """Recompute seed centroids by PQ-encoding each large enough parameter."""
        seed_centroids = {}
        state_dict = aggregated_model.state_dict()
        for (name, param) in state_dict.items():
            # Only multi-dimensional tensors above the size threshold are quantized.
            if ((param.ndim > 1) and (param.numel() >= self._channel.cfg.min_numel_to_quantize)):
                pq = PQ(param.data.size(), self._channel.cfg.max_block_size, self._channel.cfg.num_codebooks, self._channel.cfg.max_num_centroids, self._channel.cfg.num_k_means_iter, self._channel.cfg.verbose)
                (centroids, _) = pq.encode(param.data.cpu())
                seed_centroids[name] = centroids
        self._seed_centroids = seed_centroids
    def receive_update_from_client(self, message: Message):
        # Attach the server-side centroids so clients quantize against shared codebooks.
        message.seed_centroids = self.global_pq_centroids
        super().receive_update_from_client(message)
# NOTE(review): the bare `.requires_dbt_version`, `.only_on_targets` and
# `.skip_targets` lines below look like custom pytest marks whose decorator
# prefixes were lost during extraction -- this block is not valid Python as-is.
.requires_dbt_version('1.3.0')
class TestJsonschema():
    """dbt jsonschema test: string minLength validation across targets."""
    .only_on_targets(SUPPORTED_TARGETS)
    def test_valid(self, test_id: str, dbt_project: DbtProject):
        # A JSON string exactly at MIN_LENGTH passes the schema test.
        valid_value = json.dumps(''.join(('*' for _ in range(MIN_LENGTH))))
        data = [{COLUMN_NAME: valid_value}]
        result = dbt_project.test(test_id, TEST_NAME, dict(column_name=COLUMN_NAME, **SCHEMA), data=data)
        assert (result['status'] == 'pass')
    .only_on_targets(SUPPORTED_TARGETS)
    def test_invalid(self, test_id: str, dbt_project: DbtProject):
        # One character short of MIN_LENGTH fails the schema test.
        invalid_value = json.dumps(''.join(('*' for _ in range((MIN_LENGTH - 1)))))
        data = [{COLUMN_NAME: invalid_value}]
        result = dbt_project.test(test_id, TEST_NAME, dict(column_name=COLUMN_NAME, **SCHEMA), data=data)
        assert (result['status'] == 'fail')
    .skip_targets([*SUPPORTED_TARGETS, 'bigquery'])
    def test_invalid_target(self, test_id: str, dbt_project: DbtProject):
        # Unsupported targets should error out rather than pass/fail.
        data = [{COLUMN_NAME: str()}]
        result = dbt_project.test(test_id, TEST_NAME, dict(column_name=COLUMN_NAME, **SCHEMA), data=data)
        assert (result['status'] == 'error')
class _ToolBar(QtGui.QToolBar):
    """Qt toolbar kept in sync with a ToolBarManager's enabled/visible traits."""
    def __init__(self, tool_bar_manager, parent):
        QtGui.QToolBar.__init__(self, parent)
        # Tool items added to this bar; disposed together with the bar.
        self.tools = []
        self.tool_bar_manager = tool_bar_manager
        # Mirror manager state changes onto the Qt widget.
        self.tool_bar_manager.observe(self._on_tool_bar_manager_enabled_changed, 'enabled')
        self.tool_bar_manager.observe(self._on_tool_bar_manager_visible_changed, 'visible')
        return
    def dispose(self):
        """Detach trait observers and dispose all contained tools."""
        self.tool_bar_manager.observe(self._on_tool_bar_manager_enabled_changed, 'enabled', remove=True)
        self.tool_bar_manager.observe(self._on_tool_bar_manager_visible_changed, 'visible', remove=True)
        for item in self.tools:
            item.dispose()
        self.tools = []
    def _on_tool_bar_manager_enabled_changed(self, event):
        # Trait change handler: propagate enabled state to the widget.
        self.setEnabled(event.new)
    def _on_tool_bar_manager_visible_changed(self, event):
        # Trait change handler: propagate visibility to the widget.
        self.setVisible(event.new)
        return
def check_session_hijacking(uri, thisList, username, password, scanid):
    """Record a session-fixation finding if `uri` contains any session keyword.

    Keywords found in the URI are logged to the result DB; otherwise the URI
    is handed off to check() for further testing.
    """
    for keyword in thisList:
        if (keyword in uri):
            attack_result = {'id': 5, 'scanid': scanid, 'url': uri, 'alert': 'Session Fixation', 'impact': 'High', 'req_headers': 'NA', 'req_body': 'NA', 'res_headers': 'NA', 'res_body': 'NA'}
            dbupdate.insert_record(attack_result)
            print('Url is vulnerable to session hijacking')
        else:
            # NOTE(review): this else fires for EVERY non-matching keyword, so
            # check() may run many times per URI -- a for/else (or a break after
            # the match) was possibly intended; confirm before changing.
            check(username, password, scanid, uri)
def test_multi_missing(data_client):
    """MultiSearch with raise_on_error=False returns None for a missing index."""
    s1 = Repository.search()
    s2 = Search(index='flat-git')
    # This index does not exist, so its sub-search must fail.
    s3 = Search(index='does_not_exist')
    ms = MultiSearch()
    ms = ms.add(s1).add(s2).add(s3)
    # Default behavior: any failing sub-search raises.
    with raises(ApiError):
        ms.execute()
    (r1, r2, r3) = ms.execute(raise_on_error=False)
    assert (1 == len(r1))
    assert isinstance(r1[0], Repository)
    assert (r1._search is s1)
    assert (52 == r2.hits.total.value)
    assert (r2._search is s2)
    # The failed sub-search yields None instead of a response.
    assert (r3 is None)
class SimpleProgressThread(ProgressThread):
    """Run a generator-returning callable on a thread.

    Each value the generator yields is forwarded as a 'progress-update'
    signal; 'done' is always emitted when the loop finishes, stops, or fails.
    """
    def __init__(self, target, *args, **kwargs):
        ProgressThread.__init__(self)
        # Deferred call: (callable, positional args, keyword args).
        self.__payload = (target, args, kwargs)
        self.__cancelled = False
    def stop(self):
        """Request the loop to end after the current progress step."""
        self.__cancelled = True
    def run(self):
        (func, func_args, func_kwargs) = self.__payload
        try:
            for step in func(*func_args, **func_kwargs):
                self.emit('progress-update', step)
                if self.__cancelled:
                    break
        except GeneratorExit:
            pass
        except Exception:
            logger.exception('Unhandled exception')
        finally:
            # Always signal completion, even on error or early stop.
            self.emit('done')
class FirewallEnforcer(object):
    """Reconciles a project's firewall rules with an expected rule set.

    Computes delete/insert/update change sets between the current and expected
    FirewallRules and applies them through the compute client, optionally
    restricted to specific networks.
    """
    def __init__(self, project, compute_client, expected_rules, current_rules=None, project_sema=None, operation_sema=None, add_rule_callback=None):
        """Store enforcement inputs.

        Args:
            project: Project id whose firewall policy is enforced.
            compute_client: Client exposing firewall CRUD and quota APIs.
            expected_rules: FirewallRules describing the desired end state.
            current_rules: Optional pre-fetched FirewallRules; fetched lazily otherwise.
            project_sema: Optional semaphore bounding concurrent project changes.
            operation_sema: Deprecated; ignored.
            add_rule_callback: Optional callback used when refreshing current rules.
        """
        self.project = project
        self.compute_client = compute_client
        self.expected_rules = expected_rules
        if current_rules:
            self.current_rules = current_rules
        else:
            self.current_rules = None
        self.project_sema = project_sema
        if operation_sema:
            LOGGER.warning('Operation semaphore is deprecated. Argument ignored.')
        self.operation_sema = None
        self._add_rule_callback = add_rule_callback
        # Pending change sets (rule names), computed by _build_change_set().
        self._rules_to_delete = []
        self._rules_to_insert = []
        self._rules_to_update = []
        # Rules successfully applied, kept for reporting.
        self._deleted_rules = []
        self._inserted_rules = []
        self._updated_rules = []
    def apply_firewall(self, prechange_callback=None, networks=None, allow_empty_ruleset=False):
        """Compute and apply the firewall change set; return the number of changes.

        Args:
            prechange_callback: Optional callable invoked with the change sets;
                returning falsy aborts the run with zero changes.
            networks: Optional list of network names to scope the enforcement.
            allow_empty_ruleset: Permit an empty expected rule set (delete-all).

        Raises:
            EmptyProposedFirewallRuleSetError: Expected rules empty and not allowed.
        """
        self._rules_to_delete = []
        self._rules_to_insert = []
        self._rules_to_update = []
        if (not self.current_rules):
            self.refresh_current_rules()
        if ((not self.expected_rules.rules) and (not allow_empty_ruleset)):
            raise EmptyProposedFirewallRuleSetError('No rules defined in the expected rules.')
        # Fast path: nothing to do when both rule sets already match.
        if (self.current_rules.filtered_by_networks(networks) == self.expected_rules.filtered_by_networks(networks)):
            LOGGER.info('Current and expected rules match for project %s.', self.project)
            return 0
        self._build_change_set(networks)
        self._validate_change_set(networks)
        if prechange_callback:
            if (not prechange_callback(self.project, self._rules_to_delete, self._rules_to_insert, self._rules_to_update)):
                LOGGER.warning('The Prechange Callback returned False for project %s, changes will not be applied.', self.project)
                return 0
        if self.project_sema:
            self.project_sema.acquire()
        try:
            # Decide operation order based on remaining firewall quota headroom.
            delete_before_insert = self._check_change_operation_order(len(self._rules_to_insert), len(self._rules_to_delete))
            changed_count = 0
            if (not networks):
                # None means "all networks" for the per-network apply loop.
                networks = [None]
            for network in networks:
                changed_count += self._apply_change_set(delete_before_insert, network)
        finally:
            if self.project_sema:
                self.project_sema.release()
        return changed_count
    def fetch_rules_to_change(self):
        """Return the pending (delete, insert, update) rule-name lists."""
        return (self._rules_to_delete, self._rules_to_insert, self._rules_to_update)
    def refresh_current_rules(self):
        """Re-fetch the project's current firewall rules from the API."""
        current_rules = FirewallRules(self.project, add_rule_callback=self._add_rule_callback)
        current_rules.add_rules_from_api(self.compute_client)
        self.current_rules = current_rules
    def get_deleted_rules(self):
        """Rules successfully deleted during the last apply."""
        return self._deleted_rules
    def get_inserted_rules(self):
        """Rules successfully inserted during the last apply."""
        return self._inserted_rules
    def get_updated_rules(self):
        """Rules successfully updated during the last apply."""
        return self._updated_rules
    def _build_change_set(self, networks=None):
        """Populate the delete/insert/update lists by diffing rule names and bodies."""
        if networks:
            current_rules = self.current_rules.filtered_by_networks(networks)
            expected_rules = self.expected_rules.filtered_by_networks(networks)
        else:
            current_rules = self.current_rules.rules
            expected_rules = self.expected_rules.rules
        for rule_name in current_rules:
            if (rule_name not in expected_rules):
                self._rules_to_delete.append(rule_name)
        for rule_name in expected_rules:
            if (rule_name not in current_rules):
                self._rules_to_insert.append(rule_name)
        for rule_name in expected_rules:
            if (rule_name in current_rules):
                if (expected_rules[rule_name] != current_rules[rule_name]):
                    self._rules_to_update.append(rule_name)
    def _validate_change_set(self, networks=None):
        """Sanity-check the change set against name clashes and network scope."""
        for rule_name in self._rules_to_insert:
            if ((rule_name in self.current_rules.rules) and (rule_name not in self._rules_to_delete)):
                raise FirewallRuleValidationError(('The rule %s is in the rules to insert set, but the same rule name already exists on project %s. It may be used on a different network.' % (rule_name, self.project)))
        if networks:
            for rule_name in self._rules_to_update:
                impacted_network = get_network_name_from_url(self.current_rules.rules[rule_name]['network'])
                if (impacted_network not in networks):
                    raise NetworkImpactValidationError(('The rule %s is in the rules to update set, but it is currently on a network, "%s", that is not in the allowed networks list for project %s: "%s". Updating the rule to %s would impact the wrong network.' % (rule_name, impacted_network, self.project, ', '.join(networks), self.expected_rules.rules[rule_name])))
    def _check_change_operation_order(self, insert_count, delete_count):
        """Return True when deletes must run before inserts to stay under quota."""
        delete_before_insert = False
        try:
            firewall_quota = self.compute_client.get_firewall_quota(self.project)
        except KeyError as e:
            LOGGER.error('Error getting quota for project %s, %s', self.project, e)
            firewall_quota = None
        if firewall_quota:
            usage = firewall_quota.get('usage', 0)
            limit = firewall_quota.get('limit', 0)
            if ((usage + insert_count) > limit):
                # Even with deletes first the quota may be insufficient.
                if (((usage - delete_count) + insert_count) > limit):
                    raise FirewallQuotaExceededError(('Firewall enforcement cannot update the policy for project %s without exceed the current firewalls quota: %u,' % (self.project, limit)))
                else:
                    LOGGER.info('Switching to "delete first" rule update order for project %s.', self.project)
                    delete_before_insert = True
        else:
            # Unknown quota: be conservative and free capacity first.
            LOGGER.warning('Unknown firewall quota, switching to "delete first" rule update order for project %s.', self.project)
            delete_before_insert = True
        return delete_before_insert
    def _apply_change_set(self, delete_before_insert, network):
        """Apply delete/insert/update for one network; return change count."""
        change_count = 0
        if delete_before_insert:
            change_count += self._delete_rules(network)
            change_count += self._insert_rules(network)
        else:
            change_count += self._insert_rules(network)
            change_count += self._delete_rules(network)
        change_count += self._update_rules(network)
        return change_count
    def _insert_rules(self, network):
        """Insert pending rules on `network`; raise if any insert fails."""
        change_count = 0
        if self._rules_to_insert:
            LOGGER.info('Inserting rules: %s', ', '.join(self._rules_to_insert))
            rules = filter_rules_by_network([self.expected_rules.rules[rule_name] for rule_name in self._rules_to_insert], network)
            insert_function = self.compute_client.insert_firewall_rule
            (successes, failures, change_errors) = self._apply_change(insert_function, rules)
            self._inserted_rules.extend(successes)
            change_count += len(successes)
            if failures:
                raise FirewallEnforcementInsertFailedError('Firewall enforcement failed while inserting rules for project {}. The following errors were encountered: {}'.format(self.project, change_errors))
        return change_count
    def _delete_rules(self, network):
        """Delete pending rules on `network`; raise if any delete fails."""
        change_count = 0
        if self._rules_to_delete:
            LOGGER.info('Deleting rules: %s', ', '.join(self._rules_to_delete))
            rules = filter_rules_by_network([self.current_rules.rules[rule_name] for rule_name in self._rules_to_delete], network)
            delete_function = self.compute_client.delete_firewall_rule
            (successes, failures, change_errors) = self._apply_change(delete_function, rules)
            self._deleted_rules.extend(successes)
            change_count += len(successes)
            if failures:
                raise FirewallEnforcementDeleteFailedError('Firewall enforcement failed while deleting rules for project {}. The following errors were encountered: {}'.format(self.project, change_errors))
        return change_count
    def _update_rules(self, network):
        """Update pending rules on `network`, patching where possible."""
        change_count = 0
        if self._rules_to_update:
            LOGGER.info('Updating rules: %s', ', '.join(self._rules_to_update))
            rules = filter_rules_by_network([self.expected_rules.rules[rule_name] for rule_name in self._rules_to_update], network)
            rules_to_patch = []
            rules_to_replace = []
            for rule in rules:
                # Patch is cheaper but only valid for a subset of field changes.
                if _rule_update_can_patch(self.current_rules.rules[rule['name']], rule):
                    rules_to_patch.append(rule)
                else:
                    rules_to_replace.append(rule)
            if rules_to_patch:
                change_count += self._patch_rules(rules_to_patch)
            if rules_to_replace:
                change_count += self._replace_rules(rules_to_replace)
        return change_count
    def _patch_rules(self, rules):
        """Patch the given rules in place; raise on any failure."""
        LOGGER.info('Patching rules: %s', ', '.join((rule['name'] for rule in rules)))
        patch_function = self.compute_client.patch_firewall_rule
        (successes, failures, change_errors) = self._apply_change(patch_function, rules)
        self._updated_rules.extend(successes)
        if failures:
            # Fixed copy-paste: this message previously said "while deleting rules".
            raise FirewallEnforcementUpdateFailedError('Firewall enforcement failed while updating rules for project {}. The following errors were encountered: {}'.format(self.project, change_errors))
        return len(successes)
    def _replace_rules(self, rules):
        """Fully replace the given rules; raise on any failure."""
        LOGGER.info('Replacing rules: %s', ', '.join((rule['name'] for rule in rules)))
        replace_function = self.compute_client.replace_firewall_rule
        (successes, failures, change_errors) = self._apply_change(replace_function, rules)
        self._updated_rules.extend(successes)
        if failures:
            # Fixed copy-paste: this message previously said "while deleting rules".
            raise FirewallEnforcementUpdateFailedError('Firewall enforcement failed while updating rules for project {}. The following errors were encountered: {}'.format(self.project, change_errors))
        return len(successes)
    def _apply_change(self, firewall_function, rules):
        """Apply `firewall_function` to each rule, collecting outcomes.

        Returns:
            (applied_rules, failed_rules, change_errors): successful rules,
            failed rules, and human-readable error strings for the failures.
        """
        applied_rules = []
        failed_rules = []
        change_errors = []
        if (not rules):
            return (applied_rules, failed_rules, change_errors)
        for rule in rules:
            try:
                response = firewall_function(self.project, rule, blocking=True, retry_count=OPERATION_RETRY_COUNT, timeout=OPERATION_TIMEOUT)
            except (api_errors.ApiNotEnabledError, api_errors.ApiExecutionError) as e:
                LOGGER.exception('Error changing firewall rule %s for project %s: %s', rule.get('name', ''), self.project, e)
                error_str = ('Rule: %s\nError: %s' % (rule.get('name', ''), e))
                change_errors.append(error_str)
                failed_rules.append(rule)
                continue
            except api_errors.OperationTimeoutError as e:
                LOGGER.exception('Timeout changing firewall rule %s for project %s: %s', rule.get('name', ''), self.project, e)
                error_str = ('Rule: %s\nError: %s' % (rule.get('name', ''), e))
                change_errors.append(error_str)
                failed_rules.append(rule)
                continue
            if _is_successful(response):
                applied_rules.append(rule)
            else:
                failed_rules.append(rule)
        return (applied_rules, failed_rules, change_errors)
def test_matcher_start_zero_plus_not_in(matcher):
    """A zero-or-more NOT_IN token followed by 'c' matches exactly once per doc."""
    # Any run of tokens other than 't'/'z' (possibly empty), then a literal 'c'.
    pattern = [{'ORTH': {'NOT_IN': ['t', 'z']}, 'OP': '*'}, {'ORTH': 'c'}]
    matcher.add('TSTEND', [pattern])
    # Minimal pipeline: whitespace-tokenize into a Doc sharing the matcher vocab.
    nlp = (lambda string: Doc(matcher.vocab, words=string.split()))
    assert (len(matcher(nlp('c'))) == 1)
    assert (len(matcher(nlp('b c'))) == 1)
    assert (len(matcher(nlp('z c'))) == 1)
    assert (len(matcher(nlp('z b c'))) == 1)
    assert (len(matcher(nlp('t z b c'))) == 1)
    assert (len(matcher(nlp('a t z c'))) == 1)
    assert (len(matcher(nlp('a t z b c'))) == 1)
# NOTE(review): this bare call looks like a stripped `@_meta(...)` decorator
# application for the class below -- confirm upstream.
_meta(definition.SealingArrayCard)
class SealingArrayCard():
    """UI/metadata definition for the Sealing Array card."""
    name = ''
    illustrator = ''
    cv = 'shoureiN'
    tag = 'sealarray'
    # NOTE(review): name/illustrator/description appear emptied of their
    # original (likely non-ASCII) text during extraction.
    description = ',,,,'
    def is_action_valid(self, c, tl):
        """Valid only when exactly one target is chosen and it is not the actor.

        NOTE(review): `self.me` is not defined in this class -- presumably
        provided by a mixin/base in the full file.
        """
        if (len(tl) != 1):
            return (False, '')
        t = tl[0]
        if (self.me is t):
            return (False, '!')
        return (True, '!')
    def sound_effect(self, act):
        # Voice line played when the card resolves.
        return 'thb-cv-card_sealarray'
class Type():
def __init__(self, dtype, shape=None, strides=None, offset=0, nbytes=None):
self.shape = (tuple() if (shape is None) else wrap_in_tuple(shape))
self.size = product(self.shape)
self.dtype = dtypes.normalize_type(dtype)
self.ctype = dtypes.ctype_module(self.dtype)
default_strides = helpers.default_strides(self.shape, self.dtype.itemsize)
if (strides is None):
strides = default_strides
else:
strides = tuple(strides)
self._default_strides = (strides == default_strides)
self.strides = strides
default_nbytes = helpers.min_buffer_size(self.shape, self.dtype.itemsize, self.strides)
if (nbytes is None):
nbytes = default_nbytes
self._default_nbytes = (nbytes == default_nbytes)
self.nbytes = nbytes
self.offset = offset
self._cast = dtypes.cast(self.dtype)
def __eq__(self, other):
return ((self.__class__ == other.__class__) and (self.shape == other.shape) and (self.dtype == other.dtype) and (self.strides == other.strides) and (self.offset == other.offset) and (self.nbytes == self.nbytes))
def __hash__(self):
return hash((self.__class__, self.shape, self.dtype, self.strides, self.offset, self.nbytes))
def __ne__(self, other):
return (not (self == other))
def compatible_with(self, other):
if (self.dtype != other.dtype):
return False
common_shape_len = min(len(self.shape), len(other.shape))
if (self.shape[(- common_shape_len):] != other.shape[(- common_shape_len):]):
return False
if (self.strides[(- common_shape_len):] != other.strides[(- common_shape_len):]):
return False
if (helpers.product(self.shape[:(- common_shape_len)]) != 1):
return False
if (helpers.product(other.shape[:(- common_shape_len)]) != 1):
return False
if (self.offset != other.offset):
return False
return True
def broadcastable_to(self, other):
if (len(self.shape) > len(other.shape)):
return False
for i in range(1, (len(self.shape) + 1)):
if (not ((self.shape[(- i)] == 1) or (self.shape[(- i)] == other.shape[(- i)]))):
return False
return True
def with_dtype(self, dtype):
return Type(dtype, shape=self.shape, strides=self.strides, offset=self.offset, nbytes=self.nbytes)
@classmethod
def from_value(cls, val):
    """Construct a type descriptor from ``val``.

    Accepts an existing instance (copied), a scalar numpy type object,
    any array-like exposing ``dtype``/``shape``, or a plain Python
    scalar whose dtype is detected.

    NOTE(review): the ``cls`` first parameter shows this was written as a
    classmethod; the decorator appears to have been stripped during
    extraction and is restored here.  ``numpy.issctype`` is deprecated
    (removed in NumPy 2.0) — worth migrating when convenient.
    """
    if isinstance(val, Type):
        # Copy all metadata from an existing Type.
        return cls(val.dtype, shape=val.shape, strides=val.strides, offset=val.offset, nbytes=val.nbytes)
    elif numpy.issctype(val):
        # A bare scalar type object (e.g. numpy.float32).
        return cls(val)
    elif hasattr(val, 'dtype') and hasattr(val, 'shape'):
        # Array-like: pick up optional layout attributes when present.
        strides = val.strides if hasattr(val, 'strides') else None
        offset = val.offset if hasattr(val, 'offset') else 0
        nbytes = val.nbytes if hasattr(val, 'nbytes') else None
        return cls(val.dtype, shape=val.shape, strides=strides, offset=offset, nbytes=nbytes)
    else:
        # Plain Python scalar: infer its dtype.
        return cls(dtypes.detect_type(val))
@classmethod
def padded(cls, dtype, shape, pad=0):
    """Construct a type describing a buffer with ``pad`` elements of
    padding around each dimension, as computed by
    ``helpers.padded_buffer_parameters``.

    NOTE(review): restored the ``@classmethod`` decorator implied by the
    ``cls`` parameter (apparently stripped during extraction).
    """
    dtype = dtypes.normalize_type(dtype)
    strides, offset, nbytes = helpers.padded_buffer_parameters(shape, dtype.itemsize, pad=pad)
    return cls(dtype, shape, strides=strides, offset=offset, nbytes=nbytes)
def __call__(self, val):
    # Cast a value to this type's dtype using the cast callable cached in
    # __init__ (self._cast = dtypes.cast(self.dtype)).
    return self._cast(val)
def __repr__(self):
    """Reconstructable repr; omits strides/nbytes when they equal the
    derived defaults, and omits shape/offset entirely for plain scalars."""
    if len(self.shape) == 0 and self.offset == 0:
        # Scalar with no offset: the short form.
        return 'Type({dtype})'.format(dtype=self.dtype)
    pieces = ['Type({dtype}, shape={shape}'.format(dtype=self.dtype, shape=self.shape)]
    if not self._default_strides:
        pieces.append(', strides=' + str(self.strides))
    if self.offset != 0:
        pieces.append(', offset=' + str(self.offset))
    if not self._default_nbytes:
        pieces.append(', nbytes=' + str(self.nbytes))
    pieces.append(')')
    return ''.join(pieces)
def __process_modules__(self, process):
    """Return a copy of this type with its ``ctype`` module passed
    through ``process`` (all other metadata preserved)."""
    new_type = Type(self.dtype, shape=self.shape, strides=self.strides,
                    offset=self.offset, nbytes=self.nbytes)
    new_type.ctype = process(new_type.ctype)
    return new_type
def match_xtgeo_214_header(header: bytes) -> bool:
    """Detect the binary ROFF header written by XTGeo 2.14's CXTGeo backend.

    The header must start with the fixed creator preamble, and the bytes
    from offset 99 onward (skipping the byteswap-test integer) must start
    with the known problematic filetype/creationDate sequence.
    """
    preamble = (b'roff-bin\x00#ROFF file#\x00'
                b'#Creator: CXTGeo subsystem of XTGeo by JCR#\x00'
                b'tag\x00filedata\x00int\x00byteswaptest\x00')
    problem_sequence = b'char\x00filetype\x00grid\x00char\x00creationDate\x00UNKNOWNendtag'
    return header.startswith(preamble) and header[99:].startswith(problem_sequence)
class FolderNode(NodeType):
    """Tree-node type backed by a filesystem directory."""

    def is_type_for(self, node):
        """A node is handled by this type when its path is a directory."""
        return isdir(node)

    def allows_children(self, node):
        """Directories may always hold entries."""
        return True

    def has_children(self, node):
        """True when the directory currently has at least one entry."""
        return bool(listdir(node))

    def get_children(self, node):
        """Full paths of every entry inside the directory."""
        return [join(node, entry) for entry in listdir(node)]

    def get_text(self, node):
        """Display label: the final path component."""
        return basename(node)
def test_correct_response_new_awards_only(client, monkeypatch, elasticsearch_transaction_index, awards_and_transactions):
    """Spending-by-geography with the `new_awards_only` date type: a window
    before any award start date yields no results; widening the window to
    include one yields that county's aggregate."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)

    def _post_geography(start_date):
        # Same payload as before, parameterised on the window start.
        payload = {
            'scope': 'place_of_performance',
            'geo_layer': 'county',
            'filters': {
                'time_period': [
                    {'date_type': 'new_awards_only', 'start_date': start_date, 'end_date': '2020-01-15'}
                ]
            },
        }
        return client.post('/api/v2/search/spending_by_geography',
                           content_type='application/json',
                           data=json.dumps(payload))

    def _assert_results(resp, expected_results):
        expected = {
            'scope': 'place_of_performance',
            'geo_layer': 'county',
            'results': expected_results,
            'messages': [get_time_period_message()],
        }
        assert resp.status_code == status.HTTP_200_OK, 'Failed to return 200 Response'
        body = resp.json()
        body['results'].sort(key=_get_shape_code_for_sort)
        assert body == expected

    # Window that starts after the award's start date: nothing matches.
    _assert_results(_post_geography('2020-01-02'), [])
    # Window that covers the award's start date: the county aggregate appears.
    _assert_results(
        _post_geography('2019-12-30'),
        [{'aggregated_amount': 5.0, 'display_name': 'Charleston', 'per_capita': 5.0, 'population': 1, 'shape_code': '45001'}],
    )
def test_js_module_names():
    """Invalid module names (non-string, empty) raise ValueError; a valid
    filename produces code that assigns to the stem name."""
    for invalid_name in (3, ''):
        with raises(ValueError):
            create_js_module(invalid_name, CODE, ['bb'], 'aa', 'simple')
    generated = create_js_module('foo.js', CODE, ['bb'], 'aa', 'simple')
    assert '.foo =' in generated
class Server(ModelNormal):
    """Generated OpenAPI model for a backend server entry.

    NOTE(review): this block looks machine-extracted — the bare
    `_property` and `_js_args_to_python_args` lines below are plain
    expression statements, presumably decorators whose original names
    were mangled during extraction (e.g. `@cached_property`,
    `@convert_js_args_to_python_args`).  Confirm against the original
    generated source before running this code.
    """

    # No enum-constrained attributes on this model.
    allowed_values = {}
    # `weight` is validated to the inclusive range [1, 100].
    validations = {('weight',): {'inclusive_maximum': 100, 'inclusive_minimum': 1}}

    _property  # NOTE(review): likely a stripped decorator
    def additional_properties_type():
        # Types accepted for properties not declared in `openapi_types`.
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    # Whether the model itself may be null in payloads.
    _nullable = False

    _property  # NOTE(review): likely a stripped decorator
    def openapi_types():
        # Declared attribute name -> tuple of accepted Python types.
        return {'weight': (int,), 'max_conn': (int,), 'port': (int,), 'address': (str,), 'comment': (str, none_type), 'disabled': (bool,), 'override_host': (str, none_type)}

    _property  # NOTE(review): likely a stripped decorator
    def discriminator():
        # No polymorphic discriminator for this model.
        return None

    # Python attribute name -> JSON field name (identical for every field here).
    attribute_map = {'weight': 'weight', 'max_conn': 'max_conn', 'port': 'port', 'address': 'address', 'comment': 'comment', 'disabled': 'disabled', 'override_host': 'override_host'}

    # No server-populated read-only attributes.
    read_only_vars = {}
    # Not composed from allOf/oneOf/anyOf schemas.
    _composed_schemas = {}

    _js_args_to_python_args  # NOTE(review): likely a stripped decorator
    def _from_openapi_data(cls, *args, **kwargs):
        """Deserialize raw payload data into a new instance.

        Pops the framework bookkeeping options out of ``kwargs`` so only
        model fields remain, then assigns each remaining keyword as an
        attribute (unknown keys are skipped when the configuration says
        to discard them and no additional-properties type is allowed).
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Bypass __init__ of intermediate classes; state is set up manually below.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            # Positional arguments are never valid for generated models.
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Track composed-schema traversal to avoid infinite recursion.
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                # Silently drop unknown keys when configured to do so.
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal attributes that must always exist on an instance.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    _js_args_to_python_args  # NOTE(review): likely a stripped decorator
    def __init__(self, *args, **kwargs):
        """Construct an instance from keyword arguments.

        Mirrors ``_from_openapi_data`` but additionally rejects read-only
        attributes.  NOTE(review): the read-only check runs *after*
        ``setattr`` has already stored the value — confirm this ordering
        matches the upstream generator.
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            # Positional arguments are never valid for generated models.
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                # Silently drop unknown keys when configured to do so.
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def setup_chromosome(axis, y_min=None, y_max=None, y_label=None):
    """Configure a matplotlib-style axis for a per-chromosome plot.

    Sets the y-limits when both bounds are given (drawing a horizontal
    zero line when the range spans 0), optionally labels the y-axis, and
    points the ticks outward on the bottom/left spines only.

    Bug fix: the original tested ``if (y_min and y_max)``, which skipped
    the y-limit setup whenever either bound was exactly 0 — a perfectly
    valid bound.  Compare against None explicitly instead.
    """
    if y_min is not None and y_max is not None:
        axis.set_ylim(y_min, y_max)
        if y_min < 0 < y_max:
            # Mark the zero line when the range crosses it.
            axis.axhline(color='k')
    if y_label:
        axis.set_ylabel(y_label)
    axis.tick_params(which='both', direction='out')
    axis.get_xaxis().tick_bottom()
    axis.get_yaxis().tick_left()
class OptionSeriesBoxplotSonificationTracksMappingPlaydelay(Options):
    """Option accessors for `series.boxplot.sonification.tracks.mapping.playDelay`.

    NOTE(review): every option below appears as two same-named `def`s
    (a getter reading `_config_get` followed by a setter writing
    `_config`).  As plain Python the second definition silently replaces
    the first; presumably `@property` / `@<name>.setter` decorators were
    stripped during extraction — confirm against the original source.
    """

    def mapFunction(self):
        # Getter: configured mapping function (no default).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: store the mapping function verbatim (not as JS code).
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: configured mapping target (no default).
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter: store the mapping target string.
        self._config(text, js_type=False)

    def max(self):
        # Getter: configured maximum (no default).
        return self._config_get(None)

    def max(self, num: float):
        # Setter: store the maximum value.
        self._config(num, js_type=False)

    def min(self):
        # Getter: configured minimum (no default).
        return self._config_get(None)

    def min(self, num: float):
        # Setter: store the minimum value.
        self._config(num, js_type=False)

    def within(self):
        # Getter: configured `within` option (no default).
        return self._config_get(None)

    def within(self, value: Any):
        # Setter: store the `within` option verbatim.
        self._config(value, js_type=False)
import pytest  # NOTE(review): module imports are outside this chunk; the restored
# parametrize marker below requires pytest in scope.


class TestTKOSubsScanScan:
    """Unit tests for TKOSubsScan setup, result parsing and execution.

    Bug fix: the line above ``test_scan_run`` read ``.parametrize(...)``,
    a SyntaxError — clearly a stripped ``@pytest.mark.parametrize``
    decorator, restored here.
    """

    def setup_method(self):
        # Fresh scan rooted in a throwaway temp directory for each test.
        self.tmp_path = Path(tempfile.mkdtemp())
        self.scan = TKOSubsScan(target_file=__file__, results_dir=str(self.tmp_path), db_location=str(self.tmp_path / 'testing.sqlite'))
        self.scan.exception = False

    def teardown_method(self):
        shutil.rmtree(self.tmp_path)

    def test_scan_requires(self):
        """requires() yields a GatherWebTargets dependency."""
        with patch('pipeline.recon.web.GatherWebTargets'):
            with patch('pipeline.recon.web.subdomain_takeover.meets_requirements'):
                retval = self.scan.requires()
                assert isinstance(retval, GatherWebTargets)

    def test_scan_creates_results_dir(self):
        assert self.scan.results_subfolder == (self.tmp_path / 'tkosubs-results')

    def test_scan_creates_results_file(self):
        assert self.scan.output_file == (self.tmp_path / 'tkosubs-results') / 'tkosubs.csv'

    def test_scan_creates_database(self):
        assert self.scan.db_mgr.location.exists()
        assert (self.tmp_path / 'testing.sqlite') == self.scan.db_mgr.location

    def test_scan_creates_results(self):
        # Uses the module-level fixture directory `tkosubs_results`.
        self.scan.results_subfolder = tkosubs_results
        self.scan.output_file = self.scan.results_subfolder / 'tkosubs.csv'
        self.scan.parse_results()
        assert self.scan.output().exists()

    def test_parse_results(self):
        """parse_results() records vulnerable hosts from the CSV output."""
        myresults = (self.tmp_path / 'tkosubs-results') / 'tkosubs.csv'
        myresults.parent.mkdir(parents=True, exist_ok=True)
        content = 'Domain,Cname,Provider,IsVulnerable,IsTakenOver,Response\n'
        content += 'google.com,Cname,Provider,true,IsTakenOver,Response\n'
        content += 'maps.google.com,Cname,Provider,false,IsTakenOver,Response\n'
        myresults.write_text(content)
        self.scan.output_file = myresults
        self.scan.db_mgr.get_or_create_target_by_ip_or_hostname = MagicMock()
        self.scan.db_mgr.get_or_create_target_by_ip_or_hostname.return_value = MagicMock()
        self.scan.db_mgr.add = MagicMock()
        self.scan.parse_results()
        assert self.scan.output().exists()
        assert self.scan.db_mgr.add.called
        assert self.scan.db_mgr.get_or_create_target_by_ip_or_hostname.called

    @pytest.mark.parametrize('test_input', [['google.com'], None])
    def test_scan_run(self, test_input):
        """run() only invokes the subprocess when hostnames exist."""
        with patch('subprocess.run') as mocked_run:
            self.scan.parse_results = MagicMock()
            self.scan.db_mgr.get_all_hostnames = MagicMock()
            self.scan.db_mgr.get_all_hostnames.return_value = test_input
            self.scan.run()
            if test_input is None:
                assert not mocked_run.called
                assert not self.scan.parse_results.called
            else:
                assert mocked_run.called
                assert self.scan.parse_results.called
class Page(object):
    """A single page of results from a paginated query.

    Holds the page's items plus the ``before``/``after`` cursors used to
    fetch adjacent pages.

    Fix: ``from_raw`` took no ``self``/``cls`` parameter yet lived on the
    class — it is an alternate constructor and is now correctly marked
    ``@staticmethod`` (existing ``Page.from_raw(raw)`` call sites are
    unaffected).
    """

    @staticmethod
    def from_raw(raw):
        """Build a Page from a raw response dict with a required ``data``
        key and optional ``before``/``after`` cursors."""
        return Page(raw['data'], raw.get('before'), raw.get('after'))

    def __init__(self, data, before=None, after=None):
        self.data = data        # items on this page
        self.before = before    # cursor to the preceding page, if any
        self.after = after      # cursor to the following page, if any

    def map_data(self, func):
        """Return a new Page with ``func`` applied to every item;
        cursors are carried over unchanged."""
        return Page([func(x) for x in self.data], self.before, self.after)

    def __repr__(self):
        return 'Page(data=%s, before=%s, after=%s)' % (self.data, self.before, self.after)

    def __eq__(self, other):
        return (isinstance(other, Page) and (self.data == other.data) and (self.before == other.before) and (self.after == other.after))
def set_iterator(client, set_query, map_lambda=None, mapper=None, page_size=None):
    """Lazily iterate over every element of a set query, following
    pagination cursors.

    ``map_lambda`` is applied server-side inside the query; ``mapper``
    is applied client-side to each yielded value.
    """
    def fetch(**params):
        # Run one paginated query, optionally wrapping it in a server-side map.
        q = query.paginate(set_query, **params)
        if map_lambda is not None:
            q = query.map_(map_lambda, q)
        return Page.from_raw(client.query(q))

    def emit(page):
        # Yield the page's items, applying the client-side mapper if given.
        for item in page.data:
            yield item if mapper is None else mapper(item)

    page = fetch(size=page_size)
    yield from emit(page)
    # Pick the traversal direction once, from the first page's cursors.
    cursor_field = 'before' if page.after is None else 'after'
    while getattr(page, cursor_field) is not None:
        page = fetch(**{'size': page_size, cursor_field: getattr(page, cursor_field)})
        yield from emit(page)
def _check_dependencies_in_registry(registry: BaseRegistry, agent_config: AgentConfig, push_missing: bool) -> None:
    """Verify every package the agent depends on is present in the registry,
    optionally pushing any that are missing."""
    # The check to perform is the same for every item; pick it once.
    check = registry.check_item_present_and_push if push_missing else registry.check_item_present
    for item_type_plural in (PROTOCOLS, CONTRACTS, CONNECTIONS, SKILLS):
        for public_id in getattr(agent_config, item_type_plural):
            check(item_type_plural, public_id)
import pytest  # NOTE(review): module imports are outside this chunk; needed for
# the restored parametrize marker below.


@pytest.mark.parametrize('version, supported', [('3.0', True), ('3.1', True), ('3.10', True), ('30.0', False), ('31.0', False), ('4.0', False), ('4.1', False), ('4.10', False), ('40.0', False), ('41.0', False), ('2.0', False), ('2.1', False), ('2.10', False), (None, False)])
def test_supported_asgi_version(version, supported):
    """Lifespan startup completes only for ASGI spec versions 3.x.

    Bug fix: the parametrize line read ``.parametrize(...)`` — a
    SyntaxError, clearly a stripped ``@pytest.mark.parametrize``
    decorator, restored here.
    """
    scope = {'type': 'lifespan', 'asgi': {'spec_version': '2.0', 'version': version}}
    if version is None:
        # Exercise the "version missing entirely" path.
        del scope['asgi']['version']
    app = App()
    resource = testing.SimpleTestResourceAsync()
    app.add_route('/', resource)
    shutting_down = asyncio.Condition()
    req_event_emitter = testing.ASGILifespanEventEmitter(shutting_down)
    resp_event_collector = testing.ASGIResponseEventCollector()

    async def task():
        coro = asyncio.get_event_loop().create_task(app(scope, req_event_emitter, resp_event_collector))
        # Yield control so the app can process the startup event.
        await asyncio.sleep(0)
        assert len(resp_event_collector.events) == 1
        event = resp_event_collector.events[0]
        if supported:
            assert event['type'] == 'lifespan.startup.complete'
        else:
            assert event['type'] == 'lifespan.startup.failed'
            assert event['message'].startswith('Falcon requires ASGI version 3.x')
        async with shutting_down:
            shutting_down.notify()
        await coro

    falcon.async_to_sync(task)
class TestRemove:
    """Behavioural tests for the remove() helper."""

    def test_remove_dir(self, tmpdir):
        """remove() deletes an existing directory."""
        target = str(tmpdir.join('test'))
        makedirs(target)
        remove(target)
        assert not os.path.exists(target)

    def test_remove_file(self, tmpdir):
        """remove() deletes a single file."""
        target = str(tmpdir.join('test.txt'))
        touch(target)
        remove(target)
        assert not os.path.exists(target)

    def test_remove_mutil_dirs(self, tmpdir):
        """remove() deletes nested directories recursively."""
        nested = str(tmpdir.join('test/test'))
        makedirs(nested)
        root = str(tmpdir.join('test'))
        remove(root)
        assert not os.path.exists(root)

    def test_remove_with_ignore_error(self, tmpdir):
        """A missing path is tolerated when ignore_errors=True."""
        missing = str(tmpdir.join('test'))
        remove(missing, ignore_errors=True)

    def test_remove_without_ignore_error(self, tmpdir):
        """A missing path raises when ignore_errors=False."""
        missing = str(tmpdir.join('test'))
        with pytest.raises(Exception):
            remove(missing, ignore_errors=False)

    def test_remove_without_ignore_error_with_onerror(self):
        # Placeholder: the onerror-callback path is not covered yet.
        pass
def upgrade():
    """Convert every stored timestamp column to timezone-aware UTCDateTime.

    Each listed (table, columns) pair currently holds a naive DateTime and
    is altered to flaskbb.utils.database.UTCDateTime(timezone=True).
    """
    # Some `users` columns were originally declared with a bare
    # sa.DateTime(), which is equivalent to sa.DateTime(timezone=False).
    conversions = [
        ('users', ('date_joined', 'lastseen', 'birthday', 'last_failed_login')),
        ('conversations', ('date_created',)),
        ('messages', ('date_created',)),
        ('topicsread', ('last_read',)),
        ('forumsread', ('last_read', 'cleared')),
        ('reports', ('reported', 'zapped')),
        ('posts', ('date_created', 'date_modified')),
        ('topics', ('date_created', 'last_updated')),
        ('forums', ('last_post_created',)),
    ]
    for table, columns in conversions:
        with op.batch_alter_table(table) as batch_op:
            for column in columns:
                batch_op.alter_column(
                    column,
                    existing_type=sa.DateTime(timezone=False),
                    type_=flaskbb.utils.database.UTCDateTime(timezone=True),
                    existing_nullable=True,
                )
def cole_cole(inp, p_dict):
    """Cole-Cole dispersion model for complex conductivity.

    Computes the horizontal conductivity from the low/high-frequency
    limits (`cond_0`, `cond_8`) and relaxation parameters (`tau`, `c`),
    derives the vertical component via the anisotropy factor, and keeps
    the imaginary parts of the supplied eta values.
    """
    angular = 2j * np.pi * p_dict['freq']
    # (i*omega*tau)^c, one row per frequency, one column per tau.
    iotc = np.outer(angular, inp['tau']) ** inp['c']
    cond_h = inp['cond_8'] + (inp['cond_0'] - inp['cond_8']) / (1 + iotc)
    cond_v = cond_h / p_dict['aniso'] ** 2
    # Replace only the real parts; imaginary parts come from the inputs.
    eta_h = cond_h + 1j * p_dict['etaH'].imag
    eta_v = cond_v + 1j * p_dict['etaV'].imag
    return (eta_h, eta_v)
class _Take(References):
    """View of ``parent`` selecting only the entries listed in ``indices``."""

    def __init__(self, parent: References, indices: types.arraydata) -> None:
        assert indices.shape[0] > 1, 'inefficient; this should have been `_Empty` or `_Uniform`'
        assert not isinstance(parent, _Uniform), 'inefficient; this should have been `_Uniform`'
        self.parent = parent
        self.indices = numpy.asarray(indices)
        # Validate the selection against the parent's length up front.
        _check_take(len(parent), self.indices)
        super().__init__(parent.ndims)

    def __len__(self) -> int:
        return len(self.indices)

    def __iter__(self) -> Iterator[Reference]:
        # Resolve each selected index through the parent lazily.
        for parent_index in self.indices:
            yield self.parent.get(parent_index)

    def get(self, index: int) -> Reference:
        return self.parent.get(self.indices[index])

    def take(self, indices: numpy.ndarray) -> References:
        _check_take(len(self), indices)
        # Compose the two selections and delegate to the parent.
        return self.parent.take(numpy.take(self.indices, indices))

    def compress(self, mask: numpy.ndarray) -> References:
        _check_compress(len(self), mask)
        # A masked take is just a take of the surviving indices.
        return self.parent.take(numpy.compress(mask, self.indices))
def generateTCF6(iterationsMap, iteration, t):
    """Build an EiffelTestCaseFinishedEvent (TCF6) for the given iteration,
    linked to that iteration's TCT6 event, with a randomized verdict."""
    event = generateGenericMessage('EiffelTestCaseFinishedEvent', t, '1.0.0', 'TCF6', iteration)
    link(event, iterationsMap[iteration]['TCT6'], 'TEST_CASE_EXECUTION')
    # Verdict passes with probability 0.98; conclusion is always SUCCESSFUL.
    event['data']['outcome'] = {'verdict': randomizeVerdict(0.98), 'conclusion': 'SUCCESSFUL'}
    return event
class OptionSeriesVectorMarkerStatesSelect(Options):
    """Option accessors for `series.vector.marker.states.select`.

    NOTE(review): every option below appears as two same-named `def`s
    (a getter reading `_config_get` followed by a setter writing
    `_config`).  As plain Python the second definition silently replaces
    the first; presumably `@property` / `@<name>.setter` decorators were
    stripped during extraction — confirm against the original source.
    """

    def enabled(self):
        # Getter: whether the select state is enabled (default True).
        return self._config_get(True)

    def enabled(self, flag: bool):
        # Setter: enable/disable the select state.
        self._config(flag, js_type=False)

    def fillColor(self):
        # Getter: marker fill color (default '#cccccc').
        return self._config_get('#cccccc')

    def fillColor(self, text: str):
        # Setter: marker fill color.
        self._config(text, js_type=False)

    def lineColor(self):
        # Getter: marker border color (default '#000000').
        return self._config_get('#000000')

    def lineColor(self, text: str):
        # Setter: marker border color.
        self._config(text, js_type=False)

    def lineWidth(self):
        # Getter: marker border width (default 2).
        return self._config_get(2)

    def lineWidth(self, num: float):
        # Setter: marker border width.
        self._config(num, js_type=False)

    def radius(self):
        # Getter: marker radius (no default).
        return self._config_get(None)

    def radius(self, num: float):
        # Setter: marker radius.
        self._config(num, js_type=False)
def is_under_debugger() -> bool:
    """Best-effort detection of an attached debugger.

    Checks whether the caller three frames up lives in pdb or pydevd
    (PyCharm/VS Code), and otherwise falls back to the presence of a
    trace function.
    """
    frames = inspect.stack()
    if len(frames) >= 3:
        caller_file = frames[-3].filename
        # endswith accepts a tuple of suffixes: one call covers both debuggers.
        if caller_file.endswith(('/pdb.py', '/pydevd.py')):
            return True
    # A non-None trace function usually means some debugger/tracer is active.
    return sys.gettrace() is not None
def extractEnsjTranslations(item):
    """Map a scraped feed item to a release message for the series it
    belongs to, or None/False when it should be skipped."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])

    # Tag-based matches that apply even to previews / untitled chapters.
    tagged_releases = [
        ('RMB', 'Record of Muwuis Battles', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, series, tl_type in tagged_releases:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)

    # Skip items with no chapter info, previews, and tutorial posts.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    if 'Tutorials' in item['tags']:
        return None

    # Simple tag -> series mappings with default translation type.
    for series in ('King Shura',
                   "I'm Sorry For Being Born In This World!",
                   'The Record of a Thousand Lives'):
        if series in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix)

    if 'Running Away From The Hero!' in item['tags']:
        if not frag:
            # Fragment number sometimes only appears parenthesised in the title.
            match = re.search(r'\((\d+)\)', item['title'])
            if match:
                frag = int(match.group(1))
        return buildReleaseMessageWithType(item, 'Running Away From The Hero!', vol, chp, frag=frag, postfix=postfix)

    # Fall back to case-insensitive title substring matches.
    titled_releases = [
        ('[King Shura]', 'King Shura', 'translated'),
        ('Invisible dragon', 'Invisible dragon', 'translated'),
    ]
    lowered_title = item['title'].lower()
    for title_fragment, series, tl_type in titled_releases:
        if title_fragment.lower() in lowered_title:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.