code
stringlengths
281
23.7M
class TestDeleteModel:
    """Unit tests for ml.delete_model() against a mocked Firebase ML service."""

    @classmethod
    def setup_class(cls):
        # One shared default app with a mock credential for the whole class.
        cred = testutils.MockCredential()
        firebase_admin.initialize_app(cred, {'projectId': PROJECT_ID})

    @classmethod
    def teardown_class(cls):
        testutils.cleanup_apps()

    @staticmethod
    def _url(project_id, model_id):
        # REST endpoint for a single model resource.
        # (Restored @staticmethod: the method takes no self/cls and is called
        # both as TestDeleteModel._url(...) and self._url(...).)
        return BASE_URL + 'projects/{0}/models/{1}'.format(project_id, model_id)

    def test_delete_model(self):
        recorder = instrument_ml_service(status=200, payload=EMPTY_RESPONSE)
        ml.delete_model(MODEL_ID_1)
        assert len(recorder) == 1
        assert recorder[0].method == 'DELETE'
        assert recorder[0].url == TestDeleteModel._url(PROJECT_ID, MODEL_ID_1)
        assert recorder[0].headers[HEADER_CLIENT_KEY] == HEADER_CLIENT_VALUE

    # NOTE(review): the decorator prefix was stripped in this dump; the bare
    # `.parametrize(...)` is unambiguously @pytest.mark.parametrize — restored.
    @pytest.mark.parametrize('model_id, exc_type', INVALID_MODEL_ID_ARGS)
    def test_delete_model_validation_errors(self, model_id, exc_type):
        with pytest.raises(exc_type) as excinfo:
            ml.delete_model(model_id)
        check_error(excinfo, exc_type)

    def test_delete_model_error(self):
        recorder = instrument_ml_service(status=404, payload=ERROR_RESPONSE_NOT_FOUND)
        with pytest.raises(exceptions.NotFoundError) as excinfo:
            ml.delete_model(MODEL_ID_1)
        check_firebase_error(excinfo, ERROR_STATUS_NOT_FOUND, ERROR_CODE_NOT_FOUND, ERROR_MSG_NOT_FOUND)
        assert len(recorder) == 1
        assert recorder[0].method == 'DELETE'
        assert recorder[0].url == self._url(PROJECT_ID, MODEL_ID_1)
        assert recorder[0].headers[HEADER_CLIENT_KEY] == HEADER_CLIENT_VALUE

    def test_no_project_id(self):
        def evaluate():
            # An app with no projectId must make delete_model raise ValueError.
            app = firebase_admin.initialize_app(testutils.MockCredential(), name='no_project_id')
            with pytest.raises(ValueError):
                ml.delete_model(MODEL_ID_1, app)
        testutils.run_without_project_id(evaluate)
def merge_loopy(slate_loopy, output_arg, builder, var2terminal, name):
    """Fuse the TSFC kernels and the Slate kernel into one loopy wrapper.

    Returns (wrapper_kernel, kernel_args, events) where `events` are the
    PETSc log events (only populated when PETSc logging is active).
    """
    from firedrake.slate.slac.kernel_builder import SlateWrapperBag
    coeffs = builder.collect_coefficients()
    constants = builder.collect_constants()
    builder.bag = SlateWrapperBag(coeffs, constants)

    # Initialise the terminal tensors (temporaries for each Slate terminal).
    inits, tensor2temp = builder.initialise_terminals(var2terminal, builder.bag.coefficients)
    terminal_tensors = list(filter(lambda x: x.terminal and not x.assembled, var2terminal.values()))
    calls_and_kernels_and_events = tuple(
        (c, k, e)
        for terminal in terminal_tensors
        for (c, k, e) in builder.generate_tsfc_calls(terminal, tensor2temp[terminal]))
    if calls_and_kernels_and_events:
        tsfc_calls, tsfc_kernels, tsfc_events = zip(*calls_and_kernels_and_events)
    else:
        tsfc_calls = ()
        tsfc_kernels = ()
        # BUG FIX: tsfc_events was previously left unbound on this branch but
        # read below when PETSc logging is active -> NameError.
        tsfc_events = ()

    args, tmp_args = builder.generate_wrapper_kernel_args(tensor2temp)
    kernel_args = [output_arg] + args
    loopy_args = [output_arg.loopy_arg] + [a.loopy_arg for a in args] + tmp_args

    # Optionally wrap the instruction lists in PETSc log events.
    inits, slate_init_event, preamble_init = profile_insns('inits_' + name, inits, PETSc.Log.isActive())
    insns = inits
    insns.extend(tsfc_calls)
    insns.append(builder.slate_call(slate_loopy, tensor2temp.values()))
    insns, slate_wrapper_event, preamble = profile_insns(name, insns, PETSc.Log.isActive())

    # A no-op C instruction that reads every argument, so loopy does not
    # drop "unused" arguments from the generated wrapper.
    noop = lp.CInstruction((), '',
                           read_variables=frozenset({a.name for a in loopy_args}),
                           within_inames=frozenset(),
                           within_inames_is_final=True)
    insns.append(noop)

    domains = builder.bag.index_creator.domains
    preamble = (preamble_init + preamble) if preamble else []
    slate_wrapper = lp.make_function(domains, insns, loopy_args, name=name,
                                     seq_dependencies=True, target=target,
                                     lang_version=(2018, 2), preambles=preamble)

    from pyop2.codegen.loopycompat import _match_caller_callee_argument_dimension_
    from loopy.kernel.function_interface import CallableKernel
    # Merge every TSFC kernel and match caller/callee argument shapes.
    # (Loop variable renamed from `name`, which shadowed the wrapper name.)
    for tsfc_loopy in tsfc_kernels:
        slate_wrapper = merge([slate_wrapper, tsfc_loopy])
        for callee_name in tsfc_loopy.callables_table:
            if isinstance(slate_wrapper.callables_table[callee_name], CallableKernel):
                slate_wrapper = _match_caller_callee_argument_dimension_(slate_wrapper, callee_name)
    slate_wrapper = merge([slate_wrapper, slate_loopy])
    for callee_name in slate_loopy.callables_table:
        if isinstance(slate_wrapper.callables_table[callee_name], CallableKernel):
            slate_wrapper = _match_caller_callee_argument_dimension_(slate_wrapper, callee_name)

    events = (tsfc_events + (slate_wrapper_event, slate_init_event)) if PETSc.Log.isActive() else ()
    return slate_wrapper, tuple(kernel_args), events
# NOTE(review): the decorator name was stripped in this dump; the argument
# shape ('value,expected_value', [param(...), ...]) is a pytest parametrize
# call — restored as @mark.parametrize (hydra-style `from pytest import mark`).
@mark.parametrize(
    'value,expected_value',
    [
        param(10, CastResults(int=10, float=10.0, str='10', bool=True), id='10'),
        param(0, CastResults(int=0, float=0.0, str='0', bool=False), id='0'),
        param(10.0, CastResults(int=10, float=10.0, str='10.0', bool=True), id='10.0'),
        param(0.0, CastResults(int=0, float=0.0, str='0.0', bool=False), id='0.0'),
        param('inf', CastResults(int=CastResults.error("OverflowError while evaluating 'int(inf)': cannot convert float infinity to integer"), float=math.inf, str='inf', bool=True), id='inf'),
        param('nan', CastResults(int=CastResults.error("ValueError while evaluating 'int(nan)': cannot convert float NaN to integer"), float=math.nan, str='nan', bool=True), id='nan'),
        param('1e6', CastResults(int=1000000, float=1000000.0, str='1000000.0', bool=True), id='1e6'),
        param("''", CastResults(int=CastResults.error("ValueError while evaluating 'int('')': invalid literal for int() with base 10:"), float=CastResults.error("ValueError while evaluating 'float('')': could not convert string to float:"), str='', bool=CastResults.error("ValueError while evaluating 'bool('')': Cannot cast '' to bool")), id="''"),
        param("'10'", CastResults(int=10, float=10.0, str='10', bool=CastResults.error("ValueError while evaluating 'bool('10')': Cannot cast '10' to bool")), id="'10'"),
        param("'10.0'", CastResults(int=CastResults.error("ValueError while evaluating 'int('10.0')': invalid literal for int() with base 10: '10.0'"), float=10.0, str='10.0', bool=CastResults.error("ValueError while evaluating 'bool('10.0')': Cannot cast '10.0' to bool")), id="'10.0'"),
        param("'true'", CastResults(int=CastResults.error("ValueError while evaluating 'int('true')': invalid literal for int() with base 10: 'true'"), float=CastResults.error("ValueError while evaluating 'float('true')': could not convert string to float: 'true'"), str='true', bool=True), id="'true'"),
        param("'false'", CastResults(int=CastResults.error("ValueError while evaluating 'int('false')': invalid literal for int() with base 10: 'false'"), float=CastResults.error("ValueError while evaluating 'float('false')': could not convert string to float: 'false'"), str='false', bool=False), id="'false'"),
        param("'[1,2,3]'", CastResults(int=CastResults.error("ValueError while evaluating 'int('[1,2,3]')': invalid literal for int() with base 10: '[1,2,3]'"), float=CastResults.error("ValueError while evaluating 'float('[1,2,3]')': could not convert string to float: '[1,2,3]'"), str='[1,2,3]', bool=CastResults.error("ValueError while evaluating 'bool('[1,2,3]')': Cannot cast '[1,2,3]' to bool")), id="'[1,2,3]'"),
        param("'{a:10}'", CastResults(int=CastResults.error("ValueError while evaluating 'int('{a:10}')': invalid literal for int() with base 10: '{a:10}'"), float=CastResults.error("ValueError while evaluating 'float('{a:10}')': could not convert string to float: '{a:10}'"), str='{a:10}', bool=CastResults.error("ValueError while evaluating 'bool('{a:10}')': Cannot cast '{a:10}' to bool")), id="'{a:10}'"),
        param('true', CastResults(int=1, float=1.0, str='true', bool=True), id='true'),
        param('false', CastResults(int=0, float=0.0, str='false', bool=False), id='false'),
        param('[]', CastResults(int=[], float=[], str=[], bool=[]), id='[]'),
        param('[0,1,2]', CastResults(int=[0, 1, 2], float=[0.0, 1.0, 2.0], str=['0', '1', '2'], bool=[False, True, True]), id='[1,2,3]'),
        param('[1,[2]]', CastResults(int=[1, [2]], float=[1.0, [2.0]], str=['1', ['2']], bool=[True, [True]]), id='[1,[2]]'),
        param('[a,1]', CastResults(int=CastResults.error("ValueError while evaluating 'int([a,1])': invalid literal for int() with base 10: 'a'"), float=CastResults.error("ValueError while evaluating 'float([a,1])': could not convert string to float: 'a'"), str=['a', '1'], bool=CastResults.error("ValueError while evaluating 'bool([a,1])': Cannot cast 'a' to bool")), id='[a,1]'),
        param('{}', CastResults(int={}, float={}, str={}, bool={}), id='{}'),
        param('{a:10}', CastResults(int={'a': 10}, float={'a': 10.0}, str={'a': '10'}, bool={'a': True}), id='{a:10}'),
        param('{a:[0,1,2]}', CastResults(int={'a': [0, 1, 2]}, float={'a': [0.0, 1.0, 2.0]}, str={'a': ['0', '1', '2']}, bool={'a': [False, True, True]}), id='{a:[0,1,2]}'),
        param('{a:10,b:xyz}', CastResults(int=CastResults.error("ValueError while evaluating 'int({a:10,b:xyz})': invalid literal for int() with base 10: 'xyz'"), float=CastResults.error("ValueError while evaluating 'float({a:10,b:xyz})': could not convert string to float: 'xyz'"), str={'a': '10', 'b': 'xyz'}, bool=CastResults.error("ValueError while evaluating 'bool({a:10,b:xyz})': Cannot cast 'xyz' to bool")), id='{a:10,b:xyz}'),
        param('choice(0,1)', CastResults(int=ChoiceSweep(list=[0, 1]), float=ChoiceSweep(list=[0.0, 1.0]), str=ChoiceSweep(list=['0', '1']), bool=ChoiceSweep(list=[False, True])), id='choice(0,1)'),
        param('2,1,0', CastResults(int=ChoiceSweep(list=[2, 1, 0], simple_form=True), float=ChoiceSweep(list=[2.0, 1.0, 0.0], simple_form=True), str=ChoiceSweep(list=['2', '1', '0'], simple_form=True), bool=ChoiceSweep(list=[True, True, False], simple_form=True)), id='simple_choice:ints'),
        param("a,'b',1,1.0,true,[a,b],{a:10}", CastResults(int=CastResults.error("ValueError while evaluating 'int(a,'b',1,1.0,true,[a,b],{a:10})': invalid literal for int() with base 10: 'a'"), float=CastResults.error("ValueError while evaluating 'float(a,'b',1,1.0,true,[a,b],{a:10})': could not convert string to float: 'a'"), str=ChoiceSweep(list=['a', 'b', '1', '1.0', 'true', ['a', 'b'], {'a': '10'}], simple_form=True), bool=CastResults.error("ValueError while evaluating 'bool(a,'b',1,1.0,true,[a,b],{a:10})': Cannot cast 'a' to bool")), id='simple_choice:types'),
        param('choice(a,b)', CastResults(int=CastResults.error("ValueError while evaluating 'int(choice(a,b))': invalid literal for int() with base 10: 'a'"), float=CastResults.error("ValueError while evaluating 'float(choice(a,b))': could not convert string to float: 'a'"), str=ChoiceSweep(list=['a', 'b']), bool=CastResults.error("ValueError while evaluating 'bool(choice(a,b))': Cannot cast 'a' to bool")), id='choice(a,b)'),
        param('choice(1,a)', CastResults(int=CastResults.error("ValueError while evaluating 'int(choice(1,a))': invalid literal for int() with base 10: 'a'"), float=CastResults.error("ValueError while evaluating 'float(choice(1,a))': could not convert string to float: 'a'"), str=ChoiceSweep(list=['1', 'a']), bool=CastResults.error("ValueError while evaluating 'bool(choice(1,a))': Cannot cast 'a' to bool")), id='choice(1,a)'),
        param('interval(1.0, 2.0)', CastResults(int=IntervalSweep(start=1, end=2), float=IntervalSweep(start=1.0, end=2.0), str=CastResults.error("ValueError while evaluating 'str(interval(1.0, 2.0))': Intervals cannot be cast to str"), bool=CastResults.error("ValueError while evaluating 'bool(interval(1.0, 2.0))': Intervals cannot be cast to bool")), id='interval(1.0, 2.0)'),
        param('range(1,10)', CastResults(int=RangeSweep(start=1, stop=10, step=1), float=RangeSweep(start=1.0, stop=10.0, step=1.0), str=CastResults.error("ValueError while evaluating 'str(range(1,10))': Range can only be cast to int or float"), bool=CastResults.error("ValueError while evaluating 'bool(range(1,10))': Range can only be cast to int or float")), id='range(1,10)'),
        param('range(1.0,10.0)', CastResults(int=RangeSweep(start=1, stop=10, step=1), float=RangeSweep(start=1.0, stop=10.0, step=1.0), str=CastResults.error("ValueError while evaluating 'str(range(1.0,10.0))': Range can only be cast to int or float"), bool=CastResults.error("ValueError while evaluating 'bool(range(1.0,10.0))': Range can only be cast to int or float")), id='range(1.0,10.0)'),
    ],
)
def test_cast_conversions(value: Any, expected_value: Any) -> None:
    """Each cast function applied to `value` yields the recorded result/error."""
    for field in ('int', 'float', 'bool', 'str'):
        cast_str = f'{field}({value})'
        expected = getattr(expected_value, field)
        if isinstance(expected, RaisesContext):
            # An error is expected: run the parse inside the raises context.
            with expected:
                parser.parse_rule(cast_str, 'function')
        else:
            result = parser.parse_rule(cast_str, 'function')
            assert eq(result, expected), f'{field} cast result mismatch'
class OptionPlotoptionsItemAccessibilityPoint(Options):
    """Accessibility options for individual series points.

    NOTE(review): decorators were stripped in this dump; the duplicated
    getter/setter def names make the @property / @<name>.setter pattern
    unambiguous, so it is restored here. Defaults shown in the getters are
    the Highcharts defaults passed to _config_get.
    """

    @property
    def dateFormat(self):
        return self._config_get(None)

    @dateFormat.setter
    def dateFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def dateFormatter(self):
        return self._config_get(None)

    @dateFormatter.setter
    def dateFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def describeNull(self):
        return self._config_get(True)

    @describeNull.setter
    def describeNull(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def descriptionFormat(self):
        return self._config_get(None)

    @descriptionFormat.setter
    def descriptionFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def descriptionFormatter(self):
        return self._config_get(None)

    @descriptionFormatter.setter
    def descriptionFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def valueDecimals(self):
        return self._config_get(None)

    @valueDecimals.setter
    def valueDecimals(self, num: float):
        self._config(num, js_type=False)

    @property
    def valueDescriptionFormat(self):
        return self._config_get('{xDescription}{separator}{value}.')

    @valueDescriptionFormat.setter
    def valueDescriptionFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def valuePrefix(self):
        return self._config_get(None)

    @valuePrefix.setter
    def valuePrefix(self, text: str):
        self._config(text, js_type=False)

    @property
    def valueSuffix(self):
        return self._config_get(None)

    @valueSuffix.setter
    def valueSuffix(self, text: str):
        self._config(text, js_type=False)
class OptionSeriesTreemapSonificationDefaultspeechoptionsMappingPitch(Options):
    """Pitch mapping options for treemap sonification speech tracks.

    NOTE(review): decorators were stripped in this dump; duplicated def
    names prove the original @property / @<name>.setter pattern — restored.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get('undefined')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get('undefined')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        return self._config_get('undefined')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def within(self):
        return self._config_get('undefined')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
def setUpModule():
    """Build the on-disk zip fixture tree used by the file-zip tests.

    Creates dir1/dir2/dir3/dir4 with one file per level, zips dir1 into
    test_0.zip, adds f5, removes dir1, and chowns the tree to www-data.
    Requires a shell with `zip` and permission to chown.
    """
    fixture_script = '\nBASE_FOLDER="{config.base_folder}/test_file_zip/"\nrm -rf "$BASE_FOLDER"\n\nmkdir -p "$BASE_FOLDER/dir1/dir2/dir3/dir4"\n\necho -n 1 > "$BASE_FOLDER/dir1/f1"\necho -n 1 > "$BASE_FOLDER/dir1/dir2/f2"\necho -n 1 > "$BASE_FOLDER/dir1/dir2/dir3/f3"\necho -n 1 > "$BASE_FOLDER/dir1/dir2/dir3/dir4/f4"\n\ncd "$BASE_FOLDER" && zip -r "test_0.zip" "dir1/"\n\necho -n 1 > "$BASE_FOLDER/f5"\n\nrm -rf "$BASE_FOLDER/dir1"\n\nchown www-data: -R "$BASE_FOLDER/"\n\n'
    # Substitute the configured base folder, then run the script in a shell.
    subprocess.check_output(fixture_script.format(config=config), shell=True)
class SourceText(namedtuple('SourceText', ['text', 'context', 'encoding', 'category', 'error'])):
    """Immutable record of a chunk of source text plus how it was decoded.

    `text` may be str or bytes; `encoding` is normalised to a canonical
    codec name on construction. A non-None `error` bypasses the category
    name validation.
    """

    __slots__ = ()

    def __new__(cls, text, context, encoding, category, error=None):
        # Map Python-specific aliases to standard names, lower-cased.
        normalized = PYTHON_ENCODING_NAMES.get(encoding, encoding).lower()
        # Collapse BOM/endianness variants onto the family name.
        if normalized == 'utf-8-sig':
            normalized = 'utf-8'
        elif normalized.startswith('utf-16'):
            normalized = 'utf-16'
        elif normalized.startswith('utf-32'):
            normalized = 'utf-32'
        # Resolve to the codec's canonical name (raises LookupError if bogus).
        if normalized:
            normalized = codecs.lookup(normalized).name
        if error is None and RE_CATEGORY_NAME.match(category) is None:
            raise ValueError('Invalid category name in SourceText!')
        return super().__new__(cls, text, context, normalized, category, error)

    def _is_bytes(self):
        """Return True when the payload is raw bytes rather than decoded text."""
        return isinstance(self.text, bytes)

    def _has_error(self):
        """Return True when a decode/read error was recorded."""
        return self.error is not None
class OptionSeriesItemDatalabelsTextpath(Options):
    """Text-path options for series data labels.

    NOTE(review): decorators were stripped in this dump; duplicated def
    names prove the original @property / @<name>.setter pattern — restored.
    """

    @property
    def attributes(self):
        return self._config_get(None)

    @attributes.setter
    def attributes(self, value: Any):
        self._config(value, js_type=False)

    @property
    def enabled(self):
        return self._config_get(False)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
def ait_register_acc_op_mapping(op_and_target: Tuple[(str, Union[(str, Callable)])], arg_replacement_tuples: Optional[List[Union[(Tuple[(Union[(str, Tuple[(str, ...)])], str)], Tuple[(Union[(str, Tuple[(str, ...)])], str, bool)])]]]=None, kwargs_to_move_to_acc_out_ty: Optional[List[Union[(Tuple[(str, str, bool)], Tuple[(str, str)])]]]=None):
    """Decorator factory that registers an AIT acc-op mapper.

    The decorated function is stored in _AIT_ACC_OP_MAPPERS keyed by
    `op_and_target`, together with the optional argument-replacement and
    kwargs-relocation metadata, and is returned unchanged.
    """
    def _register(mapper_fn: Callable):
        # Record the mapper; decoration does not wrap or alter the function.
        _AIT_ACC_OP_MAPPERS[op_and_target] = AitAccOpMapper(
            new_fn_target=mapper_fn,
            arg_replacement_tuples=arg_replacement_tuples,
            kwargs_to_move_to_acc_out_ty=kwargs_to_move_to_acc_out_ty,
        )
        return mapper_fn
    return _register
# NOTE(review): decorator names were stripped in this dump; restored from the
# argument shapes — a mock `patch.dict` over the environment and the project's
# pytest "unit" marker. Confirm exact names upstream.
@patch.dict(os.environ, REQUIRED_ENV_VARS, clear=True)
@pytest.mark.unit
def test_get_deprecated_api_config_from_file(test_deprecated_config_path: str) -> None:
    """A deprecated-format config file still loads the legacy database values."""
    config = get_config(test_deprecated_config_path)
    assert config.database.user == 'postgres_deprecated'
    assert config.database.password == 'fidesctl_deprecated'
    assert config.database.port == '5431'
    assert config.database.db == 'fidesctl_deprecated'
    assert config.database.test_db == 'fidesctl_test_deprecated'
class FlicketGroup(Base):
    """A named user group, linked many-to-many to users via flicket_groups."""

    __tablename__ = 'flicket_group'

    id = db.Column(db.Integer, primary_key=True)
    group_name = db.Column(db.String(user_field_size['group_max']))
    # Reverse side exposed on FlicketUser as `flicket_groups` (lazy query).
    users = db.relationship(FlicketUser, secondary=flicket_groups,
                            backref=db.backref('flicket_groups', lazy='dynamic', order_by=group_name))

    def __init__(self, group_name):
        self.group_name = group_name

    def __repr__(self):
        return f'<Group: id={self.id}. group_name={self.group_name}>'
class OptionPlotoptionsScatter3dSonificationContexttracksMappingLowpassFrequency(Options):
    """Lowpass-frequency mapping options for scatter3d sonification tracks.

    NOTE(review): decorators were stripped in this dump; duplicated def
    names prove the original @property / @<name>.setter pattern — restored.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionPlotoptionsStreamgraphOnpointConnectoroptions(Options):
    """Connector styling for streamgraph on-point positioning.

    NOTE(review): decorators were stripped in this dump; duplicated def
    names prove the original @property / @<name>.setter pattern — restored.
    """

    @property
    def dashstyle(self):
        return self._config_get(None)

    @dashstyle.setter
    def dashstyle(self, text: str):
        self._config(text, js_type=False)

    @property
    def stroke(self):
        return self._config_get(None)

    @stroke.setter
    def stroke(self, text: str):
        self._config(text, js_type=False)

    @property
    def width(self):
        return self._config_get(1)

    @width.setter
    def width(self, num: float):
        self._config(num, js_type=False)
def extractChaleuriaCom(item):
    """Build a release message for a Chaleuria feed item, or reject it.

    Returns None for previews / items with neither chapter nor volume,
    a release message when a known series tag matches, False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag, series name, translation type) lookup table.
    series_by_tag = [
        ('TLCPS', 'The Long Chase for the Presidents Spouse', 'translated'),
        ('UTDS', 'Urban Tales of Demons and Spirits', 'translated'),
        ('RIAH', 'Reborn into a Hamster for 233 Days', 'translated'),
        ('rdf', 'Records of the Dragon Follower', 'translated'),
        ('RDF', 'Records of the Dragon Follower', 'translated'),
        ('nmd', 'No Money to Divorce', 'translated'),
        ('ipc', 'Interstellar Power Couple', 'translated'),
        ('fs', 'Fake Slackers', 'translated'),
        ('aol', 'World Hopping: Avenge Our Love', 'translated'),
        ('DITA', 'Deep in the Act', 'translated'),
        ('CGPA', 'The Complete Guide to the Use and Care of a Personal Assistant', 'translated'),
        ('RDE', "Rest in a Demon's Embrace", 'oel'),
    ]
    for tag, series_name, tl_type in series_by_tag:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp,
                                               frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionPlotoptionsDependencywheel(Options):
    """Plot options for the dependency-wheel series type.

    NOTE(review): decorators were stripped in this dump; the duplicated
    getter/setter def names prove the original @property / @<name>.setter
    pattern, which is restored here (including on the sub-data accessors,
    matching the sibling Options classes).
    """

    @property
    def accessibility(self) -> 'OptionPlotoptionsDependencywheelAccessibility':
        return self._config_sub_data('accessibility', OptionPlotoptionsDependencywheelAccessibility)

    @property
    def allowPointSelect(self):
        return self._config_get(False)

    @allowPointSelect.setter
    def allowPointSelect(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def animation(self):
        return self._config_get(True)

    @animation.setter
    def animation(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def borderColor(self):
        return self._config_get('#ffffff')

    @borderColor.setter
    def borderColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderWidth(self):
        return self._config_get(0)

    @borderWidth.setter
    def borderWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def center(self):
        # NOTE(review): `null` is presumably a module-level alias for the JS
        # null value in this package — confirm it is in scope.
        return self._config_get([null, null])

    @center.setter
    def center(self, value: Any):
        self._config(value, js_type=False)

    @property
    def centerInCategory(self):
        return self._config_get(False)

    @centerInCategory.setter
    def centerInCategory(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def className(self):
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def clip(self):
        return self._config_get(True)

    @clip.setter
    def clip(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def color(self):
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def colorByPoint(self):
        return self._config_get(True)

    @colorByPoint.setter
    def colorByPoint(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def colorIndex(self):
        return self._config_get(None)

    @colorIndex.setter
    def colorIndex(self, num: float):
        self._config(num, js_type=False)

    @property
    def colors(self):
        return self._config_get(None)

    @colors.setter
    def colors(self, value: Any):
        self._config(value, js_type=False)

    @property
    def cursor(self):
        return self._config_get(None)

    @cursor.setter
    def cursor(self, text: str):
        self._config(text, js_type=False)

    @property
    def curveFactor(self):
        return self._config_get(0.6)

    @curveFactor.setter
    def curveFactor(self, num: float):
        self._config(num, js_type=False)

    @property
    def custom(self):
        return self._config_get(None)

    @custom.setter
    def custom(self, value: Any):
        self._config(value, js_type=False)

    @property
    def dashStyle(self):
        return self._config_get('Solid')

    @dashStyle.setter
    def dashStyle(self, text: str):
        self._config(text, js_type=False)

    @property
    def dataLabels(self) -> 'OptionPlotoptionsDependencywheelDatalabels':
        return self._config_sub_data('dataLabels', OptionPlotoptionsDependencywheelDatalabels)

    @property
    def description(self):
        return self._config_get(None)

    @description.setter
    def description(self, text: str):
        self._config(text, js_type=False)

    @property
    def enableMouseTracking(self):
        return self._config_get(True)

    @enableMouseTracking.setter
    def enableMouseTracking(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def events(self) -> 'OptionPlotoptionsDependencywheelEvents':
        return self._config_sub_data('events', OptionPlotoptionsDependencywheelEvents)

    @property
    def getExtremesFromAll(self):
        return self._config_get(False)

    @getExtremesFromAll.setter
    def getExtremesFromAll(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def inactiveOtherPoints(self):
        return self._config_get(True)

    @inactiveOtherPoints.setter
    def inactiveOtherPoints(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def includeInDataExport(self):
        return self._config_get(None)

    @includeInDataExport.setter
    def includeInDataExport(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def keys(self):
        return self._config_get(None)

    @keys.setter
    def keys(self, value: Any):
        self._config(value, js_type=False)

    @property
    def label(self) -> 'OptionPlotoptionsDependencywheelLabel':
        return self._config_sub_data('label', OptionPlotoptionsDependencywheelLabel)

    @property
    def legendSymbol(self):
        return self._config_get('rectangle')

    @legendSymbol.setter
    def legendSymbol(self, text: str):
        self._config(text, js_type=False)

    @property
    def levels(self) -> 'OptionPlotoptionsDependencywheelLevels':
        return self._config_sub_data('levels', OptionPlotoptionsDependencywheelLevels)

    @property
    def linkColorMode(self):
        return self._config_get('from')

    @linkColorMode.setter
    def linkColorMode(self, text: str):
        self._config(text, js_type=False)

    @property
    def linkedTo(self):
        return self._config_get(None)

    @linkedTo.setter
    def linkedTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def linkOpacity(self):
        return self._config_get(0.5)

    @linkOpacity.setter
    def linkOpacity(self, num: float):
        self._config(num, js_type=False)

    @property
    def minLinkWidth(self):
        return self._config_get(0)

    @minLinkWidth.setter
    def minLinkWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def nodePadding(self):
        return self._config_get(10)

    @nodePadding.setter
    def nodePadding(self, num: float):
        self._config(num, js_type=False)

    @property
    def nodeWidth(self):
        return self._config_get(20)

    @nodeWidth.setter
    def nodeWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def onPoint(self) -> 'OptionPlotoptionsDependencywheelOnpoint':
        return self._config_sub_data('onPoint', OptionPlotoptionsDependencywheelOnpoint)

    @property
    def opacity(self):
        return self._config_get(1)

    @opacity.setter
    def opacity(self, num: float):
        self._config(num, js_type=False)

    @property
    def point(self) -> 'OptionPlotoptionsDependencywheelPoint':
        return self._config_sub_data('point', OptionPlotoptionsDependencywheelPoint)

    @property
    def pointDescriptionFormat(self):
        return self._config_get(None)

    @pointDescriptionFormat.setter
    def pointDescriptionFormat(self, value: Any):
        self._config(value, js_type=False)

    @property
    def pointDescriptionFormatter(self):
        return self._config_get(None)

    @pointDescriptionFormatter.setter
    def pointDescriptionFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def relativeXValue(self):
        return self._config_get(False)

    @relativeXValue.setter
    def relativeXValue(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def selected(self):
        return self._config_get(False)

    @selected.setter
    def selected(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def showCheckbox(self):
        return self._config_get(False)

    @showCheckbox.setter
    def showCheckbox(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def showInLegend(self):
        return self._config_get(False)

    @showInLegend.setter
    def showInLegend(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def size(self):
        return self._config_get('100%')

    @size.setter
    def size(self, num: float):
        self._config(num, js_type=False)

    @property
    def skipKeyboardNavigation(self):
        return self._config_get(None)

    @skipKeyboardNavigation.setter
    def skipKeyboardNavigation(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def sonification(self) -> 'OptionPlotoptionsDependencywheelSonification':
        return self._config_sub_data('sonification', OptionPlotoptionsDependencywheelSonification)

    @property
    def startAngle(self):
        return self._config_get(0)

    @startAngle.setter
    def startAngle(self, num: float):
        self._config(num, js_type=False)

    @property
    def states(self) -> 'OptionPlotoptionsDependencywheelStates':
        return self._config_sub_data('states', OptionPlotoptionsDependencywheelStates)

    @property
    def stickyTracking(self):
        return self._config_get(False)

    @stickyTracking.setter
    def stickyTracking(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def tooltip(self) -> 'OptionPlotoptionsDependencywheelTooltip':
        return self._config_sub_data('tooltip', OptionPlotoptionsDependencywheelTooltip)

    @property
    def turboThreshold(self):
        return self._config_get(1000)

    @turboThreshold.setter
    def turboThreshold(self, num: float):
        self._config(num, js_type=False)

    @property
    def visible(self):
        return self._config_get(True)

    @visible.setter
    def visible(self, flag: bool):
        self._config(flag, js_type=False)
# NOTE(review): the decorator name was stripped in this dump; in AITemplate
# ROCm backends a bare registry-key string like this belongs to
# @registry.reg(...) — restored; confirm against upstream.
@registry.reg('rocm.gemm_rcr_bias_add_add_relu.gen_function')
def gen_function(func_attrs, exec_cond_template, dim_info_dict):
    """Generate ROCm source for gemm_rcr with a bias+add+add+relu epilogue."""
    return common.gen_function(
        func_attrs,
        exec_cond_template,
        dim_info_dict,
        'bias_add_add_relu',
        extra_code=EXTRA_CODE.render(),
        input_addr_calculator=common.INPUT_ADDR_CALCULATOR.render(
            accessor_a=func_attrs['input_accessors'][0],
            accessor_b=func_attrs['input_accessors'][1]),
        output_addr_calculator=common.OUTPUT_ADDR_CALCULATOR.render(
            output_accessor=func_attrs['output_accessors'][0]))
def test_traverse_kwargs():
    """Resource.traverse() yields exactly the provider-valued kwargs."""
    bar_provider = providers.Object('bar')
    baz_provider = providers.Object('baz')
    resource = providers.Resource(dict, foo='foo', bar=bar_provider, baz=baz_provider)
    traversed = list(resource.traverse())
    # Plain value 'foo' is not a provider, so only two providers are found.
    assert len(traversed) == 2
    assert bar_provider in traversed
    assert baz_provider in traversed
class OpacityPreference(widgets.ScalePreference):
    """Scale preference controlling the IPython console window opacity."""

    default = 80.0
    name = 'plugin/ipconsole/opacity'

    def __init__(self, preferences, widget):
        widgets.ScalePreference.__init__(self, preferences, widget)
        # Gtk+ cannot apply per-window opacity on Windows; grey the control out.
        if sys.platform.startswith('win32'):
            self.set_widget_sensitive(False)
            self.widget.set_tooltip(_('Opacity cannot be set on Windows due to a bug in Gtk+'))
class CompletionCall():
    """One completion request: sampling mode, logit mask, input ids, kwargs.

    NOTE(review): field annotations with defaults but no __init__ strongly
    suggest this class was decorated with @dataclass in the original source
    (decorators were stripped in this dump) — confirm upstream.
    """

    mode: str
    logit_mask_or_fixed_id: np.ndarray
    input_ids: np.ndarray
    kwargs: Any
    stopping_phrases: List[str] = None
    sampling_mode: str = None
    invert: bool = False

    def api_mask(self, invert=None):
        """Translate the stored logit mask into an API logit-bias dict.

        Returns {token_id: bias} with bias +100 when self.invert (whitelist
        the masked ids) else -100 (blacklist). The `invert` parameter is
        accepted but unused; self.invert controls the behavior.
        """
        mask = self.logit_mask_or_fixed_id
        assert nputil.is_array(mask), ('api_mask(): logit_mask_or_fixed_id must be a LongTensor not a ' + str(type(mask)))
        if self.invert:
            masked = (mask >= 0)
        else:
            masked = (mask < 0)
        mask_value = 100 if self.invert else -100
        return {int(idx): mask_value for idx in np.nonzero(masked)[0]}

    def continuation_type(self):
        """Cache key describing this call's sampling parameters and mask.

        Returns None for 'fixed' mode (not cacheable as a continuation).
        """
        if self.mode == 'fixed':
            return None
        parameter_values_key_segment = 'temp-' + str(self.kwargs['temperature']) + '-logprobs-' + str(self.kwargs['logprobs'])
        if self.mode == 'complete':
            # BUG FIX: was `self.api_mask` (the bound method object itself),
            # which has no .items(); call it to obtain the mask dict.
            api_mask = self.api_mask()
            mask_key_segment = [f'{id}={value}' for id, value in sorted(api_mask.items(), key=lambda x: x[0])]
            mask_key_segment = '-'.join(mask_key_segment)
        else:
            mask_key_segment = '*'
        if self.sampling_mode is not None:
            mask_key_segment += '-' + self.sampling_mode
        return f'{parameter_values_key_segment}-{mask_key_segment}'
class RPNHeadConvRegressor(nn.Module):
    """RPN head: 1x1 convolutions producing objectness logits and box deltas."""

    def __init__(self, in_channels, num_anchors, box_dim=4):
        super(RPNHeadConvRegressor, self).__init__()
        self.cls_logits = nn.Conv2d(in_channels, num_anchors, kernel_size=1, stride=1)
        self.bbox_pred = nn.Conv2d(in_channels, num_anchors * box_dim, kernel_size=1, stride=1)
        # Standard RPN initialisation: N(0, 0.01) weights, zero biases.
        for layer in (self.cls_logits, self.bbox_pred):
            torch.nn.init.normal_(layer.weight, std=0.01)
            torch.nn.init.constant_(layer.bias, 0)

    def forward(self, x: List[torch.Tensor]):
        # One (logits, deltas) pair per feature map level.
        if not is_fx_tracing():
            torch._assert(isinstance(x, (list, tuple)), 'Unexpected data type')
        logits = [self.cls_logits(feature) for feature in x]
        bbox_reg = [self.bbox_pred(feature) for feature in x]
        return (logits, bbox_reg)
class InstallerRegistrationForm(ModelForm):
    """Installer registration form requiring acceptance of the guidelines."""

    # NOTE(review): the protocol literal was garbled in this dump
    # (`protocol = '`); restored to the obvious URL scheme — confirm upstream.
    protocol = 'https://'
    url = 'wiki.antifa-glug.org/books/flisol-caba/page/guia-del-buen-instalador'
    target = '_blank'
    link_text = 'Sagrada Guia del Buen Instalador'
    text = 'Afirmo que he leido la "<a href="{0}{1}" target="{2}">{3}</a>"'.format(protocol, url, target, link_text)
    # Mandatory checkbox: the user affirms having read the guidelines.
    read_guidelines = forms.BooleanField(label=mark_safe(text), required=True)

    class Meta():
        model = Installer
        widgets = {'event_user': forms.HiddenInput()}
        fields = '__all__'
class OptionSeriesPictorialSonificationTracksMappingTremolo(Options):
    """Tremolo mapping options (depth and speed sub-configurations).

    NOTE(review): decorators were stripped in this dump; sibling Options
    classes expose sub-data accessors as @property, so the same pattern is
    restored here — confirm upstream.
    """

    @property
    def depth(self) -> 'OptionSeriesPictorialSonificationTracksMappingTremoloDepth':
        return self._config_sub_data('depth', OptionSeriesPictorialSonificationTracksMappingTremoloDepth)

    @property
    def speed(self) -> 'OptionSeriesPictorialSonificationTracksMappingTremoloSpeed':
        return self._config_sub_data('speed', OptionSeriesPictorialSonificationTracksMappingTremoloSpeed)
class IngressStatusTestWithIngressClass(AmbassadorTest):
    # LoadBalancer status that we push onto the Service and then expect the
    # Ingress controller to mirror onto the Ingress resource.
    status_update = {'loadBalancer': {'ingress': [{'ip': '42.42.42.42'}]}}

    def init(self):
        # Backend service the Ingress routes to.
        self.target = HTTP()
        if (not is_ingress_class_compatible()):
            self.xfail = 'IngressClass is not supported in this cluster'

    def manifests(self) -> str:
        # RBAC to read IngressClasses, an IngressClass bound to this
        # Ambassador, and an Ingress using it.
        # NOTE(review): the literal contains {self...} placeholders but no
        # f-prefix here; upstream this is presumably an f-string — confirm.
        return ('\n---\napiVersion: rbac.authorization.k8s.io/v1\nkind: ClusterRole\nmetadata:\n name: {self.path.k8s}-ext\nrules:\n- apiGroups: ["networking.k8s.io"]\n resources: ["ingressclasses"]\n verbs: ["get", "list", "watch"]\n---\napiVersion: rbac.authorization.k8s.io/v1\nkind: ClusterRoleBinding\nmetadata:\n name: {self.path.k8s}-ext\nroleRef:\n apiGroup: rbac.authorization.k8s.io\n kind: ClusterRole\n name: {self.path.k8s}-ext\nsubjects:\n- kind: ServiceAccount\n name: {self.path.k8s}\n namespace: {self.namespace}\n---\napiVersion: networking.k8s.io/v1\nkind: IngressClass\nmetadata:\n annotations:\n getambassador.io/ambassador-id: {self.ambassador_id}\n name: {self.path.k8s}\nspec:\n controller: getambassador.io/ingress-controller\n---\napiVersion: networking.k8s.io/v1\nkind: Ingress\nmetadata:\n annotations:\n getambassador.io/ambassador-id: {self.ambassador_id}\n name: {self.path.k8s}\nspec:\n ingressClassName: {self.path.k8s}\n rules:\n - paths:\n - backend:\n service:\n name: {self.target.path.k8s}\n port:\n number: 80\n path: /{self.name}/\n pathType: Prefix\n' + super().manifests())

    def queries(self):
        # NOTE(review): `True or ...` deliberately short-circuits the Darwin
        # check (debug leftover?) — the status push always runs.
        if (True or (sys.platform != 'darwin')):
            # Push the desired status onto the Service via kubestatus.
            text = json.dumps(self.status_update)
            update_cmd = [KUBESTATUS_PATH, 'Service', '-n', 'default', '-f', f'metadata.name={self.path.k8s}', '-u', '/dev/fd/0']
            subprocess.run(update_cmd, input=text.encode('utf-8'), timeout=10)
            # Give the controller a moment to propagate the status.
            time.sleep(1)
        (yield Query(self.url((self.name + '/'))))
        # Path normalization should collapse the `..` before routing.
        (yield Query(self.url(f'need-normalization/../{self.name}/')))

    def check(self):
        if (not parse_bool(os.environ.get('AMBASSADOR_PYTEST_INGRESS_TEST', 'false'))):
            pytest.xfail('AMBASSADOR_PYTEST_INGRESS_TEST not set, xfailing...')
        # NOTE(review): `False and ...` makes this branch dead code — the
        # Darwin xfail is currently disabled.
        if (False and (sys.platform == 'darwin')):
            pytest.xfail('not supported on Darwin')
        for r in self.results:
            if r.backend:
                assert (r.backend.name == self.target.path.k8s), (r.backend.name, self.target.path.k8s)
                assert r.backend.request
                assert (r.backend.request.headers['x-envoy-original-path'][0] == f'/{self.name}/')
        # Verify the pushed Service status was copied onto the Ingress.
        ingress_cmd = ['tools/bin/kubectl', 'get', '-n', 'default', '-o', 'json', 'ingress', self.path.k8s]
        ingress_run = subprocess.Popen(ingress_cmd, stdout=subprocess.PIPE)
        (ingress_out, _) = ingress_run.communicate()
        ingress_json = json.loads(ingress_out)
        assert (ingress_json['status'] == self.status_update), f"Expected Ingress status to be {self.status_update}, got {ingress_json['status']} instead"
def store_algorithm_state(bms_name, algorithm_name, state=None):
    """Read, and optionally update, the persisted per-BMS algorithm state.

    The state lives in a JSON file named after the (sanitized) BMS name.
    Access is serialized through the module-level ``lock``.

    :param bms_name: BMS name; unsafe characters are replaced before it is
        embedded in the file name.
    :param algorithm_name: key under ``algorithm_state`` to read/write.
    :param state: when not None, stored under ``algorithm_name`` before the
        file is rewritten.
    :return: the stored state for ``algorithm_name``, or None if absent.
    """
    # Sanitize the BMS name so it is safe as part of a file name.
    fn = ((root_dir + 'bat_state_') + re.sub('[^\\w_. -]', '_', bms_name)) + '.json'
    with lock:  # serialize concurrent read-modify-write cycles
        # 'a+' creates the file if missing without truncating existing data.
        with open(fn, 'a+') as f:
            try:
                f.seek(0)
                bms_state = json.load(f)
            except ValueError:
                # Empty or corrupt file (json.JSONDecodeError is a subclass
                # of ValueError): start a fresh state container. Previously a
                # bare ``except:`` swallowed even KeyboardInterrupt.
                logger.info('init %s bms state storage', bms_name)
                bms_state = dict(algorithm_state=dict())
            if state is not None:
                bms_state['algorithm_state'][algorithm_name] = state
            # Rewrite the whole file in place (also normalizes formatting).
            f.seek(0)
            f.truncate()
            json.dump(bms_state, f, indent=2)
    return bms_state['algorithm_state'].get(algorithm_name, None)
# The decorators below were garbled to bare ``.parametrize(...)`` (a syntax
# error); restored to the conventional pytest form.
@pytest.mark.parametrize('request_1__group_by', ['group_by'])
@pytest.mark.parametrize('offset,per_page', [[0, 10]])
def test_profiler_table(dashboard_user, stack_line, request_1, endpoint, offset, per_page):
    """The profiler_table endpoint returns the request row with its code
    stack lines serialized as strings."""
    response = dashboard_user.get('dashboard/api/profiler_table/{0}/{1}/{2}'.format(endpoint.id, offset, per_page))
    assert (response.status_code == 200)
    [data] = response.json
    assert (data['duration'] == str(request_1.duration))
    assert (data['endpoint_id'] == str(endpoint.id))
    assert (data['group_by'] == request_1.group_by)
    assert (data['id'] == str(request_1.id))
    assert (data['ip'] == request_1.ip)
    assert (data['status_code'] == str(request_1.status_code))
    assert (data['time_requested'] == str(to_local_datetime(request_1.time_requested)))
    assert (data['version_requested'] == request_1.version_requested)
    assert (len(data['stack_lines']) == 1)
    assert (data['stack_lines'][0]['code']['code'] == stack_line.code.code)
    assert (data['stack_lines'][0]['code']['filename'] == stack_line.code.filename)
    assert (data['stack_lines'][0]['code']['function_name'] == stack_line.code.function_name)
    assert (data['stack_lines'][0]['code']['line_number'] == str(stack_line.code.line_number))
# NOTE(review): the class decorator lost its name during extraction (only
# ``(tags=..., description=...)`` survived); restored as flask-apispec's
# ``@doc`` per the surrounding API-resource conventions -- confirm upstream.
@doc(tags=['candidate'], description=docs.CANDIDATE_TOTAL_AGGREGATE_TAG)
class CandidateTotalAggregateView(ApiResource):
    """Aggregate candidate financial totals per election year, optionally
    sub-grouped by office / state / district / party."""

    sort_options = ['election_year', 'office', 'state', 'state_full', 'party', 'district']
    schema = schemas.CandidateTotalAggregateSchema
    page_schema = schemas.CandidateTotalAggregatePageSchema

    # NOTE(review): ``args`` appears to have lost its ``@property`` decorator
    # in transit; ApiResource subclasses conventionally expose it as one.
    @property
    def args(self):
        return utils.extend(
            args.paging,
            args.candidate_total_aggregate,
            args.make_multi_sort_args(default=['-election_year'], validator=args.SortMultiOptionValidator(self.sort_options)))

    def build_query(self, **kwargs):
        """Build the aggregate query; filters and grouping depend on kwargs."""
        total = models.CandidateTotal
        query = db.session.query(
            total.election_year.label('election_year'),
            sa.func.sum(total.receipts).label('total_receipts'),
            sa.func.sum(total.disbursements).label('total_disbursements'),
            sa.func.sum(total.individual_itemized_contributions).label('total_individual_itemized_contributions'),
            sa.func.sum(total.transfers_from_other_authorized_committee).label('total_transfers_from_other_authorized_committee'),
            sa.func.sum(total.other_political_committee_contributions).label('total_other_political_committee_contributions'),
            sa.func.sum(total.cash_on_hand_end_period).label('total_cash_on_hand_end_period'),
            sa.func.sum(total.debts_owed_by_committee).label('total_debts_owed_by_committee'))
        query = query.filter(total.election_year.isnot(None))
        # Restrict to rows matching a real election (inner-join semantics via
        # cross-join + filter against ElectionsList).
        query = query.filter(
            (models.ElectionsList.cycle == total.election_year),
            (models.ElectionsList.office == total.office),
            (models.ElectionsList.state == total.state),
            (models.ElectionsList.district == total.district))
        if kwargs.get('election_year'):
            query = query.filter(total.election_year.in_(kwargs['election_year']))
        # Tri-state filter: True -> active only, explicit False -> inactive
        # only, absent -> no filter.
        if kwargs.get('is_active_candidate'):
            query = query.filter(total.candidate_inactive.is_(False))
        elif (('is_active_candidate' in kwargs) and (not kwargs.get('is_active_candidate'))):
            query = query.filter(total.candidate_inactive.is_(True))
        if kwargs.get('election_full'):
            query = query.filter(total.is_election.is_(kwargs['election_full']))
        if kwargs.get('min_election_cycle'):
            query = query.filter(total.election_year >= kwargs['min_election_cycle'])
        if kwargs.get('max_election_cycle'):
            query = query.filter(total.election_year <= kwargs['max_election_cycle'])
        if kwargs.get('office'):
            query = query.filter(total.office == kwargs['office'])
        if kwargs.get('state'):
            query = query.filter(total.state.in_(kwargs['state']))
        if kwargs.get('district'):
            query = query.filter(total.district.in_(kwargs['district']))
        if kwargs.get('party'):
            # DFL (Minnesota) counts as DEM; anything not DEM/REP is "Other".
            if ('DEM' == kwargs.get('party')):
                query = query.filter(total.party.in_(['DEM', 'DFL']))
            elif ('REP' == kwargs.get('party')):
                query = query.filter(total.party.in_(['REP']))
            else:
                query = query.filter(total.party.notin_(['DEM', 'DFL', 'REP']))
        # Grouping: always by election_year, plus the requested dimensions.
        if (kwargs.get('aggregate_by') and ('office' == kwargs.get('aggregate_by'))):
            query = query.add_columns(total.office.label('office'))
            query = query.group_by(total.election_year, total.office)
        elif (kwargs.get('aggregate_by') and ('office-state' == kwargs.get('aggregate_by'))):
            query = query.add_columns(total.office.label('office'))
            query = query.add_columns(total.state.label('state'))
            query = query.add_columns(total.state_full.label('state_full'))
            query = query.group_by(total.election_year, total.office, total.state, total.state_full)
        elif (kwargs.get('aggregate_by') and ('office-state-district' == kwargs.get('aggregate_by'))):
            query = query.add_columns(total.office.label('office'))
            query = query.add_columns(total.state.label('state'))
            query = query.add_columns(total.state_full.label('state_full'))
            query = query.add_columns(total.district.label('district'))
            query = query.filter(total.district.isnot(None))
            query = query.group_by(total.election_year, total.office, total.state, total.district, total.state_full)
        elif (kwargs.get('aggregate_by') and ('office-party' == kwargs.get('aggregate_by'))):
            query = query.add_columns(total.office.label('office'))
            # Shared CASE expression (was duplicated verbatim in the SELECT
            # and GROUP BY clauses).
            party_case = sa.case(
                [((total.party == 'DFL'), 'DEM'),
                 ((total.party == 'DEM'), 'DEM'),
                 ((total.party == 'REP'), 'REP')],
                else_='Other')
            query = query.add_columns(party_case.label('party'))
            query = query.group_by(total.election_year, total.office, party_case)
        else:
            query = query.group_by(total.election_year)
        return query
class SealingArray(DelayedSpellCardAction, FatetellAction):
    """Delayed spell card: on a successful fate-tell, the victim loses the
    ActionStage of their current turn; afterwards the card is discarded."""

    def __init__(self, source, target):
        self.source = source
        self.target = target
        self.fatetell_target = target

    def fatetell_action(self, ft):
        # Guard clause: a failed fate-tell means the effect fizzles.
        if not ft.succeeded:
            return False
        game = self.game
        current_turn = PlayerTurn.get_current(game)
        assert current_turn.target is self.target, (current_turn.target, self.target)
        try:
            current_turn.pending_stages.remove(ActionStage)
        except Exception:
            # Stage may already be gone; nothing further to do.
            pass
        return True

    def fatetell_cond(self, c: Card):
        # Any non-heart card triggers the sealing effect.
        return c.suit != Card.HEART

    def fatetell_postprocess(self):
        # The sealing-array card itself is dropped once resolved.
        self.game.process_action(DropCards(None, self.target, [self.associated_card]))
class Build(dict):
    """Build configuration for one app version.

    Behaves as a dict whose keys are also readable/writable as attributes.
    ``__init__`` first populates every known field with its default, then
    overlays ``copydict`` on top (dict.__init__ on a non-empty dict updates
    rather than clears, so unspecified fields keep their defaults).
    """

    def __init__(self, copydict=None):
        super().__init__()
        self.disable = ''
        self.commit = None
        self.timeout = None
        self.subdir = None
        self.submodules = False
        self.sudo = ''
        self.init = ''
        self.patch = []
        self.gradle = []
        self.maven = None
        self.output = None
        self.binary = None
        self.srclibs = []
        self.oldsdkloc = False
        self.encoding = None
        self.forceversion = False
        self.forcevercode = False
        self.rm = []
        self.extlibs = []
        self.prebuild = ''
        self.androidupdate = []
        self.target = None
        self.scanignore = []
        self.scandelete = []
        self.build = ''
        self.buildjni = []
        self.ndk = None
        self.preassemble = []
        self.gradleprops = []
        self.antcommands = []
        self.postbuild = ''
        self.novcheck = False
        self.antifeatures = dict()
        if copydict:
            # Overlay the supplied entries on top of the defaults above.
            super().__init__(copydict)
            return

    def __getattr__(self, name):
        # Attribute access falls through to the dict entries.
        if name in self:
            return self[name]
        else:
            raise AttributeError('No such attribute: ' + name)

    def __setattr__(self, name, value):
        self[name] = value

    def __delattr__(self, name):
        if name in self:
            del self[name]
        else:
            raise AttributeError('No such attribute: ' + name)

    # Fixed: restored the @classmethod decorator. The ruamel.yaml
    # representer protocol invokes ``cls.to_yaml(representer, node)``; as a
    # plain function the arguments would misbind (representer -> cls).
    @classmethod
    def to_yaml(cls, representer, node):
        """YAML representer hook: serialize as a plain mapping."""
        return representer.represent_dict(node)

    def build_method(self):
        """Return the build system used to *build* this version."""
        for f in ['maven', 'gradle']:
            if self.get(f):
                return f
        if self.output:
            return 'raw'
        return 'ant'

    def output_method(self):
        """Return the build system that determines the *output* location.
        Like build_method(), but 'raw' takes precedence."""
        if self.output:
            return 'raw'
        for f in ['maven', 'gradle']:
            if self.get(f):
                return f
        return 'ant'

    def ndk_path(self) -> str:
        """Resolve the configured NDK (a version key or release name) to a
        filesystem path, or '' if it cannot be found."""
        ndk = self.ndk
        if isinstance(ndk, list):
            # Multiple NDKs listed: the first entry wins.
            ndk = self.ndk[0]
        path = common.config['ndk_paths'].get(ndk)
        if path and not isinstance(path, str):
            raise TypeError('NDK path is not string')
        if path:
            return path
        # Fall back to matching by release name (directory basename),
        # skipping bookkeeping '_orig' entries.
        for vsn, path in common.config['ndk_paths'].items():
            if (not vsn.endswith('_orig')) and path and (os.path.basename(path) == ndk):
                return path
        return ''
# The decorator below was garbled to a bare ``.parametrize(...)`` (a syntax
# error); restored to the conventional pytest form.
@pytest.mark.parametrize('genesis_config,expected', (
    (CONSTANTINOPLE_AT_0, CONSTANTINOPLE_AT_0_CONFIG),
    (CONSTANTINOPLE_AT_5, CONSTANTINOPLE_AT_5_CONFIG),
    (BYZANTIUM_AT_0, BYZANTIUM_AT_0_CONFIG),
    (BYZANTIUM_AT_5, BYZANTIUM_AT_5_CONFIG),
    (SPURIOUS_AT_0, SPURIOUS_AT_0_CONFIG),
    (SPURIOUS_AT_5, SPURIOUS_AT_5_CONFIG),
    (TANGERINE_AT_0, TANGERINE_AT_0_CONFIG),
    (TANGERINE_AT_5, TANGERINE_AT_5_CONFIG),
    (HOMESTEAD_AT_0, HOMESTEAD_AT_0_CONFIG),
    (HOMESTEAD_AT_5, HOMESTEAD_AT_5_CONFIG),
    (FRONTIER_AT_0, FRONTIER_AT_0_CONFIG)))
def test_eip1085_extract_vm_configuration(genesis_config, expected):
    """Each EIP-1085 genesis config maps to its expected VM configuration."""
    actual = extract_vm_configuration(genesis_config)
    assert (actual == expected)
class SendMessage():
    """Modal GTK dialog for composing a message: body text, title, and an
    optional "use markup" flag, with title/markup persisted to settings."""

    def __init__(self, parent):
        # Build the dialog from its UI definition file.
        builder = Gtk.Builder()
        builder.add_from_file(locate_resource('send_message.ui'))
        self.dialog = builder.get_object('dlg_send_message')
        self.dialog.set_transient_for(parent)
        self.txv_message = builder.get_object('txv_message')
        self.ent_title = builder.get_object('ent_title')
        # Pre-fill the title and markup checkbox from persisted settings.
        self.ent_title.set_text(config.settings.get('GUI', 'messages_default_title'))
        self.chb_markup = builder.get_object('chb_markup')
        self.chb_markup.set_active(config.settings.getboolean('GUI', 'messages_use_markup'))

    def run(self):
        """Run the dialog modally.

        Returns ``(text, title, use_markup)`` when confirmed, after
        persisting title/markup back to the settings file; otherwise returns
        an empty tuple. The dialog is hidden in both cases.
        """
        reply = self.dialog.run()
        # Response id 1 is presumably the OK/Send button -- confirm in the
        # .ui file.
        if (reply == 1):
            text = self.txv_message.get_buffer().props.text
            title = self.ent_title.get_text().strip()
            use_markup = self.chb_markup.get_active()
            result = (text, title, use_markup)
            # Remember the last-used title and markup choice.
            config.settings.set('GUI', 'messages_default_title', title)
            config.settings.set('GUI', 'messages_use_markup', str(use_markup))
            config.write_ini_file(config.expand_filename('settings'), config.settings)
        else:
            result = ()
        self.dialog.hide()
        return result
class CommandManager():
    """Loads and instantiates awsrun commands through a pluggable loader."""

    def __init__(self, loader):
        self._loader = loader

    # Fixed: restored the @classmethod decorator. The method is named
    # ``cls`` and returns ``cls(...)`` -- as a plain function the first
    # path argument would bind to ``cls`` and the constructor would break.
    @classmethod
    def from_paths(cls, *paths):
        """Alternate constructor: build a manager from a mix of directory
        paths (containing '/', '\\', or '.') and module names."""
        loaders = []
        for p in paths:
            if ('/' in p) or ('\\' in p) or (p == '.'):
                loaders.append(DirectoryLoader(p))
            else:
                loaders.append(ModuleLoader(p))
        return cls(ChainLoader(*loaders))

    def commands(self):
        """Return all commands known to the underlying loader."""
        return self._loader.load_all()

    def instantiate_command(self, command_name, argv, cfg):
        """Load ``command_name``, validate its type, and construct it from
        the CLI arguments via its ``from_cli`` hook.

        :raises TypeError: if the loaded class is not a Command subclass.
        """
        cmd_class = self._loader.load(command_name)
        if not issubclass(cmd_class, Command):
            raise TypeError(f"'{command_name}' must be a subclass of awsrun.runner.Command")
        # The command module's docstring becomes the argparse epilog.
        parser = argparse.ArgumentParser(
            command_name,
            formatter_class=RawAndDefaultsFormatter,
            epilog=sys.modules[cmd_class.__module__].__doc__)
        return cmd_class.from_cli(parser, argv, cfg)
class KNNTransformer(NearestNeighbors, TransformerMixin):
    """Transformer mapping each document to its k nearest neighbors.

    NOTE(review): inherits NearestNeighbors yet delegates to a *separate*
    wrapped instance and never calls super().__init__() -- confirm the
    inheritance is intentional.
    """

    def __init__(self, k=3, **kwargs):
        # All neighbor queries go through a dedicated inner model.
        self.model = NearestNeighbors(n_neighbors=k, **kwargs)

    def fit(self, documents):
        """Fit the underlying neighbor index; returns self for chaining."""
        self.model.fit(documents)
        return self

    def transform(self, documents):
        """Return the kneighbors() result for every input document."""
        return [self.model.kneighbors(doc) for doc in documents]
def get_wrap_region(view: sublime.View, sel: sublime.Region, config: Config) -> sublime.Region:
    """Pick the region that an Emmet "wrap with abbreviation" action should
    operate on.

    With an empty selection (a caret), tries the HTML element under the
    caret; otherwise falls back to the selection itself, trimmed on the
    left. NOTE(review): ``config`` is currently unused -- confirm.
    """
    if sel.empty():
        pt = sel.begin()
        ctx = emmet.get_tag_context(view, pt)
        if ctx:
            open_tag = ctx.get('open')
            close_tag = ctx.get('close')
            if (in_range(open_tag, pt) or (close_tag and in_range(close_tag, pt))):
                # Caret sits inside one of the tags: wrap the whole element
                # (self-closing tags have no close_tag).
                return sublime.Region(open_tag.begin(), ((close_tag and close_tag.end()) or open_tag.end()))
            if close_tag:
                # Caret is between the tags: wrap the inner content, trimmed
                # of surrounding whitespace, but keep trailing same-line
                # padding up to (not including) the first newline.
                r = sublime.Region(open_tag.end(), close_tag.begin())
                next_region = utils.narrow_to_non_space(view, r)
                padding = view.substr(sublime.Region(next_region.end(), r.end()))
                ix = padding.find('\n')
                end = ((next_region.end() + ix) if (ix != (- 1)) else r.end())
                next_region = sublime.Region(next_region.begin(), end)
                return next_region
    # Non-empty selection (or no tag context): wrap the selection itself,
    # left-trimmed only.
    return utils.narrow_to_non_space(view, sel, utils.NON_SPACE_LEFT)
class JQuery(JsPackage):
    """Python-side builder for jQuery expressions.

    Most methods append one jQuery call to the current chain via
    ``self.fnc`` and return the package object; the ajax helpers return a
    ``Jsjqxhr`` or a raw JavaScript string instead.
    """

    lib_alias = {'js': 'jquery'}
    lib_selector = 'jQuery("body")'

    def this(self, reference: str = None):
        """Point the selector at ``this`` (or a CSS reference).

        Must be called before any chained function is registered.
        """
        if len(self._js) > 0:
            raise ValueError('Selector can only be changed first')
        if reference is None:
            self._selector = 'jQuery(this)'
        else:
            self._selector = "jQuery('%s')" % reference
        return self

    def new(self, tag=None, reference: str = None):
        """Create a new DOM fragment selector from a tag and/or reference."""
        if len(self._js) > 0:
            raise ValueError('Selector can only be changed first')
        if (tag is None) and (reference is None):
            raise ValueError('Tag or / and Reference must be defined')
        if (tag is None) and (reference is not None):
            self._selector = "jQuery('%s')" % reference
        elif reference is not None:
            self._selector = "jQuery('<%s id=\\'%s\\'></%s>')" % (tag, reference, tag)
        else:
            self._selector = "jQuery('<%s></%s>')" % (tag, tag)
        return self

    def parseHTML(self, text, context=None, keepScripts: bool = False):
        """Return a ``jQuery.parseHTML`` expression.

        NOTE(review): ``context`` and ``keepScripts`` are currently ignored
        -- confirm whether they should be forwarded.
        """
        text = JsUtils.jsConvertData(text, None)
        return '%s.parseHTML(%s)' % (JQUERY_ALIAS, text)

    def toggle(self, speed=None, easing=None, callback=None):
        """Append ``toggle()``. NOTE(review): all arguments are ignored."""
        return self.fnc('toggle()')

    def trigger(self, data: types.JS_DATA_TYPES, js_func: types.JS_FUNCS_TYPES = None):
        data = JsUtils.jsConvertData(data, js_func)
        return self.fnc('trigger(%(data)s)' % {'data': data})

    def _speed_fnc(self, name: str, speed: int = None, callback: types.JS_FUNCS_TYPES = None):
        """Shared builder for the jQuery effects that take an optional speed
        and callback (hide/show/fade*/slide*). Replaces eight identical
        copies of this logic."""
        if speed is not None:
            if callback is not None:
                if not isinstance(callback, list):
                    callback = [callback]
                return self.fnc('%(name)s(%(speed)s, function(){%(callback)s})' % {
                    'name': name, 'speed': speed, 'callback': ';'.join(callback)})
            return self.fnc('%(name)s(%(speed)s)' % {'name': name, 'speed': speed})
        return self.fnc('%s()' % name)

    def hide(self, speed: int = None, callback: types.JS_FUNCS_TYPES = None):
        return self._speed_fnc('hide', speed, callback)

    def show(self, speed: int = None, callback: types.JS_FUNCS_TYPES = None):
        return self._speed_fnc('show', speed, callback)

    def fadeIn(self, speed: int = None, callback: types.JS_FUNCS_TYPES = None):
        return self._speed_fnc('fadeIn', speed, callback)

    def fadeOut(self, speed: int = None, callback: types.JS_FUNCS_TYPES = None):
        return self._speed_fnc('fadeOut', speed, callback)

    def fadeToggle(self, speed: int = None, callback: types.JS_FUNCS_TYPES = None):
        return self._speed_fnc('fadeToggle', speed, callback)

    def fadeTo(self, duration, opacity, easing=None, complete=None):
        """Append ``fadeTo`` with optional easing and completion callback."""
        if complete is not None:
            if not isinstance(complete, list):
                complete = [complete]
            complete = 'function(){%s}' % ';'.join(complete)
        if easing is not None:
            if complete is not None:
                jq_func = 'fadeTo(%(speed)s, %(opacity)s, %(easing)s, %(callback)s)' % {'speed': duration, 'opacity': opacity, 'easing': easing, 'callback': complete}
            else:
                jq_func = 'fadeTo(%(speed)s, %(opacity)s, %(easing)s)' % {'speed': duration, 'opacity': opacity, 'easing': easing}
        elif complete is not None:
            jq_func = 'fadeTo(%(speed)s, %(opacity)s, %(complete)s)' % {'speed': duration, 'opacity': opacity, 'complete': complete}
        else:
            jq_func = 'fadeTo(%(speed)s, %(opacity)s)' % {'speed': duration, 'opacity': opacity}
        return self.fnc(jq_func)

    def slideDown(self, speed: int = None, callback: types.JS_FUNCS_TYPES = None):
        return self._speed_fnc('slideDown', speed, callback)

    def slideUp(self, speed: int = None, callback: types.JS_FUNCS_TYPES = None):
        return self._speed_fnc('slideUp', speed, callback)

    def slideToggle(self, speed: int = None, callback: types.JS_FUNCS_TYPES = None):
        return self._speed_fnc('slideToggle', speed, callback)

    def animate(self, params, speed: int = 400, easing: str = 'swing', callback: types.JS_FUNCS_TYPES = None):
        easing = JsUtils.jsConvertData(easing, None)
        if callback is not None:
            return self.fnc('animate(%s, %s, %s, %s)' % (params, speed, easing, callback))
        return self.fnc('animate(%s, %s, %s)' % (params, speed, easing))

    def stop(self, stop_all: bool = False, go_to_end: bool = False):
        stop_all = JsUtils.jsConvertData(stop_all, None)
        go_to_end = JsUtils.jsConvertData(go_to_end, None)
        return self.fnc('stop(%(stopAll)s, %(goToEnd)s)' % {'stopAll': stop_all, 'goToEnd': go_to_end})

    def remove(self, selector=None):
        if selector is not None:
            selector = JsUtils.jsConvertData(selector, None)
            return self.fnc('remove(%s)' % selector)
        return self.fnc('remove()')

    def empty(self):
        return self.fnc('empty()')

    def siblings(self, selector: types.JS_DATA_TYPES = None):
        if selector is not None:
            selector = JsUtils.jsConvertData(selector, None)
            return self.fnc('siblings(%s)' % selector)
        return self.fnc('siblings()')

    def next(self, selector: types.JS_DATA_TYPES = None):
        if selector is not None:
            selector = JsUtils.jsConvertData(selector, None)
            return self.fnc('next(%s)' % selector)
        return self.fnc('next()')

    def prev(self, selector: types.JS_DATA_TYPES = None):
        if selector is not None:
            selector = JsUtils.jsConvertData(selector, None)
            return self.fnc('prev(%s)' % selector)
        return self.fnc('prev()')

    def first(self):
        return self.fnc('first()')

    def children(self, selector: types.JS_DATA_TYPES = None):
        # NOTE(review): unlike siblings/next/prev, the selector is not run
        # through jsConvertData here -- confirm whether that is intentional.
        if selector is None:
            return self.fnc('children()')
        return self.fnc('children(%s)' % selector)

    def last(self):
        return self.fnc('last()')

    def appendTo(self, dstJqId, js_func: types.JS_FUNCS_TYPES = None):
        return self.fnc('appendTo(%(dstJqId)s)' % {'dstJqId': JsUtils.jsConvertData(dstJqId, js_func)})

    def append(self, dstJqId, js_func: types.JS_FUNCS_TYPES = None):
        return self.fnc('append(%(dstJqId)s)' % {'dstJqId': JsUtils.jsConvertData(dstJqId, js_func)})

    def prepend(self, data, js_func: types.JS_FUNCS_TYPES = None):
        return self.fnc('prepend(%(data)s)' % {'data': JsUtils.jsConvertData(data, js_func)})

    def eq(self, i: int):
        return self.fnc('eq(%(index)s)' % {'index': i})

    def filter(self, selector=None):
        """Not implemented.

        NOTE(review): the body of this method appears to have been lost in
        transit -- restore it from the upstream source before use.
        """

    def _not(self):
        """Not implemented.

        NOTE(review): the body of this method appears to have been lost in
        transit -- restore it from the upstream source before use.
        """

    def find(self, criteria):
        criteria = JsUtils.jsConvertData(criteria, None)
        return self.fnc('find(%s)' % criteria)

    def each(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE = None):
        js_funcs = JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)
        return self.fnc('each(function(index, data){%s})' % js_funcs)

    def css(self, key, value=None):
        """Set (or read) CSS. Prefers the component's Python-side style
        object when available, otherwise emits a jQuery css() call."""
        if hasattr(self.component, 'style'):
            self.component.style.css(key, value)
            return self
        if value is None:
            return self.fnc('css(%s)' % JsUtils.jsConvertData(key, None))
        return self.fnc("css('%s', %s)" % (key, JsUtils.jsConvertData(value, None)))

    def attr(self, key, value):
        """Set an HTML attribute on the component (style/class excluded)."""
        if key.lower() in ['style', 'class']:
            raise ValueError('Only the css() function can be used to change the style')
        # Fixed: the attribute *name* must be ``key`` (previously ``value``
        # was passed for both the name and the value).
        self.component.set_attrs(value=value, name=key)
        return self.component

    def prop(self, key: types.JS_DATA_TYPES, value=None):
        if value is None:
            return self.fnc('prop()')
        return self.fnc('prop(%s, %s)' % (JsUtils.jsConvertData(key, None), value))

    def val(self, data=None, js_func: types.JS_FUNCS_TYPES = None):
        if data is None:
            return self.fnc('val()')
        return self.fnc('val(%s)' % JsUtils.jsConvertData(data, js_func))

    def text(self, data=None, js_func: types.JS_FUNCS_TYPES = None):
        # ``data`` now defaults to None for consistency with val()/html():
        # calling text() with no argument reads the text content.
        if data is None:
            return self.fnc('text()')
        data = JsUtils.jsConvertData(data, js_func)
        return self.fnc('text(%s)' % data)

    def html(self, data=None, js_func: types.JS_FUNCS_TYPES = None):
        if data is None:
            return self.fnc('html()')
        return self.fnc('html(%s)' % JsUtils.jsConvertData(data, js_func))

    def toggleClass(self, clsName, propagate: bool = False):
        if propagate:
            # First strip the class from all siblings.
            self.fnc_closure('parentNode.childNodes.forEach(function(e){e.classList.remove("%(data)s")})' % {'data': clsName})
        return self.fnc('toggleClass("%(data)s")' % {'data': clsName})

    def addClass(self, clsName, attrs=None, eventAttrs=None):
        """Add a CSS class; when attrs/eventAttrs are given the class is
        first registered on the component's style object."""
        if (attrs is not None) or (eventAttrs is not None):
            clsName = self.component.style.cssName(clsName)
            self.component.style.cssCls(clsName, attrs, eventAttrs, False)
        return self.fnc('addClass("%s")' % clsName)

    def getJSON(self, url, data, success, dataType='json', jsDataKey=None, isPyData=True, js_func=None, profile=None):
        success = JsUtils.jsConvertFncs(success, toStr=True, profile=profile)
        data = JsUtils.jsConvert(data, jsDataKey, isPyData, js_func)
        return Jsjqxhr("jQuery.getJSON('%s', {data: JSON.stringify(%s)}, function(data) {%s}, '%s')" % (url, data, success, dataType))

    def getJsScript(self, url, data, success, dataType='json', jsDataKey=None, isPyData=True, js_func=None, profile=None):
        success = JsUtils.jsConvertFncs(success, toStr=True, profile=profile)
        data = JsUtils.jsConvert(data, jsDataKey, isPyData, js_func)
        return Jsjqxhr("jQuery.getScript('%s', {data: JSON.stringify(%s)}, function(data, textStatus, jqxhr) {%s}, '%s')" % (url, data, success, dataType))

    def load(self, url, data=None, success_funcs=None, profile=None):
        """Build a jQuery load() call string (returned raw, not chained)."""
        url = JsUtils.jsConvertData(url, None)
        if success_funcs is None:
            if data is None:
                return '%s.load(%s)' % (self.varId, url)
            return '%s.load(%s, {data: JSON.stringify(%s)})' % (self.varId, url, data)
        str_fncs = JsUtils.jsConvertFncs(success_funcs, toStr=True, profile=profile)
        if data is None:
            return '%s.load(%s, function(data) {%s})' % (self.varId, url, str_fncs)
        return '%s.load(%s, {data: JSON.stringify(%s)}, function(data) {%s})' % (self.varId, url, data, str_fncs)

    def ajaxError(self, js_funcs, profile=False):
        js_funcs = JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)
        return 'jQuery(document).ajaxError(function(event, jqxhr, settings, thrownError) {%s})' % js_funcs

    def ajaxStart(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE = None):
        js_funcs = JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)
        return 'jQuery(document).ajaxStart(function() {%s})' % js_funcs

    def ajaxStop(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE = None):
        js_funcs = JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)
        return 'jQuery(document).ajaxStop(function() {%s})' % js_funcs

    def ajaxSuccess(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE = None):
        js_funcs = JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)
        return 'jQuery(document).ajaxSuccess(function(event, xhr, settings) {%s})' % js_funcs

    def ajaxSend(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE = None):
        js_funcs = JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)
        return 'jQuery(document).ajaxSend(function(event, jqxhr, settings) {%s})' % js_funcs

    def ajaxComplete(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE = None):
        js_funcs = JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)
        return 'jQuery(document).ajaxComplete(function() {%s})' % js_funcs

    def getParams(self, url, data, success_funcs, error_funcs, options, timeout, props, profile=None):
        """Serialize the settings object shared by get()/post()/ajax().

        NOTE(review): mutates the caller's ``data`` dict when ``props`` is
        given ('_system' key) -- confirm that is acceptable.
        """
        ajax_data = []
        if props is not None:
            data['_system'] = props
        if options is not None:
            for (k, v) in options.items():
                ajax_data.append('%s: %s' % (k, json.dumps(v)))
        ajax_data.extend([('data: {data: JSON.stringify(%s)}' % data), ("url: '%s'" % url)])
        if timeout is not None:
            ajax_data.append('timeout: %s' % timeout)
        if success_funcs is not None:
            ajax_data.append('success: function(result,status,xhr){%s}' % JsUtils.jsConvertFncs(success_funcs, toStr=True, profile=profile))
        if error_funcs is not None:
            ajax_data.append('error: function(xhr, status, error){%s}' % JsUtils.jsConvertFncs(error_funcs, toStr=True, profile=profile))
        return '{%s}' % ', '.join(ajax_data)

    def get(self, url, data, success_funcs=None, options=None, timeout=None, props=None) -> Jsjqxhr:
        return Jsjqxhr('jQuery.get(%s)' % self.getParams(url, data, success_funcs, None, options, timeout, props))

    def post(self, url, data=None, success_funcs=None, options=None, timeout=None, props=None) -> Jsjqxhr:
        data = (data or {})
        return Jsjqxhr('jQuery.post(%s)' % self.getParams(url, data, success_funcs, None, options, timeout, props))

    def ajax(self, type, url, data=None, success_funcs=None, error_funcs=None, options=None, timeout=None, props=None):
        """Generic ajax() builder; ``type`` must be POST or GET.

        NOTE(review): the parameter name ``type`` shadows the builtin but is
        kept for caller compatibility.
        """
        if type.upper() not in ['POST', 'GET']:
            # Fixed: the message previously interpolated ``url`` instead of
            # the offending method.
            raise ValueError('Method %s not recognised' % type)
        data = (data or {})
        return Jsjqxhr('jQuery.ajax(%s)' % self.getParams(url, data, success_funcs, error_funcs, options, timeout, props))

    def click(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE = None):
        # Clickable elements get the pointer cursor as a usability default.
        self.css('cursor', 'pointer')
        return self.fnc('click(function(event){%s})' % JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile))

    def on(self, event: Union[(str, List[str])], js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE = None, source_event: Optional[str] = None):
        """Bind one or several events through the component's event system."""
        source_event = (source_event or self.varId)
        if isinstance(event, list):
            event = ' '.join(event)
        js_event = self.component.on(event, js_funcs=js_funcs, profile=profile, source_event=source_event)
        self.component._browser_data['mouse'][event][source_event]['fncType'] = 'on'
        return js_event

    def after(self, html_frg: types.JS_DATA_TYPES):
        html_frg = JsUtils.jsConvertData(html_frg, None)
        return self.fnc('after(%s)' % html_frg)

    def before(self, html_frg: types.JS_DATA_TYPES):
        html_frg = JsUtils.jsConvertData(html_frg, None)
        return self.fnc('before(%s)' % html_frg)
# NOTE(review): this bare call reads like a decorator that lost its ``@``
# (and possibly a module prefix) during extraction; confirm against the
# original source before relying on registration side effects.
_filter('build_source_description')
def build_source_description(state):
    """Map a build-source type key to its human-readable description."""
    description_map = {'unset': 'No default source', 'link': 'External link to .spec or SRPM', 'upload': 'SRPM or .spec file upload', 'scm': 'Build from an SCM repository', 'pypi': 'Build from PyPI', 'rubygems': 'Build from RubyGems', 'custom': 'Custom build method', 'distgit': 'Build from DistGit'}
    # Unknown keys yield an empty string rather than raising.
    return description_map.get(state, '')
def get_fruit_names(is_jp: bool) -> list[str]:
    """Return the display names of all catfruit (matatabi) items, or an
    empty list if either required game file cannot be fetched."""
    names_csv = game_data_getter.get_file_latest('resLocal', 'GatyaitemName.csv', is_jp)
    if names_csv is None:
        helper.error_text('Failed to get catfruit names')
        return []
    item_names = csv_handler.parse_csv(names_csv.decode('utf-8'), delimeter=helper.get_text_splitter(is_jp))
    matatabi_tsv = game_data_getter.get_file_latest('DataLocal', 'Matatabi.tsv', is_jp)
    if matatabi_tsv is None:
        helper.error_text('Failed to get matatabi data')
        return []
    # Skip the header row; column 0 of each remaining row is the item id.
    fruit_rows = helper.parse_int_list_list(csv_handler.parse_csv(matatabi_tsv.decode('utf-8'), delimeter='\t'))[1:]
    return [item_names[int(row[0])][0] for row in fruit_rows]
class DateEpoch(DateTemplate):
    """Date template matching UNIX epoch timestamps (10-11 digits,
    optionally fractional; the long form also accepts milli/micro-second
    integer suffixes).

    NOTE(review): behavioral notes below are inferred from the visible
    regex construction only -- confirm against the upstream documentation.
    """

    def __init__(self, lineBeginOnly=False, pattern=None, longFrm=False):
        DateTemplate.__init__(self)
        # Template name defaults to Epoch/LongEpoch unless a custom pattern
        # is supplied.
        self.name = ('Epoch' if (not pattern) else pattern)
        self._longFrm = longFrm
        # Index of the regex group that captures the epoch value.
        self._grpIdx = 1
        # 10-11 digits, optionally followed by 3-6 fractional digits.
        epochRE = '\\d{10,11}\\b(?:\\.\\d{3,6})?'
        if longFrm:
            self.name = ('LongEpoch' if (not pattern) else pattern)
            # Long form: also allow ms/us integer suffixes.
            epochRE = '\\d{10,11}(?:\\d{3}(?:\\.\\d{1,6}|\\d{3})?)?'
        if pattern:
            # Substitute the epoch regex into the user-supplied pattern and
            # make sure the value ends up in a capturing group.
            regex = RE_EPOCH_PATTERN.sub((lambda v: ('(%s)' % epochRE)), pattern)
            if (not RE_GROUPED.search(pattern)):
                # We wrapped the whole pattern in a new group, shifting the
                # epoch value into group 2.
                regex = (('(' + regex) + ')')
                self._grpIdx = 2
            self.setRegex(regex)
        elif (not lineBeginOnly):
            # Anywhere in the line: also recognizes epochs inside leading
            # square brackets or SELinux ``audit(...)`` prefixes.
            regex = ('((?:^|(?P<square>(?<=^\\[))|(?P<selinux>(?<=\\baudit\\()))%s)(?:(?(selinux)(?=:\\d+\\)))|(?(square)(?=\\])))' % epochRE)
            self.setRegex(regex, wordBegin=False)
        else:
            # Restricted to line start (optionally bracketed).
            regex = ('((?P<square>(?<=^\\[))?%s)(?(square)(?=\\]))' % epochRE)
            self.setRegex(regex, wordBegin='start', wordEnd=True)

    def getDate(self, line, dateMatch=None, default_tz=None):
        """Return ``(epoch_seconds, match)`` for ``line`` or None.

        NOTE(review): ``default_tz`` is accepted for interface compatibility
        but unused here (epoch values are timezone-independent).
        """
        if (not dateMatch):
            dateMatch = self.matchDate(line)
        if dateMatch:
            v = dateMatch.group(self._grpIdx)
            if (self._longFrm and (len(v) >= 13)):
                # Long values carry ms (13+) or us (16+) precision: scale
                # down to seconds.
                if ((len(v) >= 16) and ('.' not in v)):
                    v = (float(v) / 1000000)
                else:
                    v = (float(v) / 1000)
            return (float(v), dateMatch)
def execute_sql_statement(cmd: str, results: bool = False, verbose: bool = False) -> Optional[List[dict]]:
    """Execute a SQL statement against the configured database.

    :param cmd: SQL text to execute (runs with autocommit).
    :param results: when True, fetch and return the result rows as dicts.
    :param verbose: when True, echo the statement before running it.
    :return: list of row dicts when ``results`` is True, else None.
    """
    rows = None
    if verbose:
        print(cmd)
    connection = psycopg2.connect(dsn=get_database_dsn_string())
    try:
        connection.autocommit = True
        with connection.cursor() as cursor:
            cursor.execute(cmd)
            if results:
                rows = db_rows_to_dict(cursor)
    finally:
        # psycopg2's ``with connection:`` only ends the transaction; it does
        # NOT close the connection, which the original version leaked.
        connection.close()
    return rows
def test_not_propagating_when_modification_via_pointer_pass_in_function_is_possible_pointer_not_first_in_requirements():
    """Expression propagation must leave the CFG unchanged when a callee can
    modify a value through a pointer that is not first in its requirements."""
    before_cfg, expected_cfg = graphs_with_pointer_value_modification_via_function_call_pointer_not_first_in_requirements()
    _run_expression_propagation(before_cfg)
    assert _graphs_equal(before_cfg, expected_cfg)
def extender_modem_status(data, fos):
    """Push the 'extender modem-status' configuration to FortiOS.

    Filters the payload to the supported attributes, converts key naming
    (underscores -> hyphens, plus legacy attribute names), and issues the
    set() call in the requested VDOM.
    """
    vdom = data['vdom']
    payload = filter_extender_modem_status_data(data['extender_modem_status'])
    payload = underscore_to_hyphen(payload)
    payload = valid_attr_to_invalid_attrs(payload)
    return fos.set('extender', 'modem-status', data=payload, vdom=vdom)
class TestRequireIfMissingValidator(unittest.TestCase):
    """Exercise validators.RequireIfMissing in its 'missing' and 'present'
    trigger modes."""

    def setUp(self):
        self.validator = validators.RequireIfMissing

    def test_missing(self):
        # phone_type becomes required only when 'mail' is absent.
        require_phone_type = self.validator('phone_type', missing='mail')
        self.assertEqual(validate(require_phone_type, dict(phone_type='')), dict(phone_type='Please enter a value'))
        self.assertEqual(validate(require_phone_type, dict(phone_type='', mail='')), dict(phone_type='', mail=''))

    def test_present(self):
        # phone_type becomes required only when 'phone' is present.
        require_phone_type = self.validator('phone_type', present='phone')
        self.assertEqual(validate(require_phone_type, dict(phone_type='', phone='510 420 4577')), dict(phone_type='Please enter a value'))
        self.assertEqual(validate(require_phone_type, dict(phone='')), dict(phone=''))

    def test_zero(self):
        # A falsy-but-present value (0) must still count as present.
        require_operator = self.validator('operator', present='operand')
        self.assertEqual(validate(require_operator, dict(operator='', operand=0)), dict(operator='Please enter a value'))
class TestUnpackerPluginPostscript(TestUnpackerBase):
    """Tests for the Postscript unpacker plugin: selection, extraction, and
    the ASCII85 payload helpers."""

    def test_unpacker_selection_adobe_ps(self):
        for mime in ['text/postscript']:
            self.check_unpacker_selection(mime, 'Postscript')

    def test_extraction(self):
        files, meta_data = self.unpacker.extract_files_from_file(TEST_FILE, self.tmp_dir.name)
        assert meta_data['plugin_used'] == 'Postscript', 'wrong plugin selected'
        assert meta_data['Title'] == 'Firmware Update', 'meta data not set correctly'
        assert meta_data['ReleaseVersions'] == 'vx=10.80,ps=4.19.0,net=44.38,eng=26.P.1.4.19.0'
        assert meta_data['encoding_overhead'] == 0.25, 'encoding overhead not correct'
        assert len(meta_data.keys()) == 11, 'number of found meta data not correct'
        assert len(files) == 3, 'Number of extracted files not correct'

    def test_convert_payloads(self):
        encoded = [b'<~FCfN8~>', b'<~FCfN8?YjFoAR\nAneART?~>']
        decoded = _convert_payloads(encoded)
        assert decoded[0] == b'test', 'simple payload not correct'
        assert decoded[1] == b'test_line_break', 'line breaked payload not correct'

    def test_get_raw_payloads(self):
        payloads = _get_raw_payloads(get_binary_from_file(TEST_FILE))
        assert len(payloads) == 3, 'number of payloads not correct'
        assert payloads[0] == b'<~<+oue+DGm>FD,5.Anc:,F<FCgH#.D-A0C~>', 'simple payload not correct'
        assert payloads[1] == b'<~<+oue+DGm>&+Dl72BHV,0DJ*O$+E1b7Ci<`m+EV:*F<GX<Dfol,+Cf>-FCAm$+\nEM+;ATD3q+Dbb0ATJu&DIal2D]it9/hSa~>', 'multiline payload not correct'
        assert payloads[2] == b'<~;^"*BOu3kAoD^,<;~>', 'other header format'
class AdminStatisticsEventSchema(Schema):
    """Admin statistics schema: counts of draft, published and past events."""

    class Meta():
        type_ = 'admin-statistics-event'
        self_view = 'v1.admin_statistics_event_detail'
        inflect = dasherize

    id = fields.String()
    draft = fields.Method('events_draft_count')
    published = fields.Method('events_published_count')
    past = fields.Method('events_past_count')

    @staticmethod
    def _upcoming_events():
        """Query over events whose end time is still in the future."""
        return Event.query.filter(Event.ends_at > datetime.now(pytz.utc))

    def events_draft_count(self, obj):
        """Count of non-deleted, upcoming events still in draft state."""
        return get_count(self._upcoming_events().filter_by(state='draft', deleted_at=None))

    def events_published_count(self, obj):
        """Count of non-deleted, upcoming events in published state."""
        return get_count(self._upcoming_events().filter_by(state='published', deleted_at=None))

    def events_past_count(self, obj):
        """Count of events whose end time has already passed."""
        return get_count(Event.query.filter(Event.ends_at < datetime.now(pytz.utc)))
def udpsender(unitno, data, retrynum=1):
    """Broadcast *data* as a UDP datagram to the controller port.

    The payload may be bytes, str (encoded UTF-8) or any bytes-convertible
    object. Sends are best-effort: individual socket errors are swallowed
    because broadcast delivery is unreliable by design.

    Args:
        unitno: Unit number (not used by the send path itself).
        data: Payload as bytes, str or bytes-convertible object.
        retrynum: Number of send attempts (default 1), 0.1 s apart.
    """
    global controllerport
    destip = '255.255.255.255'  # limited-broadcast address
    # Normalise the payload to bytes once, outside the retry loop.
    if isinstance(data, bytes):
        payload = data
    elif isinstance(data, str):
        payload = data.encode('utf-8')
    else:
        payload = bytes(data)
    # FIX: close the socket deterministically (was leaked on every call).
    with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock:
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
        for attempt in range(retrynum):
            try:
                sock.sendto(payload, (destip, int(controllerport)))
            except (OSError, ValueError):
                # Best-effort: ignore socket failures and a malformed port value,
                # matching the original fire-and-forget behaviour (but no longer
                # swallowing KeyboardInterrupt/SystemExit like the bare except did).
                pass
            if attempt < (retrynum - 1):
                time.sleep(0.1)
def test_merge_entry_fullOverwrite():
    """With base_preference=False, merge_entry must take every value from the incoming object."""
    base = BloodHoundObject({'objectsid': '024929', 'otherproperty': 1})
    incoming = BloodHoundObject({'objectsid': '024930', 'otherproperty': 2})
    base.merge_entry(incoming, base_preference=False)
    # Both the identifier and the plain property come from `incoming`.
    assert base.ObjectIdentifier == '024930'
    assert base.get_property('otherproperty') == 2
class UiContainerLabel(QtWidgets.QLabel):
    """Overlay label that draws the current selection rectangle, a capture-mode
    icon and (optionally) a debug-information box on top of its contents."""

    def __init__(self, parent: QtWidgets.QWidget, color: QtGui.QColor, capture_mode_func: Callable) -> None:
        super().__init__(parent)
        # Accent color used for border, selection pen and debug text.
        self.color: QtGui.QColor = color
        # Populated externally when the debug overlay should be rendered.
        self.debug_info: Optional[DebugInfo] = None
        # Current selection rectangle; updated by the owning widget.
        self.rect: QtCore.QRect = QtCore.QRect()
        self.rect_pen = QtGui.QPen(self.color, 2, QtCore.Qt.PenStyle.DashLine)
        # Callable returning the active CaptureMode (PARSE vs RAW).
        self.get_capture_mode = capture_mode_func
        self.setObjectName('ui_container')
        self.setStyleSheet(f'#ui_container {{border: 3px solid {self.color.name()};}}')
        self.setCursor(QtCore.Qt.CursorShape.CrossCursor)
        self.setScaledContents(True)

    def _draw_debug_infos(self, painter: QtGui.QPainter, rect: QtCore.QRect) -> None:
        """Render a semi-transparent info box with screen/window/screenshot metrics."""
        # All debug fields must be present; otherwise skip drawing entirely.
        if ((not self.debug_info) or (not self.debug_info.screen) or (not self.debug_info.screen.screenshot) or (not self.debug_info.window)):
            return
        selection = Rect(*cast(tuple, rect.normalized().getCoords()))
        selection_scaled = selection.scale(self.debug_info.scale_factor)
        lines = (
            '[ Screen ]',
            f'Size: {self.debug_info.screen.size}',
            f'Position: {self.debug_info.screen.coords}',
            f'Device pixel ratio: {self.debug_info.screen.device_pixel_ratio}',
            '',
            '[ Window ]',
            f'Size: {self.debug_info.window.size().toTuple()}',
            f'Position: {cast(tuple, self.debug_info.window.geometry().getCoords())}',
            f'Device pixel ratio: {self.debug_info.window.devicePixelRatio()}',
            f'Selected region: {selection.coords}',
            '',
            '[ Screenshot ]',
            f'Size: {self.debug_info.screen.screenshot.size().toTuple()}',
            f'Selected region (scaled): {selection_scaled.coords}',
            '',
            '[ Scaling detected ]',
            f'Factor: {self.debug_info.scale_factor:.2f}',
        )
        # Translucent black background panel sized to the number of text lines.
        painter.setPen(QtGui.QColor(0, 0, 0, 0))
        painter.setBrush(QtGui.QColor(0, 0, 0, 175))
        painter.drawRect(3, 3, 300, ((20 * len(lines)) + 5))
        painter.setBrush(QtGui.QColor(0, 0, 0, 0))
        painter.setPen(self.color)
        painter.setFont(QtGui.QFont(QtGui.QFont().family(), 10, 600))
        for (idx, line) in enumerate(lines):
            painter.drawText(10, (20 * (idx + 1)), line)

    def paintEvent(self, event: QtGui.QPaintEvent) -> None:
        """Paint the selection rectangle, optional debug overlay and mode icon."""
        super().paintEvent(event)
        if (not (self.rect or self.debug_info)):
            return
        painter = QtGui.QPainter(self)
        self.rect = self.rect.normalized()
        if self.debug_info:
            self._draw_debug_infos(painter, self.rect)
        if (not self.rect):
            return
        painter.setPen(self.rect_pen)
        painter.drawRect(self.rect)
        # Icon in the selection's top-right corner signals the active capture mode.
        if (self.get_capture_mode() is CaptureMode.PARSE):
            mode_icon = QtGui.QIcon(':parse')
        else:
            mode_icon = QtGui.QIcon(':raw')
        mode_icon.paint(painter, (self.rect.right() - 24), (self.rect.top() - 30), 24, 24)
        painter.end()
def dump_interactive_plot(config: collections.OrderedDict, data: np.ndarray, longs: pd.DataFrame, shorts: pd.DataFrame, candles_interval=None, theme=''):
    """Render an interactive pyecharts candlestick chart with trade markers.

    Args:
        config: Backtest configuration (symbol, date range, plot options, paths).
        data: OHLCV array used to build the candlesticks.
        longs: DataFrame of long positions.
        shorts: DataFrame of short positions.
        candles_interval: Candle aggregation interval; a str is parsed via
            CustomDatetime, None falls back to config['plot_candles_interval']
            (default: 1 minute).
        theme: pyecharts theme name; '' falls back to config['plot_theme'].

    Side effects:
        Writes ``interactive_plot.html`` into ``config['plots_dirpath']``.
    """
    if candles_interval is None:
        candles_interval = config.get('plot_candles_interval', datetime.timedelta(minutes=1))
    # FIX: use isinstance() for the type check instead of `type(x) is str`.
    if isinstance(candles_interval, str):
        # Allow the interval to be given as a human-readable string.
        candles_interval = CustomDatetime.interval_to_timedelta(candles_interval)
    if theme == '':
        theme = config.get('plot_theme', pyecharts_globals.ThemeType.INFOGRAPHIC)
    candlesticks = create_graphs(data, candles_interval, config['ohlcv'])
    (long_entries, long_profits, long_losses) = create_positions(longs, True)
    (short_entries, short_profits, short_losses) = create_positions(shorts, False)
    scatters = [long_entries, long_profits, long_losses, short_entries, short_profits, short_losses]
    grid_chart = pyecharts.charts.Grid(init_opts=opts.InitOpts(width='100%', height='1000px', animation_opts=opts.AnimationOpts(animation=True), page_title=f"{config['symbol']} : {config['start_date']} to {config['end_date']}", theme=theme))
    # Overlay every trade scatter series on top of the candlestick chart.
    for scatter in scatters:
        candlesticks.overlap(scatter)
    grid_chart.add(candlesticks, grid_opts=opts.GridOpts(pos_left='10%', pos_right='8%', height='80%'))
    grid_chart.render(config['plots_dirpath'] + 'interactive_plot.html')
class YAML(_CachingLoaderPlugin):
    """Loader plugin that builds a YAMLAccountLoader from CLI args and plugin config."""

    def instantiate(self, args):
        """Construct and return the configured YAMLAccountLoader."""
        cfg = self.cfg
        # CLI-provided values come from `args`; everything else from plugin config.
        return YAMLAccountLoader(
            url=args.loader_url,
            max_age=args.loader_max_age,
            id_attr=cfg('id_attr', must_exist=True),
            path=cfg('path', type=List(Str), default=[]),
            str_template=args.loader_str_template,
            include_attrs=cfg('include_attrs', type=List(Str), default=[]),
            exclude_attrs=cfg('exclude_attrs', type=List(Str), default=[]),
            no_verify=args.loader_no_verify,
        )
def _cmd_heatmap(args):
    """Plot a heatmap from the given copy-number files, then show or save it."""
    sample_arrays = []
    for fname in args.filenames:
        cnarr = read_cna(fname)
        if args.adjust_xy:
            # Shift X/Y-chromosome values according to the inferred sample sex.
            is_sample_female = verify_sample_sex(cnarr, args.sample_sex, args.male_reference, args.diploid_parx_genome)
            cnarr = cnarr.shift_xx(args.male_reference, is_sample_female, args.diploid_parx_genome)
        sample_arrays.append(cnarr)
    heatmap.do_heatmap(sample_arrays, args.chromosome, args.desaturate, args.by_bin, args.delim_sampl, args.vertical, args.title)
    if not args.output:
        pyplot.show()
        return
    # Infer the image format from the output file extension.
    oformat = os.path.splitext(args.output)[-1].replace('.', '')
    pyplot.savefig(args.output, format=oformat, bbox_inches='tight')
    logging.info('Wrote %s', args.output)
@pytest.mark.django_db
def test_load_bureau_title_lookup():
    """The management command should ingest the CSV fixture and populate
    BureauTitleLookup with titles and slugs keyed by federal account code.

    FIX: the decorator had lost its ``@pytest.mark`` prefix (bare ``.django_db``
    is a syntax error); restored.
    """
    test_data_path = str((((settings.APP_DIR / 'data') / 'testing_data') / 'FMS_GWA_EXPORT_APPN.csv'))
    call_command('load_bureau_title_lookup', path=test_data_path)
    assert (BureauTitleLookup.objects.count() == 4)
    lookup_1 = BureauTitleLookup.objects.filter(federal_account_code='000-0100').first()
    assert (lookup_1.bureau_title == 'Senate')
    assert (lookup_1.bureau_slug == 'senate')
    lookup_2 = BureauTitleLookup.objects.filter(federal_account_code='044-0170').first()
    assert (lookup_2.bureau_title == 'Architect of the Capitol')
    assert (lookup_2.bureau_slug == 'architect-of-the-capitol')
class Enhancer():
    """Image-enhancement generator network (Keras).

    Encodes an input image, fuses the encoding with externally supplied core
    features at half resolution, refines the result through three residual
    units, and decodes back to an image with a ``tanh`` output.
    """

    def __init__(self, width=256, height=256, channels=1, gpus=0):
        """Build the model graph.

        Args:
            width: Input image width in pixels.
            height: Input image height in pixels.
            channels: Number of image channels.
            gpus: If >= 2, replicate the model with multi_gpu_model for
                data-parallel training; ``save_model`` always refers to the
                single-device graph for checkpointing.
        """
        self.width = width
        self.height = height
        self.channels = channels
        self.gpus = gpus
        # Full-resolution input image plus a half-resolution 64-channel
        # feature map produced by the core generator.
        enhancer_generator_input = Input(shape=(self.width, self.height, channels))
        enhancer_core_features = Input(shape=((self.width / 2), (self.height / 2), 64))
        # Encoder: 7x7 conv, then a strided 3x3 conv down to half resolution.
        encoder = Conv2D_r(32, 7, 1, enhancer_generator_input)
        encoder = InstanceNormalization(axis=(- 1))(encoder)
        encoder = Activation('relu')(encoder)
        encoder = Conv2D_r(64, 3, 2, encoder)
        # Fuse the encoder output with the core generator's features.
        enhancer_and_core = concatenate([encoder, enhancer_core_features], axis=(- 1))
        enhancer_and_core = InstanceNormalization(axis=(- 1))(enhancer_and_core)
        enhancer_and_core = Activation('relu')(enhancer_and_core)
        enhancer_and_core = Conv2D_r(64, 3, 1, enhancer_and_core)
        enhancer_and_core = InstanceNormalization(axis=(- 1))(enhancer_and_core)
        enhancer_and_core = Activation('relu')(enhancer_and_core)

        def ResidualUnitLocal(input_features):
            # Residual block: two 3x3 convs with a skip connection, ReLU output.
            output_features = Conv2D_r(64, 3, 1, input_features)
            output_features = InstanceNormalization(axis=(- 1))(output_features)
            output_features = Activation('relu')(output_features)
            output_features = Conv2D_r(64, 3, 1, output_features)
            output_features = InstanceNormalization(axis=(- 1))(output_features)
            output_features = add([input_features, output_features])
            output_features = Activation('relu')(output_features)
            return output_features
        resnet = ResidualUnitLocal(enhancer_and_core)
        resnet = ResidualUnitLocal(resnet)
        resnet = ResidualUnitLocal(resnet)
        # Decoder: upsample back to full resolution, then project to `channels`.
        # NOTE(review): the final Conv2D_r uses stride 2, which halves the
        # spatial size again after upsampling -- confirm this is intended.
        decoder = UpSampling2D(2)(resnet)
        decoder = Conv2D_r(64, 3, 1, decoder)
        decoder = InstanceNormalization(axis=(- 1))(decoder)
        decoder = Activation('relu')(decoder)
        decoder = Conv2D_r(channels, 7, 2, decoder)
        enhanced_picture = Activation('tanh')(decoder)
        if (self.gpus < 2):
            self.model = Model([enhancer_generator_input, enhancer_core_features], enhanced_picture)
            self.save_model = self.model
        else:
            # Keep a single-device copy for saving; train on the replicated model.
            self.save_model = Model([enhancer_generator_input, enhancer_core_features], enhanced_picture)
            self.model = multi_gpu_model(self.save_model, gpus=gpus)
def _KeepDirInSync(file1, file2):
    """Recursively enqueue matching files under two directory trees for syncing.

    Names present in only one tree are skipped (with a warning). Name
    collisions where one side is a file and the other a directory are
    reported but not synced.
    """
    join = os.path.join
    names1 = set(os.listdir(file1))
    names2 = set(os.listdir(file2))
    if names1 != names2:
        Print(('Warning: the directory of %s structure is different from %s. Only files in both are kept in sync.' % (names1, names2)))
    for name in names1.intersection(names2):
        full1 = join(file1, name)
        full2 = join(file2, name)
        if os.path.isfile(full1) and os.path.isfile(full2):
            # Hand the matched pair to the background sync thread.
            _KeepInSyncThread._instance.files_to_keep_in_sync_queue.put(_KeepInSyncStruct(full1, full2))
        elif os.path.isdir(full1) and os.path.isdir(full2):
            _KeepDirInSync(full1, full2)
        else:
            Print(('Expected %s and %s to be both files or dirs.' % (full1, full2)))
def main():
    """Ansible module entry point: manage FortiOS ``casb user-activity`` objects.

    Builds the argument spec from the versioned schema, opens the httpapi
    connection, performs schema/firmware version checking, and delegates the
    actual CRUD work to ``fortios_casb``.
    """
    module_spec = schema_to_module_spec(versioned_schema)
    # The object's primary key attribute in the FortiOS API.
    mkeyname = 'name'
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']}, 'casb_user_activity': {'required': False, 'type': 'dict', 'default': None, 'no_log': True, 'options': {}}}
    # Copy every schema-derived option into the module spec; the mkey is required.
    for attribute_name in module_spec['options']:
        fields['casb_user_activity']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['casb_user_activity']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=False)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        # Forward authentication/logging options to the httpapi plugin.
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'casb_user_activity')
        (is_error, has_changed, result, diff) = fortios_casb(module.params, fos)
    else:
        # No persistent connection available: abort with the standard message.
        module.fail_json(**FAIL_SOCKET_MSG)
    # A schema/firmware mismatch is surfaced as a warning (and echoed in the
    # exit/fail payloads below) rather than aborting outright.
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
class TestMultipleFiles(CustomTestCase):
    """POST two files to /cmd/strings and verify the combined report."""

    uri = '/cmd/strings'

    def create_app(self):
        # FIX: create_app is invoked as a bound method by the test framework,
        # so it must accept `self`; without it the call raised TypeError.
        app.config['TESTING'] = True
        return app

    def test_multiple_files(self):
        """Both uploaded files should appear, in order, in the returned report."""
        req_files = {'inputfile': (io.BytesIO(b'Test File #1'), 'inputfile'), 'someotherfile': (io.BytesIO(b'Test File #2'), 'someotherfile')}
        req_data = {'request_json': json.dumps({'args': ['', '']})}
        data = {**req_data, **req_files}
        r1 = self.client.post(self.uri, data=data, content_type='multipart/form-data')
        key = r1.json['key']
        # Poll/fetch the asynchronous result for the returned job key.
        r2 = self.fetch_result(key)
        r2_json = r2.get_json()
        self.assertEqual(r2_json['key'], key)
        self.assertEqual(r2_json['report'], 'Test File #1\nTest File #2\n')
        self.assertEqual(r2_json['returncode'], 0)
def s7ddrphy_with_ratio(ratio, phy_cls=A7DDRPHY, ddr_clk=None, serdes_reset_cnt=0):
    """Return a factory producing a Series-7 DDR PHY wrapped in a DFI rate converter.

    The wrapper multiplies the caller's ``sys_clk_freq`` by ``ratio`` and
    clocks the PHY from ``ddr_clk`` (default: ``sys{4*ratio}x``).
    """
    ddr_clk = ddr_clk or f'sys{4 * ratio}x'
    wrapper_cls = DFIRateConverter.phy_wrapper(phy_cls=phy_cls, ratio=ratio, serdes_reset_cnt=serdes_reset_cnt)

    def wrapper(*args, **kwargs):
        # NOTE(review): the fallback of .0 (== 0.0) for sys_clk_freq looks
        # suspicious -- verify whether a real default frequency was intended.
        sys_clk_freq = kwargs.pop('sys_clk_freq', .0)
        return wrapper_cls(*args, ddr_clk=ddr_clk, sys_clk_freq=ratio * sys_clk_freq, **kwargs)

    return wrapper
class OptionPlotoptionsHistogramSonificationContexttracksMappingLowpass(Options):
    """Auto-generated option wrapper for the lowpass-filter mapping of
    histogram sonification context tracks
    (plotOptions.histogram.sonification.contextTracks.mapping.lowpass)."""

    def frequency(self) -> 'OptionPlotoptionsHistogramSonificationContexttracksMappingLowpassFrequency':
        # Sub-configuration for the filter cutoff frequency.
        return self._config_sub_data('frequency', OptionPlotoptionsHistogramSonificationContexttracksMappingLowpassFrequency)

    def resonance(self) -> 'OptionPlotoptionsHistogramSonificationContexttracksMappingLowpassResonance':
        # Sub-configuration for the filter resonance.
        return self._config_sub_data('resonance', OptionPlotoptionsHistogramSonificationContexttracksMappingLowpassResonance)
class OptionSeriesPolygonData(Options):
    """Auto-generated option wrapper for ``series.polygon.data`` points.

    NOTE(review): every scalar option appears as a getter/setter pair with the
    same name; the upstream generator presumably decorates these as properties.
    As written here the second definition shadows the first -- confirm against
    the original generated file.
    """

    def accessibility(self) -> 'OptionSeriesPolygonDataAccessibility':
        # Accessibility sub-options for the point.
        return self._config_sub_data('accessibility', OptionSeriesPolygonDataAccessibility)

    def className(self):
        # Getter: CSS class name of the point (no default).
        return self._config_get(None)

    def className(self, text: str):
        self._config(text, js_type=False)

    def color(self):
        # Getter: individual point color (no default).
        return self._config_get(None)

    def color(self, text: str):
        self._config(text, js_type=False)

    def colorIndex(self):
        # Getter: palette color index (no default).
        return self._config_get(None)

    def colorIndex(self, num: float):
        self._config(num, js_type=False)

    def custom(self):
        # Getter: arbitrary user-defined payload attached to the point.
        return self._config_get(None)

    def custom(self, value: Any):
        self._config(value, js_type=False)

    def dataLabels(self) -> 'OptionSeriesPolygonDataDatalabels':
        # Data-label sub-options for the point.
        return self._config_sub_data('dataLabels', OptionSeriesPolygonDataDatalabels)

    def description(self):
        # Getter: screen-reader description (no default).
        return self._config_get(None)

    def description(self, text: str):
        self._config(text, js_type=False)

    def dragDrop(self) -> 'OptionSeriesPolygonDataDragdrop':
        # Drag/drop sub-options for the point.
        return self._config_sub_data('dragDrop', OptionSeriesPolygonDataDragdrop)

    def drilldown(self):
        # Getter: id of the drilldown series for this point (no default).
        return self._config_get(None)

    def drilldown(self, text: str):
        self._config(text, js_type=False)

    def events(self) -> 'OptionSeriesPolygonDataEvents':
        # Event-handler sub-options for the point.
        return self._config_sub_data('events', OptionSeriesPolygonDataEvents)

    def id(self):
        # Getter: unique point id (no default).
        return self._config_get(None)

    def id(self, text: str):
        self._config(text, js_type=False)

    def labelrank(self):
        # Getter: label rank of the point (no default).
        return self._config_get(None)

    def labelrank(self, num: float):
        self._config(num, js_type=False)

    def marker(self) -> 'OptionSeriesPolygonDataMarker':
        # Marker sub-options for the point.
        return self._config_sub_data('marker', OptionSeriesPolygonDataMarker)

    def name(self):
        # Getter: display name of the point (no default).
        return self._config_get(None)

    def name(self, text: str):
        self._config(text, js_type=False)

    def selected(self):
        # Getter: whether the point is initially selected (default False).
        return self._config_get(False)

    def selected(self, flag: bool):
        self._config(flag, js_type=False)

    def x(self):
        # Getter: x value of the point (no default).
        return self._config_get(None)

    def x(self, num: float):
        self._config(num, js_type=False)

    def y(self):
        # Getter: y value of the point (no default).
        return self._config_get(None)

    def y(self, num: float):
        self._config(num, js_type=False)
class OptionSeriesAreasplinerangeStatesHoverMarker(Options):
    """Auto-generated option wrapper for the hover-state marker of
    areasplinerange series.

    NOTE(review): each option appears as a getter/setter pair with the same
    name; presumably decorated as properties by the upstream generator --
    confirm against the original generated file.
    """

    def enabled(self):
        # Getter: whether the hover marker is enabled (no explicit default).
        return self._config_get(None)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def enabledThreshold(self):
        # Getter: point-density threshold below which markers auto-enable (default 2).
        return self._config_get(2)

    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)

    def fillColor(self):
        # Getter: marker fill color (no default).
        return self._config_get(None)

    def fillColor(self, text: str):
        self._config(text, js_type=False)

    def height(self):
        # Getter: explicit marker height (no default).
        return self._config_get(None)

    def height(self, num: float):
        self._config(num, js_type=False)

    def lineColor(self):
        # Getter: marker outline color (default white).
        return self._config_get('#ffffff')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        # Getter: marker outline width (default 0).
        return self._config_get(0)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def radius(self):
        # Getter: marker radius (default 4).
        return self._config_get(4)

    def radius(self, num: float):
        self._config(num, js_type=False)

    def width(self):
        # Getter: explicit marker width (no default).
        return self._config_get(None)

    def width(self, num: float):
        self._config(num, js_type=False)
class BaseHandledException(Exception):
    """Base class for exceptions that wrap another error or a plain message.

    Subclasses define ``MSG``, a format string with one ``{}`` placeholder
    that receives the string form of the wrapped object.
    """

    MSG: str  # format template, e.g. "Operation failed: {}"

    def __init__(self, exc: Union[(Exception, str)]) -> None:
        super().__init__()
        # Wrapped exception instance or message text.
        self.exc = exc

    def __repr__(self) -> str:
        detail = str(self.exc)
        return self.MSG.format(detail)

    def __str__(self) -> str:
        # str() intentionally mirrors repr() for these handled exceptions.
        return self.__repr__()
@pytest.mark.skipif(('ethereum_optimized.state_db' not in sys.modules), reason="missing dependency (use `pip install 'ethereum[optimized]'`)")
def test_resurrection() -> None:
    """Destroying storage and re-creating the account must behave identically
    in the reference and optimized state implementations.

    FIX: the decorator had lost its ``@pytest.mark`` prefix (bare ``.skipif``
    is a syntax error); restored.
    """
    def actions(impl: Any) -> Any:
        # Create account, write a storage slot, commit, destroy the storage,
        # commit again, then resurrect the (empty) account.
        obj = impl.State()
        impl.set_account(obj, ADDRESS_FOO, EMPTY_ACCOUNT)
        impl.set_storage(obj, ADDRESS_FOO, STORAGE_FOO, U256(42))
        impl.state_root(obj)
        impl.destroy_storage(obj, ADDRESS_FOO)
        impl.state_root(obj)
        impl.set_account(obj, ADDRESS_FOO, EMPTY_ACCOUNT)
        return obj

    state_normal = actions(state)
    state_optimized = actions(optimized_state)
    optimized_state.state_root(state_optimized)
    # The resurrected storage slot must read the same in both implementations...
    assert (state.get_storage(state_normal, ADDRESS_FOO, STORAGE_FOO) == optimized_state.get_storage(state_optimized, ADDRESS_FOO, STORAGE_FOO))
    # ...and the final state roots must agree.
    assert (state.state_root(state_normal) == optimized_state.state_root(state_optimized))
def lazy_import():
    """Import the response-model classes on first use and publish them in this
    module's globals, avoiding circular imports at module load time."""
    from fastly.model.backend_response import BackendResponse
    from fastly.model.cache_setting_response import CacheSettingResponse
    from fastly.model.condition_response import ConditionResponse
    from fastly.model.director import Director
    from fastly.model.domain_response import DomainResponse
    from fastly.model.gzip_response import GzipResponse
    from fastly.model.header_response import HeaderResponse
    from fastly.model.healthcheck_response import HealthcheckResponse
    from fastly.model.request_settings_response import RequestSettingsResponse
    from fastly.model.response_object_response import ResponseObjectResponse
    from fastly.model.schemas_snippet_response import SchemasSnippetResponse
    from fastly.model.schemas_vcl_response import SchemasVclResponse
    from fastly.model.schemas_version_response import SchemasVersionResponse
    from fastly.model.version_detail import VersionDetail
    from fastly.model.version_detail_settings import VersionDetailSettings
    # Expose every imported model at module level under its own name.
    globals().update(
        BackendResponse=BackendResponse,
        CacheSettingResponse=CacheSettingResponse,
        ConditionResponse=ConditionResponse,
        Director=Director,
        DomainResponse=DomainResponse,
        GzipResponse=GzipResponse,
        HeaderResponse=HeaderResponse,
        HealthcheckResponse=HealthcheckResponse,
        RequestSettingsResponse=RequestSettingsResponse,
        ResponseObjectResponse=ResponseObjectResponse,
        SchemasSnippetResponse=SchemasSnippetResponse,
        SchemasVclResponse=SchemasVclResponse,
        SchemasVersionResponse=SchemasVersionResponse,
        VersionDetail=VersionDetail,
        VersionDetailSettings=VersionDetailSettings,
    )
def train_step(images, labels):
    """Run one optimization step on a batch of MNIST images.

    Appends the batch-mean loss to the global ``loss_history`` and applies
    gradients to ``mnist_model``'s trainable variables via ``optimizer``.
    """
    with tf.GradientTape() as tape:
        logits = mnist_model(images, training=True)
        # Hard-coded check: batch size 32 and 10 output classes.
        tf.debugging.assert_equal(logits.shape, (32, 10))
        loss_value = loss_object(labels, logits)
    loss_history.append(loss_value.numpy().mean())
    grads = tape.gradient(loss_value, mnist_model.trainable_variables)
    optimizer.apply_gradients(zip(grads, mnist_model.trainable_variables))
class TracksDrawer():
    """Abstract base for poster track renderers.

    Subclasses override the argument hooks and ``draw``; ``color``
    interpolates between the poster's configured colors based on the track's
    relative length.
    """

    def __init__(self, the_poster: Poster):
        self.poster = the_poster

    def create_args(self, args_parser: argparse.ArgumentParser) -> None:
        """Hook: register drawer-specific CLI arguments (no-op by default)."""
        pass

    def fetch_args(self, args: argparse.Namespace) -> None:
        """Hook: read back parsed CLI arguments (no-op by default)."""
        pass

    def draw(self, dr: svgwrite.Drawing, g: svgwrite.container.Group, size: XY, offset: XY) -> None:
        """Hook: render the tracks into the given SVG group (no-op by default)."""
        pass

    def color(self, length_range: QuantityRange, length: pint.Quantity, is_special: bool=False) -> str:
        """Interpolate a track color from the track's position in the length range."""
        palette = self.poster.colors
        if is_special:
            start_color, end_color = palette['special'], palette['special2']
        else:
            start_color, end_color = palette['track'], palette['track2']
        return utils.interpolate_color(start_color, end_color, length_range.relative_position(length))
class BackupRestoreSerializer(s.Serializer):
    """Validate parameters for restoring a backup onto a target VM disk."""

    # Hostname or UUID of the target VM (alphanumeric start, then [A-Za-z0-9._-]).
    target_hostname_or_uuid = s.RegexField('^[A-Za-z0-9][A-Za-z0-9\\._-]*$', required=True)
    # Disk ID on the target VM to restore onto.
    target_disk_id = s.IntegerField(max_value=DISK_ID_MAX_BHYVE, min_value=DISK_ID_MIN, required=True)
    # Source disk ID within the backup (defaults to the first disk).
    disk_id = s.IntegerField(max_value=DISK_ID_MAX_BHYVE, min_value=DISK_ID_MIN, default=1, required=True)
    # Force flag for the restore operation (defaults to True).
    force = s.BooleanField(default=True)
@_ExtendedCommunity.register_type(_ExtendedCommunity.FLOWSPEC_VLAN_ACTION)
class BGPFlowSpecVlanActionCommunity(_ExtendedCommunity):
    """BGP Flow Specification VLAN action extended community.

    Packs two (action flags, VLAN id, CoS) triples: the VLAN id occupies the
    upper 12 bits and CoS bits 1-3 of each 16-bit field.

    FIX: restored the stripped ``@`` on the register_type decorator and the
    missing ``@classmethod`` on ``parse_value`` (it takes ``cls``).
    """
    _VALUE_PACK_STR = '!BBBHH'
    _VALUE_FIELDS = ['subtype', 'actions_1', 'actions_2', 'vlan_1', 'vlan_2', 'cos_1', 'cos_2']
    ACTION_NAME = 'vlan_action'
    _COS_MASK = 7
    # Action flag bits.
    POP = (1 << 7)
    PUSH = (1 << 6)
    SWAP = (1 << 5)
    REWRITE_INNER = (1 << 4)
    REWRITE_OUTER = (1 << 3)

    def __init__(self, **kwargs):
        super(BGPFlowSpecVlanActionCommunity, self).__init__()
        kwargs['subtype'] = self.SUBTYPE_FLOWSPEC_VLAN_ACTION
        self.do_init(BGPFlowSpecVlanActionCommunity, self, kwargs)

    @classmethod
    def parse_value(cls, buf):
        """Unpack wire bytes into constructor fields (inverse of serialize_value)."""
        (subtype, actions_1, actions_2, vlan_cos_1, vlan_cos_2) = struct.unpack_from(cls._VALUE_PACK_STR, buf)
        return {'subtype': subtype, 'actions_1': actions_1, 'vlan_1': int((vlan_cos_1 >> 4)), 'cos_1': int(((vlan_cos_1 >> 1) & cls._COS_MASK)), 'actions_2': actions_2, 'vlan_2': int((vlan_cos_2 >> 4)), 'cos_2': int(((vlan_cos_2 >> 1) & cls._COS_MASK))}

    def serialize_value(self):
        """Pack the action/VLAN/CoS fields back into wire format."""
        return struct.pack(self._VALUE_PACK_STR, self.subtype, self.actions_1, self.actions_2, ((self.vlan_1 << 4) + (self.cos_1 << 1)), ((self.vlan_2 << 4) + (self.cos_2 << 1)))
class ConfigStore(metaclass=Singleton):
    """Singleton in-memory repository of structured configs.

    Configs live in a nested dict keyed by '/'-separated group segments; leaf
    entries are ConfigNode objects whose names always end in ``.yaml``.
    """

    def instance(*args: Any, **kwargs: Any) -> 'ConfigStore':
        # NOTE(review): no `self`/`cls` parameter -- presumably decorated as a
        # staticmethod upstream; confirm against the original file.
        return Singleton.instance(ConfigStore, *args, **kwargs)

    # Nested mapping: group dir -> ... -> config name -> ConfigNode.
    repo: Dict[(str, Any)]

    def __init__(self) -> None:
        self.repo = {}

    def store(self, name: str, node: Any, group: Optional[str]=None, package: Optional[str]=None, provider: Optional[str]=None) -> None:
        """Store ``node`` (converted to an OmegaConf structured config) under
        ``group``/``name``, creating intermediate group dicts as needed."""
        cur = self.repo
        if (group is not None):
            for d in group.split('/'):
                if (d not in cur):
                    cur[d] = {}
                cur = cur[d]
        # Leaf names are normalized to carry a .yaml suffix.
        if (not name.endswith('.yaml')):
            name = f'{name}.yaml'
        assert isinstance(cur, dict)
        cfg = OmegaConf.structured(node)
        cur[name] = ConfigNode(name=name, node=cfg, group=group, package=package, provider=provider)

    def load(self, config_path: str) -> ConfigNode:
        """Return a defensive copy of the stored node: the wrapper is shallow-
        copied and its config deep-copied so callers cannot mutate the repo."""
        ret = self._load(config_path)
        ret = copy.copy(ret)
        assert isinstance(ret, ConfigNode)
        ret.node = copy.deepcopy(ret.node)
        return ret

    def _load(self, config_path: str) -> ConfigNode:
        """Resolve ``dir/name`` into the stored ConfigNode.

        Raises:
            ConfigLoadError: if any path component or the leaf is missing.
        """
        idx = config_path.rfind('/')
        if (idx == (- 1)):
            # No group component: the whole path is the config name.
            ret = self._open(config_path)
            if (ret is None):
                raise ConfigLoadError(f'Structured config not found {config_path}')
            assert isinstance(ret, ConfigNode)
            return ret
        else:
            path = config_path[0:idx]
            name = config_path[(idx + 1):]
            d = self._open(path)
            if ((d is None) or (not isinstance(d, dict))):
                raise ConfigLoadError(f'Structured config not found {config_path}')
            if (name not in d):
                raise ConfigLoadError(f'Structured config {name} not found in {config_path}')
            ret = d[name]
            assert isinstance(ret, ConfigNode)
            return ret

    def get_type(self, path: str) -> ObjectType:
        """Classify a path as GROUP (dict), CONFIG (leaf) or NOT_FOUND."""
        d = self._open(path)
        if (d is None):
            return ObjectType.NOT_FOUND
        if isinstance(d, dict):
            return ObjectType.GROUP
        else:
            return ObjectType.CONFIG

    def list(self, path: str) -> List[str]:
        """Return sorted entry names directly under a group path.

        Raises:
            OSError: if the path is missing or points at a leaf config.
        """
        d = self._open(path)
        if (d is None):
            raise OSError(f'Path not found {path}')
        if (not isinstance(d, dict)):
            raise OSError(f'Path points to a file : {path}')
        return sorted(d.keys())

    def _open(self, path: str) -> Any:
        """Walk the repo by '/'-separated segments; return None when absent."""
        d: Any = self.repo
        for frag in path.split('/'):
            if (frag == ''):
                # Tolerate leading/trailing/duplicate slashes.
                continue
            if (frag in d):
                d = d[frag]
            else:
                return None
        return d
class OptionNavigationAnnotationsoptionsShapes(Options):
    """Auto-generated option wrapper for annotation shape options
    (navigation.annotationsOptions.shapes).

    NOTE(review): each scalar option appears as a getter/setter pair with the
    same name; presumably decorated as properties by the upstream generator --
    confirm against the original generated file.
    """

    def dashStyle(self):
        # Getter: dash style of the shape's stroke (no default).
        return self._config_get(None)

    def dashStyle(self, text: str):
        self._config(text, js_type=False)

    def fill(self):
        # Getter: fill color (default translucent black).
        return self._config_get('rgba(0, 0, 0, 0.75)')

    def fill(self, text: str):
        self._config(text, js_type=False)

    def height(self):
        return self._config_get(None)

    def height(self, num: float):
        self._config(num, js_type=False)

    def markerEnd(self):
        # Getter: id of the marker drawn at the end of the shape path.
        return self._config_get(None)

    def markerEnd(self, text: str):
        self._config(text, js_type=False)

    def markerStart(self):
        # Getter: id of the marker drawn at the start of the shape path.
        return self._config_get(None)

    def markerStart(self, text: str):
        self._config(text, js_type=False)

    def point(self):
        # Getter: anchor point of the shape (no default).
        return self._config_get(None)

    def point(self, text: str):
        self._config(text, js_type=False)

    def points(self):
        # Getter: anchor points for multi-point shapes (no default).
        return self._config_get(None)

    def points(self, value: Any):
        self._config(value, js_type=False)

    def r(self):
        # Getter: corner/circle radius (default '0'); setter passes raw JS.
        return self._config_get('0')

    def r(self, text: str):
        self._config(text, js_type=True)

    def ry(self):
        return self._config_get(None)

    def ry(self, num: float):
        self._config(num, js_type=False)

    def snap(self):
        # Getter: pixel snap distance around the anchor (default 2).
        return self._config_get(2)

    def snap(self, num: float):
        self._config(num, js_type=False)

    def src(self):
        # Getter: image source URL for image-type shapes.
        return self._config_get(None)

    def src(self, text: str):
        self._config(text, js_type=False)

    def stroke(self):
        # Getter: stroke color (default translucent black).
        return self._config_get('rgba(0, 0, 0, 0.75)')

    def stroke(self, text: str):
        self._config(text, js_type=False)

    def strokeWidth(self):
        # Getter: stroke width (default 1).
        return self._config_get(1)

    def strokeWidth(self, num: float):
        self._config(num, js_type=False)

    def type(self):
        # Getter: shape type (default 'rect').
        return self._config_get('rect')

    def type(self, text: str):
        self._config(text, js_type=False)

    def width(self):
        return self._config_get(None)

    def width(self, num: float):
        self._config(num, js_type=False)

    def xAxis(self):
        # Getter: index of the x-axis the shape is bound to.
        return self._config_get(None)

    def xAxis(self, num: float):
        self._config(num, js_type=False)

    def yAxis(self):
        # Getter: index of the y-axis the shape is bound to.
        return self._config_get(None)

    def yAxis(self, num: float):
        self._config(num, js_type=False)
class OFCapableSwitch(object):
    """Client wrapper around an OF-Config capable switch reached over NETCONF."""

    def __init__(self, connect_method='connect_ssh', *args, **kwargs):
        """Open a NETCONF session using the named ncclient connect method.

        Args:
            connect_method: Attribute name on ``ncclient.manager`` used to
                establish the session (default 'connect_ssh').
            *args, **kwargs: Passed straight through to the connect call.
        """
        super(OFCapableSwitch, self).__init__()
        self._connect_method = connect_method
        self._connect_args = args
        self._connect_kwargs = kwargs
        # OF-Config YANG version and namespace, discovered lazily from replies.
        self.version = None
        self.namespace = None
        connect = getattr(ncclient.manager, self._connect_method)
        self.netconf = connect(*self._connect_args, **self._connect_kwargs)

    def close_session(self):
        """Close the underlying NETCONF session (idempotent)."""
        if self.netconf:
            self.netconf.close_session()
            self.netconf = None

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # FIX: the context-manager exit hook must accept the three exception
        # arguments; the zero-argument form raised TypeError on leaving `with`.
        self.close_session()

    def client_capabilities(self):
        """Capabilities advertised by our NETCONF client."""
        return self.netconf.client_capabilities

    def server_capabilities(self):
        """Capabilities advertised by the switch."""
        return self.netconf.server_capabilities

    def _find_capable_switch(self, tree):
        """Locate the <capable-switch> element in a reply and latch the
        OF-Config version/namespace on first sight.

        Raises:
            OFConfigCapableSwitchNotFound: if the element is absent.
        """
        capable_switch = None
        for element in tree:
            (ns, tag) = get_ns_tag(element.tag)
            if (tag != ofc_consts.CAPABLE_SWITCH):
                continue
            assert (capable_switch is None)
            capable_switch = element
            if (not self.version):
                # Map the element's namespace back to a known OF-Config version.
                versions = [(version, ns_) for (version, ns_) in of_config.OFCONFIG_YANG_NAMESPACES.items() if (ns == ns_)]
                if versions:
                    assert (len(versions) == 1)
                    version = versions[0]
                    (self.version, self.namespace) = version
        if (not capable_switch):
            raise OFConfigCapableSwitchNotFound()
        return capable_switch

    def _find_capable_switch_xml(self, tree):
        """As _find_capable_switch, but serialized back to an XML string."""
        return ncclient.xml_.to_xml(self._find_capable_switch(tree))

    def raw_get(self, filter=None):
        """NETCONF <get>, returning the capable-switch subtree as XML text."""
        reply = self.netconf.get(filter)
        return self._find_capable_switch_xml(reply.data_ele)

    def raw_get_config(self, source, filter=None):
        """NETCONF <get-config>, returning the capable-switch subtree as XML text."""
        reply = self.netconf.get_config(source, filter)
        return self._find_capable_switch_xml(reply.data_ele)

    def raw_edit_config(self, target, config, default_operation=None, test_option=None, error_option=None):
        """NETCONF <edit-config> with a raw XML config payload."""
        self.netconf.edit_config(target, config, default_operation, test_option, error_option)

    def get(self):
        """Typed <get>: parse the reply into an OFCapableSwitchType."""
        return ofc.OFCapableSwitchType.from_xml(self.raw_get())

    def get_config(self, source):
        """Typed <get-config>: parse the reply into an OFCapableSwitchType."""
        return ofc.OFCapableSwitchType.from_xml(self.raw_get_config(source))

    def edit_config(self, target, capable_switch, default_operation=None):
        """Typed <edit-config>: serialize the capable switch and send it."""
        xml = ofc.NETCONF_Config(capable_switch=capable_switch).to_xml()
        self.raw_edit_config(target, xml, default_operation)

    def delete_config(self, source):
        """NETCONF <delete-config>."""
        self.netconf.delete_config(source)

    def copy_config(self, source, target):
        """NETCONF <copy-config>."""
        self.netconf.copy_config(source, target)

    def commit(self):
        """NETCONF <commit>."""
        self.netconf.commit()

    def discard_changes(self):
        """NETCONF <discard-changes>."""
        self.netconf.discard_changes()
def get_bank_sizes_post_vilar(num_seq_with_constraints: Dict[int, int], beam_size: int):
    """Distribute a beam across constraint "banks" (Post & Vilar style).

    The beam is split evenly across the distinct constraint counts, the
    remainder going to the largest count. Capacity a bank cannot fill (more
    slots than candidate sequences) is then rolled forward towards larger
    constraint counts, and finally rolled back towards smaller ones, so no
    bank is allocated more slots than it has candidates.

    Args:
        num_seq_with_constraints: Map of constraint count -> number of
            candidate sequences with that many constraints.
        beam_size: Total beam size to distribute.

    Returns:
        Map of constraint count -> allocated bank size; the total never
        exceeds ``beam_size``.
    """
    num_constraints = sorted(num_seq_with_constraints)
    num_banks = len(num_constraints)
    bank_size = beam_size // num_banks
    remainder = beam_size - (bank_size * num_banks)
    bank_sizes = {det_len: bank_size for det_len in num_constraints}
    # Leftover slots go to the bank with the most constraints.
    bank_sizes[num_constraints[-1]] += remainder

    def _redistribute(order, roll_over):
        """Push unusable capacity along the banks in the given order; return
        whatever could not be placed."""
        for n_const in order:
            bank_sizes[n_const] += roll_over
            roll_over = 0
            # Capacity beyond the available candidates rolls over to the next bank.
            overfill = bank_sizes[n_const] - num_seq_with_constraints.get(n_const, 0)
            if overfill > 0:
                bank_sizes[n_const] -= overfill
                roll_over = overfill
        return roll_over

    # Forward pass spills towards larger counts; the residual carries into
    # the backward pass, which spills towards smaller counts.
    spill = _redistribute(num_constraints, 0)
    _redistribute(reversed(num_constraints), spill)
    assert (sum(bank_sizes.values()) <= beam_size), f'Beam size {beam_size}, total banks: {sum(bank_sizes.values())}'
    return bank_sizes
class MT47H128M8(DDR2Module):
    """Micron MT47H128M8 DDR2 SDRAM (128M x 8) geometry and timings."""
    # Geometry.
    nbanks = 8
    nrows = 16384
    ncols = 1024
    # Average periodic refresh interval: the standard 64 ms retention window
    # spread over 8192 refresh commands, expressed in ns.
    # FIX: tREFI read (.0 / 8192) == 0.0, i.e. a zero refresh interval, which
    # is nonsensical for DRAM; restored the conventional 64e6 / 8192 value.
    technology_timings = _TechnologyTimings(tREFI=(64e6 / 8192), tWTR=(None, 7.5), tCCD=(2, None), tRRD=None)
    speedgrade_timings = {'default': _SpeedgradeTimings(tRP=15, tRCD=15, tWR=15, tRFC=(None, 127.5), tFAW=None, tRAS=None)}
class FaucetBgp:
    """Manages BGP speakers (Beka) for FAUCET VLANs and applies learned routes."""

    exc_logname = None

    def __init__(self, logger, exc_logname, metrics, send_flow_msgs):
        self.logger = logger
        self.exc_logname = exc_logname
        self.metrics = metrics
        self._send_flow_msgs = send_flow_msgs
        self._dp_bgp_speakers = {}
        self._dp_bgp_rib = {}
        self._valves = None
        self.thread = None

    def _valve_vlan(self, dp_id, vlan_vid):
        """Resolve (valve, vlan) for a DP/VLAN pair; (None, None) if unknown."""
        valve = None
        vlan = None
        if (dp_id in self._valves):
            if (vlan_vid in self._valves[dp_id].dp.vlans):
                valve = self._valves[dp_id]
                vlan = valve.dp.vlans[vlan_vid]
        return (valve, vlan)

    @staticmethod
    def _neighbor_states(bgp_speaker):
        # BUG FIX: restored @staticmethod — callers invoke this as
        # self._neighbor_states(bgp_speaker); without the decorator the
        # speaker argument would have been bound to self.
        neighbor_states = []
        if (bgp_speaker is not None):
            neighbor_states = bgp_speaker.neighbor_states()
        return neighbor_states

    # BUG FIX (here and below): these handler decorators had been reduced to
    # bare `_on_exception(exc_logname)` expressions; restored the leading @.
    @_on_exception(exc_logname)
    def _bgp_up_handler(self, remote_ip, remote_as):
        self.logger.info(('BGP peer router ID %s AS %s up' % (remote_ip, remote_as)))

    @_on_exception(exc_logname)
    def _bgp_down_handler(self, remote_ip, remote_as):
        self.logger.info(('BGP peer router ID %s AS %s down' % (remote_ip, remote_as)))

    @_on_exception(exc_logname)
    def _bgp_route_handler(self, path_change, bgp_speaker_key):
        """Apply a BGP announcement/withdrawal for one speaker to the valve."""
        dp_id = bgp_speaker_key.dp_id
        vlan_vid = bgp_speaker_key.vlan_vid
        (valve, vlan) = self._valve_vlan(dp_id, vlan_vid)
        if (vlan is None):
            return
        prefix = ipaddress.ip_network(str(path_change.prefix))
        route_str = ('BGP route %s' % prefix)
        if path_change.next_hop:
            nexthop = ipaddress.ip_address(str(path_change.next_hop))
            route_str = ('BGP route %s nexthop %s' % (prefix, nexthop))
            if vlan.is_faucet_vip(nexthop):
                self.logger.error(('Skipping %s because nexthop cannot be us' % route_str))
                return
            if (valve.router_vlan_for_ip_gw(vlan, nexthop) is None):
                self.logger.info(('Skipping %s because nexthop not in %s' % (route_str, vlan)))
                return
        if (bgp_speaker_key not in self._dp_bgp_rib):
            self._dp_bgp_rib[bgp_speaker_key] = {}
        flowmods = []
        if path_change.is_withdraw:
            self.logger.info('withdraw %s', route_str)
            if (prefix in self._dp_bgp_rib[bgp_speaker_key]):
                del self._dp_bgp_rib[bgp_speaker_key][prefix]
                flowmods = valve.del_route(vlan, prefix)
        else:
            self.logger.info('add %s', route_str)
            # NOTE(review): nexthop is only bound when path_change.next_hop is
            # set; announcements are assumed to always carry a next hop.
            self._dp_bgp_rib[bgp_speaker_key][prefix] = nexthop
            flowmods = valve.add_route(vlan, nexthop, prefix)
        if flowmods:
            self._send_flow_msgs(valve, flowmods)

    @staticmethod
    def _vlan_prefixes_by_ipv(vlan, ipv):
        # BUG FIX: restored @staticmethod — called as
        # self._vlan_prefixes_by_ipv(vlan, ipv) with a self-less signature.
        vlan_prefixes = [(str(faucet_vip), str(faucet_vip.ip))
                         for faucet_vip in vlan.faucet_vips_by_ipv(ipv)]
        vlan_prefixes.extend([(str(ip_dst), str(ip_gw))
                              for (ip_dst, ip_gw) in vlan.routes_by_ipv(ipv).items()])
        return vlan_prefixes

    def _create_bgp_speaker_for_vlan(self, bgp_speaker_key, bgp_router):
        """Create, seed, and start a Beka speaker for one (dp, vlan, ipv)."""
        server_address = sorted(bgp_router.bgp_server_addresses_by_ipv(bgp_speaker_key.ipv))[0]
        beka = Beka(
            local_address=str(server_address),
            bgp_port=bgp_router.bgp_port(),
            local_as=bgp_router.bgp_as(),
            router_id=bgp_router.bgp_routerid(),
            peer_up_handler=self._bgp_up_handler,
            peer_down_handler=self._bgp_down_handler,
            route_handler=(lambda x: self._bgp_route_handler(x, bgp_speaker_key)),
            error_handler=self.logger.warning)
        for (ip_dst, ip_gw) in self._vlan_prefixes_by_ipv(bgp_router.bgp_vlan(), bgp_speaker_key.ipv):
            beka.add_route(prefix=str(ip_dst), next_hop=str(ip_gw))
        for bgp_neighbor_address in bgp_router.bgp_neighbor_addresses_by_ipv(bgp_speaker_key.ipv):
            beka.add_neighbor(
                connect_mode=bgp_router.bgp_connect_mode(),
                peer_ip=str(bgp_neighbor_address),
                peer_as=bgp_router.bgp_neighbor_as())
        self.thread = hub.spawn(beka.run)
        self.thread.name = 'beka'
        return beka

    def shutdown_bgp_speakers(self):
        """Shut down and drop all active speakers."""
        for bgp_speaker in self._dp_bgp_speakers.values():
            bgp_speaker.shutdown()
        self._dp_bgp_speakers = {}

    def _add_bgp_speaker(self, valve, bgp_speaker_key, bgp_router):
        """Reuse an existing speaker (re-adding its RIB) or create a new one."""
        if (bgp_speaker_key in self._dp_bgp_speakers):
            self.logger.info(('Skipping re/configuration of existing %s' % bgp_speaker_key))
            bgp_speaker = self._dp_bgp_speakers[bgp_speaker_key]
            if (bgp_speaker_key in self._dp_bgp_rib):
                for (prefix, nexthop) in self._dp_bgp_rib[bgp_speaker_key].items():
                    self.logger.info(('Re-adding %s via %s' % (prefix, nexthop)))
                    bgp_vlan = bgp_router.bgp_vlan()
                    flowmods = valve.add_route(bgp_vlan, nexthop, prefix)
                    if flowmods:
                        self._send_flow_msgs(valve, flowmods)
        else:
            self.logger.info(('Adding %s' % bgp_speaker_key))
            bgp_speaker = self._create_bgp_speaker_for_vlan(bgp_speaker_key, bgp_router)
        return {bgp_speaker_key: bgp_speaker}

    def _add_valve_bgp_speakers(self, valve):
        """Create/reuse speakers for every BGP router configured on a valve."""
        bgp_speakers = {}
        bgp_routers = valve.dp.bgp_routers()
        if bgp_routers:
            dp_id = valve.dp.dp_id
            for bgp_router in bgp_routers:
                bgp_vlan = bgp_router.bgp_vlan()
                vlan_vid = bgp_vlan.vid
                for ipv in bgp_router.bgp_ipvs():
                    bgp_speaker_key = BgpSpeakerKey(dp_id, vlan_vid, ipv)
                    bgp_speakers.update(self._add_bgp_speaker(valve, bgp_speaker_key, bgp_router))
        return bgp_speakers

    def reset(self, valves):
        """Reconfigure speakers after a config change, keeping stale ones alive."""
        new_dp_bgp_speakers = {}
        if valves:
            for valve in valves.values():
                new_dp_bgp_speakers.update(self._add_valve_bgp_speakers(valve))
        # Keep old speakers that no longer have config so they can be shut
        # down explicitly rather than leaked.
        for (bgp_speaker_key, old_bgp_speaker) in self._dp_bgp_speakers.items():
            if (bgp_speaker_key not in new_dp_bgp_speakers):
                new_dp_bgp_speakers[bgp_speaker_key] = old_bgp_speaker
        self._dp_bgp_speakers = new_dp_bgp_speakers
        self._valves = valves

    def update_metrics(self, _now):
        """Export per-neighbor BGP uptime/route-count metrics."""
        for (bgp_speaker_key, bgp_speaker) in self._dp_bgp_speakers.items():
            dp_id = bgp_speaker_key.dp_id
            vlan_vid = bgp_speaker_key.vlan_vid
            ipv = bgp_speaker_key.ipv
            (valve, vlan) = self._valve_vlan(dp_id, vlan_vid)
            if (vlan is None):
                continue
            neighbor_states = self._neighbor_states(bgp_speaker)
            for (neighbor, neighbor_state) in neighbor_states:
                neighbor_labels = dict(valve.dp.base_prom_labels(), vlan=vlan.vid, neighbor=neighbor)
                self.metrics.bgp_neighbor_uptime_seconds.labels(**neighbor_labels).set(neighbor_state['info']['uptime'])
                self.metrics.bgp_neighbor_routes.labels(**dict(neighbor_labels, ipv=ipv)).set(vlan.route_count_by_ipv(ipv))
class Solution:
    """LeetCode 377: count ordered sequences of `nums` summing to `target`."""

    def combinationSum4(self, nums: List[int], target: int) -> int:
        """Return the number of permutations of elements of `nums` (with
        repetition) whose sum equals `target`, via memoized top-down DFS."""
        if (target == 0):
            return 0
        memo = {}

        def count_ways(remaining):
            # Negative remainder: dead end; zero: one valid sequence found.
            if (remaining < 0):
                return 0
            elif (remaining == 0):
                return 1
            if (remaining in memo):
                return memo[remaining]
            total = 0
            for candidate in nums:
                total += count_ways(remaining - candidate)
            memo[remaining] = total
            return total

        return count_ways(target)
class ServicePlanTestClassCollector(Class):
    """Pytest class collector that yields a ServicePlan instance collector."""

    def _warn_uncollectable(self, warning):
        # Emit via pytest's warning type when available, else stdlib warnings.
        if PytestCollectionWarning:
            self.warn(PytestCollectionWarning(warning))
        else:
            warnings.warn(UserWarning(warning))
        return []

    def collect(self):
        """Collect the class unless it opts out or defines a constructor."""
        if (not getattr(self.obj, '__test__', True)):
            return
        if has_init(self.obj):
            return self._warn_uncollectable(
                ('Cannot collect test class %r because it has a __init__ constructor (from: %s)'
                 % (self.obj.__name__, self.parent.nodeid)))
        if has_new(self.obj):
            return self._warn_uncollectable(
                ('Cannot collect test class %r because it has a __new__ constructor (from: %s)'
                 % (self.obj.__name__, self.parent.nodeid)))
        self._inject_setup_class_fixture()
        self._inject_setup_method_fixture()
        return [ServicePlanTestInstanceCollector(name='()', parent=self)]
class OptionSeriesColumnSonificationContexttracksMappingGapbetweennotes(Options):
    """Gap-between-notes mapping options (series.column sonification).

    BUG FIX: each getter/setter pair shared a def name, so the setter
    silently replaced the getter; restored the @property/@setter decorators.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionPlotoptionsAreaSonificationContexttracksMappingTremolo(Options):
    """Tremolo mapping options (plotOptions.area sonification context tracks).

    BUG FIX: restored @property — these sub-option accessors are used as
    attributes throughout this options hierarchy, not called as methods.
    """

    @property
    def depth(self) -> 'OptionPlotoptionsAreaSonificationContexttracksMappingTremoloDepth':
        return self._config_sub_data('depth', OptionPlotoptionsAreaSonificationContexttracksMappingTremoloDepth)

    @property
    def speed(self) -> 'OptionPlotoptionsAreaSonificationContexttracksMappingTremoloSpeed':
        return self._config_sub_data('speed', OptionPlotoptionsAreaSonificationContexttracksMappingTremoloSpeed)
def _setup_argument_parser() -> ArgumentParser:
    """Build the CLI parser for the JSON-to-INI RP2 configuration translator."""
    parser: ArgumentParser = ArgumentParser(
        description='Convert a JSON-format RP2 configuration file to INI format',
        formatter_class=RawTextHelpFormatter)
    parser.add_argument('-f', '--force-overwrite', action='store_true',
                        help='Write the output file even if it already exists')
    parser.add_argument('-o', '--output_file', action='store', default='',
                        help='Write INI format configuration to OUTPUT_FILE',
                        metavar='OUTPUT_FILE', type=str)
    # BUG FIX: the version string literal was truncated mid-token
    # ("...{_VERSION} ( help=") — a syntax error. Reconstructed a URL suffix;
    # NOTE(review): confirm the exact parenthesised text against history.
    parser.add_argument('-v', '--version', action='version',
                        version=f'RP2 configuration translator {_VERSION} (https://github.com/eprbell/rp2)',
                        help='Print version')
    parser.add_argument('json_file', action='store', help='JSON configuration file',
                        metavar='JSON_FILE', type=str)
    return parser
def settings_from_cmdline(cmdline: (str | None)) -> dict[(str, Any)]:
    """Parse a docutils command line into a settings dict; {} for empty input."""
    if ((cmdline is None) or (not cmdline.strip())):
        return {}
    publisher = Publisher(parser=Parser())
    try:
        publisher.process_command_line(shlex.split(cmdline))
    except Exception as err:
        # Surface parse failures with the offending command line attached.
        raise AssertionError(f'''Failed to parse commandline: {cmdline}
{err}''')
    return vars(publisher.settings)
def prompt_username(ctx: click.Context, param: str, value: str) -> str:
    """Resolve a username: explicit CLI value, then config file, then prompt."""
    configured = ctx.obj['CONFIG'].user.username
    if value:
        return value
    if configured:
        click.echo('> Username found in configuration.')
        return configured
    return click.prompt(text='Username')
def modify_template_contents(template_contents, user_settings, scene_file, sbref_nii, dtseries_sm):
    """Fill a wb_view scene template with subject-specific paths and names."""
    mni_dir = os.path.join(user_settings.work_dir, user_settings.subject, 'MNINonLinear')
    surfs_dir = os.path.join(mni_dir, 'fsaverage_LR32k')
    T1w_nii = os.path.join(mni_dir, 'T1w.nii.gz')
    smoothed_base_noext = os.path.basename(dtseries_sm).replace('.dtseries.nii', '')

    txt = template_contents.replace('SURFS_SUBJECT', user_settings.subject)
    txt = txt.replace('SURFS_MESHNAME', user_settings.surf_mesh)
    txt = replace_path_references(txt, 'SURFSDIR', surfs_dir, scene_file)
    txt = replace_all_references(txt, 'T1W', T1w_nii, scene_file)
    txt = replace_all_references(txt, 'SBREF', sbref_nii, scene_file)
    txt = replace_all_references(txt, 'S0DTSERIES', user_settings.dtseries_s0, scene_file)
    txt = replace_path_references(txt, 'SMDTSERIES', os.path.dirname(dtseries_sm), scene_file)
    txt = txt.replace('SMDTSERIES_BASENOEXT', smoothed_base_noext)
    return txt
@registry.reg('rocm.gemm_rrr_bias_permute.gen_function')
def gemm_gen_function(func_attrs, exec_cond_template, dim_info_dict):
    """Generate the ROCm gemm_rrr_bias_permute kernel function source.

    NOTE(review): the registration decorator had been reduced to a bare
    ('rocm.gemm_rrr_bias_permute.gen_function') expression; restored the
    AITemplate-style registry.reg decorator — confirm the `registry` import.
    """
    return common.gen_function(
        func_attrs,
        exec_cond_template,
        dim_info_dict,
        'bias_permute',
        # Permutation group sizes come from the op's static shape attribute.
        extra_code='const int G1={}, G2={}, G3={};'.format(
            func_attrs['shape'][0], func_attrs['shape'][1], func_attrs['shape'][2]),
        extra_shape_template=permute_common.EXTRA_SHAPE_TEMPLATE)
class TestFrozen(unittest.TestCase):
    """Tests for @dataclass(frozen=True) semantics.

    BUG FIX: the '@dataclass' decorators throughout this class had been
    stripped to bare '(frozen=...)' expressions, so none of the classes under
    test were actually dataclasses; restored every decorator.
    """

    def test_frozen(self):
        @dataclass(frozen=True)
        class C:
            i: int

        c = C(10)
        self.assertEqual(c.i, 10)
        with self.assertRaises(FrozenInstanceError):
            c.i = 5
        self.assertEqual(c.i, 10)

    def test_inherit(self):
        @dataclass(frozen=True)
        class C:
            i: int

        @dataclass(frozen=True)
        class D(C):
            j: int

        d = D(0, 10)
        with self.assertRaises(FrozenInstanceError):
            d.i = 5
        with self.assertRaises(FrozenInstanceError):
            d.j = 6
        self.assertEqual(d.i, 0)
        self.assertEqual(d.j, 10)

    def test_inherit_nonfrozen_from_frozen(self):
        # Mixing frozen-ness is an error whether direct or via an
        # intermediate plain subclass.
        for intermediate_class in [True, False]:
            with self.subTest(intermediate_class=intermediate_class):
                @dataclass(frozen=True)
                class C:
                    i: int

                if intermediate_class:
                    class I(C):
                        pass
                else:
                    I = C
                with self.assertRaisesRegex(TypeError, 'cannot inherit non-frozen dataclass from a frozen one'):
                    @dataclass
                    class D(I):
                        pass

    def test_inherit_frozen_from_nonfrozen(self):
        for intermediate_class in [True, False]:
            with self.subTest(intermediate_class=intermediate_class):
                @dataclass
                class C:
                    i: int

                if intermediate_class:
                    class I(C):
                        pass
                else:
                    I = C
                with self.assertRaisesRegex(TypeError, 'cannot inherit frozen dataclass from a non-frozen one'):
                    @dataclass(frozen=True)
                    class D(I):
                        pass

    def test_inherit_from_normal_class(self):
        for intermediate_class in [True, False]:
            with self.subTest(intermediate_class=intermediate_class):
                class C:
                    pass

                if intermediate_class:
                    class I(C):
                        pass
                else:
                    I = C

                @dataclass(frozen=True)
                class D(I):
                    i: int

                d = D(10)
                with self.assertRaises(FrozenInstanceError):
                    d.i = 5

    def test_non_frozen_normal_derived(self):
        # A plain subclass of a frozen dataclass may add new attributes but
        # still cannot rebind the frozen fields.
        @dataclass(frozen=True)
        class D:
            x: int
            y: int = 10

        class S(D):
            pass

        s = S(3)
        self.assertEqual(s.x, 3)
        self.assertEqual(s.y, 10)
        s.cached = True
        with self.assertRaises(FrozenInstanceError):
            s.x = 5
        with self.assertRaises(FrozenInstanceError):
            s.y = 5
        self.assertEqual(s.x, 3)
        self.assertEqual(s.y, 10)
        self.assertEqual(s.cached, True)

    def test_overwriting_frozen(self):
        # frozen=True installs __setattr__/__delattr__; user overrides clash.
        with self.assertRaisesRegex(TypeError, 'Cannot overwrite attribute __setattr__'):
            @dataclass(frozen=True)
            class C:
                x: int
                def __setattr__(self):
                    pass

        with self.assertRaisesRegex(TypeError, 'Cannot overwrite attribute __delattr__'):
            @dataclass(frozen=True)
            class C:
                x: int
                def __delattr__(self):
                    pass

        # Non-frozen dataclasses may define their own __setattr__ freely.
        @dataclass(frozen=False)
        class C:
            x: int
            def __setattr__(self, name, value):
                self.__dict__['x'] = (value * 2)

        self.assertEqual(C(10).x, 20)

    def test_frozen_hash(self):
        @dataclass(frozen=True)
        class C:
            x: Any

        # Hashable if the field values are hashable; otherwise TypeError.
        hash(C(3))
        with self.assertRaisesRegex(TypeError, 'unhashable type'):
            hash(C({}))
# BUG FIX: the mock.patch decorator had lost its leading '@' — the
# `mock_client` parameter proves the function expects the patched client.
@_mock.patch('flytekit.clis.flyte_cli.main._friendly_client.SynchronousFlyteClient')
def test_archive_project(mock_client):
    """`archive-project` exits cleanly and archives the named project."""
    runner = _CliRunner()
    result = runner.invoke(_main._flyte_cli, ['archive-project', '-p', 'foo', '-h', 'a.b.com', '-i'])
    assert (result.exit_code == 0)
    mock_client().update_project.assert_called_with(_Project.archived_project('foo'))
# NOTE(review): '_entry_point' appeared as a bare expression before the def —
# restored as a decorator; confirm the intended decorator name upstream.
@_entry_point
def test_intercepted_scope_flyte_user_exception():
    """A user-scope FlyteAssertion is wrapped identically by both scopes."""
    assertion_error = user.FlyteAssertion('Bad assert')
    with pytest.raises(scopes.FlyteScopedUserException) as e:
        _user_func(assertion_error)
    e = e.value
    assert (e.value == assertion_error)
    assert ('Bad assert' in e.verbose_message)
    assert ('User error.' in e.verbose_message)
    assert (e.error_code == 'USER:AssertionError')
    assert (e.kind == _error_models.ContainerError.Kind.NON_RECOVERABLE)
    # The same user-scoped exception keeps its user classification even when
    # raised through the system-scoped entry point.
    with pytest.raises(scopes.FlyteScopedUserException) as e:
        _system_func(assertion_error)
    e = e.value
    assert (e.value == assertion_error)
    assert ('Bad assert' in e.verbose_message)
    assert ('User error.' in e.verbose_message)
    assert (e.error_code == 'USER:AssertionError')
    assert (e.kind == _error_models.ContainerError.Kind.NON_RECOVERABLE)
class HFConfigKey:
    """Describes how one Hugging Face config key maps to the curated config.

    NOTE(review): the annotated class attributes with no __init__ suggest this
    was originally a @dataclass — confirm and restore the decorator at file
    level (the dataclasses import is not visible in this chunk).
    """

    name: str
    mapping_to_curated_config: Union[(str, Tuple[(str, Callable[([Any], Any)])])]
    mapping_from_curated_config: Callable[([Any], Any)]

    @property
    def curated_config_kwarg(self) -> str:
        # BUG FIX: restored @property — the methods below read
        # `self.curated_config_kwarg` as an attribute, not a call; without the
        # decorator they indexed kwargs with a bound method.
        return (self.mapping_to_curated_config
                if isinstance(self.mapping_to_curated_config, str)
                else self.mapping_to_curated_config[0])

    def get_kwarg(self, kwargs: Dict[(str, Any)]) -> Any:
        """Read this key's value from curated-config kwargs."""
        return kwargs[self.curated_config_kwarg]

    def set_kwarg(self, value: Union[('HFConfigKey', Any)], kwargs: Dict[(str, Any)]):
        """Write `value` into curated-config kwargs.

        When `value` is another HFConfigKey, copy that key's current value;
        otherwise apply the optional conversion callable from the mapping.
        """
        if isinstance(value, HFConfigKey):
            kwargs[self.curated_config_kwarg] = value.get_kwarg(kwargs)
            return
        if isinstance(self.mapping_to_curated_config, tuple):
            (curated, ctor) = self.mapping_to_curated_config
        else:
            curated = self.mapping_to_curated_config
            ctor = (lambda x: x)
        kwargs[curated] = ctor(value)

    def remove_kwarg(self, kwargs: Dict[(str, Any)]):
        """Drop this key's entry from curated-config kwargs."""
        kwargs.pop(self.curated_config_kwarg)
# BUG FIX: the registration decorator had lost its leading '@'.
@_REGISTRY.register()
def default_flop_counter(model, cfg):
    """Attach a FLOP-printing hook to `model`, unless it is FSDP-wrapped."""
    from torch.distributed.fsdp.fully_sharded_data_parallel import FullyShardedDataParallel as FSDP
    if isinstance(model, FSDP):
        # logger.warn is a deprecated alias of logger.warning.
        logger.warning("Default flop counter is disabled because it's not supported for FSDP yet. ")
        return
    return add_flop_printing_hook(model, cfg.OUTPUT_DIR)
class Flow_Mod_3(base_tests.SimpleProtocol):
    """Modify a random flow on the switch and verify the resulting table."""

    def runTest(self):
        logging.info('Flow_Mod_3 TEST BEGIN')

        # Start from a clean flow table.
        logging.info('Deleting all flows from switch')
        delete_all_flows(self.controller)

        switch = Switch()
        self.assertTrue(switch.connect(self.controller), 'Failed to connect to switch')

        # Build one random (canonicalised) flow config.
        flow_info = Flow_Info()
        flow_info.rand(10)
        flow_cfg = Flow_Cfg()
        flow_cfg.rand(flow_info, required_wildcards(self),
                      switch.tbl_stats.entries[0].wildcards,
                      switch.sw_features.actions,
                      switch.valid_ports, switch.valid_queues)
        flow_cfg = flow_cfg.canonical()

        logging.info('Sending flow mod to switch:')
        logging.info(str(flow_cfg))
        expected_tbl = Flow_Tbl()
        flow_cfg.send_rem = False
        self.assertTrue(switch.flow_mod(flow_cfg, True), 'Failed to modify flows')
        expected_tbl.insert(flow_cfg)
        self.assertTrue(switch.barrier(), 'Barrier failed')

        # Let the switch settle, then verify no errors and table contents.
        result = True
        switch.settle()
        if (not switch.errors_verify(0)):
            result = False
        switch.flow_tbl = expected_tbl
        if (not switch.flow_tbl_verify()):
            result = False

        self.assertTrue(result, 'Flow_Mod_3 TEST FAILED')
        logging.info('Flow_Mod_3 TEST PASSED')
class TestHeaderSemanticExtractor:
    """Tests for HeaderSemanticExtractor front-matter extraction."""

    @staticmethod
    def _extract_front(tagged_blocks):
        # Run the extractor over (tag, layout-block) pairs, wrap the result
        # in a SemanticFront and log it for debugging.
        semantic_content_list = list(
            HeaderSemanticExtractor().iter_semantic_content_for_entity_blocks(tagged_blocks))
        front = SemanticFront(semantic_content_list)
        LOGGER.debug('front: %s', front)
        return front

    @staticmethod
    def _raw_aff_addresses(front):
        return list(front.iter_by_type(SemanticRawAffiliationAddress))

    def test_should_set_title_and_abstract(self):
        front = self._extract_front([
            ('<title>', LayoutBlock.for_text(TITLE_1)),
            ('<abstract>', LayoutBlock.for_text(ABSTRACT_1))])
        assert (front.get_text_by_type(SemanticTitle) == TITLE_1)
        assert (front.get_text_by_type(SemanticAbstract) == ABSTRACT_1)

    def test_should_ignore_additional_title_and_abstract(self):
        # Only the first title/abstract should be kept.
        front = self._extract_front([
            ('<title>', LayoutBlock.for_text(TITLE_1)),
            ('<abstract>', LayoutBlock.for_text(ABSTRACT_1)),
            ('<title>', LayoutBlock.for_text('other')),
            ('<abstract>', LayoutBlock.for_text('other'))])
        assert (front.get_text_by_type(SemanticTitle) == TITLE_1)
        assert (front.get_text_by_type(SemanticAbstract) == ABSTRACT_1)

    def test_should_add_raw_authors(self):
        front = self._extract_front([
            ('<author>', LayoutBlock.for_text(AUTHOR_1))])
        assert (front.get_raw_authors_text() == AUTHOR_1)

    def test_should_add_raw_affiliation_address(self):
        front = self._extract_front([
            ('<affiliation>', LayoutBlock.for_text(AFFILIATION_1)),
            ('<address>', LayoutBlock.for_text(ADDRESS_1))])
        aff_address_list = self._raw_aff_addresses(front)
        assert (len(aff_address_list) == 1)
        aff_address = aff_address_list[0]
        assert (aff_address.get_text_by_type(SemanticRawAffiliation) == AFFILIATION_1)
        assert (aff_address.get_text_by_type(SemanticRawAddress) == ADDRESS_1)

    def test_should_split_raw_affiliation_on_new_aff_without_address(self):
        front = self._extract_front([
            ('<affiliation>', LayoutBlock.for_text(AFFILIATION_1)),
            ('<affiliation>', LayoutBlock.for_text(AFFILIATION_2))])
        aff_address_list = self._raw_aff_addresses(front)
        assert ([aff_address.get_text_by_type(SemanticRawAffiliation)
                 for aff_address in aff_address_list] == [AFFILIATION_1, AFFILIATION_2])

    def test_should_split_raw_affiliation_on_new_aff_with_address(self):
        front = self._extract_front([
            ('<affiliation>', LayoutBlock.for_text(AFFILIATION_1)),
            ('<address>', LayoutBlock.for_text(ADDRESS_1)),
            ('<affiliation>', LayoutBlock.for_text(AFFILIATION_2)),
            ('<address>', LayoutBlock.for_text(ADDRESS_2))])
        aff_address_list = self._raw_aff_addresses(front)
        assert ([aff_address.get_text_by_type(SemanticRawAffiliation)
                 for aff_address in aff_address_list] == [AFFILIATION_1, AFFILIATION_2])
        assert ([aff_address.get_text_by_type(SemanticRawAddress)
                 for aff_address in aff_address_list] == [ADDRESS_1, ADDRESS_2])

    def test_should_split_raw_affiliation_separated_by_other(self):
        front = self._extract_front([
            ('<affiliation>', LayoutBlock.for_text(AFFILIATION_1)),
            ('O', LayoutBlock.for_text(OTHER_1)),
            ('<affiliation>', LayoutBlock.for_text(AFFILIATION_2))])
        aff_address_list = self._raw_aff_addresses(front)
        assert ([aff_address.get_text_by_type(SemanticRawAffiliation)
                 for aff_address in aff_address_list] == [AFFILIATION_1, AFFILIATION_2])

    def test_should_split_raw_affiliation_separated_by_known_label(self):
        front = self._extract_front([
            ('<affiliation>', LayoutBlock.for_text(AFFILIATION_1)),
            ('<author>', LayoutBlock.for_text(AUTHOR_1)),
            ('<affiliation>', LayoutBlock.for_text(AFFILIATION_2))])
        aff_address_list = self._raw_aff_addresses(front)
        assert ([aff_address.get_text_by_type(SemanticRawAffiliation)
                 for aff_address in aff_address_list] == [AFFILIATION_1, AFFILIATION_2])
class APIPermissionTests(APITestCase):
    """Write operations on the sites API must be rejected (read-only API)."""

    def setUp(self):
        create_site()

    def test_forbidden_actions(self):
        existing_url = urljoin(urlroot, 'sites/securethe.news/')
        post_response = self.client.post(
            existing_url,
            json={'name': 'Insecure the News?', 'domain': 'insecurethe.news'})
        self.assertEqual(post_response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)

        delete_response = self.client.delete(existing_url)
        self.assertEqual(delete_response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)

        new_url = urljoin(urlroot, 'sites/insecurethe.news/')
        put_response = self.client.put(
            new_url,
            json={'name': 'Insecure the News?', 'domain': 'insecurethe.news'})
        self.assertEqual(put_response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
class KiwoomOpenApiPlusSendConditionRateLimiter(RateLimiter):
    """Composite limiter for SendCondition calls.

    Combines the shared comm-request limiter with a per-(condition_name,
    condition_index) limit of 1 call per 60 seconds.
    """

    def __init__(self, comm_rate_limiter):
        self._lock = threading.RLock()
        self._comm_rate_limiter = comm_rate_limiter
        self._limiters_per_condition = {}

    def get_limiter_per_condition(self, condition_name, condition_index):
        """Return (creating on first use) the limiter for one condition."""
        limiter_key = (condition_name, condition_index)
        if (limiter_key not in self._limiters_per_condition):
            self._limiters_per_condition[limiter_key] = TimeWindowRateLimiter(60, 1)
        return self._limiters_per_condition[limiter_key]

    @staticmethod
    def _condition_key(fn, args, kwargs):
        """Extract (condition_name, condition_index) for SendCondition calls.

        BUG FIX: the original used kwargs.get('condition_name', args[1]),
        which evaluates args[1]/args[2] eagerly and raised IndexError whenever
        those arguments were supplied as keywords only.
        """
        if (fn.__name__ != 'SendCondition'):
            return (None, None)
        condition_name = kwargs['condition_name'] if ('condition_name' in kwargs) else args[1]
        condition_index = kwargs['condition_index'] if ('condition_index' in kwargs) else args[2]
        return (condition_name, condition_index)

    def check_sleep_seconds(self, fn, *args, **kwargs):
        """Seconds to wait before `fn` may be called (0 if callable now)."""
        (condition_name, condition_index) = self._condition_key(fn, args, kwargs)
        with self._lock:
            sleep_seconds = self._comm_rate_limiter.check_sleep_seconds()
            if ((condition_name is not None) and (condition_index is not None)):
                limiter_per_condition = self.get_limiter_per_condition(condition_name, condition_index)
                sleep_seconds = max(sleep_seconds, limiter_per_condition.check_sleep_seconds())
            return sleep_seconds

    def add_call_history(self, fn, *args, **kwargs):
        """Record one call of `fn` against both limiters."""
        (condition_name, condition_index) = self._condition_key(fn, args, kwargs)
        with self._lock:
            self._comm_rate_limiter.add_call_history()
            if ((condition_name is not None) and (condition_index is not None)):
                limiter_per_condition = self.get_limiter_per_condition(condition_name, condition_index)
                limiter_per_condition.add_call_history()

    def sleep_if_necessary(self, fn, *args, **kwargs):
        """Block until `fn` may be called under the combined limits."""
        with self._lock:
            sleep_seconds = self.check_sleep_seconds(fn, *args, **kwargs)
            if (sleep_seconds > 0):
                time.sleep(sleep_seconds)
class OptionPlotoptionsSunburstSonificationDefaultspeechoptionsMappingPlaydelay(Options):
    """Play-delay mapping options (plotOptions.sunburst default speech options).

    BUG FIX: each getter/setter pair shared a def name, so the setter
    silently replaced the getter; restored the @property/@setter decorators.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class AuditLogSink:
    """Loguru-style sink that forwards log records to an actor task as JSON."""

    def __init__(self, task: 'Actor') -> None:
        self.task = task

    async def __call__(self, message: 'Message'):
        record: 'Record' = message.record
        payload = {
            'time': record['time'].astimezone(UTC).isoformat(),
            'level': record['level'].name,
            'message': record['message'],
            'extra': record['extra'],
        }
        self.task.send(json.dumps(payload, cls=AuditLogSink.Encoder))

    class Encoder(json.JSONEncoder):
        """JSON encoder that serialises UUIDs as their string form."""

        def default(self, obj):
            if isinstance(obj, uuid.UUID):
                return str(obj)
            return json.JSONEncoder.default(self, obj)
class AllEnglandAlertTestCase(ApiTestBase):
    """The all-England alert command should email each org bookmark owner."""

    fixtures = (ApiTestBase.fixtures + ['functional-measures-dont-edit'])

    def test_all_england_alerts_sent(self):
        bookmark = DataFactory().create_org_bookmark(None)
        call_command(CMD_NAME)
        # Exactly one alert email, addressed to the bookmark's owner.
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].to, [bookmark.user.email])
class OptionSeriesBarZones(Options):
    """Zone styling options for bar series.

    BUG FIX: each getter/setter pair shared a def name, so the setter
    silently replaced the getter; restored the @property/@setter decorators.
    """

    @property
    def className(self):
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def dashStyle(self):
        return self._config_get(None)

    @dashStyle.setter
    def dashStyle(self, text: str):
        self._config(text, js_type=False)

    @property
    def fillColor(self):
        return self._config_get(None)

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)
class RebuildCause(Enum):
    """Why the window layout needs to be rebuilt."""

    LAYOUT_CHANGE_VSTACK = 'layout_change_vstack'
    LAYOUT_CHANGE_HSTACK = 'layout_change_hstack'
    LAYOUT_CHANGE_SPIRAL = 'layout_change_spiral'
    LAYOUT_CHANGE_COMPANION = 'layout_change_companion'
    LAYOUT_CHANGE_2COLUMNS = 'layout_change_2columns'
    LAYOUT_CHANGE_3COLUMNS = 'layout_change_3columns'
    LAYOUT_CHANGE_TABBED = 'layout_change_tabbed'
    LAYOUT_CHANGE_AUTOSPLIT = 'layout_change_autosplit'
    WORKSPACE_FOCUS = 'workspace_focus'
    WINDOW_CLOSE = 'window_close'
    WINDOW_MOVE = 'window_move'
    WINDOW_NEW = 'window_new'

    @staticmethod
    def layout_change(layout_name: str) -> 'RebuildCause':
        # BUG FIX: restored @staticmethod — a plain function in an Enum body
        # becomes an instance method, so calling it on the class would have
        # mis-bound layout_name to a member instance.
        return RebuildCause(f'layout_change_{layout_name}')
class OptionSeriesPolygonSonificationContexttracksMappingPlaydelay(Options):
    """Play-delay mapping options (series.polygon sonification context tracks).

    BUG FIX: each getter/setter pair shared a def name, so the setter
    silently replaced the getter; restored the @property/@setter decorators.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_benzene_wfoverlap():
    """Set up a Turbomole calculator for benzene with precomputed CI vectors/MOs."""
    control_path = THIS_DIR / 'benzene_3states'
    ci_vectors = str(control_path / 'ciss_a')
    mo_file = str(control_path / 'mos')
    calc = Turbomole(control_path)
    geom = geom_from_library('benzene_bp86sto3g_opt.xyz')
    geom.set_calculator(calc)
    # Point the calculator at the stored CI vector and MO files.
    calc.td_vec_fn = ci_vectors
    calc.mos = mo_file
class CPASAdvertiserPartnershipRecommendation(AbstractCrudObject):
    """Graph API node: CPAS advertiser partnership recommendation."""

    def __init__(self, fbid=None, parent_id=None, api=None):
        self._isCPASAdvertiserPartnershipRecommendation = True
        super(CPASAdvertiserPartnershipRecommendation, self).__init__(fbid, parent_id, api)

    class Field(AbstractObject.Field):
        advertiser_business_id = 'advertiser_business_id'
        brand_business_id = 'brand_business_id'
        brands = 'brands'
        countries = 'countries'
        id = 'id'
        merchant_business_id = 'merchant_business_id'
        merchant_categories = 'merchant_categories'
        status = 'status'
        status_reason = 'status_reason'

    def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Fetch this node; supports batched and deferred (pending) execution."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(
            node_id=self['id'],
            method='GET',
            endpoint='/',
            api=self._api,
            param_checker=TypeChecker(param_types, enums),
            target_class=CPASAdvertiserPartnershipRecommendation,
            api_type='NODE',
            response_parser=ObjectParser(reuse_object=self),
        )
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    _field_types = {'advertiser_business_id': 'string', 'brand_business_id': 'string', 'brands': 'list<string>', 'countries': 'list<string>', 'id': 'string', 'merchant_business_id': 'string', 'merchant_categories': 'list<string>', 'status': 'string', 'status_reason': 'string'}

    @classmethod
    def _get_field_enum_info(cls):
        # BUG FIX: restored @classmethod — declared with a `cls` first
        # parameter and invoked on the class in the generated SDK.
        field_enum_info = {}
        return field_enum_info
class OptionPlotoptionsCylinderSonificationContexttracksMappingTremoloDepth(Options):
    """Tremolo-depth mapping options (plotOptions.cylinder sonification).

    BUG FIX: each getter/setter pair shared a def name, so the setter
    silently replaced the getter; restored the @property/@setter decorators.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def check_markdown(testfile, extension, extension_config, wrapper, update=False):
    """Render `testfile` through Markdown and diff against its .html sibling.

    With update=True, rewrite the expected file instead of failing on a diff.
    """
    expected_html = (os.path.splitext(testfile)[0] + '.html')
    with codecs.open(testfile, 'r', encoding='utf-8') as handle:
        source = handle.read()
    md = markdown.Markdown(extensions=extension, extension_configs=extension_config)
    results = (wrapper % md.convert(source))
    # Missing/unreadable expected file is treated as empty (forces a diff).
    try:
        with codecs.open(expected_html, 'r', encoding='utf-8') as handle:
            expected = handle.read().replace('\r\n', '\n')
    except Exception:
        expected = ''
    diff = list(difflib.unified_diff(
        expected.splitlines(True), results.splitlines(True),
        expected_html,
        os.path.join(os.path.dirname(testfile), 'results.html'),
        n=3))
    if diff:
        if update:
            print(('Updated: %s' % expected_html))
            with codecs.open(expected_html, 'w', encoding='utf-8') as handle:
                handle.write(results)
        else:
            raise Exception(('Output from "%s" failed to match expected output.\n\n%s'
                             % (testfile, ''.join(diff))))
    elif update:
        print(('Skipped: %s' % expected_html))
# BUG FIX: the pytest marker had been stripped to a bare '.django_db'
# attribute access; restored the @pytest.mark decorator.
@pytest.mark.django_db
def test_spending_by_category_success(client, monkeypatch, elasticsearch_transaction_index):
    """spending_by_category returns 200 for keyword and non-legacy filters."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    resp = client.post(
        '/api/v2/search/spending_by_category',
        content_type='application/json',
        data=json.dumps({'category': 'funding_agency', 'filters': {'keywords': ['test', 'testing']}}))
    assert (resp.status_code == status.HTTP_200_OK)
    resp = client.post(
        '/api/v2/search/spending_by_category',
        content_type='application/json',
        data=json.dumps({'category': 'cfda', 'filters': non_legacy_filters()}))
    assert (resp.status_code == status.HTTP_200_OK)