code stringlengths 281 23.7M |
|---|
def test_correct_response_with_date_type(client, monkeypatch, elasticsearch_transaction_index, subagency_award):
    """Query awarding_subagency with a `date_signed` time period: a one-day
    window with no hits, then a wider window matching a single subagency."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)

    def post_for(end_date):
        # Same payload for both requests; only the end of the window varies.
        payload = {'filters': {'time_period': [{'date_type': 'date_signed', 'start_date': '2020-01-01', 'end_date': end_date}], 'agencies': [{'type': 'awarding', 'tier': 'subtier', 'name': 'Awarding Subtier Agency 5', 'toptier_name': 'Awarding Toptier Agency 3'}]}}
        return client.post('/api/v2/search/spending_by_category/awarding_subagency', content_type='application/json', data=json.dumps(payload))

    base_expected = {'category': 'awarding_subagency', 'limit': 10, 'page_metadata': {'page': 1, 'next': None, 'previous': None, 'hasNext': False, 'hasPrevious': False}, 'messages': [get_time_period_message()]}

    resp = post_for('2020-01-01')
    assert (resp.status_code == status.HTTP_200_OK), 'Failed to return 200 Response'
    assert (resp.json() == dict(base_expected, results=[]))

    resp = post_for('2020-01-16')
    assert (resp.status_code == status.HTTP_200_OK), 'Failed to return 200 Response'
    assert (resp.json() == dict(base_expected, results=[{'amount': 10.0, 'name': 'Awarding Subtier Agency 5', 'code': 'SA5', 'id': 1005}]))
def _list_providers(verbose=False):
if isinstance(verbose, str):
try:
verbose = eval(verbose.capitalize())
except (NameError, SyntaxError) as e:
print("Please pass 'True' or 'False'.")
raise e
with _get_data_folder().joinpath('providers-config.yaml').open() as fp:
providers = yaml.safe_load(fp)
print('The following providers are declared:')
if verbose:
_print_verbose_providers_description(providers)
else:
_print_simple_providers_description(providers) |
class Magic():
    """Scores all registered magics against an OCR result and applies the
    best-scoring one, then post-processes the parsed text."""

    # Registry of all available magic transformers, shared across instances.
    _magics: ClassVar[dict[(str, BaseMagic)]] = {'SingleLineMagic': SingleLineMagic(), 'MultiLineMagic': MultiLineMagic(), 'ParagraphMagic': ParagraphMagic(), 'EmailMagic': EmailMagic(), 'UrlMagic': UrlMagic()}

    def apply(self, ocr_result: OcrResult) -> OcrResult:
        """Score, transform with the winning magic, and post-process in place."""
        ocr_result.magic_scores = self._calc_scores(ocr_result)
        if (best_magic_name := ocr_result.best_scored_magic):
            best_magic = self._magics[best_magic_name]
            ocr_result.parsed = best_magic.transform(ocr_result)
        ocr_result.parsed = self._post_process(ocr_result)
        return ocr_result

    def _post_process(self, ocr_result: OcrResult) -> str:
        """Clean the parsed text; drop spaces for space-less languages."""
        text = ocr_result.parsed
        text = self.clean(text)
        if ocr_result.tess_args.is_language_without_spaces():
            text = text.replace(' ', '')
        return text

    @staticmethod
    def clean(text: str) -> str:
        """Normalize typographic quotes to plain ASCII quotes.

        Fixes: the method lacked ``self``/``@staticmethod`` so ``self.clean(text)``
        raised TypeError, and the character classes were empty (``'[]'``), which
        ``re.sub`` rejects at runtime — presumably the curly-quote characters
        were lost in transit; restored here (TODO confirm against upstream).
        """
        text = re.sub('[“”]', '"', text)
        text = re.sub('[‘’]', "'", text)
        return text

    def _calc_scores(self, ocr_result: OcrResult) -> dict[(str, float)]:
        """Return {magic name: score} for every registered magic."""
        scores = {name: magic.score(ocr_result) for (name, magic) in self._magics.items()}
        logger.debug('Magic scores:\n%s', scores)
        return scores
class EGridHead():
    """Header section of an EGRID file: the mandatory FILEHEAD plus
    optional unit/axes/orientation keywords."""

    file_head: Filehead
    mapunits: (Units | None) = None
    mapaxes: (MapAxes | None) = None
    gridunit: (GridUnit | None) = None
    gdorient: (GdOrient | None) = None

    def to_egrid(self) -> list[tuple[(str, Any)]]:
        """Serialize the header to (keyword, values) pairs in fixed order.

        Keywords are 8 characters wide, hence the trailing space in 'MAPAXES '.
        """
        sections = [('FILEHEAD', self.file_head.to_egrid())]
        # MAPUNITS wraps its single value in a list; the rest pass through.
        if self.mapunits is not None:
            sections.append(('MAPUNITS', [self.mapunits.to_bgrdecl()]))
        for keyword, section in (
            ('MAPAXES ', self.mapaxes),
            ('GRIDUNIT', self.gridunit),
            ('GDORIENT', self.gdorient),
        ):
            if section is not None:
                sections.append((keyword, section.to_bgrdecl()))
        return sections
class OptionPlotoptionsAreasplinerangeSonificationTracksMappingLowpassResonance(Options):
    """Accessors for the lowpass-resonance mapping options.

    NOTE(review): each getter/setter pair here shared one name, so the setter
    definition silently shadowed the getter — these were evidently
    ``@property`` pairs whose decorators were stripped; restored below.
    """

    @property
    def mapFunction(self):
        """Mapping function for the resonance value."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Name of the point property the resonance maps to."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Range the mapping operates within."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def get_speakers_layout(init_speakers=None, default_speaker=None):
    """Build the speaker-mixing widgets: a speaker-count slider plus one
    (dropdown, weight slider) row per potential speaker; rows beyond the
    selected count are hidden.

    :param init_speakers: initial dropdown choices (default: empty list).
    :param default_speaker: pre-selected speaker; falls back to the first choice.
    :returns: (speakers, speaker_weights, n_speakers) widget lists/slider.
    """
    # Avoid the mutable-default-argument pitfall of ``init_speakers=[]``.
    if init_speakers is None:
        init_speakers = []
    with gr.Blocks():
        n_speakers = gr.Slider(label='Number of Speakers', minimum=1, maximum=MAX_MIXING_SPEAKERS, value=1, step=1)
        speakers = []
        speaker_weights = []
        default_choice = default_speaker or (init_speakers[0] if len(init_speakers) > 0 else None)
        for i in range(MAX_MIXING_SPEAKERS):
            with gr.Row():
                # Only the first row is visible until n_speakers is raised.
                speaker = gr.Dropdown(choices=init_speakers, label=f'Speaker Name {i}', type='value', value=default_choice, visible=(i == 0))
                weight = gr.Slider(label='Weight', minimum=0, maximum=1, value=1, visible=(i == 0))
                speakers.append(speaker)
                speaker_weights.append(weight)
        # Reveal exactly n rows of dropdowns/weights when the count changes.
        n_speakers.change((lambda n: [gr.Number.update(visible=(i < int(n))) for i in range(MAX_MIXING_SPEAKERS)]), n_speakers, speakers)
        n_speakers.change((lambda n: [gr.Slider.update(visible=(i < int(n))) for i in range(MAX_MIXING_SPEAKERS)]), n_speakers, speaker_weights)
    return (speakers, speaker_weights, n_speakers)
def test_psi4():
    """Smoke-test the Psi4 calculator: forces, energy, and a timed hessian."""
    geom = geom_from_library('hcn_iso_ts.xyz')
    calc_kwargs = {'pal': 4, 'mem': 2000, 'method': 'b3lyp', 'basis': 'def2-svp'}
    calc = Psi4(**calc_kwargs)
    geom.set_calculator(calc)
    print(calc.base_cmd)
    forces = geom.forces
    print(forces)
    energy = geom.energy
    print(energy)
    # Time only the hessian evaluation, not the printing.
    start = time()
    hessian = geom.hessian
    duration = time() - start
    print(hessian)
    print('hess calc took', int(duration), 'seconds')
class GeoGoogle():
    """Factory for Google Maps charts (road, satellite, terrain, geolocated)."""

    def __init__(self, ui):
        self.page = ui.page
        self.chartFamily = 'GoogleMaps'

    def _build_map(self, center, map_type_id, profile, options, width, height, html_code):
        """Shared builder: merge caller options over the defaults and create
        the underlying chart object. ``center`` is either a (lat, lng) tuple
        or a JavaScript expression string."""
        dflt_options = {'center': center, 'mapTypeId': map_type_id, 'zoom': 10}
        if (options is not None):
            dflt_options.update(options)
        return geo.GeoGoogle.ChartGeoGoogle(self.page, width, height, dflt_options, html_code, profile)

    def maps(self, latitude: float, longitude: float, profile: Union[(dict, bool)]=None, options: dict=None, width: Union[(int, tuple)]=(100, '%'), height: Union[(int, tuple)]=(Defaults_html.CHARTS_HEIGHT_PX, 'px'), html_code: str=None):
        """Road map centred on (latitude, longitude)."""
        return self._build_map((latitude, longitude), 'ROADMAP', profile, options, width, height, html_code)

    def satellite(self, latitude: float, longitude: float, profile: Union[(dict, bool)]=None, options: dict=None, width: Union[(int, tuple)]=(100, '%'), height: Union[(int, tuple)]=(Defaults_html.CHARTS_HEIGHT_PX, 'px'), html_code: str=None):
        """Satellite imagery map centred on (latitude, longitude)."""
        return self._build_map((latitude, longitude), 'satellite', profile, options, width, height, html_code)

    def terrain(self, latitude: float, longitude: float, profile: Union[(dict, bool)]=None, options: dict=None, width: Union[(int, tuple)]=(100, '%'), height: Union[(int, tuple)]=(Defaults_html.CHARTS_HEIGHT_PX, 'px'), html_code: str=None):
        """Terrain map centred on (latitude, longitude)."""
        return self._build_map((latitude, longitude), 'terrain', profile, options, width, height, html_code)

    def current(self, profile: Union[(dict, bool)]=None, options: dict=None, width: Union[(int, tuple)]=(100, '%'), height: Union[(int, tuple)]=(Defaults_html.CHARTS_HEIGHT_PX, 'px'), html_code: str=None):
        """Road map centred on the browser's current position (geolocation API)."""
        js_center = 'navigator.geolocation.getCurrentPosition(function(position){console.log(position.coords.latitude); return (position.coords.latitude, position.coords.longitude)})'
        return self._build_map(js_center, 'ROADMAP', profile, options, width, height, html_code)

    def streetview(self):
        # Not implemented yet.
        pass
def main():
    """Generate (x, 2x+1) samples with the datagen connector and train the
    model for the requested number of epochs, saving it to the given path."""
    logging.basicConfig(stream=sys.stdout, level=logging.INFO, format='%(message)s')
    args = parse_args()
    save_path = args.model_path
    num_epochs = args.epoch
    num_samples = args.sample_count
    logger.info(f'Model will be trained with {num_samples} samples for {num_epochs} epochs and saved at: {save_path}')
    env = StreamExecutionEnvironment.get_execution_environment()
    env.set_parallelism(2)
    table_env = StreamTableEnvironment.create(env)
    statement_set = table_env.create_statement_set()
    # y is derived from x so the trainer can learn the linear relation.
    schema = (Schema.new_builder()
              .column('x', DataTypes.DOUBLE())
              .column_by_expression('y', expr.call_sql('2 * x + 1'))
              .build())
    input_table = table_env.from_descriptor(
        TableDescriptor.for_connector('datagen')
        .schema(schema)
        .option('number-of-rows', str(num_samples))
        .option('fields.x.min', '0')
        .option('fields.x.max', '1')
        .build())
    cluster_config = (TFClusterConfig.new_builder()
                      .set_node_entry(stream_train)
                      .set_worker_count(2)
                      .set_property('input_types', 'FLOAT_64,FLOAT_64')
                      .set_property('model_save_path', save_path)
                      .set_property('storage_type', 'local_file')
                      .build())
    train(statement_set, cluster_config, input_table, num_epochs)
    statement_set.execute().wait()
class ShotManagerUI(object):
    """Qt UI for the Shot Tools: builds the form, persists per-project
    settings through QSettings, and drives ShotManager actions.

    Fixes applied: ``QComboBox.findText`` returns -1 when not found (and -1
    is truthy), so the previous ``if index:`` checks both mishandled a match
    at index 0 and silently accepted "not found"; the success message in
    :meth:`create_render_jobs_callback` could never appear because the
    ``success`` flag was never set.
    """

    __company_name__ = 'Erkan Ozgur Yilmaz'
    __app_name__ = 'Shot Tools'
    __version__ = '0.0.1'

    def __init__(self, layout):
        """Populate the given layout with the Shot Tools widgets.

        :param layout: parent QLayout to build the UI into.
        """
        self.main_layout = layout
        self.parent_widget = self.main_layout.parent()
        self.form_layout = None
        self.active_projects_only_check_box = None
        self.project_combo_box = None
        self.sequence_combo_box = None
        self.handle_spin_box = None
        self.take_name_line_edit = None
        self.render_presets_combo_box = None
        self.reuse_latest_version_check_box = None
        self.refresh_render_presets_button = None
        # Guard flag: suppress settings writes while widgets are being updated
        # programmatically (otherwise every setValue/setText would persist).
        self._shot_related_data_is_updating = False
        # {project.id: {'handle': int, 'take_name': str, 'render_preset': str}}
        self.project_based_settings_storage = {}
        self.settings = QtCore.QSettings(self.__company_name__, self.__app_name__)
        self._setup_ui()

    def _setup_ui(self):
        """Create and wire up all widgets."""
        from anima.utils import do_db_setup
        do_db_setup()
        color_list = ColorList()
        self.form_layout = QtWidgets.QFormLayout()
        self.main_layout.addLayout(self.form_layout)
        i = 0
        # Project row: combo box + "active only" filter.
        label = QtWidgets.QLabel(self.parent_widget)
        label.setText('Project')
        self.form_layout.setWidget(i, QtWidgets.QFormLayout.LabelRole, label)
        from functools import partial
        project_horizontal_layout = QtWidgets.QHBoxLayout()
        self.form_layout.setLayout(i, QtWidgets.QFormLayout.FieldRole, project_horizontal_layout)
        from anima.ui.widgets.project import ProjectComboBox
        self.project_combo_box = ProjectComboBox(self.parent_widget)
        self.project_combo_box.show_active_projects = True
        project_horizontal_layout.addWidget(self.project_combo_box)
        self.project_combo_box.currentIndexChanged.connect(partial(self.project_changed))
        self.active_projects_only_check_box = QtWidgets.QCheckBox(self.parent_widget)
        self.active_projects_only_check_box.setText('Active Projects Only')
        self.active_projects_only_check_box.setChecked(True)
        self.active_projects_only_check_box.setToolTip('Show active Projects only!')
        self.active_projects_only_check_box.stateChanged.connect(partial(self.active_projects_only_check_box_callback))
        project_horizontal_layout.addWidget(self.active_projects_only_check_box)
        project_horizontal_layout.setStretch(0, 1)
        project_horizontal_layout.setStretch(1, 0)
        i += 1
        # Sequence row.
        label = QtWidgets.QLabel(self.parent_widget)
        label.setText('Sequence')
        self.form_layout.setWidget(i, QtWidgets.QFormLayout.LabelRole, label)
        from anima.ui.widgets.sequence import SequenceComboBox
        self.sequence_combo_box = SequenceComboBox(self.parent_widget)
        self.form_layout.setWidget(i, QtWidgets.QFormLayout.FieldRole, self.sequence_combo_box)
        # Action buttons, each with a distinct background color.
        get_shot_list_push_button = QtWidgets.QPushButton(self.parent_widget)
        get_shot_list_push_button.setText('Get Shot List')
        self.main_layout.addWidget(get_shot_list_push_button)
        set_widget_bg_color(get_shot_list_push_button, color_list)
        get_shot_list_push_button.clicked.connect(partial(self.get_shot_list_callback))
        color_list.next()
        validate_shots_push_button = QtWidgets.QPushButton(self.parent_widget)
        validate_shots_push_button.setText('Validate Shots')
        self.main_layout.addWidget(validate_shots_push_button)
        set_widget_bg_color(validate_shots_push_button, color_list)
        validate_shots_push_button.clicked.connect(partial(self.validate_shot_codes_callback))
        color_list.next()
        check_duplicate_shot_code_push_button = QtWidgets.QPushButton(self.parent_widget)
        check_duplicate_shot_code_push_button.setText('Check Duplicate Shot Code')
        self.main_layout.addWidget(check_duplicate_shot_code_push_button)
        set_widget_bg_color(check_duplicate_shot_code_push_button, color_list)
        check_duplicate_shot_code_push_button.clicked.connect(partial(self.check_duplicate_shots_callback))
        color_list.next()
        # Handles row.
        handle_horizontal_layout = QtWidgets.QHBoxLayout()
        self.main_layout.addLayout(handle_horizontal_layout)
        handle_label = QtWidgets.QLabel(self.parent_widget)
        handle_label.setText('Handles')
        handle_label.setMinimumWidth(140)
        handle_label.setMaximumWidth(140)
        handle_horizontal_layout.addWidget(handle_label)
        self.handle_spin_box = QtWidgets.QSpinBox(self.parent_widget)
        self.handle_spin_box.setMinimum(0)
        self.handle_spin_box.setValue(0)
        self.handle_spin_box.valueChanged.connect(partial(self.shot_related_data_value_changed))
        handle_horizontal_layout.addWidget(self.handle_spin_box)
        # Take name row.
        take_name_horizontal_layout = QtWidgets.QHBoxLayout()
        self.main_layout.addLayout(take_name_horizontal_layout)
        take_name_label = QtWidgets.QLabel(self.parent_widget)
        take_name_label.setText('Take Name')
        take_name_label.setMinimumWidth(140)
        take_name_label.setMaximumWidth(140)
        take_name_horizontal_layout.addWidget(take_name_label)
        self.take_name_line_edit = QtWidgets.QLineEdit(self.parent_widget)
        self.take_name_line_edit.setText(DEFAULT_TAKE_NAME)
        self.take_name_line_edit.textEdited.connect(partial(self.shot_related_data_value_changed))
        take_name_horizontal_layout.addWidget(self.take_name_line_edit)
        # Render preset row with a refresh button.
        render_preset_horizontal_layout = QtWidgets.QHBoxLayout()
        self.main_layout.addLayout(render_preset_horizontal_layout)
        render_preset_label = QtWidgets.QLabel(self.parent_widget)
        render_preset_label.setText('Render Preset')
        render_preset_label.setMinimumWidth(140)
        render_preset_label.setMaximumWidth(140)
        render_preset_horizontal_layout.addWidget(render_preset_label)
        self.render_presets_combo_box = QtWidgets.QComboBox(self.parent_widget)
        render_preset_horizontal_layout.addWidget(self.render_presets_combo_box)
        self.fill_preset_combo_box()
        self.render_presets_combo_box.currentIndexChanged.connect(partial(self.shot_related_data_value_changed))
        self.refresh_render_presets_button = QtWidgets.QPushButton(self.parent_widget)
        self.refresh_render_presets_button.setIcon(self.parent_widget.style().standardIcon(QtWidgets.QStyle.SP_BrowserReload))
        self.refresh_render_presets_button.setFixedWidth(24)
        self.refresh_render_presets_button.clicked.connect(partial(self.fill_preset_combo_box))
        render_preset_horizontal_layout.addWidget(self.refresh_render_presets_button)
        # Reuse latest version row.
        reuse_latest_version_layout = QtWidgets.QHBoxLayout()
        self.main_layout.addLayout(reuse_latest_version_layout)
        reuse_latest_version_label = QtWidgets.QLabel('Reuse Latest Version')
        reuse_latest_version_label.setMinimumWidth(140)
        reuse_latest_version_label.setMaximumWidth(140)
        reuse_latest_version_layout.addWidget(reuse_latest_version_label)
        self.reuse_latest_version_check_box = QtWidgets.QCheckBox(self.parent_widget)
        self.reuse_latest_version_check_box.setText('')
        reuse_latest_version_layout.addWidget(self.reuse_latest_version_check_box)
        reuse_latest_version_layout.addSpacerItem(QtWidgets.QSpacerItem(20, 20, QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Fixed))
        create_shots_and_render_jobs_button = QtWidgets.QPushButton(self.parent_widget)
        create_shots_and_render_jobs_button.setText('Create Shots and Render Jobs')
        self.main_layout.addWidget(create_shots_and_render_jobs_button)
        set_widget_bg_color(create_shots_and_render_jobs_button, color_list)
        create_shots_and_render_jobs_button.clicked.connect(partial(self.create_render_jobs_callback))
        color_list.next()
        update_shot_thumbnail_button = QtWidgets.QPushButton(self.parent_widget)
        update_shot_thumbnail_button.setText('Update Shot Thumbnail')
        self.main_layout.addWidget(update_shot_thumbnail_button)
        set_widget_bg_color(update_shot_thumbnail_button, color_list)
        update_shot_thumbnail_button.clicked.connect(partial(self.update_shot_thumbnail_callback))
        color_list.next()
        update_shot_record_in_info_button = QtWidgets.QPushButton(self.parent_widget)
        update_shot_record_in_info_button.setText('Update Shot Record-In Info')
        self.main_layout.addWidget(update_shot_record_in_info_button)
        set_widget_bg_color(update_shot_record_in_info_button, color_list)
        update_shot_record_in_info_button.clicked.connect(self.update_shot_record_in_info_callback)
        color_list.next()
        self.main_layout.addStretch()
        self.project_changed(None)

    def fill_preset_combo_box(self):
        """Refresh the render preset list, keeping the current selection if
        it still exists; otherwise fall back to the saved settings."""
        shot_manager = ShotManager()
        render_preset_list = shot_manager.resolve_project.GetRenderPresetList()
        current_text = self.render_presets_combo_box.currentText()
        self.render_presets_combo_box.clear()
        self.render_presets_combo_box.addItems(sorted(render_preset_list))
        if current_text:
            index = self.render_presets_combo_box.findText(current_text)
            # findText() returns -1 when not found; -1 is truthy so the old
            # ``if index:`` check selected nothing and skipped index 0.
            if index != -1:
                self.render_presets_combo_box.setCurrentIndex(index)
            else:
                self.read_settings()

    def write_settings(self):
        """Persist the per-project settings storage to QSettings."""
        self.settings.beginGroup('ShotToolsLayout')
        self.settings.setValue('project_based_settings_storage', self.project_based_settings_storage)
        self.settings.endGroup()

    def read_settings(self):
        """Load stored settings and push the current project's values
        (handle, take name, render preset) into the widgets."""
        self.settings.beginGroup('ShotToolsLayout')
        try:
            project_based_settings_storage = self.settings.value('project_based_settings_storage')
            if project_based_settings_storage:
                self.project_based_settings_storage = project_based_settings_storage
        except ValueError:
            pass
        handle = 0
        take_name = DEFAULT_TAKE_NAME
        render_preset = DEFAULT_RENDER_PRESET_NAME
        project = self.project_combo_box.get_current_project()
        if (project and (project.id in self.project_based_settings_storage)):
            storage = self.project_based_settings_storage[project.id]
            if storage:
                if ('handle' in storage):
                    handle = storage['handle']
                if ('take_name' in storage):
                    take_name = storage['take_name']
                if ('render_preset' in storage):
                    render_preset = storage['render_preset']
        self.handle_spin_box.setValue(handle)
        self.take_name_line_edit.setText(take_name)
        index = self.render_presets_combo_box.findText(render_preset, QtCore.Qt.MatchExactly)
        # Same findText() pitfall as above: only apply a real match.
        if index != -1:
            self.render_presets_combo_box.setCurrentIndex(index)
        self.settings.endGroup()

    def update_project_based_settings_storage(self):
        """Snapshot the current widget values under the current project id."""
        project = self.project_combo_box.get_current_project()
        if (not project):
            return
        handle = self.handle_spin_box.value()
        take_name = self.take_name_line_edit.text()
        render_preset = self.render_presets_combo_box.currentText()
        self.project_based_settings_storage[project.id] = {'handle': handle, 'take_name': take_name, 'render_preset': render_preset}

    def shot_related_data_value_changed(self, value):
        """Widget-change slot: persist settings unless a programmatic update
        is in progress."""
        if self._shot_related_data_is_updating:
            return
        self._shot_related_data_is_updating = True
        self.update_project_based_settings_storage()
        self.write_settings()
        self._shot_related_data_is_updating = False

    def project_changed(self, index):
        """Project combo slot: refresh the sequence list and load settings."""
        self._shot_related_data_is_updating = True
        self.sequence_combo_box.project = self.project_combo_box.get_current_project()
        self.read_settings()
        self._shot_related_data_is_updating = False

    def active_projects_only_check_box_callback(self, state):
        """Checkbox slot: filter the project combo by active state."""
        self.project_combo_box.show_active_projects = bool(state)

    def get_shot_list_callback(self):
        """Print the shot code of every shot clip in the current timeline."""
        sm = ShotManager()
        for shot in sm.get_shot_clips():
            # each item is expected to be a ShotClip
            print(shot.shot_code)

    def update_shot_thumbnail_callback(self):
        """Update the thumbnail of the current shot, reporting the outcome."""
        (project, sequence) = self.get_project_and_sequence()
        sm = ShotManager(project, sequence)
        shot_clip = sm.get_current_shot_clip()
        try:
            shot_clip.update_shot_thumbnail()
        except BaseException as e:
            QtWidgets.QMessageBox.critical(self.parent_widget, 'Shot thumbnail could not be updated', str(e))
        else:
            QtWidgets.QMessageBox.information(self.parent_widget, 'Updated shot thumbnail ', 'Updated shot thumbnail ')

    def update_shot_record_in_info_callback(self):
        """Ask for confirmation, then refresh record-in info on every shot."""
        (project, sequence) = self.get_project_and_sequence()
        answer = QtWidgets.QMessageBox.question(self.parent_widget, 'Update shot record info!', ('This will update the shot record_in information of:<br/><br/>%s - %s<br/> <br/>Is this ok?' % (project.name, sequence.name)), QtWidgets.QMessageBox.Yes, QtWidgets.QMessageBox.No)
        if (answer == QtWidgets.QMessageBox.Yes):
            shot_manager = ShotManager(project=project, sequence=sequence)
            shots = shot_manager.get_shot_clips()
            for shot in shots:
                shot.update_record_in_info()
            QtWidgets.QMessageBox.information(self.parent_widget, 'Update shot record info!', 'Done')

    def get_project_and_sequence(self):
        """Return the selected (project, sequence), showing an error dialog
        and re-raising if either is missing."""
        try:
            project = self.project_combo_box.get_current_project()
            if (not project):
                raise RuntimeError('No project selected!')
            sequence = self.sequence_combo_box.get_current_sequence()
            if (not sequence):
                raise RuntimeError('No sequence selected!')
        except RuntimeError as e:
            QtWidgets.QMessageBox.critical(self.parent_widget, 'Error', str(e).replace('\n', '<br>'))
            raise e
        return (project, sequence)

    def check_duplicate_shots_callback(self):
        """Report duplicate shot codes; return True when none are found."""
        sm = ShotManager()
        duplicate_shot_codes = sm.check_duplicate_shots()
        if duplicate_shot_codes:
            QtWidgets.QMessageBox.critical(self.parent_widget, 'Duplicate Shot Codes!!!', ('There are duplicate shot codes:<br>%s' % '<br>'.join(duplicate_shot_codes)))
            return False
        else:
            QtWidgets.QMessageBox.information(self.parent_widget, 'No Duplicate Shots ', 'No duplicate shots ')
            return True

    def validate_shot_codes_callback(self):
        """Report invalid shot codes; return True when all are valid."""
        sm = ShotManager()
        invalid_shots = sm.validate_shot_codes()
        if invalid_shots:
            QtWidgets.QMessageBox.critical(self.parent_widget, 'Invalid shot names!!!', ('There are invalid shot codes:<br>%s' % '<br>'.join([shot_clip.shot_code for shot_clip in invalid_shots])))
            return False
        else:
            QtWidgets.QMessageBox.information(self.parent_widget, 'All shots valid ', 'All shots valid ')
            return True

    def create_render_jobs_callback(self):
        """Validate shots, ask which shots to process, then create shots and
        render jobs with the current handle/take/preset settings."""
        (project, sequence) = self.get_project_and_sequence()
        sm = ShotManager()
        if sm.check_duplicate_shots():
            return
        if sm.validate_shot_codes():
            return
        from anima.ui.base import AnimaDialogBase
        anima_dialog_base = AnimaDialogBase()
        anima_dialog_base.get_logged_in_user()
        handle = self.handle_spin_box.value()
        take_name = self.take_name_line_edit.text()
        preset_name = self.render_presets_combo_box.currentText()
        reuse_latest_version = self.reuse_latest_version_check_box.isChecked()
        message_box = QtWidgets.QMessageBox(self.parent_widget)
        message_box.setText('Which Shots?')
        current_shot = QtWidgets.QPushButton('Current')
        all_shots = QtWidgets.QPushButton('All')
        cancel_button = QtWidgets.QPushButton('Cancel')
        message_box.addButton(cancel_button, QtWidgets.QMessageBox.NoRole)
        message_box.addButton(all_shots, QtWidgets.QMessageBox.NoRole)
        message_box.addButton(current_shot, QtWidgets.QMessageBox.NoRole)
        message_box.exec_()
        shot_manager = ShotManager(project, sequence)
        clicked_button = message_box.clickedButton()
        message_box.deleteLater()
        success = False
        shot_clips = []
        if (clicked_button == all_shots):
            shot_clips = shot_manager.get_shot_clips()
        elif (clicked_button == current_shot):
            shot_clip = shot_manager.get_current_shot_clip()
            if shot_clip:
                shot_clips.append(shot_clip)
        try:
            shot_manager.create_render_jobs(shot_clips, handle, take_name, preset_name, reuse_latest_version)
            # Previously ``success`` was never set, so this dialog never showed.
            success = True
            if success:
                QtWidgets.QMessageBox.information(self.parent_widget, 'Created Shots and Render Jobs ', 'Created Shots and Render Jobs ')
        except BaseException as e:
            QtWidgets.QMessageBox.critical(self.parent_widget, 'Error', str(e))
            raise e
def resample_and_mask_metric(subject_id, dscalar, hemisphere, source_mesh, dest_mesh, current_sphere='sphere', dest_sphere='sphere'):
    """Resample a subject's metric file from one mesh to another with
    wb_command, optionally restricting to and masking by the medial wall ROI.

    :param dscalar: dict with 'mapname' and 'mask_medialwall' keys.
    """
    map_name = dscalar['mapname']
    metric_in = metric_file(subject_id, map_name, hemisphere, source_mesh)
    metric_out = metric_file(subject_id, map_name, hemisphere, dest_mesh)
    current_midthickness = surf_file(subject_id, 'midthickness', hemisphere, source_mesh)
    new_midthickness = surf_file(subject_id, 'midthickness', hemisphere, dest_mesh)
    current_sphere_surf = surf_file(subject_id, current_sphere, hemisphere, source_mesh)
    dest_sphere_surf = surf_file(subject_id, dest_sphere, hemisphere, dest_mesh)
    resample_cmd = ['wb_command', '-metric-resample', metric_in, current_sphere_surf, dest_sphere_surf, 'ADAP_BARY_AREA', metric_out, '-area-surfs', current_midthickness, new_midthickness]
    if dscalar['mask_medialwall']:
        resample_cmd += ['-current-roi', medial_wall_roi_file(subject_id, hemisphere, source_mesh)]
        # Previously only the mask step honoured DRYRUN; pass it consistently
        # so a dry run never launches the resample either.
        run(resample_cmd, dryrun=DRYRUN)
        run(['wb_command', '-metric-mask', metric_out, medial_wall_roi_file(subject_id, hemisphere, dest_mesh), metric_out], dryrun=DRYRUN)
    else:
        run(resample_cmd, dryrun=DRYRUN)
class DatePicker(Html.Html):
    """jQuery UI date picker component with optional label, icon and helper.

    NOTE(review): ``options`` and ``dom`` call ``super().options`` / cache
    ``self._dom`` without parentheses, so they were evidently ``@property``
    accessors whose decorators were stripped; restored below.
    """

    requirements = ('jqueryui',)
    name = 'Date Picker'
    _option_cls = OptCalendars.OptionDatePicker

    def __init__(self, page: primitives.PageModel, value, label: Optional[str], icon: Optional[str], width: tuple, height: tuple, color: Optional[str], html_code: Optional[str], profile: Optional[Union[(dict, bool)]], options: Optional[dict], helper: Optional[str], verbose: bool=False):
        super(DatePicker, self).__init__(page, value, html_code=html_code, profile=profile, verbose=verbose)
        if ((width[0] is not None) and (width[1] == 'px')):
            # Leave 30px of room for the calendar icon next to the input.
            width = ((width[0] - 30), width[1])
        self.input = self.page.ui.inputs.d_date(self.val, width=width, height=height, options=options, html_code=(('%s_input' % html_code) if (html_code is not None) else html_code)).css({'padding': 0})
        if (html_code is not None):
            self.input.attr['name'] = ('%s_input' % html_code)
        self.prepend_child(self.input)
        if ((not self.input.options.inline) and icon):
            # add_icon presumably assigns self.icon — TODO confirm; otherwise
            # the check below would raise AttributeError on this branch.
            self.add_icon(icon, html_code=self.htmlCode, css={'margin-top': '-4px', 'margin-left': '5px', 'color': (color or 'inherit')}, position='after', family=options.get('icon_family'))
        else:
            self.icon = None
        if (self.icon is not None):
            self.icon.click([self.input.dom.events.trigger('click').toStr()])
        self.add_label(label, html_code=self.htmlCode, css={'height': 'auto', 'margin-top': '1px', 'margin-bottom': '1px'}, options=options)
        self.add_helper(helper, css={'float': 'none', 'margin-left': '5px'})
        self.css({'color': (color or 'inherit'), 'vertical-align': 'middle', 'display': 'block', 'width': 'auto'})

    @property
    def options(self) -> OptCalendars.OptionDatePicker:
        """Component options, typed as the date picker option class."""
        return super().options

    @property
    def dom(self) -> JsHtmlJqueryUI.JsHtmlDateFieldPicker:
        """Lazily-created DOM accessor for this component."""
        if (self._dom is None):
            self._dom = JsHtmlJqueryUI.JsHtmlDateFieldPicker(self, page=self.page)
        return self._dom

    def select(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=None):
        """Attach JavaScript callbacks to the date-selected event."""
        if (not isinstance(js_funcs, list)):
            js_funcs = [js_funcs]
        if (self.icon is not None):
            # Keep the icon tooltip in sync with the selected date.
            self.icon.tooltip(self.input.dom.content)
            js_funcs.append(self.icon.dom.setattr('title', self.input.dom.content))
        self.input.options.onSelect(js_funcs, profile)
        return self

    def excluded_dates(self, dts: Optional[List[str]]=None, js_funcs: types.JS_FUNCS_TYPES=None, dataflows: List[dict]=None, profile: types.PROFILE_TYPE=False):
        """Forward excluded dates configuration to the underlying input."""
        return self.input.excluded_dates(dts, js_funcs, dataflows, profile)

    def included_dates(self, dts: List[str]=None, selected: str=None, js_funcs: types.JS_FUNCS_TYPES=None, dataflows: List[dict]=None, profile: types.PROFILE_TYPE=False):
        """Forward included dates configuration to the underlying input."""
        return self.input.included_dates(dts, selected, js_funcs, dataflows, profile)

    def add_options(self, options: dict=None, name: str=None, value: str=None):
        """Set one or more options on the underlying input.

        :raises ValueError: when neither ``options`` nor ``name`` is given.
        """
        if ((options is None) and (name is None)):
            raise ValueError('Either the attrs or the name should be specified')
        if (options is None):
            options = {name: value}
        for (k, v) in options.items():
            setattr(self.input.options, k, v)
        return self

    def __str__(self):
        return ('<div %(attr)s>%(helper)s</div>' % {'attr': self.get_attrs(css_class_names=self.style.get_classes()), 'helper': self.helper})
class Snapshot(Base):
    """A named, immutable snapshot of a library's symbol versions."""

    __tablename__ = 'snapshots'
    id = Column(Integer, primary_key=True)
    name = Column(String, index=True, nullable=False)
    timestamp = Column(Timestamp(timezone=True), server_default=current_timestamp())
    library_id = Column(Integer, ForeignKey('libraries.id'), nullable=False)
    library = relationship('Library', back_populates='snapshots')
    versions = relationship('SnapshotAssociation', back_populates='snapshot')
    # A snapshot name is unique within its library.
    __table_args__ = (UniqueConstraint('library_id', 'name'),)

    def get_symbols(self):
        """Return all symbols that have a version in this snapshot."""
        session = object_session(self)
        associations = session.query(SnapshotAssociation).with_parent(self)
        versions = session.query(Version).join(associations.subquery())
        symbols = session.query(Symbol).join(versions.subquery())
        symbols = symbols.all()
        return symbols

    def get_versions(self):
        """Return all versions associated with this snapshot."""
        session = object_session(self)
        associations = session.query(SnapshotAssociation).with_parent(self)
        versions = session.query(Version).join(associations.subquery())
        versions = versions.all()
        return versions

    def get_version_of_symbol(self, symbol):
        """Return the version of ``symbol`` captured by this snapshot.

        :param symbol: a Symbol instance or a symbol name (str).
        """
        session = object_session(self)
        if isinstance(symbol, str):
            # BUG FIX: Python's ``and`` between SQLAlchemy clauses evaluates
            # the first clause's truthiness and returns only the second, so
            # the library_id criterion was silently dropped. Passing both
            # criteria to filter() joins them with SQL AND.
            symbol = session.query(Symbol).filter(Symbol.library_id == self.library_id, Symbol.name == symbol).one()
        associations = session.query(SnapshotAssociation).with_parent(self)
        version = session.query(Version).with_parent(symbol).join(associations.subquery()).one()
        return version

    def delete(self):
        """Delete this snapshot and its version associations."""
        session = object_session(self)
        associations = session.query(SnapshotAssociation).with_parent(self)
        associations.delete()
        session.delete(self)
def legacy_convert_requirement(parsed_req):
    """Convert a parsed requirement's version specs into legacy
    ('Conflicts'/'Requires', name, spec) rows.

    '~=' specs expand into two requirements, '!=' specs become conflicts,
    and everything else is a plain requirement. A spec-less requirement
    yields the bare '{name}' placeholder.
    """
    requires = []
    conflicts = []
    name = parsed_req.project_name
    for (operator, version) in parsed_req.specs:
        converted = legacy_convert(name, operator, version)
        if operator == '~=':
            # Compatible-release expands to a lower and an upper bound.
            requires.extend(converted[:2])
        elif operator == '!=':
            conflicts.append(converted)
        else:
            requires.append(converted)
    if not requires:
        requires.append('{name}')
    conflicts.sort(reverse=True)
    requires.sort(reverse=True)
    conflict_rows = [['Conflicts', name, entry] for entry in conflicts]
    require_rows = [['Requires', name, entry] for entry in requires]
    return (conflict_rows + require_rows)
# NOTE(review): these decorator lines had lost their '@' prefixes, turning
# them into no-op expressions; restored so the command is actually registered.
@_flyte_cli.command('update-cluster-resource-attributes', cls=_FlyteSubCommand)
@_host_option
@_insecure_option
@_project_option
@_domain_option
@_optional_name_option
@_click.option('--attributes', type=(str, str), multiple=True)
def update_cluster_resource_attributes(host, insecure, project, domain, name, attributes):
    """Update cluster resource attributes for a project/domain, or for a
    specific workflow when --name is given."""
    _welcome_message()
    client = _get_client(host, insecure)
    cluster_resource_attributes = _ClusterResourceAttributes({attribute[0]: attribute[1] for attribute in attributes})
    matching_attributes = _MatchingAttributes(cluster_resource_attributes=cluster_resource_attributes)
    if (name is not None):
        client.update_workflow_attributes(project, domain, name, matching_attributes)
        _click.echo('Successfully updated cluster resource attributes for project: {}, domain: {}, and workflow: {}'.format(project, domain, name))
    else:
        client.update_project_domain_attributes(project, domain, matching_attributes)
        _click.echo('Successfully updated cluster resource attributes for project: {} and domain: {}'.format(project, domain))
class LazySpeller():
    """Deprecated callable shim: builds the real Speller on first use."""

    def __init__(self):
        # The actual Speller is expensive, so defer creation until called.
        self.speller = None

    def __call__(self, sentence):
        """Spell-correct ``sentence``, warning about the deprecated API."""
        print('autocorrect.spell is deprecated, use autocorrect.Speller instead')
        if self.speller is None:
            self.speller = Speller()
        return self.speller(sentence)
def _fft(vals, modulus, roots_of_unity):
if ((len(vals) <= 4) and (type(vals[0]) != tuple)):
return _simple_ft(vals, modulus, roots_of_unity)
elif ((len(vals) == 1) and (type(vals[0]) == tuple)):
return vals
L = _fft(vals[::2], modulus, roots_of_unity[::2])
R = _fft(vals[1::2], modulus, roots_of_unity[::2])
o = [0 for i in vals]
for (i, (x, y)) in enumerate(zip(L, R)):
y_times_root = (b.multiply(y, roots_of_unity[i]) if (type(y) == tuple) else (y * roots_of_unity[i]))
o[i] = (b.add(x, y_times_root) if (type(x) == tuple) else ((x + y_times_root) % modulus))
o[(i + len(L))] = (b.add(x, b.neg(y_times_root)) if (type(x) == tuple) else ((x - y_times_root) % modulus))
return o |
# NOTE(review): the decorator had lost its '@pytest.mark' prefix (the line
# began with '.django_db'), leaving a syntax error; restored. Assumes this
# module imports pytest — TODO confirm.
@pytest.mark.django_db(transaction=True)
def test_download_awards_with_all_award_types(client, _award_download_data):
    """Bulk-download awards for every prime and sub award type and verify the
    completed download's row and column counts."""
    download_generation.retrieve_db_string = Mock(return_value=get_database_dsn_string())
    filters = {'agency': 'all', 'prime_award_types': [*list(award_type_mapping.keys())], 'sub_award_types': [*all_subaward_types], 'date_type': 'action_date', 'date_range': {'start_date': '2016-10-01', 'end_date': '2017-09-30'}}
    dl_resp = client.post('/api/v2/bulk_download/awards', content_type='application/json', data=json.dumps({'filters': filters, 'columns': []}))
    assert (dl_resp.status_code == status.HTTP_200_OK)
    resp = client.get('/api/v2/download/status/?file_name={}'.format(dl_resp.json()['file_name']))
    assert (resp.status_code == status.HTTP_200_OK)
    assert (resp.json()['total_rows'] == 9)
    assert (resp.json()['total_columns'] == 638)
class ValidationError(APIException):
    """API exception for invalid input (HTTP 400).

    Normalises ``detail`` into the error-detail structure: strings are
    %-interpolated with ``params`` and wrapped in a list, nested
    ValidationErrors are flattened, and lists/tuples are processed item by
    item. Dicts pass through unchanged.
    """
    status_code = status.HTTP_400_BAD_REQUEST
    default_detail = _('Invalid input.')
    default_code = 'invalid'
    # Default substitution mapping for '%'-style placeholders in detail text.
    default_params = {}
    def __init__(self, detail=None, code=None, params=None):
        if (detail is None):
            detail = self.default_detail
        if (code is None):
            code = self.default_code
        if (params is None):
            params = self.default_params
        if isinstance(detail, str):
            # Interpolate params, then coerce to a list for uniform handling.
            detail = [(detail % params)]
        elif isinstance(detail, ValidationError):
            # Re-wrap another ValidationError's already-normalised detail.
            detail = detail.detail
        elif isinstance(detail, (list, tuple)):
            # Flatten nested ValidationErrors; interpolate plain strings.
            final_detail = []
            for detail_item in detail:
                if isinstance(detail_item, ValidationError):
                    final_detail += detail_item.detail
                else:
                    final_detail += [((detail_item % params) if isinstance(detail_item, str) else detail_item)]
            detail = final_detail
        elif ((not isinstance(detail, dict)) and (not isinstance(detail, list))):
            # Any other scalar becomes a single-item list.
            detail = [detail]
        self.detail = _get_error_details(detail, code)
# NOTE(review): the leading `.filterwarnings(...)` looks like a stripped
# `@pytest.mark.filterwarnings` decorator — confirm against the original.
.filterwarnings('ignore:Default values*')
def test_irapasc_bytesio_threading(default_surface):
    """Regression test: exporting a surface to an in-memory irap_ascii
    stream must also work when triggered from a threading.Timer thread."""
    def test_xtgeo():
        stream = io.BytesIO()
        surface = xtgeo.RegularSurface(**default_surface)
        surface.to_file(stream, fformat='irap_ascii')
        print('XTGeo succeeded')
    # Run the export off the main thread after a one-second delay.
    threading.Timer(1.0, test_xtgeo).start()
def parse_arguments():
    """Build and parse the command-line arguments for main.py.

    Returns:
        argparse.Namespace: the parsed options (input prompt, approx/target
        model names, verbosity, seed, benchmark/profiling flags, generation
        length and speculation depth ``gamma``).
    """
    parser = argparse.ArgumentParser(description='args for main.py')
    parser.add_argument('--input', type=str, default='Suggest at least five related search terms to "Mang neural nhan tao".')
    parser.add_argument('--approx_model_name', type=str, default=MODELZOO['llama2-7b'])
    parser.add_argument('--target_model_name', type=str, default=MODELZOO['llama2-70b'])
    parser.add_argument('--verbose', '-v', action='store_true', default=False, help='enable verbose mode')
    # Fixed help-text grammar: "which can makes" -> "which makes".
    parser.add_argument('--seed', '-s', type=int, default=None, help='set a random seed, which makes the result reproducible')
    parser.add_argument('--benchmark', '-b', action='store_true', default=False, help='show benchmark results.')
    parser.add_argument('--profiling', '-p', action='store_true', default=False, help='collect torch profiler results.')
    parser.add_argument('--max_tokens', '-M', type=int, default=20, help='max token number generated.')
    parser.add_argument('--gamma', '-g', type=int, default=4, help='guess time.')
    args = parser.parse_args()
    return args
class ETMMsg(Message):
    """EFB message subclass carrying Telegram-specific metadata.

    Stores the Telegram ``file_id``/``file_unique_id`` and Telegram message
    type, and lazily downloads/converts the attached media the first time
    ``file``/``path``/``filename`` is accessed (see ``_load_file``).
    """
    # Telegram file identifiers of the attached media, if any.
    file_id: Optional[str] = None
    file_unique_id: Optional[str] = None
    # Telegram-side message type (more specific than the generic EFB ``type``).
    type_telegram: TGMsgType
    chat: ETMChatType
    author: ETMChatMember
    # Backing fields for the lazily-loaded file/path/filename properties.
    __file = None
    __path = None
    __filename = None
    def __init__(self, attributes: Optional[MessageAttribute]=None, author: ChatMember=None, chat: Chat=None, commands: Optional[MessageCommands]=None, deliver_to: Channel=None, edit: bool=False, edit_media: bool=False, file: Optional[BinaryIO]=None, filename: Optional[str]=None, is_system: bool=False, mime: Optional[str]=None, path: Optional[Path]=None, reactions: Reactions=None, substitutions: Optional[Substitutions]=None, target: 'Optional[Message]'=None, text: str='', type: MsgType=MsgType.Unsupported, uid: Optional[MessageID]=None, vendor_specific: Dict[(str, Any)]=None, type_telegram: TGMsgType=TGMsgType.System, file_id: Optional[str]=None):
        super().__init__(attributes=attributes, chat=chat, author=author, commands=commands, deliver_to=deliver_to, edit=edit, edit_media=edit_media, file=file, filename=filename, is_system=is_system, mime=mime, path=path, reactions=reactions, substitutions=substitutions, target=target, text=text, type=type, uid=uid, vendor_specific=vendor_specific)
        # Media is only fetched from Telegram once an accessor needs it.
        self.__initialized = False
        self.type_telegram = type_telegram
        self.file_id = file_id
    def _load_file(self):
        """Download the media behind ``file_id`` and post-process it.

        Fills ``__file``/``__path``/``__filename`` and ``mime``; animations
        and stickers are converted to GIF/PNG as appropriate.
        """
        if self.file_id:
            bot = coordinator.master.bot_manager
            try:
                file_meta = bot.get_file(self.file_id)
            except BadRequest as e:
                # Metadata not retrievable — leave the file fields unset.
                logger.exception('Bad request while trying to get file metadata: %s', e)
                return
            if (not self.mime):
                # Derive extension and MIME type from the Telegram file path.
                ext = os.path.splitext(file_meta.file_path)[1]
                mime = mimetypes.guess_type(file_meta.file_path, strict=False)[0]
            else:
                # MIME already known: derive a matching extension instead.
                ext = mimetypes.guess_extension(self.mime, strict=False)
                mime = self.mime
            file = tempfile.NamedTemporaryFile(suffix=ext)
            file_meta.download(out=file)
            file.seek(0)
            if (not mime):
                # Last resort: sniff the MIME type from the file content.
                mime = magic.from_buffer(file.read(1048576), mime=True)
                if (type(mime) is bytes):
                    mime = mime.decode()
            self.mime = mime
            self.__file = file
            self.__path = Path(file.name)
            self.__filename = (self.__filename or os.path.basename(file.name))
            if (self.type_telegram in (TGMsgType.Animation, TGMsgType.VideoSticker)):
                # Animations / video stickers are converted to GIF.
                gif_file = utils.gif_conversion(file, self.deliver_to.channel_id)
                self.__file = gif_file
                self.__path = gif_file.name
                self.__filename = (self.__filename or os.path.basename(gif_file.name))
                self.mime = 'image/gif'
            elif (self.type_telegram == TGMsgType.Sticker):
                # Static (webp) stickers are converted to PNG.
                out_file = tempfile.NamedTemporaryFile(suffix='.png')
                Image.open(file).convert('RGBA').save(out_file, 'png')
                file.close()
                out_file.seek(0)
                self.mime = 'image/png'
                self.__filename = ((self.__filename or os.path.basename(file.name)) + '.png')
                self.__file = out_file
                self.__path = out_file.name
            elif (self.type_telegram == TGMsgType.AnimatedSticker):
                # Animated (tgs) stickers: try GIF conversion, otherwise
                # forward the raw Lottie JSON.
                out_file = tempfile.NamedTemporaryFile(suffix='.gif')
                if utils.convert_tgs_to_gif(file, out_file):
                    file.close()
                    out_file.seek(0)
                    self.mime = 'image/gif'
                    self.__filename = ((self.__filename or os.path.basename(file.name)) + '.gif')
                else:
                    out_file.close()
                    file.seek(0)
                    out_file = file
                    self.mime = 'application/json'
                    self.__filename = ((self.__filename or os.path.basename(file.name)) + '.json')
                self.__file = out_file
                self.__path = out_file.name
        self.__initialized = True
    def get_file(self) -> Optional[BinaryIO]:
        if (not self.__initialized):
            self._load_file()
        return self.__file
    def set_file(self, value: Optional[BinaryIO]):
        # Explicitly setting the file skips the lazy download.
        self.__initialized = True
        self.__file = value
    def get_path(self) -> Optional[str]:
        if (not self.__initialized):
            self._load_file()
        return self.__path
    def set_path(self, value: Optional[str]):
        self.__initialized = True
        self.__path = value
    def get_filename(self) -> Optional[str]:
        if (not self.__initialized):
            self._load_file()
        return self.__filename
    def set_filename(self, value: Optional[str]):
        self.__filename = value
    # Lazy accessors exposed under the base-class attribute names.
    file: Optional[BinaryIO] = property(get_file, set_file)
    path: Optional[str] = property(get_path, set_path)
    filename: Optional[str] = property(get_filename, set_filename)
    # NOTE(review): no ``self`` parameter — presumably a stripped
    # ``@staticmethod``; confirm against the original file.
    def from_efbmsg(source: Message, chat_manager: ChatObjectCacheManager) -> 'ETMMsg':
        """Clone a generic EFB Message into an ETMMsg, upgrading chat,
        author and reaction members to their ETM cached equivalents."""
        target = ETMMsg()
        target.__dict__.update(source.__dict__)
        if (not isinstance(target.chat, ETMChatType)):
            target.chat = chat_manager.update_chat_obj(target.chat)
        if (not isinstance(target.author, ETMChatMember)):
            target.author = target.chat.get_member(target.author.uid)
        if isinstance(target.reactions, dict):
            for i in target.reactions:
                if any(((not isinstance(j, ETMChatMember)) for j in target.reactions[i])):
                    # Rebuild reactors as ETMChatMember objects of the chat.
                    target.reactions[i] = list(map((lambda a: target.chat.get_member(a.uid)), target.reactions[i]))
        return target
    def put_telegram_file(self, message: telegram.Message):
        """Copy file_id / unique_id / MIME (and type overrides) from a
        Telegram message's media attachment into this message."""
        # Common media types share the same attribute layout.
        is_common_file = False
        for tg_media_type in ('animation', 'document', 'video', 'voice'):
            attachment = getattr(message, tg_media_type, None)
            if attachment:
                is_common_file = True
                self.file_id = attachment.file_id
                self.file_unique_id = attachment.file_unique_id
                self.mime = attachment.mime_type
                break
        if (not is_common_file):
            if (self.type_telegram is TGMsgType.Audio):
                assert message.audio
                self.file_id = message.audio.file_id
                self.file_unique_id = message.audio.file_unique_id
                self.mime = message.audio.mime_type
                self.filename = message.audio.file_name
            elif (self.type_telegram is TGMsgType.Sticker):
                assert message.sticker
                self.file_id = message.sticker.file_id
                self.file_unique_id = message.sticker.file_unique_id
                self.mime = 'image/webp'
            elif (self.type_telegram is TGMsgType.AnimatedSticker):
                assert message.sticker
                self.file_id = message.sticker.file_id
                self.file_unique_id = message.sticker.file_unique_id
                self.mime = 'application/json+tgs'
                self.type = MsgType.Animation
            elif (self.type_telegram is TGMsgType.VideoSticker):
                assert message.sticker
                self.file_id = message.sticker.file_id
                self.file_unique_id = message.sticker.file_unique_id
                self.mime = 'video/webm'
                self.type = MsgType.Animation
            elif getattr(message, 'photo', None):
                # Photos come in multiple sizes; take the largest (last).
                attachment = message.photo[(- 1)]
                self.file_id = attachment.file_id
                self.file_unique_id = attachment.file_unique_id
                self.mime = 'image/jpeg'
            elif (self.type_telegram is TGMsgType.VideoNote):
                assert message.video_note
                self.file_id = message.video_note.file_id
                self.file_unique_id = message.video_note.file_unique_id
                self.mime = 'video/mpeg'
def extractAfterhourssolaceCom(item):
    """Map a feed item from afterhourssolace.com to a release message.

    Returns None when the title has no chapter/volume info or is a preview,
    a release message for a recognised tag, and False otherwise.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (feed tag, canonical series name, translation type)
    known_tags = [("today's dinner is the hero", 'Todays Dinner is the Hero.', 'translated'), ("today's dinner is the hero.", 'Todays Dinner is the Hero.', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    item_tags = item['tags']
    for tag, series_name, translation_type in known_tags:
        if tag in item_tags:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=translation_type)
    return False
class TestComposerThread_check_all_karma_thresholds(ComposerThreadBaseTestCase):
    """Tests for ComposerThread.check_all_karma_thresholds()."""
    # NOTE(review): the two bare expressions below look like stripped
    # `@mock.patch(...)` decorators — confirm against the original file.
    ('bodhi.server.models.Update.check_karma_thresholds', mock.MagicMock(side_effect=exceptions.BodhiException('BOOM')))
    ('bodhi.server.tasks.composer.log')
    def test_BodhiException(self, mocked_log):
        """A BodhiException from check_karma_thresholds is logged, not raised."""
        mocked_log.exception = mock.MagicMock()
        task = self._make_task()
        t = ComposerThread(self.semmock, task['composes'][0], 'bowlofeggs', self.Session, self.tempdir)
        t.compose = Compose.from_dict(self.db, task['composes'][0])
        t.db = self.db
        t.check_all_karma_thresholds()
        mocked_log.exception.assert_called_once_with('Problem checking karma thresholds')
        # No semaphores should have been released on failure.
        self.assert_sems(0)
def test_app_mount(tmpdir, test_client_factory):
    """Static files mounted under /static are served on GET and reject POST."""
    file_path = os.path.join(tmpdir, 'example.txt')
    with open(file_path, 'w') as fh:
        fh.write('<file content>')
    app = Starlette(routes=[Mount('/static', StaticFiles(directory=tmpdir))])
    client = test_client_factory(app)
    # GET serves the file contents verbatim.
    get_response = client.get('/static/example.txt')
    assert get_response.status_code == 200
    assert get_response.text == '<file content>'
    # POST is not an allowed method on static files.
    post_response = client.post('/static/example.txt')
    assert post_response.status_code == 405
    assert post_response.text == 'Method Not Allowed'
class Database():
    """Collects the result bits that differ between file pairs and exports
    them as a CSV matrix (one row per property, one column per bit)."""

    def __init__(self, convert_bits=False):
        # Union of every differing bit seen across all file pairs.
        self.all_bits = set()
        # property string -> set of bits that differed for that property.
        self.properties_bits = dict()
        # When True, CSV column titles are rewritten via convert_bit_format().
        self.convert_bits = convert_bits
        self.populate()

    def populate(self):
        """Scan every file pair and record which bits differ per property."""
        for (file1, file2) in get_file_pairs():
            for (property_str, bit) in generate_differing_bits(file1, file2):
                self.update_all_bits(bit)
                # setdefault replaces the original create-then-add branching.
                self.properties_bits.setdefault(property_str, set()).add(bit)

    def update_all_bits(self, bit):
        """Record `bit` in the global set of observed bits."""
        self.all_bits.add(bit)

    def print_all_bits(self):
        """Debug helper: dump the set of all observed bits."""
        print(self.all_bits)

    def get_keys(self):
        """Return the recorded property names (dict view)."""
        return self.properties_bits.keys()

    def print_bits(self, key):
        """Print the bits recorded for `key`, or a notice if it is unknown."""
        if (key in self.properties_bits):
            print(('%s: %s' % (key, self.properties_bits[key])))
        else:
            print('The specified property is not in the database')

    def convert_bit_format(self, item):
        """Convert 'dummy_<hexaddr>_<word>_<bit>' into '<address>_<bit>'.

        The address is the last two hex digits of the raw address field and
        the bit index is flattened across 32-bit words (word mod 4).
        """
        (dummy, address, word, bit) = item.split('_')
        address = int(address[(- 2):], 16)
        bit = (((int(word) % 4) * 32) + int(bit))
        return '{address}_{bit}'.format(address=address, bit=bit)

    def convert_header(self, header):
        """Return `header` with every bit name converted to address_bit form."""
        return [self.convert_bit_format(bit) for bit in header]

    def get_csv_header(self):
        """Build the CSV header line.

        Also caches the sorted (unconverted) bit order used by get_csv_body()
        in ``self.csv_header``.
        """
        header = sorted(self.all_bits)
        self.csv_header = header
        if self.convert_bits:
            header = self.convert_header(header)
        return 'property,v,i,r,' + ''.join(title + ',' for title in header) + '\n'

    def extract_rvi_parameters(self, rvi):
        """Derive 'voltage,drive,resistance' from an IOSTANDARD_SLEW_DRIVE key.

        Voltage is inferred from the trailing digits of the IO standard name
        (12/15/18/25 -> 1.2/1.5/1.8/2.5 V, anything else -> 3.3 V); the
        equivalent resistance is V / (drive in mA).
        """
        (iostandard, slew, drive) = rvi.split('_')
        voltage = {'12': 1.2, '15': 1.5, '18': 1.8, '25': 2.5}.get(iostandard[(- 2):], 3.3)
        resistance = (voltage / (int(drive) * 0.001))
        return ('%.1f,%s,%.3f' % (voltage, drive, resistance))

    def get_csv_body(self):
        """One CSV row per property, marking each header bit with 'X' or ' '."""
        lines = ''
        for properties_key in sorted(self.get_keys()):
            line = properties_key + ',' + self.extract_rvi_parameters(properties_key) + ','
            for title in self.csv_header:
                line += 'X,' if title in self.properties_bits[properties_key] else ' ,'
            lines += line + '\n'
        return lines

    def write_csv(self, filename):
        """Write header + body to `filename` in the current working directory."""
        filename = ((os.getcwd() + '/') + filename)
        # Context manager guarantees the handle is closed even if a write
        # raises (the original leaked the handle on error).
        with open(filename, 'w') as fp:
            fp.write(self.get_csv_header())
            fp.write(self.get_csv_body())
        print(('Written results to %s file.\n' % filename))
def test_template_matching_phase_raises_exception_if_incorrect_trace_size(sf, template_datas):
    """A template built on full-size traces must raise DistinguisherError
    when the matching container is restricted to a smaller frame."""
    ths_building = scared.traces.formats.read_ths_from_ram(template_datas.building_samples, plaintext=template_datas.building_plaintext, key=np.array([template_datas.building_key for i in range(len(template_datas.building_samples))]))
    building_cont = scared.Container(ths=ths_building)
    template = scared.TemplateAttack(container_building=building_cont, reverse_selection_function=sf, model=scared.Value(), convergence_step=1)
    template.build()
    ths_matching = scared.traces.formats.read_ths_from_ram(template_datas.matching_samples, plaintext=template_datas.matching_plaintext, key=np.array([template_datas.matching_key for i in range(len(template_datas.matching_samples))]))
    # Frame shorter than the building traces -> size mismatch at match time.
    matching_cont = scared.Container(ths=ths_matching, frame=slice(0, 10))
    with pytest.raises(scared.DistinguisherError):
        template.run(matching_cont)
def test_enctype_debug_helper(app, client):
    """In debug mode, accessing a missing uploaded file raises the helpful
    DebugFilesKeyError instead of a bare KeyError."""
    from flask.debughelpers import DebugFilesKeyError
    app.debug = True
    # NOTE(review): the bare tuple below looks like a stripped
    # `@app.route('/fail', methods=['POST'])` decorator — confirm.
    ('/fail', methods=['POST'])
    def index():
        return flask.request.files['foo'].filename
    with client:
        with pytest.raises(DebugFilesKeyError) as e:
            # 'foo' is sent as a form field, not a file -> files['foo'] fails.
            client.post('/fail', data={'foo': 'index.txt'})
        assert ('no file contents were transmitted' in str(e.value))
        assert ("This was submitted: 'index.txt'" in str(e.value))
class TraceContextInterceptor(ClientInterceptor):
    """gRPC client interceptor that propagates Datadog/fal trace headers."""

    def intercept(self, method: Callable, request_or_iterator: Any, call_details: ClientCallDetails):
        span = get_current_span_context()
        if span is None:
            # No active span: forward the call untouched.
            return method(request_or_iterator, call_details)
        trace_metadata = (
            ('x-datadog-trace-id', span.trace_id),
            ('x-datadog-parent-id', span.span_id),
            ('x-fal-invocation-id', span.invocation_id),
        )
        existing_metadata = call_details.metadata or []
        updated_details = call_details._replace(metadata=(*existing_metadata, *trace_metadata))
        return method(request_or_iterator, updated_details)
class Coords(Tidy3dBaseModel):
    """Holds x/y/z coordinate arrays defining a 3-D rectilinear grid."""
    x: Coords1D = pd.Field(..., title='X Coordinates', description='1-dimensional array of x coordinates.')
    y: Coords1D = pd.Field(..., title='Y Coordinates', description='1-dimensional array of y coordinates.')
    z: Coords1D = pd.Field(..., title='Z Coordinates', description='1-dimensional array of z coordinates.')
    # NOTE(review): `to_dict` is used below as an attribute (`self.to_dict[ax]`),
    # so it is presumably decorated with `@property` in the original file — confirm.
    def to_dict(self):
        """Return {'x': array, 'y': array, 'z': array} without the type tag."""
        return {key: np.array(value) for (key, value) in self.dict(exclude={TYPE_TAG_STR}).items()}
    def to_list(self):
        """Return the coordinate arrays as a list [x, y, z]."""
        return list(self.to_dict.values())
    def spatial_interp(self, array: Union[(SpatialDataArray, ScalarFieldDataArray)], interp_method: InterpMethod, fill_value: Union[(Literal['extrapolate'], float)]='extrapolate') -> Union[(SpatialDataArray, ScalarFieldDataArray)]:
        """Interpolate `array` onto these coordinates.

        Axes where `array` has a single point are broadcast by selection
        instead of interpolation; with fill_value='extrapolate' and a
        non-'nearest' method, out-of-bounds points fall back to a second
        nearest-neighbor extrapolation pass.
        """
        result_coords = dict(self.to_dict)
        # Degenerate case: an empty target axis -> return an empty result
        # carrying the union of target and source coordinates.
        if any(((len(v) == 0) for v in result_coords.values())):
            for c in array.coords:
                if (c not in result_coords):
                    result_coords[c] = array.coords[c].values
            result_shape = tuple((len(v) for v in result_coords.values()))
            result = DataArray(np.empty(result_shape, dtype=array.dtype), coords=result_coords)
            return result
        # Split axes: single-point axes are handled by isel, others interpolated.
        interp_ax = []
        isel_ax = []
        for ax in 'xyz':
            if (array.sizes[ax] == 1):
                isel_ax.append(ax)
            else:
                interp_ax.append(ax)
        if (len(isel_ax) > 0):
            # Repeat the single source point along each target coordinate.
            array = array.isel({ax: ([0] * len(self.to_dict[ax])) for ax in isel_ax})
            array = array.assign_coords({ax: self.to_dict[ax] for ax in isel_ax})
        if (len(interp_ax) == 0):
            return array
        # The faster sorted path is valid only if every axis is increasing.
        is_sorted = all((np.all((np.diff(array.coords[f]) > 0)) for f in interp_ax))
        interp_param = {'method': interp_method, 'assume_sorted': is_sorted, 'kwargs': {'bounds_error': False, 'fill_value': fill_value}}
        if ((fill_value == 'extrapolate') and (interp_method != 'nearest')):
            # First pass marks out-of-bounds points with NaN ...
            interp_param['kwargs']['fill_value'] = np.nan
        interp_array = array.interp({ax: self.to_dict[ax] for ax in interp_ax}, **interp_param)
        if ((fill_value == 'extrapolate') and (interp_method != 'nearest')):
            # ... second pass (nearest, extrapolating) fills those NaNs.
            interp_param['method'] = 'nearest'
            interp_param['kwargs']['fill_value'] = 'extrapolate'
            nearest_array = array.interp({ax: self.to_dict[ax] for ax in interp_ax}, **interp_param)
            interp_array.values[:] = np.where(np.isnan(interp_array.values), nearest_array.values, interp_array.values)
        return interp_array
def test_get_authenticator_pkce():
    """get_authenticator yields a PKCEAuthenticator and honours TLS options."""
    # Default platform config -> PKCE authenticator.
    default_authn = get_authenticator(PlatformConfig(), get_client_config())
    assert default_authn
    assert isinstance(default_authn, PKCEAuthenticator)
    # insecure_skip_verify=True disables certificate verification.
    insecure_authn = get_authenticator(PlatformConfig(insecure_skip_verify=True), get_client_config())
    assert insecure_authn
    assert isinstance(insecure_authn, PKCEAuthenticator)
    assert insecure_authn._verify is False
    # A CA bundle path is forwarded as the verify target.
    ca_authn = get_authenticator(PlatformConfig(ca_cert_file_path='/file'), get_client_config())
    assert ca_authn
    assert isinstance(ca_authn, PKCEAuthenticator)
    assert ca_authn._verify == '/file'
class OptionSeriesSolidgaugeSonificationContexttracksMappingLowpassFrequency(Options):
    """Option mapping for the lowpass-filter frequency of a sonification
    context track (Highcharts solidgauge series).

    NOTE(review): each identically-named method pair below looks like a
    stripped ``@property`` / ``@<name>.setter`` pair — as written, the
    second definition shadows the first. Confirm against the original
    (likely generated) file.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def s3_setup_args(s3_cfg: configuration.S3Config, anonymous: bool=False) -> Dict[(str, Any)]:
    """Translate an S3Config into fsspec/s3fs constructor keyword arguments.

    Credentials and endpoint are only included when configured; `anonymous`
    forces unauthenticated access.
    """
    args: Dict[(str, Any)] = {'cache_regions': True}
    if s3_cfg.access_key_id:
        args[_FSSPEC_S3_KEY_ID] = s3_cfg.access_key_id
    if s3_cfg.secret_access_key:
        args[_FSSPEC_S3_SECRET] = s3_cfg.secret_access_key
    endpoint = s3_cfg.endpoint
    if endpoint is not None:
        args['client_kwargs'] = {'endpoint_url': endpoint}
    if anonymous:
        args[_ANON] = True
    return args
# NOTE(review): the bare string below looks like the argument of a stripped
# dredd `@hooks.before(...)` decorator — confirm against the original file.
('./contracts/v2/bulk_download/status.md > Bulk Download Status > GET')
def before_bulk_download_status_test(transaction):
    """Dredd hook: request a real bulk download first, then substitute its
    file_name into the documented status-endpoint URL."""
    body = {'filters': {'agency': 50, 'award_types': ['contracts', 'grants'], 'date_range': {'start_date': '2019-01-01', 'end_date': '2019-12-31'}, 'date_type': 'action_date'}, 'award_levels': ['prime_awards', 'sub_awards']}
    response = _post_request_response(protocol=transaction.get('protocol'), host=transaction.get('host'), port=transaction.get('port'), path='/api/v2/bulk_download/awards/', body=body)
    file_name = response['file_name']
    # Replace the placeholder file name from the docs with the real one.
    transaction['fullPath'] = transaction['fullPath'].replace('012_PrimeTransactions_2020-01-13_H20M58S.zip', file_name)
class _AEAYamlDumper(yaml.SafeDumper):
    """SafeDumper that serialises OrderedDict as a plain YAML mapping."""
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Register the representer on the class (idempotent) at first use.
        _AEAYamlDumper.add_representer(OrderedDict, self._dict_representer)
    # NOTE(review): no ``self``/``cls`` parameter — presumably a stripped
    # ``@staticmethod``; confirm against the original file.
    def _dict_representer(dumper: '_AEAYamlDumper', data: OrderedDict) -> MappingNode:
        """Represent an OrderedDict with the default YAML mapping tag."""
        return dumper.represent_mapping(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, data.items())
class TelemetryMessageEmbedded(object):
    """Swagger-generated model for the `_embedded` part of a telemetry
    message (a single `extension` attribute).

    NOTE(review): the two identically-named ``extension`` methods below look
    like a stripped ``@property`` / ``@extension.setter`` pair — as written
    the second definition shadows the first. Confirm against the generated
    original.
    """
    # swagger_types / attribute_map drive the generic (de)serialisation.
    swagger_types = {'extension': 'TelemetryExtension'}
    attribute_map = {'extension': 'extension'}
    def __init__(self, extension=None):
        self._extension = None
        self.discriminator = None
        if (extension is not None):
            self.extension = extension
    def extension(self):
        return self._extension
    def extension(self, extension):
        self._extension = extension
    def to_dict(self):
        """Recursively convert the model (and nested models) to a dict."""
        result = {}
        for (attr, _) in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
            else:
                result[attr] = value
        if issubclass(TelemetryMessageEmbedded, dict):
            for (key, value) in self.items():
                result[key] = value
        return result
    def to_str(self):
        """Return the pretty-printed string form of the model."""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        return self.to_str()
    def __eq__(self, other):
        """Models are equal when all attributes match."""
        if (not isinstance(other, TelemetryMessageEmbedded)):
            return False
        return (self.__dict__ == other.__dict__)
    def __ne__(self, other):
        return (not (self == other))
def mod_group_entry(dp, group, cmd):
    """Build and send an OFPGroupMod from a JSON-style ``group`` dict.

    ``group`` may contain: type, group_id, command_bucket_id and a list of
    buckets, each with actions and bucket properties (weight / watch port /
    watch group / experimenter).
    """
    ofp = dp.ofproto
    parser = dp.ofproto_parser
    # Translate the user-supplied group type string into the OFP constant.
    group_type = str(group.get('type', 'ALL'))
    t = UTIL.ofp_group_type_from_user(group_type)
    group_type = (t if (t != group_type) else None)
    if (group_type is None):
        # NOTE(review): only logged — the message is still built with
        # group_type None; confirm this is the intended behavior.
        LOG.error('Unknown group type: %s', group.get('type'))
    group_id = UTIL.ofp_group_from_user(group.get('group_id', 0))
    command_bucket_id = str_to_int(group.get('command_bucket_id', 0))
    properties = []
    buckets = []
    for bucket in group.get('buckets', []):
        bucket_id = str_to_int(bucket.get('bucket_id', 0))
        # Convert each action dict into a parser action object.
        bucket_actions = []
        for dic in bucket.get('actions', []):
            action = to_action(dp, dic)
            if (action is not None):
                bucket_actions.append(action)
        # Convert bucket properties (weight / watch / experimenter).
        bucket_properties = []
        for p in bucket.get('properties', []):
            group_bp_type = str(p.get('type', 'WEIGHT'))
            t = UTIL.ofp_group_bucket_prop_type_from_user(group_bp_type)
            # Fall back to WEIGHT when the string did not translate.
            group_bp_type = (t if (t != group_bp_type) else ofp.OFPGBPT_WEIGHT)
            if (group_bp_type == ofp.OFPGBPT_WEIGHT):
                weight = str_to_int(p.get('weight', 0))
                bucket_properties.append(parser.OFPGroupBucketPropWeight(type_=group_bp_type, weight=weight))
            elif (group_bp_type == ofp.OFPGBPT_WATCH_PORT):
                watch_port = str_to_int(p.get('watch', dp.ofproto.OFPP_ANY))
                bucket_properties.append(parser.OFPGroupBucketPropWatch(type_=group_bp_type, watch=watch_port))
            elif (group_bp_type == ofp.OFPGBPT_WATCH_GROUP):
                watch_group = str_to_int(p.get('watch', dp.ofproto.OFPG_ANY))
                bucket_properties.append(parser.OFPGroupBucketPropWatch(type_=group_bp_type, watch=watch_group))
            elif (group_bp_type == ofp.OFPGBPT_EXPERIMENTER):
                experimenter = p.get('experimenter', 0)
                exp_type = p.get('exp_type', 0)
                # Experimenter payload may be plain ASCII or base64-encoded.
                data_type = p.get('data_type', 'ascii')
                if (data_type not in ['ascii', 'base64']):
                    LOG.error('Unknown data type: %s', data_type)
                data = p.get('data', '')
                if (data_type == 'base64'):
                    data = base64.b64decode(data)
                bucket_properties.append(parser.OFPGroupBucketPropExperimenter(type_=group_bp_type, experimenter=experimenter, exp_type=exp_type, data=data))
            else:
                LOG.error('Unknown group bucket prop type: %s', p['type'])
        bucket = parser.OFPBucket(bucket_id=bucket_id, actions=bucket_actions, properties=bucket_properties)
        buckets.append(bucket)
    group_mod = parser.OFPGroupMod(dp, cmd, group_type, group_id, command_bucket_id, buckets, properties)
    ofctl_utils.send_msg(dp, group_mod, LOG)
class Test_SendMail():
    """Tests for bodhi.server.mail._send_mail()."""
    # NOTE(review): the bare expressions before each test look like stripped
    # `@mock.patch(...)` / `@mock.patch.dict(...)` decorators — confirm.
    .dict('bodhi.server.mail.config', {'smtp_server': 'smtp.fp.o'})
    ('bodhi.server.mail.log.warning')
    ('bodhi.server.mail.smtplib.SMTP')
    def test_recipients_refused(self, SMTP, warning):
        """SMTPRecipientsRefused is logged as a warning and the session quit."""
        smtp = SMTP.return_value
        smtp.sendmail.side_effect = smtplib.SMTPRecipientsRefused('nooope!')
        mail._send_mail('', '', 'hi')
        SMTP.assert_called_once_with('smtp.fp.o')
        smtp.sendmail.assert_called_once_with('', [''], b'hi')
        warning.assert_called_once_with('"recipient refused" for \'\', {}'.format(repr(smtp.sendmail.side_effect)))
        smtp.quit.assert_called_once_with()
    .dict('bodhi.server.mail.config', {'smtp_server': ''})
    ('bodhi.server.mail.log.info')
    ('bodhi.server.mail.smtplib.SMTP')
    def test_smtp_server_not_configured(self, SMTP, info):
        """Without an smtp_server setting no mail is sent, only an info log."""
        mail._send_mail('', '', 'hi')
        assert (SMTP.call_count == 0)
        info.assert_called_once_with('Not sending email: No smtp_server defined')
class IPv6Dest(Destination, NonVrfPathProcessingMixin):
    """Destination for IPv6 unicast routes in the global (non-VRF) table."""
    ROUTE_FAMILY = RF_IPv6_UC
    def _best_path_lost(self):
        # Capture the outgoing best path before the mixin clears it, so
        # listeners can be told exactly which path was lost.
        old_best_path = self._best_path
        NonVrfPathProcessingMixin._best_path_lost(self)
        self._core_service._signal_bus.best_path_changed(old_best_path, True)
    def _new_best_path(self, best_path):
        NonVrfPathProcessingMixin._new_best_path(self, best_path)
        self._core_service._signal_bus.best_path_changed(best_path, False)
class ResourceNotFoundError(FandoghAPIError):
    """Raised when the API reports a missing resource."""

    # Default shown when neither the caller nor the response supplies one.
    message = 'Resource Not found'

    def __init__(self, response, message=None):
        self.response = response
        if message:
            self.message = message
        # Prefer the server-provided message when the response body is JSON,
        # falling back to whatever message is currently set.
        if hasattr(self.response, 'json'):
            self.message = self.response.json().get('message', self.message)
class Sidebar():
def __init__(self):
self.ADDON_NOTES_TAB: int = 1
self.PDF_IMPORT_TAB: int = 2
self.SPECIAL_SEARCHES_TAB: int = 3
self.tab: int = self.ADDON_NOTES_TAB
self._editor: Editor = None
def set_editor(self, editor: Editor):
self._editor = editor
def _html(self) -> HTML:
tab_displayed_name = self._tab_displayed_name()
if (self.tab == self.ADDON_NOTES_TAB):
(tmap, tcounts) = get_all_tags_with_counts()
def iterateMap(tmap, prefix, start=False):
if start:
html = "<ul class='deck-sub-list outer'>"
else:
html = "<ul class='deck-sub-list'>"
for (key, value) in tmap.items():
full = (((prefix + '::') + key) if prefix else key)
html += ('<li class=\'deck-list-item\' onclick="event.stopPropagation(); searchUserNoteTag(event, \'%s\');"><b class=\'exp\' data-t=\'%s\'>%s</b> %s <span class=\'siac-tag-cnt\'>%s</span><i class=\'siac-tl-plus fa fa-plus mr-5 ml-5\' onclick=\'event.stopPropagation(); pycmd("siac-create-note-tag-prefill %s") \'></i>%s</li>' % (full, full.replace("'", ''), ('[+]' if value else ''), utility.text.trim_if_longer_than(key, 35), tcounts.get(full.lower(), '?'), full, iterateMap(value, full, False)))
html += '</ul>'
return html
tag_html = iterateMap(tmap, '', True)
tag_len = (len(tmap) if (tmap is not None) else 0)
scheduled_for_today = get_notes_scheduled_for_today()
if ((scheduled_for_today is not None) and (len(scheduled_for_today) > 0)):
sched_today_menu_item = f"""<div class='siac-notes-sidebar-item' onclick='pycmd("siac-r-show-due-today")'> Due today ({len(scheduled_for_today)})</div>"""
else:
sched_today_menu_item = ''
tab_html = filled_template('sidebar_main/sidebar_addon_tab', dict(sched_today_menu_item=sched_today_menu_item, tag_len=tag_len, tag_html=tag_html))
elif (self.tab == self.PDF_IMPORT_TAB):
folders_to_search = conf_or_def('pdf.import.folders_to_search', [])
exp = ''
if (len(folders_to_search) == 0):
folders = '\n <div style=\'padding: 15px; box-sizing: border-box; word-break: break-word;\' class=\'siac-sidebar-bg h-100\'>\n <center>\n <strong>\n To browse local folders for pdfs, add some entries to the config option\n "pdf.import.folders_to_search", e.g. <br><br>\n "pdf.import.folders_to_search" : ["Some/Path/Documents/Uni", "Some/Path/Documents/Unsorted"] <br><br>\n \n The given folders (including their subfolders!) will be scanned for *.pdf files.\n Don\'t use too large folders here, because they are searched everytime the tab is opened so you might see a delay then. \n </strong>\n </center>\n </div>\n '
else:
cleaned = []
for f in folders_to_search:
if (len(f.strip()) == 0):
continue
cleaned.append(f.replace('\\', '/'))
if (len(cleaned) == 0):
folders = "\n <div style='padding: 15px;' class='siac-sidebar-bg'>\n <center style='margin-top: 100px;'>\n <strong>\n Could not find any pdf files in the specified folders.\n </strong>\n </center>\n </div>\n "
else:
files = []
for f in cleaned:
files += utility.misc.find_pdf_files_in_dir_recursive(f, cut_path=False)
map = utility.tags.to_tag_hierarchy(files, sep='/')
map = utility.tags.flatten_map(map, '/')
def iterateMap(tmap, prefix, start=False):
if start:
html = "<ul class='deck-sub-list outer'>"
else:
html = "<ul class='deck-sub-list'>"
for (key, value) in tmap.items():
full = (((prefix + '/') + key) if prefix else key)
if (isMac or (isLin and (not full.startswith('/')))):
full = f'/{full}'
click = (f'pycmd("siac-create-note-source-prefill {full}")' if full.endswith('.pdf') else '')
exp = ('[+]' if value else '')
should_bold = ("style='font-weight: bold;'" if value else '')
html = f"{html}<li class='deck-list-item' onclick='event.stopPropagation(); {click}' {should_bold}><b class='exp'>{exp}</b> {utility.text.trim_if_longer_than(key, 35)}{iterateMap(value, full, False)}</li>"
html += '</ul>'
return html
folders = iterateMap(map, '', True)
folders = f'''<div style='margin-top: 15px;'>
{folders}
</div>'''
exp = f'''
<div class='' style='flex: 1 0 auto;'>
<div class='w-100' style='margin-top: 20px;'><b>PDFs in Folders</b>
<b class='siac-tags-exp-icon' style='margin-right: 15px; padding: 0 2px 0 2px;' onclick='noteSidebarCollapseAll();'>▲</b>
<b class='siac-tags-exp-icon mr-5' style='padding: 0 2px 0 2px;' onclick='noteSidebarExpandAll();'>▼</b>
</div>
</div>
'''
tab_html = f'''
{exp}
<div class='' style='flex: 1 0 auto; overflow-y: auto;'>
{folders}
</div>
'''
elif (self.tab == self.SPECIAL_SEARCHES_TAB):
anki_tags = mw.col.tags.all()
tmap = utility.tags.to_tag_hierarchy(anki_tags)
def iterateMap(tmap, prefix, start=False):
if start:
html = "<ul class='deck-sub-list outer'>"
else:
html = "<ul class='deck-sub-list'>"
for (key, value) in tmap.items():
full = (((prefix + '::') + key) if prefix else key)
html += ('<li class=\'deck-list-item\' onclick="event.stopPropagation(); pycmd(\'siac-r-search-tag %s\');"><b class=\'exp\' data-t=\'%s\'>%s</b> %s %s</li>' % (full, full.replace("'", ''), ('[+]' if value else ''), utility.text.trim_if_longer_than(key, 35), iterateMap(value, full, False)))
html += '</ul>'
return html
tag_html = iterateMap(tmap, '', True)
tab_html = filled_template('sidebar_main/sidebar_anki_tab', {'tags': tag_html})
return filled_template('sidebar_main/sidebar', dict(tab_html=tab_html, tab_displayed_name=tab_displayed_name))
def display(self):
html = self._html()
self._editor.web.eval(('var sbFn = () => {\n if (!document.getElementById(\'resultsWrapper\')) {\n setTimeout(sbFn, 50);\n return;\n }\n if (document.getElementById(\'siac-notes-sidebar\')) {\n $(\'#siac-notes-sidebar\').remove();\n }\n document.getElementById(\'resultsWrapper\').insertAdjacentHTML("afterbegin", `%s`); \n if (typeof(window._siacSidebar) === \'undefined\') {\n window._siacSidebar = {\n addonTagsExpanded : [],\n ankiTagsExpanded : [],\n tab: \'\',\n };\n }\n window._siacSidebar.tab = %s;\n \n $(\'#siac-notes-sidebar .exp\').click(function(e) {\n e.preventDefault();\n e.stopPropagation();\n let icn = $(this);\n if (icn.text()) {\n if (icn.text() === \'[+]\') {\n icn.text(\'[-]\');\n if (_siacSidebar.tab === 1 || _siacSidebar.tab === 3) {\n let exp_list = window._siacSidebar.tab === 1 ? _siacSidebar.addonTagsExpanded : _siacSidebar.ankiTagsExpanded;\n if (exp_list.indexOf(this.dataset.t) === -1) {\n exp_list.push(this.dataset.t);\n }\n }\n } else {\n icn.text(\'[+]\');\n if (_siacSidebar.tab === 1 || _siacSidebar.tab === 3) {\n let exp_list = window._siacSidebar.tab === 1 ? _siacSidebar.addonTagsExpanded : _siacSidebar.ankiTagsExpanded;\n if (exp_list.indexOf(this.dataset.t) !== -1) {\n exp_list.splice(exp_list.indexOf(this.dataset.t), 1);\n }\n }\n }\n }\n $(this).parent().children(\'ul\').toggle();\n });\n let exp = [];\n let scrollTop = 0;\n if (window._siacSidebar.tab === 1) {\n exp = window._siacSidebar.addonTagsExpanded;\n scrollTop = window._siacSidebar.addonTagsScrollTop;\n } else if (window._siacSidebar.tab === 3) {\n exp = window._siacSidebar.ankiTagsExpanded;\n scrollTop = window._siacSidebar.ankiTagsScrollTop;\n }\n for (var t of exp) {\n $(\'#siac-notes-sidebar .exp[data-t="\'+t+\'"]\').trigger(\'click\');\n }\n if (scrollTop && scrollTop > 0) {\n $(\'.tag_scroll\').first().get(0).scrollTop = scrollTop;\n }\n };\n sbFn();\n ' % (html, self.tab)))
def hide(self):
    """Tear the sidebar out of the DOM and reset the results padding."""
    js = "$('#siac-notes-sidebar').remove(); $('#resultsWrapper').css('padding-left', 0);"
    self._editor.web.eval(js)
def refresh_tab(self, tab: int):
    """Re-render the sidebar, but only if it is visible and *tab* is active."""
    if not conf_or_def('notes.sidebar.visible', False):
        return
    if self.tab == tab:
        self.refresh()
def refresh(self):
    """Redisplay the sidebar; a no-op when no editor is attached."""
    if self._editor is not None:
        self.display()
def show_tab(self, tab: int):
    """Make *tab* the active sidebar tab and re-render."""
    self.tab = tab
    self.refresh()
def _tab_displayed_name(self) -> str:
if (self.tab == self.PDF_IMPORT_TAB):
return 'PDF Import'
elif (self.tab == self.ADDON_NOTES_TAB):
return 'Add-on Notes'
elif (self.tab == self.SPECIAL_SEARCHES_TAB):
return 'Anki Notes' |
def tuplify_forward(model, X, is_train):
    """Feed the same input X through every child layer and tuple the outputs.

    Returns (Ys, backprop) where Ys is a tuple with one output per layer and
    backprop maps a matching tuple of gradients back to a single dX.
    """
    outputs = []
    grad_fns = []
    for layer in model.layers:
        out, grad_fn = layer(X, is_train)
        outputs.append(out)
        grad_fns.append(grad_fn)

    def backprop_tuplify(dYs):
        # Every layer consumed the same X, so the input gradient is the
        # sum of each layer's backpropagated gradient.
        grads = [fn(dY) for fn, dY in zip(grad_fns, dYs)]
        total = grads[0]
        for g in grads[1:]:
            total += g  # in-place add, matching array semantics
        return total

    return tuple(outputs), backprop_tuplify
def verify_deposit_data_json(filefolder: str, credentials: Sequence[Credential]) -> bool:
    """Validate every deposit in a deposit-data JSON file against its credential.

    Returns True only when each deposit entry passes ``validate_deposit`` with
    its corresponding credential. Shows a click progress bar while verifying.
    """
    with open(filefolder, 'r') as f:
        deposit_json = json.load(f)
    # zip() silently truncates to the shorter sequence, which would let extra,
    # unverified deposits (or credentials) slip through — treat any count
    # mismatch as a verification failure up front.
    if len(deposit_json) != len(credentials):
        return False
    with click.progressbar(deposit_json, label=load_text(['msg_deposit_verification']), show_percent=False, show_pos=True) as deposits:
        # Generator keeps the progress bar advancing deposit-by-deposit.
        return all(validate_deposit(deposit, credential) for deposit, credential in zip(deposits, credentials))
class PortModFlood(base_tests.SimpleDataPlane):
    """OpenFlow conformance test: toggle a port's OFPPC_NO_FLOOD bit via
    OFPT_PORT_MOD, verify the switch applied it, then restore the original
    configuration."""

    def runTest(self):
        logging.info('Running PortModFlood Test')
        of_ports = config['port_map'].keys()
        of_ports.sort()
        # Snapshot the current configuration of the first mapped port.
        logging.info('Sends Features Request and retrieve Port Configuration from reply')
        (hw_addr, port_config, advert) = port_config_get(self.controller, of_ports[0])
        self.assertTrue((port_config is not None), 'Did not get port config')
        logging.debug(((('No flood bit port ' + str(of_ports[0])) + ' is now ') + str((port_config & ofp.OFPPC_NO_FLOOD))))
        # Flip NO_FLOOD: XOR toggles the bit, the mask limits the change to it.
        logging.info('Modify Port Configuration using Port Modification Message:OFPT_PORT_MOD')
        rv = port_config_set(self.controller, of_ports[0], (port_config ^ ofp.OFPPC_NO_FLOOD), ofp.OFPPC_NO_FLOOD)
        self.assertTrue((rv != (- 1)), 'Error sending port mod')
        # Barrier guarantees the switch processed the port mod before re-reading.
        do_barrier(self.controller)
        logging.info('Verify the change and then set it back')
        (hw_addr, port_config2, advert) = port_config_get(self.controller, of_ports[0])
        logging.debug(((('No flood bit port ' + str(of_ports[0])) + ' is now ') + str((port_config2 & ofp.OFPPC_NO_FLOOD))))
        self.assertTrue((port_config2 is not None), 'Did not get port config2')
        self.assertTrue(((port_config2 & ofp.OFPPC_NO_FLOOD) != (port_config & ofp.OFPPC_NO_FLOOD)), 'Bit change did not take')
        # Restore the original configuration so later tests see a clean port.
        rv = port_config_set(self.controller, of_ports[0], port_config, ofp.OFPPC_NO_FLOOD)
        self.assertTrue((rv != (- 1)), 'Error sending port mod')
        do_barrier(self.controller)
def extractHaraftranslationWordpressCom(item):
    """Map a haraftranslation.wordpress.com feed item to a release message.

    Returns None for previews or items without a chapter/volume, a release
    message for recognized tags, and False when nothing matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    known_tags = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series, tl_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def extractKobatoChanDaiSukiScan(item):
    """Map a KobatoChanDaiSuki feed item to a release message.

    Returns None for previews, untagged chapters, or explicitly excluded
    categories (webtoons, announcements, staff posts); a release message when
    the item's tags or title prefix match a known series; False otherwise.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if ((not (chp or vol)) or ('preview' in item['title'].lower())):
        return None
    # Skip non-novel content outright.
    if ('Lookism' in item['tags']):
        return None
    if ('webtoon' in item['tags']):
        return None
    if ('*Announcements*' in item['tags']):
        return None
    if ('*STAFF ONLY*' in item['tags']):
        return None
    # (site tag, canonical series name, translation type)
    tagmap = [("Can't Stop Craving Potions Again and Again", "Can't Stop Craving Potions Again and Again", 'translated'), ("Can't Stop Craving Potions", "Can't Stop Craving Potions", 'translated'), ('Royal Roader on My Own', 'Royal Roader on My Own', 'translated'), ('A Bird That Drinks Tears', 'A Bird That Drinks Tears', 'translated'), ('All Things Wrong', 'Doing All Things Wrong And Somehow Becoming The Best In The Game', 'translated'), ('Cheat Skill: Sleep Learning', 'Cheat Skill: Sleep Learning', 'translated'), ('Coder Lee YongHo', 'Coder Lee YongHo', 'translated'), ('FFF-Class Trashero', 'FFF-Class Trashero', 'translated'), ('Dragon Poor', 'Dragon Poor', 'translated'), ('Everyone Else is a Returnee', 'Everyone Else is a Returnee', 'translated'), ('God of Cooking', 'God of Cooking', 'translated'), ('God of Crime', 'God of Crime', 'translated'), ('God of Music', 'God of Music', 'translated'), ('God of Thunder', 'God of Thunder', 'translated'), ('God-level Bodyguard in the City', 'God-level Bodyguard in the City', 'translated'), ('Green Skin', 'Green Skin', 'translated'), ('I am the monarch', 'I am the Monarch', 'translated'), ('Kenkyo kenjitsu o motto ni ikite orimasu!', 'Kenkyo, Kenjitsu o Motto ni Ikite Orimasu!', 'translated'), ('Life of the Damned', 'Life of the Damned', 'translated'), ('Forest of Funerals', 'Forest of Funerals', 'translated'), ('Link the Orc', 'Link the Orc', 'translated'), ('maou no hajimekata', 'Maou no Hajimekata', 'translated'), ('Miracle Drawing!', 'Miracle Drawing!', 'translated'), ('Omni Genius', 'Omni Genius', 'translated'), ('Omocha no Kyousou-sama', 'Omocha no Kyousou-sama', 'translated'), ('One Man Army', 'One Man Army', 'translated'), ('Reincarnator', 'Reincarnator', 'translated'), ('Rise Strongest Warrior', 'Rise Strongest Warrior', 'translated'), ('Solo Clear', 'Solo Clear', 'translated'), ('Survival World RPG', 'Survival World RPG', 'translated'), ('Ten Thousand Heaven Controlling Sword', 'Ten Thousand Heaven Controlling Sword', 'translated'), ('The Bird That Drinks Tears', 'The Bird That Drinks Tears', 'translated'), ('The Sorcerer Laughs in the Mirror', 'The Sorcerer Laughs in the Mirror', 'translated'), ('The Stone of Days', 'The Stone of Days', 'translated'), ('The Strongest System', 'The Strongest System', 'translated'), ('Wagahai no Kare wa Baka de aru', 'Wagahai no Kare wa Baka de aru', 'translated'), ('When The Star Flutters', 'When The Star Flutters', 'translated'), ('Magician of Insa-Dong', 'Magician of Insa-Dong', 'translated'), ('Hero', 'Hero', 'oel'), ('Immortal Ascension Tower', 'Immortal Ascension Tower', 'oel'), ("The Overlord's Elite is now a Human?!", "The Overlord's Elite is now a Human?!", 'oel'), ("Titan's Throne", "Titan's Throne", 'oel'), ('Conquest', 'Conquest', 'oel'), ('The Empyrean Nethervoid', 'The Empyrean Nethervoid', 'oel')]
    for (tag, sname, tl_type) in tagmap:
        if (tag in item['tags']):
            # NOTE: unlike the title-based path below, this path does not pass
            # postfix through — presumably intentional; verify against callers.
            return buildReleaseMessageWithType(item, sname, vol, chp, frag=frag, tl_type=tl_type)
    # Fallback: match on translator-specific title prefixes.
    titlemap = [('fujimaru wrote a new post, FFF-Class Trashero - Chapter', 'FFF-Class Trashero', 'translated'), ('kobatochandaisuki wrote a new post, I Am the Monarch - Chapter', 'I Am the Monarch', 'translated'), ('Engebu wrote a new post, I Am the Monarch - Chapter', 'I Am the Monarch', 'translated'), ('Calvis wrote a new post, Dragon Poor - Chapter', 'Dragon Poor', 'translated'), ('Calvis wrote a new post, Green Skin - Chapter', 'Green Skin', 'translated'), ('Calvis wrote a new post, Rise, Strongest Warrior - Chapter', 'Rise, Strongest Warrior', 'translated'), ('Calvis wrote a new post, The Stone of Days - ', 'The Stone of Days', 'translated'), ('Calvis wrote a new post, The Stone of Days - Chapter', 'The Stone of Days', 'translated'), ('csvtranslator wrote a new post, I Am the Monarch - Chapter', 'I Am the Monarch', 'translated'), ('Koukouseidesu wrote a new post, Everyone Else is a Returnee - Chapter', 'Everyone Else is a Returnee', 'translated'), ('kuhaku wrote a new post, Solo Clear - Chapter ', 'Solo Clear', 'translated'), ('miraclerifle wrote a new post, God of Cooking - Chapter', 'God of Cooking', 'translated'), ('miraclerifle wrote a new post, Royal Roader on My Own - Chapter', 'Royal Roader on My Own', 'translated'), ('pyrenose wrote a new post, Rise, Strongest Warrior - Chapter', 'Rise, Strongest Warrior', 'translated'), ('Saquacon wrote a new post, All Things Wrong - Chapter', 'Doing All Things Wrong And Somehow Becoming The Best In The Game', 'translated')]
    for (titlecomponent, name, tl_type) in titlemap:
        if (titlecomponent.lower() in item['title'].lower()):
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_span_finder_model():
    """The span-finder model emits one prediction per token, each of width 2."""
    nlp = Language()
    docs = [nlp('This is an example.'), nlp('This is the second example.')]
    docs[0].spans[TRAINING_KEY] = [docs[0][3:4]]
    docs[1].spans[TRAINING_KEY] = [docs[1][3:5]]
    token_count = sum(len(doc) for doc in docs)
    config = Config().from_str(span_finder_default_config).interpolate()
    model = registry.resolve(config)['model']
    model.initialize(X=docs)
    scores = model.predict(docs)
    assert len(scores) == token_count
    assert len(scores[0]) == 2
class OptionPlotoptionsColumnpyramidTooltip(Options):
    """Tooltip configuration for the Highcharts columnpyramid plot options.

    Each option appears as a getter/setter pair: the first ``def`` reads the
    stored value (with its Highcharts default), the second writes it.
    NOTE(review): as written, the second definition of each name shadows the
    first — these pairs look like stripped ``@property`` / ``@name.setter``
    decorators; confirm against the generator that produced this file.
    """

    # Cluster tooltip format used for marker-cluster points.
    def clusterFormat(self):
        return self._config_get('Clustered points: {point.clusterPointsAmount}')
    def clusterFormat(self, text: str):
        self._config(text, js_type=False)
    # Per-time-unit header formats; returns a nested options object.
    def dateTimeLabelFormats(self) -> 'OptionPlotoptionsColumnpyramidTooltipDatetimelabelformats':
        return self._config_sub_data('dateTimeLabelFormats', OptionPlotoptionsColumnpyramidTooltipDatetimelabelformats)
    # Pixel distance from the point to the tooltip (default 6).
    def distance(self):
        return self._config_get(6)
    def distance(self, num: float):
        self._config(num, js_type=False)
    def followPointer(self):
        return self._config_get(False)
    def followPointer(self, flag: bool):
        self._config(flag, js_type=False)
    def followTouchMove(self):
        return self._config_get(True)
    def followTouchMove(self, flag: bool):
        self._config(flag, js_type=False)
    def footerFormat(self):
        return self._config_get('')
    def footerFormat(self, text: str):
        self._config(text, js_type=False)
    def format(self):
        return self._config_get('undefined')
    def format(self, text: str):
        self._config(text, js_type=False)
    def headerFormat(self):
        return self._config_get(None)
    def headerFormat(self, text: str):
        self._config(text, js_type=False)
    def nullFormat(self):
        return self._config_get(None)
    def nullFormat(self, text: str):
        self._config(text, js_type=False)
    def nullFormatter(self):
        return self._config_get(None)
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)
    def pointFormat(self):
        return self._config_get(None)
    def pointFormat(self, text: str):
        self._config(text, js_type=False)
    def pointFormatter(self):
        return self._config_get(None)
    def pointFormatter(self, value: Any):
        self._config(value, js_type=False)
    def valueDecimals(self):
        return self._config_get(None)
    def valueDecimals(self, num: float):
        self._config(num, js_type=False)
    def valuePrefix(self):
        return self._config_get(None)
    def valuePrefix(self, text: str):
        self._config(text, js_type=False)
    def valueSuffix(self):
        return self._config_get(None)
    def valueSuffix(self, text: str):
        self._config(text, js_type=False)
    def xDateFormat(self):
        return self._config_get(None)
    def xDateFormat(self, text: str):
        self._config(text, js_type=False)
def kill_running_rally_instances():
    """Terminate all running Rally processes, sparing the esrallyd daemon."""
    def is_rally(p):
        # Direct esrally/rally binaries, or a python interpreter whose
        # command line mentions esrally but not the esrallyd daemon.
        if p.name() in ('esrally', 'rally'):
            return True
        if not p.name().lower().startswith('python'):
            return False
        args = p.cmdline()
        return any('esrally' in a for a in args) and not any('esrallyd' in a for a in args)
    kill_all(is_rally)
class ActivityFailureInfo(betterproto.Message):
    """Protobuf message describing a failed Temporal activity.

    NOTE(review): betterproto messages are normally declared with a
    ``@dataclass(eq=False, repr=False)`` decorator — it appears stripped here;
    confirm against the generated source.
    """
    # Field numbers (1-6) mirror the .proto definition.
    scheduled_event_id: int = betterproto.int64_field(1)
    started_event_id: int = betterproto.int64_field(2)
    identity: str = betterproto.string_field(3)
    activity_type: v1common.ActivityType = betterproto.message_field(4)
    activity_id: str = betterproto.string_field(5)
    retry_state: v1enums.RetryState = betterproto.enum_field(6)
class OptionSeriesScatterSonificationDefaultinstrumentoptionsMapping(Options):
    """Sonification mapping options for scatter-series default instruments.

    Every accessor lazily creates and returns a nested sub-options object for
    one mappable audio parameter. NOTE(review): upstream generators usually
    mark these as ``@property`` — decorators appear stripped here; confirm.
    """

    def frequency(self) -> 'OptionSeriesScatterSonificationDefaultinstrumentoptionsMappingFrequency':
        return self._config_sub_data('frequency', OptionSeriesScatterSonificationDefaultinstrumentoptionsMappingFrequency)
    def gapBetweenNotes(self) -> 'OptionSeriesScatterSonificationDefaultinstrumentoptionsMappingGapbetweennotes':
        return self._config_sub_data('gapBetweenNotes', OptionSeriesScatterSonificationDefaultinstrumentoptionsMappingGapbetweennotes)
    def highpass(self) -> 'OptionSeriesScatterSonificationDefaultinstrumentoptionsMappingHighpass':
        return self._config_sub_data('highpass', OptionSeriesScatterSonificationDefaultinstrumentoptionsMappingHighpass)
    def lowpass(self) -> 'OptionSeriesScatterSonificationDefaultinstrumentoptionsMappingLowpass':
        return self._config_sub_data('lowpass', OptionSeriesScatterSonificationDefaultinstrumentoptionsMappingLowpass)
    def noteDuration(self) -> 'OptionSeriesScatterSonificationDefaultinstrumentoptionsMappingNoteduration':
        return self._config_sub_data('noteDuration', OptionSeriesScatterSonificationDefaultinstrumentoptionsMappingNoteduration)
    def pan(self) -> 'OptionSeriesScatterSonificationDefaultinstrumentoptionsMappingPan':
        return self._config_sub_data('pan', OptionSeriesScatterSonificationDefaultinstrumentoptionsMappingPan)
    def pitch(self) -> 'OptionSeriesScatterSonificationDefaultinstrumentoptionsMappingPitch':
        return self._config_sub_data('pitch', OptionSeriesScatterSonificationDefaultinstrumentoptionsMappingPitch)
    def playDelay(self) -> 'OptionSeriesScatterSonificationDefaultinstrumentoptionsMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionSeriesScatterSonificationDefaultinstrumentoptionsMappingPlaydelay)
    def time(self) -> 'OptionSeriesScatterSonificationDefaultinstrumentoptionsMappingTime':
        return self._config_sub_data('time', OptionSeriesScatterSonificationDefaultinstrumentoptionsMappingTime)
    def tremolo(self) -> 'OptionSeriesScatterSonificationDefaultinstrumentoptionsMappingTremolo':
        return self._config_sub_data('tremolo', OptionSeriesScatterSonificationDefaultinstrumentoptionsMappingTremolo)
    def volume(self) -> 'OptionSeriesScatterSonificationDefaultinstrumentoptionsMappingVolume':
        return self._config_sub_data('volume', OptionSeriesScatterSonificationDefaultinstrumentoptionsMappingVolume)
def test_in_thread4():
    """Events set from another thread are observed by a reaction bound to a
    loop integrated in that thread; final assertion checks initial value,
    thread-local set, and main-thread set are all delivered in order."""
    res = []
    class MyComp1(event.Component):
        foo = event.IntProp(0, settable=True)
        # NOTE(review): the bare ('foo') below looks like a stripped
        # @event.reaction('foo') decorator — confirm against upstream.
        ('foo')
        def on_foo(self, *events):
            for ev in events:
                res.append(ev.new_value)
    def main():
        # Runs in the worker thread: attach a fresh asyncio loop, set foo,
        # then run the loop briefly so reactions flush.
        loop = asyncio.new_event_loop()
        event.loop.integrate(loop, reset=False)
        component.set_foo(3)
        loop.call_later(0.4, loop.stop)
        loop.run_forever()
        event.loop.reset()
    component = MyComp1()
    t = threading.Thread(target=main)
    t.start()
    time.sleep(0.2)  # give the worker time to integrate its loop first
    component.set_foo(4)
    t.join()
    event.loop.integrate(reset=True)  # restore the main-thread loop
    assert (res == [0, 3, 4])
class TaxRateDetails(QuickbooksBaseObject):
    """QuickbooksBaseObject describing a single tax-rate line.

    Attributes mirror the QBO TaxRateDetails fields; all default to None
    except TaxApplicableOn, which QBO defaults to 'Sales'.
    """
    qbo_object_name = 'TaxRateDetails'

    def __init__(self):
        # py3 zero-argument super() — the file already uses py3-only syntax.
        super().__init__()
        self.TaxRateName = None
        self.TaxRateId = None
        self.RateValue = None
        self.TaxAgencyId = None
        self.TaxApplicableOn = 'Sales'

    def __str__(self):
        return self.TaxRateName
def award_with_toptier_agency(id, toa=0, outlay=0):
    """Test fixture: bake an AwardSearch (plus its agency and one transaction)
    carrying COVID DEFC 'M' obligation/outlay totals.

    :param id: reused as award_id, toptier_agency_id and transaction_id
    :param toa: total COVID obligation amount
    :param outlay: total COVID outlay amount
    """
    agency = baker.make('references.Agency', toptier_agency_id=id, toptier_flag=True, _fill_optional=True)
    # NOTE(review): 'obligaton' below is misspelled — it may deliberately match
    # the schema/expected payload elsewhere; confirm before correcting.
    a1 = baker.make('search.AwardSearch', award_id=id, type='A', funding_agency_id=agency.id, funding_toptier_agency_code=f'00{agency.id}', total_loan_value=0, latest_transaction_id=id, disaster_emergency_fund_codes=['M'], total_covid_obligation=toa, total_covid_outlay=outlay, covid_spending_by_defc=[{'defc': 'M', 'outlay': outlay, 'obligaton': toa}], action_date='2020-10-01')
    baker.make('search.TransactionSearch', transaction_id=id, award=a1, action_date='2020-04-01', is_fpds=True, funding_agency_id=agency.id)
    return a1
def guess_new_para_interactive(wordmap):
    """Interactively guess a Finnish inflection paradigm for *wordmap*.

    Narrows candidate paradigms by the lemma's ending (most specific suffix
    first) and vowel harmony (is_back), then asks the user to choose.
    Returns the chosen paradigm name, 'X_IGNORE' for non-words, or None.

    NOTE(review): several candidate lists look corrupted by an ASCII-only
    export — e.g. 'NOUN_ORINOUN_RUUHI', 'ADJ_AHNEADJ_OIKEE' and
    'PROPN_PROBLEEMAPROPN_VOIMA' appear to be two fused entries,
    'PROPN_VAINAA' is duplicated, and the second endswith('o') branch
    (MOMMO/HAIRIO/JAATELO) was presumably endswith('ö') with umlauts
    stripped. Confirm against the original source before relying on them.
    """
    print('What is:', wordmap)
    if (not wordmap['lemma']):
        print('No guessing paras without lemmas yet')
        return 'X_IGNORE'
    s = wordmap['lemma']
    para = None
    # Most specific suffixes first; each choose_from may return None,
    # in which case we fall through to broader suffix classes.
    if s.endswith('kko'):
        para = choose_from(['NOUN_UKKO', 'NOUN_LEPAKKO', 'PROPN_UKKO', 'PROPN_LEPAKKO', 'ADJ_KOLKKO', 'ADJ_HUPAKKO'])
    elif s.endswith('nko'):
        para = choose_from(['NOUN_RUNKO', 'PROPN_RUNKO', 'ADJ_LENKO'])
    elif s.endswith('oko'):
        para = choose_from(['NOUN_RUOKO', 'PROPN_RUOKO', 'NOUN_KOKO', 'PROPN_KOKO'])
    elif s.endswith('tto'):
        para = choose_from(['NOUN_HIRTTO', 'PROPN_HIRTTO'])
    if para:
        return para
    if s.endswith('ko'):
        para = choose_from(['NOUN_PELKO', 'NOUN_VIHKO', 'NOUN_TEKO', 'PROPN_PELKO', 'PROPN_VIHKO', 'PROPN_TEKO', 'ADJ_LAKO'])
        if para:
            return para
    if s.endswith('o'):
        para = choose_from(['NOUN_TALO', 'PROPN_TALO', 'ADJ_TUMMAHKO'])
        if para:
            return para
    if s.endswith('u'):
        para = choose_from(['NOUN_ASU', 'NOUN_SEIKKAILU', 'NOUN_KUNGFU', 'PROPN_ASU', 'PROPN_SEIKKAILU', 'PROPN_KUNGFU', 'ADJ_VALKAISTU'])
        if para:
            return para
    if s.endswith('y'):
        para = choose_from(['NOUN_KARRY', 'NOUN_VEHKEILY', 'NOUN_GAY', 'PROPN_KARRY', 'PROPN_SPOTIFY', 'PROPN_GAY', 'PROPN_JOCKEY', 'ADJ_HAPAISTY'])
        if para:
            return para
    # NOTE(review): same suffix as the earlier endswith('o') branch — likely
    # originally endswith('ö'); see docstring.
    if s.endswith('o'):
        para = choose_from(['NOUN_MOMMO', 'NOUN_HAIRIO', 'NOUN_JAATELO', 'PROPN_MOMMO', 'PROPN_HAIRIO', 'PROPN_JAATELO'])
        if para:
            return para
    if s.endswith('aa'):
        para = choose_from(['NOUN_MAA', 'NOUN_VAINAA', 'NOUN_NUGAA', 'PROPN_MAA', 'PROPN_VAINAA', 'PROPN_VAINAA'])
        if para:
            return para
    if s.endswith('a'):
        para = choose_from(['NOUN_ASEMA', 'NOUN_KIRJA', 'NOUN_KITARA', 'NOUN_MAKKARA', 'NOUN_PROBLEEMA', 'NOUN_VOIMA', 'PROPN_ASEMA', 'PROPN_KIRJA', 'PROPN_MINERVA', 'PROPN_KITARA', 'PROPN_BOTSWANA', 'PROPN_LAHELMA', 'PROPN_MAKKARA', 'PROPN_PROBLEEMAPROPN_VOIMA', 'PROPN_WADA', 'PROPN_FIFA'])
        if para:
            return para
    if s.endswith('i'):
        if is_back(s):
            para = choose_from(['NOUN_RUUVI', 'NOUN_KANAALI', 'NOUN_ONNI', 'NOUN_PROTOLYYSI', 'NOUN_PYRAMIDI', 'NOUN_ORINOUN_RUUHI', 'NOUN_TULI', 'NOUN_SAVI', 'NOUN_SANKARI', 'NOUN_AAMUKAKSI', 'PROPN_RUUVI', 'PROPN_KANAALI', 'PROPN_ONNI', 'PROPN_HKI', 'PROPN_RUUHI', 'PROPN_TULI', 'PROPN_SAVI'])
            if para:
                return para
    if s.endswith('ee'):
        if is_back(s):
            para = choose_from(['NOUN_MATEE', 'NOUN_PATEE', 'NOUN_TOKEE', 'PROPN_PATEE', 'PROPN_TOKEE'])
        else:
            para = choose_from(['NOUN_TEE', 'PROPN_TEE', 'PROPN_LENTTEE'])
        if para:
            return para
    if s.endswith('e'):
        if is_back(s):
            para = choose_from(['NOUN_NALLE', 'NOUN_ASTE', 'NOUN_ZOMBIE', 'NOUN_BRASSERIE', 'NOUN_REGGAE', 'ADJ_AHNEADJ_OIKEE', 'ADJ_TOOPE', 'PROPN_ASTE', 'PROPN_NALLE', 'PROPN_EUGENE', 'PROPN_ZOMBIE', 'PROPN_BRASSERIE', 'PROPN_FONDUE'])
        else:
            para = choose_from(['NOUN_NISSE', 'NOUN_PISTE', 'PROPN_BERNIE', 'PROPN_BRIE', 'PROPN_NISSE', 'PROPN_PISTE', 'PROPN_SELENE', 'PROPN_BRIE'])
        if para:
            return para
    # Consonant-final: loanword paradigms split by vowel harmony.
    consonants = 'bcdfghjklmnpqrstvwxz'
    for c in consonants:
        if s.endswith(c):
            if is_back(s):
                para = choose_from(['NOUN_PUNK', 'NOUN_STADION', 'PROPN_PUNK', 'PROPN_STADION'])
            else:
                para = choose_from(['NOUN_ZEN', 'NOUN_BESSERWISSER', 'PROPN_ZEN', 'PROPN_BESSERWISSER'])
            if para:
                return para
    else:
        # NOTE(review): this else is attached to the for-loop (runs when no
        # break occurs — i.e. always, since the loop never breaks); it offers
        # the ignore option as a last resort.
        print('conclusion: not-a-word (new para??)')
        para = choose_from(['X_IGNORE'])
        if para:
            return 'X_IGNORE'
        else:
            return None
class MacToPortTable(object):
    """Per-datapath MAC learning table: (dpid, mac) -> switch port."""

    def __init__(self):
        super(MacToPortTable, self).__init__()
        self.mac_to_port = {}

    def dpid_add(self, dpid):
        """Ensure a (possibly empty) table exists for *dpid*."""
        LOG.debug('dpid_add: 0x%016x', dpid)
        self.mac_to_port.setdefault(dpid, {})

    def port_add(self, dpid, port, mac):
        """Learn *mac* on *port*; return the previously learned port (or None)."""
        table = self.mac_to_port[dpid]
        previous = table.get(mac)
        table[mac] = port
        if previous is not None and previous != port:
            # The host moved to a different port — worth logging.
            LOG.debug('port_add: 0x%016x 0x%04x %s', dpid, port, haddr_to_str(mac))
        return previous

    def port_get(self, dpid, mac):
        """Port last seen for *mac* on *dpid*, or None if unknown."""
        return self.mac_to_port[dpid].get(mac)

    def mac_list(self, dpid, port):
        """All MACs currently learned on (*dpid*, *port*)."""
        table = self.mac_to_port.get(dpid, {})
        return [mac for mac, learned_port in table.items() if learned_port == port]

    def mac_del(self, dpid, mac):
        """Forget *mac* on *dpid* (KeyError if it was never learned)."""
        self.mac_to_port[dpid].pop(mac)
def test_records_next_observations():
    """Rollouts recorded with next-observations chain obs across sub-steps."""
    env = build_dummy_structured_env()
    generator = RolloutGenerator(env=env, record_next_observations=True)
    trajectory = generator.rollout(RandomPolicy(env.action_spaces_dict), n_steps=10)
    assert len(trajectory) == 10
    step_keys = env.action_spaces_dict.keys()
    prev_next_obs = None
    for record in trajectory.step_records:
        assert step_keys == record.observations_dict.keys()
        assert step_keys == record.next_observations_dict.keys()
        assert record.batch_shape is None
        for step_key in step_keys:
            current_obs = record.observations_dict[step_key]
            # Each sub-step's observation must equal the previous sub-step's
            # recorded next-observation.
            if prev_next_obs:
                assert list(current_obs.keys()) == list(prev_next_obs.keys())
                for obs_key in current_obs.keys():
                    assert np.all(current_obs[obs_key] == prev_next_obs[obs_key])
            prev_next_obs = record.next_observations_dict[step_key]
# The bare parenthesized argument lists here were stripped click decorators,
# leaving syntactically invalid residue; restored to the standard FlaskBB CLI
# entry-point declaration.
@click.group(cls=FlaskBBGroup, create_app=make_app, add_version_option=False, invoke_without_command=True)
@click.option('--config', expose_value=False, callback=set_config, required=False, is_flag=False, is_eager=True, metavar='CONFIG', help="Specify the config to use either in dotted module notation e.g. 'flaskbb.configs.default.DefaultConfig' or by using a path like '/path/to/flaskbb.cfg'")
@click.option('--instance', expose_value=False, callback=set_instance, required=False, is_flag=False, is_eager=True, metavar='PATH', help="Specify the instance path to use. By default the folder 'instance' next to the package or module is assumed to be the instance path.")
@click.option('--version', expose_value=False, callback=get_version, is_flag=True, is_eager=True, help='Show the FlaskBB version.')
@click.pass_context
def flaskbb(ctx):
    """FlaskBB CLI root group; prints help when invoked without a subcommand."""
    if ctx.invoked_subcommand is None:
        click.echo(ctx.get_help())
def check_return(result_array, checks, only_docs=False):
    """Assert that hover-result lines match the expected (index, text) pairs.

    With only_docs=True, only lines from the '-----' separator onward are
    collected, re-indexed from zero.
    """
    collected = []
    in_docs = False
    doc_idx = 0
    for i, hover_line in enumerate(result_array['contents']['value'].splitlines()):
        if hover_line == '-----':
            in_docs = True
        if in_docs and only_docs:
            collected.append((doc_idx, hover_line))
            doc_idx += 1
        elif not only_docs:
            collected.append((i, hover_line))
    assert len(collected) == len(checks)
    for got, expected in zip(collected, checks):
        assert got[0] == expected[0]
        assert got[1] == expected[1]
def _get_benchmark_names(benchmarksdir):
    """Yield benchmark names from a MANIFEST file, or fall back to bm_* dirs.

    The MANIFEST is expected to contain a '[benchmarks]' section whose first
    line is the 'name\\tmetafile' header, followed by tab-separated rows until
    the next '[...]' section. Raises NotImplementedError on any other layout.
    """
    manifest = os.path.join(benchmarksdir, 'MANIFEST')
    if os.path.isfile(manifest):
        with open(manifest) as infile:
            # Scan forward to the [benchmarks] section; the nested loop then
            # consumes the header line from the SAME iterator, so parsing
            # resumes after it in the final loop below.
            for line in infile:
                if (line.strip() == '[benchmarks]'):
                    for line in infile:
                        if (line.strip() == 'name\tmetafile'):
                            break
                    else:
                        # for/else: header never found before EOF.
                        raise NotImplementedError(manifest)
                    break
            else:
                # for/else: no [benchmarks] section at all.
                raise NotImplementedError(manifest)
            # Rows until the next section header; blank and comment lines skipped.
            for line in infile:
                if line.startswith('['):
                    break
                line = line.strip()
                if ((not line) or line.startswith('#')):
                    continue
                (name, _) = line.split('\t')
                (yield name)
    else:
        # No MANIFEST: any directory entry named bm_<name> is a benchmark.
        for name in os.listdir(benchmarksdir):
            if name.startswith('bm_'):
                (yield name[3:])
class HTMLCompress(jinja2.ext.Extension):
    """Jinja2 extension that whitespace-compresses HTML in 'data' tokens.

    Streams template tokens through an HTMLCompressContext, leaving the
    contents of {{ ... }} and {% ... %} blocks untouched.
    """
    context_class = HTMLCompressContext
    token_class = jinja2.lexer.Token
    # Opening token type -> the closing token type that ends the block.
    block_tokens = {'variable_begin': 'variable_end', 'block_begin': 'block_end'}

    def filter_stream(self, stream):
        transform = self.context_class()
        lineno = 0
        # While set, tokens are passed through verbatim until this token
        # type (the end of a variable/block construct) is seen.
        skip_until_token = None
        for token in stream:
            if skip_until_token:
                (yield token)
                if (token.type == skip_until_token):
                    skip_until_token = None
                continue
            if (token.type != 'data'):
                # Non-data token ends the current text run: flush the
                # compressor's pending output before passing it through.
                for data in transform.finish():
                    (yield self.token_class(lineno, 'data', data))
                (yield token)
                skip_until_token = self.block_tokens.get(token.type)
                continue
            # Remember where an uninterrupted text run started so emitted
            # data tokens carry a sensible line number.
            if (not transform.pending):
                lineno = token.lineno
            for data in transform.feed(token.value):
                (yield self.token_class(lineno, 'data', data))
        # End of stream: flush whatever the compressor still holds.
        lineno = token.lineno
        for data in transform.finish():
            (yield self.token_class(lineno, 'data', data))
def deselect(self, context):
    """Deselect the UVs of the first selected UV island in edit mode.

    NOTE(review): reads bpy.context.active_object rather than the *context*
    parameter — presumably equivalent here, but confirm; also only
    islands[0] is deselected, which looks intentional (one island per call
    in a multi-object loop) — verify against the caller.
    """
    bm = bmesh.from_edit_mesh(bpy.context.active_object.data)
    uv_layers = bm.loops.layers.uv.verify()
    islands = utilities_uv.getSelectionIslands(bm, uv_layers)
    if islands:
        for face in islands[0]:
            for loop in face.loops:
                loop[uv_layers].select = False
    # Signal the multi-object helper loop to stop after this object.
    utilities_uv.multi_object_loop_stop = True
class OptionPlotoptionsTreegraphSonificationTracksActivewhen(Options):
    """activeWhen options for treegraph sonification tracks (crossing/range
    conditions on a point property).

    Getter/setter pairs: the first ``def`` of each name reads the value, the
    second writes it. NOTE(review): the duplicate names suggest stripped
    ``@property``/``@name.setter`` decorators — confirm against the generator.
    """

    def crossingDown(self):
        return self._config_get(None)
    def crossingDown(self, num: float):
        self._config(num, js_type=False)
    def crossingUp(self):
        return self._config_get(None)
    def crossingUp(self, num: float):
        self._config(num, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    # Name of the point property the conditions above apply to.
    def prop(self):
        return self._config_get(None)
    def prop(self, text: str):
        self._config(text, js_type=False)
class OptionSeriesDumbbellMarker(Options):
    """Point-marker options for Highcharts dumbbell series.

    Getter/setter pairs with Highcharts defaults baked into the getters.
    NOTE(review): duplicate def names suggest stripped ``@property`` /
    ``@name.setter`` decorators — confirm against the generator.
    """

    def enabled(self):
        return self._config_get(None)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    # Markers auto-enable when point density stays below this threshold.
    def enabledThreshold(self):
        return self._config_get(2)
    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)
    def fillColor(self):
        return self._config_get(None)
    def fillColor(self, text: str):
        self._config(text, js_type=False)
    def height(self):
        return self._config_get(None)
    def height(self, num: float):
        self._config(num, js_type=False)
    def lineColor(self):
        return self._config_get('#ffffff')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        return self._config_get(0)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def radius(self):
        return self._config_get(4)
    def radius(self, num: float):
        self._config(num, js_type=False)
    # Hover/select state sub-options.
    def states(self) -> 'OptionSeriesDumbbellMarkerStates':
        return self._config_sub_data('states', OptionSeriesDumbbellMarkerStates)
    def symbol(self):
        return self._config_get(None)
    def symbol(self, text: str):
        self._config(text, js_type=False)
    def width(self):
        return self._config_get(None)
    def width(self, num: float):
        self._config(num, js_type=False)
def extractThelotusworldCom(item):
    """Map a thelotusworld.com feed item to a release message.

    None for previews/untagged items, a release message on a tag hit,
    False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    has_release = chp or vol
    if not has_release or 'preview' in item['title'].lower():
        return None
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def bfs(graph, start):
    """All-shortest-path-parents BFS over an adjacency-list graph.

    :param graph: list where graph[i] is the list of neighbors of node i
    :param start: index of the source node
    :return: dict mapping each node to the list of its predecessors on
             shortest paths from *start* (the start node maps to None)
    """
    from collections import deque

    dist = {start: 0}
    parents = {start: None}
    for node in range(len(graph)):
        if node != start:
            dist[node] = math.inf
            parents[node] = []
    # deque gives O(1) FIFO ops; the original list.insert(0, ...) was O(n).
    queue = deque([start])
    while queue:
        node = queue.popleft()
        for neighbor in graph[node]:
            candidate = dist[node] + 1
            if dist[neighbor] == math.inf:
                # First time reached: record distance and enqueue.
                dist[neighbor] = candidate
                parents[neighbor].append(node)
                queue.append(neighbor)
            elif candidate == dist[neighbor]:
                # Another shortest path to neighbor via node.
                parents[neighbor].append(node)
            elif candidate < dist[neighbor]:
                # Defensive: cannot occur in plain BFS (nodes are processed
                # in nondecreasing distance order), kept for parity.
                dist[neighbor] = candidate
                parents[neighbor].clear()
                parents[neighbor].append(node)
    return parents
def find_program(name):
    """Resolve *name* to an executable path via PATH; None when not found.

    An absolute *name* is returned as-is if it is executable.
    """
    if os.path.isabs(name) and PathInfo(name).is_executable:
        return name
    candidates = (
        os.path.abspath(os.path.join(directory, name))
        for directory in os.environ['PATH'].split(os.pathsep)
    )
    return next((c for c in candidates if PathInfo(c).is_executable), None)
class TestCompositeContext():
    """Tests for runner.CompositeContext's class-level key/value store.

    NOTE(review): the bare '.asyncio' lines below look like stripped
    '@pytest.mark.asyncio' decorators — confirm against upstream.
    """

    def test_cannot_be_used_outside_of_composite(self):
        # put() must refuse to run when no context manager is active.
        with pytest.raises(exceptions.RallyAssertionError) as exc:
            runner.CompositeContext.put('test', 1)
        assert (exc.value.args[0] == 'This operation is only allowed inside a composite operation.')
    .asyncio
    async def test_put_get_and_remove(self):
        async with runner.CompositeContext():
            runner.CompositeContext.put('test', 1)
            runner.CompositeContext.put("don't clear this key", 1)
            assert (runner.CompositeContext.get('test') == 1)
            runner.CompositeContext.remove('test')
        # A fresh context must not see keys from the previous one.
        async with runner.CompositeContext():
            with pytest.raises(KeyError) as exc:
                runner.CompositeContext.get("don't clear this key")
            assert (exc.value.args[0] == "Unknown property [don't clear this key]. Currently recognized properties are [].")
    .asyncio
    async def test_fails_to_read_unknown_key(self):
        async with runner.CompositeContext():
            with pytest.raises(KeyError) as exc:
                runner.CompositeContext.put('test', 1)
                runner.CompositeContext.get('unknown')
            assert (exc.value.args[0] == 'Unknown property [unknown]. Currently recognized properties are [test].')
    .asyncio
    async def test_fails_to_remove_unknown_key(self):
        async with runner.CompositeContext():
            with pytest.raises(KeyError) as exc:
                runner.CompositeContext.put('test', 1)
                runner.CompositeContext.remove('unknown')
            assert (exc.value.args[0] == 'Unknown property [unknown]. Currently recognized properties are [test].')
# NOTE(review): the bare ('no_yaml_module_installed') below looks like a
# stripped pytest fixture/mark decorator (e.g. @mark.usefixtures) — confirm.
('no_yaml_module_installed')
def test_option_no_yaml_installed(config, yaml_config_file_1):
    """from_yaml must raise a helpful error when PyYAML is absent."""
    with raises(errors.Error) as error:
        config.option.from_yaml(yaml_config_file_1)
    assert (error.value.args[0] == 'Unable to load yaml configuration - PyYAML is not installed. Install PyYAML or install Dependency Injector with yaml extras: "pip install dependency-injector[yaml]"')
def test_tuple():
    """Tuple-typed Config options coerce assigned values element-wise."""
    cfg = Config('testconfig', foo=('1,2', [int], ''), bar=((1, 2, 3), [str], ''))
    # Defaults are parsed according to the element type.
    assert cfg.foo == (1, 2)
    assert cfg.bar == ('1', '2', '3')
    # Assignments coerce from tuples, parenthesized strings, CSV strings
    # and mixed-type lists alike.
    cfg.foo = (1.2, 3.3, 5)
    assert cfg.foo == (1, 3, 5)
    cfg.foo = '(7, 8, 9)'
    assert cfg.foo == (7, 8, 9)
    cfg.foo = '1, 2,-3,4'
    assert cfg.foo == (1, 2, -3, 4)
    cfg.foo = [1, '2']
    assert cfg.foo == (1, 2)
    # Values that cannot coerce to int must raise.
    for bad in ([[]], [None], ['a'], ['0a'], ['1.2'], 3):
        with raises(ValueError):
            cfg.foo = bad
    cfg.bar = 'hello, there, you '
    assert cfg.bar == ('hello', 'there', 'you')
    cfg.bar = [1, '2']
    assert cfg.bar == ('1', '2')
class OptionPlotoptionsSplineSonificationTracksMappingLowpassResonance(Options):
    """Lowpass-filter resonance mapping for spline sonification tracks.

    Getter/setter pairs. NOTE(review): duplicate def names suggest stripped
    ``@property``/``@name.setter`` decorators — confirm against the generator.
    """

    # Custom mapping callback.
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    # Point property to map from.
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    # Scope ('series'/'chart'/...) the min/max are computed within.
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class Experiment(SAC):
    """SAC experiment wiring 16-hidden-unit policy and Q modules."""

    # Hidden layer width shared by the policy and Q networks.
    HIDDEN_UNITS = 16

    def __init__(self, config, create_train_env, create_env, create_agent):
        super().__init__(config, create_train_env, create_env, create_agent)

    def _build_module(self, module_cls):
        """Instantiate module_cls(obs_dim, action_dim, hidden) and init weights."""
        module = module_cls(self.obs_dim, self.action_dim, self.HIDDEN_UNITS)
        module.apply(weight_init)
        return module

    def _create_model(self):
        return self._build_module(SACPolicy)

    def _create_q(self):
        return self._build_module(SACQ)
def main():
    """Ansible module entry point for managing FortiOS dlp_sensor objects.

    Builds the argument spec from the versioned schema, opens the httpapi
    connection, applies the change through fortios_dlp(), and exits with
    changed/diff/version-warning information.
    """
    module_spec = schema_to_module_spec(versioned_schema)
    mkeyname = 'name'
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']}, 'dlp_sensor': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Copy schema-derived options into the dlp_sensor sub-spec; the mkey
    # attribute ('name') becomes mandatory since it identifies the object.
    for attribute_name in module_spec['options']:
        fields['dlp_sensor']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['dlp_sensor']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=True)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            # Fail-safe: keep request logging off unless explicitly enabled.
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'dlp_sensor')
        (is_error, has_changed, result, diff) = fortios_dlp(module.params, fos, module.check_mode)
    else:
        # No persistent connection available (httpapi plugin not in use).
        module.fail_json(**FAIL_SOCKET_MSG)
    # Version mismatches warn on success but are included in failures too.
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
class PaymentManager(BaseManager):
    """Manager for the Xero Payments endpoint.

    On construction, each name in ``DECORATED_METHODS`` is rebound on the
    instance to its private ``_<name>`` implementation wrapped by
    ``_get_data``.
    """

    def __init__(self, name, credentials, unit_price_4dps=False, user_agent=None):
        self.name = name
        self.credentials = credentials
        self.singular = singular(name)
        self.base_url = credentials.base_url + XERO_API_URL
        # Ask the API for 4-decimal-place unit prices when requested.
        self.extra_params = {'unitdp': 4} if unit_price_4dps else {}
        self.user_agent = resolve_user_agent(user_agent, getattr(credentials, 'user_agent', None))
        # Publish each decorated method under its public name.
        for public_name in self.DECORATED_METHODS:
            raw_method = getattr(self, '_%s' % public_name)
            setattr(self, public_name, self._get_data(raw_method))

    def _delete(self, id):
        # The API "deletes" a payment by posting a DELETED status to it.
        uri = '/'.join([self.base_url, self.name, id])
        body = self._prepare_data_for_save({'Status': 'DELETED'})
        return (uri, {}, 'post', body, None, False)
class OptionSeriesPieSonificationTracksMappingPitch(Options):
    """Accessors for the `series.pie.sonification.tracks.mapping.pitch`
    option group.

    NOTE(review): every name below is defined twice (a getter-style and a
    setter-style variant). As plain methods, the second definition shadows
    the first, so only the setter variants survive — this strongly suggests
    `@property` / `@<name>.setter` decorators were lost from this generated
    code. Confirm against the generator's original output.
    """
    def mapFunction(self):
        # Getter-style: returns the configured value (no default).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter-style: shadows the getter above.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter-style: returns the configured value, default 'y'.
        return self._config_get('y')
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Getter-style: returns the configured value, default 'c6'.
        return self._config_get('c6')
    def max(self, text: str):
        self._config(text, js_type=False)
    def min(self):
        # Getter-style: returns the configured value, default 'c2'.
        return self._config_get('c2')
    def min(self, text: str):
        self._config(text, js_type=False)
    def scale(self):
        # Getter-style: returns the configured value (no default).
        return self._config_get(None)
    def scale(self, value: Any):
        self._config(value, js_type=False)
    def within(self):
        # Getter-style: returns the configured value, default 'yAxis'.
        return self._config_get('yAxis')
    def within(self, text: str):
        self._config(text, js_type=False)
def lazy_import():
    """Import the model classes this module depends on and publish them into
    the module globals, deferring the imports to avoid circular-import
    problems at load time."""
    from fastly.model.included_with_waf_exclusion import IncludedWithWafExclusion
    from fastly.model.pagination import Pagination
    from fastly.model.pagination_links import PaginationLinks
    from fastly.model.pagination_meta import PaginationMeta
    from fastly.model.waf_exclusion_response_data import WafExclusionResponseData
    from fastly.model.waf_exclusions_response_all_of import WafExclusionsResponseAllOf
    module_globals = globals()
    for model_cls in (IncludedWithWafExclusion, Pagination, PaginationLinks,
                      PaginationMeta, WafExclusionResponseData, WafExclusionsResponseAllOf):
        # Each generated model class is registered under its own name.
        module_globals[model_cls.__name__] = model_cls
class BaseNodeSpec(EntitySpec):
    """Spec wrapper for node-like entities.

    Exposes views over the node's parameter groups (config / inputs /
    outputs / states), fills in registered component mappings in
    ``initialize``, and assembles the final runtime parameter dict in
    ``build``.

    NOTE(review): ``config``/``inputs``/``outputs``/``states`` are used
    elsewhere in this class via attribute access (e.g. ``self.config.outputs``),
    which suggests they were originally decorated with ``@property`` —
    confirm against the upstream source.
    """
    def __init__(self, params):
        super().__init__(params)
    def _lookup(self, depth, unlocked=False):
        # Build a SpecView scoped to a single parameter group.
        name = self._params['config']['name']
        return SpecView(self, depth=[depth], name=name, unlocked=unlocked)
    def config(self) -> SpecView:
        # Config view is unlocked so arbitrary config keys may be set.
        return self._lookup('config', unlocked=True)
    def inputs(self) -> SpecView:
        return self._lookup('inputs')
    def outputs(self) -> SpecView:
        return self._lookup('outputs')
    def states(self) -> SpecView:
        return self._lookup('states')
    def initialize(self, spec_cls):
        """Populate this spec with the component mappings registered for
        ``spec_cls.callback`` (outputs/inputs/targets/states, plus
        feedthroughs for ResetNode subclasses)."""
        import eagerx.core.register as register
        try:
            params = register.LOOKUP_TYPES[spec_cls.callback]
        except KeyError:
            # EnvNode has no registered callback types; everything else must.
            if (spec_cls.__name__ == 'EnvNode'):
                params = dict()
            else:
                raise
        if ('targets' in params):
            from eagerx.core.entities import ResetNode
            assert issubclass(spec_cls, ResetNode), 'You can only have targets registered for nodes that inherit from the ResetNode baseclass.'
            # Nodes with targets also get a feedthrough per output.
            add_ft = True
        else:
            add_ft = False
        for (component, cnames) in params.items():
            for (cname, space) in cnames.items():
                if (component == 'outputs'):
                    if (cname not in self.config.outputs):
                        self.config.outputs.append(cname)
                    # Rate is resolved later via config substitution.
                    mapping = dict(rate='$(config rate)', processor=None, space=space)
                    if add_ft:
                        mapping_ft = dict(delay=0.0, window=1, skip=False, processor=None, space=space, address=None)
                        with self.feedthroughs as d:
                            d[cname] = mapping_ft
                elif (component == 'inputs'):
                    if (cname not in self.config.inputs):
                        self.config.inputs.append(cname)
                    # The 'tick' input always gets a scalar int64 space.
                    space = (eagerx.Space(shape=(), dtype='int64') if (cname == 'tick') else space)
                    mapping = dict(delay=0.0, window=1, skip=False, processor=None, space=space, address=None)
                elif (component == 'targets'):
                    if (cname not in self.config.targets):
                        self.config.targets.append(cname)
                    mapping = dict(processor=None, space=space, address=None)
                else:
                    # Anything else is treated as a state component.
                    if (cname not in self.config.states):
                        self.config.states.append(cname)
                    component = 'states'
                    mapping = dict(processor=None, space=space)
                with getattr(self, component) as d:
                    d[cname] = mapping
    def add_input(self, cname: str, window: int=1, delay: float=0.0, skip: bool=False, address: str=None, processor: Optional[ProcessorSpec]=None, space: Optional[gym.spaces.Space]=None):
        """Register an additional input named ``cname`` on this spec."""
        mapping = dict(window=window, delay=delay, skip=skip, space=space, address=address, processor=(processor.params if processor else None))
        with self.inputs as d:
            d[cname] = mapping
    def add_output(self, cname: str, processor: Optional[ProcessorSpec]=None, space: Optional[gym.spaces.Space]=None):
        """Register an additional output named ``cname`` on this spec."""
        mapping = dict(rate='$(config rate)', space=space, processor=(processor.params if processor else None))
        with self.outputs as d:
            d[cname] = mapping
    def build(self, ns: str):
        """Resolve substitutions and convert the spec into the final
        namespaced parameter dict consumed by the runtime.

        :param ns: environment namespace prefix for all addresses.
        """
        params = self.params
        name = self.config.name
        entity_id = self.config.entity_id
        # Substitute $(ns ...) / $(config ...) placeholders in-place.
        context = {'ns': {'env_name': ns, 'node_name': name}, 'config': params['config']}
        substitute_args(params, context, only=['config', 'ns'])
        inputs = []
        for cname in self.config.inputs:
            assert (cname in params['inputs']), f"""Received unknown {'input'} "{cname}". Check the spec of "{name}" with entity_id "{entity_id}"."""
            assert (('targets' not in params) or (cname not in params['targets'])), f'Input "{cname}" cannot have the same cname as a target. Change either the input or target cname. Check the spec of "{name}" with entity_id "{entity_id}".'
            n = RxInput(name=cname, **params['inputs'][cname])
            inputs.append(n)
        outputs = []
        for cname in self.config.outputs:
            # Output rates must match the configured node rate.
            msg = f"The rate ({params['outputs'][cname]['rate']} Hz) set for action '{cname}' does not equal the environment rate ({self.config.rate} Hz)."
            assert (params['outputs'][cname]['rate'] == self.config.rate), msg
            assert (cname in params['outputs']), f"""Received unknown {'output'} "{cname}". Check the spec of "{name}" with entity_id "{entity_id}"."""
            if ('address' in params['outputs'][cname]):
                address = params['outputs'][cname].pop('address')
            else:
                # Default address: <node>/outputs/<cname>.
                address = ('%s/outputs/%s' % (name, cname))
            n = RxOutput(name=cname, address=address, **params['outputs'][cname])
            outputs.append(n)
        states = []
        for cname in self.config.states:
            assert (cname in params['states']), f"""Received unknown {'state'} "{cname}". Check the spec of "{name}" with entity_id "{entity_id}"."""
            if ('address' in params['states'][cname]):
                n = RxState(name=cname, **params['states'][cname])
            else:
                address = ('%s/states/%s' % (name, cname))
                n = RxState(name=cname, address=address, **params['states'][cname])
            states.append(n)
        targets = []
        if ('targets' in self.config):
            for cname in self.config.targets:
                assert (cname in params['targets']), f"""Received unknown {'target'} "{cname}". Check the spec of "{name}" with entity_id "{entity_id}"."""
                n = RxState(name=cname, **params['targets'][cname])
                targets.append(n)
        feedthroughs = []
        if ('feedthroughs' in params):
            # Feedthroughs only make sense for ResetNodes with targets.
            assert ('targets' in self.config), f'No targets defined for ResetNode "{name}".'
            assert (len(self.config.targets) > 0), f'No targets selected for ResetNode "{name}".'
            for cname in self.config.outputs:
                assert (cname in params['feedthroughs']), f'Feedthrough "{cname}" must directly correspond to a selected output. Check the spec of "{name}" with entity_id "{entity_id}".'
                # Feedthrough inherits the space of its corresponding output.
                params['feedthroughs'][cname]['space'] = params['outputs'][cname]['space']
                n = RxFeedthrough(feedthrough_to=cname, **params['feedthroughs'][cname])
                feedthroughs.append(n)
        params['outputs'] = [i.build(ns=ns) for i in outputs]
        params['inputs'] = [i.build(ns=ns) for i in inputs]
        params['states'] = [i.build(ns=ns) for i in states]
        params['targets'] = [i.build(ns=ns) for i in targets]
        params['feedthroughs'] = [i.build(ns=ns) for i in feedthroughs]
        # Strip the "<ns>/" prefix when keying the rate dict.
        chars_ns = (len(ns) + 1)
        rate_dict = dict()
        for i in params['outputs']:
            assert ((i['rate'] is not None) and isinstance(i['rate'], (int, float)) and (i['rate'] > 0)), f"""The rate of node "{name}" (and output cname "{i['name']}") is misspecified: rate="{i['rate']}". Make sure that it is of type(rate)=("int", "float",) and rate > 0."""
            address = i['address'][chars_ns:]
            rate_dict[address] = i['rate']
        node_params = {name: params, 'rate': rate_dict}
        return replace_None(node_params)
class TestAndIncr(TextualConvention, Integer32):
    """SNMPv2-TC TestAndIncr textual convention (advisory spin lock).

    A set operation must supply the instance's current value; on success the
    stored value is incremented, wrapping from 2^31-1 back to zero (per the
    DESCRIPTION text below).
    """
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec
    # Restored upper bound 2147483647 (2^31-1): the constant had been dropped
    # from this constraint; the DESCRIPTION below grounds the value.
    subtypeSpec += ConstraintsUnion(ValueRangeConstraint(0, 2147483647))
    if mibBuilder.loadTexts:
        description = "Represents integer-valued information used for atomic operations. When the\nmanagement protocol is used to specify that an object instance having this\nsyntax is to be modified, the new value supplied via the management protocol\nmust precisely match the value presently held by the instance. If not, the\nmanagement protocol set operation fails with an error of `inconsistentValue'.\nOtherwise, if the current value is the maximum value of 2^31-1 (\ndecimal), then the value held by the instance is wrapped to zero; otherwise,\nthe value held by the instance is incremented by one. (Note that regardless of\nwhether the management protocol set operation succeeds, the variable- binding\nin the request and response PDUs are identical.) The value of the ACCESS clause\nfor objects having this syntax is either `read-write' or `read-create'. When an\ninstance of a columnar object having this syntax is created, any value may be\nsupplied via the management protocol. When the network management portion of\nthe system is re- initialized, the value of every object instance having this\nsyntax must either be incremented from its value prior to the re-\ninitialization, or (if the value prior to the re- initialization is unknown) be\nset to a pseudo-randomly generated value.\n"
    defaultValue = 0
    def setValue(self, value):
        """Perform the test-and-increment.

        Raises InconsistentValueError when ``value`` does not match the
        current value; otherwise increments (wrapping past 2^31-1 to zero)
        and returns a clone holding the new value.
        """
        if (value is not None):
            if (value != self):
                # Atomic test: supplied value must match the current one.
                raise InconsistentValueError()
            value += 1
            # Wrap to zero past 2^31-1 (restored constant; it had been
            # dropped here, leaving a syntax error).
            if (value > 2147483647):
                value = 0
        if (value is None):
            value = univ.noValue
        return self.clone(value)
class SudoGenericTokenError(ModelNormal):
    """Generated OpenAPI model for a sudo/generic-token error payload with a
    single string attribute ``msg``.

    NOTE(review): the bare ``_property`` and ``_js_args_to_python_args``
    expression statements below look like decorators that lost their leading
    '@' (typically ``@cached_property`` and ``@convert_js_args_to_python_args``
    in this style of generated client, with ``_from_openapi_data`` also being
    a ``@classmethod``) — confirm against the code generator's output. As
    written they are plain name references evaluated at class-body time.
    """
    # No enum values or extra validations for this model.
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        # Any JSON-compatible type is accepted for undeclared properties.
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Declared attribute name -> accepted type tuple.
        return {'msg': (str,)}
    _property
    def discriminator():
        return None
    # JSON key mapping for declared attributes.
    attribute_map = {'msg': 'msg'}
    read_only_vars = {}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate the model from raw API (wire) data, allowing
        read-only attributes to be set."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            # Positional arguments are never valid for generated models.
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when configured to do so.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    # Internal attributes set directly on __dict__, bypassing validation.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Instantiate the model from user-supplied keyword arguments;
        read-only attributes are rejected (use ``_from_openapi_data``)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only arrive via _from_openapi_data.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def plot_diagnostics(samples: MonteCarloSamples, ordering: Union[(None, List[str])]=None, plot_posterior: bool=False) -> List[Figure]:
    """Build per-variable diagnostic Bokeh figures for Monte Carlo samples.

    For every sampled variable this produces a trace plot and one
    autocorrelation plot per chain, optionally preceded by an arviz
    posterior plot, all restyled with a shared color palette.

    :param samples: inference samples to diagnose.
    :param ordering: optional list of variable names selecting/ordering what
        is plotted; when None, all variables are plotted in data order.
    :param plot_posterior: when True, prepend a posterior plot per variable.
    :return: one list of figures per variable
        ([posterior?, trace, autocorr...]).
        NOTE(review): the annotation says List[Figure] but the function
        appends lists of figures — confirm intended annotation.
    """
    # Per-chain palette shared across trace/autocorr glyphs.
    COLORS = ['#2a2eec', '#fa7c17', '#328c06', '#c10c90']
    samples_xr = samples.to_xarray()
    data = {str(key): value.values for (key, value) in samples_xr.data_vars.items()}
    if (ordering is not None):
        # Restrict and order variables as requested.
        diagnostics_data = {}
        for key in ordering:
            key = str(key)
            diagnostics_data[key] = data[key]
    else:
        diagnostics_data = data
    diagnostics_plots = []
    for (key, value) in diagnostics_data.items():
        posterior_plot = None
        if plot_posterior:
            posterior_plot = az.plot_posterior({key: value}, show=False)[0][0]
            # Uniform figure size and subdued grid styling.
            posterior_plot.plot_width = 300
            posterior_plot.plot_height = 300
            posterior_plot.grid.grid_line_alpha = 0.2
            posterior_plot.grid.grid_line_color = 'gray'
            posterior_plot.grid.grid_line_width = 0.3
            posterior_plot.yaxis.minor_tick_line_color = None
            posterior_plot.outline_line_color = 'black'
        tr_plot = az.plot_trace(az.from_dict({key: value}), show=False)[0][1]
        # Recolor each chain's line/marker glyphs from the palette.
        line_index = 0
        circle_index = 0
        for renderer in tr_plot.renderers:
            glyph = renderer._property_values['glyph']
            if isinstance(glyph, Line):
                glyph.line_color = COLORS[line_index]
                glyph.line_dash = 'solid'
                glyph.line_width = 2
                glyph.line_alpha = 0.6
                line_index += 1
            if isinstance(renderer._property_values['glyph'], Circle):
                glyph.fill_color = COLORS[circle_index]
                glyph.line_color = COLORS[circle_index]
                glyph.fill_alpha = 0.6
                circle_index += 1
        tr_plot.plot_width = 300
        tr_plot.plot_height = 300
        tr_plot.grid.grid_line_alpha = 0.2
        tr_plot.grid.grid_line_color = 'gray'
        tr_plot.grid.grid_line_width = 0.3
        tr_plot.yaxis.minor_tick_line_color = None
        tr_plot.outline_line_color = 'black'
        tr_plot.title.text = f'{tr_plot.title.text} trace plot'
        # One autocorrelation figure per chain.
        ac_plot = az.plot_autocorr({key: value}, show=False)[0].tolist()
        for (i, p) in enumerate(ac_plot):
            for renderer in p.renderers:
                glyph = renderer._property_values['glyph']
                glyph.line_color = COLORS[i]
            p.plot_width = 300
            p.plot_height = 300
            p.grid.grid_line_alpha = 0.2
            p.grid.grid_line_color = 'gray'
            p.grid.grid_line_width = 0.3
            p.yaxis.minor_tick_line_color = None
            p.outline_line_color = 'black'
            p.title.text = f'''{p.title.text.split()[0]}
autocorrelation chain {i}'''
        if plot_posterior:
            ps = [posterior_plot, tr_plot, *ac_plot]
        else:
            ps = [tr_plot, *ac_plot]
        diagnostics_plots.append(ps)
    return diagnostics_plots
class StalkerSceneAddAllShotLightingOutputsOperator(bpy.types.Operator):
    """Blender operator that adds all shot lighting outputs for the Stalker
    Task identified by ``stalker_entity_id``."""
    bl_label = 'Add All Shot Lighting Outputs'
    bl_idname = 'stalker.scene_add_all_shot_lighting_outputs_op'
    stalker_entity_id = bpy.props.IntProperty(name='stalker_entity_id')
    stalker_entity_name = bpy.props.StringProperty(name='stalker_entity_name')

    def execute(self, context):
        logger.debug('inside %s.execute()' % self.__class__.__name__)
        # Resolve the scene Task from the configured Stalker entity id.
        scene = Task.query.get(self.stalker_entity_id)
        logger.debug('scene: %s' % scene)
        return {'FINISHED'}
class Solution():
    """Random index picker over a fixed number list.

    ``pick(target)`` returns a uniformly random index ``i`` with
    ``nums[i] == target`` using reservoir sampling: O(n) time per pick and
    O(1) extra space. Returns -1 when the target is absent.
    """

    def __init__(self, nums: List[int]):
        self.nums = nums

    def pick(self, target: int) -> int:
        chosen = -1
        matches_seen = 0
        for idx, value in enumerate(self.nums):
            if value == target:
                # Keep this index with probability 1/(matches_seen + 1),
                # which makes every matching index equally likely overall.
                if random.randint(0, matches_seen) == 0:
                    chosen = idx
                matches_seen += 1
        return chosen
def setup_qat_get_optimizer_param_groups(model, qat_method):
    """Attach a learnable-QAT ``get_optimizer_param_groups`` implementation.

    When ``qat_method`` starts with 'learnable', mixes
    ``ModelGetOptimizerParamGroupLearnableQATMixin`` into the model;
    otherwise the model is returned unchanged.
    """
    if qat_method.startswith('learnable'):
        # Learnable QAT requires a quantization-ready state dict.
        assert _is_q_state_dict(model.state_dict())
        wrapped = mixin_with_subclass(model, ModelGetOptimizerParamGroupLearnableQATMixin)
        assert hasattr(wrapped, 'get_optimizer_param_groups')
        return wrapped
    return model
def test_records_episode_with_correct_data():
    """ActionRecordingWrapper persists an episode's action record to disk.

    Runs 5 steps, then checks that the record file appears only after the
    next reset, and that the pickled ActionRecord holds the cumulative
    reward and the exact actions taken.
    """
    env = build_dummy_maze_env()
    env = ActionRecordingWrapper.wrap(env, record_maze_actions=True, record_actions=True, output_dir='action_records')
    actions = []
    env.seed(1234)
    env.reset()
    cum_reward = 0.0
    for _ in range(5):
        action = env.action_space.sample()
        actions.append(action)
        (observation, rew, _, _) = env.step(action)
        cum_reward += rew
    episode_id = env.get_episode_id()
    expected_file_path = (str(episode_id) + '.pkl')
    # The record is not written until the episode ends (on reset).
    assert (not (expected_file_path in os.listdir('action_records')))
    env.seed(1234)
    env.reset()
    assert (expected_file_path in os.listdir('action_records'))
    with open(('action_records/' + expected_file_path), 'rb') as in_f:
        action_record = pickle.load(in_f)
    # Recorded cumulative reward must match what we accumulated above.
    assert (action_record.cum_action_record_reward == cum_reward)
    assert isinstance(action_record, ActionRecord)
    assert (len(action_record) == len(actions))
    for i in range(5):
        # Each stored action must equal the sampled one, key by key.
        recorded_action = action_record.get_agent_action(i, ActorID(0, 0))
        for k in recorded_action.keys():
            assert np.all((recorded_action[k] == actions[i][k]))
class OptionPlotoptionsScatterSonificationContexttracksMappingTremolo(Options):
    """Accessors for the scatter sonification context-tracks tremolo mapping
    options (`depth` and `speed` sub-option groups).

    NOTE(review): these accessors are written as plain methods; sibling
    generated classes suggest they were originally ``@property`` — confirm
    against the generator's output.
    """
    def depth(self) -> 'OptionPlotoptionsScatterSonificationContexttracksMappingTremoloDepth':
        # Lazily-built sub-options object for the 'depth' key.
        return self._config_sub_data('depth', OptionPlotoptionsScatterSonificationContexttracksMappingTremoloDepth)
    def speed(self) -> 'OptionPlotoptionsScatterSonificationContexttracksMappingTremoloSpeed':
        # Lazily-built sub-options object for the 'speed' key.
        return self._config_sub_data('speed', OptionPlotoptionsScatterSonificationContexttracksMappingTremoloSpeed)
def extractMtlparadiseWordpressCom(item):
    """Parser for 'mtlparadise.wordpress.com' feed items.

    Returns a release message when the item carries a known tag and a
    chapter/volume number, None for previews or non-chapter posts, and
    False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, tl_type in tagmap:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
@pytest.mark.signal_handling  # NOTE(review): original line was a bare ".signal_handling" (syntax error); restored as a pytest mark — confirm the marker name upstream. Requires `pytest` to be imported at module level.
def test_dispatch_with_multi_kwarg_message_transformer_succeeds(fake_sqs_queue):
    """Dispatch with a transformer that expands one message into two kwargs.

    The worker asserts it received both derived kwargs; the second queued
    message (9999) must remain on the queue, and the worker process must
    exit cleanly (exitcode 0).
    """
    queue = fake_sqs_queue
    queue.send_message(MessageBody=1234)
    dispatcher = SQSWorkDispatcher(queue, worker_process_name='Test Worker Process', long_poll_seconds=1, monitor_sleep_time=1)

    def do_some_work(task_id, task_id_times_two):
        # Runs in the worker process; a failed assert surfaces via exitcode.
        assert (task_id == 1234)
        assert (task_id_times_two == 2468)

    # Enqueue a second message that this dispatch must NOT consume.
    queue_in_use = fake_sqs_queue
    queue_in_use.send_message(MessageBody=9999)
    dispatcher.dispatch(do_some_work, message_transformer=(lambda x: {'task_id': x.body, 'task_id_times_two': (x.body * 2)}))
    dispatcher._worker_process.join(5)
    messages = queue.receive_messages(WaitTimeSeconds=1, MaxNumberOfMessages=10)
    assert (len(messages) == 1)
    assert (messages[0].body == 9999)
    assert (dispatcher._worker_process.exitcode == 0)
def shfl_scan(arr_in: ti.template(), in_beg: ti.i32, in_end: ti.i32, sum_smem: ti.template(), single_block: ti.template()):
    """Block-wise inclusive prefix sum over ``arr_in[in_beg:in_end]``.

    NOTE(review): this reads like a Taichi GPU kernel (``ti.loop_config``
    with ``block_dim``); the ``@ti.kernel`` decorator is not visible here —
    confirm. ``inclusive_add`` and ``barrier`` are helpers defined elsewhere
    (presumably a warp-level inclusive scan and a block-wide sync).

    When ``single_block`` is false, each block additionally writes its total
    to ``arr_in[in_end + block_id]`` for a later cross-block pass.
    """
    ti.loop_config(block_dim=BLOCK_SZ)
    for i in range(in_beg, in_end):
        val = arr_in[i]
        # Derive thread/block/warp coordinates from the flat index.
        thread_id = (i % BLOCK_SZ)
        block_id = int(((i - in_beg) // BLOCK_SZ))
        lane_id = (thread_id % WARP_SZ)
        warp_id = (thread_id // WARP_SZ)
        # Warp-level inclusive scan of val (helper defined elsewhere).
        val = inclusive_add(val)
        barrier()
        # Last lane of each warp stores the warp total into shared memory.
        if ((thread_id % WARP_SZ) == (WARP_SZ - 1)):
            sum_smem[(block_id, warp_id)] = val
        barrier()
        # First thread turns per-warp totals into an exclusive running sum.
        if ((warp_id == 0) and (lane_id == 0)):
            for k in range(1, (BLOCK_SZ / WARP_SZ)):
                sum_smem[(block_id, k)] += sum_smem[(block_id, (k - 1))]
        barrier()
        # Add the preceding warps' total to finish the block-wide scan.
        warp_sum = 0
        if (warp_id > 0):
            warp_sum = sum_smem[(block_id, (warp_id - 1))]
        val += warp_sum
        arr_in[i] = val
        # Publish the block total for a subsequent multi-block pass.
        if ((not single_block) and (thread_id == (BLOCK_SZ - 1))):
            arr_in[(in_end + block_id)] = val
def seed_bigquery_integration_db(bigquery_integration_engine) -> None:
    """Drop, recreate, and seed the ``fidesopstest`` BigQuery test dataset.

    Executes the DDL/DML statements in order on a single connection.

    NOTE(review): several literal values in the INSERT statements appear
    truncated in this source (e.g. emails reduced to 'customer-'/'admin-',
    missing ccn numbers, and a raw line break inside the final 'Monthly
    Report' string, which as written is not a valid string literal) —
    restore them from the original fixture before relying on this seed data.
    """
    statements = ['\n DROP TABLE IF EXISTS fidesopstest.report;\n ', '\n DROP TABLE IF EXISTS fidesopstest.service_request;\n ', '\n DROP TABLE IF EXISTS fidesopstest.login;\n ', '\n DROP TABLE IF EXISTS fidesopstest.visit;\n ', '\n DROP TABLE IF EXISTS fidesopstest.order_item;\n ', '\n DROP TABLE IF EXISTS fidesopstest.orders;\n ', '\n DROP TABLE IF EXISTS fidesopstest.payment_card;\n ', '\n DROP TABLE IF EXISTS fidesopstest.employee;\n ', '\n DROP TABLE IF EXISTS fidesopstest.customer;\n ', '\n DROP TABLE IF EXISTS fidesopstest.address;\n ', '\n DROP TABLE IF EXISTS fidesopstest.product;\n\n ', '\n CREATE TABLE fidesopstest.product (\n id INT,\n name STRING,\n price DECIMAL(10,2)\n );\n ', '\n CREATE TABLE fidesopstest.address (\n id BIGINT,\n house STRING,\n street STRING,\n city STRING,\n state STRING,\n zip STRING\n );\n ', '\n CREATE TABLE fidesopstest.customer (\n id INT,\n email STRING,\n name STRING,\n created TIMESTAMP,\n address_id BIGINT\n );\n ', '\n CREATE TABLE fidesopstest.employee (\n id INT,\n email STRING,\n name STRING,\n address_id BIGINT\n );\n ', '\n CREATE TABLE fidesopstest.payment_card (\n id STRING,\n name STRING,\n ccn BIGINT,\n code SMALLINT,\n preferred BOOLEAN,\n customer_id INT,\n billing_address_id BIGINT\n );\n ', '\n CREATE TABLE fidesopstest.orders (\n id STRING,\n customer_id INT,\n shipping_address_id BIGINT,\n payment_card_id STRING\n );\n ', '\n CREATE TABLE fidesopstest.order_item (\n order_id STRING,\n item_no SMALLINT,\n product_id INT,\n quantity SMALLINT\n );\n ', '\n CREATE TABLE fidesopstest.visit (\n email STRING,\n last_visit TIMESTAMP\n );\n ', '\n CREATE TABLE fidesopstest.login (\n id INT,\n customer_id INT,\n time TIMESTAMP\n );\n ', '\n CREATE TABLE fidesopstest.service_request (\n id STRING,\n email STRING,\n alt_email STRING,\n opened DATE,\n closed DATE,\n employee_id INT\n );\n ', '\n CREATE TABLE fidesopstest.report (\n id INT,\n email STRING,\n name STRING,\n year INT,\n month INT,\n total_visits INT\n );\n ', 
    "\n INSERT INTO fidesopstest.product VALUES\n (1, 'Example Product 1', 10.00),\n (2, 'Example Product 2', 20.00),\n (3, 'Example Product 3', 50.00);\n ", "\n INSERT INTO fidesopstest.address VALUES\n (1, '123', 'Example Street', 'Exampletown', 'NY', '12345'),\n (2, '4', 'Example Lane', 'Exampletown', 'NY', '12321'),\n (3, '555', 'Example Ave', 'Example City', 'NY', '12000');\n ", "\n INSERT INTO fidesopstest.customer VALUES\n (1, 'customer-', 'John Customer', '2020-04-01 11:47:42', 1),\n (2, 'customer-', 'Jill Customer', '2020-04-01 11:47:42', 2);\n ", "\n INSERT INTO fidesopstest.employee VALUES\n (1, 'employee-', 'Jack Employee', 3),\n (2, 'employee-', 'Jane Employee', 3);\n ", "\n INSERT INTO fidesopstest.payment_card VALUES\n ('pay_aaa-aaa', 'Example Card 1', , 321, true, 1, 1),\n ('pay_bbb-bbb', 'Example Card 2', , 123, false, 2, 1);\n ", "\n INSERT INTO fidesopstest.orders VALUES\n ('ord_aaa-aaa', 1, 2, 'pay_aaa-aaa'),\n ('ord_bbb-bbb', 2, 1, 'pay_bbb-bbb'),\n ('ord_ccc-ccc', 1, 1, 'pay_aaa-aaa'),\n ('ord_ddd-ddd', 1, 1, 'pay_bbb-bbb');\n ", "\n INSERT INTO fidesopstest.order_item VALUES\n ('ord_aaa-aaa', 1, 1, 1),\n ('ord_bbb-bbb', 1, 1, 1),\n ('ord_ccc-ccc', 1, 1, 1),\n ('ord_ccc-ccc', 2, 2, 1),\n ('ord_ddd-ddd', 1, 1, 1);\n ", "\n INSERT INTO fidesopstest.visit VALUES\n ('customer-', '2021-01-06 01:00:00'),\n ('customer-', '2021-01-06 01:00:00');\n ", "\n INSERT INTO fidesopstest.login VALUES\n (1, 1, '2021-01-01 01:00:00'),\n (2, 1, '2021-01-02 01:00:00'),\n (5, 1, '2021-01-05 01:00:00'),\n (6, 1, '2021-01-06 01:00:00'),\n (7, 2, '2021-01-06 01:00:00');\n ", "\n INSERT INTO fidesopstest.service_request VALUES\n ('ser_aaa-aaa', 'customer-', 'customer-1-', '2021-01-01', '2021-01-03', 1),\n ('ser_bbb-bbb', 'customer-', null, '2021-01-04', null, 1),\n ('ser_ccc-ccc', 'customer-', null, '2021-01-05', '2020-01-07', 1),\n ('ser_ddd-ddd', 'customer-', null, '2021-05-05', '2020-05-08', 2);\n ", "\n INSERT INTO fidesopstest.report VALUES\n (1, 'admin-', 'Monthly 
Report', 2021, 8, 100),\n (2, 'admin-', 'Monthly Report', 2021, 9, 100),\n (3, 'admin-', 'Monthly Report', 2021, 10, 100),\n (4, 'admin-', 'Monthly Report', 2021, 11, 100);\n "]
    with bigquery_integration_engine.connect() as connection:
        # Execute sequentially; DROPs precede CREATEs precede INSERTs.
        [connection.execute(stmt) for stmt in statements]
    return
def fortios_authentication(data, fos, check_mode):
    """Dispatch the 'authentication' feature task to the rule handler.

    Returns the raw response in check mode, otherwise a
    (failed, changed, response, diff) tuple. Fails the Ansible task when
    the 'authentication_rule' body is missing.
    """
    fos.do_member_operation('authentication', 'rule')
    if not data['authentication_rule']:
        fos._module.fail_json(msg=('missing task body: %s' % 'authentication_rule'))
    else:
        resp = authentication_rule(data, fos, check_mode)
    if check_mode:
        return resp
    succeeded = is_successful_status(resp)
    # Treat a missing 'revision_changed' key as "changed".
    changed = succeeded and (resp['revision_changed'] if 'revision_changed' in resp else True)
    return (not succeeded, changed, resp, {})
class ResourceRulesEngineTest(ForsetiTestCase):
    """Tests for the resource rules engine: rule-book construction and
    required-resource violation detection."""

    def setUp(self):
        # Silence engine logging during tests.
        resource_rules_engine.LOGGER = mock.MagicMock()

    def test_build_rule_book_from_local_yaml_file(self):
        """A valid rule yields a one-rule rule book."""
        rule = '\nrules:\n- name: Resource test rule\n mode: required\n resource_types: [project]\n resource_trees: []\n'
        rules_engine = get_rules_engine_with_rule(rule)
        self.assertEqual(1, len(rules_engine.rule_book.rules))

    def test_build_rule_book_no_resource_types(self):
        """A rule without resource_types is rejected."""
        rule = '\nrules:\n- name: Resource test rule\n mode: required\n resource_types: []\n resource_trees: []\n'
        with self.assertRaises(InvalidRulesSchemaError):
            get_rules_engine_with_rule(rule)

    def test_build_rule_book_no_mode(self):
        """A rule without a mode is rejected."""
        rule = '\nrules:\n- name: Resource test rule\n resource_types: [project]\n resource_trees: []\n'
        with self.assertRaises(InvalidRulesSchemaError):
            get_rules_engine_with_rule(rule)

    def test_get_applicable_resource_types(self):
        """Applicable types are the union over all rules."""
        rule = '\nrules:\n- name: rule 1\n mode: required\n resource_types: [project]\n resource_trees: []\n- name: rule 2\n mode: required\n resource_types: [organization, project]\n resource_trees: []\n'
        rules_engine = get_rules_engine_with_rule(rule)
        got_types = rules_engine.rule_book.get_applicable_resource_types()
        self.assertEqual(got_types, set(['organization', 'project']))

    def test_find_violations_single_node_match(self):
        """A resource matching the tree produces no violations."""
        rule = '\nrules:\n- name: Resource test rule\n mode: required\n resource_types: [project]\n resource_trees:\n - type: project\n resource_id: p1\n'
        rules_engine = get_rules_engine_with_rule(rule)
        got_violations = list(rules_engine.find_violations([data.PROJECT1]))
        self.assertEqual(got_violations, [])

    def test_find_violations_single_node_no_match(self):
        """A resource absent from the tree is flagged."""
        rule = '\nrules:\n- name: Resource test rule\n mode: required\n resource_types: [project]\n resource_trees:\n - type: project\n resource_id: p1\n'
        rules_engine = get_rules_engine_with_rule(rule)
        got_violations = list(rules_engine.find_violations([data.PROJECT1, data.PROJECT2]))
        self.assertEqual(got_violations, data.build_violations(data.PROJECT2))

    def test_find_violations_multiple_roots(self):
        """Multiple root nodes may each match a resource."""
        rule = '\nrules:\n- name: Resource test rule\n mode: required\n resource_types: [project]\n resource_trees:\n - type: project\n resource_id: p1\n - type: project\n resource_id: p2\n'
        rules_engine = get_rules_engine_with_rule(rule)
        got_violations = list(rules_engine.find_violations([data.PROJECT1, data.PROJECT2]))
        self.assertEqual(got_violations, [])

    def test_find_violations_child_found(self):
        """A child resource under its declared parent is accepted."""
        rule = "\nrules:\n- name: Resource test rule\n mode: required\n resource_types: [organization, project]\n resource_trees:\n - type: organization\n resource_id: '234'\n children:\n - type: project\n resource_id: p1\n"
        rules_engine = get_rules_engine_with_rule(rule)
        got_violations = list(rules_engine.find_violations([data.ORGANIZATION, data.PROJECT1]))
        self.assertEqual(got_violations, [])

    def test_find_violations_missing(self):
        """A required tree node with no matching resource is a violation."""
        rule = '\nrules:\n- name: Resource test rule\n mode: required\n resource_types: [project]\n resource_trees:\n - type: project\n resource_id: p1\n'
        rules_engine = get_rules_engine_with_rule(rule)
        got_violations = list(rules_engine.find_violations([]))
        # Fixed: a dead `violation = data.build_violations(...)[0]` assignment
        # that was immediately overwritten has been removed.
        violation = resource_rules_engine.RuleViolation(resource_id='p1', resource_name='p1', resource_type='project', full_name='p1', rule_index=0, rule_name='Resource test rule', violation_type='RESOURCE_VIOLATION', violation_data='', resource_data='')
        self.assertEqual(got_violations, [violation])

    def test_find_violations_child_missing(self):
        """A missing required child node is a violation."""
        rule = "\nrules:\n- name: Resource test rule\n mode: required\n resource_types: [organization, project]\n resource_trees:\n - type: organization\n resource_id: '234'\n children:\n - type: project\n resource_id: p1\n"
        rules_engine = get_rules_engine_with_rule(rule)
        got_violations = list(rules_engine.find_violations([data.ORGANIZATION]))
        violation = resource_rules_engine.RuleViolation(resource_id='p1', resource_name='p1', resource_type='project', full_name='p1', rule_index=0, rule_name='Resource test rule', violation_type='RESOURCE_VIOLATION', violation_data='', resource_data='')
        self.assertEqual(got_violations, [violation])

    def test_find_violations_wrong_parent(self):
        """A resource under the wrong parent violates, and the unmatched
        tree node is also reported."""
        rule = '\nrules:\n- name: Resource test rule\n mode: required\n resource_types: [project, bucket]\n resource_trees:\n - type: project\n resource_id: p1\n - type: project\n resource_id: p2\n children:\n - type: bucket\n resource_id: p1-bucket1\n'
        rules_engine = get_rules_engine_with_rule(rule)
        got_violations = list(rules_engine.find_violations([data.PROJECT1, data.PROJECT2, data.BUCKET]))
        node_violation = resource_rules_engine.RuleViolation(resource_id='p1-bucket1', resource_name='p1-bucket1', resource_type='bucket', full_name='p1-bucket1', rule_index=0, rule_name='Resource test rule', violation_type='RESOURCE_VIOLATION', violation_data='', resource_data='')
        self.assertEqual(got_violations, (data.build_violations(data.BUCKET) + [node_violation]))

    def test_find_violations_wildcard(self):
        """A wildcard resource_id matches any resource of that type."""
        rule = "\nrules:\n- name: Resource test rule\n mode: required\n resource_types: [project]\n resource_trees:\n - type: project\n resource_id: '*'\n"
        rules_engine = get_rules_engine_with_rule(rule)
        got_violations = list(rules_engine.find_violations([data.PROJECT1]))
        self.assertEqual(got_violations, [])

    def test_find_violations_wildcard_and_sibling(self):
        """A wildcard sibling does not break matching of an explicit tree."""
        rule = "\nrules:\n- name: Resource test rule\n mode: required\n resource_types: [organization, project]\n resource_trees:\n - type: organization\n resource_id: '*'\n - type: organization\n resource_id: '234'\n children:\n - type: project\n resource_id: p1\n"
        rules_engine = get_rules_engine_with_rule(rule)
        got_violations = list(rules_engine.find_violations([data.ORGANIZATION, data.PROJECT1]))
        self.assertEqual(got_violations, [])

    def test_find_violations_empty_tree(self):
        """With an empty tree, every resource of an applicable type violates."""
        rule = '\nrules:\n- name: Resource test rule\n mode: required\n resource_types: [organization]\n resource_trees: []\n'
        rules_engine = get_rules_engine_with_rule(rule)
        got_violations = list(rules_engine.find_violations([data.ORGANIZATION]))
        self.assertEqual(got_violations, data.build_violations(data.ORGANIZATION))
@_HOOK_REGISTRY.register()  # Fixed: '@' was missing, so register() was a discarded no-op call and the class was never registered.
class ActivationCheckpointModelingHook(mh.ModelingHook):
    """Modeling hook that wraps model submodules with activation
    checkpointing, trading recomputation for activation memory."""

    def apply(self, model: nn.Module) -> nn.Module:
        """Wrap submodules selected by the configured auto-wrap policy with
        `checkpoint_wrapper` and return the (mutated) model."""
        logger.info('Activation Checkpointing is used')
        wrapper_fn = partial(checkpoint_wrapper, checkpoint_impl=(CheckpointImpl.NO_REENTRANT if (not self.cfg.ACTIVATION_CHECKPOINT.REENTRANT) else CheckpointImpl.REENTRANT))
        policy_name = self.cfg.ACTIVATION_CHECKPOINT.AUTO_WRAP_POLICY
        assert (policy_name != 'size_based_auto_wrap_policy'), 'ActivationCheckpointing should always be wrapped at module boundary'
        policy_kwargs = {'layer_names': self.cfg.ACTIVATION_CHECKPOINT.AUTO_WRAP_LAYER_CLS}
        # An empty policy name means "checkpoint every submodule".
        auto_wrap_policy = (D2GO_WRAP_POLICY_REGISTRY.get(policy_name)(model, **policy_kwargs) if (policy_name != '') else (lambda _: True))
        apply_activation_checkpointing(model, checkpoint_wrapper_fn=wrapper_fn, auto_wrap_policy=auto_wrap_policy)
        return model

    def unapply(self, model: nn.Module) -> nn.Module:
        # Checkpoint wrappers cannot be removed once applied.
        raise NotImplementedError("ActivationCheckpointModelingHook.unapply() not implemented: can't unwrap an activation checkpoint module")
class TestPeriodFilterAbsolute(TestCase):
    """Tests for IndexList.filter_period with period_type='absolute'."""

    def builder(self, key='2'):
        """Create an IndexList backed by a mocked ES client seeded with the
        canned test values for `key`."""
        client = Mock()
        client.info.return_value = get_es_ver()
        client.cat.indices.return_value = get_testvals(key, 'state')
        client.indices.get_settings.return_value = get_testvals(key, 'settings')
        client.indices.stats.return_value = get_testvals(key, 'stats')
        client.indices.exists_alias.return_value = False
        self.client = client
        self.ilo = IndexList(self.client)

    def test_bad_period_type(self):
        """An unknown period_type raises ValueError."""
        self.builder()
        self.assertRaises(ValueError, self.ilo.filter_period, period_type='invalid')

    def test_none_value_raises(self):
        """date_from=None with an absolute period is a configuration error."""
        self.builder()
        self.assertRaises(ConfigurationError, self.ilo.filter_period, period_type='absolute', date_from=None)

    def test_fail_on_bad_date(self):
        """A date that does not match its format string fails execution."""
        self.builder()
        self.assertRaises(
            FailedExecution,
            self.ilo.filter_period,
            unit='months',
            source='creation_date',
            period_type='absolute',
            date_from='2016.17',
            date_to='2017.01',
            date_from_format='%Y.%m',
            date_to_format='%Y.%m',
        )
def _check_offsets(offsets_list: List[List[int]]) -> None:
offsets_len = len(offsets_list[0])
for offsets in offsets_list:
assert (offsets[0] == 0)
assert (len(offsets) == offsets_len)
for j in range(1, len(offsets)):
assert (offsets[j] >= offsets[(j - 1)])
offsets_len = (offsets[(- 1)] + 1) |
def main():
    """CLI entry point: verify IOB FASM against BEL parameters.

    With neither --fasm nor --params, scans all specimens; otherwise
    processes the single given specimen and reports the verified count.
    """
    parser = argparse.ArgumentParser(description='Verify IOB FASM vs BELs.')
    parser.add_argument('--fasm')
    parser.add_argument('--params')
    args = parser.parse_args()
    if args.fasm or args.params:
        count = process_specimen(fasm_file=args.fasm, params_json=args.params)
        print('No errors found in {} IO sites'.format(count))
    else:
        scan_specimens()
def extractPlebianfinetranslationWordpressCom(item):
    """Parse a release item from plebianfinetranslation.wordpress.com.

    Returns None for previews or titles without a volume/chapter, a built
    release message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    # (tag to look for, series name, translation type)
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesAreaSonificationDefaultinstrumentoptionsPointgrouping(Options):
    """Point-grouping options for a series' sonification default instrument.

    NOTE(review): the original block defined each name twice (getter then
    setter) with no decorators, so the second `def` silently overwrote the
    first and attribute reads returned bound methods. Restored the
    `@property` / `@<name>.setter` pairs this getter/setter pattern implies —
    confirm against the original project source.
    """

    @property
    def algorithm(self):
        # Grouping algorithm; config default is 'minmax'.
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        # Whether point grouping is enabled; config default is True.
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        # Timespan of each group; config default is 15.
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        # Point property to group by; config default is 'y'.
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
class Node(ABC):
    """Base class for a tree of test-harness resources.

    Nodes attach themselves to the node currently under construction (tracked
    in the context-local `_local.current`), building a named tree. Names are
    derived from the class name, or from a clone source's name, plus an
    optional suffix.
    """

    # Tree links and identity; populated during __init__.
    parent: Optional['Node']
    children: List['Node']
    name: str
    ambassador_id: str
    namespace: str
    is_ambassador = False
    xfail: Optional[str]

    def __init__(self, *args, name: Optional[str]=None, namespace: Optional[str]=None, _clone: Optional['Node']=None, **kwargs) -> None:
        """Create a node (or a clone) and attach it to the current tree.

        When `_clone` is given, the constructor args/kwargs and base name are
        taken from the clone source; otherwise the base name is the class
        name. An optional subclass `init(...)` hook is invoked with processed
        arguments while this node is the "current" parent.
        """
        self.skip_node = False
        self.xfail: Optional[str] = None
        if _clone:
            # Cloning: reuse the source node's constructor arguments and name.
            args = _clone._args
            kwargs = _clone._kwargs
            if name:
                name = '-'.join((_clone.name, name))
            else:
                name = _clone.name
            self._args = _clone._args
            self._kwargs = _clone._kwargs
        else:
            self._args = args
            self._kwargs = kwargs
            if name:
                name = '-'.join((self.__class__.__name__, name))
            else:
                name = self.__class__.__name__
        # Remember the previous "current" node so it can be restored after init().
        saved = _local.current
        self.parent = _local.current
        if namespace:
            self.namespace = namespace
        if (not getattr(self, 'namespace', '')):
            # Inherit the namespace from the parent, falling back to 'default'.
            if (self.parent and self.parent.namespace):
                self.namespace = self.parent.namespace
            else:
                self.namespace = 'default'
        _local.current = self
        self.children = []
        if (self.parent is not None):
            self.parent.children.append(self)
        try:
            # Subclass initialization hook; a missing init() is a no-op.
            init = getattr(self, 'init', (lambda *a, **kw: None))
            init(*_argprocess(args), **_argprocess(kwargs))
        finally:
            _local.current = saved
        self.name = self.format(name)
        # Children must have unique names within this node.
        names = {}
        for c in self.children:
            assert (c.name not in names), ('test %s of type %s has duplicate children: %s of type %s, %s' % (self.name, self.__class__.__name__, c.name, c.__class__.__name__, names[c.name].__class__.__name__))
            names[c.name] = c

    def clone(self, name=None):
        """Return a new node of the same class, cloned from this one."""
        return self.__class__(_clone=self, name=name)

    # NOTE(review): `variants` takes `cls` — this looks like a stripped
    # @classmethod decorator; confirm against the original source.
    def variants(cls):
        (yield cls())

    # NOTE(review): `path`, `traversal`, `ancestors` and `depth` are accessed
    # WITHOUT call parentheses elsewhere in this class (e.g. `self.path.name`
    # in matches(), `self.parent.depth + 1`, `for d in c.traversal`) — they
    # look like stripped @property decorators; as plain methods those call
    # sites would fail at runtime. Confirm before relying on this code.
    def path(self) -> Name:
        # Dotted path from the root, paired with this node's namespace.
        if (self.parent is None):
            return Name(self.name, self.namespace)
        else:
            return Name(((self.parent.path.name + '.') + self.name), self.namespace)

    def traversal(self):
        # Pre-order traversal of this subtree.
        (yield self)
        for c in self.children:
            for d in c.traversal:
                (yield d)

    def ancestors(self):
        # This node followed by its ancestors up to the root.
        (yield self)
        if (self.parent is not None):
            for a in self.parent.ancestors:
                (yield a)

    def depth(self):
        # Root depth is 0; each level below adds 1.
        if (self.parent is None):
            return 0
        else:
            return (self.parent.depth + 1)

    def format(self, st, **kwargs):
        """Format a template string with this node bound as `self`."""
        return integration_manifests.format(st, self=self, **kwargs)

    # NOTE(review): bare `_cache()` call at class-body level — its return
    # value is discarded, so this looks like a stripped decorator (possibly
    # for `matches` below). Confirm against the original source.
    _cache()

    def matches(self, pattern):
        """Return True if this node's path name, or any descendant's, matches *pattern* (glob)."""
        if fnmatch.fnmatch(self.path.name, ('*%s*' % pattern)):
            return True
        for c in self.children:
            if c.matches(pattern):
                return True
        return False

    def requirements(self):
        # Subclass hook: yield readiness requirements; none by default.
        (yield from ())

    def log_kube_artifacts(self):
        """Dump pod logs and kube events for this node to /tmp files (at most once per node)."""
        if (not getattr(self, 'already_logged', False)):
            self.already_logged = True
            print(f'logging kube artifacts for {self.path.k8s}')
            sys.stdout.flush()
            # Dev mode runs the node as a local docker container instead of in-cluster.
            DEV = (os.environ.get('AMBASSADOR_DEV', '0').lower() in ('1', 'yes', 'true'))
            log_path = f'/tmp/kat-logs-{self.path.k8s}'
            if DEV:
                os.system(f'docker logs {self.path.k8s} >{log_path} 2>&1')
            else:
                os.system(f'tools/bin/kubectl logs -n {self.namespace} {self.path.k8s} >{log_path} 2>&1')
            event_path = f'/tmp/kat-events-{self.path.k8s}'
            fs1 = f'involvedObject.name={self.path.k8s}'
            fs2 = f'involvedObject.namespace={self.namespace}'
            cmd = f'tools/bin/kubectl get events -o json --field-selector "{fs1}" --field-selector "{fs2}"'
            os.system(f'echo ==== "{cmd}" >{event_path}')
            os.system(f'{cmd} >>{event_path} 2>&1')
def get_images(html, url):
    """Extract the full-size image URL from an episode page.

    Raises SkipEpisodeError for deleted posts. Otherwise returns a
    one-element list containing the absolute image URL (resolved against
    *url*; empty-string result if no image markup was found).
    """
    if 'This post was deleted' in html:
        raise SkipEpisodeError(always=True)
    login_check(html)
    result = ''
    # Prefer the high-resolution anchor; fall back to an embedded source.
    highres = re.search('<a [^>]*highres[^>]*>', html)
    if highres:
        result = re.search('href="([^"]+)"', highres.group(0)).group(1)
    else:
        embed = re.search('embed src="([^"]+)"', html)
        if embed:
            result = embed.group(1)
    return [urljoin(url, unescape(result))]
class CreateTemplateParamSource(ABC, ParamSource):
    """Param source for create-template operations.

    Template definitions come either from explicit 'template'/'body' params
    or from the track's declared templates (optionally filtered by name and
    merged with extra 'settings').

    NOTE(review): `_create_or_merge` and `__merge` had no decorators in the
    original but take no `self`; `self._create_or_merge(a, b, c)` would then
    pass four arguments to a three-parameter function. Restored the
    `@staticmethod` decorators this calling pattern implies.
    """

    def __init__(self, track, params, templates, **kwargs):
        """Resolve template definitions from params or the track's templates.

        Raises exceptions.InvalidSyntax when a named template filter matches
        nothing, or when neither inline params nor track templates exist.
        """
        super().__init__(track, params, **kwargs)
        self.request_params = params.get('request-params', {})
        self.template_definitions = []
        if 'template' in params and 'body' in params:
            # An explicit inline template wins over track-declared templates.
            self.template_definitions.append((params['template'], params['body']))
        elif templates:
            filter_template = params.get('template')
            settings = params.get('settings')
            template_definitions = []
            for template in templates:
                if not filter_template or template.name == filter_template:
                    body = self._create_or_merge(template.content, ['template', 'settings'], settings)
                    template_definitions.append((template.name, body))
            if filter_template and not template_definitions:
                template_names = ', '.join([template.name for template in templates])
                raise exceptions.InvalidSyntax(f'Unknown template: {filter_template}. Available templates: {template_names}.')
            self.template_definitions.extend(template_definitions)
        else:
            raise exceptions.InvalidSyntax(f"Please set the properties 'template' and 'body' for the {params.get('operation-type')} operation or declare composable and/or component templates in the track")

    @staticmethod
    def _create_or_merge(content, path, new_content):
        """Merge new_content into content at the nested path, creating levels as needed; returns content."""
        original_content = content
        if new_content:
            for sub_path in path:
                if sub_path not in content:
                    content[sub_path] = {}
                content = content[sub_path]
            CreateTemplateParamSource.__merge(content, new_content)
        return original_content

    @staticmethod
    def __merge(dct, merge_dct):
        """Recursively merge merge_dct into dct in place (dicts merge, other values overwrite)."""
        for k in merge_dct.keys():
            if k in dct and isinstance(dct[k], dict) and isinstance(merge_dct[k], collections.abc.Mapping):
                CreateTemplateParamSource.__merge(dct[k], merge_dct[k])
            else:
                dct[k] = merge_dct[k]

    def params(self):
        """Return the resolved template definitions and request params for one operation."""
        return {'templates': self.template_definitions, 'request-params': self.request_params}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.