code stringlengths 281 23.7M |
|---|
class OptionPlotoptionsPackedbubbleSonificationDefaultinstrumentoptionsMappingTremoloDepth(Options):
    """Option wrapper for the Highcharts
    `plotOptions.packedbubble.sonification.defaultInstrumentOptions.mapping.tremolo.depth`
    configuration subtree.

    NOTE(review): each getter/setter pair below shares one name with no
    decorators visible, so at class-creation time the second `def` shadows
    the first. Generated option classes like this normally carry
    `@property` / `@<name>.setter` decorators — confirm whether they were
    stripped from this copy.
    """

    def mapFunction(self):
        # Getter: no default value configured.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stored as a plain (non-JavaScript) value.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: no default value configured.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Getter: no default value configured.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Getter: no default value configured.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Getter: no default value configured.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class TestSerializeJson():
    """Tests for JSON serialization of server-sent events (falcon).

    NOTE(review): ``client`` reads like a pytest fixture (it is consumed as a
    ``client`` parameter by the tests) but no ``@pytest.fixture`` decorator is
    visible — confirm whether it was stripped from this copy.
    """

    def client(self):
        # Minimal app whose only resource streams two SSE events with JSON
        # payloads.
        class SomeResource():
            async def on_get(self, req, resp):
                async def emitter():
                    (yield SSEvent(json={'foo': 'bar'}))
                    (yield SSEvent(json={'bar': 'baz'}))
                resp.sse = emitter()
        resource = SomeResource()
        app = App()
        app.add_route('/', resource)
        client = testing.TestClient(app)
        return client

    def test_use_media_handler_dumps(self, client):
        # A customized JSON media-handler dumps function must be honored
        # when serializing SSE payloads (here: uppercased JSON).
        h = client.app.resp_options.media_handlers[falcon.MEDIA_JSON]
        h._dumps = (lambda x: json.dumps(x).upper())
        result = client.simulate_get()
        assert (result.text == 'data: {"FOO": "BAR"}\n\ndata: {"BAR": "BAZ"}\n\n')

    def test_no_json_media_handler(self, client):
        # With every JSON media handler removed, SSE serialization falls back
        # to a default JSON encoder.
        for h in list(client.app.resp_options.media_handlers):
            if ('json' in h.casefold()):
                client.app.resp_options.media_handlers.pop(h)
        result = client.simulate_get()
        assert (result.text == 'data: {"foo": "bar"}\n\ndata: {"bar": "baz"}\n\n')
class NoAuthenticationClassesTests(TestCase):
    """A custom permission's message must surface even when the view has no
    authentication classes configured."""

    def test_permission_message_with_no_authentication_classes(self):
        class DummyPermission(permissions.BasePermission):
            # Message expected to appear verbatim in the 403 response body.
            message = 'Dummy permission message'

            def has_permission(self, request, view):
                return False

        denying_view = MockView.as_view(
            authentication_classes=(),
            permission_classes=(DummyPermission,),
        )
        response = denying_view(factory.get('/'))
        assert response.status_code == status.HTTP_403_FORBIDDEN
        assert response.data == {'detail': 'Dummy permission message'}
_os(*metadata.platforms)
def main():
    """Emulate a masquerading process modifying a LaunchAgents plist (macOS
    detection-rule test).

    NOTE(review): the bare `_os(*metadata.platforms)` statement just above
    reads like a stripped `@common.requires_os(...)` decorator — confirm
    upstream.
    """
    masquerade = '/tmp/bash'
    masquerade2 = '/tmp/testmodify'
    # Target plist under the current user's LaunchAgents directory; create a
    # placeholder if it does not exist yet.
    tmp_file = f'{Path.home()}/Library/LaunchAgents/com.apple.test.plist'
    if (not Path(tmp_file).exists()):
        Path(tmp_file).write_text('test')
    # Stage a copy of bash plus a fake masquerade binary, then strip the
    # code signature from the bash copy.
    common.copy_file('/bin/bash', masquerade)
    common.create_macos_masquerade(masquerade2)
    common.execute(['codesign', '--remove-signature', masquerade], timeout=5, kill=True)
    common.log('Launching fake commands to modify com.apple.test.plist')
    # Shell redirection appends to the plist via the masqueraded bash.
    command = f"{masquerade} -c echo '1' >> {tmp_file}"
    common.execute([masquerade2, 'childprocess', command], shell=True, timeout=5, kill=True)
    # Clean up every staged artifact.
    common.remove_file(masquerade)
    common.remove_file(masquerade2)
    common.remove_file(tmp_file)
()
('path-source', type=click.Path(exists=True, file_okay=True))
('--config', default=None, help='Path to the YAML configuration file (default: PATH_SOURCE/_config.yml)')
('--toc', default=None, help='Path to the Table of Contents YAML file (default: PATH_SOURCE/_toc.yml)')
_context
def sphinx(ctx, path_source, config, toc):
    """Generate a Sphinx ``conf.py`` from a Jupyter Book configuration.

    NOTE(review): the bare tuples/names above this function read like
    stripped ``@click.argument`` / ``@click.option`` / ``@click.pass_context``
    decorators — confirm upstream.
    """
    from jupyter_book.config import get_final_config
    # Reuse the `build` command in config-only mode to resolve the config
    # path, the source directory and any CLI overrides.
    (path_config, full_path_source, config_overrides) = ctx.invoke(build, path_source=path_source, config=config, toc=toc, get_config_only=True)
    (sphinx_config, _) = get_final_config(user_yaml=(Path(path_config) if path_config else None), sourcedir=Path(full_path_source), cli_config=config_overrides)
    lines = ['', '# Auto-generated by `jupyter-book config`', '# If you wish to continue using _config.yml, make edits to that file and', '# re-generate this one.', '']
    # Sort keys so the generated conf.py is deterministic and diff-friendly.
    for key in sorted(sphinx_config):
        lines.append(f'{key} = {sphinx_config[key]!r}')
    content = ('\n'.join(lines).rstrip() + '\n')
    # Write next to the config file when one exists, else into the source dir.
    out_folder = (Path(path_config).parent if path_config else Path(full_path_source))
    out_folder.joinpath('conf.py').write_text(content, encoding='utf8')
    click.secho(f'Wrote conf.py to {out_folder}', fg='green')
def get_notes_by_future_due_date() -> Dict[(str, List[SiacNote])]:
    """Group notes whose reminder is due on/after the DUE_NOTES_BOUNDARY
    cutoff by their due date.

    Returns:
        Mapping of ISO date string ('%Y-%m-%d') to the notes due that day.
        Notes whose due date has already passed are bucketed under today.
    """
    conn = _get_connection()
    # Ensure the connection is released even if the query raises.
    try:
        boundary = utility.date.date_x_days_ago_stamp(DUE_NOTES_BOUNDARY)
        # NOTE(review): `boundary` comes from an internal helper, but a
        # parameterized query would still be safer than f-string SQL.
        rows = conn.execute(
            f"select * from notes where substr(reminder, 21, 10) >= '{boundary}'"
        ).fetchall()
    finally:
        conn.close()
    today = datetime.today().date()
    grouped: Dict[str, List[SiacNote]] = {}
    for note in _to_notes(rows):
        # Clamp overdue notes to today so they still appear in the view.
        due_date = max(note.current_due_date().date(), today)
        grouped.setdefault(due_date.strftime('%Y-%m-%d'), []).append(note)
    return grouped
('/directory', defaults={'path': ''})
('/directory/<path:path>')
def directory(path):
    """Serve the audio-player page for a playable directory, or 404.

    NOTE(review): the bare tuples above read like stripped
    ``@app.route(...)`` decorators (with a '' default for the root path) —
    confirm upstream.
    """
    # Sorting preference is persisted in a cookie; default to 'text'.
    sort_property = get_cookie_browse_sorting(path, 'text')
    (sort_fnc, sort_reverse) = browse_sortkey_reverse(sort_property)
    try:
        file = PlayableDirectory.from_urlpath(path)
        if file.is_directory:
            return stream_template('audio.player.html', file=file, sort_property=sort_property, sort_fnc=sort_fnc, sort_reverse=sort_reverse, playlist=True)
    except OutsideDirectoryBase:
        # Paths escaping the served base directory are treated as missing.
        pass
    return NotFound()
def _warn_or_exception(value, cause=None):
    """Dispatch *value* according to the module-level ``warnings_action``.

    'ignore' drops it silently, 'error' raises ``MetaDataException``
    (chained to *cause* when one is given), and anything else logs it
    as a warning.
    """
    if warnings_action == 'error':
        if cause is None:
            raise MetaDataException(value)
        raise MetaDataException(value) from cause
    if warnings_action != 'ignore':
        logging.warning(value)
class OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingPitch(Options):
    """Option wrapper for the Highcharts
    `plotOptions.bar.sonification.defaultInstrumentOptions.mapping.pitch`
    configuration subtree.

    NOTE(review): each getter/setter pair below shares one name with no
    decorators visible, so the second `def` shadows the first — upstream
    generators normally emit `@property` / `@<name>.setter` here; confirm
    whether decorators were stripped from this copy.
    """

    def mapFunction(self):
        # Getter: no default value configured.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stored as a plain (non-JavaScript) value.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: pitch maps to the 'y' value by default.
        return self._config_get('y')

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Getter: highest note, defaults to musical note 'c6'.
        return self._config_get('c6')

    def max(self, text: str):
        self._config(text, js_type=False)

    def min(self):
        # Getter: lowest note, defaults to musical note 'c2'.
        return self._config_get('c2')

    def min(self, text: str):
        self._config(text, js_type=False)

    def scale(self):
        # Getter: no default scale configured.
        return self._config_get(None)

    def scale(self, value: Any):
        self._config(value, js_type=False)

    def within(self):
        # Getter: values are mapped within the 'yAxis' range by default.
        return self._config_get('yAxis')

    def within(self, text: str):
        self._config(text, js_type=False)
_blueprint.route('/project/new', methods=['GET', 'POST'])
_required
def new_project():
    """Render and process the "add project" form (Anitya web UI).

    GET pre-fills the form from query-string arguments; POST validates and
    creates the project, optionally mapping it to a distribution package and
    running an initial release check.

    NOTE(review): the bare `_blueprint.route(...)` call and `_required` name
    above read like stripped `@ui_blueprint.route(...)` / `@login_required`
    decorators — confirm upstream.
    """
    # Load available backend and version-scheme plugins plus known distros
    # to populate the form's choice fields.
    backend_plugins = anitya_plugins.load_plugins(Session)
    plg_names = [plugin.name for plugin in backend_plugins]
    version_plugins = anitya_plugins.load_plugins(Session, family='versions')
    version_plg_names = [plugin.name for plugin in version_plugins]
    distros = models.Distro.all(Session)
    distro_names = []
    for distro in distros:
        distro_names.append(distro.name)
    form = anitya.forms.ProjectForm(backends=plg_names, version_schemes=version_plg_names, distros=distro_names)
    if (flask.request.method == 'GET'):
        # Pre-fill from query-string so other pages can deep-link here.
        form.name.data = flask.request.args.get('name', '')
        form.homepage.data = flask.request.args.get('homepage', '')
        form.backend.data = flask.request.args.get('backend', '')
        form.version_scheme.data = flask.request.args.get('version_scheme', '')
        form.distro.data = flask.request.args.get('distro', '')
        form.package_name.data = flask.request.args.get('package_name', '')
        return flask.render_template('project_new.html', context='Add', current='Add projects', form=form, plugins=backend_plugins)
    elif form.validate_on_submit():
        project = None
        try:
            # Optional text fields are stripped, with empty values stored as None.
            project = utilities.create_project(Session, name=form.name.data.strip(), homepage=form.homepage.data.strip(), backend=form.backend.data.strip(), version_scheme=form.version_scheme.data.strip(), version_url=(form.version_url.data.strip() if form.version_url.data else None), version_prefix=(form.version_prefix.data.strip() if form.version_prefix.data else None), pre_release_filter=(form.pre_release_filter.data.strip() if form.pre_release_filter.data else None), version_filter=(form.version_filter.data.strip() if form.version_filter.data else None), regex=(form.regex.data.strip() if form.regex.data else None), user_id=flask.g.user.username, releases_only=form.releases_only.data)
            Session.commit()
            # Optionally map the new project onto a distribution package.
            if (form.distro.data and form.package_name.data):
                utilities.map_project(Session, project=project, package_name=form.package_name.data, distribution=form.distro.data, user_id=flask.g.user.username)
                Session.commit()
            flask.flash('Project created')
        except exceptions.AnityaException as err:
            # Creation conflict (e.g. duplicate project) -> re-render with 409.
            flask.flash(str(err))
            return (flask.render_template('project_new.html', context='Add', current='Add projects', form=form, plugins=backend_plugins), 409)
        if (form.check_release.data is True):
            # Best-effort initial release check; failure only flashes a message.
            try:
                utilities.check_project_release(project, Session)
            except exceptions.AnityaException:
                flask.flash('Check failed')
        return flask.redirect(flask.url_for('anitya_ui.project', project_id=project.id))
    # POST with invalid form data -> re-render with 400.
    return (flask.render_template('project_new.html', context='Add', current='Add projects', form=form, plugins=backend_plugins), 400)
class SimpleSwitchController(ControllerBase):
    """REST controller exposing a simple switch's MAC table (Ryu-style app).

    NOTE(review): the bare tuples before each handler read like stripped
    ``@route('simpleswitch', url, ...)`` decorators (and are not valid tuple
    syntax as written, since they contain keyword arguments) — confirm
    upstream and restore the decorators.
    """
    def __init__(self, req, link, data, **config):
        super(SimpleSwitchController, self).__init__(req, link, data, **config)
        # The running simple-switch app instance, injected via the WSGI data dict.
        self.simple_switch_app = data[simple_switch_instance_name]
    ('simpleswitch', url, methods=['GET'], requirements={'dpid': dpid_lib.DPID_PATTERN})
    def list_mac_table(self, req, **kwargs):
        """Return the MAC table of datapath `dpid` as JSON, or 404."""
        simple_switch = self.simple_switch_app
        dpid = kwargs['dpid']
        if (dpid not in simple_switch.mac_to_port):
            return Response(status=404)
        mac_table = simple_switch.mac_to_port.get(dpid, {})
        body = json.dumps(mac_table)
        return Response(content_type='application/json', text=body)
    ('simpleswitch', url, methods=['PUT'], requirements={'dpid': dpid_lib.DPID_PATTERN})
    def put_mac_table(self, req, **kwargs):
        """Install a MAC-table entry on datapath `dpid` from the JSON body."""
        simple_switch = self.simple_switch_app
        dpid = kwargs['dpid']
        try:
            new_entry = (req.json if req.body else {})
        except ValueError:
            # NOTE(review): raising a non-exception Response object raises
            # TypeError at runtime; this likely should be
            # `return Response(status=400)` — confirm against upstream.
            raise Response(status=400)
        if (dpid not in simple_switch.mac_to_port):
            return Response(status=404)
        try:
            mac_table = simple_switch.set_mac_to_port(dpid, new_entry)
            body = json.dumps(mac_table)
            return Response(content_type='application/json', text=body)
        except Exception as e:
            # Any failure while applying the entry maps to a 500.
            return Response(status=500)
class AutoscalingClient(NamespacedClient):
    """Client for the Elasticsearch autoscaling APIs.

    NOTE(review): the bare `_rewrite_parameters()` calls between methods read
    like stripped `@_rewrite_parameters(...)` decorators — confirm upstream.
    """
    _rewrite_parameters()
    def delete_autoscaling_policy(self, *, name: str, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
        """Delete the autoscaling policy *name* (DELETE /_autoscaling/policy/{name})."""
        if (name in SKIP_IN_PATH):
            raise ValueError("Empty value passed for parameter 'name'")
        __path = f'/_autoscaling/policy/{_quote(name)}'
        # Optional diagnostics/formatting flags become query-string parameters.
        __query: t.Dict[(str, t.Any)] = {}
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (human is not None):
            __query['human'] = human
        if (pretty is not None):
            __query['pretty'] = pretty
        __headers = {'accept': 'application/json'}
        return self.perform_request('DELETE', __path, params=__query, headers=__headers)
    _rewrite_parameters()
    def get_autoscaling_capacity(self, *, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
        """Get the current autoscaling capacity (GET /_autoscaling/capacity)."""
        __path = '/_autoscaling/capacity'
        __query: t.Dict[(str, t.Any)] = {}
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (human is not None):
            __query['human'] = human
        if (pretty is not None):
            __query['pretty'] = pretty
        __headers = {'accept': 'application/json'}
        return self.perform_request('GET', __path, params=__query, headers=__headers)
    _rewrite_parameters()
    def get_autoscaling_policy(self, *, name: str, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
        """Retrieve the autoscaling policy *name* (GET /_autoscaling/policy/{name})."""
        if (name in SKIP_IN_PATH):
            raise ValueError("Empty value passed for parameter 'name'")
        __path = f'/_autoscaling/policy/{_quote(name)}'
        __query: t.Dict[(str, t.Any)] = {}
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (human is not None):
            __query['human'] = human
        if (pretty is not None):
            __query['pretty'] = pretty
        __headers = {'accept': 'application/json'}
        return self.perform_request('GET', __path, params=__query, headers=__headers)
    _rewrite_parameters(body_name='policy')
    def put_autoscaling_policy(self, *, name: str, policy: t.Optional[t.Mapping[(str, t.Any)]]=None, body: t.Optional[t.Mapping[(str, t.Any)]]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
        """Create or update the autoscaling policy *name* (PUT /_autoscaling/policy/{name}).

        Exactly one of `policy` or `body` must be provided; they are aliases
        for the request body.
        """
        if (name in SKIP_IN_PATH):
            raise ValueError("Empty value passed for parameter 'name'")
        if ((policy is None) and (body is None)):
            raise ValueError("Empty value passed for parameters 'policy' and 'body', one of them should be set.")
        elif ((policy is not None) and (body is not None)):
            raise ValueError("Cannot set both 'policy' and 'body'")
        __path = f'/_autoscaling/policy/{_quote(name)}'
        __query: t.Dict[(str, t.Any)] = {}
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (human is not None):
            __query['human'] = human
        if (pretty is not None):
            __query['pretty'] = pretty
        __body = (policy if (policy is not None) else body)
        __headers = {'accept': 'application/json', 'content-type': 'application/json'}
        return self.perform_request('PUT', __path, params=__query, headers=__headers, body=__body)
class OptionSeriesScatter3dSonificationTracks(Options):
    """Option wrapper for the Highcharts `series.scatter3d.sonification.tracks`
    configuration subtree.

    NOTE(review): paired same-named `def`s below have no visible decorators,
    so the setter shadows the getter — upstream generators normally emit
    `@property` / `@<name>.setter`; confirm whether decorators were stripped.
    """

    def activeWhen(self) -> 'OptionSeriesScatter3dSonificationTracksActivewhen':
        # Nested sub-options object (lazily created on first access).
        return self._config_sub_data('activeWhen', OptionSeriesScatter3dSonificationTracksActivewhen)

    def instrument(self):
        # Getter: default instrument is 'piano'.
        return self._config_get('piano')

    def instrument(self, text: str):
        self._config(text, js_type=False)

    def mapping(self) -> 'OptionSeriesScatter3dSonificationTracksMapping':
        # Nested sub-options object (lazily created on first access).
        return self._config_sub_data('mapping', OptionSeriesScatter3dSonificationTracksMapping)

    def midiName(self):
        # Getter: no default value configured.
        return self._config_get(None)

    def midiName(self, text: str):
        self._config(text, js_type=False)

    def pointGrouping(self) -> 'OptionSeriesScatter3dSonificationTracksPointgrouping':
        # Nested sub-options object (lazily created on first access).
        return self._config_sub_data('pointGrouping', OptionSeriesScatter3dSonificationTracksPointgrouping)

    def roundToMusicalNotes(self):
        # Getter: defaults to True (snap pitches to musical notes).
        return self._config_get(True)

    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    def showPlayMarker(self):
        # Getter: defaults to True.
        return self._config_get(True)

    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    def type(self):
        # Getter: default track type is 'instrument'.
        return self._config_get('instrument')

    def type(self, text: str):
        self._config(text, js_type=False)
def doExecutePluginCommand(cmdline):
    """Offer *cmdline* to each enabled plugin task and return the first
    non-False result.

    Returns False when there are no tasks or no task handles the command.
    A plugin exception is converted to its message string and returned.
    """
    retvalue = False
    if not Settings.Tasks:
        return False
    # Iterate the tasks directly instead of indexing by range(len(...)).
    for task in Settings.Tasks:
        # Task slots may hold a bool placeholder instead of a task object.
        if (not isinstance(task, bool)) and task.enabled:
            try:
                retvalue = task.plugin_write(cmdline)
            except Exception as exc:
                retvalue = str(exc)
            # Deliberately `!= False` (not `is not False`): a plugin
            # returning 0 compares equal to False and must be treated as
            # "not handled", matching the original behavior.
            if retvalue != False:
                return retvalue
    return retvalue
def execmd(cmd, params=None, timeout=None):
    """Resolve *cmd* to an executable path, run it, and capture its output.

    Bare command names (not starting with '/' or './') are resolved via
    the command-path lookup helper.

    Raises:
        Exception: when the command cannot be resolved to an existing file.

    Returns:
        dict with 'out', 'err' and 'returncode' keys.
    """
    if not re.search(r'^\.?/', cmd):
        cmd = get_cmd_path(cmd)
    if not (cmd and os.path.isfile(cmd)):
        raise Exception('Command not found')
    argv = [cmd] + params if params else [cmd]
    runner = CommandExecutor(argv, stderr=True)
    (out, err) = runner.execute(timeout)
    return {'out': out, 'err': err, 'returncode': runner.returncode}
def create_readers(ffrom, patch, to_size):
    """Build the diff and from readers for applying a binary patch.

    Args:
        ffrom: file-like object with the original ("from") binary.
        patch: the raw patch bytes.
        to_size: size of the target ("to") binary.

    Returns:
        (diff_reader, from_reader) tuple.
    """
    fpatch = BytesIO(patch)
    # Patch layout: pointers header, call0 blocks header, then the pointer
    # block payloads and the call0 block payloads, in that order.
    (data_pointers_blocks_present, code_pointers_blocks_present, data_pointers_header, code_pointers_header, from_data_offset, from_data_begin, from_data_end, from_code_begin, from_code_end) = unpack_pointers_header(fpatch)
    call0_header = Blocks.unpack_header(fpatch)
    (data_pointers_blocks, code_pointers_blocks) = unpack_pointers_blocks(fpatch, data_pointers_blocks_present, code_pointers_blocks_present, data_pointers_header, code_pointers_header)
    call0_blocks = Blocks.from_fpatch(call0_header, fpatch)
    # Disassemble the original binary to locate call0 sites and pointers.
    (call0, data_pointers, code_pointers) = disassemble(ffrom, from_data_offset, ((from_data_offset + from_data_end) - from_data_begin), from_data_begin, from_data_end, from_code_begin, from_code_end)
    diff_reader = DiffReader(ffrom, to_size, call0, data_pointers, code_pointers, call0_blocks, data_pointers_blocks, code_pointers_blocks)
    from_reader = FromReader(ffrom, call0, data_pointers, code_pointers, call0_blocks, data_pointers_blocks, code_pointers_blocks)
    return (diff_reader, from_reader)
class RoughViz(MixHtmlState.HtmlOverlayStates, Html.Html):
    """HTML component wrapping a rough-viz chart (sketchy hand-drawn charts).

    NOTE(review): `shared`, `dom`, `options`, `datasets` and `d3` read like
    stripped `@property` accessors, and the bare `('roughviz')` before
    `build` like a stripped decorator — confirm upstream.
    """
    requirements = ('rough-viz',)
    name = 'rough_viz'
    tag = 'div'
    _chart__type = 'Line'
    _option_cls = OptChartRoughViz.RoughVizLine
    builder_name = 'RvCharts'
    def __init__(self, page, width, height, html_code, options, profile):
        super(RoughViz, self).__init__(page, [], html_code=html_code, profile=profile, options=options, css_attrs={'width': width, 'height': height})
        # Lazily-created helpers and per-chart data containers.
        (self._d3, self._chart, self._datasets, self._data_attrs, self._attrs) = (None, None, [], {}, {})
        # rough-viz targets the element by CSS selector.
        self.options.element = ('#%s' % self.htmlCode)
    def shared(self) -> OptChartRoughViz.OptionsChartSharedRoughViz:
        """Options shared across rough-viz charts on the page."""
        return OptChartRoughViz.OptionsChartSharedRoughViz(self)
    def dom(self) -> JsHtmlCharts.RoughViz:
        """Lazily-created DOM helper for this chart."""
        if (self._dom is None):
            self._dom = JsHtmlCharts.RoughViz(page=self.page, component=self)
        return self._dom
    def options(self) -> OptChartRoughViz.RoughVizLine:
        """Chart options (typed view over the base component options)."""
        return super().options
    def datasets(self):
        """Datasets registered on this chart."""
        return self._datasets
    def d3(self) -> JsD3.D3Select:
        """Lazily-created D3 selection bound to this chart's element."""
        if (self._d3 is None):
            self._d3 = JsD3.D3Select(page=self.page, component=self, selector=("d3.select('#%s')" % self.htmlCode), set_var=False)
        return self._d3
    def add_dataset(self, data: list, label: str='', colors: list=None, opacity: float=None, kind: float=None):
        """Add a dataset to the chart options and return it.

        NOTE(review): `colors`, `opacity` and `kind` are accepted but unused
        here — confirm whether subclasses consume them.
        """
        dataset = self.options.data.add(label, data)
        return dataset
    def _set_js_code(self, html_code: str, js_code: str):
        """Re-point the JS variable and DOM references at a new html code."""
        self.js.varName = js_code
        self.dom.varName = ('document.getElementById(%s)' % JsUtils.jsConvertData(html_code, None))
        # The element selector is computed client-side from the html code.
        self.options._config(("'#' + %s" % JsUtils.jsConvertData(html_code, None)), name='element', js_type=True)
    ('roughviz')
    def build(self, data: etypes.JS_DATA_TYPES=None, options: etypes.OPTION_TYPE=None, profile: etypes.PROFILE_TYPE=False, component_id: str=None, stop_state: bool=True, dataflows: List[dict]=None):
        """Return the JS snippet that creates or updates the chart.

        With `data`, emits an update assigning new data through the builder
        function; without it, emits the chart constructor call.
        """
        self.js_code = component_id
        if (data is not None):
            builder_fnc = JsUtils.jsWrap(('%s(%s, %s)' % (self.builder_name, JsUtils.dataFlows(data, dataflows, self.page), self.options.config_js(options).toStr())), profile).toStr()
            state_expr = ''
            if stop_state:
                state_expr = (';%s' % self.hide_state(component_id))
            options = (options or {})
            options['data'] = data
            return ('%(chartId)s.data = %(builder)s; %(state)s' % {'chartId': self.js_code, 'builder': builder_fnc, 'state': state_expr})
        return ('%(chartId)s = new roughViz.%(chartType)s(%(config)s)' % {'chartId': self.js_code, 'chartType': self._chart__type, 'config': self.options.config_js(options)})
    def colors(self, hex_values: List[str]):
        # Intentionally a no-op for this chart family.
        ...
    def labels(self, labels: list, series_id: str=None):
        # Intentionally a no-op for this chart family.
        ...
    def define(self, options: etypes.JS_DATA_TYPES=None, dataflows: List[dict]=None) -> str:
        """No extra definition JS is needed for rough-viz charts."""
        return ''
    def __str__(self):
        self.page.properties.js.add_builders(self.build())
        return ('<%s %s></%s>' % (self.tag, self.get_attrs(css_class_names=self.style.get_classes()), self.tag))
def containerize_code(code_string):
    """Execute generated code and capture its stdout.

    SECURITY NOTE(review): `exec` on a generated/untrusted string runs with
    full interpreter privileges and writes into this module's `globals()`;
    despite the name, nothing here is containerized — only use with trusted
    input or inside a real sandbox.

    Returns:
        (True, captured_stdout) on success, or
        (False, error_description) naming the failing function and line.
    """
    # Substitute the real API key for the placeholder before execution.
    code_string = code_string.replace('your_openai_api_key_here', OPENAI_KEY)
    try:
        output_buffer = io.StringIO()
        with contextlib.redirect_stdout(output_buffer):
            exec(code_string, globals())
    except Exception as e:
        (exc_type, exc_value, exc_traceback) = sys.exc_info()
        tb = traceback.extract_tb(exc_traceback)
        # Innermost traceback frame: where the executed code actually failed.
        (filename, line, func, text) = tb[(- 1)]
        error_msg = f'{exc_type.__name__}: {str(e)}'
        return (False, f'Error: {error_msg}. Getting the error from function: {func} (line: {line})')
    code_printout = output_buffer.getvalue()
    return (True, code_printout)
class MyGaussianBlur(ImageFilter.GaussianBlur):
    """Gaussian blur that optionally leaves a rectangular region sharp.

    With ``bounds=(left, top, right, bottom)`` the four strips surrounding
    that rectangle are blurred while the rectangle itself is left intact;
    without bounds the whole image is blurred.
    """
    name = 'GaussianBlur'

    def __init__(self, size, radius=2, bounds=None):
        super().__init__()
        self.radius = radius   # blur radius in pixels
        self.bounds = bounds   # (left, top, right, bottom) region to keep sharp
        self.size = size       # (width, height) of the target image

    def filter(self, image):
        """Apply the (partial) blur to *image* and return it.

        Fixes: removed a stray debug `print(1)`; the four copy-pasted
        blur-strip stanzas are folded into a single loop.
        """
        if not self.bounds:
            return image.gaussian_blur(self.radius)
        (left, top, right, bottom) = self.bounds
        (width, height) = self.size
        # Strips surrounding the protected rectangle: above, left, below, right.
        regions = (
            (0, 0, width, top),
            (0, top, left, bottom),
            (0, bottom, width, height),
            (right, top, width, bottom),
        )
        for region in regions:
            blurred = image.crop(region).gaussian_blur(self.radius)
            image.paste(blurred, region)
        return image
class Switch(Html.Html):
    """Bootstrap/jQuery toggle-switch component with on/off labels.

    NOTE(review): the `on`/`off`, `dom` and `js` same-named method pairs read
    like stripped `@property` / `@<name>.setter` accessors — confirm upstream.
    """
    requirements = ('bootstrap', 'jquery')
    name = 'Switch Buttons'
    builder_name = 'HtmlSwitch'
    def __init__(self, page: primitives.PageModel, records: dict, color: str, width: types.SIZE_TYPE, height: types.SIZE_TYPE, html_code: str, options: dict, profile: types.PROFILE_TYPE, verbose: bool=False):
        self.width = width[0]
        super(Switch, self).__init__(page, records, html_code=html_code, options=options, profile=profile, css_attrs={'width': width, 'height': height, 'color': color}, verbose=verbose)
        self.style.add_classes.radio.switch_checked()
        # Registered on/off callbacks consumed by toggle()/__str__().
        self._clicks = {'on': [], 'off': [], 'profile': False}
        is_on = options.get('is_on', False)
        # Hidden checkbox carrying the actual on/off state.
        self.checkbox = page.ui.inputs.checkbox(is_on, width=(None, '%'))
        self.checkbox.style.add_classes.radio.switch_checkbox()
        self.checkbox.options.managed = False
        if is_on:
            self.checkbox.attr['checked'] = is_on
        # The visual slider label.
        self.switch_label = page.ui.texts.label(page.entities.non_breaking_space)
        self.switch_label.style.clear_all(no_default=True)
        self.switch_label.style.css.display = 'inline-block'
        self.switch_label.style.css.width = ('%spx' % htmlDefaults.INPUTS_TOGGLE_WIDTH)
        self.switch_label.style.add_classes.radio.switch_label()
        self.switch_label.options.managed = False
        self.switch_label.style.css.line_height = int((self.page.body.style.globals.line_height / 2))
        # Text reflecting the current state ('on'/'off' value from records).
        self.switch_text = page.ui.tags.p((self.val['on'] if is_on else self.val['off']))
        self.switch_text.css({'display': 'inline-block', 'margin-left': '3px', 'font-weight': 'bold', 'margin-top': 0})
        self.switch_text.tooltip(self.val.get('text', ''))
        self.switch_text.style.css.font_size = (int((self.page.body.style.globals.line_height / 2)) + 2)
        self.switch_text.options.managed = False
        self.switch_text.style.css.line_height = (int((self.page.body.style.globals.line_height / 2)) + 2)
        # DOM handle of the clickable <label> element.
        self.switch = self.dom.querySelector('label')
    def on(self):
        # Getter: text shown in the "on" state.
        return self._vals['on']
    def on(self, value: str):
        # Setter: also refresh the visible text if currently switched on.
        self._vals['on'] = value
        if (self.checkbox.attr.get('checked') is not None):
            self.switch_text._vals = value
    def off(self):
        # Getter: text shown in the "off" state.
        return self._vals['off']
    def off(self, value: str):
        # Setter: also refresh the visible text if currently switched off.
        self._vals['off'] = value
        if (self.checkbox.attr.get('checked') is None):
            self.switch_text._vals = value
    def dom(self) -> JsHtmlSelect.JsHtmlSwitch:
        """Lazily-created DOM helper for this switch."""
        if (self._dom is None):
            self._dom = JsHtmlSelect.JsHtmlSwitch(self, page=self.page)
        return self._dom
    def js(self) -> JsComponents.Switch:
        """Lazily-created JS API for this switch."""
        if (self._js is None):
            self._js = JsComponents.Switch(self, page=self.page)
        return self._js
    def event_fnc(self, event: str):
        """Return the JS fragments registered for *event* on the label."""
        return list(self._browser_data['mouse'][event][self.switch.toStr()]['content'])
    def click(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=None, source_event: str=None, on_ready: bool=False):
        """Attach a click handler; optionally trigger it once on page load."""
        if on_ready:
            self.page.body.onReady([self.dom.events.trigger('click')])
        return self.on('click', js_funcs, profile, self.switch.toStr())
    def toggle(self, on_funcs: types.JS_FUNCS_TYPES=None, off_funcs: types.JS_FUNCS_TYPES=None, profile: types.PROFILE_TYPE=None, on_ready: bool=False):
        """Register callbacks fired when the switch turns on / off."""
        self._clicks['profile'] = profile
        if (on_funcs is not None):
            if (not isinstance(on_funcs, list)):
                on_funcs = [on_funcs]
            self._clicks['on'].extend(on_funcs)
        if (off_funcs is not None):
            if (not isinstance(off_funcs, list)):
                off_funcs = [off_funcs]
            self._clicks['off'].extend(off_funcs)
    def __str__(self):
        # Expose on/off texts to JS, then wire the click handler that flips
        # the checkbox state, swaps the visible text and runs callbacks.
        self.page.properties.js.add_builders(('var %s_data = %s' % (self.htmlCode, self._vals)))
        self.page.properties.js.add_builders(self.switch.onclick(("\n      var input_check = this.parentNode.querySelector('input');\n      if(input_check.checked){\n        %(clickOn)s; this.parentNode.querySelector('p').innerHTML = %(htmlCode)s_data.off; \n        input_check.checked = false}\n      else {\n        %(clickOff)s; input_check.checked = true; \n        this.parentNode.querySelector('p').innerHTML = %(htmlCode)s_data.on}\n      " % {'clickOn': JsUtils.jsConvertFncs(self._clicks['off'], toStr=True, profile=self._clicks['profile']), 'htmlCode': self.htmlCode, 'clickOff': JsUtils.jsConvertFncs(self._clicks['on'], toStr=True, profile=self._clicks['profile'])})).toStr())
        return ('\n      <div %s>%s %s %s</div>' % (self.get_attrs(css_class_names=self.style.get_classes()), self.checkbox.html(), self.switch_label.html(), self.switch_text.html()))
class BallGroup(StrEnum):
    """Snooker-style ball grouping: reds vs colors.

    NOTE(review): ``balls`` reads like a stripped ``@property`` and ``get``
    like a stripped ``@classmethod`` — confirm upstream; as written, ``get``
    must be called with the class as its first argument.
    """
    REDS = auto()
    COLORS = auto()
    def balls(self) -> Tuple[(str, ...)]:
        # All ball names belonging to this group.
        return _group_to_balls_dict[self]
    def get(cls, balls: Tuple[(str, ...)]) -> BallGroup:
        """Map a tuple of ball names to its group, or raise ValueError."""
        # Exact whole-group match first.
        if (balls in _group_to_balls_dict):
            return _balls_to_group_dict[balls]
        # Otherwise accept any subset entirely within one group.
        if all(((ball in _group_to_balls_dict[cls.COLORS]) for ball in balls)):
            return cls.COLORS
        if all(((ball in _group_to_balls_dict[cls.REDS]) for ball in balls)):
            return cls.REDS
        raise ValueError(f'Cannot match {balls} to a BallGroup')
class TestActive(util.TestCase):
    """`:active` must match nothing during static parsing (there is no user
    interaction in a parsed document)."""
    def test_active(self):
        # Markup intentionally contains a malformed/unclosed <a href> tag.
        markup = '\n    <div>\n    <p>Some text <span id="1" class="foo:bar:foobar"> in a paragraph</span>.\n    <a id="2" class="bar" href=" <a id="3">Placeholder text.</a>\n    </p>\n    </div>\n    '
        # Expect zero matches for a:active.
        self.assert_selector(markup, 'a:active', [], flags=util.HTML)
class OptionSeriesScatter3dOnpoint(Options):
    """Option wrapper for the Highcharts `series.scatter3d.onPoint`
    configuration subtree.

    NOTE(review): the `id` getter/setter pair has no visible decorators, so
    the setter shadows the getter — upstream generators normally emit
    `@property` / `@id.setter`; confirm whether decorators were stripped.
    """
    def connectorOptions(self) -> 'OptionSeriesScatter3dOnpointConnectoroptions':
        # Nested sub-options object (lazily created on first access).
        return self._config_sub_data('connectorOptions', OptionSeriesScatter3dOnpointConnectoroptions)
    def id(self):
        # Getter: no default value configured.
        return self._config_get(None)
    def id(self, text: str):
        self._config(text, js_type=False)
    def position(self) -> 'OptionSeriesScatter3dOnpointPosition':
        # Nested sub-options object (lazily created on first access).
        return self._config_sub_data('position', OptionSeriesScatter3dOnpointPosition)
def _extract_base_type(typed):
    """Render *typed* as a readable type name, recursing into the first
    generic argument (e.g. List[int] -> 'List[int]').

    Variadic generic aliases are rendered without their arguments.
    """
    name = _get_name_py_version(typed=typed)
    parameterized = hasattr(typed, '__args__') and not isinstance(typed, _SpockVariadicGenericAlias)
    if not parameterized:
        return name
    inner = _extract_base_type(typed.__args__[0])
    return f'{name}[{inner}]'
def ExportModel(sess, model_dir, input, output, assets):
    """Export a trained TF1 session as a regression SavedModel plus debug artifacts.

    Args:
        sess: the live tf.Session holding the trained variables.
        model_dir: output directory (wiped first if it already exists).
        input/output: tensors forming the regression signature.
        assets: assets collection passed through to the exporter.

    Side effects: writes the exported model, a TensorBoard graph summary,
    and a text dump of the graph to ``<model_dir>/graph.pbtxt``.
    """
    if os.path.isdir(model_dir):
        shutil.rmtree(model_dir)
    logging.info('Exporting trained model to %s', model_dir)
    saver = tf.train.Saver()
    model_exporter = exporter.Exporter(saver)
    signature = exporter.regression_signature(input_tensor=input, output_tensor=output)
    model_exporter.init(sess.graph.as_graph_def(), default_graph_signature=signature, assets_collection=assets)
    model_exporter.export(model_dir, tf.constant(1), sess)
    writer = tf.train.SummaryWriter(model_dir, sess.graph)
    writer.flush()
    # BUG FIX: the original `((print >> f1), str(...))` is a leftover
    # Python 2 chevron-print that is a TypeError in Python 3 (and the file
    # was never closed). Write the graph text explicitly.
    with open((model_dir + '/graph.pbtxt'), 'w+') as f1:
        f1.write(str(tf.get_default_graph().as_graph_def()))
def add_arguments_to_parser(parser: argparse.ArgumentParser):
    """Register the splitter CLI's arguments on *parser*.

    The positional `config` accepts one or more YAML paths; the remaining
    flags toggle logging verbosity, segment caching, the disassembler
    version check, output routing and disassembly scope.
    """
    parser.add_argument('config', help='path to a compatible config .yaml file', nargs='+')
    parser.add_argument('--modes', nargs='+', default='all')
    parser.add_argument('--verbose', action='store_true', help='Enable debug logging')
    parser.add_argument('--use-cache', action='store_true', help='Only split changed segments in config')
    parser.add_argument('--skip-version-check', action='store_true', help="Skips the disassembler's version check")
    parser.add_argument('--stdout-only', help='Print all output to stdout', action='store_true')
    # Typo fix in user-facing help text: "Disasemble" -> "Disassemble".
    parser.add_argument('--disassemble-all', help='Disassemble matched functions and migrated data', action='store_true')
('delete')
('--username', '-u', help='The username of the user.')
('--force', '-f', default=False, is_flag=True, help='Removes the user without asking for confirmation.')
def delete_user(username, force):
    """Delete a user by username, prompting for name/confirmation as needed.

    NOTE(review): the bare tuples above read like stripped click
    ``@users.command('delete')`` / ``@click.option`` decorators — confirm
    upstream.
    """
    # Prompt for the username if it was not supplied on the command line,
    # defaulting to the current OS user.
    if (not username):
        username = click.prompt(click.style('Username', fg='magenta'), type=str, default=os.environ.get('USER', ''))
    user = User.query.filter_by(username=username).first()
    if (user is None):
        raise FlaskBBCLIError('The user with username {} does not exist.'.format(username), fg='red')
    # Unless forced, ask for confirmation; abort quietly on "no".
    if ((not force) and (not click.confirm(click.style('Are you sure?', fg='magenta')))):
        sys.exit(0)
    user.delete()
    click.secho('[+] User {} deleted.'.format(user.username), fg='cyan')
class TestJoinImageName(unittest.TestCase):
    """Tests for join_image_name(category, filename).

    NOTE(review): the expected values look asymmetric — '.jpg' is kept,
    '.png' is stripped, and a double extension keeps '.png'? Confirm the
    intended extension-stripping contract against join_image_name itself.
    """
    def test_simple(self):
        # A .jpg filename keeps its extension in the joined name.
        image_name = join_image_name('icons', 'red_ball.jpg')
        self.assertEqual(image_name, ':red_ball.jpg')
    def test_extension(self):
        # A .png filename has its extension stripped.
        image_name = join_image_name('icons', 'red_ball.png')
        self.assertEqual(image_name, ':red_ball')
    def test_double_extension(self):
        # A double extension ending in .png is kept whole.
        image_name = join_image_name('icons', 'red_ball.foo.png')
        self.assertEqual(image_name, ':red_ball.foo.png')
def download_s3(bucket: str, obj: str, temp_path: str, s3_session: BaseClient, download_config: S3DownloadConfig) -> str:
    """Download an S3 object to *temp_path* with a console progress bar.

    Args:
        bucket/obj: S3 bucket and object key.
        temp_path: local destination path.
        s3_session: boto3 low-level client.
        download_config: attrs-based config; non-None fields are forwarded
            as extra head_object/download options.

    Returns:
        temp_path on success. On IOError the failure is printed and None is
        returned implicitly (behavior preserved from the original; callers
        should treat a falsy result as failure).
    """
    try:
        extra_options = {k: v for (k, v) in attr.asdict(download_config).items() if (v is not None)}
        file_size = s3_session.head_object(Bucket=bucket, Key=obj, **extra_options)['ContentLength']
        print(f'Attempting to download s3://{bucket}/{obj} (size: {size(file_size)})')
        current_progress = 0
        n_ticks = 50

        def _s3_progress_bar(chunk):
            nonlocal current_progress
            current_progress += chunk
            fraction = current_progress / file_size
            done = int(n_ticks * fraction)
            # BUG FIX: the original computed `int(fraction) * 100`, which
            # truncates the ratio to 0 (or 1) before scaling, so the bar
            # showed 0% until completion. Scale first, then truncate.
            sys.stdout.write('\n[%s%s] %d%%' % (('=' * done), (' ' * (n_ticks - done)), int(fraction * 100)))
            sys.stdout.flush()
        sys.stdout.write('\n\n')
        s3_session.download_file(bucket, obj, temp_path, Callback=_s3_progress_bar, ExtraArgs=extra_options)
        return temp_path
    except IOError:
        print(f'Failed to download file from S3 (bucket: {bucket}, object: {obj}) and write to {temp_path}')
def extractRandnovelstlsamatchateaWordpressCom(item):
    """Parse a release message from a randnovelstlsamatchatea.wordpress.com
    feed item.

    Returns None for previews or titles without chapter/volume numbering,
    a release message for recognized tags, or False when no tag matches.
    """
    title = item['title']
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(title)
    has_numbering = bool(chp or vol)
    if (not has_numbering) or ('preview' in title.lower()):
        return None
    # (tag in feed, series name, translation type)
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for (tagname, name, tl_type) in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_llm_logs_default_null_handler(nlp: Language, capsys: pytest.CaptureFixture):
    """spacy_llm's logger must be silent by default, log prompts/responses at
    DEBUG once a stream handler is attached, and fall silent again after the
    handler is removed."""
    # Phase 1: no handler attached -> nothing on stdout or stderr.
    nlp('This is a test')
    captured = capsys.readouterr()
    assert (captured.out == '')
    assert (captured.err == '')
    # Phase 2: attach a DEBUG stream handler -> prompt and response logged.
    stream_handler = logging.StreamHandler(sys.stdout)
    spacy_llm.logger.addHandler(stream_handler)
    spacy_llm.logger.setLevel(logging.DEBUG)
    doc = nlp('This is a test')
    captured = capsys.readouterr()
    assert (f'Generated prompt for doc: {doc.text}' in captured.out)
    assert ("Don't do anything" in captured.out)
    assert (f'LLM response for doc: {doc.text}' in captured.out)
    # Phase 3: remove the handler -> silent again.
    spacy_llm.logger.removeHandler(stream_handler)
    doc = nlp('This is a test with no handler')
    captured = capsys.readouterr()
    assert (f'Generated prompt for doc: {doc.text}' not in captured.out)
    assert ("Don't do anything" not in captured.out)
    assert (f'LLM response for doc: {doc.text}' not in captured.out)
class SimpleVizGroup(lg.Group):
    """LabGraph group fanning one heat-map input topic out to two views."""
    INPUT = lg.Topic(HeatMapMessage)
    # Child nodes wired together in setup()/connections().
    HEATMAP: HeatMap
    COLOR_MAP: ColorMap
    WINDOW: Window
    def setup(self) -> None:
        # Both views render the same 32x32 data/channel-map fields and are
        # driven by an external timer rather than their own.
        self.HEATMAP.configure(HeatMapConfig(data='data', channel_map='channel_map', shape=(32, 32), external_timer=True))
        self.COLOR_MAP.configure(ColorMapConfig(data='data', channel_map='channel_map', shape=(32, 32), external_timer=True))
        # Hand the configured visualization nodes to the window for display.
        self.WINDOW.HEATMAP = self.HEATMAP
        self.WINDOW.COLOR_MAP = self.COLOR_MAP
    def connections(self) -> lg.Connections:
        # Fan the single input out to both visualizations.
        return ((self.INPUT, self.HEATMAP.INPUT), (self.INPUT, self.COLOR_MAP.INPUT))
class OptionSeriesAreasplineSonificationDefaultinstrumentoptionsMappingTremoloSpeed(Options):
    """Option wrapper for the Highcharts
    `series.areaspline.sonification.defaultInstrumentOptions.mapping.tremolo.speed`
    configuration subtree.

    NOTE(review): each getter/setter pair below shares one name with no
    decorators visible, so the setter shadows the getter — upstream
    generators normally emit `@property` / `@<name>.setter`; confirm whether
    decorators were stripped from this copy.
    """

    def mapFunction(self):
        # Getter: no default value configured.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stored as a plain (non-JavaScript) value.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: no default value configured.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Getter: no default value configured.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Getter: no default value configured.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Getter: no default value configured.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def check_configs(fips_dir, proj_dir):
    """Validate every build config of the project and print the results.

    :param fips_dir: absolute path of the fips directory
    :param proj_dir: absolute path of the project directory
    """
    log.colored(log.YELLOW, '=== configs:')
    num_errors = 0
    for cfg in config.load(fips_dir, proj_dir, '*'):
        log.colored(log.BLUE, cfg['name'])
        valid, errors = config.check_config_valid(fips_dir, proj_dir, cfg)
        if valid:
            log.colored(log.GREEN, ' ok')
            continue
        # One problem counter tick per invalid config (not per error line).
        for error in errors:
            log.error(' {}'.format(error), False)
        num_errors += 1
    if num_errors:
        log.colored(log.RED, '\n{} build config problem(s) found.\n'.format(num_errors))
    else:
        log.colored(log.GREEN, '\n Build configs all ok.\n')
class DispatchProxyServiceStub(object):
    """gRPC client stub for DispatchProxyService.

    One callable attribute is bound per service method; the four attribute
    methods are unary-unary, the event channel is stream-stream.
    """

    def __init__(self, channel):
        """Bind the service methods on *channel*.

        :param channel: a grpc.Channel
        """
        # Short alias for the generated protobuf module to keep lines readable.
        pb2 = koapy_dot_common_dot_DispatchProxyService__pb2
        self.GetDispatch = channel.unary_unary(
            '/DispatchProxyService/GetDispatch',
            request_serializer=pb2.GetDispatchRequest.SerializeToString,
            response_deserializer=pb2.GetDispatchResponse.FromString)
        self.GetAttr = channel.unary_unary(
            '/DispatchProxyService/GetAttr',
            request_serializer=pb2.GetAttrRequest.SerializeToString,
            response_deserializer=pb2.GetAttrResponse.FromString)
        self.SetAttr = channel.unary_unary(
            '/DispatchProxyService/SetAttr',
            request_serializer=pb2.SetAttrRequest.SerializeToString,
            response_deserializer=pb2.SetAttrResponse.FromString)
        self.CallMethod = channel.unary_unary(
            '/DispatchProxyService/CallMethod',
            request_serializer=pb2.CallMethodRequest.SerializeToString,
            response_deserializer=pb2.CallMethodResponse.FromString)
        self.ConnectEvent = channel.stream_stream(
            '/DispatchProxyService/ConnectEvent',
            request_serializer=pb2.ConnectEventRequest.SerializeToString,
            response_deserializer=pb2.ConnectEventResponse.FromString)
def test_task_set_ulimits_exclusively(task_definition):
    """set_ulimits(..., exclusive=True) must replace the existing ulimits,
    not merge with them."""
    before = task_definition.containers[0]['ulimits']
    assert len(before) == 1
    assert before[0]['name'] == 'memlock'
    task_definition.set_ulimits(((u'webserver', u'cpu', 80, 85),), exclusive=True)
    # Re-read: the container's ulimit list may have been replaced wholesale.
    after = task_definition.containers[0]['ulimits']
    assert len(after) == 1
    assert after[0]['name'] == 'cpu'
    assert {'name': 'cpu', 'softLimit': 80, 'hardLimit': 85} in after
def html_template_loader(file_path: str, values: Optional[dict]=None, new_var_format: Optional[str]=None, ref_expr: Optional[str]=None, directives: Optional[dict]=None) -> Optional[dict]:
    """Load an HTML template file and pass its content to ``html_formatter``.

    :param file_path: path of the HTML template file
    :param values: optional substitution values (mutually exclusive with
        ``new_var_format``)
    :param new_var_format: optional variable-format override
    :param ref_expr: optional reference expression forwarded to the formatter
    :param directives: optional directives forwarded to the formatter
    :return: the ``html_formatter`` result, or ``None`` when ``file_path``
        does not exist (preserved contract; return annotation fixed to
        ``Optional[dict]`` accordingly)
    :raises ValueError: when both ``values`` and ``new_var_format`` are given
    """
    if (values is not None) and (new_var_format is not None):
        # Fixed message: the parameter is named ``new_var_format``,
        # not ``var_format``.
        raise ValueError('Both values and new_var_format cannot be defined')
    html_path = Path(file_path)
    if not html_path.exists():
        return None
    with open(html_path) as hf:
        return html_formatter(hf.read(), values=values, new_var_format=new_var_format, ref_expr=ref_expr, directives=directives)
class OptionSeriesPolygonSonificationContexttracksMappingHighpassResonance(Options):
    """Mapping options for the highpass-filter resonance of a context track
    (Highcharts sonification).

    Fix: each getter/setter pair was declared as two plain ``def``s with the
    same name, so the setter shadowed the getter.  Restored the
    ``@property`` / ``@x.setter`` pairing.
    """

    @property
    def mapFunction(self):
        """Mapping function for the audio parameter value, if any."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data point property the audio parameter is mapped to."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound for the mapped audio parameter."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound for the mapped audio parameter."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Data scope the mapping min/max are computed within."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class FormatPTests(unittest.TestCase):
    """Tests for util.formatp: bracketed groups are dropped when every field
    inside them is falsy, and '\\[' / '\\]' escape literal brackets."""

    def test_escaping(self):
        """Escaped brackets are emitted literally."""
        assert (util.formatp('[razamba \\[ mabe \\]]') == 'razamba [ mabe ]')

    def test_numerical(self):
        """Numeric zero counts as falsy and drops its group."""
        assert (util.formatp('[{t} - [schmuh {x}]]', t=1, x=2) == '1 - schmuh 2')
        assert (util.formatp('[{t} - [schmuh {x}]]', t=1, x=0) == '1 - ')
        assert (util.formatp('[{t} - [schmuh {x}]]', t=0, x=0) == '')

    def test_nesting(self):
        """Nested groups collapse independently from the inside out."""
        s = '[[{artist} - ]{album} - ]{title}'
        assert (util.formatp(s, title='Black rose') == 'Black rose')
        assert (util.formatp(s, artist='In Flames', title='Gyroscope') == 'Gyroscope')
        assert (util.formatp(s, artist='SOAD', album='Toxicity', title='Science') == 'SOAD - Toxicity - Science')
        assert (util.formatp(s, album='Toxicity', title='Science') == 'Toxicity - Science')

    def test_bare(self):
        """Plain format fields work as in str.format."""
        assert (util.formatp('{foo} blar', foo='bar') == 'bar blar')

    def test_presuffix(self):
        """Text outside groups survives even when groups are dropped."""
        assert (util.formatp('ALINA[{title} schnacke]KOMMAHER', title='') == 'ALINAKOMMAHER')
        assert (util.formatp('grml[{title}]') == 'grml')
        assert (util.formatp('[{t}]grml') == 'grml')

    def test_side_by_side(self):
        """Adjacent groups are evaluated independently."""
        s = '{status} [{artist} / [{album} / ]]{title}[ {song_elapsed}/{song_length}]'
        assert (util.formatp(s, status='', title='Only For The Weak', song_elapsed='1:41', song_length='4:55') == ' Only For The Weak 1:41/4:55')
        assert (util.formatp(s, status='', album='Foo', title='Die, Die, Crucified', song_elapsed='2:52') == ' Die, Die, Crucified')
        assert (util.formatp('[[{a}][{b}]]', b=1) == '1')

    def test_complex_field(self):
        """Format specs and attribute access work inside groups."""
        class NS():
            pass
        obj = NS()
        obj.attr = 'bar'
        s = '[{a:.3f} m]{obj.attr}'
        # Fix: the float literal had been truncated to `3.`, which formats as
        # '3.000' and contradicts the expected '3.141 mbar'.
        assert (util.formatp(s, a=3.141, obj=obj) == '3.141 mbar')
        assert (util.formatp(s, a=0.0, obj=obj) == 'bar')
()
('username')
def dump_user(username):
    """Pretty-print all stored data of the user named *username*.

    Returns 1 (error exit code) when no such user exists; otherwise prints
    the JSON dump and returns None.
    """
    user = models.User.query.filter(models.User.username == username).first()
    if user is None:
        print('There is no user named {0}.'.format(username))
        return 1
    print(users_logic.UserDataDumper(user).dumps(pretty=True))
class BackupProcess(FledgeProcess):
    """Fledge process that creates a backup of the SQLite storage database.

    The backup is a .tar.gz archive holding the database snapshot, the data
    scripts/etc directories and a software.json manifest of installed
    plugins/services.  A semaphore file (``lib.Job``) prevents concurrent
    backup/restore executions.
    """
    # Name used when configuring the module logger.
    _MODULE_NAME = 'fledge_backup_sqlite_process'
    # Prefix of generated backup file names; a timestamp is appended.
    _BACKUP_FILE_NAME_PREFIX = 'fledge_backup_'
    # Catalogue of informational/error messages, keyed by message code.
    _MESSAGES_LIST = {'i000001': 'Execution started.', 'i000002': 'Execution completed.', 'e000000': 'general error', 'e000001': 'cannot initialize the logger - error details |{0}|', 'e000002': 'cannot retrieve the configuration from the manager, trying retrieving from file - error details |{0}|', 'e000003': 'cannot retrieve the configuration from file - error details |{0}|', 'e000004': '...', 'e000005': '...', 'e000006': '...', 'e000007': 'backup failed.', 'e000008': 'cannot execute the backup, either a backup or a restore is already running - pid |{0}|', 'e000009': '...', 'e000010': "directory used to store backups doesn't exist - dir |{0}|", 'e000011': "directory used to store semaphores for backup/restore synchronization doesn't exist - dir |{0}|", 'e000012': 'cannot create the configuration cache file, neither FLEDGE_DATA nor FLEDGE_ROOT are defined.', 'e000013': 'cannot create the configuration cache file, provided path is not a directory - dir |{0}|', 'e000014': "the identified path of backups doesn't exists, creation was tried - dir |{0}| - error details |{1}|", 'e000015': 'The command is not available neither using the unmanaged approach - command |{0}|', 'e000019': 'The command is not available using the managed approach - command |{0}|'}
    # Class-level logger, set up lazily on first instantiation.
    _logger = None

    def __init__(self):
        """Set up logging, backup helpers and the job-semaphore handler."""
        super().__init__()
        if (not self._logger):
            self._logger = logger.setup(self._MODULE_NAME, destination=_LOGGER_DESTINATION, level=_LOGGER_LEVEL)
        self._backup = Backup(self._storage_async)
        self._backup_lib = lib.BackupRestoreLib(self._storage_async, self._logger)
        self._job = lib.Job()
        # Share logger/storage with the helper module's globals.
        lib._logger = self._logger
        lib._storage = self._storage_async

    def _generate_file_name(self):
        """Return a timestamped .db path for a new backup file."""
        self._logger.debug('{func}'.format(func='_generate_file_name'))
        execution_time = time.strftime('%Y_%m_%d_%H_%M_%S')
        full_file_name = (((self._backup_lib.dir_backups + '/') + self._BACKUP_FILE_NAME_PREFIX) + execution_time)
        ext = 'db'
        _backup_file = '{file}.{ext}'.format(file=full_file_name, ext=ext)
        return _backup_file

    def check_for_execution_backup(self):
        """Pre-backup check hook.

        NOTE(review): the body is empty in this source — this docstring also
        serves as the required statement so the class stays syntactically
        valid.  Confirm against the original implementation whether real
        checks belong here.
        """

    def init(self):
        """Validate paths/configuration and acquire the backup job semaphore.

        :raises exceptions.BackupOrRestoreAlreadyRunning: when another
            backup/restore process holds the semaphore.
        """
        self._logger.debug('{func}'.format(func='init'))
        self._backup_lib.evaluate_paths()
        self._backup_lib.retrieve_configuration()
        self.check_for_execution_backup()
        pid = self._job.is_running()
        if (pid == 0):
            # No other job running: register ourselves as the running job.
            pid = os.getpid()
            self._job.set_as_running(self._backup_lib.JOB_SEM_FILE_BACKUP, pid)
        else:
            _message = self._MESSAGES_LIST['e000008'].format(pid)
            self._logger.warning('{0}'.format(_message))
            raise exceptions.BackupOrRestoreAlreadyRunning

    def execute_backup(self):
        """Run the full backup: purge old backups, snapshot the database,
        archive it with scripts/etc/software manifest, and record the status.

        :raises exceptions.BackupFailed: when the backup command did not
            complete successfully.
        """
        self._logger.debug('{func}'.format(func='execute_backup'))
        self._purge_old_backups()
        backup_file = self._generate_file_name()
        (backup_file_tar_base, dummy) = os.path.splitext(backup_file)
        backup_file_tar = (backup_file_tar_base + '.tar.gz')
        self._logger.debug('execute_backup - backup_file :{}: backup_file_tar :{}: -'.format(backup_file, backup_file_tar))
        # Register the backup as RUNNING before starting the actual work.
        self._backup_lib.sl_backup_status_create(backup_file_tar, lib.BackupType.FULL, lib.BackupStatus.RUNNING)
        (status, exit_code) = self._run_backup_command(backup_file)
        # Assemble the archive: db snapshot + optional scripts dir + etc dir
        # + software.json manifest.
        t = tarfile.open(backup_file_tar, 'w:gz')
        t.add(backup_file, arcname=os.path.basename(backup_file))
        backup_path = (self._backup_lib.dir_fledge_data + '/scripts')
        if os.path.isdir(backup_path):
            t.add(backup_path, arcname=os.path.basename(backup_path))
        t.add(self._backup_lib.dir_fledge_data_etc, arcname=os.path.basename(self._backup_lib.dir_fledge_data_etc))
        data = {'plugins': PluginDiscovery.get_plugins_installed(), 'services': get_service_installed()}
        temp_software_file = '{}/software.json'.format(self._backup_lib.dir_backups)
        with open(temp_software_file, 'w') as outfile:
            json.dump(data, outfile, indent=4)
        t.add(temp_software_file, arcname=os.path.basename(temp_software_file))
        t.close()
        # The raw snapshot and the manifest are only needed inside the tar.
        os.remove(backup_file)
        os.remove(temp_software_file)
        backup_information = self._backup_lib.sl_get_backup_details_from_file_name(backup_file_tar)
        self._backup_lib.sl_backup_status_update(backup_information['id'], status, exit_code)
        audit = AuditLogger(self._storage_async)
        loop = asyncio.get_event_loop()
        if (status != lib.BackupStatus.COMPLETED):
            self._logger.error(self._MESSAGES_LIST['e000007'])
            loop.run_until_complete(audit.information('BKEXC', {'status': 'failed'}))
            raise exceptions.BackupFailed
        else:
            loop.run_until_complete(audit.information('BKEXC', {'status': 'completed'}))

    def _purge_old_backups(self):
        """Delete the oldest backups so at most ``retention - 1`` remain
        before the new one is created."""
        backups_info = asyncio.get_event_loop().run_until_complete(self._backup.get_all_backups(self._backup_lib.MAX_NUMBER_OF_BACKUPS_TO_RETRIEVE, 0, None, lib.SortOrder.ASC))
        backups_n = len(backups_info)
        # Keep retention-1 existing backups: the one about to be created
        # completes the configured retention count.
        last_to_delete = (backups_n - (self._backup_lib.config['retention'] - 1))
        if (last_to_delete > 0):
            # Ascending order: the head of the list holds the oldest backups.
            backups_to_delete = backups_info[:last_to_delete]
            for row in backups_to_delete:
                backup_id = row['id']
                file_name = row['file_name']
                self._logger.debug('{func} - id |{id}| - file_name |{file}|'.format(func='_purge_old_backups', id=backup_id, file=file_name))
                asyncio.get_event_loop().run_until_complete(self._backup.delete_backup(backup_id))

    def _run_backup_command(self, _backup_file):
        """Checkpoint the WAL, then snapshot the database to *_backup_file*.

        :return: tuple ``(status, exit_code)`` where status is a
            ``lib.BackupStatus`` value derived from the exit code.
        """
        self._logger.debug('{func} - file_name |{file}|'.format(func='_run_backup_command', file=_backup_file))
        # Flush the write-ahead log into the main db file first.
        cmd = "{sqlite_cmd} {path}/{db} 'PRAGMA wal_checkpoint(PASSIVE);'".format(sqlite_cmd=self._backup_lib.SQLITE_SQLITE, path=self._backup_lib.dir_fledge_data, db=self._backup_lib.config['database-filename'])
        (_exit_code, output) = lib.exec_wait_retry(cmd, output_capture=True, exit_code_ok=0, max_retry=self._backup_lib.config['max_retry'], timeout=self._backup_lib.config['timeout'])
        cmd = "{sqlite_cmd} {path}/{db} '{backup_cmd} {file}'".format(sqlite_cmd=self._backup_lib.SQLITE_SQLITE, path=self._backup_lib.dir_fledge_data, db=self._backup_lib.config['database-filename'], backup_cmd=self._backup_lib.SQLITE_BACKUP, file=_backup_file)
        (_exit_code, output) = lib.exec_wait_retry(cmd, output_capture=True, exit_code_ok=0, max_retry=self._backup_lib.config['max_retry'], timeout=self._backup_lib.config['timeout'])
        if (_exit_code == 0):
            _status = lib.BackupStatus.COMPLETED
        else:
            _status = lib.BackupStatus.FAILED
        self._logger.debug('{func} - status |{status}| - exit_code |{exit_code}| - cmd |{cmd}| output |{output}| '.format(func='_run_backup_command', status=_status, exit_code=_exit_code, cmd=cmd, output=output))
        return (_status, _exit_code)

    def shutdown(self):
        """Release the backup job semaphore."""
        self._logger.debug('{func}'.format(func='shutdown'))
        self._job.set_as_completed(self._backup_lib.JOB_SEM_FILE_BACKUP)

    def run(self):
        """Entry point: init, execute the backup, always release the semaphore.

        :raises exceptions.RestoreFailed: wrapping any error raised during
            the backup execution.
        """
        self.init()
        try:
            self.execute_backup()
        except Exception as _ex:
            # NOTE(review): `_MESSAGES_LIST` and `_logger` are referenced as
            # bare names here, not via self — presumably module-level globals;
            # confirm they exist in the enclosing module.
            _message = _MESSAGES_LIST['e000002'].format(_ex)
            _logger.error(_message)
            self.shutdown()
            raise exceptions.RestoreFailed(_message)
        else:
            self.shutdown()
def validate_fides_key_suitability(names: ResultProxy, table_name: str) -> None:
    """Verify every name in *names* can be normalized into a valid FidesKey.

    Each name has surrounding spaces stripped and inner spaces replaced with
    underscores before validation; the first invalid name aborts the
    migration with an explanatory error.
    """
    for row in names:
        name: str = row['name'].strip(' ').replace(' ', '_')
        try:
            FidesKey.validate(name)
        except FidesValidationError as exc:
            message = f"Cannot auto-migrate, adjust existing {table_name} name: '{name}' to remove invalid characters: {exc}."
            raise Exception(message)
def test_providers_with_default_value(config):
    """Nested defaults are retrievable at every traversal depth of the
    configuration provider tree."""
    config.set_default({'a': {'b': {'c': 1, 'd': 2}}})
    assert config.a() == {'b': {'c': 1, 'd': 2}}
    assert config.a.b() == {'c': 1, 'd': 2}
    assert config.a.b.c() == 1
    assert config.a.b.d() == 2
def load_regulations():
    """Fetch all regulation versions from the eRegs API and index each
    section into the AO Elasticsearch alias.

    Does nothing (with an error log) when the index alias is missing or the
    FEC_EREGS_API environment variable is unset.
    """
    es_client = create_es_client()
    # Guard: without the alias there is nowhere to index into.
    if not es_client.indices.exists(index=AO_ALIAS):
        logger.error(" The index alias '{0}' is not found, cannot load regulations.".format(AO_ALIAS))
        return
    eregs_api = env.get_credential('FEC_EREGS_API', '')
    if not eregs_api:
        logger.error('Regulations could not be loaded, environment variable FEC_EREGS_API not set.')
        return
    logger.info('Uploading regulations...')
    reg_versions = requests.get(eregs_api + 'regulation').json()['versions']
    logger.debug('reg_versions =' + json.dumps(reg_versions, indent=3, cls=DateTimeEncoder))
    regulation_part_count = 0
    document_count = 0
    for reg in reg_versions:
        url = '%sregulation/%s/%s' % (eregs_api, reg['regulation'], reg['version'])
        logger.debug('url=' + url)
        sections = get_sections(requests.get(url).json())
        each_part_document_count = 0
        logger.debug('Loading part %s' % reg['regulation'])
        for section_label, section in sections.items():
            doc_id = '%s_%s' % (section_label[0], section_label[1])
            logger.debug('(%d) doc_id= %s' % (each_part_document_count + 1, doc_id))
            section_formatted = '%s-%s' % (section_label[0], section_label[1])
            reg_url = '/regulations/{0}/{1}#{0}'.format(section_formatted, reg['version'])
            no = '%s.%s' % (section_label[0], section_label[1])
            # The section title begins with the section number; keep only the
            # remainder as the document name.
            name = section['title'].split(no)[1].strip()
            doc = {'type': 'regulations', 'doc_id': doc_id, 'name': name, 'text': section['text'], 'url': reg_url, 'no': no, 'sort1': int(section_label[0]), 'sort2': int(section_label[1])}
            each_part_document_count += 1
            document_count += 1
            es_client.index(AO_ALIAS, doc, id=doc['doc_id'])
        logger.debug('Part %s: %d document(s) are loaded.' % (reg['regulation'], each_part_document_count))
        regulation_part_count += 1
    logger.info('%d Regulation parts with %d documents are loaded.', regulation_part_count, document_count)
def extractWelcometoashfordWordpressCom(item):
    """Parse a feed item from welcometoashford.wordpress.com into a release.

    Returns None for previews or items without chapter/volume info, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag to look for, series name to report, translation type)
    for tagname, name, tl_type in [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
.integration
class TestLoadDefaultTaxonomy():
    """Integration checks for seeding the default taxonomy."""

    async def test_add_to_default_taxonomy(self, monkeypatch: pytest.MonkeyPatch, test_config: FidesConfig, data_category: DataCategory, async_session: AsyncSession) -> None:
        """A category appended to the default taxonomy is created on load."""
        url = test_config.cli.server_url
        # The category must not exist before seeding.
        response = _api.get(url, 'data_category', data_category.fides_key, headers=CONFIG.user.auth_header)
        assert response.status_code == 404
        extended_taxonomy = DEFAULT_TAXONOMY.copy()
        extended_taxonomy.data_category.append(data_category)
        monkeypatch.setattr(seed, 'DEFAULT_TAXONOMY', extended_taxonomy)
        await seed.load_default_resources(async_session)
        response = _api.get(url, 'data_category', data_category.fides_key, headers=CONFIG.user.auth_header)
        assert response.status_code == 200

    async def test_does_not_override_user_changes(self, test_config: FidesConfig, async_session: AsyncSession) -> None:
        """Seeding must not clobber a user's edit to a default entry."""
        edited_category = DEFAULT_TAXONOMY.data_category[0].copy()
        new_description = 'foo description'
        edited_category.description = new_description
        response = _api.update(test_config.cli.server_url, 'data_category', json_resource=edited_category.json(), headers=CONFIG.user.auth_header)
        assert response.status_code == 200
        await seed.load_default_resources(async_session)
        response = _api.get(test_config.cli.server_url, 'data_category', edited_category.fides_key, headers=CONFIG.user.auth_header)
        assert response.json()['description'] == new_description

    async def test_does_not_remove_user_added_taxonomies(self, test_config: FidesConfig, data_category: DataCategory, async_session: AsyncSession) -> None:
        """Seeding must keep taxonomy entries the user added themselves."""
        _api.create(test_config.cli.server_url, 'data_category', json_resource=data_category.json(), headers=CONFIG.user.auth_header)
        await seed.load_default_resources(async_session)
        response = _api.get(test_config.cli.server_url, 'data_category', data_category.fides_key, headers=CONFIG.user.auth_header)
        assert response.status_code == 200
def gen_fill_string(out):
    """Write C test-helper functions (string fill/check, octets populate/check,
    match populate/check) to the *out* stream.

    Fix: the first emitted string literal was split across two physical
    source lines inside a double-quoted string (a SyntaxError); the two
    halves are rejoined into a single literal below.  The emitted C text is
    otherwise unchanged.
    """
    out.write("\n\n/**\n * The increment to use on values inside a string\n */\n#define OF_TEST_STR_INCR 3\n\n/**\n * Fill in a buffer with incrementing values starting\n * at the given offset with the given value\n * buf The buffer to fill\n * value The value to use for data\n * len The number of bytes to fill\n */\n\nvoid\nof_test_str_fill(uint8_t *buf, int value, int len)\n{\n int i;\n\n for (i = 0; i < len; i++) {\n *buf = value;\n value += OF_TEST_STR_INCR;\n buf++;\n }\n}\n\n/**\n * Given a buffer, verify that it's filled as above\n * buf The buffer to check\n * value The value to use for data\n * len The number of bytes to fill\n * Boolean True on equality (success)\n */\n\nint\nof_test_str_check(uint8_t *buf, int value, int len)\n{\n int i;\n uint8_t val8;\n\n val8 = value;\n\n for (i = 0; i < len; i++) {\n if (*buf != val8) {\n return 0;\n }\n val8 += OF_TEST_STR_INCR;\n buf++;\n }\n\n return 1;\n}\n\n/**\n * Global that determines how octets should be populated\n * -1 means use value % MAX (below) to determine length\n * 0, 1, ... means used that fixed length\n *\n * Note: Was 16K, but that made objects too big. May add flexibility\n * to call populate with a max parameter for length\n */\nint octets_pop_style = -1;\n#define OCTETS_MAX_VALUE (64) /* 16K was too big */\n#define OCTETS_MULTIPLIER 6367 /* A prime */\n\nint\nof_octets_populate(of_octets_t *octets, int value)\n{\n if (octets_pop_style < 0) {\n octets->bytes = (value * OCTETS_MULTIPLIER) % OCTETS_MAX_VALUE;\n } else {\n octets->bytes = octets_pop_style;\n }\n\n if (octets->bytes != 0) {\n if ((octets->data = (uint8_t *)MALLOC(octets->bytes)) == NULL) {\n return 0;\n }\n of_test_str_fill(octets->data, value, octets->bytes);\n value += 1;\n }\n\n return value;\n}\n\nint\nof_octets_check(of_octets_t *octets, int value)\n{\n int len;\n\n if (octets_pop_style < 0) {\n len = (value * OCTETS_MULTIPLIER) % OCTETS_MAX_VALUE;\n TEST_ASSERT(octets->bytes == len);\n } else {\n TEST_ASSERT(octets->bytes == octets_pop_style);\n }\n\n if (octets->bytes != 0) {\n TEST_ASSERT(of_test_str_check(octets->data, value, octets->bytes)\n == 1);\n value += 1;\n }\n\n return value;\n}\n\nint\nof_match_populate(of_match_t *match, of_version_t version, int value)\n{\n MEMSET(match, 0, sizeof(*match));\n match->version = version;\n")

    def populate_match_version(wire_version, keys):
        # Emit one `if (version == N)` arm that sets every match key defined
        # for that wire version.
        out.write(('\n if (version == %d) {' % wire_version))
        for key in keys:
            entry = match.of_match_members[key]
            out.write(('\n OF_MATCH_MASK_%(ku)s_EXACT_SET(match);\n VAR_%(u_type)s_INIT(match->fields.%(key)s, value);\n value += 1;\n' % dict(key=key, u_type=entry['m_type'].upper(), ku=key.upper())))
        out.write('\n }\n\n')

    for (wire_version, match_keys) in match.match_keys.items():
        populate_match_version(wire_version, match_keys)
    out.write('\n if (value % 2) {\n /* Sometimes set ipv4 addr masks to non-exact */\n match->masks.ipv4_src = 0xffff0000;\n match->masks.ipv4_dst = 0xfffff800;\n }\n\n /* Restrict values according to masks */\n of_memmask(&match->fields, &match->masks, sizeof(match->fields));\n return value;\n}\n\nint\nof_match_check(of_match_t *match, of_version_t version, int value)\n{\n of_match_t check;\n\n value = of_match_populate(&check, match->version, value);\n TEST_ASSERT(value != 0);\n TEST_ASSERT(MEMCMP(match, &check, sizeof(check)) == 0);\n\n return value;\n}\n')
def test_encode_partial_keywords():
    """top_categories applies only to variables without explicit keywords;
    var_A keeps its user-supplied keyword list."""
    letters = ['A', 'B', 'C', 'D', 'E', 'F', 'G']

    def _column(counts):
        # Build e.g. ['A']*5 + ['B']*11 + ... from a per-letter count list.
        values = []
        for letter, count in zip(letters, counts):
            values.extend([letter] * count)
        return values

    df = pd.DataFrame({
        'var_A': _column([5, 11, 4, 9, 2, 2, 7]),
        'var_B': _column([11, 7, 4, 9, 2, 2, 5]),
        'var_C': _column([4, 5, 11, 9, 2, 2, 7]),
    })
    encoder = StringSimilarityEncoder(top_categories=2, keywords={'var_A': ['XYZ']})
    X = encoder.fit_transform(df)
    assert encoder.top_categories == 2
    assert encoder.variables_ == ['var_A', 'var_B', 'var_C']
    assert encoder.n_features_in_ == 3
    assert encoder.encoder_dict_ == {'var_A': ['XYZ'], 'var_B': ['A', 'D'], 'var_C': ['C', 'D']}
    expected_sums = {'var_A_XYZ': 0, 'var_B_A': 11, 'var_B_D': 9, 'var_C_D': 9, 'var_C_C': 11}
    for col, expected in expected_sums.items():
        assert X[col].sum() == expected
    assert 'var_B' not in X.columns
    assert 'var_B_F' not in X.columns
('bodhi.server.models.tag_update_builds_task', mock.Mock())
('bodhi.server.models.work_on_bugs_task', mock.Mock())
('bodhi.server.models.fetch_test_cases_task', mock.Mock())
class TestUpdateVersionHash(BasePyTestCase):
    """Checks that Update.version_hash is the sha1 of the space-joined,
    sorted list of build NVRs and changes when builds are edited."""

    def test_version_hash(self):
        """version_hash tracks the update's build list before and after an edit."""
        initial_expected_hash = '19504edccbed061be0ba94d973138'
        update = model.Update.query.first()
        assert update.version_hash == initial_expected_hash
        assert len(update.builds) == 1
        nvr_list = ' '.join(sorted(b.nvr for b in update.builds))
        assert nvr_list == 'bodhi-2.0-1.fc17'
        # The stored hash must equal sha1 of the canonical build string.
        assert update.version_hash == hashlib.sha1(str(nvr_list).encode('utf-8')).hexdigest()
        # Add a second build and edit the update through the model API.
        package = model.RpmPackage(name='python-rpdb')
        self.db.add(package)
        build = model.RpmBuild(nvr='python-rpdb-1.3-1.fc17', package=package)
        self.db.add(build)
        update = model.Update.query.first()
        edit_data = {'edited': update.alias, 'builds': [update.builds[0].nvr, build.nvr], 'bugs': []}
        request = mock.MagicMock()
        request.buildinfo = {build.nvr: {'nvr': build._get_n_v_r(), 'info': buildsys.get_session().getBuild(build.nvr)}}
        request.db = self.db
        request.identity.name = 'tester'
        self.db.flush()
        with mock_sends(Message, Message):
            model.Update.edit(request, edit_data)
        updated_expected_hash = 'd89b54971bd761f8b5ee64e8c'
        assert initial_expected_hash != updated_expected_hash
        assert update.version_hash == updated_expected_hash
        assert len(update.builds) == 2
        nvr_list = ' '.join(sorted(b.nvr for b in update.builds))
        assert nvr_list == 'bodhi-2.0-1.fc17 python-rpdb-1.3-1.fc17'
        assert update.version_hash == hashlib.sha1(str(nvr_list).encode('utf-8')).hexdigest()
.parametrize(('global_size', 'flat_local_size', 'expected_local_size'), vals_find_local_size, ids=[str(x[:2]) for x in vals_find_local_size])
def test_find_local_size(global_size, flat_local_size, expected_local_size):
    """find_local_size must honor the requested flat size and match the
    expected decomposition."""
    computed = vsize.find_local_size(global_size, flat_local_size)
    assert product(computed) == flat_local_size
    assert computed == expected_local_size
def get_images(html, url):
    """Extract image URLs from a chapter page by executing the page's
    deobfuscation JavaScript.

    NOTE(review): ``eval`` here runs JS via the project's sandboxed evaluator
    in scope (not the Python builtin) — confirm against the module imports.
    """
    js_url = re.search(r'src="([^"]+base64\.js)"', html).group(1)
    js_content = grabhtml(urljoin(url, js_url))
    data = re.search(r'(var chapterTree=.+?)</script>', html, re.DOTALL).group(1)
    # Optional second obfuscated payload: window["\x65\x76\x61\x6c"](...) i.e. window["eval"].
    m = re.search(r'window\["\\x65\\x76\\x61\\x6c"\](.+?)</script>', html, re.DOTALL)
    data2 = m.group(1) if m else ''
    return eval('\n\tconst document = {{}};\n\t{};\n\t{};\n\teval({});\n\tgetUrlpics().map(getrealurl);\n\t'.format(js_content, data, data2))
class OptionPlotoptionsTreegraphSonificationContexttracksMappingTime(Options):
    """Mapping options for the time parameter of a treegraph context track
    (Highcharts sonification).

    Fix: each getter/setter pair was declared as two plain ``def``s with the
    same name, so the setter shadowed the getter.  Restored the
    ``@property`` / ``@x.setter`` pairing.
    """

    @property
    def mapFunction(self):
        """Mapping function for the audio parameter value, if any."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data point property the audio parameter is mapped to."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound for the mapped audio parameter."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound for the mapped audio parameter."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Data scope the mapping min/max are computed within."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class BigQueryClient(object):
    """Thin wrapper around the BigQuery API repository client, adding rate
    limiting and uniform error translation to ApiExecutionError."""

    def __init__(self, global_configs, **kwargs):
        """Build the repository client from quota/cache settings.

        :param global_configs: dict-like global configuration (quota, cache)
        :param kwargs: ``use_rate_limiter`` (default True)
        """
        max_calls, quota_period = api_helpers.get_ratelimiter_config(global_configs, API_NAME)
        self.repository = BigQueryRepositoryClient(
            quota_max_calls=max_calls,
            quota_period=quota_period,
            use_rate_limiter=kwargs.get('use_rate_limiter', True),
            cache_discovery=global_configs.get('cache_discovery', False),
            cache=global_configs.get('cache'))

    def get_bigquery_projectids(self):
        """Return the ids of all projects visible through the BigQuery API."""
        try:
            raw = self.repository.projects.list(fields='nextPageToken,projects/id')
            projects = api_helpers.flatten_list_results(raw, 'projects')
            LOGGER.debug('Request and page through bigquery projectids, flattened_results = %s', projects)
        except (errors.HttpError, HttpLib2Error) as e:
            raise api_errors.ApiExecutionError('bigquery', e)
        return [entry.get('id') for entry in projects if 'id' in entry]

    def get_datasets_for_projectid(self, project_id):
        """Return all datasets (including hidden ones) of *project_id*."""
        try:
            raw = self.repository.datasets.list(resource=project_id, all=True)
            datasets = api_helpers.flatten_list_results(raw, 'datasets')
            LOGGER.debug('Getting bigquery datasets for a given project, project_id = %s, flattened_results = %s', project_id, datasets)
            return datasets
        except (errors.HttpError, HttpLib2Error) as e:
            raise api_errors.ApiExecutionError(project_id, e)

    def get_dataset_access(self, project_id, dataset_id):
        """Return the access list of dataset *dataset_id* in *project_id*."""
        try:
            response = self.repository.datasets.get(resource=project_id, target=dataset_id, fields='access')
            access = response.get('access', [])
            LOGGER.debug('Geting the access portion of the dataset resource object, project_id = %s, dataset_id = %s, results = %s', project_id, dataset_id, access)
            return access
        except (errors.HttpError, HttpLib2Error) as e:
            raise api_errors.ApiExecutionError(project_id, e)

    def get_tables(self, project_id, dataset_id):
        """Return the tables of dataset *dataset_id* in *project_id*."""
        try:
            raw = self.repository.tables.list(projectId=project_id, datasetId=dataset_id)
            tables = api_helpers.flatten_list_results(raw, 'tables')
            LOGGER.debug('Getting tables for the project %s and dataset %s', project_id, dataset_id)
            return tables
        except (errors.HttpError, HttpLib2Error) as e:
            raise api_errors.ApiExecutionError(project_id + ';' + dataset_id, e)
class OptionPlotoptionsArcdiagramSonificationTracks(Options):
    """Sonification track options for arc-diagram series (Highcharts).

    Fix: the get/set pairs (``instrument``, ``midiName``,
    ``roundToMusicalNotes``, ``showPlayMarker``, ``type``) were declared as
    duplicate plain ``def``s, so the setter shadowed the getter; restored the
    ``@property`` / ``@x.setter`` pairing used throughout these option
    classes, and made the sub-data accessors properties to match.
    """

    @property
    def activeWhen(self) -> 'OptionPlotoptionsArcdiagramSonificationTracksActivewhen':
        """Sub-options controlling when the track is active."""
        return self._config_sub_data('activeWhen', OptionPlotoptionsArcdiagramSonificationTracksActivewhen)

    @property
    def instrument(self):
        """Instrument preset used by the track. Defaults to 'piano'."""
        return self._config_get('piano')

    @instrument.setter
    def instrument(self, text: str):
        self._config(text, js_type=False)

    @property
    def mapping(self) -> 'OptionPlotoptionsArcdiagramSonificationTracksMapping':
        """Sub-options mapping data properties to audio parameters."""
        return self._config_sub_data('mapping', OptionPlotoptionsArcdiagramSonificationTracksMapping)

    @property
    def midiName(self):
        """MIDI name of the track, if any."""
        return self._config_get(None)

    @midiName.setter
    def midiName(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointGrouping(self) -> 'OptionPlotoptionsArcdiagramSonificationTracksPointgrouping':
        """Sub-options for grouping nearby points."""
        return self._config_sub_data('pointGrouping', OptionPlotoptionsArcdiagramSonificationTracksPointgrouping)

    @property
    def roundToMusicalNotes(self):
        """Whether pitches snap to musical notes. Defaults to True."""
        return self._config_get(True)

    @roundToMusicalNotes.setter
    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def showPlayMarker(self):
        """Whether the play marker is shown. Defaults to True."""
        return self._config_get(True)

    @showPlayMarker.setter
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def type(self):
        """Track type. Defaults to 'instrument'."""
        return self._config_get('instrument')

    @type.setter
    def type(self, text: str):
        self._config(text, js_type=False)
class zip_longest(zip):
    # NOTE(review): the base class is the coconut runtime's own `zip` (defined
    # elsewhere in this file), whose __new__ accepts `strict` and which stores
    # the source iterables on `self.iters` — confirm against the runtime header.
    __slots__ = ('fillvalue',)
    # Reuse the stdlib docstring when available.
    __doc__ = getattr(_coconut.zip_longest, '__doc__', 'Version of zip that fills in missing values with fillvalue.')
    def __new__(cls, *iterables, **kwargs):
        self = zip.__new__(cls, *iterables, strict=False)
        # `fillvalue` is the only accepted keyword; anything else is an error.
        self.fillvalue = kwargs.pop('fillvalue', None)
        if kwargs:
            raise _coconut.TypeError(((cls.__name__ + '() got unexpected keyword arguments ') + _coconut.repr(kwargs)))
        return self
    def __getitem__(self, index):
        # Lazily computed overall length; only needed for slices and
        # negative indices, and only valid when all iterables are sized.
        self_len = None
        if _coconut.isinstance(index, _coconut.slice):
            if (self_len is None):
                self_len = self.__len__()
            if (self_len is _coconut.NotImplemented):
                return self_len
            # Normalize negative slice bounds against the overall length,
            # then slice each source iterable independently.
            new_ind = _coconut.slice(((index.start + self_len) if ((index.start is not None) and (index.start < 0)) else index.start), ((index.stop + self_len) if ((index.stop is not None) and (index.stop < 0)) else index.stop), index.step)
            return self.__class__(*(_coconut_iter_getitem(it, new_ind) for it in self.iters))
        if (index < 0):
            if (self_len is None):
                self_len = self.__len__()
            if (self_len is _coconut.NotImplemented):
                return self_len
            index += self_len
        # Integer index: take the element from each iterable, substituting
        # fillvalue where an iterable is already exhausted.
        result = []
        got_non_default = False
        for it in self.iters:
            try:
                result.append(_coconut_iter_getitem(it, index))
            except _coconut.IndexError:
                result.append(self.fillvalue)
            else:
                got_non_default = True
        # All iterables exhausted at this index -> out of range overall.
        if (not got_non_default):
            raise _coconut.IndexError('zip_longest index out of range')
        return _coconut.tuple(result)
    def __len__(self):
        # Length is only defined when every source iterable is sized;
        # zip_longest runs until the longest one is exhausted.
        if (not _coconut.all((_coconut.isinstance(it, _coconut.abc.Sized) for it in self.iters))):
            return _coconut.NotImplemented
        return _coconut.max((_coconut.len(it) for it in self.iters))
    def __repr__(self):
        return ('zip_longest(%s, fillvalue=%s)' % (', '.join((_coconut.repr(it) for it in self.iters)), _coconut.repr(self.fillvalue)))
    def __reduce__(self):
        # Pickle as (class, iterables, state-dict with fillvalue).
        return (self.__class__, self.iters, {'fillvalue': self.fillvalue})
    def __copy__(self):
        # Make each source re-iterable before duplicating so both copies
        # can be consumed independently.
        self.iters = _coconut.tuple((reiterable(it) for it in self.iters))
        return self.__class__(*self.iters, fillvalue=self.fillvalue)
    def __iter__(self):
        # Delegate actual iteration to the stdlib implementation.
        return _coconut.iter(_coconut.zip_longest(*self.iters, fillvalue=self.fillvalue))
def test_block_inference_changing_shape():
    """CompositionalInference must raise RuntimeError on a model whose
    random-variable shape changes between iterations."""
    model = ChangingShapeModel()
    queries = [model.K(), *(model.component(j) for j in range(3))]
    inference = bm.CompositionalInference(nnc_compile=False)
    with pytest.raises(RuntimeError):
        inference.infer(queries, {}, num_samples=10, num_chains=1)
_exception
def _vm_backup_deleted_last_snapshot_names(data):
deleted_last_snapshot_names = data.get('deleted_last_snapshot_names', None)
if deleted_last_snapshot_names:
bkp_ids = [b[3:] for b in deleted_last_snapshot_names if b.startswith('is-')]
Backup.objects.filter(id__in=bkp_ids).update(last=False) |
def test_validate_discount_code_require_same_event_id(db):
    """A discount code validates only against its own event id; mismatched
    ids (string or int) are rejected."""
    discount, tickets = _create_discount_code(db)
    discount.event = EventFactoryBasic()
    db.session.commit()
    for wrong_event_id in ('40', 100):
        with pytest.raises(UnprocessableEntityError, match='Invalid Discount Code'):
            discount.validate(event_id=wrong_event_id, tickets=tickets)
    assert discount.validate(event_id=discount.event_id, tickets=tickets) == discount
class TestNXActionResubmitTable(unittest.TestCase):
    """Tests for NXActionResubmitTable init, wire parsing and serialization.

    Each field pairs the expected value with its big-endian byte encoding.
    """
    type_ = {'buf': b'\xff\xff', 'val': ofproto.OFPAT_VENDOR}
    len_ = {'buf': b'\x00\x10', 'val': ofproto.NX_ACTION_RESUBMIT_SIZE}
    vendor = {'buf': b'\x00\x00# ', 'val': 8992}
    subtype = {'buf': b'\x00\x0e', 'val': 14}
    in_port = {'buf': b'\nL', 'val': 2636}
    table_id = {'buf': b'R', 'val': 82}
    zfill = b'\x00' * 3
    # Wire image of a complete NX_ACTION_RESUBMIT message.
    buf = b''.join([type_['buf'], len_['buf'], vendor['buf'], subtype['buf'], in_port['buf'], table_id['buf'], zfill])
    c = NXActionResubmitTable(in_port['val'], table_id['val'])

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_init(self):
        # The constructor stores subtype, in_port and table_id verbatim.
        eq_(self.subtype['val'], self.c.subtype)
        eq_(self.in_port['val'], self.c.in_port)
        eq_(self.table_id['val'], self.c.table_id)

    def test_parser(self):
        parsed = OFPActionVendor.parser(self.buf, 0)
        eq_(self.type_['val'], parsed.type)
        eq_(self.len_['val'], parsed.len)
        eq_(self.in_port['val'], parsed.in_port)
        eq_(self.table_id['val'], parsed.table_id)

    def test_serialize(self):
        out = bytearray()
        self.c.serialize(out, 0)
        fmt = ofproto.NX_ACTION_RESUBMIT_PACK_STR
        unpacked = struct.unpack(fmt, six.binary_type(out))
        eq_(self.type_['val'], unpacked[0])
        eq_(self.len_['val'], unpacked[1])
        eq_(self.vendor['val'], unpacked[2])
        eq_(self.subtype['val'], unpacked[3])
        eq_(self.in_port['val'], unpacked[4])
        eq_(self.table_id['val'], unpacked[5])
def _start_watchdog_for_metafile(app):
    """Watch the asset-build meta file and reload asset details on change."""
    class _MetaFileHandler(FileSystemEventHandler):
        def on_any_event(self, event):
            # Only react to content modifications, not create/delete/move.
            if event.event_type == 'modified':
                _set_assets_details(app)
                logger.debug('Assets metadata reloaded')

    watcher = Observer()
    watcher.schedule(_MetaFileHandler(), config.asset_build_meta_file)
    watcher.start()
class OptionPlotoptionsOrganizationSonificationTracksMappingTremoloSpeed(Options):
    """Auto-generated Highcharts option wrapper for
    plotOptions.organization.sonification.tracks.mapping.tremolo.speed.

    NOTE(review): every name below is defined twice (getter-shaped then
    setter-shaped). The ``@property`` / ``@<name>.setter`` decorators were
    most likely stripped during extraction; as written, the later def wins.
    Confirm against the code generator before relying on this class.
    """

    def mapFunction(self):
        """Getter form: delegate to ``_config_get`` with default ``None``."""
        return self._config_get(None)

    def mapFunction(self, value: Any):
        """Setter form: store *value* via ``_config`` (js_type=False)."""
        self._config(value, js_type=False)

    def mapTo(self):
        """Getter form: delegate to ``_config_get`` with default ``None``."""
        return self._config_get(None)

    def mapTo(self, text: str):
        """Setter form: store *text* via ``_config`` (js_type=False)."""
        self._config(text, js_type=False)

    def max(self):
        """Getter form: delegate to ``_config_get`` with default ``None``."""
        return self._config_get(None)

    def max(self, num: float):
        """Setter form: store *num* via ``_config`` (js_type=False)."""
        self._config(num, js_type=False)

    def min(self):
        """Getter form: delegate to ``_config_get`` with default ``None``."""
        return self._config_get(None)

    def min(self, num: float):
        """Setter form: store *num* via ``_config`` (js_type=False)."""
        self._config(num, js_type=False)

    def within(self):
        """Getter form: delegate to ``_config_get`` with default ``None``."""
        return self._config_get(None)

    def within(self, value: Any):
        """Setter form: store *value* via ``_config`` (js_type=False)."""
        self._config(value, js_type=False)
def construct_graph(number: int) -> (List[BasicBlock], ControlFlowGraph):
    """Construct one of several SSA/CFG test fixtures, selected by *number*.

    The base graph is a diamond: block 3 branches to blocks 2 and 1, which
    both flow into block 0 (a phi over v#1 and v#2). Each variant perturbs
    instructions and/or edges.

    Returns ``(node, task)`` — the basic-block list and the DecompilerTask
    wrapping the CFG. NOTE(review): the annotation names ControlFlowGraph
    for the second element, but a DecompilerTask is returned — confirm.
    """
    # Base SSA instructions: phi(v#1, v#2) plus the three defining assignments.
    defined_instructions = [instructions.Phi(expressions.Variable('v', Integer.int32_t(), 0), [expressions.Variable('v', Integer.int32_t(), 1), expressions.Variable('v', Integer.int32_t(), 2)]), instructions.Assignment(expressions.Variable('v', Integer.int32_t(), 1), expressions.Variable('u', Integer.int32_t())), instructions.Assignment(expressions.Variable('v', Integer.int32_t(), 2), expressions.Constant(5)), instructions.Assignment(expressions.Variable('u', Integer.int32_t()), expressions.Constant(3))]
    node = []
    # One basic block per instruction, index matching defined_instructions.
    for (index, instruction) in enumerate(defined_instructions):
        node.append(BasicBlock(index, instructions=[instruction]))
    cfg = ControlFlowGraph()
    task = DecompilerTask('test', cfg)
    # Diamond: 3 -> {2, 1} -> 0.
    cfg.add_edges_from([UnconditionalEdge(node[3], node[2]), UnconditionalEdge(node[3], node[1]), UnconditionalEdge(node[2], node[0]), UnconditionalEdge(node[1], node[0])])
    if (number == 1):
        # Variant 1: the unmodified base graph.
        return (node, task)
    if (number == 2):
        # Variant 2: block 1 now defines u = 3; block 3 defines v#1 from w#0.
        node[1].replace_instruction(defined_instructions[1], [defined_instructions[3]])
        node[3].replace_instruction(defined_instructions[3], [instructions.Assignment(expressions.Variable('v', Integer.int32_t(), 1), expressions.Variable('w', Integer.int32_t(), 0))])
        return (node, task)
    if (number == 3):
        # Variant 3: block 1 defines an unrelated variable w#1 instead of v#1.
        node[1].replace_instruction(defined_instructions[1], [instructions.Assignment(expressions.Variable('w', Integer.int32_t(), 1), expressions.Constant(3))])
        return (node, task)
    elif (number == 4):
        # Variant 4: block 1 splits into blocks 4 and 5 (each defining a
        # version of u) before rejoining at block 0.
        node += [BasicBlock(4, instructions=[instructions.Assignment(expressions.Variable('u', Integer.int32_t(), 1), expressions.Constant(7))]), BasicBlock(5, instructions=[instructions.Assignment(expressions.Variable('u', Integer.int32_t(), 2), expressions.Constant(6))])]
        cfg.remove_edge(cfg.get_edge(node[1], node[0]))
        cfg.add_edges_from([UnconditionalEdge(node[1], node[4]), UnconditionalEdge(node[4], node[0]), UnconditionalEdge(node[1], node[5]), UnconditionalEdge(node[5], node[0])])
        return (node, task)
    if (number == 5):
        # Variant 5: extra block 4 (u#1 = 7) hangs off block 3; block 1
        # defines w#1 instead of v#1.
        node += [BasicBlock(4, instructions=[instructions.Assignment(expressions.Variable('u', Integer.int32_t(), 1), expressions.Constant(7))])]
        node[1].replace_instruction(defined_instructions[1], [instructions.Assignment(expressions.Variable('w', Integer.int32_t(), 1), expressions.Constant(3))])
        cfg.add_edges_from([UnconditionalEdge(node[3], node[4]), UnconditionalEdge(node[4], node[0])])
        return (node, task)
    if (number == 6):
        # Variant 6: block 1 defines u = 3; block 3 chains v#1 <- v#0 and
        # v#2 <- v#1; block 2 redefines u = 4.
        node[1].replace_instruction(defined_instructions[1], [defined_instructions[3]])
        node[3].replace_instruction(defined_instructions[3], [instructions.Assignment(expressions.Variable('v', Integer.int32_t(), 1), expressions.Variable('v', Integer.int32_t(), 0)), instructions.Assignment(expressions.Variable('v', Integer.int32_t(), 2), expressions.Variable('v', Integer.int32_t(), 1))])
        node[2].replace_instruction(defined_instructions[2], [instructions.Assignment(expressions.Variable('u', Integer.int32_t()), expressions.Constant(4))])
        return (node, task)
    if (number <= 8):
        # Variants 7/8: block 1 defines u = 3, block 3 defines x = 2, and new
        # blocks carry v#1's definition.
        node[1].replace_instruction(defined_instructions[1], [defined_instructions[3]])
        node[3].replace_instruction(defined_instructions[3], [instructions.Assignment(expressions.Variable('x', Integer.int32_t()), expressions.Constant(2))])
        # NOTE(review): two blocks are appended with the same index 4 — the
        # second was possibly meant to be 5; confirm against the tests that
        # consume this fixture.
        node.append(BasicBlock(4, instructions=[defined_instructions[1]]))
        node.append(BasicBlock(4, instructions=[defined_instructions[1]]))
        cfg.remove_edge(cfg.get_edge(node[3], node[1]))
        cfg.add_edges_from([UnconditionalEdge(node[3], node[4]), UnconditionalEdge(node[2], node[1])])
        if (number == 7):
            # Variant 7 additionally connects block 4 back to block 1.
            cfg.add_edge(UnconditionalEdge(node[4], node[1]))
        return (node, task)
def parse_args(clp):
    """Parse and validate command line options.

    :param clp: dict holding the argparse parser under key 'ap'
    :return: the validated options namespace, with ``options.language``
        resolved from ``--octave``/``--matlab`` (those attributes are removed)
    """
    options = clp['ap'].parse_args()
    if (options.version or options.include_version):
        print(FULL_NAME)
        if options.version:
            sys.exit(0)
    # Warn when stdout would not round-trip UTF-8 output.
    if ((not options.brief) and (sys.stdout.encoding.lower() != 'utf-8')):
        print('WARNING: It looks like your environment is not set up quite')
        print((' right since python will encode to %s on stdout.' % sys.stdout.encoding))
        print()
        print('To fix set one of the following environment variables:')
        print(' LC_ALL=en_GB.UTF-8 (or something similar)')
        print(' PYTHONIOENCODING=UTF-8')
    for item in options.files:
        if (not (os.path.isdir(item) or os.path.isfile(item))):
            clp['ap'].error(('%s is neither a file nor directory' % item))
    try:
        # Probe the encoding name; an unknown codec raises LookupError.
        # BUG FIX: the receiver of .encode() was missing, which is a syntax
        # error — encode an empty string purely to validate the codec name.
        ''.encode(options.input_encoding)
    except LookupError:
        clp['ap'].error(("invalid encoding '%s'" % options.input_encoding))
    options.language = None
    if (options.octave is not None):
        try:
            (major, minor) = Base_Octave_Language.parse_version(options.octave)
            options.language = Base_Octave_Language.get_version(major, minor)
        except ValueError as verr:
            clp['ap'].error(verr.args[0])
    # The raw selector attributes are consumed into options.language.
    del options.octave
    if (options.matlab is not None):
        try:
            (major, minor) = Base_MATLAB_Language.parse_version(options.matlab)
            options.language = Base_MATLAB_Language.get_version(major, minor)
        except ValueError as verr:
            clp['ap'].error(verr.args[0])
    del options.matlab
    return options
def generate_header(args):
    """Return a '#'-commented banner recording the timestamp, the invoking
    script, and a pretty-printed dump of *args* (an argparse-style namespace).
    """
    timestamp = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    banner = 'Auto-generated on {} by {}'.format(timestamp, sys.argv[0])
    arg_dump = pprint.pformat(vars(args)).split('\n')
    content = ['=' * 60, banner, '-' * 60]
    content.extend(arg_dump)
    content.append('=' * 60)
    return '\n'.join('# ' + line for line in content)
.django_db
def test_new_awards_failures(client, monkeypatch, add_award_recipients, elasticsearch_award_index):
setup_elasticsearch_test(monkeypatch, elasticsearch_award_index)
test_payload = {'group': 'quarter', 'filters': {'time_period': [{'start_date': '2008-10-01', 'end_date': '2010-09-30'}], 'recipient_id': '63248e89-7fb7-2d51-4085-d9-P'}}
resp = client.post(get_new_awards_over_time_url(), content_type='application/json', data=json.dumps(test_payload))
assert (resp.status_code == 400)
test_payload['filters']['recipient_hash'] = 'enriwerniewrn'
resp = client.post(get_new_awards_over_time_url(), content_type='application/json', data=json.dumps(test_payload))
assert (resp.status_code == 400) |
def extractWwwNovelsluttyverseCom(item):
    """Map a feed item to a release message based on its tags.

    Returns None for previews / untitled chapters, False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class Angular(node.Node):
    """Wrapper driving an Angular CLI project located under ``root_path``.

    NOTE(review): ``routing``, ``views_path``, ``node_modules_path`` and
    ``assets_path`` are consumed elsewhere as plain attributes (e.g.
    ``self.routing`` at router(), ``self.node_modules_path`` at publish()),
    so they were very likely decorated with ``@property`` in the original
    source and the decorators were stripped — confirm upstream.
    """

    def __init__(self, root_path: str, name: str=None, page=None, app_folder: str=node.APP_FOLDER, assets_folder: str=node.ASSET_FOLDER):
        """Record project location plus the app/assets sub-folder names."""
        super(Angular, self).__init__(root_path, name, page)
        (self._app_folder, self._app_asset, self.__clis) = (app_folder, assets_folder, None)
        self.__app = None

    def routing(self) -> str:
        """Filename of the Angular routing module."""
        return APP_ROUTE_FILE

    def views_path(self) -> Path:
        """Path of the folder holding the app's views/components."""
        return Path(self._app_path, self._app_name, PROJECT_SRC_ALIAS, self._app_folder)

    def node_modules_path(self) -> Path:
        """Path of the project's node_modules folder."""
        return Path(self._app_path, self._app_name, 'node_modules')

    def assets_path(self):
        """Path of the project's static assets folder."""
        return Path(self._app_path, self._app_name, PROJECT_SRC_ALIAS, self._app_asset)

    def create(self, name: str):
        """Run ``ng new`` for *name* unless the project folder already exists."""
        project_path = Path(self.root_path, name)
        self._app_name = name
        if (not project_path.exists()):
            logging.info(('Creating %s to this location %s' % (name, self.root_path)))
            subprocess.run(('ng new %s' % name), shell=True, cwd=str(self.root_path))
        else:
            logging.info(('Application %s already available here %s' % (name, self.root_path)))

    def serve(self, name: str=None, host: str=None, port: int=None):
        """Run ``ng serve`` (defaults to self.HOST/self.PORT when not given)."""
        if (name is not None):
            self._app_name = name
        subprocess.run(('ng serve --open --host=%s --port=%s' % ((host or self.HOST), (port or self.PORT))), shell=True, cwd=str(self.app_path))

    def router(self, name: str=None, **kwargs):
        """Generate the app-routing module and seed it from the template.

        Only warns when the routing file already exists (no overwrite).
        """
        if (name is not None):
            self._app_name = name
        routing_file = Path(self.app_path, self.routing)
        if (not routing_file.exists()):
            subprocess.run('ng generate module app-routing --module app --flat', shell=True, cwd=str(self.app_path))
            with open(Path(self.app_path, self.routing), 'w') as f:
                f.write(templates.ANGULAR_ROUTER)
        else:
            logging.warning('Router already exist please update it manually with the above content')

    def ng(self, app_name: str=None):
        """Return an NG command helper bound to this project."""
        if (app_name is not None):
            self._app_name = app_name
        return NG(self, self.envs)

    def cli(self, app_name: str=None):
        """Alias of ng(): return an NG command helper."""
        if (app_name is not None):
            self._app_name = app_name
        return NG(self, self.envs)

    def get_view(self, name: str='') -> Path:
        """Return the path of view *name* inside the views folder."""
        return Path(self.views_path, name)

    def app(self, page=None, target_folder: str=node.APP_FOLDER) -> App:
        """Return (creating on first use) the App facade for this project."""
        if (target_folder is not None):
            self._app_folder = target_folder
        if (page is not None):
            self._page = page
        if (self.__app is None):
            self.__app = App(server=self)
        return self.__app

    def ng_modules(self, app_name: str=None, file_name: str=None) -> NgModules:
        """Return the NgModules registry, reusing the route's when available."""
        if (self._fmw_modules is None):
            if ((self._route is not None) and (self._route.ng_modules is not None)):
                self._fmw_modules = self._route.ng_modules
            else:
                self._fmw_modules = NgModules(self)
        return self._fmw_modules

    def route(self, component: str, alias: str) -> RouteModule:
        """Return (creating on first use) the RouteModule for component/alias."""
        if (self._route is None):
            self._route = RouteModule(component, alias, self)
        return self._route

    def publish(self, alias: str, selector: str=None, page=None, install: bool=False, target_folder: str=node.APP_FOLDER):
        """Export the app under *selector*, register its route, and optionally
        npm-install any packages the page requires but node_modules lacks."""
        if (target_folder is not None):
            self._app_folder = target_folder
        if (self.__app is None):
            self.__app = self.app(page)
        self.__app.export(selector=selector)
        self.route(selector, alias)
        packages = node.requirements(self.page, self.node_modules_path)
        missing_package = [k for (k, v) in packages.items() if (not v)]
        if (install and missing_package):
            self.npm(missing_package)

    def home_page(self, page=None, app_name=None, install: bool=False, target_folder: str=node.APP_FOLDER):
        """Export the page as the application root ('app-root') and optionally
        install missing npm packages."""
        if (app_name is not None):
            self._app_name = app_name
        self.__app = self.app(page)
        self.__app.export(selector='app-root')
        packages = node.requirements(self.page, self.node_modules_path)
        missing_package = [k for (k, v) in packages.items() if (not v)]
        if (install and missing_package):
            self.npm(missing_package)

    def sync_components(self, page=None, install: bool=True, target_folder: str=node.APP_FOLDER):
        """Copy the page's component schema into the app and optionally install
        missing npm packages."""
        if (target_folder is not None):
            self._app_folder = target_folder
        if (self.__app is None):
            self.__app = self.app(page)
        add_to_app(self.page._props['schema'].values(), self.app_path, folder=self.assets_path.name)
        packages = node.requirements(self.page, self.node_modules_path)
        missing_package = [k for (k, v) in packages.items() if (not v)]
        if (install and missing_package):
            self.npm(missing_package)
_blueprint.route('/settings/tokens/new', methods=('POST',))
_required
def new_token():
    """Create a new API token for the current user and redirect to settings.

    Aborts with HTTP 400 when the submitted form fails validation.
    """
    form = anitya.forms.TokenForm()
    if not form.validate_on_submit():
        flask.abort(400)
    api_token = models.ApiToken(user=flask.g.user, description=form.description.data)
    Session.add(api_token)
    Session.commit()
    return flask.redirect(flask.url_for('anitya_ui.settings'))
def draw_arrow(box, tip, orientation='right', arrow_type='', style=None, tooltip=None):
    """Build a polygon primitive describing a horizontal arrow.

    :param box: (x, y, dx, dy) bounding box of the arrow
    :param tip: horizontal length of the arrow head
    :param orientation: 'right' or 'left'
    :param arrow_type: free-form type tag forwarded in the primitive
    :param style: optional style mapping (defaults to {})
    :param tooltip: optional tooltip text (defaults to '')
    :return: ['polygon', points, arrow_type, style, tooltip]
    :raises ValueError: if *orientation* is neither 'right' nor 'left'
    """
    (x, y, dx, dy) = box
    if (orientation == 'right'):
        arrow = ((x, y), (((x + dx) - tip), y), ((x + dx), (y + (dy / 2))), (((x + dx) - tip), (y + dy)), (x, (y + dy)))
    elif (orientation == 'left'):
        arrow = ((x, (y + (dy / 2))), ((x + tip), y), ((x + dx), y), ((x + dx), (y + dy)), ((x + tip), (y + dy)))
    else:
        # BUG FIX: an unknown orientation previously left `arrow` unbound and
        # crashed with UnboundLocalError; fail fast with a clear message.
        raise ValueError("orientation must be 'right' or 'left', got %r" % (orientation,))
    return ['polygon', arrow, arrow_type, (style or {}), (tooltip or '')]
class OptionSeriesTreemapDatalabelsFilter(Options):
    """Auto-generated Highcharts option wrapper for
    series.treemap.dataLabels.filter.

    NOTE(review): each name is defined twice (getter then setter form);
    the ``@property`` / ``@<name>.setter`` decorators were likely stripped
    during extraction — as written, the later def wins.
    """

    def operator(self):
        """Getter form: delegate to ``_config_get`` with default ``None``."""
        return self._config_get(None)

    def operator(self, value: Any):
        """Setter form: store *value* via ``_config`` (js_type=False)."""
        self._config(value, js_type=False)

    def property(self):
        """Getter form: delegate to ``_config_get`` with default ``None``."""
        return self._config_get(None)

    def property(self, text: str):
        """Setter form: store *text* via ``_config`` (js_type=False)."""
        self._config(text, js_type=False)
def _get_new_items() -> List[ItemCode]:
    """Return item codes flagged for sync that have no Ecommerce Item yet.

    The LEFT JOIN leaves ``ei.erpnext_item_code`` NULL exactly for items
    without a matching Ecommerce Item row.
    """
    rows = frappe.db.sql(f'''
        SELECT item.item_code
        FROM tabItem item
        LEFT JOIN `tabEcommerce Item` ei
        ON ei.erpnext_item_code = item.item_code
        WHERE ei.erpnext_item_code is NULL
        AND item.{ITEM_SYNC_CHECKBOX} = 1
    ''')
    return [row[0] for row in rows]
class OptionSeriesColumnrangeOnpointConnectoroptions(Options):
    """Auto-generated Highcharts option wrapper for
    series.columnrange.onPoint.connectorOptions.

    NOTE(review): each name is defined twice (getter then setter form);
    the ``@property`` / ``@<name>.setter`` decorators were likely stripped
    during extraction — as written, the later def wins.
    """

    def dashstyle(self):
        """Getter form: delegate to ``_config_get`` with default ``None``."""
        return self._config_get(None)

    def dashstyle(self, text: str):
        """Setter form: store *text* via ``_config`` (js_type=False)."""
        self._config(text, js_type=False)

    def stroke(self):
        """Getter form: delegate to ``_config_get`` with default ``None``."""
        return self._config_get(None)

    def stroke(self, text: str):
        """Setter form: store *text* via ``_config`` (js_type=False)."""
        self._config(text, js_type=False)

    def width(self):
        """Getter form: delegate to ``_config_get`` with default ``1``."""
        return self._config_get(1)

    def width(self, num: float):
        """Setter form: store *num* via ``_config`` (js_type=False)."""
        self._config(num, js_type=False)
def lambda_handler(event, context):
    """AWS Lambda entry point: return five sample words for every vocab list.

    The response body maps list_id -> list of 5 random word picks (each pick
    is independent, so duplicates are possible).

    :param event: Lambda event (unused)
    :param context: Lambda context (unused)
    :return: API-Gateway-style dict with statusCode, CORS headers, JSON body
    """
    sample_words_response = {}
    all_lists = vocab_list_service.get_vocab_lists()
    # FIX: renamed the loop variable from `list`, which shadowed the builtin.
    for vocab_list in all_lists:
        list_id = vocab_list['list_id']
        all_words = list_word_service.get_words_in_list(list_id)
        sample_words_response[list_id] = [select_random_word(all_words) for _ in range(5)]
    return {'statusCode': 200, 'headers': {'Access-Control-Allow-Methods': 'GET,OPTIONS', 'Access-Control-Allow-Origin': '*'}, 'body': json.dumps(sample_words_response)}
def _children_from_attrs(cur, n, *args) -> Iterable[Node]:
    """Yield child nodes of *n* for each attribute name in *args*.

    List-valued attributes yield one child per index; any other value yields
    a single child with index ``None``.
    """
    for attr_name in args:
        value = getattr(n, attr_name)
        if isinstance(value, list):
            for idx, _ in enumerate(value):
                yield cur._child_node(attr_name, idx)
        else:
            yield cur._child_node(attr_name, None)
def extractShallotnoodleWordpressCom(item):
    """Map a feed item to a release message based on its tags.

    Returns None for previews / untitled chapters, False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [('surrounded mob wants to quietly withdraw', 'surrounded mob wants to quietly withdraw', 'translated'), ('live broadcasting raising dragons in the interstellar', 'live broadcasting raising dragons in the interstellar', 'translated'), ('straight playboy sub', 'straight playboy sub', 'translated'), ('one and only', 'One and Only', 'translated'), ('blue black', 'Blue-Black', 'translated'), ('after being picked up by the top alpha', 'after being picked up by the top alpha', 'translated'), ('Starry Night', 'Starry Night', 'translated'), ('i use my straight friend as a sex toy', 'i use my straight friend as a sex toy', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class LoopJS():
    """JS-side event-loop variant of the Loop class.

    NOTE(review): ``undefined`` and ``setTimeout`` are not Python builtins;
    this class appears to be written for transpilation to JavaScript (e.g.
    PScript/flexx), where those names exist — confirm before running under
    CPython.
    """
    # Integration hooks that do not apply in the JS build are masked out.
    integrate = undefined
    integrate_tornado = undefined
    integrate_pyqt4 = undefined
    integrate_pyside = undefined
    _integrate_qt = undefined
    _thread_match = undefined

    def __init__(self):
        # Components currently scheduled on this loop; reset() is defined on
        # the (not shown here) shared base/peer implementation.
        self._active_components = []
        self.reset()

    def _call_soon_func(self, func):
        # Defer to the JS event loop: run func on the next tick.
        setTimeout(func, 0)

    def _iter_callback(self):
        # Clear the pending flag before iterating so iter() may reschedule.
        self._scheduled_call_to_iter = False
        return self.iter()
class OptionPlotoptionsGaugeLabelStyle(Options):
    """Auto-generated Highcharts option wrapper for
    plotOptions.gauge.label.style.

    NOTE(review): each name is defined twice (getter then setter form);
    the ``@property`` / ``@<name>.setter`` decorators were likely stripped
    during extraction — as written, the later def wins.
    """

    def fontSize(self):
        """Getter form: delegate to ``_config_get`` with default '0.8em'."""
        return self._config_get('0.8em')

    def fontSize(self, num: float):
        """Setter form: store *num* via ``_config`` (js_type=False)."""
        self._config(num, js_type=False)

    def fontWeight(self):
        """Getter form: delegate to ``_config_get`` with default 'bold'."""
        return self._config_get('bold')

    def fontWeight(self, text: str):
        """Setter form: store *text* via ``_config`` (js_type=False)."""
        self._config(text, js_type=False)
class Namespace():
    """Mutable collection of ``function.Array`` attributes addressable via
    tensor expressions, e.g. ``ns.f = 'u_i u_i'`` or ``'u_i v_i' @ ns``.

    NOTE(review): ``arg_shapes`` and ``default_geometry`` read like stripped
    ``@property`` methods, and the three ``__rmatmul__`` definitions look
    like stripped ``@typing.overload`` stubs plus the implementation —
    confirm against the original source.
    """
    __slots__ = ('_attributes', '_arg_shapes', 'default_geometry_name', '_fixed_lengths', '_fallback_length', '_functions')
    # NOTE(review): the character classes '[a-zA-Z--]' look mis-encoded
    # (possibly Unicode letter ranges lost in transit) — verify upstream.
    _re_assign = re.compile('^([a-zA-Z--][a-zA-Z--0-9]*)(_[a-z]+)?$')
    _default_functions = dict(opposite=function.opposite, sin=numpy.sin, cos=numpy.cos, tan=numpy.tan, sinh=numpy.sinh, cosh=numpy.cosh, tanh=numpy.tanh, arcsin=numpy.arcsin, arccos=numpy.arccos, arctan=numpy.arctan, arctan2=_arctan2_expr, arctanh=numpy.arctanh, exp=numpy.exp, abs=numpy.abs, ln=numpy.log, log=numpy.log, log2=numpy.log2, log10=numpy.log10, sqrt=numpy.sqrt, sign=numpy.sign, d=function.d, surfgrad=function.surfgrad, n=function.normal, sum=_sum_expr, norm2=_norm2_expr, J=_J_expr)

    def __init__(self, *, default_geometry_name: str='x', fallback_length: Optional[int]=None, functions: Optional[Mapping[(str, Callable)]]=None, **kwargs: Any) -> None:
        """Create an empty namespace.

        Keyword arguments of the form ``length_<indices>=<int>`` pin the
        length of the given expression indices; *functions* extends (and may
        override) the default expression functions.
        """
        if (not isinstance(default_geometry_name, str)):
            raise ValueError('default_geometry_name: Expected a str, got {!r}.'.format(default_geometry_name))
        if (('_' in default_geometry_name) or (not self._re_assign.match(default_geometry_name))):
            raise ValueError('default_geometry_name: Invalid variable name: {!r}.'.format(default_geometry_name))
        fixed_lengths = {}
        for (name, value) in kwargs.items():
            if (not name.startswith('length_')):
                raise TypeError('__init__() got an unexpected keyword argument {!r}'.format(name))
            # Each character after 'length_' names one index to pin.
            for index in name[7:]:
                if (index in fixed_lengths):
                    raise ValueError('length of index {} specified more than once'.format(index))
                fixed_lengths[index] = value
        super().__setattr__('_attributes', {})
        super().__setattr__('_arg_shapes', {})
        super().__setattr__('_fixed_lengths', types.frozendict(({i: l for (indices, l) in fixed_lengths.items() for i in indices} if fixed_lengths else {})))
        super().__setattr__('_fallback_length', fallback_length)
        super().__setattr__('default_geometry_name', default_geometry_name)
        super().__setattr__('_functions', dict(itertools.chain(self._default_functions.items(), (() if (functions is None) else functions.items()))))
        super().__init__()

    def __getstate__(self) -> Dict[(str, Any)]:
        """Pickle support: collect all slot values into a dict."""
        attrs = ('_arg_shapes', '_attributes', 'default_geometry_name', '_fixed_lengths', '_fallback_length', '_functions')
        return {k: getattr(self, k) for k in attrs}

    def __setstate__(self, d: Mapping[(str, Any)]) -> None:
        """Pickle support: restore slot values, bypassing __setattr__ checks."""
        for (k, v) in d.items():
            super().__setattr__(k, v)

    def arg_shapes(self) -> Mapping[(str, function.Shape)]:
        """Read-only view of the argument shapes collected so far."""
        return builtin_types.MappingProxyType(self._arg_shapes)

    def default_geometry(self) -> str:
        """The attribute named by ``default_geometry_name``."""
        return getattr(self, self.default_geometry_name)

    def __call__(*args, **subs: function.IntoArray) -> 'Namespace':
        """Return a copy with argument substitutions applied to every attribute."""
        if (len(args) != 1):
            raise TypeError('{} instance takes 1 positional argument but {} were given'.format(type(args[0]).__name__, len(args)))
        (self,) = args
        ns = Namespace(default_geometry_name=self.default_geometry_name)
        for (k, v) in self._attributes.items():
            setattr(ns, k, function.replace_arguments(v, subs))
        return ns

    def copy_(self, *, default_geometry_name: Optional[str]=None) -> 'Namespace':
        """Return a copy, optionally with a different default geometry name."""
        if (default_geometry_name is None):
            default_geometry_name = self.default_geometry_name
        # BUG FIX: the key template must be an f-string. The plain literal
        # 'length_{i}' collapsed all fixed lengths into one bogus kwarg whose
        # indices were parsed as the characters '{', 'i', '}'.
        ns = Namespace(default_geometry_name=default_geometry_name, fallback_length=self._fallback_length, functions=self._functions, **{f'length_{i}': l for (i, l) in self._fixed_lengths.items()})
        for (k, v) in self._attributes.items():
            setattr(ns, k, v)
        return ns

    def __getattr__(self, name: str) -> Any:
        """Resolve stored attributes; names starting with 'eval_' return an
        expression evaluator bound to the given indices."""
        if name.startswith('eval_'):
            return (lambda expr: _eval_ast(parse(expr, variables=self._attributes, indices=name[5:], arg_shapes=self._arg_shapes, default_geometry_name=self.default_geometry_name, fixed_lengths=self._fixed_lengths, fallback_length=self._fallback_length)[0], self._functions))
        try:
            return self._attributes[name]
        except KeyError:
            pass
        raise AttributeError(name)

    def __setattr__(self, name: str, value: Any) -> Any:
        """Assign an attribute; a trailing '_<indices>' suffix selects the
        expression indices, and str values are parsed as expressions."""
        if (name in self.__slots__):
            raise AttributeError('readonly')
        m = self._re_assign.match(name)
        if ((not m) or (m.group(2) and (len(set(m.group(2))) != len(m.group(2))))):
            raise AttributeError('{!r} object has no attribute {!r}'.format(type(self), name))
        else:
            (name, indices) = m.groups()
            indices = (indices[1:] if indices else None)
        if isinstance(value, str):
            (ast, arg_shapes) = parse(value, variables=self._attributes, indices=indices, arg_shapes=self._arg_shapes, default_geometry_name=self.default_geometry_name, fixed_lengths=self._fixed_lengths, fallback_length=self._fallback_length)
            value = _eval_ast(ast, self._functions)
            self._arg_shapes.update(arg_shapes)
        else:
            # Index suffixes only make sense for parsed expressions.
            assert (not indices)
        self._attributes[name] = function.Array.cast(value)

    def __delattr__(self, name: str) -> None:
        """Delete a stored attribute (slots remain readonly)."""
        if (name in self.__slots__):
            raise AttributeError('readonly')
        elif (name in self._attributes):
            del self._attributes[name]
        else:
            raise AttributeError('{!r} object has no attribute {!r}'.format(type(self), name))

    def __rmatmul__(self, expr: str) -> function.Array:
        ...

    def __rmatmul__(self, expr: Union[(Tuple[(str, ...)], List[str])]) -> Tuple[(function.Array, ...)]:
        ...

    def __rmatmul__(self, expr: Union[(str, Tuple[(str, ...)], List[str])]) -> Union[(function.Array, Tuple[(function.Array, ...)])]:
        """Evaluate ``expr @ ns``; tuples/lists are evaluated element-wise."""
        if isinstance(expr, (tuple, list)):
            return tuple(map(self.__rmatmul__, expr))
        if (not isinstance(expr, str)):
            return NotImplemented
        try:
            ast = parse(expr, variables=self._attributes, indices=None, arg_shapes=self._arg_shapes, default_geometry_name=self.default_geometry_name, fixed_lengths=self._fixed_lengths, fallback_length=self._fallback_length)[0]
        except AmbiguousAlignmentError:
            raise ValueError('`expression Namespace` cannot be used because the expression has more than one dimension. Use `Namespace.eval_...(expression)` instead')
        return _eval_ast(ast, self._functions)
('cuda.reshape.func_call')
('cuda.flatten.func_call')
def reshape_gen_function_call(func_attrs, indent=' '):
    """Render the function-call statement for a reshape/flatten op.

    Input shape names come either from the IntVar inputs (skipping input 0,
    the data tensor) or from the first input's shape dims.
    """
    func_name = 'ait_' + func_attrs['name']
    if _is_intvar(func_attrs):
        # Input 0 is the data tensor; the rest carry the IntVar shape names.
        input_names = [inp._attrs['int_var']._attrs['name'] for inp in func_attrs['inputs'][1:]]
    else:
        input_names = [dim._attrs['name'] for dim in func_attrs['inputs'][0]._attrs['shape']]
    output_names = [dim._attrs['name'] for dim in func_attrs['outputs'][0]._attrs['shape']]
    return FUNC_CALL_TEMPLATE.render(func_name=func_name, input_names=input_names, output_names_except_last=output_names[:-1], last_output=output_names[-1], indent=indent)
def test_substitute_loop_node_no_parent():
    """Substituting a root loop node keeps the body edge and promotes the new
    node to root."""
    interface = AbstractSyntaxInterface()
    body = interface._add_code_node([Assignment(var('a'), const(2))])
    endless_loop = interface.add_endless_loop_with_body(body)
    while_loop = interface.factory.create_while_loop_node(condition=LogicCondition.initialize_symbol('a', interface.factory.logic_context))
    interface.substitute_loop_node(endless_loop, while_loop)
    assert interface.edges == ((while_loop, body),)
    assert len(interface.nodes) == 2
    assert interface.get_roots == (while_loop,)
((detect_target().name() == 'rocm'), 'Not supported by ROCM.')
class SplitGetItemTestCase(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(SplitGetItemTestCase, self).__init__(*args, **kwargs)
self._test_id = 0
def _test_split_getitem(self, batch_size=(1, 3), X_shape=(16, 32), split_sections=(4, 8, 2, 2), split_dim=1, item_idx=0, test_name='split_getitem', dtype='float16'):
assert (len(X_shape) == 2), f'expected X_shape to be 2 but got {X_shape}'
target = detect_target()
b_dim = shape_utils.gen_int_var_min_max(batch_size, name='input_batch')
X = Tensor(shape=[b_dim, *X_shape], dtype=dtype, name='input_0', is_input=True)
N = 16
if (split_dim == 1):
K = X_shape[1]
elif (split_dim == 2):
K = split_sections[item_idx]
else:
assert 0, f'expected split_dim to be either 1 or 2 but got {split_dim}'
W = Tensor(shape=[b_dim, N, K], dtype=dtype, name='input_1', is_input=True)
Y1 = ops.split()(X, split_sections, split_dim)
Y2 = ops.getitem()(Y1, item_idx)
Y = ops.bmm_rcr()(Y2, W)
Y._attrs['name'] = 'output_0'
Y._attrs['is_output'] = True
module = compile_model(Y, target, './tmp', f'{test_name}_{self._test_id}')
self._test_id += 1
for b in batch_size:
X_shape_pt = (b, *X_shape)
X_pt = get_random_torch_tensor(X_shape_pt, dtype=dtype)
W_pt = get_random_torch_tensor([b, N, K], dtype=dtype)
WT = torch.transpose(W_pt, 2, 1)
Y1_pt = torch.split(X_pt, split_sections, split_dim)
Y_pt = torch.bmm(Y1_pt[item_idx], WT)
y = torch.empty_like(Y_pt)
module.run_with_tensors({'input_0': X_pt, 'input_1': W_pt}, [y])
self.assertTrue(torch.allclose(Y_pt, y, atol=0.01, rtol=0.01))
(**filter_test_cases_by_params({TestEnv.CUDA_LESS_THAN_SM80: ['float16'], TestEnv.CUDA_SM80: ['float32']}))
def test_split_getitem(self, dtype):
self._test_split_getitem(test_name=f'split_getitem_{dtype}', dtype=dtype)
self._test_split_getitem(batch_size=[5], X_shape=(16, 32), split_sections=[8, 20, 4], split_dim=2, item_idx=1, test_name=f'split_getitem_{dtype}', dtype=dtype)
def _test_split_getitem_output(self, batch_size=(1, 3), X_shape=(16, 32), split_sections=(4, 8, 2, 2), split_dim=1, item_idx=0, test_name='split_getitem_output', dtype='float16'):
assert (len(X_shape) == 2), f'expected X_shape to be 2 but got {X_shape}'
target = detect_target()
b_dim = shape_utils.gen_int_var_min_max(batch_size, name='input_batch')
X = Tensor(shape=[b_dim, *X_shape], dtype=dtype, name='input_0', is_input=True)
Y1 = ops.split()(X, split_sections, split_dim)
Y = ops.getitem()(Y1, item_idx)
Y._attrs['name'] = 'output_0'
Y._attrs['is_output'] = True
module = compile_model(Y, target, './tmp', f'{test_name}_{self._test_id}')
self._test_id += 1
for b in batch_size:
X_shape_pt = (b, *X_shape)
X_pt = get_random_torch_tensor(X_shape_pt, dtype=dtype)
Y1_pt = torch.split(X_pt, split_sections, split_dim)
Y_pt = Y1_pt[item_idx]
y = torch.empty_like(Y_pt)
module.run_with_tensors([X_pt], [y])
self.assertTrue(torch.allclose(Y_pt, y, atol=0.01, rtol=0.01))
(**filter_test_cases_by_params({TestEnv.CUDA_LESS_THAN_SM80: ['float16'], TestEnv.CUDA_SM80: ['float32']}))
def test_split_getitem_output(self, dtype):
self._test_split_getitem_output(test_name='split_getitem_output', dtype=dtype)
self._test_split_getitem_output(batch_size=[10], X_shape=(16, 31), split_sections=[9, 19, 3], split_dim=2, item_idx=1, test_name='split_getitem_output', dtype=dtype)
def _test_split_multiple_getitems(self, batch_size=(1, 3), X_shape=(16, 32), split_sections=(4, 4, 6, 2), split_dim=1, test_name='split_multiple_getitems', dtype='float16'):
assert (len(X_shape) == 2), f'expected X_shape to be 2 but got {X_shape}'
assert (len(split_sections) >= 2), f'expected split_sections to have at least 2 values, but got {split_sections}'
target = detect_target()
b_dim = shape_utils.gen_int_var_min_max(batch_size, name='input_batch')
X = Tensor(shape=[b_dim, *X_shape], dtype=dtype, name='input_0', is_input=True)
X2_shape = list(X_shape)
item_idx0 = 0
item_idx1 = 1
assert (split_sections[item_idx0] == split_sections[item_idx1]), f'expected values of split_sections at {item_idx0} and {item_idx1} are equal, but got {{split_sections[item_idx0]}} and {{split_sections[item_idx1]}}'
X2_shape[(split_dim - 1)] = split_sections[item_idx0]
X2 = Tensor(shape=[b_dim, *X2_shape], dtype=dtype, name='input_2', is_input=True)
Y1 = ops.split()(X, split_sections, split_dim)
Y2 = ops.getitem()(Y1, item_idx0)
Y3 = ops.getitem()(Y1, item_idx1)
Y4 = ops.elementwise(FuncEnum.ADD)(Y2, X2)
Y5 = ops.elementwise(FuncEnum.ADD)(Y3, Y3)
Y = ops.elementwise(FuncEnum.ADD)(Y4, Y5)
Y._attrs['name'] = 'output_0'
Y._attrs['is_output'] = True
module = compile_model(Y, target, './tmp', f'{test_name}_{self._test_id}')
self._test_id += 1
for b in batch_size:
X_shape_pt = (b, *X_shape)
X_pt = get_random_torch_tensor(X_shape_pt, dtype=dtype)
X2_shape_pt = (b, *X2_shape)
X2_pt = get_random_torch_tensor(X2_shape_pt, dtype=dtype)
Y1_pt = torch.split(X_pt, split_sections, split_dim)
Y2_pt = Y1_pt[item_idx0]
Y3_pt = Y1_pt[item_idx1]
Y4_pt = (Y2_pt + X2_pt)
Y5_pt = (Y3_pt + Y3_pt)
Y_pt = (Y4_pt + Y5_pt)
y = torch.empty_like(Y_pt)
module.run_with_tensors({'input_0': X_pt, 'input_2': X2_pt}, [y])
self.assertTrue(torch.allclose(Y_pt, y, atol=0.01, rtol=0.01))
(**filter_test_cases_by_params({TestEnv.CUDA_LESS_THAN_SM80: ['float16'], TestEnv.CUDA_SM80: ['float32']}))
def test_split_mutiple_getitems(self, dtype):
self._test_split_multiple_getitems(test_name=f'split_multiple_getitems_{dtype}', dtype=dtype)
self._test_split_multiple_getitems(batch_size=[10], X_shape=(16, 31), split_sections=[9, 9, 13], split_dim=2, test_name=f'split_multiple_getitems_{dtype}', dtype=dtype) |
class TestDataBox(unittest.TestCase, UnittestTools):
    """Tests for the DataBox overlay attached to a CMapImagePlot."""

    def setUp(self):
        x_vals = np.arange(0, 5)
        y_vals = np.arange(0, 5)
        grid = GridDataSource(xdata=x_vals, ydata=y_vals, sort_order=('ascending', 'ascending'))
        mapper = GridMapper(range=DataRange2D(grid))
        colors = ImageData(data=np.ones(shape=(5, 5)), depth=1)
        self.plot = CMapImagePlot(index=grid, index_mapper=mapper, value=colors)
        self.databox = DataBox(component=self.plot, data_position=[0, 0])
        self.plot.overlays.append(self.databox)

    def test_update_data_position(self):
        # Moving the box in screen space must fire a data_position change event.
        with self.assertTraitChanges(self.databox, 'data_position') as watcher:
            self.databox.position = [1, 1]
        origin = [0.0, 0.0]
        self.assertSequenceEqual([(self.databox, 'data_position', origin, origin)], watcher.events)
class Glyph(object):
    """A single glyph entry of a TrueType 'glyf' table.

    A glyph is *simple* (``numberOfContours > 0``: coordinates, flags and
    contour end points), *composite* (``numberOfContours == -1``: a list of
    ``GlyphComponent``), or *variable composite* (``numberOfContours == -2``).
    Instances may also be in a *compact* state where only the raw binary
    ``data`` attribute exists; structured attributes are materialized lazily
    by ``expand()``.
    """

    def __init__(self, data=b''):
        """Wrap raw 'glyf' entry bytes; empty bytes mean an empty glyph."""
        if (not data):
            # Empty glyph: nothing to decompile lazily.
            self.numberOfContours = 0
            return
        self.data = data

    def compact(self, glyfTable, recalcBBoxes=True):
        """Drop every decompiled attribute, keeping only compiled bytes."""
        data = self.compile(glyfTable, recalcBBoxes)
        self.__dict__.clear()
        self.data = data

    def expand(self, glyfTable):
        """Decompile ``self.data`` into structured attributes (idempotent)."""
        if (not hasattr(self, 'data')):
            # Already expanded.
            return
        if (not self.data):
            del self.data
            self.numberOfContours = 0
            return
        (dummy, data) = sstruct.unpack2(glyphHeaderFormat, self.data, self)
        del self.data
        if (self.numberOfContours == 0):
            return
        if self.isComposite():
            self.decompileComponents(data, glyfTable)
        elif self.isVarComposite():
            self.decompileVarComponents(data, glyfTable)
        else:
            self.decompileCoordinates(data)

    def compile(self, glyfTable, recalcBBoxes=True, *, boundsDone=None):
        """Return the binary 'glyf' representation of this glyph.

        When ``recalcBBoxes`` is true the bounding box is recomputed first;
        ``boundsDone`` is a set of glyph names whose bounds are already final.
        """
        if hasattr(self, 'data'):
            if recalcBBoxes:
                # Must decompile in order to recalculate the bounding box.
                self.expand(glyfTable)
            else:
                return self.data
        if (self.numberOfContours == 0):
            return b''
        if recalcBBoxes:
            self.recalcBounds(glyfTable, boundsDone=boundsDone)
        data = sstruct.pack(glyphHeaderFormat, self)
        if self.isComposite():
            data = (data + self.compileComponents(glyfTable))
        elif self.isVarComposite():
            data = (data + self.compileVarComponents(glyfTable))
        else:
            data = (data + self.compileCoordinates())
        return data

    def toXML(self, writer, ttFont):
        """Write this glyph's contours/components and instructions as TTX XML."""
        if self.isComposite():
            for compo in self.components:
                compo.toXML(writer, ttFont)
            haveInstructions = hasattr(self, 'program')
        elif self.isVarComposite():
            for compo in self.components:
                compo.toXML(writer, ttFont)
            haveInstructions = False
        else:
            last = 0
            for i in range(self.numberOfContours):
                writer.begintag('contour')
                writer.newline()
                for j in range(last, (self.endPtsOfContours[i] + 1)):
                    attrs = [('x', self.coordinates[j][0]), ('y', self.coordinates[j][1]), ('on', (self.flags[j] & flagOnCurve))]
                    if (self.flags[j] & flagOverlapSimple):
                        attrs.append(('overlap', 1))
                    if (self.flags[j] & flagCubic):
                        attrs.append(('cubic', 1))
                    writer.simpletag('pt', attrs)
                    writer.newline()
                last = (self.endPtsOfContours[i] + 1)
                writer.endtag('contour')
                writer.newline()
            haveInstructions = (self.numberOfContours > 0)
        if haveInstructions:
            if self.program:
                writer.begintag('instructions')
                writer.newline()
                self.program.toXML(writer, ttFont)
                writer.endtag('instructions')
            else:
                writer.simpletag('instructions')
            writer.newline()

    def fromXML(self, name, attrs, content, ttFont):
        """Populate this glyph from one TTX XML element (contour/component/instructions)."""
        if (name == 'contour'):
            if (self.numberOfContours < 0):
                raise ttLib.TTLibError("can't mix composites and contours in glyph")
            self.numberOfContours = (self.numberOfContours + 1)
            coordinates = GlyphCoordinates()
            flags = bytearray()
            for element in content:
                if (not isinstance(element, tuple)):
                    continue
                (name, attrs, content) = element
                if (name != 'pt'):
                    continue
                coordinates.append((safeEval(attrs['x']), safeEval(attrs['y'])))
                flag = bool(safeEval(attrs['on']))
                if (('overlap' in attrs) and bool(safeEval(attrs['overlap']))):
                    flag |= flagOverlapSimple
                if (('cubic' in attrs) and bool(safeEval(attrs['cubic']))):
                    flag |= flagCubic
                flags.append(flag)
            if (not hasattr(self, 'coordinates')):
                self.coordinates = coordinates
                self.flags = flags
                self.endPtsOfContours = [(len(coordinates) - 1)]
            else:
                self.coordinates.extend(coordinates)
                self.flags.extend(flags)
                self.endPtsOfContours.append((len(self.coordinates) - 1))
        elif (name == 'component'):
            if (self.numberOfContours > 0):
                raise ttLib.TTLibError("can't mix composites and contours in glyph")
            self.numberOfContours = (- 1)
            if (not hasattr(self, 'components')):
                self.components = []
            component = GlyphComponent()
            self.components.append(component)
            component.fromXML(name, attrs, content, ttFont)
        elif (name == 'varComponent'):
            if (self.numberOfContours > 0):
                raise ttLib.TTLibError("can't mix composites and contours in glyph")
            self.numberOfContours = (- 2)
            if (not hasattr(self, 'components')):
                self.components = []
            component = GlyphVarComponent()
            self.components.append(component)
            component.fromXML(name, attrs, content, ttFont)
        elif (name == 'instructions'):
            self.program = ttProgram.Program()
            for element in content:
                if (not isinstance(element, tuple)):
                    continue
                (name, attrs, content) = element
                self.program.fromXML(name, attrs, content, ttFont)

    def getCompositeMaxpValues(self, glyfTable, maxComponentDepth=1):
        """Recursively total points/contours/depth for 'maxp' over all components."""
        assert (self.isComposite() or self.isVarComposite())
        nContours = 0
        nPoints = 0
        initialMaxComponentDepth = maxComponentDepth
        for compo in self.components:
            baseGlyph = glyfTable[compo.glyphName]
            if (baseGlyph.numberOfContours == 0):
                continue
            elif (baseGlyph.numberOfContours > 0):
                (nP, nC) = baseGlyph.getMaxpValues()
            else:
                (nP, nC, componentDepth) = baseGlyph.getCompositeMaxpValues(glyfTable, (initialMaxComponentDepth + 1))
                maxComponentDepth = max(maxComponentDepth, componentDepth)
            nPoints = (nPoints + nP)
            nContours = (nContours + nC)
        return CompositeMaxpValues(nPoints, nContours, maxComponentDepth)

    def getMaxpValues(self):
        """Return (point count, contour count) for a simple glyph."""
        assert (self.numberOfContours > 0)
        return (len(self.coordinates), len(self.endPtsOfContours))

    def decompileComponents(self, data, glyfTable):
        """Parse composite-glyph component records (and trailing instructions)."""
        self.components = []
        more = 1
        haveInstructions = 0
        while more:
            component = GlyphComponent()
            (more, haveInstr, data) = component.decompile(data, glyfTable)
            haveInstructions = (haveInstructions | haveInstr)
            self.components.append(component)
        if haveInstructions:
            (numInstructions,) = struct.unpack('>h', data[:2])
            data = data[2:]
            self.program = ttProgram.Program()
            self.program.fromBytecode(data[:numInstructions])
            data = data[numInstructions:]
            # Up to 3 bytes of padding is expected; more than that is suspicious.
            if (len(data) >= 4):
                log.warning('too much glyph data at the end of composite glyph: %d excess bytes', len(data))

    def decompileVarComponents(self, data, glyfTable):
        """Parse variable-composite component records until data runs out."""
        self.components = []
        while (len(data) >= GlyphVarComponent.MIN_SIZE):
            component = GlyphVarComponent()
            data = component.decompile(data, glyfTable)
            self.components.append(component)

    def decompileCoordinates(self, data):
        """Parse a simple glyph's end points, instructions, flags and deltas."""
        endPtsOfContours = array.array('H')
        endPtsOfContours.frombytes(data[:(2 * self.numberOfContours)])
        if (sys.byteorder != 'big'):
            # glyf data is big-endian; swap on little-endian hosts.
            endPtsOfContours.byteswap()
        self.endPtsOfContours = endPtsOfContours.tolist()
        pos = (2 * self.numberOfContours)
        (instructionLength,) = struct.unpack('>h', data[pos:(pos + 2)])
        self.program = ttProgram.Program()
        self.program.fromBytecode(data[(pos + 2):((pos + 2) + instructionLength)])
        pos += (2 + instructionLength)
        nCoordinates = (self.endPtsOfContours[(- 1)] + 1)
        (flags, xCoordinates, yCoordinates) = self.decompileCoordinatesRaw(nCoordinates, data, pos)
        self.coordinates = coordinates = GlyphCoordinates.zeros(nCoordinates)
        xIndex = 0
        yIndex = 0
        # Expand short/same-flag deltas into signed per-point deltas.
        for i in range(nCoordinates):
            flag = flags[i]
            if (flag & flagXShort):
                if (flag & flagXsame):
                    x = xCoordinates[xIndex]
                else:
                    x = (- xCoordinates[xIndex])
                xIndex = (xIndex + 1)
            elif (flag & flagXsame):
                x = 0
            else:
                x = xCoordinates[xIndex]
                xIndex = (xIndex + 1)
            if (flag & flagYShort):
                if (flag & flagYsame):
                    y = yCoordinates[yIndex]
                else:
                    y = (- yCoordinates[yIndex])
                yIndex = (yIndex + 1)
            elif (flag & flagYsame):
                y = 0
            else:
                y = yCoordinates[yIndex]
                yIndex = (yIndex + 1)
            coordinates[i] = (x, y)
        assert (xIndex == len(xCoordinates))
        assert (yIndex == len(yCoordinates))
        coordinates.relativeToAbsolute()
        # Discard packing-only flag bits, keeping only the semantic ones.
        for i in range(len(flags)):
            flags[i] &= keepFlags
        self.flags = flags

    def decompileCoordinatesRaw(self, nCoordinates, data, pos=0):
        """Return (flags, raw x deltas, raw y deltas) decoded from packed data."""
        flags = bytearray(nCoordinates)
        xFormat = '>'
        yFormat = '>'
        j = 0
        # First pass: expand repeat runs and build the struct format strings.
        while True:
            flag = data[pos]
            pos += 1
            repeat = 1
            if (flag & flagRepeat):
                repeat = (data[pos] + 1)
                pos += 1
            for k in range(repeat):
                if (flag & flagXShort):
                    xFormat = (xFormat + 'B')
                elif (not (flag & flagXsame)):
                    xFormat = (xFormat + 'h')
                if (flag & flagYShort):
                    yFormat = (yFormat + 'B')
                elif (not (flag & flagYsame)):
                    yFormat = (yFormat + 'h')
                flags[j] = flag
                j = (j + 1)
            if (j >= nCoordinates):
                break
        assert (j == nCoordinates), 'bad glyph flags'
        xDataLen = struct.calcsize(xFormat)
        yDataLen = struct.calcsize(yFormat)
        if (((len(data) - pos) - (xDataLen + yDataLen)) >= 4):
            log.warning('too much glyph data: %d excess bytes', ((len(data) - pos) - (xDataLen + yDataLen)))
        xCoordinates = struct.unpack(xFormat, data[pos:(pos + xDataLen)])
        yCoordinates = struct.unpack(yFormat, data[(pos + xDataLen):((pos + xDataLen) + yDataLen)])
        return (flags, xCoordinates, yCoordinates)

    def compileComponents(self, glyfTable):
        """Serialize component records; instructions go after the last one."""
        data = b''
        lastcomponent = (len(self.components) - 1)
        more = 1
        haveInstructions = 0
        for i in range(len(self.components)):
            if (i == lastcomponent):
                haveInstructions = hasattr(self, 'program')
                more = 0
            compo = self.components[i]
            data = (data + compo.compile(more, haveInstructions, glyfTable))
        if haveInstructions:
            instructions = self.program.getBytecode()
            data = ((data + struct.pack('>h', len(instructions))) + instructions)
        return data

    def compileVarComponents(self, glyfTable):
        """Serialize variable-composite component records back to back."""
        return b''.join((c.compile(glyfTable) for c in self.components))

    def compileCoordinates(self):
        """Serialize a simple glyph's end points, instructions and packed deltas."""
        assert (len(self.coordinates) == len(self.flags))
        data = []
        endPtsOfContours = array.array('H', self.endPtsOfContours)
        if (sys.byteorder != 'big'):
            endPtsOfContours.byteswap()
        data.append(endPtsOfContours.tobytes())
        instructions = self.program.getBytecode()
        data.append(struct.pack('>h', len(instructions)))
        data.append(instructions)
        deltas = self.coordinates.copy()
        deltas.toInt()
        deltas.absoluteToRelative()
        deltas = self.compileDeltasGreedy(self.flags, deltas)
        data.extend(deltas)
        return b''.join(data)

    def compileDeltasGreedy(self, flags, deltas):
        """Pack deltas greedily: short/same encoding per point plus repeat runs.

        Returns (flag bytes, x delta bytes, y delta bytes).
        """
        compressedFlags = bytearray()
        compressedXs = bytearray()
        compressedYs = bytearray()
        lastflag = None
        repeat = 0
        for (flag, (x, y)) in zip(flags, deltas):
            # For short (one-byte) deltas the "same" bit doubles as sign bit.
            if (x == 0):
                flag = (flag | flagXsame)
            elif ((- 255) <= x <= 255):
                flag = (flag | flagXShort)
                if (x > 0):
                    flag = (flag | flagXsame)
                else:
                    x = (- x)
                compressedXs.append(x)
            else:
                compressedXs.extend(struct.pack('>h', x))
            if (y == 0):
                flag = (flag | flagYsame)
            elif ((- 255) <= y <= 255):
                flag = (flag | flagYShort)
                if (y > 0):
                    flag = (flag | flagYsame)
                else:
                    y = (- y)
                compressedYs.append(y)
            else:
                compressedYs.extend(struct.pack('>h', y))
            if ((flag == lastflag) and (repeat != 255)):
                repeat = (repeat + 1)
                if (repeat == 1):
                    compressedFlags.append(flag)
                else:
                    # Retroactively convert the previous flag byte into a repeat run.
                    compressedFlags[(- 2)] = (flag | flagRepeat)
                    compressedFlags[(- 1)] = repeat
            else:
                repeat = 0
                compressedFlags.append(flag)
            lastflag = flag
        return (compressedFlags, compressedXs, compressedYs)

    def compileDeltasOptimal(self, flags, deltas):
        """Pack deltas via dynamic programming over flag/repeat encodings.

        Candidate tuples are (cost, previous tuple, flag with repeat count in
        the high byte, coord byte count); the cheapest chain is unwound into
        the final flag stream.
        """
        candidates = []
        bestTuple = None
        bestCost = 0
        repeat = 0
        for (flag, (x, y)) in zip(flags, deltas):
            (flag, coordBytes) = flagBest(x, y, flag)
            bestCost += (1 + coordBytes)
            newCandidates = [(bestCost, bestTuple, flag, coordBytes), ((bestCost + 1), bestTuple, (flag | flagRepeat), coordBytes)]
            for (lastCost, lastTuple, lastFlag, coordBytes) in candidates:
                if (((lastCost + coordBytes) <= (bestCost + 1)) and (lastFlag & flagRepeat) and (lastFlag < 65280) and flagSupports(lastFlag, flag)):
                    if (((lastFlag & 255) == (flag | flagRepeat)) and (lastCost == (bestCost + 1))):
                        continue
                    newCandidates.append(((lastCost + coordBytes), lastTuple, (lastFlag + 256), coordBytes))
            candidates = newCandidates
            bestTuple = min(candidates, key=(lambda t: t[0]))
            bestCost = bestTuple[0]
        flags = []
        while bestTuple:
            (cost, bestTuple, flag, coordBytes) = bestTuple
            flags.append(flag)
        flags.reverse()
        compressedFlags = bytearray()
        compressedXs = bytearray()
        compressedYs = bytearray()
        coords = iter(deltas)
        ff = []
        for flag in flags:
            (repeatCount, flag) = ((flag >> 8), (flag & 255))
            compressedFlags.append(flag)
            if (flag & flagRepeat):
                assert (repeatCount > 0)
                compressedFlags.append(repeatCount)
            else:
                assert (repeatCount == 0)
            for i in range((1 + repeatCount)):
                (x, y) = next(coords)
                flagEncodeCoords(flag, x, y, compressedXs, compressedYs)
                ff.append(flag)
        # Sanity check: every delta must have been consumed exactly once.
        try:
            next(coords)
            raise Exception('internal error')
        except StopIteration:
            pass
        return (compressedFlags, compressedXs, compressedYs)

    def recalcBounds(self, glyfTable, *, boundsDone=None):
        """Recompute xMin/yMin/xMax/yMax from the (possibly composite) outline."""
        if (self.isComposite() and self.tryRecalcBoundsComposite(glyfTable, boundsDone=boundsDone)):
            return
        try:
            (coords, endPts, flags) = self.getCoordinates(glyfTable)
            (self.xMin, self.yMin, self.xMax, self.yMax) = coords.calcIntBounds()
        except NotImplementedError:
            pass

    def tryRecalcBoundsComposite(self, glyfTable, *, boundsDone=None):
        """Fast path: union component bounds when only integer offsets are used.

        Returns False (caller must use the slow path) if any component has a
        point-based anchor, a transform, or a non-integer offset.
        """
        for compo in self.components:
            if (hasattr(compo, 'firstPt') or hasattr(compo, 'transform')):
                return False
            if ((not float(compo.x).is_integer()) or (not float(compo.y).is_integer())):
                return False
        bounds = None
        for compo in self.components:
            glyphName = compo.glyphName
            g = glyfTable[glyphName]
            if ((boundsDone is None) or (glyphName not in boundsDone)):
                g.recalcBounds(glyfTable, boundsDone=boundsDone)
                if (boundsDone is not None):
                    boundsDone.add(glyphName)
            if (g.numberOfContours == 0):
                continue
            (x, y) = (compo.x, compo.y)
            bounds = updateBounds(bounds, ((g.xMin + x), (g.yMin + y)))
            bounds = updateBounds(bounds, ((g.xMax + x), (g.yMax + y)))
        if (bounds is None):
            bounds = (0, 0, 0, 0)
        (self.xMin, self.yMin, self.xMax, self.yMax) = bounds
        return True

    def isComposite(self):
        """True if this glyph references components (numberOfContours == -1)."""
        if hasattr(self, 'data'):
            # Compact form: peek at the contour count in the raw header.
            return ((struct.unpack('>h', self.data[:2])[0] == (- 1)) if self.data else False)
        else:
            return (self.numberOfContours == (- 1))

    def isVarComposite(self):
        """True if this is a variable composite (numberOfContours == -2)."""
        if hasattr(self, 'data'):
            return ((struct.unpack('>h', self.data[:2])[0] == (- 2)) if self.data else False)
        else:
            return (self.numberOfContours == (- 2))

    def getCoordinates(self, glyfTable):
        """Return (coordinates, endPtsOfContours, flags), flattening composites.

        Raises TTLibError on recursive component references and
        NotImplementedError for variable composites.
        """
        if (self.numberOfContours > 0):
            return (self.coordinates, self.endPtsOfContours, self.flags)
        elif self.isComposite():
            allCoords = GlyphCoordinates()
            allFlags = bytearray()
            allEndPts = []
            for compo in self.components:
                g = glyfTable[compo.glyphName]
                try:
                    (coordinates, endPts, flags) = g.getCoordinates(glyfTable)
                except RecursionError:
                    raise ttLib.TTLibError(("glyph '%s' contains a recursive component reference" % compo.glyphName))
                coordinates = GlyphCoordinates(coordinates)
                if hasattr(compo, 'firstPt'):
                    # Point-anchored component: align secondPt onto firstPt.
                    if hasattr(compo, 'transform'):
                        coordinates.transform(compo.transform)
                    (x1, y1) = allCoords[compo.firstPt]
                    (x2, y2) = coordinates[compo.secondPt]
                    move = ((x1 - x2), (y1 - y2))
                    coordinates.translate(move)
                else:
                    move = (compo.x, compo.y)
                    if (not hasattr(compo, 'transform')):
                        coordinates.translate(move)
                    else:
                        # Apple and MS disagree on whether the offset is scaled.
                        apple_way = (compo.flags & SCALED_COMPONENT_OFFSET)
                        ms_way = (compo.flags & UNSCALED_COMPONENT_OFFSET)
                        assert (not (apple_way and ms_way))
                        if (not (apple_way or ms_way)):
                            scale_component_offset = SCALE_COMPONENT_OFFSET_DEFAULT
                        else:
                            scale_component_offset = apple_way
                        if scale_component_offset:
                            coordinates.translate(move)
                            coordinates.transform(compo.transform)
                        else:
                            coordinates.transform(compo.transform)
                            coordinates.translate(move)
                offset = len(allCoords)
                allEndPts.extend(((e + offset) for e in endPts))
                allCoords.extend(coordinates)
                allFlags.extend(flags)
            return (allCoords, allEndPts, allFlags)
        elif self.isVarComposite():
            raise NotImplementedError('use TTGlyphSet to draw VarComposite glyphs')
        else:
            return (GlyphCoordinates(), [], bytearray())

    def getComponentNames(self, glyfTable):
        """Return referenced glyph names, parsing compact data without expanding."""
        if (hasattr(self, 'data') and self.isVarComposite()):
            # VarComposite data is not scanned raw; expand first.
            self.expand(glyfTable)
        if (not hasattr(self, 'data')):
            if (self.isComposite() or self.isVarComposite()):
                return [c.glyphName for c in self.components]
            else:
                return []
        if ((not self.data) or (struct.unpack('>h', self.data[:2])[0] >= 0)):
            return []
        data = self.data
        # Skip the 10-byte glyph header.
        i = 10
        components = []
        more = 1
        while more:
            (flags, glyphID) = struct.unpack('>HH', data[i:(i + 4)])
            i += 4
            flags = int(flags)
            components.append(glyfTable.getGlyphName(int(glyphID)))
            # Skip the argument and transform fields based on the flag bits.
            if (flags & ARG_1_AND_2_ARE_WORDS):
                i += 4
            else:
                i += 2
            if (flags & WE_HAVE_A_SCALE):
                i += 2
            elif (flags & WE_HAVE_AN_X_AND_Y_SCALE):
                i += 4
            elif (flags & WE_HAVE_A_TWO_BY_TWO):
                i += 8
            more = (flags & MORE_COMPONENTS)
        return components

    def trim(self, remove_hinting=False):
        """Shrink ``self.data`` to its minimal length, optionally stripping hints.

        Works directly on the raw bytes when present; otherwise (expanded form)
        only hint removal is applicable.
        """
        if (not hasattr(self, 'data')):
            if remove_hinting:
                if self.isComposite():
                    if hasattr(self, 'program'):
                        del self.program
                elif self.isVarComposite():
                    # VarComposite glyphs carry no instructions.
                    pass
                else:
                    self.program = ttProgram.Program()
                    self.program.fromBytecode([])
            return
        if (not self.data):
            return
        numContours = struct.unpack('>h', self.data[:2])[0]
        data = bytearray(self.data)
        i = 10
        if (numContours >= 0):
            i += (2 * numContours)
            nCoordinates = (((data[(i - 2)] << 8) | data[(i - 1)]) + 1)
            instructionLen = ((data[i] << 8) | data[(i + 1)])
            if remove_hinting:
                # Zero the instruction length and excise the instruction bytes.
                data[i] = data[(i + 1)] = 0
                i += 2
                if instructionLen:
                    data = (data[:i] + data[(i + instructionLen):])
                    instructionLen = 0
            else:
                i += (2 + instructionLen)
            coordBytes = 0
            j = 0
            # Walk the flag stream to measure the packed coordinate length.
            while True:
                flag = data[i]
                i = (i + 1)
                repeat = 1
                if (flag & flagRepeat):
                    repeat = (data[i] + 1)
                    i = (i + 1)
                xBytes = yBytes = 0
                if (flag & flagXShort):
                    xBytes = 1
                elif (not (flag & flagXsame)):
                    xBytes = 2
                if (flag & flagYShort):
                    yBytes = 1
                elif (not (flag & flagYsame)):
                    yBytes = 2
                coordBytes += ((xBytes + yBytes) * repeat)
                j += repeat
                if (j >= nCoordinates):
                    break
            assert (j == nCoordinates), 'bad glyph flags'
            i += coordBytes
            data = data[:i]
        elif self.isComposite():
            more = 1
            we_have_instructions = False
            while more:
                flags = ((data[i] << 8) | data[(i + 1)])
                if remove_hinting:
                    flags &= (~ WE_HAVE_INSTRUCTIONS)
                if (flags & WE_HAVE_INSTRUCTIONS):
                    we_have_instructions = True
                data[(i + 0)] = (flags >> 8)
                data[(i + 1)] = (flags & 255)
                i += 4
                flags = int(flags)
                if (flags & ARG_1_AND_2_ARE_WORDS):
                    i += 4
                else:
                    i += 2
                if (flags & WE_HAVE_A_SCALE):
                    i += 2
                elif (flags & WE_HAVE_AN_X_AND_Y_SCALE):
                    i += 4
                elif (flags & WE_HAVE_A_TWO_BY_TWO):
                    i += 8
                more = (flags & MORE_COMPONENTS)
            if we_have_instructions:
                instructionLen = ((data[i] << 8) | data[(i + 1)])
                i += (2 + instructionLen)
            data = data[:i]
        elif self.isVarComposite():
            i = 0
            MIN_SIZE = GlyphVarComponent.MIN_SIZE
            while (len(data[i:(i + MIN_SIZE)]) >= MIN_SIZE):
                size = GlyphVarComponent.getSize(data[i:(i + MIN_SIZE)])
                i += size
            data = data[:i]
        self.data = data

    def removeHinting(self):
        """Strip TrueType instructions from this glyph."""
        self.trim(remove_hinting=True)

    def draw(self, pen, glyfTable, offset=0):
        """Draw the outline with a (segment) pen; composites become addComponent."""
        if self.isComposite():
            for component in self.components:
                (glyphName, transform) = component.getComponentInfo()
                pen.addComponent(glyphName, transform)
            return
        (coordinates, endPts, flags) = self.getCoordinates(glyfTable)
        if offset:
            coordinates = coordinates.copy()
            coordinates.translate((offset, 0))
        start = 0
        maybeInt = (lambda v: (int(v) if (v == int(v)) else v))
        for end in endPts:
            end = (end + 1)
            contour = coordinates[start:end]
            cFlags = [(flagOnCurve & f) for f in flags[start:end]]
            cuFlags = [(flagCubic & f) for f in flags[start:end]]
            start = end
            if (1 not in cFlags):
                # Contour with no on-curve points at all.
                assert (all(cuFlags) or (not any(cuFlags)))
                cubic = all(cuFlags)
                if cubic:
                    count = len(contour)
                    assert ((count % 2) == 0), 'Odd number of cubic off-curves undefined'
                    l = contour[(- 1)]
                    f = contour[0]
                    # Implied on-curve point midway between consecutive off-curves.
                    p0 = (maybeInt(((l[0] + f[0]) * 0.5)), maybeInt(((l[1] + f[1]) * 0.5)))
                    pen.moveTo(p0)
                    for i in range(0, count, 2):
                        p1 = contour[i]
                        p2 = contour[(i + 1)]
                        p4 = contour[((i + 2) if ((i + 2) < count) else 0)]
                        p3 = (maybeInt(((p2[0] + p4[0]) * 0.5)), maybeInt(((p2[1] + p4[1]) * 0.5)))
                        pen.curveTo(p1, p2, p3)
                else:
                    # TrueType quadratic special form: None closes an all-off-curve contour.
                    contour.append(None)
                    pen.qCurveTo(*contour)
            else:
                # Rotate the contour so it ends on an on-curve point.
                firstOnCurve = (cFlags.index(1) + 1)
                contour = (contour[firstOnCurve:] + contour[:firstOnCurve])
                cFlags = (cFlags[firstOnCurve:] + cFlags[:firstOnCurve])
                cuFlags = (cuFlags[firstOnCurve:] + cuFlags[:firstOnCurve])
                pen.moveTo(contour[(- 1)])
                while contour:
                    nextOnCurve = (cFlags.index(1) + 1)
                    if (nextOnCurve == 1):
                        if (len(contour) > 1):
                            pen.lineTo(contour[0])
                    else:
                        cubicFlags = [f for f in cuFlags[:(nextOnCurve - 1)]]
                        assert (all(cubicFlags) or (not any(cubicFlags)))
                        cubic = any(cubicFlags)
                        if cubic:
                            assert all(cubicFlags), 'Mixed cubic and quadratic segment undefined'
                            count = nextOnCurve
                            assert (count >= 3), 'At least two cubic off-curve points required'
                            assert (((count - 1) % 2) == 0), 'Odd number of cubic off-curves undefined'
                            for i in range(0, (count - 3), 2):
                                p1 = contour[i]
                                p2 = contour[(i + 1)]
                                p4 = contour[(i + 2)]
                                p3 = (maybeInt(((p2[0] + p4[0]) * 0.5)), maybeInt(((p2[1] + p4[1]) * 0.5)))
                                lastOnCurve = p3
                                pen.curveTo(p1, p2, p3)
                            pen.curveTo(*contour[(count - 3):count])
                        else:
                            pen.qCurveTo(*contour[:nextOnCurve])
                    contour = contour[nextOnCurve:]
                    cFlags = cFlags[nextOnCurve:]
                    cuFlags = cuFlags[nextOnCurve:]
            pen.closePath()

    def drawPoints(self, pen, glyfTable, offset=0):
        """Draw the outline with a point pen, preserving per-point on/off info."""
        if self.isComposite():
            for component in self.components:
                (glyphName, transform) = component.getComponentInfo()
                pen.addComponent(glyphName, transform)
            return
        (coordinates, endPts, flags) = self.getCoordinates(glyfTable)
        if offset:
            coordinates = coordinates.copy()
            coordinates.translate((offset, 0))
        start = 0
        for end in endPts:
            end = (end + 1)
            contour = coordinates[start:end]
            cFlags = flags[start:end]
            start = end
            pen.beginPath()
            # Segment type of the first on-curve point depends on the final flag.
            if (cFlags[(- 1)] & flagOnCurve):
                segmentType = 'line'
            elif (cFlags[(- 1)] & flagCubic):
                segmentType = 'curve'
            else:
                segmentType = 'qcurve'
            for (i, pt) in enumerate(contour):
                if (cFlags[i] & flagOnCurve):
                    pen.addPoint(pt, segmentType=segmentType)
                    segmentType = 'line'
                else:
                    pen.addPoint(pt)
                    segmentType = ('curve' if (cFlags[i] & flagCubic) else 'qcurve')
            pen.endPath()

    def __eq__(self, other):
        """Structural equality: same type and identical attribute dicts."""
        if (type(self) != type(other)):
            return NotImplemented
        return (self.__dict__ == other.__dict__)

    def __ne__(self, other):
        result = self.__eq__(other)
        return (result if (result is NotImplemented) else (not result))
class OptionPlotoptionsWindbarbSonificationTracksMappingTremoloSpeed(Options):
    """Options mapping for the windbarb sonification tremolo-speed track.

    Each name delegates to the underlying ``_config``/``_config_get`` store.

    NOTE(review): every name is defined twice (a getter-style body followed by
    a setter-style body); the second ``def`` shadows the first, so only the
    setter variants survive on the class.  Sibling Options classes in this
    file share the pattern, which looks like stripped ``@property`` /
    ``@name.setter`` decorators — confirm against the original source.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def get_identity(identity):
    """Split *identity* into a ``(user, group)`` pair.

    Returns ``(identity, None)`` for AnonymousUser or user-model instances,
    ``(None, identity)`` for Group instances, and raises NotUserNorGroup
    for anything else.
    """
    # Guard clauses instead of an if/elif chain; check AnonymousUser first so
    # the user model is only resolved when actually needed.
    if isinstance(identity, AnonymousUser):
        return (identity, None)
    if isinstance(identity, get_user_model()):
        return (identity, None)
    if isinstance(identity, Group):
        return (None, identity)
    raise NotUserNorGroup('User/AnonymousUser or Group instance is required (got {})'.format(identity))
class ClassificationInstance(Base):
    """Declarative ORM row tracking one classification run over a dataset.

    NOTE(review): no primary-key column is declared here — presumably the
    shared ``Base`` (or a mixin) supplies one; confirm against the model base.
    """
    __tablename__ = 'cls_classification_instance'
    # Current state of the run (free-form text; values defined by callers).
    status = Column(Text)
    # Identifiers for the owning organization and the dataset being classified.
    organization_key = Column(Text)
    dataset_key = Column(Text)
    dataset_name = Column(Text)
    # Target label/column and classification type — semantics defined by callers.
    target = Column(Text)
    type = Column(Text)
    # Server-side timestamps; updated_at is refreshed on every UPDATE.
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
class Constraint(Validator, PhantomType):
    """A phantom type narrowing an underlying type with extra predicates."""

    def __init__(self, for_type, predicates):
        """Wrap *for_type* with an iterable of boolean *predicates*."""
        self.type = for_type
        self.predicates = predicates

    def validate_instance(self, inst, sampler=None):
        """Validate *inst* against the base type, then each predicate in order."""
        self.type.validate_instance(inst, sampler)
        for check in self.predicates:
            if not check(inst):
                raise TypeMismatchError(inst, self)

    def __ge__(self, other):
        # A constrained type is never treated as a supertype.
        return False

    def __le__(self, other):
        # Subtype relation is delegated to the underlying type.
        return self.type <= other
class FuncCall(object):
    """Helper for defensively invoking a method by name.

    NOTE(review): ``pcall`` takes the target object as its first positional
    parameter rather than ``self`` — this looks like a stripped
    ``@staticmethod``; confirm against the original source.
    """

    def pcall(obj, method_name, *args):
        """Call ``obj.method_name(*args)`` if the method exists; else log and return None."""
        tracer.debug('pcall: trying to call [%s]', method_name)
        if hasattr(obj, method_name):
            tracer.debug('pcall: calling method [%s]', method_name)
            return getattr(obj, method_name)(*args)
        tracer.debug('pcall: method [%s] does not exist.', method_name)
        return None
class Migration(migrations.Migration):
    """Drop ``final_of_fy`` and re-run the download-view SQL.

    NOTE(review): the view SQL is read from disk via ``Path.read_text()`` at
    module import time, so loading this migration fails if the file is absent.
    """
    dependencies = [('financial_activities', '0003_financialaccountsbyprogramactivityobjectclass_disaster_emergency_fund')]
    operations = [migrations.RemoveField(model_name='financialaccountsbyprogramactivityobjectclass', name='final_of_fy'), migrations.RunSQL(sql=[f"{Path('usaspending_api/download/sql/vw_financial_accounts_by_program_activity_object_class_download.sql').read_text()}"])]
# NOTE(review): bare call associating the NXT_PACKET_IN subtype constant with
# the NXTPacketIn class below; it may have been a stripped
# '@_nx_subtype(ofproto.NXT_PACKET_IN)' class decorator — confirm against the
# original source.
_nx_subtype(ofproto.NXT_PACKET_IN)
class NXTPacketIn(NiciraHeader):
    """Nicira NXT_PACKET_IN vendor-extension message (packet-in with NXM match).

    NOTE(review): ``parser`` takes ``cls`` and acts as an alternate
    constructor — it looks like a stripped ``@classmethod`` decorator;
    confirm against the original source.
    """
    def __init__(self, datapath, buffer_id, total_len, reason, table_id, cookie, match_len, match, frame):
        super(NXTPacketIn, self).__init__(datapath, ofproto.NXT_PACKET_IN)
        self.buffer_id = buffer_id
        self.total_len = total_len
        self.reason = reason
        self.table_id = table_id
        self.cookie = cookie
        self.match_len = match_len
        self.match = match
        self.frame = frame
    def parser(cls, datapath, buf, offset):
        """Decode an NXT_PACKET_IN body starting at *offset* in *buf*."""
        (buffer_id, total_len, reason, table_id, cookie, match_len) = struct.unpack_from(ofproto.NX_PACKET_IN_PACK_STR, buf, offset)
        offset += (ofproto.NX_PACKET_IN_SIZE - ofproto.NICIRA_HEADER_SIZE)
        match = nx_match.NXMatch.parser(buf, offset, match_len)
        # The match field is padded out to an 8-byte boundary.
        offset += (((match_len + 7) // 8) * 8)
        frame = buf[offset:]
        # Trim any trailing padding beyond the declared frame length.
        if (total_len < len(frame)):
            frame = frame[:total_len]
        return cls(datapath, buffer_id, total_len, reason, table_id, cookie, match_len, match, frame)
def kinetic3d_41(ax, da, A, bx, db, B):
    """Machine-generated 3D kinetic-energy integral block for one shell pair.

    Parameters appear to be Gaussian exponents (``ax``, ``bx``),
    contraction-related factors (``da``, ``db``) and centers ``A``, ``B``
    (3-vectors) — TODO confirm against the code generator.  Returns a
    (15, 3) float array whose entries are ``numpy.sum`` reductions of the
    assembled expressions.  The ``x0``..``x177`` intermediates are
    common-subexpression-eliminated terms emitted by a generator; do not
    hand-edit them.
    """
    result = numpy.zeros((15, 3), dtype=float)
    x0 = (2.0 * ax)
    x1 = (((2.0 * bx) + x0) ** (- 1.0))
    x2 = (- ax)
    x3 = ((ax + bx) ** (- 1.0))
    x4 = ((- x3) * ((ax * A[0]) + (bx * B[0])))
    x5 = ((- x4) - A[0])
    x6 = (x5 ** 2)
    x7 = (2.0 * (ax ** 2))
    x8 = ((- x2) - (x7 * (x1 + x6)))
    x9 = (bx * x3)
    x10 = (ax * x9)
    x11 = numpy.exp(((- x10) * ((A[0] - B[0]) ** 2)))
    x12 = (1. * numpy.sqrt(x3))
    x13 = (x11 * x12)
    x14 = (x13 * x5)
    x15 = (x14 * x8)
    x16 = (x0 * x9)
    x17 = ((x14 * x16) + x15)
    x18 = ((- x4) - B[0])
    x19 = (x13 * x18)
    x20 = (x19 * (x16 + x8))
    x21 = (x1 * (x17 + x20))
    x22 = (x20 * x5)
    x23 = (x1 * x13)
    x24 = (x14 * x18)
    x25 = (x23 + x24)
    x26 = (x23 * x8)
    x27 = (((x16 * x25) + x22) + x26)
    x28 = (x27 * x5)
    x29 = (x1 * (x14 + x19))
    x30 = (x25 * x5)
    x31 = (x29 + x30)
    x32 = (x9 * ((x0 * x31) - x19))
    x33 = (4.0 * x10)
    x34 = (x17 * x5)
    x35 = (x13 * x6)
    x36 = (x23 + x35)
    x37 = (x9 * ((x0 * x36) - x13))
    x38 = (x34 + x37)
    x39 = (x26 + x38)
    x40 = (2.0 * x23)
    x41 = (x5 * (x36 + x40))
    x42 = (((x1 * ((x14 * x33) + (2.0 * x15))) + (x39 * x5)) + (x9 * ((x0 * x41) - (2.0 * x14))))
    x43 = (3.0 * x26)
    x44 = ((x21 + x28) + x32)
    x45 = (3.0 * x23)
    x46 = (2.0 * x24)
    x47 = ((x1 * ((x35 + x45) + x46)) + (x31 * x5))
    x48 = (((x1 * ((((2.0 * x22) + (x25 * x33)) + x38) + x43)) + (x44 * x5)) - (x9 * (((((- 2.0) * ax) * x47) + x40) + x46)))
    x49 = ((3.0 * x29) + (3.0 * x30))
    x50 = ((x1 * (x41 + x49)) + (x47 * x5))
    x51 = numpy.exp(((- x10) * ((A[1] - B[1]) ** 2)))
    x52 = numpy.exp(((- x10) * ((A[2] - B[2]) ** 2)))
    x53 = ((3. * x3) * x52)
    x54 = (x51 * x53)
    x55 = ((- x3) * ((ax * A[1]) + (bx * B[1])))
    x56 = ((- x55) - A[1])
    x57 = (x56 ** 2)
    x58 = ((- x2) - (x7 * (x1 + x57)))
    x59 = (x50 * x54)
    x60 = ((- x3) * ((ax * A[2]) + (bx * B[2])))
    x61 = ((- x60) - A[2])
    x62 = (x61 ** 2)
    x63 = ((- x2) - (x7 * (x1 + x62)))
    x64 = (da * db)
    x65 = (0. * x64)
    x66 = ((- x55) - B[1])
    x67 = (x12 * x51)
    x68 = (x66 * x67)
    x69 = (x68 * (x16 + x58))
    x70 = ((3.0 * x35) + x45)
    x71 = ((x1 * x70) + (x41 * x5))
    x72 = (x12 * x52)
    x73 = (x54 * (((x1 * (((3.0 * x34) + (3.0 * x37)) + x43)) + (x42 * x5)) + (x9 * (((2.0 * ax) * x71) - x70))))
    x74 = (x54 * x71)
    x75 = ((- x60) - B[2])
    x76 = (x72 * x75)
    x77 = (x76 * (x16 + x63))
    x78 = (x56 * x67)
    x79 = (x58 * x78)
    x80 = ((x16 * x78) + x79)
    x81 = (x48 * x54)
    x82 = (x54 * x56)
    x83 = (0. * x64)
    x84 = (x56 * x69)
    x85 = (x1 * x67)
    x86 = (x66 * x78)
    x87 = (x85 + x86)
    x88 = (x58 * x85)
    x89 = (((x16 * x87) + x84) + x88)
    x90 = (x41 * x72)
    x91 = (x61 * x72)
    x92 = (x63 * x91)
    x93 = ((x16 * x91) + x92)
    x94 = (x54 * x61)
    x95 = (x61 * x77)
    x96 = (x1 * x72)
    x97 = (x75 * x91)
    x98 = (x96 + x97)
    x99 = (x63 * x96)
    x100 = (((x16 * x98) + x95) + x99)
    x101 = (x41 * x67)
    x102 = (x56 * x80)
    x103 = (x57 * x67)
    x104 = (x103 + x85)
    x105 = (x9 * ((x0 * x104) - x67))
    x106 = (x102 + x105)
    x107 = (x106 + x88)
    x108 = (x31 * x72)
    x109 = (0. * x64)
    x110 = (x1 * (x68 + x78))
    x111 = (x56 * x87)
    x112 = (x110 + x111)
    x113 = (x112 * x72)
    x114 = (x1 * (x69 + x80))
    x115 = (x56 * x89)
    x116 = (x9 * ((x0 * x112) - x68))
    x117 = ((x114 + x115) + x116)
    x118 = (1. * x109)
    x119 = (x61 * x93)
    x120 = (x62 * x72)
    x121 = (x120 + x96)
    x122 = (x9 * ((x0 * x121) - x72))
    x123 = (x119 + x122)
    x124 = (x123 + x99)
    x125 = (x31 * x67)
    x126 = (x1 * (x76 + x91))
    x127 = (x61 * x98)
    x128 = (x126 + x127)
    x129 = (x128 * x67)
    x130 = (x1 * (x77 + x93))
    x131 = (x100 * x61)
    x132 = (x9 * ((x0 * x128) - x76))
    x133 = ((x130 + x131) + x132)
    x134 = (2.0 * x85)
    x135 = (x56 * (x104 + x134))
    x136 = (x135 * x72)
    x137 = (((x1 * ((x33 * x78) + (2.0 * x79))) + (x107 * x56)) + (x9 * ((x0 * x135) - (2.0 * x78))))
    x138 = (3.0 * x85)
    x139 = (2.0 * x86)
    x140 = ((x1 * ((x103 + x138) + x139)) + (x112 * x56))
    x141 = (3.0 * x88)
    x142 = (((x1 * (((x106 + x141) + (x33 * x87)) + (2.0 * x84))) + (x117 * x56)) - (x9 * (((((- 2.0) * ax) * x140) + x134) + x139)))
    x143 = (x11 * x53)
    x144 = (x142 * x143)
    x145 = (x143 * x5)
    x146 = (((3. * x11) * x3) * x51)
    x147 = (x146 * x5)
    x148 = (2.0 * x96)
    x149 = (x61 * (x121 + x148))
    x150 = (x149 * x67)
    x151 = (((x1 * ((x33 * x91) + (2.0 * x92))) + (x124 * x61)) + (x9 * ((x0 * x149) - (2.0 * x91))))
    x152 = (3.0 * x96)
    x153 = (2.0 * x97)
    x154 = ((x1 * ((x120 + x152) + x153)) + (x128 * x61))
    x155 = (3.0 * x99)
    x156 = (((x1 * (((x123 + x155) + (x33 * x98)) + (2.0 * x95))) + (x133 * x61)) - (x9 * (((((- 2.0) * ax) * x154) + x148) + x153)))
    x157 = (x146 * x156)
    x158 = ((3.0 * x103) + x138)
    x159 = ((x1 * x158) + (x135 * x56))
    x160 = (x143 * (((x1 * (((3.0 * x102) + (3.0 * x105)) + x141)) + (x137 * x56)) + (x9 * (((2.0 * ax) * x159) - x158))))
    x161 = (x143 * x159)
    x162 = ((3.0 * x110) + (3.0 * x111))
    x163 = ((x1 * (x135 + x162)) + (x140 * x56))
    x164 = (x143 * x163)
    x165 = (x143 * x61)
    x166 = (x13 * x135)
    x167 = (x112 * x13)
    x168 = (x128 * x13)
    x169 = (x146 * x56)
    x170 = (x13 * x149)
    x171 = ((3.0 * x120) + x152)
    x172 = ((x1 * x171) + (x149 * x61))
    x173 = (x146 * (((x1 * (((3.0 * x119) + (3.0 * x122)) + x155)) + (x151 * x61)) + (x9 * (((2.0 * ax) * x172) - x171))))
    x174 = (x146 * x172)
    x175 = ((3.0 * x126) + (3.0 * x127))
    x176 = ((x1 * (x149 + x175)) + (x154 * x61))
    x177 = (x146 * x176)
    result[(0, 0)] = numpy.sum((x65 * (((x54 * (((x1 * ((((3.0 * x21) + (3.0 * x28)) + (3.0 * x32)) + x42)) + (x48 * x5)) + (x9 * (((2.0 * ax) * x50) - x49)))) + (x58 * x59)) + (x59 * x63))))
    result[(0, 1)] = numpy.sum((x65 * ((((x63 * x66) * x74) + (x66 * x73)) + ((x69 * x71) * x72))))
    result[(0, 2)] = numpy.sum((x65 * ((((x58 * x74) * x75) + ((x67 * x71) * x77)) + (x73 * x75))))
    result[(1, 0)] = numpy.sum((x83 * ((((x47 * x63) * x82) + ((x47 * x72) * x80)) + (x56 * x81))))
    result[(1, 1)] = numpy.sum((x83 * ((((x42 * x72) * x87) + ((x63 * x87) * x90)) + (x89 * x90))))
    result[(1, 2)] = numpy.sum((x83 * ((((x41 * x76) * x80) + ((x41 * x77) * x78)) + ((x42 * x75) * x82))))
    result[(2, 0)] = numpy.sum((x83 * ((((x47 * x58) * x94) + ((x47 * x67) * x93)) + (x61 * x81))))
    result[(2, 1)] = numpy.sum((x83 * ((((x41 * x68) * x93) + ((x41 * x69) * x91)) + ((x42 * x66) * x94))))
    result[(2, 2)] = numpy.sum((x83 * (((x100 * x101) + ((x101 * x58) * x98)) + ((x42 * x67) * x98))))
    result[(3, 0)] = numpy.sum((x109 * ((((x104 * x108) * x63) + ((x104 * x44) * x72)) + (x107 * x108))))
    result[(3, 1)] = numpy.sum((x109 * ((((x113 * x36) * x63) + (x113 * x39)) + ((x117 * x36) * x72))))
    result[(3, 2)] = numpy.sum((x109 * ((((x104 * x36) * x77) + ((x104 * x39) * x76)) + ((x107 * x36) * x76))))
    result[(4, 0)] = numpy.sum((x118 * ((((x31 * x78) * x93) + ((x31 * x80) * x91)) + ((x44 * x61) * x82))))
    result[(4, 1)] = numpy.sum((x118 * ((((x36 * x87) * x93) + ((x36 * x89) * x91)) + ((x39 * x87) * x91))))
    result[(4, 2)] = numpy.sum((x118 * ((((x100 * x36) * x78) + ((x36 * x80) * x98)) + ((x39 * x78) * x98))))
    result[(5, 0)] = numpy.sum((x109 * ((((x121 * x125) * x58) + ((x121 * x44) * x67)) + (x124 * x125))))
    result[(5, 1)] = numpy.sum((x109 * ((((x121 * x36) * x69) + ((x121 * x39) * x68)) + ((x124 * x36) * x68))))
    result[(5, 2)] = numpy.sum((x109 * ((((x129 * x36) * x58) + (x129 * x39)) + ((x133 * x36) * x67))))
    result[(6, 0)] = numpy.sum((x83 * ((((x136 * x25) * x63) + (x136 * x27)) + ((x137 * x25) * x72))))
    result[(6, 1)] = numpy.sum((x83 * ((((x140 * x145) * x63) + ((x140 * x17) * x72)) + (x144 * x5))))
    result[(6, 2)] = numpy.sum((x83 * ((((x135 * x14) * x77) + ((x135 * x17) * x76)) + ((x137 * x145) * x75))))
    result[(7, 0)] = numpy.sum((x118 * ((((x104 * x25) * x93) + ((x104 * x27) * x91)) + ((x107 * x25) * x91))))
    result[(7, 1)] = numpy.sum((x118 * ((((x112 * x14) * x93) + ((x112 * x17) * x91)) + ((x117 * x145) * x61))))
    result[(7, 2)] = numpy.sum((x118 * ((((x100 * x104) * x14) + ((x104 * x17) * x98)) + ((x107 * x14) * x98))))
    result[(8, 0)] = numpy.sum((x118 * ((((x121 * x25) * x80) + ((x121 * x27) * x78)) + ((x124 * x25) * x78))))
    result[(8, 1)] = numpy.sum((x118 * ((((x121 * x14) * x89) + ((x121 * x17) * x87)) + ((x124 * x14) * x87))))
    result[(8, 2)] = numpy.sum((x118 * ((((x128 * x14) * x80) + ((x128 * x17) * x78)) + ((x133 * x147) * x56))))
    result[(9, 0)] = numpy.sum((x83 * ((((x150 * x25) * x58) + (x150 * x27)) + ((x151 * x25) * x67))))
    result[(9, 1)] = numpy.sum((x83 * ((((x14 * x149) * x69) + ((x147 * x151) * x66)) + ((x149 * x17) * x68))))
    result[(9, 2)] = numpy.sum((x83 * ((((x147 * x154) * x58) + ((x154 * x17) * x67)) + (x157 * x5))))
    result[(10, 0)] = numpy.sum((x65 * ((((x159 * x20) * x72) + (x160 * x18)) + ((x161 * x18) * x63))))
    result[(10, 1)] = numpy.sum((x65 * (((x143 * (((x1 * ((((3.0 * x114) + (3.0 * x115)) + (3.0 * x116)) + x137)) + (x142 * x56)) + (x9 * (((2.0 * ax) * x163) - x162)))) + (x164 * x63)) + (x164 * x8))))
    result[(10, 2)] = numpy.sum((x65 * ((((x13 * x159) * x77) + (x160 * x75)) + ((x161 * x75) * x8))))
    result[(11, 0)] = numpy.sum((x83 * ((((x135 * x19) * x93) + ((x135 * x20) * x91)) + ((x137 * x165) * x18))))
    result[(11, 1)] = numpy.sum((x83 * ((((x13 * x140) * x93) + ((x140 * x165) * x8)) + (x144 * x61))))
    result[(11, 2)] = numpy.sum((x83 * (((x100 * x166) + ((x13 * x137) * x98)) + ((x166 * x8) * x98))))
    result[(12, 0)] = numpy.sum((x109 * ((((x104 * x121) * x20) + ((x104 * x124) * x19)) + ((x107 * x121) * x19))))
    result[(12, 1)] = numpy.sum((x109 * ((((x117 * x121) * x13) + ((x121 * x167) * x8)) + (x124 * x167))))
    result[(12, 2)] = numpy.sum((x109 * ((((x104 * x13) * x133) + ((x104 * x168) * x8)) + (x107 * x168))))
    result[(13, 0)] = numpy.sum((x83 * ((((x149 * x19) * x80) + ((x149 * x20) * x78)) + ((x151 * x169) * x18))))
    result[(13, 1)] = numpy.sum((x83 * ((((x13 * x151) * x87) + ((x170 * x8) * x87)) + (x170 * x89))))
    result[(13, 2)] = numpy.sum((x83 * ((((x13 * x154) * x80) + ((x154 * x169) * x8)) + (x157 * x56))))
    result[(14, 0)] = numpy.sum((x65 * ((((x172 * x20) * x67) + (x173 * x18)) + ((x174 * x18) * x58))))
    result[(14, 1)] = numpy.sum((x65 * ((((x13 * x172) * x69) + (x173 * x66)) + ((x174 * x66) * x8))))
    result[(14, 2)] = numpy.sum((x65 * (((x146 * (((x1 * ((((3.0 * x130) + (3.0 * x131)) + (3.0 * x132)) + x151)) + (x156 * x61)) + (x9 * (((2.0 * ax) * x176) - x175)))) + (x177 * x58)) + (x177 * x8))))
    return result
def initialize_encoder(encoder_or_encoder_class: Union[(Type[Encoder], Encoder, str)], schema: AbstractSchemaNode, **kwargs: Any) -> Encoder:
    """Resolve a registry name, encoder class, or encoder instance into a ready Encoder.

    Args:
        encoder_or_encoder_class: a registry key (case-insensitive), an Encoder
            subclass, or an already-constructed Encoder instance.
        schema: schema node handed to SchemaBasedEncoder subclasses.
        **kwargs: forwarded to the encoder constructor; not allowed with an
            instance.

    Returns:
        An Encoder instance.

    Raises:
        ValueError: unknown registry name, or kwargs given with an instance.
        TypeError: argument is none of the accepted kinds.
    """
    resolved = encoder_or_encoder_class
    # A string names an entry in the encoder registry.
    if isinstance(resolved, str):
        key = resolved.lower()
        if key not in _ENCODER_REGISTRY:
            raise ValueError(f'Unknown encoder {key}. Use one of {sorted(_ENCODER_REGISTRY)}')
        resolved = _ENCODER_REGISTRY[key]
    if isinstance(resolved, type(Encoder)):
        # Schema-aware encoders take the schema as their first argument.
        if issubclass(resolved, SchemaBasedEncoder):
            return resolved(schema, **kwargs)
        return resolved(**kwargs)
    if isinstance(resolved, Encoder):
        if kwargs:
            raise ValueError('Unable to use kwargs with an encoder instance')
        return resolved
    raise TypeError(f'Expected str, an encoder or encoder class, got {type(resolved)}')
class OptionPlotoptionsPyramid3dPointEvents(Options):
    """Generated option wrapper for point-level event handlers of a Highcharts
    pyramid3d series (click, drag, select, update, ...).

    Each event name appears twice: a no-argument reader returning the
    currently configured value (None when unset) and a one-argument writer
    that stores the handler via self._config.

    NOTE(review): without @property / @<name>.setter decorators the second
    ``def`` of each pair shadows the first, leaving only the writer callable.
    The same pattern appears throughout this generated file, so the
    decorators were presumably stripped during extraction — confirm against
    the generator's output.
    """
    def click(self):
        # Reader: current 'click' handler, or None when unset.
        return self._config_get(None)
    def click(self, value: Any):
        # Writer: register the 'click' handler (stored as a non-JS value).
        self._config(value, js_type=False)
    def drag(self):
        return self._config_get(None)
    def drag(self, value: Any):
        self._config(value, js_type=False)
    def dragStart(self):
        return self._config_get(None)
    def dragStart(self, value: Any):
        self._config(value, js_type=False)
    def drop(self):
        return self._config_get(None)
    def drop(self, value: Any):
        self._config(value, js_type=False)
    def mouseOut(self):
        return self._config_get(None)
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)
    def mouseOver(self):
        return self._config_get(None)
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)
    def remove(self):
        return self._config_get(None)
    def remove(self, value: Any):
        self._config(value, js_type=False)
    def select(self):
        return self._config_get(None)
    def select(self, value: Any):
        self._config(value, js_type=False)
    def unselect(self):
        return self._config_get(None)
    def unselect(self, value: Any):
        self._config(value, js_type=False)
    def update(self):
        return self._config_get(None)
    def update(self, value: Any):
        self._config(value, js_type=False)
class SlowImportWarningMixin(ColorFormatterMixin):
    """Formatter mixin that emits a one-time yellow warning when importing the
    test modules alone took more than one second."""

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # self.import_secs / self._import_secs_warn are provided by the mixin chain.
        took_too_long = bool(self.import_secs) and self.import_secs > 1
        if took_too_long and self._import_secs_warn:
            message = ('Warning: Importing test modules alone took %.1fs! To speed this up, remove object construction from module level. If not possible, consider using lazy_import(). Try using --import-profiler to profile your imports.' % self.import_secs)
            self.print_yellow(message)
            # Only warn once per formatter instance.
            self._import_secs_warn = False
def change_dir():
    """Switch the working directory to the recorded frappe bench path.

    Does nothing when a config.json is already present in the current
    directory (we are inside a bench) or when running the 'init' command.
    The bench path is read from /etc/frappe_bench_dir when that file exists.
    """
    if os.path.exists('config.json') or ('init' in sys.argv):
        return
    marker = '/etc/frappe_bench_dir'
    if not os.path.exists(marker):
        return
    with open(marker) as f:
        target = f.read().strip()
    # Only chdir when the recorded path still exists.
    if os.path.exists(target):
        os.chdir(target)
@mock.patch('os.symlink')
@mock.patch.object(full_docker.Benchmark, 'run_image')
@mock.patch.object(full_docker.Benchmark, 'execute_benchmark')
@mock.patch.object(docker_image.DockerImage, 'pull_image')
@mock.patch.object(source_manager.SourceManager, 'have_build_options')
@mock.patch.object(source_manager.SourceManager, 'get_envoy_hashes_for_benchmark')
def test_execute_dockerized_benchmark_using_images_only(mock_hashes_for_benchmarks, mock_have_build_options, mock_pull_image, mock_execute, mock_run_image, mock_symlink):
    """Execute a dockerized benchmark configured with pre-built images only.

    With no build options, the runner must pull the named images rather than
    building from source, and still run the benchmark.

    NOTE(review): the patch decorators above were corrupted in the source
    (only '.object(...)' fragments survived); they are reconstructed here
    from the mock parameter order (bottom decorator -> first parameter) —
    confirm against the original file.
    """
    job_control = proto_control.JobControl(remote=False, dockerized_benchmark=True)
    generate_test_objects.generate_environment(job_control)
    generate_test_objects.generate_images(job_control)
    mock_run_image.return_value = b'benchmark_ output....'
    mock_execute.return_value = None
    # No build options -> image-only path: images must be pulled.
    mock_have_build_options.return_value = False
    mock_hashes_for_benchmarks.return_value = {'tag1', 'tag2'}
    benchmark = run_benchmark.BenchmarkRunner(job_control)
    benchmark.execute()
    mock_have_build_options.assert_called()
    mock_pull_image.assert_called()
    mock_symlink.assert_called()
    mock_execute.assert_has_calls([mock.call(), mock.call()])
class TestNamespaceAllowedAndDefaultVersion():
    """NamespaceVersioning: interplay of allowed_versions, default_version and
    the URL namespace attached to the resolver match.

    The seven tests previously repeated an identical request/dispatch block;
    it is factored into :meth:`_respond` (same behavior, less duplication).
    """

    def _respond(self, view_class, namespace):
        """GET /endpoint/ through *view_class* with the resolver namespace
        forced to *namespace*; returns the response."""
        class FakeResolverMatch():
            pass
        # The scheme only reads .namespace, so a class attribute suffices
        # (the original tests also assigned the class itself, not an instance).
        FakeResolverMatch.namespace = namespace
        view = view_class.as_view(versioning_class=versioning.NamespaceVersioning)
        request = factory.get('/endpoint/')
        request.resolver_match = FakeResolverMatch
        return view(request)

    def test_no_namespace_without_default(self):
        response = self._respond(AllowedVersionsView, None)
        assert (response.status_code == status.HTTP_404_NOT_FOUND)

    def test_no_namespace_with_default(self):
        response = self._respond(AllowedAndDefaultVersionsView, None)
        assert (response.status_code == status.HTTP_200_OK)
        assert (response.data == {'version': 'v2'})

    def test_no_match_without_default(self):
        response = self._respond(AllowedVersionsView, 'no_match')
        assert (response.status_code == status.HTTP_404_NOT_FOUND)

    def test_no_match_with_default(self):
        response = self._respond(AllowedAndDefaultVersionsView, 'no_match')
        assert (response.status_code == status.HTTP_200_OK)
        assert (response.data == {'version': 'v2'})

    def test_with_default(self):
        response = self._respond(AllowedAndDefaultVersionsView, 'v1')
        assert (response.status_code == status.HTTP_200_OK)
        assert (response.data == {'version': 'v1'})

    def test_no_match_without_default_but_none_allowed(self):
        response = self._respond(AllowedWithNoneVersionsView, 'no_match')
        assert (response.status_code == status.HTTP_200_OK)
        assert (response.data == {'version': None})

    def test_no_match_with_default_and_none_allowed(self):
        response = self._respond(AllowedWithNoneAndDefaultVersionsView, 'no_match')
        assert (response.status_code == status.HTTP_200_OK)
        assert (response.data == {'version': 'v2'})
# NOTE(review): the bare '_test' below is almost certainly a truncated
# decorator (e.g. '@local_test') lost during extraction — confirm upstream.
_test
def test_producer_and_consumer() -> None:
    # End-to-end check: every message produced on a freshly registered stream
    # is delivered, in order, to the consumer callback.
    stream_name = random_string(length=RANDOM_ID_LENGTH)
    stream_interface = register_stream(name=stream_name, message_type=MyMessage)
    received_messages = []
    with Producer(stream_interface=stream_interface) as producer:
        def callback(params: LabgraphCallbackParams[MyMessage]) -> None:
            # Collect each delivered message for verification below.
            received_messages.append(params.message)
        with Consumer(stream_interface=stream_interface, sample_callback=callback):
            for i in range(NUM_MESSAGES):
                producer.produce_message(MyMessage(int_field=i))
                # Pace production at roughly SAMPLE_RATE messages per second.
                time.sleep((1 / SAMPLE_RATE))
    # All messages must have arrived, in production order.
    assert (len(received_messages) == NUM_MESSAGES)
    for i in range(NUM_MESSAGES):
        assert (received_messages[i].int_field == i)
class DMLLoss(nn.Layer):
    """Deep Mutual Learning loss between two model outputs.

    Optionally applies an activation ('softmax' or 'sigmoid') to both inputs,
    then computes either a symmetric log-space KL divergence (use_log=True)
    or a JS divergence via KLJSLoss.
    """

    def __init__(self, act=None, use_log=False):
        super().__init__()
        if act is not None:
            assert act in ['softmax', 'sigmoid']
        # Select the activation applied to both branches (None = raw inputs).
        if act == 'softmax':
            self.act = nn.Softmax(axis=-1)
        elif act == 'sigmoid':
            self.act = nn.Sigmoid()
        else:
            self.act = None
        self.use_log = use_log
        self.jskl_loss = KLJSLoss(mode='js')

    def forward(self, out1, out2):
        """Return the mutual-learning loss between out1 and out2."""
        if self.act is not None:
            out1 = self.act(out1)
            out2 = self.act(out2)
        if not self.use_log:
            return self.jskl_loss(out1, out2)
        # Symmetric KL: average of KL(out2 || out1) and KL(out1 || out2).
        forward_kl = F.kl_div(paddle.log(out1), out2, reduction='batchmean')
        backward_kl = F.kl_div(paddle.log(out2), out1, reduction='batchmean')
        return (forward_kl + backward_kl) / 2.0
class DaapConnection():
    """Client-side handle for a single DAAP (iTunes-style) music share.

    Wraps a DAAPClient session and converts the remote track database into
    trax.Track objects collected in ``self.all``.
    """

    def __init__(self, name, server, port, user_agent):
        # Bracket bare IPv6 literals so they can be embedded in host:port URIs.
        if (':' in server) and (server[0] != '['):
            server = (('[' + server) + ']')
        self.all = []            # converted trax.Track list (filled by reload())
        self.session = None      # DAAP session, set by connect()
        self.connected = False
        self.tracks = None       # raw DAAP track objects
        self.server = server
        self.port = port
        self.name = name
        self.auth = False        # set True once a connection attempt fails
        self.password = None
        self.user_agent = user_agent

    def connect(self, password=None):
        """Open a DAAP session, authenticating when AUTH is enabled and a
        password is supplied; logs and re-raises on failure."""
        try:
            client = DAAPClient()
            if (AUTH and password):
                client.connect(self.server, self.port, password, self.user_agent)
            else:
                client.connect(self.server, self.port, None, self.user_agent)
            self.session = client.login()
            self.connected = True
        except Exception:
            logger.exception('failed to connect to ({0},{1})'.format(self.server, self.port))
            # Mark that the next attempt may need credentials.
            self.auth = True
            self.connected = False
            raise

    def disconnect(self):
        """Log out (best effort) and drop all cached state."""
        try:
            self.session.logout()
        except Exception:
            pass
        self.session = None
        self.tracks = None
        self.database = None
        self.all = []
        self.connected = False

    def reload(self):
        """Re-fetch the remote database and rebuild the local track list."""
        self.tracks = None
        self.database = None
        self.all = []
        self.get_database()
        t = time.time()
        self.convert_list()
        logger.debug('{0} tracks loaded in {1}s'.format(len(self.all), (time.time() - t)))

    def get_database(self):
        """Fetch the library database for the current session and its tracks."""
        if self.session:
            self.database = self.session.library()
            self.get_tracks(1)

    def get_tracks(self, reset=False):
        """Return the raw DAAP track list, refetching when *reset* is truthy
        or no tracks are cached yet."""
        if (reset or (self.tracks is None)):
            if (self.database is None):
                self.database = self.session.library()
            self.tracks = self.database.tracks()
        return self.tracks

    def convert_list(self):
        """Convert raw DAAP tracks into trax.Track objects appended to self.all."""
        # DAAP atom codes for the tags we carry over.
        eqiv = {'title': 'minm', 'artist': 'asar', 'album': 'asal', 'tracknumber': 'astn', 'date': 'asyr', 'discnumber': 'asdn', 'albumartist': 'asaa'}
        for tr in self.tracks:
            if (tr is not None):
                # NOTE(review): the URI format string was corrupted in the
                # source ("uri = (' % (...))"); reconstructed here as the
                # standard DAAP item URL — confirm against upstream.
                uri = ('http://%s:%s/databases/%s/items/%s.%s?session-id=%s' % (self.server, self.port, self.database.id, tr.id, tr.type, self.session.sessionid))
                temp = trax.Track(uri, scan=False)
                for field in eqiv.keys():
                    try:
                        tag = ('%s' % tr.atom.getAtom(eqiv[field]))
                        if (tag != 'None'):
                            temp.set_tag_raw(field, [tag], notify_changed=False)
                    except Exception:
                        # Missing atom: only tracknumber gets a fallback value.
                        if (field == 'tracknumber'):
                            temp.set_tag_raw('tracknumber', [0], notify_changed=False)
                try:
                    # 'astm' is the track length in milliseconds.
                    temp.set_tag_raw('__length', (tr.atom.getAtom('astm') // 1000), notify_changed=False)
                except Exception:
                    temp.set_tag_raw('__length', 0, notify_changed=False)
                self.all.append(temp)

    def get_track(self, track_id, filename):
        """Save the track whose id matches *track_id* to *filename*."""
        for t in self.tracks:
            if (t.id == track_id):
                try:
                    t.save(filename)
                except Exception:
                    # NOTE(review): the original except clause was corrupted
                    # ('except' with no handler head — a syntax error); fixed
                    # to a broad Exception. Upstream likely caught
                    # http.client.CannotSendRequest — confirm. The dialog is
                    # constructed but never shown/run here, as in the source.
                    Gtk.MessageDialog(buttons=Gtk.ButtonsType.OK, message_type=Gtk.MessageType.INFO, modal=True, text=_('This server does not support multiple connections.\nYou must stop playback before downloading songs.'), transient_for=main.mainwindow().window)
                return
# NOTE(review): dataset-viewer boilerplate accidentally captured in this file;
# commented out so the module stays syntactically valid:
# Subsets and Splits
# No community queries yet
# The top public SQL queries from the community will appear here once available.