code stringlengths 281 23.7M |
|---|
def test_surface(surface_data):
    """Smoke-test NdGridCubicSmoothingSpline on 2-D gridded data."""
    xdata, ydata = surface_data
    spline = csaps.NdGridCubicSmoothingSpline(xdata, ydata)
    smoothed = spline(xdata)
    # One smoothing factor is expected per grid dimension.
    assert isinstance(spline.smooth, tuple)
    assert len(spline.smooth) == 2
    assert isinstance(spline.spline, csaps.NdGridSplinePPForm)
    # Evaluating on the original grid must preserve the data shape.
    assert smoothed.shape == ydata.shape
class DB2Cursor(object):
    """Minimal DB-API-2.0-style cursor delegating to a driver statement handle."""

    # Default number of rows returned by fetchmany() when no size is given.
    arraysize = 1

    def __init__(self, connection):
        self.connection = connection
        self.stmt = None  # populated by execute()

    def execute(self, query):
        """Run *query* and keep the resulting statement handle."""
        self.stmt = self.connection.execute(query)

    def executemany(self, query):
        # Batch execution is not offered by the underlying driver.
        raise NotSupportedError

    def fetchone(self):
        return self.stmt.fetchone()

    def fetchmany(self, size=None):
        """Fetch up to *size* rows, defaulting to ``arraysize``."""
        return self.stmt.fetchmany(self.arraysize if size is None else size)

    def fetchall(self):
        return self.stmt.fetchall()

    def nextset(self):
        # Multiple result sets are not supported.
        raise NotSupportedError

    def setinputsizes(self):
        # DB-API compatibility no-op.
        pass

    def setoutputsize(self):
        # DB-API compatibility no-op.
        pass

    def description(self):
        """Return DB-API style 7-tuples describing the result columns."""
        return [
            (
                name,
                meta.get('type', None),
                meta.get('size', None),
                meta.get('size', None),
                meta.get('precision', None),
                meta.get('scale', None),
                True,
            )
            for name, meta in self.stmt.columns().items()
        ]

    def rowcount(self):
        return self.stmt.rowcount()

    def close(self):
        self.stmt.close()
class LinkedInOAuth2(BaseOAuth2[Dict[str, Any]]):
    """OAuth2 client for LinkedIn sign-in."""

    display_name = 'LinkedIn'
    logo_svg = LOGO_SVG

    def __init__(self, client_id: str, client_secret: str, scopes: Optional[List[str]] = BASE_SCOPES, name: str = 'linkedin'):
        # LinkedIn uses the same endpoint for obtaining and refreshing tokens.
        super().__init__(client_id, client_secret, AUTHORIZE_ENDPOINT, ACCESS_TOKEN_ENDPOINT, ACCESS_TOKEN_ENDPOINT, name=name, base_scopes=scopes)

    async def get_id_email(self, token: str) -> Tuple[str, Optional[str]]:
        """Return ``(member_id, primary_email)`` for the bearer *token*.

        Raises:
            GetIdEmailError: when either the profile or email endpoint
                responds with a 4xx/5xx status.
        """
        # BUGFIX: the original referenced the non-existent attribute
        # ``self.get_``; the BaseOAuth2 helper is ``get_httpx_client()``.
        async with self.get_httpx_client() as client:
            profile_response = await client.get(PROFILE_ENDPOINT, headers={'Authorization': f'Bearer {token}'}, params={'projection': '(id)'})
            if profile_response.status_code >= 400:
                raise GetIdEmailError(profile_response.json())
            email_response = await client.get(EMAIL_ENDPOINT, headers={'Authorization': f'Bearer {token}'}, params={'q': 'members', 'projection': '(elements*(handle~))'})
            if email_response.status_code >= 400:
                raise GetIdEmailError(email_response.json())
            profile_data = cast(Dict[str, Any], profile_response.json())
            user_id = profile_data['id']
            email_data = cast(Dict[str, Any], email_response.json())
            # The email lives inside the projected "handle~" expansion.
            user_email = email_data['elements'][0]['handle~']['emailAddress']
            return (user_id, user_email)
(CLIENT_BY_ID, dependencies=[Security(verify_oauth_client, scopes=[CLIENT_DELETE])])
def delete_client(client_id: str, db: Session = Depends(get_db)) -> None:
    """Delete the OAuth client for *client_id*; silently succeeds when absent."""
    client = ClientDetail.get(db, object_id=client_id, config=CONFIG)
    if client:
        logger.info('Deleting client')
        client.delete(db)
class Migration(migrations.Migration):
    # Drops the legacy free-form Activity.type field and (re)declares the
    # activity_type ForeignKey with an explicit CASCADE delete rule.
    dependencies = [('manager', '0024_auto__2349')]
    operations = [migrations.RemoveField(model_name='activity', name='type'), migrations.AlterField(model_name='activity', name='activity_type', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='manager.ActivityType'))]
def update(name, score):
    """Insert ``(name, score)`` into the persisted top-5 high-score table.

    Scores live in descending order in the parallel ``names``/``scores``
    lists returned by ``find()``.  BUGFIX: the original popped the tail
    whenever a mid-list insertion occurred (``replace`` flag), which wrongly
    dropped an entry even when the table held fewer than five scores; the
    table is now trimmed only when an insertion pushes it past five entries.
    The ``highscores['loser']`` bookkeeping for a full table is kept as-is.
    """
    highscores = find()
    if highscores['names'] == []:
        # First score ever recorded.
        highscores['names'].append(name)
        highscores['scores'].append(score)
    else:
        if len(highscores['names']) < 5:
            inserted = False
        else:
            # Table full: nothing is appended unless the score out-ranks an
            # existing entry; track the lowest scorer seen.
            inserted = True
            if score < highscores['loser'][1]:
                highscores['loser'][0] = name
                highscores['loser'][1] = score
        for i in range(len(highscores['names'])):
            if score > highscores['scores'][i]:
                highscores['names'].insert(i, name)
                highscores['scores'].insert(i, score)
                inserted = True
                break
        if not inserted:
            highscores['names'].append(name)
            highscores['scores'].append(score)
        # Trim back to five entries only when the insert overflowed the table.
        if len(highscores['names']) > 5:
            highscores['names'].pop(-1)
            highscores['scores'].pop(-1)
    # Persist atomically w.r.t. file handle lifetime.
    with open(datafile, 'wb') as picklefile:
        pickle.dump(highscores, picklefile)
    return current(highscores)
def find_parent_dir_containing(target: str, max_up: int = 6, initial_dir: str = '') -> str:
    """Walk upward from *initial_dir* until a directory containing *target* is found.

    Args:
        target: file or directory name to look for.
        max_up: maximum number of parent hops before giving up.
        initial_dir: starting directory; empty string (the default) means the
            current working directory at call time.  BUGFIX: the original
            default ``os.getcwd()`` was evaluated once at import time.

    Returns:
        The (relative) directory that contains *target*.

    Raises:
        OSError: if *target* is not found within *max_up* parent hops.
    """
    cur = initial_dir or os.getcwd()
    remaining = max_up
    while not os.path.exists(os.path.join(cur, target)) and remaining > 0:
        cur = os.path.relpath(os.path.join(cur, '..'))
        remaining -= 1
    # BUGFIX: test for the target itself rather than the hop counter, so a
    # hit found on the final allowed hop is not misreported as a failure.
    if not os.path.exists(os.path.join(cur, target)):
        raise OSError(f'Could not find {target} in parents of {os.getcwd()}')
    return cur
class Chart(MixHtmlState.HtmlOverlayStates, Html.Html):
    """Highcharts chart component rendered into a <div> container."""

    name = 'Highcharts'
    tag = 'div'
    # JavaScript package that must be loaded on the page for this component.
    requirements = ('highcharts',)
    _chart__type = None
    _option_cls = OptChartHighcharts.OptionsHighcharts
    # Name of the client-side builder function emitted by build().
    builder_name = 'Hcharts'

    def __init__(self, page: primitives.PageModel, width, height, html_code, options, profile):
        # Keep the raw height value before CSS attributes are applied.
        self.height = height[0]
        super(Chart, self).__init__(page, [], html_code=html_code, profile=profile, options=options, css_attrs={'width': width, 'height': height})
        self.style.css.margin_top = 10

    def options(self) -> OptChartHighcharts.OptionsHighcharts:
        """Chart options accessor.

        NOTE(review): delegates to ``super().options`` and is used elsewhere
        as an attribute (``self.options._config``), so this looks like a
        ``@property`` whose decorator was lost — confirm against upstream.
        """
        return super().options

    def dom(self) -> JsHtmlHighCharts.HighCharts:
        """Lazily build and cache the DOM helper for this chart.

        NOTE(review): likely a former ``@property`` as well — confirm.
        """
        if (self._dom is None):
            self._dom = JsHtmlHighCharts.HighCharts(page=self.page, component=self)
        return self._dom

    def define(self, options: types.JS_DATA_TYPES=None, dataflows: List[dict]=None) -> str:
        """Return the JS expression that updates the chart definition.

        With no explicit *options*, the component's current options are piped
        through the optional *dataflows* transformations before the update.
        """
        if (options is None):
            if (dataflows is not None):
                return ('%s;%s' % (JsUtils.jsWrap(JsUtils.dataFlows(JsUtils.jsWrap(self.js_code), dataflows, self.page)), self.js.update()))
        if (dataflows is not None):
            options = JsUtils.jsWrap(JsUtils.dataFlows(options, dataflows, self.page))
        return self.js.update(options)

    def colors(self, hex_values: list):
        """Set the series colors from hex strings or named colors.

        NOTE(review): both branches of the inner ``startswith('#')`` test
        append ``h`` unchanged, so that check is currently a no-op (possibly
        a lost conversion step), and ``bg_colors`` is built but never used —
        confirm intent upstream.
        """
        (line_colors, bg_colors) = ([], [])
        for h in hex_values:
            if (h.upper() in Colors.defined):
                # Resolve a named color to its hex value.
                h = Colors.defined[h.upper()]['hex']
            if (not isinstance(h, tuple)):
                if h.startswith('#'):
                    line_colors.append(h)
                else:
                    line_colors.append(h)
            else:
                # Tuples carry (line, background) pairs; keep the line color.
                line_colors.append(h[0])
        self.options._config(line_colors, name='colors')
        for (i, rec) in enumerate(self.options.js_tree.get('series', [])):
            rec.color = line_colors[i]

    def click(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=False, source_event: str=None, on_ready: bool=False):
        """Attach *js_funcs* to the series point click event, exposing the
        clicked points as ``activePoints`` in the handler scope."""
        js_funcs.insert(0, ('let activePoints = [%s]' % self.dom.active()))
        self.options.plotOptions.series.point.events.click(js_funcs=js_funcs, profile=profile)

    def js(self) -> JsHighcharts.Highcharts:
        """Lazily build and cache the Highcharts JS API wrapper.

        NOTE(review): likely a former ``@property`` — confirm.
        """
        if (self._js is None):
            self._js = JsHighcharts.Highcharts(selector=self.js_code, component=self, page=self.page)
        return self._js
    # NOTE(review): the bare expression below looks like the remnant of a
    # decorator truncated during extraction; as written it is a no-op.
    ('highcharts')
    def build(self, data: etypes.JS_DATA_TYPES=None, options: etypes.JS_DATA_TYPES=None, profile: etypes.PROFILE_TYPE=None, component_id: str=None, stop_state: bool=True, dataflows: List[dict]=None):
        """Return the JS statement that (re)builds the chart.

        NOTE(review): ``state_expr`` is passed under the ``state`` key but the
        template contains no ``%(state)s`` placeholder, so it is never
        emitted — confirm whether the placeholder was dropped.
        """
        self.js_code = component_id
        if (data is not None):
            builder_fnc = JsUtils.jsWrap(('%s(%s, Object.assign(%s.options, %s))' % (self.builder_name, JsUtils.dataFlows(data, dataflows, self.page), self.js_code, JsUtils.jsConvertData(options, None))), profile).toStr()
            state_expr = ''
            if stop_state:
                state_expr = (';%s' % self.hide_state(component_id))
            return ('%(chartId)s = Highcharts.chart(%(htmlCode)s, %(builder)s)' % {'chartId': self.js_code, 'htmlCode': JsUtils.jsConvertData((component_id or self.html_code), None), 'builder': builder_fnc, 'state': state_expr})
        return ('%(chartId)s = Highcharts.chart(%(htmlCode)s, %(ctx)s)' % {'chartId': self.js_code, 'htmlCode': JsUtils.jsConvertData((component_id or self.html_code), None), 'ctx': self.options.config_js(options)})

    def __str__(self):
        # Register the builder JS then emit the container element.
        self.page.properties.js.add_builders(self.build())
        return ('<%s %s></%s>' % (self.tag, self.get_attrs(css_class_names=self.style.get_classes()), self.tag))
def get_latents(sequence):
    """Quantize a mel spectrogram into latent codes with the global model.

    Args:
        sequence: 2-D mel spectrogram as a numpy-convertible array
            (assumed (n_mels, frames) — TODO confirm against the caller).

    Returns:
        ``(latents, entropy)`` as produced by
        ``model.quantizer.get_quantizedindices``.
    """
    # BUGFIX: the original converted an undefined global ``mel`` instead of
    # the ``sequence`` argument it was given.
    sequence = np.array(sequence)
    sequence = Variable(torch.from_numpy(sequence)).unsqueeze(0)
    sequence = sequence.cuda()
    with torch.no_grad():
        (latents, entropy) = model.quantizer.get_quantizedindices(sequence.unsqueeze(2))
    return (latents, entropy)
def set_item(d, keys, value):
    """Set *value* at the nested position described by *keys* inside *d*.

    Intermediate containers are created on demand via
    ``_get_or_new_item_value``; the final key is written with
    ``_set_item_value``.  An empty *keys* sequence is a no-op.
    """
    item = d
    last = len(keys) - 1
    for idx, key in enumerate(keys):
        if idx == last:
            _set_item_value(item, key, value)
            break
        # Peek at the next key so the helper can pick the container type.
        item = _get_or_new_item_value(item, key, keys[idx + 1])
class MetadataResourceHandler(HttpErrorMixin, APIHandler):
    """REST handler for a single metadata instance (GET / PUT / DELETE).

    URL parameters ``schemaspace`` and ``resource`` arrive URL-escaped and
    are unescaped before use.  Errors map to HTTP statuses as follows:
    validation/value problems -> 400, missing instance -> 404,
    permission denied -> 403 (delete only), anything else -> 500.
    """

    async def get(self, schemaspace, resource):
        """Return the named metadata instance as JSON."""
        schemaspace = url_unescape(schemaspace)
        resource = url_unescape(resource)
        # Parent app object gives the manager access to runtime settings/log.
        parent = self.settings.get('elyra')
        try:
            metadata_manager = MetadataManager(schemaspace=schemaspace, parent=parent)
            metadata = metadata_manager.get(resource)
        except (ValidationError, ValueError, NotImplementedError) as err:
            raise web.HTTPError(400, str(err)) from err
        except MetadataNotFoundError as err:
            raise web.HTTPError(404, str(err)) from err
        except Exception as err:
            raise web.HTTPError(500, repr(err)) from err
        self.set_header('Content-Type', 'application/json')
        self.finish(metadata.to_dict(trim=True))

    async def put(self, schemaspace, resource):
        """Update the named metadata instance from the JSON request body.

        Renaming via the payload's ``name`` field is rejected (maps to 400).
        """
        schemaspace = url_unescape(schemaspace)
        resource = url_unescape(resource)
        parent = self.settings.get('elyra')
        try:
            payload = self.get_json_body()
            metadata_manager = MetadataManager(schemaspace=schemaspace, parent=parent)
            # Existence check: raises MetadataNotFoundError (-> 404) if absent.
            metadata_manager.get(resource)
            if (('name' in payload) and (payload['name'] != resource)):
                raise NotImplementedError(f"The attempt to rename instance '{resource}' to '{payload['name']}' is not supported.")
            instance = Metadata.from_dict(schemaspace, {**payload})
            self.log.debug(f"MetadataHandler: Updating metadata instance '{resource}' in schemaspace '{schemaspace}'...")
            metadata = metadata_manager.update(resource, instance)
        except (ValidationError, ValueError, NotImplementedError) as err:
            raise web.HTTPError(400, str(err)) from err
        except MetadataNotFoundError as err:
            raise web.HTTPError(404, str(err)) from err
        except Exception as err:
            raise web.HTTPError(500, repr(err)) from err
        self.set_status(200)
        self.set_header('Content-Type', 'application/json')
        self.finish(metadata.to_dict(trim=True))

    async def delete(self, schemaspace, resource):
        """Delete the named metadata instance; responds 204 on success."""
        schemaspace = url_unescape(schemaspace)
        resource = url_unescape(resource)
        parent = self.settings.get('elyra')
        try:
            self.log.debug(f"MetadataHandler: Deleting metadata instance '{resource}' in schemaspace '{schemaspace}'...")
            metadata_manager = MetadataManager(schemaspace=schemaspace, parent=parent)
            metadata_manager.remove(resource)
        except (ValidationError, ValueError) as err:
            raise web.HTTPError(400, str(err)) from err
        except PermissionError as err:
            raise web.HTTPError(403, str(err)) from err
        except MetadataNotFoundError as err:
            raise web.HTTPError(404, str(err)) from err
        except Exception as err:
            raise web.HTTPError(500, repr(err)) from err
        self.set_status(204)
        self.finish()

    def write_error(self, status_code, **kwargs):
        # Delegate explicitly to the mixin's JSON error formatting.
        HttpErrorMixin.write_error(self, status_code, **kwargs)
def _pack_binary(x, write):
    """Emit *x* as a MessagePack bin 8/16/32 value via the *write* callable.

    The smallest bin family whose length field can hold ``len(x)`` is
    chosen; longer payloads raise ``UnsupportedTypeException``.
    """
    size = len(x)
    # (marker byte, struct format for the length field, max representable size)
    for marker, fmt, limit in (
        (b'\xc4', 'B', 0xFF),
        (b'\xc5', '>H', 0xFFFF),
        (b'\xc6', '>I', 0xFFFFFFFF),
    ):
        if size <= limit:
            write(marker)
            write(_struct_pack(fmt, size))
            write(x)
            return
    raise UnsupportedTypeException('huge binary string')
class EqlLexer(RegexLexer):
    """Pygments lexer for the Event Query Language (EQL)."""

    name = 'Event Query Language'
    aliases = ['eql']
    filenames = ['*.eql']
    # Regex fragments shared by the number/identifier token rules below.
    _sign = '[\\-+]'
    _integer = '\\d+'
    _float = '\\d*\\.\\d+([Ee][-+]?\\d+)?'
    _time_units = 's|sec\\w+|m|min\\w+|h|hour|hr|d|day'
    _name = '[a-zA-Z][_a-zA-Z0-9]*'
    # Pipe names are sorted longest-first in the rule so prefixes don't win.
    _pipe_names = set(list_pipes())
    tokens = {'comments': [('//(\\n|[\\w\\W]*?[^\\\\]\\n)', token.Comment.Single), ('/[*][\\w\\W]*?[*]/', token.Comment.Multiline), ('/[*][\\w\\W]*', token.Comment.Multiline)], 'whitespace': [('\\s+', token.Whitespace)], 'root': [include('whitespace'), include('comments'), ('(and|in|not|or)\\b', token.Operator.Word), ('(join|sequence|until|where|sample)\\b', token.Keyword), (('(%s)(=\\s+)(where)\\b' % _name), bygroups(token.Name, token.Whitespace, token.Keyword)), (('(const)(\\s+)(%s)\\b' % _name), bygroups(token.Keyword.Declaration, token.Whitespace, token.Name.Constant)), (('(macro)(\\s+)(%s)\\b' % _name), bygroups(token.Keyword.Declaration, token.Whitespace, token.Name.Constant)), ('(by|of|with)\\b', token.Keyword.QueryModifier), ('(true|false|null)\\b', token.Name.Builtin), (('(\\|)(\\s*)(%s)' % '|'.join(reversed(sorted(_pipe_names, key=len)))), bygroups(token.Operator, token.Whitespace, token.Name.Function.Magic)), (('(%s)(\\s*\\()' % '|'.join(list_functions())), bygroups(token.Name.Function, token.Text)), ('[A-Z][_A-Z0-9]+\\b', token.Name.Other), (_name, token.Name), (('(%s|%s)[ \\t]*(%s)\\b' % (_float, _integer, _time_units)), token.Literal.Date), (((_sign + '?') + _float), token.Number.Float), (((_sign + '?') + _integer), token.Number.Integer), ('"(\\\\[btnfr"\\\'\\\\]|[^\\r\\n"\\\\])*"?', token.String), ('\'(\\\\[btnfr\'\\"\\\\]|[^\\r\\n\'\\\\])*\'?', token.String), ('\\?"(\\\\"|[^"])*"?', token.String.Regex), ("\\?'(\\\\'|[^'])*'?", token.String.Regex), ('(==|=|!=|<|<=|>=|>|\\+|\\-|\\*|/|\\%|:)', token.Operator), ('[()\\[\\],.]', token.Punctuation)]}
class _UnitTestDbPartsNoneConfig(DefaultConfig):
    """Test config whose DB connection parts are all explicitly unset.

    Exercises validation/assembly paths that must cope with missing
    host/port/name/user/password components for both databases.
    """

    ENV_CODE = 'utdbpn'
    # USAspending database connection parts (all forced to None).
    USASPENDING_DB_HOST: str = None
    USASPENDING_DB_PORT: str = None
    USASPENDING_DB_NAME: str = None
    USASPENDING_DB_USER: str = None
    USASPENDING_DB_PASSWORD: SecretStr = None
    # Broker database connection parts (all forced to None).
    BROKER_DB_HOST: str = None
    BROKER_DB_PORT: str = None
    BROKER_DB_NAME: str = None
    BROKER_DB_USER: str = None
    BROKER_DB_PASSWORD: SecretStr = None
def test_geodetic_distance_vs_spherical():
    """Geodetic distance should match the spherical computation after conversion."""
    ellipsoid = bl.WGS84
    geodetic_a = (-69.3, -36.4, 405)
    geodetic_b = (-71.2, -33.3, 1025)
    dist_geodetic = distance(geodetic_a, geodetic_b, coordinate_system='geodetic', ellipsoid=ellipsoid)
    # Convert both endpoints and recompute in spherical coordinates.
    spherical_a = ellipsoid.geodetic_to_spherical(*geodetic_a)
    spherical_b = ellipsoid.geodetic_to_spherical(*geodetic_b)
    dist_spherical = distance(spherical_a, spherical_b, coordinate_system='spherical')
    npt.assert_allclose(dist_geodetic, dist_spherical)
def test_ignore_small_changes_both(test_id: str, dbt_project: DbtProject):
    """With both ignore thresholds set, a large drop should still fail the test."""
    now = datetime.utcnow()
    cutoff = now - timedelta(days=1)
    # 30 rows per fully-elapsed day, then a much smaller final bucket.
    history = [
        {TIMESTAMP_COLUMN: cur_date.strftime(DATE_FORMAT)}
        for cur_date in generate_dates(base_date=now, step=timedelta(days=1))
        if cur_date < cutoff
    ]
    data = history * 30
    data += [{TIMESTAMP_COLUMN: cutoff.strftime(DATE_FORMAT)}] * 14
    test_args = {
        **DBT_TEST_ARGS,
        'time_bucket': {'period': 'day', 'count': 1},
        'ignore_small_changes': {'spike_failure_percent_threshold': 2, 'drop_failure_percent_threshold': 50},
    }
    test_result = dbt_project.test(test_id, DBT_TEST_NAME, test_args, data=data)
    assert test_result['status'] == 'fail'
class Solution(object):
    def maximumProduct(self, nums):
        """Return the maximum product of any three numbers in *nums*.

        The answer is either the product of the three largest values, or the
        product of the two smallest (most negative) values with the largest.

        BUGFIX: the original crashed with IndexError on inputs containing no
        non-negative numbers (its ``positive`` list was empty, then indexed
        with ``[-1]``); sorting once and comparing the two candidates handles
        every sign combination, assuming ``len(nums) >= 3``.
        """
        ordered = sorted(nums)
        top_three = ordered[-1] * ordered[-2] * ordered[-3]
        two_low_one_high = ordered[0] * ordered[1] * ordered[-1]
        return max(top_three, two_low_one_high)
def plot_gamut_in_space(space, gamut, title='', dark=False, resolution=200, opacity=1.0, edges=False, size=(800, 800), camera=None, aspect=None, projection='perspective'):
    """Render a color *gamut* as a 3-D plotly figure in the given color *space*.

    Args:
        space: name of the color space to plot in (a key of ``Color.CS_MAP``).
        gamut: name of the gamut to render.
        title: figure title.
        dark: use the dark plotly template and dark scene colors.
        resolution: sampling resolution passed to the surface renderers.
        opacity: surface opacity.
        edges: also draw surface edges.
        size: (width, height) of the figure in pixels.
        camera: dict with azimuth ``a``, elevation ``e`` and radius ``r``;
            defaults to a 45/45 view.
        aspect: per-axis aspect ratio dict; defaults to 1:1:1.
        projection: plotly scene projection type.

    Returns:
        The plotly figure, or ``None`` for spaces with more than 3 channels.
    """
    io.templates.default = ('plotly_dark' if dark else 'plotly')
    # NOTE(review): 50 is nudged to 51 — presumably dodging a degenerate
    # sampling case in the renderers; confirm with render_space_* internals.
    if (resolution == 50):
        resolution = 51
    if (camera is None):
        camera = {'a': 45, 'e': 45, 'r': math.sqrt((((1.25 ** 2) + (1.25 ** 2)) + (1.25 ** 2)))}
    # Convert the spherical camera position (azimuth/elevation/radius) into
    # Cartesian eye coordinates for the plotly scene camera.
    a = math.radians(((90 - camera['a']) % 360))
    e = math.radians((90 - camera['e']))
    r = camera['r']
    y = ((r * math.sin(e)) * math.cos(a))
    x = ((r * math.sin(e)) * math.sin(a))
    z = (r * math.cos(e))
    if (aspect is None):
        aspect = {'x': 1, 'y': 1, 'z': 1}
    target = Color.CS_MAP[space]
    if (len(target.CHANNELS) > 3):
        print('Color spaces with dimensions greater than 3 are not supported')
        return None
    names = target.CHANNELS
    is_regular = isinstance(target, Regular)
    is_cyl = isinstance(target, Cylindrical)
    is_labish = isinstance(target, Labish)
    is_lchish = isinstance(target, LChish)
    is_hslish_hsvish = isinstance(target, (HSLish, HSVish))
    # Remap channel order so the lightness-like channel ends up on the z axis
    # for Lab-like and LCh-like spaces.
    if is_labish:
        (c1, c2, c3) = target.indexes()
        axm = [c2, c3, c1]
    elif is_lchish:
        (c1, c2, c3) = target.indexes()
        axm = [c3, c2, c1]
    elif is_hslish_hsvish:
        axm = [0, 1, 2]
    else:
        axm = [0, 1, 2]
    showbackground = True
    backgroundcolor = ('rgb(230, 230, 230)' if (not dark) else '#282830')
    gridcolor = ('rgb(255, 255, 255)' if (not dark) else '#111')
    zerolinecolor = ('rgb(255, 255, 255)' if (not dark) else '#111')
    axis = {'showbackground': showbackground, 'backgroundcolor': backgroundcolor, 'gridcolor': gridcolor, 'zerolinecolor': zerolinecolor}
    # Cylindrical spaces label the hue axis with its degree range.
    xaxis = (str(names[axm[0]]) if (not is_cyl) else '{} (0 - 360 )'.format(names[axm[0]]))
    yaxis = str(names[axm[1]])
    zaxis = str(names[axm[2]])
    layout = go.Layout(title=title, width=size[0], height=size[1], scene=go.layout.Scene(xaxis=go.layout.scene.XAxis(title=xaxis, showticklabels=(not is_cyl), **axis), yaxis=go.layout.scene.YAxis(title=yaxis, **axis), zaxis=go.layout.scene.ZAxis(title=zaxis, **axis), aspectratio=aspect), scene_camera={'projection': go.layout.scene.camera.Projection(type=projection), 'center': {'x': 0, 'y': 0, 'z': 0}, 'up': {'x': 0, 'y': 0, 'z': 1}, 'eye': {'x': x, 'y': y, 'z': z}})
    fig = go.Figure(layout=layout)
    target = Color.CS_MAP[space]
    # Rectangular (RGB-like) spaces and cylindrical spaces use different renderers.
    if is_regular:
        return render_space_rect(fig, space, gamut, resolution, opacity, edges)
    else:
        return render_space_cyl(fig, space, gamut, resolution, opacity, edges)
class Layer():
    """Pairs a data object with a display style and exposes both as actions."""

    def __init__(self, data):
        # The style defaults to whatever the data object declares.
        self._data = data
        self._style = data.default_style

    def add_action(self, actions):
        """Append the data and its style (each only when truthy) to *actions*."""
        for part in (self._data, self._style):
            if part:
                actions.append(part)

    def style(self, style):
        """Replace the current style object."""
        self._style = style

    def update(self, action, value):
        """Delegate an update call to the active style object."""
        return self._style.update(action, value)
def symlink(target, link_name):
    """Create (or refresh) a symlink at *link_name* pointing to *target*.

    The link's parent directory is created on demand, the resolved target
    must already exist, and a pre-existing symlink at *link_name* is
    replaced.  Raises IOError when the resolved target is missing.
    """
    link_dir, _ = os.path.split(link_name)
    if link_dir:
        if not os.path.isdir(link_dir):
            print(f'Creating directory for link: {link_dir}')
            os.makedirs(link_dir)
        # A relative target is resolved relative to the link's directory.
        resolved_target = os.path.join(link_dir, target)
    else:
        resolved_target = target
    if not os.path.exists(resolved_target):
        raise IOError(f'{target} (target) and {link_name} (link_name) requested, which implies that {resolved_target} must exist, but it does not.')
    if os.path.islink(link_name):
        os.unlink(link_name)
    os.symlink(target, link_name)
    print(f"Linking '{link_name}' -> '{target}' [ cwd:{os.getcwd()} ]")
class GenerateOperatorWidgets(QtWidgets.QDialog):
    """Dialog for composing a single ONNX operator (op_type, opset, name,
    input/output variables and attributes).

    NOTE(review): "valiables"/"atrributes" misspellings are pre-existing
    identifiers/labels and are preserved throughout.
    """

    _DEFAULT_WINDOW_WIDTH = 500
    _MAX_INPUT_VARIABLES_COUNT = 5
    _MAX_OUTPUT_VARIABLES_COUNT = 5
    _MAX_ATTRIBUTES_COUNT = 10

    def __init__(self, opset=DEFAULT_OPSET, parent=None) -> None:
        """Build the dialog, pre-selecting *opset* in the opset combo box."""
        super().__init__(parent)
        self.setModal(False)
        self.setWindowTitle('generate operator')
        self.initUI(opset)

    def initUI(self, opset: int):
        """Construct all widgets and wire up their signals."""
        set_font(self, font_size=BASE_FONT_SIZE)
        base_layout = QtWidgets.QVBoxLayout()
        base_layout.setSizeConstraint(base_layout.SizeConstraint.SetFixedSize)
        # --- op_type / opset / op_name form rows -----------------------------
        layout = QtWidgets.QFormLayout()
        layout.setLabelAlignment(QtCore.Qt.AlignRight)
        self.cmb_optype = QtWidgets.QComboBox()
        self.cmb_optype.setEditable(True)
        lbl_op_type = QtWidgets.QLabel('op_type')
        set_font(lbl_op_type, font_size=LARGE_FONT_SIZE, bold=True)
        layout.addRow(lbl_op_type, self.cmb_optype)
        self.cmb_opset = QtWidgets.QComboBox()
        self.cmb_opset.setEditable(True)
        # Opset values 1..latest_opset, with the int stored as item data.
        for i in range(1, (latest_opset + 1)):
            self.cmb_opset.addItem(str(i), i)
        lbl_opset = QtWidgets.QLabel('opset')
        set_font(lbl_opset, font_size=LARGE_FONT_SIZE, bold=True)
        layout.addRow(lbl_opset, self.cmb_opset)
        self.tb_opname = QtWidgets.QLineEdit()
        self.tb_opname.setText('')
        lbl_op_name = QtWidgets.QLabel('op_name')
        set_font(lbl_op_name, font_size=LARGE_FONT_SIZE, bold=True)
        layout.addRow(lbl_op_name, self.tb_opname)
        # --- input/output variable rows (created up-front, shown on demand) --
        self.layout_valiables = QtWidgets.QVBoxLayout()
        self.visible_input_valiables_count = 1
        self.visible_output_valiables_count = 1
        self.add_input_valiables = {}
        self.add_output_valiables = {}
        for i in range(self._MAX_INPUT_VARIABLES_COUNT):
            self.create_variables_widget(i, is_input=True)
        for i in range(self._MAX_OUTPUT_VARIABLES_COUNT):
            self.create_variables_widget(i, is_input=False)
        self.btn_add_input_valiables = QtWidgets.QPushButton('+')
        self.btn_del_input_valiables = QtWidgets.QPushButton('-')
        self.btn_add_input_valiables.clicked.connect(self.btn_add_input_valiables_clicked)
        self.btn_del_input_valiables.clicked.connect(self.btn_del_input_valiables_clicked)
        self.btn_add_output_valiables = QtWidgets.QPushButton('+')
        self.btn_del_output_valiables = QtWidgets.QPushButton('-')
        self.btn_add_output_valiables.clicked.connect(self.btn_add_output_valiables_clicked)
        self.btn_del_output_valiables.clicked.connect(self.btn_del_output_valiables_clicked)
        self.layout_valiables.addItem(QtWidgets.QSpacerItem(self._DEFAULT_WINDOW_WIDTH, 20))
        lbl_inp_val = QtWidgets.QLabel('input valiables [optional]')
        set_font(lbl_inp_val, font_size=LARGE_FONT_SIZE, bold=True)
        self.layout_valiables.addWidget(lbl_inp_val)
        for (key, widgets) in self.add_input_valiables.items():
            self.layout_valiables.addWidget(widgets['base'])
        layout_btn_input = QtWidgets.QHBoxLayout()
        layout_btn_input.addWidget(self.btn_add_input_valiables)
        layout_btn_input.addWidget(self.btn_del_input_valiables)
        self.layout_valiables.addLayout(layout_btn_input)
        self.layout_valiables.addItem(QtWidgets.QSpacerItem(self._DEFAULT_WINDOW_WIDTH, 20))
        lbl_out_val = QtWidgets.QLabel('output valiables [optional]')
        set_font(lbl_out_val, font_size=LARGE_FONT_SIZE, bold=True)
        self.layout_valiables.addWidget(lbl_out_val)
        for (key, widgets) in self.add_output_valiables.items():
            self.layout_valiables.addWidget(widgets['base'])
        layout_btn_output = QtWidgets.QHBoxLayout()
        layout_btn_output.addWidget(self.btn_add_output_valiables)
        layout_btn_output.addWidget(self.btn_del_output_valiables)
        self.layout_valiables.addLayout(layout_btn_output)
        # --- attribute rows (name/value pairs, shown on demand) ---------------
        self.layout_attributes = QtWidgets.QVBoxLayout()
        self.layout_attributes.addItem(QtWidgets.QSpacerItem(self._DEFAULT_WINDOW_WIDTH, 20))
        lbl_atrributes = QtWidgets.QLabel('atrributes [optional]')
        set_font(lbl_atrributes, font_size=LARGE_FONT_SIZE, bold=True)
        self.layout_attributes.addWidget(lbl_atrributes)
        self.visible_attributes_count = 3
        self.attributes = {}
        for index in range(self._MAX_ATTRIBUTES_COUNT):
            self.attributes[index] = {}
            self.attributes[index]['base'] = QtWidgets.QWidget()
            self.attributes[index]['layout'] = QtWidgets.QHBoxLayout(self.attributes[index]['base'])
            self.attributes[index]['layout'].setContentsMargins(0, 0, 0, 0)
            self.attributes[index]['name'] = QtWidgets.QLineEdit()
            self.attributes[index]['name'].setPlaceholderText('name')
            self.attributes[index]['value'] = QtWidgets.QLineEdit()
            self.attributes[index]['value'].setPlaceholderText('value')
            self.attributes[index]['layout'].addWidget(self.attributes[index]['name'])
            self.attributes[index]['layout'].addWidget(self.attributes[index]['value'])
            self.layout_attributes.addWidget(self.attributes[index]['base'])
        self.btn_add_attributes = QtWidgets.QPushButton('+')
        self.btn_del_attributes = QtWidgets.QPushButton('-')
        self.btn_add_attributes.clicked.connect(self.btn_add_attributes_clicked)
        self.btn_del_attributes.clicked.connect(self.btn_del_attributes_clicked)
        layout_btn_attributes = QtWidgets.QHBoxLayout()
        layout_btn_attributes.addWidget(self.btn_add_attributes)
        layout_btn_attributes.addWidget(self.btn_del_attributes)
        self.layout_attributes.addLayout(layout_btn_attributes)
        # --- assemble + OK/Cancel --------------------------------------------
        base_layout.addLayout(layout)
        base_layout.addLayout(self.layout_valiables)
        base_layout.addLayout(self.layout_attributes)
        btn = QtWidgets.QDialogButtonBox((QtWidgets.QDialogButtonBox.Ok | QtWidgets.QDialogButtonBox.Cancel))
        btn.accepted.connect(self.accept)
        btn.rejected.connect(self.reject)
        base_layout.addWidget(btn)
        self.setLayout(base_layout)
        self.cmb_optype.currentIndexChanged.connect(self.cmb_optype_currentIndexChanged)
        self.cmb_opset.currentIndexChanged.connect(self.cmb_opset_currentIndexChanged)
        # Triggers cmb_opset_currentIndexChanged, which populates cmb_optype.
        self.cmb_opset.setCurrentIndex((opset - 1))

    def create_variables_widget(self, index: int, is_input=True) -> QtWidgets.QBoxLayout:
        """Create the (name, dtype, shape) row widgets for variable *index*
        in either the input or the output variable table."""
        if is_input:
            self.add_input_valiables[index] = {}
            self.add_input_valiables[index]['base'] = QtWidgets.QWidget()
            self.add_input_valiables[index]['layout'] = QtWidgets.QHBoxLayout(self.add_input_valiables[index]['base'])
            self.add_input_valiables[index]['layout'].setContentsMargins(0, 0, 0, 0)
            self.add_input_valiables[index]['name'] = QtWidgets.QLineEdit()
            self.add_input_valiables[index]['name'].setPlaceholderText('name')
            self.add_input_valiables[index]['dtype'] = QtWidgets.QComboBox()
            for dtype in AVAILABLE_DTYPES:
                self.add_input_valiables[index]['dtype'].addItem(dtype)
            self.add_input_valiables[index]['dtype'].setEditable(True)
            self.add_input_valiables[index]['dtype'].setFixedSize(100, 20)
            self.add_input_valiables[index]['shape'] = QtWidgets.QLineEdit()
            self.add_input_valiables[index]['shape'].setPlaceholderText('shape. e.g. `[1, 2, 3]`')
            self.add_input_valiables[index]['layout'].addWidget(self.add_input_valiables[index]['name'])
            self.add_input_valiables[index]['layout'].addWidget(self.add_input_valiables[index]['dtype'])
            self.add_input_valiables[index]['layout'].addWidget(self.add_input_valiables[index]['shape'])
        else:
            self.add_output_valiables[index] = {}
            self.add_output_valiables[index]['base'] = QtWidgets.QWidget()
            self.add_output_valiables[index]['layout'] = QtWidgets.QHBoxLayout(self.add_output_valiables[index]['base'])
            self.add_output_valiables[index]['layout'].setContentsMargins(0, 0, 0, 0)
            self.add_output_valiables[index]['name'] = QtWidgets.QLineEdit()
            self.add_output_valiables[index]['name'].setPlaceholderText('name')
            self.add_output_valiables[index]['dtype'] = QtWidgets.QComboBox()
            for dtype in AVAILABLE_DTYPES:
                self.add_output_valiables[index]['dtype'].addItem(dtype)
            self.add_output_valiables[index]['dtype'].setEditable(True)
            self.add_output_valiables[index]['dtype'].setFixedSize(100, 20)
            self.add_output_valiables[index]['dtype'].setPlaceholderText('dtype. e.g. `float32`')
            self.add_output_valiables[index]['shape'] = QtWidgets.QLineEdit()
            self.add_output_valiables[index]['shape'].setPlaceholderText('shape. e.g. `[1, 2, 3]`')
            self.add_output_valiables[index]['layout'].addWidget(self.add_output_valiables[index]['name'])
            self.add_output_valiables[index]['layout'].addWidget(self.add_output_valiables[index]['dtype'])
            self.add_output_valiables[index]['layout'].addWidget(self.add_output_valiables[index]['shape'])

    def set_visible_input_valiables(self):
        """Show the first N input rows and enable/disable the +/- buttons."""
        for (key, widgets) in self.add_input_valiables.items():
            widgets['base'].setVisible((key < self.visible_input_valiables_count))
        if (self.visible_input_valiables_count == 0):
            self.btn_add_input_valiables.setEnabled(True)
            self.btn_del_input_valiables.setEnabled(False)
        elif (self.visible_input_valiables_count >= self._MAX_INPUT_VARIABLES_COUNT):
            self.btn_add_input_valiables.setEnabled(False)
            self.btn_del_input_valiables.setEnabled(True)
        else:
            self.btn_add_input_valiables.setEnabled(True)
            self.btn_del_input_valiables.setEnabled(True)

    def set_visible_output_valiables(self):
        """Show the first N output rows and enable/disable the +/- buttons."""
        for (key, widgets) in self.add_output_valiables.items():
            widgets['base'].setVisible((key < self.visible_output_valiables_count))
        if (self.visible_output_valiables_count == 0):
            self.btn_add_output_valiables.setEnabled(True)
            self.btn_del_output_valiables.setEnabled(False)
        elif (self.visible_output_valiables_count >= self._MAX_OUTPUT_VARIABLES_COUNT):
            self.btn_add_output_valiables.setEnabled(False)
            self.btn_del_output_valiables.setEnabled(True)
        else:
            self.btn_add_output_valiables.setEnabled(True)
            self.btn_del_output_valiables.setEnabled(True)

    def set_visible_add_op_attributes(self):
        """Show the first N attribute rows and enable/disable the +/- buttons."""
        for (key, widgets) in self.attributes.items():
            widgets['base'].setVisible((key < self.visible_attributes_count))
        if (self.visible_attributes_count == 0):
            self.btn_add_attributes.setEnabled(True)
            self.btn_del_attributes.setEnabled(False)
        elif (self.visible_attributes_count >= self._MAX_ATTRIBUTES_COUNT):
            self.btn_add_attributes.setEnabled(False)
            self.btn_del_attributes.setEnabled(True)
        else:
            self.btn_add_attributes.setEnabled(True)
            self.btn_del_attributes.setEnabled(True)

    def btn_add_input_valiables_clicked(self, e):
        # Clamp the visible row count into [0, max] and refresh.
        self.visible_input_valiables_count = min(max(0, (self.visible_input_valiables_count + 1)), self._MAX_INPUT_VARIABLES_COUNT)
        self.set_visible_input_valiables()

    def btn_del_input_valiables_clicked(self, e):
        self.visible_input_valiables_count = min(max(0, (self.visible_input_valiables_count - 1)), self._MAX_INPUT_VARIABLES_COUNT)
        self.set_visible_input_valiables()

    def btn_add_output_valiables_clicked(self, e):
        self.visible_output_valiables_count = min(max(0, (self.visible_output_valiables_count + 1)), self._MAX_OUTPUT_VARIABLES_COUNT)
        self.set_visible_output_valiables()

    def btn_del_output_valiables_clicked(self, e):
        self.visible_output_valiables_count = min(max(0, (self.visible_output_valiables_count - 1)), self._MAX_OUTPUT_VARIABLES_COUNT)
        self.set_visible_output_valiables()

    def btn_add_attributes_clicked(self, e):
        self.visible_attributes_count = min(max(0, (self.visible_attributes_count + 1)), self._MAX_ATTRIBUTES_COUNT)
        self.set_visible_add_op_attributes()

    def btn_del_attributes_clicked(self, e):
        self.visible_attributes_count = min(max(0, (self.visible_attributes_count - 1)), self._MAX_ATTRIBUTES_COUNT)
        self.set_visible_add_op_attributes()

    def cmb_optype_currentIndexChanged(self, selected_index: int):
        """On op_type change, resize the variable tables and pre-fill the
        attribute rows from the selected operator's schema defaults."""
        selected_operator: OperatorVersion = self.cmb_optype.currentData()
        if selected_operator:
            self.visible_input_valiables_count = selected_operator.inputs
            self.visible_output_valiables_count = selected_operator.outputs
            self.visible_attributes_count = min(max(0, len(selected_operator.attrs)), self._MAX_ATTRIBUTES_COUNT)
            for (i, att) in enumerate(selected_operator.attrs):
                self.attributes[i]['name'].setText(att.name)
                self.attributes[i]['value'].setText(att.default_value)
            # Clear any leftover rows beyond the operator's attribute count.
            for j in range(len(selected_operator.attrs), self._MAX_ATTRIBUTES_COUNT):
                self.attributes[j]['name'].setText('')
                self.attributes[j]['value'].setText('')
        self.set_visible_input_valiables()
        self.set_visible_output_valiables()
        self.set_visible_add_op_attributes()

    def cmb_opset_currentIndexChanged(self, selected_index: int):
        """Repopulate cmb_optype with operators available at the chosen opset,
        keeping the previously selected op_type when still available."""
        current_opset: int = self.cmb_opset.currentData()
        current_optype = self.cmb_optype.currentText()
        current_optype_index = 0
        self.cmb_optype.clear()
        for (i, op) in enumerate(onnx_opsets):
            # Versions are assumed newest-first; take the first one that the
            # chosen opset supports.
            for v in op.versions:
                if (v.since_opset <= current_opset):
                    if (op.name == current_optype):
                        current_optype_index = self.cmb_optype.count()
                    self.cmb_optype.addItem(op.name, v)
                    break
        self.cmb_optype.setCurrentIndex(current_optype_index)

    def get_properties(self) -> GenerateOperatorProperties:
        """Collect the dialog state into a GenerateOperatorProperties.

        Variable shapes and attribute values are parsed with
        ``literal_eval``; incomplete rows are skipped and empty collections
        collapse to None.
        """
        op_type = self.cmb_optype.currentText()
        opset = self.cmb_opset.currentText()
        if opset:
            opset = literal_eval(opset)
            # Anything that does not parse to an int is treated as unset.
            if (not isinstance(opset, int)):
                opset = ''
        op_name = self.tb_opname.text()
        input_variables = {}
        output_variables = {}
        for i in range(self.visible_input_valiables_count):
            name = self.add_input_valiables[i]['name'].text()
            dtype = self.add_input_valiables[i]['dtype'].currentText()
            shape = self.add_input_valiables[i]['shape'].text()
            if (name and dtype and shape):
                input_variables[name] = [dtype, literal_eval(shape)]
        for i in range(self.visible_output_valiables_count):
            name = self.add_output_valiables[i]['name'].text()
            dtype = self.add_output_valiables[i]['dtype'].currentText()
            shape = self.add_output_valiables[i]['shape'].text()
            if (name and dtype and shape):
                output_variables[name] = [dtype, literal_eval(shape)]
        if (len(input_variables) == 0):
            input_variables = None
        if (len(output_variables) == 0):
            output_variables = None
        attributes = {}
        for i in range(self.visible_attributes_count):
            name = self.attributes[i]['name'].text()
            value = self.attributes[i]['value'].text()
            if (name and value):
                attributes[name] = literal_eval(value)
        if (len(attributes) == 0):
            attributes = None
        return GenerateOperatorProperties(op_type=op_type, opset=opset, op_name=op_name, input_variables=input_variables, output_variables=output_variables, attributes=attributes)

    def accept(self) -> None:
        """Validate the dialog before closing; stay open and show the errors
        when op_type, opset or op_name is invalid."""
        invalid = False
        props = self.get_properties()
        print(props)
        err_msgs = []
        if (not (props.op_type in opnames)):
            err_msgs.append('- op_type is invalid.')
            invalid = True
        if (not isinstance(props.opset, int)):
            err_msgs.append('- opset must be unsigned integer.')
            invalid = True
        if (not props.op_name):
            err_msgs.append('- op_name is not set.')
            invalid = True
        if invalid:
            for m in err_msgs:
                print(m)
            MessageBox.error(err_msgs, 'generate operator', parent=self)
            return
        return super().accept()
# BUGFIX: the route registration was a bare expression (missing '@'), so the
# handler was never attached to the router.
@_router.get('/item/{item_uid}/revision/', response_model=CollectionItemRevisionListResponse, dependencies=PERMISSIONS_READ)
def item_revisions(item_uid: str, limit: int=50, iterator: t.Optional[str]=None, prefetch: Prefetch=PrefetchQuery, user: UserType=Depends(get_authenticated_user), items: CollectionItemQuerySet=Depends(get_item_queryset)):
    """Return a paginated list of an item's revisions, newest first.

    ``iterator`` is the uid of the last revision already seen; results
    continue strictly after it.  ``done`` is False when more pages remain,
    and the response's ``iterator`` is the uid to pass for the next page.
    """
    item = get_object_or_404(items, uid=item_uid)
    queryset = item.revisions.order_by('-id')
    if (iterator is not None):
        iterator_obj = get_object_or_404(queryset, uid=iterator)
        queryset = queryset.filter(id__lt=iterator_obj.id)
    # Fetch one extra row to detect whether another page exists.
    result = list(queryset[:(limit + 1)])
    done = (len(result) < (limit + 1))
    if (not done):
        result = result[:(- 1)]
    context = Context(user, prefetch)
    ret_data = [CollectionItemRevisionInOut.from_orm_context(revision, context) for revision in result]
    iterator = (ret_data[(- 1)].uid if ret_data else None)
    return CollectionItemRevisionListResponse(data=ret_data, iterator=iterator, done=done)
class OptionPlotoptionsLineSonificationTracksMappingHighpass(Options):
    """Option wrapper for the highpass-filter mapping of a sonification track."""

    def frequency(self) -> 'OptionPlotoptionsLineSonificationTracksMappingHighpassFrequency':
        """Sub-configuration under the 'frequency' key."""
        sub_config = self._config_sub_data('frequency', OptionPlotoptionsLineSonificationTracksMappingHighpassFrequency)
        return sub_config

    def resonance(self) -> 'OptionPlotoptionsLineSonificationTracksMappingHighpassResonance':
        """Sub-configuration under the 'resonance' key."""
        sub_config = self._config_sub_data('resonance', OptionPlotoptionsLineSonificationTracksMappingHighpassResonance)
        return sub_config
class DataModelBuilderTest(ForsetiTestCase):
    """Unit tests for data_model_builder.DataModelBuilder.build()."""

    def test_all_enabled(self):
        """With every data model enabled, exactly one CaiDataModel is built."""
        builder = data_model_builder.DataModelBuilder(FAKE_GLOBAL_CONFIGS, fake_data_models.ALL_ENABLED, mock.MagicMock(), '')
        pipeline = builder.build()
        self.assertEqual(1, len(pipeline))
        expected_names = ['CaiDataModel']
        for model in pipeline:
            self.assertTrue(type(model).__name__ in expected_names)

    def test_all_disabled(self):
        """With every data model disabled, nothing is built."""
        builder = data_model_builder.DataModelBuilder(FAKE_GLOBAL_CONFIGS, fake_data_models.ALL_DISABLED, mock.MagicMock(), '')
        pipeline = builder.build()
        self.assertEqual(0, len(pipeline))

    def test_non_existent_data_model_is_handled(self):
        """Unknown data model names are skipped without failing the build."""
        builder = data_model_builder.DataModelBuilder(FAKE_GLOBAL_CONFIGS, fake_data_models.NONEXISTENT_DATA_MODEL_ENABLED, mock.MagicMock(), '')
        built = builder.build()
        self.assertEqual(1, len(built))
class TestPatient(FrappeTestCase):
    """Integration tests for the Patient doctype: customer, contact and user links."""

    def test_customer_created(self):
        """A Customer is auto-created when link_customer_to_patient is enabled."""
        frappe.db.sql('delete from `tabPatient`')
        # Consistency fix: use set_single_value for the single doctype, matching
        # test_multiple_paients_linked_with_same_customer below (set_value with a
        # None name is the deprecated spelling of the same operation).
        frappe.db.set_single_value('Healthcare Settings', 'link_customer_to_patient', 1)
        patient = create_patient()
        self.assertTrue(frappe.db.get_value('Patient', patient, 'customer'))

    def test_patient_registration(self):
        """Patient stays Disabled until the registration fee is invoiced."""
        frappe.db.sql('delete from `tabPatient`')
        settings = frappe.get_single('Healthcare Settings')
        settings.collect_registration_fee = 1
        settings.registration_fee = 500
        settings.save()
        patient = create_patient()
        patient = frappe.get_doc('Patient', patient)
        self.assertEqual(patient.status, 'Disabled')
        result = patient.invoice_patient_registration()
        self.assertTrue(frappe.db.exists('Sales Invoice', result.get('invoice')))
        # BUG FIX: assertTrue(patient.status, 'Active') treated 'Active' as the
        # failure message and always passed. Reload to pick up the status written
        # by invoice_patient_registration and assert the actual transition.
        patient.reload()
        self.assertEqual(patient.status, 'Active')
        settings.collect_registration_fee = 0
        settings.save()

    def test_patient_contact(self):
        """The Contact created for a patient links both the Patient and its Customer."""
        frappe.db.sql("delete from `tabPatient` where name like '_Test Patient%'")
        frappe.db.sql("delete from `tabCustomer` where name like '_Test Patient%'")
        frappe.db.sql("delete from `tabContact` where name like'_Test Patient%'")
        frappe.db.sql("delete from `tabDynamic Link` where parent like '_Test Patient%'")
        patient = create_patient(patient_name='_Test Patient Contact', email='test-', mobile='+91 ')
        customer = frappe.db.get_value('Patient', patient, 'customer')
        self.assertTrue(customer)
        self.assertTrue(frappe.db.exists('Dynamic Link', {'parenttype': 'Contact', 'link_doctype': 'Patient', 'link_name': patient}))
        self.assertTrue(frappe.db.exists('Dynamic Link', {'parenttype': 'Contact', 'link_doctype': 'Customer', 'link_name': customer}))
        # A second patient that reuses the same customer still gets linked contacts.
        new_patient = create_patient(email='test-', mobile='+91 ', customer=customer)
        self.assertTrue(frappe.db.exists('Dynamic Link', {'parenttype': 'Contact', 'link_doctype': 'Patient', 'link_name': new_patient}))
        self.assertTrue(frappe.db.exists('Dynamic Link', {'parenttype': 'Contact', 'link_doctype': 'Customer', 'link_name': customer}))

    def test_patient_user(self):
        """create_user creates a site User; duplicate invite emails raise DuplicateEntryError."""
        frappe.db.sql("delete from `tabUser` where email='test-patient-'")
        frappe.db.sql("delete from `tabDynamic Link` where parent like '_Test Patient%'")
        frappe.db.sql("delete from `tabPatient` where name like '_Test Patient%'")
        patient = create_patient(patient_name='_Test Patient User', email='test-patient-', mobile='+91 ', create_user=True)
        user = frappe.db.get_value('Patient', patient, 'user_id')
        self.assertTrue(frappe.db.exists('User', user))
        new_patient = frappe.get_doc({'doctype': 'Patient', 'first_name': '_Test Patient Duplicate User', 'sex': 'Male', 'email': 'test-patient-', 'mobile': '+91 ', 'invite_user': 1})
        self.assertRaises(frappe.exceptions.DuplicateEntryError, new_patient.insert)

    def test_patient_image_update_should_update_customer_image(self):
        """Saving a patient image propagates it to the linked Customer record."""
        settings = frappe.get_single('Healthcare Settings')
        settings.link_customer_to_patient = 1
        settings.save()
        patient_name = create_patient()
        patient = frappe.get_doc('Patient', patient_name)
        patient.image = os.path.abspath('assets/frappe/images/default-avatar.png')
        patient.save()
        customer = frappe.get_doc('Customer', patient.customer)
        self.assertEqual(customer.image, patient.image)

    def test_multiple_paients_linked_with_same_customer(self):
        """Two patients may share one Customer; the customer record stays unchanged."""
        # NOTE(review): the 'paients' typo in the method name is kept deliberately —
        # renaming would change the test id referenced by CI.
        frappe.db.sql('delete from `tabPatient`')
        frappe.db.set_single_value('Healthcare Settings', 'link_customer_to_patient', 1)
        patient_name_1 = create_patient(patient_name='John Doe')
        p1_customer_name = frappe.get_value('Patient', patient_name_1, 'customer')
        p1_customer = frappe.get_doc('Customer', p1_customer_name)
        self.assertEqual(p1_customer.customer_name, 'John Doe')
        patient_name_2 = create_patient(patient_name='Jane Doe', customer=p1_customer.name)
        p2_customer_name = frappe.get_value('Patient', patient_name_2, 'customer')
        p2_customer = frappe.get_doc('Customer', p2_customer_name)
        self.assertEqual(p1_customer_name, p2_customer_name)
        self.assertEqual(p2_customer.customer_name, 'John Doe')
def get_practitioner_billing_details(practitioner, is_inpatient):
    """Fetch the billing item and charge configured on a Healthcare Practitioner.

    Returns a (service_item, practitioner_charge) pair; both are None when no
    practitioner is given. Inpatient visits read the inpatient charge fields,
    outpatient visits the OP consulting fields.
    """
    if is_inpatient:
        charge_fields = ['inpatient_visit_charge_item', 'inpatient_visit_charge']
    else:
        charge_fields = ['op_consulting_charge_item', 'op_consulting_charge']
    if not practitioner:
        return (None, None)
    service_item, practitioner_charge = frappe.db.get_value('Healthcare Practitioner', practitioner, charge_fields)
    return (service_item, practitioner_charge)
def test_encoder(ref_encoder: CLIPVisionModelWithProjection, our_encoder: CLIPImageEncoderH, test_device: torch.device):
x = torch.randn(1, 3, 224, 224).to(test_device)
with no_grad():
ref_embeddings = ref_encoder(x).image_embeds
our_embeddings = our_encoder(x)
assert (ref_embeddings.shape == (1, 1024))
assert (our_embeddings.shape == (1, 1024))
assert ((our_embeddings - ref_embeddings).abs().max() < 0.01) |
def getSignificantlySimilarArches(filePath, distance=4):
    """Find archives significantly similar to the one at ``filePath``.

    Delegates to ArchChecker with the configured masked path prefixes.
    On any failure the full traceback is logged and the sentinel string
    'error!' is returned instead of raising.
    """
    log = logging.getLogger('Main.DedupServer')
    try:
        checker = ArchChecker(filePath, pathNegativeFilter=settings.masked_path_prefixes)
        return checker.getSignificantlySimilarArches(searchDistance=distance)
    except Exception:
        log.critical('Exception when processing item!')
        # Log the traceback line-by-line so multi-line output stays readable.
        for tb_line in traceback.format_exc().split('\n'):
            log.critical(tb_line)
        return 'error!'
class TokenResponse(ModelComposed):
    """Generated composed OpenAPI model for an API token response.

    Composed (allOf) of Timestamps, Token and TokenResponseAllOf — see
    `_composed_schemas` at the bottom of the class.

    NOTE(review): the bare `_property` / `_js_args_to_python_args` expression
    lines below look like decorator lines whose `@` prefix was lost (presumably
    `@cached_property` and `@convert_js_args_to_python_args` from the code
    generator template) — confirm against the upstream generated source.
    """
    # Enumerated values accepted for the `scope` attribute.
    allowed_values = {('scope',): {'GLOBAL': 'global', 'PURGE_SELECT': 'purge_select', 'PURGE_ALL': 'purge_all', 'GLOBAL:READ': 'global:read'}}
    validations = {}
    _property
    def additional_properties_type():
        # Types accepted for undeclared (additional) properties.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Maps attribute name -> tuple of allowed types, per the OpenAPI spec.
        lazy_import()
        return {'services': ([str],), 'name': (str,), 'scope': (str,), 'created_at': (str,), 'deleted_at': (datetime, none_type), 'updated_at': (datetime, none_type), 'id': (str,), 'user_id': (str,), 'last_used_at': (str,), 'expires_at': (str,), 'ip': (str,), 'user_agent': (str,)}
    _property
    def discriminator():
        # No polymorphic discriminator for this schema.
        return None
    # Python attribute name -> JSON wire name (identical here, kept for uniformity).
    attribute_map = {'services': 'services', 'name': 'name', 'scope': 'scope', 'created_at': 'created_at', 'deleted_at': 'deleted_at', 'updated_at': 'updated_at', 'id': 'id', 'user_id': 'user_id', 'last_used_at': 'last_used_at', 'expires_at': 'expires_at', 'ip': 'ip', 'user_agent': 'user_agent'}
    # Attributes that may only be set from server data (see __init__ below).
    read_only_vars = {'services', 'deleted_at', 'updated_at', 'id', 'user_id', 'last_used_at'}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from server-provided data; read-only attributes are allowed."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            # Positional arguments are never valid for generated models.
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        # Distribute kwargs across the composed (allOf) component instances.
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                # Silently drop unknown keys when the configuration asks for it.
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Client-side constructor; rejects read-only attributes (see last lines)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
            # NOTE(review): this raises *after* setattr has run, so the attribute
            # is set before the error surfaces — verify against the generator.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
    _property
    def _composed_schemas():
        # allOf composition: this model merges Timestamps, Token, TokenResponseAllOf.
        lazy_import()
        return {'anyOf': [], 'allOf': [Timestamps, Token, TokenResponseAllOf], 'oneOf': []}
class CyclicRegionFinderDream(CyclicRegionFinder):
    """Find cyclic (loop) regions in a transition CFG, DREAM-style.

    Given a loop head, computes the loop region, restructures abnormal
    entries/exits so the loop has a single entry and at most one successor,
    and returns the region together with its successors.
    """

    def __init__(self, t_cfg: TransitionCFG, asforest: AbstractSyntaxForest):
        super().__init__(t_cfg, asforest)
        # Restructurers used to normalize loops with multiple entries/exits.
        self.abnormal_entry_restructurer = AbnormalEntryRestructurer(t_cfg, asforest)
        self.abnormal_exit_restructurer = AbnormalExitRestructurer(t_cfg, asforest)

    def find(self, head: TransitionBlock) -> Tuple[(TransitionCFG, List[TransitionBlock])]:
        """Return the loop region rooted at `head` and its (<=1 after restructuring) successors."""
        self.loop_region = self._compute_initial_loop_nodes(head)
        # A retreating in-edge to the head indicates an abnormal (multi-)entry.
        if any(((edge.property == EdgeProperty.retreating) for edge in self.t_cfg.get_in_edges(head))):
            logging.info(f'Restructure Abnormal Entry in loop region with head {head}')
            self.abnormal_entry_restructurer.restructure(self.loop_region)
        loop_successors: List[TransitionBlock] = self._compute_loop_successors()
        if (len(loop_successors) > 1):
            # More than one exit target: collapse them into a single successor.
            logging.info(f'Restructure Abnormal Exit in loop region with head {self.loop_region.root}')
            loop_successors = [self.abnormal_exit_restructurer.restructure(self.loop_region, loop_successors)]
        return (self.loop_region, loop_successors)

    def _compute_initial_loop_nodes(self, head: TransitionBlock) -> TransitionCFG:
        """Slice of the CFG from `head` to its latching nodes (the initial loop body)."""
        latching_nodes: List[TransitionBlock] = self._get_latching_nodes(head)
        return GraphSlice.compute_graph_slice_for_sink_nodes(self.t_cfg, head, latching_nodes, back_edges=False)

    def _get_latching_nodes(self, head: TransitionBlock):
        """Sources of loop edges (non-`non_loop` in-edges) into the head."""
        return [edge.source for edge in self.t_cfg.get_in_edges(head) if (edge.property != EdgeProperty.non_loop)]

    def _compute_loop_successors(self) -> List[TransitionBlock]:
        """Successor nodes of the loop region after refinement."""
        initial_successor_nodes = self._get_initial_loop_successor_nodes()
        return self._refine_initial_successor_nodes(initial_successor_nodes)

    def _get_initial_loop_successor_nodes(self) -> InsertionOrderedSet[TransitionBlock]:
        """All nodes outside the region that are reached directly from inside it."""
        initial_successor_nodes = InsertionOrderedSet()
        for node in self.loop_region:
            for successor in self.t_cfg.get_successors(node):
                if (successor not in self.loop_region):
                    initial_successor_nodes.add(successor)
        return initial_successor_nodes

    def _refine_initial_successor_nodes(self, successor_nodes: InsertionOrderedSet[TransitionBlock]) -> List[TransitionBlock]:
        """Grow the loop region to absorb successors whose predecessors all lie inside it.

        Iterates until at most one successor remains or no further absorption is
        possible; returns the successors still outside the region.
        """
        while (len(successor_nodes) > 1):
            new_successor_nodes: InsertionOrderedSet[TransitionBlock] = InsertionOrderedSet()
            for node in list(successor_nodes):
                if ((node != self.t_cfg.root) and self._all_predecessors_in_current_region(node)):
                    # Node is dominated by the region: pull it in and consider
                    # its own successors as new candidates.
                    successor_nodes.remove(node)
                    self._add_node_to_current_region(node)
                    for successor in self.t_cfg.get_successors(node):
                        if (successor not in self.loop_region):
                            new_successor_nodes.add(successor)
                    if ((len(new_successor_nodes) + len(successor_nodes)) <= 1):
                        break
            if (not new_successor_nodes):
                # Fixed point: nothing more can be absorbed.
                break
            successor_nodes.update(new_successor_nodes)
        # Absorbed nodes may still linger in the set; filter them out.
        return [succ_node for succ_node in successor_nodes if (succ_node not in self.loop_region)]

    def _all_predecessors_in_current_region(self, node: TransitionBlock) -> bool:
        """True when every predecessor of `node` already belongs to the loop region."""
        return all(((predecessor in self.loop_region) for predecessor in self.t_cfg.get_predecessors(node)))

    def _add_node_to_current_region(self, node: TransitionBlock) -> None:
        """Add `node` plus its region-internal in/out edges to the loop region."""
        self.loop_region.add_node(node)
        self.loop_region.add_edges_from(self.t_cfg.get_in_edges(node))
        self.loop_region.add_edges_from((edge for edge in self.t_cfg.get_out_edges(node) if (edge.sink in self.loop_region)))
class OptionSeriesPieSonificationDefaultinstrumentoptionsMappingHighpass(Options):
    """Option wrapper for the highpass mapping of a pie series' default instrument."""

    def frequency(self) -> 'OptionSeriesPieSonificationDefaultinstrumentoptionsMappingHighpassFrequency':
        """Sub-configuration under the 'frequency' key."""
        sub_config = self._config_sub_data('frequency', OptionSeriesPieSonificationDefaultinstrumentoptionsMappingHighpassFrequency)
        return sub_config

    def resonance(self) -> 'OptionSeriesPieSonificationDefaultinstrumentoptionsMappingHighpassResonance':
        """Sub-configuration under the 'resonance' key."""
        sub_config = self._config_sub_data('resonance', OptionSeriesPieSonificationDefaultinstrumentoptionsMappingHighpassResonance)
        return sub_config
def get_language_name_from_code(isocode: str) -> str:
    """Resolve an ISO language code to a display name.

    Returns '' for unknown/empty codes and the auto-detect placeholder name
    for the special AUTO_DETECT code. Plain 2/3-letter codes are looked up in
    pycountry; anything else (including BCP-47 tags with subtags) falls back
    to langcodes' Language parser.
    """
    if isocode is None:
        return ''
    if isocode == AUTO_DETECT:
        return AUTO_DETECT_NAME
    if '-' not in isocode:
        if len(isocode) == 2:
            language = pycountry.languages.get(alpha_2=isocode)
        else:
            language = pycountry.languages.get(alpha_3=isocode)
        if (not language) and (not isocode):
            return ''
        if language:
            return language.name
    try:
        output = Language.get(isocode).display_name()
    except tag_parser.LanguageTagError:
        return ''
    return format_language_name(output, isocode)
def test_create_backfiller_error():
    """create_backfill_workflow rejects invalid plans, ranges and schedules."""
    plan_without_schedule = LaunchPlan.get_or_create(workflow=example_wf, name='nos', fixed_inputs={'v': 10})
    plan_with_rate = LaunchPlan.get_or_create(workflow=example_wf, name='rate', fixed_inputs={'v': 10}, schedule=FixedRate(duration=timedelta(days=1)))
    start_date = datetime(2022, 12, 1, 8)
    end_date = start_date + timedelta(days=10)
    # A launch plan without any schedule cannot be backfilled.
    with pytest.raises(ValueError):
        create_backfill_workflow(start_date, end_date, plan_without_schedule)
    # A reversed date range is rejected.
    with pytest.raises(ValueError):
        create_backfill_workflow(end_date, start_date, plan_without_schedule)
    # A missing launch plan is rejected.
    with pytest.raises(ValueError):
        create_backfill_workflow(end_date, start_date, None)
    # FixedRate schedules are not supported for backfills.
    with pytest.raises(NotImplementedError):
        create_backfill_workflow(start_date, end_date, plan_with_rate)
def nnc_jit(f: Callable[(P, R)], static_argnums: Optional[Tuple[int]]=None) -> Callable[(P, R)]:
    """JIT-compile `f` with NNC when available; otherwise return `f` unchanged.

    The NNC utilities are imported lazily so that an unusable/uninstalled NNC
    backend degrades to a warning instead of an import-time failure. Any
    exception raised by the import (not just ImportError) triggers the
    fallback; exceptions from the compilation call itself still propagate.
    """
    try:
        from beanmachine.ppl.inference.proposer.nnc.utils import nnc_jit as raw_nnc_jit
    except Exception as e:
        logger.warning(f'''Fails to initialize NNC due to the following error: {str(e)}
Falling back to default inference engine.''')
        return f
    return raw_nnc_jit(f, static_argnums)
class StrideExprCursor(ExprCursor):
    """Cursor over a LoopIR.StrideExpr node."""

    def name(self) -> str:
        """Return the name of the symbol whose stride is taken."""
        impl = self._impl
        assert isinstance(impl, C.Node)
        assert isinstance(impl._node, LoopIR.StrideExpr)
        return impl._node.name.name()

    def dim(self) -> int:
        """Return the dimension index of the stride expression."""
        impl = self._impl
        assert isinstance(impl, C.Node)
        assert isinstance(impl._node, LoopIR.StrideExpr)
        return impl._node.dim
def ast_innerWhile_simple_condition_complexity() -> AbstractSyntaxTree:
    """Build a fixture AST: an outer while loop containing an inner while loop.

    Shape: root seq -> [init `a = 0`; outer while(x1) whose body is
    [init b/c/d; inner while(x2 & x3 & x4) incrementing b/c/d; `a += 1`]].
    The condition map binds x1..x4 to `a<5`, `b<5`, `c<5`, `d<5`.
    """
    true_value = LogicCondition.initialize_true((context := LogicCondition.generate_new_context()))
    ast = AbstractSyntaxTree((root := SeqNode(true_value)), condition_map={logic_cond('x1', context): Condition(OperationType.less, [Variable('a'), Constant(5)]), logic_cond('x2', context): Condition(OperationType.less, [Variable('b'), Constant(5)]), logic_cond('x3', context): Condition(OperationType.less, [Variable('c'), Constant(5)]), logic_cond('x4', context): Condition(OperationType.less, [Variable('d'), Constant(5)])})
    # a = 0 before entering the outer loop
    init_code_node = ast._add_code_node([Assignment(Variable('a'), Constant(0))])
    outer_while = ast.factory.create_while_loop_node(logic_cond('x1', context))
    outer_while_body = ast.factory.create_seq_node()
    # b = c = d = 0 at the start of each outer iteration
    outer_while_init = ast._add_code_node([Assignment(Variable('b'), Constant(0)), Assignment(Variable('c'), Constant(0)), Assignment(Variable('d'), Constant(0))])
    # a += 1 after the inner loop finishes
    outer_while_exit = ast._add_code_node([Assignment(Variable('a'), BinaryOperation(OperationType.plus, [Variable('a'), Constant(1)]))])
    # Inner loop guarded by the conjunction x2 & x3 & x4
    inner_while = ast.factory.create_while_loop_node(((logic_cond('x2', context) & logic_cond('x3', context)) & logic_cond('x4', context)))
    inner_while_body = ast._add_code_node([Assignment(Variable('b'), BinaryOperation(OperationType.plus, [Variable('b'), Constant(1)])), Assignment(Variable('c'), BinaryOperation(OperationType.plus, [Variable('c'), Constant(1)])), Assignment(Variable('d'), BinaryOperation(OperationType.plus, [Variable('d'), Constant(1)]))])
    ast._add_nodes_from((outer_while, outer_while_body, inner_while))
    ast._add_edges_from([(root, init_code_node), (root, outer_while), (outer_while, outer_while_body), (outer_while_body, outer_while_init), (outer_while_body, inner_while), (outer_while_body, outer_while_exit), (inner_while, inner_while_body)])
    return ast
class WafFirewall(ModelNormal):
    """Generated OpenAPI model wrapping a single `data` attribute (WafFirewallData).

    NOTE(review): the bare `_property` / `_js_args_to_python_args` expression
    lines below look like decorator lines whose `@` prefix was lost (presumably
    `@cached_property` and `@convert_js_args_to_python_args` from the code
    generator template) — confirm against the upstream generated source.
    """
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        # Types accepted for undeclared (additional) properties.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Maps attribute name -> tuple of allowed types, per the OpenAPI spec.
        lazy_import()
        return {'data': (WafFirewallData,)}
    _property
    def discriminator():
        # No polymorphic discriminator for this schema.
        return None
    # Python attribute name -> JSON wire name.
    attribute_map = {'data': 'data'}
    read_only_vars = {}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from server-provided data."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            # Positional arguments are never valid for generated models.
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                # Silently drop unknown keys when the configuration asks for it.
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Client-side constructor; read-only attributes are rejected (none declared here)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def append_tei_children_list_and_get_whitespace(children: T_ElementChildrenList, semantic_content: SemanticContentWrapper, pending_whitespace: str, context: TeiElementFactoryContext) -> str:
    """Append the TEI children for `semantic_content` to `children`.

    Flushes `pending_whitespace` before the new children when anything is
    emitted; returns the whitespace to carry over to the next call.
    """
    new_children, trailing_whitespace = get_tei_children_and_whitespace_for_semantic_content(semantic_content, context=context)
    if not new_children:
        # Nothing emitted: keep carrying the pending whitespace forward.
        return pending_whitespace
    if pending_whitespace:
        children.append(pending_whitespace)
    children.extend(new_children)
    return trailing_whitespace
class Migration(migrations.Migration):
    """Initial migration: creates the typed key/value `Setting` model.

    The model stores one value column per supported type (`value_bool`,
    `value_date`, ...) with `value_type` selecting which one is in use.
    """
    initial = True
    dependencies = []
    operations = [migrations.CreateModel(name='Setting', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(help_text='(e.g. SETTING_NAME)', max_length=50, unique=True, verbose_name='Name')), ('value_type', models.CharField(choices=[('bool', 'bool'), ('date', 'date'), ('datetime', 'datetime'), ('decimal', 'decimal'), ('email', 'email'), ('file', 'file'), ('float', 'float'), ('image', 'image'), ('int', 'int'), ('string', 'string'), ('text', 'text'), ('time', 'time'), ('url', 'url')], max_length=20, verbose_name='Type')), ('value_bool', models.BooleanField(default=False, verbose_name='Value')), ('value_date', models.DateField(blank=True, null=True, verbose_name='Value')), ('value_datetime', models.DateTimeField(blank=True, null=True, verbose_name='Value')), ('value_decimal', models.DecimalField(blank=True, decimal_places=10, default=Decimal('0.0'), max_digits=19, verbose_name='Value')), ('value_email', models.EmailField(blank=True, max_length=254, verbose_name='Value')), ('value_file', models.FileField(blank=True, upload_to='files', verbose_name='Value')), ('value_float', models.FloatField(blank=True, default=0.0, verbose_name='Value')), ('value_image', models.FileField(blank=True, upload_to='images', verbose_name='Value')), ('value_int', models.IntegerField(blank=True, default=0, verbose_name='Value')), ('value_string', models.CharField(blank=True, max_length=50, verbose_name='Value')), ('value_text', models.TextField(blank=True, verbose_name='Value')), ('value_time', models.TimeField(blank=True, null=True, verbose_name='Value')), ('value_url', models.URLField(blank=True, verbose_name='Value'))], options={'verbose_name': 'Setting', 'verbose_name_plural': 'Settings', 'ordering': ['name']})]
class FuseExpandBmmTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls) -> None:
    """Seed torch's RNG once per class so the random fixtures are deterministic."""
    # BUG FIX: unittest invokes setUpClass on the class object; without the
    # @classmethod decorator the call raises TypeError (missing 'cls').
    torch.manual_seed(0)
def __init__(self, *args, **kwargs):
    """Initialize the test case and reset the per-instance compile counter."""
    super(FuseExpandBmmTestCase, self).__init__(*args, **kwargs)
    # Counts compiled modules so each gets a unique dll name.
    self.test_count = 0
def _compile_and_check(self, Y, test_name, expected_num_ops, expected_op, no_expand=True):
    """Compile graph output(s) `Y` and verify the optimized graph's ops.

    Asserts the optimized graph has exactly `expected_num_ops` ops; when it
    collapsed to a single op, that op must be `expected_op`. Otherwise, when
    `no_expand` is set, asserts no 'expand' op survived fusion. Returns the
    compiled module for numerical checks.
    """
    target = detect_target()
    dll_name = f'test_{self.test_count}.so'
    module = compile_model(Y, target, './tmp', test_name, dll_name=dll_name)
    self.test_count += 1
    sorted_graph = module.debug_sorted_graph
    sorted_ops = graph_utils.get_sorted_ops(sorted_graph)
    self.assertEqual(len(sorted_ops), expected_num_ops)
    if expected_num_ops == 1:
        self.assertEqual(sorted_ops[0]._attrs['op'], expected_op)
    elif no_expand:
        # BUG FIX: the original wrapped the predicate in a lambda inside all(),
        # i.e. all(<lambda objects>), which is always True since function
        # objects are truthy — the assertion could never fail. Evaluate the
        # predicate per op instead.
        self.assertTrue(all(op._attrs['op'] != 'expand' for op in sorted_ops))
    return module
def _test_non_fusible_expand_bmm_1(self, B, M, N, K, expected_num_ops, test_name, dtype='float16'):
    """expand whose output is also a graph output cannot be fused into the bmm.

    Builds expand(X0) -> bmm_rrr with the expand result marked as an output,
    compiles, and checks both outputs numerically against eager torch for
    batch sizes 1 and B.
    """
    batch_sizes = [1, B]
    batch_dim = shape_utils.gen_int_var_min_max(batch_sizes, 'batch_0')
    X0 = Tensor(shape=[1, M, K], dtype=dtype, name='x0', is_input=True)
    X1 = Tensor(shape=[batch_dim, K, N], dtype=dtype, name='x1', is_input=True)
    Y0 = ops.expand()(X0, [batch_dim, (- 1), (- 1)])
    # Marking the expand result as an output keeps it alive, blocking fusion.
    Y0._attrs['name'] = 'output0'
    Y0._attrs['is_output'] = True
    Y1 = ops.bmm_rrr()(Y0, X1)
    Y1._attrs['name'] = 'output1'
    Y1._attrs['is_output'] = True
    module = self._compile_and_check([Y0, Y1], test_name, expected_num_ops, 'bmm_rrr', no_expand=False)
    for batch in [1, B]:
        x0_pt = get_random_torch_tensor([1, M, K], dtype)
        x1_pt = get_random_torch_tensor([batch, K, N], dtype)
        y0_pt = x0_pt.expand(batch, (- 1), (- 1))
        y1_pt = torch.matmul(y0_pt, x1_pt)
        y0 = get_torch_empty_tensor(y0_pt.size(), dtype)
        y1 = get_torch_empty_tensor(y1_pt.size(), dtype)
        inputs = {'x0': x0_pt, 'x1': x1_pt}
        module.run_with_tensors(inputs, [y0, y1])
        torch.testing.assert_close(y0_pt, y0, atol=0.1, rtol=0.1)
        torch.testing.assert_close(y1_pt, y1, atol=0.1, rtol=0.1)
def test_non_fusible_expand_bmm_1(self):
    """Expand feeding a graph output must survive as a separate op (2 ops total)."""
    self._test_non_fusible_expand_bmm_1(B=10, M=4, N=12, K=6, expected_num_ops=2, test_name='test_non_fusible_expand_bmm_1')
def _test_non_fusible_expand_bmm_2(self, B, M, N, expected_num_ops, test_name, dtype='float16'):
    """expand with a second consumer (elementwise add) cannot be fused into the bmm.

    Builds expand(X0) feeding both bmm_rrr and an elementwise ADD, compiles,
    and checks the result numerically for batch sizes 1 and B.
    """
    batch_sizes = [1, B]
    batch_dim = shape_utils.gen_int_var_min_max(batch_sizes, 'batch_0')
    X0 = Tensor(shape=[1, M, N], dtype=dtype, name='x0', is_input=True)
    X1 = Tensor(shape=[batch_dim, N, N], dtype=dtype, name='x1', is_input=True)
    expand_0 = ops.expand()(X0, [batch_dim, (- 1), (- 1)])
    bmm_rrr_1 = ops.bmm_rrr()(expand_0, X1)
    # Second use of expand_0: this extra consumer is what blocks fusion.
    Y = ops.elementwise(FuncEnum.ADD)(expand_0, bmm_rrr_1)
    Y._attrs['name'] = 'output'
    Y._attrs['is_output'] = True
    module = self._compile_and_check(Y, test_name, expected_num_ops, 'bmm_rrr', no_expand=False)
    for batch in [1, B]:
        x0_pt = get_random_torch_tensor([1, M, N], dtype)
        x1_pt = get_random_torch_tensor([batch, N, N], dtype)
        expand_0_pt = x0_pt.expand(batch, (- 1), (- 1))
        bmm_rrr_1_pt = torch.matmul(expand_0_pt, x1_pt)
        y_pt = (expand_0_pt + bmm_rrr_1_pt)
        y = get_torch_empty_tensor(y_pt.size(), dtype)
        inputs = {'x0': x0_pt, 'x1': x1_pt}
        module.run_with_tensors(inputs, [y])
        torch.testing.assert_close(y_pt, y, atol=0.1, rtol=0.1)
def test_non_fusible_expand_bmm_2(self):
    """Expand with two consumers (bmm + elementwise add) must not be fused away."""
    # BUG FIX: this previously re-ran _test_non_fusible_expand_bmm_1 (an exact
    # copy of the test above), so _test_non_fusible_expand_bmm_2 was never
    # exercised. Call the intended helper (its signature takes no K).
    # NOTE(review): expected_num_ops=3 assumes expand, bmm_rrr and the fused
    # elementwise add all survive in the optimized graph — confirm against the
    # fusion passes.
    self._test_non_fusible_expand_bmm_2(B=10, M=4, N=12, expected_num_ops=3, test_name='test_non_fusible_expand_bmm_2')
def _test_fuse_expand_bmm_rrr_a(self, B, M, N, K, expected_num_ops, test_name, dtype='float16'):
    """expand on the A operand of bmm_rrr; fusibility depends on shapes.

    Builds expand(X0) -> bmm_rrr, compiles expecting `expected_num_ops` ops
    (1 when the expand is fused into the bmm), and checks numerically for
    batch sizes 1 and B.
    """
    batch_sizes = [1, B]
    batch_dim = shape_utils.gen_int_var_min_max(batch_sizes, 'batch_0')
    X0 = Tensor(shape=[1, M, K], dtype=dtype, name='x0', is_input=True)
    X1 = Tensor(shape=[batch_dim, K, N], dtype=dtype, name='x1', is_input=True)
    expand_0 = ops.expand()(X0, [batch_dim, (- 1), (- 1)])
    Y = ops.bmm_rrr()(expand_0, X1)
    Y._attrs['name'] = 'output0'
    Y._attrs['is_output'] = True
    module = self._compile_and_check(Y, test_name, expected_num_ops, 'bmm_rrr')
    for batch in [1, B]:
        x0_pt = get_random_torch_tensor([1, M, K], dtype)
        x1_pt = get_random_torch_tensor([batch, K, N], dtype)
        expand_0_pt = x0_pt.expand(batch, (- 1), (- 1))
        y_pt = torch.matmul(expand_0_pt, x1_pt)
        y = get_torch_empty_tensor(y_pt.size(), dtype)
        inputs = {'x0': x0_pt, 'x1': x1_pt}
        module.run_with_tensors(inputs, [y])
        torch.testing.assert_close(y_pt, y, atol=0.1, rtol=0.1)
def test_fuse_expand_bmm_rrr_a(self):
    """K=11 keeps expand separate (2 ops); K=6 lets it fuse into the bmm (1 op)."""
    self._test_fuse_expand_bmm_rrr_a(B=10, M=4, N=12, K=11, expected_num_ops=2, test_name='test_fuse_expand_bmm_rrr_a')
    self._test_fuse_expand_bmm_rrr_a(B=10, M=4, N=12, K=6, expected_num_ops=1, test_name='test_fuse_expand_bmm_rrr_a')
def _test_fuse_expand_bmm_rrc_add_b(self, B, M, N, K, expected_num_ops, test_name, dtype='float16'):
    """expand on the B operand of bmm_rrc_add; fusibility depends on shapes.

    Builds bmm_rrc_add(X0, expand(X1), X2), compiles expecting
    `expected_num_ops` ops, and checks numerically for batch sizes 1 and B
    (torch reference: matmul followed by transposed add of the residual).
    """
    batch_sizes = [1, B]
    batch_dim = shape_utils.gen_int_var_min_max(batch_sizes, 'batch_0')
    X0 = Tensor(shape=[batch_dim, M, K], dtype=dtype, name='x0', is_input=True)
    X1 = Tensor(shape=[1, K, N], dtype=dtype, name='x1', is_input=True)
    X2 = Tensor(shape=[batch_dim, N, M], dtype=dtype, name='x2', is_input=True)
    expand_0 = ops.expand()(X1, [batch_dim, (- 1), (- 1)])
    Y = ops.bmm_rrc_add()(X0, expand_0, X2)
    Y._attrs['name'] = 'output0'
    Y._attrs['is_output'] = True
    module = self._compile_and_check(Y, test_name, expected_num_ops, 'bmm_rrc_add')
    for batch in [1, B]:
        x0_pt = get_random_torch_tensor([batch, M, K], dtype)
        x1_pt = get_random_torch_tensor([1, K, N], dtype)
        x2_pt = get_random_torch_tensor([batch, N, M], dtype)
        expand_0_pt = x1_pt.expand(batch, (- 1), (- 1))
        y_pt = torch.matmul(x0_pt, expand_0_pt)
        # 'c' layout: the matmul result is transposed before adding the residual.
        y_pt = (y_pt.transpose(2, 1) + x2_pt)
        y = get_torch_empty_tensor(y_pt.size(), dtype)
        inputs = {'x0': x0_pt, 'x1': x1_pt, 'x2': x2_pt}
        module.run_with_tensors(inputs, [y])
        torch.testing.assert_close(y_pt, y, atol=0.1, rtol=0.1)
def test_fuse_expand_bmm_rrc_add_b(self):
    """K=11 keeps the graph unfused (3 ops); K=6 fuses it into one bmm_rrc_add."""
    self._test_fuse_expand_bmm_rrc_add_b(B=10, M=4, N=12, K=11, expected_num_ops=3, test_name='test_fuse_expand_bmm_rrc_add_b')
    self._test_fuse_expand_bmm_rrc_add_b(B=10, M=4, N=12, K=6, expected_num_ops=1, test_name='test_fuse_expand_bmm_rrc_add_b')
def _test_fuse_expand_bmm_crr_a(self, B, M, N, K, expected_num_ops, test_name, dtype='float16'):
    """expand fed by an elementwise add, on the (column-major) A operand of bmm_crr.

    Builds bmm_crr(expand(X0 + X1), X2), compiles expecting
    `expected_num_ops` ops, and checks numerically for batch sizes 1 and B
    (torch reference transposes A before the matmul, matching the 'c' layout).
    """
    batch_sizes = [1, B]
    batch_dim = shape_utils.gen_int_var_min_max(batch_sizes, 'batch_0')
    X0 = Tensor(shape=[1, K, M], dtype=dtype, name='x0', is_input=True)
    X1 = Tensor(shape=[1, K, M], dtype=dtype, name='x1', is_input=True)
    X2 = Tensor(shape=[batch_dim, K, N], dtype=dtype, name='x2', is_input=True)
    add_0 = ops.elementwise(FuncEnum.ADD)(X0, X1)
    expand_1 = ops.expand()(add_0, [batch_dim, (- 1), (- 1)])
    Y = ops.bmm_crr()(expand_1, X2)
    Y._attrs['name'] = 'output0'
    Y._attrs['is_output'] = True
    module = self._compile_and_check(Y, test_name, expected_num_ops, 'bmm_crr')
    for batch in [1, B]:
        x0_pt = get_random_torch_tensor([1, K, M], dtype)
        x1_pt = get_random_torch_tensor([1, K, M], dtype)
        x2_pt = get_random_torch_tensor([batch, K, N], dtype)
        add_0_pt = (x0_pt + x1_pt)
        expand_1_pt = add_0_pt.expand(batch, (- 1), (- 1))
        # 'c' layout on A: transpose before the matmul.
        expand_1_tran_pt = torch.transpose(expand_1_pt, 2, 1)
        y_pt = torch.matmul(expand_1_tran_pt, x2_pt)
        y = get_torch_empty_tensor(y_pt.size(), dtype)
        inputs = {'x0': x0_pt, 'x1': x1_pt, 'x2': x2_pt}
        module.run_with_tensors(inputs, [y])
        torch.testing.assert_close(y_pt, y, atol=0.1, rtol=0.1)
def test_fuse_expand_bmm_crr_a(self):
    """Run the bmm_crr expand-fusion case for two M sizes (different alignment)."""
    for m, num_ops in ((5, 4), (4, 2)):
        self._test_fuse_expand_bmm_crr_a(B=10, M=m, N=12, K=11, expected_num_ops=num_ops, test_name='test_fuse_expand_bmm_crr_a')
def _test_fuse_expand_bmm_crc_add_b(self, B, M, N, K, expected_num_ops, test_name, dtype='float16'):
    """Build bmm_crc_add whose B operand is (X1 + X2) expanded over a dynamic
    batch, then verify fused op count and numerics."""
    batch_dim = shape_utils.gen_int_var_min_max([1, B], 'batch_0')
    X0 = Tensor(shape=[batch_dim, K, M], dtype=dtype, name='x0', is_input=True)
    X1 = Tensor(shape=[1, K, N], dtype=dtype, name='x1', is_input=True)
    X2 = Tensor(shape=[1, K, N], dtype=dtype, name='x2', is_input=True)
    X3 = Tensor(shape=[batch_dim, N, M], dtype=dtype, name='x3', is_input=True)
    add_0 = ops.elementwise(FuncEnum.ADD)(X1, X2)
    expand_1 = ops.expand()(add_0, [batch_dim, -1, -1])
    Y = ops.bmm_crc_add()(X0, expand_1, X3)
    Y._attrs['name'] = 'output0'
    Y._attrs['is_output'] = True
    module = self._compile_and_check(Y, test_name, expected_num_ops, 'bmm_crc_add')
    # Check numerics for both extremes of the dynamic batch dimension.
    for batch in (1, B):
        a = get_random_torch_tensor([batch, K, M], dtype)
        b = get_random_torch_tensor([1, K, N], dtype)
        c = get_random_torch_tensor([1, K, N], dtype)
        d = get_random_torch_tensor([batch, N, M], dtype)
        expanded = (b + c).expand(batch, -1, -1)
        y_pt = torch.matmul(a.transpose(2, 1), expanded).transpose(2, 1) + d
        y = get_torch_empty_tensor(y_pt.size(), dtype)
        module.run_with_tensors({'x0': a, 'x1': b, 'x2': c, 'x3': d}, [y])
        torch.testing.assert_close(y_pt, y, atol=0.1, rtol=0.1)
def test_fuse_expand_bmm_crc_add_b(self):
    """Run the bmm_crc_add expand-fusion case for two (M, K) combinations."""
    for m, k, num_ops in ((5, 6, 5), (4, 11, 2)):
        self._test_fuse_expand_bmm_crc_add_b(B=10, M=m, N=12, K=k, expected_num_ops=num_ops, test_name='test_fuse_expand_bmm_crc_add_b')
def _test_fuse_expand_bmm_rcr_a(self, B, M, N, K, expected_num_ops, test_name, dtype='float16'):
    """Build (X0 + X1) -> expand -> bmm_rcr over a dynamic batch and verify
    both the fused op count and the numerics against eager PyTorch."""
    batch_dim = shape_utils.gen_int_var_min_max([1, B], 'batch_0')
    X0 = Tensor(shape=[1, M, K], dtype=dtype, name='x0', is_input=True)
    X1 = Tensor(shape=[1, M, K], dtype=dtype, name='x1', is_input=True)
    X2 = Tensor(shape=[batch_dim, N, K], dtype=dtype, name='x2', is_input=True)
    add_0 = ops.elementwise(FuncEnum.ADD)(X0, X1)
    expand_1 = ops.expand()(add_0, [batch_dim, -1, -1])
    Y = ops.bmm_rcr()(expand_1, X2)
    Y._attrs['name'] = 'output0'
    Y._attrs['is_output'] = True
    module = self._compile_and_check(Y, test_name, expected_num_ops, 'bmm_rcr')
    # Check numerics for both extremes of the dynamic batch dimension.
    for batch in (1, B):
        a = get_random_torch_tensor([1, M, K], dtype)
        b = get_random_torch_tensor([1, M, K], dtype)
        c = get_random_torch_tensor([batch, N, K], dtype)
        expanded = (a + b).expand(batch, -1, -1)
        y_pt = torch.matmul(expanded, c.transpose(2, 1))
        y = get_torch_empty_tensor(y_pt.size(), dtype)
        module.run_with_tensors({'x0': a, 'x1': b, 'x2': c}, [y])
        torch.testing.assert_close(y_pt, y, atol=0.1, rtol=0.1)
def test_fuse_expand_bmm_rcr_a(self):
    """Run the bmm_rcr expand-fusion case for two (M, K) combinations."""
    for m, k, num_ops in ((4, 11, 4), (5, 6, 2)):
        self._test_fuse_expand_bmm_rcr_a(B=10, M=m, N=12, K=k, expected_num_ops=num_ops, test_name='test_fuse_expand_bmm_rcr_a')
def _test_fuse_expand_bmm_rcc_add_b(self, B, M, N, K, expected_num_ops, test_name, dtype='float16'):
    """Build bmm_rcc_add whose B operand is (X1 + X2) expanded over a dynamic
    batch, then verify fused op count and numerics."""
    batch_dim = shape_utils.gen_int_var_min_max([1, B], 'batch_0')
    X0 = Tensor(shape=[batch_dim, M, K], dtype=dtype, name='x0', is_input=True)
    X1 = Tensor(shape=[1, N, K], dtype=dtype, name='x1', is_input=True)
    X2 = Tensor(shape=[1, N, K], dtype=dtype, name='x2', is_input=True)
    X3 = Tensor(shape=[batch_dim, N, M], dtype=dtype, name='x3', is_input=True)
    add_0 = ops.elementwise(FuncEnum.ADD)(X1, X2)
    expand_1 = ops.expand()(add_0, [batch_dim, -1, -1])
    Y = ops.bmm_rcc_add()(X0, expand_1, X3)
    Y._attrs['name'] = 'output0'
    Y._attrs['is_output'] = True
    module = self._compile_and_check(Y, test_name, expected_num_ops, 'bmm_rcc_add')
    # Check numerics for both extremes of the dynamic batch dimension.
    for batch in (1, B):
        a = get_random_torch_tensor([batch, M, K], dtype)
        b = get_random_torch_tensor([1, N, K], dtype)
        c = get_random_torch_tensor([1, N, K], dtype)
        d = get_random_torch_tensor([batch, N, M], dtype)
        expanded = (b + c).expand(batch, -1, -1)
        y_pt = torch.matmul(a, expanded.transpose(2, 1)).transpose(2, 1) + d
        y = get_torch_empty_tensor(y_pt.size(), dtype)
        module.run_with_tensors({'x0': a, 'x1': b, 'x2': c, 'x3': d}, [y])
        torch.testing.assert_close(y_pt, y, atol=0.1, rtol=0.1)
def test_fuse_expand_bmm_rcc_add_b(self):
    """Run the bmm_rcc_add expand-fusion case for two (M, K) combinations."""
    for m, k, num_ops in ((6, 5, 4), (4, 6, 2)):
        self._test_fuse_expand_bmm_rcc_add_b(B=10, M=m, N=12, K=k, expected_num_ops=num_ops, test_name='test_fuse_expand_bmm_rcc_add_b')
def _test_fuse_expand_bmm_ccr_a(self, B, M, N, K, expected_num_ops, test_name, dtype='float16'):
    """Build (X0 + X1) -> expand -> bmm_ccr over a dynamic batch and verify
    both the fused op count and the numerics against eager PyTorch."""
    batch_dim = shape_utils.gen_int_var_min_max([1, B], 'batch_0')
    X0 = Tensor(shape=[1, K, M], dtype=dtype, name='x0', is_input=True)
    X1 = Tensor(shape=[1, K, M], dtype=dtype, name='x1', is_input=True)
    X2 = Tensor(shape=[batch_dim, N, K], dtype=dtype, name='x2', is_input=True)
    add_0 = ops.elementwise(FuncEnum.ADD)(X0, X1)
    expand_1 = ops.expand()(add_0, [batch_dim, -1, -1])
    Y = ops.bmm_ccr()(expand_1, X2)
    Y._attrs['name'] = 'output0'
    Y._attrs['is_output'] = True
    module = self._compile_and_check(Y, test_name, expected_num_ops, 'bmm_ccr')
    # Check numerics for both extremes of the dynamic batch dimension.
    for batch in (1, B):
        a = get_random_torch_tensor([1, K, M], dtype)
        b = get_random_torch_tensor([1, K, M], dtype)
        c = get_random_torch_tensor([batch, N, K], dtype)
        expanded = (a + b).expand(batch, -1, -1)
        y_pt = torch.matmul(expanded.transpose(2, 1), c.transpose(2, 1))
        y = get_torch_empty_tensor(y_pt.size(), dtype)
        module.run_with_tensors({'x0': a, 'x1': b, 'x2': c}, [y])
        torch.testing.assert_close(y_pt, y, atol=0.1, rtol=0.1)
def test_fuse_expand_bmm_ccr_a(self):
    """Run the bmm_ccr expand-fusion case for two K sizes (different alignment)."""
    for k, num_ops in ((11, 3), (6, 2)):
        self._test_fuse_expand_bmm_ccr_a(B=10, M=4, N=12, K=k, expected_num_ops=num_ops, test_name='test_fuse_expand_bmm_ccr_a')
def _test_fuse_expand_bmm_ccc_add_b(self, B, M, N, K, expected_num_ops, test_name, dtype='float16'):
    """Build bmm_ccc_add whose B operand is (X1 + X2) expanded over a dynamic
    batch, then verify fused op count and numerics."""
    batch_dim = shape_utils.gen_int_var_min_max([1, B], 'batch_0')
    X0 = Tensor(shape=[batch_dim, K, M], dtype=dtype, name='x0', is_input=True)
    X1 = Tensor(shape=[1, N, K], dtype=dtype, name='x1', is_input=True)
    X2 = Tensor(shape=[1, N, K], dtype=dtype, name='x2', is_input=True)
    X3 = Tensor(shape=[batch_dim, N, M], dtype=dtype, name='x3', is_input=True)
    add_0 = ops.elementwise(FuncEnum.ADD)(X1, X2)
    expand_1 = ops.expand()(add_0, [batch_dim, -1, -1])
    Y = ops.bmm_ccc_add()(X0, expand_1, X3)
    Y._attrs['name'] = 'output0'
    Y._attrs['is_output'] = True
    module = self._compile_and_check(Y, test_name, expected_num_ops, 'bmm_ccc_add')
    # Check numerics for both extremes of the dynamic batch dimension.
    for batch in (1, B):
        a = get_random_torch_tensor([batch, K, M], dtype)
        b = get_random_torch_tensor([1, N, K], dtype)
        c = get_random_torch_tensor([1, N, K], dtype)
        d = get_random_torch_tensor([batch, N, M], dtype)
        expanded = (b + c).expand(batch, -1, -1)
        y_pt = torch.matmul(a.transpose(2, 1), expanded.transpose(2, 1)).transpose(2, 1) + d
        y = get_torch_empty_tensor(y_pt.size(), dtype)
        module.run_with_tensors({'x0': a, 'x1': b, 'x2': c, 'x3': d}, [y])
        torch.testing.assert_close(y_pt, y, atol=0.1, rtol=0.1)
def test_fuse_expand_bmm_ccc_add_b(self):
    """Run the bmm_ccc_add expand-fusion case for two M sizes (different alignment)."""
    for m, num_ops in ((5, 5), (4, 2)):
        self._test_fuse_expand_bmm_ccc_add_b(B=10, M=m, N=12, K=6, expected_num_ops=num_ops, test_name='test_fuse_expand_bmm_ccc_add_b')
def _test_fuse_size_expand_bmm_rrr(self, B, M, N, K, expected_num_ops, test_name, dtype='float16'):
    """Three [1, M, K] operands are expanded to a batch size obtained via
    ops.size() of (X3 + X4), bmm_rrr'd against that sum, and the three
    products are added into a single output."""
    batch_dim = shape_utils.gen_int_var_min_max([1, B], 'batch_0')
    X0 = Tensor(shape=[1, M, K], dtype=dtype, name='x0', is_input=True)
    X1 = Tensor(shape=[1, M, K], dtype=dtype, name='x1', is_input=True)
    X2 = Tensor(shape=[1, M, K], dtype=dtype, name='x2', is_input=True)
    X3 = Tensor(shape=[batch_dim, K, N], dtype=dtype, name='x3', is_input=True)
    X4 = Tensor(shape=[batch_dim, K, N], dtype=dtype, name='x4', is_input=True)
    add_0 = ops.elementwise(FuncEnum.ADD)(X3, X4)
    size_1, _, _ = ops.size()(add_0)
    target_shape = [size_1, -1, -1]
    # Expand all three lhs operands first, then issue the three bmms,
    # mirroring the graph-construction order the optimizer is tested against.
    expands = [ops.expand()(lhs, target_shape) for lhs in (X0, X1, X2)]
    bmm_outs = [ops.bmm_rrr()(e, add_0) for e in expands]
    partial = ops.elementwise(FuncEnum.ADD)(bmm_outs[0], bmm_outs[1])
    Y = ops.elementwise(FuncEnum.ADD)(bmm_outs[2], partial)
    Y._attrs['name'] = 'output0'
    Y._attrs['is_output'] = True
    module = self._compile_and_check(Y, test_name, expected_num_ops, 'bmm_rrr')
    # Check numerics for both extremes of the dynamic batch dimension.
    for batch in (1, B):
        lhs_pts = [get_random_torch_tensor([1, M, K], dtype) for _ in range(3)]
        x3_pt = get_random_torch_tensor([batch, K, N], dtype)
        x4_pt = get_random_torch_tensor([batch, K, N], dtype)
        rhs_pt = x3_pt + x4_pt
        bmm_pts = [torch.matmul(lhs.expand(batch, -1, -1), rhs_pt) for lhs in lhs_pts]
        y_pt = bmm_pts[2] + (bmm_pts[0] + bmm_pts[1])
        y = get_torch_empty_tensor(y_pt.size(), dtype)
        inputs = {'x0': lhs_pts[0], 'x1': lhs_pts[1], 'x2': lhs_pts[2], 'x3': x3_pt, 'x4': x4_pt}
        module.run_with_tensors(inputs, [y])
        torch.testing.assert_close(y_pt, y, atol=0.1, rtol=0.1)
def test_fuse_size_expand_bmm_rrr(self):
    """Run the size+expand bmm_rrr fusion case for two K sizes."""
    for k, num_ops in ((11, 7), (6, 4)):
        self._test_fuse_size_expand_bmm_rrr(B=10, M=4, N=12, K=k, expected_num_ops=num_ops, test_name='test_fuse_size_expand_bmm_rrr')
class _TestSlideFixture():
    """Context manager that wires TestSlide's mocking framework into a test:
    collects mock assertions registered during the test body and, on exit,
    runs them all and undoes every global patch TestSlide installed."""
    def _register_assertion(self, assertion: Callable) -> None:
        # Invoked by testslide's mock machinery for each expectation it records.
        self._assertions.append(assertion)
    def __enter__(self) -> '_TestSlideFixture':
        self._assertions: List[Callable] = []
        # Route testslide's global assertion registration hook to this fixture.
        testslide_module.mock_callable.register_assertion = self._register_assertion
        return self
    def __exit__(self, exc_type: Optional[type], exc_val: Optional[Exception], exc_tb: TracebackType):
        aggregated_exceptions = testslide_module.AggregatedExceptions()
        try:
            # Run every recorded assertion, collecting all failures instead of
            # stopping at the first one.
            for assertion in self._assertions:
                try:
                    assertion()
                except BaseException as be:
                    aggregated_exceptions.append_exception(be)
        finally:
            # Always unpatch, even when assertions raised, so global state
            # does not leak into subsequent tests.
            testslide_module.mock_callable.unpatch_all_callable_mocks()
            testslide_module.mock_constructor.unpatch_all_constructor_mocks()
            testslide_module.patch_attribute.unpatch_all_mocked_attributes()
        if aggregated_exceptions.exceptions:
            # Second positional arg is pytest.fail's pytrace flag.
            pytest.fail(str(aggregated_exceptions), False)
    # Thin pass-throughs to the testslide DSL entry points.
    def mock_callable(*args: Any, **kwargs: Any) -> testslide_module.mock_callable._MockCallableDSL:
        return testslide_module.mock_callable.mock_callable(*args, **kwargs)
    def mock_async_callable(*args: Any, **kwargs: Any) -> testslide_module.mock_callable._MockAsyncCallableDSL:
        return testslide_module.mock_callable.mock_async_callable(*args, **kwargs)
    def mock_constructor(*args: Any, **kwargs: Any) -> testslide_module.mock_constructor._MockConstructorDSL:
        return testslide_module.mock_constructor.mock_constructor(*args, **kwargs)
    def patch_attribute(*args: Any, **kwargs: Any) -> None:
        return testslide_module.patch_attribute.patch_attribute(*args, **kwargs)
class OptionSeriesItemSonificationTracksMappingTremolo(Options):
    """Config wrapper for the sonification track-mapping tremolo options,
    exposing its 'depth' and 'speed' sub-sections."""
    def depth(self) -> 'OptionSeriesItemSonificationTracksMappingTremoloDepth':
        # Returns the nested 'depth' config section via _config_sub_data.
        return self._config_sub_data('depth', OptionSeriesItemSonificationTracksMappingTremoloDepth)
    def speed(self) -> 'OptionSeriesItemSonificationTracksMappingTremoloSpeed':
        # Returns the nested 'speed' config section via _config_sub_data.
        return self._config_sub_data('speed', OptionSeriesItemSonificationTracksMappingTremoloSpeed)
def test_url_blank_params():
    """Valueless query params ('abc', 'def') are kept with empty-string values
    and the original key ordering is preserved."""
    q = QueryParams('a=123&abc&def&b=456')
    for key in ('a', 'abc', 'def', 'b'):
        assert key in q
    val = q.get('abc')
    assert val is not None
    assert len(val) == 0
    assert len(q['a']) == 3
    assert list(q.keys()) == ['a', 'abc', 'def', 'b']
class _RecipientSearchText(_Filter):
    """Elasticsearch filter matching recipients by free-text name, and by
    DUNS (9 digits, first five numeric) or UEI (12 chars) identifiers.

    NOTE(review): `generate_elasticsearch_query` takes `cls` but no
    @classmethod decorator is visible here — confirm it exists upstream.
    """
    underscore_name = 'recipient_search_text'
    def generate_elasticsearch_query(cls, filter_values: List[str], query_type: _QueryType, **options) -> ES_Q:
        # One sub-query per filter value; combined below with
        # should + minimum_should_match=1 (i.e. any value may match).
        recipient_search_query = []
        fields = ['recipient_name']
        for filter_value in filter_values:
            upper_recipient_string = es_sanitize(filter_value.upper())
            # NOTE(review): the already-sanitized string is sanitized again
            # here; if es_sanitize is not idempotent this double-escapes —
            # verify intent.
            query = (es_sanitize(upper_recipient_string) + '*')
            if ('\\' in es_sanitize(upper_recipient_string)):
                # Escaped input: escape the trailing wildcard as a literal.
                query = (es_sanitize(upper_recipient_string) + '\\*')
            recipient_name_query = ES_Q('query_string', query=query, default_operator='AND', fields=fields)
            if ((len(upper_recipient_string) == 9) and upper_recipient_string[:5].isnumeric()):
                # Looks like a DUNS: also try an exact id match.
                recipient_duns_query = ES_Q('match', recipient_unique_id=upper_recipient_string)
                recipient_search_query.append(ES_Q('dis_max', queries=[recipient_name_query, recipient_duns_query]))
            if (len(upper_recipient_string) == 12):
                # Looks like a UEI: also try an exact id match.
                recipient_uei_query = ES_Q('match', recipient_uei=upper_recipient_string)
                recipient_search_query.append(ES_Q('dis_max', queries=[recipient_name_query, recipient_uei_query]))
            if upper_recipient_string.endswith('.'):
                recipient_search_query.append(recipient_name_query)
                # Trailing dot: also regexp-match names continuing past it.
                recipient_search_query.append(ES_Q({'regexp': {'recipient_name.keyword': f"{upper_recipient_string.rstrip('.')}\..*"}}))
            else:
                recipient_search_query.append(recipient_name_query)
        return ES_Q('bool', should=recipient_search_query, minimum_should_match=1)
class Controller(controller.ControllerProto):
    """Domoticz HTTP controller: pushes sensor updates to a Domoticz server
    through its JSON API over HTTP or HTTPS.

    Fixes vs the degraded source: '&param=' URL fragments had collapsed to
    the '&para;' HTML entity; the Process launch had lost its assignment and
    start() call; the 'http://'/'https://' scheme prefixes were truncated.
    """
    CONTROLLER_ID = 1
    CONTROLLER_NAME = 'Domoticz HTTP'

    def __init__(self, controllerindex):
        controller.ControllerProto.__init__(self, controllerindex)
        self.usesID = True
        self.usesAccount = True
        self.usesPassword = True
        self.authmode = 0  # 0=HTTP, 1=HTTPS auto-negotiation, 2=HTTPS w/o cert verify

    def webform_load(self):
        """Render the controller's config form (connection-mode selector)."""
        try:
            am = self.authmode
        except AttributeError:
            # Configs saved before authmode existed fall back to plain HTTP.
            am = 0
        options = ['HTTP', 'HTTPS/auto negotiation', 'HTTPS/disable verify']
        optionvalues = [0, 1, 2]
        webserver.addFormSelector('Mode', 'c001_mode', len(optionvalues), options, optionvalues, None, int(am))
        return True

    def webform_save(self, params):
        """Persist the selected connection mode; default to plain HTTP."""
        try:
            self.authmode = int(webserver.arg('c001_mode', params))
        except Exception:
            self.authmode = 0
        return True

    def senddata(self, idx, sensortype, value, userssi=(-1), usebattery=(-1), tasknum=(-1), changedvalue=(-1)):
        """Translate a sensor update into a Domoticz JSON API request and fire
        it asynchronously in a child process."""
        if not self.enabled:
            return
        if int(idx) == 0:
            misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, 'Domoticz HTTP: IDX cannot be zero!')
            return
        try:
            usebattery = float(usebattery)
        except Exception:
            usebattery = (-1)
        if int(sensortype) == rpieGlobals.SENSOR_TYPE_SWITCH:
            url = '/json.htm?type=command&param=switchlight&idx='
            url += str(idx)
            url += '&switchcmd='
            if round(float(value[0])) == 0:
                url += 'Off'
            else:
                url += 'On'
        elif int(sensortype) == rpieGlobals.SENSOR_TYPE_DIMMER:
            url = '/json.htm?type=command&param=switchlight&idx='
            url += str(idx)
            url += '&switchcmd='
            if float(value[0]) == 0:
                url += 'Off'
            else:
                url += 'Set%20Level&level='
                url += str(value[0])
        else:
            # Generic device update: Domoticz encodes the reading in svalue.
            url = '/json.htm?type=command&param=udevice&idx='
            url += str(idx)
            url += '&nvalue=0&svalue='
            url += formatDomoticzSensorType(sensortype, value)
            url += '&rssi='
            url += mapRSSItoDomoticz(userssi)
            if int(usebattery) != (-1) and int(usebattery) != 255:
                # Explicit battery reading from the device.
                url += '&battery='
                url += str(int(usebattery))
            else:
                # Fall back to the host battery level.
                bval = misc.get_battery_value()
                url += '&battery='
                url += str(int(bval))
        urlstr = self.controllerip + ':' + self.controllerport + url + self.getaccountstr()
        misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, urlstr)
        # Run the request in its own process so a slow or unreachable server
        # cannot block the caller.
        httpproc = Process(target=self.urlget, args=(urlstr,))
        httpproc.start()

    def urlget(self, url):
        """Fetch *url* with the scheme implied by authmode (2s timeout).

        Returns False when HTTPS was requested but ssl is unavailable;
        connection failures are logged, not raised.
        """
        try:
            am = self.authmode
        except AttributeError:
            am = 0
        if am == 0:
            url = 'http://' + str(url)
        elif am == 1 or am == 2:
            url = 'https://' + str(url)
            try:
                import ssl
            except ImportError:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, 'OpenSSL is not reachable!')
                return False
            if am == 2:
                # Accept self-signed certs: no hostname check, no verification.
                ctx = ssl.create_default_context()
                ctx.check_hostname = False
                ctx.verify_mode = ssl.CERT_NONE
            else:
                ctx = ssl.SSLContext(ssl.PROTOCOL_TLS)
        try:
            if am == 0:
                content = urllib.request.urlopen(url, None, 2)
            else:
                content = urllib.request.urlopen(url, None, 2, context=ctx)
        except Exception as e:
            misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, 'Controller: ' + self.controllerip + ' connection failed ' + str(e))

    def getaccountstr(self):
        """Return '&username=...&password=...' with base64-encoded credentials,
        or '' when no credentials are configured."""
        retstr = ''
        if self.controlleruser != '' or self.controllerpassword != '':
            acc = base64.b64encode(bytes(self.controlleruser, 'utf-8')).decode('utf-8')
            pw = base64.b64encode(bytes(self.controllerpassword, 'utf-8')).decode('utf-8')
            retstr = '&username=' + str(acc) + '&password=' + str(pw)
        return retstr
class PickledWidget(Textarea):
    """Textarea widget that displays a pickled value via its repr() so it can
    be inspected/edited as text."""
    def render(self, name, value, attrs=None, renderer=None):
        """Render the value's repr in a textarea sized to the content."""
        repr_value = repr(value)
        if (attrs is not None):
            attrs['name'] = name
        else:
            attrs = {'name': name}
        attrs['cols'] = 30
        rows = 1
        # NOTE(review): repr() escapes newlines to backslash-n, so a literal
        # '\n' never appears in repr_value for a str — this branch looks
        # unreachable; confirm whether `value` (not repr_value) was intended.
        if (isinstance(value, str) and ('\n' in repr_value)):
            rows = max(1, len(value.split('\n')))
        attrs['rows'] = rows
        attrs = self.build_attrs(attrs)
        try:
            # Round-trip through literal_eval so simple literals display as
            # their evaluated form; fall back to the raw repr text.
            value = literal_eval(repr_value)
        except (ValueError, SyntaxError):
            value = repr_value
        return super().render(name, value, attrs=attrs, renderer=renderer)
    def value_from_datadict(self, data, files, name):
        # Return the raw posted string; presumably deserialized by the form
        # field — confirm against the corresponding field implementation.
        dat = data.get(name)
        return dat
def tas_submissions_across_multiple_years():
    """Fixture data: one toptier agency whose TAS has submissions, account
    balances and object-class/program-activity rows in two fiscal years."""
    dabs = baker.make('submissions.DABSSubmissionWindowSchedule', submission_reveal_date=f'{CURRENT_FISCAL_YEAR}-10-09', submission_fiscal_year=CURRENT_FISCAL_YEAR, submission_fiscal_month=12, submission_fiscal_quarter=4, is_quarter=False, period_start_date=f'{CURRENT_FISCAL_YEAR}-09-01', period_end_date=f'{CURRENT_FISCAL_YEAR}-10-01')
    ta1 = baker.make('references.ToptierAgency', toptier_code='007', _fill_optional=True)
    baker.make('references.Agency', id=1, toptier_flag=True, toptier_agency=ta1, _fill_optional=True)
    # Two final-balance submissions for the same agency in different years.
    sub1 = baker.make('submissions.SubmissionAttributes', reporting_fiscal_year=2020, reporting_fiscal_period=12, toptier_code=ta1.toptier_code, is_final_balances_for_fy=True, submission_window_id=dabs.id)
    sub2 = baker.make('submissions.SubmissionAttributes', reporting_fiscal_year=CURRENT_FISCAL_YEAR, reporting_fiscal_period=12, toptier_code=ta1.toptier_code, is_final_balances_for_fy=True, submission_window_id=dabs.id)
    fa1 = baker.make('accounts.FederalAccount', federal_account_code='001-0000', account_title='FA 1')
    tas1 = baker.make('accounts.TreasuryAppropriationAccount', funding_toptier_agency=ta1, budget_function_code=100, budget_function_title='NAME 1', budget_subfunction_code=1100, budget_subfunction_title='NAME 1A', federal_account=fa1, account_title='TA 1', tas_rendering_label='001-X-0000-000')
    baker.make('accounts.AppropriationAccountBalances', treasury_account_identifier=tas1, submission=sub1)
    baker.make('accounts.AppropriationAccountBalances', treasury_account_identifier=tas1, submission=sub2)
    pa1 = baker.make('references.RefProgramActivity', program_activity_code='000', program_activity_name='NAME 1')
    pa2 = baker.make('references.RefProgramActivity', program_activity_code='1000', program_activity_name='NAME 2')
    pa3 = baker.make('references.RefProgramActivity', program_activity_code='4567', program_activity_name='NAME 3')
    oc = 'references.ObjectClass'
    oc1 = baker.make(oc, major_object_class=10, major_object_class_name='Other', object_class=100, object_class_name='equipment')
    oc2 = baker.make(oc, major_object_class=10, major_object_class_name='Other', object_class=110, object_class_name='hvac')
    oc3 = baker.make(oc, major_object_class=10, major_object_class_name='Other', object_class=120, object_class_name='supplies')
    fabpaoc = 'financial_activities.FinancialAccountsByProgramActivityObjectClass'
    # NOTE(review): the outlay amount on the first row was lost in the source
    # (dangling '='); 10000000 follows the magnitude pattern of the other
    # rows — confirm against the original fixture.
    baker.make(fabpaoc, treasury_account=tas1, submission=sub1, program_activity=pa1, object_class=oc1, obligations_incurred_by_program_object_class_cpe=1, gross_outlay_amount_by_program_object_class_cpe=10000000)
    baker.make(fabpaoc, treasury_account=tas1, submission=sub1, program_activity=pa2, object_class=oc2, obligations_incurred_by_program_object_class_cpe=10, gross_outlay_amount_by_program_object_class_cpe=1000000)
    baker.make(fabpaoc, treasury_account=tas1, submission=sub2, program_activity=pa3, object_class=oc3, obligations_incurred_by_program_object_class_cpe=100, gross_outlay_amount_by_program_object_class_cpe=100000)
def create_pw_dict(data: List[List[Tuple[(str, str)]]]) -> Dict[(str, List[Tuple[(str, float)]])]:
    """Build previous-word -> POS-tag emission probabilities.

    For each token, count its POS tag under the preceding word (DUMMY for the
    first token of a sentence), then convert the counts to probabilities.
    """
    model = dict()
    for sentence in data:
        prev_word = DUMMY
        for curr_word, curr_pos in sentence:
            model.setdefault(prev_word, Counter()).update([curr_pos])
            prev_word = curr_word
    return to_probs(model)
@mock.patch('gitlabber.cli.logging')
@mock.patch('gitlabber.cli.sys')
@mock.patch('gitlabber.cli.os')
@mock.patch('gitlabber.cli.log')
@mock.patch('gitlabber.cli.GitlabTree')
def test_args_logging(mock_tree, mock_log, mock_os, mock_sys, mock_logging):
    """--verbose should install a stdout StreamHandler and set a formatter.

    NOTE(review): the mock.patch decorators had degraded to bare string
    statements in the source; restored here (bottom-most patch maps to the
    first argument) — confirm targets against the repo.
    """
    args_mock = mock.Mock()
    args_mock.return_value = Node(name='test', version=None, verbose=True, include='', exclude='', url='test_url', token='test_token', method=CloneMethod.SSH, naming=FolderNaming.PATH, archived=ArchivedResults.INCLUDE, file=None, concurrency=1, recursive=False, disble_progress=True, print=None, dest='.', root_group=None)
    cli.parse_args = args_mock
    mock_streamhandler = mock.Mock()
    mock_logging.StreamHandler = mock_streamhandler
    streamhandler_instance = mock_streamhandler.return_value
    mock_formatter = mock.Mock()
    streamhandler_instance.setFormatter = mock_formatter
    cli.main()
    mock_streamhandler.assert_called_once_with(mock_sys.stdout)
    mock_formatter.assert_called_once()
@pytest.mark.skipif(version.is_version_plus('1.4.0'), reason='mocking <1.4 modules')
@patch('dbt.contracts.graph.parsed.ParsedModelNode')
@patch('fal.dbt.FalDbt')
def test_add_before_scripts(parsed_node, fal_dbt_class):
    """Before-scripts become graph nodes that are predecessors of their model.

    NOTE(review): the skipif/patch decorators had degraded to bare
    expressions in the source; restored here — confirm the patch order and
    targets against the repo.
    """
    graph = nx.DiGraph()
    node_lookup = {}
    modelA = create_mock_model(parsed_node, 'modelA', [], [], before_script_paths=['scriptA.py', 'scriptB.py'])
    fal_dbt_instance = fal_dbt_class('/dir', '/profiles')
    fal_dbt_instance.scripts_dir = '/dir'
    (graph, node_lookup) = _add_before_scripts(modelA, 'model.modelA', fal_dbt_instance, graph, node_lookup)
    # Both scripts registered, and both precede the model node in the DAG.
    assert_contains_only(list(node_lookup.keys()), ['script.modelA.BEFORE.scriptA.py', 'script.modelA.BEFORE.scriptB.py'])
    assert_contains_only(list(graph.predecessors('model.modelA')), ['script.modelA.BEFORE.scriptA.py', 'script.modelA.BEFORE.scriptB.py'])
class IPv6Ecn(MatchTest):
    """Match IPv6 packets on the ECN bits (ecn=2) of the traffic class."""
    def runTest(self):
        # 34525 == 0x86dd (IPv6 ethertype); tc low two bits carry ECN.
        match = ofp.match([ofp.oxm.eth_type(34525), ofp.oxm.ip_ecn(2)])
        matching = {
            'dscp=4 ecn=2': simple_tcpv6_packet(ipv6_tc=18),
            'dscp=6 ecn=2': simple_tcpv6_packet(ipv6_tc=26),
        }
        nonmatching = {
            'dscp=4 ecn=0': simple_tcpv6_packet(ipv6_tc=16),
            'dscp=4 ecn=3': simple_tcpv6_packet(ipv6_tc=19),
        }
        self.verify_match(match, matching, nonmatching)
class groupby_test_case(unittest.TestCase):
    """Tests for the _groupby helper."""
    def test_groupby(self):
        people = [{'id': 1, 'name': 'John'}, {'id': 2, 'name': 'Frank'}, {'id': 3, 'name': 'Tony'}, {'id': 4, 'name': 'Jimmy'}, {'id': 3, 'name': 'Sam'}, {'id': 1, 'name': 'Charles'}, {'id': 3, 'name': 'Bob'}, {'id': 4, 'name': 'Paul'}, {'id': 1, 'name': 'Michael'}]
        snapshot = _clone(people)
        grouped = _groupby(people, 'id')
        # The input list must not be mutated by grouping.
        self.assertEqual(people, snapshot)
        self.assertTrue(isinstance(grouped, dict))
        self.assertEqual(len(grouped), 4)
        for key, expected_size in ((1, 3), (2, 1), (3, 3), (4, 2)):
            self.assertTrue(key in grouped)
            self.assertTrue(isinstance(grouped[key], list))
            self.assertEqual(len(grouped[key]), expected_size)
    def test_groupby_with_wrong_input(self):
        # A dict and a list-of-lists are both invalid inputs.
        bad_inputs = (
            {'id': 1, 'name': 'John'},
            [[{'id': 1, 'name': 'John'}], [{'id': 2, 'name': 'Frank'}]],
        )
        for bad in bad_inputs:
            with self.assertRaises(ValueError):
                _ = _groupby(bad, 'id')
class TestWeightedMinHashLSHCassandra(unittest.TestCase):
    """MinHashLSH + WeightedMinHash integration tests against Cassandra storage.

    NOTE(review): the skip conditions had degraded into bare
    (condition, reason) tuple statements (no-ops, so the tests always ran);
    restored as @unittest.skipIf decorators.
    """

    @unittest.skipIf(not DO_TEST_CASSANDRA, 'Skipping test_cassandra__init')
    def test_cassandra__init(self):
        """Banding parameters respond to the FP/FN weighting."""
        lsh = MinHashLSH(threshold=0.8, storage_config=STORAGE_CONFIG_CASSANDRA)
        self.assertTrue(lsh.is_empty())
        (b1, r1) = (lsh.b, lsh.r)
        lsh = MinHashLSH(threshold=0.8, weights=(0.2, 0.8), storage_config=STORAGE_CONFIG_CASSANDRA)
        (b2, r2) = (lsh.b, lsh.r)
        self.assertTrue(b1 < b2)
        self.assertTrue(r1 > r2)

    @unittest.skipIf(not DO_TEST_CASSANDRA, 'Skipping test_cassandra__H')
    def test_cassandra__H(self):
        """Hashtable keys serialize to a consistent byte length."""
        mg = WeightedMinHashGenerator(100, sample_size=128)
        for l in range(2, (mg.sample_size + 1), 16):
            # NOTE(review): the randint upper bound was truncated in the
            # source; 99999999 reconstructed — any bound > 1 keeps the intent.
            m = mg.minhash(np.random.randint(1, 99999999, 100))
            lsh = MinHashLSH(num_perm=128, storage_config=STORAGE_CONFIG_CASSANDRA)
            lsh.insert('m', m)
            sizes = [len(H) for ht in lsh.hashtables for H in ht]
            self.assertTrue(all((sizes[0] == s) for s in sizes))

    @unittest.skipIf(not DO_TEST_CASSANDRA, 'Skipping test_cassandra__insert')
    def test_cassandra__insert(self):
        """Inserted keys land in every hashtable; wrong sample size rejected."""
        lsh = MinHashLSH(threshold=0.5, num_perm=4, storage_config=STORAGE_CONFIG_CASSANDRA)
        mg = WeightedMinHashGenerator(10, 4)
        m1 = mg.minhash(np.random.uniform(1, 10, 10))
        m2 = mg.minhash(np.random.uniform(1, 10, 10))
        lsh.insert('a', m1)
        lsh.insert('b', m2)
        for t in lsh.hashtables:
            self.assertTrue(len(t) >= 1)
            items = []
            for H in t:
                items.extend(t[H])
            self.assertTrue('a' in items)
            self.assertTrue('b' in items)
        self.assertTrue('a' in lsh)
        self.assertTrue('b' in lsh)
        for (i, H) in enumerate(lsh.keys['a']):
            self.assertTrue('a' in lsh.hashtables[i][H])
        # A minhash with a different sample size must be rejected.
        mg = WeightedMinHashGenerator(10, 5)
        m3 = mg.minhash(np.random.uniform(1, 10, 10))
        self.assertRaises(ValueError, lsh.insert, 'c', m3)

    @unittest.skipIf(not DO_TEST_CASSANDRA, 'Skipping test_cassandra__query')
    def test_cassandra__query(self):
        """Each inserted minhash retrieves its own key; size mismatch raises."""
        lsh = MinHashLSH(threshold=0.5, num_perm=4, storage_config=STORAGE_CONFIG_CASSANDRA)
        mg = WeightedMinHashGenerator(10, 4)
        m1 = mg.minhash(np.random.uniform(1, 10, 10))
        m2 = mg.minhash(np.random.uniform(1, 10, 10))
        lsh.insert('a', m1)
        lsh.insert('b', m2)
        result = lsh.query(m1)
        self.assertTrue('a' in result)
        result = lsh.query(m2)
        self.assertTrue('b' in result)
        mg = WeightedMinHashGenerator(10, 5)
        m3 = mg.minhash(np.random.uniform(1, 10, 10))
        self.assertRaises(ValueError, lsh.query, m3)

    @unittest.skipIf(not DO_TEST_CASSANDRA, 'Skipping test_cassandra__remove')
    def test_cassandra__remove(self):
        """Removal purges the key from keys and every hashtable bucket."""
        lsh = MinHashLSH(threshold=0.5, num_perm=4, storage_config=STORAGE_CONFIG_CASSANDRA)
        mg = WeightedMinHashGenerator(10, 4)
        m1 = mg.minhash(np.random.uniform(1, 10, 10))
        m2 = mg.minhash(np.random.uniform(1, 10, 10))
        lsh.insert('a', m1)
        lsh.insert('b', m2)
        lsh.remove('a')
        self.assertTrue('a' not in lsh.keys)
        for table in lsh.hashtables:
            for H in table:
                self.assertGreater(len(table[H]), 0)
                self.assertTrue('a' not in table[H])
        self.assertRaises(ValueError, lsh.remove, 'c')
class ChainSyncPerformanceTracker():
    """Accumulates chain-sync throughput (blocks/sec, txns/sec) between
    reports, smoothing the rates with exponential moving averages."""
    def __init__(self, head: BlockHeaderAPI) -> None:
        self.prev_head = head
        self.latest_head = head
        self.timer = Timer()
        self.blocks_per_second_ema = EMA(initial_value=0, smoothing_factor=0.05)
        self.transactions_per_second_ema = EMA(initial_value=0, smoothing_factor=0.05)
        self.num_transactions = 0
    def record_transactions(self, count: int) -> None:
        """Add *count* transactions to the current window's tally."""
        self.num_transactions += count
    def set_latest_head(self, head: BlockHeaderAPI) -> None:
        """Remember the most recently imported header."""
        self.latest_head = head
    def report(self) -> ChainSyncStats:
        """Produce stats for the window since the last report, then reset."""
        elapsed = self.timer.pop_elapsed()
        block_count = self.latest_head.block_number - self.prev_head.block_number
        self.blocks_per_second_ema.update(block_count / elapsed)
        self.transactions_per_second_ema.update(self.num_transactions / elapsed)
        stats = ChainSyncStats(
            prev_head=self.prev_head,
            latest_head=self.latest_head,
            elapsed=elapsed,
            num_blocks=block_count,
            blocks_per_second=self.blocks_per_second_ema.value,
            num_transactions=self.num_transactions,
            transactions_per_second=self.transactions_per_second_ema.value,
        )
        # Start a fresh measurement window.
        self.num_transactions = 0
        self.prev_head = self.latest_head
        return stats
def test_dataset_missing_values_metric_different_missing_values() -> None:
    """DatasetMissingValuesMetric: default missing-value set, then custom sets
    with and without replacing the defaults."""
    current = pd.DataFrame(
        {
            'category_feature_1': ['', 'n/a', '3'],
            'category_feature_2': ['', None, np.inf],
            'numerical_feature_1': [3, -9999, 0],
            'numerical_feature_2': [0, None, -np.inf],
            'prediction': [1, pd.NaT, 1],
            'target': [None, np.NAN, 1],
        }
    )
    mapping = ColumnMapping()

    def run_metric(metric):
        # Each metric configuration gets its own Report run; no reference data.
        report = Report(metrics=[metric])
        report.run(current_data=current, reference_data=None, column_mapping=mapping)
        return metric.get_result()

    # Default missing-value set.
    result = run_metric(DatasetMissingValuesMetric())
    assert result is not None
    assert result.current.different_missing_values == {None: 5, -np.inf: 1, np.inf: 1, '': 2}
    assert result.current.number_of_different_missing_values == 4
    assert result.current.number_of_missing_values == 9
    assert result.current.number_of_rows_with_missing_values == 3
    assert result.current.different_missing_values_by_column == {
        'category_feature_1': {None: 0, -np.inf: 0, np.inf: 0, '': 1},
        'category_feature_2': {None: 1, -np.inf: 0, np.inf: 1, '': 1},
        'numerical_feature_1': {None: 0, -np.inf: 0, np.inf: 0, '': 0},
        'numerical_feature_2': {None: 1, -np.inf: 1, np.inf: 0, '': 0},
        'prediction': {None: 1, -np.inf: 0, np.inf: 0, '': 0},
        'target': {None: 2, -np.inf: 0, np.inf: 0, '': 0},
    }
    assert result.current.number_of_different_missing_values_by_column == {
        'category_feature_1': 1,
        'category_feature_2': 3,
        'numerical_feature_1': 0,
        'numerical_feature_2': 2,
        'prediction': 1,
        'target': 1,
    }
    assert result.current.number_of_missing_values_by_column == {
        'category_feature_1': 1,
        'category_feature_2': 3,
        'numerical_feature_1': 0,
        'numerical_feature_2': 2,
        'prediction': 1,
        'target': 2,
    }
    assert result.reference is None

    # Extend the default set with 'n/a'.
    result = run_metric(DatasetMissingValuesMetric(missing_values=['n/a'], replace=False))
    assert result is not None
    assert result.current.number_of_different_missing_values == 5
    assert result.current.number_of_missing_values == 10
    assert result.reference is None

    # Replace the defaults entirely (None listed explicitly).
    result = run_metric(DatasetMissingValuesMetric(missing_values=['', 0, 'n/a', -9999, None], replace=True))
    assert result is not None
    assert result.current.number_of_different_missing_values == 5
    assert result.current.number_of_missing_values == 11
    assert result.reference is None

    # Replace the defaults without None.
    result = run_metric(DatasetMissingValuesMetric(missing_values=['', 0, 'n/a', -9999], replace=True))
    assert result is not None
    assert result.current.number_of_different_missing_values == 4
    assert result.current.number_of_missing_values == 6
    assert result.reference is None
class CommonKData(BaseDocType):
    """Elasticsearch document mapping for candlestick (K-line) market data:
    identity/naming fields plus OHLC prices, volume and turnover."""
    id = Keyword()
    timestamp = Date()  # bar timestamp
    updateTimestamp = Date()  # when this record was last updated
    securityId = Keyword()
    code = Keyword()
    name = Keyword()
    # OHLC price fields (names shadow builtins by design of the mapping).
    open = Float()
    close = Float()
    high = Float()
    low = Float()
    volume = Float()
    turnover = Float()
    class Meta():
        # Single ES doc type; disable the _all field and reject fields that
        # are not declared in this mapping.
        doc_type = 'doc'
        all = MetaField(enabled=False)
        dynamic = MetaField('strict')
def _vm_save_ip_from_json(vm, net, ipaddress, allowed_ips=False):
    """Associate *ipaddress* from subnet *net* with server *vm* and persist it.

    Creates the IPAddress row if it does not exist yet. When *allowed_ips*
    is true the address is attached via the many-to-many ``vms`` relation,
    otherwise it becomes the server's primary ``vm`` link.

    Returns:
        (ip, None) on success, or (ip, error_message) if the address is
        already taken by another server.
    """
    try:
        addr = net.ipaddress_set.get(ip=ipaddress)
    except IPAddress.DoesNotExist:
        # Unknown address: create a fresh record owned by this subnet.
        addr = IPAddress(subnet=net, ip=ipaddress, usage=IPAddress.VM_REAL)
        logger.warning('Adding new IP %s into subnet %s for server %s', addr.ip, net.name, vm)
    else:
        # Existing address with a primary VM attached cannot be reused.
        if addr.vm:
            return (addr, 'IP %s in subnet %s for server %s is already taken!' % (addr.ip, net.name, vm))
    if allowed_ips:
        addr.vms.add(vm)
    else:
        addr.vm = vm
    addr.save()
    logger.info('Server %s association with IP %s (%s) was successfully saved', vm, addr.ip, net.name)
    return (addr, None)
class Tick(Html.Html):
    """A clickable tick indicator: an icon plus an optional text label.

    Renders a <span> holding an icon (success-coloured by default) and,
    when *text* is provided, a trailing label.
    """
    requirements = (cssDefaults.ICON_FAMILY,)
    name = 'Tick'
    tag = 'span'

    def __init__(self, page: primitives.PageModel, position: str, icon: str, text: str,
                 tooltip: str, width: tuple, height: tuple, html_code: str,
                 options: Optional[dict], profile: Optional[Union[(bool, dict)]],
                 verbose: bool = False):
        self._options = options
        super(Tick, self).__init__(
            page, '', html_code=html_code, profile=profile,
            css_attrs={'width': width, 'height': height,
                       'float': 'left' if position is None else position},
            verbose=verbose)
        if tooltip is not None:
            self.tooltip(tooltip)
        self.add_span(text, css={'float': 'right'})
        # options is Optional[dict]: guard the lookup so a None value does not
        # raise AttributeError (previously `options.get(...)` was unguarded).
        self.add_icon(icon,
                      {'color': self.page.theme.success.base, 'margin': '2px',
                       'font-size': page.body.style.globals.font.normal()},
                      html_code=self.htmlCode,
                      family=(options or {}).get('icon_family'))
        self.icon.style.add_classes.div.background_hover()
        self.css({'margin': '5px 0', 'cursor': 'pointer'})
        self.style.css.float = position
        self.style.css.display = 'inline-block'
        self.css({'text-align': 'center'})
        if text is not None:
            self.span.css({'line-height': '%spx' % 25, 'vertical-align': 'middle'})
        self.icon.css({'border-radius': '%spx' % 25, 'width': '%spx' % 25,
                       'margin-right': 'auto', 'margin': 'auto', 'color': 'blue',
                       'line-height': '%s%s' % (25, width[1])})

    @property
    def dom(self) -> JsHtmlSelect.Tick:
        """Lazily-built DOM helper exposing the JavaScript bindings of this tick."""
        if self._dom is None:
            self._dom = JsHtmlSelect.Tick(self, page=self.page)
            self._dom.options = self._options
        return self._dom

    def __str__(self):
        return '<%(t)s %(a)s></%(t)s>' % {
            'a': self.get_attrs(css_class_names=self.style.get_classes()),
            't': self.tag}
class EmbeddedSequenceTests(unittest.TestCase):
    """Tests for embed_seq(data, tau, D): time-delay embedding of a 1-D series."""

    def setUp(self):
        # Samples 0..8 inclusive; embed_seq accepts any array-convertible sequence.
        self.data = range(0, 9)

    def test_embedded_sequence_1_4(self):
        # tau=1, D=4 -> six overlapping windows of four consecutive samples.
        # The previous assertEqual(a.all(), b.all()) compared two scalar truth
        # values and passed vacuously; compare the full arrays instead.
        numpy.testing.assert_array_equal(
            embed_seq(self.data, 1, 4),
            numpy.asarray([[0.0, 1.0, 2.0, 3.0],
                           [1.0, 2.0, 3.0, 4.0],
                           [2.0, 3.0, 4.0, 5.0],
                           [3.0, 4.0, 5.0, 6.0],
                           [4.0, 5.0, 6.0, 7.0],
                           [5.0, 6.0, 7.0, 8.0]]))

    def test_embedded_sequence_2_3(self):
        # tau=2, D=3 -> windows of three samples taken every other element.
        numpy.testing.assert_array_equal(
            embed_seq(self.data, 2, 3),
            numpy.asarray([[0.0, 2.0, 4.0],
                           [1.0, 3.0, 5.0],
                           [2.0, 4.0, 6.0],
                           [3.0, 5.0, 7.0],
                           [4.0, 6.0, 8.0]]))

    def test_embedded_sequence_4_1(self):
        # tau=4, D=1 -> each sample becomes its own single-element window.
        # Fixes a copy-paste bug: this test previously called embed_seq(self.data, 2, 3).
        numpy.testing.assert_array_equal(
            embed_seq(self.data, 4, 1),
            numpy.asarray([[0.0], [1.0], [2.0], [3.0], [4.0],
                           [5.0], [6.0], [7.0], [8.0]]))
class InventoryImporter(object):
    """Imports an inventory snapshot into an explain model.

    Streams resources, group memberships, roles and IAM policies out of a
    read-only inventory session and materialises them as model rows in the
    write session, flushing and committing in batches for throughput.
    """

    def __init__(self, session, readonly_session, model, dao, service_config,
                 inventory_index_id, *args, **kwargs):
        """Initialise the importer.

        Args:
            session: model write session.
            readonly_session: session used only to read the inventory.
            model: model table row describing this import.
            dao: data-access object exposing the model tables.
            service_config: service configuration, kept for callers' use.
            inventory_index_id: id of the inventory snapshot to import.
        """
        del args, kwargs  # accepted for interface compatibility, unused
        self.readonly_session = readonly_session
        self.session = session
        self.model = model
        self.dao = dao
        self.service_config = service_config
        self.inventory_index_id = inventory_index_id
        self.session.add(self.model)
        # Caches avoid re-querying / duplicating rows while streaming.
        self.role_cache = {}
        self.permission_cache = {}
        self.resource_cache = ResourceCache()
        self.membership_items = []
        self.membership_map = {}
        self.member_cache = {}
        self.member_cache_policies = {}
        self.groups_settings_cache = set()
        self.found_root = False

    def _flush_session(self):
        """Flush pending writes, rolling back (and logging) on SQL errors."""
        try:
            self.session.flush()
        except SQLAlchemyError:
            LOGGER.exception('Unexpected SQLAlchemyError occurred during model creation.')
            self.session.rollback()

    def _commit_session(self):
        """Commit pending writes, rolling back (and logging) on SQL errors."""
        try:
            self.session.commit()
        except SQLAlchemyError:
            LOGGER.exception('Unexpected SQLAlchemyError occurred during model creation.')
            self.session.rollback()

    def run(self):
        """Run the import: convert every inventory object into model rows.

        On any exception the traceback is stored on the model via
        set_error(); otherwise warnings are copied over and the model is
        marked done with the processed item count.
        """
        # Temporarily force explicit transaction control for batched writes.
        autocommit = self.session.autocommit
        autoflush = self.session.autoflush
        try:
            self.session.autocommit = False
            self.session.autoflush = True
            root = DataAccess.get_root(self.readonly_session, self.inventory_index_id)
            inventory_index = DataAccess.get(self.readonly_session, self.inventory_index_id)
            description = {
                'source': 'inventory',
                'source_info': {'inventory_index_id': self.inventory_index_id},
                'source_root': self._type_name(root),
                'pristine': True,
                'gsuite_enabled': DataAccess.type_exists(
                    self.readonly_session, self.inventory_index_id,
                    ['gsuite_group', 'gsuite_user'])}
            LOGGER.debug('Model description: %s', description)
            self.model.add_description(json.dumps(description, sort_keys=True))
            if root.get_resource_type() in ['organization']:
                LOGGER.debug('Root resource is organization: %s', root)
            else:
                LOGGER.debug('Root resource is not organization: %s.', root)
            item_counter = 0
            LOGGER.debug('Start storing resources into models.')
            # Resources first: converters rely on parents being cached already.
            for resource in DataAccess.iter(self.readonly_session, self.inventory_index_id, GCP_TYPE_LIST):
                item_counter += 1
                self._store_resource(resource)
                if not (item_counter % 1000):
                    LOGGER.debug('Flushing model write session: %s', item_counter)
                    self._flush_session()
                if not (item_counter % 100000):
                    LOGGER.debug('Commiting model write session: %s', item_counter)
                    self._commit_session()
            self._commit_session()
            LOGGER.debug('Finished storing resources into models.')
            # Derived data: roles, per-category policies, members, memberships.
            item_counter += self.model_action_wrapper(
                DataAccess.iter(self.readonly_session, self.inventory_index_id, ['role']),
                self._convert_role)
            item_counter += self.model_action_wrapper(
                DataAccess.iter(self.readonly_session, self.inventory_index_id, GCP_TYPE_LIST,
                                fetch_category=Categories.dataset_policy),
                self._convert_dataset_policy)
            item_counter += self.model_action_wrapper(
                DataAccess.iter(self.readonly_session, self.inventory_index_id, GCP_TYPE_LIST,
                                fetch_category=Categories.gcs_policy),
                self._convert_gcs_policy)
            item_counter += self.model_action_wrapper(
                DataAccess.iter(self.readonly_session, self.inventory_index_id, GCP_TYPE_LIST,
                                fetch_category=Categories.kubernetes_service_config),
                self._convert_service_config)
            self.model_action_wrapper(
                DataAccess.iter(self.readonly_session, self.inventory_index_id, GSUITE_TYPE_LIST),
                self._store_gsuite_principal)
            self.model_action_wrapper(
                DataAccess.iter(self.readonly_session, self.inventory_index_id, GCP_TYPE_LIST,
                                fetch_category=Categories.enabled_apis),
                self._convert_enabled_apis)
            self.model_action_wrapper(
                DataAccess.iter(self.readonly_session, self.inventory_index_id, MEMBER_TYPE_LIST,
                                with_parent=True),
                self._store_gsuite_membership,
                post_action=self._store_gsuite_membership_post)
            self.model_action_wrapper(
                DataAccess.iter(self.readonly_session, self.inventory_index_id, GROUPS_SETTINGS_LIST),
                self._store_groups_settings)
            self.dao.denorm_group_in_group(self.session)
            self.model_action_wrapper(
                DataAccess.iter(self.readonly_session, self.inventory_index_id, GCP_TYPE_LIST,
                                fetch_category=Categories.iam_policy),
                self._store_iam_policy)
            self.dao.expand_special_members(self.session)
        except Exception as e:
            LOGGER.exception(e)
            buf = StringIO()
            traceback.print_exc(file=buf)
            buf.seek(0)
            message = buf.read()
            LOGGER.debug('Importer has an exception: %s', message)
            self.model.set_error(message)
        else:
            LOGGER.debug('Set model status.')
            for row in inventory_index.warning_messages:
                self.model.add_warning('{}: {}'.format(row.resource_full_name, row.warning_message))
            self.model.set_done(item_counter)
        finally:
            LOGGER.debug('Finished running importer.')
            self.session.commit()
            # Restore the session's original transaction behaviour.
            self.session.autocommit = autocommit
            self.session.autoflush = autoflush

    def model_action_wrapper(self, inventory_iterable, action, post_action=None,
                             flush_count=1000, commit_count=50000):
        """Apply *action* to every inventory item, flushing/committing in batches.

        Args:
            inventory_iterable: iterable of inventory rows (or tuples that are
                unpacked into the action's arguments).
            action: callable invoked per item.
            post_action: optional callable invoked once after the loop.
            flush_count: flush the session every this many items.
            commit_count: commit the session every this many items.

        Returns:
            int: number of items processed.
        """
        LOGGER.debug('Performing model action: %s', action)
        idx = 0
        for (idx, inventory_data) in enumerate(inventory_iterable, start=1):
            LOGGER.debug('Processing inventory data: %s', inventory_data)
            if isinstance(inventory_data, tuple):
                action(*inventory_data)
            else:
                action(inventory_data)
            if not (idx % flush_count):
                LOGGER.debug('Flushing write session: %s.', idx)
                self._flush_session()
            if not (idx % commit_count):
                LOGGER.debug('Committing write session: %s', idx)
                self._commit_session()
        if idx % flush_count:
            # Flush the final partial batch.
            self._flush_session()
        if post_action:
            LOGGER.debug('Running post action: %s', post_action)
            post_action()
        LOGGER.debug('Committing model action: %s, with resource count: %s', action, idx)
        self._commit_session()
        return idx

    def _store_gsuite_principal(self, principal):
        """Store a GSuite user or group as a member row (deduplicated)."""
        gsuite_type = principal.get_resource_type()
        data = principal.get_resource_data()
        if gsuite_type == 'gsuite_user':
            member = 'user/{}'.format(data['primaryEmail'].lower())
        elif gsuite_type == 'gsuite_group':
            member = 'group/{}'.format(data['email'].lower())
        else:
            raise Exception('Unknown gsuite principal: {}'.format(gsuite_type))
        if member not in self.member_cache:
            (m_type, name) = member.split('/', 1)
            self.member_cache[member] = self.dao.TBL_MEMBER(name=member, type=m_type, member_name=name)
            self.session.add(self.member_cache[member])

    def _store_gsuite_membership_post(self):
        """Bulk-insert the collected group membership rows."""
        if not self.member_cache:
            return
        # Members must exist before membership rows reference them.
        self.session.flush()
        if self.membership_items:
            if get_sql_dialect(self.session) == 'sqlite':
                # sqlite cannot multi-insert a list of dicts in one statement.
                for item in self.membership_items:
                    stmt = self.dao.TBL_MEMBERSHIP.insert(item)
                    self.session.execute(stmt)
            else:
                stmt = self.dao.TBL_MEMBERSHIP.insert(self.membership_items)
                self.session.execute(stmt)

    def _store_gsuite_membership(self, child, parent):
        """Record that member *child* belongs to GSuite group *parent*."""
        def member_name(child):
            data = child.get_resource_data()
            return '{}/{}'.format(data['type'].lower(), data['email'].lower())
        member = member_name(child)
        if member not in self.member_cache:
            (m_type, name) = member.split('/', 1)
            self.member_cache[member] = self.dao.TBL_MEMBER(name=member, type=m_type, member_name=name)
            self.session.add(self.member_cache[member])
        # membership_map deduplicates (group, member) pairs before bulk insert.
        parent_group = group_name(parent)
        if parent_group not in self.membership_map:
            self.membership_map[parent_group] = set()
        if member not in self.membership_map[parent_group]:
            self.membership_map[parent_group].add(member)
            self.membership_items.append(dict(group_name=group_name(parent), members_name=member))

    def _store_groups_settings(self, settings):
        """Store one group's settings blob keyed by group email (deduplicated)."""
        settings_dict = settings.get_resource_data()
        group_email = group_name(settings)
        if group_email not in self.groups_settings_cache:
            self.groups_settings_cache.add(group_email)
            settings_row = dict(group_name=group_email,
                                settings=json.dumps(settings_dict, sort_keys=True))
            stmt = self.dao.TBL_GROUPS_SETTINGS.insert(settings_row)
            self.session.execute(stmt)

    def _store_iam_policy(self, policy):
        """Store a resource's IAM bindings and the policy resource row itself."""
        bindings = policy.get_resource_data().get('bindings', [])
        policy_type_name = self._type_name(policy)
        for binding in bindings:
            role = binding['role']
            if role not in self.role_cache:
                # Unknown role: record a warning and skip this binding.
                msg = 'Role reference in iam policy not found: {}'.format(role)
                self.model.add_warning(msg)
                continue
            members = set(binding['members'])
            db_members = set()
            for member in members:
                # Normalise 'type:email' to the cached 'type/email' form.
                member = member.replace(':', '/', 1).lower()
                if (member in self.member_cache) and (member not in db_members):
                    db_members.add(self.member_cache[member])
                    continue
                if (member not in self.member_cache) and (member not in self.member_cache_policies):
                    try:
                        (m_type, name) = member.split('/', 1)
                    except ValueError:
                        # Special members such as 'allUsers' have no '/' part.
                        (m_type, name) = (member, member)
                    self.member_cache_policies[member] = self.dao.TBL_MEMBER(
                        name=member, type=m_type, member_name=name)
                    self.session.add(self.member_cache_policies[member])
                db_members.add(self.member_cache_policies[member])
            binding_object = self.dao.TBL_BINDING(
                resource_type_name=policy_type_name, role_name=role, members=list(db_members))
            self.session.add(binding_object)
        self._convert_iam_policy(policy)

    def _store_resource(self, resource):
        """Dispatch a single inventory resource to its type-specific converter."""
        # NOTE(review): the compute_http(s)healthcheck and compute_targethttp(s)proxy
        # keys below were corrupted in a previous revision and have been
        # reconstructed; verify against the inventory type list.
        handlers = {
            'appengine_app': self._convert_gae_resource,
            'appengine_instance': self._convert_gae_instance_resource,
            'appengine_service': self._convert_gae_resource,
            'appengine_version': self._convert_gae_resource,
            'backendservice': self._convert_computeengine_resource,
            'bigquery_table': self._convert_bigquery_table,
            'billing_account': self._convert_billing_account,
            'bucket': self._convert_bucket,
            'cloudsqlinstance': self._convert_cloudsqlinstance,
            'composite_root': self._convert_composite_root,
            'compute_autoscaler': self._convert_computeengine_resource,
            'compute_backendbucket': self._convert_computeengine_resource,
            'compute_healthcheck': self._convert_computeengine_resource,
            'compute_httphealthcheck': self._convert_computeengine_resource,
            'compute_httpshealthcheck': self._convert_computeengine_resource,
            'compute_license': self._convert_computeengine_resource,
            'compute_project': self._convert_computeengine_resource,
            'compute_router': self._convert_computeengine_resource,
            'compute_sslcertificate': self._convert_computeengine_resource,
            'compute_targethttpproxy': self._convert_computeengine_resource,
            'compute_targethttpsproxy': self._convert_computeengine_resource,
            'compute_targetinstance': self._convert_computeengine_resource,
            'compute_targetpool': self._convert_computeengine_resource,
            'compute_targetsslproxy': self._convert_computeengine_resource,
            'compute_targettcpproxy': self._convert_computeengine_resource,
            'compute_targetvpngateway': self._convert_computeengine_resource,
            'compute_urlmap': self._convert_computeengine_resource,
            'compute_vpntunnel': self._convert_computeengine_resource,
            'crm_access_policy': self._convert_crm_access_policy,
            'crm_access_level': self._convert_crm_access_level,
            'crm_org_policy': self._convert_crm_org_policy,
            'crm_service_perimeter': self._convert_crm_service_perimeter,
            'dataproc_cluster': self._convert_dataproc_cluster,
            'dataset': self._convert_dataset,
            'disk': self._convert_computeengine_resource,
            'dns_managedzone': self._convert_clouddns_resource,
            'dns_policy': self._convert_clouddns_resource,
            'firewall': self._convert_computeengine_resource,
            'folder': self._convert_folder,
            'forwardingrule': self._convert_computeengine_resource,
            'image': self._convert_computeengine_resource,
            'instance': self._convert_computeengine_resource,
            'instancegroup': self._convert_computeengine_resource,
            'instancegroupmanager': self._convert_computeengine_resource,
            'instancetemplate': self._convert_computeengine_resource,
            'kms_cryptokey': self._convert_kms_resource,
            'kms_cryptokeyversion': self._convert_kms_ckv_resource,
            'kms_keyring': self._convert_kms_resource,
            'kubernetes_cluster': self._convert_kubernetes_cluster,
            'kubernetes_clusterrole': self._convert_kubernetes_clusterrole,
            'kubernetes_clusterrolebinding': self._convert_kubernetes_binding,
            'kubernetes_namespace': self._convert_kubernetes_namespace,
            'kubernetes_node': self._convert_kubernetes_node,
            'kubernetes_pod': self._convert_kubernetes_pod,
            'kubernetes_role': self._convert_kubernetes_role,
            'kubernetes_rolebinding': self._convert_kubernetes_rolebinding,
            'lien': self._convert_lien,
            'network': self._convert_computeengine_resource,
            'organization': self._convert_organization,
            'project': self._convert_project,
            'pubsub_subscription': self._convert_pubsub_resource,
            'pubsub_topic': self._convert_pubsub_resource,
            'serviceaccount': self._convert_serviceaccount,
            'serviceaccount_key': self._convert_serviceaccount_key,
            'sink': self._convert_sink,
            'snapshot': self._convert_computeengine_resource,
            'spanner_database': self._convert_spanner_db_resource,
            'spanner_instance': self._convert_spanner_resource,
            'subnetwork': self._convert_computeengine_resource,
            None: None,
        }
        res_type = resource.get_resource_type() if resource else None
        handler = handlers.get(res_type)
        if handler:
            handler(resource)
        else:
            self.model.add_warning('No handler for type "{}"'.format(res_type))

    def _convert_resource(self, resource, cached=False, display_key='name',
                          email_key='email', display_name=''):
        """Create the model resource row for an inventory resource.

        Args:
            resource: inventory resource to convert.
            cached: if True, cache the row so children can resolve their parent.
            display_key: data key used for the display name.
            email_key: data key used for the email column.
            display_name: explicit display name overriding display_key lookup.
        """
        data = resource.get_resource_data()
        if self._is_root(resource):
            (parent, type_name) = (None, self._type_name(resource))
            full_res_name = to_full_resource_name('', type_name)
        else:
            (parent, full_res_name, type_name) = self._full_resource_name(resource)
        row = self.dao.TBL_RESOURCE(
            cai_resource_name=resource.get_cai_resource_name(),
            cai_resource_type=resource.get_cai_resource_type(),
            full_name=full_res_name,
            type_name=type_name,
            name=resource.get_resource_id(),
            type=resource.get_resource_type(),
            display_name=(display_name or data.get(display_key, '')),
            email=(data.get(email_key, '') if isinstance(data, dict) else ''),
            data=resource.get_resource_data_raw(),
            parent=parent)
        self.session.add(row)
        if cached:
            self._add_to_cache(row, resource.id)

    # --- thin per-type converters; `cached=True` for types that act as parents ---

    def _convert_crm_access_level(self, crm_access_level):
        """Convert a CRM access level."""
        self._convert_resource(crm_access_level, cached=False, display_key='name')

    def _convert_crm_access_policy(self, crm_access_policy):
        """Convert a CRM access policy (parent of access levels/perimeters)."""
        self._convert_resource(crm_access_policy, cached=True, display_key='name')

    def _convert_crm_org_policy(self, crm_org_policy):
        """Convert a CRM org policy; display name falls back to the resource id."""
        self._convert_resource(crm_org_policy, cached=False, display_key='name',
                               display_name=crm_org_policy.get_resource_id())

    def _convert_crm_service_perimeter(self, crm_service_perimeter):
        """Convert a CRM service perimeter."""
        self._convert_resource(crm_service_perimeter, cached=False, display_key='name')

    def _convert_billing_account(self, billing_account):
        """Convert a billing account."""
        self._convert_resource(billing_account, cached=True, display_key='displayName')

    def _convert_bucket(self, bucket):
        """Convert a GCS bucket."""
        self._convert_resource(bucket, cached=True)

    def _convert_clouddns_resource(self, resource):
        """Convert a Cloud DNS resource."""
        self._convert_resource(resource, cached=False)

    def _convert_composite_root(self, resource):
        """Convert a composite root resource."""
        self._convert_resource(resource, cached=True)

    def _convert_computeengine_resource(self, resource):
        """Convert a Compute Engine resource."""
        self._convert_resource(resource, cached=False)

    def _convert_dataproc_cluster(self, cluster):
        """Convert a Dataproc cluster."""
        self._convert_resource(cluster, cached=True, display_key='clusterName')

    def _convert_dataset(self, dataset):
        """Convert a BigQuery dataset."""
        self._convert_resource(dataset, cached=True)

    def _convert_folder(self, folder):
        """Convert a CRM folder."""
        self._convert_resource(folder, cached=True, display_key='displayName')

    def _convert_gae_instance_resource(self, resource):
        """Convert an App Engine instance."""
        self._convert_resource(resource, cached=False)

    def _convert_gae_resource(self, resource):
        """Convert an App Engine app/service/version."""
        self._convert_resource(resource, cached=True)

    def _convert_kms_ckv_resource(self, resource):
        """Convert a KMS crypto key version."""
        self._convert_resource(resource, cached=False, display_key='name')

    def _convert_kms_resource(self, resource):
        """Convert a KMS keyring or crypto key."""
        self._convert_resource(resource, cached=True, display_key='name')

    def _convert_kubernetes_cluster(self, kubernetes_cluster):
        """Convert a Kubernetes cluster."""
        self._convert_resource(kubernetes_cluster, cached=True, display_key='kubernetesClusterName')

    def _convert_kubernetes_clusterrole(self, kubernetes_clusterrole):
        """Convert a Kubernetes cluster role."""
        self._convert_resource(kubernetes_clusterrole, cached=False, display_key='kubernetesClusterRole')

    def _convert_kubernetes_binding(self, kubernetes_clusterrolebinding):
        """Convert a Kubernetes cluster role binding."""
        self._convert_resource(kubernetes_clusterrolebinding, cached=False,
                               display_key='kubernetesClusterRoleBinding')

    def _convert_kubernetes_namespace(self, kubernetes_namespace):
        """Convert a Kubernetes namespace."""
        self._convert_resource(kubernetes_namespace, cached=True, display_key='kubernetesNamespace')

    def _convert_kubernetes_node(self, kubernetes_node):
        """Convert a Kubernetes node."""
        self._convert_resource(kubernetes_node, cached=False, display_key='kubernetesNode')

    def _convert_kubernetes_pod(self, kubernetes_pod):
        """Convert a Kubernetes pod."""
        self._convert_resource(kubernetes_pod, cached=False, display_key='kubernetesPod')

    def _convert_kubernetes_role(self, kubernetes_role):
        """Convert a Kubernetes role."""
        self._convert_resource(kubernetes_role, cached=False, display_key='kubernetesRole')

    def _convert_kubernetes_rolebinding(self, kubernetes_rolebinding):
        """Convert a Kubernetes role binding."""
        self._convert_resource(kubernetes_rolebinding, cached=False, display_key='kubernetesRoleBinding')

    def _convert_lien(self, lien):
        """Convert a CRM lien."""
        self._convert_resource(lien, cached=True)

    def _convert_organization(self, organization):
        """Convert the organization resource."""
        self._convert_resource(organization, cached=True, display_key='displayName')

    def _convert_pubsub_resource(self, resource):
        """Convert a Pub/Sub topic or subscription."""
        self._convert_resource(resource, cached=True)

    def _convert_project(self, project):
        """Convert a project."""
        self._convert_resource(project, cached=True)

    def _convert_serviceaccount(self, service_account):
        """Convert a service account."""
        self._convert_resource(service_account, cached=True,
                               display_key='displayName', email_key='email')

    def _convert_serviceaccount_key(self, service_account_key):
        """Convert a service account key."""
        self._convert_resource(service_account_key, cached=False)

    def _convert_sink(self, sink):
        """Convert a logging sink; its writer identity is stored as email."""
        self._convert_resource(sink, cached=False, email_key='writerIdentity')

    def _convert_spanner_db_resource(self, resource):
        """Convert a Spanner database."""
        self._convert_resource(resource, cached=False)

    def _convert_spanner_resource(self, resource):
        """Convert a Spanner instance."""
        self._convert_resource(resource, cached=True, display_key='displayName')

    def _convert_cloudsqlinstance(self, cloudsqlinstance):
        """Convert a Cloud SQL instance.

        The type name embeds the parent project id ('<project>:<instance>')
        to keep instance names unique across projects.
        """
        data = cloudsqlinstance.get_resource_data()
        (parent, full_res_name, type_name) = self._full_resource_name(cloudsqlinstance)
        parent_key = get_resource_id_from_type_name(parent.type_name)
        resource_identifier = '{}:{}'.format(parent_key, cloudsqlinstance.get_resource_id())
        type_name = to_type_name(cloudsqlinstance.get_resource_type(), resource_identifier)
        resource = self.dao.TBL_RESOURCE(
            cai_resource_name=cloudsqlinstance.get_cai_resource_name(),
            cai_resource_type=cloudsqlinstance.get_cai_resource_type(),
            full_name=full_res_name,
            type_name=type_name,
            name=cloudsqlinstance.get_resource_id(),
            type=cloudsqlinstance.get_resource_type(),
            display_name=data.get('name', ''),
            email=data.get('email', ''),
            data=cloudsqlinstance.get_resource_data_raw(),
            parent=parent)
        self.session.add(resource)

    def _convert_dataset_policy(self, dataset_policy):
        """Store a BigQuery dataset policy as a child resource of its dataset."""
        (parent, full_res_name) = self._get_parent(dataset_policy)
        policy_type_name = to_type_name(dataset_policy.get_category(),
                                        dataset_policy.get_resource_id())
        policy_res_name = to_full_resource_name(full_res_name, policy_type_name)
        resource = self.dao.TBL_RESOURCE(
            cai_resource_name=dataset_policy.get_cai_resource_name(),
            cai_resource_type=dataset_policy.get_cai_resource_type(),
            full_name=policy_res_name,
            type_name=policy_type_name,
            name=dataset_policy.get_resource_id(),
            type=dataset_policy.get_category(),
            data=dataset_policy.get_resource_data_raw(),
            parent=parent)
        self.session.add(resource)

    def _convert_enabled_apis(self, enabled_apis):
        """Store a project's enabled-APIs list as a child resource."""
        (parent, full_res_name) = self._get_parent(enabled_apis)
        apis_type_name = to_type_name(enabled_apis.get_category(),
                                      ':'.join(parent.type_name.split('/')))
        apis_res_name = to_full_resource_name(full_res_name, apis_type_name)
        resource = self.dao.TBL_RESOURCE(
            cai_resource_name=enabled_apis.get_cai_resource_name(),
            cai_resource_type=enabled_apis.get_cai_resource_type(),
            full_name=apis_res_name,
            type_name=apis_type_name,
            name=enabled_apis.get_resource_id(),
            type=enabled_apis.get_category(),
            data=enabled_apis.get_resource_data_raw(),
            parent=parent)
        self.session.add(resource)

    def _convert_gcs_policy(self, gcs_policy):
        """Store a GCS bucket ACL policy as a child resource of its bucket."""
        (parent, full_res_name) = self._get_parent(gcs_policy)
        policy_type_name = to_type_name(gcs_policy.get_category(), gcs_policy.get_resource_id())
        policy_res_name = to_full_resource_name(full_res_name, policy_type_name)
        resource = self.dao.TBL_RESOURCE(
            cai_resource_name=gcs_policy.get_cai_resource_name(),
            cai_resource_type=gcs_policy.get_cai_resource_type(),
            full_name=policy_res_name,
            type_name=policy_type_name,
            name=gcs_policy.get_resource_id(),
            type=gcs_policy.get_category(),
            data=gcs_policy.get_resource_data_raw(),
            parent=parent)
        self.session.add(resource)

    def _convert_iam_policy(self, iam_policy):
        """Store an IAM policy blob as a child resource of its target."""
        (_, full_res_name) = self._get_parent(iam_policy)
        parent_type_name = self._type_name(iam_policy)
        iam_policy_type_name = to_type_name(iam_policy.get_category(),
                                            ':'.join(parent_type_name.split('/')))
        iam_policy_full_res_name = to_full_resource_name(full_res_name, iam_policy_type_name)
        resource = self.dao.TBL_RESOURCE(
            cai_resource_name=iam_policy.get_cai_resource_name(),
            cai_resource_type=iam_policy.get_cai_resource_type(),
            full_name=iam_policy_full_res_name,
            type_name=iam_policy_type_name,
            name=iam_policy.get_resource_id(),
            type=iam_policy.get_category(),
            data=iam_policy.get_resource_data_raw(),
            parent_type_name=parent_type_name)
        self.session.add(resource)

    def _convert_role(self, role):
        """Store a role with its permissions; custom roles also get a resource row."""
        data = role.get_resource_data()
        role_name = data.get('name')
        LOGGER.debug('Converting role: %s', role_name)
        LOGGER.debug('role data: %s', data)
        if role_name in self.role_cache:
            LOGGER.warning('Duplicate role_name: %s', role_name)
            return
        # Predefined roles are named 'roles/...'; everything else is custom.
        is_custom = not role_name.startswith('roles/')
        db_permissions = []
        if 'includedPermissions' not in data:
            self.model.add_warning('Role missing permissions: {}'.format(
                data.get('name', '<missing name>')))
        else:
            for perm_name in data['includedPermissions']:
                if perm_name not in self.permission_cache:
                    permission = self.dao.TBL_PERMISSION(name=perm_name)
                    self.permission_cache[perm_name] = permission
                    self.session.add(permission)
                db_permissions.append(self.permission_cache[perm_name])
        dbrole = self.dao.TBL_ROLE(
            name=role_name, title=data.get('title', ''), stage=data.get('stage', ''),
            description=data.get('description', ''), custom=is_custom,
            permissions=db_permissions)
        self.role_cache[data['name']] = dbrole
        self.session.add(dbrole)
        LOGGER.debug('Adding role %s to session', role_name)
        if is_custom:
            # Custom roles live under a project/org, so they are also resources.
            (parent, full_res_name, type_name) = self._full_resource_name(role)
            role_resource = self.dao.TBL_RESOURCE(
                cai_resource_name=role.get_cai_resource_name(),
                cai_resource_type=role.get_cai_resource_type(),
                full_name=full_res_name,
                type_name=type_name,
                name=role.get_resource_id(),
                type=role.get_resource_type(),
                display_name=data.get('title'),
                data=role.get_resource_data_raw(),
                parent=parent)
            self._add_to_cache(role_resource, role.id)
            self.session.add(role_resource)
            LOGGER.debug('Adding role resource :%s to session', role_name)
            LOGGER.debug('Role resource :%s', role_resource)

    def _convert_role_post(self):
        """Add cached permissions/roles to the session.

        NOTE(review): not wired as a post_action in run() and the converters
        already call session.add per object — confirm whether this is still
        needed before removing.
        """
        self.session.add_all(list(self.permission_cache.values()))
        self.session.add_all(list(self.role_cache.values()))

    def _convert_service_config(self, service_config):
        """Store a Kubernetes service config as a child resource of its cluster."""
        (parent, full_res_name) = self._get_parent(service_config)
        sc_type_name = to_type_name(service_config.get_category(), parent.type_name)
        sc_res_name = to_full_resource_name(full_res_name, sc_type_name)
        resource = self.dao.TBL_RESOURCE(
            cai_resource_name=service_config.get_cai_resource_name(),
            cai_resource_type=service_config.get_cai_resource_type(),
            full_name=sc_res_name,
            type_name=sc_type_name,
            name=service_config.get_resource_id(),
            type=service_config.get_category(),
            data=service_config.get_resource_data_raw(),
            parent=parent)
        self.session.add(resource)

    def _convert_bigquery_table(self, table):
        """Convert a BigQuery table."""
        self._convert_resource(table, cached=True)

    def _add_to_cache(self, resource, resource_id):
        """Cache a resource row so children can resolve it as their parent."""
        full_res_name = resource.full_name
        self.resource_cache[resource_id] = (resource, full_res_name)

    def _full_resource_name(self, resource):
        """Return (parent_row, full_resource_name, type_name) for a resource."""
        type_name = self._type_name(resource)
        (parent, full_res_name) = self._get_parent(resource)
        full_resource_name = to_full_resource_name(full_res_name, type_name)
        return (parent, full_resource_name, type_name)

    def _get_parent(self, resource):
        """Return the cached (parent_row, parent_full_name) of a resource."""
        parent_id = resource.get_parent_id()
        return self.resource_cache[parent_id]

    def _is_root(self, resource):
        """Return True for the first parentless resource seen (the model root)."""
        if not self.found_root:
            is_root = not resource.get_parent_id()
            if is_root:
                self.found_root = True
            return is_root
        return False

    def _type_name(self, resource):
        """Return the 'type/id' type name of a resource.

        Fix: the method previously lacked the ``self`` parameter, so every
        ``self._type_name(x)`` call raised TypeError.
        """
        return to_type_name(resource.get_resource_type(), resource.get_resource_id())
# The bare '.django_db' line was a corrupted '@pytest.mark.django_db'
# decorator (a SyntaxError as written); restored so the test gets DB access.
@pytest.mark.django_db
def test_agg_fields(monkeypatch, aggregate_models):
    """Aggregating by 'type' returns one row per type with exactly the
    'item', 'type' and 'aggregate' keys."""
    request = Mock()
    request.query_params = {}
    request.data = {'field': 'total_obligation', 'group': 'type', 'show_nulls': True}
    a = AggregateQuerysetMixin()
    agg = a.aggregate(request=request, queryset=Award.objects.all())
    # Three award types in the fixture -> three aggregate rows.
    assert agg.count() == 3
    fields = agg.first().keys()
    assert len(fields) == 3
    assert 'aggregate' in fields
    assert 'item' in fields
    assert 'type' in fields
def _create_plot_component():
    """Build a Chaco Plot containing a single image plot of a 2-D Gaussian
    surface, with pan and box-zoom tools attached."""
    # Sample a 600x600 grid over x in [0, 10], y in [0, 5].
    xs = linspace(0, 10, 600)
    ys = linspace(0, 5, 600)
    x, y = meshgrid(xs, ys)
    z = exp(-(x ** 2 + y ** 2) / 100)

    data = ArrayPlotData()
    data.set_data('imagedata', z)

    plot = Plot(data)
    renderer = plot.img_plot('imagedata', xbounds=(0, 10), ybounds=(0, 5),
                             colormap=viridis)[0]
    plot.title = 'My First Image Plot'
    plot.padding = 50

    # Pan on the whole plot; box zoom on the image renderer itself.
    plot.tools.append(PanTool(plot))
    renderer.overlays.append(ZoomTool(component=renderer, tool_mode='box',
                                      always_on=False))
    return plot
class OptionAutoComplete(OptionsInput):
    """Configuration options for the jQuery UI autocomplete widget.

    Each option is exposed as a property with a setter. The @property /
    @<name>.setter decorators were missing, so every second ``def`` silently
    shadowed its getter; they are restored here.
    """

    @property
    def appendTo(self):
        """Element the suggestion menu is appended to (None -> widget default)."""
        return self._config_get(None)

    @appendTo.setter
    def appendTo(self, value):
        self._config(value)

    @property
    def autoFocus(self):
        """Whether the first menu item gains focus automatically."""
        return self._config_get(False)

    @autoFocus.setter
    def autoFocus(self, value: bool):
        self._config(value)

    @property
    def classes(self):
        """Additional CSS classes added to the widget's elements."""
        return self._config_get([])

    @classes.setter
    def classes(self, value):
        self._config(value)

    @property
    def delay(self):
        """Milliseconds to wait after a keystroke before searching."""
        return self._config_get(300)

    @delay.setter
    def delay(self, value: int):
        self._config(value)

    @property
    def disabled(self):
        """Whether the autocomplete is disabled."""
        return self._config_get(True)

    @disabled.setter
    def disabled(self, value: bool):
        self._config(value)

    @property
    def minLength(self):
        """Minimum number of typed characters before a search is performed."""
        return self._config_get(0)

    @minLength.setter
    def minLength(self, value: int):
        self._config(value)

    def position(self, my='left top', at='left bottom', of=None, using=None,
                 within=None, collision=None):
        """Define where the suggestion menu opens relative to the input.

        Only the provided optional parts are written into the config.
        Returns self for chaining.
        """
        props = {'my': my, 'at': at, 'collision': collision or 'flip'}
        if of is not None:
            props['of'] = of
        if using is not None:
            props['using'] = using
        if within is not None:
            props['within'] = within
        self._config(props)
        return self

    @property
    def reset(self):
        """Reset flag for the component."""
        return self.get(False)

    @reset.setter
    def reset(self, flag: bool):
        self.set(flag)

    def on_select(self, js_funcs, profile=None):
        """Attach JavaScript callbacks executed when an item is selected."""
        js_funcs = JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)
        self._config(('function(e, ui) {var value = ui.item.value; %s}' % js_funcs),
                     js_type=True, name='select')

    @property
    def source(self):
        """Data source for the suggestions (list, URL or callback)."""
        return self._config_get([])

    @source.setter
    def source(self, value):
        self._config(value)

    def startswith(self, values: list):
        """Restrict suggestions to items starting with the typed term."""
        values = JsUtils.jsConvertData(values, None)
        self._config(('function(request, response) {\n var matcher = new RegExp("^" + $.ui.autocomplete.escapeRegex(request.term), "i");\n response($.grep(%s, function(item){return matcher.test(item);}) );\n }' % values), 'source', js_type=True)
# The bare '.slow' / '.skipif' lines were corrupted '@pytest.mark.*'
# decorators (SyntaxErrors as written); restored.
@pytest.mark.slow
@pytest.mark.skipif(not has_bitsandbytes, reason='requires bitsandbytes')
def test_quantization_on_non_gpu():
    """Loading a quantized model must be rejected on non-CUDA devices."""
    with pytest.raises(ValueError, match='only be performed on CUDA'):
        DollyV2Generator.from_hf_hub(
            name='databricks/dolly-v2-3b', device=None,
            quantization_config=BitsAndBytesConfig.for_8bit())
    with pytest.raises(ValueError, match='only be performed on CUDA'):
        DollyV2Generator.from_hf_hub(
            name='databricks/dolly-v2-3b', device=torch.device('cpu'),
            quantization_config=BitsAndBytesConfig.for_4bit())
def merge(a, b, path=None):
    """Recursively merge mapping ``b`` into mapping ``a`` in place.

    EntitySpec / SpecView values in ``b`` are first normalised to plain
    dicts (written back into ``b``). Nested dicts are merged recursively;
    any other conflicting value in ``b`` overwrites the one in ``a``.

    Returns:
        The mutated mapping ``a``.
    """
    if path is None:
        path = []
    for key in b:
        value = b[key]
        # Normalise wrapper objects to plain dicts, mirroring into b.
        if isinstance(value, EntitySpec):
            value = value.params
            b[key] = value
        if isinstance(value, SpecView):
            value = value.to_dict()
            b[key] = value
        if key not in a:
            a[key] = value
        elif isinstance(a[key], dict) and isinstance(value, dict):
            merge(a[key], value, path + [str(key)])
        elif a[key] != value:
            a[key] = value
        # equal non-dict values: nothing to do
    return a
def update_user_data(cognito_user_id, body):
    """Update a user's profile attributes in DynamoDB.

    The user item is keyed by PK == SK == 'USER#<cognito_user_id>'.
    Returns the DynamoDB response containing the updated attributes
    (ReturnValues='UPDATED_NEW').
    """
    user_key = 'USER#' + cognito_user_id
    attribute_names = {
        '#alias': 'User alias',
        '#pinyin': 'User alias pinyin',
        '#emoji': 'User alias emoji',
        '#char': 'Character set preference',
    }
    attribute_values = {
        ':u': body['user_alias'],
        ':p': body['user_alias_pinyin'],
        ':e': body['user_alias_emoji'],
        ':c': body['character_set_preference'],
    }
    return table.update_item(
        Key={'PK': user_key, 'SK': user_key},
        UpdateExpression='set #alias = :u, #pinyin = :p, #emoji = :e, #char = :c',
        ExpressionAttributeNames=attribute_names,
        ExpressionAttributeValues=attribute_values,
        ReturnValues='UPDATED_NEW')
# The bare '.django_db(transaction=True)' line was a corrupted
# '@pytest.mark.django_db(transaction=True)' decorator; restored.
@pytest.mark.django_db(transaction=True)
def test_download_failure_with_two_defc(client, monkeypatch, awards_and_transactions,
                                        elasticsearch_award_index):
    """Requesting a disaster download with two DEF codes is rejected with 400."""
    download_generation.retrieve_db_string = Mock(return_value=get_database_dsn_string())
    setup_elasticsearch_test(monkeypatch, elasticsearch_award_index)
    resp = _post(client, def_codes=['L', 'M'])
    assert resp.status_code == status.HTTP_400_BAD_REQUEST
    assert resp.json()['detail'] == 'The Disaster Download is currently limited to either all COVID-19 DEFC or a single COVID-19 DEFC.'
class SocialAuthTests(mixins.SocialAuthMixin, SchemaTestCase):
    """GraphQL schema tests for the socialAuth mutation.

    The actual test methods come from SocialAuthMixin; this class only
    supplies the GraphQL document and the schema under test.
    """
    # GraphQL document executed by the mixin's test helpers.
    query = '\n    mutation SocialAuth($provider: String!, $accessToken: String!) {\n      socialAuth(provider: $provider, accessToken: $accessToken) {\n        social {\n          uid\n          extraData\n        }\n      }\n    }'

    class Mutations(graphene.ObjectType):
        # Root mutation type exposing only the field under test.
        social_auth = graphql_social_auth.SocialAuth.Field()
class Probe(object):
    """eBPF probe that counts syscalls issued by a single target pid.

    Attaches a raw_syscalls:sys_enter tracepoint and accumulates a per-CPU
    histogram keyed by syscall id, filtered in-kernel to the target pid.
    """

    def __init__(self, target_pid, max_syscalls=MAX_SYSCALLS):
        # pid whose syscalls are counted; everything else is filtered in-kernel.
        self.target_pid = target_pid
        self.comm = comm_for_pid(self.target_pid)
        if (self.comm is None):
            # NOTE(review): quit() aborts the whole process from library code;
            # consider raising an exception instead — confirm callers first.
            print(("can't find comm for pid %d" % self.target_pid))
            quit()
        self.max_syscalls = max_syscalls
        # BPF C program template: the TARGET_PID / MAX_SYSCALLS placeholders
        # are substituted textually below before compilation.
        self.code = '\n    BPF_PERCPU_ARRAY(histogram, u32, MAX_SYSCALLS);\n\n    TRACEPOINT_PROBE(raw_syscalls, sys_enter)\n    {\n        // filter by target pid\n        u64 pid = bpf_get_current_pid_tgid() >> 32;\n        if(pid != TARGET_PID) {\n            return 0;\n        }\n\n        // populate histogram\n        u32 key = (u32)args->id;\n        u32 value = 0, *pval = NULL;\n        pval = histogram.lookup_or_try_init(&key, &value);\n        if(pval) {\n            *pval += 1;\n        }\n\n        return 0;\n    }\n    '.replace('TARGET_PID', str(self.target_pid)).replace('MAX_SYSCALLS', str(self.max_syscalls))

    def start(self):
        """Compile and load the BPF program; return the histogram table.

        Per-CPU values are combined with a sum reducer when read.
        """
        probe = BPF(text=self.code)
        return probe.get_table('histogram', reducer=(lambda x, y: (x + y)))
def example(page):
    """Build a flet demo: colored tiles that scatter randomly ('Again!') and
    animate back into a fixed mosaic layout ('Go!')."""
    size = 15        # tile edge, px
    gap = 3          # spacing between tiles, px
    duration = 2000  # animation duration, ms
    c1 = colors.PINK_500
    c2 = colors.AMBER_500
    c3 = colors.LIGHT_GREEN_500
    c4 = colors.DEEP_PURPLE_500
    # Palette used when scattering tiles to random colors.
    all_colors = [colors.AMBER_400, colors.AMBER_ACCENT_400, colors.BLUE_400, colors.BROWN_400, colors.CYAN_700, colors.DEEP_ORANGE_500, colors.CYAN_500, colors.INDIGO_600, colors.ORANGE_ACCENT_100, colors.PINK, colors.RED_600, colors.GREEN_400, colors.GREEN_ACCENT_200, colors.TEAL_ACCENT_200, colors.LIGHT_BLUE_500]
    # Target layout: (column, row, color) per tile.
    parts = [(0, 0, c1), (0, 1, c1), (0, 2, c1), (0, 3, c1), (0, 4, c1), (1, 0, c1), (1, 2, c1), (2, 0, c1), (4, 0, c2), (4, 1, c2), (4, 2, c2), (4, 3, c2), (4, 4, c2), (5, 4, c2), (6, 4, c2), (8, 0, c3), (9, 0, c3), (10, 0, c3), (8, 1, c3), (8, 2, c3), (9, 2, c3), (10, 2, c3), (8, 3, c3), (8, 4, c3), (9, 4, c3), (10, 4, c3), (12, 0, c4), (13, 0, c4), (14, 0, c4), (13, 1, c4), (13, 2, c4), (13, 3, c4), (13, 4, c4)]
    width = (16 * (size + gap))
    height = (15 * (size + gap))
    canvas = Stack(width=width, height=height, animate_scale=duration, animate_opacity=duration)
    # One animatable container per tile in the layout.
    for i in range(len(parts)):
        canvas.controls.append(Container(animate=duration, animate_position=duration, animate_rotation=duration))
    def randomize(e):
        """Scatter every tile to a random position/size/color/rotation."""
        random.seed()
        for i in range(len(parts)):
            c = canvas.controls[i]
            part_size = random.randrange(int((size / 2)), int((size * 3)))
            c.left = random.randrange(0, width)
            c.top = random.randrange(0, height)
            c.bgcolor = all_colors[random.randrange(0, len(all_colors))]
            c.width = part_size
            c.height = part_size
            c.border_radius = random.randrange(0, int((size / 2)))
            c.rotate = (((random.randrange(0, 90) * 2) * pi) / 360)
        # Zoom out and fade while scattered.
        canvas.scale = 5
        canvas.opacity = 0.3
        go_button.visible = True
        again_button.visible = False
        page.update()
    def assemble(e):
        """Animate every tile back to its slot in ``parts``."""
        i = 0
        for (left, top, bgcolor) in parts:
            c = canvas.controls[i]
            c.left = (left * (size + gap))
            c.top = (top * (size + gap))
            c.bgcolor = bgcolor
            c.width = size
            c.height = size
            c.border_radius = 5
            c.rotate = 0
            i += 1
        canvas.scale = 1
        canvas.opacity = 1
        go_button.visible = False
        again_button.visible = True
        page.update()
    go_button = ElevatedButton('Go!', on_click=assemble, visible=True)
    again_button = ElevatedButton('Again!', on_click=randomize, visible=False)
    # Start in the scattered state.
    randomize(None)
    return ft.Container(expand=True, alignment=ft.alignment.center, content=ft.Column(alignment=ft.MainAxisAlignment.CENTER, horizontal_alignment=ft.CrossAxisAlignment.CENTER, tight=True, controls=[canvas, go_button, again_button]))
# NOTE(review): the bare-string lines here and below look like truncated
# decorators -- presumably "@mock.patch('bodhi.server.buildsys.log.debug')"
# and friends; confirm against VCS.
('bodhi.server.buildsys.log.debug')
class TestWaitForTasks():
    """Tests for ``buildsys.wait_for_tasks``."""
    ('bodhi.server.buildsys.time.sleep')
    def test_wait_on_unfinished_task(self, sleep, debug):
        """Unfinished tasks are polled (with sleeps) until they complete."""
        tasks = [1, 2, 3]
        session = mock.MagicMock()
        # Task 2 reports unfinished twice before completing.
        session.taskFinished.side_effect = [True, False, False, True, True]
        session.getTaskInfo.return_value = {'state': koji.TASK_STATES['CLOSED']}
        ret = buildsys.wait_for_tasks(tasks, session, sleep=0.01)
        assert (ret == [])
        assert (debug.mock_calls == [mock.call('Waiting for 3 tasks to complete: [1, 2, 3]'), mock.call('3 tasks completed successfully, 0 tasks failed.')])
        assert (session.taskFinished.mock_calls == [mock.call(1), mock.call(2), mock.call(2), mock.call(2), mock.call(3)])
        assert (sleep.mock_calls == [mock.call(0.01), mock.call(0.01)])
        assert (session.getTaskInfo.mock_calls == [mock.call(1), mock.call(2), mock.call(3)])
    def test_with_failed_task(self, debug):
        """A task in FAILED state is returned in the result list."""
        tasks = [1, 2, 3]
        session = mock.MagicMock()
        session.taskFinished.side_effect = [True, True, True]
        session.getTaskInfo.side_effect = [{'state': koji.TASK_STATES['CLOSED']}, {'state': koji.TASK_STATES['FAILED']}, {'state': koji.TASK_STATES['CLOSED']}]
        ret = buildsys.wait_for_tasks(tasks, session, sleep=0.01)
        assert (ret == [2])
        assert (debug.mock_calls == [mock.call('Waiting for 3 tasks to complete: [1, 2, 3]'), mock.call('2 tasks completed successfully, 1 tasks failed.')])
        assert (session.taskFinished.mock_calls == [mock.call(1), mock.call(2), mock.call(3)])
        assert (session.getTaskInfo.mock_calls == [mock.call(1), mock.call(2), mock.call(3)])
    def test_with_falsey_task(self, debug):
        """Falsey task ids are skipped with a debug message."""
        tasks = [1, False, 3]
        session = mock.MagicMock()
        session.taskFinished.side_effect = [True, True]
        session.getTaskInfo.side_effect = [{'state': koji.TASK_STATES['CLOSED']}, {'state': koji.TASK_STATES['CLOSED']}]
        ret = buildsys.wait_for_tasks(tasks, session, sleep=0.01)
        assert (ret == [])
        assert (debug.mock_calls == [mock.call('Waiting for 3 tasks to complete: [1, False, 3]'), mock.call('Skipping task: False'), mock.call('3 tasks completed successfully, 0 tasks failed.')])
        assert (session.taskFinished.mock_calls == [mock.call(1), mock.call(3)])
        assert (session.getTaskInfo.mock_calls == [mock.call(1), mock.call(3)])
    def test_with_successful_tasks(self, debug):
        """All-CLOSED tasks yield an empty failure list."""
        tasks = [1, 2, 3]
        session = mock.MagicMock()
        session.taskFinished.side_effect = [True, True, True]
        session.getTaskInfo.side_effect = [{'state': koji.TASK_STATES['CLOSED']}, {'state': koji.TASK_STATES['CLOSED']}, {'state': koji.TASK_STATES['CLOSED']}]
        ret = buildsys.wait_for_tasks(tasks, session, sleep=0.01)
        assert (ret == [])
        assert (debug.mock_calls == [mock.call('Waiting for 3 tasks to complete: [1, 2, 3]'), mock.call('3 tasks completed successfully, 0 tasks failed.')])
        assert (session.taskFinished.mock_calls == [mock.call(1), mock.call(2), mock.call(3)])
        assert (session.getTaskInfo.mock_calls == [mock.call(1), mock.call(2), mock.call(3)])
    ('bodhi.server.buildsys.get_session')
    def test_without_session(self, get_session, debug):
        """Without an explicit session, wait_for_tasks creates one itself."""
        tasks = [1, 2, 3]
        get_session.return_value.taskFinished.side_effect = [True, True, True]
        get_session.return_value.getTaskInfo.side_effect = [{'state': koji.TASK_STATES['CLOSED']}, {'state': koji.TASK_STATES['CLOSED']}, {'state': koji.TASK_STATES['CLOSED']}]
        ret = buildsys.wait_for_tasks(tasks, sleep=0.01)
        assert (ret == [])
        assert (debug.mock_calls == [mock.call('Waiting for 3 tasks to complete: [1, 2, 3]'), mock.call('3 tasks completed successfully, 0 tasks failed.')])
        get_session.assert_called_once_with()
        assert (get_session.return_value.taskFinished.mock_calls == [mock.call(1), mock.call(2), mock.call(3)])
        assert (get_session.return_value.getTaskInfo.mock_calls == [mock.call(1), mock.call(2), mock.call(3)])
def extractXiaoyaoiplaygroundWordpressCom(item):
    """Build a release message for the xiaoyaoiplayground WordPress feed.

    Returns None for items without a chapter/volume or marked as previews,
    a release message when one of the known tags matches, and False when
    no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (feed tag, canonical series name, translation type)
    tagmap = [('The Shamans Poison', 'Poison of the Human Panacea', 'translated'), ('Poison of the Human Panacea', 'Poison of the Human Panacea', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def exposed_load_feed_names_from_file(json_file):
    """Load a {feed_url: title} JSON mapping and backfill feed nice-names.

    For each URL, looks up the existing nice-name; when it is missing or
    merely echoes the URL, the feed name is (re)pointed at the netloc with
    the supplied title. Each URL runs in its own DB session, and errors are
    printed and swallowed so one bad row does not abort the run.
    """
    with open(json_file) as fp:
        df = json.load(fp)
    for (url, title) in df.items():
        try:
            with db.session_context() as sess:
                fname = WebMirror.OutputFilters.util.feedNameLut.getNiceName(sess, url, debug=True)
                # "bad" = no nice-name, or the name is just a substring of the URL.
                bad = ((not fname) or (fname and (fname in url)))
                netloc = urllib.parse.urlparse(url).netloc
                if bad:
                    _update_feed_name(sess, netloc, netloc, title)
                print((url, title, fname, bad))
        except Exception as e:
            print('Wat?')
            print(e)
class Consist(Model):
    """Model exercising the 'is' consistency validators (email/url/ip/image
    and list-of-email, with and without a custom splitter)."""
    email = Field()
    url = Field()
    ip = Field()
    image = Field.upload()
    emails = Field.string_list()
    emailsplit = Field.string_list()
    # 'emailsplit' additionally splits raw input on ',' or ';' before
    # validating each element as an email address.
    validation = {'email': {'is': 'email'}, 'url': {'is': 'url'}, 'ip': {'is': 'ip'}, 'image': {'is': 'image'}, 'emails': {'is': 'list:email'}, 'emailsplit': {'is': {'list:email': {'splitter': ',;'}}}}
class OptionPlotoptionsPieSonificationContexttracksMappingTime(Options):
    """Generated option accessor for Highcharts sonification time mapping.

    NOTE(review): every name below is defined twice (a zero-argument getter
    form, then a one-argument setter form); without decorators the second
    ``def`` shadows the first at class-creation time. This looks like
    stripped ``@property`` / ``@<name>.setter`` decorators -- confirm
    against the generator's original output.
    """
    def mapFunction(self):
        # Getter: presumably returns the configured value (default None).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: stores the value verbatim (not emitted as raw JS).
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class FirmwareHeader():
    """Parsed view of a fixed-layout 1024-byte firmware header.

    Fields are sliced/unpacked eagerly from ``file_content`` starting at
    ``offset``; raw byte fields are kept as ``bytes``, while version/length/
    type are decoded big-endian.
    """
    header_length = 1024
    magic_string = b'\x10\x12 \x03'

    # (attribute, byte range relative to the header start) for raw fields.
    _RAW_FIELDS = (('magic', 0, 4), ('target', 4, 8), ('variant', 8, 12), ('base', 20, 24), ('checksum', 24, 28), ('signature', 76, 332), ('key_blob', 588, 844))

    def __init__(self, file_content: bytes, offset: int=0, is_subheader: bool=False):
        """Slice and decode all header fields at ``offset``."""
        self.file_content = file_content
        self.is_subheader = is_subheader
        self.offset = offset
        raw = file_content[offset:]
        for attr, lo, hi in self._RAW_FIELDS:
            setattr(self, attr, raw[lo:hi])
        # Decoded fields: 4 signed version bytes, then two big-endian int32s.
        self.version = struct.unpack('>4b', raw[12:16])
        self.length = struct.unpack('>i', raw[16:20])[0]
        self.type = struct.unpack('>i', raw[28:32])[0]

    def __str__(self):
        """Human-readable dump; bytes fields are rendered as hex."""
        kind = 'Sub-' if self.is_subheader else ''
        lines = [f'Firmware {kind}Header at offset {self.offset}:']
        for attribute in ['magic', 'target', 'variant', 'version', 'length', 'base', 'checksum', 'type', 'signature', 'key_blob']:
            value = getattr(self, attribute)
            rendered = f'0x{value.hex()}' if isinstance(value, bytes) else str(value)
            lines.append(f'{attribute}: {rendered}')
        return '\n'.join(lines)

    def next_header_exists(self, next_offset):
        """True when a full header with the magic marker starts at ``next_offset``."""
        if next_offset <= self.offset:
            return False
        if len(self.file_content) < (next_offset + self.header_length):
            return False
        return self._magic_matches(next_offset)

    def _magic_matches(self, offset):
        # Compare the 4 magic bytes at ``offset`` against the known marker.
        return self.file_content[offset:(offset + 4)] == self.magic_string
def test_batch_stateful_many():
    """Stateful requests sharing source_id and model must land in one batch;
    a request for a different model gets its own batch."""
    uid_generator = string_generator()
    # Requests 1 and 3: same source/model -> expected to share a batch.
    request_info1 = dm.RequestInfo(input=np.array(range(10)), parameters={'gif_id': 12})
    request_object1 = dm.RequestObject(uid=next(uid_generator), source_id='internal_123_123', request_info=request_info1, model=stub_stateful_model)
    # Request 2: different source and model -> its own batch.
    request_info2 = dm.RequestInfo(input=np.array(range(10)), parameters={})
    request_object2 = dm.RequestObject(uid=next(uid_generator), source_id='internal_123_124', request_info=request_info2, model=blur_stateful_model)
    request_info3 = dm.RequestInfo(input=np.array(range(10)), parameters={'gif_id': 12})
    request_object3 = dm.RequestObject(uid=next(uid_generator), source_id='internal_123_123', request_info=request_info3, model=stub_stateful_model)
    batch_uid_generator = string_generator()
    expected_value = dm.Batches(batches=[dm.BatchObject(uid=next(batch_uid_generator), requests_info=[request_object1.request_info, request_object3.request_info], model=stub_stateful_model, request_objects=[request_object1, request_object3], source_id=request_object1.source_id), dm.BatchObject(uid=next(batch_uid_generator), model=blur_stateful_model, requests_info=[request_object2.request_info], request_objects=[request_object2], source_id=request_object2.source_id)])
    result_value = build_batches([request_object1, request_object2, request_object3], uid_generator=string_generator())
    assert (result_value == expected_value)
class _FlowSpecBitmask(_FlowSpecOperatorBase):
    """BGP Flow-Spec component matched via bitmask operators ('==' / '!=')."""
    NOT = (1 << 1)    # operator bit: value must NOT match the mask
    MATCH = (1 << 0)  # operator bit: value must match the mask exactly
    _comparison_conditions = {'!=': NOT, '==': MATCH}
    # Mapping of flag bit -> printable name; populated by subclasses.
    _bitmask_flags = {}
    # NOTE(review): first parameter is named ``cls`` -- this looks like a
    # ``@classmethod`` whose decorator was lost in extraction; confirm.
    def _to_value(cls, value):
        """Resolve a symbolic flag name to its class attribute value."""
        try:
            return cls.__dict__[value]
        except KeyError:
            raise ValueError(('Invalid params: %s="%s"' % (cls.COMPONENT_NAME, value)))
    def to_str(self):
        """Render this component as e.g. '&==FLAG1+FLAG2'."""
        string = ''
        # AND is inherited from the operator base class.
        if (self.operator & self.AND):
            string += '&'
        operator = (self.operator & (self.NOT | self.MATCH))
        for (k, v) in self._comparison_conditions.items():
            if (operator == v):
                string += k
        # Join the names of all set flag bits with '+'.
        plus = ''
        for (k, v) in self._bitmask_flags.items():
            if (self.value & k):
                string += (plus + v)
                plus = '+'
        return string
class OptionSeriesWindbarbSonificationContexttracksMappingGapbetweennotes(Options):
    """Generated option accessor for Highcharts windbarb sonification
    gap-between-notes mapping.

    NOTE(review): each name is defined twice (getter form, then setter
    form); the second ``def`` shadows the first. Looks like stripped
    ``@property`` / ``@<name>.setter`` decorators -- confirm.
    """
    def mapFunction(self):
        # Getter: presumably returns the configured value (default None).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: stores the value verbatim (not emitted as raw JS).
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): truncated decorator -- presumably
# '@given(private_key_bytes=private_key_st)' (hypothesis); confirm.
(private_key_bytes=private_key_st)
def test_public_key_compression_is_equal(private_key_bytes, native_key_api, coincurve_key_api):
    """Native and coincurve backends must emit identical compressed pubkeys."""
    native_public_key = native_key_api.PrivateKey(private_key_bytes).public_key
    coincurve_public_key = coincurve_key_api.PrivateKey(private_key_bytes).public_key
    native_compressed_public_key = native_public_key.to_compressed_bytes()
    coincurve_compressed_public_key = coincurve_public_key.to_compressed_bytes()
    assert (native_compressed_public_key == coincurve_compressed_public_key)
def split(grpc_path, with_scheme=False):
    """Split a gRPC URL into (netloc, path).

    Falls back to ``nmduri()`` when ``grpc_path`` is falsy. Raises
    ValueError for URLs that do not use the ``grpc://`` scheme. With
    ``with_scheme`` the first element is '<scheme>://<netloc>' instead of
    the bare netloc.
    """
    target = grpc_path or nmduri()
    if target and not target.startswith('grpc://'):
        # Note: the message echoes the caller-supplied value, not the fallback.
        raise ValueError('Invalid grpc path to split: %s; `grpc` scheme missed!' % grpc_path)
    parsed = urlparse(target)
    if not with_scheme:
        return (parsed.netloc, parsed.path)
    return ('%s://%s' % (parsed.scheme, parsed.netloc), parsed.path)
def do_stats_numeric(series: pd.Series, updated_dict: dict) -> dict:
    """Populate descriptive statistics for a numeric series.

    Fills ``updated_dict['stats']`` (which must already exist) with max,
    mean, min, the 5/25/50/75/95th percentiles, range, IQR, std, variance,
    kurtosis, skewness, sum and coefficient of variation, then returns
    ``updated_dict``.
    """
    stats = updated_dict['stats']
    stats['max'] = series.max()
    stats['mean'] = series.mean()
    for (percentile, value) in series.quantile([0.95, 0.75, 0.5, 0.25, 0.05]).to_dict().items():
        stats[f'perc{int(percentile * 100)}'] = value
    stats['min'] = series.min()
    stats['range'] = (stats['max'] - stats['min'])
    stats['iqr'] = (stats['perc75'] - stats['perc25'])
    stats['std'] = series.std()
    stats['variance'] = series.var()
    stats['kurtosis'] = series.kurt()
    stats['skewness'] = series.skew()
    stats['sum'] = series.sum()
    # CV is undefined for a zero mean. np.nan replaces np.NaN, which was
    # removed in NumPy 2.0 (fix: AttributeError under modern NumPy).
    stats['cv'] = ((stats['std'] / stats['mean']) if stats['mean'] else np.nan)
    return updated_dict
def install(path: str, packages: list=None, node_server: bool=False, update: bool=False, verbose: bool=True, page: primitives.PageModel=None):
    """Install (or update) front-end packages into ``path``.

    With ``node_server`` the work is delegated to npm (update or install of
    missing packages). Otherwise, module and asset files are downloaded from
    the CDN into a local node_modules-style layout. When ``packages`` is
    omitted they are taken from ``page.imports.requirements`` (ValueError if
    both are missing).

    NOTE(review): in the non-npm branch ``packages`` is iterated without the
    list coercion done in the npm branch -- a bare string would be iterated
    character by character; confirm callers always pass a list there.
    """
    if (packages is None):
        if (page is None):
            raise ValueError('Package or page must be defined')
        packages = page.imports.requirements
    if node_server:
        if (not isinstance(packages, list)):
            packages = [packages]
        if update:
            # Let npm resolve and update everything in one shot.
            subprocess.run(('npm update %s' % ' '.join(packages)), shell=True, cwd=path)
        else:
            # Only install packages not already present in node_modules.
            to_be_installed = []
            for p in packages:
                package_path = os.path.join(path, 'node_modules', p)
                if (not os.path.exists(package_path)):
                    to_be_installed.append(p)
            if to_be_installed:
                subprocess.run(('npm install %s' % ' '.join(to_be_installed)), shell=True, cwd=path)
                if verbose:
                    logging.warning((' PYK_NPM >> All packages installed [%s]' % ' '.join(to_be_installed)))
            elif verbose:
                logging.warning(' PYK_NPM >> packages already installed')
    else:
        if (verbose and (not path.endswith('node_modules'))):
            logging.warning('NodeJs is using a node_modules folder.')
        for p in packages:
            # Skip packages already on disk unless an update was requested.
            if ((not os.path.exists(os.path.join(path, p))) or update):
                for category in [Imports.JS_IMPORTS, Imports.CSS_IMPORTS]:
                    # Fetch each module file from the CDN and mirror it locally.
                    for mod in category.get(p, {}).get('modules', []):
                        request = Request(Imports.script_cdnjs_path(p, mod))
                        response = urlopen(request).read()
                        script_path = Imports.script_npm_path(p, mod, path)
                        (mod_path, script) = os.path.split(script_path)
                        if (not os.path.exists(mod_path)):
                            os.makedirs(mod_path)
                        with open(script_path, 'wb') as f:
                            f.write(response)
                    # Same mirroring for static assets.
                    for mod in category.get(p, {}).get('assets', []):
                        request = Request(Imports.script_cdnjs_path(p, mod))
                        response = urlopen(request).read()
                        script_path = Imports.script_npm_path(p, mod, path)
                        (mod_path, script) = os.path.split(script_path)
                        if (not os.path.exists(mod_path)):
                            os.makedirs(mod_path)
                        with open(script_path, 'wb') as f:
                            f.write(response)
class OptionSeriesTreegraphSonificationDefaultspeechoptionsActivewhen(Options):
    """Generated option accessor for Highcharts treegraph sonification
    'activeWhen' speech options.

    NOTE(review): each name is defined twice (getter form, then setter
    form); the second ``def`` shadows the first. Looks like stripped
    ``@property`` / ``@<name>.setter`` decorators -- confirm.
    """
    def crossingDown(self):
        # Getter: presumably returns the configured value (default None).
        return self._config_get(None)
    def crossingDown(self, num: float):
        # Setter: stores the value verbatim (not emitted as raw JS).
        self._config(num, js_type=False)
    def crossingUp(self):
        return self._config_get(None)
    def crossingUp(self, num: float):
        self._config(num, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def prop(self):
        return self._config_get(None)
    def prop(self, text: str):
        self._config(text, js_type=False)
# NOTE(review): the two lines below look like truncated
# '@pytest.mark.integration_mysql' / '@pytest.mark.integration' decorators.
.integration_mysql
.integration
class TestMySQLConnectionTestSecretsAPI():
    """Integration tests for GET /connections/<key>/test against MySQL."""
    # NOTE(review): truncated decorator -- presumably
    # "@pytest.fixture(scope='function')".
    (scope='function')
    def url(self, oauth_client, policy, connection_config_mysql) -> str:
        """Endpoint URL for the MySQL connection under test."""
        return f'{V1_URL_PREFIX}{CONNECTIONS}/{connection_config_mysql.key}/test'
    def test_connection_configuration_test_not_authenticated(self, url, api_client: TestClient, db: Session, generate_auth_header, connection_config_mysql) -> None:
        """Unauthenticated requests get 401 and leave the config untouched."""
        assert (connection_config_mysql.last_test_timestamp is None)
        resp = api_client.get(url)
        assert (resp.status_code == 401)
        db.refresh(connection_config_mysql)
        assert (connection_config_mysql.last_test_timestamp is None)
        assert (connection_config_mysql.last_test_succeeded is None)
    def test_connection_configuration_test_incorrect_scopes(self, url, api_client: TestClient, db: Session, generate_auth_header, connection_config_mysql) -> None:
        """A token lacking CONNECTION_READ gets 403; config untouched."""
        assert (connection_config_mysql.last_test_timestamp is None)
        auth_header = generate_auth_header(scopes=[STORAGE_READ])
        resp = api_client.get(url, headers=auth_header)
        assert (resp.status_code == 403)
        db.refresh(connection_config_mysql)
        assert (connection_config_mysql.last_test_timestamp is None)
        assert (connection_config_mysql.last_test_succeeded is None)
    def test_connection_configuration_test_failed_response(self, url, api_client: TestClient, db: Session, generate_auth_header, connection_config_mysql) -> None:
        """Bad secrets still return 200 but report a failed test status."""
        assert (connection_config_mysql.last_test_timestamp is None)
        connection_config_mysql.secrets = {'host': 'invalid_host', 'dbname': 'mysql_example'}
        connection_config_mysql.save(db)
        auth_header = generate_auth_header(scopes=[CONNECTION_READ])
        resp = api_client.get(url, headers=auth_header)
        assert (resp.status_code == 200)
        body = json.loads(resp.text)
        db.refresh(connection_config_mysql)
        assert (connection_config_mysql.last_test_timestamp is not None)
        assert (connection_config_mysql.last_test_succeeded is False)
        assert (body['test_status'] == 'failed')
        assert ('Operational Error connecting to mysql db.' == body['failure_reason'])
        assert (body['msg'] == f'Test completed for ConnectionConfig with key: {connection_config_mysql.key}.')
    def test_connection_configuration_test(self, url, api_client: TestClient, db: Session, generate_auth_header, connection_config_mysql) -> None:
        """Valid secrets produce a succeeded test and update the timestamps."""
        assert (connection_config_mysql.last_test_timestamp is None)
        auth_header = generate_auth_header(scopes=[CONNECTION_READ])
        resp = api_client.get(url, headers=auth_header)
        assert (resp.status_code == 200)
        body = json.loads(resp.text)
        assert (body['msg'] == f'Test completed for ConnectionConfig with key: {connection_config_mysql.key}.')
        assert (body['failure_reason'] is None)
        assert (body['test_status'] == 'succeeded')
        db.refresh(connection_config_mysql)
        assert (connection_config_mysql.last_test_timestamp is not None)
        assert (connection_config_mysql.last_test_succeeded is True)
class JournalEntryLineDetailTests(unittest.TestCase):
    """Unit tests for the JournalEntryLineDetail default constructor."""

    def test_init(self):
        """A fresh instance must carry the documented default values."""
        detail = JournalEntryLineDetail()
        defaults = (
            ('PostingType', ''),
            ('TaxApplicableOn', 'Sales'),
            ('TaxAmount', 0),
            ('BillableStatus', None),
            ('Entity', None),
            ('AccountRef', None),
            ('ClassRef', None),
            ('DepartmentRef', None),
            ('TaxCodeRef', None),
        )
        for attribute, expected in defaults:
            self.assertEqual(getattr(detail, attribute), expected)
def test_global_blueprint(dash_duo):
    """Callbacks registered on the global blueprint must work through a
    DashProxy app."""
    app = _get_basic_dash_proxy()
    clientside_callback('function(x){return x;}', Output('log_client', 'children'), Input('btn', 'n_clicks'))
    # NOTE(review): the bare tuple below looks like a truncated
    # '@callback(...)' decorator for update_log; confirm against VCS.
    (Output('log_server', 'children'), Input('btn', 'n_clicks'))
    def update_log(n_clicks):
        return n_clicks
    _basic_dash_proxy_test(dash_duo, app)
    # Reset the module-level blueprint so later tests start clean.
    dash_extensions.enrich.GLOBAL_BLUEPRINT = DashBlueprint()
class OptionSeriesTilemapSonificationDefaultinstrumentoptionsActivewhen(Options):
    """Generated option accessor for Highcharts tilemap sonification
    'activeWhen' instrument options.

    NOTE(review): each name is defined twice (getter form, then setter
    form); the second ``def`` shadows the first. Looks like stripped
    ``@property`` / ``@<name>.setter`` decorators -- confirm.
    """
    def crossingDown(self):
        # Getter: presumably returns the configured value (default None).
        return self._config_get(None)
    def crossingDown(self, num: float):
        # Setter: stores the value verbatim (not emitted as raw JS).
        self._config(num, js_type=False)
    def crossingUp(self):
        return self._config_get(None)
    def crossingUp(self, num: float):
        self._config(num, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def prop(self):
        return self._config_get(None)
    def prop(self, text: str):
        self._config(text, js_type=False)
class OptionPlotoptionsGaugeSonificationDefaultinstrumentoptionsMappingNoteduration(Options):
    """Generated option accessor for Highcharts gauge sonification
    note-duration mapping.

    NOTE(review): each name is defined twice (getter form, then setter
    form); the second ``def`` shadows the first. Looks like stripped
    ``@property`` / ``@<name>.setter`` decorators -- confirm.
    """
    def mapFunction(self):
        # Getter: presumably returns the configured value (default None).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: stores the value verbatim (not emitted as raw JS).
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class JsBoolean(JsObject.JsObject):
    """Wrapper for a JavaScript Boolean value/expression."""
    _jsClass = 'Boolean'
    def __init__(self, data, js_code: Optional[str]=None, set_var: bool=False, is_py_data: bool=True, page: Optional[primitives.PageModel]=None, component: primitives.HtmlModel=None):
        """Wrap ``data``; Python-native data is serialised to its JS literal.

        NOTE(review): the ``is_py_data = True`` below is a no-op (the branch
        already requires it to be truthy) -- possibly intended to force True
        whenever ``data`` lacks 'varName'; confirm intent.
        """
        if ((not hasattr(data, 'varName')) and is_py_data):
            is_py_data = True
            data = json.dumps(data)
        super(JsBoolean, self).__init__(data, js_code, set_var, is_py_data, page=page, component=component)
    def not_(self):
        """Negate in place by prefixing '!' to the variable reference; returns self."""
        self.varName = ('!%s' % self.varId)
        return self
    def valueOf(self):
        """Return a new JsBoolean for the JS ``valueOf()`` expression."""
        return JsBoolean(('%s.valueOf()' % self.varId), is_py_data=False)
class ToolHistoryMixin(HasTraits):
    """Mixin that adds undo/redo-style state-history navigation to a tool.

    Maintains ``_history`` (a list of states) and ``_history_index`` (the
    cursor). The Esc / Ctrl-Left / Ctrl-Right keys (by default) reset or
    step through the history, invoking the ``_*_pressed`` hooks that
    subclasses override.
    """
    reset_state_key = Instance(KeySpec, args=('Esc',))
    prev_state_key = Instance(KeySpec, args=('Left', 'control'))
    next_state_key = Instance(KeySpec, args=('Right', 'control'))
    # Saved states and the cursor into them.
    _history = List
    _history_index = Int
    def _next_state_pressed(self):
        """Hook: called after the cursor advances. Default: no-op."""
        pass
    def _prev_state_pressed(self):
        """Hook: called after the cursor steps back. Default: no-op."""
        pass
    def _reset_state_pressed(self):
        """Hook: called after the cursor resets to 0. Default: no-op."""
        pass
    def _current_state(self):
        """Return the state at the current cursor position."""
        return self._history[self._history_index]
    def _reset_state(self, state):
        """Replace the entire history with the single ``state``."""
        self._history = [state]
        self._history_index = 0
    def _append_state(self, state, set_index=True):
        """Append ``state``, discarding any redo states beyond the cursor."""
        new_history = (self._history[:(self._history_index + 1)] + [state])
        self._history = new_history
        if set_index:
            self._history_index = (len(self._history) - 1)
    def _pop_state(self):
        """Pop and return the newest state (IndexError if history is empty)."""
        if (len(self._history) == 0):
            raise IndexError('Unable to pop empty history stack.')
        # Step the cursor back only when it referenced the popped entry.
        if (self._history_index == (len(self._history) - 1)):
            self._history_index -= 1
        return self._history.pop()
    def normal_key_pressed(self, event):
        """Key-event entry point while the tool is in its 'normal' state."""
        self._history_handle_key(event)
    def _history_handle_key(self, event):
        """Dispatch reset/prev/next history navigation for ``event``."""
        if ((self.reset_state_key is not None) and self.reset_state_key.match(event)):
            self._history_index = 0
            self._reset_state_pressed()
            event.handled = True
        elif ((self.prev_state_key is not None) and self.prev_state_key.match(event)):
            # Only step back when there is an earlier state.
            if (self._history_index > 0):
                self._history_index -= 1
                self._prev_state_pressed()
            event.handled = True
        elif ((self.next_state_key is not None) and self.next_state_key.match(event)):
            # Only advance when a later state exists.
            if (self._history_index <= (len(self._history) - 2)):
                self._history_index += 1
                self._next_state_pressed()
            event.handled = True
        else:
            return
class flow_stats_reply(stats_reply):
    """OpenFlow flow-stats reply message (wire version 6).

    NOTE(review): this looks like loxigen-generated code. ``unpack`` takes
    ``reader`` as its first parameter and is presumably a ``@staticmethod``
    whose decorator was lost in extraction. ``pack`` appends a text string
    of four NUL characters to a list of struct-packed data and joins with
    '' -- Python 2 era bytes/str semantics; this would fail on Python 3.
    Confirm before modifying.
    """
    version = 6
    type = 19       # OFPT_STATS_REPLY
    stats_type = 1  # OFPST_FLOW
    def __init__(self, xid=None, flags=None, entries=None):
        """Initialise with optional transaction id, flags and stats entries."""
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (entries != None):
            self.entries = entries
        else:
            self.entries = []
        return
    def pack(self):
        """Serialise the message; the length field is back-patched at index 2."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        # Overwrite the placeholder length written above.
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)
    def unpack(reader):
        """Parse a flow_stats_reply from ``reader``; asserts fixed fields."""
        obj = flow_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Constrain subsequent reads to this message's declared length.
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 1)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)
        obj.entries = loxi.generic_util.unpack_list(reader, ofp.common.flow_stats_entry.unpack)
        return obj
    def __eq__(self, other):
        """Field-wise equality (same concrete type required)."""
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.entries != other.entries):
            return False
        return True
    def pretty_print(self, q):
        """Render a human-readable dump through the pretty-printer ``q``."""
        q.text('flow_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('entries = ')
                q.pp(self.entries)
            q.breakable()
        q.text('}')
class Urlify(Validator):
    """Validator that converts a value into a URL-friendly slug.

    With ``check`` enabled, values that are not already in slug form are
    rejected instead of converted. ``keep_underscores`` preserves '_' in
    the output; the result is truncated to ``maxlen`` characters.
    """
    message = 'Not convertible to url'

    def __init__(self, maxlen=80, check=False, keep_underscores=False, message=None):
        super().__init__(message=message)
        self.maxlen = maxlen
        self.check = check
        self.message = message
        self.keep_underscores = keep_underscores

    def __call__(self, value):
        """Return (slug, None) on success or (value, error) when checking fails."""
        slug = self._urlify(value)
        if self.check and value != slug:
            return (value, translate(self.message))
        return (slug, None)

    def _urlify(self, s):
        """Lowercase, ASCII-fold and slugify ``s``."""
        slug = to_unicode(s).lower()
        # Strip accents by decomposing and dropping non-ASCII marks.
        slug = unicodedata.normalize('NFKD', slug)
        slug = slug.encode('ascii', 'ignore').decode('ascii')
        # Drop HTML entities such as '&amp;'.
        slug = re.sub('&\\w+?;', '', slug)
        if self.keep_underscores:
            slug = re.sub('\\s+', '-', slug)
            slug = re.sub('[^\\w\\-]', '', slug)
        else:
            slug = re.sub('[\\s_]+', '-', slug)
            slug = re.sub('[^a-z0-9\\-]', '', slug)
        # Collapse runs of separators and trim them from the edges.
        slug = re.sub('[-_][-_]+', '-', slug)
        slug = slug.strip('-')
        return slug[:self.maxlen]
class WebsocketProviderV2(PersistentConnectionProvider):
    """Persistent websocket JSON-RPC provider (v2-style request matching).

    Responses are matched to requests by id; out-of-order responses and
    ``eth_subscription`` notifications are cached on the request processor
    for later consumption.
    """
    logger = logging.getLogger('web3.providers.WebsocketProviderV2')
    is_async: bool = True
    _max_connection_retries: int = 5
    def __init__(self, endpoint_uri: Optional[Union[(URI, str)]]=None, websocket_kwargs: Optional[Dict[(str, Any)]]=None, request_timeout: Optional[float]=DEFAULT_PERSISTENT_CONNECTION_TIMEOUT) -> None:
        """Validate the endpoint URI and websocket kwargs.

        NOTE(review): ``URI(endpoint_uri)`` is applied before the None
        check -- confirm ``URI(None)`` is well-defined here.
        """
        self.endpoint_uri = URI(endpoint_uri)
        if (self.endpoint_uri is None):
            self.endpoint_uri = get_default_endpoint()
        if (not any((self.endpoint_uri.startswith(prefix) for prefix in VALID_WEBSOCKET_URI_PREFIXES))):
            raise Web3ValidationError(f"Websocket endpoint uri must begin with 'ws://' or 'wss://': {self.endpoint_uri}")
        if (websocket_kwargs is not None):
            # Reject kwargs the provider must control itself.
            found_restricted_keys = set(websocket_kwargs).intersection(RESTRICTED_WEBSOCKET_KWARGS)
            if found_restricted_keys:
                raise Web3ValidationError(f'Found restricted keys for websocket_kwargs: {found_restricted_keys}.')
        self.websocket_kwargs = merge(DEFAULT_WEBSOCKET_KWARGS, (websocket_kwargs or {}))
        super().__init__(endpoint_uri, request_timeout=request_timeout)
    def __str__(self) -> str:
        return f'Websocket connection: {self.endpoint_uri}'
    async def is_connected(self, show_traceback: bool=False) -> bool:
        """Probe the connection with a pong; optionally re-raise on failure."""
        if (not self._ws):
            return False
        try:
            (await self._ws.pong())
            return True
        except WebSocketException as e:
            if show_traceback:
                raise ProviderConnectionError(f"Error connecting to endpoint: '{self.endpoint_uri}'") from e
            return False
    async def connect(self) -> None:
        """Open the websocket, retrying with exponential backoff (x1.75)."""
        _connection_attempts = 0
        _backoff_rate_change = 1.75
        _backoff_time = 1.75
        while (_connection_attempts != self._max_connection_retries):
            try:
                _connection_attempts += 1
                self._ws = (await connect(self.endpoint_uri, **self.websocket_kwargs))
                break
            except WebSocketException as e:
                if (_connection_attempts == self._max_connection_retries):
                    raise ProviderConnectionError(f'Could not connect to endpoint: {self.endpoint_uri}. Retries exceeded max of {self._max_connection_retries}.') from e
                self.logger.info(f'Could not connect to endpoint: {self.endpoint_uri}. Retrying in {round(_backoff_time, 1)} seconds.', exc_info=True)
                (await asyncio.sleep(_backoff_time))
                _backoff_time *= _backoff_rate_change
    async def disconnect(self) -> None:
        """Close the websocket (if open) and clear all cached responses."""
        if ((self._ws is not None) and (not self._ws.closed)):
            (await self._ws.close())
            self._ws = None
            self.logger.debug(f'Successfully disconnected from endpoint: "{self.endpoint_uri}')
        self._request_processor.clear_caches()
    async def make_request(self, method: RPCEndpoint, params: Any) -> RPCResponse:
        """Send one JSON-RPC request and await the response matching its id."""
        request_data = self.encode_rpc_request(method, params)
        if (self._ws is None):
            raise ProviderConnectionError('Connection to websocket has not been initiated for the provider.')
        (await asyncio.wait_for(self._ws.send(request_data), timeout=self.request_timeout))
        current_request_id = json.loads(request_data)['id']
        response = (await self._get_response_for_request_id(current_request_id))
        return response
    async def _get_response_for_request_id(self, request_id: RPCId) -> RPCResponse:
        """Poll cache/socket until the response for ``request_id`` arrives.

        Non-matching responses are cached (subscription messages flagged as
        such) so other waiters can pick them up. Raises TimeExhausted when
        ``request_timeout`` elapses.
        """
        async def _match_response_id_to_request_id() -> RPCResponse:
            request_cache_key = generate_cache_key(request_id)
            while True:
                # Yield control so other coroutines can fill the cache.
                (await asyncio.sleep(0))
                if (request_cache_key in self._request_processor._request_response_cache):
                    self.logger.debug(f'Response for id {request_id} is already cached, pop it from the cache.')
                    return self._request_processor.pop_raw_response(cache_key=request_cache_key)
                elif (not self._ws_lock.locked()):
                    # Only one waiter may recv() from the socket at a time.
                    async with self._ws_lock:
                        self.logger.debug(f'Response for id {request_id} is not cached, calling `recv()` on websocket.')
                        try:
                            response = (await self._ws_recv(timeout=0.5))
                        except asyncio.TimeoutError:
                            continue
                        response_id = response.get('id')
                    if (response_id == request_id):
                        self.logger.debug(f'Received and returning response for id {request_id}.')
                        return response
                    else:
                        self.logger.debug('Undesired response received, caching.')
                        is_subscription = (response.get('method') == 'eth_subscription')
                        self._request_processor.cache_raw_response(response, subscription=is_subscription)
        try:
            return (await asyncio.wait_for(_match_response_id_to_request_id(), self.request_timeout))
        except asyncio.TimeoutError:
            raise TimeExhausted(f'Timed out waiting for response with request id `{request_id}` after {self.request_timeout} second(s). This may be due to the provider not returning a response with the same id that was sent in the request or an exception raised during the request was caught and allowed to continue.')
    async def _ws_recv(self, timeout: float=None) -> RPCResponse:
        """Receive and JSON-decode one websocket frame (with timeout)."""
        return json.loads((await asyncio.wait_for(self._ws.recv(), timeout=timeout)))
def get_bitwise_code(code_logic):
    """Embed ``code_logic`` into the shared brainfuck bitwise scaffold.

    The fixed prologue/epilogue segments below are generated brainfuck and
    are reproduced verbatim from the original implementation; only the
    per-bit ``code_logic`` snippet varies between callers.

    NOTE(review): the individual segments are undocumented in SOURCE, so no
    semantics are claimed for them here — they are kept byte-for-byte.
    """
    prologue = ''.join((
        '>' * 7,
        '[-]',
        '>',
        '>[-]<',
        '[-][',
        '<',
        '<[-]' * 5,
        '++',
        '<<',
        '[',
        '-',
        '>>-',
        '[>+>>+<<<-]>[<+>-]',
        '>>',
        '>>+<<',
        '-[',
        '<+',
        '<<++',
        '>' * 5,
        '--',
        '<<',
        '+',
        ']',
        '<<<<<',
        ']',
        '>>>>[<<<<+>>>>-]',
        '<<[-]++',
        '<',
        '[',
        '-',
        '>-',
        '[>+>>+<<<-]>[<+>-]',
        '>>',
        '>+<',
        '-[',
        '>--<',
        '<+',
        '<<++',
        '>>>',
        '+',
        ']',
        '<<<<',
        ']',
        '>>>[<<<+>>>-]',
        '>>',
    ))
    epilogue = ''.join((
        '>[<+<+>>-]<[>+<-]',
        '<',
        '[',
        '<',
        '[<+>-]',
        '<[>++<-]',
        '>>-',
        ']',
        '<',
        '[>>>>+<<<<-]',
        '>>>',
        '-' * 7,
        ']',
        '>[+-]',
    ))
    return prologue + code_logic + epilogue
class TestRunShell():
    """Tests for ``run_shell``: defaults, argument pass-through, ``check``
    handling, and stripping an active virtualenv from the child environment.
    """

    def test_should_call_and_populate_defaults(self):
        cmd = ['/usr/bin/env', 'true']
        process = run_shell(cmd)
        assert (process.returncode == 0)
        assert (process.args == cmd)
        # No capture was requested, so both streams stay unset.
        assert (not process.stderr)
        assert (not process.stdout)

    def test_should_call_with_args(self, fake_project):
        process = run_shell(cmd=['/usr/bin/env', 'true'], cwd=fake_project['root'], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        assert (process.returncode == 0)

    def test_should_raise_when_subprocess_failed_and_check_is_true(self):
        check = True
        with pytest.raises(subprocess.CalledProcessError):
            run_shell(cmd=['/usr/bin/env', 'false'], check=check, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

    def test_should_not_raise_when_subprocess_failed_and_check_is_false(self):
        check = False
        process = run_shell(cmd=['/usr/bin/env', 'false'], check=check, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        assert (process.returncode != 0)

    # BUG FIX: the two decorators below had lost their `@pytest.mark` /
    # `@mock.patch` prefixes, leaving bare `.parametrize(...)` and `(...)`
    # statements — a SyntaxError that made the whole module uncollectable.
    @pytest.mark.parametrize('env, expected', [({'SHELL': '/bin/bash', 'HOSTNAME': 'foobar', 'PWD': '/home/foobar/repos/fastapi-mvc', 'LOGNAME': 'foobar', 'HOME': '/home/foobar', 'USERNAME': 'foobar', 'LANG': 'en_GB.UTF-8', 'VIRTUAL_ENV': '/home/foobar/repos/fastapi-mvc/.venv', 'USER': 'foobar', 'PATH': '/home/foobar/repos/fastapi-mvc/.venv/bin:/home/foobar/bin:/home/foobar/.local/bin:/home/foobar/.poetry/bin:/home/foobar/bin:/home/foobar/.local/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin'}, {'SHELL': '/bin/bash', 'HOSTNAME': 'foobar', 'PWD': '/home/foobar/repos/fastapi-mvc', 'LOGNAME': 'foobar', 'HOME': '/home/foobar', 'USERNAME': 'foobar', 'LANG': 'en_GB.UTF-8', 'USER': 'foobar', 'PATH': '/home/foobar/bin:/home/foobar/.local/bin:/home/foobar/.poetry/bin:/home/foobar/bin:/home/foobar/.local/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin'}), ({'VIRTUAL_ENV': '/home/foobar/repos/fastapi-mvc/.venv', 'PATH': '/home/foobar/.local/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/home/foobar/repos/fastapi-mvc/.venv/bin'}, {'PATH': '/home/foobar/.local/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin'})])
    @mock.patch('fastapi_mvc.utils.shell.subprocess.run')
    def test_should_remove_venv_from_path_if_activated(self, run_mock, env, expected):
        # Run with the given environment and verify the venv entries were
        # removed from what `subprocess.run` actually receives.
        with mock.patch.dict(os.environ, env, clear=True):
            run_shell(['make', 'install'], '/path/to/execute')
        run_mock.assert_called_once_with(['make', 'install'], cwd='/path/to/execute', env=expected, check=False, stdout=None, stderr=None, input=None, capture_output=False)
class SVGParser(HTMLParser):
    """Re-serialize an SVG document while dropping noise.

    Skipped content: ``<metadata>`` subtrees and any ``rdf*``/``cc*``/``dc*``
    tags (with the character data inside them), ``xmlns*`` attributes, and
    ``<path>`` elements that carry no ``d`` attribute.
    """
    output = ''       # accumulated filtered markup
    ignore = 0        # depth inside metadata/rdf/cc/dc subtrees
    ignore_path = 0   # count of currently-open data-less <path> elements

    def handle_starttag(self, tag, attrs):
        # A <path> without a "d" attribute is dropped entirely.
        if tag == 'path' and all(name != 'd' for name, _ in attrs):
            self.ignore_path += 1
            return
        # Metadata / licensing subtrees are suppressed.
        if tag == 'metadata' or tag.startswith(('rdf', 'cc', 'dc')):
            self.ignore += 1
            return
        pieces = [f'<{tag}']
        for name, value in attrs:
            if name.startswith('xmlns'):
                continue
            pieces.append(f' {name}="{value}"')
        pieces.append('>')
        self.output += ''.join(pieces)

    def handle_endtag(self, tag):
        if tag == 'metadata' or tag.startswith(('rdf', 'cc', 'dc')):
            self.ignore -= 1
            return
        # Swallow the closing tag of a dropped data-less <path>.
        if tag == 'path' and self.ignore_path > 0:
            self.ignore_path -= 1
            return
        self.output += f'</{tag}>'

    def handle_data(self, data):
        # Character data is kept only outside suppressed subtrees.
        if not self.ignore:
            self.output += data
def form_partitions(sv_signatures, max_distance):
    """Split signatures into partitions of nearby signatures.

    Signatures are sorted by ``get_key()``; a new partition starts whenever
    the ``downstream_distance_to`` gap from the previous signature exceeds
    ``max_distance``. Returns a list of non-empty partitions (empty list for
    empty input).
    """
    partitions = []
    bucket = []
    for sig in sorted(sv_signatures, key=lambda s: s.get_key()):
        # Close the current bucket when the gap to this signature is too big.
        if bucket and bucket[-1].downstream_distance_to(sig) > max_distance:
            partitions.append(bucket)
            bucket = []
        bucket.append(sig)
    if bucket:
        partitions.append(bucket)
    return partitions
class AnaphoricityScorer(nn.Module):
    """Scores candidate antecedent pairs with a feed-forward network on top
    of concatenated pair representations.
    """

    def __init__(self, in_features: int, hidden_size, depth, dropout):
        """Build an FFNN with ``depth`` hidden layers of ``hidden_size``
        units (LeakyReLU + dropout), followed by a scalar output layer.
        """
        super().__init__()
        # With no hidden layers, the output projection reads the raw pair
        # representation directly.
        if not depth:
            hidden_size = in_features
        layers = []
        for i in range(depth):
            layers.extend([
                # First layer maps from in_features; later ones are square.
                torch.nn.Linear(hidden_size if i else in_features, hidden_size),
                torch.nn.LeakyReLU(),
                torch.nn.Dropout(dropout),
            ])
        self.hidden = torch.nn.Sequential(*layers)
        self.out = torch.nn.Linear(hidden_size, out_features=1)

    def forward(self, *, all_mentions: torch.Tensor, mentions_batch: torch.Tensor, pairwise_batch: torch.Tensor, top_indices_batch: torch.Tensor, top_rough_scores_batch: torch.Tensor) -> torch.Tensor:
        """Return anaphoricity scores: rough scores plus the FFNN score for
        each (mention, antecedent) pair, with a dummy column prepended by
        ``add_dummy`` (defined elsewhere in this module).
        """
        pair_matrix = self._get_pair_matrix(all_mentions, mentions_batch, pairwise_batch, top_indices_batch)
        scores = (top_rough_scores_batch + self._ffnn(pair_matrix))
        scores = add_dummy(scores, eps=True)
        return scores

    def _ffnn(self, x: torch.Tensor) -> torch.Tensor:
        """Apply the hidden stack and output layer; squeeze the trailing
        singleton score dimension."""
        x = self.out(self.hidden(x))
        return x.squeeze(2)

    # BUG FIX: this helper takes no `self` but was being called as
    # `self._get_pair_matrix(...)`; without @staticmethod that call passes
    # the instance as `all_mentions` and raises TypeError (5 args for 4
    # parameters). Restored the decorator.
    @staticmethod
    def _get_pair_matrix(all_mentions: torch.Tensor, mentions_batch: torch.Tensor, pairwise_batch: torch.Tensor, top_indices_batch: torch.Tensor) -> torch.Tensor:
        """Concatenate [mention, antecedent, elementwise product, pairwise
        features] along the last dim for each (mention, antecedent) pair.
        """
        emb_size = mentions_batch.shape[1]
        n_ants = pairwise_batch.shape[1]
        # Broadcast each mention embedding over its candidate antecedents.
        a_mentions = mentions_batch.unsqueeze(1).expand((- 1), n_ants, emb_size)
        b_mentions = all_mentions[top_indices_batch]
        similarity = (a_mentions * b_mentions)
        out = torch.cat((a_mentions, b_mentions, similarity, pairwise_batch), dim=2)
        return out
def test_load(config):
    """After load(), every section and value is reachable through call
    syntax, at the root, section, and leaf levels."""
    config.load()
    expected = {
        'section1': {'value1': '11', 'value11': '11'},
        'section2': {'value2': '2'},
        'section3': {'value3': '3'},
    }
    assert config() == expected
    for section_name, values in expected.items():
        section = getattr(config, section_name)
        assert section() == values
        for key, value in values.items():
            assert getattr(section, key)() == value
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.