code stringlengths 281 23.7M |
|---|
def login(request):
    """Authenticate the user from POSTed credentials and redirect.

    Logs the user in only when authentication succeeds AND the account is
    active; in every case redirects to the POSTed ``next`` URL (default '/').

    NOTE(review): missing 'username'/'password' keys raise KeyError (HTTP
    500) — presumably guaranteed by the login form; confirm.
    """
    username = request.POST['username']
    password = request.POST['password']
    user = authenticate(username=username, password=password)
    # Dead `else: pass` branches removed — failed or inactive logins simply
    # fall through to the redirect, exactly as before.
    if user is not None and user.is_active:
        _login(request, user)
    return HttpResponseRedirect(request.POST.get('next', '/'))
class OptionPlotoptionsSplineTooltipDatetimelabelformats(Options):
    """Date/time label formats for spline-series tooltips (Highcharts).

    Each attribute is a strftime-style format string; getters return the
    Highcharts default when the option is unset.

    NOTE(review): the flattened source had each getter/setter as two plain
    ``def``s with the same name (the setter silently shadowed the getter);
    restored here as @property/@setter pairs per the generator convention.
    """

    @property
    def day(self):
        return self._config_get('%A, %e %b %Y')

    @day.setter
    def day(self, text: str):
        self._config(text, js_type=False)

    @property
    def hour(self):
        return self._config_get('%A, %e %b, %H:%M')

    @hour.setter
    def hour(self, text: str):
        self._config(text, js_type=False)

    @property
    def millisecond(self):
        return self._config_get('%A, %e %b, %H:%M:%S.%L')

    @millisecond.setter
    def millisecond(self, text: str):
        self._config(text, js_type=False)

    @property
    def minute(self):
        return self._config_get('%A, %e %b, %H:%M')

    @minute.setter
    def minute(self, text: str):
        self._config(text, js_type=False)

    @property
    def month(self):
        return self._config_get('%B %Y')

    @month.setter
    def month(self, text: str):
        self._config(text, js_type=False)

    @property
    def second(self):
        return self._config_get('%A, %e %b, %H:%M:%S')

    @second.setter
    def second(self, text: str):
        self._config(text, js_type=False)

    @property
    def week(self):
        return self._config_get('Week from %A, %e %b %Y')

    @week.setter
    def week(self, text: str):
        self._config(text, js_type=False)

    @property
    def year(self):
        return self._config_get('%Y')

    @year.setter
    def year(self, text: str):
        self._config(text, js_type=False)
def test_vector_spatialcoordinate_interpolation(parentmesh, vertexcoords):
    """Interpolating 2*SpatialCoordinate onto a vertex-only mesh must yield
    twice each vertex's coordinates."""
    if (parentmesh.name == 'immersedsphere'):
        # Sphere case: map the raw coordinates onto the immersed surface first.
        vertexcoords = immersed_sphere_vertexcoords(parentmesh, vertexcoords)
    vm = VertexOnlyMesh(parentmesh, vertexcoords, missing_points_behaviour=None)
    # Re-read coordinates from the mesh: VertexOnlyMesh may reorder or drop
    # points (missing_points_behaviour=None), so compare against its own data.
    vertexcoords = vm.coordinates.dat.data_ro
    W = VectorFunctionSpace(vm, 'DG', 0)
    expr = (2 * SpatialCoordinate(parentmesh))
    w_expr = interpolate(expr, W)
    assert np.allclose(w_expr.dat.data_ro, (2 * np.asarray(vertexcoords)))
class CustomPropertiesBinWidget(QtWidgets.QWidget):
    """Qt widget hosting a one-column table of node property editors.

    Double-clicking a node in the attached node graph inserts a property
    widget for that node at row 0, keeping the list small by evicting the
    last row and any duplicate entry for the same node id.
    """

    # Emitted as (node_id, property_name, value).
    # NOTE(review): nothing in this block emits it — presumably connected
    # and emitted by CustomNodePropWidget; confirm.
    property_changed = QtCore.Signal(str, str, object)

    def __init__(self, parent=None, node_graph=None):
        # NOTE(review): the node_graph=None default would crash on the
        # add_properties_bin call below; a graph instance is effectively
        # required despite the default.
        super(CustomPropertiesBinWidget, self).__init__(parent)
        self.setWindowTitle('Properties Bin')
        self._prop_list = _PropertiesList()
        self.resize(450, 400)
        self._block_signal = False
        self._lock = False
        layout = QtWidgets.QVBoxLayout(self)
        layout.addWidget(self._prop_list, 1)
        # Register with the graph and react to node double-clicks.
        node_graph.add_properties_bin(self)
        node_graph.node_double_clicked.connect(self.add_node)

    def __repr__(self):
        return '<{} object at {}>'.format(self.__class__.__name__, hex(id(self)))

    def __on_prop_close(self, node_id):
        """Remove every row whose item text matches node_id."""
        items = self._prop_list.findItems(node_id, QtCore.Qt.MatchExactly)
        [self._prop_list.removeRow(i.row()) for i in items]

    def add_node(self, node):
        """Insert a property widget for *node* at the top of the list."""
        # Evict the oldest (last) row to bound the list size.
        rows = self._prop_list.rowCount()
        if (rows >= 1):
            self._prop_list.removeRow((rows - 1))
        # Drop any existing row for the same node id.
        itm_find = self._prop_list.findItems(node.id, QtCore.Qt.MatchExactly)
        if itm_find:
            self._prop_list.removeRow(itm_find[0].row())
        self._prop_list.insertRow(0)
        prop_widget = CustomNodePropWidget(node=node)
        prop_widget.property_closed.connect(self.__on_prop_close)
        self._prop_list.setCellWidget(0, 0, prop_widget)
        # The item text (node.id) is what findItems matches on later.
        item = QtWidgets.QTableWidgetItem(node.id)
        self._prop_list.setItem(0, 0, item)
        self._prop_list.selectRow(0)

    def remove_node(self, node):
        """Remove *node*'s row; accepts a node object or a node-id string."""
        node_id = (node if isinstance(node, str) else node.id)
        self.__on_prop_close(node_id)

    def prop_widget(self, node):
        """Return the property widget for *node* (object or id), or None."""
        node_id = (node if isinstance(node, str) else node.id)
        itm_find = self._prop_list.findItems(node_id, QtCore.Qt.MatchExactly)
        if itm_find:
            item = itm_find[0]
            return self._prop_list.cellWidget(item.row(), 0)
class OptionPlotoptionsPyramidSonificationTracksMappingTime(Options):
    """Time-mapping options for pyramid-series sonification tracks.

    NOTE(review): flattened source had each getter/setter as two plain defs
    with the same name (setter shadowing getter); restored as
    @property/@setter pairs per the generator convention.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesFunnelDataEvents(Options):
    """Per-point event callbacks for funnel series data (all default None).

    NOTE(review): flattened source had each getter/setter as two plain defs
    with the same name (setter shadowing getter); restored as
    @property/@setter pairs per the generator convention.
    """

    @property
    def click(self):
        return self._config_get(None)

    @click.setter
    def click(self, value: Any):
        self._config(value, js_type=False)

    @property
    def drag(self):
        return self._config_get(None)

    @drag.setter
    def drag(self, value: Any):
        self._config(value, js_type=False)

    @property
    def dragStart(self):
        return self._config_get(None)

    @dragStart.setter
    def dragStart(self, value: Any):
        self._config(value, js_type=False)

    @property
    def drop(self):
        return self._config_get(None)

    @drop.setter
    def drop(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mouseOut(self):
        return self._config_get(None)

    @mouseOut.setter
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mouseOver(self):
        return self._config_get(None)

    @mouseOver.setter
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)

    @property
    def remove(self):
        return self._config_get(None)

    @remove.setter
    def remove(self, value: Any):
        self._config(value, js_type=False)

    @property
    def select(self):
        return self._config_get(None)

    @select.setter
    def select(self, value: Any):
        self._config(value, js_type=False)

    @property
    def unselect(self):
        return self._config_get(None)

    @unselect.setter
    def unselect(self, value: Any):
        self._config(value, js_type=False)

    @property
    def update(self):
        return self._config_get(None)

    @update.setter
    def update(self, value: Any):
        self._config(value, js_type=False)
class Pagination():
    """Bootstrap-style pagination HTML builder.

    Renders previous/next arrows plus a window of ``pager_page_count`` page
    links centred on the current page, preserving existing query parameters.

    NOTE(review): ``query_encode``, ``start`` and ``end`` are restored as
    @property — ``page_html`` interpolates ``self.query_encode`` without
    calling it, so as plain methods the URLs would embed a bound-method repr.
    """

    def __init__(self, current_page, all_count, base_url, query_params, per_page=20, pager_page_count=11, position='pos'):
        self.all_count = all_count
        self.per_page = per_page
        self.position = position
        # Total number of pages (ceil division).
        self.current_count = math.ceil(all_count / per_page)
        # Clamp the requested page into [1, current_count]; anything
        # unparseable falls back to page 1.
        try:
            self.current_page = int(current_page)
            if not (0 < self.current_page <= self.current_count):
                self.current_page = 1
        except (TypeError, ValueError):
            self.current_page = 1
        self.base_url = base_url
        # Mutable mapping with an .urlencode() method (e.g. Django QueryDict).
        self.query_params = query_params
        self.pager_page_count = pager_page_count
        self.half_pager_count = int(self.pager_page_count / 2)
        if self.current_count < self.pager_page_count:
            self.pager_page_count = self.current_count

    def page_html(self):
        """Return the <li> markup for the pager; empty when only one page."""
        start = self.current_page - self.half_pager_count
        end = self.current_page + self.half_pager_count
        # Pin the window to the left/right edge when near a boundary.
        if self.current_page <= self.half_pager_count:
            start = 1
            end = self.pager_page_count
        if (self.current_page + self.half_pager_count) >= self.current_count:
            start = (self.current_count - self.pager_page_count) + 1
            end = self.current_count
        page_list = []
        if self.current_page != 1:
            self.query_params['page'] = self.current_page - 1
            page_list.append(f'<li><a href="{self.base_url}?{self.query_encode}#{self.position}"></a></li>')
        for i in range(start, end + 1):
            self.query_params['page'] = i
            if self.current_page == i:
                li = f'<li class="active"><a href="{self.base_url}?{self.query_encode}#{self.position}">{i}</a></li>'
            else:
                li = f'<li><a href="{self.base_url}?{self.query_encode}#{self.position}">{i}</a></li>'
            page_list.append(li)
        if self.current_page != self.current_count:
            self.query_params['page'] = self.current_page + 1
            page_list.append(f'<li><a href="{self.base_url}?{self.query_encode}#{self.position}"></a></li>')
        # A single entry means there is only one page: render nothing.
        if len(page_list) == 1:
            page_list = []
        return ''.join(page_list)

    @property
    def query_encode(self):
        """Current query string (includes the 'page' key set by page_html)."""
        return self.query_params.urlencode()

    @property
    def start(self):
        """Offset of the first record on the current page."""
        return (self.current_page - 1) * self.per_page

    @property
    def end(self):
        """Offset just past the last record on the current page."""
        return self.current_page * self.per_page
class RedisCli(BaseView):
    """Flask-Admin view exposing an interactive Redis console.

    Discovers the public callables of the given redis client, maps a few
    console aliases, and renders command results through admin templates.
    """

    # Console name -> redis-py method name ('del' is a Python keyword).
    remapped_commands = {'del': 'delete'}
    # Callable client attributes that are not real console commands.
    excluded_commands = set(('pubsub', 'set_response_callback', 'from_url'))

    def __init__(self, redis, name=None, category=None, endpoint=None, url=None):
        super(RedisCli, self).__init__(name, category, endpoint, url)
        self.redis = redis
        # command name -> (handler, docstring)
        self.commands = {}
        self._inspect_commands()
        self._contribute_commands()

    def _inspect_commands(self):
        """Collect the client's public callables into self.commands."""
        for name in dir(self.redis):
            if (not name.startswith('_')):
                attr = getattr(self.redis, name)
                if (callable(attr) and (name not in self.excluded_commands)):
                    doc = (getattr(attr, '__doc__', '') or '').strip()
                    self.commands[name] = (attr, doc)
        # Register aliases (e.g. 'del' reuses delete's handler and doc).
        for (new, old) in self.remapped_commands.items():
            self.commands[new] = self.commands[old]

    def _contribute_commands(self):
        """Add console-only commands that never hit the server."""
        self.commands['help'] = (self._cmd_help, 'Help!')

    def _execute_command(self, name, args):
        """Run a parsed command; return rendered result or error markup."""
        new_cmd = self.remapped_commands.get(name)
        if new_cmd:
            name = new_cmd
        if (name not in self.commands):
            return self._error(gettext('Cli: Invalid command.'))
        (handler, _) = self.commands[name]
        return self._result(handler(*args))

    def _parse_cmd(self, cmd):
        # shlex honours shell-style quoting, so values with spaces work.
        return tuple(shlex.split(cmd))

    def _error(self, msg):
        # NOTE(review): msg is interpolated unescaped into Markup — safe
        # only while messages are server-generated; confirm.
        return Markup(('<div class="error">%s</div>' % msg))

    def _result(self, result):
        return self.render('admin/rediscli/response.html', type_name=(lambda d: type(d).__name__), result=result)

    def _cmd_help(self, *args):
        """Implement `help` (command list) and `help <command>`."""
        if (not args):
            # 'help' shadows the builtin here; local to this method only.
            help = 'Usage: help <command>.\nList of supported commands: '
            help += ', '.join((n for n in sorted(self.commands)))
            return TextWrapper(help)
        cmd = args[0]
        if (cmd not in self.commands):
            raise CommandError('Invalid command.')
        help = self.commands[cmd][1]
        if (not help):
            return TextWrapper('Command does not have any help.')
        return TextWrapper(help)

    # NOTE(review): the two bare tuples below look like stripped flask-admin
    # ``@expose('/')`` / ``@expose('/run/', methods=('POST',))`` decorators
    # lost in flattening — confirm against the original file.
    ('/')
    def console_view(self):
        return self.render('admin/rediscli/console.html')

    ('/run/', methods=('POST',))
    def execute_view(self):
        """POST endpoint: parse and execute one console command."""
        try:
            cmd = request.form.get('cmd')
            if (not cmd):
                return self._error('Cli: Empty command.')
            parts = self._parse_cmd(cmd)
            if (not parts):
                return self._error('Cli: Failed to parse command.')
            return self._execute_command(parts[0], parts[1:])
        except CommandError as err:
            return self._error(('Cli: %s' % err))
        except Exception as ex:
            # Last-resort catch: log and surface the error to the console.
            log.exception(ex)
            return self._error(('Cli: %s' % ex))
class serienRecNewRunAutoCheckScreen(Screen):
    """Enigma2 progress screen shown while the SerienRecorder timer search
    runs; OK/cancel closes it."""

    DESKTOP_WIDTH = getDesktop(0).size().width()
    DESKTOP_HEIGHT = getDesktop(0).size().height()
    screenWidth = 600
    screenHeight = 120
    # Scale the window up for desktops wider than 1280px (HD skins).
    if (DESKTOP_WIDTH > 1280):
        factor = 1.5
        screenWidth *= factor
        screenHeight *= factor
    # Skin XML, centred on the desktop via the position expression.
    skin = ('\n\t\t\t<screen name="SerienRecorderAutoCheck" position="%d,%d" size="%d,%d" title="%s" backgroundColor="#26181d20" flags="wfNoBorder">\n\t\t\t\t<widget name="headline" position="10,20" size="%d,40" foregroundColor="#00ff4a3c" backgroundColor="#26181d20" transparent="1" font="Regular;26" valign="center" halign="left" />\n\t\t\t\t<widget name="progressslider" position="10,75" size="%d,25" borderWidth="1" zPosition="1" backgroundColor="#"/>\n\t\t\t</screen>' % (((DESKTOP_WIDTH - screenWidth) / 2), ((DESKTOP_HEIGHT - screenHeight) / 2), screenWidth, screenHeight, 'SerienRecorder Timer-Suchlauf', (screenWidth - 20), (screenWidth - 20)))

    def __init__(self, session, version):
        self.session = session
        self.version = version
        Screen.__init__(self, session)
        self['headline'] = Label('')
        self['progressslider'] = ProgressBar()
        self['actions'] = ActionMap(['SerienRecorderActions'], {'ok': self.keyExit, 'cancel': self.keyExit}, (- 1))
        self.onLayoutFinish.append(self.__onLayoutFinished)

    def __onLayoutFinished(self):
        # German UI text: "timer search running - please wait...".
        self['headline'].setText('Timer-Suchlauf wird ausgefuhrt - bitte warten...')
        # -1 puts the slider into indeterminate/busy mode.
        self['progressslider'].setValue((- 1))

    def keyExit(self):
        self.close()
# NOTE(review): stray expression — likely a stripped decorator such as
# ``@common.requires_os(*metadata.platforms)`` lost in flattening; confirm.
_os(*metadata.platforms)
def main():
    """Red-team emulation: copy a test EXE to well-known system-binary names
    under C:\\Users\\Public, rewrite one copy's OriginalFilename via rcedit,
    execute the masqueraded copies, then remove the artifacts."""
    cscript = 'C:\\Users\\Public\\cscript.exe'
    explorer = 'C:\\Users\\Public\\explorer.exe'
    userinit = 'C:\\Users\\Public\\userinit.exe'
    winlogon = 'C:\\Users\\Public\\winlogon.exe'
    rcedit = 'C:\\Users\\Public\\rcedit.exe'
    common.copy_file(EXE_FILE, cscript)
    common.copy_file(EXE_FILE, explorer)
    common.copy_file(EXE_FILE, userinit)
    common.copy_file(EXE_FILE, winlogon)
    common.copy_file(RENAMER, rcedit)
    common.log('Modifying the OriginalFileName attribute')
    common.execute([rcedit, cscript, '--set-version-string', 'OriginalFilename', 'cscript.exe'])
    # Short-lived runs: kill after 5s, we only need the process events.
    common.execute([winlogon, '/c', userinit], timeout=5, kill=True)
    common.execute([explorer, '/c', cscript], timeout=5, kill=True)
    common.remove_files(cscript, explorer, userinit, winlogon)
def gen_function_call(func_attrs, indent=' ', conv2d_flag=''):
    """Render the generated-code call statement for a conv2d op.

    inputs[0]=x, inputs[1]=w; inputs[2] is the bias when 'bias' is in
    conv2d_flag, and inputs[3] the residual for 'bias_add_relu' /
    'bias_add_identity'.  Dimension names are passed by address
    ('&' + name).  Indexing assumes NHWC activations and the weight layout
    (out_ch, kh, kw, in_ch) — presumed from the indices used; confirm
    against FUNC_CALL_TEMPLATE.
    """
    x = func_attrs['inputs'][0]
    xshape = x._attrs['shape']
    w = func_attrs['inputs'][1]
    wshape = w._attrs['shape']
    y = func_attrs['outputs'][0]
    yshape = y._attrs['shape']
    bias_ptr = ''
    res_ptr = ''
    if ('bias' in conv2d_flag):
        b = func_attrs['inputs'][2]
        bias_ptr = b._attrs['name']
    if (conv2d_flag in ['bias_add_relu', 'bias_add_identity']):
        r = func_attrs['inputs'][3]
        res_ptr = r._attrs['name']
    return FUNC_CALL_TEMPLATE.render(func_name=func_attrs['name'], in_ptr=x._attrs['name'], weight_ptr=w._attrs['name'], out_ptr=y._attrs['name'], bias_ptr=bias_ptr, res_ptr=res_ptr, p_batch=('&' + xshape[0]._attrs['name']), p_out_ch=('&' + wshape[0]._attrs['name']), p_in_ch=('&' + xshape[3]._attrs['name']), p_kernel_h=('&' + wshape[1]._attrs['name']), p_kernel_w=('&' + wshape[2]._attrs['name']), p_in_h=('&' + xshape[1]._attrs['name']), p_in_w=('&' + xshape[2]._attrs['name']), p_out_batch=('&' + yshape[0]._attrs['name']), p_out_h=('&' + yshape[1]._attrs['name']), p_out_w=('&' + yshape[2]._attrs['name']), stride=func_attrs['stride'], dilation=func_attrs['dilate'], pad=func_attrs['pad'], group=func_attrs['group'], indent=indent, conv2d_flag=conv2d_flag)
class OptionDropShadow(Options):
    """Drop-shadow configuration (enabled flag, color, offsets, blur, opacity).

    NOTE(review): flattened source had each getter/setter as two plain defs
    with the same name (setter shadowing getter); restored as
    @property/@setter pairs per the generator convention.
    """

    @property
    def enabled(self):
        return self._config_get()

    @enabled.setter
    def enabled(self, flag):
        self._config(flag)

    @property
    def color(self):
        return self._config_get()

    @color.setter
    def color(self, value):
        self._config(value)

    @property
    def top(self):
        return self._config_get()

    @top.setter
    def top(self, num):
        self._config(num)

    @property
    def left(self):
        return self._config_get()

    @left.setter
    def left(self, num):
        self._config(num)

    @property
    def blur(self):
        return self._config_get()

    @blur.setter
    def blur(self, num):
        self._config(num)

    @property
    def opacity(self):
        return self._config_get()

    @opacity.setter
    def opacity(self, num):
        self._config(num)
def create_mbs(item: dict) -> MBS:
    """Build an MBS instance from a raw asset dict.

    Returns None when the item's template_id is not in mbs_table.
    """
    template = mbs_table.get(item['template_id'], None)
    if not template:
        return None
    result = MBS(template.template_id, template.name, template.type, template.saved_claims)
    result.asset_id = item['asset_id']
    result.next_availability = datetime.fromtimestamp(item['next_availability'])
    return result
def test_roc_auc_test_cannot_calculate_roc_auc() -> None:
    """TestRocAuc must ERROR (not crash) when given class labels instead of
    the probabilities ROC AUC requires."""
    test_dataset = pd.DataFrame({'target': ['a', 'a', 'a', 'b', 'b', 'b', 'c', 'c', 'c', 'c'], 'prediction': ['a', 'a', 'a', 'b', 'a', 'c', 'a', 'c', 'c', 'c']})
    column_mapping = ColumnMapping(target='target', prediction='prediction')
    suite = TestSuite(tests=[TestRocAuc(lt=1)])
    suite.run(current_data=test_dataset, reference_data=None, column_mapping=column_mapping)
    # Falsy suite means at least one test did not pass.
    assert (not suite)
    test_info = suite.as_dict()['tests'][0]
    assert (test_info['description'] == 'Not enough data to calculate ROC AUC. Consider providing probabilities instead of labels.')
    assert (test_info['status'] == 'ERROR')
def extractZhuwangsqcBlogspotCom(item):
    """Map a feed item to a release message.

    Returns None for previews or items with no chapter/volume number,
    False when no known tag matches, otherwise the built release message.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if ((not (chp or vol)) or ('preview' in item['title'].lower())):
        return None
    # (tag in feed, release name, translation type)
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for (tagname, name, tl_type) in tagmap:
        if (tagname in item['tags']):
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def compare_image(scene, img_path):
    """Render *scene* off-screen at 300x300 and compare against the image at
    img_path (made absolute via fixpath when relative).

    The scene's renderer is temporarily moved into a private RenderWindow;
    the finally-block restores the original window/render state even when
    the comparison fails.
    """
    abs_img_path = img_path
    if (not os.path.isabs(img_path)):
        abs_img_path = fixpath(img_path)
    s = scene.scene
    # Suspend interactive rendering while we swap render windows.
    s.disable_render = True
    ren = s.renderer
    s.render_window.remove_renderer(ren)
    rw = tvtk.RenderWindow(size=(300, 300))
    rw.add_renderer(ren)
    ren.reset_camera()
    rw.render()
    try:
        compare_image_raw(rw, abs_img_path)
    finally:
        # Restore the renderer to its original window no matter what.
        rw.remove_renderer(ren)
        s.render_window.add_renderer(ren)
        s.disable_render = False
        ren.reset_camera()
        s.render()
def magnetic_angles_to_vec(intensity, inclination, declination):
    """Convert field intensity plus inclination/declination (degrees) into
    (easting, northing, upward) components.

    Geophysics convention: inclination is positive downward, hence the
    negative sign on the upward component.  Accepts scalars or arrays.
    """
    inc = np.radians(inclination)
    dec = np.radians(declination)
    horizontal = intensity * np.cos(inc)
    east = horizontal * np.sin(dec)
    north = horizontal * np.cos(dec)
    up = (- intensity) * np.sin(inc)
    return (east, north, up)
# NOTE(review): stray name — presumably a stripped registration decorator
# (e.g. ``@chunk.register_chunk_type``) for the class below; confirm.
_chunk_type
class chunk_shutdown_complete(chunk):
    """SCTP SHUTDOWN COMPLETE chunk.

    Wire format '!BBH': type byte, flags byte (bit 0 = T flag), length.
    """
    _PACK_STR = '!BBH'
    _MIN_LEN = struct.calcsize(_PACK_STR)

    # NOTE(review): takes ``cls`` and is called both unbound (cls.chunk_type())
    # in parser paths and via self.chunk_type() — likely a stripped
    # @classmethod; confirm.
    def chunk_type(cls):
        return TYPE_SHUTDOWN_COMPLETE

    def __init__(self, tflag=0, length=0):
        # tflag is a single bit: only 0 or 1 satisfy (tflag | 1) == 1.
        assert (1 == (tflag | 1))
        super(chunk_shutdown_complete, self).__init__(self.chunk_type(), length)
        self.tflag = tflag

    def parser(cls, buf):
        """Parse a chunk instance out of *buf* (type byte is discarded)."""
        (_, flags, length) = struct.unpack_from(cls._PACK_STR, buf)
        tflag = (flags & 1)
        msg = cls(tflag, length)
        return msg

    def serialize(self):
        """Pack the chunk header; a zero length is filled with the minimum."""
        if (0 == self.length):
            self.length = self._MIN_LEN
        buf = struct.pack(self._PACK_STR, self.chunk_type(), self.tflag, self.length)
        return buf
def test_tess_args_eng():
    """English defaults: OEM/PSM flags present, no tessdata dir, no
    interword-space preservation, and not a space-less language."""
    tess_args = TessArgs(tessdata_path=None, lang='eng', oem=OEM.DEFAULT, psm=PSM.COUNT)
    args = ' '.join(tess_args.as_list())
    assert ('--oem 3' in args)
    assert ('--psm 14' in args)
    assert ('--tessdata-dir' not in args)
    assert ('-c preserve_interword_spaces=1' not in args)
    assert (not tess_args.is_language_without_spaces())
def multimethod(*types):
    """Decorator factory: register the decorated function for *types* on the
    shared multimethod dispatcher of the same name.

    The decorated name is replaced by the dispatcher object, so repeated
    decoration of same-named functions accumulates overloads.
    """
    def register(function):
        key = function.__name__
        dispatcher = _multi_registry.get(key)
        if dispatcher is None:
            dispatcher = _multi_registry[key] = _MultiMethod(key)
        dispatcher.register_function_for_types(types, function)
        return dispatcher
    return register
def filter(d, predicate):
    """Return a filtered shallow copy of mapping *d*.

    Keeps entries for which ``predicate(key, value)`` is truthy.  Raises
    ValueError when *predicate* is not callable.  (Intentionally shadows the
    builtin ``filter`` — this is the module's public API name.)
    """
    if not callable(predicate):
        raise ValueError('predicate argument must be a callable.')
    result = clone(d, empty=True)
    # Snapshot the keys so the predicate may safely touch *d*.
    for key in list(d.keys()):
        value = d.get(key, None)
        if predicate(key, value):
            result[key] = value
    return result
def extractNovelbestcomWpcomstagingCom(item):
    """Map a feed item to a release message.

    Returns None for previews or items lacking chapter/volume numbers,
    False when no known tag matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or ('preview' in item['title'].lower()):
        return None
    # (tag in feed, release name, translation type)
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class ElectionsList(db.Model):
    """SQLAlchemy model over the ofec_elections_list_mv materialized view:
    one row per election (office/state/district/cycle) plus the incumbent."""
    __tablename__ = 'ofec_elections_list_mv'
    idx = db.Column(db.Integer, primary_key=True)
    # Display ordering for election lists.
    sort_order = db.Column(db.Integer)
    office = db.Column(db.String, doc=docs.OFFICE)
    state = db.Column(db.String, doc=docs.STATE_GENERIC)
    district = db.Column(db.String, doc=docs.DISTRICT)
    cycle = db.Column(db.Integer)
    incumbent_id = db.Column(db.String, doc=docs.CANDIDATE_ID)
    incumbent_name = db.Column(db.String, doc=docs.CANDIDATE_NAME)
class AnimateTransform(Html.Html):
    """HTML component rendering an SVG <animateTransform/> element."""
    name = 'SVG AnimateTransform'

    def __init__(self, page, attribute_name, type, from_pos, to_pos, duration, repeat_count):
        # No inner HTML: the element is self-closing; everything is attributes.
        super(AnimateTransform, self).__init__(page, '')
        # duration is given in seconds and serialized as e.g. '2s'.
        self.set_attrs({'attributeName': attribute_name, 'type': type, 'from': from_pos, 'to': to_pos, 'dur': ('%ss' % duration), 'repeatCount': repeat_count})

    def __str__(self):
        return ('<animateTransform %s />' % self.get_attrs(css_class_names=self.style.get_classes()))
class Base64CoreSerializer(AbstractSerializer):
    """Serializer for base64 payloads (extensions 'b64'/'base64'), tolerant
    of URL percent-encoding and missing '=' padding on input."""

    def __init__(self):
        super().__init__(extensions=['b64', 'base64'])

    def _fix_url_encoding_and_padding(self, s):
        # Undo percent-encoding, then right-pad to a multiple of 4 chars
        # so b64decode accepts it.
        unquoted = unquote(s)
        remainder = len(unquoted) % 4
        if remainder:
            unquoted += '=' * (4 - remainder)
        return unquoted

    def decode(self, s, **kwargs):
        """Base64-decode *s*; returns str when *encoding* is truthy
        (default 'utf-8'), raw bytes otherwise."""
        prepared = self._fix_url_encoding_and_padding(s)
        encoding = kwargs.pop('encoding', 'utf-8')
        if not encoding:
            return base64.b64decode(prepared)
        decoded = base64.b64decode(prepared.encode(encoding))
        return decoded.decode(encoding)

    def encode(self, d, **kwargs):
        """Base64-encode *d*; str input is first byte-encoded, and the
        result is returned as str when *encoding* is truthy."""
        encoding = kwargs.pop('encoding', 'utf-8')
        payload = d
        if encoding and type_util.is_string(payload):
            payload = payload.encode(encoding)
        encoded = base64.b64encode(payload)
        if encoding:
            return encoded.decode(encoding)
        return encoded
def test_get_previous_tag():
    """get_previous_tag must return the tag preceding the current one in
    version-sorted `git tag --list` output."""
    GIT_TAG_LIST = '\nv1.15.2\nv1.16.0\n'
    origin = _DEFAULT_HTTPS_REPO_URL
    source = _generate_source_tree_from_origin(origin)
    current_tag = 'v1.16.0'
    expected_tag = 'v1.15.2'
    git_cmd = 'git tag --list --sort v:refname'
    # Stub out the shell call so no real git repo is needed.
    with mock.patch('src.lib.cmd_exec.run_command', mock.MagicMock(return_value=GIT_TAG_LIST)) as magic_mock:
        previous_tag = source.get_previous_tag(current_tag)
        cmd_params = cmd_exec.CommandParameters(cwd=mock.ANY)
        magic_mock.assert_called_once_with(git_cmd, cmd_params)
    assert (expected_tag == previous_tag)
def _parse_output(text):
    """Parse lines of '<kind> <duration>' into {kind: parsed_duration}.

    Blank lines are skipped.  Raises NotImplementedError for a kind that is
    unknown (not in KINDS) or seen more than once.
    """
    parsed = {}
    for raw_line in text.splitlines():
        stripped = raw_line.strip()
        if not stripped:
            continue
        kind, duration = stripped.split()
        if kind not in KINDS:
            raise NotImplementedError
        if kind in parsed:
            raise NotImplementedError
        parsed[kind] = parse_duration(duration)
    return parsed
class OptionSeriesErrorbarStatesHoverMarker(Options):
    """Hover-state marker options for errorbar series.

    NOTE(review): flattened source had each getter/setter as two plain defs
    with the same name (setter shadowing getter); restored as
    @property/@setter pairs per the generator convention.
    """

    @property
    def enabled(self):
        return self._config_get(None)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def enabledThreshold(self):
        return self._config_get(2)

    @enabledThreshold.setter
    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)

    @property
    def fillColor(self):
        return self._config_get(None)

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def height(self):
        return self._config_get(None)

    @height.setter
    def height(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineColor(self):
        return self._config_get('#ffffff')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        return self._config_get(0)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def radius(self):
        return self._config_get(4)

    @radius.setter
    def radius(self, num: float):
        self._config(num, js_type=False)

    @property
    def width(self):
        return self._config_get(None)

    @width.setter
    def width(self, num: float):
        self._config(num, js_type=False)
def test_01_09_predictions(nlp):
    """The German NER model must find exactly one entity (Apple/ORG) and
    leave the 'Modell X' tokens unlabelled."""
    text = 'Apple bringt neues Modell X Pro im Sommer'
    doc = nlp(text)
    ents = [(ent.text, ent.label_) for ent in doc.ents]
    assert (len(ents) == 1)
    assert (ents[0] == ('Apple', 'ORG'))
    # Tokens 3/4 ('Modell', 'X') must carry no entity type (0 = none).
    assert (doc[3].ent_type == 0)
    assert (doc[4].ent_type == 0)
def test():
    """Smoke-test instakit curve sets: show each curve applied to the first
    bundled image, then verify file_exists is False for a fresh temp path."""
    curve_sets = CurveSet.instakit_curve_sets()
    image_paths = list(map((lambda image_file: asset.path('img', image_file)), asset.listfiles('img')))
    image_inputs = list(map((lambda image_path: Mode.RGB.open(image_path)), image_paths))
    # Only the first image — .show() opens external viewers.
    for image_input in image_inputs[:1]:
        image_input.show()
        for curve_set in curve_sets:
            curve_set.process(image_input).show()
    print(curve_sets)
    print(image_paths)
    import tempfile
    # mktemp gives a path that does not exist yet.
    temppath = tempfile.mktemp(suffix='.acv')
    assert (not CurveSet(path=temppath).file_exists)
def install_user():
    """Install the Nautilus Terminal extension for the current user.

    Copies the extension file and GLib schema into the user directories,
    then compiles the schema if glib-compile-schemas is available (otherwise
    prints the command to run manually).
    """
    if (not os.path.isdir(USER_EXTENSION_DIR)):
        os.makedirs(USER_EXTENSION_DIR)
    shutil.copy(os.path.join(ROOT, EXTENSION_FILE), os.path.join(USER_EXTENSION_DIR, EXTENSION_FILE))
    if (not os.path.isdir(USER_GLIB_SCHEMA_DIR)):
        os.makedirs(USER_GLIB_SCHEMA_DIR)
    shutil.copy(GLIB_SCHEMA_SOURCE, os.path.join(USER_GLIB_SCHEMA_DIR, GLIB_SCHEMA_FILE))
    if is_glib_compile_schema_installed():
        subprocess.call([GLIB_COMPILE_SCHEMA, USER_GLIB_SCHEMA_DIR])
        print('GLib schema successfully compiled.')
    else:
        print('GLib schema cannot be compiled. Please install GLib schema compiler and run the following command:')
        print(' '.join([GLIB_COMPILE_SCHEMA, USER_GLIB_SCHEMA_DIR]))
    print('Nautilus Terminal extension successfully installed on the current user.')
class GitLfsRequirement(object):
    """Checks for, and if necessary installs, the git-lfs prerequisite."""

    def __init__(self):
        self.name = 'git-lfs'

    # NOTE(review): stray name below — presumably a stripped
    # ``@throw_ersilia_exception`` decorator lost in flattening; confirm.
    _ersilia_exception
    def is_installed(self, install_if_necessary=True):
        """Return True when git-lfs responds; optionally install it,
        otherwise raise GitLfsSetupError."""
        check = run_command_check_output('git-lfs')
        if check.startswith('git-lfs'):
            return True
        elif install_if_necessary:
            self.install()
        else:
            raise GitLfsSetupError

    def activate(self):
        # Installs the git-lfs hooks into the current environment.
        run_command('git-lfs install')

    def install(self):
        run_command('conda install -c conda-forge git-lfs')
def test():
    """Exercise checker: the animal_component must be added after the NER
    and label both matched entities as ANIMAL.  (User-facing messages are in
    French by design — do not translate.)"""
    assert ('after="ner"' in __solution__), "Ajoutes-tu le composant explicitement apres l'entity recognizer ?"
    assert (nlp.pipe_names[6] == 'animal_component'), "As-tu ajoute le composant apres l'entity recognizer ?"
    assert (len(doc.ents) == 2), 'As-tu correctement ajoute les entites ?'
    assert all(((ent.label_ == 'ANIMAL') for ent in doc.ents)), 'As-tu affecte le label ANIMAL?'
    __msg__.good("Bien joue ! Tu as contruit ton premier composant de pipeline pour la recherche de correspondance d'entites basee sur des regles.")
class FalServerlessConnection():
    """gRPC client for the fal Serverless isolate controller.

    Lazily opens one secure, trace-instrumented channel (via ``stub``) and
    offers helpers for user keys, aliases, secrets, application registration
    and hosted runs.  Usable as a context manager; exiting closes the channel.

    NOTE(review): the annotated class attributes plus
    ``field(default_factory=ExitStack)`` strongly suggest a stripped
    ``@dataclass`` decorator (as written, ``_stack`` would be a
    dataclasses.Field object); likewise ``stub`` is accessed everywhere as
    ``self.stub.<Rpc>`` and so must have been a ``@property``.  Confirm
    against the original file.
    """

    hostname: str
    credentials: Credentials
    _stack: ExitStack = field(default_factory=ExitStack)
    _stub: (isolate_proto.IsolateControllerStub | None) = None

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        # Closes the channel opened by stub(), if any.
        self._stack.close()

    def close(self):
        self._stack.close()

    def stub(self) -> isolate_proto.IsolateControllerStub:
        """Create (once) and return the controller stub over a secure
        channel with trace-context propagation."""
        if self._stub:
            return self._stub
        options = self.credentials.server_credentials.extra_options
        channel_creds = self.credentials.to_grpc()
        channel = self._stack.enter_context(grpc.secure_channel(self.hostname, channel_creds, options))
        channel = grpc.intercept_channel(channel, TraceContextInterceptor())
        self._stub = isolate_proto.IsolateControllerStub(channel)
        return self._stub

    def create_user_key(self, scope: KeyScope, alias: (str | None)) -> tuple[(str, str)]:
        """Create an API key; returns (secret, key_id)."""
        scope_proto = (isolate_proto.CreateUserKeyRequest.Scope.ADMIN if (scope is KeyScope.ADMIN) else isolate_proto.CreateUserKeyRequest.Scope.API)
        request = isolate_proto.CreateUserKeyRequest(scope=scope_proto, alias=alias)
        response = self.stub.CreateUserKey(request)
        return (response.key_secret, response.key_id)

    def list_user_keys(self) -> list[UserKeyInfo]:
        request = isolate_proto.ListUserKeysRequest()
        response: isolate_proto.ListUserKeysResponse = self.stub.ListUserKeys(request)
        return [UserKeyInfo(key.key_id, isolate_proto.datetime_from_timestamp(key.created_at), KeyScope.from_proto(key.scope), key.alias) for key in response.user_keys]

    def revoke_user_key(self, key_id) -> None:
        request = isolate_proto.RevokeUserKeyRequest(key_id=key_id)
        self.stub.RevokeUserKey(request)

    def define_environment(self, kind: str, **options: Any) -> isolate_proto.EnvironmentDefinition:
        """Wrap keyword options into an EnvironmentDefinition proto."""
        struct = isolate_proto.Struct()
        struct.update(options)
        return isolate_proto.EnvironmentDefinition(kind=kind, configuration=struct)

    def register(self, function: Callable[(..., ResultT)], environments: list[isolate_proto.EnvironmentDefinition], application_name: (str | None)=None, application_auth_mode: (Literal[('public', 'private', 'shared')] | None)=None, *, serialization_method: str=_DEFAULT_SERIALIZATION_METHOD, machine_requirements: (MachineRequirements | None)=None, metadata: (dict[(str, Any)] | None)=None) -> Iterator[isolate_proto.RegisterApplicationResult]:
        """Register *function* as an application; yields streamed partial
        results from the server."""
        wrapped_function = to_serialized_object(function, serialization_method)
        if machine_requirements:
            wrapped_requirements = isolate_proto.MachineRequirements(machine_type=machine_requirements.machine_type, keep_alive=machine_requirements.keep_alive, base_image=machine_requirements.base_image, exposed_port=machine_requirements.exposed_port, scheduler=machine_requirements.scheduler, scheduler_options=to_struct((machine_requirements.scheduler_options or {})), max_concurrency=machine_requirements.max_concurrency, max_multiplexing=machine_requirements.max_multiplexing)
        else:
            wrapped_requirements = None
        # Unknown/None auth mode deliberately defaults to PRIVATE.
        if (application_auth_mode == 'public'):
            auth_mode = isolate_proto.ApplicationAuthMode.PUBLIC
        elif (application_auth_mode == 'shared'):
            auth_mode = isolate_proto.ApplicationAuthMode.SHARED
        else:
            auth_mode = isolate_proto.ApplicationAuthMode.PRIVATE
        struct_metadata = None
        if metadata:
            struct_metadata = isolate_proto.Struct()
            struct_metadata.update(metadata)
        request = isolate_proto.RegisterApplicationRequest(function=wrapped_function, environments=environments, machine_requirements=wrapped_requirements, application_name=application_name, auth_mode=auth_mode, metadata=struct_metadata)
        for partial_result in self.stub.RegisterApplication(request):
            (yield from_grpc(partial_result))

    def scale(self, application_name: str, max_concurrency: (int | None)=None) -> None:
        # Not supported by this client version.
        raise NotImplementedError

    def update_application(self, application_name: str, keep_alive: (int | None)=None, max_multiplexing: (int | None)=None, max_concurrency: (int | None)=None) -> AliasInfo:
        """Update runtime settings of a deployed application."""
        request = isolate_proto.UpdateApplicationRequest(application_name=application_name, keep_alive=keep_alive, max_multiplexing=max_multiplexing, max_concurrency=max_concurrency)
        res: isolate_proto.UpdateApplicationResult = self.stub.UpdateApplication(request)
        return from_grpc(res.alias_info)

    def run(self, function: Callable[(..., ResultT)], environments: list[isolate_proto.EnvironmentDefinition], *, serialization_method: str=_DEFAULT_SERIALIZATION_METHOD, machine_requirements: (MachineRequirements | None)=None, setup_function: (Callable[([], InputT)] | None)=None) -> Iterator[HostedRunResult[ResultT]]:
        """Run *function* as a one-off hosted run; yields streamed results."""
        wrapped_function = to_serialized_object(function, serialization_method)
        if machine_requirements:
            wrapped_requirements = isolate_proto.MachineRequirements(machine_type=machine_requirements.machine_type, keep_alive=machine_requirements.keep_alive, base_image=machine_requirements.base_image, exposed_port=machine_requirements.exposed_port, scheduler=machine_requirements.scheduler, scheduler_options=to_struct((machine_requirements.scheduler_options or {})), max_concurrency=machine_requirements.max_concurrency, max_multiplexing=machine_requirements.max_multiplexing)
        else:
            wrapped_requirements = None
        request = isolate_proto.HostedRun(function=wrapped_function, environments=environments, machine_requirements=wrapped_requirements)
        if setup_function:
            request.setup_func.MergeFrom(to_serialized_object(setup_function, serialization_method))
        for partial_result in self.stub.Run(request):
            (yield from_grpc(partial_result))

    def create_alias(self, alias: str, revision: str, auth_mode: Literal[('public', 'private', 'shared')]):
        """Point *alias* at *revision* with the given auth mode."""
        if (auth_mode == 'public'):
            auth = isolate_proto.ApplicationAuthMode.PUBLIC
        elif (auth_mode == 'shared'):
            auth = isolate_proto.ApplicationAuthMode.SHARED
        else:
            auth = isolate_proto.ApplicationAuthMode.PRIVATE
        request = isolate_proto.SetAliasRequest(alias=alias, revision=revision, auth_mode=auth)
        self.stub.SetAlias(request)

    def delete_alias(self, alias: str) -> str:
        """Delete *alias*; returns the revision it pointed at."""
        request = isolate_proto.DeleteAliasRequest(alias=alias)
        res: isolate_proto.DeleteAliasResult = self.stub.DeleteAlias(request)
        return res.revision

    def list_aliases(self) -> list[AliasInfo]:
        request = isolate_proto.ListAliasesRequest()
        response: isolate_proto.ListAliasesResult = self.stub.ListAliases(request)
        return [from_grpc(alias) for alias in response.aliases]

    def set_secret(self, name: str, value: str) -> None:
        request = isolate_proto.SetSecretRequest(name=name, value=value)
        self.stub.SetSecret(request)

    def delete_secret(self, name: str) -> None:
        # Deletion is expressed as setting the secret's value to None.
        request = isolate_proto.SetSecretRequest(name=name, value=None)
        self.stub.SetSecret(request)

    def list_secrets(self) -> list[ServerlessSecret]:
        request = isolate_proto.ListSecretsRequest()
        response = self.stub.ListSecrets(request)
        return [ServerlessSecret(name=secret.name, created_at=isolate_proto.datetime_from_timestamp(secret.created_time)) for secret in response.secrets]
class OptionSeriesTreegraphSonificationPointgrouping(Options):
    """Point-grouping options for treegraph sonification.

    NOTE(review): flattened source had each getter/setter as two plain defs
    with the same name (setter shadowing getter); restored as
    @property/@setter pairs per the generator convention.
    """

    @property
    def algorithm(self):
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
def initializePlugin(mobject):
    """Maya plugin entry point: register the trajectory drawer locator node.

    On failure, logs to stderr and re-raises so Maya reports the load error.
    """
    mplugin = OpenMayaMPx.MFnPlugin(mobject, 'E. Ozgur Yilmaz', __version__, 'Any')
    try:
        mplugin.registerNode(kPluginNodeTypeName, oyTrajectoryDrawerNodeId, nodeCreator, nodeInitializer, OpenMayaMPx.MPxNode.kLocatorNode)
    except:
        # Bare except is tolerable here: the exception is always re-raised.
        sys.stderr.write(('Failed to register node: %s' % kPluginNodeTypeName))
        raise
def test_chunker():
    """chunker() must split on boundary sentinels (-1) and never emit empty
    chunks for leading/trailing/only-boundary or empty inputs."""
    # Boundary in the middle.
    data = [1, 2, (- 1), 3, 4]
    expected = [[1, 2], [3, 4]]
    result = list(chunker(data, is_boundary=(lambda x: (x == (- 1)))))
    assert (result == expected)
    # Leading boundary: no empty first chunk.
    data = [(- 1), 1, 2, (- 1), 3, 4]
    expected = [[1, 2], [3, 4]]
    result = list(chunker(data, is_boundary=(lambda x: (x == (- 1)))))
    assert (result == expected)
    # Trailing boundary: no empty last chunk.
    data = [1, 2, (- 1), 3, 4, (- 1)]
    expected = [[1, 2], [3, 4]]
    result = list(chunker(data, is_boundary=(lambda x: (x == (- 1)))))
    assert (result == expected)
    # Only a boundary: nothing at all.
    data = [(- 1)]
    expected = []
    result = list(chunker(data, is_boundary=(lambda x: (x == (- 1)))))
    assert (result == expected)
    # Empty input.
    data = []
    expected = []
    result = list(chunker(data, is_boundary=(lambda x: (x == (- 1)))))
    assert (result == expected)
class _ProjectFields():
    """Shared field declarations for Copr project schema objects."""
    # NOTE(review): Url/String/Boolean/Integer are project-declared scalar
    # types; field semantics below are inferred from names — confirm upstream.
    homepage: Url
    contact: String
    description: String
    instructions: String
    devel_mode: Boolean
    unlisted_on_hp: Boolean          # hide project from the homepage listing
    auto_prune: Boolean
    enable_net: Boolean              # allow network access during builds
    bootstrap: String
    isolation: String
    module_hotfixes: Boolean
    appstream: Boolean
    packit_forge_projects_allowed: String
    follow_fedora_branching: Boolean
    repo_priority: Integer
class OptionPlotoptionsLollipopStatesHover(Options):
    """Highcharts `plotOptions.lollipop.states.hover` options.

    Getter/setter pairs share a name; the @property decorators appear to have
    been stripped from this dump — TODO confirm.
    """

    def animation(self) -> 'OptionPlotoptionsLollipopStatesHoverAnimation':
        # Nested animation sub-options.
        return self._config_sub_data('animation', OptionPlotoptionsLollipopStatesHoverAnimation)

    def enabled(self):
        # Hover state is enabled by default.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def halo(self) -> 'OptionPlotoptionsLollipopStatesHoverHalo':
        # Nested halo sub-options.
        return self._config_sub_data('halo', OptionPlotoptionsLollipopStatesHoverHalo)

    def lineWidth(self):
        # No explicit default (inherits from the series).
        return self._config_get(None)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def lineWidthPlus(self):
        # Pixels added to the line width on hover; default 1.
        return self._config_get(1)

    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)

    def marker(self) -> 'OptionPlotoptionsLollipopStatesHoverMarker':
        # Nested marker sub-options.
        return self._config_sub_data('marker', OptionPlotoptionsLollipopStatesHoverMarker)
def fortios_endpoint_control(data, fos, check_mode):
    """Dispatch the endpoint-control 'registered-forticlient' task.

    Returns the raw API response in check mode; otherwise the 4-tuple
    (failed, changed, response, diff) that the Ansible module expects.
    """
    fos.do_member_operation('endpoint-control', 'registered-forticlient')
    if data['endpoint_control_registered_forticlient']:
        resp = endpoint_control_registered_forticlient(data, fos, check_mode)
    else:
        # fail_json raises, so `resp` is never read on this path.
        fos._module.fail_json(msg=('missing task body: %s' % 'endpoint_control_registered_forticlient'))
    if check_mode:
        return resp
    # failed flag; changed flag (revision_changed defaults to True when the
    # response has no such key); raw response; empty diff.
    return ((not is_successful_status(resp)), (is_successful_status(resp) and (resp['revision_changed'] if ('revision_changed' in resp) else True)), resp, {})
class RemoteRegistry(BaseRegistry):
    """Registry backend that checks and pushes packages against the remote registry."""

    def __init__(self, ctx: Context) -> None:
        """Store the click context used for remote operations."""
        self.ctx = ctx

    def check_item_present(self, item_type_plural: str, public_id: PublicId) -> None:
        """Raise ClickException if the package is not present in the remote registry."""
        item_type = ITEM_TYPE_PLURAL_TO_TYPE[item_type_plural]
        try:
            get_package_meta(item_type, public_id)
        except click.ClickException as e:
            # Chain the original error (B904) so the root cause stays in the traceback.
            raise click.ClickException(f'Package not found in remote registry: {str(e)}. You can try to add {PUSH_ITEMS_FLAG} flag.') from e

    def push_item(self, item_type_plural: str, public_id: PublicId) -> None:
        """Push the local package identified by *public_id* to the remote registry."""
        item_type = ITEM_TYPE_PLURAL_TO_TYPE[item_type_plural]
        _push_item_remote(self.ctx, item_type, public_id)
def test_handler(test_client_factory):
    """A custom 500 handler should produce the JSON body it returns."""
    async def failing_app(scope, receive, send):
        raise RuntimeError('Something went wrong')

    def error_500(request, exc):
        return JSONResponse({'detail': 'Server Error'}, status_code=500)

    wrapped = ServerErrorMiddleware(failing_app, handler=error_500)
    client = test_client_factory(wrapped, raise_server_exceptions=False)
    response = client.get('/')
    assert response.status_code == 500
    assert response.json() == {'detail': 'Server Error'}
class WFGSeriesScraper(common.LogBase.LoggerMixin):
    """Scrape series listings from the site and resolve each to its real URL.

    NOTE(review): the string literals assigned to `url_base` and `url` look
    truncated in this dump — confirm against the original source.
    """
    loggerPath = 'Main.Wat'
    url_base = '

    def __init__(self):
        super().__init__()
        # Robust HTTP client with retry behavior.
        self.wg = WebRequest.WebGetRobust()

    def get_intermediate_listing(self):
        """Walk listing pages, collecting (href, title) pairs until an empty page."""
        ret = []
        for idx in itertools.count():
            url = '
            try:
                soup = self.wg.getSoup(url)
            except WebRequest.FetchFailureError:
                # NOTE(review): `continue` advances to the next page forever if
                # fetches keep failing — consider a break/retry limit; confirm intent.
                continue
            content_div = soup.find('div', class_='listings')
            sdivs = content_div.find_all('div', class_='summary')
            self.log.info('Found %s series items on page (%s so far)', len(sdivs), len(ret))
            if (not sdivs):
                # An empty listing page marks the end of pagination.
                break
            for sdiv in sdivs:
                linktag = sdiv.h3.a
                ret.append((linktag.get('href'), linktag.get_text(strip=True)))
        self.log.info('Found %s series', len(ret))
        return ret

    def resolve_actual_urls(self, s_page_url, s_title):
        """Follow an intermediate listing page to the series' actual URL.

        Returns (title, params) with page metadata, or None on fetch failure.
        """
        self.log.info('Fetching actual URL for %s -> %s', s_title, s_page_url)
        try:
            soup = self.wg.getSoup(s_page_url)
        except WebRequest.FetchFailureError:
            return None
        linkdiv = soup.find('div', class_='center')
        actual_url = linkdiv.a.get('href')
        param = common.management.util.get_page_title(self.wg, actual_url)
        param['url'] = actual_url
        # Default to "not a WordPress site" unless get_page_title says otherwise.
        param.setdefault('is-wp', False)
        return (s_title, param)

    def get_series(self):
        """Resolve every intermediate listing entry and print the result."""
        surls = self.get_intermediate_listing()
        for (surl, stitle) in surls:
            resp = self.resolve_actual_urls(surl, stitle)
            print('Site: ', resp)
class OptionSeriesParetoSonificationTracksMappingFrequency(Options):
    """Highcharts `series.pareto.sonification.tracks.mapping.frequency` options.

    Getter/setter pairs share a name; the @property decorators appear to have
    been stripped from this dump — TODO confirm.
    """

    def mapFunction(self):
        # Mapping function (e.g. 'linear'/'logarithmic'); no default set here.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        # Data property the frequency maps to; no default set here.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Range context ('series'/'chart'/...) for min/max; no default set here.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
('cuda.perm021fc_ccr_bias_permute.config')
def config(func_attrs, dtype='float16'):
    """Populate func_attrs['op_instance'] with CUTLASS op configs for this kernel.

    NOTE(review): the bare string line above looks like a stripped registry
    decorator (e.g. @registry.reg('cuda....config')) — confirm upstream.
    """
    def fproc(op):
        # Normalize one CUTLASS op: column-major A and B, row-major C, with the
        # epilogue and permute layout taken from the function attributes.
        import cutlass_lib
        return common.default_fproc(op=op, a_layout=cutlass_lib.library.LayoutType.ColumnMajor, b_layout=cutlass_lib.library.LayoutType.ColumnMajor, c_layout=cutlass_lib.library.LayoutType.RowMajor, dtype=func_attrs['inputs'][0].dtype(), epilogue_name=func_attrs['epilogue'], permute_layout=func_attrs['layout'])
    func_attrs['op_instance'] = common_permute.extract_config(fproc, func_attrs)
class OptionPlotoptionsPictorialDragdrop(Options):
    """Highcharts `plotOptions.pictorial.dragDrop` options.

    Getter/setter pairs share a name; the @property decorators appear to have
    been stripped from this dump — TODO confirm.
    """

    def draggableX(self):
        # Enable horizontal dragging; no default set here.
        return self._config_get(None)

    def draggableX(self, flag: bool):
        self._config(flag, js_type=False)

    def draggableY(self):
        # Enable vertical dragging; no default set here.
        return self._config_get(None)

    def draggableY(self, flag: bool):
        self._config(flag, js_type=False)

    def dragHandle(self) -> 'OptionPlotoptionsPictorialDragdropDraghandle':
        # Nested drag-handle sub-options.
        return self._config_sub_data('dragHandle', OptionPlotoptionsPictorialDragdropDraghandle)

    def dragMaxX(self):
        return self._config_get(None)

    def dragMaxX(self, num: float):
        self._config(num, js_type=False)

    def dragMaxY(self):
        return self._config_get(None)

    def dragMaxY(self, num: float):
        self._config(num, js_type=False)

    def dragMinX(self):
        return self._config_get(None)

    def dragMinX(self, num: float):
        self._config(num, js_type=False)

    def dragMinY(self):
        return self._config_get(None)

    def dragMinY(self, num: float):
        self._config(num, js_type=False)

    def dragPrecisionX(self):
        # Snap precision in axis units; 0 means no snapping.
        return self._config_get(0)

    def dragPrecisionX(self, num: float):
        self._config(num, js_type=False)

    def dragPrecisionY(self):
        return self._config_get(0)

    def dragPrecisionY(self, num: float):
        self._config(num, js_type=False)

    def dragSensitivity(self):
        # Pixels the pointer must move before a drag starts; default 2.
        return self._config_get(2)

    def dragSensitivity(self, num: float):
        self._config(num, js_type=False)

    def groupBy(self):
        # Point property used to drag points as a group; no default set here.
        return self._config_get(None)

    def groupBy(self, text: str):
        self._config(text, js_type=False)

    def guideBox(self) -> 'OptionPlotoptionsPictorialDragdropGuidebox':
        # Nested guide-box sub-options.
        return self._config_sub_data('guideBox', OptionPlotoptionsPictorialDragdropGuidebox)

    def liveRedraw(self):
        # Redraw the chart while dragging; default True.
        return self._config_get(True)

    def liveRedraw(self, flag: bool):
        self._config(flag, js_type=False)
def test_get_all_nodes(tmpdir):
    """The cluster should expose exactly one frontend node and two compute nodes."""
    nodes = make_cluster(tmpdir).get_all_nodes()
    assert len(nodes) == 3
    frontends = sum(1 for node in nodes if node.name.startswith('frontend'))
    computes = sum(1 for node in nodes if node.name.startswith('compute'))
    assert frontends == 1
    assert computes == 2
class DataMonitoringAlerts(DataMonitoring):
    """Fetches new dbt alerts, formats/groups them, and sends them through the
    configured integration (Slack/Teams/etc.), recording run telemetry in
    `self.execution_properties` along the way."""

    def __init__(self, config: Config, tracking: Optional[Tracking]=None, selector_filter: FiltersSchema=FiltersSchema(), force_update_dbt_package: bool=False, disable_samples: bool=False, send_test_message_on_success: bool=False, global_suppression_interval: int=0, override_config: bool=False):
        super().__init__(config, tracking, force_update_dbt_package, disable_samples, selector_filter)
        self.global_suppression_interval = global_suppression_interval
        self.override_config = override_config
        self.alerts_api = AlertsAPI(self.internal_dbt_runner, self.config, self.elementary_database_and_schema, self.global_suppression_interval, self.override_config)
        # Running count of alerts successfully dispatched during this run.
        self.sent_alert_count = 0
        self.send_test_message_on_success = send_test_message_on_success
        # NOTE(review): mirrors `override_config`; kept separately for the
        # integration factory — confirm whether both attributes are needed.
        self.override_config_defaults = override_config
        self.alerts_integration = self._get_integration_client()

    def _get_integration_client(self) -> BaseIntegration:
        """Build the messaging integration selected by the configuration."""
        return Integrations.get_integration(config=self.config, tracking=self.tracking, override_config_defaults=self.override_config_defaults)

    def _fetch_data(self, days_back: int) -> AlertsSchema:
        """Pull new (unsent) alerts from the last *days_back* days."""
        return self.alerts_api.get_new_alerts(days_back=days_back, disable_samples=self.disable_samples, filter=self.selector_filter)

    def _format_alerts(self, alerts: AlertsSchema) -> List[Union[(TestAlertModel, ModelAlertModel, SourceFreshnessAlertModel, GroupedByTableAlerts)]]:
        """Format each alert and group table-grouped ones; returns all alerts
        sorted by detection time (undated alerts sort last)."""
        formatted_alerts = []
        grouped_by_table_alerts = []
        model_ids_to_alerts_map = defaultdict((lambda : []))
        default_alerts_group_by_strategy = GroupingType(self.config.slack_group_alerts_by)
        for alert in alerts.all_alerts:
            # Per-alert grouping config wins over the global default.
            group_alerts_by = (alert.group_alerts_by or default_alerts_group_by_strategy)
            formatted_alert = alert.format_alert(timezone=self.config.timezone, report_url=self.config.report_url, elementary_database_and_schema=self.elementary_database_and_schema, global_suppression_interval=self.global_suppression_interval, override_config=self.override_config)
            try:
                grouping_type = GroupingType(group_alerts_by)
                if (grouping_type == GroupingType.BY_TABLE):
                    model_ids_to_alerts_map[formatted_alert.model_unique_id].append(formatted_alert)
                else:
                    formatted_alerts.append(formatted_alert)
            except ValueError:
                # Invalid grouping value: fall back to ungrouped and log.
                formatted_alerts.append(formatted_alert)
                logger.error(f"Failed to extract value as a group-by config: '{group_alerts_by}'. Allowed Values: {list(GroupingType.__members__.keys())} Ignoring it for now and default grouping strategy will be used")
        for alerts_by_model in model_ids_to_alerts_map.values():
            grouped_by_table_alerts.append(GroupedByTableAlerts(alerts=alerts_by_model))
        self.execution_properties['had_group_by_table'] = (len(grouped_by_table_alerts) > 0)
        self.execution_properties['had_group_by_alert'] = (len(formatted_alerts) > 0)
        all_alerts = (formatted_alerts + grouped_by_table_alerts)
        # Alerts without a detected_at timestamp sort to the end.
        return sorted(all_alerts, key=(lambda alert: (alert.detected_at or datetime.max)))

    def _send_test_message(self):
        """Send a connectivity test message to the configured channel."""
        self.alerts_integration.send_test_message(channel_name=self.config.slack_channel_name)

    def _send_alerts(self, alerts: List[Union[(TestAlertModel, ModelAlertModel, SourceFreshnessAlertModel, GroupedByTableAlerts)]]):
        """Send each alert, then mark the successfully-sent ones as sent per type."""
        if (not alerts):
            self.execution_properties['sent_alert_count'] = self.sent_alert_count
            return
        sent_alert_ids_by_type: Dict[(ResourceType, List[str])] = {ResourceType.TEST: [], ResourceType.MODEL: [], ResourceType.SOURCE_FRESHNESS: []}
        alerts_with_progress_bar = alive_it(alerts, title='Sending alerts')
        sent_successfully_alerts = []
        for alert in alerts_with_progress_bar:
            sent_successfully = self.alerts_integration.send_alert(alert=alert)
            if sent_successfully:
                # Grouped alerts are flattened so each member is marked sent.
                if isinstance(alert, GroupedByTableAlerts):
                    sent_successfully_alerts.extend(alert.alerts)
                else:
                    sent_successfully_alerts.append(alert)
            else:
                if isinstance(alert, GroupedByTableAlerts):
                    for grouped_alert in alert.alerts:
                        logger.error(f'Could not send the alert - {grouped_alert.id}. Full alert: {json.dumps(grouped_alert.data)}')
                else:
                    logger.error(f'Could not send the alert - {alert.id}. Full alert: {json.dumps(alert.data)}')
                # Any send failure marks the whole run unsuccessful.
                self.success = False
        for sent_alert in sent_successfully_alerts:
            if isinstance(sent_alert, TestAlertModel):
                sent_alert_ids_by_type[ResourceType.TEST].append(sent_alert.id)
            elif isinstance(sent_alert, ModelAlertModel):
                sent_alert_ids_by_type[ResourceType.MODEL].append(sent_alert.id)
            elif isinstance(sent_alert, SourceFreshnessAlertModel):
                sent_alert_ids_by_type[ResourceType.SOURCE_FRESHNESS].append(sent_alert.id)
        for (resource_type, alert_ids) in sent_alert_ids_by_type.items():
            self.sent_alert_count += len(alert_ids)
            # Persist sent status so the same alerts aren't re-sent next run.
            self.alerts_api.update_sent_alerts(alert_ids, resource_type)
        self.execution_properties['sent_alert_count'] = self.sent_alert_count

    def _skip_alerts(self, alerts: AlertsSchema):
        """Mark alerts flagged as skippable so they are not sent later."""
        self.alerts_api.skip_alerts(alerts.tests.skip, ResourceType.TEST)
        self.alerts_api.skip_alerts(alerts.models.skip, ResourceType.MODEL)
        self.alerts_api.skip_alerts(alerts.source_freshnesses.skip, ResourceType.SOURCE_FRESHNESS)

    def run_alerts(self, days_back: int, dbt_full_refresh: bool=False, dbt_vars: Optional[dict]=None) -> bool:
        """Full pipeline: aggregate via dbt, fetch, skip, format, and send alerts.

        Returns True when the whole run (including every send) succeeded.
        """
        logger.info('Running internal dbt run to aggregate alerts')
        success = self.internal_dbt_runner.run(models='elementary_cli.alerts', full_refresh=dbt_full_refresh, vars=dbt_vars)
        self.execution_properties['alerts_run_success'] = success
        if (not success):
            # Without aggregation there is nothing reliable to send; bail out.
            logger.info('Could not aggregate alerts successfully')
            self.success = False
            self.execution_properties['success'] = self.success
            return self.success
        alerts = self._fetch_data(days_back)
        self._skip_alerts(alerts)
        formatted_alerts = self._format_alerts(alerts=alerts)
        self._send_alerts(formatted_alerts)
        if (self.send_test_message_on_success and (alerts.count == 0)):
            # No alerts: optionally confirm the channel still works.
            self._send_test_message()
        self.execution_properties['alert_count'] = alerts.count
        self.execution_properties['elementary_test_count'] = len([alert for alert in formatted_alerts if (isinstance(alert, TestAlertModel) and alert.is_elementary_test)])
        self.execution_properties['has_subscribers'] = any((alert.subscribers for alert in alerts.all_alerts))
        self.execution_properties['run_end'] = True
        self.execution_properties['success'] = self.success
        return self.success
class KeyParser():
    """Extract an API key from a request, honoring an optional custom header."""

    keyword = 'Api-Key'

    def get(self, request: HttpRequest) -> typing.Optional[str]:
        """Prefer the configured API_KEY_CUSTOM_HEADER; fall back to Authorization."""
        custom_header = getattr(settings, 'API_KEY_CUSTOM_HEADER', None)
        if custom_header is None:
            return self.get_from_authorization(request)
        return self.get_from_header(request, custom_header)

    def get_from_authorization(self, request: HttpRequest) -> typing.Optional[str]:
        """Parse '<keyword> <key>' from the Authorization header, or return None."""
        authorization = request.META.get('HTTP_AUTHORIZATION', '')
        if not authorization:
            return None
        keyword, separator, key = authorization.partition(' ')
        if not separator or keyword.lower() != self.keyword.lower():
            return None
        return key

    def get_from_header(self, request: HttpRequest, name: str) -> typing.Optional[str]:
        """Return the named header's value, mapping empty values to None."""
        return request.META.get(name) or None
class PreprocessFunc(object):
    """Move batch images to a device and pad them into one stacked tensor."""

    def __init__(self, size_divisibility, device):
        # Padding constraint for ImageList and target device for the tensors.
        self.size_divisibility = size_divisibility
        self.device = device

    def __call__(self, batched_inputs: List[Dict[(str, Any)]]) -> torch.Tensor:
        """Collect each input's 'image' on the device and batch them together."""
        moved = [entry['image'].to(self.device) for entry in batched_inputs]
        batched = ImageList.from_tensors(moved, self.size_divisibility)
        return batched.tensor
class FileRequest(Request):
    """Request that uploads files as a multipart body alongside the JSON payload."""

    def __init__(self, files=None, progress_callback=None, **kwargs):
        """
        :param files: mapping of field name -> file tuple, as accepted by
            MultipartEncoder (may be None).
        :param progress_callback: callable invoked with the encoder monitor as
            the upload progresses (may be None).
        """
        super(FileRequest, self).__init__(**kwargs)
        self.files = files
        self.progress_callback = progress_callback

    def _request_params(self, *args, **kwargs):
        """Build request params, wrapping the JSON payload into a multipart body."""
        params = super(FileRequest, self)._request_params(*args, **kwargs)
        # Copy so the caller's `files` dict is not mutated by the 'json' key below
        # (the original wrote into self.files directly).
        data = dict(self.files or {})
        data['json'] = ('json', json.dumps(params['json']), 'application/json')
        callback = (self.progress_callback or (lambda x: x))
        m = MultipartEncoder(data)
        # The payload now travels in the multipart body, not as plain JSON.
        params['json'] = None
        params['data'] = MultipartEncoderMonitor(m, callback)
        params['headers'] = {'Content-Type': params['data'].content_type}
        return params
class IndexDataModel(AbstractDataModel):
    """Toy hierarchical data model whose cell values are '<row> <column>' labels."""

    index_manager = Instance(TupleIndexManager, ())
    shape = List(Int, [2, 3, 4])

    def get_column_count(self):
        """Columns come from the last axis of the shape."""
        return self.shape[-1]

    def can_have_children(self, row):
        """Rows can nest until the second-to-last axis is reached."""
        return len(row) < len(self.shape) - 1

    def get_row_count(self, row):
        """Leaf rows have no children; otherwise the next axis size applies."""
        depth = len(row)
        if depth == len(self.shape) - 1:
            return 0
        return self.shape[depth]

    def get_value(self, row, column):
        """Render the (row, column) pair as a plain text label."""
        return '{} {}'.format(row, column)

    def get_value_type(self, row, column):
        """Every cell is read-only text."""
        return TextValue(is_editable=False)
.integration_postgres
.integration
class TestRetrievingData():
def connector(self, integration_postgres_config):
return get_connector(integration_postgres_config)
def traversal_node(self, example_datasets, integration_postgres_config):
dataset = Dataset(**example_datasets[0])
graph = convert_dataset_to_graph(dataset, integration_postgres_config.key)
node = Node(graph, graph.collections[1])
traversal_node = TraversalNode(node)
return traversal_node
('fides.api.graph.traversal.TraversalNode.incoming_edges')
def test_retrieving_data(self, mock_incoming_edges: Mock, privacy_request, db, connector, traversal_node, postgres_integration_db):
mock_incoming_edges.return_value = {Edge(FieldAddress('fake_dataset', 'fake_collection', 'email'), FieldAddress('postgres_example_test_dataset', 'customer', 'email'))}
results = connector.retrieve_data(traversal_node, Policy(), privacy_request, {'email': ['customer-']})
assert (len(results) is 1)
assert (results == [{'address_id': 1, 'created': datetime(2020, 4, 1, 11, 47, 42), 'email': 'customer-', 'id': 1, 'name': 'John Customer'}])
('fides.api.graph.traversal.TraversalNode.incoming_edges')
def test_retrieving_data_no_input(self, mock_incoming_edges: Mock, privacy_request, db, connector, traversal_node):
mock_incoming_edges.return_value = {Edge(FieldAddress('fake_dataset', 'fake_collection', 'email'), FieldAddress('postgres_example_test_dataset', 'customer', 'email'))}
assert ([] == connector.retrieve_data(traversal_node, Policy(), privacy_request, {'email': []}))
assert ([] == connector.retrieve_data(traversal_node, Policy(), privacy_request, {}))
assert ([] == connector.retrieve_data(traversal_node, Policy(), privacy_request, {'bad_key': ['test']}))
assert ([] == connector.retrieve_data(traversal_node, Policy(), privacy_request, {'email': [None]}))
assert ([] == connector.retrieve_data(traversal_node, Policy(), privacy_request, {'email': None}))
('fides.api.graph.traversal.TraversalNode.incoming_edges')
def test_retrieving_data_input_not_in_table(self, mock_incoming_edges: Mock, db, privacy_request, connection_config, example_datasets, connector, traversal_node, postgres_integration_db):
mock_incoming_edges.return_value = {Edge(FieldAddress('fake_dataset', 'fake_collection', 'email'), FieldAddress('postgres_example_test_dataset', 'customer', 'email'))}
results = connector.retrieve_data(traversal_node, Policy(), privacy_request, {'email': ['customer_not_in_']})
assert (results == []) |
def on_message(client, device, msg):
    """MQTT callback: route a command topic to the matching Broadlink device action.

    `device` is either a single device or a dict of topic-subprefix -> device.
    NOTE(review): indentation was stripped from this dump; the nesting below
    is reconstructed from the control flow — confirm against the original.
    """
    # Strip the global topic prefix to get the bare device command.
    command = msg.topic[len(topic_prefix):]
    if isinstance(device, dict):
        # Multi-device setup: select the device whose subprefix heads the command.
        for subprefix in device:
            if command.startswith(subprefix):
                device = device[subprefix]
                command = command[len(subprefix):]
                break
        else:
            logging.error(('MQTT topic %s has no recognized device reference, expected one of %s' % (msg.topic, ','.join(device.keys()))))
            return
    # Ignore our own sensor/state publications echoed back to this subscriber.
    if ((command == 'temperature') or (command == 'humidity') or (command == 'energy') or (command == 'sensors') or (command == 'position') or (command == 'state') or command.startswith('state/') or command.startswith('sensor/')):
        return
    try:
        action = msg.payload.decode('utf-8').lower()
        logging.debug(((('Received MQTT message ' + msg.topic) + ' ') + action))
        if (command == 'power'):
            # Smart plugs: single relay on/off.
            if ((device.type == 'SP1') or (device.type == 'SP2') or (device.type == 'SP3S')):
                state = ((action == 'on') or (action == '1'))
                logging.debug('Setting power state to {0}'.format(state))
                device.set_power((1 if state else 0))
                return
            if (device.type == 'MP1'):
                # Power strip: payload is '<socket>/<on|off|1|0>'.
                parts = action.split('/', 2)
                if (len(parts) == 2):
                    sid = int(parts[0])
                    state = ((parts[1] == 'on') or (parts[1] == '1'))
                    logging.debug('Setting power state of socket {0} to {1}'.format(sid, state))
                    device.set_power(sid, state)
                    return
            if (device.type == 'BG1'):
                # Dual socket: switch both relays together.
                state = ((action == 'on') or (action == '1'))
                logging.debug('Setting power state of all sockets to {0}'.format(state))
                device.set_state(pwr1=state, pwr2=state)
                return
        if (command.startswith('power/') and (device.type == 'MP1')):
            # Per-socket topic form: power/<socket>.
            sid = int(command[6:])
            state = ((action == 'on') or (action == '1'))
            logging.debug('Setting power state of socket {0} to {1}'.format(sid, state))
            device.set_power(sid, state)
            return
        if (command.startswith('power/') and (device.type == 'BG1')):
            sid = int(command[6:])
            state = ((action == 'on') or (action == '1'))
            logging.debug('Setting power state of socket {0} to {1}'.format(sid, state))
            if (sid == 1):
                device.set_state(pwr1=state)
            elif (sid == 2):
                device.set_state(pwr2=state)
            return
        if ((command == 'brightness') and (device.type == 'BG1')):
            state = int(action)
            logging.debug('Setting led brightness to {0}'.format(state))
            device.set_state(idcbrightness=state)
            return
        if (command == 'action'):
            if (device.type == 'Dooya DT360E'):
                # Curtain motor: open/close/stop, publishing the resulting position.
                if (action == 'open'):
                    logging.debug('Opening curtain')
                    device.open()
                    device.publish(100)
                elif (action == 'close'):
                    logging.debug('Closing curtain')
                    device.close()
                    device.publish(0)
                elif (action == 'stop'):
                    logging.debug('Stopping curtain')
                    device.stop()
                    device.publish(device.get_percentage())
                else:
                    logging.warning(('Unrecognized curtain action ' + action))
                return
        if ((command == 'set') and (device.type == 'Dooya DT360E')):
            percentage = int(action)
            logging.debug('Setting curtain position to {0}'.format(percentage))
            device.set_percentage_and_wait(percentage)
            device.publish(device.get_percentage())
            return
        if ((device.type == 'RM2') or (device.type == 'RM4') or (device.type == 'RM4PRO') or (device.type == 'RMMINI') or (device.type == 'RM4MINI') or (device.type == 'RMMINIB') or (device.type == 'RMPRO')):
            # IR/RF remotes: commands map to recorded code files under commands/.
            file = ((dirname + 'commands/') + command)
            handy_file = ((file + '/') + action)
            if (command == 'macro'):
                file = ((dirname + 'macros/') + action)
                macro(device, file)
                return
            elif ((action == '') or (action == 'auto')):
                # Record when the file is missing, otherwise replay it.
                record_or_replay(device, file)
                return
            elif (action == 'autorf'):
                record_or_replay_rf(device, file)
                return
            elif os.path.isfile(handy_file):
                # A per-action file exists (e.g. commands/tv/on): replay it directly.
                replay(device, handy_file)
                return
            elif (action == 'record'):
                record(device, file)
                return
            elif (action == 'recordrf'):
                record_rf(device, file)
                return
            elif (action == 'replay'):
                replay(device, file)
                return
            elif (action == 'macro'):
                file = ((dirname + 'macros/') + command)
                macro(device, file)
                return
        logging.warning(('Unrecognized MQTT message ' + action))
    except Exception:
        # Never let a bad message kill the MQTT loop; log the traceback.
        logging.exception('Error')
def parse_remaining(delta):
    """Render a timedelta as a rough human-readable string.

    Picks the coarsest unit whose count would be at least 2 (days, hours,
    minutes, then seconds) and prefixes '-' for negative deltas.
    """
    negative = delta < timedelta(0)
    delta = abs(delta)
    total_seconds = delta.days * 24 * 3600 + delta.seconds
    if total_seconds >= 2 * 24 * 3600:
        body = '{} days'.format(delta.days)
    elif total_seconds >= 2 * 3600:
        body = '{} hours'.format(total_seconds // 3600)
    elif total_seconds >= 2 * 60:
        body = '{} minutes'.format(total_seconds // 60)
    else:
        body = '{} seconds'.format(total_seconds)
    return ('-' if negative else '') + body
def ask_path(string, default_path):
    """Prompt until the user supplies an existing filesystem path.

    An empty answer selects *default_path*. Returns the validated path.
    """
    v = None
    while (v is None):
        v = input(('%s [%s] ' % (string, default_path))).strip()
        if (v == ''):
            v = default_path
        if (not os.path.exists(v)):
            # Was a leftover Python 2 `print >> sys.stderr` converted into a
            # tuple expression, which raises TypeError under Python 3.
            print(v, 'does not exist.', file=sys.stderr)
            v = None
    return v
def GetModule(tlib):
    """Create (or load) the comtypes wrapper module for a type library.

    `tlib` may be a filename-like string (resolved relative to the caller's
    module directory) or an already-loaded type library object. Returns the
    friendly-named module when one can be generated, otherwise the raw
    wrapper module.
    """
    if isinstance(tlib, base_text_type):
        tlib_string = tlib
        # Resolve relative paths against the *caller's* file location.
        frame = sys._getframe(1)
        _file_ = frame.f_globals.get('__file__', None)
        (pathname, is_abs) = _resolve_filename(tlib_string, (_file_ and os.path.dirname(_file_)))
        logger.debug('GetModule(%s), resolved: %s', pathname, is_abs)
        tlib = _load_tlib(pathname)
        if (not is_abs):
            # Relative name: ask the loaded type library where it lives.
            pathname = tlbparser.get_tlib_filename(tlib)
            if (pathname is None):
                logger.info('GetModule(%s): could not resolve to a filename', tlib)
                pathname = tlib_string
            # A non-absolute fallback name is acceptable; an absolute one must exist.
            assert ((not os.path.isabs(pathname)) or os.path.exists(pathname))
    else:
        pathname = None
        tlib = _load_tlib(tlib)
    logger.debug('GetModule(%s)', tlib.GetLibAttr())
    mod = _create_wrapper_module(tlib, pathname)
    modulename = codegenerator.name_friendly_module(tlib)
    if (modulename is None):
        return mod
    if (sys.version_info < (3, 0)):
        # Python 2 module names must be byte strings in the filesystem encoding.
        modulename = modulename.encode('mbcs')
    return _create_friendly_module(tlib, modulename)
.parametrize('event_signature,expected', (('Transfer(address,address,uint256)', '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef'),))
def test_event_signature_to_log_topic(event_signature, expected):
bytes_topic = event_signature_to_log_topic(event_signature)
hex_topic = encode_hex(bytes_topic)
assert (hex_topic == expected) |
class FrontEdge(edges.Edge):
    """Edge that cuts a row of rounded tool slots (boxes.py-style edge)."""

    def __call__(self, length, **kw):
        # Slot geometry: ws = slot width, wt = tool pitch, ds = slot depth.
        ws = self.slot_width
        wt = self.tool_width
        ds = self.slot_depth
        # r1: outer corner radius, limited by half the depth and the land
        # between slots; r2: slot-bottom radius, limited by half the slot width.
        r1 = min(self.radius, (ds / 2), ((wt - ws) / 2))
        r2 = min(self.radius, (ws / 2))
        # w: straight land on each side of a slot after the outer radius.
        w = (((wt - ws) / 2) - r1)
        for i in range(self.number):
            # One slot outline: land, turn in, side wall, bottom corners,
            # bottom, other side wall, turn out, land.
            self.polyline(w, (90, r1), ((ds - r1) - r2), ((- 90), r2), (ws - (2 * r2)), ((- 90), r2), ((ds - r1) - r2), (90, r1), w)
class Message():
    """Lightweight packet-in style message wrapping keyword match fields."""

    def __init__(self, *args, **kwargs):
        # Positional args are kept verbatim; the kwargs dict doubles as the
        # match structure while the required fields get dedicated attributes.
        self.args = args
        self.match = kwargs
        self.datapath = kwargs['datapath']
        self.cookie = kwargs['cookie']
        self.port = kwargs['port']
        self.data = kwargs['data']
        self.total_len = len(self.data)
class TestZebraMessage(unittest.TestCase):
    """Tests for zebra.ZebraMessage.get_header_size version handling."""

    def test_get_header_size(self):
        """Known protocol versions map to their fixed header sizes."""
        eq_(zebra.ZebraMessage.V0_HEADER_SIZE, zebra.ZebraMessage.get_header_size(0))
        eq_(zebra.ZebraMessage.V1_HEADER_SIZE, zebra.ZebraMessage.get_header_size(2))
        eq_(zebra.ZebraMessage.V3_HEADER_SIZE, zebra.ZebraMessage.get_header_size(3))
        eq_(zebra.ZebraMessage.V3_HEADER_SIZE, zebra.ZebraMessage.get_header_size(4))

    (ValueError)
    def test_get_header_size_invalid_version(self):
        # Version 255 is unsupported and should raise ValueError.
        # NOTE(review): the bare `(ValueError)` line above looks like a
        # stripped @raises(ValueError) decorator — confirm upstream.
        eq_(zebra.ZebraMessage.V0_HEADER_SIZE, zebra.ZebraMessage.get_header_size(255))
.parametrize('api_reverse_name, fields, model', [('event-list', [], pytest.lazy_fixture('event1')), ('event-list', ['name'], pytest.lazy_fixture('event1')), ('event-list', ['name', 'abstract', 'event_slug'], pytest.lazy_fixture('event1')), ('activity-list', [], pytest.lazy_fixture('activity1')), ('activity-list', ['title'], pytest.lazy_fixture('activity1')), ('activity-list', ['title', 'abstract', 'start_date'], pytest.lazy_fixture('activity1')), ('installation-list', [], pytest.lazy_fixture('installation1')), ('installation-list', ['notes'], pytest.lazy_fixture('installation1')), ('installation-list', ['software', 'installer'], pytest.lazy_fixture('installation1'))])
.django_db(transaction=True)
def test_api_filter_fields(api_reverse_name, fields, api_request_factory, api_client, model):
    """Requesting ?fields=... must limit serialized keys to exactly those fields.

    NOTE(review): the leading `.parametrize`/`.django_db` lines look like
    stripped pytest decorators — confirm upstream.
    """
    endpoint = reverse(api_reverse_name)
    fields_string = ','.join(fields)
    url = '{}?fields={}'.format(endpoint, fields_string)
    request = api_request_factory.get(url, format='json')
    url = request.get_raw_uri()
    response = api_client.get(url)
    assert (response.status_code == 200)
    json = response.json()
    # A single fixture object should be listed, with no pagination links.
    assert (json['count'] == 1)
    assert (json['next'] is None)
    assert (json['previous'] is None)
    if fields:
        # Empty `fields` means "all fields", so only check when a filter was sent.
        assert (set(json['results'][0].keys()) == set(fields))
class RecsysPreset(MetricPreset):
    """Preset bundling the standard recommender-system evaluation metrics.

    Train-data-dependent metrics (popularity bias, serendipity, novelty,
    bias metrics) are only added when 'current_train_data' is supplied in
    additional_data.
    """
    # Cutoff for all top-K metrics.
    k: int
    # Minimum score to count an interaction as relevant.
    min_rel_score: Optional[int]
    # Include users with no feedback in the metrics.
    no_feedback_users: bool
    # Normalize average recommendation popularity.
    normalize_arp: bool
    # Specific users to show in the recommendation-cases table.
    user_ids: Optional[List[Union[(int, str)]]]
    display_features: Optional[List[str]]
    # Item features used by diversity/serendipity metrics.
    item_features: Optional[List[str]]
    # Columns checked for user/item bias (train data required).
    user_bias_columns: Optional[List[str]]
    item_bias_columns: Optional[List[str]]

    def __init__(self, k: int, min_rel_score: Optional[int]=None, no_feedback_users: bool=False, normalize_arp: bool=False, user_ids: Optional[List[Union[(int, str)]]]=None, display_features: Optional[List[str]]=None, item_features: Optional[List[str]]=None, user_bias_columns: Optional[List[str]]=None, item_bias_columns: Optional[List[str]]=None):
        super().__init__()
        self.k = k
        self.min_rel_score = min_rel_score
        self.no_feedback_users = no_feedback_users
        self.normalize_arp = normalize_arp
        self.user_ids = user_ids
        self.display_features = display_features
        self.item_features = item_features
        self.user_bias_columns = user_bias_columns
        self.item_bias_columns = item_bias_columns

    def generate_metrics(self, data_definition: DataDefinition, additional_data: Optional[Dict[(str, Any)]]):
        """Assemble the metric list, adding train-data metrics when available."""
        is_train_data = False
        if (additional_data is not None):
            is_train_data = ('current_train_data' in additional_data.keys())
        # Core ranking-quality metrics, always included.
        metrics = [PrecisionTopKMetric(k=self.k, min_rel_score=self.min_rel_score, no_feedback_users=self.no_feedback_users), RecallTopKMetric(k=self.k, min_rel_score=self.min_rel_score, no_feedback_users=self.no_feedback_users), FBetaTopKMetric(k=self.k, min_rel_score=self.min_rel_score, no_feedback_users=self.no_feedback_users), MAPKMetric(k=self.k, min_rel_score=self.min_rel_score, no_feedback_users=self.no_feedback_users), NDCGKMetric(k=self.k, min_rel_score=self.min_rel_score, no_feedback_users=self.no_feedback_users), MRRKMetric(k=self.k, min_rel_score=self.min_rel_score, no_feedback_users=self.no_feedback_users), HitRateKMetric(k=self.k, min_rel_score=self.min_rel_score, no_feedback_users=self.no_feedback_users)]
        if is_train_data:
            metrics.append(PopularityBias(k=self.k, normalize_arp=self.normalize_arp))
        metrics.append(RecCasesTable(user_ids=self.user_ids, display_features=self.display_features))
        if (data_definition.recommendations_type == RecomType.RANK):
            # Score distribution only makes sense for rank-based recommendations.
            metrics.append(ScoreDistribution(k=self.k))
        metrics.append(PersonalizationMetric(k=self.k))
        if (self.item_features is not None):
            metrics.append(DiversityMetric(k=self.k, item_features=self.item_features))
        if ((self.item_features is not None) and is_train_data):
            metrics.append(SerendipityMetric(k=self.k, item_features=self.item_features))
        if is_train_data:
            metrics.append(NoveltyMetric(k=self.k))
        if ((self.item_bias_columns is not None) and is_train_data):
            for col in self.item_bias_columns:
                metrics.append(ItemBiasMetric(k=self.k, column_name=col))
        if ((self.user_bias_columns is not None) and is_train_data):
            for col in self.user_bias_columns:
                metrics.append(UserBiasMetric(column_name=col))
        return metrics
class FirewallRulesTest(ForsetiTestCase):
    def setUp(self):
        """Create a fresh FirewallRules container and a mutable copy of one sample rule."""
        self.firewall_rules = fe.FirewallRules(constants.TEST_PROJECT)
        self.test_rule = copy.deepcopy(constants.EXPECTED_FIREWALL_RULES['test-network-allow-internal-1'])
def test_add_rule_for_an_invalid_rule_type(self):
rule_types_to_try = [[], '', 1]
for rule in rule_types_to_try:
with self.assertRaises(fe.InvalidFirewallRuleError) as r:
self.firewall_rules.add_rule(rule, network_name=constants.TEST_NETWORK)
def test_add_rules_from_api(self):
mock_compute_client = mock.Mock(spec=compute.ComputeClient)
mock_compute_client.get_firewall_rules.return_value = constants.EXPECTED_FIREWALL_API_RESPONSE
self.firewall_rules.add_rules_from_api(mock_compute_client)
self.assertSameStructure(constants.EXPECTED_FIREWALL_RULES, self.firewall_rules.rules)
def test_add_rules_from_api_add_rule_false(self):
mock_compute_client = mock.Mock(spec=compute.ComputeClient)
mock_compute_client.get_firewall_rules.return_value = constants.EXPECTED_FIREWALL_API_RESPONSE
self.firewall_rules._add_rule_callback = (lambda _: False)
self.firewall_rules.add_rules_from_api(mock_compute_client)
self.assertEqual({}, self.firewall_rules.rules)
def test_add_rules_from_api_add_rule(self):
mock_compute_client = mock.Mock(spec=compute.ComputeClient)
mock_compute_client.get_firewall_rules.return_value = constants.EXPECTED_FIREWALL_API_RESPONSE
callback = (lambda rule: (rule['name'] == 'test-network-allow-internal-1'))
self.firewall_rules._add_rule_callback = callback
self.firewall_rules.add_rules_from_api(mock_compute_client)
expected = {'test-network-allow-internal-1': constants.EXPECTED_FIREWALL_RULES['test-network-allow-internal-1']}
self.assertSameStructure(expected, self.firewall_rules.rules)
def test_add_rules_for_network(self):
test_rules = json.loads(constants.RAW_EXPECTED_JSON_POLICY)
self.firewall_rules.add_rules(test_rules, network_name=constants.TEST_NETWORK)
self.assertSameStructure(constants.EXPECTED_FIREWALL_RULES, self.firewall_rules.rules)
    def test_add_rules_for_network_short_form(self):
        """A short-form network reference is expanded to the full URL.

        NOTE(review): the `expected_network` string literal looks truncated in
        this dump — confirm against the original source.
        """
        test_rule_name = 'test-rule'
        test_rule = _GenerateTestRule(test_rule_name)
        test_rule['network'] = 'global/networks/default'
        self.firewall_rules.add_rules([test_rule], network_name='default')
        expected_network = '
        self.assertEqual(expected_network, self.firewall_rules.rules[test_rule_name]['network'])
def test_add_rules_for_network_long_name(self):
test_rules = json.loads(constants.RAW_EXPECTED_JSON_POLICY)
test_network = ('x' * 63)
self.firewall_rules.add_rules(test_rules, network_name=test_network)
expected_rule_names = [((('x' * (62 - len(rule['name']))) + '-') + rule['name']) for rule in test_rules]
self.assertCountEqual(expected_rule_names, list(self.firewall_rules.rules.keys()))
def test_add_rules_for_network_long_name_duplicate_rule(self):
    """Colliding truncated rule names fall back to a hashed-network prefix."""
    rules = json.loads(constants.RAW_EXPECTED_JSON_POLICY)[:1]
    rule_name = rules[0]['name']
    networks = ['x' * 62 + str(i) for i in range(3)]
    for network in networks:
        self.firewall_rules.add_rules(rules, network_name=network)
    # First network keeps the plain truncated name; later duplicates get an
    # 'hn-<md5(network)>-' prefix instead.
    expected_names = ['x' * (62 - len(rule_name)) + '-' + rule_name]
    for network in networks[1:]:
        digest = fe.hashlib.md5(network.encode()).hexdigest()
        expected_names.append('hn-' + digest + '-' + rule_name)
    self.assertCountEqual(expected_names, list(self.firewall_rules.rules.keys()))
def test_add_rules_for_network_is_idempotent(self):
    """add_rules must not mutate the caller's rule list."""
    rules = json.loads(constants.RAW_EXPECTED_JSON_POLICY)
    snapshot = copy.deepcopy(rules)
    self.firewall_rules.add_rules(rules, network_name=constants.TEST_NETWORK)
    self.assertSameStructure(rules, snapshot)
def test_add_rules_for_network_negative_match(self):
    """Rules belonging to a different network are filtered out entirely."""
    rules = list(constants.EXPECTED_FIREWALL_RULES.values())
    self.firewall_rules.add_rules(rules, network_name='default')
    self.assertEqual({}, self.firewall_rules.rules)
def test_get_rules_for_network(self):
    """filtered_by_networks keeps only the rules of the requested networks."""
    rules = json.loads(constants.RAW_EXPECTED_JSON_POLICY)
    for network in ['default', constants.TEST_NETWORK]:
        self.firewall_rules.add_rules(rules, network_name=network)
    expected = fe.FirewallRules(constants.TEST_PROJECT)
    expected.add_rules(rules, network_name=constants.TEST_NETWORK)
    # Sanity check: the two-network set differs from the one-network set.
    self.assertNotEqual(expected, self.firewall_rules)
    filtered = self.firewall_rules.filtered_by_networks([constants.TEST_NETWORK])
    self.assertEqual(expected.rules, filtered)
def test_export_and_import_of_rules(self):
    """Rules must survive a JSON export/import round trip unchanged."""
    rules = list(constants.EXPECTED_FIREWALL_RULES.values())
    self.firewall_rules.add_rules(rules)
    exported = self.firewall_rules.as_json()
    restored = fe.FirewallRules(constants.TEST_PROJECT)
    restored.add_rules_from_json(exported)
    self.assertEqual(self.firewall_rules, restored)
def test_add_rule_duplicate_rules(self):
    """Adding a second rule that reuses an existing name must raise."""
    self.firewall_rules.add_rule(self.test_rule)
    duplicate = copy.deepcopy(
        constants.EXPECTED_FIREWALL_RULES['test-network-allow-internal-0'])
    duplicate['name'] = self.test_rule['name']
    with self.assertRaises(fe.DuplicateFirewallRuleNameError):
        self.firewall_rules.add_rule(duplicate)
class ExternalIP(IntervalModule):
    """i3pystatus module showing the external IP and its GeoIP country.

    Left click toggles between the normal and the "hide" format/color;
    right click forces an immediate refresh.
    """

    interval = 15
    settings = ('format', 'color', ('color_down', 'color when the http request failed'), ('color_hide', 'color when the user has decide to switch to the hide format'), ('format_down', 'format when the http request failed'), ('format_hide', 'format when the user has decide to switch to the hide format'), ('ip_website', 'http website where the IP is directly available as raw'), ('timeout', 'timeout in seconds when the http request is taking too much time'))
    format = '{country_name} {country_code} {ip}'
    format_hide = '{country_code}'
    format_down = 'Timeout'
    # NOTE(review): the URL literal was truncated (likely by text extraction);
    # it should hold an HTTP endpoint returning the caller's IP as raw text.
    ip_website = '
    timeout = 5
    color = '#FFFFFF'
    color_hide = '#FFFF00'
    color_down = '#FF0000'
    on_leftclick = 'switch_hide'
    on_rightclick = 'run'

    # NOTE(review): the bare expression below looks like the residue of a
    # stripped decorator, presumably ``@require(internet)`` -- confirm.
    (internet)
    def get_external_ip(self):
        # Best-effort fetch; any failure (timeout, network, decode) -> None.
        try:
            request = urllib.request.urlopen(self.ip_website, timeout=self.timeout)
            return request.read().decode().strip()
        except Exception:
            return None

    def run(self):
        ip = self.get_external_ip()
        if (not ip):
            return self.disable()
        gi = GeoIP.GeoIP(GeoIP.GEOIP_STANDARD)
        country_code = gi.country_code_by_addr(ip)
        country_name = gi.country_name_by_addr(ip)
        if (not country_code):
            return self.disable()
        fdict = {'country_name': country_name, 'country_code': country_code, 'ip': ip}
        self.output = {'full_text': formatp(self.format, **fdict).strip(), 'color': self.color}

    def disable(self):
        # Shown when the lookup failed (e.g. no connectivity).
        self.output = {'full_text': self.format_down, 'color': self.color_down}

    def switch_hide(self):
        # Swap format/color with their "hide" counterparts, then refresh.
        (self.format, self.format_hide) = (self.format_hide, self.format)
        (self.color, self.color_hide) = (self.color_hide, self.color)
        self.run()
def test_wrong_type_match_data(w3):
    """Match data of the wrong type must make match_fn raise ValueError."""
    data = ('hello', 'goodbye')
    # NOTE(review): the ``(,)`` tuples below are not valid Python -- the
    # literal (wrongly typed) match values appear to have been lost during
    # text extraction; restore them before running.
    match_data_and_abi = (('string', (,)), ('string', (,)))
    (abi_types, match_data) = zip(*match_data_and_abi)
    encoded_data = w3.codec.encode(abi_types, data)
    with pytest.raises(ValueError):
        match_fn(w3.codec, match_data_and_abi, encoded_data)
class CabinetHingeSettings(Settings):
    """Settings for cabinet hinge edges (chars u/U/v/V)."""

    absolute_params = {'bore': 3.2, 'eyes_per_hinge': 5, 'hinges': 2, 'style': ('inside', 'outside')}
    relative_params = {'eye': 1.5, 'play': 0.05, 'spacing': 2.0}

    def edgeObjects(self, boxes, chars: str = 'uUvV', add: bool = True):
        """Create the four hinge edge variants and bind each to a char."""
        variants = (
            {},
            {'top': True},
            {'angled': True},
            {'top': True, 'angled': True},
        )
        edges = [CabinetHingeEdge(boxes, self, **kwargs) for kwargs in variants]
        for edge, char in zip(edges, chars):
            edge.char = char
        return self._edgeObjects(edges, boxes, chars, add)
def cmd_queue_move(jobs: Jobs, reqid: RequestID, position: int, relative: Optional[str] = None) -> None:
    """Move a pending job to a new absolute or relative position in its queue.

    Exits the process with status 1 when the request cannot be found.
    """
    position = int(position)
    if position <= 0:
        raise ValueError(f'expected positive position, got {position}')
    if relative and relative not in '+-':
        raise ValueError(f'expected relative of + or -, got {relative}')
    reqid = RequestID.from_raw(reqid)
    if relative:
        logger.info('Moving job %s %s%s in the queue...', reqid, relative, position)
    else:
        logger.info('Moving job %s to position %s in the queue...', reqid, position)
    job = jobs.get(reqid)
    if not job:
        logger.error('request %s not found', reqid)
        sys.exit(1)
    if jobs.queues[reqid.workerid].paused:
        logger.warning('job queue is paused')
    status = job.get_status()
    if not status:
        logger.error('request %s not found', reqid)
        sys.exit(1)
    # sys.exit() above never returns, so a plain `if` suffices here.
    if status is not Result.STATUS.PENDING:
        logger.warning('request %s has been updated since queued', reqid)
    pos = jobs.queues[reqid.workerid].move(reqid, position, relative)
    logger.info('...moved to position %s', pos)
def test_owly_short_method_bad_response():
    """A non-JSON API body must surface as a JSONDecodeError from short()."""
    query = urlencode({'apiKey': 'TEST_KEY', 'longUrl': expanded})
    # Deliberately invalid JSON (single quotes, bare identifier).
    bad_body = "{'rerrsults': {'shortUrl': shorten}}"
    responses.add(
        responses.GET,
        f'{owly.api_url}shorten?{query}',
        body=bad_body,
        match_querystring=True,
    )
    with pytest.raises(json.decoder.JSONDecodeError):
        owly.short(expanded)
def cnv_on_genome(axis, probes, segments, do_trend=False, y_min=None, y_max=None, segment_color=SEG_COLOR):
    """Plot bin-level log2 copy ratios and segments across the whole genome.

    Draws one scatter point per probe/bin, an optional trend line, and one
    horizontal line per segment, with chromosomes laid out side by side along
    the x-axis. Returns the matplotlib *axis*.
    """
    axis.axhline(color='k')
    axis.set_ylabel('Copy ratio (log2)')
    # Auto-scale y-limits from segment values when not supplied.
    # NOTE(review): a caller-supplied limit of exactly 0 is falsy and would be
    # treated as "not given" here -- confirm that is acceptable.
    if (not (y_min and y_max)):
        if segments:
            # Exclude listed chromosomes from the auto-scaling statistics.
            # NOTE(review): this excludes chromosome '6' alongside Y; similar
            # upstream code excludes the X chromosome ('X', 'chrX') instead --
            # confirm '6' is intentional and not a transcription error.
            low_chroms = segments.chromosome.isin(('6', 'chr6', 'Y', 'chrY'))
            seg_auto_vals = segments[(~ low_chroms)]['log2'].dropna()
            if (not y_min):
                y_min = (np.nanmin([(seg_auto_vals.min() - 0.2), (- 1.5)]) if len(seg_auto_vals) else (- 2.5))
            if (not y_max):
                y_max = (np.nanmax([(seg_auto_vals.max() + 0.2), 1.5]) if len(seg_auto_vals) else 2.5)
        else:
            if (not y_min):
                y_min = (- 2.5)
            if (not y_max):
                y_max = 2.5
    axis.set_ylim(y_min, y_max)
    if probes:
        chrom_sizes = plots.chromosome_sizes(probes)
        chrom_probes = dict(probes.by_chromosome())
        # Trend-window width: ~15% of the average probe count per chromosome.
        # NOTE(review): window_size is computed but not used below -- confirm
        # whether smooth_log2() was meant to receive it.
        window_size = int(round(((0.15 * len(probes)) / probes.chromosome.nunique())))
    else:
        chrom_sizes = plots.chromosome_sizes(segments)
    chrom_segs = (dict(segments.by_chromosome()) if segments else {})
    # Draw chromosome dividers; returns each chromosome's x-offset.
    x_starts = plots.plot_chromosome_dividers(axis, chrom_sizes)
    for (chrom, x_offset) in x_starts.items():
        if (probes and (chrom in chrom_probes)):
            subprobes = chrom_probes[chrom]
            # Plot each bin at its genomic midpoint shifted by the offset.
            x = ((0.5 * (subprobes['start'] + subprobes['end'])) + x_offset)
            axis.scatter(x, subprobes['log2'], marker='.', color=POINT_COLOR, edgecolor='none', alpha=0.2)
            if do_trend:
                axis.plot(x, subprobes.smooth_log2(), color=POINT_COLOR, linewidth=2, zorder=(- 1), snap=False)
        if (chrom in chrom_segs):
            for seg in chrom_segs[chrom]:
                color = choose_segment_color(seg, segment_color)
                axis.plot(((seg.start + x_offset), (seg.end + x_offset)), (seg.log2, seg.log2), color=color, linewidth=3, solid_capstyle='round', snap=False)
    return axis
class OptionSeriesAreasplinerangeSonificationDefaultinstrumentoptionsMappingTremoloDepth(Options):
    """Generated Highcharts option wrapper for
    ``series.areasplinerange.sonification.defaultInstrumentOptions.mapping.tremolo.depth``.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name; the generated upstream code carries ``@property`` /
    ``@<name>.setter`` decorators, which appear stripped from this copy --
    confirm before relying on attribute access.
    """

    # Custom mapping function (JS callback) for this parameter.
    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    # Point property to map the parameter to.
    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    # Upper bound of the mapped range.
    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    # Lower bound of the mapped range.
    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    # Data scope used for min/max normalization.
    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the bare expression below is the residue of a stripped
# decorator, presumably ``@pytest.fixture(scope='session')`` -- confirm.
(scope='session')
def mariadb_example_db() -> Generator:
    """Session-scoped fixture yielding an autocommit/autoflush MariaDB session."""
    # NOTE(review): this URI looks garbled -- the '@' between the credentials
    # and the host appears to have been stripped
    # (expected shape: ``user:password@host/dbname``).
    example_mariadb_uri = 'mariadb+pymysql://mariadb_user:mariadb__example/mariadb_example'
    engine = get_db_engine(database_uri=example_mariadb_uri)
    SessionLocal = get_db_session(config=CONFIG, engine=engine, autocommit=True, autoflush=True)
    the_session = SessionLocal()
    # Hand the open session to the test, then tear everything down.
    (yield the_session)
    the_session.close()
    engine.dispose()
def test_reference_creates_references_of_representations_with_correct_namespace(create_test_data, create_maya_env):
    """Referencing a representation uses the base take's nice name as namespace."""
    data = create_test_data
    maya_env = create_maya_env
    pm.newFile(force=True)
    base_version = create_version(data['asset1'], 'Main')
    bbox_version = create_version(data['asset1'], '', base_version)
    other_version = create_version(data['asset2'], 'Main')
    pm.newFile(force=True)
    maya_env.save_as(other_version)
    ref = maya_env.reference(bbox_version)
    assert ref.namespace == os.path.basename(base_version.nice_name)
def update_from_hydra(data: tp.Any, cfg: DictConfig):
    """Copy every top-level key of *cfg* onto the matching attribute of *data*.

    Raises AttributeError as soon as a config key has no matching attribute.
    """
    resolved = OmegaConf.to_container(cfg, resolve=True)
    assert isinstance(resolved, dict)
    for key, value in resolved.items():
        assert isinstance(key, str)
        if not hasattr(data, key):
            raise AttributeError(f'Object of type {data.__class__} does not have an attribute {key}')
        setattr(data, key, value)
class AbstractBorrower(object):
    """Base class for borrowers that fetch pre-compiled MIBs via a reader."""

    genTexts = False
    exts = ''

    def __init__(self, reader, genTexts=False):
        # An explicit None keeps the class-level default.
        if genTexts is not None:
            self.genTexts = genTexts
        self._reader = reader

    def __str__(self):
        return '%s{%s, genTexts=%s, exts=%s}' % (
            self.__class__.__name__, self._reader, self.genTexts, self.exts)

    def setOptions(self, **kwargs):
        """Forward options to the reader and mirror them on self."""
        self._reader.setOptions(**kwargs)
        for name, value in kwargs.items():
            setattr(self, name, value)
        return self

    def getData(self, mibname, **options):
        """Fetch *mibname* through the reader, honoring the genTexts mode."""
        if bool(options.get('genTexts')) != self.genTexts:
            debug.logger & debug.flagBorrower and debug.logger(
                'skipping incompatible borrower %s for file %s' % (self, mibname))
            raise error.PySmiFileNotFoundError(mibname=mibname, reader=self._reader)
        debug.logger & debug.flagBorrower and debug.logger(
            'trying to borrow file %s from %s' % (mibname, self._reader))
        options.setdefault('exts', self.exts)
        return self._reader.getData(mibname, **options)
# NOTE(review): the bare name below is the residue of a stripped decorator,
# presumably ``@mock_s3`` (moto) -- confirm.
_s3
def test_download_log_files_and_skip_existing_files():
    """download_cloudtrail_logs fetches new objects but keeps existing local files."""
    with tempfile.TemporaryDirectory() as dirpath:
        given_a_bucket('some-bucket')
        given_an_object('some-bucket', TEST_LOG_KEY, 'some-file-content')
        given_an_object('some-bucket', TEST_LOG_KEY_EXISTING, 'some-file-content')
        # Pre-existing local files must NOT be overwritten by the download.
        given_a_file(dirpath, TEST_LOG_KEY_EXISTING, 'some-content-already-existing')
        download_cloudtrail_logs(target_dir=dirpath, bucket='some-bucket', cloudtrail_prefix='some-prefix/', from_date=datetime.datetime(2017, 1, 1, tzinfo=pytz.utc), to_date=datetime.datetime(2018, 1, 1, tzinfo=pytz.utc), account_ids=['000'], org_ids=[], regions=['some-region-1'], parallelism=10)
        assert (file_content(dirpath, TEST_LOG_KEY) == 'some-file-content')
        assert (file_content(dirpath, TEST_LOG_KEY_EXISTING) == 'some-content-already-existing')
def test_csrf():
    """End-to-end check that SecureForm-backed views enforce CSRF tokens.

    For each of the create, edit, delete and bulk-action endpoints: a POST
    without a token is rejected (page re-rendered, no redirect / failure
    flash), and the same POST with a freshly scraped token succeeds.
    """

    class SecureModelView(MockModelView):
        # Use the CSRF-protected form base for this view.
        form_base_class = form.SecureForm

        def scaffold_form(self):
            return form.SecureForm

    def get_csrf_token(data):
        # Scrape the hidden csrf_token value out of the rendered HTML.
        data = data.split('name="csrf_token" type="hidden" value="')[1]
        token = data.split('"')[0]
        return token

    (app, admin) = setup()
    view = SecureModelView(Model, endpoint='secure')
    admin.add_view(view)
    client = app.test_client()
    # Create: without a token the form re-renders (200); with one it redirects (302).
    rv = client.get('/admin/secure/new/')
    assert (rv.status_code == 200)
    assert (u'name="csrf_token"' in rv.data.decode('utf-8'))
    csrf_token = get_csrf_token(rv.data.decode('utf-8'))
    rv = client.post('/admin/secure/new/', data=dict(name='test1'))
    assert (rv.status_code == 200)
    rv = client.post('/admin/secure/new/', data=dict(name='test1', csrf_token=csrf_token))
    assert (rv.status_code == 302)
    # Edit: same pattern on the edit endpoint.
    rv = client.get('/admin/secure/edit/?url=%2Fadmin%2Fsecure%2F&id=1')
    assert (rv.status_code == 200)
    assert (u'name="csrf_token"' in rv.data.decode('utf-8'))
    csrf_token = get_csrf_token(rv.data.decode('utf-8'))
    rv = client.post('/admin/secure/edit/?url=%2Fadmin%2Fsecure%2F&id=1', data=dict(name='test1'))
    assert (rv.status_code == 200)
    rv = client.post('/admin/secure/edit/?url=%2Fadmin%2Fsecure%2F&id=1', data=dict(name='test1', csrf_token=csrf_token))
    assert (rv.status_code == 302)
    # Delete: without a token the delete must fail, with one it succeeds.
    rv = client.get('/admin/secure/')
    assert (rv.status_code == 200)
    assert (u'name="csrf_token"' in rv.data.decode('utf-8'))
    csrf_token = get_csrf_token(rv.data.decode('utf-8'))
    rv = client.post('/admin/secure/delete/', data=dict(id='1', url='/admin/secure/'), follow_redirects=True)
    assert (rv.status_code == 200)
    assert (u'Record was successfully deleted.' not in rv.data.decode('utf-8'))
    assert (u'Failed to delete record.' in rv.data.decode('utf-8'))
    rv = client.post('/admin/secure/delete/', data=dict(id='1', url='/admin/secure/', csrf_token=csrf_token), follow_redirects=True)
    assert (rv.status_code == 200)
    assert (u'Record was successfully deleted.' in rv.data.decode('utf-8'))
    # Bulk action: a token-less action POST must fail.
    rv = client.get('/admin/secure/')
    assert (rv.status_code == 200)
    assert (u'name="csrf_token"' in rv.data.decode('utf-8'))
    csrf_token = get_csrf_token(rv.data.decode('utf-8'))
    rv = client.post('/admin/secure/action/', data=dict(rowid='1', url='/admin/secure/', action='delete'), follow_redirects=True)
    assert (rv.status_code == 200)
    assert (u'Record was successfully deleted.' not in rv.data.decode('utf-8'))
    assert (u'Failed to perform action.' in rv.data.decode('utf-8'))
def extractByzabootlWordpressCom(item):
    """Parse a byzabootl.wordpress.com feed item into a release message.

    Returns None for previews/non-chapters, a release message for known
    series tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('Santairiku Eiyuuki', 'Heroic Chronicles of The Three Continents Chronicles of Rebuilding An Empire with Modern Knowledge', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class TimeField(WritableField):
    """Serializer field for ``datetime.time`` values.

    Parses incoming strings against ``input_formats`` (ISO 8601 or strptime
    patterns) and renders outgoing values using ``format``.
    """

    type_name = 'TimeField'
    type_label = 'time'
    widget = widgets.TimeInput
    form_field_class = forms.TimeField
    default_error_messages = {'invalid': _('Time has wrong format. Use one of these formats instead: %s')}
    empty = None
    input_formats = api_settings.TIME_INPUT_FORMATS
    format = api_settings.TIME_FORMAT

    def __init__(self, input_formats=None, format=None, *args, **kwargs):
        # Per-instance overrides fall back to the class-level defaults.
        self.input_formats = (input_formats if (input_formats is not None) else self.input_formats)
        self.format = (format if (format is not None) else self.format)
        super(TimeField, self).__init__(*args, **kwargs)

    def from_native(self, value):
        """Deserialize *value* into a ``datetime.time`` (None for empties).

        Tries each configured input format in order; raises ValidationError
        if none matches.
        """
        if (value in validators.EMPTY_VALUES):
            return None
        if isinstance(value, datetime.time):
            return value
        for fmt in self.input_formats:
            if (fmt.lower() == ISO_8601):
                try:
                    parsed = parse_time(value)
                except (ValueError, TypeError):
                    # Fall through to the next configured format.
                    pass
                else:
                    if (parsed is not None):
                        return parsed
            else:
                try:
                    parsed = datetime.datetime.strptime(value, fmt)
                except (ValueError, TypeError):
                    pass
                else:
                    return parsed.time()
        msg = (self.error_messages['invalid'] % readable_time_formats(self.input_formats))
        raise ValidationError(msg)

    def to_native(self, value):
        """Serialize a time using ``self.format``; None format passes through."""
        if ((value is None) or (self.format is None)):
            return value
        if isinstance(value, datetime.datetime):
            # Accept full datetimes too; only the time portion is rendered.
            value = value.time()
        if (self.format.lower() == ISO_8601):
            return value.isoformat()
        return value.strftime(self.format)
def setup_logging(*pathnames):
    """Configure the root logger from INI-style config files.

    Reads *pathnames* and applies the ``[logging]`` section options
    ``handler``, ``format`` and ``level`` (with defaults), plus optional
    per-logger level overrides from the ``[logging.levels]`` section.
    """
    import configparser
    parser = configparser.ConfigParser()
    parser.optionxform = str  # keep option (logger) names case-sensitive
    parser.read(pathnames)
    DEFAULTS = {
        'handler': 'StreamHandler()',
        'format': '%(levelname)s:%(name)s:%(message)s',
        'level': 'WARNING',
    }

    def get(section, option):
        # BUG FIX: in Python 3, ConfigParser.get()'s `raw` argument is
        # keyword-only; passing True positionally raised TypeError. raw=True
        # is needed so '%(...)s' placeholders are not interpolated.
        try:
            return parser.get(section, option, raw=True)
        except (configparser.NoOptionError, configparser.NoSectionError):
            return DEFAULTS[option]

    levelname = get('logging', 'level')
    fmt = get('logging', 'format')
    handlerclass = get('logging', 'handler')
    level = getattr(logging, levelname)
    # SECURITY: eval() of a config-supplied expression (evaluated in the
    # logging module namespace); only use with trusted configuration files.
    handler = eval(handlerclass, vars(logging))
    handler.setFormatter(logging.Formatter(fmt))
    logging.root.addHandler(handler)
    logging.root.setLevel(level)
    try:
        for name, value in parser.items('logging.levels', raw=True):
            logging.getLogger(name).setLevel(getattr(logging, value))
    except configparser.NoSectionError:
        pass
class CCObjectInfo710(Structure):
    """ctypes mirror of the CCObjectInfo struct layout (7.1.0 era, per name)."""

    CCMaxMetrics = CCMaxMetrics

    _fields_ = [
        ('_id', c_ssize_t),
        ('bounding_box', RectangleInfo),
        ('color', PixelInfo),
        ('centroid', PointInfo),
        ('area', c_double),
        ('census', c_double),
        ('merge', c_bool),
        ('metric', c_double * CCMaxMetrics),
        ('key', c_ssize_t),
    ]
class Canvas(AbstractCrudObject):
    """Graph API Canvas node (auto-generated facebook_business CRUD object).

    NOTE(review): in the generated upstream SDK ``_get_field_enum_info`` is a
    ``@classmethod``; the decorator appears to have been stripped from this
    copy -- confirm.
    """

    def __init__(self, fbid=None, parent_id=None, api=None):
        self._isCanvas = True
        super(Canvas, self).__init__(fbid, parent_id, api)

    class Field(AbstractObject.Field):
        # String constants naming the node's fields.
        background_color = 'background_color'
        body_elements = 'body_elements'
        business_id = 'business_id'
        canvas_link = 'canvas_link'
        collection_hero_image = 'collection_hero_image'
        collection_hero_video = 'collection_hero_video'
        collection_thumbnails = 'collection_thumbnails'
        dynamic_setting = 'dynamic_setting'
        element_payload = 'element_payload'
        elements = 'elements'
        fb_body_elements = 'fb_body_elements'
        id = 'id'
        is_hidden = 'is_hidden'
        is_published = 'is_published'
        last_editor = 'last_editor'
        linked_documents = 'linked_documents'
        name = 'name'
        owner = 'owner'
        property_list = 'property_list'
        source_template = 'source_template'
        store_url = 'store_url'
        style_list = 'style_list'
        tags = 'tags'
        ui_property_list = 'ui_property_list'
        unused_body_elements = 'unused_body_elements'
        update_time = 'update_time'
        use_retailer_item_ids = 'use_retailer_item_ids'

    def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """GET this Canvas node (optionally as part of a batch)."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Canvas, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            # Caller will execute the request itself.
            return request
        else:
            self.assure_call()
            return request.execute()

    def api_update(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """POST an update to this Canvas node (optionally as part of a batch)."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {'background_color': 'string', 'body_element_ids': 'list<string>', 'enable_swipe_to_open': 'bool', 'is_hidden': 'bool', 'is_published': 'bool', 'name': 'string', 'source_template_id': 'string'}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Canvas, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def get_previews(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """GET the /previews edge of this Canvas node."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.textwithentities import TextWithEntities
        param_types = {'user_ids': 'list<int>'}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/previews', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=TextWithEntities, api_type='EDGE', response_parser=ObjectParser(target_class=TextWithEntities, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    # Declared Graph API types of each field, used for response parsing.
    _field_types = {'background_color': 'string', 'body_elements': 'list<Object>', 'business_id': 'string', 'canvas_link': 'string', 'collection_hero_image': 'Photo', 'collection_hero_video': 'AdVideo', 'collection_thumbnails': 'list<CanvasCollectionThumbnail>', 'dynamic_setting': 'CanvasDynamicSetting', 'element_payload': 'string', 'elements': 'list<RichMediaElement>', 'fb_body_elements': 'list<Object>', 'id': 'string', 'is_hidden': 'bool', 'is_published': 'bool', 'last_editor': 'User', 'linked_documents': 'list<Canvas>', 'name': 'string', 'owner': 'Page', 'property_list': 'list<string>', 'source_template': 'Object', 'store_url': 'string', 'style_list': 'list<string>', 'tags': 'list<string>', 'ui_property_list': 'list<string>', 'unused_body_elements': 'list<Object>', 'update_time': 'int', 'use_retailer_item_ids': 'bool'}

    def _get_field_enum_info(cls):
        # No enum-typed fields on this node.
        field_enum_info = {}
        return field_enum_info
def reset_state(shortcuts: List[Tuple], editor: Editor):
    """Reset add-on UI state when the note editor is (re)loaded."""
    state.note_editor_shown = False
    state.set_bool(state.editor_is_ready, False)
    if get_index():
        UI.frozen = False

    def apply_night_mode(night_mode: bool):
        state.set_nightmode(night_mode)

    # Ask the webview whether Anki's night mode is active, then mirror it.
    editor.web.evalWithCallback(
        "(() => { return document.body.classList.contains('nightMode'); })();",
        apply_night_mode,
    )
class OptionSeriesVennDataEvents(Options):
    """Generated Highcharts option wrapper for ``series.venn.data.events``
    (per-point JS event callbacks).

    NOTE(review): each option appears as a getter/setter pair sharing one
    name; the generated upstream code carries ``@property`` /
    ``@<name>.setter`` decorators, which appear stripped from this copy --
    confirm before relying on attribute access.
    """

    def click(self):
        return self._config_get(None)

    def click(self, value: Any):
        self._config(value, js_type=False)

    def drag(self):
        return self._config_get(None)

    def drag(self, value: Any):
        self._config(value, js_type=False)

    def dragStart(self):
        return self._config_get(None)

    def dragStart(self, value: Any):
        self._config(value, js_type=False)

    def drop(self):
        return self._config_get(None)

    def drop(self, value: Any):
        self._config(value, js_type=False)

    def mouseOut(self):
        return self._config_get(None)

    def mouseOut(self, value: Any):
        self._config(value, js_type=False)

    def mouseOver(self):
        return self._config_get(None)

    def mouseOver(self, value: Any):
        self._config(value, js_type=False)

    def remove(self):
        return self._config_get(None)

    def remove(self, value: Any):
        self._config(value, js_type=False)

    def select(self):
        return self._config_get(None)

    def select(self, value: Any):
        self._config(value, js_type=False)

    def unselect(self):
        return self._config_get(None)

    def unselect(self, value: Any):
        self._config(value, js_type=False)

    def update(self):
        return self._config_get(None)

    def update(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesVariablepieSonificationDefaultinstrumentoptionsMappingHighpassResonance(Options):
    """Generated Highcharts option wrapper for
    ``series.variablepie.sonification.defaultInstrumentOptions.mapping.highpass.resonance``.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name; the generated upstream code carries ``@property`` /
    ``@<name>.setter`` decorators, which appear stripped from this copy --
    confirm before relying on attribute access.
    """

    # Custom mapping function (JS callback) for this parameter.
    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    # Point property to map the parameter to.
    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    # Upper bound of the mapped range.
    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    # Lower bound of the mapped range.
    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    # Data scope used for min/max normalization.
    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionPlotoptionsOrganizationOnpointPosition(Options):
    """Generated Highcharts option wrapper for
    ``plotOptions.organization.onPoint.position`` (pixel placement of the
    on-point series).

    NOTE(review): each option appears as a getter/setter pair sharing one
    name; the generated upstream code carries ``@property`` /
    ``@<name>.setter`` decorators, which appear stripped from this copy --
    confirm before relying on attribute access.
    """

    def offsetX(self):
        return self._config_get(None)

    def offsetX(self, num: float):
        self._config(num, js_type=False)

    def offsetY(self):
        return self._config_get(None)

    def offsetY(self, num: float):
        self._config(num, js_type=False)

    def x(self):
        return self._config_get(None)

    def x(self, num: float):
        self._config(num, js_type=False)

    def y(self):
        return self._config_get(None)

    def y(self, num: float):
        self._config(num, js_type=False)
@dataclass
class BoltJobSummary:
    """Outcome summary of one Bolt job run, with its collected metrics.

    NOTE(review): the ``@dataclass`` decorator was missing in the reviewed
    copy even though the class uses ``field(default_factory=list)``, which is
    only meaningful on a dataclass; it appears stripped and is restored here.
    """

    job_name: str                 # human-readable job identifier
    publisher_instance_id: str    # publisher-side instance id
    partner_instance_id: str      # partner-side instance id
    is_success: bool              # whether the job finished successfully
    bolt_metrics: List[BoltMetric] = field(default_factory=list)

    def job_metrics(self) -> List[BoltMetric]:
        """Metrics attached to the job as a whole (stage is None)."""
        return [m for m in self.bolt_metrics if m.stage is None]

    def stage_metrics(self) -> List[BoltMetric]:
        """Metrics attached to any specific stage."""
        return [m for m in self.bolt_metrics if m.stage is not None]

    def partner_metrics(self) -> List[BoltMetric]:
        """Metrics reported by the PARTNER role."""
        return [m for m in self.bolt_metrics if m.role is PrivateComputationRole.PARTNER]

    def publisher_metrics(self) -> List[BoltMetric]:
        """Metrics reported by the PUBLISHER role."""
        return [m for m in self.bolt_metrics if m.role is PrivateComputationRole.PUBLISHER]

    def get_stage_metrics(self, stage: PrivateComputationBaseStageFlow) -> List[BoltMetric]:
        """Metrics for exactly the given *stage*."""
        return [m for m in self.bolt_metrics if m.stage is stage]
def _get_db_engine(orm_base=None, database: str = None, create_not_exist_table: bool = False):
    """Build the SQLAlchemy engine (and manager connection) for the local DB.

    Args:
        orm_base: Optional declarative base whose tables may be created.
        database: Database (schema) name to connect to.
        create_not_exist_table: If True and the DB is file-based, create any
            missing tables declared on *orm_base*.

    Returns:
        Tuple ``(db_engine, connection)``; *connection* stays None on the
        plain-MySQL path.

    Raises:
        Exception: If no local DB manager is configured.
        ValueError: For a file DB without LOCAL_DB_PATH, or a connection that
            is not an RDBMSDatabase.
    """
    db_engine = None
    connection: RDBMSDatabase = None
    db_type = DBType.of_db_type(CFG.LOCAL_DB_TYPE)
    if db_type is None or db_type == DBType.Mysql:
        # BUG FIX: the '@' separating credentials from the host was missing
        # from the URL, which could never form a valid DSN.
        db_engine = create_engine(
            f'mysql+pymysql://{CFG.LOCAL_DB_USER}:{CFG.LOCAL_DB_PASSWORD}@{CFG.LOCAL_DB_HOST}:{CFG.LOCAL_DB_PORT}/{database}',
            echo=True,
        )
    else:
        db_manager = CFG.LOCAL_DB_MANAGE
        if not db_manager:
            raise Exception('LOCAL_DB_MANAGE is not initialized, please check the system configuration')
        if db_type.is_file_db():
            db_path = CFG.LOCAL_DB_PATH
            if db_path is None or db_path == '':
                raise ValueError('You LOCAL_DB_TYPE is file db, but LOCAL_DB_PATH is not configured, please configure LOCAL_DB_PATH in you .env file')
            # Derive the database name from the file path.
            _, database = db_manager._parse_file_db_info(db_type.value(), db_path)
            logger.info(f'Current DAO database is file database, db_type: {db_type.value()}, db_path: {db_path}, db_name: {database}')
        logger.info(f'Get DAO database connection with database name {database}')
        connection = db_manager.get_connect(database)
        if not isinstance(connection, RDBMSDatabase):
            raise ValueError('Currently only supports `RDBMSDatabase` database as the underlying database of BaseDao, please check your database configuration')
        db_engine = connection._engine
    # BUG FIX: guard against db_type being None before calling is_file_db(),
    # which previously raised AttributeError on the unknown-type path.
    if db_type is not None and db_type.is_file_db() and orm_base is not None and create_not_exist_table:
        logger.info('Current database is file database, create not exist table')
        orm_base.metadata.create_all(db_engine)
    return (db_engine, connection)
class OptionPlotoptionsLollipopSonificationTracks(Options):
    """Generated Highcharts option wrapper for
    ``plotOptions.lollipop.sonification.tracks``.

    NOTE(review): simple options appear as getter/setter pairs sharing one
    name; the generated upstream code carries ``@property`` /
    ``@<name>.setter`` decorators, which appear stripped from this copy --
    confirm before relying on attribute access.
    """

    # Nested sub-option objects are exposed via _config_sub_data.
    def activeWhen(self) -> 'OptionPlotoptionsLollipopSonificationTracksActivewhen':
        return self._config_sub_data('activeWhen', OptionPlotoptionsLollipopSonificationTracksActivewhen)

    def instrument(self):
        return self._config_get('piano')

    def instrument(self, text: str):
        self._config(text, js_type=False)

    def mapping(self) -> 'OptionPlotoptionsLollipopSonificationTracksMapping':
        return self._config_sub_data('mapping', OptionPlotoptionsLollipopSonificationTracksMapping)

    def midiName(self):
        return self._config_get(None)

    def midiName(self, text: str):
        self._config(text, js_type=False)

    def pointGrouping(self) -> 'OptionPlotoptionsLollipopSonificationTracksPointgrouping':
        return self._config_sub_data('pointGrouping', OptionPlotoptionsLollipopSonificationTracksPointgrouping)

    def roundToMusicalNotes(self):
        return self._config_get(True)

    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    def showPlayMarker(self):
        return self._config_get(True)

    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    def type(self):
        return self._config_get('instrument')

    def type(self, text: str):
        self._config(text, js_type=False)
class TestCoprDeleteBuild(CoprsTestCase):
    """Tests for deleting builds/packages and the backend actions they emit.

    NOTE(review): the bare ``('u1')`` / ``('u2')`` expressions before each
    test are residues of stripped decorators, presumably
    ``@TransactionDecorator('u1')`` (log in as that user) -- confirm and
    restore before running.
    """

    ('u1')
    def test_copr_build_submitter_can_delete_build_old(self, f_users, f_coprs, f_build_few_chroots, f_db):
        self.db.session.add_all([self.u1, self.c1, self.b_few_chroots])
        self.b_few_chroots.build_chroots[1].status = StatusEnum('canceled')
        self.db.session.commit()
        # The delete action must reference the srpm builddir plus every
        # chroot's result dir.
        expected_chroot_builddirs = {'srpm-builds': [self.b_few_chroots.result_dir]}
        self.b_few_chroots.copr.appstream = True
        for chroot in self.b_few_chroots.build_chroots:
            expected_chroot_builddirs[chroot.name] = [chroot.result_dir]
        expected_dir = self.b_few_chroots.result_dir
        r = self.test_client.post('/coprs/{0}/{1}/delete_build/{2}/'.format(self.u1.name, self.c1.name, self.b_few_chroots.id), data={}, follow_redirects=True)
        assert (b'Build has been deleted' in r.data)
        # The build row is gone and a matching 'build' action was queued.
        b = self.models.Build.query.filter((self.models.Build.id == self.b_few_chroots.id)).first()
        assert (b is None)
        act = self.models.Action.query.first()
        data = json.loads(act.data)
        assert (act.object_type == 'build')
        assert (data.get('ownername') == 'user1')
        assert (data.get('projectname') == 'foocopr')
        assert (json.loads(act.data)['chroot_builddirs'] == expected_chroot_builddirs)

    ('u1')
    def test_copr_build_submitter_can_delete_build(self, f_users, f_coprs, f_mock_chroots, f_builds):
        self.db.session.add(self.b1)
        self.db.session.commit()
        b_id = self.b1.id
        self.b1.copr.appstream = True
        url = '/coprs/{0}/{1}/delete_build/{2}/'.format(self.u1.name, self.c1.name, b_id)
        r = self.test_client.post(url, data={}, follow_redirects=True)
        assert (r.status_code == 200)
        b = self.models.Build.query.filter((self.models.Build.id == b_id)).first()
        assert (b is None)
        act = self.models.Action.query.first()
        data = json.loads(act.data)
        assert (act.object_type == 'build')
        assert (data.get('ownername') == 'user1')
        assert (data.get('projectname') == 'foocopr')

    ('u2')
    def test_copr_build_non_submitter_cannot_delete_build(self, f_users, f_coprs, f_mock_chroots, f_builds, f_db):
        # Logged in as u2, trying to delete u1's build must be refused.
        self.db.session.add_all([self.u1, self.c1, self.b1])
        r = self.test_client.post('/coprs/{0}/{1}/delete_build/{2}/'.format(self.u1.name, self.c1.name, self.b1.id), data={}, follow_redirects=True)
        assert (b"doesn't have permissions to delete" in r.data)
        b = self.models.Build.query.filter((self.models.Build.id == self.b1.id)).first()
        assert (b is not None)

    ('u1')
    def test_copr_delete_multiple_builds_sends_single_action(self, f_users, f_coprs, f_pr_build):
        for bc in self.b2_bc:
            bc.status = StatusEnum('canceled')
        self.db.session.add_all(self.b2_bc)
        self.db.session.add_all([self.b1, self.b2])
        self.db.session.commit()
        b_id1 = self.b1.id
        b_id2 = self.b2.id
        b_id3 = self.b_pr.id
        url = '/coprs/{0}/{1}/delete_builds/'.format(self.u1.name, self.c1.name)
        r = self.test_client.post(url, data={'build_ids[]': [b_id1, b_id2, b_id3]}, follow_redirects=True)
        assert (r.status_code == 200)
        b1 = self.models.Build.query.filter((self.models.Build.id == b_id1)).first()
        b2 = self.models.Build.query.filter((self.models.Build.id == b_id2)).first()
        assert (b1 is None)
        assert (b2 is None)
        # All three deletions collapse into a single 'builds' action.
        act = self.models.Action.query.first()
        data = json.loads(act.data)
        assert (act.object_type == 'builds')
        assert (data.get('ownername') == 'user1')
        assert (data.get('projectname') == 'foocopr')
        assert (data.get('project_dirnames') == {'foocopr': {'fedora-18-x86_64': ['bar', 'bar'], 'srpm-builds': ['bar', '']}, 'foocopr:PR': {'fedora-17-x86_64': ['0000PR-pr-package'], 'srpm-builds': ['0000PR']}})

    ('u1')
    def test_copr_delete_package_sends_single_action(self, f_users, f_coprs, f_mock_chroots, f_builds):
        for bc in self.b2_bc:
            bc.status = StatusEnum('canceled')
        self.db.session.add_all(self.b2_bc)
        self.db.session.add_all([self.b1, self.b2, self.p1])
        self.db.session.commit()
        b_id1 = self.b1.id
        b_id2 = self.b2.id
        p_id = self.p1.id
        url = '/coprs/{0}/{1}/package/{2}/delete'.format(self.u1.name, self.c1.name, p_id)
        r = self.test_client.post(url, data={}, follow_redirects=True)
        assert (r.status_code == 200)
        b1 = self.models.Build.query.filter((self.models.Build.id == b_id1)).first()
        b2 = self.models.Build.query.filter((self.models.Build.id == b_id2)).first()
        assert (b1 is None)
        assert (b2 is None)
        # Deleting the package removes its builds via one 'builds' action.
        act = self.models.Action.query.first()
        data = json.loads(act.data)
        assert (act.object_type == 'builds')
        assert (data.get('ownername') == 'user1')
        assert (data.get('projectname') == 'foocopr')
def test_get_summary_fo(backend_db, common_db):
    """get_summary on a mid-tree file object must aggregate the object's own
    summary plus its children's, but never the parent firmware's."""
    root_fw, parent, child = create_fw_with_parent_and_child()
    backend_db.insert_multiple_objects(root_fw, parent, child)
    result = common_db.get_summary(parent, 'dummy')
    # The queried object's own entries appear in both summary keys.
    assert parent.uid in result['sum a'], 'summary of the file itself should be included'
    assert parent.uid in result['file exclusive sum b'], 'summary of the file itself should be included'
    # Nothing from the enclosing firmware leaks in.
    assert root_fw.uid not in result['sum a'], 'parent summary should not be included'
    # Children are aggregated into both keys.
    assert child.uid in result['sum a'], 'child summary should be included'
    assert child.uid in result['file exclusive sum b'], 'child summary should be included'
# NOTE(review): ".param_file(...)" is presumably a mangled decorator
# (e.g. @pytest.mark.param_file(FIXTURE_PATH / 'containers.md')) that lost
# its "@pytest.mark" prefix during extraction — as written the leading dot
# is a syntax error; confirm against the original source.
.param_file((FIXTURE_PATH / 'containers.md'))
def test_containers(file_params, sphinx_doctree_no_tr: CreateDoctree, monkeypatch):
    """Render each fixture case from containers.md through Sphinx (no
    transforms) and compare the pretty-printed doctree to the expectation."""
    # Pin the random label generator so rendered output is deterministic.
    monkeypatch.setattr(SphinxRenderer, '_random_label', (lambda self: 'mock-uuid'))
    sphinx_doctree_no_tr.set_conf({'extensions': ['myst_parser'], 'myst_enable_extensions': ['colon_fence']})
    result = sphinx_doctree_no_tr(file_params.content, 'index.md')
    file_params.assert_expected(result.pformat('index'), rstrip_lines=True)
# NOTE(review): the bare "()" below is presumably a stripped @pytest.fixture()
# decorator that lost its "@pytest.fixture" prefix during extraction — as
# written it is a no-op empty tuple; confirm against the original source.
()
def rpc_client(open_port):
    """Return a JSON-RPC request helper bound to a local test-RPC endpoint.

    The returned ``make_request(method, params=None)`` posts a JSON-RPC 2.0
    payload to the server listening on ``open_port`` and returns the
    ``result`` field, raising AssertionError if the server reports an error.
    """
    from testrpc.client.utils import force_obj_to_text
    # BUG FIX: the original line was truncated to "endpoint = f'" — an
    # unterminated f-string (syntax error). Reconstructed from the open_port
    # fixture; confirm scheme/host against the original source.
    endpoint = f'http://localhost:{open_port}'

    def make_request(method, params=None):
        # Monotonically increasing JSON-RPC request id, shared across calls
        # via a module-level counter (assumes `nonce` is defined at module scope).
        global nonce
        nonce += 1
        payload = {'id': nonce, 'jsonrpc': '2.0', 'method': method, 'params': (params or [])}
        payload_data = json.dumps(force_obj_to_text(payload, True))
        response = requests.post(endpoint, data=payload_data, headers={'Content-Type': 'application/json'})
        result = response.json()
        # JSON-RPC errors are surfaced as test failures.
        if ('error' in result):
            raise AssertionError(result['error'])
        return result['result']
    return make_request
class OptionPlotoptionsNetworkgraphSonificationContexttracksMappingLowpass(Options):
    """Generated option wrapper for the networkgraph sonification low-pass
    filter mapping, exposing its ``frequency`` and ``resonance`` sub-options.

    NOTE(review): sibling generated classes in this file pair getters with
    same-named setters, suggesting @property decorators were stripped during
    extraction — confirm against the generated original.
    """
    def frequency(self) -> 'OptionPlotoptionsNetworkgraphSonificationContexttracksMappingLowpassFrequency':
        # Lazily create/return the nested "frequency" configuration object.
        return self._config_sub_data('frequency', OptionPlotoptionsNetworkgraphSonificationContexttracksMappingLowpassFrequency)
    def resonance(self) -> 'OptionPlotoptionsNetworkgraphSonificationContexttracksMappingLowpassResonance':
        # Lazily create/return the nested "resonance" configuration object.
        return self._config_sub_data('resonance', OptionPlotoptionsNetworkgraphSonificationContexttracksMappingLowpassResonance)
def set_final_index_config(client, index):
    """Apply the post-load ("final") settings to *index* and log each change.

    Reads the desired settings from etl/es_config_objects.json, records the
    index's current settings, pushes the new ones, refreshes the index, and
    logs every key that was changed along with its old and new values.
    """
    config_path = str(settings.APP_DIR / 'etl' / 'es_config_objects.json')
    with open(config_path) as config_file:
        desired_settings = json.load(config_file)['final_index_settings']
    # Snapshot current settings first so the log can show before/after values.
    previous_settings = client.indices.get(index)[index]['settings']['index']
    client.indices.put_settings(desired_settings, index)
    client.indices.refresh(index)
    for key, new_value in desired_settings.items():
        message = f'Changing "{key}" from {previous_settings.get(key)} to {new_value}'
        logger.info(format_log(message, action='ES Settings'))
class OptionPlotoptionsScatter3dEvents(Options):
    """Generated option wrapper for scatter3d series event callbacks
    (afterAnimate, checkboxClick, click, hide, legendItemClick, mouseOut,
    mouseOver, show). Every option defaults to None (unset).

    NOTE(review): each option appears as a same-named getter/setter pair —
    presumably @property / @<name>.setter decorators were stripped during
    extraction, so as written each later def shadows the earlier one.
    Confirm against the generated original.
    """
    def afterAnimate(self):
        return self._config_get(None)
    def afterAnimate(self, value: Any):
        self._config(value, js_type=False)
    def checkboxClick(self):
        return self._config_get(None)
    def checkboxClick(self, value: Any):
        self._config(value, js_type=False)
    def click(self):
        return self._config_get(None)
    def click(self, value: Any):
        self._config(value, js_type=False)
    def hide(self):
        return self._config_get(None)
    def hide(self, value: Any):
        self._config(value, js_type=False)
    def legendItemClick(self):
        return self._config_get(None)
    def legendItemClick(self, value: Any):
        self._config(value, js_type=False)
    def mouseOut(self):
        return self._config_get(None)
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)
    def mouseOver(self):
        return self._config_get(None)
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)
    def show(self):
        return self._config_get(None)
    def show(self, value: Any):
        self._config(value, js_type=False)
class DrQConfig():
    """Configuration options for a DrQ agent.

    NOTE(review): the fields below are bare annotated class attributes and
    ``target_entropy`` has no default — presumably an @dataclasses.dataclass
    decorator was stripped during extraction; confirm against upstream.
    """
    # Entropy target for temperature auto-tuning; no default, must be supplied.
    target_entropy: float
    # Observation augmentation function (defaults to batched random crop).
    augmentation: augmentations.DataAugmentation = augmentations.batched_random_crop
    # Replay buffer bounds.
    min_replay_size: int = 1000
    max_replay_size: int = 1000000
    replay_table_name: str = adders_reverb.DEFAULT_PRIORITY_TABLE
    # Dataset prefetch depth; None leaves the library default.
    prefetch_size: Optional[int] = None
    # Discount factor applied to future rewards.
    discount: float = 0.99
    batch_size: int = 128
    # Steps taken before learning begins (presumably warm-up; confirm upstream).
    initial_num_steps: int = 1000
    # Critic optimization / target-network update settings.
    critic_learning_rate: float = 0.0003
    critic_target_update_frequency: int = 1
    critic_q_soft_update_rate: float = 0.005
    # Actor optimization settings.
    actor_learning_rate: float = 0.0003
    actor_update_frequency: int = 1
    # Entropy-temperature optimizer settings.
    temperature_learning_rate: float = 0.0003
    temperature_adam_b1: float = 0.5
    init_temperature: float = 0.1
# NOTE(review): ".django_db(transaction=True)" is presumably a mangled
# decorator (e.g. @pytest.mark.django_db(transaction=True)) that lost its
# "@pytest.mark" prefix during extraction — as written the leading dot is a
# syntax error; confirm against the original source.
.django_db(transaction=True)
def test_pstxt_download_success(client, monkeypatch, awards_and_transactions, elasticsearch_award_index):
    """Requesting a COVID profile download with file_format='pstxt' succeeds,
    returns a zip URL, and echoes the requested format back."""
    # Stub the DB connection string used by the download-generation machinery.
    download_generation.retrieve_db_string = Mock(return_value=get_database_dsn_string())
    setup_elasticsearch_test(monkeypatch, elasticsearch_award_index)
    resp = _post(client, def_codes=['L'], file_format='pstxt')
    resp_json = resp.json()
    assert (resp.status_code == status.HTTP_200_OK)
    assert re.match('.*COVID-19_Profile_.*\\.zip', resp_json['file_url'])
    assert (resp_json['download_request']['file_format'] == 'pstxt')
class Extension(extensions.Extension):
    """Model extension adding separate content-title and page-title fields
    plus computed title accessors, and exposing them in the admin."""
    def handle_model(self):
        """Add ``_content_title``/``_page_title`` fields and title accessors
        to the extended model."""
        self.model.add_to_class('_content_title', models.TextField(_('content title'), blank=True, help_text=_('The first line is the main title, the following lines are subtitles.')))
        self.model.add_to_class('_page_title', models.CharField(_('page title'), max_length=69, blank=True, help_text=_('Page title for browser window. Same as title by default. Must be 69 characters or fewer.')))
        # NOTE(review): the bare `_property(self.model)` lines below are
        # presumably mangled decorators (e.g. @monkeypatch_property(self.model))
        # that lost their prefix during extraction; as written they would raise
        # NameError and leave the defs as plain local functions — confirm
        # against the original source.
        _property(self.model)
        def page_title(self):
            # Browser-window title: explicit _page_title, else the content title.
            if self._page_title:
                return self._page_title
            return self.content_title
        _property(self.model)
        def content_title(self):
            # Main on-page title: first line of _content_title, else the model's title.
            if (not self._content_title):
                return self.title
            return self._content_title.splitlines()[0]
        _property(self.model)
        def content_subtitle(self):
            # Remaining lines of _content_title, joined as the subtitle block.
            return '\n'.join(self._content_title.splitlines()[1:])
    def handle_modeladmin(self, modeladmin):
        """Register the two title fields in a collapsible "Titles" admin fieldset."""
        modeladmin.add_extension_options(_('Titles'), {'fields': ('_content_title', '_page_title'), 'classes': ('collapse',)})
class Test_Constants(ut.TestCase):
    """Tests for ``comtypes.client.Constants``: exposure of type-library
    constants and enum members as attributes and immutable per-enum mappings."""
    def test_punk(self):
        """Enum members from a COM object are reachable directly and via a
        read-only per-enum grouping."""
        obj = comtypes.client.CreateObject(Scripting.Dictionary)
        consts = comtypes.client.Constants(obj)
        self.assertEqual(consts.BinaryCompare, Scripting.BinaryCompare)
        self.assertEqual(consts.TextCompare, Scripting.TextCompare)
        self.assertEqual(consts.DatabaseCompare, Scripting.DatabaseCompare)
        # Unknown names raise AttributeError instead of returning a default.
        with self.assertRaises(AttributeError):
            consts.Foo
        CompareMethod = consts.CompareMethod
        self.assertEqual(CompareMethod.BinaryCompare, Scripting.BinaryCompare)
        self.assertEqual(CompareMethod.TextCompare, Scripting.TextCompare)
        self.assertEqual(CompareMethod.DatabaseCompare, Scripting.DatabaseCompare)
        with self.assertRaises(AttributeError):
            CompareMethod.Foo
        # The per-enum grouping is read-only: attribute writes raise
        # AttributeError, and mutating-mapping operations raise TypeError.
        with self.assertRaises(AttributeError):
            CompareMethod.TextCompare = 1
        with self.assertRaises(AttributeError):
            CompareMethod.Foo = 1
        with self.assertRaises(TypeError):
            CompareMethod['Foo'] = 1
        with self.assertRaises(TypeError):
            del CompareMethod['Foo']
        with self.assertRaises(TypeError):
            CompareMethod |= {'Foo': 3}
        with self.assertRaises(TypeError):
            CompareMethod.clear()
        with self.assertRaises(TypeError):
            CompareMethod.pop('TextCompare')
        with self.assertRaises(TypeError):
            CompareMethod.popitem()
        with self.assertRaises(TypeError):
            CompareMethod.setdefault('Bar', 3)
    def test_alias(self):
        """An enum is also reachable under its MIDL-mangled internal name."""
        obj = comtypes.client.CreateObject(Scripting.FileSystemObject)
        consts = comtypes.client.Constants(obj)
        StandardStreamTypes = consts.StandardStreamTypes
        real_name = '__MIDL___MIDL_itf_scrrun_0001_0001_0003'
        self.assertEqual(StandardStreamTypes, getattr(consts, real_name))
        self.assertEqual(StandardStreamTypes.StdIn, Scripting.StdIn)
        self.assertEqual(StandardStreamTypes.StdOut, Scripting.StdOut)
        self.assertEqual(StandardStreamTypes.StdErr, Scripting.StdErr)
    def test_progid(self):
        """Constants can be constructed from a library name string as well as
        from a live COM object."""
        consts = comtypes.client.Constants('scrrun.dll')
        self.assertEqual(consts.BinaryCompare, Scripting.BinaryCompare)
        self.assertEqual(consts.TextCompare, Scripting.TextCompare)
        self.assertEqual(consts.DatabaseCompare, Scripting.DatabaseCompare)
    def test_returns_other_than_enum_members(self):
        """Plain (non-enum) type-library constants are exposed too."""
        obj = comtypes.client.CreateObject('SAPI.SpVoice')
        from comtypes.gen import SpeechLib as sapi
        consts = comtypes.client.Constants(obj)
        self.assertEqual(consts.Speech_Max_Word_Length, sapi.Speech_Max_Word_Length)
        self.assertEqual(consts.SpeechVoiceSkipTypeSentence, sapi.SpeechVoiceSkipTypeSentence)
        self.assertEqual(consts.SpeechAudioFormatGUIDWave, sapi.SpeechAudioFormatGUIDWave)
        self.assertEqual(consts.SpeechRegistryLocalMachineRoot, sapi.SpeechRegistryLocalMachineRoot)
        self.assertEqual(consts.SpeechGrammarTagDictation, sapi.SpeechGrammarTagDictation)
        self.assertEqual(consts.Speech_Default_Weight, sapi.Speech_Default_Weight)
    # NOTE(review): the bare tuple below is presumably a stripped decorator
    # (e.g. @ut.skipUnless(sys.version_info >= (3, 0), '...')) that lost its
    # "@..." prefix during extraction — as written it is a no-op expression.
    ((sys.version_info >= (3, 0)), 'Some words are not in Python2 keywords')
    def test_munged_definitions(self):
        """Constant names that collide with Python keywords are exposed with a
        trailing underscore (e.g. ``None_``)."""
        with silence_stdout():
            MSVidCtlLib = comtypes.client.GetModule('msvidctl.dll')
        consts = comtypes.client.Constants('msvidctl.dll')
        self.assertEqual(consts.MSVidCCService.None_, consts.None_)
        self.assertEqual(MSVidCtlLib.None_, consts.None_)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.