code stringlengths 281 23.7M |
|---|
def concat_tb(head: (TracebackType | None), tail: (TracebackType | None)) -> (TracebackType | None):
head_tbs = []
pointer = head
while (pointer is not None):
head_tbs.append(pointer)
pointer = pointer.tb_next
current_head = tail
for head_tb in reversed(head_tbs):
current_head = copy_tb(head_tb, tb_next=current_head)
return current_head |
def upgrade(saveddata_engine):
    """Migrate retired item IDs to their replacement items.

    For every ``replacement_item -> retired item ids`` entry in the
    module-level ``CONVERSIONS`` mapping, rewrite module, base-item and
    cargo references in the saved-data database.

    Args:
        saveddata_engine: DB engine exposing ``execute(sql, params)``.
    """
    # BUG FIX: the inner collection was previously named 'list', shadowing
    # the builtin; renamed to 'retired_items'.
    for replacement_item, retired_items in CONVERSIONS.items():
        for retired_item in retired_items:
            saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?', (replacement_item, retired_item))
            saveddata_engine.execute('UPDATE "modules" SET "baseItemID" = ? WHERE "baseItemID" = ?', (replacement_item, retired_item))
            saveddata_engine.execute('UPDATE "cargo" SET "itemID" = ? WHERE "itemID" = ?', (replacement_item, retired_item))
def test_reload_config(manager_nospawn):
    """End-to-end check of qtile's reload_config.

    Starts with 'reloading.py', asserts the initial config is live, flips the
    config (the test config evidently alternates based on state it stores on
    the manager), reloads, asserts the second config is live, then reloads
    once more and asserts the original config is restored. Also verifies that
    scratchpad/dropdown windows survive reloads.
    """
    manager_nospawn.start((lambda : BareConfig(file_path=(configs_dir / 'reloading.py'))))

    # NOTE(review): the next line looks like a stripped decorator — likely
    # '@Retry(ignore_exceptions=(AssertionError,))' for assert_dd_appeared().
    # As written it is not valid Python; confirm against the original source.
    (ignore_exceptions=(AssertionError,))
    def assert_dd_appeared():
        # Retried helper: wait until the dropdown window 'dd' shows up.
        assert ('dd' in manager_nospawn.c.group.info()['windows'])

    # --- Phase 1: initial config ---------------------------------------
    assert (manager_nospawn.c.eval('len(self.keys_map)') == (True, '1'))
    assert (manager_nospawn.c.eval('len(self._mouse_map)') == (True, '1'))
    assert (''.join(manager_nospawn.c.get_groups().keys()) == '12345S')
    assert (len(manager_nospawn.c.group.info()['layouts']) == 1)
    assert (manager_nospawn.c.widget['clock'].eval('self.background') == (True, 'None'))
    screens = manager_nospawn.c.get_screens()[0]
    assert ((screens['gaps']['bottom'][3] == 24) and (not screens['gaps']['top']))
    assert (len(manager_nospawn.c.internal_windows()) == 1)
    assert (manager_nospawn.c.eval('self.dgroups.key_binder') == (True, 'None'))
    assert (manager_nospawn.c.eval('len(self.dgroups.rules)') == (True, '6'))
    manager_nospawn.test_window('one')
    assert (manager_nospawn.c.window.info()['floating'] is True)
    manager_nospawn.c.window.kill()
    if (manager_nospawn.backend.name == 'x11'):
        assert (manager_nospawn.c.eval('self.core.wmname') == (True, 'LG3D'))
    # Open (then close) a scratchpad dropdown so it exists across reloads.
    manager_nospawn.c.group['S'].dropdown_toggle('dropdown1')
    assert_dd_appeared()
    manager_nospawn.c.group['S'].dropdown_toggle('dropdown1')

    # --- Phase 2: reload into the alternate config ----------------------
    # test_data flips which config variant 'reloading.py' builds;
    # test_data_config_evaluations counts how often the file is evaluated.
    manager_nospawn.c.eval('self.test_data = 1')
    manager_nospawn.c.eval('self.test_data_config_evaluations = 0')
    manager_nospawn.c.reload_config()
    assert (manager_nospawn.c.eval('self.test_data_config_evaluations') == (True, '2'))
    assert (manager_nospawn.c.eval('len(self.keys_map)') == (True, '2'))
    assert (manager_nospawn.c.eval('len(self._mouse_map)') == (True, '2'))
    assert (''.join(manager_nospawn.c.get_groups().keys()) == 'S')
    assert (len(manager_nospawn.c.group.info()['layouts']) == 2)
    assert (manager_nospawn.c.widget['currentlayout'].eval('self.background') == (True, '#ff0000'))
    screens = manager_nospawn.c.get_screens()[0]
    assert ((screens['gaps']['top'][3] == 32) and (not screens['gaps']['bottom']))
    assert (len(manager_nospawn.c.internal_windows()) == 1)
    (_, binder) = manager_nospawn.c.eval('self.dgroups.key_binder')
    assert ('function simple_key_binder' in binder)
    assert (manager_nospawn.c.eval('len(self.dgroups.rules)') == (True, '11'))
    manager_nospawn.test_window('one')
    assert (manager_nospawn.c.window.info()['floating'] is False)
    manager_nospawn.c.window.kill()
    if (manager_nospawn.backend.name == 'x11'):
        assert (manager_nospawn.c.eval('self.core.wmname') == (True, 'TEST'))
    # New dropdown from the reloaded config, plus the pre-reload one.
    manager_nospawn.c.group['S'].dropdown_toggle('dropdown2')
    assert_dd_appeared()
    manager_nospawn.c.group['S'].dropdown_toggle('dropdown1')
    assert ('dd' in manager_nospawn.c.get_groups()['S']['windows'])
    assert ('dd' in manager_nospawn.c.get_groups()['S']['windows'])

    # --- Phase 3: reload back to the original config ---------------------
    manager_nospawn.c.eval('del self.test_data')
    manager_nospawn.c.eval('del self.test_data_config_evaluations')
    manager_nospawn.c.reload_config()
    assert (manager_nospawn.c.eval('len(self.keys_map)') == (True, '1'))
    assert (manager_nospawn.c.eval('len(self._mouse_map)') == (True, '1'))
    assert (''.join(manager_nospawn.c.get_groups().keys()) == '12345S')
    assert (len(manager_nospawn.c.group.info()['layouts']) == 1)
    assert (manager_nospawn.c.widget['clock'].eval('self.background') == (True, 'None'))
    screens = manager_nospawn.c.get_screens()[0]
    assert ((screens['gaps']['bottom'][3] == 24) and (not screens['gaps']['top']))
    assert (len(manager_nospawn.c.internal_windows()) == 1)
    assert (manager_nospawn.c.eval('self.dgroups.key_binder') == (True, 'None'))
    assert (manager_nospawn.c.eval('len(self.dgroups.rules)') == (True, '6'))
    manager_nospawn.test_window('one')
    assert (manager_nospawn.c.window.info()['floating'] is True)
    manager_nospawn.c.window.kill()
    if (manager_nospawn.backend.name == 'x11'):
        assert (manager_nospawn.c.eval('self.core.wmname') == (True, 'LG3D'))
    # Dropdown windows survive the reload; one was re-homed to group '1'.
    assert ('dd' in manager_nospawn.c.get_groups()['S']['windows'])
    assert ('dd' in manager_nospawn.c.get_groups()['1']['windows'])
def _jensen_shannon_div(logit1, logit2, T=1.0):
prob1 = F.softmax((logit1 / T), dim=1)
prob2 = F.softmax((logit2 / T), dim=1)
mean_prob = (0.5 * (prob1 + prob2))
logsoftmax = torch.log(mean_prob.clamp(min=1e-08))
jsd = F.kl_div(logsoftmax, prob1, reduction='batchmean')
jsd += F.kl_div(logsoftmax, prob2, reduction='batchmean')
return (jsd * 0.5) |
def parse_inlinefunc(string, strip=False, available_funcs=None, stacktrace=False, **kwargs):
    """Parse and execute ``$funcname(arg, arg, ...)`` inline functions in *string*.

    Args:
        string (str): Raw text possibly containing inline-function tokens.
        strip (bool): If True, inline functions are removed instead of executed.
        available_funcs (dict, optional): Mapping of name -> callable to use
            instead of the module-level _INLINE_FUNCS (merged over
            _DEFAULT_FUNCS). When omitted, parse results are memoized in
            _PARSING_CACHE.
        stacktrace (bool): If True, print/log every tokenizer match (debugging).
        **kwargs: Passed through to every inline-function call.

    Returns:
        str: The input with all inline functions executed (or stripped).
    """
    global _PARSING_CACHE
    usecache = False
    if (not available_funcs):
        available_funcs = _INLINE_FUNCS
        usecache = True
    else:
        # Caller-supplied functions are layered over the defaults; caching is
        # skipped because the result depends on this custom mapping.
        tmp = _DEFAULT_FUNCS.copy()
        tmp.update(available_funcs)
        available_funcs = tmp
    if (usecache and (string in _PARSING_CACHE)):
        # Seen before - reuse the stored parse stack.
        stack = _PARSING_CACHE[string]
    elif (not _RE_STARTTOKEN.search(string)):
        # No inline-function start token anywhere - nothing to do.
        return string
    else:
        # Tokenize the string into a parse stack.
        stack = ParseStack()
        ncallable = 0  # number of currently-open inline functions
        nlparens = 0   # plain '(' nesting depth inside an inline function
        nvalid = 0     # count of recognized inline functions
        if stacktrace:
            out = 'STRING: {} =>'.format(string)
            print(out)
            logger.log_info(out)
        for match in _RE_TOKEN.finditer(string):
            gdict = match.groupdict()
            if stacktrace:
                out = ' MATCH: {}'.format({key: val for (key, val) in gdict.items() if val})
                print(out)
                logger.log_info(out)
            if gdict['singlequote']:
                stack.append(gdict['singlequote'])
            elif gdict['doublequote']:
                stack.append(gdict['doublequote'])
            elif gdict['leftparens']:
                # A bare '(' only needs tracking while inside an inline function.
                if ncallable:
                    nlparens += 1
                stack.append('(')
            elif gdict['end']:
                if (nlparens > 0):
                    # Closes a plain parenthesis, not an inline function.
                    nlparens -= 1
                    stack.append(')')
                    continue
                if (ncallable <= 0):
                    # ')' with no open inline function - literal text.
                    stack.append(')')
                    continue
                # Close the innermost inline function: pop its arguments back
                # to the callable and store (callable, args) as a single unit.
                args = []
                while stack:
                    operation = stack.pop()
                    if callable(operation):
                        if (not strip):
                            stack.append((operation, [arg for arg in reversed(args)]))
                        ncallable -= 1
                        break
                    else:
                        args.append(operation)
            elif gdict['start']:
                funcname = _RE_STARTTOKEN.match(gdict['start']).group(1)
                try:
                    stack.append(available_funcs[funcname])
                    nvalid += 1
                except KeyError:
                    # Unknown function name - route through the 'nomatch' hook.
                    stack.append(available_funcs['nomatch'])
                    stack.append(funcname)
                stack.append(None)  # None marks the start of a new argument
                ncallable += 1
            elif gdict['escaped']:
                token = gdict['escaped'].lstrip('\\')
                stack.append(token)
            elif gdict['comma']:
                if (ncallable > 0):
                    # Argument separator inside an inline function.
                    stack.append(None)
                else:
                    stack.append(',')
            else:
                stack.append(gdict['rest'])
        if (ncallable > 0):
            # Unbalanced inline-function syntax - return the input untouched.
            return string
        if ((_STACK_MAXSIZE > 0) and (_STACK_MAXSIZE < nvalid)):
            # NOTE(review): 'args' here leaks out of the tokenizer loop above
            # and may be undefined if no 'end' token was ever seen — confirm.
            return (string + available_funcs['stackfull'](*args, **kwargs))
        elif usecache:
            _PARSING_CACHE[string] = stack

    def _run_stack(item, depth=0):
        # Recursively execute one parsed item; tuples are (callable, arglist).
        retval = item
        if isinstance(item, tuple):
            if strip:
                return ''
            else:
                (func, arglist) = item
                args = ['']
                for arg in arglist:
                    if (arg is None):
                        # None separates arguments.
                        args.append('')
                    else:
                        args[(- 1)] += _run_stack(arg, depth=(depth + 1))
                kwargs['inlinefunc_stack_depth'] = depth
                retval = ('' if strip else func(*args, **kwargs))
        return utils.to_str(retval)
    retval = ''.join((_run_stack(item) for item in stack))
    if stacktrace:
        out = 'STACK: \n{} => {}\n'.format(stack, retval)
        print(out)
        logger.log_info(out)
    return retval
def _fix_py(cells):
new_cells = []
for html in cells:
if (not isinstance(html, str)):
if html.__module__.startswith('py.'):
warnings.warn("The 'py' module is deprecated and support will be removed in a future release.", DeprecationWarning)
html = str(html)
html = html.replace('col=', 'data-column-type=')
new_cells.append(html)
return new_cells |
def calc_prop_RingP(unseen_smile_ls):
    """Compute the ring penalty for every SMILES string in the list.

    The penalty is how far the molecule's largest ring exceeds six atoms
    (0 for molecules whose rings are all <= 6 atoms, or that have no rings).
    Prints a progress marker every 1000 molecules; raises on any SMILES that
    fails sanitization.
    """
    penalties = []
    for index, smile in enumerate(unseen_smile_ls):
        mol, smi_canon, did_convert = evo.sanitize_smiles(smile)
        if not did_convert:
            raise Exception('Invalid smile encountered while atempting to calculate Ring penalty')
        rings = mol.GetRingInfo().AtomRings()
        largest = max((len(ring) for ring in rings), default=0)
        penalties.append(largest - 6 if largest > 6 else 0)
        if index % 1000 == 0:
            print(index)
    return penalties
# NOTE(review): this looks like a stripped decorator — likely
# '@with_fixtures(WebFixture, CueInputFixture)' for the test below; confirm.
_fixtures(WebFixture, CueInputFixture)
def test_cue_is_visible_when_js_disabled(web_fixture, cue_input_fixture):
    """With JavaScript disabled, a CueInput's cue message is always visible."""
    fixture = cue_input_fixture

    class FormWithCueInput(Form):
        # Minimal form wrapping a single CueInput for the fixture's field.
        def __init__(self, view):
            super().__init__(view, 'test')
            self.use_layout(FormLayout())
            cue_input = CueInput(TextInput(self, fixture.domain_object.fields.field), P(view, 'this is your cue'))
            self.layout.add_input(cue_input)
    # Serve the form with JS disabled and check the cue element is visible.
    wsgi_app = web_fixture.new_wsgi_app(child_factory=FormWithCueInput.factory(), enable_js=False)
    web_fixture.reahl_server.set_app(wsgi_app)
    browser = web_fixture.driver_browser
    browser.open('/')
    browser.refresh()
    browser.wait_for_element_visible(fixture.cue_element_xpath)
class TestSequentialVectorReader(_TestSequentialReaders, unittest.TestCase, VectorExampleMixin):
    """Sequential-reader tests against the vector example data."""

    def checkRead(self, idx, pair):
        # Validate the (key, vector) record read at position *idx*.
        key, vec = pair
        if idx == 0:
            self.assertEqual('one', key)
            self.assertTrue(np.array_equal([3.0, 5.0, 7.0], vec.numpy()))
        elif idx == 1:
            self.assertEqual('two', key)
            self.assertTrue(np.array_equal([1.0, 2.0, 3.0], vec.numpy()))
        elif idx == 2:
            # Third record carries an empty vector.
            self.assertEqual('three', key)
            self.assertEqual(0, len(vec.numpy()))
        else:
            self.fail("shouldn't happen")
class SortingBox(QWidget):
    """Qt 'tooltips' demo widget: draggable circle/square/triangle shapes.

    Shapes are stored back-to-front in self.shapeItems (last item paints on
    top); toolbar buttons create new shapes at random positions, and hovering
    a shape shows its tooltip.
    """
    # Running counters used to label newly created shapes.
    circle_count = square_count = triangle_count = 1

    def __init__(self):
        super(SortingBox, self).__init__()
        # Template painter paths shared by all items of each shape kind.
        self.circlePath = QPainterPath()
        self.squarePath = QPainterPath()
        self.trianglePath = QPainterPath()
        self.shapeItems = []
        self.previousPosition = QPoint()
        self.setMouseTracking(True)
        self.setBackgroundRole(QPalette.Base)
        # The item currently being dragged, if any.
        self.itemInMotion = None
        self.newCircleButton = self.createToolButton('New Circle', QIcon(':/images/circle.png'), self.createNewCircle)
        self.newSquareButton = self.createToolButton('New Square', QIcon(':/images/square.png'), self.createNewSquare)
        self.newTriangleButton = self.createToolButton('New Triangle', QIcon(':/images/triangle.png'), self.createNewTriangle)
        self.circlePath.addEllipse(0, 0, 100, 100)
        self.squarePath.addRect(0, 0, 100, 100)
        # Build an isoceles triangle: apex centered over a 120-wide base.
        x = self.trianglePath.currentPosition().x()
        y = self.trianglePath.currentPosition().y()
        self.trianglePath.moveTo((x + (120 / 2)), y)
        self.trianglePath.lineTo(0, 100)
        self.trianglePath.lineTo(120, 100)
        self.trianglePath.lineTo((x + (120 / 2)), y)
        self.setWindowTitle('Tooltips')
        self.resize(500, 300)
        self.createShapeItem(self.circlePath, 'Circle', self.initialItemPosition(self.circlePath), self.initialItemColor())
        self.createShapeItem(self.squarePath, 'Square', self.initialItemPosition(self.squarePath), self.initialItemColor())
        self.createShapeItem(self.trianglePath, 'Triangle', self.initialItemPosition(self.trianglePath), self.initialItemColor())

    def event(self, event):
        # Intercept tooltip events to show the hovered shape's tooltip.
        if (event.type() == QEvent.ToolTip):
            helpEvent = event
            index = self.itemAt(helpEvent.pos())
            if (index != (- 1)):
                QToolTip.showText(helpEvent.globalPos(), self.shapeItems[index].toolTip())
            else:
                QToolTip.hideText()
                event.ignore()
            return True
        return super(SortingBox, self).event(event)

    def resizeEvent(self, event):
        # Keep the three creation buttons stacked in the bottom-right corner.
        margin = self.style().pixelMetric(QStyle.PM_DefaultTopLevelMargin)
        x = (self.width() - margin)
        y = (self.height() - margin)
        y = self.updateButtonGeometry(self.newCircleButton, x, y)
        y = self.updateButtonGeometry(self.newSquareButton, x, y)
        self.updateButtonGeometry(self.newTriangleButton, x, y)

    def paintEvent(self, event):
        # Paint items in list order so later items appear on top.
        painter = QPainter(self)
        painter.setRenderHint(QPainter.Antialiasing)
        for shapeItem in self.shapeItems:
            painter.translate(shapeItem.position())
            painter.setBrush(shapeItem.color())
            painter.drawPath(shapeItem.path())
            painter.translate((- shapeItem.position()))

    def mousePressEvent(self, event):
        # Start dragging the topmost shape under the cursor, raising it.
        if (event.button() == Qt.LeftButton):
            index = self.itemAt(event.pos())
            if (index != (- 1)):
                self.itemInMotion = self.shapeItems[index]
                self.previousPosition = event.pos()
                value = self.shapeItems[index]
                del self.shapeItems[index]
                # NOTE(review): inserting at len-1 leaves the grabbed item
                # second-from-top; raising to the very top would be
                # .append(value) — confirm which is intended.
                self.shapeItems.insert((len(self.shapeItems) - 1), value)
                self.update()

    def mouseMoveEvent(self, event):
        if ((event.buttons() & Qt.LeftButton) and self.itemInMotion):
            self.moveItemTo(event.pos())

    def mouseReleaseEvent(self, event):
        if ((event.button() == Qt.LeftButton) and self.itemInMotion):
            self.moveItemTo(event.pos())
            self.itemInMotion = None

    def createNewCircle(self):
        SortingBox.circle_count += 1
        self.createShapeItem(self.circlePath, ('Circle <%d>' % SortingBox.circle_count), self.randomItemPosition(), self.randomItemColor())

    def createNewSquare(self):
        SortingBox.square_count += 1
        self.createShapeItem(self.squarePath, ('Square <%d>' % SortingBox.square_count), self.randomItemPosition(), self.randomItemColor())

    def createNewTriangle(self):
        SortingBox.triangle_count += 1
        self.createShapeItem(self.trianglePath, ('Triangle <%d>' % SortingBox.triangle_count), self.randomItemPosition(), self.randomItemColor())

    def itemAt(self, pos):
        # Hit-test back-to-front so the topmost (last-painted) item wins.
        for i in range((len(self.shapeItems) - 1), (- 1), (- 1)):
            item = self.shapeItems[i]
            if item.path().contains(QPointF((pos - item.position()))):
                return i
        return (- 1)

    def moveItemTo(self, pos):
        # Translate the dragged item by the cursor delta since the last event.
        offset = (pos - self.previousPosition)
        self.itemInMotion.setPosition((self.itemInMotion.position() + offset))
        self.previousPosition = QPoint(pos)
        self.update()

    def updateButtonGeometry(self, button, x, y):
        # Place *button* with its bottom-right corner at (x, y); return the
        # y coordinate for the next button above it.
        size = button.sizeHint()
        button.setGeometry((x - size.width()), (y - size.height()), size.width(), size.height())
        return ((y - size.height()) - self.style().pixelMetric(QStyle.PM_DefaultLayoutSpacing))

    def createShapeItem(self, path, toolTip, pos, color):
        shapeItem = ShapeItem()
        shapeItem.setPath(path)
        shapeItem.setToolTip(toolTip)
        shapeItem.setPosition(pos)
        shapeItem.setColor(color)
        self.shapeItems.append(shapeItem)
        self.update()

    def createToolButton(self, toolTip, icon, member):
        button = QToolButton(self)
        button.setToolTip(toolTip)
        button.setIcon(icon)
        button.setIconSize(QSize(32, 32))
        button.clicked.connect(member)
        return button

    def initialItemPosition(self, path):
        # Spread the initial shapes horizontally, vertically centered.
        y = ((self.height() - path.controlPointRect().height()) / 2)
        if (len(self.shapeItems) == 0):
            x = ((((3 * self.width()) / 2) - path.controlPointRect().width()) / 2)
        else:
            x = (((self.width() / len(self.shapeItems)) - path.controlPointRect().width()) / 2)
        return QPoint(x, y)

    def randomItemPosition(self):
        x = random.randint(0, (self.width() - 120))
        y = random.randint(0, (self.height() - 120))
        return QPoint(x, y)

    def initialItemColor(self):
        # Deterministic hue per creation order.
        hue = (((len(self.shapeItems) + 1) * 85) % 256)
        return QColor.fromHsv(hue, 255, 190)

    def randomItemColor(self):
        return QColor.fromHsv(random.randint(0, 256), 255, 190)
def main():
    """CLI driver for checking files against clang-format.

    Returns an ExitStatus: SUCCESS when all files are formatted, DIFF when
    differences were found, TROUBLE on execution errors.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--clang-format-executable', metavar='EXECUTABLE', help='path to the clang-format executable', default='clang-format')
    parser.add_argument('--extensions', help=f'comma separated list of file extensions (default: {DEFAULT_EXTENSIONS})', default=DEFAULT_EXTENSIONS)
    parser.add_argument('-r', '--recursive', action='store_true', help='run recursively over directories')
    parser.add_argument('files', metavar='file', nargs='+')
    parser.add_argument('-q', '--quiet', action='store_true')
    parser.add_argument('-j', metavar='N', type=int, default=0, help='run N clang-format jobs in parallel (default number of cpus + 1)')
    parser.add_argument('--color', default='auto', choices=['auto', 'always', 'never'], help='show colored diff (default: auto)')
    parser.add_argument('-e', '--exclude', metavar='PATTERN', action='append', default=[], help='exclude paths matching the given glob-like pattern(s) from recursive search')
    args = parser.parse_args()
    # Behave like a plain UNIX tool: die on SIGINT/SIGPIPE instead of
    # raising KeyboardInterrupt / BrokenPipeError.
    signal.signal(signal.SIGINT, signal.SIG_DFL)
    try:
        signal.SIGPIPE
    except AttributeError:
        # SIGPIPE does not exist on Windows.
        pass
    else:
        signal.signal(signal.SIGPIPE, signal.SIG_DFL)
    colored_stdout = False
    colored_stderr = False
    if (args.color == 'always'):
        colored_stdout = True
        colored_stderr = True
    elif (args.color == 'auto'):
        colored_stdout = sys.stdout.isatty()
        colored_stderr = sys.stderr.isatty()
    # Sanity-check that the clang-format executable can run at all.
    version_invocation = [args.clang_format_executable, '--version']
    try:
        subprocess.check_call(version_invocation, stdout=DEVNULL)
    except subprocess.CalledProcessError as e:
        print_trouble(parser.prog, str(e), use_colors=colored_stderr)
        return ExitStatus.TROUBLE
    except OSError as e:
        print_trouble(parser.prog, f"Command '{subprocess.list2cmdline(version_invocation)}' failed to start: {e}", use_colors=colored_stderr)
        return ExitStatus.TROUBLE
    retcode = ExitStatus.SUCCESS
    files = list_files(args.files, recursive=args.recursive, exclude=args.exclude, extensions=args.extensions.split(','))
    if (not files):
        # NOTE(review): bare return yields None rather than an ExitStatus —
        # confirm callers treat None as success.
        return
    njobs = args.j
    if (njobs == 0):
        njobs = (multiprocessing.cpu_count() + 1)
    njobs = min(len(files), njobs)
    if (njobs == 1):
        # Run sequentially; avoids the multiprocessing overhead for one job.
        it = (run_clang_format_diff_wrapper(args, file) for file in files)
        pool = None
    else:
        pool = multiprocessing.Pool(njobs)
        it = pool.imap_unordered(partial(run_clang_format_diff_wrapper, args), files)
    # Drain results, downgrading the exit code as problems appear.
    while True:
        try:
            (outs, errs) = next(it)
        except StopIteration:
            break
        except DiffError as e:
            # clang-format itself reported a problem with this file.
            print_trouble(parser.prog, str(e), use_colors=colored_stderr)
            retcode = ExitStatus.TROUBLE
            sys.stderr.writelines(e.errs)
        except UnexpectedError as e:
            # A bug in this script; stop everything.
            print_trouble(parser.prog, str(e), use_colors=colored_stderr)
            sys.stderr.write(e.formatted_traceback)
            retcode = ExitStatus.TROUBLE
            if pool:
                pool.terminate()
                break
        else:
            sys.stderr.writelines(errs)
            if (outs == []):
                continue
            if (not args.quiet):
                print_diff(outs, use_color=colored_stdout)
            if (retcode == ExitStatus.SUCCESS):
                retcode = ExitStatus.DIFF
    return retcode
# NOTE(review): the next line looks like a stripped decorator — likely
# '@requires_user_action' — and is not valid Python as written; confirm.
.requires_user_action
class WINDOW_ACTIVATE(InteractiveTestCase):
    """Interactive check that Window.activate() raises/focuses the window."""

    def test_activate(self):
        w = window.Window(200, 200)
        try:
            w.push_handlers(WindowEventLogger())
            last_time = time.time()
            # Re-activate the window every 5 seconds until the user closes it.
            while (not w.has_exit):
                if ((time.time() - last_time) > 5):
                    w.activate()
                    last_time = time.time()
                    print('Activated window.')
                w.dispatch_events()
        finally:
            w.close()
        # Pass/fail is decided by the human observer.
        self.user_verify('Pass test?', take_screenshot=False)
def isValidImplantImport(text):
    """Validate pasted text as an implant-set import.

    Rejects input containing quantity suffixes ('x<number>' at end of line),
    input that parses to nothing, and input containing any non-implant item.
    Returns (True, itemData) on success, (False, ()) otherwise.
    """
    pattern = 'x\\d+$'
    # Quantity suffixes are not meaningful for implants.
    if any(re.search(pattern, line) for line in lineIter(text)):
        return (False, ())
    itemData = parseAdditions(text)
    if not itemData:
        return (False, ())
    if any(not item.isImplant for (item, amount, mutation) in itemData):
        return (False, ())
    return (True, itemData)
class AnnotateButton(QtWidgets.QPushButton):
    """Help-aware button: hovering shows a usage tooltip while help is on."""

    def enterEvent(self, e):
        # Tooltip only appears when the parent window's help mode is active.
        if self.window().showhelp is not True:
            return
        QtWidgets.QToolTip.showText(e.globalPos(), '<h3>Annotation (click) callback</h3>Add a basic annotation if you click anywhere on the map.<p>The annotation will show lon/lat as well as x/y coordinates of the used projection.')
def draw_box(img, bboxes, wh):
    """Draw normalized center-format boxes onto *img* as green rectangles.

    Args:
        img: Image buffer accepted by cv2.rectangle.
        bboxes: Iterable of (cx, cy, w, h) boxes, all in [0, 1].
        wh: (height, width) of the image in pixels.

    Returns:
        The image with the rectangles drawn.
    """
    h, w = wh
    for box in bboxes:
        cx, cy, box_w, box_h = box
        # Convert center+size to the top-left corner, still normalized.
        left = cx - box_w / 2
        top = cy - box_h / 2
        # Scale to pixel coordinates.
        x1 = int(left * w)
        y1 = int(top * h)
        x2 = int((left + box_w) * w)
        y2 = int((top + box_h) * h)
        img = cv2.rectangle(img, (x1, y1), (x2, y2), (0, 255, 0), 1)
    return img
def ql_syscall_rmdir(ql: Qiling, pathname: int):
    """Emulate the rmdir(2) syscall for the guest.

    Args:
        ql: The Qiling emulator instance.
        pathname: Guest pointer to the NUL-terminated directory path.

    Returns:
        0 on success, -1 if removing the directory failed.

    Raises:
        PermissionError: If the resolved host path escapes the rootfs.
    """
    vpath = ql.os.utils.read_cstring(pathname)
    hpath = ql.os.path.virtual_to_host_path(vpath)
    # Refuse paths that resolve outside the emulated rootfs.
    if (not ql.os.path.is_safe_host_path(hpath)):
        raise PermissionError(f'unsafe path: {hpath}')
    try:
        # NOTE(review): when the directory does not exist, os.rmdir is skipped
        # and 0 (success) is returned instead of -1/ENOENT — confirm this
        # divergence from rmdir(2) semantics is intentional.
        if os.path.exists(hpath):
            os.rmdir(hpath)
    except OSError:
        regreturn = (- 1)
    else:
        regreturn = 0
    ql.log.debug(f'rmdir("{vpath}") = {regreturn}')
    return regreturn
class TestWidgetOverviewApp(unittest.TestCase):
def setUpClass(cls):
import widgets_overview_app
cls.AppClass = widgets_overview_app.MyApp
def setUp(self):
self.AppClass.log_request = (lambda x, y: None)
def tearDown(self):
del self.AppClass.log_request
self.app.on_close()
def test_main(self):
self.app = self.AppClass(MockRequest(), ('0.0.0.0', 8888), MockServer())
root_widget = self.app.main()
html = root_widget.repr()
assertValidHTML(html) |
class TTRTime(TTR):
    """Temporal transaction ranking via a time-aware push/pop propagation.

    Maintains, per node, a reserve score self.p and a residual map
    self.r[node] keyed by edge timestamp. push() distributes a node's
    residual forward (to outgoing-edge targets, weight self.beta) and
    backward (to incoming-edge sources, weight 1 - self.beta), respecting
    timestamp ordering; pop() selects the node with the largest total
    residual above self.epsilon.
    """
    name = 'TTRTime'

    def __init__(self, source, alpha: float=0.15, beta: float=0.8, epsilon=1e-05):
        super().__init__(source, alpha, beta, epsilon)
        # p: reserve (accumulated rank) per node.
        self.p = dict()
        # r: residual mass per node, keyed by timestamp.
        self.r = dict()
        # Nodes whose residuals have been pushed at least once.
        self._vis = set()

    def push(self, node, edges: list, **kwargs):
        """Push *node*'s residual along its *edges*; yields edges to follow."""
        if (self.r.get(node) is None):
            self.r[node] = dict()
        if ((node == self.source) and (self.source not in self._vis)):
            # First visit of the source: seed its reserve and split the
            # remaining (1 - alpha) mass over outgoing/incoming edges
            # proportional to edge values.
            self._vis.add(self.source)
            self.p[self.source] = self.alpha
            out_sum = sum([(e['value'] if (e['from'] == self.source) else 0) for e in edges])
            in_sum = sum([(e['value'] if (e['to'] == self.source) else 0) for e in edges])
            for e in edges:
                if ((e['from'] == self.source) and (out_sum != 0)):
                    self.r[self.source][e['timeStamp']] = ((((1 - self.alpha) * self.beta) * e['value']) / out_sum)
                elif ((e['to'] == self.source) and (in_sum != 0)):
                    self.r[self.source][e['timeStamp']] = ((((1 - self.alpha) * (1 - self.beta)) * e['value']) / in_sum)
            # Degenerate cases: park the mass at sentinel timestamps
            # (0 = before everything, sys.maxsize = after everything).
            if (out_sum == 0):
                self.r[self.source][0] = ((1 - self.alpha) * self.beta)
            if (in_sum == 0):
                self.r[self.source][sys.maxsize] = ((1 - self.alpha) * (1 - self.beta))
            return
        # General case: consume this node's residual, keep alpha as reserve
        # and forward the rest along time-consistent edges.
        r = self.r[node]
        self.r[node] = dict()
        self._self_push(node, r)
        self._forward_push(node, edges, r)
        self._backward_push(node, edges, r)
        if (node not in self._vis):
            self._vis.add(node)
        # NOTE(review): indentation was lost in this source; the yield is
        # assumed to run on every push (not only for first visits) — confirm.
        (yield from edges)

    def _self_push(self, node, r: dict):
        # Convert an alpha fraction of the node's total residual to reserve.
        sum_r = 0
        for (_, v) in r.items():
            sum_r += v
        self.p[node] = (self.p.get(node, 0) + (self.alpha * sum_r))

    def _forward_push(self, node, edges: list, r: dict):
        """Push residual to targets of outgoing edges later than the residual's timestamp."""
        es_out = list()
        for e in edges:
            if (e['from'] == node):
                es_out.append(e)
        r_node = [(t, v) for (t, v) in r.items()]
        es_out.sort(key=(lambda _e: _e['timeStamp']))
        r_node.sort(key=(lambda _c: _c[0]))
        # W[c]: total value of outgoing edges strictly later than chip c.
        j = (len(es_out) - 1)
        (sum_w, W) = (0, dict())
        for i in range((len(r_node) - 1), (- 1), (- 1)):
            c = r_node[i]
            while ((j >= 0) and (es_out[j]['timeStamp'] > c[0])):
                sum_w += es_out[j]['value']
                j -= 1
            W[c] = sum_w
        # Sweep edges in time order; d accumulates normalized residual from
        # all chips earlier than the current edge.
        j = 0
        d = 0
        for i in range(0, len(es_out)):
            e = es_out[i]
            while ((j < len(r_node)) and (e['timeStamp'] > r_node[j][0])):
                d += ((r_node[j][1] / W[r_node[j]]) if (W[r_node[j]] > 0) else 0)
                j += 1
            if (self.r.get(e['to']) is None):
                self.r[e['to']] = dict()
            inc = ((((1 - self.alpha) * self.beta) * e['value']) * d)
            self.r[e['to']][e['timeStamp']] = (self.r[e['to']].get(e['timeStamp'], 0) + inc)
        # Chips with no later outgoing edge keep their (damped) mass here.
        while (j < len(r_node)):
            self.r[node][r_node[j][0]] = (self.r[node].get(r_node[j][0], 0) + (((1 - self.alpha) * self.beta) * r_node[j][1]))
            j += 1

    def _backward_push(self, node, edges: list, r: dict):
        """Mirror of _forward_push for incoming edges earlier than the residual."""
        es_in = list()
        for e in edges:
            if (e['to'] == node):
                es_in.append(e)
        r_node = [(t, v) for (t, v) in r.items()]
        es_in.sort(key=(lambda _e: _e['timeStamp']))
        r_node.sort(key=(lambda _c: _c[0]))
        # W[c]: total value of incoming edges strictly earlier than chip c.
        j = 0
        (sum_w, W) = (0, dict())
        for i in range(0, len(r_node)):
            c = r_node[i]
            while ((j < len(es_in)) and (es_in[j]['timeStamp'] < c[0])):
                sum_w += es_in[j]['value']
                j += 1
            W[c] = sum_w
        # Sweep edges in reverse time order; d accumulates normalized
        # residual from all chips later than the current edge.
        j = (len(r_node) - 1)
        d = 0
        for i in range((len(es_in) - 1), (- 1), (- 1)):
            e = es_in[i]
            while ((j >= 0) and (e['timeStamp'] < r_node[j][0])):
                d += ((r_node[j][1] / W[r_node[j]]) if (W[r_node[j]] > 0) else 0)
                j -= 1
            if (self.r.get(e['from']) is None):
                self.r[e['from']] = dict()
            inc = ((((1 - self.alpha) * (1 - self.beta)) * e['value']) * d)
            self.r[e['from']][e['timeStamp']] = (self.r[e['from']].get(e['timeStamp'], 0) + inc)
        # Chips with no earlier incoming edge keep their (damped) mass here.
        while (j >= 0):
            self.r[node][r_node[j][0]] = (self.r[node].get(r_node[j][0], 0) + (((1 - self.alpha) * (1 - self.beta)) * r_node[j][1]))
            j -= 1

    def pop(self):
        """Return the node with the largest total residual above epsilon, or None."""
        (node, r) = (None, self.epsilon)
        for (_node, chips) in self.r.items():
            sum_r = 0
            for v in chips.values():
                sum_r += v
            if (sum_r > r):
                (node, r) = (_node, sum_r)
        return (dict(node=node, residual=r) if (node is not None) else None)
# NOTE(review): the next line looks like a stripped decorator — likely
# '@register_model' — and has no effect as a bare expression; confirm.
_model
def vit_small_r26_s32_224(pretrained=False, **kwargs):
    """R26 + ViT-S/S32 hybrid: a ResNetV2-26 stem feeding a small ViT.

    Args:
        pretrained: Load pretrained weights if available.
        **kwargs: Forwarded to both the backbone and the transformer builder.
    """
    backbone = _resnetv2((2, 2, 2, 2), **kwargs)
    # ViT-Small geometry: 384-dim embeddings, 12 blocks, 6 heads.
    model_kwargs = dict(embed_dim=384, depth=12, num_heads=6, **kwargs)
    model = _create_vision_transformer_hybrid('vit_small_r26_s32_224', backbone=backbone, pretrained=pretrained, **model_kwargs)
    return model
def test_ptr_optimization():
    """PTR queries with known answers are answered from additionals.

    First PTR query: the answer is fresh, so the reply is deferred to the
    aggregated multicast bucket. After clearing the cache, a second query
    must carry the PTR answer plus SRV/TXT/A additionals.
    """
    zc = Zeroconf(interfaces=['127.0.0.1'])
    type_ = '_test-srvc-type._tcp.local.'
    name = 'xxxyyy'
    registration_name = f'{name}.{type_}'
    desc = {'path': '/~paulsm/'}
    info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, 'ash-2.local.', addresses=[socket.inet_aton('10.0.1.2')])
    zc.register_service(info)
    # First query: reply goes to the last-second aggregate bucket only.
    query = r.DNSOutgoing(const._FLAGS_QR_QUERY)
    query.add_question(r.DNSQuestion(info.type, const._TYPE_PTR, const._CLASS_IN))
    question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in query.packets()], False)
    assert question_answers
    assert (not question_answers.ucast)
    assert (not question_answers.mcast_now)
    assert (not question_answers.mcast_aggregate)
    assert question_answers.mcast_aggregate_last_second
    _clear_cache(zc)
    # Second query after cache clear: normal aggregated reply expected.
    query = r.DNSOutgoing(const._FLAGS_QR_QUERY)
    query.add_question(r.DNSQuestion(info.type, const._TYPE_PTR, const._CLASS_IN))
    question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in query.packets()], False)
    assert question_answers
    assert (not question_answers.ucast)
    assert (not question_answers.mcast_now)
    assert (not question_answers.mcast_aggregate_last_second)
    # One PTR answer plus SRV/TXT/A (and one more) additionals.
    has_srv = has_txt = has_a = False
    nbr_additionals = 0
    nbr_answers = len(question_answers.mcast_aggregate)
    additionals = set().union(*question_answers.mcast_aggregate.values())
    for answer in additionals:
        nbr_additionals += 1
        if (answer.type == const._TYPE_SRV):
            has_srv = True
        elif (answer.type == const._TYPE_TXT):
            has_txt = True
        elif (answer.type == const._TYPE_A):
            has_a = True
    assert ((nbr_answers == 1) and (nbr_additionals == 4))
    assert (has_srv and has_txt and has_a)
    zc.unregister_service(info)
    zc.close()
class GruvboxDarkStyle(Style):
    """Pygments style implementing the gruvbox dark color scheme."""
    name = 'gruvbox-dark'
    # Canvas and highlight colors from the gruvbox dark palette.
    background_color = '#282828'
    highlight_color = '#ebdbb2'
    # Token-class -> style rules (gruvbox accent colors per token kind).
    styles = {Token: '#dddddd', Comment: 'italic #928374', Comment.PreProc: '#8ec07c', Comment.Special: 'bold italic #ebdbb2', Keyword: '#fb4934', Operator.Word: '#fb4934', String: '#b8bb26', String.Escape: '#fe8019', Number: '#d3869b', Name.Builtin: '#fe8019', Name.Variable: '#83a598', Name.Constant: '#d3869b', Name.Class: '#8ec07c', Name.Function: '#8ec07c', Name.Namespace: '#8ec07c', Name.Exception: '#fb4934', Name.Tag: '#8ec07c', Name.Attribute: '#fabd2f', Name.Decorator: '#fb4934', Generic.Heading: 'bold #ebdbb2', Generic.Subheading: 'underline #ebdbb2', Generic.Deleted: 'bg:#fb4934 #282828', Generic.Inserted: 'bg:#b8bb26 #282828', Generic.Error: '#fb4934', Generic.Emph: 'italic', Generic.Strong: 'bold', Generic.EmphStrong: 'bold italic', Generic.Prompt: '#a89984', Generic.Output: '#f2e5bc', Generic.Traceback: '#fb4934', Error: 'bg:#fb4934 #282828'}
# NOTE(review): the next line looks like a stripped decorator — possibly
# '@_node(_caller_attrs, _attr_select)' for the menu node below; confirm.
_node(_caller_attrs, _attr_select)
def node_attrs(caller):
    """OLC menu node for editing the prototype's Attributes.

    Returns (text, options) for the menu system: text is (body, helptext)
    and options route all input to _attrs_actions.
    """
    def _currentcmp(propval, flatval):
        # Filter out of flatval any (name, ..., category) pair already present
        # in propval (case-insensitively).
        cmp1 = [(tup[0].lower(), (tup[2].lower() if tup[2] else None)) for tup in propval]
        return [tup for tup in flatval if ((tup[0].lower(), (tup[2].lower() if tup[2] else None)) not in cmp1)]
    text = '\n |cAttributes|n are custom properties of the object. Enter attributes on one of these forms:\n\n attrname=value\n attrname;category=value\n attrname;category;lockstring=value\n\n To give an attribute without a category but with a lockstring, leave that spot empty\n (attrname;;lockstring=value). Attribute values can have embedded $protfuncs.\n\n {current}\n '.format(current=_get_current_value(caller, 'attrs', comparer=_currentcmp, formatter=(lambda lst: ('\n' + '\n'.join((_display_attribute(tup) for tup in lst)))), only_inherit=True))
    _set_actioninfo(caller, _format_list_actions('examine', 'remove', prefix='Actions: '))
    helptext = "\n Most commonly, Attributes don't need any categories or locks. If using locks, the lock-types\n 'attredit' and 'attrread' are used to limit editing and viewing of the Attribute. Putting\n the lock-type `attrcreate` in the |clocks|n prototype key can be used to restrict builders\n from adding new Attributes.\n\n |c$protfuncs\n\n {pfuncs}\n ".format(pfuncs=_format_protfuncs())
    text = (text, helptext)
    options = _wizard_options('attrs', 'aliases', 'tags')
    options.append({'key': '_default', 'goto': _attrs_actions})
    return (text, options)
def create_script(typeclass=None, key=None, obj=None, account=None, locks=None, interval=None, start_delay=None, repeats=None, persistent=None, autostart=True, report_to=None, desc=None, tags=None, attributes=None):
    """Create and save a new Script, returning it (or None if aborted).

    Args:
        typeclass: Script class or dotted path; defaults to the configured
            BASE_SCRIPT_TYPECLASS.
        key, obj, account, locks, interval, start_delay, repeats,
        persistent, autostart, report_to, desc, tags, attributes:
            Standard script creation parameters, stored on the new script's
            _createdict for its save hooks to consume.
    """
    global _ScriptDB
    if not _ScriptDB:
        # Deferred import to avoid circular imports at module load time.
        from evennia.scripts.models import ScriptDB as _ScriptDB
    # Resolve the typeclass, allowing dotted-path strings.
    typeclass = typeclass if typeclass else settings.BASE_SCRIPT_TYPECLASS
    if isinstance(typeclass, str):
        typeclass = class_from_module(typeclass, settings.TYPECLASS_PATHS)
    # Build the db_* constructor kwargs, only for values actually provided.
    create_kwargs = {}
    if key:
        create_kwargs['db_key'] = key
    if account:
        create_kwargs['db_account'] = dbid_to_obj(account, _AccountDB)
    if obj:
        create_kwargs['db_obj'] = dbid_to_obj(obj, _ObjectDB)
    if interval:
        create_kwargs['db_interval'] = interval
    if start_delay:
        create_kwargs['db_start_delay'] = start_delay
    if repeats:
        create_kwargs['db_repeats'] = repeats
    if persistent:
        create_kwargs['db_persistent'] = persistent
    if desc:
        create_kwargs['db_desc'] = desc
    tags = make_iter(tags) if tags is not None else None
    attributes = make_iter(attributes) if attributes is not None else None
    new_script = typeclass(**create_kwargs)
    # Full creation spec; consumed by the typeclass' save/at_first_save hooks.
    new_script._createdict = dict(key=key, obj=obj, account=account, locks=locks, interval=interval, start_delay=start_delay, repeats=repeats, persistent=persistent, autostart=autostart, report_to=report_to, desc=desc, tags=tags, attributes=attributes)
    new_script.save()
    if not new_script.id:
        # Creation was aborted somewhere along the save chain.
        return None
    signals.SIGNAL_SCRIPT_POST_CREATE.send(sender=new_script)
    return new_script
# NOTE(review): the next line looks like a stripped decorator — likely
# '@DATASETS.register_module(force=True)' — as written it is a bare call;
# confirm against the original source.
_module(force=True)
class PascalContextDataset59(CustomDataset):
    """PASCAL Context dataset with the 59-class label set (background dropped).

    Uses .jpg images and .png segmentation maps; reduce_zero_label shifts
    labels so class 0 (background) is ignored during training.
    """
    # The 59 foreground class names, in label-index order.
    CLASSES = ('aeroplane', 'bag', 'bed', 'bedclothes', 'bench', 'bicycle', 'bird', 'boat', 'book', 'bottle', 'building', 'bus', 'cabinet', 'car', 'cat', 'ceiling', 'chair', 'cloth', 'computer', 'cow', 'cup', 'curtain', 'dog', 'door', 'fence', 'floor', 'flower', 'food', 'grass', 'ground', 'horse', 'keyboard', 'light', 'motorbike', 'mountain', 'mouse', 'person', 'plate', 'platform', 'potted plant', 'road', 'rock', 'sheep', 'shelves', 'sidewalk', 'sign', 'sky', 'snow', 'sofa', 'table', 'track', 'train', 'tree', 'truck', 'tvmonitor', 'wall', 'water', 'window', 'wood')
    # Per-class RGB colors used when rendering predictions.
    PALETTE = [[180, 120, 120], [6, 230, 230], [80, 50, 50], [4, 200, 3], [120, 120, 80], [140, 140, 140], [204, 5, 255], [230, 230, 230], [4, 250, 7], [224, 5, 255], [235, 255, 7], [150, 5, 61], [120, 120, 70], [8, 255, 51], [255, 6, 82], [143, 255, 140], [204, 255, 4], [255, 51, 7], [204, 70, 3], [0, 102, 200], [61, 230, 250], [255, 6, 51], [11, 102, 255], [255, 7, 71], [255, 9, 224], [9, 7, 230], [220, 220, 220], [255, 9, 92], [112, 9, 255], [8, 255, 214], [7, 255, 224], [255, 184, 6], [10, 255, 71], [255, 41, 10], [7, 255, 255], [224, 255, 8], [102, 8, 255], [255, 61, 6], [255, 194, 7], [255, 122, 8], [0, 255, 20], [255, 8, 41], [255, 5, 153], [6, 51, 255], [235, 12, 255], [160, 150, 20], [0, 163, 255], [140, 140, 140], [250, 10, 15], [20, 255, 0], [31, 255, 0], [255, 31, 0], [255, 224, 0], [153, 255, 0], [0, 0, 255], [255, 71, 0], [0, 235, 255], [0, 173, 255], [31, 0, 255]]

    def __init__(self, split, **kwargs):
        super(PascalContextDataset59, self).__init__(img_suffix='.jpg', seg_map_suffix='.png', split=split, reduce_zero_label=True, **kwargs)
        # A split file is mandatory for this dataset variant.
        assert (osp.exists(self.img_dir) and (self.split is not None))
# NOTE(review): the next line looks like a stripped decorator — likely
# '@_vectorize_node.register(DimShuffle)' for the function below; confirm.
_vectorize_node.register(DimShuffle)
def vectorize_dimshuffle(op: DimShuffle, node: Apply, x: TensorVariable) -> Apply:
    """Vectorize a DimShuffle over leading batch dimensions of *x*.

    Extra leading dimensions of *x* (relative to the original input) are kept
    in place and the op's dimension order is shifted to act on the trailing,
    original dimensions.
    """
    n_batch = x.type.ndim - node.inputs[0].type.ndim
    if not n_batch:
        # No extra batch dimensions: the original op applies unchanged.
        return node.op.make_node(x)
    # Prepend the batch broadcast pattern and renumber the shuffle order.
    broadcastable = x.type.broadcastable[:n_batch] + op.input_broadcastable
    order = list(range(n_batch))
    for entry in op.new_order:
        order.append('x' if entry == 'x' else entry + n_batch)
    return DimShuffle(broadcastable, order).make_node(x)
def test_node_rewriter_str():
    """str()/repr() of a decorated node rewriter expose its name and tracked ops."""
    # NOTE(review): the next line looks like a stripped decorator — likely
    # '@node_rewriter([op1, MyOp])' for the function below; without it the
    # assertions on FromFunctionNodeRewriter cannot hold — confirm.
    _rewriter([op1, MyOp])
    def local_rewriter_1(fgraph, node):
        pass
    assert (str(local_rewriter_1) == 'local_rewriter_1')
    res = repr(local_rewriter_1)
    assert res.startswith('FromFunctionNodeRewriter(')
    assert ('Op1' in res)
    assert ('local_rewriter_1' in res)
def days_at_time(days, t, tz, day_offset=0):
    """Return a UTC DatetimeIndex for wall-clock time `t` on each of `days`.

    `days` is localized as naive dates, shifted by `day_offset` days plus
    the hour/minute/second of `t`, interpreted in timezone `tz`, and finally
    converted to UTC.
    """
    naive_days = pd.DatetimeIndex(days).tz_localize(None)
    # An empty index short-circuits: just attach the UTC zone.
    if not len(naive_days):
        return naive_days.tz_localize(UTC)
    offset = pd.Timedelta(days=day_offset, hours=t.hour, minutes=t.minute, seconds=t.second)
    localized = (naive_days + offset).tz_localize(tz)
    return localized.tz_convert(UTC)
def solve():
    """Classic coin problem: make exactly $5.00 using exactly 100 coins.

    One variable per denomination (its coin count); two ExactSum
    constraints pin the total value and the total coin count.
    """
    problem = Problem()
    total = 5.0
    denominations = ('0.01', '0.05', '0.10', '0.50', '1.00')
    coin_values = [float(d) for d in denominations]
    for denom, value in zip(denominations, coin_values):
        # At most total/value coins of this denomination can fit in $5.00.
        problem.addVariable(denom, range(int(total / value)))
    problem.addConstraint(ExactSumConstraint(total, coin_values), denominations)
    # Applied to all variables: the coin count must sum to exactly 100.
    problem.addConstraint(ExactSumConstraint(100))
    return (problem.getSolutionIter(), denominations)
class HITAN5(FinTS3Segment):
    """Strong-authentication challenge (HITAN), segment version 5.

    Bank-to-client response segment of the German FinTS/HBCI two-step TAN
    procedure; carries the challenge the customer must answer with a TAN.
    Field descriptions (``_d``) keep the official German specification terms.
    """
    tan_process = DataElementField(type='code', length=1, _d='TAN-Prozess')
    task_hash_value = DataElementField(type='bin', max_length=256, required=False, _d='Auftrags-Hashwert')
    task_reference = DataElementField(type='an', max_length=35, required=False, _d='Auftragsreferenz')
    challenge = DataElementField(type='an', max_length=2048, required=False, _d='Challenge')
    challenge_hhduc = DataElementField(type='bin', required=False, _d='Challenge HHD_UC')
    challenge_valid_until = DataElementGroupField(type=ChallengeValidUntil, required=False, _d='Gultigkeitsdatum und -uhrzeit fur Challenge')
    tan_list_number = DataElementField(type='an', max_length=20, required=False, _d='TAN-Listennummer')
    ben = DataElementField(type='an', max_length=99, required=False, _d='BEN')
    tan_medium_name = DataElementField(type='an', max_length=32, required=False, _d='Bezeichnung des TAN-Mediums')
def extract_metadata(container_data):
    """Collect book metadata from a list of container entities into one dict.

    Entities with type 'book_metadata' or 'metadata' contribute key/value
    pairs; 'author' keys accumulate into a deduplicated 'authors' list (the
    first author also stays under the plain 'author' key). If the metadata
    names a 'cover_image', the matching 'external_resource' entity is
    resolved to its 'bcRawMedia' payload under 'cover_image_data'.

    Bug fix: `location` was previously only assigned inside the
    `if cover_image:` branch, so a container without a cover image raised
    NameError at the `if location:` check.
    """
    metadata = {}

    def _record(key, value):
        # 'author' entries accumulate; everything else is last-writer-wins.
        if key == 'author':
            if 'authors' in metadata:
                if value not in metadata['authors']:
                    metadata['authors'].append(value)
            else:
                metadata[key] = value
                metadata['authors'] = [value]
        else:
            metadata[key] = value

    for entity in container_data:
        if entity.type == 'book_metadata':
            for category in entity.value['categorised_metadata']:
                for item in category['metadata']:
                    _record(item['key'], item['value'])
        if entity.type == 'metadata':
            for key, value in entity.value.items():
                _record(key, value)

    # Resolve cover image id -> resource location -> raw media payload.
    location = None
    cover_image = metadata.get('cover_image')
    if cover_image:
        for entity in container_data:
            if entity.type == 'external_resource' and entity.id == cover_image:
                location = entity.value['location']
                break
    if location:
        for entity in container_data:
            if entity.type == 'bcRawMedia' and entity.id == location:
                metadata['cover_image_data'] = entity.value
                break
    return metadata
class BaseFeeder(data.Dataset):
    """PHOENIX-2014 sign-language recognition dataset feeder.

    Yields (input, label, info) triples from one of three backends chosen
    by `datatype`: raw frame folders ('video'), an lmdb store ('lmdb') or
    precomputed feature files (any other value).
    """

    def __init__(self, prefix, gloss_dict, drop_ratio=1, num_gloss=-1, mode='train', transform_mode=True, datatype='lmdb'):
        # `drop_ratio` is accepted for API compatibility but not used here.
        self.mode = mode
        self.ng = num_gloss
        self.prefix = prefix
        self.dict = gloss_dict
        self.data_type = datatype
        self.feat_prefix = f'{prefix}/features/fullFrame-256x256px/{mode}'
        # Augmentation only when training.
        self.transform_mode = 'train' if transform_mode else 'test'
        self.inputs_list = np.load(f'./preprocess/phoenix2014/{mode}_info.npy', allow_pickle=True).item()
        print(mode, len(self))
        self.data_aug = self.transform()
        print('')

    def __getitem__(self, idx):
        if self.data_type == 'video':
            input_data, label, fi = self.read_video(idx)
            input_data, label = self.normalize(input_data, label)
            return input_data, torch.LongTensor(label), self.inputs_list[idx]['original_info']
        elif self.data_type == 'lmdb':
            # NOTE(review): `read_lmdb` is not defined in this class — it is
            # presumably provided elsewhere; confirm before using datatype='lmdb'.
            input_data, label, fi = self.read_lmdb(idx)
            input_data, label = self.normalize(input_data, label)
            return input_data, torch.LongTensor(label), self.inputs_list[idx]['original_info']
        else:
            input_data, label = self.read_features(idx)
            return input_data, label, self.inputs_list[idx]['original_info']

    def read_video(self, index, num_glosses=-1):
        """Load all RGB frames for sample `index` plus its gloss-id labels."""
        fi = self.inputs_list[index]
        img_folder = os.path.join(self.prefix, 'features/fullFrame-256x256px/' + fi['folder'])
        img_list = sorted(glob.glob(img_folder))
        label_list = []
        for phase in fi['label'].split(' '):
            if phase == '':
                continue
            if phase in self.dict.keys():
                label_list.append(self.dict[phase][0])
        return [cv2.cvtColor(cv2.imread(img_path), cv2.COLOR_BGR2RGB) for img_path in img_list], label_list, fi

    def read_features(self, index):
        """Load a precomputed feature file for sample `index`."""
        fi = self.inputs_list[index]
        data = np.load(f"./features/{self.mode}/{fi['fileid']}_features.npy", allow_pickle=True).item()
        return data['features'], data['label']

    def normalize(self, video, label, file_id=None):
        """Apply the augmentation pipeline, then scale pixels to [-1, 1]."""
        video, label = self.data_aug(video, label, file_id)
        video = (video.float() / 127.5) - 1
        return video, label

    def transform(self):
        """Build the augmentation pipeline for the current transform mode."""
        if self.transform_mode == 'train':
            print('Apply training transform.')
            return video_augmentation.Compose([video_augmentation.RandomCrop(224), video_augmentation.RandomHorizontalFlip(0.5), video_augmentation.ToTensor(), video_augmentation.TemporalRescale(0.2)])
        else:
            print('Apply testing transform.')
            return video_augmentation.Compose([video_augmentation.CenterCrop(224), video_augmentation.ToTensor()])

    def byte_to_img(self, byteflow):
        """Decode a pyarrow-serialized (image-bytes, ...) record to a PIL RGB image."""
        # NOTE(review): pa.deserialize was removed in pyarrow >= 2.0 — confirm
        # the pinned pyarrow version before relying on this helper.
        unpacked = pa.deserialize(byteflow)
        imgbuf = unpacked[0]
        buf = six.BytesIO()
        buf.write(imgbuf)
        buf.seek(0)
        img = Image.open(buf).convert('RGB')
        return img

    @staticmethod
    def collate_fn(batch):
        """Pad a batch to a common temporal length (DataLoader collate_fn).

        Static: the body never used `self`; the restored decorator makes it
        usable directly as `DataLoader(..., collate_fn=dataset.collate_fn)`.
        """
        # Sort by sequence length (desc) so item 0 defines the max length.
        batch = [item for item in sorted(batch, key=lambda x: len(x[0]), reverse=True)]
        video, label, info = list(zip(*batch))
        if len(video[0].shape) > 3:
            # Raw video (T, C, H, W): replicate the first frame 6x on the
            # left and the last frame on the right so T becomes a multiple
            # of 4 plus 12 context frames.
            max_len = len(video[0])
            video_length = torch.LongTensor([np.ceil(len(vid) / 4.0) * 4 + 12 for vid in video])
            left_pad = 6
            right_pad = int(np.ceil(max_len / 4.0)) * 4 - max_len + 6
            max_len = max_len + left_pad + right_pad
            padded_video = [torch.cat((vid[0][None].expand(left_pad, -1, -1, -1), vid, vid[-1][None].expand(max_len - len(vid) - left_pad, -1, -1, -1)), dim=0) for vid in video]
            padded_video = torch.stack(padded_video)
        else:
            # Feature sequences (T, C): repeat the last step, then (B, C, T).
            max_len = len(video[0])
            video_length = torch.LongTensor([len(vid) for vid in video])
            padded_video = [torch.cat((vid, vid[-1][None].expand(max_len - len(vid), -1)), dim=0) for vid in video]
            padded_video = torch.stack(padded_video).permute(0, 2, 1)
        label_length = torch.LongTensor([len(lab) for lab in label])
        if max(label_length) == 0:
            return padded_video, video_length, [], [], info
        else:
            # Labels are concatenated (CTC-style), not padded.
            padded_label = []
            for lab in label:
                padded_label.extend(lab)
            padded_label = torch.LongTensor(padded_label)
            return padded_video, video_length, padded_label, label_length, info

    def __len__(self):
        # NOTE(review): deliberately one less than the index-dict size —
        # presumably the dict carries one non-sample entry; confirm.
        return len(self.inputs_list) - 1

    def record_time(self):
        """Reset the split timer and return the current timestamp."""
        self.cur_time = time.time()
        return self.cur_time

    def split_time(self):
        """Return seconds elapsed since the last record/split call."""
        elapsed = time.time() - self.cur_time
        self.record_time()
        return elapsed
class TriviaQA(Task):
    """TriviaQA open-domain QA task, scored by loglikelihood over answer aliases."""
    VERSION = 1
    DATASET_PATH = inspect.getfile(lm_eval.datasets.triviaqa.triviaqa)
    DATASET_NAME = None

    def has_training_docs(self):
        return True

    def has_validation_docs(self):
        return True

    def has_test_docs(self):
        return False

    def training_docs(self):
        return self.dataset['train']

    def validation_docs(self):
        return self.dataset['validation']

    def test_docs(self):
        # No labelled test split is available.
        raise NotImplementedError()

    def doc_to_text(self, doc):
        question = doc['question']
        return f'Question: {question}\nAnswer:'

    def should_decontaminate(self):
        return True

    def doc_to_decontamination_query(self, doc):
        return doc['question']

    def doc_to_target(self, doc):
        return ' ' + doc['answer']['value']

    def _remove_prefixes(self, aliases):
        # After sorting, any alias that is a prefix of another immediately
        # precedes it, so one pass keeping only non-extensions suffices.
        aliases.sort()
        kept = [aliases[0]]
        for alias in aliases[1:]:
            if not alias.startswith(kept[-1]):
                kept.append(alias)
        return kept

    def construct_requests(self, doc, ctx):
        predictions = []
        for alias in self._remove_prefixes(doc['answer']['aliases']):
            _, is_prediction = rf.loglikelihood(ctx, ' ' + alias)
            predictions.append(is_prediction)
        return predictions

    def process_results(self, doc, results):
        # Correct if the model predicted any alias.
        return {'acc': float(any(results))}

    def aggregation(self):
        return {'acc': mean}

    def higher_is_better(self):
        return {'acc': True}
class Reddit5kNet(torch.nn.Module):
    """GraphConv classifier for REDDIT-MULTI-5K graphs (5 output classes)."""

    def __init__(self):
        super(Reddit5kNet, self).__init__()
        self.lin0 = Lin(1, 32)
        self.tanh0 = Tanh()
        self.conv_block = nn.ModuleList([GraphConv(32, 32) for _ in range(3)])
        self.relus = nn.ModuleList([ReLU() for _ in range(3)])
        # NOTE(review): only two norms for three convs — the zip() in
        # get_node_reps silently drops the third conv layer; confirm intent.
        self.batch_norms = nn.ModuleList([BatchNorm(32) for _ in range(2)])
        self.lin1 = torch.nn.Linear(32, 64)
        self.relu0 = ReLU()
        self.lin2 = torch.nn.Linear(64, 5)

    def forward(self, x, edge_index, edge_attr, batch):
        return self.get_pred(self.get_graph_rep(x, edge_index, edge_attr, batch))

    def get_node_reps(self, x, edge_index, edge_weight, batch):
        h = self.tanh0(self.lin0(x))
        # zip() stops at the shortest list (the two batch norms).
        for act, conv, norm in zip(self.relus, self.conv_block, self.batch_norms):
            h = norm(act(conv(h, edge_index, edge_weight)))
        return h

    def get_graph_rep(self, x, edge_index, edge_attr, batch):
        edge_weight = edge_attr.view(-1)
        node_x = self.get_node_reps(x, edge_index, edge_weight, batch)
        return global_mean_pool(node_x, batch)

    def get_pred(self, graph_x):
        pred = self.lin2(self.relu0(self.lin1(graph_x)))
        # Class probabilities are cached for inspection; raw logits returned.
        self.readout = Softmax(dim=1)(pred)
        return pred

    def reset_parameters(self):
        with torch.no_grad():
            for param in self.parameters():
                param.uniform_(-1.0, 1.0)
def test_search_text(textpage):
    """Forward/backward search for 'labor' returns stable (index, count) hits."""
    searcher = textpage.search('labor')
    # Three real hits, then exhaustion.
    forward = [searcher.get_next() for _ in range(4)]
    hit_1, hit_2, hit_3, hit_4 = forward
    back_2 = searcher.get_prev()
    back_1 = searcher.get_prev()
    assert hit_1 == (89, 5)
    assert hit_2 == (181, 5)
    assert hit_3 == (430, 5)
    assert hit_4 is None
    # Walking backwards revisits the same occurrences.
    assert hit_1 == back_1
    assert hit_2 == back_2
    expected_rectlists = [[(57, 675, 511, 690)], [(58, 638, 537, 653)], [(58, 549, 367, 561)]]
    for hit, expected_rects in zip((hit_1, hit_2, hit_3), expected_rectlists):
        rects = _get_rects(textpage, hit)
        assert [pytest.approx(r, abs=0.5) for r in rects] == expected_rects
def test_polar_stereographic_a_operation__defaults():
    """Defaults for Polar Stereographic A: origin (90, 0), zero offsets, unit scale."""
    conversion = PolarStereographicAConversion(90)
    assert conversion.name == 'unknown'
    assert conversion.method_name == 'Polar Stereographic (variant A)'
    expected_params = {
        'Latitude of natural origin': 90.0,
        'Longitude of natural origin': 0.0,
        'False easting': 0.0,
        'False northing': 0.0,
        'Scale factor at natural origin': 1.0,
    }
    assert _to_dict(conversion) == expected_params
class DataSet(object):
    """Minimal in-memory dataset (TF-tutorial style) over 32x32x1 images.

    Serves shuffled mini-batches and tracks completed epochs. The bare
    accessor defs read like stripped ``@property`` decorators (TF's MNIST
    DataSet), so they are restored as read-only properties here.
    """

    def __init__(self, images, labels, fake_data=False, one_hot=False, dtype=dtypes.float32, reshape=True):
        """Store `images`/`labels`; optionally flatten and rescale to [0, 1].

        `dtype` must be uint8 (leave byte values) or float32 (rescale to
        [0.0, 1.0]); anything else raises TypeError.
        """
        # Interpret the input as N single-channel 32x32 images.
        images = images.reshape((-1, 32, 32, 1))
        dtype = dtypes.as_dtype(dtype).base_dtype
        if dtype not in (dtypes.uint8, dtypes.float32):
            raise TypeError('Invalid image dtype %r, expected uint8 or float32' % dtype)
        if fake_data:
            self._num_examples = 10000
            self.one_hot = one_hot
        else:
            assert images.shape[0] == labels.shape[0], ('images.shape: %s labels.shape: %s' % (images.shape, labels.shape))
            self._num_examples = images.shape[0]
            if reshape:
                # Flatten [N, rows, cols, 1] to [N, rows*cols].
                assert images.shape[3] == 1
                images = images.reshape(images.shape[0], images.shape[1] * images.shape[2])
            if dtype == dtypes.float32:
                # Convert byte values [0, 255] to floats in [0.0, 1.0].
                images = images.astype(numpy.float32)
                images = numpy.multiply(images, 1.0 / 255.0)
        self._images = images
        self._labels = labels
        self._epochs_completed = 0
        self._index_in_epoch = 0

    @property
    def images(self):
        return self._images

    @property
    def labels(self):
        return self._labels

    @property
    def num_examples(self):
        return self._num_examples

    @property
    def epochs_completed(self):
        return self._epochs_completed

    def next_batch(self, batch_size, fake_data=False):
        """Return the next `batch_size` (images, labels); reshuffle at epoch end."""
        if fake_data:
            # NOTE(review): fake images are 784-wide (28x28 legacy) although
            # real images here are 32x32 — confirm the fake path is still used.
            fake_image = [1] * 784
            if self.one_hot:
                fake_label = [1] + [0] * 9
            else:
                fake_label = 0
            # NOTE(review): `xrange` needs Python 2 or six.moves — verify imports.
            return ([fake_image for _ in xrange(batch_size)], [fake_label for _ in xrange(batch_size)])
        start = self._index_in_epoch
        self._index_in_epoch += batch_size
        if self._index_in_epoch > self._num_examples:
            # Epoch finished: count it, reshuffle, restart from the beginning.
            self._epochs_completed += 1
            perm = numpy.arange(self._num_examples)
            numpy.random.shuffle(perm)
            self._images = self._images[perm]
            self._labels = self._labels[perm]
            start = 0
            self._index_in_epoch = batch_size
            assert batch_size <= self._num_examples
        end = self._index_in_epoch
        return (self._images[start:end], self._labels[start:end])
def test_get_projection(ntg1, ntg2, ntg3, ntg_weird, ntg_rgba, ntg_cmyk, ntg_latlon):
    """Each fixture area maps to its expected NinJo projection code; an
    unmappable area raises ValueError."""
    expectations = (
        (ntg1, 'PLAT'),
        (ntg2, 'PLAT'),
        (ntg3, 'NPOL'),
        (ntg_cmyk, 'SPOL'),
        (ntg_rgba, 'MERC'),
        (ntg_latlon, 'PLAT'),
    )
    for ntg, expected_code in expectations:
        assert ntg.get_projection() == expected_code
    with pytest.raises(ValueError, match='Unknown mapping from area .*'):
        ntg_weird.get_projection()
def test_validate_formatting() -> None:
    """validate() interpolates positional args into its error message."""
    pattern = re.compile(b'foo')
    cases = [
        ('oops', (), 'oops'),
        ('oops {}', (), 'oops {}'),       # no args: braces stay literal
        ('oops {} xx', (10,), 'oops 10 xx'),
    ]
    for fmt, args, expected in cases:
        with pytest.raises(LocalProtocolError) as excinfo:
            validate(pattern, b'', fmt, *args)
        assert expected in str(excinfo.value)
class CNN(nn.Module):
    """Six-conv CNN (three pooled stages) producing `n_outputs` logits.

    For 32x32 inputs the final feature map is 16x4x4 = 256, matching the
    classifier layer.
    """

    def __init__(self, input_channel=3, n_outputs=10, dropout_rate=0.25, momentum=0.1):
        self.dropout_rate = dropout_rate
        self.momentum = momentum
        super(CNN, self).__init__()
        # Layer construction order is kept — it fixes parameter RNG draws.
        self.c1 = nn.Conv2d(input_channel, 64, kernel_size=3, stride=1, padding=1)
        self.c2 = nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1)
        self.c3 = nn.Conv2d(64, 128, kernel_size=3, stride=1, padding=1)
        self.c4 = nn.Conv2d(128, 128, kernel_size=3, stride=1, padding=1)
        self.c5 = nn.Conv2d(128, 196, kernel_size=3, stride=1, padding=1)
        self.c6 = nn.Conv2d(196, 16, kernel_size=3, stride=1, padding=1)
        self.linear1 = nn.Linear(256, n_outputs)
        self.bn1 = nn.BatchNorm2d(64, momentum=self.momentum)
        self.bn2 = nn.BatchNorm2d(64, momentum=self.momentum)
        self.bn3 = nn.BatchNorm2d(128, momentum=self.momentum)
        self.bn4 = nn.BatchNorm2d(128, momentum=self.momentum)
        self.bn5 = nn.BatchNorm2d(196, momentum=self.momentum)
        self.bn6 = nn.BatchNorm2d(16, momentum=self.momentum)

    def forward(self, x):
        # Stage 1: two conv+BN+ReLU blocks, then 2x2 max-pool.
        h = F.relu(call_bn(self.bn1, self.c1(x)))
        h = F.relu(call_bn(self.bn2, self.c2(h)))
        h = F.max_pool2d(h, kernel_size=2, stride=2)
        # Stage 2.
        h = F.relu(call_bn(self.bn3, self.c3(h)))
        h = F.relu(call_bn(self.bn4, self.c4(h)))
        h = F.max_pool2d(h, kernel_size=2, stride=2)
        # Stage 3.
        h = F.relu(call_bn(self.bn5, self.c5(h)))
        h = F.relu(call_bn(self.bn6, self.c6(h)))
        h = F.max_pool2d(h, kernel_size=2, stride=2)
        flattened = h.view(h.size(0), -1)
        return self.linear1(flattened)
class InHierarchyFilter():
    """Occurrence filter accepting names defined anywhere in the class
    hierarchy (hierarchies) that the given pyname's class belongs to."""

    def __init__(self, pyname, implementations_only=False):
        self.pyname = pyname
        self.impl_only = implementations_only
        self.pyclass = self._get_containing_class(pyname)
        if self.pyclass is None:
            # Name is not defined inside a class: nothing can match.
            self.roots = None
        else:
            self.name = pyname.get_object().get_name()
            self.roots = self._get_root_classes(self.pyclass, self.name)

    def __call__(self, occurrence):
        if self.roots is None:
            return
        pyclass = self._get_containing_class(occurrence.get_pyname())
        if pyclass is None:
            return
        # Accept when the occurrence's hierarchy shares a root with ours.
        occurrence_roots = self._get_root_classes(pyclass, self.name)
        if self.roots.intersection(occurrence_roots):
            return True

    def _get_containing_class(self, pyname):
        # Only defined names can live directly inside a class scope.
        if isinstance(pyname, pynames.DefinedName):
            scope = pyname.get_object().get_scope()
            parent = scope.parent
            if parent is not None and parent.get_kind() == 'Class':
                return parent.pyobject

    def _get_root_classes(self, pyclass, name):
        # With implementations_only, the starting class counts as its own root.
        if self.impl_only and pyclass == self.pyclass:
            return {pyclass}
        roots = set()
        for superclass in pyclass.get_superclasses():
            if name in superclass:
                roots.update(self._get_root_classes(superclass, name))
        # No superclass defines the name: this class is a hierarchy root.
        return roots if roots else {pyclass}
def test_cmd2_subcommand_completion_single_end(sc_app):
    """Tab-completing 'f' after 'base' yields the single subcommand 'foo '."""
    text = 'f'
    line = 'base {}'.format(text)
    endidx = len(line)
    begidx = endidx - len(text)
    match = complete_tester(text, line, begidx, endidx, sc_app)
    assert match is not None
    assert sc_app.completion_matches == ['foo ']
class CalcToggleBoosterStatesCommand(wx.Command):
    """Undoable command toggling the active state of one or more boosters.

    The toggle direction follows the booster at `mainPosition`; Undo replays
    the command with the states captured before the toggle.
    """

    def __init__(self, fitID, mainPosition, positions, forceStates=None):
        wx.Command.__init__(self, True, 'Toggle Booster States')
        self.fitID = fitID
        self.mainPosition = mainPosition
        self.positions = positions
        self.forceStates = forceStates
        self.savedStates = None

    def Do(self):
        pyfalog.debug('Doing toggling of booster state at position {}/{} for fit {}'.format(self.mainPosition, self.positions, self.fitID))
        fit = Fit.getInstance().getFit(self.fitID)
        affected = self.positions[:]
        if self.mainPosition not in affected:
            affected.append(self.mainPosition)
        # Remember current states so Undo can restore them verbatim.
        self.savedStates = {pos: fit.boosters[pos].active for pos in affected}
        if self.forceStates is not None:
            # Explicit states supplied (the Undo path): apply them as given.
            for pos, state in self.forceStates.items():
                fit.boosters[pos].active = state
        elif fit.boosters[self.mainPosition].active:
            # Main booster is on: switch every active affected booster off.
            for pos in affected:
                if fit.boosters[pos].active:
                    fit.boosters[pos].active = False
        else:
            # Main booster is off: switch every inactive affected booster on.
            for pos in affected:
                if not fit.boosters[pos].active:
                    fit.boosters[pos].active = True
        return True

    def Undo(self):
        pyfalog.debug('Undoing toggling of booster state at position {}/{} for fit {}'.format(self.mainPosition, self.positions, self.fitID))
        restore = CalcToggleBoosterStatesCommand(fitID=self.fitID, mainPosition=self.mainPosition, positions=self.positions, forceStates=self.savedStates)
        return restore.Do()
# Restored the stripped '@pytest.fixture' decorator (only its argument list
# survived in the source).
@pytest.fixture(scope='session')
def geos_mesoscale_area(create_test_area):
    """Session-scoped 10x10 geostationary (GEOS) mesoscale test area."""
    shape = (10, 10)
    # NOTE(review): the satellite height value was lost in transit;
    # 35786023 m is the standard geostationary height used by GOES-R /
    # satpy test areas — confirm against the original fixture.
    proj_dict = {'h': 35786023, 'sweep': 'x', 'x_0': 0, 'y_0': 0, 'ellps': 'GRS80', 'no_defs': None, 'type': 'crs', 'lon_0': -75, 'proj': 'geos', 'units': 'm'}
    area_extent = (-501004.322, 3286588.35232, 501004.322, 4288596.99632)
    return create_test_area(proj_dict, shape[0], shape[1], area_extent)
# Restored the stripped '@patch' decorator: the test takes a
# `mock_step_cache` argument and sets `side_effect` on it, which only works
# if the step cache lookup is patched in.
# NOTE(review): assumes the file does `from unittest.mock import call, patch`
# (bare `call` is already used below) — confirm.
@patch('pypyr.cache.stepcache.step_cache.get_step')
def test_call_with_success_handler_retry(mock_step_cache):
    """A retried step that triggers `call` mid-retry runs the called groups
    and then resumes the original sequence and success group."""
    mock21 = DeepCopyMagicMock()

    def step21(context):
        mock21(context)
        if context['retryCounter'] == 2:
            # Second attempt succeeds and dispatches a nested call to sg1.
            context['call'] = 'sg1'
            call_step(['sg1'])(context)
        else:
            raise ValueError(context['retryCounter'])

    mock_step_cache.side_effect = [step21, nothing_step, call_step(['sg4']), call_step(['sg3']), nothing_step, nothing_step, nothing_step, nothing_step, nothing_step]
    context = Context({'a': 'b'})
    pipeline = Pipeline('arb')
    steps_runner = StepsRunner(get_retry_pipeline(), context)
    pipeline.steps_runner = steps_runner
    with context.pipeline_scope(pipeline):
        steps_runner.run_step_groups(groups=['sg2'], success_group='sg5', failure_group=None)
    # step21 ran twice: once failing (retryCounter 1) and once succeeding.
    assert mock21.mock_calls == [call({'a': 'b', 'retryCounter': 1}), call({'a': 'b', 'retryCounter': 2})]
    assert context == {'a': 'b', 'retryCounter': 2, 'call': 'arb'}
    assert mock_step_cache.mock_calls == [call('sg2.step1'), call('sg1.step1'), call('sg1.step2'), call('sg4.step1'), call('sg3.step1'), call('sg3.step2'), call('sg4.step2'), call('sg2.step2'), call('sg5.step1')]
class AttrVI_ATTR_ASRL_END_IN(EnumAttribute):
    """VISA attribute VI_ATTR_ASRL_END_IN (`end_input`).

    Controls how a serial (ASRL INSTR) read detects the end of input;
    defaults to termination-character detection. Readable, writable,
    and settable on a per-session (local) basis.
    """
    resources = [(constants.InterfaceType.asrl, 'INSTR')]
    py_name = 'end_input'
    visa_name = 'VI_ATTR_ASRL_END_IN'
    visa_type = 'ViUInt16'
    default = constants.SerialTermination.termination_char
    (read, write, local) = (True, True, True)
    enum_type = constants.SerialTermination
class DETECTION(BASE):
    """Detection database configuration (CornerNet/CenterNet style).

    Seeds `self._configs` with defaults, overlays user-supplied values via
    `update_config`, then derives the random-scale list if not given.
    """

    def __init__(self, db_config):
        super(DETECTION, self).__init__()
        defaults = {
            'categories': 80,
            'rand_scales': [1],
            'rand_scale_min': 0.8,
            'rand_scale_max': 1.4,
            'rand_scale_step': 0.2,
            'input_size': [511],
            'output_sizes': [[128, 128]],
            'nms_threshold': 0.5,
            'max_per_image': 100,
            'top_k': 100,
            'ae_threshold': 0.5,
            'aggr_weight': 0.1,
            'scores_thresh': 0.1,
            'center_thresh': 0.1,
            'suppres_ghost': False,
            'nms_kernel': 3,
            'nms_algorithm': 'exp_soft_nms',
            'weight_exp': 8,
            'merge_bbox': False,
            'data_aug': True,
            'lighting': True,
            'border': 128,
            'gaussian_bump': True,
            'gaussian_iou': 0.7,
            'gaussian_radius': -1,
            'rand_crop': False,
            'rand_color': False,
            'rand_pushes': False,
            'rand_samples': False,
            'special_crop': False,
            'test_scales': [1],
        }
        for key, value in defaults.items():
            self._configs[key] = value
        self.update_config(db_config)
        # Derive the scale list from min/max/step when not given explicitly.
        if self._configs['rand_scales'] is None:
            self._configs['rand_scales'] = np.arange(self._configs['rand_scale_min'], self._configs['rand_scale_max'], self._configs['rand_scale_step'])
class HKWPD5(FinTS3Segment):
    """Securities portfolio statement request (HKWPD), segment version 5.

    Client-to-bank FinTS segment requesting the holdings of a securities
    deposit account. Field descriptions (``_d``) keep the official German
    specification terms.
    """
    account = DataElementGroupField(type=Account2, _d='Depot')
    currency = DataElementField(type='cur', required=False, _d='Wahrung der Depotaufstellung')
    quality = DataElementField(type='num', length=1, required=False, _d='Kursqualitat')
    max_number_responses = DataElementField(type='num', max_length=4, required=False, _d='Maximale Anzahl Eintrage')
    touchdown_point = DataElementField(type='an', max_length=35, required=False, _d='Aufsetzpunkt')
def _check_type_alias_name(_node_type: str, name: str) -> List[str]:
    """Return lint messages for a type-alias name that is not PascalCase.

    Returns an empty list when the name is valid.
    """
    if _is_in_pascal_case(name):
        return []
    return [f'Type alias name "{name}" should be in PascalCase format. Type alias names should have the first letter of each word capitalized with no separation between each word.']
class Manager(BaseManager):
    """Experiment manager that mirrors queue state into browser/plot widgets."""

    def __init__(self, widget_list, browser, port=5888, log_level=logging.INFO, parent=None):
        super().__init__(parent)
        self.experiments = ExperimentQueue()
        self._worker = None
        self._running_experiment = None
        self._monitor = None
        self.log_level = log_level
        self.widget_list = widget_list
        self.browser = browser
        self.port = port

    def load(self, experiment):
        """Register an experiment and attach its curves to their widgets."""
        super().load(experiment)
        self.browser.add(experiment)
        for curve in experiment.curve_list:
            if not curve:
                continue
            curve.wdg.load(curve)

    def remove(self, experiment):
        """Drop an experiment from the browser and detach its curves."""
        super().remove(experiment)
        item_index = self.browser.indexOfTopLevelItem(experiment.browser_item)
        self.browser.takeTopLevelItem(item_index)
        for curve in experiment.curve_list:
            if not curve:
                continue
            curve.wdg.remove(curve)

    def _finish(self):
        log.debug("Manager's running experiment has finished")
        experiment = self._running_experiment
        self._clean_up()
        experiment.browser_item.setProgress(100)
        for curve in experiment.curve_list:
            if curve:
                curve.update_data()
        self.finished.emit(experiment)
        # Continuous mode chains straight into the next queued experiment.
        if self._is_continuous:
            self.next()
def list_from_file(filename, prefix='', offset=0, max_num=0, encoding='utf-8', file_client_args=None):
    """Read a text file into a list of strings, one stripped line each.

    Skips the first `offset` lines, prepends `prefix` to every line and,
    when `max_num` > 0, stops after that many items.
    """
    client = FileClient.infer_client(file_client_args, filename)
    items = []
    count = 0
    with StringIO(client.get_text(filename, encoding)) as f:
        for _ in range(offset):
            f.readline()
        for line in f:
            # max_num == 0 means "no limit".
            if 0 < max_num <= count:
                break
            items.append(prefix + line.rstrip('\n\r'))
            count += 1
    return items
# Restored the leading '@': this must be a fixture-injecting decorator (the
# function takes the two fixture instances as arguments).
# NOTE(review): the decorator name looks truncated — in reahl this is
# typically `with_fixtures`; confirm the intended name.
@_fixtures(SqlAlchemyFixture, AccessDomainFixture)
def demo_setup(sql_alchemy_fixture, access_domain_fixture):
    """Populate a demo database: three address books shared with John at
    decreasing permission levels, each holding four addresses.

    NOTE(review): all email_address literals are empty strings — they look
    stripped in transit; confirm against the original demo data.
    """
    sql_alchemy_fixture.commit = True
    # Touch John's own address book so it is created.
    access_domain_fixture.address_book
    john = access_domain_fixture.account
    jane = access_domain_fixture.new_account(email='')
    jane_book = access_domain_fixture.new_address_book(owner=jane)
    someone = access_domain_fixture.new_account(email='')
    someone_book = access_domain_fixture.new_address_book(owner=someone)
    someone_else = access_domain_fixture.new_account(email='')
    someone_else_book = access_domain_fixture.new_address_book(owner=someone_else)
    # John may add+edit in Jane's book, edit-only in someone's, read-only in
    # someone else's.
    jane_book.allow(john, can_add_addresses=True, can_edit_addresses=True)
    someone_book.allow(john, can_add_addresses=False, can_edit_addresses=True)
    someone_else_book.allow(john, can_add_addresses=False, can_edit_addresses=False)
    Address(address_book=jane_book, email_address='', name='Friend1').save()
    Address(address_book=jane_book, email_address='', name='Friend2').save()
    Address(address_book=jane_book, email_address='', name='Friend3').save()
    Address(address_book=jane_book, email_address='', name='Friend4').save()
    Address(address_book=someone_book, email_address='', name='Friend11').save()
    Address(address_book=someone_book, email_address='', name='Friend12').save()
    Address(address_book=someone_book, email_address='', name='Friend13').save()
    Address(address_book=someone_book, email_address='', name='Friend14').save()
    Address(address_book=someone_else_book, email_address='', name='Friend21').save()
    Address(address_book=someone_else_book, email_address='', name='Friend22').save()
    Address(address_book=someone_else_book, email_address='', name='Friend23').save()
    Address(address_book=someone_else_book, email_address='', name='Friend24').save()
def build_classifier(opt, dicts):
    """Build a speech classifier: an acoustic encoder (wav2vec2 / LSTM /
    relative transformer, chosen by `opt.model`) feeding a
    TransformerClassifier head."""
    opt = backward_compatible(opt)
    if 'langs' not in dicts:
        dicts['langs'] = {'src': 0, 'tgt': 1}
    opt.n_languages = len(dicts['langs'])
    generators = [onmt.modules.base_seq2seq.Generator(opt.model_size, dicts['tgt'].size(), fix_norm=opt.fix_norm_output_embedding)]
    onmt.constants.init_value = opt.param_init
    # Imports are local to avoid circular-import issues at module load time.
    from onmt.models.speech_recognizer.relative_transformer import SpeechTransformerEncoder
    from onmt.models.speech_recognizer.classifier import TransformerClassifier
    if opt.model in ('wav2vec2', 'wav2vec'):
        from onmt.models.speech_recognizer.wav2vec2 import FairseqWav2Vec, Wav2vecBERT
        encoder = FairseqWav2Vec(opt, model_path=opt.wav2vec2_pretrained_model)
    elif opt.model in ('LSTM', 'lstm'):
        onmt.constants.init_value = opt.param_init
        from onmt.models.speech_recognizer.lstm import SpeechLSTMDecoder, SpeechLSTMEncoder, SpeechLSTMSeq2Seq
        encoder = SpeechLSTMEncoder(opt, None, opt.encoder_type)
    else:
        encoder = SpeechTransformerEncoder(opt, None, None, opt.encoder_type)
    return TransformerClassifier(encoder, nn.ModuleList(generators), mpc=opt.mpc)
def _parse_cmudict(file):
    """Parse an open CMUdict file into ``{WORD: [pronunciation, ...]}``.

    Only lines starting with A-Z or an apostrophe are dictionary entries.
    Alternate-pronunciation markers like ``WORD(1)`` are collapsed (via
    ``_alt_re``) onto the base word, which accumulates a list of
    pronunciations.
    """
    cmudict = {}
    for line in file:
        if len(line) and (('A' <= line[0] <= 'Z') or (line[0] == "'")):
            # CMUdict separates the word from its phonemes with TWO spaces.
            # The previous single-space split left parts[1] empty for every
            # standard "WORD  PH1 PH2 ..." line, so no entry ever parsed.
            parts = line.split('  ')
            word = re.sub(_alt_re, '', parts[0])
            pronunciation = _get_pronunciation(parts[1])
            if pronunciation:
                if word in cmudict:
                    cmudict[word].append(pronunciation)
                else:
                    cmudict[word] = [pronunciation]
    return cmudict
def norm_constraint(tensor_var, max_norm, norm_axes=None, epsilon=1e-07):
    """Return `tensor_var` rescaled so its norm does not exceed `max_norm`.

    The norm is taken over `norm_axes` when given; otherwise over axis 0
    for 2-D tensors (dense weights, per output unit) or over all but the
    first axis for 3/4/5-D tensors (conv weights, per output channel).
    `epsilon` guards against division by zero.
    """
    ndim = tensor_var.ndim
    if norm_axes is not None:
        sum_over = tuple(norm_axes)
    elif ndim == 2:
        sum_over = (0,)
    elif ndim in (3, 4, 5):
        sum_over = tuple(range(1, ndim))
    else:
        raise ValueError('Unsupported tensor dimensionality {}.Must specify `norm_axes`'.format(ndim))
    # Cast the scalars to the configured float dtype to avoid upcasting.
    dtype = np.dtype(pytensor.config.floatX).type
    norms = pt.sqrt(pt.sum(pt.sqr(tensor_var), axis=sum_over, keepdims=True))
    target_norms = pt.clip(norms, 0, dtype(max_norm))
    return tensor_var * (target_norms / (dtype(epsilon) + norms))
class DirectSoundBuffer:
    """Wrapper around an ``IDirectSoundBuffer`` plus its optional 3D interface.

    Mono buffers additionally expose an ``IDirectSound3DBuffer`` for
    positional audio; every 3D accessor degrades to a neutral value on
    non-3D (stereo) buffers.

    NOTE: the source text contained duplicate getter/setter defs plus stray
    ``_position.setter``-style lines and used ``self.is3d`` as an attribute
    — the stripped ``@property``/``.setter`` decorators are restored here.
    """

    def __init__(self, native_buffer, audio_format, buffer_size):
        self.audio_format = audio_format
        self.buffer_size = buffer_size
        self._native_buffer = native_buffer
        if audio_format is not None and audio_format.channels == 1:
            # Only mono buffers can be positioned in 3D space.
            self._native_buffer3d = lib.IDirectSound3DBuffer()
            self._native_buffer.QueryInterface(lib.IID_IDirectSound3DBuffer, ctypes.byref(self._native_buffer3d))
        else:
            self._native_buffer3d = None

    def delete(self):
        """Stop playback and release both COM interfaces (idempotent)."""
        if self._native_buffer is not None:
            self._native_buffer.Stop()
            self._native_buffer.Release()
            self._native_buffer = None
        if self._native_buffer3d is not None:
            self._native_buffer3d.Release()
            self._native_buffer3d = None

    @property
    def volume(self):
        vol = lib.LONG()
        _check(self._native_buffer.GetVolume(ctypes.byref(vol)))
        return vol.value

    @volume.setter
    def volume(self, value):
        _check(self._native_buffer.SetVolume(value))

    @property
    def current_position(self):
        """Play/write cursor positions as a _CurrentPosition pair."""
        play_cursor = lib.DWORD()
        write_cursor = lib.DWORD()
        _check(self._native_buffer.GetCurrentPosition(play_cursor, write_cursor))
        return _CurrentPosition(play_cursor.value, write_cursor.value)

    @current_position.setter
    def current_position(self, value):
        _check(self._native_buffer.SetCurrentPosition(value))

    @property
    def is3d(self):
        return self._native_buffer3d is not None

    @property
    def is_playing(self):
        return (self._get_status() & lib.DSBSTATUS_PLAYING) != 0

    @property
    def is_buffer_lost(self):
        return (self._get_status() & lib.DSBSTATUS_BUFFERLOST) != 0

    def _get_status(self):
        status = lib.DWORD()
        _check(self._native_buffer.GetStatus(status))
        return status.value

    @property
    def position(self):
        if self.is3d:
            position = lib.D3DVECTOR()
            _check(self._native_buffer3d.GetPosition(ctypes.byref(position)))
            return (position.x, position.y, position.z)
        else:
            return (0, 0, 0)

    @position.setter
    def position(self, position):
        if self.is3d:
            (x, y, z) = position
            _check(self._native_buffer3d.SetPosition(x, y, z, lib.DS3D_IMMEDIATE))

    @property
    def min_distance(self):
        if self.is3d:
            value = lib.D3DVALUE()
            _check(self._native_buffer3d.GetMinDistance(ctypes.byref(value)))
            return value.value
        else:
            return 0

    @min_distance.setter
    def min_distance(self, value):
        if self.is3d:
            _check(self._native_buffer3d.SetMinDistance(value, lib.DS3D_IMMEDIATE))

    @property
    def max_distance(self):
        if self.is3d:
            value = lib.D3DVALUE()
            _check(self._native_buffer3d.GetMaxDistance(ctypes.byref(value)))
            return value.value
        else:
            return 0

    @max_distance.setter
    def max_distance(self, value):
        if self.is3d:
            _check(self._native_buffer3d.SetMaxDistance(value, lib.DS3D_IMMEDIATE))

    @property
    def frequency(self):
        value = lib.DWORD()
        _check(self._native_buffer.GetFrequency(value))
        return value.value

    @frequency.setter
    def frequency(self, value):
        _check(self._native_buffer.SetFrequency(value))

    @property
    def cone_orientation(self):
        if self.is3d:
            orientation = lib.D3DVECTOR()
            _check(self._native_buffer3d.GetConeOrientation(ctypes.byref(orientation)))
            return (orientation.x, orientation.y, orientation.z)
        else:
            return (0, 0, 0)

    @cone_orientation.setter
    def cone_orientation(self, value):
        if self.is3d:
            (x, y, z) = value
            _check(self._native_buffer3d.SetConeOrientation(x, y, z, lib.DS3D_IMMEDIATE))

    _ConeAngles = namedtuple('_ConeAngles', ['inside', 'outside'])

    @property
    def cone_angles(self):
        if self.is3d:
            inside = lib.DWORD()
            outside = lib.DWORD()
            _check(self._native_buffer3d.GetConeAngles(ctypes.byref(inside), ctypes.byref(outside)))
            return self._ConeAngles(inside.value, outside.value)
        else:
            return self._ConeAngles(0, 0)

    def set_cone_angles(self, inside, outside):
        # Both angles must be set in one call, hence no property setter.
        if self.is3d:
            _check(self._native_buffer3d.SetConeAngles(inside, outside, lib.DS3D_IMMEDIATE))

    @property
    def cone_outside_volume(self):
        if self.is3d:
            volume = lib.LONG()
            _check(self._native_buffer3d.GetConeOutsideVolume(ctypes.byref(volume)))
            return volume.value
        else:
            return 0

    @cone_outside_volume.setter
    def cone_outside_volume(self, value):
        if self.is3d:
            _check(self._native_buffer3d.SetConeOutsideVolume(value, lib.DS3D_IMMEDIATE))

    def create_listener(self):
        """Query and wrap the 3D listener interface of this buffer."""
        native_listener = lib.IDirectSound3DListener()
        self._native_buffer.QueryInterface(lib.IID_IDirectSound3DListener, ctypes.byref(native_listener))
        return DirectSoundListener(native_listener)

    def play(self):
        _check(self._native_buffer.Play(0, 0, lib.DSBPLAY_LOOPING))

    def stop(self):
        _check(self._native_buffer.Stop())

    class _WritePointer:
        """Holds the two (pointer, length) regions returned by Lock()."""

        def __init__(self):
            self.audio_ptr_1 = ctypes.c_void_p()
            self.audio_length_1 = lib.DWORD()
            self.audio_ptr_2 = ctypes.c_void_p()
            self.audio_length_2 = lib.DWORD()

    def lock(self, write_cursor, write_size):
        """Lock `write_size` bytes from `write_cursor`; may wrap (two regions)."""
        assert _debug('DirectSoundBuffer.lock({}, {})'.format(write_cursor, write_size))
        pointer = self._WritePointer()
        _check(self._native_buffer.Lock(write_cursor, write_size, ctypes.byref(pointer.audio_ptr_1), ctypes.byref(pointer.audio_length_1), ctypes.byref(pointer.audio_ptr_2), ctypes.byref(pointer.audio_length_2), 0))
        return pointer

    def unlock(self, pointer):
        _check(self._native_buffer.Unlock(pointer.audio_ptr_1, pointer.audio_length_1, pointer.audio_ptr_2, pointer.audio_length_2))
class PolynomialProfile(BaseParticle):
    """Polynomial-profile particle submodel (PyBaMM style).

    Approximates the radial concentration inside a particle with a
    polynomial in r ('uniform profile', 'quadratic profile' or
    'quartic profile'), so only the R-averaged concentration — plus, for
    higher orders, the surface concentration and R-averaged gradient —
    needs to be solved for.
    """

    def __init__(self, param, domain, options, phase='primary'):
        super().__init__(param, domain, options, phase)
        # Polynomial order requested for this electrode domain.
        self.name = getattr(self.options, self.domain)['particle']
        if (self.name == 'Fickian diffusion'):
            raise ValueError("Particle type must be 'uniform profile', 'quadratic profile' or 'quartic profile'")
        pybamm.citations.register('Subramanian2005')

    def get_fundamental_variables(self):
        """Define the state variables (R-averaged concentration, and the
        surface concentration / gradient for higher-order profiles) and
        reconstruct the full radial concentration from them."""
        (domain, Domain) = self.domain_Domain
        variables = {}
        if (self.size_distribution is False):
            c_s_rav = pybamm.Variable(f'R-averaged {domain} particle concentration [mol.m-3]', domain=f'{domain} electrode', auxiliary_domains={'secondary': 'current collector'}, bounds=(0, self.phase_param.c_max), scale=self.phase_param.c_max)
            r = pybamm.SpatialVariable(f'r_{domain[0]}', domain=[f'{domain} particle'], auxiliary_domains={'secondary': f'{domain} electrode', 'tertiary': 'current collector'}, coord_sys='spherical polar')
            R = self.phase_param.R
        else:
            # Size-distribution case: the state lives on the particle-size
            # domain and the scalar R-average is recovered by integration.
            c_s_rav_distribution = pybamm.Variable(f'R-averaged {domain} particle concentration distribution [mol.m-3]', domain=f'{domain} particle size', auxiliary_domains={'secondary': f'{domain} electrode', 'tertiary': 'current collector'}, bounds=(0, self.phase_param.c_max), scale=self.phase_param.c_max)
            r = pybamm.SpatialVariable(f'r_{domain[0]}', domain=[f'{domain} particle'], auxiliary_domains={'secondary': f'{domain} particle size', 'tertiary': f'{domain} electrode', 'quaternary': 'current collector'}, coord_sys='spherical polar')
            R = pybamm.SpatialVariable(f'R_{domain[0]}', domain=[f'{domain} particle size'], auxiliary_domains={'secondary': f'{domain} electrode', 'tertiary': 'current collector'}, coord_sys='cartesian')
            variables = self._get_distribution_variables(R)
            variables.update(self._get_standard_concentration_distribution_variables(c_s_rav_distribution))
            f_v_dist = variables[f'{Domain} volume-weighted particle-size distribution [m-1]']
            c_s_rav = pybamm.Integral((f_v_dist * c_s_rav_distribution), R)
            # Distribution case assumes a uniform-in-r profile: surface value
            # equals the R-average, broadcast over the particle domain.
            c_s_surf = c_s_rav
            c_s = pybamm.PrimaryBroadcast(c_s_rav, [f'{domain} particle'])
            variables.update(self._get_standard_concentration_variables(c_s, c_s_rav=c_s_rav, c_s_surf=c_s_surf))
            # Early return: the polynomial reconstruction below applies only
            # to the non-distribution case.
            return variables
        if (self.name == 'uniform profile'):
            c_s_surf = c_s_rav
        elif (self.name in ['quadratic profile', 'quartic profile']):
            c_s_surf = pybamm.Variable(f'{Domain} particle surface concentration [mol.m-3]', domain=f'{domain} electrode', auxiliary_domains={'secondary': 'current collector'}, bounds=(0, self.phase_param.c_max), scale=self.phase_param.c_max)
            if (self.name == 'quartic profile'):
                # Quartic profile additionally tracks the R-averaged gradient.
                q_s_rav = pybamm.Variable(f'R-averaged {domain} particle concentration gradient [mol.m-4]', domain=f'{domain} electrode', auxiliary_domains={'secondary': 'current collector'}, scale=(self.phase_param.c_max / self.phase_param.R_typ))
                variables.update({f'R-averaged {domain} particle concentration gradient [mol.m-4]': q_s_rav})
        # Coefficients of c_s = A + B*(r/R)^2 + C*(r/R)^4 for each order
        # (see Subramanian 2005).
        if (self.name == 'uniform profile'):
            A = c_s_rav
            B = pybamm.FullBroadcast(0, f'{domain} electrode', 'current collector')
            C = pybamm.FullBroadcast(0, f'{domain} electrode', 'current collector')
        elif (self.name == 'quadratic profile'):
            A = (((5 / 2) * c_s_rav) - ((3 / 2) * c_s_surf))
            B = ((5 / 2) * (c_s_surf - c_s_rav))
            C = pybamm.FullBroadcast(0, f'{domain} electrode', 'current collector')
        elif (self.name == 'quartic profile'):
            A = ((((39 / 4) * c_s_surf) - ((3 * q_s_rav) * R)) - ((35 / 4) * c_s_rav))
            B = ((((- 35) * c_s_surf) + (10 * q_s_rav)) + (35 * c_s_rav))
            C = ((((105 / 4) * c_s_surf) - ((7 * q_s_rav) * R)) - ((105 / 4) * c_s_rav))
        A = pybamm.PrimaryBroadcast(A, [f'{domain} particle'])
        B = pybamm.PrimaryBroadcast(B, [f'{domain} particle'])
        C = pybamm.PrimaryBroadcast(C, [f'{domain} particle'])
        c_s = ((A + ((B * (r ** 2)) / (R ** 2))) + ((C * (r ** 4)) / (R ** 4)))
        variables.update(self._get_standard_concentration_variables(c_s, c_s_rav=c_s_rav, c_s_surf=c_s_surf))
        return variables

    def get_coupled_variables(self, variables):
        """Compute the radial flux N_s implied by the polynomial profile."""
        (domain, Domain) = self.domain_Domain
        if (self.size_distribution is False):
            c_s = variables[f'{Domain} particle concentration [mol.m-3]']
            c_s_rav = variables[f'R-averaged {domain} particle concentration [mol.m-3]']
            c_s_surf = variables[f'{Domain} particle surface concentration [mol.m-3]']
            T = pybamm.PrimaryBroadcast(variables[f'{Domain} electrode temperature [K]'], [f'{domain} particle'])
            current = variables['Total current density [A.m-2]']
            D_eff = self._get_effective_diffusivity(c_s, T, current)
            r = pybamm.SpatialVariable(f'r_{domain[0]}', domain=[f'{domain} particle'], auxiliary_domains={'secondary': f'{domain} electrode', 'tertiary': 'current collector'}, coord_sys='spherical polar')
            R = variables[f'{Domain} particle radius [m]']
            variables.update(self._get_standard_diffusivity_variables(D_eff))
        else:
            # NOTE(review): distribution case defines none of D_eff/c_s_surf/r/R
            # here — it appears only the 'uniform profile' branch below (which
            # needs none of them) is reachable for distributions; confirm.
            pass
        if (self.name == 'uniform profile'):
            # Uniform concentration implies zero flux everywhere.
            N_s = pybamm.FullBroadcastToEdges(0, [f'{domain} particle'], auxiliary_domains={'secondary': f'{domain} electrode', 'tertiary': 'current collector'})
        elif (self.name == 'quadratic profile'):
            N_s = (((((- D_eff) * 5) * (c_s_surf - c_s_rav)) * r) / (R ** 2))
        elif (self.name == 'quartic profile'):
            q_s_rav = variables[f'R-averaged {domain} particle concentration gradient [mol.m-4]']
            N_s = ((- D_eff) * (((((((- 70) * c_s_surf) + ((20 * q_s_rav) * R)) + (70 * c_s_rav)) * r) / (R ** 2)) + (((((105 * c_s_surf) - ((28 * q_s_rav) * R)) - (105 * c_s_rav)) * (r ** 3)) / (R ** 4))))
        variables.update(self._get_standard_flux_variables(N_s))
        return variables

    def set_rhs(self, variables):
        """ODE for the R-averaged concentration (and gradient, if quartic)."""
        (domain, Domain) = self.domain_Domain
        if (self.size_distribution is False):
            c_s_rav = variables[f'R-averaged {domain} particle concentration [mol.m-3]']
            j = variables[f'{Domain} electrode interfacial current density [A.m-2]']
            R = variables[f'{Domain} particle radius [m]']
        else:
            c_s_rav = variables[f'R-averaged {domain} particle concentration distribution [mol.m-3]']
            j = variables[f'{Domain} electrode interfacial current density distribution [A.m-2]']
            R = variables[f'{Domain} particle sizes [m]']
        # Mole balance: d(c_rav)/dt = -3 j / (F R).
        self.rhs = {c_s_rav: ((((- 3) * j) / self.param.F) / R)}
        if (self.name == 'quartic profile'):
            q_s_rav = variables[f'R-averaged {domain} particle concentration gradient [mol.m-4]']
            c_s_rav = variables[f'R-averaged {domain} particle concentration [mol.m-3]']
            D_eff = variables[f'{Domain} particle effective diffusivity [m2.s-1]']
            self.rhs.update({q_s_rav: (((((- 30) * pybamm.r_average(D_eff)) * q_s_rav) / (R ** 2)) - ((((45 / 2) * j) / self.param.F) / (R ** 2)))})

    def set_algebraic(self, variables):
        """Algebraic closure relating surface concentration to the flux
        boundary condition (quadratic/quartic profiles only)."""
        if (self.name == 'uniform profile'):
            return
        (domain, Domain) = self.domain_Domain
        c_s_surf = variables[f'{Domain} particle surface concentration [mol.m-3]']
        c_s_rav = variables[f'R-averaged {domain} particle concentration [mol.m-3]']
        D_eff = variables[f'{Domain} particle effective diffusivity [m2.s-1]']
        j = variables[f'{Domain} electrode interfacial current density [A.m-2]']
        R = variables[f'{Domain} particle radius [m]']
        c_max = self.phase_param.c_max
        T_ref = self.param.T_ref
        # Residuals are nondimensionalised by a typical D*c scale for
        # better solver conditioning.
        D_c_max_scale = (self.phase_param.D(c_max, T_ref) * c_max)
        if (self.name == 'quadratic profile'):
            self.algebraic = {c_s_surf: (((pybamm.surf(D_eff) * (c_s_surf - c_s_rav)) + (((j * R) / self.param.F) / 5)) / D_c_max_scale)}
        elif (self.name == 'quartic profile'):
            q_s_rav = variables[f'R-averaged {domain} particle concentration gradient [mol.m-4]']
            D_c_max_over_R_scale = (D_c_max_scale / self.phase_param.R_typ)
            self.algebraic = {c_s_surf: (((pybamm.surf(D_eff) * (((35 / R) * (c_s_surf - c_s_rav)) - (8 * q_s_rav))) + (j / self.param.F)) / D_c_max_over_R_scale)}

    def set_initial_conditions(self, variables):
        """Initial conditions: the r-averaged initial concentration for all
        states; zero initial gradient for the quartic profile."""
        (domain, Domain) = self.domain_Domain
        c_init = pybamm.r_average(self.phase_param.c_init)
        if (self.size_distribution is False):
            c_s_rav = variables[f'R-averaged {domain} particle concentration [mol.m-3]']
        else:
            c_s_rav = variables[f'R-averaged {domain} particle concentration distribution [mol.m-3]']
            c_init = pybamm.PrimaryBroadcast(c_init, [f'{domain} particle size'])
        self.initial_conditions = {c_s_rav: c_init}
        if (self.name in ['quadratic profile', 'quartic profile']):
            c_s_surf = variables[f'{Domain} particle surface concentration [mol.m-3]']
            self.initial_conditions.update({c_s_surf: c_init})
        if (self.name == 'quartic profile'):
            q_s_rav = variables[f'R-averaged {domain} particle concentration gradient [mol.m-4]']
            self.initial_conditions.update({q_s_rav: 0})
def search_regex(pattern, string, name, default=NO_DEFAULT, fatal=True, flags=0, group=None):
    """Search ``string`` for ``pattern`` (a regex or an iterable of regexes).

    Returns the requested ``group`` (or the first non-None group when
    ``group`` is None). If nothing matches: return ``default`` when given;
    otherwise exit when ``fatal`` is true, or warn and return None.

    Fixes over the original: ``mobj`` is initialised so an empty pattern
    iterable cannot raise UnboundLocalError, and the non-fatal branch now
    returns None instead of duplicating the fatal print-and-exit (the
    ``fatal`` flag previously had no effect).
    """
    mobj = None
    if isinstance(pattern, str):
        mobj = re.search(pattern, string, flags)
    else:
        # Try each pattern in order; first match wins.
        for p in pattern:
            mobj = re.search(p, string, flags)
            if mobj:
                break
    if mobj:
        if group is None:
            # First capture group that actually participated in the match.
            return next(g for g in mobj.groups() if g is not None)
        return mobj.group(group)
    if default is not NO_DEFAULT:
        return default
    if fatal:
        print('[-] Unable to extract %s' % name)
        exit(0)
    print('[-] unable to extract %s' % name)
    return None
def patch_error_messages(patch_data: dict):
    """Rewrite checker message texts in place.

    ``patch_data`` maps module name -> {checker class name -> {error id ->
    replacement message}}. For each checker with a ``msgs`` mapping, the
    first element of each message tuple is replaced; checkers without
    ``msgs`` are reported and skipped.
    """
    for file_name, checkers in patch_data.items():
        module = import_module(file_name)
        for checker_name, replacements in checkers.items():
            checker = getattr(module, checker_name)
            if not hasattr(checker, 'msgs'):
                print('no msgs attribute!')
                continue
            for error_id, new_msg in replacements.items():
                old = checker.msgs[error_id]
                # Rebuild the tuple with the new text in slot 0.
                checker.msgs[error_id] = (new_msg,) + tuple(old[1:])
class TestTexioPSW360L30:
    """Hardware-in-the-loop tests for a TEXIO PSW-360L30 power supply.

    Requires the instrument to be reachable at RESOURCE. The bare
    ``.parametrize(...)`` lines in the original were stripped
    ``@pytest.mark.parametrize`` decorators (restored here, along with the
    ``@pytest.fixture`` on ``instr`` — without them the file was a syntax
    error and the tests received raw lists as arguments).
    """

    RESOURCE = 'TCPIP::192.168.10.119::2268::SOCKET'
    # Instantiated at import time so one connection is shared by all tests.
    INSTR = TexioPSW360L30(RESOURCE)
    CURRENT_LIMIT = [0.1, 0.5, 1]
    VOLTAGE_SETPOINT = [1, 2, 3, 4, 5]

    @pytest.fixture
    def instr(self):
        """Return the shared instrument, reset to a known state."""
        self.INSTR.reset()
        return self.INSTR

    @pytest.mark.parametrize('case', CURRENT_LIMIT)
    def test_current_limit_no_output(self, instr, case):
        instr.current_limit = case
        assert (instr.current_limit == case)

    @pytest.mark.parametrize('case', VOLTAGE_SETPOINT)
    def test_voltage_setpoint_no_output(self, instr, case):
        instr.voltage_setpoint = case
        assert (instr.voltage_setpoint == case)

    @pytest.mark.parametrize('voltage_setpoint', VOLTAGE_SETPOINT)
    @pytest.mark.parametrize('current_limit', CURRENT_LIMIT)
    def test_everything_without_apply(self, instr, voltage_setpoint, current_limit):
        instr.current_limit = current_limit
        instr.voltage_setpoint = voltage_setpoint
        instr.output_enabled = True
        time.sleep(1)  # allow the output to settle before measuring
        assert (instr.output_enabled is True)
        assert (instr.voltage_setpoint == voltage_setpoint)
        assert (instr.current_limit == current_limit)
        assert (instr.voltage == pytest.approx(voltage_setpoint, abs=0.1))
        assert (instr.current == pytest.approx(0, abs=0.1))
        assert (instr.power == pytest.approx(0, abs=0.1))
        instr.output_enabled = False
        assert (instr.output_enabled is False)

    @pytest.mark.parametrize('voltage_setpoint', VOLTAGE_SETPOINT)
    @pytest.mark.parametrize('current_limit', CURRENT_LIMIT)
    def test_everything_with_apply(self, instr, voltage_setpoint, current_limit):
        instr.applied = (voltage_setpoint, current_limit)
        instr.output_enabled = True
        time.sleep(1)  # allow the output to settle before measuring
        assert (instr.output_enabled is True)
        assert (instr.voltage_setpoint == voltage_setpoint)
        assert (instr.current_limit == current_limit)
        assert (instr.voltage == pytest.approx(voltage_setpoint, abs=0.1))
        assert (instr.current == pytest.approx(0, abs=0.1))
        assert (instr.power == pytest.approx(0, abs=0.1))
        instr.output_enabled = False
        assert (instr.output_enabled is False)
def test_create_poetry_fails_on_invalid_mode() -> None:
    """An invalid ``package-mode`` value must raise a descriptive RuntimeError."""
    fixture = Path(__file__).parent / 'fixtures' / 'invalid_mode' / 'pyproject.toml'
    with pytest.raises(RuntimeError) as e:
        Factory().create_poetry(fixture)
    assert str(e.value) == 'The Poetry configuration is invalid:\n - Invalid value for package-mode: invalid\n'
@resource('/v1/organization/<orgname>/validateproxycache')
@show_if(features.PROXY_CACHE)
class ProxyCacheConfigValidation(ApiResource):
    """Validate a proposed proxy-cache configuration by logging in to the
    upstream registry.

    NOTE(review): the original had the API decorators truncated to bare
    expressions (``('/v1/...')``, ``_if(...)``, ``_json_request(...)``);
    the standard ``@resource`` / ``@show_if`` / ``@nickname`` /
    ``@validate_json_request`` forms are restored here — confirm against
    the project's endpoint conventions.
    """

    schemas = {'NewProxyCacheConfig': {'type': 'object', 'description': 'Proxy cache configuration for an organization', 'required': ['upstream_registry'], 'properties': {'upstream_registry': {'type': 'string', 'description': 'Name of the upstream registry that is to be cached'}}}}

    @nickname('validateProxyCacheConfig')
    @validate_json_request('NewProxyCacheConfig')
    def post(self, orgname):
        """Attempt an upstream login with the submitted config.

        Returns 202 'Valid' on success, 202 'Anonymous' when the upstream
        allows anonymous access; raises a request error otherwise.
        """
        permission = AdministerOrganizationPermission(orgname)
        if ((not permission.can()) and (not allow_if_superuser())):
            raise Unauthorized()
        try:
            # Only one proxy-cache config is allowed per organization.
            model.proxy_cache.get_proxy_cache_config_for_org(orgname)
            request_error('Proxy Cache Configuration already exists')
        except model.InvalidProxyCacheConfigException:
            pass
        data = request.get_json()
        data = {k: v for (k, v) in data.items() if (v is not None)}
        try:
            config = ProxyCacheConfig(**data)
            existing = model.organization.get_organization(orgname)
            config.organization = existing
            # validation=True keeps the probe from persisting anything.
            proxy = Proxy(config, validation=True)
            response = proxy.get(f'{proxy.base_url}/v2/')
            if (response.status_code == 200):
                return ('Valid', 202)
            if (response.status_code == 401):
                return ('Anonymous', 202)
        except UpstreamRegistryError as e:
            raise request_error(message='Failed login to remote registry. Please verify entered details and try again.')
        raise request_error(message='Failed to validate Proxy cache configuration')
def elf_reader(file_obj):
    """Read an ELF file (opened in binary mode) into a SparseMemoryImage.

    Loads every ALLOC section; .bss/.sbss are zero-filled rather than read,
    and string/symbol tables are skipped from the memory image.
    """
    ehdr_data = file_obj.read(ElfHeader.NBYTES)
    ehdr = ElfHeader(ehdr_data)
    # BUG FIX: ident is bytes (binary read); comparing against the str
    # '\x7fELF' is always unequal on Python 3, so every valid ELF was
    # rejected. Compare against the bytes literal instead.
    if (ehdr.ident[0:4] != b'\x7fELF'):
        raise ValueError('Not a valid ELF file')
    # Locate the section-header string table via e_shstrndx.
    file_obj.seek((ehdr.shoff + (ehdr.shstrndx * ehdr.shentsize)))
    shdr_data = file_obj.read(ehdr.shentsize)
    shdr = ElfSectionHeader(shdr_data)
    file_obj.seek(shdr.offset)
    shstrtab_data = file_obj.read(shdr.size).decode()
    symtab_data = None
    strtab_data = None
    mem_image = SparseMemoryImage()
    for section_idx in range(ehdr.shnum):
        file_obj.seek((ehdr.shoff + (section_idx * ehdr.shentsize)))
        shdr_data = file_obj.read(ehdr.shentsize)
        # Pad short reads so the fixed-layout parser always has NBYTES.
        shdr_data = shdr_data.ljust(ElfSectionHeader.NBYTES, b'\x00')
        shdr = ElfSectionHeader(shdr_data)
        # Section name is NUL-terminated at offset shdr.name in shstrtab.
        start = shstrtab_data[shdr.name:]
        section_name = start.partition('\x00')[0]
        if (not (shdr.flags & ElfSectionHeader.FLAGS_ALLOC)):
            continue
        if (section_name not in ['.sbss', '.bss']):
            file_obj.seek(shdr.offset)
            data = file_obj.read(shdr.size)
        else:
            # bss-style sections occupy no file space; materialize zeros.
            data = (b'\x00' * shdr.size)
        if (shdr.type == ElfSectionHeader.TYPE_STRTAB):
            strtab_data = data
        elif (shdr.type == ElfSectionHeader.TYPE_SYMTAB):
            symtab_data = data
        else:
            section = SparseMemoryImage.Section(section_name, shdr.addr, data)
            mem_image.add_section(section)
    return mem_image
def multibox(fpn_level: int, num_anchors: int, num_classes: int, fea_channel: int, dis_channel: int, conv_block: nn.Module) -> tuple:
    """Build detection heads for each FPN level.

    Each head is ``conv_block`` (3x3, channel-preserving) followed by a 1x1
    projection to the task's output channels. Returns a tuple of three
    ``nn.ModuleList``s of length ``fpn_level``: localization (4 per anchor),
    classification (num_classes per anchor) and distillation heads.
    """
    def _head(out_channels):
        # Shared head pattern: 3x3 feature conv + 1x1 prediction conv.
        return nn.Sequential(
            conv_block(fea_channel, fea_channel, 3, padding=1),
            nn.Conv2d(fea_channel, out_channels, 1),
        )

    loc_heads, conf_heads, dist_heads = [], [], []
    for _ in range(fpn_level):
        # Keep the loc/conf/dist creation order per level, matching the
        # original module-construction (and hence parameter-init) order.
        loc_heads.append(_head(num_anchors * 4))
        conf_heads.append(_head(num_anchors * num_classes))
        dist_heads.append(_head(dis_channel))
    return (nn.ModuleList(loc_heads), nn.ModuleList(conf_heads), nn.ModuleList(dist_heads))
class F13_SshPw(KickstartCommand):
    """Kickstart ``sshpw`` command (Fedora 13): installer-environment ssh
    accounts. Parsed users are collected in ``sshUserList``."""
    removedKeywords = KickstartCommand.removedKeywords
    removedAttrs = KickstartCommand.removedAttrs

    def __init__(self, writePriority=0, *args, **kwargs):
        KickstartCommand.__init__(self, writePriority, *args, **kwargs)
        self.op = self._getParser()
        self.sshUserList = kwargs.get('sshUserList', [])

    def __str__(self):
        # Emit one sshpw line per parsed user.
        retval = ''
        for user in self.sshUserList:
            retval += user.__str__()
        return retval

    def _getParser(self):
        """Build the option parser for the sshpw command."""
        op = KSOptionParser(prog='sshpw', description='\n The installer can start up ssh to provide for\n interactivity and inspection, just like it can with\n telnet. The "inst.sshd" option must be specified on\n the kernel command-line for Anaconda to start an ssh\n daemon. The sshpw command is used to control the\n accounts created in the installation environment that\n may be remotely logged into. For each instance of\n this command given, a user will be created. These\n users will not be created on the final system -\n they only exist for use while the installer is\n running.\n\n Note that by default, root has a blank password. If\n you don\'t want any user to be able to ssh in and\n have full access to your hardware, you must specify\n sshpw for username root. Also note that if Anaconda\n fails to parse the kickstart file, it will allow\n anyone to login as root and have full access to\n your hardware.', version=F13)
        op.add_argument('--username', required=True, metavar='<name>', version=F13, help='\n Provides the name of the user. This option is required.\n ')
        op.add_argument('--iscrypted', dest='isCrypted', action='store_true', default=False, version=F13, help='\n If this is present, the password argument is assumed to\n already be encrypted.')
        op.add_argument('--plaintext', dest='isCrypted', action='store_false', version=F13, help='\n If this is present, the password argument is assumed to\n not be encrypted. This is the default.')
        op.add_argument('--lock', action='store_true', default=False, version=F13, help='\n If this is present, the new user account is locked by\n default. That is, the user will not be able to login\n from the console.')
        op.add_argument('password', metavar='<password>', nargs='*', version=F13, help='\n The password string to use.')
        return op

    def parse(self, args):
        """Parse one sshpw command line into a data object."""
        # dataClass is a property returning the data class; call it to
        # instantiate a fresh user record.
        ud = self.dataClass()
        (ns, extra) = self.op.parse_known_args(args=args, lineno=self.lineno)
        if (not ns.password):
            raise KickstartParseError((_('A single argument is expected for the %s command') % 'sshpw'), lineno=self.lineno)
        if extra:
            mapping = {'command': 'sshpw', 'options': extra}
            raise KickstartParseError((_('Unexpected arguments to %(command)s command: %(options)s') % mapping), lineno=self.lineno)
        self.set_to_obj(ns, ud)
        ud.password = ' '.join(ns.password)
        ud.lineno = self.lineno
        if (ud in self.dataList()):
            warnings.warn((_('An ssh user with the name %s has already been defined.') % ud.username), KickstartParseWarning)
        return ud

    def dataList(self):
        return self.sshUserList

    @property
    def dataClass(self):
        # BUG FIX: restored the @property decorator — parse() calls
        # ``self.dataClass()`` expecting instantiation of the returned
        # class; as a plain method that call returned the class itself.
        return self.handler.SshPwData
class GeneralizedLiftedStructureLoss(GenericPairLoss):
    """Generalized lifted-structure embedding loss.

    For each anchor, takes a log-sum-exp over the margin-violating positive
    and negative pair distances and applies a hinge (relu) to their sum,
    producing one loss value per element.
    """

    def __init__(self, neg_margin=1, pos_margin=0, **kwargs):
        # mat_based_loss=True: _compute_loss receives a full pairwise
        # distance matrix plus positive/negative pair masks.
        super().__init__(mat_based_loss=True, **kwargs)
        self.neg_margin = neg_margin
        self.pos_margin = pos_margin
        self.add_to_recordable_attributes(list_of_names=['pos_margin', 'neg_margin'], is_stat=False)

    def _compute_loss(self, mat, pos_mask, neg_mask):
        # How far each positive pair exceeds pos_margin, and each negative
        # pair falls short of neg_margin (semantics delegated to the
        # distance object's margin()).
        remaining_pos_margin = self.distance.margin(mat, self.pos_margin)
        remaining_neg_margin = self.distance.margin(self.neg_margin, mat)
        pos_loss = lmu.logsumexp(remaining_pos_margin, keep_mask=pos_mask.bool(), add_one=False)
        neg_loss = lmu.logsumexp(remaining_neg_margin, keep_mask=neg_mask.bool(), add_one=False)
        return {'loss': {'losses': torch.relu((pos_loss + neg_loss)), 'indices': c_f.torch_arange_from_size(mat), 'reduction_type': 'element'}}
def create_dictionary(dataroot):
    """Build a word Dictionary from all VQA v2 question files under ``dataroot``.

    Tokenizes every question (adding new words to the dictionary) plus the
    special 'wordmask' token.
    """
    dictionary = Dictionary()
    files = ['v2_OpenEnded_mscoco_train2014_questions.json', 'v2_OpenEnded_mscoco_val2014_questions.json', 'v2_OpenEnded_mscoco_test2015_questions.json', 'v2_OpenEnded_mscoco_test-dev2015_questions.json']
    for path in files:
        question_path = os.path.join(dataroot, path)
        # Use a context manager — the original json.load(open(...)) leaked
        # the file handle.
        with open(question_path) as f:
            qs = json.load(f)['questions']
        for q in qs:
            # True -> add unknown words to the dictionary while tokenizing.
            dictionary.tokenize(q['question'], True)
    dictionary.tokenize('wordmask', True)
    return dictionary
class AdditionsExportAll(ContextMenuSelection):
    """Context-menu entry that copies the selected fit additions (drones,
    fighters, cargo, implants or boosters) to the clipboard."""
    # Menu only shown when the copy/paste additions setting is enabled.
    visibilitySetting = 'additionsCopyPaste'

    def __init__(self):
        self.mainFrame = gui.mainFrame.MainFrame.getInstance()
        # Maps the source context to (display name, export function).
        self.viewSpecMap = {'droneItemMisc': (_t('Drones'), exportDrones), 'fighterItemMisc': (_t('Fighters'), exportFighters), 'cargoItemMisc': (_t('Cargo Items'), exportCargo), 'implantItemMisc': (_t('Implants'), exportImplants), 'implantItemMiscChar': (_t('Implants'), exportImplants), 'boosterItemMisc': (_t('Boosters'), exportBoosters)}

    def display(self, callingWindow, srcContext, selection):
        """Show the entry only for known contexts, a non-empty selection and
        an active fit; remembers the context for getText/activate."""
        if (srcContext not in self.viewSpecMap):
            return False
        if (not selection):
            return False
        fit = Fit.getInstance().getFit(self.mainFrame.getActiveFit())
        if (fit is None):
            return False
        self.srcContext = srcContext
        return True

    def getText(self, callingWindow, itmContext, selection):
        # E.g. "Copy Selected Drones".
        return _t('Copy Selected {}').format(self.viewSpecMap[self.srcContext][0])

    def activate(self, callingWindow, fullContext, selection, i):
        # Run the context-specific exporter and copy any output.
        export = self.viewSpecMap[self.srcContext][1](selection)
        if export:
            toClipboard(export)
class PushButton(WidgetBase):
    """Image push-button widget (pyglet-style) with pressed/depressed/hover
    images, dispatching 'on_press' and 'on_release' events."""

    def __init__(self, x, y, pressed, depressed, hover=None, batch=None, group=None):
        """Create a push button at (x, y); ``hover`` falls back to the
        depressed image when not given."""
        super().__init__(x, y, depressed.width, depressed.height)
        self._pressed_img = pressed
        self._depressed_img = depressed
        self._hover_img = (hover or depressed)
        self._batch = (batch or pyglet.graphics.Batch())
        self._user_group = group
        bg_group = Group(order=0, parent=group)
        self._sprite = pyglet.sprite.Sprite(self._depressed_img, x, y, batch=batch, group=bg_group)
        self._pressed = False

    def _update_position(self):
        self._sprite.position = (self._x, self._y, 0)

    @property
    def value(self):
        """Current pressed state.

        BUG FIX: the ``@property`` / ``@value.setter`` decorators were
        missing, so the second ``def value`` silently shadowed the getter.
        """
        return self._pressed

    @value.setter
    def value(self, value):
        assert (type(value) is bool), "This Widget's value must be True or False."
        self._pressed = value
        self._sprite.image = (self._pressed_img if self._pressed else self._depressed_img)

    def update_groups(self, order):
        self._sprite.group = Group(order=(order + 1), parent=self._user_group)

    def on_mouse_press(self, x, y, buttons, modifiers):
        if ((not self.enabled) or (not self._check_hit(x, y))):
            return
        self._sprite.image = self._pressed_img
        self._pressed = True
        self.dispatch_event('on_press')

    def on_mouse_release(self, x, y, buttons, modifiers):
        if ((not self.enabled) or (not self._pressed)):
            return
        # Hover image when released over the button, otherwise depressed.
        self._sprite.image = (self._hover_img if self._check_hit(x, y) else self._depressed_img)
        self._pressed = False
        self.dispatch_event('on_release')

    def on_mouse_motion(self, x, y, dx, dy):
        if ((not self.enabled) or self._pressed):
            return
        self._sprite.image = (self._hover_img if self._check_hit(x, y) else self._depressed_img)

    def on_mouse_drag(self, x, y, dx, dy, buttons, modifiers):
        if ((not self.enabled) or self._pressed):
            return
        self._sprite.image = (self._hover_img if self._check_hit(x, y) else self._depressed_img)
class InceptionResNetV2(nn.Module):
    """Inception-ResNet-v2 backbone with a linear classifier head.

    Identical architecture to the verbose original; the repeated residual
    blocks are built with comprehensions instead of long literal chains.
    """

    def __init__(self, num_classes=1001, image_size=299, model_input_channels=3):
        super(InceptionResNetV2, self).__init__()
        # Metadata slots used by pretrained-model loaders.
        self.input_space = None
        self.input_size = (299, 299, 3)
        self.mean = None
        self.std = None
        # Stem.
        self.conv2d_1a = BasicConv2d(3, 32, kernel_size=3, stride=2)
        self.conv2d_2a = BasicConv2d(32, 32, kernel_size=3, stride=1)
        self.conv2d_2b = BasicConv2d(32, 64, kernel_size=3, stride=1, padding=1)
        self.maxpool_3a = nn.MaxPool2d(3, stride=2)
        self.conv2d_3b = BasicConv2d(64, 80, kernel_size=1, stride=1)
        self.conv2d_4a = BasicConv2d(80, 192, kernel_size=3, stride=1)
        self.maxpool_5a = nn.MaxPool2d(3, stride=2)
        # Inception-ResNet stages: 10x block35, 20x block17, 9x block8.
        self.mixed_5b = Mixed_5b()
        self.repeat = nn.Sequential(*[Block35(scale=0.17) for _ in range(10)])
        self.mixed_6a = Mixed_6a()
        self.repeat_1 = nn.Sequential(*[Block17(scale=0.1) for _ in range(20)])
        self.mixed_7a = Mixed_7a()
        self.repeat_2 = nn.Sequential(*[Block8(scale=0.2) for _ in range(9)])
        self.block8 = Block8(noReLU=True)
        self.conv2d_7b = BasicConv2d(2080, 1536, kernel_size=1, stride=1)
        # Head.
        self.avgpool_1a = nn.AvgPool2d(8, count_include_pad=False)
        self.last_linear = nn.Linear(1536, num_classes)

    def features(self, input):
        """Run the convolutional trunk; returns a 1536-channel feature map."""
        x = input
        for stage in (self.conv2d_1a, self.conv2d_2a, self.conv2d_2b,
                      self.maxpool_3a, self.conv2d_3b, self.conv2d_4a,
                      self.maxpool_5a, self.mixed_5b, self.repeat,
                      self.mixed_6a, self.repeat_1, self.mixed_7a,
                      self.repeat_2, self.block8, self.conv2d_7b):
            x = stage(x)
        return x

    def logits(self, features):
        """Pool, flatten and classify the trunk's feature map."""
        pooled = self.avgpool_1a(features)
        flat = pooled.view(pooled.size(0), -1)
        return self.last_linear(flat)

    def forward(self, input):
        return self.logits(self.features(input))
# NOTE(review): the three bare expressions below look like stripped route /
# permission decorators (e.g. @...route('/logs', ...) and @..._required('LOGS'))
# — confirm against the original blueprint before relying on this view.
('/logs', methods=['GET', 'POST'], endpoint='logs')
('/logs/<int:start_line>', methods=['GET', 'POST'], endpoint='logs')
_required('LOGS')
def logs(start_line=(- 1)):
    """Render the log viewer: paginated, optionally reversed, optionally
    filtered to entries at/after a POSTed 'from' timestamp."""
    s = flask.session
    api = flask.current_app.config['PYLOAD_API']
    # Per-session display preferences (page size, newest-first).
    per_page = s.get('perpage', 34)
    reversed = s.get('reversed', False)  # NOTE: shadows the builtin `reversed`
    warning = ''
    conf = api.get_config_value('log', 'filelog')
    if (not conf):
        warning = 'Warning: File log is disabled, see settings page.'
    # (value, label) pairs for the page-size selector; 0 means "all".
    per_page_selection = ((20, 20), (34, 34), (40, 40), (100, 100), (0, 'all'))
    fro = None
    if (flask.request.method == 'POST'):
        try:
            # Optional lower-bound timestamp filter from the form.
            from_form = flask.request.form['from']
            fro = datetime.datetime.strptime(from_form, '%Y-%m-%d %H:%M:%S')
        except Exception:
            pass
        per_page = int(flask.request.form.get('perpage', 34))
        s['perpage'] = per_page
        reversed = bool(flask.request.form.get('reversed', False))
        s['reversed'] = reversed
    log_entries = api.get_log()
    if (not per_page):
        start_line = 0
    if (start_line < 1):
        # Default to the last page (or the whole log when it fits / "all").
        start_line = (1 if ((((len(log_entries) - per_page) + 1) < 1) or (per_page == 0)) else ((len(log_entries) - per_page) + 1))
    if isinstance(fro, datetime.datetime):
        # A timestamp filter overrides the line offset; -1 means "find it".
        start_line = (- 1)
    data = []
    inpage_counter = 0
    for (counter, logline) in enumerate(log_entries, start=1):
        if (counter >= start_line):
            try:
                # Parse "date time level source message"; fall back to raw.
                (date, time, level, source, message) = _RE_LOGLINE.match(logline).groups()
                dtime = datetime.datetime.strptime(((date + ' ') + time), '%Y-%m-%d %H:%M:%S')
                message = message.strip()
            except (AttributeError, IndexError):
                dtime = None
                date = '?'
                time = ' '
                level = '?'
                source = '?'
                message = logline
            if ((start_line == (- 1)) and (dtime is not None) and (fro <= dtime)):
                # First entry at/after the requested timestamp starts the page.
                start_line = counter
            if (start_line >= 0):
                data.append({'line': counter, 'date': ((date + ' ') + time), 'level': level, 'source': source, 'message': message.rstrip('\n')})
                inpage_counter += 1
                if ((fro is None) and (dtime is not None)):
                    # Remember the first shown timestamp for the template.
                    fro = dtime
            if (inpage_counter >= per_page > 0):
                break
    if (fro is None):
        fro = datetime.datetime.now()
    if reversed:
        data.reverse()
    # iprev/inext are the start lines for the pagination links.
    context = {'warning': warning, 'log': data, 'from': fro.strftime('%Y-%m-%d %H:%M:%S'), 'reversed': reversed, 'perpage': per_page, 'perpage_p': sorted(per_page_selection), 'iprev': max((start_line - per_page), 1), 'inext': ((start_line + per_page) if ((start_line + per_page) <= len(log_entries)) else start_line)}
    return render_template('logs.html', **context)
class ProjectList(list):
    """A list of projects in a workspace, with a name index and persistence.

    Fixes over the original: ``from_file`` regained its missing
    ``@classmethod`` decorator (it takes ``cls`` but was a plain method, so
    calling it on the class bound the filename to ``cls``), and ``save``
    now uses a context manager instead of a bare open/close.
    """

    def __init__(self, workspace):
        super().__init__()
        self.workspace = workspace
        # Maps project_name -> project for O(1) lookup.
        self.name_index = {}

    def append(self, something, ignore_duplicates=False):
        """Add a project; duplicates are an error unless ignore_duplicates."""
        assert (ignore_duplicates or (something.project_name not in self.name_index)), 'Attempt to add duplicate project to project list'
        if (something.project_name not in self.name_index):
            self.name_index[something.project_name] = something
        super().append(something)

    def project_in(self, directory):
        """Return the project whose directory matches; raises ProjectNotFound."""
        full_path = os.path.normpath(directory)
        for project in self:
            if (project.directory == full_path):
                return project
        raise ProjectNotFound(full_path)

    def has_project_named(self, name):
        return (name in self.name_index.keys())

    def project_named(self, name):
        """Return the project with the given name; raises ProjectNotFound."""
        try:
            return self.name_index[name]
        except KeyError:
            raise ProjectNotFound(name)

    @classmethod
    def from_file(cls, filename, workspace):
        """Alternate constructor: a ProjectList read from a saved file."""
        projects = ProjectList(workspace)
        projects.read(filename)
        return projects

    def collect_projects(self, directories):
        """Walk the given directories and register every complete project,
        honouring .reahlignore files and pruning below childless projects."""
        for directory in directories:
            for (root, dirs, files) in os.walk(os.path.abspath(directory)):
                if ('.reahlignore' in files):
                    # Prune ignored subdirectories from the walk in place.
                    ignore_list = DirectoryList.from_file(os.path.join(root, '.reahlignore'))
                    for i in ignore_list:
                        try:
                            dirs.remove(i)
                        except ValueError:
                            pass
                if (('pyproject.toml' in files) and PyprojectMetadata.from_file_in(root).is_complete):
                    project = Project.from_file_in(self.workspace, root)
                    self.append(project, ignore_duplicates=True)
                    if (not project.has_children):
                        dirs[:] = []

    def save(self, filename):
        """Write the relative directory of each project, one per line."""
        pathlib.Path(pathlib.Path(filename).parent).mkdir(parents=True, exist_ok=True)
        with open(filename, 'w') as f:
            f.writelines([('%s\n' % i.relative_directory) for i in self])

    def delete(self, filename):
        os.remove(filename)

    def read(self, filename):
        """Replace the contents with projects listed in ``filename``,
        skipping (with a warning) directories that no longer exist."""
        self[:] = []
        if (not os.path.isfile(filename)):
            return
        with open(filename, 'r') as f:
            project_dirs = f.read().splitlines()
        for name in project_dirs:
            full_dir = os.path.join(self.workspace.directory, name)
            if os.path.isdir(full_dir):
                self.append(Project.from_file_in(self.workspace, full_dir))
            else:
                logging.getLogger(__name__).warning(('Skipping %s, it does not exist anymore' % name))

    def select(self, append=False, all_=False):
        """Return a selection containing all projects.

        NOTE(review): ``all_`` is currently unused and every project is
        selected — presumably filtering was done elsewhere; confirm.
        """
        if append:
            selection = self.workspace.selection
        else:
            selection = ProjectList(self.workspace)
        for i in self:
            selection.append(i)
        return selection

    def select_one(self, directory):
        """Return a selection with the project in ``directory``, reading it
        from disk if it is not already registered."""
        selection = ProjectList(self.workspace)
        for i in self:
            if (i.directory == directory):
                selection.append(i)
                return selection
        selection.append(Project.from_file_in(self.workspace, directory))
        return selection
def load(file, file_format=None, **kwargs):
    """Deserialize ``file`` (a path, bytes blob or file-like object).

    When ``file_format`` is not given it is inferred from the path's
    extension. Raises TypeError for unsupported formats or argument types.
    """
    # Normalize Path -> str and raw bytes -> an in-memory file object.
    if isinstance(file, Path):
        file = str(file)
    if isinstance(file, bytes):
        file = BytesIO(file)
    if is_str(file) and (file_format is None):
        file_format = file.split('.')[(- 1)]
    if (file_format not in file_handlers):
        raise TypeError(f'Unsupported format: {file_format}')
    handler = file_handlers[file_format]
    if is_str(file):
        return handler.load_from_path(osp.expanduser(file), **kwargs)
    if hasattr(file, 'read'):
        return handler.load_from_fileobj(file, **kwargs)
    raise TypeError('"file" must be a filepath str or a file-object')
def test_convert_overwrite_with_option(runner, tmpdir):
    """Converting over an existing output succeeds when --overwrite is given."""
    outputname = str(tmpdir.join('test.tif'))
    base_args = ['convert', 'tests/data/RGB.byte.tif', '-o', outputname, '-f', 'JPEG']
    # First run creates the output file.
    assert runner.invoke(main_group, base_args).exit_code == 0
    # Second run must succeed because of --overwrite.
    assert runner.invoke(main_group, base_args + ['--overwrite']).exit_code == 0
class TestResizablePanes(unittest.TestCase):
    """Smoke test: build the resizable_panes demo app and validate its HTML."""

    @classmethod
    def setUpClass(cls):
        # BUG FIX: @classmethod was missing — unittest invokes
        # cls.setUpClass() with no arguments, so the bare function raised
        # TypeError before any test ran.
        import resizable_panes
        cls.AppClass = resizable_panes.MyApp

    def setUp(self):
        # Silence request logging for the duration of the test.
        self.AppClass.log_request = (lambda x, y: None)

    def tearDown(self):
        del self.AppClass.log_request
        # NOTE(review): self.app only exists after test_main ran — fine
        # while it is the sole test; confirm if more tests are added.
        self.app.on_close()

    def test_main(self):
        self.app = self.AppClass(MockRequest(), ('0.0.0.0', 8888), MockServer())
        root_widget = self.app.main()
        html = root_widget.repr()
        assertValidHTML(html)
def test_with_dependency_groups(package_with_groups: Package) -> None:
    """with_dependency_groups() keeps main requires fixed and adds the
    requested groups' dependencies to all_requires."""
    no_groups = package_with_groups.with_dependency_groups([])
    assert len(no_groups.requires) == 2
    assert len(no_groups.all_requires) == 3
    with_optional = package_with_groups.with_dependency_groups(['optional'])
    assert len(with_optional.requires) == 2
    assert len(with_optional.all_requires) == 4
def updateMjlx(unitaries, qnnarch, rho, omega, l, j, x):
    """Accumulate the M_{j,l,x} commutator sum for the QNN update rule.

    Starts from the commutator of the z == x parts and adds the commutator
    for every z in 1..x-1.
    """
    total = qt.commutator(
        part1ofcommutator1and2(unitaries, qnnarch, rho, l, j, x),
        part2ofcommutator1(unitaries, qnnarch, omega, l, j, x),
    )
    for z in range(1, x):
        total = total + qt.commutator(
            part1ofcommutator1and2(unitaries, qnnarch, rho, l, j, z),
            part2ofcommutator2(unitaries, qnnarch, omega, l, j, x, z),
        )
    return total
class FC3_TestCase(CommandTest):
    """Parse/round-trip tests for the FC3 ``network`` kickstart command."""

    def __init__(self, *kargs, **kwargs):
        CommandTest.__init__(self, *kargs, **kwargs)
        # Boot protocols accepted by the FC3 network command.
        self.bootProtos = ['dhcp', 'bootp', 'static']

    def runTest(self):
        """Exercise equality, option formatting, every boot protocol, and
        error handling for the network command."""
        # Equality/inequality of parsed network data objects.
        self.assertEqual(self.assert_parse('network --device=eth0'), self.assert_parse('network --device=eth0'))
        self.assertNotEqual(self.assert_parse('network --device=eth0'), None)
        self.assertNotEqual(self.assert_parse('network --device=eth0'), self.assert_parse('network --device=eth1'))
        # Each option formats back to its canonical string.
        self.assert_parse('network --device=eth0 --dhcpclass CLASS', 'network --bootproto=dhcp --dhcpclass=CLASS --device=eth0\n')
        self.assert_parse('network --device=eth0 --essid ESSID --wepkey WEPKEY', 'network --bootproto=dhcp --device=eth0 --essid="ESSID" --wepkey=WEPKEY\n')
        self.assert_parse('network --device=eth0 --ethtool "gro on" --mtu=1200', 'network --bootproto=dhcp --device=eth0 --ethtool="gro on" --mtu=1200\n')
        self.assert_parse('network --device=eth0 --gateway gateway.wherever.com --hostname server.wherever.com', 'network --bootproto=dhcp --device=eth0 --gateway=gateway.wherever.com --hostname=server.wherever.com\n')
        self.assert_parse('network --device=eth0 --ip 1.2.3.4 --netmask 255.255.255.0', 'network --bootproto=dhcp --device=eth0 --ip=1.2.3.4 --netmask=255.255.255.0\n')
        self.assert_parse('network --device=eth0 --nameserver ns.wherever.com', 'network --bootproto=dhcp --device=eth0 --nameserver=ns.wherever.com\n')
        self.assert_parse('network --device=eth0 --nodns', 'network --bootproto=dhcp --device=eth0 --nodns\n')
        self.assert_parse('network --device=eth0 --onboot=off', 'network --bootproto=dhcp --device=eth0 --onboot=off\n')
        # Every valid boot protocol round-trips; invalid ones are errors.
        for bp in self.bootProtos:
            self.assert_parse(('network --device=eth0 --bootproto=%s' % bp), ('network --bootproto=%s --device=eth0\n' % bp))
        self.assert_parse_error('network --device=eth0 --bootproto=bogus')
        self.assert_parse_error('network --bogus-option')
        # The command string is empty until a NetworkData is attached.
        nic = self.handler().NetworkData(device='eth0')
        cmd = self.handler().commands['network']
        self.assertEqual(cmd.__str__(), '')
        cmd.network.append(nic)
        self.assertEqual(cmd.__str__(), '# Network information\nnetwork --bootproto=dhcp --device=eth0\n')
def train_video_interpolator(cfg):
    """Train a NextNet-backed conditional diffusion model on video frames."""
    max_steps = 50000
    video_frames = read_frames_from_dir(f'./images/video/{cfg.image_name}')
    # Crop to 95% of the smaller spatial dimension of the first frame.
    crop = int(min(video_frames[0].shape[-2:]) * 0.95)
    dataset = TemporalInterpolationFrameSet(frames=video_frames, crop_size=crop)
    loader = DataLoader(dataset, batch_size=1, num_workers=4, shuffle=True)
    # in_channels=9 — presumably three stacked RGB frames; confirm against
    # TemporalInterpolationFrameSet's output layout.
    backbone = NextNet(in_channels=9, filters_per_layer=cfg.network_filters, depth=cfg.network_depth)
    diffusion = ConditionalDiffusion(backbone, training_target='x0', timesteps=cfg.diffusion_timesteps)
    checkpointing = pl.callbacks.ModelCheckpoint(filename='single-level-{step}', save_last=True, save_top_k=3, monitor='train_loss', mode='min')
    callbacks = [pl.callbacks.ModelSummary(max_depth=-1), checkpointing]
    logger = pl.loggers.TensorBoardLogger('lightning_logs/', name=cfg.image_name, version=cfg.run_name + '_interpolator')
    trainer = pl.Trainer(max_steps=max_steps, gpus=1, auto_select_gpus=True, logger=logger, log_every_n_steps=10, callbacks=callbacks)
    trainer.fit(diffusion, loader)
def search_for_possible_decoding_options(trf_table_contents, version, item_parts_length, item_parts):
    """Brute-force search for viable (line grouping, state-codes column) pairs.

    Returns a list of ``[line_grouping, linac_state_codes_column]`` pairs for
    which ``decode_rows`` completed without raising ValueError.
    """
    # BUG FIX: the original iterated over ``item_parts_length`` itself; an int
    # is not iterable, so wrap it in range() to try each candidate grouping.
    line_grouping_range = range(item_parts_length)
    linac_state_codes_column_range = range(0, 50)
    possible_groupings = []
    for line_grouping in line_grouping_range:
        for linac_state_codes_column in linac_state_codes_column_range:
            try:
                # NOTE(review): neither loop variable is passed to decode_rows,
                # so every iteration performs the same call — presumably
                # decode_rows should receive them; confirm its signature.
                decode_rows(trf_table_contents, version=version, item_parts_length=item_parts_length, item_parts=item_parts)
                possible_groupings.append([line_grouping, linac_state_codes_column])
                print(f'Line Grouping: {line_grouping}, Linac State Codes Column: {linac_state_codes_column}')
            except ValueError:
                pass
    return possible_groupings
def make_ccys(db: ccydb) -> None:
    """Populate *db* with the built-in currency definitions.

    Each ``insert`` call registers one currency; the positional arguments
    appear to be (ISO code, ISO numeric code, two-letter code, order/rate
    hint, full name, rounding, country, then day-count conventions) —
    confirm against ``ccydb.insert``'s signature.
    """
    # Default rounding applied to most currencies.
    dfr = 4
    # NOTE(review): these escapes are doubled ('\\u0024' is the literal text
    # "\u0024", not '$'); if real symbol characters were intended, the
    # backslashes need un-doubling — confirm against the original source.
    dollar = '\\u0024'
    peso = '\\u20b1'
    kr = 'kr'
    insert = db.insert
    # Majors / G10.
    insert('EUR', '978', 'EU', 1, 'Euro', dfr, 'EU', '30/360', 'ACT/360', future='FE', symbol_raw='\\u20ac', html='&euro;')
    insert('GBP', '826', 'BP', 2, 'British Pound', dfr, 'GB', 'ACT/365', 'ACT/365', symbol_raw='\\u00a3', html='&pound;')
    insert('AUD', '036', 'AD', 3, 'Australian Dollar', dfr, 'AU', 'ACT/365', 'ACT/365', symbol_raw=dollar, html='$')
    insert('NZD', '554', 'ND', 4, 'New-Zealand Dollar', dfr, 'NZ', 'ACT/365', 'ACT/365', symbol_raw=dollar, html='$')
    insert('USD', '840', 'UD', 5, 'US Dollar', 0, 'US', '30/360', 'ACT/360', future='ED', symbol_raw=dollar, html='$')
    insert('CAD', '124', 'CD', 6, 'Canadian Dollar', dfr, 'CA', 'ACT/365', 'ACT/365', symbol_raw=dollar, html='$')
    insert('CHF', '756', 'SF', 7, 'Swiss Franc', dfr, 'CH', '30/360', 'ACT/360', symbol_raw='Fr', html='&#8355;')
    insert('NOK', '578', 'NK', 8, 'Norwegian Krona', dfr, 'NO', '30/360', 'ACT/360', symbol_raw=kr, html=kr)
    insert('SEK', '752', 'SK', 9, 'Swedish Krona', dfr, 'SE', '30/360', 'ACT/360', symbol_raw=kr, html=kr)
    insert('DKK', '208', 'DK', 10, 'Danish Krona', dfr, 'DK', '30/360', 'ACT/360', symbol_raw=kr, html=kr)
    # Asia-Pacific.
    insert('JPY', '392', 'JY', 10000, 'Japanese Yen', 2, 'JP', 'ACT/365', 'ACT/360', symbol_raw='\\u00a5', html='&yen;')
    insert('CNY', '156', 'CY', 680, 'Chinese Renminbi', dfr, 'CN', 'ACT/365', 'ACT/365', symbol_raw='\\u00a5', html='&yen;')
    insert('KRW', '410', 'KW', 110000, 'South Korean won', 2, 'KR', 'ACT/365', 'ACT/365', symbol_raw='\\u20a9', html='&#8361;')
    insert('SGD', '702', 'SD', 15, 'Singapore Dollar', dfr, 'SG', 'ACT/365', 'ACT/365', symbol_raw=dollar, html='$')
    insert('IDR', '360', 'IH', 970000, 'Indonesian Rupiah', 0, 'ID', 'ACT/360', 'ACT/360', symbol_raw='Rp', html='Rp')
    insert('THB', '764', 'TB', 3300, 'Thai Baht', 2, 'TH', 'ACT/365', 'ACT/365', symbol_raw='\\u0e3f', html='&#3647;')
    insert('TWD', '901', 'TD', 18, 'Taiwan Dollar', dfr, 'TW', 'ACT/365', 'ACT/365', symbol_raw=dollar, html='$')
    insert('HKD', '344', 'HD', 19, 'Hong Kong Dollar', dfr, 'HK', 'ACT/365', 'ACT/365', symbol_raw='\\u5713', html='HK$')
    insert('PHP', '608', 'PP', 4770, 'Philippines Peso', dfr, 'PH', 'ACT/360', 'ACT/360', symbol_raw=peso, html='&#8369;')
    insert('INR', '356', 'IR', 4500, 'Indian Rupee', dfr, 'IN', 'ACT/365', 'ACT/365', symbol_raw='\\u20a8', html='&#8360;')
    insert('MYR', '458', 'MR', 345, 'Malaysian Ringgit', dfr, 'MY', 'ACT/365', 'ACT/365')
    insert('VND', '704', 'VD', 1700000, 'Vietnamese Dong', 0, 'VN', 'ACT/365', 'ACT/365', symbol_raw='\\u20ab', html='&#8363;')
    # Americas.
    insert('BRL', '986', 'BC', 200, 'Brazilian Real', dfr, 'BR', 'BUS/252', 'BUS/252', symbol_raw='R$')
    insert('PEN', '604', 'PS', 220, 'Peruvian New Sol', dfr, 'PE', 'ACT/360', 'ACT/360', symbol_raw='S/.')
    insert('ARS', '032', 'AP', 301, 'Argentine Peso', dfr, 'AR', '30/360', 'ACT/360', symbol_raw=dollar, html='$')
    insert('MXN', '484', 'MP', 1330, 'Mexican Peso', dfr, 'MX', 'ACT/360', 'ACT/360', symbol_raw=dollar, html='$')
    insert('CLP', '152', 'CH', 54500, 'Chilean Peso', 2, 'CL', 'ACT/360', 'ACT/360', symbol_raw=dollar, html='$')
    insert('COP', '170', 'CL', 190000, 'Colombian Peso', 2, 'CO', 'ACT/360', 'ACT/360', symbol_raw=dollar, html='$')
    insert('JMD', '388', 'JD', 410, 'Jamaican Dollar', dfr, 'JM', 'ACT/360', 'ACT/360', symbol_raw=dollar, html='$')
    insert('TTD', '780', 'TT', 410, 'Trinidad and Tobago Dollar', dfr, 'TT', 'ACT/360', 'ACT/360', symbol_raw=dollar, html='$')
    insert('BMD', '060', 'BD', 410, 'Bermudian Dollar', dfr, 'BM', symbol_raw=dollar, html='$')
    # Europe, Middle East and Africa.
    insert('CZK', '203', 'CK', 28, 'Czech Koruna', dfr, 'CZ', 'ACT/360', 'ACT/360', symbol_raw='\\u004b\\u010d')
    insert('PLN', '985', 'PZ', 29, 'Polish Zloty', dfr, 'PL', 'ACT/ACT', 'ACT/365', symbol_raw='\\u0050\\u0142')
    insert('TRY', '949', 'TY', 30, 'Turkish Lira', dfr, 'TR', 'ACT/360', 'ACT/360', symbol_raw='\\u0054\\u004c')
    insert('HUF', '348', 'HF', 32, 'Hungarian Forint', dfr, 'HU', 'ACT/365', 'ACT/360', symbol_raw='Ft', html='Ft')
    insert('RON', '946', 'RN', 34, 'Romanian Leu', dfr, 'RO', 'ACT/360', 'ACT/360')
    insert('UAH', '980', 'UH', 35, 'Ukrainian Hryvnia', dfr, 'UA', 'ACT/ACT', 'ACT/ACT', symbol_raw='\\u20b4', html='&#8372;')
    insert('RUB', '643', 'RR', 36, 'Russian Ruble', dfr, 'RU', 'ACT/ACT', 'ACT/ACT', symbol_raw='\\u0440\\u0443\\u0431')
    insert('KZT', '398', 'KT', 410, 'Tenge', dfr, 'KZ', symbol_raw='\\u20b8', html='&#8376;')
    insert('BGN', '975', 'BN', 410, 'Bulgarian Lev', dfr, 'BG', symbol_raw='\\u043b\\u0432.', html='&#1083;&#1074;')
    insert('ILS', '376', 'IS', 410, 'Israeli Shekel', dfr, 'IL', 'ACT/365', 'ACT/365', symbol_raw='\\u20aa', html='&#8362;')
    insert('AED', '784', 'AE', 410, 'United Arab Emirates Dirham', dfr, 'AE')
    insert('QAR', '634', 'QA', 410, 'Qatari Riyal', dfr, 'QA', symbol_raw='\\ufdfc', html='&#65020;')
    insert('SAR', '682', 'SR', 410, 'Saudi Riyal', dfr, 'SA', symbol_raw='\\ufdfc', html='&#65020;')
    insert('EGP', '818', 'EP', 550, 'Egyptian Pound', dfr, 'EG', symbol_raw='\\u00a3', html='&pound;')
    insert('NGN', '566', 'NG', 650, 'Nigerian Naira', dfr, 'NG', symbol_raw='\\u20a6', html='&#8358;')
    insert('ZAR', '710', 'SA', 750, 'South African Rand', dfr, 'ZA', 'ACT/365', 'ACT/365', symbol_raw='R', html='R')
    # Crypto.
    insert('XBT', '000', 'BT', (- 1), 'Bitcoin', 8, 'WW', symbol_raw='\\u0e3f', html='&#3647;')
# BUG FIX: the decorators had lost their '@pytest.mark' prefix, leaving bare
# '.parametrize(...)' lines, which is a syntax error.
@pytest.mark.parametrize('username,password', users)
@pytest.mark.parametrize('issue_id', issues)
@pytest.mark.parametrize('project_id', projects)
def test_issue_send_get(db, client, username, password, project_id, issue_id):
    """GET on the issue-update view honours permissions and object existence."""
    client.login(username=username, password=password)
    issue = Issue.objects.filter(project_id=project_id, id=issue_id).first()
    url = reverse('issue_update', args=[project_id, issue_id])
    response = client.get(url)
    if issue:
        if (project_id in change_issue_permission_map.get(username, [])):
            # User has change permission on this project.
            assert (response.status_code == 200)
        elif password:
            # Authenticated but not permitted.
            assert (response.status_code == 403)
        else:
            # Anonymous users are redirected to the login page.
            assert (response.status_code == 302)
    else:
        assert (response.status_code == 404)
def __create_playlist(name, source_dir, files, songs_lib, pl_lib):
    """Build a playlist from *files*, resolving each entry to a song.

    Shows a progress window, skips entries that cannot be loaded, and
    returns the playlist created in *pl_lib*.

    NOTE(review): *name* is unused here — presumably create_from_songs
    derives the playlist title; confirm against pl_lib's API.
    """
    songs = []
    win = WaitLoadWindow(None, len(files), _('Importing playlist.\n\n%(current)d/%(total)d songs added.'))
    win.show()
    for (_i, filename) in enumerate(files):
        song = None
        # Plain paths load directly; URIs are converted to filesystem paths
        # when possible, otherwise treated as remote files.
        if (not uri_is_valid(filename)):
            song = _af_for(filename, songs_lib, source_dir)
        else:
            try:
                filename = uri2fsn(filename)
            except ValueError:
                song = formats.remote.RemoteFile(filename)
            else:
                song = _af_for(filename, songs_lib, source_dir)
        if (song is not None):
            songs.append(song)
        elif (os.path.exists(filename) or os.path.exists(os.path.join(source_dir, filename))):
            # File exists but no format handler could load it.
            print_w(f"Can't add file to playlist: Unsupported file format. '(unknown)'")
        else:
            print_w(f"Can't add file to playlist: File not found. '(unknown)'")
        if win.step():
            # User cancelled via the progress window.
            break
    win.destroy()
    return pl_lib.create_from_songs(songs)
def test_logger_with_console_handler():
    """set_root_logger with no log file configures stdout+stderr stream handlers."""
    with patch.object(logging, 'basicConfig') as fake_basic_config:
        pypyr.log.logger.set_root_logger(10, None)
        fake_basic_config.assert_called_once()
        args, kwargs = fake_basic_config.call_args
        # Root-logger formatting and level passed straight through.
        assert kwargs['format'] == '%(asctime)s %(levelname)s:%(name)s:%(funcName)s: %(message)s'
        assert kwargs['datefmt'] == '%Y-%m-%d %H:%M:%S'
        assert kwargs['level'] == 10
        # Exactly two console handlers: stdout first, stderr second.
        handlers = kwargs['handlers']
        assert len(handlers) == 2
        assert isinstance(handlers[0], logging.StreamHandler)
        assert handlers[0].stream == sys.stdout
        assert isinstance(handlers[1], logging.StreamHandler)
        assert handlers[1].stream == sys.stderr
def add_plot_parser(subparsers):
    """Register the ``plot_curve`` sub-command on *subparsers*."""
    plot = subparsers.add_parser('plot_curve', help='Parser for plotting curves')
    # Positional: one or more JSON training logs.
    plot.add_argument('json_logs', type=str, nargs='+', help='Path of train log in json format')
    plot.add_argument('--keys', type=str, nargs='+', default=['loss'], help='The metric that you want to plot')
    plot.add_argument('--title', type=str, help='Title of figure')
    plot.add_argument('--legend', type=str, nargs='+', default=None, help='Legend of each plot')
    plot.add_argument('--backend', type=str, default=None, help='Backend of plt')
    plot.add_argument('--style', type=str, default='dark', help='Style of plt')
    plot.add_argument('--out', type=str, default=None)
class Where(operator):
    """Query-plan operator that filters rows with a boolean expression."""

    def __init__(self, expr):
        self.expr = expr

    def used_vars(self):
        """Return every variable referenced by the filter expression."""
        from pythonql.Ast import get_all_vars, get_ast
        return get_all_vars(get_ast(self.expr))

    def execute(self, table, prior_locs, prior_globs):
        """Delegate row filtering to the executor's where-clause processor."""
        from pythonql.Executor import processWhereClause
        return processWhereClause(self, table, prior_locs, prior_globs)

    def __repr__(self):
        return ''.join(['Where (', self.expr, ')'])
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('--domain', type=str, choices=AVAILABLE_DOMAINS, default=None)
parser.add_argument('--task', type=str, choices=AVAILABLE_TASKS, default='default')
parser.add_argument('--policy', type=str, choices=('gaussian', 'gmm', 'lsp'), default='gaussian')
parser.add_argument('--env', type=str, default=DEFAULT_ENV)
parser.add_argument('--exp_name', type=str, default=timestamp())
parser.add_argument('--mode', type=str, default='local')
parser.add_argument('--tau', type=float, default=0.005)
parser.add_argument('--log_dir', type=str, default=None)
parser.add_argument('--lr', type=float, default=(- 1.0))
parser.add_argument('--l1regpi', type=float, default=0.0)
parser.add_argument('--l2regpi', type=float, default=0.0)
parser.add_argument('--l1regvf', type=float, default=0.0)
parser.add_argument('--l2regvf', type=float, default=0.0)
parser.add_argument('--wclippi', type=float, default=0.0)
parser.add_argument('--wclipvf', type=float, default=0.0)
parser.add_argument('--dropoutpi', type=float, default=1.0)
parser.add_argument('--dropoutvf', type=float, default=1.0)
parser.add_argument('--ent_coef', type=float, default=0.0)
parser.add_argument('--batchnormpi', type=bool, default=False)
parser.add_argument('--batchnormvf', type=bool, default=False)
parser.add_argument('--reward_scale', type=float, default=(- 1.0))
parser.add_argument('--num_hidden', type=int, default=256)
parser.add_argument('--seed', type=int, default=1)
parser.add_argument('--policypath', type=str, default='')
parser.add_argument('--valuepath', type=str, default='')
args = parser.parse_args()
return args |
class ByteSpec(Spec):
    """Frame spec for a single unsigned byte at the head of the data."""

    def __init__(self, name, default=0):
        super(ByteSpec, self).__init__(name, default)

    def read(self, header, frame, data):
        # First byte is the value; the remainder is returned unparsed.
        value = bytearray(data)[0]
        rest = data[1:]
        return (value, rest)

    def write(self, config, frame, value):
        return bchr(value)

    def validate(self, frame, value):
        # bchr raises for out-of-range values, which serves as validation.
        if value is not None:
            bchr(value)
        return value
class Effect6947(BaseEffect):
    # Fit effect: Caldari defensive subsystem bonus that boosts the base
    # sensor strength of charges requiring the Astrometrics skill.
    type = 'passive'

    # NOTE: effect handlers in this codebase are plain functions (no self).
    def handler(fit, src, context, projectionRange, **kwargs):
        fit.modules.filteredChargeBoost((lambda mod: mod.charge.requiresSkill('Astrometrics')), 'baseSensorStrength', src.getModifiedItemAttr('subsystemBonusCaldariDefensive2'), skill='Caldari Defensive Systems', **kwargs)
class BinaryTests(ProtocolTestCase):
    """Sans-I/O protocol tests for binary (opcode 0x2) frames.

    Client-to-server frames must be masked (mask bit set plus a 4-byte
    masking key); ``enforce_mask`` pins the key to zeros so the expected
    wire bytes are deterministic. Server-to-client frames are unmasked.
    """
    def test_client_sends_binary(self):
        client = Protocol(CLIENT)
        with self.enforce_mask(b'\x00\x00\x00\x00'):
            client.send_binary(b'\x01\x02\xfe\xff')
        # 0x82 = FIN + binary opcode; 0x84 = masked, length 4.
        self.assertEqual(client.data_to_send(), [b'\x82\x84\x00\x00\x00\x00\x01\x02\xfe\xff'])
    def test_server_sends_binary(self):
        server = Protocol(SERVER)
        server.send_binary(b'\x01\x02\xfe\xff')
        self.assertEqual(server.data_to_send(), [b'\x82\x04\x01\x02\xfe\xff'])
    def test_client_receives_binary(self):
        client = Protocol(CLIENT)
        client.receive_data(b'\x82\x04\x01\x02\xfe\xff')
        self.assertFrameReceived(client, Frame(OP_BINARY, b'\x01\x02\xfe\xff'))
    def test_server_receives_binary(self):
        server = Protocol(SERVER)
        server.receive_data(b'\x82\x84\x00\x00\x00\x00\x01\x02\xfe\xff')
        self.assertFrameReceived(server, Frame(OP_BINARY, b'\x01\x02\xfe\xff'))
    def test_client_receives_binary_over_size_limit(self):
        # max_size applies to the payload; a 4-byte frame exceeds 3.
        client = Protocol(CLIENT, max_size=3)
        client.receive_data(b'\x82\x04\x01\x02\xfe\xff')
        self.assertIsInstance(client.parser_exc, PayloadTooBig)
        self.assertEqual(str(client.parser_exc), 'over size limit (4 > 3 bytes)')
        self.assertConnectionFailing(client, CloseCode.MESSAGE_TOO_BIG, 'over size limit (4 > 3 bytes)')
    def test_server_receives_binary_over_size_limit(self):
        server = Protocol(SERVER, max_size=3)
        server.receive_data(b'\x82\x84\x00\x00\x00\x00\x01\x02\xfe\xff')
        self.assertIsInstance(server.parser_exc, PayloadTooBig)
        self.assertEqual(str(server.parser_exc), 'over size limit (4 > 3 bytes)')
        self.assertConnectionFailing(server, CloseCode.MESSAGE_TOO_BIG, 'over size limit (4 > 3 bytes)')
    def test_client_sends_fragmented_binary(self):
        # Fragmentation: first frame carries the opcode without FIN (0x02),
        # continuations use OP_CONT (0x00), the last one sets FIN (0x80).
        client = Protocol(CLIENT)
        with self.enforce_mask(b'\x00\x00\x00\x00'):
            client.send_binary(b'\x01\x02', fin=False)
        self.assertEqual(client.data_to_send(), [b'\x02\x82\x00\x00\x00\x00\x01\x02'])
        with self.enforce_mask(b'\x00\x00\x00\x00'):
            client.send_continuation(b'\xee\xff\x01\x02', fin=False)
        self.assertEqual(client.data_to_send(), [b'\x00\x84\x00\x00\x00\x00\xee\xff\x01\x02'])
        with self.enforce_mask(b'\x00\x00\x00\x00'):
            client.send_continuation(b'\xee\xff', fin=True)
        self.assertEqual(client.data_to_send(), [b'\x80\x82\x00\x00\x00\x00\xee\xff'])
    def test_server_sends_fragmented_binary(self):
        server = Protocol(SERVER)
        server.send_binary(b'\x01\x02', fin=False)
        self.assertEqual(server.data_to_send(), [b'\x02\x02\x01\x02'])
        server.send_continuation(b'\xee\xff\x01\x02', fin=False)
        self.assertEqual(server.data_to_send(), [b'\x00\x04\xee\xff\x01\x02'])
        server.send_continuation(b'\xee\xff', fin=True)
        self.assertEqual(server.data_to_send(), [b'\x80\x02\xee\xff'])
    def test_client_receives_fragmented_binary(self):
        client = Protocol(CLIENT)
        client.receive_data(b'\x02\x02\x01\x02')
        self.assertFrameReceived(client, Frame(OP_BINARY, b'\x01\x02', fin=False))
        client.receive_data(b'\x00\x04\xfe\xff\x01\x02')
        self.assertFrameReceived(client, Frame(OP_CONT, b'\xfe\xff\x01\x02', fin=False))
        client.receive_data(b'\x80\x02\xfe\xff')
        self.assertFrameReceived(client, Frame(OP_CONT, b'\xfe\xff'))
    def test_server_receives_fragmented_binary(self):
        server = Protocol(SERVER)
        server.receive_data(b'\x02\x82\x00\x00\x00\x00\x01\x02')
        self.assertFrameReceived(server, Frame(OP_BINARY, b'\x01\x02', fin=False))
        server.receive_data(b'\x00\x84\x00\x00\x00\x00\xee\xff\x01\x02')
        self.assertFrameReceived(server, Frame(OP_CONT, b'\xee\xff\x01\x02', fin=False))
        server.receive_data(b'\x80\x82\x00\x00\x00\x00\xfe\xff')
        self.assertFrameReceived(server, Frame(OP_CONT, b'\xfe\xff'))
    def test_client_receives_fragmented_binary_over_size_limit(self):
        # The size limit applies to the whole message: 2 bytes already
        # consumed leave only 1 byte of budget for the continuation.
        client = Protocol(CLIENT, max_size=3)
        client.receive_data(b'\x02\x02\x01\x02')
        self.assertFrameReceived(client, Frame(OP_BINARY, b'\x01\x02', fin=False))
        client.receive_data(b'\x80\x02\xfe\xff')
        self.assertIsInstance(client.parser_exc, PayloadTooBig)
        self.assertEqual(str(client.parser_exc), 'over size limit (2 > 1 bytes)')
        self.assertConnectionFailing(client, CloseCode.MESSAGE_TOO_BIG, 'over size limit (2 > 1 bytes)')
    def test_server_receives_fragmented_binary_over_size_limit(self):
        server = Protocol(SERVER, max_size=3)
        server.receive_data(b'\x02\x82\x00\x00\x00\x00\x01\x02')
        self.assertFrameReceived(server, Frame(OP_BINARY, b'\x01\x02', fin=False))
        server.receive_data(b'\x80\x82\x00\x00\x00\x00\xfe\xff')
        self.assertIsInstance(server.parser_exc, PayloadTooBig)
        self.assertEqual(str(server.parser_exc), 'over size limit (2 > 1 bytes)')
        self.assertConnectionFailing(server, CloseCode.MESSAGE_TOO_BIG, 'over size limit (2 > 1 bytes)')
    def test_client_sends_unexpected_binary(self):
        # Starting a new binary message while one is in progress is an error.
        client = Protocol(CLIENT)
        client.send_binary(b'', fin=False)
        with self.assertRaises(ProtocolError) as raised:
            client.send_binary(b'', fin=False)
        self.assertEqual(str(raised.exception), 'expected a continuation frame')
    def test_server_sends_unexpected_binary(self):
        server = Protocol(SERVER)
        server.send_binary(b'', fin=False)
        with self.assertRaises(ProtocolError) as raised:
            server.send_binary(b'', fin=False)
        self.assertEqual(str(raised.exception), 'expected a continuation frame')
    def test_client_receives_unexpected_binary(self):
        client = Protocol(CLIENT)
        client.receive_data(b'\x02\x00')
        self.assertFrameReceived(client, Frame(OP_BINARY, b'', fin=False))
        client.receive_data(b'\x02\x00')
        self.assertIsInstance(client.parser_exc, ProtocolError)
        self.assertEqual(str(client.parser_exc), 'expected a continuation frame')
        self.assertConnectionFailing(client, CloseCode.PROTOCOL_ERROR, 'expected a continuation frame')
    def test_server_receives_unexpected_binary(self):
        server = Protocol(SERVER)
        server.receive_data(b'\x02\x80\x00\x00\x00\x00')
        self.assertFrameReceived(server, Frame(OP_BINARY, b'', fin=False))
        server.receive_data(b'\x02\x80\x00\x00\x00\x00')
        self.assertIsInstance(server.parser_exc, ProtocolError)
        self.assertEqual(str(server.parser_exc), 'expected a continuation frame')
        self.assertConnectionFailing(server, CloseCode.PROTOCOL_ERROR, 'expected a continuation frame')
    def test_client_sends_binary_after_sending_close(self):
        # No data frames may be sent after a close frame.
        client = Protocol(CLIENT)
        with self.enforce_mask(b'\x00\x00\x00\x00'):
            client.send_close(CloseCode.GOING_AWAY)
        self.assertEqual(client.data_to_send(), [b'\x88\x82\x00\x00\x00\x00\x03\xe9'])
        with self.assertRaises(InvalidState):
            client.send_binary(b'')
    def test_server_sends_binary_after_sending_close(self):
        server = Protocol(SERVER)
        server.send_close(CloseCode.NORMAL_CLOSURE)
        self.assertEqual(server.data_to_send(), [b'\x88\x02\x03\xe8'])
        with self.assertRaises(InvalidState):
            server.send_binary(b'')
    def test_client_receives_binary_after_receiving_close(self):
        # Frames arriving after a close are silently ignored.
        client = Protocol(CLIENT)
        client.receive_data(b'\x88\x02\x03\xe8')
        self.assertConnectionClosing(client, CloseCode.NORMAL_CLOSURE)
        client.receive_data(b'\x82\x00')
        self.assertFrameReceived(client, None)
        self.assertFrameSent(client, None)
    def test_server_receives_binary_after_receiving_close(self):
        server = Protocol(SERVER)
        server.receive_data(b'\x88\x82\x00\x00\x00\x00\x03\xe9')
        self.assertConnectionClosing(server, CloseCode.GOING_AWAY)
        server.receive_data(b'\x82\x80\x00\xff\x00\xff')
        self.assertFrameReceived(server, None)
        self.assertFrameSent(server, None)
class Solution():
    """'Jump Game II': minimum number of jumps to reach the last index."""

    def jump(self, nums):
        """Return the minimum jump count from index 0 to the last index.

        Greedy/BFS approach: each "jump" expands the window of indices
        reachable with one more hop, tracking the farthest index seen.
        """
        if len(nums) <= 1:
            return 0
        window_start = 0
        window_end = nums[0]
        farthest = nums[0]
        jumps = 1
        while window_end < len(nums) - 1:
            # Scan indices newly reachable with the current jump count.
            for idx in range(window_start + 1, window_end + 1):
                farthest = max(farthest, idx + nums[idx])
            window_start, window_end = window_end, farthest
            jumps += 1
        return jumps
class BaseRequiredTextAsset(BaseRequiredAsset):
    # Abstract base for sponsor-provided free-text assets; concrete models
    # inherit these fields and the TextAsset fulfilment class.
    ASSET_CLASS = TextAsset
    # Title shown next to the text input.
    label = models.CharField(max_length=256, help_text="What's the title used to display the text input to the sponsor?")
    # Optional guidance displayed with the input.
    help_text = models.CharField(max_length=256, help_text='Any helper comment on how the input should be populated', default='', blank=True)
    # None/blank means the input length is unlimited.
    max_length = models.IntegerField(default=None, help_text='Limit to length of the input, empty means unlimited', null=True, blank=True)
    class Meta(BaseRequiredAsset.Meta):
        abstract = True
class VirtualEnv():
    """Create, activate and tear down a Python virtual environment."""

    # Env vars that must not leak into the activated environment.
    IGNORED_ENV_VARS = ('__PYVENV_LAUNCHER__', 'PYTHONHOME')

    def __init__(self, directory, platform, verbosity=0):
        self.directory = directory
        self.platform = platform
        self.verbosity = verbosity
        self.python_info = PythonInfo(platform)
        self._env_vars_to_restore = {}
        self._executables_directory = None

    def activate(self):
        """Point VIRTUAL_ENV and PATH at this environment, saving prior values."""
        self._env_vars_to_restore['VIRTUAL_ENV'] = os.environ.pop('VIRTUAL_ENV', None)
        os.environ['VIRTUAL_ENV'] = str(self.directory)
        old_path = os.environ.pop('PATH', None)
        self._env_vars_to_restore['PATH'] = old_path
        if (old_path is None):
            os.environ['PATH'] = str(self.executables_directory)
        else:
            os.environ['PATH'] = f'{self.executables_directory}{os.pathsep}{old_path}'
        for env_var in self.IGNORED_ENV_VARS:
            self._env_vars_to_restore[env_var] = os.environ.pop(env_var, None)

    def deactivate(self):
        """Restore every environment variable changed by activate()."""
        for (env_var, value) in self._env_vars_to_restore.items():
            if (value is None):
                os.environ.pop(env_var, None)
            else:
                os.environ[env_var] = value
        self._env_vars_to_restore.clear()

    def create(self, python, *, allow_system_packages=False):
        """Create the environment on disk for the given *python* interpreter."""
        from virtualenv import cli_run
        self.directory.ensure_parent_dir_exists()
        command = [str(self.directory), '--no-download', '--no-periodic-update', '--python', python]
        if allow_system_packages:
            command.append('--system-site-packages')
        add_verbosity_flag(command, self.verbosity, adjustment=(- 1))
        cli_run(command)

    def remove(self):
        self.directory.remove()

    def exists(self):
        return self.directory.is_dir()

    # BUG FIX: activate() reads this as an attribute (``self.executables_directory``
    # inside str()/f-strings), so it must be a property; as a plain method,
    # PATH would have been set to a bound-method repr.
    @property
    def executables_directory(self):
        """Locate (and cache) the env's scripts directory, raising if absent."""
        if (self._executables_directory is None):
            exe_dir = (self.directory / ('Scripts' if self.platform.windows else 'bin'))
            if exe_dir.is_dir():
                self._executables_directory = exe_dir
            elif self.platform.windows:
                # Some interpreters use 'bin' even on Windows.
                exe_dir = (self.directory / 'bin')
                if exe_dir.is_dir():
                    self._executables_directory = exe_dir
                else:
                    msg = f'Unable to locate executables directory within: {self.directory}'
                    raise OSError(msg)
            elif (self.directory / 'local').is_dir():
                # Debian-style layout: <env>/local/bin.
                exe_dir = ((self.directory / 'local') / 'bin')
                if exe_dir.is_dir():
                    self._executables_directory = exe_dir
                else:
                    msg = f'Unable to locate executables directory within: {self.directory}'
                    raise OSError(msg)
            else:
                msg = f'Unable to locate executables directory within: {self.directory}'
                raise OSError(msg)
        return self._executables_directory

    # NOTE(review): environment and sys_path also look like lost @property
    # accessors (they just proxy python_info), but no in-file use confirms
    # it, so they are left as methods — verify against callers.
    def environment(self):
        return self.python_info.environment

    def sys_path(self):
        return self.python_info.sys_path

    def __enter__(self):
        self.activate()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.deactivate()
def update_u_gates(drag_params, pi2_pulse_schedules=None, qubits=None, inst_map=None, drives=None):
    """Rebuild the pulse schedules for the ``u2``/``u3`` gates of *qubits*.

    For each qubit, an X90 pulse (either a DRAG pulse built from
    *drag_params* or a pre-built schedule from *pi2_pulse_schedules*) is
    wrapped in parameterized phase shifts to realise u2/u3, and the result
    is registered in *inst_map*.
    """
    for qubit in qubits:
        drive_ch = drives[qubit]
        if (pi2_pulse_schedules is None):
            # Build the pi/2 (X90) pulse from the calibrated DRAG parameters.
            x90_pulse = pulse_lib.drag(**drag_params[qubit])
            x90_sched = Schedule()
            x90_sched += Play(x90_pulse, drive_ch).shift(0)
        else:
            x90_sched = pi2_pulse_schedules[qubit]
        # Use the channel group already associated with u2/u3 on this qubit,
        # falling back to just the drive channel (for/else: no break taken).
        for _u2_group in _find_channel_groups('u2', qubits=qubit, inst_map=inst_map):
            if (drive_ch in _u2_group):
                break
        else:
            _u2_group = (drive_ch,)
        for _u3_group in _find_channel_groups('u3', qubits=qubit, inst_map=inst_map):
            if (drive_ch in _u3_group):
                break
        else:
            _u3_group = (drive_ch,)
        # u2(P0, P1): one X90 framed by phase shifts on every group channel.
        with pulse.build(name=f'u2_{qubit}', default_alignment='sequential') as u2_sched:
            P0 = Parameter('P0')
            P1 = Parameter('P1')
            for ch in _u2_group:
                pulse.shift_phase(((- P1) + (np.pi / 2)), ch)
            pulse.call(x90_sched)
            for ch in _u2_group:
                pulse.shift_phase(((- P0) - (np.pi / 2)), ch)
        # u3(P0, P1, P2): two X90 pulses interleaved with phase shifts.
        with pulse.build(name=f'u3_{qubit}', default_alignment='sequential') as u3_sched:
            P0 = Parameter('P0')
            P1 = Parameter('P1')
            P2 = Parameter('P2')
            for ch in _u3_group:
                pulse.shift_phase((- P2), ch)
            pulse.call(x90_sched)
            for ch in _u3_group:
                pulse.shift_phase(((- P0) - np.pi), ch)
            pulse.call(x90_sched)
            for ch in _u3_group:
                pulse.shift_phase(((- P1) + np.pi), ch)
        inst_map.add('u2', qubits=qubit, schedule=u2_sched)
        inst_map.add('u3', qubits=qubit, schedule=u3_sched)
def mcepalpha(fs, start=0.0, stop=1.0, step=0.001, num_points=1000):
    """Return the all-pass constant alpha whose frequency warping best
    approximates the mel scale for sampling rate *fs* (grid search by RMS)."""
    candidates = np.arange(start, stop, step)
    mel_vector = _melscale_vector(fs, num_points)
    errors = [rms_distance(mel_vector, _warping_vector(alpha, num_points)) for alpha in candidates]
    return candidates[np.argmin(errors)]
def _test():
    """Smoke-test the WRN-20-10 variants: build, count params, forward/backward."""
    import torch
    pretrained = False
    models = [(wrn20_10_1bit_cifar10, 10), (wrn20_10_1bit_cifar100, 100), (wrn20_10_1bit_svhn, 10), (wrn20_10_32bit_cifar10, 10), (wrn20_10_32bit_cifar100, 100), (wrn20_10_32bit_svhn, 10)]
    for (model, num_classes) in models:
        net = model(pretrained=pretrained)
        net.eval()
        weight_count = _calc_width(net)
        print('m={}, {}'.format(model.__name__, weight_count))
        # BUG FIX: the original per-model asserts read ``weight_count ==``
        # with no expected value (a syntax error) — the reference parameter
        # counts were lost. Restore them here; until then only sanity-check
        # that the model has parameters at all.
        assert (weight_count > 0)
        x = torch.randn(1, 3, 32, 32)
        y = net(x)
        y.sum().backward()
        assert (tuple(y.size()) == (1, num_classes))
class CalcChangeImplantLocationCommand(wx.Command):
    """Undoable command that changes the implant source of a fit."""

    def __init__(self, fitID, source):
        wx.Command.__init__(self, True, 'Change Implant Location')
        self.fitID = fitID
        self.source = source
        # Previous source, recorded by Do() so Undo() can restore it.
        self.savedSource = None

    def Do(self):
        # BUG FIX: the format arguments were swapped — fitID was printed as
        # the source and vice versa.
        pyfalog.debug('Doing changing of implant source to {} for fit {}'.format(self.source, self.fitID))
        fit = Fit.getInstance().getFit(self.fitID)
        self.savedSource = fit.implantSource
        if (self.source == self.savedSource):
            # No change: tell wx not to push this command onto the undo stack.
            return False
        fit.implantSource = self.source
        return True

    def Undo(self):
        # Undo by applying the inverse command with the saved source.
        cmd = CalcChangeImplantLocationCommand(fitID=self.fitID, source=self.savedSource)
        return cmd.Do()
class HTTPRepository(CachedRepository):
    """Base class for package repositories reached over HTTP(S)."""

    def __init__(self, name: str, url: str, config: (Config | None)=None, disable_cache: bool=False, pool_size: int=requests.adapters.DEFAULT_POOLSIZE) -> None:
        super().__init__(name, disable_cache, config)
        self._url = url
        self._authenticator = Authenticator(config=config, cache_id=name, disable_cache=disable_cache, pool_size=pool_size)
        self._authenticator.add_repository(name, url)
        # Memoize page fetches for the lifetime of this repository instance.
        self.get_page = functools.lru_cache(maxsize=None)(self._get_page)
def session(self) -> Authenticator:
return self._authenticator
def url(self) -> str:
return self._url
    def certificates(self) -> RepositoryCertificateConfig:
        # NOTE(review): sibling accessors (session, url) are read as
        # attributes elsewhere in this class, suggesting this was a
        # @property too — confirm against callers before changing.
        return self._authenticator.get_certs_for_url(self.url)
    def authenticated_url(self) -> str:
        # NOTE(review): likely a lost @property like session/url — confirm
        # against callers before changing.
        return self._authenticator.authenticated_url(url=self.url)
    def _download(self, url: str, dest: Path) -> None:
        # Fetch *url* to *dest* using this repository's authenticated session.
        return download_file(url, dest, session=self.session)
    def _cached_or_downloaded_file(self, link: Link) -> Iterator[Path]:
        """Download *link* into a temporary directory and yield the local path.

        NOTE(review): call sites use this with ``with ...``, so this
        generator appears to have lost a @contextlib.contextmanager
        decorator — restore it (requires contextlib at module level).
        """
        self._log(f'Downloading: {link.url}', level='debug')
        with temporary_directory() as temp_dir:
            filepath = (Path(temp_dir) / link.filename)
            self._download(link.url, filepath)
            (yield filepath)
def _get_info_from_wheel(self, url: str) -> PackageInfo:
from poetry.inspection.info import PackageInfo
with self._cached_or_downloaded_file(Link(url)) as filepath:
return PackageInfo.from_wheel(filepath)
def _get_info_from_sdist(self, url: str) -> PackageInfo:
from poetry.inspection.info import PackageInfo
with self._cached_or_downloaded_file(Link(url)) as filepath:
return PackageInfo.from_sdist(filepath)
    def _get_info_from_urls(self, urls: dict[(str, list[str])]) -> PackageInfo:
        """Pick the most suitable distribution from *urls* and return its metadata.

        Preference order: a universal py2.py3 wheel; a merged py2 + py3 pair
        (unioning requires_python and combining per-version requirements with
        markers); a py3-only or py2-only universal wheel; the first
        platform-specific wheel; finally the first sdist.
        """
        wheels = urls.get('bdist_wheel')
        if wheels:
            universal_wheel = None
            universal_python2_wheel = None
            universal_python3_wheel = None
            platform_specific_wheels = []
            # Classify each wheel by its filename tags (pyver-abi-platform).
            for wheel in wheels:
                link = Link(wheel)
                m = wheel_file_re.match(link.filename)
                if (not m):
                    continue
                pyver = m.group('pyver')
                abi = m.group('abi')
                plat = m.group('plat')
                if ((abi == 'none') and (plat == 'any')):
                    if (pyver == 'py2.py3'):
                        universal_wheel = wheel
                    elif (pyver == 'py2'):
                        universal_python2_wheel = wheel
                    else:
                        universal_python3_wheel = wheel
                else:
                    platform_specific_wheels.append(wheel)
            if (universal_wheel is not None):
                return self._get_info_from_wheel(universal_wheel)
            info = None
            if (universal_python2_wheel and universal_python3_wheel):
                # Merge the two versioned wheels into one metadata record.
                info = self._get_info_from_wheel(universal_python2_wheel)
                py3_info = self._get_info_from_wheel(universal_python3_wheel)
                if (info.requires_python or py3_info.requires_python):
                    # Union of the two python ranges (defaults: ^2.7 and ^3).
                    info.requires_python = str(parse_constraint((info.requires_python or '^2.7')).union(parse_constraint((py3_info.requires_python or '^3'))))
                if py3_info.requires_dist:
                    if (not info.requires_dist):
                        info.requires_dist = py3_info.requires_dist
                        return info
                    # Requirements common to both stay unmarked; version-only
                    # requirements get a python_version marker.
                    py2_requires_dist = {Dependency.create_from_pep_508(r).to_pep_508() for r in info.requires_dist}
                    py3_requires_dist = {Dependency.create_from_pep_508(r).to_pep_508() for r in py3_info.requires_dist}
                    base_requires_dist = (py2_requires_dist & py3_requires_dist)
                    py2_only_requires_dist = (py2_requires_dist - py3_requires_dist)
                    py3_only_requires_dist = (py3_requires_dist - py2_requires_dist)
                    requires_dist = list(base_requires_dist)
                    for requirement in py2_only_requires_dist:
                        dep = Dependency.create_from_pep_508(requirement)
                        dep.marker = dep.marker.intersect(parse_marker("python_version == '2.7'"))
                        requires_dist.append(dep.to_pep_508())
                    for requirement in py3_only_requires_dist:
                        dep = Dependency.create_from_pep_508(requirement)
                        dep.marker = dep.marker.intersect(parse_marker("python_version >= '3'"))
                        requires_dist.append(dep.to_pep_508())
                    info.requires_dist = sorted(set(requires_dist))
            if info:
                return info
            if universal_python3_wheel:
                return self._get_info_from_wheel(universal_python3_wheel)
            if universal_python2_wheel:
                return self._get_info_from_wheel(universal_python2_wheel)
            if platform_specific_wheels:
                first_wheel = platform_specific_wheels[0]
                return self._get_info_from_wheel(first_wheel)
        return self._get_info_from_sdist(urls['sdist'][0])
def _links_to_data(self, links: list[Link], data: PackageInfo) -> dict[(str, Any)]:
if (not links):
raise PackageNotFound(f'No valid distribution links found for package: "{data.name}" version: "{data.version}"')
urls = defaultdict(list)
files: list[dict[(str, Any)]] = []
for link in links:
if (link.yanked and (not data.yanked)):
continue
if link.is_wheel:
urls['bdist_wheel'].append(link.url)
elif link.filename.endswith(('.tar.gz', '.zip', '.bz2', '.xz', '.Z', '.tar')):
urls['sdist'].append(link.url)
file_hash = (f'{link.hash_name}:{link.hash}' if link.hash else None)
if ((not link.hash) or ((link.hash_name is not None) and (link.hash_name not in ('sha256', 'sha384', 'sha512')) and hasattr(hashlib, link.hash_name))):
file_hash = (self.calculate_sha256(link) or file_hash)
files.append({'file': link.filename, 'hash': file_hash})
data.files = files
info = self._get_info_from_urls(urls)
data.summary = info.summary
data.requires_dist = info.requires_dist
data.requires_python = info.requires_python
return data.asdict()
def calculate_sha256(self, link: Link) -> (str | None):
with self._cached_or_downloaded_file(link) as filepath:
known_hash = (getattr(hashlib, link.hash_name)() if link.hash_name else None)
required_hash = hashlib.sha256()
chunksize = 4096
with filepath.open('rb') as f:
while True:
chunk = f.read(chunksize)
if (not chunk):
break
if known_hash:
known_hash.update(chunk)
required_hash.update(chunk)
if ((not known_hash) or (known_hash.hexdigest() == link.hash)):
return f'{required_hash.name}:{required_hash.hexdigest()}'
return None
def _get_response(self, endpoint: str) -> (requests.Response | None):
url = (self._url + endpoint)
try:
response: requests.Response = self.session.get(url, raise_for_status=False, timeout=REQUESTS_TIMEOUT)
if (response.status_code in (401, 403)):
self._log(f'Authorization error accessing {url}', level='warning')
return None
if (response.status_code == 404):
return None
response.raise_for_status()
except requests.exceptions.HTTPError as e:
raise RepositoryError(e)
if (response.url != url):
self._log(f'Response URL {response.url} differs from request URL {url}', level='debug')
return response
def _get_page(self, name: NormalizedName) -> LinkSource:
response = self._get_response(f'/{name}/')
if (not response):
raise PackageNotFound(f'Package [{name}] not found.')
return HTMLPage(response.url, response.text) |
class TradingCentre(metaclass=TradingCentreMeta):
    """Base class for trading-centre holiday calendars.

    Concrete subclasses provide a ``holidays`` mapping (presumably installed
    by ``TradingCentreMeta`` — confirm in the metaclass); this class lazily
    builds a per-instance cache of holiday dates, one contiguous span of
    years at a time.
    """
    abstract = True
    onedaydelta = datetime.timedelta(days=1)

    def __new__(cls):
        obj = super().__new__(cls)
        # Contiguous span of years already materialised in the cache
        # (both None until the first lookup).
        obj._start = None
        obj._end = None
        # date -> True for every known holiday inside [_start, _end].
        obj._cache = {}
        return obj

    @property
    def code(self):
        """Trading-centre code: simply the subclass name."""
        return self.__class__.__name__

    def isbizday(self, dte):
        """Return True when *dte* falls on a weekday that is not a holiday."""
        if dte.isoweekday() in isoweekend:
            return False
        return self._isholiday(dte) is False

    def _isholiday(self, dte):
        """Return True when *dte* is a holiday, building the cache as needed.

        The cache always covers one contiguous range of years; a lookup
        outside the range extends it just far enough to include ``dte.year``.
        """
        year = dte.year
        if self._start:
            if self._start <= year <= self._end:
                # Fast path: the year is already cached.
                return self._cache.get(dte, False)
            if year < self._start:
                start, end = year, self._start - 1
            else:
                start, end = self._end + 1, year
        else:
            start = end = year
        for y in range(start, end + 1):
            self.build_dates(y)
        # BUG FIX: record the span we just built.  Previously _start/_end
        # were never assigned after __new__, so the fast path above was dead
        # and every call rebuilt the holiday tables from scratch.
        self._start = start if self._start is None else min(self._start, start)
        self._end = end if self._end is None else max(self._end, end)
        return self._cache.get(dte, False)

    def build_dates(self, year):
        """Insert every holiday of *year* into the cache."""
        for holiday in self.holidays.values():
            for day in holiday.allholidays(year):
                self._cache[day] = True
_module()
class CustomDataset(Dataset):
    """Custom semantic segmentation dataset.

    Discovers (image, annotation) file pairs either from a ``split`` file or
    by scanning ``img_dir``, feeds each sample through ``pipeline``, and
    implements ground-truth loading plus mIoU/mDice/mFscore evaluation.
    """
    # Default class names / palette.  Subclasses override these; the
    # constructor's ``classes``/``palette`` arguments may select a subset.
    CLASSES = None
    PALETTE = None

    def __init__(self, pipeline, img_dir, img_suffix='.jpg', ann_dir=None, seg_map_suffix='.png', split=None, data_root=None, test_mode=False, ignore_index=255, reduce_zero_label=False, classes=None, palette=None):
        """Initialise directories, resolve the class subset, load file infos.

        ``img_dir``/``ann_dir``/``split`` are joined onto ``data_root`` when
        given as relative paths.
        """
        self.pipeline = Compose(pipeline)
        self.img_dir = img_dir
        self.img_suffix = img_suffix
        self.ann_dir = ann_dir
        self.seg_map_suffix = seg_map_suffix
        self.split = split
        self.data_root = data_root
        self.test_mode = test_mode
        self.ignore_index = ignore_index
        self.reduce_zero_label = reduce_zero_label
        # Mapping old label id -> new id (-1 = dropped); populated by
        # get_classes_and_palette when a custom class subset is requested.
        self.label_map = None
        (self.CLASSES, self.PALETTE) = self.get_classes_and_palette(classes, palette)
        if (self.data_root is not None):
            # Resolve relative paths against data_root.
            if (not osp.isabs(self.img_dir)):
                self.img_dir = osp.join(self.data_root, self.img_dir)
            if (not ((self.ann_dir is None) or osp.isabs(self.ann_dir))):
                self.ann_dir = osp.join(self.data_root, self.ann_dir)
            if (not ((self.split is None) or osp.isabs(self.split))):
                self.split = osp.join(self.data_root, self.split)
        self.img_infos = self.load_annotations(self.img_dir, self.img_suffix, self.ann_dir, self.seg_map_suffix, self.split)

    def __len__(self):
        """Number of samples in the dataset."""
        return len(self.img_infos)

    def load_annotations(self, img_dir, img_suffix, ann_dir, seg_map_suffix, split):
        """Collect per-image info dicts (filename plus optional 'ann').

        When ``split`` is given, read one image stem per line from it;
        otherwise scan ``img_dir`` recursively for files ending in
        ``img_suffix``.
        """
        img_infos = []
        if (split is not None):
            with open(split) as f:
                for line in f:
                    img_name = line.strip()
                    img_info = dict(filename=(img_name + img_suffix))
                    if (ann_dir is not None):
                        seg_map = (img_name + seg_map_suffix)
                        img_info['ann'] = dict(seg_map=seg_map)
                    img_infos.append(img_info)
        else:
            for img in mmcv.scandir(img_dir, img_suffix, recursive=True):
                img_info = dict(filename=img)
                if (ann_dir is not None):
                    # NOTE(review): str.replace substitutes the first match
                    # anywhere in the name, not only the trailing suffix —
                    # fine as long as img_suffix never appears mid-filename.
                    seg_map = img.replace(img_suffix, seg_map_suffix)
                    img_info['ann'] = dict(seg_map=seg_map)
                img_infos.append(img_info)
        print_log(f'Loaded {len(img_infos)} images', logger=get_root_logger())
        return img_infos

    def get_ann_info(self, idx):
        """Return the annotation dict for sample ``idx``."""
        return self.img_infos[idx]['ann']

    def pre_pipeline(self, results):
        """Seed ``results`` with the prefixes/fields the pipeline expects."""
        results['seg_fields'] = []
        results['img_prefix'] = self.img_dir
        results['seg_prefix'] = self.ann_dir
        if self.custom_classes:
            results['label_map'] = self.label_map

    def __getitem__(self, idx):
        """Return a processed test or train sample depending on test_mode."""
        if self.test_mode:
            return self.prepare_test_img(idx)
        else:
            return self.prepare_train_img(idx)

    def prepare_train_img(self, idx):
        """Run image ``idx`` (with its annotation) through the pipeline."""
        img_info = self.img_infos[idx]
        ann_info = self.get_ann_info(idx)
        results = dict(img_info=img_info, ann_info=ann_info)
        self.pre_pipeline(results)
        return self.pipeline(results)

    def prepare_test_img(self, idx):
        """Run image ``idx`` (without annotation) through the pipeline."""
        img_info = self.img_infos[idx]
        results = dict(img_info=img_info)
        self.pre_pipeline(results)
        return self.pipeline(results)

    def format_results(self, results, **kwargs):
        """Placeholder: format results to dataset-specific output.

        NOTE(review): this method had no body at all (a syntax error in the
        original file); this docstring restores a valid no-op placeholder.
        """

    def get_gt_seg_maps(self, efficient_test=False):
        """Return ground-truth seg maps (file paths when efficient_test)."""
        gt_seg_maps = []
        for img_info in self.img_infos:
            seg_map = osp.join(self.ann_dir, img_info['ann']['seg_map'])
            if efficient_test:
                # Defer reading: hand back the file path instead of pixels.
                gt_seg_map = seg_map
            else:
                gt_seg_map = mmcv.imread(seg_map, flag='unchanged', backend='pillow')
            gt_seg_maps.append(gt_seg_map)
        return gt_seg_maps

    def get_classes_and_palette(self, classes=None, palette=None):
        """Resolve the effective class names and palette.

        ``classes`` may be None (use the class defaults), a filename (one
        class per line), or a tuple/list of names; when given it must be a
        subset of ``CLASSES``.
        """
        if (classes is None):
            self.custom_classes = False
            return (self.CLASSES, self.PALETTE)
        self.custom_classes = True
        if isinstance(classes, str):
            class_names = mmcv.list_from_file(classes)
        elif isinstance(classes, (tuple, list)):
            class_names = classes
        else:
            raise ValueError(f'Unsupported type {type(classes)} of classes.')
        if self.CLASSES:
            # NOTE(review): when ``classes`` is a filename, set(classes) is a
            # set of characters and classes.index(c) below indexes the string
            # — presumably set(class_names)/class_names.index were intended;
            # confirm before using a file path together with custom CLASSES.
            if (not set(classes).issubset(self.CLASSES)):
                raise ValueError('classes is not a subset of CLASSES.')
            # Map original ids to the new contiguous ids (-1 = dropped).
            self.label_map = {}
            for (i, c) in enumerate(self.CLASSES):
                if (c not in class_names):
                    self.label_map[i] = (- 1)
                else:
                    self.label_map[i] = classes.index(c)
        palette = self.get_palette_for_custom_classes(class_names, palette)
        return (class_names, palette)

    def get_palette_for_custom_classes(self, class_names, palette=None):
        """Derive a palette consistent with a custom class subset."""
        if (self.label_map is not None):
            # Keep palette entries of retained classes, ordered by new id.
            palette = []
            for (old_id, new_id) in sorted(self.label_map.items(), key=(lambda x: x[1])):
                if (new_id != (- 1)):
                    palette.append(self.PALETTE[old_id])
            # Preserve the container type of the original PALETTE.
            palette = type(self.PALETTE)(palette)
        elif (palette is None):
            if (self.PALETTE is None):
                # No palette known at all: make up random colours.
                palette = np.random.randint(0, 255, size=(len(class_names), 3))
            else:
                palette = self.PALETTE
        return palette

    def evaluate(self, results, metric='mIoU', logger=None, efficient_test=False, **kwargs):
        """Evaluate predictions; supported metrics: mIoU, mDice, mFscore.

        Logs per-class and summary tables via print_log and returns a flat
        dict of scalar results as fractions in [0, 1].
        """
        if isinstance(metric, str):
            metric = [metric]
        allowed_metrics = ['mIoU', 'mDice', 'mFscore']
        if (not set(metric).issubset(set(allowed_metrics))):
            raise KeyError('metric {} is not supported'.format(metric))
        eval_results = {}
        gt_seg_maps = self.get_gt_seg_maps(efficient_test)
        if (self.CLASSES is None):
            # Infer the class count from the labels actually present.
            num_classes = len(reduce(np.union1d, [np.unique(_) for _ in gt_seg_maps]))
        else:
            num_classes = len(self.CLASSES)
        ret_metrics = eval_metrics(results, gt_seg_maps, num_classes, self.ignore_index, metric, label_map=self.label_map, reduce_zero_label=self.reduce_zero_label)
        if (self.CLASSES is None):
            class_names = tuple(range(num_classes))
        else:
            class_names = self.CLASSES
        # Summary table: per-metric mean over classes, as percentages.
        ret_metrics_summary = OrderedDict({ret_metric: np.round((np.nanmean(ret_metric_value) * 100), 2) for (ret_metric, ret_metric_value) in ret_metrics.items()})
        # aAcc has no per-class breakdown; drop it from the class table.
        ret_metrics.pop('aAcc', None)
        ret_metrics_class = OrderedDict({ret_metric: np.round((ret_metric_value * 100), 2) for (ret_metric, ret_metric_value) in ret_metrics.items()})
        ret_metrics_class.update({'Class': class_names})
        ret_metrics_class.move_to_end('Class', last=False)
        class_table_data = PrettyTable()
        for (key, val) in ret_metrics_class.items():
            class_table_data.add_column(key, val)
        summary_table_data = PrettyTable()
        for (key, val) in ret_metrics_summary.items():
            if (key == 'aAcc'):
                summary_table_data.add_column(key, [val])
            else:
                summary_table_data.add_column(('m' + key), [val])
        print_log('per class results:', logger)
        print_log(('\n' + class_table_data.get_string()), logger=logger)
        print_log('Summary:', logger)
        print_log(('\n' + summary_table_data.get_string()), logger=logger)
        # Convert the percentages back to fractions for the returned dict.
        for (key, value) in ret_metrics_summary.items():
            if (key == 'aAcc'):
                eval_results[key] = (value / 100.0)
            else:
                eval_results[('m' + key)] = (value / 100.0)
        ret_metrics_class.pop('Class', None)
        for (key, value) in ret_metrics_class.items():
            eval_results.update({((key + '.') + str(name)): (value[idx] / 100.0) for (idx, name) in enumerate(class_names)})
        if mmcv.is_list_of(results, str):
            # efficient_test mode wrote intermediate files; clean them up.
            for file_name in results:
                os.remove(file_name)
        return eval_results
@dataclasses.dataclass
class LoadResults:
    """Outcome of loading Greasemonkey scripts.

    BUG FIX: the ``@dataclasses.dataclass`` decorator was missing, so the
    ``dataclasses.field(...)`` defaults below were left as bare ``Field``
    objects on the class instead of becoming per-instance lists.
    """
    # Scripts that loaded correctly.
    successful: List[GreasemonkeyScript] = dataclasses.field(default_factory=list)
    # (script name, error message) pairs for scripts that failed to load.
    errors: List[Tuple[str, str]] = dataclasses.field(default_factory=list)

    def successful_str(self) -> str:
        """Human-readable summary of the successfully loaded scripts."""
        if not self.successful:
            return 'No Greasemonkey scripts loaded'
        names = '\n'.join(str(script) for script in sorted(self.successful, key=str))
        return f'''Loaded Greasemonkey scripts:
{names}'''

    def error_str(self) -> Optional[str]:
        """Summary of load errors, or None when there were none."""
        if not self.errors:
            return None
        lines = '\n'.join(f'{script}: {error}' for (script, error) in sorted(self.errors))
        return f'''Greasemonkey scripts failed to load:
{lines}'''
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.