code stringlengths 281 23.7M |
|---|
def test_MinMaxScaler_weights(decision_matrix):
    """Scaling weights with MinMaxScaler must equal a manual min-max normalization."""
    dm = decision_matrix(
        seed=42,
        min_alternatives=10,
        max_alternatives=10,
        min_criteria=20,
        max_criteria=20,
        min_objectives_proportion=0.5,
    )
    # Build the expected matrix by normalizing the weights by hand.
    w = dm.weights
    normalized_weights = (w - np.min(w)) / (np.max(w) - np.min(w))
    expected = skcriteria.mkdm(
        matrix=dm.matrix,
        objectives=dm.objectives,
        weights=normalized_weights,
        alternatives=dm.alternatives,
        criteria=dm.criteria,
        dtypes=dm.dtypes,
    )
    result = MinMaxScaler(target='weights').transform(dm)
    assert result.aequals(expected)
class Entity():
def __init__(self, name, comment=None, tags=[]):
self.id = name
self.comment = None
self.note(comment)
self.tags = []
self.tag(tags)
def note(self, comment):
if ((comment is not None) and (comment.strip() != '')):
self.comment = (comment.strip() if (self.comment is None) else ((self.comment + ' - ') + comment.strip()))
return self
def tag(self, tags):
if (tags is not None):
toks = ((tok.strip() for tok in tags.strip().split(' ')) if isinstance(tags, str) else (tok.strip() for tok in tags))
self.tags.extend([tok for tok in toks if ((tok != '') and (tok not in self.tags))])
return self
def same_type_and_basic_attributes(self, other):
return ((type(self) == type(other)) and (self.comment == other.comment) and (self.tags == other.tags))
def mergeable_with(self, other):
return ((type(self) == type(other)) and ((self.comment == other.comment) or (None in {self.comment, other.comment})) and ((self.tags == other.tags) or (0 in {len(self.tags), len(other.tags)})))
def blank_basic_attributes(self):
return ((self.comment is None) and (self.tags == []))
def clear_basic_attributes(self):
self.comment = None
self.tags = []
def copy_basic_attributes_of(self, other):
assert isinstance(other, Entity)
self.comment = other.comment
self.tags = other.tags
return self |
class JsonWriterTest(Json, WriterTest, TestCase):
    # End-to-end tests: rows written through the writer context must come out
    # as a JSON array, one object per output line.
    # NOTE(review): the bare ``()`` lines below look like decorator residue
    # lost during extraction (they are harmless no-op expressions) -- confirm
    # against the original file.
    ()
    def test_fields(self, context):
        # Explicit field names map positional tuple values onto JSON keys.
        context.set_input_fields(['foo', 'bar'])
        context.write_sync(('a', 'b'), ('c', 'd'))
        context.stop()
        assert (self.readlines() == ('[{"foo": "a", "bar": "b"},', '{"foo": "c", "bar": "d"}]'))
    ()
    def test_fields_from_type(self, context):
        # Field names can also be inferred from a namedtuple input type.
        context.set_input_type(namedtuple('Point', 'x y'))
        context.write_sync((1, 2), (3, 4))
        context.stop()
        assert (self.readlines() == ('[{"x": 1, "y": 2},', '{"x": 3, "y": 4}]'))
    ()
    def test_nofields_multiple_args(self, context):
        # Without declared fields, each mapping argument is emitted in order.
        context.write_sync((FOOBAR, FOOBAR), (OD_ABC, FOOBAR), (FOOBAZ, FOOBAR))
        context.stop()
        assert (self.readlines() == ('[{"foo": "bar"},', '{"foo": "bar"},', '{"a": "A", "b": "B", "c": "C"},', '{"foo": "bar"},', '{"foo": "baz"},', '{"foo": "bar"}]'))
    ()
    def test_nofields_multiple_args_length_mismatch(self, context):
        # Rows of differing arity are rejected with TypeError.
        with pytest.raises(TypeError):
            context.write_sync((FOOBAR, FOOBAR), OD_ABC)
    ()
    def test_nofields_single_arg(self, context):
        # Single mappings (not wrapped in a tuple) are written one per row.
        context.write_sync(FOOBAR, OD_ABC, FOOBAZ)
        context.stop()
        assert (self.readlines() == ('[{"foo": "bar"},', '{"a": "A", "b": "B", "c": "C"},', '{"foo": "baz"}]'))
    ()
    def test_nofields_empty_args(self, context):
        # Writing only empty rows still yields a valid (empty) JSON array.
        context.write_sync(EMPTY, EMPTY, EMPTY)
        context.stop()
        assert (self.readlines() == ('[]',))
def _get_update_tickets_errors(response, input: UpdateAttendeeTicketInput) -> UpdateAttendeeTicketErrors:
    """Translate a raw error ``response`` into an UpdateAttendeeTicketErrors value.

    Top-level attendee fields are reported under their own field name; answer
    errors are positional (``response['answers'][i]`` matches
    ``input.answers[i]``) and are reported under the related question.
    """
    errors = []
    # Field-level errors: only the first message per field is surfaced.
    for attendee_field in ('attendee_name', 'attendee_email'):
        messages = response.get(attendee_field)
        if messages:
            errors.append(UpdateAttendeeTicketError(field=attendee_field, message=messages[0]))
    answer_errors = response.get('answers')
    if answer_errors:
        for index, answer in enumerate(input.answers):
            entry = answer_errors[index]
            if not entry:
                continue
            for key in ('answer', 'options'):
                if entry.get(key):
                    errors.append(UpdateAttendeeTicketError(field=answer.question, message=entry[key][0]))
    return UpdateAttendeeTicketErrors(id=input.id, errors=errors)
class Window(QWidget):
    """Example widget demonstrating QDataWidgetMapper: three editors mapped to
    the columns of a QStandardItemModel, with Previous/Next navigation."""

    def __init__(self, parent=None):
        super(Window, self).__init__(parent)
        self.setupModel()
        # One editor widget per model column (name, address, type).
        nameLabel = QLabel('Na&me:')
        nameEdit = QLineEdit()
        addressLabel = QLabel('&Address:')
        addressEdit = QTextEdit()
        typeLabel = QLabel('&Type:')
        typeComboBox = QComboBox()
        self.nextButton = QPushButton('&Next')
        self.previousButton = QPushButton('&Previous')
        # Buddies give the labels' keyboard mnemonics focus targets.
        nameLabel.setBuddy(nameEdit)
        addressLabel.setBuddy(addressEdit)
        typeLabel.setBuddy(typeComboBox)
        # The combo box picks among the fixed type strings from setupModel().
        typeComboBox.setModel(self.typeModel)
        # Map each editor to a model column; the combo box maps its
        # 'currentIndex' property rather than its display text.
        self.mapper = QDataWidgetMapper(self)
        self.mapper.setModel(self.model)
        self.mapper.addMapping(nameEdit, 0)
        self.mapper.addMapping(addressEdit, 1)
        self.mapper.addMapping(typeComboBox, 2, b'currentIndex')
        self.previousButton.clicked.connect(self.mapper.toPrevious)
        self.nextButton.clicked.connect(self.mapper.toNext)
        self.mapper.currentIndexChanged.connect(self.updateButtons)
        layout = QGridLayout()
        layout.addWidget(nameLabel, 0, 0, 1, 1)
        layout.addWidget(nameEdit, 0, 1, 1, 1)
        layout.addWidget(self.previousButton, 0, 2, 1, 1)
        layout.addWidget(addressLabel, 1, 0, 1, 1)
        layout.addWidget(addressEdit, 1, 1, 2, 1)
        layout.addWidget(self.nextButton, 1, 2, 1, 1)
        layout.addWidget(typeLabel, 3, 0, 1, 1)
        layout.addWidget(typeComboBox, 3, 1, 1, 1)
        self.setLayout(layout)
        self.setWindowTitle('Delegate Widget Mapper')
        # Show the first record on startup (also triggers updateButtons).
        self.mapper.toFirst()

    def setupModel(self):
        """Build the address-type list model and the 5x3 data model
        (columns: name, address, type index)."""
        items = ('Home', 'Work', 'Other')
        self.typeModel = QStringListModel(items, self)
        self.model = QStandardItemModel(5, 3, self)
        names = ('Alice', 'Bob', 'Carol', 'Donald', 'Emma')
        addresses = ('<qt>123 Main Street<br/>Market Town</qt>', '<qt>PO Box 32<br/>Mail Handling Service<br/>Service City</qt>', '<qt>The Lighthouse<br/>Remote Island</qt>', '<qt>47338 Park Avenue<br/>Big City</qt>', '<qt>Research Station<br/>Base Camp<br/>Big Mountain</qt>')
        # Type values are stored as string indices into typeModel.
        types = ('0', '1', '2', '0', '2')
        for (row, name) in enumerate(names):
            self.model.setItem(row, 0, QStandardItem(name))
            self.model.setItem(row, 1, QStandardItem(addresses[row]))
            self.model.setItem(row, 2, QStandardItem(types[row]))

    def updateButtons(self, row):
        """Enable navigation buttons only while there is somewhere to go."""
        self.previousButton.setEnabled((row > 0))
        self.nextButton.setEnabled((row < (self.model.rowCount() - 1)))
class PairingManager():
    """Tracks Bluetooth pairing-agent state against the BlueZ agent manager."""

    def __init__(self):
        # Whether the agent is currently registered, and whether that
        # registration came from enable_automatically().
        self.enabled = False
        self.enabled_automatically = False
        self.agent_manager = BluezAgentManagerAPI.connect()

    def register(self, server: AdvertisingAPI) -> None:
        """Publish the pairing agent object on the system bus."""
        SystemBus().publish_object(PairingAgentAPI.path, PairingAgent(server))

    def enable(self) -> None:
        """Register the agent as the default 'DisplayYesNo' agent (idempotent)."""
        if self.enabled:
            return
        self.agent_manager.RegisterAgent(PairingAgentAPI.path, 'DisplayYesNo')
        self.agent_manager.RequestDefaultAgent(PairingAgentAPI.path)
        self.enabled = True
        self.enabled_automatically = False

    def disable(self) -> None:
        """Unregister the agent if it is registered (idempotent)."""
        if not self.enabled:
            return
        self.agent_manager.UnregisterAgent(PairingAgentAPI.path)
        self.enabled = False
        self.enabled_automatically = False

    def enable_automatically(self) -> None:
        """Enable pairing and remember that it was triggered automatically."""
        if self.enabled:
            return
        self.enable()
        self.enabled_automatically = True

    def disable_if_enabled_automatically(self) -> None:
        """Disable pairing only when it was enabled via enable_automatically()."""
        if self.enabled and self.enabled_automatically:
            self.disable()
def index_vars_to_types(entry, slice_ok=True):
    """Map an index ``entry`` (variable, type, or slice) to a scalar type.

    Returns a scalar type for scalar or 0-d tensor indices, or a ``slice``
    whose bounds are converted recursively when ``slice_ok`` is true.

    Raises:
        AdvancedIndexingError: for boolean masks or unsupported entries.
        TypeError: for non-integer scalars and for plain Python ints.
    """
    # Boolean masks mean advanced indexing, not basic Subtensor indexing.
    if (isinstance(entry, (np.ndarray, Variable)) and hasattr(entry, 'dtype') and (entry.dtype == 'bool')):
        raise AdvancedIndexingError('Invalid index type or slice for Subtensor')
    # Explicitly invalid scalar/tensor types (non-integers) are rejected.
    if (isinstance(entry, Variable) and ((entry.type in invalid_scal_types) or (entry.type in invalid_tensor_types))):
        raise TypeError('Expected an integer')
    # Scalar variable or scalar type: already in the desired form.
    if (isinstance(entry, Variable) and (entry.type in scal_types)):
        return entry.type
    elif (isinstance(entry, Type) and (entry in scal_types)):
        return entry
    # 0-d tensors (every dim broadcastable) convert to their scalar type.
    if (isinstance(entry, Variable) and (entry.type in tensor_types) and all(entry.type.broadcastable)):
        return ps.get_scalar_type(entry.type.dtype)
    elif (isinstance(entry, Type) and (entry in tensor_types) and all(entry.broadcastable)):
        return ps.get_scalar_type(entry.dtype)
    elif (slice_ok and isinstance(entry, slice)):
        # Convert each bound recursively; slice_ok=False forbids nesting.
        a = entry.start
        b = entry.stop
        c = entry.step
        if (a is not None):
            slice_a = index_vars_to_types(a, False)
        else:
            slice_a = None
        # sys.maxsize is what Python substitutes for an omitted stop bound,
        # so it is treated the same as None.
        if ((b is not None) and (b != sys.maxsize)):
            slice_b = index_vars_to_types(b, False)
        else:
            slice_b = None
        if (c is not None):
            slice_c = index_vars_to_types(c, False)
        else:
            slice_c = None
        return slice(slice_a, slice_b, slice_c)
    elif isinstance(entry, (int, np.integer)):
        # NOTE(review): plain ints deliberately raise a bare TypeError here --
        # presumably the caller constant-folds them instead; confirm upstream.
        raise TypeError()
    else:
        raise AdvancedIndexingError('Invalid index type or slice for Subtensor')
class Telegraph():
    """Thin async wrapper around the Telegraph HTTP API.

    All methods delegate to the underlying ``TelegraphApi`` client and return
    the parsed API response, converting page content between HTML and the
    Telegraph node format where relevant.
    """
    __slots__ = ('_telegraph',)

    def __init__(self, access_token=None, domain='telegra.ph'):
        self._telegraph = TelegraphApi(access_token, domain)

    def get_access_token(self):
        """Return the access token currently used for API calls."""
        return self._telegraph.access_token

    async def create_account(self, short_name, author_name=None, author_url=None, replace_token=True):
        """Create an account; optionally adopt its token for subsequent calls."""
        response = (await self._telegraph.method('createAccount', values={'short_name': short_name, 'author_name': author_name, 'author_url': author_url}))
        if replace_token:
            self._telegraph.access_token = response.get('access_token')
        return response

    async def edit_account_info(self, short_name=None, author_name=None, author_url=None):
        """Update the current account's profile fields."""
        return (await self._telegraph.method('editAccountInfo', values={'short_name': short_name, 'author_name': author_name, 'author_url': author_url}))

    async def revoke_access_token(self):
        """Revoke the current token and switch to the newly issued one."""
        response = (await self._telegraph.method('revokeAccessToken'))
        self._telegraph.access_token = response.get('access_token')
        return response

    async def get_page(self, path, return_content=True, return_html=True):
        """Fetch a page; optionally convert its node content to HTML in place."""
        response = (await self._telegraph.method('getPage', path=path, values={'return_content': return_content}))
        if (return_content and return_html):
            response['content'] = nodes_to_html(response['content'])
        return response

    async def create_page(self, title, content=None, html_content=None, author_name=None, author_url=None, return_content=False):
        """Create a page from node ``content``, or from ``html_content`` when
        ``content`` is None."""
        if (content is None):
            content = html_to_nodes(html_content)
        content_json = json_dumps(content)
        return (await self._telegraph.method('createPage', values={'title': title, 'author_name': author_name, 'author_url': author_url, 'content': content_json, 'return_content': return_content}))

    async def edit_page(self, path, title, content=None, html_content=None, author_name=None, author_url=None, return_content=False):
        """Edit an existing page; content handling mirrors ``create_page``."""
        if (content is None):
            content = html_to_nodes(html_content)
        content_json = json_dumps(content)
        return (await self._telegraph.method('editPage', path=path, values={'title': title, 'author_name': author_name, 'author_url': author_url, 'content': content_json, 'return_content': return_content}))

    async def get_account_info(self, fields=None):
        """Fetch account fields (``fields`` is a list of field names).

        NOTE(review): passes the values dict positionally, unlike the other
        methods' ``values=`` keyword -- confirm TelegraphApi.method treats the
        second positional argument as ``values``.
        """
        return (await self._telegraph.method('getAccountInfo', {'fields': (json_dumps(fields) if fields else None)}))

    async def get_page_list(self, offset=0, limit=50):
        """List the account's pages, paginated."""
        return (await self._telegraph.method('getPageList', {'offset': offset, 'limit': limit}))

    async def get_views(self, path, year=None, month=None, day=None, hour=None):
        """Fetch view counts for a page, optionally narrowed to a time period."""
        return (await self._telegraph.method('getViews', path=path, values={'year': year, 'month': month, 'day': day, 'hour': hour}))

    async def upload_file(self, f):
        """Upload a file; delegates entirely to the underlying API client."""
        return (await self._telegraph.upload_file(f))
class AppendDictAction(argparse.Action):
    """argparse action that collects ``key=value`` pairs into one dict.

    A bare value containing neither '=' nor ',' is stored under the wildcard
    key '*'. When ``allow_commas`` is true, a single argument may carry
    several comma-separated ``key=value`` pairs. Repeated occurrences of the
    option accumulate into the same dict.
    """

    def __init__(self, allow_commas=True, *args, **kwargs):
        self.allow_commas = allow_commas
        super().__init__(*args, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        # Start from whatever dict earlier occurrences already built.
        accumulated = getattr(namespace, self.dest, {}) or {}
        if (',' not in values) and ('=' not in values):
            # Plain token: treat it as a wildcard entry.
            accumulated['*'] = values
        else:
            try:
                chunks = values.split(',') if self.allow_commas else [values]
                for chunk in chunks:
                    key, value = chunk.split('=', 1)
                    accumulated[key] = value
            except ValueError:
                # A chunk without '=': report through argparse's own error path.
                parser.error('could not parse {}'.format(self.dest))
        setattr(namespace, self.dest, accumulated)
def get_attacker(attack_method, arch, predict, p, epsilon, num_steps, step_size, image_dim, image_size, grid_scale, sample_grid_num, sample_times, momentum=0.0, gamma=1.0, lam=0.0, ti_size=1, m=0, sigma=15):
    """Validate hyper-parameters and build a ProjectionAttacker.

    *attack_method* names the variant(s) in use (SGM, MI, VR, TI, IR or a
    'Hybrid' combination); each variant requires its own hyper-parameter to
    be set. *p* selects the norm ('inf' or 2); *epsilon*/*step_size* are
    given on the 0-255 pixel scale and rescaled here to the model's range.

    Raises:
        Exception: when a selected variant is missing its hyper-parameter.
        NotImplementedError: for norms other than inf/2.
        ValueError: when SGM hooks are requested for an unsupported arch.
    """
    # Sanity-check that each requested attack variant got its knob.
    if ((('SGM' in attack_method) or ('Hybrid' in attack_method)) and (gamma > 1)):
        raise Exception('gamma of SGM method should be less than 1')
    if ((('MI' in attack_method) or ('Hybrid' in attack_method)) and (momentum == 0)):
        raise Exception('momentum of MI method should be greater than 0')
    if ((('VR' in attack_method) or ('Hybrid' in attack_method)) and (m == 0)):
        raise Exception('m of VR method should be greater than 0')
    if (('TI' in attack_method) and (ti_size == 1)):
        # Fixed message: the guard fires when ti_size == 1 (no TI kernel),
        # so the requirement is "greater than 1", not "greater than 0".
        raise Exception('ti_size of the TI method should be greater than 1')
    if ((('IR' in attack_method) or ('Hybrid' in attack_method)) and (lam == 0)):
        raise Exception('lam of the IR method should be greater than 0')
    # Rescale budget/step from pixel units; removed the no-op
    # ``num_steps = num_steps`` self-assignments present in the original.
    if (p == 'inf'):
        p = np.inf
        epsilon = (epsilon / 255.0)
        step_size = (step_size / 255.0)
    elif (int(p) == 2):
        p = 2
        # For L2, the budget additionally scales with sqrt(dimension).
        epsilon = ((epsilon / 255.0) * np.sqrt(image_dim))
        step_size = float(step_size)
    else:
        raise NotImplementedError('p should be inf or 2')
    # SGM: dampen residual-branch gradients via backward hooks on the model.
    if ((gamma < 1.0) and (('SGM' in attack_method) or ('Hybrid' in attack_method))):
        if (arch in ['resnet18', 'resnet34', 'resnet50', 'resnet101', 'resnet152']):
            register_hook_for_resnet(predict, arch=arch, gamma=gamma)
        elif (arch in ['densenet121', 'densenet169', 'densenet201']):
            register_hook_for_densenet(predict, arch=arch, gamma=gamma)
        else:
            raise ValueError('Current code only supports resnet/densenet. You can extend this code to other architectures.')
    adversary = ProjectionAttacker(model=predict, epsilon=epsilon, num_steps=num_steps, step_size=step_size, image_width=image_size, momentum=momentum, ti_size=ti_size, loss_fn=nn.CrossEntropyLoss(reduction='sum'), targeted=False, lam=lam, grid_scale=grid_scale, sample_times=sample_times, sample_grid_num=sample_grid_num, m=m, sigma=sigma, ord=p)
    return adversary
def integral_mini_interval_Pprecision_CDFmethod(I, J, E):
    """Closed-form integral used by the affiliation-precision CDF method.

    ``I``, ``J`` and ``E`` are interval-like iterables (only their min/max
    endpoints are used); ``E`` is the enclosing interval that normalizes the
    result.
    """
    min_piece = integral_mini_interval_P_CDFmethod__min_piece(I, J, E)
    # Interval endpoints.
    i_min, i_max = min(I), max(I)
    j_min, j_max = min(J), max(J)
    e_min, e_max = min(E), max(E)
    # Distance bounds between I and J for the linear piece.
    d_min = max(i_min - j_max, j_min - i_max)
    d_max = max(i_max - j_max, j_min - i_min)
    linear_piece = (1 / 2) * (d_max ** 2 - d_min ** 2)
    remaining_piece = (j_max - j_min) * (i_max - i_min)
    delta_i = i_max - i_min
    delta_e = e_max - e_min
    # Normalize the summed pieces by the enclosing-interval length.
    return delta_i - (1 / delta_e) * (min_piece + linear_piece + remaining_piece)
def iload_pyrocko_events(file_path, segment, content):
    """Yield event 'nuts' for every event in a pyrocko catalog file.

    ``segment`` is accepted but unused (events always use segment 0 here);
    the pyrocko event object is attached as nut content only when 'event'
    is requested in *content*.
    """
    from pyrocko import model as pmodel
    for (iev, ev) in enumerate(pmodel.Event.load_catalog(file_path)):
        # Events are point-like in time: tmin == tmax == the event time.
        nut = model.make_event_nut(file_segment=0, file_element=iev, codes=model.CodesX((ev.catalog or '')), tmin=ev.time, tmax=ev.time)
        if ('event' in content):
            nut.content = ev
        (yield nut)
def write_ts_properties(training_set_properties: dict) -> None:
    """Persist training-set properties as a ';'-separated two-column CSV.

    The output path is derived from ``constants.training_set`` by replacing
    its 4-character extension with ``.csv``. Keys containing
    'validity_tensor' are skipped; numpy arrays and torch tensors are
    converted to plain Python values first.
    """
    training_set = constants.training_set
    dict_path = f'{training_set[:(- 4)]}.csv'
    # Fixed: newline='' is required when handing a file to csv.writer,
    # otherwise extra blank lines appear on Windows (csv module docs).
    with open(dict_path, 'w', newline='') as csv_file:
        csv_writer = csv.writer(csv_file, delimiter=';')
        for (key, value) in training_set_properties.items():
            if ('validity_tensor' in key):
                continue
            if isinstance(value, np.ndarray):
                csv_writer.writerow([key, list(value)])
            elif isinstance(value, torch.Tensor):
                # Scalar tensors become a float; ValueError means the tensor
                # holds several elements, so write them as a list instead.
                # NOTE(review): newer torch raises RuntimeError for
                # multi-element float() -- confirm the torch version in use.
                try:
                    csv_writer.writerow([key, float(value)])
                except ValueError:
                    csv_writer.writerow([key, [float(i) for i in value]])
            else:
                csv_writer.writerow([key, value])
def test_warn_deprecated_formatting(recwarn_always: pytest.WarningsRecorder) -> None:
    """The deprecation warning must mention both the old and suggested names."""
    warn_deprecated(old, '1.0', issue=1, instead=new)
    warning = recwarn_always.pop(TrioDeprecationWarning)
    assert isinstance(warning.message, Warning)
    message_text = warning.message.args[0]
    assert 'test_deprecate.old is deprecated' in message_text
    assert 'test_deprecate.new instead' in message_text
class ResNetConfig(PretrainedConfig):
    """Configuration for a ResNet backbone model.

    Stores the stage layout (widths/depths), block type, activation and the
    optional list of stages to expose as backbone features.
    """
    model_type = 'resnet'
    layer_types = ['basic', 'bottleneck']

    def __init__(self, num_channels=3, embedding_size=64, hidden_sizes=None, depths=None, layer_type='bottleneck', hidden_act='relu', downsample_in_first_stage=False, out_features=None, **kwargs):
        """Build the config.

        Fixed: ``hidden_sizes``/``depths`` previously used mutable list
        defaults shared across instances; ``None`` now stands in for the
        canonical ResNet-50 layout ``[256, 512, 1024, 2048]`` / ``[3, 4, 6, 3]``.
        """
        super().__init__(**kwargs)
        if (layer_type not in self.layer_types):
            raise ValueError(f"layer_type={layer_type} is not one of {','.join(self.layer_types)}")
        self.num_channels = num_channels
        self.embedding_size = embedding_size
        self.hidden_sizes = hidden_sizes if hidden_sizes is not None else [256, 512, 1024, 2048]
        self.depths = depths if depths is not None else [3, 4, 6, 3]
        self.layer_type = layer_type
        self.hidden_act = hidden_act
        self.downsample_in_first_stage = downsample_in_first_stage
        # One named stage per depth entry, plus the stem.
        self.stage_names = (['stem'] + [f'stage{idx}' for idx in range(1, (len(self.depths) + 1))])
        if (out_features is not None):
            if (not isinstance(out_features, list)):
                raise ValueError('out_features should be a list')
            for feature in out_features:
                if (feature not in self.stage_names):
                    raise ValueError(f'Feature {feature} is not a valid feature name. Valid names are {self.stage_names}')
        self.out_features = out_features
_config
def test_hints_setting_unsetting(xmanager, conn):
    """ICCCM WM_HINTS handling: 'input' must default to True, follow the
    hint value only when the InputHint flag is set, and ignore it otherwise."""
    w = None
    def no_input_hint():
        nonlocal w
        w = conn.create_window(5, 5, 10, 10)
        w.map()
        conn.conn.flush()
    try:
        xmanager.create_window(no_input_hint)
        # No WM_HINTS set at all: input defaults to True.
        assert xmanager.c.window.get_hints()['input']
        # All-zero hints: the InputHint flag is unset, so the (zero) value
        # must be ignored and input stays True.
        hints = ([0] * 14)
        w.set_property('WM_HINTS', hints, type='WM_HINTS', format=32)
        conn.flush()
        assert xmanager.c.window.get_hints()['input']
        # InputHint flag set with value 0: input must report False.
        hints[0] = xcbq.HintsFlags['InputHint']
        hints[1] = 0
        w.set_property('WM_HINTS', hints, type='WM_HINTS', format=32)
        conn.flush()
        assert (not xmanager.c.window.get_hints()['input'])
        # InputHint flag set with value 1: input must report True again.
        hints[0] = xcbq.HintsFlags['InputHint']
        hints[1] = 1
        w.set_property('WM_HINTS', hints, type='WM_HINTS', format=32)
        conn.flush()
        assert xmanager.c.window.get_hints()['input']
    finally:
        # Always clean up the test window, even on assertion failure.
        w.kill_client()
class WrappedSubplan(operator):
    """Operator wrapping a pre-built subplan query for delegated execution."""

    def __init__(self, database, query, tuple_vars, vars):
        self.database = database
        self.query = query
        self.tuple_vars = tuple_vars
        self.vars = vars

    def __repr__(self):
        tuple_var_names = [x['tuple_var'] for x in self.tuple_vars]
        return (((((('Wrapped(' + self.query) + ',') + repr(tuple_var_names)) + ',') + repr(self.vars)) + ')')

    def execute(self, table, prior_locs, prior_globs):
        """Delegate execution of the wrapped query to the database.

        Fixed: the original executed the query twice and discarded the first
        result, doubling the execution cost and any side effects.
        """
        return self.database.execute(self.query, self.tuple_vars, self.vars)
def test_enable_with_flag(hatch, devpi, temp_dir_cache, helpers, published_project_name, config_file):
    """A publisher disabled in config ('disable': True) must still publish
    when the confirmation flag (-y) is passed on the command line."""
    # Point the 'index' publisher at the devpi test server and disable it.
    config_file.model.publish['index']['user'] = devpi.user
    config_file.model.publish['index']['auth'] = devpi.auth
    config_file.model.publish['index']['ca-cert'] = devpi.ca_cert
    config_file.model.publish['index']['repo'] = 'dev'
    config_file.model.publish['index']['repos'] = {'dev': devpi.repo}
    config_file.model.publish['index']['disable'] = True
    config_file.save()
    with temp_dir_cache.as_cwd():
        result = hatch('new', published_project_name)
        assert (result.exit_code == 0), result.output
    path = (temp_dir_cache / published_project_name)
    with path.as_cwd():
        # Make sure the repo comes from config, not the environment.
        del os.environ[PublishEnvVars.REPO]
        # Use a timestamp-based version so repeated runs never collide.
        current_version = timestamp_to_version(helpers.get_current_timestamp())
        result = hatch('version', current_version)
        assert (result.exit_code == 0), result.output
        result = hatch('build')
        assert (result.exit_code == 0), result.output
        build_directory = (path / 'dist')
        artifacts = list(build_directory.iterdir())
        # '-y' bypasses the disabled-publisher confirmation prompt.
        result = hatch('publish', '-y')
    assert (result.exit_code == 0), result.output
    assert (result.output == helpers.dedent(f'''
            {artifacts[0].relative_to(path)} ... success
            {artifacts[1].relative_to(path)} ... success
            [{published_project_name}]
            {devpi.repo}{published_project_name}/{current_version}/
            '''))
class _DictionaryMock(dict):
def __init__(self, item):
super().__init__()
self._value = item
def __setitem__(self, key, item):
self._value = item
def __getitem__(self, key):
return self._value
def __repr__(self):
return repr("{{'*': {0}}}".format(self._value)) |
('evennia.server.server.LoopingCall', MagicMock())
('evennia.server.portal.amp.amp.BinaryBoxProtocol.transport')
# NOTE(review): the two bare expressions above look like the arguments of
# stripped @patch(...) decorators (extraction damage) -- confirm upstream.
class TestAMPClientSend(_TestAMP):
    """Round-trip tests: messages sent by the AMP client must arrive at the
    portal-side server with their payloads intact."""
    def test_msgserver2portal(self, mocktransport):
        # The client serializes the message onto the mocked wire ...
        self._connect_client(mocktransport)
        self.amp_client.send_MsgServer2Portal(self.session, text={'foo': 'bar'})
        wire_data = self._catch_wire_read(mocktransport)[0]
        # ... and the server must decode it and route it to the session.
        self._connect_server(mocktransport)
        self.amp_server.dataReceived(wire_data)
        self.portal.sessions.data_out.assert_called_with(self.portalsession, text={'foo': 'bar'})
    def test_adminserver2portal(self, mocktransport):
        # Same round trip for admin-channel operations (portal sync here).
        self._connect_client(mocktransport)
        self.amp_client.send_AdminServer2Portal(self.session, operation=amp.PSYNC, info_dict={}, spid=None)
        wire_data = self._catch_wire_read(mocktransport)[0]
        self._connect_server(mocktransport)
        self.amp_server.data_in = MagicMock()
        self.amp_server.dataReceived(wire_data)
        self.amp_server.data_in.assert_called()
def get_preprocess_fn(is_training, is_pretrain):
    """Return a partially-applied image preprocessing function.

    Test-time center cropping is only applied for images larger than 32px;
    small-image datasets skip it.
    """
    test_crop = FLAGS.image_size > 32
    return functools.partial(
        data_util.preprocess_image,
        height=FLAGS.image_size,
        width=FLAGS.image_size,
        is_training=is_training,
        color_distort=is_pretrain,
        test_crop=test_crop,
    )
class LogtalkLexer(RegexLexer):
    """Pygments lexer for Logtalk source files.

    Fixed: the original ``url`` line was truncated to an unterminated string
    literal (``url = '``), a syntax error; restored to the Logtalk homepage.
    """
    name = 'Logtalk'
    url = 'https://logtalk.org'
    aliases = ['logtalk']
    filenames = ['*.lgt', '*.logtalk']
    mimetypes = ['text/x-logtalk']
    version_added = '0.10'
    tokens = {'root': [('^\\s*:-\\s', Punctuation, 'directive'), ('%.*?\\n', Comment), ('/\\*(.|\\n)*?\\*/', Comment), ('\\n', Text), ('\\s+', Text), ("0'[\\\\]?.", Number), ('0b[01]+', Number.Bin), ('0o[0-7]+', Number.Oct), ('0x[0-9a-fA-F]+', Number.Hex), ('\\d+\\.?\\d*((e|E)(\\+|-)?\\d+)?', Number), ('([A-Z_][a-zA-Z0-9_]*)', Name.Variable), ('(after|before)(?=[(])', Keyword), ('forward(?=[(])', Keyword), ('(context|parameter|this|se(lf|nder))(?=[(])', Keyword), ('(current_predicate|predicate_property)(?=[(])', Keyword), ('(expand_(goal|term)|(goal|term)_expansion|phrase)(?=[(])', Keyword), ('(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])', Keyword), ('(object|protocol|category)_property(?=[(])', Keyword), ('co(mplements_object|nforms_to_protocol)(?=[(])', Keyword), ('extends_(object|protocol|category)(?=[(])', Keyword), ('imp(lements_protocol|orts_category)(?=[(])', Keyword), ('(instantiat|specializ)es_class(?=[(])', Keyword), ('(current_event|(abolish|define)_events)(?=[(])', Keyword), ('(create|current|set)_logtalk_flag(?=[(])', Keyword), ('logtalk_(compile|l(ibrary_path|oad|oad_context)|make(_target_action)?)(?=[(])', Keyword), ('\\blogtalk_make\\b', Keyword), ('(clause|retract(all)?)(?=[(])', Keyword), ('a(bolish|ssert(a|z))(?=[(])', Keyword), ('(ca(ll|tch)|throw)(?=[(])', Keyword), ('(fa(il|lse)|true|(instantiation|system)_error)\\b', Keyword), ('(type|domain|existence|permission|representation|evaluation|resource|syntax)_error(?=[(])', Keyword), ('((bag|set)of|f(ind|or)all)(?=[(])', Keyword), ('threaded(_(ca(ll|ncel)|once|ignore|exit|peek|wait|notify))?(?=[(])', Keyword), ('threaded_engine(_(create|destroy|self|next|next_reified|yield|post|fetch))?(?=[(])', Keyword), ('(subsumes_term|unify_with_occurs_check)(?=[(])', Keyword), ('(functor|arg|copy_term|numbervars|term_variables)(?=[(])', Keyword), ('(div|rem|m(ax|in|od)|abs|sign)(?=[(])', Keyword), ('float(_(integer|fractional)_part)?(?=[(])', Keyword), ('(floor|t(an|runcate)|round|ceiling)(?=[(])', Keyword), ('(cos|a(cos|sin|tan|tan2)|exp|log|s(in|qrt)|xor)(?=[(])', Keyword), ('(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|ground|acyclic_term)(?=[(])', Keyword), ('compare(?=[(])', Keyword), ('(curren|se)t_(in|out)put(?=[(])', Keyword), ('(open|close)(?=[(])', Keyword), ('flush_output(?=[(])', Keyword), ('(at_end_of_stream|flush_output)\\b', Keyword), ('(stream_property|at_end_of_stream|set_stream_position)(?=[(])', Keyword), ('(nl|(get|peek|put)_(byte|c(har|ode)))(?=[(])', Keyword), ('\\bnl\\b', Keyword), ('read(_term)?(?=[(])', Keyword), ('write(q|_(canonical|term))?(?=[(])', Keyword), ('(current_)?op(?=[(])', Keyword), ('(current_)?char_conversion(?=[(])', Keyword), ('atom_(length|c(hars|o(ncat|des)))(?=[(])', Keyword), ('(char_code|sub_atom)(?=[(])', Keyword), ('number_c(har|ode)s(?=[(])', Keyword), ('(se|curren)t_prolog_flag(?=[(])', Keyword), ('\\bhalt\\b', Keyword), ('halt(?=[(])', Keyword), ('(::|:|\\^\\^)', Operator), ('[{}]', Keyword), ('(ignore|once)(?=[(])', Keyword), ('\\brepeat\\b', Keyword), ('(key)?sort(?=[(])', Keyword), ('(>>|<<|/\\\\||\\\\)', Operator), ('\\bas\\b', Operator), ('\\bis\\b', Keyword), ('(=:=|=\\\\=|<|=<|>=|>)', Operator), ('=\\.\\.', Operator), ('(=|\\\\=)', Operator), ('(==|\\\\==|=<|<|>=|>)', Operator), ('(//|[-+*/])', Operator), ('\\b(e|pi|div|mod|rem)\\b', Operator), ('\\b\\*\\*\\b', Operator), ('-->', Operator), ('([!;]|->)', Operator), ('\\\\+', Operator), ('[?]', Operator), ('\\^', Operator), ('"(|\\\\[^\\\\]|[^"\\\\])*"', String), ('[()\\[\\],.|]', Text), ('[a-z][a-zA-Z0-9_]*', Text), ("'", String, 'quoted_atom')], 'quoted_atom': [("''", String), ("'", String, '#pop'), ('\\\\([\\\\abfnrtv"\\\']|(x[a-fA-F0-9]+|[0-7]+)\\\\)', String.Escape), ("[^\\\\'\\n]+", String), ('\\\\', String)], 'directive': [('(el)?if(?=[(])', Keyword, 'root'), ('(e(lse|ndif))(?=[.])', Keyword, 'root'), ('(category|object|protocol)(?=[(])', Keyword, 'entityrelations'), ('(end_(category|object|protocol))(?=[.])', Keyword, 'root'), ('(public|protected|private)(?=[(])', Keyword, 'root'), ('e(n(coding|sure_loaded)|xport)(?=[(])', Keyword, 'root'), ('in(clude|itialization|fo)(?=[(])', Keyword, 'root'), ('(built_in|dynamic|synchronized|threaded)(?=[.])', Keyword, 'root'), ('(alias|d(ynamic|iscontiguous)|m(eta_(non_terminal|predicate)|ode|ultifile)|s(et_(logtalk|prolog)_flag|ynchronized))(?=[(])', Keyword, 'root'), ('op(?=[(])', Keyword, 'root'), ('(c(alls|oinductive)|module|reexport|use(s|_module))(?=[(])', Keyword, 'root'), ('[a-z][a-zA-Z0-9_]*(?=[(])', Text, 'root'), ('[a-z][a-zA-Z0-9_]*(?=[.])', Text, 'root')], 'entityrelations': [('(complements|extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])', Keyword), ("0'[\\\\]?.", Number), ('0b[01]+', Number.Bin), ('0o[0-7]+', Number.Oct), ('0x[0-9a-fA-F]+', Number.Hex), ('\\d+\\.?\\d*((e|E)(\\+|-)?\\d+)?', Number), ('([A-Z_][a-zA-Z0-9_]*)', Name.Variable), ('[a-z][a-zA-Z0-9_]*', Text), ("'", String, 'quoted_atom'), ('"(|\\\\[^\\\\]|[^"\\\\])*"', String), ('([)]\\.)', Text, 'root'), ('(::)', Operator), ('[()\\[\\],.|]', Text), ('%.*?\\n', Comment), ('/\\*(.|\\n)*?\\*/', Comment), ('\\n', Text), ('\\s+', Text)]}
    def analyse_text(text):
        """Heuristic score: entity-opening directives are a certain match;
        any ':- <lowercase>' directive line is a strong hint."""
        if (':- object(' in text):
            return 1.0
        elif (':- protocol(' in text):
            return 1.0
        elif (':- category(' in text):
            return 1.0
        elif re.search('^:-\\s[a-z]', text, re.M):
            return 0.9
        else:
            return 0.0
def on_episode_end(episode_summary, logger, global_step, steps_count):
    """Log the episode's return (and cost return, when present).

    ``episode_summary`` carries per-step 'reward' and 'info' sequences;
    the summary is logged at step ``global_step + steps_count``.
    """
    episode_return = sum(episode_summary['reward'])
    steps = global_step + steps_count
    print('\nFinished episode with return: {}'.format(episode_return))
    summary = {'training/episode_return': episode_return}
    # Safety-style environments additionally report a per-step 'cost'.
    if 'cost' in episode_summary['info'][-1].keys():
        sum_costs = sum(info['cost'] for info in episode_summary['info'])
        summary['training/episode_cost_return'] = sum_costs
        print('Finished episode with cost return: {}'.format(sum_costs))
    logger.log_evaluation_summary(summary, steps)
_fixtures(SqlAlchemyFixture, QueryFixture)
# NOTE(review): the bare call above looks like residue of a stripped
# fixture decorator (extraction damage) -- confirm against the original.
def test_query_as_sequence_last_sort_wins(sql_alchemy_fixture, query_fixture):
    """Calling sort() twice on a query-backed sequence must not stack:
    only the most recent sort order applies when iterating."""
    fixture = query_fixture
    with sql_alchemy_fixture.persistent_test_classes(fixture.MyObject):
        [object1, object2, object3] = fixture.objects
        # Sort ascending, then descending -- the second call must win.
        fixture.query_as_sequence.sort(key=fixture.MyObject.name)
        fixture.query_as_sequence.sort(key=fixture.MyObject.name, reverse=True)
        sorted_items = [item for item in fixture.query_as_sequence]
        assert (sorted_items == [object3, object1, object2])
@pytest.mark.parametrize('case', [np.array([[0, 5, 1], [1, 6, 1], [2, 7, 0.5]]), [[0, 5, 'red'], (1, 6, 'blue'), [2, 7, {'this': 'also works'}]], pd.DataFrame([[0, 5, 'red'], [1, 6, 'blue'], [2, 7, 'something']], columns=['lat', 'lng', 'color'])])
def test_fast_marker_cluster_data(case):
    """FastMarkerCluster must normalize ndarray/list/DataFrame input into a
    list of 3-element lists with float lat/lng in the first two slots.

    Fixed: the decorator line had been truncated to ``.parametrize(...)``
    (a syntax error); restored the conventional ``@pytest.mark.parametrize``.
    """
    data = FastMarkerCluster(case).data
    assert isinstance(data, list)
    assert (len(data) == 3)
    for i in range(len(data)):
        row = data[i]
        assert isinstance(row, list)
        assert (len(row) == 3)
        # lat/lng are coerced to floats matching the constructed fixtures.
        assert (row[0] == float(i))
        assert (row[1] == float(i + 5))
def CISD(mf, frozen=None, mo_coeff=None, mo_occ=None):
    """Dispatch to the CISD implementation matching the SCF reference type.

    UHF -> UCISD. ROHF -> UCISD as well (RCISD does not support ROHF; the
    object is converted to UHF internally, with a warning). Anything else
    -> RCISD.
    """
    if mf.istype('UHF'):
        return UCISD(mf, frozen, mo_coeff, mo_occ)
    if mf.istype('ROHF'):
        from pyscf import lib
        lib.logger.warn(mf, 'RCISD method does not support ROHF method. ROHF object is converted to UHF object and UCISD method is called.')
        return UCISD(mf, frozen, mo_coeff, mo_occ)
    return RCISD(mf, frozen, mo_coeff, mo_occ)
def get_data_loader(max_bag_size: int=20) -> Generator[(Batch, None, None)]:
    """Yield EPOCH_SIZE random Batch objects.

    Each batch carries one bag per (table, batch-row) pair: a random bag
    length in [0, max_bag_size) and that many random ids in
    [0, EMBEDDING_DIM), packed into a KeyedJaggedTensor.
    """
    for _ in range(EPOCH_SIZE):
        bag_values = []
        bag_lengths = []
        # One bag per table per batch row (flattened nested loop).
        for _ in range(len(TABLES) * BATCH_SIZE):
            bag_length = torch.randint(max_bag_size, (1,))
            bag_values.append(torch.randint(EMBEDDING_DIM, (int(bag_length.item()),)))
            bag_lengths.append(bag_length)
        yield Batch(
            dense_features=torch.rand((BATCH_SIZE, DENSE_IN_FEATURES)),
            sparse_features=torchrec.KeyedJaggedTensor(keys=['f1', 'f2'], values=torch.cat(bag_values), lengths=torch.cat(bag_lengths)),
            labels=torch.randn((BATCH_SIZE, NUM_CLASSES)),
        )
def max_status(left: TestStatus, right: TestStatus) -> TestStatus:
    """Combine two test statuses, keeping the most severe one.

    Severity: TEST_CRASHED > FAILURE > others; SKIPPED defers to the other
    side; otherwise the left status wins.
    """
    if left == right:
        return left
    if TestStatus.TEST_CRASHED in (left, right):
        return TestStatus.TEST_CRASHED
    if TestStatus.FAILURE in (left, right):
        return TestStatus.FAILURE
    return right if left == TestStatus.SKIPPED else left
class CmdDarkHelp(Command):
    """Replacement `help` command while the room is dark: instead of the
    normal help index it nudges the player toward finding a light source."""
    key = 'help'
    locks = 'cmd:all()'
    help_category = 'TutorialWorld'
    def func(self):
        # No real help in the dark -- just hint at the next step.
        string = "Can't help you until you find some light! Try looking/feeling around for something to burn. You shouldn't give up even if you don't find anything right away."
        self.caller.msg(string)
def markup_inline_word(format, pronunc):
    """Apply a per-voice inline format to a pronunciation string.

    ``format`` may be a printf-style template (bytes/unicode) or a callable;
    ``pronunc`` is coerced to UTF-8 bytes first.

    NOTE(review): uses the Python-2 ``unicode`` builtin -- this code appears
    to target Python 2 (or define ``unicode`` elsewhere); confirm before
    porting.
    """
    pronunc = as_utf8(pronunc)
    # Fall back to the configured inline_format setting ('%s' by default).
    format = checkSetting(format, 'inline_format', '%s')
    if (type(format) in [bytes, unicode]):
        if (type(format) == unicode):
            format = format.encode('utf-8')
        return (format % pronunc)
    else:
        # Callable formats are invoked directly with the UTF-8 bytes.
        return format(pronunc)
def _sparse_to_arrays(sparray, ids=None):
    """Flatten a scipy sparse matrix into row-sorted (head, tail, weight) arrays.

    When *ids* is given, row/column indices are translated through it (its
    length must match the matrix's first dimension); otherwise plain integer
    indices are used and ``ids`` becomes ``arange(n)``. The arrays are
    handed to ``_resolve_islands`` for isolate handling.

    Fixed: the row-sort permutation and the ``data`` gather were duplicated
    in both branches; they are computed once now (behavior unchanged).
    """
    sparray = sparray.tocoo(copy=False)
    # Sort entries by row so each observation's neighbors are contiguous.
    sorter = sparray.row.argsort()
    if ids is not None:
        ids = np.asarray(ids)
        if sparray.shape[0] != ids.shape[0]:
            raise ValueError(f'The length of ids ({ids.shape[0]}) does not match the shape of sparse {sparray.shape}.')
        head = ids[sparray.row][sorter]
        tail = ids[sparray.col][sorter]
    else:
        head = sparray.row[sorter]
        tail = sparray.col[sorter]
        ids = np.arange(sparray.shape[0], dtype=int)
    data = sparray.data[sorter]
    return _resolve_islands(head, tail, ids, data)
def main(path_list, target_file_path, search_item, mask):
    """Interactively drain *path_list*, processing user-chosen batch sizes.

    Loops until every path has been processed: shows the remaining count,
    prompts for how many files to process next, validates the number, and
    hands that many paths to ``process`` (which returns the remainder).
    """
    script_state = True
    while script_state:
        dsz.ui.Echo(list_size_status(path_list, mask), dsz.WARNING)
        num_to_process = user_prompt()
        dsz.ui.Echo('Processing {0} files'.format(num_to_process))
        if (num_to_process > len(path_list)):
            dsz.ui.Echo('Input greater than total paths.', dsz.ERROR)
        elif (num_to_process == 0):
            dsz.ui.Echo('Input is 0, please provide a number greater than 0.', dsz.ERROR)
        else:
            # process() returns the paths still left to handle.
            path_list = process(num_to_process, path_list, target_file_path, search_item)
        if (len(path_list) == 0):
            script_state = False
    # NOTE(review): ``results`` / ``results_dict`` are not defined in this
    # function -- presumably module-level globals populated by process();
    # confirm, otherwise these lines raise NameError.
    to_dict(results)
    to_xml(results_dict)
class Effect3212(BaseEffect):
    """Skill-level passive: boosts the explosion-cloud radius attribute
    ('aoeCloudSize') of charges requiring the Auto-Targeting Missiles skill."""
    type = 'passive'
    def handler(fit, container, context, projectionRange, **kwargs):
        # NOTE(review): no ``self`` parameter -- these effect handlers appear
        # to be invoked as plain functions by the framework; confirm.
        # The per-level bonus only scales when applied from a skill context.
        level = (container.level if ('skill' in context) else 1)
        fit.modules.filteredChargeBoost((lambda mod: mod.charge.requiresSkill('Auto-Targeting Missiles')), 'aoeCloudSize', (container.getModifiedItemAttr('aoeCloudSizeBonus') * level), **kwargs)
class ConcatSentencesDataset(FairseqDataset):
    """Dataset that concatenates the items of several aligned datasets.

    All wrapped datasets must have the same length; item *i* is the
    concatenation (``torch.cat``) of every dataset's item *i*. Size and
    token counts are the sums over the wrapped datasets.
    """

    def __init__(self, *datasets):
        super().__init__()
        self.datasets = datasets
        assert all((len(ds) == len(datasets[0])) for ds in datasets), 'datasets must have the same length'

    def __getitem__(self, index):
        # Concatenate the aligned items along their first dimension.
        return torch.cat([ds[index] for ds in self.datasets])

    def __len__(self):
        return len(self.datasets[0])

    def collater(self, samples):
        # Delegate batching to the first dataset's collater.
        return self.datasets[0].collater(samples)

    def sizes(self):
        return sum(ds.sizes for ds in self.datasets)

    def num_tokens(self, index):
        return sum(ds.num_tokens(index) for ds in self.datasets)

    def size(self, index):
        return sum(ds.size(index) for ds in self.datasets)

    def ordered_indices(self):
        # Follow the first dataset's preferred ordering.
        return self.datasets[0].ordered_indices()

    def supports_prefetch(self):
        return any(getattr(ds, 'supports_prefetch', False) for ds in self.datasets)

    def prefetch(self, indices):
        for ds in self.datasets:
            if getattr(ds, 'supports_prefetch', False):
                ds.prefetch(indices)
class Result():
    """Base class for QIIME 2 results (Artifact and Visualization).

    A Result wraps an Archiver that owns the on-disk archive; instances are
    obtained through load/peek/extract/_from_archiver rather than __init__.

    NOTE(review): peek/extract/load/_from_archiver take ``cls`` and
    type/uuid/format/citations/result read like properties -- the
    @classmethod/@property decorators appear to have been stripped from
    this copy; confirm against upstream before editing.
    """
    # Filename extension appended by save(); subclasses override this.
    extension = None
    def _is_valid_type(cls, type_):
        # The base class accepts any semantic type; subclasses narrow it.
        return True
    def peek(cls, filepath):
        # Read archive metadata (uuid, type, format) without extracting.
        return ResultMetadata(*archive.Archiver.peek(filepath))
    def extract(cls, filepath, output_dir):
        # Unpack the whole archive into output_dir.
        return archive.Archiver.extract(filepath, output_dir)
    def load(cls, filepath):
        # Load an Artifact or Visualization from an archive filepath,
        # reusing an archiver already present in the cache when possible.
        from qiime2.core.cache import get_cache
        cache = get_cache()
        peek = cls.peek(filepath)
        archiver = cache._load_uuid(peek.uuid)
        if (not archiver):
            try:
                archiver = archive.Archiver.load(filepath)
            except OSError as e:
                # errno 28 (ENOSPC): the temp dir filled up mid-extract.
                if (e.errno == 28):
                    temp = tempfile.tempdir
                    raise ValueError(f'There was not enough space left on {temp!r} to extract the artifact {filepath!r}. (Try setting $TMPDIR to a directory with more space, or increasing the size of {temp!r})')
                else:
                    raise e
        # Dispatch to the concrete Result subclass based on the archive type.
        if Artifact._is_valid_type(archiver.type):
            result = Artifact.__new__(Artifact)
        elif Visualization._is_valid_type(archiver.type):
            result = Visualization.__new__(Visualization)
        else:
            raise TypeError(('Cannot load filepath %r into an Artifact or Visualization because type %r is not supported.' % (filepath, archiver.type)))
        # Guard against e.g. Artifact.load on a Visualization archive.
        if ((type(result) is not cls) and (cls is not Result)):
            raise TypeError(('Attempting to load %s with `%s.load`. Use `%s.load` instead.' % (type(result).__name__, cls.__name__, type(result).__name__)))
        result._archiver = archiver
        return result
    def _from_archiver(cls, archiver):
        # Same subclass dispatch as load(), but from an existing Archiver.
        if Artifact._is_valid_type(archiver.type):
            result = Artifact.__new__(Artifact)
        elif Visualization._is_valid_type(archiver.type):
            result = Visualization.__new__(Visualization)
        else:
            raise TypeError(('Cannot load filepath %r into an Artifact or Visualization because type %r is not supported.' % (archiver.path, archiver.type)))
        if ((type(result) is not cls) and (cls is not Result)):
            raise TypeError(('Attempting to load %s with `%s.load`. Use `%s.load` instead.' % (type(result).__name__, cls.__name__, type(result).__name__)))
        result._archiver = archiver
        return result
    def type(self):
        # Semantic type of the archived result.
        return self._archiver.type
    def uuid(self):
        return self._archiver.uuid
    def format(self):
        return self._archiver.format
    def citations(self):
        return self._archiver.citations
    def __init__(self):
        # Direct construction is deliberately blocked; use the named
        # constructors instead.
        raise NotImplementedError(('%(classname)s constructor is private, use `%(classname)s.load`, `%(classname)s.peek`, or `%(classname)s.extract`.' % {'classname': self.__class__.__name__}))
    def __new__(cls):
        result = object.__new__(cls)
        result._archiver = None
        return result
    def __repr__(self):
        return ('<%s: %r uuid: %s>' % (self.__class__.__name__.lower(), self.type, self.uuid))
    def __hash__(self):
        # Identity follows the archive uuid, consistent with __eq__.
        return hash(self.uuid)
    def __eq__(self, other):
        return ((type(self) is type(other)) and (self.uuid == other.uuid))
    def __ne__(self, other):
        return (not (self == other))
    def export_data(self, output_dir):
        # Copy the archive's data directory out to a plain directory.
        distutils.dir_util.copy_tree(str(self._archiver.data_dir), str(output_dir))
        return None
    def _destructor(self):
        return self._archiver._destructor
    def save(self, filepath, ext=None):
        # Save to `filepath`, appending `ext` (default: cls.extension)
        # unless the path already ends with it; returns the final path.
        if (ext is None):
            ext = self.extension
        filepath = str(filepath)
        # Trailing dots would double up with the extension's leading dot.
        filepath = filepath.rstrip('.')
        ext = ('.' + ext.lstrip('.'))
        if (not filepath.endswith(ext)):
            filepath += ext
        self._archiver.save(filepath)
        return filepath
    def _alias(self, provenance_capture):
        # Create a new Result that shares this one's data via hard links,
        # recorded under the given provenance capture.
        def clone_original(into):
            into.rmdir()
            shutil.copytree(str(self._archiver.data_dir), str(into), copy_function=os.link)
        cls = type(self)
        alias = cls.__new__(cls)
        alias._archiver = archive.Archiver.from_data(self.type, self.format, clone_original, provenance_capture)
        return alias
    def validate(self, level=NotImplemented):
        # Verify archive checksums; raise ValidationError listing any
        # added, removed, or changed files. `level` is currently unused.
        diff = self._archiver.validate_checksums()
        if (diff.changed or diff.added or diff.removed):
            error = ''
            if diff.added:
                error += 'Unrecognized files:\n'
                for key in diff.added:
                    error += (' - %r\n' % key)
            if diff.removed:
                error += 'Missing files:\n'
                for key in diff.removed:
                    error += (' - %r\n' % key)
            if diff.changed:
                error += 'Changed files:\n'
                for (key, (exp, obs)) in diff.changed.items():
                    error += (' - %r: %s -> %s\n' % (key, exp, obs))
            raise exceptions.ValidationError(error)
    def result(self):
        return self
def create_window(window):
    # Create and show a native WinForms browser window for `window`.
    # The master window owns the message loop (app.Run()); subsequent
    # windows are created by Invoke-ing onto the existing UI thread.
    def create():
        browser = BrowserView.BrowserForm(window, cache_dir)
        BrowserView.instances[window.uid] = browser
        if window.hidden:
            # Show/Hide once at zero opacity so the native handle is
            # created without a visible flash.
            browser.Opacity = 0
            browser.Show()
            browser.Hide()
            browser.Opacity = 1
        else:
            browser.Show()
        # Signal waiters that at least one window now exists.
        _main_window_created.set()
        if (window.uid == 'master'):
            # Blocks until the application exits.
            app.Run()
    app = WinForms.Application
    if (window.uid == 'master'):
        if is_chromium:
            init_storage()
        if (sys.getwindowsversion().major >= 6):
            # Vista and later: opt into DPI awareness before creating windows.
            windll.user32.SetProcessDPIAware()
        if is_cef:
            CEF.init(window, cache_dir)
        # WinForms requires an STA thread for the UI message loop.
        thread = Thread(ThreadStart(create))
        thread.SetApartmentState(ApartmentState.STA)
        thread.Start()
        thread.Join()
    else:
        # Secondary window: wait for the master, then marshal the creation
        # call onto the existing UI thread.
        _main_window_created.wait()
        i = list(BrowserView.instances.values())[0]
        i.Invoke(Func[Type](create))
class SwitchGraphDataRegion(GraphDataRegion):
    """Graph data region describing one thread (pid/tid/comm) in the
    scheduler-switch view."""

    def __init__(self, key, exec_comm_id, pid, tid, comm, thread_id, comm_id):
        super(SwitchGraphDataRegion, self).__init__(key)
        # Display label, e.g. "1234 / 5678 bash".
        self.title = ' '.join([str(pid), '/', str(tid), comm])
        # Fixed-width sort key: pid, exec comm id, tid columns.
        self.ordinal = ('%16s%8s%16s' % (pid, exec_comm_id, tid))
        self.exec_comm_id = exec_comm_id
        self.pid = pid
        self.tid = tid
        self.comm = comm
        self.thread_id = thread_id
        self.comm_id = comm_id
class PixelShuffleBlcok(nn.Module):
    """4x upsampling head: feature conv + LeakyReLU, two PixelShuffle(2)
    stages, and a final conv to the output channel count.

    (Class name keeps the original's spelling for caller compatibility.)
    """

    def __init__(self, in_feat, num_feat, num_out_ch):
        super(PixelShuffleBlcok, self).__init__()
        self.conv_before_upsample = nn.Sequential(
            nn.Conv2d(in_feat, num_feat, 3, 1, 1),
            nn.LeakyReLU(inplace=True),
        )
        # Each stage expands channels 4x, then PixelShuffle(2) trades them
        # for a 2x spatial upscale, restoring num_feat channels.
        self.upsample = nn.Sequential(
            nn.Conv2d(num_feat, (4 * num_feat), 3, 1, 1),
            nn.PixelShuffle(2),
            nn.Conv2d(num_feat, (4 * num_feat), 3, 1, 1),
            nn.PixelShuffle(2),
        )
        self.conv_last = nn.Conv2d(num_feat, num_out_ch, 3, 1, 1)

    def forward(self, x):
        features = self.conv_before_upsample(x)
        upsampled = self.upsample(features)
        return self.conv_last(upsampled)
_flax
class FlaxBigBirdModelTest(FlaxModelTesterMixin, unittest.TestCase):
    """Test suite for the Flax BigBird model family.

    NOTE(review): the bare `_flax` fragment above this class and the
    methods that merely delegate to super() suggest stripped decorators
    (e.g. @slow / @require_flax); confirm against the upstream
    transformers test file.
    """
    # All heads under test when flax is installed; empty tuple otherwise.
    all_model_classes = ((FlaxBigBirdForCausalLM, FlaxBigBirdModel, FlaxBigBirdForPreTraining, FlaxBigBirdForMaskedLM, FlaxBigBirdForMultipleChoice, FlaxBigBirdForQuestionAnswering, FlaxBigBirdForSequenceClassification, FlaxBigBirdForTokenClassification) if is_flax_available() else ())
    # Attention-probability checks are disabled for BigBird (see
    # test_attention_outputs / check_pt_flax_outputs below).
    test_attn_probs = False
    test_mismatched_shapes = False
    def setUp(self):
        self.model_tester = FlaxBigBirdModelTester(self)
    def test_from_pretrained_save_pretrained(self):
        super().test_from_pretrained_save_pretrained()
    def test_from_pretrained_with_no_automatic_init(self):
        super().test_from_pretrained_with_no_automatic_init()
    def test_no_automatic_init(self):
        super().test_no_automatic_init()
    def test_hidden_states_output(self):
        super().test_hidden_states_output()
    def test_model_from_pretrained(self):
        # Smoke-test loading the public checkpoint for every head.
        for model_class_name in self.all_model_classes:
            model = model_class_name.from_pretrained('google/bigbird-roberta-base')
            outputs = model(np.ones((1, 1)))
            self.assertIsNotNone(outputs)
    def test_attention_outputs(self):
        # Only validated when attention probabilities are testable.
        if self.test_attn_probs:
            super().test_attention_outputs()
    def test_jit_compilation(self):
        # Outputs must have identical shapes with and without JIT.
        # NOTE(review): upstream wraps model_jitted with @jax.jit; the
        # decorator appears to have been stripped from this copy.
        (config, inputs_dict) = self.model_tester.prepare_config_and_inputs_for_common()
        for model_class in self.all_model_classes:
            with self.subTest(model_class.__name__):
                prepared_inputs_dict = self._prepare_for_class(inputs_dict, model_class)
                model = model_class(config)
                def model_jitted(input_ids, attention_mask=None, **kwargs):
                    return model(input_ids=input_ids, attention_mask=attention_mask, **kwargs)
                with self.subTest('JIT Enabled'):
                    jitted_outputs = model_jitted(**prepared_inputs_dict).to_tuple()
                with self.subTest('JIT Disabled'):
                    with jax.disable_jit():
                        outputs = model_jitted(**prepared_inputs_dict).to_tuple()
                self.assertEqual(len(outputs), len(jitted_outputs))
                for (jitted_output, output) in zip(jitted_outputs, outputs):
                    self.assertEqual(jitted_output.shape, output.shape)
    def check_pt_flax_outputs(self, fx_outputs, pt_outputs, model_class, tol=1e-05, name='outputs', attributes=None):
        # Attention tensors are skipped (test_attn_probs is False).
        if name.startswith('outputs.attentions'):
            return
        else:
            super().check_pt_flax_outputs(fx_outputs, pt_outputs, model_class, tol, name, attributes)
class Hotel(Accommodation):
    """Accommodation with a room number; 0 means no room assigned yet."""

    roomNumber: int = 0

    def __init__(self, name: str='Hotel'):
        self.name = name

    def setRoomNumber(self, n: int) -> None:
        self.roomNumber = n

    def getRoomNumber(self) -> int:
        return self.roomNumber

    def getLocation(self) -> str:
        # An unassigned room (0) yields an empty location string.
        if self.roomNumber != 0:
            return f'Room number {self.roomNumber}'
        return ''
(name='test-dist')
def test_dist(session: nox.Session) -> None:
    """Build the sdist and wheel into a temp dir, then queue install tests
    for every requested Python version.

    NOTE(review): the `(name='test-dist')` fragment above this function
    looks like a stripped `@nox.session(...)` decorator; confirm against
    the original noxfile.
    """
    tmp_dir = Path(session.create_tmp())
    dist = (tmp_dir / 'dist')
    _build(session, dist)
    # Default to every supported interpreter unless versions were passed.
    python_versions = (session.posargs or PYTHON_ALL_VERSIONS)
    for version in python_versions:
        session.notify(f'_test_sdist-{version}', [str(dist)])
        session.notify(f'_test_wheel-{version}', [str(dist)])
def test_var_replacement():
    # ADVI symbolic-node sampling must follow shared-size changes and
    # `more_replacements`: after resizing `inp_size` and substituting the
    # input, the sampled node's shape must track the new size.
    X_mean = pm.floatX(np.linspace(0, 10, 10))
    y = pm.floatX(np.random.normal((X_mean * 4), 0.05))
    inp_size = pytensor.shared(np.array(10, dtype='int64'), name='inp_size')
    with pm.Model():
        inp = pm.Normal('X', X_mean, size=(inp_size,))
        coef = pm.Normal('b', 4.0)
        mean = (inp * coef)
        pm.Normal('y', mean, 0.1, shape=inp.shape, observed=y)
        advi = pm.fit(100)
    # Baseline: shape matches the original shared size.
    assert (advi.sample_node(mean).eval().shape == (10,))
    # Grow the shared size and replace the input variable.
    inp_size.set_value(11)
    x_new = pm.floatX(np.linspace(0, 10, 11))
    assert (advi.sample_node(mean, more_replacements={inp: x_new}).eval().shape == (11,))
.django_db
def test_django_assert_num_queries_db_connection(django_assert_num_queries: DjangoAssertNumQueries) -> None:
    # The fixture must accept an explicit connection, accept None
    # (falls back to the default connection), and fail fast on a
    # non-connection object.
    from django.db import connection
    with django_assert_num_queries(1, connection=connection):
        Item.objects.create(name='foo')
    with django_assert_num_queries(1, connection=None):
        Item.objects.create(name='foo')
    # A bogus "connection" (False) raises AttributeError when used.
    with pytest.raises(AttributeError):
        with django_assert_num_queries(1, connection=False):
            pass
def build_from_cfg(cfg, registry, default_args=None):
    """Build an object from a config dict via a registry lookup.

    Args:
        cfg (dict): Config dict; must contain the key "NAME" (either here
            or in ``default_args``).
        registry (Registry): Registry in which "NAME" is looked up.
        default_args (dict | None): Defaults merged into ``cfg``.

    Returns:
        The constructed object, i.e. ``obj_cls(cfg)``.

    Raises:
        TypeError: If ``cfg``/``registry``/``default_args`` have the wrong
            type, or "NAME" is neither a string nor a class.
        KeyError: If "NAME" is missing, or not found in the registry.
    """
    if (not isinstance(cfg, dict)):
        raise TypeError(f'cfg must be a dict, but got {type(cfg)}')
    if ('NAME' not in cfg):
        if ((default_args is None) or ('NAME' not in default_args)):
            raise KeyError(f'''`cfg` or `default_args` must contain the key "NAME", but got {cfg}
{default_args}''')
    if (not isinstance(registry, Registry)):
        raise TypeError(f'registry must be an mmcv.Registry object, but got {type(registry)}')
    if (not (isinstance(default_args, dict) or (default_args is None))):
        raise TypeError(f'default_args must be a dict or None, but got {type(default_args)}')
    if (default_args is not None):
        cfg = config.merge_new_config(cfg, default_args)
    obj_type = cfg.get('NAME')
    if isinstance(obj_type, str):
        obj_cls = registry.get(obj_type)
        if (obj_cls is None):
            raise KeyError(f'{obj_type} is not in the {registry.name} registry')
    elif inspect.isclass(obj_type):
        # A class may be passed directly instead of a registered name.
        obj_cls = obj_type
    else:
        raise TypeError(f'type must be a str or valid type, but got {type(obj_type)}')
    try:
        return obj_cls(cfg)
    except Exception as e:
        # Re-raise with the class name prepended, chaining the original
        # exception (fix: `from e` preserves the full traceback, which the
        # bare re-raise previously discarded).
        raise type(e)(f'{obj_cls.__name__}: {e}') from e
class GetInlineBotResults():
    """Client mixin implementing ``get_inline_bot_results``."""
    async def get_inline_bot_results(self: 'pyrogram.Client', bot: Union[(int, str)], query: str='', offset: str='', latitude: float=None, longitude: float=None):
        """Ask an inline bot for results matching ``query``.

        Args:
            bot: The target bot's id or username (resolved to a peer).
            query: Inline query text (may be empty).
            offset: Pagination offset from a previous call.
            latitude, longitude: Optional geo point; attached only when
                both are provided.

        Raises:
            TimeoutError: If the bot does not answer in time.
        """
        try:
            return (await self.invoke(raw.functions.messages.GetInlineBotResults(bot=(await self.resolve_peer(bot)), peer=raw.types.InputPeerSelf(), query=query, offset=offset, geo_point=(raw.types.InputGeoPoint(lat=latitude, long=longitude) if ((latitude is not None) and (longitude is not None)) else None))))
        except UnknownError as e:
            # Telegram reports an unanswered inline query as -503 "Timeout".
            if ((e.value.error_code == (- 503)) and (e.value.error_message == 'Timeout')):
                raise TimeoutError("The inline bot didn't answer in time") from None
            else:
                raise e
class CocoEval(keras.callbacks.Callback):
    """Keras callback that runs COCO evaluation at the end of each epoch.

    Writes the standard COCO AP/AR metrics into ``logs`` and, when a
    TensorBoard callback with a live writer is supplied, into TensorBoard
    via the TF1 Summary API.
    """
    def __init__(self, generator, tensorboard=None, threshold=0.05):
        # generator: validation data generator handed to evaluate_coco.
        # tensorboard: optional keras TensorBoard callback whose writer is reused.
        # threshold: detection score threshold used during evaluation.
        self.generator = generator
        self.threshold = threshold
        self.tensorboard = tensorboard
        super(CocoEval, self).__init__()
    def on_epoch_end(self, epoch, logs=None):
        logs = (logs or {})
        # Labels in the fixed order pycocotools reports its 12 stats.
        coco_tag = ['AP [ IoU=0.50:0.95 | area= all | maxDets=100 ]', 'AP [ IoU=0.50 | area= all | maxDets=100 ]', 'AP [ IoU=0.75 | area= all | maxDets=100 ]', 'AP [ IoU=0.50:0.95 | area= small | maxDets=100 ]', 'AP [ IoU=0.50:0.95 | area=medium | maxDets=100 ]', 'AP [ IoU=0.50:0.95 | area= large | maxDets=100 ]', 'AR [ IoU=0.50:0.95 | area= all | maxDets= 1 ]', 'AR [ IoU=0.50:0.95 | area= all | maxDets= 10 ]', 'AR [ IoU=0.50:0.95 | area= all | maxDets=100 ]', 'AR [ IoU=0.50:0.95 | area= small | maxDets=100 ]', 'AR [ IoU=0.50:0.95 | area=medium | maxDets=100 ]', 'AR [ IoU=0.50:0.95 | area= large | maxDets=100 ]']
        coco_eval_stats = evaluate_coco(self.generator, self.model, self.threshold)
        if ((coco_eval_stats is not None) and (self.tensorboard is not None) and (self.tensorboard.writer is not None)):
            # TF1-style summary writing through the TensorBoard callback.
            import tensorflow as tf
            summary = tf.Summary()
            for (index, result) in enumerate(coco_eval_stats):
                summary_value = summary.value.add()
                summary_value.simple_value = result
                summary_value.tag = '{}. {}'.format((index + 1), coco_tag[index])
            self.tensorboard.writer.add_summary(summary, epoch)
            # NOTE(review): this assignment sits outside the loop here, so
            # only the LAST metric lands in `logs`; upstream keras-retinanet
            # assigns inside the loop -- confirm the intended indentation.
            logs[coco_tag[index]] = result
def prepare_roidb(imdb):
    """Enrich every imdb.roidb entry in place with image metadata and
    per-ROI max-overlap statistics (class index and overlap value)."""
    roidb = imdb.roidb
    # COCO / Visual Genome datasets provide sizes elsewhere; skip the
    # expensive per-image open for those.
    needs_sizes = not (imdb.name.startswith('coco') or imdb.name.startswith('vg'))
    if needs_sizes:
        sizes = [PIL.Image.open(imdb.image_path_at(i)).size for i in range(imdb.num_images)]
    for i in range(len(imdb.image_index)):
        entry = roidb[i]
        entry['img_id'] = imdb.image_id_at(i)
        entry['image'] = imdb.image_path_at(i)
        if needs_sizes:
            entry['width'], entry['height'] = sizes[i]
        gt_overlaps = entry['gt_overlaps'].toarray()
        # Best-matching ground-truth class and its overlap, per ROI.
        max_overlaps = gt_overlaps.max(axis=1)
        max_classes = gt_overlaps.argmax(axis=1)
        entry['max_classes'] = max_classes
        entry['max_overlaps'] = max_overlaps
        # Sanity checks: zero overlap implies background (class 0),
        # positive overlap implies a foreground class.
        zero_inds = np.where(max_overlaps == 0)[0]
        assert all(max_classes[zero_inds] == 0)
        nonzero_inds = np.where(max_overlaps > 0)[0]
        assert all(max_classes[nonzero_inds] != 0)
class Paint(object):
    """Minimal Tkinter drawing app: pen, brush, eraser, colour picker and
    a size slider over a 600x600 canvas.

    Fix: activate_button() previously called ``self.activate_button.config``
    (``.config`` on the bound method itself, an AttributeError on every
    tool switch); it must raise the previously active *button*.
    """

    # Defaults shared by all instances.
    pen_size = 5.0
    color = 'black'

    def __init__(self):
        self.root = Tk()
        self.pen_button = Button(self.root, text='pen', command=self.use_pen)
        self.pen_button.grid(row=0, column=0)
        self.brush_button = Button(self.root, text='brush', command=self.use_brush)
        self.brush_button.grid(row=0, column=1)
        self.color_button = Button(self.root, text='color', command=self.choose_color)
        self.color_button.grid(row=0, column=2)
        self.eraser_button = Button(self.root, text='eraser', command=self.use_eraser)
        self.eraser_button.grid(row=0, column=3)
        self.choose_size_button = Scale(self.root, from_=1, to=10, orient=HORIZONTAL)
        self.choose_size_button.grid(row=0, column=4)
        self.c = Canvas(self.root, bg='white', width=600, height=600)
        self.c.grid(row=1, columnspan=5)
        self.setup()
        self.root.mainloop()

    def setup(self):
        """Initialise drawing state and bind mouse events."""
        self.old_x = None
        self.old_y = None
        self.line_width = self.choose_size_button.get()
        self.color = self.color
        self.eraser_on = False
        self.active_button = self.pen_button
        self.c.bind('<B1-Motion>', self.print)
        self.c.bind('<ButtonRelease-1>', self.reset)

    def use_pen(self):
        self.activate_button(self.pen_button)

    def use_brush(self):
        self.activate_button(self.brush_button)

    def choose_color(self):
        # Picking a colour implicitly leaves eraser mode.
        self.eraser_on = False
        self.color = askcolor(color=self.color)[1]

    def use_eraser(self):
        self.activate_button(self.eraser_button, eraser_mode=True)

    def activate_button(self, some_button, eraser_mode=False):
        """Visually depress `some_button` and raise the previous one."""
        # FIX: raise the previously active button (was
        # `self.activate_button.config(...)` -- a call on the method itself).
        self.active_button.config(relief=RAISED)
        some_button.config(relief=SUNKEN)
        self.active_button = some_button
        self.eraser_on = eraser_mode

    def print(self, event):
        """Draw a line segment from the last pointer position (drag handler)."""
        self.line_width = self.choose_size_button.get()
        # Erasing just paints with the canvas background colour.
        paint_color = ('white' if self.eraser_on else self.color)
        if (self.old_x and self.old_y):
            self.c.create_line(self.old_x, self.old_y, event.x, event.y, width=self.line_width, fill=paint_color, capstyle=ROUND, smooth=TRUE, splinesteps=36)
        self.old_x = event.x
        self.old_y = event.y

    def reset(self, event):
        # Button release: forget the last point so strokes don't connect.
        (self.old_x, self.old_y) = (None, None)
class TestCygwinCCompiler(support.TempdirManager):
    """Tests for distutils' cygwinccompiler helpers (CygwinCCompiler,
    check_config_h, get_msvcr).

    Fix: the platform-guard decorators had been mangled into bare
    ``.skipif(...)`` statements (a syntax error); restored as
    ``@pytest.mark.skipif`` marks, matching pytest's string-condition form.
    """

    def _get_config_h_filename(self):
        # Hook used by check_config_h(): point it at our temporary python.h.
        # NOTE(review): self.python_h is presumably created by a setup
        # fixture outside this excerpt -- confirm.
        return self.python_h

    @pytest.mark.skipif('sys.platform != "cygwin"')
    @pytest.mark.skipif('not os.path.exists("/usr/lib/libbash.dll.a")')
    def test_find_library_file(self):
        from distutils.cygwinccompiler import CygwinCCompiler
        compiler = CygwinCCompiler()
        link_name = 'bash'
        linkable_file = compiler.find_library_file(['/usr/lib'], link_name)
        assert (linkable_file is not None)
        assert os.path.exists(linkable_file)
        assert (linkable_file == f'/usr/lib/lib{link_name:s}.dll.a')

    @pytest.mark.skipif('sys.platform != "cygwin"')
    def test_runtime_library_dir_option(self):
        from distutils.cygwinccompiler import CygwinCCompiler
        compiler = CygwinCCompiler()
        assert (compiler.runtime_library_dir_option('/foo') == [])

    def test_check_config_h(self):
        # GCC-built interpreter -> OK straight from sys.version.
        sys.version = '2.6.1 (r261:67515, Dec 6 2008, 16:42:21) \n[GCC 4.0.1 (Apple Computer, Inc. build 5370)]'
        assert (check_config_h()[0] == CONFIG_H_OK)
        # No compiler marker -> falls back to scanning python.h contents.
        sys.version = 'something without the *CC word'
        assert (check_config_h()[0] == CONFIG_H_UNCERTAIN)
        self.write_file(self.python_h, 'xxx')
        assert (check_config_h()[0] == CONFIG_H_NOTOK)
        self.write_file(self.python_h, 'xxx __GNUC__ xxx')
        assert (check_config_h()[0] == CONFIG_H_OK)

    def test_get_msvcr(self):
        # Non-MSVC build -> no msvcr runtime needed.
        sys.version = '2.6.1 (r261:67515, Dec 6 2008, 16:42:21) \n[GCC 4.0.1 (Apple Computer, Inc. build 5370)]'
        assert (get_msvcr() is None)
        # Each MSC toolchain version maps to its CRT library name.
        sys.version = '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) [MSC v.1300 32 bits (Intel)]'
        assert (get_msvcr() == ['msvcr70'])
        sys.version = '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) [MSC v.1310 32 bits (Intel)]'
        assert (get_msvcr() == ['msvcr71'])
        sys.version = '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) [MSC v.1400 32 bits (Intel)]'
        assert (get_msvcr() == ['msvcr80'])
        sys.version = '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) [MSC v.1500 32 bits (Intel)]'
        assert (get_msvcr() == ['msvcr90'])
        sys.version = '3.10.0 (tags/v3.10.0:b494f59, Oct  4 2021, 18:46:30) [MSC v.1929 32 bit (Intel)]'
        assert (get_msvcr() == ['vcruntime140'])
        # Unknown MSC version -> error.
        sys.version = '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) [MSC v.2000 32 bits (Intel)]'
        with pytest.raises(ValueError):
            get_msvcr()
_lazy('cudf')
def get_device_memory_objects_register_cudf():
    """Register handlers that enumerate the device-memory buffers backing
    cuDF containers (frames, series, indexes).

    NOTE(review): the bare ``(cudf.core.frame.Frame)`` lines before each
    inner function look like stripped ``@...register(...)``-style dispatch
    decorators; confirm against the original source before editing.
    """
    import cudf.core.frame
    import cudf.core.index
    import cudf.core.multiindex
    import cudf.core.series
    (cudf.core.frame.Frame)
    def get_device_memory_objects_cudf_frame(obj):
        # A frame's device memory is the union of its columns' buffers.
        ret = []
        for col in obj._data.columns:
            ret += dispatch(col)
        return ret
    (cudf.core.indexed_frame.IndexedFrame)
    def get_device_memory_objects_cudf_indexed_frame(obj):
        # Indexed frames additionally own their index's buffers.
        return (dispatch(obj._index) + get_device_memory_objects_cudf_frame(obj))
    (cudf.core.series.Series)
    def get_device_memory_objects_cudf_series(obj):
        return (dispatch(obj._index) + dispatch(obj._column))
    (cudf.core.index.RangeIndex)
    def get_device_memory_objects_cudf_range_index(obj):
        # RangeIndex is computed lazily -- it owns no device memory.
        return []
    (cudf.core.index.Index)
    def get_device_memory_objects_cudf_index(obj):
        return dispatch(obj._values)
    (cudf.core.multiindex.MultiIndex)
    def get_device_memory_objects_cudf_multiindex(obj):
        return dispatch(obj._columns)
def process_pattern(tree, vars):
    # Recursively convert a parsed pattern-AST node into a plain structure
    # of dicts/lists describing the match, collecting bound variable names
    # into `vars` (mutated in place).
    #
    # NOTE(review): the child indices (children[1], children[2], ...) mirror
    # the grammar's production layout; verify against the grammar definition
    # before changing any index.
    if ((len(tree.children) > 1) and isinstance(tree.children[1], Node) and (tree.children[1].label == 'pattern_object_list')):
        # Case 1: a list of pattern objects, optionally binding the parent.
        list = tree.children[1]
        res = []
        for l in list.children:
            if (not isinstance(l, Node)):
                continue
            res.append(process_pattern(l, vars))
        if len(tree.children[3].children):
            # Optional "as <var>" clause attached to the whole list.
            var = getText(tree.children[3].children[1])
            res.append({'bind_parent_to': var})
            vars.append(var)
        return res
    elif ((len(tree.children) == 1) and isinstance(tree.children[0], Node) and (tree.children[0].label == 'pattern_object_element')):
        # Case 2: a single wrapped element -- unwrap and recurse.
        return process_pattern(tree.children[0], vars)
    else:
        # Case 3: a leaf element: a name followed by an optional qualifier.
        res = {'match': getText(tree.children[0])}
        if (isinstance(tree.children[2], Node) and (tree.children[2].label == 'pattern_object')):
            # Nested sub-pattern.
            res['pattern'] = process_pattern(tree.children[2], vars)
            return res
        if (tree.children[2].type == 'STRING_LITERAL'):
            # Literal value condition.
            res['const_cond'] = getText(tree.children[2])
            return res
        if (tree.children[2].type == 'NAME'):
            # Condition against a previously-bound variable.
            res['var_cond'] = getText(tree.children[2])
            return res
        if (tree.children[2].type == 'WHERE'):
            # Free-form expression condition.
            res['expr_cond'] = getText(tree.children[3])
            return res
        if (tree.children[2].type == 'AS'):
            # "as <var>" binding, optionally followed by a where-expression.
            res['bind_to'] = getText(tree.children[3])
            vars.append(res['bind_to'])
            if (len(tree.children) == 6):
                res['expr_cond'] = getText(tree.children[5])
            return res
class FocalLoss(nn.Module):
    """Sigmoid focal loss, normalised by the number of positive targets
    and scaled by ``loss_weight``."""

    def __init__(self, alpha: float=0.25, gamma: float=2.0, loss_weight: float=2.0) -> None:
        super(FocalLoss, self).__init__()
        self.alpha = alpha
        self.gamma = gamma
        self.loss_weight = loss_weight

    def forward(self, pred: torch.Tensor, target: torch.Tensor, mask: torch.Tensor=None) -> torch.Tensor:
        # Optionally restrict the loss to the masked elements.
        if mask is not None:
            pred = pred[mask]
            target = target[mask]
        prob = pred.sigmoid()
        target = target.type_as(pred)
        # pt: probability assigned to the wrong class, per element.
        pt = (1 - prob) * target + prob * (1 - target)
        alpha_factor = self.alpha * target + (1 - self.alpha) * (1 - target)
        focal_weight = alpha_factor * pt.pow(self.gamma)
        ce = F.binary_cross_entropy_with_logits(pred, target, reduction='none')
        weighted = ce * focal_weight
        # Normalise by the positive-target count rather than element count.
        loss = weighted.sum() / (target > 0).float().sum()
        return loss * self.loss_weight
class CommonOptions():
    """Options shared by all ReactPy server integrations.

    NOTE(review): the annotated fields with defaults plus __post_init__
    suggest a stripped @dataclass decorator -- confirm against upstream.
    """
    # Elements placed in the document <head> (title + favicon by default).
    head: ((Sequence[VdomDict] | VdomDict) | str) = (html.title('ReactPy'), html.link({'rel': 'icon', 'href': '/_reactpy/assets/reactpy-logo.ico', 'type': 'image/x-icon'}))
    # Route prefix for everything the integration serves.
    url_prefix: str = ''
    # Whether to serve the default index route at the prefix root.
    serve_index_route: bool = True
    def __post_init__(self) -> None:
        # Validate eagerly so misconfiguration fails at startup.
        if (self.url_prefix and (not self.url_prefix.startswith('/'))):
            msg = "Expected 'url_prefix' to start with '/'"
            raise ValueError(msg)
class PurePyShpWrapper(fileio.FileIO):
    """Pure-Python shapefile (.shp/.shx) reader/writer FileIO plugin.

    Read mode opens the underlying shp_file immediately; write mode defers
    creation until the first shape is written so the shape type can be
    inferred from that shape.
    """
    FORMATS = ['shp', 'shx']
    MODES = ['w', 'r', 'wb', 'rb']
    def __init__(self, *args, **kwargs):
        fileio.FileIO.__init__(self, *args, **kwargs)
        self.dataObj = None
        if ((self.mode == 'r') or (self.mode == 'rb')):
            self.__open()
        elif ((self.mode == 'w') or (self.mode == 'wb')):
            self.__create()
    def __len__(self) -> int:
        # Number of shapes in the open file; 0 before any data exists.
        if (self.dataObj is not None):
            return len(self.dataObj)
        else:
            return 0
    def __open(self):
        # Open for reading and cache header/bbox; map the file's shape-type
        # string to the corresponding cg shape class.
        self.dataObj = shp_file(self.dataPath)
        self.header = self.dataObj.header
        self.bbox = self.dataObj.bbox
        try:
            self.type = STRING_TO_TYPE[self.dataObj.type()]
        except KeyError:
            msg = '%s does not support shapes of type: %s.'
            msg = (msg % (self.__class__.__name__, self.dataObj.type()))
            raise TypeError(msg) from None
    def __create(self):
        # Defer file creation: the first write determines the shape type.
        self.write = self.__firstWrite
    def __firstWrite(self, shape):
        # Infer the shape type (incl. POINTM/POINTZ by coordinate count),
        # create the file, then swap in the normal writer and re-dispatch.
        self.type = TYPE_TO_STRING[type(shape)]
        if (self.type == 'POINT'):
            if (len(shape) == 3):
                self.type = 'POINTM'
            if (len(shape) == 4):
                self.type = 'POINTZ'
        self.dataObj = shp_file(self.dataPath, 'w', self.type)
        self.write = self.__writer
        self.write(shape)
    def __writer(self, shape):
        # Convert a cg shape into the record dict expected by shp_file and
        # append it. All shapes must match the file's declared type.
        if (TYPE_TO_STRING[type(shape)] != self.type):
            raise TypeError(('This file only supports %s type shapes.' % self.type))
        rec = {}
        rec['Shape Type'] = shp_file.SHAPE_TYPES[self.type]
        if (self.type == 'POINT'):
            rec['X'] = shape[0]
            rec['Y'] = shape[1]
            # Extra coordinates become measure (M) and elevation (Z).
            if (len(shape) > 2):
                rec['M'] = shape[2]
            if (len(shape) > 3):
                rec['Z'] = shape[3]
            shape = rec
        else:
            rec['BBOX Xmin'] = shape.bounding_box.left
            rec['BBOX Ymin'] = shape.bounding_box.lower
            rec['BBOX Xmax'] = shape.bounding_box.right
            rec['BBOX Ymax'] = shape.bounding_box.upper
            if (self.type == 'POLYGON'):
                # Holes are stored reversed (counter-clockwise -> clockwise
                # ring orientation convention of the format).
                holes = [hole[::(- 1)] for hole in shape.holes if hole]
                rec['NumParts'] = (len(shape.parts) + len(holes))
                all_parts = (shape.parts + holes)
            else:
                rec['NumParts'] = len(shape.parts)
                all_parts = shape.parts
            # Offsets of each part's first vertex in the flat vertex list.
            partsIndex = [0]
            for l_ in [len(part) for part in all_parts][:(- 1)]:
                partsIndex.append((partsIndex[(- 1)] + l_))
            rec['Parts Index'] = partsIndex
            verts = sum(all_parts, [])
            verts = list(verts)
            rec['NumPoints'] = len(verts)
            rec['Vertices'] = verts
        self.dataObj.add_shape(rec)
        self.pos += 1
    def _read(self):
        # Read the next record and rebuild a cg shape; returns None at EOF.
        try:
            rec = self.dataObj.get_shape(self.pos)
        except IndexError:
            return None
        self.pos += 1
        if (self.dataObj.type() == 'POINT'):
            shp = self.type((rec['X'], rec['Y']))
        elif (self.dataObj.type() == 'POINTZ'):
            shp = self.type((rec['X'], rec['Y']))
            shp.Z = rec['Z']
            shp.M = rec['M']
        elif (rec['NumParts'] > 1):
            # Split the flat vertex list back into parts via Parts Index.
            partsIndex = list(rec['Parts Index'])
            partsIndex.append(None)
            parts = [rec['Vertices'][partsIndex[i]:partsIndex[(i + 1)]] for i in range(rec['NumParts'])]
            if (self.dataObj.type() == 'POLYGON'):
                # Clockwise rings are outer boundaries; counter-clockwise
                # rings are holes.
                is_cw = [cg.is_clockwise(part) for part in parts]
                vertices = [part for (part, cw) in zip(parts, is_cw, strict=True) if cw]
                holes = [part for (part, cw) in zip(parts, is_cw, strict=True) if (not cw)]
                if (not holes):
                    holes = None
                shp = self.type(vertices, holes)
            else:
                vertices = parts
                shp = self.type(vertices)
        elif (rec['NumParts'] == 1):
            vertices = rec['Vertices']
            if ((self.dataObj.type() == 'POLYGON') and (not cg.is_clockwise(vertices))):
                # Single-ring polygons are silently reoriented; warn so the
                # caller knows the topology was fixed.
                msg = 'SHAPEFILE WARNING: Polygon %d '
                msg += 'topology has been fixed. (ccw -> cw).'
                msg = (msg % self.pos)
                warn(msg, RuntimeWarning, stacklevel=2)
                print(msg)
            shp = self.type(vertices)
        else:
            # Degenerate record with zero parts: return an empty shape.
            warn(('Polygon %d has zero parts.' % self.pos), RuntimeWarning, stacklevel=2)
            shp = self.type([[]])
        # Attach the external id if an id map was supplied, else the position.
        if self.ids:
            shp.id = self.rIds[(self.pos - 1)]
        else:
            shp.id = self.pos
        return shp
    def close(self):
        self.dataObj.close()
        fileio.FileIO.close(self)
def randomFFD(img_name, ffd_type=1, random_type=0, control_points=(20, 20, 20), num_samples=5, **kwargs):
    """Generate ``num_samples`` randomly FFD-deformed copies of an
    image/label pair by shelling out to the zxh* command-line tools.

    For each sample a random free-form deformation field is generated and
    applied to the image and (with nearest-neighbour interpolation) to the
    label volume.

    Args:
        img_name: image filename; the label name is derived by swapping
            ``image_suffix`` for ``label_suffix``.
        ffd_type: 1 -> '-ffd', 2 -> '-ffd2' initialisation.
        random_type: 0 -> gaussian (mu/sigma), 1 -> uniform (a/b).
        control_points: FFD control-point spacing per axis.
        num_samples: number of augmented samples to produce.
        **kwargs: image_suffix, label_suffix, save_path, resave2int, plus
            the distribution parameters (mu/sigma or a/b).

    Raises:
        ValueError: for an unknown ``ffd_type`` or ``random_type``.

    NOTE(review): ``img_name`` is reduced to its basename, so the zxh
    commands assume the working directory contains the files -- confirm.
    """
    img_name = os.path.basename(img_name)
    image_suffix = kwargs.pop('image_suffix', 'image.nii.gz')
    label_suffix = kwargs.pop('label_suffix', 'label.nii.gz')
    lab_name = img_name.replace(image_suffix, label_suffix)
    save_path = kwargs.pop('save_path', './FFD_augmented')
    resave2int = kwargs.pop('resave2int', False)
    if (ffd_type == 1):
        fn = 'ffd'
    elif (ffd_type == 2):
        fn = 'ffd2'
    else:
        raise ValueError(('Unknown FFD type: %s!' % ffd_type))
    # Distribution parameters: (mu, sigma) for gaussian, (a, b) for uniform.
    if (random_type == 0):
        u = kwargs.pop('mu', 0)
        s = kwargs.pop('sigma', 1)
    elif (random_type == 1):
        u = kwargs.pop('a', (- 1))
        s = kwargs.pop('b', 1)
    else:
        raise ValueError(('Unknown random type: %s!' % random_type))
    for i in range(num_samples):
        ffd_marker = os.path.join(save_path, img_name.replace(image_suffix, ('ffd%s' % i)))
        if resave2int:
            # Convert float volumes to int before deforming.
            os.system('zxhimageop -float {img_name:s} -toi'.format(img_name=img_name))
        # Generate the random deformation field.
        os.system('zxhInitPreTransform {ffd_name} -{fn:s} {img_name:s} {spx:d} {spy:d} {spz:d} -genrandom {random_type:d} {u:.1f} {s:.1f}'.format(ffd_name=ffd_marker, fn=fn, img_name=img_name, spx=control_points[0], spy=control_points[1], spz=control_points[2], random_type=random_type, u=u, s=s))
        output_marker = os.path.join(save_path, img_name.replace(image_suffix, ('ffd%s_' % i)))
        # Apply it to the image (default interpolation) ...
        os.system('zxhtransform {target} {source} -o {output} -n 1 -t {ffd_name}'.format(target=img_name, source=img_name, output=(output_marker + image_suffix), ffd_name=(ffd_marker + '.FFD')))
        # ... and to the label volume with nearest-neighbour interpolation.
        os.system('zxhtransform {target} {source} -o {output} -n 1 -t {ffd_name} -nearest'.format(target=lab_name, source=lab_name, output=(output_marker + label_suffix), ffd_name=(ffd_marker + '.FFD')))
def interpolate_background(a, b, blend):
    """Blend two backgrounds: two solid Backgrounds stay solid; any other
    combination is treated as a gradient blend."""
    both_solid = (type(a) is Background) and (type(b) is Background)
    if both_solid:
        return Background(color=interpolate_color(a.color, b.color, blend))
    top = interpolate_color(a.color_top, b.color_top, blend)
    bottom = interpolate_color(a.color_bottom, b.color_bottom, blend)
    return BackgroundGradient(color_top=top, color_bottom=bottom)
def node_view_and_apply_settings(wizard):
    """Show the wizard's pending Game Index settings and offer to save them.

    Fix: the change-detection comparison was inverted (`!=` reported
    "No changes"), so a modified listing was never offered for saving;
    both branches effectively produced the no-op message.
    """
    pp = pprint.PrettyPrinter(indent=4)
    saves = False
    game_index_txt = 'No changes to save for Game Index.'
    if hasattr(wizard, 'game_index_listing'):
        if (wizard.game_index_listing == settings.GAME_INDEX_LISTING):
            # Listing matches stored settings -> nothing to save.
            game_index_txt = 'No changes to save for Game Index.'
        else:
            # Listing changed -> render the settings snippet to persist.
            game_index_txt = ('GAME_INDEX_ENABLED = True\nGAME_INDEX_LISTING = \\\n' + pp.pformat(wizard.game_index_listing))
            saves = True
    text = game_index_txt
    wizard.display(f'''Settings to save:
{text}''')
    if saves:
        if (wizard.ask_yesno('Do you want to save these settings?') == 'yes'):
            wizard.save_output = text
            _save_changes(wizard)
            wizard.display('... saved!')
        else:
            wizard.display('... cancelled.')
    wizard.ask_continue()
    # Return to the start node of the wizard flow.
    node_start(wizard)
def load_kasvs_dh_vectors(vector_data):
    """Parse NIST KASVS DH test-vector lines into a list of dicts.

    Domain parameters (p, q, g) carry over from one vector to the next;
    a 'Result = ' line finalises the current vector.
    """
    def _hex_value(entry):
        # Integer value of the hex text after '='.
        return int(entry.split('=')[1], 16)

    vectors = []
    data: typing.Dict[(str, typing.Any)] = {'fail_z': False, 'fail_agree': False}
    for raw_line in vector_data:
        line = raw_line.strip()
        # Skip blank lines and comments.
        if (not line) or line.startswith('#'):
            continue
        if line.startswith('P = '):
            data['p'] = _hex_value(line)
        elif line.startswith('Q = '):
            data['q'] = _hex_value(line)
        elif line.startswith('G = '):
            data['g'] = _hex_value(line)
        elif line.startswith('Z = '):
            z_hex = line.split('=')[1].strip().encode('ascii')
            data['z'] = binascii.unhexlify(z_hex)
        elif line.startswith('XstatCAVS = '):
            data['x1'] = _hex_value(line)
        elif line.startswith('YstatCAVS = '):
            data['y1'] = _hex_value(line)
        elif line.startswith('XstatIUT = '):
            data['x2'] = _hex_value(line)
        elif line.startswith('YstatIUT = '):
            data['y2'] = _hex_value(line)
        elif line.startswith('Result = '):
            result_str = line.split('=')[1].strip()
            match = KASVS_RESULT_REGEX.match(result_str)
            assert (match is not None)
            if (match.group(1) == 'F'):
                # Failure codes 5 and 10 denote a bad shared secret (Z);
                # all other failures are key-agreement failures.
                if (int(match.group(2)) in (5, 10)):
                    data['fail_z'] = True
                else:
                    data['fail_agree'] = True
            vectors.append(data)
            # Start the next vector, inheriting the domain parameters.
            data = {'p': data['p'], 'q': data['q'], 'g': data['g'], 'fail_z': False, 'fail_agree': False}
    return vectors
def fmt_phi_structure(ps, title='Phi-structure', subsystem=True):
    """Format a phi-structure as an aligned, optionally titled text table.

    Args:
        ps: the phi-structure to render.
        title: header text; a falsy value suppresses the header bar.
        subsystem: when True, append a line naming the subsystem's nodes.

    Returns:
        The formatted multi-line string.
    """
    distinctions = len(ps.distinctions)
    # Relation-derived quantities may be unavailable; render a placeholder
    # instead of computing them.
    if ps.requires_filter_relations:
        relations = sum_phi = sum_phi_r = sii = selectivity = '[requires filter]'
    elif (ps.relations is None):
        relations = sum_phi = sum_phi_r = sii = selectivity = '[not computed]'
    else:
        relations = len(ps.relations)
        sum_phi = ps.sum_phi()
        sum_phi_r = ps.relations.sum_phi()
        sii = ps.system_intrinsic_information()
        selectivity = ps.selectivity()
    columns = [('Distinctions', distinctions), ('Relations', relations), ('_d', ps.sum_phi_distinctions()), ('_r', sum_phi_r), ('', sum_phi), ('Selectivity', selectivity), ('S.I.I.', sii)]
    lines = align_columns(columns)
    if subsystem:
        # Add the subsystem's node list as a trailing text row.
        lines = align_columns((lines + [f'Subsystem: {ps.subsystem.nodes}']), types='tt', split_columns=True)
    body = '\n'.join(lines)
    if title:
        body = header(title, body, HEADER_BAR_1, HEADER_BAR_1)
    return body
class TestGUI(WrapperTester):
    """Round-trip test for the GUI (console-less) script wrapper .exe.

    The generated bar.exe must launch the .pyw script with its arguments
    and produce no console output of its own.
    """
    script_name = 'bar-script.pyw'
    wrapper_source = win_launcher_exe('gui')
    wrapper_name = 'bar.exe'
    # The wrapped script writes repr(argv[2]) into the file named by argv[1].
    script_tmpl = textwrap.dedent("\n    #!%(python_exe)s\n    import sys\n    f = open(sys.argv[1], 'wb')\n    bytes_written = f.write(repr(sys.argv[2]).encode('utf-8'))\n    f.close()\n    ").strip()
    def test_basic(self, tmpdir):
        self.create_script(tmpdir)
        cmd = [str((tmpdir / 'bar.exe')), str((tmpdir / 'test_output.txt')), 'Test Argument']
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
        (stdout, stderr) = proc.communicate()
        # A GUI launcher must not emit anything on stdout/stderr.
        assert (not stdout)
        assert (not stderr)
        with (tmpdir / 'test_output.txt').open('rb') as f_out:
            actual = f_out.read().decode('ascii')
        assert (actual == repr('Test Argument'))
class BackgroundKnowledge(object):
    def __init__(self):
        # Explicit forbidden/required directed edges between concrete nodes.
        self.forbidden_rules_specs: Set[Tuple[(Node, Node)]] = set()
        # Forbidden/required edges expressed as regex patterns on node names.
        self.forbidden_pattern_rules_specs: Set[Tuple[(str, str)]] = set()
        self.required_rules_specs: Set[Tuple[(Node, Node)]] = set()
        self.required_pattern_rules_specs: Set[Tuple[(str, str)]] = set()
        # tier -> set of nodes in that tier, and the inverse node -> tier map.
        self.tier_map: Dict[(int, Set[Node])] = {}
        self.tier_value_map: Dict[(Node, int)] = {}
def add_forbidden_by_node(self, node1: Node, node2: Node):
if ((not isinstance(node1, Node)) or (not isinstance(node2, Node))):
raise TypeError(((('node must not be instance of Node. node1 = ' + str(type(node1))) + ' node2 = ') + str(type(node2))))
self.forbidden_rules_specs.add((node1, node2))
return self
def add_required_by_node(self, node1: Node, node2: Node):
if ((not isinstance(node1, Node)) or (not isinstance(node2, Node))):
raise TypeError(((('node must not be instance of Node. node1 = ' + str(type(node1))) + ' node2 = ') + str(type(node2))))
self.required_rules_specs.add((node1, node2))
return self
def add_forbidden_by_pattern(self, node_pattern1: str, node_pattern2: str):
if ((type(node_pattern1) != str) or (type(node_pattern2) != str)):
raise TypeError(((('node_pattern must be type of str. node_pattern1 = ' + str(type(node_pattern1))) + ' node_pattern2 = ') + str(type(node_pattern2))))
self.forbidden_pattern_rules_specs.add((node_pattern1, node_pattern2))
return self
def add_required_by_pattern(self, node_pattern1: str, node_pattern2: str):
if ((type(node_pattern1) != str) or (type(node_pattern2) != str)):
raise TypeError(((('node_pattern must be type of str. node_pattern1 = ' + str(type(node_pattern1))) + ' node_pattern2 = ') + str(type(node_pattern2))))
self.required_pattern_rules_specs.add((node_pattern1, node_pattern2))
return self
def _ensure_tiers(self, tier: int):
if (type(tier) != int):
raise TypeError(('tier must be int type. tier = ' + str(type(tier))))
for t in range((tier + 1)):
if (not self.tier_map.keys().__contains__(t)):
self.tier_map[t] = set()
def add_node_to_tier(self, node: Node, tier: int):
if ((not isinstance(node, Node)) or (type(tier) != int)):
raise TypeError(((('node must be instance of Node. tier must be int type. node = ' + str(type(node))) + ' tier = ') + str(type(tier))))
if (tier < 0):
raise TypeError(('tier must be a non-negative integer. tier = ' + str(tier)))
self._ensure_tiers(tier)
self.tier_map.get(tier).add(node)
self.tier_value_map[node] = tier
return self
def _is_node_match_regular_expression(self, pattern: str, node: Node) -> bool:
return (re.match(pattern, node.get_name()) is not None)
def is_forbidden(self, node1: Node, node2: Node) -> bool:
if ((not isinstance(node1, Node)) or (not isinstance(node2, Node))):
raise TypeError(((('node1 and node2 must be instance of Node. node1 = ' + str(type(node1))) + ' node2 = ') + str(type(node2))))
for (from_node, to_node) in self.forbidden_rules_specs:
if ((from_node == node1) and (to_node == node2)):
return True
for (from_node_pattern, to_node_pattern) in self.forbidden_pattern_rules_specs:
if (self._is_node_match_regular_expression(from_node_pattern, node1) and self._is_node_match_regular_expression(to_node_pattern, node2)):
return True
if (self.tier_value_map.keys().__contains__(node1) and self.tier_value_map.keys().__contains__(node2)):
if (self.tier_value_map.get(node1) >= self.tier_value_map.get(node2)):
return True
return False
def is_required(self, node1: Node, node2: Node) -> bool:
if ((not isinstance(node1, Node)) or (not isinstance(node2, Node))):
raise TypeError(((('node1 and node2 must be instance of Node. node1 = ' + str(type(node1))) + ' node2 = ') + str(type(node2))))
for (from_node, to_node) in self.required_rules_specs:
if ((from_node == node1) and (to_node == node2)):
return True
for (from_node_pattern, to_node_pattern) in self.required_pattern_rules_specs:
if (self._is_node_match_regular_expression(from_node_pattern, node1) and self._is_node_match_regular_expression(to_node_pattern, node2)):
return True
return False
def remove_forbidden_by_node(self, node1: Node, node2: Node):
if ((not isinstance(node1, Node)) or (not isinstance(node2, Node))):
raise TypeError(((('node must not be instance of Node. node1 = ' + str(type(node1))) + ' node2 = ') + str(type(node2))))
if self.forbidden_rules_specs.__contains__((node1, node2)):
self.forbidden_rules_specs.remove((node1, node2))
return self
def remove_required_by_node(self, node1: Node, node2: Node):
if ((not isinstance(node1, Node)) or (not isinstance(node2, Node))):
raise TypeError(((('node must not be instance of Node. node1 = ' + str(type(node1))) + ' node2 = ') + str(type(node2))))
if self.required_rules_specs.__contains__((node1, node2)):
self.required_rules_specs.remove((node1, node2))
return self
def remove_forbidden_by_pattern(self, node_pattern1: str, node_pattern2: str):
if ((type(node_pattern1) != str) or (type(node_pattern2) != str)):
raise TypeError(((('node_pattern must be type of str. node_pattern1 = ' + str(type(node_pattern1))) + ' node_pattern2 = ') + str(type(node_pattern2))))
if self.forbidden_pattern_rules_specs.__contains__((node_pattern1, node_pattern2)):
self.forbidden_pattern_rules_specs.remove((node_pattern1, node_pattern2))
return self
def remove_required_by_pattern(self, node_pattern1: str, node_pattern2: str):
if ((type(node_pattern1) != str) or (type(node_pattern2) != str)):
raise TypeError(((('node_pattern must be type of str. node_pattern1 = ' + str(type(node_pattern1))) + ' node_pattern2 = ') + str(type(node_pattern2))))
if self.required_pattern_rules_specs.__contains__((node_pattern1, node_pattern2)):
self.required_pattern_rules_specs.remove((node_pattern1, node_pattern2))
return self
def remove_node_from_tier(self, node: Node, tier: int):
if ((not isinstance(node, Node)) or (type(tier) != int)):
raise TypeError(((('node must be instance of Node. tier must be int type. node = ' + str(type(node))) + ' tier = ') + str(type(tier))))
if (tier < 0):
raise TypeError(('tier must be a non-negative integer. tier = ' + str(tier)))
self._ensure_tiers(tier)
if self.tier_map.get(tier).__contains__(node):
self.tier_map.get(tier).remove(node)
if self.tier_value_map.keys().__contains__(node):
self.tier_value_map.pop(node)
return self
def is_in_which_tier(self, node: Node) -> int:
return (self.tier_value_map[node] if self.tier_value_map.__contains__(node) else (- 1)) |
def validate_search(args, val_data, device, model):
    """Evaluate *model* on *val_data* with a freshly sampled architecture choice.

    Samples a choice via random_choice(m=args.m), retrying until the choice is
    not already present in the module-level ``check_dict``.
    NOTE(review): ``check_dict`` is a global defined elsewhere in this file —
    presumably a list of already-evaluated choices; confirm it is reset
    between search runs.

    Returns a dict with the sampled 'op'/'path' plus the mean validation loss
    and top-1 accuracy.
    """
    model.eval()
    choice_dict = {}
    val_loss = 0.0
    val_top1 = AvgrageMeter()
    val_top5 = AvgrageMeter()  # accumulated below but not included in the returned dict
    criterion = nn.CrossEntropyLoss()
    choice = random_choice(m=args.m)
    # Re-sample until the choice has not been evaluated before.
    while (choice in check_dict):
        print('Duplicate Index !')
        choice = random_choice(m=args.m)
    check_dict.append(choice)
    with torch.no_grad():
        for (step, (inputs, targets)) in enumerate(val_data):
            (inputs, targets) = (inputs.to(device), targets.to(device))
            outputs = model(inputs, choice)
            loss = criterion(outputs, targets)
            val_loss += loss.item()
            (prec1, prec5) = accuracy(outputs, targets, topk=(1, 5))
            n = inputs.size(0)
            val_top1.update(prec1.item(), n)
            val_top5.update(prec5.item(), n)
    choice_dict['op'] = choice['op']
    choice_dict['path'] = choice['path']
    # `step` leaks from the loop above: val_data must be non-empty or this
    # raises NameError.
    choice_dict['val_loss'] = (val_loss / (step + 1))
    choice_dict['val_top1'] = val_top1.avg
    return choice_dict
class DefaultStyle(Style):
    """The default Pygments-style highlighting theme (light background)."""
    name = 'default'
    background_color = '#f8f8f8'
    # Token -> style-string mapping.  Style strings follow the Pygments
    # convention: '#rrggbb' colors plus flags like 'bold'/'italic',
    # 'no*' variants to cancel an inherited flag, and 'border:#...'.
    styles = {Whitespace: '#bbbbbb', Comment: 'italic #3D7B7B', Comment.Preproc: 'noitalic #9C6500', Keyword: 'bold #008000', Keyword.Pseudo: 'nobold', Keyword.Type: 'nobold #B00040', Operator: '#666666', Operator.Word: 'bold #AA22FF', Name.Builtin: '#008000', Name.Function: '#0000FF', Name.Class: 'bold #0000FF', Name.Namespace: 'bold #0000FF', Name.Exception: 'bold #CB3F38', Name.Variable: '#19177C', Name.Constant: '#880000', Name.Label: '#767600', Name.Entity: 'bold #717171', Name.Attribute: '#687822', Name.Tag: 'bold #008000', Name.Decorator: '#AA22FF', String: '#BA2121', String.Doc: 'italic', String.Interpol: 'bold #A45A77', String.Escape: 'bold #AA5D1F', String.Regex: '#A45A77', String.Symbol: '#19177C', String.Other: '#008000', Number: '#666666', Generic.Heading: 'bold #000080', Generic.Subheading: 'bold #800080', Generic.Deleted: '#A00000', Generic.Inserted: '#008400', Generic.Error: '#E40000', Generic.Emph: 'italic', Generic.Strong: 'bold', Generic.EmphStrong: 'bold italic', Generic.Prompt: 'bold #000080', Generic.Output: '#717171', Generic.Traceback: '#04D', Error: 'border:#FF0000'}
def export_cli(args):
    """CLI entry point: connect to the instrument at args.ip, pull its
    measurements and save them as CSV at args.csv_path.

    Does nothing beyond logging setup when the connection fails.
    """
    ip = args.ip
    csv_path = args.csv_path
    log_level = logging.INFO
    logger = logging.getLogger(__name__)
    logger.setLevel(log_level)
    # Fix: only attach a handler the first time — the original added a new
    # StreamHandler on every invocation, duplicating every log line.
    if not logger.handlers:
        ch = logging.StreamHandler(sys.stdout)
        formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
        ch.setFormatter(formatter)
        ch.setLevel(log_level)
        logger.addHandler(ch)
    qc = QuickCheck(ip)
    qc.connect()
    if qc.connected:
        qc.get_measurements()
        print(' Saving data to ' + csv_path)
        qc.close()
        qc.measurements.to_csv(csv_path)
class _SectBlockElementIterator():
    """Generates the block-item elements (w:p / w:tbl) belonging to a section.

    Fix: restored the missing decorators.  ``iter_sect_block_elements`` calls
    ``cls(sectPr)`` and therefore must be a classmethod, and ``_sectPrs`` /
    ``_blocks_in_and_above_section_xpath`` are consumed as attributes
    (``self._sectPrs``, f-string interpolation) and therefore must be
    properties — as plain methods every use raised at runtime.
    """

    # Lazily compiled XPath caches; class-level default, assigned on the
    # instance on first use.
    _compiled_blocks_xpath: (etree.XPath | None) = None
    _compiled_count_xpath: (etree.XPath | None) = None

    def __init__(self, sectPr: CT_SectPr):
        self._sectPr = sectPr

    @classmethod
    def iter_sect_block_elements(cls, sectPr: CT_SectPr) -> Iterator[BlockElement]:
        """Generate each block-item element in the section defined by *sectPr*."""
        return cls(sectPr)._iter_sect_block_elements()

    def _iter_sect_block_elements(self) -> Iterator[BlockElement]:
        """Yield this section's blocks, skipping those of all prior sections."""
        (sectPr, sectPrs) = (self._sectPr, self._sectPrs)
        sectPr_idx = sectPrs.index(sectPr)
        # Blocks of earlier sections form a strict prefix of the
        # "in-and-above" list; skipping their count leaves only ours.
        n_blks_to_skip = (0 if (sectPr_idx == 0) else self._count_of_blocks_in_and_above_section(sectPrs[(sectPr_idx - 1)]))
        yield from self._blocks_in_and_above_section(sectPr)[n_blks_to_skip:]

    def _blocks_in_and_above_section(self, sectPr: CT_SectPr) -> Sequence[BlockElement]:
        """All block elements in this section and every section before it."""
        if (self._compiled_blocks_xpath is None):
            self._compiled_blocks_xpath = etree.XPath(self._blocks_in_and_above_section_xpath, namespaces=nsmap, regexp=False)
        xpath = self._compiled_blocks_xpath
        return cast(Sequence[BlockElement], xpath(sectPr))

    @property
    def _blocks_in_and_above_section_xpath(self) -> str:
        """XPath selecting w:p and w:tbl elements up to and including this section."""
        p_sect_term_block = './parent::w:pPr/parent::w:p'
        body_sect_term = 'self::w:sectPr[parent::w:body]'
        pred_ps_and_tbls = 'preceding-sibling::*[self::w:p | self::w:tbl]'
        return f'{p_sect_term_block} | {p_sect_term_block}/{pred_ps_and_tbls} | {body_sect_term}/{pred_ps_and_tbls}'

    def _count_of_blocks_in_and_above_section(self, sectPr: CT_SectPr) -> int:
        """Number of block elements in this section and all prior sections."""
        if (self._compiled_count_xpath is None):
            self._compiled_count_xpath = etree.XPath(f'count({self._blocks_in_and_above_section_xpath})', namespaces=nsmap, regexp=False)
        xpath = self._compiled_count_xpath
        return int(cast(float, xpath(sectPr)))

    @property
    def _sectPrs(self) -> Sequence[CT_SectPr]:
        """All w:sectPr elements of the document, in document order."""
        return self._sectPr.xpath('/w:document/w:body/w:p/w:pPr/w:sectPr | /w:document/w:body/w:sectPr')
class PresetMenu(QtWidgets.QMenu):
    """Context menu offering the operations available for a preset."""

    action_customize: QtGui.QAction
    action_delete: QtGui.QAction
    action_history: QtGui.QAction
    action_export: QtGui.QAction
    action_duplicate: QtGui.QAction
    action_map_tracker: QtGui.QAction
    action_required_tricks: QtGui.QAction
    action_import: QtGui.QAction
    action_view_deleted: QtGui.QAction

    def __init__(self, parent: QtWidgets.QWidget):
        super().__init__(parent)
        # Create every action and assign its label in one pass.
        labelled = (
            ('action_customize', 'Customize'),
            ('action_delete', 'Delete'),
            ('action_history', 'View previous versions'),
            ('action_export', 'Export'),
            ('action_duplicate', 'Duplicate'),
            ('action_map_tracker', 'Open map tracker'),
            ('action_required_tricks', 'View expected trick usage'),
            ('action_import', 'Import'),
            ('action_view_deleted', 'View deleted presets'),
        )
        for attr_name, label in labelled:
            action = QtGui.QAction(parent)
            action.setText(label)
            setattr(self, attr_name, action)
        # Per-preset entries, then a separator, then the global entries.
        for action in (
            self.action_customize,
            self.action_delete,
            self.action_history,
            self.action_export,
            self.action_duplicate,
            self.action_map_tracker,
            self.action_required_tricks,
        ):
            self.addAction(action)
        self.addSeparator()
        self.addAction(self.action_import)
        self.addAction(self.action_view_deleted)
        self.action_view_deleted.setVisible(False)

    def set_preset(self, preset: (VersionedPreset | None)):
        """Enable or disable the menu entries to match *preset*."""
        has_any_preset = preset is not None
        has_valid_preset = has_any_preset
        try:
            if preset is not None:
                preset.get_preset()
        except InvalidPreset:
            has_valid_preset = False
        # Built-in presets cannot be deleted, versioned or exported.
        for action in (self.action_delete, self.action_history):
            action.setEnabled(has_any_preset and not preset.is_included_preset)
        self.action_export.setEnabled(has_valid_preset and not preset.is_included_preset)
        for action in (
            self.action_customize,
            self.action_duplicate,
            self.action_map_tracker,
            self.action_required_tricks,
        ):
            action.setEnabled(has_valid_preset)
def is_hash160(addr):
    """Return True when *addr* looks like a HASH160 hex string.

    Accepts exactly 40 hexadecimal characters (upper or lower case).
    Any falsy value, non-string, wrong length, or non-hex character
    yields False.
    """
    if not addr or not isinstance(addr, str):
        return False
    if len(addr) != 40:
        return False
    # Same character set as the original per-char range checks.
    hex_digits = '0123456789abcdefABCDEF'
    return all(char in hex_digits for char in addr)
class ResNet(nn.Module):
    """CIFAR-style ResNet (3x3 stem, four stages, adaptive average pool).

    Code kept byte-identical: the order of module attribute assignments
    determines state_dict keys and the order of RNG draws during weight
    initialization.
    """
    def __init__(self, block, num_blocks, num_classes=10, zero_init_residual=False):
        # block: BasicBlock or Bottleneck class; num_blocks: blocks per stage.
        super(ResNet, self).__init__()
        self.in_planes = 64
        # 3x3 stride-1 stem (CIFAR-sized inputs, not the ImageNet 7x7 stem).
        self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
        self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)
        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
        self.linear = nn.Linear((512 * block.expansion), num_classes)
        # Standard Kaiming init for convs, unit/zero affine for norm layers.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
            elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
        # Optionally zero the last BN of each residual branch so blocks start
        # as identity mappings.
        if zero_init_residual:
            for m in self.modules():
                if isinstance(m, Bottleneck):
                    nn.init.constant_(m.bn3.weight, 0)
                elif isinstance(m, BasicBlock):
                    nn.init.constant_(m.bn2.weight, 0)
    def get_feat_modules(self):
        """Return the feature-extraction modules (stem + four stages) as a ModuleList."""
        feat_m = nn.ModuleList([])
        feat_m.append(self.conv1)
        feat_m.append(self.bn1)
        feat_m.append(self.layer1)
        feat_m.append(self.layer2)
        feat_m.append(self.layer3)
        feat_m.append(self.layer4)
        return feat_m
    def get_bn_before_relu(self):
        """Return the last BN of each stage's final block (the BN preceding its ReLU)."""
        if isinstance(self.layer1[0], Bottleneck):
            bn1 = self.layer1[(- 1)].bn3
            bn2 = self.layer2[(- 1)].bn3
            bn3 = self.layer3[(- 1)].bn3
            bn4 = self.layer4[(- 1)].bn3
        elif isinstance(self.layer1[0], BasicBlock):
            bn1 = self.layer1[(- 1)].bn2
            bn2 = self.layer2[(- 1)].bn2
            bn3 = self.layer3[(- 1)].bn2
            bn4 = self.layer4[(- 1)].bn2
        else:
            raise NotImplementedError('ResNet unknown block error !!!')
        return [bn1, bn2, bn3, bn4]
    def _make_layer(self, block, planes, num_blocks, stride):
        """Build one stage: first block uses *stride*, the rest stride 1."""
        strides = ([stride] + ([1] * (num_blocks - 1)))
        layers = []
        for i in range(num_blocks):
            stride = strides[i]
            # Last positional arg flags the final block of the stage.
            layers.append(block(self.in_planes, planes, stride, (i == (num_blocks - 1))))
            self.in_planes = (planes * block.expansion)
        return nn.Sequential(*layers)
    def forward(self, x, is_feat=False, preact=False):
        # preact is accepted but unused here; f0/f5 are computed but not returned.
        out = F.relu(self.bn1(self.conv1(x)))
        f0 = out
        out = self.layer1(out)
        f1 = out
        out = self.layer2(out)
        f2 = out
        out = self.layer3(out)
        f3 = out
        out = self.layer4(out)
        f4 = out
        out = self.avgpool(out)
        out = out.view(out.size(0), (- 1))
        f5 = out
        out = self.linear(out)
        # With is_feat=True, also return the per-stage feature maps.
        if is_feat:
            return ([f1, f2, f3, f4], out)
        else:
            return out
# NOTE(review): the three lines below appear to be decorators whose leading
# '@<name>' text was lost in extraction (a route registration on
# '/gitlab/callback/trigger', a feature-flag guard, and a session-login
# requirement) — this is not valid Python as-is; restore before use.
('/gitlab/callback/trigger', methods=['GET'])
_show_if(features.GITLAB_BUILD)
_session_login
def attach_gitlab_build_trigger():
    """OAuth callback: exchange the GitLab code for a token and create a
    build trigger on the repository encoded in the ``state`` parameter.

    Redirects to the trigger setup page on success; aborts with 400 on a
    malformed state, 404 on token/repository failure, 501 for non-image
    repositories, and 403 without admin permission.
    """
    # state carries 'repo:<namespace>/<repository>'.
    state = request.args.get('state', None)
    if (not state):
        abort(400)
    state = state[len('repo:'):]
    try:
        [namespace, repository] = state.split('/')
    except ValueError:
        abort(400)
    permission = AdministerRepositoryPermission(namespace, repository)
    if permission.can():
        code = request.args.get('code')
        # NOTE(review): `client` is defined elsewhere in this module.
        token = gitlab_trigger.exchange_code_for_token(app.config, client, code, redirect_suffix='/trigger')
        if (not token):
            msg = 'Could not exchange token. It may have expired.'
            abort(404, message=msg)
        repo = model.repository.get_repository(namespace, repository)
        if (not repo):
            msg = ('Invalid repository: %s/%s' % (namespace, repository))
            abort(404, message=msg)
        elif (repo.kind.name != 'image'):
            # Triggers only make sense for image repositories.
            abort(501)
        trigger = model.build.create_build_trigger(repo, 'gitlab', token, current_user.db_user())
        repo_path = ('%s/%s' % (namespace, repository))
        full_url = url_for('web.buildtrigger', path=repo_path, trigger=trigger.uuid)
        logger.debug('Redirecting to full url: %s', full_url)
        return redirect(full_url)
    abort(403)
class Checker():
    """Central shared state for a type-checking run: options, caches,
    typeshed access, and signature/attribute resolution helpers.

    NOTE(review): the InitVar/field() usage implies this class is decorated
    with @dataclass; the decorator is not visible in this chunk — confirm
    against the full file.
    """
    # Raw options passed at construction; consumed by __post_init__ only.
    raw_options: InitVar[Optional[Options]] = None
    options: Options = field(init=False)
    arg_spec_cache: ArgSpecCache = field(init=False)
    ts_finder: TypeshedFinder = field(init=False)
    reexport_tracker: ImplicitReexportTracker = field(init=False)
    callable_tracker: CallableTracker = field(init=False)
    # Memoized TypeObject wrappers, keyed by the type/super/name itself.
    type_object_cache: Dict[(Union[(type, super, str)], TypeObject)] = field(default_factory=dict, init=False, repr=False)
    # Stack of (left, right) pairs currently assumed compatible (cycle guard).
    assumed_compatibilities: List[Tuple[(TypeObject, TypeObject)]] = field(default_factory=list)
    # Variable name -> VariableNameValue configured via options.
    vnv_map: Dict[(str, VariableNameValue)] = field(default_factory=dict)
    type_alias_cache: Dict[(object, TypeAlias)] = field(default_factory=dict)
    _should_exclude_any: bool = False
    _has_used_any_match: bool = False
    def __post_init__(self, raw_options: Optional[Options]) -> None:
        """Build the option set and the dependent caches/trackers."""
        if (raw_options is None):
            self.options = Options.from_option_list()
        else:
            self.options = raw_options
        self.ts_finder = TypeshedFinder.make(self, self.options)
        self.arg_spec_cache = ArgSpecCache(self.options, self.ts_finder, self, vnv_provider=self.maybe_get_variable_name_value)
        self.reexport_tracker = ImplicitReexportTracker(self.options)
        self.callable_tracker = CallableTracker()
        # Index every configured variable name for O(1) lookup.
        for vnv in self.options.get_value_for(VariableNameValues):
            for variable in vnv.varnames:
                self.vnv_map[variable] = vnv
    def maybe_get_variable_name_value(self, varname: str) -> Optional[VariableNameValue]:
        """Return the VariableNameValue configured for *varname*, if any."""
        return VariableNameValue.from_varname(varname, self.vnv_map)
    def perform_final_checks(self) -> List[Failure]:
        """Run end-of-run checks (currently just the callable tracker)."""
        return self.callable_tracker.check()
    def get_additional_bases(self, typ: Union[(type, super)]) -> Set[type]:
        """Collect extra base classes contributed by plugin providers."""
        bases = set()
        for provider in self.options.get_value_for(AdditionalBaseProviders):
            bases |= provider(typ)
        return bases
    def make_type_object(self, typ: Union[(type, super, str)]) -> TypeObject:
        """Return a (cached) TypeObject for *typ*; unhashable keys bypass the cache."""
        try:
            in_cache = (typ in self.type_object_cache)
        except Exception:
            # typ is unhashable (or misbehaves); build without caching.
            return self._build_type_object(typ)
        if in_cache:
            return self.type_object_cache[typ]
        type_object = self._build_type_object(typ)
        self.type_object_cache[typ] = type_object
        return type_object
    def _build_type_object(self, typ: Union[(type, super, str)]) -> TypeObject:
        """Construct a TypeObject, detecting protocols and collecting bases."""
        if isinstance(typ, str):
            # Synthetic (typeshed-only) type, identified by name.
            bases = self._get_typeshed_bases(typ)
            is_protocol = any((is_typing_name(base, 'Protocol') for base in bases))
            if is_protocol:
                protocol_members = self._get_protocol_members(bases)
            else:
                protocol_members = set()
            return TypeObject(typ, bases, is_protocol=is_protocol, protocol_members=protocol_members)
        elif isinstance(typ, super):
            return TypeObject(typ, self.get_additional_bases(typ))
        else:
            plugin_bases = self.get_additional_bases(typ)
            typeshed_bases = self._get_recursive_typeshed_bases(typ)
            additional_bases = (plugin_bases | typeshed_bases)
            # A typeshed-declared protocol takes its members from typeshed.
            if self.ts_finder.is_protocol(typ):
                return TypeObject(typ, additional_bases, is_protocol=True, protocol_members=self._get_protocol_members(typeshed_bases))
            # A runtime typing.Protocol subclass: harvest members from its MRO.
            if (is_instance_of_typing_name(typ, '_ProtocolMeta') and safe_getattr(typ, '_is_protocol', False)):
                bases = get_mro(typ)
                members = set(itertools.chain.from_iterable((_extract_protocol_members(base) for base in bases)))
                return TypeObject(typ, additional_bases, is_protocol=True, protocol_members=members)
            return TypeObject(typ, additional_bases)
    def _get_recursive_typeshed_bases(self, typ: Union[(type, str)]) -> Set[Union[(type, str)]]:
        """Transitive closure of typeshed bases for *typ*."""
        seen = set()
        to_do = {typ}
        result = set()
        while to_do:
            typ = to_do.pop()
            if (typ in seen):
                continue
            bases = self._get_typeshed_bases(typ)
            result |= bases
            to_do |= bases
            seen.add(typ)
        return result
    def _get_typeshed_bases(self, typ: Union[(type, str)]) -> Set[Union[(type, str)]]:
        """Direct typeshed bases of *typ*, restricted to concrete TypedValue bases."""
        base_values = self.ts_finder.get_bases_recursively(typ)
        return {base.typ for base in base_values if isinstance(base, TypedValue)}
    def _get_protocol_members(self, bases: Iterable[Union[(type, str)]]) -> Set[str]:
        """Union of all attribute names declared on the given protocol bases."""
        return set(itertools.chain.from_iterable((self.ts_finder.get_all_attributes(base) for base in bases)))
    def get_generic_bases(self, typ: Union[(type, str)], generic_args: Sequence[Value]=()) -> GenericBases:
        """Delegate generic-base resolution to the arg-spec cache."""
        return self.arg_spec_cache.get_generic_bases(typ, generic_args)
    def get_signature(self, obj: object, is_asynq: bool=False) -> Optional[ConcreteSignature]:
        """Return a concrete signature for *obj*, unwrapping bound methods."""
        sig = self.arg_spec_cache.get_argspec(obj, is_asynq=is_asynq)
        if isinstance(sig, Signature):
            return sig
        elif isinstance(sig, BoundMethodSignature):
            return sig.get_signature(ctx=self)
        elif isinstance(sig, OverloadedSignature):
            return sig
        return None
    def can_assume_compatibility(self, left: TypeObject, right: TypeObject) -> bool:
        """True while (left, right) compatibility is being assumed (cycle guard)."""
        return ((left, right) in self.assumed_compatibilities)
    def assume_compatibility(self, left: TypeObject, right: TypeObject) -> Iterator[None]:
        """Temporarily assume (left, right) are compatible for nested checks.

        NOTE(review): this generator is evidently used as a context manager;
        a @contextmanager decorator is presumably applied in the full file —
        confirm.
        """
        pair = (left, right)
        self.assumed_compatibilities.append(pair)
        try:
            (yield)
        finally:
            new_pair = self.assumed_compatibilities.pop()
            assert (pair == new_pair)
    def display_value(self, value: Value) -> str:
        """Human-readable rendering of *value*, including its signature if known."""
        message = f"'{value!s}'"
        if isinstance(value, KnownValue):
            sig = self.arg_spec_cache.get_argspec(value.val)
        elif isinstance(value, UnboundMethodValue):
            sig = value.get_signature(self)
        elif (isinstance(value, SubclassValue) and value.exactly):
            sig = self.signature_from_value(value)
        else:
            sig = None
        if (sig is not None):
            message += f', signature is {sig!s}'
        return message
    def has_used_any_match(self) -> bool:
        """Whether an Any-based match has been recorded since the last reset."""
        return self._has_used_any_match
    def record_any_used(self) -> None:
        """Record that an Any value was used to satisfy a match."""
        self._has_used_any_match = True
    def reset_any_used(self) -> ContextManager[None]:
        """Context manager that clears the Any-used flag within its scope."""
        return qcore.override(self, '_has_used_any_match', False)
    def set_exclude_any(self) -> ContextManager[None]:
        """Context manager that excludes Any matching within its scope."""
        return qcore.override(self, '_should_exclude_any', True)
    def should_exclude_any(self) -> bool:
        """Whether Any matching is currently excluded."""
        return self._should_exclude_any
    def signature_from_value(self, value: Value, *, get_return_override: Callable[([MaybeSignature], Optional[Value])]=(lambda _: None), get_call_attribute: Optional[Callable[([Value], Value)]]=None) -> MaybeSignature:
        """Resolve the callable signature of an arbitrary Value.

        Returns None when the value is not callable, ANY_SIGNATURE when it is
        callable but its signature cannot be determined.
        """
        if isinstance(value, AnnotatedValue):
            value = value.value
        if isinstance(value, TypeVarValue):
            value = value.get_fallback_value()
        if isinstance(value, KnownValue):
            argspec = self.arg_spec_cache.get_argspec(value.val)
            if (argspec is None):
                # Not directly introspectable; fall back to its __call__.
                if (get_call_attribute is not None):
                    method_object = get_call_attribute(value)
                else:
                    method_object = self.get_attribute_from_value(value, '__call__')
                if (method_object is UNINITIALIZED_VALUE):
                    return None
                else:
                    return ANY_SIGNATURE
            if isinstance(value, KnownValueWithTypeVars):
                return argspec.substitute_typevars(value.typevars)
            return argspec
        elif isinstance(value, UnboundMethodValue):
            method = value.get_method()
            if (method is not None):
                sig = self.arg_spec_cache.get_argspec(method)
                if (sig is None):
                    return ANY_SIGNATURE
                return_override = get_return_override(sig)
                bound = make_bound_method(sig, value.composite, return_override)
                if ((bound is not None) and (value.typevars is not None)):
                    bound = bound.substitute_typevars(value.typevars)
                return bound
            return None
        elif isinstance(value, CallableValue):
            return value.signature
        elif isinstance(value, TypedValue):
            typ = value.typ
            if ((typ is collections.abc.Callable) or (typ is types.FunctionType)):
                return ANY_SIGNATURE
            if isinstance(typ, str):
                # Synthetic type: resolve through its __call__ attribute.
                if (get_call_attribute is not None):
                    call_method = get_call_attribute(value)
                else:
                    call_method = self.get_attribute_from_value(value, '__call__')
                if (call_method is UNINITIALIZED_VALUE):
                    return None
                return self.signature_from_value(call_method, get_return_override=get_return_override, get_call_attribute=get_call_attribute)
            # type.__call__ inherited from `type` on a non-metaclass means the
            # instance itself is not callable.
            if ((getattr(typ.__call__, '__objclass__', None) is type) and (not issubclass(typ, type))):
                return None
            call_fn = typ.__call__
            sig = self.arg_spec_cache.get_argspec(call_fn)
            return_override = get_return_override(sig)
            bound_method = make_bound_method(sig, Composite(value), return_override)
            if (bound_method is None):
                return None
            return bound_method.get_signature(ctx=self)
        elif isinstance(value, SubclassValue):
            if isinstance(value.typ, TypedValue):
                if (value.typ.typ is tuple):
                    return ANY_SIGNATURE
                argspec = self.arg_spec_cache.get_argspec(value.typ.typ, allow_synthetic_type=True)
                if (argspec is None):
                    return ANY_SIGNATURE
                return argspec
            else:
                return ANY_SIGNATURE
        elif isinstance(value, AnyValue):
            return ANY_SIGNATURE
        elif isinstance(value, MultiValuedValue):
            sigs = [self.signature_from_value(subval, get_return_override=get_return_override, get_call_attribute=get_call_attribute) for subval in value.vals]
            # All union members must be callable; the combined signature is Any.
            if all(((sig is not None) for sig in sigs)):
                return ANY_SIGNATURE
            else:
                return None
        else:
            return None
    def get_attribute_from_value(self, root_value: Value, attribute: str, *, prefer_typeshed: bool=False) -> Value:
        """Look up *attribute* on *root_value*, distributing over unions."""
        if isinstance(root_value, TypeVarValue):
            root_value = root_value.get_fallback_value()
        if is_union(root_value):
            results = [self.get_attribute_from_value(subval, attribute, prefer_typeshed=prefer_typeshed) for subval in flatten_values(root_value)]
            return unite_values(*results)
        ctx = CheckerAttrContext(Composite(root_value), attribute, self.options, skip_mro=False, skip_unwrap=False, prefer_typeshed=prefer_typeshed, checker=self)
        return get_attribute(ctx)
def _create_hypotheses_widgets() -> dict[(str, tuple[(str, QtWidgets.QWidget)])]:
    """Build the checkable list widget used to choose btrack hypotheses.

    Each hypothesis name ``X_Y`` is displayed as ``X(Y)``; the ``P_FP``
    entry is forced on (its enabled flag is dropped so the user cannot
    toggle it).
    """
    hypotheses = btrack.optimise.hypothesis.H_TYPES
    tooltips = ['Hypothesis that a tracklet is a false positive detection. Always required.', 'Hypothesis that a tracklet starts at the beginning of the movie or edge of the field of view.', 'Hypothesis that a tracklet ends at the end of the movie or edge of the field of view.', 'Hypothesis that two tracklets should be linked together.', 'Hypothesis that a tracklet can split into two daughter tracklets.', 'Hypothesis that a tracklet terminates without leaving the field of view.', 'Hypothesis that two tracklets merge into one tracklet.']
    list_widget = QtWidgets.QListWidget()
    list_widget.addItems([f"{h.replace('_', '(')})" for h in hypotheses])
    checkable = QtCore.Qt.ItemFlags(QtCore.Qt.ItemIsUserCheckable | QtCore.Qt.ItemIsEnabled)
    for index, tip in enumerate(tooltips):
        item = list_widget.item(index)
        item.setFlags(checkable)
        item.setToolTip(tip)
    # P_FP is always required: checkable but not enabled.
    list_widget.item(hypotheses.index('P_FP')).setFlags(QtCore.Qt.ItemIsUserCheckable)
    return {'hypotheses': ('hypotheses', list_widget)}
class Pool(object):
    """multiprocessing.Pool-like facade built on a task group.

    Fix: replaced the mutable default arguments (``kwds=dict()``) with the
    ``None`` sentinel pattern — the shared dict could be mutated by a
    called function and leak state between unrelated calls.

    NOTE(review): ``nworkers`` and ``name`` are accepted for API
    compatibility but are not used for sizing; worker-slot count comes
    from ``default_num_threads()``.
    """

    def __init__(self, nworkers=0, name='Pool'):
        self._closed = False
        self._tasks = task_group()
        # Placeholder worker slots (sizing intentionally ignores nworkers).
        self._pool = ([None] * default_num_threads())

    def apply(self, func, args=(), kwds=None):
        """Synchronously run func(*args, **kwds) and return its result."""
        return self.apply_async(func, args, kwds).get()

    def map(self, func, iterable, chunksize=None):
        """Synchronously map *func* over *iterable*, preserving order."""
        return self.map_async(func, iterable, chunksize).get()

    def imap(self, func, iterable, chunksize=1):
        """Lazy ordered map: returns an iterator over results."""
        collector = OrderedResultCollector(as_iterator=True)
        self._create_sequences(func, iterable, chunksize, collector)
        return iter(collector)

    def imap_unordered(self, func, iterable, chunksize=1):
        """Lazy map yielding results in completion order."""
        collector = UnorderedResultCollector()
        self._create_sequences(func, iterable, chunksize, collector)
        return iter(collector)

    def apply_async(self, func, args=(), kwds=None, callback=None):
        """Schedule func(*args, **kwds); returns an ApplyResult future."""
        assert (not self._closed)
        apply_result = ApplyResult(callback=callback)
        # Fresh dict per call when no kwargs were supplied (mutable-default fix).
        job = Job(func, args, ({} if kwds is None else kwds), apply_result)
        self._tasks.run(job)
        return apply_result

    def map_async(self, func, iterable, chunksize=None, callback=None):
        """Asynchronous ordered map; the result resolves to a list."""
        apply_result = ApplyResult(callback=callback)
        collector = OrderedResultCollector(apply_result, as_iterator=False)
        if (not self._create_sequences(func, iterable, chunksize, collector)):
            # Empty input: resolve immediately with an empty list.
            apply_result._set_value([])
        return apply_result

    def imap_async(self, func, iterable, chunksize=None, callback=None):
        """Asynchronous ordered map; the result resolves to an iterator."""
        apply_result = ApplyResult(callback=callback)
        collector = OrderedResultCollector(apply_result, as_iterator=True)
        if (not self._create_sequences(func, iterable, chunksize, collector)):
            apply_result._set_value(iter([]))
        return apply_result

    def imap_unordered_async(self, func, iterable, chunksize=None, callback=None):
        """Asynchronous unordered map; the result resolves to an iterator."""
        apply_result = ApplyResult(callback=callback)
        collector = UnorderedResultCollector(apply_result)
        if (not self._create_sequences(func, iterable, chunksize, collector)):
            apply_result._set_value(iter([]))
        return apply_result

    def close(self):
        """Refuse new work; already-submitted jobs keep running."""
        self._closed = True

    def terminate(self):
        """Close the pool and cancel outstanding tasks."""
        self.close()
        self._tasks.cancel()

    def join(self):
        """Block until all submitted tasks have finished."""
        self._tasks.wait()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Context-manager exit waits for completion (does not close).
        self.join()

    def __del__(self):
        self.terminate()
        self.join()

    def _create_sequences(self, func, iterable, chunksize, collector):
        """Chunk *iterable* into JobSequences of size *chunksize*, submit them,
        and return the (possibly empty) list of sequences submitted."""
        assert (not self._closed)
        it_ = iter(iterable)
        exit_loop = False
        sequences = []
        while (not exit_loop):
            seq = []
            for _ in range((chunksize or 1)):
                try:
                    arg = next(it_)
                except StopIteration:
                    exit_loop = True
                    break
                apply_result = ApplyResult(collector)
                job = Job(func, (arg,), {}, apply_result)
                seq.append(job)
            if seq:
                sequences.append(JobSequence(seq))
        for t in sequences:
            self._tasks.run(t)
        return sequences
class Trainer(object):
    """RPC trainer that pulls a model from a parameter server, computes
    gradients on synthetic data, and pushes them back for aggregation.

    NOTE(review): ``batch_size``, ``num_classes``, ``num_batches``,
    ``image_w`` and ``image_h`` are module-level globals defined elsewhere
    in this file.
    """
    def __init__(self, ps_rref):
        # Remote reference to the BatchUpdateParameterServer instance.
        self.ps_rref = ps_rref
        self.loss_fn = nn.MSELoss()
        # One random class index per sample, reused for every synthetic batch.
        self.one_hot_indices = torch.LongTensor(batch_size).random_(0, num_classes).view(batch_size, 1)
    def get_next_batch(self):
        """Yield (inputs, one-hot labels) CUDA tensors for num_batches batches."""
        for _ in range(num_batches):
            inputs = torch.randn(batch_size, 3, image_w, image_h)
            labels = torch.zeros(batch_size, num_classes).scatter_(1, self.one_hot_indices, 1)
            (yield (inputs.cuda(), labels.cuda()))
    def train(self):
        """Training loop: fetch model, backprop locally, send grads via RPC.

        Each iteration blocks on the parameter server returning the updated
        model (synchronous parameter-server training).
        """
        name = rpc.get_worker_info().name
        # Initial model fetch from the parameter server.
        m = self.ps_rref.rpc_sync().get_model().cuda()
        for (inputs, labels) in self.get_next_batch():
            timed_log(f'{name} processing one batch')
            self.loss_fn(m(inputs), labels).backward()
            timed_log(f'{name} reporting grads')
            # Ship CPU gradients to the server; receive the refreshed model.
            m = rpc.rpc_sync(self.ps_rref.owner(), BatchUpdateParameterServer.update_and_fetch_model, args=(self.ps_rref, [p.grad for p in m.cpu().parameters()])).cuda()
            timed_log(f'{name} got updated model')
class TestConvertSelection(EndianTest):
    """Round-trip pack/unpack tests for the X11 ConvertSelection request."""
    def setUp(self):
        # NOTE(review): the dict values were lost in extraction (every key
        # maps to nothing) — this line is not valid Python as-is; restore the
        # original atom/window/timestamp values before running.
        self.req_args_0 = {'property': , 'requestor': , 'selection': , 'target': , 'time': }
        # Expected wire encoding of the request arguments above.
        self.req_bin_0 = b"\x18\x00\x06\x00\x0b'no7\xd6\nPTp4&;\xd2\xbck\xd3\x18\xcaQ"
    def testPackRequest0(self):
        # Serialize the request and compare against the canned binary form.
        bin = request.ConvertSelection._request.to_binary(*(), **self.req_args_0)
        self.assertBinaryEqual(bin, self.req_bin_0)
    def testUnpackRequest0(self):
        # Parse the canned binary and expect the original arguments back,
        # with no bytes left over.
        (args, remain) = request.ConvertSelection._request.parse_binary(self.req_bin_0, dummy_display, 1)
        self.assertBinaryEmpty(remain)
        self.assertEqual(args, self.req_args_0)
def model_setenv(cpu_only):
    """Seed the RNGs and select the torch device via the DEVICE env var.

    With cpu_only=True the device is forced to 'cpu'; otherwise any
    pre-existing valid DEVICE value is respected, falling back to CUDA
    availability.  Enables cuDNN benchmarking when running on CUDA.
    """
    import random
    random.seed(42)
    torch.manual_seed(42)
    if cpu_only:
        os.environ['DEVICE'] = 'cpu'
    else:
        configured = os.environ.get('DEVICE')
        if configured != 'cuda' and configured != 'cpu':
            os.environ['DEVICE'] = 'cuda' if torch.cuda.is_available() else 'cpu'
    if os.environ['DEVICE'] == 'cuda':
        # cuDNN autotuner: fastest kernels for fixed-shape workloads.
        torch.backends.cudnn.enabled = True
        torch.backends.cudnn.benchmark = True
    print('Running Environment:')
    print('')
    print(' DEVICE: ', os.environ['DEVICE'])
class MobileNetV2(nn.Module):
    """MobileNetV2 classifier with a configurable width multiplier.

    Code kept byte-identical: module attribute assignment order determines
    state_dict keys and the RNG-draw order during weight initialization.
    """
    def __init__(self, num_classes=1000, width_mult=1.0):
        super(MobileNetV2, self).__init__()
        # Inverted-residual settings: [expansion t, channels c, repeats n, stride s].
        self.cfgs = [[1, 16, 1, 1], [6, 24, 2, 2], [6, 32, 3, 2], [6, 64, 4, 2], [6, 96, 3, 1], [6, 160, 3, 2], [6, 320, 1, 1]]
        # Channel counts are rounded to a divisor of 8 (4 for width_mult 0.1).
        input_channel = _make_divisible((32 * width_mult), (4 if (width_mult == 0.1) else 8))
        layers = [conv_3x3_bn(3, input_channel, 2)]
        block = InvertedResidual
        for (t, c, n, s) in self.cfgs:
            output_channel = _make_divisible((c * width_mult), (4 if (width_mult == 0.1) else 8))
            for i in range(n):
                # Only the first block of a group applies the stride.
                layers.append(block(input_channel, output_channel, (s if (i == 0) else 1), t))
                input_channel = output_channel
        self.features = nn.Sequential(*layers)
        # Final 1x1 conv widens with width_mult only when width_mult > 1.
        output_channel = (_make_divisible((1280 * width_mult), (4 if (width_mult == 0.1) else 8)) if (width_mult > 1.0) else 1280)
        self.conv = conv_1x1_bn(input_channel, output_channel)
        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
        self.classifier = nn.Linear(output_channel, num_classes)
        self._initialize_weights()
    def forward(self, x):
        x = self.features(x)
        x = self.conv(x)
        x = self.avgpool(x)
        x = x.view(x.size(0), (- 1))
        x = self.classifier(x)
        return x
    def _initialize_weights(self):
        """He-style init for convs, unit/zero for BN, small normal for linear."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                # Fan-out scaling per He et al.
                n = ((m.kernel_size[0] * m.kernel_size[1]) * m.out_channels)
                m.weight.data.normal_(0, math.sqrt((2.0 / n)))
                if (m.bias is not None):
                    m.bias.data.zero_()
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
            elif isinstance(m, nn.Linear):
                m.weight.data.normal_(0, 0.01)
                m.bias.data.zero_()
def test_admin_session_download_permalink_no_layout(clean_database, mock_emit_session_update, flask_app, mock_audit):
    """Requesting a permalink for a session without a layout must raise
    InvalidActionError and emit neither a session update nor an audit entry."""
    creator = database.User.create(id=1234, name='The Name')
    multi_session = database.MultiplayerSession.create(id=1, name='Debug', state=MultiplayerSessionVisibility.VISIBLE, creator=creator)
    database.MultiplayerMembership.create(user=creator, session=multi_session, admin=True)
    server = MagicMock()
    server.get_current_user.return_value = creator
    with flask_app.test_request_context(), pytest.raises(error.InvalidActionError):
        session_admin.admin_session(server, 1, SessionAdminGlobalAction.REQUEST_PERMALINK.value)
    mock_emit_session_update.assert_not_called()
    mock_audit.assert_not_called()
def print1d(comp, type, wid, label, arr, doinp=False, **kwargs):
    """Print a 1-D array `arr`, several values per line.

    comp:   if truthy, print "compressed": skip near-zero entries (<1e-12)
            and prefix each printed value with its 1-based index.
    type:   format type code, interpreted by printpars().
    wid:    field width, interpreted by printpars().
    label:  row label, printed as 'label='; a single-space label prints blank.
    doinp:  forwarded to doinpprt(); if that handler consumes the array,
            nothing more is printed here.
    kwargs: forwarded to print() (e.g. file=...).
    """
    if arr is None:
        return
    # BUG FIX: doinp was previously hard-coded to False here, silently
    # ignoring the caller's doinp argument.
    if doinpprt(label, arr, doinp=doinp, **kwargs):
        return
    if (label != ' '):
        labstr = ('%6s=' % label)
    else:
        labstr = ' '
    (npl, pkstr, fwid) = printpars(type, wid)
    i = 0
    ndone = 0
    while (i < arr.size):
        if ((abs(arr[i]) >= 1e-12) or (not comp)):
            if (ndone == 0):
                print(labstr, end='', **kwargs)
            if comp:
                print(('%8d=' % (i + 1)), end='', **kwargs)
            # renamed from `str` to avoid shadowing the builtin
            text = formatx(fwid, ' ', pkstr, 0.0, arr[i])
            print(text, end='', **kwargs)
            ndone = (ndone + 1)
            if (ndone == npl):
                print('', **kwargs)
                ndone = 0
        i = (i + 1)
    if (ndone > 0):
        print('', **kwargs)
# NOTE(review): the leading ".skipif(...)" reads as a decorator whose
# "@pytest.mark" prefix appears lost in extraction — TODO confirm upstream.
.skipif('sys.platform == "win32" and platform.python_implementation() == "PyPy"')
def test_xdist_no_data_collected(testdir):
    """Running coverage under pytest-xdist (-n 1) must still import and
    collect data for the `target` module (no no-data/module-not-imported
    warnings, exit code 0)."""
    testdir.makepyfile(target='x = 123')
    script = testdir.makepyfile('\nimport target\ndef test_foobar():\n assert target.x == 123\n')
    result = testdir.runpytest('-v', '--cov=target', '-n', '1', script)
    assert ('no-data-collected' not in result.stderr.str())
    assert ('no-data-collected' not in result.stdout.str())
    assert ('module-not-imported' not in result.stderr.str())
    assert ('module-not-imported' not in result.stdout.str())
    assert (result.ret == 0)
class ScrimsSlotReserve(ScrimsView):
    """Interactive view for managing the reserved slots of a scrim."""

    def __init__(self, ctx: Context, scrim: Scrim):
        super().__init__(ctx)
        self.ctx = ctx
        self.record = scrim

    async def initial_embed(self):
        """Build the embed listing every slot with its reserved team, if any."""
        _e = discord.Embed(color=self.bot.color)
        _e.description = f'''**{self.record} - Reserved Slots**
'''
        reserved = (await self.record.reserved_slots.order_by('num'))
        _l = []
        for _ in range(self.record.start_from, (self.record.total_slots + self.record.start_from)):
            # Empty string when no reservation matches this slot number.
            team_name = next((f"{i.team_name} [{('Lifetime' if (not i.expires) else i.expires.strftime('%b/%d'))}]" for i in reserved if (i.num == _)), '')
            _l.append(((f'Slot {_:02} --> ' + team_name) + '\n'))
        _e.description += f"```{''.join(_l)}```"
        _e.set_footer(text=f"Page - {' / '.join((await self.record.scrim_posi()))}")
        return _e

    async def refresh_view(self):
        """Re-render buttons and the embed; drop the view if the message is gone."""
        (await self.add_buttons())
        try:
            # BUG FIX: initial_embed is a coroutine function and must be
            # called; `await self.initial_embed` awaited the bound method
            # object itself, which raises TypeError.
            self.message = (await self.message.edit(embed=(await self.initial_embed()), view=self))
        except discord.HTTPException:
            (await self.on_timeout())

    async def add_buttons(self):
        """Rebuild the button row; pagination buttons only with 2+ scrims."""
        self.clear_items()
        self.add_item(NewReserve(self.ctx))
        self.add_item(RemoveReserve(self.ctx, (not bool((await self.record.reserved_slots.all().count())))))
        if ((await Scrim.filter(guild_id=self.ctx.guild.id).count()) >= 2):
            self.add_item(Prev(self.ctx, 2))
            self.add_item(SkipTo(self.ctx, 2))
            self.add_item(Next(self.ctx, 2))
        self.add_item(Discard(self.ctx, 'Main Menu', 2))
class PackageInclude(Include):
    """An Include classified as a Python package, single module, or
    PEP 561 stub-only package.

    After construction exactly one of is_package()/is_module() is true and
    `package` names the package directory (or module file stem).
    """

    def __init__(self, base: Path, include: str, formats: (list[str] | None)=None, source: (str | None)=None, target: (str | None)=None) -> None:
        self._package: str
        self._is_package = False
        self._is_module = False
        self._source = source
        self._target = target
        if (source is not None):
            # Includes are resolved relative to the source directory when given.
            base = (base / source)
        super().__init__(base, include, formats=formats)
        self.check_elements()

    # BUG FIX: is_stub_only() reads `self.package or ''` as a string, so
    # package/source/target must be properties; their @property decorators
    # appear to have been stripped. Without them, `.endswith` would be
    # called on a bound method and crash.
    @property
    def package(self) -> str:
        return self._package

    @property
    def source(self) -> (str | None):
        return self._source

    @property
    def target(self) -> (str | None):
        return self._target

    def is_package(self) -> bool:
        return self._is_package

    def is_module(self) -> bool:
        return self._is_module

    def refresh(self) -> PackageInclude:
        """Re-glob the elements and re-classify this include."""
        super().refresh()
        return self.check_elements()

    def is_stub_only(self) -> bool:
        """True for stub-only packages: name ends in '-stubs' and every file
        element is a .pyi stub or the py.typed marker."""
        return ((self.package or '').endswith('-stubs') and all((((el.suffix == '.pyi') or (el.name == 'py.typed')) for el in self.elements if el.is_file())))

    def has_modules(self) -> bool:
        """True when any element is a .py file."""
        return any(((element.suffix == '.py') for element in self.elements))

    def check_elements(self) -> PackageInclude:
        """Classify the include and validate it.

        Raises ValueError when nothing matched, or when a matched directory
        is neither a stub-only package nor contains modules.
        """
        if (not self._elements):
            raise ValueError(f'{(self._base / self._include)} does not contain any element')
        root = self._elements[0]
        if (len(self._elements) > 1):
            # Multiple matches: the glob captured the package contents;
            # the package is the shared parent directory.
            self._is_package = True
            self._package = root.parent.name
            if (not (self.is_stub_only() or self.has_modules())):
                raise ValueError(f'{root.name} is not a package.')
        elif root.is_dir():
            # Single directory match: treat it as the package root and
            # expand its contents.
            self._package = root.name
            self._elements: list[Path] = sorted(root.glob('**/*'))
            if (not (self.is_stub_only() or self.has_modules())):
                raise ValueError(f'{root.name} is not a package.')
            self._is_package = True
        else:
            # Single file: a standalone module.
            self._package = root.stem
            self._is_module = True
        return self
def connection_options(func):
    """Decorator intended to attach the shared yadage connection/backend CLI
    options to a click command.

    NOTE(review): the bare tuples below read as click option declarations
    whose ``@click.option`` decorator prefix appears lost in extraction, and
    ``(func)`` looks like a stripped ``@functools.wraps(func)`` — confirm
    against the upstream CLI module. As written these lines are no-ops (and
    the keyword-argument tuples are not even valid expressions).
    """
    ('-m', '--metadir', default='yadagemeta', help='directory to store workflow metadata')
    ('--accept-metadir/--no-accept-metadir', default=True)
    ('-r', '--controller', default='frommodel')
    ('-o', '--ctrlopt', multiple=True, default=None, help='options for the workflow controller')
    ('-s', '--modelsetup', default='filebacked')
    ('-l', '--modelopt', multiple=True, default=None, help='options for the workflow state models')
    ('-b', '--backend', default='foregroundasync')
    ('--local/--remote', default=True)
    (func)
    def wrapper(*args, **kwargs):
        # Plain pass-through; the option decorators above would wrap this.
        return func(*args, **kwargs)
    return wrapper
def check_master_taint(master_nodes, master_label):
    """Return the names of master nodes that lack a NoSchedule taint keyed
    on master_label.

    A node whose taint information cannot be read is treated as schedulable
    (best-effort), matching the logged-and-continue behavior.
    """
    schedulable_masters = []
    for master_node in master_nodes:
        node_info = get_node_info(master_node)
        node = node_info.metadata.name
        try:
            taints = node_info.spec.taints if node_info.spec is not None else None
            tainted = any(
                taint.key == str(master_label) and taint.effect == 'NoSchedule'
                for taint in (taints or [])
            )
            if not tainted:
                schedulable_masters.append(node)
        except Exception as e:
            logging.info('Exception getting master nodes' + str(e))
            schedulable_masters.append(node)
    return schedulable_masters
class RestoreTest(unittest.TestCase):
    """Restore tests covering every local/remote combination of mirror and
    destination, plus a no-increments case."""

    def get_rfcs(self):
        """Build a RestoreFileComparer per sub restore-file (skipping b'dir'),
        registering the correct rpath for each increment time plus the mirror."""
        base_rf = _repo_shadow._RestoreFile(restore_base_rp, restore_base_rp, [])
        rfs = base_rf.yield_sub_rfs()
        rfcs = []
        for rf in rfs:
            if (rf.mirror_rp.dirsplit()[1] in [b'dir']):
                log.Log("skipping 'dir'", 5)
                continue
            rfc = RestoreFileComparer(rf)
            for inc in rf.inc_list:
                test_time = inc.getinctime()
                rfc.add_rpath(self.get_correct(rf.mirror_rp, test_time), test_time)
            rfc.add_rpath(rf.mirror_rp, mirror_time)
            rfcs.append(rfc)
        return rfcs

    def get_correct(self, mirror_rp, test_time):
        """Return the rpath holding the expected contents of mirror_rp at
        test_time, searching the known base filenames first."""
        self.assertGreater(test_time, (- 1))
        # BUG FIX: assertLess was previously called with a single argument
        # (`assertLess(test_time, )`), a TypeError at runtime. An increment
        # time must predate the mirror time.
        # TODO(review): confirm mirror_time is the intended upper bound.
        self.assertLess(test_time, mirror_time)
        (dirname, basename) = mirror_rp.dirsplit()
        for filename in restore_base_filenames:
            comps = filename.split(b'.')
            base = b'.'.join(comps[:(- 1)])
            t = Time.bytestotime(comps[(- 1)])
            if ((t == test_time) and (basename == base)):
                return restore_base_rp.append(filename)
        # Not among the known files: synthesize <basename>.<timestring>.
        return restore_base_rp.append((b'%b.%b' % (basename, Time.timetobytes(test_time))))

    def testBothLocal(self):
        self.restore_dir_test(1, 1)

    def testMirrorRemote(self):
        self.restore_dir_test(0, 1)

    def testDestRemote(self):
        self.restore_dir_test(1, 0)

    def testBothRemote(self):
        self.restore_dir_test(0, 0)

    def restore_dir_test(self, mirror_local, dest_local):
        """Restore restoretest3 at several times and compare the output
        against the corresponding incrementN snapshots."""
        Myrm(abs_output_dir)
        restore3_dir = os.path.join(old_test_dir, b'restoretest3')
        target_rp = rpath.RPath(Globals.local_connection, abs_output_dir)
        inc1_rp = rpath.RPath(Globals.local_connection, os.path.join(old_test_dir, b'increment1'))
        inc2_rp = rpath.RPath(Globals.local_connection, os.path.join(old_test_dir, b'increment2'))
        inc3_rp = rpath.RPath(Globals.local_connection, os.path.join(old_test_dir, b'increment3'))
        inc4_rp = rpath.RPath(Globals.local_connection, os.path.join(old_test_dir, b'increment4'))
        InternalRestore(mirror_local, dest_local, restore3_dir, abs_output_dir, 45000)
        self.assertTrue(compare_recursive(inc4_rp, target_rp))
        InternalRestore(mirror_local, dest_local, restore3_dir, abs_output_dir, 35000)
        self.assertTrue(compare_recursive(inc3_rp, target_rp, compare_hardlinks=0))
        InternalRestore(mirror_local, dest_local, restore3_dir, abs_output_dir, 25000)
        self.assertTrue(compare_recursive(inc2_rp, target_rp, compare_hardlinks=0))
        InternalRestore(mirror_local, dest_local, restore3_dir, abs_output_dir, 5000)
        self.assertTrue(compare_recursive(inc1_rp, target_rp, compare_hardlinks=0))

    def testRestoreNoincs(self):
        """Restoring a file with no increments should still produce output."""
        Myrm(abs_output_dir)
        InternalRestore(1, 1, os.path.join(old_test_dir, b'restoretest5', b'regular_file'), abs_output_dir, 10000)
        self.assertTrue(os.lstat(abs_output_dir))
class TestAttributes():
    """Tests for attrs' class decoration: attribute collection, generated
    dunder methods, pre/post-init hooks, and the factory/type sugar.

    NOTE(review): the bare expressions scattered through this class —
    ``.parametrize(...)``, ``.usefixtures(...)``, ``(slots=...)``,
    ``(attr=attrs_st, ...)`` and similar — read as decorators
    (``@pytest.mark...``, ``@attr.s(...)``, ``@given(...)``) whose ``@``
    prefix appears lost in extraction; confirm against the upstream attrs
    test suite before relying on this copy.
    """

    def test_sets_attrs(self):
        class C():
            x = attr.ib()
        assert ('x' == C.__attrs_attrs__[0].name)
        assert all((isinstance(a, Attribute) for a in C.__attrs_attrs__))

    def test_empty(self):
        class C3():
            pass
        assert ('C3()' == repr(C3()))
        assert (C3() == C3())

    (attr=attrs_st, attr_name=sampled_from(Attribute.__slots__))
    def test_immutable(self, attr, attr_name):
        # Attribute instances must reject any attribute assignment.
        with pytest.raises(AttributeError):
            setattr(attr, attr_name, 1)

    .parametrize('method_name', ['__repr__', '__eq__', '__hash__', '__init__'])
    def test_adds_all_by_default(self, method_name):
        # A pre-set method must be replaced by the generated one.
        sentinel = object()
        class C():
            x = attr.ib()
        setattr(C, method_name, sentinel)
        C = attr.s(C)
        meth = getattr(C, method_name)
        assert (sentinel != meth)
        if (method_name == '__hash__'):
            # eq without explicit hash leaves the class unhashable.
            assert (meth is None)

    .parametrize(('arg_name', 'method_name'), [('repr', '__repr__'), ('eq', '__eq__'), ('order', '__le__'), ('hash', '__hash__'), ('init', '__init__')])
    def test_respects_add_arguments(self, arg_name, method_name):
        # Disabling one generator keeps the user-provided method in place.
        sentinel = object()
        am_args = {'repr': True, 'eq': True, 'order': True, 'hash': True, 'init': True}
        am_args[arg_name] = False
        if (arg_name == 'eq'):
            # order requires eq, so it must be disabled alongside it.
            am_args['order'] = False
        class C():
            x = attr.ib()
        setattr(C, method_name, sentinel)
        C = attr.s(**am_args)(C)
        assert (sentinel == getattr(C, method_name))

    .parametrize('init', [True, False])
    def test_respects_init_attrs_init(self, init):
        # __attrs_init__ is generated exactly when __init__ is suppressed.
        class C():
            x = attr.ib()
        C = attr.s(init=init)(C)
        assert (hasattr(C, '__attrs_init__') != init)

    (slots_outer=booleans(), slots_inner=booleans())
    def test_repr_qualname(self, slots_outer, slots_inner):
        (slots=slots_outer)
        class C():
            (slots=slots_inner)
            class D():
                pass
        assert ('C.D()' == repr(C.D()))
        # NOTE(review): GC is not defined in this excerpt — presumably a
        # module-level fixture class in the original test file; confirm.
        assert ('GC.D()' == repr(GC.D()))

    (slots_outer=booleans(), slots_inner=booleans())
    def test_repr_fake_qualname(self, slots_outer, slots_inner):
        (slots=slots_outer)
        class C():
            (repr_ns='C', slots=slots_inner)
            class D():
                pass
        assert ('C.D()' == repr(C.D()))

    (slots_outer=booleans(), slots_inner=booleans())
    def test_name_not_overridden(self, slots_outer, slots_inner):
        class C():
            (slots=slots_inner)
            class D():
                pass
        assert (C.D.__name__ == 'D')
        assert (C.D.__qualname__ == (C.__qualname__ + '.D'))

    .usefixtures('with_and_without_validation')
    def test_pre_init(self):
        class C():
            def __attrs_pre_init__(self2):
                self2.z = 30
        c = C()
        assert (30 == getattr(c, 'z', None))

    .usefixtures('with_and_without_validation')
    def test_pre_init_args(self):
        class C():
            x = attr.ib()
            def __attrs_pre_init__(self2, x):
                self2.z = (x + 1)
        c = C(x=10)
        assert (11 == getattr(c, 'z', None))

    .usefixtures('with_and_without_validation')
    def test_pre_init_kwargs(self):
        class C():
            x = attr.ib()
            y = attr.field(kw_only=True)
            def __attrs_pre_init__(self2, x, y):
                self2.z = ((x + y) + 1)
        c = C(10, y=11)
        assert (22 == getattr(c, 'z', None))

    .usefixtures('with_and_without_validation')
    def test_pre_init_kwargs_only(self):
        class C():
            y = attr.field(kw_only=True)
            def __attrs_pre_init__(self2, y):
                self2.z = (y + 1)
        c = C(y=11)
        assert (12 == getattr(c, 'z', None))

    .usefixtures('with_and_without_validation')
    def test_post_init(self):
        class C():
            x = attr.ib()
            y = attr.ib()
            def __attrs_post_init__(self2):
                self2.z = (self2.x + self2.y)
        c = C(x=10, y=20)
        assert (30 == getattr(c, 'z', None))

    .usefixtures('with_and_without_validation')
    def test_pre_post_init_order(self):
        # pre runs before attribute assignment, post after it.
        class C():
            x = attr.ib()
            def __attrs_pre_init__(self2):
                self2.z = 30
            def __attrs_post_init__(self2):
                self2.z += self2.x
        c = C(x=10)
        assert (40 == getattr(c, 'z', None))

    def test_types(self):
        # The type= argument is recorded on the collected Attribute objects.
        class C():
            x = attr.ib(type=int)
            y = attr.ib(type=str)
            z = attr.ib()
        assert (int is fields(C).x.type)
        assert (str is fields(C).y.type)
        assert (None is fields(C).z.type)

    def test_clean_class(self, slots):
        # The _CountingAttr placeholders must not survive class creation.
        (slots=slots)
        class C():
            x = attr.ib()
        x = getattr(C, 'x', None)
        assert (not isinstance(x, _CountingAttr))

    def test_factory_sugar(self):
        # factory=f is sugar for default=Factory(f).
        class C():
            x = attr.ib(factory=list)
        assert (Factory(list) == attr.fields(C).x.default)

    def test_sugar_factory_mutex(self):
        # factory= and default= cannot be combined.
        with pytest.raises(ValueError, match='mutually exclusive'):
            class C():
                x = attr.ib(factory=list, default=Factory(list))

    def test_sugar_callable(self):
        # factory= must receive a callable, not a Factory instance.
        with pytest.raises(ValueError, match='must be a callable'):
            class C():
                x = attr.ib(factory=Factory(list))

    def test_inherited_does_not_affect_hashing_and_equality(self):
        # Attributes compare and hash the same across the inheritance chain.
        class BaseClass():
            x = attr.ib()
        class SubClass(BaseClass):
            pass
        ba = attr.fields(BaseClass)[0]
        sa = attr.fields(SubClass)[0]
        assert (ba == sa)
        assert (hash(ba) == hash(sa))
# NOTE(review): this registration call reads as a class decorator whose
# "@" prefix appears lost in extraction — confirm against upstream.
_ARCH_REGISTRY.register()
class CamAwareBaseline(Baseline):
    """Baseline re-ID model that carries camera ids through training and
    adds a camera-classification loss term."""

    def forward(self, batched_inputs):
        outputs = super().forward(batched_inputs)
        if not self.training:
            return outputs
        # Attach camera ids so losses() can compute the camera loss.
        outputs['camids'] = batched_inputs['camids'].long().to(self.device)
        return outputs

    def losses(self, outs):
        loss_dict = super().losses(outs)
        cam_logits = outs['outputs']['cam_cls_outputs']
        cam_targets = outs['camids']
        # Camera classification: label smoothing 0.1, loss weight 0.1.
        loss_dict['loss_cam'] = (cross_entropy_loss(cam_logits, cam_targets, 0.1) * 0.1)
        return loss_dict
def run(params):
    """Train the configured model on the Criteo dataset, checkpointing the
    weights after every epoch."""
    dataset = get_criteo_dataset(params)
    train_split = dataset['train']
    test_split = dataset['test']
    train_data = (
        tf.data.Dataset.from_tensor_slices(
            (dict(train_split['x']), train_split['labels'], train_split['delay_labels'])
        )
        .batch(params['batch_size'])
        .prefetch(1)
    )
    # NOTE(review): test_data and best_acc are built but never used below —
    # presumably leftovers from an evaluation loop; confirm before removing.
    test_data = (
        tf.data.Dataset.from_tensor_slices((dict(test_split['x']), test_split['labels']))
        .batch(params['batch_size'])
        .prefetch(1)
    )
    model = get_model(params['model'], params)
    optimizer = get_optimizer(params['optimizer'], params)
    best_acc = 0
    for epoch in range(params['epoch']):
        train(model, optimizer, train_data, params)
        model.save_weights(params['model_ckpt_path'], save_format='tf')
class SaveLogger(object):
    """Callback that pickles a learner to disk every `save_every` iterations.

    If `file_name` contains '%d' it is filled with the iteration number.
    A learner's `inference_cache_` attribute, when present, is dropped from
    the pickle (and restored on the live object afterwards).
    """

    def __init__(self, file_name, save_every=10, verbose=0):
        self.file_name = file_name
        self.save_every = save_every
        self.verbose = verbose

    def __repr__(self):
        return ('%s(file_name="%s", save_every=%s)'
                % (self.__class__.__name__, self.file_name, self.save_every))

    def __call__(self, learner, iteration=0):
        """Save `learner` on the final call or every `save_every`-th iteration."""
        if (iteration != 'final') and (iteration % self.save_every):
            return
        target = self.file_name
        if '%d' in target:
            target = target % iteration
        if self.verbose > 0:
            print('saving %s to file %s' % (learner, target))
        self.save(learner, target)

    def save(self, learner, file_name):
        """Pickle `learner` to `file_name`, temporarily detaching its
        inference cache so it is not persisted."""
        with open(file_name, 'wb') as f:
            if hasattr(learner, 'inference_cache_'):
                cache = learner.inference_cache_
                learner.inference_cache_ = None
                pickle.dump(learner, f, (- 1))
                learner.inference_cache_ = cache
            else:
                pickle.dump(learner, f, (- 1))

    def load(self):
        """Unpickle and return the learner stored at self.file_name."""
        with open(self.file_name, 'rb') as f:
            return pickle.load(f)
# NOTE(review): "_db" reads as a stripped decorator/marker (e.g.
# "@mark.django_db" with the "@mark." prefix lost in extraction) — confirm
# against the upstream test module.
_db
def test_submit_talk_with_not_valid_language_code(graphql_client, user, conference_factory, topic_factory):
    """Submitting a talk with a language code outside the conference's
    allowed languages must return a validation error naming that code."""
    graphql_client.force_login(user)
    conference = conference_factory(topics=('my-topic',), languages=('it',), submission_types=('tutorial',), active_cfp=True, durations=('50',), audience_levels=('Beginner',))
    # 'fit' is not among the conference languages ('it' only).
    (resp, _) = _submit_talk(graphql_client, conference, languages=['fit'])
    assert (resp['data']['sendSubmission']['__typename'] == 'SendSubmissionErrors')
    assert (resp['data']['sendSubmission']['errors']['validationLanguages'] == ['Language (fit) is not allowed'])
# NOTE(review): the tuple below reads as a primitive-exposing decorator
# (e.g. "@expose(...)") whose "@expose" prefix appears lost in extraction —
# confirm against the upstream pycket prims module.
('read-linklet-bundle-hash', [values.W_InputPort], simple=False)
def read_linklet_bundle_hash(in_port, env, cont):
    """Fasl-read a linklet bundle hash table from `in_port`.

    The bundle is read relative to the current load-relative directory,
    must deserialize to a W_HashTable, and is returned (deserialized)
    through `return_value` in continuation style.
    """
    from pycket.racket_entry import get_primitive
    from pycket.fasl import Fasl
    from pycket.util import console_log
    current_load_relative_dir_path = get_primitive('current-load-relative-directory').get_cell_value(cont)
    # NOTE(review): fetched but never used below — confirm whether this
    # lookup is vestigial.
    fasl_to_s_exp = get_primitive('fasl->s-exp')
    with PerfRegion('fasl->s-exp'):
        bundle_map = Fasl(current_load_relative_dir_path).to_sexp_from_w_port(in_port)
    if (not isinstance(bundle_map, W_HashTable)):
        raise SchemeException(('got something that is not a table: %s' % bundle_map.tostring()))
    # Log level 7: very verbose debug output of the deserialized bundle.
    console_log(('BUNDLE SEXP FASL-READ from ZO: %s' % deserialize_loop(bundle_map).tostring()), 7)
    with PerfRegion('s-exp->ast'):
        return return_value(deserialize_loop(bundle_map), env, cont)
class DataTable():
    """Thin wrapper around a Bokeh DataTable backed by a ColumnDataSource.

    Rows may optionally be addressed by name (row_index_names) instead of
    by integer position.
    """

    def __init__(self, num_rows: int, num_columns: int, column_names: list, bokeh_document: Optional[BokehDocument], row_index_names: list=None):
        self.total = num_rows * num_columns
        self.row_names = row_index_names
        if row_index_names:
            # Extra leading 'index' column shows the row names.
            frame = pd.DataFrame(index=np.arange(num_rows), columns=(['index'] + column_names))
            frame['index'] = row_index_names
            # Maps row name -> numerical row position.
            self.row_index_to_row_name_map = self.map_row_names()
        else:
            frame = pd.DataFrame(index=np.arange(num_rows), columns=column_names)
        frame.fillna('', inplace=True)
        self.source = ColumnDataSource(data=frame)
        table_columns = [TableColumn(field=name, title=name) for name in frame.columns]
        self.data_table = BokehDataTable(source=self.source, columns=table_columns, width=1500)
        if bokeh_document is not None:
            bokeh_document.add(self.data_table)

    def update_table(self, column_name, row, value):
        """Set a single cell; `row` is an int position or a row name."""
        position = row if isinstance(row, int) else self.row_index_to_row_name_map[row]
        self.source.data[column_name][position] = value
        # Reassign the column to trigger Bokeh's change detection.
        self.source.data[column_name] = list(self.source.data[column_name])

    def map_row_names(self):
        """Return {row name: numerical index} for self.row_names."""
        return {name: position for position, name in enumerate(self.row_names)}
def random_inj_per_layer(pfi: core.FaultInjection, min_val: int=(- 1), max_val: int=1):
    """Declare one random-valued neuron fault per layer, all targeting the
    same randomly chosen batch element.

    Returns the fault-injection handle from declare_neuron_fault_injection.
    """
    target_batch = random_batch_element(pfi)
    batches, layers, dim1s, dim2s, dim3s, fault_values = [], [], [], [], [], []
    for layer_idx in range(pfi.get_total_layers()):
        layer, c, h, w = random_neuron_location(pfi, layer=layer_idx)
        batches.append(target_batch)
        layers.append(layer)
        dim1s.append(c)
        dim2s.append(h)
        dim3s.append(w)
        fault_values.append(random_value(min_val=min_val, max_val=max_val))
    return pfi.declare_neuron_fault_injection(
        batch=batches, layer_num=layers, dim1=dim1s, dim2=dim2s, dim3=dim3s, value=fault_values
    )
def _create_view(tensor, stride, inner_dims):
outdim = ((tensor.size(0) - stride) + 1)
size = (outdim, stride, *inner_dims)
inner_dim_prod = int(np.prod(inner_dims))
multidim_stride = ([inner_dim_prod, inner_dim_prod] + ([1] * len(inner_dims)))
return torch.as_strided(tensor, size=size, stride=multidim_stride) |
def loadLSTMLMCheckpoint(pathLSTMCheckpoint, pathData):
    """Load a fairseq LSTM language model from a checkpoint file.

    Returns (model, task): the first member of the loaded ensemble plus
    the language_modeling task set up over `pathData`.
    """
    args = argparse.Namespace(
        task='language_modeling',
        output_dictionary_size=(- 1),
        data=pathData,
        path=pathLSTMCheckpoint,
    )
    task = tasks.setup_task(args)
    models, _ = checkpoint_utils.load_model_ensemble([args.path], task=task)
    return (models[0], task)
class SpinBox(Input):
    """Numeric spin-box input widget with clamped value/min/max/step.

    All numeric state is stored stringly in the HTML attribute map.

    NOTE(review): the bare ``_attribute_decorator(...)``, ``_value.setter``,
    ``_min.setter``, ``_max.setter``, ``_step.setter``, ``_set_on_listener(...)``
    and ``_event`` lines read as decorators whose ``@`` prefix appears lost
    in extraction — confirm against the upstream remi source.
    """
    _attribute_decorator('WidgetSpecific', 'Defines the actual value for the spin box.', float, {'possible_values': '', 'min': (- 65535), 'max': 65535, 'default': 0, 'step': 1})
    def attr_value(self):
        # Current value as stored in the HTML attributes ('0' by default).
        return self.attributes.get('value', '0')
    _value.setter
    def attr_value(self, value):
        self.attributes['value'] = str(value)
    _attribute_decorator('WidgetSpecific', 'Defines the minimum value for the spin box.', float, {'possible_values': '', 'min': (- 65535), 'max': 65535, 'default': 0, 'step': 1})
    def attr_min(self):
        return self.attributes.get('min', '0')
    _min.setter
    def attr_min(self, value):
        self.attributes['min'] = str(value)
    _attribute_decorator('WidgetSpecific', 'Defines the maximum value for the spin box.', float, {'possible_values': '', 'min': (- 65535), 'max': 65535, 'default': 0, 'step': 1})
    def attr_max(self):
        return self.attributes.get('max', '65535')
    _max.setter
    def attr_max(self, value):
        self.attributes['max'] = str(value)
    _attribute_decorator('WidgetSpecific', 'Defines the step value for the spin box.', float, {'possible_values': '', 'min': 0.0, 'max': 65535.0, 'default': 0, 'step': 1})
    def attr_step(self):
        return self.attributes.get('step', '1')
    _step.setter
    def attr_step(self, value):
        self.attributes['step'] = str(value)
    def __init__(self, default_value=0, min_value=0, max_value=65535, step=1, allow_editing=True, **kwargs):
        """Build an HTML number input with min/max/step attributes.

        allow_editing: when True, also accept backspace/delete and the
        minus/comma keys so the user can type values directly.
        """
        super(SpinBox, self).__init__('number', str(default_value), **kwargs)
        self.attributes['min'] = str(min_value)
        self.attributes['max'] = str(max_value)
        self.attributes['step'] = str(step)
        # Client-side key filter: digits always allowed; editing keys and
        # Enter only when allow_editing is True.
        js = 'var key = event.keyCode || event.charCode;'
        js += 'return (event.charCode >= 48 && event.charCode <= 57)'
        if allow_editing:
            js += ' || (key == 8 || key == 46 || key == 45|| key == 44 )'
            js += ' || (key == 13)'
        self.attributes[self.EVENT_ONKEYPRESS] = ('%s;' % js)
        # Enter key (13) sends the current value back as an onchange event.
        self.attributes[self.EVENT_ONKEYUP] = ("var key = event.keyCode || event.charCode;if(key==13){var params={};params['value']=document.getElementById('%(id)s').value;remi.sendCallbackParam('%(id)s','%(evt)s',params); return true;}return false;" % {'id': self.identifier, 'evt': self.EVENT_ONCHANGE})
    _set_on_listener('(self, emitter, value)')
    _event
    def onchange(self, value):
        """Clamp the incoming value to [min, max] and store it.

        Uses int arithmetic when value/min/max all parse as int, float
        otherwise. On parse failure, falls back to the stored 'value'
        attribute.
        """
        _type = int
        try:
            (_, _, _) = (int(value), int(self.attributes['min']), int(self.attributes['max']))
        except:
            _type = float
        try:
            _value = max(_type(value), _type(self.attributes['min']))
            _value = min(_type(_value), _type(self.attributes['max']))
            self.disable_update()
            self.attributes['value'] = str(_value)
            self.enable_update()
            if (_type(value) != _value):
                # NOTE(review): calling onchange() on the attributes dict
                # looks wrong (dicts have no onchange); possibly a mangled
                # re-dispatch of the widget's own onchange — confirm upstream.
                self.attributes.onchange()
        except:
            # Value unusable: fall back to the currently stored attribute.
            _type = int
            try:
                (_, _, _) = (int(self.attributes['value']), int(self.attributes['min']), int(self.attributes['max']))
            except:
                _type = float
            _value = _type(self.attributes['value'])
            # NOTE(review): same suspicious attributes.onchange() call as above.
            self.attributes.onchange()
        return (_value,)
# NOTE(review): the bare string below reads as a behave step decorator
# ("@then('...')") whose "@then" prefix appears lost in extraction —
# confirm against the upstream feature steps module.
('the deleted latent style is not in the latent styles collection')
def then_the_deleted_latent_style_is_not_in_the_collection(context):
    """Assert that 'Normal' was removed from the latent styles collection.

    EAFP: indexing must raise KeyError; falling through means the style is
    still present, which is the failure case.
    """
    latent_styles = context.latent_styles
    try:
        latent_styles['Normal']
    except KeyError:
        return
    raise AssertionError('Latent style not deleted')
def init_argparse():
    """Build the CLI argument parser for the Sudomy network-graph generator.

    Returns a configured argparse.ArgumentParser; callers invoke parse_args().
    """
    parser = argparse.ArgumentParser(
        usage='%(prog)s --domain example.com --file subdomains2ips.txt',
        description='Generate Network Graph For Sudomy.')
    # Consistency fix: --domain previously had no help text while --file did.
    parser.add_argument('--domain', type=str, help='target domain, e.g. example.com')
    parser.add_argument('--file', type=str, help='subdomains2ips.txt')
    return parser
def build_dataset_iter(datasets, fields, opt, is_train=True, task_type='task'):
    """Build a DatasetIter over `datasets`.

    Batch size comes from opt.batch_size / opt.batch_size2 (training,
    selected by task_type) or opt.valid_batch_size (validation). With
    token-based batching during training, a dynamic batch_size_fn tracks the
    largest source/target length seen in the batch being assembled.
    """
    if not is_train:
        batch_size = opt.valid_batch_size
    elif task_type == 'task':
        batch_size = opt.batch_size
    else:
        batch_size = opt.batch_size2

    batch_size_fn = None
    if is_train and opt.batch_type == 'tokens':
        def batch_size_fn(new, count, sofar):
            # Running maxima across the batch under construction; reset
            # whenever a new batch starts (count == 1). Module-level globals
            # persist the state between calls, as in the upstream pattern.
            global max_src_in_batch, max_tgt_in_batch
            if count == 1:
                max_src_in_batch = 0
                max_tgt_in_batch = 0
            max_src_in_batch = max(max_src_in_batch, len(new.src) + 2)
            if hasattr(new, 'tgt'):
                max_tgt_in_batch = max(max_tgt_in_batch, len(new.tgt) + 1)
            else:
                max_tgt_in_batch = max(max_tgt_in_batch, len(new.tgt2) + 1)
            # Effective size is the worse of the two token estimates.
            return max(count * max_src_in_batch, count * max_tgt_in_batch)

    device = 'cuda' if opt.gpu_ranks else 'cpu'
    return DatasetIter(datasets, fields, batch_size, batch_size_fn, device, is_train)
def _create_completion(model: str, messages: list, stream: bool, temperature: float=0.7, **kwargs):
    """Stream completion tokens from a remote chat endpoint.

    NOTE(review): the requests.post() line below is garbled — the URL string
    literal appears truncated by extraction (unterminated quote); the
    endpoint must be restored from the original source.
    NOTE(review): the `temperature` and `stream` parameters are accepted but
    the payload hard-codes 0.7 / True — confirm whether they should be
    forwarded.
    """
    payload = {'temperature': 0.7, 'messages': messages, 'model': model, 'stream': True}
    headers = {'user-agent': 'ChatX/39 CFNetwork/1408.0.4 Darwin/22.5.0'}
    response = requests.post(' json=payload, headers=headers, stream=True)
    for token in response.iter_lines():
        if (b'The model: `gpt-4` does not exist' in token):
            print('error, retrying...')
            # NOTE(review): the recursive retry creates a generator that is
            # never consumed, so the retry yields nothing to the caller —
            # confirm whether this should be `yield from`.
            _create_completion(model=model, messages=messages, stream=stream, temperature=temperature, **kwargs)
        if (b'content' in token):
            # SSE line format: "data: {json}" — extract the delta content.
            token = json.loads(token.decode('utf-8').split('data: ')[1])['choices'][0]['delta'].get('content')
            if token:
                (yield token)
def cylinder(bm, radius=1, height=2, segs=10):
    """Build a cylinder in `bm`, centred on the origin along Z.

    A capped circle is extruded upward by `height`, then the whole shape is
    shifted down by height/2. Returns {'verts': base + top ring vertices}.
    """
    base = bmesh.ops.create_circle(bm, cap_ends=True, cap_tris=False, segments=segs, radius=radius)
    base_verts = base['verts']
    cap_faces = list(base_verts[0].link_faces)
    extruded = bmesh.ops.extrude_discrete_faces(bm, faces=cap_faces)
    top_verts = extruded['faces'][(- 1)].verts
    bmesh.ops.translate(bm, verts=top_verts, vec=(0, 0, height))
    result = {'verts': (base_verts + list(top_verts))}
    # Recentre vertically so the cylinder is symmetric about z=0.
    bmesh.ops.translate(bm, verts=result['verts'], vec=(0, 0, ((- height) / 2)))
    return result
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.