def getOpenFileName(*, parent, title, filter='', config: 'SimpleConfig') -> Optional[str]:
directory = config.get('io_dir', os.path.expanduser('~'))
(fileName, __) = QFileDialog.getOpenFileName(parent, title, directory, filter)
if (fileName and (directory != os.path.dirname(fileName))):
config.set_key('io_dir', os.path.dirname(fileName), True)
return fileName |
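# Usage note (hedged sketch; `window` and the title are hypothetical): the dialog
# remembers the last-used directory via the config. The 'io_dir' key is updated
# only when the chosen file lives outside the directory the dialog opened in, so
# repeated calls reopen at the user's last location, e.g.:
#   path = getOpenFileName(parent=window, title='Open wallet', config=config)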
class TestInvalidityDate():
def test_invalid_invalidity_date(self):
with pytest.raises(TypeError):
x509.InvalidityDate('notadate')
def test_eq(self):
invalid1 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 1))
invalid2 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 1))
assert (invalid1 == invalid2)
def test_ne(self):
invalid1 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 1))
invalid2 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 2))
assert (invalid1 != invalid2)
assert (invalid1 != object())
def test_repr(self):
invalid1 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 1))
assert (repr(invalid1) == '<InvalidityDate(invalidity_date=2015-01-01 01:01:00)>')
def test_hash(self):
invalid1 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 1))
invalid2 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 1))
invalid3 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 2))
assert (hash(invalid1) == hash(invalid2))
assert (hash(invalid1) != hash(invalid3))
def test_public_bytes(self):
ext = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 1))
assert (ext.public_bytes() == b'\x18\x0f20150101010100Z') |
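# DER background for the expected bytes above: b'\x18' is the ASN.1
# GeneralizedTime tag, b'\x0f' the length (15 bytes), followed by the ASCII
# timestamp '20150101010100Z' for 2015-01-01 01:01:00 UTC.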
@pytest.mark.parametrize(['summary', 'details', 'description'], [('fakesummary', 'fakedetails', 'fakesummary'), ('fakesummary\nanother line', 'fakedetails', 'fakesummary another line'), (None, 'fakedetails', 'fakedetails'), (None, 'fakedetails\nanother line', 'fakedetails another line'), (None, None, 'N/A')])
def test_pypi_vuln_description_fallbacks(monkeypatch, cache_dir, summary, details, description):
def get_mock_response():
class MockResponse():
def raise_for_status(self):
pass
def json(self):
return {'vulnerabilities': [{'aliases': ['foo', 'bar'], 'id': 'VULN-0', 'summary': summary, 'details': details, 'fixed_in': ['1.1', '1.4'], 'published': '2019-08-24T14:15:22Z'}]}
return MockResponse()
monkeypatch.setattr(service.pypi, 'caching_session', (lambda _: get_mock_session(get_mock_response)))
pypi = service.PyPIService(cache_dir)
dep = service.ResolvedDependency('foo', Version('1.0'))
results: dict[(service.Dependency, list[service.VulnerabilityResult])] = dict(pypi.query_all(iter([dep])))
assert (len(results) == 1)
assert (dep in results)
assert (len(results[dep]) == 1)
assert (results[dep][0] == service.VulnerabilityResult(id='VULN-0', description=description, fix_versions=[Version('1.1'), Version('1.4')], aliases={'foo', 'bar'}, published=datetime(2019, 8, 24, 14, 15, 22))) |
def test_direct_origin_does_not_download_url_dependency_when_cached(fixture_dir: FixtureDirGetter, mocker: MockerFixture) -> None:
artifact_cache = MagicMock()
artifact_cache.get_cached_archive_for_link = MagicMock(return_value=(fixture_dir('distributions') / 'demo-0.1.2-py2.py3-none-any.whl'))
direct_origin = DirectOrigin(artifact_cache)
url = 'https://example.com/demo-0.1.2-py2.py3-none-any.whl'  # hypothetical URL (original elided); the wheel filename is what the test relies on
download_file = mocker.patch('poetry.packages.direct_origin.download_file', side_effect=Exception('download_file should not be called'))
package = direct_origin.get_package_from_url(url)
assert (package.name == 'demo')
artifact_cache.get_cached_archive_for_link.assert_called_once_with(Link(url), strict=True, download_func=download_file) |
def test_invalid_tuple_sizes():
with pytest.raises(ValueError, match='HUD color must be a tuple of 3 ints.'):
PrimeCosmeticPatches(hud_color=(0, 0, 0, 0))
with pytest.raises(ValueError, match='Suit color rotations must be a tuple of 4 ints.'):
PrimeCosmeticPatches(suit_color_rotations=(0, 0, 0)) |
class ConfigDialog(QtWidgets.QDialog):
attributes = ['show_cursor', 'default_gf_dir', 'nvectors', 'vector_color', 'vector_relative_length', 'vector_pen_thickness', 'view_east', 'view_north', 'view_down', 'view_los']
def __init__(self, *args, **kwargs):
QtWidgets.QDialog.__init__(self, *args, **kwargs)
self.completer = QtWidgets.QCompleter()
self.completer_model = QtGui.QFileSystemModel(self.completer)
self.completer.setModel(self.completer_model)
self.completer.setMaxVisibleItems(8)
loadUi(get_resource('dialog_config.ui'), self)
self.ok_button.released.connect(self.setAttributes)
self.ok_button.released.connect(self.close)
self.apply_button.released.connect(self.setAttributes)
self.vector_color_picker = QtWidgets.QColorDialog(self)
self.vector_color_picker.setCurrentColor(QtGui.QColor(*getConfig().vector_color))
self.vector_color_picker.setOption(self.vector_color_picker.ShowAlphaChannel)
self.vector_color_picker.colorSelected.connect(self.updateVectorColor)
self.vector_color_picker.setModal(True)
self.vector_color.clicked.connect(self.vector_color_picker.show)
self.vector_color.setValue = self.setButtonColor
self.vector_color.value = self.getButtonColor
self.chooseStoreDirButton.released.connect(self.chooseStoreDir)
self.completer_model.setRootPath('')
self.completer.setParent(self.default_gf_dir)
self.default_gf_dir.setCompleter(self.completer)
self.getAttributes()
@QtCore.pyqtSlot()
def chooseStoreDir(self):
folder = QtWidgets.QFileDialog.getExistingDirectory(self, 'Open Pyrocko GF Store', os.getcwd())
if (folder != ''):
self.default_gf_dir.setText(folder)
self.setAttributes()
def getAttributes(self):
for attr in self.attributes:
qw = self.__getattribute__(attr)
value = getConfig().__getattribute__(attr)
if isinstance(value, bool):
qw.setChecked(value)
elif isinstance(value, str):
qw.setText(value)
else:
qw.setValue(value)
@QtCore.pyqtSlot()
def setAttributes(self):
for attr in self.attributes:
qw = self.__getattribute__(attr)
if isinstance(qw, QtWidgets.QCheckBox):
value = qw.isChecked()
elif isinstance(qw, QtWidgets.QLineEdit):
value = str(qw.text())
else:
value = qw.value()
getConfig().__setattr__(attr, value)
getConfig().saveConfig()
def setButtonColor(self, rgba):
self.vector_color.setStyleSheet(('background-color: rgb(%d, %d, %d, %d);border: none;' % rgba))
def getButtonColor(self):
return getConfig().vector_color
@QtCore.pyqtSlot(QtGui.QColor)
def updateVectorColor(self, qcolor):
getConfig().vector_color = (qcolor.red(), qcolor.green(), qcolor.blue(), qcolor.alpha())
self.setButtonColor(getConfig().vector_color) |
class Message(TLObject):
ID = 0x5bb8e511  # assumed: standard MTProto 'message' constructor ID (original value elided)
__slots__ = ['msg_id', 'seq_no', 'length', 'body']
QUALNAME = 'Message'
def __init__(self, body: TLObject, msg_id: int, seq_no: int, length: int):
self.msg_id = msg_id
self.seq_no = seq_no
self.length = length
self.body = body
@staticmethod
def read(data: BytesIO, *args: Any) -> 'Message':
msg_id = Long.read(data)
seq_no = Int.read(data)
length = Int.read(data)
body = data.read(length)
return Message(TLObject.read(BytesIO(body)), msg_id, seq_no, length)
def write(self, *args: Any) -> bytes:
b = BytesIO()
b.write(Long(self.msg_id))
b.write(Int(self.seq_no))
b.write(Int(self.length))
b.write(self.body.write())
return b.getvalue() |
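# Wire layout written by Message.write and framed back by Message.read:
#   msg_id: 8 bytes (Long), seq_no: 4 bytes (Int), length: 4 bytes (Int), body: `length` bytes.
# read() slices exactly `length` bytes before handing the body to TLObject.read,
# so any trailing data in the stream is left untouched.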
def cmd_venv_create(options, root, python, benchmarks):
from . import _venv
from .venv import Requirements, VenvForBenchmarks
if _venv.venv_exists(root):
sys.exit(f'ERROR: the virtual environment already exists at {root}')
requirements = Requirements.from_benchmarks(benchmarks)
venv = VenvForBenchmarks.ensure(root, (python or sys.executable), inherit_environ=options.inherit_environ)
venv.ensure_pip()
try:
venv.install_pyperformance()
venv.ensure_reqs(requirements)
except _venv.RequirementsInstallationFailedError:
sys.exit(1)
print(('The virtual environment %s has been created' % root)) |
class Effect1585(BaseEffect):
type = 'passive'
@staticmethod
def handler(fit, skill, context, projectionRange, **kwargs):
fit.modules.filteredItemBoost((lambda mod: mod.item.requiresSkill('Capital Energy Turret')), 'damageMultiplier', (skill.getModifiedItemAttr('damageMultiplierBonus') * skill.level), **kwargs) |
class TDK_Gen40_38(TDK_Lambda_Base):
voltage_values = [0, 40]
current_values = [0, 38]
over_voltage_values = [2, 44]
under_voltage_values = [0, 38]
def __init__(self, adapter, name='TDK Lambda Gen40-38', address=6, **kwargs):
super().__init__(adapter, name, address, **kwargs) |
def _weighting(filter_type, first, last):
third_oct_bands = third(12.5, 20000.0).tolist()
low = third_oct_bands.index(first)
high = third_oct_bands.index(last)
if (filter_type == 'a'):
freq_weightings = THIRD_OCTAVE_A_WEIGHTING
elif (filter_type == 'c'):
freq_weightings = THIRD_OCTAVE_C_WEIGHTING
return freq_weightings[low:(high + 1)] |
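# Hedged usage sketch: assuming third(12.5, 20000.0) returns the standard
# third-octave centre frequencies, the A-weighting gains for the 20 Hz..20 kHz
# bands would be obtained with:
#   gains = _weighting('a', 20.0, 20000.0)
# Note that any filter_type other than 'a' or 'c' leaves freq_weightings
# unbound and raises UnboundLocalError.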
class Effect11373(BaseEffect):
type = 'passive'
@staticmethod
def handler(fit, src, context, projectionRange, **kwargs):
fit.modules.filteredItemBoost((lambda mod: mod.item.requiresSkill('Capital Shield Operation')), 'shieldBonus', src.getModifiedItemAttr('shipBonusDreadnoughtM1'), skill='Minmatar Dreadnought', **kwargs)
fit.modules.filteredItemBoost((lambda mod: mod.item.requiresSkill('Capital Repair Systems')), 'armorDamageAmount', src.getModifiedItemAttr('shipBonusDreadnoughtM1'), skill='Minmatar Dreadnought', **kwargs) |
@hydra.main(version_base=None, config_path='.', config_name='gpt2_train_cfg')
def main(cfg: DictConfig):
ddp_setup()
gpt_cfg = GPTConfig(**cfg['gpt_config'])
opt_cfg = OptimizerConfig(**cfg['optimizer_config'])
data_cfg = DataConfig(**cfg['data_config'])
trainer_cfg = TrainerConfig(**cfg['trainer_config'])
(model, optimizer, train_data, test_data) = get_train_objs(gpt_cfg, opt_cfg, data_cfg)
trainer = Trainer(trainer_cfg, model, optimizer, train_data, test_data)
trainer.train()
destroy_process_group() |
@app.route('/comparison', methods=['GET', 'POST'])
def comparison():
form = MainForm()
if form.validate_on_submit():
if form.upload.data:
process_upload(form)
if form.upload2.data:
process_upload(form, True)
if ((not session.get('SAVEPATH')) or (not session.get('SAVEPATH2'))):
flash(Markup('<span class="glyphicon glyphicon-exclamation-sign" aria-hidden="true"></span><span class="sr-only">Error:</span> Please upload <strong>two (2)</strong> input files.'), 'danger alert-dismissible')
return render_template('comparison.html', title='Comparison', form=form)
else:
process_form(form, True)
return render_template('comparison.html', title='Comparison', form=form, jsmol=True, plot=compare())
return render_template('comparison.html', title='Comparison', form=form) |
class TenluaVn(BaseAccount):
__name__ = 'TenluaVn'
__type__ = 'account'
__version__ = '0.02'
__status__ = 'testing'
__description__ = 'TenluaVn account plugin'
__license__ = 'GPLv3'
__authors__ = [('GammaC0de', 'nitzo2001[AT]yahoo[DOT]com')]
API_URL = ''  # API endpoint URL elided in source
def api_request(self, method, **kwargs):
kwargs['a'] = method
sid = kwargs.pop('sid', None)
return json.loads(self.load(self.API_URL, get=({'sid': sid} if (sid is not None) else {}), post=json.dumps([kwargs])))
def grab_info(self, user, password, data):
user_info = self.api_request('user_info', sid=data['sid'])[0]
validuntil = time.mktime(time.strptime(user_info['endGold'], '%d-%m-%Y'))
premium = (user_info['free_used'] != 'null')
return {'premium': premium, 'trafficleft': (- 1), 'validuntil': validuntil}
def signin(self, user, password, data):
try:
login_info = self.api_request('user_login', user=user, password=password, permanent=False)
except BadHeader as exc:
if (exc.code == 401):
self.fail_login()
else:
self.fail_login(self._('BadHeader {}').format(exc.code))
data['sid'] = login_info[0] |
class NetworkCIFAR(nn.Module):
def __init__(self, C, num_classes, layers, auxiliary, genotype, reweight=False):
super(NetworkCIFAR, self).__init__()
self._layers = layers
self._auxiliary = auxiliary
self.drop_path_prob = 0
stem_multiplier = 3
C_curr = (stem_multiplier * C)
self.stem = nn.Sequential(nn.Conv2d(3, C_curr, 3, padding=1, bias=False), nn.BatchNorm2d(C_curr))
(C_prev_prev, C_prev, C_curr) = (C_curr, C_curr, C)
self.cells = nn.ModuleList()
reduction_prev = False
for i in range(layers):
if (i in [(layers // 3), ((2 * layers) // 3)]):
C_curr *= 2
reduction = True
else:
reduction = False
cell = Cell(genotype, C_prev_prev, C_prev, C_curr, reduction, reduction_prev, reweight=reweight)
reduction_prev = reduction
self.cells += [cell]
(C_prev_prev, C_prev) = (C_prev, (cell.multiplier * C_curr))
if (i == ((2 * layers) // 3)):
C_to_auxiliary = C_prev
if auxiliary:
self.auxiliary_head = AuxiliaryHeadCIFAR(C_to_auxiliary, num_classes)
self.global_pooling = nn.AdaptiveAvgPool2d(1)
self.classifier = nn.Linear(C_prev, num_classes)
def forward(self, input):
logits_aux = None
s0 = s1 = self.stem(input)
for (i, cell) in enumerate(self.cells):
(s0, s1) = (s1, cell(s0, s1, self.drop_path_prob))
if (i == ((2 * self._layers) // 3)):
if (self._auxiliary and self.training):
logits_aux = self.auxiliary_head(s1)
out = self.global_pooling(s1)
logits = self.classifier(out.view(out.size(0), (- 1)))
return (logits, logits_aux) |
def decompose_union(expected_type: Value, parent_value: Value, ctx: CanAssignContext, exclude_any: bool) -> Optional[Tuple[(BoundsMap, Value)]]:
value = unannotate(parent_value)
if isinstance(value, MultiValuedValue):
bounds_maps = []
remaining_values = []
for val in value.vals:
can_assign = can_assign_maybe_exclude_any(expected_type, val, ctx, exclude_any)
if isinstance(can_assign, CanAssignError):
remaining_values.append(val)
else:
bounds_maps.append(can_assign)
if bounds_maps:
result = unify_bounds_maps(bounds_maps)
assert remaining_values, f'all union members matched between {expected_type} and {parent_value}'
return (result, unite_values(*remaining_values))
return None |
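# Behavior sketch: for parent_value Union[int, str] with expected_type int, the
# int member matches and str remains, so the function returns (unified bounds
# from the int match, str), i.e. the caller gets the narrowed remainder.
# Non-union inputs (after unannotate) fall through and return None.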
def make_field(arr, dtype=None):
dtype = (dtype or arr.dtype)
if (arr.name is None):
name = 'values'
else:
name = arr.name
field = {'name': name, 'type': as_json_table_type(dtype)}
if is_categorical_dtype(arr):
if hasattr(arr, 'categories'):
cats = arr.categories
ordered = arr.ordered
else:
cats = arr.cat.categories
ordered = arr.cat.ordered
field['constraints'] = {'enum': list(cats)}
field['ordered'] = ordered
elif is_datetime64tz_dtype(arr):
if hasattr(arr, 'dt'):
field['tz'] = arr.dt.tz.zone
else:
field['tz'] = arr.tz.zone
return field |
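# Illustrative output (hedged): for s = pd.Series(list('aab'), dtype='category',
# name='letters'), make_field(s) would resemble
#   {'name': 'letters', 'type': 'any',
#    'constraints': {'enum': ['a', 'b']}, 'ordered': False}
# with the categorical branch supplying 'constraints' and 'ordered'.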
class DependingTransition(Transition):
def __init__(self, source, dest, conditions=None, unless=None, before=None, after=None, prepare=None, **kwargs):
self._result = self._dest = None
super(DependingTransition, self).__init__(source, dest, conditions, unless, before, after, prepare)
if isinstance(dest, dict):
try:
self._func = kwargs.pop('depends_on')
except KeyError:
raise AttributeError("A multi-destination transition requires a 'depends_on'")
else:
self.execute = super(DependingTransition, self).execute
def execute(self, event_data):
func = (getattr(event_data.model, self._func) if isinstance(self._func, string_types) else self._func)
self._result = func(*event_data.args, **event_data.kwargs)
super(DependingTransition, self).execute(event_data)
@property
def dest(self):
return (self._dest[self._result] if (self._result is not None) else self._dest)
@dest.setter
def dest(self, value):
self._dest = value |
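# Usage sketch (hedged; names hypothetical): a transition built with
# dest={True: 'open', False: 'closed'} and depends_on='check' resolves its
# destination at execute() time; _result caches the callable's return value
# and the dest property indexes the dict with it.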
def clean_voltage(df):
repl_voltage = {'medium': '33000', '19.1 kV': '19100', 'high': '220000', '240 VAC': '240', '2*220000': '220000;220000', 'KV30': '30kV'}
df.dropna(subset=['voltage'], inplace=True)
df['voltage'] = df['voltage'].astype(str).replace(repl_voltage).str.lower().str.replace(' ', '').str.replace('_', '').str.replace('kv', '000').str.replace('v', '')
return df |
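# Normalization examples implied by repl_voltage and the chained str ops:
#   'medium'   -> '33000'   (dict replacement)
#   'KV30'     -> '30000'   (dict gives '30kV', lowercased, 'kv' -> '000')
#   '240 VAC'  -> '240'     (dict replacement)
#   '66 kV'    -> '66000'   (spaces stripped, 'kv' -> '000')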
def main(args):
at_step = args.step
output_dir_name = args.output_dir
layer_name = args.layer_name
block_type = args.block_type
postfix = args.postfix
probe_type = args.probe_type
normalized = args.normalized
smoothed = args.smoothed
lasso = (True if (args.lasso == 'yes') else False)
l1_lambda = float(args.l1_lambda)
weights_postfix = ''
print('At denoising step', (at_step + 1))
if (block_type == 'resnets'):
torch_layer_type = torch.nn.Conv2d
elif (block_type == 'attentions'):
torch_layer_type = torch.nn.Linear
probe_checkpoints_dir = f'probe_checkpoints/large_syn_dataset_continuous/at_step_{at_step}/'
if (not os.path.exists(probe_checkpoints_dir)):
os.makedirs(probe_checkpoints_dir)
probe_accuracy_dir = f'probe_accuracy/large_syn_dataset_continuous/at_step_{at_step}/'
if (not os.path.exists(probe_accuracy_dir)):
os.makedirs(probe_accuracy_dir)
train_split_prompts_seeds = pd.read_csv('train_split_prompts_seeds.csv', encoding='ISO-8859-1')
test_split_prompts_seeds = pd.read_csv('test_split_prompts_seeds.csv', encoding='ISO-8859-1')
combo_df = pd.concat([train_split_prompts_seeds, test_split_prompts_seeds])
dataset_path = 'datasets/images/'
files = os.listdir(dataset_path)
files = [file for file in files if file.endswith('.png')]
prompt_indexes = [int(file[(file.find('prompt_') + 7):file.find('_seed')]) for file in files]
sample_seeds = [int(file[(file.find('seed_') + 5):file.find('.png')]) for file in files]
vae_pretrained = 'CompVis/stable-diffusion-v1-4'
CLIPtokenizer_pretrained = 'openai/clip-vit-large-patch14'
CLIPtext_encoder_pretrained = 'openai/clip-vit-large-patch14'
denoise_unet_pretrained = 'CompVis/stable-diffusion-v1-4'
(vae, tokenizer, text_encoder, unet, scheduler) = _init_models(vae_pretrained=vae_pretrained, CLIPtokenizer_pretrained=CLIPtokenizer_pretrained, CLIPtext_encoder_pretrained=CLIPtext_encoder_pretrained, denoise_unet_pretrained=denoise_unet_pretrained)
for block in ['down', 'mid', 'up']:
if (block == 'down'):
i_s = 0
i_e = 3
layer_range = 2
elif (block == 'up'):
i_s = 1
i_e = 4
layer_range = 3
elif (block == 'mid'):
i_s = 0
i_e = 1
if (block_type == 'resnets'):
layer_range = 2
else:
layer_range = 1
for block_ind in range(i_s, i_e):
data_path = 'datasets'
for (prompt_ind, seed_num) in zip(prompt_indexes, sample_seeds):
features = OrderedDict()
for (name, module) in unet.named_modules():
if isinstance(module, torch_layer_type):
features[name] = ModuleHook(module)
prompt = combo_df.loc[(combo_df['prompt_inds'] == prompt_ind)]['prompts'].item()
image = generate_image(prompt, seed_num, num_inference_steps=15, net=unet, tokenizer=tokenizer, text_encoder=text_encoder, scheduler=scheduler, vae=vae, stop_at_step=(at_step + 1))
for feature in features.values():
feature.close()
for layer_ind in range(layer_range):
dataset_path = 'internal_repres/'
dataset_path += f'{block}_{block_ind}_{output_dir_name}_{layer_ind}'
if (block == 'mid'):
chosen_layer_name = f'mid_block.{block_type}.{layer_ind}.{layer_name}'
else:
chosen_layer_name = f'{block}_blocks.{block_ind}.{block_type}.{layer_ind}.{layer_name}'
sel_output = features[chosen_layer_name].features[at_step]
sel_output = sel_output.unsqueeze(0).cpu().detach()
if (not os.path.exists(os.path.join(data_path, dataset_path))):
os.makedirs(os.path.join(data_path, dataset_path))
with open(os.path.join(data_path, dataset_path, f'{block}_{block_ind}_layer_{layer_ind}_{prompt_ind}_{seed_num}.pkl'), 'wb') as outfile:
pickle.dump(sel_output, outfile)
for layer_ind in range(layer_range):
dataset_path = 'internal_repres/'
layer = f'{block}_{block_ind}_{output_dir_name}_{layer_ind}'
dataset_path += layer
dataset = ProbeDEDataset('datasets/images/', os.path.join(data_path, dataset_path), 'datasets/depth_gt/', pre_load=True, target_transform=scale_and_norm, transform=min_max_norm_image, scale_factor=1)
input_dim = input_dims_dict[f'{block}_{block_ind}']
scale = scale_dict[f'{block}_{block_ind}']
weights_postfix = ''
if (probe_type.lower() == 'linear'):
probe = probeLinearDense(input_dim, 1, scale, use_bias=True).to(torch_device)
weights_postfix += ''
elif (probe_type.lower() == 'linear-no-bias'):
probe = probeLinearDense(input_dim, 1, scale, use_bias=False).to(torch_device)
weights_postfix += '_linear_no_bias'
elif (probe_type.lower() == 'nonlinear'):
probe = probeTwoNonLinearDense(input_dim, 1, scale, use_bias=True, mid_channels=(input_dim // 2)).to(torch_device)
weights_postfix += '_nonlinear'
elif (probe_type.lower() == 'nonlinear-no-bias'):
probe = probeTwoNonLinearDense(input_dim, 1, scale, use_bias=False, mid_channels=(input_dim // 2)).to(torch_device)
weights_postfix += '_nonlinear_no_bias'
generator = torch.manual_seed(100)
with open('train_indices.pkl', 'rb') as infile:
train_indices = pickle.load(infile)
with open('test_indices.pkl', 'rb') as infile:
test_indices = pickle.load(infile)
training_data = torch.utils.data.Subset(dataset, train_indices)
test_data = torch.utils.data.Subset(dataset, test_indices)
train_dataloader = DataLoader(training_data, batch_size=4, shuffle=True)
test_dataloader = DataLoader(test_data, batch_size=32, shuffle=False)
optimizer = torch.optim.Adam(probe.parameters(), lr=0.001)
max_epoch = 30
loss_func = nn.HuberLoss()
if (smoothed.lower() == 'yes'):
smooth_loss_func = InverseDepthSmoothnessLoss()
weights_postfix += ''
elif (smoothed.lower() == 'no'):
smooth_loss_func = None
weights_postfix += '_unsmoothed'
min_loss = 1000000.0
for epoch in range(1, (max_epoch + 1)):
verbosity = False
if (epoch == max_epoch):
verbosity = True
print(f'''
{block} Block {block_ind} Layer {layer_ind} {layer_name}''')
train_results = train_continuous_depth(probe, torch_device, train_dataloader, optimizer, epoch, loss_func=loss_func, verbose_interval=None, head=None, verbosity=False, smooth_loss=smooth_loss_func, alpha=1)
test_results = test_continuous_depth(probe, torch_device, test_dataloader, loss_func=loss_func, return_raw_outputs=True, head=None, scheduler=None, verbosity=verbosity, smooth_loss=smooth_loss_func, alpha=1)
if (test_results[0] < min_loss):
min_loss = test_results[0]
torch.save(probe.state_dict(), f'probe_checkpoints/large_syn_dataset_continuous/at_step_{at_step}/regression_probe_{layer}{weights_postfix}.pth')
with open(f'probe_accuracy/large_syn_dataset_continuous/at_step_{at_step}/saved_test_results_{block}_{block_ind}_layer_{layer_ind}_{postfix}{weights_postfix}.pkl', 'wb') as outfile:
pickle.dump(test_results[2], outfile)
torch.save(probe.state_dict(), f'probe_checkpoints/large_syn_dataset_continuous/at_step_{at_step}/regression_probe_{layer}_final{weights_postfix}.pth')
plt_test_results_continuous_depth(probe, test_dataloader, test_data, loss_func, smooth_loss=smooth_loss_func, head=None, save_plt=True, norm_output=False, save_filename=f'probe_accuracy/large_syn_dataset_continuous/at_step_{at_step}/saved_test_results_{block}_{block_ind}_layer_{layer_ind}_{postfix}{weights_postfix}.png')
dataset_path = os.path.join(data_path, dataset_path)
clear_dir(dataset_path, file_extention='.pkl') |
def test_json_index_page() -> None:
c = ConfigParser()
c.add_section('mirror')
c['mirror']['workers'] = '1'
s = SimpleAPI(FilesystemStorage(config=c), SimpleFormat.ALL, [], 'sha256', False, None)
with TemporaryDirectory() as td:
td_path = Path(td)
simple_dir = (td_path / 'simple')
sixtynine_dir = (simple_dir / '69')
foo_dir = (simple_dir / 'foo')
for a_dir in (sixtynine_dir, foo_dir):
a_dir.mkdir(parents=True)
sixtynine_html = (sixtynine_dir / 'index.html')
foo_html = (foo_dir / 'index.html')
for a_file in (sixtynine_html, foo_html):
a_file.touch()
s.sync_index_page(True, td_path, 12345, pretty=True)
assert ('simple\nsimple{0}69\nsimple{0}69{0}index.html\nsimple{0}foo\nsimple{0}foo{0}index.html\nsimple{0}index.html\nsimple{0}index.v1_html\nsimple{0}index.v1_json'.format(sep) == utils.find(td_path))
assert ((simple_dir / 'index.v1_json').open('r').read() == EXPECTED_SIMPLE_GLOBAL_JSON_PRETTY) |
def evaluate_code_prompt(path):
def _parse_option(option: str, question: str):
solution = question.split(option)[1].split(')')[0]
if ('none' in solution.lower()):
return None
solution = ''.join([c for c in solution if (c.isdigit() or (c == '.'))])
if ((solution[0] == '.') and (solution.count('.') > 1)):
solution = solution[1:]
solution = float(solution)
return solution
def find_closest_option(result: float, question: str):
options = ['(a)', '(b)', '(c)', '(d)', '(e)']
parsed_options = [(option, _parse_option(option, question)) for option in options]
parsed_options = [(option, solution) for (option, solution) in parsed_options if (solution is not None)]
has_None = (len(parsed_options) < len(options))
# Index into the filtered list so options and solutions stay aligned when some option parses to None.
closest_idx = np.argmin(np.abs((np.array([solution for (_, solution) in parsed_options]) - result)))
closest_option = parsed_options[closest_idx][0]
closest_option_dist = np.abs((parsed_options[closest_idx][1] - result))
relative_dist = abs((closest_option_dist / (result + 1e-06)))
if ((relative_dist > 10) and has_None):
return '(e)'
return closest_option
data = read_json(path)
num_corr = 0
for (rec_idx, row) in tqdm(data.iterrows(), total=len(data)):
question = row['question'].split('Question: ')[(- 1)].split('#')[0]
soln = row['generated_answer']
soln = soln.split('\n\n\n')[0].strip()
soln_lines = soln.split('\n')
soln_lines = [line for line in soln_lines if ('from sympy import *' not in line)]
for (i, soln_line) in enumerate(soln_lines):
if (('solve(' in soln_line) and ('dict=True' not in soln_line)):
soln_lines[i] = soln_line.replace(')', ', dict=True)')
j = (i + 1)
while ('sol[' in soln_lines[j]):
if ('sol[0][' not in soln_lines[j]):
soln_lines[j] = soln_lines[j].replace('[', '[0][')
j += 1
soln = '\n'.join(soln_lines)
soln = ('from sympy import *\nimport numpy as np\nimport math\nfrom math import sqrt\n' + soln)
result = None
os.system('rm -rf __pycache__')
os.system('rm -f temp_result.pyc')
with open('temp_result.py', 'w') as f:
f.write(soln)
try:
import temp_result
reload(temp_result)
correct_solution_option = row['answer']
with timeout(5):
exec(soln)
result = float(temp_result.solution())
del temp_result
closest_option = find_closest_option(result, question)
except Exception as e:
pass
if (not (isinstance(result, int) or isinstance(result, float))):
continue
is_corr = (closest_option == correct_solution_option)
num_corr += int(is_corr)
print(f'Accuracy = {(num_corr / len(data)):.2%} ({num_corr}/{len(data)})')
return (num_corr / len(data)) |
class ITypeHintingFactory():
def make_param_provider(self):
raise NotImplementedError
def make_return_provider(self):
raise NotImplementedError
def make_assignment_provider(self):
raise NotImplementedError
def make_resolver(self):
raise NotImplementedError |
class RemoteExpert(nn.Module):
def __init__(self, uid, endpoint: Endpoint):
super().__init__()
(self.uid, self.endpoint) = (uid, endpoint)
self._info = None
@property
def stub(self):
return _get_expert_stub(self.endpoint)
def forward(self, *args, **kwargs):
assert (len(kwargs) == len(self.info['keyword_names'])), f"Keyword args should be {self.info['keyword_names']}"
kwargs = {key: kwargs[key] for key in self.info['keyword_names']}
forward_inputs = (args, kwargs)
if (not nested_compare(forward_inputs, self.info['forward_schema'])):
raise TypeError(f'Inputs do not match expert input schema. Did you pass the right number of parameters?')
flat_outputs = _RemoteModuleCall.apply(DUMMY, self.uid, self.stub, self.info, *nested_flatten(forward_inputs))
return nested_pack(flat_outputs, structure=self.info['outputs_schema'])
@property
def info(self):
if (self._info is None):
outputs = self.stub.info(runtime_pb2.ExpertUID(uid=self.uid))
self._info = pickle.loads(outputs.serialized_info)
return self._info
def extra_repr(self):
return f'uid={self.uid}, endpoint={self.endpoint}' |
def main():
app = Flask(__name__)
app.config.update(DB_CONNECTION_STRING=':memory:', SQLALCHEMY_DATABASE_URI='sqlite://')
app.debug = True
with app.app_context():
injector = Injector([AppModule(app)])
configure_views(app=app)
FlaskInjector(app=app, injector=injector)
client = app.test_client()
response = client.get('/')
print(('%s\n%s%s' % (response.status, response.headers, response.data)))
response = client.post('/', data={'key': 'foo', 'value': 'bar'})
print(('%s\n%s%s' % (response.status, response.headers, response.data)))
response = client.get('/')
print(('%s\n%s%s' % (response.status, response.headers, response.data)))
response = client.get('/hello')
print(('%s\n%s%s' % (response.status, response.headers, response.data)))
response = client.delete('/hello')
print(('%s\n%s%s' % (response.status, response.headers, response.data)))
response = client.get('/')
print(('%s\n%s%s' % (response.status, response.headers, response.data)))
response = client.get('/hello')
print(('%s\n%s%s' % (response.status, response.headers, response.data)))
response = client.delete('/hello')
print(('%s\n%s%s' % (response.status, response.headers, response.data))) |
class HFAttribute(HFProxy):
def __init__(self, root, attr: str):
self.root = root
self.attr = attr
self.tracer = root.tracer
self._node = None
@property
def node(self):
if (self._node is None):
self._node = self.tracer.create_proxy('call_function', getattr, (self.root, self.attr), {}).node
return self._node
def __call__(self, *args, **kwargs):
return self.tracer.create_proxy('call_method', self.attr, ((self.root,) + args), kwargs) |
class SitemapGenerator(object):
def __init__(self, context, settings, path, theme, output_path, *null):
self.output_path = output_path
self.context = context
self.now = datetime.now()
self.siteurl = settings.get('SITEURL')
self.default_timezone = settings.get('TIMEZONE', 'UTC')
self.timezone = getattr(self, 'timezone', self.default_timezone)
self.timezone = timezone(self.timezone)
self.format = 'xml'
self.changefreqs = {'articles': 'monthly', 'indexes': 'daily', 'pages': 'monthly'}
self.priorities = {'articles': 0.5, 'indexes': 0.5, 'pages': 0.5}
config = settings.get('SITEMAP', {})
if (not isinstance(config, dict)):
warning('sitemap plugin: the SITEMAP setting must be a dict')
else:
fmt = config.get('format')
pris = config.get('priorities')
chfreqs = config.get('changefreqs')
if (fmt not in ('xml', 'txt')):
warning("sitemap plugin: SITEMAP['format'] must be `txt' or `xml'")
warning("sitemap plugin: Setting SITEMAP['format'] on `xml'")
elif (fmt == 'txt'):
self.format = fmt
return
valid_keys = ('articles', 'indexes', 'pages')
valid_chfreqs = ('always', 'hourly', 'daily', 'weekly', 'monthly', 'yearly', 'never')
if isinstance(pris, dict):
for (k, v) in pris.items():
if ((k in valid_keys) and (not isinstance(v, (int, float)))):
default = self.priorities[k]
warning('sitemap plugin: priorities must be numbers')
warning("sitemap plugin: setting SITEMAP['priorities']['{0}'] on {1}".format(k, default))
pris[k] = default
self.priorities.update(pris)
elif (pris is not None):
warning("sitemap plugin: SITEMAP['priorities'] must be a dict")
warning('sitemap plugin: using the default values')
if isinstance(chfreqs, dict):
for (k, v) in chfreqs.items():
if ((k in valid_keys) and (v not in valid_chfreqs)):
default = self.changefreqs[k]
warning("sitemap plugin: invalid changefreq `{0}'".format(v))
warning("sitemap plugin: setting SITEMAP['changefreqs']['{0}'] on '{1}'".format(k, default))
chfreqs[k] = default
self.changefreqs.update(chfreqs)
elif (chfreqs is not None):
warning("sitemap plugin: SITEMAP['changefreqs'] must be a dict")
warning('sitemap plugin: using the default values')
def write_url(self, page, fd):
if (getattr(page, 'status', 'published') != 'published'):
return
if (not page.save_as):
return
page_path = os.path.join(self.output_path, page.save_as)
if (not os.path.exists(page_path)):
return
lastdate = getattr(page, 'date', self.now)
try:
lastdate = self.get_date_modified(page, lastdate)
except ValueError:
warning((('sitemap plugin: ' + page.save_as) + ' has invalid modification date,'))
warning('sitemap plugin: using date value as lastmod.')
lastmod = format_date(lastdate)
if isinstance(page, contents.Article):
pri = self.priorities['articles']
chfreq = self.changefreqs['articles']
elif isinstance(page, contents.Page):
pri = self.priorities['pages']
chfreq = self.changefreqs['pages']
else:
pri = self.priorities['indexes']
chfreq = self.changefreqs['indexes']
pageurl = ('' if (page.url == 'index.html') else page.url)
sitemapExclude = []
if (self.format == 'xml'):
if (pageurl not in sitemapExclude):
fd.write(XML_URL.format(self.siteurl, pageurl, lastmod, chfreq, pri))
else:
fd.write((((self.siteurl + '/') + pageurl) + '\n'))
def get_date_modified(self, page, default):
if hasattr(page, 'modified'):
if isinstance(page.modified, datetime):
return page.modified
return get_date(page.modified)
else:
return default
def set_url_wrappers_modification_date(self, wrappers):
for (wrapper, articles) in wrappers:
lastmod = datetime.min.replace(tzinfo=self.timezone)
for article in articles:
lastmod = max(lastmod, article.date.replace(tzinfo=self.timezone))
try:
modified = self.get_date_modified(article, datetime.min).replace(tzinfo=self.timezone)
lastmod = max(lastmod, modified)
except ValueError:
pass
setattr(wrapper, 'modified', str(lastmod))
def generate_output(self, writer):
path = os.path.join(self.output_path, 'sitemap.{0}'.format(self.format))
pages = ((((self.context['pages'] + self.context['articles']) + [c for (c, a) in self.context['categories']]) + [t for (t, a) in self.context['tags']]) + [a for (a, b) in self.context['authors']])
self.set_url_wrappers_modification_date(self.context['categories'])
self.set_url_wrappers_modification_date(self.context['tags'])
self.set_url_wrappers_modification_date(self.context['authors'])
for article in self.context['articles']:
pages += article.translations
info('writing {0}'.format(path))
with open(path, 'w', encoding='utf-8') as fd:
if (self.format == 'xml'):
fd.write(XML_HEADER)
else:
fd.write(TXT_HEADER.format(self.siteurl))
FakePage = collections.namedtuple('FakePage', ['status', 'date', 'url', 'save_as'])
for standard_page_url in ['index.html', 'archives.html', 'tags.html', 'categories.html']:
fake = FakePage(status='published', date=self.now, url=standard_page_url, save_as=standard_page_url)
self.write_url(fake, fd)
for page in pages:
self.write_url(page, fd)
if (self.format == 'xml'):
fd.write(XML_FOOTER) |
class ChangeOccurrencesTest(unittest.TestCase):
def setUp(self):
self.project = testutils.sample_project()
self.mod = testutils.create_module(self.project, 'mod')
def tearDown(self):
testutils.remove_project(self.project)
super().tearDown()
def test_simple_case(self):
self.mod.write(dedent(' a_var = 1\n print(a_var)\n '))
changer = rename.ChangeOccurrences(self.project, self.mod, self.mod.read().index('a_var'))
changer.get_changes('new_var').do()
self.assertEqual(dedent(' new_var = 1\n print(new_var)\n '), self.mod.read())
def test_only_performing_inside_scopes(self):
self.mod.write(dedent(' a_var = 1\n new_var = 2\n def f():\n print(a_var)\n '))
changer = rename.ChangeOccurrences(self.project, self.mod, self.mod.read().rindex('a_var'))
changer.get_changes('new_var').do()
self.assertEqual(dedent(' a_var = 1\n new_var = 2\n def f():\n print(new_var)\n '), self.mod.read())
def test_only_performing_on_calls(self):
self.mod.write(dedent(' def f1():\n pass\n def f2():\n pass\n g = f1\n a = f1()\n '))
changer = rename.ChangeOccurrences(self.project, self.mod, self.mod.read().rindex('f1'))
changer.get_changes('f2', only_calls=True).do()
self.assertEqual(dedent(' def f1():\n pass\n def f2():\n pass\n g = f1\n a = f2()\n '), self.mod.read())
def test_only_performing_on_reads(self):
self.mod.write(dedent(' a = 1\n b = 2\n print(a)\n '))
changer = rename.ChangeOccurrences(self.project, self.mod, self.mod.read().rindex('a'))
changer.get_changes('b', writes=False).do()
self.assertEqual(dedent(' a = 1\n b = 2\n print(b)\n '), self.mod.read()) |
@pytest.fixture  # assumed fixture decorator (stripped in source)
def h3_input_df(spark_context, spark_session):
data = [{'id': 1, 'origin_ts': '2016-04-11 11:31:11', 'feature1': 200, 'feature2': 200, 'lat': (- 23.55419), 'lng': (- 46.670723), 'house_id': 8921}, {'id': 1, 'origin_ts': '2016-04-11 11:44:12', 'feature1': 300, 'feature2': 300, 'lat': (- 23.55419), 'lng': (- 46.670723), 'house_id': 8921}]
df = spark_session.read.json(spark_context.parallelize(data, 1))
df = df.withColumn(TIMESTAMP_COLUMN, df.origin_ts.cast(DataType.TIMESTAMP.spark))
return df |
def compare_proposer_leaders(x, y):
print('Leader diff')
x_list = [(int(i[0]), i[1]) for i in x['proposer_leaders'].items()]
y_list = [(int(i[0]), i[1]) for i in y['proposer_leaders'].items()]
for (idx, l, r) in compare_list(x_list, y_list):
if (l is not None):
l = 'hash {} (lvl {:3})'.format(l[1][(- 6):], l[0])
if (r is not None):
r = 'hash {} (lvl {:3})'.format(r[1][(- 6):], r[0])
print('\t{:3}, left: {}, right: {}'.format(idx, l, r)) |
def test_user(host):
user = host.user('sshd')
assert user.exists
assert (user.name == 'sshd')
assert (user.uid == 100)
assert (user.gid == 65534)
assert (user.group == 'nogroup')
assert (user.gids == [65534])
assert (user.groups == ['nogroup'])
assert (user.shell == '/usr/sbin/nologin')
assert (user.home == '/run/sshd')
assert (user.password == '!') |
def _set_allowed_requests(sec_class, sec_level):
requests = {'RedirectedRun', 'VirtualFile.readfromid', 'VirtualFile.closebyid', 'Globals.get', 'log.Log.open_logfile_allconn', 'log.Log.close_logfile_allconn', 'log.Log.log_to_file', 'robust.install_signal_handlers', 'SetConnections.add_redirected_conn', 'sys.stdout.write', '_repo_shadow.RepoShadow.is_locked', '_repo_shadow.RepoShadow.lock', '_repo_shadow.RepoShadow.setup_paths', '_repo_shadow.RepoShadow.unlock'}
if ((sec_level == 'read-only') or (sec_level == 'update-only') or (sec_level == 'read-write')):
requests.update(['rpath.gzip_open_local_read', 'rpath.make_file_dict', 'rpath.open_local_read', 'rpath.setdata_local', 'os.getuid', 'os.listdir', 'platform.system', '_repo_shadow.RepoShadow.get_config', '_repo_shadow.RepoShadow.get_mirror_time', '_repo_shadow.RepoShadow.needs_regress'])
if ((sec_level == 'read-only') or (sec_level == 'read-write')):
requests.update(['_dir_shadow.ReadDirShadow.compare_full', '_dir_shadow.ReadDirShadow.compare_hash', '_dir_shadow.ReadDirShadow.compare_meta', '_dir_shadow.ReadDirShadow.get_diffs', '_dir_shadow.ReadDirShadow.get_fs_abilities', '_dir_shadow.ReadDirShadow.get_select', '_dir_shadow.ReadDirShadow.set_select', '_repo_shadow.RepoShadow.get_fs_abilities_readonly', '_repo_shadow.RepoShadow.init_loop', '_repo_shadow.RepoShadow.get_increment_times', '_repo_shadow.RepoShadow.set_select', '_repo_shadow.RepoShadow.finish_loop', '_repo_shadow.RepoShadow.get_diffs', '_repo_shadow.RepoShadow.list_files_changed_since', '_repo_shadow.RepoShadow.list_files_at_time', '_repo_shadow.RepoShadow.init_and_get_loop', '_repo_shadow.RepoShadow.verify'])
if ((sec_level == 'update-only') or (sec_level == 'read-write')):
requests.update(['VirtualFile.writetoid', 'log.ErrorLog.close', 'log.ErrorLog.isopen', 'log.ErrorLog.open', 'log.ErrorLog.write_if_open', 'log.Log.close_logfile_local', 'log.Log.open_logfile_local', 'statistics.record_error', '_repo_shadow.RepoShadow.close_statistics', '_repo_shadow.RepoShadow.get_fs_abilities_readwrite', '_repo_shadow.RepoShadow.get_sigs', '_repo_shadow.RepoShadow.apply', '_repo_shadow.RepoShadow.remove_current_mirror', '_repo_shadow.RepoShadow.set_config', '_repo_shadow.RepoShadow.touch_current_mirror'])
if (sec_level == 'read-write'):
requests.update(['rpath.delete_dir_no_files', 'rpath.copy_reg_file', 'rpath.make_socket_local', 'rpath.RPath.fsync_local', 'os.chmod', 'os.chown', 'os.lchown', 'os.link', 'os.makedev', 'os.makedirs', 'os.mkdir', 'os.mkfifo', 'os.mknod', 'os.remove', 'os.rename', 'os.rmdir', 'os.symlink', 'os.unlink', 'os.utime', 'shutil.rmtree', '_repo_shadow.RepoShadow.regress', '_repo_shadow.RepoShadow.remove_increments_older_than', '_dir_shadow.WriteDirShadow.get_fs_abilities', '_dir_shadow.WriteDirShadow.get_sigs_select', '_dir_shadow.WriteDirShadow.apply', '_dir_shadow.WriteDirShadow.set_select'])
if (sec_class == 'server'):
requests.update(['log.Log.setverbosity', 'log.Log.setterm_verbosity', 'SetConnections.init_connection_remote', '_repo_shadow.RepoShadow.init_owners_mapping', '_dir_shadow.WriteDirShadow.init_owners_mapping', 'Globals.set_api_version'])
return requests |
def gen_grid2d(grid_size: int, left_end: float=(- 1), right_end: float=1) -> torch.Tensor:
x = torch.linspace(left_end, right_end, grid_size)
(x, y) = torch.meshgrid([x, x], indexing='ij')
grid = torch.cat((x.reshape((- 1), 1), y.reshape((- 1), 1)), dim=1).reshape(grid_size, grid_size, 2)
return grid |
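# Shape check (hedged sketch): with 'ij' meshgrid indexing, grid[i, j] == (x_i, y_j):
#   grid = gen_grid2d(3)
#   grid.shape   # torch.Size([3, 3, 2])
#   grid[0, 0]   # tensor([-1., -1.])
#   grid[2, 2]   # tensor([1., 1.])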
class ViewRecordDal(object):
def create_view_domain(domain_name):
if domain_name.endswith(VIEW_ZONE):
for (k, v) in NORMAL_TO_VIEW.items():
if domain_name.endswith(v):
return (domain_name, k)
raise BadParam('invalid domain', msg_ch=(u'view: %s' % NORMAL_TO_VIEW.values()))
normal_zone = ViewRecordDal.select_zone(domain_name)
if (normal_zone is None):
raise BadParam(('no zone for this domain: %s' % domain_name), msg_ch='dnsdbzone')
if (normal_zone not in NORMAL_TO_VIEW):
raise BadParam(('invalid domain: %s' % domain_name), msg_ch=(u'%s : %s' % (normal_zone, ','.join(NORMAL_TO_VIEW.keys()))))
return (domain_name.replace(normal_zone, NORMAL_TO_VIEW[normal_zone]), normal_zone)
def get_view_domain_zone(domain_name):
return db.session.query(ViewRecords.zone_name).filter_by(domain_name=domain_name).group_by(ViewRecords.zone_name).first().zone_name
def zone_domain_count():
zone_count = []
exclude_zones = NORMAL_TO_CNAME.values()
for item in db.session.query(DnsRecord.zone_name, func.count(DnsRecord.zone_name)).group_by(DnsRecord.zone_name):
if (item[0] in exclude_zones):
continue
zone_count.append({'zone': item[0], 'count': item[1]})
result = sorted(zone_count, key=(lambda x: x['count']), reverse=True)[:5]
if VIEW_ZONE:
result.append({'zone': VIEW_ZONE, 'count': ViewDomainNames.query.count()})
return result
def is_migrate_domain(domain_name):
return (ViewDomainNameState.query.filter_by(domain_name=domain_name).filter((ViewDomainNameState.enabled_rooms != ViewDomainNameState.origin_enabled_rooms)).first() is not None)
def list_server_room(**conditions):
return DnsColo.query.filter_by(**conditions)
def get_domain_name_record(domain_name):
view_domain = ViewRecordDal.get_view_domain_name(domain_name)
if (not view_domain):
raise BadParam(('No such domain_name: %s' % domain_name), msg_ch=u'view')
record_dict = {}
for record in ViewRecords.query.filter_by(domain_name=view_domain):
if (record.record_type in A_RECORDS):
if (record.property not in record_dict):
record_dict[record.property] = []
record_dict[record.property].append(record.record)
else:
record_dict[record.property] = record.record
return (view_domain, record_dict)
def get_view_domain_name(domain):
record = ViewDomainNames.query.filter(sqlalchemy.or_((ViewDomainNames.domain_name == domain), (ViewDomainNames.cname == domain))).first()
if (not record):
return None
else:
return record.cname
def get_view_domain_info(domain):
view_domain = ViewRecordDal.get_view_domain_name(domain)
if (not view_domain):
raise BadParam(('No such domain_name: %s' % domain), msg_ch=u'view')
records = ViewRecords.query.filter_by(domain_name=view_domain).all()
state_list = ViewDomainNameState.query.filter_by(domain_name=view_domain).all()
isp_list = [item.name_in_english for item in ViewIsps.query.all()]
room_confs = defaultdict((lambda : {'is_enabled': False, 'isps': {i: False for i in isp_list}, 'ips': []}))
cdn_conf = {'cdn': [{'name': r.property, 'cname': r.record} for r in records if (r.record_type == 'CNAME')], 'isps': {isp: {'name': 'cdn', 'cname': 'cdn'} for isp in isp_list}}
for record in records:
record_type = record.record_type
if (record_type in A_RECORDS):
room = record.property
room_info = room_confs[room]
if (record.record not in room_info['ips']):
room_info['ips'].append(record.record)
for s in state_list:
if ((s.state == 'A') and (room in json.loads(s.enabled_rooms))):
room_info['isps'][s.isp] = True
room_info['is_enabled'] = True
elif (record_type == 'CNAME'):
for s in state_list:
if (s.state.isdigit() and (int(s.state) == int(record.id))):
cdn_conf['isps'][s.isp]['name'] = record.property
cdn_conf['isps'][s.isp]['cname'] = record.record
else:
raise BadParam(('Unexpected record type: %s' % record_type), msg_ch=(u'[A AAAA CNAME]: %s' % record_type))
return {'domain_name': domain, 'is_migrate': ViewRecordDal.is_migrate_domain(view_domain), 'cnames': cdn_conf, 'rooms': room_confs}
def get_isp_enable(domain_name, isps):
q = ViewDomainNameState.query
q = q.filter((ViewDomainNameState.domain_name == domain_name))
q = q.filter(ViewDomainNameState.isp.in_(isps))
state = {}
for domain_state in q:
try:
if (domain_state.state == 'A'):
state[domain_state.isp] = json.loads(domain_state.enabled_rooms)
elif (domain_state.state == 'disabled'):
pass
else:
record = ViewRecords.query.filter((ViewRecords.id == int(domain_state.state))).one()
state[domain_state.isp] = record.record
except Exception as e:
log.error('search isp enabled state failed')
return state
def search_view_domain(domain='', rooms=(), isps=(), select_cdn=True):
records = ViewDomainNames.query.order_by(ViewDomainNames.domain_name).all()
view2normal = {record.cname: record.domain_name for record in records}
domain_names = []
view_domain = domain
if domain:
(view_domain, _) = ViewRecordDal.create_view_domain(domain)
if ((len(rooms) == 0) and (domain != '')):
result = db.session.query(ViewDomainNameState).filter((ViewDomainNameState.domain_name == view_domain)).all()
domain_names.extend([record.domain_name for record in result])
for room in rooms:
like_str = '%{}%'.format(room)
q = db.session.query(ViewDomainNameState).filter(ViewDomainNameState.enabled_rooms.like(like_str))
if isps:
q = q.filter(ViewDomainNameState.isp.in_(isps))
result = q.all()
domain_names.extend([record.domain_name for record in result])
if select_cdn:
result = db.session.query(ViewDomainNameState).filter((ViewDomainNameState.state != 'A')).filter(ViewDomainNameState.isp.in_(isps)).all()
domain_names.extend([record.domain_name for record in result])
domain_names = [domain_name for domain_name in list(set(domain_names)) if (view_domain in domain_name)]
domain_names = sorted(domain_names, key=(lambda x: len(x)))
return [{'view_domain': domain_name, 'domain': view2normal[domain_name]} for domain_name in domain_names]
@_on_success
def increase_serial_num(zone_name):
serials = DnsSerial.query.filter_by(zone_name=zone_name).all()
if (len(serials) != 1):
raise BadParam(('Zone serial should be unique: %s' % zone_name), msg_ch=u'zone serial')
serial = serials[0]
serial.serial_num += 1
return serial.serial_num
def select_zone(domain):
zones = set([zone.zone_name for zone in DnsSerial.query.all()])
for index in range(1, len(domain.split('.'))):
best_match = domain.split('.', index)[(- 1)]
if (best_match in zones):
return best_match
return None
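# select_zone walks suffixes from most to least specific: for
# 'a.b.example.com' with zones {'example.com', 'com'} it tries
# 'b.example.com', then 'example.com' (first hit wins), so the longest
# configured zone suffix is returned; None means no zone matched.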
def is_cname_used(record_id, domain_name):
return (ViewDomainNameState.query.filter_by(state=str(record_id), domain_name=domain_name).count() != 0)
@_on_success
def insert_view_record(domain_name, cnames, rooms, cname_zone):
for (name, cdn) in cnames.items():
record = ViewRecords.query.filter_by(domain_name=domain_name, record=cdn).first()
if record:
record.property = name
db.session.add(record)
else:
db.session.add(ViewRecords(domain_name=domain_name, ttl=60, record=cdn, record_type='CNAME', zone_name=cname_zone, property=name))
for (room, ips) in rooms.items():
for ip in ips:
(ip, version) = format_ip(ip)
db.session.add(ViewRecords(domain_name=domain_name, ttl=60, record=ip, record_type=('A' if (version == 4) else 'AAAA'), zone_name=cname_zone, property=room))
@_on_success
def _del_unused_cname(domain_name, cnames):
new_names = [(name, cdn) for (name, cdn) in cnames.items()]
records = ViewRecords.query.filter_by(domain_name=domain_name, record_type='CNAME').all()
for record in records:
if ((record.property, record.record) in new_names):
continue
if ViewRecordDal.is_cname_used(record.id, domain_name):
raise BadParam(('Failed to delete %s because it is in use.' % record.property), msg_ch=u'CDN,')
db.session.delete(record)
log.info('Success to delete {0}'.format(record.record))
def _get_active_record(domain_name):
states = ViewDomainNameState.query.filter_by(domain_name=domain_name, state='A').all()
active_room = set()
origin_active_room = set()
for state in states:
active_room.update(json.loads(state.enabled_rooms))
origin_active_room.update(json.loads(state.origin_enabled_rooms))
active_record = defaultdict((lambda : []))
if active_room:
records = ViewRecords.query.filter_by(domain_name=domain_name).filter(ViewRecords.property.in_(active_room)).all()
for record in records:
active_record[record.property].append(record.record)
return (dict(active_record), (active_room | origin_active_room))
def _can_remove_room(active_room, rooms):
can_remove = True
cannot_remove_rooms = []
for room in active_room:
if ((room not in rooms) or (not rooms[room])):
can_remove = False
cannot_remove_rooms.append(room)
return (can_remove, cannot_remove_rooms)
def insert_view_domain(username, domain_name, cnames, rooms):
if ((not cnames) and (not rooms)):
raise BadParam(message='No rooms and cnames conf.', msg_ch=u'CDN')
if ViewRecordDal.get_view_domain_name(domain_name):
raise BadParam('Domain already existed', msg_ch=u'')
for item in NORMAL_TO_VIEW.keys():
if domain_name.endswith(item):
break
else:
raise BadParam('Invalid domain', msg_ch=(u': %s' % ','.join(NORMAL_TO_VIEW.keys())))
(view_domain, normal_zone) = ViewRecordDal.create_view_domain(domain_name)
cname_zone = NORMAL_TO_CNAME[normal_zone]
normal_records = DnsRecord.query.filter_by(domain_name=domain_name).all()
if normal_records:
if ((len(normal_records) > 1) or (normal_records[0].record_type == 'A')):
raise BadParam(('Domain %s already has an A record' % domain_name), msg_ch=u'A,view')
record = normal_records[0].record
if (record != view_domain):
raise BadParam(('Domain %s already has a CNAME record: %s' % (domain_name, record)), msg_ch=(u'CNAME: %s,view' % record))
else:
with db.session.begin(subtransactions=True):
insert_record = DnsRecord(domain_name=domain_name, record=view_domain, zone_name=normal_zone, update_user=username, record_type='CNAME', ttl=CONF.view.cname_ttl)
db.session.add(insert_record)
ViewRecordDal.increase_serial_num(normal_zone)
with db.session.begin(subtransactions=True):
db.session.add(ViewDomainNames(domain_name=domain_name, cname=view_domain))
ViewRecordDal.insert_view_record(view_domain, cnames, rooms, cname_zone)
with db.session.begin(subtransactions=True):
isp_list = [item.name_in_english for item in ViewIsps.query.all()]
for isp in isp_list:
db.session.add(ViewDomainNameState(domain_name=view_domain, isp=isp))
ViewRecordDal.increase_serial_num(VIEW_ZONE)
def update_view_domain(username, domain_name, cnames, rooms):
view_domain = ViewRecordDal.get_view_domain_name(domain_name)
if (not view_domain):
raise BadParam(('No such domain name:%s' % domain_name), msg_ch=u'')
ViewRecordDal._del_unused_cname(view_domain, cnames)
cname_zone = ViewRecordDal.get_view_domain_zone(view_domain)
(active_record, active_room) = ViewRecordDal._get_active_record(view_domain)
(can_remove, room) = ViewRecordDal._can_remove_room(active_room, rooms)
if (not can_remove):
raise BadParam(message=('room %s is in use, can not delete' % room), msg_ch=(u'%s,' % room))
with db.session.begin(subtransactions=True):
records = ViewRecords.query.filter_by(domain_name=view_domain).filter(ViewRecords.record_type.in_(A_RECORDS))
for record in records:
db.session.delete(record)
ViewRecordDal.insert_view_record(view_domain, cnames, rooms, cname_zone)
need_reload = _need_reload_zone(active_record, rooms)
log.info(('update domain %s need reload: %s' % (domain_name, need_reload)))
if need_reload:
ViewRecordDal.increase_serial_num(cname_zone)
def upsert_view_domain(username, domain_name, rooms, cnames, action):
cnames = ({} if (not cnames) else cnames)
rooms = ({} if (not rooms) else rooms)
mapper = {'insert': ViewRecordDal.insert_view_domain, 'update': ViewRecordDal.update_view_domain}
if (action not in mapper):
raise BadParam('action {0} not supported'.format(action))
_validate_domain_args(domain_name, rooms, cnames)
with db.session.begin(subtransactions=True):
return mapper[action](username, domain_name, cnames, rooms)
def _check_update_state_args(isp_dict, domain_name):
record_properties = set()
for item in ViewRecords.query.filter_by(domain_name=domain_name):
if (item.record_type in A_RECORDS):
record_properties.add(item.property)
cur_state_list = ViewDomainNameState.query.filter_by(domain_name=domain_name).all()
cur_state_dict = {item.isp: item for item in cur_state_list}
for (isp, conf) in isp_dict.items():
state = cur_state_dict.get(isp, None)
if (not state):
raise BadParam(('Domain %s has no state record for isp: %s' % (domain_name, isp)), msg_ch=(u'%s%sstate' % (domain_name, isp)))
enabled_rooms = conf.get('rooms', [])
if enabled_rooms:
if (not set(enabled_rooms).issubset(record_properties)):
raise BadParam(('Enabled room has no record for domain: %s' % domain_name), msg_ch=u'ip')
cdn = conf.get('cdn', '')
if cdn:
res = ViewRecords.query.filter_by(domain_name=state.domain_name, record=cdn, record_type='CNAME').first()
if (not res):
raise BadParam(('Enabled cdn %s is invalid for domain: %s' % (cdn, domain_name)), msg_ch=(u' %s cdn: %s' % (domain_name, cdn)))
if ((not enabled_rooms) and (not cdn)):
raise BadParam(('rooms and cdn both null for isp %s' % isp), msg_ch=(u'%s' % isp))
need_update_isp = set()
for (isp, item) in cur_state_dict.items():
state = item.state
if (state == 'disabled'):
need_update_isp.add(isp)
elif (state == 'A'):
if (len(json.loads(item.enabled_rooms)) == 0):
need_update_isp.add(isp)
elif (not state.isdigit()):
need_update_isp.add(isp)
error_isp = (need_update_isp - set(isp_dict.keys()))
if error_isp:
s = ' '.join(error_isp)
raise BadParam((' isp %s state is disabled.' % s), msg_ch=(u'%s' % s))
def update_view_domain_state(domain_name, isp_dict):
update_isps = isp_dict.keys()
view_domain = ViewRecordDal.get_view_domain_name(domain_name)
if (not view_domain):
raise BadParam(('No such domain_name: %s' % domain_name), msg_ch=u'view')
op_before = ViewRecordDal.get_isp_enable(view_domain, update_isps)
ViewRecordDal._check_update_state_args(isp_dict, view_domain)
is_migrate = ViewRecordDal.is_migrate_domain(view_domain)
with db.session.begin(subtransactions=True):
query = ViewDomainNameState.query.filter_by(domain_name=view_domain).filter(ViewDomainNameState.isp.in_(update_isps))
for state in query:
isp = state.isp
conf = isp_dict[isp]
enabled_rooms = conf.get('rooms', [])
if enabled_rooms:
state.enabled_rooms = json.dumps(conf['rooms'])
state.state = 'A'
if (not is_migrate):
state.origin_enabled_rooms = json.dumps(conf['rooms'])
state.origin_state = 'A'
cdn = conf.get('cdn', '')
if cdn:
res = ViewRecords.query.filter_by(domain_name=state.domain_name, record=cdn, record_type='CNAME').first()
state.enabled_rooms = json.dumps([])
state.state = str(res.id)
if (not is_migrate):
state.origin_enabled_rooms = json.dumps([])
state.origin_state = str(res.id)
zone = ViewRecordDal.get_view_domain_zone(view_domain)
ViewRecordDal.increase_serial_num(zone)
return op_before
@_on_success
def delete_view_domain(domain_name):
need_update = [VIEW_ZONE]
need_update.append(ViewRecordDal.get_view_domain_zone(domain_name))
ViewRecords.query.filter_by(domain_name=domain_name).delete()
ViewDomainNameState.query.filter_by(domain_name=domain_name).delete()
ViewDomainNames.query.filter_by(cname=domain_name).delete()
record = DnsRecord.query.filter_by(record=domain_name).first()
need_update.append(record.zone_name)
db.session.delete(record)
for update_zone in need_update:
ViewRecordDal.increase_serial_num(update_zone) |
def report_results(split_df, opt, report_obs_number=False, max_char=None, rename_model_ids=False, VM_path=True):
difficulty_groups = ['Po', 'Pn', 'No', 'Nn', 'F1_o', 'F1_n']
accuracies = []
f1s = []
cases = []
for case in difficulty_groups:
if ('F1_' in case):
case = case[(- 1)]
[tp, tn, fp, fn] = count_tp_tn_fp_fn(split_df, case=case)
if ('N' in case):
accuracies.append(calculate_true_neg_rate(tp, tn, fp, fn))
elif ('P' in case):
accuracies.append(calculate_recall(tp, tn, fp, fn))
elif (len(case) == 1):
precision = calculate_precision(tp, tn, fp, fn)
recall = calculate_recall(tp, tn, fp, fn)
accuracies.append(calculate_f1(precision, recall))
model_name = opt['id']
if rename_model_ids:
if (not (type(model_name) is str)):
model_name = '{}-{}'.format(get_model_info(opt['id'], VM_path=VM_path), opt['id'])
if (not (max_char is None)):
if (len(str(model_name)) > max_char):
model_name = model_name[:max_char]
cases.append((((tp + tn) + fp) + fn))
[tp, tn, fp, fn] = count_tp_tn_fp_fn(split_df)
accuracies.append(calculate_accuracy(tp, tn, fp, fn))
precision = calculate_precision(tp, tn, fp, fn)
recall = calculate_recall(tp, tn, fp, fn)
accuracies.append(calculate_f1(precision, recall))
difficulty_groups.append('total_accu')
difficulty_groups.append('total_f1')
df = pd.DataFrame(accuracies, difficulty_groups, columns=[model_name])
if report_obs_number:
task = opt['tasks'][0]
cases.append((((tp + tn) + fp) + fn))
cases.append((((tp + tn) + fp) + fn))
df[('obs_' + task)] = cases
return df |
class OpenLidState(DefaultScript):
def at_script_creation(self):
self.key = 'open_lid_script'
self.desc = 'Script that manages the opened-state cmdsets for red button.'
self.persistent = True
def at_start(self):
self.obj.cmdset.add(cmdsetexamples.LidOpenCmdSet)
def is_valid(self):
return self.obj.db.lid_open
def at_stop(self):
self.obj.cmdset.delete(cmdsetexamples.LidOpenCmdSet) |
class _TrafficSignalState(VersionBase):
def __init__(self, signal_id, state):
self.signal_id = signal_id
self.state = state
def __eq__(self, other):
if isinstance(other, _TrafficSignalState):
if (self.get_attributes() == other.get_attributes()):
return True
return False
@staticmethod
def parse(element):
signal_id = element.attrib['trafficSignalId']
state = element.attrib['state']
return _TrafficSignalState(signal_id=signal_id, state=state)
def get_attributes(self):
retdict = {}
retdict['trafficSignalId'] = self.signal_id
retdict['state'] = self.state
return retdict
def get_element(self):
return ET.Element('TrafficSignalState', attrib=self.get_attributes()) |
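# A minimal round-trip sketch for _TrafficSignalState above (assumes the
# scenariogeneration-style VersionBase and xml.etree.ElementTree as ET from
# the surrounding module; the signal id/state values are illustrative).
ts = _TrafficSignalState(signal_id='signal_1', state='green')
elem = ts.get_element()
print(ET.tostring(elem))  # b'<TrafficSignalState trafficSignalId="signal_1" state="green" />'
assert _TrafficSignalState.parse(elem) == ts  # parse() inverts get_element() |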
def cut_tsv(file, debug):
m = TSV_REGEX.match(file)
if (m is None):
raise ValueError(f'{file} is not matching tsv pattern')
src = m.groups()[0]
tgt = m.groups()[1]
to_file1 = f'{file}.{src}'
to_file2 = f'{file}.{tgt}'
cmd1 = f"cat {file} | cut -f1 |awk '{{$1=$1}};1' > {to_file1}"
cmd2 = f"cat {file} | cut -f2 |awk '{{$1=$1}};1' > {to_file2}"
if (os.path.exists(to_file1) and os.path.exists(to_file2)):
(debug and print(f'{file} already processed to {to_file1} and {to_file2}; so skip'))
return file
call(cmd1, debug=debug)
call(cmd2, debug=debug)
return file |
class NetworkCardAssets(models.Model):
asset = models.ForeignKey('Assets', related_name='network_card_assets', on_delete=models.CASCADE)
network_card_name = models.CharField(max_length=20, blank=True, null=True, verbose_name='')
network_card_mac = models.CharField(max_length=64, blank=True, null=True, verbose_name='MAC')
network_card_ip = models.CharField(max_length=16, blank=True, null=True, verbose_name='IP')
network_card_model = models.CharField(max_length=50, blank=True, null=True, verbose_name='')
network_card_mtu = models.CharField(max_length=50, blank=True, null=True, verbose_name='MTU')
network_card_status = models.SmallIntegerField(blank=True, null=True, verbose_name='')
class Meta():
db_table = 'ops_network_card_assets'
unique_together = ('asset', 'network_card_mac')
verbose_name = ''
verbose_name_plural = '' |
class Polyline(VersionBase):
def __init__(self, time, positions):
if (time and (len(time) < 2)):
raise ValueError('not enough time inputs')
if (len(positions) < 2):
raise ValueError('not enough position inputs')
if (time and (len(time) != len(positions))):
raise ValueError('time and positions are not the same length')
for p in positions:
if (not isinstance(p, _PositionType)):
raise TypeError('position input is not a valid position')
self.positions = positions
self.time = [convert_float(x) for x in time]
def __eq__(self, other):
if isinstance(other, Polyline):
if ((self.time == other.time) and (self.positions == other.positions)):
return True
return False
@staticmethod
def parse(element):
polyline_element = element.find('Polyline')
vertexes = polyline_element.findall('Vertex')
time_list = []
position_list = []
for vertex in vertexes:
if ('time' in vertex.attrib):
time_list.append(convert_float(vertex.attrib['time']))
position_list.append(_PositionFactory.parse_position(vertex.find('Position')))
return Polyline(time_list, position_list)
def get_element(self):
shape = ET.Element('Shape')
element = ET.SubElement(shape, 'Polyline')
for i in range(len(self.positions)):
time_dict = {}
if self.time:
time_dict = {'time': str(self.time[i])}
vert = ET.SubElement(element, 'Vertex', attrib=time_dict)
vert.append(self.positions[i].get_element())
return shape |
class TestInstallColormap(EndianTest):
def setUp(self):
self.req_args_0 = {'cmap': 1329917990}
self.req_bin_0 = b'Q\x00\x02\x00&\xf0DO'
def testPackRequest0(self):
bin = request.InstallColormap._request.to_binary(*(), **self.req_args_0)
self.assertBinaryEqual(bin, self.req_bin_0)
def testUnpackRequest0(self):
(args, remain) = request.InstallColormap._request.parse_binary(self.req_bin_0, dummy_display, 1)
self.assertBinaryEmpty(remain)
self.assertEqual(args, self.req_args_0) |
class TernaryOpMixin(_GenericOpMixin):
def test_mathematically_correct(self, op, data_l, data_m, data_r, out_type):
(left, mid, right) = (data_l(), data_m(), data_r())
expected = self.op_numpy(left.to_array(), mid.to_array(), right.to_array())
test = op(left, mid, right)
assert isinstance(test, out_type)
if issubclass(out_type, Data):
assert (test.shape == expected.shape)
np.testing.assert_allclose(test.to_array(), expected, atol=self.atol, rtol=self.rtol)
else:
np.testing.assert_allclose(test, expected, atol=self.atol, rtol=self.rtol)
def test_incorrect_shape_raises(self, op, data_l, data_m, data_r):
with pytest.raises(ValueError):
op(data_l(), data_m(), data_r()) |
class PVTNetwork_2(nn.Module):
def __init__(self, channel=32, n_classes=1, deep_supervision=True):
super().__init__()
self.deep_supervision = deep_supervision
print(f'use Conv2d(7, 1) Conv2d(1, 7) and My attention layer'.center(80, '='))
self.backbone = pvt_v2_b2()
path = '/afs/crc.nd.edu/user/y/ypeng4/Polyp-PVT_2/pvt_pth/pvt_v2_b2.pth'
save_model = torch.load(path)
model_dict = self.backbone.state_dict()
state_dict = {k: v for (k, v) in save_model.items() if (k in model_dict.keys())}
model_dict.update(state_dict)
self.backbone.load_state_dict(model_dict)
self.Translayer_1 = _GlobalConvModule(64, channel, (7, 7))
self.Translayer_2 = _GlobalConvModule(128, channel, (7, 7))
self.Translayer_3 = _GlobalConvModule(320, channel, (7, 7))
self.Translayer_4 = _GlobalConvModule(512, channel, (7, 7))
self.attention_1 = AttentionLayer()
self.attention_2 = AttentionLayer()
self.attention_3 = AttentionLayer()
self.attention_4 = AttentionLayer()
self.seg_outs = nn.ModuleList([nn.Conv2d(channel, n_classes, 1, 1) for _ in range(4)])
self.deconv2 = nn.ConvTranspose2d(channel, channel, kernel_size=4, stride=2, padding=1, bias=False)
self.deconv3 = nn.ConvTranspose2d(channel, channel, kernel_size=4, stride=2, padding=1, bias=False)
self.deconv4 = nn.ConvTranspose2d(channel, channel, kernel_size=4, stride=2, padding=1, bias=False)
self.deconv5 = nn.ConvTranspose2d(channel, channel, kernel_size=4, stride=2, padding=1, bias=False)
def forward(self, x):
seg_outs = []
(f1, f2, f3, f4) = self.backbone(x)
f1 = self.Translayer_1(f1)
f2 = self.Translayer_2(f2)
f3 = self.Translayer_3(f3)
f4 = self.Translayer_4(f4)
f41 = self.attention_4([f1, f2, f3, f4], f4)
seg_outs.append(self.seg_outs[0](f41))
f31 = self.attention_3([f1, f2, f3, f4], f3)
f21 = self.attention_2([f1, f2, f3, f4], f2)
f11 = self.attention_1([f1, f2, f3, f4], f1)
y = (self.deconv2(f41) + f31)
seg_outs.append(self.seg_outs[1](y))
y = (self.deconv3(y) + f21)
seg_outs.append(self.seg_outs[2](y))
y = (self.deconv4(y) + f11)
seg_outs.append(self.seg_outs[3](y))
for (i, o) in enumerate(seg_outs):
seg_outs[i] = F.interpolate(o, scale_factor=4, mode='bilinear')
if self.deep_supervision:
return seg_outs[::(- 1)]
else:
return seg_outs[(- 1)] |
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [migrations.CreateModel(name='AudienceLevel', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=100, unique=True))], options={'verbose_name': 'Audience Level', 'verbose_name_plural': 'Audience Levels'}), migrations.CreateModel(name='Conference', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')), ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')), ('start', models.DateTimeField(blank=True, null=True, verbose_name='start')), ('end', models.DateTimeField(blank=True, null=True, verbose_name='end')), ('name', models.CharField(max_length=100, verbose_name='name')), ('code', models.CharField(max_length=10, unique=True, verbose_name='code')), ('timezone', timezone_field.fields.TimeZoneField())], options={'verbose_name': 'Conference', 'verbose_name_plural': 'Conferences'}), migrations.CreateModel(name='Deadline', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('start', models.DateTimeField(blank=True, null=True, verbose_name='start')), ('end', models.DateTimeField(blank=True, null=True, verbose_name='end')), ('name', models.CharField(blank=True, default='', max_length=100, verbose_name='name')), ('type', models.CharField(choices=[('cfp', 'Call for proposal'), ('voting', 'Voting'), ('refund', 'Ticket refund'), ('custom', 'Custom deadline')], max_length=10, verbose_name='type'))], options={'abstract': False}), migrations.CreateModel(name='Duration', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=100, verbose_name='name')), ('duration', models.PositiveIntegerField(validators=[django.core.validators.MinValueValidator(1)], verbose_name='duration')), ('notes', models.TextField(blank=True, verbose_name='notes'))], options={'verbose_name': 'Duration', 'verbose_name_plural': 'Durations'}), migrations.CreateModel(name='Topic', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=100, unique=True))], options={'verbose_name': 'Topic', 'verbose_name_plural': 'Topics'}), migrations.CreateModel(name='TicketFare', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')), ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')), ('start', models.DateTimeField(blank=True, null=True, verbose_name='start')), ('end', models.DateTimeField(blank=True, null=True, verbose_name='end')), ('code', models.CharField(max_length=10, verbose_name='code')), ('name', models.CharField(max_length=100, verbose_name='name')), ('description', models.TextField(verbose_name='description')), ('price', models.DecimalField(decimal_places=2, max_digits=10, verbose_name='price')), ('conference', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='ticket_fares', to='conferences.Conference', verbose_name='conference'))], options={'verbose_name': 'Ticket Fare', 
'verbose_name_plural': 'Ticket fares'})] |
class MatrixMultiplication(BinaryOperator):
def __init__(self, left, right):
super().__init__('@', left, right)
def diff(self, variable):
raise NotImplementedError("diff not implemented for symbol of type 'MatrixMultiplication'")
def _binary_jac(self, left_jac, right_jac):
(left, right) = self.orphans
if (isinstance(left, pybamm.Array) or (isinstance(left, pybamm.Negate) and isinstance(left.child, pybamm.Array))):
left = pybamm.Matrix(csr_matrix(left.evaluate()))
return (left @ right_jac)
else:
raise NotImplementedError(f'''jac of 'MatrixMultiplication' is only
implemented for left of type 'pybamm.Array',
not {left.__class__}''')
def _binary_evaluate(self, left, right):
return (left @ right)
def _sympy_operator(self, left, right):
sympy = have_optional_dependency('sympy')
left = sympy.Matrix(left)
right = sympy.Matrix(right)
return (left * right) |
def conv_bn(data, cfg, num_filters, kernel=(3, 3), stride=(1, 1), pad=(1, 1), group=1, workspace=512, bn_mom=0.9, name=''):
body = mx.sym.Convolution(data=data, num_filter=num_filters, kernel=kernel, stride=stride, pad=pad, num_group=group, no_bias=True, workspace=workspace, name=(name + '_conv'))
body = mx.sym.BatchNorm(data=body, fix_gamma=False, eps=2e-05, momentum=bn_mom, name=(name + '_bn'))
return body |
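# Hedged usage sketch for conv_bn above (classic MXNet 1.x symbolic API);
# cfg is accepted but unused by the helper, so None is passed here as an
# assumption about the caller's convention.
import mxnet as mx

data = mx.sym.Variable('data')
stem = conv_bn(data, cfg=None, num_filters=64, kernel=(3, 3), stride=(2, 2), name='stem')
print(stem.list_arguments())  # ['data', 'stem_conv_weight', 'stem_bn_gamma', ...] |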
def generate_parity_permutations(seq):
if isinstance(seq, str):
seq = [x for x in seq]
indices = seq[1:]
permutations = [([seq[0]], 1)]
while indices:
index_to_inject = indices.pop(0)
new_permutations = []
for perm in permutations:
for put_index in range((len(perm[0]) + 1)):
new_index_list = copy.deepcopy(perm[0])
new_index_list.insert((len(perm[0]) - put_index), index_to_inject)
new_permutations.append((new_index_list, (perm[1] * ((- 1) ** put_index))))
permutations = new_permutations
return permutations |
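# Quick illustrative check of generate_parity_permutations above: each
# permutation of the input is paired with its parity sign (+1 even, -1 odd).
perms = generate_parity_permutations('abc')
print(perms)
# [(['a', 'b', 'c'], 1), (['a', 'c', 'b'], -1), (['c', 'a', 'b'], 1),
#  (['b', 'a', 'c'], -1), (['b', 'c', 'a'], 1), (['c', 'b', 'a'], -1)]
assert sum(sign for (_, sign) in perms) == 0  # even and odd permutations balance |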
def discriminator(image, options, reuse=False, name='discriminator'):
with tf.variable_scope(name):
if reuse:
tf.get_variable_scope().reuse_variables()
else:
assert (tf.get_variable_scope().reuse is False)
h0 = lrelu(conv2d(image, options.df_dim, name='d_h0_conv'))
h1 = lrelu(instance_norm(conv2d(h0, (options.df_dim * 2), name='d_h1_conv'), 'd_bn1'))
h2 = lrelu(instance_norm(conv2d(h1, (options.df_dim * 4), name='d_h2_conv'), 'd_bn2'))
h3 = lrelu(instance_norm(conv2d(h2, (options.df_dim * 8), s=1, name='d_h3_conv'), 'd_bn3'))
h4 = conv2d(h3, 1, s=1, name='d_h3_pred')
return h4 |
def electrolyte_conductivity_base_Landesfeind2019(c_e, T, coeffs):
c = (c_e / 1000)
(p1, p2, p3, p4, p5, p6) = coeffs
A = (p1 * (1 + (T - p2)))
B = ((1 + (p3 * pybamm.sqrt(c))) + ((p4 * (1 + (p5 * pybamm.exp((1000 / T))))) * c))
C = (1 + ((c ** 4) * (p6 * pybamm.exp((1000 / T)))))
sigma_e = (((A * c) * B) / C)
return (sigma_e / 10) |
class StepLRScheduler(Scheduler):
def __init__(self, optimizer: torch.optim.Optimizer, decay_t: float, decay_rate: float=1.0, warmup_t=0, warmup_lr_init=0, t_in_epochs=True, noise_range_t=None, noise_pct=0.67, noise_std=1.0, noise_seed=42, initialize=True) -> None:
super().__init__(optimizer, param_group_field='lr', noise_range_t=noise_range_t, noise_pct=noise_pct, noise_std=noise_std, noise_seed=noise_seed, initialize=initialize)
self.decay_t = decay_t
self.decay_rate = decay_rate
self.warmup_t = warmup_t
self.warmup_lr_init = warmup_lr_init
self.t_in_epochs = t_in_epochs
if self.warmup_t:
self.warmup_steps = [((v - warmup_lr_init) / self.warmup_t) for v in self.base_values]
super().update_groups(self.warmup_lr_init)
else:
self.warmup_steps = [1 for _ in self.base_values]
def _get_lr(self, t):
if (t < self.warmup_t):
lrs = [(self.warmup_lr_init + (t * s)) for s in self.warmup_steps]
else:
lrs = [(v * (self.decay_rate ** (t // self.decay_t))) for v in self.base_values]
return lrs
def get_epoch_values(self, epoch: int):
if self.t_in_epochs:
return self._get_lr(epoch)
else:
return None
def get_update_values(self, num_updates: int):
if (not self.t_in_epochs):
return self._get_lr(num_updates)
else:
return None |
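# Hedged usage sketch for StepLRScheduler above, assuming the timm-style
# Scheduler base class it extends is importable: two warmup epochs, then
# the base LR halves every 10 epochs.
import torch

model = torch.nn.Linear(4, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
sched = StepLRScheduler(optimizer, decay_t=10, decay_rate=0.5, warmup_t=2, warmup_lr_init=0.01)
for epoch in (0, 1, 2, 10, 20):
    print(epoch, sched.get_epoch_values(epoch))
# 0 [0.01]   1 [0.055]   2 [0.1]   10 [0.05]   20 [0.025] |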
def is_hermitian(operator):
if isinstance(operator, (FermionOperator, BosonOperator, InteractionOperator)):
return (normal_ordered(operator) == normal_ordered(hermitian_conjugated(operator)))
if isinstance(operator, (QubitOperator, QuadOperator)):
return (operator == hermitian_conjugated(operator))
elif isinstance(operator, spmatrix):
difference = (operator - hermitian_conjugated(operator))
discrepancy = 0.0
if difference.nnz:
discrepancy = max(abs(difference.data))
return (discrepancy < EQ_TOLERANCE)
elif isinstance(operator, numpy.ndarray):
difference = (operator - hermitian_conjugated(operator))
discrepancy = numpy.amax(abs(difference))
return (discrepancy < EQ_TOLERANCE)
else:
raise TypeError('Checking whether a {} is hermitian is not supported.'.format(type(operator).__name__)) |
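# A small numeric check of is_hermitian above for the ndarray branch
# (sketch; assumes numpy and the module's hermitian_conjugated, which for
# dense arrays amounts to the conjugate transpose).
import numpy
pauli_y = numpy.array([[0, -1j], [1j, 0]])
assert is_hermitian(pauli_y)           # Y equals its conjugate transpose
assert not is_hermitian(1j * pauli_y)  # iY is anti-Hermitian |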
class PositionWeightedModuleTest(unittest.TestCase):
def test_populate_weights(self) -> None:
pw = PositionWeightedModule(max_feature_length=10)
features = KeyedJaggedTensor.from_offsets_sync(keys=['f1', 'f2'], values=torch.tensor([0, 1, 2, 3, 4, 5, 6, 7]), offsets=torch.tensor([0, 2, 2, 3, 4, 5, 8]))
features = features.to_dict()
jt = features['f1']
weighted_features = pw(jt)
self.assertEqual(weighted_features.weights().size(), (3,))
pw_f1_ref = torch.gather(pw.state_dict()['position_weight'], 0, torch.tensor([0, 1, 0]))
pw_f1 = weighted_features.weights().detach()
self.assertTrue(torch.allclose(pw_f1_ref, pw_f1))
position_weighted_module_gm = symbolic_trace(pw)
position_weighted_module_gm_script = torch.jit.script(position_weighted_module_gm)
weighted_features_gm_script = position_weighted_module_gm_script(jt)
torch.testing.assert_close(weighted_features.values(), weighted_features_gm_script.values())
torch.testing.assert_close(weighted_features.lengths(), weighted_features_gm_script.lengths())
@unittest.skipIf((torch.cuda.device_count() <= 0), 'Not enough GPUs, this test requires at least one GPU')
def test_rematerialize_from_meta(self) -> None:
pw = PositionWeightedModule(max_feature_length=10, device=torch.device('meta'))
self.assertTrue(pw.position_weight.is_meta)
init_parameters(pw, torch.device('cuda'))
self.assertTrue((not pw.position_weight.is_meta))
torch.testing.assert_close(pw.position_weight, torch.ones_like(pw.position_weight)) |
def test_preloop_hook(capsys):
testargs = ['prog', 'say hello', 'quit']
with mock.patch.object(sys, 'argv', testargs):
app = PluggedApp()
app.register_preloop_hook(app.prepost_hook_one)
app.cmdloop()
(out, err) = capsys.readouterr()
assert (out == 'one\nhello\n')
assert (not err) |
class ShapeNetPart(Dataset):
def __init__(self, root: str, split: str='train', point_num: int=2500, transform=None):
super().__init__()
self.root = root
self.point_num = point_num
self.transform = transform
self.category_id = {}
with open(os.path.join(root, 'synsetoffset2category.txt')) as cat_file:
for line in cat_file:
tokens = line.strip().split()
self.category_id[tokens[1]] = tokens[0]
self.category_names = list(self.category_id.values())
split_file_path = os.path.join(root, 'train_test_split', 'shuffled_{}_file_list.json'.format(split))
split_file_list = json.load(open(split_file_path, 'r'))
cat_ids = list(self.category_id.keys())
self.file_list = []
for name in split_file_list:
(_, cat_id, obj_id) = name.strip().split('/')
if (cat_id in cat_ids):
self.file_list.append(os.path.join(cat_id, obj_id))
def get_mask(self, category):
mask = torch.zeros(TOTAL_PARTS_NUM)
mask[get_valid_labels(category)] = 1
mask = mask.unsqueeze(0).repeat(self.point_num, 1)
return mask
def get_catgory_onehot(self, category):
onehot = torch.zeros(len(self.category_names))
index = self.category_names.index(category)
onehot[index] = 1
return onehot
def __len__(self):
return len(self.file_list)
def __getitem__(self, index):
(cat_id, obj_id) = self.file_list[index].split('/')
category = self.category_id[cat_id]
points = torch.FloatTensor(np.genfromtxt(os.path.join(self.root, cat_id, 'points', '{}.pts'.format(obj_id))))
labels = torch.LongTensor(np.genfromtxt(os.path.join(self.root, cat_id, 'points_label', '{}.seg'.format(obj_id))))
labels = ((labels - 1) + get_valid_labels(category)[0])
sample_ids = torch.multinomial(torch.ones(points.size(0)), num_samples=self.point_num, replacement=True)
points = points[sample_ids]
labels = labels[sample_ids]
if self.transform:
points = self.transform(points)
mask = self.get_mask(category)
onehot = self.get_catgory_onehot(category)
return (category, obj_id, points, labels, mask, onehot) |
class SIMIaccess():
def __init__(self, path=None):
assert os.path.exists(path), 'similarity matrix {} does not exist.'.format(path)
df_sim = pd.read_csv(path, index_col=0)
self.matrix = df_sim.values
self.labels = list(df_sim.columns)
def findSimi(self, dt_label, gt_label):
assert isinstance(dt_label, np.int64), 'detection label should be of int type, but is {} type'.format(type(dt_label))
assert isinstance(gt_label, np.int64), 'groundtruth label should be of int type, but is {} type'.format(type(gt_label))
dt_label = str(dt_label)
gt_label = str(gt_label)
if (dt_label == gt_label):
return 1
elif ((dt_label in self.labels) and (gt_label in self.labels)):
index_i = self.labels.index(dt_label)
index_j = self.labels.index(gt_label)
simi = self.matrix[(index_i, index_j)]
return simi
else:
return 0 |
def get_random_outer_outputs(scan_args: ScanArgs) -> List[Tuple[(int, TensorVariable, TensorVariable)]]:
rv_vars = []
for (n, oo_var) in enumerate([o for o in scan_args.outer_outputs if (not isinstance(o.type, RandomType))]):
oo_info = scan_args.find_among_fields(oo_var)
io_type = oo_info.name[(oo_info.name.index('_', 6) + 1):]
inner_out_type = f'inner_out_{io_type}'
io_var = getattr(scan_args, inner_out_type)[oo_info.index]
if (io_var.owner and isinstance(io_var.owner.op, MeasurableVariable)):
rv_vars.append((n, oo_var, io_var))
return rv_vars |
def _generate_list_url(mailto: str) -> str:
list_name_domain = mailto.lower().removeprefix('mailto:').strip()
list_name = list_name_domain.split('@')[0]
if list_name_domain.endswith('@googlegroups.com'):
return f'https://groups.google.com/g/{list_name}'
if (not list_name_domain.endswith('@python.org')):
return mailto
if (list_name in {'csv', 'db-sig', 'doc-sig', 'python-list', 'web-sig'}):
return f'https://mail.python.org/mailman/listinfo/{list_name}'
if (list_name in {'import-sig', 'python-3000'}):
return f'https://mail.python.org/pipermail/{list_name}/'
return f'https://mail.python.org/archives/list/{list_name}@python.org/' |
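# Illustrative calls for _generate_list_url above (the URL templates in the
# body are reconstructed, so treat the exact outputs as assumptions):
print(_generate_list_url('mailto:python-list@python.org'))  # Mailman2 listinfo URL
print(_generate_list_url('mailto:typing-sig@python.org'))   # Mailman3 archive URL
print(_generate_list_url('mailto:someone@example.com'))     # returned unchanged |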
def demo(opt):
os.environ['CUDA_VISIBLE_DEVICES'] = opt.gpus_str
opt.debug = max(opt.debug, 1)
Detector = detector_factory[opt.task]
detector = Detector(opt)
if ((opt.demo == 'webcam') or (opt.demo[(opt.demo.rfind('.') + 1):].lower() in video_ext)):
cam = cv2.VideoCapture((0 if (opt.demo == 'webcam') else opt.demo))
detector.pause = False
while True:
(_, img) = cam.read()
cv2.imshow('input', img)
ret = detector.run(img)
time_str = ''
for stat in time_stats:
time_str = (time_str + '{} {:.3f}s |'.format(stat, ret[stat]))
print(time_str)
if (cv2.waitKey(1) == 27):
return
else:
if os.path.isdir(opt.demo):
image_names = []
ls = os.listdir(opt.demo)
for file_name in sorted(ls):
ext = file_name[(file_name.rfind('.') + 1):].lower()
if (ext in image_ext):
image_names.append(os.path.join(opt.demo, file_name))
else:
image_names = [opt.demo]
for image_name in image_names:
ret = detector.run(image_name)
time_str = ''
for stat in time_stats:
time_str = (time_str + '{} {:.3f}s |'.format(stat, ret[stat]))
print(time_str) |
def _create_test_scenes(num_scenes=2, shape=DEFAULT_SHAPE, area=None):
from satpy import Scene
ds1 = _create_test_dataset('ds1', shape=shape, area=area)
ds2 = _create_test_dataset('ds2', shape=shape, area=area)
scenes = []
for _ in range(num_scenes):
scn = Scene()
scn['ds1'] = ds1.copy()
scn['ds2'] = ds2.copy()
scenes.append(scn)
return scenes |
class TestQueueConsumerServer():
@pytest.fixture
def build_server(self):
server = [None]
def _build_server(max_concurrency=5, pump_raises=None, handler_raises=None):
server[0] = QueueConsumerServer.new(consumer_factory=FakeQueueConsumerFactory(pump_raises=pump_raises, handler_raises=handler_raises), max_concurrency=max_concurrency, listener=mock.Mock(spec=socket.socket), stop_timeout=datetime.timedelta(seconds=30))
return server[0]
(yield _build_server)
try:
if (server[0] is not None):
server[0].stop()
except AssertionError:
pass
@pytest.fixture
def server(self, build_server):
return build_server()
def test_new(self):
max_concurrency = 5
server = QueueConsumerServer.new(consumer_factory=FakeQueueConsumerFactory(), max_concurrency=max_concurrency, listener=mock.Mock(spec=socket.socket), stop_timeout=datetime.timedelta(seconds=30))
assert (not server.started)
assert (not server.stopped)
assert (not server.pump.started)
assert (not server.pump.stopped)
assert (server.pump.work_queue.maxsize == 7)
server.healthcheck_server.start.assert_not_called()
server.healthcheck_server.stop.assert_not_called()
assert (len(server.handlers) == max_concurrency)
assert (len(server.handlers) == len(server.threads))
for handler in server.handlers:
assert (not handler.started)
assert (not handler.stopped)
def test_start(self, server):
server.start()
assert server.started
assert (not server.stopped)
assert server.pump.started
assert (not server.pump.stopped)
server.healthcheck_server.start.assert_called_once()
server.healthcheck_server.stop.assert_not_called()
for handler in server.handlers:
assert handler.started
assert (not handler.stopped)
with pytest.raises(AssertionError):
server.start()
def test_stop(self, server):
server.start()
server.stop()
assert server.started
assert server.stopped
assert server.pump.started
assert server.pump.stopped
server.healthcheck_server.start.assert_called_once()
server.healthcheck_server.stop.assert_called_once()
for handler in server.handlers:
assert handler.started
assert handler.stopped
with pytest.raises(AssertionError):
server.stop()
def test_stop_before_start(self, server):
with pytest.raises(AssertionError):
server.stop()
def test_pump_exception_terminates(self, build_server):
server = build_server(max_concurrency=1, pump_raises=Exception())
server._terminate = mock.Mock()
server.start()
time.sleep(0.5)
server._terminate.assert_called_once()
def test_handler_exception_terminates(self, build_server):
server = build_server(max_concurrency=1, handler_raises=Exception())
server._terminate = mock.Mock()
server.start()
time.sleep(0.5)
server._terminate.assert_called_once()
def test_handler_timeout_terminates(self, build_server):
server = build_server(max_concurrency=1, handler_raises=ServerTimeout('', 10, False))
server._terminate = mock.Mock()
server.start()
time.sleep(0.5)
server._terminate.assert_called_once() |
class PromptView(QuotientView):
def __init__(self, ctx: Context, alert: Alert):
super().__init__(ctx, timeout=300)
self.ctx = ctx
self.alert = alert
@discord.ui.button(style=discord.ButtonStyle.green, label='Read Now')
async def read_now(self, inter: discord.Interaction, btn: discord.Button):
_e = discord.Embed.from_dict(self.alert.message)
(await inter.response.send_message(embed=_e, ephemeral=True))
self.stop()
(await self.message.delete(delay=0))
(await self.alert.refresh_from_db())
read = (await Read.create(user_id=inter.user.id))
(await self.alert.reads.add(read))
@discord.ui.button(style=discord.ButtonStyle.red, label='Dismiss')
async def dismiss(self, inter: discord.Interaction, btn: discord.Button):
self.stop()
(await self.message.delete(delay=0)) |
def load_dataset_splits(args, task):
task.load_dataset(args.train_subset, combine=True)
for split in args.valid_subset.split(','):
for k in itertools.count():
split_k = (split + (str(k) if (k > 0) else ''))
try:
task.load_dataset(split_k, combine=False)
except FileNotFoundError as e:
if (k > 0):
break
raise e |
@pytest.mark.slow
@check_figures_equal()
def test_DecisionMatrixPlotter_bar(decision_matrix, fig_test, fig_ref):
dm = decision_matrix(seed=42, min_alternatives=3, max_alternatives=3, min_criteria=3, max_criteria=3)
plotter = plot.DecisionMatrixPlotter(dm=dm)
test_ax = fig_test.subplots()
plotter.bar(ax=test_ax)
df = dm.matrix
df.columns = [f'{c} {o.to_symbol()}' for (c, o) in zip(dm.criteria, dm.objectives)]
df.columns.name = 'Criteria'
exp_ax = fig_ref.subplots()
df.plot.bar(ax=exp_ax) |
def _basic_diff(f, x, n=1):
if isinstance(f, (Expr, Symbol, numbers.Number)):
return diff(f, x, n)
elif hasattr(f, '_eval_derivative_n_times'):
return f._eval_derivative_n_times(x, n)
else:
raise ValueError((('In _basic_diff type(arg) = ' + str(type(f))) + ' not allowed.'))
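# Usage sketch for _basic_diff above, assuming the sympy names (diff, Expr,
# Symbol) and the stdlib numbers module imported by the surrounding code.
from sympy import Symbol, sin

x = Symbol('x')
print(_basic_diff(sin(x), x, n=2))  # -sin(x): sin(x) is an Expr, so diff() is used |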
@resource('/v1/organization/<orgname>/private')
@path_param('orgname', 'The name of the organization')
@internal_only
@related_user_resource(PrivateRepositories)
@show_if(features.BILLING)
class OrgPrivateRepositories(ApiResource):
@require_scope(scopes.ORG_ADMIN)
@nickname('getOrganizationPrivateAllowed')
def get(self, orgname):
permission = CreateRepositoryPermission(orgname)
if permission.can():
organization = model.organization.get_organization(orgname)
private_repos = model.user.get_private_repo_count(organization.username)
data = {'privateAllowed': False}
repos_allowed = 0
if organization.stripe_id:
cus = stripe.Customer.retrieve(organization.stripe_id)
if cus.subscription:
plan = get_plan(cus.subscription.plan.id)
if plan:
repos_allowed = plan['privateRepos']
if features.RH_MARKETPLACE:
query = organization_skus.get_org_subscriptions(organization.id)
rh_subscriptions = (list(query.dicts()) if (query is not None) else [])
for subscription in rh_subscriptions:
subscription_sku = marketplace_subscriptions.get_subscription_sku(subscription['subscription_id'])
equivalent_stripe_plan = get_plan_using_rh_sku(subscription_sku)
if equivalent_stripe_plan:
repos_allowed += equivalent_stripe_plan['privateRepos']
data['privateAllowed'] = (private_repos < repos_allowed)
if AdministerOrganizationPermission(orgname).can():
data['privateCount'] = private_repos
return data
raise Unauthorized() |
class ModuleInPathTest(resources.SysPathSetup, unittest.TestCase):
def test_success(self) -> None:
datadir = resources.find('')
assert modutils.module_in_path('data.module', datadir)
assert modutils.module_in_path('data.module', (datadir,))
assert modutils.module_in_path('data.module', os.path.abspath(datadir))
assert modutils.module_in_path('pyi_data.module', datadir)
assert modutils.module_in_path('pyi_data.module', (datadir,))
assert modutils.module_in_path('pyi_data.module', os.path.abspath(datadir))
assert modutils.module_in_path('data.module', '')
assert modutils.module_in_path('pyi_data.module', '')
def test_bad_import(self) -> None:
datadir = resources.find('')
assert (not modutils.module_in_path('this_module_is_no_more', datadir))
def test_no_filename(self) -> None:
datadir = resources.find('')
assert (not modutils.module_in_path('sys', datadir))
def test_failure(self) -> None:
datadir = resources.find('')
assert (not modutils.module_in_path('etree', datadir))
assert (not modutils.module_in_path('astroid', datadir)) |
@registry.register('enqueue-files', args=1)
def _enqueue_files(app, value):
library = app.library
window = app.window
songs = []
for param in split_escape(value, ','):
try:
song_path = uri2fsn(param)
except ValueError:
song_path = param
if (song_path in library):
songs.append(library[song_path])
elif os.path.isfile(song_path):
songs.append(library.add_filename(os.path.realpath(song_path)))
if songs:
window.playlist.enqueue(songs) |
class Generator(nn.Module):
def __init__(self, latent_dim, target_dim):
super().__init__()
self.net = nn.Sequential(nn.Linear(latent_dim, 600), nn.LayerNorm(600), nn.ReLU(), nn.Linear(600, 200), nn.LayerNorm(200), nn.ReLU(), nn.Linear(200, 100), nn.LayerNorm(100), nn.ReLU(), nn.Linear(100, target_dim), nn.Sigmoid())
def forward(self, x):
return self.net(x) |
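# Minimal smoke-test sketch for the Generator above (latent/target sizes are
# illustrative assumptions): outputs are sigmoid-bounded in [0, 1].
import torch

gen = Generator(latent_dim=32, target_dim=10)
fake = gen(torch.randn(8, 32))
print(fake.shape)                               # torch.Size([8, 10])
assert 0.0 <= fake.min() and fake.max() <= 1.0  # final nn.Sigmoid() |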
def _get_remaining_args(obj: dict, cls: type, constructor_args: dict, strict: bool, fork_inst: type) -> dict:
remaining_attrs = {attr_name: obj[attr_name] for attr_name in obj if ((attr_name not in constructor_args) and (attr_name != META_ATTR))}
if (strict and remaining_attrs):
unexpected_arg = list(remaining_attrs.keys())[0]
err_msg = 'Type "{}" does not expect "{}".'.format(get_class_name(cls), unexpected_arg)
raise SignatureMismatchError(err_msg, unexpected_arg, obj, cls)
return remaining_attrs |
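# Behaviour sketch for _get_remaining_args above (assumes the jsons-style
# META_ATTR and SignatureMismatchError from the surrounding module; Point is
# a hypothetical example class).
class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y

obj = {'x': 1, 'y': 2, 'z': 3}
leftover = _get_remaining_args(obj, Point, {'x': 1, 'y': 2}, False, None)
assert leftover == {'z': 3}  # strict=False just returns the extras
# With strict=True the same call raises SignatureMismatchError for 'z'. |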
class TestMacroResolving(unittest.TestCase):
def setUp(self):
self.environment = pynag.Utils.misc.FakeNagiosEnvironment()
self.environment.create_minimal_environment()
self.environment.update_model()
resource_cfg_file = os.path.join(tests_dir, 'testconfigs/custom.macros.resource.cfg')
self.environment.import_config(resource_cfg_file)
self.environment.config._edit_static_file(attribute='resource_file', new_value=os.path.join(self.environment.objects_dir, 'custom.macros.resource.cfg'))
cfg_file = os.path.join(tests_dir, 'testconfigs/custom.macros.cfg')
self.environment.import_config(cfg_file)
self.environment.config.parse_maincfg()
self.macroservice = pynag.Model.Service.objects.get_by_name('macroservice')
self.macrohost = pynag.Model.Host.objects.get_by_shortname('macrohost')
self.macrohost2 = pynag.Model.Host.objects.get_by_shortname('macrohost2')
def tearDown(self):
self.environment.terminate()
def test_get_custom_variable_macro_normal(self):
self.assertEqual('macro1', self.macroservice._get_custom_variable_macro('$_SERVICE_MACRO1$'))
def test_get_custom_variable_macro_nonexistant_macro(self):
self.assertEqual('', self.macroservice._get_custom_variable_macro('$_SERVICE_DOES_NOT_EXIST$'))
def test_get_custom_variable_macro_raises_on_invalid_macro(self):
with self.assertRaises(pynag.Model.InvalidMacro):
self.macroservice._get_custom_variable_macro('$_TEST')
def test_get_custom_variable_macro_raises_on_invalid_object_type(self):
with self.assertRaises(pynag.Model.InvalidMacro):
self.macroservice._get_custom_variable_macro('$_HOST_MACRO1$')
def test_get_service_macro_standard(self):
self.assertEqual('macroservice', self.macroservice._get_service_macro('$SERVICEDESC$'))
def test_get_service_macro_nonexistant(self):
self.assertEqual('', self.macroservice._get_service_macro('$SERVICEFOOBAR$'))
def test_get_service_macro_invalid_format(self):
self.assertEqual('', self.macroservice._get_service_macro('$SERVICEFOOBAR'))
def test_get_service_macro_custom_variable(self):
self.assertEqual('macro1', self.macroservice._get_service_macro('$_SERVICE_MACRO1$'))
def test_get_service_macro_custom_variable_empty(self):
self.assertEqual('', self.macroservice._get_service_macro('$_SERVICE_empty$'))
def test_get_service_macro_custom_variable_wrong_type(self):
self.assertEqual('', self.macroservice._get_service_macro('$NOT_A_SERVICE_MACRO$'))
def test_get_host_macro_standard(self):
self.assertEqual('hostaddress', self.macrohost._get_host_macro('$HOSTADDRESS$'))
def test_get_host_macro_host_name(self):
self.assertEqual('macrohost', self.macrohost._get_host_macro('$HOSTNAME$'))
def test_get_host_macro_returns_host_name_when_there_is_no_address(self):
self.macrohost.address = None
self.macrohost.save()
self.assertEqual('macrohost', self.macrohost._get_host_macro('$HOSTADDRESS$'))
def test_get_host_macro_invalid_format(self):
self.assertEqual('', self.macrohost._get_host_macro('$HOSTADDRESS%'))
def test_get_host_macro_custom_variable(self):
self.assertEqual('macro1', self.macrohost._get_host_macro('$_HOST_MACRO1$'))
def test_get_host_macro_custom_variable_missing(self):
self.assertEqual('', self.macrohost._get_host_macro('$_HOST_foo$'))
def test_get_host_macro_custom_variable_inherited_from_parent(self):
self.assertEqual('macro1', self.macrohost2._get_host_macro('$_HOST_MACRO1$'))
def test_service_get_host_macro_normal(self):
self.assertEqual('macrohost', self.macroservice._get_host_macro('$HOSTNAME$'))
def test_service_get_host_macro_invalid_macro_format(self):
self.assertEqual('', self.macroservice._get_host_macro('foobar'))
def test_service_get_host_macro_nonexistant_standard_host_macro(self):
self.assertEqual('', self.macroservice._get_host_macro('$HOSTFOOOOOO$'))
def test_service_get_host_macro_nonexistant_custom_host_variable(self):
self.assertEqual('', self.macroservice._get_host_macro('$_HOSTFOOOOOO$'))
def test_service_get_host_macro_not_a_host_variable(self):
self.assertEqual('', self.macroservice._get_host_macro('$CONTACTNAME$'))
def test_service_get_host_macro_when_different_host_specified(self):
self.assertEqual('macrohost2', self.macroservice._get_host_macro('$HOSTNAME$', host_name='macrohost2'))
def test_service_get_host_macro_when_nonexistant_host_specified(self):
self.assertEqual('', self.macroservice._get_host_macro('$HOSTNAME$', host_name='foo'))
def test_service_get_host_macro_service_has_no_host(self):
self.macroservice.host_name = None
self.macroservice.hostgroups = None
self.macroservice.save()
self.assertEqual('', self.macroservice._get_host_macro('$HOSTNAME$'))
def test_get_command_macro_normal(self):
self.assertEqual('macro1', self.macroservice._get_command_macro('$ARG1$'))
def test_get_command_macro_invalid_format(self):
self.assertEqual('', self.macroservice._get_command_macro('arg1'))
def test_get_command_macro_resource_macro(self):
self.assertEqual('/path/to/user1', self.macroservice._get_command_macro('$ARG2$'))
def test_get_command_macro_nonexistant_macro(self):
self.assertEqual('', self.macroservice._get_command_macro('$ARG17$'))
def test_get_command_macro_when_check_command_is_undefined(self):
self.macroservice.check_command = None
self.macroservice.save()
self.assertEqual('', self.macroservice._get_command_macro('$ARG1$'))
def test_host_get_macro_returns_empty_on_nonexistant_macro(self):
self.assertEqual('', self.macrohost2.get_macro('$INVALID_MACRO$'))
self.assertEqual('', self.macrohost2.get_macro('$HOST_INVALID$'))
self.assertEqual('', self.macrohost2.get_macro('$_HOST_INVALID$'))
def test_host_get_macro_address(self):
self.assertEqual('macrohost2', self.macrohost2.get_macro('$HOSTADDRESS$'))
def test_host_get_macro_address_defaults_to_name(self):
self.macrohost2.address = 'my_addr'
self.assertEqual('my_addr', self.macrohost2.get_macro('$HOSTADDRESS$'))
def test_host_get_macro_display_name(self):
self.macrohost2.display_name = 'display_name'
self.assertEqual('display_name', self.macrohost2.get_macro('$HOSTDISPLAYNAME$'))
def test_host_get_macro_display_name_defaults_to_name(self):
self.assertEqual('macrohost2', self.macrohost2.get_macro('$HOSTDISPLAYNAME$'))
def test_host_get_macro_custom(self):
self.assertEqual('macrohost2', self.macrohost2.get_macro('$_HOST_macrohost2$'))
def test_host_get_macro_standard(self):
self.assertEqual('macrohost2', self.macrohost2.get_macro('$HOSTNAME$'))
def test_get_all_macros(self):
expected_macros = {'$_SERVICE_EMPTY$': '', '$_SERVICE_NONEXISTANT$': '', '$_SERVICE_NOT_USED$': 'this.macro.is.not.used', '$_SERVICE_MACRO1$': 'macro1', '$_HOST_NONEXISTANT$': '', '$_HOST_MACRO1$': 'macro1', '$_HOST_EMPTY$': '', '$ARG1$': 'macro1', '$ARG2$': '/path/to/user1', '$HOSTADDRESS$': 'hostaddress', '$USER1$': '/path/to/user1'}
macros = self.macroservice.get_all_macros()
self.assertEqual(expected_macros, macros)
def test_get_effective_command_line(self):
expected_command_line = "/path/to/user1/macro -H 'hostaddress' host_empty='' service_empty='' host_macro1='macro1' arg1='macro1' host_nonexistant='' service_nonexistant='' escaped_dollarsign=$$ user1_as_argument=/path/to/user1"
actual_command_line = self.macroservice.get_effective_command_line()
self.assertEqual(expected_command_line, actual_command_line)
def test_service_get_macro_returns_empty_on_nonexistant_macro(self):
self.assertEqual('', self.macroservice.get_macro('$INVALID_MACRO$'))
self.assertEqual('', self.macroservice.get_macro('$HOST_INVALID$'))
self.assertEqual('', self.macroservice.get_macro('$_HOST_INVALID$'))
self.assertEqual('', self.macroservice.get_macro('$SERVICE_INVALID$'))
self.assertEqual('', self.macroservice.get_macro('$_SERVICE_INVALID$'))
self.assertEqual('', self.macroservice.get_macro('$ARG17$'))
self.assertEqual('', self.macroservice.get_macro('$ARGINVALID$'))
def test_service_get_macro_invalid_macro(self):
self.assertEqual('', self.macroservice.get_macro('INVALID'))
def test_service_get_macro_standard_macro(self):
self.assertEqual('macroservice', self.macroservice.get_macro('$SERVICEDESC$'))
def test_service_get_macro_command_argument1(self):
self.assertEqual('macro1', self.macroservice.get_macro('$ARG1$'))
def test_service_get_macro_custom_variable(self):
self.assertEqual('macro1', self.macroservice.get_macro('$_SERVICE_MACRO1$'))
def test_service_get_macro_from_host(self):
self.assertEqual('macro1', self.macroservice.get_macro('$_HOST_MACRO1$'))
def test_service_get_macro_custom_host_variable_inherited_from_parent(self):
self.assertEqual('macro1', self.macroservice.get_macro('$_HOST_MACRO1$', host_name='macrohost2'))
def test_service_get_macro_where_host_is_applied_via_hostgroup(self):
self.assertEqual('macrohost2', self.macroservice.get_macro('$_HOST_macrohost2$', host_name='macrohost2'))
def test_service_get_macro_where_command_arg_comes_from_host(self):
self.macroservice.check_command = 'only_arg!$HOSTADDRESS$'
self.assertEqual('hostaddress', self.macroservice.get_macro('$ARG1$'))
self.assertEqual('macrohost2', self.macroservice.get_macro('$ARG1$', host_name='macrohost2')) |
class TestUngrabPointer(EndianTest):
def setUp(self):
self.req_args_0 = {'time': 124458893}
self.req_bin_0 = b'\x1b\x00\x00\x02\x07k\x17\x8d'
def testPackRequest0(self):
bin = request.UngrabPointer._request.to_binary(*(), **self.req_args_0)
self.assertBinaryEqual(bin, self.req_bin_0)
def testUnpackRequest0(self):
(args, remain) = request.UngrabPointer._request.parse_binary(self.req_bin_0, dummy_display, 1)
self.assertBinaryEmpty(remain)
self.assertEqual(args, self.req_args_0) |
def make_dot(var, params=None):
if (params is not None):
assert isinstance(list(params.values())[0], Variable)
param_map = {id(v): k for (k, v) in params.items()}
node_attr = dict(style='filled', shape='box', align='left', fontsize='12', ranksep='0.1', height='0.2')
dot = Digraph(node_attr=node_attr, graph_attr=dict(size='12,12'))
seen = set()
def size_to_str(size):
return (('(' + ', '.join([('%d' % v) for v in size])) + ')')
def add_nodes(var):
if (var not in seen):
if torch.is_tensor(var):
dot.node(str(id(var)), size_to_str(var.size()), fillcolor='orange')
elif hasattr(var, 'variable'):
u = var.variable
name = (param_map[id(u)] if (params is not None) else '')
node_name = ('%s\n %s' % (name, size_to_str(u.size())))
dot.node(str(id(var)), node_name, fillcolor='lightblue')
else:
dot.node(str(id(var)), str(type(var).__name__))
seen.add(var)
if hasattr(var, 'next_functions'):
for u in var.next_functions:
if (u[0] is not None):
dot.edge(str(id(u[0])), str(id(var)))
add_nodes(u[0])
if hasattr(var, 'saved_tensors'):
for t in var.saved_tensors:
dot.edge(str(id(t)), str(id(var)))
add_nodes(t)
add_nodes(var.grad_fn)
return dot |
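# Usage sketch for make_dot above (assumes graphviz.Digraph and torch as in
# the original; traversal starts from y.grad_fn, so y must require grad).
import torch

model = torch.nn.Linear(3, 2)
y = model(torch.randn(1, 3)).sum()
dot = make_dot(y)
dot.render('autograd_graph', format='png')  # writes autograd_graph.png |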
def do_train(cfg, model, resume=False):
model.train()
optimizer = build_optimizer(cfg, model)
scheduler = build_lr_scheduler(cfg, optimizer)
checkpointer = DetectionCheckpointer(model, cfg.OUTPUT_DIR, optimizer=optimizer, scheduler=scheduler)
start_iter = (checkpointer.resume_or_load(cfg.MODEL.WEIGHTS, resume=resume).get('iteration', (- 1)) + 1)
if cfg.SOLVER.RESET_ITER:
logger.info('Reset loaded iteration. Start training from iteration 0.')
start_iter = 0
max_iter = (cfg.SOLVER.MAX_ITER if (cfg.SOLVER.TRAIN_ITER < 0) else cfg.SOLVER.TRAIN_ITER)
periodic_checkpointer = PeriodicCheckpointer(checkpointer, cfg.SOLVER.CHECKPOINT_PERIOD, max_iter=max_iter)
writers = ([CommonMetricPrinter(max_iter), JSONWriter(os.path.join(cfg.OUTPUT_DIR, 'metrics.json')), TensorboardXWriter(cfg.OUTPUT_DIR)] if comm.is_main_process() else [])
mapper = (DatasetMapper(cfg, True) if (cfg.INPUT.CUSTOM_AUG == '') else DatasetMapper(cfg, True, augmentations=build_custom_augmentation(cfg, True)))
if (cfg.DATALOADER.SAMPLER_TRAIN in ['TrainingSampler', 'RepeatFactorTrainingSampler']):
data_loader = build_detection_train_loader(cfg, mapper=mapper)
else:
from centernet.data.custom_dataset_dataloader import build_custom_train_loader
data_loader = build_custom_train_loader(cfg, mapper=mapper)
logger.info('Starting training from iteration {}'.format(start_iter))
with EventStorage(start_iter) as storage:
step_timer = Timer()
data_timer = Timer()
start_time = time.perf_counter()
for (data, iteration) in zip(data_loader, range(start_iter, max_iter)):
data_time = data_timer.seconds()
storage.put_scalars(data_time=data_time)
step_timer.reset()
iteration = (iteration + 1)
storage.step()
loss_dict = model(data)
losses = sum((loss for (k, loss) in loss_dict.items()))
assert torch.isfinite(losses).all(), loss_dict
loss_dict_reduced = {k: v.item() for (k, v) in comm.reduce_dict(loss_dict).items()}
losses_reduced = sum((loss for loss in loss_dict_reduced.values()))
if comm.is_main_process():
storage.put_scalars(total_loss=losses_reduced, **loss_dict_reduced)
optimizer.zero_grad()
losses.backward()
optimizer.step()
storage.put_scalar('lr', optimizer.param_groups[0]['lr'], smoothing_hint=False)
step_time = step_timer.seconds()
storage.put_scalars(time=step_time)
data_timer.reset()
scheduler.step()
if ((cfg.TEST.EVAL_PERIOD > 0) and ((iteration % cfg.TEST.EVAL_PERIOD) == 0) and (iteration != max_iter)):
do_test(cfg, model)
comm.synchronize()
if (((iteration - start_iter) > 5) and (((iteration % 20) == 0) or (iteration == max_iter))):
for writer in writers:
writer.write()
periodic_checkpointer.step(iteration)
total_time = (time.perf_counter() - start_time)
logger.info('Total training time: {}'.format(str(datetime.timedelta(seconds=int(total_time))))) |
@node_rewriter([NegBinomialRV])
def negative_binomial_from_gamma_poisson(fgraph, node):
(rng, *other_inputs, n, p) = node.inputs
(next_rng, g) = _gamma.make_node(rng, *other_inputs, n, ((1 - p) / p)).outputs
(next_rng, p) = poisson.make_node(next_rng, *other_inputs, g).outputs
return [next_rng, p] |
def weights_init_normal(m):
classname = m.__class__.__name__
if (classname.find('Conv') != (- 1)):
init.normal_(m.weight.data, 0.0, 0.02)
elif (classname.find('Linear') != (- 1)):
init.normal_(m.weight.data, 0.0, 0.02)
elif (classname.find('BatchNorm2d') != (- 1)):
init.normal_(m.weight.data, 1.0, 0.02)
init.constant_(m.bias.data, 0.0) |
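# Typical usage sketch for weights_init_normal above: Module.apply() walks
# every submodule, so Conv/Linear/BatchNorm2d weights are re-initialized.
import torch.nn as nn

net = nn.Sequential(nn.Conv2d(3, 16, 3), nn.BatchNorm2d(16), nn.ReLU(), nn.Linear(16, 4))
net.apply(weights_init_normal) |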
class ResnetFeatureExtractor(nn.Module):
def __init__(self, weights: Optional[str]='DEFAULT') -> None:
super().__init__()
self.model = models.resnet.resnet18(weights=weights)
self.model.fc = nn.Identity()
self.model.eval()
def forward(self, x: Tensor) -> Tensor:
x = F.interpolate(x, size=(224, 224), mode='bilinear', align_corners=False)
x = self.model(x)
return x |
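# Usage sketch for ResnetFeatureExtractor above: with the fc head replaced
# by nn.Identity, resnet18 yields 512-d features (weights=None skips the
# pretrained download for a quick smoke test).
import torch

extractor = ResnetFeatureExtractor(weights=None)
with torch.no_grad():
    feats = extractor(torch.randn(2, 3, 64, 64))
print(feats.shape)  # torch.Size([2, 512]) |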
@flaky(max_runs=3, min_passes=1)
@gen_test(timeout=60)
def test_from_kafka():
j = random.randint(0, 10000)
ARGS = {'bootstrap.servers': 'localhost:9092', 'group.id': ('streamz-test%i' % j)}
with kafka_service() as kafka:
(kafka, TOPIC) = kafka
stream = Stream.from_kafka([TOPIC], ARGS, asynchronous=True)
out = stream.sink_to_list()
stream.start()
(yield gen.sleep(1.1))
for i in range(10):
(yield gen.sleep(0.1))
kafka.produce(TOPIC, (b'value-%d' % i))
kafka.flush()
wait_for((lambda : (len(out) == 10)), 10, period=0.1)
assert (out[(- 1)] == b'value-9')
kafka.produce(TOPIC, b'final message')
kafka.flush()
wait_for((lambda : (out[(- 1)] == b'final message')), 10, period=0.1)
stream._close_consumer()
kafka.produce(TOPIC, b'lost message')
kafka.flush()
(yield gen.sleep(1))
assert (out[(- 1)] == b'final message')
stream._close_consumer() |
def pytest_namespace():
try:
import numpy as np
except ImportError:
np = None
try:
import scipy
except ImportError:
scipy = None
try:
from pybind11_tests.eigen import have_eigen
except ImportError:
have_eigen = False
pypy = (platform.python_implementation() == 'PyPy')
skipif = pytest.mark.skipif
return {'suppress': suppress, 'requires_numpy': skipif((not np), reason='numpy is not installed'), 'requires_scipy': skipif((not scipy), reason='scipy is not installed'), 'requires_eigen_and_numpy': skipif(((not have_eigen) or (not np)), reason='eigen and/or numpy are not installed'), 'requires_eigen_and_scipy': skipif(((not have_eigen) or (not scipy)), reason='eigen and/or scipy are not installed'), 'unsupported_on_pypy': skipif(pypy, reason='unsupported on PyPy'), 'unsupported_on_py2': skipif((sys.version_info.major < 3), reason='unsupported on Python 2.x'), 'gc_collect': gc_collect} |
class GraphFactorization(object):
def __init__(self, graph, rep_size=128, epoch=120, learning_rate=0.003, weight_decay=1.0):
self.g = graph
self.node_size = graph.G.number_of_nodes()
self.rep_size = rep_size
self.max_iter = epoch
self.lr = learning_rate
self.lamb = weight_decay
self.sess = tf.Session()
self.adj_mat = self.getAdj()
self.vectors = {}
self.embeddings = self.get_train()
look_back = self.g.look_back_list
for (i, embedding) in enumerate(self.embeddings):
self.vectors[look_back[i]] = embedding
def getAdj(self):
node_size = self.g.node_size
look_up = self.g.look_up_dict
adj = np.zeros((node_size, node_size))
for edge in self.g.G.edges():
adj[look_up[edge[0]]][look_up[edge[1]]] = self.g.G[edge[0]][edge[1]]['weight']
return adj
def get_train(self):
adj_mat = self.adj_mat
mat_mask = (1.0 * (adj_mat > 0))
_embeddings = tf.Variable(tf.contrib.layers.xavier_initializer()([self.node_size, self.rep_size]), dtype=tf.float32, name='embeddings')
Adj = tf.placeholder(tf.float32, [self.node_size, self.node_size], name='adj_mat')
AdjMask = tf.placeholder(tf.float32, [self.node_size, self.node_size], name='adj_mask')
cost = (tf.reduce_sum(tf.square((Adj - (tf.matmul(_embeddings, tf.transpose(_embeddings)) * AdjMask)))) + (self.lamb * tf.reduce_sum(tf.square(_embeddings))))
optimizer = tf.train.AdamOptimizer(self.lr)
train_op = optimizer.minimize(cost)
init = tf.global_variables_initializer()
self.sess.run(init)
print(('total iter: %i' % self.max_iter))
for step in range(self.max_iter):
self.sess.run(train_op, feed_dict={Adj: adj_mat, AdjMask: mat_mask})
if ((step % 50) == 0):
print(('step %i: cost: %g' % (step, self.sess.run(cost, feed_dict={Adj: adj_mat, AdjMask: mat_mask}))))
return self.sess.run(_embeddings)
def save_embeddings(self, filename):
fout = open(filename, 'w')
node_num = len(self.vectors)
fout.write('{} {}\n'.format(node_num, self.rep_size))
for (node, vec) in self.vectors.items():
fout.write('{} {}\n'.format(node, ' '.join([str(x) for x in vec])))
fout.close() |
class ReactpyAsyncWebsocketConsumer(AsyncJsonWebsocketConsumer):
async def connect(self) -> None:
from reactpy_django import models
from reactpy_django.config import REACTPY_AUTH_BACKEND, REACTPY_BACKHAUL_THREAD
(await super().connect())
user = self.scope.get('user')
if (user and user.is_authenticated):
try:
(await login(self.scope, user, backend=REACTPY_AUTH_BACKEND))
except Exception:
(await asyncio.to_thread(_logger.exception, 'ReactPy websocket authentication has failed!'))
elif (user is None):
(await asyncio.to_thread(_logger.debug, 'ReactPy websocket is missing AuthMiddlewareStack! Users will not be accessible within `use_scope` or `use_websocket`!'))
if self.scope.get('session'):
try:
(await database_sync_to_async(self.scope['session'].save)())
except Exception:
(await asyncio.to_thread(_logger.exception, "ReactPy has failed to save scope['session']!"))
else:
(await asyncio.to_thread(_logger.debug, 'ReactPy websocket is missing SessionMiddlewareStack! Sessions will not be accessible within `use_scope` or `use_websocket`!'))
self.dispatcher: (Future | asyncio.Task)
self.threaded = REACTPY_BACKHAUL_THREAD
self.component_session: (models.ComponentSession | None) = None
if self.threaded:
if (not backhaul_thread.is_alive()):
(await asyncio.to_thread(_logger.debug, 'Starting ReactPy backhaul thread.'))
backhaul_thread.start()
self.dispatcher = asyncio.run_coroutine_threadsafe(self.run_dispatcher(), backhaul_loop)
else:
self.dispatcher = asyncio.create_task(self.run_dispatcher())
async def disconnect(self, code: int) -> None:
self.dispatcher.cancel()
if self.component_session:
try:
(await database_sync_to_async(delete_expired_sessions)())
except Exception:
(await asyncio.to_thread(_logger.exception, 'ReactPy has failed to delete expired component sessions!'))
try:
(await self.component_session.asave())
except Exception:
(await asyncio.to_thread(_logger.exception, 'ReactPy has failed to save component session!'))
(await super().disconnect(code))
async def receive_json(self, content: Any, **_) -> None:
if self.threaded:
asyncio.run_coroutine_threadsafe(self.recv_queue.put(content), backhaul_loop)
else:
(await self.recv_queue.put(content))
@classmethod
async def decode_json(cls, text_data):
return orjson.loads(text_data)
@classmethod
async def encode_json(cls, content):
return orjson.dumps(content).decode()
async def run_dispatcher(self):
from reactpy_django import models
from reactpy_django.config import REACTPY_REGISTERED_COMPONENTS, REACTPY_SESSION_MAX_AGE
scope = self.scope
dotted_path = scope['url_route']['kwargs']['dotted_path']
uuid = scope['url_route']['kwargs'].get('uuid')
search = scope['query_string'].decode()
self.recv_queue: asyncio.Queue = asyncio.Queue()
connection = Connection(scope=scope, location=Location(pathname=scope['path'], search=(f'?{search}' if (search and (search != 'undefined')) else '')), carrier=ComponentWebsocket(self.close, self.disconnect, dotted_path))
now = timezone.now()
component_session_args: Sequence[Any] = ()
component_session_kwargs: MutableMapping[(str, Any)] = {}
try:
component_constructor = REACTPY_REGISTERED_COMPONENTS[dotted_path]
except KeyError:
(await asyncio.to_thread(_logger.warning, f'Attempt to access invalid ReactPy component: {dotted_path!r}'))
return
try:
if uuid:
self.component_session = (await models.ComponentSession.objects.aget(uuid=uuid, last_accessed__gt=(now - timedelta(seconds=REACTPY_SESSION_MAX_AGE))))
params: ComponentParams = pickle.loads(self.component_session.params)
component_session_args = params.args
component_session_kwargs = params.kwargs
component_instance = component_constructor(*component_session_args, **component_session_kwargs)
except models.ComponentSession.DoesNotExist:
(await asyncio.to_thread(_logger.warning, f"Component session for '{dotted_path}:{uuid}' not found. The session may have already expired beyond REACTPY_SESSION_MAX_AGE. If you are using a custom host, you may have forgotten to provide args/kwargs."))
return
except Exception:
(await asyncio.to_thread(_logger.exception, f"Failed to construct component {component_constructor} with args='{component_session_args}' kwargs='{component_session_kwargs}'!"))
return
with contextlib.suppress(Exception):
(await serve_layout(Layout(ConnectionContext(component_instance, value=connection)), self.send_json, self.recv_queue.get)) |
class DTFC(nn.Module):
def __init__(self, in_channels, out_channels, num_layers, gr, kt, kf, activation):
super(DTFC, self).__init__()
assert (num_layers > 2)
self.first_conv = nn.Sequential(nn.Conv2d(in_channels=in_channels, out_channels=gr, kernel_size=(kf, kt), stride=1, padding=((kt // 2), (kf // 2))), nn.BatchNorm2d(gr), activation())
c = gr
d = 1
self.H = nn.ModuleList()
for i in range((num_layers - 2)):
self.H.append(nn.Sequential(nn.Conv2d(in_channels=c, out_channels=gr, kernel_size=(kf, kt), stride=1, padding=(((kt // 2) * d), ((kf // 2) * d)), dilation=d), nn.BatchNorm2d(gr), activation()))
c += gr
d += 2
self.last_conv = nn.Sequential(nn.Conv2d(in_channels=c, out_channels=out_channels, kernel_size=(kf, kt), stride=1, padding=((kt // 2), (kf // 2))), nn.BatchNorm2d(out_channels), activation())
self.activation = self.H[(- 1)][(- 1)]
def forward(self, x):
x = self.first_conv(x)
for h in self.H:
x_ = h(x)
x = torch.cat((x_, x), 1)
return self.last_conv(x) |
class TestNot_():
DEFAULT_EXC_TYPES = (ValueError, TypeError)
def test_not_all(self):
assert (not_.__name__ in validator_module.__all__)
def test_repr(self):
wrapped = in_([3, 4, 5])
v = not_(wrapped)
assert (f'<not_ validator wrapping {wrapped!r}, capturing {v.exc_types!r}>' == repr(v))
def test_success_because_fails(self):
def always_fails(inst, attr, value):
raise ValueError('always fails')
v = not_(always_fails)
a = simple_attr('test')
input_value = 3
v(1, a, input_value)
def test_fails_because_success(self):
def always_passes(inst, attr, value):
pass
v = not_(always_passes)
a = simple_attr('test')
input_value = 3
with pytest.raises(ValueError) as e:
v(1, a, input_value)
assert (("not_ validator child '{!r}' did not raise a captured error".format(always_passes), a, always_passes, input_value, self.DEFAULT_EXC_TYPES) == e.value.args)
def test_composable_with_in_pass(self):
v = not_(in_('abc'))
a = simple_attr('test')
input_value = 'd'
v(None, a, input_value)
def test_composable_with_in_fail(self):
wrapped = in_('abc')
v = not_(wrapped)
a = simple_attr('test')
input_value = 'b'
with pytest.raises(ValueError) as e:
v(None, a, input_value)
assert (("not_ validator child '{!r}' did not raise a captured error".format(in_('abc')), a, wrapped, input_value, self.DEFAULT_EXC_TYPES) == e.value.args)
def test_composable_with_matches_re_pass(self):
v = not_(matches_re('[a-z]{3}'))
a = simple_attr('test')
input_value = 'spam'
v(None, a, input_value)
def test_composable_with_matches_re_fail(self):
wrapped = matches_re('[a-z]{3}')
v = not_(wrapped)
a = simple_attr('test')
input_value = 'egg'
with pytest.raises(ValueError) as e:
v(None, a, input_value)
assert ((f"not_ validator child '{wrapped!r}' did not raise a captured error", a, wrapped, input_value, self.DEFAULT_EXC_TYPES) == e.value.args)
def test_composable_with_instance_of_pass(self):
v = not_(instance_of((int, float)))
a = simple_attr('test')
v(None, a, 'spam')
def test_composable_with_instance_of_fail(self):
wrapped = instance_of((int, float))
v = not_(wrapped)
a = simple_attr('test')
input_value = 2.
with pytest.raises(ValueError) as e:
v(None, a, input_value)
assert (("not_ validator child '{!r}' did not raise a captured error".format(instance_of((int, float))), a, wrapped, input_value, self.DEFAULT_EXC_TYPES) == e.value.args)
def test_custom_capture_match(self):
v = not_(in_('abc'), exc_types=ValueError)
a = simple_attr('test')
v(None, a, 'd')
def test_custom_capture_miss(self):
class MyError(Exception):
pass
wrapped = in_('abc')
v = not_(wrapped, exc_types=MyError)
a = simple_attr('test')
input_value = 'd'
with pytest.raises(ValueError) as e:
v(None, a, input_value)
with pytest.raises(Exception) as e_from_wrapped:
wrapped(None, a, input_value)
assert (e_from_wrapped.value.args == e.value.args)
def test_custom_msg(self):
custom_msg = 'custom message!'
wrapped = in_('abc')
v = not_(wrapped, msg=custom_msg)
a = simple_attr('test')
input_value = 'a'
with pytest.raises(ValueError) as e:
v(None, a, input_value)
assert ((custom_msg, a, wrapped, input_value, self.DEFAULT_EXC_TYPES) == e.value.args)
def test_bad_exception_args(self):
wrapped = in_('abc')
with pytest.raises(TypeError) as e:
not_(wrapped, exc_types=(str, int))
assert ("'exc_types' must be a subclass of <class 'Exception'> (got <class 'str'>)." == e.value.args[0]) |
def main():
(log_level, directory, output, ar, paths) = parse_arguments()
level = getattr(logging, log_level)
logging.basicConfig(format='%(levelname)s: %(message)s', level=level)
line_matcher = re.compile(_LINE_PATTERN)
compile_commands = []
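# Dispatch on the kind of path given: directories are scanned recursively for
# .cmd files, while .o, .a, and modules.order paths map to the .cmd files of
# the objects they reference; anything else is a usage error.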
for path in paths:
if os.path.isdir(path):
cmdfiles = cmdfiles_in_dir(path)
elif path.endswith('.o'):
cmdfiles = cmdfiles_for_o(path)
elif path.endswith('.a'):
cmdfiles = cmdfiles_for_a(path, ar)
elif path.endswith('modules.order'):
cmdfiles = cmdfiles_for_modorder(path)
else:
sys.exit('{}: unknown file type'.format(path))
for cmdfile in cmdfiles:
with open(cmdfile, 'rt') as f:
result = line_matcher.match(f.readline())
if result:
try:
entry = process_line(directory, result.group(1), result.group(2))
compile_commands.append(entry)
except ValueError as err:
logging.info('Could not add line from %s: %s', cmdfile, err)
with open(output, 'wt') as f:
json.dump(compile_commands, f, indent=2, sort_keys=True) |
def update_camera_cfgs_from_dict(camera_cfgs: Dict[(str, CameraConfig)], cfg_dict: Dict[(str, dict)]):
if cfg_dict.pop('use_stereo_depth', False):
from .depth_camera import StereoDepthCameraConfig
for (name, cfg) in camera_cfgs.items():
camera_cfgs[name] = StereoDepthCameraConfig.fromCameraConfig(cfg)
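# Keys that do not name a known camera are treated as global overrides and
# applied to every camera config; per-camera dicts are handled in the second loop.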
for (k, v) in cfg_dict.items():
if (k in camera_cfgs):
continue
for cfg in camera_cfgs.values():
if (k == 'add_segmentation'):
cfg.texture_names += ('Segmentation',)
elif (not hasattr(cfg, k)):
raise AttributeError(f'{k} is not a valid attribute of CameraConfig')
else:
setattr(cfg, k, v)
for (name, v) in cfg_dict.items():
if (name not in camera_cfgs):
continue
if v.pop('use_stereo_depth', False):
from .depth_camera import StereoDepthCameraConfig
cfg = camera_cfgs[name]
camera_cfgs[name] = StereoDepthCameraConfig.fromCameraConfig(cfg)
cfg = camera_cfgs[name]
for kk in v:
assert hasattr(cfg, kk), f'{kk} is not a valid attribute of CameraConfig'
cfg.__dict__.update(v) |
def test_observation__flags():
obs_json = deepcopy(j_observation_v2)
obs_json['flags'] = [j_flag_1]
obs = Observation.from_json(obs_json)
flag = obs.flags[0]
assert isinstance(flag, Flag)
assert (flag.id == 123456)
assert (flag.resolved is False)
assert (flag.user.login == 'some_user')
assert (str(flag) == 'Flag(id=123456, flag=spam, resolved=False, username=some_user)') |
def _create_dense_predictor(args: SharedArgs, input_shape: InputShape, label_maps: Dict[(Task, LabelMap)], chunk_prediction_border: float) -> DensePredictor:
predictor_heads = _dense_predictor_heads(args, label_maps)
model = _create_keras_model(args, input_shape, predictor_heads)
return _dense_predictor(model, predictor_heads, args, chunk_prediction_border) |
class DocCog(commands.Cog):
def __init__(self, bot: Bot):
self.base_urls = {}
self.bot = bot
self.doc_symbols: dict[(str, DocItem)] = {}
self.item_fetcher = _batch_parser.BatchParser()
self.renamed_symbols = defaultdict(list)
self.inventory_scheduler = Scheduler(self.__class__.__name__)
self.refresh_event = asyncio.Event()
self.refresh_event.set()
self.symbol_get_event = SharedEvent()
async def cog_load(self) -> None:
(await self.bot.wait_until_guild_available())
(await self.refresh_inventories())
def update_single(self, package_name: str, base_url: str, inventory: InventoryDict) -> None:
self.base_urls[package_name] = base_url
for (group, items) in inventory.items():
for (symbol_name, relative_doc_url) in items:
group_name = group.split(':')[1]
symbol_name = self.ensure_unique_symbol_name(package_name, group_name, symbol_name)
(relative_url_path, _, symbol_id) = relative_doc_url.partition('#')
doc_item = DocItem(package_name, sys.intern(group_name), base_url, sys.intern(relative_url_path), symbol_id)
self.doc_symbols[symbol_name] = doc_item
self.item_fetcher.add_item(doc_item)
log.trace(f'Fetched inventory for {package_name}.')
async def update_or_reschedule_inventory(self, api_package_name: str, base_url: str, inventory_url: str) -> None:
try:
package = (await fetch_inventory(inventory_url))
except InvalidHeaderError as e:
log.warning(f'Invalid inventory header at {inventory_url}. Reason: {e}')
return
if (not package):
if (api_package_name in self.inventory_scheduler):
self.inventory_scheduler.cancel(api_package_name)
delay = FETCH_RESCHEDULE_DELAY.repeated
else:
delay = FETCH_RESCHEDULE_DELAY.first
log.info(f'Failed to fetch inventory; attempting again in {delay} minutes.')
self.inventory_scheduler.schedule_later((delay * 60), api_package_name, self.update_or_reschedule_inventory(api_package_name, base_url, inventory_url))
else:
if (not base_url):
base_url = self.base_url_from_inventory_url(inventory_url)
self.update_single(api_package_name, base_url, package)
def ensure_unique_symbol_name(self, package_name: str, group_name: str, symbol_name: str) -> str:
if ((item := self.doc_symbols.get(symbol_name)) is None):
return symbol_name
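# Name collision: build a prefixed replacement, optionally renaming the
# already-stored symbol instead of the incoming one (rename_extant=True).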
def rename(prefix: str, *, rename_extant: bool=False) -> str:
new_name = f'{prefix}.{symbol_name}'
if (new_name in self.doc_symbols):
if rename_extant:
new_name = f'{item.package}.{item.group}.{symbol_name}'
else:
new_name = f'{package_name}.{group_name}.{symbol_name}'
self.renamed_symbols[symbol_name].append(new_name)
if rename_extant:
self.doc_symbols[new_name] = self.doc_symbols[symbol_name]
return symbol_name
return new_name
if (package_name != item.package):
if (package_name in PRIORITY_PACKAGES):
return rename(item.package, rename_extant=True)
return rename(package_name)
if (group_name in FORCE_PREFIX_GROUPS):
if (item.group in FORCE_PREFIX_GROUPS):
needs_moving = (FORCE_PREFIX_GROUPS.index(group_name) < FORCE_PREFIX_GROUPS.index(item.group))
else:
needs_moving = False
return rename((item.group if needs_moving else group_name), rename_extant=needs_moving)
return rename(item.group, rename_extant=True)
async def refresh_inventories(self) -> None:
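# Block new symbol lookups, wait for in-flight ones to finish, then rebuild
# all inventories from the API's documentation-links listing.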
self.refresh_event.clear()
(await self.symbol_get_event.wait())
log.debug('Refreshing documentation inventory...')
self.inventory_scheduler.cancel_all()
self.base_urls.clear()
self.doc_symbols.clear()
self.renamed_symbols.clear()
(await self.item_fetcher.clear())
coros = [self.update_or_reschedule_inventory(package['package'], package['base_url'], package['inventory_url']) for package in (await self.bot.api_client.get('bot/documentation-links'))]
(await asyncio.gather(*coros))
log.debug('Finished inventory refresh.')
self.refresh_event.set()
def get_symbol_item(self, symbol_name: str) -> tuple[(str, (DocItem | None))]:
doc_item = self.doc_symbols.get(symbol_name)
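# If the full query misses, fall back to its first whitespace-delimited token.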
if ((doc_item is None) and (' ' in symbol_name)):
symbol_name = symbol_name.split(maxsplit=1)[0]
doc_item = self.doc_symbols.get(symbol_name)
return (symbol_name, doc_item)
async def get_symbol_markdown(self, doc_item: DocItem) -> str:
markdown = (await doc_cache.get(doc_item))
if (markdown is None):
log.debug(f'Redis cache miss with {doc_item}.')
try:
markdown = (await self.item_fetcher.get_markdown(doc_item))
except aiohttp.ClientError as e:
log.warning(f'A network error has occurred when requesting parsing of {doc_item}.', exc_info=e)
return 'Unable to parse the requested symbol due to a network error.'
except Exception:
log.exception(f'An unexpected error has occurred when requesting parsing of {doc_item}.')
return 'Unable to parse the requested symbol due to an error.'
if (markdown is None):
return 'Unable to parse the requested symbol.'
return markdown
async def create_symbol_embed(self, symbol_name: str) -> (discord.Embed | None):
log.trace(f'Building embed for symbol `{symbol_name}`')
if (not self.refresh_event.is_set()):
log.debug('Waiting for inventories to be refreshed before processing item.')
(await self.refresh_event.wait())
with self.symbol_get_event:
(symbol_name, doc_item) = self.get_symbol_item(symbol_name)
if (doc_item is None):
log.debug('Symbol does not exist.')
return None
self.bot.stats.incr(f'doc_fetches.{doc_item.package}')
if (symbol_name in self.renamed_symbols):
renamed_symbols = ', '.join(self.renamed_symbols[symbol_name])
footer_text = textwrap.shorten(('Similar names: ' + renamed_symbols), 200, placeholder=' ...')
else:
footer_text = ''
embed = discord.Embed(title=discord.utils.escape_markdown(symbol_name), url=f'{doc_item.url}#{doc_item.symbol_id}', description=(await self.get_symbol_markdown(doc_item)))
embed.set_footer(text=footer_text)
return embed
@commands.group(name='docs', aliases=('doc', 'd'), invoke_without_command=True)
async def docs_group(self, ctx: commands.Context, *, symbol_name: (str | None)) -> None:
(await self.get_command(ctx, symbol_name=symbol_name))
@docs_group.command(name='getdoc', aliases=('g',))
async def get_command(self, ctx: commands.Context, *, symbol_name: (str | None)) -> None:
if (not symbol_name):
inventory_embed = discord.Embed(title=f'All inventories (`{len(self.base_urls)}` total)', colour=discord.Colour.blue())
lines = sorted((f'- [`{name}`]({url})' for (name, url) in self.base_urls.items()))
if self.base_urls:
(await LinePaginator.paginate(lines, ctx, inventory_embed, max_size=400, empty=False))
else:
inventory_embed.description = "Hmmm, seems like there's nothing here yet."
(await ctx.send(embed=inventory_embed))
else:
symbol = symbol_name.strip('`')
async with ctx.typing():
doc_embed = (await self.create_symbol_embed(symbol))
if (doc_embed is None):
error_message = (await send_denial(ctx, 'No documentation found for the requested symbol.'))
(await wait_for_deletion(error_message, (ctx.author.id,), timeout=NOT_FOUND_DELETE_DELAY))
if (not (ctx.message.mentions or ctx.message.role_mentions)):
with suppress(discord.NotFound):
(await ctx.message.delete())
(await error_message.delete())
else:
msg = (await ctx.send(embed=doc_embed))
(await wait_for_deletion(msg, (ctx.author.id,)))
@staticmethod
def base_url_from_inventory_url(inventory_url: str) -> str:
return (inventory_url.removesuffix('/').rsplit('/', maxsplit=1)[0] + '/')
@docs_group.command(name='setdoc', aliases=('s',))
@commands.has_any_role(*MODERATION_ROLES)
@lock(NAMESPACE, COMMAND_LOCK_SINGLETON, raise_error=True)
async def set_command(self, ctx: commands.Context, package_name: PackageName, inventory: Inventory, base_url: ValidURL='') -> None:
if (base_url and (not base_url.endswith('/'))):
raise commands.BadArgument('The base url must end with a slash.')
(inventory_url, inventory_dict) = inventory
body = {'package': package_name, 'base_url': base_url, 'inventory_url': inventory_url}
try:
(await self.bot.api_client.post('bot/documentation-links', json=body))
except ResponseCodeError as err:
if ((err.status == 400) and ('already exists' in err.response_json.get('package', [''])[0])):
log.info(f'Ignoring HTTP 400 as package {package_name} has already been added.')
(await ctx.send(f'Package {package_name} has already been added.'))
return
raise
log.info((f'''User {ctx.author} ({ctx.author.id}) added a new documentation package:
''' + '\n'.join((f'{key}: {value}' for (key, value) in body.items()))))
if (not base_url):
base_url = self.base_url_from_inventory_url(inventory_url)
self.update_single(package_name, base_url, inventory_dict)
(await ctx.send(f'Added the package `{package_name}` to the database and updated the inventories.'))
@docs_group.command(name='deletedoc', aliases=('removedoc', 'rm', 'd'))
@commands.has_any_role(*MODERATION_ROLES)
@lock(NAMESPACE, COMMAND_LOCK_SINGLETON, raise_error=True)
async def delete_command(self, ctx: commands.Context, package_name: PackageName) -> None:
(await self.bot.api_client.delete(f'bot/documentation-links/{package_name}'))
async with ctx.typing():
(await self.refresh_inventories())
(await doc_cache.delete(package_name))
(await ctx.send(f'Successfully deleted `{package_name}` and refreshed the inventories.'))
@docs_group.command(name='refreshdoc', aliases=('rfsh', 'r'))
@commands.has_any_role(*MODERATION_ROLES)
@lock(NAMESPACE, COMMAND_LOCK_SINGLETON, raise_error=True)
async def refresh_command(self, ctx: commands.Context) -> None:
old_inventories = set(self.base_urls)
async with ctx.typing():
(await self.refresh_inventories())
new_inventories = set(self.base_urls)
if (added := ', '.join((new_inventories - old_inventories))):
added = ('+ ' + added)
if (removed := ', '.join((old_inventories - new_inventories))):
removed = ('- ' + removed)
embed = discord.Embed(title='Inventories refreshed', description=(f'''```diff
{added}
{removed}```''' if (added or removed) else ''))
(await ctx.send(embed=embed))
@docs_group.command(name='cleardoccache', aliases=('deletedoccache',))
@commands.has_any_role(*MODERATION_ROLES)
async def clear_cache_command(self, ctx: commands.Context, package_name: (PackageName | Literal['*'])) -> None:
if (await doc_cache.delete(package_name)):
(await self.item_fetcher.stale_inventory_notifier.symbol_counter.delete(package_name))
(await ctx.send(f'Successfully cleared the cache for `{package_name}`.'))
else:
(await ctx.send('No keys matching the package found.'))
async def cog_unload(self) -> None:
self.inventory_scheduler.cancel_all()
(await self.item_fetcher.clear()) |
class ModelSpecTest(tf.test.TestCase):
def test_prune_noop(self):
model1 = model_spec.ModelSpec(np.array([[0, 1, 0], [0, 0, 1], [0, 0, 0]]), [0, 0, 0])
assert model1.valid_spec
assert np.array_equal(model1.original_matrix, model1.matrix)
assert (model1.original_ops == model1.ops)
model2 = model_spec.ModelSpec(np.array([[0, 1, 1], [0, 0, 1], [0, 0, 0]]), [0, 0, 0])
assert model2.valid_spec
assert np.array_equal(model2.original_matrix, model2.matrix)
assert (model2.original_ops == model2.ops)
model3 = model_spec.ModelSpec(np.array([[0, 1, 1, 0], [0, 0, 0, 1], [0, 0, 0, 1], [0, 0, 0, 0]]), [0, 0, 0, 0])
assert model3.valid_spec
assert np.array_equal(model3.original_matrix, model3.matrix)
assert (model3.original_ops == model3.ops)
def test_prune_islands(self):
model1 = model_spec.ModelSpec(np.array([[0, 1, 0, 0], [0, 0, 0, 1], [0, 0, 0, 0], [0, 0, 0, 0]]), [1, 2, 3, 4])
assert model1.valid_spec
assert np.array_equal(model1.matrix, np.array([[0, 1, 0], [0, 0, 1], [0, 0, 0]]))
assert (model1.ops == [1, 2, 4])
model2 = model_spec.ModelSpec(np.array([[0, 1, 0, 0, 0], [0, 0, 0, 0, 1], [0, 0, 0, 1, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]]), [1, 2, 3, 4, 5])
assert model2.valid_spec
assert np.array_equal(model2.matrix, np.array([[0, 1, 0], [0, 0, 1], [0, 0, 0]]))
assert (model2.ops == [1, 2, 5])
def test_prune_dangling(self):
model1 = model_spec.ModelSpec(np.array([[0, 1, 1, 0], [0, 0, 0, 0], [0, 0, 0, 1], [0, 0, 0, 0]]), [1, 2, 3, 4])
assert model1.valid_spec
assert np.array_equal(model1.matrix, np.array([[0, 1, 0], [0, 0, 1], [0, 0, 0]]))
assert (model1.ops == [1, 3, 4])
model2 = model_spec.ModelSpec(np.array([[0, 0, 1, 0], [0, 0, 0, 1], [0, 0, 0, 1], [0, 0, 0, 0]]), [1, 2, 3, 4])
assert model2.valid_spec
assert np.array_equal(model2.matrix, np.array([[0, 1, 0], [0, 0, 1], [0, 0, 0]]))
assert (model2.ops == [1, 3, 4])
def test_prune_disconnected(self):
model1 = model_spec.ModelSpec(np.array([[0, 0], [0, 0]]), [0, 0])
assert (not model1.valid_spec)
model2 = model_spec.ModelSpec(np.array([[0, 1, 0, 0], [0, 0, 0, 0], [0, 0, 0, 1], [0, 0, 0, 0]]), [1, 2, 3, 4])
assert (not model2.valid_spec)
model3 = model_spec.ModelSpec(np.array([[0, 0, 0, 0], [0, 0, 1, 0], [0, 0, 0, 0], [0, 0, 0, 0]]), [1, 2, 3, 4])
assert (not model3.valid_spec)
def test_is_upper_triangular(self):
m0 = np.array([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]])
assert model_spec.is_upper_triangular(m0)
m1 = np.array([[0, 1, 1, 1], [0, 0, 1, 1], [0, 0, 0, 1], [0, 0, 0, 0]])
assert model_spec.is_upper_triangular(m1)
m2 = np.array([[0, 1, 1, 1], [0, 0, 1, 1], [1, 0, 0, 1], [0, 0, 0, 0]])
assert (not model_spec.is_upper_triangular(m2))
m3 = np.array([[0, 0, 0, 0], [0, 0, 0, 0], [1, 0, 0, 0], [0, 0, 0, 0]])
assert (not model_spec.is_upper_triangular(m3))
m4 = np.array([[1, 0, 0, 0], [1, 1, 0, 0], [1, 1, 1, 0], [1, 1, 1, 1]])
assert (not model_spec.is_upper_triangular(m4))
m5 = np.array([[0]])
assert model_spec.is_upper_triangular(m5)
m6 = np.array([[1]])
assert (not model_spec.is_upper_triangular(m6)) |
def to_bytes(something, encoding='utf8') -> bytes:
if isinstance(something, bytes):
return something
if isinstance(something, str):
return something.encode(encoding)
elif isinstance(something, bytearray):
return bytes(something)
else:
raise TypeError('Not a string or bytes like object') |
class InviteDismissViewTest(TestCase):
@classmethod
def setUpTestData(cls):
add_default_data()
def login(self, name, password=None):
self.client.login(username=name, password=(password if password else name))
self.pu = PytitionUser.objects.get(user__username=name)
return self.pu
def logout(self):
self.client.logout()
def test_InviteDismissViewOk(self):
julia = self.login('julia')
julia_perms = Permission.objects.get(organization__slugname='rap', user=julia)
julia_perms.can_add_members = True
julia_perms.save()
response = self.client.get((reverse('org_add_user', args=['rap']) + '?user=max'))
self.assertEqual(response.status_code, 200)
self.logout()
max = self.login('max')
response = self.client.get(reverse('invite_dismiss', kwargs={'orgslugname': 'rap'}), follow=True)
self.assertRedirects(response, reverse('user_dashboard'))
rap = Organization.objects.get(slugname='rap')
self.assertNotIn(max, rap.members.all())
self.assertNotIn(rap, max.organization_set.all())
self.assertNotIn(rap, max.invitations.all()) |
def gen_tutorials(repo_dir: str) -> None:
with open(os.path.join(repo_dir, 'website', 'tutorials.json'), 'r') as infile:
tutorial_config = json.loads(infile.read())
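# Collect the unique tutorial ids across every section of the sidebar config.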
tutorial_ids = {x['id'] for v in tutorial_config.values() for x in v}
for tid in tutorial_ids:
print('Generating {} tutorial'.format(tid))
ipynb_in_path = os.path.join(repo_dir, 'tutorials', '{}.ipynb'.format(tid))
with open(ipynb_in_path, 'r') as infile:
nb_str = infile.read()
nb = nbformat.reads(nb_str, nbformat.NO_CONVERT)
nb['metadata']['kernelspec']['display_name'] = 'python3'
exporter = HTMLExporter()
(html, meta) = exporter.from_notebook_node(nb)
soup = BeautifulSoup(html, 'html.parser')
nb_meat = soup.find('body', {'class': 'jp-Notebook'})
nb_meat.attrs['class'] = ['notebook']
html_out = (JS_SCRIPTS + str(nb_meat))
html_out_path = os.path.join(repo_dir, 'website', '_tutorials', '{}.html'.format(tid))
with open(html_out_path, 'w') as html_outfile:
html_outfile.write(html_out)
script = TEMPLATE.format(tid)
js_out_path = os.path.join(repo_dir, 'website', 'pages', 'tutorials', '{}.js'.format(tid))
with open(js_out_path, 'w') as js_outfile:
js_outfile.write(script)
ipynb_out_path = os.path.join(repo_dir, 'website', 'static', 'files', '{}.ipynb'.format(tid))
with open(ipynb_out_path, 'w') as ipynb_outfile:
ipynb_outfile.write(nb_str)
exporter = ScriptExporter()
(script, meta) = exporter.from_notebook_node(nb)
py_out_path = os.path.join(repo_dir, 'website', 'static', 'files', '{}.py'.format(tid))
with open(py_out_path, 'w') as py_outfile:
py_outfile.write(script) |
# (decorator garbled in the source as "_module()"; it appears to have been a
# registry decorator of the form "@<REGISTRY>.register_module()", but the
# registry name is unrecoverable)
def orthogonal_init(module, gain=1, bias=0):
if hasattr(module, 'weight'):
nn.init.orthogonal_(module.weight, gain)
elif hasattr(module, 'kernel'):
nn.init.orthogonal_(module.kernel, gain)
if (hasattr(module, 'bias') and (module.bias is not None)):
nn.init.constant_(module.bias, bias) |
def pause_splitter_tokens(tokens, split_by={':', ';', '--', '—', '–'}):  # NOTE: the dash entries are assumed em/en dashes; they appeared as '' (mis-encoded) in the source
sents = []
sent = []
for tok in tokens:
sent += [tok]
if (tok in split_by):
if sent:
sents += [sent]
sent = []
if sent:
sents += [sent]
return sents |
def changeDirectory(path):
global currentDirectory
pathC = path.split('>')
if (pathC[0] == ''):
pathC.remove(pathC[0])
myPath = ((currentDirectory + '/') + '/'.join(pathC))
print(myPath)
try:
os.chdir(myPath)
ans = True
if (currentDirectory not in os.getcwd()):
ans = False
except OSError:
ans = False
return ans |
class UCCSD(VariationalForm):
def __init__(self, num_orbitals: int, num_particles: Union[(Tuple[(int, int)], List[int], int)], reps: int=1, active_occupied: Optional[List[int]]=None, active_unoccupied: Optional[List[int]]=None, initial_state: Optional[Union[(QuantumCircuit, InitialState)]]=None, qubit_mapping: str='parity', two_qubit_reduction: bool=True, num_time_slices: int=1, shallow_circuit_concat: bool=True, z2_symmetries: Optional[Z2Symmetries]=None, method_singles: str='both', method_doubles: str='ucc', excitation_type: str='sd', same_spin_doubles: bool=True, skip_commute_test: bool=False) -> None:
validate_min('num_orbitals', num_orbitals, 1)
if (isinstance(num_particles, list) and (len(num_particles) != 2)):
raise ValueError('Num particles value {}. Number of values allowed is 2'.format(num_particles))
validate_min('reps', reps, 1)
validate_in_set('qubit_mapping', qubit_mapping, {'jordan_wigner', 'parity', 'bravyi_kitaev'})
validate_min('num_time_slices', num_time_slices, 1)
validate_in_set('method_singles', method_singles, {'both', 'alpha', 'beta'})
validate_in_set('method_doubles', method_doubles, {'ucc', 'pucc', 'succ', 'succ_full'})
validate_in_set('excitation_type', excitation_type, {'sd', 's', 'd'})
super().__init__()
self._z2_symmetries = (Z2Symmetries([], [], [], []) if (z2_symmetries is None) else z2_symmetries)
self._num_qubits = (num_orbitals if (not two_qubit_reduction) else (num_orbitals - 2))
self._num_qubits = (self._num_qubits if self._z2_symmetries.is_empty() else (self._num_qubits - len(self._z2_symmetries.sq_list)))
self._reps = reps
self._num_orbitals = num_orbitals
if isinstance(num_particles, (tuple, list)):
self._num_alpha = num_particles[0]
self._num_beta = num_particles[1]
else:
logger.info('We assume that the number of alphas and betas are the same.')
self._num_alpha = (num_particles // 2)
self._num_beta = (num_particles // 2)
self._num_particles = [self._num_alpha, self._num_beta]
if (sum(self._num_particles) > self._num_orbitals):
raise ValueError('# of particles must be less than or equal to # of orbitals.')
self._initial_state = initial_state
self._qubit_mapping = qubit_mapping
self._two_qubit_reduction = two_qubit_reduction
self._num_time_slices = num_time_slices
self._shallow_circuit_concat = shallow_circuit_concat
self._method_singles = method_singles
self._method_doubles = method_doubles
self._excitation_type = excitation_type
self.same_spin_doubles = same_spin_doubles
self._skip_commute_test = skip_commute_test
(self._single_excitations, self._double_excitations) = UCCSD.compute_excitation_lists([self._num_alpha, self._num_beta], self._num_orbitals, active_occupied, active_unoccupied, same_spin_doubles=self.same_spin_doubles, method_singles=self._method_singles, method_doubles=self._method_doubles, excitation_type=self._excitation_type)
(self._hopping_ops, self._num_parameters) = self._build_hopping_operators()
self._excitation_pool = None
self._bounds = [((- np.pi), np.pi) for _ in range(self._num_parameters)]
self._logging_construct_circuit = True
self._support_parameterized_circuit = True
self.uccd_singlet = False
if (self._method_doubles == 'succ_full'):
self.uccd_singlet = True
(self._single_excitations, self._double_excitations) = UCCSD.compute_excitation_lists([self._num_alpha, self._num_beta], self._num_orbitals, active_occupied, active_unoccupied, same_spin_doubles=self.same_spin_doubles, method_singles=self._method_singles, method_doubles=self._method_doubles, excitation_type=self._excitation_type)
if self.uccd_singlet:
(self._hopping_ops, _) = self._build_hopping_operators()
else:
(self._hopping_ops, self._num_parameters) = self._build_hopping_operators()
self._bounds = [((- np.pi), np.pi) for _ in range(self._num_parameters)]
if self.uccd_singlet:
self._double_excitations_grouped = UCCSD.compute_excitation_lists_singlet(self._double_excitations, num_orbitals)
self.num_groups = len(self._double_excitations_grouped)
logging.debug('Grouped double excitations for singlet ucc')
logging.debug(self._double_excitations_grouped)
self._num_parameters = self.num_groups
self._bounds = [((- np.pi), np.pi) for _ in range(self.num_groups)]
self.labeled_double_excitations = []
for (i, _) in enumerate(self._double_excitations):
self.labeled_double_excitations.append((self._double_excitations[i], i))
order_hopping_op = UCCSD.order_labels_for_hopping_ops(self._double_excitations, self._double_excitations_grouped)
logging.debug('New order for hopping ops')
logging.debug(order_hopping_op)
self._hopping_ops_doubles_temp = []
self._hopping_ops_doubles = self._hopping_ops[len(self._single_excitations):]
for i in order_hopping_op:
self._hopping_ops_doubles_temp.append(self._hopping_ops_doubles[i])
self._hopping_ops[len(self._single_excitations):] = self._hopping_ops_doubles_temp
self._logging_construct_circuit = True
@property
def single_excitations(self):
return self._single_excitations
@property
def double_excitations(self):
return self._double_excitations
@property
def excitation_pool(self) -> List[WeightedPauliOperator]:
return self._excitation_pool
@excitation_pool.setter
def excitation_pool(self, excitation_pool: List[WeightedPauliOperator]) -> None:
self._excitation_pool = excitation_pool.copy()
def _build_hopping_operators(self):
if logger.isEnabledFor(logging.DEBUG):
TextProgressBar(sys.stderr)
results = parallel_map(UCCSD._build_hopping_operator, (self._single_excitations + self._double_excitations), task_args=(self._num_orbitals, self._num_particles, self._qubit_mapping, self._two_qubit_reduction, self._z2_symmetries, self._skip_commute_test), num_processes=aqua_globals.num_processes)
hopping_ops = []
s_e_list = []
d_e_list = []
for (op, index) in results:
if ((op is not None) and (not op.is_empty())):
hopping_ops.append(op)
if (len(index) == 2):
s_e_list.append(index)
else:
d_e_list.append(index)
self._single_excitations = s_e_list
self._double_excitations = d_e_list
num_parameters = (len(hopping_ops) * self._reps)
return (hopping_ops, num_parameters)
@staticmethod
def _build_hopping_operator(index, num_orbitals, num_particles, qubit_mapping, two_qubit_reduction, z2_symmetries, skip_commute_test=False):
h_1 = np.zeros((num_orbitals, num_orbitals))
h_2 = np.zeros((num_orbitals, num_orbitals, num_orbitals, num_orbitals))
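# Encode the excitation as an antisymmetrized one-body (singles) or two-body
# (doubles) tensor, then map it onto a qubit operator.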
if (len(index) == 2):
(i, j) = index
h_1[(i, j)] = 1.0
h_1[(j, i)] = (- 1.0)
elif (len(index) == 4):
(i, j, k, m) = index
h_2[(i, j, k, m)] = 1.0
h_2[(m, k, j, i)] = (- 1.0)
dummy_fer_op = FermionicOperator(h1=h_1, h2=h_2)
qubit_op = dummy_fer_op.mapping(qubit_mapping)
if two_qubit_reduction:
qubit_op = Z2Symmetries.two_qubit_reduction(qubit_op, num_particles)
if (not z2_symmetries.is_empty()):
symm_commuting = True
for symmetry in z2_symmetries.symmetries:
symmetry_op = WeightedPauliOperator(paulis=[[1.0, symmetry]])
symm_commuting = qubit_op.commute_with(symmetry_op)
if (not symm_commuting):
break
if (not skip_commute_test):
qubit_op = (z2_symmetries.taper(qubit_op) if symm_commuting else None)
else:
qubit_op = z2_symmetries.taper(qubit_op)
if (qubit_op is None):
logger.debug('Excitation (%s) is skipped since it is not commuted with symmetries', ','.join([str(x) for x in index]))
return (qubit_op, index)
def manage_hopping_operators(self):
if (self._excitation_pool is None):
self._excitation_pool = self._hopping_ops.copy()
if (self._reps != 1):
logger.warning('The reps of the variational form was not 1 but %i which does not work in the adaptive VQE algorithm. Thus, it has been reset to 1.', self._reps)
self._reps = 1
self._hopping_ops = []
self._num_parameters = 0
self._bounds = []
def push_hopping_operator(self, excitation):
self._hopping_ops.append(excitation)
self._num_parameters = (len(self._hopping_ops) * self._reps)
self._bounds = [((- np.pi), np.pi) for _ in range(self._num_parameters)]
def pop_hopping_operator(self):
self._hopping_ops.pop()
self._num_parameters = (len(self._hopping_ops) * self._reps)
self._bounds = [((- np.pi), np.pi) for _ in range(self._num_parameters)]
def construct_circuit(self, parameters, q=None):
if (len(parameters) != self._num_parameters):
raise ValueError('The number of parameters has to be {}'.format(self._num_parameters))
if (q is None):
q = QuantumRegister(self._num_qubits, name='q')
if isinstance(self._initial_state, QuantumCircuit):
circuit = QuantumCircuit(q)
circuit.compose(self._initial_state, inplace=True)
elif isinstance(self._initial_state, InitialState):
circuit = self._initial_state.construct_circuit('circuit', q)
else:
circuit = QuantumCircuit(q)
if (logger.isEnabledFor(logging.DEBUG) and self._logging_construct_circuit):
logger.debug('Evolving hopping operators:')
TextProgressBar(sys.stderr)
self._logging_construct_circuit = False
num_excitations = len(self._hopping_ops)
if (not self.uccd_singlet):
list_excitation_operators = [(self._hopping_ops[(index % num_excitations)], parameters[index]) for index in range((self._reps * num_excitations))]
else:
list_excitation_operators = []
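# In singlet UCC every excitation within a group shares one parameter, so
# parameters are indexed by group rather than per operator.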
counter = 0
for i in range(int((self._reps * self.num_groups))):
for _ in range(len(self._double_excitations_grouped[(i % self.num_groups)])):
list_excitation_operators.append((self._hopping_ops[counter], parameters[i]))
counter += 1
results = parallel_map(UCCSD._construct_circuit_for_one_excited_operator, list_excitation_operators, task_args=(q, self._num_time_slices), num_processes=aqua_globals.num_processes)
if self._shallow_circuit_concat:
for qc in results:
for (_, qbits, _) in qc._data:
for (i, _) in enumerate(qbits):
qbits[i] = circuit.qubits[qbits[i].index]
for qc in results:
circuit._data += qc._data
else:
for qc in results:
circuit += qc
return circuit
@staticmethod
def _construct_circuit_for_one_excited_operator(qubit_op_and_param, qr, num_time_slices):
(qubit_op, param) = qubit_op_and_param
qubit_op = (qubit_op * (- 1j))
qc = qubit_op.evolve(state_in=None, evo_time=param, num_time_slices=num_time_slices, quantum_registers=qr)
return qc
@property
def preferred_init_points(self):
if (self._initial_state is None):
return None
else:
return np.zeros(self._num_parameters, dtype=float)
@staticmethod
def compute_excitation_lists(num_particles, num_orbitals, active_occ_list=None, active_unocc_list=None, same_spin_doubles=True, method_singles='both', method_doubles='ucc', excitation_type='sd'):
if isinstance(num_particles, (tuple, list)):
num_alpha = num_particles[0]
num_beta = num_particles[1]
else:
logger.info('We assume that the number of alphas and betas are the same.')
num_alpha = (num_particles // 2)
num_beta = (num_particles // 2)
num_particles = (num_alpha + num_beta)
if (num_particles < 2):
raise ValueError('Invalid number of particles {}'.format(num_particles))
if ((num_orbitals < 4) or ((num_orbitals % 2) != 0)):
raise ValueError('Invalid number of orbitals {}'.format(num_orbitals))
if (num_orbitals <= num_particles):
raise ValueError('No unoccupied orbitals')
active_occ_list_alpha = []
active_occ_list_beta = []
active_unocc_list_alpha = []
active_unocc_list_beta = []
beta_idx = (num_orbitals // 2)
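# Beta spin-orbitals occupy the second half of the register, hence this offset.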
if (active_occ_list is not None):
active_occ_list = [(i if (i >= 0) else (i + max(num_alpha, num_beta))) for i in active_occ_list]
for i in active_occ_list:
if (i < num_alpha):
active_occ_list_alpha.append(i)
else:
raise ValueError('Invalid index {} in active active_occ_list {}'.format(i, active_occ_list))
if (i < num_beta):
active_occ_list_beta.append((i + beta_idx))
else:
raise ValueError('Invalid index {} in active active_occ_list {}'.format(i, active_occ_list))
else:
active_occ_list_alpha = list(range(0, num_alpha))
active_occ_list_beta = [(i + beta_idx) for i in range(0, num_beta)]
if (active_unocc_list is not None):
active_unocc_list = [((i + min(num_alpha, num_beta)) if (i >= 0) else (i + (num_orbitals // 2))) for i in active_unocc_list]
for i in active_unocc_list:
if (i >= num_alpha):
active_unocc_list_alpha.append(i)
else:
raise ValueError('Invalid index {} in active active_unocc_list {}'.format(i, active_unocc_list))
if (i >= num_beta):
active_unocc_list_beta.append((i + beta_idx))
else:
raise ValueError('Invalid index {} in active active_unocc_list {}'.format(i, active_unocc_list))
else:
active_unocc_list_alpha = list(range(num_alpha, (num_orbitals // 2)))
active_unocc_list_beta = [(i + beta_idx) for i in range(num_beta, (num_orbitals // 2))]
logger.debug('active_occ_list_alpha %s', active_occ_list_alpha)
logger.debug('active_unocc_list_alpha %s', active_unocc_list_alpha)
logger.debug('active_occ_list_beta %s', active_occ_list_beta)
logger.debug('active_unocc_list_beta %s', active_unocc_list_beta)
single_excitations = []
double_excitations = []
if (method_singles == 'alpha'):
for occ_alpha in active_occ_list_alpha:
for unocc_alpha in active_unocc_list_alpha:
single_excitations.append([occ_alpha, unocc_alpha])
elif (method_singles == 'beta'):
for occ_beta in active_occ_list_beta:
for unocc_beta in active_unocc_list_beta:
single_excitations.append([occ_beta, unocc_beta])
else:
for occ_alpha in active_occ_list_alpha:
for unocc_alpha in active_unocc_list_alpha:
single_excitations.append([occ_alpha, unocc_alpha])
for occ_beta in active_occ_list_beta:
for unocc_beta in active_unocc_list_beta:
single_excitations.append([occ_beta, unocc_beta])
logger.info('Singles excitations with alphas and betas orbitals are used.')
if (method_doubles in ['ucc', 'succ_full']):
for occ_alpha in active_occ_list_alpha:
for unocc_alpha in active_unocc_list_alpha:
for occ_beta in active_occ_list_beta:
for unocc_beta in active_unocc_list_beta:
double_excitations.append([occ_alpha, unocc_alpha, occ_beta, unocc_beta])
elif (method_doubles == 'pucc'):
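# Pair UCC: keep only doubles whose alpha and beta excitations act on the same
# spatial orbitals (spin-orbital indices differing by exactly num_orbitals / 2).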
for occ_alpha in active_occ_list_alpha:
for unocc_alpha in active_unocc_list_alpha:
for occ_beta in active_occ_list_beta:
for unocc_beta in active_unocc_list_beta:
if (((occ_beta - occ_alpha) == (num_orbitals / 2)) and ((unocc_beta - unocc_alpha) == (num_orbitals / 2))):
double_excitations.append([occ_alpha, unocc_alpha, occ_beta, unocc_beta])
elif (method_doubles == 'succ'):
for i in active_occ_list_alpha:
for i_prime in active_unocc_list_alpha:
for j in active_occ_list_beta:
for j_prime in active_unocc_list_beta:
if (((j - beta_idx) >= i) and ((j_prime - beta_idx) >= i_prime)):
double_excitations.append([i, i_prime, j, j_prime])
same_spin_doubles = False
logger.info('Same spin double excitations are forced to be disabled in singlet ucc')
if (same_spin_doubles and (len(active_occ_list_alpha) > 1) and (len(active_unocc_list_alpha) > 1)):
for (i, occ_alpha) in enumerate(active_occ_list_alpha[:(- 1)]):
for (j, unocc_alpha) in enumerate(active_unocc_list_alpha[:(- 1)]):
for occ_alpha_1 in active_occ_list_alpha[(i + 1):]:
for unocc_alpha_1 in active_unocc_list_alpha[(j + 1):]:
double_excitations.append([occ_alpha, unocc_alpha, occ_alpha_1, unocc_alpha_1])
up_active_occ_list = active_occ_list_beta
up_active_unocc_list = active_unocc_list_beta
for (i, occ_beta) in enumerate(up_active_occ_list[:(- 1)]):
for (j, unocc_beta) in enumerate(up_active_unocc_list[:(- 1)]):
for occ_beta_1 in up_active_occ_list[(i + 1):]:
for unocc_beta_1 in up_active_unocc_list[(j + 1):]:
double_excitations.append([occ_beta, unocc_beta, occ_beta_1, unocc_beta_1])
if (excitation_type == 's'):
double_excitations = []
elif (excitation_type == 'd'):
single_excitations = []
else:
logger.info('Singles and Doubles excitations are used.')
logger.debug('single_excitations (%s) %s', len(single_excitations), single_excitations)
logger.debug('double_excitations (%s) %s', len(double_excitations), double_excitations)
return (single_excitations, double_excitations)
@staticmethod
def compute_excitation_lists_singlet(double_exc, num_orbitals):
de_groups = UCCSD.group_excitations_if_same_ao(double_exc, num_orbitals)
return de_groups
@staticmethod
def same_ao_double_excitation_block_spin(de_1, de_2, num_orbitals):
half_active_space = int((num_orbitals / 2))
de_1_new = copy.copy(de_1)
de_2_new = copy.copy(de_2)
count = (- 1)
for ind in de_1_new:
count += 1
if (ind >= half_active_space):
de_1_new[count] = (ind % half_active_space)
count = (- 1)
for ind in de_2_new:
count += 1
if (ind >= half_active_space):
de_2_new[count] = (ind % half_active_space)
if (collections.Counter(de_1_new) == collections.Counter(de_2_new)):
return 1
else:
return 0
@staticmethod
def group_excitations(list_de, num_orbitals):
list_de_temp = copy.copy(list_de)
list_same_ao_group = []
de1 = list_de[0]
counter = 0
for de2 in list_de:
if (UCCSD.same_ao_double_excitation_block_spin(de1, de2, num_orbitals) == 1):
counter += 1
if (counter == 1):
list_same_ao_group.append(de1)
for i in list_de_temp:
if (i == de1):
list_de_temp.remove(de1)
if (de1 != de2):
list_same_ao_group.append(de2)
for i in list_de_temp:
if (i == de2):
list_de_temp.remove(de2)
return (list_same_ao_group, list_de_temp)
@staticmethod
def group_excitations_if_same_ao(list_de, num_orbitals):
list_groups = []
(list_same_ao_group, list_de_temp) = UCCSD.group_excitations(list_de, num_orbitals)
list_groups.append(list_same_ao_group)
while (len(list_de_temp) != 0):
(list_same_ao_group, list_de_temp) = UCCSD.group_excitations(list_de_temp, num_orbitals)
list_groups.append(list_same_ao_group)
return list_groups
@staticmethod
def order_labels_for_hopping_ops(double_exc, gde):
labeled_de = []
for (i, _) in enumerate(double_exc):
labeled_de.append((double_exc[i], i))
ordered_labels = []
for group in gde:
for exc in group:
for l_e in labeled_de:
if (exc == l_e[0]):
ordered_labels.append(l_e[1])
return ordered_labels |
class GetFieldsTests(OptionsBaseTests):
def test_get_fields_is_immutable(self):
msg = (IMMUTABLE_WARNING % 'get_fields()')
for _ in range(2):
fields = CassandraThing._meta.get_fields()
with self.assertRaisesMessage(AttributeError, msg):
fields += ['errors'] |
class EpisodicBatchSampler(object):
def __init__(self, n_classes, n_way, n_episodes):
self.n_classes = n_classes
self.n_way = n_way
self.n_episodes = n_episodes
def __len__(self):
return self.n_episodes
def __iter__(self):
for i in range(self.n_episodes):
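# Each episode draws n_way distinct class indices uniformly without replacement.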
(yield torch.randperm(self.n_classes)[:self.n_way]) |
def focal_loss_legacy(logits, targets, alpha: float, gamma: float, normalizer):
positive_label_mask = (targets == 1.0)
cross_entropy = F.binary_cross_entropy_with_logits(logits, targets.to(logits.dtype), reduction='none')
neg_logits = ((- 1.0) * logits)
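# Focal modulator computed from logits for numerical stability:
# (1 - p)^gamma for positive targets and p^gamma for negatives, p = sigmoid(logits).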
modulator = torch.exp((((gamma * targets) * neg_logits) - (gamma * torch.log1p(torch.exp(neg_logits)))))
loss = (modulator * cross_entropy)
weighted_loss = torch.where(positive_label_mask, (alpha * loss), ((1.0 - alpha) * loss))
return (weighted_loss / normalizer) |
def check_chain_id(chain_id: ChainID, web3: Web3) -> None:
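# Poll the node's reported chain id indefinitely; fail loudly if it ever
# diverges from the chain id Raiden was started with.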
while True:
try:
current_id = web3.eth.chain_id
except requests.exceptions.ConnectionError:
raise RuntimeError('Could not reach ethereum RPC. Please check that your ethereum node is running and accessible.')
if (chain_id != current_id):
raise RuntimeError(f'Raiden was running on network with id {chain_id} and it detected that the underlying ethereum client network id changed to {current_id}. Changing the underlying blockchain while the Raiden node is running is not supported.')
gevent.sleep(CHECK_CHAIN_ID_INTERVAL) |