code stringlengths 281 23.7M |
|---|
def get_and_load_typeclasses(parent=None, excluded_parents=None):
    """Collect importable typeclass paths, filtered and ordered for choice lists.

    Returns a list of ``(path, path)`` tuples (e.g. for a Django choices
    field): typeclasses inheriting from any of *excluded_parents* are
    dropped, the raw ``*DB`` database models are removed, and core
    ``evennia.``-prefixed paths sort after custom ones.
    """
    import evennia
    evennia._init()
    typeclass_map = get_all_typeclasses(parent=parent)
    excluded = excluded_parents or []
    # Drop any typeclass inheriting from one of the excluded parents.
    paths = [
        path
        for path, tclass in typeclass_map.items()
        if not any(inherits_from(tclass, excl) for excl in excluded)
    ]
    # Custom (non-"evennia.") paths first, alphabetical within each group.
    paths.sort(key=lambda p: (p.startswith('evennia.'), p))
    # The raw database models are never valid typeclasses to offer.
    db_models = (
        'evennia.objects.models.ObjectDB',
        'evennia.accounts.models.AccountDB',
        'evennia.scripts.models.ScriptDB',
        'evennia.comms.models.ChannelDB',
    )
    return [(path, path) for path in paths if path and path not in db_models]
class ReturningHandler(THBEventHandler):
    """Awakens the Returning skill for its owner during the prepare stage."""

    interested = ['action_before']

    def handle(self, evt_type, act):
        # Only react immediately before a PrepareStage runs.
        if evt_type != 'action_before' or not isinstance(act, PrepareStage):
            return act
        tgt = act.target
        if not tgt.has_skill(Returning):
            return act
        total_cards = len(tgt.cards) + len(tgt.showncards)
        # Awaken when life is critically low and strictly below hand size.
        if tgt.life <= 2 and tgt.life < total_cards:
            self.game.process_action(ReturningAwake(tgt, tgt))
        return act
class _01_LinuxExpandMacros(BaseClasses.AfterPass):
    """After-pass expanding Kbuild composite objects into per-source conditions.

    A Kbuild target like ``foo.o`` may be a composite built from
    ``foo-y := a.o b.o`` (or ``foo-objs``, ``foo-$(CONFIG_X)``, ...).  This
    pass resolves such composites recursively and records, for each concrete
    source file, the accumulated CONFIG preconditions in
    ``parser.local_vars['file_features']``.
    """
    # Group 1 matches a plain object name ("name.o"); group 2 matches a make
    # variable reference ("$(name)").  Exactly one group matches per filename.
    regex_base = re.compile('([A-Za-z0-9,_-]+)\\.o|\\$\\(([A-Za-z0-9,_-]+)\\)')

    def __init__(self, model, arch):
        super(_01_LinuxExpandMacros, self).__init__(model, arch)

    def expand_macro(self, name, path, condition, already_expanded, parser, maxdepth=3):
        """Recursively expand composite *name* defined in the makefile at *path*.

        *condition* is the list of preconditions accumulated so far,
        *already_expanded* guards against cyclic definitions, and *maxdepth*
        bounds recursion over nested composites.
        """
        if (maxdepth == 0):
            return
        # Cycle guard: skip names currently being expanded further up the stack.
        if (name in already_expanded):
            return
        else:
            already_expanded.add(name)
        basepath = os.path.dirname(name)
        filename = os.path.basename(name)
        basename = ''
        match = self.regex_base.match(filename)
        if (not match):
            return
        if (match.group(1) is None):
            # "$(var)" form; strip a trailing 'y' (e.g. "foo-y" style lists).
            basename = match.group(2)
            if basename.endswith('y'):
                basename = basename[:(- 1)]
        elif (match.group(2) is None):
            # Plain "name.o" form.
            basename = match.group(1)
        # Build a regex matching the assignment lines defining this composite:
        # "name(-y|-objs|-$(CONFIG_...)) (:=|+=|=) <objects...>".
        scan_regex_string = ''
        if (match.group(1) is None):
            scan_regex_string = (((('\\s*' + basename) + '(|y|\\$\\(') + CONFIG_FORMAT) + '\\))\\s*(:=|\\+=|=)\\s*(.*)')
        else:
            scan_regex_string = (((('\\s*' + basename) + '(|-y|-objs|-\\$\\(') + CONFIG_FORMAT) + '\\))\\s*(:=|\\+=|=)\\s*(.*)')
        scan_regex = re.compile(scan_regex_string)
        if (not (path in parser.file_content_cache)):
            parser.read_whole_file(path)
        inputs = parser.file_content_cache[path]
        for line in inputs:
            if line.invalid:
                continue
            # Preconditions from surrounding ifdef/ifeq blocks in the makefile.
            ifdef_condition = line.condition
            line = line.processed_line
            match = scan_regex.match(line)
            if (not match):
                continue
            # CONFIG_* option embedded in the composite's own name, if any.
            config_in_composite = match.group(2)
            condition_comp = ''
            if config_in_composite:
                condition_comp = Helper.get_config_string(config_in_composite, self.model)
            # Right-hand side: whitespace-separated list of objects/directories.
            rhs = match.group(4)
            matches = [x for x in re.split('\t| ', rhs) if x]
            for item in matches:
                fullpath = ((basepath + '/') + item)
                passdown_condition = condition[:]
                if config_in_composite:
                    passdown_condition.append(condition_comp)
                if os.path.isdir(fullpath):
                    # Sub-directory: record condition for later directory handling.
                    parser.local_vars['dir_cond_collection'][fullpath].add_alternative(passdown_condition[:])
                else:
                    sourcefile = Helper.guess_source_for_target(fullpath)
                    if (not sourcefile):
                        # No direct source file: treat it as a nested composite.
                        self.expand_macro(fullpath, path, passdown_condition, already_expanded, parser, (maxdepth - 1))
                    else:
                        # Concrete source file: attach the full condition set.
                        full_condition = DataStructures.Precondition()
                        if (len(condition) > 0):
                            full_condition = condition[:]
                        if config_in_composite:
                            full_condition.append(condition_comp)
                        if (len(ifdef_condition) > 0):
                            full_condition.extend(ifdef_condition)
                        parser.local_vars['file_features'][sourcefile].add_alternative(full_condition[:])
        already_expanded.discard(name)

    def process(self, parser, path, condition_for_current_dir):
        """Expand every composite recorded in the parser's composite_map."""
        for obj in parser.local_vars.get_variable('composite_map'):
            downward_condition = Helper.build_precondition(parser.local_vars['composite_map'][obj])
            already_expanded = set()
            self.expand_macro(obj, path, downward_condition, already_expanded, parser)
class OptionSeriesColumnSonificationDefaultspeechoptionsMappingPitch(Options):
    """Accessors for sonification default speech pitch-mapping options.

    BUG FIX: each getter/setter pair below had lost its ``@property`` /
    ``@<name>.setter`` decorators, so the second ``def`` simply shadowed the
    first and the getters were unreachable; the decorators are restored.
    """

    @property
    def mapFunction(self):
        """Custom mapping function; None when unset."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Target property/axis the pitch maps to."""
        return self._config_get('undefined')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range."""
        return self._config_get('undefined')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range."""
        return self._config_get('undefined')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def within(self):
        """Scope the min/max mapping applies within."""
        return self._config_get('undefined')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
class RangeSlider(ConstrainedControl):
    """A Material range slider with two thumbs selecting a start/end range.

    BUG FIX: the property getters/setters below had lost their decorators —
    residue such as ``_value.setter`` (for ``@start_value.setter``) remained
    in the source.  Without ``@property`` each setter definition shadowed its
    getter, breaking all attribute access.  The decorators are restored.
    """

    def __init__(self, start_value: [float], end_value: [float], ref: Optional[Ref]=None, key: Optional[str]=None, width: OptionalNumber=None, height: OptionalNumber=None, left: OptionalNumber=None, top: OptionalNumber=None, right: OptionalNumber=None, bottom: OptionalNumber=None, expand: Union[(None, bool, int)]=None, col: Optional[ResponsiveNumber]=None, opacity: OptionalNumber=None, rotate: RotateValue=None, scale: ScaleValue=None, offset: OffsetValue=None, aspect_ratio: OptionalNumber=None, animate_opacity: AnimationValue=None, animate_size: AnimationValue=None, animate_position: AnimationValue=None, animate_rotation: AnimationValue=None, animate_scale: AnimationValue=None, animate_offset: AnimationValue=None, on_animation_end=None, tooltip: Optional[str]=None, visible: Optional[bool]=None, disabled: Optional[bool]=None, data: Any=None, label: Optional[str]=None, min: OptionalNumber=None, max: OptionalNumber=None, divisions: Optional[int]=None, round: Optional[int]=None, active_color: Optional[str]=None, inactive_color: Optional[str]=None, overlay_color: Union[(None, str, Dict[(MaterialState, str)])]=None, on_change=None, on_change_start=None, on_change_end=None):
        ConstrainedControl.__init__(self, ref=ref, key=key, width=width, height=height, left=left, top=top, right=right, bottom=bottom, expand=expand, col=col, opacity=opacity, rotate=rotate, scale=scale, offset=offset, aspect_ratio=aspect_ratio, animate_opacity=animate_opacity, animate_size=animate_size, animate_position=animate_position, animate_rotation=animate_rotation, animate_scale=animate_scale, animate_offset=animate_offset, on_animation_end=on_animation_end, tooltip=tooltip, visible=visible, disabled=disabled, data=data)
        self.start_value = start_value
        self.end_value = end_value
        self.label = label
        self.min = min
        self.max = max
        self.divisions = divisions
        self.round = round
        self.active_color = active_color
        self.inactive_color = inactive_color
        self.overlay_color = overlay_color
        self.on_change = on_change
        self.on_change_start = on_change_start
        self.on_change_end = on_change_end

    def _get_control_name(self):
        return 'rangeslider'

    def _before_build_command(self):
        super()._before_build_command()
        # overlay_color may be a per-MaterialState dict, so serialize as JSON.
        self._set_attr_json('overlayColor', self.__overlay_color)

    @property
    def start_value(self) -> float:
        return self._get_attr('startvalue')

    @start_value.setter
    def start_value(self, value: float):
        self._set_attr('startvalue', value)

    @property
    def end_value(self) -> float:
        return self._get_attr('endvalue')

    @end_value.setter
    def end_value(self, value: float):
        self._set_attr('endvalue', value)

    @property
    def label(self) -> str:
        return self._get_attr('label')

    @label.setter
    def label(self, value: str):
        self._set_attr('label', value)

    @property
    def min(self) -> OptionalNumber:
        return self._get_attr('min')

    @min.setter
    def min(self, value: OptionalNumber):
        self._set_attr('min', value)

    @property
    def max(self) -> OptionalNumber:
        return self._get_attr('max')

    @max.setter
    def max(self, value: OptionalNumber):
        self._set_attr('max', value)

    @property
    def divisions(self) -> Optional[int]:
        return self._get_attr('divisions')

    @divisions.setter
    def divisions(self, value: Optional[int]):
        self._set_attr('divisions', value)

    @property
    def round(self) -> Optional[int]:
        return self._get_attr('round')

    @round.setter
    def round(self, value: Optional[int]):
        self._set_attr('round', value)

    @property
    def active_color(self):
        return self._get_attr('activeColor')

    @active_color.setter
    def active_color(self, value):
        self._set_attr('activeColor', value)

    @property
    def inactive_color(self):
        return self._get_attr('inactiveColor')

    @inactive_color.setter
    def inactive_color(self, value):
        self._set_attr('inactiveColor', value)

    @property
    def overlay_color(self) -> Union[(None, str, Dict[(MaterialState, str)])]:
        return self.__overlay_color

    @overlay_color.setter
    def overlay_color(self, value: Union[(None, str, Dict[(MaterialState, str)])]):
        self.__overlay_color = value

    @property
    def on_change(self):
        return self._get_event_handler('change')

    @on_change.setter
    def on_change(self, handler):
        self._add_event_handler('change', handler)

    @property
    def on_change_start(self):
        return self._get_event_handler('change_start')

    @on_change_start.setter
    def on_change_start(self, handler):
        self._add_event_handler('change_start', handler)

    @property
    def on_change_end(self):
        return self._get_event_handler('change_end')

    @on_change_end.setter
    def on_change_end(self, handler):
        self._add_event_handler('change_end', handler)
@click.command()
@click.option('--local-dir', default='./tmp/diffusers-pipeline/stabilityai/stable-diffusion-v2', help='the local diffusers pipeline directory')
@click.option('--width', default=512, help='Width of generated image')
@click.option('--height', default=512, help='Height of generated image')
@click.option('--prompt', default='A fantasy landscape, trending on artstation', help='prompt')
@click.option('--benchmark', type=bool, default=False, help='run stable diffusion e2e benchmark')
def run(local_dir, width, height, prompt, benchmark):
    """Run the img2img AITemplate pipeline on a demo sketch image.

    BUG FIX: the click decorators above had been mangled to bare tuples in
    the original source and are restored here.  The download URL literal
    was truncated ("url = '"); it is restored to the standard img2img demo
    image used by the diffusers examples — confirm against upstream.
    """
    device = 'cuda'
    pipe = StableDiffusionImg2ImgAITPipeline.from_pretrained(local_dir, revision='fp16', torch_dtype=torch.float16, safety_checker=None, feature_extractor=None)
    pipe = pipe.to(device)
    url = 'https://raw.githubusercontent.com/CompVis/stable-diffusion/main/assets/stable-samples/img2img/sketch-mountains-input.jpg'
    response = requests.get(url)
    init_image = Image.open(BytesIO(response.content)).convert('RGB')
    # NOTE(review): PIL resize takes (width, height); the original passes
    # (height, width).  Harmless with the square defaults — confirm intent.
    init_image = init_image.resize((height, width))
    with torch.autocast('cuda'):
        images = pipe(prompt=prompt, init_image=init_image, strength=0.75, guidance_scale=7.5).images
        if benchmark:
            # Time 10 end-to-end pipeline invocations.
            args = (prompt, init_image)
            t = benchmark_torch_function(10, pipe, *args)
            print(f'sd e2e: {t} ms')
    images[0].save('fantasy_landscape_ait.png')
class AmazonApi(ProviderInterface, AmazonOcrApi, AmazonAudioApi, AmazonImageApi, AmazonTextApi, AmazonTranslationApi, AmazonVideoApi):
    """Amazon provider facade aggregating the per-feature Amazon API mixins.

    Loads provider settings via ``load_provider`` and builds the service and
    storage clients from them.
    """

    provider_name = 'amazon'

    def __init__(self, api_keys: Dict = None) -> None:
        """*api_keys*: optional credential overrides (defaults to empty).

        BUG FIX: the original used a mutable default argument
        (``api_keys: Dict = {}``), a single dict shared by every call.
        """
        self.api_settings = load_provider(ProviderDataEnum.KEY, 'amazon', api_keys=api_keys or {})
        self.clients = clients(self.api_settings)
        self.storage_clients = storage_clients(self.api_settings)
@pytest.mark.usefixtures('use_tmpdir')
def test_template_multiple_input():
    """Later parameter files override earlier ones when rendering a template.

    BUG FIX: the decorator had been mangled to a bare ``.usefixtures(...)``
    attribute access; restored to ``@pytest.mark.usefixtures``.
    """
    # Write the template and the base parameter set.
    with open('template', 'w', encoding='utf-8') as template_file:
        template_file.write(mulitple_input_template)
    with open('parameters.json', 'w', encoding='utf-8') as json_file:
        json_file.write(json.dumps(default_parameters))
    # Two override files; entries in later files win for overlapping keys.
    with open('second.json', 'w', encoding='utf-8') as json_file:
        json.dump({'key1': {'subkey2': 1400}}, json_file)
    with open('third.json', 'w', encoding='utf-8') as json_file:
        json.dump({'key1': {'subkey1': 3000.22}}, json_file)
    render_template(['second.json', 'third.json'], 'template', 'out_file')
    with open('out_file', 'r', encoding='utf-8') as parameter_file:
        expected_output = 'FILENAME\nF1 1999.22\nOTH 1400\nOTH_TEST 3000.22'
        assert parameter_file.read() == expected_output
class TestDependencyManager:
    """Exercises DependencyManager cycle detection and topological sorting."""

    def setup(self):
        self.deps = DependencyManager([
            SecretDependency(),
            ServiceDependency(),
            IngressClassesDependency(),
        ])

    def test_cyclic(self):
        # Three instances whose provide/want edges form a ring.
        first = self.deps.for_instance(object())
        second = self.deps.for_instance(object())
        third = self.deps.for_instance(object())
        first.provide(SecretDependency)
        first.want(ServiceDependency)
        second.provide(ServiceDependency)
        second.want(IngressClassesDependency)
        third.provide(IngressClassesDependency)
        third.want(SecretDependency)
        # A cyclic dependency graph cannot be ordered.
        with pytest.raises(ValueError):
            self.deps.sorted_watt_keys()

    def test_sort(self):
        # Acyclic graph: wants must sort before the instance that has them.
        first = self.deps.for_instance(object())
        second = self.deps.for_instance(object())
        third = self.deps.for_instance(object())
        first.want(SecretDependency)
        first.want(ServiceDependency)
        first.provide(IngressClassesDependency)
        second.provide(SecretDependency)
        third.provide(ServiceDependency)
        assert self.deps.sorted_watt_keys() == ['secret', 'service', 'ingressclasses']
def send_installation_email(event_name, postinstall_email, attendee):
    """Send the post-installation HTML email for *event_name* to *attendee*."""
    message = EmailMultiAlternatives()
    message.subject = get_installation_subject(
        attendee.first_name, attendee.last_name, event_name
    )
    message.from_email = postinstall_email.contact_email
    # Plain-text part intentionally empty; the content is the HTML alternative.
    message.body = ''
    message.attach_alternative(postinstall_email.message, 'text/html')
    message.to = [attendee.email]
    message.send(fail_silently=False)
# NOTE(review): the original source carried a decorator here that was mangled
# to a bare `_module()` call — almost certainly a model-registry registration
# (e.g. `@MODELS.register_module()`).  Restore the exact decorator from the
# upstream project; the bare call would raise NameError at import time.
class ISTFTNet(pl.LightningModule):
    """iSTFTNet vocoder wrapper.

    Loads a pretrained Generator checkpoint and converts between waveforms
    and mel spectrograms (natural log by default, log10 otherwise).
    """

    def __init__(self, checkpoint_path: str='checkpoints/istft_net/g_', config_file: Optional[str]=None, use_natural_log: bool=True, **kwargs):
        super().__init__()
        # Default config lives next to the checkpoint file.
        if config_file is None:
            config_file = Path(checkpoint_path).parent / 'config.json'
        with open(config_file) as f:
            json_config = json.loads(f.read())
        self.h = AttrDict(json_config)
        self.model = Generator(self.h)
        self.use_natural_log = use_natural_log
        # BUG FIX: `map_location` is a torch.load() argument; the original
        # passed it to load_state_dict(), which raises TypeError.
        cp_dict = torch.load(checkpoint_path, map_location='cpu')
        if 'state_dict' not in cp_dict:
            # Raw HiFi-GAN style checkpoint.
            self.model.load_state_dict(cp_dict['generator'])
        else:
            # Lightning checkpoint: keep only generator weights, strip prefix.
            self.model.load_state_dict({k.replace('generator.', ''): v for (k, v) in cp_dict['state_dict'].items() if k.startswith('generator.')})
        self.model.eval()
        self.model.remove_weight_norm()
        self.mel_transform = PitchAdjustableMelSpectrogram(sample_rate=self.h.sampling_rate, n_fft=self.h.n_fft, win_length=self.h.win_size, hop_length=self.h.hop_size, f_min=self.h.fmin, f_max=self.h.fmax, n_mels=self.h.num_mels)
        # Accept the legacy "mel_channels" kwarg name.
        if 'mel_channels' in kwargs:
            kwargs['num_mels'] = kwargs.pop('mel_channels')
        # Any remaining kwargs must agree with the loaded config.
        for k, v in kwargs.items():
            if getattr(self.h, k, None) != v:
                raise ValueError(f'Incorrect value for {k}: {v}')

    @torch.no_grad()  # restored from mangled `_grad()` residue
    def spec2wav(self, mel, f0):
        """Mel spectrogram (f0 cast but otherwise unused here) -> waveform."""
        c = mel[None]
        if self.use_natural_log is False:
            # Convert log10 mel back to natural log (ln 10 ≈ 2.30259).
            c = 2.30259 * c
        f0 = f0[None].to(c.dtype)
        spec, phase = self.model(c)
        # NOTE(review): self.hanning_window is not defined in this class —
        # presumably set elsewhere or lost in extraction; confirm upstream.
        y = torch.istft(spec * torch.exp(phase * 1j), n_fft=self.h.gen_istft_n_fft, hop_length=self.h.gen_istft_hop_size, win_length=self.h.gen_istft_n_fft, window=self.hanning_window)
        return y[0]

    @torch.no_grad()  # restored from mangled `_grad()` residue
    def wav2spec(self, wav_torch, sr=None, key_shift=0, speed=1.0):
        """Waveform -> mel spectrogram, resampling to the model rate if needed."""
        if sr is None:
            sr = self.h.sampling_rate
        if sr != self.h.sampling_rate:
            _wav_torch = librosa.resample(wav_torch.cpu().numpy(), orig_sr=sr, target_sr=self.h.sampling_rate)
            wav_torch = torch.from_numpy(_wav_torch).to(wav_torch.device)
        mel_torch = self.mel_transform(wav_torch, key_shift=key_shift, speed=speed)[0]
        mel_torch = dynamic_range_compression(mel_torch)
        if self.use_natural_log is False:
            # Convert natural log to log10 (1/ln 10 ≈ 0.434294).
            mel_torch = 0.434294 * mel_torch
        return mel_torch
def print_cid_info(buttons):
    """Print the control-ID (CID) info table for *buttons* to stdout.

    Returns the list of CIDs in device order.
    """
    count = buttons.get_count()
    collected_cids = []
    print('### CID INFO ###')
    print(' CID TID virtual persist divert reprog fntog hotkey fkey mouse pos group gmask rawXY')
    for index in range(count):
        info = buttons.get_cid_info(index)
        collected_cids.append(info['cid'])
        # One fixed-width row per control, fields straight from the info dict.
        print('0x{cid:04X} 0x{tid:04X} {virtual!r:<7} {persist!r:<7} {divert!r:<6} {reprog!r:<6} {fntog!r:<5} {hotkey!r:<6} {fkey!r:<5} {mouse!r:<5} {pos:<3} {group:<5} {gmask:#010b} {rawXY!r:<5}'.format(**info))
    return collected_cids
def fortios_log_tacacsplusaccounting(data, fos):
    """Apply the log.tacacs+accounting 'setting' task and report its result.

    Returns (failed, changed, response, {}) in the usual Ansible module shape.
    """
    fos.do_member_operation('log.tacacs+accounting', 'setting')
    if data['log_tacacsplusaccounting_setting']:
        resp = log_tacacsplusaccounting_setting(data, fos)
    else:
        # fail_json() terminates the module run with an error.
        fos._module.fail_json(msg=('missing task body: %s' % 'log_tacacsplusaccounting_setting'))
    success = is_successful_status(resp)
    revision_changed = resp['revision_changed'] if 'revision_changed' in resp else True
    return (not success, success and revision_changed, resp, {})
def add_to_sent(analyses, surf):
    """Accumulate one token's morphological tags into the global sentence.

    *analyses* is a list whose items' first element is a bracketed tag string
    like "[A][B][C]"; *surf* is the token's surface form.  When a sentence
    boundary tag appears, the buffered sentence is parsed and the buffer
    reset; sentences exceeding the global ``max_window_size`` are skipped
    with a diagnostic instead.
    """
    global sent
    tags = set()
    for analysis in analyses:
        # Split "[TAG1][TAG2]..." into the individual tag names.
        parts = analysis[0].split('][')
        for part in parts:
            tag = part.rstrip(']').lstrip('[')
            tags.add(tag)
    # Keep the surface form alongside the tags for later printing.
    tags.add(('SURF=' + surf))
    sent.append(tags)
    if ('BOUNDARY=SENTENCE' in tags):
        # Complete sentence: hand off to the parser and start a new buffer.
        parse_sentence()
        sent = list()
    elif (len(sent) >= max_window_size):
        # Safety valve: print a readable prefix of the runaway sentence.
        print('ERROR! Too long sentence skipped from start:')
        for coh in sent[:20]:
            for tag in coh:
                if tag.startswith('SURF='):
                    print(tag[len('SURF='):], end=' ')
        print('restarting at [...]:')
        for tag in sent[(- 1)]:
            if tag.startswith('SURF='):
                print(tag[len('SURF='):])
        sent = list()
def fade_in(main, exaile):
    """Gradually raise the player volume from the configured min to max.

    Stops early if playback is paused or not playing mid-fade.
    """
    logger.debug('fade_in() called.')
    volume = settings.get_option('plugin/multialarmclock/fade_min_volume', 0) / 100.0
    max_volume = settings.get_option('plugin/multialarmclock/fade_max_volume', 100) / 100.0
    increment = settings.get_option('plugin/multialarmclock/fade_increment', 1) / 100.0
    # Spread the configured total fade time evenly over all increments.
    step_delay = settings.get_option('plugin/multialarmclock/fade_time', 30) / ((max_volume - volume) / increment)
    while volume < max_volume:
        logger.debug('set volume to {0}'.format(volume))
        settings.set_option('player/volume', volume)
        volume += increment
        time.sleep(step_delay)
        # Abort the fade if the user paused or stopped playback.
        if player.PLAYER.is_paused() or not player.PLAYER.is_playing():
            return
    settings.set_option('player/volume', max_volume)
def info(patch, fsize):
    """Return a human-readable summary of a serialized binary *patch*.

    Deserializes, in writer order: the pointers header, four instruction
    block headers (b.w, bl, ldr, ldr.w), the pointer blocks, then the four
    instruction block payloads.  The ``format_*`` helpers print to stdout,
    so their output is captured with ``redirect_stdout`` and returned as a
    string.  *fsize* is the patched file's size, forwarded for statistics.
    """
    fpatch = BytesIO(patch)
    # Pointers header: presence flags, sub-headers, and address ranges.
    (data_pointers_blocks_present, code_pointers_blocks_present, data_pointers_header, code_pointers_header, from_data_offset, from_data_begin, from_data_end, from_code_begin, from_code_end) = unpack_pointers_header(fpatch)
    # One header per patched instruction kind — order matters.
    bw_header = Blocks.unpack_header(fpatch)
    bl_header = Blocks.unpack_header(fpatch)
    ldr_header = Blocks.unpack_header(fpatch)
    ldr_w_header = Blocks.unpack_header(fpatch)
    (data_pointers_blocks, data_pointers_blocks_size, code_pointers_blocks, code_pointers_blocks_size) = unpack_pointers_blocks_with_length(fpatch, data_pointers_blocks_present, code_pointers_blocks_present, data_pointers_header, code_pointers_header)
    (bw_blocks, bw_blocks_size) = load_blocks(bw_header, fpatch)
    (bl_blocks, bl_blocks_size) = load_blocks(bl_header, fpatch)
    (ldr_blocks, ldr_blocks_size) = load_blocks(ldr_header, fpatch)
    (ldr_w_blocks, ldr_w_blocks_size) = load_blocks(ldr_w_header, fpatch)
    fout = StringIO()
    # Capture everything the formatting helpers print.
    with redirect_stdout(fout):
        format_instruction('b.w', bw_blocks, bw_blocks_size, fsize)
        format_instruction('bl', bl_blocks, bl_blocks_size, fsize)
        format_instruction('ldr', ldr_blocks, ldr_blocks_size, fsize)
        format_instruction('ldr.w', ldr_w_blocks, ldr_w_blocks_size, fsize)
        format_pointers(data_pointers_blocks_present, from_data_offset, from_data_begin, from_data_end, data_pointers_blocks, data_pointers_blocks_size, code_pointers_blocks_present, from_code_begin, from_code_end, code_pointers_blocks, code_pointers_blocks_size, fsize)
    return fout.getvalue()
class TestBenchmarks(unittest.TestCase):
    """Relative-performance micro-benchmarks for common Python idioms.

    Each test asserts one measured timing is smaller than another, so the
    results are inherently machine- and load-dependent.
    """

    def test_lists_vs_dicts(self):
        # Indexing a list should beat hashing into a dict.
        time_list = timeit('item = l[9000]', 'l = [0] * 10000')
        time_dict = timeit('item = d[9000]', 'd = {x: 0 for x in range(10000)}')
        self.assertTrue(time_list < time_dict, '%s < %s' % (time_list, time_dict))

    def test_call_vs_inline(self):
        # Even an empty function call should add measurable overhead.
        inline_time = timeit('l[0] += 1', 'l = [0]')
        call_time = timeit('add(); l[0] += 1', 'l = [0]\ndef add():\n pass')
        self.assertTrue(inline_time < call_time, '%s (no call) < %s (call)' % (inline_time, call_time))

    def test_startswith_vs_regex(self):
        # A plain prefix check should beat an anchored regex match.
        regex_time = timeit('r.match(t, 19)', "import re\nr = re.compile('hello')\nt = 'this is the finest hello ever'")
        prefix_time = timeit("t.startswith('hello', 19)", "t = 'this is the finest hello ever'")
        self.assertTrue(prefix_time < regex_time, '%s (startswith) < %s (re)' % (prefix_time, regex_time))
class proj_spsd(proj):
    """Projection onto the symmetric (Hermitian) positive semidefinite cone."""

    def __init__(self, **kwargs):
        super(proj_spsd, self).__init__(**kwargs)

    def _prox(self, x, T):
        """Project *x* onto the SPSD cone (*T* is ignored, as for projections).

        Symmetrizes x, clips its negative eigenvalues to zero, and rebuilds
        the matrix from the eigendecomposition.
        """
        isreal = np.isreal(x).all()
        # Hermitian part of x.
        sol = ((x + np.conj(x.T)) / 2)
        (D, V) = np.linalg.eig(sol)
        # Eigenvalues of a Hermitian matrix are real up to round-off.
        D = np.real(D)
        if isreal:
            V = np.real(V)
        D = np.clip(D, 0, np.inf)
        # BUG FIX: the original line was missing the matrix-multiplication
        # operators ("(V np.diag(D)) np.conj(V.T)" — a syntax error).
        sol = V @ np.diag(D) @ np.conj(V.T)
        return sol
def DoRearrangeDim(decl_cursor, permute_vector):
    """Permute the dimensions of an allocation or fn argument declaration.

    Rewrites the declaration's tensor type per *permute_vector* and then
    forwards the permutation through every read, write, window expression
    and ``stride(...)`` reference to the buffer.  Raises ``SchedulingError``
    when the buffer is passed to a sub-procedure or when permuting a window
    expression would change its meaning.  Returns the updated ``(ir, fwd)``.
    """
    decl_s = decl_cursor._node
    assert isinstance(decl_s, (LoopIR.Alloc, LoopIR.fnarg))
    # Single-entry map: buffer symbol -> its dimension permutation.
    all_permute = {decl_s.name: permute_vector}

    def permute(buf, es):
        # Reorder a per-dimension list according to the buffer's permutation.
        permutation = all_permute[buf]
        return [es[i] for i in permutation]

    def check_permute_window(buf, idx):
        # A window keeps only Interval dims; the permutation is only safe if
        # the kept dims remain in increasing order (window meaning unchanged).
        permutation = all_permute[buf]
        keep_perm = [i for i in permutation if isinstance(idx[i], LoopIR.Interval)]
        for (i, ii) in zip(keep_perm[:(- 1)], keep_perm[1:]):
            if (i > ii):
                return False
        return True

    # Rewrite the declared tensor type with permuted extents.
    new_hi = permute(decl_s.name, decl_s.type.hi)
    new_type = LoopIR.Tensor(new_hi, decl_s.type.is_window, decl_s.type.type)
    (ir, fwd) = decl_cursor._child_node('type')._replace(new_type)

    def mk_read(c):
        # Permute read indices; reject reads passed into sub-procedure calls.
        rd = c._node
        if isinstance(c.parent()._node, LoopIR.Call):
            raise SchedulingError(f"Cannot permute buffer '{rd.name}' because it is passed as a sub-procedure argument at {rd.srcinfo}")
        if (not (rd.name in all_permute)):
            return None
        if (isinstance(rd, LoopIR.WindowExpr) and (not check_permute_window(rd.name, rd.idx))):
            raise SchedulingError(f'Permuting the window expression at {rd.srcinfo} would change the meaning of the window; propagating dimension rearrangement through windows is not currently supported')
        return {'idx': permute(rd.name, rd.idx)}

    def mk_write(c):
        # Permute write indices for the rearranged buffer.
        s = c._node
        if (s.name in all_permute):
            new_idx = permute(s.name, s.idx)
            return {'idx': new_idx}

    def mk_stride_expr(c):
        # stride(buf, d): d becomes the new position of the old dimension.
        e = c._node
        if (e.name in all_permute):
            new_dim = all_permute[e.name].index(e.dim)
            return {'dim': new_dim}

    # Allocations affect only the rest of their block; fn args affect the
    # whole procedure body.
    if isinstance(decl_s, LoopIR.Alloc):
        rest_of_block = get_rest_of_block(decl_cursor)
    else:
        rest_of_block = decl_cursor.root().body()
    for c in rest_of_block:
        for name in all_permute.keys():
            assert isinstance(name, Sym)
            (ir, fwd) = _replace_reads(ir, fwd, c, name, mk_read)
            (ir, fwd) = _replace_pats(ir, fwd, c, f'stride({repr(name)}, _)', mk_stride_expr)
            (ir, fwd) = _replace_writes(ir, fwd, c, name, mk_write)
    return (ir, fwd)
class TestRecoveryStats:
    """Tests for telemetry.RecoveryStatsRecorder metric emission.

    BUG FIX: each test's ``@mock.patch('esrally.metrics.EsMetricsStore.put_doc')``
    decorator had been mangled to a bare string tuple; restored (each test
    receives the patched mock as ``metrics_store_put_doc``).
    """

    @mock.patch('esrally.metrics.EsMetricsStore.put_doc')
    def test_no_metrics_if_no_pending_recoveries(self, metrics_store_put_doc):
        response = {}
        cfg = create_config()
        metrics_store = metrics.EsMetricsStore(cfg)
        client = Client(indices=SubClient(recovery=response))
        recorder = telemetry.RecoveryStatsRecorder(cluster_name='leader', client=client, metrics_store=metrics_store, sample_interval=1, indices=['index1'])
        recorder.record()
        assert metrics_store_put_doc.call_count == 0

    @mock.patch('esrally.metrics.EsMetricsStore.put_doc')
    def test_stores_single_shard_stats(self, metrics_store_put_doc):
        # BUG FIX: the 'total_in_bytes'/'reused_in_bytes' values were lost in
        # extraction ("'total_in_bytes': ,"); 26001617 is the Elasticsearch
        # recovery-API docs sample for "24.7mb" — confirm against upstream.
        response = {'index1': {'shards': [{'id': 0, 'type': 'STORE', 'stage': 'DONE', 'primary': True, 'start_time': '2014-02-24T12:38:06.349', 'start_time_in_millis': '', 'stop_time': '2014-02-24T12:38:08.464', 'stop_time_in_millis': '', 'total_time': '2.1s', 'total_time_in_millis': 2115, 'source': {'id': 'RGMdRc-yQWWKIBM4DGvwqQ', 'host': 'my.fqdn', 'transport_address': 'my.fqdn', 'ip': '10.0.1.7', 'name': 'my_es_node'}, 'target': {'id': 'RGMdRc-yQWWKIBM4DGvwqQ', 'host': 'my.fqdn', 'transport_address': 'my.fqdn', 'ip': '10.0.1.7', 'name': 'my_es_node'}, 'index': {'size': {'total': '24.7mb', 'total_in_bytes': 26001617, 'reused': '24.7mb', 'reused_in_bytes': 26001617, 'recovered': '0b', 'recovered_in_bytes': 0, 'percent': '100.0%'}, 'files': {'total': 26, 'reused': 26, 'recovered': 0, 'percent': '100.0%'}, 'total_time': '2ms', 'total_time_in_millis': 2, 'source_throttle_time': '0s', 'source_throttle_time_in_millis': 0, 'target_throttle_time': '0s', 'target_throttle_time_in_millis': 0}, 'translog': {'recovered': 71, 'total': 0, 'percent': '100.0%', 'total_on_start': 0, 'total_time': '2.0s', 'total_time_in_millis': 2025}, 'verify_index': {'check_index_time': 0, 'check_index_time_in_millis': 0, 'total_time': '88ms', 'total_time_in_millis': 88}}]}}
        cfg = create_config()
        metrics_store = metrics.EsMetricsStore(cfg)
        client = Client(indices=SubClient(recovery=response))
        recorder = telemetry.RecoveryStatsRecorder(cluster_name='leader', client=client, metrics_store=metrics_store, sample_interval=1, indices=['index1'])
        recorder.record()
        shard_metadata = {'cluster': 'leader', 'index': 'index1', 'shard': 0}
        metrics_store_put_doc.assert_has_calls([mock.call({'name': 'recovery-stats', 'shard': response['index1']['shards'][0]}, level=MetaInfoScope.cluster, meta_data=shard_metadata)], any_order=True)

    @mock.patch('esrally.metrics.EsMetricsStore.put_doc')
    def test_stores_multi_index_multi_shard_stats(self, metrics_store_put_doc):
        response = {'index1': {'shards': [{'id': 0, 'type': 'STORE', 'stage': 'DONE', 'primary': True, 'total_time_in_millis': 100}, {'id': 1, 'type': 'STORE', 'stage': 'DONE', 'primary': True, 'total_time_in_millis': 200}]}, 'index2': {'shards': [{'id': 0, 'type': 'STORE', 'stage': 'DONE', 'primary': True, 'total_time_in_millis': 300}, {'id': 1, 'type': 'STORE', 'stage': 'DONE', 'primary': True, 'total_time_in_millis': 400}, {'id': 2, 'type': 'STORE', 'stage': 'DONE', 'primary': True, 'total_time_in_millis': 500}]}}
        cfg = create_config()
        metrics_store = metrics.EsMetricsStore(cfg)
        client = Client(indices=SubClient(recovery=response))
        recorder = telemetry.RecoveryStatsRecorder(cluster_name='leader', client=client, metrics_store=metrics_store, sample_interval=1, indices=['index1', 'index2'])
        recorder.record()
        # One put_doc call per shard, across both indices.
        metrics_store_put_doc.assert_has_calls([mock.call({'name': 'recovery-stats', 'shard': response['index1']['shards'][0]}, level=MetaInfoScope.cluster, meta_data={'cluster': 'leader', 'index': 'index1', 'shard': 0}), mock.call({'name': 'recovery-stats', 'shard': response['index1']['shards'][1]}, level=MetaInfoScope.cluster, meta_data={'cluster': 'leader', 'index': 'index1', 'shard': 1}), mock.call({'name': 'recovery-stats', 'shard': response['index2']['shards'][0]}, level=MetaInfoScope.cluster, meta_data={'cluster': 'leader', 'index': 'index2', 'shard': 0}), mock.call({'name': 'recovery-stats', 'shard': response['index2']['shards'][1]}, level=MetaInfoScope.cluster, meta_data={'cluster': 'leader', 'index': 'index2', 'shard': 1}), mock.call({'name': 'recovery-stats', 'shard': response['index2']['shards'][2]}, level=MetaInfoScope.cluster, meta_data={'cluster': 'leader', 'index': 'index2', 'shard': 2})], any_order=True)
class BackupStatus(object):
    """Integer status codes for a backup's lifecycle, plus display labels."""
    UNDEFINED = (- 1)
    RUNNING = 1
    COMPLETED = 2
    CANCELLED = 3
    INTERRUPTED = 4
    FAILED = 5
    RESTORED = 6
    # Sentinel meaning "any status", e.g. for unfiltered queries.
    ALL = 999
    # Human-readable label for each status code.
    text = {UNDEFINED: 'undefined', RUNNING: 'running', COMPLETED: 'completed', CANCELLED: 'cancelled', INTERRUPTED: 'interrupted', FAILED: 'failed', RESTORED: 'restored', ALL: 'all'}
def fence_generic_format(math, language='math', class_name='arithmatex', options=None, md=None, wrap='\\[\n%s\n\\]', **kwargs):
    """Render fenced *math* content as a <div>, preserving id/class/attrs.

    kwargs must carry 'classes' (list, mutated in place), 'id_value' (str)
    and 'attrs' (dict).  *class_name* is prepended to the class list and
    *wrap* formats the math payload.
    """
    classes = kwargs['classes']
    # Ensure the arithmatex class comes first.
    classes.insert(0, class_name)
    id_attr = ' id="{}"'.format(kwargs['id_value']) if kwargs['id_value'] else ''
    class_attr = ' class="{}"'.format(' '.join(classes))
    attrs = kwargs['attrs']
    extra_attrs = (' ' + ' '.join('{k}="{v}"'.format(k=k, v=v) for k, v in attrs.items())) if attrs else ''
    return '<div%s%s%s>%s</div>' % (id_attr, class_attr, extra_attrs, wrap % math)
def extractPerpetualdaydreamsCom(item):
    """Translate a perpetualdaydreams.com feed *item* into a release message.

    Returns None for previews or items with neither volume nor chapter,
    False when no known series tag matches, otherwise the built message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # Ordered mapping: feed tag -> (series name, translation type).
    series_by_tag = [
        ('iwal', 'I Won a Lottery So I Moved to the Other World', 'translated'),
        ('fpyq', 'Fei Pin Ying Qiang', 'translated'),
        ('TYQHM', 'Those Years in Quest of Honor Mine', 'translated'),
        ('vio', 'Violant of the Silver', 'translated'),
        ('nhad', 'Nurturing the Hero to Avoid Death', 'translated'),
        ('below', 'I Cant Write Any Below-the-Neck Scenes', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, tl_type in series_by_tag:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def _format_html_time_tag(datetime, what_to_display):
if (what_to_display == 'date-only'):
content = babel_format_date(datetime, locale=_get_user_locale())
elif (what_to_display == 'time-only'):
content = babel_format_time(datetime, format='short', locale=_get_user_locale())
elif (what_to_display == 'date-and-time'):
content = babel_format_datetime(datetime, tzinfo=UTC, locale=_get_user_locale())
content += ' UTC'
else:
raise ValueError('what_to_display argument invalid')
isoformat = datetime.isoformat()
return Markup('<time datetime="{}" data-what_to_display="{}">{}</time>'.format(isoformat, what_to_display, content, content)) |
class User(auth_models.User):
    """Proxy over the auth User adding signed-token (de)serialization.

    BUG FIX: ``uid_from_token`` and ``from_token`` take ``cls`` but had lost
    their ``@classmethod`` decorators (stripped in extraction); restored.
    ``token`` is a no-argument pure accessor and is restored as a property —
    confirm against upstream that it was not a plain method.
    """

    class Meta:
        proxy = True
        verbose_name = ''
        verbose_name_plural = ''

    # Signs/verifies token payloads with the project secret key.
    token_signer = itsdangerous.TimestampSigner(settings.SECRET_KEY)

    @property
    def token(self):
        """Signed, base64-encoded msgpack payload identifying this user."""
        data = base64.b64encode(msgpack.dumps({'type': 'user', 'id': self.id}))
        return self.token_signer.sign(data).decode('utf-8')

    @classmethod
    def uid_from_token(cls, token, max_age=(30 * 86400)):
        """Return the user id encoded in *token*, or None if invalid/expired."""
        try:
            data = cls.token_signer.unsign(token.encode('utf-8'), max_age=max_age)
        except Exception as e:
            log.info('User.from_token unsign failed: %s', e)
            return None
        data = msgpack.loads(base64.b64decode(data))
        if data.get('type') != 'user':
            return None
        return data['id']

    @classmethod
    def from_token(cls, token, max_age=(30 * 86400)):
        """Return the User for *token*, or None if invalid or not found."""
        uid = cls.uid_from_token(token, max_age) or None
        return uid and cls.objects.filter(id=uid).first()
def use_androguard():
    """Return True if androguard is importable, quieting its noisy loggers.

    NOTE(review): reads the function attribute ``use_androguard.show_path``
    before ever assigning it here — it must be initialized elsewhere in the
    module; likewise ``options`` is a module-level global. Confirm both.
    """
    try:
        import androguard
        # Log where androguard was loaded from, but only once per process.
        if use_androguard.show_path:
            logging.debug(_('Using androguard from "{path}"').format(path=androguard.__file__))
            use_androguard.show_path = False
        if (options and options.verbose):
            logging.getLogger('androguard.axml').setLevel(logging.INFO)
        # This logger is noisy even at normal verbosity; keep it at ERROR.
        logging.getLogger('androguard.core.api_specific_resources').setLevel(logging.ERROR)
        return True
    except ImportError:
        return False
def checksum_by_chunk(table_name, columns, pk_list, range_start_values, range_end_values, chunk_size, using_where, force_index: str='PRIMARY') -> str:
    """Build a SELECT that checksums one chunk of *table_name*.

    The inner query walks the table via *force_index* ordered by the primary
    key, limited to *chunk_size* rows (optionally from a WHERE range start);
    the outer query counts rows and XOR-checksums the PK range-end variable
    assignments plus the non-PK columns.
    """
    where_clause = ''
    if using_where:
        # Resume from the previous chunk's end position.
        where_clause = ' WHERE {} '.format(get_range_start_condition(pk_list, range_start_values))
    # Checksum over the session-variable assignments capturing the range end.
    pk_checksum = checksum_column_list(assign_range_end_vars(pk_list, range_end_values))
    escaped_columns = ['`{}`'.format(escape(col)) for col in columns]
    non_pk_checksum = checksum_column_list(escaped_columns)
    column_name_list = '{}, {}'.format(pk_checksum, non_pk_checksum) if non_pk_checksum else pk_checksum
    return 'SELECT count(*) as cnt, {} FROM ( SELECT * FROM `{}` FORCE INDEX (`{}`) {} ORDER BY {} LIMIT {} ) as tmp'.format(column_name_list, escape(table_name), escape(force_index), where_clause, list_to_col_str(pk_list), chunk_size)
class DummyNonRetryableStageFlow(PrivateComputationBaseStageFlow):
    """Test-only stage flow: a creation stage followed by a joint,
    non-retryable ID-matching stage.

    Used to exercise retry and joint-stage handling without real services.
    """
    # Ordinary creation stage; runs on one side only.
    CREATED = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.CREATION_INITIALIZED, started_status=PrivateComputationInstanceStatus.CREATION_STARTED, completed_status=PrivateComputationInstanceStatus.CREATED, failed_status=PrivateComputationInstanceStatus.CREATION_FAILED, is_joint_stage=False)
    # Joint stage explicitly marked non-retryable to exercise the retry guard.
    NON_RETRYABLE_STAGE = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.ID_MATCHING_INITIALIZED, started_status=PrivateComputationInstanceStatus.ID_MATCHING_STARTED, completed_status=PrivateComputationInstanceStatus.ID_MATCHING_COMPLETED, failed_status=PrivateComputationInstanceStatus.ID_MATCHING_FAILED, is_joint_stage=True, is_retryable=False)

    def get_stage_service(self, args: PrivateComputationStageServiceArgs) -> PrivateComputationStageService:
        # Dummy flows never provide a real stage service.
        raise NotImplementedError()
class TTVisitorTest(object):
    """Smoke test for the TTFont visitor over a known test font."""

    @staticmethod
    def getpath(testfile):
        """Absolute path to *testfile* under this test module's data directory.

        BUG FIX: originally defined without ``@staticmethod`` (and without
        ``self``), so ``self.getpath(name)`` passed the instance as
        *testfile*; restored as a staticmethod.
        """
        path = os.path.dirname(__file__)
        return os.path.join(path, 'data', testfile)

    def test_ttvisitor(self):
        font = TTFont(self.getpath('TestVGID-Regular.otf'))
        visitor = TestVisitor()
        visitor.visit(font, 1)
        assert (len(visitor.value) == 14)
def add_QueryServicer_to_server(servicer, server):
    """Register the feegrant Query servicer's RPC handlers on *server*.

    Generated-style gRPC registration: builds unary-unary handlers for the
    Allowance and Allowances RPCs with their protobuf (de)serializers and
    attaches them under the 'cosmos.feegrant.v1beta1.Query' service name.
    """
    rpc_method_handlers = {'Allowance': grpc.unary_unary_rpc_method_handler(servicer.Allowance, request_deserializer=cosmos_dot_feegrant_dot_v1beta1_dot_query__pb2.QueryAllowanceRequest.FromString, response_serializer=cosmos_dot_feegrant_dot_v1beta1_dot_query__pb2.QueryAllowanceResponse.SerializeToString), 'Allowances': grpc.unary_unary_rpc_method_handler(servicer.Allowances, request_deserializer=cosmos_dot_feegrant_dot_v1beta1_dot_query__pb2.QueryAllowancesRequest.FromString, response_serializer=cosmos_dot_feegrant_dot_v1beta1_dot_query__pb2.QueryAllowancesResponse.SerializeToString)}
    generic_handler = grpc.method_handlers_generic_handler('cosmos.feegrant.v1beta1.Query', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
class Mode(ModeAncestor):
    """Enumeration wrapping the known PIL image modes.

    Members carry per-mode metadata (bands, base mode/type, dtype) through
    their ``value`` objects; helper methods cover detection, conversion and
    image construction.

    NOTE(review): ``of``/``for_string`` read like classmethods, and
    ``label``/``basemode``/``basetype``/``dtype`` are used attribute-style in
    ``__repr__`` — @classmethod/@property decorators appear to have been lost
    in this copy of the file; confirm against the original.
    """
    MONO = auto()
    L = auto()
    I = auto()
    F = auto()
    P = auto()
    RGB = auto()
    RGBX = auto()
    RGBA = auto()
    CMYK = auto()
    YCbCr = auto()
    LAB = auto()
    HSV = auto()
    RGBa = auto()
    LA = auto()
    La = auto()
    PA = auto()
    I16 = auto()
    I16L = auto()
    I16B = auto()
    def of(cls, image):
        """Return the member matching *image*'s mode, or raise ValueError."""
        for mode in cls:
            if mode.check(image):
                return mode
        raise ValueError(f'Image has unknown mode {image.mode}')
    def for_string(cls, string):
        """Return the member whose string form equals *string*."""
        for mode in cls:
            if (mode.to_string() == string):
                return mode
        raise ValueError(f'for_string(): unknown mode {string}')
    def to_string(self):
        return str(self.value)
    def __str__(self):
        return self.to_string()
    def __repr__(self):
        repr_string = '%s(%s: [%s/%s] {%s %s}) %s'
        return (repr_string % (type(self).__qualname__, self.label, self.basemode, self.basetype, self.dtype_code(), self.dtype, id(self)))
    def __bytes__(self):
        return bytes(self.to_string(), encoding=ENCODING)
    def ctx(self, image, **kwargs):
        # Context helper binding this mode to an image.
        return ModeContext(image, self, **kwargs)
    def dtype_code(self):
        # Per-mode dtype override if present, else the base type's code.
        return (dtypes_for_modes.get(self.to_string(), None) or self.basetype.dtype_code())
    def band_count(self):
        return len(self.value.bands)
    def bands(self):
        return self.value.bands
    def basemode(self):
        return type(self).for_string(self.value.basemode)
    def basetype(self):
        return type(self).for_string(self.value.basetype)
    def dtype(self):
        return numpy.dtype(self.dtype_code())
    def is_memory_mapped(self):
        return junkdrawer.is_mapped(self.to_string())
    def label(self):
        # The bare name when str(self) == name, else "str (NAME)".
        return (((str(self) == self.name) and self.name) or f'{self!s} ({self.name})')
    def check(self, image):
        """True if *image* is already in this mode."""
        return (junkdrawer.imode(image) is self.value)
    def merge(self, *channels):
        return Image.merge(self.to_string(), channels)
    def render(self, image, *args, **kwargs):
        """Convert *image* to this mode (no-op if already in it)."""
        if self.check(image):
            return image
        return image.convert(self.to_string(), *args, **kwargs)
    def process(self, image):
        return self.render(image)
    def new(self, size, color=0):
        return Image.new(self.to_string(), size, color=color)
    def open(self, fileish):
        return self.render(Image.open(fileish))
    def frombytes(self, size, data, decoder_name='raw', *args):
        return Image.frombytes(self.to_string(), size, data, decoder_name, *args)
def test_decorations(manager_nospawn, minimal_conf_noscreen):
    """A widget configured with three decorations should expose all three."""
    conf = minimal_conf_noscreen
    deco_widget = widget.ScriptExit(
        decorations=[
            RectDecoration(),
            BorderDecoration(),
            RectDecoration(radius=0, filled=True),
        ]
    )
    conf.screens = [libqtile.config.Screen(top=libqtile.bar.Bar([deco_widget], 10))]
    manager_nospawn.start(conf)
    _, count = manager_nospawn.c.widget['scriptexit'].eval('len(self.decorations)')
    assert int(count) == 3
def copy_and_replace(bmg_original: BMGraphBuilder, transformer_creator: Callable[([Cloner, Sizer], NodeTransformer)]) -> typing.Tuple[(BMGraphBuilder, ErrorReport)]:
    """Clone *bmg_original*, letting the created transformer rewrite nodes.

    Walks nodes in order; each node is either transformed (when the
    transformer asks for it) or cloned as-is. Returns ``(new_graph, report)``;
    on the first assessment error the partially-built graph is returned with
    that error report.
    """
    cloner = Cloner(bmg_original)
    transformer = transformer_creator(cloner, cloner.sizer)
    for original in bmg_original.all_nodes():
        # Map each input to its already-processed counterpart (idiomatic
        # comprehension instead of the original append loop).
        inputs = [cloner.copy_context[c] for c in original.inputs.inputs]
        assessment = transformer.assess_node(original, cloner.bmg_original)
        if len(assessment.error_report.errors) > 0:
            # Abort on the first node that fails assessment.
            return (cloner.bmg, assessment.error_report)
        elif assessment.node_needs_transform:
            image = transformer.transform_node(original, inputs)
        else:
            parents = flatten(inputs)
            image = cloner.clone(original, parents)
        # Record the mapping unless the transformer already registered one
        # (plain `in` instead of calling __contains__ directly).
        if original not in cloner.copy_context:
            cloner.copy_context[original] = image
    return (cloner.bmg, ErrorReport())
@pytest.mark.parametrize('ref, K0_ref, K_unitless_ref', ((DuschinskyRef.INITIAL, 0.0, 1.0), (DuschinskyRef.FINAL, -0.0, -1.0)))
def test_duschinsky(ref, K0_ref, K_unitless_ref):
    """Duschinsky displacement K flips sign with the chosen reference geometry.

    NOTE(review): the decorator head was garbled in this copy (the line began
    with ``.parametrize``); restored as ``@pytest.mark.parametrize`` — confirm.
    NOTE(review): the normal-mode vectors below look zeroed/truncated here —
    verify the numeric data against the original test.
    """
    L_init = np.array([[(- 0.)], [0.0], [0.0], [0.], [0.0], [0.0]])
    L_final = np.array([[(- 0.)], [0.0], [0.0], [0.], [0.0], [0.0]])
    coords3d_init = (np.array([(- 0.0236), 0.0, 0.0, 1.2236, 0.0, 0.0]).reshape((- 1), 3) / BOHR2ANG)
    coords3d_final = (np.array([0.0, 0.0, 0.0, 1.4397, 0.0, 0.0]).reshape((- 1), 3) / BOHR2ANG)
    masses = np.array((11.0093, 1.0078))
    res = duschinsky(L_init, coords3d_init, L_final, coords3d_final, masses=masses, reference=ref, with_axis_switch=False)
    K = res.K
    assert (K[0] == pytest.approx(K0_ref))
    wavenums = np.array((1363.2,))
    K_unitless = res.to_K_unitless(wavenums)
    assert (K_unitless[0] == pytest.approx(K_unitless_ref))
def test_trace_parent_wrong_trace_options_field(caplog):
    """A malformed trace-options field must make parsing fail with a debug log."""
    malformed = '00-0af7651916cd43dd8448eb211c80319c-b7ad6b-xx'
    with caplog.at_level('DEBUG', 'elasticapm.utils'):
        parsed = TraceParent.from_string(malformed)
    assert parsed is None
    assert_any_record_contains(caplog.records, 'Invalid trace-options field, value xx')
def _get_completion_help() -> str:
    """Build the shell-completion help text from all discovered plugins."""
    from hydra.core.plugins import Plugins
    from hydra.plugins.completion_plugin import CompletionPlugin

    info_lines: List[str] = []
    for plugin_cls in Plugins.instance().discover(CompletionPlugin):
        assert issubclass(plugin_cls, CompletionPlugin)
        # One install/uninstall section per discovered completion plugin.
        for cmd in ('install', 'uninstall'):
            info_lines.append(f'{plugin_cls.provides().capitalize()} - {cmd.capitalize()}:')
            info_lines.append(plugin_cls.help(cmd).format(_get_exec_command()))
            info_lines.append('')
    # Indent non-empty lines; keep blank separators untouched.
    return '\n'.join((f' {x}' if x else x) for x in info_lines)
class PanedCollapsible(Gtk.Paned):
    """Gtk.Paned whose first or second child can be wrapped in a collapsible
    expander.

    Setting ``collapsible1``/``collapsible2`` (mutually exclusive) wraps that
    child in a HiddenExpander; double-clicking the pane handle toggles the
    expansion, and an ``expanded`` signal is emitted on state changes.

    NOTE(review): ``self._expander`` is only assigned in ``_create_expander``
    (and cleared in ``do_remove``); methods such as
    ``_on_collapsible_label_changed`` read it unconditionally — confirm the
    attribute is always initialised before those paths run.
    """
    __gtype_name__ = 'PanedCollapsible'
    # Which child (if any) is wrapped in an expander; mutually exclusive.
    collapsible1 = GObject.property(type=bool, default=False)
    collapsible2 = GObject.property(type=bool, default=False)
    Paned = enum(DEFAULT=1, EXPAND=2, COLLAPSE=3)
    # Remembered divider position to restore on re-expansion.
    collapsible_y = GObject.property(type=int, default=0)
    collapsible_label = GObject.property(type=str, default='')
    __gsignals__ = {'expanded': (GObject.SIGNAL_RUN_LAST, None, (bool,))}
    # Minimum height (px) the expanded pane must keep before auto-collapsing.
    Min_Paned_Size = 80
    def __init__(self, *args, **kwargs):
        super(PanedCollapsible, self).__init__(*args, **kwargs)
        self._connect_properties()
        # 0 = release not from handle, 1 = single click, 2 = double click.
        self._from_paned_handle = 0
    def _connect_properties(self):
        self.connect('notify::collapsible1', self._on_collapsible1_changed)
        self.connect('notify::collapsible2', self._on_collapsible2_changed)
        self.connect('notify::collapsible_label', self._on_collapsible_label_changed)
    def _on_collapsible1_changed(self, *args):
        # Enforce mutual exclusion with collapsible2, then (un)wrap child 1.
        if (self.collapsible1 and self.collapsible2):
            self.collapsible2 = False
        child = self.get_child1()
        self._wrap_unwrap_child(child, self.collapsible1, self.add1)
    def _on_collapsible2_changed(self, *args):
        if (self.collapsible1 and self.collapsible2):
            self.collapsible1 = False
        child = self.get_child2()
        self._wrap_unwrap_child(child, self.collapsible2, self.add2)
    def _wrap_unwrap_child(self, child, wrap, add):
        # Re-add the child through add1/add2 so do_pack* applies (un)wrapping.
        if child:
            self.remove(child)
            if (not wrap):
                inner_child = child.get_child()
                child.remove(inner_child)
                child = inner_child
            add(child)
    def _on_collapsible_label_changed(self, *args):
        if self._expander:
            self._expander.set_label(self.collapsible_label)
    def _on_collapsible_expanded(self, *args):
        expand = self._expander.get_expanded()
        if (not expand):
            # Remember the divider position so expanding restores it.
            self.collapsible_y = self.get_position()
            self._collapse()
        else:
            if (not self.collapsible_y):
                new_y = (self.get_allocated_height() / 2)
                self.collapsible_y = new_y
            current_pos = (self.get_allocated_height() - self.get_handle_window().get_height())
            # Too little room left for the pane: fall back to half height.
            if ((current_pos - self.collapsible_y) < self.Min_Paned_Size):
                self.collapsible_y = (self.get_allocated_height() / 2)
            self.set_position(self.collapsible_y)
        self.emit('expanded', expand)
    def do_button_press_event(self, event):
        # Track single vs double click on the pane handle.
        self._from_paned_handle = 1
        if (event.type == Gdk.EventType._2BUTTON_PRESS):
            self._from_paned_handle = 2
        Gtk.Paned.do_button_press_event(self, event)
    def do_button_release_event(self, *args):
        if (self._from_paned_handle != 0):
            Gtk.Paned.do_button_release_event(self, *args)
            if (((not self._expander) or self._expander.get_expanded()) and (self._from_paned_handle == 1)):
                # Single click-drag while expanded: remember position, and
                # auto-collapse if the pane was dragged too small.
                print('in an expanded situation')
                self.collapsible_y = self.get_position()
                current_pos = (self.get_allocated_height() - self.get_handle_window().get_height())
                if ((current_pos - self.collapsible_y) < self.Min_Paned_Size):
                    self.expand(PanedCollapsible.Paned.COLLAPSE)
            if (self._from_paned_handle == 2):
                # Double click on the handle toggles the expander.
                if self._expander.get_expanded():
                    print('collapsing')
                    self.expand(PanedCollapsible.Paned.COLLAPSE)
                else:
                    self.expand(PanedCollapsible.Paned.EXPAND)
                    print('expanding')
                    self.set_position(0)
                    print(self.get_position())
            self._from_paned_handle = 0
    def do_remove(self, widget):
        # When removing a wrapped child, remove the inner widget from its
        # expander and remove the expander from the paned instead.
        if (self.collapsible1 and (self.get_child1().get_child() is widget)):
            expander = self.get_child1()
            expander.remove(widget)
            widget = expander
            self._expander = None
        elif (self.collapsible2 and (self.get_child2().get_child() is widget)):
            expander = self.get_child2()
            expander.remove(widget)
            widget = expander
            self._expander = None
        Gtk.Paned.remove(self, widget)
    def do_add(self, widget):
        # Fill slot 1 first, then slot 2; GtkPaned holds at most two children.
        if (not self.get_child1()):
            self.do_add1(widget)
        elif (not self.get_child2()):
            self.do_add2(widget)
        else:
            print('GtkPaned cannot have more than 2 children')
    def do_add1(self, widget):
        self.do_pack1(widget, True, True)
    def do_pack1(self, widget, *args, **kwargs):
        if self.collapsible1:
            widget = self._create_expander(widget)
        Gtk.Paned.pack1(self, widget, *args, **kwargs)
    def do_add2(self, widget):
        self.do_pack2(widget, True, True)
    def do_pack2(self, widget, *args, **kwargs):
        if self.collapsible2:
            widget = self._create_expander(widget)
        Gtk.Paned.pack2(self, widget, *args, **kwargs)
    def _create_expander(self, widget):
        self._expander = HiddenExpander(label=self.collapsible_label, visible=True)
        self._expander.add(widget)
        self._expander.connect('notify::expanded', self._on_collapsible_expanded)
        # One-shot: collapse once the expander gets its first allocation.
        self._allocate_id = self._expander.connect('size-allocate', self._initial_collapse)
        return self._expander
    def _initial_collapse(self, *args):
        self._collapse()
        self._expander.disconnect(self._allocate_id)
        del self._allocate_id
    def _collapse(self):
        # Push the divider to the bottom, leaving only the handle visible.
        new_y = (self.get_allocated_height() - self.get_handle_window().get_height())
        self.set_position(new_y)
    def expand(self, force):
        """Expand/collapse/toggle the wrapped child per the Paned enum."""
        if self._expander:
            if (force == PanedCollapsible.Paned.EXPAND):
                self._expander.set_expanded(True)
            elif (force == PanedCollapsible.Paned.COLLAPSE):
                self._expander.set_expanded(False)
            elif (force == PanedCollapsible.Paned.DEFAULT):
                self._expander.set_expanded((not self._expander.get_expanded()))
    def get_expansion_status(self):
        """Return Paned.EXPAND if the expander is open, else Paned.COLLAPSE."""
        value = PanedCollapsible.Paned.COLLAPSE
        if (self._expander and self._expander.get_expanded()):
            value = PanedCollapsible.Paned.EXPAND
        return value
def test_matrices_div():
    """Scalar division: field types divide; zero and bad operands raise."""
    for M, S, is_field in _all_matrices():
        m = M([[1, 2], [3, 4]])
        if is_field:
            expected = M([[(S(1) / 2), S(1)], [(S(3) / 2), 2]])
            assert (m / 2) == expected
            assert (m / S(2)) == expected
            assert raises(lambda: m / 0, ZeroDivisionError)
            assert raises(lambda: m / S(0), ZeroDivisionError)
        raises(lambda: m / None, TypeError)
        raises(lambda: None / m, TypeError)
class AssignConfigCursor(StmtCursor):
    """Cursor over a ``LoopIR.WriteConfig`` statement (``config.field = rhs``)."""
    def config(self) -> Config:
        """Return the Config object being written to."""
        assert isinstance(self._impl, C.Node)
        assert isinstance(self._impl._node, LoopIR.WriteConfig)
        return self._impl._node.config
    def field(self) -> str:
        """Return the name of the config field being assigned."""
        assert isinstance(self._impl, C.Node)
        assert isinstance(self._impl._node, LoopIR.WriteConfig)
        return self._impl._node.field
    def rhs(self) -> ExprCursor:
        """Return a cursor to the right-hand-side expression."""
        assert isinstance(self._impl, C.Node)
        assert isinstance(self._impl._node, LoopIR.WriteConfig)
        return self._child_node('rhs')
def git_push(ctx):
    """Commit the bumped version files, tag the release, and push commits + tags.

    The tag name is built from the MAJOR.MINOR.PATCH components of
    ``version.__version__``; parsing them as ints fails fast on a malformed
    version string.
    """
    new_version = version.__version__
    # Comprehension instead of list(map(lambda ...)); same int() validation.
    parts = [int(piece) for piece in new_version.split('.')]
    local(ctx, f'git add {project_name}/version.py version.py')
    local(ctx, 'git commit -m "updated version"')
    local(ctx, f'git tag {parts[0]}.{parts[1]}.{parts[2]}')
    local(ctx, 'git push origin --tags')
    local(ctx, 'git push')
class DummyAdvanced(Computation):
    """Test computation nesting a ``Dummy`` call and adding a scaled array.

    Parameters: C and D are io arrays; coeff1 feeds the nested computation,
    coeff2 scales D in the follow-up kernel.
    """
    def __init__(self, arr, coeff):
        Computation.__init__(self, [Parameter('C', Annotation(arr, 'io')), Parameter('D', Annotation(arr, 'io')), Parameter('coeff1', Annotation(coeff)), Parameter('coeff2', Annotation(coeff))])
    def _build_plan(self, plan_factory, device_params, C, D, coeff1, coeff2):
        plan = plan_factory()
        # Nested computation reuses C for both of its A/B inputs.
        nested = Dummy(C, D, coeff1, same_A_B=True)
        C_temp = plan.temp_array_like(C)
        # NOTE(review): D_temp is never read afterwards, but temp_array_like
        # registers an allocation on the plan, so it is kept deliberately.
        D_temp = plan.temp_array_like(D)
        plan.computation_call(nested, C_temp, D, C, C, coeff1)
        arr_dtype = C.dtype
        coeff_dtype = coeff2.dtype
        # Only multiplication is used by the kernel; the unused
        # functions.div(...) local from the original was dropped.
        mul = functions.mul(arr_dtype, coeff_dtype)
        template = template_from('\n <%def name="dummy(kernel_declaration, CC, C, D, coeff)">\n ${kernel_declaration}\n {\n VIRTUAL_SKIP_THREADS;\n VSIZE_T idx0 = virtual_global_id(0);\n VSIZE_T idx1 = virtual_global_id(1);\n\n ${CC.store_idx}(idx0, idx1,\n ${C.load_idx}(idx0, idx1) +\n ${mul}(${D.load_idx}(idx0, idx1), ${coeff}));\n }\n </%def>\n ')
        plan.kernel_call(template.get_def('dummy'), [C, C_temp, C_temp, coeff2], global_size=C.shape, render_kwds=dict(mul=mul))
        return plan
def _get_package_data(module, rel_path):
    """Read *rel_path* (slash-separated) from *module*'s package data as bytes."""
    spec = module.__spec__
    if spec is None or spec.submodule_search_locations is None:
        # Not a package (or no import spec): resolve next to the module file.
        base_dir = os.path.dirname(module.__file__)
        full_path = os.path.join(base_dir, *rel_path.split('/'))
        with open(full_path, 'rb') as handle:
            return handle.read()
    # Proper package: go through importlib.resources.
    return files(module).joinpath(rel_path).read_bytes()
def generate_oura_activity_content(date):
    """Build the Dash layout for the Oura activity page for *date*.

    Falls back to the most recent summary date when *date* is missing or is
    today. Returns a list of Dash html.Div rows: KPI buttons, the daily
    movement chart, and the activity contributor bars.
    """
    if ((not date) or (date == datetime.today().date())):
        # No (usable) date given: use the latest summary date in the DB.
        date = app.session.query(func.max(ouraActivitySummary.summary_date))[0][0]
    df = pd.read_sql(sql=app.session.query(ouraActivitySummary).filter((ouraActivitySummary.summary_date == date)).statement, con=engine, index_col='summary_date')
    app.session.remove()
    # NOTE(review): the `* 0.` factor in the walking-equivalency KPI below
    # looks like a truncated constant (miles-per-movement-unit) — confirm
    # against the original source.
    return [html.Div(className='row', children=[html.Div(id='oura-activity-content-kpi-trend', className='col', style={'height': '0%'})]), html.Div(id='activity-content-kpi', className='row', children=[html.Div(className='col', children=[dbc.Button(id='goal-progress-button', className='col-lg-4 contentbutton', children=[html.Div(children=['CAL PROGRESS']), html.H6('{} / {}'.format(df['cal_active'].max(), df['target_calories'].max()), className='mb-0')]), dbc.Button(id='total-burn-button', className='col-lg-4 contentbutton', children=[html.Div(children=['TOTAL BURN (CAL)']), html.H6('{}'.format(df['cal_total'].max()), className='mb-0')]), dbc.Button(id='walking-equivalency-button', className='col-lg-4 contentbutton', children=[html.Div(children=['WALKING EQUIV.']), html.H6('{:.1f} mi'.format((df['daily_movement'].max() * 0.)), className='mb-0')])])]), html.Div(className='row', children=[html.Div(id='daily-movement-day-trend', className='col', children=[html.H6('Daily Movement', style={'marginBottom': '0%'}), html.Div(id='daily-movement-chart-conainer', className='col', children=generate_daily_movement_chart(date))])]), html.Div(className='row', children=[html.Div(id='activity-contributors', className='col-lg-12', children=[html.H6('Activity Contributors'), generate_contributor_bar(df=df, id='stay-active', column_name='score_stay_active', top_left_title='Stay Active', top_right_title='{}h {}m'.format((df['inactive'].max() // 60), (df['inactive'].max() % 60))), generate_contributor_bar(df=df, id='move-every-hour', column_name='score_move_every_hour', top_left_title='Move Every Hour', top_right_title='{:.0f} alerts'.format(df['inactivity_alerts'].max())), generate_contributor_bar(df=df, id='meet-daily-goals', column_name='score_meet_daily_targets', top_left_title='Meet Daily Goals'), generate_contributor_bar(df=df, id='training-frequency', column_name='score_training_frequency', top_left_title='Training Frequency'), generate_contributor_bar(df=df, id='training-volume', 
column_name='score_training_volume', top_left_title='Training Volume'), generate_contributor_bar(df=df, id='recovery-time', column_name='score_recovery_time', top_left_title='Recovery Time')])])]
def setup_memory(avatar, memory, record_memories=None):
    """Register one memory range from a config entry on *avatar*.

    When *record_memories* is a list, writable ranges are appended to it as
    (base_addr, size) tuples.
    """
    emulate = None
    if memory.emulate is not None:
        # Resolve the named peripheral emulator class from the module.
        emulate = getattr(peripheral_emulators, memory.emulate)
    log.info(('Adding Memory: %s Addr: 0x%08x Size: 0x%08x' % (memory.name, memory.base_addr, memory.size)))
    avatar.add_memory_range(memory.base_addr, memory.size, name=memory.name, file=memory.file, permissions=memory.permissions, emulate=emulate)
    if (record_memories is not None) and ('w' in memory.permissions):
        record_memories.append((memory.base_addr, memory.size))
def appy_partial_on_base_config(base, partial):
    """Overlay the VM-related fields from *partial* onto a fresh config of *base*."""
    cfg = FakeConfig(base)
    # Top-level attributes copied straight across.
    for key in ('VM_CONTROLLER', 'VM_NAME', 'SNAPSHOT_NAME'):
        setattr(cfg, key, partial[key])
    # Nested unpacker settings.
    for key in ('host_port', 'guest_ip'):
        cfg.UNPACKER_CONFIG[key] = partial[key]
    return cfg
class _NoneTrait(TraitType):
    """Trait that only ever accepts ``None`` as its value."""
    info_text = 'None'
    default_value = None
    default_value_type = DefaultValue.constant

    def __init__(self, **metadata):
        # Only None is a legal default; reject any explicitly supplied value.
        supplied = metadata.pop('default_value', None)
        if supplied is not None:
            raise ValueError('Cannot set default value {} for _NoneTrait'.format(supplied))
        super().__init__(**metadata)

    def validate(self, obj, name, value):
        # None passes through; anything else is reported as invalid.
        if value is None:
            return value
        self.error(obj, name, value)
def fortios_log_fortianalyzer(data, fos):
    """Apply the log.fortianalyzer override-setting member operation via *fos*.

    Returns the standard FortiOS module result tuple:
    (failed, changed, response, diff).
    """
    fos.do_member_operation('log.fortianalyzer', 'override-setting')
    if data['log_fortianalyzer_override_setting']:
        resp = log_fortianalyzer_override_setting(data, fos)
    else:
        # fail_json() is expected to terminate the module run; if it ever
        # returned, `resp` below would be unbound (NameError).
        # NOTE(review): confirm fail_json always exits.
        fos._module.fail_json(msg=('missing task body: %s' % 'log_fortianalyzer_override_setting'))
    return ((not is_successful_status(resp)), (is_successful_status(resp) and (resp['revision_changed'] if ('revision_changed' in resp) else True)), resp, {})
class Sigmoid(Fixed):
    """Fixed (parameter-free) bijector applying the logistic sigmoid."""
    codomain = constraints.unit_interval

    def _forward(self, x: torch.Tensor, params: Optional[Sequence[torch.Tensor]]) -> Tuple[(torch.Tensor, Optional[torch.Tensor])]:
        # Map R -> (0, 1) via a numerically clipped sigmoid.
        out = clipped_sigmoid(x)
        return (out, self._log_abs_det_jacobian(x, out, params))

    def _inverse(self, y: torch.Tensor, params: Optional[Sequence[torch.Tensor]]) -> Tuple[(torch.Tensor, Optional[torch.Tensor])]:
        # Clamp away from {0, 1} so the logit below stays finite.
        finfo = torch.finfo(y.dtype)
        y = y.clamp(min=finfo.tiny, max=(1.0 - finfo.eps))
        x = (y.log() - torch.log1p((- y)))
        return (x, self._log_abs_det_jacobian(x, y, params))

    def _log_abs_det_jacobian(self, x: torch.Tensor, y: torch.Tensor, params: Optional[Sequence[torch.Tensor]]) -> torch.Tensor:
        # log|d sigmoid(x)/dx| = -softplus(-x) - softplus(x)
        return ((- F.softplus((- x))) - F.softplus(x))
def test_text_align_enum():
    """text_align accepts both TextAlign enum members and raw strings."""
    default_text = ft.Text()
    assert default_text.text_align == ft.TextAlign.NONE
    assert default_text._get_attr('textAlign') is None
    enum_text = ft.Text(text_align=ft.TextAlign.RIGHT)
    assert isinstance(enum_text.text_align, ft.TextAlign)
    assert enum_text.text_align == ft.TextAlign.RIGHT
    assert enum_text._get_attr('textAlign') == 'right'
    str_text = ft.Text(text_align='left')
    assert isinstance(str_text.text_align, str)
    assert str_text._get_attr('textAlign') == 'left'
class TestDataFrameInit():
    def test_init(self):
        """DataFrame needs a client AND an index pattern, or a query compiler."""
        # Missing both, or only one of the two required pieces -> ValueError.
        for kwargs in ({}, {'es_client': ES_TEST_CLIENT}, {'es_index_pattern': FLIGHTS_INDEX_NAME}):
            with pytest.raises(ValueError):
                ed.DataFrame(**kwargs)
        # Both pieces supplied, positionally or by keyword, succeed.
        ed.DataFrame(ES_TEST_CLIENT, FLIGHTS_INDEX_NAME)
        ed.DataFrame(es_client=ES_TEST_CLIENT, es_index_pattern=FLIGHTS_INDEX_NAME)
        # A prebuilt query compiler is also a valid construction path.
        qc = QueryCompiler(client=ES_TEST_CLIENT, index_pattern=FLIGHTS_INDEX_NAME)
        ed.DataFrame(_query_compiler=qc)
@app.route('/backend/update/', methods=['POST', 'PUT'])  # NOTE(review): decorator head was garbled in this copy (bare tuple, a syntax error); Flask app object assumed to be `app` — confirm.
def backend_update():
    """Flask endpoint: record build/action status updates from the backend."""
    debug_output(flask.request.json, 'RECEIVED:')
    update = flask.request.json
    for build in update.get('builds', []):
        # Status 0/1 => build finished: record result, drop from started set.
        if build['status'] in (0, 1):
            build_results.append(build)
            started_build_task_dict.pop(build['task_id'], None)
            test_for_server_end()
        # Status 3 => build started: move it from pending to started.
        if build['status'] == 3:
            build_task_dict.pop(build['task_id'], None)
            started_build_task_dict[build['task_id']] = build
    for action in update.get('actions', []):
        action_task_dict.pop(action['id'], None)
        action_results.append(action)
        test_for_server_end()
    response = {}
    debug_output(response, 'SENDING BACK:', delim=False)
    return flask.jsonify(response)
class SDPHYCMDR(LiteXModule):
    """SD PHY command-response reader.

    Waits for the command writer (*cmdw*) to finish, then receives the
    response bits on the CMD line via SDPHYR, forwarding bytes on ``source``
    with an OK/timeout status. For SHORT_BUSY responses it additionally waits
    for the card to release busy before emitting 8 trailing clocks.
    """
    def __init__(self, sys_clk_freq, cmd_timeout, cmdw, busy_timeout=1):
        self.pads_in = pads_in = stream.Endpoint(_sdpads_layout)
        self.pads_out = pads_out = stream.Endpoint(_sdpads_layout)
        # sink: response request (type/length); source: response bytes + status.
        self.sink = sink = stream.Endpoint([('cmd_type', 2), ('data_type', 2), ('length', 8)])
        self.source = source = stream.Endpoint([('data', 8), ('status', 3)])
        # Down-counting timeout, reloaded per phase (cmd vs busy wait).
        timeout = Signal(32, reset=int((cmd_timeout * sys_clk_freq)))
        count = Signal(8)
        busy = Signal()
        cmdr = SDPHYR(cmd=True, data_width=1, skip_start_bit=False)
        self.comb += pads_in.connect(cmdr.pads_in)
        fsm = FSM(reset_state='IDLE')
        self.submodules += (cmdr, fsm)
        # IDLE: arm timeout/counters; start once a request arrives and cmdw is done.
        fsm.act('IDLE', NextValue(timeout, int((cmd_timeout * sys_clk_freq))), NextValue(count, 0), NextValue(busy, 1), If(((sink.valid & pads_out.ready) & cmdw.done), NextValue(cmdr.reset, 1), NextState('WAIT')))
        # WAIT: clock the card until the first response bit is seen (or timeout).
        fsm.act('WAIT', pads_out.clk.eq(1), NextValue(cmdr.reset, 0), If(cmdr.source.valid, NextState('CMD')), NextValue(timeout, (timeout - 1)), If((timeout == 0), NextState('TIMEOUT')))
        # CMD: stream response bytes; on the last byte branch to BUSY / CLK8 / IDLE.
        fsm.act('CMD', pads_out.clk.eq(1), source.valid.eq(cmdr.source.valid), source.status.eq(SDCARD_STREAM_STATUS_OK), source.last.eq((count == (sink.length - 1))), source.data.eq(cmdr.source.data), If((cmdr.source.valid & source.ready), cmdr.source.ready.eq(1), NextValue(count, (count + 1)), If(source.last, sink.ready.eq(1), If((sink.cmd_type == SDCARD_CTRL_RESPONSE_SHORT_BUSY), source.valid.eq(0), NextValue(timeout, int((busy_timeout * sys_clk_freq))), NextState('BUSY')).Elif((sink.data_type == SDCARD_CTRL_DATA_TRANSFER_NONE), NextValue(count, 0), NextState('CLK8')).Else(NextState('IDLE')))), NextValue(timeout, (timeout - 1)), If((timeout == 0), NextState('TIMEOUT')))
        # BUSY: wait for DAT0 high (card no longer busy), then emit final beat.
        fsm.act('BUSY', pads_out.clk.eq(1), If((pads_in.valid & pads_in.data.i[0]), NextValue(busy, 0)), If((~ busy), source.valid.eq(1), source.last.eq(1), source.status.eq(SDCARD_STREAM_STATUS_OK), If(source.ready, NextValue(count, 0), NextState('CLK8'))), NextValue(timeout, (timeout - 1)), If((timeout == 0), NextState('TIMEOUT')))
        # CLK8: drive 8 trailing clock cycles with CMD held high.
        fsm.act('CLK8', pads_out.clk.eq(1), pads_out.cmd.oe.eq(1), pads_out.cmd.o.eq(1), If(pads_out.ready, NextValue(count, (count + 1)), If((count == (8 - 1)), NextState('IDLE'))))
        # TIMEOUT: report a timeout status beat and return to IDLE.
        fsm.act('TIMEOUT', sink.ready.eq(1), source.valid.eq(1), source.last.eq(1), source.status.eq(SDCARD_STREAM_STATUS_TIMEOUT), If(source.ready, NextState('IDLE')))
def test_configuration_set_via_cmd_and_default_config(hydra_sweep_runner: TSweepRunner) -> None:
    """Ax sweeper honours overrides from both the command line and config."""
    overrides = [
        'hydra/launcher=basic',
        'hydra.sweeper.ax_config.max_trials=2',
        'hydra.sweeper.ax_config.early_stop.max_epochs_without_improvement=2',
        'quadratic=basic',
        'quadratic.x=interval(-5, -2)',
        'quadratic.y=interval(-1, 1)',
    ]
    sweep = hydra_sweep_runner(calling_file='tests/test_ax_sweeper_plugin.py', calling_module=None, task_function=quadratic, config_path='config', config_name='config.yaml', overrides=overrides)
    with sweep:
        assert sweep.returns is None
        results = OmegaConf.load(f'{sweep.temp_dir}/optimization_results.yaml')
        assert isinstance(results, DictConfig)
        # Both swept parameters must appear in the reported best point.
        best = results.ax
        assert 'quadratic.x' in best
        assert 'quadratic.y' in best
class OptionPlotoptionsLollipopStatesSelect(Options):
    """Highcharts ``plotOptions.lollipop.states.select`` option wrapper.

    NOTE(review): the source showed pairs of same-named methods (getter then
    setter) which would silently shadow each other without decorators; the
    conventional @property / @<name>.setter decorators used by this option
    family were restored — confirm against the generated original.
    """

    @property
    def animation(self) -> 'OptionPlotoptionsLollipopStatesSelectAnimation':
        """Sub-options for the select-state animation."""
        return self._config_sub_data('animation', OptionPlotoptionsLollipopStatesSelectAnimation)

    @property
    def enabled(self):
        """Whether the select state is enabled (default True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def halo(self) -> 'OptionPlotoptionsLollipopStatesSelectHalo':
        """Sub-options for the select-state halo."""
        return self._config_sub_data('halo', OptionPlotoptionsLollipopStatesSelectHalo)

    @property
    def lineWidth(self):
        """Line width in the select state (default None)."""
        return self._config_get(None)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineWidthPlus(self):
        """Additional line width applied in the select state (default 1)."""
        return self._config_get(1)

    @lineWidthPlus.setter
    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)

    @property
    def marker(self) -> 'OptionPlotoptionsLollipopStatesSelectMarker':
        """Sub-options for the select-state marker."""
        return self._config_sub_data('marker', OptionPlotoptionsLollipopStatesSelectMarker)
def initLogging(logLevel=logging.INFO):
    """Initialise the 'Main' and 'salt' loggers with a colour handler.

    Intended to be called once per process; a repeated call prints an error
    and (normally) returns without touching the logging configuration.
    """
    global LOGGING_INITIALIZED
    if LOGGING_INITIALIZED:
        print('ERROR - Logging initialized twice!')
        try:
            # NOTE(review): format_exc() is empty outside an except block;
            # traceback.format_stack() may have been intended here.
            print(traceback.format_exc())
            return
        except Exception:
            # Preserves original control flow: if the print fails we fall
            # through and (re)initialise anyway.
            pass
    LOGGING_INITIALIZED = True
    print('Setting up loggers....')
    # makedirs(exist_ok=True) avoids the exists()/mkdir() race of the
    # original check-then-create sequence.
    os.makedirs('./logs', exist_ok=True)
    ch = ColourHandler()
    mainLogger = logging.getLogger('Main')
    mainLogger.setLevel(logLevel)
    mainLogger.addHandler(ch)
    saltLogger = logging.getLogger('salt')
    saltLogger.setLevel(logLevel)
    saltLogger.addHandler(ch)
    # Route uncaught exceptions through the custom hook.
    sys.excepthook = exceptHook
    print('done')
@_api.route((api_url + 'departments/'), methods=['GET'])  # NOTE(review): the leading '@' of this decorator and the next appears to have been lost in this copy; restored — confirm the blueprint/auth object names.
@_auth.login_required
def get_departments():
    """Return a paginated JSON collection of departments.

    Query params: ``page`` (default 1) and ``per_page`` (capped at 100).
    """
    page = request.args.get('page', 1, type=int)
    per_page = min(request.args.get('per_page', app.config['posts_per_page'], type=int), 100)
    data = FlicketDepartment.to_collection_dict(FlicketDepartment.query.order_by(FlicketDepartment.department.asc()), page, per_page, 'bp_api.get_departments')
    return jsonify(data)
def extractSleepykoreanWordpressCom(item):
    """Release parser for sleepykorean.wordpress.com post titles."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # No chapter/volume info, or an explicitly marked preview: not a release.
    if (not (chp or vol)) or 'preview' in item['title'].lower():
        return None
    if 'lucia' in item['tags']:
        return buildReleaseMessageWithType(item, 'Lucia', vol, chp, frag=frag, postfix=postfix)
    return False
class FieldOutFileInfo(FieldFileInfo):
    """File-info for output files: creates an empty temp file to be filled later.

    For 'text' form fields the submitted value doubles as both the displayed
    link name and the temp-file suffix; otherwise the suffix is '.out'.
    """
    def __init__(self, fimeta):
        super().__init__(fimeta)
        if (self.param.form_type == 'text'):
            # The submitted text is reused as link name and file suffix.
            self.link_name = request.form[self.key]
            self.file_suffix = request.form[self.key]
        else:
            self.file_suffix = '.out'
    def save(self):
        """Create an empty temp file for this field and record its path."""
        name = secure_filename(self.key)
        # NOTE(review): the mkstemp file descriptor `fd` is never closed here
        # — possible fd leak; confirm whether a later step closes it.
        (fd, filename) = tempfile.mkstemp(dir=self.temp_dir(), prefix=name, suffix=self.file_suffix)
        # NOTE(review): the literal '(unknown)' in this log message looks like
        # an extraction artifact (the filename may have been logged); confirm.
        logger.info(f'Creating empty file for {self.key} as (unknown)')
        self.file_path = filename
def _load_hardware_key(keyfile):
    """Read an HMAC/flash key from *keyfile*, extending short keys.

    16-byte keys are extended via SHA-256; 24-byte keys are padded by
    repeating bytes 8..16; 32- and 64-byte keys are used as-is.
    """
    key = keyfile.read()
    n = len(key)
    if n not in (16, 24, 32, 64):
        raise esptool.FatalError(('Key file contains wrong length (%d bytes), 16, 24, 32 or 64 expected.' % n))
    if n == 16:
        key = _sha256_digest(key)
        print('Using 128-bit key (extended)')
    elif n == 24:
        key = (key + key[8:16])
        assert len(key) == 32
        print('Using 192-bit key (extended)')
    elif n == 32:
        print('Using 256-bit key')
    else:
        print('Using 512-bit key')
    return key
class CopilotOperator(MapOperator[(TriggerReqBody, Dict[(str, Any)])]):
    """Map a copilot trigger request into a model-ready message payload."""
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Fallback manager used when the prompt serve component is absent.
        self._default_prompt_manager = PromptManager()
    async def map(self, input_value: TriggerReqBody) -> Dict[(str, Any)]:
        """Build the {messages, stream, model, context} dict for *input_value*.

        Raises ValueError when no stored prompt matches the requested command
        in the current user language.
        """
        from dbgpt.serve.prompt.serve import SERVE_APP_NAME as PROMPT_SERVE_APP_NAME
        from dbgpt.serve.prompt.serve import Serve as PromptServe
        prompt_serve = self.system_app.get_component(PROMPT_SERVE_APP_NAME, PromptServe, default_component=None)
        if prompt_serve:
            pm = prompt_serve.prompt_manager
        else:
            pm = self._default_prompt_manager
        load_or_save_prompt_template(pm)
        user_language = self.system_app.config.get_current_lang(default='en')
        prompt_list = pm.prefer_query(input_value.command, prefer_prompt_language=user_language)
        if (not prompt_list):
            error_msg = f'Prompt not found for command {input_value.command}, user_language: {user_language}'
            logger.error(error_msg)
            raise ValueError(error_msg)
        prompt = prompt_list[0].to_prompt_template()
        # Code translation needs source and target languages; other commands
        # only take the (source) language.
        if (input_value.command == CODE_TRANSLATE):
            format_params = {'source_language': input_value.language, 'target_language': input_value.target_language}
        else:
            format_params = {'language': input_value.language}
        system_message = prompt.format(**format_params)
        messages = [ModelMessage(role=ModelMessageRoleType.SYSTEM, content=system_message), ModelMessage(role=ModelMessageRoleType.HUMAN, content=input_value.messages)]
        context = (input_value.context.dict() if input_value.context else {})
        return {'messages': messages, 'stream': input_value.stream, 'model': input_value.model, 'context': context}
def contact_exists(sendgrid_erasure_identity_email: str, sendgrid_secrets):
    """Return the Sendgrid contact for the erasure identity email, or None.

    Queries the marketing contacts search-by-email endpoint; a 404 response
    means the contact does not exist.
    """
    # NOTE(review): the base-URL literal was truncated in this copy
    # (`base_url = f"` only); the connector convention of building it from
    # the secrets' domain is assumed here — confirm against the original.
    base_url = f"https://{sendgrid_secrets['domain']}"
    body = {'emails': [sendgrid_erasure_identity_email]}
    headers = {'Authorization': f"Bearer {sendgrid_secrets['api_key']}"}
    contact_response = requests.post(url=f'{base_url}/v3/marketing/contacts/search/emails', headers=headers, json=body)
    if contact_response.status_code == 404:
        return None
    return contact_response.json()['result'][sendgrid_erasure_identity_email]['contact']
class DatasetHasOpenTransactionError(FoundryAPIError):
    """Raised when a dataset already has an open transaction."""

    def __init__(self, dataset_rid: str, open_transaction_rid: str, response: (requests.Response | None)=None):
        # Append the raw response body (when present) after the summary line.
        detail = response.text if response is not None else ''
        message = f'Dataset {dataset_rid} already has open transaction {open_transaction_rid}.\n' + detail
        super().__init__(message)
        self.dataset_rid = dataset_rid
        self.open_transaction_rid = open_transaction_rid
        self.response = response
def test_transfer16_bulk(la: LogicAnalyzer, slave: SPISlave):
    """Two 16-bit bulk transfers: edge counts and echoed data must match."""
    la.capture(4, block=False)
    result = slave.transfer16_bulk([WRITE_DATA16, WRITE_DATA16])
    la.stop()
    sck, sdo, cs, sdi = la.fetch_data()
    initial_sdi = la.get_initial_states()[SDI[0]]
    assert len(cs) == CS_START + CS_STOP
    assert len(sck) == SCK_WRITE16 + SCK_WRITE16
    assert len(sdo) == 0
    # First/second word come from the first/second 32 sampled SDI edges.
    assert verify_value(result[0], sck, initial_sdi, sdi[:32])
    assert verify_value(result[1], sck, initial_sdi, sdi[32:])
class MongoFileField(fields.FileField):
    """WTForms file field backed by a MongoEngine GridFS file field.

    Supports an optional companion checkbox named ``_<name>-delete`` that
    marks the stored file for deletion when the form is populated.
    """
    widget = widgets.MongoFileInput()
    def __init__(self, label=None, validators=None, **kwargs):
        super(MongoFileField, self).__init__(label, validators, **kwargs)
        # Set during process() when the delete marker is present in formdata.
        self._should_delete = False
    def process(self, formdata, data=unset_value, extra_filters=None):
        if formdata:
            marker = ('_%s-delete' % self.name)
            if (marker in formdata):
                self._should_delete = True
        return super(MongoFileField, self).process(formdata, data, extra_filters)
    def populate_obj(self, obj, name):
        """Delete, store, or replace the GridFS file on *obj* as requested."""
        field = getattr(obj, name, None)
        if (field is not None):
            if self._should_delete:
                field.delete()
                return
            # Only persist a real, non-empty upload.
            if (isinstance(self.data, FileStorage) and (not is_empty(self.data.stream))):
                # put() for a new file, replace() when one is already stored.
                if (not field.grid_id):
                    func = field.put
                else:
                    func = field.replace
                func(self.data.stream, filename=self.data.filename, content_type=self.data.content_type)
def process_message_call(message: Message, env: Environment) -> MessageCallOutput:
    """Execute a message call (or contract creation) and collect its output.

    An empty target address means contract creation; a collision with an
    existing account (code or nonce) aborts the creation with zero gas left.
    """
    if (message.target == Bytes0(b'')):
        # Contract creation path.
        is_collision = account_has_code_or_nonce(env.state, message.current_target)
        if is_collision:
            return MessageCallOutput(Uint(0), U256(0), tuple(), set(), AddressCollision())
        else:
            evm = process_create_message(message, env)
    else:
        evm = process_message(message, env)
    if evm.error:
        # On error: logs, self-destructs and gas refunds are all discarded.
        logs: Tuple[(Log, ...)] = ()
        accounts_to_delete = set()
        refund_counter = U256(0)
    else:
        logs = evm.logs
        accounts_to_delete = evm.accounts_to_delete
        refund_counter = evm.refund_counter
    # Trace the end of the transaction with the gas actually consumed.
    tx_end = TransactionEnd((message.gas - evm.gas_left), evm.output, evm.error)
    evm_trace(evm, tx_end)
    return MessageCallOutput(gas_left=evm.gas_left, refund_counter=refund_counter, logs=logs, accounts_to_delete=accounts_to_delete, error=evm.error)
class ReflectionServiceServicer(object):
    """Auto-generated gRPC servicer stub; subclasses implement the RPCs."""
    def ListAllInterfaces(self, request, context):
        # Default stub behaviour: report UNIMPLEMENTED to the client.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
    def ListImplementations(self, request, context):
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def test_create_and_link_node():
    """create_and_link_node works only while compiling and binds declared inputs."""
    def t1(a: typing.Union[(int, typing.List[int])]) -> typing.Union[(int, typing.List[int])]:
        return a
    # Outside a compilation context the call must be rejected.
    with pytest.raises(FlyteAssertion, match='Cannot create node when not compiling...'):
        create_and_link_node(context_manager.FlyteContext.current_context(), t1, a=3)
    compile_ctx = context_manager.FlyteContext.current_context().with_compilation_state(CompilationState(prefix=''))
    promise = create_and_link_node(compile_ctx, t1, a=3)
    assert promise.ref.node_id == 'n0'
    assert promise.ref.var == 'o0'
    assert len(promise.ref.node.bindings) == 1
    def t2(a: typing.Optional[int]=None) -> typing.Optional[int]:
        return a
    # Defaulted optional inputs produce no bindings when omitted.
    promise = create_and_link_node(compile_ctx, t2)
    assert promise.ref.var == 'o0'
    assert len(promise.ref.node.bindings) == 0
_gui
class TestTasksApplication(unittest.TestCase):
    def setUp(self):
        # Fresh temporary directory per test, removed automatically on cleanup.
        self.tmpdir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.tmpdir)
_with_flaky_pyside
def test_layout_save_with_protocol_3(self):
state_location = self.tmpdir
app = TasksApplication(state_location=state_location, layout_save_protocol=3)
app.on_trait_change(app.exit, 'application_initialized')
memento_file = os.path.join(state_location, app.state_filename)
self.assertFalse(os.path.exists(memento_file))
app.run()
self.assertTrue(os.path.exists(memento_file))
with open(memento_file, 'rb') as f:
protocol_bytes = f.read(2)
self.assertEqual(protocol_bytes, b'\x80\x03')
_with_flaky_pyside
def test_layout_save_creates_directory(self):
state_location = (pathlib.Path(self.tmpdir) / 'subdir')
state_filename = 'memento_test'
state_path = (state_location / state_filename)
self.assertFalse(state_location.exists())
self.assertFalse(state_path.exists())
app = TasksApplication(state_location=state_location, state_filename=state_filename)
app.on_trait_change(app.exit, 'application_initialized')
app.run()
self.assertTrue(state_location.exists())
self.assertTrue(state_path.exists())
_with_flaky_pyside
def test_layout_load(self):
stored_state_location = pkg_resources.resource_filename('envisage.ui.tasks.tests', 'data')
state_location = self.tmpdir
shutil.copyfile(os.path.join(stored_state_location, 'application_memento_v2.pkl'), os.path.join(state_location, DEFAULT_STATE_FILENAME))
app = TasksApplication(state_location=state_location)
app.on_trait_change(app.exit, 'application_initialized')
app.run()
state = app._state
self.assertEqual(state.previous_window_layouts[0].size, (492, 743))
_with_flaky_pyside
def test_layout_load_pickle_protocol_3(self):
stored_state_location = pkg_resources.resource_filename('envisage.ui.tasks.tests', 'data')
state_location = self.tmpdir
shutil.copyfile(os.path.join(stored_state_location, 'application_memento_v3.pkl'), os.path.join(state_location, 'fancy_state.pkl'))
app = TasksApplication(state_location=state_location, state_filename='fancy_state.pkl')
app.on_trait_change(app.exit, 'application_initialized')
app.run()
state = app._state
self.assertEqual(state.previous_window_layouts[0].size, (492, 743))
def test_gui_trait_expects_IGUI_interface(self):
app = TasksApplication()
app.gui = DummyGUI()
_with_flaky_pyside
def test_simple_lifecycle(self):
app = TasksApplication(state_location=self.tmpdir)
app.observe((lambda event: app.exit()), 'application_initialized')
app.run()
_with_flaky_pyside
def test_lifecycle_with_plugin(self):
events = []
plugin = LifecycleRecordingPlugin(record_to=events)
plugin.observe(events.append, 'started,stopped')
gui = LifecycleRecordingGUI()
gui.observe(events.append, 'starting,stopped')
app = TasksApplication(gui=gui, state_location=self.tmpdir, plugins=[plugin])
app.observe(events.append, 'starting,started,stopping,stopped')
app.observe((lambda event: app.exit()), 'application_initialized')
app.run()
self.assertEqual([(event.object, event.name) for event in events], [(app, 'starting'), (plugin, 'started'), (app, 'started'), (gui, 'starting'), (gui, 'stopped'), (app, 'stopping'), (plugin, 'stopped'), (app, 'stopped')]) |
# NOTE(review): `(params=grpc_server_fixture_params)` looks like a stripped
# `@pytest.fixture(params=...)` decorator; as written this line is not valid
# Python -- restore the decorator from upstream.
(params=grpc_server_fixture_params)
def grpc_client_and_server_url(env_fixture, request):
    """Fixture: start a gRPC test server (sync or async per param) and yield
    a (client stub, server address) pair; terminates the server on teardown."""
    env = {k: v for (k, v) in env_fixture.items()}  # copy so mutation stays local
    if (request.param == 'async'):
        env['GRPC_SERVER_ASYNC'] = '1'
    (server_proc, free_port) = setup_grpc_server(env)
    server_addr = f'localhost:{free_port}'
    test_channel = grpc.insecure_channel(server_addr)
    test_client = TestServiceStub(test_channel)
    (yield (test_client, server_addr))
    # Teardown: stop the server subprocess once the test is done.
    server_proc.terminate()
def init_emb_lookup(collectiveArgs, commsParams, backendFuncs):
    """Initialize fbgemm_gpu embedding-lookup operators and request batches on
    `collectiveArgs` for benchmarking; no-op (with an error log) if fbgemm_gpu
    is not installed."""
    try:
        # fbgemm_gpu is an optional dependency; bail out gracefully if absent.
        from fbgemm_gpu.split_embedding_utils import generate_requests
        from fbgemm_gpu.split_table_batched_embeddings_ops import ComputeDevice, EmbeddingLocation, OptimType, SplitTableBatchedEmbeddingBagsCodegen
    except ImportError:
        logger.error('benchmarking with emb_lookup kernels requires fbgemm_gpu library')
        return
    collectiveArgs.direction = commsParams.direction
    collectiveArgs.emb_dim = commsParams.emb_dim
    num_embeddings = commsParams.num_embs
    collectiveArgs.batch_size = commsParams.batch_size
    num_tables_per_device = commsParams.num_emb_tables_per_device
    collectiveArgs.num_emb_tables_batched = commsParams.num_emb_tables_batched
    bag_size = commsParams.bag_size
    # -1 means "batch all tables on the device into a single op".
    num_emb_tables_batched = (num_tables_per_device if (collectiveArgs.num_emb_tables_batched == (- 1)) else collectiveArgs.num_emb_tables_batched)
    collectiveArgs.num_emb_ops = (num_tables_per_device // num_emb_tables_batched)
    # One batched embedding-bag operator per op, each holding
    # `num_emb_tables_batched` identical tables on CUDA or CPU.
    collectiveArgs.emb = [SplitTableBatchedEmbeddingBagsCodegen(embedding_specs=[(num_embeddings, collectiveArgs.emb_dim, (EmbeddingLocation.DEVICE if (commsParams.device == 'cuda') else EmbeddingLocation.HOST), (ComputeDevice.CUDA if (commsParams.device == 'cuda') else ComputeDevice.CPU)) for _ in range(num_emb_tables_batched)], device=backendFuncs.get_device(), optimizer=OptimType.EXACT_ROWWISE_ADAGRAD) for _ in range(collectiveArgs.num_emb_ops)]
    # Pre-generate one lookup request (indices/offsets/weights) per op.
    collectiveArgs.embRequests = generate_requests(iters=collectiveArgs.num_emb_ops, B=collectiveArgs.batch_size, T=num_emb_tables_batched, L=bag_size, E=num_embeddings)
    if (collectiveArgs.direction == 'backward'):
        # For backward benchmarking, run forward once and keep a random
        # gradient to replay. NOTE(review): only the last iteration's
        # LookupOut/grad_output are retained -- confirm that is intended.
        for i in range(len(collectiveArgs.embRequests)):
            (indices, offsets, weights) = collectiveArgs.embRequests[i]
            collectiveArgs.LookupOut = collectiveArgs.emb[i].forward(indices, offsets, weights)
            collectiveArgs.grad_output = torch.rand_like(collectiveArgs.LookupOut).to(collectiveArgs.device)
class RpcReceiver():
    """Matches incoming RPC replies to pending request futures, keyed by XID.

    Requests registered via addCall() get a Future resolved (or failed) when
    a matching reply arrives; a background asyncio task fails futures that
    have been pending longer than `request_timeout` seconds.
    """

    def __init__(self):
        super().__init__()
        self.requests = dict()  # xid -> (Future, registration timestamp)
        self.request_timeout = 10  # seconds before a pending call is failed
        self.recv_size = 4096  # bytes per socket receive
        self.check_timeouts_task = None
        self.check_timeouts_sleep = None

    def addCall(self, xid):
        """Register a pending call for `xid` and return the Future for its reply.

        Raises RuntimeError if the XID is already pending.
        """
        if (xid in self.requests):
            raise RuntimeError(f'Download xid {xid} already taken')
        future = Future()
        self.requests[xid] = (future, time.time())
        return future

    def start(self, loop):
        """Schedule the timeout-checker task on `loop` (thread-safe)."""
        asyncio.run_coroutine_threadsafe(self.checkTimeoutsTask(), loop)

    def stop(self, loop):
        """Stop the timeout checker and warn about still-pending requests."""
        asyncio.run_coroutine_threadsafe(self.stopCheckTimeoutsTask(), loop).result()
        if self.requests:
            logging.warning('stopped but still %d in queue', len(self.requests))

    async def checkTimeoutsTask(self):
        """Poll once per second for timed-out requests until cancelled."""
        self.check_timeouts_task = asyncio.current_task()
        while True:
            try:
                # Sleep in its own task so stop() can cancel just the sleep.
                self.check_timeouts_sleep = asyncio.create_task(asyncio.sleep(1))
                (await self.check_timeouts_sleep)
                self.checkTimeouts()
            except asyncio.CancelledError:
                return

    async def stopCheckTimeoutsTask(self):
        """Cancel the checker's sleep and wait for the task to exit."""
        if ((self.check_timeouts_sleep is None) or (self.check_timeouts_task is None)):
            logging.warning('unable to properly stop checkTimeoutsTask')
            return
        self.check_timeouts_sleep.cancel()
        (await self.check_timeouts_task)

    def socketRead(self, sock):
        """Read one chunk from `sock` and hand it to the reply handler."""
        self.handleReceivedData(sock.recv(self.recv_size))

    def handleReceivedData(self, data):
        """Parse an RPC reply and resolve (or fail) the matching pending future."""
        if (len(data) == 0):
            logging.error('BUG: no data received!')
            return  # fix: don't attempt to parse an empty buffer
        try:
            rpcreply = RpcMsg.parse(data)
        except Exception as e:
            logging.warning('Failed to parse RPC reply: %s', e)
            return
        if (rpcreply.xid not in self.requests):
            logging.warning('Ignoring unknown RPC XID %d', rpcreply.xid)
            return
        (result_future, _) = self.requests.pop(rpcreply.xid)
        if (rpcreply.content.reply_stat != 'accepted'):
            result_future.set_exception(RuntimeError(('RPC call denied: ' + rpcreply.content.reject_stat)))
            # fix: previously fell through to set_result() after set_exception(),
            # which raises InvalidStateError on an already-completed Future.
            return
        if (rpcreply.content.content.accept_stat != 'success'):
            result_future.set_exception(RuntimeError(('RPC call unsuccessful: ' + rpcreply.content.content.accept_stat)))
            return  # fix: same fall-through bug as above
        result_future.set_result(rpcreply.content.content.content)

    def checkTimeouts(self):
        """Fail and drop every pending request older than `request_timeout`."""
        deadline = (time.time() - self.request_timeout)
        for (xid, (future, started_at)) in list(self.requests.items()):
            if (started_at < deadline):
                # `xid` renamed from `id` to avoid shadowing the builtin.
                logging.warning('Removing XID %d which has timed out', xid)
                future.set_exception(ReceiveTimeout(f'Request timed out after {self.request_timeout} seconds'))
                del self.requests[xid]
def doSingleSearch(phash):
    """Query the local RPyC service for files whose perceptual hash is similar."""
    print('Search for: ', phash)
    phash = int(phash)
    hash_print(phash)
    print(phash)
    print("Finding files similar to: '{}'".format(phash))
    # Connect to the locally running search service.
    service = rpyc.connect('localhost', 12345)
    matches = service.root.single_phash_search(phash=phash)
    print('Common files:')
    for match in matches:
        # match[1]: file path (padded for alignment); match[0]: score/id.
        print(match[1].ljust(100), match[0])
        hash_comp(phash, match[4])
def order_nested_filter_tree_object(nested_object):
    """Return a canonically ordered copy of a filter-tree node.

    Dicts become OrderedDicts with sorted keys; 'require'/'exclude' list
    values are sorted directly, everything else is delegated to
    order_nested_object.
    """
    if not isinstance(nested_object, dict):
        return order_nested_object(nested_object)
    ordered = OrderedDict()
    for key in sorted(nested_object.keys()):
        value = nested_object[key]
        if key in ('require', 'exclude') and isinstance(value, list):
            ordered[key] = sorted(value)
        else:
            ordered[key] = order_nested_object(value)
    return ordered
class TestBookmarkUtilsSavingsPossible(TestBookmarkUtilsSavingsBase):
    """Savings calculations when cost-saving measure values are present."""

    def setUp(self):
        super(TestBookmarkUtilsSavingsPossible, self).setUp()
        _makeCostSavingMeasureValues(self.measure, self.practice, [0, 1500, 2000])

    def _savings(self, org, months):
        # Helper: run the measure finder for `org` over `months` months.
        finder = bookmark_utils.InterestingMeasureFinder(org)
        return finder.top_and_total_savings_in_period(months)

    def test_possible_savings_for_practice(self):
        result = self._savings(self.practice, 3)
        self.assertEqual(result['possible_savings'], [(self.measure, 3500)])
        self.assertEqual(result['achieved_savings'], [])
        self.assertEqual(result['possible_top_savings_total'], 350000)

    def test_possible_savings_for_practice_not_enough_months(self):
        # A window longer than the available data yields no savings.
        result = self._savings(self.practice, 10)
        self.assertEqual(result['possible_savings'], [])
        self.assertEqual(result['achieved_savings'], [])
        self.assertEqual(result['possible_top_savings_total'], 0)

    def test_possible_savings_for_ccg(self):
        # Practice-level data does not roll up to CCG savings here.
        result = self._savings(self.practice.ccg, 3)
        self.assertEqual(result['possible_savings'], [])
        self.assertEqual(result['achieved_savings'], [])
        self.assertEqual(result['possible_top_savings_total'], 0)

    def test_possible_savings_low_is_good(self):
        self.measure.low_is_good = True
        self.measure.save()
        result = self._savings(self.practice, 3)
        self.assertEqual(result['possible_savings'], [(self.measure, 3500)])
        self.assertEqual(result['achieved_savings'], [])
        self.assertEqual(result['possible_top_savings_total'], 350.0)
def _handle_comparison(p: Pattern, s: str) -> PatternRule:
    """Build a PatternRule that rewrites `t = lhs <cmp> rhs` into
    `t = bmg.handle_function(operator.<s>, [lhs, rhs])`.

    :param p: pattern that the comparison node must match.
    :param s: attribute name on the `operator` module (e.g. 'lt', 'eq').
    """
    # AST for `operator.<s>`, passed as the function argument of the call.
    op = ast.Attribute(value=ast.Name('operator', ctx=ast.Load()), attr=s, ctx=ast.Load())
    # Match an assignment whose value is a binary comparison matching `p`,
    # then rebuild it using the left operand and the single comparator.
    return PatternRule(assign(value=binary_compare(p)), (lambda a: ast.Assign(a.targets, _make_bmg_call('handle_function', [op, ast.List(elts=[a.value.left, a.value.comparators[0]], ctx=ast.Load())]))))
class OptionSeriesVennSonificationDefaultinstrumentoptionsMappingHighpassFrequency(Options):
    """Config accessors for the sonification highpass-frequency mapping.

    NOTE(review): every accessor is defined twice (getter-style then
    setter-style). These look like `@property` / `@<name>.setter` pairs whose
    decorators were stripped; as written each later definition simply shadows
    the earlier one. Restore the decorators from the generated original.
    """

    def mapFunction(self):
        # Getter: configured mapping function (default None).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: store the mapping function without JS-type coercion.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: target property of the mapping (default None).
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter: store the mapping target.
        self._config(text, js_type=False)

    def max(self):
        # Getter: maximum mapped value (default None).
        return self._config_get(None)

    def max(self, num: float):
        # Setter: store the maximum mapped value.
        self._config(num, js_type=False)

    def min(self):
        # Getter: minimum mapped value (default None).
        return self._config_get(None)

    def min(self, num: float):
        # Setter: store the minimum mapped value.
        self._config(num, js_type=False)

    def within(self):
        # Getter: 'within' constraint for the mapping (default None).
        return self._config_get(None)

    def within(self, value: Any):
        # Setter: store the 'within' constraint.
        self._config(value, js_type=False)
class VehicleLinks(object):
    """Swagger-generated model of the hypermedia links attached to a vehicle.

    NOTE(review): the paired same-name accessor methods below look like
    `@property` / `@<name>.setter` pairs whose decorators were stripped
    (including the stray `_position.setter` / `_self.setter` lines, which
    appear to be truncated `@last_position.setter` / `@_self.setter`).
    Restore the decorators from the swagger-codegen original.
    """

    # attribute name -> swagger type, and attribute name -> JSON key.
    swagger_types = {'alerts': 'Link', 'last_position': 'Link', 'maintenance': 'Link', '_self': 'Link', 'status': 'Link', 'telemetry': 'Link', 'trips': 'Link'}
    attribute_map = {'alerts': 'alerts', 'last_position': 'lastPosition', 'maintenance': 'maintenance', '_self': 'self', 'status': 'status', 'telemetry': 'telemetry', 'trips': 'trips'}

    def __init__(self, alerts=None, last_position=None, maintenance=None, _self=None, status=None, telemetry=None, trips=None):
        # Backing fields for the accessors; only non-None arguments are set.
        self._alerts = None
        self._last_position = None
        self._maintenance = None
        self.__self = None
        self._status = None
        self._telemetry = None
        self._trips = None
        self.discriminator = None
        if (alerts is not None):
            self.alerts = alerts
        if (last_position is not None):
            self.last_position = last_position
        if (maintenance is not None):
            self.maintenance = maintenance
        if (_self is not None):
            self._self = _self
        if (status is not None):
            self.status = status
        if (telemetry is not None):
            self.telemetry = telemetry
        if (trips is not None):
            self.trips = trips

    def alerts(self):
        # Getter for the alerts link.
        return self._alerts

    def alerts(self, alerts):
        # Setter for the alerts link.
        self._alerts = alerts

    def last_position(self):
        # Getter for the last-position link.
        return self._last_position
    _position.setter
    def last_position(self, last_position):
        # Setter for the last-position link.
        self._last_position = last_position

    def maintenance(self):
        # Getter for the maintenance link.
        return self._maintenance

    def maintenance(self, maintenance):
        # Setter for the maintenance link.
        self._maintenance = maintenance

    def _self(self):
        # Getter for the self link (JSON key 'self').
        return self.__self
    _self.setter
    def _self(self, _self):
        # Setter for the self link.
        self.__self = _self

    def status(self):
        # Getter for the status link.
        return self._status

    def status(self, status):
        # Setter for the status link.
        self._status = status

    def telemetry(self):
        # Getter for the telemetry link.
        return self._telemetry

    def telemetry(self, telemetry):
        # Setter for the telemetry link.
        self._telemetry = telemetry

    def trips(self):
        # Getter for the trips link.
        return self._trips

    def trips(self, trips):
        # Setter for the trips link.
        self._trips = trips

    def to_dict(self):
        """Return the model's properties as a dict, recursing into nested models."""
        result = {}
        for (attr, _) in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
            else:
                result[attr] = value
        if issubclass(VehicleLinks, dict):
            for (key, value) in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        # For `print` and `repr`.
        return self.to_str()

    def __eq__(self, other):
        """True iff `other` is a VehicleLinks with identical attributes."""
        if (not isinstance(other, VehicleLinks)):
            return False
        return (self.__dict__ == other.__dict__)

    def __ne__(self, other):
        """Inverse of __eq__."""
        return (not (self == other))
def test_headerdb_canonical_header_retrieval_by_number(headerdb, genesis_header):
    """Every persisted header must round-trip through canonical lookup by number."""
    headerdb.persist_header(genesis_header)
    chain = mk_header_chain(genesis_header, length=10)
    headerdb.persist_header_chain(chain)
    # Check genesis first, then each header of the persisted chain.
    for expected in [genesis_header] + list(chain):
        found = headerdb.get_canonical_block_header_by_number(expected.block_number)
        assert_headers_eq(found, expected)
def test_print_tree_yaml(monkeypatch):
    """Printed YAML tree must match the stored reference file."""
    gl = gitlab_util.create_test_gitlab(monkeypatch)
    gl.load_tree()
    from gitlabber.format import PrintFormat
    import yaml
    # Capture stdout while printing the tree in YAML format.
    with output_util.captured_output() as (out, err):
        gl.print_tree(PrintFormat.YAML)
    printed = yaml.safe_load(out.getvalue())
    with open(gitlab_util.YAML_TEST_OUTPUT_FILE, 'r') as reference:
        expected = yaml.safe_load(reference)
    # Compare canonical dumps so representation differences cannot cause noise.
    assert yaml.dump(expected) == yaml.dump(printed)
class Solution(object):
    """Clockwise spiral walk over an R x C grid (LeetCode 885 style)."""

    def spiralMatrixIII(self, R, C, r0, c0):
        """Return all R*C grid cells in the order a clockwise spiral starting
        at (r0, c0) and heading east first visits them; cells outside the grid
        are traversed but not recorded."""
        def rotate(d):
            # Clockwise rotation: east -> south -> west -> north -> east.
            return {(0, 1): [1, 0], (1, 0): [0, -1], (0, -1): [-1, 0]}.get(tuple(d), [0, 1])

        position = [r0, c0]
        direction = [0, 1]
        ring, steps_in_leg = 2, 1
        walked, recorded, total = 1, 1, R * C
        visited = [position]
        while recorded < total:
            if walked == ring * ring:
                # Completed a full square ring: legs grow one cell longer.
                steps_in_leg += 1
                ring += 1
            elif steps_in_leg == ring:
                # Current leg exhausted: turn clockwise.
                direction = rotate(direction)
                steps_in_leg = 2
            else:
                steps_in_leg += 1
            position = [position[0] + direction[0], position[1] + direction[1]]
            walked += 1
            # Only record cells that actually lie inside the grid.
            if not (0 <= position[0] < R and 0 <= position[1] < C):
                continue
            visited.append(position)
            recorded += 1
        return visited
def test(config, ini_config_file_1):
    """All sections and values of the INI file are exposed by the provider."""
    config.from_ini(ini_config_file_1)
    expected = {'section1': {'value1': '1'}, 'section2': {'value2': '2'}}
    assert config() == expected
    # Per-section and per-value accessors agree with the full mapping.
    assert config.section1() == expected['section1']
    assert config.section1.value1() == '1'
    assert config.section2() == expected['section2']
    assert config.section2.value2() == '2'
class TimeCode(object):
    """Context manager that times a code block and reports the elapsed time
    in green on success or red on failure.

    Fix: the original signature used the mutable default `run=Run()`, which
    is evaluated once at definition time, so every TimeCode created without
    an explicit `run` shared (and mutated the prefixes of) the same Run
    instance. A fresh Run is now created per instance.
    """

    def __init__(self, success_msg=None, sc='green', fc='red', failure_msg=None, run=None, quiet=False, suppress_first=0):
        # Fresh Run per instance unless the caller supplies one.
        self.run = run if run is not None else Run()
        self.run.single_line_prefixes = {0: ' ', 1: ' '}
        self.quiet = quiet
        # Suppress the report entirely when the elapsed time is at most this
        # many seconds.
        self.suppress_first = suppress_first
        (self.sc, self.fc) = (sc, fc)
        self.s_msg = success_msg if success_msg else 'Code finished after '
        self.f_msg = failure_msg if failure_msg else 'Code encountered error after '

    def __enter__(self):
        self.timer = Timer()
        return self

    def __exit__(self, exception_type, exception_value, traceback):
        # Elapsed time since the timer was created.
        self.time = self.timer.timedelta_to_checkpoint(self.timer.timestamp())
        if (self.quiet or (self.time <= datetime.timedelta(seconds=self.suppress_first))):
            return
        # Returning None never suppresses the exception; we only report.
        return_code = (0 if (exception_type is None) else 1)
        (msg, color) = ((self.s_msg, self.sc) if (not return_code) else (self.f_msg, self.fc))
        self.run.info_single((msg + str(self.time)), nl_before=1, mc=color, level=return_code)
def get_combo_operator(data, comboText, con):
    """Map a selected rule field to a (rule_type, operand, value) triple.

    :param data: one of the _constants.FIELD_* identifiers chosen in the UI.
    :param comboText: the combo box's display text (some branches parse the
        value out of its last whitespace-separated token).
    :param con: the connection object whose attributes supply the values.
    """
    if (data == _constants.FIELD_PROC_PATH):
        return (Config.RULE_TYPE_SIMPLE, Config.OPERAND_PROCESS_PATH, con.process_path)
    elif (data == _constants.FIELD_PROC_ARGS):
        # Fall back to the process path when there are no arguments.
        if ((len(con.process_args) == 0) or (con.process_args[0] == '')):
            return (Config.RULE_TYPE_SIMPLE, Config.OPERAND_PROCESS_PATH, con.process_path)
        return (Config.RULE_TYPE_SIMPLE, Config.OPERAND_PROCESS_COMMAND, ' '.join(con.process_args))
    elif (data == _constants.FIELD_PROC_ID):
        return (Config.RULE_TYPE_SIMPLE, Config.OPERAND_PROCESS_ID, '{0}'.format(con.process_id))
    elif (data == _constants.FIELD_USER_ID):
        return (Config.RULE_TYPE_SIMPLE, Config.OPERAND_USER_ID, ('%s' % con.user_id))
    elif (data == _constants.FIELD_DST_PORT):
        return (Config.RULE_TYPE_SIMPLE, Config.OPERAND_DEST_PORT, ('%s' % con.dst_port))
    elif (data == _constants.FIELD_DST_IP):
        return (Config.RULE_TYPE_SIMPLE, Config.OPERAND_DEST_IP, con.dst_ip)
    elif (data == _constants.FIELD_DST_HOST):
        return (Config.RULE_TYPE_SIMPLE, Config.OPERAND_DEST_HOST, comboText)
    elif (data == _constants.FIELD_DST_NETWORK):
        # The network (CIDR) is the last token of the combo text.
        parts = comboText.split(' ')
        text = parts[(len(parts) - 1)]
        return (Config.RULE_TYPE_NETWORK, Config.OPERAND_DEST_NETWORK, text)
    elif (data == _constants.FIELD_REGEX_HOST):
        # Build a regex matching the host and any subdomain of it:
        # escape dots, drop wildcard chars, anchor as '^(|.*\.)<host>'.
        parts = comboText.split(' ')
        text = parts[(len(parts) - 1)]
        dsthost = '\\.'.join(text.split('.')).replace('*', '')
        # NOTE(review): dsthost[2:] assumes the text started with '*.'
        # (escaped to '\.'), so two chars are stripped -- confirm inputs.
        dsthost = ('^(|.*\\.)%s' % dsthost[2:])
        return (Config.RULE_TYPE_REGEXP, Config.OPERAND_DEST_HOST, dsthost)
    elif (data == _constants.FIELD_REGEX_IP):
        # Escape dots and turn '*' wildcards into '.*' for an IP regex.
        parts = comboText.split(' ')
        text = parts[(len(parts) - 1)]
        return (Config.RULE_TYPE_REGEXP, Config.OPERAND_DEST_IP, ('%s' % '\\.'.join(text.split('.')).replace('*', '.*')))
    elif (data == _constants.FIELD_APPIMAGE):
        # AppImages mount under a random 6-char suffix directory; match the
        # stable prefix, any 6 alphanumerics, then the binary name.
        appimage_bin = os.path.basename(con.process_path)
        appimage_path = os.path.dirname(con.process_path).replace('.', '\\.')
        appimage_path = appimage_path[0:(len(_constants.APPIMAGE_PREFIX) + 7)]
        return (Config.RULE_TYPE_REGEXP, Config.OPERAND_PROCESS_PATH, '^{0}[0-9A-Za-z]{{6}}\\/.*{1}$'.format(appimage_path, appimage_bin))
def test_error_if_df_contains_negative_values(df_vartypes):
    """BoxCoxTransformer must reject negative values in both fit and transform."""
    negative_df = df_vartypes.copy()
    negative_df.loc[(1, 'Age')] = -1
    # fit() on data containing a negative value raises.
    with pytest.raises(ValueError):
        BoxCoxTransformer().fit(negative_df)
    # transform() after a clean fit also raises on negative data.
    fitted = BoxCoxTransformer()
    fitted.fit(df_vartypes)
    with pytest.raises(ValueError):
        fitted.transform(negative_df)
class TestPutMessagingConfigSecretTwilioEmail():
    """API tests for PUT of Twilio-email messaging-config secrets.

    NOTE(review): the bare `(scope='function')` lines look like stripped
    `@pytest.fixture(scope='function')` decorators; as written they are not
    valid Python -- restore the decorators from upstream.
    """
    (scope='function')
    def url(self, messaging_config_twilio_email) -> str:
        # Fixture: secrets endpoint URL for the configured messaging config.
        return (V1_URL_PREFIX + MESSAGING_SECRETS).format(config_key=messaging_config_twilio_email.key)
    (scope='function')
    def payload(self):
        # Fixture: minimal valid secrets payload (Twilio API key only).
        return {MessagingServiceSecrets.TWILIO_API_KEY.value: '23p48btcpy14b'}
    def test_put_config_secrets(self, db: Session, api_client: TestClient, payload, url, generate_auth_header, messaging_config_twilio_email):
        """Happy path: PUT stores the secret and reports success."""
        auth_header = generate_auth_header([MESSAGING_CREATE_OR_UPDATE])
        response = api_client.put(url, headers=auth_header, json=payload)
        assert (200 == response.status_code)
        # Refresh so the assertion below sees the persisted secret.
        db.refresh(messaging_config_twilio_email)
        assert (json.loads(response.text) == {'msg': 'Secrets updated for MessagingConfig with key: my_twilio_email_config.', 'test_status': None, 'failure_reason': None})
        assert (messaging_config_twilio_email.secrets[MessagingServiceSecrets.TWILIO_API_KEY.value] == '23p48btcpy14b')
def test_select1(compiler):
    """Compile `select1` with the DSL compiler and check select() semantics.

    NOTE(review): the body of `select1` is source that the `compiler` fixture
    parses -- the typed locals and the apparently-dead assignments are part of
    the DSL program and must not be "cleaned up".
    """
    def select1(x: f32):
        zero: f32
        zero = 0.0
        two: f32
        two = 2.0
        x = select(x, zero, two, x)
    # Expected semantics: negative inputs become 2.0; others stay unchanged.
    actual = np.array([(- 4.0), 0.0, 4.0], dtype=np.float32)
    expected = (((actual < 0) * 2.0) + ((actual >= 0) * actual))
    fn = compiler.compile(select1)
    fn(None, actual)  # compiled function updates `actual` in place
    np.testing.assert_almost_equal(actual, expected)
def test_serialize_model_shims_roundtrip_bytes():
    """Shim state must survive a model to_bytes/from_bytes round trip."""
    fwd = (lambda model, X, is_train: (X, (lambda dY: dY)))

    def build_chain(shim):
        # Chain with the shim-carrying model in the middle slot.
        return chain(Linear(2, 3), Model('shimmodel', fwd, shims=[shim]), Maxout(2, 3))

    model = build_chain(SerializableShim(None))
    model.initialize()
    assert model.layers[1].shims[0].value == 'shimdata'
    model_bytes = model.to_bytes()
    # A structurally different model must refuse these bytes.
    with pytest.raises(ValueError):
        Linear(2, 3).from_bytes(model_bytes)
    restored = build_chain(SerializableShim(None)).from_bytes(model_bytes)
    assert restored.layers[1].shims[0].value == 'shimdata from bytes'
class CMY(Space):
    """The CMY color space: the subtractive counterpart of sRGB."""

    BASE = 'srgb'
    NAME = 'cmy'
    SERIALIZE = ('--cmy',)
    # Cyan, magenta and yellow channels, each bounded to [0, 1].
    CHANNELS = (
        Channel('c', 0.0, 1.0, bound=True),
        Channel('m', 0.0, 1.0, bound=True),
        Channel('y', 0.0, 1.0, bound=True)
    )
    CHANNEL_ALIASES = {'cyan': 'c', 'magenta': 'm', 'yellow': 'y'}
    WHITE = WHITES['2deg']['D65']

    def to_base(self, coords: Vector) -> Vector:
        """Convert CMY coordinates to the sRGB base space."""
        return cmy_to_srgb(coords)

    def from_base(self, coords: Vector) -> Vector:
        """Convert sRGB base coordinates to CMY."""
        return srgb_to_cmy(coords)
class Preferences(Gtk.Dialog):
    """Preferences dialog for indicator-sysmonitor.

    Builds the UI from a Glade file, validates and applies sensor/interval
    settings, persists them as JSON, and manages the autostart .desktop entry.
    """

    AUTOSTART_DIR = '{}/.config/autostart'.format(os.getenv('HOME'))
    AUTOSTART_PATH = '{}/.config/autostart/indicator-sysmonitor.desktop'.format(os.getenv('HOME'))
    DESKTOP_PATH = '/usr/share/applications/indicator-sysmonitor.desktop'
    # Matches {sensor} placeholders inside the custom display text.
    sensors_regex = re.compile('{.+?}')
    SETTINGS_FILE = (os.getenv('HOME') + '/.cache/indicator-sysmonitor/preferences.json')
    settings = {}

    def __init__(self, parent):
        Gtk.Dialog.__init__(self)
        self.ind_parent = parent
        self.custom_entry = None
        self.interval_entry = None
        self.sensor_mgr = SensorManager()
        self._create_content()
        self.set_data()
        self.show_all()
        # Icon customisation widgets are not functional yet; keep them hidden.
        self.display_icon_checkbutton.set_visible(False)
        self.iconpath_button.set_visible(False)
        self.iconpath_entry.set_visible(False)

    def _create_content(self):
        """Build the dialog from the Glade UI file and wire up signals."""
        self.connect('delete-event', self.on_cancel)
        self.set_title(_('Preferences'))
        self.set_size_request(600, 600)
        self.set_position(Gtk.WindowPosition.CENTER_ALWAYS)
        ui = Gtk.Builder()
        file_path = os.path.dirname(os.path.abspath(__file__))
        ui.add_from_file((file_path + '/preferences.ui'))
        self.autostart_check = ui.get_object('autostart_check')
        self.autostart_check.set_active(self.get_autostart())
        version_label = ui.get_object('version_label')
        version_label.set_label(_('This is indicator-sysmonitor version: {}').format(__version__))
        self.custom_entry = ui.get_object('custom_entry')
        self.interval_entry = ui.get_object('interval_entry')
        self.display_icon_checkbutton = ui.get_object('display_icon_checkbutton')
        self.iconpath_entry = ui.get_object('iconpath_entry')
        self.iconpath_button = ui.get_object('iconpath_button')
        sensors_list = SensorsListModel(self)
        vbox = ui.get_object('advanced_box')
        vbox.pack_start(sensors_list.get_view(), True, True, 3)
        vbox = self.get_content_area()
        notebook = ui.get_object('preferences_notebook')
        vbox.pack_start(notebook, True, True, 4)
        handlers = {'on_test': self.on_test, 'on_save': self.on_save, 'on_cancel': self.on_cancel}
        ui.connect_signals(handlers)
        buttons = ui.get_object('footer_buttonbox')
        vbox.pack_end(buttons, False, False, 5)
        self.set_resizable(False)

    def save_prefs(self):
        """Write the current settings dict as JSON to SETTINGS_FILE."""
        try:
            # Fix: the original referenced the nonexistent attributes
            # Preferences.PREF_SETTINGS_FILE and self.pref_settings, so
            # saving always raised AttributeError and nothing was written.
            os.makedirs(os.path.dirname(Preferences.SETTINGS_FILE), exist_ok=True)
            with open(Preferences.SETTINGS_FILE, 'w') as f:
                f.write(json.dumps(self.settings))
        except Exception as ex:
            logging.exception(ex)
            logging.error('Writing settings failed')

    def load_settings(self):
        """Load the settings dict from SETTINGS_FILE, logging on failure."""
        try:
            # Fix: 'Preferences.PREF.SETTINGS_FILE' was a typo that always
            # raised AttributeError; the class attribute is SETTINGS_FILE.
            with open(Preferences.SETTINGS_FILE, 'r') as f:
                self.settings = json.load(f)
        except Exception as ex:
            logging.exception(ex)
            logging.error('Reading settings failed')

    def on_iconpath_button_clicked(self, *args):
        # Placeholder: icon selection is not implemented yet.
        pass

    def on_display_icon_checkbutton_toggled(self, *args):
        """Enable/disable the icon-path widgets to follow the checkbox."""
        if (not self.display_icon_checkbutton.get_active()):
            self.iconpath_entry.set_text('')
            self.iconpath_entry.set_sensitive(False)
            self.iconpath_button.set_sensitive(False)
        else:
            self.iconpath_entry.set_sensitive(True)
            self.iconpath_button.set_sensitive(True)

    def on_test(self, evnt=None, data=None):
        """Apply the current entries to the indicator without saving."""
        try:
            self.update_parent()
        except Exception as ex:
            error_dialog = Gtk.MessageDialog(None, Gtk.DialogFlags.DESTROY_WITH_PARENT, Gtk.MessageType.ERROR, Gtk.ButtonsType.CLOSE, ex)
            error_dialog.set_title('Error')
            error_dialog.run()
            error_dialog.destroy()
            return False

    def on_save(self, evnt=None, data=None):
        """Validate, apply, persist the settings, then close the dialog."""
        try:
            self.update_parent()
        except Exception as ex:
            error_dialog = Gtk.MessageDialog(None, Gtk.DialogFlags.DESTROY_WITH_PARENT, Gtk.MessageType.ERROR, Gtk.ButtonsType.CLOSE, ex)
            error_dialog.set_title('Error')
            error_dialog.run()
            error_dialog.destroy()
            return False
        self.ind_parent.save_settings()
        self.update_autostart()
        self.destroy()

    def on_cancel(self, evnt=None, data=None):
        """Discard edits by reloading the saved settings, then close."""
        self.ind_parent.load_settings()
        self.destroy()

    def update_parent(self, evnt=None, data=None):
        """Validate the entries and push them to the parent indicator.

        Raises ISMError on an unknown sensor or an invalid interval.
        """
        custom_text = self.custom_entry.get_text()
        # Every {sensor} placeholder must name a supported sensor.
        sensors = Preferences.sensors_regex.findall(custom_text)
        for sensor in sensors:
            sensor = sensor[1:(- 1)]  # strip the surrounding braces
            if (not self.sensor_mgr.exists(sensor)):
                raise ISMError(_('{{{}}} sensor not supported.').format(sensor))
            self.sensor_mgr.check(sensor)
        try:
            interval = float(self.interval_entry.get_text())
            if (interval < 1):
                raise ISMError(_('Interval value should be greater then or equal to 1 '))
        except ValueError:
            raise ISMError(_('Interval value is not valid.'))
        self.sensor_mgr.set_custom_text(custom_text)
        self.sensor_mgr.set_interval(interval)
        self.ind_parent.update_settings()
        self.ind_parent.update_indicator_guide()

    def set_data(self):
        """Populate the entries from the sensor manager's current values."""
        self.custom_entry.set_text(self.sensor_mgr.get_custom_text())
        self.interval_entry.set_text(str(self.sensor_mgr.get_interval()))

    def update_autostart(self):
        """Create or remove the autostart .desktop entry per the checkbox."""
        autostart = self.autostart_check.get_active()
        if (not autostart):
            try:
                os.remove(Preferences.AUTOSTART_PATH)
            except OSError:
                # Narrowed from a bare `except:`: a missing/unremovable file
                # is the only expected failure and is safe to ignore.
                pass
        else:
            try:
                if (not os.path.exists(Preferences.AUTOSTART_DIR)):
                    os.makedirs(Preferences.AUTOSTART_DIR)
                shutil.copy(Preferences.DESKTOP_PATH, Preferences.AUTOSTART_PATH)
            except Exception as ex:
                logging.exception(ex)

    def get_autostart(self):
        """Return True iff the autostart .desktop entry exists."""
        return os.path.exists(Preferences.AUTOSTART_PATH)
class SetActivePerspectiveAction(WorkbenchAction):
    """Radio-style workbench action that activates its associated perspective."""

    # Delegated to the perspective so the action mirrors its state.
    enabled = Delegate('perspective')
    id = Delegate('perspective')
    name = Delegate('perspective')
    style = 'radio'
    # The perspective this action activates.
    perspective = Instance(IPerspective)

    def destroy(self):
        # Drop the window reference on teardown to break the reference cycle.
        self.window = None

    def perform(self, event):
        """Make this action's perspective the window's active perspective."""
        self.window.active_perspective = self.perspective
        return
    # NOTE(review): the bare string below looks like a stripped
    # `@on_trait_change('perspective,window.active_perspective')` decorator
    # for the handler that follows -- restore from upstream.
    ('perspective,window.active_perspective')
    def _refresh_checked(self, event):
        # Checked iff this action's perspective id matches the active one.
        # NOTE(review): `is` compares id *identity*, relying on the two ids
        # being the same string object -- confirm `==` was not intended.
        self.checked = ((self.perspective is not None) and (self.window is not None) and (self.window.active_perspective is not None) and (self.perspective.id is self.window.active_perspective.id))
        return
def define_paired_EDs(num_pairs, input_nc, z_nc, ngf, K, bottleneck, n_downsample_global=3, n_blocks_global=9, max_mult=16, norm='instance', gpu_ids=[], vaeLike=False):
    """Build `num_pairs` matched (encoder, decoder) networks.

    Each pair shares the same architecture hyper-parameters; weights are
    initialised with `weights_init`, and networks are moved to the first GPU
    in `gpu_ids` when any are given. Returns (encoders, decoders) as two
    parallel lists.
    """
    norm_layer = get_norm_layer(norm_type=norm)
    encoders = []
    decoders = []
    for _ in range(num_pairs):
        enc = E_Resnet(input_nc, z_nc, ngf, K, bottleneck, n_downsample_global, n_blocks_global, max_mult, norm_layer, vaeLike=vaeLike)
        dec = D_NLayers(input_nc, z_nc, ngf, K, bottleneck, n_downsample_global, n_blocks_global, max_mult, norm_layer, vaeLike=vaeLike)
        if gpu_ids:
            assert torch.cuda.is_available()
            enc.cuda(gpu_ids[0])
            dec.cuda(gpu_ids[0])
        enc.apply(weights_init)
        dec.apply(weights_init)
        encoders.append(enc)
        decoders.append(dec)
    return (encoders, decoders)
class Reviewer(models.Model):
    """Django model: a reviewer role attached to an event user."""

    # Custom manager shared with other event-user-backed models.
    objects = EventUserManager()
    # Timestamps maintained automatically by Django.
    created_at = models.DateTimeField(_('Created At'), auto_now_add=True)
    updated_at = models.DateTimeField(_('Updated At'), auto_now=True)
    # The event user acting as reviewer; optional.
    event_user = models.ForeignKey(EventUser, verbose_name=_('Event User'), blank=True, null=True)

    def __str__(self):
        # Display the reviewer as its underlying event user.
        return str(self.event_user)
# NOTE(review): `_os(*metadata.platforms)` looks like a stripped decorator
# (e.g. `@common.requires_os(*metadata.platforms)`) -- confirm upstream.
_os(*metadata.platforms)
def main():
    """Emulate LinPEAS-like behavior by executing a masqueraded `sed` binary."""
    masquerade = '/tmp/sed'
    if (common.CURRENT_OS == 'linux'):
        source = common.get_path('bin', 'linux.ditto_and_spawn')
        common.copy_file(source, masquerade)
    else:
        common.create_macos_masquerade(masquerade)
    common.log('Executing fake sed command for LinPEAS behavior.')
    # The bogus argument keeps the fake binary harmless; killed after 5s.
    common.execute([masquerade, 'testImPoSSssSiBlEeetest'], timeout=5, kill=True, shell=True)
    common.remove_file(masquerade)
class MakeBuild(DistutilsBuild):
    """Custom build step that runs `make` after the standard distutils build."""

    def run(self):
        DistutilsBuild.run(self)
        if (not find_executable('make')):
            sys.exit("ERROR: 'make' command is unavailable")
        try:
            subprocess.check_call(['make'])
        except subprocess.CalledProcessError as e:
            # Fix: sys.exit() takes a single argument; the original
            # sys.exit('Compilation error: ', e) raised TypeError and hid
            # the real compilation failure.
            sys.exit('Compilation error: {}'.format(e))
class JSONWebTokenMiddleware():
    """Graphene middleware that authenticates requests via JSON Web Tokens,
    caching allow-any decisions per root field and (optionally) authenticated
    users per path."""

    def __init__(self):
        # Root field names already determined to allow anonymous access.
        self.cached_allow_any = set()
        if jwt_settings.JWT_ALLOW_ARGUMENT:
            # Path-keyed cache of authenticated users, so child resolvers
            # reuse the parent's authentication result.
            self.cached_authentication = PathDict()

    def authenticate_context(self, info, **kwargs):
        """Return True when this resolution still needs authentication.

        A root field the allow-any handler approves is cached and never
        requires authentication again; anything else returns True once.
        """
        root_path = info.path[0]
        if (root_path not in self.cached_allow_any):
            if jwt_settings.JWT_ALLOW_ANY_HANDLER(info, **kwargs):
                self.cached_allow_any.add(root_path)
            else:
                return True
        return False

    def resolve(self, next, root, info, **kwargs):
        """Populate `context.user` before delegating to the next resolver."""
        context = info.context
        token_argument = get_token_argument(context, **kwargs)
        if (jwt_settings.JWT_ALLOW_ARGUMENT and (token_argument is None)):
            # No per-field token: inherit the user cached for a parent path.
            user = self.cached_authentication.parent(info.path)
            if (user is not None):
                context.user = user
            elif hasattr(context, 'user'):
                if hasattr(context, 'session'):
                    # Session-backed request: resolve the user from the session
                    # and cache it for descendant resolvers.
                    context.user = get_user(context)
                    self.cached_authentication.insert(info.path, context.user)
                else:
                    context.user = AnonymousUser()
        # Authenticate when credentials are present (header or argument) and
        # this field is not cached as allow-any.
        if ((_authenticate(context) or (token_argument is not None)) and self.authenticate_context(info, **kwargs)):
            user = authenticate(request=context, **kwargs)
            if (user is not None):
                context.user = user
                if jwt_settings.JWT_ALLOW_ARGUMENT:
                    self.cached_authentication.insert(info.path, user)
        return next(root, info, **kwargs)
def test_round_trip_ttx(font):
    """An OT-SVG table reloaded from TTX must compile and decompile losslessly."""
    # Build the table from the TTX fixture.
    source_table = table_S_V_G_()
    for (name, attrs, content) in parseXML(OTSVG_TTX):
        source_table.fromXML(name, attrs, content, font)
    binary = source_table.compile(font)
    # Decompile the binary into a fresh table and compare the XML dumps.
    round_tripped = table_S_V_G_()
    round_tripped.decompile(binary, font)
    assert getXML(round_tripped.toXML, font) == OTSVG_TTX
class P2PLibp2pConnection(Connection):
    """Connection that exchanges envelopes through a libp2p node subprocess.

    The node is started on connect; a background receive task pumps inbound
    envelopes into an asyncio queue, and a background send loop drains an
    outbound queue, reconnecting/restarting the node on failures.
    """

    connection_id = PUBLIC_ID
    # Default cap on automatic node restarts before giving up.
    DEFAULT_MAX_RESTARTS = 5

    def __init__(self, **kwargs: Any) -> None:
        """Validate the connection configuration and build the Libp2pNode.

        :raises ValueError: on unsupported ledger id, missing connection key,
            inconsistent uri configuration or malformed cert_requests.
        """
        super().__init__(**kwargs)
        ledger_id = self.configuration.config.get('ledger_id', DEFAULT_LEDGER)
        if (ledger_id not in SUPPORTED_LEDGER_IDS):
            raise ValueError("Ledger id '{}' is not supported. Supported ids: '{}'".format(ledger_id, SUPPORTED_LEDGER_IDS))
        libp2p_local_uri: Optional[str] = self.configuration.config.get('local_uri')
        libp2p_public_uri: Optional[str] = self.configuration.config.get('public_uri')
        libp2p_delegate_uri: Optional[str] = self.configuration.config.get('delegate_uri')
        libp2p_monitoring_uri: Optional[str] = self.configuration.config.get('monitoring_uri')
        libp2p_entry_peers = self.configuration.config.get('entry_peers')
        if (libp2p_entry_peers is None):
            libp2p_entry_peers = []
        libp2p_entry_peers = list(cast(List, libp2p_entry_peers))
        log_file: Optional[str] = self.configuration.config.get('log_file')
        env_file: Optional[str] = self.configuration.config.get('env_file')
        peer_registration_delay: Optional[str] = self.configuration.config.get('peer_registration_delay')
        records_storage_path: Optional[str] = self.configuration.config.get('storage_path')
        node_connection_timeout: Optional[float] = self.configuration.config.get('node_connection_timeout', PIPE_CONN_TIMEOUT)
        if (self.has_crypto_store and (self.crypto_store.crypto_objects.get(ledger_id, None) is not None)):
            key = self.crypto_store.crypto_objects[ledger_id]
        else:
            raise ValueError(f"Couldn't find connection key for {str(ledger_id)} in connections keys. Please ensure agent private key is added with `aea add-key`.")
        uri = None
        if (libp2p_local_uri is not None):
            uri = Uri(libp2p_local_uri)
        public_uri = None
        if (libp2p_public_uri is not None):
            public_uri = Uri(libp2p_public_uri)
        delegate_uri = None
        if (libp2p_delegate_uri is not None):
            delegate_uri = Uri(libp2p_delegate_uri)
        monitoring_uri = None
        if (libp2p_monitoring_uri is not None):
            monitoring_uri = Uri(libp2p_monitoring_uri)
        entry_peers = [MultiAddr.from_string(str(maddr)) for maddr in libp2p_entry_peers]
        delay = None
        if (peer_registration_delay is not None):
            try:
                delay = float(peer_registration_delay)
            except ValueError:
                raise ValueError(f'peer_registration_delay {peer_registration_delay} must be a float number in seconds')
        if (public_uri is None):
            # Relayed mode: the node must bootstrap via at least one entry peer.
            if ((entry_peers is None) or (len(entry_peers) == 0)):
                raise ValueError('At least one Entry Peer should be provided when node is run in relayed mode')
            if (delegate_uri is not None):
                self.logger.warning('Ignoring Delegate Uri configuration as node is run in relayed mode')
        else:
            if (uri is None):
                raise ValueError('Local Uri must be set when Public Uri is provided. Hint: they are the same for local host/network deployment')
            if (not _ip_all_private_or_all_public(([public_uri.host] + [maddr.host for maddr in entry_peers]))):
                raise ValueError("Node's public ip and entry peers ip addresses are not in the same ip address space (private/public)")
        cert_requests = self.configuration.cert_requests
        if ((cert_requests is None) or (len(cert_requests) != 1)):
            raise ValueError('cert_requests field must be set and contain exactly one entry!')
        cert_request = cert_requests[0]
        agent_record = AgentRecord.from_cert_request(cert_request, self.address, key.public_key, Path(self.data_dir))
        self.logger.debug('Public key used by libp2p node: {}'.format(key.public_key))
        if self.configuration.config.get('mailbox_uri'):
            mailbox_uri = str(self.configuration.config.get('mailbox_uri'))
        else:
            mailbox_uri = ''
        module_dir = self._check_node_built()
        self.node = Libp2pNode(agent_record, key, module_dir, self.data_dir, LIBP2P_NODE_CLARGS, uri, public_uri, delegate_uri, monitoring_uri, entry_peers, log_file, env_file, self.logger, delay, records_storage_path, node_connection_timeout, max_restarts=self.configuration.config.get('max_node_restarts', self.DEFAULT_MAX_RESTARTS), mailbox_uri=mailbox_uri)
        self._in_queue = None
        self._receive_from_node_task = None
        self._node_client: Optional[NodeClient] = None
        self._send_queue: Optional[asyncio.Queue] = None
        self._send_task: Optional[asyncio.Task] = None

    def _check_node_built(self) -> str:
        """Check the libp2p node module was built; return the build directory."""
        if (self.configuration.build_directory is None):
            raise ValueError('Connection Configuration build directory is not set!')
        libp2p_node_module_path = os.path.join(self.configuration.build_directory, LIBP2P_NODE_MODULE_NAME)
        enforce(os.path.exists(libp2p_node_module_path), f'Module {LIBP2P_NODE_MODULE_NAME} is not present in {self.configuration.build_directory}, please call the `aea build` command first!')
        return self.configuration.build_directory

    async def connect(self) -> None:
        """Start the libp2p node and the background receive/send tasks."""
        if self.is_connected:
            return
        with self._connect_context():
            self.node.logger = self.logger
            (await self._start_node())
            self._in_queue = asyncio.Queue()
            self._send_queue = asyncio.Queue()
            self._receive_from_node_task = asyncio.ensure_future(self._receive_from_node(), loop=self.loop)
            self._send_task = self.loop.create_task(self._send_loop())

    async def _start_node(self) -> None:
        """Start the node subprocess and attach a fresh node client."""
        (await self.node.start())
        self._node_client = self.node.get_client()

    async def _restart_node(self) -> None:
        """Stop and start the node again (error-recovery path)."""
        (await self.node.stop())
        (await self._start_node())

    async def disconnect(self) -> None:
        """Cancel background tasks, stop the node and unblock pending receives."""
        if self.is_disconnected:
            return
        self.state = ConnectionStates.disconnecting
        try:
            if (self._receive_from_node_task is not None):
                self._receive_from_node_task.cancel()
                self._receive_from_node_task = None
            if (self._send_task is not None):
                self._send_task.cancel()
                self._send_task = None
            (await self.node.stop())
            if (self._in_queue is not None):
                # Poison pill so a blocked receive() returns None.
                self._in_queue.put_nowait(None)
            else:
                self.logger.debug('Called disconnect when input queue not initialized.')
        finally:
            self.state = ConnectionStates.disconnected

    async def receive(self, *args: Any, **kwargs: Any) -> Optional['Envelope']:
        """Return the next inbound envelope, or None on shutdown/error."""
        try:
            if (self._in_queue is None):
                raise ValueError('Input queue not initialized.')
            envelope = (await self._in_queue.get())
            if (envelope is None):
                self.logger.debug('Received None.')
                return None
            return envelope
        except CancelledError:
            self.logger.debug('Receive cancelled.')
            return None
        except Exception as e:
            self.logger.exception(e)
            return None

    async def _send_envelope_with_node_client(self, envelope: Envelope) -> None:
        """Send one envelope; on failure retry after pipe reconnect, then after
        a full node restart. Re-raises only after the final attempt fails."""
        if (not self._node_client):
            raise ValueError(f'Node client not set! Can not send envelope: {envelope}')
        if (not self.node.pipe):
            raise ValueError('Node is not connected')
        try:
            (await self._node_client.send_envelope(envelope))
            return
        except asyncio.CancelledError:
            raise
        except Exception as e:
            self.logger.exception(f'Failed to send. Exception: {e}. Try recover connection to node and send again.')
        try:
            if self.node.is_proccess_running():
                (await self.node.pipe.connect())
                (await self._node_client.send_envelope(envelope))
                self.logger.debug('Envelope sent after reconnect to node')
                return
        except asyncio.CancelledError:
            raise
        except Exception as e:
            self.logger.exception(f'Failed to send after pipe reconnect. Exception: {e}. Try recover connection to node and send again.')
        try:
            (await self._restart_node())
            (await self._node_client.send_envelope(envelope))
        except asyncio.CancelledError:
            raise
        except Exception as e:
            self.logger.exception(f'Failed to send after node restart. Exception: {e}. Try recover connection to node and send again.')
            raise

    async def _send_loop(self) -> None:
        """Dequeue outbound envelopes and forward them to the node until
        disconnected; on unexpected failure, shut the connection down."""
        if ((not self._send_queue) or (not self._node_client)):
            self.logger.error('Send loop not started cause not connected properly.')
            return
        # BUG FIX: initialise `envelope` before the try block.  If the failure
        # happens inside `self._send_queue.get()` on the first iteration, the
        # except handler below previously referenced an unbound name and the
        # resulting NameError masked the real error.
        envelope = None
        try:
            while self.is_connected:
                envelope = (await self._send_queue.get())
                (await self._send_envelope_with_node_client(envelope))
        except asyncio.CancelledError:
            raise
        except Exception:
            self.logger.exception(f'Failed to send an envelope {envelope}. Stop connection.')
            (await asyncio.shield(self.disconnect()))

    async def send(self, envelope: Envelope) -> None:
        """Validate and enqueue an envelope for the send loop."""
        if ((not self._node_client) or (not self._send_queue)):
            raise ValueError('Node is not connected!')
        self._ensure_valid_envelope_for_external_comms(envelope)
        (await self._send_queue.put(envelope))

    async def _read_envelope_from_node(self) -> Optional[Envelope]:
        """Read one envelope from the node, restarting the node once on failure."""
        if (not self._node_client):
            raise ValueError('Node is not connected!')
        try:
            return (await self._node_client.read_envelope())
        except asyncio.CancelledError:
            raise
        except Exception as e:
            self.logger.exception(f'Failed to read. Exception: {e}. Try reconnect to node and read again.')
            (await self._restart_node())
            return (await self._node_client.read_envelope())

    async def _receive_from_node(self) -> None:
        """Pump envelopes from the node into the input queue until EOF (None)."""
        while True:
            if (self._in_queue is None):
                raise ValueError('Input queue not initialized.')
            if (not self._node_client):
                raise ValueError('Node is not connected!')
            envelope = (await self._read_envelope_from_node())
            if (envelope is None):
                break
            self._in_queue.put_nowait(envelope)
def get_pcie_int_tiles(grid, pcie_loc):
    """Collect PCIE_INT_INTERFACE tiles at 50-row boundaries.

    For each matching tile, scan horizontally (towards the device edge away
    from ``pcie_loc``) for the nearest SLICE site.

    :param grid: device grid object (provides tiles(), gridinfo_at_loc(),
        loc_of_tilename(), dims()).
    :param pcie_loc: GridLoc of the PCIE block, used to decide scan direction.
    :return: list of (tile_name, is_left, site) tuples; ``site`` is None when
        no SLICE site was found in the scanned range.
    """

    def get_site_at_loc(loc):
        # Return the first SLICE site at `loc`, or None when the location
        # holds no sites or a non-SLICE site.
        gridinfo = grid.gridinfo_at_loc(loc)
        sites = list(gridinfo.sites.keys())
        if (len(sites) and sites[0].startswith('SLICE')):
            return sites[0]
        return None
    pcie_int_tiles = list()
    for tile_name in sorted(grid.tiles()):
        if (not tile_name.startswith('PCIE_INT_INTERFACE')):
            continue
        m = GTP_INT_Y_RE.match(tile_name)
        assert m
        int_y = int(m.group(1))
        if ((int_y % 50) == 0):
            loc = grid.loc_of_tilename(tile_name)
            is_left = (loc.grid_x < pcie_loc.grid_x)
            # BUG FIX: initialise `site` for every tile.  Previously, if the
            # search range below was empty (tile at the grid edge) the name
            # was unbound on the first tile (NameError) and on later tiles it
            # silently reused the site found for the previous tile.
            site = None
            if is_left:
                # Scan leftwards towards column 0.
                for i in range(1, loc.grid_x):
                    loc_grid_x = (loc.grid_x - i)
                    site = get_site_at_loc(GridLoc(loc_grid_x, loc.grid_y))
                    if site:
                        break
            else:
                # Scan rightwards towards the device's maximum column.
                (_, x_max, _, _) = grid.dims()
                for i in range(1, (x_max - loc.grid_x)):
                    loc_grid_x = (loc.grid_x + i)
                    site = get_site_at_loc(GridLoc(loc_grid_x, loc.grid_y))
                    if site:
                        break
            pcie_int_tiles.append((tile_name, is_left, site))
    return pcie_int_tiles
class ConfigLoaderImpl(ConfigLoader):
    """Configuration loader: composes the final DictConfig from the defaults
    list, the config sources on the search path and command-line overrides."""

    def __init__(self, config_search_path: ConfigSearchPath) -> None:
        self.config_search_path = config_search_path
        self.repository = ConfigRepository(config_search_path=config_search_path)

    @staticmethod
    def validate_sweep_overrides_legal(overrides: List[Override], run_mode: RunMode, from_shell: bool) -> None:
        """Raise ConfigCompositionException for sweep overrides that are not
        legal in the given run mode.

        BUG FIX: marked @staticmethod.  The method has no ``self`` parameter
        but is invoked as ``self.validate_sweep_overrides_legal(overrides=...)``;
        without the decorator the instance is passed as the first positional
        argument and the call fails with a duplicate-keyword TypeError.
        """
        for x in overrides:
            if x.is_sweep_override():
                if (run_mode == RunMode.MULTIRUN):
                    if x.is_hydra_override():
                        raise ConfigCompositionException(f"Sweeping over Hydra's configuration is not supported : '{x.input_line}'")
                elif (run_mode == RunMode.RUN):
                    if (x.value_type == ValueType.SIMPLE_CHOICE_SWEEP):
                        vals = 'value1,value2'
                        if from_shell:
                            # NOTE(review): restored shell-escaped quotes here;
                            # the two branches were identical, leaving the
                            # `from_shell` flag dead -- confirm against upstream.
                            example_override = f"key=\\'{vals}\\'"
                        else:
                            example_override = f"key='{vals}'"
                        msg = dedent(f''' Ambiguous value for argument '{x.input_line}'
                        1. To use it as a list, use key=[value1,value2]
                        2. To use it as string, quote the value: {example_override}
                        3. To sweep over it, add --multirun to your command line''')
                        raise ConfigCompositionException(msg)
                    else:
                        raise ConfigCompositionException(f"Sweep parameters '{x.input_line}' requires --multirun")
                else:
                    # Unknown run mode: programming error.
                    assert False

    def _missing_config_error(self, config_name: Optional[str], msg: str, with_search_path: bool) -> None:
        """Raise MissingConfigException, optionally appending the search path."""

        def add_search_path() -> str:
            descs = []
            for src in self.repository.get_sources():
                if (src.provider != 'schema'):
                    descs.append(f' {repr(src)}')
            lines = '\n'.join(descs)
            if with_search_path:
                return ((msg + '\nSearch path:') + f'''
 {lines}''')
            else:
                return msg
        raise MissingConfigException(missing_cfg_file=config_name, message=add_search_path())

    def ensure_main_config_source_available(self) -> None:
        """Fail early with a helpful message when the primary ('main') config
        source cannot be reached."""
        for source in self.get_sources():
            if (source.provider == 'main'):
                if (not source.available()):
                    if (source.scheme() == 'pkg'):
                        if (source.path == ''):
                            msg = 'Primary config module is empty.\nPython requires resources to be in a module with an __init__.py file'
                        else:
                            msg = f'''Primary config module '{source.path}' not found.
Check that it's correct and contains an __init__.py file'''
                    else:
                        msg = f'''Primary config directory not found.
Check that the config directory '{source.path}' exists and readable'''
                    self._missing_config_error(config_name=None, msg=msg, with_search_path=False)

    def load_configuration(self, config_name: Optional[str], overrides: List[str], run_mode: RunMode, from_shell: bool=True, validate_sweep_overrides: bool=True) -> DictConfig:
        """Compose and return the job configuration."""
        try:
            return self._load_configuration_impl(config_name=config_name, overrides=overrides, run_mode=run_mode, from_shell=from_shell, validate_sweep_overrides=validate_sweep_overrides)
        except OmegaConfBaseException as e:
            raise ConfigCompositionException().with_traceback(sys.exc_info()[2]) from e

    def _process_config_searchpath(self, config_name: Optional[str], parsed_overrides: List[Override], repo: CachingConfigRepository) -> None:
        """Apply hydra.searchpath (from the primary config or the command
        line) by rebuilding the repository's source list."""
        if (config_name is not None):
            loaded = repo.load_config(config_path=config_name)
            primary_config: Container
            if (loaded is None):
                primary_config = OmegaConf.create()
            else:
                primary_config = loaded.config
        else:
            primary_config = OmegaConf.create()
        if (not OmegaConf.is_dict(primary_config)):
            raise ConfigCompositionException(f"primary config '{config_name}' must be a DictConfig, got {type(primary_config).__name__}")

        def is_searchpath_override(v: Override) -> bool:
            return (v.get_key_element() == 'hydra.searchpath')
        override = None
        for v in parsed_overrides:
            if is_searchpath_override(v):
                override = v.value()
                break
        searchpath = OmegaConf.select(primary_config, 'hydra.searchpath')
        if (override is not None):
            # Command line wins over the primary config.
            provider = 'hydra.searchpath in command-line'
            searchpath = override
        else:
            provider = 'hydra.searchpath in main'

        def _err() -> None:
            raise ConfigCompositionException(f'hydra.searchpath must be a list of strings. Got: {searchpath}')
        if (searchpath is None):
            return
        if (not isinstance(searchpath, MutableSequence)):
            _err()
        for v in searchpath:
            if (not isinstance(v, str)):
                _err()
        # Insert the extra entries just before the terminal schema source.
        new_csp = copy.deepcopy(self.config_search_path)
        schema = new_csp.get_path().pop((- 1))
        assert (schema.provider == 'schema')
        for sp in searchpath:
            new_csp.append(provider=provider, path=sp)
        new_csp.append('schema', 'structured://')
        repo.initialize_sources(new_csp)
        for source in repo.get_sources():
            if (not source.available()):
                warnings.warn(category=UserWarning, message=f'provider={source.provider}, path={source.path} is not available.')

    def _parse_overrides_and_create_caching_repo(self, config_name: Optional[str], overrides: List[str]) -> Tuple[(List[Override], CachingConfigRepository)]:
        """Parse overrides and build a caching repo with searchpath applied."""
        parser = OverridesParser.create()
        parsed_overrides = parser.parse_overrides(overrides=overrides)
        caching_repo = CachingConfigRepository(self.repository)
        self._process_config_searchpath(config_name, parsed_overrides, caching_repo)
        return (parsed_overrides, caching_repo)

    def _load_configuration_impl(self, config_name: Optional[str], overrides: List[str], run_mode: RunMode, from_shell: bool=True, validate_sweep_overrides: bool=True) -> DictConfig:
        """Actual composition: defaults list -> merged config -> overrides ->
        hydra runtime metadata."""
        from hydra import __version__, version
        self.ensure_main_config_source_available()
        (parsed_overrides, caching_repo) = self._parse_overrides_and_create_caching_repo(config_name, overrides)
        if validate_sweep_overrides:
            self.validate_sweep_overrides_legal(overrides=parsed_overrides, run_mode=run_mode, from_shell=from_shell)
        defaults_list = create_defaults_list(repo=caching_repo, config_name=config_name, overrides_list=parsed_overrides, prepend_hydra=True, skip_missing=(run_mode == RunMode.MULTIRUN))
        config_overrides = defaults_list.config_overrides
        cfg = self._compose_config_from_defaults_list(defaults=defaults_list.defaults, repo=caching_repo)
        OmegaConf.set_struct(cfg, True)
        OmegaConf.set_readonly(cfg.hydra, False)
        ConfigLoaderImpl._apply_overrides_to_config(config_overrides, cfg)
        app_overrides = []
        for override in parsed_overrides:
            if override.is_hydra_override():
                cfg.hydra.overrides.hydra.append(override.input_line)
            else:
                cfg.hydra.overrides.task.append(override.input_line)
                app_overrides.append(override)
        with open_dict(cfg.hydra):
            cfg.hydra.runtime.choices.update(defaults_list.overrides.known_choices)
            for key in cfg.hydra.job.env_copy:
                cfg.hydra.job.env_set[key] = os.environ[key]
            cfg.hydra.runtime.version = __version__
            cfg.hydra.runtime.version_base = version.getbase()
            cfg.hydra.runtime.cwd = os.getcwd()
            cfg.hydra.runtime.config_sources = [ConfigSourceInfo(path=x.path, schema=x.scheme(), provider=x.provider) for x in caching_repo.get_sources()]
            if ('name' not in cfg.hydra.job):
                cfg.hydra.job.name = JobRuntime().get('name')
            cfg.hydra.job.override_dirname = get_overrides_dirname(overrides=app_overrides, kv_sep=cfg.hydra.job.config.override_dirname.kv_sep, item_sep=cfg.hydra.job.config.override_dirname.item_sep, exclude_keys=cfg.hydra.job.config.override_dirname.exclude_keys)
            cfg.hydra.job.config_name = config_name
        return cfg

    def load_sweep_config(self, master_config: DictConfig, sweep_overrides: List[str]) -> DictConfig:
        """Recompose a job config for one sweep run from the master config."""
        overrides = OmegaConf.to_container(master_config.hydra.overrides.hydra)
        assert isinstance(overrides, list)
        overrides = (overrides + sweep_overrides)
        sweep_config = self.load_configuration(config_name=master_config.hydra.job.config_name, overrides=overrides, run_mode=RunMode.RUN)
        OmegaConf.copy_cache(from_config=master_config, to_config=sweep_config)
        return sweep_config

    def get_search_path(self) -> ConfigSearchPath:
        return self.config_search_path

    @staticmethod
    def _apply_overrides_to_config(overrides: List[Override], cfg: DictConfig) -> None:
        """Apply delete/add/force-add/override operations to cfg in place.

        BUG FIX: marked @staticmethod to match its ``self``-less signature
        (callers already invoke it through the class).
        """
        for override in overrides:
            if (override.package is not None):
                raise ConfigCompositionException(f"Override {override.input_line} looks like a config group override, but config group '{override.key_or_group}' does not exist.")
            key = override.key_or_group
            value = override.value()
            try:
                if override.is_delete():
                    config_val = OmegaConf.select(cfg, key, throw_on_missing=False)
                    if (config_val is None):
                        raise ConfigCompositionException(f"Could not delete from config. '{override.key_or_group}' does not exist.")
                    elif ((value is not None) and (value != config_val)):
                        raise ConfigCompositionException(f"Could not delete from config. The value of '{override.key_or_group}' is {config_val} and not {value}.")
                    last_dot = key.rfind('.')
                    with open_dict(cfg):
                        if (last_dot == (- 1)):
                            del cfg[key]
                        else:
                            node = OmegaConf.select(cfg, key[0:last_dot])
                            del node[key[(last_dot + 1):]]
                elif override.is_add():
                    if ((OmegaConf.select(cfg, key, throw_on_missing=False) is None) or isinstance(value, (dict, list))):
                        OmegaConf.update(cfg, key, value, merge=True, force_add=True)
                    else:
                        assert (override.input_line is not None)
                        raise ConfigCompositionException(dedent(f''' Could not append to config. An item is already at '{override.key_or_group}'.
                        Either remove + prefix: '{override.input_line[1:]}'
                        Or add a second + to add or override '{override.key_or_group}': '+{override.input_line}'
                        '''))
                elif override.is_force_add():
                    OmegaConf.update(cfg, key, value, merge=True, force_add=True)
                else:
                    try:
                        OmegaConf.update(cfg, key, value, merge=True)
                    except (ConfigAttributeError, ConfigKeyError) as ex:
                        raise ConfigCompositionException(f'''Could not override '{override.key_or_group}'.
To append to your config use +{override.input_line}''') from ex
            except OmegaConfBaseException as ex:
                raise ConfigCompositionException(f'Error merging override {override.input_line}').with_traceback(sys.exc_info()[2]) from ex

    def _load_single_config(self, default: ResultDefault, repo: IConfigRepository) -> ConfigResult:
        """Load one default's config, optionally validating it against a
        ConfigStore schema of the same name, and embed it at its package."""
        config_path = default.config_path
        assert (config_path is not None)
        ret = repo.load_config(config_path=config_path)
        assert (ret is not None)
        if (not OmegaConf.is_config(ret.config)):
            raise ValueError(f'Config {config_path} must be an OmegaConf config, got {type(ret.config).__name__}')
        if (not ret.is_schema_source):
            schema = None
            try:
                schema_source = repo.get_schema_source()
                cname = ConfigSource._normalize_file_name(filename=config_path)
                schema = schema_source.load_config(cname)
            except ConfigLoadError:
                # No schema with a matching name -- nothing to validate against.
                pass
            if (schema is not None):
                try:
                    # BUG FIX: this assignment was a truncated, unterminated
                    # string literal (`url = '`) -- a syntax error.  Restored
                    # to the migration-guide URL used by upstream Hydra
                    # (TODO confirm the exact link for this Hydra version).
                    url = 'https://hydra.cc/docs/upgrades/1.0_to_1.1/automatic_schema_matching'
                    if ('defaults' in schema.config):
                        raise ConfigCompositionException(dedent(f''' '{config_path}' is validated against ConfigStore schema with the same name.
                        This behavior is deprecated in Hydra 1.1 and will be removed in Hydra 1.2.
                        In addition, the automatically matched schema contains a defaults list.
                        This combination is no longer supported.
                        See {url} for migration instructions.'''))
                    else:
                        deprecation_warning(dedent(f'''
                        '{config_path}' is validated against ConfigStore schema with the same name.
                        This behavior is deprecated in Hydra 1.1 and will be removed in Hydra 1.2.
                        See {url} for migration instructions.'''), stacklevel=11)
                    hydra = None
                    hydra_config_group = ((default.config_path is not None) and default.config_path.startswith('hydra/'))
                    config = ret.config
                    if (default.primary and isinstance(config, DictConfig) and ('hydra' in config) and (not hydra_config_group)):
                        # Keep the hydra node out of the schema merge, then
                        # reattach it afterwards.
                        hydra = config.pop('hydra')
                    merged = OmegaConf.merge(schema.config, config)
                    assert isinstance(merged, DictConfig)
                    if (hydra is not None):
                        with open_dict(merged):
                            merged.hydra = hydra
                    ret.config = merged
                except OmegaConfBaseException as e:
                    raise ConfigCompositionException(f"Error merging '{config_path}' with schema") from e
                assert isinstance(merged, DictConfig)
        res = self._embed_result_config(ret, default.package)
        if ((not default.primary) and (config_path != 'hydra/config') and isinstance(res.config, DictConfig) and (OmegaConf.select(res.config, 'hydra.searchpath') is not None)):
            raise ConfigCompositionException(f"In '{config_path}': Overriding hydra.searchpath is only supported from the primary config")
        return res

    @staticmethod
    def _embed_result_config(ret: ConfigResult, package_override: Optional[str]) -> ConfigResult:
        """Wrap ret.config under its header package (or the override).

        BUG FIX: marked @staticmethod.  The method has no ``self`` parameter
        but is called as ``self._embed_result_config(ret, default.package)``;
        without the decorator the instance is passed as ``ret`` and the call
        fails with a TypeError.
        """
        package = ret.header['package']
        if (package_override is not None):
            package = package_override
        if ((package is not None) and (package != '')):
            cfg = OmegaConf.create()
            OmegaConf.update(cfg, package, ret.config, merge=False)
            ret = copy.copy(ret)
            ret.config = cfg
        return ret

    def list_groups(self, parent_name: str) -> List[str]:
        return self.get_group_options(group_name=parent_name, results_filter=ObjectType.GROUP)

    def get_group_options(self, group_name: str, results_filter: Optional[ObjectType]=ObjectType.CONFIG, config_name: Optional[str]=None, overrides: Optional[List[str]]=None) -> List[str]:
        if (overrides is None):
            overrides = []
        (_, caching_repo) = self._parse_overrides_and_create_caching_repo(config_name, overrides)
        return caching_repo.get_group_options(group_name, results_filter)

    def _compose_config_from_defaults_list(self, defaults: List[ResultDefault], repo: IConfigRepository) -> DictConfig:
        """Merge all defaults into a single DictConfig, then strip spent
        top-level defaults lists from the result."""
        cfg = OmegaConf.create()
        with flag_override(cfg, 'no_deepcopy_set_nodes', True):
            for default in defaults:
                loaded = self._load_single_config(default=default, repo=repo)
                try:
                    cfg.merge_with(loaded.config)
                except OmegaConfBaseException as e:
                    raise ConfigCompositionException(f'''In '{default.config_path}': {type(e).__name__} raised while composing config:
{e}''').with_traceback(sys.exc_info()[2])

        def strip_defaults(cfg: Any) -> None:
            # Recursively drop 'defaults' nodes flagged for removal.
            if isinstance(cfg, DictConfig):
                if (cfg._is_missing() or cfg._is_none()):
                    return
                with flag_override(cfg, ['readonly', 'struct'], False):
                    if cfg._get_flag('HYDRA_REMOVE_TOP_LEVEL_DEFAULTS'):
                        cfg._set_flag('HYDRA_REMOVE_TOP_LEVEL_DEFAULTS', None)
                        cfg.pop('defaults', None)
                for (_key, value) in cfg.items_ex(resolve=False):
                    strip_defaults(value)
        strip_defaults(cfg)
        return cfg

    def get_sources(self) -> List[ConfigSource]:
        return self.repository.get_sources()

    def compute_defaults_list(self, config_name: Optional[str], overrides: List[str], run_mode: RunMode) -> DefaultsList:
        """Return the defaults list for the given config and overrides."""
        (parsed_overrides, caching_repo) = self._parse_overrides_and_create_caching_repo(config_name, overrides)
        defaults_list = create_defaults_list(repo=caching_repo, config_name=config_name, overrides_list=parsed_overrides, prepend_hydra=True, skip_missing=(run_mode == RunMode.MULTIRUN))
        return defaults_list
class AbstractSerializer():
    """Base interface for serializers.

    Subclasses must implement ``decode`` and ``encode``; ``extensions``
    reports the file extensions this serializer handles.
    """

    def __init__(self, extensions=None):
        super().__init__()
        # Defensive copy: callers cannot mutate our list after construction.
        self._extensions = list(extensions) if extensions else []

    def decode(self, s, **kwargs):
        """Deserialize ``s``.  Must be overridden by subclasses."""
        raise NotImplementedError()

    def encode(self, d, **kwargs):
        """Serialize ``d``.  Must be overridden by subclasses."""
        raise NotImplementedError()

    def extensions(self):
        """Return a copy of the handled-extensions list."""
        return list(self._extensions)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.