code stringlengths 281 23.7M |
|---|
def test_appending_records_different_schema_works_2(tmpdir):
    """Appending under a different schema must still work.

    Writes one record using a schema with a named nested subrecord, then
    reopens the file in append mode with a *different* schema; fastavro is
    expected to ignore the passed schema and reuse the one already embedded
    in the file header, so the appended record matches the original schema.

    Fix: the original block had a stray lone quote where the docstring
    belonged, which is a syntax error.
    """
    schema = {
        'type': 'record',
        'name': 'test_appending_records_different_schema_fails',
        'fields': [
            {'name': 'field', 'type': 'string'},
            {'name': 'field2', 'type': {
                'type': 'record',
                'name': 'subrecord',
                'fields': [{'name': 'subfield', 'type': 'string'}],
            }},
            # 'subrecord' is referenced by name, so field3 reuses field2's type.
            {'name': 'field3', 'type': 'subrecord'},
        ],
    }
    test_file = str(tmpdir.join('test.avro'))
    with open(test_file, 'wb') as new_file:
        fastavro.writer(new_file, schema, [{'field': 'foo', 'field2': {'subfield': 'foo2'}, 'field3': {'subfield': 'foo3'}}])
    different_schema = {'type': 'record', 'name': 'test_appending_records', 'fields': [{'name': 'field', 'type': 'int'}]}
    # The appended record is shaped for the ORIGINAL schema on purpose: the
    # writer must pick up the schema stored in the existing file.
    with open(test_file, 'a+b') as new_file:
        fastavro.writer(new_file, different_schema, [{'field': 'bar', 'field2': {'subfield': 'bar2'}, 'field3': {'subfield': 'bar3'}}])
class TeachAction(UserAction):
    """User action: reforge the associated card, then give a chosen card to
    the target via TeachTargetEffect. Limited to once per turn through the
    'teach_used' turn-tag on the source player."""

    # Framework flags: the chosen card is not revealed to other players, and
    # it may be selected from any card area.
    no_reveal = True
    card_usage = 'any'

    def apply_action(self):
        (src, tgt) = (self.source, self.target)
        cl = VirtualCard.unwrap([self.associated_card])
        assert (len(cl) == 1)
        g = self.game
        # Mark the once-per-turn usage before any sub-action resolves.
        ttags(src)['teach_used'] = True
        g.process_action(Reforge(src, src, cl[0]))
        # Ask the player to pick a card to give; if they decline, pick one at
        # random from their hand / shown cards / equipment.
        cl = user_choose_cards(self, src, ('cards', 'showncards', 'equips'))
        c = (cl[0] if cl else random_choose_card(g, [src.cards, src.showncards, src.equips]))
        if (not c):
            return False
        g.process_action(TeachTargetEffect(src, tgt, c))
        return True

    def cond(self, cl):
        # Exactly one card, and it must be a physical (non-virtual) card.
        return ((len(cl) == 1) and (not cl[0].is_card(VirtualCard)))

    def is_valid(self):
        # Only valid while the per-turn usage tag has not been set.
        src = self.source
        return (not ttags(src)['teach_used'])
class Recipient(BaseObject):
    """A survey recipient with lazy, API-backed related objects.

    The ``*_at`` attributes hold raw timestamp strings; the ``created``,
    ``delivered`` and ``updated`` properties parse them with dateutil on
    access. The ``*_id`` attributes are resolved to full objects through the
    attached ``api`` client by the ``delivery``, ``survey`` and ``user``
    properties.

    Fix: the duplicated method names in the original were property
    getter/setter pairs whose ``@property``/``.setter`` decorators had been
    lost, so each setter silently shadowed its getter.
    """

    def __init__(self, api=None, created_at=None, delivered_at=None, delivery_id=None, id=None, survey_id=None, survey_name=None, updated_at=None, user_email=None, user_id=None, user_name=None, **kwargs):
        self.api = api
        self.created_at = created_at
        self.delivered_at = delivered_at
        self.delivery_id = delivery_id
        self.id = id
        self.survey_id = survey_id
        self.survey_name = survey_name
        self.updated_at = updated_at
        self.user_email = user_email
        self.user_id = user_id
        self.user_name = user_name
        for (key, value) in kwargs.items():
            setattr(self, key, value)
        # Attributes never supplied should not be tracked as dirty.
        for key in self.to_dict():
            if (getattr(self, key) is None):
                try:
                    self._dirty_attributes.remove(key)
                except KeyError:
                    continue

    @property
    def created(self):
        """Creation time as a datetime, or None when unset."""
        if self.created_at:
            return dateutil.parser.parse(self.created_at)

    @created.setter
    def created(self, created):
        if created:
            self.created_at = created

    @property
    def delivered(self):
        """Delivery time as a datetime, or None when unset."""
        if self.delivered_at:
            return dateutil.parser.parse(self.delivered_at)

    @delivered.setter
    def delivered(self, delivered):
        if delivered:
            self.delivered_at = delivered

    @property
    def delivery(self):
        """The related Delivery object, fetched through the API client."""
        if (self.api and self.delivery_id):
            return self.api._get_delivery(self.delivery_id)

    @delivery.setter
    def delivery(self, delivery):
        if delivery:
            self.delivery_id = delivery.id
            self._delivery = delivery

    @property
    def survey(self):
        """The related Survey object, fetched through the API client."""
        if (self.api and self.survey_id):
            return self.api._get_survey(self.survey_id)

    @survey.setter
    def survey(self, survey):
        if survey:
            self.survey_id = survey.id
            self._survey = survey

    @property
    def updated(self):
        """Last-update time as a datetime, or None when unset."""
        if self.updated_at:
            return dateutil.parser.parse(self.updated_at)

    @updated.setter
    def updated(self, updated):
        if updated:
            self.updated_at = updated

    @property
    def user(self):
        """The related User object, fetched through the API client."""
        if (self.api and self.user_id):
            return self.api._get_user(self.user_id)

    @user.setter
    def user(self, user):
        if user:
            self.user_id = user.id
            self._user = user
class Resample_config():
    """Configuration for an audio resampling step (input/output dirs + rate)."""

    def __init__(self, in_dir: str, out_dir: str, sampling_rate: int=44100):
        self.sampling_rate: int = sampling_rate  # target sample rate in Hz
        self.in_dir: str = in_dir                # source audio directory
        self.out_dir: str = out_dir              # destination directory

    @classmethod
    def from_dict(cls, dataset_path: str, data: Dict[(str, any)]):
        """Build a config from a dict, resolving dirs relative to dataset_path.

        Fix: this alternate constructor takes ``cls`` but was missing its
        ``@classmethod`` decorator, so it could not be called on the class.
        Note: mutates ``data`` in place (paths are rewritten).
        """
        data['in_dir'] = os.path.join(dataset_path, data['in_dir'])
        data['out_dir'] = os.path.join(dataset_path, data['out_dir'])
        return cls(**data)
@dataclass
class PackageMetadata():
    """Metadata for one measured package: name, version, and measurements.

    Fix: the class uses bare field annotations and ``field(default_factory=…)``
    but was missing its ``@dataclass`` decorator, without which the
    ``field(...)`` default is a plain class attribute and no ``__init__`` is
    generated.
    """

    package_name: str
    version: str
    # Maps each MeasurementType to its recorded value (serialized via .value).
    measurements: Dict[(MeasurementType, str)] = field(default_factory=dict)

    def to_dict(self) -> Dict[(str, Any)]:
        """Return a JSON-friendly dict; enum keys are flattened to their values."""
        return {'package_name': self.package_name, 'version': self.version, 'measurements': {k.value: v for (k, v) in self.measurements.items()}}
def parse_accesses(s, definitions, byte_order='be', silent=False):
    """Turn a list of 'REG' / 'REG=VALUE' strings into grouped register accesses.

    A spec without '=' is a read; with '=' it is a write of VALUE. Specs that
    are not plain register names are treated as glob patterns matched against
    the known register definitions.
    """
    accesses = []

    def _record(parsed):
        # Helper: keep only successfully parsed accesses.
        if parsed:
            accesses.append(parsed)

    for spec in s:
        parts = spec.split('=')
        if (len(parts) == 1):
            register, value, write = parts[0], None, False
        else:
            (register, value) = parts
            write = True
        if re.fullmatch(REGISTER_RE, register):
            # Literal register name/address: parse it directly.
            _record(parse_access(register, None, write, value, byte_order, silent))
        else:
            # Glob pattern: expand against every known register definition.
            pattern = re.compile(fnmatch.translate(register))
            for (name, definition) in definitions.registers.items():
                if pattern.match(name):
                    _record(parse_access(definition, name, write, value, byte_order, silent))
    return group_accesses(accesses)
class HierarchicalFilter():
    """Builds query strings from hierarchical require/exclude code lists,
    where one code is the 'parent' of another when it is a strict prefix.

    Fix: ``_query_string``/``_has_no_parents`` take ``cls`` and
    ``code_is_parent_of``/``node`` take no instance at all, but all four were
    missing their ``@classmethod``/``@staticmethod`` decorators.
    """

    @classmethod
    def _query_string(cls, require, exclude) -> str:
        """Combine top-level require/exclude nodes into one query string."""
        # Only codes without a parent elsewhere in the combined list become
        # top-level nodes; descendants are handled inside their parent's node.
        positive_nodes = [cls.node(code, True, require, exclude) for code in require if cls._has_no_parents(code, (require + exclude))]
        negative_nodes = [cls.node(code, False, require, exclude) for code in exclude if cls._has_no_parents(code, (require + exclude))]
        positive_query = ' OR '.join([node.get_query() for node in positive_nodes if (node.code not in [neg_node.code for neg_node in negative_nodes])])
        negative_query = ' OR '.join([node.get_query() for node in negative_nodes if ((node.children or (not positive_nodes)) and (node.code not in [pos_node.code for pos_node in positive_nodes]))])
        if (positive_query and negative_query):
            return f'({positive_query}) OR ({negative_query})'
        if ((not positive_query) and (not negative_query)):
            # Nothing matches at all: a query guaranteed to return no results.
            return 'NOT *'
        else:
            # Exactly one of the two is non-empty, so plain concatenation is safe.
            return (positive_query + negative_query)

    @classmethod
    def _has_no_parents(cls, code, other_codes):
        """True when no code in other_codes is a strict ancestor of code."""
        return (not len([match for match in other_codes if cls.code_is_parent_of(match, code)]))

    @staticmethod
    def code_is_parent_of(code, other):
        """True when code is a strict prefix of other (hierarchical parent)."""
        return ((other[:len(code)] == code) and (len(code) < len(other)))

    @staticmethod
    def node(code, positive, positive_codes, negative_codes):
        # Stub: concrete subclasses supply the node factory.
        pass
def extractFotranslatesBlogspotCom(item):
    """Feed parser for fotranslates.blogspot.com release items.

    Returns a release message for recognized tags, None for non-chapter or
    preview posts, and False when no tag matches.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if ((not (chp or vol)) or ('preview' in item['title'].lower())):
        return None
    tagmap = [
        ('Apartment for Rent', 'Apartment for Rent', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for (tagname, name, tl_type) in tagmap:
        if (tagname not in item['tags']):
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def extractWwwBeanylandCom(item):
    """Feed parser for www.beanyland.com release items.

    Returns a release message for recognized tags, None for non-chapter or
    preview posts, and False when no tag matches.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if ((not (chp or vol)) or ('preview' in item['title'].lower())):
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for (tagname, name, tl_type) in tagmap:
        if (tagname not in item['tags']):
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def extractKnightsandmagicWordpressCom(item):
    """Feed parser for knightsandmagic.wordpress.com release items.

    Returns a release message for recognized tags, None for non-chapter or
    preview posts, and False when no tag matches.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if ((not (chp or vol)) or ('preview' in item['title'].lower())):
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for (tagname, name, tl_type) in tagmap:
        if (tagname not in item['tags']):
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class KDTrainer(Trainer):
    """HuggingFace Trainer variant for knowledge distillation (KD).

    Assumes `model` carries a `model.teacher` sub-module and a
    `model.kd_loss_scale` float: the training loss is the KL divergence
    between student and teacher logits scaled by `kd_loss_scale`.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def ce_loss(self, size_average, student_logits, teacher_logits):
        # KL(student || teacher) over the last logits axis (dim=2), averaged
        # per batch element. NOTE: `size_average` is accepted but unused here.
        model_output_log_prob = F.log_softmax(student_logits, dim=2)
        real_output_soft = F.softmax(teacher_logits, dim=2)
        loss = F.kl_div(model_output_log_prob, real_output_soft, reduction='batchmean')
        return loss

    def mse_loss(self, student_logits, teacher_logits):
        # Alternative distillation objective (not used by compute_loss_train).
        return mse_loss(student_logits, teacher_logits)

    def compute_loss_train(self, model, inputs, return_outputs=False):
        """Distillation loss: kd_loss_scale * KL(student || teacher) logits."""
        # Teacher forward pass only provides targets; no gradients needed.
        with torch.no_grad():
            teacher_outputs = model.teacher(**inputs)
            teacher_logits = teacher_outputs.get('logits')
            # Drop everything but the logits early to limit peak memory.
            del teacher_outputs
        student_outputs = model(**inputs)
        student_logits = student_outputs.get('logits')
        if (not return_outputs):
            # Caller does not need the full outputs; free activations early.
            del student_outputs
        kd_loss = 0.0
        size_average = True
        if (model.kd_loss_scale > 0.0):
            kd_loss = self.ce_loss(size_average, student_logits, teacher_logits)
        del teacher_logits
        del student_logits
        tok_loss = (model.kd_loss_scale * kd_loss)
        return ((tok_loss, student_outputs) if return_outputs else tok_loss)

    def training_step(self, model: nn.Module, inputs: Dict[(str, Union[(torch.Tensor, Any)])]) -> torch.Tensor:
        """One optimization step with the KD loss (mirrors Trainer.training_step)."""
        model.train()
        inputs = self._prepare_inputs(inputs)
        with self.compute_loss_context_manager():
            loss = self.compute_loss_train(model, inputs)
        if (self.args.n_gpu > 1):
            # DataParallel returns one loss per GPU; reduce to a scalar.
            loss = loss.mean()
        if ((self.args.gradient_accumulation_steps > 1) and (not self.deepspeed)):
            # DeepSpeed handles accumulation scaling internally.
            loss = (loss / self.args.gradient_accumulation_steps)
        if self.do_grad_scaling:
            self.scaler.scale(loss).backward()
        elif self.use_apex:
            with amp.scale_loss(loss, self.optimizer) as scaled_loss:
                scaled_loss.backward()
        elif self.deepspeed:
            # DeepSpeed's backward returns the (possibly scaled) loss.
            loss = self.deepspeed.backward(loss)
        else:
            loss.backward()
        return loss.detach()

    def _wrap_model(self, model, training=True, dataloader=None):
        """Wrap the model for the configured distributed/parallel setup.

        Largely mirrors transformers.Trainer._wrap_model; the notable KD
        difference is that `model.teacher` is excluded from FSDP sharding via
        `ignored_modules` below.
        """
        if self.args.use_ipex:
            dtype = (torch.bfloat16 if self.use_cpu_amp else torch.float32)
            model = self.ipex_optimize_model(model, training, dtype=dtype)
        # Already wrapped (e.g. by a previous call): return as-is.
        if (unwrap_model(model) is not model):
            return model
        if (self.use_apex and training):
            (model, self.optimizer) = amp.initialize(model, self.optimizer, opt_level=self.args.fp16_opt_level)
        if ((self.args.n_gpu > 1) and (not getattr(model, 'is_loaded_in_8bit', False))):
            model = nn.DataParallel(model)
        if (self.sharded_ddp is not None):
            # fairscale sharded DDP path.
            if (self.sharded_ddp == ShardedDDPOption.SIMPLE):
                model = ShardedDDP(model, self.optimizer)
            else:
                mixed_precision = (self.args.fp16 or self.args.bf16)
                cpu_offload = (ShardedDDPOption.OFFLOAD in self.args.sharded_ddp)
                zero_3 = (self.sharded_ddp == ShardedDDPOption.ZERO_DP_3)
                if (ShardedDDPOption.AUTO_WRAP in self.args.sharded_ddp):
                    model = auto_wrap(model)
                self.model = model = FullyShardedDDP(model, mixed_precision=mixed_precision, reshard_after_forward=zero_3, cpu_offload=cpu_offload).to(self.args.device)
        elif (self.fsdp is not None):
            # PyTorch-native FSDP path (non-XLA only).
            if (not self.args.fsdp_config['xla']):
                from torch.distributed.fsdp.fully_sharded_data_parallel import CPUOffload, FullyShardedDataParallel as FSDP, MixedPrecision
                from torch.distributed.fsdp.wrap import size_based_auto_wrap_policy, transformer_auto_wrap_policy
                if (FSDPOption.OFFLOAD in self.args.fsdp):
                    cpu_offload = CPUOffload(offload_params=True)
                else:
                    cpu_offload = CPUOffload(offload_params=False)
                auto_wrap_policy = None
                if (FSDPOption.AUTO_WRAP in self.args.fsdp):
                    # Wrap either by parameter count or by transformer layer class.
                    if (self.args.fsdp_config['fsdp_min_num_params'] > 0):
                        auto_wrap_policy = functools.partial(size_based_auto_wrap_policy, min_num_params=self.args.fsdp_config['fsdp_min_num_params'])
                    elif (self.args.fsdp_config.get('fsdp_transformer_layer_cls_to_wrap', None) is not None):
                        transformer_cls_to_wrap = set()
                        for layer_class in self.args.fsdp_config['fsdp_transformer_layer_cls_to_wrap']:
                            transformer_cls = get_module_class_from_name(model, layer_class)
                            if (transformer_cls is None):
                                raise Exception('Could not find the transformer layer class to wrap in the model.')
                            else:
                                transformer_cls_to_wrap.add(transformer_cls)
                        auto_wrap_policy = functools.partial(transformer_auto_wrap_policy, transformer_layer_cls=transformer_cls_to_wrap)
                mixed_precision_policy = None
                dtype = None
                if self.args.fp16:
                    dtype = torch.float16
                elif self.args.bf16:
                    dtype = torch.bfloat16
                if (dtype is not None):
                    mixed_precision_policy = MixedPrecision(param_dtype=dtype, reduce_dtype=dtype, buffer_dtype=dtype)
                if (type(model) != FSDP):
                    # Forward only the FSDP kwargs this torch version supports.
                    signature = inspect.signature(FSDP.__init__).parameters.keys()
                    kwargs = {}
                    for arg in ['limit_all_gathers', 'forward_prefetch', 'backward_prefetch']:
                        if (arg in signature):
                            kwargs[arg] = getattr(self, arg)
                    kwargs['limit_all_gathers'] = True
                    # KD-specific: keep the frozen teacher out of sharding.
                    self.model = model = FSDP(model, sharding_strategy=self.fsdp, cpu_offload=cpu_offload, auto_wrap_policy=auto_wrap_policy, mixed_precision=mixed_precision_policy, device_id=self.args.device, ignored_modules=(None if (getattr(model, 'teacher', None) is None) else [model.teacher]), **kwargs)
        elif (self.args.local_rank != (- 1)):
            # Plain DistributedDataParallel path.
            kwargs = {}
            if (self.args.ddp_find_unused_parameters is not None):
                kwargs['find_unused_parameters'] = self.args.ddp_find_unused_parameters
            elif isinstance(model, PreTrainedModel):
                # Gradient checkpointing is incompatible with unused-parameter search.
                kwargs['find_unused_parameters'] = (not model.is_gradient_checkpointing)
            else:
                kwargs['find_unused_parameters'] = True
            if (self.args.ddp_bucket_cap_mb is not None):
                kwargs['bucket_cap_mb'] = self.args.ddp_bucket_cap_mb
            if is_torch_neuroncore_available():
                return model
            model = nn.parallel.DistributedDataParallel(model, device_ids=([self.args.local_rank] if (self.args._n_gpu != 0) else None), output_device=(self.args.local_rank if (self.args._n_gpu != 0) else None), **kwargs)
        if self.args.torch_compile:
            model = torch.compile(model, backend=self.args.torch_compile_backend, mode=self.args.torch_compile_mode)
        return model
class TraceDiff():
    """Compare two labeled traces by operator counts and total durations.

    Fix: every method takes ``cls`` (and ``ops_diff`` even calls
    ``cls.compare_traces``) but the ``@classmethod`` decorators were missing.
    ``logger.warn`` was also replaced with the non-deprecated
    ``logger.warning``.
    """

    @classmethod
    def compare_traces(cls, control: Union[(LabeledTrace, TraceDir)], test: Union[(LabeledTrace, TraceDir)], control_rank: Optional[Union[(int, List[int])]]=None, test_rank: Optional[Union[(int, List[int])]]=None, control_iteration: Optional[Union[(int, List[int])]]=None, test_iteration: Optional[Union[(int, List[int])]]=None, device_type: DeviceType=DeviceType.ALL, use_short_name: bool=False) -> pd.DataFrame:
        """Return a per-op DataFrame of counts/durations plus diff columns."""
        control_trace = _trace_argument_adapter(control, 'Control')
        test_trace = _trace_argument_adapter(test, 'Test')
        logger.info(f'comparing traces: {control_trace.label} and {test_trace.label}')
        if (control_trace.label == test_trace.label):
            # NOTE(review): the first assignment is immediately overwritten by
            # the second; the intent may have been to suffix the *control*
            # trace with '_control' instead — confirm against upstream before
            # changing. Behavior preserved as-is.
            test_trace.label = f'{test_trace.label}_control'
            test_trace.label = f'{test_trace.label}_test'
            logger.warning(f"The two traces have the same label. change test_trace's label to {test_trace.label}")
        control_label = control_trace.label
        test_label = test_trace.label
        col_name = ('short_name' if use_short_name else 'name')
        control_trace_summary = control_trace.get_ops_summary(control_trace.extract_ops(control_rank, control_iteration, device_type)).groupby(col_name)[['counts', 'total_duration']].sum()
        test_trace_summary = test_trace.get_ops_summary(test_trace.extract_ops(test_rank, test_iteration, device_type)).groupby(col_name)[['counts', 'total_duration']].sum()
        # Outer join so ops present in only one trace survive (missing side -> 0).
        comp = pd.concat([control_trace_summary, test_trace_summary], axis=1, join='outer', keys=[control_label, test_label])
        comp.fillna(0, inplace=True)
        flatten_column_names(comp)
        comp['diff_counts'] = (comp[f'{test_label}_counts'] - comp[f'{control_label}_counts'])
        comp['diff_duration'] = (comp[f'{test_label}_total_duration'] - comp[f'{control_label}_total_duration'])
        comp['counts_change_categories'] = comp['diff_counts'].apply((lambda c: ('+' if (c > 0) else ('-' if (c < 0) else '='))))
        return comp

    @classmethod
    def ops_diff(cls, control: Union[(LabeledTrace, TraceDir)], test: Union[(LabeledTrace, TraceDir)], control_rank: Optional[Union[(int, List[int])]]=None, test_rank: Optional[Union[(int, List[int])]]=None, control_iteration: Optional[Union[(int, List[int])]]=None, test_iteration: Optional[Union[(int, List[int])]]=None, device_type: DeviceType=DeviceType.ALL) -> Dict[(str, List[str])]:
        """Classify ops as added/deleted/increased/decreased/unchanged."""
        control_trace = _trace_argument_adapter(control, 'Control')
        test_trace = _trace_argument_adapter(test, 'Test')
        df = cls.compare_traces(control_trace, test_trace, control_rank, test_rank, control_iteration, test_iteration, device_type)
        col_control = f'{control_trace.label}_counts'
        col_test = f'{test_trace.label}_counts'
        col_diff = 'diff_counts'
        return {'added': df.loc[(df[col_control].eq(0) & df[col_test].gt(0))].index.tolist(), 'deleted': df.loc[(df[col_control].gt(0) & df[col_test].eq(0))].index.tolist(), 'increased': df.loc[(df[col_control].gt(0) & df[col_diff].gt(0))].index.tolist(), 'decreased': df.loc[(df[col_test].gt(0) & df[col_diff].lt(0))].index.tolist(), 'unchanged': df.loc[(df[col_test].gt(0) & df[col_diff].eq(0))].index.tolist()}

    @classmethod
    def visualize_counts_diff(cls, df: pd.DataFrame, show_image: bool=True, export_image_path: Optional[str]=None) -> None:
        """Grouped bar chart of per-op counts for both traces plus the diff."""
        labels: List[str] = [col.replace('_total_duration', '') for col in df.columns if col.endswith('_total_duration')]
        assert (len(labels) >= 2)
        fig = go.Figure(data=[go.Bar(name=labels[0], x=df.index, y=df[f'{labels[0]}_counts']), go.Bar(name=labels[1], x=df.index, y=df[f'{labels[1]}_counts']), go.Bar(name='Difference', x=df.index, y=df['diff_counts'])])
        fig.update_layout(barmode='group', title_text='Ops Count Comparison')
        if show_image:
            fig.show()
        if export_image_path:
            fig.write_image(export_image_path)

    @classmethod
    def visualize_duration_diff(cls, df: pd.DataFrame, show_image: bool=True, export_image_path: Optional[str]=None) -> None:
        """Grouped bar chart of per-op total durations plus the diff."""
        labels: List[str] = [col.replace('_total_duration', '') for col in df.columns if col.endswith('_total_duration')]
        assert (len(labels) >= 2)
        fig = go.Figure(data=[go.Bar(name=labels[0], x=df.index, y=df[f'{labels[0]}_total_duration']), go.Bar(name=labels[1], x=df.index, y=df[f'{labels[1]}_total_duration']), go.Bar(name='Difference', x=df.index, y=df['diff_duration'])])
        fig.update_layout(barmode='group', title_text='Ops Duration Comparison', yaxis_title='Total Duration (us)', autosize=True, width=1000, height=1000)
        if show_image:
            fig.show()
        if export_image_path:
            fig.write_image(export_image_path)
class TrainingState(NamedTuple):
    """Immutable bundle of all learner state carried between update steps."""

    policy_params: networks_lib.Params
    critic_params: networks_lib.Params
    # Slowly-updated copy of the critic, used for bootstrap targets.
    critic_target_params: networks_lib.Params
    policy_optimizer_state: optax.OptState
    critic_optimizer_state: optax.OptState
    alpha_optimizer_state: optax.OptState
    # Entropy-temperature parameter(s).
    alpha_params: jnp.ndarray
    # Optional secondary temperature — presumably a CQL-style alpha_prime;
    # confirm against the surrounding agent implementation.
    alpha_prime_optimizer_state: Optional[optax.OptState]
    alpha_prime_params: Optional[jnp.ndarray]
    key: networks_lib.PRNGKey
    steps: int
def extractNubi96WordpressCom(item):
    """Feed parser for nubi96.wordpress.com release items.

    Returns a release message for the 'WATTT' tag, None for non-chapter or
    preview posts, and False when the tag is absent.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if ((not (chp or vol)) or ('preview' in item['title'].lower())):
        return None
    if ('WATTT' not in item['tags']):
        return False
    return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
def srmr(signal, sample_rate: int=16000, n_cochlear_filters: int=23, low_freq: int=125):
    """Compute SRMR for a 1-D signal, or independently over all leading axes.

    For inputs with ndim >= 2 the last axis is treated as time and a score is
    computed per leading index; the result has shape signal.shape[:-1].
    """
    signal = np.asarray(signal)
    if (signal.ndim == 1):
        return SRMR(signal, sample_rate=sample_rate, n=n_cochlear_filters, low_freq=low_freq)
    if (signal.ndim >= 2):
        batch_shape = signal.shape[:(- 1)]
        # Sanity check: leading (batch/channel) axes must be small, which
        # guards against accidentally passing time-major data.
        for (axis, extent) in enumerate(batch_shape):
            assert (extent < 30), (axis, signal.shape)
        scores = [
            SRMR(signal[index], sample_rate=sample_rate, n=n_cochlear_filters, low_freq=low_freq)
            for index in np.ndindex(*batch_shape)
        ]
        return np.array(scores).reshape(batch_shape)
    raise NotImplementedError(signal.ndim)
class OptionSeriesTimelineSonificationDefaultspeechoptionsPointgrouping(Options):
    """Point-grouping options for timeline-series sonification speech tracks.

    Fix: each duplicated method name was a property getter/setter pair whose
    ``@property``/``.setter`` decorators had been lost, so every setter
    silently shadowed its getter.
    """

    @property
    def algorithm(self):
        # Grouping algorithm; stored default is 'last'.
        return self._config_get('last')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
class StrictFormatter(Formatter):
    """A string.Formatter that forbids positional args and extra keyword args."""

    def check_unused_args(self, used_args: Sequence[Union[(int, str)]], args: Sequence, kwargs: Mapping[(str, Any)]) -> None:
        """Raise KeyError listing any keyword arguments the template ignored."""
        unused = {key for key in kwargs if key not in used_args}
        if unused:
            raise KeyError(unused)

    def vformat(self, format_string: str, args: Sequence, kwargs: Mapping[(str, Any)]) -> str:
        """Format, rejecting any positional arguments outright."""
        if args:
            raise ValueError('No arguments should be provided, everything should be passed as keyword arguments.')
        return super().vformat(format_string, args, kwargs)

    def validate_input_variables(self, format_string: str, input_variables: List[str]) -> None:
        """Check the template renders given exactly these variable names."""
        placeholder_values = dict.fromkeys(input_variables, 'foo')
        super().format(format_string, **placeholder_values)
class OptionPlotoptionsWordcloudSonificationDefaultinstrumentoptionsMappingTime(Options):
    """Time-mapping options for wordcloud sonification instrument tracks.

    Fix: each duplicated method name was a property getter/setter pair whose
    ``@property``/``.setter`` decorators had been lost.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class PlotmanConfig():
    """Top-level plotman configuration aggregate.

    NOTE(review): fields use ``attr.ib(factory=...)`` defaults, so this class
    was almost certainly decorated with ``@attr.frozen``/``@attr.s`` in the
    original source — confirm before relying on the generated __init__.
    NOTE(review): ``version: List[int] = [0]`` is a mutable class-level
    default; attrs would convert it per-instance, a plain class would share it.
    """

    directories: Directories
    scheduling: Scheduling
    plotting: Plotting
    commands: Commands = attr.ib(factory=Commands)
    logging: Logging = Logging()
    archiving: Optional[Archiving] = None
    user_interface: UserInterface = attr.ib(factory=UserInterface)
    version: List[int] = [0]

    def setup(self) -> Generator[(None, None, None)]:
        """Validate plotter options, set up logging, and yield inside a temp dir.

        NOTE(review): this is a generator used as a context manager upstream;
        it was presumably decorated with ``@contextlib.contextmanager`` —
        confirm before calling directly.
        """
        # Each plotter type requires its matching options sub-config to exist.
        if (self.plotting.type == 'chia'):
            if (self.plotting.chia is None):
                message = ('internal plotman error, please report the full traceback and your' + ' full configuration file')
                raise Exception(message)
            plotman.plotters.chianetwork.check_configuration(options=self.plotting.chia, pool_contract_address=self.plotting.pool_contract_address)
        elif (self.plotting.type == 'madmax'):
            if (self.plotting.madmax is None):
                message = ('internal plotman error, please report the full traceback and your' + ' full configuration file')
                raise Exception(message)
            plotman.plotters.madmax.check_configuration(options=self.plotting.madmax, pool_contract_address=self.plotting.pool_contract_address)
        elif (self.plotting.type == 'bladebit'):
            if (self.plotting.bladebit is None):
                message = ('internal plotman error, please report the full traceback and your' + ' full configuration file')
                raise Exception(message)
            plotman.plotters.bladebit.check_configuration(options=self.plotting.bladebit, pool_contract_address=self.plotting.pool_contract_address)
        prefix = f'plotman-pid_{os.getpid()}-'
        self.logging.setup()
        # Archival helper scripts live in a per-process temp dir for the
        # duration of the with-block.
        with tempfile.TemporaryDirectory(prefix=prefix) as temp:
            if (self.archiving is not None):
                self.archiving.maybe_create_scripts(temp=temp)
            (yield)
class BasicNet(nn.Module):
    """Three-layer MLP classifier with optional per-layer dropout.

    model_config must provide: ``activation`` ('relu', 'gelu' or 'tanh'),
    ``n_features``, ``hidden_sizes`` (at least three entries; the last is the
    number of classes), and ``dropout`` (None, or a sequence of probabilities
    of which the first two are used).
    """

    def __init__(self, model_config):
        super(BasicNet, self).__init__()
        self.act_fncs = {'relu': nn.ReLU, 'gelu': nn.GELU, 'tanh': nn.Tanh}
        self.use_act = self.act_fncs.get(model_config.activation)()
        self.layer_1 = nn.Linear(model_config.n_features, model_config.hidden_sizes[0])
        self.layer_2 = nn.Linear(model_config.hidden_sizes[0], model_config.hidden_sizes[1])
        self.layer_3 = nn.Linear(model_config.hidden_sizes[1], model_config.hidden_sizes[2])
        # Bug fix: Dropout modules kept in a plain Python list are invisible
        # to PyTorch's module registry, so model.eval()/model.train() never
        # reached them and dropout stayed active at inference time.
        # nn.ModuleList registers them while preserving len() and indexing.
        self.dropout = nn.ModuleList()
        if (model_config.dropout is not None):
            self.dropout = nn.ModuleList(nn.Dropout(val) for val in model_config.dropout)
        self.softmax = nn.Softmax(dim=1)

    def forward(self, x):
        """Forward pass; returns class probabilities (softmax over dim=1)."""
        x = self.layer_1(x)
        x = self.use_act(x)
        if (len(self.dropout) != 0):
            x = self.dropout[0](x)
        x = self.layer_2(x)
        x = self.use_act(x)
        if (len(self.dropout) != 0):
            x = self.dropout[1](x)
        x = self.layer_3(x)
        output = self.softmax(x)
        return output
class OptionPlotoptionsBubbleSonificationContexttracksMappingPitch(Options):
    """Pitch-mapping options for bubble-chart sonification context tracks.

    Fix: each duplicated method name was a property getter/setter pair whose
    ``@property``/``.setter`` decorators had been lost.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        # Data property to map pitch to; defaults to 'y'.
        return self._config_get('y')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        # Highest note; defaults to 'c6'.
        return self._config_get('c6')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        # Lowest note; defaults to 'c2'.
        return self._config_get('c2')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def scale(self):
        return self._config_get(None)

    @scale.setter
    def scale(self, value: Any):
        self._config(value, js_type=False)

    @property
    def within(self):
        return self._config_get('yAxis')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
def selectstr(sstr):
    """Interactively assemble a search string from word tokens of `sstr`.

    Splits the input on whitespace/punctuation, then repeatedly shows a Kodi
    select dialog: entry 0 previews the string built so far, later entries are
    the remaining tokens. Cancelling (-1) discards the result; selecting the
    preview entry (0) accepts it.
    """
    tokens = re.split('[\\s\\u0021-\\u002C\\u002E-\\u002F\\u003A-\\u0040\\u005B-\\u0060\\u007B-\\u007E\\uFF01-\\uFF0F\\uFF1A-\\uFF20\\uFF5B-\\uFF65]+', sstr)
    picked = ''
    dialog = xbmcgui.Dialog()
    choice = 999
    while (choice > 0):
        menu = ([(':' + colorize_label(picked, color='FFFF00'))] + tokens)
        choice = dialog.select('', menu)
        if (choice > 0):
            # Append the chosen token and remove it from the remaining list.
            picked = ((picked + ' ') + tokens[(choice - 1)]).strip()
            tokens.pop((choice - 1))
    if (choice == (- 1)):
        picked = ''
    return picked
class TestRISCVSupport(unittest.TestCase):
    """Smoke tests for pyelftools' RISC-V support against a bundled binary."""

    def test_hello(self):
        """Basic ELF header fields of the RISC-V test binary parse correctly."""
        with open(os.path.join('test', 'testfiles_for_unittests', 'simple_gcc.elf.riscv'), 'rb') as f:
            elf = ELFFile(f)
            self.assertEqual(elf.get_machine_arch(), 'RISC-V')
            # Fixed values baked into the checked-in test binary.
            self.assertEqual(elf['e_entry'], 65814)
            self.assertEqual(elf.num_sections(), 13)
            self.assertEqual(elf.num_segments(), 3)

    def test_build_attributes(self):
        """.riscv.attributes section parses into the expected attribute tree."""
        with open(os.path.join('test', 'testfiles_for_unittests', 'simple_gcc.elf.riscv'), 'rb') as f:
            elf = ELFFile(f)
            sec = elf.get_section_by_name('.riscv.attributes')
            self.assertEqual(sec['sh_type'], 'SHT_RISCV_ATTRIBUTES')
            self.assertEqual(sec.num_subsections, 1)
            subsec = sec.subsections[0]
            self.assertEqual(subsec.header['vendor_name'], 'riscv')
            self.assertEqual(subsec.num_subsubsections, 1)
            subsubsec = subsec.subsubsections[0]
            self.assertEqual(subsubsec.header.tag, 'TAG_FILE')
            # File-scope attributes: stack alignment and the full ISA string.
            for i in subsubsec.iter_attributes('TAG_STACK_ALIGN'):
                self.assertEqual(i.value, 16)
            for i in subsubsec.iter_attributes('TAG_ARCH'):
                self.assertEqual(i.value, 'rv64i2p0_m2p0_a2p0_f2p0_d2p0_c2p0_v1p0_zfh1p0_zfhmin1p0_zba1p0_zbb1p0_zbc1p0_zbs1p0_zve32f1p0_zve32x1p0_zve64d1p0_zve64f1p0_zve64x1p0_zvl128b1p0_zvl32b1p0_zvl64b1p0')
def caplog(caplog):
    """Wrap the built-in caplog so records from non-propagating loggers are seen.

    NOTE(review): this yields around the wrapped fixture, so it was almost
    certainly decorated with ``@pytest.fixture`` in the original test suite —
    confirm before use.
    """
    restore = []
    for logger in logging.Logger.manager.loggerDict.values():
        try:
            # Temporarily force propagation so caplog's root handler captures
            # these loggers' records; remember which ones we flipped.
            if (not logger.propagate):
                logger.propagate = True
                restore += [logger]
        except AttributeError:
            # loggerDict also contains PlaceHolder objects with no attributes.
            pass
    (yield caplog)
    # Restore the original propagate=False state after the test.
    for logger in restore:
        logger.propagate = False
@pytest.mark.parametrize('truncated_by', [1, 2, 3, 4])
def test_truncated_form(client, truncated_by):
    """A multipart body cut short by 1-4 bytes must yield a 400 with a clear error.

    Fix: the decorator line had been mangled to a bare ``.parametrize(...)``,
    which is a syntax error; restored to ``@pytest.mark.parametrize``.
    """
    resp = client.simulate_post('/submit', headers={'Content-Type': 'multipart/form-data; boundary=5b11af82ab65407ba8cdccf37d2a9c4f'}, body=EXAMPLE1[:(- truncated_by)])
    assert (resp.status_code == 400)
    assert (resp.json == {'description': 'unexpected form structure', 'title': 'Malformed multipart/form-data request media'})
class Container(Processor):
    """Abstract ordered container protocol: subclasses supply base_type,
    iterate, __len__, __contains__ and __getitem__.

    Fixes: ``base_type`` takes ``cls`` but was missing ``@classmethod``
    (``__eq__`` calls it via ``type(other).base_type()``); and ``__eq__``
    compared only the zipped common prefix, so containers of different
    lengths with an equal prefix wrongly compared equal — a length check
    was added (``__len__`` is part of the protocol).
    """

    @classmethod
    def base_type(cls):
        ...

    def iterate(self):
        ...

    def __len__(self):
        ...

    def __contains__(self, value):
        ...

    def __getitem__(self, idx):
        ...

    def __bool__(self):
        return (len(self) > 0)

    def __eq__(self, other):
        # Containers of different base types are not comparable.
        if (type(self).base_type() is not type(other).base_type()):
            return NotImplemented
        if (len(self) != len(other)):
            return False
        for (self_item, other_item) in zip(self.iterate(), other.iterate()):
            if (self_item != other_item):
                return False
        return True
class TestEcommerceItem(unittest.TestCase):
    """Tests for the 'Ecommerce Item' doctype: duplicate detection, sync
    checks, and ERPNext item lookup by integration code / variant / SKU."""

    def tearDown(self):
        # Remove every Ecommerce Item so each test starts from a clean slate.
        for d in frappe.get_list('Ecommerce Item'):
            frappe.get_doc('Ecommerce Item', d.name).delete()

    def test_duplicate(self):
        """Inserting the same integration item twice raises DuplicateEntryError."""
        self._create_doc()
        self.assertRaises(frappe.DuplicateEntryError, self._create_doc)

    def test_duplicate_variants(self):
        """Duplicate variant records are rejected too."""
        self._create_variant_doc()
        self.assertRaises(frappe.DuplicateEntryError, self._create_variant_doc)

    def test_duplicate_sku(self):
        """Duplicate SKU records are rejected too."""
        self._create_doc_with_sku()
        self.assertRaises(frappe.DuplicateEntryError, self._create_doc_with_sku)

    def test_is_synced(self):
        self._create_doc()
        self.assertTrue(ecommerce_item.is_synced('shopify', 'T-SHIRT'))
        self.assertFalse(ecommerce_item.is_synced('shopify', 'UNKNOWN ITEM'))

    def test_is_synced_variant(self):
        self._create_variant_doc()
        self.assertTrue(ecommerce_item.is_synced('shopify', 'T-SHIRT', 'T-SHIRT-RED'))
        self.assertFalse(ecommerce_item.is_synced('shopify', 'T-SHIRT', 'Unknown variant'))

    def test_is_synced_sku(self):
        self._create_doc_with_sku()
        self.assertTrue(ecommerce_item.is_synced('shopify', 'T-SHIRT', sku='TEST_ITEM_1'))
        self.assertFalse(ecommerce_item.is_synced('shopify', 'T-SHIRTX', sku='UNKNOWNSKU'))

    def test_get_erpnext_item(self):
        """Lookup by integration code returns the linked ERPNext Item."""
        self._create_doc()
        a = ecommerce_item.get_erpnext_item('shopify', 'T-SHIRT')
        b = frappe.get_doc('Item', '_Test Item')
        self.assertEqual(a.name, b.name)
        self.assertEqual(a.item_code, b.item_code)
        # Unknown codes resolve to None rather than raising.
        unknown = ecommerce_item.get_erpnext_item('shopify', 'Unknown item')
        self.assertEqual(unknown, None)

    def test_get_erpnext_item_variant(self):
        self._create_variant_doc()
        a = ecommerce_item.get_erpnext_item('shopify', 'T-SHIRT', 'T-SHIRT-RED')
        b = frappe.get_doc('Item', '_Test Item 2')
        self.assertEqual(a.name, b.name)
        self.assertEqual(a.item_code, b.item_code)

    def test_get_erpnext_item_sku(self):
        self._create_doc_with_sku()
        a = ecommerce_item.get_erpnext_item('shopify', 'T-SHIRT', sku='TEST_ITEM_1')
        b = frappe.get_doc('Item', '_Test Item')
        self.assertEqual(a.name, b.name)
        self.assertEqual(a.item_code, b.item_code)

    # --- fixture helpers -------------------------------------------------

    def _create_doc(self):
        # Plain integration item linked to '_Test Item'.
        frappe.get_doc({'doctype': 'Ecommerce Item', 'integration': 'shopify', 'integration_item_code': 'T-SHIRT', 'erpnext_item_code': '_Test Item'}).insert()

    def _create_variant_doc(self):
        # Variant record (variant_id + variant_of) linked to '_Test Item 2'.
        frappe.get_doc({'doctype': 'Ecommerce Item', 'integration': 'shopify', 'integration_item_code': 'T-SHIRT', 'erpnext_item_code': '_Test Item 2', 'has_variants': 0, 'variant_id': 'T-SHIRT-RED', 'variant_of': '_Test Variant Item'}).insert()

    def _create_doc_with_sku(self):
        # Item carrying an explicit SKU for sku-based lookups.
        frappe.get_doc({'doctype': 'Ecommerce Item', 'integration': 'shopify', 'integration_item_code': 'T-SHIRT', 'erpnext_item_code': '_Test Item', 'sku': 'TEST_ITEM_1'}).insert()
class Initialization():
    """Immutable description of a TAC game's initial and equilibrium state.

    Fix: the accessor methods were written as attribute-style properties
    (``_check_consistency`` calls e.g.
    ``self.agent_addr_to_currency_endowments.values()``, which fails on a
    bound method) but the ``@property`` decorators had been lost.
    """

    def __init__(self, agent_addr_to_currency_endowments: Dict[(Address, CurrencyEndowment)], agent_addr_to_exchange_params: Dict[(Address, ExchangeParams)], agent_addr_to_good_endowments: Dict[(Address, GoodEndowment)], agent_addr_to_utility_params: Dict[(Address, UtilityParams)], good_id_to_eq_prices: Dict[(GoodId, float)], agent_addr_to_eq_good_holdings: Dict[(Address, EquilibriumGoodHoldings)], agent_addr_to_eq_currency_holdings: Dict[(Address, EquilibriumCurrencyHoldings)]):
        """Store all endowment/param/equilibrium maps and validate consistency."""
        self._agent_addr_to_currency_endowments = agent_addr_to_currency_endowments
        self._agent_addr_to_exchange_params = agent_addr_to_exchange_params
        self._agent_addr_to_good_endowments = agent_addr_to_good_endowments
        self._agent_addr_to_utility_params = agent_addr_to_utility_params
        self._good_id_to_eq_prices = good_id_to_eq_prices
        self._agent_addr_to_eq_good_holdings = agent_addr_to_eq_good_holdings
        self._agent_addr_to_eq_currency_holdings = agent_addr_to_eq_currency_holdings
        self._check_consistency()

    @property
    def agent_addr_to_currency_endowments(self) -> Dict[(Address, CurrencyEndowment)]:
        """Map of agent address to its currency endowment."""
        return self._agent_addr_to_currency_endowments

    @property
    def agent_addr_to_exchange_params(self) -> Dict[(Address, ExchangeParams)]:
        """Map of agent address to its exchange parameters."""
        return self._agent_addr_to_exchange_params

    @property
    def agent_addr_to_good_endowments(self) -> Dict[(Address, GoodEndowment)]:
        """Map of agent address to its good endowment."""
        return self._agent_addr_to_good_endowments

    @property
    def agent_addr_to_utility_params(self) -> Dict[(Address, UtilityParams)]:
        """Map of agent address to its utility parameters."""
        return self._agent_addr_to_utility_params

    @property
    def good_id_to_eq_prices(self) -> Dict[(GoodId, float)]:
        """Map of good id to its equilibrium price."""
        return self._good_id_to_eq_prices

    @property
    def agent_addr_to_eq_good_holdings(self) -> Dict[(Address, EquilibriumGoodHoldings)]:
        """Map of agent address to its equilibrium good holdings."""
        return self._agent_addr_to_eq_good_holdings

    @property
    def agent_addr_to_eq_currency_holdings(self) -> Dict[(Address, EquilibriumCurrencyHoldings)]:
        """Map of agent address to its equilibrium currency holdings."""
        return self._agent_addr_to_eq_currency_holdings

    def _check_consistency(self) -> None:
        """Enforce sign constraints and dimensional agreement of all maps."""
        enforce(all(((c_e >= 0) for currency_endowments in self.agent_addr_to_currency_endowments.values() for c_e in currency_endowments.values())), 'Currency endowments must be non-negative.')
        enforce(all(((p > 0) for params in self.agent_addr_to_exchange_params.values() for p in params.values())), 'ExchangeParams must be strictly positive.')
        enforce(all(((g_e > 0) for good_endowments in self.agent_addr_to_good_endowments.values() for g_e in good_endowments.values())), 'Good endowments must be strictly positive.')
        enforce(all(((p > 0) for params in self.agent_addr_to_utility_params.values() for p in params.values())), 'UtilityParams must be strictly positive.')
        enforce((len(self.agent_addr_to_good_endowments.keys()) == len(self.agent_addr_to_currency_endowments.keys())), 'Length of endowments must be the same.')
        enforce((len(self.agent_addr_to_exchange_params.keys()) == len(self.agent_addr_to_utility_params.keys())), 'Length of params must be the same.')
        enforce(all(((len(self.good_id_to_eq_prices.values()) == len(eq_good_holdings)) for eq_good_holdings in self.agent_addr_to_eq_good_holdings.values())), 'Length of eq_prices and an element of eq_good_holdings must be the same.')
        enforce((len(self.agent_addr_to_eq_good_holdings.values()) == len(self.agent_addr_to_eq_currency_holdings.values())), 'Length of eq_good_holdings and eq_currency_holdings must be the same.')
        enforce(all(((len(self.agent_addr_to_exchange_params[agent_addr]) == len(endowments)) for (agent_addr, endowments) in self.agent_addr_to_currency_endowments.items())), 'Dimensions for exchange_params and currency_endowments rows must be the same.')
        enforce(all(((len(self.agent_addr_to_utility_params[agent_addr]) == len(endowments)) for (agent_addr, endowments) in self.agent_addr_to_good_endowments.items())), 'Dimensions for utility_params and good_endowments rows must be the same.')
class OptionPlotoptionsErrorbarSonificationDefaultinstrumentoptionsMappingHighpassFrequency(Options):
    """Highcharts `highpassFrequency` mapping options (errorbar sonification).

    Each option is exposed as a read/write property backed by the Options
    config store. The original code defined getter and setter with the same
    plain name, so each setter definition shadowed its getter and the getter
    was unreachable; `@property`/`@setter` pairs restore the intended
    attribute-style access.
    """

    @property
    def mapFunction(self):
        # Mapping function applied to the source value; None when unset.
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        # Data point property the parameter is mapped to; None when unset.
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        # Upper bound of the mapped range; None when unset.
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        # Lower bound of the mapped range; None when unset.
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        # Scope within which min/max are computed; None when unset.
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_broken_register_and_run_with_help(runner):
    """Broken plugins register without crashing the CLI, but invoking any of
    them with --help fails and surfaces a traceback in the output."""
    top_level = runner.invoke(broken_cli)
    assert top_level.exit_code == 0
    for entry_point in iter_entry_points('_test_click_plugins.broken_plugins'):
        sub_result = runner.invoke(broken_cli, [entry_point.name, '--help'])
        assert sub_result.exit_code != 0
        assert 'Traceback' in sub_result.output
def _convert_fp_model(cfg: CfgNode, pytorch_model: nn.Module, data_loader: Iterable) -> nn.Module:
    """Fuse conv/BN modules of a floating-point model unless QAT is enabled.

    When fusion runs, logs the fused model and warns if any BatchNorm layers
    survived fusion. `data_loader` is accepted for interface parity but unused.
    """
    qat_enabled = isinstance(cfg, CfgNode) and cfg.QUANTIZATION.QAT.ENABLED
    if not qat_enabled:
        pytorch_model = fuse_utils.fuse_model(pytorch_model)
        logger.info(f'Fused Model:\n{pytorch_model}')
        if fuse_utils.count_bn_exist(pytorch_model) > 0:
            # Leftover BN after fusing usually means an unfusable pattern.
            logger.warning('BN existed in pytorch model after fusing.')
    return pytorch_model
def test_cvp_cvp():
    """CVP.closest_vector must agree with rounding the enumeration solution
    back into the lattice, for every configured dimension pair."""
    for (rows, cols) in dimensions:
        basis = make_integer_matrix(rows, cols)
        basis = LLL.reduction(basis)
        gso = GSO.Mat(basis)
        gso.update_gso()
        target = list(make_integer_matrix(cols, cols)[0])
        closest = CVP.closest_vector(basis, target)
        # Cross-check via enumeration in the GSO basis.
        enum = Enumeration(gso)
        (_, coefficients) = enum.enumerate(0, basis.nrows, 2, 40, gso.from_canonical(target))[0]
        coeff_row = IntegerMatrix.from_iterable(1, basis.nrows, map(lambda c: int(round(c)), coefficients))
        candidate = tuple((coeff_row * basis)[0])
        assert closest == candidate
_function
def ip_link_add(session, name, type_='loopback', lladdr='00:00:00:00:00:00'):
    """Create an Interface record in the DB session, unless one already exists.

    :param session: DB session used for lookup and insertion.
    :param name: interface name (ifname).
    :param type_: 'ethernet' for an ethernet-style interface; anything else
        is modelled as a loopback-style interface.
    :param lladdr: hardware address applied to ethernet interfaces.
    :return: the existing or newly added Interface.
    """
    existing = ip_link_show(session, ifname=name)
    if existing:
        LOG.debug('Interface "%s" already exists: %s', existing.ifname, existing)
        return existing
    if type_ == 'ethernet':
        new_intf = Interface(ifname=name, flags=DEFAULT_ETH_FLAGS, ifmtu=DEFAULT_ETH_MTU, ifmtu6=DEFAULT_ETH_MTU, hw_addr=lladdr)
    else:
        # Loopback-style: local addresses, no hardware address.
        new_intf = Interface(ifname=name, inet='127.0.0.1/8', inet6='::1/128')
    session.add(new_intf)
    return new_intf
def plugin_loaded():
    """Sublime Text load hook: ensure the user menu folder exists and show the
    upgrade message view whenever the stored menu format version is outdated."""
    menu_path = join(sublime.packages_path(), 'User', PACKAGE_NAME)
    if not exists(menu_path):
        # First install: create the folder, write the default menu, done.
        makedirs(menu_path)
        upgrade_default_menu()
        return
    version_file = join(menu_path, VERSION_FILE)
    upgrade = not exists(version_file)
    if not upgrade:
        recorded = {}
        try:
            with open(version_file, 'r') as f:
                recorded = json.loads(sanitize_json(f.read(), preserve_lines=True))
        except Exception as e:
            # Unreadable version file falls through to a version mismatch below.
            print(e)
        upgrade = FORMAT_VERSION['version'] != recorded.get('version', '')
    if not upgrade:
        return
    win = sublime.active_window()
    if win is None:
        return
    view = win.new_file()
    if view is None:
        return
    view.set_name('TabsExtra Message')
    view.run_command('tabs_extra_message')
def main():
    """Ansible module entry point for FortiOS `extender_lte_carrier_list`.

    Builds the argument spec from the generated versioned schema, connects to
    the device over the Ansible persistent connection, applies the
    configuration via `fortios_extender`, and reports the outcome through
    `exit_json`/`fail_json` (with a version-mismatch warning when the schema
    does not match the target FortiOS version).
    """
    module_spec = schema_to_module_spec(versioned_schema)
    # This endpoint has no member key (mkey); kept as None throughout.
    mkeyname = None
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'extender_lte_carrier_list': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Mirror every generated schema option into the nested argument spec.
    for attribute_name in module_spec['options']:
        fields['extender_lte_carrier_list']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['extender_lte_carrier_list']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=False)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            # Default to quiet logging when the option is absent.
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'extender_lte_carrier_list')
        (is_error, has_changed, result, diff) = fortios_extender(module.params, fos)
    else:
        # No persistent connection socket: cannot talk to the device.
        module.fail_json(**FAIL_SOCKET_MSG)
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
class LiteSATATransportTX(Module):
    """SATA transport-layer TX path (migen gateware).

    Encodes FIS headers (REG_H2D or DATA) from the 32-bit `sink` stream into
    `encoded_cmd` and pushes them dword-by-dword to the link layer, then (for
    DATA FIS) forwards the payload. Driven by a three-state FSM:
    IDLE -> SEND_CTRL_CMD / SEND_DATA_CMD -> SEND_DATA.
    """
    def __init__(self, link):
        self.sink = sink = stream.Endpoint(transport_tx_description(32))
        # Buffer sized for the largest FIS header this module can emit.
        cmd_ndwords = max(fis_reg_h2d_header.length, fis_data_header.length)
        encoded_cmd = Signal((cmd_ndwords * 32))
        # Dword counter used to step through the encoded header.
        counter = Signal(max=(cmd_ndwords + 1))
        counter_ce = Signal()
        counter_reset = Signal()
        self.sync += If(counter_reset, counter.eq(0)).Elif(counter_ce, counter.eq((counter + 1)))
        cmd_len = Signal(len(counter))
        cmd_with_data = Signal()  # set when a DATA payload follows the header
        cmd_send = Signal()
        data_send = Signal()
        cmd_done = Signal()
        fis_type = Signal(8)
        update_fis_type = Signal()
        def test_type_tx(name):
            # Compare the sink's FIS type field against the named FIS type.
            return test_type(name, sink.type)
        self.fsm = fsm = FSM(reset_state='IDLE')
        self.submodules += fsm
        # IDLE: dispatch on the incoming FIS type; unknown types are dropped.
        fsm.act('IDLE', sink.ready.eq(0), counter_reset.eq(1), update_fis_type.eq(1), If(sink.valid, If(test_type_tx('REG_H2D'), NextState('SEND_CTRL_CMD')).Elif(test_type_tx('DATA'), NextState('SEND_DATA_CMD')).Else(sink.ready.eq(1))).Else(sink.ready.eq(1)))
        self.sync += If(update_fis_type, fis_type.eq(link.source.data[:8]))
        # REG_H2D: header only; acknowledge the sink once the header is sent.
        fsm.act('SEND_CTRL_CMD', fis_reg_h2d_header.encode(sink, encoded_cmd), cmd_len.eq((fis_reg_h2d_header.length - 1)), cmd_send.eq(1), If(cmd_done, sink.ready.eq(1), NextState('IDLE')))
        # DATA: header first (sink held), then stream the payload.
        fsm.act('SEND_DATA_CMD', sink.ready.eq(0), fis_data_header.encode(sink, encoded_cmd), cmd_len.eq((fis_data_header.length - 1)), cmd_with_data.eq(1), cmd_send.eq(1), If(cmd_done, NextState('SEND_DATA')))
        fsm.act('SEND_DATA', data_send.eq(1), sink.ready.eq(link.sink.ready), If(((sink.valid & sink.last) & sink.ready), NextState('IDLE')))
        # Mux: select the header dword indexed by `counter`.
        cmd_cases = {}
        for i in range(cmd_ndwords):
            cmd_cases[i] = [link.sink.data.eq(encoded_cmd[(32 * i):(32 * (i + 1))])]
        self.comb += [counter_ce.eq((sink.valid & link.sink.ready)), cmd_done.eq((((counter == cmd_len) & link.sink.valid) & link.sink.ready)), If(cmd_send, link.sink.valid.eq(sink.valid), link.sink.last.eq(((counter == cmd_len) & (~ cmd_with_data))), Case(counter, cmd_cases)).Elif(data_send, link.sink.valid.eq(sink.valid), link.sink.last.eq(sink.last), link.sink.data.eq(sink.data))]
def get_dataclass_type_arg(t: Type) -> Optional[Type]:
    """Return the first dataclass type reachable inside `t`, or None.

    `t` itself qualifies; otherwise tuple/list/union type arguments are
    searched recursively, returning the first truthy hit.
    """
    if not contains_dataclass_type_arg(t):
        return None
    if is_dataclass_type(t):
        return t
    if is_tuple_or_list(t) or is_union(t):
        for argument in get_type_arguments(t):
            found = get_dataclass_type_arg(argument)
            if found:
                return found
    return None
class FaucetNailedForwardingOrderedTest(FaucetUntaggedTest):
    """Faucet integration test: forwarding is "nailed" via per-port ACLs.

    ACL 1/2 allow only unicast to the peer's MAC plus ARP for the peer's IP
    (output to the peer port); everything else is dropped. Ports 3/4 drop all
    traffic. The test then checks ports 1 and 2 can still ping each other
    without relying on host learning.
    """
    CONFIG_GLOBAL = '\nvlans:\n 100:\n description: "untagged"\nacls:\n 1:\n - rule:\n dl_dst: "0e:00:00:00:02:02"\n actions:\n output:\n - port: %(port_2)d\n - rule:\n dl_type: 0x806\n dl_dst: "ff:ff:ff:ff:ff:ff"\n arp_tpa: "10.0.0.2"\n actions:\n output:\n - port: %(port_2)d\n - rule:\n actions:\n allow: 0\n 2:\n - rule:\n dl_dst: "0e:00:00:00:01:01"\n actions:\n output:\n - port: %(port_1)d\n - rule:\n dl_type: 0x806\n dl_dst: "ff:ff:ff:ff:ff:ff"\n arp_tpa: "10.0.0.1"\n actions:\n output:\n - port: %(port_1)d\n - rule:\n actions:\n allow: 0\n 3:\n - rule:\n actions:\n allow: 0\n 4:\n - rule:\n actions:\n allow: 0\n'
    CONFIG = '\n interfaces:\n %(port_1)d:\n native_vlan: 100\n acl_in: 1\n %(port_2)d:\n native_vlan: 100\n acl_in: 2\n %(port_3)d:\n native_vlan: 100\n acl_in: 3\n %(port_4)d:\n native_vlan: 100\n acl_in: 4\n'
    def test_untagged(self):
        """Ping both directions between hosts 1 and 2 using the ACL-nailed
        paths; learning is not required because the ACLs force the output port."""
        (first_host, second_host) = self.hosts_name_ordered()[0:2]
        # MACs must match the dl_dst values nailed into the ACLs above.
        first_host.setMAC('0e:00:00:00:01:01')
        second_host.setMAC('0e:00:00:00:02:02')
        self.one_ipv4_ping(first_host, second_host.IP(), require_host_learned=False)
        self.one_ipv4_ping(second_host, first_host.IP(), require_host_learned=False)
class RMTTestReqOwner(object):
    """Tests for the ReqOwner rewriter: a configured stakeholder passes
    through, while missing configuration or unknown owners raise RMTException."""

    def rmttest_positive_01(self):
        # A configured stakeholder is accepted and returned unchanged.
        (config, req) = create_parameters()
        config.set_value('requirements.stakeholders', ['marketing', 'security'])
        req['Owner'] = RecordEntry('Owner', 'marketing')
        rewriter = ReqOwner(config)
        (name, value) = rewriter.rewrite('Owner-test', req)
        assert name == 'Owner'
        assert value == 'marketing'

    def rmttest_negative_01(self):
        # Stakeholders set outside the config API are not seen: error 10.
        (config, req) = create_parameters()
        config.stakeholders = ['marketing', 'security']
        rewriter = ReqOwner(config)
        with pytest.raises(RMTException) as rmte:
            rewriter.rewrite('Owner-test', req)
        assert rmte.id() == 10

    def rmttest_negative_02(self):
        # An owner not among the configured stakeholders is rejected: error 11.
        (config, req) = create_parameters()
        config.set_value('requirements.stakeholders', ['marketing', 'security'])
        req['Owner'] = RecordEntry('Owner', 'SomethingDifferent')
        rewriter = ReqOwner(config)
        with pytest.raises(RMTException) as rmte:
            rewriter.rewrite('Owner-test', req)
        assert rmte.id() == 11
def test_get_llm_model_answer_with_streaming(config, mocker):
    """When streaming is enabled, ChatOpenAI must be constructed exactly once
    with a StreamingStdOutCallbackHandler as its first callback."""
    config.stream = True
    chat_mock = mocker.patch('embedchain.llm.openai.ChatOpenAI')
    llm = OpenAILlm(config)
    llm.get_llm_model_answer('Test query')
    chat_mock.assert_called_once()
    callback_lists = [call[1]['callbacks'] for call in chat_mock.call_args_list]
    assert any(isinstance(cb_list[0], StreamingStdOutCallbackHandler) for cb_list in callback_lists)
def get_true_adjacencies(alignment, contig_permutations, break_contigs, circular):
    """Derive the true block adjacencies per chromosome from an alignment.

    For each chromosome, walks the contig hits in order and records an
    Adjacency between the end block of the previous contig and the start
    block of the next one. Contigs listed in `break_contigs` break the chain.
    The hit list is made cyclic by re-appending the first entry; when
    `circular` is False the wrap-around adjacency is re-marked as infinite.
    """
    by_chr = group_by_chr(alignment)
    adjacencies = []
    for (chr_name, entries) in by_chr.items():
        prev_block = None
        prev_contig = None
        # Close the cycle so the last->first adjacency is also emitted.
        entries.append(entries[0])
        for hit in entries:
            if ((prev_contig in break_contigs) or (hit.qry.seq_id in break_contigs)):
                continue
            # Overall orientation of the contig on the reference.
            sign = (hit.qry.strand * hit.ref.strand)
            blocks = contig_permutations[hit.qry.seq_id]
            if (sign < 0):
                # Reverse orientation: negate and reverse the block order.
                blocks = list([(- x) for x in blocks])[::(- 1)]
            if prev_block:
                adjacencies.append(Adjacency((- prev_block), blocks[0], False))
            prev_block = blocks[(- 1)]
            prev_contig = hit.qry.seq_id
        if (entries and (not circular)):
            # Mark the wrap-around adjacency as infinite for linear chromosomes.
            # NOTE(review): this patches adjacencies[-1] even when this
            # chromosome contributed no adjacency (all hits skipped), which
            # would alter the previous chromosome's last adjacency — and it
            # raises IndexError when the list is still empty. Confirm intended.
            adjacencies[(- 1)] = Adjacency(adjacencies[(- 1)].left, adjacencies[(- 1)].right, True)
    return adjacencies
class bsn_misc_capabilities(bsn):
    """Big Switch Networks misc-capabilities experimenter TLV (generated-style
    OpenFlow serialization code).

    Wire layout: type(!H), length(!H, back-patched), experimenter(!L),
    exp_type(!L), then three 64-bit bitmasks: current, available, supported.
    """
    type = 65535
    experimenter = 6035143
    exp_type = 5
    def __init__(self, current=None, available=None, supported=None):
        # Each bitmask defaults to 0 when not supplied.
        if (current != None):
            self.current = current
        else:
            self.current = 0
        if (available != None):
            self.available = available
        else:
            self.available = 0
        if (supported != None):
            self.supported = supported
        else:
            self.supported = 0
        return
    def pack(self):
        """Serialize to the wire format; the length field (index 1) is
        back-patched once the total size is known."""
        packed = []
        packed.append(struct.pack('!H', self.type))
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.exp_type))
        packed.append(struct.pack('!Q', self.current))
        packed.append(struct.pack('!Q', self.available))
        packed.append(struct.pack('!Q', self.supported))
        length = sum([len(x) for x in packed])
        packed[1] = struct.pack('!H', length)
        return ''.join(packed)
    # NOTE(review): takes `reader`, not `self` — presumably a @staticmethod
    # whose decorator was lost; calling it on an instance would misbind. Confirm.
    def unpack(reader):
        """Parse a bsn_misc_capabilities from `reader`, asserting the fixed
        type/experimenter/exp_type fields match."""
        obj = bsn_misc_capabilities()
        _type = reader.read('!H')[0]
        assert (_type == 65535)
        _length = reader.read('!H')[0]
        # Constrain further reads to this TLV's extent (4 bytes already consumed).
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _exp_type = reader.read('!L')[0]
        assert (_exp_type == 5)
        obj.current = reader.read('!Q')[0]
        obj.available = reader.read('!Q')[0]
        obj.supported = reader.read('!Q')[0]
        return obj
    def __eq__(self, other):
        # Equality on exact type plus the three capability bitmasks.
        if (type(self) != type(other)):
            return False
        if (self.current != other.current):
            return False
        if (self.available != other.available):
            return False
        if (self.supported != other.supported):
            return False
        return True
    def pretty_print(self, q):
        """Render the object through the pretty-printer `q` (hex bitmasks)."""
        q.text('bsn_misc_capabilities {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('current = ')
                q.text(('%#x' % self.current))
                q.text(',')
                q.breakable()
                q.text('available = ')
                q.text(('%#x' % self.available))
                q.text(',')
                q.breakable()
                q.text('supported = ')
                q.text(('%#x' % self.supported))
                q.breakable()
        q.text('}')
def make_tree(root: Path, tree: Dict):
    """Materialize a nested mapping as a directory tree under `root`.

    dict values become subdirectories (recursed into); str values become
    file contents; any other value type is silently ignored.
    """
    for (name, node) in tree.items():
        target = root / name
        if isinstance(node, dict):
            target.mkdir()
            make_tree(target, node)
        if isinstance(node, str):
            target.write_text(node)
def xmlequals(xml_doc_a, xml_doc_b, xpath):
    """Compare two XML nodes at `xpath` via a chain of check functions.

    Each check returns (verdict, err_msg): True/False is decisive and is
    returned immediately; None means "undecided, try the next check". If no
    check decides, the nodes are considered equal.
    """
    checks = [xml_check_type, xml_check_text_content, xml_check_name, xml_check_attributes, xml_check_child_count, xml_check_children]
    for check in checks:
        (verdict, err_msg) = check(xml_doc_a, xml_doc_b, xpath)
        if (verdict in (False, True)):
            assert (verdict is not None)
            return (verdict, err_msg)
        # Anything non-decisive must be the None sentinel.
        assert (verdict is None)
    return (True, None)
('rocm.multi_level_roi_align.gen_function')
def gen_function(func_attrs, template_path, exec_cond_template, shape_eval_template, shape_save_template):
    """Render the ROCm source for a multi-level ROI-align op.

    For each entry in the op's exec_path, renders the execution body from the
    op attributes and wraps it in its exec condition; the concatenated
    branches are spliced into the source template.
    `template_path`, `shape_eval_template` and `shape_save_template` are part
    of the codegen interface but unused here.
    """
    func_name = func_attrs['name']
    exec_path = func_attrs['exec_path']
    x = func_attrs['inputs'][0]
    y = func_attrs['outputs'][0]
    backend_spec = ROCMSpec()
    # Map framework dtypes to backend element types for input and output.
    input_type = backend_spec.dtype_to_backend_type(x._attrs['dtype'])
    output_type = backend_spec.dtype_to_backend_type(y._attrs['dtype'])
    exec_paths = ''
    for (key, _) in exec_path.items():
        # The rendered body is identical per key; only the guarding condition differs.
        program = multi_level_roi_align_common.EXEC_TEMPLATE.render(indent=' ', num_rois=func_attrs['num_rois'], pooled_size=func_attrs['pooled_size'], sampling_ratio=func_attrs['sampling_ratio'], spatial_scale=func_attrs['spatial_scale'], position_sensitive=func_attrs['position_sensitive'], continuous_coordinate=func_attrs['continuous_coordinate'], elem_input_type=input_type, elem_output_type=output_type)
        exec_inst = exec_cond_template.render(indent=' ', cond=key, program=program)
        exec_paths += exec_inst
    return multi_level_roi_align_common.SRC_TEMPLATE.render(function_name=func_name, exec_paths=exec_paths, header_files=EXTRA_HEADER.render(), index_type=backend_spec.index_type, prefix=backend_spec.prefix, elem_input_type=input_type, elem_output_type=output_type)
class CSVReader(Reader):
    """Reader for CSV sources: exposes the file as pandas, TensorFlow dataset
    or Metview table, probing the dialect/header on construction."""
    def __init__(self, source, path, compression=None):
        super().__init__(source, path)
        # Optional compression codec (e.g. 'gzip'); None for plain files.
        self.compression = compression
        (self.dialect, self.has_header) = probe_csv(path, compression=compression)
    def to_pandas(self, **kwargs):
        """Load the CSV into a pandas DataFrame; extra options are taken from
        kwargs['pandas_read_csv_kwargs'] and the probed compression is injected."""
        import pandas
        pandas_read_csv_kwargs = kwargs.get('pandas_read_csv_kwargs', {})
        if (self.compression is not None):
            # Copy before mutating: the caller's dict must not be modified.
            pandas_read_csv_kwargs = dict(**pandas_read_csv_kwargs)
            pandas_read_csv_kwargs['compression'] = self.compression
        LOG.debug('pandas.read_csv(%s,%s)', self.path, pandas_read_csv_kwargs)
        return pandas.read_csv(self.path, **pandas_read_csv_kwargs)
    def to_tfdataset(self, batch_size=1000, **kwargs):
        """Expose the CSV as a batched tf.data dataset (plain or gzip only)."""
        import tensorflow as tf
        assert (self.compression in (None, 'gzip'))
        options = {}
        if (self.dialect is not None):
            # NOTE(review): the '????' key looks like a placeholder for the
            # codec that maps to 'ZLIB' — confirm before relying on it.
            COMPRESSIONS = {None: None, 'gzip': 'GZIP', '????': 'ZLIB'}
            options = dict(use_quote_delim=self.dialect.doublequote, header=self.has_header, compression_type=COMPRESSIONS[self.compression])
        return tf.data.experimental.make_csv_dataset(self.path, batch_size, **options)
    # NOTE(review): named `cls` — presumably a @classmethod whose decorator
    # was lost; as written it binds the instance. Confirm against callers.
    def to_tfdataset_multi(cls, paths, batch_size=1000, **kwargs):
        """Build one tf.data dataset over several CSV paths (default options)."""
        import tensorflow as tf
        options = {}
        return tf.data.experimental.make_csv_dataset(paths, batch_size, **options)
    def plot_map(self, backend):
        # Delegate plotting to the wrapper around the pandas representation.
        get_wrapper(self.to_pandas()).plot_map(backend)
    def to_metview(self):
        """Load the (uncompressed) CSV as a Metview table."""
        from climetlab.metview import mv_read_table
        assert (self.compression is None)
        return mv_read_table(table_filename=self.path)
def handle_cell_outputs(cell: NotebookNode, plot_data_folder: Path) -> str:
    """Convert a notebook cell's outputs into an MDX string.

    Returns the empty string when the cell has no outputs; otherwise groups
    the outputs by type and renders them, saving plot data under
    `plot_data_folder`.
    """
    outputs = cell.get('outputs', [])
    if not outputs:
        return ''
    grouped = aggregate_output_types(outputs)
    return aggregate_mdx(grouped, plot_data_folder)
class CompileBenchKind(_common.JobKind):
    """Job kind that builds CPython and benchmarks it with pyperformance.

    Declares the request/work/result file layout and materializes the
    benchmark manifest, pyperformance config and compile script on create().
    """
    NAME = 'compile-bench'
    # Rough wall-clock budget used for scheduling estimates.
    TYPICAL_DURATION_SECS = (40 * 60)
    REQFS_FIELDS = (_common.JobKind.REQFS_FIELDS + ['pyperformance_manifest', 'pyperformance_config'])
    RESFS_FIELDS = (_common.JobKind.RESFS_FIELDS + ['pyperformance_log', 'pyperformance_results'])
    Request = CompileBenchRequest
    Result = CompileBenchResult
    def set_request_fs(self, fs: _common.JobRequestFS, context: Optional[str]) -> None:
        # Paths of the request artifacts inside the request directory.
        fs.pyperformance_manifest = f'{fs}/benchmarks.manifest'
        fs.pyperformance_config = f'{fs}/pyperformance.ini'
    def set_work_fs(self, fs: _common.JobWorkFS, context: Optional[str]) -> None:
        if (context == 'job-worker'):
            # Scratch locations only exist on the worker side.
            fs.venv = f'{fs}/pyperformance-venv'
            fs.scratch_dir = f'{fs}/pyperformance-scratch'
        fs.pyperformance_results_glob = f'{fs}/*.json.gz'
    def set_result_fs(self, fs: _common.JobResultFS, context: Optional[str]) -> None:
        fs.pyperformance_log = f'{fs}/pyperformance.log'
        fs.pyperformance_results = f'{fs}/pyperformance-results.json.gz'
    def create(self, reqid: ToRequestIDType, jobfs: '_job.JobFS', workerfs: '_workers.WorkerJobsFS', *, _fake: Optional[Any]=None, **req_kwargs) -> Tuple[(CompileBenchRequest, str)]:
        """Resolve the request and write its manifest/config files, returning
        the request plus the shell script that performs the compile+bench run."""
        req = resolve_compile_bench_request(reqid, jobfs.work.root, **req_kwargs)
        manifest = build_pyperformance_manifest(req, workerfs)
        with open(jobfs.request.pyperformance_manifest, 'w') as outfile:
            outfile.write(manifest)
        ini = build_pyperformance_config(req, workerfs)
        with open(jobfs.request.pyperformance_config, 'w') as outfile:
            ini.write(outfile)
        script = build_compile_script(req, workerfs, _fake)
        return (req, script)
    def as_row(self, req: requests.Request) -> Tuple[(str, str, str, str, str, str)]:
        # NOTE(review): the second element is the ref object itself, not a str,
        # despite the annotation — confirm consumers expect the object.
        ref = req.ref
        assert (not isinstance(ref, str)), repr(ref)
        return (ref.full, ref, ref.remote, ref.branch, ref.tag, ref.commit)
.parametrize('examples_path', [str((EXAMPLES_DIR / 'summarization.json')), str((EXAMPLES_DIR / 'summarization.yml')), str((EXAMPLES_DIR / 'summarization.jsonl'))])
def test_jinja_template_rendering_with_examples(examples_path, example_text):
    """Render the summarization prompt with few-shot examples and check the
    exact output, including the warning emitted because one example's summary
    (28 words) exceeds max_n_words (20)."""
    nlp = spacy.blank('en')
    doc = nlp.make_doc(example_text)
    prompt_examples = fewshot_reader(examples_path)
    llm_ner = make_summarization_task(examples=prompt_examples, max_n_words=20)
    # The over-long example summary must trigger this specific warning.
    with pytest.warns(UserWarning, match=re.escape("The provided example 'Life is a quality th...' has a summary of length 28, but `max_n_words` == 20.")):
        prompt = list(llm_ner.generate_prompts([doc]))[0]
    # Expected prompt reproduced verbatim; whitespace is significant.
    assert (prompt.strip() == f"""
You are an expert summarization system. Your task is to accept Text as input and summarize the Text in a concise way.
The summary must not, under any circumstances, contain more than 20 words.
Below are some examples (only use these as a guide):
Text:
'''
The United Nations, referred to informally as the UN, is an intergovernmental organization whose stated purposes are to maintain international peace and security, develop friendly relations among nations, achieve international cooperation, and serve as a centre for harmonizing the actions of nations. It is the world's largest international organization. The UN is headquartered on international territory in New York City, and the organization has other offices in Geneva, Nairobi, Vienna, and The Hague, where the International Court of Justice is headquartered.
The UN was established after World War II with the aim of preventing future world wars, and succeeded the League of Nations, which was characterized as ineffective. On 25 April 1945, 50 nations met in San Francisco, California for a conference and started drafting the UN Charter, which was adopted on 25 June 1945. The charter took effect on 24 October 1945, when the UN began operations. The organization's objectives, as defined by its charter, include maintaining international peace and security, protecting human rights, delivering humanitarian aid, promoting sustainable development, and upholding international law. At its founding, the UN had 51 member states; as of 2023, it has 193 almost all of the world's sovereign states.
'''
Summary:
'''
UN is an intergovernmental organization to foster international peace, security, and cooperation. Established after WW2 with 51 members, now 193.
'''
Text:
'''
Life is a quality that distinguishes matter that has biological processes, such as signaling and self-sustaining processes, from matter that does not, and is defined by the capacity for growth, reaction to stimuli, metabolism, energy transformation, and reproduction. Various forms of life exist, such as plants, animals, fungi, protists, archaea, and bacteria. Biology is the science that studies life.
The gene is the unit of heredity, whereas the cell is the structural and functional unit of life. There are two kinds of cells, prokaryotic and eukaryotic, both of which consist of cytoplasm enclosed within a membrane and contain many biomolecules such as proteins and nucleic acids. Cells reproduce through a process of cell division, in which the parent cell divides into two or more daughter cells and passes its genes onto a new generation, sometimes producing genetic variation.
Organisms, or the individual entities of life, are generally thought to be open systems that maintain homeostasis, are composed of cells, have a life cycle, undergo metabolism, can grow, adapt to their environment, respond to stimuli, reproduce and evolve over multiple generations. Other definitions sometimes include non-cellular life forms such as viruses and viroids, but they are usually excluded because they do not function on their own; rather, they exploit the biological processes of hosts.
'''
Summary:
'''
Life is a quality defined by biological processes, including reproduction, genetics, and metabolism. There are two types of cells and organisms that can grow, respond, reproduce, and evolve.
'''
Here is the Text that needs to be summarized:
'''
{example_text}
'''
Summary:
""".strip())
class HandleChallenge(ChallengeServer):
    """Flag-storage CTF handler: print a banner, set up the store, then serve
    requests line-by-line until the client quits or the deadline passes."""

    def challenge(self, end_time):
        banner = [
            'Another Boring Flag Storage Service',
            '',
            'please wait while we setup a secure',
            'signature scheme. This could take a',
            'few seconds...',
        ]
        for line in banner:
            self.sendline(line)
        # Key/signature setup happens inside the constructor.
        store = FlagStorage()
        self.sendline('Done! Thank you for your patience')
        self.request.settimeout(TIME_LIMIT)
        while time.time() < end_time:
            self.send('> ')
            request = self.recv(1024).strip()
            if request == '':
                # Empty line: client is done.
                return
            self.sendline(store.handle_request(request))
        self.timeout_message()
()
_and_sanitize_search_inputs
def get_medications_query(doctype, txt, searchfield, start, page_len, filters):
    """Search handler: list the linked items of the Medication named in
    `filters['name']`, returning (item, brand, manufacturer, qty) rows.

    Returns a tuple of tuples as expected by Frappe search queries; disabled
    items are excluded and the stock quantity is read from the default
    warehouse's Bin.
    """
    medication_name = filters.get('name')
    medication_child = frappe.qb.DocType('Medication Linked Item')
    medication = frappe.qb.DocType('Medication')
    item = frappe.qb.DocType('Item')
    data = (
        frappe.qb.select(medication_child.brand, medication_child.manufacturer, medication_child.item)
        .from_(medication_child)
        .left_join(medication)
        .on(medication.name == medication_child.parent)
        .left_join(item)
        .on(item.name == medication_child.item)
        .where((medication.name == medication_name) & (item.disabled == 0))
        .run(as_dict=True)
    )
    # The default warehouse does not change per row; fetch it once.
    default_warehouse = frappe.get_cached_value('Stock Settings', None, 'default_warehouse')
    data_list = []
    for d in data:
        display_list = []
        if d.get('item'):
            display_list.append(d.get('item'))
        if d.get('brand'):
            display_list.append(d.get('brand'))
        if d.get('manufacturer'):
            display_list.append(d.get('manufacturer'))
        if default_warehouse:
            # BUG FIX: the query selects `item`, not `name`; keying the Bin
            # lookup on d.get('name') always yielded None, so the quantity
            # always rendered as '0'. Use the item code instead.
            actual_qty = frappe.db.get_value('Bin', {'warehouse': default_warehouse, 'item_code': d.get('item')}, 'actual_qty')
            display_list.append(('Qty:' + str(actual_qty)) if actual_qty else '0')
        data_list.append(display_list)
    return tuple(tuple(sub) for sub in data_list)
class SvgRenderer(Renderer):
    """Renderer that draws a static map into an SVG document (svgwrite).

    All drawing is clipped to the page rect defined once in <defs>; map
    objects are rendered repeatedly along the x axis so features crossing the
    antimeridian appear on both sides.
    """
    def __init__(self, transformer: Transformer) -> None:
        Renderer.__init__(self, transformer)
        self._draw = svgwrite.Drawing(size=(f'{self._trans.image_width()}px', f'{self._trans.image_height()}px'), viewBox=f'0 0 {self._trans.image_width()} {self._trans.image_height()}')
        # Page-sized clip path shared by every rendered group.
        clip = self._draw.defs.add(self._draw.clipPath(id='page'))
        clip.add(self._draw.rect(insert=(0, 0), size=(self._trans.image_width(), self._trans.image_height())))
        # Current object group; only set while render_objects() is running.
        self._group: typing.Optional[svgwrite.container.Group] = None
    def drawing(self) -> svgwrite.Drawing:
        """Return the underlying svgwrite Drawing."""
        return self._draw
    def group(self) -> svgwrite.container.Group:
        """Return the group objects must draw into (valid only during render_objects)."""
        assert (self._group is not None)
        return self._group
    def render_objects(self, objects: typing.List['Object']) -> None:
        """Render each object once per world copy so wrap-around features show."""
        # Number of world copies needed to cover the image width.
        x_count = math.ceil((self._trans.image_width() / (2 * self._trans.world_width())))
        for obj in objects:
            for p in range((- x_count), (x_count + 1)):
                self._group = self._draw.g(clip_path='url(#page)', transform=f'translate({(p * self._trans.world_width())}, 0)')
                obj.render_svg(self)
                self._draw.add(self._group)
                self._group = None
    def render_background(self, color: typing.Optional[Color]) -> None:
        """Fill the page with `color`; no-op when color is None."""
        if (color is None):
            return
        group = self._draw.g(clip_path='url(#page)')
        group.add(self._draw.rect(insert=(0, 0), size=self._trans.image_size(), rx=None, ry=None, fill=color.hex_rgb()))
        self._draw.add(group)
    def render_tiles(self, download: typing.Callable[([int, int, int], typing.Optional[bytes])]) -> None:
        """Place the visible map tiles, wrapping x and skipping failed downloads."""
        group = self._draw.g(clip_path='url(#page)')
        for yy in range(0, self._trans.tiles_y()):
            y = (self._trans.first_tile_y() + yy)
            if ((y < 0) or (y >= self._trans.number_of_tiles())):
                # No tiles beyond the poles.
                continue
            for xx in range(0, self._trans.tiles_x()):
                # Wrap horizontally across the antimeridian.
                x = ((self._trans.first_tile_x() + xx) % self._trans.number_of_tiles())
                try:
                    tile_img = self.fetch_tile(download, x, y)
                    if (tile_img is None):
                        continue
                    group.add(self._draw.image(tile_img, insert=(((xx * self._trans.tile_size()) + self._trans.tile_offset_x()), ((yy * self._trans.tile_size()) + self._trans.tile_offset_y())), size=(self._trans.tile_size(), self._trans.tile_size())))
                except RuntimeError:
                    # Best-effort: a missing tile leaves a gap rather than failing.
                    pass
        self._draw.add(group)
    def render_attribution(self, attribution: typing.Optional[str]) -> None:
        """Draw the attribution strip along the bottom edge; no-op when empty."""
        if ((attribution is None) or (attribution == '')):
            return
        group = self._draw.g(clip_path='url(#page)')
        group.add(self._draw.rect(insert=(0, (self._trans.image_height() - 12)), size=(self._trans.image_width(), 12), rx=None, ry=None, fill=WHITE.hex_rgb(), fill_opacity='0.8'))
        group.add(self._draw.text(attribution, insert=(2, (self._trans.image_height() - 3)), font_family='Arial, Helvetica, sans-serif', font_size='9px', fill=BLACK.hex_rgb()))
        self._draw.add(group)
    def fetch_tile(self, download: typing.Callable[([int, int, int], typing.Optional[bytes])], x: int, y: int) -> typing.Optional[str]:
        """Download tile (x, y) at the current zoom and return it as a data URI."""
        image_data = download(self._trans.zoom(), x, y)
        if (image_data is None):
            return None
        return SvgRenderer.create_inline_image(image_data)
    # NOTE(review): takes raw `data`, not `self`, and is called on the class —
    # presumably a @staticmethod whose decorator was lost. Confirm.
    def guess_image_mime_type(data: bytes) -> str:
        """Sniff JPEG/PNG from magic bytes; defaults to PNG when unrecognized."""
        if ((data[:4] == b'\xff\xd8\xff\xe0') and (data[6:11] == b'JFIF\x00')):
            return 'image/jpeg'
        if (data[1:4] == b'PNG'):
            return 'image/png'
        return 'image/png'
    # NOTE(review): same presumed-@staticmethod situation as above.
    def create_inline_image(image_data: bytes) -> str:
        """Encode raw image bytes as a base64 data URI for inline SVG use."""
        image_type = SvgRenderer.guess_image_mime_type(image_data)
        return f"data:{image_type};base64,{base64.b64encode(image_data).decode('utf-8')}"
class OptionPlotoptionsSolidgaugeSonificationDefaultinstrumentoptionsMappingPan(Options):
    """Highcharts `pan` mapping options (solidgauge sonification).

    Each option is exposed as a read/write property backed by the Options
    config store. The original code defined getter and setter with the same
    plain name, so each setter definition shadowed its getter and the getter
    was unreachable; `@property`/`@setter` pairs restore the intended
    attribute-style access.
    """

    @property
    def mapFunction(self):
        # Mapping function applied to the source value; None when unset.
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        # Data point property the parameter is mapped to; None when unset.
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        # Upper bound of the mapped range; None when unset.
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        # Lower bound of the mapped range; None when unset.
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        # Scope within which min/max are computed; None when unset.
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class BaseSoC(SoCCore):
    """LiteX SoC for the Digilent Arty A7 with an RPC-DRAM PHY.

    Supports a PMOD debug build (signals routed out for probing), optional
    dynamic sys-clock frequency, Etherbone or UART-bone control access, and
    an optional LiteScope analyzer on the DRAM PHY signals.

    NOTE(review): several frequency constants appear as `.0` (e.g.
    `sys_clk_freq=int(.0)`, `iodelay_clk_freq=.0`) — these look like mangled
    numeric literals (likely e-notation values lost in extraction). Confirm
    against the original source before relying on them.
    """
    def __init__(self, sys_clk_freq=int(.0), ip_address='192.168.1.50', debug_pmod=False, no_sdram_init=False, dynamic_freq=False, with_analyzer=False, **kwargs):
        platform = digilent_arty.Platform()
        # Larger ROM to fit the init/debug firmware; UART is tunneled (crossover).
        kwargs['integrated_rom_size'] = 40960
        kwargs['uart_name'] = 'crossover'
        SoCCore.__init__(self, platform, sys_clk_freq, ident='LiteX SoC on Arty A7', integrated_rom_mode='rw', **kwargs)
        self.submodules.crg = _CRG(platform, sys_clk_freq, dynamic=dynamic_freq)
        platform.add_extension(ddram_io())
        platform.add_platform_command('set_property INTERNAL_VREF 0.750 [get_iobanks 34]')
        if debug_pmod:
            # Debug build: route DRAM signals to PMODs and print the placement.
            from pprint import pprint
            print()
            print(' PMOD placement:')
            print(' ')
            pprint(_ddram_dbg_io)
            print()
            platform.add_extension(_ddram_dbg_io)
            ddram_pads = RPCPads(platform.request('ddram_dbg'))
            dbg_pmod = platform.request('dbg_pmod')
            # Expose the relevant clocks for external probing.
            self.comb += [dbg_pmod[0].eq(ClockSignal()), dbg_pmod[1].eq(ClockSignal('sys4x_90')), dbg_pmod[2].eq(ClockSignal('sys4x_180'))]
        else:
            ddram_pads = RPCPadsDDR3(platform.request('ddram_sstl15'))
        self.submodules.ddrphy = A7RPCPHY(pads=ddram_pads, sys_clk_freq=(.0 if debug_pmod else sys_clk_freq), iodelay_clk_freq=.0)
        module = EM6GA16L((.0 if debug_pmod else sys_clk_freq), '1:4')
        # Software-controlled refresh enable exposed as a CSR.
        self.ddrphy.refresh_enable = CSRStorage()
        controller_settings = ControllerSettings()
        controller_settings.auto_precharge = False
        controller_settings.with_refresh = self.ddrphy.refresh_enable.storage
        self.add_sdram('sdram', phy=self.ddrphy, module=module, controller_settings=controller_settings)
        self.add_constant('SET_DDR_VCC_15')
        if no_sdram_init:
            self.add_constant('SDRAM_INIT_DISABLE')
        # On-board I2C used to program the DDR supply at init.
        self.submodules.i2c = I2CMaster(platform.request('i2c'))
        self.i2c.add_init(addr=88, init=[(163, 120), (180, 120)])
        if dynamic_freq:
            # Dynamic frequency: control over UART-bone instead of Ethernet.
            self.add_uartbone(clk_freq=.0, baudrate=1000000.0, cd='uart')
        else:
            self.submodules.ethphy = LiteEthPHYMII(clock_pads=self.platform.request('eth_clocks'), pads=self.platform.request('eth'))
            self.add_etherbone(phy=self.ethphy, ip_address=ip_address)
        self.submodules.leds = LedChaser(pads=Cat(*[platform.request('user_led', i) for i in range(4)]), sys_clk_freq=sys_clk_freq)
        if with_analyzer:
            # Finalize the PHY so all probe signals exist, then attach LiteScope.
            self.ddrphy.finalize()
            analyzer_signals = [*[self.ddrphy.dfi.phases[p].cas_n for p in range(self.ddrphy.nphases)], *[self.ddrphy.dfi.phases[p].ras_n for p in range(self.ddrphy.nphases)], *[self.ddrphy.dfi.phases[p].we_n for p in range(self.ddrphy.nphases)], *[self.ddrphy.dfi.phases[p].reset_n for p in range(self.ddrphy.nphases)], self.ddrphy.stb_1ck_in, self.ddrphy.stb_1ck_out, self.ddrphy.dqs_1ck_out, self.ddrphy.dqs_1ck_in, self.ddrphy.cs_n_1ck_out, self.ddrphy.clk_1ck_out, self.ddrphy.dq_data_en, self.ddrphy.dq_mask_en, self.ddrphy.dq_cmd_en, self.ddrphy.dq_read_stb, self.ddrphy.dq_in_cnt, self.ddrphy.db_cnt, self.ddrphy.dqs_cnt, self.ddrphy.rddata_en.sr, self.ddrphy.wrdata_en.sr, self.ddrphy.db_oe, self.ddrphy.dqs_oe, self.ddrphy.reset_fsm.state]
            self.submodules.analyzer = LiteScopeAnalyzer(analyzer_signals, depth=512, register=True, clock_domain='sys', csr_csv='analyzer.csv')
        self.add_constant('SDRAM_DEBUG')
        def dump(obj):
            # Pretty-print an object's (or dict's) fields for build-time inspection.
            print()
            print((' ' + obj.__class__.__name__))
            print((' ' + ('-' * len(obj.__class__.__name__))))
            d = (obj if isinstance(obj, dict) else vars(obj))
            for (var, val) in d.items():
                if (var == 'self'):
                    continue
                print(' {}: {}'.format(var, val))
            print(('=' * 80))
        dump(self.ddrphy.settings)
        dump(module.geom_settings)
        dump(module.timing_settings)
        print()
        print(('=' * 80))
        print()
        print(' VCO freq = ', self.crg.main_pll.compute_config()['vco'])
        print()
        print(('=' * 80))
    def generate_sdram_phy_py_header(self):
        """Write the SDRAM PHY init sequence to sdram_init.py for host-side use."""
        f = open('sdram_init.py', 'w')
        f.write(get_sdram_phy_py_header(self.sdram.controller.settings.phy, self.sdram.controller.settings.timing))
        f.close()
class QueryServicer(object):
    """gRPC upgrade-query service base: every RPC is unimplemented and must
    be overridden by a concrete servicer."""

    def _unimplemented(self, context):
        # Shared stub behaviour: mark the RPC unimplemented, then raise.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def CurrentPlan(self, request, context):
        self._unimplemented(context)

    def AppliedPlan(self, request, context):
        self._unimplemented(context)

    def UpgradedConsensusState(self, request, context):
        self._unimplemented(context)

    def ModuleVersions(self, request, context):
        self._unimplemented(context)
def test_prism_layer_properties(dummy_layer):
    """User-supplied physical properties must be attached to the prism layer."""
    coordinates, surface, reference, density = dummy_layer
    # Second property array with the same shape as density (constant value).
    suceptibility = 0 * density + 0.001
    props = {'density': density, 'suceptibility': suceptibility}
    layer = prism_layer(coordinates, surface, reference, properties=props)
    npt.assert_allclose(layer.density, density)
    npt.assert_allclose(layer.suceptibility, suceptibility)
class UpdateThread(QThread):
    """Background thread that discovers the latest GitHub release version.

    Emits ``thread_finished(True)`` when done, regardless of success.
    """
    thread_finished = pyqtSignal(bool)

    def __init__(self):
        QThread.__init__(self)
        # Whether to prompt the user even when already on the latest version.
        self.prompt_if_latest = True
        # Version string scraped from the release redirect; 'None' until run().
        self.github_version = 'None'
        # NOTE(review): the URL literal below is truncated in this copy of the
        # source — restore the full release URL before use.
        self.url = '

    def run(self):
        try:
            # GitHub's /releases/latest redirects to /releases/tag/<version>;
            # the final path component is the version tag.
            redirect = get(self.url)
            self.github_version = redirect.url.split('/')[(- 1)]
        except Exception as e:
            print('Error at Update Thread:', e)
        self.thread_finished.emit(True)
class Model(BaseModel):
    """Base model that warns (rather than fails) on unknown attributes.

    Any keyword not consumed by the parent model is reported via a
    ``UnknownModelAttributeWarning`` and discarded.
    """

    def __init__(self, **data):
        super().__init__(**data)
        known = set(self.__dict__.keys())
        cls_name = self.__class__.__name__
        # Anything the parent did not store is an unknown attribute.
        for arg in set(data.keys()) - known:
            msg = f'{cls_name} contains unknown attribute: `{arg}`, which was discarded. This warning may be safely ignored. Please consider upgrading Tekore.'
            warn(msg, UnknownModelAttributeWarning, stacklevel=5)
# NOTE(review): the two parenthesized lines below look like stripped
# @mock.patch decorators — restore the decorators for this test to run.
('copr.v3.proxies.project_chroot.ProjectChrootProxy.edit')
('copr_cli.main.config_from_file', return_value=mock_config)
def test_edit_chroot_with_isolation(_config_from_file, project_chroot_proxy_edit, capsys):
    """`copr edit-chroot --isolation` forwards the value to ProjectChrootProxy.edit."""
    main.main(argv=['edit-chroot', 'foo/f20', '--isolation', 'simple'])
    (stdout, stderr) = capsys.readouterr()
    project_chroot_proxy_edit.assert_called_once()
    # Keyword arguments of the single recorded call.
    kwargs = project_chroot_proxy_edit.call_args[1]
    assert (stderr == '')
    assert (kwargs['isolation'] == 'simple')
    assert (stdout == 'Edit chroot operation was successful.\n')
def generate_self_signed_certificate():
    """Create a throwaway self-signed TLS certificate for this host.

    Returns:
        tuple: ``(certificate_pem, private_key_pem)`` — PEM-encoded
        certificate bytes and the unencrypted PKCS8 private key bytes.
    """
    one_day = datetime.timedelta(1, 0, 0)
    private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048, backend=default_backend())
    public_key = private_key.public_key()
    builder = x509.CertificateBuilder()
    # Self-signed: subject and issuer are both this machine's hostname.
    builder = builder.subject_name(x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, socket.gethostname())]))
    builder = builder.issuer_name(x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, socket.gethostname())]))
    # Backdate by one day to tolerate clock skew; valid for ~5 years.
    builder = builder.not_valid_before((datetime.datetime.today() - one_day))
    builder = builder.not_valid_after((datetime.datetime.today() + ((one_day * 365) * 5)))
    builder = builder.serial_number(x509.random_serial_number())
    builder = builder.public_key(public_key)
    logger.debug('Adding SANs for %(hostname)s, *.%(hostname)s, localhost, and *.localhost', {'hostname': socket.gethostname()})
    # FIX: `sans` was referenced below but never defined; build the SAN list
    # exactly as the debug message above describes.
    hostname = socket.gethostname()
    sans = [x509.DNSName(hostname), x509.DNSName('*.' + hostname), x509.DNSName('localhost'), x509.DNSName('*.localhost')]
    builder = builder.add_extension(x509.SubjectAlternativeName(sans), critical=False)
    # Leaf certificate, not a CA.
    builder = builder.add_extension(x509.BasicConstraints(ca=False, path_length=None), critical=True)
    certificate = builder.sign(private_key=private_key, algorithm=hashes.SHA256(), backend=default_backend())
    return (certificate.public_bytes(serialization.Encoding.PEM), private_key.private_bytes(serialization.Encoding.PEM, serialization.PrivateFormat.PKCS8, serialization.NoEncryption()))
class AdReportRun(AdReportRunMixin, AbstractCrudObject):
    """Graph API node for an asynchronous Ads Insights report run.

    Generated-SDK style CRUD object: ``Field`` enumerates the node's field
    names and ``_field_types`` maps them to their Graph API types.
    """

    def __init__(self, fbid=None, parent_id=None, api=None):
        self._isAdReportRun = True
        super(AdReportRun, self).__init__(fbid, parent_id, api)

    class Field(AbstractObject.Field):
        # Field-name constants; the values are the Graph API field names.
        account_id = 'account_id'
        async_percent_completion = 'async_percent_completion'
        async_status = 'async_status'
        date_start = 'date_start'
        date_stop = 'date_stop'
        emails = 'emails'
        friendly_name = 'friendly_name'
        id = 'id'
        is_bookmarked = 'is_bookmarked'
        is_running = 'is_running'
        schedule_id = 'schedule_id'
        time_completed = 'time_completed'
        time_ref = 'time_ref'
        # Request-only parameters (not returned as node fields).
        action_attribution_windows = 'action_attribution_windows'
        action_breakdowns = 'action_breakdowns'
        action_report_time = 'action_report_time'
        breakdowns = 'breakdowns'
        date_preset = 'date_preset'
        default_summary = 'default_summary'
        export_columns = 'export_columns'
        export_format = 'export_format'
        export_name = 'export_name'
        fields = 'fields'
        filtering = 'filtering'
        level = 'level'
        product_id_limit = 'product_id_limit'
        sort = 'sort'
        summary = 'summary'
        summary_action_breakdowns = 'summary_action_breakdowns'
        time_increment = 'time_increment'
        time_range = 'time_range'
        time_ranges = 'time_ranges'
        use_account_attribution_setting = 'use_account_attribution_setting'
        use_unified_attribution_setting = 'use_unified_attribution_setting'

    # NOTE(review): takes `cls` but no @classmethod decorator is visible —
    # the decorator appears stripped in this copy; confirm against upstream.
    def get_endpoint(cls):
        return 'insights'

    def api_create(self, parent_id, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Start a new async insights run under the given ad account."""
        from facebook_business.adobjects.adaccount import AdAccount
        return AdAccount(api=self._api, fbid=parent_id).get_insights_async(fields, params, batch, success, failure, pending)

    def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Read this report-run node; returns the request when batched/pending."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AdReportRun, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def get_insights(self, fields=None, params=None, is_async=False, batch=None, success=None, failure=None, pending=False):
        """Fetch the insights edge of this run (async variant when is_async)."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.adsinsights import AdsInsights
        if is_async:
            return self.get_insights_async(fields, params, batch, success, failure, pending)
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/insights', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AdsInsights, api_type='EDGE', response_parser=ObjectParser(target_class=AdsInsights, api=self._api), include_summary=False)
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    # Graph API type of each field, keyed by field name.
    _field_types = {'account_id': 'string', 'async_percent_completion': 'unsigned int', 'async_status': 'string', 'date_start': 'string', 'date_stop': 'string', 'emails': 'list<string>', 'friendly_name': 'string', 'id': 'string', 'is_bookmarked': 'bool', 'is_running': 'bool', 'schedule_id': 'string', 'time_completed': 'unsigned int', 'time_ref': 'unsigned int', 'action_attribution_windows': 'list<ActionAttributionWindows>', 'action_breakdowns': 'list<ActionBreakdowns>', 'action_report_time': 'ActionReportTime', 'breakdowns': 'list<Breakdowns>', 'date_preset': 'DatePreset', 'default_summary': 'bool', 'export_columns': 'list<string>', 'export_format': 'string', 'export_name': 'string', 'fields': 'list<string>', 'filtering': 'list<Object>', 'level': 'Level', 'product_id_limit': 'int', 'sort': 'list<string>', 'summary': 'list<string>', 'summary_action_breakdowns': 'list<SummaryActionBreakdowns>', 'time_increment': 'string', 'time_range': 'map', 'time_ranges': 'list<map>', 'use_account_attribution_setting': 'bool', 'use_unified_attribution_setting': 'bool'}

    # NOTE(review): also takes `cls` with no visible @classmethod — see above.
    def _get_field_enum_info(cls):
        field_enum_info = {}
        return field_enum_info
def train_multi_gpu_model(model: nn.Module, world_size: int, loss_function: nn.Module, sampler: torch.utils.data.Sampler, data_loader: torch.utils.data.DataLoader, optimizer: torch.optim.Optimizer, device: Union[(str, torch.device)], epochs: int=1, window_size: int=100, use_distributed_checkpoint: bool=False, checkpoint_dir: Optional[str]=None) -> Tuple[(float, float, int)]:
    """Train a model across multiple GPUs with manual gradient averaging.

    Optionally loads/saves a distributed checkpoint (DistributedShampoo only).

    Returns:
        Tuple of (lifetime loss, windowed loss, iteration count) from the
        metrics tracker.

    Raises:
        ValueError: if distributed checkpointing is requested without
            DistributedShampoo or without a checkpoint directory.
    """
    # Distributed checkpointing is tied to DistributedShampoo's state-dict API.
    if (use_distributed_checkpoint and (not isinstance(optimizer, DistributedShampoo))):
        raise ValueError('Distributed checkpointing is only supported with DistributedShampoo!')
    if (use_distributed_checkpoint and (checkpoint_dir is None)):
        raise ValueError('Trying to use distributed checkpointing but checkpoint directory is not provided!')
    metrics = LossMetrics(window_size=window_size, device=device, world_size=world_size)
    # Resume from an existing checkpoint if one is present (.metadata marks it).
    if (use_distributed_checkpoint and isinstance(optimizer, DistributedShampoo) and os.path.exists((checkpoint_dir + '/.metadata'))):
        state_dict = {'model': model.state_dict(), 'optim': optimizer.distributed_state_dict(key_to_param=model.named_parameters())}
        dist_checkpoint.load_state_dict(state_dict=state_dict, storage_reader=dist_checkpoint.FileSystemReader(checkpoint_dir))
        model.load_state_dict(state_dict['model'])
        optimizer.load_distributed_state_dict(state_dict['optim'], key_to_param=model.named_parameters())
    for epoch in range(epochs):
        metrics._epoch = epoch
        # Reshuffle per epoch so each rank sees a different partition.
        sampler.set_epoch(epoch)
        for (inputs, labels) in data_loader:
            (inputs, labels) = (inputs.to(device), labels.to(device))
            optimizer.zero_grad()
            output = model(inputs)
            loss = loss_function(output, labels)
            loss.backward()
            # Manually all-reduce gradients across ranks before stepping.
            average_gradients(model, world_size)
            optimizer.step()
            metrics.update(loss)
            metrics.log()
            metrics.update_global_metrics()
            # Only rank 0 prints the aggregated (cross-rank) metrics.
            if (LOCAL_RANK == 0):
                metrics.log_global_metrics()
    # Persist the final model/optimizer state as a distributed checkpoint.
    if (use_distributed_checkpoint and isinstance(optimizer, DistributedShampoo)):
        state_dict = {'model': model.state_dict(), 'optim': optimizer.distributed_state_dict(key_to_param=model.named_parameters())}
        dist_checkpoint.save_state_dict(state_dict=state_dict, storage_writer=dist_checkpoint.FileSystemWriter(checkpoint_dir))
    return (metrics._lifetime_loss, metrics._window_loss, metrics._iteration)
class Monitor(object):
    """Swagger-generated model for a monitor resource.

    NOTE(review): each pair of identically named methods below (links, mid,
    monitor, status) is a property getter/setter; the @property/@x.setter
    decorators appear stripped in this copy — as written, the second def of
    each pair shadows the first.
    """

    # Attribute name -> declared swagger type.
    swagger_types = {'links': 'MonitorLinks', 'mid': 'MonitorId', 'monitor': 'MonitorParameter', 'status': 'MonitorStatus'}
    # Attribute name -> JSON key used on the wire.
    attribute_map = {'links': '_links', 'mid': 'mid', 'monitor': 'monitor', 'status': 'status'}

    def __init__(self, links=None, mid=None, monitor=None, status=None):
        self._links = None
        self._mid = None
        self._monitor = None
        self._status = None
        self.discriminator = None
        # links is optional; mid/monitor/status go through the setters,
        # which reject None.
        if (links is not None):
            self.links = links
        self.mid = mid
        self.monitor = monitor
        self.status = status

    def links(self):
        return self._links

    def links(self, links):
        self._links = links

    def mid(self):
        return self._mid

    def mid(self, mid):
        if (mid is None):
            raise ValueError('Invalid value for `mid`, must not be `None`')
        self._mid = mid

    def monitor(self):
        return self._monitor

    def monitor(self, monitor):
        if (monitor is None):
            raise ValueError('Invalid value for `monitor`, must not be `None`')
        self._monitor = monitor

    def status(self):
        return self._status

    def status(self, status):
        if (status is None):
            raise ValueError('Invalid value for `status`, must not be `None`')
        self._status = status

    def to_dict(self):
        """Recursively serialize the model's swagger attributes to a dict."""
        result = {}
        for (attr, _) in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
            else:
                result[attr] = value
        # Generated-code pattern: include mapping items if the model subclasses dict.
        if issubclass(Monitor, dict):
            for (key, value) in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Pretty-printed string form of to_dict()."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        if (not isinstance(other, Monitor)):
            return False
        return (self.__dict__ == other.__dict__)

    def __ne__(self, other):
        return (not (self == other))
def add_extra_stars(argv, extraStars):
    """Append newly found stars to ``argv.extraFile``, one per line.

    Persisting is best-effort: a missing parent directory is silently
    ignored. Does nothing when ``extraStars`` is empty.
    """
    if not extraStars:
        return
    if argv.detail:
        print('Saving {} to {}'.format(str(extraStars), argv.extraFile))
    try:
        with open(argv.extraFile, 'a', encoding='utf-8') as output:
            output.writelines(f'{star}\n' for star in extraStars)
    except FileNotFoundError:
        # Destination path unavailable: skip persisting, by design.
        pass
# NOTE(review): the three bare lines below look like stripped click
# decorators (@click.command(), @click.argument('scanfile', ...),
# @click.option('-p', ...)) — restore them for the CLI entry point to work.
()
('scanfile', type=click.File())
('-p', 'protocol', default='tcp', type=click.Choice(['tcp', 'udp', 'sctp']), help='The protocol (default=tcp)')
def cmd_nmap_open(scanfile, protocol):
    """Print the open-port results parsed from an nmap output file.

    Accepts xml, nmap, or gnmap formats; prints a comma-separated list of
    results and returns True, or returns 1 on an unknown format.
    """
    data = scanfile.read()
    fmt = detect_format(data)
    if (fmt not in ['xml', 'nmap', 'gnmap']):
        # NOTE(review): error text goes to stdout here — possibly intended
        # for CLI-runner capture, but stderr may be more appropriate; confirm.
        print('Unknown file format.', file=sys.stdout)
        return 1
    if (fmt == 'nmap'):
        result = parse_format_nmap(data, protocol)
    elif (fmt == 'gnmap'):
        result = parse_format_gnmap(data, protocol)
    elif (fmt == 'xml'):
        result = parse_format_xml(data, protocol)
    print(','.join([str(r) for r in result]), end='')
    return True
class XClipClipboarder(Clipboarder):
    """Clipboard backend built on the external ``xclip`` tool (X11 only)."""

    # Last text copied by this instance; lets clear_clipboard_after wipe the
    # clipboard only when nothing else has overwritten it since.
    __last_copied_characters: str

    # NOTE(review): `supported` and `name` take no `self` — upstream these are
    # @staticmethod; the decorators appear stripped in this copy.
    def supported() -> bool:
        return ((not is_wayland()) and is_installed('xclip'))

    def name() -> str:
        return 'xclip'

    def copy_to_clipboard(self, characters: str) -> None:
        run(['xclip', '-in', '-selection', 'clipboard'], input=characters, encoding='utf-8')
        self.__last_copied_characters = characters

    def fetch_clipboard_content(self) -> str:
        return run(['xclip', '-o', '-selection', 'clipboard'], capture_output=True, encoding='utf-8').stdout

    def clear_clipboard_after(self, clear: int) -> None:
        # assumes copy_to_clipboard ran first; otherwise the attribute is
        # unset and the comparison raises AttributeError — TODO confirm.
        if (clear > 0):
            time.sleep(clear)
            if (self.fetch_clipboard_content() == self.__last_copied_characters):
                self.copy_to_clipboard('')
            self.__last_copied_characters = None
def downgrade():
    """Revert the migration: restore the direct role_id FK on
    panel_permissions and drop the roles_panels association table."""
    op.add_column('panel_permissions', sa.Column('role_id', sa.INTEGER(), autoincrement=False, nullable=True))
    op.create_foreign_key('panel_permissions_role_id_fkey', 'panel_permissions', 'custom_sys_roles', ['role_id'], ['id'], ondelete='CASCADE')
    op.drop_table('roles_panels')
class TestElectionSearch(ApiBaseTest):
    """API tests for the elections list endpoint (search, sort, counts)."""

    def setUp(self):
        # Fixture: one presidential, three senate, four house elections.
        super().setUp()
        factories.ElectionsListFactory(office='P', state='US', district='00', incumbent_id='P12345')
        factories.ElectionsListFactory(office='S', state='NJ', district='00', incumbent_id='SNJ123')
        factories.ElectionsListFactory(office='H', state='NJ', district='09', incumbent_id='HNJ123')
        factories.ElectionsListFactory(office='S', state='VA', district='00', incumbent_id='SVA123')
        factories.ElectionsListFactory(office='H', state='VA', district='04', incumbent_id='HVA121')
        factories.ElectionsListFactory(office='H', state='VA', district='05', incumbent_id='HVA123')
        factories.ElectionsListFactory(office='H', state='VA', district='06', incumbent_id='HVA124')
        factories.ElectionsListFactory(office='S', state='GA', district='00', incumbent_id='SGA123')

    def test_search_district(self):
        """A state+district query also returns the overlapping P and S races."""
        results = self._results(api.url_for(ElectionsListView, state='NJ', district='09'))
        self.assertEqual(len(results), 3)
        assert_dicts_subset(results[0], {'cycle': 2012, 'office': 'P', 'state': 'US', 'district': '00'})
        assert_dicts_subset(results[1], {'cycle': 2012, 'office': 'S', 'state': 'NJ', 'district': '00'})
        assert_dicts_subset(results[2], {'cycle': 2012, 'office': 'H', 'state': 'NJ', 'district': '09'})

    def test_search_district_padding(self):
        """District numbers are zero-padded, so 9 and '09' match identically."""
        results_padded = self._results(api.url_for(ElectionsListView, district='09'))
        results_unpadded = self._results(api.url_for(ElectionsListView, district=9))
        self.assertEqual(len(results_padded), len(results_unpadded))
        self.assertEqual(len(results_unpadded), 5)

    def test_search_office(self):
        """Filtering by office name returns only that office."""
        results = self._results(api.url_for(ElectionsListView, office='senate'))
        self.assertEqual(len(results), 3)
        self.assertTrue(all([(each['office'] == 'S') for each in results]))

    def test_search_zip(self):
        """A ZIP code resolves to its district plus overlapping P and S races."""
        factories.ZipsDistrictsFactory(district='05', zip_code='22902', state_abbrevation='VA')
        results = self._results(api.url_for(ElectionsListView, zip='22902'))
        assert (len(results) == 3)
        assert_dicts_subset(results[0], {'cycle': 2012, 'office': 'P', 'state': 'US', 'district': '00'})
        assert_dicts_subset(results[1], {'cycle': 2012, 'office': 'S', 'state': 'VA', 'district': '00'})
        assert_dicts_subset(results[2], {'cycle': 2012, 'office': 'H', 'state': 'VA', 'district': '05'})

    def test_counts(self):
        """Pagination count in the footer matches the number of results."""
        response = self._response(api.url_for(ElectionsListView))
        footer_count = response['pagination']['count']
        results_count = len(response['results'])
        self.assertEqual(footer_count, results_count)

    def test_search_sort_default(self):
        """Default ordering is P, S, then H by district number."""
        results = self._results(api.url_for(ElectionsListView, state='VA'))
        self.assertEqual(results[0]['office'], 'P')
        self.assertEqual(results[1]['office'], 'S')
        self.assertEqual(results[2]['district'], '04')
        self.assertEqual(results[3]['district'], '05')
        self.assertEqual(results[4]['district'], '06')

    def test_search_sort_state(self):
        """Unfiltered results come back ordered by state abbreviation."""
        results = self._results(api.url_for(ElectionsListView))
        # FIX: this previously used assertTrue(list, msg), which passes for
        # any non-empty list — the expected ordering was never checked.
        self.assertEqual([each['state'] for each in results], ['GA', 'NJ', 'NJ', 'US', 'VA', 'VA', 'VA', 'VA'])
class TlsPrivateKeyData(ModelNormal):
    """Generated OpenAPI model for TLS private key data.

    NOTE(review): the bare `_property` and `_js_args_to_python_args` lines
    below appear to be stripped decorators (likely @cached_property and a
    JS->Python kwargs converter) — confirm against the generated upstream.
    """

    # No enum constraints or extra validations for this model.
    allowed_values = {}
    validations = {}

    _property
    def additional_properties_type():
        # Types accepted for properties not declared in attribute_map.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    _property
    def openapi_types():
        # Declared attribute name -> (accepted type,) mapping.
        lazy_import()
        return {'type': (TypeTlsPrivateKey,), 'attributes': (TlsPrivateKeyDataAttributes,), 'relationships': (RelationshipsForTlsPrivateKey,)}

    _property
    def discriminator():
        return None

    # Python attribute name -> JSON key on the wire.
    attribute_map = {'type': 'type', 'attributes': 'attributes', 'relationships': 'relationships'}
    read_only_vars = {}
    _composed_schemas = {}

    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Deserialization constructor: build an instance from API data."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop keys not declared in the schema.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal attributes that are always settable regardless of read_only_vars.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """User-facing constructor; rejects writes to read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def extractCaehannWordpressCom(item):
    """Build a release message for a caehann.wordpress.com feed item.

    Returns None for previews or items with no volume/chapter info, a
    release message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag to look for, release name, translation type)
    known_tags = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in known_tags:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesAreasplineSonificationDefaultinstrumentoptionsMappingGapbetweennotes(Options):
    """Generated options wrapper for the gap-between-notes sonification mapping.

    NOTE(review): each same-named method pair below is a property
    getter/setter; the @property/@x.setter decorators appear stripped in
    this copy, so as written the setter def shadows the getter.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def select_output_folder():
    """Prompt for a base directory and create the 'AAR MOD' output tree.

    Sets the module-level `output_folder` (romfs layout archives) and
    `patch_folder` (exefs patches) globals, and reports progress/errors via
    the `status_label` widget.
    """
    global output_folder
    global patch_folder
    output_folder = askdirectory()
    if output_folder:
        # exefs patches and romfs layout archives live in separate subtrees.
        patch_folder = os.path.join(output_folder, 'AAR MOD', 'exefs')
        output_folder = os.path.join(output_folder, 'AAR MOD', 'romfs', 'UI', 'LayoutArchive')
        try:
            os.makedirs(output_folder, exist_ok=True)
            Path(patch_folder).mkdir(parents=True, exist_ok=True)
        except Exception as e:
            status_label.config(text=f'Error: {str(e)}')
            return
        status_label.config(text=f'Output folder selected: {output_folder}')
    else:
        status_label.config(text='No output folder selected.')
def test_variables_cast_as_category_frequent(df_na):
    """Frequent-category imputation must also handle 'category' dtype columns."""
    df_na = df_na.copy()
    df_na['City'] = df_na['City'].astype('category')
    df_na.drop(labels=['Name'], axis=1, inplace=True)
    imputer = CategoricalImputer(imputation_method='frequent', variables=None)
    transformed = imputer.fit_transform(df_na)
    # Expected frame: NaNs replaced by each column's most frequent value.
    expected = df_na.copy()
    expected['Studies'] = expected['Studies'].fillna('Bachelor')
    expected['City'] = expected['City'].fillna('London')
    assert imputer.variables_ == ['City', 'Studies']
    assert imputer.imputer_dict_ == {'City': 'London', 'Studies': 'Bachelor'}
    assert transformed[['City', 'Studies']].isnull().sum().sum() == 0
    # Numerical columns are untouched by the categorical imputer.
    assert transformed[['Age', 'Marks']].isnull().sum().sum() > 0
    pd.testing.assert_frame_equal(transformed, expected)
def create_cloudwatch_event(app_name, env, region, rules):
    """Create a CloudWatch Events rule that triggers the app's Lambda.

    Args:
        app_name: Lambda application name (also used in the rule name).
        env: account/profile name used for the boto3 session.
        region: AWS region for the rule and Lambda.
        rules: dict with rule_name, rule_type ('schedule' or
            'event_pattern'), schedule, event_pattern, rule_description,
            and optional json_input payload for the target.

    Raises:
        InvalidEventConfiguration: when the required rule_name, schedule,
            or event_pattern is missing for the chosen rule_type.
    """
    session = boto3.Session(profile_name=env, region_name=region)
    cloudwatch_client = session.client('events')
    rule_name = rules.get('rule_name')
    rule_type = rules.get('rule_type', 'schedule')
    schedule = rules.get('schedule')
    event_pattern = rules.get('event_pattern')
    rule_description = rules.get('rule_description')
    json_input = rules.get('json_input', {})
    # Validate the configuration required by the chosen rule type.
    if ((rule_type == 'schedule') and (schedule is None)):
        LOG.critical('A CloudWatch Schedule is required and no schedule pattern is defined!')
        raise InvalidEventConfiguration('A CloudWatch Schedule is required and no schedule is defined!')
    if ((rule_type == 'event_pattern') and (event_pattern is None)):
        LOG.critical('A CloudWatch Event Pattern is required and no event pattern is defined!')
        raise InvalidEventConfiguration('A CloudWatch Event Pattern is required and no event pattern is defined!')
    if (rule_name is None):
        LOG.critical('Rule name is required and no rule_name is defined!')
        raise InvalidEventConfiguration('Rule name is required and no rule_name is defined!')
    else:
        LOG.info('%s and %s', app_name, rule_name)
        # Namespace the rule under the app; spaces are not rule-name safe.
        rule_name = '{}_{}'.format(app_name, rule_name.replace(' ', '_'))
    if (rule_description is None):
        rule_description = '{} - {}'.format(app_name, rule_name)
    lambda_arn = get_lambda_arn(app=app_name, account=env, region=region)
    account_id = get_env_credential(env=env)['accountId']
    # Allow CloudWatch Events to invoke the Lambda before wiring the rule.
    principal = 'events.amazonaws.com'
    statement_id = 'cloudwatch_{}'.format(rule_name)
    source_arn = 'arn:aws:events:{}:{}:rule/{}'.format(region, account_id, rule_name)
    add_lambda_permissions(function=lambda_arn, statement_id=statement_id, action='lambda:InvokeFunction', principal=principal, source_arn=source_arn, env=env, region=region)
    if (rule_type == 'schedule'):
        cloudwatch_client.put_rule(Name=rule_name, ScheduleExpression=schedule, State='ENABLED', Description=rule_description)
        LOG.info('Created CloudWatch Rule "%s" with %s: %s', rule_name, rule_type, schedule)
    elif (rule_type == 'event_pattern'):
        cloudwatch_client.put_rule(Name=rule_name, EventPattern=json.dumps(event_pattern), State='ENABLED', Description=rule_description)
        LOG.info('Created CloudWatch Rule "%s" with %s: %s', rule_name, rule_type, event_pattern)
    targets = [{'Id': app_name, 'Arn': lambda_arn}]
    # Optional constant JSON payload delivered to the target on each trigger.
    if json_input:
        json_payload = '{}'.format(json.dumps(json_input))
        for each_target in targets:
            each_target['Input'] = json_payload
    put_targets_response = cloudwatch_client.put_targets(Rule=rule_name, Targets=targets)
    LOG.debug('CloudWatch PutTargets Response: %s', put_targets_response)
    LOG.info('Configured CloudWatch Rule Target: %s', lambda_arn)
def test_pushing_and_popping_frames(byte_string, byte_stream):
    """Frames make reads/seeks relative to a pushed offset; pops restore the
    outer frame (and its read position), and popping past the root raises.
    """
    byte_stream.push_frame(5)
    # Reads are now offset by 5 relative to the stream start.
    assert (byte_stream.read(5) == byte_string[5:10])
    byte_stream.seek_in_frame(1)
    assert (byte_stream.read(1) == byte_string[6:7])
    byte_stream.seek_in_frame(5)
    # Nested frame: offsets accumulate (5 + 2 = 7).
    byte_stream.push_frame(2)
    assert (byte_stream.read(5) == byte_string[7:12])
    byte_stream.seek_in_frame(2)
    assert (byte_stream.read(1) == byte_string[9:10])
    byte_stream.pop_frame()
    # Back in the outer frame, continuing from its saved position.
    assert (byte_stream.read(5) == byte_string[10:15])
    byte_stream.pop_frame()
    # Root frame resumes at the stream start.
    assert (byte_stream.read(5) == byte_string[0:5])
    with pytest.raises(IndexError):
        byte_stream.pop_frame()
def to_match(dp, attrs):
    """Build an OpenFlow 1.0 OFPMatch from a user-supplied attribute dict.

    Starts with all-wildcards and, for each recognized key in ``attrs``,
    sets the corresponding match field and clears its wildcard bit.
    Unrecognized keys are logged and ignored.
    """
    ofp = dp.ofproto
    # Every field wildcarded until explicitly matched below.
    wildcards = ofp.OFPFW_ALL
    in_port = 0
    dl_src = 0
    dl_dst = 0
    dl_vlan = 0
    dl_vlan_pcp = 0
    dl_type = 0
    nw_tos = 0
    nw_proto = 0
    nw_src = 0
    nw_dst = 0
    tp_src = 0
    tp_dst = 0
    for (key, value) in attrs.items():
        if (key == 'in_port'):
            in_port = UTIL.ofp_port_from_user(value)
            wildcards &= (~ ofp.OFPFW_IN_PORT)
        elif (key == 'dl_src'):
            dl_src = haddr_to_bin(value)
            wildcards &= (~ ofp.OFPFW_DL_SRC)
        elif (key == 'dl_dst'):
            dl_dst = haddr_to_bin(value)
            wildcards &= (~ ofp.OFPFW_DL_DST)
        elif (key == 'dl_vlan'):
            dl_vlan = str_to_int(value)
            wildcards &= (~ ofp.OFPFW_DL_VLAN)
        elif (key == 'dl_vlan_pcp'):
            dl_vlan_pcp = str_to_int(value)
            wildcards &= (~ ofp.OFPFW_DL_VLAN_PCP)
        elif (key == 'dl_type'):
            dl_type = str_to_int(value)
            wildcards &= (~ ofp.OFPFW_DL_TYPE)
        elif (key == 'nw_tos'):
            nw_tos = str_to_int(value)
            wildcards &= (~ ofp.OFPFW_NW_TOS)
        elif (key == 'nw_proto'):
            nw_proto = str_to_int(value)
            wildcards &= (~ ofp.OFPFW_NW_PROTO)
        elif (key == 'nw_src'):
            # Value may be 'a.b.c.d' or 'a.b.c.d/prefix' (CIDR).
            ip = value.split('/')
            nw_src = struct.unpack('!I', socket.inet_aton(ip[0]))[0]
            mask = 32
            if (len(ip) == 2):
                mask = int(ip[1])
                assert (0 < mask <= 32)
            # Encode (32 - prefix) into the NW_SRC wildcard bit-field.
            v = (((32 - mask) << ofp.OFPFW_NW_SRC_SHIFT) | (~ ofp.OFPFW_NW_SRC_MASK))
            wildcards &= v
        elif (key == 'nw_dst'):
            ip = value.split('/')
            nw_dst = struct.unpack('!I', socket.inet_aton(ip[0]))[0]
            mask = 32
            if (len(ip) == 2):
                mask = int(ip[1])
                assert (0 < mask <= 32)
            v = (((32 - mask) << ofp.OFPFW_NW_DST_SHIFT) | (~ ofp.OFPFW_NW_DST_MASK))
            wildcards &= v
        elif (key == 'tp_src'):
            tp_src = str_to_int(value)
            wildcards &= (~ ofp.OFPFW_TP_SRC)
        elif (key == 'tp_dst'):
            tp_dst = str_to_int(value)
            wildcards &= (~ ofp.OFPFW_TP_DST)
        else:
            LOG.error('unknown match name %s, %s, %d', key, value, len(key))
    match = dp.ofproto_parser.OFPMatch(wildcards, in_port, dl_src, dl_dst, dl_vlan, dl_vlan_pcp, dl_type, nw_tos, nw_proto, nw_src, nw_dst, tp_src, tp_dst)
    return match
# NOTE(review): the line below looks like a stripped Flask route decorator,
# e.g. @app.route('/.well-known/ai-plugin.json') — confirm against the full file.
('/.well-known/ai-plugin.json')
def plugin_manifest():
    """Serve ai-plugin.json with $CHATGPT_PLUGIN_URL substituted into it."""
    with open('./ai-plugin.json', 'r') as f:
        raw_text = f.read()
    template = string.Template(raw_text)
    sub = template.substitute({'CHATGPT_PLUGIN_URL': CHATGPT_PLUGIN_URL})
    return flask.Response(sub, mimetype='text/json')
def test_env_variable_interpolation(config, json_config_file_3):
    """Environment variable references in the JSON file are interpolated on load."""
    config.from_json(json_config_file_3)
    expected_section = {'value1': 'test-value', 'value2': 'test-path/path'}
    # Leaf values first, then the section and root views built from them.
    assert config.section1.value1() == 'test-value'
    assert config.section1.value2() == 'test-path/path'
    assert config.section1() == expected_section
    assert config() == {'section1': expected_section}
def add_cli_option(cli_parser, setting_name, setting_info):
    """Register one integer CLI option on ``cli_parser``.

    ``setting_info`` supplies the flag spellings ('cli'), 'description',
    'input_range' (min, max) and 'default'; the parsed value is stored
    under the upper-cased setting name.
    """
    low, high = setting_info['input_range'][0], setting_info['input_range'][1]
    dest = setting_name.upper()
    help_text = '%s (from %i to %i, default: %i)' % (setting_info['description'], low, high, setting_info['default'])
    cli_parser.add_argument(*setting_info['cli'], help=help_text, dest=dest, type=int, metavar=dest)
class UDisksPropertyWrapper():
    """Proxy that prepends a fixed interface name to every forwarded call.

    Any attribute access resolves to a callable on the wrapped object, with
    ``iface_type`` inserted as the first positional argument.
    """

    def __init__(self, obj, iface_type):
        # Wrapped proxy object and the interface name to inject.
        self.obj = obj
        self.iface_type = iface_type

    def __getattr__(self, name):
        def forward(*args, **kwargs):
            target = self.obj.__getattr__(name)
            return target(*((self.iface_type,) + args), **kwargs)
        return forward

    def __repr__(self):
        return ('<UDisksPropertyWrapper: %s>' % self.iface_type)
# NOTE(review): '.django_db' below looks like a stripped
# @pytest.mark.django_db decorator — restore it for DB access in this test.
.django_db
def test_tas_unparsable_too_short(client, monkeypatch, elasticsearch_award_index, award_with_tas):
    """A TAS filter component with too few pieces must yield HTTP 422."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    resp = query_by_tas(client, {'require': [['011', '011-0990', '3-4-2']]})
    assert (resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY), 'Failed to return 422 Response'
class Add(BinaryOp):
    """Symbolic pointwise sum of two tensors with matching shapes.

    Raises:
        ValueError: when the operands' shapes differ.
    """
    def __init__(self, A, B):
        if (A.shape != B.shape):
            raise ValueError(('Illegal op on a %s-tensor with a %s-tensor.' % (A.shape, B.shape)))
        # NOTE(review): assert-based validation is stripped under `python -O`.
        assert all([space_equivalence(fsA, fsB) for (fsA, fsB) in zip(A.arg_function_spaces, B.arg_function_spaces)]), 'Function spaces associated with operands must match.'
        super(Add, self).__init__(A, B)
        self._args = A.arguments()
    # NOTE(review): '_property' below appears to be a stripped @property decorator.
    _property
    def arg_function_spaces(self):
        # Operand spaces were validated equivalent, so A's spaces suffice.
        (A, _) = self.operands
        return A.arg_function_spaces
    def arguments(self):
        return self._args
# NOTE(review): the line below looks like a stripped
# @pytest.mark.parametrize decorator — restore it for the test to run.
.parametrize('str_template', ['{this_key_is_not_in_the_dict}', '{id}/{neither_is_this_one}'])
def test_load_with_bad_custom_str_template(str_template, one_acct_list):
    """Templates referencing keys absent from the account dicts are rejected."""
    with pytest.raises(acctload.InvalidFormatTemplateError):
        acctload.MetaAccountLoader(one_acct_list, str_template=str_template)
def _process_packages_protocol(package_path: Path, preserve_generator_docstring: bool=False) -> None:
    """Regenerate one protocol package from the spec embedded in its README.

    Args:
        package_path: path to the protocol package directory.
        preserve_generator_docstring: when True, capture the existing
            generator docstring first and re-apply it after regeneration.
    """
    if preserve_generator_docstring:
        old_protocol_generator_docstring = _parse_generator_docstring(package_path)
    # Extract the specification and run the protocol generator on it.
    specification_content = get_protocol_specification_from_readme(package_path)
    _save_specification_in_temporary_file(package_path.name, specification_content)
    _generate_protocol(package_path)
    _fix_generated_protocol(package_path)
    if preserve_generator_docstring:
        _replace_generator_docstring(package_path, old_protocol_generator_docstring)
    # Normalize headers/formatting, then refresh the package fingerprint.
    _set_copyright_header(Path(PROTOCOLS_PLURALS, package_path.name))
    run_isort_and_black(Path(PROTOCOLS_PLURALS, package_path.name), cwd=str(ROOT_DIR))
    _fingerprint_protocol(package_path.name)
    _update_original_protocol(package_path)
def URLTab(accent_color, bg):
    """Build the 'URL' tab: an input field with play/queue/play-next radios
    and a submit button styled with the given accent/background colors."""
    layout = [[Sg.Text(t('Enter URL'), font=FONT_NORMAL)], [Sg.Radio(t('Play Immediately'), 'url_option', key='url_play', default=True), Sg.Radio(t('Queue'), 'url_option', key='url_queue'), Sg.Radio(t('Play Next'), 'url_option', key='url_play_next')], [Sg.Input(key='url_input', font=FONT_NORMAL, enable_events=True, border_width=1), StyledButton(t('Submit'), accent_color, bg, key='url_submit', bind_return_key=True)], [Sg.Text('', key='url_msg', size=(20, 1))]]
    return Sg.Tab(t('URL'), [[Sg.Column(layout, pad=(5, 20))]], key='tab_url')
class Stack(StackAPI):
    """EVM data stack: a list of int/bytes items with a hard 1024-item limit.

    Bound methods of the underlying list are cached on the instance so the
    hot push/pop paths skip repeated attribute lookups.
    """

    # FIX: the slot was declared '_pop_typed' while __init__ assigns
    # 'self._pop'; name the slot after the attribute actually used.
    __slots__ = ['values', '_append', '_pop', '__len__']
    logger = logging.getLogger('eth.vm.stack.Stack')

    def __init__(self) -> None:
        values: List[Union[(int, bytes)]] = []
        self.values = values
        # Cache bound list methods for fast access in hot paths.
        self._append = values.append
        self._pop = values.pop
        self.__len__ = values.__len__

    def push_int(self, value: int) -> None:
        """Push a validated int; raises FullStack at the 1024-item limit."""
        if (len(self.values) > 1023):
            raise FullStack('Stack limit reached')
        validate_stack_int(value)
        self._append(value)

    def push_bytes(self, value: bytes) -> None:
        """Push validated bytes; raises FullStack at the 1024-item limit."""
        if (len(self.values) > 1023):
            raise FullStack('Stack limit reached')
        validate_stack_bytes(value)
        self._append(value)

    def pop1_bytes(self) -> bytes:
        """Pop one item, coerced to bytes."""
        return to_bytes(self.pop1_any())

    def pop1_int(self) -> int:
        """Pop one item, coerced to int."""
        return to_int(self.pop1_any())

    def pop1_any(self) -> Union[(int, bytes)]:
        """Pop one item in whichever representation it was stored."""
        try:
            return self._pop()
        except IndexError:
            raise InsufficientStack('Wanted 1 stack item, had none')

    def pop_any(self, num_items: int) -> Tuple[(Union[(int, bytes)], ...)]:
        """Pop ``num_items`` items, top of stack first, without coercion."""
        if (num_items > len(self.values)):
            # FIX: the message previously received %-style args that were
            # never interpolated; format the message eagerly instead.
            raise InsufficientStack(f'Wanted {num_items} stack items, only had {len(self.values)}')
        ret = reversed(self.values[(- num_items):])
        del self.values[(- num_items):]
        return tuple(ret)

    def pop_ints(self, num_items: int) -> Tuple[(int, ...)]:
        return tuple((to_int(x) for x in self.pop_any(num_items)))

    def pop_bytes(self, num_items: int) -> Tuple[(bytes, ...)]:
        return tuple((to_bytes(x) for x in self.pop_any(num_items)))

    def swap(self, position: int) -> None:
        """Swap the top of the stack with the item ``position`` below it."""
        idx = (((- 1) * position) - 1)
        try:
            (self.values[(- 1)], self.values[idx]) = (self.values[idx], self.values[(- 1)])
        except IndexError:
            raise InsufficientStack(f'Insufficient stack items for SWAP{position}')

    def dup(self, position: int) -> None:
        """Push a copy of the item ``position`` down from the top."""
        if (len(self.values) > 1023):
            raise FullStack('Stack limit reached')
        try:
            self._append(self.values[(- position)])
        except IndexError:
            raise InsufficientStack(f'Insufficient stack items for DUP{position}')

    def _stack_items_str(self) -> Iterable[str]:
        # Hex-format each item for debugging output.
        for val in self.values:
            if isinstance(val, int):
                (yield hex(val))
            elif isinstance(val, bytes):
                (yield ('0x' + val.hex()))
            else:
                raise RuntimeError(f'Stack items can only be int or bytes, not {val!r}:{type(val)}')

    def __str__(self) -> str:
        return str(list(self._stack_items_str()))
def test_should_group_by_action_and_resource_independent_of_order():
    """Records sharing an action merge into one statement even when interleaved."""
    list_tags_some_db = Record('rds.amazonaws.com', 'ListTagsForResource', ['arn:aws:rds:eu-central-1::db:some-db'])
    something_different = Record('rds.amazonaws.com', 'SomethingDifferent', ['arn:aws:rds:eu-central-1::db:a-third-db'])
    list_tags_other_db = Record('rds.amazonaws.com', 'ListTagsForResource', ['arn:aws:rds:eu-central-1::db:some-other-db'])
    expected = PolicyDocument(
        Version='2012-10-17',
        Statement=[
            Statement(Effect='Allow', Action=[Action('rds', 'ListTagsForResource')], Resource=['arn:aws:rds:eu-central-1::db:some-db', 'arn:aws:rds:eu-central-1::db:some-other-db']),
            Statement(Effect='Allow', Action=[Action('rds', 'SomethingDifferent')], Resource=['arn:aws:rds:eu-central-1::db:a-third-db']),
        ],
    )
    actual = generate_policy([list_tags_some_db, something_different, list_tags_other_db])
    assert actual == expected
# Original line read '_settings(ROOT_URLCONF=__name__)' -- a bare call whose
# '@override' decorator prefix was lost, so the URLconf override never applied.
@override_settings(ROOT_URLCONF=__name__)
class BasicAuthTests(TestCase):
    """HTTP Basic authentication: valid credentials succeed, malformed or
    missing ones are rejected with 401."""
    def setUp(self):
        self.csrf_client = APIClient(enforce_csrf_checks=True)
        self.username = 'john'
        self.email = ''
        self.password = 'password'
        self.user = User.objects.create_user(self.username, self.email, self.password)
    def test_post_form_passing_basic_auth(self):
        """Form POST with correct credentials returns 200."""
        credentials = ('%s:%s' % (self.username, self.password))
        base64_credentials = base64.b64encode(credentials.encode(HTTP_HEADER_ENCODING)).decode(HTTP_HEADER_ENCODING)
        auth = ('Basic %s' % base64_credentials)
        response = self.csrf_client.post('/basic/', {'example': 'example'}, HTTP_AUTHORIZATION=auth)
        assert (response.status_code == status.HTTP_200_OK)
    def test_post_json_passing_basic_auth(self):
        """JSON POST with correct credentials returns 200."""
        credentials = ('%s:%s' % (self.username, self.password))
        base64_credentials = base64.b64encode(credentials.encode(HTTP_HEADER_ENCODING)).decode(HTTP_HEADER_ENCODING)
        auth = ('Basic %s' % base64_credentials)
        response = self.csrf_client.post('/basic/', {'example': 'example'}, format='json', HTTP_AUTHORIZATION=auth)
        assert (response.status_code == status.HTTP_200_OK)
    def test_post_json_without_password_failing_basic_auth(self):
        """Credentials without the ':password' part are rejected."""
        self.user.set_password('')
        credentials = ('%s' % self.username)
        base64_credentials = base64.b64encode(credentials.encode(HTTP_HEADER_ENCODING)).decode(HTTP_HEADER_ENCODING)
        auth = ('Basic %s' % base64_credentials)
        response = self.csrf_client.post('/basic/', {'example': 'example'}, format='json', HTTP_AUTHORIZATION=auth)
        assert (response.status_code == status.HTTP_401_UNAUTHORIZED)
    def test_regression_handle_bad_base64_basic_auth_header(self):
        """Un-decodable base64 in the header must yield 401, not a server error."""
        auth = 'Basic =a='
        response = self.csrf_client.post('/basic/', {'example': 'example'}, format='json', HTTP_AUTHORIZATION=auth)
        assert (response.status_code == status.HTTP_401_UNAUTHORIZED)
    def test_post_form_failing_basic_auth(self):
        """Form POST with no credentials returns 401."""
        response = self.csrf_client.post('/basic/', {'example': 'example'})
        assert (response.status_code == status.HTTP_401_UNAUTHORIZED)
    def test_post_json_failing_basic_auth(self):
        """JSON POST with no credentials returns 401 and a WWW-Authenticate challenge."""
        response = self.csrf_client.post('/basic/', {'example': 'example'}, format='json')
        assert (response.status_code == status.HTTP_401_UNAUTHORIZED)
        assert (response['WWW-Authenticate'] == 'Basic realm="api"')
    def test_fail_post_if_credentials_are_missing(self):
        """A 'Basic ' header with empty credentials is rejected."""
        response = self.csrf_client.post('/basic/', {'example': 'example'}, HTTP_AUTHORIZATION='Basic ')
        assert (response.status_code == status.HTTP_401_UNAUTHORIZED)
    def test_fail_post_if_credentials_contain_spaces(self):
        """A header whose credential part contains spaces is rejected."""
        response = self.csrf_client.post('/basic/', {'example': 'example'}, HTTP_AUTHORIZATION='Basic foo bar')
        assert (response.status_code == status.HTTP_401_UNAUTHORIZED)
    def test_decoding_of_utf8_credentials(self):
        """Credentials encoded as UTF-8 decode and authenticate correctly."""
        username = 'walterwhite'
        email = ''
        password = 'password'
        User.objects.create_user(username, email, password)
        credentials = ('%s:%s' % (username, password))
        base64_credentials = base64.b64encode(credentials.encode('utf-8')).decode(HTTP_HEADER_ENCODING)
        auth = ('Basic %s' % base64_credentials)
        response = self.csrf_client.post('/basic/', {'example': 'example'}, HTTP_AUTHORIZATION=auth)
        assert (response.status_code == status.HTTP_200_OK)
def pytest_configure(config):
    """Pytest hook: configure a minimal Django settings module for the test run.

    Does nothing when Django is not installed or settings were already
    configured elsewhere (e.g. by a real project settings module).
    """
    try:
        from django.conf import settings
    except ImportError:
        settings = None
    if ((settings is not None) and (not settings.configured)):
        import django
        # Self-contained sqlite-backed settings for the elasticapm Django test
        # app.  METRICS_INTERVAL='0ms' and the DummyTransport keep the APM
        # agent from doing real network I/O during tests.
        settings_dict = dict(SECRET_KEY='42', DATABASES={'default': {'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'elasticapm_tests.db', 'TEST_NAME': 'elasticapm_tests.db', 'TEST': {'NAME': 'elasticapm_tests.db'}}}, TEST_DATABASE_NAME='elasticapm_tests.db', INSTALLED_APPS=['django.contrib.auth', 'django.contrib.admin', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.redirects', 'django.contrib.contenttypes', 'elasticapm.contrib.django', 'tests.contrib.django.testapp'], ROOT_URLCONF='tests.contrib.django.testapp.urls', DEBUG=False, SITE_ID=1, BROKER_HOST='localhost', BROKER_PORT=5672, BROKER_USER='guest', BROKER_PASSWORD='guest', BROKER_VHOST='/', CELERY_ALWAYS_EAGER=True, TEMPLATE_DEBUG=False, TEMPLATE_DIRS=[BASE_TEMPLATE_DIR], ALLOWED_HOSTS=['*'], TEMPLATES=[{'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [BASE_TEMPLATE_DIR], 'OPTIONS': {'context_processors': ['django.contrib.auth.context_processors.auth'], 'loaders': ['django.template.loaders.filesystem.Loader'], 'debug': False}}], ELASTIC_APM={'METRICS_INTERVAL': '0ms', 'TRANSPORT_CLASS': 'tests.fixtures.DummyTransport', 'SERVICE_NAME': 'testapp', 'CENTRAL_CONFIG': False, 'CLOUD_PROVIDER': False})
        # Middleware key/name differs across Django versions; helper picks it.
        settings_dict.update(**middleware_setting(django.VERSION, ['django.contrib.sessions.middleware.SessionMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware']))
        settings.configure(**settings_dict)
        # django.setup() only exists from Django 1.7 onward.
        if hasattr(django, 'setup'):
            django.setup() |
class Solution():
    def isLongPressedName(self, name: str, typed: str) -> bool:
        """Return True if ``typed`` could result from typing ``name`` with some
        keys long-pressed (leetcode 925).

        Two-pointer scan: every character of ``name`` must appear in order in
        ``typed``; any surplus character in ``typed`` must repeat the character
        that was just matched.
        """
        if len(typed) < len(name):
            return False
        n_len, t_len = len(name), len(typed)
        i = j = 0
        while i < n_len and j < t_len:
            if name[i] == typed[j]:
                # Characters line up: consume one from each string.
                i += 1
                j += 1
            elif i > 0 and typed[j] == name[i - 1]:
                # Extra repeat of the previously matched character: skip it.
                j += 1
            else:
                return False
        if i < n_len:
            # Ran out of typed characters before matching all of name.
            return False
        # Any leftover typed characters must all repeat the final name character.
        return all(ch == name[-1] for ch in typed[j:])
def _move_view_op_before_concat(sorted_graph: List[Tensor]) -> Tuple[(bool, List[Tensor])]:
    """Graph pass: hoist view ops that follow a concatenate above that concat.

    Looks for the pattern ``concat -> view(s) -> concat`` and asks
    ``_try_move_view_op`` to move the view before the first concat.  Returns
    ``(changed, sorted_graph)``; mutation happens in place via the helper.
    """
    changed = False
    for tensor in sorted_graph:
        src_ops = tensor._attrs['src_ops']
        if (len(src_ops) == 0):
            continue
        first_cat = list(src_ops)[0]
        if (not _is_valid_cat_op(first_cat)):
            continue
        first_cat_outputs = first_cat._attrs['outputs']
        if (len(first_cat_outputs) != 1):
            continue
        first_cat_output = first_cat_outputs[0]
        if first_cat_output._attrs['is_output']:
            # Graph outputs must keep their producing op in place.
            continue
        next_ops = first_cat_output._attrs['dst_ops']
        if (len(next_ops) == 0):
            continue
        concat_ops = [op for op in next_ops if (op._attrs['op'] == 'concatenate')]
        if (len(concat_ops) > 0):
            # A direct concat consumer is not this pattern; skip.
            continue
        view_ops = [op for op in next_ops if (op._attrs['op'] in _SUPPORTED_VIEW_OPS)]
        if (len(view_ops) == 0):
            continue
        a_view_op = view_ops[0]
        view_output_shape = a_view_op._attrs['outputs'][0].shape()
        # All sibling view ops must agree on the output shape for the move to be valid.
        if ((len(view_ops) > 1) and (not all((shape_utils.is_same_shape(vop._attrs['outputs'][0].shape(), view_output_shape) for vop in view_ops)))):
            continue
        if any((vop._attrs['outputs'][0]._attrs['is_output'] for vop in view_ops)):
            continue
        (view_op, second_cat) = _get_valid_view_op_and_second_cat(view_ops)
        if (second_cat is None):
            continue
        if _try_move_view_op(first_cat, second_cat, view_op):
            changed = True
    return (changed, sorted_graph) |
class RoBERTaEncoder(TransformerEncoder[RoBERTaConfig], FromHFHub[RoBERTaConfig]):
    """RoBERTa encoder: embeddings plus a stack of transformer encoder layers.

    The Hugging Face interop helpers operate on the class rather than an
    instance (their first parameter is ``cls``); they are declared as
    classmethods -- the decorators were missing in the previous revision,
    which made ``RoBERTaEncoder.is_supported(cfg)`` etc. fail.
    """
    def __init__(self, config: RoBERTaConfig, *, device: Optional[torch.device]=None):
        """Build embeddings and encoder layers from ``config``.

        :param config: model hyperparameters.
        :param device: device on which parameters are allocated.
        """
        super().__init__(config)
        self.embeddings = RoBERTaEmbeddings(dropouts=EmbeddingDropouts(embed_output_dropout=Dropout(config.embedding.dropout_prob)), embedding_width=config.embedding.embedding_width, hidden_width=config.layer.feedforward.hidden_width, layer_norms=EmbeddingLayerNorms(embed_output_layer_norm=LayerNorm(config.embedding.embedding_width, config.embedding.layer_norm_eps)), n_pieces=config.embedding.n_pieces, n_positions=config.embedding.n_positions, n_types=config.embedding.n_types, padding_id=config.padding_id)
        self.max_seq_len = config.model_max_length
        hidden_width = config.layer.feedforward.hidden_width
        # Factory producing per-layer LayerNorms with shared width/eps/device.
        layer_norm = partial(LayerNorm, hidden_width, config.layer.layer_norm_eps, device=device)
        self.layers = torch.nn.ModuleList([EncoderLayer(attention_layer=SelfAttention(attention_heads=AttentionHeads.uniform(config.layer.attention.n_query_heads), attention_scorer=ScaledDotProductAttention(dropout_prob=config.layer.attention.dropout_prob, linear_biases=None), hidden_width=hidden_width, qkv_mode=QkvMode.SEPARATE, rotary_embeds=None, use_bias=config.layer.attention.use_bias, device=device), feed_forward_layer=PointwiseFeedForward(activation=config.layer.feedforward.activation.module(), hidden_width=hidden_width, intermediate_width=config.layer.feedforward.intermediate_width, use_bias=config.layer.feedforward.use_bias, use_gate=config.layer.feedforward.use_gate, device=device), dropouts=TransformerDropouts.layer_output_dropouts(config.layer.dropout_prob), layer_norms=TransformerLayerNorms(attn_residual_layer_norm=layer_norm(), ffn_residual_layer_norm=layer_norm()), use_parallel_attention=config.layer.attention.use_parallel_attention) for _ in range(config.layer.n_hidden_layers)])
    @classmethod
    def is_supported(cls: Type[Self], config: Dict[(str, Any)]) -> bool:
        """Return True when the HF config describes a RoBERTa model."""
        return (config.get('model_type') == 'roberta')
    @classmethod
    def state_dict_from_hf(cls: Type[Self], params: Mapping[(str, Tensor)]) -> Mapping[(str, Tensor)]:
        """Rename HF checkpoint parameter keys to curated names."""
        return state_dict_from_hf(params, HF_PARAM_KEY_TRANSFORMS)
    @classmethod
    def state_dict_to_hf(cls: Type[Self], params: Mapping[(str, Tensor)]) -> Mapping[(str, Tensor)]:
        """Rename curated parameter keys back to HF checkpoint names."""
        return state_dict_to_hf(params, HF_PARAM_KEY_TRANSFORMS)
    @classmethod
    def config_from_hf(cls, hf_config: Mapping[(str, Any)]) -> RoBERTaConfig:
        """Convert an HF config mapping into a RoBERTaConfig."""
        return _config_from_hf(hf_config)
    @classmethod
    def config_to_hf(cls, curated_config: RoBERTaConfig) -> Mapping[(str, Any)]:
        """Convert a RoBERTaConfig back into an HF config mapping."""
        return _config_to_hf(curated_config)
    @classmethod
    def from_hf_config(cls: Type[Self], *, hf_config: Any, device: Optional[torch.device]=None) -> Self:
        """Construct an encoder directly from an HF config."""
        config = cls.config_from_hf(hf_config)
        return cls(config, device=device)
def test_splitQuadratic():
    """splitQuadratic: split a quadratic Bezier at a vertical/horizontal line."""
    # 'where' beyond the curve's x-extent: no split, the original curve comes back.
    assert (splitQuadratic((0, 0), (50, 100), (100, 0), where=150, isHorizontal=False) == [((0, 0), (50, 100), (100, 0))])
    # Vertical split at x=50: two halves meeting at the apex (50, 50).
    assert (splitQuadratic((0, 0), (50, 100), (100, 0), where=50, isHorizontal=False) == [((0, 0), (25, 50), (50, 50)), ((50, 50), (75, 50), (100, 0))])
    assert (splitQuadratic((0, 0), (50, 100), (100, 0), where=25, isHorizontal=False) == [((0, 0), (12.5, 25), (25, 37.5)), ((25, 37.5), (62.5, 75), (100, 0))])
    # Horizontal split at y=25 crosses the curve twice -> three segments;
    # compared approximately since the intersection parameters are irrational.
    assert_curves_approx_equal(splitQuadratic((0, 0), (50, 100), (100, 0), where=25, isHorizontal=True), [((0, 0), (7.32233, 14.64466), (14.64466, 25)), ((14.64466, 25), (50, 75), (85.3553, 25)), ((85.3553, 25), (92.6777, 14.64466), (100, (- 7.10543e-15)))])
    # y=50 touches the curve exactly at the apex: the middle segment degenerates to a point.
    assert (splitQuadratic((0, 0), (50, 100), (100, 0), where=50, isHorizontal=True) == [((0, 0), (25, 50), (50, 50)), ((50, 50), (50, 50), (50, 50)), ((50, 50), (75, 50), (100, 0))]) |
def _mock_admin_directory():
    """Patch AdminDirectoryClient and route its getters to canned fixture data.

    Returns ``(patcher, mock_client)``; the caller must stop the patcher.
    """
    ad_patcher = mock.patch(MODULE_PATH + 'admin_directory.AdminDirectoryClient', spec=True)
    mock_ad = ad_patcher.start().return_value
    # Each getter is a plain fixture lookup keyed on its single argument.
    mock_ad.get_users.side_effect = lambda gsuite_id: results.AD_GET_USERS[gsuite_id]
    mock_ad.get_groups.side_effect = lambda gsuite_id: results.AD_GET_GROUPS[gsuite_id]
    mock_ad.get_group_members.side_effect = lambda group_key: results.AD_GET_GROUP_MEMBERS[group_key]
    return (ad_patcher, mock_ad)
class ExtraCardSlotHandler(THBEventHandler):
    """Shrinks the DropCardStage drop count for characters with ExtraCardSlot."""
    interested = ['action_before']
    def handle(self, evt_type, act):
        """On DropCardStage, reduce ``act.dropn`` by one per living accomplice."""
        if ((evt_type == 'action_before') and isinstance(act, DropCardStage)):
            tgt = act.target
            if (not tgt.has_skill(ExtraCardSlot)):
                return act
            g = self.game
            # Count accomplices among all roles ...
            n = sum(((i == THBRoleRole.ACCOMPLICE) for i in g.roles.values()))
            # ... then subtract the dead ones: only living accomplices count.
            n -= sum(((ch.dead and (g.roles[ch.player] == THBRoleRole.ACCOMPLICE)) for ch in g.players))
            # NOTE(review): sync_primitive presumably reconciles the value across
            # clients since roles can be hidden information -- confirm with engine docs.
            n = sync_primitive(n, g.players.player)
            # Clamp so we never ask for a negative number of drops.
            act.dropn = max((act.dropn - n), 0)
        return act |
# NOTE: a stray no-op '()' statement preceded this function (residue of a
# stripped decorator); as written it had no effect and has been removed.
def extract_images_from_html(content):
    """Replace inline base64 ``data:`` images in HTML with saved File documents.

    Every ``<img src="data:...">`` is decoded, stored as a File doc, and the
    tag rewritten to reference the stored file's URL.  Sets
    ``frappe.flags.has_dataurl`` when at least one data URL was found.

    :param content: HTML string (passed through unchanged if falsy/non-str).
    :return: tuple of (rewritten HTML, list of created File names).
    """
    frappe.flags.has_dataurl = False
    file_names = []
    def _save_file(match):
        # Persist one matched data URL and return the replacement <img ...> prefix.
        data = match.group(1)
        data = data.split('data:')[1]
        (headers, content) = data.split(',')  # NB: shadows the outer 'content'
        if ('filename=' in headers):
            filename = headers.split('filename=')[(- 1)]
            if (not isinstance(filename, str)):
                filename = str(filename, 'utf-8')
        else:
            # No filename supplied: derive a random one from the MIME type.
            mtype = headers.split(';')[0]
            filename = get_random_filename(content_type=mtype)
        _file = frappe.get_doc({'doctype': 'File', 'file_name': filename, 'content': content, 'decode': True})
        _file.save(ignore_permissions=True)
        file_url = _file.file_url
        file_names.append(_file.name)
        if (not frappe.flags.has_dataurl):
            frappe.flags.has_dataurl = True
        return f'<img src="{file_url}"'
    if (content and isinstance(content, str)):
        content = re.sub('<img[^>]*src\\s*=\\s*["\\\'](?=data:)(.*?)["\\\']', _save_file, content)
    return (content, file_names)
def compute_state_transition(parent_state, parent_block, block, verify_sig=True):
    """Apply ``block`` on top of ``parent_state`` and return the new state pair.

    ``parent_state`` is a ``(crystallized_state, active_state)`` tuple.  When
    the height is a multiple of SHARD_COUNT an epoch transition runs first
    (FFG/crosslink/balance deltas, validator-set rotation on finality, reset of
    the active state); then the block itself is processed.

    :param verify_sig: when True, assert the proposer's signature over the block.
    :return: ``(crystallized_state, new_active_state)``.
    """
    (crystallized_state, active_state) = parent_state
    if ((active_state.height % SHARD_COUNT) == 0):
        # --- Epoch transition ---
        print('Processing epoch transition')
        new_validator_records = deepcopy(crystallized_state.active_validators)
        ffg_voter_bitmask = bytearray(active_state.ffg_voter_bitmask)
        # Reward/penalty deltas from FFG votes, crosslinks and explicit deltas.
        (deltas1, total_vote_count, total_vote_deposits, justify, finalize) = process_ffg_deposits(crystallized_state, ffg_voter_bitmask)
        (deltas2, new_crosslink_records) = process_crosslinks(crystallized_state, active_state.partial_crosslinks)
        deltas3 = process_balance_deltas(crystallized_state, active_state.balance_deltas)
        for (i, v) in enumerate(new_validator_records):
            v.balance += ((deltas1[i] + deltas2[i]) + deltas3[i])
        total_deposits = (crystallized_state.total_deposits + sum(((deltas1 + deltas2) + deltas3)))
        print(('New total deposits: %d' % total_deposits))
        if finalize:
            # Finality reached: rotate queued/active/exited validator sets.
            (new_queued_validators, new_active_validators, new_exited_validators) = get_incremented_validator_sets(crystallized_state, new_validator_records)
        else:
            (new_queued_validators, new_active_validators, new_exited_validators) = (crystallized_state.queued_validators, crystallized_state.active_validators, crystallized_state.exited_validators)
        crystallized_state = CrystallizedState(queued_validators=new_queued_validators, active_validators=new_active_validators, exited_validators=new_exited_validators, current_shuffling=get_shuffling(active_state.randao, len(new_active_validators)), last_justified_epoch=(crystallized_state.current_epoch if justify else crystallized_state.last_justified_epoch), last_finalized_epoch=((crystallized_state.current_epoch - 1) if finalize else crystallized_state.last_finalized_epoch), dynasty=(crystallized_state.dynasty + (1 if finalize else 0)), next_shard=0, current_epoch=(crystallized_state.current_epoch + 1), crosslink_records=new_crosslink_records, total_deposits=total_deposits)
        # Fresh active state for the new epoch (one FFG bit per active validator).
        active_state = ActiveState(height=active_state.height, randao=active_state.randao, ffg_voter_bitmask=bytearray(((len(crystallized_state.active_validators) + 7) // 8)), balance_deltas=[], partial_crosslinks=[], total_skip_count=active_state.total_skip_count)
    # --- Per-block processing ---
    (attestation_indices, main_signer) = get_attesters_and_signer(crystallized_state, active_state, block.skip_count)
    balance_deltas = process_attestations(crystallized_state.active_validators, attestation_indices, block.attestation_bitmask, serialize(parent_block), block.attestation_aggregate_sig)
    # Pack proposer credit: high bits carry the signer index, low bits the count.
    balance_deltas.append(((main_signer << 24) + len(balance_deltas)))
    if verify_sig:
        assert block.verify(crystallized_state.active_validators[main_signer].pubkey)
        print('Verified main sig')
    (new_crosslink_records, new_ffg_bitmask, voters) = update_ffg_and_crosslink_progress(crystallized_state, active_state.partial_crosslinks, active_state.ffg_voter_bitmask, block.shard_aggregate_votes)
    balance_deltas.append(((main_signer << 24) + voters))
    # New active state: XOR the proposer's randao reveal into the running randao.
    o = ActiveState(height=(active_state.height + 1), randao=(int.from_bytes(active_state.randao, 'big') ^ int.from_bytes(block.randao_reveal, 'big')).to_bytes(32, 'big'), total_skip_count=(active_state.total_skip_count + block.skip_count), partial_crosslinks=new_crosslink_records, ffg_voter_bitmask=new_ffg_bitmask, balance_deltas=(active_state.balance_deltas + balance_deltas))
    return (crystallized_state, o) |
def _sanitize_value(analysis_data: dict, key: str, value):
if isinstance(value, dict):
sanitize(value)
elif isinstance(value, str):
analysis_data[key] = _sanitize_string(value)
elif isinstance(value, list):
_sanitize_list(value)
elif isinstance(value, bytes):
logging.warning(f'''Plugin result contains bytes entry. Plugin results should only contain JSON compatible data structures!:
{value!r}''')
analysis_data[key] = value.decode(errors='replace') |
# The decorator name was lost in this copy, leaving a bare string expression;
# restored as the backend registry registration used throughout AITemplate.
@registry.reg('cuda.bmm_rrr_permute.config')
def bmm_rrr_permute_config(func_attrs, dtype='float16'):
    """Populate ``func_attrs['op_instance']`` with CUTLASS op configs for a
    row-row-row batched matmul followed by a permute."""
    def fproc(op):
        # Imported lazily: cutlass_lib is only available/needed at codegen time.
        import cutlass_lib
        return common.default_fproc(op=op, a_layout=cutlass_lib.library.LayoutType.RowMajor, b_layout=cutlass_lib.library.LayoutType.RowMajor, c_layout=cutlass_lib.library.LayoutType.RowMajor, dtype=func_attrs['inputs'][0].dtype(), epilogue_name=func_attrs['epilogue'], permute_layout=func_attrs['layout'])
    func_attrs['op_instance'] = common_permute.extract_config(fproc, func_attrs)
class Migration(migrations.Migration):
    """Initial migration: create Recommendation and Similarity tables.

    Both store generic object references as raw (ctype, id, site) integers and
    order by descending score; uniqueness constraints prevent duplicate
    recommendation/similarity rows.
    """
    dependencies = []
    operations = [migrations.CreateModel(name='Recommendation', fields=[('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('object_ctype', models.PositiveIntegerField()), ('object_id', models.PositiveIntegerField()), ('object_site', models.PositiveIntegerField()), ('user', models.PositiveIntegerField()), ('score', models.FloatField(default=None, null=True, blank=True))], options={'ordering': ['-score']}, bases=(models.Model,)), migrations.CreateModel(name='Similarity', fields=[('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('object_ctype', models.PositiveIntegerField()), ('object_id', models.PositiveIntegerField()), ('object_site', models.PositiveIntegerField()), ('score', models.FloatField(default=None, null=True, blank=True)), ('related_object_ctype', models.PositiveIntegerField()), ('related_object_id', models.PositiveIntegerField()), ('related_object_site', models.PositiveIntegerField())], options={'ordering': ['-score'], 'verbose_name_plural': 'similarities'}, bases=(models.Model,)), migrations.AlterUniqueTogether(name='similarity', unique_together=set([('object_ctype', 'object_id', 'object_site', 'related_object_ctype', 'related_object_id', 'related_object_site')])), migrations.AlterUniqueTogether(name='recommendation', unique_together=set([('object_ctype', 'object_id', 'user')]))] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.