code stringlengths 281 23.7M |
|---|
def _handle_optimizer_in_client(client):
if ('optim_config' not in client):
return
client['optimizer'] = client['optim_config']
del client['optim_config']
optimizer = client['optimizer']
if ('type' not in optimizer):
pass
elif ('sgd' == optimizer['type'].lower()):
optimizer['_base_'] = 'base_optimizer_sgd'
elif ('fedprox' == optimizer['type'].lower()):
optimizer['_base_'] = 'base_optimizer_fedprox'
optimizer.pop('type', None) |
class PanelsBar(Html.Html):
    """A horizontal menu bar whose entries show/hide associated content panels.

    Clicking a menu entry toggles its panel: a second click on the active
    entry collapses the panel area again (tracked via a ``data-panel``
    attribute on the panels container).

    NOTE(review): ``options`` is dereferenced unconditionally in ``__init__``
    and ``__str__``, so a ``None`` value would raise — presumably callers
    always pass a dict despite the ``Optional`` annotation.
    """
    name = 'Panel Bar'

    def __init__(self, page: primitives.PageModel, width: tuple, height: tuple, options: Optional[dict], helper: str, profile: Optional[Union[(dict, bool)]]):
        super(PanelsBar, self).__init__(page, None, profile=profile, css_attrs={'width': width, 'height': height})
        self.add_helper(helper)
        # Menu entries are rendered inline; both sub-containers are unmanaged
        # because this component renders them itself in __str__.
        self.menus = page.ui.div(options={'inline': True})
        self.menus.options.managed = False
        self.panels = page.ui.div()
        self.panels.options.managed = False
        # Insertion-ordered mapping: menu item component -> its panel component.
        self.menu_mapping = collections.OrderedDict()
        # Panels start hidden and are absolutely positioned over the component.
        self.panels.style.css.display = False
        self.panels.style.css.position = 'absolute'
        self.style.css.position = 'relative'
        self.__options = options
        # The 'position' option places the bar above ('top', default) or below
        # the panel area; separators use the last theme color.
        if (options.get('position', 'top') == 'top'):
            self.panels.style.css.padding_bottom = 10
            self.menus.style.css.top = 0
            self.panels.style.css.border_bottom = ('1px solid %s' % page.theme.colors[(- 1)])
        else:
            self.style.css.position = 'relative'
            self.panels.style.css.border_top = ('1px solid %s' % page.theme.colors[(- 1)])
            self.panels.style.css.bottom = 0
            self.menus.style.css.bottom = 0
        self.menus.style.css.position = 'absolute'
        self.menus.style.css.background = page.theme.colors[(- 1)]
        self.menus.style.css.color = page.theme.colors[0]
        self.menus.style.css.padding = '5px 0'

    def add_panel(self, text: str, content: Html.Html):
        """Register a menu entry *text* that toggles *content*.

        :param text: Menu label — a plain string is wrapped into a div
            component; a component (anything with an ``options`` attribute)
            is used as-is.
        :param content: The panel component shown when the entry is clicked.
        :return: ``self`` to allow chaining.
        """
        content.style.css.padding = '0 5px'
        if (not hasattr(text, 'options')):
            text = self.page.ui.div(text)
        text.style.css.display = 'inline-block'
        text.style.css.width = 'auto'
        text.style.css.cursor = 'pointer'
        text.style.css.padding = '0 5px'
        self.menu_mapping[text] = content
        self.menus += text
        self.panels += content
        return self

    def __str__(self):
        """Render the component, wiring a click handler per menu entry.

        Each handler: hides all sibling panels, then either collapses the
        panel area (when the clicked entry is already active, per the
        ``data-panel`` attribute) or expands it and shows this entry's panel.
        """
        # Margin keeps the expanded panel area clear of the absolutely
        # positioned menu bar, on whichever side the bar sits.
        css_menu = ({'height': 'auto', 'display': 'block', 'margin-top': '30px'} if (self.__options['position'] == 'top') else {'height': 'auto', 'display': 'block', 'margin-bottom': '30px'})
        for (menu_item, panel) in self.menu_mapping.items():
            menu_item.click([self.page.js.querySelectorAll(Selector.Selector(self.panels).with_child_element('div').excluding(panel)).css({'display': 'none'}), expr.if_((self.page.js.querySelector(Selector.Selector(self.panels)).getAttribute('data-panel') == menu_item.htmlCode), [self.page.js.querySelector(Selector.Selector(self.panels)).setAttribute('data-panel', ''), self.page.js.querySelector(Selector.Selector(self.panels)).css({'display': 'none'})]).else_([self.page.js.querySelector(Selector.Selector(self.panels)).setAttribute('data-panel', menu_item.htmlCode), self.page.js.querySelector(Selector.Selector(self.panels)).css(css_menu), self.page.js.querySelector(Selector.Selector(panel)).css({'display': 'block'})])])
        return ('<div %s>%s%s</div>%s' % (self.get_attrs(css_class_names=self.style.get_classes()), self.menus.html(), self.panels.html(), self.helper))
class ObjectSpec(EntitySpec):
    """Specification of a simulated object: its sensors, actuators, states,
    config and engine-specific implementation.

    NOTE(review): ``engine``/``sensors``/``actuators``/``states``/``config``
    are used elsewhere in this class without parentheses (e.g.
    ``with getattr(self, component) as d`` in :meth:`initialize` and
    ``getattr(self.config, component)``), which strongly suggests these were
    ``@property``-decorated in the original source and the decorators were
    stripped — confirm before relying on them as plain methods.
    """

    def __init__(self, params):
        super().__init__(params)

    def _lookup(self, depth, unlocked=False):
        # Build a SpecView rooted at the single top-level params key `depth`.
        name = self._params['config']['name']
        return SpecView(self, depth=[depth], name=name, unlocked=unlocked)

    def gui(self, engine_cls: Type['Engine'], interactive: Optional[bool]=True, resolution: Optional[List[int]]=None, filename: Optional[str]=None) -> Union[(None, np.ndarray)]:
        """Open (or render to an image) the engine graph GUI for this object.

        :param engine_cls: Engine class whose specification is added to a
            copy of this spec before building the graph.
        :param interactive: Forwarded to ``graph.gui``.
        :param resolution: Forwarded to ``graph.gui``.
        :param filename: Forwarded to ``graph.gui``.
        :return: Whatever ``graph.gui`` returns (None or an image array).
        """
        import eagerx.core.register as register
        # Work on a copy so registering the engine does not mutate `self`.
        spec_copy = ObjectSpec(self.params)
        spec_copy._params['engine'] = {}
        engine = engine_cls.get_specification()
        graph = register.add_engine(spec_copy, engine)
        return graph.gui(interactive=interactive, resolution=resolution, filename=filename)

    def engine(self) -> Union[SpecView]:
        # View over the engine-specific portion of the params.
        return SpecView(self, depth=['engine'], name=self._params['config']['name'])

    def sensors(self) -> SpecView:
        return self._lookup('sensors')

    def actuators(self) -> SpecView:
        return self._lookup('actuators')

    def states(self) -> SpecView:
        return self._lookup('states')

    def config(self) -> SpecView:
        # Config view is unlocked so callers may add/modify entries.
        return self._lookup('config', unlocked=True)

    def initialize(self, spec_cls):
        """Populate sensors/actuators/states with registered defaults.

        Looks up the component/space registrations for ``spec_cls.make`` and
        writes a default mapping per cname, also appending each cname to the
        corresponding list in the config section if not already present.
        """
        import eagerx.core.register as register
        agnostic = register.LOOKUP_TYPES[spec_cls.make]
        for (component, cnames) in agnostic.items():
            for (cname, space) in cnames.items():
                if (component == 'sensors'):
                    mapping = dict(rate=1, processor=None, space=space)
                elif (component == 'actuators'):
                    mapping = dict(rate=1, delay=0.0, window=1, skip=False, processor=None, space=space)
                else:
                    # Anything not a sensor/actuator is treated as a state.
                    component = 'states'
                    mapping = dict(processor=None, space=space)
                with getattr(self, component) as d:
                    d[cname] = mapping
                if (cname not in getattr(self.config, component)):
                    getattr(self.config, component).append(cname)

    def _initialize_object_graph(self):
        """Create an EngineGraph from this spec's sensors/actuators.

        Components that raise AttributeError (absent) are silently skipped.
        """
        mapping = dict()
        for component in ['sensors', 'actuators']:
            try:
                mapping[component] = getattr(self, component)
            except AttributeError:
                continue
        from eagerx.core.graph_engine import EngineGraph
        graph = EngineGraph.create(**mapping)
        return graph
# NOTE(review): the bare string below looks like a stripped registry
# decorator (e.g. @registry.reg('cuda.bmm_rcr_n1.gen_function')) — confirm
# against the original source.
('cuda.bmm_rcr_n1.gen_function')
def gen_function(func_attrs, exec_cond_template, dim_info_dict):
    """Render CUDA source for a bmm_rcr kernel specialized for N == 1.

    :param func_attrs: Op attribute dict (name, inputs, accessors, ...).
    :param exec_cond_template: Unused here — presumably kept for interface
        parity with other gen_function backends (TODO confirm).
    :param dim_info_dict: Dim info forwarded to the shape-eval codegen.
    :return: Rendered source string from SRC_TEMPLATE.
    """
    func_name = func_attrs['name']
    shape_func = gemm_common.gen_shape_eval_code(indent=1, dtype='int64_t', dim_info_dict=dim_info_dict, is_ptr=True)

    def _get_original_dim_val(func_attrs, input_idx, dim):
        # The reduction dim K must be compile-time static for this kernel.
        accessor = func_attrs['input_accessors'][input_idx]
        shape = accessor.original_shapes
        assert isinstance(shape[dim], IntImm), f"input {input_idx}'s dim {dim} must be static. Instead it's dynamic"
        k = shape[dim]._attrs['values'][0]
        return k
    # Both operands are row-major with K as their last (2nd) dim; they must agree.
    ak = _get_original_dim_val(func_attrs, 0, 2)
    bk = _get_original_dim_val(func_attrs, 1, 2)
    assert (ak == bk), f'ak is not equal to bk. ak: {ak}, bk: {bk}'
    backend_spec = CUDASpec()
    dtype = func_attrs['inputs'][0].dtype()
    elem_input_type = backend_spec.dtype_to_backend_type(dtype)
    # Candidate vectorized load widths (in elements) and their CUDA carrier types.
    vec_lens = [8, 4, 2]
    backend_types = ['uint4', 'uint2', 'uint']
    alignment = tensor_accessor_codegen.find_max_alignment(ak, dtype, func_attrs['input_accessors'])
    if (alignment % 2):
        # Odd alignment: vectorized loads are unsafe, fall back to scalar reads.
        bmm_rcr_n1_kernel_fp32 = 'bmm_rcr_n1_kernel_fp32_acc'
        bmm_rcr_n1_kernel_fp16 = 'bmm_rcr_n1_kernel_fp16_acc'
        read_vec_type = elem_input_type
    else:
        # Pick the widest vector width that evenly divides K.
        # NOTE(review): if no vec_len divided ak, the kernel-name variables
        # would be left unbound below; presumably an even alignment implies
        # ak % 2 == 0 so the loop always breaks — confirm.
        for (vec_idx, vec_len) in enumerate(vec_lens):
            if ((ak % vec_len) == 0):
                bmm_rcr_n1_kernel_fp32 = 'bmm_rcr_n1_kernel_fp32_acc_vec'
                bmm_rcr_n1_kernel_fp16 = 'bmm_rcr_n1_kernel_fp16_acc_vec'
                read_vec_type = backend_types[vec_idx]
                break
    input_output_checks = common.INPUT_OUTPUT_CHECKS_TEMPLATE.render(input_ndims=3, weight_ndims=3, output_ndims=3)
    if (ak == 0):
        # Degenerate K == 0: no compute path is emitted.
        exec_paths = ''
    else:
        exec_paths = EXEC_TEMPLATE.render(indent='    ', read_vec_type=read_vec_type, elem_input_type=elem_input_type, K=ak)
    input_a_accessor = tensor_accessor_codegen.TENSOR_ACCESSOR_TEMPLATE.render(name='input_a_accessor', tensor_accessor=func_attrs['input_accessors'][0])
    input_b_accessor = tensor_accessor_codegen.TENSOR_ACCESSOR_TEMPLATE.render(name='input_b_accessor', tensor_accessor=func_attrs['input_accessors'][1])
    return SRC_TEMPLATE.render(function_name=func_name, elem_input_type=elem_input_type, bmm_rcr_n1_kernel_fp32=bmm_rcr_n1_kernel_fp32, bmm_rcr_n1_kernel_fp16=bmm_rcr_n1_kernel_fp16, shape_function=shape_func, input_output_checks=input_output_checks, exec_paths=exec_paths, tensor_accessor_libs=tensor_accessor_codegen.get_libs(), input_accessors=(input_a_accessor + input_b_accessor), output_accessors=tensor_accessor_codegen.TENSOR_ACCESSOR_TEMPLATE.render(name='output_accessor', tensor_accessor=func_attrs['output_accessors'][0]))
class OptionPlotoptionsSeriesStatesHoverHalo(Options):
    """Options wrapper for plotOptions.series.states.hover.halo.

    NOTE(review): each same-named method pair looks like a stripped
    @property getter / @<name>.setter pair; as written the second definition
    shadows the first — confirm against the original source.
    """
    def attributes(self):
        """Current 'attributes' option (default None)."""
        return self._config_get(None)
    def attributes(self, value: Any):
        """Set the 'attributes' option."""
        self._config(value, js_type=False)
    def opacity(self):
        """Current halo opacity (default 0.25)."""
        return self._config_get(0.25)
    def opacity(self, num: float):
        """Set the halo opacity."""
        self._config(num, js_type=False)
    def size(self):
        """Current halo size in pixels (default 10)."""
        return self._config_get(10)
    def size(self, num: float):
        """Set the halo size."""
        self._config(num, js_type=False)
# NOTE(review): the tuple below looks like the arguments of a stripped
# @patch.object(CISAudit, '_shellexec', mock_homedirs_data) decorator —
# confirm against the original source.
(CISAudit, '_shellexec', mock_homedirs_data)
def test_audit_homedirs_ownership_pass(fs):
    """Home dirs owned by their users should make the audit pass (state 0).

    Uses pyfakefs (`fs` fixture): /root is created as uid/gid 0 and
    /home/pytest as uid/gid 1000, matching the mocked shell output.
    """
    fake_filesystem.set_uid(0)
    fake_filesystem.set_gid(0)
    fs.create_dir('/root')
    fake_filesystem.set_uid(1000)
    fake_filesystem.set_gid(1000)
    fs.create_dir('/home/pytest')
    state = test.audit_homedirs_ownership()
    assert (state == 0)
def test_async_partial_object_call():
    """A functools.partial over a callable object is classified by its
    __call__: async __call__ -> async-callable, plain __call__ -> not."""
    class _AsyncCallable():
        async def __call__(self, a, b):
            ...

    class _SyncCallable():
        def __call__(self, a, b):
            ...

    assert is_async_callable(functools.partial(_AsyncCallable(), 1))
    assert not is_async_callable(functools.partial(_SyncCallable(), 1))
def get_ait_params(hidden_size, vocab_size, max_position_embeddings, type_vocab_size, dtype='float16'):
    """Build the named AIT input tensors for a BERT-style embedding layer.

    :return: Tuple of (word_embeddings, token_type_embeddings,
        position_embeddings, gamma, beta) Tensors, all marked as inputs.
    """
    specs = (
        ('word_embeddings', [vocab_size, hidden_size]),
        ('token_type_embeddings', [type_vocab_size, hidden_size]),
        ('position_embeddings', [max_position_embeddings, hidden_size]),
        # gamma/beta are the LayerNorm scale and shift vectors.
        ('gamma', [hidden_size]),
        ('beta', [hidden_size]),
    )
    return tuple(Tensor(shape=shape, dtype=dtype, name=name, is_input=True) for (name, shape) in specs)
class ImportCluster(AbstractCommand):
    """`elasticluster import`: load a cluster (and its key files) from a ZIP
    produced by `elasticluster export` into the local repository."""

    def setup(self, subparsers):
        """Register the `import` sub-command and its arguments."""
        parser = subparsers.add_parser('import', help='Import a cluster from a zip file', description=self.__doc__)
        parser.set_defaults(func=self)
        parser.add_argument('--rename', metavar='NAME', help='Rename the cluster during import.')
        parser.add_argument('file', help='Path to ZIP file produced by `elasticluster export`.')

    def execute(self):
        """Extract the ZIP into a temp repository, copy the cluster plus its
        known_hosts and SSH key files into the real repository, then exit
        with 0 on success, 1 on error, 2 on an invalid archive."""
        creator = make_creator(self.params.config, storage_path=self.params.storage)
        repo = creator.create_repository()
        # Extract into a throwaway storage dir and read the cluster from there.
        tmpdir = tempfile.mkdtemp()
        log.debug('Using temporary directory %s', tmpdir)
        tmpconf = make_creator(self.params.config, storage_path=tmpdir)
        tmprepo = tmpconf.create_repository()
        rc = 0
        try:
            with ZipFile(self.params.file, 'r') as zipfile:
                log.debug('ZIP file %s opened', self.params.file)
                cluster = None
                zipfile.extractall(tmpdir)
                newclusters = tmprepo.get_all()
                # Assumes the archive holds exactly one cluster; an empty
                # archive raises IndexError here (caught below).
                cluster = newclusters[0]
                cur_clusternames = [c.name for c in repo.get_all()]
                oldname = cluster.name
                newname = self.params.rename
                if self.params.rename:
                    cluster.name = self.params.rename
                    for node in cluster.get_all_nodes():
                        node.cluster_name = cluster.name
                if (cluster.name in cur_clusternames):
                    raise Exception(('A cluster with name %s already exists. Use option --rename to rename the cluster to be imported.' % cluster.name))
                # Re-home the cluster into the real repository.
                cluster.repository = repo
                repo.save_or_update(cluster)
                dest = cluster.repository.storage_path
                # known_hosts file keeps the *old* name in the archive but the
                # (possibly renamed) new name at the destination.
                srcfile = os.path.join(tmpdir, (oldname + '.known_hosts'))
                destfile = os.path.join(dest, (cluster.name + '.known_hosts'))
                shutil.copy(srcfile, destfile)
                # Copy cluster-level SSH keys, then any per-node key overrides.
                for attr in ('user_key_public', 'user_key_private'):
                    keyfile = getattr(cluster, attr)
                    keybase = os.path.basename(keyfile)
                    srcfile = os.path.join(tmpdir, keybase)
                    if os.path.isfile(srcfile):
                        log.info('Importing key file %s', keybase)
                        destfile = os.path.join(dest, keybase)
                        shutil.copy(srcfile, destfile)
                        setattr(cluster, attr, destfile)
                    for node in cluster.get_all_nodes():
                        nodekeyfile = getattr(node, attr)
                        # Only copy keys that differ from the cluster-wide one.
                        if ((nodekeyfile != keyfile) and os.path.isfile(nodekeyfile)):
                            destdir = os.path.join(dest, cluster.name, node.kind, node.name)
                            nodekeybase = os.path.basename(nodekeyfile)
                            log.info('Importing key file %s for node %s', nodekeybase, node.name)
                            if (not os.path.isdir(destdir)):
                                os.makedirs(destdir)
                            srcfile = os.path.join(tmpdir, oldname, node.kind, node.name, nodekeybase)
                            destfile = os.path.join(destdir, nodekeybase)
                            shutil.copy(srcfile, destfile)
                            setattr(node, attr, destfile)
                repo.save_or_update(cluster)
                # NOTE(review): this check looks unreachable as written —
                # `cluster` is assigned from newclusters[0] above (an empty
                # repository raises IndexError into the except handler first),
                # and a cluster object is presumably truthy. Confirm intent.
                if (not cluster):
                    log.error('ZIP file %s does not contain a valid cluster.', self.params.file)
                    rc = 2
        except Exception as ex:
            log.error('Unable to import from zipfile `%s`: %s', self.params.file, ex)
            rc = 1
        finally:
            if os.path.isdir(tmpdir):
                shutil.rmtree(tmpdir)
                log.info('Cleaning up directory `%s`', tmpdir)
        if (rc == 0):
            print(('Successfully imported cluster from ZIP %s to %s' % (self.params.file, repo.storage_path)))
        sys.exit(rc)
def extractSlothfulworksWordpressCom(item):
    """Build a release message for a slothfulworks.wordpress.com feed item.

    Returns False for previews, items with no volume/chapter numbering, or
    items not tagged 'WATTT'.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    lowered_title = item['title'].lower()
    has_numbering = bool(chp or vol)
    if (not has_numbering) or ('preview' in lowered_title):
        return False
    if 'WATTT' in item['tags']:
        return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
    return False
# NOTE(review): the bare strings below look like stripped @patch('...')
# decorators; mock.patch injects the mocks into the test function in
# reverse (bottom-up) order, matching the parameter list — confirm against
# the original source.
('llama_recipes.finetuning.train')
('llama_recipes.finetuning.LlamaForCausalLM.from_pretrained')
('llama_recipes.finetuning.LlamaTokenizer.from_pretrained')
('llama_recipes.finetuning.get_preprocessed_dataset')
('llama_recipes.finetuning.generate_peft_config')
('llama_recipes.finetuning.get_peft_model')
('llama_recipes.finetuning.optim.AdamW')
('llama_recipes.finetuning.StepLR')
def test_finetuning_peft(step_lr, optimizer, get_peft_model, gen_peft_config, get_dataset, tokenizer, get_model, train):
    """With use_peft=True, finetuning main() should wrap the model via
    get_peft_model, move it to 'cuda', and print trainable parameters once."""
    kwargs = {'use_peft': True}
    get_dataset.return_value = get_fake_dataset()
    main(**kwargs)
    assert (get_peft_model.return_value.to.call_args.args[0] == 'cuda')
    assert (get_peft_model.return_value.print_trainable_parameters.call_count == 1)
def _get_post_processing_handlers(config: Dict[str, Any], trace_logging_svc: TraceLoggingService) -> Dict[str, PostProcessingHandler]:
    """Instantiate the post-processing handlers declared in *config*.

    :param config: Handler configuration; its 'dependency' section maps a
        handler name to that handler's construction config.
    :param trace_logging_svc: Shared service injected into every handler's
        constructor arguments.
    :return: Mapping of handler name -> instantiated PostProcessingHandler;
        empty when *config* is falsy.
    """
    if not config:
        return {}
    handlers = {}
    for handler_name, handler_cfg in config['dependency'].items():
        # Inject the shared trace-logging service into the constructor args
        # (mutates the config entry in place, as before).
        ctor_kwargs = handler_cfg.get('constructor', {})
        ctor_kwargs['trace_logging_svc'] = trace_logging_svc
        handler_cfg['constructor'] = ctor_kwargs
        handlers[handler_name] = reflect.get_instance(handler_cfg, PostProcessingHandler)
    return handlers
class OptionPlotoptionsArearangeSonificationDefaultinstrumentoptionsMappingNoteduration(Options):
    """Options wrapper for the noteDuration mapping of arearange sonification.

    NOTE(review): each same-named method pair looks like a stripped
    @property getter / @<name>.setter pair; as written the second definition
    shadows the first — confirm against the original source.
    """
    def mapFunction(self):
        """Current mapping function (default None)."""
        return self._config_get(None)
    def mapFunction(self, value: Any):
        """Set the mapping function."""
        self._config(value, js_type=False)
    def mapTo(self):
        """Current point property mapped to note duration (default None)."""
        return self._config_get(None)
    def mapTo(self, text: str):
        """Set the point property to map to."""
        self._config(text, js_type=False)
    def max(self):
        """Current maximum mapped value (default None)."""
        return self._config_get(None)
    def max(self, num: float):
        """Set the maximum mapped value."""
        self._config(num, js_type=False)
    def min(self):
        """Current minimum mapped value (default None)."""
        return self._config_get(None)
    def min(self, num: float):
        """Set the minimum mapped value."""
        self._config(num, js_type=False)
    def within(self):
        """Current 'within' option (default None)."""
        return self._config_get(None)
    def within(self, value: Any):
        """Set the 'within' option."""
        self._config(value, js_type=False)
class Query(DslBase):
    """Base class for Elasticsearch DSL queries.

    The operators +, |, & combine two queries into a ``Bool`` compound and
    ~ negates one; each binary operator first defers to the right operand's
    reflected hook when that operand defines it.
    """
    _type_name = 'query'
    _type_shortcut = staticmethod(Q)
    name = None

    def __add__(self, other):
        # Reflected hook wins; otherwise both clauses become Bool `must`s.
        return other.__radd__(self) if hasattr(other, '__radd__') else Bool(must=[self, other])

    def __invert__(self):
        # Negation wraps this query in a Bool `must_not`.
        return Bool(must_not=[self])

    def __or__(self, other):
        return other.__ror__(self) if hasattr(other, '__ror__') else Bool(should=[self, other])

    def __and__(self, other):
        return other.__rand__(self) if hasattr(other, '__rand__') else Bool(must=[self, other])
def _assemble_source_procurement_records(id_list):
    """Insert one dummy SourceProcurementTransaction row per id in *id_list*.

    Each row starts from _assemble_dummy_source_data() and is overridden with
    deterministic values; action_date is staggered by the transaction id so
    rows are distinguishable in date-ordered queries.
    """
    date_fmt = '%Y-%m-%d %H:%M:%S'
    base_date = '2010-01-01 00:00:00'
    epoch = datetime.datetime.fromtimestamp(0)
    for trx_id in id_list:
        row = _assemble_dummy_source_data()
        row['detached_award_procurement_id'] = trx_id
        row['detached_award_proc_unique'] = str(trx_id)
        for date_col in ('ordering_period_end_date', 'action_date', 'initial_report_date', 'solicitation_date', 'period_of_performance_star'):
            row[date_col] = base_date
        row['created_at'] = epoch
        row['updated_at'] = epoch
        row['federal_action_obligation'] = 1000001
        # Period of performance ends exactly one year after it starts.
        row['period_of_performance_curr'] = (datetime.datetime.strptime(row['period_of_performance_star'], date_fmt) + datetime.timedelta(days=365))
        row['last_modified'] = datetime.datetime.utcnow()
        for officer_idx in range(1, 6):
            row['high_comp_officer%d_amount' % officer_idx] = (officer_idx * 1000000)
        row['base_exercised_options_val'] = 10203
        row['base_and_all_options_value'] = 30201
        # Stagger the action date by the (already-set) procurement id.
        row['action_date'] = (datetime.datetime.strptime(base_date, date_fmt) + datetime.timedelta(days=row['detached_award_procurement_id']))
        baker.make('transactions.SourceProcurementTransaction', **row)
class Review(SimpleEntity, ScheduleMixin, StatusMixin):
    """A review of a leaf Task by one of its responsible users.

    Reviews come in numbered sets (one per reviewer per review round); a
    round is finalized once every review in the set has left the 'NEW'
    status, at which point the task's status/timings are updated.

    NOTE(review): the bare string expressions ``('task')`` and
    ``('reviewer')`` before the ``_validate_*`` methods look like stripped
    SQLAlchemy ``@validates(...)`` decorators, and ``review_set`` is
    accessed elsewhere without parentheses (suggesting a stripped
    ``@property``) — confirm against the original source.
    """
    __auto_name__ = True
    __tablename__ = 'Reviews'
    __table_args__ = {'extend_existing': True}
    __mapper_args__ = {'polymorphic_identity': 'Review'}
    # Primary key, shared with the SimpleEntities base row.
    review_id = Column('id', Integer, ForeignKey('SimpleEntities.id'), primary_key=True)
    task_id = Column(Integer, ForeignKey('Tasks.id'), nullable=False, doc='The id of the related task.')
    task = relationship('Task', primaryjoin='Reviews.c.task_id==Tasks.c.id', uselist=False, back_populates='reviews', doc='The :class:`.Task` instance that this Review is created for')
    reviewer_id = Column(Integer, ForeignKey('Users.id'), nullable=False, doc='The User which does the review, also on of the responsible of the related Task')
    reviewer = relationship('User', primaryjoin='Reviews.c.reviewer_id==Users.c.id')
    # Round counter; exposed read-only through the `review_number` synonym.
    _review_number = Column('review_number', Integer, default=1)

    def __init__(self, task=None, reviewer=None, description='', **kwargs):
        """Create a Review in 'NEW' status for *task*, reviewed by *reviewer*."""
        kwargs['description'] = description
        SimpleEntity.__init__(self, **kwargs)
        ScheduleMixin.__init__(self, **kwargs)
        StatusMixin.__init__(self, **kwargs)
        self.task = task
        self.reviewer = reviewer
        # New reviews always start in the 'NEW' status of the next round.
        with DBSession.no_autoflush:
            new = Status.query.filter_by(code='NEW').first()
        self.status = new
        self._review_number = (self.task.review_number + 1)

    ('task')
    def _validate_task(self, key, task):
        """Ensure *task* is a leaf Task instance; also bumps _review_number."""
        if (task is not None):
            from stalker.models.task import Task
            if (not isinstance(task, Task)):
                raise TypeError(('%s.task should be an instance of stalker.models.task.Task, not %s' % (self.__class__.__name__, task.__class__.__name__)))
            if (not task.is_leaf):
                raise ValueError(('It is only possible to create a review for a leaf tasks, and %s is not a leaf task.' % task))
            self._review_number = (task.review_number + 1)
        return task

    ('reviewer')
    def _validate_reviewer(self, key, reviewer):
        """Ensure *reviewer* is a User instance."""
        from stalker.models.auth import User
        if (not isinstance(reviewer, User)):
            raise TypeError(('%s.reviewer should be set to a stalker.models.auth.User instance, not %s' % (self.__class__.__name__, reviewer.__class__.__name__)))
        return reviewer

    def _review_number_getter(self):
        # Read-only accessor backing the `review_number` synonym below.
        return self._review_number
    review_number = synonym('_review_number', descriptor=property(_review_number_getter), doc='returns the _review_number attribute value')

    def review_set(self):
        """All of the task's reviews sharing this review's review_number."""
        logger.debug(('finding revisions with the same review_number of: %s' % self.review_number))
        with DBSession.no_autoflush:
            logger.debug('using raw Python to get review set')
            reviews = []
            rev_num = self.review_number
            for review in self.task.reviews:
                if (review.review_number == rev_num):
                    reviews.append(review)
        return reviews

    def is_finalized(self):
        """True when no review in this set is still in 'NEW' status."""
        return all([(review.status.code != 'NEW') for review in self.review_set])

    def request_revision(self, schedule_timing=1, schedule_unit='h', description=''):
        """Mark this review as 'request revision' ('RREV') with the extra
        timing the task needs, then try to finalize the whole set."""
        self.schedule_timing = schedule_timing
        self.schedule_unit = schedule_unit
        self.description = description
        with DBSession.no_autoflush:
            rrev = Status.query.filter_by(code='RREV').first()
        self.status = rrev
        self.finalize_review_set()

    def approve(self):
        """Mark this review as approved ('APP'), then try to finalize the set."""
        with DBSession.no_autoflush:
            app = Status.query.filter_by(code='APP').first()
        self.status = app
        self.finalize_review_set()

    def finalize_review_set(self):
        """If every review in the set is done, update the task accordingly.

        Any 'RREV' review extends the task's schedule by its own timing and
        sends the task to 'HREV'; otherwise the task is completed ('CMPL').
        Parent and dependent task statuses are updated afterwards.
        """
        with DBSession.no_autoflush:
            hrev = Status.query.filter_by(code='HREV').first()
            cmpl = Status.query.filter_by(code='CMPL').first()
        if self.is_finalized():
            logger.debug('all reviews are finalized')
            revise_task = False
            # Start from what was already logged, add requested extensions.
            total_seconds = self.task.total_logged_seconds
            for review in self.review_set:
                if (review.status.code == 'RREV'):
                    total_seconds += review.schedule_seconds
                    revise_task = True
            (timing, unit) = self.least_meaningful_time_unit(total_seconds)
            self.task._review_number += 1
            if revise_task:
                # Only grow the task's schedule, never shrink it.
                if (total_seconds > self.task.schedule_seconds):
                    logger.debug(('total_seconds including reviews: %s' % total_seconds))
                    self.task.schedule_timing = timing
                    self.task.schedule_unit = unit
                self.task.status = hrev
            else:
                self.task.status = cmpl
                self.task.schedule_timing = timing
                self.task.schedule_unit = unit
            self.task.update_parent_statuses()
            from stalker import TaskDependency
            # Propagate the status change through dependent tasks.
            for dep in walk_hierarchy(self.task, 'dependent_of', method=1):
                logger.debug(('current TaskDependency object: %s' % dep))
                dep.update_status_with_dependent_statuses()
                if (dep.status.code in ['HREV', 'PREV', 'DREV', 'OH', 'STOP']):
                    # Blocked dependents can now start alongside this task.
                    with DBSession.no_autoflush:
                        tdeps = TaskDependency.query.filter_by(depends_to=dep).all()
                    for tdep in tdeps:
                        tdep.dependency_target = 'onstart'
                dep.update_parent_statuses()
        else:
            logger.debug('not all reviews are finalized yet!')
class EMAHook(HookBase):
    """Trainer hook maintaining an exponential-moving-average (EMA) copy of
    the model's weights.

    Depending on ``cfg.MODEL_EMA.AFTER_BACKWARD``, the EMA state is updated
    either right after the backward pass or after the optimizer step —
    exactly one of the two per-iteration hooks performs the update.
    """

    def __init__(self, cfg, model):
        # Unwrap DistributedDataParallel so we track the underlying module.
        model = _remove_ddp(model)
        assert cfg.MODEL_EMA.ENABLED
        assert hasattr(model, 'ema_state'), 'Call `may_build_model_ema` first to initilaize the model ema'
        self.model = model
        self.ema = self.model.ema_state
        # Fall back to the main model device when no EMA device is configured.
        self.device = (cfg.MODEL_EMA.DEVICE or cfg.MODEL.DEVICE)
        self.is_after_backward = cfg.MODEL_EMA.AFTER_BACKWARD
        self.ema_updater = EMAUpdater(self.model.ema_state, decay=cfg.MODEL_EMA.DECAY, device=self.device, use_lerp=cfg.MODEL_EMA.USE_LERP, decay_warm_up_factor=cfg.MODEL_EMA.DECAY_WARM_UP_FACTOR)

    def before_train(self):
        if self.ema.has_inited():
            # Resuming from a checkpoint: just move the EMA state to the
            # configured device.
            self.ema.to(self.device)
        else:
            self.ema_updater.init_state(self.model)

    def after_train(self):
        pass

    def before_step(self):
        pass

    def after_backward(self):
        if (not self.is_after_backward):
            return
        self._update()

    def after_step(self):
        if self.is_after_backward:
            return
        self._update()

    def _update(self):
        # BUG FIX: the original tested `self.model.train`, which is the bound
        # `nn.Module.train` *method* and therefore always truthy — the guard
        # never skipped an update. `training` is the boolean mode flag that
        # actually says whether the model is in training mode.
        if (not self.model.training):
            return
        self.ema_updater.update(self.model)
class OptionValidator(object):
    """Callable that validates a sort field against an allowed set.

    A leading '-' (descending-sort marker) is ignored for the membership
    test; an unknown field raises a 422 ApiError.
    """

    def __init__(self, values):
        self.values = values

    def __call__(self, value):
        candidate = value.lstrip('-')
        if candidate in self.values:
            return
        allowed = '", "'.join(self.values)
        raise exceptions.ApiError('Cannot sort on value "{0}". Instead choose one of: "{1}"'.format(value, allowed), status_code=422)
class INETETW(ETW):
    """ETW session subscribed to the WinINet and WinINet-Capture providers.

    Events are forwarded to *event_callback* when given, otherwise to the
    default :meth:`on_event`, which logs via the shared common handler.
    """

    def __init__(self, ring_buf_size=1024, max_str_len=1024, min_buffers=0, max_buffers=0, level=et.TRACE_LEVEL_INFORMATION, any_keywords=None, all_keywords=None, filters=None, event_callback=None, logfile=None, no_conout=False):
        # Output sinks for the default callback.
        self.logfile = logfile
        self.no_conout = no_conout
        # A user-supplied callback takes precedence over the default handler.
        if event_callback:
            self.event_callback = event_callback
        else:
            self.event_callback = self.on_event
        # Fixed provider GUIDs for Microsoft-Windows-WinINet(-Capture).
        providers = [ProviderInfo('Microsoft-Windows-WinINet', GUID('{43D1A55C-76D6-4F7E-995C-64C711E5CAFE}'), level, any_keywords, all_keywords), ProviderInfo('Microsoft-Windows-WinINet-Capture', GUID('{A70FF94F-570B-4979-BA5C-E59C9FEAB61B}'), level, any_keywords, all_keywords)]
        super().__init__(ring_buf_size=ring_buf_size, max_str_len=max_str_len, min_buffers=min_buffers, max_buffers=max_buffers, event_callback=self.event_callback, task_name_filters=filters, providers=providers)

    def on_event(self, event_tufo):
        """Default handler: route the event to file and/or console output."""
        common.on_event_callback(event_tufo, logfile=self.logfile, no_conout=self.no_conout)
class CTCHead(nn.Layer):
    """CTC prediction head: a single FC layer, or two stacked FC layers when
    ``mid_channels`` is given. At inference time (not training) the logits
    are turned into probabilities with a softmax over the class axis."""

    def __init__(self, in_channels, out_channels, fc_decay=0.0004, mid_channels=None, **kwargs):
        super(CTCHead, self).__init__()
        if mid_channels is None:
            self.fc = self._build_fc(in_channels, out_channels, fc_decay)
        else:
            self.fc1 = self._build_fc(in_channels, mid_channels, fc_decay)
            self.fc2 = self._build_fc(mid_channels, out_channels, fc_decay)
        self.out_channels = out_channels
        self.mid_channels = mid_channels

    @staticmethod
    def _build_fc(in_dim, out_dim, l2_decay):
        # get_para_bias_attr derives weight/bias parameter attributes
        # (regularization etc.) from the layer's fan-in.
        weight_attr, bias_attr = get_para_bias_attr(l2_decay=l2_decay, k=in_dim)
        return nn.Linear(in_dim, out_dim, weight_attr=weight_attr, bias_attr=bias_attr)

    def forward(self, x, targets=None):
        if self.mid_channels is None:
            logits = self.fc(x)
        else:
            logits = self.fc2(self.fc1(x))
        if not self.training:
            # Probabilities only for inference; raw logits while training.
            logits = F.softmax(logits, axis=2)
        return logits
# NOTE(review): the dangling `.usefixtures('use_tmpdir')` below looks like a
# stripped `@pytest.mark.usefixtures('use_tmpdir')` decorator — as written it
# is a syntax error; confirm against the original source.
.usefixtures('use_tmpdir')
def test_job_dispatch_run_subset_specified_as_parmeter():
    """job_dispatch invoked with explicit job names ('job_B', 'job_C') must
    run only those jobs: each job writes job_<arg>.out, so job_A.out must be
    absent while job_B.out and job_C.out exist."""
    # Each job runs this script, which just touches job_<argv[1]>.out.
    with open('dummy_executable', 'w', encoding='utf-8') as f:
        f.write('#!/usr/bin/env python\nimport sys, os\nfilename = "job_{}.out".format(sys.argv[1])\nf = open(filename, "w", encoding="utf-8")\nf.close()\n')
    executable = os.path.realpath('dummy_executable')
    os.chmod('dummy_executable', ((stat.S_IRWXU | stat.S_IRWXO) | stat.S_IRWXG))
    # Three jobs A/B/C, identical apart from name and argument.
    job_list = {'global_environment': {}, 'global_update_path': {}, 'jobList': [{'name': 'job_A', 'executable': executable, 'target_file': None, 'error_file': None, 'start_file': None, 'stdout': 'dummy.stdout', 'stderr': 'dummy.stderr', 'stdin': None, 'argList': ['A'], 'environment': None, 'exec_env': None, 'license_path': None, 'max_running_minutes': None, 'min_arg': 1, 'arg_types': [], 'max_arg': None}, {'name': 'job_B', 'executable': executable, 'target_file': None, 'error_file': None, 'start_file': None, 'stdout': 'dummy.stdout', 'stderr': 'dummy.stderr', 'stdin': None, 'argList': ['B'], 'environment': None, 'exec_env': None, 'license_path': None, 'max_running_minutes': None, 'min_arg': 1, 'arg_types': [], 'max_arg': None}, {'name': 'job_C', 'executable': executable, 'target_file': None, 'error_file': None, 'start_file': None, 'stdout': 'dummy.stdout', 'stderr': 'dummy.stderr', 'stdin': None, 'argList': ['C'], 'environment': None, 'exec_env': None, 'license_path': None, 'max_running_minutes': None, 'min_arg': 1, 'arg_types': [], 'max_arg': None}], 'run_id': '', 'ert_pid': ''}
    with open('jobs.json', 'w', encoding='utf-8') as f:
        f.write(json.dumps(job_list))
    # Wrapper that detaches the dispatcher into its own session.
    with open('setsid', 'w', encoding='utf-8') as f:
        f.write(dedent('    #!/usr/bin/env python\n    import os\n    import sys\n    os.setsid()\n    os.execvp(sys.argv[1], sys.argv[1:])\n    '))
    os.chmod('setsid', 493)  # 493 == 0o755 (rwxr-xr-x)
    job_dispatch_script = importlib.util.find_spec('_ert_job_runner.job_dispatch').origin
    # Only job_B and job_C are named on the command line.
    job_dispatch_process = Popen([(os.getcwd() + '/setsid'), sys.executable, job_dispatch_script, os.getcwd(), 'job_B', 'job_C'])
    job_dispatch_process.wait()
    assert (not os.path.isfile('job_A.out'))
    assert os.path.isfile('job_B.out')
    assert os.path.isfile('job_C.out')
def readFunLite():
    """Read an ADH mesh file and a nodal function file, then write the node
    coordinates (x/y/z.grf) and one .grf file per time step.

    Command-line driven: expects meshFile funFile funOutFile as positional
    arguments (see `usage`).
    """
    from optparse import OptionParser
    usage = 'usage: %readFunLite [options] meshFile funFile funOutFile'
    parser = OptionParser(usage=usage)
    parser.add_option('-m', '--matlab', help='print edges to files for plotting in matlab', action='store_true', dest='matlab', default=False)
    parser.add_option('-M', '--view-matlab', help='plot edges in matlab', action='store_true', dest='viewMatlab', default=False)
    parser.add_option('-x', '--nnodes-x', help='use NX nodes in the x direction', action='store', type='int', dest='nx', default=2)
    parser.add_option('-y', '--nnodes-y', help='use NY nodes in the y direction', action='store', type='int', dest='ny', default=2)
    parser.add_option('-z', '--nnodes-z', help='use NZ nodes in the z direction', action='store', type='int', dest='nz', default=2)
    (opts, args) = parser.parse_args()
    if (len(args) == 3):
        meshFilename = args[0]
        funFilename = args[1]
        funOutFilename = args[2]
    else:
        print(usage)
        return
    meshIn = open(meshFilename, 'r')
    print(('Reading nodes of the mesh from file=%s' % meshFilename))
    line = meshIn.readline()
    columns = line.split()
    nodes = []
    # ADH node ids are 1-based; shift to 0-based.
    adhBase = 1
    # Skip forward to the first 'ND' (node) record...
    while (columns[0] != 'ND'):
        line = meshIn.readline()
        columns = line.split()
    else:
        # ...then read consecutive ND records. Note: this is a while/else —
        # since the loop above has no `break`, the else block always runs.
        while (line and (columns[0] == 'ND')):
            nodes.append(Node((int(columns[1]) - adhBase), float(columns[2]), float(columns[3]), float(columns[4])))
            line = meshIn.readline()
            columns = line.split()
    funIn = open(funFilename, 'r')
    nNodes = 0
    fun = []
    # The function file starts with a 6-line header; 'ND <n>' gives the count.
    for i in range(6):
        line = funIn.readline()
        print(line.strip())
        words = line.split()
        if (words[0] == 'ND'):
            nNodes = int(words[1])
    if (nNodes != len(nodes)):
        # Mismatch is reported but not fatal — processing continues.
        print("the number of nodes in mesh and function files don't match")
    line = funIn.readline()
    # Each time step is a 'TS' header line followed by nNodes values.
    while (line.strip() != 'ENDDS'):
        print(('Reading ' + line.strip()))
        words = line.split()
        # NOTE(review): `zeros(..., Float)` presumably comes from the legacy
        # Numeric/numpy-old API imported elsewhere in this file — confirm.
        u = zeros(nNodes, Float)
        for i in range(nNodes):
            u[i] = float(funIn.readline())
        fun.append(u)
        line = funIn.readline()
    print(('Read %i timesteps' % len(fun)))
    print('Writing coordinate files x.grf, y.grf, and z.grf')
    xiList = {}
    xOut = open('x.grf', 'w')
    yOut = open('y.grf', 'w')
    zOut = open('z.grf', 'w')
    for n in nodes:
        xOut.write(('%15.8e \n' % n.x()))
        yOut.write(('%15.8e \n' % n.y()))
        zOut.write(('%15.8e \n' % n.z()))
    xOut.close()
    yOut.close()
    zOut.close()
    # One output file per time step: <funOutFilename><ts>.grf
    for (ts, f) in enumerate(fun):
        funOutFileTS = ((funOutFilename + repr(ts)) + '.grf')
        print(('Writing time step=%i to file=%s' % (ts, funOutFileTS)))
        funOut = open(funOutFileTS, 'w')
        for v in f:
            funOut.write(('%15.8e \n' % v))
        funOut.close()
def getPathFromShp(shpfile, region, encoding='utf-8'):
    """Build a matplotlib compound Path from the shapes in a shapefile.

    :param shpfile: Path to the shapefile.
    :param region: List of region codes; record field 4 must be in it.
        The special value ``[100000]`` selects every record.
    :param encoding: Shapefile text encoding.
    :return: A compound Path, or None when nothing matched or on any error
        (errors are printed, not raised).
    """
    try:
        sf = shapefile.Reader(shpfile, encoding=encoding)
        vertices = []
        codes = []
        paths = []
        for shape_rec in sf.shapeRecords():
            if ((region == [100000]) or (shape_rec.record[4] in region)):
                pts = shape_rec.shape.points
                # Part offsets plus a sentinel at the total point count, so
                # prt[i]..prt[i+1] delimits each ring.
                prt = (list(shape_rec.shape.parts) + [len(pts)])
                for i in range((len(prt) - 1)):
                    for j in range(prt[i], prt[(i + 1)]):
                        vertices.append((pts[j][0], pts[j][1]))
                    # One MOVETO, the interior LINETOs, then CLOSEPOLY per ring.
                    codes += [Path.MOVETO]
                    codes += ([Path.LINETO] * ((prt[(i + 1)] - prt[i]) - 2))
                    codes += [Path.CLOSEPOLY]
                # NOTE(review): `vertices`/`codes` are never reset between
                # records, so each Path built here also contains every
                # previously matched shape; since all paths are compounded
                # below this may be harmless duplication — confirm intent.
                path = Path(vertices, codes)
                paths.append(path)
        if paths:
            path = Path.make_compound_path(*paths)
        else:
            path = None
        return path
    except Exception as err:
        # Best-effort: report and signal failure with None.
        print(err)
        return None
class OptionPlotoptionsWindbarbLabelStyle(Options):
    """Options wrapper for plotOptions.windbarb.label.style.

    NOTE(review): each same-named method pair looks like a stripped
    @property getter / @<name>.setter pair; as written the second definition
    shadows the first — confirm against the original source.
    """
    def fontSize(self):
        """Current label font size (default '0.8em')."""
        return self._config_get('0.8em')
    def fontSize(self, num: float):
        """Set the label font size."""
        self._config(num, js_type=False)
    def fontWeight(self):
        """Current label font weight (default 'bold')."""
        return self._config_get('bold')
    def fontWeight(self, text: str):
        """Set the label font weight."""
        self._config(text, js_type=False)
# NOTE(review): the bare call below looks like a stripped decorator (e.g.
# @common.requires_os(*metadata.platforms)) gating this emulation to the
# platforms in `metadata` — confirm against the original source.
_os(*metadata.platforms)
def main():
    """Emulate the fodhelper UAC-bypass technique for detection testing.

    Temporarily creates the ms-settings shell\\open\\command registry key
    (the value fodhelper.exe consults), drops a renamed test executable as
    fodhelper.exe, runs it, and cleans up afterwards.
    """
    key = 'Software\\Classes\\ms-settings\\shell\\open\\command'
    value = 'test'
    data = 'test'
    # The context manager removes the registry key again on exit.
    with common.temporary_reg(common.HKCU, key, value, data):
        pass
    fodhelper = 'C:\\Users\\Public\\fodhelper.exe'
    powershell = 'C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe'
    common.copy_file(EXE_FILE, fodhelper)
    # Short-lived launch: killed after the 2s timeout.
    common.execute([fodhelper, '/c', powershell], timeout=2, kill=True)
    common.remove_file(fodhelper)
class OptionPlotoptionsNetworkgraphSonificationTracksPointgrouping(Options):
    """Options wrapper for networkgraph sonification track point grouping.

    NOTE(review): each same-named method pair looks like a stripped
    @property getter / @<name>.setter pair; as written the second definition
    shadows the first — confirm against the original source.
    """
    def algorithm(self):
        """Current grouping algorithm (default 'minmax')."""
        return self._config_get('minmax')
    def algorithm(self, text: str):
        """Set the grouping algorithm."""
        self._config(text, js_type=False)
    def enabled(self):
        """Whether point grouping is enabled (default True)."""
        return self._config_get(True)
    def enabled(self, flag: bool):
        """Enable/disable point grouping."""
        self._config(flag, js_type=False)
    def groupTimespan(self):
        """Current group timespan in ms (default 15)."""
        return self._config_get(15)
    def groupTimespan(self, num: float):
        """Set the group timespan."""
        self._config(num, js_type=False)
    def prop(self):
        """Current point property used for grouping (default 'y')."""
        return self._config_get('y')
    def prop(self, text: str):
        """Set the point property used for grouping."""
        self._config(text, js_type=False)
class ReferenceTemplate(object):
    """Immutable pairing of a resource identifier with its resource type.

    NOTE(review): `id` and `resource_type` look like stripped @property
    accessors — confirm against the original source.
    """

    def __init__(self, id: _identifier_model.Identifier, resource_type: int) -> None:
        self._id = id
        self._resource_type = resource_type

    def id(self) -> _identifier_model.Identifier:
        """The wrapped identifier."""
        return self._id

    def resource_type(self) -> int:
        """The numeric resource type."""
        return self._resource_type
class BlockedWell(Well):
    """A Well variant whose trajectory is blocked against a 3D grid; tracks
    the name of that grid in addition to the base Well state.

    NOTE(review): the two same-named `gridname` methods look like a stripped
    @property getter / @gridname.setter pair (as written the setter shadows
    the getter), and the bare parenthesized keyword expression before
    `from_roxar` looks like a stripped @deprecated(...) decorator (it is a
    syntax error as-is) — confirm against the original source.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Name of the grid this well is blocked against; None until set.
        self._gridname = None

    def gridname(self):
        """The associated grid name (or None)."""
        return self._gridname

    def gridname(self, newname):
        """Set the associated grid name; must be a string."""
        if isinstance(newname, str):
            self._gridname = newname
        else:
            raise ValueError('Input name is not a string.')

    def copy(self):
        """Deep-copy via Well.copy, preserving the grid name."""
        newbw = super().copy()
        newbw._gridname = self._gridname
        return newbw

    (deprecated_in='2.16', removed_in='4.0', current_version=__version__, details='Use xtgeo.blockedwell_from_roxar() instead')
    def from_roxar(self, *args, **kwargs):
        """Load this blocked well from a Roxar project (deprecated API).

        Positional args: project, gridname, bwellname, wellname.
        """
        project = args[0]
        gname = args[1]
        bwname = args[2]
        wname = args[3]
        lognames = kwargs.get('lognames', 'all')
        ijk = kwargs.get('ijk', False)
        realisation = kwargs.get('realisation', 0)
        _blockedwell_roxapi.import_bwell_roxapi(self, project, gname, bwname, wname, lognames=lognames, ijk=ijk, realisation=realisation)
        # Re-sync derived state after the import populated the raw data.
        self._ensure_consistency()

    def to_roxar(self, *args, **kwargs):
        """Store this blocked well into a Roxar project.

        Positional args: project, gridname, bwellname, wellname.
        """
        project = args[0]
        gname = args[1]
        bwname = args[2]
        wname = args[3]
        lognames = kwargs.get('lognames', 'all')
        ijk = kwargs.get('ijk', False)
        realisation = kwargs.get('realisation', 0)
        _blockedwell_roxapi.export_bwell_roxapi(self, project, gname, bwname, wname, lognames=lognames, ijk=ijk, realisation=realisation)
class OptionPlotoptionsArcdiagramLabelStyle(Options):
    """Accessors for Highcharts ``plotOptions.arcdiagram.label.style``.

    NOTE(review): getter/setter pairs share one name, so the setter shadows
    the getter — ``@property``/``@<name>.setter`` decorators appear to have
    been stripped from this file; confirm against the original source.
    """
    def fontSize(self):
        # Getter; default font size is '0.8em'.
        return self._config_get('0.8em')
    def fontSize(self, num: float):
        self._config(num, js_type=False)
    def fontWeight(self):
        # Getter; default font weight is 'bold'.
        return self._config_get('bold')
    def fontWeight(self, text: str):
        self._config(text, js_type=False)
def main():
    """Ansible entry point for the fortios ``firewall_ippool6`` module.

    Builds the argument spec from the versioned schema, opens the httpapi
    connection, checks schema/version compatibility, applies the requested
    state and exits with changed/meta/diff (warning on version mismatch).
    """
    module_spec = schema_to_module_spec(versioned_schema)
    mkeyname = 'name'
    fields = {
        'access_token': {'required': False, 'type': 'str', 'no_log': True},
        'enable_log': {'required': False, 'type': 'bool', 'default': False},
        'vdom': {'required': False, 'type': 'str', 'default': 'root'},
        'member_path': {'required': False, 'type': 'str'},
        'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']},
        'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']},
        'firewall_ippool6': {'required': False, 'type': 'dict', 'default': None, 'options': {}},
    }
    # Copy the generated per-attribute specs in; the mkey becomes mandatory.
    option_specs = fields['firewall_ippool6']['options']
    for attr, spec in module_spec['options'].items():
        option_specs[attr] = spec
        if mkeyname == attr:
            spec['required'] = True

    module = AnsibleModule(argument_spec=fields, supports_check_mode=True)
    check_legacy_fortiosapi(module)

    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if 'access_token' in module.params:
            connection.set_option('access_token', module.params['access_token'])
        if 'enable_log' in module.params:
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'firewall_ippool6')
        is_error, has_changed, result, diff = fortios_firewall(module.params, fos, module.check_mode)
    else:
        module.fail_json(**FAIL_SOCKET_MSG)

    # Single flag for "schema version did not match the target device".
    version_mismatch = bool(versions_check_result and versions_check_result['matched'] is False)
    if version_mismatch:
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if not is_error:
        if version_mismatch:
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif version_mismatch:
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
def decorator_with_optional_params(decorator: Callable) -> Callable:
    """Allow *decorator* to be used both with and without call parentheses.

    ``@d`` applies *decorator* directly; ``@d(*args, **kwargs)`` first binds
    the extra arguments and then applies it.

    Fix: the original body contained a bare ``(decorator)`` expression — a
    no-op that is evidently a stripped ``@functools.wraps(decorator)``; it is
    restored here so the returned callable keeps the decorator's metadata
    (``__name__``, ``__doc__``, ...).
    """
    import functools  # local import: keeps the file's top-level imports untouched

    @functools.wraps(decorator)
    def new_decorator(*args: Any, **kwargs: Any) -> Callable:
        # Bare usage: the single positional argument is the decorated function.
        if (len(args) == 1) and (len(kwargs) == 0) and callable(args[0]):
            return decorator(args[0])

        # Parameterized usage: return a decorator closed over args/kwargs.
        def final_decorator(real_function: Callable) -> Callable:
            return decorator(real_function, *args, **kwargs)
        return final_decorator
    return new_decorator
class Parser(object):
    """Parses C declarations (cdef source) into cffi ``model`` types.

    The source is preprocessed, prefixed with dummy typedefs so pycparser
    knows every typedef name, parsed, and each top-level declaration is
    converted and recorded in ``self._declarations`` under a
    ``'<kind> <name>'`` key.

    Fix: ``_get_struct_union_enum_type`` raised a bare ``CDefError`` (a
    NameError at runtime); it now raises ``api.CDefError`` like every other
    raise in this class.
    """
    def __init__(self):
        self._declarations = {}
        self._anonymous_counter = 0
        self._structnode2type = weakref.WeakKeyDictionary()
        self._override = False
        self._packed = False
        self._int_constants = {}
    def _parse(self, csource):
        """Preprocess and parse; return ``(ast, macros, decorated_csource)``."""
        (csource, macros) = _preprocess(csource)
        # pycparser must know typedef names while parsing, so every known
        # typedef (and common type name) is declared up front as a dummy
        # 'typedef int'; what it aliases does not matter for parsing.
        ctn = _common_type_names(csource)
        typenames = []
        for name in sorted(self._declarations):
            if name.startswith('typedef '):
                name = name[8:]
                typenames.append(name)
                ctn.discard(name)
        typenames += sorted(ctn)
        csourcelines = [('typedef int %s;' % typename) for typename in typenames]
        # Marker typedef: everything before it in the AST is the dummy prefix.
        csourcelines.append('typedef int __dotdotdot__;')
        csourcelines.append(csource)
        csource = '\n'.join(csourcelines)
        # The shared pycparser instance is not thread-safe; serialize access.
        if (lock is not None):
            lock.acquire()
        try:
            ast = _get_parser().parse(csource)
        except pycparser.c_parser.ParseError as e:
            self.convert_pycparser_error(e, csource)
        finally:
            if (lock is not None):
                lock.release()
        return (ast, macros, csource)
    def _convert_pycparser_error(self, e, csource):
        """Extract the offending source line from a ':<line>:' error, or None."""
        line = None
        msg = str(e)
        if (msg.startswith(':') and (':' in msg[1:])):
            linenum = msg[1:msg.find(':', 1)]
            if linenum.isdigit():
                linenum = int(linenum, 10)
                csourcelines = csource.splitlines()
                if (1 <= linenum <= len(csourcelines)):
                    line = csourcelines[(linenum - 1)]
        return line
    def convert_pycparser_error(self, e, csource):
        """Re-raise a pycparser ParseError as a CDefError with the bad line quoted."""
        line = self._convert_pycparser_error(e, csource)
        msg = str(e)
        if line:
            msg = ('cannot parse "%s"\n%s' % (line.strip(), msg))
        else:
            msg = ('parse error\n%s' % (msg,))
        raise api.CDefError(msg)
    def parse(self, csource, override=False, packed=False):
        """Public entry point; override/packed flags are scoped to this call."""
        prev_override = self._override
        prev_packed = self._packed
        try:
            self._override = override
            self._packed = packed
            self._internal_parse(csource)
        finally:
            self._override = prev_override
            self._packed = prev_packed
    def _internal_parse(self, csource):
        (ast, macros, csource) = self._parse(csource)
        self._process_macros(macros)
        # Skip the dummy typedef prefix up to the __dotdotdot__ marker.
        iterator = iter(ast.ext)
        for decl in iterator:
            if (decl.name == '__dotdotdot__'):
                break
        try:
            for decl in iterator:
                if isinstance(decl, pycparser.c_ast.Decl):
                    self._parse_decl(decl)
                elif isinstance(decl, pycparser.c_ast.Typedef):
                    if (not decl.name):
                        raise api.CDefError('typedef does not declare any name', decl)
                    # 'typedef ... foo;' and 'typedef ... *foo;' become
                    # opaque unknown types; anything else is resolved fully.
                    if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) and (decl.type.type.names == ['__dotdotdot__'])):
                        realtype = model.unknown_type(decl.name)
                    elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and isinstance(decl.type.type.type, pycparser.c_ast.IdentifierType) and (decl.type.type.type.names == ['__dotdotdot__'])):
                        realtype = model.unknown_ptr_type(decl.name)
                    else:
                        realtype = self._get_type(decl.type, name=decl.name)
                    self._declare(('typedef ' + decl.name), realtype)
                else:
                    raise api.CDefError('unrecognized construct', decl)
        except api.FFIError as e:
            # Append the offending source line, when it can be located.
            msg = self._convert_pycparser_error(e, csource)
            if msg:
                e.args = ((e.args[0] + ('\n *** Err: %s' % msg)),)
            raise
    def _add_constants(self, key, val):
        """Record an integer constant; re-declaring with a new value is an error."""
        if (key in self._int_constants):
            if (self._int_constants[key] == val):
                return
            raise api.FFIError(('multiple declarations of constant: %s' % (key,)))
        self._int_constants[key] = val
    def _process_macros(self, macros):
        """Accept '#define NAME <int literal>' and '#define NAME ...' only."""
        for (key, value) in macros.items():
            value = value.strip()
            match = _r_int_literal.search(value)
            if (match is not None):
                int_str = match.group(0).lower().rstrip('ul')
                # C octal '0NNN' must be spelled '0oNNN' for Python's int().
                if (int_str.startswith('0') and (int_str != '0') and (not int_str.startswith('0x'))):
                    int_str = ('0o' + int_str[1:])
                pyvalue = int(int_str, 0)
                self._add_constants(key, pyvalue)
                self._declare(('macro ' + key), pyvalue)
            elif (value == '...'):
                self._declare(('macro ' + key), value)
            else:
                raise api.CDefError(('only supports one of the following syntax:\n #define %s ... (literally dot-dot-dot)\n #define %s NUMBER (with NUMBER an integer constant, decimal/hex/octal)\ngot:\n #define %s %s' % (key, key, key, value)))
    def _parse_decl(self, decl):
        """Register one non-typedef declaration: function, struct/union/enum, or variable."""
        node = decl.type
        if isinstance(node, pycparser.c_ast.FuncDecl):
            tp = self._get_type(node, name=decl.name)
            assert isinstance(tp, model.RawFunctionType)
            tp = self._get_type_pointer(tp)
            self._declare(('function ' + decl.name), tp)
        else:
            if isinstance(node, pycparser.c_ast.Struct):
                # Register the struct type even without a variable name.
                if (node.decls is not None):
                    self._get_struct_union_enum_type('struct', node)
            elif isinstance(node, pycparser.c_ast.Union):
                if (node.decls is not None):
                    self._get_struct_union_enum_type('union', node)
            elif isinstance(node, pycparser.c_ast.Enum):
                if (node.values is not None):
                    self._get_struct_union_enum_type('enum', node)
            elif (not decl.name):
                raise api.CDefError('construct does not declare any variable', decl)
            if decl.name:
                tp = self._get_type(node, partial_length_ok=True)
                if self._is_constant_globalvar(node):
                    self._declare(('constant ' + decl.name), tp)
                else:
                    self._declare(('variable ' + decl.name), tp)
    def parse_type(self, cdecl):
        """Parse a single type expression by wrapping it as a dummy parameter."""
        (ast, macros) = self._parse(('void __dummy(\n%s\n);' % cdecl))[:2]
        assert (not macros)
        exprnode = ast.ext[(- 1)].type.args.params[0]
        if isinstance(exprnode, pycparser.c_ast.ID):
            raise api.CDefError(("unknown identifier '%s'" % (exprnode.name,)))
        return self._get_type(exprnode.type)
    def _declare(self, name, obj):
        """Store obj under name; duplicates allowed only when identical or overriding."""
        if (name in self._declarations):
            if (self._declarations[name] is obj):
                return
            if (not self._override):
                raise api.FFIError(('multiple declarations of %s (for interactive usage, try cdef(xx, override=True))' % (name,)))
        assert ('__dotdotdot__' not in name.split())
        self._declarations[name] = obj
    def _get_type_pointer(self, type, const=False):
        if isinstance(type, model.RawFunctionType):
            return type.as_function_pointer()
        if const:
            return model.ConstPointerType(type)
        return model.PointerType(type)
    def _get_type(self, typenode, name=None, partial_length_ok=False):
        """Convert a pycparser type node into a cffi model type."""
        # Fast path: a typename that is already a registered typedef.
        if (isinstance(typenode, pycparser.c_ast.TypeDecl) and isinstance(typenode.type, pycparser.c_ast.IdentifierType) and (len(typenode.type.names) == 1) and (('typedef ' + typenode.type.names[0]) in self._declarations)):
            type = self._declarations[('typedef ' + typenode.type.names[0])]
            return type
        if isinstance(typenode, pycparser.c_ast.ArrayDecl):
            if (typenode.dim is None):
                length = None
            else:
                length = self._parse_constant(typenode.dim, partial_length_ok=partial_length_ok)
            return model.ArrayType(self._get_type(typenode.type), length)
        if isinstance(typenode, pycparser.c_ast.PtrDecl):
            const = (isinstance(typenode.type, pycparser.c_ast.TypeDecl) and ('const' in typenode.type.quals))
            return self._get_type_pointer(self._get_type(typenode.type), const)
        if isinstance(typenode, pycparser.c_ast.TypeDecl):
            type = typenode.type
            if isinstance(type, pycparser.c_ast.IdentifierType):
                # Canonicalize 'short/long/signed/unsigned' prefix spellings,
                # except the special case 'signed char' which stays distinct.
                names = list(type.names)
                if (names != ['signed', 'char']):
                    prefixes = {}
                    while names:
                        name = names[0]
                        if (name in ('short', 'long', 'signed', 'unsigned')):
                            prefixes[name] = (prefixes.get(name, 0) + 1)
                            del names[0]
                        else:
                            break
                    newnames = []
                    for prefix in ('unsigned', 'short', 'long'):
                        for i in range(prefixes.get(prefix, 0)):
                            newnames.append(prefix)
                    if (not names):
                        names = ['int']
                    if (names == ['int']):
                        # 'short int' / 'long int' -> 'short' / 'long'
                        if (('short' in prefixes) or ('long' in prefixes)):
                            names = []
                    names = (newnames + names)
                ident = ' '.join(names)
                if (ident == 'void'):
                    return model.void_type
                if (ident == '__dotdotdot__'):
                    raise api.FFIError((':%d: bad usage of "..."' % typenode.coord.line))
                return resolve_common_type(ident)
            if isinstance(type, pycparser.c_ast.Struct):
                return self._get_struct_union_enum_type('struct', type, name)
            if isinstance(type, pycparser.c_ast.Union):
                return self._get_struct_union_enum_type('union', type, name)
            if isinstance(type, pycparser.c_ast.Enum):
                return self._get_struct_union_enum_type('enum', type, name)
        if isinstance(typenode, pycparser.c_ast.FuncDecl):
            return self._parse_function_type(typenode, name)
        # Nested anonymous struct/union inside another struct.
        if isinstance(typenode, pycparser.c_ast.Struct):
            return self._get_struct_union_enum_type('struct', typenode, name, nested=True)
        if isinstance(typenode, pycparser.c_ast.Union):
            return self._get_struct_union_enum_type('union', typenode, name, nested=True)
        raise api.FFIError((':%d: bad or unsupported type declaration' % typenode.coord.line))
    def _parse_function_type(self, typenode, funcname=None):
        """Build a RawFunctionType, handling '(...)' ellipsis and '(void)'."""
        params = list(getattr(typenode.args, 'params', []))
        ellipsis = ((len(params) > 0) and isinstance(params[(- 1)].type, pycparser.c_ast.TypeDecl) and isinstance(params[(- 1)].type.type, pycparser.c_ast.IdentifierType) and (params[(- 1)].type.type.names == ['__dotdotdot__']))
        if ellipsis:
            params.pop()
            if (not params):
                raise api.CDefError(("%s: a function with only '(...)' as argument is not correct C" % (funcname or 'in expression')))
        elif ((len(params) == 1) and isinstance(params[0].type, pycparser.c_ast.TypeDecl) and isinstance(params[0].type.type, pycparser.c_ast.IdentifierType) and (list(params[0].type.type.names) == ['void'])):
            del params[0]
        args = [self._as_func_arg(self._get_type(argdeclnode.type)) for argdeclnode in params]
        result = self._get_type(typenode.type)
        return model.RawFunctionType(tuple(args), result, ellipsis)
    def _as_func_arg(self, type):
        # C decays array parameters to pointers and function params to fn pointers.
        if isinstance(type, model.ArrayType):
            return model.PointerType(type.item)
        elif isinstance(type, model.RawFunctionType):
            return type.as_function_pointer()
        else:
            return type
    def _is_constant_globalvar(self, typenode):
        if isinstance(typenode, pycparser.c_ast.PtrDecl):
            return ('const' in typenode.quals)
        if isinstance(typenode, pycparser.c_ast.TypeDecl):
            return ('const' in typenode.quals)
        return False
    def _get_struct_union_enum_type(self, kind, type, name=None, nested=False):
        """Return (creating if needed) the model type for a struct/union/enum node."""
        # One model type per AST node, cached weakly.
        try:
            return self._structnode2type[type]
        except KeyError:
            pass
        force_name = name
        name = type.name
        # Anonymous types get a '$<name>' or '$<counter>' placeholder name.
        if (name is None):
            if (force_name is not None):
                explicit_name = ('$%s' % force_name)
            else:
                self._anonymous_counter += 1
                explicit_name = ('$%d' % self._anonymous_counter)
            tp = None
        else:
            explicit_name = name
            key = ('%s %s' % (kind, name))
            tp = self._declarations.get(key, None)
        if (tp is None):
            if (kind == 'struct'):
                tp = model.StructType(explicit_name, None, None, None)
            elif (kind == 'union'):
                tp = model.UnionType(explicit_name, None, None, None)
            elif (kind == 'enum'):
                if (explicit_name == '__dotdotdot__'):
                    # Fix: was a bare `CDefError` (NameError); use api.CDefError
                    # like every other raise in this class.
                    raise api.CDefError('Enums cannot be declared with ...')
                tp = self._build_enum_type(explicit_name, type.values)
            else:
                raise AssertionError(('kind = %r' % (kind,)))
            if (name is not None):
                self._declare(key, tp)
        elif ((kind == 'enum') and (type.values is not None)):
            raise NotImplementedError(("enum %s: the '{}' declaration should appear on the first time the enum is mentioned, not later" % explicit_name))
        if (not tp.forcename):
            tp.force_the_name(force_name)
        if (tp.forcename and ('$' in tp.name)):
            self._declare(('anonymous %s' % tp.forcename), tp)
        self._structnode2type[type] = tp
        # Enums carry their fields already; structs/unions without a body stop here.
        if (kind == 'enum'):
            return tp
        if (type.decls is None):
            return tp
        if (tp.fldnames is not None):
            raise api.CDefError(('duplicate declaration of struct %s' % name))
        fldnames = []
        fldtypes = []
        fldbitsize = []
        for decl in type.decls:
            # A lone '...;' member marks the struct as partial.
            if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and (''.join(decl.type.names) == '__dotdotdot__')):
                self._make_partial(tp, nested)
                continue
            if (decl.bitsize is None):
                bitsize = (- 1)
            else:
                bitsize = self._parse_constant(decl.bitsize)
            self._partial_length = False
            type = self._get_type(decl.type, partial_length_ok=True)
            if self._partial_length:
                self._make_partial(tp, nested)
            if (isinstance(type, model.StructType) and type.partial):
                self._make_partial(tp, nested)
            fldnames.append((decl.name or ''))
            fldtypes.append(type)
            fldbitsize.append(bitsize)
        tp.fldnames = tuple(fldnames)
        tp.fldtypes = tuple(fldtypes)
        tp.fldbitsize = tuple(fldbitsize)
        if (fldbitsize != ([(- 1)] * len(fldbitsize))):
            if (isinstance(tp, model.StructType) and tp.partial):
                raise NotImplementedError(("%s: using both bitfields and '...;'" % (tp,)))
        tp.packed = self._packed
        return tp
    def _make_partial(self, tp, nested):
        if (not isinstance(tp, model.StructOrUnion)):
            raise api.CDefError(('%s cannot be partial' % (tp,)))
        if ((not tp.has_c_name()) and (not nested)):
            raise NotImplementedError(('%s is partial but has no C name' % (tp,)))
        tp.partial = True
    def _parse_constant(self, exprnode, partial_length_ok=False):
        """Evaluate a constant expression node: int/char literals, unary +/-,
        previously-seen constant names, and the '...' array-length marker."""
        if isinstance(exprnode, pycparser.c_ast.Constant):
            s = exprnode.value
            if s.startswith('0'):
                if (s.startswith('0x') or s.startswith('0X')):
                    return int(s, 16)
                return int(s, 8)
            elif ('1' <= s[0] <= '9'):
                return int(s, 10)
            elif ((s[0] == "'") and (s[(- 1)] == "'") and ((len(s) == 3) or ((len(s) == 4) and (s[1] == '\\')))):
                return ord(s[(- 2)])
            else:
                raise api.CDefError(('invalid constant %r' % (s,)))
        if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and (exprnode.op == '+')):
            return self._parse_constant(exprnode.expr)
        if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and (exprnode.op == '-')):
            return (- self._parse_constant(exprnode.expr))
        if (isinstance(exprnode, pycparser.c_ast.ID) and (exprnode.name in self._int_constants)):
            return self._int_constants[exprnode.name]
        if partial_length_ok:
            if (isinstance(exprnode, pycparser.c_ast.ID) and (exprnode.name == '__dotdotdotarray__')):
                self._partial_length = True
                return '...'
        raise api.FFIError((':%d: unsupported expression: expected a simple numeric constant' % exprnode.coord.line))
    def _build_enum_type(self, explicit_name, decls):
        """Build an EnumType; '...' enumerators make it partial."""
        if (decls is not None):
            enumerators1 = [enum.name for enum in decls.enumerators]
            enumerators = [s for s in enumerators1 if (not _r_enum_dotdotdot.match(s))]
            partial = (len(enumerators) < len(enumerators1))
            enumerators = tuple(enumerators)
            enumvalues = []
            nextenumvalue = 0
            for enum in decls.enumerators[:len(enumerators)]:
                if (enum.value is not None):
                    nextenumvalue = self._parse_constant(enum.value)
                enumvalues.append(nextenumvalue)
                self._add_constants(enum.name, nextenumvalue)
                nextenumvalue += 1
            enumvalues = tuple(enumvalues)
            tp = model.EnumType(explicit_name, enumerators, enumvalues)
            tp.partial = partial
        else:
            tp = model.EnumType(explicit_name, (), ())
        return tp
    def include(self, other):
        """Import typedef/struct/union/enum declarations and constants from another Parser."""
        for (name, tp) in other._declarations.items():
            kind = name.split(' ', 1)[0]
            if (kind in ('typedef', 'struct', 'union', 'enum')):
                self._declare(name, tp)
        for (k, v) in other._int_constants.items():
            self._add_constants(k, v)
def sync_from_localcopy(repo_section, local_copy_dir):
    """Rsync one repo section from the offline local copy into this repo,
    then push the binary-transparency log if the offline copy has one."""
    logging.info('Syncing from local_copy_dir to this repo.')
    # Trailing '/' makes rsync copy directory contents, not the directory itself.
    source = os.path.join(local_copy_dir, repo_section).rstrip('/') + '/'
    destination = repo_section.rstrip('/') + '/'
    common.local_rsync(options, source, destination)
    offline_copy = os.path.join(local_copy_dir, BINARY_TRANSPARENCY_DIR)
    if os.path.exists(os.path.join(offline_copy, '.git')):
        online_copy = os.path.join(os.getcwd(), BINARY_TRANSPARENCY_DIR)
        push_binary_transparency(offline_copy, online_copy)
class RxEngine(object):
    """Runtime wrapper that loads an EAGERx engine node's parameters from the
    backend, wires it into the reactive pipeline via the message broker, and
    manages its initialization/shutdown lifecycle.

    Fix/cleanup: `_prepare_io_topics` contained three copy-pasted loops
    deserializing processors/spaces for inputs, outputs and states; they are
    factored into `_initialize_io_entries`.
    """
    def __init__(self, name, message_broker):
        self.name = name
        # Namespace = first two path segments of the node name.
        self.ns = '/'.join(name.split('/')[:2])
        self.mb = message_broker
        self.backend = message_broker.backend
        self.initialized = False
        self.has_shutdown = False
        (rate, inputs, outputs, states, engine_states, node_names, target_addresses, self.engine) = self._prepare_io_topics(self.name)
        rx_objects = eagerx.core.rx_pipelines.init_engine(self.ns, rate, self.engine, inputs, outputs, states, engine_states, node_names, target_addresses, self.mb)
        self.mb.add_rx_objects(node_name=name, node=self, **rx_objects)
        self.mb.connect_io()
        self.cond_reg = Condition()
    def node_initialized(self):
        """Block until the engine reports initialized, then publish the flag once."""
        with self.cond_reg:
            wait_for_node_initialization(self.engine.is_initialized, self.backend)
            if (not self.initialized):
                self.init_pub = self.backend.Publisher((self.name + '/initialized'), 'int64')
                self.init_pub.publish(0)
                self.backend.loginfo(('Node "%s" initialized.' % self.name))
                self.initialized = True
    @staticmethod
    def _initialize_io_entries(entries):
        """Replace dict-serialized 'processor'/'space' values with live objects, in place."""
        for entry in entries:
            if isinstance(entry['processor'], dict):
                from eagerx.core.specs import ProcessorSpec
                entry['processor'] = initialize_processor(ProcessorSpec(entry['processor']))
            if isinstance(entry['space'], dict):
                entry['space'] = eagerx.Space.from_dict(entry['space'])
    def _prepare_io_topics(self, name):
        """Fetch node params from the backend, instantiate the engine node and
        return (rate, inputs, outputs, states, engine_states, node_names,
        target_addresses, node)."""
        params = get_param_with_blocking(name, self.backend)
        node_names = params['config']['node_names']
        target_addresses = params['config']['target_addresses']
        rate = params['config']['rate']
        sync = get_param_with_blocking((self.ns + '/sync'), self.backend)
        real_time_factor = get_param_with_blocking((self.ns + '/real_time_factor'), self.backend)
        simulate_delays = get_param_with_blocking((self.ns + '/simulate_delays'), self.backend)
        # Was three identical copy-pasted loops over inputs/outputs/states.
        for section in ('inputs', 'outputs', 'states'):
            self._initialize_io_entries(params[section])
        params['inputs'] = {i['name']: i for i in params['inputs']}
        params['outputs'] = {i['name']: i for i in params['outputs']}
        params['states'] = {i['name']: i for i in params['states']}
        node_cls = load(params['node_type'])
        node = node_cls(ns=self.ns, message_broker=self.mb, sync=sync, real_time_factor=real_time_factor, simulate_delays=simulate_delays, params=params, target_addresses=target_addresses, node_names=node_names)
        inputs = tuple([value for (key, value) in params['inputs'].items()])
        outputs = tuple([value for (key, value) in params['outputs'].items()])
        states = tuple([value for (key, value) in params['states'].items()])
        # Collect engine states declared by all simulated objects.
        engine_states = []
        for (_name, obj) in params['objects'].items():
            if ('engine_states' not in obj):
                continue
            for (_cname, s) in obj['engine_states'].items():
                engine_states.append(s)
        engine_states = tuple(engine_states)
        return (rate, inputs, outputs, states, engine_states, node_names, target_addresses, node)
    def _shutdown(self):
        self.backend.logdebug(f'[{self.name}] RxEngine._shutdown() called.')
        self.init_pub.unregister()
    def node_shutdown(self):
        """Terminate launched nodes, shut down spawned rx nodes, the engine,
        the broker and the backend (idempotent)."""
        if (not self.has_shutdown):
            self.backend.logdebug(f'[{self.name}] RxEngine.node_shutdown() called.')
            for (address, node) in self.engine.launch_nodes.items():
                self.backend.loginfo(f"[{self.name}] Send termination signal to '{address}'.")
                node.terminate()
            for (_, rxnode) in self.engine.sp_nodes.items():
                rxnode: RxNode
                if (not rxnode.has_shutdown):
                    self.backend.loginfo(f"[{self.name}] Shutting down '{rxnode.name}'.")
                    rxnode.node_shutdown()
            self.backend.loginfo(f'[{self.name}] Shutting down.')
            self._shutdown()
            self.engine.shutdown()
            self.mb.shutdown()
            self.backend.shutdown()
            self.has_shutdown = True
def test_resets():
    """Resetting data/results must None-out data & model fields and NaN-fill results."""
    tfm = get_tfm()
    tfm._reset_data_results(True, True, True)
    tfm._reset_internal_settings()
    # Data and model-component attributes are cleared to None.
    for section in ('data', 'model_components'):
        for attr in OBJ_DESC[section]:
            assert getattr(tfm, attr) is None
    # Result attributes are NaN-filled rather than None.
    for attr in OBJ_DESC['results']:
        assert np.all(np.isnan(getattr(tfm, attr)))
    assert tfm.freqs is None
    assert tfm.modeled_spectrum_ is None
class AsyncTestCase(unittest.TestCase):
    """TestCase base that gives every test an isolated FcrTestEventLoop and
    drains leftover tasks on teardown."""
    def setUp(self) -> None:
        self._loop = FcrTestEventLoop()
        # Unset the global loop so tests must use self._loop explicitly.
        asyncio.set_event_loop(None)
    def tearDown(self) -> None:
        # Give tasks the test left behind up to 1s to finish before closing.
        pending = [t for t in asyncio.all_tasks(self._loop) if (not t.done())]
        if pending:
            res = self._run_loop(asyncio.wait(pending, timeout=1))
            (done, pending) = res[0]
            for p in pending:
                print('Task is still pending', p)
        self._loop.close()
    def wait_for_tasks(self, timeout: int=10) -> None:
        """Run the loop until all currently pending tasks complete.

        NOTE(review): the ``timeout`` parameter is never used below — the
        gather waits indefinitely; confirm whether a timeout was intended.
        """
        pending = asyncio.all_tasks(self._loop)
        self._loop.run_until_complete(asyncio.gather(*pending))
    def _run_loop(self, *args) -> typing.List[typing.Any]:
        # Run the given awaitables to completion; order of results follows
        # asyncio.wait's 'done' set, not the argument order.
        (finished, _) = self._loop.run_until_complete(asyncio.wait(args))
        return [task.result() for task in finished]
class BokehWidget(Widget):
    """Flexx widget embedding a Bokeh plot.

    NOTE(review): ``from_plot`` takes ``cls`` and the bare ``('size')``
    expression precedes ``__resize_plot`` — the original decorators
    (``@classmethod`` and flexx's ``@event.reaction('size')``) appear to have
    been stripped from this file; confirm against the original source.
    The methods below ``plot = event.Attribute(...)`` run on the JS side
    (``window`` is the browser global).
    """
    DEFAULT_MIN_SIZE = (100, 100)
    CSS = '\n    .flx-BokehWidget > .plotdiv {\n        overflow: hidden;\n    }\n    '
    def from_plot(cls, plot, **kwargs):
        """Build a BokehWidget wrapping an existing Bokeh plot object."""
        return make_bokeh_widget(plot, **kwargs)
    plot = event.Attribute(doc='The JS-side of the Bokeh plot object.')
    def _render_dom(self):
        # The DOM is filled imperatively by set_plot_components.
        return None
    def set_plot_components(self, d):
        """JS: inject Bokeh's div/script components and capture the plot object."""
        global window
        self.node.innerHTML = d.div
        el = window.document.createElement('script')
        el.innerHTML = d.script
        self.node.appendChild(el)
        def getplot():
            # Bokeh registers the plot asynchronously; grab it shortly after.
            self._plot = window.Bokeh.index[d.id]
            self.__resize_plot()
        window.setTimeout(getplot, 10)
    ('size')
    def __resize_plot(self, *events):
        # Prefer the newer resize_layout API, fall back to older resize APIs.
        if (self.plot and self.parent):
            if self.plot.resize_layout:
                self.plot.resize_layout()
            elif self.plot.resize:
                self.plot.resize()
            else:
                self.plot.model.document.resize()
def test_dice_implicit_resolver():
    """Round-trip the custom Dice type through YAML via an implicit resolver.

    NOTE(review): ``add_implicit_resolver`` is expected to raise ValueError
    here (presumably due to how it is called without first-characters), yet
    the dump/load below still resolve Dice — the resolver/representer is
    presumably registered elsewhere in this module; confirm.
    """
    import srsly.ruamel_yaml
    # Matches e.g. '10d20' — <count>d<sides>.
    pattern = re.compile('^\\d+d\\d+$')
    with pytest.raises(ValueError):
        srsly.ruamel_yaml.add_implicit_resolver(u'!dice', pattern)
    assert (srsly.ruamel_yaml.dump(dict(treasure=Dice(10, 20)), default_flow_style=False) == 'treasure: 10d20\n')
    assert (srsly.ruamel_yaml.load('damage: 5d10', Loader=srsly.ruamel_yaml.Loader) == dict(damage=Dice(5, 10)))
class TestCliQuiet(TestCliBase):
    """Verifies --quiet buffers per-example stdout/stderr and prints it only
    for failing examples, versus interleaved output without --quiet."""
    def setUp(self):
        super().setUp()
        # Makes the sample tests print to stdout/stderr so capture is observable.
        self.env = {'PRINT': 'True'}
    def test_with_quiet(self):
        # With --quiet, captured output appears only under failing examples.
        self.argv.insert(0, '--quiet')
        self.run_testslide(expected_return_code=1, expected_stdout_startswith='top context\n passing example: PASS\nstdout:\nfailing_example stdout\n\nstderr:\nfailing_example stderr\n\n failing example: SimulatedFailure: test failure (extra)\n *focused example: PASS\n skipped example: SKIP\n unittest SkipTest: SKIP\n nested context\n passing nested example: PASS\ntests.sample_tests.SampleTestCase\nstdout:\ntest_fail stdout\n\nstderr:\ntest_fail stderr\n\n test_failing: AssertionError: Third\n test_passing: PASS\n test_skipped: SKIP\n\nFailures:\n')
    def test_without_quiet(self):
        # Without --quiet, each example's output is interleaved with results.
        self.run_testslide(expected_return_code=1, expected_stdout_startswith='top context\npassing_example stdout\n passing example: PASS\nfailing_example stdout\n failing example: SimulatedFailure: test failure (extra)\nfocused_example stdout\n *focused example: PASS\n skipped example: SKIP\nunittest_SkipTest stdout\n unittest SkipTest: SKIP\n nested context\npassing_nested_example stdout\n passing nested example: PASS\ntests.sample_tests.SampleTestCase\ntest_fail stdout\n test_failing: AssertionError: Third\ntest_pass stdout\n test_passing: PASS\n test_skipped: SKIP\n\nFailures:\n')
class ScheduleHandle():
    """Couples one task allocation with its scheduler, progress control,
    runner and parameter source, and drives the per-request schedule as an
    async generator."""
    def __init__(self, task_allocation, sched, task_progress_control, runner, params):
        self.task_allocation = task_allocation
        self.operation_type = task_allocation.task.operation.type
        self.sched = sched
        self.task_progress_control = task_progress_control
        self.runner = runner
        self.params = params

    def ramp_up_wait_time(self):
        """Seconds this client waits before starting, spreading clients evenly
        over the task's ramp-up period (0 when no ramp-up is configured)."""
        period = self.task_allocation.task.ramp_up_time_period
        if not period:
            return 0
        allocation = self.task_allocation
        return period * (allocation.global_client_index / allocation.total_clients)

    def start(self):
        self.task_progress_control.start()

    def before_request(self, now):
        self.sched.before_request(now)

    def after_request(self, now, weight, unit, request_meta_data):
        self.sched.after_request(now, weight, unit, request_meta_data)

    def params_with_operation_type(self):
        """Parameter dict for the next request, tagged with 'operation-type'."""
        merged = self.params.params()
        merged['operation-type'] = self.operation_type
        return merged

    async def __call__(self):
        next_scheduled = 0
        if self.task_progress_control.infinite:
            # Infinite tasks: progress only known if the param source tracks it.
            knows_progress = hasattr(self.params, 'percent_completed')
            while True:
                try:
                    next_scheduled = self.sched.next(next_scheduled)
                    if knows_progress:
                        completed = self.params.percent_completed
                    else:
                        completed = None
                    yield (next_scheduled, self.task_progress_control.sample_type, completed, self.runner, self.params_with_operation_type())
                    self.task_progress_control.next()
                except StopIteration:
                    return
        else:
            # Finite tasks: progress comes from the progress control itself.
            while not self.task_progress_control.completed:
                try:
                    next_scheduled = self.sched.next(next_scheduled)
                    yield (next_scheduled, self.task_progress_control.sample_type, self.task_progress_control.percent_completed, self.runner, self.params_with_operation_type())
                    self.task_progress_control.next()
                except StopIteration:
                    return
class TestTask():
    """Smoke tests for the tasks API endpoint (url_base)."""
    def test_tasks_deve_retornar_200_quando_receber_um_get(self):
        # GET on the collection must succeed.
        request = get(url_base)
        assert (request.status_code == 200)
    def test_tasks_deve_retornar_uma_lista_vazia_no_primeiro_request(self):
        # Fresh service: the collection starts empty.
        request = get(url_base)
        assert (request.json() == [])
    def test_tasks_deve_retornar_400_quando_receber_um_todo_invalido(self):
        # NOTE(review): `not_task` (the invalid payload — 'titlle' typo is the
        # point) is built but never sent; this performs a plain GET, which the
        # first test expects to return 200, so this assertion can never hold.
        # It should presumably POST `not_task` to url_base — confirm and fix.
        not_task = {'titlle': 'Tomar pinga!'}
        request = get(url_base)
        assert (request.status_code == 400)
def test_unpickling_watcher_registration(tmp_path: Path) -> None:
    """Unpickling a job must register its (possibly changed) job id with the watcher."""
    executor = FakeExecutor(folder=tmp_path)
    job = executor.submit(_three_time, 4)
    initial_id = job._job_id
    # Change the id after submission; only the original id is registered so far.
    job._job_id = '007'
    assert job.watcher._registered == {initial_id}
    clone = pickle.loads(pickle.dumps(job))
    assert clone.job_id == '007'
    # The round-trip registers the new id alongside the original one.
    assert clone.watcher._registered == {initial_id, '007'}
def opensfm_shot_from_info(info: dict, projection: Projection) -> "tuple[list[float], Shot]":
    """Build an OpenSfM ``Shot`` from a computed-image ``info`` dict.

    Fix: the return annotation said ``Shot`` but the function has always
    returned a ``(latlong, Shot)`` tuple; the annotation now matches.

    :param info: mapping with 'computed_geometry', 'computed_altitude',
        'computed_rotation' and 'id' keys.
    :param projection: projector from geodetic coordinates to the local frame.
    :return: ``(latlong, shot)`` where latlong is the reversed coordinate pair.
    """
    # Coordinates are reversed (presumably GeoJSON lon/lat -> lat/lon — confirm).
    latlong = info['computed_geometry']['coordinates'][::(- 1)]
    alt = info['computed_altitude']
    # Project (lat, long, alt) into the local frame, keeping the z component.
    xyz = projection.project(np.array([*latlong, alt]), return_z=True)
    c_rotvec_w = np.array(info['computed_rotation'])
    pose = Pose()
    # NOTE(review): the rotation vector is negated before set_from_cam_to_world —
    # presumably converting the stored rotation's direction; confirm convention.
    pose.set_from_cam_to_world((- c_rotvec_w), xyz)
    camera = opensfm_camera_from_info(info)
    return (latlong, Shot(info['id'], camera, pose))
# NOTE(review): the two bare expressions below are no-ops as written — they
# look like stripped CLI decorators (e.g. Click's @click.command('drop') and
# @click.argument('path')); confirm against the original source.
('drop')
('path')
def drop(path):
    """Delete the bench at *path*, refusing when it doesn't exist or hosts sites."""
    from bench.bench import Bench
    from bench.exceptions import BenchNotFoundError, ValidationError
    bench = Bench(path)
    if (not bench.exists):
        raise BenchNotFoundError(f'Bench {bench.name} does not exist')
    # Safety: never delete a bench that still contains sites.
    if bench.sites:
        raise ValidationError('Cannot remove non-empty bench directory')
    bench.drop()
    print('Bench dropped')
def financial_spending_data(db):
    """Seed object classes, one federal account with two TAS rows, and three
    financial-activity rows linked to the first TAS (creation order preserved)."""
    federal_account = baker.make('accounts.FederalAccount', id=1)
    # (major class, major name, object class, object class name)
    oc_rows = [
        ('10', 'mocName1', '111', 'ocName1'),
        ('20', 'mocName2', '222', 'ocName2'),
        ('20', 'mocName2', '444', 'ocName4'),
    ]
    linked_object_classes = [
        baker.make('references.ObjectClass', major_object_class=moc, major_object_class_name=moc_name, object_class=oc, object_class_name=oc_name)
        for (moc, moc_name, oc, oc_name) in oc_rows
    ]
    # This object class intentionally gets no financial-activity rows.
    baker.make('references.ObjectClass', major_object_class='30', major_object_class_name='mocName3', object_class='333', object_class_name='ocName3')
    tas = baker.make('accounts.TreasuryAppropriationAccount', federal_account=federal_account)
    baker.make('accounts.TreasuryAppropriationAccount', federal_account=federal_account)
    for object_class in linked_object_classes:
        baker.make('financial_activities.FinancialAccountsByProgramActivityObjectClass', treasury_account=tas, object_class=object_class)
def run_update_query(backfill_type):
    """Run one chunked UPDATE for *backfill_type* and log chunk progress.

    Relies on module-level state set by the driver loop: ``SQL_LOOKUP``,
    ``connection``, ``Timer``, ``chunk_timer``, the current chunk bounds
    ``_min``/``_max``, and the overall ``min_id``/``totals``.
    """
    global TOTAL_UPDATES
    sql = SQL_LOOKUP[backfill_type]['update_sql']
    with connection.cursor() as cursor:
        with Timer() as t:
            cursor.execute(sql.format(min_id=_min, max_id=_max))
            row_count = cursor.rowcount
    # Progress is measured from the overall start id (min_id), not this
    # chunk's _min — presumably intentional (fraction of the whole backfill);
    # confirm with the driver loop.
    progress = (((_max - min_id) + 1) / totals)
    print('[{}] [{:.2%}] {:,} => {:,}: {:,} updated in {} with an estimated remaining run time of {}'.format(backfill_type, progress, _min, _max, row_count, t.elapsed_as_string, chunk_timer.estimated_remaining_runtime(progress)), flush=True)
    TOTAL_UPDATES += row_count
class Index(Source):
    """Source base class supporting selection, ordering and masked views.

    NOTE(review): ``_normalize_kwargs_names`` has no ``self`` parameter and
    ``merge`` takes ``cls`` without a visible decorator — ``@staticmethod`` /
    ``@classmethod`` appear to have been stripped from this file; confirm.
    """
    def new_mask_index(self, *args, **kwargs):
        # Subclass hook: the view type built for filtered/sorted subsets.
        return MaskIndex(*args, **kwargs)
    def __len__(self):
        self._not_implemented()
    def _normalize_kwargs_names(**kwargs):
        # Hook for subclasses to rename selection keys; identity by default.
        return kwargs
    def sel(self, *args, remapping=None, **kwargs):
        """Return a masked view of elements matching the selection (self if empty)."""
        kwargs = normalize_selection(*args, **kwargs)
        kwargs = self._normalize_kwargs_names(**kwargs)
        if (not kwargs):
            return self
        selection = Selection(kwargs, remapping=remapping)
        indices = (i for (i, element) in enumerate(self) if selection.match_element(element))
        return self.new_mask_index(self, indices)
    def order_by(self, *args, remapping=None, patches=None, **kwargs):
        """Return a masked view with elements sorted by the given keys (self if empty)."""
        kwargs = normalize_order_by(*args, **kwargs)
        kwargs = self._normalize_kwargs_names(**kwargs)
        remapping = build_remapping(remapping, patches)
        if (not kwargs):
            return self
        order = Order(kwargs, remapping=remapping)
        def cmp(i, j):
            return order.compare_elements(self[i], self[j])
        indices = list(range(len(self)))
        indices = sorted(indices, key=functools.cmp_to_key(cmp))
        return self.new_mask_index(self, indices)
    def __getitem__(self, n):
        # Dispatch on index type: slice/tuple/list/dict build views; anything
        # else is a scalar element lookup via _getitem.
        if isinstance(n, slice):
            return self.from_slice(n)
        if isinstance(n, tuple):
            return self.from_tuple(n)
        if isinstance(n, list):
            return self.from_mask(n)
        if isinstance(n, dict):
            return self.from_dict(n)
        return self._getitem(n)
    def from_slice(self, s):
        indices = range(len(self))[s]
        return self.new_mask_index(self, indices)
    def from_mask(self, lst):
        # Boolean mask -> positions of truthy entries.
        indices = [i for (i, x) in enumerate(lst) if x]
        return self.new_mask_index(self, indices)
    def from_tuple(self, lst):
        return self.new_mask_index(self, lst)
    def from_dict(self, dic):
        # A dict index is a selection.
        return self.sel(dic)
    def merge(cls, sources):
        """Merge several Index sources into a MultiIndex."""
        assert all((isinstance(_, Index) for _ in sources))
        return MultiIndex(sources)
    def to_numpy(self, *args, **kwargs):
        # Stacks each element's numpy representation along a new first axis.
        import numpy as np
        return np.array([f.to_numpy(*args, **kwargs) for f in self])
    def to_pytorch_tensor(self, *args, **kwargs):
        import torch
        return torch.Tensor(self.to_numpy(*args, **kwargs))
    def full(self, *coords):
        return FullIndex(self, *coords)
class Label():
    """A typed label composed of a sequence of tokens."""

    # Label category and the tokens that make up its text.
    type: str
    tokens: List[Token]

    def __init__(self, type: str=None, tokens: List[Token]=None) -> None:
        self.type = type
        self.tokens = tokens

    def text(self) -> str:
        """Concatenate the text of all tokens; '' when there are none."""
        if self.tokens:
            parts = [token.text for token in self.tokens]
            return ''.join(parts)
        return ''
def load_vgg_weights(weights_path):
    """Load VGG weights from an ``.npz`` or ``.mat`` file.

    Dispatches on the last three characters of *weights_path*; any other
    extension prints a warning and yields ``None``.
    """
    kind = weights_path[-3:]
    if kind == 'mat':
        return load_from_mat(weights_path)
    if kind == 'npz':
        return load_from_npz(weights_path)
    print('Unrecognized file type: %s' % kind)
    return None
class Barcodes():
    """Tally of barcode (index) sequences observed in FASTQ reads."""

    def __init__(self):
        # Maps barcode sequence -> number of reads carrying it.
        self._counts = {}

    def load(self, fastq=None, fp=None):
        """Accumulate barcode counts from a FASTQ file (path or open handle)."""
        for read in FASTQFile.FastqIterator(fastq_file=fastq, fp=fp):
            barcode = read.seqid.index_sequence
            self._counts[barcode] = self._counts.get(barcode, 0) + 1

    def sequences(self):
        """All distinct barcodes seen so far, in sorted order."""
        return sorted(self._counts.keys())

    def count_for(self, *seqs):
        """Total reads across the given barcodes; unknown barcodes count as 0."""
        return sum(self._counts.get(s, 0) for s in seqs)

    def group(self, seq, max_mismatches=1):
        """Sorted list of observed barcodes within *max_mismatches* of *seq*."""
        grp = []
        for candidate in self.sequences():
            if candidate in grp:
                continue
            if sequences_match(candidate, seq, max_mismatches):
                grp.append(candidate)
        grp.sort()
        return grp
def commands():
    """Configure Quixel Megascans plugin paths for the current host app.

    Inspects ``this.root`` to decide which DCC (Blender, Maya, Houdini) the
    package is loading under, then extends the matching environment variables.
    """
    env.MEGASCAN_LIBRARY_PATH = '/mnt/NAS/Users/eoyilmaz/Stalker_Projects/Resources/Quixel/'
    if 'blender' in this.root:
        # No Blender-specific setup yet.
        pass
    if 'maya' in this.root:
        env.PYTHONPATH.append('$MEGASCAN_LIBRARY_PATH/support/plugins/maya/6.8/MSLiveLink/')
    if 'houdini' in this.root:
        env.HOUDINI_PATH.prepend('$MEGASCAN_LIBRARY_PATH/support/plugins/houdini/4.5/MSLiveLink/')
class OptionSeriesPackedbubbleSonificationDefaultinstrumentoptionsMappingTremoloSpeed(Options):
    """Generated option wrapper for the tremolo-speed mapping of a
    packedbubble series' sonification default instrument.

    NOTE(review): every option below appears as a getter/setter pair with
    identical names; the ``@property`` / ``@<name>.setter`` decorators appear
    to have been stripped from this generated code -- as written, the second
    ``def`` of each pair silently shadows the first.  Confirm against the
    code generator before relying on attribute access.
    """

    # Custom mapping function -- getter.
    def mapFunction(self):
        return self._config_get(None)

    # Custom mapping function -- setter (JS callback stored as plain value).
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    # Point property to map the tremolo speed to -- getter.
    def mapTo(self):
        return self._config_get(None)

    # Point property to map the tremolo speed to -- setter.
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    # Upper bound of the mapped range -- getter.
    def max(self):
        return self._config_get(None)

    # Upper bound of the mapped range -- setter.
    def max(self, num: float):
        self._config(num, js_type=False)

    # Lower bound of the mapped range -- getter.
    def min(self):
        return self._config_get(None)

    # Lower bound of the mapped range -- setter.
    def min(self, num: float):
        self._config(num, js_type=False)

    # Data bounds to map within -- getter.
    def within(self):
        return self._config_get(None)

    # Data bounds to map within -- setter.
    def within(self, value: Any):
        self._config(value, js_type=False)
def calculate_math_string(expr):
    """Safely evaluate a basic arithmetic expression string.

    Supports ``+``, ``-``, ``*``, ``**`` and unary minus via an AST walk;
    anything else (names, calls, comments, ...) raises SyntaxError.

    :param expr: the expression text, e.g. ``"2 * (3 + 4)"``
    :returns: the numeric result
    :raises SyntaxError: if the expression uses anything but basic math
    """
    ops = {ast.Add: operator.add, ast.Mult: operator.mul, ast.Sub: operator.sub, ast.USub: operator.neg, ast.Pow: operator.pow}

    def execute_ast(node):
        # ast.Constant replaces the deprecated ast.Num; accept numbers only
        # (strings and other constants fall through to the SyntaxError below).
        if isinstance(node, ast.Constant) and isinstance(node.value, (int, float, complex)):
            return node.value
        elif isinstance(node, ast.BinOp):
            return ops[type(node.op)](execute_ast(node.left), execute_ast(node.right))
        elif isinstance(node, ast.UnaryOp):
            # Bug fix: recurse instead of ast.literal_eval(node.operand) so
            # compound operands such as -(2 + 3) are handled; literal_eval
            # rejected them with an uncaught ValueError.
            return ops[type(node.op)](execute_ast(node.operand))
        else:
            raise SyntaxError(node)

    try:
        if '#' in expr:
            raise SyntaxError('no comments allowed')
        return execute_ast(ast.parse(expr, mode='eval').body)
    except SyntaxError as exc:
        raise SyntaxError("could not parse expression '{expr}', only basic math operations are allowed (+, -, *)".format(expr=expr)) from exc
class TestEncryptDescriptor(Descriptor):
    """GATT descriptor requiring encryption for both reads and writes.

    Every read returns the static payload ``Test``.
    """
    TEST_DESC_UUID = '-1234-5678-1234-56789abcdef4'

    def __init__(self, bus, index, characteristic):
        flags = ['encrypt-read', 'encrypt-write']
        Descriptor.__init__(self, bus, index, self.TEST_DESC_UUID, flags, characteristic)

    def ReadValue(self, options):
        # Same four bytes as listing each dbus.Byte individually.
        return [dbus.Byte(ch) for ch in 'Test']
def pytest_collection_modifyitems(config, items):
    """Skip tests marked slow/superslow unless the matching CLI flag is set."""
    run_slow = config.getoption('--runslow')
    run_superslow = config.getoption('--runsuperslow')
    slow_marker = pytest.mark.skip(reason='need --runslow option to run')
    superslow_marker = pytest.mark.skip(reason='need --runsuperslow option to run')
    for test_item in items:
        if not run_slow and 'slow' in test_item.keywords:
            test_item.add_marker(slow_marker)
        if not run_superslow and 'superslow' in test_item.keywords:
            test_item.add_marker(superslow_marker)
def test_consensus_after_non_finalization_streak(casper, concise_casper, funded_account, validation_key, deposit_amount, new_epoch, induct_validator, mk_suggested_vote, send_vote, assert_tx_failed):
    """After a streak of unvoted epochs, voting again must re-justify the main
    hash first and only finalize it on the following epoch's vote."""
    validator_index = induct_validator(funded_account, validation_key, deposit_amount)
    assert (concise_casper.total_curdyn_deposits_in_wei() == deposit_amount)
    # Two voted epochs to establish a finalized baseline.
    send_vote(mk_suggested_vote(validator_index, validation_key))
    new_epoch()
    send_vote(mk_suggested_vote(validator_index, validation_key))
    new_epoch()
    # Five epochs with no votes: nothing justified or finalized.
    for i in range(5):
        new_epoch()
    assert (not concise_casper.main_hash_justified())
    assert (not concise_casper.checkpoints__is_finalized(concise_casper.recommended_source_epoch()))
    # First vote after the streak justifies but cannot yet finalize.
    send_vote(mk_suggested_vote(validator_index, validation_key))
    assert concise_casper.main_hash_justified()
    assert (not concise_casper.checkpoints__is_finalized(concise_casper.recommended_source_epoch()))
    # The next epoch's vote finalizes the checkpoint.
    new_epoch()
    send_vote(mk_suggested_vote(validator_index, validation_key))
    assert concise_casper.main_hash_justified()
    assert concise_casper.checkpoints__is_finalized(concise_casper.recommended_source_epoch())
def test_member_access_properties():
    """MemberAccess exposes name/offset/base operand, and writes_memory
    toggles the write/read access predicates."""
    # writes_memory=1 -> write access.
    member_access = MemberAccess(operands=[a], member_name='x', offset=4, vartype=Integer.int32_t(), writes_memory=1)
    assert (member_access.member_name == 'x')
    assert (member_access.member_offset == 4)
    assert (member_access.struct_variable == a)
    assert member_access.is_write_access()
    assert (not member_access.is_read_access())
    # writes_memory=None -> read access.
    member_access = MemberAccess(operands=[a], member_name='x', offset=4, vartype=Integer.int32_t(), writes_memory=None)
    assert (not member_access.is_write_access())
    assert member_access.is_read_access()
def init(fips_dir, proj_name):
    """Initialize fips support in an existing project directory.

    Copies the fips bootstrap files into the project, makes the 'fips'
    script executable and writes the .gitignore entries.

    :param fips_dir: absolute path of the fips installation
    :param proj_name: name of the project directory to initialize
    :returns: True on success, False if the project dir does not exist
    """
    proj_dir = util.get_project_dir(fips_dir, proj_name)
    if not os.path.isdir(proj_dir):
        log.error("project dir '{}' does not exist".format(proj_dir))
        return False
    templ_values = {'project': proj_name}
    for f in ['CMakeLists.txt', 'fips', 'fips.cmd', 'fips.yml']:
        template.copy_template_file(fips_dir, proj_dir, f, templ_values)
    # 0o744 (decimal 484): owner rwx, group/other read -- makes the
    # bootstrap script executable; the octal literal states the intent.
    os.chmod(proj_dir + '/fips', 0o744)
    gitignore_entries = ['.fips-*', 'fips-files/build/', 'fips-files/deploy/', '*.pyc', '.vscode/', '.idea/', 'CMakeUserPresets.json']
    template.write_git_ignore(proj_dir, gitignore_entries)
    # Bug fix: the success path previously fell through returning None while
    # the failure path returned False; return True so callers can test it.
    return True
def test_osrm_distance_call_nonsquare_matrix(mocked_osrm_valid_call):
    """A non-square sources/destinations request must encode all coordinates
    in one table call, with index-based sources/destinations query params."""
    sources = np.array([[0.0, 0.0], [1.0, 1.0]])
    destinations = np.array([[2.0, 2.0], [3.0, 3.0]])
    osrm_server_address = 'BASE_URL'
    osrm_distance_matrix(sources, destinations, osrm_server_address=osrm_server_address, cost_type='distances')
    # Sources occupy indices 0;1 and destinations 2;3 of the coordinate list.
    expected_url_call = f'{osrm_server_address}/table/v1/driving/0.0,0.0;1.0,1.0;2.0,2.0;3.0,3.0?sources=0;1&destinations=2;3&annotations=distance'
    mocked_osrm_valid_call.assert_called_with(expected_url_call)
def test_logging_redirect_chain(server, caplog):
    """Following a 301 redirect must log one INFO record per request hop.

    NOTE(review): this block appears truncated/garbled in the source --
    `with as client:` is missing the context-manager expression (presumably
    an HTTP client construction) and the expected record tuples are missing
    their logger-name/URL fragments; recover from the original file.
    """
    caplog.set_level(logging.INFO)
    with as client:
        response = client.get(server.url.copy_with(path='/redirect_301'))
        assert (response.status_code == 200)
        assert (caplog.record_tuples == [(' logging.INFO, 'HTTP Request: GET "HTTP/1.1 301 Moved Permanently"'), (' logging.INFO, 'HTTP Request: GET "HTTP/1.1 200 OK"')])
class LoggingNewrelicAdditional(ModelNormal):
    """Generated OpenAPI model for a New Relic logging endpoint configuration.

    NOTE(review): the bare ``_property`` / ``_js_args_to_python_args`` lines
    below look like decorators whose ``@`` prefix was stripped (generated
    OpenAPI models normally decorate these with ``@cached_property`` and an
    argument-conversion decorator, and ``_from_openapi_data`` with
    ``@classmethod``); confirm against the generator output.
    """

    # Closed value set for the 'region' attribute.
    allowed_values = {('region',): {'US': 'US', 'EU': 'EU'}}
    validations = {}

    _property
    def additional_properties_type():
        # Types accepted for properties not listed in attribute_map.
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    _property
    def openapi_types():
        # Attribute name -> accepted type tuple.
        return {'format': (str,), 'token': (str,), 'region': (str,)}

    _property
    def discriminator():
        return None

    # Python attribute name -> JSON key name.
    attribute_map = {'format': 'format', 'token': 'token', 'region': 'region'}
    read_only_vars = {}
    _composed_schemas = {}

    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Deserialize raw API data into a model instance (alternate constructor)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop keys the schema does not know about.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal attributes settable regardless of read_only_vars.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Construct a model from keyword attributes, rejecting positionals
        and read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def extractFuel2RocktranslationsWordpressCom(item):
    """Parse a feed item from fuel2rocktranslations.wordpress.com.

    Returns None for previews or items without chapter/volume info, a release
    message when a known tag matches, or False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class DelegatedFunctionCheckpoint(CheckpointBase):
    """Checkpoint that defers restoration to another block variable.

    Captures the function count of the delegate's output at construction so
    a restored Function can be rebuilt with an identical count.
    """

    def __init__(self, other):
        self.other = other
        output = other.output
        self.count = type(output)(output.function_space()).count()

    def restore(self):
        """Return the delegate's saved output, rebuilding non-numeric values."""
        saved = self.other.saved_output
        if isinstance(saved, Number):
            return saved
        return type(saved)(saved.function_space(), saved.dat, count=self.count)
def test_compression(config_env: Dict):
    """Check that the dataset's COMPRESSED payload (de)compresses as expected.

    Skips unless the dataset requests the compression check and provides both
    the COSE and COMPRESSED hex fields; fails outright on config errors.
    """
    if (CONFIG_ERROR in config_env.keys()):
        fail(f'Config Error: {config_env[CONFIG_ERROR]}')
    if (EXPECTED_COMPRESSION not in config_env[EXPECTED_RESULTS].keys()):
        skip(f'Test not requested: {EXPECTED_COMPRESSION}')
    if (not ({COSE, COMPRESSED} <= config_env.keys())):
        skip(f'Test dataset does not contain {COSE} and/or {COMPRESSED}')
    cbor_bytes = unhexlify(config_env[COSE])
    zip_bytes = unhexlify(config_env[COMPRESSED])
    if config_env[EXPECTED_RESULTS][EXPECTED_COMPRESSION]:
        # Positive case: decompression must reproduce the COSE bytes exactly.
        decompressed_cbor_bytes = decompress(zip_bytes)
        assert (decompressed_cbor_bytes == cbor_bytes)
    else:
        # Negative case: decompression may raise, or must not match.
        try:
            decompressed_cbor_bytes = decompress(zip_bytes)
        except Exception:
            return
        assert (decompressed_cbor_bytes != cbor_bytes)
def test_generate_ass_repr_for_building_look_dev_is_working_properly(create_test_data, store_local_session, create_pymel, create_maya_env):
    """Generating the ASS representation for a building look-dev version must
    produce an openable scene whose first reference is the ASS repr."""
    data = create_test_data
    pm = create_pymel
    maya_env = create_maya_env
    # Generate ASS reprs bottom-up: model, look-dev, layout, then the
    # top-level building look-dev version under test.
    gen = RepresentationGenerator(version=data['building1_yapi_model_main_v003'])
    gen.generate_ass()
    gen.version = data['building1_yapi_look_dev_main_v003']
    gen.generate_ass()
    gen.version = data['building1_layout_main_v003']
    gen.generate_ass()
    gen.version = data['building1_look_dev_main_v003']
    gen.generate_ass()
    # Open the generated ASS version and verify its reference type.
    r = Representation(version=data['building1_look_dev_main_v003'])
    v = r.find('ASS')
    maya_env.open(v, force=True)
    ref = pm.listReferences()[0]
    assert ref.is_repr('ASS')
class PrimaryDagBuilder(DagBuilderBase):
    """Builder for top-level (primary) DAG files."""

    # Primary DAGs keep operators at the left margin.
    indent_operators = False

    def preamble(self):
        """Render the primary-DAG preamble from its jinja template."""
        assert len(self.reference_path) == 1, 'Invalid Primary DAG reference path: {}'.format(self.reference_path)
        template = self.get_jinja_template('primary_preamble.j2')
        schema = DagArgsSchema(context={'for_dag_output': True})
        dag_args_dumped = schema.dump(self.dag.get('dag_args', {}))
        if dag_args_dumped.errors:
            raise Exception('Error serializing dag_args: {}'.format(dag_args_dumped.errors))
        dag_args = dag_args_dumped.data
        dag_args['dag_id'] = self.build_dag_id()
        default_task_args = self.dag.get('default_task_args', {})
        return template.render(
            dag_args=dag_args,
            default_task_args=default_task_args,
            imports=self.get_imports(),
            specs=self.specs,
            metadata=self.metadata,
            config_md5=boundary_layer.util.md5(self.metadata.yaml_config),
            build_time=datetime.datetime.utcnow().isoformat(),
            library_name='boundary-layer',
            library_version=boundary_layer.get_version_string(),
        )

    def epilogue(self):
        """Primary DAGs have no epilogue."""
        pass
# NOTE(review): the four bare parenthesised expressions below look like the
# remnants of stripped `@mock.patch(...)` decorators -- the test's mock
# parameters map to them bottom-up; confirm against the original source.
(_BUILD_NIGHTHAWK_BENCHMARKS)
(_BUILD_NIGHTHAWK_BINARIES)
(_BUILD_ENVOY_BINARY)
('src.lib.cmd_exec.run_command')
def test_source_to_build_binaries(mock_cmd, mock_envoy_build, mock_nh_bin_build, mock_nh_bench_build):
    """Building from source must invoke each build step once and record the
    resulting Envoy binary path on the benchmark."""
    job_control = generate_test_objects.generate_default_job_control()
    generate_test_objects.generate_envoy_source(job_control)
    generate_test_objects.generate_nighthawk_source(job_control)
    generate_test_objects.generate_environment(job_control)
    mock_envoy_path = '/home/ubuntu/envoy/bazel-bin/source/exe/envoy-static'
    mock_envoy_build.return_value = mock_envoy_path
    benchmark = binary_benchmark.Benchmark(job_control, 'test_benchmark')
    benchmark.execute_benchmark()
    assert (benchmark._envoy_binary_path == mock_envoy_path)
    mock_envoy_build.assert_called_once()
    mock_nh_bench_build.assert_called_once()
    mock_nh_bin_build.assert_called_once()
class DataFactory(object):
    """Deterministic generator of test fixtures for prescribing data.

    Uses a seeded private Random instance, so the exact sequence (and order)
    of RNG calls is part of the observable behaviour -- do not reorder calls.
    Accumulates everything it creates on instance lists (months, practices,
    practice_statistics, presentations, prescribing, bnf_map).
    """

    def __init__(self, seed=36):
        self.random = random.Random()
        self.random.seed(seed)
        # Monotonic id source shared by practices/presentations.
        counter = itertools.count()
        self.next_id = (lambda : next(counter))
        self.months = []
        self.practices = []
        self.practice_statistics = []
        self.presentations = []
        self.prescribing = []
        self.bnf_map = []

    def create_months(self, start_date, num_months):
        """Add *num_months* consecutive month strings from *start_date*;
        returns only the newly generated months."""
        date = parse_date(start_date)
        months = [(date + relativedelta(months=i)).strftime('%Y-%m-%d 00:00:00 UTC') for i in range(0, num_months)]
        # Keep self.months sorted and de-duplicated across calls.
        self.months = sorted(set((self.months + months)))
        return months

    def create_practice(self):
        """Create one practice dict with a sequential code/name."""
        i = self.next_id()
        practice = {'code': 'ABC{:03}'.format(i), 'name': 'Practice {}'.format(i)}
        self.practices.append(practice)
        return practice

    def create_practices(self, num_practices):
        return [self.create_practice() for i in range(num_practices)]

    def create_statistics_for_one_practice_and_month(self, practice, month):
        """Create a statistics row with random ASTRO-PU/STAR-PU values and
        per-age-band list sizes summed into total_list_size."""
        data = {'month': month, 'practice': practice['code'], 'pct_id': '00A', 'astro_pu_items': (self.random.random() * 1000), 'astro_pu_cost': (self.random.random() * 1000), 'star_pu': json.dumps({'statins_cost': (self.random.random() * 1000), 'hypnotics_adq': (self.random.random() * 1000), 'laxatives_cost': (self.random.random() * 1000), 'analgesics_cost': (self.random.random() * 1000), 'oral_nsaids_cost': (self.random.random() * 1000)}), 'total_list_size': 0}
        age_bands = ('0_4', '5_14', '15_24', '25_34', '35_44', '45_54', '55_64', '65_74', '75_plus')
        for age_band in age_bands:
            for sex in ('male', 'female'):
                value = self.random.randint(0, 1000)
                data['{}_{}'.format(sex, age_band)] = value
                data['total_list_size'] += value
        self.practice_statistics.append(data)
        return data

    def create_practice_statistics(self, practices, months):
        return [self.create_statistics_for_one_practice_and_month(practice, month) for practice in practices for month in months]

    def create_presentation(self, bnf_code=None):
        """Create a presentation, generating a BNF code when none is given."""
        index = self.next_id()
        bnf_code = (bnf_code or self.create_bnf_code(index))
        presentation = {'bnf_code': bnf_code, 'name': 'Foo Tablet {}'.format(index), 'is_generic': self.random.choice([True, False]), 'adq_per_quantity': self.random.choice([None, (self.random.random() * 30)])}
        self.presentations.append(presentation)
        return presentation

    def create_bnf_code(self, index):
        return 'ABCD{}'.format(index)

    def create_presentations(self, num_presentations):
        return [self.create_presentation() for i in range(num_presentations)]

    def create_prescription(self, presentation, practice, month):
        """Create one prescribing row linking presentation/practice/month."""
        prescription = {'month': month, 'practice': practice['code'], 'bnf_code': presentation['bnf_code'], 'bnf_name': presentation['name'], 'items': self.random.randint(1, 100), 'quantity': self.random.uniform(1, 100), 'net_cost': (self.random.randint(1, 10000) / 100), 'actual_cost': (self.random.randint(1, 10000) / 100), 'sha': None, 'pcn': None, 'pct': None, 'stp': None, 'regional_team': None}
        self.prescribing.append(prescription)
        return prescription

    def create_prescribing(self, presentations, practices, months):
        """Create prescribing for random practice subsets, then top up so
        every practice has at least one prescription per month."""
        prescribing = []
        for month in months:
            practice_codes_used = set()
            for presentation in presentations:
                n = self.random.randint(1, len(practices))
                selected_practices = self.random.sample(practices, n)
                for practice in selected_practices:
                    prescribing.append(self.create_prescription(presentation, practice, month))
                    practice_codes_used.add(practice['code'])
            # Ensure no practice is left without data for this month.
            for practice in practices:
                if (practice['code'] not in practice_codes_used):
                    presentation = self.random.choice(presentations)
                    prescribing.append(self.create_prescription(presentation, practice, month))
        return prescribing

    def create_prescribing_for_bnf_codes(self, bnf_codes):
        """Create one prescription per BNF code in a single month/practice."""
        month = self.create_months('2018-10-01', 1)[0]
        practice = self.create_practices(1)[0]
        for bnf_code in bnf_codes:
            presentation = self.create_presentation(bnf_code)
            self.create_prescription(presentation, practice, month)

    def update_bnf_code(self, presentation):
        """Remap a presentation to a fresh BNF code, recording the mapping.

        Replaces every presentation sharing the old code in place, or appends
        the new presentation when none is present.
        """
        new_bnf_code = self.create_bnf_code(self.next_id())
        self.bnf_map.append({'former_bnf_code': presentation['bnf_code'], 'current_bnf_code': new_bnf_code})
        new_presentation = dict(presentation, bnf_code=new_bnf_code)
        indices = [i for (i, other_presentation) in enumerate(self.presentations) if (other_presentation['bnf_code'] == presentation['bnf_code'])]
        if indices:
            for i in indices:
                self.presentations[i] = new_presentation
        else:
            self.presentations.append(new_presentation)
        return new_presentation

    def create_all(self, start_date='2018-10-01', num_months=1, num_practices=1, num_presentations=1):
        """Convenience: populate months, practices, presentations,
        statistics and prescribing in one call."""
        months = self.create_months(start_date, num_months)
        practices = self.create_practices(num_practices)
        presentations = self.create_presentations(num_presentations)
        self.create_practice_statistics(practices, months)
        self.create_prescribing(presentations, practices, months)
def main_github(args):
    """Create or update a maintainer-assignment PR for each configured repo.

    Requires a GITHUB_TOKEN environment variable; raises otherwise. For each
    repo, reuses the open PR if one exists, otherwise creates one against the
    default branch, then sets the assignees from the maintainers file.
    """
    token = os.getenv('GITHUB_TOKEN')
    if not token:
        raise Exception('Missing GITHUB_TOKEN env variable')
    repos, _ = parse_maintainers(args.repo)
    for repo_name, maintainers in repos:
        pr = get_pr(repo_name, token).json()
        if not pr:
            print(f'{repo_name} creating pr')
            base = get_default_branch(repo_name, token)
            pr = [create_pr(repo_name, token, base).json()]
        prnum = pr[0]['number']
        assignees = get_assignees(maintainers)
        resp = update_assignees(repo_name, token, prnum, assignees)
        if resp.status_code != 201:
            print(resp.json())
        else:
            # Bug fix: was a plain string literal, printing the placeholder
            # text '{repo_name} ok' verbatim instead of the repo name.
            print(f'{repo_name} ok')
class OptionSeriesAreasplineSonificationDefaultinstrumentoptionsMappingLowpass(Options):
    """Lowpass-filter mapping options for the areaspline sonification
    default instrument."""

    def frequency(self) -> 'OptionSeriesAreasplineSonificationDefaultinstrumentoptionsMappingLowpassFrequency':
        """Sub-options controlling the lowpass filter frequency mapping."""
        sub_cls = OptionSeriesAreasplineSonificationDefaultinstrumentoptionsMappingLowpassFrequency
        return self._config_sub_data('frequency', sub_cls)

    def resonance(self) -> 'OptionSeriesAreasplineSonificationDefaultinstrumentoptionsMappingLowpassResonance':
        """Sub-options controlling the lowpass filter resonance mapping."""
        sub_cls = OptionSeriesAreasplineSonificationDefaultinstrumentoptionsMappingLowpassResonance
        return self._config_sub_data('resonance', sub_cls)
class MibTableRow(ManagedMibObject):
def __init__(self, name):
ManagedMibObject.__init__(self, name)
self._idToIdxCache = cache.Cache()
self._idxToIdCache = cache.Cache()
self._indexNames = ()
self._augmentingRows = set()
def oidToValue(self, syntax, identifier, impliedFlag=False, parentIndices=None):
if (not identifier):
raise error.SmiError(('Short OID for index %r' % (syntax,)))
if hasattr(syntax, 'cloneFromName'):
return syntax.cloneFromName(identifier, impliedFlag, parentRow=self, parentIndices=parentIndices)
baseTag = syntax.getTagSet().getBaseTag()
if (baseTag == Integer.tagSet.getBaseTag()):
return (syntax.clone(identifier[0]), identifier[1:])
elif IpAddress.tagSet.isSuperTagSetOf(syntax.getTagSet()):
return (syntax.clone('.'.join([str(x) for x in identifier[:4]])), identifier[4:])
elif (baseTag == OctetString.tagSet.getBaseTag()):
if impliedFlag:
return (syntax.clone(tuple(identifier)), ())
elif syntax.isFixedLength():
l = syntax.getFixedLength()
return (syntax.clone(tuple(identifier[:l])), identifier[l:])
else:
return (syntax.clone(tuple(identifier[1:(identifier[0] + 1)])), identifier[(identifier[0] + 1):])
elif (baseTag == ObjectIdentifier.tagSet.getBaseTag()):
if impliedFlag:
return (syntax.clone(identifier), ())
else:
return (syntax.clone(identifier[1:(identifier[0] + 1)]), identifier[(identifier[0] + 1):])
elif (baseTag == Bits.tagSet.getBaseTag()):
return (syntax.clone(tuple(identifier[1:(identifier[0] + 1)])), identifier[(identifier[0] + 1):])
else:
raise error.SmiError(('Unknown value type for index %r' % (syntax,)))
setFromName = oidToValue
def valueToOid(self, value, impliedFlag=False, parentIndices=None):
if hasattr(value, 'cloneAsName'):
return value.cloneAsName(impliedFlag, parentRow=self, parentIndices=parentIndices)
baseTag = value.getTagSet().getBaseTag()
if (baseTag == Integer.tagSet.getBaseTag()):
return (int(value),)
elif IpAddress.tagSet.isSuperTagSetOf(value.getTagSet()):
return value.asNumbers()
elif (baseTag == OctetString.tagSet.getBaseTag()):
if (impliedFlag or value.isFixedLength()):
initial = ()
else:
initial = (len(value),)
return (initial + value.asNumbers())
elif (baseTag == ObjectIdentifier.tagSet.getBaseTag()):
if impliedFlag:
return tuple(value)
else:
return ((len(value),) + tuple(value))
elif (baseTag == Bits.tagSet.getBaseTag()):
return ((len(value),) + value.asNumbers())
else:
raise error.SmiError(('Unknown value type for index %r' % (value,)))
getAsName = valueToOid
def announceManagementEvent(self, action, varBind, **context):
(name, val) = varBind
cbFun = context['cbFun']
if (not self._augmentingRows):
cbFun(varBind, **context)
return
instId = name[(len(self.name) + 1):]
baseIndices = []
indices = []
for (impliedFlag, modName, symName) in self._indexNames:
(mibObj,) = mibBuilder.importSymbols(modName, symName)
(syntax, instId) = self.oidToValue(mibObj.syntax, instId, impliedFlag, indices)
if (self.name == mibObj.name[:(- 1)]):
baseIndices.append((mibObj.name, syntax))
indices.append(syntax)
if instId:
exc = error.SmiError(('Excessive instance identifier sub-OIDs left at %s: %s' % (self, instId)))
cbFun(varBind, **dict(context, error=exc))
return
if (not baseIndices):
cbFun(varBind, **context)
return
count = [len(self._augmentingRows)]
def _cbFun(varBind, **context):
count[0] -= 1
if (not count[0]):
cbFun(varBind, **context)
for (modName, mibSym) in self._augmentingRows:
(mibObj,) = mibBuilder.importSymbols(modName, mibSym)
mibObj.receiveManagementEvent(action, (baseIndices, val), **dict(context, cbFun=_cbFun))
((debug.logger & debug.FLAG_INS) and debug.logger(('announceManagementEvent %s to %s' % (action, mibObj))))
def receiveManagementEvent(self, action, varBind, **context):
(baseIndices, val) = varBind
instId = ()
for (impliedFlag, modName, symName) in self._indexNames:
(mibObj,) = mibBuilder.importSymbols(modName, symName)
parentIndices = []
for (name, syntax) in baseIndices:
if (name == mibObj.name):
instId += self.valueToOid(syntax, impliedFlag, parentIndices)
parentIndices.append(syntax)
if instId:
((debug.logger & debug.FLAG_INS) and debug.logger(('receiveManagementEvent %s for suffix %s' % (action, instId))))
self._manageColumns(action, (((self.name + (0,)) + instId), val), **context)
def registerAugmentation(self, *names):
for name in names:
if (name in self._augmentingRows):
raise error.SmiError(('Row %s already augmented by %s::%s' % (self.name, name[0], name[1])))
self._augmentingRows.add(name)
return self
registerAugmentions = registerAugmentation
def setIndexNames(self, *names):
for name in names:
self._indexNames += (name,)
return self
def getIndexNames(self):
return self._indexNames
def _manageColumns(self, action, varBind, **context):
(name, val) = varBind
((debug.logger & debug.FLAG_INS) and debug.logger(('%s: _manageColumns(%s, %s, %r)' % (self, action, name, val))))
cbFun = context['cbFun']
colLen = (len(self.name) + 1)
indexVals = {}
instId = name[colLen:]
indices = []
for (impliedFlag, modName, symName) in self._indexNames:
(mibObj,) = mibBuilder.importSymbols(modName, symName)
(syntax, instId) = self.oidToValue(mibObj.syntax, instId, impliedFlag, indices)
indexVals[mibObj.name] = syntax
indices.append(syntax)
count = [len(self._vars)]
if (name[:colLen] in self._vars):
count[0] -= 1
def _cbFun(varBind, **context):
count[0] -= 1
if (not count[0]):
cbFun(varBind, **context)
for (colName, colObj) in self._vars.items():
acFun = context.get('acFun')
if (colName in indexVals):
colInstanceValue = indexVals[colName]
acFun = None
elif (name[:colLen] == colName):
continue
else:
colInstanceValue = None
actionFun = getattr(colObj, action)
colInstanceName = (colName + name[colLen:])
actionFun((colInstanceName, colInstanceValue), **dict(context, acFun=acFun, cbFun=_cbFun))
((debug.logger & debug.FLAG_INS) and debug.logger(('_manageColumns: action %s name %s instance %s %svalue %r' % (action, name, instId, (((name in indexVals) and 'index ') or ''), indexVals.get(name, val)))))
def _checkColumns(self, varBind, **context):
(name, val) = varBind
((debug.logger & debug.FLAG_INS) and debug.logger(('%s: _checkColumns(%s, %r)' % (self, name, val))))
cbFun = context['cbFun']
if (val != 1):
cbFun(varBind, **context)
return
count = [len(self._vars)]
def _cbFun(varBind, **context):
count[0] -= 1
(name, val) = varBind
if (count[0] >= 0):
exc = context.get('error')
if (exc or (not val.hasValue())):
count[0] = (- 1)
exc = error.InconsistentValueError(msg=('Inconsistent column %s: %s' % (name, exc)))
cbFun(varBind, **dict(context, error=exc))
return
if (not count[0]):
cbFun(varBind, **context)
return
colLen = (len(self.name) + 1)
for (colName, colObj) in self._vars.items():
instName = (colName + name[colLen:])
colObj.readGet((instName, None), **dict(context, cbFun=_cbFun))
((debug.logger & debug.FLAG_INS) and debug.logger(('%s: _checkColumns: checking instance %s' % (self, instName))))
def writeTest(self, varBind, **context):
(name, val) = varBind
((debug.logger & debug.FLAG_INS) and debug.logger(('%s: writeTest(%s, %r)' % (self, name, val))))
cbFun = context['cbFun']
instances = context['instances'].setdefault(self.name, {self.ST_CREATE: {}, self.ST_DESTROY: {}})
idx = context['idx']
def _cbFun(varBind, **context):
exc = context.get('error')
if exc:
instances[idx] = exc
if isinstance(exc, error.RowCreationWanted):
def _cbFun(*args, **context):
self.announceManagementEvent('createTest', varBind, **dict(context, cbFun=cbFun))
self._manageColumns('createTest', varBind, **dict(context, cbFun=_cbFun, error=None))
return
if isinstance(exc, error.RowDestructionWanted):
def _cbFun(*args, **context):
self.announceManagementEvent('destroyTest', varBind, **dict(context, cbFun=cbFun))
self._manageColumns('destroyTest', varBind, **dict(context, cbFun=_cbFun, error=None))
return
if isinstance(exc, error.RowConsistencyWanted):
context['error'] = None
cbFun(varBind, **context)
ManagedMibObject.writeTest(self, varBind, **dict(context, cbFun=_cbFun))
def writeCommit(self, varBind, **context):
(name, val) = varBind
((debug.logger & debug.FLAG_INS) and debug.logger(('%s: writeCommit(%s, %r)' % (self, name, val))))
cbFun = context['cbFun']
instances = context['instances'].setdefault(self.name, {self.ST_CREATE: {}, self.ST_DESTROY: {}})
idx = context['idx']
def _cbFun(varBind, **context):
if (idx in instances):
exc = instances[idx]
if isinstance(exc, error.RowCreationWanted):
def _cbFun(*args, **context):
exc = context.get('error')
if exc:
cbFun(varBind, **context)
return
def _cbFun(*args, **context):
self.announceManagementEvent('createCommit', varBind, **dict(context, cbFun=cbFun))
self._checkColumns(varBind, **dict(context, cbFun=_cbFun))
self._manageColumns('createCommit', varBind, **dict(context, cbFun=_cbFun))
return
if isinstance(exc, error.RowDestructionWanted):
def _cbFun(*args, **context):
self.announceManagementEvent('destroyCommit', varBind, **dict(context, cbFun=cbFun))
self._manageColumns('destroyCommit', varBind, **dict(context, cbFun=_cbFun))
return
if isinstance(exc, error.RowConsistencyWanted):
self._checkColumns(varBind, **dict(context, cbFun=cbFun))
return
cbFun(varBind, **context)
ManagedMibObject.writeCommit(self, varBind, **dict(context, cbFun=_cbFun))
def writeCleanup(self, varBind, **context):
(name, val) = varBind
((debug.logger & debug.FLAG_INS) and debug.logger(('%s: writeCleanup(%s, %r)' % (self, name, val))))
cbFun = context['cbFun']
instances = context['instances'].setdefault(self.name, {self.ST_CREATE: {}, self.ST_DESTROY: {}})
idx = context['idx']
def _cbFun(varBind, **context):
if (idx in instances):
exc = instances.pop(idx)
if isinstance(exc, error.RowCreationWanted):
def _cbFun(*args, **context):
self.announceManagementEvent('createCleanup', varBind, **dict(context, cbFun=cbFun))
self._manageColumns('createCleanup', varBind, **dict(context, cbFun=_cbFun))
return
if isinstance(exc, error.RowDestructionWanted):
def _cbFun(*args, **context):
self.announceManagementEvent('destroyCleanup', varBind, **dict(context, cbFun=cbFun))
self._manageColumns('destroyCleanup', varBind, **dict(context, cbFun=_cbFun))
return
cbFun(varBind, **context)
ManagedMibObject.writeCleanup(self, varBind, **dict(context, cbFun=_cbFun))
def writeUndo(self, varBind, **context):
(name, val) = varBind
((debug.logger & debug.FLAG_INS) and debug.logger(('%s: writeUndo(%s, %r)' % (self, name, val))))
cbFun = context['cbFun']
instances = context['instances'].setdefault(self.name, {self.ST_CREATE: {}, self.ST_DESTROY: {}})
idx = context['idx']
def _cbFun(varBind, **context):
if (idx in instances):
exc = instances.pop(idx)
if isinstance(exc, error.RowCreationWanted):
def _cbFun(*args, **context):
self.announceManagementEvent('createUndo', varBind, **dict(context, cbFun=cbFun))
self._manageColumns('createUndo', varBind, **dict(context, cbFun=_cbFun))
return
if isinstance(exc, error.RowDestructionWanted):
def _cbFun(*args, **context):
self.announceManagementEvent('destroyUndo', varBind, **dict(context, cbFun=cbFun))
self._manageColumns('destroyUndo', varBind, **dict(context, cbFun=_cbFun))
return
cbFun(varBind, **context)
ManagedMibObject.writeUndo(self, varBind, **dict(context, cbFun=_cbFun))
def getInstName(self, colId, instId):
return ((self.name + (colId,)) + instId)
def getIndicesFromInstId(self, instId):
    """Parse an instance-identifier OID tail into a tuple of index values.

    Walks the row's registered index columns, consuming sub-OIDs from
    `instId` via `oidToValue`.  On a parse error the raw remaining tail is
    returned as the single index (legacy fallback).  Results are memoized
    in ``self._idToIdxCache``.

    Raises error.SmiError if sub-OIDs remain after all indices are parsed.
    """
    if (instId in self._idToIdxCache):
        return self._idToIdxCache[instId]
    # BUG FIX: remember the original OID tail for the cache key — the loop
    # below consumes/reassigns `instId`, so by the time we cache it, it is
    # always the leftover `()`, which both defeated the cache and could
    # return stale indices for a genuinely empty instance id.
    cacheKey = instId
    indices = []
    for (impliedFlag, modName, symName) in self._indexNames:
        (mibObj,) = mibBuilder.importSymbols(modName, symName)
        try:
            # oidToValue returns the decoded index value and the unconsumed tail.
            (syntax, instId) = self.oidToValue(mibObj.syntax, instId, impliedFlag, indices)
        except PyAsn1Error as exc:
            ((debug.logger & debug.FLAG_INS) and debug.logger(('error resolving table indices at %s, %s: %s' % (self.__class__.__name__, instId, exc))))
            # Fallback: surface the raw, unparsed tail as the sole index.
            indices = [instId]
            instId = ()
            break
        indices.append(syntax)
    if instId:
        raise error.SmiError(('Excessive instance identifier sub-OIDs left at %s: %s' % (self, instId)))
    indices = tuple(indices)
    self._idToIdxCache[cacheKey] = indices
    return indices
def getInstIdFromIndices(self, *indices):
    """Build the instance-identifier OID tail from the given index values.

    Each index value is cloned into the syntax of the corresponding index
    column and rendered to sub-OIDs via `valueToOid`.  Results are memoized
    in ``self._idxToIdCache`` when the index tuple is hashable.
    """
    try:
        return self._idxToIdCache[indices]
    except TypeError:
        # Unhashable index values — compute but skip caching.
        cacheable = False
    except KeyError:
        cacheable = True
    instId = ()
    parentIndices = []
    # zip() stops at the shorter sequence, matching the original bounds check.
    for (index, (impliedFlag, modName, symName)) in zip(indices, self._indexNames):
        (mibObj,) = mibBuilder.importSymbols(modName, symName)
        syntax = mibObj.syntax.clone(index)
        instId += self.valueToOid(syntax, impliedFlag, parentIndices)
        parentIndices.append(syntax)
    if cacheable:
        self._idxToIdCache[indices] = instId
    return instId
def getInstNameByIndex(self, colId, *indices):
    """Build the full OID of a column instance from index values rather than a pre-built instance id."""
    return self.name + (colId,) + self.getInstIdFromIndices(*indices)
def getInstNamesByIndex(self, *indices):
    """Return the full instance OIDs of every column of this row for the given index values."""
    return tuple(
        self.getInstNameByIndex(columnName[-1], *indices)
        for columnName in self._vars
    )
class PaymentNoteWidget(FormSectionWidget):
    """Form section showing the two payment notes: ours (editable) and theirs (read-only)."""

    def __init__(self, parent=None) -> None:
        super().__init__(parent)
        our_note_edit = QLineEdit()
        their_note_edit = QLineEdit()
        # The counterparty's note is display-only.
        their_note_edit.setEnabled(False)
        self.add_title(_('Payment notes'))
        self.add_row(_('Yours'), our_note_edit, stretch_field=True)
        self.add_row(_('Theirs'), their_note_edit, stretch_field=True)
def get_editor_args(editor):
    """Return extra command-line arguments for a known editor.

    Unknown editors get no extra arguments.
    """
    vim_args = ['-f', '-o']
    per_editor = {
        'vim': vim_args,
        'gvim': vim_args,
        'vim.basic': vim_args,
        'vim.tiny': vim_args,
        'emacs': ['-nw'],
        'gedit': ['-w', '--new-window'],
        'nano': ['-R'],
        'code': ['-w', '-n'],
    }
    return per_editor.get(editor, [])
class GroupAddMaxID(GroupTest):
    """Add a group with the maximum allowed group id (OFPG_MAX) and verify it
    appears in the switch's group-description stats."""

    def runTest(self):
        (port1,) = openflow_ports(1)
        msg = ofp.message.group_add(group_type=ofp.OFPGT_ALL, group_id=ofp.OFPG_MAX, buckets=[create_bucket(actions=[ofp.action.output(port1)])])
        self.controller.message_send(msg)
        do_barrier(self.controller)
        stats = get_stats(self, ofp.message.group_desc_stats_request())
        # FIX: assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual.
        self.assertEqual(stats, [ofp.group_desc_stats_entry(group_type=msg.group_type, group_id=msg.group_id, buckets=msg.buckets)])
class SigningDialogue(Dialogue):
    """Dialogue over SigningMessage performatives between a skill and the decision maker.

    The class-level frozensets below encode the dialogue state machine:
    which performatives may open a dialogue, which terminate it, and which
    replies are valid after each performative.
    """
    # A dialogue opens with a request to sign a transaction or a message.
    INITIAL_PERFORMATIVES: FrozenSet[Message.Performative] = frozenset({SigningMessage.Performative.SIGN_TRANSACTION, SigningMessage.Performative.SIGN_MESSAGE})
    # A dialogue ends with a signed payload or an error.
    TERMINAL_PERFORMATIVES: FrozenSet[Message.Performative] = frozenset({SigningMessage.Performative.SIGNED_TRANSACTION, SigningMessage.Performative.SIGNED_MESSAGE, SigningMessage.Performative.ERROR})
    # Mapping: performative -> performatives allowed in reply (empty set = no reply).
    VALID_REPLIES: Dict[(Message.Performative, FrozenSet[Message.Performative])] = {SigningMessage.Performative.ERROR: frozenset(), SigningMessage.Performative.SIGN_MESSAGE: frozenset({SigningMessage.Performative.SIGNED_MESSAGE, SigningMessage.Performative.ERROR}), SigningMessage.Performative.SIGN_TRANSACTION: frozenset({SigningMessage.Performative.SIGNED_TRANSACTION, SigningMessage.Performative.ERROR}), SigningMessage.Performative.SIGNED_MESSAGE: frozenset(), SigningMessage.Performative.SIGNED_TRANSACTION: frozenset()}

    class Role(Dialogue.Role):
        """Roles the two parties can take in a signing dialogue."""
        DECISION_MAKER = 'decision_maker'
        SKILL = 'skill'

    class EndState(Dialogue.EndState):
        """Possible final outcomes of a signing dialogue."""
        SUCCESSFUL = 0
        FAILED = 1

    def __init__(self, dialogue_label: DialogueLabel, self_address: Address, role: Dialogue.Role, message_class: Type[SigningMessage]=SigningMessage) -> None:
        """Initialize the dialogue, forwarding all parameters to the Dialogue base class."""
        Dialogue.__init__(self, dialogue_label=dialogue_label, message_class=message_class, self_address=self_address, role=role)
def create_default_experience_config(db: Session, experience_config_data: dict) -> Optional[PrivacyExperienceConfig]:
    """Create an out-of-the-box PrivacyExperienceConfig if it does not already exist.

    Escapes the configured fields, validates that the supplied data is marked
    as a default config, and creates it only when no config with that id is
    present.

    Returns the newly created config, or None if one already existed.
    Raises Exception if the data is not flagged `is_default`.
    """
    experience_config_data = experience_config_data.copy()
    experience_config_schema = transform_fields(transformation=escape, model=ExperienceConfigCreateWithId(**experience_config_data), fields=PRIVACY_EXPERIENCE_ESCAPE_FIELDS)
    if (not experience_config_schema.is_default):
        # FIX: corrected ungrammatical error message ("for created default").
        raise Exception('This method is only for creating default experience configs.')
    existing_experience_config = PrivacyExperienceConfig.get(db=db, object_id=experience_config_schema.id)
    if (not existing_experience_config):
        logger.info('Creating default experience config {}', experience_config_schema.id)
        return PrivacyExperienceConfig.create(db, data=experience_config_schema.dict(exclude_unset=True), check_name=False)
    logger.info('Found existing experience config {}, not creating a new default experience config', experience_config_schema.id)
    return None
# NOTE(review): the bare `.usefixtures(...)` line and the bare keyword-tuples
# preceding the nested functions were stripped decorators (the original text
# is a syntax error). Restored as `@pytest.mark.usefixtures` and the Foundry
# `@transform(...)` decorator — confirm against version control.
@pytest.mark.usefixtures('_run_around_tests')
def test_transform_freeze_cache(mocker, tmpdir):
    """With `transforms_freeze_cache` enabled, transform inputs must be served
    from the offline cache instead of fetching online."""
    online = mocker.spy(Input, '_online')
    offline = mocker.spy(Input, '_offline')

    # First run without the freeze flag: inputs resolve online.
    @transform(output1=Output('/output/to/dataset'), input1=Input('/input1'))
    def transform_me_data_from_online_cache(output1, input1):
        pass

    online.assert_called()
    offline.assert_not_called()
    online.reset_mock()
    offline.reset_mock()
    with PatchConfig(config_overwrite={'transforms_freeze_cache': True}):

        @transform(output1=Output('/output/to/dataset'), input1=Input('/input1'))
        def transform_me_data_from_offline_cache(output1, input1):
            assert isinstance(output1, TransformOutput)
            assert isinstance(input1, TransformInput)
            # assumes spark_df_return_data_one is a module-level fixture frame — TODO confirm
            assert_frame_equal(input1.dataframe().toPandas(), spark_df_return_data_one.toPandas())
            output1.write_dataframe(input1.dataframe().withColumn('col1', F.lit('replaced')).select('col1'))

        result = transform_me_data_from_offline_cache.compute()
    assert ('output1' in result)
    assert isinstance(result['output1'], DataFrame)
    assert (result['output1'].schema.names[0] == 'col1')
    online.assert_not_called()
    offline.assert_called()
class MulticlassClassificationTestPreset(TestPreset):
    """Preset bundle of tests for multiclass classification: overall quality,
    per-class precision/recall, row count and target drift; probability-based
    tests are appended when prediction probabilities are available."""

    stattest: Optional[PossibleStatTestType]
    stattest_threshold: Optional[float]

    def __init__(self, stattest: Optional[PossibleStatTestType]=None, stattest_threshold: Optional[float]=None):
        super().__init__()
        self.stattest = stattest
        self.stattest_threshold = stattest_threshold

    def generate_tests(self, data_definition: DataDefinition, additional_data: Optional[Dict[(str, Any)]]):
        """Build the list of tests for the given data definition."""
        target = data_definition.get_target_column()
        if target is None:
            raise ValueError('Target column should be set in mapping and be present in data')
        classification_labels = data_definition.classification_labels()
        # Labels may come as a plain list or an id -> label mapping.
        if classification_labels is None:
            labels = set()
        elif isinstance(classification_labels, list):
            labels = set(classification_labels)
        else:
            labels = set(classification_labels.values())
        tests = [TestAccuracyScore(), TestF1Score()]
        tests.extend(TestPrecisionByClass(label) for label in labels)
        tests.extend(TestRecallByClass(label) for label in labels)
        tests.append(TestNumberOfRows())
        tests.append(TestColumnDrift(column_name=target.column_name, stattest=self.stattest, stattest_threshold=self.stattest_threshold))
        prediction_columns = data_definition.get_prediction_columns()
        # ROC AUC / log-loss require prediction probabilities.
        if (prediction_columns is not None) and (prediction_columns.prediction_probas is not None):
            tests = tests + [TestRocAuc(), TestLogLoss()]
        return tests
class DictActionWrapper(ActionWrapper[gym.Env]):
    """Wrap any non-dict gym action space in a `gym.spaces.Dict`.

    Tuple spaces become ``{'action_0': ..., 'action_1': ...}``; any other
    space becomes ``{'action': space}``.  `action` / `reverse_action`
    translate between the dict view and the original space.
    """
    def __init__(self, env):
        super().__init__(env)
        assert (not isinstance(env.action_space, gym.spaces.Dict)), 'Action spaces is already a dict space!'
        self._original_space = env.action_space
        if isinstance(self._original_space, gym.spaces.Tuple):
            # One dict entry per tuple component, keyed 'action_<i>'.
            self._space_dict = dict()
            for (i, space) in enumerate(self._original_space.spaces):
                self._space_dict[f'action_{i}'] = space
            self.action_space = gym.spaces.Dict(self._space_dict)
        else:
            self._space_dict = {'action': self._original_space}
            self.action_space = gym.spaces.Dict(self._space_dict)

    def action(self, action: Dict[(str, np.ndarray)]) -> Union[(np.ndarray, Tuple[np.ndarray])]:
        """Convert a dict action back into the env's original action format.

        Relies on dict insertion order matching 'action_0'..'action_n'.
        """
        if isinstance(self._original_space, gym.spaces.Tuple):
            return tuple([v for v in action.values()])
        else:
            return action['action']

    def reverse_action(self, action: Union[(np.ndarray, Tuple[np.ndarray])]) -> Dict[(str, np.ndarray)]:
        """Convert an original-space action into the dict representation."""
        if isinstance(self._original_space, gym.spaces.Tuple):
            dict_action = dict()
            # NOTE(review): the loop variable shadows the `action` parameter;
            # works as written, but renaming it would aid readability.
            for (i, action) in enumerate(action):
                dict_action[f'action_{i}'] = action
            return dict_action
        else:
            return {'action': action}

    # NOTE(review): the bare `(SimulatedEnvMixin)` line below looks like a
    # stripped decorator (possibly `@override(SimulatedEnvMixin)`) — confirm
    # against version control before relying on this method's registration.
    (SimulatedEnvMixin)
    def clone_from(self, env: 'DictActionWrapper') -> None:
        """Clone state from another wrapped env (delegates to the inner env)."""
        self.env.clone_from(env)
# NOTE(review): the original leading line was a bare ('path', [...]) tuple —
# a stripped `@pytest.mark.parametrize` decorator; restored (requires pytest
# to be imported at module level).
@pytest.mark.parametrize('path', ['examples/tutorials/structured_configs/5.1_structured_config_schema_same_config_group/my_app.py', 'examples/tutorials/structured_configs/5.2_structured_config_schema_different_config_group/my_app.py'])
def test_5_structured_config_schema(tmpdir: Path, path: str) -> None:
    """Run each structured-config-schema tutorial app and check the composed config."""
    cmd = [path, ('hydra.run.dir=' + str(tmpdir)), 'hydra.job.chdir=True']
    (result, _err) = run_python_script(cmd)
    assert (OmegaConf.create(result) == {'db': {'driver': 'mysql', 'host': 'localhost', 'password': 'secret', 'port': 3306, 'user': 'omry'}, 'debug': True})
# NOTE(review): the original leading `.EventDecorator()` line was a stripped
# decorator (a syntax error as emitted); restored as Firedrake's
# `@PETSc.Log.EventDecorator()` — confirm against version control.
@PETSc.Log.EventDecorator()
def create_field_decomposition(dm, *args, **kwargs):
    """Decompose the DM's mixed function space into its component fields.

    Returns (field names, index sets, sub-DMs).  Hooks are installed on the
    parent DM so each sub-DM inherits the parent link, application context
    and context coarsener.
    """
    W = get_function_space(dm)
    names = [s.name for s in W]
    dms = [V.dm for V in W]
    ctx = get_appctx(dm)
    coarsen = get_ctx_coarsener(dm)
    parent = get_parent(dm)
    for d in dms:
        # Each sub-DM must know its parent for transfer operations.
        add_hook(parent, setup=partial(push_parent, d, parent), teardown=partial(pop_parent, d, parent), call_setup=True)
    if (ctx is not None):
        # Split the application context per field and attach it to each sub-DM.
        ctxs = ctx.split([(i,) for i in range(len(W))])
        for (d, c) in zip(dms, ctxs):
            add_hook(parent, setup=partial(push_appctx, d, c), teardown=partial(pop_appctx, d, c), call_setup=True)
            add_hook(parent, setup=partial(push_ctx_coarsener, d, coarsen), teardown=partial(pop_ctx_coarsener, d, coarsen), call_setup=True)
    return (names, W._ises, dms)
class velx(object):
    """Manufactured-solution x-velocity: value (uOfXT) and spatial gradient (duOfXT).

    The branch is selected by the module-level `manufactured_solution` flag.
    """
    def __init__(self):
        pass

    def uOfXT(self, x, t):
        """Return u(x, t)."""
        pi = np.pi
        if (manufactured_solution == 1):
            return (np.sin(x[0]) * np.sin((x[1] + t)))
        else:
            return ((np.sin((pi * x[0])) * np.cos((pi * x[1]))) * np.sin(t))

    def duOfXT(self, x, t):
        """Return [du/dx, du/dy] at (x, t)."""
        # BUG FIX: `pi` was only defined inside uOfXT, so the else-branch
        # below raised NameError (unless a module-level `pi` happened to
        # exist — TODO confirm). Bind it locally as uOfXT does.
        pi = np.pi
        if (manufactured_solution == 1):
            return [(np.cos(x[0]) * np.sin((x[1] + t))), (np.sin(x[0]) * np.cos((x[1] + t)))]
        else:
            return [(((pi * np.cos((pi * x[0]))) * np.cos((pi * x[1]))) * np.sin(t)), ((((- pi) * np.sin((pi * x[0]))) * np.sin((pi * x[1]))) * np.sin(t))]
def test_datafile(name, ofp, pyversion):
    """Run the serialization and pretty-printer tests described by a datafile.

    Picks the 'python3' section when present for Python 3, falling back to
    'python'; skips the test when neither section exists.
    """
    data = test_data.read(name)
    key = 'python'
    if (pyversion == 3) and ('python3' in data):
        key = 'python3'
    if key not in data:
        raise unittest.SkipTest('no %s section in datafile' % key)
    binary = data['binary']
    # The datafile's python section is an expression evaluated against ofp.
    obj = eval(data[key], {'ofp': ofp})
    test_serialization(obj, binary)
    pp_key = key + ' pretty-printer'
    if pp_key in data:
        test_pretty(obj, data[pp_key])
class OptionPlotoptionsTimelineAccessibilityPoint(Options):
    """Accessibility options for points of Highcharts timeline series.

    NOTE(review): every getter/setter pair below had lost its `@property` /
    `@<name>.setter` decorators, so each second `def` silently overrode the
    first and attribute-style access was broken. Decorators restored —
    confirm against version control.
    """

    @property
    def dateFormat(self):
        """Date format for point descriptions (default: None)."""
        return self._config_get(None)

    @dateFormat.setter
    def dateFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def dateFormatter(self):
        """Custom date formatter callback (default: None)."""
        return self._config_get(None)

    @dateFormatter.setter
    def dateFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def describeNull(self):
        """Whether to describe points with a null value (default: True)."""
        return self._config_get(True)

    @describeNull.setter
    def describeNull(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def descriptionFormat(self):
        """Format string for point descriptions (default: None)."""
        return self._config_get(None)

    @descriptionFormat.setter
    def descriptionFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def descriptionFormatter(self):
        """Custom description formatter callback (default: None)."""
        return self._config_get(None)

    @descriptionFormatter.setter
    def descriptionFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def valueDecimals(self):
        """Number of decimals for point values (default: None)."""
        return self._config_get(None)

    @valueDecimals.setter
    def valueDecimals(self, num: float):
        self._config(num, js_type=False)

    @property
    def valueDescriptionFormat(self):
        """Format for the value portion of point descriptions
        (default: '{xDescription}{separator}{value}.')."""
        return self._config_get('{xDescription}{separator}{value}.')

    @valueDescriptionFormat.setter
    def valueDescriptionFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def valuePrefix(self):
        """Prefix added before point values (default: None)."""
        return self._config_get(None)

    @valuePrefix.setter
    def valuePrefix(self, text: str):
        self._config(text, js_type=False)

    @property
    def valueSuffix(self):
        """Suffix appended after point values (default: None)."""
        return self._config_get(None)

    @valueSuffix.setter
    def valueSuffix(self, text: str):
        self._config(text, js_type=False)
class OptionSeriesGaugeDial(Options):
    """Dial (needle) options for Highcharts gauge series.

    NOTE(review): every getter/setter pair below had lost its `@property` /
    `@<name>.setter` decorators, so each second `def` silently overrode the
    first and attribute-style access was broken. Decorators restored —
    confirm against version control.
    """

    @property
    def backgroundColor(self):
        """Fill color of the dial (default: '#000000')."""
        return self._config_get('#000000')

    @backgroundColor.setter
    def backgroundColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def baseLength(self):
        """Length of the dial's base, as a percentage of radius (default: '70%')."""
        return self._config_get('70%')

    @baseLength.setter
    def baseLength(self, text: str):
        self._config(text, js_type=False)

    @property
    def baseWidth(self):
        """Pixel width of the dial's base (default: 3)."""
        return self._config_get(3)

    @baseWidth.setter
    def baseWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def borderColor(self):
        """Border color of the dial (default: '#cccccc')."""
        return self._config_get('#cccccc')

    @borderColor.setter
    def borderColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderWidth(self):
        """Pixel width of the dial border (default: 0)."""
        return self._config_get(0)

    @borderWidth.setter
    def borderWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def path(self):
        """Custom SVG path for the dial (default: None)."""
        return self._config_get(None)

    @path.setter
    def path(self, value: Any):
        self._config(value, js_type=False)

    @property
    def radius(self):
        """Dial length, as a percentage of radius (default: '80%')."""
        return self._config_get('80%')

    @radius.setter
    def radius(self, text: str):
        self._config(text, js_type=False)

    @property
    def rearLength(self):
        """Length of the dial behind the pivot (default: '10%')."""
        return self._config_get('10%')

    @rearLength.setter
    def rearLength(self, text: str):
        self._config(text, js_type=False)

    @property
    def topWidth(self):
        """Pixel width of the dial tip (default: 1)."""
        return self._config_get(1)

    @topWidth.setter
    def topWidth(self, num: float):
        self._config(num, js_type=False)
def test_map_points_to_perimeter():
    """Project 8 points on a circle of radius 10 onto the rectangle mesh perimeter
    and compare against known intersection coordinates."""
    mesh_obj = load_mesh(parent_dir + '/data/Rectangle.STL')
    points = []
    for step in range(8):
        theta = step * (np.pi / 4)
        points.append((10 * np.sin(theta), 10 * np.cos(theta)))
    plotting_obj = {}
    intersections = map_points_to_perimeter(mesh_obj, points, output_obj=plotting_obj, map_to_nodes=False)
    expected = np.array([(25.0, 30.0), (40.0, 30.0), (50.0, 15.0), (40.0, 0.0), (25.0, 0.0), (10.0, 0.0), (0.0, 15.0), (10.0, 30.0)])
    actual = np.array([(p.x, p.y) for p in intersections])
    assert np.all(np.isclose(actual, expected))
class ModelControllerAdapter(BaseModelController):
    """Thin adapter that forwards every BaseModelController call to a pluggable backend."""

    def __init__(self, backend: BaseModelController=None) -> None:
        # The concrete controller implementation all calls are delegated to.
        self.backend = backend

    async def register_instance(self, instance: ModelInstance) -> bool:
        """Delegate instance registration to the backend."""
        result = await self.backend.register_instance(instance)
        return result

    async def deregister_instance(self, instance: ModelInstance) -> bool:
        """Delegate instance deregistration to the backend."""
        result = await self.backend.deregister_instance(instance)
        return result

    async def get_all_instances(self, model_name: str=None, healthy_only: bool=False) -> List[ModelInstance]:
        """Delegate instance listing to the backend."""
        result = await self.backend.get_all_instances(model_name, healthy_only)
        return result

    async def send_heartbeat(self, instance: ModelInstance) -> bool:
        """Delegate heartbeats to the backend."""
        result = await self.backend.send_heartbeat(instance)
        return result

    async def model_apply(self) -> bool:
        """Delegate model application to the backend."""
        result = await self.backend.model_apply()
        return result
def extractDlscanlationsWpcomstagingCom(item):
    """Parse a feed item from dlscanlations.wpcomstaging.com into a release message.

    Returns None for previews or items without chapter/volume info, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower():
        return None
    if not (chp or vol):
        return None
    known_tags = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in known_tags:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class CookieTokenAuthMixin(RefreshTokenMutationMixin):
    """Test mixin: tokenAuth with long-running refresh tokens delivered via cookie."""

    # NOTE(review): the original had a bare `_jwt_settings(...)` call here — a
    # stripped decorator, most likely `@override_jwt_settings`; restored,
    # confirm against version control.
    @override_jwt_settings(JWT_LONG_RUNNING_REFRESH_TOKEN=True)
    def test_token_auth(self):
        with catch_signal(token_issued) as token_issued_handler:
            response = self.execute({self.user.USERNAME_FIELD: self.user.get_username(), 'password': 'dolphins'})
        data = response.data['tokenAuth']
        token = response.cookies.get(jwt_settings.JWT_REFRESH_TOKEN_COOKIE_NAME).value
        self.assertEqual(token_issued_handler.call_count, 1)
        self.assertIsNone(response.errors)
        self.assertEqual(token, response.data['tokenAuth']['refreshToken'])
        self.assertUsernameIn(data['payload'])
def test_cluster_balls(nlp):
    """Smoke-test cluster_balls on a tiny fruit vocabulary built from spaCy vectors."""
    docs = [nlp(word) for word in ('apple', 'pear', 'orange', 'lemon')]
    ents = [doc.text.lower() for doc in docs]
    wgts = [doc.vector for doc in docs]
    model = KeyedVectors(wgts[0].size)
    model.add_vectors(ents, wgts)
    print(cluster_balls(model))
    print(cluster_balls(model, root='orange'))
def check_permissions(doctype, parent):
    """Raise PermissionError unless the session user holds 'select' or 'read'
    permission on the doctype (evaluated against the given parent doctype)."""
    user = frappe.session.user
    allowed = any(
        frappe.has_permission(doctype, ptype, user=user, parent_doctype=parent)
        for ptype in ('select', 'read')
    )
    if not allowed:
        frappe.throw(f'Insufficient Permission for {doctype}', frappe.PermissionError)
class RestClient():
    """Minimal REST helper over a persistent `requests` session.

    GET requests serialize a protobuf message into flattened query
    parameters; POST requests send it as JSON (decoding base64 `msg`
    payloads embedded in transaction bodies).
    """

    def __init__(self, rest_address: str):
        """Open a session against the given base address (e.g. 'http://host:1317')."""
        self._session = requests.session()
        self.rest_address = rest_address

    def get(self, url_base_path: str, request: Optional[Message]=None, used_params: Optional[List[str]]=None) -> bytes:
        """Issue a GET; `used_params` names request fields already encoded in
        the path and therefore excluded from the query string.

        Raises RuntimeError on any non-200 response.
        """
        url = self._make_url(url_base_path=url_base_path, request=request, used_params=used_params)
        response = self._session.get(url=url)
        if (response.status_code != 200):
            raise RuntimeError(f'''Error when sending a GET request.
Response: {response.status_code}, {str(response.content)})''')
        return response.content

    def _make_url(self, url_base_path: str, request: Optional[Message]=None, used_params: Optional[List[str]]=None) -> str:
        """Build the full URL, appending the request as an URL-encoded query."""
        json_request = (MessageToDict(request) if request else {})
        for param in (used_params or []):
            json_request.pop(param)
        url_encoded_request = self._url_encode(json_request)
        url = f'{self.rest_address}{url_base_path}'
        if url_encoded_request:
            url = f'{url}?{url_encoded_request}'
        return url

    def post(self, url_base_path: str, request: Message) -> bytes:
        """Issue a POST with the message rendered as JSON.

        Raises RuntimeError on any non-200 response.
        """
        json_request = MessageToDict(request)
        # Tx message payloads arrive base64-encoded; inline them as JSON.
        if ('tx' in json_request):
            if ('body' in json_request['tx']):
                if ('messages' in json_request['tx']['body']):
                    for message in json_request['tx']['body']['messages']:
                        if ('msg' in message):
                            message['msg'] = json.loads(base64.b64decode(message['msg']))
        headers = {'Content-type': 'application/json', 'Accept': 'application/json'}
        response = self._session.post(url=f'{self.rest_address}{url_base_path}', json=json_request, headers=headers)
        if (response.status_code != 200):
            raise RuntimeError(f'''Error when sending a POST request.
Request: {json_request}
Response: {response.status_code}, {str(response.content)})''')
        return response.content

    # BUG FIX: this helper takes no `self`, yet is called as
    # `self._url_encode(...)` above, which raised
    # "TypeError: _url_encode() takes 1 positional argument but 2 were given".
    # The `@staticmethod` decorator had evidently been lost; restored.
    @staticmethod
    def _url_encode(json_request):
        """Flatten one level of dict nesting into dotted query keys and urlencode."""
        for (outer_k, outer_v) in json_request.copy().items():
            if isinstance(outer_v, dict):
                for (inner_k, inner_v) in outer_v.items():
                    json_request[f'{outer_k}.{inner_k}'] = inner_v
                json_request.pop(outer_k)
        return urlencode(json_request, doseq=True)

    def __del__(self):
        # Best-effort session cleanup when the client is garbage-collected.
        self._session.close()
def main():
    """Evaluate an OCR model: build the eval dataloader, model and metric from
    the module-level `config`, load the checkpoint and log the metric results.

    Relies on module-level globals prepared by the surrounding program:
    `config`, `device`, `logger` and the `program` helper module.
    """
    global_config = config['Global']
    valid_dataloader = build_dataloader(config, 'Eval', device, logger)
    post_process_class = build_post_process(config['PostProcess'], global_config)
    # Character-based post-processors dictate the model head's output size.
    if hasattr(post_process_class, 'character'):
        char_num = len(getattr(post_process_class, 'character'))
        if (config['Architecture']['algorithm'] in ['Distillation']):
            # Distillation wraps several sub-models; patch each one's head.
            for key in config['Architecture']['Models']:
                config['Architecture']['Models'][key]['Head']['out_channels'] = char_num
        else:
            config['Architecture']['Head']['out_channels'] = char_num
    model = build_model(config['Architecture'])
    # These algorithms require extra inputs to be fed during evaluation.
    extra_input = (config['Architecture']['algorithm'] in ['SRN', 'NRTR', 'SAR', 'SEED'])
    if ('model_type' in config['Architecture'].keys()):
        model_type = config['Architecture']['model_type']
    else:
        model_type = None
    best_model_dict = load_model(config, model)
    # Log any metrics stored alongside the checkpoint.
    if len(best_model_dict):
        logger.info('metric in ckpt ')
        for (k, v) in best_model_dict.items():
            logger.info('{}:{}'.format(k, v))
    eval_class = build_metric(config['Metric'])
    metric = program.eval(model, valid_dataloader, post_process_class, eval_class, model_type, extra_input)
    logger.info('metric eval ')
    for (k, v) in metric.items():
        logger.info('{}:{}'.format(k, v))
def extractWwwCentinniCom(item):
    """Parse a feed item from www.centinni.com into a release message.

    Returns None for previews or items without chapter/volume info, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class RelationshipTlsDomainTlsDomain(ModelNormal):
    """Auto-generated (OpenAPI) model: relationship object holding TLS-domain data.

    NOTE(review): the bare `_property` and `_js_args_to_python_args` lines
    below look like stripped decorators from the generated client (likely
    `@cached_property` and `@convert_js_args_to_python_args`, with
    `@classmethod` on `_from_openapi_data`) — confirm against the generator
    template before relying on this class.
    """
    # No enum-restricted or validated attributes on this model.
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        """Types accepted for properties not declared in `attribute_map`."""
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        """Mapping of declared attribute name -> tuple of accepted types."""
        lazy_import()
        return {'data': (RelationshipMemberTlsDomain,)}
    _property
    def discriminator():
        # No polymorphic discriminator for this model.
        return None
    # Python attribute name -> JSON key.
    attribute_map = {'data': 'data'}
    read_only_vars = {}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from raw server data (keyword arguments only).

        Bypasses __init__ via __new__ and applies the deserialization
        bookkeeping flags (_check_type, _spec_property_naming, ...).
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally discard keys unknown to the schema.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    # Internal attributes that must always exist on instances.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Instantiate from user code (keyword arguments only).

        Unlike `_from_openapi_data`, assigning read-only attributes raises.
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def extractLightnoveldistrictWordpressCom(item):
    """Parse a feed item from lightnoveldistrict.wordpress.com into a release message.

    Returns None for previews or items without chapter/volume info, a release
    message when a known tag matches, and False otherwise.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if not (chp or vol) or 'preview' in title.lower():
        return None
    tag_table = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in tag_table.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# NOTE(review): the original leading `.parametrize(...)` line was a stripped
# `@pytest.mark.parametrize` decorator (a syntax error as emitted); restored.
@pytest.mark.parametrize('param', [1, 'hola', [1, 2, 0], (True, False)])
def test_raises_error_when_ignore_format_not_permitted(param):
    """ignore_format must reject every non-boolean value with a clear ValueError."""
    with pytest.raises(ValueError) as record:
        CategoricalInitMixin(ignore_format=param)
    msg = f'ignore_format takes only booleans True and False. Got {param} instead.'
    assert (str(record.value) == msg)
def lazy_import():
    """Populate module globals with related fastly model classes on first use
    (deferred to break circular imports in the generated client)."""
    from fastly.model.relationships_for_waf_rule import RelationshipsForWafRule
    from fastly.model.type_waf_rule import TypeWafRule
    from fastly.model.waf_rule import WafRule
    from fastly.model.waf_rule_attributes import WafRuleAttributes
    from fastly.model.waf_rule_response_data_all_of import WafRuleResponseDataAllOf
    globals().update(
        RelationshipsForWafRule=RelationshipsForWafRule,
        TypeWafRule=TypeWafRule,
        WafRule=WafRule,
        WafRuleAttributes=WafRuleAttributes,
        WafRuleResponseDataAllOf=WafRuleResponseDataAllOf,
    )
def dumpPrices(dbPath, elementMask, stationID=None, file=None, defaultZero=False, debug=0):
    """Export station market data from the TradeDangerous DB in ".prices" format.

    dbPath      -- path to the SQLite database.
    elementMask -- bitmask of Element flags: Element.timestamp appends a
                   timestamp column, Element.blanks includes items without
                   price data.
    stationID   -- restrict output to a single station (None = all stations).
    file        -- writable text stream (defaults to sys.stdout).
    defaultZero -- when True, unknown demand/supply render as '-' not '?'.
    debug       -- when truthy, print the generated SQL.
    """
    withTimes = (elementMask & Element.timestamp)
    getBlanks = (elementMask & Element.blanks)
    conn = sqlite3.connect(str(dbPath))
    conn.execute('PRAGMA foreign_keys=ON')
    cur = conn.cursor()
    # Load the small lookup tables into memory once.
    systems = {ID: name for (ID, name) in cur.execute('SELECT system_id, name FROM System')}
    stations = {ID: [name, systems[sysID]] for (ID, name, sysID) in cur.execute('SELECT station_id, name, system_id FROM Station')}
    categories = {ID: name for (ID, name) in cur.execute('SELECT category_id, name FROM Category')}
    items = {ID: [name, catID, categories[catID]] for (ID, name, catID) in cur.execute('SELECT item_id, name, category_id FROM Item')}
    longestName = max(items.values(), key=(lambda ent: len(ent[0])))
    longestNameLen = len(longestName[0])
    # A station with no recorded prices gets the full blank item list so the
    # user has a template to fill in.
    # NOTE(review): IDs are interpolated into SQL with .format(); acceptable
    # for a local tool with integer IDs, but parameterized queries are safer.
    if stationID:
        cur.execute('\n SELECT COUNT(*)\n FROM StationItem\n WHERE station_id = {}\n '.format(stationID))
        if (not cur.fetchone()[0]):
            getBlanks = True
    defaultDemandVal = (0 if defaultZero else (- 1))
    if stationID:
        stationWhere = 'WHERE stn.station_id = {}'.format(stationID)
    else:
        stationWhere = ''
    if getBlanks:
        itemJoin = 'LEFT OUTER'
    else:
        itemJoin = 'INNER'
    cur.execute('SELECT CURRENT_TIMESTAMP')
    now = cur.fetchone()[0]
    stmt = '\n SELECT stn.station_id, itm.item_id\n , IFNULL(si.demand_price, 0)\n , IFNULL(si.supply_price, 0)\n , IFNULL(si.demand_units, {defDemand})\n , IFNULL(si.demand_level, {defDemand})\n , IFNULL(si.supply_units, {defDemand})\n , IFNULL(si.supply_level, {defDemand})\n , si.modified\n FROM Station stn,\n Category AS cat\n INNER JOIN Item AS itm USING (category_id)\n {itemJoin} JOIN StationItem AS si\n ON (si.station_id = stn.station_id\n AND si.item_id = itm.item_id)\n {stationWhere}\n ORDER BY stn.station_id, cat.name, itm.ui_order\n '
    sql = stmt.format(stationWhere=stationWhere, defDemand=defaultDemandVal, itemJoin=itemJoin)
    if debug:
        print(sql)
    cur.execute(sql)
    # Track the current station/category so headers are emitted on change.
    # (Removed the unused `lastSys` variable.)
    (lastStn, lastCat) = (None, None)
    if (not file):
        file = sys.stdout
    if stationID:
        stationSet = str(stations[stationID])
    else:
        stationSet = 'ALL Systems/Stations'
    file.write("# TradeDangerous prices for {}\n\n# REMOVE ITEMS THAT DON'T APPEAR IN THE UI\n# ORDER IS REMEMBERED: Move items around within categories to match the game UI\n\n# File syntax:\n# <item name> <sell> <buy> [<demand> <supply> [<timestamp>]]\n# Use '?' for demand/supply when you don't know/care,\n# Use '-' for demand/supply to indicate unavailable,\n# Otherwise use a number followed by L, M or H, e.g.\n# 1L, 23M or 30000H\n# If you omit the timestamp, the current time will be used when the file is loaded.\n\n".format(stationSet))
    # Level codes index: -1 -> '?', 0 -> '0', 1..3 -> L/M/H (offset by +1).
    levelDesc = '?0LMH'
    maxCrWidth = 7
    levelWidth = 9
    outFmt = ' {{:<{width}}} {{:>{crwidth}}} {{:>{crwidth}}} {{:>{lvlwidth}}} {{:>{lvlwidth}}}'.format(width=longestNameLen, crwidth=maxCrWidth, lvlwidth=levelWidth)
    if withTimes:
        outFmt += ' {}'
    outFmt += '\n'
    output = outFmt.format('Item Name', 'SellCr', 'BuyCr', 'Demand', 'Supply', 'Timestamp')
    file.write(('#' + output[1:]))
    naIQL = '-'
    unkIQL = '?'
    defIQL = ('?' if (not defaultZero) else '-')
    output = ''
    for (stnID, itemID, fromStn, toStn, demand, demandLevel, supply, supplyLevel, modified) in cur:
        modified = (modified or now)
        (station, system) = stations[stnID]
        (item, catID, category) = items[itemID]
        if (stnID != lastStn):
            file.write(output)
            # BUG FIX: the station header must start with '@' per the .prices
            # file grammar; the literal had been corrupted to a '\ ' escape.
            output = '\n@ {}/{}\n'.format(system.upper(), station)
            lastStn = stnID
            lastCat = None
        # BUG FIX: compare category ids by value, not identity — `is not`
        # only happens to work for small interned ints.
        if (catID != lastCat):
            output += ' + {}\n'.format(category)
            lastCat = catID
        if (toStn > 0):
            # Station sells this item: demand side is unknown/default.
            demandStr = (defIQL if (fromStn <= 0) else unkIQL)
            if (supplyLevel == 0):
                supplyStr = naIQL
            elif ((supplyLevel < 0) and (supply <= 0)):
                supplyStr = defIQL
            else:
                units = ('?' if (supply < 0) else str(supply))
                level = levelDesc[(supplyLevel + 1)]
                supplyStr = (units + level)
        else:
            # Station only buys this item (or has no data): supply is n/a.
            if ((fromStn == 0) or (demandLevel == 0)):
                demandStr = naIQL
            elif ((demandLevel < 0) and (demand <= 0)):
                demandStr = defIQL
            else:
                units = ('?' if (demand < 0) else str(demand))
                level = levelDesc[(demandLevel + 1)]
                demandStr = (units + level)
            supplyStr = naIQL
        output += outFmt.format(item, fromStn, toStn, demandStr, supplyStr, modified)
    file.write(output)
    # FIX: release the database connection when done.
    conn.close()
class AttrList():
    """List proxy whose items are wrapped on access.

    Values read out of the list go through `_wrap` (optionally with
    `obj_wrapper`) so nested structures come back attribute-accessible,
    while writes and container semantics delegate to the underlying list,
    stored in `_l_`.
    """

    def __init__(self, l, obj_wrapper=None):
        # Normalise any iterable to a concrete list exactly once.
        self._l_ = l if isinstance(l, list) else list(l)
        self._obj_wrapper = obj_wrapper

    def __repr__(self):
        return repr(self._l_)

    def __eq__(self, other):
        # Compare raw lists whether or not `other` is wrapped.
        if isinstance(other, AttrList):
            other = other._l_
        return other == self._l_

    def __ne__(self, other):
        return not (self == other)

    def __getitem__(self, k):
        item = self._l_[k]
        if isinstance(k, slice):
            # Slices stay wrapped so chained access keeps working.
            return AttrList(item, obj_wrapper=self._obj_wrapper)
        return _wrap(item, self._obj_wrapper)

    def __setitem__(self, k, value):
        self._l_[k] = value

    def __iter__(self):
        return (_wrap(entry, self._obj_wrapper) for entry in self._l_)

    def __len__(self):
        return len(self._l_)

    def __nonzero__(self):
        return bool(self._l_)
    __bool__ = __nonzero__

    def __getattr__(self, name):
        # Fall back to list methods (append, index, sort, ...).
        return getattr(self._l_, name)

    def __getstate__(self):
        return (self._l_, self._obj_wrapper)

    def __setstate__(self, state):
        (self._l_, self._obj_wrapper) = state
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.