code stringlengths 281 23.7M |
|---|
def generate_assistance_data(fiscal_year, i):
    """Build one synthetic assistance-transaction row for test fixtures.

    Parameters:
        fiscal_year: year used to build the action-date field ('05/07/<year>').
        i: row index; offset by 100 to form the ids and the FAIN.

    Returns:
        A positional list: two ids, the FAIN, filler fields, awarding/funding
        agency codes ('001'/'Test_Agency'), assistance type '02'
        ('Block Grant'), more filler, and the action date last.

    NOTE(review): the original tail was garbled to "f' f'05/07/…'" (a syntax
    error); restored here as a single action-date f-string — confirm the
    expected column count against the loader that consumes these rows.
    """
    return [(i + 100), (i + 100), f'fain{(i + 100)}', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '001', 'Test_Agency', '001', 'Test_Agency', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '02', 'Block Grant', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', f'05/07/{fiscal_year}']
def test():
    """Exercise checker (Datacamp-style) for a German spaCy lesson.

    Verifies that the learner created a German pipeline (``nlp``), processed
    text into a ``Doc`` (``doc``), and printed ``doc.text`` in the submitted
    solution.  Relies on grader-injected globals: ``nlp``, ``doc``,
    ``__solution__`` and ``__msg__``.
    """
    import spacy.tokens
    import spacy.lang.de
    # nlp must be the German language pipeline.
    assert isinstance(nlp, spacy.lang.de.German), 'El objeto nlp deberia ser un instance de la clase de aleman.'
    # doc must be the result of running the text through nlp.
    assert isinstance(doc, spacy.tokens.Doc), 'Procesaste el texto con el objeto nlp para crear un doc?'
    # The submitted source code must literally print doc.text.
    assert ('print(doc.text)' in __solution__), 'Imprimiste en pantalla el doc.text?'
    __msg__.good('Sehr gut! :)')
# NOTE(review): the bare '()' below is almost certainly the residue of a
# stripped decorator (e.g. '@pytest.fixture()') lost during extraction —
# TODO confirm against the original test module.
()
def construct_graph_loop(variable_u, variable_v, aliased_variables_y) -> Tuple[(List[BasicBlock], ControlFlowGraph)]:
    """Build a five-block CFG fixture modelling a printf/scanf counting loop.

    The instruction list mixes calls, Phi functions (with explicit origin
    blocks), a branch on ``y <= 0`` and arithmetic on the aliased variable,
    then distributes the instructions over five basic blocks wired into a
    loop-shaped control-flow graph.

    Returns:
        (nodes, cfg): the list of BasicBlocks and the ControlFlowGraph.
    """
    instructions = [Assignment(ListOperation([]), Call(imp_function_symbol('printf'), [Constant()])), Assignment(ListOperation([]), Call(imp_function_symbol('scanf'), [Constant(), UnaryOperation(OperationType.address, [aliased_variables_y[1]])])), Assignment(ListOperation([]), Call(imp_function_symbol('printf'), [Constant(), aliased_variables_y[1]])), Phi(variable_u[3], [aliased_variables_y[1], aliased_variables_y[4]]), Phi(aliased_variables_y[4], [aliased_variables_y[1], aliased_variables_y[7], variable_v[11]]), Branch(Condition(OperationType.less_or_equal, [aliased_variables_y[4], Constant(0)])), Assignment(ListOperation([]), Call(imp_function_symbol('printf'), [Constant(), variable_u[3]])), Return([Constant(0)]), Assignment(ListOperation([]), Call(imp_function_symbol('printf'), [Constant(), aliased_variables_y[4]])), Assignment(aliased_variables_y[7], BinaryOperation(OperationType.minus, [aliased_variables_y[4], Constant(2)])), Assignment(variable_v[9], Call(function_symbol('is_odd'), [aliased_variables_y[7]])), Branch(Condition(OperationType.equal, [BinaryOperation(OperationType.bitwise_and, [variable_v[9], Constant(255)]), Constant(0)])), Assignment(variable_v[11], BinaryOperation(OperationType.minus, [aliased_variables_y[7], Constant(1)]))]
    nodes = [BasicBlock(i) for i in range(5)]
    # Block 0: prologue (prompt + scanf + echo); block 1: loop header with the
    # Phi functions and exit branch; block 2: exit path; blocks 3/4: loop body.
    nodes[0].instructions = instructions[0:3]
    nodes[1].instructions = instructions[3:6]
    nodes[2].instructions = instructions[6:8]
    nodes[3].instructions = instructions[8:12]
    nodes[4].instructions = [instructions[12]]
    # Pin each Phi operand to the predecessor block it flows in from.
    instructions[3]._origin_block = {nodes[0]: aliased_variables_y[1], nodes[3]: aliased_variables_y[4], nodes[4]: aliased_variables_y[4]}
    instructions[4]._origin_block = {nodes[0]: aliased_variables_y[1], nodes[3]: aliased_variables_y[7], nodes[4]: variable_v[11]}
    cfg = ControlFlowGraph()
    # Loop shape: 0 -> 1; 1 -> 2 (exit) / 3 (body); 3 -> 1 or 4; 4 -> 1.
    cfg.add_edges_from([UnconditionalEdge(nodes[0], nodes[1]), UnconditionalEdge(nodes[1], nodes[2]), UnconditionalEdge(nodes[1], nodes[3]), UnconditionalEdge(nodes[3], nodes[1]), UnconditionalEdge(nodes[3], nodes[4]), UnconditionalEdge(nodes[4], nodes[1])])
    return (nodes, cfg)
def execute_eventdata(cfg, test_cluster, challenges, track_params):
    """Run the Rally 'eventdata' track once per challenge against the test cluster.

    Parameters:
        cfg: Rally config handle passed to it.race().
        test_cluster: running test cluster the benchmark targets.
        challenges: iterable of challenge names to execute.
        track_params: string of track parameters forwarded verbatim.

    Each race must finish with exit code 0.
    """
    for challenge in challenges:
        # NOTE(review): the original source was garbled at '{test_cluster.'
        # (no attribute name survived extraction); 'http_port' is the
        # conventional attribute used to build --target-host in these
        # benchmark tests — TODO confirm against the test_cluster fixture.
        cmd = (
            f'--test-mode --pipeline=benchmark-only '
            f'--target-host=127.0.0.1:{test_cluster.http_port} '
            f'--track-repository=eventdata --track=eventdata '
            f'--track-params="{track_params}" --challenge={challenge}'
        )
        assert it.race(cfg, cmd) == 0
def read_and_forward_pty_output():
    """Background task pumping PTY output to connected socket.io clients.

    Polls the app's PTY file descriptor forever; anything read is appended to
    the in-memory history and emitted on the '/pty' namespace.
    """
    max_read_bytes = 1024 * 20
    while True:
        socketio.sleep(0.01)
        fd = app.config['fd']
        if not fd:
            continue
        # Non-blocking readiness check (zero timeout).
        readable, _, _ = select.select([fd], [], [], 0)
        if not readable:
            continue
        output = os.read(fd, max_read_bytes).decode(errors='ignore')
        app.config['hist'] += output
        socketio.emit('pty-output', {'output': output}, namespace='/pty')
def get_instance(config: Dict[(str, Any)], target_class: Type[T]) -> T:
    """Instantiate the class named in *config* with its configured kwargs.

    Raises:
        ConfigYamlValidationError: if any constructor value is still 'TODO'.
        ConfigYamlWrongConstructorError: if the kwargs don't fit the constructor.
    """
    cls = get_class(config['class'], target_class)
    try:
        constructor_kwargs = config.get('constructor', {})
        if 'TODO' in constructor_kwargs.values():
            raise ConfigYamlValidationError(cls.__name__, 'TODOs found in config', 'Fill in remaining TODO entries in config.yml')
        return cls(**constructor_kwargs)
    except ConfigYamlValidationError:
        # Validation errors are already descriptive; re-raise untouched.
        raise
    except TypeError as err:
        # Wrong/missing kwargs; drop the original traceback context.
        raise ConfigYamlWrongConstructorError(cls.__name__, str(err)) from None
def extractDanmeifosterhomeWordpressCom(item):
    """Release parser for 'danmeifosterhome.wordpress.com' feed items.

    Returns a release message for recognised tags, None for previews or
    untyped posts, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Previews and posts without a chapter/volume number are not releases.
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# NOTE(review): the bare '.parametrize(...)' was the residue of a stripped
# '@pytest.mark' prefix — restored below.
@pytest.mark.parametrize('signature,expected', (('tokenLaunched()', '0xde78e78a'), ('CEILING()', '0xc51bf934'), ('Registrar(address,bytes32,address)', '0xa31d5580')))
def test_fn_signature_to_4byte_selector(signature, expected):
    """The 4-byte selector of an ABI signature is the first four bytes of
    keccak256(signature), hex-encoded."""
    bytes_selector = function_signature_to_4byte_selector(signature)
    hex_selector = encode_hex(bytes_selector)
    assert hex_selector == expected
def extractRakudailnWordpressCom(item):
    """Release parser for 'rakudailn.wordpress.com' feed items.

    Returns a release message for 'WATTT'-tagged posts, None for previews or
    untyped posts, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip previews and posts that carry no chapter/volume number.
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    if 'WATTT' not in item['tags']:
        return False
    return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
class bsn_flow_checksum_bucket_stats_reply(bsn_stats_reply):
    """OpenFlow v5 Big Switch experimenter stats reply carrying flow-checksum
    bucket entries.

    Wire layout (see pack/unpack): version, type, length, xid, stats_type,
    flags, 4 pad bytes, experimenter id, subtype, then the entry list.

    NOTE(review): 'unpack' takes only the reader and never touches instance
    state; in loxi-generated classes it is a @staticmethod, so the (stripped)
    decorator is restored here.
    """
    version = 5
    type = 19
    stats_type = 65535
    experimenter = 6035143
    subtype = 10

    def __init__(self, xid=None, flags=None, entries=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (entries != None):
            self.entries = entries
        else:
            self.entries = []
        return

    def pack(self):
        """Serialize the message; the length field (slot 2) is patched in
        after all parts are packed."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))  # pad
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize a message from *reader*; asserts every fixed header
        field matches this class's constants."""
        obj = bsn_flow_checksum_bucket_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 5)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Restrict further reads to this message's declared length.
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 65535)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)  # pad
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 10)
        obj.entries = loxi.generic_util.unpack_list(reader, ofp.common.bsn_flow_checksum_bucket_stats_entry.unpack)
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.entries != other.entries):
            return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump onto pretty-printer *q*."""
        q.text('bsn_flow_checksum_bucket_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('entries = ')
                q.pp(self.entries)
                q.breakable()
        q.text('}')
def subject_identity_verification_required(db):
    """Fixture-style generator: force subject identity verification on.

    Persists the toggled config before yielding, and restores the original
    value afterwards — including when the consuming test raises (the original
    code skipped restoration on failure, leaking config across tests).
    """
    original_value = CONFIG.execution.subject_identity_verification_required
    CONFIG.execution.subject_identity_verification_required = True
    ApplicationConfig.update_config_set(db, CONFIG)
    try:
        yield
    finally:
        # Always restore so a failing test cannot leak the override.
        CONFIG.execution.subject_identity_verification_required = original_value
        ApplicationConfig.update_config_set(db, CONFIG)
class Codec():
    """Serialize/deserialize a binary tree as a comma-separated preorder
    string, with absent children encoded as 'null'."""

    def serialize(self, root):
        """Encode *root* in preorder: value, left subtree, right subtree."""
        if not root:
            return 'null'
        return ','.join([str(root.val), self.serialize(root.left), self.serialize(root.right)])

    def deserialize(self, data):
        """Rebuild the tree produced by serialize()."""
        tokens = deque(data.split(','))

        def build():
            # Consume one token; 'null' terminates the current branch.
            value = tokens.popleft()
            if value == 'null':
                return None
            node = TreeNode(int(value))
            node.left = build()
            node.right = build()
            return node
        return build()
# NOTE(review): '_sensor(...)' is the residue of a stripped registry decorator
# (e.g. '@registry.register_sensor(name=...)') — TODO confirm upstream.
_sensor(name='DemonstrationSensor')
class DemonstrationSensor(Sensor):
    """Sensor that replays the next expert action from an episode's reference
    replay, emitting 0 once the replay is exhausted."""

    def __init__(self, **kwargs):
        # NOTE(review): Sensor.__init__ is never called here — verify this is
        # intentional for the framework's sensor base class.
        self.uuid = 'demonstration'
        self.observation_space = spaces.Discrete(1)
        self.timestep = 0
        self.prev_action = 0

    def _get_uuid(self, *args: Any, **kwargs: Any) -> str:
        return self.uuid

    def _get_observation(self, observations: Dict[(str, Observations)], episode, task: EmbodiedTask, **kwargs):
        # Restart the replay cursor at the beginning of each episode.
        if task._is_resetting:
            self.timestep = 1
        if (self.timestep < len(episode.reference_replay)):
            action_name = episode.reference_replay[self.timestep].action
            action = get_habitat_sim_action(action_name)
        else:
            # Replay exhausted: fall back to action 0.
            action = 0
        self.timestep += 1
        return action

    def get_observation(self, **kwargs):
        return self._get_observation(**kwargs)
class PanelNotebook(notebook.SmartNotebook, providers.ProviderHandler):
    """Vertical notebook hosting the main side panels.

    Panels arrive through the 'main-panel' provider service; each gets a tab
    plus a check menu item in the View menu.  Shown/hidden state and ordering
    are persisted in the 'gui/panels' setting, and the last selected panel in
    'gui/last_selected_panel'.
    """

    def __init__(self, exaile, gui):
        notebook.SmartNotebook.__init__(self, vertical=True)
        self.exaile = exaile
        # Maps provider name -> PanelData (tab, provider, position, menu item).
        self.panels = {}
        self.set_add_tab_on_empty(False)
        # While True, signal handlers skip persisting state (startup restore).
        self.loading_panels = True
        self.connect('page-removed', self.on_panel_removed)
        self.connect('page-reordered', self.on_panel_reordered)
        self.connect('switch-page', self.on_panel_switch)
        _register_builtin_panels(exaile, gui.main.window)
        self.view_menu = menu.ProviderMenu('panel-tab-context', None)
        menu.simple_menu_item('panel-menu', ['show-playing-track'], _('P_anels'), submenu=self.view_menu).register('menubar-view-menu')
        # Registering as a ProviderHandler triggers on_provider_added for all
        # already-registered panels.
        providers.ProviderHandler.__init__(self, 'main-panel', simple_init=True)
        self.actions = notebook.NotebookActionService(self, 'main-panel-actions')
        if (not self.exaile.loading):
            self.on_gui_loaded()

    def focus_panel(self, tab_name):
        """Switch to the named panel's page and give it keyboard focus."""
        data = self.panels[tab_name]
        if data.shown:
            panel_nr = self.page_num(data.tab.page)
            self.set_current_page(panel_nr)
            data.tab.grab_focus()

    def get_active_panel(self):
        # NOTE(review): the result of get_current_page() is discarded and this
        # always returns None — looks unfinished; confirm against callers.
        self.get_current_page()
        return None

    def toggle_panel(self, tab_name):
        """Show the named panel if hidden, hide it if shown."""
        data = self.panels[tab_name]
        if data.shown:
            self.remove_tab(data.tab)
        else:
            self.add_tab(data.tab, data.tab.page, data.position)
        # NOTE(review): shown is set True unconditionally; when hiding, the
        # 'page-removed' handler fired by remove_tab appears to be expected to
        # have set it False first — verify the signal ordering.
        data.shown = True
        self.save_panel_settings()

    def on_provider_added(self, provider):
        """Create a tab and context-menu entry for a newly registered panel."""
        if (provider.name is None):
            logger.warning('Ignoring improperly initialized panel provider: %s', provider)
            return
        panel = provider.get_panel()
        panel.show()
        tab = notebook.NotebookTab(self, panel, vertical=True)
        tab.provider = provider
        item = menu.check_menu_item(provider.name, [], panel.get_page_name(), (lambda *a: self.panels[provider.name].shown), (lambda *a: self.toggle_panel(provider.name)))
        providers.register('panel-tab-context', item)
        self.add_tab(tab, panel)
        self.panels[provider.name] = PanelData(tab, provider, (self.get_n_pages() - 1), item)
        self.save_panel_settings()

    def on_provider_removed(self, provider):
        """Tear down the tab and menu entry of an unregistered panel."""
        data = self.panels[provider.name]
        for n in range(self.get_n_pages()):
            if (data.tab.page == self.get_nth_page(n)):
                self.remove_page(n)
                break
        providers.unregister('panel-tab-context', data.menuitem)
        del self.panels[provider.name]

    def on_panel_removed(self, notebook, page, pagenum):
        # Mark the matching panel hidden and persist (unless restoring).
        if self.loading_panels:
            return
        for (name, data) in self.panels.items():
            if (data.tab.page == page):
                data.shown = False
                break
        self.save_panel_settings()

    def on_panel_reordered(self, notebook, page, pagenum):
        # Re-read every shown panel's position and persist.
        if self.loading_panels:
            return
        for (name, data) in self.panels.items():
            if data.shown:
                data.position = self.page_num(data.tab.page)
        self.save_panel_settings()

    def on_panel_switch(self, notebook, page, pagenum):
        """Remember the selected panel so it can be restored next session."""
        if self.exaile.loading:
            return
        page = notebook.get_nth_page(pagenum)
        for (name, data) in self.panels.items():
            if (data.tab.page == page):
                settings.set_option('gui/last_selected_panel', name)
                return

    def save_panel_settings(self):
        """Persist each panel's (shown, position) options."""
        if self.loading_panels:
            return
        param = dict([(k, v.opts) for (k, v) in self.panels.items()])
        settings.set_option('gui/panels', param)

    def on_gui_loaded(self):
        """Restore panel order/visibility and re-select the last used panel."""
        last_selected_panel = settings.get_option('gui/last_selected_panel', 'collection')
        order = settings.get_option('gui/panels', {'collection': (True, 0), 'radio': (True, 1), 'playlists': (True, 2), 'files': (True, 3)})
        selected_panel = None
        for (name, (shown, pos)) in order.items():
            panel_data = self.panels.get(name, None)
            if (panel_data is None):
                continue
            tab = panel_data.tab
            panel_data.shown = shown
            panel_data.position = pos
            if shown:
                self.reorder_child(tab.page, pos)
            else:
                self.remove_tab(tab)
            if (last_selected_panel == name):
                selected_panel = tab.page
        # Restoration finished: signal handlers may persist state again.
        self.loading_panels = False
        if (selected_panel is not None):
            panel_num = self.page_num(selected_panel)
            self.set_current_page(panel_num)
class Packages(Base):
    """Maps an upstream project to its package name in a given distribution.

    (project_id, distro_name, package_name) rows; (distro_name, package_name)
    is unique.

    NOTE(review): the three query helpers take 'cls' but had no @classmethod
    decorator (stripped during extraction) — restored here, otherwise calls
    like Packages.by_id(session, id) would mis-bind 'cls'.
    """
    __tablename__ = 'packages'
    id = sa.Column(sa.Integer, primary_key=True)
    distro_name = sa.Column(sa.String(200), sa.ForeignKey('distros.name', ondelete='cascade', onupdate='cascade'))
    project_id = sa.Column(sa.Integer, sa.ForeignKey('projects.id', ondelete='cascade', onupdate='cascade'))
    package_name = sa.Column(sa.String(200))
    __table_args__ = (sa.UniqueConstraint('distro_name', 'package_name'),)
    project = sa.orm.relationship('Project', backref=sa.orm.backref('package', cascade='all, delete-orphan'))
    distro = sa.orm.relationship('Distro', backref=sa.orm.backref('package', cascade='all, delete-orphan'))

    def __repr__(self):
        return f'<Packages({self.project_id}, {self.distro_name}: {self.package_name})>'

    def __json__(self):
        return dict(package_name=self.package_name, distro=self.distro_name)

    @classmethod
    def by_id(cls, session, pkg_id):
        """Return the package with primary key *pkg_id*, or None."""
        return session.query(cls).filter_by(id=pkg_id).first()

    @classmethod
    def get(cls, session, project_id, distro_name, package_name):
        """Return the package for (project, distro, name); distro name is
        matched case-insensitively."""
        query = session.query(cls).filter((cls.project_id == project_id)).filter((sa.func.lower(cls.distro_name) == sa.func.lower(distro_name))).filter((cls.package_name == package_name))
        return query.first()

    @classmethod
    def by_package_name_distro(cls, session, package_name, distro_name):
        """Return the first package with *package_name* in *distro_name*
        (distro matched case-insensitively), or None."""
        query = session.query(cls).filter((cls.package_name == package_name)).filter((sa.func.lower(cls.distro_name) == sa.func.lower(distro_name)))
        return query.first()
class OdysseyArgument(int):
    """Command-line argument restricting station results by Odyssey support.

    Accepts any combination of the letters Y, N and '?' (case-insensitive).
    NOTE(review): the int base class and the exact nesting of __init__ below
    are ambiguous in this extraction-mangled source — verify upstream.
    """

    class OdysseyParser(str):
        # str subclass that validates the value at construction time.
        def __new__(cls, val, **kwargs):
            if (not isinstance(val, str)):
                raise OdysseyError(val)
            # Every character must be one of Y, N or '?' (any case).
            for v in val:
                if ('YN?'.find(v.upper()) < 0):
                    raise OdysseyError(val.upper())
            return super().__new__(cls, val, **kwargs)

    def __init__(self):
        # Flag spellings and argparse-style metadata for this argument.
        self.args = ['--odyssey', '--od']
        self.kwargs = {'help': 'Limit to stations with one of the specified odyssey, e.g. --od YN? matches any station, --od Y matches only odyssey stations.', 'dest': 'odyssey', 'metavar': 'ODYSSEY', 'type': 'odyssey', 'choices': 'YN?'}
class OptionPlotoptionsPolygonStatesHover(Options):
    """Hover-state options for polygon series (generated Highcharts wrapper).

    NOTE(review): every getter/setter pair here shared a name with no
    decorators, so the later definition silently shadowed the earlier one —
    the @property/@x.setter decorators (standard in this generated wrapper
    family) are restored.
    """

    @property
    def animation(self) -> 'OptionPlotoptionsPolygonStatesHoverAnimation':
        """Sub-configuration for the hover animation."""
        return self._config_sub_data('animation', OptionPlotoptionsPolygonStatesHoverAnimation)

    @property
    def enabled(self):
        """Whether the hover state is enabled (default True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def halo(self) -> 'OptionPlotoptionsPolygonStatesHoverHalo':
        """Sub-configuration for the hover halo."""
        return self._config_sub_data('halo', OptionPlotoptionsPolygonStatesHoverHalo)

    @property
    def lineWidth(self):
        """Line width on hover (default unset)."""
        return self._config_get(None)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineWidthPlus(self):
        """Additional line width applied on hover (default 1)."""
        return self._config_get(1)

    @lineWidthPlus.setter
    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)

    @property
    def marker(self) -> 'OptionPlotoptionsPolygonStatesHoverMarker':
        """Sub-configuration for the hover marker."""
        return self._config_sub_data('marker', OptionPlotoptionsPolygonStatesHoverMarker)
def next_contract_address(w3, base_tester, fake_contract_code):
    """Factory for a helper that predicts the next contract address of a sender.

    The helper deploys the fake contract from the given sender on a chain
    snapshot, records the receipt's contractAddress, then reverts so the
    chain state is left untouched.
    """
    def _predict(sender):
        snapshot = base_tester.take_snapshot()
        deploy_tx = {
            'from': sender,
            'to': '',
            'gas': 7000000,
            'data': Web3.toHex(compiler.compile(fake_contract_code)),
        }
        tx_hash = w3.eth.sendTransaction(deploy_tx)
        predicted = w3.eth.getTransactionReceipt(tx_hash).contractAddress
        # Roll back the throwaway deployment.
        base_tester.revert_to_snapshot(snapshot)
        return predicted
    return _predict
class VocabularyMatcher():
    """Builds and caches boolean masks over a tokenizer's vocabulary.

    Masks can be built from token sets, regexes or exact character lengths.
    One matcher per tokenizer is kept in the class-level '_instances'
    registry; masks are memoized in memory and 'named:'/'charlen:' entries
    are additionally persisted to a pickle on disk.

    NOTE(review): this extraction-mangled source had its decorators stripped;
    they are restored from in-class usage ('self.vocab_size' /
    'self.eos_token_id' are read as plain values, and init/instance/
    ensure_ready take no self).  The missing '_instances' class attribute is
    also restored, and save() now applies the same ':' -> '__' replacement as
    init() so both sides use the same cache path.
    """

    # Registry of initialized matchers, keyed by tokenizer name.
    _instances = {}

    def __init__(self, tokenizer, model_identifier):
        self.tokenizer = tokenizer
        self.model_identifier = model_identifier
        # id -> subtoken string (inverse of the tokenizer's vocab mapping).
        self.vocab = {v: k for (k, v) in get_vocab(self.tokenizer).items()}
        # How this tokenizer spells ' ' and '\n' as subtokens.
        self.space_repr = self.tokenizer.tokenize(' ')[0]
        self.nl_repr = self.tokenizer.tokenize('\n')[0]
        self.token_lengths = None  # lazily-built per-token character lengths
        self.stats = Stats('VocabularyMatcher')
        self.disk_cached = 0  # number of masks loaded from the disk cache
        self.cache = {}

    @property
    def eos_token_id(self):
        return self.tokenizer.eos_token_id

    @staticmethod
    def init(tokenizer):
        """Create (or load from disk) the matcher for *tokenizer* and register
        it in the '_instances' registry; idempotent per tokenizer name."""
        if (tokenizer.name in VocabularyMatcher._instances):
            return
        import pickle
        cache_identifier = tokenizer.model_identifier.replace('/', '-').replace(':', '__')
        # NOTE(review): str.replace with a regex-looking pattern is a literal
        # no-op; kept as-is so existing cache filenames remain valid.
        cache_identifier += ('-' + type(tokenizer.tokenizer_impl).__name__.replace('[^a-z0-9]', ''))
        cache_path = f'token-mask-cache-{cache_identifier}.pkl'
        matcher_path = f'matcher-{cache_identifier}.pkl'
        try:
            with cachefile(matcher_path, 'rb') as f:
                _instance = pickle.load(f)
                _instance.stats = Stats('VocabularyMatcher')
        except Exception:
            # No (or unreadable) pickled matcher: build a fresh one.
            _instance = VocabularyMatcher(tokenizer, tokenizer.model_identifier)
        try:
            with cachefile(cache_path, 'rb') as f:
                try:
                    import time
                    s = time.time()
                    _instance.cache = pickle.load(f)
                    _instance.disk_cached = len(_instance.cache)
                except Exception:
                    warnings.warn('Failed to load token mask cache from {}. If the cache is corrupted, please delete it.'.format(cache_path))
        except Exception:
            # Missing cache file is fine; masks will be rebuilt on demand.
            pass
        VocabularyMatcher._instances[tokenizer.name] = _instance
        atexit.register((lambda : _instance.save()))

    def save(self):
        """Persist this matcher and its durable masks to disk."""
        import pickle
        # Keep the identifier scheme in sync with init(); without the ':'
        # replacement save() and init() used different paths.
        cache_identifier = self.tokenizer.model_identifier.replace('/', '-').replace(':', '__')
        cache_identifier += ('-' + type(self.tokenizer.tokenizer_impl).__name__.replace('[^a-z0-9]', ''))
        cache_path = f'token-mask-cache-{cache_identifier}.pkl'
        matcher_path = f'matcher-{cache_identifier}.pkl'
        with cachefile(matcher_path, 'wb') as f:
            # Stats is transient; detach it while pickling.
            stats = self.stats
            self.stats = None
            pickle.dump(self, f)
            self.stats = stats

        def is_cached(k):
            # Only named and charlen masks are worth persisting.
            if k.startswith('named:'):
                return True
            if k.startswith('charlen:'):
                return True
            return False
        with cachefile(cache_path, 'wb') as f:
            pickle.dump({k: v for (k, v) in self.cache.items() if is_cached(k)}, f)

    @staticmethod
    def instance():
        """Return the matcher for the current tokenizer; raises if init() was
        never called for it."""
        tokenizer = get_tokenizer()
        if (not (tokenizer.name in VocabularyMatcher._instances)):
            raise Exception('VocabularyMatcher not initialized.')
        return VocabularyMatcher._instances[tokenizer.name]

    @staticmethod
    def ensure_ready():
        VocabularyMatcher.instance()

    def with_cache(self, keys, provider):
        """Return the cached value for the first hit among *keys*; otherwise
        compute via *provider* and store the result under every key."""
        keys = [k for k in keys if (k is not None)]
        for k in keys:
            if (k in self.cache.keys()):
                return self.cache[k]
        result = provider()
        for k in keys:
            self.cache[k] = result
        return result

    def mask_cache_name(self, tokens=None, regex=None, minus=None, prefix=None, exact=None, charlen=None, name=None):
        """Build the list of cache keys identifying a mask request."""
        keys = ([('named:' + name)] if (name is not None) else [])
        if (regex is not None):
            return (keys + [('regex:' + regex)])
        elif (charlen is not None):
            return (keys + [('charlen:' + str(charlen))])
        else:
            assert (tokens is not None)
            # Canonical key: modifier flags plus the sorted token list.
            t = ((('prefix ' if prefix else '') + ('* \\ ' if minus else '')) + '|'.join(sorted(list(tokens))))
            return (keys + [t])

    def make_mask(self, tokens=None, regex=None, minus=None, prefix=False, exact=False, charlen=None, name=None):
        """Return (building and caching if needed) the boolean vocab mask for
        the given token set, regex or character length; 'minus' inverts it."""
        with self.stats.timer('make_mask'):
            cache_keys = self.mask_cache_name(tokens, regex, minus, prefix, exact, charlen, name)

            def do_make_mask():
                if (tokens is not None):
                    mask = self._make_mask_from_tokens(tokens, prefix, exact=exact)
                elif (charlen is not None):
                    mask = self._make_mask_from_char_length(charlen)
                else:
                    assert (regex is not None), 'TokenSetConcrete: either tokens or regex must be set.'
                    mask = self._make_mask_from_regex(regex, prefix)
                if minus:
                    mask = np.logical_not(mask)
                return mask
            return self.with_cache(cache_keys, do_make_mask)

    def _make_mask_from_regex(self, regex, prefix=False):
        """Mask of subtokens matching *regex* (or being a prefix of a match)."""
        # Translate literal space/newline into this tokenizer's subtoken forms.
        regex = regex.replace(' ', self.space_repr)
        regex = regex.replace('\n', self.nl_repr)
        mask = np.zeros([self.vocab_size], dtype=np.bool_)
        if prefix:
            r = Regex(regex)
            for (id, subtoken) in self.vocab.items():
                if r.is_prefix(subtoken):
                    mask[id] = True
            # The empty string being a prefix means EOS is acceptable too.
            if r.is_prefix(''):
                mask[self.eos_token_id] = True
        else:
            pattern = re.compile(regex, re.UNICODE)
            for (id, subtoken) in self.vocab.items():
                if (pattern.match(subtoken) is not None):
                    mask[id] = True
        return mask

    @property
    def vocab_size(self):
        return self.tokenizer.vocab_size

    def _make_mask_from_char_length(self, length):
        """Mask of subtokens whose character length equals *length*."""
        if (self.token_lengths is None):
            # Build the per-token length table once, lazily.
            token_lengths = np.zeros([self.vocab_size], dtype=np.int32)
            for (id, subtoken) in self.vocab.items():
                token_lengths[id] = len(subtoken)
            self.token_lengths = token_lengths
        return (self.token_lengths == length)

    def _make_mask_from_tokens(self, tokens, prefix, exact=False):
        """Mask of subtokens in *tokens* ('*' = all, 'eos' = the EOS token).

        With prefix=True, tokens are tokenized and only their first subtoken
        ids are allowed; otherwise subtokens are matched textually (exact
        full-match or match-with-continuation).
        """
        mask = np.zeros([self.vocab_size], dtype=np.bool_)
        if ('*' in tokens):
            mask[:] = True
        elif (len(tokens) > 0):
            if prefix:
                tokens = [self.tokenizer(t)['input_ids'][0] for t in tokens]
                for t in tokens:
                    mask[t] = True
            else:
                if (any((t for t in tokens if (t != 'eos'))) > 0):
                    def process(t):
                        # Map literal text into the tokenizer's subtoken spelling.
                        t = t.replace('.', '\\.')
                        t = t.replace(' ', self.space_repr)
                        t = t.replace('\n', self.nl_repr)
                        t = re.escape(t)
                        return t
                    if exact:
                        pattern = '|'.join((f'({process(t)})' for t in tokens if (t != 'eos')))
                        pattern = re.compile(pattern, re.UNICODE)
                        matcher = pattern.fullmatch
                    else:
                        pattern = '|'.join((f'{process(t)}.*' for t in tokens if (t != 'eos')))
                        pattern = re.compile(pattern, re.UNICODE)
                        matcher = pattern.match
                    for (id, subtoken) in self.vocab.items():
                        if (matcher(subtoken) is not None):
                            mask[id] = True
                if any([(t == 'eos') for t in tokens]):
                    mask[self.eos_token_id] = True
        return mask

    def str(self, mask, full=False):
        """Human-readable summary of *mask*: '*', a (possibly complemented)
        token set, truncated to a handful of entries unless full=True."""
        prefix = ''
        tokens = []
        mask = mask
        def tstr(t):
            return str([t])[1:(- 1)]
        if (mask.sum() == mask.shape[0]):
            return '*'
        # Show the smaller of the set and its complement.
        if ((mask.sum() > np.logical_not(mask).sum()) and (np.logical_not(mask).sum() > 0)):
            prefix = '* \\ '
            mask = np.logical_not(mask)
        truncated = False
        for i in mask.reshape((- 1)).nonzero()[0]:
            if ((len(tokens) > 5) and (not full)):
                truncated = True
                break
            if (i == self.eos_token_id):
                tokens.append('eos')
            else:
                if (not (i in self.vocab)):
                    continue
                s = self.vocab[i]
                s = self.tokenizer.convert_tokens_to_string([s])
                s = s.encode('unicode_escape').decode('utf-8')
                tokens.append(tstr(s))
        return (prefix + '{{{}}}'.format((', '.join([t for t in sorted(list(tokens))]) + ('...' if truncated else ''))))
class Migration(migrations.Migration):
    """Initial schema: the Event and EventCounter tables, plus the
    (channel, eid) uniqueness constraint on Event."""
    initial = True
    dependencies = []
    operations = [migrations.CreateModel(name='Event', fields=[('id', models.AutoField(primary_key=True, serialize=False, verbose_name='ID')), ('channel', models.CharField(db_index=True, max_length=255)), ('type', models.CharField(db_index=True, max_length=255)), ('data', models.TextField()), ('eid', models.BigIntegerField(db_index=True, default=0)), ('created', models.DateTimeField(auto_now_add=True, db_index=True))]), migrations.CreateModel(name='EventCounter', fields=[('id', models.AutoField(primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=255, unique=True)), ('value', models.BigIntegerField(default=0)), ('updated', models.DateTimeField(auto_now=True, db_index=True))]), migrations.AlterUniqueTogether(name='event', unique_together=set([('channel', 'eid')]))]
def test_get_responses_with_observations(poly_example_tmp_dir, dark_storage_client):
    """The POLY_ response of the second ensemble must be flagged as observed."""
    experiments = dark_storage_client.get('/experiments').json()
    ensemble_id = experiments[0]['ensemble_ids'][1]
    responses = dark_storage_client.get(f'/ensembles/{ensemble_id}/responses').json()
    assert 'POLY_' in responses
    assert 'has_observations' in responses['POLY_']
    assert responses['POLY_']['has_observations'] is True
def login_command(username: str, password: str, server_url: str) -> str:
    """Authenticate against *server_url* and persist the credentials locally.

    Returns:
        The path of the written credentials file.
    """
    user_id, access_token = get_access_token(username=username, password=password, server_url=server_url)
    echo_green(f'Logged in as user: {username}')
    credentials = Credentials(
        username=username,
        password=password,
        user_id=user_id,
        access_token=access_token,
    )
    credentials_path = get_credentials_path()
    write_credentials_file(credentials, credentials_path)
    echo_green(f'Credentials file written to: {credentials_path}')
    return credentials_path
class Cutting2DCoreEnvironment(CoreEnv):
    """Core environment for a 2D cutting-stock problem with a static demand.

    NOTE(review): the original 'def close(self):' had no body (a syntax
    error, presumably lost in extraction) — a docstring body is restored.
    """

    def __init__(self, max_pieces_in_inventory: int, raw_piece_size: (int, int), static_demand: (int, int)):
        """
        :param max_pieces_in_inventory: capacity of the piece inventory.
        :param raw_piece_size: (width, height) of a fresh raw piece.
        :param static_demand: the (width, height) demanded on every step.
        """
        super().__init__()
        self.max_pieces_in_inventory = max_pieces_in_inventory
        self.raw_piece_size = tuple(raw_piece_size)
        self.current_demand = static_demand
        self._setup_env()

    def _setup_env(self):
        """Create a fresh inventory seeded with one raw piece."""
        self.inventory = Inventory(self.max_pieces_in_inventory, self.raw_piece_size)
        self.inventory.replenish_piece()

    def step(self, maze_action: Cutting2DMazeAction) -> Tuple[(Cutting2DMazeState, np.array, bool, Dict[(Any, Any)])]:
        """Apply a cutting action.

        Rewards: 0 for a valid cut from an offcut, -1 when a new raw piece
        had to be ordered, -2 for an invalid cut.  The episode never ends
        (done is always False).
        """
        (info, reward) = ({}, 0)
        replenishment_needed = False
        if (maze_action.piece_id >= self.inventory.size()):
            info['error'] = 'piece_id_out_of_bounds'
        else:
            piece_to_cut = self.inventory.pieces[maze_action.piece_id]
            if self.inventory.cut(maze_action, self.current_demand):
                info['msg'] = 'valid_cut'
                # Cutting the full raw piece consumes it; reorder below.
                replenishment_needed = (piece_to_cut == self.raw_piece_size)
            else:
                info['error'] = 'invalid_cut'
                reward = (- 2)
        if replenishment_needed:
            self.inventory.replenish_piece()
            reward = (- 1)  # ordering a new raw piece has a cost
        maze_state = self.get_maze_state()
        return (maze_state, reward, False, info)

    def get_maze_state(self) -> Cutting2DMazeState:
        """Snapshot of inventory, capacity, demand and raw piece size."""
        return Cutting2DMazeState(self.inventory.pieces, self.max_pieces_in_inventory, self.current_demand, self.raw_piece_size)

    def reset(self) -> Cutting2DMazeState:
        """Re-create the inventory and return the initial state."""
        self._setup_env()
        return self.get_maze_state()

    def close(self):
        """Nothing to clean up; present to satisfy the CoreEnv interface."""

    def seed(self, seed: int) -> None:
        """Deterministic environment; seeding is a no-op."""
        pass

    def get_renderer(self) -> Any:
        pass

    def get_serializable_components(self) -> Dict[(str, Any)]:
        pass

    def is_actor_done(self) -> bool:
        pass

    def actor_id(self) -> ActorID:
        pass

    def agent_counts_dict(self) -> Dict[(Union[(str, int)], int)]:
        pass
# NOTE(review): the bare '.django_db' was the residue of a stripped
# '@pytest.mark' prefix — restored below.
@pytest.mark.django_db
def test_federal_accounts_uses_corrected_cgac(client, fixture_data):
    """Sorting by managing agency (asc, FY2015) must surface the corrected
    DOD CGAC as the first result's acronym."""
    resp = client.post('/api/v2/federal_accounts/', content_type='application/json', data=json.dumps({'sort': {'field': 'managing_agency', 'direction': 'asc'}, 'filters': {'fy': '2015'}}))
    response_data = resp.json()
    assert (response_data['results'][0]['managing_agency_acronym'] == 'DOD')
class OptionPlotoptionsXrangeSonificationContexttracksMappingTime(Options):
    """Time-mapping options for x-range sonification context tracks.

    NOTE(review): every getter/setter pair here shared a name with no
    decorators, so the later definition silently shadowed the earlier one —
    the @property/@x.setter decorators (standard in this generated wrapper
    family) are restored.
    """

    @property
    def mapFunction(self):
        """Mapping function (default unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property the mapping applies to (default unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range (default unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range (default unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Value range to map within (default unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def selfdestruct(evm: Evm) -> None:
    """EVM SELFDESTRUCT opcode.

    Pops the beneficiary address, transfers the current account's entire
    balance to it, schedules the current account for deletion, and halts
    execution of this frame.
    """
    beneficiary = to_address(pop(evm.stack))
    gas_cost = GAS_SELF_DESTRUCT
    # Extra charge when sending a non-zero balance to a dead account.
    if ((not is_account_alive(evm.env.state, beneficiary)) and (get_account(evm.env.state, evm.message.current_target).balance != 0)):
        gas_cost += GAS_SELF_DESTRUCT_NEW_ACCOUNT
    originator = evm.message.current_target
    # Accounts already scheduled for deletion anywhere up the call chain; the
    # refund is granted at most once per account.
    # NOTE(review): this aliases (and .update() mutates) evm.accounts_to_delete
    # rather than copying — confirm this matches the reference specification.
    refunded_accounts = evm.accounts_to_delete
    parent_evm = evm.message.parent_evm
    while (parent_evm is not None):
        refunded_accounts.update(parent_evm.accounts_to_delete)
        parent_evm = parent_evm.message.parent_evm
    if (originator not in refunded_accounts):
        evm.refund_counter += REFUND_SELF_DESTRUCT
    charge_gas(evm, gas_cost)
    # Move the entire balance to the beneficiary and zero the originator.
    beneficiary_balance = get_account(evm.env.state, beneficiary).balance
    originator_balance = get_account(evm.env.state, originator).balance
    set_account_balance(evm.env.state, beneficiary, (beneficiary_balance + originator_balance))
    set_account_balance(evm.env.state, originator, U256(0))
    evm.accounts_to_delete.add(originator)
    # Track touched-but-empty beneficiaries for end-of-transaction cleanup.
    if account_exists_and_is_empty(evm.env.state, beneficiary):
        evm.touched_accounts.add(beneficiary)
    # SELFDESTRUCT halts the running frame.
    evm.running = False
    pass
def generateMDP(MDPTemplate, outputPrefix, timeStep, numSteps, temperature, pressure, logger=None):
    """Fill a GROMACS .mdp template and write it to '<outputPrefix>.mdp'.

    Substitutes the $dt, $nsteps, $temperature and $pressure placeholders;
    unknown placeholders are left untouched (safe_substitute).  The file is
    written with Unix newlines regardless of platform.
    """
    rendered = string.Template(MDPTemplate).safe_substitute(
        dt=timeStep, nsteps=numSteps, temperature=temperature, pressure=pressure)
    with open(outputPrefix + '.mdp', 'w', newline='\n') as handle:
        handle.write(rendered)
class GameArchive(models.Model):
    """Stores the binary replay associated with a finished game."""

    class Meta():
        verbose_name = ' Replay '
        verbose_name_plural = ' Replay '

    # NOTE(review): the original read 'models.OneToOneField(Game, **_(''), ...)'
    # and 'models.BinaryField(**_('Replay '))' — unpacking a gettext string
    # raises TypeError at import time.  The '**' is almost certainly extraction
    # damage to 'verbose_name=' keywords whose non-ASCII labels were lost;
    # restored as such.  TODO: recover the original labels.
    game = models.OneToOneField(Game, verbose_name=_(''), primary_key=True, related_name='archive', on_delete=models.CASCADE)
    replay = models.BinaryField(verbose_name=_('Replay '))

    def __str__(self):
        return self.game.name
def test_multiaddr_consistency():
    """A key dumped to disk and reloaded must produce an identical multiaddr."""
    key = make_crypto(DEFAULT_LEDGER)
    original = MultiAddr(HOST, PORT, key.public_key)
    # Round-trip the key through a temporary file.
    tmpdir = tempfile.mkdtemp()
    key_file = tmpdir + '/key'
    key.dump(key_file)
    reloaded_key = make_crypto(DEFAULT_LEDGER, private_key_path=key_file)
    restored = MultiAddr(HOST, PORT, reloaded_key.public_key)
    rmtree(tmpdir)
    assert str(original) == str(restored)
    assert original.public_key == restored.public_key
    assert original.peer_id == restored.peer_id
def can_skip_ciftify_fmri(participant_label, fmriname, settings):
    """Decide whether ciftify_subject_fmri can be skipped for this run.

    Returns True when the run already completed, or when incomplete output
    exists and --rerun-if-incomplete was not given; returns False when a
    (re)run is needed.  Incomplete outputs are deleted when settings.rerun
    is set.

    Fixes vs. original: removed a stray debug print() of the results dir,
    and restored the missing sentence break in the incomplete-output warning.
    """
    is_done = ciftify.utils.has_ciftify_fmri_run('sub-{}'.format(participant_label), fmriname, settings.ciftify_work_dir)
    if is_done:
        logger.info('ciftify_subject_fmri has already been run for sub-{}_{}'.format(participant_label, fmriname))
        return True
    results_dir = os.path.join(settings.ciftify_work_dir, 'sub-{}'.format(participant_label), 'MNINonLinear', 'Results', fmriname)
    if os.path.exists(results_dir):
        if settings.rerun:
            logger.info('Deleting incomplete ciftify outputs for sub-{}_{} and re-running ciftify_subject_fmri'.format(participant_label, fmriname))
            shutil.rmtree(results_dir)
            return False
        else:
            logger.warning('Incomplete ciftify_subject_fmri output found for sub-{}_{}. Consider using the --rerun-if-incomplete flag to delete and rerun ciftify_subject_fmri'.format(participant_label, fmriname))
            return True
    return False
def dipole3d_34(ax, da, A, bx, db, B, R):
    """Machine-generated Cartesian dipole integral kernel for one shell pair.

    Parameters (roles inferred from how they enter the arithmetic —
    TODO confirm against the code generator's documentation):
        ax, bx: Gaussian exponents of centers A and B (combined as ``ax + bx``).
        da, db: contraction/normalization coefficients (enter only as ``da * db``).
        A, B:   3-component coordinates of the two centers.
        R:      3-component reference point of the dipole operator.

    Returns:
        numpy.ndarray of shape (3, 10, 15): one slice per dipole component,
        indexed by the (bra, ket) Cartesian component pair, each entry summed
        over the primitive combinations via ``numpy.sum``.

    NOTE(review): this is auto-generated common-subexpression-eliminated code;
    the x* cascade depends on exact statement order. Do not edit by hand —
    regenerate from the source generator instead.
    """
    result = numpy.zeros((3, 10, 15), dtype=float)
    # --- common-subexpression cascade (generator output, order-sensitive) ---
    x0 = (0.5 / (ax + bx))
    x1 = ((ax + bx) ** (- 1.0))
    x2 = ((- x1) * ((ax * A[0]) + (bx * B[0])))
    x3 = ((- x2) - A[0])
    x4 = ((- x2) - B[0])
    x5 = ((ax * bx) * x1)
    x6 = numpy.exp(((- x5) * ((A[0] - B[0]) ** 2)))
    x7 = (1. * numpy.sqrt(x1))
    x8 = (x6 * x7)
    x9 = (x0 * x8)
    x10 = ((- x2) - R[0])
    x11 = (x4 * x8)
    x12 = (x10 * x11)
    x13 = (x12 + x9)
    x14 = (x13 * x4)
    x15 = (x10 * x8)
    x16 = (x0 * (x11 + x15))
    x17 = (3.0 * x16)
    x18 = (x13 * x3)
    x19 = (2.0 * x18)
    x20 = (x3 * x6)
    x21 = (x20 * x7)
    x22 = (x0 * (x11 + x21))
    x23 = (x21 * x4)
    x24 = (x23 + x9)
    x25 = (x24 * x4)
    x26 = (x22 + x25)
    x27 = (x0 * (((x14 + x17) + x19) + x26))
    x28 = (3.0 * x9)
    x29 = (x10 * x21)
    x30 = (x0 * (((x12 + x23) + x28) + x29))
    x31 = (x16 + x18)
    x32 = (x31 * x4)
    x33 = (x30 + x32)
    x34 = (x3 * x33)
    x35 = (x27 + x34)
    x36 = (x3 * x35)
    x37 = (x35 * x4)
    x38 = (x3 * x31)
    x39 = (2.0 * x23)
    x40 = ((x4 ** 2) * x8)
    x41 = (x28 + x40)
    x42 = (x0 * (x39 + x41))
    x43 = (x26 * x3)
    x44 = (x42 + x43)
    x45 = (x0 * ((((4.0 * x30) + (2.0 * x32)) + (2.0 * x38)) + x44))
    x46 = (x26 * x4)
    x47 = (x0 * (((5.0 * x42) + (3.0 * x43)) + (2.0 * x46)))
    x48 = (x24 * x3)
    x49 = ((2.0 * x0) * (((2.0 * x22) + x25) + x48))
    x50 = (x4 * x44)
    x51 = (x49 + x50)
    x52 = ((x3 * x51) + x47)
    x53 = (x33 * x4)
    x54 = (x0 * ((((5.0 * x27) + (3.0 * x34)) + x51) + (2.0 * x53)))
    x55 = (x37 + x45)
    x56 = ((x3 * x55) + x54)
    x57 = (da * db)
    x58 = (0. * x57)
    x59 = (2. * x58)
    x60 = numpy.exp(((- x5) * ((A[1] - B[1]) ** 2)))
    x61 = numpy.exp(((- x5) * ((A[2] - B[2]) ** 2)))
    x62 = ((3. * x1) * x61)
    x63 = (x60 * x62)
    x64 = (x59 * x63)
    x65 = ((- x1) * ((ax * A[1]) + (bx * B[1])))
    x66 = ((- x65) - B[1])
    x67 = (0. * x57)
    x68 = (x66 * x67)
    x69 = (x56 * x63)
    x70 = ((- x1) * ((ax * A[2]) + (bx * B[2])))
    x71 = ((- x70) - B[2])
    x72 = (x67 * x71)
    x73 = (x36 + x45)
    x74 = (x61 * x7)
    x75 = 3.
    x76 = (0. * x75)
    x77 = (x60 * x7)
    x78 = ((x66 ** 2) * x77)
    x79 = (x0 * x77)
    x80 = (x78 + x79)
    x81 = (x57 * x80)
    x82 = (x76 * x81)
    x83 = 2.
    x84 = (x67 * x83)
    x85 = (x71 * x84)
    x86 = (x63 * x85)
    x87 = ((x71 ** 2) * x74)
    x88 = (x0 * x74)
    x89 = (x87 + x88)
    x90 = (x57 * x89)
    x91 = (x76 * x90)
    x92 = (2.0 * x79)
    x93 = (x66 * (x80 + x92))
    x94 = (x29 + x9)
    x95 = ((x0 * (x15 + x21)) + (x3 * x94))
    x96 = (x22 + x48)
    x97 = (x30 + x38)
    x98 = ((x0 * ((((2.0 * x16) + x19) + x95) + x96)) + (x3 * x97))
    x99 = (x67 * x98)
    x100 = (x71 * x74)
    x101 = (x84 * x98)
    x102 = (x66 * x77)
    x103 = (2.0 * x88)
    x104 = (x71 * (x103 + x89))
    x105 = (3.0 * x79)
    x106 = ((x0 * (x105 + (3.0 * x78))) + (x66 * x93))
    x107 = ((x3 ** 2) * x8)
    x108 = (x107 + x28)
    x109 = ((x0 * (x108 + (2.0 * x29))) + (x3 * x95))
    x110 = (x109 * x59)
    x111 = (x109 * x67)
    x112 = (3.0 * x88)
    x113 = ((x0 * (x112 + (3.0 * x87))) + (x104 * x71))
    x114 = ((- x65) - A[1])
    x115 = (5. * x58)
    x116 = (x114 * x115)
    x117 = (x63 * ((x4 * x55) + x54))
    x118 = (x114 * x77)
    x119 = (x118 * x66)
    x120 = (x119 + x79)
    x121 = (x120 * x67)
    x122 = (x121 * x83)
    x123 = (x0 * (x102 + x118))
    x124 = (x120 * x66)
    x125 = (x123 + x124)
    x126 = 1.
    x127 = (x125 * x126)
    x128 = (0. * x127)
    x129 = (x35 * x57)
    x130 = (0. * x57)
    x131 = (x100 * x130)
    x132 = (0. * x126)
    x133 = (x132 * x90)
    x134 = (x105 + (2.0 * x119))
    x135 = (x0 * (x134 + x78))
    x136 = (x125 * x66)
    x137 = (x135 + x136)
    x138 = (x84 * x97)
    x139 = (x130 * x97)
    x140 = (x104 * x84)
    x141 = ((x0 * (((3.0 * x123) + (3.0 * x124)) + x93)) + (x137 * x66))
    x142 = (x115 * x95)
    x143 = (x83 * x95)
    x144 = (x100 * x67)
    x145 = (0. * x95)
    x146 = ((- x70) - A[2])
    x147 = (x115 * x146)
    x148 = (x146 * x63)
    x149 = (x66 * x84)
    x150 = (x146 * x74)
    x151 = (x150 * x71)
    x152 = (x151 + x88)
    x153 = (x152 * x67)
    x154 = (x153 * x83)
    x155 = (x132 * x81)
    x156 = (x130 * x152)
    x157 = (x0 * (x100 + x150))
    x158 = (x152 * x71)
    x159 = (x157 + x158)
    x160 = (x126 * x159)
    x161 = (0. * x160)
    x162 = (x84 * x93)
    x163 = (x112 + (2.0 * x151))
    x164 = (x0 * (x163 + x87))
    x165 = (x159 * x71)
    x166 = (x164 + x165)
    x167 = (x102 * x67)
    x168 = ((x0 * ((x104 + (3.0 * x157)) + (3.0 * x158))) + (x166 * x71))
    x169 = ((x114 ** 2) * x77)
    x170 = (x169 + x79)
    x171 = (x14 + x16)
    x172 = ((x0 * ((2.0 * x12) + x41)) + (x171 * x4))
    x173 = (x42 + x46)
    x174 = (x27 + x53)
    x175 = ((x0 * (((x172 + x173) + (3.0 * x30)) + (3.0 * x32))) + (x174 * x4))
    x176 = (x115 * x175)
    x177 = (x114 * x120)
    x178 = (x123 + x177)
    x179 = (x74 * x84)
    x180 = (x100 * x84)
    x181 = (x114 * x125)
    x182 = (x135 + x181)
    x183 = (x57 * x74)
    x184 = (x126 * x33)
    x185 = (0. * x184)
    x186 = (0. * x170)
    x187 = ((2.0 * x0) * (((2.0 * x123) + x124) + x177))
    x188 = (x182 * x66)
    x189 = (x187 + x188)
    x190 = (x130 * x31)
    x191 = (x0 * (((5.0 * x135) + (2.0 * x136)) + (3.0 * x181)))
    x192 = ((x189 * x66) + x191)
    x193 = (x115 * x94)
    x194 = (x132 * x94)
    x195 = (10. * x58)
    x196 = (x114 * x195)
    x197 = (x174 * x75)
    x198 = (x125 * x130)
    x199 = (x130 * x159)
    x200 = (x150 * x75)
    x201 = (x31 * x67)
    x202 = (x166 * x75)
    x203 = (x195 * x94)
    x204 = (x75 * x94)
    x205 = ((x146 ** 2) * x74)
    x206 = (x205 + x88)
    x207 = (x206 * x84)
    x208 = (x146 * x152)
    x209 = (x157 + x208)
    x210 = (x77 * x84)
    x211 = (0. * x206)
    x212 = (x130 * x209)
    x213 = (x146 * x159)
    x214 = (x164 + x213)
    x215 = (x214 * x57)
    x216 = ((2.0 * x0) * (((2.0 * x157) + x158) + x208))
    x217 = (x214 * x71)
    x218 = (x216 + x217)
    x219 = (x102 * x84)
    x220 = (x0 * (((5.0 * x164) + (2.0 * x165)) + (3.0 * x213)))
    x221 = ((x218 * x71) + x220)
    x222 = (x114 * (x170 + x92))
    x223 = (x40 + x9)
    x224 = (2.0 * x0)
    x225 = ((x11 * x224) + (x223 * x4))
    x226 = ((x0 * (((3.0 * x14) + x17) + x225)) + (x172 * x4))
    x227 = (x226 * x59)
    x228 = ((x0 * (x134 + x169)) + (x114 * x178))
    x229 = (x172 * x67)
    x230 = (x114 * x182)
    x231 = (x187 + x230)
    x232 = (x171 * x57)
    x233 = (x232 * x76)
    x234 = ((x114 * x189) + x191)
    x235 = (x13 * x67)
    x236 = (x235 * x83)
    x237 = (x6 * x62)
    x238 = (x237 * x59)
    x239 = (x238 * (((3.0 * x0) * (((2.0 * x187) + x188) + x230)) + (x234 * x66)))
    x240 = (x234 * x237)
    x241 = (x15 * x67)
    x242 = (x15 * x59)
    x243 = (x115 * x170)
    x244 = (x178 * x84)
    x245 = (x172 * x83)
    x246 = (x132 * x182)
    x247 = (x147 * x237)
    x248 = (x161 * x57)
    x249 = (x115 * x206)
    x250 = (x209 * x84)
    x251 = (x132 * x214)
    x252 = (x130 * x214)
    x253 = ((3. * x1) * x60)
    x254 = (x253 * x6)
    x255 = (x116 * x254)
    x256 = (x146 * (x103 + x206))
    x257 = ((x0 * (x163 + x205)) + (x146 * x209))
    x258 = (x256 * x76)
    x259 = (x146 * x214)
    x260 = (x216 + x259)
    x261 = ((x146 * x218) + x220)
    x262 = (x254 * x261)
    x263 = (x254 * x59)
    x264 = (x263 * (((3.0 * x0) * (((2.0 * x216) + x217) + x259)) + (x261 * x71)))
    x265 = ((- x65) - R[1])
    x266 = (x3 * x44)
    x267 = (x64 * (((3.0 * x0) * ((x266 + (2.0 * x49)) + x50)) + (x4 * x52)))
    x268 = (x102 * x265)
    x269 = (x268 + x79)
    x270 = (x269 * x67)
    x271 = (x52 * x63)
    x272 = (x266 + x49)
    x273 = (x265 * x77)
    x274 = (x0 * (x102 + x273))
    x275 = (x269 * x66)
    x276 = (x274 + x275)
    x277 = (x276 * x57)
    x278 = (x74 * x76)
    x279 = (x270 * x83)
    x280 = ((x0 * ((x105 + (2.0 * x268)) + x78)) + (x276 * x66))
    x281 = ((x0 * (x108 + x39)) + (x3 * x96))
    x282 = (x67 * x74)
    x283 = (x104 * x67)
    x284 = (x107 + x9)
    x285 = ((x21 * x224) + (x284 * x3))
    x286 = (3.0 * x274)
    x287 = ((x0 * (((3.0 * x275) + x286) + x93)) + (x280 * x66))
    x288 = (x59 * x74)
    x289 = (x273 * x59)
    x290 = ((x4 * x51) + x47)
    x291 = (x118 * x265)
    x292 = (x291 + x79)
    x293 = (x115 * x292)
    x294 = (x114 * x269)
    x295 = (x274 + x294)
    x296 = (x0 * (((x105 + x119) + x268) + x291))
    x297 = (x295 * x66)
    x298 = (x296 + x297)
    x299 = (x132 * x183)
    x300 = (2.0 * x294)
    x301 = (x0 * (((x125 + x275) + x286) + x300))
    x302 = (x298 * x66)
    x303 = (x301 + x302)
    x304 = (x130 * x96)
    x305 = (x130 * x89)
    x306 = ((x0 * (((x137 + x280) + (3.0 * x296)) + (3.0 * x297))) + (x303 * x66))
    x307 = (x115 * x74)
    x308 = (x290 * x63)
    x309 = (x132 * x44)
    x310 = (x84 * x96)
    x311 = (x115 * x284)
    x312 = ((x0 * (x118 + x273)) + (x114 * x292))
    x313 = ((x0 * (((3.0 * x22) + x225) + (3.0 * x25))) + (x173 * x4))
    x314 = (x114 * x295)
    x315 = (x296 + x314)
    x316 = (x114 * x298)
    x317 = (x301 + x316)
    x318 = (x130 * x26)
    x319 = (x0 * (((x182 + (4.0 * x296)) + (2.0 * x297)) + (2.0 * x314)))
    x320 = (x317 * x66)
    x321 = (x319 + x320)
    x322 = (x24 * x84)
    x323 = (x130 * x24)
    x324 = (x0 * (((x189 + (5.0 * x301)) + (2.0 * x302)) + (3.0 * x316)))
    x325 = ((x321 * x66) + x324)
    x326 = (x115 * x20)
    x327 = (x326 * x62)
    x328 = (x20 * x62)
    x329 = (x115 * x312)
    x330 = (x195 * x292)
    x331 = (x295 * x67)
    x332 = (x292 * x75)
    x333 = (x126 * x156)
    x334 = (x24 * x67)
    x335 = (x21 * x75)
    x336 = (x132 * x206)
    x337 = (x132 * x215)
    x338 = (x253 * x326)
    x339 = ((x0 * ((x105 + x169) + (2.0 * x291))) + (x114 * x312))
    x340 = ((x0 * (x28 + (3.0 * x40))) + (x225 * x4))
    x341 = ((x0 * (((x178 + (2.0 * x274)) + x300) + x312)) + (x114 * x315))
    x342 = (x114 * x317)
    x343 = (x319 + x342)
    x344 = (x223 * x57)
    x345 = ((x114 * x321) + x324)
    x346 = (x237 * x345)
    x347 = (x4 * x67)
    x348 = (x237 * x4)
    x349 = (x11 * x84)
    x350 = (x67 * x8)
    x351 = (x59 * x8)
    x352 = (x225 * x84)
    x353 = (x132 * x344)
    x354 = (x57 * x8)
    x355 = (x8 * x84)
    x356 = (x115 * x8)
    x357 = (x225 * x67)
    x358 = (x260 * x76)
    x359 = (x11 * x67)
    x360 = ((- x70) - R[2])
    x361 = (x100 * x360)
    x362 = (x361 + x88)
    x363 = (x362 * x67)
    x364 = (x360 * x74)
    x365 = (x363 * x83)
    x366 = (x0 * (x100 + x364))
    x367 = (x362 * x71)
    x368 = (x366 + x367)
    x369 = (x368 * x76)
    x370 = (x57 * x77)
    x371 = ((x0 * ((x112 + (2.0 * x361)) + x87)) + (x368 * x71))
    x372 = (x67 * x77)
    x373 = (x364 * x59)
    x374 = (3.0 * x366)
    x375 = ((x0 * ((x104 + (3.0 * x367)) + x374)) + (x371 * x71))
    x376 = (x59 * x77)
    x377 = (x364 * x57)
    x378 = (x130 * x362)
    x379 = (x368 * x57)
    x380 = (x150 * x360)
    x381 = (x380 + x88)
    x382 = (x115 * x381)
    x383 = (x146 * x362)
    x384 = (x366 + x383)
    x385 = (x130 * x384)
    x386 = (x0 * (((x112 + x151) + x361) + x380))
    x387 = (x384 * x71)
    x388 = (x386 + x387)
    x389 = (x132 * x370)
    x390 = (2.0 * x383)
    x391 = (x0 * (((x159 + x367) + x374) + x390))
    x392 = (x388 * x71)
    x393 = (x391 + x392)
    x394 = ((x0 * (((x166 + x371) + (3.0 * x386)) + (3.0 * x387))) + (x393 * x71))
    x395 = (x115 * x77)
    x396 = (x126 * x186)
    x397 = (x195 * x381)
    x398 = (x381 * x75)
    x399 = (x118 * x75)
    x400 = (x384 * x67)
    x401 = (x120 * x126)
    x402 = (x20 * x253)
    x403 = ((x0 * (x150 + x364)) + (x146 * x381))
    x404 = (x146 * x384)
    x405 = (x386 + x404)
    x406 = (x146 * x388)
    x407 = (x391 + x406)
    x408 = (x0 * (((x214 + (4.0 * x386)) + (2.0 * x387)) + (2.0 * x404)))
    x409 = (x407 * x71)
    x410 = (x408 + x409)
    x411 = (x115 * x403)
    x412 = (x0 * (((x218 + (5.0 * x391)) + (2.0 * x392)) + (3.0 * x406)))
    x413 = ((x410 * x71) + x412)
    x414 = (x344 * x76)
    x415 = (x130 * x223)
    x416 = (x11 * x130)
    x417 = (x254 * x4)
    x418 = ((x0 * ((x112 + x205) + (2.0 * x380))) + (x146 * x403))
    x419 = ((x0 * (((x209 + (2.0 * x366)) + x390) + x403)) + (x146 * x405))
    x420 = (x146 * x407)
    x421 = (x408 + x420)
    x422 = ((x146 * x410) + x412)
    x423 = (x254 * x422)
    # --- assemble the (dipole component, bra, ket) entries ---
    result[(0, 0, 0)] = numpy.sum((x64 * ((x0 * ((((3.0 * x36) + (3.0 * x37)) + (6.0 * x45)) + x52)) + (x4 * x56))))
    result[(0, 0, 1)] = numpy.sum((x68 * x69))
    result[(0, 0, 2)] = numpy.sum((x69 * x72))
    result[(0, 0, 3)] = numpy.sum(((x73 * x74) * x82))
    result[(0, 0, 4)] = numpy.sum(((x66 * x73) * x86))
    result[(0, 0, 5)] = numpy.sum(((x73 * x77) * x91))
    result[(0, 0, 6)] = numpy.sum(((x74 * x93) * x99))
    result[(0, 0, 7)] = numpy.sum(((x100 * x101) * x80))
    result[(0, 0, 8)] = numpy.sum(((x101 * x102) * x89))
    result[(0, 0, 9)] = numpy.sum(((x104 * x77) * x99))
    result[(0, 0, 10)] = numpy.sum(((x106 * x110) * x74))
    result[(0, 0, 11)] = numpy.sum(((x100 * x111) * x93))
    result[(0, 0, 12)] = numpy.sum(((x109 * x80) * x91))
    result[(0, 0, 13)] = numpy.sum(((x102 * x104) * x111))
    result[(0, 0, 14)] = numpy.sum(((x110 * x113) * x77))
    result[(0, 1, 0)] = numpy.sum((x116 * x117))
    result[(0, 1, 1)] = numpy.sum(((x122 * x55) * x74))
    result[(0, 1, 2)] = numpy.sum(((x114 * x55) * x86))
    result[(0, 1, 3)] = numpy.sum(((x128 * x129) * x74))
    result[(0, 1, 4)] = numpy.sum(((x120 * x131) * x35))
    result[(0, 1, 5)] = numpy.sum(((x118 * x133) * x35))
    result[(0, 1, 6)] = numpy.sum(((x137 * x138) * x74))
    result[(0, 1, 7)] = numpy.sum(((x100 * x125) * x139))
    result[(0, 1, 8)] = numpy.sum(((x120 * x139) * x89))
    result[(0, 1, 9)] = numpy.sum(((x118 * x140) * x97))
    result[(0, 1, 10)] = numpy.sum(((x141 * x142) * x74))
    result[(0, 1, 11)] = numpy.sum(((x137 * x143) * x144))
    result[(0, 1, 12)] = numpy.sum(((x127 * x145) * x90))
    result[(0, 1, 13)] = numpy.sum(((x104 * x121) * x143))
    result[(0, 1, 14)] = numpy.sum(((x113 * x118) * x142))
    result[(0, 2, 0)] = numpy.sum((x117 * x147))
    result[(0, 2, 1)] = numpy.sum(((x148 * x149) * x55))
    result[(0, 2, 2)] = numpy.sum(((x154 * x55) * x77))
    result[(0, 2, 3)] = numpy.sum(((x150 * x155) * x35))
    result[(0, 2, 4)] = numpy.sum(((x102 * x156) * x35))
    result[(0, 2, 5)] = numpy.sum(((x129 * x161) * x77))
    result[(0, 2, 6)] = numpy.sum(((x150 * x162) * x97))
    result[(0, 2, 7)] = numpy.sum(((x139 * x152) * x80))
    result[(0, 2, 8)] = numpy.sum(((x102 * x139) * x159))
    result[(0, 2, 9)] = numpy.sum(((x138 * x166) * x77))
    result[(0, 2, 10)] = numpy.sum(((x106 * x142) * x150))
    result[(0, 2, 11)] = numpy.sum(((x143 * x153) * x93))
    result[(0, 2, 12)] = numpy.sum(((x145 * x160) * x81))
    result[(0, 2, 13)] = numpy.sum(((x143 * x166) * x167))
    result[(0, 2, 14)] = numpy.sum(((x142 * x168) * x77))
    result[(0, 3, 0)] = numpy.sum(((x170 * x176) * x74))
    result[(0, 3, 1)] = numpy.sum(((x174 * x178) * x179))
    result[(0, 3, 2)] = numpy.sum(((x170 * x174) * x180))
    result[(0, 3, 3)] = numpy.sum(((x182 * x183) * x185))
    result[(0, 3, 4)] = numpy.sum(((x131 * x178) * x33))
    result[(0, 3, 5)] = numpy.sum(((x184 * x186) * x90))
    result[(0, 3, 6)] = numpy.sum(((x179 * x189) * x31))
    result[(0, 3, 7)] = numpy.sum(((x100 * x182) * x190))
    result[(0, 3, 8)] = numpy.sum(((x178 * x190) * x89))
    result[(0, 3, 9)] = numpy.sum(((x140 * x170) * x31))
    result[(0, 3, 10)] = numpy.sum(((x192 * x193) * x74))
    result[(0, 3, 11)] = numpy.sum(((x180 * x189) * x94))
    result[(0, 3, 12)] = numpy.sum(((x182 * x194) * x90))
    result[(0, 3, 13)] = numpy.sum(((x140 * x178) * x94))
    result[(0, 3, 14)] = numpy.sum(((x113 * x170) * x193))
    result[(0, 4, 0)] = numpy.sum(((x148 * x175) * x196))
    result[(0, 4, 1)] = numpy.sum(((x121 * x150) * x197))
    result[(0, 4, 2)] = numpy.sum(((x118 * x153) * x197))
    result[(0, 4, 3)] = numpy.sum(((x150 * x198) * x33))
    result[(0, 4, 4)] = numpy.sum(((x120 * x156) * x184))
    result[(0, 4, 5)] = numpy.sum(((x118 * x199) * x33))
    result[(0, 4, 6)] = numpy.sum(((x137 * x200) * x201))
    result[(0, 4, 7)] = numpy.sum(((x127 * x152) * x190))
    result[(0, 4, 8)] = numpy.sum(((x120 * x160) * x190))
    result[(0, 4, 9)] = numpy.sum(((x118 * x201) * x202))
    result[(0, 4, 10)] = numpy.sum(((x141 * x150) * x203))
    result[(0, 4, 11)] = numpy.sum(((x137 * x153) * x204))
    result[(0, 4, 12)] = numpy.sum(((x125 * x199) * x94))
    result[(0, 4, 13)] = numpy.sum(((x121 * x166) * x204))
    result[(0, 4, 14)] = numpy.sum(((x118 * x168) * x203))
    result[(0, 5, 0)] = numpy.sum(((x176 * x206) * x77))
    result[(0, 5, 1)] = numpy.sum(((x102 * x174) * x207))
    result[(0, 5, 2)] = numpy.sum(((x174 * x209) * x210))
    result[(0, 5, 3)] = numpy.sum(((x184 * x211) * x81))
    result[(0, 5, 4)] = numpy.sum(((x102 * x212) * x33))
    result[(0, 5, 5)] = numpy.sum(((x185 * x215) * x77))
    result[(0, 5, 6)] = numpy.sum(((x162 * x206) * x31))
    result[(0, 5, 7)] = numpy.sum(((x190 * x209) * x80))
    result[(0, 5, 8)] = numpy.sum(((x102 * x190) * x214))
    result[(0, 5, 9)] = numpy.sum(((x210 * x218) * x31))
    result[(0, 5, 10)] = numpy.sum(((x106 * x193) * x206))
    result[(0, 5, 11)] = numpy.sum(((x162 * x209) * x94))
    result[(0, 5, 12)] = numpy.sum(((x194 * x214) * x81))
    result[(0, 5, 13)] = numpy.sum(((x218 * x219) * x94))
    result[(0, 5, 14)] = numpy.sum(((x193 * x221) * x77))
    result[(0, 6, 0)] = numpy.sum(((x222 * x227) * x74))
    result[(0, 6, 1)] = numpy.sum(((x228 * x229) * x74))
    result[(0, 6, 2)] = numpy.sum(((x100 * x222) * x229))
    result[(0, 6, 3)] = numpy.sum(((x231 * x233) * x74))
    result[(0, 6, 4)] = numpy.sum(((x171 * x180) * x228))
    result[(0, 6, 5)] = numpy.sum(((x171 * x222) * x91))
    result[(0, 6, 6)] = numpy.sum(((x234 * x235) * x74))
    result[(0, 6, 7)] = numpy.sum(((x100 * x231) * x236))
    result[(0, 6, 8)] = numpy.sum(((x228 * x236) * x89))
    result[(0, 6, 9)] = numpy.sum(((x104 * x222) * x235))
    result[(0, 6, 10)] = numpy.sum((x10 * x239))
    result[(0, 6, 11)] = numpy.sum(((x10 * x240) * x72))
    result[(0, 6, 12)] = numpy.sum(((x15 * x231) * x91))
    result[(0, 6, 13)] = numpy.sum(((x104 * x228) * x241))
    result[(0, 6, 14)] = numpy.sum(((x113 * x222) * x242))
    result[(0, 7, 0)] = numpy.sum(((x150 * x226) * x243))
    result[(0, 7, 1)] = numpy.sum(((x150 * x172) * x244))
    result[(0, 7, 2)] = numpy.sum(((x153 * x170) * x245))
    result[(0, 7, 3)] = numpy.sum(((x150 * x232) * x246))
    result[(0, 7, 4)] = numpy.sum(((x156 * x171) * x178))
    result[(0, 7, 5)] = numpy.sum(((x160 * x186) * x232))
    result[(0, 7, 6)] = numpy.sum(((x150 * x189) * x236))
    result[(0, 7, 7)] = numpy.sum(((x13 * x156) * x182))
    result[(0, 7, 8)] = numpy.sum(((x13 * x178) * x199))
    result[(0, 7, 9)] = numpy.sum(((x166 * x170) * x236))
    result[(0, 7, 10)] = numpy.sum(((x10 * x192) * x247))
    result[(0, 7, 11)] = numpy.sum(((x15 * x154) * x189))
    result[(0, 7, 12)] = numpy.sum(((x15 * x182) * x248))
    result[(0, 7, 13)] = numpy.sum(((x15 * x166) * x244))
    result[(0, 7, 14)] = numpy.sum(((x15 * x168) * x243))
    result[(0, 8, 0)] = numpy.sum(((x118 * x226) * x249))
    result[(0, 8, 1)] = numpy.sum(((x121 * x206) * x245))
    result[(0, 8, 2)] = numpy.sum(((x118 * x172) * x250))
    result[(0, 8, 3)] = numpy.sum(((x127 * x211) * x232))
    result[(0, 8, 4)] = numpy.sum(((x120 * x171) * x212))
    result[(0, 8, 5)] = numpy.sum(((x118 * x232) * x251))
    result[(0, 8, 6)] = numpy.sum(((x137 * x206) * x236))
    result[(0, 8, 7)] = numpy.sum(((x125 * x13) * x212))
    result[(0, 8, 8)] = numpy.sum(((x120 * x13) * x252))
    result[(0, 8, 9)] = numpy.sum(((x118 * x218) * x236))
    result[(0, 8, 10)] = numpy.sum(((x141 * x15) * x249))
    result[(0, 8, 11)] = numpy.sum(((x137 * x15) * x250))
    result[(0, 8, 12)] = numpy.sum(((x128 * x15) * x215))
    result[(0, 8, 13)] = numpy.sum(((x122 * x15) * x218))
    result[(0, 8, 14)] = numpy.sum(((x10 * x221) * x255))
    result[(0, 9, 0)] = numpy.sum(((x227 * x256) * x77))
    result[(0, 9, 1)] = numpy.sum(((x102 * x229) * x256))
    result[(0, 9, 2)] = numpy.sum(((x229 * x257) * x77))
    result[(0, 9, 3)] = numpy.sum(((x171 * x258) * x81))
    result[(0, 9, 4)] = numpy.sum(((x171 * x219) * x257))
    result[(0, 9, 5)] = numpy.sum(((x233 * x260) * x77))
    result[(0, 9, 6)] = numpy.sum(((x235 * x256) * x93))
    result[(0, 9, 7)] = numpy.sum(((x236 * x257) * x80))
    result[(0, 9, 8)] = numpy.sum(((x102 * x236) * x260))
    result[(0, 9, 9)] = numpy.sum(((x235 * x261) * x77))
    result[(0, 9, 10)] = numpy.sum(((x106 * x242) * x256))
    result[(0, 9, 11)] = numpy.sum(((x241 * x257) * x93))
    result[(0, 9, 12)] = numpy.sum(((x15 * x260) * x82))
    result[(0, 9, 13)] = numpy.sum(((x10 * x262) * x68))
    result[(0, 9, 14)] = numpy.sum((x10 * x264))
    result[(1, 0, 0)] = numpy.sum((x265 * x267))
    result[(1, 0, 1)] = numpy.sum(((x270 * x52) * x74))
    result[(1, 0, 2)] = numpy.sum(((x265 * x271) * x72))
    result[(1, 0, 3)] = numpy.sum(((x272 * x277) * x278))
    result[(1, 0, 4)] = numpy.sum(((x100 * x272) * x279))
    result[(1, 0, 5)] = numpy.sum(((x272 * x273) * x91))
    result[(1, 0, 6)] = numpy.sum(((x280 * x281) * x282))
    result[(1, 0, 7)] = numpy.sum(((x180 * x276) * x281))
    result[(1, 0, 8)] = numpy.sum(((x279 * x281) * x89))
    result[(1, 0, 9)] = numpy.sum(((x273 * x281) * x283))
    result[(1, 0, 10)] = numpy.sum(((x285 * x287) * x288))
    result[(1, 0, 11)] = numpy.sum(((x144 * x280) * x285))
    result[(1, 0, 12)] = numpy.sum(((x276 * x285) * x91))
    result[(1, 0, 13)] = numpy.sum(((x104 * x270) * x285))
    result[(1, 0, 14)] = numpy.sum(((x113 * x285) * x289))
    result[(1, 1, 0)] = numpy.sum(((x290 * x293) * x74))
    result[(1, 1, 1)] = numpy.sum(((x179 * x295) * x51))
    result[(1, 1, 2)] = numpy.sum(((x180 * x292) * x51))
    result[(1, 1, 3)] = numpy.sum(((x298 * x299) * x44))
    result[(1, 1, 4)] = numpy.sum(((x131 * x295) * x44))
    result[(1, 1, 5)] = numpy.sum(((x133 * x292) * x44))
    result[(1, 1, 6)] = numpy.sum(((x179 * x303) * x96))
    result[(1, 1, 7)] = numpy.sum(((x100 * x298) * x304))
    result[(1, 1, 8)] = numpy.sum(((x295 * x305) * x96))
    result[(1, 1, 9)] = numpy.sum(((x140 * x292) * x96))
    result[(1, 1, 10)] = numpy.sum(((x284 * x306) * x307))
    result[(1, 1, 11)] = numpy.sum(((x180 * x284) * x303))
    result[(1, 1, 12)] = numpy.sum(((x133 * x284) * x298))
    result[(1, 1, 13)] = numpy.sum(((x140 * x284) * x295))
    result[(1, 1, 14)] = numpy.sum(((x113 * x284) * x293))
    result[(1, 2, 0)] = numpy.sum(((x147 * x265) * x308))
    result[(1, 2, 1)] = numpy.sum(((x150 * x279) * x51))
    result[(1, 2, 2)] = numpy.sum(((x154 * x273) * x51))
    result[(1, 2, 3)] = numpy.sum(((x150 * x277) * x309))
    result[(1, 2, 4)] = numpy.sum(((x156 * x269) * x44))
    result[(1, 2, 5)] = numpy.sum(((x248 * x273) * x44))
    result[(1, 2, 6)] = numpy.sum(((x150 * x280) * x310))
    result[(1, 2, 7)] = numpy.sum(((x156 * x276) * x96))
    result[(1, 2, 8)] = numpy.sum(((x199 * x269) * x96))
    result[(1, 2, 9)] = numpy.sum(((x166 * x273) * x310))
    result[(1, 2, 10)] = numpy.sum(((x150 * x287) * x311))
    result[(1, 2, 11)] = numpy.sum(((x154 * x280) * x284))
    result[(1, 2, 12)] = numpy.sum(((x161 * x277) * x284))
    result[(1, 2, 13)] = numpy.sum(((x166 * x279) * x284))
    result[(1, 2, 14)] = numpy.sum(((x168 * x273) * x311))
    result[(1, 3, 0)] = numpy.sum(((x307 * x312) * x313))
    result[(1, 3, 1)] = numpy.sum(((x173 * x179) * x315))
    result[(1, 3, 2)] = numpy.sum(((x173 * x180) * x312))
    result[(1, 3, 3)] = numpy.sum(((x26 * x299) * x317))
    result[(1, 3, 4)] = numpy.sum(((x100 * x315) * x318))
    result[(1, 3, 5)] = numpy.sum(((x133 * x26) * x312))
    result[(1, 3, 6)] = numpy.sum(((x321 * x322) * x74))
    result[(1, 3, 7)] = numpy.sum(((x100 * x317) * x323))
    result[(1, 3, 8)] = numpy.sum(((x24 * x305) * x315))
    result[(1, 3, 9)] = numpy.sum(((x140 * x24) * x312))
    result[(1, 3, 10)] = numpy.sum((x325 * x327))
    result[(1, 3, 11)] = numpy.sum(((x321 * x328) * x85))
    result[(1, 3, 12)] = numpy.sum(((x133 * x21) * x317))
    result[(1, 3, 13)] = numpy.sum(((x140 * x21) * x315))
    result[(1, 3, 14)] = numpy.sum(((x113 * x21) * x329))
    result[(1, 4, 0)] = numpy.sum(((x150 * x313) * x330))
    result[(1, 4, 1)] = numpy.sum(((x173 * x200) * x331))
    result[(1, 4, 2)] = numpy.sum(((x153 * x173) * x332))
    result[(1, 4, 3)] = numpy.sum(((x150 * x298) * x318))
    result[(1, 4, 4)] = numpy.sum(((x26 * x295) * x333))
    result[(1, 4, 5)] = numpy.sum(((x199 * x26) * x292))
    result[(1, 4, 6)] = numpy.sum(((x200 * x303) * x334))
    result[(1, 4, 7)] = numpy.sum(((x24 * x298) * x333))
    result[(1, 4, 8)] = numpy.sum((((x126 * x199) * x24) * x295))
    result[(1, 4, 9)] = numpy.sum(((x166 * x332) * x334))
    result[(1, 4, 10)] = numpy.sum((((x146 * x195) * x306) * x328))
    result[(1, 4, 11)] = numpy.sum(((x153 * x303) * x335))
    result[(1, 4, 12)] = numpy.sum(((x199 * x21) * x298))
    result[(1, 4, 13)] = numpy.sum(((x202 * x21) * x331))
    result[(1, 4, 14)] = numpy.sum(((x168 * x21) * x330))
    result[(1, 5, 0)] = numpy.sum(((x249 * x273) * x313))
    result[(1, 5, 1)] = numpy.sum(((x173 * x206) * x279))
    result[(1, 5, 2)] = numpy.sum(((x173 * x250) * x273))
    result[(1, 5, 3)] = numpy.sum(((x26 * x277) * x336))
    result[(1, 5, 4)] = numpy.sum(((x212 * x26) * x269))
    result[(1, 5, 5)] = numpy.sum(((x26 * x273) * x337))
    result[(1, 5, 6)] = numpy.sum(((x207 * x24) * x280))
    result[(1, 5, 7)] = numpy.sum(((x212 * x24) * x276))
    result[(1, 5, 8)] = numpy.sum(((x24 * x252) * x269))
    result[(1, 5, 9)] = numpy.sum(((x218 * x273) * x322))
    result[(1, 5, 10)] = numpy.sum(((x21 * x249) * x287))
    result[(1, 5, 11)] = numpy.sum(((x21 * x250) * x280))
    result[(1, 5, 12)] = numpy.sum(((x21 * x251) * x277))
    result[(1, 5, 13)] = numpy.sum(((x21 * x218) * x279))
    result[(1, 5, 14)] = numpy.sum(((x221 * x265) * x338))
    result[(1, 6, 0)] = numpy.sum(((x288 * x339) * x340))
    result[(1, 6, 1)] = numpy.sum(((x225 * x282) * x341))
    result[(1, 6, 2)] = numpy.sum(((x144 * x225) * x339))
    result[(1, 6, 3)] = numpy.sum(((x278 * x343) * x344))
    result[(1, 6, 4)] = numpy.sum(((x180 * x223) * x341))
    result[(1, 6, 5)] = numpy.sum(((x223 * x339) * x91))
    result[(1, 6, 6)] = numpy.sum((x346 * x347))
    result[(1, 6, 7)] = numpy.sum(((x343 * x348) * x85))
    result[(1, 6, 8)] = numpy.sum(((x341 * x349) * x89))
    result[(1, 6, 9)] = numpy.sum(((x11 * x283) * x339))
    result[(1, 6, 10)] = numpy.sum((x238 * ((x0 * (((x234 + (6.0 * x319)) + (3.0 * x320)) + (3.0 * x342))) + (x345 * x66))))
    result[(1, 6, 11)] = numpy.sum((x346 * x72))
    result[(1, 6, 12)] = numpy.sum(((x343 * x8) * x91))
    result[(1, 6, 13)] = numpy.sum(((x104 * x341) * x350))
    result[(1, 6, 14)] = numpy.sum(((x113 * x339) * x351))
    result[(1, 7, 0)] = numpy.sum(((x150 * x329) * x340))
    result[(1, 7, 1)] = numpy.sum(((x150 * x315) * x352))
    result[(1, 7, 2)] = numpy.sum(((x154 * x225) * x312))
    result[(1, 7, 3)] = numpy.sum(((x150 * x317) * x353))
    result[(1, 7, 4)] = numpy.sum(((x156 * x223) * x315))
    result[(1, 7, 5)] = numpy.sum(((x161 * x312) * x344))
    result[(1, 7, 6)] = numpy.sum((((x146 * x321) * x348) * x84))
    result[(1, 7, 7)] = numpy.sum(((x11 * x156) * x317))
    result[(1, 7, 8)] = numpy.sum(((x11 * x199) * x315))
    result[(1, 7, 9)] = numpy.sum(((x166 * x312) * x349))
    result[(1, 7, 10)] = numpy.sum((x247 * x325))
    result[(1, 7, 11)] = numpy.sum(((x154 * x321) * x8))
    result[(1, 7, 12)] = numpy.sum(((x161 * x317) * x354))
    result[(1, 7, 13)] = numpy.sum(((x166 * x315) * x355))
    result[(1, 7, 14)] = numpy.sum(((x168 * x312) * x356))
    result[(1, 8, 0)] = numpy.sum(((x206 * x293) * x340))
    result[(1, 8, 1)] = numpy.sum(((x207 * x225) * x295))
    result[(1, 8, 2)] = numpy.sum(((x209 * x292) * x352))
    result[(1, 8, 3)] = numpy.sum(((x298 * x336) * x344))
    result[(1, 8, 4)] = numpy.sum(((x212 * x223) * x295))
    result[(1, 8, 5)] = numpy.sum(((x214 * x292) * x353))
    result[(1, 8, 6)] = numpy.sum(((x11 * x207) * x303))
    result[(1, 8, 7)] = numpy.sum(((x11 * x212) * x298))
    result[(1, 8, 8)] = numpy.sum(((x11 * x252) * x295))
    result[(1, 8, 9)] = numpy.sum(((x218 * x292) * x349))
    result[(1, 8, 10)] = numpy.sum(((x206 * x306) * x356))
    result[(1, 8, 11)] = numpy.sum(((x209 * x303) * x355))
    result[(1, 8, 12)] = numpy.sum(((x298 * x337) * x8))
    result[(1, 8, 13)] = numpy.sum(((x218 * x295) * x355))
    result[(1, 8, 14)] = numpy.sum(((x221 * x293) * x8))
    result[(1, 9, 0)] = numpy.sum(((x256 * x289) * x340))
    result[(1, 9, 1)] = numpy.sum(((x225 * x256) * x270))
    result[(1, 9, 2)] = numpy.sum(((x257 * x273) * x357))
    result[(1, 9, 3)] = numpy.sum(((x223 * x258) * x277))
    result[(1, 9, 4)] = numpy.sum(((x223 * x257) * x279))
    result[(1, 9, 5)] = numpy.sum(((x273 * x344) * x358))
    result[(1, 9, 6)] = numpy.sum(((x256 * x280) * x359))
    result[(1, 9, 7)] = numpy.sum(((x257 * x276) * x349))
    result[(1, 9, 8)] = numpy.sum(((x11 * x260) * x279))
    result[(1, 9, 9)] = numpy.sum(((x262 * x265) * x347))
    result[(1, 9, 10)] = numpy.sum(((x256 * x287) * x351))
    result[(1, 9, 11)] = numpy.sum(((x257 * x280) * x350))
    result[(1, 9, 12)] = numpy.sum(((x277 * x358) * x8))
    result[(1, 9, 13)] = numpy.sum(((x261 * x270) * x8))
    result[(1, 9, 14)] = numpy.sum((x264 * x265))
    result[(2, 0, 0)] = numpy.sum((x267 * x360))
    result[(2, 0, 1)] = numpy.sum(((x271 * x360) * x68))
    result[(2, 0, 2)] = numpy.sum(((x363 * x52) * x77))
    result[(2, 0, 3)] = numpy.sum(((x272 * x364) * x82))
    result[(2, 0, 4)] = numpy.sum(((x102 * x272) * x365))
    result[(2, 0, 5)] = numpy.sum(((x272 * x369) * x370))
    result[(2, 0, 6)] = numpy.sum((((x281 * x364) * x67) * x93))
    result[(2, 0, 7)] = numpy.sum(((x281 * x365) * x80))
    result[(2, 0, 8)] = numpy.sum(((x219 * x281) * x368))
    result[(2, 0, 9)] = numpy.sum(((x281 * x371) * x372))
    result[(2, 0, 10)] = numpy.sum(((x106 * x285) * x373))
    result[(2, 0, 11)] = numpy.sum(((x285 * x363) * x93))
    result[(2, 0, 12)] = numpy.sum(((x285 * x369) * x81))
    result[(2, 0, 13)] = numpy.sum(((x167 * x285) * x371))
    result[(2, 0, 14)] = numpy.sum(((x285 * x375) * x376))
    result[(2, 1, 0)] = numpy.sum(((x116 * x308) * x360))
    result[(2, 1, 1)] = numpy.sum(((x122 * x364) * x51))
    result[(2, 1, 2)] = numpy.sum(((x118 * x365) * x51))
    result[(2, 1, 3)] = numpy.sum(((x128 * x377) * x44))
    result[(2, 1, 4)] = numpy.sum(((x120 * x378) * x44))
    result[(2, 1, 5)] = numpy.sum(((x118 * x309) * x379))
    result[(2, 1, 6)] = numpy.sum(((x137 * x310) * x364))
    result[(2, 1, 7)] = numpy.sum(((x125 * x378) * x96))
    result[(2, 1, 8)] = numpy.sum(((x120 * x304) * x368))
    result[(2, 1, 9)] = numpy.sum(((x118 * x310) * x371))
    result[(2, 1, 10)] = numpy.sum(((x141 * x311) * x364))
    result[(2, 1, 11)] = numpy.sum(((x137 * x284) * x365))
    result[(2, 1, 12)] = numpy.sum(((x128 * x284) * x379))
    result[(2, 1, 13)] = numpy.sum(((x122 * x284) * x371))
    result[(2, 1, 14)] = numpy.sum(((x118 * x311) * x375))
    result[(2, 2, 0)] = numpy.sum(((x290 * x382) * x77))
    result[(2, 2, 1)] = numpy.sum(((x219 * x381) * x51))
    result[(2, 2, 2)] = numpy.sum(((x210 * x384) * x51))
    result[(2, 2, 3)] = numpy.sum(((x155 * x381) * x44))
    result[(2, 2, 4)] = numpy.sum(((x102 * x385) * x44))
    result[(2, 2, 5)] = numpy.sum(((x388 * x389) * x44))
    result[(2, 2, 6)] = numpy.sum(((x162 * x381) * x96))
    result[(2, 2, 7)] = numpy.sum(((x304 * x384) * x80))
    result[(2, 2, 8)] = numpy.sum(((x102 * x304) * x388))
    result[(2, 2, 9)] = numpy.sum(((x210 * x393) * x96))
    result[(2, 2, 10)] = numpy.sum(((x106 * x284) * x382))
    result[(2, 2, 11)] = numpy.sum(((x162 * x284) * x384))
    result[(2, 2, 12)] = numpy.sum(((x155 * x284) * x388))
    result[(2, 2, 13)] = numpy.sum(((x219 * x284) * x393))
    result[(2, 2, 14)] = numpy.sum(((x284 * x394) * x395))
    result[(2, 3, 0)] = numpy.sum(((x243 * x313) * x364))
    result[(2, 3, 1)] = numpy.sum(((x173 * x244) * x364))
    result[(2, 3, 2)] = numpy.sum(((x170 * x173) * x365))
    result[(2, 3, 3)] = numpy.sum(((x246 * x26) * x377))
    result[(2, 3, 4)] = numpy.sum(((x178 * x26) * x378))
    result[(2, 3, 5)] = numpy.sum(((x26 * x379) * x396))
    result[(2, 3, 6)] = numpy.sum(((x189 * x322) * x364))
    result[(2, 3, 7)] = numpy.sum(((x182 * x24) * x378))
    result[(2, 3, 8)] = numpy.sum(((x178 * x323) * x368))
    result[(2, 3, 9)] = numpy.sum(((x170 * x322) * x371))
    result[(2, 3, 10)] = numpy.sum(((x192 * x327) * x360))
    result[(2, 3, 11)] = numpy.sum(((x189 * x21) * x365))
    result[(2, 3, 12)] = numpy.sum(((x21 * x246) * x379))
    result[(2, 3, 13)] = numpy.sum(((x21 * x244) * x371))
    result[(2, 3, 14)] = numpy.sum(((x21 * x243) * x375))
    result[(2, 4, 0)] = numpy.sum(((x118 * x313) * x397))
    result[(2, 4, 1)] = numpy.sum(((x121 * x173) * x398))
    result[(2, 4, 2)] = numpy.sum(((x173 * x399) * x400))
    result[(2, 4, 3)] = numpy.sum(((x125 * x318) * x381))
    result[(2, 4, 4)] = numpy.sum(((x318 * x384) * x401))
    result[(2, 4, 5)] = numpy.sum(((x118 * x318) * x388))
    result[(2, 4, 6)] = numpy.sum(((x137 * x334) * x398))
    result[(2, 4, 7)] = numpy.sum(((x127 * x323) * x384))
    result[(2, 4, 8)] = numpy.sum(((x323 * x388) * x401))
    result[(2, 4, 9)] = numpy.sum(((x334 * x393) * x399))
    result[(2, 4, 10)] = numpy.sum(((x141 * x21) * x397))
    result[(2, 4, 11)] = numpy.sum(((x137 * x335) * x400))
    result[(2, 4, 12)] = numpy.sum(((x198 * x21) * x388))
    result[(2, 4, 13)] = numpy.sum(((x121 * x335) * x393))
    result[(2, 4, 14)] = numpy.sum(((x196 * x394) * x402))
    result[(2, 5, 0)] = numpy.sum(((x313 * x395) * x403))
    result[(2, 5, 1)] = numpy.sum(((x173 * x219) * x403))
    result[(2, 5, 2)] = numpy.sum(((x173 * x210) * x405))
    result[(2, 5, 3)] = numpy.sum(((x155 * x26) * x403))
    result[(2, 5, 4)] = numpy.sum(((x102 * x318) * x405))
    result[(2, 5, 5)] = numpy.sum(((x26 * x389) * x407))
    result[(2, 5, 6)] = numpy.sum(((x162 * x24) * x403))
    result[(2, 5, 7)] = numpy.sum(((x323 * x405) * x80))
    result[(2, 5, 8)] = numpy.sum(((x102 * x323) * x407))
    result[(2, 5, 9)] = numpy.sum(((x322 * x410) * x77))
    result[(2, 5, 10)] = numpy.sum(((x106 * x21) * x411))
    result[(2, 5, 11)] = numpy.sum(((x162 * x21) * x405))
    result[(2, 5, 12)] = numpy.sum(((x155 * x21) * x407))
    result[(2, 5, 13)] = numpy.sum(((x149 * x402) * x410))
    result[(2, 5, 14)] = numpy.sum((x338 * x413))
    result[(2, 6, 0)] = numpy.sum(((x222 * x340) * x373))
    result[(2, 6, 1)] = numpy.sum(((x228 * x357) * x364))
    result[(2, 6, 2)] = numpy.sum(((x222 * x225) * x363))
    result[(2, 6, 3)] = numpy.sum(((x231 * x364) * x414))
    result[(2, 6, 4)] = numpy.sum(((x223 * x228) * x365))
    result[(2, 6, 5)] = numpy.sum(((x222 * x344) * x369))
    result[(2, 6, 6)] = numpy.sum(((x240 * x347) * x360))
    result[(2, 6, 7)] = numpy.sum(((x11 * x231) * x365))
    result[(2, 6, 8)] = numpy.sum(((x228 * x349) * x368))
    result[(2, 6, 9)] = numpy.sum(((x222 * x359) * x371))
    result[(2, 6, 10)] = numpy.sum((x239 * x360))
    result[(2, 6, 11)] = numpy.sum(((x234 * x363) * x8))
    result[(2, 6, 12)] = numpy.sum(((x231 * x354) * x369))
    result[(2, 6, 13)] = numpy.sum(((x228 * x350) * x371))
    result[(2, 6, 14)] = numpy.sum(((x222 * x351) * x375))
    result[(2, 7, 0)] = numpy.sum(((x170 * x340) * x382))
    result[(2, 7, 1)] = numpy.sum(((x178 * x352) * x381))
    result[(2, 7, 2)] = numpy.sum(((x170 * x352) * x384))
    result[(2, 7, 3)] = numpy.sum(((x182 * x353) * x381))
    result[(2, 7, 4)] = numpy.sum(((x178 * x384) * x415))
    result[(2, 7, 5)] = numpy.sum(((x344 * x388) * x396))
    result[(2, 7, 6)] = numpy.sum(((x189 * x349) * x381))
    result[(2, 7, 7)] = numpy.sum(((x11 * x182) * x385))
    result[(2, 7, 8)] = numpy.sum(((x178 * x388) * x416))
    result[(2, 7, 9)] = numpy.sum(((x170 * x349) * x393))
    result[(2, 7, 10)] = numpy.sum(((x192 * x382) * x8))
    result[(2, 7, 11)] = numpy.sum(((x189 * x355) * x384))
    result[(2, 7, 12)] = numpy.sum(((x246 * x354) * x388))
    result[(2, 7, 13)] = numpy.sum(((x178 * x355) * x393))
    result[(2, 7, 14)] = numpy.sum(((x170 * x356) * x394))
    result[(2, 8, 0)] = numpy.sum(((x118 * x340) * x411))
    result[(2, 8, 1)] = numpy.sum(((x122 * x225) * x403))
    result[(2, 8, 2)] = numpy.sum(((x118 * x352) * x405))
    result[(2, 8, 3)] = numpy.sum(((x128 * x344) * x403))
    result[(2, 8, 4)] = numpy.sum(((x120 * x405) * x415))
    result[(2, 8, 5)] = numpy.sum(((x118 * x353) * x407))
    result[(2, 8, 6)] = numpy.sum(((x137 * x349) * x403))
    result[(2, 8, 7)] = numpy.sum(((x11 * x198) * x405))
    result[(2, 8, 8)] = numpy.sum(((x120 * x407) * x416))
    result[(2, 8, 9)] = numpy.sum((((x114 * x410) * x417) * x84))
    result[(2, 8, 10)] = numpy.sum(((x141 * x356) * x403))
    result[(2, 8, 11)] = numpy.sum(((x137 * x355) * x405))
    result[(2, 8, 12)] = numpy.sum(((x128 * x354) * x407))
    result[(2, 8, 13)] = numpy.sum(((x122 * x410) * x8))
    result[(2, 8, 14)] = numpy.sum((x255 * x413))
    result[(2, 9, 0)] = numpy.sum(((x340 * x376) * x418))
    result[(2, 9, 1)] = numpy.sum(((x102 * x357) * x418))
    result[(2, 9, 2)] = numpy.sum(((x225 * x372) * x419))
    result[(2, 9, 3)] = numpy.sum(((x223 * x418) * x82))
    result[(2, 9, 4)] = numpy.sum(((x219 * x223) * x419))
    result[(2, 9, 5)] = numpy.sum(((x414 * x421) * x77))
    result[(2, 9, 6)] = numpy.sum(((x359 * x418) * x93))
    result[(2, 9, 7)] = numpy.sum(((x349 * x419) * x80))
    result[(2, 9, 8)] = numpy.sum(((x149 * x417) * x421))
    result[(2, 9, 9)] = numpy.sum((x347 * x423))
    result[(2, 9, 10)] = numpy.sum(((x106 * x351) * x418))
    result[(2, 9, 11)] = numpy.sum(((x350 * x419) * x93))
    result[(2, 9, 12)] = numpy.sum(((x421 * x8) * x82))
    result[(2, 9, 13)] = numpy.sum((x423 * x68))
    result[(2, 9, 14)] = numpy.sum((x263 * ((x0 * (((x261 + (6.0 * x408)) + (3.0 * x409)) + (3.0 * x420))) + (x422 * x71))))
    return result
def main():
    """Install and initialise the PostgreSQL database for FACT.

    Skips installation when PostgreSQL is already present, runs the
    ``init_postgres.py`` bootstrap script, and (re)creates the
    ``start_fact_database`` convenience symlink.

    Returns:
        0 on success.

    Raises:
        InstallationError: if the database initialisation script fails.
    """
    if postgres_is_installed():
        logging.info('Skipping PostgreSQL installation. Reason: Already installed.')
    else:
        logging.info('Setting up PostgreSQL database')
        install_postgres()
    logging.info('Initializing PostgreSQL database')
    with OperateInDirectory('..'):
        process = run('python3 init_postgres.py', shell=True, text=True, check=False, stderr=PIPE)
        if process.returncode != 0:
            raise InstallationError(f'''Unable to initialize database
{process.stderr}''')
    with OperateInDirectory('../../'):
        # Bug fix: each unlink gets its own suppress block. Previously one
        # suppress wrapped both unlinks, so when 'start_fact_database' was
        # missing the removal of the legacy 'start_fact_db' link was skipped.
        with suppress(FileNotFoundError):
            Path('start_fact_database').unlink()
        with suppress(FileNotFoundError):
            Path('start_fact_db').unlink()
        Path('start_fact_database').symlink_to('src/start_fact_database.py')
    return 0
class TestDevice(object):
    """Unit tests for the SteelSeries Rival 3 driver, using a fake USB HID device.

    NOTE(review): the pytest decorators below were missing from the damaged
    source (only bare ``.parametrize(...)`` fragments remained, which is a
    syntax error). They are restored to the standard ``@pytest.fixture`` /
    ``@pytest.mark.parametrize`` pattern — confirm against the project's
    other device test modules and its pytest import style.
    """
    @pytest.fixture
    def mouse(self):
        """A Mouse wired to a FakeDevice so written HID reports can be read back."""
        settings = mouse_settings.FakeMouseSettings(4152, 47789, rival3.profile)
        return mouse.Mouse(usbhid.FakeDevice(), rival3.profile, settings)
    @pytest.mark.parametrize('value,expected_hid_report', [(200, b'\x02\x00\x0b\x00\x01\x01\x04'), ('200', b'\x02\x00\x0b\x00\x01\x01\x04'), ('200,400', b'\x02\x00\x0b\x00\x02\x01\x04\x08'), ('200,400,800,1600', b'\x02\x00\x0b\x00\x04\x01\x04\x08\x11$')])
    def test_set_sensitivity(self, mouse, value, expected_hid_report):
        mouse.set_sensitivity(value)
        mouse._hid_device.bytes.seek(0)
        hid_report = mouse._hid_device.bytes.read()
        assert (hid_report == expected_hid_report)
    @pytest.mark.parametrize('value,expected_hid_report', [(125, b'\x02\x00\x04\x00\x04'), (250, b'\x02\x00\x04\x00\x03'), (500, b'\x02\x00\x04\x00\x02'), (1000, b'\x02\x00\x04\x00\x01')])
    def test_set_polling_rate(self, mouse, value, expected_hid_report):
        mouse.set_polling_rate(value)
        mouse._hid_device.bytes.seek(0)
        hid_report = mouse._hid_device.bytes.read()
        assert (hid_report == expected_hid_report)
    @pytest.mark.parametrize('value,expected_hid_report', [('#ABCDEF', b'\x02\x00\x05\x00\x01\xab\xcd\xefd'), ('red', b'\x02\x00\x05\x00\x01\xff\x00\x00d')])
    def test_set_z1_color(self, mouse, value, expected_hid_report):
        mouse.set_z1_color(value)
        mouse._hid_device.bytes.seek(0)
        hid_report = mouse._hid_device.bytes.read()
        assert (hid_report == expected_hid_report)
    @pytest.mark.parametrize('value,expected_hid_report', [('#ABCDEF', b'\x02\x00\x05\x00\x02\xab\xcd\xefd'), ('red', b'\x02\x00\x05\x00\x02\xff\x00\x00d')])
    def test_set_z2_color(self, mouse, value, expected_hid_report):
        mouse.set_z2_color(value)
        mouse._hid_device.bytes.seek(0)
        hid_report = mouse._hid_device.bytes.read()
        assert (hid_report == expected_hid_report)
    @pytest.mark.parametrize('value,expected_hid_report', [('#ABCDEF', b'\x02\x00\x05\x00\x03\xab\xcd\xefd'), ('red', b'\x02\x00\x05\x00\x03\xff\x00\x00d')])
    def test_set_z3_color(self, mouse, value, expected_hid_report):
        mouse.set_z3_color(value)
        mouse._hid_device.bytes.seek(0)
        hid_report = mouse._hid_device.bytes.read()
        assert (hid_report == expected_hid_report)
    @pytest.mark.parametrize('value,expected_hid_report', [('#ABCDEF', b'\x02\x00\x05\x00\x04\xab\xcd\xefd'), ('red', b'\x02\x00\x05\x00\x04\xff\x00\x00d')])
    def test_set_logo_color(self, mouse, value, expected_hid_report):
        mouse.set_logo_color(value)
        mouse._hid_device.bytes.seek(0)
        hid_report = mouse._hid_device.bytes.read()
        assert (hid_report == expected_hid_report)
    @pytest.mark.parametrize('value,expected_hid_report', [('rainbow-shift', b'\x02\x00\x06\x00\x00'), ('breath-fast', b'\x02\x00\x06\x00\x01'), ('breath', b'\x02\x00\x06\x00\x02'), ('breath-slow', b'\x02\x00\x06\x00\x03'), ('steady', b'\x02\x00\x06\x00\x04'), ('rainbow-breath', b'\x02\x00\x06\x00\x05'), ('disco', b'\x02\x00\x06\x00\x06')])
    def test_set_light_effect(self, mouse, value, expected_hid_report):
        mouse.set_light_effect(value)
        mouse._hid_device.bytes.seek(0)
        hid_report = mouse._hid_device.bytes.read()
        assert (hid_report == expected_hid_report)
    @pytest.mark.parametrize('value,expected_hid_report', [('default', b'\x02\x00\x07\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x000\x001\x002\x00'), ('buttons(button2=button6)', b'\x02\x00\x07\x00\x01\x00\x06\x00\x03\x00\x04\x00\x05\x000\x001\x002\x00'), ({'buttons': {'button2': 'button6'}}, b'\x02\x00\x07\x00\x01\x00\x06\x00\x03\x00\x04\x00\x05\x000\x001\x002\x00'), ('buttons(ScrollUp=ScrollDown; ScrollDown=ScrollUp)', b'\x02\x00\x07\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x000\x002\x001\x00')])
    def test_set_buttons_mapping(self, mouse, value, expected_hid_report):
        mouse.set_buttons_mapping(value)
        mouse._hid_device.bytes.seek(0)
        hid_report = mouse._hid_device.bytes.read()
        assert (hid_report == expected_hid_report)
    def test_save(self, mouse):
        mouse.save()
        mouse._hid_device.bytes.seek(0)
        hid_report = mouse._hid_device.bytes.read()
        assert (hid_report == b'\x02\x00\t\x00')
    def test_firmware_version(self, mouse):
        mouse.firmware_version_tuple
        mouse._hid_device.bytes.seek(0)
        hid_report = mouse._hid_device.bytes.read()
        assert (hid_report == b'\x02\x00\x10\x00')
class CLICommand(RegionalCommand):
    """Report every internet gateway in a region with its available VPC attachments."""

    def regional_execute(self, session, acct, region):
        """Return one text line per internet gateway in *region* for account *acct*."""
        report = io.StringIO()
        ec2 = session.resource('ec2', region_name=region)
        for gateway in ec2.internet_gateways.all():
            # Only attachments in the 'available' state are reported.
            attached_vpcs = [att['VpcId'] for att in gateway.attachments if att['State'] == 'available']
            line = f'{acct}/{region}: id={gateway.internet_gateway_id} attachments={len(attached_vpcs)}'
            if attached_vpcs:
                line += f" vpcs={', '.join(attached_vpcs)}"
            print(line, file=report)
        return report.getvalue()
def lazy_import():
    """Import the TLS bulk certificate model classes and publish them as module globals.

    Kept inside a function so the model modules are loaded on first use
    (presumably to avoid import cycles in the generated client — the usual
    reason for this pattern in OpenAPI-generated code).
    """
    from fastly.model.relationships_for_tls_bulk_certificate import RelationshipsForTlsBulkCertificate
    from fastly.model.tls_bulk_certificate_data import TlsBulkCertificateData
    from fastly.model.tls_bulk_certificate_response_attributes import TlsBulkCertificateResponseAttributes
    from fastly.model.tls_bulk_certificate_response_data_all_of import TlsBulkCertificateResponseDataAllOf
    from fastly.model.type_tls_bulk_certificate import TypeTlsBulkCertificate
    globals().update(
        RelationshipsForTlsBulkCertificate=RelationshipsForTlsBulkCertificate,
        TlsBulkCertificateData=TlsBulkCertificateData,
        TlsBulkCertificateResponseAttributes=TlsBulkCertificateResponseAttributes,
        TlsBulkCertificateResponseDataAllOf=TlsBulkCertificateResponseDataAllOf,
        TypeTlsBulkCertificate=TypeTlsBulkCertificate,
    )
class Game(object):
    """State and rules for a game of Klondike solitaire.

    Piles:
      - ``stock``: face-down draw pile.
      - ``waste``: face-up cards dealt from the stock.
      - ``tableau``: seven piles; moves follow alternating colors, descending rank.
      - ``foundations``: four piles built up from Ace per suit.

    Illegal moves raise ``InvalidMove``.
    """
    def __init__(self):
        # Deal a fresh shuffled game: tableau piles of 1..7 cards with only
        # the top card face up; the remaining cards become the stock.
        deck = Deck()
        deck.shuffle()
        cards = list(deck)
        self.waste = []
        self.tableau = []
        for n in range(1, 8):
            self.tableau.append([cards.pop() for _ in range(n)])
        for pile in self.tableau:
            pile[(- 1)].face_up = True
        self.stock = list(cards)
        self.foundations = [[], [], [], []]
    def _reset_game_to_almost_won_state(self):
        """Test helper: foundations complete except one card, which sits in the waste."""
        deck = Deck()
        cards = list(deck)
        for c in cards:
            c.face_up = True
        self.waste = []
        self.tableau = []
        for n in range(1, 8):
            self.tableau.append([])
        # Three full 13-card foundations; the fourth is missing its last card.
        self.foundations = [cards[0:13], cards[13:(13 * 2)], cards[(13 * 2):(13 * 3)], cards[(13 * 3):((13 * 4) - 1)]]
        self.stock = []
        self.waste = [cards[(- 1)]]
    def deal_from_stock(self):
        """Turn the top stock card face up onto the waste; raise if the stock is empty."""
        if (not self.stock):
            raise InvalidMove('No cards in stock')
        self.waste.append(self.stock.pop())
        self.waste[(- 1)].face_up = True
    def restore_stock(self):
        """Flip the whole waste back into the stock, face down, in reversed order."""
        self.stock[:] = list(reversed(self.waste))
        for card in self.stock:
            card.face_up = False
        self.waste[:] = []
    def _is_valid_move_to_tableau(self, source_card, target_card):
        """A King may go on an empty pile; otherwise both cards must be face up and
        the source must be one rank below the target with the opposite suit color."""
        if (target_card is None):
            return (source_card.rank == 'K')
        if ((not source_card.face_up) or (not target_card.face_up)):
            return False
        diff = rank_diff(source_card.rank, target_card.rank)
        return ((diff == 1) and (suit_color(source_card.suit) != suit_color(target_card.suit)))
    def can_move_card_to_tableau(self, card, tableau_index):
        """True when *card* may be placed on tableau pile *tableau_index*."""
        assert (tableau_index in range(7))
        target_pile = self.tableau[tableau_index]
        target_card = (target_pile[(- 1)] if target_pile else None)
        return self._is_valid_move_to_tableau(card, target_card)
    def move_from_waste_to_tableau(self, target_index):
        """Move the top waste card onto tableau pile *target_index*, or raise InvalidMove."""
        assert (target_index in range(7))
        target_pile = self.tableau[target_index]
        target_card = (target_pile[(- 1)] if target_pile else None)
        if (self.waste and self._is_valid_move_to_tableau(self.waste[(- 1)], target_card)):
            target_pile.append(self.waste.pop())
        else:
            raise InvalidMove()
    def can_move_from_waste_to_tableau(self, tableau_index):
        """True when the top waste card could legally move to pile *tableau_index*."""
        if self.waste:
            return self.can_move_card_to_tableau(self.waste[(- 1)], tableau_index)
        return False
    def move_tableau_pile(self, src_index, target_index):
        """Move the longest legally movable run from one tableau pile to another.

        Scans the source pile from the top down and moves everything from the
        first card that fits the target; raises InvalidMove when nothing fits.
        """
        assert (src_index in range(7)), ('Invalid index: %r' % src_index)
        assert (target_index in range(7)), ('Invalid index: %r' % target_index)
        if (src_index == target_index):
            raise InvalidMove('Source is same as destination')
        (source_pile, target_pile) = (self.tableau[src_index], self.tableau[target_index])
        target_card = (target_pile[(- 1)] if target_pile else None)
        for (index, card) in list(enumerate(source_pile))[::(- 1)]:
            if self._is_valid_move_to_tableau(card, target_card):
                to_move = source_pile[index:]
                target_pile.extend(to_move)
                for _ in range(len(to_move)):
                    source_pile.pop()
                return
        raise InvalidMove()
    def _find_foundation_pile(self, card_to_move):
        """Return the foundation pile that accepts *card_to_move*, or None.

        An Ace matches any empty pile; otherwise the card must follow its own
        suit one rank above the pile's current top card.
        """
        for pile in self.foundations:
            if any([((not pile) and (card_to_move.rank == 'A')), (pile and (card_to_move.suit == pile[(- 1)].suit) and (rank_diff(card_to_move.rank, pile[(- 1)].rank) == (- 1)))]):
                return pile
    def move_to_foundation_from_waste(self):
        """Move the top waste card to its foundation pile, or raise InvalidMove."""
        if (not self.waste):
            raise InvalidMove()
        foundation_pile = self._find_foundation_pile(self.waste[(- 1)])
        if (foundation_pile is None):
            raise InvalidMove()
        foundation_pile.append(self.waste.pop())
    def can_move_to_foundation_from_waste(self) -> bool:
        """True when the top waste card has a matching foundation pile."""
        if (not self.waste):
            return False
        foundation_pile = self._find_foundation_pile(self.waste[(- 1)])
        return (foundation_pile is not None)
    def move_to_foundation_from_tableau(self, index):
        """Move the face-up top card of tableau pile *index* to its foundation."""
        assert (index in range(7)), ('Invalid index: %r' % index)
        pile = self.tableau[index]
        if (not pile):
            raise InvalidMove()
        card_to_move = pile[(- 1)]
        if (not card_to_move.face_up):
            raise InvalidMove()
        foundation_pile = self._find_foundation_pile(card_to_move)
        if (foundation_pile is None):
            raise InvalidMove()
        foundation_pile.append(pile.pop())
    def can_move_to_foundation_from_tableau(self, index) -> bool:
        """True when the top card of tableau pile *index* can move to a foundation."""
        assert (index in range(7)), ('Invalid index: %r' % index)
        pile = self.tableau[index]
        if (not pile):
            return False
        card_to_move = pile[(- 1)]
        if (not card_to_move.face_up):
            return False
        foundation_pile = self._find_foundation_pile(card_to_move)
        return (foundation_pile is not None)
    def won(self):
        """The game is won when every foundation holds all 13 cards of a suit."""
        return all(((len(pile) == 13) for pile in self.foundations))
class WsCom():
    """Bridges a websocket event loop and blocking consumer code via two queues."""

    # Class-level queues shared by all instances; kept class-level because
    # external code may access WsCom.recvQ / WsCom.sendQ directly.
    recvQ = Queue()
    sendQ = Queue()
    socket = None

    def readline(self):
        """Block until the next incoming message is available."""
        return self.recvQ.get()

    def queue(self, what):
        """Serialize *what* (unless already a str) and schedule it for sending.

        NOTE(review): ``loop`` is assumed to be a module-level asyncio event
        loop — confirm it is defined at file scope.
        """
        w = what if isinstance(what, str) else json.dumps(what)
        # Bug fix: `sendQ` was referenced unqualified (a NameError at runtime
        # unless a module global shadowed it); the class attribute is reached
        # through `self`.
        asyncio.run_coroutine_threadsafe(self.sendQ.put(w), loop)

    def put(self, what):
        """Enqueue an incoming message for readline() consumers."""
        self.recvQ.put(what)
class MockedTestCase(ServicePlanTestCase):
    """ServicePlanTestCase with every hook replaced by a MagicMock, so tests can
    assert which lifecycle hooks the plan runner invoked and in what order."""
    server_class = _TestServiceServer
    server_settings = {}
    add_error = mock.MagicMock()
    set_up_test_fixture = mock.MagicMock()
    tear_down_test_fixture = mock.MagicMock()
    set_up_test_case = mock.MagicMock()
    tear_down_test_case = mock.MagicMock()
    set_up_test_case_action = mock.MagicMock()
    tear_down_test_case_action = mock.MagicMock()
    _run_test_case = mock.MagicMock()
    setUpClass = mock.MagicMock()
    # Attach the real underlying function — presumably so code that unwraps
    # the classmethod via __func__ still reaches the original implementation;
    # confirm against the plan-runner's dispatch code.
    setUpClass.__func__ = ServicePlanTestCase.setUpClass.__func__
    tearDownClass = mock.MagicMock()
    tearDownClass.__func__ = ServicePlanTestCase.tearDownClass.__func__
    _all_directives = [cast(Type[Directive], mock.MagicMock())]
    # NOTE(review): `reset` takes `cls` but carries no @classmethod decorator,
    # so callers must invoke it as MockedTestCase.reset(MockedTestCase) or on
    # the class object explicitly — possibly a stripped @classmethod; confirm.
    def reset(cls):
        """Re-create every mock so state does not leak between tests."""
        cls._test_fixture_setup_called = {}
        cls._test_fixture_setup_succeeded = {}
        cls.add_error = mock.MagicMock()
        cls.set_up_test_fixture = mock.MagicMock()
        cls.tear_down_test_fixture = mock.MagicMock()
        cls.set_up_test_case = mock.MagicMock()
        cls.tear_down_test_case = mock.MagicMock()
        cls.set_up_test_case_action = mock.MagicMock()
        cls.tear_down_test_case_action = mock.MagicMock()
        cls._run_test_case = mock.MagicMock()
        cls._all_directives = [cast(Type[Directive], mock.MagicMock())]
        cls._all_directives[0].return_value.assert_test_fixture_results = mock.MagicMock()
class AnyDcPermission(DcBasePermission):
    """Allows access when the user holds the permission in at least one datacenter;
    staff users always pass. Subclasses set ``permission`` (and ``admin_required``)."""

    admin_required = False
    permission = None

    def has_permission(self, request, view, args, kwargs):
        # The matching datacenters are recorded on the request before the
        # staff short-circuit, preserving the original side-effect ordering.
        request.dcs = request.user.get_permission_dcs(self.permission.name, admin_required=self.admin_required)
        return True if request.user.is_staff else bool(request.dcs)
class StopWatchRecorder():
    """Accumulates named timing intervals measured with a StopWatch."""

    def __init__(self):
        self.stop_watch = StopWatch()
        self.recorded_timings = []  # list of (name, elapsed_seconds) pairs
        self.started = None         # name of the interval currently being timed

    def stop(self):
        """Close the running interval without opening a new one."""
        self.start(None)

    def start(self, name: str):
        """Finish the running interval (if any) and begin timing *name*."""
        elapsed = self.stop_watch.get_elapsed_seconds(reset=True)
        if self.started:
            self.recorded_timings.append((self.started, elapsed))
        self.started = name

    def __str__(self):
        grand_total = sum(seconds for _, seconds in self.recorded_timings)
        entries = self.recorded_timings + [('total', grand_total)]
        return ', '.join(f'{label}: {seconds:.6f}s' for (label, seconds) in entries)
def extractChronaZero(item):
    """Build a release message for a Chrona Zero feed item.

    Returns None for previews/unparseable titles, False for unknown tags,
    otherwise the result of buildReleaseMessageWithType.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol or frag) or 'preview' in item['title'].lower():
        return None
    # (feed tag to match, canonical series name)
    series_by_tag = [
        ('tensei jinsei', 'Cheat Aru Kedo Mattari KurashitaiTensei Jinsei o Tanoshimou!'),
        ('Level up by walking', 'Level up By Walking: in 10 thousand steps I will be level 10000'),
        ('When you actually went to be another world not as the Hero but as the Slave and then...',
         'When you actually went to be another world not as the Hero but as the Slave and then...'),
    ]
    for tag, series_name in series_by_tag:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix)
    return False
def test_instantiate_existing_filter(w3, sleep_interval, wait_for_block, filter_id):
    """Re-attaching to an existing filter must use the keyword-only filter_id,
    and the re-attached filter must report subsequent new block hashes."""
    # Positional or mixed argument usage is rejected with TypeError.
    with pytest.raises(TypeError):
        w3.eth.filter('latest', filter_id)
    with pytest.raises(TypeError):
        w3.eth.filter('latest', filter_id=filter_id)
    with pytest.raises(TypeError):
        w3.eth.filter(filter_params='latest', filter_id=filter_id)
    block_filter = w3.eth.filter(filter_id=filter_id)
    start_block = w3.eth.block_number
    wait_for_block(w3, start_block + 3)
    collected_hashes = []
    with Timeout(5) as timeout:
        while len(collected_hashes) < 3:
            collected_hashes.extend(block_filter.get_new_entries())
            timeout.sleep(sleep_interval())
    assert len(collected_hashes) == 3
    expected_hashes = [w3.eth.get_block(n + 1).hash for n in range(start_block, start_block + 3)]
    assert collected_hashes == expected_hashes
def create_analysis_entries(file_object: FileObject, fo_backref: FileObjectEntry) -> list[AnalysisEntry]:
    """Convert the processed_analysis dict of *file_object* into AnalysisEntry rows,
    all back-referencing *fo_backref*."""
    entries = []
    for plugin_name, analysis_data in file_object.processed_analysis.items():
        entries.append(
            AnalysisEntry(
                uid=file_object.uid,
                plugin=plugin_name,
                plugin_version=analysis_data.get('plugin_version'),
                system_version=analysis_data.get('system_version'),
                analysis_date=analysis_data.get('analysis_date'),
                summary=_sanitize_list(analysis_data.get('summary', [])),
                tags=analysis_data.get('tags'),
                result=sanitize(analysis_data.get('result', {})),
                file_object=fo_backref,
            )
        )
    return entries
def main(unused_argv):
    """Train and evaluate a pre-made DNNClassifier on MNIST (TF 1.x estimator API)."""
    banner = '-' * 64
    print(banner)
    print('TEST INFO - ESTIMATOR')
    print(banner)
    print(f'TF version:\t {tf.__version__}')
    print('Dataset:\t MNIST')
    print('Model:\t pre-made estimator DNNClassifier')
    gpu_name = tf.test.gpu_device_name()
    if gpu_name == '/device:GPU:0':
        print(f'Found GPU at:\t {gpu_name}')
    else:
        print("Found CPU at:\t '[/cpu:0]'")
    print('=' * 64)
    mnist = tf.contrib.learn.datasets.load_dataset('mnist')
    train_images = mnist.train.images
    train_labels = np.asarray(mnist.train.labels, dtype=np.int32)
    test_images = mnist.test.images
    test_labels = np.asarray(mnist.test.labels, dtype=np.int32)
    # 28x28 grayscale images are fed as a single numeric feature column 'x'.
    feature_columns = [tf.feature_column.numeric_column('x', shape=[28, 28])]
    classifier = tf.estimator.DNNClassifier(feature_columns=feature_columns, hidden_units=[512], activation_fn=tf.nn.relu, optimizer='RMSProp', n_classes=10)
    train_input_fn = tf.estimator.inputs.numpy_input_fn(x={'x': train_images}, y=train_labels, batch_size=100, num_epochs=1, shuffle=True)
    eval_input_fn = tf.estimator.inputs.numpy_input_fn(x={'x': test_images}, y=test_labels, num_epochs=1, shuffle=False)
    train_spec = tf.estimator.TrainSpec(input_fn=train_input_fn, max_steps=3000)
    eval_spec = tf.estimator.EvalSpec(input_fn=eval_input_fn)
    tf.estimator.train_and_evaluate(classifier, train_spec, eval_spec)
# NOTE(review): this parametrize decorator had lost its `@pytest.mark.parametrize`
# prefix (a bare tuple expression remained); restored — confirm the module's
# import style (`pytest.mark` vs `from pytest import mark`).
@pytest.mark.parametrize('max_failure_rate', (0.5, 1.0))
def test_failure_rate(max_failure_rate: float, tmpdir: Path) -> None:
    """The sweeper surfaces the underlying failure only when the failure-rate
    budget is actually exceeded (max_failure_rate < 1.0)."""
    cmd = [
        sys.executable,
        'example/my_app.py',
        '-m',
        f'hydra.sweep.dir={tmpdir}',
        'hydra.sweeper.optim.budget=2',
        'hydra.sweeper.optim.num_workers=2',
        f'hydra.sweeper.optim.max_failure_rate={max_failure_rate}',
        'error=true',
    ]
    (out, err) = run_process(cmd, print_error=False, raise_exception=False)
    assert ('Returning infinity for failed experiment' in out)
    error_string = 'RuntimeError: cfg.error is True'
    if (max_failure_rate < 1.0):
        assert (error_string in err)
    else:
        assert (error_string not in err)
class ConstIntArg(ConstArg):
    """A constant integer argument, formatted with the printf-style %d specifier."""

    def __init__(self, value):
        super().__init__(value)

    def cformat(self):
        """C format specifier for this argument type."""
        return '%d'

    def format(self):
        """The value rendered as a decimal string."""
        return f'{self.value:d}'

    def __str__(self):
        return f'Int({self.format()})'

    def is_one(self):
        return self.value == 1

    def is_zero(self):
        return self.value == 0
def do_bintest(cnarr, segments=None, alpha=0.005, target_only=False):
    """Z-test each bin's log2 residual against its segment; return significant bins.

    Args:
        cnarr: bin-level copy-number array (project type with pandas-like API).
        segments: optional segmentation used to compute per-bin residuals.
        alpha: p-value threshold below which a bin counts as a hit.
        target_only: if True, drop off-target ("antitarget") bins before testing.

    Returns:
        The subset of *cnarr* whose 'p_bintest' value is below *alpha*.
    """
    cnarr = cnarr.copy()
    resid = cnarr.residuals(segments)
    if (not resid.index.is_unique):
        # Overlapping segments can assign a bin more than one residual;
        # ~duplicated() keeps only the first occurrence of each index.
        dup_idx = resid.index.duplicated(keep=False)
        logging.warning('Segments may overlap at %d bins; dropping duplicate values', dup_idx.sum())
        logging.debug('Duplicated indices: %s', ' '.join(map(str, resid[dup_idx].head(50))))
        resid = resid[(~ resid.index.duplicated())]
        cnarr = cnarr.as_dataframe(cnarr.data.loc[resid.index])
    if (len(cnarr) != len(resid)):
        # Bins without a covering segment have no residual; restrict to those that do.
        logging.info('Segments do not cover all bins (%d), only %d of them', len(cnarr), len(resid))
        cnarr = cnarr.as_dataframe(cnarr.data.loc[resid.index])
    cnarr['log2'] = resid
    cnarr['probes'] = 1
    if target_only:
        antitarget_idx = cnarr['gene'].isin(params.ANTITARGET_ALIASES)
        if antitarget_idx.any():
            logging.info('Ignoring %d off-target bins', antitarget_idx.sum())
            cnarr = cnarr[(~ antitarget_idx)]
    cnarr['p_bintest'] = z_prob(cnarr)
    is_sig = (cnarr['p_bintest'] < alpha)
    logging.info('Significant hits in {}/{} bins ({:.3g}%)'.format(is_sig.sum(), len(is_sig), ((100 * is_sig.sum()) / len(is_sig))))
    hits = cnarr[is_sig]
    return hits
def default_serialize_error(req: Request, resp: Response, exception: HTTPError):
    """Render *exception* onto *resp* as JSON or XML according to the client's Accept header.

    XML variants are preferred in the negotiation tuple; when no listed type
    matches, structured-syntax suffixes ('+json'/'+xml') are consulted. The
    'Vary: Accept' header is always appended.
    """
    media_type = req.client_prefers((MEDIA_XML, 'text/xml', MEDIA_JSON))
    if media_type is None:
        # Fall back to suffix matching, e.g. application/vnd.example+json.
        accept = req.accept.lower()
        if '+json' in accept:
            media_type = MEDIA_JSON
        elif '+xml' in accept:
            media_type = MEDIA_XML
    if media_type is not None:
        if media_type == MEDIA_JSON:
            handler, _, _ = resp.options.media_handlers._resolve(MEDIA_JSON, MEDIA_JSON, raise_not_found=False)
            resp.data = exception.to_json(handler)
        else:
            resp.data = exception.to_xml()
        resp.content_type = media_type
    resp.append_header('Vary', 'Accept')
class UserFavouriteSessionList(ResourceList):
    """Read-only list API for user-favourite sessions, filterable by user, session, or event."""
    def query(self, view_kwargs):
        """Build the base query for the requested filter, enforcing access rules."""
        query_ = UserFavouriteSession.query
        if view_kwargs.get('user_id'):
            user = safe_query_kwargs(User, view_kwargs, 'user_id')
            # Another user's favourites are visible only to admins or when
            # that user's profile is public.
            if ((user != current_user) and (not ((is_logged_in() and has_access('is_admin')) or user.is_profile_public))):
                raise ForbiddenError({'pointer': 'user_id'})
            query_ = query_.filter_by(user_id=user.id)
        elif view_kwargs.get('session_id'):
            session = safe_query_kwargs(Session, view_kwargs, 'session_id')
            query_ = query_.filter_by(session_id=session.id)
        elif view_kwargs.get('event_id'):
            event = safe_query_kwargs(Event, view_kwargs, 'event_id')
            query_ = query_.join(UserFavouriteSession.session).filter_by(event_id=event.id)
        elif (not has_access('is_admin')):
            # Unfiltered listing across all users is admin-only.
            raise ForbiddenError({'pointer': 'user_id'}, 'Admin Access Required')
        return query_
    methods = ['GET']
    schema = UserFavouriteSessionSchema
    # flask-rest-jsonapi data layer; 'query' references the method defined above.
    data_layer = {'session': db.session, 'model': UserFavouriteSession, 'methods': {'query': query}}
class MonitoringService():
    """Runs Evidently model monitoring over sliding windows of incoming rows and
    exports the computed metrics as Prometheus gauges."""
    datasets: List[str]
    metric: Dict[(str, prometheus_client.Gauge)]
    last_run: Optional[datetime.datetime]
    reference: Dict[(str, pd.DataFrame)]          # per-dataset reference frames
    current: Dict[(str, Optional[pd.DataFrame])]  # per-dataset sliding windows
    monitoring: Dict[(str, ModelMonitoring)]
    calculation_period_sec: float = 15            # minimum seconds between runs per dataset
    window_size: int
    def __init__(self, datasets: Dict[(str, LoadedDataset)], window_size: int):
        self.reference = {}
        self.monitoring = {}
        self.current = {}
        self.column_mapping = {}
        self.window_size = window_size
        for dataset_info in datasets.values():
            self.reference[dataset_info.name] = dataset_info.references
            self.monitoring[dataset_info.name] = ModelMonitoring(monitors=[EVIDENTLY_MONITORS_MAPPING[k]() for k in dataset_info.monitors], options=[])
            self.column_mapping[dataset_info.name] = dataset_info.column_mapping
        self.metrics = {}
        self.next_run_time = {}
        # NOTE(review): the exported reference-dataset hash is computed from the
        # hard-coded 'bike_random_forest' dataset only — confirm this is intended
        # when other datasets are configured.
        self.hash = hashlib.sha256(pd.util.hash_pandas_object(self.reference['bike_random_forest']).values).hexdigest()
        self.hash_metric = prometheus_client.Gauge('evidently:reference_dataset_hash', '', labelnames=['hash'])
    def iterate(self, dataset_name: str, new_rows: pd.DataFrame):
        """Append *new_rows* to the dataset's window and, when enough data has
        accumulated and the rate limit allows, recompute and export metrics."""
        window_size = self.window_size
        if (dataset_name in self.current):
            current_data = pd.concat([self.current[dataset_name], new_rows], ignore_index=True)
        else:
            current_data = new_rows
        current_size = current_data.shape[0]
        if (current_size > self.window_size):
            # Trim the oldest rows so the window never exceeds window_size.
            current_data.drop(index=list(range(0, (current_size - self.window_size))), inplace=True)
            current_data.reset_index(drop=True, inplace=True)
        self.current[dataset_name] = current_data
        if (current_size < window_size):
            logging.info(f'Not enough data for measurement: {current_size} of {window_size}. Waiting more data')
            return
        next_run_time = self.next_run_time.get(dataset_name)
        if ((next_run_time is not None) and (next_run_time > datetime.datetime.now())):
            logging.info('Next run for dataset %s at %s', dataset_name, next_run_time)
            return
        self.next_run_time[dataset_name] = (datetime.datetime.now() + datetime.timedelta(seconds=self.calculation_period_sec))
        self.monitoring[dataset_name].execute(self.reference[dataset_name], current_data, self.column_mapping[dataset_name])
        self.hash_metric.labels(hash=self.hash).set(1)
        for (metric, value, labels) in self.monitoring[dataset_name].metrics():
            metric_key = f'evidently:{metric.name}'
            found = self.metrics.get(metric_key)
            if (not labels):
                labels = {}
            labels['dataset_name'] = dataset_name
            if isinstance(value, str):
                # Gauges only accept numbers; skip string-valued metrics.
                continue
            if (found is None):
                # Lazily create one gauge per metric name with a stable label set.
                found = prometheus_client.Gauge(metric_key, '', list(sorted(labels.keys())))
                self.metrics[metric_key] = found
            try:
                found.labels(**labels).set(value)
            except ValueError as error:
                # Bug fix: the format string had one placeholder but two args,
                # so the log call itself raised internally and the message was lost.
                logging.error('Value error for metric %s, error: %s', metric_key, error)
def validate_nursing_tasks(document):
    """Block submission of *document* while mandatory Nursing Tasks remain open.

    Returns True when the checklist validation is disabled in Healthcare
    Settings or when no mandatory task is pending; otherwise calls
    frappe.throw (which raises) listing links to the offending tasks.
    """
    if not frappe.db.get_single_value('Healthcare Settings', 'validate_nursing_checklists'):
        return True
    pending_tasks = frappe.get_all(
        'Nursing Task',
        filters={
            'reference_name': document.name,
            'mandatory': 1,
            'status': ['not in', ['Completed', 'Cancelled']],
        },
    )
    if not pending_tasks:
        return True
    task_links = ', '.join(get_link_to_form('Nursing Task', task.name) for task in pending_tasks)
    frappe.throw(_('Please complete linked Nursing Tasks before submission: {}').format(task_links))
class _EventItem():
    """One named event (possibly fired multiple times) with dict-style access
    to the data of its first firing and list-style access to all firings."""

    def __init__(self, name: str, address: Optional[str], event_data: List, pos: Tuple) -> None:
        self.name = name            # event name
        self.address = address      # emitting contract address, if known
        self._ordered = event_data  # one mapping per time the event fired
        self.pos = pos

    def __getitem__(self, key: Union[(int, str)]) -> List:
        """Look up a firing by int index, or a data field of the first firing by name."""
        if not isinstance(key, (int, str)):
            raise TypeError(f"Invalid key type '{type(key)}' - can only use strings or integers")
        if isinstance(key, int):
            try:
                return self._ordered[key]
            except IndexError:
                raise EventLookupError(f"Index out of range - only {len(self._ordered)} '{self.name}' events fired")
        first_firing = self._ordered[0]
        if key in first_firing:
            return first_firing[key]
        # Indexed topics are stored under a "<name> (indexed)" key.
        if f'{key} (indexed)' in first_firing:
            return first_firing[f'{key} (indexed)']
        valid_keys = ', '.join(self.keys())
        raise EventLookupError(f"Unknown key '{key}' - the '{self.name}' event includes these keys: {valid_keys}")

    def __contains__(self, name: str) -> bool:
        return name in self._ordered[0]

    def __len__(self) -> int:
        return len(self._ordered)

    def __repr__(self) -> str:
        return str(self)

    def __str__(self) -> str:
        if len(self._ordered) == 1:
            return str(self._ordered[0])
        return str([firing[0] for firing in self._ordered])

    def __iter__(self) -> Iterator:
        return iter(self._ordered)

    def __eq__(self, other: object) -> bool:
        if len(self._ordered) != 1:
            return other == self._ordered
        single = self._ordered[0]
        if isinstance(other, (tuple, list, ReturnValue)):
            # Sequences compare against the single firing's values.
            return single.values() == other
        return other == single

    def items(self) -> ReturnValue:
        return ReturnValue([(key, self[key]) for key in self.keys()])

    def keys(self) -> ReturnValue:
        return ReturnValue([key.replace(' (indexed)', '') for key in self._ordered[0].keys()])

    def values(self) -> ReturnValue:
        return ReturnValue(self._ordered[0].values())
# NOTE(review): the registration decorator had lost its '@' and receiver (only
# `_type(OSPF_EXTENDED_PREFIX_SID_SUBTLV)` remained, a NameError at import);
# restored to the TypeDisp registration idiom used by ryu's OSPF TLV classes —
# confirm against upstream ryu/lib/packet/ospf.py.
@ExtendedPrefixTLV.register_type(OSPF_EXTENDED_PREFIX_SID_SUBTLV)
class PrefixSIDSubTLV(ExtendedPrefixTLV):
    """Prefix SID Sub-TLV of the OSPF Extended Prefix TLV (segment routing)."""
    _VALUE_PACK_STR = '!HHBBBBHHI'
    _VALUE_PACK_LEN = struct.calcsize(_VALUE_PACK_STR)
    _VALUE_FIELDS = ['flags', 'mt_id', 'algorithm', '_pad', 'range_size', '_pad', 'index']

    def __init__(self, type_=OSPF_EXTENDED_PREFIX_SID_SUBTLV, length=0, flags=0, mt_id=0, algorithm=0, range_size=0, index=0):
        super(PrefixSIDSubTLV, self).__init__()
        self.type_ = type_
        self.length = length
        self.flags = flags
        self.mt_id = mt_id
        self.algorithm = algorithm
        self.range_size = range_size
        self.index = index

    # Restored @classmethod: the method takes `cls` and instantiates it,
    # matching the parser convention of the sibling TLV classes.
    @classmethod
    def parser(cls, buf):
        """Parse one Prefix-SID Sub-TLV from *buf*; return (instance, remaining bytes)."""
        rest = buf[cls._VALUE_PACK_LEN:]
        buf = buf[:cls._VALUE_PACK_LEN]
        (type_, length, flags, mt_id, algorithm, _pad, range_size, _pad, index) = struct.unpack_from(cls._VALUE_PACK_STR, buf)
        return (cls(type_, length, flags, mt_id, algorithm, range_size, index), rest)

    def serialize(self):
        """Pack this sub-TLV; the length field excludes the 4-byte type/length header."""
        return struct.pack(self._VALUE_PACK_STR, OSPF_EXTENDED_PREFIX_SID_SUBTLV, (self._VALUE_PACK_LEN - 4), self.flags, self.mt_id, self.algorithm, 0, self.range_size, 0, self.index)
def extractSeisparadiseWordpressCom(item):
    """Build a release message for a seisparadise.wordpress.com feed item.

    Returns None for previews/unparseable titles, False for unknown tags,
    otherwise the result of buildReleaseMessageWithType.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (feed tag to match, canonical series name, translation type)
    tagmap = [
        ('the husband who is played broken', 'the husband who is played broken', 'translated'),
        ('family sex slave', 'family sex slave', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    match = next(((name, tl_type) for (tagname, name, tl_type) in tagmap if tagname in item['tags']), None)
    if match is not None:
        name, tl_type = match
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionPlotoptionsSplineSonificationContexttracksMappingHighpassFrequency(Options):
    """Highcharts highpass-frequency mapping options, exposed as property pairs.

    NOTE(review): every name below was defined twice (getter then setter), so
    the setter silently shadowed the getter — the `@property` / `@<name>.setter`
    decorators were evidently stripped and are restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def update_segs(db_root, clb_int, seg_fn_in, seg_fn_out, zero_db_fn, tag_groups, strict=True, verbose=False):
    """Apply zero-candidate groups to segbits .db files.

    In CLB/INT mode the built-in zero DB is used and the six fixed tile-type
    files under *db_root* are rewritten in place; otherwise an explicit input
    file and zero DB file are required (output defaults to the input path).
    """
    if clb_int:
        zero_db = clb_int_zero_db
        lazy = True
        fn_inouts = []
        for tile_type in ['int_l', 'int_r', 'clbll_l', 'clbll_r', 'clblm_l', 'clblm_r']:
            fn = '%s/segbits_%s.db' % (db_root, tile_type)
            # In-place rewrite: input and output are the same file.
            fn_inouts.append((fn, fn))
    else:
        assert seg_fn_in
        assert zero_db_fn
        lazy = False
        if not seg_fn_out:
            seg_fn_out = seg_fn_in
        fn_inouts = [(seg_fn_in, seg_fn_out)]
        zero_db = load_zero_db(zero_db_fn)
    print('CLB INT mode: %s' % clb_int)
    print('Segbit groups: %s' % len(zero_db))
    update_seg_fns(fn_inouts, zero_db, tag_groups, clb_int, lazy=lazy, strict=strict, verbose=verbose)
class SimulationFunction():
    """Generators for single-cycle waveforms: sine, square, Daubechies, and an HRF-like curve."""

    # One cycle of phase values over [-pi, pi), endpoint excluded.
    phase_data = np.linspace(-np.pi, np.pi, SAMPLE_SIZE_CYCLE)[:-1]

    def sin_cycle(self):
        """One cycle of a sine wave."""
        return np.sin(self.phase_data)

    def square_cycle(self):
        """One cycle of a square wave (sign of the sine)."""
        return np.sign(np.sin(self.phase_data))

    def daub_cycle(self):
        """Daubechies wavelet coefficients zero-padded to a full cycle length."""
        coefficients = signal.wavelets.daub(ORDER_OF_ZERO_DAUB)
        padding = np.zeros(SAMPLES_REST, dtype=float)
        return np.concatenate([coefficients, padding])

    def hrf_cycle(self):
        """Haemodynamic-response-like curve: gamma peak minus scaled gamma undershoot,
        normalised so its maximum is 1."""
        times = np.arange(1, SAMPLE_SIZE_CYCLE)
        peak = gamma.pdf(times, PEAK_GAMMA_SHAPE)
        undershoot = gamma.pdf(times, UNDERSHOOT_GAMMA_SHAPE)
        curve = peak - (TIME_TO_ZERO_HFR * undershoot)
        return curve / np.max(curve)
class OptionPlotoptionsHeatmapSonificationTracksMappingHighpassFrequency(Options):
    """Highcharts highpass-frequency mapping options, exposed as property pairs.

    NOTE(review): every name below was defined twice (getter then setter), so
    the setter silently shadowed the getter — the `@property` / `@<name>.setter`
    decorators were evidently stripped and are restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_replace_middleware(middleware_factory):
    """middleware_onion.replace swaps a named layer in place; remove drops it."""
    first = middleware_factory()
    second = middleware_factory()
    third = middleware_factory()
    manager = RequestManager(None, BaseProvider(), middlewares=[first, (second, '2nd'), third])
    assert tuple(manager.middleware_onion) == (first, second, third)
    replacement = middleware_factory()
    manager.middleware_onion.replace('2nd', replacement)
    assert tuple(manager.middleware_onion) == (first, replacement, third)
    manager.middleware_onion.remove('2nd')
    assert tuple(manager.middleware_onion) == (first, third)
# NOTE(review): this registry decorator had lost its '@' (it was a bare
# expression statement); restored — confirm the registry name against the
# module's imports.
@_ARCH_REGISTRY.register()
class FCOS(d2_FCOS):
    """d2go wrapper around detectron2's FCOS that adds export and quantization hooks."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    # Restored @classmethod: the method takes `cls` and follows detectron2's
    # configurable from_config convention.
    @classmethod
    def from_config(cls, cfg):
        """Build constructor kwargs for FCOS from a detectron2-style config."""
        backbone = build_backbone(cfg)
        backbone_shape = backbone.output_shape()
        try:
            feature_shapes = [backbone_shape[f] for f in cfg.MODEL.FCOS.IN_FEATURES]
        except KeyError:
            raise KeyError(f'Available keys: {backbone_shape.keys()}. Requested keys: {cfg.MODEL.FCOS.IN_FEATURES}')
        head = FCOSHead(input_shape=feature_shapes, num_classes=cfg.MODEL.FCOS.NUM_CLASSES, conv_dims=([feature_shapes[0].channels] * cfg.MODEL.FCOS.NUM_CONVS), norm=cfg.MODEL.FCOS.HEAD_NORM)
        return {'backbone': backbone, 'head': head, 'pixel_mean': cfg.MODEL.PIXEL_MEAN, 'pixel_std': cfg.MODEL.PIXEL_STD, 'num_classes': cfg.MODEL.FCOS.NUM_CLASSES, 'head_in_features': cfg.MODEL.FCOS.IN_FEATURES, 'focal_loss_alpha': cfg.MODEL.FCOS.FOCAL_LOSS_ALPHA, 'focal_loss_gamma': cfg.MODEL.FCOS.FOCAL_LOSS_GAMMA, 'test_score_thresh': cfg.MODEL.FCOS.SCORE_THRESH_TEST, 'test_topk_candidates': cfg.MODEL.FCOS.TOPK_CANDIDATES_TEST, 'test_nms_thresh': cfg.MODEL.FCOS.NMS_THRESH_TEST, 'max_detections_per_image': cfg.TEST.DETECTIONS_PER_IMAGE}

    def prepare_for_export(self, cfg, *args, **kwargs):
        """Dispatch export preparation to the function registered under cfg.FCOS_PREPARE_FOR_EXPORT."""
        func = FCOS_PREPARE_FOR_EXPORT_REGISTRY.get(cfg.FCOS_PREPARE_FOR_EXPORT)
        return func(self, cfg, *args, **kwargs)

    def prepare_for_quant(self, cfg, *args, **kwargs):
        """Attach qconfigs and wrap backbone/head so the model can be quantized (QAT or PTQ)."""
        model = self
        qconfig = set_backend_and_create_qconfig(cfg, is_train=cfg.QUANTIZATION.QAT.ENABLED)
        logger.info('Setup the model with qconfig:\n{}'.format(qconfig))
        model.backbone.qconfig = qconfig
        model.head.qconfig = qconfig
        if isinstance(model.backbone, FPN):
            model.backbone.bottom_up = wrap_quant_subclass(model.backbone.bottom_up, n_inputs=1, n_outputs=len(model.backbone.bottom_up._out_features))
        else:
            model.backbone = wrap_quant_subclass(model.backbone, n_inputs=1, n_outputs=len(model.backbone._out_features))

        def unpack_cyclebatchnormlist(module):
            # Recursively replace single-element CycleBatchNormList modules with
            # plain BatchNorm2d, which the quantization tooling can handle.
            if isinstance(module, CycleBatchNormList):
                if (len(module) > 1):
                    raise NotImplementedError('CycleBatchNormList w/ more than one element cannot be quantized')
                else:
                    num_channel = module.weight.size(0)
                    new_module = nn.BatchNorm2d(num_channel, affine=True)
                    new_module.weight = module.weight
                    new_module.bias = module.bias
                    new_module.running_mean = module[0].running_mean
                    new_module.running_var = module[0].running_var
                    module = new_module
            else:
                for (name, child) in module.named_children():
                    new_child = unpack_cyclebatchnormlist(child)
                    if (new_child is not child):
                        module.add_module(name, new_child)
            return module
        model.head = unpack_cyclebatchnormlist(model.head)
        # FCOS heads emit 3 outputs (cls, box, ctrness) per input feature level.
        model.head = wrap_quant_subclass(model.head, n_inputs=len(cfg.MODEL.FCOS.IN_FEATURES), n_outputs=(len(cfg.MODEL.FCOS.IN_FEATURES) * 3))
        model = fuse_utils.fuse_model(model, is_qat=cfg.QUANTIZATION.QAT.ENABLED, inplace=True)
        model = wrap_non_quant_group_norm(model)
        return model
def update_feed_name(feedrow, params):
    """Rename an RSS feed row after validating the requested new name.

    Parameters:
        feedrow: the db.RssFeedEntry row being renamed (mutated in place).
        params: dict with 'old_name' and 'new_name' strings.

    Returns a result dict with 'error', 'message', 'reload' keys; a name
    collision additionally carries 'merge_ids' with the two feed ids.
    """
    oldn = params['old_name'].strip()
    newn = params['new_name'].strip()
    # Bug fix: validate the trivial cases *before* the duplicate lookup.
    # Previously an unchanged name matched the feed's own row in the query
    # and was misreported as "a feed with that name already exists".
    if (not newn):
        return {'error': True, 'message': 'Name is empty!', 'reload': False}
    if (oldn == newn):
        return {'error': True, 'message': 'Name has not changed? Nothing to do!', 'reload': False}
    havef = g.session.query(db.RssFeedEntry).filter((db.RssFeedEntry.feed_name == newn)).scalar()
    if havef:
        return {'error': True, 'message': 'A feed with that name already exists!', 'reload': False, 'merge_ids': [feedrow.id, havef.id]}
    feedrow.feed_name = newn
    return {'error': False, 'message': 'Name updated successfully!', 'reload': True}
class CodeMixFooMixedInClass(SimpleEntity, CodeMixin):
    """Test entity combining SimpleEntity with CodeMixin (joined-table inheritance).

    Maps to its own table keyed by the SimpleEntity primary key, with its
    own polymorphic identity.
    """
    __tablename__ = 'CodeMixFooMixedInClasses'
    __mapper_args__ = {'polymorphic_identity': 'CodeMixFooMixedInClass'}
    # 'id' column doubles as the FK to the SimpleEntities base-table row.
    codeMixFooMixedInClass_id = Column('id', Integer, ForeignKey('SimpleEntities.id'), primary_key=True)
    def __init__(self, **kwargs):
        super(CodeMixFooMixedInClass, self).__init__(**kwargs)
        # CodeMixin is not part of the super() MRO chain for init here,
        # so its initializer is invoked explicitly with the same kwargs.
        CodeMixin.__init__(self, **kwargs)
class CandidateHistory(BaseCandidate):
    """Candidate attributes per two-year period, backed by a materialized view.

    Composite primary key: (candidate_id, two_year_period) — one row per
    candidate per election cycle.
    """
    __tablename__ = 'ofec_candidate_history_mv'
    candidate_id = db.Column(db.String, primary_key=True, index=True, doc=docs.CANDIDATE_ID)
    two_year_period = db.Column(db.Integer, primary_key=True, index=True, doc=docs.CANDIDATE_CYCLE)
    candidate_election_year = db.Column(db.Integer, doc=docs.LAST_CANDIDATE_ELECTION_YEAR)
    # Address fields as filed on Form 2.
    address_city = db.Column(db.String(100), doc=docs.F2_CANDIDATE_CITY)
    address_state = db.Column(db.String(2), doc=docs.F2_CANDIDATE_STATE)
    address_street_1 = db.Column(db.String(200), doc=docs.F2_CANDIDATE_STREET_1)
    address_street_2 = db.Column(db.String(200), doc=docs.F2_CANDIDATE_STREET_2)
    address_zip = db.Column(db.String(10), doc=docs.F2_CANDIDATE_ZIP)
    candidate_inactive = db.Column(db.Boolean, doc=docs.CANDIDATE_INACTIVE)
    active_through = db.Column(db.Integer, doc=docs.ACTIVE_THROUGH)
    # Integer-array columns (PostgreSQL ARRAY) of election years / cycles.
    rounded_election_years = db.Column(ARRAY(db.Integer), index=True, doc=docs.ROUNDED_ELECTION_YEARS)
    fec_cycles_in_election = db.Column(ARRAY(db.Integer), index=True, doc=docs.FEC_CYCLES_IN_ELECTION)
class OptionPlotoptionsFunnel3dSonificationTracksMappingLowpassFrequency(Options):
    """Generated option accessors for the sonification lowpass-frequency mapping.

    Bug fix: each getter/setter pair was defined as two plain methods with
    the same name, so the second definition silently shadowed the getter and
    attribute assignment bypassed ``_config`` entirely.  Restored the
    @property/@setter pairs this accessor pattern requires.
    NOTE(review): confirm against the code generator that produced this file.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class CodesHandle():
    """Thin wrapper around an ecCodes GRIB message handle.

    Tracks the file path/offset the handle came from (both None for
    in-memory handles created by ``from_sample`` or ``clone``); mutating
    setters assert the handle is a clone (path is None).
    """

    def __init__(self, handle, path, offset):
        self.handle = handle    # raw ecCodes handle id
        self.path = path        # source file path, or None for in-memory handles
        self.offset = offset    # byte offset of the message within path

    @classmethod
    def from_sample(cls, name):
        """Create a handle from a named ecCodes GRIB sample.

        Bug fix: this alternate constructor used ``cls`` but was missing the
        @classmethod decorator, so calling it on the class passed the sample
        name as ``cls``.
        """
        return cls(eccodes.codes_new_from_samples(name, eccodes.CODES_PRODUCT_GRIB), None, None)

    def __del__(self):
        try:
            # Bug fix: was ``eccodes_codes_release`` (undefined name -> NameError,
            # which the ``except TypeError`` below never caught).  TypeError is
            # still possible at interpreter shutdown when module globals are None.
            eccodes.codes_release(self.handle)
        except TypeError:
            pass

    def get(self, name):
        """Get a key's value; returns None if the key is not present."""
        try:
            if (name == 'values'):
                return eccodes.codes_get_values(self.handle)
            size = eccodes.codes_get_size(self.handle, name)
            if (name == 'md5GridSection'):
                # md5GridSection must be computed with a canonical
                # shapeOfTheEarth (255); restore the original value afterwards.
                save = eccodes.codes_get_long(self.handle, 'shapeOfTheEarth')
                eccodes.codes_set_long(self.handle, 'shapeOfTheEarth', 255)
                result = eccodes.codes_get_string(self.handle, 'md5GridSection')
                eccodes.codes_set_long(self.handle, 'shapeOfTheEarth', save)
                return result
            if (size and (size > 1)):
                return eccodes.codes_get_array(self.handle, name)
            return eccodes.codes_get(self.handle, name)
        except eccodes.KeyValueNotFoundError:
            return None

    def get_long(self, name):
        """Get a key as a long, or None if missing."""
        try:
            return eccodes.codes_get_long(self.handle, name)
        except eccodes.KeyValueNotFoundError:
            return None

    def get_string(self, name):
        """Get a key as a string, or None if missing."""
        try:
            return eccodes.codes_get_string(self.handle, name)
        except eccodes.KeyValueNotFoundError:
            return None

    def get_double(self, name):
        """Get a key as a double, or None if missing."""
        try:
            return eccodes.codes_get_double(self.handle, name)
        except eccodes.KeyValueNotFoundError:
            return None

    def get_data(self):
        return eccodes.codes_grib_get_data(self.handle)

    def as_mars(self, param='shortName'):
        """Return the message's MARS keys as a dict; the 'param' key is
        reported via the given key name (default 'shortName')."""
        r = {}
        it = eccodes.codes_keys_iterator_new(self.handle, 'mars')
        try:
            while eccodes.codes_keys_iterator_next(it):
                key = eccodes.codes_keys_iterator_get_name(it)
                r[key] = self.get((param if (key == 'param') else key))
        finally:
            eccodes.codes_keys_iterator_delete(it)
        return r

    def clone(self):
        """Return a deep copy of this message as a new in-memory handle."""
        return CodesHandle(eccodes.codes_clone(self.handle), None, None)

    def set_values(self, values):
        assert (self.path is None), 'Only cloned handles can have values changed'
        eccodes.codes_set_values(self.handle, values.flatten())
        # Mark the message as locally modified.
        eccodes.codes_set_long(self.handle, 'generatingProcessIdentifier', 255)

    def set_multiple(self, values):
        assert (self.path is None), 'Only cloned handles can have values changed'
        eccodes.codes_set_key_vals(self.handle, values)

    def set_long(self, name, value):
        try:
            assert (self.path is None), 'Only cloned handles can have values changed'
            eccodes.codes_set_long(self.handle, name, value)
        except Exception as e:
            LOG.error('Error setting %s=%s', name, value)
            raise ValueError(('Error setting %s=%s (%s)' % (name, value, e)))

    def set_double(self, name, value):
        try:
            assert (self.path is None), 'Only cloned handles can have values changed'
            eccodes.codes_set_double(self.handle, name, value)
        except Exception as e:
            LOG.error('Error setting %s=%s', name, value)
            raise ValueError(('Error setting %s=%s (%s)' % (name, value, e)))

    def set_string(self, name, value):
        try:
            assert (self.path is None), 'Only cloned handles can have values changed'
            eccodes.codes_set_string(self.handle, name, value)
        except Exception as e:
            LOG.error('Error setting %s=%s', name, value)
            raise ValueError(('Error setting %s=%s (%s)' % (name, value, e)))

    def set(self, name, value):
        """Set a key, dispatching to the array setter for list values."""
        try:
            assert (self.path is None), 'Only cloned handles can have values changed'
            if isinstance(value, list):
                return eccodes.codes_set_array(self.handle, name, value)
            return eccodes.codes_set(self.handle, name, value)
        except Exception as e:
            LOG.error('Error setting %s=%s', name, value)
            raise ValueError(('Error setting %s=%s (%s)' % (name, value, e)))

    def write(self, f):
        """Write the encoded message to an open binary file object."""
        eccodes.codes_write(self.handle, f)

    def save(self, path):
        """Write the message to ``path`` and record it as this handle's source."""
        with open(path, 'wb') as f:
            eccodes.codes_write(self.handle, f)
            self.path = path
            self.offset = 0

    def read_bytes(self, offset, length):
        """Read raw bytes from this handle's source file."""
        with open(self.path, 'rb') as f:
            f.seek(offset)
            return f.read(length)
class RunAction(EcsAction):
    """ECS action that starts one-off tasks from a task definition."""
    def __init__(self, client, cluster_name):
        super(RunAction, self).__init__(client, cluster_name, None)
        self._client = client
        self._cluster_name = cluster_name
        # Populated by run() with the task descriptions returned by the client.
        self.started_tasks = []
    def run(self, task_definition, count, started_by, launchtype, subnets, security_groups, public_ip, platform_version):
        """Start ``count`` tasks; returns True on success, raises EcsError on
        client failure.

        NOTE(review): the snake_case kwargs (launchtype, subnets, ...) assume
        ``client.run_task`` is a project wrapper that accepts them — raw boto3
        uses launchType/networkConfiguration instead.  Verify against the
        client implementation.
        """
        try:
            result = self._client.run_task(cluster=self._cluster_name, task_definition=task_definition.family_revision, count=count, started_by=started_by, overrides=dict(containerOverrides=task_definition.get_overrides()), launchtype=launchtype, subnets=subnets, security_groups=security_groups, public_ip=public_ip, platform_version=platform_version)
            self.started_tasks = result['tasks']
            return True
        except ClientError as e:
            # Re-raise AWS client errors as the project's EcsError.
            raise EcsError(str(e))
class TestInpatientRecord(FrappeTestCase):
    """Integration tests for the Inpatient Record admit/discharge workflow.

    Each test clears the Inpatient Record table first so earlier records
    cannot trigger the overlap-admission validation.
    """
    def test_admit_and_discharge(self):
        """Full happy path: admit, schedule discharge, invoice, discharge."""
        frappe.db.sql('delete from `tabInpatient Record`')
        patient = create_patient()
        ip_record = create_inpatient(patient)
        ip_record.expected_length_of_stay = 0
        ip_record.save(ignore_permissions=True)
        # Saving the record links it (and its status) onto the Patient doc.
        self.assertEqual(ip_record.name, frappe.db.get_value('Patient', patient, 'inpatient_record'))
        self.assertEqual(ip_record.status, frappe.db.get_value('Patient', patient, 'inpatient_status'))
        service_unit = get_healthcare_service_unit()
        admit_patient(ip_record, service_unit, now_datetime())
        self.assertEqual('Admitted', frappe.db.get_value('Patient', patient, 'inpatient_status'))
        self.assertEqual('Occupied', frappe.db.get_value('Healthcare Service Unit', service_unit, 'occupancy_status'))
        schedule_discharge(frappe.as_json({'patient': patient}))
        self.assertEqual('Vacant', frappe.db.get_value('Healthcare Service Unit', service_unit, 'occupancy_status'))
        ip_record1 = frappe.get_doc('Inpatient Record', ip_record.name)
        # Discharge must fail while occupancy is not yet invoiced.
        # NOTE(review): this asserts on the stale ip_record doc rather than the
        # freshly loaded ip_record1 — looks intentional but worth confirming.
        self.assertRaises(frappe.ValidationError, ip_record.discharge)
        mark_invoiced_inpatient_occupancy(ip_record1)
        discharge_patient(ip_record1)
        # Discharge clears the link fields on the Patient doc.
        self.assertEqual(None, frappe.db.get_value('Patient', patient, 'inpatient_record'))
        self.assertEqual(None, frappe.db.get_value('Patient', patient, 'inpatient_status'))
    def test_allow_discharge_despite_unbilled_services(self):
        """With the setting enabled, discharge succeeds without invoicing."""
        frappe.db.sql('delete from `tabInpatient Record`')
        setup_inpatient_settings(key='allow_discharge_despite_unbilled_services', value=1)
        patient = create_patient()
        ip_record = create_inpatient(patient)
        ip_record.expected_length_of_stay = 0
        ip_record.save(ignore_permissions=True)
        service_unit = get_healthcare_service_unit()
        admit_patient(ip_record, service_unit, now_datetime())
        schedule_discharge(frappe.as_json({'patient': patient}))
        self.assertEqual('Vacant', frappe.db.get_value('Healthcare Service Unit', service_unit, 'occupancy_status'))
        ip_record = frappe.get_doc('Inpatient Record', ip_record.name)
        # No mark_invoiced_inpatient_occupancy() needed here — that is the point.
        ip_record.discharge()
        self.assertEqual(None, frappe.db.get_value('Patient', patient, 'inpatient_record'))
        self.assertEqual(None, frappe.db.get_value('Patient', patient, 'inpatient_status'))
        # Restore the default setting for other tests.
        setup_inpatient_settings(key='allow_discharge_despite_unbilled_services', value=0)
    def test_do_not_bill_patient_encounters_for_inpatients(self):
        """With the setting enabled, inpatient encounters are excluded from invoicing."""
        frappe.db.sql('delete from `tabInpatient Record`')
        setup_inpatient_settings(key='do_not_bill_inpatient_encounters', value=1)
        patient = create_patient()
        ip_record = create_inpatient(patient)
        ip_record.expected_length_of_stay = 0
        ip_record.save(ignore_permissions=True)
        service_unit = get_healthcare_service_unit()
        admit_patient(ip_record, service_unit, now_datetime())
        patient_encounter = create_patient_encounter()
        encounters = get_encounters_to_invoice(patient, '_Test Company')
        encounter_ids = [entry.reference_name for entry in encounters]
        # The encounter created while admitted must not be billable.
        self.assertFalse((patient_encounter.name in encounter_ids))
        schedule_discharge(frappe.as_json({'patient': patient}))
        self.assertEqual('Vacant', frappe.db.get_value('Healthcare Service Unit', service_unit, 'occupancy_status'))
        ip_record = frappe.get_doc('Inpatient Record', ip_record.name)
        mark_invoiced_inpatient_occupancy(ip_record)
        discharge_patient(ip_record)
        # Restore the default setting for other tests.
        setup_inpatient_settings(key='do_not_bill_inpatient_encounters', value=0)
    def test_validate_overlap_admission(self):
        """A second open/active inpatient record for the same patient must be rejected."""
        frappe.db.sql('delete from `tabInpatient Record`')
        patient = create_patient()
        ip_record = create_inpatient(patient)
        ip_record.expected_length_of_stay = 0
        ip_record.save(ignore_permissions=True)
        # Overlap with a scheduled (not yet admitted) record.
        ip_record_new = create_inpatient(patient)
        ip_record_new.expected_length_of_stay = 0
        self.assertRaises(frappe.ValidationError, ip_record_new.save)
        service_unit = get_healthcare_service_unit()
        admit_patient(ip_record, service_unit, now_datetime())
        # Overlap with an admitted record.
        ip_record_new = create_inpatient(patient)
        self.assertRaises(frappe.ValidationError, ip_record_new.save)
        frappe.db.sql('delete from `tabInpatient Record`')
class CommandsTestCase(TestCase):
    """Exercise the import_hscic_chemicals management command."""

    def test_import_hscic_prescribing(self):
        # Load the chemicals fixture through the management command.
        fixture = 'frontend/tests/fixtures/commands/hscic_chemicals.csv'
        call_command('import_hscic_chemicals', chem_file=fixture)
        # The fixture holds exactly 100 chemicals.
        self.assertEqual(Chemical.objects.all().count(), 100)
        # Spot-check one entry without a name and one with a real name.
        unnamed = Chemical.objects.get(bnf_code='0410000N0')
        self.assertEqual(unnamed.chem_name, 'Unknown')
        named = Chemical.objects.get(bnf_code='0101010M0')
        self.assertEqual(named.chem_name, 'Magaldrate')
def annotate_speed_model(tile_type, reduced_tile, root_dir):
    """Annotate a reduced tile description with Vivado speed-model data.

    Writes the tile's speed-model indices to a temp file, runs Vivado's
    get_speed_model.tcl on it (which rewrites the same file with JSON5
    speed-model data), then annotates the tile's site pins, pips and
    wires in place from that data.
    """
    speed_model_indices = get_speed_model_indices(reduced_tile)
    tmp_indices_file = os.path.join(root_dir, '{}_speed_index.tmp'.format(tile_type))
    # One index per line — the format the tcl script expects.
    with open(tmp_indices_file, 'w') as f:
        for index in speed_model_indices:
            print(index, file=f)
    vivado = os.getenv('XRAY_VIVADO')
    assert (vivado is not None)
    # The tcl script overwrites tmp_indices_file with JSON5 output.
    subprocess.check_call('{} -mode batch -source get_speed_model.tcl -tclargs {}'.format(vivado, tmp_indices_file), shell=True, stdout=subprocess.DEVNULL)
    with open(tmp_indices_file, 'r') as f:
        speed_model_data = json5.load(f)
    for site in reduced_tile['sites']:
        annotate_site_pins_speed_model(site['site_pins'], speed_model_data)
    annotate_pips_speed_model(reduced_tile['pips'], speed_model_data)
    annotate_wires_speed_model(reduced_tile['wires'], speed_model_data)
def multi_process_capture(source_face: Face, webcam_capture: cv2.VideoCapture, fps: float) -> Generator[(Frame, None, None)]:
    """Yield processed webcam frames, distributing frame processing over a thread pool.

    Reads frames from the webcam, submits each to process_stream_frame, and
    yields completed frames in the order their futures finish.  Stops (and
    stops yielding) as soon as analyse_stream flags the stream.
    """
    with tqdm(desc=wording.get('processing'), unit='frame', ascii=' =', disable=(facefusion.globals.log_level in ['warn', 'error'])) as progress:
        with ThreadPoolExecutor(max_workers=facefusion.globals.execution_thread_count) as executor:
            futures = []
            deque_capture_frames: Deque[Frame] = deque()
            while (webcam_capture and webcam_capture.isOpened()):
                (_, capture_frame) = webcam_capture.read()
                # Abort the whole generator if stream analysis flags this frame.
                if analyse_stream(capture_frame, fps):
                    return
                future = executor.submit(process_stream_frame, source_face, capture_frame)
                futures.append(future)
                # Harvest any finished futures; order of completion, not submission.
                for future_done in [future for future in futures if future.done()]:
                    capture_frame = future_done.result()
                    deque_capture_frames.append(capture_frame)
                    futures.remove(future_done)
                # Drain completed frames to the caller, one progress tick each.
                while deque_capture_frames:
                    progress.update()
                    (yield deque_capture_frames.popleft())
()
('input', nargs=1, type=click.Path(exists=True))
def apply(input):
    """Compile the given keyboard layout with xkbcomp and apply it to $DISPLAY.

    Exits immediately under Wayland, which does not support xkbcomp.
    """
    if WAYLAND:
        sys.exit('You appear to be running Wayland, which does not support this operation.')
    layout = KeyboardLayout(input)
    with tempfile.NamedTemporaryFile(mode='w+', suffix='.xkb', encoding='utf-8') as temp_file:
        try:
            temp_file.write(layout.xkb)
            # Bug fix: flush Python's write buffer so xkbcomp (a separate
            # process) sees the complete file; previously the tail of the
            # layout could still be buffered when xkbcomp ran.
            temp_file.flush()
            os.system(f'xkbcomp -w0 {temp_file.name} $DISPLAY')
        finally:
            temp_file.close()
def test_create_default_project_will_create_a_workspace_mel_file(create_test_data, trash_bin):
    """A default project created by Archiver must contain a workspace.mel file."""
    archiver = Archiver()
    project_path = archiver.create_default_project(tempfile.gettempdir())
    # Register for cleanup by the trash_bin fixture.
    trash_bin.append(project_path)
    assert os.path.exists(os.path.join(project_path, 'workspace.mel'))
def wave(handler: SerialHandler, args: argparse.Namespace):
    """Handle the 'wave' CLI subcommand.

    'gen' generates a waveform from frequency/phase arguments; 'load'
    uploads a lookup table given inline (args.table) or from a JSON file
    (args.table_file), resampled to the 512 points the generator expects.

    Raises ValueError if 'load' is requested without a table source.
    """
    waveform_generator = WaveformGenerator(handler)
    if (args.wave_function == 'gen'):
        waveform_generator.generate(channels=args.channel, frequency=args.frequency, phase=args.phase)
    elif (args.wave_function == 'load'):
        if (args.table is not None):
            table = args.table
        elif (args.table_file is not None):
            with open(args.table_file) as table_file:
                table = json.load(table_file)
        else:
            # Bug fix: previously fell through with `table` unbound and
            # crashed with a NameError at len(table) below.
            raise ValueError('wave load requires either a table or a table file')
        # Resample (by nearest-lower index) onto 512 evenly spaced points.
        x = np.arange(0, len(table), (len(table) / 512))
        y = [table[int(i)] for i in x]
        waveform_generator.load_table(channel=args.channel, points=y)
()
('--contract', help='Contract to lookup', required=False)
('--market', help='Market to lookup', callback=partial(click_validate_enum, Market), required=False)
def main(contract, market):
    """Look up a single contract by code, or list all contracts for a market.

    Exactly one of the two options must be provided.
    """
    exporter = Exporter()
    if all((contract, market)):
        # Bug fix: this branch fires when BOTH options are given, but raised
        # the message for the neither-given case.
        raise click.BadParameter('Only one of contract or market may be specified')
    elif (not any((contract, market))):
        raise click.BadParameter('Neither contract nor market is specified')
    pd.options.display.max_rows = 1000
    if contract:
        try:
            meta = exporter.lookup(code=contract)
        except FinamObjectNotFoundError:
            logger.info('No such contract')
        else:
            print(meta)
    else:
        contracts = exporter.lookup(market=Market[market])
        print(contracts)
class OptionSeriesDependencywheelSonificationTracksMappingLowpass(Options):
    """Generated option accessors for the sonification lowpass mapping.

    NOTE(review): sibling generated classes in this file use @property for
    these sub-data accessors; the decorators may have been stripped here —
    verify against the code generator.
    """
    def frequency(self) -> 'OptionSeriesDependencywheelSonificationTracksMappingLowpassFrequency':
        # Lazily creates/returns the 'frequency' sub-options object.
        return self._config_sub_data('frequency', OptionSeriesDependencywheelSonificationTracksMappingLowpassFrequency)
    def resonance(self) -> 'OptionSeriesDependencywheelSonificationTracksMappingLowpassResonance':
        # Lazily creates/returns the 'resonance' sub-options object.
        return self._config_sub_data('resonance', OptionSeriesDependencywheelSonificationTracksMappingLowpassResonance)
class TestWorkbook(unittest.TestCase):
    """Unit tests for Workbook sheet creation and retrieval."""

    def test_make_empty_workbook(self):
        book = Workbook()
        self.assertEqual(len(book.sheets), 0, "New workbook shouldn't have any sheets")
        self.assertRaises(KeyError, book.getSheet, 'Nonexistant sheet')

    def test_make_workbook_add_sheets(self):
        book = Workbook()
        first_sheet = book.addSheet('sheet 1')
        self.assertTrue(isinstance(first_sheet, Worksheet), 'addSheet should return a Worksheet instance')
        self.assertEqual(len(book.sheets), 1, 'Workbook should have one sheet')
        second_sheet = book.addSheet('sheet 2')
        self.assertTrue(isinstance(second_sheet, Worksheet), 'addSheet should return a Worksheet instance')
        self.assertEqual(len(book.sheets), 2, 'Workbook should have two sheets')

    def test_make_workbook_get_sheets(self):
        book = Workbook()
        first_sheet = book.addSheet('sheet 1')
        self.assertEqual(first_sheet, book.getSheet('sheet 1'), "Didn't fetch expected worksheet #1")
        second_sheet = book.addSheet('sheet 2')
        self.assertEqual(second_sheet, book.getSheet('sheet 2'), "Didn't fetch expected worksheet #2")
        self.assertNotEqual(first_sheet, second_sheet, 'Worksheets should be different')
def test_process_connection_ids_bad_default_connection():
    """_process_connection_ids must reject a default connection that is not a dependency."""
    builder = AEABuilder()
    builder.set_name('aea_1')
    builder.add_private_key('fetchai')
    connection = _make_dummy_connection()
    builder.add_component_instance(connection)
    with pytest.raises(ValueError, match='Default connection not a dependency. Please add it and retry.'):
        # Force a default connection id that was never added to the builder;
        # the subsequent processing call must then raise.
        builder._default_connection = ConnectionConfig('conn', 'author', '0.1.0').public_id
        builder._process_connection_ids([connection.public_id])
def position2coordinates(position, surface_size=None):
    """Convert a centre-origin (x up) position to top-left-origin surface coordinates.

    If surface_size is omitted, the active experiment screen's surface size
    is used.  For even surface dimensions the corresponding coordinate is
    shifted down by one to land on a real pixel.  Returns [x, y].
    """
    if surface_size is None:
        surface_size = _internals.active_exp.screen.surface.get_size()
    width, height = surface_size[0], surface_size[1]
    x = position[0] + width // 2
    y = height // 2 - position[1]
    # Even-sized axes have no exact centre pixel; compensate by one.
    if width % 2 == 0:
        x -= 1
    if height % 2 == 0:
        y -= 1
    return [x, y]
def _path_for(team_root_path, team_member_type):
    """Return the versioned directory for a team member type.

    Raises exceptions.SystemSetupError when the directory does not exist.
    """
    root_path = os.path.join(team_root_path, team_member_type, f'v{TEAM_FORMAT_VERSION}')
    if os.path.exists(root_path):
        return root_path
    raise exceptions.SystemSetupError(f'Path {root_path} for {team_member_type} does not exist.')
class BaseAttachmentFormset(BaseModelFormSet):
    """Model formset that attaches each saved instance to an owning post."""

    def __init__(self, *args, **kwargs):
        # The owning post is injected via kwargs by the caller; it is
        # stamped onto every form instance at save time.
        self.post = kwargs.pop('post', None)
        super().__init__(*args, **kwargs)

    def save(self, commit=True, **kwargs):
        """Bind each form instance to self.post (if set), then save.

        Bug fix: the saved instances returned by BaseModelFormSet.save()
        were previously discarded, so callers always received None.
        """
        if self.post:
            for form in self.forms:
                form.instance.post = self.post
        return super().save(commit)
def request_pcfad_device(mportnum):
    """Return the PCF A/D device entity serving a mapped port.

    Returns an existing entity for the port's I2C address, creating and
    registering a new one if none exists; returns None for unmapped ports.
    """
    (i2caddress, realpin) = get_pcfad_pin_address(mportnum)
    if realpin <= -1:
        # Port is not mapped to a real pin.
        return None
    wanted_address = int(i2caddress)
    for device in pcfad_devices:
        if device.i2cAddress == wanted_address:
            return device
    # No entity yet for this address — create and register one.
    new_device = PCFADEntity(i2caddress)
    pcfad_devices.append(new_device)
    return new_device
def calibrate_folder(args):
    """Calibrate a stereo camera pair from chessboard image pairs.

    Consumes args.input_files (left/right images interleaved, mutated to
    empty as pairs are processed), calibrates, reports the average epipolar
    error, and exports the calibration to args.output_folder.
    """
    # All images are assumed to share the dimensions of the first one.
    (height, width) = cv2.imread(args.input_files[0]).shape[:2]
    calibrator = StereoCalibrator(args.rows, args.columns, args.square_size, (width, height))
    progress = ProgressBar(maxval=len(args.input_files), widgets=[Bar('=', '[', ']'), ' ', Percentage()])
    print('Reading input files...')
    progress.start()
    # Consume the file list two at a time (left, right); the shrinking list
    # doubles as the progress counter.
    while args.input_files:
        (left, right) = args.input_files[:2]
        (img_left, im_right) = (cv2.imread(left), cv2.imread(right))
        calibrator.add_corners((img_left, im_right), show_results=args.show_chessboards)
        args.input_files = args.input_files[2:]
        progress.update((progress.maxval - len(args.input_files)))
    progress.finish()
    print('Calibrating cameras. This can take a while.')
    calibration = calibrator.calibrate_cameras()
    avg_error = calibrator.check_calibration(calibration)
    print('The average error between chessboard points and their epipolar lines is \n{} pixels. This should be as small as possible.'.format(avg_error))
    calibration.export(args.output_folder)
class ValveStackMCLAGRestartTestCase(ValveTestBases.ValveTestNetwork):
    """Test MCLAG LACP actor state across a valve cold start.

    Two stacked DPs, each with two LACP (lag 1) ports forming the MCLAG.
    """
    CONFIG = ("\ndps:\n s1:\n%s\n stack:\n priority: 1\n interfaces:\n 1:\n description: p1\n stack:\n dp: s2\n port: 1\n 2:\n description: p2\n native_vlan: 100\n 3:\n description: p3\n native_vlan: 100\n lacp: 1\n 4:\n description: p4\n native_vlan: 100\n lacp: 1\n s2:\n hardware: 'GenericTFM'\n dp_id: 0x2\n interfaces:\n 1:\n description: p1\n stack:\n dp: s1\n port: 1\n 2:\n description: p2\n native_vlan: 100\n 3:\n description: p3\n native_vlan: 100\n lacp: 1\n 4:\n description: p4\n native_vlan: 100\n lacp: 1\n" % BASE_DP1_CONFIG)
    def setUp(self):
        """Build the two-DP stacked network from CONFIG."""
        self.setup_valves(self.CONFIG)
    def test_mclag_cold_start(self):
        """LACP port goes UP -> NONE on delete, and back UP after cold start."""
        self.activate_all_ports()
        valve = self.valves_manager.valves[1]
        other_valves = self.get_other_valves(valve)
        old_port = valve.dp.ports[3]
        # Bring the LACP actor up on port 3.
        self.assertTrue(valve.lacp_update(old_port, True, 1, 1, other_valves), 'No OFMSGS returned')
        self.assertTrue(old_port.is_actor_up(), 'Actor not UP')
        # Deleting the port must reset the actor state.
        valve.port_delete(3, other_valves=other_valves)
        self.assertTrue(old_port.is_actor_none(), 'Actor not NONE')
        self.cold_start()
        new_port = valve.dp.ports[3]
        # Cold start must reuse the same port object, not rebuild it.
        self.assertEqual(id(old_port), id(new_port), 'Port object changed')
        self.assertTrue(valve.port_add(3), 'No OFMSGS returned')
        self.assertTrue(valve.lacp_update(new_port, True, 1, 1, other_valves), 'No OFMSGS returned')
        self.assertTrue(new_port.is_actor_up(), 'Actor not UP')
class CandlePlot(BaseCandlePlot):
    """Candlestick-style plot driven by up to five value data sources per index.

    Any of the five value sources (min, bar_min, center, bar_max, max) may
    be None; missing ones are passed through as None to the renderer.
    """
    # Optional value data sources, from whisker bottom to whisker top.
    min_values = Instance(AbstractDataSource)
    bar_min = Instance(AbstractDataSource)
    center_values = Instance(AbstractDataSource)
    bar_max = Instance(AbstractDataSource)
    max_values = Instance(AbstractDataSource)
    # 'value' resolves to the first available of center/bar_min/bar_max.
    value = Property
    def map_data(self, screen_pt, all_values=True):
        """Map a screen point to (index, value) data coordinates."""
        (x, y) = screen_pt
        # In vertical orientation the screen axes are swapped.
        if (self.orientation == 'v'):
            (x, y) = (y, x)
        return array((self.index_mapper.map_data(x), self.value_mapper.map_data(y)))
    def map_index(self, screen_pt, threshold=0.0, outside_returns_none=True, index_only=True):
        """Return the index of the candle nearest to screen_pt, or None.

        Only index_only=True is supported.  With a positive threshold, None
        is returned when the nearest candle is further than threshold pixels.
        """
        if (not index_only):
            raise NotImplementedError('Candle Plots only support index_only map_index()')
        if (len(screen_pt) == 0):
            return None
        index_data = self.index.get_data()
        if (len(index_data) == 0):
            return None
        target_data = self.index_mapper.map_data(screen_pt[0])
        index = searchsorted(index_data, [target_data])[0]
        if (index == len(index_data)):
            index -= 1
        if (index > 0):
            # Pick whichever of the two bracketing candles is closer on screen.
            lower = index_data[(index - 1)]
            upper = index_data[index]
            (screen_low, screen_high) = self.index_mapper.map_screen(array([lower, upper]))
            low_dist = abs((screen_pt[0] - screen_low))
            high_dist = abs((screen_pt[0] - screen_high))
            if (low_dist < high_dist):
                index = (index - 1)
                dist = low_dist
            else:
                dist = high_dist
            if ((threshold > 0) and (dist >= threshold)):
                return None
            else:
                return index
        else:
            # Single candidate: the first candle.
            screen = self.index_mapper.map_screen(index_data[0])
            if ((threshold > 0) and (abs((screen - screen_pt[0])) >= threshold)):
                return None
            else:
                return index
    def _gather_points(self):
        """Cache the index and value points lying within the visible index range."""
        index = self.index.get_data()
        # Broaden the mask so candles partially outside the range still draw.
        mask = broaden(self.index_range.mask_data(index))
        if (not mask.any()):
            self._cached_data_pts = []
            self._cache_valid = True
            return
        data_pts = [compress(mask, index)]
        # Missing value sources are cached as None placeholders.
        for v in (self.min_values, self.bar_min, self.center_values, self.bar_max, self.max_values):
            if ((v is None) or (len(v.get_data()) == 0)):
                data_pts.append(None)
            else:
                data_pts.append(compress(mask, v.get_data()))
        self._cached_data_pts = data_pts
        self._cache_valid = True
    def _draw_plot(self, gc, view_bounds=None, mode='normal'):
        """Map cached data to screen space and delegate to _render."""
        self._gather_points()
        if (len(self._cached_data_pts) == 0):
            return
        index = self.index_mapper.map_screen(self._cached_data_pts[0])
        if (len(index) == 0):
            return
        vals = []
        for v in self._cached_data_pts[1:]:
            if (v is None):
                vals.append(None)
            else:
                vals.append(self.value_mapper.map_screen(v))
        # Candle half-width: fixed for a single candle, else derived from the
        # smallest gap between adjacent candles.
        if (len(index) == 1):
            width = 5.0
        else:
            width = ((index[1:] - index[:(- 1)]).min() / 2.5)
        left = (index - width)
        right = (index + width)
        with gc:
            gc.clip_to_rect(self.x, self.y, self.width, self.height)
            self._render(gc, left, right, *vals)
    def _get_value(self):
        # Property getter for 'value': first available value source.
        if (self.center_values is not None):
            return self.center_values
        elif (self.bar_min is not None):
            return self.bar_min
        elif (self.bar_max is not None):
            return self.bar_max
class Unpacker(UnpackBase):
    """Extracts files from a firmware image into the FACT data folder.

    Adds fallback handling (generic filesystem / generic carver) when the
    primary unpack plugin yields nothing, and writes an unpack report.
    """
    # Plugins whose empty result triggers the generic filesystem fallback.
    FS_FALLBACK_CANDIDATES = ['SquashFS']
    # Plugins for which the generic carver fallback is skipped.
    CARVER_FALLBACK_BLACKLIST = ['generic_carver', 'NOP', 'PaTool', 'SFX', 'LinuxKernel']

    def __init__(self, config=None, extract_everything: bool=False, folder: str=None):
        super().__init__(config=config, extract_everything=extract_everything)
        data_folder = Path(self.config.get('unpack', 'data_folder'))
        # Optional sub-folder keeps parallel unpack runs separated.
        if folder:
            self._file_folder = ((data_folder / folder) / 'files')
            self._report_folder = ((data_folder / folder) / 'reports')
        else:
            self._file_folder = (data_folder / 'files')
            self._report_folder = (data_folder / 'reports')

    def unpack(self, file_path):
        """Unpack file_path into the file folder and write a meta.json report.

        Returns the list of extracted file paths (empty when the file was
        excluded or nothing could be extracted).
        """
        if self._should_ignore(file_path):
            meta_data = {'plugin_used': None, 'number_of_unpacked_files': 0, 'number_of_unpacked_directories': 0, 'number_of_excluded_files': 1, 'info': f'File was ignored because it matched the exclude list {self.exclude}'}
            extracted_files = []
        else:
            logging.debug(f'Extracting {Path(file_path).name}')
            tmp_dir = TemporaryDirectory(prefix='fact_unpack_')
            (extracted_files, meta_data) = self.extract_files_from_file(file_path, tmp_dir.name)
            (extracted_files, meta_data) = self._do_fallback_if_necessary(extracted_files, meta_data, tmp_dir.name, file_path)
            extracted_files = self.move_extracted_files(extracted_files, Path(tmp_dir.name))
            compute_stats = self.config.getboolean('ExpertSettings', 'statistics', fallback=True)
            if compute_stats:
                binary = Path(file_path).read_bytes()
                add_unpack_statistics(self._file_folder, meta_data)
                get_unpack_status(file_path, binary, extracted_files, meta_data, self.config)
            self.cleanup(tmp_dir)
        Path(self._report_folder, 'meta.json').write_text(json.dumps(meta_data, cls=ReportEncoder, indent=4))
        return extracted_files

    def _do_fallback_if_necessary(self, extracted_files: List, meta_data: Dict, tmp_dir: str, file_path: str) -> Tuple[(List, Dict)]:
        """Retry with generic fs / carver plugins when the primary plugin found nothing."""
        if (meta_data.get('number_of_excluded_files', 0) > 0):
            return (extracted_files, meta_data)
        if ((not extracted_files) and (meta_data['plugin_used'] in self.FS_FALLBACK_CANDIDATES)):
            logging.warning(f"{meta_data['plugin_used']} could not extract any file from {file_path} -> generic fs fallback")
            (extracted_files, meta_data) = self.unpacking_fallback(file_path, tmp_dir, meta_data, 'generic/fs')
        if ((not extracted_files) and (meta_data['plugin_used'] not in self.CARVER_FALLBACK_BLACKLIST)):
            logging.warning(f"{meta_data['plugin_used']} could not extract any file from {file_path} -> generic carver fallback")
            (extracted_files, meta_data) = self.unpacking_fallback(file_path, tmp_dir, meta_data, 'generic/carver')
        return (extracted_files, meta_data)

    @staticmethod
    def cleanup(tmp_dir: TemporaryDirectory):
        """Best-effort removal of the temporary unpack directory.

        Bug fix: this was a plain method without ``self``, so the call
        ``self.cleanup(tmp_dir)`` in unpack() raised a TypeError.
        """
        try:
            tmp_dir.cleanup()
        except OSError as error:
            logging.error(f'Could not CleanUp tmp_dir: {error}', exc_info=True)

    def move_extracted_files(self, file_paths: List[str], extraction_dir: Path) -> List[Path]:
        """Move non-empty extracted files into the file folder, preserving
        their path relative to the extraction directory; returns new paths."""
        extracted_files = list()
        for item in file_paths:
            # Empty files are skipped unless extract_everything is enabled.
            if ((not file_is_empty(item)) or self.extract_everything):
                absolute_path = Path(item)
                relative_path = absolute_path.relative_to(extraction_dir)
                target_path = Path(self._file_folder, relative_path)
                target_path.parent.mkdir(parents=True, exist_ok=True)
                try:
                    shutil.move(str(absolute_path), str(target_path))
                    extracted_files.append(target_path)
                except OSError as error:
                    logging.error(f'Error occurred during move: {error}')
        return extracted_files
def _break_layernorm_groups(group: List[Operator]) -> List[List[Operator]]:
    """Split a group of layernorm operators into chunks of at most
    _MAX_LAYERNORM_GROUP operators each.

    The group is sorted in place by flattened normalized shape, largest
    first, before chunking.  Always returns a list of groups.
    """
    if (len(group) <= _MAX_LAYERNORM_GROUP):
        # Bug fix: the declared return type is a list of groups, so a group
        # that already fits must be wrapped rather than returned bare.
        return [group]
    group.sort(key=(lambda x: _get_layernorm_flattened_normalized_shape(x)), reverse=True)
    groups = []
    num_groups = (((len(group) + _MAX_LAYERNORM_GROUP) - 1) // _MAX_LAYERNORM_GROUP)
    for i in range(num_groups):
        begin = (i * _MAX_LAYERNORM_GROUP)
        end = min(((i + 1) * _MAX_LAYERNORM_GROUP), len(group))
        groups.append(group[begin:end])
    return groups
def test_current_cfg(env, robot):
    """Round-trip the current end-effector pose through IK and FK.

    Solves IK for the current pose, runs FK on the result, publishes the
    current pose to TF, and reports whether FK reproduces the pose within
    1e-4.  Returns the current end-effector pose matrix.
    """
    print('Test grasping in current position.')
    pose = robot.manip.get_ee_pose(matrix=True)
    (pos, quat) = robot.manip.get_ee_pose(matrix=False)
    cfg = robot.manip.solve_ik(pos, quat)
    print('Solve IK for current pose:', cfg)
    (fk_pos, fk_quat) = robot.manip.solve_fk(cfg)
    fk_pose = posquat2sophus(fk_pos, fk_quat)
    _send_predicted_grasp_to_tf(env.grasp_planner, 'current_ee_pose', pose)
    # Element-wise agreement between the FK reconstruction and the pose.
    minimal_error = np.all((np.abs((fk_pose.matrix() - pose)) < 0.0001))
    print('Minimal error between pose and fk pose:', minimal_error)
    print('Config =', cfg)
    # Bug fix: a stray trailing comma made this an accidental 1-tuple.
    cfg_manip = robot.manip._extract_joint_pos(cfg)
    print('Manip cfg =', cfg_manip)
    return pose
class ToBMPython():
code: str
_code: List[str]
bmg: BMGraphBuilder
node_to_var_id: Dict[(bn.BMGNode, int)]
node_to_func_id: Dict[(bn.BMGNode, int)]
dist_to_rv_id: Dict[(bn.BMGNode, int)]
no_dist_samples: Dict[(bn.BMGNode, int)]
queries: List[str]
observations: List[str]
def __init__(self, bmg: BMGraphBuilder) -> None:
self.code = ''
self._code = ['import beanmachine.ppl as bm', 'import torch']
self.bmg = bmg
self.node_to_var_id = {}
self.node_to_func_id = {}
self.dist_to_rv_id = {}
self.no_dist_samples = defaultdict((lambda : 0))
self.queries = []
self.observations = []
def _get_node_id_mapping(self, node: bn.BMGNode) -> str:
if (node in self.node_to_var_id):
return f'v{self.node_to_var_id[node]}'
elif (node in self.node_to_func_id):
return f'f{self.node_to_func_id[node]}()'
else:
raise InternalError('Unsupported node type {node}')
def _get_id(self) -> int:
return (len(self.node_to_var_id) + len(self.node_to_func_id))
def _no_dist_samples(self, node: bn.DistributionNode) -> int:
return sum((isinstance(o, bn.SampleNode) for o in node.outputs.items))
def _inputs(self, node: bn.BMGNode) -> str:
input_seq = []
for x in node.inputs:
if isinstance(x, bn.SampleNode):
input_seq.append(f'{self._get_node_id_mapping(x)}.wrapper(*{self._get_node_id_mapping(x)}.arguments)')
else:
input_seq.append(self._get_node_id_mapping(x))
inputs = ', '.join(input_seq)
return inputs
def _add_constant(self, node: bn.ConstantNode) -> None:
var_id = self._get_id()
self.node_to_var_id[node] = var_id
t = type(node)
v = node.value
if ((t is bn.PositiveRealNode) or (t is bn.NegativeRealNode) or (t is bn.ProbabilityNode) or (t is bn.RealNode)):
f = str(float(v))
elif (t is bn.NaturalNode):
f = str(int(v))
else:
f = str(float(v))
self._code.append(f'v{var_id} = {f}')
def _add_distribution(self, node: bn.DistributionNode) -> None:
distr_type = _node_type_to_distribution[type(node)]
i = self._inputs(node)
no_dist_samples = self._no_dist_samples(node)
rv_id = len(self.dist_to_rv_id)
self.dist_to_rv_id[node] = rv_id
if (no_dist_samples > 1):
param = 'i'
else:
param = ''
self._code.append(f'''_variable
def rv{rv_id}({param}):
return {distr_type}({i})''')
def _add_operator(self, node: bn.OperatorNode) -> None:
var_id = self._get_id()
operator_type = _node_type_to_operator[type(node)]
i = self._inputs(node)
has_samples = any((isinstance(x, bn.SampleNode) for x in node.inputs))
if has_samples:
self.node_to_func_id[node] = var_id
self._code.append(f'''
def f{var_id}():
return {operator_type}({i})''')
else:
self.node_to_var_id[node] = var_id
self._code.append(f'v{var_id} = {operator_type}({i})')
def _add_sample(self, node: bn.SampleNode) -> None:
var_id = self._get_id()
self.node_to_var_id[node] = var_id
rv_id = self.dist_to_rv_id[node.operand]
self.no_dist_samples[node.operand] += 1
total_samples = self._no_dist_samples(node.operand)
if (total_samples > 1):
param = f'{self.no_dist_samples[node.operand]}'
else:
param = ''
self._code.append(f'v{var_id} = rv{rv_id}({param})')
def _add_query(self, node: bn.Query) -> None:
self.queries.append(f'{self._get_node_id_mapping(node.operator)}')
def _add_observation(self, node: bn.Observation) -> None:
val = node.value
if isinstance(val, bool):
val = float(val)
self.observations.append(f'{self._get_node_id_mapping(node.observed)} : torch.tensor({val})')
def _generate_python(self, node: bn.BMGNode) -> None:
    """Dispatch one graph node to the matching code-emission helper.

    The check order matters: e.g. SampleNode must be tested before
    OperatorNode, exactly as in the original if/elif chain. Nodes that
    match no entry are silently ignored.
    """
    dispatch = (
        (bn.ConstantNode, self._add_constant),
        (bn.DistributionNode, self._add_distribution),
        (bn.SampleNode, self._add_sample),
        (bn.OperatorNode, self._add_operator),
        (bn.Query, self._add_query),
        (bn.Observation, self._add_observation),
    )
    for node_type, handler in dispatch:
        if isinstance(node, node_type):
            handler(node)
            break
def _generate_bm_python(self) -> str:
    """Generate Bean Machine Python source for the whole graph.

    Fixes graph problems first (raising on unfixable errors), emits code
    for every ancestor node, then appends the `queries` and
    `observations` declarations. Returns the joined source text, which
    is also stored on `self.code`.
    """
    fixed_graph, error_report = fix_problems(self.bmg)
    self.bmg = fixed_graph
    error_report.raise_errors()
    for node in self.bmg.all_ancestor_nodes():
        self._generate_python(node)
    query_csv = ','.join(self.queries)
    observation_csv = ','.join(self.observations)
    self._code.append(f'queries = [{query_csv}]')
    self._code.append(f'observations = {{{observation_csv}}}')
    self.code = '\n'.join(self._code)
    return self.code
()
('--query', default='', help='Query string to search Connections by name.')
('--page', type=int, default=1, help='Page number to display.')
_ctx
def connections(ctx: Context, query: str, page: int) -> None:
    """Search Connections by name and print one page of results.

    NOTE(review): the click option/argument decorators for this command
    appear to have been stripped in extraction (the bare tuples above this
    function look like their residue) — restore them from the original file.
    """
    item_type = CONNECTION
    # search_items returns multiple values that are splatted into the
    # output helper alongside the requested page number.
    _output_search_results(item_type, *search_items(ctx, item_type, query, page), page)
class BitsParser():
    """Parses Windows BITS (Background Intelligent Transfer Service) queue
    files and prints the recovered transfer jobs as JSON.

    Supports the legacy qmgr*.dat format and the Windows 10+ qmgr.db (ESE
    database) format, optionally carving deleted/partial records from either.
    """

    def __init__(self, queue_dir, carve_db, carve_all, out_file):
        self.queue_dir = queue_dir        # file or directory to process
        self.carve_db_files = carve_db    # carve records out of known DB files
        self.carve_all_files = carve_all  # carve records out of any file
        self.out_file = out_file          # output path; None means stdout
        self.sid_user_cache = {}          # SID string -> username (or None)
        self.visited_jobs = set()         # job hashes already emitted (dedupe)
        self.is_win_10 = True             # format guess; refined by run()

    def get_username_from_sid(self, sid):
        """Resolve a string SID to 'DOMAIN\\name', caching results.

        Returns None (and caches the failure) if the lookup raises.
        """
        if sid in self.sid_user_cache:
            return self.sid_user_cache[sid]
        try:
            (name, domain, _) = advapi32.LookupAccountSid(advapi32.ConvertStringSidToSid(sid))
            username = ((domain + '\\') + name)
            self.sid_user_cache[sid] = username
            return username
        except Exception as e:
            print((f'Failed to resolve sid {sid}: ' + str(e)), file=sys.stderr)
            # Cache negative results too, so failing SIDs are looked up once.
            self.sid_user_cache[sid] = None
            return None

    @staticmethod
    def is_qmgr_database(file_data):
        """Return True if the data carries the legacy qmgr.dat signature.

        NOTE(review): the literal below is 17 bytes long (the '\\x99'
        sequence spells backslash, 'x', '9', '9') but is compared against a
        16-byte slice, so this can never match — the escape looks mangled;
        verify against a known-good qmgr.dat header before trusting it.
        """
        if file_data[16:32] == b'\x13\xf7+\\x99\x12J\x9f\x1a:\xae\xbd\x89N\xea':
            return True
        return False

    @staticmethod
    def is_qmgr10_database(file_data):
        """Return True if the data carries the ESE database magic (qmgr.db)."""
        if file_data[4:8] == b'\xef\xcd\xab\x89':
            return True
        return False

    def load_qmgr_jobs(self, file_path):
        """Parse jobs from a legacy qmgr database file.

        When carving is enabled, every record the analyzer yields is kept;
        otherwise only cleanly parsed jobs are returned.
        """
        jobs = []
        analyzer = bits.Bits.load_file(file_path)
        if self.carve_db_files or self.carve_all_files:
            for job in analyzer:
                jobs.append(BitsJob(job, self))
        else:
            for job in analyzer.parse():
                jobs.append(BitsJob(job, self))
        return jobs

    def load_non_qmgr_jobs(self, file_data):
        """Carve legacy-format job records out of an arbitrary (non-qmgr) file."""
        jobs = []
        analyzer = bits.Bits()
        for sample in bits.sample_disk(file_data, XFER_HEADER, 2048):
            analyzer.append_data(sample)
        analyzer.guess_info()
        for job in analyzer:
            jobs.append(BitsJob(job, self))
        return jobs

    def parse_qmgr10_job(self, job_data):
        """Parse a single Windows 10 job record; return a BitsJob or None.

        Tries the JOB structure first, then CONTROL as a fallback, and
        attaches any file entries referenced by GUID (consuming them from
        self.file_entries). All parse failures yield None.
        """
        if len(job_data) < 128:
            return None
        try:
            # Sanity-check the embedded UTF-16 name length before parsing.
            name_length = struct.unpack_from('<L', job_data, 32)[0]
            if (32 + (name_length * 2)) > len(job_data):
                return None
            try:
                parsed_job = JOB.parse(job_data)
            except Exception:
                try:
                    parsed_job = CONTROL.parse(job_data)
                except Exception:
                    return None
            try:
                parsed_job['files'] = []
                # File references follow the XFER header: a count, then
                # 16-byte GUIDs linking to previously parsed file entries.
                xfer_parts = job_data.split(XFER_HEADER)
                file_ref_data = xfer_parts[1]
                num_file_refs = struct.unpack_from('<L', file_ref_data)[0]
                if (4 + (num_file_refs * 16)) > len(file_ref_data):
                    return None
                for i in range(0, num_file_refs):
                    cur_guid = file_ref_data[(4 + (i * 16)):(4 + ((i + 1) * 16))]
                    # pop() so file entries attached to a job are not also
                    # reported as standalone entries later.
                    file_job = self.file_entries.pop(cur_guid, None)
                    if file_job:
                        parsed_job['files'].extend(file_job['files'])
            except Exception:
                # Best effort: a job without resolvable file refs is still useful.
                pass
            new_job = BitsJob(parsed_job, self)
            return new_job
        except Exception:
            print(('Exception occurred parsing job: ' + traceback.format_exc()), file=sys.stderr)
            return None

    def parse_qmgr10_file(self, file_data, suppress_duplicates):
        """Parse a single Windows 10 file record into a minimal job dict.

        Returns {'files': [...], 'ctime': ...} or None on any failure.
        (`suppress_duplicates` is accepted for interface compatibility but
        unused here.)
        """
        if len(file_data) < 256:
            return None
        try:
            filename_length = struct.unpack_from('<L', file_data)[0]
            if (4 + (filename_length * 2)) > len(file_data):
                return None
            parsed_file = FILE.parse(file_data)
            cur_job = {}
            cur_job['files'] = [parsed_file]
            # FILETIME (100ns ticks since 1601-01-01); zero means "not set".
            filetime = struct.unpack_from('<Q', file_data, (parsed_file.offset + 29))[0]
            if filetime != 0:
                cur_job['ctime'] = (datetime.datetime(1601, 1, 1) + datetime.timedelta(microseconds=(filetime / 10)))
            return cur_job
        except Exception:
            return None

    @staticmethod
    def process_qmgr10_rows(table):
        """Yield (guid, payload) pairs from a 2-column ESE table.

        Handles long-value columns transparently; the first 16 bytes of the
        value column are skipped. Retained though unused by the code paths
        visible here.
        """
        for i in range(table.get_number_of_records()):
            cur_record = table.get_record(i)
            num_values = cur_record.get_number_of_values()
            if num_values != 2:
                continue
            try:
                if cur_record.is_long_value(0):
                    guid = cur_record.get_value_data_as_long_value(0).data
                else:
                    guid = cur_record.get_value_data(0)
                if cur_record.is_long_value(1):
                    val = cur_record.get_value_data_as_long_value(1).data
                else:
                    val = cur_record.get_value_data(1)
                if len(val) > 16:
                    yield (guid, val[16:])
            except Exception:
                pass

    def load_qmgr10_db(self, file_data):
        """Parse jobs from a Windows 10 qmgr.db ESE database.

        Reads the Files table first (populating self.file_entries keyed by
        GUID), then the Jobs table; file entries not claimed by any job are
        emitted as standalone jobs at the end.
        """
        jobs = []
        self.file_entries = {}
        ese = ESENT_DB(file_data)
        files_table = ese.openTable('Files')
        while True:
            file_record = ese.getNextRow(files_table)
            if file_record is None:
                break
            guid = file_record.get(b'Id')
            new_job = self.parse_qmgr10_file(file_record.get(b'Blob', b''), False)
            if guid and new_job:
                self.file_entries[guid] = new_job
        jobs_table = ese.openTable('Jobs')
        while True:
            job_record = ese.getNextRow(jobs_table)
            if job_record is None:
                break
            guid = job_record.get(b'Id')
            # The first 16 bytes of the blob are skipped (header/GUID).
            job_data = job_record.get(b'Blob', b'')[16:]
            new_job = self.parse_qmgr10_job(job_data)
            if guid and new_job:
                jobs.append(new_job)
        # Any file entries left over were not referenced by a job.
        for (guid, file_job) in self.file_entries.items():
            jobs.append(BitsJob(file_job, self))
        return jobs

    def carve_qmgr10_records(self, file_data):
        """Carve Windows 10 file and job records out of raw data.

        File records are located by delimiter scan; job records by splitting
        on each known job delimiter. Carved jobs are tagged 'Carved', and
        unclaimed carved file entries are emitted as standalone carved jobs.
        """
        jobs = []
        self.file_entries = {}
        cur_offset = file_data.find(WIN10_FILE_DELIMITER)
        while cur_offset > 0:
            next_offset = file_data.find(WIN10_FILE_DELIMITER, (cur_offset + len(WIN10_FILE_DELIMITER)))
            if next_offset > 0:
                file_job = self.parse_qmgr10_file(file_data[(cur_offset + 16):next_offset], True)
            else:
                file_job = self.parse_qmgr10_file(file_data[(cur_offset + 16):], True)
            if file_job:
                # The record's GUID sits just before the delimiter.
                guid = file_data[(cur_offset - 22):(cur_offset - 6)]
                self.file_entries[guid] = file_job
            cur_offset = next_offset
        for job_delimiter in WIN10_JOB_DELIMITERS:
            carved_jobs = file_data.split(job_delimiter)
            if len(carved_jobs) == 1:
                continue
            for i in range(1, len(carved_jobs)):
                new_job = self.parse_qmgr10_job(carved_jobs[i])
                if new_job:
                    new_job.job_dict['Carved'] = True
                    jobs.append(new_job)
        for (guid, carved_job) in self.file_entries.items():
            file_job = BitsJob(carved_job, self)
            file_job.job_dict['Carved'] = True
            jobs.append(file_job)
        return jobs

    def load_qmgr10_jobs(self, file_data):
        """Parse a qmgr.db, optionally also carving deleted records from it."""
        jobs = self.load_qmgr10_db(file_data)
        if self.carve_db_files or self.carve_all_files:
            jobs.extend(self.carve_qmgr10_records(file_data))
        return jobs

    def output_jobs(self, file_path, jobs):
        """Print each not-yet-seen, analysis-worthy job as indented JSON.

        NOTE(review): when out_file is set it is reopened with mode 'w' on
        every call, so processing multiple files truncates earlier output —
        the visited_jobs dedupe suggests append/open-once was intended;
        confirm and fix at the call-site level.
        """
        if self.out_file:
            orig_stdout = sys.stdout
            sys.stdout = open(self.out_file, 'w')
        try:
            for job in jobs:
                # Skip carved fragments with no analytic value.
                if job.is_carved() and (not job.is_useful_for_analysis()):
                    continue
                if job.hash not in self.visited_jobs:
                    formatted_job = json.dumps(job.job_dict, indent=4)
                    print(formatted_job)
                    self.visited_jobs.add(job.hash)
        finally:
            if self.out_file:
                sys.stdout.close()
                sys.stdout = orig_stdout

    def process_file(self, file_path):
        """Detect the format of one file, parse/carve its jobs, and emit them."""
        try:
            print(('Processing file ' + file_path), file=sys.stderr)
            file_data = None
            with open(file_path, 'rb') as f:
                file_data = f.read()
            jobs = []
            if BitsParser.is_qmgr_database(file_data):
                jobs = self.load_qmgr_jobs(file_path)
            elif BitsParser.is_qmgr10_database(file_data):
                jobs = self.load_qmgr10_jobs(file_data)
            elif self.carve_all_files:
                if self.is_win_10:
                    jobs = self.carve_qmgr10_records(file_data)
                else:
                    jobs = self.load_non_qmgr_jobs(file_data)
            self.output_jobs(file_path, jobs)
        except Exception:
            print((f'Exception occurred processing file {file_path}: ' + traceback.format_exc()), file=sys.stderr)

    def determine_directory_architecture(self, path):
        """Guess the queue format from which marker file exists in `path`."""
        if os.path.exists(((path + os.sep) + 'qmgr.db')):
            self.is_win_10 = True
        elif os.path.exists(((path + os.sep) + 'qmgr0.dat')):
            self.is_win_10 = False

    def run(self):
        """Process queue_dir: a single file directly, or every file in it."""
        if os.path.isfile(self.queue_dir):
            self.process_file(self.queue_dir)
            return
        self.determine_directory_architecture(self.queue_dir)
        for f in os.listdir(self.queue_dir):
            cur_path = ((self.queue_dir + os.sep) + f)
            if not os.path.isfile(cur_path):
                continue
            self.process_file(cur_path)
('ExodusIIReader support is disabled for now.')
class TestExodusIIReader(DataReaderTestBase):
    """Exercises reading the disk_out_ref ExodusII example dataset."""

    def setup_reader(self):
        # Load the example ExodusII file into an unstructured-grid reader
        # and register it as a source on the engine.
        reader = UnstructuredGridReader()
        reader.initialize(get_example_data('disk_out_ref.ex2'))
        self.e.add_source(reader)

    def test_point_cell_data(self):
        # The reader output should expose the known point/cell array and
        # tuple counts for this dataset.
        grid = self.scene.children[0].outputs[0]
        self.assertEqual(grid.point_data.number_of_arrays, 7)
        self.assertEqual(grid.cell_data.number_of_arrays, 1)
        self.assertEqual(grid.point_data.number_of_tuples, 8499)
        self.assertEqual(grid.cell_data.number_of_tuples, 7472)
def get_repository(client, repository=''):
    """Return the definition of `repository` from the Elasticsearch client.

    Raises CuratorException (chained from the original error) when the
    repository cannot be fetched.
    """
    try:
        response = client.snapshot.get_repository(name=repository)
    except (es8exc.TransportError, es8exc.NotFoundError) as err:
        raise CuratorException(
            f'Unable to get repository {repository}. Error: {err} '
            'Check Elasticsearch logs for more information.'
        ) from err
    return response
class _ImageForm(SerializerForm):
    """Django form for editing a server Image's metadata.

    Owner choices and tag choices are populated per-request in __init__,
    scoped to the request's datacenter.
    """
    # name must match [A-Za-z0-9._-]+ and is locked once created
    # (the 'disable_created' CSS hook — presumably handled client-side; verify).
    name = forms.CharField(label=_('Name'), max_length=32, required=True, widget=forms.TextInput(attrs={'class': 'input-transparent narrow disable_created', 'required': 'required', 'pattern': '[A-Za-z0-9._-]+'}))
    alias = forms.CharField(label=_('Alias'), required=True, max_length=32, widget=forms.TextInput(attrs={'class': 'input-transparent narrow', 'required': 'required'}))
    version = forms.CharField(label=_('Version'), required=False, max_length=16, widget=forms.TextInput(attrs={'class': 'input-transparent narrow', 'required': 'required'}))
    owner = forms.ChoiceField(label=_('Owner'), required=False, widget=forms.Select(attrs={'class': 'narrow input-select2'}))
    access = forms.TypedChoiceField(label=_('Access'), required=False, coerce=int, choices=Image.ACCESS, widget=forms.Select(attrs={'class': 'narrow input-select2'}))
    desc = forms.CharField(label=_('Description'), max_length=128, required=False, widget=forms.TextInput(attrs={'class': 'input-transparent wide', 'required': ''}))
    resize = forms.BooleanField(label=_('Resizable?'), required=False, help_text=_('Image is able to resize the disk during an initial start or deploy process.'), widget=forms.CheckboxInput(attrs={'class': 'normal-check'}))
    deploy = forms.BooleanField(label=_('Shutdown after deploy?'), required=False, help_text=_('Image is able to shut down the server after an initial start and successful deploy.'), widget=forms.CheckboxInput(attrs={'class': 'normal-check'}))
    tags = TagField(label=_('Tags'), required=False, help_text=_('Tags will be inherited by servers which will use this image.'), widget=TagWidget(attrs={'class': 'tags-select2 narrow'}))

    def __init__(self, request, img, *args, **kwargs):
        """Populate dynamic choices from the request context.

        Owners: usernames visible to this request. Tags: distinct tag names
        already attached to VMs in the request's datacenter.
        """
        super(_ImageForm, self).__init__(request, img, *args, **kwargs)
        self.fields['owner'].choices = get_owners(request).values_list('username', 'username')
        self.fields['tags'].tag_choices = TagVm.objects.distinct().filter(content_object__in=Vm.objects.filter(dc=request.dc)).order_by('tag__name').values_list('tag__name', flat=True)
class TableConfig(OptionsWithTemplates):
    """Options wrapper mapping Python attributes onto ag-grid gridOptions.

    NOTE(review): every option below appears twice — once returning
    `self._config_get()` and once calling `self._config(...)`. These were
    almost certainly `@property` / `@<name>.setter` pairs whose decorators
    were stripped in extraction (as written, the later def simply shadows
    the earlier one). Restore the decorators from the original file.
    """
    # Sub-structures that hold nested option objects.
    _struct__schema = {'autoGroupColumnDef': {}, 'defaultColDef': {}, 'TableStatusBar': {}, 'columns': []}

    def alignedGrids(self):
        return self._config_get()

    def alignedGrids(self, val):
        self._config(val)

    def allowContextMenuWithControlKey(self):
        return self._config_get()

    def allowContextMenuWithControlKey(self, flag: bool):
        self._config(flag)

    def animateRows(self):
        return self._config_get()

    def animateRows(self, flag: bool):
        self._config(flag)

    def autoGroupColumnDef(self) -> DefaultColDef:
        # Lazily created nested option object.
        return self._config_sub_data('autoGroupColumnDef', DefaultColDef)

    def cacheBlockSize(self):
        return self._config_get()

    def cacheBlockSize(self, num: int):
        self._config(num)

    def colResizeDefault(self):
        return self._config_get()

    def colResizeDefault(self, val):
        self._config(val)

    def columns(self) -> Column:
        # Each access appends/returns a Column under the 'columnDefs' list.
        return self._config_sub_data_enum('columnDefs', Column)

    def columnTypes(self):
        return self._config_get()

    def columnTypes(self, data: str):
        # Passed through as raw JavaScript, not a quoted string.
        self._config(data, js_type=True)

    def defaultColDef(self) -> DefaultColDef:
        return self._config_sub_data('defaultColDef', DefaultColDef)

    def data(self):
        # Python-side alias for ag-grid's 'rowData' option.
        return self._config_get(name='rowData')

    def data(self, val):
        self._config(val, name='rowData')

    def debug(self):
        return self._config_get()

    def debug(self, flag: bool):
        self._config(flag)

    def deltaColumnMode(self):
        return self._config_get()

    def deltaColumnMode(self, val: str):
        self._config(val)

    def editType(self):
        return self._config_get()

    def editType(self, value: str):
        self._config(value)

    def enableColResize(self):
        return self._config_get()

    def enableColResize(self, flag: bool):
        self._config(flag)

    def enablePivot(self):
        return self._config_get()

    def enablePivot(self, flag: bool):
        self._config(flag)

    def enableRangeSelection(self):
        return self._config_get()

    def enableRangeSelection(self, flag: bool):
        self._config(flag)

    def enableValue(self):
        return self._config_get()

    def enableValue(self, flag: bool):
        self._config(flag)

    def ensureDomOrder(self):
        return self._config_get()

    def ensureDomOrder(self, val):
        self._config(val)

    def enterMovesDown(self):
        return self._config_get()

    def enterMovesDown(self, val):
        self._config(val)

    def enterMovesDownAfterEdit(self):
        return self._config_get()

    def enterMovesDownAfterEdit(self, val):
        self._config(val)

    def functionsReadOnly(self):
        return self._config_get()

    def functionsReadOnly(self, flag: bool):
        self._config(flag)

    def getServerSideGroupKey(self, js_funcs: etypes.JS_FUNCS_TYPES, profile: etypes.PROFILE_TYPE=None, func_ref: bool=False):
        """Install a JS callback; wraps bare expressions in
        `function(dataItem){return ...}` unless func_ref says the caller
        already passed a function reference."""
        if (not isinstance(js_funcs, list)):
            js_funcs = [js_funcs]
        str_func = JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)
        if ((not str_func.startswith('function(dataItem)')) and (not func_ref)):
            if ('return ' not in str_func):
                str_func = ('return %s' % str_func)
            str_func = ('function(dataItem){%s}' % str_func)
        self._config(str_func, js_type=True)

    def getContextMenuItems(self):
        # Not implemented.
        pass

    def getRowClass(self, js_funcs: etypes.JS_FUNCS_TYPES, profile: etypes.PROFILE_TYPE=None, func_ref: bool=False):
        """Install a JS callback receiving `params`; wraps plain statements
        in `function(params){...}` unless already a function/func_ref."""
        if (not isinstance(js_funcs, list)):
            js_funcs = [js_funcs]
        str_func = JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)
        if ((not str_func.startswith('function(params)')) and (not func_ref)):
            str_func = ('function(params){%s}' % str_func)
        self._config(str_func, js_type=True)

    def groupAggFiltering(self):
        return self._config_get()

    def groupAggFiltering(self, flag: bool):
        self._config(flag)

    def groupIncludeFooter(self):
        return self._config_get()

    def groupIncludeFooter(self, flag: bool):
        self._config(flag)

    def groupIncludeTotalFooter(self):
        return self._config_get()

    def groupIncludeTotalFooter(self, flag: bool):
        self._config(flag)

    def isGroupOpenByDefault(self, js_funcs: etypes.JS_FUNCS_TYPES, profile: etypes.PROFILE_TYPE=None, func_ref: bool=False):
        self._config_func(js_funcs=js_funcs, profile=profile, func_ref=func_ref)

    def isServerSideGroup(self, js_funcs: etypes.JS_FUNCS_TYPES, profile: etypes.PROFILE_TYPE=None, func_ref: bool=False):
        # Same wrapping rules as getServerSideGroupKey.
        if (not isinstance(js_funcs, list)):
            js_funcs = [js_funcs]
        str_func = JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)
        if ((not str_func.startswith('function(dataItem)')) and (not func_ref)):
            if ('return ' not in str_func):
                str_func = ('return %s' % str_func)
            str_func = ('function(dataItem){%s}' % str_func)
        self._config(str_func, js_type=True)

    def maxBlocksInCache(self):
        return self._config_get()

    def maxBlocksInCache(self, num: int):
        self._config(num)

    def onGridReady(self, js_funcs: etypes.JS_FUNCS_TYPES, profile: etypes.PROFILE_TYPE=None, func_ref: bool=False):
        self._config_func(js_funcs=js_funcs, profile=profile, func_ref=func_ref)

    def on(self, event_type: str, js_funcs: etypes.JS_FUNCS_TYPES, profile: etypes.PROFILE_TYPE=None, func_ref: bool=False):
        """Generic event hookup; normalizes 'cellClicked' -> 'onCellclicked'
        style names by prefixing 'on' when missing."""
        if (not event_type.startswith('on')):
            event_type = ('on%s' % event_type.capitalize())
        self._config_func(js_funcs=js_funcs, profile=profile, func_ref=func_ref, name=event_type)

    def onCellEditingStopped(self, js_funcs: etypes.JS_FUNCS_TYPES, profile: etypes.PROFILE_TYPE=None, func_ref: bool=False):
        self._config_func(js_funcs=js_funcs, profile=profile, func_ref=func_ref)

    def onCellValueChanged(self, js_funcs: etypes.JS_FUNCS_TYPES, profile: etypes.PROFILE_TYPE=None, func_ref: bool=False):
        self._config_func(js_funcs=js_funcs, profile=profile, func_ref=func_ref)

    def onRowClicked(self, js_funcs: etypes.JS_FUNCS_TYPES, profile: etypes.PROFILE_TYPE=None, func_ref: bool=False):
        self._config_func(js_funcs=js_funcs, profile=profile, func_ref=func_ref)

    def onSelectionChanged(self, js_funcs: etypes.JS_FUNCS_TYPES, profile: etypes.PROFILE_TYPE=None, func_ref: bool=False):
        # Selection events require a rowSelection mode; default to 'single'.
        if (self.rowSelection is None):
            self.rowSelection = 'single'
        self._config_func(js_funcs=js_funcs, profile=profile, func_ref=func_ref)

    def onColumnResized(self, js_funcs: etypes.JS_FUNCS_TYPES, profile: etypes.PROFILE_TYPE=None, func_ref: bool=False):
        self._config_func(js_funcs=js_funcs, profile=profile, func_ref=func_ref)

    def onPaginationChanged(self, js_funcs: etypes.JS_FUNCS_TYPES, profile: etypes.PROFILE_TYPE=None, func_ref: bool=False):
        self._config_func(js_funcs=js_funcs, profile=profile, func_ref=func_ref)

    def overlayLoadingTemplate(self):
        return self._config_get()

    def overlayLoadingTemplate(self, val: str):
        self._config(val)

    def overlayNoRowsTemplate(self):
        return self._config_get()

    def overlayNoRowsTemplate(self, val: str):
        self._config(val)

    def paginateChildRows(self):
        return self._config_get(False)

    def paginateChildRows(self, flag: bool):
        self._config(flag)

    def pagination(self):
        return self._config_get()

    def pagination(self, val):
        self._config(val)

    def paginationPageSize(self):
        # ag-grid's default page size.
        return self._config_get(100)

    def paginationPageSize(self, num: int):
        # Setting a positive page size implicitly enables pagination.
        self.pagination = (num > 0)
        self._config(num)

    def paginationAutoPageSize(self):
        return self._config_get()

    def paginationAutoPageSize(self, num: int):
        self._config(num)

    def paginationNumberFormatter(self):
        # Not implemented.
        pass

    def popupParent(self):
        return self._config_get()

    def popupParent(self, data: str):
        self._config(data, js_type=True)

    def pivotMode(self):
        return self._config_get()

    def pivotMode(self, flag: bool):
        self._config(flag)

    def pivotPanelShow(self):
        return self._config_get()

    def pivotPanelShow(self, val: str):
        self._config(val)

    def groupDefaultExpanded(self):
        return self._config_get()

    def groupDefaultExpanded(self, val: int):
        # Enterprise-only feature: warn (in verbose mode) on community builds.
        if (self.component.options.verbose and self.page.imports.pkgs.ag_grid.community_version):
            logging.warning('groupDefaultExpanded not available in the community version')
        self._config(val)

    def groupSelectsChildren(self):
        return self._config_get()

    def groupSelectsChildren(self, flag: bool):
        self._config(flag)

    def rowModelType(self):
        return self._config_get()

    def rowModelType(self, value: str):
        self._config(value)

    def rowClass(self):
        return self._config_get(None)

    def rowClass(self, values: Union[(str, list)]):
        self._config(values)

    def rowClassRules(self):
        return self._config_get(None)

    def rowClassRules(self, values: Union[(str, dict)]):
        self._config(values)

    def rowHeight(self):
        # ag-grid's default row height in pixels.
        return self._config_get(42)

    def rowHeight(self, num: Union[(bool, int)]):
        """Set the row height; a falsy value removes the custom CSS instead.

        NOTE(review): in the else branch, `theme` is only assigned inside the
        `row_height_px < 42` check, yet the second add_text call below uses
        it unconditionally — heights >= 42 would raise NameError. Confirm
        against the original (the theme lookup likely belongs before the if).
        """
        if (not num):
            self.remove()
            self.page.properties.css.remove_text('aggrid_row_height')
            theme = (self.component.style.theme_name or 'alpine')
            self.page.properties.css.remove_text(('aggrid_row_height_%s' % theme))
        else:
            row_height_px = int(num)
            if (row_height_px < 42):
                theme = (self.component.style.theme_name or 'alpine')
                self.page.properties.css.add_text(('\n.ag-theme-%s .ag-cell {\n line-height: %spx !important; \n}' % (theme, row_height_px)), map_id=('aggrid_row_height_%s' % theme), replace=True)
            self.page.properties.css.add_text(('\n.ag-theme-%s {\n --ag-grid-size: 3px;\n --ag-list-item-height: %spx;\n} ' % (theme, row_height_px)), map_id='aggrid_row_height', replace=True)
            self._config(row_height_px)

    def rowStyle(self):
        return self._config_get(None)

    def rowStyle(self, values: Union[(dict, str)]):
        self._config(values)

    def singleClickEdit(self):
        return self._config_get()

    def singleClickEdit(self, val):
        self._config(val)

    def suppressAggFilteredOnly(self):
        return self._config_get()

    def suppressAggFilteredOnly(self, flag: bool):
        self._config(flag)

    def suppressClickEdit(self):
        return self._config_get()

    def suppressClickEdit(self, val):
        self._config(val)

    def suppressCutToClipboard(self):
        return self._config_get()

    def suppressCutToClipboard(self, val):
        self._config(val)

    def suppressPaginationPanel(self):
        return self._config_get(False)

    def suppressPaginationPanel(self, flag: bool):
        self._config(flag)

    def suppressPaste(self):
        return self._config_get()

    def suppressPaste(self, flag: bool):
        self._config(flag)

    def suppressCopySingleCellRanges(self):
        return self._config_get()

    def suppressCopySingleCellRanges(self, flag: bool):
        self._config(flag)

    def suppressRowHoverHighlight(self):
        return self._config_get()

    def suppressRowHoverHighlight(self, flag: bool):
        self._config(flag)

    def columnHoverHighlight(self):
        return self._config_get()

    def columnHoverHighlight(self, flag: bool):
        self._config(flag)

    def suppressScrollOnNewData(self):
        return self._config_get()

    def suppressScrollOnNewData(self, val):
        self._config(val)

    def suppressColumnVirtualisation(self):
        return self._config_get()

    def suppressColumnVirtualisation(self, val):
        self._config(val)

    def suppressDragLeaveHidesColumns(self):
        return self._config_get()

    def suppressDragLeaveHidesColumns(self, val):
        self._config(val)

    def suppressExcelExport(self):
        return self._config_get()

    def suppressExcelExport(self, flag: bool):
        self._config(flag)

    def rowMultiSelectWithClick(self):
        return self._config_get()

    def rowMultiSelectWithClick(self, val):
        self._config(val)

    def rowDeselection(self):
        return self._config_get()

    def rowDeselection(self, val):
        self._config(val)

    def rowSelection(self):
        return self._config_get()

    def rowSelection(self, val):
        self._config(val)

    def rowBuffer(self):
        return self._config_get()

    def rowBuffer(self, val):
        self._config(val)

    def rowGroupPanelShow(self):
        return self._config_get()

    def rowGroupPanelShow(self, val: str):
        # Enterprise-only feature: warn (in verbose mode) on community builds.
        if (self.component.options.verbose and self.page.imports.pkgs.ag_grid.community_version):
            logging.warning('rowGroupPanelShow not available in the community version')
        self._config(val)

    def rowTotal(self):
        return self._config_get(False)

    def rowTotal(self, flag: bool):
        self._config(flag)

    def suppressRowClickSelection(self):
        return self._config_get()

    def suppressRowClickSelection(self, flag: bool):
        self._config(flag)

    def suppressAggFuncInHeader(self):
        return self._config_get()

    def suppressAggFuncInHeader(self, flag: bool):
        self._config(flag)

    def suppressRowTransform(self):
        return self._config_get()

    def suppressRowTransform(self, flag: bool):
        self._config(flag)

    def sortingOrder(self):
        return self._config_get()

    def sortingOrder(self, val):
        self._config(val)

    def sideBar(self):
        return self._config_get()

    def sideBar(self, val: Union[(str, dict)]):
        self._config(val)

    def sideBars(self):
        # Enum-style helper for building the sideBar option.
        return EnumSidebar(self, 'sideBar')

    def statusBar(self) -> TableStatusBar:
        return self._config_sub_data('statusBar', TableStatusBar)

    def treeData(self):
        return self._config_get()

    def treeData(self, flag: bool):
        # Enterprise-only feature: warn (in verbose mode) on community builds.
        if (self.component.options.verbose and self.page.imports.pkgs.ag_grid.community_version):
            logging.warning('treeData not available in the community version')
        self._config(flag)

    def valueCache(self):
        return self._config_get(False)

    def valueCache(self, flag: bool):
        self._config(flag)

    def valueCacheNeverExpires(self):
        return self._config_get(False)

    def valueCacheNeverExpires(self, flag: bool):
        self._config(flag)
def forward(model: Model[(InT, OutT)], Xr: InT, is_train: bool) -> Tuple[(OutT, Callable[([OutT], InT)])]:
    """Reduce each ragged sequence to its final element.

    Returns the reduced array and a backprop callback that scatters the
    incoming gradient back to the last position of every sequence.
    """
    values, last_indices = model.ops.reduce_last(cast(Floats2d, Xr.data), Xr.lengths)
    array_info = ArrayInfo.from_array(values)

    def backprop(dY: OutT) -> InT:
        # Validate the gradient matches the forward output's shape/dtype.
        array_info.check_consistency(dY)
        grad = model.ops.backprop_reduce_last(dY, last_indices)
        return Ragged(grad, Xr.lengths)

    return values, backprop
.parametrize('mask, expected', [pytest.param(([True] * 5), [(- 0.), 1., 1., (- 0.), 0.], id='Sampling all values, checking that we get length of 5'), pytest.param([False, True, False, True], [1., (- 0.)], id='Sampling a subset of parameters (at index 1 and 4), checkingthat those values match the corresponding values from the fullsample at the same index')])
def test_that_sub_sample_maintains_order(tmpdir, storage, mask, expected):
    """Check sample_prior on a realization subset preserves per-index values.

    Sampling only the realizations where `mask` is True must yield the same
    values those indices would get when all realizations are sampled.
    NOTE(review): the pytest.mark.parametrize decorator supplying `mask` and
    `expected` appears above this function but its '@' was stripped in
    extraction — restore it from the original file.
    """
    with tmpdir.as_cwd():
        # Minimal ert config with one normally distributed GEN_KW parameter
        # and a fixed seed so sampled values are reproducible.
        config = dedent('\n    NUM_REALIZATIONS 5\n    RANDOM_SEED 1234\n    GEN_KW KW_NAME template.txt kw.txt prior.txt\n    ')
        with open('config.ert', 'w', encoding='utf-8') as fh:
            fh.writelines(config)
        with open('template.txt', 'w', encoding='utf-8') as fh:
            fh.writelines('MY_KEYWORD <MY_KEYWORD>')
        with open('prior.txt', 'w', encoding='utf-8') as fh:
            fh.writelines('MY_KEYWORD NORMAL 0 1')
        ert_config = ErtConfig.from_file('config.ert')
        fs = storage.create_ensemble(storage.create_experiment(ert_config.ensemble_config.parameter_configuration), name='prior', ensemble_size=5)
        # Sample only the active realization indices.
        sample_prior(fs, [i for (i, active) in enumerate(mask) if active], random_seed=1234)
        assert (fs.load_parameters('KW_NAME')['values'].sel(names='MY_KEYWORD').values.ravel().tolist() == expected)
class GetBlockBodiesTracker(BasePerformanceTracker[(GetBlockBodiesV65, BlockBodyBundles)]):
    """Performance accounting for GetBlockBodies request/response pairs."""

    def _get_request_size(self, request: GetBlockBodiesV65) -> Optional[int]:
        # One unit per requested block hash in the payload.
        return len(request.payload)

    def _get_result_size(self, result: BlockBodyBundles) -> int:
        # Number of block-body bundles returned.
        return len(result)

    def _get_result_item_count(self, result: BlockBodyBundles) -> int:
        # Total uncles plus transactions across every returned body.
        total = 0
        for body, _trie_data, _uncles_hash in result:
            total += len(body.uncles) + len(body.transactions)
        return total
class SubprocessException(Exception):
    """Raised when a subprocess fails; carries its captured output and code."""

    def __init__(self, stdout, stderr, returncode):
        super().__init__()
        self.stdout: str = stdout        # captured standard output
        self.stderr: str = stderr        # captured standard error
        self.returncode: int = returncode  # process exit status

    def __str__(self):
        # Multi-line report: return code, then stdout, then stderr.
        sections = (
            f'Return Code: {self.returncode}',
            'Stdout:',
            self.stdout,
            'Stderr:',
            self.stderr,
        )
        return '\n'.join(sections)
class RmtooTreeModel(GObject.GObject, Gtk.TreeModel):
    """Custom GTK TreeModel exposing a topic continuum set as a tree.

    NOTE(review): this class is clearly work-in-progress debugging code —
    several methods contain unreachable statements after `assert False` or
    an early `return`, reference a `self.files` attribute that is never
    assigned here, and print tracing output. The unreachable code is kept
    verbatim below.
    """
    column_names = ['Requirement']
    column_types = (str,)

    def __init__(self, topic_continuum_set):
        GObject.GObject.__init__(self)
        self.__topic_continuum_set = topic_continuum_set

    def get_column_names(self):
        return self.column_names

    def on_get_flags(self):
        # No special flags (e.g. not LIST_ONLY, no persistent iters).
        return 0

    def on_get_n_columns(self):
        return len(self.column_types)

    def on_get_column_type(self, n):
        return self.column_types[n]

    def on_get_iter(self, path):
        # Only the root path (0,...) is supported; anything else trips the
        # assert. The trailing `self.files` return is unreachable.
        print(('NEW ITER PATH [%s]' % path))
        if (path[0] == 0):
            return TopicContinuumSetIterator(self.__topic_continuum_set)
        assert False
        return self.files[path[0]]

    def on_get_path(self, rowref):
        # Unimplemented: always asserts; the return is unreachable.
        assert False
        return self.files.index(rowref)

    def on_get_value(self, rowref, column):
        # Returns the key of the iterator's current (key, value) pair.
        # The trailing `assert False` is unreachable.
        print(('GET VALUE COL [%s]' % column))
        (key, value) = rowref.current()
        print(('Current value [%s] [%s]' % (key, value)))
        print(('TYPE KEY [%s]' % type(key)))
        return key
        assert False

    def on_iter_next(self, rowref):
        # Advance the iterator; None signals the end. Everything after the
        # try/except is unreachable.
        print(('ON ITER NEXT [%s]' % rowref))
        try:
            return rowref.next()
        except StopIteration:
            return None
        assert False
        try:
            i = (self.files.index(rowref) + 1)
            return self.files[i]
        except IndexError:
            return None

    def on_iter_children(self, rowref):
        # Delegates to the iterator; the non-None branch always returns, so
        # the assert and the code after it only run for rowref is None.
        print(('On ITER CHILDERN [%s]' % rowref))
        if (rowref is not None):
            return rowref.iter_children()
        assert False
        if rowref:
            return None
        return self.files[0]

    def on_iter_has_child(self, rowref):
        # Delegates to the iterator; the type_name dispatch below is
        # unreachable (kept from an earlier implementation).
        print(('ON ITER HAS CHILD [%s]' % rowref))
        return rowref.has_child()
        type_name = rowref.type_name()
        if (type_name == 'topic_continuum'):
            (key, value) = rowref.current()
            return (len(value.get_vcs_commit_ids()) > 0)
        if (type_name == 'topics'):
            return (rowref.current() is not None)
        assert False
        return False

    def on_iter_n_children(self, rowref):
        # Unimplemented: always asserts; the rest is unreachable.
        assert False
        if rowref:
            return 0
        return len(self.files)

    def on_iter_nth_child(self, rowref, n):
        # Only supported from the root (rowref None): build a fresh iterator
        # and advance it n steps. Non-root requests assert.
        print(('ON ITER NTH CHILD [%s] [%s]' % (rowref, n)))
        if (rowref is None):
            iter = TopicContinuumSetIterator(self.__topic_continuum_set)
            advance(iter, n)
            return iter
        assert False
        return None
        assert False
        if rowref:
            return None
        try:
            return self.files[n]
        except IndexError:
            return None

    def on_iter_parent(child):
        # NOTE(review): missing `self` parameter — as written, the instance
        # is bound to `child`. Unimplemented either way (always asserts).
        assert False
        return None
class RegressionPredictedVsActualScatter(Metric[RegressionPredictedVsActualScatterResults]):
    """Metric producing predicted-vs-actual scatter data for regression.

    Emits raw scatter points when raw-data rendering is requested or a
    density contour is not feasible; otherwise emits aggregated gaussian
    KDE data for current and (optionally) reference datasets.
    """

    def __init__(self, options: AnyOptions=None):
        super().__init__(options=options)

    def calculate(self, data: InputData) -> RegressionPredictedVsActualScatterResults:
        """Build scatter (raw) or KDE (aggregated) results from the input data.

        Raises ValueError when target/prediction columns are missing or when
        the prediction mapping names multiple columns.
        """
        dataset_columns = process_columns(data.current_data, data.column_mapping)
        target_name = dataset_columns.utility_columns.target
        prediction_name = dataset_columns.utility_columns.prediction
        curr_df = data.current_data
        ref_df = data.reference_data
        if ((target_name is None) or (prediction_name is None)):
            raise ValueError("The columns 'target' and 'prediction' columns should be present")
        if (not isinstance(prediction_name, str)):
            raise ValueError('Expect one column for prediction. List of columns was provided.')
        # Copies with +/-inf converted to NaN and NaN rows dropped.
        curr_df = self._make_df_for_plot(curr_df.copy(), target_name, prediction_name, None)
        if (ref_df is not None):
            ref_df = self._make_df_for_plot(ref_df.copy(), target_name, prediction_name, None)
        # Raw path: explicit raw-data option, or a KDE contour is not
        # feasible for either dataset.
        if (self.get_options().render_options.raw_data or (not is_possible_contour(curr_df[prediction_name], curr_df[target_name])) or ((ref_df is not None) and (not is_possible_contour(ref_df[prediction_name], ref_df[target_name])))):
            # Duplicates add nothing to a scatter plot; drop to shrink payload.
            curr_df.drop_duplicates(subset=[prediction_name, target_name], inplace=True)
            current_scatter = PredActualScatter(predicted=curr_df[prediction_name], actual=curr_df[target_name])
            reference_scatter: Optional[PredActualScatter] = None
            if (ref_df is not None):
                ref_df.drop_duplicates(subset=[prediction_name, target_name], inplace=True)
                reference_scatter = PredActualScatter(predicted=ref_df[prediction_name], actual=ref_df[target_name])
            return RegressionPredictedVsActualScatterResults(current=current_scatter, reference=reference_scatter, agg_data=False)
        # Aggregated path: gaussian KDE density per dataset.
        current_agg = AggPredActualScatter(data=get_gaussian_kde(curr_df[prediction_name], curr_df[target_name]))
        reference_agg = AggPredActualScatter(data=None)
        if (ref_df is not None):
            reference_agg = AggPredActualScatter(data=get_gaussian_kde(ref_df[prediction_name], ref_df[target_name]))
        return RegressionPredictedVsActualScatterResults(current=current_agg, reference=reference_agg, agg_data=True)

    def _make_df_for_plot(self, df, target_name: str, prediction_name: str, datetime_column_name: Optional[str]):
        """Return df cleaned for plotting: infinities -> NaN, NaN rows dropped,
        sorted by the datetime column when given, else by index."""
        result = df.replace([np.inf, (- np.inf)], np.nan)
        if (datetime_column_name is not None):
            result.dropna(axis=0, how='any', inplace=True, subset=[target_name, prediction_name, datetime_column_name])
            return result.sort_values(datetime_column_name)
        result.dropna(axis=0, how='any', inplace=True, subset=[target_name, prediction_name])
        return result.sort_index()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.