code stringlengths 281 23.7M |
|---|
class CollisionLinks(object):
    """Links section of a collision resource (swagger-generated model).

    Holds hyperlinks to the collision itself and to its related trip and
    vehicle resources.  The attribute is named ``_self`` to avoid clashing
    with ``self``; ``attribute_map`` maps it back to the JSON key ``"self"``.

    BUG FIX: the ``@property`` / ``@x.setter`` decorators had been stripped
    (the bare ``_self.setter`` expression raised at class creation and the
    duplicated ``def trip``/``def vehicle`` shadowed each other); they are
    restored below.  The py2 ``six.iteritems`` call was replaced by plain
    dict iteration.
    """

    # attribute name -> swagger type of the attribute
    swagger_types = {'_self': 'Link', 'trip': 'Link', 'vehicle': 'Link'}
    # attribute name -> JSON key used on the wire
    attribute_map = {'_self': 'self', 'trip': 'trip', 'vehicle': 'vehicle'}

    def __init__(self, _self=None, trip=None, vehicle=None):
        self.__self = None
        self._trip = None
        self._vehicle = None
        self.discriminator = None
        # Assign through the property setters only when a value was given.
        if _self is not None:
            self._self = _self
        if trip is not None:
            self.trip = trip
        if vehicle is not None:
            self.vehicle = vehicle

    @property
    def _self(self):
        """Link to this collision resource."""
        return self.__self

    @_self.setter
    def _self(self, _self):
        self.__self = _self

    @property
    def trip(self):
        """Link to the trip associated with this collision."""
        return self._trip

    @trip.setter
    def trip(self, trip):
        self._trip = trip

    @property
    def vehicle(self):
        """Link to the vehicle associated with this collision."""
        return self._vehicle

    @vehicle.setter
    def vehicle(self, vehicle):
        self._vehicle = vehicle

    def to_dict(self):
        """Return the model's properties as a recursively plain dict."""
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [x.to_dict() if hasattr(x, 'to_dict') else x
                                for x in value]
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {k: v.to_dict() if hasattr(v, 'to_dict') else v
                                for k, v in value.items()}
            else:
                result[attr] = value
        # Swagger codegen boilerplate: merge dict-subclass items if applicable.
        if issubclass(CollisionLinks, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Models are equal when they are the same type with equal state."""
        if not isinstance(other, CollisionLinks):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not (self == other)
class Model(SkillComponent, ABC):
    """Abstract skill model: shared state kept by a skill's handlers/behaviours.

    :param name: the name of the model.
    :param skill_context: the skill context.
    :param configuration: the component configuration.
    :param keep_terminal_state_dialogues: if not None, overrides the class
        default for keeping dialogues that reached a terminal state.
    :param kwargs: extra keyword arguments forwarded to SkillComponent.
    """

    def __init__(
        self,
        name: str,
        skill_context: SkillContext,
        configuration: Optional[SkillComponentConfiguration] = None,
        keep_terminal_state_dialogues: Optional[bool] = None,
        **kwargs: Any,
    ) -> None:
        super().__init__(name, skill_context, configuration=configuration, **kwargs)
        # Only override the class-level default when explicitly requested.
        if keep_terminal_state_dialogues is not None:
            self._keep_terminal_state_dialogues = keep_terminal_state_dialogues

    def setup(self) -> None:
        """Set the model up, delegating to the superclass hook when present."""
        super_obj = super()
        if hasattr(super_obj, 'setup'):
            super_obj.setup()

    def teardown(self) -> None:
        """Tear the model down, delegating to the superclass hook when present."""
        super_obj = super()
        if hasattr(super_obj, 'teardown'):
            super_obj.teardown()

    @classmethod
    def parse_module(
        cls,
        path: str,
        model_configs: Dict[str, SkillComponentConfiguration],
        skill_context: SkillContext,
    ) -> Dict[str, 'Model']:
        """Parse the model module and return the instantiated models.

        BUG FIX: restored the stripped ``@classmethod`` decorator — the
        method takes ``cls`` and is invoked on the class itself.
        """
        return _parse_module(path, model_configs, skill_context, Model)
class ExperienceReplay():
    """Bounded replay buffer for Q-learning.

    Stores (state, move, reward, next_state, status) transitions and builds
    (states, target-Q) training batches from random samples of the memory.
    """

    def __init__(self, model, max_memory=1000, discount=0.95):
        self.model = model          # keras-style model: .predict() / .output_shape
        self.discount = discount    # future-reward discount factor
        self.memory = list()
        self.max_memory = max_memory

    def remember(self, transition):
        """Append a transition, evicting the oldest entry on overflow."""
        self.memory.append(transition)
        while len(self.memory) > self.max_memory:
            del self.memory[0]

    def predict(self, state):
        """Q-values for a single state (first row of the model output)."""
        return self.model.predict(state)[0]

    def get_samples(self, sample_size=10):
        """Build a random training batch of at most `sample_size` transitions."""
        available = len(self.memory)
        batch = min(available, sample_size)
        state_size = self.memory[0][0].size
        num_actions = self.model.output_shape[-1]
        states = np.zeros((batch, state_size), dtype=int)
        targets = np.zeros((batch, num_actions), dtype=float)
        chosen = np.random.choice(range(available), batch, replace=False)
        for row, idx in enumerate(chosen):
            state, move, reward, next_state, status = self.memory[idx]
            states[row] = state
            # Start from the current Q-estimates, then overwrite the taken move.
            targets[row] = self.predict(state)
            if status == 'win':
                # Terminal transition: the target is the raw reward.
                targets[row, move] = reward
            else:
                # Bootstrap from the best Q-value of the successor state.
                targets[row, move] = reward + self.discount * np.max(self.predict(next_state))
        return states, targets
def filter_data_categories(
    access_request_results: Dict[str, List[Dict[str, Optional[Any]]]],
    target_categories: Set[str],
    data_category_fields: Dict[CollectionAddress, Dict[FidesKey, List[FieldPath]]],
    rule_key: str = '',
    fides_connector_datasets: Optional[Set[str]] = None,
) -> Dict[str, List[Dict[str, Optional[Any]]]]:
    """Reduce access-request rows to the fields whose data categories match
    (by prefix) one of the target categories.

    Collections that belong to a fides-connector dataset are handed to
    unpack_fides_connector_results unchanged instead of being filtered here.
    """
    logger.info('Filtering Access Request results to return fields associated with data categories')
    filtered_access_results: Dict[str, List[Dict[str, Any]]] = defaultdict(list)
    for node_address, results in access_request_results.items():
        if not results:
            continue
        address = CollectionAddress.from_string(node_address)
        if fides_connector_datasets and address.dataset in fides_connector_datasets:
            # Fides-connector payloads come pre-filtered; unpack them as-is.
            unpack_fides_connector_results(results, filtered_access_results, rule_key, node_address)
            continue
        # Every field path whose category starts with one of the targets.
        matching_path_lists = [
            field_paths
            for category, field_paths in data_category_fields[address].items()
            if any(category.startswith(target) for target in target_categories)
        ]
        target_field_paths: Set[FieldPath] = set(itertools.chain.from_iterable(matching_path_lists))
        if not target_field_paths:
            continue
        for row in results:
            filtered_row: Dict[str, Any] = {}
            for field_path in target_field_paths:
                select_and_save_field(filtered_row, row, field_path)
            remove_empty_containers(filtered_row)
            filtered_access_results[node_address].append(filtered_row)
    return filtered_access_results
def _wrap_gen_lifespan_context(lifespan_context: typing.Callable[[typing.Any], typing.Generator[typing.Any, typing.Any, typing.Any]]) -> typing.Callable[[typing.Any], typing.AsyncContextManager[typing.Any]]:
    """Adapt a synchronous generator-based lifespan into a factory that
    produces an async context manager (via _AsyncLiftContextManager).

    :param lifespan_context: generator function taking the app and yielding
        once (startup before the yield, shutdown after it).
    :return: callable mapping the app to an async context manager.
    """
    import functools  # local import: keeps this fix self-contained

    cmgr = contextlib.contextmanager(lifespan_context)

    # BUG FIX: the bare "(cmgr)" expression here was a stripped decorator;
    # restore @functools.wraps so the wrapper keeps the context manager's
    # metadata (name, docstring).
    @functools.wraps(cmgr)
    def wrapper(app: typing.Any) -> _AsyncLiftContextManager[typing.Any]:
        return _AsyncLiftContextManager(cmgr(app))

    return wrapper
class TestIlluminaDataForCasava(BaseTestIlluminaData):
    """Tests for IlluminaData on mock CASAVA-style output directories."""

    def setUp(self):
        # Scratch area holding the mock sequencing-output directory.
        self.top_dir = tempfile.mkdtemp()

    def tearDown(self):
        # BUG FIX: os.rmdir only removes *empty* directories, so the mock
        # data created by each test was silently leaked; remove the whole
        # tree (best-effort, as before).
        import shutil  # local import so this fix is self-contained
        shutil.rmtree(self.top_dir, ignore_errors=True)

    def makeMockIlluminaData(self, paired_end=False, multiple_projects=False, multiplexed_run=False):
        """Create and populate the mock CASAVA-format data directory.

        NOTE(review): `multiple_projects` is currently ignored — only the
        `multiplexed_run` branch adds the second ('CDE') project.  Verify
        against the original test suite whether a non-multiplexed second
        project should be created here.
        """
        mock_illumina_data = MockIlluminaData('test.MockIlluminaData', 'casava', paired_end=paired_end, top_dir=self.top_dir)
        # Baseline project 'AB' with two samples in lane 1.
        mock_illumina_data.add_fastq_batch('AB', 'AB1', 'AB1_GCCAAT', lanes=(1,))
        mock_illumina_data.add_fastq_batch('AB', 'AB2', 'AB2_AGTCAA', lanes=(1,))
        if multiplexed_run:
            # Second project spread over several lanes plus undetermined reads.
            lanes = (1, 4, 5)
            mock_illumina_data.add_fastq_batch('CDE', 'CDE3', 'CDE3_GCCAAT', lanes=lanes)
            mock_illumina_data.add_fastq_batch('CDE', 'CDE4', 'CDE4_AGTCAA', lanes=lanes)
            mock_illumina_data.add_undetermined(lanes=lanes)
        self.mock_illumina_data = mock_illumina_data
        self.mock_illumina_data.create()

    def test_illumina_data(self):
        """Basic CASAVA directory is read correctly."""
        self.makeMockIlluminaData()
        illumina_data = IlluminaData(self.mock_illumina_data.dirn)
        self.assertIlluminaData(illumina_data, self.mock_illumina_data)
        self.assertEqual(illumina_data.format, 'casava')
        self.assertEqual(illumina_data.lanes, [1])

    def test_illumina_data_paired_end(self):
        """Paired-end CASAVA directory is read correctly."""
        self.makeMockIlluminaData(paired_end=True)
        illumina_data = IlluminaData(self.mock_illumina_data.dirn)
        self.assertIlluminaData(illumina_data, self.mock_illumina_data)
        self.assertEqual(illumina_data.format, 'casava')
        self.assertEqual(illumina_data.lanes, [1])

    def test_illumina_data_multiple_projects(self):
        """Multi-project CASAVA directory is read correctly."""
        self.makeMockIlluminaData(multiple_projects=True)
        illumina_data = IlluminaData(self.mock_illumina_data.dirn)
        self.assertIlluminaData(illumina_data, self.mock_illumina_data)
        self.assertEqual(illumina_data.format, 'casava')
        self.assertEqual(illumina_data.lanes, [1])

    def test_illumina_data_multiple_projects_paired_end(self):
        """Multi-project, paired-end CASAVA directory is read correctly."""
        self.makeMockIlluminaData(multiple_projects=True, paired_end=True)
        illumina_data = IlluminaData(self.mock_illumina_data.dirn)
        self.assertIlluminaData(illumina_data, self.mock_illumina_data)
        self.assertEqual(illumina_data.format, 'casava')
        self.assertEqual(illumina_data.lanes, [1])

    def test_illumina_data_multiple_projects_multiplexed(self):
        """Multi-project, multiplexed run exposes all lanes."""
        self.makeMockIlluminaData(multiple_projects=True, multiplexed_run=True)
        illumina_data = IlluminaData(self.mock_illumina_data.dirn)
        self.assertIlluminaData(illumina_data, self.mock_illumina_data)
        self.assertEqual(illumina_data.format, 'casava')
        self.assertEqual(illumina_data.lanes, [1, 4, 5])

    def test_illumina_data_multiple_projects_multiplexed_paired_end(self):
        """Multi-project, multiplexed, paired-end run exposes all lanes."""
        self.makeMockIlluminaData(multiple_projects=True, multiplexed_run=True, paired_end=True)
        illumina_data = IlluminaData(self.mock_illumina_data.dirn)
        self.assertIlluminaData(illumina_data, self.mock_illumina_data)
        self.assertEqual(illumina_data.format, 'casava')
        self.assertEqual(illumina_data.lanes, [1, 4, 5])
class Interpolate():
    """Piecewise interpolator over a set of control points.

    ``points`` is a sequence of equal-length vectors; interpolation runs per
    coordinate channel.  With ``linear`` False, the four control values
    surrounding a segment are handed to ``callback`` (spline-style); linear
    interpolation — and extrapolation outside [0, 1] — uses ``lerp``.
    """

    def __init__(self, points: Sequence[VectorLike], callback: Callable[..., float], length: int, linear: bool = False) -> None:
        self.length = length
        self.num_coords = len(points[0])
        # Transpose: one tuple of values per coordinate channel.
        self.points = list(zip(*points))
        self.callback = callback
        self.linear = linear

    def steps(self, count: int) -> list[Vector]:
        """Return `count` evenly spaced interpolated vectors over [0, 1]."""
        last = count - 1
        return [self(step / last) for step in range(count)]

    def __call__(self, t: float) -> Vector:
        """Interpolate the vector at parameter ``t``."""
        segments = self.length - 1
        # Clamp the segment index into [0, segments - 1].
        seg = max(min(math.floor(t * segments), segments - 1), 0)
        # Re-map t to the chosen segment's local parameter (only inside [0, 1]).
        if 0 <= t <= 1:
            t = (t - seg / segments) * segments
        # Non-linear interpolation offsets the index by one (padding point).
        idx = seg if self.linear else seg + 1
        vector = []
        for channel in range(self.num_coords):
            values = self.points[channel]
            if self.linear or t < 0 or t > 1:
                # Linear interpolation also handles out-of-range extrapolation.
                vector.append(lerp(values[idx], values[idx + 1], t))
            else:
                # Spline-style: hand the surrounding four control values over.
                vector.append(self.callback(values[idx - 1], values[idx], values[idx + 1], values[idx + 2], t))
        return vector
def _run_in_venv_handler(env_dir: str, fn: Callable, queue: multiprocessing.Queue, *args: Any) -> None:
    """Child-process entry point: activate the venv at ``env_dir``, run
    ``fn(*args)``, and push either its result or the raised exception onto
    ``queue`` so the parent process can inspect it.
    """
    outcome = None
    try:
        make_venv(env_dir, set_env=True)
        outcome = fn(*args)
    except Exception as exc:
        # Report failures through the queue instead of crashing the child.
        print(f'''Exception in venv runner at {datetime.datetime.now()} for {fn}:
{format_exc()}''')
        outcome = exc
    queue.put_nowait(outcome)
# BUG FIX: the bare call "_frequency(timedelta(days=2))" was a stripped,
# name-truncated decorator; restored as @refetch_frequency (the surrounding
# codebase's convention) -- confirm the name against the module's imports.
@refetch_frequency(timedelta(days=2))
def fetch_exchange_forecast(zone_key1: str = 'NO-NO4', zone_key2: str = 'SE', session: (Session | None) = None, target_datetime: (datetime | None) = None, logger: Logger = getLogger(__name__)) -> list[dict]:
    """Fetch the exchange forecast between two zones (default NO-NO4 -> SE)
    by delegating to the shared fetch_data helper."""
    return fetch_data(zone_key1=zone_key1, zone_key2=zone_key2, session=session, target_datetime=target_datetime, logger=logger, type='exchange_forecast')
class ADH_metfile(object):
    """Reader for an ADH '.met' meteorological input file.

    File layout assumed by the parser:
      * line 3 (index 2): latitude, longitude, time zone
      * line 4 (index 3): column labels (first two tokens skipped)
      * lines 6.. (index 5..): whitespace-separated data rows starting with
        day, hour, min columns followed by the labelled columns
    Parsed columns are exposed as numpy arrays in ``self.data`` plus a
    combined fractional-day ``'time'`` axis; ``getValue`` interpolates any
    column at an arbitrary time.
    """
    from proteus.cSubsurfaceTransportCoefficients import piecewiseLinearTableLookup
    # Time units the file may use; parsing below assumes 'day'.
    allowed_time_units = ['day', 'hour', 'sec']
    def __init__(self, fileprefix, directory='.'):
        self.fileprefix = fileprefix
        self.directory = directory
        import os, numpy
        filename = os.path.join(directory, (fileprefix + '.met'))
        assert os.path.exists(filename), ('%s not found' % filename)
        fh = open(filename, 'r')
        self.full_file = fh.readlines()
        fh.close()
        # Header line 3 (index 2): latitude, longitude and time zone.
        latitude = float(self.full_file[2].split()[0])
        longitude = float(self.full_file[2].split()[1])
        zone = float(self.full_file[2].split()[2])
        # Data rows start at line 6 (index 5).
        self.npoints = len(self.full_file[5:])
        self.data = {}
        self.data['latitude'] = latitude
        self.data['longitude'] = longitude
        self.data['zone'] = zone
        # Fixed leading columns; labelled columns follow from header line 4
        # (index 3), whose first two tokens are skipped.
        self.entries = {'day': 0, 'hour': 1, 'min': 2}
        for (i, label) in enumerate(self.full_file[3].split()[2:]):
            self.entries[label] = (i + 3)
        for entry in list(self.entries.keys()):
            self.data[entry] = numpy.zeros(self.npoints, 'd')
        for (i, line) in enumerate(self.full_file[5:]):
            for (entry, column) in self.entries.items():
                self.data[entry][i] = float(line.split()[column])
        self.time_unit = 'day'
        # Combined time axis in fractional days (old_div is the past.utils
        # py2/py3 true-division helper).
        self.data['time'] = numpy.zeros(self.npoints, 'd')
        self.data['time'] = ((self.data['day'] + old_div(self.data['hour'], 24.0)) + old_div(self.data['min'], (24.0 * 60.0)))
    def getValue(self, entry, t):
        """Return column ``entry`` piecewise-linearly interpolated at time
        ``t`` (fractional days).  Scalar header constants are returned
        directly; unknown entries print a warning and return None."""
        if (entry not in list(self.data.keys())):
            print(('ADH_metfile entry= %s not found ' % entry))
            return None
        if (entry in ['latitude', 'longitude', 'zone']):
            return self.data[entry]
        # index is a search hint for the table lookup; restart from 0 each call.
        index = 0
        (interp, dinterp, index) = self.piecewiseLinearTableLookup(t, self.data['time'], self.data[entry], index)
        return interp
class TestSelectTasks(TestCase):
    """Tests for select_tasks filtering by task name and by state.

    BUG FIX: the bare "('app.database.db', new=tasks)" lines were stripped
    ``@patch(...)`` decorators and were a SyntaxError as written (keyword
    argument inside a tuple); restored below.  The import is placed in the
    class body so the decorators resolve without touching file-level imports.
    """

    from unittest.mock import patch

    @patch('app.database.db', new=tasks)
    def test_select_tasks_deve_retornar_somente_tasks_de_acordar(self):
        """select_tasks('Acordar', '') must only return 'Acordar' tasks."""
        results = select_tasks('Acordar', '')
        for result in results:
            with self.subTest(f'Acordar in {result}'):
                self.assertEqual('Acordar', result['task_name'])

    @patch('app.database.db', new=tasks)
    def test_select_tasks_deve_retornar_somente_tasks_com_state_fazendo(self):
        """select_tasks('', 'Fazendo') must only return tasks in state 'Fazendo'."""
        results = select_tasks('', 'Fazendo')
        for result in results:
            with self.subTest(f'Acordar in {result}'):
                self.assertEqual('Fazendo', result['state'])
def evaluate_supersampled(field_generator, grid, oversampling, statistic='mean', make_sparse=True):
    """Evaluate a field generator on `grid`, supersampling each pixel.

    Each output pixel is reduced from `oversampling` sub-samples per grid
    dimension using `statistic` ('mean', 'sum', 'min' or 'max').  If
    `field_generator` is a list/tuple, every generator is evaluated in turn
    (optionally converted to a sparse matrix) and the results are returned
    wrapped in a ModeBasis.
    """
    import scipy.sparse
    from ..mode_basis import ModeBasis
    if isinstance(field_generator, (list, tuple)):
        # Evaluate each generator separately and collect them as a mode basis.
        modes = []
        for fg in field_generator:
            field = evaluate_supersampled(fg, grid, oversampling, statistic)
            if make_sparse:
                # Store each mode sparsely to save memory on mostly-zero fields.
                field = scipy.sparse.csr_matrix(field)
                field.eliminate_zeros()
            modes.append(field)
        return ModeBasis(modes, grid)
    # Normalize oversampling to one integer factor per grid dimension.
    oversampling = (np.round(oversampling) * np.ones(grid.ndim)).astype('int')
    if grid.is_separated:
        # Separated grid: evaluate on dithered copies of the grid instead of
        # materializing the full supersampled grid in memory.
        deltas = []
        for i in range(grid.ndim):
            x = grid.coords.separated_coords[i]
            # Local spacing along this axis: central differences in the
            # interior, one-sided differences at the two endpoints.
            d = ((x[2:] - x[:(- 2)]) / 2.0)
            d = np.concatenate(([(x[1] - x[0])], d, [(x[(- 1)] - x[(- 2)])]))
            deltas.append(d)
        # Sub-pixel offsets (in units of the local spacing) for each sample.
        dithers = make_uniform_grid(oversampling, 1)
        separated_coords = grid.separated_coords
        if (statistic in ['mean', 'sum']):
            field = 0  # additive identity so `+=` works on the first dither
        else:
            field = None  # min/max are seeded by the first evaluation below
        for dither in dithers.points:
            dithered_separated_coords = [(c + (d * delta)) for (c, d, delta) in zip(separated_coords, dither, deltas)]
            dithered_grid = grid.__class__(SeparatedCoords(dithered_separated_coords))
            if (statistic in ['mean', 'sum']):
                field += field_generator(dithered_grid)
            elif (field is None):
                # First sample seeds the running 'min'/'max'.
                field = field_generator(dithered_grid)
            elif (statistic == 'min'):
                field = np.minimum(field, field_generator(dithered_grid))
            else:
                field = np.maximum(field, field_generator(dithered_grid))
        if (statistic == 'mean'):
            field /= len(dithers)
        field.grid = grid
        return field
    else:
        # Regular grid: evaluate on a full supersampled grid, then reduce.
        supersampled_grid = make_supersampled_grid(grid, oversampling)
        field = field_generator(supersampled_grid)
        return subsample_field(field, oversampling, grid, statistic)
class MDataWrapper(HasStrictTraits):
    """Mixin translating a Format-based API onto mimetype-based accessors.

    Concrete subclasses supply ``mimetypes()``, ``get_mimedata()`` and
    ``set_mimedata()``; this class converts Format objects to and from
    the raw mimedata they manage.
    """

    def has_format(self, format):
        """Return whether data for ``format``'s mimetype is available."""
        available = self.mimetypes()
        return format.mimetype in available

    def get_format(self, format):
        """Deserialize and return the data stored under ``format``'s mimetype."""
        raw = self.get_mimedata(format.mimetype)
        return format.deserialize(raw)

    def set_format(self, format, data):
        """Serialize ``data`` with ``format`` and store it under its mimetype."""
        serialized = format.serialize(data)
        self.set_mimedata(format.mimetype, serialized)
class NodeItem(QStandardItem):
    """Tree-view item representing a single ROS node and its run state.

    BUG FIX: the accessors below had their ``@property`` / ``@x.setter`` /
    ``@classmethod`` / ``@staticmethod`` decorators stripped (the bare
    ``_item.setter``-style expressions were the residue and raised at class
    creation).  They are restored here; the call-free in-class usage (e.g.
    ``self.has_running``, ``item.name``, ``self.cfgs``) confirms the
    property form.
    """

    ITEM_TYPE = QStandardItem.UserType + 35
    NAME_ROLE = Qt.UserRole + 1
    COL_CFG = 1
    # BUG FIX(review): COL_URI is referenced by update_displayed_url() but was
    # missing; restored as the column after COL_CFG -- confirm the value
    # against the model's column layout.
    COL_URI = 2
    # Displayed run states of the node.
    STATE_OFF = 0
    STATE_RUN = 1
    STATE_WARNING = 2
    STATE_GHOST = 3
    STATE_DUPLICATE = 4
    STATE_PARTS = 5

    def __init__(self, node_info):
        """:param node_info: node information object (copied on storage)."""
        QStandardItem.__init__(self, node_info.name)
        self._parent_item = None
        self._node_info = node_info.copy()
        self._cfgs = []
        self.launched_cfg = None
        self.next_start_cfg = None
        self._std_config = None
        self._is_ghost = False
        self._has_running = False
        self.setIcon(nm.settings().icon('state_off.png'))
        self._state = NodeItem.STATE_OFF
        self.diagnostic_array = []
        self.nodelet_mngr = ''
        self.nodelets = []
        self.has_screen = True
        self.has_multiple_screens = False
        self._with_namespace = rospy.names.SEP in node_info.name
        self.kill_on_stop = False
        self._kill_parameter_handler = ParameterHandler()
        self._kill_parameter_handler.parameter_values_signal.connect(self._on_kill_param_values)

    @property
    def state(self):
        """Current display state (one of the STATE_* constants)."""
        return self._state

    @property
    def name(self):
        """The (full) ROS name of the node."""
        return self._node_info.name

    @name.setter
    def name(self, new_name):
        self.setText(new_name)

    @property
    def masteruri(self):
        return self._node_info.masteruri

    @property
    def published(self):
        return self._node_info.publishedTopics

    @property
    def subscribed(self):
        return self._node_info.subscribedTopics

    @property
    def services(self):
        return self._node_info.services

    @property
    def parent_item(self):
        return self._parent_item

    @parent_item.setter
    def parent_item(self, parent_item):
        self._parent_item = parent_item
        if parent_item is None:
            # Top level: show the full name including the namespace.
            self.setText(self._node_info.name)
            self._with_namespace = rospy.names.SEP in self._node_info.name
        else:
            # Show the name relative to the parent's namespace.
            new_name = self._node_info.name.replace(parent_item.get_namespace(), '', 1)
            self.setText(new_name)
            self._with_namespace = rospy.names.SEP in new_name

    @property
    def node_info(self):
        return self._node_info

    def set_node_info(self, node_info):
        """Update the stored node information and refresh the display.

        :return: True if the displayed name/icon was updated.
        """
        abbos_changed = False
        run_changed = False
        if self._node_info.publishedTopics != node_info.publishedTopics:
            abbos_changed = True
            self._node_info._publishedTopics = list(node_info.publishedTopics)
        if self._node_info.subscribedTopics != node_info.subscribedTopics:
            abbos_changed = True
            self._node_info._subscribedTopics = list(node_info.subscribedTopics)
        if self._node_info.services != node_info.services:
            abbos_changed = True
            self._node_info._services = list(node_info.services)
        if self._node_info.pid != node_info.pid:
            self._node_info.pid = node_info.pid
            run_changed = True
        if self._node_info.uri != node_info.uri:
            self._node_info.uri = node_info.uri
            run_changed = True
        if run_changed:
            # Old diagnostics belong to the previous run of the node.
            del self.diagnostic_array[:]
        # BUG FIX: has_configs was referenced without calling it (a bound
        # method is always truthy); call it like everywhere else in the class.
        if (run_changed and (self.is_running() or self.has_configs())) or abbos_changed:
            self.has_screen = True
            self.update_dispayed_name()
            if self.parent_item is not None:
                self.parent_item.updateIcon()
            if run_changed and self.is_running():
                # Ask the parameter server whether this node wants SIGKILL on stop.
                self._kill_parameter_handler.requestParameterValues(self.masteruri, [roslib.names.ns_join(self.name, 'kill_on_stop'), roslib.names.ns_join(self.name, 'nm/kill_on_stop')])
            return True
        return False

    @property
    def uri(self):
        # Normalize the string 'None' coming from remote masters to real None.
        if self._node_info.uri is not None:
            if self._node_info.uri == 'None':
                self._node_info.uri = None
        return self._node_info.uri

    @property
    def pid(self):
        return self._node_info.pid

    @property
    def has_running(self):
        """True if a node with the same name runs on another host."""
        return self._has_running

    @has_running.setter
    def has_running(self, state):
        if self._has_running != state:
            self._has_running = state
            if self.has_configs() or self.is_running():
                self.update_dispayed_name()
            if self.parent_item is not None and not isinstance(self.parent_item, HostItem):
                self.parent_item.updateIcon()

    @property
    def is_ghost(self):
        """True if the node runs but is not synchronized (ghost node)."""
        return self._is_ghost

    @is_ghost.setter
    def is_ghost(self, state):
        if self._is_ghost != state:
            self._is_ghost = state
            if self.has_configs() or self.is_running():
                self.update_dispayed_name()
            if self.parent_item is not None and not isinstance(self.parent_item, HostItem):
                self.parent_item.updateIcon()

    @property
    def with_namespace(self):
        """True if the displayed name still contains a namespace separator."""
        return self._with_namespace

    @property
    def host(self):
        # Walk up the parent chain to the enclosing HostItem, if any.
        # NOTE(review): restored as a property for consistency with the other
        # accessors -- confirm no caller invokes it as a method.
        pitem = self.parent_item
        while pitem is not None:
            if type(pitem) == HostItem:
                return pitem.host
            else:
                pitem = pitem.parent_item
        return None

    def append_diagnostic_status(self, diagnostic_status):
        """Append a diagnostic message, tagging it with the receive time and
        skipping exact repeats; the history is capped at 15 entries."""
        if self.diagnostic_array:
            last_item = self.diagnostic_array[-1]
            if last_item.level == diagnostic_status.level:
                if last_item.message == diagnostic_status.message:
                    return
        dt_key = KeyValue()
        dt_key.key = 'recvtime'
        dt_key.value = datetime.now().strftime('%d.%m.%Y %H:%M:%S.%f')
        if diagnostic_status.values and diagnostic_status.values[-1].key == 'recvtime':
            diagnostic_status.values[-1].value = dt_key.value
        else:
            diagnostic_status.values.append(dt_key)
        self.diagnostic_array.append(diagnostic_status)
        self.update_dispayed_name()
        if self.parent_item is not None and not isinstance(self.parent_item, HostItem):
            self.parent_item.updateIcon()
        if len(self.diagnostic_array) > 15:
            del self.diagnostic_array[0]

    def data(self, role):
        """Qt data accessor; serves the node name for NAME_ROLE."""
        if role == self.NAME_ROLE:
            return self.name
        else:
            return QStandardItem.data(self, role)

    @staticmethod
    def _diagnostic_level2icon(level):
        """Map a diagnostic level to its state icon."""
        if level == 1:
            return nm.settings().icon('state_diag_warn.png')
        elif level == 2:
            return nm.settings().icon('state_diag_error.png')
        elif level == 3:
            return nm.settings().icon('state_diag_stale.png')
        else:
            return nm.settings().icon('state_diag_other.png')

    @property
    def diagnostic_level(self):
        """Level of the most recent diagnostic message (0 if none)."""
        if self.diagnostic_array:
            return self.diagnostic_array[-1].level
        return 0

    def _on_kill_param_values(self, masteruri, code, msg, params):
        # Callback of the parameter handler: store the kill_on_stop value.
        if code == 1:
            for _p, (code_n, _msg_n, val) in params.items():
                if code_n == 1:
                    self.kill_on_stop = val

    def update_dispayed_name(self):
        """Recompute the display state, icon and tooltip of the node.

        (Method name typo kept for interface compatibility with callers.)
        """
        tooltip = '<h4>%s</h4><dl>' % self.node_info.name
        tooltip += '<dt><b>URI:</b> %s</dt>' % self.node_info.uri
        tooltip += '<dt><b>PID:</b> %s</dt>' % self.node_info.pid
        if self.nodelet_mngr:
            tooltip += '<dt><b>Nodelet manager</b>: %s</dt>' % self.nodelet_mngr
        if self.nodelets:
            tooltip += '<dt><b>This is nodelet manager for %d nodes</b></dt>' % len(self.nodelets)
        tooltip += '<dt><b>ORG.MASTERURI:</b> %s</dt></dl>' % self.node_info.masteruri
        master_discovered = nm.nameres().has_master(self.node_info.masteruri)
        if self.node_info.pid is not None:
            # Node runs locally; prefer the diagnostic icon when one exists.
            self._state = NodeItem.STATE_RUN
            if self.diagnostic_array and self.diagnostic_array[-1].level > 0:
                level = self.diagnostic_array[-1].level
                self.setIcon(self._diagnostic_level2icon(level))
                self.setToolTip(self.diagnostic_array[-1].message)
            else:
                self.setIcon(nm.settings().icon('state_run.png'))
                self.setToolTip('')
        elif self.node_info.uri is not None and not self.node_info.isLocal:
            # Remote node: assumed running since it cannot be pinged.
            self._state = NodeItem.STATE_RUN
            self.setIcon(nm.settings().icon('state_unknown.png'))
            tooltip += '<dl><dt>(Remote nodes will not be ping, so they are always marked running)</dt></dl>'
            tooltip += '</dl>'
            self.setToolTip('<div>%s</div>' % tooltip)
        elif self.node_info.pid is None and self.node_info.uri is None and (self.node_info.subscribedTopics or self.node_info.publishedTopics or self.node_info.services):
            # No contact information, but the node's topics/services exist.
            self.setIcon(nm.settings().icon('crystal_clear_warning.png'))
            self._state = NodeItem.STATE_WARNING
            tooltip += "<dl><dt>Can't get node contact information, but there exists publisher, subscriber or services of this node.</dt></dl>"
            tooltip += '</dl>'
            self.setToolTip('<div>%s</div>' % tooltip)
        elif self.node_info.uri is not None:
            self._state = NodeItem.STATE_WARNING
            self.setIcon(nm.settings().icon('crystal_clear_warning.png'))
            if not self.node_info.isLocal and master_discovered:
                tooltip = '<h4>%s is not local, however the ROS master on this host is discovered, but no information about this node received!</h4>' % self.node_info.name
                self.setToolTip('<div>%s</div>' % tooltip)
        elif self.is_ghost:
            self._state = NodeItem.STATE_GHOST
            self.setIcon(nm.settings().icon('state_ghost.png'))
            tooltip = '<h4>The node is running, but not synchronized because of filter or errors, see master_sync log.</h4>'
            self.setToolTip('<div>%s</div>' % tooltip)
        elif self.has_running:
            # Same-named node runs on a remote host.
            self._state = NodeItem.STATE_DUPLICATE
            self.setIcon(nm.settings().icon('imacadam_stop.png'))
            tooltip = '<h4>There are nodes with the same name on remote hosts running. These will be terminated, if you run this node! (Only if master_sync is running or will be started somewhere!)</h4>'
            self.setToolTip('<div>%s</div>' % tooltip)
        else:
            self._state = NodeItem.STATE_OFF
            self.setIcon(nm.settings().icon('state_off.png'))
            self.setToolTip('')

    def update_displayed_url(self):
        """Refresh the URI column of this row."""
        if self.parent_item is not None:
            uri_col = self.parent_item.child(self.row(), NodeItem.COL_URI)
            if uri_col is not None and isinstance(uri_col, QStandardItem):
                uri_col.setText(utf8(self.node_info.uri) if self.node_info.uri is not None else '')

    def update_displayed_config(self):
        """Refresh the configuration column of this row (count + icon)."""
        if self.parent_item is not None:
            cfg_col = self.parent_item.child(self.row(), NodeItem.COL_CFG)
            if cfg_col is not None and isinstance(cfg_col, QStandardItem):
                cfg_count = len(self._cfgs)
                # Show the count only when the node appears in several configs.
                cfg_col.setText(utf8(''.join(['[', utf8(cfg_count), ']'])) if cfg_count > 1 else '')
                has_launches = NodeItem.has_launch_cfgs(self._cfgs)
                has_defaults = NodeItem.has_default_cfgs(self._cfgs)
                if has_launches and has_defaults:
                    cfg_col.setIcon(nm.settings().icon('crystal_clear_launch_file_def_cfg.png'))
                elif has_launches:
                    cfg_col.setIcon(nm.settings().icon('crystal_clear_launch_file.png'))
                elif has_defaults:
                    cfg_col.setIcon(nm.settings().icon('default_cfg.png'))
                else:
                    cfg_col.setIcon(QIcon())

    @property
    def cfgs(self):
        """Configurations (launch files / default configs) this node is in."""
        return self._cfgs

    def add_config(self, cfg):
        """Add a configuration; '' marks the standard configuration."""
        if cfg == '':
            self._std_config = cfg
        if cfg and cfg not in self._cfgs:
            self._cfgs.append(cfg)
            self.update_displayed_config()

    def rem_config(self, cfg):
        """Remove a configuration; return True if something was removed."""
        result = False
        if cfg == '':
            self._std_config = None
            result = True
        if cfg in self._cfgs:
            self._cfgs.remove(cfg)
            result = True
        if result and (self.has_configs() or self.is_running()):
            self.update_displayed_config()
        return result

    def readd(self):
        """Re-insert this item into the correct namespace group of its parent."""
        if self.parent_item is not None and self.with_namespace:
            row = None
            for i in reversed(range(self.parent_item.rowCount())):
                item = self.parent_item.child(i)
                if type(item) == NodeItem and item.name == self.name:
                    row = self.parent_item.takeRow(i)
                    break
            group_item = self.parent_item.get_group_item(namespace(item.name), is_group=False)
            group_item._add_row_sorted(row)
            group_item.updateIcon()

    def type(self):
        """Qt item type (QStandardItem override)."""
        return NodeItem.ITEM_TYPE

    def newNodeRow(self, name, masteruri):
        """Create a new complete row (node item + config cell) for a node."""
        items = []
        item = NodeItem(NodeInfo(name, masteruri))
        items.append(item)
        cfgitem = CellItem(name, item)
        items.append(cfgitem)
        return items

    def has_configs(self):
        """True if at least one configuration is associated with the node."""
        return not (len(self._cfgs) == 0)

    def is_running(self):
        """True if the node has a pid or an URI (i.e. appears to run)."""
        return not (self._node_info.pid is None and self._node_info.uri is None)

    def has_std_cfg(self):
        """True if the standard configuration is set."""
        return self._std_config == ''

    def count_launch_cfgs(self):
        """Number of launch-file configurations."""
        result = 0
        for c in self.cfgs:
            if not self.is_default_cfg(c):
                result += 1
        return result

    def count_default_cfgs(self):
        """Number of default configurations."""
        result = 0
        for c in self.cfgs:
            if self.is_default_cfg(c):
                result += 1
        return result

    @classmethod
    def has_launch_cfgs(cls, cfgs):
        """True if `cfgs` contains at least one launch-file configuration."""
        for c in cfgs:
            if not cls.is_default_cfg(c):
                return True
        return False

    @classmethod
    def has_default_cfgs(cls, cfgs):
        """True if `cfgs` contains at least one default configuration."""
        for c in cfgs:
            if cls.is_default_cfg(c):
                return True
        return False

    @classmethod
    def is_default_cfg(cls, cfg):
        """Default configurations are stored as tuples; launch files as strings."""
        return isinstance(cfg, tuple)

    def __eq__(self, item):
        """Compare items by node name (also against a plain string)."""
        if isstring(item):
            return self.name == item
        elif item is not None and type(item) == NodeItem:
            return self.name == item.name
        return False

    def __gt__(self, item):
        """Order items by node name (also against a plain string)."""
        if isstring(item):
            return self.name > item
        elif item is not None and type(item) == NodeItem:
            return self.name > item.name
        return False
# NOTE(review): this bare tuple is residue of a stripped decorator -- almost
# certainly "@unittest.skipIf(detect_target().name() == 'rocm',
# 'Not supported by ROCM.')" applied to the test class below.  As written it
# is a no-op expression, so the class is NOT actually skipped on ROCM.
((detect_target().name() == 'rocm'), 'Not supported by ROCM.')
class ExpandTestCase(unittest.TestCase):
def test_expand_fails_mismatched_ndim(self):
    """expand() must reject a target shape with fewer dims than the input."""
    tensor = Tensor(shape=[5, IntVar([1, 10]), 5])
    bad_shape = [5, -1]
    self.assertRaises(ValueError, ops.expand().__call__, tensor, bad_shape)
def test_expand_fails_non_singleton_dim(self):
    """expand() must reject resizing a dimension whose current size is not 1."""
    static = Tensor(shape=[5, 1, 2])
    self.assertRaises(ValueError, ops.expand().__call__, static, [6, 1, 2])
    # Same check with a dynamic (IntVar) leading dimension.
    dynamic = Tensor(shape=[IntVar([1, 10])])
    self.assertRaises(ValueError, ops.expand().__call__, dynamic, [20])
def _test_no_op_expands_removed_static_shapes(self, test_name='no_op_expands_removed_static_shapes', dtype='float16'):
    """A no-op expand (all target dims -1 or already matching) on a static
    shape must be optimized out of the compiled graph, results unchanged."""
    inp = Tensor([1, 2, 3], name='input_0', is_input=True, dtype=dtype)
    expanded = ops.expand()(inp, [1, -1, -1])
    out = ops.elementwise(FuncEnum.MUL)(expanded, expanded)
    out._attrs['is_output'] = True
    out._attrs['name'] = 'output_0'
    x_pt = get_random_torch_tensor([1, 2, 3], dtype=dtype)
    expected = x_pt * x_pt
    actual = torch.empty_like(expected)
    with compile_model(out, detect_target(), './tmp', test_name) as module:
        module.run_with_tensors({'input_0': x_pt}, {'output_0': actual})
        # The no-op expand must have been removed by the optimizer.
        self.assertFalse(graph_has_op(module.debug_sorted_graph, 'expand'))
        self.assertTrue(torch.equal(actual, expected))
def test_no_op_expands_removed_static_shapes_fp16(self):
    """fp16 variant of the static-shape no-op expand removal test."""
    self._test_no_op_expands_removed_static_shapes(
        test_name='no_op_expands_removed_static_shapes_fp16',
        dtype='float16',
    )
# BUG FIX: the bare tuple preceding this method was a stripped decorator;
# restored as unittest.skipIf so the fp32 variant is skipped on ROCM.
@unittest.skipIf(detect_target().name() == 'rocm', 'Not supported by ROCM.')
def test_no_op_expands_removed_static_shapes_fp32(self):
    """fp32 variant of the static-shape no-op expand removal test."""
    self._test_no_op_expands_removed_static_shapes(test_name='no_op_expands_removed_static_shapes_fp32', dtype='float32')
def _test_no_op_expands_removed_dynamic_shapes(self, test_name='no_op_expands_removed_dynamic_shapes', dtype='float16'):
    """Same as the static-shape test, but with a dynamic middle dimension."""
    dyn = IntVar([1, 5], name='dynamic_dim')
    inp = Tensor([1, dyn, 3], name='input_0', is_input=True, dtype=dtype)
    # IntVar([1, 1]) is effectively static 1 -> the expand is a no-op.
    expanded = ops.expand()(inp, [IntVar([1, 1]), -1, -1])
    out = ops.elementwise(FuncEnum.MUL)(expanded, expanded)
    out._attrs['is_output'] = True
    out._attrs['name'] = 'output_0'
    x_pt = get_random_torch_tensor([1, 2, 3], dtype=dtype)
    expected = x_pt * x_pt
    actual = torch.empty_like(expected)
    with compile_model(out, detect_target(), './tmp', test_name) as module:
        module.run_with_tensors({'input_0': x_pt}, {'output_0': actual})
        self.assertFalse(graph_has_op(module.debug_sorted_graph, 'expand'))
        self.assertTrue(torch.equal(actual, expected))
def test_no_op_expands_removed_dynamic_shapes_fp16(self):
    """fp16 variant of the dynamic-shape no-op expand removal test."""
    self._test_no_op_expands_removed_dynamic_shapes(
        test_name='no_op_expands_removed_dynamic_shapes_fp16',
        dtype='float16',
    )
# BUG FIX: the bare tuple preceding this method was a stripped decorator;
# restored as unittest.skipIf so the fp32 variant is skipped on ROCM.
@unittest.skipIf(detect_target().name() == 'rocm', 'Not supported by ROCM.')
def test_no_op_expands_removed_dynamic_shapes_fp32(self):
    """fp32 variant of the dynamic-shape no-op expand removal test."""
    self._test_no_op_expands_removed_dynamic_shapes(test_name='no_op_expands_removed_dynamic_shapes_fp32', dtype='float32')
def _test_no_op_expands_removed_size_op(self, test_name='no_op_expands_removed_size_op', dtype='float16'):
    """Expands whose target dims come from size() of the same tensors are
    no-ops and must be removed from the compiled graph."""
    a = Tensor([1, 2, 3], name='input_0', is_input=True, dtype=dtype)
    b = Tensor([IntVar([1, 1]), 2, 3], name='input_1', is_input=True, dtype=dtype)
    a_batch = ops.size()(a, 0)
    b_batch = ops.size()(b, 0)
    a_expand = ops.expand()(a, [a_batch, -1, -1])
    b_expand = ops.expand()(b, [b_batch, -1, -1])
    out = ops.elementwise(FuncEnum.MUL)(a_expand, b_expand)
    out._attrs['is_output'] = True
    out._attrs['name'] = 'output_0'
    a_pt = get_random_torch_tensor([1, 2, 3], dtype=dtype)
    b_pt = get_random_torch_tensor([1, 2, 3], dtype=dtype)
    expected = a_pt * b_pt
    actual = torch.empty_like(expected)
    with compile_model(out, detect_target(), './tmp', test_name) as module:
        module.run_with_tensors({'input_0': a_pt, 'input_1': b_pt}, {'output_0': actual})
        self.assertFalse(graph_has_op(module.debug_sorted_graph, 'expand'))
        self.assertTrue(torch.equal(actual, expected))
def test_no_op_expands_removed_size_op_fp16(self):
    """fp16 variant of the size-op no-op expand removal test."""
    self._test_no_op_expands_removed_size_op(
        test_name='no_op_expands_removed_size_op_fp16',
        dtype='float16',
    )
# BUG FIX: the bare tuple preceding this method was a stripped decorator;
# restored as unittest.skipIf so the fp32 variant is skipped on ROCM.
@unittest.skipIf(detect_target().name() == 'rocm', 'Not supported by ROCM.')
def test_no_op_expands_removed_size_op_fp32(self):
    """fp32 variant of the size-op no-op expand removal test."""
    self._test_no_op_expands_removed_size_op(test_name='no_op_expands_removed_size_op_fp32', dtype='float32')
def test_no_op_expand_elementwise_jagged_dense_inputs(self):
    """An expand feeding a jagged/dense elementwise op must be dropped by
    the remove_no_ops graph pass (graph-level check only, no codegen)."""
    total_length = IntVar([1, 100])
    batch_dim = IntVar([1, 10])
    offsets_dim = IntVar([2, 11])
    embedding_dim = IntImm(128)
    max_seq_len = 10
    dense = Tensor([batch_dim, 1, embedding_dim], name='x', is_input=True, dtype='float16')
    source = Tensor([total_length, embedding_dim], name='source', is_input=True, dtype='float16')
    offsets_list = [Tensor(shape=[offsets_dim], name='offsets', is_input=True, dtype='int32')]
    jagged = ops.make_jagged(batch_dim=batch_dim, jagged_dims=[JaggedDim(0, max_seq_len)])(source=source, offsets_list=offsets_list)
    expanded = ops.expand()(dense, [batch_dim, max_seq_len, -1])
    product = ops.elementwise(FuncEnum.MUL)(jagged, expanded)
    graph = compiler.transform.toposort([product])
    compiler.transform.remove_no_ops(graph)
    sorted_ops = graph_utils.get_sorted_ops(graph)
    assert not has_op(sorted_ops, 'expand')
([param('fp32_small_noadd_1', 'float32', [10, 1, 5], [(- 1), 10, 5]), param('fp32_small_noadd_2', 'float32', [10, 1, 8], [(- 1), 10, 8]), param('fp32_small_noadd_3', 'float32', [10, 1, 2], [(- 1), 10, 2]), param('fp32_small_noadd_4', 'float32', [10, 1, 5], [10, 10, 5]), param('fp32_small_1', 'float32', [10, 1, 5], [3, 10, 10, 5]), param('fp32_small_2', 'float32', [3, 1, 5], [3, 3, 3, (- 1)]), param('fp32_small_3', 'float32', [2, 1, 4, 1, 6], [(- 1), 10, 4, 5, 6]), param('fp32_small_var_1', 'float32', [10, 1, 5], [3, 10, 10, 5], False), param('fp32_small_var_2', 'float32', [1, 1, 5], [3, 3, 10, (- 1)], False), param('fp32_small_var_3', 'float32', [2, 1, 4, 1, 6], [(- 1), 10, 4, 5, 6], False), param('float16_small_1', 'float16', [2, 3, 1, 5], [2, (- 1), 3, 10, 5]), param('float16_small_2', 'float16', [1, 2, 10], [10, 2, 10]), param('bfloat16_small_1', 'bfloat16', [2, 3, 1, 5], [2, (- 1), 3, 10, 5]), param('int64_small_1', 'int64', [2, 3, 1, 5], [2, (- 1), 3, 10, 5]), param('fp32_large_1', 'float32', [100, 1, 9, 3], [2, 20, (- 1), 100, 9, (- 1)], 'int32'), param('fp32_large_2', 'float32', [101, 1, 91, 3], [(- 1), 100, 91, (- 1)], 'int64'), param('fp32_large_3', 'float32', [100, 1, 9, 3], [2, 20, (- 1), 100, 9, (- 1)], 'int64'), param('benchmark_var_1', 'float32', [100, 1, 9, 4], [20, 20, 100, 100, 9, (- 1)], False, 'int64'), param('fp32_m_1', 'float32', [5, 1, 3, 2], [2, 2, (- 1), 5, 3, (- 1)]), param('fp32_m_2', 'float32', [5, 1, 3, 5], [2, 2, (- 1), 5, 3, (- 1)]), param('edge_case_shapes_1', 'float32', [1, 1, 1, 1], [1, 1, (- 1), 1, (- 1), 1]), param('edge_case_shapes_2', 'float32', [1], [(- 1)]), param('edge_case_shapes_3', 'float32', [3], [(- 1)]), param('edge_case_shapes_4', 'float32', [1], [1]), param('edge_case_shapes_5', 'float32', [1, 1], [1, 0]), param('edge_case_shapes_6', 'float32', [2, 0], [(- 1), (- 1)]), param('edge_case_shapes_7', 'float32', [2, 0], [2, 0]), param('edge_case_shapes_var_1', 'float32', [1, 1, 1, 1], [1, 1, (- 1), 1, (- 1), 1], False), 
param('edge_case_shapes_var_2', 'float32', [1], [(- 1)], False), param('edge_case_shapes_var_3', 'float32', [3], [(- 1)], False), param('edge_case_shapes_var_4', 'float32', [1], [1], False), param('edge_case_shapes_var_5', 'float32', [1, 1], [1, 0], False), param('edge_case_shapes_var_6', 'float32', [2, 0], [(- 1), (- 1)], False), param('edge_case_shapes_var_6', 'float32', [2, 0], [2, 0], False)])
((detect_target().name() == 'rocm'), 'Not supported by ROCM.')
    def test_expand_op(self, name, dtype, src_shape, expand_shape, optimize_fixed_dims=True, index_type='int64'):
        """Compile an AIT `expand` op, verify its output against torch.expand,
        and benchmark it against `Tensor.contiguous()` in eager PyTorch.

        Args (supplied by the stripped parameterized decorator above):
            name: test-case label, also used in the compilation work dir.
            dtype: element dtype string ('float32', 'float16', 'bfloat16', 'int64').
            src_shape: static input shape.
            expand_shape: target shape; -1 keeps the source dimension.
            optimize_fixed_dims: selects the "optimized" expand codegen variant.
            index_type: integer type used for index arithmetic in the kernel.
        """
        x = Tensor(src_shape, name='X', is_input=True, dtype=dtype)
        y = ops.expand()(x, expand_shape, optimize_fixed_dims=optimize_fixed_dims, index_type=index_type)
        y._attrs['is_output'] = True
        y._attrs['name'] = 'Y'
        if (dtype != 'int64'):
            x_pt = get_random_torch_tensor(src_shape, dtype=dtype)
        else:
            # Random int64 generation is not supported by the helper; use a
            # deterministic 1..N ramp instead.
            x_pt = torch.arange(1, (math.prod(src_shape) + 1), 1, dtype=torch.int64, device='cuda').view(src_shape)
        y_pt = x_pt.expand(expand_shape)
        y_ait = torch.zeros_like(y_pt)
        stream = torch.cuda.default_stream()
        start_event_pt = torch.cuda.Event(enable_timing=True)
        end_event_pt = torch.cuda.Event(enable_timing=True)
        num_iters = 20
        with compile_model(y, detect_target(), './tmp', ('test_expand_codegen_' + name)) as module:
            # Correctness check first, then timed benchmark of the same kernel.
            module.run_with_tensors({'X': x_pt}, {'Y': y_ait})
            self.assertTrue(graph_has_op(module.debug_sorted_graph, 'expand'))
            (time_mean_ms, time_std_ms, result_tensors) = module.benchmark_with_tensors({'X': x_pt}, {'Y': y_ait}, count=num_iters)
            print(f'Write GB/sec:{((((1000 * y_pt.numel()) * y_pt.element_size()) / time_mean_ms) / ((1024 * 1024) * 1024))}')
            self.assertTrue(torch.equal(y_ait, y_pt))
        # Large tensor written between iterations to evict the L2 cache so the
        # PyTorch baseline below is not flattered by warm caches.
        cache_trasher = torch.zeros(1000, 1000, 42, device='cuda', requires_grad=False)
        sum_elapsed_pt = 0.0
        for _ in range(num_iters):
            cache_trasher.normal_()
            start_event_pt = torch.cuda.Event(enable_timing=True)
            end_event_pt = torch.cuda.Event(enable_timing=True)
            torch.cuda.synchronize()
            start_event_pt.record(stream=stream)
            # Eager-PyTorch baseline: materializing the expanded view copies it.
            _ = y_pt.contiguous()
            end_event_pt.record(stream=stream)
            torch.cuda.synchronize()
            sum_elapsed_pt += start_event_pt.elapsed_time(end_event_pt)
        pt_time = (sum_elapsed_pt / num_iters)
        # Throughputs in GiB/s.  "lower" bound assumes the input is read once;
        # "upper" assumes every written byte was also read.
        ait_throughput_write = ((((1000 * y_pt.numel()) * y_pt.element_size()) / time_mean_ms) / ((1024 * 1024) * 1024))
        ait_throughput_read_once = ((((1000 * x_pt.numel()) * x_pt.element_size()) / time_mean_ms) / ((1024 * 1024) * 1024))
        ait_throughput_total_lower_bound = (ait_throughput_write + ait_throughput_read_once)
        ait_throughput_total_upper_bound = (ait_throughput_write * 2)
        pt_throughput_write = ((((1000 * y_pt.numel()) * y_pt.element_size()) / pt_time) / ((1024 * 1024) * 1024))
        pt_throughput_read = ((((1000 * x_pt.numel()) * x_pt.element_size()) / pt_time) / ((1024 * 1024) * 1024))
        pt_throughput_total_lower_bound = (pt_throughput_write + pt_throughput_read)
        pt_throughput_total_upper_bound = (pt_throughput_write * 2)
        ait_speedup_factor = f'{(pt_time / time_mean_ms):.2f}'
        ait_expand_variant = 'general'
        if optimize_fixed_dims:
            ait_expand_variant = 'optimized'
        print(f'''Benchmark Summary (test_expand_op:{name}) - {src_shape} => {expand_shape}: dtype={dtype}, variant={ait_expand_variant}. AIT speedup={ait_speedup_factor}x. Throughputs in GB/sec.: Write: pt={pt_throughput_write:.1f}, ait={ait_throughput_write:.1f}, Total (lower): pt={pt_throughput_total_lower_bound:.1f}, ait={ait_throughput_total_lower_bound:.1f} Total (upper): pt={pt_throughput_total_upper_bound:.1f}, ait=={ait_throughput_total_upper_bound:.1f} ]
Benchmark note: Total throughput (lower) assumes the input is read once, Total throughput (upper) assumes every byte written has been read as well. The truth is inbetween due to caching of repeated reads.''', file=sys.stdout, flush=True)
class BaseFixedDecoder(Fixed32ByteSizeDecoder):
    """Base for fixed-point decoders: adds a mandatory `frac_places` setting.

    Subclasses must set `frac_places` (number of fractional decimal places,
    1..80 inclusive); `validate` rejects missing or out-of-range values.
    """

    frac_places = None
    is_big_endian = True

    def validate(self):
        """Run base validation, then check `frac_places` is set and in (0, 80]."""
        super().validate()
        if self.frac_places is None:
            raise ValueError('must specify `frac_places`')
        if not (0 < self.frac_places <= 80):
            raise ValueError('`frac_places` must be in range (0, 80]')
def normalizeLinkText(url: str) -> str:
    """Return a human-readable form of *url*: IDN hostnames are converted back
    from punycode (for recognised protocols) and percent-escapes are decoded.
    """
    url_parts = mdurl.parse(url, slashes_denote_host=True)
    protocol_ok = (not url_parts.protocol) or (url_parts.protocol in RECODE_HOSTNAME_FOR)
    if url_parts.hostname and protocol_ok:
        try:
            url_parts = url_parts._replace(hostname=_punycode.to_unicode(url_parts.hostname))
        except Exception:
            # Malformed punycode: keep the hostname as-is.
            pass
    return mdurl.decode(mdurl.format(url_parts), mdurl.DECODE_DEFAULT_CHARS + '%')
def _parse_value(value: Union[(int, str, float, dict)], name: str, parent_name: str=None) -> dict:
if (name is not None):
assert isinstance(name, str), 'name can only be `str` type, not {}.'.format(name)
_dict = {}
if (isinstance(value, (int, float, str, bool)) or (value is None)):
if (name is None):
raise RuntimeError('When value is {}, you must pass `name`.'.format(type(value)))
elif isinstance(value, dict):
_check_dict_value(value)
elif ('torch.Tensor' in str(type(value))):
assert (name is not None), f'When value is `{type(value)}`, you must pass a name.'
try:
value = value.item()
except:
value = str(value.tolist())
elif ('numpy.ndarray' in str(type(value))):
assert (name is not None), f'When value is `{type(value)}`, you must pass a name.'
total_ele = 1
for dim in value.shape:
total_ele *= dim
if (total_ele == 1):
value = value.reshape(1)[0]
else:
value = str(value.tolist())
elif isinstance(value, np.bool_):
value = bool(value)
elif isinstance(value, np.integer):
value = int(value)
elif isinstance(value, np.floating):
value = (float(value) if (not (math.isnan(value) and math.isinf(value))) else str(value))
else:
value = str(value)
assert (name is not None), f'When value is `{type(value)}`, you must pass a name.'
if ((parent_name != None) and (name != None)):
_dict = {parent_name.replace(' ', '_'): {name.replace(' ', '_'): value}}
elif (parent_name != None):
_dict = {parent_name.replace(' ', '_'): value}
elif (name != None):
_dict = {name.replace(' ', '_'): value}
else:
_dict = value
return _dict |
def test_globals():
    """Decompile two sample binaries and sanity-check global/symbol handling.

    'main' must appear exactly once in the condmap output, and neither NULL
    placeholders nor raw ELF header bytes may leak into the decompilations.
    """
    base_cmd = ['python', 'decompile.py']
    condmap_cmd = base_cmd + ['tests/samples/bin/systemtests/64/2/condmap', 'main']
    goto_cmd = base_cmd + ['tests/samples/bin/systemtests/32/0/test_goto', 'test2']
    condmap_out = str(subprocess.run(condmap_cmd, check=True, capture_output=True).stdout)
    goto_out = str(subprocess.run(goto_cmd, check=True, capture_output=True).stdout)
    assert condmap_out.count('main') == 1
    assert condmap_out.count('NULL') == 0
    assert goto_out.count('ELF') == 0
class Chart():
    """An astrological chart for a given date and geographic position.

    Objects, houses and angles are computed once via the `ephem` module in
    __init__ and then served from the stored lists by the get* accessors.
    """

    def __init__(self, date, pos, **kwargs):
        """Build the chart.

        Keyword args:
            hsys: house system (default const.HOUSES_DEFAULT).
            IDs: object IDs to compute (default traditional list).
        """
        hsys = kwargs.get('hsys', const.HOUSES_DEFAULT)
        IDs = kwargs.get('IDs', const.LIST_OBJECTS_TRADITIONAL)
        self.date = date
        self.pos = pos
        self.hsys = hsys
        self.objects = ephem.getObjectList(IDs, date, pos)
        (self.houses, self.angles) = ephem.getHouses(date, pos, hsys)

    def copy(self):
        """Return a copy of this chart (object/house/angle lists are copied;
        date and pos are shared)."""
        chart = Chart.__new__(Chart)
        chart.date = self.date
        chart.pos = self.pos
        chart.hsys = self.hsys
        chart.objects = self.objects.copy()
        chart.houses = self.houses.copy()
        chart.angles = self.angles.copy()
        return chart

    def getObject(self, ID):
        """Return the object with the given ID."""
        return self.objects.get(ID)

    def getHouse(self, ID):
        """Return the house with the given ID."""
        return self.houses.get(ID)

    def getAngle(self, ID):
        """Return the angle with the given ID."""
        return self.angles.get(ID)

    def get(self, ID):
        """Generic accessor: dispatch on the ID to house, angle or object."""
        if ID.startswith('House'):
            return self.getHouse(ID)
        elif (ID in const.LIST_ANGLES):
            return self.getAngle(ID)
        else:
            return self.getObject(ID)

    def getFixedStar(self, ID):
        """Compute a single fixed star for this chart's date (not cached)."""
        return ephem.getFixedStar(ID, self.date)

    def getFixedStars(self):
        """Compute the full fixed-star list for this chart's date."""
        IDs = const.LIST_FIXED_STARS
        return ephem.getFixedStarList(IDs, self.date)

    def isHouse1Asc(self):
        """True if house 1's cusp coincides with the Ascendant (within ~1 arcsec)."""
        house1 = self.getHouse(const.HOUSE1)
        asc = self.getAngle(const.ASC)
        dist = angle.closestdistance(house1.lon, asc.lon)
        return (abs(dist) < 0.0003)

    def isHouse10MC(self):
        """True if house 10's cusp coincides with the MC (within ~1 arcsec)."""
        house10 = self.getHouse(const.HOUSE10)
        mc = self.getAngle(const.MC)
        dist = angle.closestdistance(house10.lon, mc.lon)
        return (abs(dist) < 0.0003)

    def isDiurnal(self):
        """True if the chart is diurnal (Sun above the horizon).

        Works in equatorial coordinates; the MC's right ascension stands in
        for local sidereal time in the horizon test.
        """
        sun = self.getObject(const.SUN)
        mc = self.getAngle(const.MC)
        lat = self.pos.lat
        (sunRA, sunDecl) = utils.eqCoords(sun.lon, sun.lat)
        (mcRA, mcDecl) = utils.eqCoords(mc.lon, 0)
        return utils.isAboveHorizon(sunRA, sunDecl, mcRA, lat)

    def getMoonPhase(self):
        """Return the Moon quarter from the Sun→Moon angular distance
        (0-90 first, 90-180 second, 180-270 third, else last)."""
        sun = self.getObject(const.SUN)
        moon = self.getObject(const.MOON)
        dist = angle.distance(sun.lon, moon.lon)
        if (dist < 90):
            return const.MOON_FIRST_QUARTER
        elif (dist < 180):
            return const.MOON_SECOND_QUARTER
        elif (dist < 270):
            return const.MOON_THIRD_QUARTER
        else:
            return const.MOON_LAST_QUARTER

    def solarReturn(self, year):
        """Return a new Chart for the solar return of the given year
        (same position and house system, same UTC offset)."""
        sun = self.getObject(const.SUN)
        date = Datetime('{0}/01/01'.format(year), '00:00', self.date.utcoffset)
        srDate = ephem.nextSolarReturn(date, sun.lon)
        return Chart(srDate, self.pos, hsys=self.hsys)
class CommandStreamer(PacketStreamer):
    """Simulation driver that streams CommandTXPacket objects onto a LiteX
    stream endpoint, one word per clock via the `generator` coroutine.

    NOTE(review): statement order inside `generator` is significant — each
    `yield expr` schedules a signal assignment and each bare `(yield)` advances
    one clock cycle; do not reorder.
    """

    def __init__(self):
        self.source = stream.Endpoint(command_tx_description(32))
        # Queue of packets waiting to be sent; `packet` is the one in flight.
        self.packets = []
        self.packet = CommandTXPacket()
        self.packet.done = True

    def send(self, packet):
        """Queue a (deep-copied) packet for transmission; returns the copy so
        the caller can poll its `done` flag."""
        packet = deepcopy(packet)
        self.packets.append(packet)
        return packet

    def send_blocking(self, packet):
        """Queue a packet and yield clock cycles until it has been sent."""
        packet = self.send(packet)
        while (not packet.done):
            (yield)

    def generator(self):
        """Simulation process: drive source.valid/data/last from the packet
        queue, popping one data word per accepted beat."""
        while True:
            # Start the next packet once the current one is finished.
            if (len(self.packets) and self.packet.done):
                self.packet = self.packets.pop(0)
            # Command fields are held for the whole packet.
            (yield self.source.write.eq(self.packet.write))
            (yield self.source.read.eq(self.packet.read))
            (yield self.source.sector.eq(self.packet.sector))
            (yield self.source.count.eq(self.packet.count))
            if ((not self.packet.ongoing) and (not self.packet.done)):
                # First beat of a new packet.
                (yield self.source.valid.eq(1))
                if (len(self.packet) > 0):
                    (yield self.source.data.eq(self.packet.pop(0)))
                self.packet.ongoing = True
            elif ((yield self.source.valid) and (yield self.source.ready)):
                # Previous beat was accepted; present the next word (asserting
                # `last` when this is the final one) or finish the packet.
                (yield self.source.last.eq((len(self.packet) == 1)))
                if (len(self.packet) > 0):
                    (yield self.source.valid.eq(1))
                    (yield self.source.data.eq(self.packet.pop(0)))
                else:
                    self.packet.done = True
                    (yield self.source.valid.eq(0))
            (yield)
class LastSboxes():
    """Factory for a last-round sbox attack selection function.

    NOTE: `__new__` returns the wrapped selection function, not a LastSboxes
    instance — calling `LastSboxes(...)` yields the decorated function object.
    """

    def __new__(cls, guesses=_np.arange(64, dtype='uint8'), words=None, ciphertext_tag='ciphertext', key_tag='key'):
        # NOTE(review): the ndarray default for `guesses` is evaluated once at
        # class-definition time and shared across calls; callers must not
        # mutate it.
        return _decorated_selection_function(_AttackSelectionFunctionWrapped, _sboxes, expected_key_function=_last_key, words=words, guesses=guesses, target_tag=ciphertext_tag, key_tag=key_tag)
class MiscellaneousRoutes(ComponentBase):
    """Web component bundling the home page, comments, admin deletion and
    log-viewing routes.

    NOTE(review): the bare expressions before each route method (`_required`,
    `_accepted(...)`, `('/path', METHOD)`) look like stripped decorators from
    the original source; they are kept verbatim.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.stats_updater = StatsUpdater(stats_db=self.db.stats_updater)

    _required
    _accepted(*PRIVILEGES['status'])
    ('/', GET)
    def show_home(self):
        """Render the landing page with latest submissions, comments,
        comparisons and general statistics."""
        latest_count = config.frontend.number_of_latest_firmwares_to_display
        with get_shared_session(self.db.frontend) as frontend_db:
            latest_firmware_submissions = frontend_db.get_last_added_firmwares(latest_count)
            latest_comments = frontend_db.get_latest_comments(latest_count)
        latest_comparison_results = self.db.comparison.page_comparison_results(limit=10)
        ajax_stats_reload_time = config.frontend.ajax_stats_reload_time
        general_stats = self.stats_updater.get_general_stats()
        return render_template('home.html', general_stats=general_stats, latest_firmware_submissions=latest_firmware_submissions, latest_comments=latest_comments, latest_comparison_results=latest_comparison_results, ajax_stats_reload_time=ajax_stats_reload_time)

    ('/about', GET)
    def show_about(self):
        """Render the static about page."""
        return render_template('about.html')

    _accepted(*PRIVILEGES['comment'])
    ('/comment/<uid>', POST)
    def post_comment(self, uid):
        """Store a submitted comment for the object *uid* and go back to its
        analysis page."""
        comment = request.form['comment']
        author = request.form['author']
        self.db.editing.add_comment_to_object(uid, comment, author, round(time()))
        return redirect(url_for('show_analysis', uid=uid))

    _accepted(*PRIVILEGES['comment'])
    ('/comment/<uid>', GET)
    def show_add_comment(self, uid):
        """Render the comment form; flags an error if *uid* is unknown."""
        error = (not self.db.frontend.exists(uid))
        return render_template('add_comment.html', uid=uid, error=error)

    _accepted(*PRIVILEGES['delete'])
    ('/admin/delete_comment/<uid>/<timestamp>', GET)
    def delete_comment(self, uid, timestamp):
        """Delete the comment identified by (*uid*, *timestamp*)."""
        self.db.editing.delete_comment(uid, timestamp)
        return redirect(url_for('show_analysis', uid=uid))

    _accepted(*PRIVILEGES['delete'])
    ('/admin/delete/<uid>', GET)
    def delete_firmware(self, uid):
        """Delete a firmware (and dependent entries) from the database."""
        if (not self.db.frontend.is_firmware(uid)):
            return render_template('error.html', message=f'Firmware not found in database: {uid}')
        (deleted_virtual_path_entries, deleted_files) = self.db.admin.delete_firmware(uid)
        return render_template('delete_firmware.html', deleted_vps=deleted_virtual_path_entries, deleted_files=deleted_files, uid=uid)

    _accepted(*PRIVILEGES['delete'])
    ('/admin/missing_analyses', GET)
    def find_missing_analyses(self):
        """Render the overview of missing and failed analyses."""
        template_data = {'missing_analyses': self._find_missing_analyses(), 'failed_analyses': self._find_failed_analyses()}
        return render_template('find_missing_analyses.html', **template_data)

    def _find_missing_analyses(self):
        """Collect missing analyses plus count and lookup duration."""
        start = time()
        missing_analyses = self.db.frontend.find_missing_analyses()
        return {'tuples': list(missing_analyses.items()), 'count': self._count_values(missing_analyses), 'duration': format_time((time() - start))}

    # BUGFIX: was `def _count_values(dictionary)` (no `self`) while being
    # called as `self._count_values(...)`, which raises TypeError at runtime.
    def _count_values(self, dictionary: dict[str, Sized]) -> int:
        """Sum the lengths of all values in *dictionary*."""
        return sum(len(e) for e in dictionary.values())

    def _find_failed_analyses(self):
        """Collect failed analyses plus count and lookup duration."""
        start = time()
        failed_analyses = self.db.frontend.find_failed_analyses()
        return {'tuples': list(failed_analyses.items()), 'count': self._count_values(failed_analyses), 'duration': format_time((time() - start))}

    _accepted(*PRIVILEGES['view_logs'])
    ('/admin/logs', GET)
    def show_logs(self):
        """Render backend and frontend log tails."""
        backend_logs = '\n'.join(self.intercom.get_backend_logs())
        frontend_logs = '\n'.join(self._get_frontend_logs())
        return render_template('logs.html', backend_logs=backend_logs, frontend_logs=frontend_logs)

    def _get_frontend_logs(self):
        """Return the last 100 lines of the frontend log file, or [] if absent."""
        frontend_logs = Path(config.frontend.logging.file_frontend)
        if frontend_logs.is_file():
            return frontend_logs.read_text().splitlines()[-100:]
        return []
class VRRPRouter(app_manager.RyuApp):
    """Base Ryu application implementing one VRRP router instance.

    Concrete per-version subclasses register themselves in `_CONSTRUCTORS` via
    the `register` decorator; state handling is delegated to a state object
    looked up in `_STATE_MAP` (populated by subclasses).

    NOTE(review): the bare `_ev_handler(...)` expressions before the handler
    methods appear to be stripped event-handler decorators.
    """
    _EVENTS = [vrrp_event.EventVRRPStateChanged]
    _CONSTRUCTORS = {}
    _STATE_MAP = {}

    def register(version):
        # NOTE(review): takes no `self` and is used as a class-level decorator
        # factory — presumably a stripped @staticmethod; confirm against the
        # original source.
        def _register(cls):
            VRRPRouter._CONSTRUCTORS[version] = cls
            return cls
        return _register

    def factory(name, monitor_name, interface, config, statistics, *args, **kwargs):
        # NOTE(review): also looks like a stripped @staticmethod (no `self`).
        # Instantiates the version-specific subclass through the app manager.
        cls = VRRPRouter._CONSTRUCTORS[config.version]
        app_mgr = app_manager.AppManager.get_instance()
        kwargs = kwargs.copy()
        kwargs['name'] = name
        kwargs['monitor_name'] = monitor_name
        kwargs['vrrp_interface'] = interface
        kwargs['vrrp_config'] = config
        kwargs['vrrp_statistics'] = statistics
        return app_mgr.instantiate(cls, *args, **kwargs)

    # Internal timer events, one per timer below.
    class _EventMasterDown(event.EventBase):
        pass

    class _EventAdver(event.EventBase):
        pass

    class _EventPreemptDelay(event.EventBase):
        pass

    class _EventStatisticsOut(event.EventBase):
        pass

    def __init__(self, *args, **kwargs):
        super(VRRPRouter, self).__init__(*args, **kwargs)
        self.name = kwargs['name']
        self.monitor_name = kwargs['monitor_name']
        self.interface = kwargs['vrrp_interface']
        self.config = kwargs['vrrp_config']
        self.statistics = kwargs['vrrp_statistics']
        self.params = VRRPParams(self.config)
        # Current state name and its behavior object (from _STATE_MAP).
        self.state = None
        self.state_impl = None
        # Cached advertisement packet; invalidated on config change.
        self.vrrp = None
        self.master_down_timer = TimerEventSender(self, self._EventMasterDown)
        self.adver_timer = TimerEventSender(self, self._EventAdver)
        self.preempt_delay_timer = TimerEventSender(self, self._EventPreemptDelay)
        self.register_observer(self._EventMasterDown, self.name)
        self.register_observer(self._EventAdver, self.name)
        self.stats_out_timer = TimerEventSender(self, self._EventStatisticsOut)
        self.register_observer(self._EventStatisticsOut, self.name)

    def send_advertisement(self, release=False):
        """Build (lazily, cached in self.vrrp) and transmit a VRRP
        advertisement; with release=True, send priority 0 to give up
        master responsibility."""
        if (self.vrrp is None):
            config = self.config
            max_adver_int = vrrp.vrrp.sec_to_max_adver_int(config.version, config.advertisement_interval)
            self.vrrp = vrrp.vrrp.create_version(config.version, vrrp.VRRP_TYPE_ADVERTISEMENT, config.vrid, config.priority, max_adver_int, config.ip_addresses)
        vrrp_ = self.vrrp
        if release:
            vrrp_ = vrrp_.create(vrrp_.type, vrrp_.vrid, vrrp.VRRP_PRIORITY_RELEASE_RESPONSIBILITY, vrrp_.max_adver_int, vrrp_.ip_addresses)
        if (self.vrrp.priority == 0):
            self.statistics.tx_vrrp_zero_prio_packets += 1
        interface = self.interface
        packet_ = vrrp_.create_packet(interface.primary_ip_address, interface.vlan_id)
        packet_.serialize()
        vrrp_api.vrrp_transmit(self, self.monitor_name, packet_.data)
        self.statistics.tx_vrrp_packets += 1

    def state_change(self, new_state):
        """Switch to *new_state*, instantiate its behavior object and notify
        observers with an EventVRRPStateChanged."""
        old_state = self.state
        self.state = new_state
        self.state_impl = self._STATE_MAP[new_state](self)
        state_changed = vrrp_event.EventVRRPStateChanged(self.name, self.monitor_name, self.interface, self.config, old_state, new_state)
        self.send_event_to_observers(state_changed)

    # The handlers below just forward events to the current state object.
    _ev_handler(_EventMasterDown)
    def master_down_handler(self, ev):
        self.state_impl.master_down(ev)

    _ev_handler(_EventAdver)
    def adver_handler(self, ev):
        self.state_impl.adver(ev)

    _ev_handler(_EventPreemptDelay)
    def preempt_delay_handler(self, ev):
        self.state_impl.preempt_delay(ev)

    _ev_handler(vrrp_event.EventVRRPReceived)
    def vrrp_received_handler(self, ev):
        self.state_impl.vrrp_received(ev)

    _ev_handler(vrrp_event.EventVRRPShutdownRequest)
    def vrrp_shutdown_request_handler(self, ev):
        assert (ev.instance_name == self.name)
        self.state_impl.vrrp_shutdown_request(ev)

    _ev_handler(vrrp_event.EventVRRPConfigChangeRequest)
    def vrrp_config_change_request_handler(self, ev):
        """Apply the non-None fields of the change request to the config and
        invalidate the cached advertisement packet."""
        config = self.config
        if (ev.priority is not None):
            config.priority = ev.priority
        if (ev.advertisement_interval is not None):
            config.advertisement_interval = ev.advertisement_interval
        if (ev.preempt_mode is not None):
            config.preempt_mode = ev.preempt_mode
        if (ev.preempt_delay is not None):
            config.preempt_delay = ev.preempt_delay
        if (ev.accept_mode is not None):
            config.accept_mode = ev.accept_mode
        # Force the advertisement packet to be rebuilt with the new config.
        self.vrrp = None
        self.state_impl.vrrp_config_change_request(ev)

    _ev_handler(_EventStatisticsOut)
    def statistics_handler(self, ev):
        # Re-arm the periodic statistics timer.
        self.stats_out_timer.start(self.statistics.statistics_interval)
('bodhi.server.security.get_current_registry', mock.MagicMock(return_value=FakeRegistry()))
class TestCorsOrigins():
    """Tests for security.CorsOrigins lazy initialization.

    The stripped class-level mock.patch (residue on the line above this class)
    presumably makes get_current_registry return a FakeRegistry whose
    'cors_origins_ro' setting is ['origin_1', 'origin_2'].  Initialized
    instances must not touch the registry again, which the tests enforce by
    patching get_current_registry to raise.
    """

    def test___contains___initialized(self):
        """After initialize(), `in` works without consulting the registry."""
        co = security.CorsOrigins('cors_origins_ro')
        co.initialize()
        with mock.patch('bodhi.server.security.get_current_registry', side_effect=Exception()):
            assert ('origin_2' in co)

    def test___contains___uninitialized(self):
        """`in` on a fresh instance triggers lazy initialization."""
        co = security.CorsOrigins('cors_origins_ro')
        assert ('origin_1' in co)

    def test___getitem___initialized(self):
        """Indexing after initialize() does not consult the registry."""
        co = security.CorsOrigins('cors_origins_ro')
        co.initialize()
        with mock.patch('bodhi.server.security.get_current_registry', side_effect=Exception()):
            assert (co[1] == 'origin_2')

    def test___getitem___uninitialized(self):
        """Indexing a fresh instance triggers lazy initialization."""
        co = security.CorsOrigins('cors_origins_ro')
        assert (co[0] == 'origin_1')

    def test___init__(self):
        """Constructor stores the setting name and defers origin loading."""
        co = security.CorsOrigins('cors_origins_ro')
        assert (co.name == 'cors_origins_ro')
        assert (co.origins is None)

    def test___iter___initialized(self):
        """Iteration after initialize() does not consult the registry."""
        co = security.CorsOrigins('cors_origins_ro')
        co.initialize()
        with mock.patch('bodhi.server.security.get_current_registry', side_effect=Exception()):
            assert (list(co) == ['origin_1', 'origin_2'])

    def test___iter___uninitialized(self):
        """Iterating a fresh instance triggers lazy initialization."""
        co = security.CorsOrigins('cors_origins_ro')
        assert (list(co) == ['origin_1', 'origin_2'])

    def test___len___initialized(self):
        """len() after initialize() does not consult the registry."""
        co = security.CorsOrigins('cors_origins_ro')
        co.initialize()
        with mock.patch('bodhi.server.security.get_current_registry', side_effect=Exception()):
            assert (len(co) == 2)

    def test___len___uninitialized(self):
        """len() on a fresh instance triggers lazy initialization."""
        co = security.CorsOrigins('cors_origins_ro')
        assert (len(co) == 2)

    def test_initialize_setting_not_found(self):
        """Unknown setting names fall back to ['localhost']."""
        co = security.CorsOrigins('not_found')
        assert (list(co) == ['localhost'])

    def test_initialize_with_origins(self):
        """A second initialize() is a no-op and must not hit the registry."""
        co = security.CorsOrigins('cors_origins_ro')
        co.initialize()
        with mock.patch('bodhi.server.security.get_current_registry', side_effect=Exception()):
            co.initialize()

    def test_initialize_without_origins(self):
        """initialize() loads the origins from the registry settings."""
        co = security.CorsOrigins('cors_origins_ro')
        co.initialize()
        assert (co.origins == ['origin_1', 'origin_2'])
class TestPredictionCountEvaluation(unittest.TestCase):
    """Tests for PredictionCountEvaluator process/evaluate/reset lifecycle."""

    def setUp(self):
        """Build five mock detector outputs with 3+3+2+2+1 = 11 predictions
        whose scores are drawn from {0.9, 0.8, 0.7}."""
        self.evaluator = PredictionCountEvaluator()
        image_size = (224, 224)
        self.mock_outputs = [{'instances': Instances(image_size, scores=torch.Tensor([0.9, 0.8, 0.7]))}, {'instances': Instances(image_size, scores=torch.Tensor([0.9, 0.8, 0.7]))}, {'instances': Instances(image_size, scores=torch.Tensor([0.9, 0.8]))}, {'instances': Instances(image_size, scores=torch.Tensor([0.9, 0.8]))}, {'instances': Instances(image_size, scores=torch.Tensor([0.9]))}]
        # Inputs are unused by the evaluator; only their count must match.
        self.mock_inputs = ([None] * len(self.mock_outputs))

    def test_process_evaluate_reset(self):
        """process() accumulates counts/scores, evaluate() averages them,
        reset() clears all state."""
        self.assertEqual(len(self.evaluator.prediction_counts), 0)
        self.assertEqual(len(self.evaluator.confidence_scores), 0)
        self.evaluator.process(self.mock_inputs, self.mock_outputs)
        self.assertListEqual(self.evaluator.prediction_counts, [3, 3, 2, 2, 1])
        self.assertEqual(len(self.evaluator.confidence_scores), 11)
        output_metrics = self.evaluator.evaluate()
        # 11 predictions over 5 images; mean confidence over all 11 scores.
        self.assertDictAlmostEqual(output_metrics, {'false_positives': {'predictions_per_image': (11 / 5), 'confidence_per_prediction': ((((0.9 * 5) + (0.8 * 4)) + (0.7 * 2)) / 11)}})
        self.evaluator.reset()
        self.assertEqual(len(self.evaluator.prediction_counts), 0)
        self.assertEqual(len(self.evaluator.confidence_scores), 0)

    def assertDictAlmostEqual(self, dict1, dict2):
        """Recursive dict comparison: lists exactly, nested dicts recursively,
        scalars with assertAlmostEqual (for float tolerance)."""
        keys1 = list(dict1.keys())
        keys2 = list(dict2.keys())
        self.assertCountEqual(keys1, keys2)
        for (k, v1) in dict1.items():
            v2 = dict2[k]
            if isinstance(v2, list):
                self.assertListEqual(v1, v2)
            elif isinstance(v2, dict):
                self.assertDictAlmostEqual(v1, v2)
            else:
                self.assertAlmostEqual(v1, v2)
def manage_context(args, daccess):
    """Handle the `context` subcommand: rename a context and/or set options.

    With no name and no options, falls through to the default todo listing.
    Returns a (status, payload) tuple on rename failure / unknown context,
    otherwise None (or the result of todo()).
    """
    path = args['context']
    name = args.get('name')
    options = get_options(args, CONTEXT_MUTATORS)
    exists = True
    if ((len(options) == 0) and (name is None)):
        # Nothing to change: just list the context's todos.
        return todo(args, daccess)
    else:
        if (name is not None):
            # NOTE(review): `data_access.rename_context` (module-level) appears
            # to compute the renamed path while `daccess.rename_context`
            # performs the rename and returns a row count — confirm this split
            # is intentional and not a duplicated call.
            renamed = data_access.rename_context(args['context'], name)
            rcount = daccess.rename_context(args['context'], name)
            if (rcount is None):
                # Rename refused: a context with the target name exists.
                return ('target_name_exists', renamed)
            elif (rcount > 0):
                path = renamed
            else:
                exists = False
        if (len(options) > 0):
            daccess.set_context(path, options)
        elif (not exists):
            return ('not_exists', path)
def convert_schemas(d, definitions=None):
    """Recursively walk *d*, replacing marshmallow Schema classes with
    $ref entries and collecting their JSON schemas into *definitions*.

    Any 'definitions' key found along the way is merged into *definitions*
    and stripped from the returned dict.  Tuples are converted to lists.
    """
    if definitions is None:
        definitions = {}
    definitions.update(d.get('definitions', {}))
    converted = {}
    for key, val in d.items():
        if isinstance(val, dict):
            val = convert_schemas(val, definitions)
        if isinstance(val, (list, tuple)):
            # Recurse into dict elements; everything else passes through.
            val = [convert_schemas(item, definitions) if isinstance(item, dict) else item for item in val]
        if inspect.isclass(val) and issubclass(val, Schema):
            if (Schema is None):
                raise RuntimeError('Please install marshmallow and apispec')
            definitions[val.__name__] = schema2jsonschema(val)
            ref = {'$ref': '#/definitions/{0}'.format(val.__name__)}
            if key == 'parameters':
                # Parameters get expanded into a parameter list whose first
                # entry carries the schema reference.
                converted[key] = schema2parameters(val, location=val.swag_in)
                converted[key][0]['schema'] = ref
                if len(definitions[val.__name__]['required']) != 0:
                    converted[key][0]['required'] = True
            else:
                converted[key] = ref
        else:
            converted[key] = val
    # Collected definitions live at the top level, not inline.
    converted.pop('definitions', None)
    return converted
def mfa_reset_secret(username):
    """Generate a fresh TOTP secret and store it for *username*.

    Returns False unconditionally (preserved from the original; callers
    appear to treat the return value as an error flag).
    """
    secret = pyotp.random_base32()
    conn = sqlite3.connect('db_users.sqlite')
    # BUGFIX: the connection was previously never closed (resource leak);
    # try/finally guarantees close even when execute/commit raises.
    try:
        # NOTE(review): debug tracing left enabled — prints every SQL statement.
        conn.set_trace_callback(print)
        conn.row_factory = sqlite3.Row
        c = conn.cursor()
        c.execute('UPDATE users SET mfa_secret = ? WHERE username = ?', (secret, username))
        conn.commit()
    finally:
        conn.close()
    return False
class TestUndoItem(UnittestTools, unittest.TestCase):
    """Tests for UndoItem: undo/redo application and merge() semantics for
    numbers, strings, fixed-length sequences and unhandled types."""

    def test_undo(self):
        """undo() restores old_value on the target trait (one change event)."""
        example = SimpleExample(value=11)
        undo_item = UndoItem(object=example, name='value', old_value=10, new_value=11)
        with self.assertTraitChanges(example, 'value', count=1):
            undo_item.undo()
        self.assertEqual(example.value, 10)

    def test_redo(self):
        """redo() applies new_value on the target trait (one change event)."""
        example = SimpleExample(value=10)
        undo_item = UndoItem(object=example, name='value', old_value=10, new_value=11)
        with self.assertTraitChanges(example, 'value', count=1):
            undo_item.redo()
        self.assertEqual(example.value, 11)

    def test_merge_different_undo_item_type(self):
        """Items of different UndoItem subclasses never merge."""
        example_1 = SimpleExample()
        example_2 = SimpleExample()
        undo_item = UndoItem(object=example_1, name='any_value')
        next_undo_item = ListUndoItem(object=example_2, name='any_value')
        result = undo_item.merge(next_undo_item)
        self.assertFalse(result)

    def test_merge_different_objects(self):
        """Items targeting different objects never merge."""
        example_1 = SimpleExample()
        example_2 = SimpleExample()
        undo_item = UndoItem(object=example_1, name='value', old_value=10, new_value=11)
        next_undo_item = UndoItem(object=example_2, name='value', old_value=10, new_value=11)
        result = undo_item.merge(next_undo_item)
        self.assertFalse(result)

    def test_merge_different_traits(self):
        """Items targeting different traits never merge."""
        example = SimpleExample()
        undo_item = UndoItem(object=example, name='value', old_value=10, new_value=11)
        next_undo_item = UndoItem(object=example, name='str_value', old_value='foo', new_value='bar')
        result = undo_item.merge(next_undo_item)
        self.assertFalse(result)

    def test_merge_different_value_types(self):
        """Items whose new values have different types never merge."""
        example = SimpleExample()
        undo_item = UndoItem(object=example, name='any_value', old_value=10, new_value=11)
        next_undo_item = UndoItem(object=example, name='any_value', old_value=11, new_value='foo')
        result = undo_item.merge(next_undo_item)
        self.assertFalse(result)

    def test_merge_numbers(self):
        """Consecutive numeric changes merge into one old->newest item."""
        example = SimpleExample()
        undo_item = UndoItem(object=example, name='value', old_value=10, new_value=11)
        next_undo_item = UndoItem(object=example, name='value', old_value=11, new_value=12)
        result = undo_item.merge(next_undo_item)
        self.assertTrue(result)
        self.assertEqual(undo_item.old_value, 10)
        self.assertEqual(undo_item.new_value, 12)

    def test_merge_str_insert(self):
        """String edits that insert one character merge."""
        example = SimpleExample()
        undo_item = UndoItem(object=example, name='str_value', old_value='foo', new_value='bar')
        next_undo_item = UndoItem(object=example, name='str_value', old_value='bar', new_value='bear')
        result = undo_item.merge(next_undo_item)
        self.assertTrue(result)
        self.assertEqual(undo_item.old_value, 'foo')
        self.assertEqual(undo_item.new_value, 'bear')

    def test_merge_str_delete(self):
        """String edits that delete one character merge."""
        example = SimpleExample()
        undo_item = UndoItem(object=example, name='str_value', old_value='foo', new_value='bear')
        next_undo_item = UndoItem(object=example, name='str_value', old_value='bear', new_value='bar')
        result = undo_item.merge(next_undo_item)
        self.assertTrue(result)
        self.assertEqual(undo_item.old_value, 'foo')
        self.assertEqual(undo_item.new_value, 'bar')

    def test_merge_str_change(self):
        """String edits that change one character merge."""
        example = SimpleExample()
        undo_item = UndoItem(object=example, name='str_value', old_value='foo', new_value='bar')
        next_undo_item = UndoItem(object=example, name='str_value', old_value='bar', new_value='baz')
        result = undo_item.merge(next_undo_item)
        self.assertTrue(result)
        self.assertEqual(undo_item.old_value, 'foo')
        self.assertEqual(undo_item.new_value, 'baz')

    def test_merge_str_same(self):
        """A no-op string edit still merges."""
        example = SimpleExample()
        undo_item = UndoItem(object=example, name='str_value', old_value='foo', new_value='bar')
        next_undo_item = UndoItem(object=example, name='str_value', old_value='bar', new_value='bar')
        result = undo_item.merge(next_undo_item)
        self.assertTrue(result)
        self.assertEqual(undo_item.old_value, 'foo')
        self.assertEqual(undo_item.new_value, 'bar')

    def test_merge_str_different(self):
        """String edits differing in more than one character do not merge."""
        example = SimpleExample()
        undo_item = UndoItem(object=example, name='str_value', old_value='foo', new_value='bar')
        next_undo_item = UndoItem(object=example, name='str_value', old_value='bar', new_value='wombat')
        result = undo_item.merge(next_undo_item)
        self.assertFalse(result)

    def test_merge_sequence_change(self):
        """Sequence edits touching the same single position merge."""
        example = SimpleExample()
        undo_item = UndoItem(object=example, name='tuple_value', old_value=('foo', 'bar', 'baz'), new_value=('foo', 'wombat', 'baz'))
        next_undo_item = UndoItem(object=example, name='tuple_value', old_value=('foo', 'wombat', 'baz'), new_value=('foo', 'fizz', 'baz'))
        result = undo_item.merge(next_undo_item)
        self.assertTrue(result)
        self.assertEqual(undo_item.old_value, ('foo', 'bar', 'baz'))
        self.assertEqual(undo_item.new_value, ('foo', 'fizz', 'baz'))

    def test_merge_sequence_change_different_types(self):
        """Same-position sequence edits merge even when the element type changes."""
        example = SimpleExample()
        undo_item = UndoItem(object=example, name='tuple_value', old_value=('foo', 'bar', 'baz'), new_value=('foo', 'wombat', 'baz'))
        next_undo_item = UndoItem(object=example, name='tuple_value', old_value=('foo', 'wombat', 'baz'), new_value=('foo', 12, 'baz'))
        result = undo_item.merge(next_undo_item)
        self.assertTrue(result)
        self.assertEqual(undo_item.old_value, ('foo', 'bar', 'baz'))
        self.assertEqual(undo_item.new_value, ('foo', 12, 'baz'))

    def test_merge_sequence_change_not_simple_types(self):
        """A single changed non-simple element still merges."""
        example = SimpleExample()
        undo_item = UndoItem(object=example, name='tuple_value', old_value=('foo', ['bar'], 'baz'), new_value=('foo', ['wombat'], 'baz'))
        next_undo_item = UndoItem(object=example, name='tuple_value', old_value=('foo', ['wombat'], 'baz'), new_value=('foo', ['fizz'], 'baz'))
        result = undo_item.merge(next_undo_item)
        self.assertTrue(result)
        self.assertEqual(undo_item.old_value, ('foo', ['bar'], 'baz'))
        self.assertEqual(undo_item.new_value, ('foo', ['fizz'], 'baz'))

    def test_merge_sequence_change_multiple_not_simple_types(self):
        """Non-simple elements outside the changed position block merging."""
        example = SimpleExample()
        undo_item = UndoItem(object=example, name='tuple_value', old_value=(['foo'], 'bar', 'baz'), new_value=(['foo'], 'wombat', 'baz'))
        next_undo_item = UndoItem(object=example, name='tuple_value', old_value=(['foo'], 'wombat', 'baz'), new_value=(['foo'], 'fizz', 'baz'))
        result = undo_item.merge(next_undo_item)
        self.assertFalse(result)

    def test_merge_sequence_change_back(self):
        """Reverting to the original value does not merge."""
        example = SimpleExample()
        undo_item = UndoItem(object=example, name='tuple_value', old_value=('foo', 'bar', 'baz'), new_value=('foo', 'wombat', 'baz'))
        next_undo_item = UndoItem(object=example, name='tuple_value', old_value=('foo', 'wombat', 'baz'), new_value=('foo', 'bar', 'baz'))
        result = undo_item.merge(next_undo_item)
        self.assertFalse(result)

    def test_merge_sequence_two_changes(self):
        """Edits at two different positions do not merge."""
        example = SimpleExample()
        undo_item = UndoItem(object=example, name='tuple_value', old_value=('foo', 'bar', 'baz'), new_value=('foo', 'wombat', 'baz'))
        next_undo_item = UndoItem(object=example, name='tuple_value', old_value=('foo', 'wombat', 'baz'), new_value=('foo', 'wombat', 'fizz'))
        result = undo_item.merge(next_undo_item)
        self.assertFalse(result)

    def test_merge_sequence_change_length(self):
        """Edits that change the sequence length do not merge."""
        example = SimpleExample()
        undo_item = UndoItem(object=example, name='tuple_value', old_value=('foo', 'bar', 'baz'), new_value=('foo', 'wombat', 'baz'))
        next_undo_item = UndoItem(object=example, name='tuple_value', old_value=('foo', 'wombat', 'baz'), new_value=('foo', 'wombat', 'baz', 'fizz'))
        result = undo_item.merge(next_undo_item)
        self.assertFalse(result)

    def test_merge_unhandled_type(self):
        """Types without merge support (e.g. sets) do not merge."""
        example = SimpleExample()
        undo_item = UndoItem(object=example, name='any_value', old_value={'foo', 'bar', 'baz'}, new_value={'foo', 'wombat', 'baz'})
        next_undo_item = UndoItem(object=example, name='any_value', old_value={'foo', 'wombat', 'baz'}, new_value={'foo', 'fizz', 'baz'})
        result = undo_item.merge(next_undo_item)
        self.assertFalse(result)
class OptionSeriesWindbarbSonificationDefaultinstrumentoptionsActivewhen(Options):
    """Highcharts `activeWhen` sonification options (generated wrapper).

    BUGFIX: each getter/setter pair was emitted as two plain methods with the
    same name (stripped @property decorators), so every getter was silently
    shadowed by its setter.  Restored as proper properties: reading returns
    the configured value via _config_get, assigning stores it via _config.
    """

    @property
    def crossingDown(self):
        """Track is active when the value crosses below this threshold."""
        return self._config_get(None)

    @crossingDown.setter
    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    @property
    def crossingUp(self):
        """Track is active when the value crosses above this threshold."""
        return self._config_get(None)

    @crossingUp.setter
    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    @property
    def max(self):
        """Track is active while the value is at most this maximum."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Track is active while the value is at least this minimum."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        """Name of the point property the thresholds are compared against."""
        return self._config_get(None)

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
def unfreeze_last_n_stages(stages: List[nn.Module], n: int):
    """Freeze (requires_grad=False) every stage except the last *n*.

    n == -1 is a no-op: all stages keep their current trainability.
    Stages already trainable in the last *n* are left untouched (parameters
    default to requires_grad=True; nothing is explicitly re-enabled here).
    """
    if n == -1:
        return
    # Freeze the leading (len - n) stages; max() keeps the slice empty when
    # n exceeds the stage count, matching the original early-break behavior.
    frozen_count = max(len(stages) - n, 0)
    for stage in stages[:frozen_count]:
        for param in stage.parameters():
            param.requires_grad = False
def _run_is_compiling_tests(dtype_name, backend):
    """Exercise _is_compiling for one backend: it must be False in eager mode
    and True inside that backend's tracing/JIT machinery."""
    dtype = to_backend_dtype(dtype_name, like=backend)
    sample = anp.array([[0.0, 1.0], [1.0, 2.0]], dtype=dtype, like=backend)
    # Eager path: no compilation in progress.
    assert not _is_compiling(sample), f'_is_compiling has a false positive with backend {backend}'

    def check_compiling(arr):
        # Runs under the backend's tracer, where _is_compiling must be True.
        assert _is_compiling(arr), f'_is_compiling has a false negative with backend {backend}'
        return arr

    if backend == 'jax':
        import jax
        jax.jit(check_compiling)(sample)
    elif backend == 'torch':
        import torch
        torch.jit.trace(check_compiling, (sample,), check_trace=False)(sample)
    elif backend == 'tensorflow':
        import tensorflow as tf
        # Both XLA-compiled and plain tf.function tracing count as compiling.
        tf.function(check_compiling, jit_compile=True)(sample)
        tf.function(check_compiling, jit_compile=False)(sample)
def test_local_error(tmp_path: Path) -> None:
    """A job that raises must surface a FailedJobError carrying the remote
    traceback text in its first argument."""
    def failing_job() -> None:
        raise RuntimeError('Failed on purpose')

    job = local.LocalExecutor(tmp_path).submit(failing_job)
    err = job.exception()
    assert isinstance(err, utils.FailedJobError)
    message = err.args[0]
    assert 'Traceback' in message
    assert 'Failed on purpose' in message
# NOTE(review): this bare string expression is almost certainly a stripped
# decorator -- presumably `@patch('ecs_deploy.cli.get_client')` -- which
# would explain the injected `get_client` argument below; confirm against
# the original test module.
('ecs_deploy.cli.get_client')
def test_run_task_with_environment_var(get_client, runner):
    # Stub the AWS client, then drive the CLI `run` command with a container
    # environment override (-e application foo bar).
    get_client.return_value = EcsTestClient('acces_key', 'secret_key')
    result = runner.invoke(cli.run, (CLUSTER_NAME, 'test-task', '2', '-e', 'application', 'foo', 'bar'))
    # The command must succeed...
    assert (not result.exception)
    assert (result.exit_code == 0)
    # ...and report the task definition, the env change, and the started tasks.
    assert (u'Using task definition: test-task' in result.output)
    assert (u'Changed environment "foo" of container "application" to: "bar"' in result.output)
    assert (u'Successfully started 2 instances of task: test-task:2' in result.output)
    assert (u'- arn:foo:bar' in result.output)
    assert (u'- arn:lorem:ipsum' in result.output)
# NOTE(review): stray expression -- presumably a stripped class decorator
# such as `@provides(ITreeNode)`; confirm against the original source.
(ITreeNode)
class AdderNode(TreeNode):
    """Tree node acting as an "add item" placeholder in an object tree.

    Also serves as its own tree-node adapter: the get_*/when_* methods below
    implement the tree-node protocol for instances of this class.
    """
    # Text and hover text shown in the tree; subclasses override.
    label = Str('Base AdderNode')
    tooltip = Str('Add an item')
    # The object this adder is attached to.
    object = Any
    # Derived from `object` -- see _get_scene below.
    scene = Property
    view = View(Group(label='AdderNode'))
    def dialog_view(self):
        """Return the default traits view adjusted for use as a dialog."""
        view = self.trait_view()
        view.buttons = []
        view.title = self.label
        view.icon = ImageResource('add.ico')
        view.resizable = True
        view.width = 350
        view.height = 650
        return view
    def _get_scene(self):
        # Property getter for `scene`: delegate to the attached object's
        # scene; AdderNode objects themselves have no scene.
        object = self.object
        if isinstance(object, AdderNode):
            return None
        if (object is not None):
            return object.scene
        else:
            return None
    def get_label(self):
        # Tree-node protocol: display text.
        return self.label
    def get_icon(self, obj, is_expanded=False):
        # Tree-node protocol: icon name (expansion state is ignored).
        return self.icon_name
    def get_icon_path(self):
        # Tree-node protocol: directory searched for icon resources.
        return resource_path()
    def get_tooltip(self):
        return self.tooltip
    def allows_children(self):
        # Adder nodes are always leaves.
        return False
    def get_children_id(self, node=None):
        return []
    def when_label_changed(self, label_updated, remove):
        # Labels are static here; nothing to hook up.
        return
    def when_column_labels_change(self, listener, remove):
        return
class PolygonPlot(BaseXYPlot):
    """Plots a filled polygon whose vertices come from the index/value data."""

    # Stroke styling for the polygon outline.
    edge_color = black_color_trait(requires_redraw=True)
    edge_width = Float(1.0, requires_redraw=True)
    edge_style = LineStyle(requires_redraw=True)
    # Fill color for the polygon interior.
    face_color = transparent_color_trait(requires_redraw=True)
    # 'poly' tests point-in-polygon containment; 'point'/'line' defer to
    # the BaseXYPlot distance-based hit test.
    hittest_type = Enum('poly', 'point', 'line')
    # Edge/face colors with the plot's alpha folded in (see getters below).
    effective_edge_color = Property(Tuple, observe=['edge_color', 'alpha'])
    effective_face_color = Property(Tuple, observe=['face_color', 'alpha'])
    def _gather_points(self):
        # Collect (index, value) pairs into self._cached_data_pts, caching
        # the result until the cache is invalidated.
        if self._cache_valid:
            return
        index = self.index.get_data()
        value = self.value.get_data()
        if ((not self.index) or (not self.value)):
            return
        # Empty or length-mismatched data produces an empty (valid) cache.
        if ((len(index) == 0) or (len(value) == 0) or (len(index) != len(value))):
            self._cached_data_pts = []
            self._cache_valid = True
            return
        points = np.transpose(np.array((index, value)))
        self._cached_data_pts = points
        self._cache_valid = True
    def _render(self, gc, points):
        # Draw the polygon: clipped to the plot bounds, stroked with the
        # edge style, filled with the face color.
        with gc:
            gc.clip_to_rect(self.x, self.y, self.width, self.height)
            gc.set_stroke_color(self.effective_edge_color)
            gc.set_line_width(self.edge_width)
            gc.set_line_dash(self.edge_style_)
            gc.set_fill_color(self.effective_face_color)
            gc.lines(points)
            gc.close_path()
            gc.draw_path()
    def _render_icon(self, gc, x, y, width, height):
        # Legend icon: a small rectangle in this plot's colors.
        with gc:
            gc.set_stroke_color(self.effective_edge_color)
            gc.set_line_width(self.edge_width)
            gc.set_fill_color(self.effective_face_color)
            if hasattr(self, 'line_style_'):
                gc.set_line_dash(self.line_style_)
            gc.draw_rect((x, y, width, height))
    def hittest(self, screen_pt, threshold=7.0, return_distance=False):
        # 'line'/'point' modes reuse the base-class test; 'poly' mode maps
        # the screen point to data space and reports polygon containment.
        if (self.hittest_type in ('line', 'point')):
            return BaseXYPlot.hittest(self, screen_pt, threshold, return_distance)
        data_pt = self.map_data(screen_pt, all_values=True)
        index = self.index.get_data()
        value = self.value.get_data()
        poly = np.column_stack((index, value))
        if (points_in_polygon([data_pt], poly)[0] == 1):
            return True
        else:
            return False
    # NOTE(review): `_property` is a bare name expression -- presumably a
    # stripped decorator (e.g. `@cached_property`) on the Property getters
    # below; confirm against the original source.
    _property
    def _get_effective_edge_color(self):
        # Fold self.alpha into the edge color's alpha channel.
        if (len(self.edge_color_) == 4):
            edge_alpha = self.edge_color_[(- 1)]
        else:
            edge_alpha = 1.0
        c = (self.edge_color_[:3] + ((edge_alpha * self.alpha),))
        return c
    _property
    def _get_effective_face_color(self):
        # Fold self.alpha into the face color's alpha channel.
        if (len(self.face_color_) == 4):
            face_alpha = self.face_color_[(- 1)]
        else:
            face_alpha = 1.0
        c = (self.face_color_[:3] + ((face_alpha * self.alpha),))
        return c
class TestMisc(unittest.TestCase):
    """Round-trip tests for `write_data` plus VTK warning suppression."""

    def setUp(self):
        # One dataset of each supported type, paired with its XML extension.
        self.datasets = [tvtk.ImageData(), tvtk.StructuredPoints(), tvtk.RectilinearGrid(), tvtk.StructuredGrid(), tvtk.PolyData(), tvtk.UnstructuredGrid()]
        self.exts = ['.vti', '.vti', '.vtr', '.vts', '.vtp', '.vtu']

    def _reserve_filename(self, suffix):
        # Create-and-delete a temp file so we get a fresh, unused path.
        handle, path = tempfile.mkstemp(suffix)
        os.close(handle)
        os.remove(path)
        return path

    def test_write_data_xml_noext(self):
        # Without an extension, write_data should append the right XML one.
        for dataset, ext in zip(self.datasets, self.exts):
            path = self._reserve_filename(ext)
            write_data(dataset, os.path.splitext(path)[0])
            self.assertEqual(os.path.exists(path), True)
            os.remove(path)

    def test_write_data_xml(self):
        for dataset in self.datasets:
            path = self._reserve_filename('.xml')
            self.assertEqual(os.path.exists(path), False)
            write_data(dataset, path)
            self.assertEqual(os.path.exists(path), True)
            os.remove(path)

    def test_write_data_xml_kwargs(self):
        # Extra keyword arguments must be forwarded to the XML writer.
        for dataset, ext in zip(self.datasets, self.exts):
            path = self._reserve_filename(ext)
            write_data(dataset, os.path.splitext(path)[0], compressor=None, data_mode='ascii')
            self.assertEqual(os.path.exists(path), True)
            os.remove(path)

    def test_write_data_vtk(self):
        # Legacy .vtk output must round-trip through DataSetReader.
        for dataset in self.datasets:
            path = self._reserve_filename('.vtk')
            self.assertEqual(os.path.exists(path), False)
            write_data(dataset, path)
            self.assertEqual(os.path.exists(path), True)
            reader = tvtk.DataSetReader(file_name=path)
            reader.update()
            self.assertEqual(isinstance(reader.output, dataset.__class__), True)
            os.remove(path)

    def test_suppress_vtk_warnings(self):
        # Warnings are disabled only inside the context manager.
        obj = tvtk.to_vtk(tvtk.Object())
        self.assertEqual(obj.GetGlobalWarningDisplay(), 1)
        with suppress_vtk_warnings():
            self.assertEqual(obj.GetGlobalWarningDisplay(), 0)
        self.assertEqual(obj.GetGlobalWarningDisplay(), 1)
class OcrNumbersTest(unittest.TestCase):
    """Tests for `convert`: 3x4 ASCII-art digit grids -> decimal strings,
    with '?' for unreadable cells and ',' joining multi-row banners.

    NOTE(review): two expected values ('110101100' and '1234567890') and the
    digits in one test-method name were blank in this listing; restored from
    the exercism "ocr-numbers" canonical test data.
    """

    def test_recognizes_0(self):
        self.assertEqual(convert([' _ ', '| |', '|_|', ' ']), '0')

    def test_recognizes_1(self):
        self.assertEqual(convert([' ', ' |', ' |', ' ']), '1')

    def test_unreadable_but_correctly_sized_inputs_return(self):
        # A correctly sized but unrecognized glyph maps to '?'.
        self.assertEqual(convert([' ', ' _', ' |', ' ']), '?')

    def test_input_with_a_number_of_lines_that_is_not_a_multiple_of_four_raises_an_error(self):
        with self.assertRaises(ValueError) as err:
            convert([' _ ', '| |', ' '])
        self.assertEqual(type(err.exception), ValueError)
        self.assertEqual(err.exception.args[0], 'Number of input lines is not a multiple of four')

    def test_input_with_a_number_of_columns_that_is_not_a_multiple_of_three_raises_an_error(self):
        with self.assertRaises(ValueError) as err:
            convert([' ', ' |', ' |', ' '])
        self.assertEqual(type(err.exception), ValueError)
        self.assertEqual(err.exception.args[0], 'Number of input columns is not a multiple of three')

    def test_recognizes_110101100(self):
        # Expected value restored: this banner spells 110101100.
        self.assertEqual(convert([' _ _ _ _ ', ' | || | || | | || || |', ' | ||_| ||_| | ||_||_|', ' ']), '110101100')

    def test_garbled_numbers_in_a_string_are_replaced_with(self):
        self.assertEqual(convert([' _ _ _ ', ' | || | || | || || |', ' | | _| ||_| | ||_||_|', ' ']), '11?10?1?0')

    def test_recognizes_2(self):
        self.assertEqual(convert([' _ ', ' _|', '|_ ', ' ']), '2')

    def test_recognizes_3(self):
        self.assertEqual(convert([' _ ', ' _|', ' _|', ' ']), '3')

    def test_recognizes_4(self):
        self.assertEqual(convert([' ', '|_|', ' |', ' ']), '4')

    def test_recognizes_5(self):
        self.assertEqual(convert([' _ ', '|_ ', ' _|', ' ']), '5')

    def test_recognizes_6(self):
        self.assertEqual(convert([' _ ', '|_ ', '|_|', ' ']), '6')

    def test_recognizes_7(self):
        self.assertEqual(convert([' _ ', ' |', ' |', ' ']), '7')

    def test_recognizes_8(self):
        self.assertEqual(convert([' _ ', '|_|', '|_|', ' ']), '8')

    def test_recognizes_9(self):
        self.assertEqual(convert([' _ ', '|_|', ' _|', ' ']), '9')

    def test_recognizes_string_of_decimal_numbers(self):
        # Expected value restored: the banner spells all ten digits.
        self.assertEqual(convert([' _ _ _ _ _ _ _ _ ', ' | _| _||_||_ |_ ||_||_|| |', ' ||_ _| | _||_| ||_| _||_|', ' ']), '1234567890')

    def test_numbers_separated_by_empty_lines_are_recognized_lines_are_joined_by_commas(self):
        self.assertEqual(convert([' _ _ ', ' | _| _|', ' ||_ _|', ' ', ' _ _ ', '|_||_ |_ ', ' | _||_|', ' ', ' _ _ _ ', ' ||_||_|', ' ||_| _|', ' ']), '123,456,789')
def LocationEditContent(request, location_slug):
    """Edit view for a Location's content: GET renders the form, POST saves
    it (invalid submissions fall through and re-render with errors)."""
    location = get_object_or_404(Location, slug=location_slug)
    if request.method == 'POST':
        form = LocationContentForm(request.POST, request.FILES, instance=location)
        if form.is_valid():
            form.save()
            messages.add_message(request, messages.INFO, 'Location Updated.')
    else:
        form = LocationContentForm(instance=location)
    context = {'page': 'content', 'location': location, 'form': form}
    return render(request, 'location_edit_content.html', context)
class ModelLoader():
    """Lazily-loading singleton wrapper around a joblib-serialized model."""

    # The one shared instance (see __new__).
    _instance: Optional[object] = None

    def __new__(cls):
        # Classic singleton: reuse the single cached instance.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self) -> None:
        # __init__ runs on *every* ModelLoader() call, even when __new__
        # returned the cached instance.  Guard so a second instantiation
        # does not wipe an already-loaded model (the original reset
        # self.model to None each time, defeating the singleton cache).
        if getattr(self, '_initialized', False):
            return
        self._initialized: bool = True
        self.model_path: Text = 'models/model.joblib'
        self.model: Optional[Callable] = None

    def get_model(self) -> Callable:
        """Return the loaded model, loading it from disk on first use."""
        if not self.model:
            self._load_model()
        return self.model

    def _load_model(self) -> None:
        # Imported lazily so merely constructing the loader does not
        # require joblib to be installed.
        import joblib
        self.model = joblib.load(self.model_path)
def _get_code_object(obj):
    """Resolve *obj* to a code object, compiling and memoizing the source
    for classes and modules."""
    kind = _get_kind(obj)
    if kind == 'code':
        # Already a code object.
        return obj
    # Function-like objects expose their code directly.
    try:
        return obj.__code__
    except AttributeError:
        pass
    if kind == 'function':
        # A function without __code__ is not supported.
        raise NotImplementedError
    if kind in ('class', 'module'):
        # Compile the object's source and cache the result on the object.
        filename, text = _get_source(obj, root=False)
        code = compile(text, filename, 'exec')
        obj.__code__ = code
        return code
    raise NotImplementedError(obj)
def test_validate_bloch_with_symmetry():
    """Bloch boundaries on all three axes combined with nonzero symmetry
    must be rejected by Simulation validation."""
    with pytest.raises(pydantic.ValidationError):
        td.Simulation(size=(1, 1, 1), run_time=1e-12, boundary_spec=td.BoundarySpec(x=td.Boundary.bloch(bloch_vec=1.0), y=td.Boundary.bloch(bloch_vec=1.0), z=td.Boundary.bloch(bloch_vec=1.0)), symmetry=(1, 1, 1), grid_spec=td.GridSpec(wavelength=1.0))
def print_table(term: Terminal, players: Dict[(str, AsciiPlayer)], public_cards: AsciiCardCollection, n_table_rotations: int, n_spaces_between_cards: int=4, n_chips_in_pot: int=0):
    """Render the public cards, the pot size, and the three seated players
    (left/middle/right) centred in the terminal."""
    # Community cards and the pot, each line centred.
    for card_line in public_cards.lines:
        print(term.center(card_line))
    print(term.center(f'chips in pot: {n_chips_in_pot}'))
    print('\n\n')
    # The three seats printed row by row with a fixed-width gap between them.
    gap = ' ' * n_spaces_between_cards
    rows = zip(players['left'].lines, players['middle'].lines, players['right'].lines)
    for left_row, mid_row, right_row in rows:
        print(term.center(f'{left_row}{gap}{mid_row}{gap}{right_row}'))
def get_item_names(is_jp: bool) -> list[str]:
    """Return the item names (first CSV column) from GatyaitemName.csv,
    or an empty list when the file cannot be fetched."""
    raw = game_data_getter.get_file_latest('resLocal', 'GatyaitemName.csv', is_jp)
    if raw is None:
        helper.error_text('Failed to get item names')
        return []
    # NB: 'delimeter' (sic) is the keyword the csv_handler API expects.
    rows = csv_handler.parse_csv(raw.decode('utf-8'), delimeter=helper.get_text_splitter(is_jp))
    return [row[0] for row in rows]
class EvalLearner(Learner):
    """Learner specialization for cross-validated evaluation: emits one
    EvalSubLearner fit task per (train, test) fold from the indexer."""
    __subtype__ = EvalSubLearner
    def __init__(self, estimator, preprocess, name, attr, scorer, error_score=None, verbose=False, **kwargs):
        super(EvalLearner, self).__init__(estimator=estimator, preprocess=preprocess, name=name, attr=attr, scorer=scorer, verbose=verbose, **kwargs)
        # Evaluation only ever produces per-fold (sub) fits, never a
        # full-data fit.
        self.__only_sub__ = True
        self.__only_all__ = False
        self.output_columns = {0: 0}
        # Score recorded when a fold's fit raises (None = propagate).
        self.error_score = error_score
    def gen_fit(self, X, y, P=None, refit=True):
        """Yield EvalSubLearner 'fit' tasks, one per cross-validation fold."""
        self.cache_name = (('%s.%s' % (self.preprocess, self.name)) if self.preprocess else self.name)
        if ((not refit) and self.__fitted__):
            # NOTE(review): gen_transform returns a generator that is
            # discarded here, and execution falls through to re-fitting --
            # this looks like a missing `return`/`yield from`; confirm
            # against the upstream implementation before changing.
            self.gen_transform(X, P)
        if (self.indexer is None):
            raise ValueError('Cannot run cross-validation without an indexer')
        self.__collect__ = True
        for (i, (train_index, test_index)) in enumerate(self.indexer.generate()):
            # Task index: column 0, partition slot cycling over _partitions.
            if (self._partitions == 1):
                index = (0, (i + 1))
            else:
                index = (0, ((i % self._partitions) + 1))
            (yield EvalSubLearner(job='fit', parent=self, estimator=self.cloned_estimator, in_index=train_index, out_index=test_index, in_array=X, targets=y, index=index))
def log_deploy():
    """Append a deploy record to deploy-log.json on the remote host and
    trigger the deploy notification script."""
    current_commit = run('git rev-parse --verify HEAD')
    # NOTE(review): the original compare-URL format string was garbled in
    # this listing (it was an unterminated string literal, a syntax error).
    # Reconstructed minimally as "<old>...<new>"; TODO restore the full
    # host/repo compare-URL prefix from VCS history.
    url = ('%s...%s' % (env.previous_commit, current_commit))
    log_line = json.dumps({'started_at': str(env.started_at), 'ended_at': str(datetime.utcnow()), 'changes_url': url})
    run(("echo '%s' >> deploy-log.json" % log_line))
    with prefix('source .venv/bin/activate'):
        run('python deploy/notify_deploy.py {revision} {url} {fab_env}'.format(revision=current_commit, url=url, fab_env=env.environment))
def main():
    """Ansible module entry point for the FortiOS `log eventfilter` resource.

    Builds the argument spec from the versioned schema, talks to the device
    over the httpapi connection, applies the configuration, and exits with
    change/diff status (or fails with a version-mismatch warning attached).
    """
    module_spec = schema_to_module_spec(versioned_schema)
    # This endpoint is a singleton table: it has no mkey (primary key).
    mkeyname = None
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'log_eventfilter': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Copy the schema-derived options into the module's nested spec; the
    # mkey (if any) would be marked required.
    for attribute_name in module_spec['options']:
        fields['log_eventfilter']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['log_eventfilter']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=False)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        # Compare the device's FortiOS version against the schema; a
        # mismatch is surfaced as a warning below, not a hard failure.
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'log_eventfilter')
        (is_error, has_changed, result, diff) = fortios_log(module.params, fos)
    else:
        # No persistent connection available: cannot talk to the device.
        module.fail_json(**FAIL_SOCKET_MSG)
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
class GribIndexingDirectoryParserIterator():
    """Recursively scans a directory for GRIB files and loads their field
    metadata into a SQL index database, optionally via worker processes.

    NOTE(review): `tasks` is written as a plain method but used like an
    attribute (`self.tasks` is iterated in load_database and the method ends
    with `return self.tasks`) -- presumably a stripped `@property`; as a
    plain method those uses would see the bound method object.  Confirm
    against the original source.
    """
    def __init__(self, directory, db_path, relative_paths, extensions=['.grib', '.grib1', '.grib2'], followlinks=True, verbose=False, with_statistics=True):
        # NOTE(review): mutable default for `extensions` -- benign here
        # because it is only copied into a set, but a tuple would be safer.
        self.db_path = db_path
        self.extensions = set(extensions)
        self.directory = directory
        # True -> store paths relative to `directory`; False -> absolute;
        # None -> keep as given (see _format_path).
        self.relative_paths = relative_paths
        self.followlinks = followlinks
        self.verbose = verbose
        self.with_statistics = with_statistics
        # Lazy cache for the list of files to index.
        self._tasks = None
    def _new_db(self):
        # A fresh handle per call so each process gets its own connection.
        from climetlab.indexing.database.sql import SqlDatabase
        return SqlDatabase(self.db_path)
    def worker(self, i):
        # Consumer loop: pull paths from q_in until the None sentinel,
        # push per-file field counts to q_out.  The initial sleep staggers
        # worker start-up.
        _i = i
        time.sleep(i)
        while True:
            task = self.q_in.get()
            if (task is None):
                break
            n = self.process_path(_i, task)
            self.q_out.put(n)
    def load_database(self):
        """Index every discovered file, serially or with worker processes."""
        start = datetime.datetime.now()
        n_proc = 5
        # Windows lacks fork(); fall back to serial processing there.
        if (sys.platform == 'win32'):
            n_proc = 1
        if (n_proc == 1):
            count = 0
            for path in self.tasks:
                count += self.process_path(0, path)
        else:
            assert (n_proc > 1), n_proc
            self.q_in = Queue()
            self.q_out = Queue()
            workers = []
            for i in range(n_proc):
                proc = Process(target=self.worker, args=(i,))
                proc.start()
                workers.append(proc)
            for path in self.tasks:
                self.q_in.put(path)
            # One sentinel per worker terminates the consumer loops.
            for i in range(n_proc):
                self.q_in.put(None)
            count = 0
            for _ in progress_bar(desc='Indexing...', iterable=self.tasks, total=len(self.tasks)):
                count += self.q_out.get()
            for p in workers:
                p.join()
            del self.q_in
            del self.q_out
        self._new_db().build_indexes()
        end = datetime.datetime.now()
        print(f"Indexed {plural(count, 'field')} in {seconds((end - start))}.")
    def process_path(self, i, path):
        """Parse one GRIB file and load its fields; return the field count."""
        db = self._new_db()
        if db.already_loaded(self._format_path(path), self):
            LOG.warning(f'Skipping {path}, already loaded')
            return 0
        lst = []
        LOG.debug(f'Parsing file {path}')
        try:
            for field in _index_grib_file(path, with_statistics=self.with_statistics, position=(i + 1)):
                field['_path'] = self._format_path(path)
                lst.append(field)
        except PermissionError as e:
            LOG.error(f'Could not read {path}: {e}')
            return 0
        except Exception as e:
            # Best-effort indexing: one bad file must not abort the walk.
            LOG.exception(f'(grib-parsing) Ignoring {path}, {e}')
            return 0
        if (not lst):
            LOG.warn(f'No entry found in {path}.')
            return 0
        return db.load_iterator(lst)
    def tasks(self):
        """Lazily collect (and cache) the sorted list of files to index."""
        if (self._tasks is not None):
            return self._tasks
        LOG.debug(f'Parsing files in {self.directory}')
        assert os.path.exists(self.directory), f'{self.directory} does not exist'
        assert os.path.isdir(self.directory), f'{self.directory} is not a directory'
        tasks = []
        for (root, _, files) in os.walk(self.directory, followlinks=self.followlinks):
            for name in files:
                path = os.path.join(root, name)
                (_, ext) = os.path.splitext(path)
                if (ext not in self.extensions):
                    continue
                tasks.append(path)
        tasks = sorted(tasks)
        if tasks:
            if self.verbose:
                print(f'Found {len(tasks)} files to index.')
        else:
            LOG.error(f'Could not find any files to index in {self.directory}')
        self._tasks = tasks
        # Re-entering hits the early return above and yields the cache.
        return self.tasks
    def _format_path(self, path):
        # Dispatch on self.relative_paths: None -> unchanged,
        # True -> relative to the scan root, False -> absolute.
        return {None: (lambda x: x), True: (lambda x: os.path.relpath(x, self.directory)), False: (lambda x: os.path.abspath(x))}[self.relative_paths](path)
class CTrait(ctraits.cTrait):
    """Python-level wrapper adding conveniences to the C-implemented cTrait.

    NOTE(review): the bare `_mode.setter` / `_fields.setter` expressions
    below look like stripped property decorators (`@comparison_mode.setter`,
    `@property_fields.setter`); confirm against the original source.
    """
    def __call__(self, *args, **metadata):
        # Clone this trait, merging extra metadata.  TraitType handlers know
        # how to re-instantiate themselves; otherwise fall back to the
        # Trait() factory with this trait as parent.
        from .trait_type import TraitType
        from .traits import Trait
        handler = self.handler
        if isinstance(handler, TraitType):
            dict = (self.__dict__ or {}).copy()
            dict.update(metadata)
            return handler(*args, **dict)
        metadata.setdefault('parent', self)
        return Trait(*(args + (self,)), **metadata)
    def default(self):
        # Return this trait's default value, or Undefined when the default
        # cannot be produced statically (object/callable/disallowed kinds).
        (kind, value) = self.default_value()
        if (kind in (DefaultValue.object, DefaultValue.callable_and_args, DefaultValue.callable, DefaultValue.disallow)):
            return Undefined
        elif (kind in (DefaultValue.dict_copy, DefaultValue.trait_dict_object, DefaultValue.trait_set_object, DefaultValue.list_copy, DefaultValue.trait_list_object)):
            # Container defaults are copied so callers cannot mutate the
            # shared template.
            return value.copy()
        elif (kind in {DefaultValue.constant, DefaultValue.missing}):
            return value
        else:
            raise RuntimeError('Unexpected default value kind: {!r}'.format(kind))
    def default_kind(self):
        # Human-readable name of the default-value kind.
        return default_value_map[self.default_value()[0]]
    def trait_type(self):
        # The trait's handler, defaulting to Any when none is set.
        handler = self.handler
        if (handler is not None):
            return handler
        else:
            from .trait_types import Any
            return Any
    def inner_traits(self):
        # Element traits for container-like handlers (empty tuple otherwise).
        handler = self.handler
        if (handler is not None):
            return handler.inner_traits()
        return ()
    def comparison_mode(self):
        # Wrap the raw C-level int in the ComparisonMode enum.
        i_comparison_mode = super().comparison_mode
        return ComparisonMode(i_comparison_mode)
    _mode.setter
    def comparison_mode(self, value):
        # Write straight through to the C-level descriptor.
        ctraits.cTrait.comparison_mode.__set__(self, value)
    def property_fields(self):
        # The (fget, fset, validate) tuple held by the C layer.
        return self._get_property()
    _fields.setter
    def property_fields(self, value):
        # Flatten each callable with its arity into the C-level setter call.
        func_arg_counts = []
        for arg in value:
            if (arg is None):
                nargs = 0
            else:
                sig = inspect.signature(arg)
                nargs = len(sig.parameters)
            func_arg_counts.extend([arg, nargs])
        self._set_property(*func_arg_counts)
    def is_trait_type(self, trait_type):
        # True when this trait's handler is an instance of `trait_type`.
        return isinstance(self.trait_type, trait_type)
    def get_editor(self):
        """Return (creating and caching if needed) the TraitsUI editor factory."""
        from traitsui.api import EditorFactory
        editor = self.editor
        if (editor is None):
            # Ask the handler first; fall back to a plain text editor.
            handler = self.handler
            if (handler is not None):
                editor = handler.get_editor(self)
            if (editor is None):
                from traitsui.api import TextEditor
                editor = TextEditor
            if (not isinstance(editor, EditorFactory)):
                # The editor may be a factory class or a sequence spec mixing
                # an args tuple, a traits dict, and the factory itself;
                # normalize into an EditorFactory instance.
                args = ()
                traits = {}
                if (type(editor) in SequenceTypes):
                    for item in editor[:]:
                        if (type(item) in SequenceTypes):
                            args = tuple(item)
                        elif isinstance(item, dict):
                            traits = item
                            if (traits.get('trait', 0) is None):
                                traits = traits.copy()
                                traits['trait'] = self
                        else:
                            editor = item
                editor = editor(*args, **traits)
            self.editor = editor
        return editor
    def get_help(self, full=True):
        """Return the help string, synthesizing one from desc/handler info."""
        if full:
            help = self.help
            if (help is not None):
                return help
        handler = self.handler
        if (handler is not None):
            info = ('must be %s.' % handler.info())
        else:
            info = 'may be any value.'
        desc = self.desc
        if (self.desc is None):
            return info.capitalize()
        return ('Specifies %s and %s' % (desc, info))
    def full_info(self, object, name, value):
        # Delegate to the handler's contextual description when available.
        handler = self.handler
        if (handler is not None):
            return handler.full_info(object, name, value)
        return 'any value'
    def info(self):
        handler = self.handler
        if (handler is not None):
            return handler.info()
        return 'any value'
    def as_ctrait(self):
        # Already a ctrait; protocol method for trait-like objects.
        return self
    def __reduce_ex__(self, protocol):
        # Pickle via __newobj__ with the C-level constructor flag 0.
        return (__newobj__, (self.__class__, 0), self.__getstate__())
# NOTE(review): these bare `_type(...)` expressions are presumably stripped
# registration decorators (e.g. `@OFPTableFeatureProp.register_type(...)`)
# binding the two property type codes to the class below; confirm upstream.
_type(ofproto.OFPTFPT_NEXT_TABLES)
_type(ofproto.OFPTFPT_NEXT_TABLES_MISS)
class OFPTableFeaturePropNextTables(OFPTableFeatureProp):
    """Next-tables table-feature property: a list of reachable table ids."""
    # Wire format of one table id: a single unsigned byte, network order.
    _TABLE_ID_PACK_STR = '!B'
    def __init__(self, type_=None, length=None, table_ids=None):
        table_ids = (table_ids if table_ids else [])
        super(OFPTableFeaturePropNextTables, self).__init__(type_, length)
        self.table_ids = table_ids
    # NOTE(review): invoked with `cls` as an alternate constructor --
    # presumably decorated with @classmethod in the original.
    def parser(cls, buf):
        # Consume the property body one table id (1 byte) at a time.
        rest = cls.get_rest(buf)
        ids = []
        while rest:
            (i,) = struct.unpack_from(cls._TABLE_ID_PACK_STR, six.binary_type(rest), 0)
            rest = rest[struct.calcsize(cls._TABLE_ID_PACK_STR):]
            ids.append(i)
        return cls(table_ids=ids)
    def serialize_body(self):
        # Pack each table id back into the 1-byte wire format.
        bin_ids = bytearray()
        for i in self.table_ids:
            bin_id = bytearray()
            msg_pack_into(self._TABLE_ID_PACK_STR, bin_id, 0, i)
            bin_ids += bin_id
        return bin_ids
class TripLinks(object):
    """Auto-generated (Swagger/OpenAPI) model for a trip's related links.

    Mirrors CollisionLinks earlier in this file.  NOTE(review): the duplicate
    accessor defs (e.g. two `def alerts`) and the bare `_self.setter` line
    indicate stripped @property/@name.setter decorators; as written, each
    later def shadows the earlier one, and `_self.setter` on a plain
    function would raise AttributeError at class-definition time.
    """
    # Attribute name -> swagger type, and python name -> JSON key.
    swagger_types = {'alerts': 'Link', '_self': 'Link', 'vehicle': 'Link', 'waypoints': 'Link'}
    attribute_map = {'alerts': 'alerts', '_self': 'self', 'vehicle': 'vehicle', 'waypoints': 'waypoints'}
    def __init__(self, alerts=None, _self=None, vehicle=None, waypoints=None):
        self._alerts = None
        self.__self = None
        self._vehicle = None
        self._waypoints = None
        self.discriminator = None
        if (alerts is not None):
            self.alerts = alerts
        if (_self is not None):
            self._self = _self
        if (vehicle is not None):
            self.vehicle = vehicle
        if (waypoints is not None):
            self.waypoints = waypoints
    def alerts(self):
        return self._alerts
    def alerts(self, alerts):
        self._alerts = alerts
    def _self(self):
        return self.__self
    _self.setter
    def _self(self, _self):
        self.__self = _self
    def vehicle(self):
        return self._vehicle
    def vehicle(self, vehicle):
        self._vehicle = vehicle
    def waypoints(self):
        return self._waypoints
    def waypoints(self, waypoints):
        self._waypoints = waypoints
    def to_dict(self):
        """Return the model as a plain dict, recursing into nested models."""
        result = {}
        for (attr, _) in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
            else:
                result[attr] = value
        if issubclass(TripLinks, dict):
            for (key, value) in self.items():
                result[key] = value
        return result
    def to_str(self):
        # Pretty-printed form of to_dict().
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        return self.to_str()
    def __eq__(self, other):
        # Structural equality over the full instance dict.
        if (not isinstance(other, TripLinks)):
            return False
        return (self.__dict__ == other.__dict__)
    def __ne__(self, other):
        return (not (self == other))
def validate_attributed_ast(test: unittest.TestCase, source_unit: ast.SourceUnit):
    """Check per-line `// a, b` scope annotations against the attributed AST.

    Each source line carrying a `//` comment names the variables expected in
    scope after the statement on that line; the statement's `scope_post` ids
    are mapped back to names and compared as sets.
    """
    grammar = get_solidity_grammar_instance()
    statements = []
    names_by_id = {}

    def collect(node, _, __):
        # Record every statement; map declaration ids to variable names,
        # asserting ids are unique.
        if isinstance(node, Statement):
            statements.append(node)
        if isinstance(node, VariableDeclaration):
            test.assertNotIn(node.id, names_by_id)
            names_by_id[node.id] = node.name

    grammar.traverse(source_unit, collect)
    # Each annotated statement must sit on its own source line.
    test.assertEqual(len(statements), len({s.src_line for s in statements}))
    by_line = {s.src_line: s for s in statements}
    for line_no, text in enumerate(source_unit.source.split('\n')):
        if '//' not in text:
            continue
        _, annotation, *_ = text.split('//')
        expected = {name.strip() for name in annotation.split(',')}
        statement = by_line[line_no + 1]
        actual = {names_by_id[var_id] for var_id in statement.scope_post}
        test.assertSetEqual(expected, actual, f'Line: {line_no}')
class Commands():
    """Registry mapping version tuples to command callables."""

    def __init__(self) -> None:
        self.by_version: typing.Dict[(typing.Sequence[int], CommandProtocol)] = {}

    def register(self, version: typing.Sequence[int]) -> typing.Callable[([CommandProtocol], CommandProtocol)]:
        """Return a decorator registering its target under *version*."""
        if version in self.by_version:
            raise Exception(f'Version already registered: {version!r}')
        if not isinstance(version, tuple):
            raise Exception(f'Version must be a tuple: {version!r}')
        return functools.partial(self._decorator, version=version)

    def _decorator(self, command: CommandProtocol, *, version: typing.Sequence[int]) -> CommandProtocol:
        # Store and return the command unchanged so it works as a decorator.
        self.by_version[version] = command
        return command

    def __getitem__(self, item: typing.Sequence[int]) -> typing.Callable[([], None)]:
        return self.by_version[item]

    def latest_command(self) -> CommandProtocol:
        """Return the command registered under the highest version tuple."""
        newest_version = max(self.by_version)
        return self.by_version[newest_version]
def test_branch(converter):
    """Each comparison OperationType must lower to the matching z3 relation
    over a 32-bit BitVec and BitVecVal."""
    cases = [
        (OperationType.equal, lambda lhs, rhs: lhs == rhs),
        (OperationType.not_equal, lambda lhs, rhs: lhs != rhs),
        (OperationType.less, lambda lhs, rhs: lhs < rhs),
        (OperationType.less_or_equal, lambda lhs, rhs: lhs <= rhs),
        (OperationType.greater, lambda lhs, rhs: lhs > rhs),
        (OperationType.greater_or_equal, lambda lhs, rhs: lhs >= rhs),
    ]
    for operation, relation in cases:
        converted = converter.convert(Branch(Condition(operation, [var_x.copy(), const_1.copy()])))
        expected = relation(BitVec('x', 32), BitVecVal(1, 32))
        assert str(converted) == str(expected)
class RemotableModelPool(remote.Remotable, Launchable):
    """Serves a RemotableModel over the remote layer, optionally keeping a
    bounded history of released model snapshots that callers can sample.

    NOTE(review): the bare `_method(batch_size=None)` expressions before
    several methods are presumably stripped remote-method decorators;
    confirm against the original source.
    """
    def __init__(self, model: RemotableModel, capacity: int=0, seed: Optional[int]=None, identifier: Optional[str]=None) -> None:
        super().__init__(identifier)
        self._model = model
        self._capacity = capacity
        self._seed = seed
        # Only keep a snapshot history when a positive capacity was given.
        if (self._capacity > 0):
            self._history = CircularBuffer(self._capacity)
    def capacity(self) -> int:
        return self._capacity
    def seed(self) -> Optional[int]:
        return self._seed
    def init_launching(self) -> None:
        # Share weights across processes before workers start.
        self._model.share_memory()
    def init_execution(self) -> None:
        # Register remote wrappers, seed RNGs, then initialize the model.
        self._bind()
        if (self._seed is not None):
            random_utils.manual_seed(self._seed)
        self._model.init_model()
        console.log(self._model)
    def model(self, version: int=ModelVersion.LATEST) -> nn.Module:
        # LATEST -> the live model; otherwise a history entry, indexed [1]
        # as if entries were (key, model) pairs.
        # NOTE(review): release() appends bare deep-copied models, yet this
        # indexes entries with [1] -- one of the two looks inconsistent;
        # confirm against the original source.
        return (self._model if (version == ModelVersion.LATEST) else self._history[version][1])
    _method(batch_size=None)
    def pull(self, version: int=ModelVersion.LATEST) -> Dict[(str, torch.Tensor)]:
        """Return a CPU copy of the requested model version's state dict."""
        state_dict = self.model(version).state_dict()
        state_dict = nested_utils.map_nested((lambda x: x.cpu()), state_dict)
        return state_dict
    _method(batch_size=None)
    def push(self, state_dict: Dict[(str, torch.Tensor)]) -> None:
        """Load a state dict (moved to the model's device) into the live model."""
        device = self._model.device
        state_dict = nested_utils.map_nested((lambda x: x.to(device)), state_dict)
        self._model.load_state_dict(state_dict)
    _method(batch_size=None)
    def release(self) -> None:
        # Snapshot the current model into the bounded history.
        if (self._capacity > 0):
            self._history.append(copy.deepcopy(self._model))
    _method(batch_size=None)
    def sample_model(self) -> int:
        # Uniformly pick a historical version index, or LATEST when no
        # history is kept.
        if (self._capacity == 0):
            return ModelVersion.LATEST
        else:
            return np.random.randint(len(self._history))
    def _bind(self) -> None:
        # Expose every remote method of the wrapped model: once for the live
        # model and once per history slot (both positive indices and their
        # negative aliases), each as a `name[version]` wrapper.
        for method in self._model.remote_methods:
            batch_size = getattr(getattr(self._model, method), '__batch_size__', None)
            (method_name, method_impl) = self._wrap_remote_method(method, batch_size)
            self.__remote_methods__.append(method_name)
            setattr(self, method_name, method_impl)
            for i in range(self._capacity):
                (method_name, method_impl) = self._wrap_remote_method(method, batch_size, i)
                self.__remote_methods__.append(method_name)
                setattr(self, method_name, method_impl)
                (method_name, method_impl) = self._wrap_remote_method(method, batch_size, ((- i) - 1))
                setattr(self, method_name, method_impl)
                self.__remote_methods__.append(method_name)
    def _wrap_remote_method(self, method: str, batch_size: Optional[int]=None, version: int=ModelVersion.LATEST) -> Callable[(..., Any)]:
        # Build the `name[version]` dispatcher partial, tagging it with the
        # attributes the remote layer inspects.
        method_name = method
        if (version != ModelVersion.LATEST):
            method_name += f'[{version}]'
        method_impl = functools.partial(self._dispatch_model_call, version, method)
        setattr(method_impl, '__remote__', True)
        if (batch_size is not None):
            setattr(method_impl, '__batch_size__', batch_size)
        return (method_name, method_impl)
    def _dispatch_model_call(self, version: int, method: str, *args, **kwargs) -> Any:
        # Move inputs to the model's device, call, and move results to CPU.
        model = self.model(version)
        device = model.device
        args = nested_utils.map_nested((lambda x: x.to(device)), args)
        kwargs = nested_utils.map_nested((lambda x: x.to(device)), kwargs)
        ret = getattr(model, method)(*args, **kwargs)
        ret = nested_utils.map_nested((lambda x: x.cpu()), ret)
        return ret
def extractAnathema(item):
    """Parse an 'Anathema' release item; return a release message or None."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip items with neither chapter nor volume, and preview posts.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    return buildReleaseMessageWithType(item, 'Anathema', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
def test_appending_records_null_schema_works(tmpdir):
    """Appending with schema=None must reuse the schema already stored in
    the file header when the file is reopened in 'a+b' mode.

    NOTE(review): the original docstring was garbled into a lone quote
    character (a syntax error) in this listing; this text is reconstructed
    from the test name.
    """
    schema = {'type': 'record', 'name': 'test_appending_records_different_schema_fails', 'fields': [{'name': 'field', 'type': 'string'}]}
    test_file = str(tmpdir.join('test.avro'))
    # First write establishes the schema in the container header.
    with open(test_file, 'wb') as new_file:
        fastavro.writer(new_file, schema, [{'field': 'foo'}])
    # Append with a None schema: must not raise.
    with open(test_file, 'a+b') as new_file:
        fastavro.writer(new_file, None, [{'field': 'bar'}])
class OptionPlotoptionsPackedbubbleSonificationDefaultspeechoptionsActivewhen(Options):
    """Auto-generated options wrapper for a sonification `activeWhen`
    configuration object (crossingDown / crossingUp / max / min / prop).

    NOTE(review): every option below appears as a getter/setter pair with the
    same name and no @property/@name.setter decorators -- presumably the
    decorators were stripped from this listing; as written, each second `def`
    simply shadows the first.  Confirm against the generator output.
    """
    def crossingDown(self):
        # Getter: read `crossingDown` from the config store (default None).
        return self._config_get(None)
    def crossingDown(self, num: float):
        # Setter: write `crossingDown` (plain value, not JS code).
        self._config(num, js_type=False)
    def crossingUp(self):
        # Getter: read `crossingUp` (default None).
        return self._config_get(None)
    def crossingUp(self, num: float):
        self._config(num, js_type=False)
    def max(self):
        # Getter: read `max` (default None).
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Getter: read `min` (default None).
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def prop(self):
        # Getter: read `prop` (default None).
        return self._config_get(None)
    def prop(self, text: str):
        self._config(text, js_type=False)
class FSDPShampoo(torch.optim.Optimizer):
    """Distributed Shampoo optimizer operating on FSDP-flattened parameters.

    Each parameter's preconditioner is rebuilt from FSDP metadata
    (`param_metadata`). Tensor blocks that FSDP split across ranks are
    recovered either by splitting locally (``TensorBlockRecoveryMethod.SPLIT``)
    or by exchanging boundary gradient slices with neighbor ranks
    (``TensorBlockRecoveryMethod.COMM``). Checkpointing is not currently
    supported.
    """

    def __init__(self, params, param_metadata: Dict[torch.nn.Parameter, Tuple], lr: float = 0.01, betas: Tuple[float, float] = (0.9, 1.0), epsilon: float = 1e-12, momentum: float = 0.0, weight_decay: float = 0.0, max_preconditioner_dim: int = 1024, precondition_frequency: int = 1, start_preconditioning_step: int = -1, exponent_override: int = 0, exponent_multiplier: float = 1.0, use_nesterov: bool = False, use_bias_correction: bool = True, use_decoupled_weight_decay: bool = True, preconditioner_dtype: torch.dtype = torch.float, large_dim_method: LargeDimMethod = LargeDimMethod.BLOCKING, num_trainers_per_group: int = -1, use_merge_dims: bool = True, grafting_type: GraftingType = GraftingType.ADAGRAD, grafting_epsilon: float = 0.001, grafting_beta2: float = 1.0, use_protected_eigh: bool = True, use_dtensor: bool = False, debug_mode: bool = False, tensor_block_recovery: TensorBlockRecoveryMethod = TensorBlockRecoveryMethod.COMM, dist_group: Optional[dist.ProcessGroup] = None):
        logger.info('FSDP Shampoo is experimental and still under development! Checkpointing is not currently supported.')
        # --- Hyperparameter validation ---
        if not lr >= 0.0:
            raise ValueError(f'Invalid learning rate: {lr}. Must be >= 0.0.')
        if not 0.0 <= betas[0] < 1.0:
            raise ValueError(f'Invalid beta parameter at index 0: {betas[0]}. Must be in [0.0, 1.0).')
        if not 0.0 < betas[1] <= 1.0:
            raise ValueError(f'Invalid beta parameter at index 1: {betas[1]}. Must be in (0.0, 1.0].')
        if not epsilon > 0.0:
            raise ValueError(f'Invalid epsilon value: {epsilon}. Must be > 0.0.')
        if not 0.0 <= momentum < 1.0:
            raise ValueError(f'Invalid momentum parameter: {momentum}. Must be [0.0, 1.0).')
        if not weight_decay >= 0.0:
            raise ValueError(f'Invalid weight_decay value: {weight_decay}. Must be > 0.0.')
        if not max_preconditioner_dim >= 1:
            raise ValueError(f'Invalid max preconditioner dimension: {max_preconditioner_dim}. Must be >= 1.')
        if not precondition_frequency >= 1:
            raise ValueError(f'Invalid precondition frequency: {precondition_frequency}. Must be >= 1.')
        if not start_preconditioning_step >= -1:
            raise ValueError(f'Invalid start preconditioning step: {start_preconditioning_step}')
        if not num_trainers_per_group >= -1:
            raise ValueError(f'Invalid number of GPUs per group: {num_trainers_per_group}. Must be >= -1.')
        if not exponent_override >= 0:
            raise ValueError(f'Invalid exponent override: {exponent_override}. Must be >= 0.')
        if not 0.0 < grafting_beta2 <= 1.0:
            raise ValueError(f'Invalid grafting beta parameter: {grafting_beta2}. Must be in (0.0, 1.0].')
        if not grafting_epsilon > 0.0:
            raise ValueError(f'Invalid epsilon value: {grafting_epsilon}. Must be > 0.0.')
        # --- Distributed setup: resolve num_trainers_per_group (-1 means "local world size") ---
        if num_trainers_per_group > 1 or num_trainers_per_group == -1:
            if not torch.cuda.is_available():
                raise ValueError('Using distributed version of Shampoo without GPUs!')
            if not dist.is_initialized():
                raise ValueError('Using distributed version of Shampoo without initializing distributed process group!')
            if num_trainers_per_group == -1:
                num_trainers_per_group = int(os.environ.get('LOCAL_WORLD_SIZE', dist.get_world_size()))
            if not dist.get_world_size() >= num_trainers_per_group:
                num_trainers_per_group = dist.get_world_size()
                logger.warning(f'Number of GPUs per group {num_trainers_per_group} is specified larger than global world size {dist.get_world_size()}. Setting to default world size.')
            if not dist.get_world_size() % num_trainers_per_group == 0:
                raise ValueError(f'Invalid number of GPUs per group: {num_trainers_per_group}. Must divide global world size {dist.get_world_size()}.')
        else:
            num_trainers_per_group = 1
        super(FSDPShampoo, self).__init__(params, {LR: lr, BETAS: betas, MOMENTUM: momentum, WEIGHT_DECAY: weight_decay, EPSILON: epsilon, GRAFTING_EPSILON: grafting_epsilon, GRAFTING_BETA2: grafting_beta2})
        self._param_metadata = param_metadata
        self._max_preconditioner_dim = max_preconditioner_dim
        self._precondition_frequency = precondition_frequency
        self._exponent_override = exponent_override
        self._exponent_multiplier = exponent_multiplier
        self._num_trainers_per_group = num_trainers_per_group
        self._use_merge_dims = use_merge_dims
        self._large_dim_method = large_dim_method
        self._use_decoupled_weight_decay = use_decoupled_weight_decay
        self._preconditioner_dtype = preconditioner_dtype
        self._use_bias_correction = use_bias_correction
        self._grafting_type = grafting_type
        self._grafting_epsilon = grafting_epsilon
        self._grafting_beta2 = grafting_beta2
        self._parameter_count = 0
        self._use_nesterov = use_nesterov
        self._use_protected_eigh = use_protected_eigh
        self._use_dtensor = use_dtensor
        self._debug_mode = debug_mode
        self._tensor_block_recovery = tensor_block_recovery
        self._dist_group = dist_group
        if self._use_nesterov and momentum == 0.0:
            logger.warning('Nesterov flag is enabled but momentum parameter is zero! Continuing without using momentum or Nesterov acceleration...')
        # start_preconditioning_step == -1 defaults to the precondition frequency.
        if start_preconditioning_step == -1:
            self._start_preconditioning_step = precondition_frequency
            logger.warning(f'start_preconditioning_step set to -1. Setting start_preconditioning_step equal to precondition frequency {precondition_frequency} by default.')
        elif start_preconditioning_step < precondition_frequency:
            raise ValueError(f'Invalid start_preconditioning_step value: {start_preconditioning_step}. Must be >= precondition_frequency = {precondition_frequency!r}.')
        else:
            self._start_preconditioning_step = start_preconditioning_step
        self._initialize_preconditioners_and_steps()

    # NOTE: the bare `_grad()` lines in the extracted source were stripped
    # `@torch.no_grad()` decorators; they are restored on every state-mutating
    # method below.
    @torch.no_grad()
    def _initialize_preconditioners_and_steps(self):
        """Create a preconditioner and step counter for every non-empty parameter."""
        group_rank = dist.get_rank(group=self._dist_group)
        group_size = dist.get_world_size(group=self._dist_group)
        for group in self.param_groups:
            for idx, p in enumerate(group[PARAMS]):
                if p.numel() == 0:
                    continue
                state = self.state[p]
                state[STEP] = torch.tensor(0)
                if p not in self._param_metadata:
                    raise RuntimeError(f'Parameter {p} not found in metadata. Please make sure that the module containing this parameter has been wrapped with FSDP.')
                if self._tensor_block_recovery == TensorBlockRecoveryMethod.SPLIT:
                    state[PRECONDITIONERS] = SplitShampooPreconditioner(p, self._param_metadata[p], large_dim_method=self._large_dim_method, beta1=group[BETAS][0], beta2=group[BETAS][1], epsilon=group[EPSILON], exponent_override=self._exponent_override, exponent_multiplier=self._exponent_multiplier, use_bias_correction=self._use_bias_correction, max_preconditioner_dim=self._max_preconditioner_dim, dtype=self._preconditioner_dtype, idx=idx, use_merge_dims=self._use_merge_dims, start_preconditioning_step=self._start_preconditioning_step, grafting_type=self._grafting_type, grafting_beta2=self._grafting_beta2, grafting_epsilon=self._grafting_epsilon, use_protected_eigh=self._use_protected_eigh, use_dtensor=self._use_dtensor)
                elif self._tensor_block_recovery == TensorBlockRecoveryMethod.COMM:
                    # First rank only sends to the right, last rank only receives from the left.
                    left_comm = CommunicationType.NONE if group_rank == 0 else CommunicationType.RECV
                    right_comm = CommunicationType.NONE if group_rank == group_size - 1 else CommunicationType.SEND
                    state[PRECONDITIONERS] = CommunicatedSplitShampooPreconditioner(p, self._param_metadata[p], large_dim_method=self._large_dim_method, beta1=group[BETAS][0], beta2=group[BETAS][1], epsilon=group[EPSILON], exponent_override=self._exponent_override, exponent_multiplier=self._exponent_multiplier, use_bias_correction=self._use_bias_correction, max_preconditioner_dim=self._max_preconditioner_dim, dtype=self._preconditioner_dtype, idx=idx, use_merge_dims=self._use_merge_dims, start_preconditioning_step=self._start_preconditioning_step, grafting_type=self._grafting_type, grafting_beta2=self._grafting_beta2, grafting_epsilon=self._grafting_epsilon, use_protected_eigh=self._use_protected_eigh, use_dtensor=self._use_dtensor, left_comm=left_comm, right_comm=right_comm)
                else:
                    raise NotImplementedError(f'Invalid tensor block recovery method {self._tensor_block_recovery}!')
                self._parameter_count += state[PRECONDITIONERS].parameter_count
        logger.info(f'Total Parameter Count: {self._parameter_count}')

    @torch.no_grad()
    def _send_grad(self, forward_direction: bool):
        """Batch and wait on the point-to-point gradient boundary exchanges (COMM mode)."""
        ops = []
        for group in self.param_groups:
            for p in group[PARAMS]:
                if p.numel() == 0 or p.grad is None:
                    continue
                state = self.state[p]
                if forward_direction:
                    ops.extend(state[PRECONDITIONERS].get_forward_ops(p.grad))
                else:
                    ops.extend(state[PRECONDITIONERS].get_backward_ops())
        if len(ops) > 0:
            reqs = dist.batch_isend_irecv(ops)
            for req in reqs:
                req.wait()

    @torch.no_grad()
    def _compute_root_inverse(self):
        """Recompute preconditioner inverse roots for all active parameters."""
        for group in self.param_groups:
            for p in group[PARAMS]:
                if p.numel() == 0 or p.grad is None:
                    continue
                state = self.state[p]
                if isinstance(state[PRECONDITIONERS], (SplitShampooPreconditioner, CommunicatedSplitShampooPreconditioner)):
                    state[PRECONDITIONERS].compute_root_inverse()

    @torch.no_grad()
    def _compute_and_log_root_inverse_residuals(self):
        """Debug helper: log relative error/residual quantiles of the root inverses."""
        if self._preconditioner_dtype == torch.float64:
            expected_relative_error = 1e-07
        elif self._preconditioner_dtype == torch.float:
            expected_relative_error = 0.001
        else:
            logger.warning('Expected relative error/residual not supported for precision lower than float32.')
        relative_errors = []
        relative_residuals = []
        for group in self.param_groups:
            for p in group[PARAMS]:
                if p.numel() == 0:
                    continue
                state = self.state[p]
                if isinstance(state[PRECONDITIONERS], (SplitShampooPreconditioner, CommunicatedSplitShampooPreconditioner)):
                    relative_error, relative_residual = state[PRECONDITIONERS].compute_root_inverse_residuals()
                    relative_errors += relative_error
                    relative_residuals += relative_residual
        relative_errors = torch.stack(relative_errors)
        relative_residuals = torch.stack(relative_residuals)
        quantiles = torch.as_tensor([0, 0.25, 0.5, 0.75, 1], device=relative_errors.device, dtype=relative_errors.dtype)
        logger.debug(f'Expect Relative Error <= {expected_relative_error}')
        logger.debug(f"Relative Error (||X - X_hat||_inf / ||X||_inf) Average: {torch.mean(relative_errors)}, Quantiles [0, 25, 50, 75, 100]: {torch.quantile(relative_errors, quantiles, interpolation='nearest')}")
        logger.debug(f"Relative Residual (||X_hat^-r - A||_inf / ||A||_inf) Average: {torch.mean(relative_residuals)}, Quantiles [0, 25, 50, 75, 100]: {torch.quantile(relative_residuals, quantiles, interpolation='nearest')}")

    @torch.no_grad()
    def _apply_weight_decay(self):
        """Add coupled (L2) weight decay directly into the gradients."""
        for group in self.param_groups:
            weight_decay = group[WEIGHT_DECAY]
            for p in group[PARAMS]:
                if p.numel() == 0 or p.grad is None:
                    continue
                grad = p.grad
                if grad.is_sparse:
                    raise Exception('Sparse parameters are not currently supported by Shampoo.')
                if weight_decay != 0:
                    grad.add_(p, alpha=weight_decay)

    @torch.no_grad()
    def _update_preconditioners(self):
        """Accumulate current gradients into each parameter's preconditioner statistics."""
        for group in self.param_groups:
            for p in group[PARAMS]:
                if p.numel() == 0:
                    continue
                grad = p.grad
                state = self.state[p]
                if grad is None:
                    continue
                if p.grad.is_sparse:
                    raise Exception('Sparse parameters are not currently supported by Shampoo.')
                else:
                    state[PRECONDITIONERS].update_preconditioners(grad, state[STEP])

    @torch.no_grad()
    def _init_group(self, group: Dict[str, Any], iteration: torch.Tensor) -> Tuple[List[torch.Tensor], List[torch.Tensor], List[torch.Tensor]]:
        """Gather split parameter views, preconditioned gradients and momentum buffers for one group."""
        momentum_param = group[MOMENTUM]
        split_params = []
        split_preconditioned_grads = []
        split_momentum_directions = []
        for p in group[PARAMS]:
            if p.numel() == 0 or p.grad is None:
                continue
            if p.grad.is_sparse:
                raise Exception('Sparse parameters are not currently supported by Shampoo.')
            state = self.state[p]
            # Lazily create the momentum buffer the first time momentum is used.
            if momentum_param != 0.0 and MOMENTUM not in state:
                state[MOMENTUM] = torch.zeros_like(p.grad, memory_format=torch.preserve_format)
            split_params.extend(state[PRECONDITIONERS].apply_split(p))
            if self._tensor_block_recovery == TensorBlockRecoveryMethod.SPLIT:
                split_preconditioned_grads.extend(state[PRECONDITIONERS].precondition(p.grad, iteration))
            elif self._tensor_block_recovery == TensorBlockRecoveryMethod.COMM:
                split_preconditioned_grads.extend(state[PRECONDITIONERS].retrieve_preconditioned_grad())
            else:
                # BUGFIX: this message was missing its f-prefix in the original.
                raise NotImplementedError(f'invalid tensor block recovery method {self._tensor_block_recovery}')
            split_momentum_directions.extend(state[PRECONDITIONERS].apply_split(state[MOMENTUM]) if momentum_param != 0.0 else [])
        return (split_params, split_preconditioned_grads, split_momentum_directions)

    @torch.no_grad()
    def _iterate_step(self) -> torch.Tensor:
        """Advance every parameter's step counter; returns the (shared) new step value."""
        iteration = None
        for group in self.param_groups:
            for p in group[PARAMS]:
                if p.numel() == 0:
                    continue
                self.state[p][STEP] += 1
                iteration = self.state[p][STEP]
        return iteration

    @torch.no_grad()
    def reset_preconditioners(self):
        """Reset all preconditioner statistics to their initial state."""
        for group in self.param_groups:
            for p in group[PARAMS]:
                if p.numel() == 0:
                    continue
                self.state[p][PRECONDITIONERS].reset_preconditioners()

    @torch.no_grad()
    def step(self, closure=None):
        """Perform a single optimization step.

        Args:
            closure: optional callable that re-evaluates the model and
                returns the loss (run under `enable_grad`).

        Returns:
            The loss from `closure`, or None.
        """
        loss = None
        if closure is not None:
            with torch.enable_grad():
                loss = closure()
        iteration = self._iterate_step()
        if not self._use_decoupled_weight_decay:
            self._apply_weight_decay()
        if self._tensor_block_recovery == TensorBlockRecoveryMethod.COMM:
            self._send_grad(forward_direction=True)
        self._update_preconditioners()
        # Periodically (and only after the warmup threshold) refresh the inverse roots.
        if iteration % self._precondition_frequency == 0 and iteration >= self._start_preconditioning_step:
            self._compute_root_inverse()
            if self._debug_mode:
                self._compute_and_log_root_inverse_residuals()
        for group in self.param_groups:
            for p in group[PARAMS]:
                state = self.state[p]
                if p.numel() == 0 or p.grad is None:
                    continue
                state[PRECONDITIONERS].update_exp_avg(p.grad, iteration)
                if self._tensor_block_recovery == TensorBlockRecoveryMethod.COMM:
                    state[PRECONDITIONERS].precondition_and_store(p.grad, iteration)
        if self._tensor_block_recovery == TensorBlockRecoveryMethod.COMM:
            self._send_grad(forward_direction=False)
        for group in self.param_groups:
            momentum_param = group[MOMENTUM]
            weight_decay = group[WEIGHT_DECAY]
            lr = group[LR]
            (split_params, split_preconditioned_grads, split_momentum_directions) = self._init_group(group, iteration)
            split_search_directions = split_preconditioned_grads
            # Decoupled (AdamW-style) weight decay: shrink params directly when
            # momentum is off, otherwise fold the decay into the search direction.
            if self._use_decoupled_weight_decay and weight_decay != 0.0:
                if momentum_param == 0.0:
                    torch._foreach_mul_(split_params, 1.0 - lr * weight_decay)
                else:
                    torch._foreach_add_(split_search_directions, split_params, alpha=weight_decay)
            if momentum_param != 0.0:
                torch._foreach_mul_(split_momentum_directions, momentum_param)
                torch._foreach_add_(split_momentum_directions, split_search_directions)
                if self._use_nesterov:
                    torch._foreach_add_(split_search_directions, split_momentum_directions, alpha=momentum_param)
                else:
                    split_search_directions = split_momentum_directions
            torch._foreach_add_(split_params, split_search_directions, alpha=-lr)
        return loss
def test_make_config_positional_args_complex():
    """Variable positional args ('*') are validated per-element against the Union type."""
    # NOTE: the bare `_registry.cats(...)` line in the extracted source was a
    # stripped decorator; restored (the block resolves via `my_registry` below).
    @my_registry.cats('catsie.v890')
    def catsie_890(*args: Optional[Union[(StrictBool, PositiveInt)]]):
        assert (args[0] == 123)
        return args[0]
    cfg = {'config': {'': 'catsie.v890', '*': [123, True, 1, False]}}
    assert (my_registry.resolve(cfg)['config'] == 123)
    # A string 'True' is neither a StrictBool nor a PositiveInt -> validation error.
    cfg = {'config': {'': 'catsie.v890', '*': [123, 'True']}}
    with pytest.raises(ConfigValidationError):
        my_registry.resolve(cfg)
def fetch_exchange(zone_key1: str, zone_key2: str, session: Optional[Session] = None, target_datetime: (datetime | None) = None, logger: Logger = getLogger(__name__)):
    """Fetch hourly cross-border exchange flows between two zones from ods.org.hn.

    Args:
        zone_key1 / zone_key2: the two zone identifiers (order-independent).
        session: optional requests Session; a fresh one is created per call.
            (The original default `Session()` was evaluated once at import time
            and shared across every call — a mutable-default bug.)
        target_datetime: unsupported; only live data can be parsed.
        logger: destination for diagnostics.

    Raises:
        ParserException: if a historical datetime is requested.
    """
    if target_datetime is not None:
        raise ParserException('HN.py', 'This parser is not yet able to parse past dates')
    if session is None:
        session = Session()
    CSV_data, EXCHANGE_MAP = get_data(session, 'exchange')
    exchange_per_hour, date = get_values(CSV_data, EXCHANGE_MAP, 'exchange')
    sorted_zone_keys = '->'.join(sorted([zone_key1, zone_key2]))
    exchange_list = []
    if date is not None:
        for index in range(0, 24):
            # Hours with no reported flow come back as empty dicts; skip them.
            if exchange_per_hour[index] != {}:
                exchange_list.append({'sortedZoneKeys': sorted_zone_keys, 'datetime': get_datetime(date, index), 'netFlow': exchange_per_hour[index][sorted_zone_keys], 'source': 'ods.org.hn'})
    return exchange_list
def convert_not_equal(name, operator, version_id):
    """Translate a '!=' Python version specifier into an RPM rich dependency.

    A wildcard spec ('X.*') excludes the whole X range; otherwise only the
    exact version is excluded. Legacy wildcard versions are rejected.
    """
    template = '({{name}} < {} or {{name}} {} {})'
    if version_id.endswith('.*'):
        trimmed = version_id[:-2]
        lower = RpmVersion(trimmed)
        if lower.is_legacy():
            return 'Invalid version'
        upper = RpmVersion(trimmed).increment()
        # '~~' sorts just below any release of the excluded range.
        return template.format('{}~~'.format(lower), '>=', upper)
    exact = RpmVersion(version_id)
    return template.format(exact, '>', exact)
class Generator(lg.Node):
    """LabGraph node that publishes random source/detector samples at a fixed rate."""
    OUTPUT = lg.Topic(RandomMessage)
    config: GeneratorConfig

    # NOTE: the bare `(OUTPUT)` line in the extracted source was a stripped
    # publisher decorator; restored so the node actually publishes.
    @lg.publisher(OUTPUT)
    async def generate_noise(self) -> lg.AsyncPublisher:
        while True:
            yield (self.OUTPUT, RandomMessage(sources=np.random.random(1), detectors=np.random.random(8)))
            # Pace emission to the configured sample rate.
            await asyncio.sleep(1 / self.config.sample_rate)
class OptionSeriesPyramid3dSonificationTracksMappingLowpassFrequency(Options):
    """Highcharts lowpass-frequency mapping options (generated accessor class).

    As extracted, each getter/setter pair was a plain duplicate `def`, so the
    setter silently shadowed the getter. The stripped `@property` decorators
    are restored here so both directions work.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class TestDumpManifest(unittest.TestCase):
    """Exercise dump_manifest against every known .editorconfig style."""
    tmpdir: TemporaryDirectory

    def setUp(self):
        self.tmpdir = TemporaryDirectory()

    def tearDown(self):
        self.tmpdir.cleanup()

    def test_editorconfig(self):
        base = Path(self.tmpdir.name)
        for index, (econfig, expected_data) in enumerate(EDITORCONFIG_STYLES):
            # Each style gets its own subdirectory so configs don't interfere.
            target = base / str(index) / f'{index}.json'
            target.parent.mkdir(parents=True)
            if econfig:
                (target.parent / '.editorconfig').write_text(econfig)
            target.write_text('{}')
            dump_manifest(EDITORCONFIG_SAMPLE_DATA, target)
            self.assertEqual(target.read_text(), expected_data)
class VirtualKeyboard(QWidget):
    """On-screen keyboard that appends characters to a password line edit.

    Page order and character positions are shuffled on every refresh,
    presumably to resist shoulder-surfing/keyloggers — intent inferred
    from the shuffling, not stated in SOURCE.
    """

    def __init__(self, pw_edit):
        super().__init__(pw_edit)
        self.pw_edit = pw_edit
        # `pages` is a module-level sequence of character pages; show the first
        # initially, and keep a private shuffled copy for button order.
        self.page = pages[0]
        self.pages = pages.copy()
        self.refresh_button = vkb_button(self._refresh)
        self.refresh_button.setIcon(read_QIcon('refresh_win10_16.png'))
        self.refresh_button.setToolTip(_('Regenerate page'))
        # One button per page, plus one button per character slot.
        self.page_buttons = [vkb_button(self._on_page_button) for page in pages]
        self.char_buttons = [vkb_button(self._char_pressed) for n in range(max_chars)]
        self.setLayout(self._create_grid_layout())
        self._refresh()

    def _refresh(self, _button=None):
        # Reshuffle page order, re-bind page buttons, then reshuffle characters.
        random.shuffle(self.pages)
        for (button, page) in zip(self.page_buttons, self.pages):
            button.setIcon(read_QIcon(page.icon))
            button.setToolTip(page.tooltip)
            # Disable the button for the page currently displayed.
            button.setDisabled((page is self.page))
        self._shuffle_page()

    def _shuffle_page(self):
        # Randomize character placement; hide buttons beyond the page's size.
        chars = list(self.page.chars)
        random.shuffle(chars)
        for (n, char_button) in enumerate(self.char_buttons):
            if (n < len(chars)):
                # '&' must be doubled: Qt treats a single '&' as a mnemonic marker.
                char_button.setText((chars[n] if (chars[n] != '&') else '&&'))
                char_button.setVisible(True)
            else:
                char_button.setVisible(False)

    def _create_grid_layout(self):
        # Characters fill a rows x cols grid; refresh + page buttons sit in a
        # separate column to the right.
        grid = QGridLayout()
        grid.setVerticalSpacing(2)
        grid.setHorizontalSpacing(1)
        grid.setContentsMargins(0, 4, 0, 0)
        rows = 6
        cols = (((max_chars + rows) - 1) // rows)
        grid.addWidget(self.refresh_button, 0, (cols + 1))
        for (n, button) in enumerate(self.page_buttons):
            grid.addWidget(button, (n + 1), (cols + 1))
        for (n, button) in enumerate(self.char_buttons):
            grid.addWidget(button, (n // cols), (n % cols))
        # Spacer column width scales with the display DPI.
        grid.setColumnMinimumWidth(cols, (app_state.app.dpi / 12))
        return grid

    def _on_page_button(self, button):
        self.page = self.pages[self.page_buttons.index(button)]
        self._refresh()

    def _char_pressed(self, button):
        # [0] strips the doubled '&&' back to a single '&'.
        self.pw_edit.setText((self.pw_edit.text() + button.text()[0]))
def read_font(font: str) -> dict:
    """Parse an aff3 bitmap font file.

    Args:
        font: either a path containing '/', used as-is, or a bare font name
            resolved against the package's bundled `fonts` directory
            (the '.aff' suffix is optional).

    Returns:
        dict with keys 'height' (glyph block height), 'korsi' (baseline
        offset) and 'glyphs' mapping (chars, variation) -> (direction, art).

    Raises:
        FileNotFoundError: if the font file cannot be opened.
        SystemExit: on malformed header or glyph records.
    """
    fonts_dir = (__file__.replace('__init__.py', '') + 'fonts')
    if '/' in font:
        font_filename = os.path.realpath(str(font))
    else:
        font_filename = (((fonts_dir.rstrip('/') + '/') + font.replace('.aff', '')) + '.aff')
    file_line = 0
    try:
        fontfile = open(font_filename)
    except OSError:
        # Was a bare `except:` converting *any* error (including KeyboardInterrupt)
        # into FileNotFoundError; now only open() failures are mapped.
        raise FileNotFoundError(font_filename)
    # `with` guarantees the handle is closed even on the error exits below
    # (the original leaked it on every sys.exit / early break).
    with fontfile:
        aff_headers = fontfile.readline().split(' ')
        file_line += 1
        if aff_headers[0] != 'aff3':
            print('this is not an aff3 font. or there is an Error in header.', file=sys.stderr)
            sys.exit(1)
        block_height = int(aff_headers[1])
        korsi = int(aff_headers[2])
        comment_lines = int(aff_headers[3])
        num_chars = int(aff_headers[5])
        default_direction = int(aff_headers[6])
        # Skip the declared number of comment lines.
        for _ in range(comment_lines):
            fontfile.readline()
            file_line += 1
        font_glyphs = dict()
        for i in range(num_chars):
            persianchars = fontfile.readline().rstrip('\n')
            file_line += 1
            if len(persianchars) == 0:
                print(f'there is an Error in Line {file_line}', file=sys.stderr)
                print(f'character for this glyph is empty.', file=sys.stderr)
            try:
                line = fontfile.readline()
                if len(line) < 1:
                    print(f'file seems to be broken. trying to read a glyph that does not exist!', file=sys.stderr)
                    print(f'ignoring from line {file_line}', file=sys.stderr)
                    break
                (char_variation, char_direction) = list(map(int, line.rstrip('\n').split(' ')))
                file_line += 1
            except ValueError:
                print(f'there is an Error in font file. somewhere near Line {file_line}', file=sys.stderr)
                sys.exit(1)
            # Read the fixed-height ASCII-art block for this glyph (strip trailing newlines).
            persianasciichars = '\n'.join([fontfile.readline()[:(- 1)] for _ in range(block_height)])
            file_line += block_height
            font_glyphs[(persianchars, char_variation)] = (char_direction, persianasciichars)
    return {'height': block_height, 'korsi': korsi, 'glyphs': font_glyphs}
class BaseTest(object):
    """Shared trait-assignment test mixin.

    Subclasses supply `obj`, `_default_value`, `_good_values`,
    `_mapped_values` and `_bad_values`.
    """

    def assign(self, value):
        # Indirection so assertRaises can capture attribute-assignment errors.
        self.obj.value = value

    def coerce(self, value):
        # Identity by default; override when the trait transforms its input.
        return value

    def test_assignment(self):
        self.assertEqual(self.obj.value, self._default_value)
        for index, good in enumerate(self._good_values):
            self.obj.value = good
            self.assertEqual(self.obj.value, self.coerce(good))
            # Only the first len(_mapped_values) entries have shadow values.
            if index < len(self._mapped_values):
                self.assertEqual(self.obj.value_, self._mapped_values[index])
        for bad in self._bad_values:
            self.assertRaises(TraitError, self.assign, bad)
def test():
    """Grade the spaCy token-extension exercise (feedback messages in French)."""
    assert Token.has_extension('is_country'), "As-tu declare l'extension du token ?"
    extension = Token.get_extension('is_country')
    assert (extension[0] == False), 'As-tu defini correctement la valeur par defaut ?'
    expected_flags = [False, False, False, True, False]
    assert ([t._.is_country for t in doc] == expected_flags), 'As-tu change la valeur pour le bon token ?'
    assert ('print([(token.text, token._.is_country)' in __solution__), 'Affiches-tu les bons attributs de token ?'
    __msg__.good('Bien joue !')
# NOTE: the bare `.django_db` line in the extracted source was a stripped
# pytest mark; restored.
@pytest.mark.django_db
def test_show_collaborators_tab_when_all_checks_are_false_should_return_false(user1, event1, mocker):
    """With every permission check mocked to False, the collaborators tab is hidden."""
    mock_can_register_as_collaborator = mocker.patch('manager.templatetags.filters.can_register_as_collaborator')
    mock_can_register_as_collaborator.return_value = False
    mock_can_register_as_installer = mocker.patch('manager.templatetags.filters.can_register_as_installer')
    mock_can_register_as_installer.return_value = False
    mock_can_register_installations = mocker.patch('manager.templatetags.filters.can_register_installations')
    mock_can_register_installations.return_value = False
    mock_can_take_attendance = mocker.patch('manager.templatetags.filters.can_take_attendance')
    mock_can_take_attendance.return_value = False
    mock_is_organizer = mocker.patch('manager.templatetags.filters.is_organizer')
    mock_is_organizer.return_value = False
    assert not filters.show_collaborators_tab(user1, event1)
    # Every check must have been consulted exactly once.
    mock_can_register_as_collaborator.assert_called_once_with(user1, event1)
    mock_can_register_as_installer.assert_called_once_with(user1, event1)
    mock_can_register_installations.assert_called_once_with(user1, event1)
    mock_can_take_attendance.assert_called_once_with(user1, event1.event_slug)
    mock_is_organizer.assert_called_once_with(user1, event1.event_slug)
def test_valid_document(tmpdir):
    """`validate` exits 0 with a success message for a minimal OpenAPI 3 schema."""
    minimal_schema = {'openapi': '3.0.0', 'info': {'title': '', 'version': ''}, 'paths': {}}
    schema = os.path.join(tmpdir, 'schema.json')
    with open(schema, 'w') as schema_file:
        json.dump(minimal_schema, schema_file)
    result = CliRunner().invoke(cli, ['validate', '--path', schema, '--format', 'openapi'])
    assert result.exit_code == 0
    assert result.output == ' Valid OpenAPI schema.\n'
# NOTE: the bare `.asyncio` / `.workspace_host` / `.authenticated_admin` /
# `.htmx` lines in the extracted source were stripped pytest marks, and the
# method signatures were mangled (`test_client_dashboard: test_data: TestData`
# is a syntax error). Marks restored; the client's annotation is a best-guess
# ('httpx.AsyncClient') written as a string to avoid a hard import — TODO confirm.
@pytest.mark.asyncio
@pytest.mark.workspace_host
class TestUserOAuthAccounts:
    """Dashboard endpoints listing a user's OAuth accounts."""

    async def test_unauthorized(self, unauthorized_dashboard_assertions: HTTPXResponseAssertion, test_client_dashboard: 'httpx.AsyncClient', test_data: TestData):
        response = await test_client_dashboard.get(f"/users/{test_data['users']['regular'].id}/oauth-accounts")
        unauthorized_dashboard_assertions(response)

    @pytest.mark.authenticated_admin(mode='session')
    async def test_not_existing(self, test_client_dashboard: 'httpx.AsyncClient', not_existing_uuid: uuid.UUID):
        response = await test_client_dashboard.get(f'/users/{not_existing_uuid}/oauth-accounts')
        assert response.status_code == status.HTTP_404_NOT_FOUND

    @pytest.mark.authenticated_admin(mode='session')
    @pytest.mark.htmx(target='aside')
    async def test_valid(self, test_client_dashboard: 'httpx.AsyncClient', test_data: TestData):
        user = test_data['users']['regular']
        response = await test_client_dashboard.get(f'/users/{user.id}/oauth-accounts')
        assert response.status_code == status.HTTP_200_OK
        html = BeautifulSoup(response.text, features='html.parser')
        rows = html.find('table', id='user-oauth-accounts-table').find('tbody').find_all('tr')
        # One row per OAuth account belonging to this user.
        assert len(rows) == len([oauth_account for oauth_account in test_data['oauth_accounts'].values() if oauth_account.user_id == user.id])
class PhysicalFile(tuple):
    """An immutable sequence of logical files, usable as a context manager.

    Exiting the context (or calling close()) closes every contained
    logical file.
    """

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()

    def close(self):
        """Close all logical files."""
        for logical_file in self:
            logical_file.close()

    def __repr__(self):
        return 'PhysicalFile(logical files: {})'.format(len(self))

    def describe(self, width=80, indent=''):
        """Return a formatted, human-readable summary of the contents."""
        buf = StringIO()
        utils.describe_header(buf, 'Physical File', width, indent)
        utils.describe_dict(buf, {'Number of Logical Files': len(self)}, width, indent)
        for logical_file in self:
            details = OrderedDict()
            details['Description'] = repr(logical_file)
            details['Frames'] = len(logical_file.frames)
            details['Channels'] = len(logical_file.channels)
            utils.describe_dict(buf, details, width, indent)
        return utils.Summary(info=buf.getvalue())
class VirtualMachineError(Exception):
    """EVM transaction failure parsed from an RPC error payload.

    Extracts the revert type/message, transaction id, program counter and
    any dev revert comment from the (Ganache-style) error dict.
    """

    def __init__(self, exc: ValueError) -> None:
        # Fields are pre-initialized so __str__ is safe even on partial parses.
        self.txid: str = ''
        self.source: str = ''
        self.revert_type: str = ''
        self.pc: Optional[int] = None
        self.revert_msg: Optional[str] = None
        self.dev_revert_msg: Optional[str] = None
        # The interesting payload is usually the first arg of the ValueError.
        try:
            exc = exc.args[0]
        except Exception:
            pass
        # Anything that is not a {'message': ..., 'data': ...} dict is re-raised
        # as a plain ValueError for the caller to handle.
        if (not (isinstance(exc, dict) and ('message' in exc))):
            raise ValueError(str(exc)) from None
        if ('data' not in exc):
            raise ValueError(exc['message']) from None
        self.message: str = exc['message'].rstrip('.')
        if isinstance(exc['data'], str):
            # String payload: either a raw error string or an ABI-encoded
            # Error(string) revert (recognized by its 4-byte selector prefix).
            if (not exc['data'].startswith(ERROR_SIG)):
                err_msg = exc['data']
                if err_msg.endswith('0x'):
                    err_msg = exc['data'][:(- 2)].strip()
                raise ValueError(f'{self.message}: {err_msg}') from None
            self.revert_type = 'revert'
            err_msg = exc['data'][len(ERROR_SIG):]
            # Decode the ABI-encoded revert reason string.
            (err_msg,) = eth_abi.decode(['string'], HexBytes(err_msg))
            self.revert_msg = err_msg
            return
        # Dict payload: keyed by transaction hash ('0x...') -> failure details.
        try:
            (txid, data) = next(((k, v) for (k, v) in exc['data'].items() if k.startswith('0x')))
            self.revert_type = data['error']
        except StopIteration:
            raise ValueError(exc['message']) from None
        self.txid = txid
        self.source = ''
        self.pc = data.get('program_counter')
        # For reverts the reported counter is one past the REVERT opcode.
        if (self.pc and (self.revert_type == 'revert')):
            self.pc -= 1
        self.revert_msg = data.get('reason')
        # A '// dev: ...' source comment may document the revert at this pc.
        self.dev_revert_msg = brownie.project.build._get_dev_revert(self.pc)
        if ((self.revert_msg is None) and (self.revert_type in ('revert', 'invalid opcode'))):
            self.revert_msg = self.dev_revert_msg
        elif (self.revert_msg == 'Failed assertion'):
            # Prefer the dev comment over the generic assertion message.
            self.revert_msg = (self.dev_revert_msg or self.revert_msg)

    def __str__(self) -> str:
        # Parsing may have bailed before revert_type was set; fall back to message.
        if (not hasattr(self, 'revert_type')):
            return str(self.message)
        msg = self.revert_type
        if self.revert_msg:
            msg = f'{msg}: {self.revert_msg}'
        if self.source:
            msg = f'''{msg}
{self.source}'''
        return str(msg)

    def _with_attr(self, **kwargs) -> 'VirtualMachineError':
        """Attach extra attributes (fluent); re-resolves a 'Failed assertion' message."""
        for (key, value) in kwargs.items():
            setattr(self, key, value)
        if (self.revert_msg == 'Failed assertion'):
            self.revert_msg = (self.dev_revert_msg or self.revert_msg)
        return self
class CartesianGrid(Grid):
    """A grid in Cartesian coordinates with in-place affine transforms.

    As extracted, x/y/z/w were plain methods and `_get_automatic_weights`
    took no `self`; the stripped `@property` / `@staticmethod` decorators
    are restored.
    """
    _coordinate_system = 'cartesian'

    @property
    def x(self):
        # First coordinate axis.
        return self.coords[0]

    @property
    def y(self):
        return self.coords[1]

    @property
    def z(self):
        return self.coords[2]

    @property
    def w(self):
        return self.coords[3]

    def scale(self, scale):
        """Scale the grid in place; weights scale by |scale|**ndim (or the product of per-axis factors)."""
        if np.isscalar(scale):
            self.weights *= (np.abs(scale) ** self.ndim)
        else:
            self.weights *= np.prod(np.abs(scale))
        self.coords *= scale
        return self

    def shift(self, shift):
        """Translate the grid in place."""
        self.coords += shift
        return self

    def rotate(self, angle, axis=None):
        """Rotate the grid in place by `angle` around `axis` (3D only); becomes unstructured."""
        R = _get_rotation_matrix(self.ndim, angle, axis)
        coords = np.einsum('ik,kn->in', R, np.array(self.coords))
        self.coords = UnstructuredCoords(coords)
        return self

    def rotated(self, angle, axis=None):
        """Return a rotated copy of the grid (the original is untouched)."""
        R = _get_rotation_matrix(self.ndim, angle, axis)
        coords = np.einsum('ik,kn->in', R, np.array(self.coords))
        return CartesianGrid(UnstructuredCoords(coords))

    @staticmethod
    def _get_automatic_weights(coords):
        # Regular grids: uniform cell volume. Separated grids: per-axis
        # midpoint spacings combined into an outer product.
        if coords.is_regular:
            return np.prod(coords.delta)
        elif coords.is_separated:
            weights = []
            for i in range(len(coords)):
                x = coords.separated_coords[i]
                w = ((x[2:] - x[:(- 2)]) / 2.0)
                # Endpoints get one-sided spacings.
                w = np.concatenate(([(x[1] - x[0])], w, [(x[(- 1)] - x[(- 2)])]))
                weights.append(w)
            return _prod(np.ix_(*weights[::(- 1)])).ravel()
class vtk(testing.TestCase):
    """Round-trip regression test for ``export.vtk``.

    Parameterisations set ``ndims`` and ``xtype`` (and optionally
    ``ptype``/``pshape`` for point data, ``ctype``/``cshape`` for cell data).
    ``setUp`` builds a minimal mesh, ``test_data`` writes it via
    ``export.vtk`` and compares the resulting file byte-for-byte against the
    expected big-endian legacy-VTK stream produced by ``data``.
    """

    def setUp(self):
        """Create vertices, connectivity and optional point/cell data arrays."""
        super().setUp()
        if (self.ndims == 1):
            self.x = numpy.array([[0], [1], [2], [3]], dtype=self.xtype)
            self.tri = numpy.array([[0, 1], [1, 2], [2, 3]])
        elif (self.ndims == 2):
            self.x = numpy.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=self.xtype)
            self.tri = numpy.array([[0, 1, 2], [1, 2, 3]])
        elif (self.ndims == 3):
            self.x = numpy.array([[0, 0, 0], [0, 1, 0], [1, 0, 0], [0, 0, 1]], dtype=self.xtype)
            self.tri = numpy.array([[0, 1, 2, 3]])
        else:
            raise Exception('invalid ndims {}'.format(self.ndims))
        if hasattr(self, 'ptype'):
            # Point data: one value block per vertex, shaped (nverts, *pshape).
            self.p = numpy.arange((len(self.x) * numpy.prod(self.pshape))).astype(self.ptype).reshape(((len(self.x),) + self.pshape))
        else:
            self.p = None
        if hasattr(self, 'ctype'):
            # Cell data: one value block per element, shaped (ncells, *cshape).
            self.c = numpy.arange((len(self.tri) * numpy.prod(self.cshape))).astype(self.ctype).reshape(((len(self.tri),) + self.cshape))
        else:
            self.c = None

    def data(self):
        """Yield the expected legacy-VTK (BINARY, big-endian) file contents."""
        (yield b'# vtk DataFile Version 3.0\nvtk output\nBINARY\nDATASET UNSTRUCTURED_GRID\n')
        if (self.xtype == 'i4'):
            (yield b'POINTS 4 int\n')
        elif (self.xtype == 'f4'):
            (yield b'POINTS 4 float\n')
        elif (self.xtype == 'f8'):
            (yield b'POINTS 4 double\n')
        else:
            raise Exception('not supported: xtype={!r}'.format(self.xtype))
        # Vertex coordinates, zero-padded to 3 components per point.
        if ((self.ndims == 1) and (self.xtype == 'i4')):
            (yield bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0]))
        elif ((self.ndims == 1) and (self.xtype == 'f4')):
            (yield bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 63, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 64, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]))
        elif ((self.ndims == 2) and (self.xtype == 'i4')):
            (yield bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0]))
        elif ((self.ndims == 2) and (self.xtype == 'f4')):
            (yield bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 63, 128, 0, 0, 0, 0, 0, 0, 63, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 63, 128, 0, 0, 63, 128, 0, 0, 0, 0, 0, 0]))
        elif ((self.ndims == 2) and (self.xtype == 'f8')):
            (yield bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 63, 240, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 63, 240, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 63, 240, 0, 0, 0, 0, 0, 0, 63, 240, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]))
        elif ((self.ndims == 3) and (self.xtype == 'f4')):
            (yield bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 63, 128, 0, 0, 0, 0, 0, 0, 63, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 63, 128, 0, 0]))
        else:
            raise Exception('not supported: xtype={!r}, ndims={}'.format(self.xtype, self.ndims))
        (yield b'\n')
        # Connectivity (CELLS) and the VTK cell-type ids (3=line, 5=triangle,
        # 10=tetrahedron), all as big-endian i4.
        if (self.ndims == 1):
            (yield b'CELLS 3 9\n')
            (yield bytes([0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 2, 0, 0, 0, 2, 0, 0, 0, 3]))
            (yield b'\nCELL_TYPES 3\n')
            (yield bytes([0, 0, 0, 3, 0, 0, 0, 3, 0, 0, 0, 3]))
        elif (self.ndims == 2):
            (yield b'CELLS 2 8\n')
            (yield bytes([0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3]))
            (yield b'\nCELL_TYPES 2\n')
            (yield bytes([0, 0, 0, 5, 0, 0, 0, 5]))
        elif (self.ndims == 3):
            (yield b'CELLS 1 5\n')
            (yield bytes([0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3]))
            (yield b'\nCELL_TYPES 1')
            # The leading 10 below is the '\n' (0x0a) terminating the header
            # line above; the remaining four bytes are the i4 cell type 10.
            (yield bytes([10, 0, 0, 0, 10]))
        else:
            raise Exception('invalid ndims {}'.format(self.ndims))
        (yield b'\n')
        if (self.p is not None):
            (yield b'POINT_DATA 4\n')
            if ((self.ptype == 'f4') and (self.pshape == ())):
                (yield b'SCALARS p float 1\nLOOKUP_TABLE default\n')
                (yield bytes([0, 0, 0, 0, 63, 128, 0, 0, 64, 0, 0, 0, 64, 64, 0, 0]))
            elif ((self.ptype == 'f8') and (self.pshape == ())):
                (yield b'SCALARS p double 1\nLOOKUP_TABLE default\n')
                (yield bytes([0, 0, 0, 0, 0, 0, 0, 0, 63, 240, 0, 0, 0, 0, 0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 64, 8, 0, 0, 0, 0, 0, 0]))
            elif ((self.ptype == 'i1') and (self.pshape == ())):
                (yield b'SCALARS p char 1\nLOOKUP_TABLE default\n')
                (yield bytes([0, 1, 2, 3]))
            elif ((self.ptype == 'i2') and (self.pshape == ())):
                (yield b'SCALARS p short 1\nLOOKUP_TABLE default\n')
                (yield bytes([0, 0, 0, 1, 0, 2, 0, 3]))
            elif ((self.ptype == 'i4') and (self.pshape == ())):
                (yield b'SCALARS p int 1\nLOOKUP_TABLE default\n')
                (yield bytes([0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3]))
            elif ((self.ptype == 'i1') and (self.pshape == (2,))):
                # 2-vectors are zero-padded to VTK's 3 components.
                (yield b'VECTORS p char\n')
                (yield bytes([0, 1, 0, 2, 3, 0, 4, 5, 0, 6, 7, 0]))
            elif ((self.ptype == 'f4') and (self.pshape == (2,))):
                (yield b'VECTORS p float\n')
                (yield bytes([0, 0, 0, 0, 63, 128, 0, 0, 0, 0, 0, 0, 64, 0, 0, 0, 64, 64, 0, 0, 0, 0, 0, 0, 64, 128, 0, 0, 64, 160, 0, 0, 0, 0, 0, 0, 64, 192, 0, 0, 64, 224, 0, 0, 0, 0, 0, 0]))
            elif ((self.ptype == 'i1') and (self.pshape == (3,))):
                (yield b'VECTORS p char\n')
                (yield bytes([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]))
            elif ((self.ptype == 'i1') and (self.pshape == (2, 2))):
                # 2x2 tensors zero-padded to VTK's 3x3 layout.
                (yield b'TENSORS p char\n')
                (yield bytes([0, 1, 0, 2, 3, 0, 0, 0, 0, 4, 5, 0, 6, 7, 0, 0, 0, 0, 8, 9, 0, 10, 11, 0, 0, 0, 0, 12, 13, 0, 14, 15, 0, 0, 0, 0]))
            elif ((self.ptype == 'i1') and (self.pshape == (3, 3))):
                (yield b'TENSORS p char\n')
                (yield bytes([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35]))
            else:
                raise Exception('not supported: ptype={}, udims={}'.format(self.ptype, self.udims))
            (yield b'\n')
        if (self.c is not None):
            (yield b'CELL_DATA 1\n')
            if ((self.ndims == 3) and (self.ctype == 'i1') and (self.cshape == ())):
                (yield b'SCALARS c char 1\nLOOKUP_TABLE default\n')
                (yield bytes([0]))
            else:
                raise Exception('not supported: ndims={}, ctype={}, cdims={}'.format(self.ndims, self.ctype, self.cdims))
            (yield b'\n')

    def test_data(self):
        """Write the mesh with export.vtk and compare the file byte-for-byte."""
        with tempfile.TemporaryDirectory() as outdir, treelog.set(treelog.DataLog(outdir)):
            kwargs = {}
            if (self.p is not None):
                kwargs['p'] = self.p
            if (self.c is not None):
                kwargs['c'] = self.c
            export.vtk('test', self.tri, self.x, **kwargs)
            with open(os.path.join(outdir, 'test.vtk'), 'rb') as f:
                data = f.read()
            # NOTE(review): joining `self.data` (not `self.data()`) only works
            # if `data` is a property/iterable — presumably a decorator not
            # visible in this view; confirm against the original file.
            self.assertEqual(data, b''.join(self.data))
def visit_fields_with_memo(fields: Dict[(str, FieldEntry)], func: Callable[([FieldEntry, Field], None)], memo: Optional[Dict[(str, Field)]]=None) -> None:
    """Recursively apply ``func(entry, memo)`` to every field entry.

    Entries carrying 'field_details' are visited; any nested 'fields'
    mapping is descended into depth-first with the same memo.
    """
    for entry in fields.values():
        if 'field_details' in entry:
            func(entry, memo)
        if 'fields' in entry:
            visit_fields_with_memo(entry['fields'], func, memo)
class StacktraceProfiler(threading.Thread):
    """Samples the monitored thread's stack every ``config.sampling_period``
    seconds for the duration of one request, accumulating per-line timings in
    a histogram and persisting them to the database when the request ends."""

    def __init__(self, thread_to_monitor, endpoint, ip, group_by, outlier_profiler=None):
        threading.Thread.__init__(self)
        self._keeprunning = True  # cleared by stop() to end the sampling loop
        self._thread_to_monitor = thread_to_monitor  # thread id to sample
        self._endpoint = endpoint
        self._ip = ip
        self._group_by = group_by
        self._duration = 0  # request wall time in ms, set by stop()
        self._histogram = defaultdict(float)  # (path, func, line) -> sampled seconds
        self._path_hash = PathHash()
        self._lines_body = []
        self._total = 0  # total sampled seconds attributed to endpoint code
        self._outlier_profiler = outlier_profiler
        self._status_code = 404

    def run(self):
        """Sampling loop: attribute the time elapsed since the previous sample
        to every stack frame at or below the endpoint function."""
        current_time = time.time()
        while self._keeprunning:
            newcurrent_time = time.time()
            duration = (newcurrent_time - current_time)
            current_time = newcurrent_time
            try:
                frame = sys._current_frames()[self._thread_to_monitor]
            except KeyError:
                log("Can't get the stacktrace of the main thread. Stopping StacktraceProfiler")
                log(('Thread to monitor: %s' % self._thread_to_monitor))
                log(('Running threads: %s' % sys._current_frames().keys()))
                break
            in_endpoint_code = False
            self._path_hash.set_path('')
            for (fn, ln, fun, line) in traceback.extract_stack(frame):
                # Start attributing once the endpoint function appears...
                if (self._endpoint.name == fun):
                    in_endpoint_code = True
                if in_endpoint_code:
                    key = (self._path_hash.get_path(fn, ln), fun, line)
                    self._histogram[key] += duration
                # ...or once the dashboard's own 'wrapper' frame is seen
                # (frames below it also count as endpoint code).
                if ((len(fn) > FILENAME_LEN) and (fn[(- FILENAME_LEN):] == FILENAME) and (fun == 'wrapper')):
                    in_endpoint_code = True
            if in_endpoint_code:
                self._total += duration
            # Sleep only the remainder of the sampling period.
            elapsed = (time.time() - current_time)
            if (config.sampling_period > elapsed):
                time.sleep((config.sampling_period - elapsed))
        self._on_thread_stopped()

    def stop(self, duration, status_code):
        """Signal the sampler to finish; `duration` is in seconds (stored ms)."""
        self._duration = (duration * 1000)
        self._status_code = status_code
        if self._outlier_profiler:
            self._outlier_profiler.stop_by_profiler()
        self._keeprunning = False

    def _on_thread_stopped(self):
        """Persist the request, its stack-line histogram and any outlier data."""
        update_duration_cache(endpoint_name=self._endpoint.name, duration=self._duration)
        with session_scope() as session:
            request_id = add_request(session, duration=self._duration, endpoint_id=self._endpoint.id, ip=self._ip, status_code=self._status_code, group_by=self._group_by)
            self._lines_body = order_histogram(self._histogram.items())
            self.insert_lines_db(session, request_id)
            if self._outlier_profiler:
                self._outlier_profiler.add_outlier(session, request_id)

    def insert_lines_db(self, session, request_id):
        """Store the function header followed by the sampled body lines,
        scaling each line's sampled share to the total request duration."""
        position = 0
        for code_line in self.get_funcheader():
            add_stack_line(session, request_id, position=position, indent=0, duration=self._duration, code_line=code_line)
            position += 1
        for (key, val) in self._lines_body:
            (path, fun, line) = key
            (fn, ln) = self._path_hash.get_last_fn_ln(path)
            indent = self._path_hash.get_indent(path)
            duration = (((val * self._duration) / self._total) if (self._total != 0) else 0)
            add_stack_line(session, request_id, position=position, indent=indent, duration=duration, code_line=(fn, ln, fun, line))
            position += 1

    def get_funcheader(self):
        """Return the endpoint function's source lines up to and including its
        `def` line (decorators included); raises ValueError if not found."""
        lines_returned = []
        try:
            fun = config.app.view_functions[self._endpoint.name]
        except AttributeError:
            fun = None
        if hasattr(fun, 'original'):
            original = fun.original
            fn = inspect.getfile(original)
            (lines, ln) = inspect.getsourcelines(original)
            count = 0
            for line in lines:
                lines_returned.append((fn, (ln + count), 'None', line.strip()))
                count += 1
                if (line.strip()[:4] == 'def '):
                    return lines_returned
        raise ValueError('Cannot retrieve the function header')
_blueprint.route('/projects/search')
_blueprint.route('/projects/search/')
_blueprint.route('/projects/search/<pattern>')
def projects_search(pattern=None):
    """Render the project search page.

    Unless an exact match is requested (?exact=1/true), results are widened
    with an extended pattern; a single exact-name hit redirects straight to
    the project page.
    """
    pattern = flask.request.args.get('pattern', pattern) or '*'
    exact = flask.request.args.get('exact', 0)
    try:
        page = int(flask.request.args.get('page', 1))
    except ValueError:
        page = 1
    projects = models.Project.search(Session, pattern=pattern, page=page)
    if str(exact).lower() in ('1', 'true'):
        projects_count = models.Project.search(Session, pattern=pattern, count=True)
    else:
        widened = get_extended_pattern(pattern)
        for candidate in models.Project.search(Session, pattern=widened, page=page):
            if candidate not in projects:
                projects.append(candidate)
        projects_count = models.Project.search(Session, pattern=widened, count=True)
    if projects_count == 1 and projects[0].name == pattern.replace('*', ''):
        flask.flash('Only one result matching with an exact match, redirecting')
        return flask.redirect(flask.url_for('anitya_ui.project', project_id=projects[0].id))
    total_page = int(ceil(projects_count / float(50)))
    return flask.render_template('search.html', current='projects', pattern=pattern, projects=projects, total_page=total_page, projects_count=projects_count, page=page)
class OptionPlotoptionsLineSonificationDefaultspeechoptionsMapping(Options):
    """Option proxy for plotOptions.line.sonification.defaultSpeechOptions.mapping.

    NOTE(review): `text` is defined twice (getter/setter pair sharing one
    name); as written the later def wins — property decorators are
    presumably applied elsewhere/not visible here. Confirm against the
    generator before changing.
    """

    def pitch(self) -> 'OptionPlotoptionsLineSonificationDefaultspeechoptionsMappingPitch':
        """Sub-option accessor for `pitch`."""
        return self._config_sub_data('pitch', OptionPlotoptionsLineSonificationDefaultspeechoptionsMappingPitch)

    def playDelay(self) -> 'OptionPlotoptionsLineSonificationDefaultspeechoptionsMappingPlaydelay':
        """Sub-option accessor for `playDelay`."""
        return self._config_sub_data('playDelay', OptionPlotoptionsLineSonificationDefaultspeechoptionsMappingPlaydelay)

    def rate(self) -> 'OptionPlotoptionsLineSonificationDefaultspeechoptionsMappingRate':
        """Sub-option accessor for `rate`."""
        return self._config_sub_data('rate', OptionPlotoptionsLineSonificationDefaultspeechoptionsMappingRate)

    def text(self):
        """Getter for `text` (no default)."""
        return self._config_get(None)

    def text(self, text: str):
        """Setter for `text`."""
        self._config(text, js_type=False)

    def time(self) -> 'OptionPlotoptionsLineSonificationDefaultspeechoptionsMappingTime':
        """Sub-option accessor for `time`."""
        return self._config_sub_data('time', OptionPlotoptionsLineSonificationDefaultspeechoptionsMappingTime)

    def volume(self) -> 'OptionPlotoptionsLineSonificationDefaultspeechoptionsMappingVolume':
        """Sub-option accessor for `volume`."""
        return self._config_sub_data('volume', OptionPlotoptionsLineSonificationDefaultspeechoptionsMappingVolume)
class PageModel():
    """Plain data holder for a site page, convertible to and from the ORM
    model and a dict-based cache representation."""

    def __init__(self):
        # All fields start unset (None); they are populated either directly
        # or via one of the alternate constructors below.
        self.id: int = None
        self.title: str = None
        self.link_name: str = None
        self.type: str = None
        self.order: int = None
        self.content: str = None

    def copy(self):
        """Return a new PageModel carrying the same field values."""
        m = PageModel()
        m.id = self.id
        m.title = self.title
        m.link_name = self.link_name
        m.type = self.type
        m.order = self.order
        m.content = self.content
        return m

    def from_title_link_type(cls, title: str, link: str, page_type: str):
        """Alternate constructor: new page with order 0 and empty content.

        Uses cls() (not a hard-coded PageModel()) for consistency with
        from_orm_model/from_cache, so subclasses construct themselves.
        """
        page = cls()
        page.title = title
        page.link_name = link
        page.type = page_type
        page.order = 0
        page.content = ''
        return page

    def from_orm_model(cls, model: PageOrmModel):
        """Alternate constructor: copy all fields from an ORM row."""
        m = cls()
        m.id = model.id
        m.title = model.title
        m.link_name = model.link_name
        m.type = model.type
        m.order = model.order
        m.content = model.content
        return m

    def from_cache(cls, cache: dict):
        """Alternate constructor: rebuild from the dict shape of to_cache()."""
        m = cls()
        m.id = cache['id']
        m.title = cache['title']
        m.link_name = cache['link_name']
        m.type = cache['type']
        m.order = cache['order']
        m.content = cache['content']
        return m

    def to_orm_model(self) -> PageOrmModel:
        """Project this model's fields onto a fresh ORM row."""
        orm_model = PageOrmModel()
        orm_model.id = self.id
        orm_model.title = self.title
        orm_model.link_name = self.link_name
        orm_model.type = self.type
        orm_model.order = self.order
        orm_model.content = self.content
        return orm_model

    def to_cache(self):
        """Return the cacheable dict representation (inverse of from_cache)."""
        return {'id': self.id, 'title': self.title, 'link_name': self.link_name, 'type': self.type, 'order': self.order, 'content': self.content}
class OptionSonificationGlobalcontexttracksMappingTremoloDepth(Options):
    """Option proxy for sonification.globalContextTracks.mapping.tremolo.depth.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name; as written the later def wins — property decorators are presumably
    applied elsewhere/not visible here.
    """

    def mapFunction(self):
        """Getter for `mapFunction` (no default)."""
        return self._config_get(None)

    def mapFunction(self, value: Any):
        """Setter for `mapFunction`."""
        self._config(value, js_type=False)

    def mapTo(self):
        """Getter for `mapTo` (no default)."""
        return self._config_get(None)

    def mapTo(self, text: str):
        """Setter for `mapTo`."""
        self._config(text, js_type=False)

    def max(self):
        """Getter for `max` (no default)."""
        return self._config_get(None)

    def max(self, num: float):
        """Setter for `max`."""
        self._config(num, js_type=False)

    def min(self):
        """Getter for `min` (no default)."""
        return self._config_get(None)

    def min(self, num: float):
        """Setter for `min`."""
        self._config(num, js_type=False)

    def value(self):
        """Getter for `value` (no default)."""
        return self._config_get(None)

    def value(self, num: float):
        """Setter for `value`."""
        self._config(num, js_type=False)

    def within(self):
        """Getter for `within` (no default)."""
        return self._config_get(None)

    def within(self, value: Any):
        """Setter for `within`."""
        self._config(value, js_type=False)
class PageCallToAction(AbstractCrudObject):
    """Graph API CRUD binding for the Page call-to-action node.

    Generated-style binding: nested classes enumerate field names and the
    allowed enum values; api_get/api_update/api_delete build FacebookRequest
    objects for the corresponding HTTP verbs on this node.
    """

    def __init__(self, fbid=None, parent_id=None, api=None):
        self._isPageCallToAction = True
        super(PageCallToAction, self).__init__(fbid, parent_id, api)

    class Field(AbstractObject.Field):
        # API field names; `field_from` maps the reserved word 'from'.
        android_app = 'android_app'
        android_deeplink = 'android_deeplink'
        android_destination_type = 'android_destination_type'
        android_package_name = 'android_package_name'
        android_url = 'android_url'
        created_time = 'created_time'
        email_address = 'email_address'
        field_from = 'from'
        id = 'id'
        intl_number_with_plus = 'intl_number_with_plus'
        iphone_app = 'iphone_app'
        iphone_deeplink = 'iphone_deeplink'
        iphone_destination_type = 'iphone_destination_type'
        iphone_url = 'iphone_url'
        status = 'status'
        type = 'type'
        updated_time = 'updated_time'
        web_destination_type = 'web_destination_type'
        web_url = 'web_url'

    class AndroidDestinationType():
        # Allowed values for the android_destination_type enum.
        app_deeplink = 'APP_DEEPLINK'
        become_a_volunteer = 'BECOME_A_VOLUNTEER'
        email = 'EMAIL'
        facebook_app = 'FACEBOOK_APP'
        follow = 'FOLLOW'
        marketplace_inventory_page = 'MARKETPLACE_INVENTORY_PAGE'
        menu_on_facebook = 'MENU_ON_FACEBOOK'
        messenger = 'MESSENGER'
        mini_shop = 'MINI_SHOP'
        mobile_center = 'MOBILE_CENTER'
        none = 'NONE'
        phone_call = 'PHONE_CALL'
        shop_on_facebook = 'SHOP_ON_FACEBOOK'
        website = 'WEBSITE'

    class IphoneDestinationType():
        # Allowed values for the iphone_destination_type enum.
        app_deeplink = 'APP_DEEPLINK'
        become_a_volunteer = 'BECOME_A_VOLUNTEER'
        email = 'EMAIL'
        facebook_app = 'FACEBOOK_APP'
        follow = 'FOLLOW'
        marketplace_inventory_page = 'MARKETPLACE_INVENTORY_PAGE'
        menu_on_facebook = 'MENU_ON_FACEBOOK'
        messenger = 'MESSENGER'
        mini_shop = 'MINI_SHOP'
        none = 'NONE'
        phone_call = 'PHONE_CALL'
        shop_on_facebook = 'SHOP_ON_FACEBOOK'
        website = 'WEBSITE'

    class Type():
        # Allowed values for the call-to-action `type` enum.
        become_a_volunteer = 'BECOME_A_VOLUNTEER'
        book_appointment = 'BOOK_APPOINTMENT'
        book_now = 'BOOK_NOW'
        buy_tickets = 'BUY_TICKETS'
        call_now = 'CALL_NOW'
        charity_donate = 'CHARITY_DONATE'
        contact_us = 'CONTACT_US'
        donate_now = 'DONATE_NOW'
        email = 'EMAIL'
        follow_page = 'FOLLOW_PAGE'
        get_directions = 'GET_DIRECTIONS'
        get_offer = 'GET_OFFER'
        get_offer_view = 'GET_OFFER_VIEW'
        interested = 'INTERESTED'
        learn_more = 'LEARN_MORE'
        listen = 'LISTEN'
        local_dev_platform = 'LOCAL_DEV_PLATFORM'
        message = 'MESSAGE'
        mobile_center = 'MOBILE_CENTER'
        open_app = 'OPEN_APP'
        order_food = 'ORDER_FOOD'
        play_music = 'PLAY_MUSIC'
        play_now = 'PLAY_NOW'
        purchase_gift_cards = 'PURCHASE_GIFT_CARDS'
        request_appointment = 'REQUEST_APPOINTMENT'
        request_quote = 'REQUEST_QUOTE'
        shop_now = 'SHOP_NOW'
        shop_on_facebook = 'SHOP_ON_FACEBOOK'
        sign_up = 'SIGN_UP'
        view_inventory = 'VIEW_INVENTORY'
        view_menu = 'VIEW_MENU'
        view_shop = 'VIEW_SHOP'
        visit_group = 'VISIT_GROUP'
        watch_now = 'WATCH_NOW'
        woodhenge_support = 'WOODHENGE_SUPPORT'

    class WebDestinationType():
        # Allowed values for the web_destination_type enum.
        become_a_volunteer = 'BECOME_A_VOLUNTEER'
        become_supporter = 'BECOME_SUPPORTER'
        email = 'EMAIL'
        follow = 'FOLLOW'
        messenger = 'MESSENGER'
        mobile_center = 'MOBILE_CENTER'
        none = 'NONE'
        shop_on_facebook = 'SHOP_ON_FACEBOOK'
        website = 'WEBSITE'

    def api_delete(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Build (and, unless batched/pending, execute) a DELETE on this node."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='DELETE', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Build (and, unless batched/pending, execute) a GET on this node."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=PageCallToAction, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def api_update(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Build (and, unless batched/pending, execute) a POST updating this node."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {'android_app_id': 'int', 'android_destination_type': 'android_destination_type_enum', 'android_package_name': 'string', 'android_url': 'string', 'email_address': 'string', 'intl_number_with_plus': 'string', 'iphone_app_id': 'int', 'iphone_destination_type': 'iphone_destination_type_enum', 'iphone_url': 'string', 'type': 'type_enum', 'web_destination_type': 'web_destination_type_enum', 'web_url': 'string'}
        enums = {'android_destination_type_enum': PageCallToAction.AndroidDestinationType.__dict__.values(), 'iphone_destination_type_enum': PageCallToAction.IphoneDestinationType.__dict__.values(), 'type_enum': PageCallToAction.Type.__dict__.values(), 'web_destination_type_enum': PageCallToAction.WebDestinationType.__dict__.values()}
        request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=PageCallToAction, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    # Declared field types used by the SDK's response parsing.
    _field_types = {'android_app': 'Application', 'android_deeplink': 'string', 'android_destination_type': 'string', 'android_package_name': 'string', 'android_url': 'string', 'created_time': 'datetime', 'email_address': 'string', 'from': 'Page', 'id': 'string', 'intl_number_with_plus': 'string', 'iphone_app': 'Application', 'iphone_deeplink': 'string', 'iphone_destination_type': 'string', 'iphone_url': 'string', 'status': 'string', 'type': 'string', 'updated_time': 'datetime', 'web_destination_type': 'string', 'web_url': 'string'}

    def _get_field_enum_info(cls):
        """Map enum group names to their allowed values for validation."""
        field_enum_info = {}
        field_enum_info['AndroidDestinationType'] = PageCallToAction.AndroidDestinationType.__dict__.values()
        field_enum_info['IphoneDestinationType'] = PageCallToAction.IphoneDestinationType.__dict__.values()
        field_enum_info['Type'] = PageCallToAction.Type.__dict__.values()
        field_enum_info['WebDestinationType'] = PageCallToAction.WebDestinationType.__dict__.values()
        return field_enum_info
def copy_solution_files(exercise: ExerciseInfo, workdir: Path, exercise_config: ExerciseConfig=None):
    """Copy the exercise's solution/exemplar files (plus editor helpers) into
    `workdir`.

    Solutions and exemplars are paired positionally: when one side runs out
    the other is copied under its own name; when both exist the exemplar's
    content is copied to the solution's relative destination (so tests run
    against the exemplar implementation).
    """
    if (exercise_config is not None):
        # Copy the config's lists so the fallback appends below can never
        # mutate the caller's ExerciseConfig in place.
        solution_files = list(exercise_config.files.solution)
        exemplar_files = list(exercise_config.files.exemplar)
        helper_files = exercise_config.files.editor
    else:
        solution_files = []
        exemplar_files = []
        helper_files = []
    if helper_files:
        helper_files = [(exercise.path / h) for h in helper_files]
        for helper_file in helper_files:
            dst = (workdir / helper_file.relative_to(exercise.path))
            copy_file(helper_file, dst)
    if (not solution_files):
        # Fall back to the conventional stub/exemplar locations.
        solution_files.append(exercise.solution_stub.name)
    solution_files = [(exercise.path / s) for s in solution_files]
    if (not exemplar_files):
        exemplar_files.append(exercise.exemplar_file.relative_to(exercise.path))
    exemplar_files = [(exercise.path / e) for e in exemplar_files]
    for (solution_file, exemplar_file) in zip_longest(solution_files, exemplar_files):
        if (solution_file is None):
            # More exemplars than solutions: copy under the exemplar's name.
            copy_file(exemplar_file, (workdir / exemplar_file.name))
        elif (exemplar_file is None):
            copy_file(solution_file, (workdir / solution_file.name))
        else:
            # Exemplar content lands at the solution's relative location.
            dst = (workdir / solution_file.relative_to(exercise.path))
            copy_file(exemplar_file, dst)
class PageTriggerBase(UrlTrigger):
    """Base class for URL triggers driven by a fixed page list.

    Subclasses supply ``pages`` (the iterable of page URLs to retrigger).
    """
    pluginName = 'Page Triggers'
    loggerPath = 'PageTriggers'

    def pages(self):
        # Hook for subclasses; yields nothing here (likely a stripped
        # property/abstract decorator in the original — confirm).
        pass

    def get_urls(self):
        """All page URLs handled by this trigger, as a list."""
        return list(self.pages)

    def retriggerPages(self):
        """Force a complete retrigger of every page, ignoring ignore-lists."""
        self.retriggerUrlList(self.pages, ignoreignore=True, retrigger_complete=True)
        self.log.info('Pages retrigger complete.')

    def go(self):
        """Entry point: log the page count and retrigger everything."""
        self.log.info('Retriggering %s pages.', len(self.pages))
        self.retriggerPages()
# NOTE(review): '.standalone' below looks like a decorator missing its
# '@'/owner (mayavi2.standalone) — confirm against the original file.
.standalone
def contour():
    """Build a demo scene from the heart.vtk sample data: an outline, grid
    planes on all three axes, two contour grid planes, an iso-surface and a
    scalar cut plane."""
    mayavi.new_scene()
    r = VTKFileReader()
    filename = join(mayavi2.get_data_dir(dirname(abspath(__file__))), 'heart.vtk')
    r.initialize(filename)
    mayavi.add_source(r)
    # Outline of the data bounds.
    o = Outline()
    mayavi.add_module(o)
    # Three grid planes; the first keeps the default axis.
    gp = GridPlane()
    mayavi.add_module(gp)
    gp = GridPlane()
    mayavi.add_module(gp)
    gp.grid_plane.axis = 'y'
    gp = GridPlane()
    mayavi.add_module(gp)
    gp.grid_plane.axis = 'z'
    # Two contour grid planes at position 15; the second is filled.
    cgp = ContourGridPlane()
    mayavi.add_module(cgp)
    cgp.grid_plane.position = 15
    cgp = ContourGridPlane()
    mayavi.add_module(cgp)
    cgp.grid_plane.axis = 'y'
    cgp.grid_plane.position = 15
    cgp.contour.filled_contours = True
    # Single iso-surface at scalar value 220.
    iso = IsoSurface(compute_normals=True)
    mayavi.add_module(iso)
    iso.contour.contours = [220.0]
    # Interactive cut plane, initially normal to z.
    cp = ScalarCutPlane()
    mayavi.add_module(cp)
    cp.implicit_plane.normal = (0, 0, 1)
def update_event_info(event_slug, render_dict=None, event=None):
    """Populate (or extend) a template context with the event identified by
    `event_slug` and its contacts; 404s when the slug does not exist.

    NOTE(review): the `event` parameter is immediately overwritten by the
    lookup below — confirm whether callers expect it to be honoured.
    """
    event = get_object_or_404(Event, event_slug=event_slug)
    contacts = Contact.objects.filter(event=event)
    context = render_dict or {}
    context.update(event_slug=event_slug, event=event, contacts=contacts)
    return context
class encrypt(crypto_base):
    """Packet wrapper: pads and encrypts a payload, prefixing a user-id
    header, and (for TCP) a CRC32 + length framing header."""

    def __get_tcp_wrap(self, _list: list):
        """Prefix the joined payload with '!IH' (crc32, length) framing."""
        payload = b''.join(_list)
        header = struct.pack('!IH', zlib.crc32(payload), len(payload))
        return header + payload

    def wrap(self, user_id: bytes, byte_data: bytes):
        """Pad `byte_data` to the block size from get_size(), encrypt it, and
        prepend the '!B16s' (pad_length, user_id) header; TCP adds framing."""
        size = len(byte_data)
        pad_length = get_size(size) - size
        parts = [
            struct.pack('!B16s', pad_length, user_id),
            _encrypt(self.key, user_id, byte_data + (b'\x00' * pad_length)),
        ]
        if self.is_tcp:
            return self.__get_tcp_wrap(parts)
        return b''.join(parts)
def query_photos(url, key, nsid, page):
    """Fetch one page of photo records from the Flickr REST API.

    When `url` points at a set/album, queries flickr.photosets.getPhotos for
    that set; otherwise queries flickr.people.getPhotos for `nsid`. Returns
    the list of photo dicts.
    """
    params = {'per_page': 100, 'page': page, 'extras': 'media,url_sq,url_q,url_t,url_s,url_n,url_w,url_m,url_z,url_c,url_l,url_h,url_k,url_3k,url_4k,url_f,url_5k,url_6k,url_o', 'api_key': key, 'format': 'json', 'nojsoncallback': 1}
    match = re.search('/(?:sets|albums)/([^/]+)', url)
    if match:
        set_id = match.group(1)
        params['method'] = 'flickr.photosets.getPhotos'
        params['photoset_id'] = set_id
    else:
        params['method'] = 'flickr.people.getPhotos'
        params['user_id'] = nsid
    # The endpoint literal was corrupted/truncated in the original line;
    # restored to Flickr's REST entry point.
    rs = grabhtml('https://api.flickr.com/services/rest/', params=params)
    rs = json.loads(rs)
    # getPhotos responses nest the list under 'photos'; photoset queries
    # nest it under 'photoset'.
    return (rs.get('photos') or rs.get('photoset'))['photo']
class LoopIR_Dependencies(LoopIR_Do):
    """Dependency analysis over LoopIR statements.

    Starting from ``buf_sym``, computes the transitive set of symbols —
    buffer names and ``(config, field)`` pairs — whose values the buffer's
    contents may depend on, including control dependences contributed by
    enclosing if-conditions and loop bounds.
    """

    def __init__(self, buf_sym, stmts):
        self._buf_sym = buf_sym
        self._lhs = None  # symbol currently being written to, if any
        self._depends = defaultdict(set)  # sym -> syms it directly depends on
        self._alias = dict()  # window name -> underlying buffer symbol
        self._lhs = None
        self._context = set()  # control-dependence symbols currently in scope
        self._control = False  # True while visiting control expressions
        self.do_stmts(stmts)

    def result(self):
        """Return the transitive closure of dependencies of the target buffer
        (worklist expansion over the direct-dependency edges)."""
        depends = self._depends[self._buf_sym]
        new = list(depends)
        done = []
        while True:
            if (len(new) == 0):
                break
            sym = new.pop()
            done.append(sym)
            d = self._depends[sym]
            depends.update(d)
            new.extend((s for s in d if (s not in done)))
        return depends

    def do_s(self, s):
        """Record the dependency edges induced by statement `s`."""
        if isinstance(s, (LoopIR.Assign, LoopIR.Reduce)):
            # Writes depend on the indices, the RHS, and the control context.
            lhs = self._alias.get(s.name, s.name)
            self._lhs = lhs
            self._depends[lhs].add(lhs)
            self._depends[lhs].update(self._context)
            for i in s.idx:
                self.do_e(i)
            self.do_e(s.rhs)
            self._lhs = None
        elif isinstance(s, LoopIR.WriteConfig):
            lhs = (s.config, s.field)
            self._lhs = lhs
            self._depends[lhs].add(lhs)
            self._depends[lhs].update(self._context)
            self.do_e(s.rhs)
            self._lhs = None
        elif isinstance(s, LoopIR.WindowStmt):
            # A window aliases its source buffer; record the alias and make
            # the buffer depend on the windowing expression.
            rhs_buf = self._alias.get(s.rhs.name, s.rhs.name)
            self._alias[s.lhs] = rhs_buf
            self._lhs = rhs_buf
            self._depends[rhs_buf].add(rhs_buf)
            self.do_e(s.rhs)
            self._lhs = None
        elif isinstance(s, LoopIR.If):
            # Symbols read by the condition become control context for the
            # body and orelse branches.
            old_context = self._context
            self._context = old_context.copy()
            self._control = True
            self.do_e(s.cond)
            self._control = False
            self.do_stmts(s.body)
            self.do_stmts(s.orelse)
            self._context = old_context
        elif isinstance(s, LoopIR.For):
            # Loop bounds feed the iteration variable, which becomes control
            # context for the loop body.
            old_context = self._context
            self._context = old_context.copy()
            self._control = True
            self._lhs = s.iter
            self._depends[s.iter].add(s.iter)
            self.do_e(s.lo)
            self.do_e(s.hi)
            self._lhs = None
            self._control = False
            self.do_stmts(s.body)
            self._context = old_context
        elif isinstance(s, LoopIR.Call):
            def process_reads():
                # Attribute the callee's possible reads (numeric args that the
                # effect says may be read, plus config reads) to self._lhs.
                for (faa, aa) in zip(s.f.args, s.args):
                    if faa.type.is_numeric():
                        maybe_read = self.analyze_eff(s.f.eff, faa.name, read=True)
                    else:
                        maybe_read = True
                    if maybe_read:
                        self.do_e(aa)
                for ce in s.f.eff.config_reads:
                    name = (ce.config, ce.field)
                    if self._lhs:
                        self._depends[self._lhs].add(name)
            # Every argument/config field the callee may write picks up a
            # dependency on everything the callee may read.
            for (fa, a) in zip(s.f.args, s.args):
                maybe_write = (fa.type.is_numeric() and self.analyze_eff(s.f.eff, fa.name, write=True))
                if maybe_write:
                    name = self._alias.get(a.name, a.name)
                    self._lhs = name
                    self._depends[name].add(name)
                    self._depends[name].update(self._context)
                    process_reads()
                    self._lhs = None
            for ce in s.f.eff.config_writes:
                name = (ce.config, ce.field)
                self._lhs = name
                self._depends[name].add(name)
                self._depends[name].update(self._context)
                process_reads()
                self._lhs = None
        elif isinstance(s, (LoopIR.Pass, LoopIR.Alloc)):
            pass
        else:
            assert False, 'bad case'

    def analyze_eff(self, eff, buf, write=False, read=False):
        """Return True when effect `eff` may read/write `buf` (reductions
        count as both a read and a write)."""
        if read:
            if any(((es.buffer == buf) for es in eff.reads)):
                return True
        if write:
            if any(((es.buffer == buf) for es in eff.writes)):
                return True
        if (read or write):
            if any(((es.buffer == buf) for es in eff.reduces)):
                return True
        return False

    def do_e(self, e):
        """Record dependency edges induced by expression `e` (a read)."""
        if isinstance(e, (LoopIR.Read, LoopIR.WindowExpr)):
            def visit_idx(e):
                if isinstance(e, LoopIR.Read):
                    for i in e.idx:
                        self.do_e(i)
                else:
                    for w in e.idx:
                        if isinstance(w, LoopIR.Interval):
                            self.do_e(w.lo)
                            self.do_e(w.hi)
                        else:
                            self.do_e(w.pt)
            name = self._alias.get(e.name, e.name)
            if self._lhs:
                self._depends[self._lhs].add(name)
            if self._control:
                # Reads inside a condition/bound become control context.
                self._context.add(name)
            visit_idx(e)
        elif isinstance(e, LoopIR.ReadConfig):
            name = (e.config, e.field)
            if self._lhs:
                self._depends[self._lhs].add(name)
            if self._control:
                self._context.add(name)
        else:
            super().do_e(e)

    def do_t(self, t):
        """Types carry no dependencies."""
        pass

    def do_eff(self, eff):
        """Effects are consumed via analyze_eff; nothing to traverse here."""
        pass
class Function_Definition(Definition):
    """AST node for a MATLAB function definition: the 'function' keyword,
    a signature, optional argument-validation blocks, a statement body and
    any nested function definitions."""

    def __init__(self, t_fun, n_sig, l_validation, n_body, l_nested):
        super().__init__()
        # Contract checks: t_fun is the 'function' keyword token, n_sig a
        # signature node, l_validation/l_nested homogeneous lists.
        assert isinstance(t_fun, MATLAB_Token)
        assert ((t_fun.kind == 'KEYWORD') and (t_fun.value == 'function'))
        assert isinstance(n_sig, Function_Signature)
        assert isinstance(l_validation, list)
        for n in l_validation:
            assert isinstance(n, Special_Block)
        assert isinstance(n_body, Sequence_Of_Statements)
        assert isinstance(l_nested, list)
        for n in l_nested:
            assert isinstance(n, Function_Definition)
        self.t_fun = t_fun
        self.t_fun.set_ast(self)
        self.t_end = None  # matching 'end' token, attached via set_end()
        self.n_sig = n_sig
        self.n_sig.set_parent(self)
        self.l_validation = l_validation
        for n_block in self.l_validation:
            n_block.set_parent(self)
        self.n_body = n_body
        self.n_body.set_parent(self)
        self.l_nested = l_nested
        for n_fdef in self.l_nested:
            n_fdef.set_parent(self)

    def loc(self):
        """Best source location: the signature name when present, otherwise
        the 'function' keyword itself."""
        if self.n_sig.n_name:
            return self.n_sig.loc()
        else:
            return self.t_fun.location

    def set_parent(self, n_parent):
        """Functions may live in a compilation unit, a methods block, or be
        nested inside another function."""
        assert isinstance(n_parent, (Compilation_Unit, Special_Block, Function_Definition))
        super().set_parent(n_parent)

    def set_end(self, t_end):
        """Attach the matching 'end' keyword token."""
        assert isinstance(t_end, MATLAB_Token)
        assert ((t_end.kind == 'KEYWORD') and (t_end.value == 'end'))
        self.t_end = t_end
        self.t_end.set_ast(self)

    def debug_parse_tree(self):
        """Dump this function (and, recursively, nested functions) as a
        Graphviz .dot file and render it to PDF."""
        dotpr((('fun_' + str(self.n_sig.n_name)) + '.dot'), self)
        subprocess.run(['dot', '-Tpdf', (('fun_' + str(self.n_sig.n_name)) + '.dot'), (('-ofun_' + str(self.n_sig.n_name)) + '.pdf')], check=False)
        for n_function in self.l_nested:
            n_function.debug_parse_tree()

    def visit(self, parent, function, relation):
        """Standard visitor traversal: signature, validation blocks, body,
        then nested functions."""
        self._visit(parent, function, relation)
        self.n_sig.visit(self, function, 'Signature')
        self._visit_list(self.l_validation, function, 'Validation')
        self.n_body.visit(self, function, 'Body')
        self._visit_list(self.l_nested, function, 'Nested')
        self._visit_end(parent, function, relation)

    def sty_check_naming(self, mh, cfg):
        """Run naming-style checks on the signature and nested functions."""
        assert isinstance(mh, Message_Handler)
        assert isinstance(cfg, Config)
        self.n_sig.sty_check_naming(mh, cfg)
        for n_function in self.l_nested:
            n_function.sty_check_naming(mh, cfg)

    def is_class_method(self):
        """True when directly inside a methods block, or when this is the
        first function of a separate function file (class method file)."""
        return (isinstance(self.n_parent, Special_Block) or (isinstance(self.n_parent, Function_File) and self.n_parent.is_separate and (self.n_parent.l_functions[0] == self)))

    def get_local_name(self):
        """Qualified '::'-separated name of this function relative to its
        enclosing file/class/function."""
        if isinstance(self.n_parent, Function_File):
            return str(self.n_sig.n_name)
        elif isinstance(self.n_parent, Compilation_Unit):
            return ('%s::%s' % (self.n_parent.name, str(self.n_sig.n_name)))
        elif isinstance(self.n_parent, Special_Block):
            return ('%s::%s' % (self.n_parent.n_parent.get_local_name(), str(self.n_sig.n_name)))
        elif isinstance(self.n_parent, Function_Definition):
            return ('%s::%s' % (self.n_parent.get_local_name(), str(self.n_sig.n_name)))
        else:
            raise ICE(('logic error: parent of fn is %s' % self.n_parent.__class__.__name__))
class OptionPlotoptionsTimelineSonificationDefaultspeechoptionsActivewhen(Options):
    """Option proxy for plotOptions.timeline.sonification.defaultSpeechOptions.activeWhen.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name; as written the later def wins — property decorators are presumably
    applied elsewhere/not visible here.
    """

    def crossingDown(self):
        """Getter for `crossingDown` (no default)."""
        return self._config_get(None)

    def crossingDown(self, num: float):
        """Setter for `crossingDown`."""
        self._config(num, js_type=False)

    def crossingUp(self):
        """Getter for `crossingUp` (no default)."""
        return self._config_get(None)

    def crossingUp(self, num: float):
        """Setter for `crossingUp`."""
        self._config(num, js_type=False)

    def max(self):
        """Getter for `max` (no default)."""
        return self._config_get(None)

    def max(self, num: float):
        """Setter for `max`."""
        self._config(num, js_type=False)

    def min(self):
        """Getter for `min` (no default)."""
        return self._config_get(None)

    def min(self, num: float):
        """Setter for `min`."""
        self._config(num, js_type=False)

    def prop(self):
        """Getter for `prop` (no default)."""
        return self._config_get(None)

    def prop(self, text: str):
        """Setter for `prop`."""
        self._config(text, js_type=False)
class MainDialog(QtWidgets.QDialog, AnimaDialogBase):
    """Dialog for creating Shot tasks.

    ``shot_child_task_defaults`` maps the default child-task names to the
    schedule settings used when populating a new shot.
    """
    shot_child_task_defaults = {'Animation': {'schedule_timing': 1, 'schedule_unit': 'd'}, 'Camera': {'schedule_timing': 10, 'schedule_unit': 'min', 'schedule_model': 'duration'}, 'Comp': {'schedule_timing': 1, 'schedule_unit': 'h'}, 'Lighting': {'schedule_timing': 3, 'schedule_unit': 'h'}, 'Mocap': {'schedule_timing': 1, 'schedule_unit': 'h'}, 'Plate': {'schedule_timing': 10, 'schedule_unit': 'min', 'schedule_model': 'duration'}, 'Previs': {'schedule_timing': 10, 'schedule_unit': 'min', 'schedule_model': 'duration', 'type_name': 'Shot Previs'}, 'Scene Assembly': {'schedule_timing': 1, 'schedule_unit': 'h'}}

    def __init__(self, parent=None, project=None, parent_task=None):
        # `project` and `parent_task` are accepted but not used in this
        # setup path (presumably consumed elsewhere — confirm).
        super(MainDialog, self).__init__(parent)
        self._setup()

    def _setup(self):
        """Build the static UI: title label, separator line and the
        OK/Cancel button box wired to accept/reject."""
        self.setWindowTitle('Create Shot Dialog')
        self.resize(550, 790)
        self.vertical_layout = QtWidgets.QVBoxLayout(self)
        self.dialog_label = QtWidgets.QLabel(self)
        self.dialog_label.setText('Create Shot')
        self.dialog_label.setStyleSheet('color: rgb(71, 143, 202);font: 18pt;')
        self.vertical_layout.addWidget(self.dialog_label)
        line = QtWidgets.QFrame(self)
        line.setFrameShape(QtWidgets.QFrame.HLine)
        line.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.vertical_layout.addWidget(line)
        self.buttonBox = QtWidgets.QDialogButtonBox(self)
        self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
        self.buttonBox.setStandardButtons((QtWidgets.QDialogButtonBox.Cancel | QtWidgets.QDialogButtonBox.Ok))
        self.vertical_layout.addWidget(self.buttonBox)
        # Old-style (string-based) signal connections — presumably required
        # by the Qt wrapper in use; confirm before modernising.
        QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL('accepted()'), self.accept)
        QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL('rejected()'), self.reject)
class OptionSeriesPyramidSonificationTracksMappingNoteduration(Options):
    """Accessors for the Highcharts `noteDuration` mapping sub-options.

    Each option is a read/write property: the getter reads the stored
    configuration value (``None`` meaning "use the Highcharts default")
    and the setter records the value in the underlying config dict.

    NOTE(fix): the getter/setter pairs had lost their ``@property`` /
    ``@<name>.setter`` decorators, so each setter ``def`` silently
    shadowed its getter. The decorators are restored here.
    """

    @property
    def mapFunction(self):
        """Mapping function for the note duration ('linear'/'logarithmic')."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Point property the note duration is mapped to."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Maximum mapped note duration."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Minimum mapped note duration."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Data range the mapping is computed within."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(fix): the decorator had been truncated to a bare `.parametrize(...)`,
# which is a syntax error; restored to the full `@pytest.mark.parametrize`.
@pytest.mark.parametrize(
    'type_str, expected_type',
    (
        ('uint', BasicType('uint', 256)),
        ('uint256[]', BasicType('uint', 256, ((),))),
        ('function', BasicType('bytes', 24)),
        ('fixed', BasicType('fixed', (128, 18))),
        ('ufixed', BasicType('ufixed', (128, 18))),
    ),
)
def test_normalizing_and_parsing_works(type_str, expected_type):
    """Round-trip: normalizing then parsing yields the canonical BasicType."""
    assert parse(normalize(type_str)) == expected_type
class Candidate:
    """A candidate structural-variant call assembled from member signatures.

    Holds the genomic source region, the supporting evidence (`members`),
    a confidence `score`, and positional-uncertainty statistics
    (`std_span`, `std_pos`). `self.type` is left as ``None`` here and is
    expected to be set to the SV type string by subclasses/callers.
    """

    def __init__(self, source_contig, source_start, source_end, members,
                 score, std_span, std_pos, support_fraction='.',
                 genotype='./.', ref_reads=None, alt_reads=None):
        self.source_contig = source_contig
        self.source_start = source_start
        self.source_end = source_end
        self.members = members
        self.score = score
        self.std_span = std_span
        self.std_pos = std_pos
        # SV type string; filled in by subclasses / callers.
        self.type = None
        self.support_fraction = support_fraction
        self.genotype = genotype
        self.ref_reads = ref_reads
        self.alt_reads = alt_reads

    def get_source(self):
        """Return the (contig, start, end) source region tuple."""
        return (self.source_contig, self.source_start, self.source_end)

    def get_key(self):
        """Return a sort/merge key: (type, contig, end)."""
        (contig, start, end) = self.get_source()
        return (self.type, contig, end)

    def downstream_distance_to(self, candidate2):
        """Return the gap (in bp) from this candidate to `candidate2`.

        Only meaningful for two candidates of the same type on the same
        contig; otherwise infinity. Overlapping candidates yield 0.
        """
        (this_contig, this_start, this_end) = self.get_source()
        (other_contig, other_start, other_end) = candidate2.get_source()
        if (self.type == candidate2.type) and (this_contig == other_contig):
            return max(0, other_start - this_end)
        else:
            return float('inf')

    def get_std_span(self, ndigits=2):
        """Return the span std-dev rounded to `ndigits`, or '.' if unknown.

        Uses an explicit ``is not None`` check (fix): the previous
        truthiness test reported a legitimate std-dev of 0.0 as missing.
        """
        if self.std_span is not None:
            return round(self.std_span, ndigits)
        return '.'

    def get_std_pos(self, ndigits=2):
        """Return the position std-dev rounded to `ndigits`, or '.' if unknown.

        Same ``is not None`` fix as :meth:`get_std_span`.
        """
        if self.std_pos is not None:
            return round(self.std_pos, ndigits)
        return '.'

    def get_bed_entry(self):
        """Return a tab-separated BED line describing this candidate."""
        return '{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}'.format(
            self.source_contig,
            self.source_start,
            self.source_end,
            '{0};{1};{2}'.format(self.type, self.get_std_span(),
                                 self.get_std_pos()),
            self.score,
            '.',
            ('[' + ']['.join([ev.as_string('|') for ev in self.members])
             + ']'))

    def get_vcf_entry(self):
        """Subclasses must implement VCF serialization."""
        raise NotImplementedError
class OptionPlotoptionsTreemapStatesInactive(Options):
    """Accessors for the Highcharts treemap `states.inactive` options.

    NOTE(fix): the getter/setter pairs had lost their ``@property`` /
    ``@<name>.setter`` decorators, so each setter ``def`` silently
    shadowed its getter. The decorators are restored here.
    """

    @property
    def animation(self) -> 'OptionPlotoptionsTreemapStatesInactiveAnimation':
        """Animation settings sub-object for the inactive state."""
        return self._config_sub_data(
            'animation', OptionPlotoptionsTreemapStatesInactiveAnimation)

    @property
    def enabled(self):
        """Whether the inactive state is enabled (default: True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def opacity(self):
        """Opacity of inactive points (default: 0.2)."""
        return self._config_get(0.2)

    @opacity.setter
    def opacity(self, num: float):
        self._config(num, js_type=False)
class BrownieMiddlewareABC(ABC):
    """Base class for Brownie web3 middlewares.

    Subclasses implement `get_layer` to declare where (if at all) the
    middleware sits in the stack for a given network type, and
    `process_request` to handle RPC calls.
    """

    def __init__(self, w3: Web3) -> None:
        self.w3 = w3

    # NOTE(fix): `get_layer` takes `cls` as its first parameter but had
    # lost its `@classmethod` decorator; restored so it can be called on
    # the class before any instance exists.
    @classmethod
    def get_layer(cls, w3: Web3, network_type: str) -> Optional[int]:
        """Return the middleware layer for this network, or None to skip."""
        raise NotImplementedError

    def __call__(self, make_request: Callable, w3: Web3) -> Callable:
        # Bind `make_request` so web3 can invoke the middleware with
        # just (method, params).
        return functools.partial(self.process_request, make_request)

    def process_request(self, make_request: Callable, method: str,
                        params: List) -> Dict:
        """Handle an RPC request; subclasses must implement."""
        raise NotImplementedError

    def uninstall(self) -> None:
        """Hook for cleanup when the middleware is removed; no-op by default."""
        pass
class MongoClientModel(Document):
    """MongoDB document describing a registered client."""

    # Auto-incrementing integer primary key.
    id = SequenceField(primary_key=True)
    namespace = StringField()
    sender = StringField()
    create_time = LongField()
    # Soft-delete marker: records are flagged rather than removed.
    is_deleted = BooleanField(default=False)

    def __repr__(self):
        fields = (self.id, self.namespace, self.sender, self.is_deleted)
        return '<Document Client ({}, {}, {}, {})>'.format(*fields)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.