code stringlengths 281 23.7M |
|---|
def buildFKeyStmt(conn, tableName, key):
    """Recursively resolve a foreign key into a list of join statements.

    For each column of the referenced table's unique index, either recurse
    (when the column is itself a foreign key) or emit a join descriptor
    linking tableName to the referenced table/column.
    """
    unique_columns = getUniqueIndex(conn, key['table'])
    foreign_keys = getFKeyList(conn, key['table'])
    statements = []
    for column in unique_columns:
        nested_key = search_keyList(foreign_keys, column)
        if nested_key:
            # Column is itself a foreign key: resolve it one level deeper.
            statements.extend(buildFKeyStmt(conn, key['table'], nested_key))
        else:
            statements.append({'table': tableName, 'column': column, 'joinTable': key['table'], 'joinColumn': key['to']})
    return statements
class ExtendedNotifiers(HasTraits):
    """Test fixture: registers listeners of every supported signature arity
    (0 through 4 positional args) on the 'ok' and 'fail' traits, using the
    'extended' dispatch mechanism. The 'fail' listeners all raise, so tests
    can verify exception handling in trait notification dispatch.
    """

    def __init__(self, **traits):
        # One well-behaved listener per supported listener signature.
        ok_listeners = [self.method_listener_0, self.method_listener_1, self.method_listener_2, self.method_listener_3, self.method_listener_4]
        for listener in ok_listeners:
            self._on_trait_change(listener, 'ok', dispatch='extended')
        # One raising listener per supported listener signature.
        fail_listeners = [self.failing_method_listener_0, self.failing_method_listener_1, self.failing_method_listener_2, self.failing_method_listener_3, self.failing_method_listener_4]
        for listener in fail_listeners:
            self._on_trait_change(listener, 'fail', dispatch='extended')
        # Register listeners BEFORE the HasTraits init consumes **traits.
        super().__init__(**traits)
    # Traits assigned as type classes (HasTraits instantiates them).
    ok = Float
    fail = Float
    # One recording list per listener arity (index matches arg count).
    rebind_calls_0 = List
    rebind_calls_1 = List
    rebind_calls_2 = List
    rebind_calls_3 = List
    rebind_calls_4 = List
    # Records which failing listener arity raised.
    exceptions_from = List

    def method_listener_0(self):
        self.rebind_calls_0.append(True)

    def method_listener_1(self, new):
        self.rebind_calls_1.append(new)

    def method_listener_2(self, name, new):
        self.rebind_calls_2.append((name, new))

    def method_listener_3(self, obj, name, new):
        self.rebind_calls_3.append((obj, name, new))

    def method_listener_4(self, obj, name, old, new):
        self.rebind_calls_4.append((obj, name, old, new))

    def failing_method_listener_0(self):
        self.exceptions_from.append(0)
        raise Exception('error')

    def failing_method_listener_1(self, new):
        self.exceptions_from.append(1)
        raise Exception('error')

    def failing_method_listener_2(self, name, new):
        self.exceptions_from.append(2)
        raise Exception('error')

    def failing_method_listener_3(self, obj, name, new):
        self.exceptions_from.append(3)
        raise Exception('error')

    def failing_method_listener_4(self, obj, name, old, new):
        self.exceptions_from.append(4)
        raise Exception('error')
def collect_general_telemetry_data(session, telemetry_user):
    """Gather endpoint/blueprint counts and per-monitor-level tallies,
    then post them as a 'UserSession' telemetry record (if enabled)."""
    endpoints = session.query(Endpoint.name).all()
    blueprints = {get_blueprint(endpoint) for (endpoint,) in endpoints}
    # Count endpoints per monitoring level in a single grouped query.
    counts = session.query(Endpoint.monitor_level, func.count(Endpoint.monitor_level)).group_by(Endpoint.monitor_level).all()
    counts_dict = dict(counts)
    data = {
        'endpoints': len(endpoints),
        'blueprints': len(blueprints),
        'time_initialized': telemetry_user.last_initialized.strftime('%Y-%m-%d %H:%M:%S'),
        'monitoring_0': counts_dict.get(0, 0),
        'monitoring_1': counts_dict.get(1, 0),
        'monitoring_2': counts_dict.get(2, 0),
        'monitoring_3': counts_dict.get(3, 0),
    }
    post_to_back_if_telemetry_enabled('UserSession', **data)
class TestGetRegexCleanedLayoutBlockWithPrefixSuffix():
    """Tests for get_regex_cleaned_layout_block_with_prefix_suffix: the
    regex's first group selects the "cleaned" middle, anything before it
    becomes the prefix block and anything after it the suffix block."""

    def test_should_return_original_block_for_non_matching_regex(self):
        block = LayoutBlock.for_text('test')
        prefix, cleaned, suffix = get_regex_cleaned_layout_block_with_prefix_suffix(block, 'other')
        assert not prefix.lines
        assert cleaned == block
        assert not suffix.lines

    def test_should_return_original_block_for_empty_block(self):
        block = LayoutBlock(lines=[])
        prefix, cleaned, suffix = get_regex_cleaned_layout_block_with_prefix_suffix(block, 'other')
        assert not prefix.lines
        assert cleaned == block
        assert not suffix.lines

    def test_should_return_prefix_for_prefix_match(self):
        block = LayoutBlock.for_text('a b c d')
        prefix, cleaned, suffix = get_regex_cleaned_layout_block_with_prefix_suffix(block, '.*?(b.*)')
        assert prefix.text == 'a'
        assert cleaned.text == 'b c d'
        assert not suffix.lines

    def test_should_return_suffix_for_suffix_match(self):
        block = LayoutBlock.for_text('a b c d')
        prefix, cleaned, suffix = get_regex_cleaned_layout_block_with_prefix_suffix(block, '(.*)d')
        assert not prefix.lines
        assert cleaned.text == 'a b c'
        assert suffix.text == 'd'

    def test_should_return_prefix_suffix_for_prefix_suffix_match(self):
        block = LayoutBlock.for_text('a b c d')
        prefix, cleaned, suffix = get_regex_cleaned_layout_block_with_prefix_suffix(block, 'a(.*)d')
        assert prefix.text == 'a'
        assert cleaned.text == 'b c'
        assert suffix.text == 'd'
def _parse_cache_to_checkpoint_action_required(cache: dict[(str, Any)]) -> CheckpointActionRequired:
    """Rehydrate a CheckpointActionRequired from its cached dict form.

    Missing/empty 'collection' or 'action_needed' entries map to None.
    """
    collection = None
    if cache.get('collection'):
        collection_data = cache['collection']
        collection = CollectionAddress(collection_data['dataset'], collection_data['collection'])
    action_needed = None
    if cache.get('action_needed'):
        action_needed = [ManualAction(**action) for action in cache['action_needed']]
    return CheckpointActionRequired(step=cache['step'], collection=collection, action_needed=action_needed)
class StubEditor(Editor):
    """Minimal Editor implementation backed by a FakeControl, used to
    exercise the Editor base-class machinery in tests.

    When the factory sets ``is_event`` the editor listens to the control's
    'control_event' trait; otherwise it listens to 'control_value'.
    """

    # Whether the control signals via events rather than values.
    is_event = Bool()
    auxiliary_value = Any()
    auxiliary_list = List()
    auxiliary_event = Event()
    # Context-value traits to exercise sync_value plumbing.
    auxiliary_cv_int = Int(sync_value='from')
    auxiliary_cv_float = Float()

    def init(self, parent):
        # Create the fake control and hook up change notification.
        self.control = FakeControl()
        self.is_event = self.factory.is_event
        if self.is_event:
            self.control.on_trait_change(self.update_object, 'control_event')
        else:
            self.control.on_trait_change(self.update_object, 'control_value')
        self.set_tooltip()

    def dispose(self):
        # Remove the exact listener added in init() before base disposal.
        if self.is_event:
            self.control.on_trait_change(self.update_object, 'control_event', remove=True)
        else:
            self.control.on_trait_change(self.update_object, 'control_value', remove=True)
        super().dispose()

    def update_editor(self):
        # Push the editor value (or an event ping) into the control.
        if self.is_event:
            self.control.control_event = True
        else:
            self.control.control_value = self.value

    def update_object(self, new):
        # Pull a control change back into self.value; the _no_update flag
        # guards against control<->editor notification loops.
        if (self.control is not None):
            if (not self._no_update):
                self._no_update = True
                try:
                    self.value = new
                finally:
                    self._no_update = False

    def set_tooltip_text(self, control, text):
        control.tooltip = text

    def set_focus(self, parent):
        # No focus concept for the fake control.
        pass
def pytest_collection_modifyitems(config, items):
    """Filter collected tests down to the selected deployment target.

    Tests requiring a fixture tied to a different target are removed; for
    non-'core' targets, tests that use none of the target fixtures are
    also removed.
    """
    if config.getoption('--network'):
        global _dev_network
        _dev_network = config.getoption('--network')
    target = 'evm' if config.getoption('--evm') else config.getoption('--target')
    for flag, fixture in TARGET_OPTS.items():
        if target == flag:
            continue
        # Snapshot matching items first: we mutate `items` while removing.
        for test in [i for i in items if fixture in i.fixturenames]:
            items.remove(test)
    if target != 'core':
        target_fixtures = set(TARGET_OPTS.values())
        for test in [i for i in items if not target_fixtures.intersection(i.fixturenames)]:
            items.remove(test)
def get_stock_rating_data(debug=False):
    """Compute the overall rating and per-category letter grades for every
    stock in the module-level ``allStockData`` frame.

    Results are appended to the module-level ``data_to_add`` lists, one
    entry per stock, with a progress bar. When ``debug`` is true, stop
    after the first 10 stocks.
    """
    global data_to_add
    global allStockData
    print('\nCalculating Stock Ratings...\n')
    # Category key -> output column name; the last element of each
    # category list is the numeric grade to convert.
    grade_columns = (('Valuation', 'Valuation Grade'), ('Profitability', 'Profitability Grade'), ('Growth', 'Growth Grade'), ('Performance', 'Performance Grade'))
    with tqdm(total=allStockData.shape[0]) as pbar:
        # enumerate replaces the hand-rolled counter; iterrows yields
        # (index, row) pairs and only the row is needed.
        for counter, (_, row) in enumerate(allStockData.iterrows(), start=1):
            ticker, sector = row['Ticker'], row['Sector']
            category_grades = get_category_grades(ticker, sector)
            data_to_add['Overall Rating'].append(get_stock_rating(category_grades))
            for category, column in grade_columns:
                data_to_add[column].append(convert_to_letter_grade(category_grades[category][-1]))
            pbar.update(1)
            # `debug == True` replaced with idiomatic truthiness test.
            if debug and counter == 10:
                break
# NOTE(review): the three bare names below appear to be decorators whose
# '@...' prefixes were lost in extraction (e.g. @login_required /
# @admin_required / @require_POST) -- confirm against the original module;
# as written they are no-op expression statements.
_required
_required
_POST
def settings_form(request, hostname):
    """Handle the server (VM) settings form for create/update/delete.

    hostname is None when creating a new VM. Admins get the full
    AdminServerSettingsForm (action chosen by the form); other users get
    ServerSettingsForm with a forced 'update' action.
    """
    if (hostname is None):
        vm = None
    else:
        vm = get_vm(request, hostname)
    if request.user.is_admin(request):
        action = None
        form = AdminServerSettingsForm(request, vm, request.POST, prefix='opt')
    else:
        action = 'update'
        form = ServerSettingsForm(request, vm, request.POST, prefix='opt')
    if form.is_valid():
        status = form.save(action=action, args=(form.current_hostname,))
        if (status == 204):
            # 204: saved with no content to show.
            return HttpResponse(None, status=status)
        elif (status in (200, 201)):
            if (form.action == 'delete'):
                return redirect('vm_list')
            else:
                # Hostname may have changed during save.
                return redirect('vm_details', hostname=form.saved_hostname)
    # Invalid form or unexpected status: re-render the settings page.
    return render(request, 'gui/vm/settings_form.html', {'settingsform': form, 'vm': vm, 'mb_addon': SIZE_FIELD_MB_ADDON, 'percent_addon': SIZE_FIELD_PERCENT_ADDON})
class IndexWrapperForCfGrib():
    """Picklable wrapper around a cfgrib index.

    The wrapped index is serialised via serialise_state/deserialise_state
    so instances can cross process boundaries; items are wrapped in
    ItemWrapperForCfGrib on access.
    """

    def __init__(self, index=None, ignore_keys=None):
        self.index = index
        # Fixed mutable-default-argument bug: the original `ignore_keys=[]`
        # shared one list across all instances created with the default.
        self.ignore_keys = ignore_keys if ignore_keys is not None else []

    def __getstate__(self):
        # The index is not directly picklable; serialise it explicitly.
        return dict(index=serialise_state(self.index), ignore_keys=self.ignore_keys)

    def __setstate__(self, state):
        self.index = deserialise_state(state['index'])
        self.ignore_keys = state['ignore_keys']

    def __getitem__(self, n):
        return ItemWrapperForCfGrib(self.index[n], ignore_keys=self.ignore_keys)

    def __len__(self):
        return len(self.index)
class OptionSeriesErrorbarSonificationDefaultinstrumentoptionsMappingPan(Options):
    """Auto-generated option proxy for
    series.errorbar.sonification.defaultInstrumentOptions.mapping.pan.

    NOTE(review): every option appears as two defs with the same name
    (getter then setter). Without @property/@<name>.setter decorators the
    second def shadows the first, leaving only the setter bound -- the
    decorators were likely stripped in extraction; confirm against the
    generator output.
    """
    def mapFunction(self):
        # Getter (shadowed by the setter below; see class NOTE).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSonificationGlobalcontexttracksMappingPlaydelay(Options):
    """Auto-generated option proxy for
    sonification.globalContextTracks.mapping.playDelay.

    NOTE(review): every option appears as two defs with the same name
    (getter then setter); without @property/@<name>.setter decorators the
    second def shadows the first. Decorators likely stripped in
    extraction -- confirm against the generator output.
    """
    def mapFunction(self):
        # Getter (shadowed by the setter below; see class NOTE).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def value(self):
        return self._config_get(None)
    def value(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the parenthesised call below is almost certainly a stripped
# decorator (e.g. @given(arrays_OI_O_BI(...)) from hypothesis); as written
# it is a bare no-op expression. Confirm against the original module.
(arrays_OI_O_BI(max_batch=8, max_out=8, max_in=8))
def test_dropout_gives_zero_gradients(W_b_input):
    """With dropout rate 1.0 every activation is dropped, so gradients
    flowing back to the input must be all zeros."""
    model = chain(get_model(W_b_input), Dropout(1.0))
    (nr_batch, nr_out, nr_in) = get_shape(W_b_input)
    (W, b, input_) = W_b_input
    # Force the rate directly on the dropout node as well.
    for node in model.walk():
        if (node.name == 'dropout'):
            node.attrs['dropout_rate'] = 1.0
    (fwd_dropped, finish_update) = model.begin_update(input_)
    grad_BO = numpy.ones((nr_batch, nr_out), dtype='f')
    grad_BI = finish_update(grad_BO)
    assert all(((val == 0.0) for val in grad_BI.flatten()))
class UtilsTest(unittest.TestCase):
    """Tests for xero.utils date parsing helpers.

    NOTE(review): several expectations below are only coherent if the
    current time was frozen/mocked in the original test setup (e.g.
    '/Date(+1200)/' asserted equal to two different datetimes, and
    '/Date()/' equal to a fixed timestamp). The mocking setup is not
    visible in this excerpt -- confirm upstream.
    """
    def test_json_hook(self):
        # MS-JSON '/Date(...)/' values become datetimes.
        example_input = {'date': '/Date(+1300)/'}
        self.assertEqual(xero.utils.json_load_object_hook(example_input), {'date': datetime.datetime(2015, 3, 21, 0, 0)})
        # ISO midnight collapses to a date, ISO with time to a datetime.
        example_input = {'date': '2015-04-29T00:00:00'}
        self.assertEqual(xero.utils.json_load_object_hook(example_input), {'date': datetime.date(2015, 4, 29)})
        example_input = {'date': '2015-04-29T10:21:03'}
        self.assertEqual(xero.utils.json_load_object_hook(example_input), {'date': datetime.datetime(2015, 4, 29, 10, 21, 3)})
        # Non-string and None values pass through untouched.
        self.assertEqual(xero.utils.json_load_object_hook({'date': 6}), {'date': 6})
        self.assertEqual(xero.utils.json_load_object_hook({'date': None}), {'date': None})
        # Unparseable MS-JSON dates are left as-is by the hook.
        example_input = {'date': '/Date(0+0000)/'}
        self.assertEqual(xero.utils.json_load_object_hook(example_input), {'date': '/Date(0+0000)/'})
    def test_parse_date(self):
        self.assertEqual(xero.utils.parse_date('/Date(+1200)/'), datetime.datetime(2015, 5, 7, 0, 0))
        self.assertEqual(xero.utils.parse_date('/Date(+1200)/'), datetime.datetime(2008, 9, 16, 10, 28, 51, 500000))
        self.assertEqual(xero.utils.parse_date('/Date()/'), datetime.datetime(2015, 8, 10, 10, 55, 33, 355000))
        self.assertEqual(xero.utils.parse_date('2015-04-29T00:00:00'), datetime.date(2015, 4, 29))
        self.assertEqual(xero.utils.parse_date('2015-04-29T10:21:03'), datetime.datetime(2015, 4, 29, 10, 21, 3))
        # Unparseable inputs yield None rather than raising.
        self.assertEqual(xero.utils.parse_date('not a date'), None)
        self.assertEqual(xero.utils.parse_date('/Date(0+0000)/'), None)
def get_datasets(root: str):
    """Return (train, validation) CIFAR-10 datasets rooted at *root*.

    Training data gets random crop + horizontal flip augmentation; both
    splits are normalised with the standard CIFAR-10 channel statistics.
    """
    cifar_mean = (0.4914, 0.4822, 0.4465)
    cifar_std = (0.2023, 0.1994, 0.201)
    transform_train = transforms.Compose([
        transforms.RandomCrop(32, padding=4),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize(cifar_mean, cifar_std),
    ])
    transform_cv = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize(cifar_mean, cifar_std),
    ])
    tr_set = torchvision.datasets.CIFAR10(root=root, train=True, download=True, transform=transform_train)
    cv_set = torchvision.datasets.CIFAR10(root=root, train=False, download=True, transform=transform_cv)
    return (tr_set, cv_set)
class conn_tracking_zone(oxm):
    """OXM TLV for the connection-tracking zone field.

    ``type_len`` is the fixed 32-bit OXM header (class/field/length);
    ``value`` is the 16-bit zone id.
    """
    type_len = 119810

    def __init__(self, value=None):
        # Default to zone 0 when no value is given.
        if value is not None:
            self.value = value
        else:
            self.value = 0

    def pack(self):
        """Serialize as the 4-byte type_len header + 16-bit value."""
        packed = []
        packed.append(struct.pack('!L', self.type_len))
        packed.append(struct.pack('!H', self.value))
        # Bug fix: struct.pack returns bytes, so join with a bytes
        # separator -- ''.join(packed) raises TypeError on Python 3
        # (b''.join is equivalent to ''.join on Python 2).
        return b''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a reader positioned at the type_len header.

        Marked @staticmethod: the original def had no self parameter and
        is called as conn_tracking_zone.unpack(reader).
        """
        obj = conn_tracking_zone()
        _type_len = reader.read('!L')[0]
        assert (_type_len == 119810)
        obj.value = reader.read('!H')[0]
        return obj

    def __eq__(self, other):
        if type(self) != type(other):
            return False
        # Idiom: `value != None` replaced; equality reduces to the value.
        return self.value == other.value

    def pretty_print(self, q):
        q.text('conn_tracking_zone {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('value = ')
                q.text(('%#x' % self.value))
            q.breakable()
        q.text('}')
def main():
    """Ansible module entry point for the FortiOS
    log_fortianalyzer_cloud_filter configuration resource (generated
    FortiOS collection pattern)."""
    module_spec = schema_to_module_spec(versioned_schema)
    # This endpoint has no mkey (primary key), so no option is forced required.
    mkeyname = None
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'log_fortianalyzer_cloud_filter': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Copy the schema-derived options into the resource's nested spec.
    for attribute_name in module_spec['options']:
        fields['log_fortianalyzer_cloud_filter']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['log_fortianalyzer_cloud_filter']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=False)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    # Requires an httpapi connection (socket path) to the FortiOS device.
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'log_fortianalyzer_cloud_filter')
        (is_error, has_changed, result, diff) = fortios_log_fortianalyzer_cloud(module.params, fos)
    else:
        module.fail_json(**FAIL_SOCKET_MSG)
    # Version mismatch is reported as a warning, not a failure, unless the
    # operation itself errored.
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
class TyperTest(unittest.TestCase):
    """Exercises SupportedTyper's lazy, cached graph typing.

    The assertions are order-sensitive: `typer[node]` forces analysis of
    that node (and its ancestors), while `node in typer` only checks
    whether the node has already been analysed/cached. Do not reorder.
    """
    def test_typer(self) -> None:
        self.maxDiff = None
        bmg = BMGraphBuilder()
        c0 = bmg.add_constant(0.0)
        c1 = bmg.add_constant(1.0)
        c2 = bmg.add_constant(2.0)
        c3 = bmg.add_constant(3.0)
        norm = bmg.add_normal(c0, c1)
        ns = bmg.add_sample(norm)
        d = bmg.add_division(ns, c2)
        a = bmg.add_addition(d, c3)
        e = bmg.add_exp(a)
        neg = bmg.add_negate(a)
        typer = SupportedTyper()
        # Typing ns forces its ancestors (norm, c0, c1) into the cache...
        self.assertTrue(typer[ns])
        self.assertTrue((norm in typer))
        self.assertTrue((c0 in typer))
        self.assertTrue((c1 in typer))
        # ...but not unrelated/descendant nodes.
        self.assertFalse((d in typer))
        self.assertFalse((a in typer))
        self.assertFalse((c2 in typer))
        self.assertFalse((c3 in typer))
        self.assertFalse((e in typer))
        self.assertFalse((neg in typer))
        # Typing e forces its ancestor chain (a, d, c2, c3) into the cache.
        self.assertFalse(typer[e])
        self.assertTrue((a in typer))
        self.assertTrue((d in typer))
        self.assertTrue((c3 in typer))
        self.assertTrue((c2 in typer))
        self.assertFalse((neg in typer))
        self.assertFalse(typer[d])
        self.assertFalse(typer[a])
        self.assertFalse(typer[e])
        self.assertTrue(typer[c2])
        self.assertTrue(typer[c3])
        # Mutating the graph invalidates a's cached type until update_type.
        c4 = bmg.add_constant(0.5)
        m = bmg.add_multiplication(ns, c4)
        a.inputs[0] = m
        self.assertFalse(typer[a])
        typer.update_type(a)
        self.assertTrue(typer[a])
        self.assertTrue((m in typer))
        self.assertTrue((c4 in typer))
        self.assertFalse((neg in typer))
        self.assertTrue(typer[m])
        self.assertTrue(typer[e])
class EvalTableFilter(TableFilter):
    """Table filter that evaluates a user-supplied Python expression
    against an object's trait values."""

    name = 'Default evaluation filter'
    expression = Expression
    filter_view = Group('expression')

    def filter(self, object):
        """Return True if the expression evaluates truthy for *object*.

        SECURITY NOTE: this eval()s the compiled expression trait with the
        object's trait values as locals -- only use trusted expressions.
        """
        if self._traits is None:
            # Cache the trait name list on first use.
            self._traits = object.trait_names()
        try:
            return eval(self.expression_, globals(), object.trait_get(*self._traits))
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are not swallowed; any evaluation error means "filtered out".
            return False

    def description(self):
        """Return a human-readable description of the filter."""
        return self.expression
def extractOtherworldsinwordWordpressCom(item):
    """Parse a release post from otherworldsinword.wordpress.com.

    Returns None for non-chapter/preview posts, a release message when a
    known tag matches, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag, series name, translation type)
    tagmap = [('riad', 'The Roommates Were Ecstatic to See Their Roommate in a Dress', 'translated'), ('bwc', 'the boss wants to be coaxed', 'translated'), ('the boss wants to be coaxed', 'the boss wants to be coaxed', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class AcceptHeaderVersioning(BaseVersioning):
    """Versioning scheme reading the version from the Accept header's
    media type parameters, e.g. ``Accept: application/json; version=1.0``.
    """

    invalid_version_message = _('Invalid version in "Accept" header.')

    def determine_version(self, request, *args, **kwargs):
        """Return the requested version, or raise NotAcceptable."""
        media_type = MediaType(request.accepted_media_type)
        version = media_type.params.get(self.version_param, self.default_version)
        # Bug fix: the original line read `version = unicode_`, discarding
        # the parsed value; per the upstream DRF implementation the header
        # value must be coerced with unicode_http_header().
        version = unicode_http_header(version)
        if (not self.is_allowed_version(version)):
            raise exceptions.NotAcceptable(self.invalid_version_message)
        return version
# NOTE(review): '.django_db' below is a decorator missing its
# '@pytest.mark' prefix (extraction artifact) -- as written this line is
# invalid; confirm against the original module.
.django_db
def test_award_count_cfo_agencies_only(client, monkeypatch, award_data, helpers, elasticsearch_award_index):
    """Grouping by 'cfo' should collapse award counts to a single result."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_award_index)
    helpers.mock_current_fiscal_year(monkeypatch)
    resp = client.get(url.format(filters='?group=cfo'))
    results = resp.data['results']
    assert (resp.status_code == status.HTTP_200_OK)
    assert (len(results) == 1)
def distance_matrix_new(target, leaf_only=False, topological=False):
    """Return a dict mapping each other node in *target*'s tree to its
    distance from *target*.

    When *topological* is true, every branch counts as 1 (edge count)
    instead of its ``dist`` branch length.

    NOTE(review): temporarily re-roots the tree at *target* and restores
    the previous outgroup before returning; *leaf_only* is currently
    unused -- confirm intent.
    """
    def branch_length(node):
        # Replaces the original `topological or node.dist` truthiness hack
        # (which relied on True == 1) with an explicit conditional.
        return 1 if topological else node.dist

    t = target.root
    real_outgroup = t.children[0]
    # Re-root at target so preorder distances accumulate from it.
    t.set_outgroup(target)
    n2dist = {target: 0}
    for n in target.get_descendants('preorder'):
        n2dist[n] = n2dist[n.up] + branch_length(n)
    # Cross the root: the sister subtree is reached via both branches.
    sister = target.get_sisters()[0]
    n2dist[sister] = branch_length(sister) + branch_length(target)
    for n in sister.get_descendants('preorder'):
        n2dist[n] = n2dist[n.up] + branch_length(n)
    # Restore the original rooting before returning.
    t.set_outgroup(real_outgroup)
    return n2dist
def _partial_quarterly_schedule_for_year(year):
    """Create DABS quarterly submission-window rows for quarters 1-3 only.

    The "partial" in the name is deliberate: quarter 4 is intentionally
    omitted (range(1, 4) yields 1, 2, 3) so tests can exercise an
    incomplete-year schedule. Reveal date lands on the 15th of each
    quarter's last fiscal month (month = quarter * 3, not zero-padded).
    """
    for quarter in range(1, 4):
        baker.make('submissions.DABSSubmissionWindowSchedule', is_quarter=True, submission_fiscal_year=year, submission_fiscal_quarter=quarter, submission_fiscal_month=(quarter * 3), submission_reveal_date=f'{year}-{(quarter * 3)}-15')
def test_caller_with_block_identifier(w3, math_contract):
    """Reading via caller(block_identifier=...) pins the call to a
    historical block: the counter at successive blocks reflects each
    increment transaction separately."""
    start_num = w3.eth.get_block('latest').number
    assert (math_contract.caller.counter() == 0)
    # Mine 5 empty blocks so start_num + 6 / + 7 exist after the two
    # increment transactions below (one block each).
    w3.provider.make_request(method='evm_mine', params=[5])
    math_contract.functions.incrementCounter().transact()
    math_contract.functions.incrementCounter().transact()
    output1 = math_contract.caller(block_identifier=(start_num + 6)).counter()
    output2 = math_contract.caller(block_identifier=(start_num + 7)).counter()
    assert (output1 == 1)
    assert (output2 == 2)
class THBattleUTBootstrap(BootstrapAction):
    """Bootstrap action for the unit-test battle mode: gives every player
    an Alice character, rolls seating, runs a few sync_primitive
    round-trips, then ends the game immediately."""

    game: THBattle

    def __init__(self, params: Dict[(str, Any)], items: Dict[(Player, List[GameItem])], players: BatchList[Player]):
        # BootstrapAction has no source/target; keep both None.
        self.source = self.target = None
        self.params = params
        self.items = items
        self.players = players

    def apply_action(self) -> bool:
        # Local import to avoid a module-level import cycle with thb.characters.
        from thb.characters.alice import Alice
        pl = self.players
        g = self.game
        g.players = BatchList([Alice(p) for p in pl])
        g.roles = {p: PlayerRole(THBUnitRole) for p in pl}
        items = self.items
        # roll() decides seating/turn order; the result is unused here.
        _ = roll(g, pl, items)
        # Exercise client/server synchronisation with a few primitives.
        sync_primitive(1, pl)
        sync_primitive(2, pl)
        sync_primitive(3, pl)
        sync_primitive(4, pl)
        sync_primitive(5, pl)
        sync_primitive(6, pl)
        # Unit-test battle ends immediately; GameEnded is the normal exit.
        raise GameEnded(self.players)
class OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingTime(Options):
    """Auto-generated option proxy for
    plotOptions.bar.sonification.defaultInstrumentOptions.mapping.time.

    NOTE(review): every option appears as two defs with the same name
    (getter then setter); without @property/@<name>.setter decorators the
    second def shadows the first. Decorators likely stripped in
    extraction -- confirm against the generator output.
    """
    def mapFunction(self):
        # Getter (shadowed by the setter below; see class NOTE).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesScatter3dDataMarkerStatesSelect(Options):
    """Auto-generated option proxy for
    series.scatter3d.data.marker.states.select; getters carry the
    Highcharts defaults (enabled=True, fillColor '#cccccc', lineColor
    '#000000', lineWidth 2).

    NOTE(review): every option appears as two defs with the same name
    (getter then setter); without @property/@<name>.setter decorators the
    second def shadows the first. Decorators likely stripped in
    extraction -- confirm against the generator output.
    """
    def enabled(self):
        # Getter (shadowed by the setter below; see class NOTE).
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def fillColor(self):
        return self._config_get('#cccccc')
    def fillColor(self, text: str):
        self._config(text, js_type=False)
    def lineColor(self):
        return self._config_get('#000000')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        return self._config_get(2)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def radius(self):
        return self._config_get(None)
    def radius(self, num: float):
        self._config(num, js_type=False)
class AssetBuildingTestRunner(DiscoverRunner):
    """Django test runner that builds frontend assets and starts the mock
    API server before (functional) test runs."""

    # Set at class-definition (import) time: port range for the live server.
    os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = '0.0.0.0:6080-6580'

    def build_suite(self, test_labels, extra_tests=None, **kwargs):
        # Default to the functional-test package when none was specified.
        if ((os.environ.get('TEST_SUITE', '') == 'functional') and (len(test_labels) == 0)):
            test_labels = ['frontend.tests.functional']
        return super(AssetBuildingTestRunner, self).build_suite(test_labels, extra_tests, **kwargs)

    def setup_test_environment(self):
        # NOTE(review): the right-hand side below is garbled -- the string
        # literal is unterminated (likely originally something like
        # 'http://localhost:%s' % MockApiServer.api_port()). As written this
        # line is a syntax error; restore from the original source.
        os.environ['API_HOST'] = (' % MockApiServer.api_port())
        # Build JS assets unless explicitly skipped or running nonfunctional suite.
        npm_cmd = 'mkdir -p ../../static/js && npm run build'
        if (('SKIP_NPM_BUILD' not in os.environ) and (os.environ.get('TEST_SUITE', '') != 'nonfunctional')):
            subprocess.check_call(npm_cmd, shell=True, cwd=(settings.APPS_ROOT + '/media/js'))
        if (not os.environ.get('BROWSER')):
            os.environ['BROWSER'] = 'firefox:latest:Windows 10'
        if (os.environ.get('TEST_SUITE', '') != 'nonfunctional'):
            MockApiServer().start()
        super(AssetBuildingTestRunner, self).setup_test_environment()
def test_multi_stage_build_batch():
    """build_batches groups requests by model -- and, for stateful models,
    also by source_id -- and a second call with existing_batches extends
    the earlier batches instead of creating duplicates."""
    uid_generator = string_generator()
    request_info1 = dm.RequestInfo(input=np.array(range(10)), parameters={'gif_id': 12})
    request_object1 = dm.RequestObject(uid=next(uid_generator), request_info=request_info1, source_id='internal_123_124', model=stub_model)
    request_info2 = dm.RequestInfo(input=np.array(range(10)), parameters={})
    request_object2 = dm.RequestObject(uid=next(uid_generator), request_info=request_info2, source_id='internal_123_123', model=blur_stateful_model)
    request_info3 = dm.RequestInfo(input=np.array(range(10)), parameters={})
    request_object3 = dm.RequestObject(uid=next(uid_generator), request_info=request_info3, source_id='internal_123_125', model=stub_stateful_model)
    request_info4 = dm.RequestInfo(input=np.array(range(10)), parameters={})
    request_object4 = dm.RequestObject(uid=next(uid_generator), request_info=request_info4, source_id='internal_123_121', model=stub_stateful_model)
    request_info5 = dm.RequestInfo(input=np.array(range(10)), parameters={})
    request_object5 = dm.RequestObject(uid=next(uid_generator), request_info=request_info5, source_id='internal_123_123', model=blur_stateful_model)
    request_info6 = dm.RequestInfo(input=np.array(range(10)), parameters={})
    request_object6 = dm.RequestObject(uid=next(uid_generator), request_info=request_info6, source_id='internal_123_123', model=blur_stateful_model)
    request_info7 = dm.RequestInfo(input=np.array(range(10)), parameters={'gif_id': 12})
    request_object7 = dm.RequestObject(uid=next(uid_generator), request_info=request_info7, source_id='internal_123_127', model=blur_model)
    # Requests 2/5/6 share source_id and stateful model -> one batch;
    # stateful requests 3 and 4 have distinct source_ids -> own batches.
    batch_uid_generator = string_generator()
    expected_value = dm.Batches(batches=[dm.BatchObject(uid=next(batch_uid_generator), requests_info=[request_object1.request_info], model=stub_model, request_objects=[request_object1]), dm.BatchObject(uid=next(batch_uid_generator), requests_info=[request_object2.request_info, request_object5.request_info, request_object6.request_info], source_id=request_object2.source_id, model=blur_stateful_model, request_objects=[request_object2, request_object5, request_object6]), dm.BatchObject(uid=next(batch_uid_generator), requests_info=[request_object3.request_info], model=stub_stateful_model, source_id=request_object3.source_id, request_objects=[request_object3]), dm.BatchObject(uid=next(batch_uid_generator), requests_info=[request_object4.request_info], model=stub_stateful_model, source_id=request_object4.source_id, request_objects=[request_object4]), dm.BatchObject(uid=next(batch_uid_generator), requests_info=[request_object7.request_info], model=blur_model, request_objects=[request_object7])])
    build_batch_generator = string_generator()
    # First stage builds initial batches; second stage extends them.
    result_value = build_batches([request_object1, request_object2, request_object3], uid_generator=build_batch_generator)
    result_value = build_batches([request_object4, request_object5, request_object6, request_object7], existing_batches=result_value, uid_generator=build_batch_generator)
    assert (result_value == expected_value)
class Migration(migrations.Migration):
    """Create the Wagtail form-builder pair: a FormPage page type plus its
    ordered FormField child model (linked via a ParentalKey)."""
    dependencies = [('wagtailcore', '0030_index_on_pagerevision_created_at'), ('home', '0015_auto__0102')]
    # NOTE(review): the to_address help_text below contains a literal line
    # break (likely originally an escaped '\n') -- confirm upstream.
    operations = [migrations.CreateModel(name='FormField', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('sort_order', models.IntegerField(blank=True, editable=False, null=True)), ('label', models.CharField(help_text='The label of the form field', max_length=255, verbose_name='label')), ('field_type', models.CharField(choices=[('singleline', 'Single line text'), ('multiline', 'Multi-line text'), ('email', 'Email'), ('number', 'Number'), ('url', 'URL'), ('checkbox', 'Checkbox'), ('checkboxes', 'Checkboxes'), ('dropdown', 'Drop down'), ('radio', 'Radio buttons'), ('date', 'Date'), ('datetime', 'Date/time')], max_length=16, verbose_name='field type')), ('required', models.BooleanField(default=True, verbose_name='required')), ('choices', models.TextField(blank=True, help_text='Comma separated list of choices. Only applicable in checkboxes, radio and dropdown.', verbose_name='choices')), ('default_value', models.CharField(blank=True, help_text='Default value. Comma separated values supported for checkboxes.', max_length=255, verbose_name='default value')), ('help_text', models.CharField(blank=True, max_length=255, verbose_name='help text'))], options={'abstract': False, 'ordering': ['sort_order']}), migrations.CreateModel(name='FormPage', fields=[('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')), ('to_address', models.CharField(blank=True, help_text='Optional - form submissions will be emailed to these addresses. 
Separate multiple addresses by comma.', max_length=255, verbose_name='to address')), ('from_address', models.CharField(blank=True, max_length=255, verbose_name='from address')), ('subject', models.CharField(blank=True, max_length=255, verbose_name='subject')), ('intro', wagtail.core.fields.RichTextField(blank=True)), ('thank_you_text', wagtail.core.fields.RichTextField(blank=True))], options={'abstract': False}, bases=('wagtailcore.page',)), migrations.AddField(model_name='formfield', name='page', field=modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='form_fields', to='home.FormPage'))]
def test_container_error():
    """Round-trip a ContainerError through its flyte IDL representation and
    verify all fields survive serialisation."""
    obj = errors.ContainerError('code', 'my message', errors.ContainerError.Kind.RECOVERABLE, execution.ExecutionError.ErrorKind.SYSTEM)
    assert (obj.code == 'code')
    assert (obj.message == 'my message')
    assert (obj.kind == errors.ContainerError.Kind.RECOVERABLE)
    # origin 2 corresponds to ExecutionError.ErrorKind.SYSTEM.
    assert (obj.origin == 2)
    obj2 = errors.ContainerError.from_flyte_idl(obj.to_flyte_idl())
    assert (obj == obj2)
    assert (obj2.code == 'code')
    assert (obj2.message == 'my message')
    assert (obj2.kind == errors.ContainerError.Kind.RECOVERABLE)
    assert (obj2.origin == 2)
# NOTE(review): the parenthesised string below is a registry decorator
# missing its '@registry.reg' prefix (extraction artifact) -- as written it
# is a bare no-op expression. Confirm against the original module.
('cuda.grouped_fmha_style_b2b_bmm.func_call')
def grouped_fmha_style_b2b_bmm_gen_function_call(func_attrs, indent='  '):
    """Render the C++ call for a grouped FMHA-style back-to-back batched
    matmul op from its func_attrs (inputs: Q, K, V and optional bias).

    The query input is jagged (variable sequence length per batch entry);
    batch size, max sequence length and offsets come from its jagged
    IntVar.
    """
    assert (len(func_attrs['outputs']) == 1)
    # 3 inputs = Q, K, V; a 4th, when present, is the bias.
    assert (len(func_attrs['inputs']) in (3, 4))
    output_name = func_attrs['outputs'][0]._attrs['name']
    q_name = func_attrs['inputs'][0]._attrs['name']
    k_name = func_attrs['inputs'][1]._attrs['name']
    v_name = func_attrs['inputs'][2]._attrs['name']
    bias_name = 'nullptr'
    if (len(func_attrs['inputs']) == 4):
        bias_name = func_attrs['inputs'][3]._attrs['name']
    q_shape = func_attrs['inputs'][0]._attrs['shape']
    # The leading dim of Q is the jagged IntVar carrying batch/offset info.
    jagged_intvar = q_shape[0]
    batch_size_str = jagged_intvar.batch_dim()._attrs['name']
    if (len(jagged_intvar.jagged_dims()) != 1):
        raise RuntimeError(f'Only support 1 jagged dim in grouped_fmha_style_b2b_bmm for now! Current jagged intvar: {jagged_intvar}')
    # Static max length renders as a literal; dynamic as the dim's name.
    max_seq_length_dim = jagged_intvar.jagged_dims()[0].max_value()
    max_seq_length_str = (str(max_seq_length_dim.value()) if isinstance(max_seq_length_dim, IntImm) else max_seq_length_dim._attrs['name'])
    num_heads_str = q_shape[1]._attrs['name']
    offset = f'{jagged_intvar.offsets_var_name()}.data[0]'
    return FUNC_CALL_TEMPLATE.render(func_name=func_attrs['name'], output=output_name, query=q_name, key=k_name, value=v_name, bias=bias_name, accum_ptr='global_workspace_', batch_size=batch_size_str, seq_length=max_seq_length_str, seq_length_kv=max_seq_length_str, num_heads=num_heads_str, offset=offset, indent=indent)
def _split(ops: Ops, Xp: Padded) -> Tuple[(Padded, Padded)]:
    """Split the feature (last) dimension of a Padded batch in half.

    Returns (left-to-right half, right-to-left half) as two Padded objects
    sharing the original size_at_t/lengths/indices metadata.
    """
    half = (Xp.data.shape[(- 1)] // 2)
    X_l2r = Xp.data[cast(Tuple[(slice, slice)], (..., slice(None, half)))]
    # Bug fix: the original used slice(half), which equals slice(None, half)
    # and therefore duplicated the FIRST half. The right-to-left features
    # live in the SECOND half: slice(half, None).
    X_r2l = Xp.data[cast(Tuple[(slice, slice)], (..., slice(half, None)))]
    return (Padded(X_l2r, Xp.size_at_t, Xp.lengths, Xp.indices), Padded(X_r2l, Xp.size_at_t, Xp.lengths, Xp.indices))
class OptionSeriesPyramidAccessibility(Options):
    """Auto-generated option proxy for series.pyramid.accessibility.

    NOTE(review): the scalar options appear as two defs with the same name
    (getter then setter); without @property/@<name>.setter decorators the
    second def shadows the first. Decorators likely stripped in
    extraction -- confirm against the generator output. The two sub-option
    accessors (keyboardNavigation, point) are single defs and unaffected.
    """
    def description(self):
        # Getter (shadowed by the setter below; see class NOTE).
        return self._config_get(None)
    def description(self, text: str):
        self._config(text, js_type=False)
    def descriptionFormat(self):
        return self._config_get(None)
    def descriptionFormat(self, text: str):
        self._config(text, js_type=False)
    def enabled(self):
        return self._config_get(None)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def exposeAsGroupOnly(self):
        return self._config_get(None)
    def exposeAsGroupOnly(self, flag: bool):
        self._config(flag, js_type=False)
    def keyboardNavigation(self) -> 'OptionSeriesPyramidAccessibilityKeyboardnavigation':
        # Lazily-created nested option object.
        return self._config_sub_data('keyboardNavigation', OptionSeriesPyramidAccessibilityKeyboardnavigation)
    def point(self) -> 'OptionSeriesPyramidAccessibilityPoint':
        return self._config_sub_data('point', OptionSeriesPyramidAccessibilityPoint)
def get_security_info(esp, args):
    """Print a human-readable summary of the chip's security configuration.

    Reads the security-info structure from the connected chip (``esp``) and
    reports secure-boot state, key revocation, flash encryption, cache/JTAG/
    USB restrictions. ``args`` is accepted for CLI-dispatch symmetry but is
    not used here.
    """
    si = esp.get_security_info()
    print()
    title = 'Security Information:'
    print(title)
    print('=' * len(title))
    print('Flags: {:#010x} ({})'.format(si['flags'], bin(si['flags'])))
    print('Key Purposes: {}'.format(si['key_purposes']))
    # Chip ID / API version are only present on newer ROM loaders.
    if si['chip_id'] is not None and si['api_version'] is not None:
        print('Chip ID: {}'.format(si['chip_id']))
        print('API Version: {}'.format(si['api_version']))
    flags = si['flags']
    if get_security_flag_status('SECURE_BOOT_EN', flags):
        print('Secure Boot: Enabled')
        if get_security_flag_status('SECURE_BOOT_AGGRESSIVE_REVOKE', flags):
            print('Secure Boot Aggressive key revocation: Enabled')
        revoked_keys = [
            i
            for i, key in enumerate(
                ['SECURE_BOOT_KEY_REVOKE0', 'SECURE_BOOT_KEY_REVOKE1', 'SECURE_BOOT_KEY_REVOKE2']
            )
            if get_security_flag_status(key, flags)
        ]
        if revoked_keys:
            print('Secure Boot Key Revocation Status:\n')
            for i in revoked_keys:
                # Same output as the original triple-quoted f-string: the
                # message followed by a blank line.
                print(f' Secure Boot Key{i} is Revoked\n')
    else:
        print('Secure Boot: Disabled')
    # Flash encryption is active when FLASH_CRYPT_CNT has an odd number of
    # set bits.
    flash_crypt_cnt = bin(si['flash_crypt_cnt'])
    if (flash_crypt_cnt.count('1') % 2) != 0:
        print('Flash Encryption: Enabled')
    else:
        print('Flash Encryption: Disabled')
    CRYPT_CNT_STRING = 'SPI Boot Crypt Count (SPI_BOOT_CRYPT_CNT)'
    if esp.CHIP_NAME == 'esp32':
        CRYPT_CNT_STRING = 'Flash Crypt Count (FLASH_CRYPT_CNT)'
    print(f"{CRYPT_CNT_STRING}: {si['flash_crypt_cnt']:#x}")
    if get_security_flag_status('DIS_DOWNLOAD_DCACHE', flags):
        print('Dcache in UART download mode: Disabled')
    if get_security_flag_status('DIS_DOWNLOAD_ICACHE', flags):
        print('Icache in UART download mode: Disabled')
    hard_dis_jtag = get_security_flag_status('HARD_DIS_JTAG', flags)
    soft_dis_jtag = get_security_flag_status('SOFT_DIS_JTAG', flags)
    if hard_dis_jtag:
        # BUG FIX: user-facing message had a typo ("Permenantly").
        print('JTAG: Permanently Disabled')
    elif soft_dis_jtag:
        print('JTAG: Software Access Disabled')
    if get_security_flag_status('DIS_USB', flags):
        print('USB Access: Disabled')
class VersionTreeModel(QtGui.QStandardItemModel):
    """Qt item model presenting versions as a (optionally flat) tree.

    Rows are generated by VersionItem.generate_version_row; lazy child
    loading is delegated to the items via canFetchMore/fetchMore.
    """

    def __init__(self, flat_view=False, *args, **kwargs):
        """Initialize an empty model.

        flat_view: when True, items are presented without children.
        """
        QtGui.QStandardItemModel.__init__(self, *args, **kwargs)
        logger.debug('VersionTreeModel.__init__() is started')
        self.root = None
        self.root_versions = []
        self.reference_resolution = None
        self.flat_view = flat_view
        logger.debug('VersionTreeModel.__init__() is finished')

    def populateTree(self, versions):
        """Fill the model with one top-level row per version."""
        logger.debug('VersionTreeModel.populateTree() is started')
        labels = ['Do Update?', 'Thumbnail', 'Task', 'Take', 'Current',
                  'Latest', 'Action', 'Updated By', 'Notes']
        # BUG FIX: the column count was hard-coded to 7 while 9 header labels
        # were supplied; Qt grows the model to the label count anyway, so
        # derive the count from the labels to keep them consistent.
        self.setColumnCount(len(labels))
        self.setHorizontalHeaderLabels(labels)
        self.root_versions = versions
        for version in versions:
            self.appendRow(VersionItem.generate_version_row(None, self, version))
        logger.debug('VersionTreeModel.populateTree() is finished')

    def canFetchMore(self, index):
        """Return whether the item at ``index`` can lazily load children."""
        logger.debug('VersionTreeModel.canFetchMore() is started for index: %s' % index)
        if not index.isValid():
            return_value = False
        else:
            item = self.itemFromIndex(index)
            return_value = item.canFetchMore()
        logger.debug('VersionTreeModel.canFetchMore() is finished for index: %s' % index)
        return return_value

    def fetchMore(self, index):
        """Ask the item at ``index`` to load its children."""
        # BUG FIX: both log messages said "canFetchMore" (copy-paste error).
        logger.debug('VersionTreeModel.fetchMore() is started for index: %s' % index)
        if index.isValid():
            item = self.itemFromIndex(index)
            item.fetchMore()
        logger.debug('VersionTreeModel.fetchMore() is finished for index: %s' % index)

    def hasChildren(self, index):
        """Return whether the item at ``index`` has (or will have) children."""
        logger.debug('VersionTreeModel.hasChildren() is started for index: %s' % index)
        if not index.isValid():
            return_value = len(self.root_versions) > 0
        elif self.flat_view:
            # Flat view never exposes children.
            # BUG FIX: this branch previously returned early, skipping the
            # "finished" log line every other path emits.
            return_value = False
        else:
            item = self.itemFromIndex(index)
            return_value = False
            if item:
                return_value = item.hasChildren()
        logger.debug('VersionTreeModel.hasChildren() is finished for index: %s' % index)
        return return_value
@ti.data_oriented  # NOTE(review): reconstructed from stray "_oriented" token — the decorator was truncated in the original; confirm against upstream
class SPHSolver():
    """SPH fluid solver supporting WCSPH, PCISPH and DFSPH pressure schemes.

    Particle state lives in taichi fields; the grid is used for neighbor
    search. NOTE(review): several methods below read like taichi kernels
    (@ti.kernel) or device funcs (@ti.func) whose decorators appear to have
    been stripped — confirm against upstream before running.
    """
    method_WCSPH = 0
    method_PCISPH = 1
    method_DFSPH = 2
    methods = {'WCSPH': method_WCSPH, 'PCISPH': method_PCISPH, 'DFSPH': method_DFSPH}
    material_fluid = 1
    material_bound = 0
    materials = {'fluid': material_fluid, 'bound': material_bound}
    def __init__(self, res, screen_to_world_ratio, bound, alpha=0.5, dx=0.2, max_num_particles=(2 ** 20), padding=12, max_time=5.0, max_steps=50000, dynamic_allocate=False, adaptive_time_step=True, method=0):
        """Set up solver parameters and allocate all taichi fields.

        NOTE(review): the ``padding`` argument is ignored — padding is
        derived from ``dx`` below; confirm whether that is intentional.
        """
        self.method = method
        self.adaptive_time_step = adaptive_time_step
        self.dim = len(res)
        self.res = res
        self.screen_to_world_ratio = screen_to_world_ratio
        self.dynamic_allocate = dynamic_allocate
        self.padding = (2 * dx)
        self.max_time = max_time
        self.max_steps = max_steps
        self.max_num_particles = max_num_particles
        # Physical constants / numerical parameters.
        self.g = (- 9.8)
        self.alpha = alpha
        self.rho_0 = 1000.0
        self.CFL_v = 0.25
        self.CFL_a = 0.05
        self.df_fac = 1.3
        self.dx = dx
        self.dh = (self.dx * self.df_fac)
        self.dt = ti.field(ti.f32, shape=())
        self.m = ((self.dx ** self.dim) * self.rho_0)
        # Background grid for neighbor search (cell size = support radius).
        self.grid_size = (2 * self.dh)
        self.grid_pos = np.ceil(((np.array(res) / self.screen_to_world_ratio) / self.grid_size)).astype(int)
        self.top_bound = bound[0]
        self.bottom_bound = bound[1]
        self.left_bound = bound[2]
        self.right_bound = bound[3]
        # WCSPH equation-of-state parameters.
        self.gamma = 7.0
        self.c_0 = 200.0
        self.s_f = ti.field(ti.f32, shape=())
        self.it = 0
        self.max_it = 0
        self.sub_max_iteration = 3
        self.rho_err = ti.Vector.field(1, dtype=ti.f32, shape=())
        self.max_rho_err = ti.Vector.field(1, dtype=ti.f32, shape=())
        self.sum_rho_err = ti.field(ti.f32, shape=())
        self.sum_drho = ti.field(ti.f32, shape=())
        # Emission source state used by add_cube/fill.
        self.source_bound = ti.Vector.field(self.dim, dtype=ti.f32, shape=2)
        self.source_velocity = ti.Vector.field(self.dim, dtype=ti.f32, shape=())
        self.source_pressure = ti.Vector.field(1, dtype=ti.f32, shape=())
        self.source_density = ti.Vector.field(1, dtype=ti.f32, shape=())
        # Per-particle state fields (placed below).
        self.particle_num = ti.field(ti.i32, shape=())
        self.particle_positions = ti.Vector.field(self.dim, dtype=ti.f32)
        self.particle_velocity = ti.Vector.field(self.dim, dtype=ti.f32)
        self.particle_positions_new = ti.Vector.field(self.dim, dtype=ti.f32)
        self.particle_velocity_new = ti.Vector.field(self.dim, dtype=ti.f32)
        self.particle_pressure = ti.Vector.field(1, dtype=ti.f32)
        self.particle_pressure_acc = ti.Vector.field(self.dim, dtype=ti.f32)
        self.particle_density = ti.Vector.field(1, dtype=ti.f32)
        self.particle_density_new = ti.Vector.field(1, dtype=ti.f32)
        self.particle_alpha = ti.Vector.field(1, dtype=ti.f32)
        self.particle_stiff = ti.Vector.field(1, dtype=ti.f32)
        self.color = ti.field(dtype=ti.f32)
        self.material = ti.field(dtype=ti.f32)
        self.d_velocity = ti.Vector.field(self.dim, dtype=ti.f32)
        self.d_density = ti.Vector.field(1, dtype=ti.f32)
        self.grid_num_particles = ti.field(ti.i32)
        self.grid2particles = ti.field(ti.i32)
        self.particle_num_neighbors = ti.field(ti.i32)
        self.particle_neighbors = ti.field(ti.i32)
        self.max_num_particles_per_cell = 100
        self.max_num_neighbors = 100
        self.max_v = 0.0
        self.max_a = 0.0
        self.max_rho = 0.0
        self.max_pressure = 0.0
        # Place per-particle fields, either dynamically or densely.
        if dynamic_allocate:
            ti.root.dynamic(ti.i, max_num_particles, (2 ** 18)).place(self.particle_positions, self.particle_velocity, self.particle_pressure, self.particle_density, self.particle_density_new, self.d_velocity, self.d_density, self.material, self.color, self.particle_positions_new, self.particle_velocity_new, self.particle_pressure_acc, self.particle_alpha, self.particle_stiff)
        else:
            ti.root.dense(ti.i, (2 ** 18)).place(self.particle_positions, self.particle_velocity, self.particle_pressure, self.particle_density, self.particle_density_new, self.d_velocity, self.d_density, self.material, self.color, self.particle_positions_new, self.particle_velocity_new, self.particle_pressure_acc, self.particle_alpha, self.particle_stiff)
        if (self.dim == 2):
            grid_snode = ti.root.dense(ti.ij, self.grid_pos)
            grid_snode.place(self.grid_num_particles)
            grid_snode.dense(ti.k, self.max_num_particles_per_cell).place(self.grid2particles)
        else:
            grid_snode = ti.root.dense(ti.ijk, self.grid_pos)
            grid_snode.place(self.grid_num_particles)
            grid_snode.dense(ti.l, self.max_num_particles_per_cell).place(self.grid2particles)
        nb_node = ti.root.dynamic(ti.i, max_num_particles)
        nb_node.place(self.particle_num_neighbors)
        nb_node.dense(ti.j, self.max_num_neighbors).place(self.particle_neighbors)
        # Method-specific time step / CFL configuration.
        if (method == SPHSolver.method_WCSPH):
            self.dt.from_numpy(np.array(((0.1 * self.dh) / self.c_0), dtype=np.float32))
            self.CFL_v = 0.2
            self.CFL_a = 0.2
        if (method == SPHSolver.method_PCISPH):
            self.s_f.from_numpy(np.array(1.0, dtype=np.float32))
            if self.adaptive_time_step:
                self.dt.from_numpy(np.array(0.0015, dtype=np.float32))
            else:
                self.dt.from_numpy(np.array(0.00015, dtype=np.float32))
        if (method == SPHSolver.method_DFSPH):
            self.dt.from_numpy(np.array(((1.0 * self.dh) / self.c_0), dtype=np.float32))
            self.CFL_v = 0.3
            self.CFL_a = 0.05
    def compute_grid_index(self, pos):
        """Map a world-space position to its grid cell index."""
        return (pos / (2 * self.dh)).cast(int)
    def allocate_particles(self):
        """Scatter every particle into its grid cell for neighbor search."""
        for p_i in range(self.particle_num[None]):
            cell = self.compute_grid_index(self.particle_positions[p_i])
            offs = self.grid_num_particles[cell].atomic_add(1)
            self.grid2particles[(cell, offs)] = p_i
    def is_in_grid(self, c):
        """Return 1 if cell index ``c`` lies inside the grid, else 0."""
        res = 1
        for i in ti.static(range(self.dim)):
            res = ti.atomic_and(res, (0 <= c[i] < self.grid_pos[i]))
        return res
    def is_fluid(self, p):
        """Return the material of particle ``p`` (1 = fluid, 0 = boundary)."""
        return self.material[p]
    def search_neighbors(self):
        """Build the per-particle neighbor list from the background grid."""
        for p_i in range(self.particle_num[None]):
            pos_i = self.particle_positions[p_i]
            nb_i = 0
            if ((self.is_fluid(p_i) == 1) or (self.is_fluid(p_i) == 0)):
                cell = self.compute_grid_index(self.particle_positions[p_i])
                for offs in ti.static(ti.grouped(ti.ndrange(*((((- 1), 2),) * self.dim)))):
                    cell_to_check = (cell + offs)
                    if (self.is_in_grid(cell_to_check) == 1):
                        for j in range(self.grid_num_particles[cell_to_check]):
                            p_j = self.grid2particles[(cell_to_check, j)]
                            if ((nb_i < self.max_num_neighbors) and (p_j != p_i) and ((pos_i - self.particle_positions[p_j]).norm() < (self.dh * 2.0))):
                                self.particle_neighbors[(p_i, nb_i)] = p_j
                                nb_i.atomic_add(1)
            self.particle_num_neighbors[p_i] = nb_i
    def cubic_kernel(self, r, h):
        """Cubic spline smoothing kernel W(r, h)."""
        k = (10.0 / ((7.0 * np.pi) * (h ** self.dim)))
        q = (r / h)
        res = ti.cast(0.0, ti.f32)
        if (q <= 1.0):
            res = (k * ((1 - (1.5 * (q ** 2))) + (0.75 * (q ** 3))))
        elif (q < 2.0):
            res = ((k * 0.25) * ((2 - q) ** 3))
        return res
    def cubic_kernel_derivative(self, r, h):
        """Derivative of the cubic spline kernel dW/dr."""
        k = (10.0 / ((7.0 * np.pi) * (h ** self.dim)))
        q = (r / h)
        res = ti.cast(0.0, ti.f32)
        if (q < 1.0):
            res = ((k / h) * (((- 3) * q) + (2.25 * (q ** 2))))
        elif (q < 2.0):
            res = (((- 0.75) * (k / h)) * ((2 - q) ** 2))
        return res
    def rho_derivative(self, ptc_i, ptc_j, r, r_mod):
        """Continuity-equation density change contribution of pair (i, j)."""
        return ((self.m * self.cubic_kernel_derivative(r_mod, self.dh)) * (self.particle_velocity[ptc_i] - self.particle_velocity[ptc_j]).dot((r / r_mod)))
    def p_update(self, rho, rho_0=1000.0, gamma=7.0, c_0=20.0):
        """Tait equation of state: pressure from density (WCSPH)."""
        b = ((rho_0 * (c_0 ** 2)) / gamma)
        return (b * (((rho / rho_0) ** gamma) - 1.0))
    def pressure_force(self, ptc_i, ptc_j, r, r_mod):
        """Symmetric pressure acceleration of particle i due to j."""
        res = ti.Vector([0.0 for _ in range(self.dim)])
        res = (((((- self.m) * ((self.particle_pressure[ptc_i][0] / (self.particle_density[ptc_i][0] ** 2)) + (self.particle_pressure[ptc_j][0] / (self.particle_density[ptc_j][0] ** 2)))) * self.cubic_kernel_derivative(r_mod, self.dh)) * r) / r_mod)
        return res
    def viscosity_force(self, ptc_i, ptc_j, r, r_mod):
        """Artificial viscosity acceleration (only for approaching pairs)."""
        res = ti.Vector([0.0 for _ in range(self.dim)])
        v_xy = (self.particle_velocity[ptc_i] - self.particle_velocity[ptc_j]).dot(r)
        if (v_xy < 0):
            vmu = (((((- 2.0) * self.alpha) * self.dx) * self.c_0) / (self.particle_density[ptc_i][0] + self.particle_density[ptc_j][0]))
            res = (((((((- self.m) * vmu) * v_xy) / ((r_mod ** 2) + (0.01 * (self.dx ** 2)))) * self.cubic_kernel_derivative(r_mod, self.dh)) * r) / r_mod)
        return res
    def simulate_collisions(self, ptc_i, vec, d):
        """Push particle i back along normal ``vec`` by ``d`` and damp velocity."""
        c_f = 0.3
        self.particle_positions[ptc_i] += (vec * d)
        self.particle_velocity[ptc_i] -= (((1.0 + c_f) * self.particle_velocity[ptc_i].dot(vec)) * vec)
        if (self.method == SPHSolver.method_DFSPH):
            self.particle_velocity_new[ptc_i] -= (((1.0 + c_f) * self.particle_velocity_new[ptc_i].dot(vec)) * vec)
    def enforce_boundary(self):
        """Clamp fluid particles back inside the padded domain walls."""
        for p_i in range(self.particle_num[None]):
            if (self.is_fluid(p_i) == 1):
                pos = self.particle_positions[p_i]
                if (pos[0] < (self.left_bound + (0.5 * self.padding))):
                    self.simulate_collisions(p_i, ti.Vector([1.0, 0.0]), ((self.left_bound + (0.5 * self.padding)) - pos[0]))
                if (pos[0] > (self.right_bound - (0.5 * self.padding))):
                    self.simulate_collisions(p_i, ti.Vector([(- 1.0), 0.0]), ((pos[0] - self.right_bound) + (0.5 * self.padding)))
                if (pos[1] > (self.top_bound - self.padding)):
                    self.simulate_collisions(p_i, ti.Vector([0.0, (- 1.0)]), ((pos[1] - self.top_bound) + self.padding))
                if (pos[1] < (self.bottom_bound + self.padding)):
                    self.simulate_collisions(p_i, ti.Vector([0.0, 1.0]), ((self.bottom_bound + self.padding) - pos[1]))
    def wc_compute_deltas(self):
        """WCSPH: accumulate velocity and density derivatives per particle."""
        for p_i in range(self.particle_num[None]):
            pos_i = self.particle_positions[p_i]
            d_v = ti.Vector([0.0 for _ in range(self.dim)])
            d_rho = 0.0
            for j in range(self.particle_num_neighbors[p_i]):
                p_j = self.particle_neighbors[(p_i, j)]
                pos_j = self.particle_positions[p_j]
                r = (pos_i - pos_j)
                r_mod = ti.max(r.norm(), 1e-05)
                d_rho += self.rho_derivative(p_i, p_j, r, r_mod)
                if (self.is_fluid(p_i) == 1):
                    d_v += self.viscosity_force(p_i, p_j, r, r_mod)
                    d_v += self.pressure_force(p_i, p_j, r, r_mod)
            if (self.is_fluid(p_i) == 1):
                val = [0.0 for _ in range((self.dim - 1))]
                val.extend([self.g])
                d_v += ti.Vector(val)
            self.d_velocity[p_i] = d_v
            self.d_density[p_i][0] = d_rho
    def wc_update_time_step(self):
        """WCSPH: symplectic-Euler integration plus EOS pressure update."""
        for p_i in range(self.particle_num[None]):
            if (self.is_fluid(p_i) == 1):
                self.particle_velocity[p_i] += (self.dt[None] * self.d_velocity[p_i])
                self.particle_positions[p_i] += (self.dt[None] * self.particle_velocity[p_i])
            self.particle_density[p_i][0] += (self.dt[None] * self.d_density[p_i][0])
            self.particle_pressure[p_i][0] = self.p_update(self.particle_density[p_i][0], self.rho_0, self.gamma, self.c_0)
    def pci_scaling_factor(self):
        """PCISPH: precompute the pressure-correction scaling factor s_f."""
        grad_sum = ti.Vector([0.0 for _ in range(self.dim)])
        grad_dot_sum = 0.0
        range_num = ti.cast(((self.dh * 2.0) / self.dx), ti.i32)
        half_range = ti.cast((0.5 * range_num), ti.i32)
        for x in range((- half_range), half_range):
            for y in range((- half_range), half_range):
                r = ti.Vector([((- x) * self.dx), ((- y) * self.dx)])
                r_mod = r.norm()
                if ((2.0 * self.dh) > r_mod > 1e-05):
                    grad = ((self.cubic_kernel_derivative(r_mod, self.dh) * r) / r_mod)
                    grad_sum += grad
                    grad_dot_sum += grad.dot(grad)
        beta = (2 * (((self.dt[None] * self.m) / self.rho_0) ** 2))
        self.s_f[None] = (1.0 / ti.max((beta * (grad_sum.dot(grad_sum) + grad_dot_sum)), 1e-06))
    def pci_pos_vel_prediction(self):
        """PCISPH: predict next-step velocities/positions for fluid particles."""
        for p_i in range(self.particle_num[None]):
            if (self.is_fluid(p_i) == 1):
                self.particle_velocity_new[p_i] = (self.particle_velocity[p_i] + (self.dt[None] * (self.d_velocity[p_i] + self.particle_pressure_acc[p_i])))
                self.particle_positions_new[p_i] = (self.particle_positions[p_i] + (self.dt[None] * self.particle_velocity_new[p_i]))
        self.max_rho_err[None][0] = 0.0
    def pci_update_pressure(self):
        """PCISPH: correct pressures from the predicted density error."""
        for p_i in range(self.particle_num[None]):
            pos_i = self.particle_positions_new[p_i]
            d_rho = 0.0
            curr_rho = 0.0
            for j in range(self.particle_num_neighbors[p_i]):
                p_j = self.particle_neighbors[(p_i, j)]
                pos_j = self.particle_positions_new[p_j]
                r = (pos_i - pos_j)
                r_mod = r.norm()
                if (r_mod > 1e-05):
                    d_rho += (self.cubic_kernel_derivative(r_mod, self.dh) * (self.particle_velocity_new[p_i] - self.particle_velocity_new[p_j]).dot((r / r_mod)))
            self.d_density[p_i][0] = d_rho
            self.rho_err[None][0] = ((self.particle_density[p_i][0] + (self.dt[None] * d_rho)) - self.rho_0)
            self.max_rho_err[None][0] = max(abs(self.rho_err[None][0]), self.max_rho_err[None][0])
            self.particle_pressure[p_i][0] += (self.s_f[None] * self.rho_err[None][0])
    def pci_update_pressure_force(self):
        """PCISPH: recompute pressure accelerations from corrected pressures."""
        for p_i in range(self.particle_num[None]):
            pos_i = self.particle_positions_new[p_i]
            d_vp = ti.Vector([0.0 for _ in range(self.dim)])
            for j in range(self.particle_num_neighbors[p_i]):
                p_j = self.particle_neighbors[(p_i, j)]
                pos_j = self.particle_positions_new[p_j]
                r = (pos_i - pos_j)
                r_mod = r.norm()
                if (r_mod > 1e-05):
                    d_vp += self.pressure_force(p_i, p_j, r, r_mod)
            self.particle_pressure_acc[p_i] = d_vp
    def pci_pc_iteration(self):
        """PCISPH: one predict-correct iteration."""
        self.pci_pos_vel_prediction()
        self.pci_update_pressure()
        self.pci_update_pressure_force()
    def pci_compute_deltas(self):
        """PCISPH: non-pressure accelerations; reset pressure state."""
        for p_i in range(self.particle_num[None]):
            pos_i = self.particle_positions[p_i]
            d_v = ti.Vector([0.0 for _ in range(self.dim)])
            for j in range(self.particle_num_neighbors[p_i]):
                p_j = self.particle_neighbors[(p_i, j)]
                pos_j = self.particle_positions[p_j]
                r = (pos_i - pos_j)
                r_mod = r.norm()
                if ((r_mod > 1e-05) and (self.is_fluid(p_i) == 1)):
                    d_v += self.viscosity_force(p_i, p_j, r, r_mod)
            if (self.is_fluid(p_i) == 1):
                val = [0.0 for _ in range((self.dim - 1))]
                val.extend([self.g])
                d_v += ti.Vector(val)
            self.d_velocity[p_i] = d_v
            self.particle_pressure[p_i][0] = 0.0
            self.particle_pressure_acc[p_i] = ti.Vector([0.0 for _ in range(self.dim)])
    def pci_update_time_step(self):
        """PCISPH: integrate positions/velocities/densities."""
        for p_i in range(self.particle_num[None]):
            if (self.is_fluid(p_i) == 1):
                self.particle_velocity[p_i] += (self.dt[None] * (self.d_velocity[p_i] + self.particle_pressure_acc[p_i]))
                self.particle_positions[p_i] += (self.dt[None] * self.particle_velocity[p_i])
            self.particle_density[p_i][0] += (self.dt[None] * self.d_density[p_i][0])
    def df_compute_deltas(self):
        """DFSPH: non-pressure accelerations (viscosity + gravity)."""
        for p_i in range(self.particle_num[None]):
            pos_i = self.particle_positions[p_i]
            d_v = ti.Vector([0.0 for _ in range(self.dim)])
            for j in range(self.particle_num_neighbors[p_i]):
                p_j = self.particle_neighbors[(p_i, j)]
                pos_j = self.particle_positions[p_j]
                r = (pos_i - pos_j)
                r_mod = r.norm()
                if ((r_mod > 0.0001) and (self.is_fluid(p_i) == 1)):
                    d_v += self.viscosity_force(p_i, p_j, r, r_mod)
            if (self.is_fluid(p_i) == 1):
                val = [0.0 for _ in range((self.dim - 1))]
                val.extend([self.g])
                d_v += ti.Vector(val)
            self.d_velocity[p_i] = d_v
    def df_predict_velocities(self):
        """DFSPH: predict velocities from non-pressure accelerations."""
        for p_i in range(self.particle_num[None]):
            if (self.is_fluid(p_i) == 1):
                self.particle_velocity_new[p_i] = (self.particle_velocity[p_i] + (self.dt[None] * self.d_velocity[p_i]))
    def df_correct_density_predict(self):
        """DFSPH density solve: predict density error and per-particle stiffness."""
        for p_i in range(self.particle_num[None]):
            pos_i = self.particle_positions[p_i]
            d_rho = 0.0
            for j in range(self.particle_num_neighbors[p_i]):
                p_j = self.particle_neighbors[(p_i, j)]
                pos_j = self.particle_positions[p_j]
                r = (pos_i - pos_j)
                r_mod = ti.max(r.norm(), 1e-05)
                if (self.is_fluid(p_j) == 1):
                    d_rho += ((self.m * self.cubic_kernel_derivative(r_mod, self.dh)) * (self.particle_velocity_new[p_i] - self.particle_velocity_new[p_j]).dot((r / r_mod)))
                elif (self.is_fluid(p_j) == 0):
                    d_rho += ((self.m * self.cubic_kernel_derivative(r_mod, self.dh)) * self.particle_velocity_new[p_i].dot((r / r_mod)))
            self.particle_density_new[p_i][0] = (self.particle_density[p_i][0] + (self.dt[None] * d_rho))
            err = ti.max(0.0, (self.particle_density_new[p_i][0] - self.rho_0))
            self.particle_stiff[p_i][0] = (err * self.particle_alpha[p_i][0])
            self.sum_rho_err[None] += err
    def df_correct_density_adapt_vel(self):
        """DFSPH density solve: apply stiffness-based velocity correction."""
        for p_i in range(self.particle_num[None]):
            pos_i = self.particle_positions[p_i]
            d_v = ti.Vector([0.0 for _ in range(self.dim)])
            for j in range(self.particle_num_neighbors[p_i]):
                p_j = self.particle_neighbors[(p_i, j)]
                pos_j = self.particle_positions[p_j]
                r = (pos_i - pos_j)
                r_mod = r.norm()
                if (r_mod > 0.0001):
                    if (self.is_fluid(p_j) == 1):
                        d_v += ((((self.m * (self.particle_stiff[p_i][0] + self.particle_stiff[p_j][0])) * self.cubic_kernel_derivative(r_mod, self.dh)) * r) / r_mod)
                    elif (self.is_fluid(p_j) == 0):
                        d_v += ((((self.m * self.particle_stiff[p_i][0]) * self.cubic_kernel_derivative(r_mod, self.dh)) * r) / r_mod)
            self.particle_velocity_new[p_i] += (d_v / ti.max(self.dt[None], 1e-05))
            self.particle_pressure_acc[p_i] = (d_v / ti.max((self.dt[None] * self.dt[None]), 1e-08))
    def df_update_positions(self):
        """DFSPH: advance fluid particle positions with corrected velocities."""
        for p_i in range(self.particle_num[None]):
            if (self.is_fluid(p_i) == 1):
                self.particle_positions[p_i] += (self.dt[None] * self.particle_velocity_new[p_i])
    def df_compute_density_alpha(self):
        """DFSPH: recompute densities and the alpha stiffness factors."""
        for p_i in range(self.particle_num[None]):
            pos_i = self.particle_positions[p_i]
            grad_sum = ti.Vector([0.0 for _ in range(self.dim)])
            grad_square_sum = 0.0
            curr_rho = 0.0
            for j in range(self.particle_num_neighbors[p_i]):
                p_j = self.particle_neighbors[(p_i, j)]
                pos_j = self.particle_positions[p_j]
                r = (pos_i - pos_j)
                r_mod = r.norm()
                if (r_mod > 0.0001):
                    grad_val = (((self.m * self.cubic_kernel_derivative(r_mod, self.dh)) * r) / r_mod)
                    grad_sum += grad_val
                    if (self.is_fluid(p_j) == 1):
                        grad_square_sum += grad_val.dot(grad_val)
                    curr_rho += (self.m * self.cubic_kernel(r_mod, self.dh))
            self.particle_density[p_i][0] = curr_rho
            self.particle_alpha[p_i][0] = ((- 1.0) / ti.max((grad_sum.dot(grad_sum) + grad_square_sum), 1e-06))
    def df_correct_divergence_compute_drho(self):
        """DFSPH divergence solve: compute density-change rate and stiffness."""
        for p_i in range(self.particle_num[None]):
            pos_i = self.particle_positions[p_i]
            d_rho = 0.0
            for j in range(self.particle_num_neighbors[p_i]):
                p_j = self.particle_neighbors[(p_i, j)]
                pos_j = self.particle_positions[p_j]
                r = (pos_i - pos_j)
                r_mod = r.norm()
                if (r_mod > 0.0001):
                    if (self.is_fluid(p_j) == 1):
                        d_rho += ((self.m * (self.particle_velocity_new[p_i] - self.particle_velocity_new[p_j]).dot((r / r_mod))) * self.cubic_kernel_derivative(r_mod, self.dh))
                    elif (self.is_fluid(p_j) == 0):
                        d_rho += ((self.m * self.particle_velocity_new[p_i].dot((r / r_mod))) * self.cubic_kernel_derivative(r_mod, self.dh))
            self.d_density[p_i][0] = ti.max(d_rho, 0.0)
            # Particles below rest density are not corrected.
            if (((self.particle_density[p_i][0] + (self.dt[None] * self.d_density[p_i][0])) < self.rho_0) and (self.particle_density[p_i][0] < self.rho_0)):
                self.d_density[p_i][0] = 0.0
            self.particle_stiff[p_i][0] = (self.d_density[p_i][0] * self.particle_alpha[p_i][0])
            self.sum_drho[None] += self.d_density[p_i][0]
    def df_correct_divergence_adapt_vel(self):
        """DFSPH divergence solve: apply the stiffness velocity correction."""
        for p_i in range(self.particle_num[None]):
            pos_i = self.particle_positions[p_i]
            d_v = ti.Vector([0.0 for _ in range(self.dim)])
            for j in range(self.particle_num_neighbors[p_i]):
                p_j = self.particle_neighbors[(p_i, j)]
                pos_j = self.particle_positions[p_j]
                r = (pos_i - pos_j)
                r_mod = r.norm()
                if (r_mod > 1e-05):
                    if (self.is_fluid(p_j) == 1):
                        d_v += ((((self.m * (self.particle_stiff[p_i][0] + self.particle_stiff[p_j][0])) * self.cubic_kernel_derivative(r_mod, self.dh)) * r) / r_mod)
                    elif (self.is_fluid(p_j) == 0):
                        d_v += ((((self.m * self.particle_stiff[p_i][0]) * self.cubic_kernel_derivative(r_mod, self.dh)) * r) / r_mod)
            self.particle_velocity_new[p_i] += d_v
            self.particle_pressure_acc[p_i] = (d_v / self.dt[None])
    def df_update_velocities(self):
        """DFSPH: commit the corrected velocities."""
        for p_i in range(self.particle_num[None]):
            if (self.is_fluid(p_i) == 1):
                self.particle_velocity[p_i] = self.particle_velocity_new[p_i]
    def sim_info(self, output=False):
        """Print static simulation info.

        NOTE(review): self.x_min/x_max/y_min/y_max are not set in __init__ —
        this will raise AttributeError unless they are assigned elsewhere;
        confirm.
        """
        print('Time step: ', self.dt[None])
        print(('Domain: (%s, %s, %s, %s)' % (self.x_min, self.x_max, self.y_min, self.y_max)))
        print(('Fluid area: (%s, %s, %s, %s)' % (self.left_bound, self.right_bound, self.bottom_bound, self.top_bound)))
        print('Grid: ', self.grid_pos)
    def sim_info_realtime(self, frame, t, curr_start, curr_end, total_start):
        """Print per-frame progress and solver statistics."""
        print(('Step: %d, physics time: %s, progress: %s %%, time used: %s, total time used: %s' % (frame, t, (100 * np.max([(t / self.max_time), (frame / self.max_steps)])), (curr_end - curr_start), (curr_end - total_start))))
        print(('Max velocity: %s, Max acceleration: %s, Max density: %s, Max pressure: %s' % (self.max_v, self.max_a, self.max_rho, self.max_pressure)))
        if (self.method == SPHSolver.methods['PCISPH']):
            print(('Max iter: %d, Max density variation: %s' % (self.max_it, self.max_rho_err[None][0])))
        if (self.method == SPHSolver.methods['DFSPH']):
            print(('Max iter: %d, Max density variation: %s' % (self.it, (self.sum_rho_err[None] / self.particle_num[None]))))
            print(('Max iter: %d, Max divergence variation: %s' % (self.it, (self.sum_drho[None] / self.particle_num[None]))))
        print('Adaptive time step: ', self.dt[None])
    def adaptive_step(self):
        """Recompute dt from CFL conditions when adaptive stepping is on."""
        total_num = self.particle_num[None]
        self.max_v = np.max(np.linalg.norm(self.particle_velocity.to_numpy()[:total_num], 2, axis=1))
        dt_cfl = ((self.CFL_v * self.dh) / self.max_v)
        self.max_a = np.max(np.linalg.norm((self.d_velocity.to_numpy() + self.particle_pressure_acc.to_numpy())[:total_num], 2, axis=1))
        dt_f = (self.CFL_a * np.sqrt((self.dh / self.max_a)))
        if (self.adaptive_time_step and (self.method == SPHSolver.method_DFSPH)):
            self.dt[None] = np.min([dt_cfl, dt_f])
            return
        self.max_rho = np.max(self.particle_density.to_numpy()[:total_num])
        self.max_pressure = np.max(self.particle_pressure.to_numpy()[:total_num])
        dt_a = ((0.2 * self.dh) / (self.c_0 * np.sqrt(((self.max_rho / self.rho_0) ** self.gamma))))
        if (self.adaptive_time_step and (self.method == SPHSolver.method_WCSPH)):
            self.dt[None] = np.min([dt_cfl, dt_f, dt_a])
        if (self.adaptive_time_step and (self.method == SPHSolver.method_PCISPH)):
            self.dt[None] = np.min([dt_cfl, dt_f])
    def step(self, frame, t, total_start):
        """Advance the simulation by one time step; returns the dt used."""
        curr_start = time.process_time()
        self.grid_num_particles.fill(0)
        self.particle_neighbors.fill((- 1))
        self.allocate_particles()
        self.search_neighbors()
        if (self.method == SPHSolver.methods['WCSPH']):
            self.wc_compute_deltas()
            self.wc_update_time_step()
        elif (self.method == SPHSolver.methods['PCISPH']):
            self.pci_compute_deltas()
            self.pci_scaling_factor()
            self.it = 0
            self.max_it = 0
            # Predict-correct loop until density error is small enough.
            while ((self.max_rho_err[None][0] >= (0.01 * self.rho_0)) or (self.it < self.sub_max_iteration)):
                self.pci_pc_iteration()
                self.it += 1
                self.max_it += 1
                self.max_it = max(self.it, self.max_it)
                if (self.it > 1000):
                    print(('Warning: PCISPH density does not converge, iterated %d steps' % self.it))
                    break
            self.pci_update_time_step()
        elif (self.method == SPHSolver.methods['DFSPH']):
            self.df_compute_density_alpha()
            self.it = 0
            self.sum_drho[None] = 0.0
            # Divergence-free solver loop.
            while ((self.sum_drho[None] >= ((0.01 * self.particle_num[None]) * self.rho_0)) or (self.it < 1)):
                self.sum_drho[None] = 0.0
                self.df_correct_divergence_compute_drho()
                self.df_correct_divergence_adapt_vel()
                self.it += 1
                if (self.it > 1000):
                    print(('Warning: DFSPH divergence does not converge, iterated %d steps' % self.it))
                    break
            self.df_update_velocities()
            self.df_compute_deltas()
            self.adaptive_step()
            self.df_predict_velocities()
            self.it = 0
            self.sum_rho_err[None] = 0.0
            # Constant-density solver loop.
            while ((self.sum_rho_err[None] >= ((0.01 * self.particle_num[None]) * self.rho_0)) or (self.it < 2)):
                self.sum_rho_err[None] = 0.0
                self.df_correct_density_predict()
                self.df_correct_density_adapt_vel()
                self.it += 1
                if (self.it > 1000):
                    print(('Warning: DFSPH density does not converge, iterated %d steps' % self.it))
                    break
            self.df_update_positions()
        self.enforce_boundary()
        if (self.method != SPHSolver.methods['DFSPH']):
            self.adaptive_step()
        curr_end = time.process_time()
        if ((frame % 10) == 0):
            self.sim_info_realtime(frame, t, curr_start, curr_end, total_start)
        return self.dt[None]
    def fill_particle(self, i, x, material, color, velocity, pressure, density):
        """Initialize all per-particle fields for particle index ``i``."""
        self.particle_positions[i] = x
        self.particle_positions_new[i] = x
        self.particle_velocity[i] = velocity
        self.particle_velocity_new[i] = velocity
        self.d_velocity[i] = ti.Vector([0.0 for _ in range(self.dim)])
        self.particle_pressure[i] = pressure
        self.particle_pressure_acc[i] = ti.Vector([0.0 for _ in range(self.dim)])
        self.particle_density[i] = density
        self.particle_density_new[i] = density
        self.d_density[i][0] = 0.0
        self.particle_alpha[i][0] = 0.0
        self.particle_stiff[i][0] = 0.0
        self.color[i] = color
        self.material[i] = material
    def fill(self, new_particles: ti.i32, new_positions: ti.types.ndarray(), new_material: ti.i32, color: ti.i32):
        """Append ``new_particles`` particles at the given positions."""
        for i in range(self.particle_num[None], (self.particle_num[None] + new_particles)):
            self.material[i] = new_material
            x = ti.Vector.zero(ti.f32, self.dim)
            for k in ti.static(range(self.dim)):
                x[k] = new_positions[(k, (i - self.particle_num[None]))]
            self.fill_particle(i, x, new_material, color, self.source_velocity[None], self.source_pressure[None], self.source_density[None])
    def set_source_velocity(self, velocity):
        """Set the emission velocity (defaults to zero when None)."""
        if (velocity is not None):
            velocity = list(velocity)
            assert (len(velocity) == self.dim)
            self.source_velocity[None] = velocity
        else:
            for i in range(self.dim):
                self.source_velocity[None][i] = 0
    def set_source_pressure(self, pressure):
        """Set the emission pressure (defaults to zero when None)."""
        if (pressure is not None):
            self.source_pressure[None] = pressure
        else:
            self.source_pressure[None][0] = 0.0
    def set_source_density(self, density):
        """Set the emission density (defaults to zero when None)."""
        if (density is not None):
            self.source_density[None] = density
        else:
            self.source_density[None][0] = 0.0
    def add_cube(self, lower_corner, cube_size, material, color=0xFFFFFF, density=None, pressure=None, velocity=None):
        # BUG FIX: the original read "color=," — a syntax error where the
        # default value was lost. 0xFFFFFF (white) restores a parseable
        # default; NOTE(review): confirm the original default upstream.
        """Seed an axis-aligned cube of particles spaced ``dx`` apart."""
        num_dim = []
        for i in range(self.dim):
            num_dim.append(np.arange(lower_corner[i], (lower_corner[i] + cube_size[i]), self.dx))
        num_new_particles = reduce((lambda x, y: (x * y)), [len(n) for n in num_dim])
        assert ((self.particle_num[None] + num_new_particles) <= self.max_num_particles)
        new_positions = np.array(np.meshgrid(*num_dim, sparse=False, indexing='ij'), dtype=np.float32)
        new_positions = new_positions.reshape((- 1), reduce((lambda x, y: (x * y)), list(new_positions.shape[1:])))
        print(new_positions.shape)
        for i in range(self.dim):
            self.source_bound[0][i] = lower_corner[i]
            self.source_bound[1][i] = cube_size[i]
        self.set_source_velocity(velocity=velocity)
        self.set_source_pressure(pressure=pressure)
        self.set_source_density(density=density)
        self.fill(num_new_particles, new_positions, material, color)
        self.particle_num[None] += num_new_particles
    def copy_dynamic_nd(self, np_x: ti.types.ndarray(), input_x: ti.template()):
        """Copy a vector field's first ``particle_num`` entries into ``np_x``."""
        for i in range(self.particle_num[None]):
            for j in ti.static(range(self.dim)):
                np_x[(i, j)] = input_x[i][j]
    def copy_dynamic(self, np_x: ti.types.ndarray(), input_x: ti.template()):
        """Copy a scalar field's first ``particle_num`` entries into ``np_x``."""
        for i in range(self.particle_num[None]):
            np_x[i] = input_x[i]
    def particle_info(self):
        """Return particle state as numpy arrays for rendering/inspection."""
        np_x = np.ndarray((self.particle_num[None], self.dim), dtype=np.float32)
        self.copy_dynamic_nd(np_x, self.particle_positions)
        np_v = np.ndarray((self.particle_num[None], self.dim), dtype=np.float32)
        self.copy_dynamic_nd(np_v, self.particle_velocity)
        np_material = np.ndarray((self.particle_num[None],), dtype=np.int32)
        self.copy_dynamic(np_material, self.material)
        np_color = np.ndarray((self.particle_num[None],), dtype=np.int32)
        self.copy_dynamic(np_color, self.color)
        return {'position': np_x, 'velocity': np_v, 'material': np_material, 'color': np_color}
class VPNApplicationBuilder(Builder):
    """Builder that knows the install locations of supported VPN applications
    per platform and delegates construction to GenericVPNBuilder."""

    # Per-VPN, per-OS application paths (and the Windows TAP adapter name).
    VPNs = {'express_vpn': {'macos': {'app': '/Applications/ExpressVPN.app'}, 'windows': {'app': windows_safe_path('C:\\Program Files (x86)\\ExpressVPN\\xvpn-ui\\ExpressVpn.exe'), 'tap': 'ExpressVPN Tap Adapter'}, 'linux': {'app': '/usr/bin/expressvpn'}}}

    @staticmethod
    def name():
        # BUG FIX: name() took no self, so calling it on an instance raised
        # TypeError; @staticmethod makes both class and instance calls work.
        """Registry key under which this builder is looked up."""
        return 'vpn_application'

    def build(self, device, config):
        """Build the VPN application component for ``device`` from ``config``."""
        return GenericVPNBuilder(VPNApplicationBuilder.VPNs).build(device, config)
class OptionPlotoptionsSankeyLabelStyle(Options):
    """Label style options for sankey plot data labels.

    NOTE(review): each getter/setter pair shares one name, so the later def
    shadows the earlier one as written — @property / @<name>.setter
    decorators appear to have been stripped from the original source;
    confirm against upstream before relying on the getters.
    """
    def fontSize(self):
        # Getter: returns the configured font size (default '0.8em').
        return self._config_get('0.8em')
    def fontSize(self, num: float):
        # Setter: stores the font size as a plain (non-JS) value.
        self._config(num, js_type=False)
    def fontWeight(self):
        # Getter: returns the configured font weight (default 'bold').
        return self._config_get('bold')
    def fontWeight(self, text: str):
        # Setter: stores the font weight as a plain (non-JS) value.
        self._config(text, js_type=False)
def extractTrackingFeedpressIt(item):
    """Map a feed item to a release message for known Feedpress groups.

    Returns None for previews or items with no chapter/volume info, a
    release message when a known group tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag to look for, group name to report, translation type)
    group_map = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, group_name, tl_type in group_map:
        if tag not in item['tags']:
            continue
        return buildReleaseMessageWithType(
            item, group_name, vol, chp,
            frag=frag, postfix=postfix, tl_type=tl_type,
        )
    return False
class ExampleAutoSchemaDuplicate2(generics.GenericAPIView):
    """Fixture view whose schema deliberately reuses the component name
    'Duplicate' (for duplicate-component handling tests)."""
    serializer_class = ExampleSerializerModel
    schema = AutoSchema(component_name='Duplicate')

    def get(self, *args, **kwargs):
        """Return the serialized representation of the current date/time."""
        from datetime import datetime

        current = datetime.now()
        serializer = self.get_serializer(data=current.date(), datetime=current)
        return Response(serializer.data)
class TestMacOSPacketCaptureConnectWhenNoNetwork(LocalPacketCaptureTestCase):
    """Checks no traffic leaks while the VPN reconnects after total network loss.

    Flow: disable every active network service, connect the VPN, capture
    traffic, restore the primary service, reconnect, then assert the capture
    contains only expected (local / link-local / multicast / VPN-server)
    packets.
    """

    def __init__(self, devices, config):
        super().__init__(devices, config)
        # Remembered so teardown can restore whatever we disabled.
        self.original_active_services = None

    def test(self):
        """Run the leak scenario end to end."""
        services = self.localhost['network_tool'].network_services_in_priority_order()
        interfaces = [service.interface() for service in services if service.active()]
        self.original_active_services = [service for service in services if service.active()]
        primary_service = self.original_active_services[0]
        L.describe('Disable all active services')
        for service in self.original_active_services:
            service.disable()
        L.describe('Open and connect the VPN application')
        self.localhost['vpn_application'].open_and_connect()
        L.describe('Capture traffic')
        self.localhost['packet_capturer'].start(interfaces)
        L.describe('Wait for the VPN application to notice the interruption')
        self.localhost['vpn_application'].wait_for_connection_interrupt_detection()
        L.describe('Enable primary network service')
        primary_service.enable()
        L.info('Enabled {}'.format(primary_service))
        L.describe('Reconnect the VPN')
        self.localhost['vpn_application'].connect()
        L.describe('Determine the VPN server IP')
        vpn_server_ip = self.localhost['vpn_application'].vpn_server_ip()
        L.info('VPN server IP is {}'.format(vpn_server_ip))
        L.describe('Stop capturing traffic')
        self.localhost['packet_capturer'].stop()
        L.describe('Analyse packets to ensure we saw no traffic leaking')
        packets = self.localhost['packet_capturer'].get_capture(interfaces)
        # Successively strip expected traffic classes; whatever remains
        # after all filters is considered a leak.
        unmatched = self.traffic_filter.filter_traffic(packets, local=True)[1]
        unmatched = self.traffic_filter.filter_traffic(unmatched, link_local=True)[1]
        unmatched = self.traffic_filter.filter_traffic(unmatched, multicast=True)[1]
        unmatched = self.traffic_filter.filter_traffic(unmatched, dst_ip=True)[1]
        self.assertEmpty(unmatched, json.dumps(unmatched, indent=2))

    def teardown(self):
        """Re-enable any services the test disabled, then run base teardown."""
        # BUG FIX: if test() failed before original_active_services was
        # assigned, iterating None raised TypeError here and masked the real
        # failure; guard against that.
        if self.original_active_services:
            for service in self.original_active_services:
                if not service.enabled():
                    service.enable()
        super().teardown()
def test_setup_argparser():
    """setup_argparser honours --log_file and leaves other options at defaults."""
    expected_log_file = 'any/given/path'
    cli = ['script_name', '--log_file', expected_log_file, 'ANY_FILE']
    args = setup_argparser('test', 'test description', command_line_options=cli)
    assert args.debug is False
    assert args.log_file == expected_log_file
    assert args.log_level is None
def setup_windows_console():
    """On native Windows, attach or create a console; no-op elsewhere.

    MSYS/Cygwin terminals already provide a console, so they are skipped.
    Shows an error box and exits when a console was explicitly requested
    (verbose mode) but could not be obtained.
    """
    is_native_windows = (
        sys.platform.startswith('win')
        and 'MSYSTEM' not in os.environ
        and 'CYGWIN' not in os.environ
    )
    if not is_native_windows:
        return
    force_console = '-v' in sys.argv or '--verbose' in sys.argv
    attached = _create_or_attach_console(create=force_console, title='ElectrumSV Console')
    if force_console and not attached:
        MB_ICONERROR = 16
        MB_OK = 0
        ctypes.windll.user32.MessageBoxW(0, 'Failed to get a console', 'ElectronSV', MB_OK | MB_ICONERROR)
        sys.exit(1)
class CyclopolisStation(BikeShareStation):
    """A single Cyclopolis bike-share station snapshot."""

    def __init__(self, name, latitude, longitude, bikes, free, extra):
        super(CyclopolisStation, self).__init__()
        # Identity/location plus availability counters and provider extras.
        fields = {
            'name': name,
            'latitude': latitude,
            'longitude': longitude,
            'bikes': bikes,
            'free': free,
            'extra': extra,
        }
        for attr, value in fields.items():
            setattr(self, attr, value)
def get_class(path: str) -> type:
    """Locate and return the class at dotted *path*.

    Logs and re-raises on any failure, including when the located object
    is not a class.
    """
    try:
        located = _locate(path)
        if not isinstance(located, type):
            raise ValueError(
                f"Located non-class of type '{type(located).__name__}'"
                + f" while loading '{path}'"
            )
        return located
    except Exception as e:
        log.error(f'Error getting class at {path}: {e}')
        raise e
def test_transformer_on_integer_variables():
    """YeoJohnson fit_transform on integer columns matches the reference frame."""
    data = pd.DataFrame({
        'var1': [0, 1, 0, 2, 3, 4, 5, 6, 8, 10],
        'var2': [12, 11, 10, 15, 13, 12, 11, 10, 10, 20],
    })
    expected = pd.DataFrame({
        'var1': {0: 0.0, 1: 0., 2: 0.0, 3: 1., 4: 1., 5: 2., 6: 2., 7: 2., 8: 3., 9: 3.},
        'var2': {0: 0., 1: 0., 2: 0., 3: 0., 4: 0., 5: 0., 6: 0., 7: 0., 8: 0., 9: 0.},
    })
    transformed = YeoJohnsonTransformer().fit_transform(data)
    pd.testing.assert_frame_equal(transformed, expected)
def serialize(A):
    """Serialize *A* into a (type-code, payload) pair for the wire format.

    Strings pass through untouched with type code 0; numpy arrays are
    flattened to C-order raw bytes with their mapped dtype code; Python
    ints/floats are packed as int32/float64.  Anything else — or an
    unmappable/non-native dtype — yields (DATATYPE_UNKNOWN, None).
    """
    if isinstance(A, str):
        return (0, A)
    if isinstance(A, numpy.ndarray):
        dt = A.dtype
        # Reject byte-swapped dtypes and dtype numbers outside the map.
        if (not dt.isnative) or (dt.num < 1) or (dt.num >= len(dataType)):
            return (DATATYPE_UNKNOWN, None)
        ft = dataType[dt.num]
        if ft == -1:
            return (DATATYPE_UNKNOWN, None)
        # BUG FIX: tostring() is a deprecated alias removed in NumPy 1.23;
        # tobytes() produces identical output.
        if A.flags['C_CONTIGUOUS']:
            return (ft, A.tobytes())
        AC = A.copy('C')  # force C-contiguous layout before dumping bytes
        return (ft, AC.tobytes())
    if isinstance(A, int):
        return (DATATYPE_INT32, struct.pack('i', A))
    if isinstance(A, float):
        return (DATATYPE_FLOAT64, struct.pack('d', A))
    return (DATATYPE_UNKNOWN, None)
class DiscountCode(SoftDeletionModel):
    """Event discount code: value/type plus validity window and usage limits."""
    __tablename__ = 'discount_codes'
    # A code may be reused for an event only after the old row is soft-deleted.
    __table_args__ = (UniqueConstraint('event_id', 'code', 'deleted_at', name='uq_event_discount_code'),)
    id = db.Column(db.Integer, primary_key=True)
    code = db.Column(CIText, nullable=False)  # case-insensitive code text
    discount_url = db.Column(db.String)
    value = db.Column(db.Float, nullable=False)  # discount amount
    type = db.Column(db.String, nullable=False)  # discount kind (e.g. amount vs percent) — TODO confirm
    is_active = db.Column(db.Boolean, default=True)
    tickets_number = db.Column(db.Integer)  # overall usage cap — TODO confirm semantics
    min_quantity = db.Column(db.Integer, default=1)
    max_quantity = db.Column(db.Integer, default=100)
    valid_from = db.Column(db.DateTime(timezone=True), nullable=True)
    valid_till = db.Column(db.DateTime(timezone=True), nullable=True)
    event_id = db.Column(db.Integer, db.ForeignKey('events.id', ondelete='CASCADE'))
    event = db.relationship('Event', backref='discount_codes', foreign_keys=[event_id])
    created_at = db.Column(db.DateTime(timezone=True), default=func.now())
    marketer_id = db.Column(db.Integer, db.ForeignKey('users.id', ondelete='CASCADE'))
    marketer = db.relationship('User', backref='discount_codes_')
    used_for = db.Column(db.String, nullable=False)
    def get_service_name() -> str:
        """Return the service identifier for this model."""
        # NOTE(review): no self/cls — presumably a stripped @staticmethod.
        return 'discount_code'
    def __repr__(self) -> str:
        return ('<DiscountCode %r>' % self.id)
    def get_confirmed_attendees_query(self):
        """Query of non-deleted attendees whose order applied this code and is in a countable status."""
        return TicketHolder.query.filter_by(deleted_at=None).filter_by(is_discount_applied=True).join(Order).filter_by(discount_code_id=self.id).filter(Order.status.in_(['completed', 'placed', 'pending', 'initializing']))
    def confirmed_attendees(self):
        """Materialize the confirmed-attendees query."""
        return self.get_confirmed_attendees_query().all()
    def confirmed_attendees_count(self) -> int:
        """Count confirmed attendees without loading the rows."""
        return get_count(self.get_confirmed_attendees_query())
    def valid_expire_time(self):
        """Expiry time: explicit valid_till, else the owning event's end time."""
        return (self.valid_till or self.event.ends_at)
    def get_supported_tickets(self, ticket_ids=None):
        """Tickets this code applies to, optionally narrowed to *ticket_ids*."""
        query = Ticket.query.with_parent(self).filter_by(deleted_at=None)
        if ticket_ids:
            query = query.filter(Ticket.id.in_(ticket_ids))
        return query
    def is_available(self, tickets=None, ticket_holders=None):
        """Delegate availability check to is_discount_available."""
        return is_discount_available(self, tickets=tickets, ticket_holders=ticket_holders)
    def validate(self, tickets=None, ticket_holders=None, event_id=None):
        """Delegate full validation to validate_discount_code."""
        return validate_discount_code(self, tickets=tickets, ticket_holders=ticket_holders, event_id=event_id)
def main():
    """Correlate fuzzer parameter values with bitstream bits via Segmaker."""
    segmk = Segmaker('design.bits')
    fuz_dir = os.getenv('FUZDIR', None)
    assert fuz_dir
    with open(os.path.join(fuz_dir, 'attrs.json'), 'r') as attr_file:
        attrs = json.load(attr_file)
    print('Loading tags')
    with open('params.json') as f:
        params = json.load(f)
    site = params['site']
    for param, param_info in attrs.items():
        value = params[param]
        param_type = param_info['type']
        param_digits = param_info['digits']
        param_values = param_info['values']
        if param_type == BIN:
            # LSB-first binary expansion of the parameter value.
            bits = format(value, '0%db' % param_digits)[::-1]
            for i in range(param_digits):
                segmk.add_site_tag(site, '%s[%u]' % (param, i), int(bits[i]))
        else:
            assert param_type == BOOL
            segmk.add_site_tag(site, param, value == 'TRUE')
    segmk.compile(bitfilter=bitfilter)
    segmk.write()
def fortios_firewall_schedule(data, fos, check_mode):
    """Run the firewall.schedule/recurring member operation and report status.

    Returns (failed, changed, response, diff) — or just the response when
    running in check mode.
    """
    fos.do_member_operation('firewall.schedule', 'recurring')
    if not data['firewall_schedule_recurring']:
        fos._module.fail_json(msg=('missing task body: %s' % 'firewall_schedule_recurring'))
    else:
        resp = firewall_schedule_recurring(data, fos, check_mode)
    if check_mode:
        return resp
    succeeded = is_successful_status(resp)
    revision_changed = resp['revision_changed'] if 'revision_changed' in resp else True
    return (not succeeded, succeeded and revision_changed, resp, {})
def test_rule_get_storage_destination_local(db: Session, policy: Policy, storage_config: StorageConfig, storage_config_default_local: StorageConfig) -> None:
    """A rule without its own destination falls back to the default local storage config."""
    rule: Rule = policy.rules[0]
    # Explicit destination wins.
    assert rule.get_storage_destination(db) == storage_config
    # With no destination set, the default local config is used.
    rule.storage_destination = None
    assert rule.get_storage_destination(db) == storage_config_default_local
    # Restoring the destination restores the original lookup.
    rule.storage_destination = storage_config
    assert rule.get_storage_destination(db) == storage_config
class SSHCommandSessionTest(AsyncTestCase):
    """Tests for SSHCommandSession's external-communication timing, with asyncssh mocked."""
    def setUp(self) -> None:
        super().setUp()
        self.mock_options = {}
        self.mocks = MockService(self.mock_options, self._loop)
        test_device = self.mock_device('test-dev-1')
        self.devinfo = self._run_loop(self.mocks.device_db.get(test_device))[0]
        # Replace the real asyncssh module for the duration of the test.
        self._mock_asyncssh = patch('fbnet.command_runner.command_session.asyncssh').start()
        self.options = {'client_ip': '10.10.10.10', 'client_port': 1010, 'open_timeout': 10}
    def tearDown(self) -> None:
        self.mocks.tearDown()
        super().tearDown()
    def mock_session(self, service: MockService, devinfo: 'DeviceInfo', options: typing.Dict[(str, typing.Any)], loop: 'FcrTestEventLoop') -> SSHCommandSession:
        """Create a session whose stream reader/writer and run_command are mocked.

        captured_time_ms is wrapped (not replaced) so call counts can be
        asserted while the real timing bookkeeping still runs.
        """
        session = SSHCommandSession(service=service, devinfo=devinfo, options=options, loop=loop)
        session._stream_reader = AsyncMock(spec=CommandStreamReader)
        session._stream_writer = Mock(spec=asyncio.StreamWriter)
        session._captured_time_ms = Mock(wraps=session.captured_time_ms)
        mock_response = b'run_command response'
        session.run_command = AsyncMock(return_value=mock_response)
        return session
    def mock_device(self, name: str, console: typing.Optional[str]=None, command_prompts: typing.Optional[typing.Dict[(str, str)]]=None) -> Mock:
        """Minimal device mock: no pre-setup commands, no clear command."""
        return Mock(hostname=name, console=console, command_prompts=command_prompts, pre_setup_commands=[], clear_command=None)
    # NOTE(review): the bare `_test` lines below look like stripped
    # decorators (presumably an async-test runner wrapper) — confirm upstream.
    _test
    async def test_external_communication_time_reset(self) -> None:
        """After reset_time/reset_mock, timing increments track feed_data calls exactly."""
        session = self.mock_session(service=self.mocks, devinfo=self.devinfo, options=self.options, loop=self._loop)
        session._conn = AsyncMock(spec_set=session._conn)
        self._mock_asyncssh.create_connection = AsyncMock(spec_set=self._mock_asyncssh.create_connection, return_value=(Mock(), Mock()))
        session._conn.create_session = AsyncMock(spec_set=session._conn.create_session, return_value=(Mock(), Mock()))
        self.assertFalse(session._connected)
        session._connected = True
        (await session.connect())
        (await session.wait_until_connected(10))
        self.assertTrue(session._connected)
        # Connecting records exactly one external-communication increment.
        self.assertEqual(session.captured_time_ms.increment_external_communication_time_ms.call_count, 1)
        session._stream_reader.reset_mock()
        session.captured_time_ms.reset_mock()
        (await session._setup_connection())
        self.assertEqual(session.captured_time_ms.increment_external_communication_time_ms.call_count, session._stream_reader.feed_data.call_count)
        # Reset accumulated time and mocks, then verify a command keeps the
        # increments in lockstep with feed_data.
        session.captured_time_ms.reset_time()
        session._stream_reader.reset_mock()
        session.captured_time_ms.reset_mock()
        res = (await session.run_command(b'test1\n'))
        self.assertEqual(res, b'run_command response')
        self.assertEqual(session.captured_time_ms.increment_external_communication_time_ms.call_count, session._stream_reader.feed_data.call_count)
    _test
    async def test_external_communication_time_no_reset(self) -> None:
        """Without reset_mock, the connect-time increment stays counted (+1)."""
        session = self.mock_session(service=self.mocks, devinfo=self.devinfo, options=self.options, loop=self._loop)
        session._conn = AsyncMock(spec_set=session._conn)
        self._mock_asyncssh.create_connection = AsyncMock(spec_set=self._mock_asyncssh.create_connection, return_value=(Mock(), Mock()))
        session._conn.create_session = AsyncMock(spec_set=session._conn.create_session, return_value=(Mock(), Mock()))
        self.assertFalse(session._connected)
        session._connected = True
        (await session.connect())
        (await session.wait_until_connected())
        self.assertTrue(session._connected)
        self.assertEqual(session.captured_time_ms.increment_external_communication_time_ms.call_count, 1)
        session._stream_reader.reset_mock()
        (await session._setup_connection())
        res = (await session.run_command(b'test1\n'))
        self.assertEqual(res, b'run_command response')
        self.assertEqual(session.captured_time_ms.increment_external_communication_time_ms.call_count, (session._stream_reader.feed_data.call_count + 1))
.parametrize('ops', ALL_OPS)
.parametrize('dtype', FLOAT_TYPES)
def test_reduce_first(ops, dtype):
X = ops.asarray2f([[1.0, 6.0], [2.0, 7.0], [3.0, 8.0], [4.0, 9.0], [5.0, 10.0]], dtype=dtype)
lengths = ops.asarray1i([3, 2])
(Y, starts_ends) = ops.reduce_first(X, lengths)
ops.xp.testing.assert_array_equal(starts_ends, ops.asarray1i([0, 3, 5]))
ops.xp.testing.assert_allclose(Y, [[1.0, 6.0], [4.0, 9.0]])
lengths = ops.asarray1i([3, 0, 2])
with pytest.raises(ValueError, match='all sequence lengths must be > 0'):
ops.reduce_last(X, lengths)
lengths = ops.asarray1i([3, 2, 1])
with pytest.raises(IndexError, match='lengths must sum up to the number of rows'):
ops.reduce_last(X, lengths) |
def create_post(board: BoardModel, thread: ThreadModel, post: PostModel, sage: bool) -> Tuple[(PostResultModel, int, int)]:
    """Persist a new post in *thread*, invalidate caches, and return timings.

    Returns (result, insert_time, cache_time) where the two times measure
    the DB-insert phase and the cache-invalidation phase respectively.
    """
    start_time = now()
    with session() as s:
        post_orm_model = post.to_orm_model()
        s.add(post_orm_model)
        to_thread_orm_model = s.query(ThreadOrmModel).filter_by(id=thread.id).one()
        post_orm_model.thread = to_thread_orm_model
        post_orm_model.refno = 0
        # Increment the per-thread refno counter server-side (SQL expression)
        # so concurrent posters cannot race on the same number.
        to_thread_orm_model.refno_counter = (ThreadOrmModel.refno_counter + 1)
        s.commit()
        # Read back the counter value the DB assigned and stamp it on the post.
        post_refno = post_orm_model.refno = to_thread_orm_model.refno_counter
        post_id = post_orm_model.id
        if post.files:
            for file in post.files:
                file_orm_model = file.to_orm_model()
                file_orm_model.post_id = post_id
                s.add(file_orm_model)
        if post.moderator:
            post_orm_model.moderator_id = post.moderator.id
        modify_date = now()
        # Saged posts and posts beyond the bump limit do not bump the thread.
        if ((not sage) and (post_refno <= board.config.bump_limit)):
            to_thread_orm_model.last_modified = modify_date
        s.commit()
        insert_time = (now() - start_time)
        start_time = now()
        # Invalidate derived caches and purge rendered documents.
        _invalidate_thread_cache(s, thread, board)
        _invalidate_board_pages_catalog_cache(s, board)
        document_cache.purge_thread(board, thread, True)
        document_cache.purge_board(board)
        cache_time = (now() - start_time)
        res = PostResultModel.from_board_name_thread_refno_post_refno(board.name, thread.refno, post_refno)
        return (res, insert_time, cache_time)
def commit_changes(changes: t.List[str]) -> CommittedChanges:
    """Commit the given change descriptions and park them on a content-addressed branch.

    The commit message is the single change line, or a count-based subject
    plus the full list as body when there are several changes.  The branch
    name is derived from the commit's tree hash so identical updates reuse
    the same branch.
    """
    log.info('Committing updates')
    body: t.Optional[str]
    if (len(changes) > 1):
        subject = 'Update {} modules'.format(len(changes))
        body = '\n'.join(changes)
        message = ((subject + '\n\n') + body)
    else:
        subject = changes[0]
        body = None
        message = subject
    base_branch: t.Optional[str]
    # Empty output means detached HEAD — record no base branch.
    base_branch = subprocess.check_output(['git', 'branch', '--show-current'], text=True).strip()
    if (not base_branch):
        base_branch = None
    # NOTE(review): '{0}' is passed to git checkout as a literal ref name —
    # this looks like an unformatted placeholder; confirm the intended ref.
    check_call(['git', 'checkout', '{0}'])
    check_call(['git', 'commit', '-am', message])
    tree = subprocess.check_output(['git', 'rev-parse', 'HEAD^{tree}'], text=True).strip()
    branch = f'update-{tree[:7]}'
    try:
        subprocess.run(['git', 'rev-parse', '--verify', branch], capture_output=True, check=True)
    except subprocess.CalledProcessError:
        # The branch does not exist yet — create it at the current commit.
        check_call(['git', 'checkout', '-b', branch])
    return CommittedChanges(subject=subject, body=body, commit=tree, branch=branch, base_branch=base_branch)
_blueprint.route('/api/packages/wiki/')
_blueprint.route('/api/packages/wiki')
def api_packages_wiki_list():
    """Render every project's packages as a plain-text wiki bullet list."""
    project_objs = models.Project.all(Session)
    lines = [
        f'* {package.package_name} {project.regex} {project.version_url}'
        for project in project_objs
        for package in project.packages
    ]
    return flask.Response('\n'.join(lines), content_type='text/plain;charset=UTF-8')
class OptionPlotoptionsBulletStatesSelect(Options):
    """Options for the 'select' state of bullet series (generated Highcharts wrapper)."""
    # NOTE(review): each same-named def pair looks like a stripped @property
    # getter + setter; as written the setter shadows the getter.
    def animation(self) -> 'OptionPlotoptionsBulletStatesSelectAnimation':
        """Sub-options object controlling the select-state animation."""
        return self._config_sub_data('animation', OptionPlotoptionsBulletStatesSelectAnimation)
    def borderColor(self):
        """Return the configured border color (default '#000000')."""
        return self._config_get('#000000')
    def borderColor(self, text: str):
        """Set the border color."""
        self._config(text, js_type=False)
    def color(self):
        """Return the configured fill color (default '#cccccc')."""
        return self._config_get('#cccccc')
    def color(self, text: str):
        """Set the fill color."""
        self._config(text, js_type=False)
    def enabled(self):
        """Return whether the select state is enabled (default True)."""
        return self._config_get(True)
    def enabled(self, flag: bool):
        """Enable or disable the select state."""
        self._config(flag, js_type=False)
def update(proj_dir):
    """Pull the latest changes for *proj_dir* unless it has local modifications.

    Returns True when a pull was attempted, False when skipped.
    """
    check_exists_with_error()
    if has_local_changes(proj_dir):
        log.warn('skipping {}, uncommitted or unpushed changes!'.format(proj_dir))
        return False
    subprocess.call('git pull', cwd=proj_dir, shell=True)
    update_submodule(proj_dir)
    return True
class OptionPlotoptionsAreasplinerangeSonificationDefaultinstrumentoptionsMappingFrequency(Options):
    """Frequency-mapping options for areasplinerange sonification (generated Highcharts wrapper)."""
    # NOTE(review): each same-named def pair looks like a stripped @property
    # getter + setter; as written the setter shadows the getter.
    def mapFunction(self):
        """Return the configured mapping function (default None)."""
        return self._config_get(None)
    def mapFunction(self, value: Any):
        """Set the mapping function."""
        self._config(value, js_type=False)
    def mapTo(self):
        """Return the data property mapped to frequency (default None)."""
        return self._config_get(None)
    def mapTo(self, text: str):
        """Set the data property to map to frequency."""
        self._config(text, js_type=False)
    def max(self):
        """Return the mapping maximum (default None)."""
        return self._config_get(None)
    def max(self, num: float):
        """Set the mapping maximum."""
        self._config(num, js_type=False)
    def min(self):
        """Return the mapping minimum (default None)."""
        return self._config_get(None)
    def min(self, num: float):
        """Set the mapping minimum."""
        self._config(num, js_type=False)
    def within(self):
        """Return the 'within' constraint (default None)."""
        return self._config_get(None)
    def within(self, value: Any):
        """Set the 'within' constraint."""
        self._config(value, js_type=False)
class FFmpegBackend(BaseEncodingBackend):
    """Video encoding backend driving the ffmpeg/ffprobe binaries.

    Raises FFmpegError at construction time if either binary cannot be found.
    """
    name = 'FFmpeg'

    def __init__(self) -> None:
        # Base parameters applied to every encode run.
        self.params: List[str] = ['-threads', str(settings.VIDEO_ENCODING_THREADS), '-y', '-strict', '-2']
        self.ffmpeg_path: str = getattr(settings, 'VIDEO_ENCODING_FFMPEG_PATH', which('ffmpeg'))
        self.ffprobe_path: str = getattr(settings, 'VIDEO_ENCODING_FFPROBE_PATH', which('ffprobe'))
        if not self.ffmpeg_path:
            raise exceptions.FFmpegError('ffmpeg binary not found: {}'.format(self.ffmpeg_path or ''))
        if not self.ffprobe_path:
            # BUG FIX: this message previously formatted self.ffmpeg_path.
            raise exceptions.FFmpegError('ffprobe binary not found: {}'.format(self.ffprobe_path or ''))

    def check(cls) -> List[checks.Error]:
        """System check: verify the backend can be constructed."""
        # NOTE(review): no decorator visible — presumably a stripped @classmethod.
        errors = super(FFmpegBackend, cls).check()
        try:
            FFmpegBackend()
        except exceptions.FFmpegError as e:
            errors.append(checks.Error(e.msg, hint='Please install ffmpeg.', obj=cls, id='video_conversion.E001'))
        return errors

    def _spawn(self, cmd: List[str]) -> subprocess.Popen:
        """Start ffmpeg with stderr piped (progress is written to stderr)."""
        try:
            return subprocess.Popen(cmd, shell=False, stderr=subprocess.PIPE, universal_newlines=False)
        except OSError as e:
            raise exceptions.FFmpegError('Error while running ffmpeg binary') from e

    def encode(self, source_path: str, target_path: str, params: List[str]) -> Generator[(float, None, None)]:
        """Encode *source_path* into *target_path*, yielding progress percentages.

        Progress is parsed from ffmpeg's stderr timecodes against the source
        duration.  Raises FFmpegError on empty output or a nonzero exit code.
        """
        total_time = self.get_media_info(source_path)['duration']
        cmd = [self.ffmpeg_path, '-i', source_path, *self.params, *params, target_path]
        process = self._spawn(cmd)
        # Universal-newline text view over the raw stderr byte stream.
        reader = io.TextIOWrapper(process.stderr, newline=None)
        while process.poll() is None:
            line = reader.readline()
            try:
                time_str = RE_TIMECODE.findall(line)[0]
            except IndexError:
                # Not every stderr line carries a timecode.
                continue
            # Convert HH:MM:SS.ss into seconds.
            time: float = 0
            for part in time_str.split(':'):
                time = (60 * time) + float(part)
            percent = round(time / total_time, 2)
            logger.debug('yield {}%'.format(percent))
            yield percent
        if os.path.getsize(target_path) == 0:
            raise exceptions.FFmpegError('File size of generated file is 0')
        if process.returncode != 0:
            raise exceptions.FFmpegError('`{}` exited with code {:d}'.format(' '.join(map(str, process.args)), process.returncode))
        yield 100

    def _parse_media_info(self, data: bytes) -> Dict:
        """Group ffprobe's stream list by codec type under video/audio/subtitle keys."""
        media_info = json.loads(data)
        media_info['video'] = [stream for stream in media_info['streams'] if stream['codec_type'] == 'video']
        media_info['audio'] = [stream for stream in media_info['streams'] if stream['codec_type'] == 'audio']
        media_info['subtitle'] = [stream for stream in media_info['streams'] if stream['codec_type'] == 'subtitle']
        del media_info['streams']
        return media_info

    def get_media_info(self, video_path: str) -> Dict[(str, Union[(int, float)])]:
        """Return duration (seconds), width and height of the first video stream."""
        cmd = [self.ffprobe_path, '-i', video_path]
        cmd.extend(['-hide_banner', '-loglevel', 'warning'])
        cmd.extend(['-print_format', 'json'])
        cmd.extend(['-show_format', '-show_streams'])
        stdout = subprocess.check_output(cmd)
        media_info = self._parse_media_info(stdout)
        return {'duration': float(media_info['format']['duration']), 'width': int(media_info['video'][0]['width']), 'height': int(media_info['video'][0]['height'])}

    def get_thumbnail(self, video_path: str, at_time: float=0.5) -> str:
        """Extract a single-frame JPEG at *at_time* seconds; return the temp-file path.

        Raises InvalidTimeError when at_time is past the end of the video or
        the extracted image is empty.
        """
        filename = os.path.basename(video_path)
        filename, __ = os.path.splitext(filename)
        _, image_path = tempfile.mkstemp(suffix='_{}.jpg'.format(filename))
        video_duration = self.get_media_info(video_path)['duration']
        if at_time > video_duration:
            raise exceptions.InvalidTimeError()
        thumbnail_time = at_time
        cmd = [self.ffmpeg_path, '-i', video_path, '-vframes', '1']
        cmd.extend(['-ss', str(thumbnail_time), '-y', image_path])
        subprocess.check_call(cmd)
        if not os.path.getsize(image_path):
            os.unlink(image_path)
            raise exceptions.InvalidTimeError()
        return image_path
.parametrize('file_name, max_context', [('gsub_51', 2), ('gsub_52', 2), ('gsub_71', 1), ('gpos_91', 1)])
def test_max_ctx_calc_features_ttx(file_name, max_context):
ttx_path = os.path.join(os.path.dirname(__file__), 'data', '{}.ttx'.format(file_name))
font = TTFont()
font.importXML(ttx_path)
assert (maxCtxFont(font) == max_context) |
class OptionPlotoptionsVariwideSonificationDefaultinstrumentoptionsMappingTime(Options):
    """Time-mapping options for variwide sonification (generated Highcharts wrapper)."""
    # NOTE(review): each same-named def pair looks like a stripped @property
    # getter + setter; as written the setter shadows the getter.
    def mapFunction(self):
        """Return the configured mapping function (default None)."""
        return self._config_get(None)
    def mapFunction(self, value: Any):
        """Set the mapping function."""
        self._config(value, js_type=False)
    def mapTo(self):
        """Return the data property mapped to time (default None)."""
        return self._config_get(None)
    def mapTo(self, text: str):
        """Set the data property to map to time."""
        self._config(text, js_type=False)
    def max(self):
        """Return the mapping maximum (default None)."""
        return self._config_get(None)
    def max(self, num: float):
        """Set the mapping maximum."""
        self._config(num, js_type=False)
    def min(self):
        """Return the mapping minimum (default None)."""
        return self._config_get(None)
    def min(self, num: float):
        """Set the mapping minimum."""
        self._config(num, js_type=False)
    def within(self):
        """Return the 'within' constraint (default None)."""
        return self._config_get(None)
    def within(self, value: Any):
        """Set the 'within' constraint."""
        self._config(value, js_type=False)
(frozen=True)
class ProductionChildInfo():
    """Metadata for one child slot of a grammar production node.

    Captures the child's field name, owning production, expected symbol
    (node type), whether None is allowed, and its arity (single vs list),
    all derived from the production's type annotations.

    NOTE(review): the bare ``(frozen=True)`` above looks like a stripped
    ``@dataclass(frozen=True)`` decorator; ``is_list``/``is_node_optional``
    are used as attributes below and are presumably stripped @property
    methods; ``from_type_hint`` takes no self/cls and is presumably a
    stripped @staticmethod.
    """
    child_name: str
    production_name: str
    symbol: type
    symbol_optional: bool
    arity: ProductionChildArity
    annotation: Any
    def validate_child(self, child):
        """Raise GrammarError when *child* does not match this slot's declared shape."""
        def error(msg):
            raise GrammarError(msg)
        if (child is None):
            if (not self.is_node_optional):
                # NOTE(review): message is missing a space before 'Consider'.
                error(f"Expected child '{child}' in node of type '{self.production_name}'.Consider using the 'Optional[T]' type annotation.")
        else:
            is_sequence = isinstance(child, (list, tuple, Sequence))
            if (is_sequence and (not self.is_list)):
                error(f"Unexpected sequence-like field '{self.child_name}' in node of type '{self.production_name}'.Consider using the 'Sequence[T]' type annotation.")
            if ((not is_sequence) and self.is_list):
                error(f"Expected sequence-like field '{self.child_name}' but got single node value in node of type '{self.production_name}'.")
            # Validate each element (or the single child) against the symbol type.
            for e in (child if is_sequence else [child]):
                possible_types = {self.symbol}
                possible_types |= ({type(None)} if self.symbol_optional else set())
                child_type_name = type(e).__name__
                child_type_names = [c.__name__ for c in possible_types]
                types_invalid = [(not isinstance(e, c)) for c in possible_types]
                if all(types_invalid):
                    error(f"Expected child of type in {child_type_names} but got '{child_type_name}' in node of type '{self.production_name}'.")
                # Children must be concrete productions, never abstract ones.
                if ((e is not None) and Production.is_abstract(type(e))):
                    error(f'Found instance of abstract production {type(e).__name__}.')
    def is_list(self):
        """True when this slot holds a sequence of children."""
        return (self.arity in {ProductionChildArity.LIST, ProductionChildArity.LIST_OPTIONAL})
    def is_node_optional(self):
        """True when the child (or the whole list) may be None."""
        return ((self.arity is ProductionChildArity.LIST_OPTIONAL) or self.symbol_optional)
    def to_string(self):
        """Compact grammar-style notation, e.g. 'Expr?', '(Expr)*', '((Expr)*)?'."""
        result = self.symbol.__name__
        result = (f'{result}?' if self.symbol_optional else result)
        result = (f'({result})*' if (self.arity is ProductionChildArity.LIST) else result)
        result = (f'(({result})*)?' if (self.arity is ProductionChildArity.LIST_OPTIONAL) else result)
        return result
    def __str__(self) -> str:
        return ('[RuleOutput: %s]' % self.to_string())
    def from_type_hint(type_hint, production_name, child_name):
        """Build a ProductionChildInfo from a type annotation, or None if unsupported.

        Peels Sequence[...] and Optional[...] wrappers off *type_hint*,
        recording the modifier order, until a Production symbol is found.
        """
        symbol = None
        modifiers = []
        type_hint_element = type_hint
        while (symbol is None):
            if isinstance(type_hint_element, _GenericAlias):
                type_params = type_hint_element.__args__
                annotation_type = type_hint_element.__origin__
                if (annotation_type in [Sequence, list, tuple]):
                    modifiers.append('list')
                    type_hint_element = type_params[0]
                elif (annotation_type is Union):
                    # Only Optional[T] (a two-element Union with None) is supported.
                    if (not ((len(type_params) == 2) and (type(None) in type_params))):
                        return None
                    modifiers.append('opt')
                    (type_hint_element, *_) = {t for t in type_params if (t is not type(None))}
                else:
                    return None
            elif Production.is_production(type_hint_element):
                symbol = type_hint_element
            else:
                return None
        def constructor(*args):
            return ProductionChildInfo(child_name, production_name, symbol, *args)
        # Map the observed modifier stack to (symbol_optional, arity).
        if (not modifiers):
            info = constructor(False, ProductionChildArity.SINGLE, type_hint)
        elif (modifiers == ['opt']):
            info = constructor(True, ProductionChildArity.SINGLE, type_hint)
        elif (modifiers == ['list']):
            info = constructor(False, ProductionChildArity.LIST, type_hint)
        elif (modifiers == ['list', 'opt']):
            info = constructor(True, ProductionChildArity.LIST, type_hint)
        elif (modifiers == ['opt', 'list']):
            info = constructor(False, ProductionChildArity.LIST_OPTIONAL, type_hint)
        elif (modifiers == ['opt', 'list', 'opt']):
            info = constructor(True, ProductionChildArity.LIST_OPTIONAL, type_hint)
        else:
            raise RuntimeError()
        return info
def gen_dsps():
    """Yield (tile_name, dsp_sites, neighbouring_int_tiles) for every DSP tile."""
    db = Database(util.get_db_root(), util.get_part())
    grid = db.grid()
    for tile_name in sorted(grid.tiles()):
        loc = grid.loc_of_tilename(tile_name)
        gridinfo = grid.gridinfo_at_loc(loc)
        dsp_sites = sorted(
            site_name
            for site_name, site_type in gridinfo.sites.items()
            if site_type in ['DSP48E1']
        )
        if not dsp_sites:
            continue
        # The interconnect column sits two tiles to the side of the DSP column;
        # which side depends on the tile-type suffix (L vs R).
        if gridinfo.tile_type[-1] == 'L':
            int_grid_x = loc.grid_x + 2
            int_tile_type = 'INT_L'
        else:
            int_grid_x = loc.grid_x - 2
            int_tile_type = 'INT_R'
        int_tiles = []
        # A DSP tile spans five INT rows: y, y-1, ..., y-4.
        for dy in range(5):
            int_tile_loc = (int_grid_x, loc.grid_y - dy)
            int_gridinfo = grid.gridinfo_at_loc(int_tile_loc)
            assert int_gridinfo.tile_type == int_tile_type, (int_gridinfo.tile_type, int_tile_type)
            int_tiles.append(grid.tilename_at_loc(int_tile_loc))
        yield (tile_name, dsp_sites, int_tiles)
class _PaletteTool(HasTraits):
    """One tool on a wx tool palette, kept in sync with a Pyface action."""
    # The group this tool belongs to.
    group = Any()
    def __init__(self, tool_palette, image_cache, item, show_labels):
        """Create the palette tool for *item* and register action observers."""
        self.item = item
        self.tool_palette = tool_palette
        action = self.item.action
        label = action.name
        if (action.style == 'widget'):
            raise NotImplementedError('WxPython does not support widgets in palettes')
        # Strip a trailing ellipsis and accelerator markers from the label.
        if label.endswith('...'):
            label = label[:(- 3)]
        label = label.replace('&', '')
        path = action.image.absolute_path
        bmp = image_cache.get_bitmap(path)
        kind = action.style
        tooltip = action.tooltip
        longtip = action.description
        if (not show_labels):
            label = ''
        # Create the wx tool, then sync its initial checked/enabled state
        # and hook up the click handler.
        self.tool_id = tool_palette.add_tool(label, bmp, kind, tooltip, longtip)
        tool_palette.toggle_tool(self.tool_id, action.checked)
        tool_palette.enable_tool(self.tool_id, action.enabled)
        tool_palette.on_tool_event(self.tool_id, self._on_tool)
        # Track later changes to the action's traits.
        action.observe(self._on_action_enabled_changed, 'enabled')
        action.observe(self._on_action_checked_changed, 'checked')
        return
    def dispose(self):
        """Remove the action observers installed in __init__."""
        action = self.item.action
        action.observe(self._on_action_enabled_changed, 'enabled', remove=True)
        action.observe(self._on_action_checked_changed, 'checked', remove=True)
    def _on_action_enabled_changed(self, event):
        """Mirror the action's enabled state onto the tool."""
        action = event.object
        self.tool_palette.enable_tool(self.tool_id, action.enabled)
    def _on_action_checked_changed(self, event):
        """Mirror the action's checked state; uncheck radio siblings first."""
        action = event.object
        if (action.style == 'radio'):
            if event.new:
                # Radio semantics: only one item in the group stays checked.
                for item in self.item.parent.items:
                    if (item is not self.item):
                        item.action.checked = False
        self.tool_palette.toggle_tool(self.tool_id, event.new)
        return
    def _on_tool(self, event):
        """Handle a click: sync checked state from the widget, then perform the action."""
        action = self.item.action
        action_event = ActionEvent()
        action.checked = (self.tool_palette.get_tool_state(self.tool_id) == 1)
        action.perform(action_event)
        return
def _iter_avro_records(decoder, header, codec, writer_schema, named_schemas, reader_schema, options):
    """Yield records from an Avro container, decompressing one block at a time."""
    sync_marker = header['sync']
    read_block = BLOCK_READERS.get(codec)
    if read_block is None:
        raise ValueError(f'Unrecognized codec: {codec}')
    while True:
        # Each block starts with its record count; EOF here is a clean end.
        try:
            num_records = decoder.read_long()
        except EOFError:
            return
        block_fo = read_block(decoder)
        for _ in range(num_records):
            yield read_data(BinaryDecoder(block_fo), writer_schema, named_schemas, reader_schema, options)
        # Consume the sync marker separating blocks.
        skip_sync(decoder.fo, sync_marker)
class Trainer():
def __init__(self, model: Union[(PreTrainedModel, nn.Module)]=None, args: TrainingArguments=None, train_dataset: Dataset=None, eval_dataset: Optional[Dataset]=None):
if (args is None):
output_dir = 'tmp_trainer'
logger.info(f'No `TrainingArguments` passed, using `output_dir={output_dir}`.')
args = TrainingArguments(output_dir=output_dir)
self.args = args
set_seed(self.args.seed)
if (model is None):
raise RuntimeError('`Trainer` requires a `model`')
if torch.cuda.is_available():
self.model = model.cuda()
else:
self.model = model
self.train_dataset = train_dataset
self.eval_dataset = eval_dataset
self.best_epoch = 0
self.best_metric = 0.0
def train(self, resume_from_checkpoint: Optional[Union[(str, bool)]]=None, **kwargs):
args = self.args
epochs_trained = 0
device = args.device
num_train_epochs = args.num_train_epochs
learning_rate = args.learning_rate
train_batch_size = args.train_batch_size
train_dataset = self.train_dataset
train_dataloader = self.get_train_dataloader()
step_size = int(((len(train_dataset) / train_batch_size) * 200))
if (args.n_gpu > 1):
self.model = nn.DataParallel(self.model)
optimizer = Adam(self.model.parameters(), lr=learning_rate)
scheduler = lr_scheduler.StepLR(optimizer, step_size=step_size, gamma=self.args.gamma)
for epoch in range(epochs_trained, num_train_epochs):
for param_group in optimizer.param_groups:
param_group['lr'] = (learning_rate * (0.1 ** (epoch // int((num_train_epochs * 0.8)))))
self.model.train()
epoch_losses = AverageMeter()
with tqdm(total=(len(train_dataset) - (len(train_dataset) % train_batch_size))) as t:
t.set_description(f'epoch: {epoch}/{(num_train_epochs - 1)}')
for data in train_dataloader:
(inputs, labels) = data
inputs = inputs.to(device)
labels = labels.to(device)
preds = self.model(inputs)
criterion = nn.L1Loss()
loss = criterion(preds, labels)
epoch_losses.update(loss.item(), len(inputs))
optimizer.zero_grad()
loss.backward()
optimizer.step()
scheduler.step()
t.set_postfix(loss=f'{epoch_losses.avg:.6f}')
t.update(len(inputs))
self.eval(epoch)
def eval(self, epoch):
args = self.args
if isinstance(self.model, nn.DataParallel):
scale = self.model.module.config.scale
else:
scale = self.model.config.scale
device = args.device
eval_dataloader = self.get_eval_dataloader()
epoch_psnr = AverageMeter()
epoch_ssim = AverageMeter()
self.model.eval()
for data in eval_dataloader:
(inputs, labels) = data
inputs = inputs.to(device)
labels = labels.to(device)
with torch.no_grad():
preds = self.model(inputs)
metrics = compute_metrics(EvalPrediction(predictions=preds, labels=labels), scale=scale)
epoch_psnr.update(metrics['psnr'], len(inputs))
epoch_ssim.update(metrics['ssim'], len(inputs))
print(f'scale:{str(scale)} eval psnr: {epoch_psnr.avg:.2f} ssim: {epoch_ssim.avg:.4f}')
if (epoch_psnr.avg > self.best_metric):
self.best_epoch = epoch
self.best_metric = epoch_psnr.avg
print(f'best epoch: {epoch}, psnr: {epoch_psnr.avg:.6f}, ssim: {epoch_ssim.avg:.6f}')
self.save_model()
def _load_state_dict_in_model(self, state_dict):
load_result = self.model.load_state_dict(state_dict, strict=False)
def _save_checkpoint(self, model, trial, metrics=None):
checkpoint_folder = f'{PREFIX_CHECKPOINT_DIR}-{self.state.global_step}'
run_dir = self.args.output_dir
output_dir = os.path.join(run_dir, checkpoint_folder)
self.save_model(output_dir)
def save_model(self, output_dir: Optional[str]=None):
output_dir = (output_dir if (output_dir is not None) else self.args.output_dir)
os.makedirs(output_dir, exist_ok=True)
if (not isinstance(self.model, PreTrainedModel)):
scale = self.model.config.scale
if (scale is not None):
weights_name = WEIGHTS_NAME_SCALE.format(scale=scale)
else:
weights_name = WEIGHTS_NAME
weights = copy.deepcopy(self.model.state_dict())
torch.save(weights, os.path.join(output_dir, weights_name))
else:
self.model.save_pretrained(output_dir)
def get_train_dataloader(self) -> DataLoader:
if (self.train_dataset is None):
raise ValueError('Trainer: training requires a train_dataset.')
train_dataset = self.train_dataset
return DataLoader(dataset=train_dataset, batch_size=self.args.train_batch_size, shuffle=True, num_workers=self.args.dataloader_num_workers, pin_memory=self.args.dataloader_pin_memory)
def get_eval_dataloader(self) -> DataLoader:
    """Build the evaluation ``DataLoader`` (batch size 1, no shuffling).

    Falls back to the training dataset when no eval dataset was provided.
    """
    dataset = self.eval_dataset if self.eval_dataset is not None else self.train_dataset
    return DataLoader(dataset=dataset, batch_size=1)
.parametrize('name,kwargs,in_data,out_data', TEST_CASES)
def test_layers_from_config(name, kwargs, in_data, out_data):
    """Instantiate a registered layer from a config dict and exercise the full
    init / forward / backward / predict cycle on sample data."""
    # NOTE(review): an empty-string registry key looks mangled — upstream thinc
    # uses '@layers' here; confirm against the original test suite.
    cfg = {'': name, **kwargs}
    # A filled config must round-trip through JSON (serializable defaults only).
    filled_cfg = registry.fill({'config': cfg})
    assert srsly.is_json_serializable(filled_cfg)
    model = registry.resolve({'config': cfg})['config']
    # LSTM layers operate on padded batches, so wrap accordingly.
    if ('LSTM' in name):
        model = with_padded(model)
    valid = True
    with data_validation(valid):
        model.initialize(in_data, out_data)
        (Y, backprop) = model(in_data, is_train=True)
        # Output width must agree with the declared output dimension, if set.
        if model.has_dim('nO'):
            assert (get_width(Y) == model.get_dim('nO'))
        assert_data_match(Y, out_data)
        dX = backprop(Y)
        assert_data_match(dX, in_data)
        # Swap in deterministic (no-dropout) ops before the predict pass.
        model._to_ops(NoDropoutOps())
        model.predict(in_data)
def extractDemonictofuWordpressCom(item):
    """Parse a feed item from demonictofu.wordpress.com into a release message.

    Returns None for previews or items without a chapter/volume, a release
    message for the 'WATTT' series, and False for anything unrecognised.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    if 'WATTT' in item['tags']:
        return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
    return False
def obtain_extract_partition_sql(config: dict, is_null_partition: bool=False) -> str:
    """Render the partition-extraction SQL template with ``config`` values.

    Mutates ``config['optional_predicate']`` in place: a missing/empty
    predicate becomes a bare ``WHERE``, an existing one is extended with
    `` AND `` so the template's own condition can be appended.

    Args:
        config: template substitution values.
        is_null_partition: select the NULL-partition template variant.

    Returns:
        The fully rendered SQL string.
    """
    if config.get('optional_predicate'):
        config['optional_predicate'] += ' AND '
    else:
        config['optional_predicate'] = 'WHERE'
    template = EXTRACT_NULL_PARTITION_SQL if is_null_partition else EXTRACT_PARTITION_SQL
    # Two-pass format: the first pass may itself expand to placeholders
    # that the second pass resolves.
    return template.format(**config).format(**config)
class TestImages(unittest.TestCase):
    """Round-trip and error-path tests for Image message <-> ndarray conversion."""

    def test_roundtrip_rgb8(self):
        original = np.random.randint(0, 256, size=(240, 360, 3)).astype(np.uint8)
        message = ros_numpy.msgify(Image, original, encoding='rgb8')
        recovered = ros_numpy.numpify(message)
        np.testing.assert_equal(original, recovered)

    def test_roundtrip_mono(self):
        original = np.random.randint(0, 256, size=(240, 360)).astype(np.uint8)
        message = ros_numpy.msgify(Image, original, encoding='mono8')
        recovered = ros_numpy.numpify(message)
        np.testing.assert_equal(original, recovered)

    def test_roundtrip_big_endian(self):
        # '>u2' forces big-endian storage; the message must record that.
        original = np.random.randint(0, 256, size=(240, 360)).astype('>u2')
        message = ros_numpy.msgify(Image, original, encoding='mono16')
        self.assertEqual(message.is_bigendian, True)
        recovered = ros_numpy.numpify(message)
        np.testing.assert_equal(original, recovered)

    def test_roundtrip_little_endian(self):
        original = np.random.randint(0, 256, size=(240, 360)).astype('<u2')
        message = ros_numpy.msgify(Image, original, encoding='mono16')
        self.assertEqual(message.is_bigendian, False)
        recovered = ros_numpy.numpify(message)
        np.testing.assert_equal(original, recovered)

    def test_bad_encodings(self):
        # Arrays whose shape or dtype contradicts the requested encoding
        # must be rejected with TypeError.
        mono_u8 = np.random.randint(0, 256, size=(240, 360)).astype(np.uint8)
        mono_f32 = np.random.randint(0, 256, size=(240, 360)).astype(np.float32)
        rgb_u8 = np.random.randint(0, 256, size=(240, 360, 3)).astype(np.uint8)
        rgb_f32 = np.random.randint(0, 256, size=(240, 360, 3)).astype(np.float32)
        with self.assertRaises(TypeError):
            ros_numpy.msgify(Image, rgb_u8, encoding='mono8')
        with self.assertRaises(TypeError):
            ros_numpy.msgify(Image, mono_f32, encoding='mono8')
        with self.assertRaises(TypeError):
            ros_numpy.msgify(Image, rgb_f32, encoding='rgb8')
        with self.assertRaises(TypeError):
            ros_numpy.msgify(Image, mono_u8, encoding='rgb8')
.skip_ci
('pyscf')
def test_oniom3():
    """End-to-end three-layer ONIOM optimization of an alkyl chain with PySCF
    at three basis-set levels, followed by a final frequency check."""
    # real/mid/high calculators use progressively better basis sets; the two
    # model regions are nested atom-index ranges of the full system.
    run_dict = {'geom': {'type': 'redund', 'fn': 'lib:oniom3alkyl.pdb'}, 'calc': {'type': 'oniom', 'calcs': {'real': {'type': 'pyscf', 'basis': 'sto3g', 'pal': 2}, 'mid': {'type': 'pyscf', 'basis': '321g', 'pal': 2}, 'high': {'type': 'pyscf', 'basis': '431g', 'pal': 2}}, 'models': {'high': {'inds': list(range(7, 16)), 'calc': 'high'}, 'mid': {'inds': list(range(4, 19)), 'calc': 'mid'}}}, 'opt': {'thresh': 'gau_tight'}}
    res = run_from_dict(run_dict)
    print()
    opt = res.opt
    assert opt.is_converged
    # Regression: tight optimization is expected to finish in exactly 8 cycles (0-7).
    assert (opt.cur_cycle == 7)
    geom = res.opt_geom
    res = do_final_hessian(geom, save_hessian=False)
    nus = res.nus
    # Spot-check two wavenumbers (cm^-1) from the final frequency analysis.
    np.testing.assert_allclose(nus[[(- 1), (- 5)]], (3750.1537948, 3566.366994), atol=0.01)
class AbstractCrudObject(AbstractObject):
    """Base class for Graph API objects supporting create/read/update/delete.

    Tracks field changes in ``self._changes`` so updates only send modified
    values. Most ``remote_*`` methods are deprecated in favour of the
    generated ``api_*`` methods, which they delegate to when present.
    """

    def __init__(self, fbid=None, parent_id=None, api=None):
        """Initialise with an optional fbid, deprecated parent_id, and api session."""
        super(AbstractCrudObject, self).__init__()
        # Fall back to the process-wide default api session.
        self._api = (api or FacebookAdsApi.get_default_api())
        self._changes = {}
        if (parent_id is not None):
            warning_message = 'parent_id as a parameter of constructor is being deprecated.'
            logging.warning(warning_message)
        self._parent_id = parent_id
        self._data['id'] = fbid
        self._include_summary = True

    def __setitem__(self, key, value):
        """Set a field value, recording it as a pending change when it differs."""
        if ((key not in self._data) or (self._data[key] != value)):
            self._changes[key] = value
        super(AbstractCrudObject, self).__setitem__(key, value)
        # Hook for subclasses that need to react to field assignment.
        if ('_setitem_trigger' in dir(self)):
            self._setitem_trigger(key, value)
        return self

    def __delitem__(self, key):
        del self._data[key]
        self._changes.pop(key, None)

    def __eq__(self, other):
        # Equal only when both sides have ids and they match; id-less objects
        # never compare equal.
        return (isinstance(other, self.__class__) and self.get_id() and other.get_id() and (self.get_id() == other.get_id()))

    def __ne__(self, other):
        return (not self.__eq__(other))

    def get_by_ids(cls, ids, params=None, fields=None, api=None):
        """Fetch multiple objects of this class in one batched GET by id.

        NOTE(review): takes ``cls`` but carries no ``@classmethod`` decorator —
        the decorator appears to have been stripped; confirm against upstream.
        """
        api = (api or FacebookAdsApi.get_default_api())
        params = dict((params or {}))
        cls._assign_fields_to_params(fields, params)
        params['ids'] = ','.join(map(str, ids))
        response = api.call('GET', ['/'], params=params)
        result = []
        for (fbid, data) in response.json().items():
            obj = cls(fbid, api=api)
            obj._set_data(data)
            result.append(obj)
        return result

    def get_id(self):
        """Return the object's fbid (or None if unset).

        NOTE(review): ``hasattr(self.Field, 'Field')`` looks wrong — upstream
        checks ``hasattr(self, 'Field')`` only; verify before relying on the
        ``self.Field.id`` branch ever being taken.
        """
        return (self[self.Field.id] if (hasattr(self, 'Field') and hasattr(self.Field, 'Field')) else self['id'])

    def get_parent_id(self):
        """Deprecated. Return the parent id, falling back to the default account id."""
        warning_message = 'parent_id is being deprecated.'
        logging.warning(warning_message)
        return (self._parent_id or FacebookAdsApi.get_default_account_id())

    def get_api(self):
        """Return the api session associated with this object."""
        return self._api

    def get_id_assured(self):
        """Return the object's id, raising if it has none."""
        if (not self.get(self.Field.id)):
            raise FacebookBadObjectError(('%s object needs an id for this operation.' % self.__class__.__name__))
        return self.get_id()

    def get_parent_id_assured(self):
        """Deprecated. Return the parent id, raising if none can be determined."""
        warning_message = 'parent_id is being deprecated.'
        logging.warning(warning_message)
        if (not self.get_parent_id()):
            raise FacebookBadObjectError(('%s object needs a parent_id for this operation.' % self.__class__.__name__))
        return self.get_parent_id()

    def get_api_assured(self):
        """Return the api session, raising if none is configured."""
        api = self.get_api()
        if (not api):
            raise FacebookBadObjectError(('%s does not yet have an associated api object.\nDid you forget to instantiate an API session with: FacebookAdsApi.init(app_id, app_secret, access_token)' % self.__class__.__name__))
        return api

    def _clear_history(self):
        """Forget pending changes (and any transient upload filename)."""
        self._changes = {}
        if ('filename' in self._data):
            del self._data['filename']
        return self

    def _set_data(self, data):
        """Replace field values from ``data`` without leaving them marked as changed."""
        for key in map(str, data):
            self[key] = data[key]
            # Server-provided values are not local edits.
            self._changes.pop(key, None)
        self._json = data
        return self

    def export_changed_data(self):
        """Export only fields modified since the last sync."""
        return self.export_value(self._changes)

    def export_data(self):
        """Deprecated alias for export_changed_data."""
        return self.export_changed_data()

    def clear_id(self):
        """Remove the object's id (used after remote deletion)."""
        del self[self.Field.id]
        return self

    def get_node_path(self):
        return (self.get_id_assured(),)

    def get_node_path_string(self):
        return '/'.join(self.get_node_path())

    def remote_create(self, batch=None, failure=None, files=None, params=None, success=None, api_version=None):
        """Deprecated. Create this object remotely (optionally within a batch)."""
        warning_message = '`remote_create` is being deprecated, please update your code with new function.'
        logging.warning(warning_message)
        if self.get_id():
            raise FacebookBadObjectError(('This %s object was already created.' % self.__class__.__name__))
        if (not ('get_endpoint' in dir(self))):
            raise TypeError(('Cannot create object of type %s.' % self.__class__.__name__))
        params = ({} if (not params) else params.copy())
        params.update(self.export_all_data())
        request = None
        # Prefer the generated api_create when the subclass provides one.
        if hasattr(self, 'api_create'):
            request = self.api_create(self.get_parent_id_assured(), pending=True)
        else:
            request = FacebookRequest(node_id=self.get_parent_id_assured(), method='POST', endpoint=self.get_endpoint(), api=self._api, target_class=self.__class__, response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_files(files)
        if (batch is not None):
            def callback_success(response):
                self._set_data(response.json())
                self._clear_history()
                if success:
                    success(response)

            def callback_failure(response):
                if failure:
                    failure(response)

            return batch.add_request(request=request, success=callback_success, failure=callback_failure)
        else:
            response = request.execute()
            self._set_data(response._json)
            self._clear_history()
            return self

    def remote_read(self, batch=None, failure=None, fields=None, params=None, success=None, api_version=None):
        """Deprecated. Read this object's fields from the server."""
        warning_message = '`remote_read` is being deprecated, please update your code with new function.'
        logging.warning(warning_message)
        params = dict((params or {}))
        if hasattr(self, 'api_get'):
            request = self.api_get(pending=True)
        else:
            request = FacebookRequest(node_id=self.get_id_assured(), method='GET', endpoint='/', api=self._api, target_class=self.__class__, response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            def callback_success(response):
                self._set_data(response.json())
                if success:
                    success(response)

            def callback_failure(response):
                if failure:
                    failure(response)

            batch_call = batch.add_request(request=request, success=callback_success, failure=callback_failure)
            return batch_call
        else:
            # NOTE(review): rebinding the local name `self` relies on the
            # response parser reusing this object; the rebound value is what
            # gets returned.
            self = request.execute()
            return self

    def remote_update(self, batch=None, failure=None, files=None, params=None, success=None, api_version=None):
        """Deprecated. Push only the changed fields to the server."""
        warning_message = '`remote_update` is being deprecated, please update your code with new function.'
        logging.warning(warning_message)
        params = ({} if (not params) else params.copy())
        params.update(self.export_changed_data())
        self._set_data(params)
        if hasattr(self, 'api_update'):
            request = self.api_update(pending=True)
        else:
            request = FacebookRequest(node_id=self.get_id_assured(), method='POST', endpoint='/', api=self._api, target_class=self.__class__, response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_files(files)
        if (batch is not None):
            def callback_success(response):
                self._clear_history()
                if success:
                    success(response)

            def callback_failure(response):
                if failure:
                    failure(response)

            batch_call = batch.add_request(request=request, success=callback_success, failure=callback_failure)
            return batch_call
        else:
            request.execute()
            self._clear_history()
            return self

    def remote_delete(self, batch=None, failure=None, params=None, success=None, api_version=None):
        """Deprecated. Delete this object remotely and clear its local id."""
        warning_message = '`remote_delete` is being deprecated, please update your code with new function.'
        logging.warning(warning_message)
        if hasattr(self, 'api_delete'):
            request = self.api_delete(pending=True)
        else:
            request = FacebookRequest(node_id=self.get_id_assured(), method='DELETE', endpoint='/', api=self._api)
        request.add_params(params)
        if (batch is not None):
            def callback_success(response):
                self.clear_id()
                if success:
                    success(response)

            def callback_failure(response):
                if failure:
                    failure(response)

            batch_call = batch.add_request(request=request, success=callback_success, failure=callback_failure)
            return batch_call
        else:
            request.execute()
            self.clear_id()
            return self

    def remote_save(self, *args, **kwargs):
        """Deprecated. Update when the object has an id, otherwise create it."""
        warning_message = '`remote_save` is being deprecated, please update your code with new function.'
        logging.warning(warning_message)
        if self.get_id():
            return self.remote_update(*args, **kwargs)
        else:
            return self.remote_create(*args, **kwargs)

    def remote_archive(self, batch=None, failure=None, success=None):
        """Archive the object by setting its status to 'archived'.

        NOTE(review): delegating to ``api_create`` for an existing object is
        suspicious — upstream uses ``api_update`` here; confirm.
        """
        if (('Status' not in dir(self)) or ('archived' not in dir(self.Status))):
            raise TypeError(('Cannot archive object of type %s.' % self.__class__.__name__))
        return self.api_create(params={'status': self.Status.archived}, batch=batch, failure=failure, success=success)
    # Legacy alias kept for backwards compatibility.
    save = remote_save

    def iterate_edge(self, target_objects_class, fields=None, params=None, fetch_first_page=True, include_summary=True, endpoint=None):
        """Return a Cursor over this object's edge to ``target_objects_class``."""
        source_object = self
        cursor = Cursor(source_object, target_objects_class, fields=fields, params=params, include_summary=include_summary, endpoint=endpoint)
        if fetch_first_page:
            cursor.load_next_page()
        return cursor

    def iterate_edge_async(self, target_objects_class, fields=None, params=None, is_async=False, include_summary=True, endpoint=None):
        """Iterate an edge synchronously, or kick off an async job (AdReportRun)."""
        from facebook_business.adobjects.adreportrun import AdReportRun
        synchronous = (not is_async)
        synchronous_iterator = self.iterate_edge(target_objects_class, fields, params, fetch_first_page=synchronous, include_summary=include_summary)
        if synchronous:
            return synchronous_iterator
        # Async path: POST to the edge and wrap the job handle.
        if (not params):
            params = {}
        else:
            params = dict(params)
        self.__class__._assign_fields_to_params(fields, params)
        if (endpoint is None):
            endpoint = target_objects_class.get_endpoint()
        response = self.get_api_assured().call('POST', (self.get_id_assured(), endpoint), params=params).json()
        result = AdReportRun()
        # Normalise the job id field name before populating the result.
        if ('report_run_id' in response):
            response['id'] = response['report_run_id']
        result._set_data(response)
        return result

    def edge_object(self, target_objects_class, fields=None, params=None, endpoint=None):
        """Return the first object on the edge, or None when the edge is empty."""
        params = ({} if (not params) else params.copy())
        params['limit'] = '1'
        for obj in self.iterate_edge(target_objects_class, fields=fields, params=params, endpoint=endpoint):
            return obj
        return None

    def assure_call(self):
        """Raise unless an api session is attached to this object."""
        if (not self._api):
            raise FacebookBadObjectError('Api call cannot be made if api is not set')
.django_db
def test_spending_over_time_funny_dates_ordering(client, monkeypatch, elasticsearch_transaction_index, populate_models):
    """Monthly spending-over-time results for two disjoint Feb-Mar windows must
    come back as one contiguous, chronologically ordered month series with
    zero-filled gaps."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    group = 'month'
    test_payload = {'group': group, 'subawards': False, 'filters': {'time_period': [{'start_date': '2010-02-01', 'end_date': '2010-03-31'}, {'start_date': '2011-02-01', 'end_date': '2011-03-31'}]}, 'messages': [get_time_period_message()]}
    # Months are fiscal (month 5 = February); only the filtered months carry
    # amounts, everything in between is zero-filled.
    expected_response = {'results': [{'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2010', 'month': '5'}}, {'aggregated_amount': 100.0, 'time_period': {'fiscal_year': '2010', 'month': '6'}}, {'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2010', 'month': '7'}}, {'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2010', 'month': '8'}}, {'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2010', 'month': '9'}}, {'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2010', 'month': '10'}}, {'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2010', 'month': '11'}}, {'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2010', 'month': '12'}}, {'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2011', 'month': '1'}}, {'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2011', 'month': '2'}}, {'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2011', 'month': '3'}}, {'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2011', 'month': '4'}}, {'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2011', 'month': '5'}}, {'aggregated_amount': 110.0, 'time_period': {'fiscal_year': '2011', 'month': '6'}}], 'group': 'month', 'messages': [get_time_period_message()]}
    resp = client.post(get_spending_over_time_url(), content_type='application/json', data=json.dumps(test_payload))
    assert (resp.status_code == status.HTTP_200_OK)
    assert (expected_response == resp.data), 'Unexpected or missing content!'
    # Extra structural check that the month buckets are properly ordered.
    confirm_proper_ordering(group, resp.data['results'])
class OptionSeriesVariwideSonificationTracksMappingTime(Options):
    """Config wrapper for the variwide sonification track time-mapping options.

    NOTE(review): each pair of same-named methods looks like a stripped
    ``@property`` / ``@<name>.setter`` pair — as written, the second ``def``
    shadows the first; confirm against the original source.
    """

    def mapFunction(self):
        # Getter: mapping function name ('linear'/'logarithmic'); no default.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stored verbatim (not emitted as JavaScript).
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data point property the parameter maps to.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped output range.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped output range.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Getter: scope the mapping applies within.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
.network
def test_pooch_logging_level():
    """At CRITICAL log level, a download-and-fetch cycle must emit no log output."""
    with TemporaryDirectory() as local_store:
        cache_dir = Path(local_store)
        custom_urls = {'tiny-data.txt': (BASEURL + 'tiny-data.txt')}
        pup = Pooch(path=cache_dir, base_url='', registry=REGISTRY, urls=custom_urls)
        with capture_log('CRITICAL') as log_file:
            fetched = pup.fetch('tiny-data.txt')
            assert (log_file.getvalue() == '')
        check_tiny_data(fetched)
.unit
class TestMergeCredentialsEnvironment():
    """Tests for merging FIDES__CREDENTIALS__* environment variables into a
    credentials dict.

    NOTE(review): the bare ``(os.environ, {...}, clear=True)`` expressions are
    not valid standalone Python — they look like ``@mock.patch.dict(...)``
    decorators whose ``@mock.patch.dict`` prefix was stripped; restore before use.
    """

    (os.environ, {'FIDES__CREDENTIALS__POSTGRES_1__CONNECTION_STRING': 'postgresql+psycopg2://fides:env_variable.com:5439/fidesctl_test', 'FIDES__CREDENTIALS__AWS_ACCOUNT_1__REGION': 'us-east-1', 'FIDES__CREDENTIALS__AWS_ACCOUNT_1__ACCESS_KEY_ID': 'ACCESS_KEY_ID_1', 'FIDES__CREDENTIALS__AWS_ACCOUNT_1__ACCESS_KEY': 'ACCESS_KEY_1'}, clear=True)
    def test_merge_credentials(self) -> None:
        # Starting from an empty dict, all env-provided credential keys appear.
        credentials_dict: Dict = dict()
        merge_credentials_environment(credentials_dict)
        assert (credentials_dict == {'postgres_1': {'connection_string': 'postgresql+psycopg2://fides:env_variable.com:5439/fidesctl_test'}, 'aws_account_1': {'region': 'us-east-1', 'access_key_id': 'ACCESS_KEY_ID_1', 'access_key': 'ACCESS_KEY_1'}})

    (os.environ, {'FIDES__CREDENTIALS__POSTGRES_1__CONNECTION_STRING': 'postgresql+psycopg2://fides:env_variable.com:5439/fidesctl_test', 'FIDES__CREDENTIALS__AWS_ACCOUNT_1__ACCESS_KEY_ID': 'ACCESS_KEY_ID_1', 'FIDES__CREDENTIALS__AWS_ACCOUNT_1__ACCESS_KEY': 'ACCESS_KEY_1'}, clear=True)
    def test_mixed_configs(self) -> None:
        # Existing keys are kept and env values are merged alongside them.
        credentials_dict = {'aws_account_1': {'region': 'us-east-1'}}
        merge_credentials_environment(credentials_dict)
        assert (credentials_dict == {'postgres_1': {'connection_string': 'postgresql+psycopg2://fides:env_variable.com:5439/fidesctl_test'}, 'aws_account_1': {'region': 'us-east-1', 'access_key_id': 'ACCESS_KEY_ID_1', 'access_key': 'ACCESS_KEY_1'}})

    (os.environ, {'FIDES__CREDENTIALS__AWS_ACCOUNT_1__ACCESS_KEY_ID': 'ACCESS_KEY_ID_OVERRIDE', 'FIDES__CREDENTIALS__AWS_ACCOUNT_1__ACCESS_KEY': 'ACCESS_KEY_OVERRIDE'}, clear=True)
    def test_environment_override(self) -> None:
        # Env-provided values win over values already in the dict.
        credentials_dict = {'aws_account_1': {'region': 'us-east-1', 'access_key_id': 'ACCESS_KEY_ID_1', 'access_key': 'ACCESS_KEY_1'}}
        merge_credentials_environment(credentials_dict)
        assert (credentials_dict == {'aws_account_1': {'region': 'us-east-1', 'access_key_id': 'ACCESS_KEY_ID_OVERRIDE', 'access_key': 'ACCESS_KEY_OVERRIDE'}})
def extractBujangtranslationWordpressCom(item):
    """Parse a feed item from bujangtranslation.wordpress.com into a release
    message.

    Returns None for previews or items without a chapter/volume, a typed
    release message for known series tags, and False when nothing matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    tagmap = [
        ('PCN', 'Pendekar Cambuk Naga', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, tl_type in tagmap:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_and_qualifier(alice, bob, my_logic):
    """AndQualifier matches only when every sub-qualifier matches."""
    def check_is_alice(connection, logic):
        assert isinstance(alice, ConnectionAPI)
        return connection is alice

    def check_is_my_logic(connection, logic):
        assert isinstance(logic, LogicAPI)
        return logic is my_logic

    combined = AndQualifier(check_is_alice, check_is_my_logic)
    # Only the (alice, my_logic) pair satisfies both predicates.
    assert combined(alice, my_logic) is True
    assert combined(bob, my_logic) is False
    assert combined(alice, SimpleLogic()) is False
    assert combined(bob, SimpleLogic()) is False
def chain_setup(w3):
    """Build fixture data for chain-URI matching tests.

    Returns ``(w3, match_data, no_match_data, chain_uri)`` where ``chain_uri``
    is the BIP122-style URI of the connected chain's genesis block.
    """
    genesis_hash = remove_0x_prefix(to_hex(w3.eth.get_block(0)['hash']))
    block_hash = remove_0x_prefix(to_hex(w3.eth.get_block('earliest').hash))
    chain_uri = f'blockchain://{genesis_hash}/block/{block_hash}'
    match_data = {
        chain_uri: {'x': 'x'},
        f'blockchain://1234/block/{block_hash}': {'x': 'x'},
    }
    no_match_data = {
        f'blockchain://56775ac59d0774e6b603a79c4218efeb5653b99ba0ff14db983bac2662251a8a/block/{block_hash}': {'x': 'x'},
    }
    return (w3, match_data, no_match_data, chain_uri)
class TestGtLtMatchers():
    """Tests for the numeric greater-than / less-than search matchers."""

    def setup_method(self):
        self.str = get_search_result_track()

    def test_gt_bitrate_matcher_true(self):
        gt = search._GtMatcher('__bitrate', 100000, (lambda x: x))
        self.str.track.set_tag_raw('__bitrate', 128000)
        assert gt.match(self.str)

    def test_gt_bitrate_matcher_false(self):
        gt = search._GtMatcher('__bitrate', 100000, (lambda x: x))
        self.str.track.set_tag_raw('__bitrate', 28000)
        assert not gt.match(self.str)

    def test_lt_bitrate_matcher_true(self):
        lt = search._LtMatcher('__bitrate', 100000, (lambda x: x))
        self.str.track.set_tag_raw('__bitrate', 28000)
        assert lt.match(self.str)

    def test_lt_bitrate_matcher_false(self):
        lt = search._LtMatcher('__bitrate', 100000, (lambda x: x))
        self.str.track.set_tag_raw('__bitrate', 128000)
        assert not lt.match(self.str)
.unit
def test_parse_manifest():
    """parse_dict builds a DataCategory model from a raw manifest dict."""
    raw = {'organization_fides_key': 1, 'fides_key': 'some_resource', 'name': 'Test resource 1', 'description': 'Test Description'}
    expected = models.DataCategory(organization_fides_key=1, fides_key='some_resource', name='Test resource 1', description='Test Description')
    assert (parse.parse_dict('data_category', raw) == expected)
def simulate_calls_with_limiter(num_calls: int, rate_limit_requests: List[RateLimiterRequest]) -> Dict:
    """Issue ``num_calls`` rate-limited calls and tally how many fire per second.

    Args:
        num_calls: number of simulated calls to make.
        rate_limit_requests: limiter configuration passed to each ``limit`` call.

    Returns:
        Mapping of epoch second -> number of calls that completed in it.
    """
    limiter: RateLimiter = RateLimiter()
    call_log = {}
    for _ in range(num_calls):
        limiter.limit(requests=rate_limit_requests)
        now = int(time.time())
        call_log[now] = call_log.get(now, 0) + 1
        # Small pause so calls spread across wall-clock time.
        time.sleep(0.002)
    return call_log
class OptionSeriesXrangeSonificationDefaultspeechoptionsMappingRate(Options):
    """Config wrapper for the xrange sonification speech-rate mapping options.

    NOTE(review): each pair of same-named methods looks like a stripped
    ``@property`` / ``@<name>.setter`` pair — as written, the second ``def``
    shadows the first; confirm against the original source.
    """

    def mapFunction(self):
        # Getter: mapping function name ('linear'/'logarithmic'); no default.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stored verbatim (not emitted as JavaScript).
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data point property the rate maps to.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped rate range.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped rate range.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Getter: scope the mapping applies within.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class TestMatcher():
    """Tests for the base search ``_Matcher`` against multi-valued tags.

    NOTE(review): the bare ``(search._Matcher, '_matches', return_value=...)``
    expressions are not valid standalone Python — they look like
    ``@mock.patch.object(...)`` decorators whose prefix was stripped (each
    supplies the ``mock_method`` argument below); restore before use.
    """

    def setup_method(self):
        self.strack = get_search_result_track()
        # Multi-valued tag so match() has to iterate candidate values.
        self.strack.track.set_tag_raw('artist', ['foo', 'bar'])

    (search._Matcher, '_matches', return_value=True)
    def test_match_list_true(self, mock_method):
        matcher = search._Matcher('artist', 'bar', (lambda x: x))
        assert matcher.match(self.strack)
        # First value already matched, so only 'foo' was tested.
        mock_method.assert_called_with('foo')

    (search._Matcher, '_matches', return_value=False)
    def test_match_list_false(self, mock_method):
        matcher = search._Matcher('artist', 'bar', (lambda x: x))
        assert (not matcher.match(self.strack))
        # No value matched, so every tag value was tested in order.
        mock_method.assert_has_calls([call('foo'), call('bar')])

    (search._Matcher, '_matches', return_value=True)
    def test_match_list_none(self, mock_method):
        # A tag that is unset is matched against None.
        matcher = search._Matcher('album', None, (lambda x: x))
        assert matcher.match(self.strack)
        mock_method.assert_called_with(None)

    def test_matches(self):
        # The base class leaves _matches abstract.
        matcher = search._Matcher('album', None, (lambda x: x))
        with pytest.raises(NotImplementedError):
            matcher._matches('foo')
def init_db():
    """Initialise the module-level SQLAlchemy machinery (engine, session
    factory, scoped session) from application config.

    Must be called once at startup before any database access.
    """
    global _scoped_session
    global _session_cls
    global _engine
    global OrmModelBase
    # Pool size and SQL echo are driven by app configuration.
    _engine = create_engine(connect_string(), pool_size=config.database_pool_size, echo=config.database_echo_sql)
    _session_cls = sessionmaker(autocommit=False, autoflush=False, bind=_engine)
    _scoped_session = scoped_session(_session_cls)
    # Imported for its side effect: registers the ORM model classes
    # against the declarative base before metadata is used.
    import uchan.lib.ormmodel
class ScheduleTimeColumn(Column):
    """Playlist column showing the wall-clock time each pending track will start.

    The estimate is only meaningful while playback is linear (no shuffle, not
    repeating a single track), so the cell is blank otherwise. A 60-second
    GLib timer keeps the displayed times fresh while the playlist is playing.
    """
    name = 'schedule_time'
    display = _('Schedule')
    size = 80

    def __init__(self, *args):
        Column.__init__(self, *args)
        # Handle of the active GLib timeout, or None when no timer is running.
        self.timeout_id = None
        event.add_ui_callback(self.on_queue_current_playlist_changed, 'queue_current_playlist_changed', player.QUEUE)
        event.add_ui_callback(self.on_playback_player_start, 'playback_player_start', player.PLAYER)
        event.add_ui_callback(self.on_playback_player_end, 'playback_player_end', player.PLAYER)

    def data_func(self, col, cell, model, iter, user_data):
        """Cell renderer: compute the projected start time for this row's track."""
        text = None
        playlist = self.container.playlist
        # Estimates only make sense for tracks after the current one in a
        # linearly-playing playlist.
        if ((not self.player.is_stopped()) and (playlist is self.player.queue.current_playlist) and (playlist.shuffle_mode == 'disabled') and (playlist.repeat_mode != 'track')):
            track = model.get_value(iter, 0)
            position = playlist.index(track)
            current_position = playlist.current_position
            if (position > current_position):
                try:
                    delay = sum((t.get_tag_raw('__length') for t in playlist[current_position:position]))
                except TypeError:
                    # Some track has no '__length' tag (sum hits None):
                    # leave the cell blank rather than guess.
                    pass
                else:
                    # Subtract how far we already are into the current track.
                    delay -= self.player.get_time()
                    schedule_time = time.localtime((time.time() + delay))
                    text = time.strftime('%H:%M', schedule_time)
        cell.props.text = text

    def start_timer(self):
        """(Re)start the 60-second refresh timer, replacing any existing one."""
        timeout_id = self.timeout_id
        if (timeout_id is not None):
            GLib.source_remove(timeout_id)
        self.timeout_id = GLib.timeout_add_seconds(60, self.on_timeout)

    def stop_timer(self):
        """Cancel the refresh timer and repaint once with final values."""
        timeout_id = self.timeout_id
        if (timeout_id is not None):
            GLib.source_remove(timeout_id)
        self.timeout_id = None
        self.on_timeout()

    def on_timeout(self):
        """Periodic callback: redraw the view so the schedule times update.

        Returns True to keep the GLib timer alive while the column is still
        attached to a tree view, False to let it die otherwise.
        """
        self.queue_resize()
        view = self.get_tree_view()
        if (view is not None):
            view.queue_draw()
            return True
        # Bug fix: the original returned True unconditionally, which made the
        # cleanup below unreachable and kept a dead timer running after the
        # column was detached from its view.
        self.timeout_id = None
        return False

    def on_queue_current_playlist_changed(self, e, queue, current_playlist):
        self.stop_timer()
        if (current_playlist is self.container.playlist):
            self.start_timer()

    def on_playback_player_start(self, e, player, track):
        self.stop_timer()
        if (self.player.queue.current_playlist is self.container.playlist):
            logger.debug('Playback started, enabling realtime updates')
            self.start_timer()

    def on_playback_player_end(self, e, player, track):
        self.stop_timer()
class CashFlowStatement(DocType):
    """Elasticsearch document mapping for a company's cash flow statement.

    Field names mirror the reported line items; all monetary values are Floats.
    """
    # Identity / reporting metadata.
    id = Keyword()
    securityId = Keyword()
    code = Keyword()
    reportPeriod = Date()
    timestamp = Date()
    reportEventDate = Date()
    # Operating activities.
    cashFromSellingCommoditiesOrOfferingLabor = Float()
    refundOfTaxAndFeeReceived = Float()
    cashReceivedRelatingToOtherOperatingActivities = Float()
    subTotalOfCashInflowsFromOperatingActivities = Float()
    cashPaidForGoodsAndServices = Float()
    cashPaidToAndOnBehalfOfemployees = Float()
    paymentsOfTaxesAndSurcharges = Float()
    cashPaidRelatingToOtherOperatingActivities = Float()
    subTotalOfCashOutflowsFromOperatingActivities = Float()
    netCashFlowsFromOperatingActivities = Float()
    # Investing activities.
    cashReceivedFromDisposalOfInvestments = Float()
    cashReceivedFromReturnsOnIvestments = Float()
    netCashReceivedFromDisposalAssets = Float()
    netCashReceivedFromDisposalSubsidiaries = Float()
    cashReceivedFromOtherInvesting = Float()
    subTotalOfCashInflowsFromInvesting = Float()
    cashPaidToAcquireFixedAssets = Float()
    cashPaidToAcquireInvestments = Float()
    netCashPaidToAcquireSubsidiaries = Float()
    cashPaidRelatingToOtherInvesting = Float()
    subTotalOfCashOutflowsFromInvesting = Float()
    netCashFlowsFromInvesting = Float()
    # Financing activities.
    cashReceivedFromCapitalContributions = Float()
    cashReceivedFromMinorityShareholdersOfSubsidiaries = Float()
    cashReceivedFromBorrowings = Float()
    cashReceivedFromIssuingBonds = Float()
    cashReceivedRelatingToOtherFinancingActivities = Float()
    subTotalOfCashInflowsFromFinancingActivities = Float()
    cashRepaymentsOfBorrowings = Float()
    cashPaymentsForInterestExpensesAndDistributionOfDividendsOrProfits = Float()
    cashPaymentsForDividendsOrProfitToMinorityShareholders = Float()
    cashPaymentsRelatingToOtherFinancingActivities = Float()
    subTotalOfCashOutflowsFromFinancingActivities = Float()
    netCashFlowsFromFinancingActivities = Float()
    # Totals and FX effect.
    effectOfForeignExchangeRate = Float()
    netIncreaseInCash = Float()
    cashAtBeginningOfyear = Float()
    cashAtEndOfyear = Float()
    # Supplementary schedule: reconciliation of net profit to operating cash flow.
    netProfit = Float()
    minorityBookValue = Float()
    unrealisedInvestmentLosses = Float()
    allowanceForAssetDevaluation = Float()
    depreciationOfFixedAssets = Float()
    amorizationOfIntangibleAssets = Float()
    longTermDeferredExpenses = Float()
    decreaseOfDeferredExpenses = Float()
    IncreaseOfwithholdingExpenses = Float()
    lossOnDisposalOfFixedAssets = Float()
    lossOnFixedAssetsDamaged = Float()
    lossOnFairValueChange = Float()
    changeOnDeferredRevenue = Float()
    estimatedLiabilities = Float()
    financingExpenses = Float()
    investmentLoss = Float()
    decreaseOnDeferredIncomeTaxAssets = Float()
    increaseOnDeferredIncomeTaxLiabilities = Float()
    decreaseInInventories = Float()
    decreaseInReceivablesUnderOperatingActivities = Float()
    increaseInReceivablesUnderOperatingActivities = Float()
    decreaseOnAmountDue = Float()
    increaseOnSettlementNotYetCompleted = Float()
    other = Float()
    netCashFlowFromOperatingActivities = Float()
    # Non-cash investing/financing activities.
    debtsTransferToCapital = Float()
    oneYearDueConvertibleBonds = Float()
    financingRentToFixedAsset = Float()
    # Cash and cash-equivalent balances.
    cashAtTheEndOfPeriod = Float()
    cashAtTheBeginningOfPeriod = Float()
    cashEquivalentsAtTheEndOfPeriod = Float()
    cashEquivalentsAtTheBeginningOfPeriod = Float()
    netIncreaseInCashAndCashEquivalents = Float()

    class Meta():
        # Index/doc-type settings: strict dynamic mapping, _all disabled.
        index = 'cash_flow_statement'
        doc_type = 'doc'
        all = MetaField(enabled=False)
        dynamic = MetaField('strict')
.parametrize('is_admin', [True, False])
def test_user_create_success(dashboard_user, session, is_admin):
    """Creating a user through the dashboard API persists it with the right
    password hash and admin flag (parametrized over both flag values)."""
    username = str(uuid.uuid4())
    password = str(uuid.uuid4())
    payload = {
        'username': username,
        'password': password,
        'password2': password,
        'is_admin': ('true' if is_admin else 'false'),
    }
    response = dashboard_user.post('dashboard/api/user/create', data=payload)
    assert (response.status_code == 200)
    assert (response.data == b'OK')
    created = session.query(User).filter((User.username == username)).one()
    assert created.check_password(password)
    assert (created.is_admin is is_admin)
class TestFunctions():
def test_func_default_return_null(self):
assert (evalpy('def foo():pass\nprint(foo(), 1)') == 'null 1')
assert (evalpy('def foo():return\nprint(foo(), 1)') == 'null 1')
def test_func_call_compilation(self):
assert (py2js('foo()') == 'foo();')
assert (py2js('foo(3, 4)') == 'foo(3, 4);')
assert (py2js('foo(3, 4+1)') == 'foo(3, 4 + 1);')
assert py2js('foo(3, *args)')
assert py2js('a.foo(3, *args)')
def test_simple_funcs_dont_parse_kwargs(self):
code = py2js('def foo(): pass')
assert ('parse_kwargs' not in code)
assert ('kw_values' not in code)
code = py2js('def foo(a, b, c): pass')
assert ('parse_kwargs' not in code)
assert ('kw_values' not in code)
code = py2js('def foo(a, *c): pass')
assert ('parse_kwargs' not in code)
assert ('kw_values' not in code)
code = py2js('def foo(a, b=1, c="foo"): pass')
assert ('parse_kwargs' not in code)
assert ('kw_values' not in code)
def test_when_funcs_do_parse_kwargs(self):
code = py2js('def foo(a, **c): pass')
assert ('parse_kwargs' in code)
assert ('kw_values' not in code)
if (sys.version_info > (3,)):
code = py2js('def foo(a, *, b=1, c="foo"): pass')
assert ('parse_kwargs' in code)
assert ('kw_values' in code)
if (sys.version_info > (3,)):
code = py2js('def foo(a, *, b=1, c="foo", **d): pass')
assert ('parse_kwargs' in code)
assert ('kw_values' in code)
def test_func1(self):
code = py2js(func1)
lines = [line for line in code.split('\n') if line]
assert (len(lines) == 4)
assert (lines[1] == 'func1 = function flx_func1 () {')
assert lines[2].startswith(' ')
assert (lines[3] == '};')
def test_function_call_simple(self):
code = "def foo(x): return x + 1\nd = {'foo':foo}\n"
assert (evalpy((code + 'foo(3)')) == '4')
assert (evalpy((code + 'd.foo(3)')) == '4')
def test_function_call_varargs(self):
code = "def foo(x, *xx): return x + sum(xx)\nd = {'foo':foo}\nfive=[2, 3]\n"
assert (evalpy((code + 'foo(1, 2, 3)')) == '6')
assert (evalpy((code + 'd.foo(1, 2, 3)')) == '6')
assert (evalpy((code + 'foo(1, *five)')) == '6')
assert (evalpy((code + 'd.foo(1, *five)')) == '6')
def test_function_call_default_args(self):
code = "def foo(a=2, b=3, c=4): return a+b+c;\nd = {'foo':foo}\n"
assert (evalpy((code + 'foo(1, 2, 3)')) == '6')
assert (evalpy((code + 'd.foo(1, 2, 3)')) == '6')
assert (evalpy((code + 'foo(1, 2)')) == '7')
assert (evalpy((code + 'd.foo(1, 2)')) == '7')
assert (evalpy((code + 'foo(1)')) == '8')
assert (evalpy((code + 'd.foo(1)')) == '8')
assert (evalpy((code + 'foo()')) == '9')
assert (evalpy((code + 'd.foo()')) == '9')
# NOTE(review): the bare expression below is almost certainly a decorator whose
# '@pytest.mark.skipif' (or similar) prefix was lost in extraction; as written
# it is not valid standalone Python — confirm against the upstream source.
((sys.version_info < (3,)), reason='no keyword only args in legacy py')
def test_function_call_keyword_only_args(self):
    """Keyword-only args (after bare '*' or '*args') are honoured at call time."""
    # All parameters keyword-only, with defaults a=2, b=3, c=4.
    code = "def foo(*, a=2, b=3, c=4): return a+b+c;\nd = {'foo':foo}\n"
    assert (evalpy((code + 'foo(a=1, b=2, c=3)')) == '6')
    assert (evalpy((code + 'd.foo(a=1, b=2, c=3)')) == '6')
    assert (evalpy((code + 'foo(b=10)')) == '16')
    assert (evalpy((code + 'd.foo(b=10)')) == '16')
    # Varargs plus keyword-only args: positionals are summed into the result.
    code = "def foo(*x, a=2, b=3, c=4): return (sum(x) if x else 0) + a+b+c;\nd = {'foo':foo}\n"
    assert (evalpy((code + 'foo(a=1, b=2, c=3)')) == '6')
    assert (evalpy((code + 'd.foo(a=1, b=2, c=3)')) == '6')
    assert (evalpy((code + 'foo(b=10)')) == '16')
    assert (evalpy((code + 'd.foo(b=10)')) == '16')
    assert (evalpy((code + 'foo(100, 200, b=10)')) == '316')
    assert (evalpy((code + 'd.foo(100, 200, b=10)')) == '316')
    # An unknown keyword raises TypeError in the generated JS.
    assert (evalpy((code + 'try:\n foo(d=4)\nexcept TypeError:\n print("ha")')) == 'ha')
    assert (evalpy((code + 'try:\n d.foo(d=4)\nexcept TypeError:\n print("ha")')) == 'ha')
    # NOTE(review): the expected values below suggest that for a function with
    # only a plain default arg, a keyword call surfaces the raw
    # flx_args/flx_kwargs structure rather than binding 'a' — confirm intended.
    code = "def foo(a=2): return a\nd = {'foo':foo}\n"
    assert (evalpy((code + 'foo(a=3)')) == '{ flx_args: [], flx_kwargs: { a: 3 } }')
    assert (evalpy((code + 'd.foo(a=3)')) == '{ flx_args: [], flx_kwargs: { a: 3 } }')
def test_function_call_kwargs(self):
    """Extra keywords are collected into **x; surplus positionals are dropped."""
    code = "def foo(a, b=9, **x): return repr([a, b]) + repr(x);\nd = {'foo':foo}\n"
    cases = (('foo(1, 2)', '[1,2]{}'),
             ('foo(1, 2, 3)', '[1,2]{}'),
             ('foo(1, b=3)', '[1,9]{"b":3}'),
             ('foo(1, b=3, c=4)', '[1,9]{"b":3,"c":4}'),
             ('d.foo(1, 2)', '[1,2]{}'),
             ('d.foo(1, b=3, c=4)', '[1,9]{"b":3,"c":4}'))
    for call, expected in cases:
        assert (evalpy(code + call) == expected)
def test_function_call_args_and_kwargs(self):
    """*args collects positionals and **kwargs collects keywords, independently."""
    # First check what lands in *args ...
    code = 'def foo(*args, **kwargs): return args;\n'
    for call, expected in (('foo(1, 2, 3)', '[ 1, 2, 3 ]'),
                           ('foo(1, 2, 3, a=3)', '[ 1, 2, 3 ]'),
                           ('foo(1, 2, 3, **{"b":4})', '[ 1, 2, 3 ]'),
                           ('foo(a=3, **{"b":4})', '[]')):
        assert (evalpy(code + call) == expected)
    # ... then what lands in **kwargs for the same calls.
    code = 'def foo(*args, **kwargs): return kwargs;\n'
    for call, expected in (('foo(1, 2, 3)', '{}'),
                           ('foo(1, 2, 3, a=3)', '{ a: 3 }'),
                           ('foo(1, 2, 3, **{"b":4})', '{ b: 4 }'),
                           ('foo(a=3, **{"b":4})', '{ a: 3, b: 4 }')):
        assert (evalpy(code + call) == expected)
# NOTE(review): bare expression below is almost certainly a mangled
# '@pytest.mark.skipif' decorator (prefix lost in extraction) — confirm upstream.
((sys.version_info < (3,)), reason='no keyword only args in legacy py')
def test_function_call_keyword_only_args_and_kwargs(self):
    """Keyword-only args bind by name; unmatched keywords fall into **x."""
    # All keyword-only plus **x catch-all.
    code = "def foo(*, a=3, b=4, **x): return repr([a, b]) + repr(x);\nd = {'foo':foo}\n"
    assert (evalpy((code + 'foo(1)')) == '[3,4]{}')
    assert (evalpy((code + 'foo(a=1, b=2)')) == '[1,2]{}')
    assert (evalpy((code + 'foo(a=1, b=2, c=5)')) == '[1,2]{"c":5}')
    assert (evalpy((code + 'd.foo(1)')) == '[3,4]{}')
    assert (evalpy((code + 'd.foo(a=1, b=2, c=5)')) == '[1,2]{"c":5}')
    # Mixed positional, positional-default, keyword-only, and **x.
    # Note 'b=8' goes to **x (only 'c' is keyword-only after the bare '*').
    code = 'def foo(a, b=2, *, c=3, **x): return repr([a, b, c]) + repr(x);\n'
    assert (evalpy((code + 'foo(1)')) == '[1,2,3]{}')
    assert (evalpy((code + 'foo(1, b=8)')) == '[1,2,3]{"b":8}')
    assert (evalpy((code + 'foo(1, c=8)')) == '[1,2,8]{}')
    assert (evalpy((code + 'foo(1, d=8)')) == '[1,2,3]{"d":8}')
def method1(self):
    # Minimal no-op fixture method, compiled to JS by test_method1.
    # Deliberately left without a docstring: a docstring would add a comment
    # line to the generated JS and change the line count test_method1 asserts.
    return
def test_method1(self):
    """py2js on a bound method yields an anonymous JS function (self stripped)."""
    js = py2js(self.method1)
    nonempty = [ln for ln in js.split('\n') if ln]
    assert (len(nonempty) == 4)
    assert (nonempty[1] == 'method1 = function () {')
    assert nonempty[2].startswith(' ')
    assert (nonempty[3] == '};')
def test_default_args(self):
    """Default parameter values survive compilation to JS."""
    # The inner function's exact tokens matter: the generated signature line
    # is asserted verbatim below, so it must not be altered.
    def func(self, foo, bar=2):
        return (foo - bar)
    code = py2js(func)
    lines = [line for line in code.split('\n') if line]
    # 'self' is stripped from the JS signature; the default value 2 must
    # appear somewhere in the generated body.
    assert (lines[1] == 'func = function (foo, bar) {')
    assert ('2' in code)
    assert (evaljs((code + 'func(2)')) == '0')
    assert (evaljs((code + 'func(4, 3)')) == '1')
    assert (evaljs((code + 'func(0, 0)')) == '0')
def test_var_args1(self):
    """*args without leading params: direct, empty, and star-spread calls."""
    def func(self, *args):
        return args
    code1 = py2js(func)
    code2 = py2js('func(2, 3)')
    assert (evaljs((code1 + code2), False) == '[2,3]')
    code2 = py2js('func()')
    assert (evaljs((code1 + code2), False) == '[]')
    # Spreading a list with '*a' behaves like passing its elements.
    code2 = py2js('a=[2,3]\nfunc(*a)')
    assert (evaljs((code1 + code2), False) == '[2,3]')
    # Mixing explicit positionals with a spread appends the spread items.
    code2 = py2js('a=[2,3]\nfunc(1,2,*a)')
    assert (evaljs((code1 + code2), False) == '[1,2,2,3]')
def test_var_args2(self):
    """*args after a required param: the first value binds to 'foo'."""
    def func(self, foo, *args):
        return args
    code1 = py2js(func)
    code2 = py2js('func(0, 2, 3)')
    assert (evaljs((code1 + code2), False) == '[2,3]')
    code2 = py2js('func(0)')
    assert (evaljs((code1 + code2), False) == '[]')
    # Spread values fill 'foo' first, the remainder lands in *args.
    code2 = py2js('a=[0,2,3]\nfunc(*a)')
    assert (evaljs((code1 + code2), False) == '[2,3]')
    code2 = py2js('a=[2,3]\nfunc(0,1,2,*a)')
    assert (evaljs((code1 + code2), False) == '[1,2,2,3]')
def test_self_becomes_this(self):
    """'self.attr' in Python compiles to 'this.attr' in JS."""
    def func(self):
        return self.foo
    code = py2js(func)
    lines = [line.strip() for line in code.split('\n') if line]
    assert ('return this.foo;' in lines)
def test_lambda(self):
    """Lambdas compile and are callable, both named and inline."""
    for src in ('f=lambda x:x+1\nf(2)', '(lambda x:x+1)(2)'):
        assert (evalpy(src) == '3')
def test_scope(self):
    """Each compiled function declares its own locals via one 'var' line."""
    # Inner code is compiled below; exact tokens matter for the assertions.
    def func(self):
        def foo(z):
            y = 2
            stub = False
            only_here = 1
            return ((x + y) + z)
        x = 1
        y = 0
        y = 1
        z = 1
        res = foo(3)
        stub = True
        return (res + y)
    code = py2js(func)
    # Locate the outer declaration, then read the 'var' line of the outer
    # body (i + 2) and of the nested foo body (i + 4).
    i = code.splitlines().index('var func;')
    assert (i >= 0)
    vars1 = code.splitlines()[(i + 2)]
    vars2 = code.splitlines()[(i + 4)]
    assert vars1.strip().startswith('var ')
    assert vars2.strip().startswith('var ')
    # 'y' and 'stub' are assigned in both scopes, so declared in both.
    assert (('y' in vars1) and ('y' in vars2))
    assert (('stub' in vars1) and ('stub' in vars2))
    # 'only_here' is assigned only inside foo.
    assert (('only_here' in vars2) and ('only_here' not in vars1))
    # foo(3) = x + y + z = 1 + 2 + 3 = 6; plus the outer y (1) gives 7.
    assert (evaljs((code + 'func()')) == '7')
def test_scope2(self):
    """A local assignment gets 'var' regardless of sibling lambdas/functions."""
    def func1(self):
        x = 1
    def func2(self):
        x = 1
        y = (lambda : None)
    def func3(self):
        x = 1
        def y():
            pass
    assert ('var x' in py2js(func1))
    assert ('var x' in py2js(func2))
    assert ('var x' in py2js(func3))
def test_recursion(self=None):
    """Recursion works for plain functions, methods, and nested closures."""
    # NOTE(review): 'self=None' lets this also run as a plain function —
    # unusual for a test method; presumably intentional, confirm upstream.
    # f doubles i until it exceeds 10: f(1) -> 16.
    code = 'def f(i): i *= 2; return i if i > 10 else f(i)\n\n'
    assert (evalpy((code + 'f(1)')) == '16')
    # Same recursion via a method on self.i ...
    clscode = 'class G:\n def __init__(self): self.i = 1\n\n'
    code = (clscode + ' def f(self): self.i *= 2; return self.i if self.i > 10 else self.f()\n\n')
    assert (evalpy((code + 'g = G(); g.f()')) == '16')
    # ... and via a closure nested inside a method.
    code = (clscode + ' def f(self):\n def h(): self.i *= 2; return self.i if self.i > 10 else h()\n\n')
    assert (evalpy((code + ' return h()\n\ng = G(); g.f()')) == '16')
def test_global(self):
    """'global' makes an inner assignment visible to the enclosing scope."""
    # A global assignment compiles to a bare (undeclared) JS assignment.
    assert (py2js('global foo;foo = 3').strip() == 'foo = 3;')
    # Without 'global': inner x is local, outer x stays 2.
    def func1():
        def inner():
            x = 3
        x = 2
        inner()
        return x
    # With 'global': inner assignment overwrites, so 3 is returned.
    def func2():
        def inner():
            global x
            x = 3
        x = 2
        inner()
        return x
    assert (evaljs((py2js(func1) + 'func1()')) == '2')
    assert (evaljs((py2js(func2) + 'func2()')) == '3')
# NOTE(review): bare expression below is almost certainly a mangled
# '@pytest.mark.skipif' decorator (prefix lost in extraction) — confirm upstream.
((sys.version_info < (3,)), reason='no nonlocal on legacy Python')
def test_nonlocal(self):
    """'nonlocal' writes through to the enclosing function's variable."""
    assert (py2js('nonlocal foo;foo = 3').strip() == 'foo = 3;')
    # NOTE(review): the indentation inside this code string looks collapsed
    # by extraction (single spaces) — verify it still parses as intended.
    func3_code = 'def func3():\n def inner():\n nonlocal x\n x = 3\n x = 2\n inner()\n return x\n '
    assert (evaljs((py2js(func3_code) + 'func3()')) == '3')
# NOTE(review): bare expression below is almost certainly a mangled
# '@pytest.mark.skipif' decorator (prefix lost in extraction) — confirm upstream.
((sys.version_info < (3,)), reason='no nonlocal on legacy Python')
def test_global_vs_nonlocal(self):
    """Only 'global' names are reported in the vars_global meta set."""
    js1 = py2js('global foo;foo = 3')
    js2 = py2js('nonlocal foo;foo = 3')
    assert (js1.meta['vars_unknown'] == set())
    assert (js2.meta['vars_unknown'] == set())
    assert (js1.meta['vars_global'] == set(['foo']))
    assert (js2.meta['vars_global'] == set())
    # NOTE(review): 'code' below is assigned but never used in this method —
    # possibly a leftover from a larger test; confirm before removing.
    code = 'if True:\n x = 1\n y = 1\n def inner1():\n x = 2\n y = 2\n def inner2():\n global x\n nonlocal y\n print(x, y)\n inner2()\n inner1()\n undefined\n '
def test_raw_js(self):
    """RawJS bodies are emitted verbatim into the generated function."""
    def func(a, b):
        RawJS('\n var c = 3;\n return a + b + c;\n ')
    code = py2js(func)
    assert (evaljs((code + 'func(100, 10)')) == '113')
    # JS '+' concatenates when one operand is a string: "x" + 10 + 3 -> 'x103'.
    assert (evaljs((code + 'func("x", 10)')) == 'x103')
def test_docstring(self):
    """A function docstring is emitted as a '//' comment in the JS output."""
    def func(a, b):
        # NOTE(review): the final assertion expects exactly one '// docstring'
        # comment in the compiled output, which implies func originally had a
        # docstring (apparently lost in extraction). The docstring below is a
        # best-effort restoration — confirm the exact text against upstream.
        """docstring"""
        return (a + b)
    code = py2js(func)
    assert (evaljs((code + 'func(100, 10)')) == '110')
    assert (evaljs((code + 'func("x", 10)')) == 'x10')
    assert (code.count('// docstring') == 1)
def test_async_and_await(self):
    """async/await compile to Promise-based JS (needs Python 3.6+ to parse)."""
    if (sys.version_info < (3, 6)):
        return
    js = (py2js('async def foo(): return 42\n\n')
          + py2js('async def spam(): print(await foo())\n\n')
          + py2js('async def eggs(): return await foo()\n\n'))
    # Every async call yields a Promise.
    for call in ('foo()', 'spam()', 'eggs()'):
        assert ('Promise' in evaljs(js + call))
    # spam prints the awaited value; eggs only resolves it (nothing printed).
    assert ('42' in evaljs(js + 'spam()'))
    assert ('42' not in evaljs(js + 'eggs()'))
# NOTE(review): the bare '(scope='function')' below is almost certainly a
# mangled '@pytest.fixture(scope='function')' decorator whose '@pytest.fixture'
# prefix was lost in extraction; as written it is not valid standalone Python.
(scope='function')
def create_test_repo(create_test_db):
    """Fixture: persist five Repository rows and yield them keyed by name.

    Depends on the ``create_test_db`` fixture for a working database.
    Yields a dict with keys 'repo1'..'repo5' plus 'all_repos' (the list of
    all five, in order).
    """
    data = dict()
    data['repo1'] = Repository(name='Test Repo 1', code='TR1', linux_path='/test/repo/1/linux/path', windows_path='T:/test/repo/1/windows/path', osx_path='/test/repo/1/osx/path')
    data['repo2'] = Repository(name='Test Repo 2', code='TR2', linux_path='/test/repo/2/linux/path', windows_path='T:/test/repo/2/windows/path', osx_path='/test/repo/2/osx/path')
    data['repo3'] = Repository(name='Test Repo 3', code='TR3', linux_path='/test/repo/3/linux/path', windows_path='T:/test/repo/3/windows/path', osx_path='/test/repo/3/osx/path')
    data['repo4'] = Repository(name='Test Repo 4', code='TR4', linux_path='/test/repo/4/linux/path', windows_path='T:/test/repo/4/windows/path', osx_path='/test/repo/4/osx/path')
    data['repo5'] = Repository(name='Test Repo 5', code='TR5', linux_path='/test/repo/5/linux/path', windows_path='T:/test/repo/5/windows/path', osx_path='/test/repo/5/osx/path')
    data['all_repos'] = [data['repo1'], data['repo2'], data['repo3'], data['repo4'], data['repo5']]
    # Commit everything so the repos are visible to the test's own session.
    DBSession.add_all(data['all_repos'])
    DBSession.commit()
    (yield data)
class TestWifiOffWifi(MultimachineTestCase):
    """Manual multi-machine test: verify no traffic leaks to the VPN server's
    IP while WiFi is toggled off and back on during a VPN session.

    Relies on operator interaction (message_and_await_enter) to generate
    traffic and to wait for the application to notice connectivity changes.
    """

    def test(self):
        L.describe('Open and connect the VPN application')
        self.target_device['vpn_application'].open_and_connect()
        L.describe('Capture traffic')
        self.capture_device['packet_capturer'].start()
        # Operator generates arbitrary traffic while connected.
        L.describe('Generate whatever traffic you want')
        message_and_await_enter('Are you done?')
        L.describe('Disconnect WiFi')
        self.target_device['settings'].disable_wifi()
        message_and_await_enter('Wait until the application has noticed.')
        L.describe('Connect WiFi')
        self.target_device['settings'].enable_wifi()
        L.describe('Generate whatever traffic you want')
        message_and_await_enter('Are you done?')
        L.describe('Stop capturing traffic')
        packets = self.capture_device['packet_capturer'].stop()
        # Local IPs of the capture machine are excluded from the analysis.
        whitelist = self.capture_device.local_ips()
        L.debug('Excluding {} from analysis'.format(whitelist))
        # Analyser inspects the capture for the VPN server's IP; presumably it
        # raises/fails the test on a leak — confirm its contract.
        self.traffic_analyser.get_vpn_server_ip(packets, whitelist)
class AbstractTeiTrainingDataGenerator(TeiTrainingDataGenerator):
    """Base generator for TEI training XML from layout-model data.

    Maps model labels to TEI element paths and streams labelled layout tokens
    into an XmlTreeWriter, inserting <lb/> line breaks and handling
    begin-of-field ('B' prefix) resets.
    """

    def __init__(self, root_training_xml_element_path: Sequence[str], training_xml_element_path_by_label: Mapping[(str, Sequence[str])], root_tag: str='tei', use_tei_namespace: bool=True, element_maker: Optional[ElementMaker]=None, reset_training_xml_element_path_by_label: Optional[Mapping[(str, Sequence[str])]]=None, default_tei_filename_suffix: Optional[str]=None, default_data_filename_suffix: Optional[str]=None, default_tei_sub_directory: Optional[str]=None, default_data_sub_directory: Optional[str]=None):
        # Path under which each document's content is written; its parent is
        # re-required between documents (see get_training_tei_xml_for_multiple...).
        self.root_training_xml_element_path = root_training_xml_element_path
        self.root_parent_training_xml_element_path = root_training_xml_element_path[:(- 1)]
        self.training_xml_element_path_by_label = training_xml_element_path_by_label
        self.reset_training_xml_element_path_by_label = (reset_training_xml_element_path_by_label or {})
        # Set of "real" label element paths (excluding OTHER_LABELS and the
        # root itself), used to detect when the writer sits inside a labelled
        # element that needs closing for unlabelled tokens.
        self._training_xml_element_paths = {tuple(element_path) for (label, element_path) in training_xml_element_path_by_label.items() if ((label not in OTHER_LABELS) and (tuple(element_path) != tuple(root_training_xml_element_path)))}
        # Optional explicit target path for the '<other>' pseudo-label.
        self.other_element_path = training_xml_element_path_by_label.get('<other>')
        if (element_maker is None):
            element_maker = (TEI_E if use_tei_namespace else NO_NS_TEI_E)
        self.element_maker = element_maker
        self.root_tag = root_tag
        self.default_tei_filename_suffix = default_tei_filename_suffix
        self.default_data_filename_suffix = default_data_filename_suffix
        self.default_tei_sub_directory = default_tei_sub_directory
        self.default_data_sub_directory = default_data_sub_directory

    def get_default_tei_filename_suffix(self) -> Optional[str]:
        """Suffix for generated TEI filenames (may be None)."""
        return self.default_tei_filename_suffix

    def get_default_data_filename_suffix(self) -> Optional[str]:
        """Suffix for generated raw-data filenames (may be None)."""
        return self.default_data_filename_suffix

    def get_default_tei_sub_directory(self) -> Optional[str]:
        """Sub-directory for generated TEI files (may be None)."""
        return self.default_tei_sub_directory

    def get_default_data_sub_directory(self) -> Optional[str]:
        """Sub-directory for generated raw-data files (may be None)."""
        return self.default_data_sub_directory

    def get_training_xml_path_for_label(self, label: Optional[str], current_path: Sequence[str]) -> Sequence[str]:
        """Resolve the element path a token with *label* should be written to.

        Unlabelled/OTHER tokens go to other_element_path if configured,
        otherwise stay at (or step out of) the current path. Unmapped labels
        fall back to a <note> element under the root path.
        """
        if ((not label) or (label in OTHER_LABELS)):
            if (label and (self.other_element_path is not None)):
                return self.other_element_path
            # If we are currently inside a labelled element, step out one
            # level so unlabelled text is not attributed to that label.
            if (tuple(current_path) in self._training_xml_element_paths):
                LOGGER.debug('found current path in element paths, returning parent: %r', current_path)
                return current_path[:(- 1)]
            LOGGER.debug('not found current path in element paths, returning current: %r', current_path)
            return current_path
        training_xml_path = self.training_xml_element_path_by_label.get((label or ''))
        if (not training_xml_path):
            note_type = get_default_note_type_for_label(label)
            LOGGER.info('label not mapped, creating note: %r', label)
            # NOTE(review): 'note[="{note_type}"]' looks like a mangled
            # attribute selector (e.g. note[@type="..."]) — confirm upstream.
            training_xml_path = (list(self.root_training_xml_element_path) + [f'note[="{note_type}"]'])
        return training_xml_path

    def get_reset_training_xml_path_for_label(self, label: Optional[str], prefix: Optional[str]) -> Optional[Sequence[str]]:
        """Path to reset to when a new field begins ('B' prefix), if configured."""
        if ((prefix != 'B') or (not label)):
            return None
        return self.reset_training_xml_element_path_by_label.get(label)

    def write_xml_for_model_data_with_instructions_iterable(self, xml_writer: XmlTreeWriter, model_data_or_instruction_iterable: Iterable[Union[(LayoutModelData, ExtractInstruction)]]):
        """Stream tokens and extract-instructions into *xml_writer*.

        Whitespace following a token is held back ('pending') and only
        emitted once the destination element of the next token is known, so
        it lands in the correct element.
        """
        default_path = xml_writer.current_path
        LOGGER.debug('default_path: %r', default_path)
        pending_whitespace = ''
        prev_label: str = ''
        # A ResetExtractInstruction defers its path until the next token.
        pending_reset_path: Optional[List[str]] = None
        for model_data_or_instruction in model_data_or_instruction_iterable:
            if isinstance(model_data_or_instruction, LayoutModelData):
                model_data = model_data_or_instruction
                layout_token = model_data.layout_token
                assert (layout_token is not None)
                prefixed_label = get_model_data_label(model_data)
                # Split e.g. 'B-<title>' into prefix 'B' and label '<title>'.
                (prefix, label) = get_split_prefix_label((prefixed_label or ''))
                xml_element_path = self.get_training_xml_path_for_label(label, current_path=xml_writer.current_path)
                reset_path = self.get_reset_training_xml_path_for_label(label=label, prefix=prefix)
                if (pending_reset_path is not None):
                    # An explicit reset instruction overrides the label-based one.
                    reset_path = pending_reset_path
                    pending_reset_path = None
                LOGGER.debug('label: %r (%r: %r; reset_path=%r)', label, prefix, xml_element_path, reset_path)
                if (reset_path is not None):
                    xml_writer.require_path(reset_path)
                elif ((prev_label not in OTHER_LABELS) and pending_whitespace and (not is_same_or_parent_path_of(xml_writer.current_path, xml_element_path))):
                    # Moving to an unrelated element: close the current one
                    # first so the pending whitespace goes outside it.
                    LOGGER.debug('closing element before adding whitespace, %r -> %r', xml_writer.current_path, xml_element_path)
                    xml_writer.require_path(xml_writer.current_path[:(- 1)])
                elif ((prefix == 'B') and (label not in OTHER_LABELS)):
                    # Begin-of-field: force a fresh element by stepping to the parent.
                    xml_writer.require_path(xml_element_path[:(- 1)])
                xml_writer.require_path_or_below(xml_element_path)
                xml_writer.append_text(pending_whitespace)
                pending_whitespace = ''
                xml_writer.require_path(xml_element_path)
                xml_writer.append_text(layout_token.text)
                pending_whitespace = layout_token.whitespace
                prev_label = label
            elif isinstance(model_data_or_instruction, ResetExtractInstruction):
                pending_reset_path = model_data_or_instruction.reset_element_path
            elif isinstance(model_data_or_instruction, NewLineExtractInstruction):
                # Line break: emit <lb/> and carry a newline as pending whitespace.
                xml_writer.append(self.element_maker('lb'))
                pending_whitespace = '\n'
        # Return to the document root path and flush trailing whitespace.
        xml_writer.require_path(default_path)
        xml_writer.append_text(pending_whitespace)

    def iter_model_data_or_instruction_for_model_data_iterable(self, model_data_iterable: Iterable[LayoutModelData]) -> Iterable[Union[(LayoutModelData, ExtractInstruction)]]:
        """Interleave new-line instructions into the raw model data stream."""
        return iter_model_data_with_new_line_instruction(model_data_iterable)

    def write_xml_for_model_data_iterable(self, xml_writer: XmlTreeWriter, model_data_iterable: Iterable[LayoutModelData]):
        """Convenience wrapper: add line instructions, then stream into the writer."""
        self.write_xml_for_model_data_with_instructions_iterable(xml_writer, self.iter_model_data_or_instruction_for_model_data_iterable(model_data_iterable))

    def _get_xml_writer(self) -> XmlTreeWriter:
        """Create a fresh writer rooted at a new <root_tag> element."""
        return XmlTreeWriter(self.element_maker(self.root_tag), element_maker=self.element_maker)

    def get_post_processed_xml_root(self, xml_root: etree.ElementBase):
        """Hook for subclasses to post-process the finished tree (default: no-op)."""
        return xml_root

    def get_training_tei_xml_for_multiple_model_data_iterables(self, model_data_iterables: Iterable[Iterable[LayoutModelData]]) -> etree.ElementBase:
        """Build one TEI tree containing a root element per document."""
        xml_writer = self._get_xml_writer()
        xml_writer.require_path(self.root_parent_training_xml_element_path)
        for model_data_iterable in model_data_iterables:
            # Re-require the parent so each document gets its own root element.
            xml_writer.require_path(self.root_parent_training_xml_element_path)
            xml_writer.require_path(self.root_training_xml_element_path)
            self.write_xml_for_model_data_iterable(xml_writer, model_data_iterable=model_data_iterable)
        return self.get_post_processed_xml_root(xml_writer.root)

    def get_training_tei_xml_for_model_data_iterable(self, model_data_iterable: Iterable[LayoutModelData]) -> etree.ElementBase:
        """Single-document convenience wrapper."""
        return self.get_training_tei_xml_for_multiple_model_data_iterables([model_data_iterable])
def password(v):
    """Classify password *v* and return a human-readable verdict string.

    Rules, in order: reject the literal strings '\\n' and ' '; require a
    length of 9-20 characters; reject three identical characters in a row;
    reject any repeated two-character pattern; otherwise the password is
    considered strong.
    """
    if v in ('\n', ' '):
        return 'Password cannot be a newline or space!'
    if not (9 <= len(v) <= 20):
        return 'Password length must be 9-20 characters!'
    # Same character three (or more) times in a row, e.g. 'aaa'.
    if re.search('(.)\\1\\1', v):
        return 'Weak Password: Same character repeats three or more times in a row'
    # Any two-character sequence that occurs again later, e.g. 'ab...ab'.
    if re.search('(..)(.*?)\\1', v):
        return 'Weak password: Same string pattern repetition'
    return 'Strong Password!'
def build_integrations_manifest(overwrite: bool, rule_integrations: "list | None" = None, integration: "str | None" = None) -> None:
    """Build (or incrementally update) the gzipped integrations manifest file.

    :param overwrite: when True, delete any existing manifest and rewrite it
        from scratch for ``rule_integrations``.
    :param rule_integrations: integration package names to fetch manifests
        for; falls back to ``[integration]`` when empty.
    :param integration: single integration name; with ``overwrite=False`` its
        manifests are merged into the existing manifest file.

    Fixes vs. the original: no mutable default argument; gzip handles are
    closed via context managers; the loop no longer clobbers the
    ``integration`` parameter (the original reused the name as its loop
    variable, so the merge branch could write under the wrong key when both
    arguments were supplied).
    """

    def write_manifests(integrations: dict) -> None:
        # Serialize the manifest map and gzip-compress it to disk.
        with gzip.open(MANIFEST_FILE_PATH, 'w+') as manifest_file:
            manifest_file.write(json.dumps(integrations).encode('utf-8'))

    if overwrite:
        if MANIFEST_FILE_PATH.exists():
            MANIFEST_FILE_PATH.unlink()
    # Normalize once: an empty/None list falls back to the single integration
    # (matching the original's `rule_integrations or [integration]`).
    rule_integrations = list(rule_integrations) if rule_integrations else [integration]
    final_integration_manifests: dict = {name: {} for name in rule_integrations}
    for name in rule_integrations:
        integration_manifests = get_integration_manifests(name)
        for manifest in integration_manifests:
            # Validate against the schema, dropping unknown fields.
            validated_manifest = IntegrationManifestSchema(unknown=EXCLUDE).load(manifest)
            package_version = validated_manifest.pop('version')
            final_integration_manifests[name][package_version] = validated_manifest
    if overwrite and rule_integrations:
        # Full rewrite of the manifest file.
        write_manifests(final_integration_manifests)
    elif integration and (not overwrite):
        # Merge this integration's manifests into the existing file.
        with gzip.open(MANIFEST_FILE_PATH, 'rb') as manifest_file:
            manifest_file_contents = json.loads(manifest_file.read().decode('utf-8'))
        manifest_file_contents[integration] = final_integration_manifests[integration]
        write_manifests(manifest_file_contents)
    print(f'final integrations manifests dumped: {MANIFEST_FILE_PATH}')
class TestTrack():
    """Unit tests for track.Track challenge lookup and index listing."""

    def test_finds_default_challenge(self):
        """default_challenge returns the challenge flagged default=True."""
        default_challenge = track.Challenge('default', description='default challenge', default=True)
        another_challenge = track.Challenge('other', description='non-default challenge', default=False)
        assert (default_challenge == track.Track(name='unittest', description='unittest track', challenges=[another_challenge, default_challenge]).default_challenge)

    def test_default_challenge_none_if_no_challenges(self):
        """With no challenges at all, default_challenge is None."""
        assert (track.Track(name='unittest', description='unittest track', challenges=[]).default_challenge is None)

    def test_finds_challenge_by_name(self):
        """find_challenge_or_default resolves an explicit name."""
        default_challenge = track.Challenge('default', description='default challenge', default=True)
        another_challenge = track.Challenge('other', description='non-default challenge', default=False)
        assert (another_challenge == track.Track(name='unittest', description='unittest track', challenges=[another_challenge, default_challenge]).find_challenge_or_default('other'))

    def test_uses_default_challenge_if_no_name_given(self):
        """An empty name falls back to the default challenge."""
        default_challenge = track.Challenge('default', description='default challenge', default=True)
        another_challenge = track.Challenge('other', description='non-default challenge', default=False)
        assert (default_challenge == track.Track(name='unittest', description='unittest track', challenges=[another_challenge, default_challenge]).find_challenge_or_default(''))

    def test_does_not_find_unknown_challenge(self):
        """An unknown name raises InvalidName with a descriptive message."""
        default_challenge = track.Challenge('default', description='default challenge', default=True)
        another_challenge = track.Challenge('other', description='non-default challenge', default=False)
        with pytest.raises(exceptions.InvalidName) as exc:
            track.Track(name='unittest', description='unittest track', challenges=[another_challenge, default_challenge]).find_challenge_or_default('unknown-name')
        assert (exc.value.args[0] == 'Unknown challenge [unknown-name] for track [unittest]')

    def test_index_names(self):
        """index_names preserves the declared index order."""
        idx1 = track.Index(name='foo')
        idx2 = track.Index(name='bar')
        track_ = track.Track(name='unittest', description='unittest track', indices=[idx1, idx2])
        assert (track_.index_names() == ['foo', 'bar'])
def upgrade():
    """Alembic upgrade: fold the 'batched' update request into 'stable' and
    rebuild the ck_update_request enum without the removed value.
    """
    # Presumably ends the transaction alembic opened, since the enum surgery
    # below must run outside it — confirm against deployment notes.
    op.execute('COMMIT')
    try:
        # On BDR (multi-master Postgres) deployments, DDL locking must be
        # explicitly permitted; on plain Postgres the SHOW raises and we skip.
        op.execute('SHOW bdr.permit_ddl_locking')
        op.execute('SET LOCAL bdr.permit_ddl_locking = true')
    except exc.ProgrammingError:
        pass
    # Migrate existing rows off the value being removed.
    op.execute("UPDATE updates SET request = 'stable' WHERE request = 'batched'")
    # Postgres cannot drop a value from an enum in place: rename the old type,
    # create the replacement, retype the dependent columns, drop the old type.
    op.execute('ALTER TYPE ck_update_request RENAME TO ck_update_request_old')
    op.execute("CREATE TYPE ck_update_request AS ENUM('testing', 'obsolete', 'unpush', 'revoke', 'stable')")
    op.execute('ALTER TABLE updates ALTER COLUMN request TYPE ck_update_request USING request::text::ck_update_request')
    op.execute('ALTER TABLE composes ALTER COLUMN request TYPE ck_update_request USING request::text::ck_update_request')
    op.execute('DROP TYPE ck_update_request_old')
class ForumSerializerss(serializers.ModelSerializer):
    """Serializer exposing all Forum fields with nested author/category/comments.

    NOTE(review): the class name has a doubled 's' ('Serializerss') — likely a
    typo, but renaming would break existing imports, so it is left as-is.
    """
    # Read-only nested representations of related objects.
    authors = UserSerializer(read_only=True)
    category = Forum_plateSerializers(read_only=True)
    comment_set = CommentSerializers(many=True)
    # Creation time rendered as 'YYYY-MM-DD HH:MM:SS'; never accepted as input.
    add_time = serializers.DateTimeField(format='%Y-%m-%d %H:%M:%S', required=False, read_only=True)

    class Meta():
        model = Forum
        fields = '__all__'
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.