code stringlengths 281 23.7M |
|---|
class Migration(migrations.Migration):
    """Drop the legacy category fields from the ``forum_plate`` model."""

    dependencies = [
        ('forum', '0002_auto__1626'),
    ]

    operations = [
        migrations.RemoveField(model_name='forum_plate', name='category_type'),
        migrations.RemoveField(model_name='forum_plate', name='code'),
        migrations.RemoveField(model_name='forum_plate', name='parent_category'),
    ]
def test_if1(evmtester, branch_results):
    """Branch coverage bookkeeping for the first if-construct."""
    taken = [175, 176]
    fallthrough = [208, 209]
    # Condition true: only the taken branch is recorded under True.
    evmtester.ifBranches(1, True, False, False, False)
    assert taken in branch_results()[True]
    # Condition false: the same span shows under False, and the
    # fall-through span under True.
    evmtester.ifBranches(1, False, False, False, False)
    results = branch_results()
    assert taken in results[False]
    assert fallthrough in results[True]
class GradTTS(nn.Module):
    """Grad-TTS style acoustic model: a thin wrapper around a diffusion
    decoder that builds padding masks and dispatches train/inference steps.
    """

    def __init__(self, model_config):
        super(GradTTS, self).__init__()
        # Diffusion decoder built from the model registry.
        self.diffusion = DIFFUSIONS.build(model_config.diffusion)
        if getattr(model_config, 'gradient_checkpointing', False):
            self.diffusion.denoise_fn.gradient_checkpointing_enable()

    @staticmethod
    def get_mask_from_lengths(lengths, max_len=None):
        """Return a boolean padding mask of shape (batch, max_len);
        True marks padded positions (index >= sequence length).

        Fix: declared as a staticmethod -- the function takes no ``self``
        but is invoked as ``self.get_mask_from_lengths(...)``; without the
        decorator the instance would be bound to ``lengths``.
        """
        batch_size = lengths.shape[0]
        if max_len is None:
            max_len = torch.max(lengths).item()
        ids = torch.arange(0, max_len).unsqueeze(0).expand(batch_size, -1).to(lengths.device)
        return ids >= lengths.unsqueeze(1).expand(-1, max_len)

    def forward_features(self, speakers, contents, contents_lens, contents_max_len, mel=None, mel_lens=None, mel_max_len=None, pitches=None, pitch_shift=None, phones2mel=None, energy=None):
        """Assemble conditioning features and padding masks.

        NOTE(review): mel lengths are jittered by +/-10% when
        ``self.training`` is False -- confirm that inversion is intended.
        """
        src_masks = self.get_mask_from_lengths(contents_lens, contents_max_len)
        if self.training is False:
            mel_lens = mel_lens * (0.9 + 0.2 * torch.rand_like(mel_lens.float()))
            mel_lens = mel_lens.long()
            mel_max_len = torch.max(mel_lens).item()
        mel_masks = self.get_mask_from_lengths(mel_lens, mel_max_len)
        return dict(features=contents, cond_masks=src_masks, x_masks=mel_masks, x_lens=mel_lens)

    def forward(self, speakers, contents, contents_lens, contents_max_len, mel=None, mel_lens=None, mel_max_len=None, pitches=None, pitch_shift=None, phones2mel=None, energy=None):
        """Run a diffusion train step (training) or set up an inference dict."""
        features = self.forward_features(speakers=speakers, contents=contents, contents_lens=contents_lens, contents_max_len=contents_max_len, mel=mel, mel_lens=mel_lens, mel_max_len=mel_max_len, pitches=pitches, pitch_shift=pitch_shift, phones2mel=phones2mel, energy=energy)
        if self.training:
            output_dict = self.diffusion.train_step(features['features'], mel, x_masks=features['x_masks'], cond_masks=features['cond_masks'])
        else:
            output_dict = {'loss': 0.0}
        # Allow the feature stage to contribute auxiliary losses/metrics.
        if 'loss' in features:
            output_dict['loss'] = output_dict['loss'] + features['loss']
        if 'metrics' in features:
            metrics = output_dict.get('metrics', {})
            metrics.update(features['metrics'])
            output_dict['metrics'] = metrics
        # Expose intermediates for downstream consumers (e.g. sampling).
        output_dict['features'] = features['features']
        output_dict['x_masks'] = features['x_masks']
        output_dict['x_lens'] = features['x_lens']
        output_dict['cond_masks'] = features['cond_masks']
        return output_dict
def test_traverse_args():
    """Factory.traverse() yields provider-typed args but not plain values."""
    dep_one = providers.Object('bar')
    dep_two = providers.Object('baz')
    factory = providers.Factory(list, 'foo', dep_one, dep_two)
    traversed = list(factory.traverse())
    # 'foo' is a plain value and must not be traversed.
    assert len(traversed) == 2
    assert dep_one in traversed
    assert dep_two in traversed
def extractKaede721WordpressCom(item):
    """Parse a kaede721.wordpress.com feed item into a release message.

    Returns None for previews or unparseable titles, False when no known
    tag matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('Hakata Tonkotsu Ramens', 'Hakata Tonkotsu Ramens', 'translated'),
        ('htr', 'Hakata Tonkotsu Ramens', 'translated'),
        ('durarara x hakata tonkotsu ramens', 'Durarara!! x Hakata Tonkotsu Ramens', 'translated'),
        ('Durarara x HTR Crossover', 'Durarara!! x Hakata Tonkotsu Ramens', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class table_features(loxi.OFObject):
    """OpenFlow ofp_table_features record: per-table capability description."""

    def __init__(self, table_id=None, command=None, features=None, name=None, metadata_match=None, metadata_write=None, capabilities=None, max_entries=None, properties=None):
        # Every field falls back to its zero/empty wire default.
        self.table_id = table_id if table_id is not None else 0
        self.command = command if command is not None else 0
        self.features = features if features is not None else 0
        self.name = name if name is not None else ''
        self.metadata_match = metadata_match if metadata_match is not None else 0
        self.metadata_write = metadata_write if metadata_write is not None else 0
        self.capabilities = capabilities if capabilities is not None else 0
        self.max_entries = max_entries if max_entries is not None else 0
        self.properties = properties if properties is not None else []

    def pack(self):
        """Serialize to wire format; the leading length field is back-patched."""
        packed = []
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(struct.pack('!B', self.table_id))
        packed.append(struct.pack('!B', self.command))
        packed.append(struct.pack('!L', self.features))
        packed.append(struct.pack('!32s', self.name))
        packed.append(struct.pack('!Q', self.metadata_match))
        packed.append(struct.pack('!Q', self.metadata_write))
        packed.append(struct.pack('!L', self.capabilities))
        packed.append(struct.pack('!L', self.max_entries))
        packed.append(loxi.generic_util.pack_list(self.properties))
        length = sum([len(x) for x in packed])
        packed[0] = struct.pack('!H', length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize a table_features from ``reader``.

        Fix: decorated as @staticmethod -- the function takes no ``self``
        and is meant to be called as ``table_features.unpack(reader)``.
        """
        obj = table_features()
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Constrain reads to this record; 2 accounts for the length field
        # already consumed.
        reader = orig_reader.slice(_length, 2)
        obj.table_id = reader.read('!B')[0]
        obj.command = reader.read('!B')[0]
        obj.features = reader.read('!L')[0]
        obj.name = reader.read('!32s')[0].rstrip('\x00')  # strip NUL padding
        obj.metadata_match = reader.read('!Q')[0]
        obj.metadata_write = reader.read('!Q')[0]
        obj.capabilities = reader.read('!L')[0]
        obj.max_entries = reader.read('!L')[0]
        obj.properties = loxi.generic_util.unpack_list(reader, ofp.common.table_feature_prop.unpack)
        return obj

    def __eq__(self, other):
        """Field-wise equality; other types never compare equal."""
        if type(self) != type(other):
            return False
        fields = ('table_id', 'command', 'features', 'name', 'metadata_match',
                  'metadata_write', 'capabilities', 'max_entries', 'properties')
        return all(getattr(self, f) == getattr(other, f) for f in fields)

    def pretty_print(self, q):
        """Render a human-readable form of this record to printer ``q``."""
        q.text('table_features {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('table_id = ')
                q.text('%#x' % self.table_id)
                q.text(',')
                q.breakable()
                q.text('command = ')
                # Known command codes are shown symbolically.
                value_name_map = {0: 'OFPTFC_REPLACE', 1: 'OFPTFC_MODIFY', 2: 'OFPTFC_ENABLE', 3: 'OFPTFC_DISABLE'}
                if self.command in value_name_map:
                    q.text('%s(%d)' % (value_name_map[self.command], self.command))
                else:
                    q.text('%#x' % self.command)
                q.text(',')
                q.breakable()
                q.text('features = ')
                value_name_map = {1: 'OFPTFF_INGRESS_TABLE', 2: 'OFPTFF_EGRESS_TABLE', 16: 'OFPTFF_FIRST_EGRESS'}
                q.text(util.pretty_flags(self.features, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('name = ')
                q.pp(self.name)
                q.text(',')
                q.breakable()
                q.text('metadata_match = ')
                q.text('%#x' % self.metadata_match)
                q.text(',')
                q.breakable()
                q.text('metadata_write = ')
                q.text('%#x' % self.metadata_write)
                q.text(',')
                q.breakable()
                q.text('capabilities = ')
                value_name_map = {3: 'OFPTC_DEPRECATED_MASK', 4: 'OFPTC_EVICTION', 8: 'OFPTC_VACANCY_EVENTS'}
                q.text(util.pretty_flags(self.capabilities, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('max_entries = ')
                q.text('%#x' % self.max_entries)
                q.text(',')
                q.breakable()
                q.text('properties = ')
                q.pp(self.properties)
            q.breakable()
        q.text('}')
class OptionPlotoptionsBulletSonificationContexttracksMappingTremoloDepth(Options):
    """Generated accessor group for a sonification tremolo-depth mapping option.

    NOTE(review): every option below appears as a getter/setter pair bearing
    the same name; as written, the second ``def`` simply shadows the first.
    This pattern normally carries ``@property`` / ``@<name>.setter``
    decorators -- they appear to have been stripped; confirm against the
    generator output before relying on these accessors.
    """
    def mapFunction(self):
        # Getter: returns the configured value (default None).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: stores a raw (non-JS) value.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: returns the configured value (default None).
        return self._config_get(None)
    def mapTo(self, text: str):
        # Setter: stores a raw (non-JS) string.
        self._config(text, js_type=False)
    def max(self):
        # Getter: returns the configured value (default None).
        return self._config_get(None)
    def max(self, num: float):
        # Setter: stores a raw (non-JS) number.
        self._config(num, js_type=False)
    def min(self):
        # Getter: returns the configured value (default None).
        return self._config_get(None)
    def min(self, num: float):
        # Setter: stores a raw (non-JS) number.
        self._config(num, js_type=False)
    def within(self):
        # Getter: returns the configured value (default None).
        return self._config_get(None)
    def within(self, value: Any):
        # Setter: stores a raw (non-JS) value.
        self._config(value, js_type=False)
def test_compare_length():
    """compare_length is -1/0/+1 as the encoded list is shorter/equal/longer."""
    encoded = encode([1, 2, 3, 4, 5])
    assert compare_length(encoded, 100) == -1
    assert compare_length(encoded, 5) == 0
    assert compare_length(encoded, 1) == 1
    # Edge case: the empty list still compares sanely, including against -1.
    encoded = encode([])
    assert compare_length(encoded, 100) == -1
    assert compare_length(encoded, 0) == 0
    assert compare_length(encoded, -1) == 1
class SnmprecRecordMixIn(object):
    """Mixin adding pass-through handling for text-tagged snmprec values."""

    def evaluateValue(self, oid, tag, value, **context):
        """Pass 'x:y'-style tags through untouched, stashing the tag text."""
        if ':' not in tag:
            return snmprec.SnmprecRecord.evaluate_value(self, oid, tag, value)
        # Remember the textual tag so formatValue() can emit it verbatim.
        context['backdoor']['textTag'] = tag
        return (oid, '', value)

    def formatValue(self, oid, value, **context):
        """Re-emit a stashed textual tag, else defer to the base formatter."""
        if 'textTag' not in context['backdoor']:
            return snmprec.SnmprecRecord.format_value(self, oid, value, **context)
        return (self.formatOid(oid), context['backdoor']['textTag'], value)
class Worksheet():
    """Tab-separated row buffer that renders to an xlwt worksheet on save().

    Rows are stored as tab-joined strings in ``self.data``; save() parses
    inline ``<style ...>`` markup, converts numerics and ``=``-prefixed
    formulas, writes cells, and tracks column widths as it goes.
    """

    def __init__(self, workbook, title, xlrd_index=None, xlrd_sheet=None):
        """Create a new sheet, or wrap an existing one when xlrd args are given."""
        self.title = title
        if len(self.title) > MAX_LEN_WORKSHEET_TITLE:
            self.title = self.title[:MAX_LEN_WORKSHEET_TITLE]
            logging.warning('Worksheet title > %d characters' % MAX_LEN_WORKSHEET_TITLE)
            logging.warning("Truncated to '%s'" % self.title)
        self.workbook = workbook
        if (xlrd_index is None) and (xlrd_sheet is None):
            # Brand-new sheet: start writing at the top.
            self.worksheet = self.workbook.add_sheet(self.title)
            self.is_new = True
            self.current_row = -1
            self.ncols = 0
        else:
            # Wrapping an existing sheet: resume appending after its last row.
            self.worksheet = self.workbook.get_sheet(xlrd_index)
            self.is_new = False
            self.current_row = xlrd_sheet.nrows - 1
            self.ncols = xlrd_sheet.ncols
        self.data = []
        # Matches inline cell styling such as "<style font=bold>text</style>".
        self.re_style = re.compile('^<style +([^>]*)>(.*)</style>$')
        self.styles = Styles()
        self.max_col_width = []
        self.freeze_row = 0
        self.freeze_col = 0

    def addTabData(self, rows):
        """Append tab-separated row strings, updating the column count."""
        for row in rows:
            self.data.append(row)
            self.ncols = max(self.ncols, len(row.split('\t')))

    def addText(self, text):
        """Append a newline-separated block of tab-separated rows."""
        return self.addTabData(text.split('\n'))

    def insertColumn(self, position, insert_items=None, title=None):
        """Insert a column at ``position``; only valid on sheets created new.

        ``insert_items`` may be a list (one value per row) or a scalar
        repeated for every row. When ``title`` is None and a list is given,
        its first element doubles as the title.
        """
        if not self.is_new:
            logging.error('Cannot insert data into pre-existing worksheet')
            return False
        if title is None:
            if isinstance(insert_items, list):
                title = insert_items[0]
                nrows = max(len(self.data), len(insert_items))
            else:
                title = insert_items
                nrows = len(self.data)
            offset = 0
        else:
            if isinstance(insert_items, list):
                nrows = max(len(self.data), len(insert_items) + 1)
            else:
                nrows = len(self.data)
            # Row i takes insert_items[i - 1] because row 0 holds the title.
            offset = -1
        insert_title = True
        for i in range(nrows):
            try:
                row = self.data[i]
                items = row.split('\t')
            except IndexError:
                items = []
            new_items = []
            for j in range(max(len(items), position + 1)):
                if j == position:
                    if insert_title:
                        new_items.append(title)
                        insert_title = False
                    else:
                        if isinstance(insert_items, list):
                            try:
                                insert_item = insert_items[i + offset]
                            except IndexError:
                                insert_item = ''
                        else:
                            insert_item = insert_items
                        new_items.append(insert_item)
                try:
                    new_items.append(items[j])
                except IndexError:
                    # Pad short rows so the inserted column lines up.
                    if j < position:
                        new_items.append('')
            row = '\t'.join([str(x) for x in new_items])
            try:
                self.data[i] = row
            except IndexError:
                self.data.append(row)
            self.ncols = max(self.ncols, len(row.split('\t')))
        return True

    def setCellValue(self, row, col, value):
        """Set a single cell, growing rows/columns with blanks as needed."""
        while len(self.data) < (row + 1):
            self.data.append('')
        new_row = self.data[row].split('\t')
        while len(new_row) < (col + 1):
            new_row.append('')
        self.ncols = max(self.ncols, len(new_row))
        new_row[col] = str(value)
        new_row = '\t'.join(new_row)
        self.data[row] = new_row

    def getColumnId(self, name):
        """Return the spreadsheet column id (e.g. 'B') of header ``name``.

        Raises IndexError when the sheet is empty or the header is absent.
        """
        try:
            i = self.data[0].split('\t').index(name)
            return self.column_id_from_index(i)
        except (IndexError, ValueError):
            # Fix: list.index raises ValueError (not IndexError) for a
            # missing header; catch both so the friendly message is raised.
            raise IndexError("Column '%s' not found" % name)

    def freezePanes(self, row=None, column=None):
        """Record the freeze position applied on save().

        NOTE(review): the defaults of None would make int() raise here --
        callers appear expected to pass both arguments; confirm.
        """
        self.freeze_row = int(row)
        self.freeze_col = int(column)

    def column_id_from_index(self, i):
        """Convert a 0-based column index to a spreadsheet id (0 -> 'A', 26 -> 'AA')."""
        name = ''
        try:
            while i >= 0:
                name += ascii_uppercase[i % 26]
                i = (i // 26) - 1
            return name[::-1]
        except IndexError as ex:
            print('Exception getting column name for index %d: %s' % (i, ex))
            raise ex

    def save(self):
        """Flush buffered rows to the xlwt sheet, then clear the buffer.

        Handles inline <style> markup, numeric conversion, '='-prefixed
        formulas ('?' -> current row, '#' -> current column), value
        truncation, column widths and frozen panes.
        """
        for row in self.data:
            self.current_row += 1
            cindex = 0
            for item in row.split('\t'):
                bold = False
                color = None
                bg_color = None
                wrap = False
                border_style = None
                style_match = self.re_style.match(item)
                num_format_str = None
                font_size = None
                centre = False
                shrink_to_fit = False
                if style_match:
                    # Strip the <style ...> wrapper and parse its attributes.
                    item = style_match.group(2)
                    styles = style_match.group(1)
                    for style in styles.split(' '):
                        if style.strip().startswith('bgcolor='):
                            bg_color = style.split('=')[1].strip()
                        elif style.strip().startswith('color='):
                            color = style.split('=')[1].strip()
                        elif style.strip() == 'font=bold':
                            bold = True
                        elif style.strip().startswith('border='):
                            border_style = style.split('=')[1].strip()
                        elif style.strip() == 'wrap':
                            wrap = True
                        elif style.strip().startswith('number_format='):
                            num_format_str = style.split('=')[1].strip()
                        elif style.strip().startswith('font_size='):
                            font_size = style.split('=')[1].strip()
                        elif style.strip() == 'centre':
                            centre = True
                        elif style.strip() == 'shrink_to_fit':
                            shrink_to_fit = True
                style = self.styles.getXfStyle(bold=bold, color=color, bg_color=bg_color, wrap=wrap, border_style=border_style, num_format_str=num_format_str, font_size=font_size, centre=centre, shrink_to_fit=shrink_to_fit)
                if str(item).startswith('='):
                    # Formula cell: substitute row ('?') and column ('#') refs.
                    formula = item[1:]
                    formula = formula.replace('?', str(self.current_row + 1))
                    formula = formula.replace('#', self.column_id_from_index(cindex))
                    try:
                        item = xlwt.Formula(formula)
                    except Exception as ex:
                        logging.warning("Error writing formula '%s' to cell %s%s: %s", formula, self.column_id_from_index(cindex), (self.current_row + 1), ex)
                        item = 'FORMULA_ERROR'
                else:
                    # Prefer int, then float, else keep (possibly truncated) text.
                    converted = str(item)
                    try:
                        converted = int(converted)
                    except ValueError:
                        try:
                            converted = float(converted)
                        except ValueError:
                            if len(converted) > MAX_LEN_WORKSHEET_CELL_VALUE:
                                logging.warning("Saving sheet '%s' (row %d, col %d)" % (self.title, self.current_row, cindex))
                                logging.warning("Truncating value '%s...' to %d characters" % (converted[:15], MAX_LEN_WORKSHEET_CELL_VALUE))
                                converted = converted[:MAX_LEN_WORKSHEET_CELL_VALUE]
                    item = converted
                try:
                    self.worksheet.write(self.current_row, cindex, item, style)
                    # Track the widest value per column to size columns below.
                    len_item = len(str(item))
                    try:
                        if len_item > self.max_col_width[cindex]:
                            self.max_col_width[cindex] = len_item
                    except IndexError:
                        self.max_col_width.append(len_item)
                    self.worksheet.col(cindex).width = 256 * (self.max_col_width[cindex] + 5)
                    cindex += 1
                except ValueError as ex:
                    logging.error("couldn't write item to sheet '%s' (row %d col %d)" % (self.title, (self.current_row + 1), (cindex + 1)))
        self.worksheet.set_horz_split_pos(self.freeze_row)
        self.worksheet.set_vert_split_pos(self.freeze_col)
        if self.freeze_row or self.freeze_col:
            self.worksheet.set_panes_frozen(True)
        self.data = []
        self.is_new = False
        return
class RegisterNamespaceRequest(betterproto.Message):
    """betterproto message for a namespace-registration request.

    Field numbers below mirror the protobuf schema.
    NOTE(review): betterproto messages are normally declared with a
    ``@dataclass(...)`` decorator; none is visible here -- confirm it was
    not lost upstream.
    """
    name: str = betterproto.string_field(1)
    description: str = betterproto.string_field(2)
    owner_email: str = betterproto.string_field(3)
    # How long workflow execution records are retained after completion.
    workflow_execution_retention_period: timedelta = betterproto.message_field(4)
    clusters: List[v1replication.ClusterReplicationConfig] = betterproto.message_field(5)
    active_cluster_name: str = betterproto.string_field(6)
    # Free-form string key/value metadata attached to the namespace.
    data: Dict[(str, str)] = betterproto.map_field(7, betterproto.TYPE_STRING, betterproto.TYPE_STRING)
    security_token: str = betterproto.string_field(8)
    is_global_namespace: bool = betterproto.bool_field(9)
    history_archival_state: v1enums.ArchivalState = betterproto.enum_field(10)
    history_archival_uri: str = betterproto.string_field(11)
    visibility_archival_state: v1enums.ArchivalState = betterproto.enum_field(12)
    visibility_archival_uri: str = betterproto.string_field(13)
@_packages_ns.route('/add/<ownername>/<projectname>/<package_name>/<source_type_text>')
class PackageAdd(Resource):
    """API resource that creates a new package inside a Copr project."""

    # Fix: these decorator expressions had lost their leading '@' and were
    # bare no-op statements, so routing/auth/validation/marshalling never
    # applied. NOTE(review): the decorator names themselves look mangled by
    # the same extraction -- confirm them against the original module.
    @_api_login_required
    @_packages_ns.doc(params=add_package_docs)
    @_packages_ns.expect(package_add_input_model)
    @_packages_ns.marshal_with(package_model)
    def post(self, ownername, projectname, package_name, source_type_text):
        """Create the package and return its serialized representation."""
        copr = get_copr(ownername, projectname)
        data = rename_fields(get_form_compatible_data(preserve=['python_versions']))
        process_package_add_or_edit(copr, source_type_text, data=data)
        package = PackagesLogic.get(copr.id, package_name).first()
        return to_dict(package)
class TestMemoized():
    """Behavioral tests for a memoization decorator.

    NOTE(review): each inner ``some_function`` closes over ``return_value``,
    and the assertions only hold if calls with hashable args are cached
    (and list args are not). No decorator is visible on the inner
    functions -- an ``@memoized``-style decorator appears to have been
    stripped; confirm against the original test module.
    """
    def test_caching(self):
        # Second call with the same arg must return the cached True,
        # even though return_value has been flipped to False.
        return_value = True
        def some_function(arg):
            return return_value
        assert some_function(42)
        return_value = False
        assert some_function(42)
    def test_caching_different_args(self):
        # A different argument misses the cache and sees the new value.
        return_value = True
        def some_function(arg):
            return return_value
        assert some_function(42)
        return_value = False
        assert (not some_function(41))
    def test_dont_cache_lists(self):
        # Unhashable (list) arguments must bypass the cache entirely.
        return_value = True
        def some_function(arg):
            return return_value
        assert some_function(['some', 'list'])
        return_value = False
        assert (not some_function(['some', 'list']))
    def test___get__(self):
        # The decorator must support use as a method via the descriptor
        # protocol (bound through the class attribute).
        def some_function(arg):
            return 42
        class some_class(object):
            thing = some_function
        assert (some_class().thing() == 42)
class TopicSubscriptionListView(LoginRequiredMixin, ListView):
    """Paginated list of the topics the current user subscribes to."""

    context_object_name = 'topics'
    model = Topic
    paginate_by = machina_settings.FORUM_TOPICS_NUMBER_PER_PAGE
    template_name = 'forum_member/subscription_topic_list.html'

    def get_queryset(self):
        """Return the user's subscriptions with related rows joined in."""
        subscriptions = self.request.user.topic_subscriptions
        related = ('forum', 'poster', 'last_post', 'last_post__poster')
        return subscriptions.select_related(*related).all()
@pytest.mark.parametrize('coords, expected_val', [
    pytest.param((10.0, 10.0), 0.0, id='(xori, yori)'),
    pytest.param((10.0, 20.0), 1.0, id='(xori, ymax)'),
    pytest.param((20.0, 10.0), 2.0, id='(xmax, yori)'),
    pytest.param((20.0, 20.0), 3.0, id='(xmax, ymax)'),
    pytest.param((15.0, 10.0), 1.0, id='((xori + xmax) / 2, yori)'),
    pytest.param((10.0, 15.0), 0.5, id='(xori, (yori + ymax) / 2)'),
    pytest.param((15.0, 15.0), 1.5, id='((xori + xmax) / 2, (yori + ymax) / 2)'),
])
def test_ijk_bilinear(coords, expected_val):
    """Bilinear sampling at corners, edges and centre matches expectations.

    Fix: the parametrize decorator had been reduced to a bare (invalid)
    '.parametrize(...)' expression; restored '@pytest.mark.parametrize'.
    """
    surface = Surface()
    result = xtgeo.surface.regular_surface._regsurf_oper.get_value_from_xy(surface, coords, sampling='bilinear')
    assert result == expected_val
def dashboard(config, endpoint, view_func, rule='/'):
    """Yield a Flask test client with the monitoring dashboard bound.

    NOTE(review): the view is registered as ``lambda: view_func`` (returning
    the function object rather than invoking it) -- confirm this is intended.
    """
    flask_app = Flask(__name__)
    flask_app.add_url_rule(rule, endpoint=endpoint.name, view_func=(lambda : view_func))
    flask_monitoringdashboard.bind(flask_app, schedule=False)
    flask_app.config.update(DEBUG=True, TESTING=True)
    with flask_app.test_client() as test_client:
        yield test_client
class TestWealthCommandsPositive(AEATestCaseManyFlaky):
    """End-to-end check of the wealth-related CLI commands.

    Fix: the two marker lines had been reduced to bare (invalid)
    '.integration' / '.flaky(...)' expressions; restored the
    '@pytest.mark' prefixes.
    """

    @pytest.mark.integration
    @pytest.mark.flaky(reruns=MAX_FLAKY_RERUNS_INTEGRATION)
    def test_wealth_commands(self, password_or_none):
        """Create an agent, generate/add a key, then request test wealth."""
        agent_name = 'test_aea'
        self.create_agents(agent_name)
        self.set_agent_context(agent_name)
        self.generate_private_key(password=password_or_none)
        self.add_private_key(password=password_or_none)
        self.generate_wealth(password=password_or_none)
class Result(object):
    """One-shot, thread-safe hand-off cell.

    The internal lock starts held; get() blocks until put() stores an
    outcome and releases it. If the outcome was flagged as raised, get()
    re-raises it instead of returning it.
    """

    def __init__(self):
        # Acquire immediately so get() blocks until put() runs.
        self.lock = Lock()
        self.lock.acquire()
        self.value = Exception()
        self.raised = False

    def put(self, value, raised):
        """Store the outcome and wake any waiting get()."""
        self.value = value
        self.raised = raised
        self.lock.release()

    def get(self):
        """Block until put() has run, then return or re-raise the value."""
        self.lock.acquire()
        try:
            if not self.raised:
                return self.value
            raise self.value
        finally:
            self.lock.release()
class Solution():
    """LeetCode 1578: minimum time to make the rope colorful."""

    def minCost(self, colors: str, neededTime: List[int]) -> int:
        """Return the least total removal time so no two adjacent balloons
        share a color: within each run of equal colors, keep only the most
        expensive balloon and pay for the rest.
        """
        total = 0
        keep = 0  # index of the balloon currently kept in the active run
        for idx in range(1, len(colors)):
            if colors[idx] != colors[keep]:
                # New color: start a new run.
                keep = idx
            elif neededTime[idx] > neededTime[keep]:
                # Costlier duplicate: remove the previously kept one instead.
                total += neededTime[keep]
                keep = idx
            else:
                total += neededTime[idx]
        return total
def extractBludeblobtranslationsWordpressCom(item):
    """Parse a bludeblobtranslations.wordpress.com feed item into a release.

    Returns None for previews or unparseable titles, False when neither a
    tag nor a title fragment matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    # Fall back to matching on known title fragments.
    titlemap = [
        ('SEG Chapter ', 'Sinister Ex-girlfriend', 'translated'),
        ('Master of Dungeon', 'Master of Dungeon', 'oel'),
    ]
    for titlecomponent, name, tl_type in titlemap:
        if titlecomponent.lower() in item['title'].lower():
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_discourse_loader_load_post_with_invalid_post_id(discourse_loader, monkeypatch, caplog):
    """A failing HTTP response is logged, not raised, by _load_post."""
    class FailingResponse():
        def raise_for_status(self):
            raise requests.exceptions.RequestException('Test error')

    def fake_get(*args, **kwargs):
        return FailingResponse()

    monkeypatch.setattr(requests, 'get', fake_get)
    discourse_loader._load_post(123)
    assert 'Failed to load post' in caplog.text
class FileRead(BaseTest):
    """End-to-end tests for the file_read module and its download vectors."""

    def setUp(self):
        session = SessionURL(self.url, self.password, volatile=True)
        modules.load_modules(session)
        self.run_argv = modules.loaded['file_read'].run_argv

    def test_read_php(self):
        """Default vector: readable file, binary, unreadable, and missing."""
        run = self.run_argv
        self.assertEqual(run(['test_file_read/ok.test']), b'OK')
        self.assertTrue(run(['/bin/ls']))
        self.assertEqual(run(['test_file_read/ko.test']), None)
        self.assertEqual(run(['bogus']), None)

    def test_read_allvectors(self):
        """Every registered download vector can read the sample file."""
        for vector_name in modules.loaded['file_download'].vectors.get_names():
            self.assertEqual(self.run_argv(['-vector', vector_name, 'test_file_read/ok.test']), b'OK')

    def test_read_sh(self):
        """base64 shell vector mirrors the default-vector cases."""
        run = self.run_argv
        self.assertEqual(run(['-vector', 'base64', 'test_file_read/ok.test']), b'OK')
        self.assertTrue(run(['-vector', 'base64', '/bin/ls']))
        self.assertEqual(run(['-vector', 'base64', 'test_file_read/ko.test']), None)
        self.assertEqual(run(['-vector', 'base64', 'bogus']), None)
def subscriber(topic: Topic) -> Callable[[SubscriberType], SubscriberType]:
    """Decorator marking a node method as the subscriber for ``topic``.

    The decorated method must have exactly the signature
    ``def name(self, message: <topic.message_type>) -> None``.
    """

    def subscriber_wrapper(method: SubscriberType) -> SubscriberType:
        # Annotations on everything except self and the return value.
        annotations = {
            arg: arg_type
            for arg, arg_type in method.__annotations__.items()
            if arg not in ('self', 'return')
        }
        signature_ok = (
            len(annotations) == 1
            and list(annotations.values())[0] == topic.message_type
            and method.__code__.co_argcount == 2
            and method.__code__.co_varnames[0] == 'self'
            and method.__code__.co_varnames[1] == list(annotations.keys())[0]
        )
        if not signature_ok:
            raise LabgraphError(f"Expected subscriber '{method.__name__}' to have signature def {method.__name__}(self, message: {topic.message_type.__name__}) -> None")
        metadata = get_method_metadata(method)
        if metadata.subscribed_topic is not None:
            raise LabgraphError(f"Method '{metadata.name}' already has a {subscriber.__name__} decorator")
        metadata.subscribed_topic = topic
        metadata.validate()
        return method

    return subscriber_wrapper
@pytest.mark.django_db
def test_non_match_search_on_multiple_tas(client, monkeypatch, elasticsearch_award_index, award_with_tas):
    """Requiring two different TAS paths on a single-TAS award yields no hits.

    Fix: the django_db marker had been reduced to a bare (invalid)
    '.django_db' expression; restored '@pytest.mark.django_db'.
    """
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    resp = query_by_tas(client, {'require': [_tas_path(ATA_TAS), _tas_path(BPOA_TAS)]})
    assert resp.json()['results'] == []
class EventWatcher(metaclass=_Singleton):
    """Singleton background poller that fires callbacks for new contract events.

    A single daemon thread (``_watcher_thread``) periodically asks each
    registered ``_EventWatchData`` entry for new events and dispatches them
    to callbacks on worker threads. ``target_list_lock`` guards every access
    to the watch table.
    """

    def __init__(self) -> None:
        # Guards target_events_watch_data.
        self.target_list_lock: Lock = Lock()
        # Keyed by "<contract address>+<event name>".
        self.target_events_watch_data: Dict[(str, _EventWatchData)] = {}
        self._kill: bool = False
        self._has_started: bool = False
        self._watcher_thread = Thread(target=self._loop, daemon=True)

    def __del__(self) -> None:
        self.stop()

    def stop(self, wait: bool=True) -> None:
        """Signal the watcher loop to exit, optionally joining the thread."""
        self._kill = True
        if ((wait is True) and self._watcher_thread.is_alive()):
            self._watcher_thread.join()
        self._has_started = False

    def reset(self) -> None:
        """Stop the watcher and restore a clean, not-yet-started state."""
        self.stop()
        self._setup()

    def add_event_callback(self, event: ContractEvent, callback: Callable[([AttributeDict], None)], delay: float=2.0, repeat: bool=True) -> None:
        """Register ``callback`` for ``event``, polled every ``delay`` seconds.

        ``delay`` is clamped to at least 0.05s. For an already-watched event
        the callback is appended, and a repeating registration also refreshes
        the entry's polling delay. Starts the watcher thread on first use.
        """
        if (not callable(callback)):
            raise TypeError("Argument 'callback' argument must be a callable.")
        delay = max(delay, 0.05)
        self.target_list_lock.acquire()
        event_watch_data_key = ((str(event.address) + '+') + event.event_name)
        if (self.target_events_watch_data.get(event_watch_data_key) is None):
            self.target_events_watch_data[event_watch_data_key] = _EventWatchData(event, callback, delay, repeat)
        else:
            self.target_events_watch_data[event_watch_data_key].add_callback(callback, repeat)
            if (repeat is True):
                self.target_events_watch_data[event_watch_data_key].update_delay(delay)
        self.target_list_lock.release()
        if (self._has_started is False):
            self._start_watch()

    def _setup(self) -> None:
        """Clear all registrations and recreate the (unstarted) watcher thread."""
        self.target_list_lock.acquire()
        self.target_events_watch_data.clear()
        self.target_list_lock.release()
        self._kill = False
        self._has_started = False
        self._watcher_thread = Thread(target=self._loop, daemon=True)

    def _start_watch(self) -> None:
        self._watcher_thread.start()
        self._has_started = True

    def _loop(self) -> None:
        """Watcher thread body: poll due entries, dispatch callbacks, drain workers."""
        workers_list: List[Thread] = []
        while (not self._kill):
            try:
                # Sleep at most 1s, or less if some entry comes due sooner.
                sleep_time: float = 1.0
                self.target_list_lock.acquire()
                for (_, elem) in self.target_events_watch_data.items():
                    time_left = elem.time_left
                    if (time_left > 0):
                        sleep_time = min(sleep_time, time_left)
                        continue
                    latest_events = elem.get_new_events()
                    if (len(latest_events) != 0):
                        # Callbacks run on worker threads returned here.
                        workers_list += elem._trigger_callbacks(latest_events)
                    elem.reset_timer()
                    sleep_time = min(sleep_time, elem.time_left)
            finally:
                self.target_list_lock.release()
            # Drop finished workers, then wait until the next entry is due.
            workers_list = list(filter((lambda x: x.is_alive()), workers_list))
            time.sleep(sleep_time)
        # Shutting down: give outstanding callback threads a chance to finish.
        for worker_instance in workers_list:
            worker_instance.join(timeout=30)
            if worker_instance.is_alive():
                warnings.warn(message='Callback execution ({}) could not be joined.'.format(worker_instance.getName()), category=RuntimeWarning)
class TestAttributeOverrides(BaseTestCase):
    """Check that as_view(**initkwargs) attribute overrides take effect."""

    def test_template_name_override(self):
        create_instance(quantity=3)
        obj_pk = Example.objects.all()[0].pk
        view_fn = DetailView.as_view(model=Example, template_name='example.html')
        resp = self.get(view_fn, pk=obj_pk)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.template_name, ['example.html'])
        self.assertContext(resp, {
            'object': Example.objects.get(pk=obj_pk),
            'example': Example.objects.get(pk=obj_pk),
            'view': InstanceOf(View),
        })

    def test_template_name_suffix_override(self):
        create_instance(quantity=3)
        obj_pk = Example.objects.all()[0].pk
        view_fn = DetailView.as_view(model=Example, template_name_suffix='_suffix')
        resp = self.get(view_fn, pk=obj_pk)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.template_name, ['vanilla/example_suffix.html'])
        self.assertContext(resp, {
            'object': Example.objects.get(pk=obj_pk),
            'example': Example.objects.get(pk=obj_pk),
            'view': InstanceOf(View),
        })

    def test_context_object_name_override(self):
        create_instance(quantity=3)
        obj_pk = Example.objects.all()[0].pk
        view_fn = DetailView.as_view(model=Example, context_object_name='current')
        resp = self.get(view_fn, pk=obj_pk)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.template_name, ['vanilla/example_detail.html'])
        self.assertContext(resp, {
            'object': Example.objects.get(pk=obj_pk),
            'current': Example.objects.get(pk=obj_pk),
            'view': InstanceOf(View),
        })

    def test_form_class_override(self):
        class CustomForm(ModelForm):
            class Meta():
                fields = ('text',)
                model = Example
        view_fn = CreateView.as_view(model=Example, success_url='/success/', form_class=CustomForm)
        resp = self.get(view_fn)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.template_name, ['vanilla/example_form.html'])
        self.assertContext(resp, {'form': InstanceOf(CustomForm), 'view': InstanceOf(View)})
        # GET must not create anything.
        self.assertFalse(Example.objects.exists())

    def test_queryset_override(self):
        create_instance(text='abc', quantity=3)
        create_instance(text='def', quantity=3)
        view_fn = ListView.as_view(model=Example, queryset=Example.objects.filter(text='abc'))
        resp = self.get(view_fn)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.template_name, ['vanilla/example_list.html'])
        self.assertContext(resp, {
            'object_list': Example.objects.filter(text='abc'),
            'example_list': Example.objects.filter(text='abc'),
            'view': InstanceOf(View),
            'page_obj': None,
            'paginator': None,
            'is_paginated': False,
        })
def fetch_seek_paginator(query, kwargs, index_column, clear=False, count=None, cap=100):
    """Build a SeekCoalescePaginator over ``query``, applying requested sorting."""
    check_cap(kwargs, cap)
    model = index_column.parent.class_
    sort = kwargs.get('sort')
    hide_null = kwargs.get('sort_hide_null')
    nulls_last = kwargs.get('sort_nulls_last')
    sort_column = None
    if sort:
        query, sort_column = sorting.sort(query, sort, model=model, clear=clear, hide_null=hide_null, nulls_last=nulls_last)
    return SeekCoalescePaginator(query, kwargs['per_page'], kwargs['sort_hide_null'], index_column, sort_column=sort_column, count=count)
def main():
    """Batch-update awards in [MIN_ID, MAX_ID] in CHUNK_SIZE slices.

    Reads module-level configuration (CONNECTION_STRING, MIN_ID/MAX_ID,
    CHUNK_SIZE, TYPES, CLOSING_TIME, GET_* SQL) and keeps a rolling
    per-iteration duration estimate so the job can suspend itself cleanly
    before CLOSING_TIME.
    """
    global TOTAL_UPDATES
    global ITERATION_ESTIMATED_SECONDS
    with psycopg2.connect(dsn=CONNECTION_STRING) as connection:
        connection.autocommit = True
        connection.readonly = True
        # Discover the full ID range unless both bounds were supplied.
        if ((MIN_ID is None) or (MAX_ID is None)):
            with connection.cursor() as cursor:
                logging.info('Finding min/max IDs from awards table...')
                cursor.execute(GET_MIN_MAX_SQL)
                results = cursor.fetchall()
                (min_id, max_id) = results[0]
        # Explicit bounds override whatever was discovered.
        if (MAX_ID is not None):
            logging.info('Using provided MAX ID {}'.format(MAX_ID))
            max_id = MAX_ID
        if (MIN_ID is not None):
            logging.info('Using provided MIN ID {}'.format(MIN_ID))
            min_id = MIN_ID
        if (min_id >= max_id):
            raise RuntimeError('MAX ID ({}) must be greater than MIN ID ({})'.format(MAX_ID, MIN_ID))
        logging.info('Min ID: {:,}'.format(min_id))
        logging.info('Max ID: {:,}'.format(max_id))
        logging.info('Total in range: {:,}'.format(((max_id - min_id) + 1)))
        logging.info('Closing time: {}'.format(CLOSING_TIME))
        batch_min = min_id
        iteration = 1
        while (batch_min <= max_id):
            batch_max = min(((batch_min + CHUNK_SIZE) - 1), max_id)
            if CLOSING_TIME:
                # Suspend cleanly if the next iteration is estimated to
                # overrun the deadline.
                if ITERATION_ESTIMATED_SECONDS:
                    curr_time = datetime.now(timezone.utc)
                    next_run_estimated_end_datetime = (curr_time + timedelta(seconds=ITERATION_ESTIMATED_SECONDS))
                    dt_str = next_run_estimated_end_datetime.isoformat()
                    logging.info('=> Expected iteration duration: {} '.format(ITERATION_ESTIMATED_SECONDS))
                    logging.info('=> Estimated loop end datetime of: {}'.format(dt_str))
                    if (next_run_estimated_end_datetime >= CLOSING_TIME):
                        logging.info('===== Suspending job due to --closing-time flag')
                        logging.info('===== Start next job at ID {} ====='.format(batch_min))
                        return
            with Timer('[Awards {:,} - {:,}]'.format(batch_min, batch_max), pipe_output=logging.info) as t:
                with connection.cursor() as cursor:
                    # Collect award keys for each requested type in this slice.
                    if ('fabs' in TYPES):
                        cursor.execute(GET_FABS_AWARDS.format(minid=batch_min, maxid=batch_max))
                        fabs = [f"'{row[0]}'" for row in cursor.fetchall()]
                    else:
                        fabs = []
                    if ('fpds' in TYPES):
                        cursor.execute(GET_FPDS_AWARDS.format(minid=batch_min, maxid=batch_max))
                        fpds = [f"'{row[0]}'" for row in cursor.fetchall()]
                    else:
                        fpds = []
                    if (fabs or fpds):
                        row_count = run_update_query(fabs_awards=fabs, fpds_awards=fpds)
                        logging.info('UPDATED {:,} records'.format(row_count))
                        TOTAL_UPDATES += row_count
                    else:
                        logging.info('#### No awards to update in range ###')
            # Fold this iteration's wall time into the rolling estimate.
            if (ITERATION_ESTIMATED_SECONDS is None):
                ITERATION_ESTIMATED_SECONDS = t.elapsed
            else:
                ITERATION_ESTIMATED_SECONDS = rolling_average(ITERATION_ESTIMATED_SECONDS, t.elapsed, iteration)
            batch_min = (batch_max + 1)
            iteration += 1
def _install_dependencies(dependencies):
    """Pip-install track dependencies into the libs dir, logging pip output."""
    if not dependencies:
        return
    log_path = os.path.join(paths.logs(), 'dependency.log')
    console.info(f"Installing track dependencies [{', '.join(dependencies)}]")
    pip_command = [sys.executable, '-m', 'pip', 'install', *dependencies, '--upgrade', '--target', paths.libs()]
    try:
        with open(log_path, 'ab') as install_log:
            subprocess.check_call(pip_command, stdout=install_log, stderr=install_log)
    except subprocess.CalledProcessError:
        # Surface the log location; pip's own output went to the file.
        raise exceptions.SystemSetupError(f'Installation of track dependencies failed. See [{install_log.name}] for more information.')
class MultiProviderMenu(providers.MultiProviderHandler, Menu):
    """Menu whose items mirror the providers registered under ``names``."""

    def __init__(self, names, parent):
        providers.MultiProviderHandler.__init__(self, names, parent)
        Menu.__init__(self, parent)
        # Seed the menu with every provider already registered.
        for existing in self.get_providers():
            self.on_provider_added(existing)

    def on_provider_added(self, provider):
        """Mirror a newly registered provider as a menu item."""
        self.add_item(provider)

    def on_provider_removed(self, provider):
        """Drop the menu item for an unregistered provider."""
        self.remove_item(provider)
def validate_consumption(obj: dict, zone_key: ZoneKey) -> None:
    """Validate a consumption datapoint, raising ``ValidationError`` on failure.

    Checks the datapoint format, rejects negative or implausibly large
    (> 500 GW) consumption values, and finally validates the timestamp.
    """
    validate_datapoint_format(datapoint=obj, kind='consumption', zone_key=zone_key)
    consumption = obj.get('consumption') or 0
    if consumption < 0:
        raise ValidationError(f"{zone_key}: consumption has negative value {obj['consumption']}")
    # 500 GW is far above any plausible zone-level consumption.
    if abs(consumption) > 500000:
        raise ValidationError(f"{zone_key}: consumption is not realistic (>500GW) {obj['consumption']}")
    validate_reasonable_time(obj, zone_key)
# NOTE(review): this bare call looks like a stripped "@_frequency(...)"
# decorator declaring the refresh cadence — confirm against upstream.
_frequency(timedelta(days=1))
def fetch_production(zone_key: ZoneKey=ZoneKey('US-CAL-CISO'), session: (Session | None)=None, target_datetime: (datetime | None)=None, logger: Logger=getLogger(__name__)) -> list:
    """Fetch CAISO's daily production CSV and return breakdown datapoints.

    Parameters follow the parser contract (``session`` is unused here).
    Returns the event list built by ``ProductionBreakdownList.to_list()``.
    """
    target_url = get_target_url(target_datetime, kind='production')
    if (target_datetime is None):
        # Default to the start of "today" in the grid's local timezone.
        target_datetime = arrow.now(tz='US/Pacific').floor('day').datetime
    csv = pandas.read_csv(target_url)
    # The feed sometimes ends with a placeholder row whose Time cell is the
    # letters 'OO:OO' (not zeros); drop that row before parsing.
    if (csv.iloc[(- 1)]['Time'] == 'OO:OO'):
        df = csv.copy().iloc[:(- 1)]
    else:
        df = csv.copy()
    df.columns = [col.lower() for col in df.columns]
    all_data_points = ProductionBreakdownList(logger)
    for (index, row) in df.iterrows():
        production_mix = ProductionMix()
        storage_mix = StorageMix()
        # 'time' is an HH:MM string; graft hour/minute onto the target date.
        row_datetime = target_datetime.replace(hour=int(row['time'][:2]), minute=int(row['time'][(- 2):]))
        # All mapped modes except hydro go straight into the production mix.
        for mode in [mode for mode in PRODUCTION_MODES_MAPPING if (mode not in ['small hydro', 'large hydro'])]:
            production_value = float(row[mode])
            production_mix.add_value(PRODUCTION_MODES_MAPPING[mode], production_value, (mode in CORRECT_NEGATIVE_PRODUCTION_MODES_WITH_ZERO))
        # Hydro is split by sign: negative output is booked as (pumped)
        # hydro storage, positive output as hydro production.
        for mode in ['small hydro', 'large hydro']:
            production_value = float(row[mode])
            if (production_value < 0):
                storage_mix.add_value('hydro', (production_value * (- 1)))
            else:
                production_mix.add_value('hydro', production_value)
        # Batteries column is negated before storing — presumably the CSV
        # reports discharge as positive; confirm against the data source.
        storage_mix.add_value('battery', (float(row['batteries']) * (- 1)))
        all_data_points.append(zoneKey=zone_key, production=production_mix, storage=storage_mix, source='caiso.com', datetime=arrow.get(row_datetime).replace(tzinfo='US/Pacific').datetime)
    return all_data_points.to_list()
def upgrade():
    """Recreate the ``connectiontype`` enum with the current set of values.

    Postgres cannot remove values from an enum in place, so the standard
    workaround is applied: rename the old type, create the new one, re-point
    the column via a text cast, then drop the old type.
    """
    op.execute('alter type connectiontype rename to connectiontype_old')
    # BUG FIX: the original literal contained a stray "' '" between 'mysql'
    # and 'snowflake' ("'mysql', ' 'snowflake'"), which is invalid SQL and
    # would make this migration fail; the stray token is removed here.
    op.execute("create type connectiontype as enum('mongodb', 'mysql', 'snowflake', 'redshift', 'mssql', 'mariadb', 'bigquery', 'saas', 'manual', 'manual_webhook', 'timescale', 'fides', 'sovrn', 'attentive', 'dynamodb', 'postgres','generic_consent_email', 'generic_erasure_email')")
    op.execute('alter table connectionconfig alter column connection_type type connectiontype using connection_type::text::connectiontype')
    op.execute('drop type connectiontype_old')
class Setup(object):
    """Container for a powers-of-tau trusted setup: the list of G1 powers of
    X and the G2 point encoding X^1."""

    def __init__(self, G1_side, X2):
        self.G1_side = G1_side  # list of G1 points (x, y) for X^0, X^1, ...
        self.X2 = X2  # G2 point for X^1, as a pair of Fp2 coordinates

    # NOTE(review): takes ``cls`` and ends with ``return cls(...)`` — this was
    # presumably decorated ``@classmethod``; confirm against upstream.
    def from_file(cls, filename):
        """Parse a trusted-setup dump into a ``Setup``.

        Reads the declared power count, recovers and rescales the G1 points so
        the first entry matches the canonical generator, scans for the start of
        the G2 section, then extracts and validates the G2 point for X^1.
        """
        # NOTE(review): file handle is never explicitly closed.
        contents = open(filename, 'rb').read()
        # Power count is stored as an exponent at a fixed byte offset.
        powers = (2 ** contents[SETUP_FILE_POWERS_POS])
        # G1 section: 2 coordinates x 32 little-endian bytes per point.
        values = [int.from_bytes(contents[i:(i + 32)], 'little') for i in range(SETUP_FILE_G1_STARTPOS, (SETUP_FILE_G1_STARTPOS + ((32 * powers) * 2)), 32)]
        assert (max(values) < b.field_modulus)
        # The file stores coordinates multiplied by a constant factor; derive
        # it from the first point, which must equal the G1 generator.
        factor = (f(values[0]) / b.G1[0])
        values = [(f(x) / factor) for x in values]
        G1_side = [(values[(i * 2)], values[((i * 2) + 1)]) for i in range(powers)]
        print('Extracted G1 side, X^1 point: {}'.format(G1_side[1]))
        pos = (SETUP_FILE_G1_STARTPOS + ((32 * powers) * 2))
        # Scan forward for the scaled G2 generator coordinate to locate the
        # start of the G2 section.
        target = (factor * b.G2[0].coeffs[0]).n
        while (pos < len(contents)):
            v = int.from_bytes(contents[pos:(pos + 32)], 'little')
            if (v == target):
                break
            pos += 1
        print('Detected start of G2 side at byte {}'.format(pos))
        # X^1 in G2 sits one point (4 x 32 bytes) after the G2 generator.
        X2_encoding = contents[(pos + (32 * 4)):(pos + (32 * 8))]
        X2_values = [(f(int.from_bytes(X2_encoding[i:(i + 32)], 'little')) / factor) for i in range(0, 128, 32)]
        X2 = (f2(X2_values[:2]), f2(X2_values[2:]))
        assert b.is_on_curve(X2, b.b2)
        print('Extracted G2 side, X^1 point: {}'.format(X2))
        return cls(G1_side, X2)
class MemoryBaseStore(KeyValueStore):
    """Process-local, dict-backed key/value store with optional expiry.

    Entries are stored as ``(value, created_at, expires_at)`` tuples where
    ``expires_at == 0`` means the entry never expires (see ``set``).
    """

    def __init__(self):
        # key -> (value, created_at, expires_at)
        self.values = {}

    def get(self, key, original_timestamp=None):
        """Return the value for ``key``, or None.

        The entry is discarded (and None returned) when it is missing, was
        created before ``original_timestamp``, or has expired.
        """
        entry = self.values.get(key)
        if (not entry):
            return None
        (value, created_at, expires_at) = entry
        if ((original_timestamp is not None) and (created_at < original_timestamp)):
            # Entry predates the caller's invalidation point: treat as stale.
            self.delete(key)
            return None
        # BUG FIX: guard on ``expires_at`` the same way ``has()`` does.
        # ``expires_at == 0`` means "never expires", but the original
        # unconditional ``expires_at < _time()`` comparison expired every
        # lifetime-less entry immediately.
        if (expires_at and (expires_at < _time())):
            self.delete(key)
            return None
        return value

    def set(self, key, value, lifetime=0):
        """Store ``value`` under ``key``; ``lifetime`` 0 means no expiry."""
        created_at = _time()
        # lifetime == 0 short-circuits to the 0 sentinel (never expires).
        expires_at = ((lifetime and (created_at + lifetime)) or 0)
        self.values[key] = (value, created_at, expires_at)
        return True

    def delete(self, key):
        """Remove ``key``; return True if it was present."""
        try:
            del self.values[key]
            return True
        except KeyError:
            return False

    def has(self, key):
        """Return True if ``key`` exists and has not expired.

        Expired entries are deleted as a side effect.
        """
        pair = self.values.get(key)
        if (not pair):
            return False
        (value, created_at, expires_at) = pair
        if (expires_at and (expires_at < _time())):
            self.delete(key)
            return False
        return True
class Solution:
    """Regular-expression matching supporting '.' and '*' (LeetCode 10 style)."""

    def isMatch(self, s: str, p: str) -> bool:
        """Return True iff pattern ``p`` matches the *entire* string ``s``.

        '.' matches any single character; 'x*' matches zero or more of 'x'.
        Uses top-down dynamic programming memoized on (string index, pattern
        index) pairs.
        """
        memo = {}

        def dp(i: int, j: int) -> bool:
            # Does p[j:] match all of s[i:]?
            if (i, j) in memo:
                return memo[(i, j)]
            if j == len(p):
                # Pattern exhausted: match only if the string is too.
                answer = (i == len(s))
            else:
                head_ok = i < len(s) and p[j] in (s[i], '.')
                if j + 1 < len(p) and p[j + 1] == '*':
                    # Either skip the "x*" unit entirely, or consume one
                    # matching character and stay on the same unit.
                    answer = dp(i, j + 2) or (head_ok and dp(i + 1, j))
                else:
                    answer = head_ok and dp(i + 1, j + 1)
            memo[(i, j)] = answer
            return answer

        return dp(0, 0)
class OptionSeriesHeatmapSonificationContexttracksMappingNoteduration(Options):
    """Generated option wrapper for the Highcharts
    ``series.heatmap.sonification.contextTracks.mapping.noteDuration`` options.

    NOTE(review): every option appears as two same-named ``def``s (a zero-arg
    getter followed by a one-arg setter). As written the later definition
    shadows the earlier at class-creation time; this looks like stripped
    ``@property`` / ``@<name>.setter`` decorators — confirm against the code
    generator that produced this file.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def extractGreenappletranslationsWordpressCom(item):
    """Map a greenappletranslations.wordpress.com feed item to a release.

    Returns None for previews/unparseable titles, a release message for a
    recognized tag, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    # (feed tag, series name, translation type)
    known_tags = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tag, series_name, tl_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def permission_manager(view, view_args, view_kwargs, *args, **kwargs):
    """Resolve identifiers in ``view_kwargs`` and enforce the permission
    named in ``args[0]``.

    Supported ``kwargs``: ``id``, ``methods``, ``leave_if``, ``check``,
    ``fetch``/``fetch_as``/``fetch_key_url``/``fetch_key_model``/``model``.
    Raises ``ForbiddenError``/``NotFoundError`` on failure; otherwise
    delegates to the registered permission handler (or the view itself when
    the HTTP method is not guarded).
    """
    methods = 'GET,POST,DELETE,PATCH'
    if ('id' in kwargs):
        view_kwargs['id'] = kwargs['id']
    if kwargs.get('methods'):
        methods = kwargs['methods']
    # Unguarded method: skip all permission machinery.
    if (request.method not in methods):
        return view(*view_args, **view_kwargs)
    # Optional escape hatch: bypass checks when 'leave_if' says so.
    if ('leave_if' in kwargs):
        check = kwargs['leave_if']
        if check(view_kwargs):
            return view(*view_args, **view_kwargs)
    # Optional extra predicate that must hold before any permission runs.
    if ('check' in kwargs):
        check = kwargs['check']
        if (not check(view_kwargs)):
            raise ForbiddenError({'source': ''}, 'Access forbidden')
    # Translate human-readable identifiers into primary keys.
    if ('order_identifier' in view_kwargs):
        try:
            order = Order.query.filter_by(identifier=view_kwargs['order_identifier']).one()
        except NoResultFound:
            raise NotFoundError({'parameter': 'order_identifier'}, 'Order not found')
        view_kwargs['id'] = order.id
    if ('event_identifier' in view_kwargs):
        try:
            event = Event.query.filter_by(identifier=view_kwargs['event_identifier']).one()
        except NoResultFound:
            raise NotFoundError({'parameter': 'event_identifier'}, 'Event not found')
        view_kwargs['event_id'] = event.id
    if (view_kwargs.get('event_invoice_identifier') is not None):
        try:
            event_invoice = EventInvoice.query.filter_by(identifier=view_kwargs['event_invoice_identifier']).one()
        except NoResultFound:
            # BUG FIX: the NotFoundError was constructed but never raised,
            # letting execution fall through to an unbound ``event_invoice``
            # (NameError instead of a clean 404).
            raise NotFoundError({'parameter': 'event_invoice_identifier'}, 'Event Invoice not found')
        view_kwargs['id'] = event_invoice.id
    if ('identifier' in view_kwargs):
        try:
            event = Event.query.filter_by(identifier=view_kwargs['identifier']).one()
        except NoResultFound:
            raise NotFoundError({'parameter': 'identifier'}, 'Event not found')
        view_kwargs['id'] = event.id
    # 'fetch' support: locate a related object and expose one of its
    # attributes to the permission handler.
    if ('fetch' in kwargs):
        fetched = None
        if is_multiple(kwargs['fetch']):
            kwargs['fetch'] = [f.strip() for f in kwargs['fetch'].split(',')]
            # First matching key already present in view_kwargs wins.
            for f in kwargs['fetch']:
                if (f in view_kwargs):
                    fetched = view_kwargs.get(f)
                    break
        elif (kwargs['fetch'] in view_kwargs):
            fetched = view_kwargs[kwargs['fetch']]
        if (not fetched):
            # Not directly available: query the configured model(s).
            model = kwargs['model']
            fetch = kwargs['fetch']
            fetch_key_url = 'id'
            fetch_key_model = 'id'
            if kwargs.get('fetch_key_url'):
                fetch_key_url = kwargs['fetch_key_url']
            if kwargs.get('fetch_key_model'):
                fetch_key_model = kwargs['fetch_key_model']
            if (not is_multiple(model)):
                model = [model]
            if (isinstance(fetch_key_url, str) and is_multiple(fetch_key_url)):
                fetch_key_url = fetch_key_url.split(',')
            found = False
            # Try each model with its corresponding URL key until one query
            # returns a row.
            for (index, mod) in enumerate(model):
                if is_multiple(fetch_key_url):
                    f_url = fetch_key_url[index].strip()
                else:
                    f_url = fetch_key_url
                if (not view_kwargs.get(f_url)):
                    continue
                try:
                    data = mod.query.filter((getattr(mod, fetch_key_model) == view_kwargs[f_url])).one()
                except NoResultFound:
                    pass
                else:
                    found = True
                    break
            if (not found):
                raise NotFoundError({'source': ''}, 'Object not found.')
            fetched = None
            if is_multiple(fetch):
                for f in fetch:
                    if hasattr(data, f):
                        fetched = getattr(data, f)
                        break
            else:
                fetched = (getattr(data, fetch) if hasattr(data, fetch) else None)
        if fetched:
            fetch_as = kwargs.get('fetch_as')
            fetch = kwargs.get('fetch')
            if (fetch_as == fetch):
                logger.warning("If 'fetch_as' is same as 'fetch', then it is redundant: %s", fetch)
            # Expose the fetched value under 'fetch_as' (or the fetch key).
            if fetch_as:
                kwargs[fetch_as] = fetched
            elif fetch:
                kwargs[fetch] = fetched
        else:
            raise NotFoundError({'source': ''}, 'Object not found.')
    # Finally dispatch to the named permission handler.
    if (args[0] in permissions):
        return permissions[args[0]](view, view_args, view_kwargs, *args, **kwargs)
    raise ForbiddenError({'source': ''}, 'Access forbidden')
def read_expected_ttx(testfile, tableTag):
    """Load the expected TTX dump for *testfile*/*tableTag*.

    The fontTools version line is stripped so comparisons are stable
    across library versions.
    """
    base_name = os.path.splitext(testfile)[0]
    expected_path = getpath('%s.ttx.%s' % (base_name, tagToXML(tableTag)))
    with open(expected_path, 'r', encoding='utf-8') as fh:
        return ttLibVersion_RE.sub('', fh.read())
# NOTE(review): this leading-dot line looks like a stripped
# "@pytest.mark.usefixtures('use_tmpdir')" decorator — confirm upstream.
.usefixtures('use_tmpdir')
def test_stop_on_fail_is_parsed_external():
    """An external (INTERNAL False) job config with STOP_ON_FAIL True must
    yield a WorkflowJob whose ``stop_on_fail`` flag is set."""
    with open('fail_job', 'w+', encoding='utf-8') as f:
        f.write('INTERNAL False\n')
        f.write('EXECUTABLE echo\n')
        f.write('MIN_ARG 1\n')
        f.write('STOP_ON_FAIL True\n')
    job_internal = WorkflowJob.from_file(name='FAIL', config_file='fail_job')
    assert job_internal.stop_on_fail
def crlf_check(uri, method, headers, body, scanid):
    """Dispatch CRLF-injection probes appropriate to the HTTP method.

    Read-style methods ('GET'/'DEL') probe the URI and URL variants;
    write-style methods ('POST'/'PUT') probe with the request body.
    """
    if method in ('GET', 'DEL'):
        crlf_get_uri_method(uri, method, headers, scanid)
        crlf_get_url_method(uri, headers, scanid)
    if method in ('POST', 'PUT'):
        crlf_post_method(uri, method, headers, body, scanid)
def get_tvtk_name(vtk_name):
    """Translate a VTK class name to its TVTK counterpart.

    Strips a leading 'vtk' prefix; if the remainder starts with a digit
    (which is not a valid identifier start) the digit is spelled out, e.g.
    'vtk3DWidget' -> 'ThreeDWidget'. Names without the prefix pass through.
    """
    if not vtk_name.startswith('vtk'):
        return vtk_name
    stripped = vtk_name[3:]
    digit_words = {'1': 'One', '2': 'Two', '3': 'Three', '4': 'Four',
                   '5': 'Five', '6': 'Six', '7': 'Seven', '8': 'Eight',
                   '9': 'Nine', '0': 'Zero'}
    if stripped[0] in string.digits:
        return digit_words[stripped[0]] + stripped[1:]
    return stripped
class RelationshipMemberWafTag(ModelNormal):
    """Generated OpenAPI model: relationship member pointing at a WAF tag
    (a ``type`` plus a read-only ``id``).

    NOTE(review): the bare ``_property`` and ``_js_args_to_python_args``
    expression lines below look like decorators that lost their ``@``
    (``@cached_property``-style and argument-conversion wrappers in the
    Datadog client generator) — confirm against upstream before relying on
    runtime behavior of this transcription.
    """
    # No enum-restricted or validated attributes on this model.
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        # Types accepted for properties not declared in attribute_map.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Declared attribute name -> accepted type tuple.
        lazy_import()
        return {'type': (TypeWafTag,), 'id': (str,)}
    _property
    def discriminator():
        # No polymorphic discriminator for this model.
        return None
    # Python attribute name -> JSON key name.
    attribute_map = {'type': 'type', 'id': 'id'}
    # 'id' is server-assigned and rejected by __init__ (see below).
    read_only_vars = {'id'}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Build an instance from server data; read-only vars are allowed here.

        NOTE(review): uses ``cls`` and ``__new__`` — presumably was a
        ``@classmethod`` (or wrapped as one) upstream.
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop keys the spec does not know about.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    # Internal bookkeeping attributes that are always present.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Client-side constructor; assigning a read-only var raises."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # NOTE(review): the read-only check runs after setattr, so the
            # value is assigned before the error is raised.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
# NOTE(review): this bare call looks like a stripped
# "@login_required(login_url='/login')" decorator — confirm upstream.
_required(login_url='/login')
def ArticleUpdate(request, article_id):
    """Render the edit form (GET) or persist changes (POST) for an article.

    GET returns the edit template (404 when the article is missing); POST
    validates ``Article_form`` and answers with a JSON code-200/400 payload.
    """
    if (request.method == 'GET'):
        category = Category_Article.objects.all()
        try:
            article = Article.objects.get(id=article_id)
        except Exception:
            # BUG FIX: the original did ``return Http404``, handing the
            # exception *class* back as a response; Http404 must be raised
            # for Django to render a 404 page.
            raise Http404
        return render(request, 'pc/article_update.html', {'article': article, 'category': category})
    if (request.method == 'POST'):
        forms = Article_form(request.POST)
        if forms.is_valid():
            title = forms.cleaned_data.get('title')
            content = forms.cleaned_data.get('content')
            category = request.POST.get('category', '')
            desc = request.POST.get('desc', '')
            keywords = request.POST.get('keywords', '')
            # Renamed from ``type`` to avoid shadowing the builtin.
            pic_type = request.POST.get('type', '')
            # Truthy 'type' means the picture arrives as an upload, otherwise
            # it is a plain URL/path in the POST data.
            if pic_type:
                list_pic = request.FILES.get('list_pic', '')
            else:
                list_pic = request.POST.get('list_pic', '')
            authors = forms.cleaned_data.get('authors', '')
            article = Article.objects.get(id=article_id)
            article.title = title
            article.content = content
            article.desc = desc
            article.keywords = keywords
            article.authors = authors
            article.category_id = int(category)
            article.list_pic = list_pic
            try:
                article.save()
                return JsonResponse({'code': 200, 'data': ''})
            except Exception:
                return JsonResponse({'code': 400, 'data': ''})
        # Strip HTML tags from the form-error summary before returning it.
        pattern = re.compile('<[^>]+>', re.S)
        result = pattern.sub('', str(forms.errors))
        return JsonResponse({'code': 400, 'data': result})
class SequenceResult():
    """A lazily-materialized window over one result sequence of an OpenAI
    streaming request buffer, supporting offset/length slicing.

    NOTE(review): ``continuation_type`` and ``current_len`` are referenced
    without parentheses below (``self.current_len`` in ``json`` and ``empty``),
    which only makes sense if they were ``@property``-decorated upstream; as
    plain methods those expressions evaluate the bound method object. Confirm
    against the original source.
    """

    def __init__(self, request_buffer, result_index: int, offset: int=0, length: int=None):
        # Buffer owning the underlying stream; annotated for readability.
        self.request_buffer: OpenAIModelOutputBuffer = request_buffer
        # Which of the buffer's parallel result sequences this view covers.
        self.result_index = result_index
        # Window start within the sequence; ``length`` is currently informational.
        self.offset = offset
        self.length = length
        self.collected_data = []
        self.collected_tokens = []
        self.collected_logprobs = []

    def continuation_type(self):
        """Continuation type reported by the buffer's completed result."""
        return self.request_buffer.complete_result.continuation_type

    def __getitem__(self, key):
        """Support ``seq[i]`` (awaitable token result) and ``seq[a:]``."""
        if (type(key) is slice):
            return self.slice(key.start, key.stop)
        elif (type(key) is int):
            return self.get(key)
        else:
            raise TypeError(f'Can only subscript SequenceResult with int or slice, not {type(key)}')

    async def json(self):
        # Debug-style representation rather than actual JSON serialization.
        return f'<SequenceResult current_len={self.current_len}>'

    def current_len(self):
        """Number of tokens currently available past this view's offset."""
        return max(0, (len(self.request_buffer.tokens[self.result_index]) - self.offset))

    async def get(self, i: int):
        """Return the i-th token result, advancing the stream until present."""
        i = (self.offset + i)
        while (len(self.request_buffer.response_data[self.result_index]) <= i):
            (await self.request_buffer.advance_stream_iter())
        return OpenAITokenResult(self.request_buffer.tokens[self.result_index][i], self.request_buffer.logprobs[self.result_index][i], self.request_buffer.response_data[self.result_index][i])

    def slice(self, lower, upper):
        """Return a new view shifted by ``lower``; upper bounds unsupported."""
        assert (upper is None), 'cannot specify upper bound for SequenceResult slice'
        return SequenceResult(self.request_buffer, self.result_index, (self.offset + lower), upper)

    async def empty(self):
        """True when no tokens are (or will become) available in this view."""
        if (len(self.request_buffer.tokens[self.result_index]) > self.offset):
            return False
        while (self.current_len == 0):
            at_end = (await self.request_buffer.advance_stream_iter())
            if at_end:
                break
        return (self.current_len == 0)
class OptionPlotoptionsVectorStatesHover(Options):
    """Generated option wrapper for Highcharts
    ``plotOptions.vector.states.hover``.

    NOTE(review): scalar options appear as two same-named ``def``s (getter
    then setter); the later definition shadows the earlier at class-creation
    time, which looks like stripped ``@property`` / ``@<name>.setter``
    decorators — confirm against the generator.
    """

    def animation(self) -> 'OptionPlotoptionsVectorStatesHoverAnimation':
        # Sub-option container (created on demand).
        return self._config_sub_data('animation', OptionPlotoptionsVectorStatesHoverAnimation)

    def enabled(self):
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def halo(self) -> 'OptionPlotoptionsVectorStatesHoverHalo':
        return self._config_sub_data('halo', OptionPlotoptionsVectorStatesHoverHalo)

    def lineWidth(self):
        return self._config_get(None)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def lineWidthPlus(self):
        return self._config_get(1)

    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)

    def marker(self) -> 'OptionPlotoptionsVectorStatesHoverMarker':
        return self._config_sub_data('marker', OptionPlotoptionsVectorStatesHoverMarker)
class OptionSeriesErrorbarData(Options):
    """Generated option wrapper for Highcharts ``series.errorbar.data``
    point options.

    NOTE(review): scalar options appear as two same-named ``def``s (getter
    then setter); the later definition shadows the earlier at class-creation
    time, which looks like stripped ``@property`` / ``@<name>.setter``
    decorators — confirm against the generator.
    """

    def accessibility(self) -> 'OptionSeriesErrorbarDataAccessibility':
        # Sub-option container (created on demand).
        return self._config_sub_data('accessibility', OptionSeriesErrorbarDataAccessibility)

    def className(self):
        return self._config_get(None)

    def className(self, text: str):
        self._config(text, js_type=False)

    def color(self):
        return self._config_get(None)

    def color(self, text: str):
        self._config(text, js_type=False)

    def colorIndex(self):
        return self._config_get(None)

    def colorIndex(self, num: float):
        self._config(num, js_type=False)

    def custom(self):
        return self._config_get(None)

    def custom(self, value: Any):
        self._config(value, js_type=False)

    def description(self):
        return self._config_get(None)

    def description(self, text: str):
        self._config(text, js_type=False)

    def dragDrop(self) -> 'OptionSeriesErrorbarDataDragdrop':
        return self._config_sub_data('dragDrop', OptionSeriesErrorbarDataDragdrop)

    def events(self) -> 'OptionSeriesErrorbarDataEvents':
        return self._config_sub_data('events', OptionSeriesErrorbarDataEvents)

    def high(self):
        return self._config_get(None)

    def high(self, num: float):
        self._config(num, js_type=False)

    def id(self):
        return self._config_get(None)

    def id(self, text: str):
        self._config(text, js_type=False)

    def labelrank(self):
        return self._config_get(None)

    def labelrank(self, num: float):
        self._config(num, js_type=False)

    def low(self):
        return self._config_get(None)

    def low(self, num: float):
        self._config(num, js_type=False)

    def name(self):
        return self._config_get(None)

    def name(self, text: str):
        self._config(text, js_type=False)

    def selected(self):
        return self._config_get(False)

    def selected(self, flag: bool):
        self._config(flag, js_type=False)

    def x(self):
        return self._config_get(None)

    def x(self, num: float):
        self._config(num, js_type=False)
class FixMatrixOpTest(unittest.TestCase):
    def test_fix_matrix_addition(self) -> None:
        """MatrixAdd over MatrixExp/ElementwiseMult: check the DOT dump
        before and after the type-fixing transform, and the generated BMG
        graph (a ToRealMatrix node must be inserted on the exp branch)."""
        self.maxDiff = None
        bmg = BMGraphBuilder()
        # Build a 2-vector of Normal(0, 1) samples.
        zeros = bmg.add_real_matrix(torch.zeros(2))
        ones = bmg.add_pos_real_matrix(torch.ones(2))
        tensor_elements = []
        for index in range(0, 2):
            index_node = bmg.add_natural(index)
            index_mu = bmg.add_vector_index(zeros, index_node)
            index_sigma = bmg.add_vector_index(ones, index_node)
            normal = bmg.add_normal(index_mu, index_sigma)
            sample = bmg.add_sample(normal)
            tensor_elements.append(sample)
        matrix = bmg.add_tensor(Size([2]), *tensor_elements)
        exp = bmg.add_matrix_exp(matrix)
        mult = bmg.add_elementwise_multiplication(matrix, matrix)
        add = bmg.add_matrix_addition(exp, mult)
        bmg.add_query(add, _rv_id())
        observed = to_dot(bmg, after_transform=False)
        expectation = '\ndigraph "graph" {\n  N00[label="[0.0,0.0]"];\n  N01[label=0];\n  N02[label=index];\n  N03[label="[1.0,1.0]"];\n  N04[label=index];\n  N05[label=Normal];\n  N06[label=Sample];\n  N07[label=1];\n  N08[label=index];\n  N09[label=index];\n  N10[label=Normal];\n  N11[label=Sample];\n  N12[label=Tensor];\n  N13[label=MatrixExp];\n  N14[label=ElementwiseMult];\n  N15[label=MatrixAdd];\n  N16[label=Query];\n  N00 -> N02[label=left];\n  N00 -> N08[label=left];\n  N01 -> N02[label=right];\n  N01 -> N04[label=right];\n  N02 -> N05[label=mu];\n  N03 -> N04[label=left];\n  N03 -> N09[label=left];\n  N04 -> N05[label=sigma];\n  N05 -> N06[label=operand];\n  N06 -> N12[label=left];\n  N07 -> N08[label=right];\n  N07 -> N09[label=right];\n  N08 -> N10[label=mu];\n  N09 -> N10[label=sigma];\n  N10 -> N11[label=operand];\n  N11 -> N12[label=right];\n  N12 -> N13[label=operand];\n  N12 -> N14[label=left];\n  N12 -> N14[label=right];\n  N13 -> N15[label=left];\n  N14 -> N15[label=right];\n  N15 -> N16[label=operator];\n}\n        '
        self.assertEqual(expectation.strip(), observed.strip())
        observed = to_dot(bmg, after_transform=True)
        expectation = '\ndigraph "graph" {\n  N00[label="[0.0,0.0]"];\n  N01[label=0];\n  N02[label=index];\n  N03[label="[1.0,1.0]"];\n  N04[label=index];\n  N05[label=Normal];\n  N06[label=Sample];\n  N07[label=1];\n  N08[label=index];\n  N09[label=index];\n  N10[label=Normal];\n  N11[label=Sample];\n  N12[label=2];\n  N13[label=ToMatrix];\n  N14[label=MatrixExp];\n  N15[label=ToRealMatrix];\n  N16[label=ElementwiseMult];\n  N17[label=MatrixAdd];\n  N18[label=Query];\n  N00 -> N02[label=left];\n  N00 -> N08[label=left];\n  N01 -> N02[label=right];\n  N01 -> N04[label=right];\n  N02 -> N05[label=mu];\n  N03 -> N04[label=left];\n  N03 -> N09[label=left];\n  N04 -> N05[label=sigma];\n  N05 -> N06[label=operand];\n  N06 -> N13[label=0];\n  N07 -> N08[label=right];\n  N07 -> N09[label=right];\n  N07 -> N13[label=columns];\n  N08 -> N10[label=mu];\n  N09 -> N10[label=sigma];\n  N10 -> N11[label=operand];\n  N11 -> N13[label=1];\n  N12 -> N13[label=rows];\n  N13 -> N14[label=operand];\n  N13 -> N16[label=left];\n  N13 -> N16[label=right];\n  N14 -> N15[label=operand];\n  N15 -> N17[label=left];\n  N16 -> N17[label=right];\n  N17 -> N18[label=operator];\n}\n        '
        self.assertEqual(expectation.strip(), observed.strip())
        generated_graph = to_bmg_graph(bmg)
        observed = generated_graph.graph.to_dot()
        expectation = '\ndigraph "graph" {\n  N0[label="matrix"];\n  N1[label="0"];\n  N2[label="Index"];\n  N3[label="matrix"];\n  N4[label="Index"];\n  N5[label="Normal"];\n  N6[label="~"];\n  N7[label="1"];\n  N8[label="Index"];\n  N9[label="Index"];\n  N10[label="Normal"];\n  N11[label="~"];\n  N12[label="2"];\n  N13[label="ToMatrix"];\n  N14[label="MatrixExp"];\n  N15[label="ToReal"];\n  N16[label="ElementwiseMultiply"];\n  N17[label="MatrixAdd"];\n  N0 -> N2;\n  N0 -> N8;\n  N1 -> N2;\n  N1 -> N4;\n  N2 -> N5;\n  N3 -> N4;\n  N3 -> N9;\n  N4 -> N5;\n  N5 -> N6;\n  N6 -> N13;\n  N7 -> N8;\n  N7 -> N9;\n  N7 -> N13;\n  N8 -> N10;\n  N9 -> N10;\n  N10 -> N11;\n  N11 -> N13;\n  N12 -> N13;\n  N13 -> N14;\n  N13 -> N16;\n  N13 -> N16;\n  N14 -> N15;\n  N15 -> N17;\n  N16 -> N17;\n  Q0[label="Query"];\n  N17 -> Q0;\n}\n        '
        self.assertEqual(expectation.strip(), observed.strip())
    def test_fix_elementwise_multiply(self) -> None:
        """ElementwiseMult over MatrixExp/MatrixAdd feeding MatrixSum:
        check the DOT dump before/after the type-fixing transform and the
        generated BMG graph (ToRealMatrix inserted on the exp branch)."""
        self.maxDiff = None
        bmg = BMGraphBuilder()
        # Build a 2-vector of Normal(0, 1) samples.
        zeros = bmg.add_real_matrix(torch.zeros(2))
        ones = bmg.add_pos_real_matrix(torch.ones(2))
        tensor_elements = []
        for index in range(0, 2):
            index_node = bmg.add_natural(index)
            index_mu = bmg.add_vector_index(zeros, index_node)
            index_sigma = bmg.add_vector_index(ones, index_node)
            normal = bmg.add_normal(index_mu, index_sigma)
            sample = bmg.add_sample(normal)
            tensor_elements.append(sample)
        matrix = bmg.add_tensor(Size([2]), *tensor_elements)
        exp = bmg.add_matrix_exp(matrix)
        add = bmg.add_matrix_addition(matrix, matrix)
        mult = bmg.add_elementwise_multiplication(exp, add)
        sum = bmg.add_matrix_sum(mult)
        bmg.add_query(sum, _rv_id())
        observed = to_dot(bmg, after_transform=False)
        expectation = '\ndigraph "graph" {\n  N00[label="[0.0,0.0]"];\n  N01[label=0];\n  N02[label=index];\n  N03[label="[1.0,1.0]"];\n  N04[label=index];\n  N05[label=Normal];\n  N06[label=Sample];\n  N07[label=1];\n  N08[label=index];\n  N09[label=index];\n  N10[label=Normal];\n  N11[label=Sample];\n  N12[label=Tensor];\n  N13[label=MatrixExp];\n  N14[label=MatrixAdd];\n  N15[label=ElementwiseMult];\n  N16[label=MatrixSum];\n  N17[label=Query];\n  N00 -> N02[label=left];\n  N00 -> N08[label=left];\n  N01 -> N02[label=right];\n  N01 -> N04[label=right];\n  N02 -> N05[label=mu];\n  N03 -> N04[label=left];\n  N03 -> N09[label=left];\n  N04 -> N05[label=sigma];\n  N05 -> N06[label=operand];\n  N06 -> N12[label=left];\n  N07 -> N08[label=right];\n  N07 -> N09[label=right];\n  N08 -> N10[label=mu];\n  N09 -> N10[label=sigma];\n  N10 -> N11[label=operand];\n  N11 -> N12[label=right];\n  N12 -> N13[label=operand];\n  N12 -> N14[label=left];\n  N12 -> N14[label=right];\n  N13 -> N15[label=left];\n  N14 -> N15[label=right];\n  N15 -> N16[label=operand];\n  N16 -> N17[label=operator];\n}\n        '
        self.assertEqual(expectation.strip(), observed.strip())
        observed = to_dot(bmg, after_transform=True)
        expectation = '\ndigraph "graph" {\n  N00[label="[0.0,0.0]"];\n  N01[label=0];\n  N02[label=index];\n  N03[label="[1.0,1.0]"];\n  N04[label=index];\n  N05[label=Normal];\n  N06[label=Sample];\n  N07[label=1];\n  N08[label=index];\n  N09[label=index];\n  N10[label=Normal];\n  N11[label=Sample];\n  N12[label=2];\n  N13[label=ToMatrix];\n  N14[label=MatrixExp];\n  N15[label=ToRealMatrix];\n  N16[label=MatrixAdd];\n  N17[label=ElementwiseMult];\n  N18[label=MatrixSum];\n  N19[label=Query];\n  N00 -> N02[label=left];\n  N00 -> N08[label=left];\n  N01 -> N02[label=right];\n  N01 -> N04[label=right];\n  N02 -> N05[label=mu];\n  N03 -> N04[label=left];\n  N03 -> N09[label=left];\n  N04 -> N05[label=sigma];\n  N05 -> N06[label=operand];\n  N06 -> N13[label=0];\n  N07 -> N08[label=right];\n  N07 -> N09[label=right];\n  N07 -> N13[label=columns];\n  N08 -> N10[label=mu];\n  N09 -> N10[label=sigma];\n  N10 -> N11[label=operand];\n  N11 -> N13[label=1];\n  N12 -> N13[label=rows];\n  N13 -> N14[label=operand];\n  N13 -> N16[label=left];\n  N13 -> N16[label=right];\n  N14 -> N15[label=operand];\n  N15 -> N17[label=left];\n  N16 -> N17[label=right];\n  N17 -> N18[label=operand];\n  N18 -> N19[label=operator];\n}\n        '
        self.assertEqual(expectation.strip(), observed.strip())
        generated_graph = to_bmg_graph(bmg)
        observed = generated_graph.graph.to_dot()
        expectation = '\ndigraph "graph" {\n  N0[label="matrix"];\n  N1[label="0"];\n  N2[label="Index"];\n  N3[label="matrix"];\n  N4[label="Index"];\n  N5[label="Normal"];\n  N6[label="~"];\n  N7[label="1"];\n  N8[label="Index"];\n  N9[label="Index"];\n  N10[label="Normal"];\n  N11[label="~"];\n  N12[label="2"];\n  N13[label="ToMatrix"];\n  N14[label="MatrixExp"];\n  N15[label="ToReal"];\n  N16[label="MatrixAdd"];\n  N17[label="ElementwiseMultiply"];\n  N18[label="MatrixSum"];\n  N0 -> N2;\n  N0 -> N8;\n  N1 -> N2;\n  N1 -> N4;\n  N2 -> N5;\n  N3 -> N4;\n  N3 -> N9;\n  N4 -> N5;\n  N5 -> N6;\n  N6 -> N13;\n  N7 -> N8;\n  N7 -> N9;\n  N7 -> N13;\n  N8 -> N10;\n  N9 -> N10;\n  N10 -> N11;\n  N11 -> N13;\n  N12 -> N13;\n  N13 -> N14;\n  N13 -> N16;\n  N13 -> N16;\n  N14 -> N15;\n  N15 -> N17;\n  N16 -> N17;\n  N17 -> N18;\n  Q0[label="Query"];\n  N18 -> Q0;\n}\n        '
        self.assertEqual(expectation.strip(), observed.strip())
    def test_fix_matrix_sum(self) -> None:
        """MatrixSum over a 2x2 matrix of Bernoulli samples: the transform
        must insert a ToRealMatrix before the sum; check the transformed DOT
        dump and the generated BMG graph."""
        self.maxDiff = None
        bmg = BMGraphBuilder()
        # 2x2 matrix of Bernoulli(p) samples with per-cell probabilities.
        probs = bmg.add_real_matrix(torch.tensor([[0.75, 0.25], [0.125, 0.875]]))
        tensor_elements = []
        for row in range(0, 2):
            row_node = bmg.add_natural(row)
            row_prob = bmg.add_column_index(probs, row_node)
            for column in range(0, 2):
                col_index = bmg.add_natural(column)
                prob = bmg.add_vector_index(row_prob, col_index)
                bernoulli = bmg.add_bernoulli(prob)
                sample = bmg.add_sample(bernoulli)
                tensor_elements.append(sample)
        matrix = bmg.add_tensor(Size([2, 2]), *tensor_elements)
        sum = bmg.add_matrix_sum(matrix)
        bmg.add_query(sum, _rv_id())
        observed_beanstalk = to_dot(bmg, after_transform=True)
        expected = '\ndigraph "graph" {\n  N00[label="[[0.75,0.25],\\\\n[0.125,0.875]]"];\n  N01[label=0];\n  N02[label=ColumnIndex];\n  N03[label=index];\n  N04[label=ToProb];\n  N05[label=Bernoulli];\n  N06[label=Sample];\n  N07[label=1];\n  N08[label=index];\n  N09[label=ToProb];\n  N10[label=Bernoulli];\n  N11[label=Sample];\n  N12[label=ColumnIndex];\n  N13[label=index];\n  N14[label=ToProb];\n  N15[label=Bernoulli];\n  N16[label=Sample];\n  N17[label=index];\n  N18[label=ToProb];\n  N19[label=Bernoulli];\n  N20[label=Sample];\n  N21[label=2];\n  N22[label=ToMatrix];\n  N23[label=ToRealMatrix];\n  N24[label=MatrixSum];\n  N25[label=Query];\n  N00 -> N02[label=left];\n  N00 -> N12[label=left];\n  N01 -> N02[label=right];\n  N01 -> N03[label=right];\n  N01 -> N13[label=right];\n  N02 -> N03[label=left];\n  N02 -> N08[label=left];\n  N03 -> N04[label=operand];\n  N04 -> N05[label=probability];\n  N05 -> N06[label=operand];\n  N06 -> N22[label=0];\n  N07 -> N08[label=right];\n  N07 -> N12[label=right];\n  N07 -> N17[label=right];\n  N08 -> N09[label=operand];\n  N09 -> N10[label=probability];\n  N10 -> N11[label=operand];\n  N11 -> N22[label=1];\n  N12 -> N13[label=left];\n  N12 -> N17[label=left];\n  N13 -> N14[label=operand];\n  N14 -> N15[label=probability];\n  N15 -> N16[label=operand];\n  N16 -> N22[label=2];\n  N17 -> N18[label=operand];\n  N18 -> N19[label=probability];\n  N19 -> N20[label=operand];\n  N20 -> N22[label=3];\n  N21 -> N22[label=columns];\n  N21 -> N22[label=rows];\n  N22 -> N23[label=operand];\n  N23 -> N24[label=operand];\n  N24 -> N25[label=operator];\n}\n        '
        self.assertEqual(observed_beanstalk.strip(), expected.strip())
        generated_graph = to_bmg_graph(bmg)
        observed_bmg = generated_graph.graph.to_dot()
        expectation = '\ndigraph "graph" {\n  N0[label="matrix"];\n  N1[label="0"];\n  N2[label="ColumnIndex"];\n  N3[label="Index"];\n  N4[label="ToProb"];\n  N5[label="Bernoulli"];\n  N6[label="~"];\n  N7[label="1"];\n  N8[label="Index"];\n  N9[label="ToProb"];\n  N10[label="Bernoulli"];\n  N11[label="~"];\n  N12[label="ColumnIndex"];\n  N13[label="Index"];\n  N14[label="ToProb"];\n  N15[label="Bernoulli"];\n  N16[label="~"];\n  N17[label="Index"];\n  N18[label="ToProb"];\n  N19[label="Bernoulli"];\n  N20[label="~"];\n  N21[label="2"];\n  N22[label="ToMatrix"];\n  N23[label="ToReal"];\n  N24[label="MatrixSum"];\n  N0 -> N2;\n  N0 -> N12;\n  N1 -> N2;\n  N1 -> N3;\n  N1 -> N13;\n  N2 -> N3;\n  N2 -> N8;\n  N3 -> N4;\n  N4 -> N5;\n  N5 -> N6;\n  N6 -> N22;\n  N7 -> N8;\n  N7 -> N12;\n  N7 -> N17;\n  N8 -> N9;\n  N9 -> N10;\n  N10 -> N11;\n  N11 -> N22;\n  N12 -> N13;\n  N12 -> N17;\n  N13 -> N14;\n  N14 -> N15;\n  N15 -> N16;\n  N16 -> N22;\n  N17 -> N18;\n  N18 -> N19;\n  N19 -> N20;\n  N20 -> N22;\n  N21 -> N22;\n  N21 -> N22;\n  N22 -> N23;\n  N23 -> N24;\n  Q0[label="Query"];\n  N24 -> Q0;\n}\n'
        self.assertEqual(expectation.strip(), observed_bmg.strip())
    def test_fix_matrix_exp_log_phi(self) -> None:
        """MatrixExp/MatrixLog/MatrixPhi of a stochastic 2x2 matrix get the
        right matrix-type conversions inserted by the fixing pass (ToRealMatrix
        for exp/phi, ToPosRealMatrix for log), and the fixed graph lowers to a
        BMG graph with matching structure."""
        self.maxDiff = None
        bmg = BMGraphBuilder()
        # Constant 2x2 matrix of Bernoulli parameters; each column/element is
        # indexed out below to produce four independent samples.
        probs = bmg.add_real_matrix(torch.tensor([[0.75, 0.25], [0.125, 0.875]]))
        tensor_elements = []
        for row in range(0, 2):
            row_node = bmg.add_natural(row)
            row_prob = bmg.add_column_index(probs, row_node)
            for column in range(0, 2):
                col_index = bmg.add_natural(column)
                prob = bmg.add_vector_index(row_prob, col_index)
                bernoulli = bmg.add_bernoulli(prob)
                sample = bmg.add_sample(bernoulli)
                tensor_elements.append(sample)
        # Pack the four boolean samples into a 2x2 matrix and query all three
        # matrix unary operators on it.
        matrix = bmg.add_tensor(Size([2, 2]), *tensor_elements)
        me = bmg.add_matrix_exp(matrix)
        ml = bmg.add_matrix_log(matrix)
        mp = bmg.add_matrix_phi(matrix)
        bmg.add_query(me, _rv_id())
        bmg.add_query(ml, _rv_id())
        bmg.add_query(mp, _rv_id())
        # Accumulated-graph DOT after the transformation pass: note the
        # inserted ToRealMatrix (N23) feeding exp/phi and ToPosRealMatrix
        # (N26) feeding log.
        observed_beanstalk = to_dot(bmg, after_transform=True)
        expectation = '\ndigraph "graph" {\n N00[label="[[0.75,0.25],\\\\n[0.125,0.875]]"];\n N01[label=0];\n N02[label=ColumnIndex];\n N03[label=index];\n N04[label=ToProb];\n N05[label=Bernoulli];\n N06[label=Sample];\n N07[label=1];\n N08[label=index];\n N09[label=ToProb];\n N10[label=Bernoulli];\n N11[label=Sample];\n N12[label=ColumnIndex];\n N13[label=index];\n N14[label=ToProb];\n N15[label=Bernoulli];\n N16[label=Sample];\n N17[label=index];\n N18[label=ToProb];\n N19[label=Bernoulli];\n N20[label=Sample];\n N21[label=2];\n N22[label=ToMatrix];\n N23[label=ToRealMatrix];\n N24[label=MatrixExp];\n N25[label=Query];\n N26[label=ToPosRealMatrix];\n N27[label=MatrixLog];\n N28[label=Query];\n N29[label=MatrixPhi];\n N30[label=Query];\n N00 -> N02[label=left];\n N00 -> N12[label=left];\n N01 -> N02[label=right];\n N01 -> N03[label=right];\n N01 -> N13[label=right];\n N02 -> N03[label=left];\n N02 -> N08[label=left];\n N03 -> N04[label=operand];\n N04 -> N05[label=probability];\n N05 -> N06[label=operand];\n N06 -> N22[label=0];\n N07 -> N08[label=right];\n N07 -> N12[label=right];\n N07 -> N17[label=right];\n N08 -> N09[label=operand];\n N09 -> N10[label=probability];\n N10 -> N11[label=operand];\n N11 -> N22[label=1];\n N12 -> N13[label=left];\n N12 -> N17[label=left];\n N13 -> N14[label=operand];\n N14 -> N15[label=probability];\n N15 -> N16[label=operand];\n N16 -> N22[label=2];\n N17 -> N18[label=operand];\n N18 -> N19[label=probability];\n N19 -> N20[label=operand];\n N20 -> N22[label=3];\n N21 -> N22[label=columns];\n N21 -> N22[label=rows];\n N22 -> N23[label=operand];\n N22 -> N26[label=operand];\n N23 -> N24[label=operand];\n N23 -> N29[label=operand];\n N24 -> N25[label=operator];\n N26 -> N27[label=operand];\n N27 -> N28[label=operator];\n N29 -> N30[label=operator];\n}\n '
        self.assertEqual(expectation.strip(), observed_beanstalk.strip())
        # Lower to an actual BMG graph and verify its DOT output as well
        # (ToReal/ToPosReal node names differ at this level).
        generated_graph = to_bmg_graph(bmg)
        observed_bmg = generated_graph.graph.to_dot()
        expectation = '\ndigraph "graph" {\n N0[label="matrix"];\n N1[label="0"];\n N2[label="ColumnIndex"];\n N3[label="Index"];\n N4[label="ToProb"];\n N5[label="Bernoulli"];\n N6[label="~"];\n N7[label="1"];\n N8[label="Index"];\n N9[label="ToProb"];\n N10[label="Bernoulli"];\n N11[label="~"];\n N12[label="ColumnIndex"];\n N13[label="Index"];\n N14[label="ToProb"];\n N15[label="Bernoulli"];\n N16[label="~"];\n N17[label="Index"];\n N18[label="ToProb"];\n N19[label="Bernoulli"];\n N20[label="~"];\n N21[label="2"];\n N22[label="ToMatrix"];\n N23[label="ToReal"];\n N24[label="MatrixExp"];\n N25[label="ToPosReal"];\n N26[label="MatrixLog"];\n N27[label="MatrixPhi"];\n N0 -> N2;\n N0 -> N12;\n N1 -> N2;\n N1 -> N3;\n N1 -> N13;\n N2 -> N3;\n N2 -> N8;\n N3 -> N4;\n N4 -> N5;\n N5 -> N6;\n N6 -> N22;\n N7 -> N8;\n N7 -> N12;\n N7 -> N17;\n N8 -> N9;\n N9 -> N10;\n N10 -> N11;\n N11 -> N22;\n N12 -> N13;\n N12 -> N17;\n N13 -> N14;\n N14 -> N15;\n N15 -> N16;\n N16 -> N22;\n N17 -> N18;\n N18 -> N19;\n N19 -> N20;\n N20 -> N22;\n N21 -> N22;\n N21 -> N22;\n N22 -> N23;\n N22 -> N25;\n N23 -> N24;\n N23 -> N27;\n N25 -> N26;\n Q0[label="Query"];\n N24 -> Q0;\n Q1[label="Query"];\n N26 -> Q1;\n Q2[label="Query"];\n N27 -> Q2;\n}\n'
        self.assertEqual(expectation.strip(), observed_bmg.strip())
    def test_fix_matrix_complement(self) -> None:
        """MatrixComplement is accepted on a stochastic boolean matrix and on
        constant probability/bool/simplex matrices; the fixer leaves all four
        queries intact and lowering to BMG succeeds."""
        self.maxDiff = None
        bmg = BMGraphBuilder()
        # Constant 2x2 matrix of Bernoulli parameters used to build four
        # boolean samples.
        probs = bmg.add_real_matrix(torch.tensor([[0.75, 0.25], [0.125, 0.875]]))
        tensor_elements = []
        for row in range(0, 2):
            row_node = bmg.add_natural(row)
            row_prob = bmg.add_column_index(probs, row_node)
            for column in range(0, 2):
                col_index = bmg.add_natural(column)
                prob = bmg.add_vector_index(row_prob, col_index)
                bernoulli = bmg.add_bernoulli(prob)
                sample = bmg.add_sample(bernoulli)
                tensor_elements.append(sample)
        matrix = bmg.add_tensor(Size([2, 2]), *tensor_elements)
        # Constant operands: a probability matrix, a bool-valued probability
        # matrix, and a simplex -- all legal inputs to MatrixComplement.
        const_prob_matrix = bmg.add_probability_matrix(torch.tensor([[0.25, 0.75], [0.5, 0.5]]))
        const_bool_matrix = bmg.add_probability_matrix(torch.tensor([[True, False], [False, False]]))
        const_prob_simplex = bmg.add_simplex(torch.tensor([0.5, 0.5]))
        mc_non_constant_boolean = bmg.add_matrix_complement(matrix)
        mc_const_prob = bmg.add_matrix_complement(const_prob_matrix)
        mc_const_bool = bmg.add_matrix_complement(const_bool_matrix)
        mc_const_simplex = bmg.add_matrix_complement(const_prob_simplex)
        bmg.add_query(mc_non_constant_boolean, _rv_id())
        bmg.add_query(mc_const_prob, _rv_id())
        bmg.add_query(mc_const_bool, _rv_id())
        bmg.add_query(mc_const_simplex, _rv_id())
        # Accumulated-graph DOT after the transformation pass.
        observed_beanstalk = to_dot(bmg, after_transform=True)
        expectation = '\ndigraph "graph" {\n N00[label="[[0.75,0.25],\\\\n[0.125,0.875]]"];\n N01[label=0];\n N02[label=ColumnIndex];\n N03[label=index];\n N04[label=ToProb];\n N05[label=Bernoulli];\n N06[label=Sample];\n N07[label=1];\n N08[label=index];\n N09[label=ToProb];\n N10[label=Bernoulli];\n N11[label=Sample];\n N12[label=ColumnIndex];\n N13[label=index];\n N14[label=ToProb];\n N15[label=Bernoulli];\n N16[label=Sample];\n N17[label=index];\n N18[label=ToProb];\n N19[label=Bernoulli];\n N20[label=Sample];\n N21[label=2];\n N22[label=ToMatrix];\n N23[label=MatrixComplement];\n N24[label=Query];\n N25[label="[[0.25,0.75],\\\\n[0.5,0.5]]"];\n N26[label=MatrixComplement];\n N27[label=Query];\n N28[label="[[True,False],\\\\n[False,False]]"];\n N29[label=MatrixComplement];\n N30[label=Query];\n N31[label="[0.5,0.5]"];\n N32[label=MatrixComplement];\n N33[label=Query];\n N00 -> N02[label=left];\n N00 -> N12[label=left];\n N01 -> N02[label=right];\n N01 -> N03[label=right];\n N01 -> N13[label=right];\n N02 -> N03[label=left];\n N02 -> N08[label=left];\n N03 -> N04[label=operand];\n N04 -> N05[label=probability];\n N05 -> N06[label=operand];\n N06 -> N22[label=0];\n N07 -> N08[label=right];\n N07 -> N12[label=right];\n N07 -> N17[label=right];\n N08 -> N09[label=operand];\n N09 -> N10[label=probability];\n N10 -> N11[label=operand];\n N11 -> N22[label=1];\n N12 -> N13[label=left];\n N12 -> N17[label=left];\n N13 -> N14[label=operand];\n N14 -> N15[label=probability];\n N15 -> N16[label=operand];\n N16 -> N22[label=2];\n N17 -> N18[label=operand];\n N18 -> N19[label=probability];\n N19 -> N20[label=operand];\n N20 -> N22[label=3];\n N21 -> N22[label=columns];\n N21 -> N22[label=rows];\n N22 -> N23[label=operand];\n N23 -> N24[label=operator];\n N25 -> N26[label=operand];\n N26 -> N27[label=operator];\n N28 -> N29[label=operand];\n N29 -> N30[label=operator];\n N31 -> N32[label=operand];\n N32 -> N33[label=operator];\n}\n '
        self.assertEqual(expectation.strip(), observed_beanstalk.strip())
        # Lower to an actual BMG graph and verify its DOT output as well.
        generated_graph = to_bmg_graph(bmg)
        observed_bmg = generated_graph.graph.to_dot()
        expectation = '\ndigraph "graph" {\n N0[label="matrix"];\n N1[label="0"];\n N2[label="ColumnIndex"];\n N3[label="Index"];\n N4[label="ToProb"];\n N5[label="Bernoulli"];\n N6[label="~"];\n N7[label="1"];\n N8[label="Index"];\n N9[label="ToProb"];\n N10[label="Bernoulli"];\n N11[label="~"];\n N12[label="ColumnIndex"];\n N13[label="Index"];\n N14[label="ToProb"];\n N15[label="Bernoulli"];\n N16[label="~"];\n N17[label="Index"];\n N18[label="ToProb"];\n N19[label="Bernoulli"];\n N20[label="~"];\n N21[label="2"];\n N22[label="ToMatrix"];\n N23[label="MatrixComplement"];\n N24[label="matrix"];\n N25[label="MatrixComplement"];\n N26[label="matrix"];\n N27[label="MatrixComplement"];\n N28[label="simplex"];\n N29[label="MatrixComplement"];\n N0 -> N2;\n N0 -> N12;\n N1 -> N2;\n N1 -> N3;\n N1 -> N13;\n N2 -> N3;\n N2 -> N8;\n N3 -> N4;\n N4 -> N5;\n N5 -> N6;\n N6 -> N22;\n N7 -> N8;\n N7 -> N12;\n N7 -> N17;\n N8 -> N9;\n N9 -> N10;\n N10 -> N11;\n N11 -> N22;\n N12 -> N13;\n N12 -> N17;\n N13 -> N14;\n N14 -> N15;\n N15 -> N16;\n N16 -> N22;\n N17 -> N18;\n N18 -> N19;\n N19 -> N20;\n N20 -> N22;\n N21 -> N22;\n N21 -> N22;\n N22 -> N23;\n N24 -> N25;\n N26 -> N27;\n N28 -> N29;\n Q0[label="Query"];\n N23 -> Q0;\n Q1[label="Query"];\n N25 -> Q1;\n Q2[label="Query"];\n N27 -> Q2;\n Q3[label="Query"];\n N29 -> Q3;\n}\n'
        self.assertEqual(expectation.strip(), observed_bmg.strip())
    def test_fix_matrix_log1mexp(self) -> None:
        """MatrixLog1mexp of a negative-real matrix: the fixer rewrites each
        ``-2 * sample`` product as an if-then-else selecting between -2.0 and
        0.0 (boolean times constant), and accepts a constant negative-real
        matrix operand directly."""
        self.maxDiff = None
        bmg = BMGraphBuilder()
        # Constant 2x2 matrix of Bernoulli parameters used to build four
        # boolean samples.
        probs = bmg.add_real_matrix(torch.tensor([[0.75, 0.25], [0.125, 0.875]]))
        tensor_elements = []
        for row in range(0, 2):
            row_node = bmg.add_natural(row)
            row_prob = bmg.add_column_index(probs, row_node)
            for column in range(0, 2):
                col_index = bmg.add_natural(column)
                prob = bmg.add_vector_index(row_prob, col_index)
                bern = bmg.add_bernoulli(prob)
                sample = bmg.add_sample(bern)
                # Each matrix element is -2 * sample, i.e. a negative real.
                neg_two = bmg.add_neg_real((- 2.0))
                neg_samples = bmg.add_multiplication(neg_two, sample)
                tensor_elements.append(neg_samples)
        matrix = bmg.add_tensor(Size([2, 2]), *tensor_elements)
        # Constant negative-real matrix operand, legal as-is.
        const_neg_real_matrix = bmg.add_neg_real_matrix(torch.tensor([[(- 0.25), (- 0.75)], [(- 0.5), (- 0.5)]]))
        mlog1mexp_non_constant_real = bmg.add_matrix_log1mexp(matrix)
        mlog1mexp_const_neg_real = bmg.add_matrix_log1mexp(const_neg_real_matrix)
        bmg.add_query(mlog1mexp_non_constant_real, _rv_id())
        bmg.add_query(mlog1mexp_const_neg_real, _rv_id())
        # Accumulated-graph DOT after the transformation pass: note the
        # bool-times-constant products rewritten into if nodes N24..N27.
        observed_beanstalk = to_dot(bmg, after_transform=True)
        expectation = '\ndigraph "graph" {\n N00[label="[[0.75,0.25],\\\\n[0.125,0.875]]"];\n N01[label=0];\n N02[label=ColumnIndex];\n N03[label=index];\n N04[label=ToProb];\n N05[label=Bernoulli];\n N06[label=Sample];\n N07[label=1];\n N08[label=index];\n N09[label=ToProb];\n N10[label=Bernoulli];\n N11[label=Sample];\n N12[label=ColumnIndex];\n N13[label=index];\n N14[label=ToProb];\n N15[label=Bernoulli];\n N16[label=Sample];\n N17[label=index];\n N18[label=ToProb];\n N19[label=Bernoulli];\n N20[label=Sample];\n N21[label=2];\n N22[label=-2.0];\n N23[label=0.0];\n N24[label=if];\n N25[label=if];\n N26[label=if];\n N27[label=if];\n N28[label=ToMatrix];\n N29[label=MatrixLog1mexp];\n N30[label=Query];\n N31[label="[[-0.25,-0.75],\\\\n[-0.5,-0.5]]"];\n N32[label=MatrixLog1mexp];\n N33[label=Query];\n N00 -> N02[label=left];\n N00 -> N12[label=left];\n N01 -> N02[label=right];\n N01 -> N03[label=right];\n N01 -> N13[label=right];\n N02 -> N03[label=left];\n N02 -> N08[label=left];\n N03 -> N04[label=operand];\n N04 -> N05[label=probability];\n N05 -> N06[label=operand];\n N06 -> N24[label=condition];\n N07 -> N08[label=right];\n N07 -> N12[label=right];\n N07 -> N17[label=right];\n N08 -> N09[label=operand];\n N09 -> N10[label=probability];\n N10 -> N11[label=operand];\n N11 -> N25[label=condition];\n N12 -> N13[label=left];\n N12 -> N17[label=left];\n N13 -> N14[label=operand];\n N14 -> N15[label=probability];\n N15 -> N16[label=operand];\n N16 -> N26[label=condition];\n N17 -> N18[label=operand];\n N18 -> N19[label=probability];\n N19 -> N20[label=operand];\n N20 -> N27[label=condition];\n N21 -> N28[label=columns];\n N21 -> N28[label=rows];\n N22 -> N24[label=consequence];\n N22 -> N25[label=consequence];\n N22 -> N26[label=consequence];\n N22 -> N27[label=consequence];\n N23 -> N24[label=alternative];\n N23 -> N25[label=alternative];\n N23 -> N26[label=alternative];\n N23 -> N27[label=alternative];\n N24 -> N28[label=0];\n N25 -> N28[label=1];\n N26 -> N28[label=2];\n N27 -> N28[label=3];\n N28 -> N29[label=operand];\n N29 -> N30[label=operator];\n N31 -> N32[label=operand];\n N32 -> N33[label=operator];\n}\n '
        self.assertEqual(expectation.strip(), observed_beanstalk.strip())
        # Lower to an actual BMG graph and verify its DOT output as well
        # (note BMG renders the 0.0 alternative as -1e-10).
        generated_graph = to_bmg_graph(bmg)
        observed_bmg = generated_graph.graph.to_dot()
        expectation = '\ndigraph "graph" {\n N0[label="matrix"];\n N1[label="0"];\n N2[label="ColumnIndex"];\n N3[label="Index"];\n N4[label="ToProb"];\n N5[label="Bernoulli"];\n N6[label="~"];\n N7[label="1"];\n N8[label="Index"];\n N9[label="ToProb"];\n N10[label="Bernoulli"];\n N11[label="~"];\n N12[label="ColumnIndex"];\n N13[label="Index"];\n N14[label="ToProb"];\n N15[label="Bernoulli"];\n N16[label="~"];\n N17[label="Index"];\n N18[label="ToProb"];\n N19[label="Bernoulli"];\n N20[label="~"];\n N21[label="2"];\n N22[label="-2"];\n N23[label="-1e-10"];\n N24[label="IfThenElse"];\n N25[label="IfThenElse"];\n N26[label="IfThenElse"];\n N27[label="IfThenElse"];\n N28[label="ToMatrix"];\n N29[label="MatrixLog1mexp"];\n N30[label="matrix"];\n N31[label="MatrixLog1mexp"];\n N0 -> N2;\n N0 -> N12;\n N1 -> N2;\n N1 -> N3;\n N1 -> N13;\n N2 -> N3;\n N2 -> N8;\n N3 -> N4;\n N4 -> N5;\n N5 -> N6;\n N6 -> N24;\n N7 -> N8;\n N7 -> N12;\n N7 -> N17;\n N8 -> N9;\n N9 -> N10;\n N10 -> N11;\n N11 -> N25;\n N12 -> N13;\n N12 -> N17;\n N13 -> N14;\n N14 -> N15;\n N15 -> N16;\n N16 -> N26;\n N17 -> N18;\n N18 -> N19;\n N19 -> N20;\n N20 -> N27;\n N21 -> N28;\n N21 -> N28;\n N22 -> N24;\n N22 -> N25;\n N22 -> N26;\n N22 -> N27;\n N23 -> N24;\n N23 -> N25;\n N23 -> N26;\n N23 -> N27;\n N24 -> N28;\n N25 -> N28;\n N26 -> N28;\n N27 -> N28;\n N28 -> N29;\n N30 -> N31;\n Q0[label="Query"];\n N29 -> Q0;\n Q1[label="Query"];\n N31 -> Q1;\n}\n'
        self.assertEqual(expectation.strip(), observed_bmg.strip())
def parse_area(tags):
    """Classify an OSM-style tag mapping into an area group string.

    Buildings win over everything else: a generic ``building=yes`` is refined
    via ``amenity``/``tourism`` when available, otherwise stays plain
    ``'building'``.  ``area:highway`` comes next, then a fixed priority list
    of other keys.  Returns ``'key:value'`` (or ``'building'``), or None when
    no known key is present.
    """
    if 'building' in tags:
        label = 'building'
        building_kind = tags['building']
        if building_kind == 'yes':
            # Generic building: try to refine the kind from related tags.
            for refine_key in ('amenity', 'tourism'):
                if refine_key in tags:
                    building_kind = tags[refine_key]
                    break
        if building_kind != 'yes':
            label = f'{label}:{building_kind}'
        return label
    if 'area:highway' in tags:
        return f"highway:{tags['area:highway']}"
    # Remaining keys in fixed priority order; first match wins.
    for candidate in ('amenity', 'landcover', 'leisure', 'shop', 'highway', 'tourism', 'natural', 'waterway', 'landuse'):
        if candidate in tags:
            return f'{candidate}:{tags[candidate]}'
    return None
class EventFile(OutputFile):
    """Output file that records timestamped events of an experiment session.

    On creation the file header is written (experiment hash, module hashes,
    display info, OS) and the file is saved; events are appended via
    :meth:`log`.  An inter-event-interval summary is written at interpreter
    exit.

    NOTE(review): ``clock`` and ``delimiter`` appeared as plain methods, but
    ``log`` concatenates ``self.delimiter`` into a string, which would raise
    TypeError on a bound method -- restored here as read-only properties.
    """

    _file_suffix = '.xpe'  # default file-name extension for event files

    def __init__(self, additional_suffix, directory=None, delimiter=None, clock=None, time_stamp=None):
        """Create the event file and write its header.

        Parameters
        ----------
        additional_suffix : str or None
            extra suffix inserted before the ``.xpe`` extension
        directory : str, optional
            target directory; defaults to ``defaults.eventfile_directory``
        delimiter : str, optional
            column delimiter; defaults to ``defaults.eventfile_delimiter``
        clock : optional
            clock used for time stamps; falls back to the active
            experiment's clock (raises RuntimeError if not initialized)
        time_stamp : optional
            forwarded to ``OutputFile``
        """
        if directory is None:
            directory = defaults.eventfile_directory
        if additional_suffix is None:
            additional_suffix = ''
        if len(additional_suffix) > 0:
            suffix = '.{0}{1}'.format(additional_suffix, self._file_suffix)
        else:
            suffix = self._file_suffix
        OutputFile.__init__(self, suffix, directory, time_stamp=time_stamp)
        if delimiter is not None:
            self._delimiter = delimiter
        else:
            self._delimiter = defaults.eventfile_delimiter
        if clock is not None:
            self._clock = clock
        else:
            if not _internals.active_exp.is_initialized:
                raise RuntimeError('Cannot find a clock. Initialize Expyriment!')
            self._clock = _internals.active_exp.clock
        # Screen info is best-effort: the screen may not exist yet.
        try:
            display = repr(_internals.active_exp.screen.window_size)
            window_mode = repr(_internals.active_exp.screen.window_mode)
            open_gl = repr(_internals.active_exp.screen.open_gl)
        except Exception:
            display = 'unknown'
            window_mode = 'unknown'
            open_gl = 'unknown'
        self.write_comment('sha1: {0}'.format(get_experiment_secure_hash()))
        self.write_comment('modules: {0}'.format(module_hashes_as_string()))
        self.write_comment('display: size={0}, window_mode={1}, open_gl={2}'.format(display, window_mode, open_gl))
        self.write_comment('os: {0}'.format(uname()))
        self.write_line('Time,Type,Event,Value,Detail,Detail2')
        self.save()
        self._inter_event_intervall_log = _InterEventIntervallLog()
        # Ensure the interval summary is appended when the process exits.
        atexit.register(self._write_inter_event_intervall_summary)

    @property
    def clock(self):
        """The clock used for time stamps (read-only)."""
        return self._clock

    @property
    def delimiter(self):
        """The delimiter between logged columns (read-only)."""
        return self._delimiter

    def log(self, event, log_event_tag=None):
        """Write one event line and return the time it was logged.

        Parameters
        ----------
        event : str or any
            the event to log (converted to str if needed)
        log_event_tag : optional
            when given, the event is also added to the
            inter-event-interval summary under this tag

        Returns
        -------
        int
            the clock time at which the event was logged
        """
        log_time = self._clock.time
        if not isinstance(event, (str, bytes)):
            event = str(event)
        self.write_line(repr(log_time) + self.delimiter + event)
        if log_event_tag is not None:
            self._inter_event_intervall_log.add_event(log_event_tag, log_time)
        return log_time

    def warn(self, message):
        """Write a warning line to the event file."""
        self.write_line('WARNING: ' + message)

    def _write_inter_event_intervall_summary(self):
        """Append the inter-event-interval summary as comments (atexit hook)."""
        for summary_line in self._inter_event_intervall_log.summary():
            self.write_comment(summary_line)
class ObjectCreationTest(BaseEvenniaTest):
    """Smoke tests for Evennia's entity-creation helpers."""

    def test_channel_create(self):
        """A created channel stores the supplied description on obj.db.desc."""
        desc_text = 'A place to talk about coffee.'
        channel, create_errors = DefaultChannel.create('coffeetalk', description=desc_text)
        self.assertTrue(channel, create_errors)
        self.assertFalse(create_errors, create_errors)
        self.assertEqual(desc_text, channel.db.desc)

    def test_message_create(self):
        """A created message renders as 'sender->: text'."""
        message = create_message('peewee herman', 'heh-heh!', header='mail time!')
        self.assertTrue(message)
        self.assertEqual('peewee herman->: heh-heh!', str(message))
def test_substitute_branches_by():
    """substitute_branches_by() replaces a condition node's true branch with a
    new condition-node subtree and rewires code-node reachability accordingly.

    Builds ``seq(code_1, if(a){code_2}else{code_3}, code_4)``, then substitutes
    the branches of the ``a``-condition with ``if(b){code_5}`` and checks node
    count, roots, children, branch wiring, and reachability.
    """
    asforest = AbstractSyntaxForest(condition_handler=ConditionHandler())
    # Leaf code nodes used throughout the forest.
    code_node_1 = asforest.add_code_node([Assignment(var('u'), const(9))])
    code_node_2 = asforest.add_code_node([Break()])
    code_node_3 = asforest.add_code_node([Assignment(var('v'), const(9))])
    code_node_4 = asforest.add_code_node([Assignment(var('w'), const(9))])
    code_node_5 = asforest.add_code_node([Assignment(var('x'), const(9))])
    # if(a) { code_2 } else { code_3 }
    condition = asforest._add_condition_node_with(LogicCondition.initialize_symbol('a', asforest.factory.logic_context), code_node_2, code_node_3)
    seq_node = asforest.factory.create_seq_node()
    asforest._add_node(seq_node)
    asforest._add_edges_from(((seq_node, code_node_1), (seq_node, condition), (seq_node, code_node_4)))
    # Reachability: code_1 precedes everything; code_3 precedes code_4.
    asforest._code_node_reachability_graph.add_reachability_from(((code_node_1, code_node_3), (code_node_1, code_node_2), (code_node_1, code_node_4), (code_node_3, code_node_4)))
    seq_node.sort_children()
    # New subtree if(b) { code_5 } that replaces the branches of `condition`.
    branch = asforest._add_condition_node_with(LogicCondition.initialize_symbol('b', asforest.factory.logic_context), code_node_5)
    asforest.substitute_branches_by(branch, condition)
    # After substitution: 9 nodes, the new branch is the sole true branch,
    # and code_5 inherits code_2/code_3's reachability relations.
    assert ((len(asforest) == 9) and (set(asforest.get_roots) == {seq_node, asforest._current_root}) and (seq_node.children == (code_node_1, condition, code_node_4)) and (condition.true_branch_child == branch) and (condition.false_branch is None) and asforest._code_node_reachability_graph.reaches(code_node_1, code_node_5) and asforest._code_node_reachability_graph.reaches(code_node_1, code_node_4) and asforest._code_node_reachability_graph.reaches(code_node_5, code_node_4))
def _create_model():
    """Build the per-ticker grouped out-of-fold ensemble model.

    A bagged ensemble of log-transformed LightGBM and CatBoost regressors,
    trained out-of-fold with rows grouped by the ``ticker`` column.
    """
    lgbm_member = LogExpModel(lgbm.sklearn.LGBMRegressor())
    catboost_member = LogExpModel(ctb.CatBoostRegressor(verbose=False))
    bagged_ensemble = EnsembleModel(
        base_models=[lgbm_member, catboost_member],
        bagging_fraction=BAGGING_FRACTION,
        model_cnt=MODEL_CNT,
    )
    return GroupedOOFModel(base_model=bagged_ensemble, group_column='ticker', fold_cnt=FOLD_CNT)
class OptionSeriesArearangeAccessibility(Options):
    """Accessibility options for an arearange series.

    NOTE(review): the extracted source declared each option twice with the
    same name (getter then setter), so the second plain ``def`` silently
    shadowed the first and the getters were unreachable.  Restored as
    property/setter pairs, matching the getter/setter call pattern
    (``_config_get`` / ``_config``).
    """

    @property
    def description(self):
        """Configured ``description`` value, or None when unset."""
        return self._config_get(None)

    @description.setter
    def description(self, text: str):
        self._config(text, js_type=False)

    @property
    def descriptionFormat(self):
        """Configured ``descriptionFormat`` value, or None when unset."""
        return self._config_get(None)

    @descriptionFormat.setter
    def descriptionFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        """Configured ``enabled`` flag, or None when unset."""
        return self._config_get(None)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def exposeAsGroupOnly(self):
        """Configured ``exposeAsGroupOnly`` flag, or None when unset."""
        return self._config_get(None)

    @exposeAsGroupOnly.setter
    def exposeAsGroupOnly(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def keyboardNavigation(self) -> 'OptionSeriesArearangeAccessibilityKeyboardnavigation':
        """Keyboard-navigation sub-options object."""
        return self._config_sub_data('keyboardNavigation', OptionSeriesArearangeAccessibilityKeyboardnavigation)

    @property
    def point(self) -> 'OptionSeriesArearangeAccessibilityPoint':
        """Per-point accessibility sub-options object."""
        return self._config_sub_data('point', OptionSeriesArearangeAccessibilityPoint)
def test_structlog_processor_no_span(elasticapm_client):
    """With an active transaction but no span, the structlog processor adds
    transaction and trace ids to the event dict and omits span.id."""
    active_transaction = elasticapm_client.begin_transaction('test')
    processed = structlog_processor(None, None, {})
    assert processed['transaction.id'] == active_transaction.id
    assert processed['trace.id'] == active_transaction.trace_parent.trace_id
    assert 'span.id' not in processed
def test_modify_base(mockproject):
    """Modifying a base contract triggers recompilation of the base and of
    the contract that inherits from it."""
    source_lines = BASE_CONTRACT.split('\n')
    source_lines[4] = source_lines[4] + '// comment'
    modified_source = '\n'.join(source_lines)
    base_path = mockproject._path.joinpath('contracts/BaseFoo.sol')
    with base_path.open('w') as handle:
        handle.write(modified_source)
    mockproject.load()
    recompiled = sorted(mockproject._compile.call_args[0][0])
    assert recompiled == ['contracts/BaseFoo.sol', 'contracts/Foo.sol']
def render_table(header, rows):
    """Lazily render an ASCII table, one line per yield.

    ``header`` is ``(columns, sep)`` where ``columns`` is a sequence of
    ``(title, _, width)`` triples and ``sep`` is the column separator string.
    Each row in ``rows`` is a sequence of ``(valuetext, value)`` pairs, one
    per column.  Yields the title line, a ``=`` divider sized by the column
    widths, then one line per row.
    """
    columns, sep = header
    title_parts = []
    divider_parts = []
    for title, _, width in columns:
        title_parts.append(sep + title)
        divider_parts.append(sep + '=' * width)
    yield ''.join(title_parts)
    yield ''.join(divider_parts)
    for row in rows:
        # Every row must supply exactly one cell per column.
        assert len(row) == len(columns), (row, columns)
        cells = []
        for (cell_text, _value), _column in zip(row, columns):
            cells.append(sep + cell_text)
        yield ''.join(cells)
def extractJujubbtranslationsWordpressCom(item):
    """Parse a release item from jujubbtranslations.wordpress.com.

    Returns None for previews or titles without volume/chapter info, a
    release message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip previews and anything we could not extract a vol/chapter from.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    known_tags = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag_name, series_name, tl_type in known_tags:
        if tag_name in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class HttpImageHandler(BaseHTTPRequestHandler):
    """HTTP handler that serves artwork images for the EmbyCon Kodi add-on.

    GET requests return a JPEG built from the request path when PIL is
    available, otherwise the add-on's static PNG icon.  QUIT is a custom
    verb used to shut the embedded server down.
    """

    def log_message(self, format, *args):
        # Route BaseHTTPRequestHandler's access log into the add-on logger
        # instead of stderr.
        log_line = (format % args)
        log.debug(log_line)
        return

    def do_GET(self):
        # All GETs are treated as image requests; the path selects the image.
        log.debug('HttpImageHandler:do_GET()')
        self.serve_image()
        return

    def do_HEAD(self):
        # HEAD: acknowledge without a body.
        log.debug('HttpImageHandler:do_HEAD()')
        self.send_response(200)
        self.end_headers()
        return

    def do_QUIT(self):
        # Custom verb: acknowledge; the server loop elsewhere stops on QUIT.
        log.debug('HttpImageHandler:do_QUIT()')
        self.send_response(200)
        self.end_headers()
        return

    def serve_image(self):
        """Write the image response: generated JPEG when PIL is present,
        otherwise the bundled PNG icon."""
        if pil_loaded:
            # build_image renders the JPEG bytes from the request path.
            image_bytes = build_image(self.path)
            self.send_response(200)
            self.send_header('Content-type', 'image/jpeg')
            self.send_header('Content-Length', str(len(image_bytes)))
            self.end_headers()
            self.wfile.write(image_bytes)
        else:
            # Fallback: serve the static add-on icon.
            # NOTE(review): on Kodi 19+/Python 3 translatePath returns str,
            # where .decode('utf-8') would raise AttributeError -- confirm
            # the targeted Kodi/Python version.
            image_path = xbmcvfs.translatePath('special://home/addons/plugin.video.embycon/icon.png').decode('utf-8')
            self.send_response(200)
            self.send_header('Content-type', 'image/png')
            modified = xbmcvfs.Stat(image_path).st_mtime()
            self.send_header('Last-Modified', ('%s' % modified))
            image = xbmcvfs.File(image_path)
            size = image.size()
            self.send_header('Content-Length', str(size))
            self.end_headers()
            self.wfile.write(image.readBytes())
            image.close()
            del image
def _parse_func_metadata(ops: List[Operator], inputs: List[Tensor], outputs: List[Tensor], input_accessors: List[TensorAccessor], output_accessors: List[TensorAccessor], original_inputs: List[Tensor], original_outputs: List[Tensor], backend_spec: BackendSpec) -> FusedElementwiseMetaData:
    """Collect everything the fused-elementwise kernel codegen needs into one
    FusedElementwiseMetaData record.

    Derives the jagged/dense indexing configuration, per-input alignments and
    broadcast sizes, backend read/op/data types (including the fp32
    accumulation type), sub-function metadata for each fused op, and the
    dynamic dims of the output shapes.
    """
    # Jagged/dense indexing configuration derived from the accessors.
    (mixed_jagged_dense_indexing, output_volume, use_jagged_space_indexing) = _get_mixed_jagged_dense_config(input_accessors, output_accessors)
    # All inputs share the dtype of the first input.
    dtype = inputs[0]._attrs['dtype']
    (input_alignments, input_broadcast_sizes) = _get_alignments_and_broadcast_sizes(dtype, input_accessors, output_accessors, mixed_jagged_dense_indexing, output_volume)
    # Widest read type is set by the best (max) alignment; per-input read
    # types follow each input's own alignment.
    max_read_type = backend_spec.get_elementwise_read_backend_type(max(input_alignments), dtype)
    read_types = [backend_spec.get_elementwise_read_backend_type(alignment, dtype) for alignment in input_alignments]
    op_type = backend_spec.get_elementwise_op_backend_type(max(input_alignments), dtype)
    data_type = backend_spec.dtype_to_backend_type(dtype)
    # float32 backend type used when fp32 accumulation is selected below.
    float32_type = backend_spec.dtype_to_backend_type('float32')
    # May override op_type and decide whether to accumulate in fp32.
    (sub_func_metadata, op_type, use_fp32_acc) = _get_sub_func_metadata(ops, data_type, op_type, backend_spec, float32_type)
    dynamic_dims = get_dynamic_dims(*[acc.original_shapes for acc in output_accessors])
    return FusedElementwiseMetaData(inputs, outputs, input_accessors, output_accessors, original_inputs, original_outputs, max_read_type, read_types, op_type, data_type, input_broadcast_sizes, dynamic_dims, sub_func_metadata, mixed_jagged_dense_indexing, output_volume, use_jagged_space_indexing, use_fp32_acc, float32_type)
class OptionPlotoptionsGaugeSonificationDefaultinstrumentoptionsMappingFrequency(Options):
    """Frequency-mapping options for gauge sonification default instruments.

    NOTE(review): the extracted source declared each option twice with the
    same name (getter then setter), so the second plain ``def`` silently
    shadowed the first and the getters were unreachable.  Restored as
    property/setter pairs, matching the getter/setter call pattern
    (``_config_get`` / ``_config``).
    """

    @property
    def mapFunction(self):
        """Configured ``mapFunction`` value, or None when unset."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Configured ``mapTo`` value, or None when unset."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Configured ``max`` value, or None when unset."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Configured ``min`` value, or None when unset."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Configured ``within`` value, or None when unset."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def observer_change_handler(event, graph, handler, target, dispatcher):
    """Move change notifiers from the old value to the new value.

    Detaches the notifier graph from ``event.old`` (ignoring the case where
    none was attached) and attaches it to ``event.new``, skipping values that
    are in UNOBSERVABLE_VALUES.
    """
    def _is_observable(value):
        # True unless the value is one of the unobservable sentinels.
        return all(value is not sentinel for sentinel in UNOBSERVABLE_VALUES)

    if _is_observable(event.old):
        try:
            add_or_remove_notifiers(object=event.old, graph=graph, handler=handler, target=target, dispatcher=dispatcher, remove=True)
        except NotifierNotFound:
            # The old value may never have had notifiers attached.
            pass
    if _is_observable(event.new):
        add_or_remove_notifiers(object=event.new, graph=graph, handler=handler, target=target, dispatcher=dispatcher, remove=False)
def change_owner_of_output_files(files_dir: Path, owner: str) -> int:
    """Recursively re-own ``files_dir`` to ``owner`` and make it user-writable.

    :param files_dir: directory whose contents are re-owned
    :param owner: ownership string in the form ``<user id>:<group id>``
    :return: 0 on success, non-zero on validation failure or command failure
    """
    # \Z anchors the pattern at the end of the string: re.match() only
    # anchors at the start, so without it input such as "1:2; rm -rf /"
    # would pass validation and be interpolated into the sudo shell
    # command below (shell injection).
    if not match('\\d+:\\d+\\Z', owner):
        logging.error('ownership string should have the format <user id>:<group id>')
        return 1
    (_, return_code_chown) = execute_shell_command_get_return_code(f'sudo chown -R {owner} {files_dir}')
    (_, return_code_chmod) = execute_shell_command_get_return_code(f'sudo chmod -R u+rw {files_dir}')
    # Non-zero if either command failed.
    return (return_code_chmod | return_code_chown)
def test_iter_sse_id_retry_invalid() -> None:
    """An SSE stream whose ``retry`` field is non-numeric yields no events."""
    # NOTE(review): this block appears truncated by extraction -- the base
    # class list of ``Body`` (presumably an httpx byte-stream class) and the
    # response constructor (presumably ``httpx.Response(200, ...)``) are
    # missing, so the code below is not syntactically valid as shown.
    # TODO: restore from the original source before running.
    class Body(
        def __iter__(self) -> Iterator[bytes]:
            # Invalid retry value ("1667a" is not an integer) followed by the
            # blank line that terminates the event.
            (yield b'retry: 1667a\n')
            (yield b'\n')
    response = headers={'content-type': 'text/event-stream'}, stream=Body())
    events = list(EventSource(response).iter_sse())
    # The event carries no data/id and the retry field is invalid, so no
    # complete SSE event is produced.
    assert (len(events) == 0)
class OptionSeriesSankeySonificationTracksMappingVolume(Options):
    """Volume-mapping options for sankey series sonification tracks.

    NOTE(review): the extracted source declared each option twice with the
    same name (getter then setter), so the second plain ``def`` silently
    shadowed the first and the getters were unreachable.  Restored as
    property/setter pairs, matching the getter/setter call pattern
    (``_config_get`` / ``_config``).
    """

    @property
    def mapFunction(self):
        """Configured ``mapFunction`` value, or None when unset."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Configured ``mapTo`` value, or None when unset."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Configured ``max`` value, or None when unset."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Configured ``min`` value, or None when unset."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Configured ``within`` value, or None when unset."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def startup():
    """Parse command-line options and launch Rickrack.

    Boolean switches are stored as True; value-carrying options are stored
    under their canonical key; ``-d/--project/--dir`` sets the project path.
    A trailing positional argument, when present, overrides ``input``.
    """
    # Boolean switches: alias -> canonical dp_argv key (stored as True).
    flag_keys = {
        '-h': 'help', '--help': 'help',
        '-v': 'version', '--version': 'version',
        '-t': 'temporary', '--temporary': 'temporary',
    }
    # Value-carrying options: alias -> canonical dp_argv key.
    value_keys = {
        '-r': 'reset', '--reset': 'reset',
        '-i': 'input', '--input': 'input',
        '-o': 'output', '--output': 'output', '--export': 'output',
        '-w': 'window', '--window': 'window',
        '-e': 'sequence', '--sequence': 'sequence',
        '-l': 'lang', '--lang': 'lang', '--locale': 'lang',
        '-p': 'port', '--port': 'port',
    }
    project_path = ''
    parsed_args = {}
    opts, leftovers = getopt(sys.argv[1:], 'hvtr:i:o:w:e:l:p:d:', ['help', 'version', 'temporary', 'reset=', 'input=', 'output=', 'export=', 'window=', 'sequence=', 'lang=', 'locale=', 'port=', 'project=', 'dir='])
    for opt_name, opt_value in opts:
        if opt_name in flag_keys:
            parsed_args[flag_keys[opt_name]] = True
        elif opt_name in value_keys:
            parsed_args[value_keys[opt_name]] = opt_value
        elif opt_name in ('-d', '--project', '--dir'):
            project_path = opt_value
    # A trailing positional argument is treated as the input file.
    if leftovers and leftovers[-1]:
        parsed_args['input'] = leftovers[-1]
    app = Rickrack()
    app.run(dp_argv=parsed_args, dp_proj=project_path)
def get_field_kwargs(field_name, model_field):
    """Build the keyword arguments for a serializer field that maps the given
    Django model field (DRF-style field mapping).

    Collects label/help_text, decimal settings, null/blank/default handling,
    choices, and converts model validators into explicit field kwargs
    (max/min value, max/min length), stripping validators that become
    redundant once expressed as kwargs.
    """
    kwargs = {}
    validator_kwarg = list(model_field.validators)
    kwargs['model_field'] = model_field
    if (model_field.verbose_name and needs_label(model_field, field_name)):
        kwargs['label'] = capfirst(model_field.verbose_name)
    if model_field.help_text:
        kwargs['help_text'] = model_field.help_text
    max_digits = getattr(model_field, 'max_digits', None)
    if (max_digits is not None):
        kwargs['max_digits'] = max_digits
    decimal_places = getattr(model_field, 'decimal_places', None)
    if (decimal_places is not None):
        kwargs['decimal_places'] = decimal_places
    if isinstance(model_field, models.SlugField):
        kwargs['allow_unicode'] = model_field.allow_unicode
    # TextField without choices and JSON fields render as a textarea in the
    # browsable API.
    if ((isinstance(model_field, models.TextField) and (not model_field.choices)) or (postgres_fields and isinstance(model_field, postgres_fields.JSONField)) or (hasattr(models, 'JSONField') and isinstance(model_field, models.JSONField))):
        kwargs['style'] = {'base_template': 'textarea.html'}
    if model_field.null:
        kwargs['allow_null'] = True
    if (isinstance(model_field, models.AutoField) or (not model_field.editable)):
        # Read-only fields need no further required/default/validator setup.
        kwargs['read_only'] = True
        return kwargs
    if ((model_field.default is not None) and (model_field.default != empty) and (not callable(model_field.default))):
        kwargs['default'] = model_field.default
    if (model_field.has_default() or model_field.blank or model_field.null):
        kwargs['required'] = False
    if (model_field.blank and isinstance(model_field, (models.CharField, models.TextField))):
        kwargs['allow_blank'] = True
    if ((not model_field.blank) and (postgres_fields and isinstance(model_field, postgres_fields.ArrayField))):
        kwargs['allow_empty'] = False
    if isinstance(model_field, models.FilePathField):
        kwargs['path'] = model_field.path
        if (model_field.match is not None):
            kwargs['match'] = model_field.match
        if (model_field.recursive is not False):
            kwargs['recursive'] = model_field.recursive
        if (model_field.allow_files is not True):
            kwargs['allow_files'] = model_field.allow_files
        if (model_field.allow_folders is not False):
            kwargs['allow_folders'] = model_field.allow_folders
    if model_field.choices:
        kwargs['choices'] = model_field.choices
    else:
        # Promote min/max value validators to explicit kwargs on numeric
        # fields, removing the now-redundant validators.
        max_value = next((validator.limit_value for validator in validator_kwarg if isinstance(validator, validators.MaxValueValidator)), None)
        if ((max_value is not None) and isinstance(model_field, NUMERIC_FIELD_TYPES)):
            kwargs['max_value'] = max_value
            validator_kwarg = [validator for validator in validator_kwarg if (not isinstance(validator, validators.MaxValueValidator))]
        min_value = next((validator.limit_value for validator in validator_kwarg if isinstance(validator, validators.MinValueValidator)), None)
        if ((min_value is not None) and isinstance(model_field, NUMERIC_FIELD_TYPES)):
            kwargs['min_value'] = min_value
            validator_kwarg = [validator for validator in validator_kwarg if (not isinstance(validator, validators.MinValueValidator))]
        # Drop validators that the corresponding serializer field applies
        # implicitly (URL/email/slug/IP/decimal validation).
        if isinstance(model_field, models.URLField):
            validator_kwarg = [validator for validator in validator_kwarg if (not isinstance(validator, validators.URLValidator))]
        if isinstance(model_field, models.EmailField):
            validator_kwarg = [validator for validator in validator_kwarg if (validator is not validators.validate_email)]
        if isinstance(model_field, models.SlugField):
            validator_kwarg = [validator for validator in validator_kwarg if (validator is not validators.validate_slug)]
        if isinstance(model_field, models.GenericIPAddressField):
            validator_kwarg = [validator for validator in validator_kwarg if (validator is not validators.validate_ipv46_address)]
        if isinstance(model_field, models.DecimalField):
            validator_kwarg = [validator for validator in validator_kwarg if (not isinstance(validator, validators.DecimalValidator))]
    # Promote max/min length validators to explicit kwargs.
    max_length = getattr(model_field, 'max_length', None)
    if ((max_length is not None) and isinstance(model_field, (models.CharField, models.TextField, models.FileField))):
        kwargs['max_length'] = max_length
        validator_kwarg = [validator for validator in validator_kwarg if (not isinstance(validator, validators.MaxLengthValidator))]
    min_length = next((validator.limit_value for validator in validator_kwarg if isinstance(validator, validators.MinLengthValidator)), None)
    if ((min_length is not None) and isinstance(model_field, models.CharField)):
        kwargs['min_length'] = min_length
        validator_kwarg = [validator for validator in validator_kwarg if (not isinstance(validator, validators.MinLengthValidator))]
    # Unique-constraint validators are always kept.
    validator_kwarg += get_unique_validators(field_name, model_field)
    if validator_kwarg:
        kwargs['validators'] = validator_kwarg
    return kwargs
class HCI_Cmd(HCI):
HCI_CMD_STR = {1025: 'COMND Inquiry', 1026: 'COMND Inquiry_Cancel', 1027: 'COMND Periodic_Inquiry_Mode', 1028: 'COMND Exit_Periodic_Inquiry_Mode', 1029: 'COMND Create_Connection', 1030: 'COMND Disconnect', 1032: 'COMND Create_Connection_Cancel', 1033: 'COMND Accept_Connection_Request', 1034: 'COMND Reject_Connection_Request', 1035: 'COMND Link_Key_Request_Reply', 1036: 'COMND Link_Key_Request_Negative_Reply', 1037: 'COMND PIN_Code_Request_Reply', 1038: 'COMND PIN_Code_Request_Negative_Reply', 1039: 'COMND Change_Connection_Packet_Type', 1041: 'COMND Authentication_Requested', 1043: 'COMND Set_Connection_Encryption ', 1045: 'COMND Change_Connection_Link_Key', 1047: 'COMND Master_Link_Key', 1049: 'COMND Remote_Name_Request', 1050: 'COMND Remote_Name_Request_Cancel', 1051: 'COMND Read_Remote_Supported_Features', 1052: 'COMND Read_Remote_Extended_Features', 1053: 'COMND Read_Remote_Version_Information', 1055: 'COMND Read_Clock_Offset', 1056: 'COMND Read_LMP_Handle', 1064: 'COMND Setup_Synchronous_Connection', 1065: 'COMND Accept_Synchronous_Connection_Request', 1066: 'COMND Reject_Synchronous_Connection_Request', 1067: 'COMND IO_Capability_Request_Reply', 1068: 'COMND User_Confirmation_Request_Reply', 1069: 'COMND User_Confirmation_Request_Negative_Reply', 1070: 'COMND User_Passkey_Request_Reply', 1071: 'COMND User_Passkey_Request_Negative_Reply', 1072: 'COMND Remote_OOB_Data_Request_Reply', 1075: 'COMND Remote_OOB_Data_Request_Negative_Reply', 1076: 'COMND IO_Capability_Request_Negative_Reply', 1077: 'COMND Create_Physical_Link', 1078: 'COMND Accept_Physical_Link', 1079: 'COMND Disconnect_Physical_Link', 1080: 'COMND Create_Logical_Link', 1081: 'COMND Accept_Logical_Link', 1082: 'COMND Disconnect_Logical_Link', 1083: 'COMND Logical_Link_Cancel', 1084: 'COMND Flow_Spec_Modify', 1085: 'COMND Enhanced_Setup_Synchronous_Connection', 1086: 'COMND Enhanced_Accept_Synchronous_Connection_Request', 1087: 'COMND Truncated_Page', 1088: 'COMND Truncated_Page_Cancel', 1089: 'COMND 
Set_Connectionless_Slave_Broadcast', 1090: 'COMND Set_Connectionless_Slave_Broadcast_Broadcast_Receive', 1091: 'COMND Start_Synchronization_Train', 1092: 'COMND Receive_Synchronization_Train', 1093: 'COMND Remote_OOB_Extended_Data_Request_Reply', 2049: 'COMND Hold_Mode', 2051: 'COMND Sniff_Mode', 2052: 'COMND Exit_Sniff_Mode', 2053: 'COMND Park_State', 2054: 'COMND Exit_Park_State', 2055: 'COMND QoS_Setup', 2057: 'COMND Role_Discovery', 2059: 'COMND Switch_Role', 2060: 'COMND Read_Link_Policy_Settings', 2061: 'COMND Write_Link_Policy_Settings', 2062: 'COMND Read_Default_Link_Policy_Settings', 2063: 'COMND Write_Default_Link_Policy_Settings', 2064: 'COMND Flow_Specification', 2065: 'COMND Sniff_Subrating', 3073: 'COMND Set_Event_Mask', 3075: 'COMND Reset', 3077: 'COMND Set_Event_Filter', 3080: 'COMND Flush', 3081: 'COMND Read_PIN_Type', 3082: 'COMND Write_PIN_Type', 3083: 'COMND Create_New_Unit_Key', 3085: 'COMND Read_Stored_Link_Key', 3089: 'COMND Write_Stored_Link_Key', 3090: 'COMND Delete_Stored_Link_Key', 3091: 'COMND Write_Local_Name', 3092: 'COMND Read_Local_Name', 3093: 'COMND Read_Connection_Accept_Timeout', 3094: 'COMND Write_Connection_Accept_Timeout', 3095: 'COMND Read_Page_Timeout', 3096: 'COMND Write_Page_Timeout', 3097: 'COMND Read_Scan_Enable', 3098: 'COMND Write_Scan_Enable', 3099: 'COMND Read_Page_Scan_Activity', 3100: 'COMND Write_Page_Scan_Activity', 3101: 'COMND Read_Inquiry_Scan_Activity', 3102: 'COMND Write_Inquiry_Scan_Activity', 3103: 'COMND Read_Authentication_Enable', 3104: 'COMND Write_Authentication_Enable', 3107: 'COMND Read_Class_of_Device', 3108: 'COMND Write_Class_of_Device', 3109: 'COMND Read_Voice_Setting', 3110: 'COMND Write_Voice_Setting', 3111: 'COMND Read_Automatic_Flush_Timeout', 3112: 'COMND Write_Automatic_Flush_Timeout', 3113: 'COMND Read_Num_Broadcast_Retransmissions', 3120: 'COMND Write_Num_Broadcast_Retransmissions', 3115: 'COMND Read_Hold_Mode_Activity', 3116: 'COMND Write_Hold_Mode_Activity', 3117: 'COMND 
Read_Transmit_Power_Level', 3118: 'COMND Read_Synchronous_Flow_Control_Enable', 3119: 'COMND Write_Synchronous_Flow_Control_Enable', 3121: 'COMND Set_Controller_To_Host_Flow_Control', 3123: 'COMND Host_Buffer_Size', 3125: 'COMND Host_Number_Of_Completed_Packets', 3126: 'COMND Read_Link_Supervision_Timeout', 3127: 'COMND Write_Link_Supervision_Timeout', 3128: 'COMND Read_Number_Of_Supported_IAC', 3129: 'COMND Read_Current_IAC_LAP', 3130: 'COMND Write_Current_IAC_LAP', 3135: 'COMND Set_AFH_Host_Channel_Classification', 3138: 'COMND Read_Inquiry_Scan_Type', 3139: 'COMND Write_Inquiry_Scan_Type', 3140: 'COMND Read_Inquiry_Mode', 3141: 'COMND Write_Inquiry_Mode', 3142: 'COMND Read_Page_Scan_Type', 3143: 'COMND Write_Page_Scan_Type', 3144: 'COMND Read_AFH_Channel_Assessment_Mode', 3145: 'COMND Write_AFH_Channel_Assessment_Mode', 3153: 'COMND Read_Extended_Inquiry_Response', 3154: 'COMND Write_Extended_Inquiry_Response', 3155: 'COMND Refresh_Encryption_Key', 3157: 'COMND Read_Simple_Pairing_Mode', 3158: 'COMND Write_Simple_Pairing_Mode', 3159: 'COMND Read_Local_OOB_Data', 3160: 'COMND Read_Inquiry_Response_Transmit_Power_Level', 3161: 'COMND Write_Inquiry_Response_Transmit_Power_Level', 3168: 'COMND Send_Key_Press_Notification', 3162: 'COMND Read_Default_Erroneous_Data_Reporting', 3163: 'COMND Write_Default_Erroneous_Data_Reporting', 3167: 'COMND Enhanced_Flush', 3169: 'COMND Read_Logical_Link_Accept_Timeout', 3170: 'COMND Write_Logical_Link_Accept_Timeout', 3171: 'COMND Set_Event_Mask_Page_2', 3172: 'COMND Read_Location_Data', 3173: 'COMND Write_Location_Data', 3174: 'COMND Read_Flow_Control_Mode', 3175: 'COMND Write_Flow_Control_Mode', 3176: 'COMND Read_Enhance_Transmit_Power_Level', 3177: 'COMND Read_Best_Effort_Flush_Timeout', 3178: 'COMND Write_Best_Effort_Flush_Timeout', 3179: 'COMND Short_Range_Mode', 3180: 'COMND Read_LE_Host_Support', 3181: 'COMND Write_LE_Host_Support', 3182: 'COMND Set_MWS_Channel_Parameters', 3183: 'COMND Set_ External_Frame_Configuration', 
3184: 'COMND Set_MWS_Signaling', 3185: 'COMND Set_MWS_Transport_Layer', 3186: 'COMND Set_MWS_Scan_Frequency_Table', 3187: 'COMND Set_MWS_PATTERN_Configuration', 3188: 'COMND Set_Reserved_LT_ADDR', 3189: 'COMND Delete_Reserved_LT_ADDR', 3190: 'COMND Set_Connectionless_Slave_Broadcast_Data', 3191: 'COMND Read_Synchronization_Train_Parameters', 3192: 'COMND Write_Synchronization_Train_Parameters', 3193: 'COMND Read_Secure_Connections_Host_Support', 3194: 'COMND Write_Secure_Connections_Host_Support', 3195: 'COMND Read_Authenticated_Payload_Timeout', 3196: 'COMND Write_Authenticated_Payload_Timeout', 3197: 'COMND Read_Local_OOB_Extended_Data', 3198: 'COMND Read_Extended_Page_Timeout', 3199: 'COMND Write_Extended_Page_Timeout', 3200: 'COMND Read_Extended_Inquiry_Length', 3201: 'COMND Write_Extended_Inquiry_Length', 4097: 'COMND Read_Local_Version_Information', 4098: 'COMND Read_Local_Supported_Commands', 4099: 'COMND Read_Local_Supported_Features', 4100: 'COMND Read_Local_Extended_Features', 4101: 'COMND Read_Buffer_Size', 4105: 'COMND Read_BD_ADDR', 4106: 'COMND Read_Data_Block_Size', 4107: 'COMND Read_Local_Supported_Codecs', 5121: 'COMND Read_Failed_Contact_Counter', 5122: 'COMND Reset_Failed_Contact_Counter', 5123: 'COMND Read_Link_Quality', 5125: 'COMND Read_RSSI', 5126: 'COMND Read_AFH_Channel_Map', 5127: 'COMND Read_Clock', 5128: 'COMND Encryption_Key_Size', 5129: 'COMND Read_Local_AMP_Info', 5130: 'COMND Read_Local_AMP_ASSOC', 5131: 'COMND Write_Remote_AMP_ASSOC', 5132: 'COMND Get_MWS_Transport_Layer_Configuration', 5133: 'COMND Set_Triggered_Clock_Capture', 6145: 'COMND Read_Loopback_Mode', 6146: 'COMND Write_Loopback_Mode', 6147: 'COMND Enable_Device_Under_Test_Mode', 6148: 'COMND Write_Simple_Pairing_Debug_Mode', 6151: 'COMND Enable_AMP_Receiver_Reports', 6152: 'COMND AMP_Test_End', 6153: 'COMND AMP_Test', 6154: 'COMND Write_Secure_Connection_Test_Mode', 8193: 'COMND LE_Set_Event_Mask', 8194: 'COMND LE_Read_Buffer_Size', 8195: 'COMND 
LE_Read_Local_Supported_Features', 8197: 'COMND LE_Set_Random_Address', 8198: 'COMND LE_Set_Advertising_Parameters', 8199: 'COMND LE_Read_Advertising_Channel_Tx_Power', 8200: 'COMND LE_Set_Advertising_Data', 8201: 'COMND LE_Set_Scan_Responce_Data', 8202: 'COMND LE_Set_Advertise_Enable', 8203: 'COMND LE_Set_Set_Scan_Parameters', 8204: 'COMND LE_Set_Scan_Enable', 8205: 'COMND LE_Create_Connection', 8206: 'COMND LE_Create_Connection_Cancel ', 8207: 'COMND LE_Read_White_List_Size', 8208: 'COMND LE_Clear_White_List', 8209: 'COMND LE_Add_Device_To_White_List', 8210: 'COMND LE_RemoveDevice_From_White_List', 8211: 'COMND LE_Connection_Update', 8212: 'COMND LE_Set_Host_Channel_Classification', 8213: 'COMND LE_Read_Channel_Map', 8214: 'COMND LE_Read_Remote_Used_Features', 8215: 'COMND LE_Encrypt', 8216: 'COMND LE_Rand', 8217: 'COMND LE_Start_Encryption', 8218: 'COMND LE_Long_Term_Key_Request_Reply', 8219: 'COMND LE_Long_Term_Key_Request_Negative_Reply', 8220: 'COMND LE_Read_Supported_States', 8221: 'COMND LE_Receiver_Test', 8222: 'COMND LE_Transmitter_Test', 8223: 'COMND LE_Test_End', 8224: 'COMND LE_Remote_Connection_Parameter_Request_Reply', 8225: 'COMND LE_Remote_Connection_Parameter_Request_Negative_Reply', 64638: 'COMND VSC_Write_Dynamic_SCO_Routing_Change', 64599: 'COMND VSC_Write_High_Priority_Connection', 64588: 'COMND VSC_Write_RAM', 64589: 'COMND VSC_Read_RAM', 64590: 'COMND VSC_Launch_RAM', 64536: 'COMND VSC_Update_UART_Baud_Rate', 64513: 'COMND VSC_Write_BD_ADDR', 64540: 'COMND VSC_Write_SCO_PCM_Int_Param', 64551: 'COMND VSC_Set_Sleepmode_Param', 64542: 'COMND VSC_Write_PCM_Data_Format_Param', 64558: 'COMND VSC_Download_Minidriver', 64851: 'COMND VSC_BLE_VENDOR_CAP', 64852: 'COMND VSC_BLE_MULTI_ADV', 64854: 'COMND VSC_BLE_BATCH_SCAN', 64855: 'COMND VSC_BLE_ADV_FILTER', 64856: 'COMND VSC_BLE_TRACK_ADV', 64857: 'COMND VSC_BLE_ENERGY_INFO'}
def from_data(data):
return HCI_Cmd(u16(data[0:2]), ord(data[2]), data[3:])
def __init__(self, opcode, length, data):
HCI.__init__(self, HCI.HCI_CMD)
self.opcode = opcode
self.length = length
self.data = data
def getRaw(self):
return (((super(HCI_Cmd, self).getRaw() + p16(self.opcode)) + p8(self.length)) + self.data)
def __str__(self):
parent = HCI.__str__(self)
cmdname = 'unknown'
if (self.opcode in self.HCI_CMD_STR):
cmdname = self.HCI_CMD_STR[self.opcode]
return (parent + ('<0x%04x %s (len=%d): %s>' % (self.opcode, cmdname, self.length, self.data[0:16].encode('hex')))) |
class Strategy(GenericStrategy):
    """Trading strategy gating counterparties on registration status and trade cadence.

    A counterparty is acceptable only if it is registered in the registration
    database, enough minutes have passed since our last validity check for it,
    and the database allows it to trade again (hours-based throttle).
    """

    def __init__(self, **kwargs: Any) -> None:
        """Initialize the strategy.

        :param aw1_aea: required address of the AW1 AEA.
        :param minimum_hours_between_txs: minimum hours between two trades with
            the same counterparty (default 4). BUG FIX: the key was previously
            only read under the misspelling 'mininum_hours_between_txs' while
            the attribute is spelled correctly; both spellings are now accepted
            (correct one wins) and both are popped so no stray key reaches the
            parent initializer.
        :param minimum_minutes_since_last_attempt: throttle between validity
            checks per counterparty (default 2).
        :raises ValueError: if aw1_aea is missing.
        """
        aw1_aea: Optional[str] = kwargs.pop('aw1_aea', None)
        if aw1_aea is None:
            raise ValueError('aw1_aea must be provided!')
        self.aw1_aea = aw1_aea
        _legacy_hours = kwargs.pop('mininum_hours_between_txs', 4)
        self.minimum_hours_between_txs = kwargs.pop('minimum_hours_between_txs', _legacy_hours)
        self.minimum_minutes_since_last_attempt = kwargs.pop('minimum_minutes_since_last_attempt', 2)
        super().__init__(**kwargs)
        # Counterparty address -> wall-clock time of the last validity check.
        self.last_attempt: Dict[str, datetime.datetime] = {}

    def get_acceptable_counterparties(self, counterparties: Tuple[str, ...]) -> Tuple[str, ...]:
        """Return the subset of counterparties that pass is_valid_counterparty."""
        return tuple(cp for cp in counterparties if self.is_valid_counterparty(cp))

    def is_enough_time_since_last_attempt(self, counterparty: str) -> bool:
        """True if no prior attempt, or the per-counterparty cool-down has elapsed."""
        last_time = self.last_attempt.get(counterparty, None)
        if last_time is None:
            return True
        return datetime.datetime.now() > (last_time + datetime.timedelta(minutes=self.minimum_minutes_since_last_attempt))

    def is_valid_counterparty(self, counterparty: str) -> bool:
        """Check registration, attempt cool-down and trade throttle for a counterparty."""
        registration_db = cast(RegistrationDB, self.context.registration_db)
        if not registration_db.is_registered(counterparty):
            self.context.logger.info(f'Invalid counterparty={counterparty}, not registered!')
            return False
        if not self.is_enough_time_since_last_attempt(counterparty):
            self.context.logger.debug(f'Not enough time since last attempt for counterparty={counterparty}!')
            return False
        # Record the attempt time even if the trade check below fails:
        # deliberate throttle so repeated probes do not hammer the DB.
        self.last_attempt[counterparty] = datetime.datetime.now()
        if not registration_db.is_allowed_to_trade(counterparty, self.minimum_hours_between_txs):
            return False
        return True

    def successful_trade_with_counterparty(self, counterparty: str, data: Dict[str, str]) -> None:
        """Persist a successful trade timestamp and its data for the counterparty."""
        registration_db = cast(RegistrationDB, self.context.registration_db)
        registration_db.set_trade(counterparty, datetime.datetime.now(), data)
        self.context.logger.info(f'Successful trade with={counterparty}. Data acquired={data}!')

    def register_counterparty(self, counterparty: str, developer_handle: str) -> None:
        """Record a counterparty registration in the database."""
        registration_db = cast(RegistrationDB, self.context.registration_db)
        registration_db.set_registered(counterparty, developer_handle)
def instance_details(request):
    """Render a status page with deployment version info and basic usage counts."""
    def _git(cmd):
        # Run a git command in the server's working directory and return its text.
        return check_output(cmd).decode('utf-8').strip()

    versions = {
        'tag': _git(['git', 'describe', '--tags', '--always', '--abbrev=0']),
        'commit': _git(['git', 'log', '-1', '--pretty=oneline']),
        'django': get_version(),
        'python': version,  # presumably sys.version -- confirm the import at file top
    }
    context = dict(
        events=Event.objects.all().count(),
        users=User.objects.all().count(),
        versions=versions,
    )
    return render(request, 'instance_details.html', context=context)
class TestIndicesStatsRunner:
    """Unit tests for runner.IndicesStats.

    BUG FIX: the decorators were garbled in the source
    (bare "('elasticsearch.Elasticsearch')" lines and ".asyncio", which is a
    syntax error). Restored as @mock.patch('elasticsearch.Elasticsearch') /
    @pytest.mark.asyncio -- confirm against upstream; requires `mock` and
    `pytest` to be imported at file top.
    """

    @mock.patch('elasticsearch.Elasticsearch')
    @pytest.mark.asyncio
    async def test_indices_stats_without_parameters(self, es):
        """Defaults: query all indices and metrics, report one successful op."""
        es.indices.stats = mock.AsyncMock(return_value={})
        indices_stats = runner.IndicesStats()
        result = await indices_stats(es, params={})
        assert result == {'weight': 1, 'unit': 'ops', 'success': True}
        es.indices.stats.assert_awaited_once_with(index='_all', metric='_all')

    @mock.patch('elasticsearch.Elasticsearch')
    @pytest.mark.asyncio
    async def test_indices_stats_with_timeout_and_headers(self, es):
        """Request-level params are forwarded to the client call."""
        es.indices.stats = mock.AsyncMock(return_value={})
        indices_stats = runner.IndicesStats()
        result = await indices_stats(es, params={'request-timeout': 3.0, 'headers': {'header1': 'value1'}, 'opaque-id': 'test-id1'})
        assert result == {'weight': 1, 'unit': 'ops', 'success': True}
        es.indices.stats.assert_awaited_once_with(index='_all', metric='_all', headers={'header1': 'value1'}, opaque_id='test-id1', request_timeout=3.0)

    @mock.patch('elasticsearch.Elasticsearch')
    @pytest.mark.asyncio
    async def test_indices_stats_with_failed_condition(self, es):
        """A condition whose actual value differs from the expected one fails."""
        es.indices.stats = mock.AsyncMock(return_value={'_all': {'total': {'merges': {'current': 2, 'current_docs': 292698}}}})
        indices_stats = runner.IndicesStats()
        result = await indices_stats(es, params={'index': 'logs-*', 'condition': {'path': '_all.total.merges.current', 'expected-value': 0}})
        assert result == {'weight': 1, 'unit': 'ops', 'success': False, 'condition': {'path': '_all.total.merges.current', 'actual-value': '2', 'expected-value': '0'}}
        es.indices.stats.assert_awaited_once_with(index='logs-*', metric='_all')

    @mock.patch('elasticsearch.Elasticsearch')
    @pytest.mark.asyncio
    async def test_indices_stats_with_successful_condition(self, es):
        """A condition matching the actual value succeeds."""
        es.indices.stats = mock.AsyncMock(return_value={'_all': {'total': {'merges': {'current': 0, 'current_docs': 292698}}}})
        indices_stats = runner.IndicesStats()
        result = await indices_stats(es, params={'index': 'logs-*', 'condition': {'path': '_all.total.merges.current', 'expected-value': 0}})
        assert result == {'weight': 1, 'unit': 'ops', 'success': True, 'condition': {'path': '_all.total.merges.current', 'actual-value': '0', 'expected-value': '0'}}
        es.indices.stats.assert_awaited_once_with(index='logs-*', metric='_all')

    @mock.patch('elasticsearch.Elasticsearch')
    @pytest.mark.asyncio
    async def test_indices_stats_with_non_existing_path(self, es):
        """A condition path missing from the response yields actual-value None and failure."""
        es.indices.stats = mock.AsyncMock(return_value={'indices': {'total': {'docs': {'current': 0}}}})
        indices_stats = runner.IndicesStats()
        result = await indices_stats(es, params={'index': 'logs-*', 'condition': {'path': 'indices.my_index.total.docs.count', 'expected-value': 0}})
        assert result == {'weight': 1, 'unit': 'ops', 'success': False, 'condition': {'path': 'indices.my_index.total.docs.count', 'actual-value': None, 'expected-value': '0'}}
        es.indices.stats.assert_awaited_once_with(index='logs-*', metric='_all')
class TestHiddenFieldUniquenessForDateValidation(TestCase):
    """Repr checks for UniqueForDateValidator wiring when the date field may be hidden."""

    def test_repr_date_field_not_included(self):
        """Omitting the date field from `fields` yields a HiddenField default for it."""
        class TestSerializer(serializers.ModelSerializer):
            class Meta:
                model = HiddenFieldUniqueForDateModel
                fields = ('id', 'slug')

        expected = dedent("\n TestSerializer():\n id = IntegerField(label='ID', read_only=True)\n slug = CharField(max_length=100)\n published = HiddenField(default=CreateOnlyDefault(<function now>))\n class Meta:\n validators = [<UniqueForDateValidator(queryset=HiddenFieldUniqueForDateModel.objects.all(), field='slug', date_field='published')>]\n ")
        assert repr(TestSerializer()) == expected

    def test_repr_date_field_included(self):
        """Including the date field keeps it as a read-only DateTimeField with the default."""
        class TestSerializer(serializers.ModelSerializer):
            class Meta:
                model = HiddenFieldUniqueForDateModel
                fields = ('id', 'slug', 'published')

        expected = dedent("\n TestSerializer():\n id = IntegerField(label='ID', read_only=True)\n slug = CharField(max_length=100)\n published = DateTimeField(default=CreateOnlyDefault(<function now>), read_only=True)\n class Meta:\n validators = [<UniqueForDateValidator(queryset=HiddenFieldUniqueForDateModel.objects.all(), field='slug', date_field='published')>]\n ")
        assert repr(TestSerializer()) == expected
def extractOootransWordpressCom(item):
    """Build a release message for an oootrans.wordpress.com feed item.

    Returns None for previews or items with neither a chapter nor a volume,
    a release message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag to look for, series name to report, translation type)
    for tagname, name, tl_type in [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# BUG FIX: the decorator was garbled in the source (".benchmark(...)" is a
# syntax error); restored as @pytest.mark.benchmark -- confirm pytest-benchmark
# and a `pytest` import at file top.
@pytest.mark.benchmark(group='import/export')
def test_benchmark_cube_import(benchmark, testpath, tmp_path):
    """Benchmark reading a cube back from the xtgregcube format and check round-trip values."""
    cube1 = xtgeo.cube_from_file(join(testpath, 'cubes/reek/syntseis__seismic_depth_stack.segy'))
    # NOTE(review): suffix '.xtgrecube' vs fformat 'xtgregcube' -- the mismatch
    # is in the original; the format argument, not the suffix, drives the writer.
    fname = join(tmp_path, 'syntseis__seismic_depth_stack.xtgrecube')
    cube1.to_file(fname, fformat='xtgregcube')
    cube2 = None

    def read():
        # Rebind via nonlocal so the benchmarked read result is visible outside.
        nonlocal cube2
        cube2 = xtgeo.cube_from_file(fname, fformat='xtgregcube')

    benchmark(read)
    # Round-trip must preserve the cube values.
    assert_allclose(cube1.values, cube2.values)
# BUG FIX: the decorator was garbled in the source ("_main(config_path=...)");
# given the dora API in use (get_xp, xp.link.push_metrics, xp.folder) this is
# dora's entry point. NOTE(review): confirm `from dora import hydra_main` at
# file top.
@hydra_main(config_path='conf', config_name='config')
def main(args):
    """Train an MNIST CNN under a dora/hydra experiment and push per-epoch metrics."""
    logger.info(repr(args))
    import os
    logger.info('%s %s', os.getcwd(), __file__)
    xp = get_xp()
    logger.info(xp.sig)
    use_cuda = (not args.no_cuda) and torch.cuda.is_available()
    torch.manual_seed(args.seed)
    device = torch.device('cuda' if use_cuda else 'cpu')
    train_kwargs = {'batch_size': args.batch_size}
    test_kwargs = {'batch_size': args.test_batch_size}
    if use_cuda:
        cuda_kwargs = {'num_workers': 1, 'pin_memory': True, 'shuffle': True}
        train_kwargs.update(cuda_kwargs)
        test_kwargs.update(cuda_kwargs)
    # Standard MNIST normalization constants (mean, std).
    transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))])
    data = '/tmp/dora_test_mnist'
    dataset1 = datasets.MNIST(data, train=True, download=True, transform=transform)
    dataset2 = datasets.MNIST(data, train=False, transform=transform)
    train_loader = torch.utils.data.DataLoader(dataset1, **train_kwargs)
    test_loader = torch.utils.data.DataLoader(dataset2, **test_kwargs)
    model = Net().to(device)
    optimizer = optim.Adadelta(model.parameters(), lr=args.lr)
    # Decay the learning rate by gamma every epoch.
    scheduler = StepLR(optimizer, step_size=1, gamma=args.gamma)
    for epoch in range(1, args.epochs + 1):
        train_loss = train(args, model, device, train_loader, optimizer, epoch)
        test_loss, correct = test(model, device, test_loader)
        # Report metrics to the dora experiment link after each epoch.
        xp.link.push_metrics({'train': train_loss, 'test': test_loss, 'correct': correct})
        scheduler.step()
    if args.save_model:
        torch.save(model.state_dict(), xp.folder / 'mnist_cnn.pt')
def writeResultHDF(pOutFileName, pAcceptedData, pRejectedData, pAllResultData, pInputData, pAlpha, pTest):
    """Write differential-test results to a new HDF5 file.

    Layout: /<matrix1>/<matrix2>/<chromosome>/<gene>/{accepted,rejected,all},
    plus a /<matrix1>/<matrix2>/genes/<gene> hard link for lookup by gene name.

    :param pOutFileName: path of the HDF5 file to create (truncated if present)
    :param pAcceptedData: per-input lists of rows accepted by the test
    :param pRejectedData: per-input lists of rejected rows
    :param pAllResultData: per-input lists of all result rows
    :param pInputData: per-input metadata; indexed below as
        inputData[0][1]=matrix1 name, inputData[1][1]=matrix2 name,
        inputData[0][2]=chromosome, inputData[0][3]=gene name
    :param pAlpha: significance level stored as a file attribute
    :param pTest: statistical test name stored as a file attribute
    """
    resultFileH5Object = h5py.File(pOutFileName, 'w')
    # File-level metadata identifying result type and run parameters.
    resultFileH5Object.attrs['type'] = 'differential'
    resultFileH5Object.attrs['version'] = __version__
    resultFileH5Object.attrs['alpha'] = pAlpha
    resultFileH5Object.attrs['test'] = pTest
    # The three result categories are written with identical structure.
    all_data_dict = {'accepted': pAcceptedData, 'rejected': pRejectedData, 'all': pAllResultData}
    for (i, inputData) in enumerate(pInputData):
        matrix1_name = inputData[0][1]
        matrix2_name = inputData[1][1]
        chromosome = inputData[0][2]
        gene_name = inputData[0][3]
        # Get-or-create the /<matrix1>/<matrix2> group hierarchy.
        if (matrix1_name not in resultFileH5Object):
            matrix1_object = resultFileH5Object.create_group(matrix1_name)
        else:
            matrix1_object = resultFileH5Object[matrix1_name]
        if (matrix2_name not in matrix1_object):
            matrix2_object = matrix1_object.create_group(matrix2_name)
        else:
            matrix2_object = matrix1_object[matrix2_name]
        # 'genes' holds hard links to gene groups for chromosome-free lookup.
        if ('genes' not in matrix2_object):
            geneGroup = matrix2_object.create_group('genes')
        else:
            geneGroup = matrix2_object['genes']
        if (chromosome not in matrix2_object):
            chromosome_object = matrix2_object.create_group(chromosome)
        else:
            chromosome_object = matrix2_object[chromosome]
        # NOTE(review): create_group raises if the gene already exists for this
        # chromosome -- presumably gene names are unique per input; confirm.
        gene_object = chromosome_object.create_group(gene_name)
        gene_object.create_group('accepted')
        gene_object.create_group('rejected')
        gene_object.create_group('all')
        for category in ['accepted', 'rejected', 'all']:
            write_object = gene_object[category]
            data_object = all_data_dict[category][i]
            # Empty categories keep their (empty) group but get no datasets.
            if (len(data_object) == 0):
                continue
            # NOTE(review): 'chromosome' is rebound here, shadowing the value
            # read from inputData above; after the row loop it holds the last
            # row's chromosome (rows presumably share one chromosome -- confirm).
            chromosome = None
            start_list = []
            end_list = []
            # Scalars: each row overwrites them, so the last row's value is kept.
            sum_of_interactions_1 = None
            sum_of_interactions_2 = None
            relative_distance_list = []
            pvalue_list = []
            raw_target_list_1 = []
            raw_target_list_2 = []
            # Row layout (as indexed here): data[0]=(chrom, start, end, ?, ?, rel_dist),
            # data[2]=p-value, data[3]=(sum1, raw1), data[4]=(sum2, raw2).
            for data in data_object:
                chromosome = data[0][0]
                start_list.append(data[0][1])
                end_list.append(data[0][2])
                relative_distance_list.append(data[0][5])
                sum_of_interactions_1 = float(data[3][0])
                sum_of_interactions_2 = float(data[4][0])
                raw_target_list_1.append(data[3][1])
                raw_target_list_2.append(data[4][1])
                pvalue_list.append(data[2])
            write_object['chromosome'] = str(chromosome)
            write_object.create_dataset('start_list', data=start_list, compression='gzip', compression_opts=9)
            write_object.create_dataset('end_list', data=end_list, compression='gzip', compression_opts=9)
            write_object['gene'] = str(gene_name)
            write_object.create_dataset('relative_distance_list', data=relative_distance_list, compression='gzip', compression_opts=9)
            write_object['sum_of_interactions_1'] = float(sum_of_interactions_1)
            write_object['sum_of_interactions_2'] = float(sum_of_interactions_2)
            write_object.create_dataset('raw_target_list_1', data=raw_target_list_1, compression='gzip', compression_opts=9)
            write_object.create_dataset('raw_target_list_2', data=raw_target_list_2, compression='gzip', compression_opts=9)
            write_object.create_dataset('pvalue_list', data=pvalue_list, compression='gzip', compression_opts=9)
        try:
            # Hard-link the gene group under 'genes'; best-effort (fails e.g.
            # on duplicate gene names), so errors are only logged at debug level.
            geneGroup[gene_name] = chromosome_object[gene_name]
        except Exception as exp:
            log.debug('exception {}'.format(str(exp)))
    resultFileH5Object.close()
class COLRVariationMergerTest():
.parametrize('paints, expected_xml, expected_varIdxes', [pytest.param([{'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 0, 'Alpha': 1.0}, {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 0, 'Alpha': 1.0}], ['<Paint Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="0"/>', ' <Alpha value="1.0"/>', '</Paint>'], [], id='solid-same'), pytest.param([{'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 0, 'Alpha': 1.0}, {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 0, 'Alpha': 0.5}], ['<Paint Format="3"><!-- PaintVarSolid -->', ' <PaletteIndex value="0"/>', ' <Alpha value="1.0"/>', ' <VarIndexBase value="0"/>', '</Paint>'], [0], id='solid-alpha'), pytest.param([{'Format': int(ot.PaintFormat.PaintLinearGradient), 'ColorLine': {'Extend': int(ot.ExtendMode.PAD), 'ColorStop': [{'StopOffset': 0.0, 'PaletteIndex': 0, 'Alpha': 1.0}, {'StopOffset': 1.0, 'PaletteIndex': 1, 'Alpha': 1.0}]}, 'x0': 0, 'y0': 0, 'x1': 1, 'y1': 1, 'x2': 2, 'y2': 2}, {'Format': int(ot.PaintFormat.PaintLinearGradient), 'ColorLine': {'Extend': int(ot.ExtendMode.PAD), 'ColorStop': [{'StopOffset': 0.1, 'PaletteIndex': 0, 'Alpha': 1.0}, {'StopOffset': 0.9, 'PaletteIndex': 1, 'Alpha': 1.0}]}, 'x0': 0, 'y0': 0, 'x1': 1, 'y1': 1, 'x2': 2, 'y2': 2}], ['<Paint Format="5"><!-- PaintVarLinearGradient -->', ' <ColorLine>', ' <Extend value="pad"/>', ' <!-- StopCount=2 -->', ' <ColorStop index="0">', ' <StopOffset value="0.0"/>', ' <PaletteIndex value="0"/>', ' <Alpha value="1.0"/>', ' <VarIndexBase value="0"/>', ' </ColorStop>', ' <ColorStop index="1">', ' <StopOffset value="1.0"/>', ' <PaletteIndex value="1"/>', ' <Alpha value="1.0"/>', ' <VarIndexBase value="2"/>', ' </ColorStop>', ' </ColorLine>', ' <x0 value="0"/>', ' <y0 value="0"/>', ' <x1 value="1"/>', ' <y1 value="1"/>', ' <x2 value="2"/>', ' <y2 value="2"/>', ' <VarIndexBase/>', '</Paint>'], [0, NO_VARIATION_INDEX, 1, NO_VARIATION_INDEX], id='linear_grad-stop-offsets'), pytest.param([{'Format': 
int(ot.PaintFormat.PaintLinearGradient), 'ColorLine': {'Extend': int(ot.ExtendMode.PAD), 'ColorStop': [{'StopOffset': 0.0, 'PaletteIndex': 0, 'Alpha': 1.0}, {'StopOffset': 1.0, 'PaletteIndex': 1, 'Alpha': 1.0}]}, 'x0': 0, 'y0': 0, 'x1': 1, 'y1': 1, 'x2': 2, 'y2': 2}, {'Format': int(ot.PaintFormat.PaintLinearGradient), 'ColorLine': {'Extend': int(ot.ExtendMode.PAD), 'ColorStop': [{'StopOffset': 0.0, 'PaletteIndex': 0, 'Alpha': 0.5}, {'StopOffset': 1.0, 'PaletteIndex': 1, 'Alpha': 1.0}]}, 'x0': 0, 'y0': 0, 'x1': 1, 'y1': 1, 'x2': 2, 'y2': 2}], ['<Paint Format="5"><!-- PaintVarLinearGradient -->', ' <ColorLine>', ' <Extend value="pad"/>', ' <!-- StopCount=2 -->', ' <ColorStop index="0">', ' <StopOffset value="0.0"/>', ' <PaletteIndex value="0"/>', ' <Alpha value="1.0"/>', ' <VarIndexBase value="0"/>', ' </ColorStop>', ' <ColorStop index="1">', ' <StopOffset value="1.0"/>', ' <PaletteIndex value="1"/>', ' <Alpha value="1.0"/>', ' <VarIndexBase/>', ' </ColorStop>', ' </ColorLine>', ' <x0 value="0"/>', ' <y0 value="0"/>', ' <x1 value="1"/>', ' <y1 value="1"/>', ' <x2 value="2"/>', ' <y2 value="2"/>', ' <VarIndexBase/>', '</Paint>'], [NO_VARIATION_INDEX, 0], id='linear_grad-stop[0].alpha'), pytest.param([{'Format': int(ot.PaintFormat.PaintLinearGradient), 'ColorLine': {'Extend': int(ot.ExtendMode.PAD), 'ColorStop': [{'StopOffset': 0.0, 'PaletteIndex': 0, 'Alpha': 1.0}, {'StopOffset': 1.0, 'PaletteIndex': 1, 'Alpha': 1.0}]}, 'x0': 0, 'y0': 0, 'x1': 1, 'y1': 1, 'x2': 2, 'y2': 2}, {'Format': int(ot.PaintFormat.PaintLinearGradient), 'ColorLine': {'Extend': int(ot.ExtendMode.PAD), 'ColorStop': [{'StopOffset': (- 0.5), 'PaletteIndex': 0, 'Alpha': 1.0}, {'StopOffset': 1.0, 'PaletteIndex': 1, 'Alpha': 1.0}]}, 'x0': 0, 'y0': 0, 'x1': 1, 'y1': 1, 'x2': 2, 'y2': (- 200)}], ['<Paint Format="5"><!-- PaintVarLinearGradient -->', ' <ColorLine>', ' <Extend value="pad"/>', ' <!-- StopCount=2 -->', ' <ColorStop index="0">', ' <StopOffset value="0.0"/>', ' <PaletteIndex value="0"/>', ' 
<Alpha value="1.0"/>', ' <VarIndexBase value="0"/>', ' </ColorStop>', ' <ColorStop index="1">', ' <StopOffset value="1.0"/>', ' <PaletteIndex value="1"/>', ' <Alpha value="1.0"/>', ' <VarIndexBase/>', ' </ColorStop>', ' </ColorLine>', ' <x0 value="0"/>', ' <y0 value="0"/>', ' <x1 value="1"/>', ' <y1 value="1"/>', ' <x2 value="2"/>', ' <y2 value="2"/>', ' <VarIndexBase value="1"/>', '</Paint>'], [0, NO_VARIATION_INDEX, NO_VARIATION_INDEX, NO_VARIATION_INDEX, NO_VARIATION_INDEX, NO_VARIATION_INDEX, 1], id='linear_grad-stop[0].offset-y2'), pytest.param([{'Format': int(ot.PaintFormat.PaintRadialGradient), 'ColorLine': {'Extend': int(ot.ExtendMode.PAD), 'ColorStop': [{'StopOffset': 0.0, 'PaletteIndex': 0, 'Alpha': 1.0}, {'StopOffset': 1.0, 'PaletteIndex': 1, 'Alpha': 1.0}]}, 'x0': 0, 'y0': 0, 'r0': 0, 'x1': 1, 'y1': 1, 'r1': 1}, {'Format': int(ot.PaintFormat.PaintRadialGradient), 'ColorLine': {'Extend': int(ot.ExtendMode.PAD), 'ColorStop': [{'StopOffset': 0.1, 'PaletteIndex': 0, 'Alpha': 0.6}, {'StopOffset': 0.9, 'PaletteIndex': 1, 'Alpha': 0.7}]}, 'x0': (- 1), 'y0': (- 2), 'r0': 3, 'x1': (- 4), 'y1': (- 5), 'r1': 6}], ['<Paint Format="7"><!-- PaintVarRadialGradient -->', ' <ColorLine>', ' <Extend value="pad"/>', ' <!-- StopCount=2 -->', ' <ColorStop index="0">', ' <StopOffset value="0.0"/>', ' <PaletteIndex value="0"/>', ' <Alpha value="1.0"/>', ' <VarIndexBase value="0"/>', ' </ColorStop>', ' <ColorStop index="1">', ' <StopOffset value="1.0"/>', ' <PaletteIndex value="1"/>', ' <Alpha value="1.0"/>', ' <VarIndexBase value="2"/>', ' </ColorStop>', ' </ColorLine>', ' <x0 value="0"/>', ' <y0 value="0"/>', ' <r0 value="0"/>', ' <x1 value="1"/>', ' <y1 value="1"/>', ' <r1 value="1"/>', ' <VarIndexBase value="4"/>', '</Paint>'], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], id='radial_grad-all-different'), pytest.param([{'Format': int(ot.PaintFormat.PaintSweepGradient), 'ColorLine': {'Extend': int(ot.ExtendMode.REPEAT), 'ColorStop': [{'StopOffset': 0.4, 'PaletteIndex': 0, 'Alpha': 1.0}, 
{'StopOffset': 0.6, 'PaletteIndex': 1, 'Alpha': 1.0}]}, 'centerX': 0, 'centerY': 0, 'startAngle': 0, 'endAngle': 180.0}, {'Format': int(ot.PaintFormat.PaintSweepGradient), 'ColorLine': {'Extend': int(ot.ExtendMode.REPEAT), 'ColorStop': [{'StopOffset': 0.4, 'PaletteIndex': 0, 'Alpha': 1.0}, {'StopOffset': 0.6, 'PaletteIndex': 1, 'Alpha': 1.0}]}, 'centerX': 0, 'centerY': 0, 'startAngle': 90.0, 'endAngle': 180.0}], ['<Paint Format="9"><!-- PaintVarSweepGradient -->', ' <ColorLine>', ' <Extend value="repeat"/>', ' <!-- StopCount=2 -->', ' <ColorStop index="0">', ' <StopOffset value="0.4"/>', ' <PaletteIndex value="0"/>', ' <Alpha value="1.0"/>', ' <VarIndexBase/>', ' </ColorStop>', ' <ColorStop index="1">', ' <StopOffset value="0.6"/>', ' <PaletteIndex value="1"/>', ' <Alpha value="1.0"/>', ' <VarIndexBase/>', ' </ColorStop>', ' </ColorLine>', ' <centerX value="0"/>', ' <centerY value="0"/>', ' <startAngle value="0.0"/>', ' <endAngle value="180.0"/>', ' <VarIndexBase value="0"/>', '</Paint>'], [NO_VARIATION_INDEX, NO_VARIATION_INDEX, 0, NO_VARIATION_INDEX], id='sweep_grad-startAngle'), pytest.param([{'Format': int(ot.PaintFormat.PaintSweepGradient), 'ColorLine': {'Extend': int(ot.ExtendMode.PAD), 'ColorStop': [{'StopOffset': 0.0, 'PaletteIndex': 0, 'Alpha': 1.0}, {'StopOffset': 1.0, 'PaletteIndex': 1, 'Alpha': 1.0}]}, 'centerX': 0, 'centerY': 0, 'startAngle': 0.0, 'endAngle': 180.0}, {'Format': int(ot.PaintFormat.PaintSweepGradient), 'ColorLine': {'Extend': int(ot.ExtendMode.PAD), 'ColorStop': [{'StopOffset': 0.0, 'PaletteIndex': 0, 'Alpha': 0.5}, {'StopOffset': 1.0, 'PaletteIndex': 1, 'Alpha': 0.5}]}, 'centerX': 0, 'centerY': 0, 'startAngle': 0.0, 'endAngle': 180.0}], ['<Paint Format="9"><!-- PaintVarSweepGradient -->', ' <ColorLine>', ' <Extend value="pad"/>', ' <!-- StopCount=2 -->', ' <ColorStop index="0">', ' <StopOffset value="0.0"/>', ' <PaletteIndex value="0"/>', ' <Alpha value="1.0"/>', ' <VarIndexBase value="0"/>', ' </ColorStop>', ' <ColorStop index="1">', ' 
<StopOffset value="1.0"/>', ' <PaletteIndex value="1"/>', ' <Alpha value="1.0"/>', ' <VarIndexBase value="0"/>', ' </ColorStop>', ' </ColorLine>', ' <centerX value="0"/>', ' <centerY value="0"/>', ' <startAngle value="0.0"/>', ' <endAngle value="180.0"/>', ' <VarIndexBase/>', '</Paint>'], [NO_VARIATION_INDEX, 0], id='sweep_grad-stops-alpha-reuse-varidxbase'), pytest.param([{'Format': int(ot.PaintFormat.PaintTransform), 'Paint': {'Format': int(ot.PaintFormat.PaintRadialGradient), 'ColorLine': {'Extend': int(ot.ExtendMode.PAD), 'ColorStop': [{'StopOffset': 0.0, 'PaletteIndex': 0, 'Alpha': 1.0}, {'StopOffset': 1.0, 'PaletteIndex': 1, 'Alpha': 1.0}]}, 'x0': 0, 'y0': 0, 'r0': 0, 'x1': 1, 'y1': 1, 'r1': 1}, 'Transform': {'xx': 1.0, 'xy': 0.0, 'yx': 0.0, 'yy': 1.0, 'dx': 0.0, 'dy': 0.0}}, {'Format': int(ot.PaintFormat.PaintTransform), 'Paint': {'Format': int(ot.PaintFormat.PaintRadialGradient), 'ColorLine': {'Extend': int(ot.ExtendMode.PAD), 'ColorStop': [{'StopOffset': 0.0, 'PaletteIndex': 0, 'Alpha': 1.0}, {'StopOffset': 1.0, 'PaletteIndex': 1, 'Alpha': 1.0}]}, 'x0': 0, 'y0': 0, 'r0': 0, 'x1': 1, 'y1': 1, 'r1': 1}, 'Transform': {'xx': 1.0, 'xy': 0.0, 'yx': 0.0, 'yy': 0.5, 'dx': 0.0, 'dy': (- 100.0)}}], ['<Paint Format="13"><!-- PaintVarTransform -->', ' <Paint Format="6"><!-- PaintRadialGradient -->', ' <ColorLine>', ' <Extend value="pad"/>', ' <!-- StopCount=2 -->', ' <ColorStop index="0">', ' <StopOffset value="0.0"/>', ' <PaletteIndex value="0"/>', ' <Alpha value="1.0"/>', ' </ColorStop>', ' <ColorStop index="1">', ' <StopOffset value="1.0"/>', ' <PaletteIndex value="1"/>', ' <Alpha value="1.0"/>', ' </ColorStop>', ' </ColorLine>', ' <x0 value="0"/>', ' <y0 value="0"/>', ' <r0 value="0"/>', ' <x1 value="1"/>', ' <y1 value="1"/>', ' <r1 value="1"/>', ' </Paint>', ' <Transform>', ' <xx value="1.0"/>', ' <yx value="0.0"/>', ' <xy value="0.0"/>', ' <yy value="1.0"/>', ' <dx value="0.0"/>', ' <dy value="0.0"/>', ' <VarIndexBase value="0"/>', ' </Transform>', '</Paint>'], 
[NO_VARIATION_INDEX, NO_VARIATION_INDEX, NO_VARIATION_INDEX, 0, NO_VARIATION_INDEX, 1], id='transform-yy-dy'), pytest.param([{'Format': ot.PaintFormat.PaintTransform, 'Paint': {'Format': ot.PaintFormat.PaintSweepGradient, 'ColorLine': {'Extend': ot.ExtendMode.PAD, 'ColorStop': [{'StopOffset': 0.0, 'PaletteIndex': 0}, {'StopOffset': 1.0, 'PaletteIndex': 1, 'Alpha': 1.0}]}, 'centerX': 0, 'centerY': 0, 'startAngle': 0, 'endAngle': 360}, 'Transform': (1.0, 0, 0, 1.0, 0, 0)}, {'Format': ot.PaintFormat.PaintTransform, 'Paint': {'Format': ot.PaintFormat.PaintSweepGradient, 'ColorLine': {'Extend': ot.ExtendMode.PAD, 'ColorStop': [{'StopOffset': 0.0, 'PaletteIndex': 0}, {'StopOffset': 1.0, 'PaletteIndex': 1, 'Alpha': 1.0}]}, 'centerX': 256, 'centerY': 0, 'startAngle': 0, 'endAngle': 360}, 'Transform': (1., 0, 0, 1.0, 10, 0)}], ['<Paint Format="13"><!-- PaintVarTransform -->', ' <Paint Format="9"><!-- PaintVarSweepGradient -->', ' <ColorLine>', ' <Extend value="pad"/>', ' <!-- StopCount=2 -->', ' <ColorStop index="0">', ' <StopOffset value="0.0"/>', ' <PaletteIndex value="0"/>', ' <Alpha value="1.0"/>', ' <VarIndexBase/>', ' </ColorStop>', ' <ColorStop index="1">', ' <StopOffset value="1.0"/>', ' <PaletteIndex value="1"/>', ' <Alpha value="1.0"/>', ' <VarIndexBase/>', ' </ColorStop>', ' </ColorLine>', ' <centerX value="0"/>', ' <centerY value="0"/>', ' <startAngle value="0.0"/>', ' <endAngle value="360.0"/>', ' <VarIndexBase value="0"/>', ' </Paint>', ' <Transform>', ' <xx value="1.0"/>', ' <yx value="0.0"/>', ' <xy value="0.0"/>', ' <yy value="1.0"/>', ' <dx value="0.0"/>', ' <dy value="0.0"/>', ' <VarIndexBase value="0"/>', ' </Transform>', '</Paint>'], [0, NO_VARIATION_INDEX, NO_VARIATION_INDEX, NO_VARIATION_INDEX, 1, NO_VARIATION_INDEX], id='transform-xx-sweep_grad-centerx-same-varidxbase')])
def test_merge_Paint(self, paints, ttFont, expected_xml, expected_varIdxes):
    """Merge master Paint tables and verify the merged XML and varIdxes.

    Parametrized (decorator above) with per-master paint descriptions, the
    expected XML dump of the merged paint, and the expected variation indices
    accumulated by the merger.
    """
    # Build ot.Paint objects from the parametrized dict descriptions.
    paints = [build_paint(p) for p in paints]
    # The first master is the default; merging mutates a copy of it in place.
    out = deepcopy(paints[0])
    model = VariationModel([{}, {'ZZZZ': 1.0}])
    merger = COLRVariationMerger(model, ['ZZZZ'], ttFont)
    merger.mergeThings(out, paints)
    # Round-tripping through compile/decompile must be lossless.
    assert (compile_decompile(out, ttFont) == out)
    assert (dump_xml(out, ttFont) == expected_xml)
    assert (merger.varIdxes == expected_varIdxes)
def test_merge_ClipList(self, ttFont):
    """Merge two master ClipLists and check deduplication and formats.

    Glyph 'A' is static (ClipBox Format 1); 'B' varies independently;
    'C' and 'D' share identical variations and so should share one
    variable ClipBox (Format 2) with a common VarIndexBase.
    """
    clipLists = [buildClipList(clips) for clips in [{'A': (0, 0, 1000, 1000), 'B': (0, 0, 1000, 1000), 'C': (0, 0, 1000, 1000), 'D': (0, 0, 1000, 1000)}, {'B': (10, 0, 1000, 1000), 'C': (20, 20, 1020, 1020), 'D': (20, 20, 1020, 1020)}]]
    # Merge into a copy of the default master's ClipList.
    out = deepcopy(clipLists[0])
    model = VariationModel([{}, {'ZZZZ': 1.0}])
    merger = COLRVariationMerger(model, ['ZZZZ'], ttFont)
    merger.mergeThings(out, clipLists)
    # Round-trip through compile/decompile must be lossless.
    assert (compile_decompile(out, ttFont) == out)
    assert (dump_xml(out, ttFont) == ['<ClipList Format="1">', '  <Clip>', '    <Glyph value="A"/>', '    <ClipBox Format="1">', '      <xMin value="0"/>', '      <yMin value="0"/>', '      <xMax value="1000"/>', '      <yMax value="1000"/>', '    </ClipBox>', '  </Clip>', '  <Clip>', '    <Glyph value="B"/>', '    <ClipBox Format="2">', '      <xMin value="0"/>', '      <yMin value="0"/>', '      <xMax value="1000"/>', '      <yMax value="1000"/>', '      <VarIndexBase value="0"/>', '    </ClipBox>', '  </Clip>', '  <Clip>', '    <Glyph value="C"/>', '    <Glyph value="D"/>', '    <ClipBox Format="2">', '      <xMin value="0"/>', '      <yMin value="0"/>', '      <xMax value="1000"/>', '      <yMax value="1000"/>', '      <VarIndexBase value="4"/>', '    </ClipBox>', '  </Clip>', '</ClipList>'])
    assert (merger.varIdxes == [0, NO_VARIATION_INDEX, NO_VARIATION_INDEX, NO_VARIATION_INDEX, 1, 1, 1, 1])
.parametrize('master_layer_reuse', [pytest.param(False, id='no-reuse'), pytest.param(True, id='with-reuse')])
.parametrize('color_glyphs, output_layer_reuse, expected_xml, expected_varIdxes', [pytest.param([{'A': {'Format': int(ot.PaintFormat.PaintColrLayers), 'Layers': [{'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 0, 'Alpha': 1.0}, 'Glyph': 'B'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 1, 'Alpha': 1.0}, 'Glyph': 'B'}]}}, {'A': {'Format': ot.PaintFormat.PaintColrLayers, 'Layers': [{'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 0, 'Alpha': 1.0}, 'Glyph': 'B'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 1, 'Alpha': 1.0}, 'Glyph': 'B'}]}}], False, ['<COLR>', ' <Version value="1"/>', ' <!-- BaseGlyphRecordCount=0 -->', ' <!-- LayerRecordCount=0 -->', ' <BaseGlyphList>', ' <!-- BaseGlyphCount=1 -->', ' <BaseGlyphPaintRecord index="0">', ' <BaseGlyph value="A"/>', ' <Paint Format="1"><!-- PaintColrLayers -->', ' <NumLayers value="2"/>', ' <FirstLayerIndex value="0"/>', ' </Paint>', ' </BaseGlyphPaintRecord>', ' </BaseGlyphList>', ' <LayerList>', ' <!-- LayerCount=2 -->', ' <Paint index="0" Format="10"><!-- PaintGlyph -->', ' <Paint Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="0"/>', ' <Alpha value="1.0"/>', ' </Paint>', ' <Glyph value="B"/>', ' </Paint>', ' <Paint index="1" Format="10"><!-- PaintGlyph -->', ' <Paint Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="1"/>', ' <Alpha value="1.0"/>', ' </Paint>', ' <Glyph value="B"/>', ' </Paint>', ' </LayerList>', '</COLR>'], [], id='no-variation'), pytest.param([{'A': {'Format': int(ot.PaintFormat.PaintColrLayers), 'Layers': [{'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 0, 'Alpha': 1.0}, 'Glyph': 'B'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': 
int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 1, 'Alpha': 1.0}, 'Glyph': 'B'}]}, 'C': {'Format': int(ot.PaintFormat.PaintColrLayers), 'Layers': [{'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 2, 'Alpha': 1.0}, 'Glyph': 'B'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 3, 'Alpha': 1.0}, 'Glyph': 'B'}]}}, {'C': {'Format': int(ot.PaintFormat.PaintColrLayers), 'Layers': [{'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 2, 'Alpha': 0.5}, 'Glyph': 'B'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 3, 'Alpha': 0.5}, 'Glyph': 'B'}]}}], False, ['<COLR>', ' <Version value="1"/>', ' <!-- BaseGlyphRecordCount=0 -->', ' <!-- LayerRecordCount=0 -->', ' <BaseGlyphList>', ' <!-- BaseGlyphCount=2 -->', ' <BaseGlyphPaintRecord index="0">', ' <BaseGlyph value="A"/>', ' <Paint Format="1"><!-- PaintColrLayers -->', ' <NumLayers value="2"/>', ' <FirstLayerIndex value="0"/>', ' </Paint>', ' </BaseGlyphPaintRecord>', ' <BaseGlyphPaintRecord index="1">', ' <BaseGlyph value="C"/>', ' <Paint Format="1"><!-- PaintColrLayers -->', ' <NumLayers value="2"/>', ' <FirstLayerIndex value="2"/>', ' </Paint>', ' </BaseGlyphPaintRecord>', ' </BaseGlyphList>', ' <LayerList>', ' <!-- LayerCount=4 -->', ' <Paint index="0" Format="10"><!-- PaintGlyph -->', ' <Paint Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="0"/>', ' <Alpha value="1.0"/>', ' </Paint>', ' <Glyph value="B"/>', ' </Paint>', ' <Paint index="1" Format="10"><!-- PaintGlyph -->', ' <Paint Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="1"/>', ' <Alpha value="1.0"/>', ' </Paint>', ' <Glyph value="B"/>', ' </Paint>', ' <Paint index="2" Format="10"><!-- PaintGlyph -->', ' <Paint Format="3"><!-- PaintVarSolid -->', ' <PaletteIndex value="2"/>', ' <Alpha value="1.0"/>', ' 
<VarIndexBase value="0"/>', ' </Paint>', ' <Glyph value="B"/>', ' </Paint>', ' <Paint index="3" Format="10"><!-- PaintGlyph -->', ' <Paint Format="3"><!-- PaintVarSolid -->', ' <PaletteIndex value="3"/>', ' <Alpha value="1.0"/>', ' <VarIndexBase value="0"/>', ' </Paint>', ' <Glyph value="B"/>', ' </Paint>', ' </LayerList>', '</COLR>'], [0], id='sparse-masters'), pytest.param([{'A': {'Format': int(ot.PaintFormat.PaintColrLayers), 'Layers': [{'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 0, 'Alpha': 1.0}, 'Glyph': 'B'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 1, 'Alpha': 1.0}, 'Glyph': 'B'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 2, 'Alpha': 1.0}, 'Glyph': 'B'}]}, 'C': {'Format': int(ot.PaintFormat.PaintColrLayers), 'Layers': [{'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 1, 'Alpha': 1.0}, 'Glyph': 'B'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 2, 'Alpha': 1.0}, 'Glyph': 'B'}]}, 'D': {'Format': int(ot.PaintFormat.PaintColrLayers), 'Layers': [{'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 1, 'Alpha': 1.0}, 'Glyph': 'B'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 2, 'Alpha': 1.0}, 'Glyph': 'B'}]}, 'E': {'Format': int(ot.PaintFormat.PaintColrLayers), 'Layers': [{'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 1, 'Alpha': 1.0}, 'Glyph': 'B'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 2, 'Alpha': 1.0}, 'Glyph': 'B'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': 
{'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 3, 'Alpha': 1.0}, 'Glyph': 'B'}]}}, {'C': {'Format': int(ot.PaintFormat.PaintColrLayers), 'Layers': [{'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 1, 'Alpha': 0.5}, 'Glyph': 'B'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 2, 'Alpha': 0.5}, 'Glyph': 'B'}]}, 'D': {'Format': int(ot.PaintFormat.PaintColrLayers), 'Layers': [{'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 1, 'Alpha': 0.5}, 'Glyph': 'B'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 2, 'Alpha': 0.5}, 'Glyph': 'B'}]}, 'E': {'Format': int(ot.PaintFormat.PaintColrLayers), 'Layers': [{'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 1, 'Alpha': 0.5}, 'Glyph': 'B'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 2, 'Alpha': 0.5}, 'Glyph': 'B'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 3, 'Alpha': 1.0}, 'Glyph': 'B'}]}}], True, ['<COLR>', ' <Version value="1"/>', ' <!-- BaseGlyphRecordCount=0 -->', ' <!-- LayerRecordCount=0 -->', ' <BaseGlyphList>', ' <!-- BaseGlyphCount=4 -->', ' <BaseGlyphPaintRecord index="0">', ' <BaseGlyph value="A"/>', ' <Paint Format="1"><!-- PaintColrLayers -->', ' <NumLayers value="3"/>', ' <FirstLayerIndex value="0"/>', ' </Paint>', ' </BaseGlyphPaintRecord>', ' <BaseGlyphPaintRecord index="1">', ' <BaseGlyph value="C"/>', ' <Paint Format="1"><!-- PaintColrLayers -->', ' <NumLayers value="2"/>', ' <FirstLayerIndex value="3"/>', ' </Paint>', ' </BaseGlyphPaintRecord>', ' <BaseGlyphPaintRecord index="2">', ' <BaseGlyph value="D"/>', ' <Paint Format="1"><!-- PaintColrLayers -->', 
' <NumLayers value="2"/>', ' <FirstLayerIndex value="3"/>', ' </Paint>', ' </BaseGlyphPaintRecord>', ' <BaseGlyphPaintRecord index="3">', ' <BaseGlyph value="E"/>', ' <Paint Format="1"><!-- PaintColrLayers -->', ' <NumLayers value="2"/>', ' <FirstLayerIndex value="5"/>', ' </Paint>', ' </BaseGlyphPaintRecord>', ' </BaseGlyphList>', ' <LayerList>', ' <!-- LayerCount=7 -->', ' <Paint index="0" Format="10"><!-- PaintGlyph -->', ' <Paint Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="0"/>', ' <Alpha value="1.0"/>', ' </Paint>', ' <Glyph value="B"/>', ' </Paint>', ' <Paint index="1" Format="10"><!-- PaintGlyph -->', ' <Paint Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="1"/>', ' <Alpha value="1.0"/>', ' </Paint>', ' <Glyph value="B"/>', ' </Paint>', ' <Paint index="2" Format="10"><!-- PaintGlyph -->', ' <Paint Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="2"/>', ' <Alpha value="1.0"/>', ' </Paint>', ' <Glyph value="B"/>', ' </Paint>', ' <Paint index="3" Format="10"><!-- PaintGlyph -->', ' <Paint Format="3"><!-- PaintVarSolid -->', ' <PaletteIndex value="1"/>', ' <Alpha value="1.0"/>', ' <VarIndexBase value="0"/>', ' </Paint>', ' <Glyph value="B"/>', ' </Paint>', ' <Paint index="4" Format="10"><!-- PaintGlyph -->', ' <Paint Format="3"><!-- PaintVarSolid -->', ' <PaletteIndex value="2"/>', ' <Alpha value="1.0"/>', ' <VarIndexBase value="0"/>', ' </Paint>', ' <Glyph value="B"/>', ' </Paint>', ' <Paint index="5" Format="1"><!-- PaintColrLayers -->', ' <NumLayers value="2"/>', ' <FirstLayerIndex value="3"/>', ' </Paint>', ' <Paint index="6" Format="10"><!-- PaintGlyph -->', ' <Paint Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="3"/>', ' <Alpha value="1.0"/>', ' </Paint>', ' <Glyph value="B"/>', ' </Paint>', ' </LayerList>', '</COLR>'], [0], id='sparse-masters-with-reuse'), pytest.param([{'A': {'Format': int(ot.PaintFormat.PaintColrLayers), 'Layers': [{'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': 
int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 0, 'Alpha': 1.0}, 'Glyph': 'B'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 1, 'Alpha': 1.0}, 'Glyph': 'B'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 2, 'Alpha': 1.0}, 'Glyph': 'B'}]}, 'C': {'Format': int(ot.PaintFormat.PaintColrLayers), 'Layers': [{'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 1, 'Alpha': 1.0}, 'Glyph': 'B'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 2, 'Alpha': 1.0}, 'Glyph': 'B'}]}}, {'A': {'Format': int(ot.PaintFormat.PaintColrLayers), 'Layers': [{'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 0, 'Alpha': 1.0}, 'Glyph': 'B'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 1, 'Alpha': 0.9}, 'Glyph': 'B'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 2, 'Alpha': 1.0}, 'Glyph': 'B'}]}, 'C': {'Format': int(ot.PaintFormat.PaintColrLayers), 'Layers': [{'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 1, 'Alpha': 0.5}, 'Glyph': 'B'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 2, 'Alpha': 1.0}, 'Glyph': 'B'}]}}], True, ['<COLR>', ' <Version value="1"/>', ' <!-- BaseGlyphRecordCount=0 -->', ' <!-- LayerRecordCount=0 -->', ' <BaseGlyphList>', ' <!-- BaseGlyphCount=2 -->', ' <BaseGlyphPaintRecord index="0">', ' <BaseGlyph value="A"/>', ' <Paint Format="1"><!-- PaintColrLayers -->', ' <NumLayers value="3"/>', ' <FirstLayerIndex value="0"/>', ' </Paint>', ' </BaseGlyphPaintRecord>', ' <BaseGlyphPaintRecord index="1">', ' 
<BaseGlyph value="C"/>', ' <Paint Format="1"><!-- PaintColrLayers -->', ' <NumLayers value="2"/>', ' <FirstLayerIndex value="3"/>', ' </Paint>', ' </BaseGlyphPaintRecord>', ' </BaseGlyphList>', ' <LayerList>', ' <!-- LayerCount=5 -->', ' <Paint index="0" Format="10"><!-- PaintGlyph -->', ' <Paint Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="0"/>', ' <Alpha value="1.0"/>', ' </Paint>', ' <Glyph value="B"/>', ' </Paint>', ' <Paint index="1" Format="10"><!-- PaintGlyph -->', ' <Paint Format="3"><!-- PaintVarSolid -->', ' <PaletteIndex value="1"/>', ' <Alpha value="1.0"/>', ' <VarIndexBase value="0"/>', ' </Paint>', ' <Glyph value="B"/>', ' </Paint>', ' <Paint index="2" Format="10"><!-- PaintGlyph -->', ' <Paint Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="2"/>', ' <Alpha value="1.0"/>', ' </Paint>', ' <Glyph value="B"/>', ' </Paint>', ' <Paint index="3" Format="10"><!-- PaintGlyph -->', ' <Paint Format="3"><!-- PaintVarSolid -->', ' <PaletteIndex value="1"/>', ' <Alpha value="1.0"/>', ' <VarIndexBase value="1"/>', ' </Paint>', ' <Glyph value="B"/>', ' </Paint>', ' <Paint index="4" Format="10"><!-- PaintGlyph -->', ' <Paint Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="2"/>', ' <Alpha value="1.0"/>', ' </Paint>', ' <Glyph value="B"/>', ' </Paint>', ' </LayerList>', '</COLR>'], [0, 1], id='shared-master-layers-different-variations')])
def test_merge_full_table(self, color_glyphs, ttFont, expected_xml, expected_varIdxes, master_layer_reuse, output_layer_reuse):
    """Merge whole COLR tables built per-master and compare the result.

    Parametrized (decorators above) over per-master color glyph
    descriptions, layer-reuse settings for masters and output, the
    expected merged XML, and the expected variation indices.
    """
    # Build one font per master, each with its own COLR table.
    master_ttfs = [deepcopy(ttFont) for _ in range(len(color_glyphs))]
    for (ttf, glyphs) in zip(master_ttfs, color_glyphs):
        ttf['COLR'] = buildCOLR(glyphs, allowLayerReuse=master_layer_reuse)
    # The variable font starts as a copy of the default (first) master.
    vf = deepcopy(master_ttfs[0])
    model = VariationModel([{}, {'ZZZZ': 1.0}])
    merger = COLRVariationMerger(model, ['ZZZZ'], vf, allowLayerReuse=output_layer_reuse)
    merger.mergeTables(vf, master_ttfs)
    out = vf['COLR'].table
    # Round-trip through compile/decompile must be lossless.
    assert (compile_decompile(out, vf) == out)
    assert (dump_xml(out, vf) == expected_xml)
    assert (merger.varIdxes == expected_varIdxes)
.parametrize('color_glyphs, before_xml, expected_xml', [pytest.param({'A': {'Format': int(ot.PaintFormat.PaintColrLayers), 'Layers': [{'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 0, 'Alpha': 1.0}, 'Glyph': 'B'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 1, 'Alpha': 1.0}, 'Glyph': 'C'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 2, 'Alpha': 1.0}, 'Glyph': 'D'}]}, 'E': {'Format': int(ot.PaintFormat.PaintColrLayers), 'Layers': [{'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 1, 'Alpha': 1.0}, 'Glyph': 'C'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 2, 'Alpha': 1.0}, 'Glyph': 'D'}, {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 3, 'Alpha': 1.0}, 'Glyph': 'F'}]}, 'G': {'Format': int(ot.PaintFormat.PaintColrGlyph), 'Glyph': 'E'}}, ['<COLR>', ' <Version value="1"/>', ' <!-- BaseGlyphRecordCount=0 -->', ' <!-- LayerRecordCount=0 -->', ' <BaseGlyphList>', ' <!-- BaseGlyphCount=3 -->', ' <BaseGlyphPaintRecord index="0">', ' <BaseGlyph value="A"/>', ' <Paint Format="1"><!-- PaintColrLayers -->', ' <NumLayers value="3"/>', ' <FirstLayerIndex value="0"/>', ' </Paint>', ' </BaseGlyphPaintRecord>', ' <BaseGlyphPaintRecord index="1">', ' <BaseGlyph value="E"/>', ' <Paint Format="1"><!-- PaintColrLayers -->', ' <NumLayers value="2"/>', ' <FirstLayerIndex value="3"/>', ' </Paint>', ' </BaseGlyphPaintRecord>', ' <BaseGlyphPaintRecord index="2">', ' <BaseGlyph value="G"/>', ' <Paint Format="11"><!-- PaintColrGlyph -->', ' <Glyph value="E"/>', ' </Paint>', ' </BaseGlyphPaintRecord>', ' </BaseGlyphList>', ' <LayerList>', ' <!-- LayerCount=5 -->', ' <Paint index="0" Format="10"><!-- PaintGlyph 
-->', ' <Paint Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="0"/>', ' <Alpha value="1.0"/>', ' </Paint>', ' <Glyph value="B"/>', ' </Paint>', ' <Paint index="1" Format="10"><!-- PaintGlyph -->', ' <Paint Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="1"/>', ' <Alpha value="1.0"/>', ' </Paint>', ' <Glyph value="C"/>', ' </Paint>', ' <Paint index="2" Format="10"><!-- PaintGlyph -->', ' <Paint Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="2"/>', ' <Alpha value="1.0"/>', ' </Paint>', ' <Glyph value="D"/>', ' </Paint>', ' <Paint index="3" Format="1"><!-- PaintColrLayers -->', ' <NumLayers value="2"/>', ' <FirstLayerIndex value="1"/>', ' </Paint>', ' <Paint index="4" Format="10"><!-- PaintGlyph -->', ' <Paint Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="3"/>', ' <Alpha value="1.0"/>', ' </Paint>', ' <Glyph value="F"/>', ' </Paint>', ' </LayerList>', '</COLR>'], ['<COLR>', ' <Version value="1"/>', ' <!-- BaseGlyphRecordCount=0 -->', ' <!-- LayerRecordCount=0 -->', ' <BaseGlyphList>', ' <!-- BaseGlyphCount=3 -->', ' <BaseGlyphPaintRecord index="0">', ' <BaseGlyph value="A"/>', ' <Paint Format="1"><!-- PaintColrLayers -->', ' <NumLayers value="3"/>', ' <FirstLayerIndex value="0"/>', ' </Paint>', ' </BaseGlyphPaintRecord>', ' <BaseGlyphPaintRecord index="1">', ' <BaseGlyph value="E"/>', ' <Paint Format="1"><!-- PaintColrLayers -->', ' <NumLayers value="3"/>', ' <FirstLayerIndex value="3"/>', ' </Paint>', ' </BaseGlyphPaintRecord>', ' <BaseGlyphPaintRecord index="2">', ' <BaseGlyph value="G"/>', ' <Paint Format="11"><!-- PaintColrGlyph -->', ' <Glyph value="E"/>', ' </Paint>', ' </BaseGlyphPaintRecord>', ' </BaseGlyphList>', ' <LayerList>', ' <!-- LayerCount=6 -->', ' <Paint index="0" Format="10"><!-- PaintGlyph -->', ' <Paint Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="0"/>', ' <Alpha value="1.0"/>', ' </Paint>', ' <Glyph value="B"/>', ' </Paint>', ' <Paint index="1" Format="10"><!-- PaintGlyph -->', ' <Paint 
Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="1"/>', ' <Alpha value="1.0"/>', ' </Paint>', ' <Glyph value="C"/>', ' </Paint>', ' <Paint index="2" Format="10"><!-- PaintGlyph -->', ' <Paint Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="2"/>', ' <Alpha value="1.0"/>', ' </Paint>', ' <Glyph value="D"/>', ' </Paint>', ' <Paint index="3" Format="10"><!-- PaintGlyph -->', ' <Paint Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="1"/>', ' <Alpha value="1.0"/>', ' </Paint>', ' <Glyph value="C"/>', ' </Paint>', ' <Paint index="4" Format="10"><!-- PaintGlyph -->', ' <Paint Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="2"/>', ' <Alpha value="1.0"/>', ' </Paint>', ' <Glyph value="D"/>', ' </Paint>', ' <Paint index="5" Format="10"><!-- PaintGlyph -->', ' <Paint Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="3"/>', ' <Alpha value="1.0"/>', ' </Paint>', ' <Glyph value="F"/>', ' </Paint>', ' </LayerList>', '</COLR>'], id='simple-reuse'), pytest.param({'A': {'Format': int(ot.PaintFormat.PaintGlyph), 'Paint': {'Format': int(ot.PaintFormat.PaintSolid), 'PaletteIndex': 0, 'Alpha': 1.0}, 'Glyph': 'B'}}, ['<COLR>', ' <Version value="1"/>', ' <!-- BaseGlyphRecordCount=0 -->', ' <!-- LayerRecordCount=0 -->', ' <BaseGlyphList>', ' <!-- BaseGlyphCount=1 -->', ' <BaseGlyphPaintRecord index="0">', ' <BaseGlyph value="A"/>', ' <Paint Format="10"><!-- PaintGlyph -->', ' <Paint Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="0"/>', ' <Alpha value="1.0"/>', ' </Paint>', ' <Glyph value="B"/>', ' </Paint>', ' </BaseGlyphPaintRecord>', ' </BaseGlyphList>', '</COLR>'], ['<COLR>', ' <Version value="1"/>', ' <!-- BaseGlyphRecordCount=0 -->', ' <!-- LayerRecordCount=0 -->', ' <BaseGlyphList>', ' <!-- BaseGlyphCount=1 -->', ' <BaseGlyphPaintRecord index="0">', ' <BaseGlyph value="A"/>', ' <Paint Format="10"><!-- PaintGlyph -->', ' <Paint Format="2"><!-- PaintSolid -->', ' <PaletteIndex value="0"/>', ' <Alpha value="1.0"/>', ' </Paint>', ' <Glyph 
value="B"/>', ' </Paint>', ' </BaseGlyphPaintRecord>', ' </BaseGlyphList>', '</COLR>'], id='no-layer-list')])
def test_expandPaintColrLayers(self, color_glyphs, ttFont, before_xml, expected_xml):
    """expandPaintColrLayers must inline shared PaintColrLayers sub-graphs.

    After expansion no PaintColrLayers paints may remain in the LayerList,
    and every layer paint must be a distinct object (no aliasing), so the
    merger can vary layers independently.
    """
    colr = buildCOLR(color_glyphs, allowLayerReuse=True)
    assert (dump_xml(colr.table, ttFont) == before_xml)
    before_layer_count = 0
    reuses_colr_layers = False
    if colr.table.LayerList:
        before_layer_count = len(colr.table.LayerList.Paint)
        reuses_colr_layers = any(((p.Format == ot.PaintFormat.PaintColrLayers) for p in colr.table.LayerList.Paint))
    COLRVariationMerger.expandPaintColrLayers(colr.table)
    assert (dump_xml(colr.table, ttFont) == expected_xml)
    after_layer_count = (0 if (not colr.table.LayerList) else len(colr.table.LayerList.Paint))
    if reuses_colr_layers:
        # Expansion removes inter-layer reuse, so the layer count must grow.
        assert (not any(((p.Format == ot.PaintFormat.PaintColrLayers) for p in colr.table.LayerList.Paint)))
        assert (after_layer_count > before_layer_count)
    else:
        assert (after_layer_count == before_layer_count)
    if colr.table.LayerList:
        # All layer paints must be distinct objects after expansion.
        assert (len({id(p) for p in colr.table.LayerList.Paint}) == after_layer_count)
def main():
    """Entry point of the FortiOS endpoint-control registration-sync module.

    Builds the Ansible argument spec from the versioned schema, opens the
    httpapi connection, applies the requested configuration state, and exits
    with the change result (warning or failing on schema-version mismatch).
    """
    module_spec = schema_to_module_spec(versioned_schema)
    mkeyname = 'peer-name'
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']}, 'endpoint_control_forticlient_registration_sync': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Mirror the schema-derived options into the module's nested spec; the
    # mkey attribute identifies the resource and is therefore required.
    for attribute_name in module_spec['options']:
        fields['endpoint_control_forticlient_registration_sync']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['endpoint_control_forticlient_registration_sync']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=True)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            # Default to quiet logging when the option was not supplied.
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'endpoint_control_forticlient_registration_sync')
        (is_error, has_changed, result, diff) = fortios_endpoint_control(module.params, fos, module.check_mode)
    else:
        # No persistent connection available: this module requires httpapi.
        module.fail_json(**FAIL_SOCKET_MSG)
    if (versions_check_result and (versions_check_result['matched'] is False)):
        # Fixed typo in the user-facing warning: "FortOS" -> "FortiOS".
        module.warn('Ansible has detected version mismatch between FortiOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
def trace(mod: nn.Module, sample_inputs: Sequence[Any], remove_assertions: bool=True, remove_exceptions: bool=True, use_acc_normalization: bool=True, ast_rewriter_allow_list: Optional[Set[Type[nn.Module]]]=None, leaf_module_list: Optional[Set[Type[nn.Module]]]=None, acc_normalization_block_list: Optional[Set[Tuple[(str, Union[(str, Callable)])]]]=None) -> torch.fx.GraphModule:
    """Symbolically trace ``mod`` into an acc-op normalized fx.GraphModule.

    The module is rewritten/traced, optionally stripped of assertion and
    exception nodes, shape-propagated with ``sample_inputs``, normalized to
    kwargs-only call conventions and (optionally) to acc ops, then recompiled.

    Args:
        mod: module to trace; switched to eval mode if currently training.
        sample_inputs: example inputs used for shape propagation.
        remove_assertions/remove_exceptions: drop those node patterns.
        use_acc_normalization: run acc_normalizer on the traced graph.
        ast_rewriter_allow_list / leaf_module_list: forwarded to the rewriter.
        acc_normalization_block_list: ops to exclude from normalization.
    """
    if mod.training:
        # Fixed duplicated wording in the original warning message.
        warnings.warn('acc_tracer does not currently support models for training. Calling eval on model before tracing.')
        mod.eval()
    assert isinstance(sample_inputs, (list, tuple))
    traced = rewriter_base_trace(mod, ast_rewriter_allow_list, leaf_module_list)
    if remove_assertions:
        _remove_assertions(traced)
    if remove_exceptions:
        _remove_exceptions(traced)
    # Drop nodes orphaned by assertion/exception removal before recompiling.
    traced.graph.eliminate_dead_code()
    traced.recompile()
    acc_shape_prop.AccShapeProp(traced).propagate(*sample_inputs)
    _replace_tensor_meta_with_rank(traced)
    # Normalize call sites to kwargs so acc normalization sees uniform args.
    traced = NormalizeArgs(traced, normalize_to_only_use_kwargs=False).transform()
    if use_acc_normalization:
        acc_normalizer.normalize(traced, acc_normalization_block_list=acc_normalization_block_list)
    traced.recompile()
    # Re-propagate shapes: normalization may have rewritten the graph.
    acc_shape_prop.AccShapeProp(traced).propagate(*sample_inputs)
    return traced
def alloc_css_item(pool: list, token_type: str, start: int, end: int):
    """Return a CSS token dict, recycling an entry from *pool* when possible.

    A recycled item has its type and region endpoints overwritten in place;
    otherwise a fresh item with a new ``sublime.Region`` is created.
    """
    if not pool:
        # Nothing to recycle: allocate a brand-new item.
        return {'type': token_type, 'region': sublime.Region(start, end)}
    recycled = pool.pop()
    recycled['type'] = token_type
    recycled['region'].a = start
    recycled['region'].b = end
    return recycled
class Migration(migrations.Migration):
    """Initial schema for the test app: MPTT Category tree, feincms content
    type table, and three ExtensionsMixin-based models."""

    initial = True

    dependencies = []

    operations = [
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=20)),
                ('slug', models.SlugField()),
                # MPTT bookkeeping columns (nested-set + tree id/level).
                ('lft', models.PositiveIntegerField(db_index=True, editable=False)),
                ('rght', models.PositiveIntegerField(db_index=True, editable=False)),
                ('tree_id', models.PositiveIntegerField(db_index=True, editable=False)),
                ('level', models.PositiveIntegerField(db_index=True, editable=False)),
                ('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='testapp.Category')),
            ],
            options={
                'verbose_name': 'category',
                'verbose_name_plural': 'categories',
                'ordering': ['tree_id', 'lft'],
            },
        ),
        migrations.CreateModel(
            name='CustomContentType',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('region', models.CharField(max_length=255)),
                ('ordering', models.IntegerField(default=0, verbose_name='ordering')),
            ],
            options={
                'verbose_name': 'custom content type',
                'verbose_name_plural': 'custom content types',
                'db_table': 'testapp_mymodel_customcontenttype',
                'ordering': ['ordering'],
                'permissions': [],
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='ExampleCMSBase',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
            options={'abstract': False},
            bases=(models.Model, feincms.extensions.base.ExtensionsMixin),
        ),
        migrations.CreateModel(
            name='ExampleCMSBase2',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
            options={'abstract': False},
            bases=(models.Model, feincms.extensions.base.ExtensionsMixin),
        ),
        migrations.CreateModel(
            name='MyModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
            options={'abstract': False},
            bases=(models.Model, feincms.extensions.base.ExtensionsMixin),
        ),
        # Added separately because MyModel must exist before the FK.
        migrations.AddField(
            model_name='customcontenttype',
            name='parent',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='customcontenttype_set', to='testapp.MyModel'),
        ),
    ]
class OptionPlotoptionsArearangeSonificationTracksMappingHighpassFrequency(Options):
    """Highpass-frequency mapping options for arearange sonification tracks.

    Fix: each getter/setter pair was written as two plain methods with the
    same name, so every getter was dead code immediately shadowed by its
    setter. Restored the evident ``@property`` / ``@<name>.setter`` pairs
    (the pattern used by these generated option wrappers).
    """

    @property
    def mapFunction(self):
        # None means "not configured": the client-side default applies.
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def query_capacity(in_domain: str, session: Session, target_datetime: datetime) -> (str | None):
    """Query ENTSO-E installed capacity (A68/A33) for the year of *target_datetime*."""
    year = target_datetime.strftime('%Y')
    params = {
        'documentType': 'A68',
        'processType': 'A33',
        'in_Domain': in_domain,
        # Capacity documents are yearly: start and end are the same year.
        'periodStart': year,
        'periodEnd': year,
    }
    return query_ENTSOE(session, params, target_datetime=target_datetime, function_name=query_capacity.__name__)
class OptionSeriesStreamgraphSonificationTracksMappingPlaydelay(Options):
    """Play-delay mapping options for streamgraph sonification tracks.

    Fix: each getter/setter pair was written as two plain methods with the
    same name, so every getter was dead code immediately shadowed by its
    setter. Restored the evident ``@property`` / ``@<name>.setter`` pairs
    (the pattern used by these generated option wrappers).
    """

    @property
    def mapFunction(self):
        # None means "not configured": the client-side default applies.
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def split_and_zip_data_files(zip_file_path, source_path, data_file_name, file_format, download_job=None):
    """Partition a large delimited file into Excel-sized parts and zip them.

    Args:
        zip_file_path: destination archive path.
        source_path: delimited source file to partition.
        data_file_name: base name used for the partitioned part files.
        file_format: key into FILE_FORMATS providing delimiter/extension.
        download_job: optional job record used for logging and failure state.

    Raises:
        Re-raises any exception after marking the download job failed.
    """
    with SubprocessTrace(name=f'job.{JOB_TYPE}.download.zip', service='bulk-download', span_type=SpanTypes.WORKER, source_path=source_path, zip_file_path=zip_file_path) as span:
        try:
            log_time = time.perf_counter()
            delim = FILE_FORMATS[file_format]['delimiter']
            extension = FILE_FORMATS[file_format]['extension']
            output_template = f'{data_file_name}_%s.{extension}'
            write_to_log(message='Beginning the delimited text file partition', download_job=download_job)
            # Split so each part stays under the Excel row limit.
            list_of_files = partition_large_delimited_file(download_job=download_job, file_path=source_path, delimiter=delim, row_limit=EXCEL_ROW_LIMIT, output_name_template=output_template)
            span.set_tag('file_parts', len(list_of_files))
            msg = f'Partitioning data into {len(list_of_files)} files took {(time.perf_counter() - log_time):.4f}s'
            write_to_log(message=msg, download_job=download_job)
            write_to_log(message='Beginning zipping and compression', download_job=download_job)
            log_time = time.perf_counter()
            append_files_to_zip_file(list_of_files, zip_file_path)
            write_to_log(message=f'Writing to zipfile took {(time.perf_counter() - log_time):.4f}s', download_job=download_job)
        except Exception as e:
            # NOTE(review): this message also covers zip failures — confirm wording.
            message = 'Exception while partitioning text file'
            if download_job:
                fail_download(download_job, e, message)
            write_to_log(message=message, download_job=download_job, is_error=True)
            # Bare raise preserves the original traceback/exception context.
            raise
def get_record(fileObj, line_no=None, offset=0):
    """Return the next meaningful record from *fileObj*.

    Skips blank lines and lines starting with '#' (compared via str2octs),
    accumulating their byte lengths into *offset*. When *line_no* is given,
    it is incremented for every line actually read (while more input exists).

    Returns:
        (line, line_no, offset) - the first non-blank, non-comment line
        (empty at EOF), the updated line counter, and the updated offset.
    """
    line = fileObj.readline()
    if (line_no is not None) and line:
        line_no += 1
    while line:
        stripped = line.strip()
        # Stop at the first line that carries actual record content.
        if stripped and (not stripped.startswith(str2octs('#'))):
            break
        # Blank/comment line: account for its length and advance.
        offset += len(line)
        line = fileObj.readline()
        if (line_no is not None) and line:
            line_no += 1
    return (line, line_no, offset)
# NOTE(review): the two bare strings below look like stripped
# @patch(...) decorator arguments for the mocks injected into the test -
# confirm against the original file.
('ciftify.config.verify_msm_available')
('ciftify.bidsapp.fmriprep_ciftify.run')
def test_ux03_default_two_participants_for_ds005(mock_run, mock_vmsm):
    # Run the bids-app entry point for two explicit participants and check
    # how often each downstream tool would have been invoked.
    uargs = [ds005_bids, '/output/dir', 'participant', '--participant_label=01,14']
    ret = simple_main_run(uargs)
    call_list = parse_call_list_into_strings(mock_run.call_args_list)
    # 4 fmriprep calls per participant; one recon_all per participant.
    assert (count_calls_to('fmriprep', call_list) == 8)
    assert (count_calls_to('ciftify_recon_all', call_list) == 2)
    assert (count_calls_to('fmriprep', call_list, call_contains='--participant_label 01') == 4)
class DPMSolver(Scheduler):
    """Diffusion sampler: first-order DPM-Solver update for the warm-up call,
    then a second-order multistep update using the two most recent denoised
    data estimates.

    NOTE(review): the update formulas appear to follow the DPM-Solver /
    DPM-Solver++ papers - verify the coefficients against the reference
    implementation before changing anything here.
    """
    def __init__(self, num_inference_steps: int, num_train_timesteps: int=1000, initial_diffusion_rate: float=0.00085, final_diffusion_rate: float=0.012, noise_schedule: NoiseSchedule=NoiseSchedule.QUADRATIC, device: (Device | str)='cpu', dtype: Dtype=float32):
        super().__init__(num_inference_steps=num_inference_steps, num_train_timesteps=num_train_timesteps, initial_diffusion_rate=initial_diffusion_rate, final_diffusion_rate=final_diffusion_rate, noise_schedule=noise_schedule, device=device, dtype=dtype)
        # Ring buffer of the last two data estimates (newest appended last),
        # consumed by the second-order multistep update.
        self.estimated_data = deque(([tensor([])] * 2), maxlen=2)
        # Warm-up counter: the very first call uses the first-order update.
        self.initial_steps = 0
    def _generate_timesteps(self) -> Tensor:
        # Evenly spaced training timesteps (dropping index 0 of the linspace),
        # flipped so sampling proceeds from high noise to low noise.
        return tensor(np.linspace(0, (self.num_train_timesteps - 1), (self.num_inference_steps + 1)).round().astype(int)[1:], device=self.device).flip(0)
    def dpm_solver_first_order_update(self, x: Tensor, noise: Tensor, step: int) -> Tensor:
        """First-order update of ``x`` toward the next (less noisy) timestep.

        ``noise`` here is the current denoised-data estimate; on the last
        step the "previous" timestep wraps to index 0.
        """
        (timestep, previous_timestep) = (self.timesteps[step], self.timesteps[((step + 1) if (step < (len(self.timesteps) - 1)) else 0)])
        (previous_ratio, current_ratio) = (self.signal_to_noise_ratios[previous_timestep], self.signal_to_noise_ratios[timestep])
        previous_scale_factor = self.cumulative_scale_factors[previous_timestep]
        (previous_noise_std, current_noise_std) = (self.noise_std[previous_timestep], self.noise_std[timestep])
        # exp(-(h)) - 1 with h = log-SNR difference between the two timesteps.
        factor = (exp((- (previous_ratio - current_ratio))) - 1.0)
        denoised_x = (((previous_noise_std / current_noise_std) * x) - ((factor * previous_scale_factor) * noise))
        return denoised_x
    def multistep_dpm_solver_second_order_update(self, x: Tensor, step: int) -> Tensor:
        """Second-order multistep update using the last two data estimates."""
        (previous_timestep, current_timestep, next_timestep) = ((self.timesteps[(step + 1)] if (step < (len(self.timesteps) - 1)) else tensor([0])), self.timesteps[step], self.timesteps[(step - 1)])
        # estimated_data[-1] is the newest estimate, [-2] the one before it.
        (current_data_estimation, next_data_estimation) = (self.estimated_data[(- 1)], self.estimated_data[(- 2)])
        (previous_ratio, current_ratio, next_ratio) = (self.signal_to_noise_ratios[previous_timestep], self.signal_to_noise_ratios[current_timestep], self.signal_to_noise_ratios[next_timestep])
        previous_scale_factor = self.cumulative_scale_factors[previous_timestep]
        (previous_std, current_std) = (self.noise_std[previous_timestep], self.noise_std[current_timestep])
        # Finite-difference slope of the data estimate w.r.t. the log-SNR.
        estimation_delta = ((current_data_estimation - next_data_estimation) / ((current_ratio - next_ratio) / (previous_ratio - current_ratio)))
        factor = (exp((- (previous_ratio - current_ratio))) - 1.0)
        denoised_x = ((((previous_std / current_std) * x) - ((factor * previous_scale_factor) * current_data_estimation)) - ((0.5 * (factor * previous_scale_factor)) * estimation_delta))
        return denoised_x
    def __call__(self, x: Tensor, noise: Tensor, step: int) -> Tensor:
        """Apply one sampler step: estimate the clean data from the predicted
        ``noise``, buffer it, then run the first- or second-order update."""
        current_timestep = self.timesteps[step]
        (scale_factor, noise_ratio) = (self.cumulative_scale_factors[current_timestep], self.noise_std[current_timestep])
        # x0 estimate: invert the forward process x = s*x0 + sigma*noise.
        estimated_denoised_data = ((x - (noise_ratio * noise)) / scale_factor)
        self.estimated_data.append(estimated_denoised_data)
        denoised_x = (self.dpm_solver_first_order_update(x=x, noise=estimated_denoised_data, step=step) if (self.initial_steps == 0) else self.multistep_dpm_solver_second_order_update(x=x, step=step))
        if (self.initial_steps < 2):
            self.initial_steps += 1
        return denoised_x
class ClientContextResponse(Response):
    """Response initialized as a copy of an existing Response's state."""
    def __init__(self, original_response: Response):
        super().__init__()
        self.status = original_response.status
        # NOTE(review): reaches into the private ``_data`` dict of headers.
        self.headers._data.update(original_response.headers._data)
        self.cookies.update(original_response.cookies.copy())
        # NOTE(review): this wholesale __dict__ copy rebinds attributes to the
        # *same* objects as the original (shared mutable state) and largely
        # supersedes the explicit copies above - confirm the aliasing is
        # intended.
        self.__dict__.update(original_response.__dict__)
class FwAnalysisStatus():
    # Progress-tracking record for one firmware analysis run.
    # NOTE(review): the ``field(default_factory=...)`` defaults only work under
    # a @dataclass decorator - presumably stripped from this view; confirm.
    files_to_unpack: Set[str]  # file UIDs still queued for unpacking
    files_to_analyze: Set[str]  # file UIDs still queued for analysis
    total_files_count: int  # total number of files expected
    hid: str  # human-readable identifier of the firmware
    analysis_plugins: Dict[(str, int)]  # plugin name -> outstanding file count
    start_time: float = field(default_factory=time)  # epoch seconds at creation
    completed_files: Set[str] = field(default_factory=set)  # finished file UIDs
    total_files_with_duplicates: int = 1
    unpacked_files_count: int = 1
    analyzed_files_count: int = 0
class TestLegalDoc(unittest.TestCase):
    """API tests for the /v1/legal/<doc_type>/<no> endpoint."""
    # NOTE(review): the bare tuple below looks like a stripped
    # @patch(...) decorator mocking the Elasticsearch client - confirm.
    ('webservices.rest.legal.es_client.search', legal_doc_data)
    def test_legal_doc(self):
        # Known doc type returns 200 with the canned document payload.
        app = rest.app.test_client()
        response = app.get((('/v1/legal/' + DOCS_PATH) + '/doc_type/1?api_key=1234'))
        assert (response.status_code == 200)
        result = json.loads(codecs.decode(response.data))
        assert (result == {DOCS_PATH: [{'type': 'document type', 'no': '100', 'summary': 'summery 100', 'documents': [{'document_id': 111, 'category': 'Final Opinion', 'description': 'Closeout Letter', 'url': 'files/legal/aos/100/111.pdf'}, {'document_id': 222, 'category': 'Draft Documents', 'description': 'Vote', 'url': 'files/legal/aos/100/222.pdf'}]}]})
    def test_legal_doc_wrong_path(self):
        # Unknown doc path yields a 404.
        app = rest.app.test_client()
        response = app.get('/v1/legal/wrong_path/doc_type/1?api_key=1234')
        assert (response.status_code == 404)
class Guild(models.Model):
    """Player guild: a named group led by one player, with join requests,
    badges and cosmetic metadata.

    NOTE(review): verbose_name / help_text strings are all empty here -
    possibly non-ASCII text stripped during extraction; confirm.
    """
    class Meta():
        verbose_name = ''
        verbose_name_plural = ''
    id = models.AutoField(primary_key=True)
    name = models.CharField('', max_length=20, unique=True, help_text='')
    # One player can lead at most one guild.
    leader = models.OneToOneField(Player, models.CASCADE, unique=True, related_name='leading_guild', verbose_name='', help_text='')
    slogan = models.CharField('', max_length=200, help_text='')
    totem = models.ImageField('', blank=True, help_text='')
    badges = models.ManyToManyField('badge.Badge', related_name='guilds', verbose_name='', help_text='')
    # Players who have asked to join but are not yet members.
    requests = models.ManyToManyField('player.Player', related_name='requested_guilds', verbose_name='', help_text='')
    founded_at = models.DateTimeField('', auto_now_add=True, help_text='')
    def __str__(self):
        return self.name
# NOTE(review): looks like a stripped @override_settings(...) decorator
# forcing a local-memory cache backend for these tests - confirm.
_settings(CACHES={'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}})
class MemoizeDecoratorTest(SimpleTestCase):
    """Tests for the memoize() caching decorator."""
    def test_cached_function_with_basic_arguments(self):
        # Basic (hashable/primitive) args are cached: second identical call
        # returns the cached result without invoking the wrapped function.
        test_func = Mock(side_effect=(lambda *args, **kwargs: (args, kwargs)), __qualname__='test_func')
        cached_func = memoize()(test_func)
        result = cached_func('bar', foo=12, flag=True)
        self.assertEqual(result, (('bar',), {'foo': 12, 'flag': True}))
        result2 = cached_func('bar', foo=12, flag=True)
        self.assertEqual(result2, result)
        test_func.assert_called_once_with('bar', foo=12, flag=True)
    def test_non_basic_arguments_with_cache_key_attr(self):
        # Non-primitive args are keyed by their ``cache_key`` attribute:
        # same key -> cache hit; a new object with its own key -> fresh call.
        test_func = Mock(side_effect=(lambda arg: arg.value), __qualname__='test_func2')
        cached_func = memoize()(test_func)
        test_arg = MyTestObject()
        test_arg.cache_key = b''
        result = cached_func(test_arg)
        self.assertEqual(result, 'hello')
        result2 = cached_func(test_arg)
        self.assertEqual(result2, result)
        test_func.assert_called_once_with(test_arg)
        new_test_arg = MyTestObject()
        new_test_arg.cache_key = b''
        cached_func(new_test_arg)
        test_func.assert_called_with(new_test_arg)
        self.assertEqual(test_func.call_count, 2)
    def test_non_basic_arguments_without_cache_key_raise_error(self):
        # Uncacheable arguments (no cache_key, not primitive) raise ValueError.
        def test_func(arg):
            return 'foo'
        cached_func = memoize()(test_func)
        some_dict_arg = {}
        with self.assertRaises(ValueError):
            cached_func(some_dict_arg)
        test_arg = MyTestObject()
        with self.assertRaises(ValueError):
            cached_func(test_arg)
def _format_date_time(date_time):
tm = date_time.timetuple()
offset = 0
sign = '+'
if (date_time.tzinfo is not None):
if (date_time.tzinfo.__class__ is not TZFixedOffset):
raise ValueError('Only TZFixedOffset supported.')
offset = date_time.tzinfo.offset
if (offset < 0):
offset = (offset * (- 1))
sign = '-'
return ('%04d-%02d-%02dT%02d:%02d:%02d.%06d%c%02d:%02d' % (tm.tm_year, tm.tm_mon, tm.tm_mday, tm.tm_hour, tm.tm_min, tm.tm_sec, date_time.microsecond, sign, (offset / 60), (offset % 60))) |
def _get_revlog_graph(cid):
    """Render an HTML block graph of the review log for card id *cid*.

    Only learning (type 1) and review (type 2) revlog entries are shown; each
    entry becomes one colored block labeled with its interval in days.

    Returns:
        The HTML string for the graph.
    """
    # NOTE(review): cid is interpolated straight into the SQL text; this is
    # only safe if cid is always an internal integer id - a parameterized
    # query would be preferable.
    entries = mw.col.db.all(('select * from revlog where cid = %s and (type = 1 or type = 2)' % cid))
    html = "<div class='w-100'>%s</div>"
    blocks = ''
    # Avoid shadowing the builtin ``type`` (original code used it as a name).
    for (_, _, _, ease, ivl, _, _, _, rev_type) in entries:
        # Review entries use a shifted ease scale; normalize so ease values
        # map onto the same CSS classes across entry types.
        ease = ((ease + 1) if ((rev_type == 2) and (ease > 1)) else ease)
        # Interval rendered in whole days; under a day shows as "<1".
        day_ivl = int(utility.misc.to_day_ivl(ivl))
        sivl = (day_ivl if (day_ivl > 0) else '<1')
        blocks += ("<div class='revlog-block revlog-block-%s %s'>%s</div>" % (ease, ('larger' if (ivl > 1000) else ''), sivl))
    return (html % blocks)
class Tensor(Node):
    """Graph tensor node.

    All state lives in the ``_attrs`` dict inherited from ``Node``: shape
    (a list of IntVar), dtype, producer ops (``src_ops``), consumer ops
    (``dst_ops``), optional bound constant data, and various graph-compiler
    flags. Arithmetic dunders dispatch to elementwise ops via OP_REGISTRY.
    """
    def __init__(self, shape: List[IntVar], name: str=None, src_ops: Iterable[Node]=None, dst_ops: Iterable[Node]=None, dtype: str='float16', is_input: bool=False, is_output: bool=False, value: Any=None, is_view_of: Any=None, is_internal_constant: bool=False, skip_constant_folding: bool=False, check_nan_and_inf: bool=False, check_outputs: bool=False, original_name: str=None) -> None:
        super().__init__()
        self._attrs['shape'] = self._convert_shape(shape)
        self._attrs['name'] = name
        self._attrs['src_ops'] = StableSet(src_ops)
        self._attrs['dst_ops'] = StableSet(dst_ops)
        self._attrs['dtype'] = dtype
        self._attrs['is_output'] = is_output
        self._attrs['is_input'] = is_input
        self._attrs['is_param'] = False
        self._attrs['is_internal_constant'] = is_internal_constant
        self._attrs['skip_constant_folding'] = skip_constant_folding
        self._attrs['has_output_aliases'] = False
        self._attrs['external_tensor'] = None
        self._attrs['is_view_of'] = is_view_of
        # A view always shares the dtype of the tensor it views.
        if is_view_of:
            self._attrs['dtype'] = is_view_of._attrs['dtype']
        self._attrs['value'] = value
        # Depth = 1 + max depth over producing ops; 0 for inputs/constants.
        src_deps = [src_op._attrs['depth'] for src_op in self._attrs['src_ops']]
        self._attrs['depth'] = ((max(src_deps) + 1) if (len(src_deps) > 0) else 0)
        self._attrs['offset'] = None
        self._attrs['data'] = None
        self._attrs['constant_folding_output_idx'] = None
        self._attrs['check_nan_and_inf'] = check_nan_and_inf
        self._attrs['check_outputs'] = check_outputs
        self._attrs['original_name'] = original_name
    def __str__(self) -> str:
        """Pretty-print ``_attrs``; op sets are rendered as lists of op names."""
        output = {}
        for key in self._attrs.keys():
            if ((key in ('src_ops', 'dst_ops')) and (self._attrs[key] is not None)):
                output[key] = [x._attrs['name'] for x in self._attrs[key]]
            else:
                output[key] = self._attrs[key]
        return pformat(output, indent=2)
    def _convert_shape(self, shape: List[Union[(int, IntVar)]]) -> List[IntVar]:
        """Normalize a shape to IntVar dims (ints become IntImm); reject others."""
        ret = []
        for v in shape:
            if isinstance(v, int):
                ret.append(IntImm(v))
            elif isinstance(v, IntVar):
                ret.append(v)
            else:
                raise RuntimeError(f'Unsupported dim type: {type(v)}, dim: {v}')
        return ret
    def shape(self) -> List[IntVar]:
        """The tensor's shape as a list of IntVar dims."""
        return self._attrs['shape']
    def _rank(self) -> int:
        """Number of dimensions."""
        return len(self._attrs['shape'])
    def _size(self, dim) -> IntVar:
        """Size of dimension ``dim`` (supports negative indices via wrap_dim)."""
        return self._attrs['shape'][wrap_dim(dim, self._rank())]
    def dtype(self) -> str:
        """The tensor's dtype string (e.g. 'float16')."""
        return self._attrs['dtype']
    def src_ops(self) -> Set[Operator]:
        """Ops that produce this tensor."""
        return self._attrs['src_ops']
    def dst_ops(self) -> Set[Operator]:
        """Ops that consume this tensor."""
        return self._attrs['dst_ops']
    def is_a_const_num(self) -> bool:
        """True for a rank-0 tensor carrying a concrete scalar value."""
        return ((len(self._attrs['shape']) == 0) and (self._attrs['value'] is not None))
    def is_jagged(self) -> bool:
        """True when the leading dimension is a JaggedIntVar (jagged tensor)."""
        return ((len(self._attrs['shape']) > 0) and isinstance(self._attrs['shape'][0], JaggedIntVar))
    def size_bytes(self, alignment: int=1) -> int:
        """Storage size in bytes, padded to ``alignment``."""
        return get_aligned_size(self._attrs['shape'], self.dtype(), alignment)
    def pseudo_code(self, with_shape=True) -> str:
        """Compact one-line description used in graph dumps/debug output."""
        name = self._attrs['name']
        if (name is None):
            name = 'None'
        args = [f'name={name}']
        if with_shape:
            shapes = ', '.join([dim.pseudo_code() for dim in self._attrs['shape']])
            args.append(f'shape=[{shapes}]')
        data = self._attrs['data']
        if (data is not None):
            args.append(f'data=({data.size()} bytes)')
        if self.is_jagged():
            args.append('jagged=True')
        return f"Tensor({', '.join(args)})"
    def _bind_data(self, data: _ConstantTensorData) -> None:
        """Attach constant data; only valid for source-less (constant) tensors
        whose dtype and exact byte size match ``data``."""
        if self.src_ops():
            raise ValueError(f"Cannot bind tensor {self._attrs['name']}; len(self.src_ops())={len(self.src_ops())!r} > 0")
        dtype = self._attrs['dtype']
        if (not data.is_dtype(dtype)):
            raise ValueError(f"data's dtype did not match: expected {dtype}, got {data.dtype}")
        tensor_size = self.size_bytes(alignment=1)
        if (tensor_size != len(data)):
            raise ValueError(f"ConstantTensor's maximum size is not equal to len(data)! Got len(data)={len(data)!r}, but expected at least {tensor_size} bytes. Check that the ConstantTensor's size and dtype are correct.")
        self._attrs['data'] = data
    def __deepcopy__(self, memo):
        # Register the copy in memo before deep-copying _attrs so cyclic
        # references (e.g. via src/dst ops) resolve back to it.
        result = Tensor(self.shape())
        memo[id(self)] = result
        result._attrs = copy.deepcopy(self._attrs, memo)
        return result
    def __add__(self, other: Any) -> Tensor:
        return OP_REGISTRY.get('ADD')(self, other)
    def __radd__(self, other: Any) -> Tensor:
        return OP_REGISTRY.get('ADD')(other, self)
    def __sub__(self, other: Any) -> Tensor:
        return OP_REGISTRY.get('SUB')(self, other)
    def __rsub__(self, other: Any) -> Tensor:
        return OP_REGISTRY.get('SUB')(other, self)
    def __mul__(self, other: Any) -> Tensor:
        return OP_REGISTRY.get('MUL')(self, other)
    def __rmul__(self, other: Any) -> Tensor:
        return OP_REGISTRY.get('MUL')(other, self)
    def __truediv__(self, other: Any) -> Tensor:
        return OP_REGISTRY.get('DIV')(self, other)
    def __rtruediv__(self, other: Any) -> Tensor:
        return OP_REGISTRY.get('DIV')(other, self)
    def __neg__(self) -> Tensor:
        # Negation implemented as multiplication by -1.
        return OP_REGISTRY.get('MUL')((- 1), self)
class TestWrapperGenerator(unittest.TestCase):
    """Tests for the VTK wrapper generator's signature classification."""
    def setUp(self):
        # Shared module-level generator instance (cached for speed).
        self.wg = _cache
    def test_find_type(self):
        # Single types and tuples/lists of types are classified as
        # 'basic', 'vtk' or 'array'.
        wg = self.wg
        sigs = ['int', 'vtkOpenGLVolumeMapper', ('int', 'int', 'float', 'list'), ('int', 'vtkActor', 'vtkXMLReader'), ['vtkImageActor', 'vtkExporter'], ['int', 'vtkDataArray', 'vtkCellArray', 'vtkIdTypeArray']]
        expect = ['basic', 'vtk', 'basic', 'vtk', 'vtk', 'array']
        for (i, sig) in enumerate(sigs):
            self.assertEqual(expect[i], wg._find_type(sig))
    def test_sig_types(self):
        # Whole method signatures classify as (return-kind, arg-kind) pairs.
        wg = self.wg
        meths = [vtk.vtkProperty.GetColor, vtk.vtkProperty.GetRepresentation, vtk.vtkStructuredPointsReader.GetOutput, vtk.vtkPolyData.SetPoints, vtk.vtkPolyData.SetPolys, vtk.vtkQuad.CellBoundary, vtk.vtkContourFilter.SetLocator]
        expect = [('basic', 'basic'), ('basic', None), ('vtk', 'basic'), ('basic', 'array'), ('basic', 'array'), ('basic', 'array'), ('basic', 'vtk')]
        for (i, meth) in enumerate(meths):
            sig = wg.parser.get_method_signature(meth)
            self.assertEqual(expect[i], wg._find_sig_type(sig))
        sig = [(['int'], ['int']), (['vtkStructuredPoints'], ['vtkFooClass'])]
        self.assertEqual(('vtk', 'vtk'), wg._find_sig_type(sig))
    def test_unicode_return_value(self):
        # Method name differs across VTK versions; fall back as needed and
        # derive the expected return type from the method's docstring.
        wg = self.wg
        try:
            meth = vtk.vtkDelimitedTextReader.GetUTF8RecordDelimiters
        except AttributeError:
            meth = vtk.vtkDelimitedTextReader.GetUnicodeRecordDelimiters
        expect = ('unicode' if ('-> unicode' in meth.__doc__) else 'string')
        sig = wg.parser.get_method_signature(meth)
        self.assertEqual(sig[0][0][0], expect)
class CoverChooser(GObject.GObject):
    """Dialog that fetches candidate album covers for a track in a background
    thread and lets the user pick one.

    Signals:
        covers-fetched: emitted (from the worker, via GObject) with the list
            of db strings once fetching completes.
        cover-chosen: emitted with (track, coverdata) when the user sets one.
    """
    __gsignals__ = {'covers-fetched': (GObject.SignalFlags.RUN_LAST, None, (object,)), 'cover-chosen': (GObject.SignalFlags.RUN_LAST, None, (object, object))}
    def __init__(self, parent, track, search=None):
        """Build the UI from the Glade file and start the fetcher thread."""
        GObject.GObject.__init__(self)
        self.parent = parent
        self.builder = guiutil.get_builder(xdg.get_data_path('ui', 'coverchooser.ui'))
        self.builder.connect_signals(self)
        self.window = self.builder.get_object('CoverChooser')
        self.window.set_title((_('Cover options for %(artist)s - %(album)s') % {'artist': track.get_tag_display('artist'), 'album': track.get_tag_display('album')}))
        self.window.set_transient_for(parent)
        self.message = dialogs.MessageBar(parent=self.builder.get_object('main_container'), buttons=Gtk.ButtonsType.CLOSE)
        self.message.connect('response', self.on_message_response)
        self.track = track
        self.covers = []
        self.current = 0
        self.cover = guiutil.ScalableImageWidget()
        self.cover.set_image_size(350, 350)
        self.cover_image_box = self.builder.get_object('cover_image_box')
        self.stack = self.builder.get_object('stack')
        self.stack_ready = self.builder.get_object('stack_ready')
        self.size_label = self.builder.get_object('size_label')
        self.source_label = self.builder.get_object('source_label')
        self.covers_model = self.builder.get_object('covers_model')
        self.previews_box = self.builder.get_object('previews_box')
        # Previews strip stays hidden (and unbound) until covers arrive.
        self.previews_box.set_no_show_all(True)
        self.previews_box.hide()
        self.previews_box.set_model(None)
        self.set_button = self.builder.get_object('set_button')
        self.set_button.set_sensitive(False)
        self.window.show_all()
        # Event used to cancel the fetcher thread when the dialog closes.
        self.stopper = threading.Event()
        self.fetcher_thread = threading.Thread(target=self.fetch_cover, name='Coverfetcher')
        self.fetcher_thread.start()
    def fetch_cover(self):
        """Worker thread: collect covers from all enabled sources, append
        each decodable image to the model, then signal completion."""
        db_strings = COVER_MANAGER.find_covers(self.track)
        if db_strings:
            for db_string in db_strings:
                # Bail out quickly if the dialog was cancelled.
                if self.stopper.is_set():
                    return
                coverdata = COVER_MANAGER.get_cover_data(db_string)
                pixbuf = pixbuf_from_data(coverdata)
                if pixbuf:
                    # Store (db_string, raw data), full pixbuf, 50x50 thumbnail.
                    self.covers_model.append([(db_string, coverdata), pixbuf, pixbuf.scale_simple(50, 50, GdkPixbuf.InterpType.BILINEAR)])
        self.emit('covers-fetched', db_strings)
    def do_covers_fetched(self, db_strings):
        """Default 'covers-fetched' handler: populate the UI or show a
        warning when nothing was found."""
        if self.stopper.is_set():
            return
        self.stack.set_visible_child(self.stack_ready)
        self.previews_box.set_model(self.covers_model)
        if db_strings:
            self.cover_image_box.pack_start(self.cover, True, True, 0)
            self.cover.show()
            self.set_button.set_sensitive(True)
            if (len(db_strings) > 1):
                self.previews_box.set_no_show_all(False)
                self.previews_box.show_all()
            # Preselect the track's current cover if it is among the results.
            track_db_string = COVER_MANAGER.get_db_string(self.track)
            position = (db_strings.index(track_db_string) if (track_db_string in db_strings) else 0)
            self.previews_box.select_path(Gtk.TreePath(position))
        else:
            self.builder.get_object('stack').hide()
            self.builder.get_object('actions_box').hide()
            self.message.show_warning(_('No covers found.'), _('None of the enabled sources has a cover for this track, try enabling more sources.'))
    def on_cancel_button_clicked(self, button):
        # Stop the fetcher before tearing down the window.
        self.stopper.set()
        self.window.destroy()
    def on_set_button_clicked(self, button):
        """Persist the selected cover and emit 'cover-chosen'."""
        paths = self.previews_box.get_selected_items()
        if paths:
            path = paths[0]
            coverdata = self.covers_model[path][0]
            COVER_MANAGER.set_cover(self.track, coverdata[0], coverdata[1])
            self.emit('cover-chosen', self.track, coverdata[1])
            self.window.destroy()
    def on_previews_box_selection_changed(self, iconview):
        """Update the preview image and size/source labels for the selection."""
        paths = self.previews_box.get_selected_items()
        if paths:
            path = paths[0]
            db_string = self.covers_model[path][0]
            # db_string[0] is "source:identifier"; look up the provider title.
            source = db_string[0].split(':', 1)[0]
            provider = providers.get_provider('covers', source)
            pixbuf = self.covers_model[path][1]
            self.cover.set_image_pixbuf(pixbuf)
            self.size_label.set_text(_('{width}x{height} pixels').format(width=pixbuf.get_width(), height=pixbuf.get_height()))
            self.source_label.set_text(getattr(provider, 'title', source))
            self.set_button.set_sensitive(True)
        else:
            self.set_button.set_sensitive(False)
    def on_previews_box_item_activated(self, iconview, path):
        # Double-click activates the same action as the Set button.
        self.set_button.clicked()
    def on_message_response(self, widget, response):
        # Closing the "no covers" message closes the whole dialog.
        if (response == Gtk.ResponseType.CLOSE):
            self.window.destroy()
class TestHtml5AdvancedSelectors2(util.PluginTestCase):
    """Spellcheck-filter tests for CSS4 ``:has()``/``:not()`` selector lists
    in the HTML5 ignore configuration."""
    def setup_fs(self):
        # Fixture document: five divs (aaaa..iiii), each with a child <p>.
        template = self.dedent('\n <!DOCTYPE html>\n <html>\n <head>\n <meta content="text/html; charset=UTF-8">\n </head>\n <body>\n <div class="aaaa">aaaa\n <p class="bbbb">bbbb</p>\n </div>\n <div class="cccc">cccc\n <p class="dddd">dddd</p>\n </div>\n <div class="eeee">eeee\n <p class="ffff">ffff</p>\n </div>\n <div class="gggg">gggg\n <p class="hhhh">hhhh</p>\n </div>\n <div class="iiii">iiii\n <p class="jjjj">jjjj</p>\n <span></span>\n </div>\n </body>\n </body>\n </html>\n ')
        self.mktemp('test.txt', template, 'utf-8')
    def test_has_list(self):
        # :has() with a selector list ignores divs having any listed child.
        config = self.dedent("\n matrix:\n - name: html_css\n sources:\n - '{}/**/*.txt'\n aspell:\n lang: en\n d: en_US\n hunspell:\n d: en_US\n pipeline:\n - pyspelling.filters.html:\n mode: html5\n ignores:\n - 'div:has(> .bbbb, .ffff, .jjjj)'\n ").format(self.tempdir)
        self.mktemp('.html5.yml', config, 'utf-8')
        self.assert_spellcheck('.html5.yml', ['cccc', 'dddd', 'gggg', 'hhhh'])
    def test_not_has_list(self):
        # :has(> :not(list)) ignores divs with a child NOT in the list.
        config = self.dedent("\n matrix:\n - name: html_css\n sources:\n - '{}/**/*.txt'\n aspell:\n lang: en\n d: en_US\n hunspell:\n d: en_US\n pipeline:\n - pyspelling.filters.html:\n mode: html5\n ignores:\n - 'div:has(> :not(.bbbb, .ffff, .jjjj))'\n ").format(self.tempdir)
        self.mktemp('.html5.yml', config, 'utf-8')
        self.assert_spellcheck('.html5.yml', ['aaaa', 'bbbb', 'eeee', 'ffff'])
    def test_not_has_list2(self):
        # :not(:has(list)) ignores divs lacking all listed children.
        config = self.dedent("\n matrix:\n - name: html_css\n sources:\n - '{}/**/*.txt'\n aspell:\n lang: en\n d: en_US\n hunspell:\n d: en_US\n pipeline:\n - pyspelling.filters.html:\n mode: html5\n ignores:\n - 'div:not(:has(> .bbbb, .ffff, .jjjj))'\n ").format(self.tempdir)
        self.mktemp('.html5.yml', config, 'utf-8')
        self.assert_spellcheck('.html5.yml', ['aaaa', 'bbbb', 'eeee', 'ffff', 'iiii', 'jjjj'])
    def test_not_not(self):
        # Double negation :not(:not(.aaaa)) reduces to matching .aaaa.
        config = self.dedent("\n matrix:\n - name: html_css\n sources:\n - '{}/**/*.txt'\n aspell:\n lang: en\n d: en_US\n hunspell:\n d: en_US\n pipeline:\n - pyspelling.filters.html:\n mode: html5\n ignores:\n - 'div:not(:not(.aaaa))'\n ").format(self.tempdir)
        self.mktemp('.html5.yml', config, 'utf-8')
        self.assert_spellcheck('.html5.yml', ['cccc', 'dddd', 'eeee', 'ffff', 'gggg', 'hhhh', 'iiii', 'jjjj'])
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.