code stringlengths 281 23.7M |
|---|
class OptionSeriesWordcloudMarkerStates(Options):
    """Marker state sub-options (hover/normal/select) for a wordcloud series."""
    def hover(self) -> 'OptionSeriesWordcloudMarkerStatesHover':
        # Sub-configuration applied while a point is hovered.
        return self._config_sub_data('hover', OptionSeriesWordcloudMarkerStatesHover)
    def normal(self) -> 'OptionSeriesWordcloudMarkerStatesNormal':
        # Sub-configuration applied in the default (normal) state.
        return self._config_sub_data('normal', OptionSeriesWordcloudMarkerStatesNormal)
    def select(self) -> 'OptionSeriesWordcloudMarkerStatesSelect':
        # Sub-configuration applied while a point is selected.
        return self._config_sub_data('select', OptionSeriesWordcloudMarkerStatesSelect)
class OptionPlotoptionsPackedbubbleSonificationContexttracks(Options):
    """Sonification context-track options for packed-bubble plots.

    NOTE(review): every option below appears as a getter/setter pair sharing
    one name; the @property/@<name>.setter (or generator-equivalent)
    decorators appear to have been stripped — as written, the second ``def``
    silently shadows the first. Confirm against the original generated source.
    """
    def activeWhen(self) -> 'OptionPlotoptionsPackedbubbleSonificationContexttracksActivewhen':
        # Sub-configuration controlling when this track is active.
        return self._config_sub_data('activeWhen', OptionPlotoptionsPackedbubbleSonificationContexttracksActivewhen)
    def instrument(self):
        # Default instrument preset: 'piano'.
        return self._config_get('piano')
    def instrument(self, text: str):
        self._config(text, js_type=False)
    def mapping(self) -> 'OptionPlotoptionsPackedbubbleSonificationContexttracksMapping':
        # Sub-configuration mapping data values to audio parameters.
        return self._config_sub_data('mapping', OptionPlotoptionsPackedbubbleSonificationContexttracksMapping)
    def midiName(self):
        # No default MIDI track name.
        return self._config_get(None)
    def midiName(self, text: str):
        self._config(text, js_type=False)
    def pointGrouping(self) -> 'OptionPlotoptionsPackedbubbleSonificationContexttracksPointgrouping':
        return self._config_sub_data('pointGrouping', OptionPlotoptionsPackedbubbleSonificationContexttracksPointgrouping)
    def roundToMusicalNotes(self):
        # Default: snap pitches to musical notes.
        return self._config_get(True)
    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)
    def showPlayMarker(self):
        # Default: show the play marker while sonifying.
        return self._config_get(True)
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)
    def timeInterval(self):
        return self._config_get(None)
    def timeInterval(self, num: float):
        self._config(num, js_type=False)
    def type(self):
        # Default track type: 'instrument'.
        return self._config_get('instrument')
    def type(self, text: str):
        self._config(text, js_type=False)
    def valueInterval(self):
        return self._config_get(None)
    def valueInterval(self, num: float):
        self._config(num, js_type=False)
    def valueMapFunction(self):
        # Default mapping curve: 'linear'.
        return self._config_get('linear')
    def valueMapFunction(self, value: Any):
        self._config(value, js_type=False)
    def valueProp(self):
        # Default source property (note: the default is the quoted string '"x"').
        return self._config_get('"x"')
    def valueProp(self, text: str):
        self._config(text, js_type=False)
def test_pagination_query(db: DatabaseManager, Model: Type[BaseModel]):
    """Paginating 30 users at 10 per page yields a full first page and 3 pages total."""
    class User(Model):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    db.create_all()

    # Seed thirty rows in a single session.
    with db.session() as session:
        for idx in range(30):
            session.add(User(name=f'User {idx}'))

    # Query the first page and verify its size and the page count.
    with db.session() as session:
        first_page = session.query(User).paginate_query(page=1, per_page=10)
        assert len(first_page.items) == 10
        assert first_page.total_pages == 3
def _drop_relation(adapter: SQLAdapter, relation: BaseRelation):
    """Drop *relation* through *adapter* inside a named connection, serialized by the cache lock."""
    conn_ctx = adapter.connection_named(_connection_name('drop_relation', relation))
    with conn_ctx, _cache_lock('_drop_relation'):
        adapter.connections.begin()
        # Flush the relations cache before dropping so no stale entry can
        # refer to the removed relation.
        _clear_relations_cache(adapter)
        adapter.drop_relation(relation)
        adapter.commit_if_has_connection()
def css_section(code: str, pos: int, properties=False) -> CSSSection:
    """Find the CSS section at *pos* and convert its property fields to regions in place."""
    section = get_css_section(code, pos, properties)
    if not (section and section.properties):
        return section
    for prop in section.properties:
        # Rewrite name, value and every value token into editor regions.
        prop.name = to_region(prop.name)
        prop.value = to_region(prop.value)
        prop.value_tokens = [to_region(token) for token in prop.value_tokens]
    return section
class ESP32FirmwareImage(BaseFirmwareImage):
    """ESP32 firmware image reader/writer.

    Image layout: common header, 16-byte extended header, segments (flash
    segments padded so their file offsets line up with their 64KB flash
    mappings), a checksum, and optionally an appended SHA-256 digest.

    Fix vs. original: the append_digest validation error used a comma instead
    of %-formatting, so the message was never interpolated.
    """
    ROM_LOADER = ESP32ROM
    # Extended-header value meaning "write-protect pin not configured".
    WP_PIN_DISABLED = 238
    # wp_pin, 3 drive-strength bytes, chip_id, min_rev, min_rev_full,
    # max_rev_full, 4 reserved bytes, append_digest flag (16 bytes total).
    EXTENDED_HEADER_STRUCT_FMT = (('<BBBBHBHH' + ('B' * 4)) + 'B')
    # Flash is memory-mapped in 64KB pages; flash segments must align to this.
    IROM_ALIGN = 65536
    def __init__(self, load_file=None, append_digest=True, ram_only_header=False):
        """Create an empty image, or parse one from *load_file* if given.

        :param load_file: optional binary file object to parse the image from.
        :param append_digest: whether a SHA-256 digest follows the checksum.
        :param ram_only_header: header counts only RAM segments (flash
            segments are appended after the checksum).
        """
        super(ESP32FirmwareImage, self).__init__()
        self.secure_pad = None
        self.flash_mode = 0
        self.flash_size_freq = 0
        self.version = 1
        self.wp_pin = self.WP_PIN_DISABLED
        # SPI pin drive strengths (low nibbles of the extended header bytes).
        self.clk_drv = 0
        self.q_drv = 0
        self.d_drv = 0
        self.cs_drv = 0
        self.hd_drv = 0
        self.wp_drv = 0
        self.chip_id = 0
        self.min_rev = 0
        self.min_rev_full = 0
        self.max_rev_full = 0
        self.ram_only_header = ram_only_header
        self.append_digest = append_digest
        if (load_file is not None):
            start = load_file.tell()
            segments = self.load_common_header(load_file, ESPLoader.ESP_IMAGE_MAGIC)
            self.load_extended_header(load_file)
            for _ in range(segments):
                self.load_segment(load_file)
            self.checksum = self.read_checksum(load_file)
            if self.append_digest:
                # Recompute the digest over everything before the stored one
                # so verify() can compare them.
                end = load_file.tell()
                self.stored_digest = load_file.read(32)
                load_file.seek(start)
                calc_digest = hashlib.sha256()
                calc_digest.update(load_file.read((end - start)))
                self.calc_digest = calc_digest.digest()
            self.verify()
    def is_flash_addr(self, addr):
        """Return True if *addr* falls inside the chip's IROM or DROM mapping."""
        return ((self.ROM_LOADER.IROM_MAP_START <= addr < self.ROM_LOADER.IROM_MAP_END) or (self.ROM_LOADER.DROM_MAP_START <= addr < self.ROM_LOADER.DROM_MAP_END))
    def default_output_name(self, input_file):
        """Default .bin filename derived from the input file's stem."""
        return ('%s.bin' % os.path.splitext(input_file)[0])
    def warn_if_unusual_segment(self, offset, size, is_irom_segment):
        # All segment placements are legal on ESP32; nothing to warn about.
        pass
    def save(self, filename):
        """Assemble the image in memory and write it to *filename*."""
        total_segments = 0
        with io.BytesIO() as f:
            self.write_common_header(f, self.segments)
            self.save_extended_header(f)
            checksum = ESPLoader.ESP_CHECKSUM_MAGIC
            # Split segments into flash-mapped and RAM-loaded, sorted by address.
            flash_segments = [copy.deepcopy(s) for s in sorted(self.segments, key=(lambda s: s.addr)) if self.is_flash_addr(s.addr)]
            ram_segments = [copy.deepcopy(s) for s in sorted(self.segments, key=(lambda s: s.addr)) if (not self.is_flash_addr(s.addr))]
            # The app/boot descriptors must come first in their groups.
            for segment in flash_segments:
                if (isinstance(segment, ELFSection) and (segment.name == '.flash.appdesc')):
                    flash_segments.remove(segment)
                    flash_segments.insert(0, segment)
                    break
            for segment in ram_segments:
                if (segment.name == '.dram0.bootdesc'):
                    ram_segments.remove(segment)
                    ram_segments.insert(0, segment)
                    break
            # Two flash segments in the same 64KB page cannot both be mapped.
            if (len(flash_segments) > 0):
                last_addr = flash_segments[0].addr
                for segment in flash_segments[1:]:
                    if ((segment.addr // self.IROM_ALIGN) == (last_addr // self.IROM_ALIGN)):
                        raise FatalError(("Segment loaded at 0x%08x lands in same 64KB flash mapping as segment loaded at 0x%08x. Can't generate binary. Suggest changing linker script or ELF to merge sections." % (segment.addr, last_addr)))
                    last_addr = segment.addr
            def get_alignment_data_needed(segment):
                # Padding needed so the segment's data (after its 8-byte
                # header) starts at a file offset congruent with its flash
                # address modulo IROM_ALIGN.
                align_past = ((segment.addr % self.IROM_ALIGN) - self.SEG_HEADER_LEN)
                pad_len = ((self.IROM_ALIGN - (f.tell() % self.IROM_ALIGN)) + align_past)
                if ((pad_len == 0) or (pad_len == self.IROM_ALIGN)):
                    return 0
                # Account for the pad segment's own header.
                pad_len -= self.SEG_HEADER_LEN
                if (pad_len < 0):
                    pad_len += self.IROM_ALIGN
                return pad_len
            if self.ram_only_header:
                # RAM segments (counted in the header) come first, then the
                # checksum, then flash segments outside the checksummed area.
                for segment in ram_segments:
                    checksum = self.save_segment(f, segment, checksum)
                    total_segments += 1
                self.append_checksum(f, checksum)
                flash_segments.reverse()
                for segment in flash_segments:
                    pad_len = get_alignment_data_needed(segment)
                    while (pad_len > 0):
                        # No checksum update: these bytes follow the checksum.
                        pad_segment = ImageSegment(0, (b'\x00' * pad_len), f.tell())
                        self.save_segment(f, pad_segment)
                        total_segments += 1
                        pad_len = get_alignment_data_needed(segment)
                    assert (((f.tell() + 8) % self.IROM_ALIGN) == (segment.addr % self.IROM_ALIGN))
                    self.save_flash_segment(f, segment)
                    total_segments += 1
            else:
                while (len(flash_segments) > 0):
                    segment = flash_segments[0]
                    pad_len = get_alignment_data_needed(segment)
                    if (pad_len > 0):
                        # Prefer filling the gap with real RAM data instead of
                        # zero padding when a RAM segment fits.
                        if ((len(ram_segments) > 0) and (pad_len > self.SEG_HEADER_LEN)):
                            pad_segment = ram_segments[0].split_image(pad_len)
                            if (len(ram_segments[0].data) == 0):
                                ram_segments.pop(0)
                        else:
                            pad_segment = ImageSegment(0, (b'\x00' * pad_len), f.tell())
                        checksum = self.save_segment(f, pad_segment, checksum)
                        total_segments += 1
                    else:
                        assert (((f.tell() + 8) % self.IROM_ALIGN) == (segment.addr % self.IROM_ALIGN))
                        checksum = self.save_flash_segment(f, segment, checksum)
                        flash_segments.pop(0)
                        total_segments += 1
                for segment in ram_segments:
                    checksum = self.save_segment(f, segment, checksum)
                    total_segments += 1
            if self.secure_pad:
                # Pad so the signature block lands flush with a 64KB boundary.
                if (not self.append_digest):
                    raise FatalError('secure_pad only applies if a SHA-256 digest is also appended to the image')
                align_past = ((f.tell() + self.SEG_HEADER_LEN) % self.IROM_ALIGN)
                checksum_space = 16
                if (self.secure_pad == '1'):
                    # Secure boot V1: SHA-256 + version + signature + trailer.
                    space_after_checksum = (((32 + 4) + 64) + 12)
                elif (self.secure_pad == '2'):
                    # Secure boot V2: only the SHA-256 digest goes here.
                    space_after_checksum = 32
                pad_len = ((((self.IROM_ALIGN - align_past) - checksum_space) - space_after_checksum) % self.IROM_ALIGN)
                pad_segment = ImageSegment(0, (b'\x00' * pad_len), f.tell())
                checksum = self.save_segment(f, pad_segment, checksum)
                total_segments += 1
            if (not self.ram_only_header):
                self.append_checksum(f, checksum)
            image_length = f.tell()
            if self.secure_pad:
                assert (((image_length + space_after_checksum) % self.IROM_ALIGN) == 0)
            # Patch the real segment count into byte 1 of the common header.
            f.seek(1)
            if self.ram_only_header:
                f.write(bytes([len(ram_segments)]))
            else:
                f.write(bytes([total_segments]))
            if self.append_digest:
                # Digest covers the whole image including the patched count.
                f.seek(0)
                digest = hashlib.sha256()
                digest.update(f.read(image_length))
                f.write(digest.digest())
            if self.pad_to_size:
                image_length = f.tell()
                if ((image_length % self.pad_to_size) != 0):
                    pad_by = (self.pad_to_size - (image_length % self.pad_to_size))
                    f.write((b'\xff' * pad_by))
            with open(filename, 'wb') as real_file:
                real_file.write(f.getvalue())
    def load_extended_header(self, load_file):
        """Parse the 16-byte ESP32 extended header from *load_file*."""
        def split_byte(n):
            # (low nibble, high nibble)
            return ((n & 15), ((n >> 4) & 15))
        fields = list(struct.unpack(self.EXTENDED_HEADER_STRUCT_FMT, load_file.read(16)))
        self.wp_pin = fields[0]
        (self.clk_drv, self.q_drv) = split_byte(fields[1])
        (self.d_drv, self.cs_drv) = split_byte(fields[2])
        (self.hd_drv, self.wp_drv) = split_byte(fields[3])
        self.chip_id = fields[4]
        if (self.chip_id != self.ROM_LOADER.IMAGE_CHIP_ID):
            print(('Unexpected chip id in image. Expected %d but value was %d. Is this image for a different chip model?' % (self.ROM_LOADER.IMAGE_CHIP_ID, self.chip_id)))
        self.min_rev = fields[5]
        self.min_rev_full = fields[6]
        self.max_rev_full = fields[7]
        append_digest = fields[(- 1)]
        if (append_digest in [0, 1]):
            self.append_digest = (append_digest == 1)
        else:
            # Fix: format the value into the message ('%' instead of a comma,
            # which left the message uninterpolated).
            raise RuntimeError('Invalid value for append_digest field (0x%02x). Should be 0 or 1.' % append_digest)
    def save_extended_header(self, save_file):
        """Pack and write the 16-byte ESP32 extended header to *save_file*."""
        def join_byte(ln, hn):
            # Combine two nibbles into one byte (ln = low, hn = high).
            return ((ln & 15) + ((hn & 15) << 4))
        append_digest = (1 if self.append_digest else 0)
        fields = [self.wp_pin, join_byte(self.clk_drv, self.q_drv), join_byte(self.d_drv, self.cs_drv), join_byte(self.hd_drv, self.wp_drv), self.ROM_LOADER.IMAGE_CHIP_ID, self.min_rev, self.min_rev_full, self.max_rev_full]
        fields += ([0] * 4)
        fields += [append_digest]
        packed = struct.pack(self.EXTENDED_HEADER_STRUCT_FMT, *fields)
        save_file.write(packed)
class King3DrModel(FunctionModel1DAuto):
    """King (1962) 3D density profile, truncated at the tidal radius rt."""
    xaxisname = 'r'

    def f(self, r, rc=1, rt=2, A=1):
        """Evaluate the profile at radius *r* (core radius rc, tidal radius rt, amplitude A)."""
        rc2 = rc * rc
        # Dimensionless radius combining r, rc and rt.
        z = ((r * r) + rc2) ** 0.5 * ((rt * rt) + rc2) ** (-0.5)
        norm = (A / z / z / pi / rc) * (1 + (rt * rt) / rc2) ** (-1.5)
        profile = norm * ((np.arccos(z) / z) - (1 - (z * z)) ** 0.5)
        # The model is identically zero beyond the tidal radius.
        profile[r >= rt] = 0
        return profile

    def rangehint(self):
        # Sensible plotting range: from the center out to the tidal radius.
        return (0, self.rt)
class EstimateTests(unittest.TestCase):
    """Smoke tests for the Estimate QBO object."""

    def test_unicode(self):
        # str() of an estimate renders its TotalAmt.
        est = Estimate()
        est.TotalAmt = 10
        self.assertEqual('10', str(est))

    def test_valid_object_name(self):
        # The object's qbo_object_name must be recognized by the client.
        qb_client = QuickBooks()
        self.assertTrue(qb_client.isvalid_object_name(Estimate().qbo_object_name))
class TBItemsCharacter(tb_basic.TBBasicCharacter):
    """Turn-based combat character with condition support.

    Extends the basic TB character with per-turn condition effects
    (Regeneration, Poisoned, Haste, Paralyzed) that tick both in combat
    and — via a ticker — out of combat.
    """
    rules = ItemCombatRules()
    def at_object_creation(self):
        # Baseline stats on the persistent attribute store.
        self.db.max_hp = 100
        self.db.hp = self.db.max_hp
        self.db.conditions = {}
        # Tick conditions outside of combat too, every NONCOMBAT_TURN_TIME.
        tickerhandler.add(NONCOMBAT_TURN_TIME, self.at_update, idstring='update')
    def at_turn_start(self):
        """Announce the turn to the player, then apply condition effects."""
        self.msg(("|wIt's your turn! You have %i HP remaining.|n" % self.db.hp))
        self.apply_turn_conditions()
    def apply_turn_conditions(self):
        """Apply all per-turn condition effects to this character."""
        if ('Regeneration' in self.db.conditions):
            to_heal = randint(REGEN_RATE[0], REGEN_RATE[1])
            # Never heal past max_hp.
            if ((self.db.hp + to_heal) > self.db.max_hp):
                to_heal = (self.db.max_hp - self.db.hp)
            self.db.hp += to_heal
            self.location.msg_contents(('%s regains %i HP from Regeneration.' % (self, to_heal)))
        if ('Poisoned' in self.db.conditions):
            to_hurt = randint(POISON_RATE[0], POISON_RATE[1])
            self.rules.apply_damage(self, to_hurt)
            self.location.msg_contents(('%s takes %i damage from being Poisoned.' % (self, to_hurt)))
            # Poison can be lethal.
            if (self.db.hp <= 0):
                self.rules.at_defeat(self)
        if (self.rules.is_in_combat(self) and ('Haste' in self.db.conditions)):
            self.db.combat_actionsleft += 1
            self.msg('You gain an extra action this turn from Haste!')
        if (self.rules.is_in_combat(self) and ('Paralyzed' in self.db.conditions)):
            # Paralysis overrides any actions (including the Haste bonus).
            self.db.combat_actionsleft = 0
            self.location.msg_contents(("%s is Paralyzed, and can't act this turn!" % self))
            self.db.combat_turnhandler.turn_end_check(self)
    def at_update(self):
        """Out-of-combat ticker: tick conditions as if a turn had passed."""
        if (not self.rules.is_in_combat(self)):
            # NOTE(review): slot 1 of each condition entry is (re)set to this
            # character before ticking — presumably the "turn owner" used by
            # condition_tickdown; confirm the condition data layout.
            for key in self.db.conditions:
                self.db.conditions[key][1] = self
            self.apply_turn_conditions()
            self.rules.condition_tickdown(self, self)
class BISTBlockChecker(LiteXModule):
    """CSR front-end for a BIST (built-in self test) block checker.

    Wraps a _BISTBlockChecker core and exposes its control/status signals
    as CSRs so software can reset/start a check run and read back the
    completion flag and error count.
    """
    def __init__(self, random):
        # Incoming 32-bit data stream to verify.
        self.sink = sink = stream.Endpoint([('data', 32)])
        self.reset = CSR()  # write: reset the checker core
        self.start = CSR()  # write: start a check run
        self.done = CSRStatus()  # read: run finished
        self.count = CSRStorage(32, reset=1)  # number of blocks to check
        self.errors = CSRStatus(32)  # read: mismatch count
        self.core = core = _BISTBlockChecker(random)
        # Combinatorial wiring between the CSR bank and the core.
        self.comb += [sink.connect(core.sink), core.reset.eq(self.reset.re), core.start.eq(self.start.re), self.done.status.eq(core.done), core.count.eq(self.count.storage), self.errors.status.eq(core.errors)]
class TraitAddedObserver():
    """Observer that watches the 'trait_added' event so observers can be
    attached to traits created after the fact.

    Instances are value objects: equality and hashing are based on the
    match function and the optional flag.
    """
    def __init__(self, match_func, optional):
        # Predicate (name, trait) -> bool selecting which new traits to observe.
        self.match_func = match_func
        # If True, silently skip objects that lack a 'trait_added' trait.
        self.optional = optional
    def __hash__(self):
        return hash((type(self).__name__, self.match_func, self.optional))
    def __eq__(self, other):
        return ((type(self) is type(other)) and (self.match_func == other.match_func) and (self.optional == other.optional))
    def notify(self):
        # This observer never delivers changes to the user handler directly.
        return False
    def iter_observables(self, object):
        """Yield the 'trait_added' ctrait on *object*, honoring ``optional``."""
        if (not object_has_named_trait(object, 'trait_added')):
            if self.optional:
                return
            raise ValueError("Unable to observe 'trait_added' event on {!r}".format(object))
        (yield object._trait('trait_added', 2))
    def iter_objects(self, object):
        # No child objects to recurse into.
        (yield from ())
    def get_maintainer(self, graph, handler, target, dispatcher):
        """Return the notifier that hooks up observers when new traits appear."""
        return ObserverChangeNotifier(observer_handler=self.observer_change_handler, event_factory=trait_event_factory, prevent_event=self.prevent_event, graph=graph, handler=handler, target=target, dispatcher=dispatcher)
    def prevent_event(self, event):
        """Suppress events for newly added traits the match function rejects."""
        object = event.object
        name = event.new
        trait = object.trait(name=name)
        return (not self.match_func(name, trait))
    # NOTE(review): defined without 'self' — presumably a stripped
    # @staticmethod decorator; confirm against the original source.
    def observer_change_handler(event, graph, handler, target, dispatcher):
        """Attach the wrapped observer graph to the freshly added trait."""
        new_graph = ObserverGraph(node=_RestrictedNamedTraitObserver(name=event.new, wrapped_observer=graph.node), children=graph.children)
        add_or_remove_notifiers(object=event.object, graph=new_graph, handler=handler, target=target, dispatcher=dispatcher, remove=False)
    def iter_extra_graphs(self, graph):
        # No additional observer graphs contributed.
        (yield from ())
def type_dec_to_count_base(m_type):
    """Split a C-style member type declaration into (count, base_type).

    'uint8_t[4]' -> (4, 'uint8_t'); 'uint32_t' -> (1, 'uint32_t').
    Symbolic array sizes are resolved through the ofp_constants table.
    """
    head, bracket, tail = m_type.partition('[')
    if not bracket:
        # Scalar declaration: count of one, type unchanged.
        return (1, m_type)
    count_str = tail.split(']')[0]
    if count_str in ofp_constants:
        size = ofp_constants[count_str]
    else:
        size = int(count_str)
    return (size, head)
class Stacked100(Options):
    """Options for 100%-stacked chart rendering.

    NOTE(review): each option below is a getter/setter pair sharing one name;
    the @property/@<name>.setter decorators appear to have been stripped —
    as written the second ``def`` shadows the first. Confirm upstream.
    """
    def enable(self):
        # Default: enabled.
        return self._config_get(True)
    def enable(self, flag: bool):
        self._config(flag)
    def replaceTooltipLabel(self):
        # Default: tooltip labels are replaced with percentages.
        return self._config_get(True)
    def replaceTooltipLabel(self, flag: bool):
        self._config(flag)
    def fixNegativeScale(self):
        return self._config_get(True)
    def fixNegativeScale(self, flag: bool):
        self._config(flag)
    def individual(self):
        # Default: stacks are normalized together, not individually.
        return self._config_get(False)
    def individual(self, flag: bool):
        self._config(flag)
    def precision(self):
        # Default: one decimal place for percentage values.
        return self._config_get(1)
    def precision(self, num: int):
        self._config(num)
def test_no_amount_with_discount(db):
    """An empty order with a discount code applied totals to zero across the board."""
    ticket = TicketSubFactory(price=100.0)
    code = DiscountCodeTicketSubFactory(type='percent', value=10.0, tickets=[ticket])
    db.session.commit()

    amount = calculate_order_amount([], code.id)

    assert amount['total'] == 0.0
    assert amount['tax'] is None
    assert amount['discount'] == 0.0
    assert amount['tickets'] == []
def test_init_function():
    """A Resource wrapping a plain function runs it once and caches the (None) result."""
    def _init():
        _init.counter += 1
    _init.counter = 0

    provider = providers.Resource(_init)

    # First call initializes the resource.
    assert provider() is None
    assert _init.counter == 1

    # Subsequent calls reuse the initialized resource; _init is not re-run.
    assert provider() is None
    assert _init.counter == 1

    provider.shutdown()
def string_to_dictionary(string, true_agency_name):
    """Map a rendering label to its component dictionary based on dash count."""
    segments = len(string.split('-'))
    if segments == 1:
        # No dashes: the whole string is the agency value.
        return {true_agency_name: string}
    if segments == 2:
        # Two components: a federal-account label.
        return FederalAccount.fa_rendering_label_to_component_dictionary(string)
    # Anything longer is a TAS label.
    return TreasuryAppropriationAccount.tas_rendering_label_to_component_dictionary(string)
()
def graph_with_edge_condition(aliased_variable_y, aliased_variable_z, aliased_variable_x, variable_v) -> Tuple[(List[BasicBlock], ControlFlowGraph)]:
    """Build a 9-node CFG fixture: a y > 5 guard into an indirect switch over
    aliased_variable_y, six arithmetic cases merging through a Phi, then a
    final printf/return block.

    NOTE(review): the bare ``()`` preceding this function in the file looks
    like a stripped decorator (presumably ``@pytest.fixture()``); confirm.
    """
    instructions = [Assignment(ListOperation([]), Call(imp_function_symbol('printf'), [Constant('Enter your choice = ')])), Assignment(ListOperation([]), Call(imp_function_symbol('scanf'), [Constant(), UnaryOperation(OperationType.address, [aliased_variable_y[0]])])), Assignment(ListOperation([]), Call(imp_function_symbol('puts'), [Constant('Enter a number ')])), Assignment(ListOperation([]), Call(imp_function_symbol('scanf'), [Constant(), UnaryOperation(OperationType.address, [aliased_variable_z[0]])])), Assignment(ListOperation([]), Call(imp_function_symbol('puts'), [Constant('Enter a second number ')])), Assignment(ListOperation([]), Call(imp_function_symbol('scanf'), [Constant(), UnaryOperation(OperationType.address, [aliased_variable_x[0]])])), Branch(Condition(OperationType.greater, [aliased_variable_y[0], Constant(5)])), IndirectBranch(aliased_variable_y[0]), Assignment(ListOperation([]), Call(imp_function_symbol('puts'), [Constant('default !')])), Assignment(variable_v[1], BinaryOperation(OperationType.multiply, [BinaryOperation(OperationType.plus, [aliased_variable_z[0], Constant(1)]), aliased_variable_x[0]])), Assignment(variable_v[2], BinaryOperation(OperationType.plus, [BinaryOperation(OperationType.plus, [aliased_variable_z[0], Constant(2)]), aliased_variable_x[0]])), Assignment(variable_v[3], BinaryOperation(OperationType.minus, [aliased_variable_x[0], BinaryOperation(OperationType.plus, [aliased_variable_z[0], Constant(3)])])), Assignment(variable_v[4], BinaryOperation(OperationType.minus, [BinaryOperation(OperationType.plus, [aliased_variable_z[0], Constant(4)]), aliased_variable_x[0]])), Assignment(variable_v[5], BinaryOperation(OperationType.multiply, [Constant(2), BinaryOperation(OperationType.plus, [BinaryOperation(OperationType.plus, [aliased_variable_z[0], Constant(4)]), aliased_variable_x[0]])])), Phi(variable_v[6], [Constant(0), variable_v[1], variable_v[2], variable_v[3], variable_v[4], variable_v[5]]), Assignment(ListOperation([]), 
    Call(imp_function_symbol('printf'), [Constant('a = %d '), variable_v[6]])), Return(Constant(0))]
    # Node 0: prompts/reads + guard; node 1: indirect switch dispatch;
    # nodes 2-7: default plus the five switch cases; node 8: Phi merge + exit.
    nodes = [BasicBlock(i) for i in range(9)]
    nodes[0].instructions = instructions[0:7]
    nodes[1].instructions = [instructions[7]]
    nodes[2].instructions = [instructions[8]]
    nodes[3].instructions = [instructions[9]]
    nodes[4].instructions = [instructions[10]]
    nodes[5].instructions = [instructions[11]]
    nodes[6].instructions = [instructions[12]]
    nodes[7].instructions = [instructions[13]]
    nodes[8].instructions = instructions[14:]
    # Record which predecessor contributes which Phi operand.
    instructions[14]._origin_block = {nodes[2]: Constant(0), nodes[3]: variable_v[1], nodes[4]: variable_v[2], nodes[5]: variable_v[3], nodes[6]: variable_v[4], nodes[7]: variable_v[5]}
    cfg = ControlFlowGraph()
    # Guard edges, the six switch-case edges, and the merges into node 8.
    cfg.add_edges_from([TrueCase(nodes[0], nodes[3]), FalseCase(nodes[0], nodes[1]), SwitchCase(nodes[1], nodes[2], [Constant(0)]), SwitchCase(nodes[1], nodes[3], [Constant(1)]), SwitchCase(nodes[1], nodes[4], [Constant(2)]), SwitchCase(nodes[1], nodes[5], [Constant(3)]), SwitchCase(nodes[1], nodes[6], [Constant(4)]), SwitchCase(nodes[1], nodes[7], [Constant(5)]), UnconditionalEdge(nodes[2], nodes[8]), UnconditionalEdge(nodes[3], nodes[8]), UnconditionalEdge(nodes[4], nodes[8]), UnconditionalEdge(nodes[5], nodes[8]), UnconditionalEdge(nodes[6], nodes[8]), UnconditionalEdge(nodes[7], nodes[8])])
    return (nodes, cfg)
class _FormatRenderer():
def __init__(self, fmt: str, colors: bool=True, force_colors: bool=False):
if (colors is True):
if force_colors:
colorama.deinit()
colorama.init(strip=False)
else:
colorama.init()
self._level_to_color = {'critical': colorama.Fore.RED, 'exception': colorama.Fore.RED, 'error': colorama.Fore.RED, 'warn': colorama.Fore.YELLOW, 'warning': colorama.Fore.YELLOW, 'info': colorama.Fore.GREEN, 'debug': colorama.Fore.WHITE, 'notset': colorama.Back.RED}
self._reset = colorama.Style.RESET_ALL
else:
self._level_to_color = {'critical': '', 'exception': '', 'error': '', 'warn': '', 'warning': '', 'info': '', 'debug': '', 'notset': ''}
self._reset = ''
self._vformat = string.Formatter().vformat
self._fmt = fmt
def __call__(self, _, __, event_dict):
message = StringIO()
event_dict['log_color_reset'] = self._reset
if ('level' in event_dict):
level = event_dict['level']
if (level in self._level_to_color):
event_dict['log_color'] = self._level_to_color[level]
else:
event_dict['log_color'] = ''
event_dict['level_uc'] = level.upper()
else:
event_dict['log_color'] = ''
if ('timestamp' in event_dict):
event_dict['timestamp_local_ctime'] = datetime.fromtimestamp(event_dict['timestamp']).ctime()
message.write(self._vformat(self._fmt, [], event_dict))
stack = event_dict.pop('stack', None)
exception = event_dict.pop('exception', None)
if (stack is not None):
message.write(('\n' + stack))
if (exception is not None):
message.write(('\n' + exception))
message.write(self._reset)
return message.getvalue() |
class Reader(object):
    """Iterator yielding MRT records from a binary file-like object."""

    def __init__(self, f):
        self._f = f

    def __iter__(self):
        return self

    def next(self):
        """Read and return the next MrtRecord; raise StopIteration at EOF."""
        header = self._f.read(MrtRecord.HEADER_SIZE)
        if len(header) < MrtRecord.HEADER_SIZE:
            raise StopIteration()
        # Rewind so the full record (header included) is read in one pass.
        self._f.seek(-MrtRecord.HEADER_SIZE, 1)
        body = self._f.read(MrtRecord.parse_pre(header))
        record, _ = MrtRecord.parse(body)
        return record
    __next__ = next

    def close(self):
        self._f.close()

    def __del__(self):
        # Best effort: release the underlying file when garbage collected.
        self.close()
class HTML_Message_Handler(File_Based_Message_Handler):
    """Message handler that renders MISS_HIT diagnostics into an HTML report."""
    def __init__(self, tool_id, filename):
        super().__init__(tool_id, filename)
        # Tracks the file of the last emitted message so per-file <h2>
        # headings are written only once.
        self.last_file = None
    def fork(self):
        """Return a fresh handler writing to the same report file."""
        rv = HTML_Message_Handler(self.tool_id, self.filename)
        self.fork_copy_attributes(rv)
        return rv
    def setup_fd(self):
        """Lazily open the output file and write the HTML preamble (idempotent)."""
        if (self.fd is not None):
            return
        self.fd = open(self.filename, 'w', encoding='UTF-8')
        self.fd.write('<!DOCTYPE html>\n')
        self.fd.write('<html>\n')
        self.fd.write('<head>\n')
        self.fd.write('<meta charset="UTF-8">\n')
        # Stylesheet is referenced relative to the report's location so the
        # report remains viewable from disk.
        self.fd.write(('<link rel="stylesheet" href="file:%s">\n' % os.path.relpath(os.path.join(sys.path[0], 'docs', 'style.css'), os.path.dirname(pathutil.abspath(self.filename))).replace('\\', '/')))
        self.fd.write('<title>MISS_HIT Report</title>\n')
        self.fd.write('</head>\n')
        self.fd.write('<body>\n')
        self.fd.write('<header>MISS_HIT Report</header>\n')
        self.fd.write('<main>\n')
        self.fd.write('<div></div>\n')
        self.fd.write('<h1>Issues identified</h1>\n')
        self.fd.write('<section>\n')
    def emit_message(self, message):
        """Write one diagnostic as an HTML div with a matlab:opentoline link."""
        self.setup_fd()
        if (self.last_file != message.location.filename):
            self.last_file = message.location.filename
            self.fd.write(('<h2>%s</h2>\n' % message.location.filename))
        mtext = message.message
        if (message.fixed and self.autofix):
            mtext += ' [fixed]'
        self.fd.write('<div class="message">')
        # Link precision degrades gracefully: column, then line, then file.
        if message.location.col_start:
            self.fd.write(('<a href="matlab:opentoline(\'%s\', %u, %u)">' % (message.location.filename, message.location.line, (message.location.col_start + 1))))
            self.fd.write(('%s: line %u:' % (message.location.filename, message.location.line)))
        elif message.location.line:
            self.fd.write(('<a href="matlab:opentoline(\'%s\', %u)">' % (message.location.filename, message.location.line)))
            self.fd.write(('%s: line %u:' % (message.location.filename, message.location.line)))
        else:
            self.fd.write(('<a href="matlab:opentoline(\'%s\')">' % message.location.filename))
            self.fd.write(('%s:' % message.location.filename))
        self.fd.write('</a>')
        if (message.kind == 'check'):
            self.fd.write((' %s (%s):' % (message.kind, message.severity)))
        else:
            self.fd.write((' %s:' % message.kind))
        # NOTE(review): the raw message (not mtext with its ' [fixed]' suffix)
        # is what gets escaped and written — confirm this is intentional.
        self.fd.write((' %s' % html.escape(message.message)))
        self.fd.write('</div>\n')
    def emit_summary(self):
        """Finalize the report: closing markup, then close the file."""
        self.setup_fd()
        super().emit_summary()
        if (not (self.style_issues or self.warnings or self.errors)):
            self.fd.write('<div>Everything is fine :)</div>')
        self.fd.write('</section>\n')
        self.fd.write('</main>\n')
        self.fd.write('</body>\n')
        self.fd.write('</html>\n')
        self.fd.close()
        self.fd = None
class OFPTableFeaturesStats(StringifyMixin):
    """OpenFlow table-features stats body describing one flow table."""
    # 'name' is treated as UTF-8 when stringified.
    _TYPE = {'utf-8': ['name']}
    def __init__(self, table_id=None, command=None, features=None, name=None, metadata_match=None, metadata_write=None, capabilities=None, max_entries=None, properties=None, length=None):
        super(OFPTableFeaturesStats, self).__init__()
        self.length = length
        self.table_id = table_id
        self.command = command
        self.features = features
        self.name = name
        self.metadata_match = metadata_match
        self.metadata_write = metadata_write
        self.capabilities = capabilities
        self.max_entries = max_entries
        self.properties = properties
    def parser(cls, buf, offset):
        """Parse one table-features entry from *buf* at *offset*.

        NOTE(review): takes ``cls`` and calls ``cls()`` — presumably a
        stripped @classmethod decorator; confirm against the original source.
        """
        tbl = cls()
        (tbl.length, tbl.table_id, tbl.command, tbl.features, name, tbl.metadata_match, tbl.metadata_write, tbl.capabilities, tbl.max_entries) = struct.unpack_from(ofproto.OFP_TABLE_FEATURES_PACK_STR, buf, offset)
        # The on-wire name is fixed-size and NUL-padded.
        tbl.name = name.rstrip(b'\x00')
        props = []
        # Properties occupy the remainder of this entry's 'length' bytes.
        rest = buf[(offset + ofproto.OFP_TABLE_FEATURES_SIZE):(offset + tbl.length)]
        while rest:
            (p, rest) = OFPTableFeatureProp.parse(rest)
            props.append(p)
        tbl.properties = props
        return tbl
    def serialize(self):
        """Serialize this entry (fixed header + properties); updates self.length."""
        bin_props = bytearray()
        for p in self.properties:
            bin_props += p.serialize()
        self.length = (ofproto.OFP_TABLE_FEATURES_SIZE + len(bin_props))
        buf = bytearray()
        msg_pack_into(ofproto.OFP_TABLE_FEATURES_PACK_STR, buf, 0, self.length, self.table_id, self.command, self.features, self.name, self.metadata_match, self.metadata_write, self.capabilities, self.max_entries)
        return (buf + bin_props)
_filter()
def time_remaining(t):
    """Render the delta between millisecond timestamp *t* and now() as text.

    Future times read like '5 minutes'; past times get an ' ago' suffix.
    """
    delta = (t - now())
    past = delta < 0
    if past:
        delta = -delta

    # Millisecond spans for each unit.
    ms_per_second = 1000
    ms_per_minute = 1000 * 60
    ms_per_hour = 1000 * 60 * 60
    ms_per_day = 1000 * 60 * 60 * 24

    # Peel off whole days, hours, minutes and seconds in turn.
    days = int(delta // ms_per_day)
    delta -= days * ms_per_day
    hours = int(delta // ms_per_hour)
    delta -= hours * ms_per_hour
    minutes = int(delta // ms_per_minute)
    delta -= minutes * ms_per_minute
    seconds = int(delta // ms_per_second)
    delta -= seconds * ms_per_second

    def plural(n):
        return '' if n == 1 else 's'

    if not (days or hours or minutes):
        # Under a minute: report seconds only.
        text = '{} second{}'.format(seconds, plural(seconds))
    else:
        text = ''
        if days > 0:
            text += '{} day{}'.format(days, plural(days))
            # ', ' when hours follow, otherwise jump straight to minutes.
            text += ', ' if hours > 0 else ' and '
        if hours > 0:
            text += '{} hour{} and '.format(hours, plural(hours))
        text += '{} minute{}'.format(minutes, plural(minutes))
    if past:
        text += ' ago'
    return text
class Module():
    """Base class for CLI modules: holds option metadata and common plumbing.

    ``module_options`` maps option name -> {'value', 'required', 'help'}.
    """
    module_options: dict

    def print_info(self):
        """Print the module's options as a pretty table."""
        headers = ['Option', 'Value', 'Required', 'Description']
        options = [(k.replace('_', '-'), v['value'], v['required'], v['help']) for (k, v) in self.module_options.items()]
        click.echo(tabulate(options, headers=headers, tablefmt='pretty'))

    def set_options(self, ctx, new_options):
        """Store user-supplied option values; show help if nothing was given."""
        if all(((value is None) for value in new_options.values())):
            return click.echo(ctx.get_help())
        for (k, v) in new_options.items():
            if ((k == 'group_ids') and v):
                # group_ids is comma-separated on the CLI; store it as a list.
                v = list(v.strip().split(','))
                self.module_options[k]['value'] = v
            elif v:
                self.module_options[k]['value'] = v.strip()
            else:
                # None/empty values leave the existing option untouched.
                pass
        return self.module_options

    def reset_options(self):
        """Clear every stored option value and return the option map."""
        for (k, v) in self.module_options.items():
            v['value'] = None
        return self.module_options

    def check_options(self):
        """Return True (error) if any required option lacks a value, else False.

        Bug fix: the original returned from inside the first loop iteration,
        so only the first option was ever inspected (and an empty option map
        raised NameError on the undefined ``error``). Now every option is
        checked and each missing required value is reported.
        """
        error = False
        for (k, v) in self.module_options.items():
            if ((v['required'] is True) and (not v.get('value'))):
                click.secho(f"[!] Unable to execute module. Required value not set: {k.replace('_', '-')}. Set required value and try again", fg='red')
                error = True
        return error
class TestResumeErasureRequestWithManualConfirmation():
    """API tests for resuming a paused erasure privacy request with a manual row count.

    NOTE(review): several pytest/mock decorators appear stripped by
    extraction — the bare ``(scope='function')`` was presumably
    ``@pytest.fixture(scope='function')``, ``.usefixtures(...)`` was
    ``@pytest.mark.usefixtures(...)`` and the bare string call was
    ``@mock.patch(...)``. As written this block does not parse; restore the
    decorators from the original source.
    """
    (scope='function')
    def url(self, privacy_request):
        # Resume endpoint for the fixture's privacy request.
        return (V1_URL_PREFIX + PRIVACY_REQUEST_MANUAL_ERASURE.format(privacy_request_id=privacy_request.id))
    def test_manual_resume_not_authenticated(self, api_client, url):
        # No auth header -> 401.
        response = api_client.post(url, headers={}, json={})
        assert (response.status_code == 401)
    def test_manual_resume_wrong_scope(self, api_client, url, generate_auth_header):
        # Read-only scope cannot resume -> 403.
        auth_header = generate_auth_header(scopes=[PRIVACY_REQUEST_READ])
        response = api_client.post(url, headers=auth_header, json={})
        assert (response.status_code == 403)
    def test_manual_resume_privacy_request_not_paused(self, api_client, url, generate_auth_header, privacy_request):
        # A request that is still in_processing cannot be resumed.
        auth_header = generate_auth_header(scopes=[PRIVACY_REQUEST_CALLBACK_RESUME])
        response = api_client.post(url, headers=auth_header, json={'row_count': 0})
        assert (response.status_code == 400)
        assert (response.json()['detail'] == f"Invalid resume request: privacy request '{privacy_request.id}' status = in_processing. Privacy request is not paused.")
    def test_manual_resume_privacy_request_no_paused_location(self, db, api_client, url, generate_auth_header, privacy_request):
        # Paused status without cached paused-collection details -> 400.
        auth_header = generate_auth_header(scopes=[PRIVACY_REQUEST_CALLBACK_RESUME])
        privacy_request.status = PrivacyRequestStatus.paused
        privacy_request.save(db)
        response = api_client.post(url, headers=auth_header, json={'row_count': 0})
        assert (response.status_code == 400)
        assert (response.json()['detail'] == f"Cannot resume privacy request '{privacy_request.id}'; no paused details.")
        privacy_request.delete(db)
    def test_resume_with_manual_erasure_confirmation_collection_has_changed(self, db, api_client, url, generate_auth_header, privacy_request):
        # Paused collection no longer exists in the dataset graph -> 422.
        auth_header = generate_auth_header(scopes=[PRIVACY_REQUEST_CALLBACK_RESUME])
        privacy_request.status = PrivacyRequestStatus.paused
        privacy_request.save(db)
        privacy_request.cache_paused_collection_details(step=CurrentStep.erasure, collection=CollectionAddress('manual_example', 'filing_cabinet'))
        response = api_client.post(url, headers=auth_header, json={'row_count': 0})
        assert (response.status_code == 422)
        assert (response.json()['detail'] == "Cannot save manual data. No collection in graph with name: 'manual_example:filing_cabinet'.")
        privacy_request.delete(db)
    def test_resume_still_paused_at_access_request(self, db, api_client, url, generate_auth_header, privacy_request):
        # Paused at the access step: this erasure endpoint is the wrong one.
        auth_header = generate_auth_header(scopes=[PRIVACY_REQUEST_CALLBACK_RESUME])
        privacy_request.status = PrivacyRequestStatus.paused
        privacy_request.save(db)
        privacy_request.cache_paused_collection_details(step=CurrentStep.access, collection=CollectionAddress('manual_example', 'filing_cabinet'))
        response = api_client.post(url, headers=auth_header, json={'row_count': 0})
        assert (response.status_code == 400)
        assert (response.json()['detail'] == "Collection 'manual_example:filing_cabinet' is paused at the access step. Pass in manual data instead to '/privacy-request/{privacy_request_id}/manual_input' to resume.")
        privacy_request.delete(db)
    .usefixtures('postgres_example_test_dataset_config', 'manual_dataset_config')
    ('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
    def test_resume_with_manual_count(self, _, db, api_client, url, generate_auth_header, privacy_request):
        # Happy path: row count accepted, request goes back to in_processing.
        auth_header = generate_auth_header(scopes=[PRIVACY_REQUEST_CALLBACK_RESUME])
        privacy_request.status = PrivacyRequestStatus.paused
        privacy_request.save(db)
        privacy_request.cache_paused_collection_details(step=CurrentStep.erasure, collection=CollectionAddress('manual_input', 'filing_cabinet'))
        response = api_client.post(url, headers=auth_header, json={'row_count': 5})
        assert (response.status_code == 200)
        db.refresh(privacy_request)
        assert (privacy_request.status == PrivacyRequestStatus.in_processing)
        privacy_request.delete(db)
class RaindropsGame():
    """Top-level game object: owns the window, the clock and the raindrop sprites."""

    def __init__(self):
        pygame.init()
        self.clock = pygame.time.Clock()
        self.settings = Settings()
        self.screen = pygame.display.set_mode(
            (self.settings.screen_width, self.settings.screen_height))
        pygame.display.set_caption('Raindrops')
        self.raindrops = pygame.sprite.Group()
        self._create_drops()

    def run_game(self):
        """Main loop: events, raindrop physics, redraw — capped at 60 FPS."""
        while True:
            self._check_events()
            self._update_raindrops()
            self._update_screen()
            self.clock.tick(60)

    def _check_events(self):
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                sys.exit()
            elif event.type == pygame.KEYDOWN:
                self._check_keydown_events(event)

    def _check_keydown_events(self, event):
        # Pressing 'q' quits the game.
        if event.key == pygame.K_q:
            sys.exit()

    def _create_drops(self):
        """Fill the screen with a grid of drops spaced one drop apart."""
        template = Raindrop(self)
        width, height = template.rect.size
        row_y = height
        while row_y < (self.settings.screen_height - 2 * height):
            col_x = width
            while col_x < (self.settings.screen_width - 2 * width):
                self._create_drop(col_x, row_y)
                col_x += 2 * width
            row_y += 2 * height

    def _create_drop(self, x_position, y_position):
        """Place one raindrop at the given pixel position."""
        drop = Raindrop(self)
        drop.y = y_position
        drop.rect.x = x_position
        drop.rect.y = y_position
        self.raindrops.add(drop)

    def _create_new_row(self):
        """Spawn a fresh row of drops just above the top edge of the screen."""
        template = Raindrop(self)
        width, height = template.rect.size
        col_x, row_y = width, -height
        while col_x < (self.settings.screen_width - 2 * width):
            self._create_drop(col_x, row_y)
            col_x += 2 * width

    def _update_raindrops(self):
        """Advance all drops; recycle a new row when any fall off-screen."""
        self.raindrops.update()
        vanished = [drop for drop in self.raindrops.copy() if drop.check_disappeared()]
        for drop in vanished:
            self.raindrops.remove(drop)
        if vanished:
            self._create_new_row()

    def _update_screen(self):
        self.screen.fill(self.settings.bg_color)
        self.raindrops.draw(self.screen)
        pygame.display.flip()
def generate_payload(provider, generator, filtering, verify_name=True, verify_size=True):
    """Build the list of torrent result payloads for one provider.

    Iterates ``generator``'s (id, name, info_hash, uri, size, seeds, peers)
    tuples, keeps the entries accepted by ``filtering.verify``, decorates
    each with provider metadata and sort keys, and returns the results
    after ``cleanup_results``.
    """
    filtering.information(provider)
    payload = []
    definition = get_alias(definitions[provider], get_setting('%s_alias' % provider))
    for result_id, name, info_hash, uri, size, seeds, peers in generator:
        size = clean_size(size)
        checked_name = name if verify_name else filtering.title
        checked_size = size if verify_size else None
        if not filtering.verify(provider, checked_name, checked_size):
            log.debug(filtering.reason)
            continue
        seeds_count = get_int(seeds)
        resolution_rank = filtering.determine_resolution(checked_name)[1] + 1
        payload.append({
            'id': result_id,
            'name': name,
            'uri': uri,
            'info_hash': info_hash,
            'size': size,
            'seeds': seeds_count,
            'peers': get_int(peers),
            'language': definition['language'] if 'language' in definition else 'en',
            'provider': '[COLOR %s]%s[/COLOR]' % (definition['color'], definition['name']),
            'icon': os.path.join(ADDON_PATH, 'burst', 'providers', 'icons', '%s.png' % provider),
            'sort_resolution': resolution_rank,
            'sort_balance': (seeds_count + 1) * 3 * resolution_rank,
        })
    log.debug('[%s] >>>>>> %s would send %d torrents to Elementum <<<<<<<' % (provider, provider, len(payload)))
    payload = cleanup_results(payload)
    log.debug('[%s] >>>>>> %s would send %d torrents to Elementum after cleanup <<<<<<<' % (provider, provider, len(payload)))
    return payload
def extractPeasKingdom(item):
    """Parse a PeasKingdom feed item into a release message.

    Returns None for previews or titles without chapter/volume info,
    a release message for the known 'second chance' tag, False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    lowered_tags = [tag.lower() for tag in item['tags']]
    if 'second chance' in lowered_tags and (chp or vol):
        return buildReleaseMessageWithType(item, 'Second Chance: a Wonderful New Life', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
    return False
# NOTE(review): the two lines below look like stripped decorators --
# presumably @_command.command(name='all') and @_ctx; confirm against the
# original source.
_command.command(name='all')
_ctx
def all_command(ctx: Context) -> None:
    """Echo every agent item of each known type, grouped by type and sorted."""
    for item_type in ITEM_TYPES:
        details = list_agent_items(ctx, item_type)
        if (not details):
            # Nothing of this type: print no header for it.
            continue
        output = '{}:\n{}'.format((item_type.title() + 's'), format_items(sort_items(details)))
        click.echo(output)
def get_episodes(html, url):
    """Collect tuchong.com episode links found in ``html``, oldest first.

    Titles are prefixed with the numeric episode id. ``url`` is accepted
    for interface compatibility but unused here.
    """
    episodes = []
    pattern = 'href="([^"]+?tuchong\\.com/\\d+/(\\d+)[^"]*)" title="([^"]+)'
    for match in re.finditer(pattern, html):
        ep_url, ep_id, ep_title = match.groups()
        full_title = '{id} - {title}'.format(id=ep_id, title=ep_title)
        episodes.append(Episode(full_title, ep_url))
    episodes.reverse()
    return episodes
def test_combining_char():
    """Texttable should size columns by display width, not code-point count.

    NOTE(review): one row's string is meant to carry a combining character
    (code-point length 2, display width 1), but both rows show a plain 'a'
    in this copy and the expected table borders look garbled -- the
    non-ASCII characters were likely lost in extraction; confirm against
    the original fixture.
    """
    table = Texttable()
    table.set_cols_align(['l', 'r', 'r'])
    table.add_rows([['str', 'code-point\nlength', 'display\nwidth'], ['a', 2, 1], ['a', 1, 1]])
    assert (clean(table.draw()) == u_dedent(' +-----+++\n | str | code-point | display |\n | | length | width |\n +=====+++\n | a | 2 | 1 |\n +-----+++\n | a | 1 | 1 |\n +-----+++\n '))
def test_ford_doc_for_type():
    """Hovering a FORD-documented type yields its signature plus the doc text."""
    string = write_rpc_request(1, 'initialize', {'rootPath': str((test_dir / 'docs'))})
    file_path = ((test_dir / 'docs') / 'test_module_and_type_doc.f90')
    # Hover over `b_t` at line 28, column 11 of the fixture file.
    string += hover_request(file_path, 28, 11)
    (errcode, results) = run_request(string)
    assert (errcode == 0)
    # Expected hover markdown, line by line.
    ref = ((0, '```fortran90'), (1, 'TYPE :: b_t'), (2, '```'), (3, '-----'), (4, 'Doc for b_t'))
    check_return(results[1], ref)
class OptionPlotoptionsScatterSonificationTracksMappingHighpassResonance(Options):
    """Config accessors for the highpass-filter resonance mapping of
    scatter-series sonification tracks.

    NOTE(review): each name is defined twice (getter then setter); in plain
    Python the second definition shadows the first, so these read like
    stripped @property/@<name>.setter decorators -- confirm against the
    code generator's original output.
    """
    def mapFunction(self):
        # Default: no mapping function configured.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        # Default: no mapped data point property.
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def _recursive_generic_validator(typed):
    """Build an attrs validator for a (possibly nested) generic type.

    Handles Tuple/List/Dict GenericAlias types recursively, Enum types via
    an isinstance + membership check, and falls back to a plain isinstance
    validator for concrete types.

    Raises:
        TypeError: for 1-tuples whose single member is itself an iterable
            type, for Dict keys other than str, and for unsupported
            GenericAlias types.
    """
    if (hasattr(typed, '__args__') and (not isinstance(typed, _SpockVariadicGenericAlias))):
        if (_get_name_py_version(typed) == 'Tuple'):
            # Multi-arg tuples validate positionally; a 1-tuple collapses
            # to its single member type.
            member_validator = (typed.__args__ if (len(typed.__args__) > 1) else typed.__args__[0])
            try:
                set_len = len(member_validator)
            except Exception as e:
                # BUG FIX: chain the underlying error (`from e`) instead of
                # discarding it, so the root cause stays visible; the message
                # had a pointless f-prefix (no placeholders).
                raise TypeError("Attempting to use a Tuple of length 1 -- don't use an iterable type as it seems it is not needed") from e
            # Enforce both container type and exact arity, then recurse into
            # each positional member type.
            iterable_validator = attr.validators.and_(instance_of(typed.__origin__), is_len(set_len))
            return_type = ordered_is_instance_deep_iterable(ordered_types=typed.__args__, recurse_callable=_recursive_generic_validator, iterable_validator=iterable_validator)
            return return_type
        elif (_get_name_py_version(typed) == 'List'):
            member_validator = typed.__args__[0]
            iterable_validator = instance_of(typed.__origin__)
            # Recurse so nested generics (e.g. List[List[int]]) validate deeply.
            return_type = attr.validators.deep_iterable(member_validator=_recursive_generic_validator(member_validator), iterable_validator=iterable_validator)
            return return_type
        elif (_get_name_py_version(typed) == 'Dict'):
            (key_type, value_type) = typed.__args__
            if (key_type is not str):
                raise TypeError(f'Unexpected key type of `{str(key_type.__name__)}` when attempting to handle GenericAlias type of Dict -- currently Spock only supports str as keys due to maintaining support for valid TOML and JSON files')
            # Only recurse into the value when it is itself a generic alias.
            if (hasattr(value_type, '__args__') and (not isinstance(typed, _SpockVariadicGenericAlias))):
                return_type = attr.validators.deep_mapping(value_validator=_recursive_generic_validator(value_type), key_validator=instance_of(key_type))
            else:
                return_type = attr.validators.deep_mapping(value_validator=instance_of(value_type), key_validator=instance_of(key_type))
            return return_type
        else:
            raise TypeError(f'Unexpected type of `{str(typed)}` when attempting to handle GenericAlias types')
    elif isinstance(typed, EnumMeta):
        # Enums validate as "instance of the backing type AND a member value".
        (base_type, allowed) = _check_enum_props(typed)
        return_type = attr.validators.and_(instance_of(base_type), attr.validators.in_(allowed))
    else:
        return_type = instance_of(typed)
    return return_type
class RegisterForm(_RegisterForm):
    """Registration form whose password field is removed entirely when SMS
    registration is enabled (the password is then set through another flow)."""
    password = forms.CharField(label=_('Password'), required=True, min_length=6, widget=forms.PasswordInput(render_value=False, attrs={'class': 'input-transparent', 'placeholder': _('New password'), 'required': 'required', 'pattern': '.{6,}', 'title': _('6 characters minimum')}))

    def __init__(self, *args, **kwargs):
        """Drop the password field if the default DC enables SMS registration."""
        super(RegisterForm, self).__init__(*args, **kwargs)
        dc1_settings = DefaultDc().settings
        if dc1_settings.SMS_REGISTRATION_ENABLED:
            del self.fields['password']

    def save(self, *args, **kwargs):
        """Save the user, hashing the submitted password if one was provided.

        NOTE(review): set_password only mutates the in-memory user; whether
        the hash is persisted depends on the caller / super().save(commit=...)
        semantics -- confirm the user is saved again afterwards.
        """
        password = self.cleaned_data.pop('password', None)
        user = super(RegisterForm, self).save(*args, **kwargs)
        if password:
            user.set_password(password)
        return user
class PhysicalLayout(object):
    """Maps participating clients onto physical devices/ranks.

    NOTE(review): `primary_device`, `devices`, `on_cuda`, `ranks` and `world`
    take no arguments and `devices` reads `self.world` without calling it --
    these look like stripped @property accessors; confirm against the
    original source.
    """
    def __init__(self, n_participated, world, world_conf, on_cuda):
        # Number of clients taking part.
        self.n_participated = n_participated
        self._world = self.configure_world(world, world_conf)
        self._on_cuda = on_cuda
        # Rank is assigned later; -1 means "not yet assigned".
        self.rank = (- 1)
    def configure_world(self, world, world_conf):
        """Resolve the device list from an explicit comma-separated `world`
        string or from a `world_conf` spec; exactly one must be given."""
        if (world is not None):
            world_list = world.split(',')
            # Every participant needs a device entry.
            assert (self.n_participated <= len(world_list))
        elif (world_conf is not None):
            return configure_gpu(world_conf)
        else:
            raise RuntimeError('you should at least make sure world or world_conf is not None.')
        return [int(l) for l in world_list]
    def primary_device(self):
        # First device in the configured world.
        return self.devices[0]
    def devices(self):
        return self.world
    def on_cuda(self):
        return self._on_cuda
    def ranks(self):
        # Ranks 0..n_participated inclusive (n_participated + 1 entries).
        return list(range((1 + self.n_participated)))
    def world(self):
        return self._world
    def get_device(self, rank):
        """Device assigned to the given rank (index into the world list)."""
        return self.devices[rank]
    def change_n_participated(self, n_participated):
        """Update the participant count (e.g. between rounds)."""
        self.n_participated = n_participated
class EqOpInt(Node):
    """Equality node over integer operands in the expression graph.

    NOTE(review): the 'fin'/'inc'/'var' finality flags are interpreted from
    usage here ('fin' final, 'inc' non-decreasing, 'var' may change) --
    confirm against the Node base class.
    """
    def __init__(self, predecessors):
        super().__init__(predecessors)

    def forward(self, *args, **kwargs):
        # True iff every argument equals the first one.
        return all([(a == args[0]) for a in args])

    def follow(self, *args, **kwargs):
        # Pairwise equality; unknown (None) if either operand is unknown.
        op1 = args[0]
        op2 = args[1]
        if ((op1 is None) or (op2 is None)):
            return None
        return (op1 == op2)

    def final(self, operand_final, operands=None, result=None, **kwargs):
        """Decide whether the comparison result can still change.

        Returns 'fin' when one side is final and already <= the other side
        that is final or non-decreasing, or when both sides are final;
        otherwise 'var'.
        """
        op1f = operand_final[0]
        op1 = operands[0]
        op2f = operand_final[1]
        op2 = operands[1]
        if ((op1f == 'fin') and (op1 <= op2) and (op2f in ['fin', 'inc'])):
            return 'fin'
        if ((op2f == 'fin') and (op2 <= op1) and (op1f in ['fin', 'inc'])):
            return 'fin'
        if all([(a == 'fin') for a in operand_final]):
            return 'fin'
        return 'var'

    def token_hint(self):
        """Detect the pattern `len(tokens(var)) == limit` and map var -> limit.

        Falls back to the base implementation whenever the predecessors do
        not match exactly one int and one LenOp(TokensOp(Var)).
        """
        num = [n for n in self.predecessors if (type(n) is int)]
        len_op = [n for n in self.predecessors if isinstance(n, LenOp)]
        if ((len(num) != 1) or (len(len_op) != 1)):
            return super().token_hint()
        limit = num[0]
        tokens_op = [n for n in len_op[0].predecessors if isinstance(n, TokensOp)]
        if (len(tokens_op) != 1):
            return super().token_hint()
        var = [n for n in tokens_op[0].predecessors if isinstance(n, Var)]
        if (len(var) != 1):
            return super().token_hint()
        return {var[0].name: limit}
class ResidualAccumulator(fl.Passthrough):
    """Passthrough layer that records the current tensor as residual `n`.

    On the forward path it reads the 'residuals' list from the 'unet'
    context (passing through entry n) and then writes the layer's input
    back into slot n via `update`.
    """
    def __init__(self, n: int) -> None:
        # Index of the residual slot this layer owns.
        self.n = n
        super().__init__(fl.Residual(fl.UseContext(context='unet', key='residuals').compose(func=(lambda residuals: residuals[self.n]))), fl.SetContext(context='unet', key='residuals', callback=self.update))
    def update(self, residuals: list[(Tensor | float)], x: Tensor) -> None:
        # Overwrite slot n with the current activation.
        residuals[self.n] = x
def extractLiterarycrowWordpressCom(item):
    """Parse a literarycrow.wordpress.com feed item into a release message.

    Returns None for previews or titles without chapter/volume info, a
    release message for a recognised series tag, False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    # tag -> (series name, translation type)
    series_by_tag = {'PRC': ('PRC', 'translated'), 'Loiterous': ('Loiterous', 'oel')}
    for tagname, (name, tl_type) in series_by_tag.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def submit_payment(request, booking_uuid, location_slug):
    """Handle a third-party Stripe payment for a booking.

    GET renders the payment form; POST charges the submitted card through
    Stripe, records the Payment against the booking's bill, confirms the
    booking when fully paid, and emails a receipt (plus a welcome email
    when arrival is close enough).
    """
    booking = Booking.objects.get(uuid=booking_uuid)
    location = get_object_or_404(Location, slug=location_slug)
    if (request.method == 'POST'):
        form = PaymentForm(request.POST, default_amount=None)
        if form.is_valid():
            stripe.api_key = settings.STRIPE_SECRET_KEY
            token = request.POST.get('stripeToken')
            amount = float(request.POST.get('amount'))
            pay_name = request.POST.get('name')
            pay_email = request.POST.get('email')
            comment = request.POST.get('comment')
            pay_user = None
            # Best-effort: associate the payment with an existing account.
            # BUG FIX: was a bare `except:` which also swallowed
            # SystemExit/KeyboardInterrupt; narrowed while keeping the
            # deliberate best-effort behaviour.
            try:
                pay_user = User.objects.filter(email=pay_email).first()
            except Exception:
                pass
            charge_descr = ('payment from %s (%s).' % (pay_name, pay_email))
            if comment:
                charge_descr += (' Comment added: %s' % comment)
            try:
                # Charge the card and record the payment; Stripe amounts are
                # in cents, hence the /100.0.
                charge = payment_gateway.stripe_charge_card_third_party(booking, amount, token, charge_descr)
                Payment.objects.create(bill=booking.bill, user=pay_user, payment_service='Stripe', payment_method=charge.source.brand, paid_amount=(charge.amount / 100.0), transaction_id=charge.id, last4=charge.source.last4)
                if (booking.bill.total_owed() <= 0.0):
                    # Fully paid: confirm the booking, send the receipt, and
                    # send the welcome email if arrival is imminent.
                    booking.confirm()
                    send_booking_receipt(booking, send_to=pay_email)
                    days_until_arrival = (booking.use.arrive - datetime.date.today()).days
                    if (days_until_arrival <= booking.use.location.welcome_email_days_ahead):
                        guest_welcome(booking.use)
                    # NOTE(review): "Thanks you" looks like a typo in the
                    # user-facing copy; left unchanged to avoid altering
                    # displayed text without sign-off.
                    messages.add_message(request, messages.INFO, ('Thanks you for your payment! A receipt is being emailed to you at %s' % pay_email))
                else:
                    messages.add_message(request, messages.INFO, ('Thanks you for your payment! There is now a pending amount due of $%.2f' % booking.bill.total_owed()))
                    # Re-render a clean form pre-filled with the remaining balance.
                    form = PaymentForm(default_amount=booking.bill.total_owed)
            except Exception as e:
                messages.add_message(request, messages.INFO, (('Drat, there was a problem with your card. Sometimes this reflects a card transaction limit, or bank ' + 'hold due to an unusual charge. Please contact your bank or credit card, or try a different card. The ') + ('error returned was: <em>%s</em>' % e)))
        else:
            logger.debug('payment form not valid')
            logger.debug(form.errors)
    else:
        form = PaymentForm(default_amount=booking.bill.total_owed)
    # Colour the outstanding balance red while anything is still owed.
    if (booking.bill.total_owed() > 0.0):
        owed_color = 'text-danger'
    else:
        owed_color = 'text-success'
    return render(request, 'payment.html', {'r': booking, 'location': location, 'total_owed_color': owed_color, 'form': form, 'stripe_publishable_key': settings.STRIPE_PUBLISHABLE_KEY})
class TraceCounters():
    """Derives time-series counters (queue length, memory bandwidth) from GPU traces.

    NOTE(review): the methods take `cls` as the first parameter and are
    invoked as `TraceCounters.method(t, ...)` -- they read like stripped
    @classmethod definitions; confirm against the original source.
    """
    def __init__(self):
        pass

    def _get_queue_length_time_series_for_rank(cls, t: 'Trace', rank: int) -> Optional[pd.DataFrame]:
        """Build a per-stream queue-length time series for one rank.

        Runtime launch events count +1, their matching GPU kernels count -1;
        the per-stream cumulative sum is the outstanding queue depth.
        """
        trace_df: pd.DataFrame = t.get_trace(rank)
        runtime_calls: pd.DataFrame = trace_df.query(t.symbol_table.get_runtime_launch_events_query()).copy()
        runtime_calls.drop(['stream', 'pid', 'tid'], axis=1, inplace=True)
        runtime_calls['queue'] = 1
        # GPU kernels carry a real stream id (!= -1); they drain the queue.
        gpu_kernels = trace_df[trace_df['stream'].ne((- 1))].copy()
        gpu_kernels['queue'] = (- 1)
        # Attach each runtime call to its kernel's stream/pid/tid via the
        # shared correlation id, keeping only matched pairs.
        runtime_calls_filt = runtime_calls.join(gpu_kernels[['stream', 'pid', 'tid', 'correlation']].set_index('correlation'), on='correlation')
        gpu_kernels_filt = gpu_kernels[gpu_kernels['correlation'].isin(runtime_calls['correlation'])]
        assert (len(runtime_calls_filt) == len(gpu_kernels_filt))
        merged_df = pd.concat([runtime_calls_filt, gpu_kernels_filt]).sort_values(by='ts').set_index('index')
        result_df_list = []
        for (stream, stream_df) in merged_df.groupby('stream'):
            logger.debug(f'Processing queue_length for rank {rank}, stream {stream}')
            # Running sum of the +1/-1 events = queue depth over time.
            stream_df['queue_length'] = stream_df['queue'].cumsum()
            result_df_list.append(stream_df)
        return (pd.concat(result_df_list)[['ts', 'pid', 'tid', 'stream', 'queue_length']] if (len(result_df_list) > 0) else None)

    def get_queue_length_time_series(cls, t: 'Trace', ranks: Optional[List[int]]=None) -> Dict[(int, pd.DataFrame)]:
        """Queue-length time series per rank (defaults to rank 0 only).

        Ranks whose trace yields no data are dropped from the result dict.
        """
        if ((ranks is None) or (len(ranks) == 0)):
            ranks = [0]
        logger.info('Please note that the time series only contains points when the value changes. Once a values is observed the time series stays constant until the next update.')
        result = {rank: TraceCounters._get_queue_length_time_series_for_rank(t, rank) for rank in ranks}
        return dict(filter((lambda x: (x[1] is not None)), result.items()))

    def get_queue_length_summary(cls, t: 'Trace', ranks: Optional[List[int]]=None) -> Optional[pd.DataFrame]:
        """describe() statistics of queue length grouped by (rank, stream)."""
        if ((ranks is None) or (len(ranks) == 0)):
            ranks = [0]
        results_list: List[pd.DataFrame] = []
        for (rank, rank_df) in TraceCounters.get_queue_length_time_series(t, ranks).items():
            rank_df['rank'] = rank
            result = rank_df[['rank', 'stream', 'queue_length']].groupby(['rank', 'stream']).describe()
            results_list.append(result)
        return (pd.concat(results_list) if (len(results_list) > 0) else None)

    def _get_memory_bw_time_series_for_rank(cls, t: 'Trace', rank: int) -> Optional[pd.DataFrame]:
        """Build a memory-bandwidth time series for one rank.

        Each memory-copy kernel contributes +bw at its start and -bw at its
        end; the cumulative sum per copy kind is instantaneous bandwidth.
        """
        trace_df: pd.DataFrame = t.get_trace(rank)
        sym_table = t.symbol_table.get_sym_table()
        gpu_kernels = trace_df[trace_df['stream'].ne((- 1))].copy()
        gpu_kernels['kernel_type'] = gpu_kernels[['name']].apply((lambda x: get_kernel_type(sym_table[x['name']])), axis=1)
        memcpy_kernels = gpu_kernels[(gpu_kernels.kernel_type == KernelType.MEMORY.name)].copy()
        # Replace the symbol id with the copy kind (e.g. H2D/D2H) label.
        memcpy_kernels['name'] = memcpy_kernels[['name']].apply((lambda x: get_memory_kernel_type(sym_table[x['name']])), axis=1)
        # Avoid zero-duration events so start and end points stay distinct.
        memcpy_kernels.loc[((memcpy_kernels.dur == 0), ['dur'])] = 1
        # Start points (+bw) ...
        membw_time_series_a = memcpy_kernels[['ts', 'name', 'pid', 'memory_bw_gbps']]
        # ... and end points (-bw) shifted to ts + dur.
        membw_time_series_b = memcpy_kernels[['ts', 'name', 'dur', 'pid', 'memory_bw_gbps']].copy()
        membw_time_series_b.ts = (membw_time_series_b.ts + membw_time_series_b.dur)
        membw_time_series_b.memory_bw_gbps = (- membw_time_series_b.memory_bw_gbps)
        membw_time_series = pd.concat([membw_time_series_a, membw_time_series_b[['ts', 'pid', 'name', 'memory_bw_gbps']]], ignore_index=True).sort_values(by='ts')
        result_df_list = []
        for (_, membw_df) in membw_time_series.groupby('name'):
            membw_df.memory_bw_gbps = membw_df.memory_bw_gbps.cumsum()
            result_df_list.append(membw_df)
        if (len(result_df_list) == 0):
            return None
        result_df = pd.concat(result_df_list)[['ts', 'pid', 'name', 'memory_bw_gbps']]
        return result_df

    def get_memory_bw_time_series(cls, t: 'Trace', ranks: Optional[List[int]]=None) -> Dict[(int, pd.DataFrame)]:
        """Memory-bandwidth time series per rank (defaults to rank 0 only)."""
        if ((ranks is None) or (len(ranks) == 0)):
            ranks = [0]
        logger.info('Please note that the time series only contains points when the value changes. Once a values is observed the time series stays constant until the next update.')
        result = {rank: TraceCounters._get_memory_bw_time_series_for_rank(t, rank) for rank in ranks}
        return dict(filter((lambda x: (x[1] is not None)), result.items()))

    def get_memory_bw_summary(cls, t: 'Trace', ranks: Optional[List[int]]=None) -> Optional[pd.DataFrame]:
        """describe() statistics of positive bandwidth grouped by (rank, copy kind)."""
        if ((ranks is None) or (len(ranks) == 0)):
            ranks = [0]
        results_list: List[pd.DataFrame] = []
        for (rank, rank_df) in TraceCounters.get_memory_bw_time_series(t, ranks).items():
            rank_df['rank'] = rank
            # Keep only the rising edges (actual transfer bandwidth values).
            rank_df = rank_df[(rank_df.memory_bw_gbps > 0)]
            result = rank_df[['rank', 'name', 'memory_bw_gbps']].groupby(['rank', 'name']).describe()
            results_list.append(result)
        return (pd.concat(results_list) if (len(results_list) > 0) else None)
def _get_third_party_data(recipients: Optional[List[str]]) -> Tuple[(bool, Optional[str])]:
data_shared_with_third_parties: bool = bool(recipients)
third_parties: Optional[str] = None
if isinstance(recipients, list):
third_parties = ('; '.join([elem for elem in recipients if elem]) or None)
return (data_shared_with_third_parties, third_parties) |
class RawShellTask(ShellTask):
    """ShellTask variant that can inject `export K=V` lines from an env dict."""

    def __init__(self, name: str, debug: bool=False, script: typing.Optional[str]=None, script_file: typing.Optional[str]=None, task_config: T=None, inputs: typing.Optional[typing.Dict[(str, typing.Type)]]=None, output_locs: typing.Optional[typing.List[OutputLocation]]=None, **kwargs):
        """Forward all configuration to ShellTask unchanged."""
        super().__init__(name=name, debug=debug, script=script, script_file=script_file, task_config=task_config, inputs=inputs, output_locs=output_locs, **kwargs)

    def make_export_string_from_env_dict(self, d: typing.Dict[(str, str)]) -> str:
        """Render a dict as newline-separated `export K=V` shell lines."""
        items = []
        for (k, v) in d.items():
            items.append(f'export {k}={v}')
        return '\n'.join(items)

    def execute(self, **kwargs) -> typing.Any:
        """Interpolate and run the shell script, returning resolved outputs.

        Returns a single FlyteFile/FlyteDirectory, a tuple of them, or None,
        depending on how many output locations resolve. Re-raises
        CalledProcessError after logging the working-directory contents.
        """
        logger.info(f'Running shell script as type {self.task_type}')
        if self.script_file:
            with open(self.script_file) as f:
                self._script = f.read()
        outputs: typing.Dict[(str, str)] = {}
        if self._output_locs:
            for v in self._output_locs:
                outputs[v.var] = self._interpolizer.interpolate(v.location, inputs=kwargs)
        if (os.name == 'nt'):
            # cmd.exe has no multi-line scripts; chain statements with &&.
            self._script = self._script.lstrip().rstrip().replace('\n', '&&')
        if (('env' in kwargs) and isinstance(kwargs['env'], dict)):
            kwargs['export_env'] = self.make_export_string_from_env_dict(kwargs['env'])
        gen_script = self._interpolizer.interpolate(self._script, inputs=kwargs, outputs=outputs)
        if self._debug:
            print('\n\n')
            print(gen_script)
            print('\n\n')
        try:
            subprocess.check_call(gen_script, shell=True)
        except subprocess.CalledProcessError as e:
            files = os.listdir('.')
            fstr = '\n-'.join(files)
            logger.error(f'''Failed to Execute Script, return-code {e.returncode}
StdErr: {e.stderr}
StdOut: {e.stdout}
Current directory contents: .
-{fstr}''')
            raise
        final_outputs = []
        # BUG FIX: the original iterated `self._output_locs` unconditionally
        # here, raising TypeError when it is None (the parameter is Optional
        # and was guarded earlier) -- default to an empty list.
        for v in (self._output_locs or []):
            if issubclass(v.var_type, FlyteFile):
                final_outputs.append(FlyteFile(outputs[v.var]))
            if issubclass(v.var_type, FlyteDirectory):
                final_outputs.append(FlyteDirectory(outputs[v.var]))
        if (len(final_outputs) == 1):
            return final_outputs[0]
        if (len(final_outputs) > 1):
            return tuple(final_outputs)
        return None
class PopupText(_PopupWidget):
    """Popup widget that renders a block of text with optional highlighting.

    NOTE(review): `text` is defined twice (getter then setter) -- these look
    like stripped @property/@text.setter decorators; confirm against the
    original source.
    """
    defaults = [('font', 'sans', 'Font name'), ('fontsize', 12, 'Font size'), ('foreground', '#ffffff', 'Font colour'), ('foreground_highlighted', None, 'Font colour when highlighted via `block` (None to use foreground value)'), ('highlight_method', 'block', "Available options: 'border', 'block' or 'text'."), ('h_align', 'left', 'Text alignment: left, center or right.'), ('v_align', 'middle', 'Vertical alignment: top, middle or bottom.'), ('wrap', False, 'Wrap text in layout')]

    def __init__(self, text='', **config):
        _PopupWidget.__init__(self, **config)
        self.add_defaults(PopupText.defaults)
        self._text = text

    def _configure(self, qtile, container):
        """Create the pango text layout once the drawer is available."""
        _PopupWidget._configure(self, qtile, container)
        self.layout = self.drawer.textlayout(self._text, self.foreground, self.font, self.fontsize, None, markup=False, wrap=self.wrap)
        self.layout.layout.set_alignment(pangocffi.ALIGNMENTS[self.h_align])
        self.layout.width = self.width

    def _set_layout_colour(self):
        # Choose text colour from highlight state + configured method.
        if ((self.highlight_method == 'text') and self._highlight):
            self.layout.colour = self.highlight
        elif ((self.highlight_method == 'block') and (self.foreground_highlighted is not None) and self._highlight):
            self.layout.colour = self.foreground_highlighted
        else:
            self.layout.colour = self.foreground

    def paint(self):
        """Clear the background and draw the text aligned per `v_align`."""
        self._set_layout_colour()
        if (self.v_align == 'top'):
            y = 0
        elif (self.v_align == 'bottom'):
            y = (self.height - self.layout.height)
        else:
            # 'middle': centre vertically.
            y = ((self.height - self.layout.height) // 2)
        self.clear(self._background)
        self.layout.draw(0, y)

    def text(self):
        return self._text

    def text(self, val):
        # Update the stored text, push it into the layout, and redraw.
        self._text = val
        self.layout.text = self._text
        self.draw()

    def info(self):
        """Extend the base widget info with the current text."""
        info = _PopupWidget.info(self)
        info['text'] = self.text
        return info
class SegySampleFormat(Enum):
    """SEG-Y trace data-sample format codes (binary file header field).

    Values mirror the numeric codes of the SEG-Y specification; codes 13-14
    and 17-18 are intentionally absent here.
    """
    IBM_FLOAT_4_BYTE = 1
    SIGNED_INTEGER_4_BYTE = 2
    SIGNED_SHORT_2_BYTE = 3
    FIXED_POINT_WITH_GAIN_4_BYTE = 4
    IEEE_FLOAT_4_BYTE = 5
    IEEE_FLOAT_8_BYTE = 6
    SIGNED_CHAR_3_BYTE = 7
    SIGNED_CHAR_1_BYTE = 8
    SIGNED_INTEGER_8_BYTE = 9
    UNSIGNED_INTEGER_4_BYTE = 10
    UNSIGNED_SHORT_2_BYTE = 11
    UNSIGNED_INTEGER_8_BYTE = 12
    UNSIGNED_INTEGER_3_BYTE = 15
    UNSIGNED_CHAR_1_BYTE = 16
    # 19 and 20 are placeholders for codes the format reserves but this
    # reader does not use.
    NOT_IN_USE_1 = 19
    NOT_IN_USE_2 = 20
# NOTE(review): the three lines below look like stripped decorators --
# presumably @_ns.route(...), @_ns.route(...) and @_with_copr; confirm
# against the original source.
_ns.route('/<username>/<coprname>/module/<id>/raw')
_ns.route('/g/<group_name>/<coprname>/module/<id>/raw')
_with_copr
def copr_module_raw(copr, id):
    """Serve a module's raw modulemd YAML as a plain-text download.

    The suggested filename is `<id>-<name>-<stream>-<version>.yaml`.
    """
    module = ModulesLogic.get(id).first()
    response = flask.make_response(module.yaml)
    response.mimetype = 'text/plain'
    response.headers['Content-Disposition'] = 'filename={}.yaml'.format('-'.join([str(module.id), module.name, module.stream, str(module.version)]))
    return response
def _get_sequence_info(recipe):
chr2genome = {}
for (gen_name, gen_params) in recipe['genomes'].items():
if ('fasta' not in gen_params):
raise BackendException("FASTA file for '{0}' is not specified".format(gen_name))
if (not os.path.exists(gen_params['fasta'])):
raise BackendException("Can't open '{0}'".format(gen_params['fasta']))
total_size = 0
with open(gen_params['fasta'], 'r') as f:
for line in f:
line = line.strip()
if line.startswith('>'):
chr_name = line.strip()[1:].split(' ')[0]
if (chr_name in chr2genome):
raise BackendException('Some fasta files contain sequences with similar names')
chr2genome[chr_name] = gen_name
else:
total_size += len(line)
return (chr2genome, total_size) |
def sanitize_date(date: ((int | str) | Literal[('first', 'last')])) -> (list[int] | Literal[('first', 'last')]):
if isinstance(date, int):
return [date]
if (date not in ('first', 'last')):
try:
if (isinstance(date, str) and (len(date) == 10) and (date[4] == '-') and (date[7] == '-')):
date = date.replace('-', '')
return [int(date)]
except ValueError as err:
raise ValueError(f"valid dates are either of the form 'YYYY-MM-DD', 'YYYYMMDD' or 'first'/'last' got {date}") from err
return date |
def _locate_dependent_closing_args(provider: providers.Provider) -> Dict[(str, providers.Provider)]:
    """Recursively collect Resource args of `provider` that need closing.

    Returns a mapping from `str(id(arg))` to the Resource providers found
    in the provider's argument tree.
    """
    if (not hasattr(provider, 'args')):
        return {}
    closing_deps = {}
    for arg in provider.args:
        if ((not isinstance(arg, providers.Provider)) or (not hasattr(arg, 'args'))):
            continue
        if ((not arg.args) and isinstance(arg, providers.Resource)):
            # NOTE(review): returns on the first argument-less Resource,
            # skipping any remaining siblings -- preserved from the original;
            # confirm this short-circuit is intended.
            return {str(id(arg)): arg}
        else:
            # BUG FIX: the original used `closing_deps += ...`, which raises
            # TypeError (dicts do not support +=); merge the recursive
            # results with update() instead.
            closing_deps.update(_locate_dependent_closing_args(arg))
    return closing_deps
class TestWindowsAbs(util.MdCase):
    """pathconverter: absolute conversion of a Windows-style base path.

    NOTE(review): the markdown inputs below are empty strings while the
    expected HTML contains an <img> tag -- the image markdown was likely
    lost in extraction; confirm the fixture against the original test.
    """
    extension = ['pymdownx.pathconverter']
    extension_configs = {'pymdownx.pathconverter': {'base_path': 'C:/Some/fake/path', 'absolute': True}}

    def test_windows_root_conversion(self):
        """Drive letters stay literal on Windows, percent-encoded elsewhere."""
        if util.is_win():
            self.check_markdown('', '<p><img alt="picture" src="/C:/Some/fake/path/extensions/_assets/bg.png" /></p>')
        else:
            self.check_markdown('', '<p><img alt="picture" src="/C%3A/Some/fake/path/extensions/_assets/bg.png" /></p>')
def get_normalized_bounding_box_list_for_layout_block(layout_block: LayoutBlock) -> Sequence[LayoutPageCoordinates]:
    """Return normalised bounding boxes for a layout block's merged coordinates.

    Page metadata is collected per page number from the block's tokens and
    used to normalise each merged coordinate rectangle.
    """
    page_meta_by_page_number = {}
    for line in layout_block.lines:
        for token in line.tokens:
            page_meta = token.line_meta.page_meta
            page_meta_by_page_number[page_meta.page_number] = page_meta
    LOGGER.debug('page_meta_by_page_number: %r', page_meta_by_page_number)
    merged_coordinates_list = layout_block.get_merged_coordinates_list()
    normalized = []
    for coordinates in merged_coordinates_list:
        normalized.append(get_normalized_bounding_box_for_page_coordinates_and_page_meta(coordinates=coordinates, page_meta=page_meta_by_page_number[coordinates.page_number]))
    return normalized
def extractFuwaFuwaTales(item):
    """Parse a FuwaFuwa Tales feed item into a release message.

    Returns None for previews or titles without chapter/volume/fragment
    info, a release message for a recognised series tag, False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol or frag):
        return None
    # Each known tag maps a series onto its name and translation type.
    known_series = (
        ('Prince Herscherik and The Kingdom of Sorrow', 'Prince Herscherik and The Kingdom of Sorrow', 'translated'),
        ('By A Slight Mistake', 'By A Slight Mistake', 'translated'),
        ('The Magnificent Battle Records of a Former Noble Lady', 'The Magnificent Battle Records of a Former Noble Lady', 'translated'),
    )
    for tagname, name, tl_type in known_series:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class CssButtonReset(CssStyle.Style):
    """Danger-themed reset button: dark-red outline that inverts on hover."""
    # Base button layout shared by all states.
    _attrs = {'font-weight': 'bold', 'padding': '5px 10px 5px 10px', 'margin-top': '5px', 'text-decoration': 'none', 'border-radius': '5px', 'display': 'inline-block', 'text-transform': 'uppercase'}
    # Pointer affordance on hover; suppress the focus outline.
    _hover = {'text-decoration': 'none', 'cursor': 'pointer'}
    _focus = {'outline': 0}
    def customize(self):
        """Apply the page theme's danger colour: outlined at rest, filled on hover."""
        self.css({'border': ('1px solid %s' % self.page.theme.danger.dark), 'color': self.page.theme.danger.dark, 'background-color': 'white'})
        self.hover.css({'background-color': self.page.theme.danger.dark, 'color': 'white'})
class TestRefreshToken():
    """Tests for the OAuth2 refresh-token flow.

    NOTE(review): the bare `.asyncio` lines look like stripped
    @pytest.mark.asyncio decorators (and respx mocking decorators may be
    missing too) -- confirm against the original source.
    """
    .asyncio
    async def test_unsupported_refresh_token(self):
        """A client without a refresh endpoint raises RefreshTokenNotSupportedError."""
        with pytest.raises(RefreshTokenNotSupportedError):
            (await client.refresh_token('REFRESH_TOKEN'))

    .asyncio
    async def test_refresh_token(self, load_mock, get_respx_call_args):
        """A successful refresh posts the correct form body and yields a token."""
        request = respx.post(client_refresh.refresh_token_endpoint).mock(return_value=Response(200, json=load_mock('google_success_refresh_token')))
        access_token = (await client_refresh.refresh_token('REFRESH_TOKEN'))
        (url, headers, content) = (await get_respx_call_args(request))
        # Request must be a form-encoded POST accepting JSON.
        assert (headers['Content-Type'] == 'application/x-www-form-urlencoded')
        assert (headers['Accept'] == 'application/json')
        assert ('grant_type=refresh_token' in content)
        assert ('refresh_token=REFRESH_TOKEN' in content)
        assert (f'client_id={CLIENT_ID}' in content)
        assert (f'client_secret={CLIENT_SECRET}' in content)
        # Response must parse into a live OAuth2Token.
        assert (type(access_token) == OAuth2Token)
        assert ('access_token' in access_token)
        assert ('token_type' in access_token)
        assert (access_token.is_expired() is False)

    .asyncio
    async def test_refresh_token_error(self, load_mock):
        """A 400 response surfaces as RefreshTokenError carrying the JSON body."""
        respx.post(client_refresh.refresh_token_endpoint).mock(return_value=Response(400, json=load_mock('error')))
        with pytest.raises(RefreshTokenError) as excinfo:
            (await client_refresh.refresh_token('REFRESH_TOKEN'))
        assert isinstance(excinfo.value.args[0], dict)
        assert ('error' in excinfo.value.args[0])
def upgrade():
    """Recreate the speakers_sessions foreign keys with ON DELETE CASCADE.

    Drops the original session/speaker FKs, then re-adds them so deleting a
    session or speaker also removes its rows in the join table.
    """
    op.drop_constraint(u'speakers_sessions_session_id_fkey', 'speakers_sessions', type_='foreignkey')
    op.drop_constraint(u'speakers_sessions_speaker_id_fkey', 'speakers_sessions', type_='foreignkey')
    # None lets alembic auto-generate the new constraint names.
    op.create_foreign_key(None, 'speakers_sessions', 'session', ['session_id'], ['id'], ondelete='CASCADE')
    op.create_foreign_key(None, 'speakers_sessions', 'speaker', ['speaker_id'], ['id'], ondelete='CASCADE')
# NOTE(review): the bare `(frozen=True)` line below looks like a stripped
# decorator -- presumably @dataclass(frozen=True); confirm against the
# original source.
(frozen=True)
class DeviceMeta():
    """Static capability description of a capture device.

    Holds the device id, audio-jack availability, serial number, and the
    full set of UVC colour-control capabilities with their ranges and
    defaults.
    """
    # Numeric device identifier.
    id: int
    # Whether the device has audio input/output jacks.
    jack_in: bool = False
    jack_out: bool = False
    serial: str = ''
    # Per-control capability descriptors (range, step, default, default mode).
    color_controls: Tuple[(ColorControlCapabilities, ...)] = (ColorControlCapabilities(color_control_command=ColorControlCommand.EXPOSURE_TIME_ABSOLUTE, supports_auto=True, min_value=500, max_value=133330, step_value=100, default_value=16670, default_mode=ColorControlMode.AUTO), ColorControlCapabilities(color_control_command=ColorControlCommand.AUTO_EXPOSURE_PRIORITY, supports_auto=False, min_value=0, max_value=0, step_value=0, default_value=0, default_mode=ColorControlMode.MANUAL), ColorControlCapabilities(color_control_command=ColorControlCommand.BRIGHTNESS, supports_auto=False, min_value=0, max_value=255, step_value=1, default_value=128, default_mode=ColorControlMode.MANUAL), ColorControlCapabilities(color_control_command=ColorControlCommand.CONTRAST, supports_auto=False, min_value=0, max_value=10, step_value=1, default_value=5, default_mode=ColorControlMode.MANUAL), ColorControlCapabilities(color_control_command=ColorControlCommand.SATURATION, supports_auto=False, min_value=0, max_value=63, step_value=1, default_value=32, default_mode=ColorControlMode.MANUAL), ColorControlCapabilities(color_control_command=ColorControlCommand.SHARPNESS, supports_auto=False, min_value=0, max_value=4, step_value=1, default_value=2, default_mode=ColorControlMode.MANUAL), ColorControlCapabilities(color_control_command=ColorControlCommand.WHITEBALANCE, supports_auto=True, min_value=2500, max_value=12500, step_value=10, default_value=4500, default_mode=ColorControlMode.AUTO), ColorControlCapabilities(color_control_command=ColorControlCommand.BACKLIGHT_COMPENSATION, supports_auto=False, min_value=0, max_value=1, step_value=1, default_value=0, default_mode=ColorControlMode.MANUAL), ColorControlCapabilities(color_control_command=ColorControlCommand.GAIN, supports_auto=False, min_value=0, max_value=255, step_value=1, default_value=128, default_mode=ColorControlMode.MANUAL), ColorControlCapabilities(color_control_command=ColorControlCommand.POWERLINE_FREQUENCY, supports_auto=False, 
    min_value=1, max_value=2, step_value=1, default_value=2, default_mode=ColorControlMode.MANUAL))
class OptionSeriesTilemapSonificationTracksMappingTremoloSpeed(Options):
    """Config accessors for the tremolo-speed mapping of tilemap-series
    sonification tracks.

    NOTE(review): each name is defined twice (getter then setter); in plain
    Python the second definition shadows the first, so these read like
    stripped @property/@<name>.setter decorators -- confirm against the
    code generator's original output.
    """
    def mapFunction(self):
        # Default: no mapping function configured.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        # Default: no mapped data point property.
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def select_and_save_field(saved: Any, row: Row, target_path: FieldPath) -> Dict:
    """Recursively copy the field addressed by *target_path* out of *row* into *saved*.

    Mirrors row's list/dict nesting inside *saved*, descending one path level
    per matched dict key. Mutates and returns *saved*.
    """
    def _defaultdict_or_array(resource: Any) -> Any:
        # Fresh empty container of the same kind; scalars pass through as-is.
        return (type(resource)() if isinstance(resource, (list, dict)) else resource)
    if isinstance(row, list):
        for (i, elem) in enumerate(row):
            try:
                saved[i] = select_and_save_field(saved[i], elem, target_path)
            except IndexError:
                # saved is shorter than row: grow it with a fresh skeleton element.
                saved.append(select_and_save_field(_defaultdict_or_array(elem), elem, target_path))
    elif isinstance(row, dict):
        for key in row:
            if (target_path.levels and (key == target_path.levels[0])):
                if (key not in saved):
                    saved[key] = _defaultdict_or_array(row[key])
                # Consume the matched path level and descend into the value.
                saved[key] = select_and_save_field(saved[key], row[key], FieldPath(*target_path.levels[1:]))
    return saved
class BitsJob():
    """Wraps one raw BITS job record and normalizes it into `job_dict`.

    The two class-level tables map raw parser keys to the human-readable
    aliases used in output. `hash` is an MD5 fingerprint over the job's
    values, usable to deduplicate jobs seen more than once.
    """
    # Raw per-file keys -> output aliases.
    FILE_MAP = dict(src_fn='SourceURL', dest_fn='DestFile', tmp_fn='TmpFile', download_size='DownloadByteSize', transfer_size='TransferByteSize', vol_guid='VolumeGUID')
    # Raw per-job keys -> output aliases.
    JOB_MAP = dict(job_id='JobId', type='JobType', priority='JobPriority', state='JobState', name='JobName', desc='JobDesc', cmd='CommandExecuted', args='CommandArguments', sid='OwnerSID', ctime='CreationTime', mtime='ModifiedTime', carved='Carved', files='Files', queue_path='QueuePath')
    def __init__(self, job, bits_parser):
        """*job*: raw key/value record; *bits_parser*: owning parser (SID lookup + carve flags)."""
        self.job = job
        self.bits_parser = bits_parser
        self.hash = None
        self.job_dict = {}
        if (bits_parser.carve_db_files or bits_parser.carve_all_files):
            # Jobs start flagged as "not carved"; the parser flips this for carved hits.
            self.job_dict = {'Carved': False}
        self.parse()
    def is_useful_for_analysis(self, cur_dict=None):
        """Return True if the job (or any nested file dict) has a non-empty forensically useful field."""
        useful_fields = ['SourceURL', 'DestFile', 'TmpFile', 'JobId', 'JobState', 'CommandExecuted', 'CommandArguments']
        if (not cur_dict):
            cur_dict = self.job_dict
        for (k, v) in cur_dict.items():
            if ((k in useful_fields) and v):
                return True
            if isinstance(v, list):
                # Recurse into nested dicts (e.g. the 'Files' list).
                for d in v:
                    if self.is_useful_for_analysis(d):
                        return True
        return False
    def is_carved(self):
        """True when this job was recovered by carving rather than normal parsing."""
        return (self.job_dict.get('Carved') is True)
    @staticmethod
    def escape(input_str):
        """Strip non-printable characters from a string; non-strings pass through unchanged."""
        # BUGFIX: this was declared without `self` but called as
        # `self.escape(...)` in parse(), raising TypeError. It uses no
        # instance state, so it is now a staticmethod.
        if ((not isinstance(input_str, str)) or input_str.isprintable()):
            return input_str
        return ''.join(filter((lambda x: (x in string.printable)), input_str))
    def parse(self):
        """Normalize self.job into self.job_dict and compute the job's MD5 fingerprint."""
        file_fields = ['args', 'cmd', 'dest_fn', 'tmp_fn']  # values that may contain %VAR% env references
        job_hash = hashlib.md5()
        for (k, v) in self.job.items():
            alias = self.JOB_MAP.get(k)
            if (not alias):
                continue  # unknown raw key
            elif ((not v) or (str(v).strip() == '')):
                continue  # skip empty values
            elif isinstance(v, datetime.datetime):
                self.job_dict[alias] = (v.replace(microsecond=0).isoformat() + 'Z')
            elif isinstance(v, bool):
                self.job_dict[alias] = str(v).lower()
            elif (alias == self.JOB_MAP['sid']):
                self.job_dict[alias] = str(v)
                owner = self.bits_parser.get_username_from_sid(v)
                if owner:
                    self.job_dict['Owner'] = owner
            elif (alias == self.JOB_MAP['files']):
                files_list = []
                for file in v:
                    file_dict = {}
                    for (k1, v1) in file.items():
                        t_alias = self.FILE_MAP.get(k1)
                        if (not t_alias):
                            continue
                        elif ((v1 is None) or (str(v1).strip() == '') or (not str(v1).isprintable())):
                            # BUGFIX: a second, syntactically broken emptiness
                            # check (`v1 == `) was removed; this branch already
                            # skips None/blank values.
                            continue
                        if (k1 in file_fields):
                            # Resolve embedded environment variables in paths/commands.
                            file_dict[t_alias] = os.path.expandvars(v1)
                        else:
                            file_dict[t_alias] = v1
                        job_hash.update(str(file_dict[t_alias]).encode('utf-8'))
                    files_list.append(file_dict)
                self.job_dict['Files'] = files_list
            else:
                self.job_dict[alias] = v
                self.job_dict[alias] = self.escape(self.job_dict[alias])
            # BUGFIX: was `type(v) is not 'Dict'` — comparing a type to a
            # string literal, which is always True. Skip raw dict values as
            # apparently intended; all other values feed the fingerprint.
            if not isinstance(v, dict):
                job_hash.update(str(v).encode('utf-8'))
        self.hash = job_hash.hexdigest()
def main(argv=None):
    """Load the TESTS object from the given template module and dump it to tests.json."""
    cli = argparse.ArgumentParser(description='')
    cli.add_argument('template', help='')
    opts = cli.parse_args(sys.argv[1:] if argv is None else argv)
    test_configs = object_from_command_line(opts.template, 'TESTS')
    # Report how many test configs were produced.
    print(len(test_configs))
    output_file = 'tests.json'
    with open(output_file, 'w') as out:
        json.dump(test_configs, out, indent=4)
    print('Wrote json file: {}'.format(output_file))
def _get_results_from_parent_fo(parent_results: (dict | None), parent_uid: str, vfp: dict[(str, list[str])]) -> dict:
    """Collect the parent's per-file analysis results for this object's virtual file paths.

    Keys of the returned dict are the slash-stripped file names; each entry is
    the parent's result for that file, tagged with its parent_uid.
    """
    if parent_results is None or 'files' not in parent_results:
        return {}
    by_file = _result_list_to_dict(parent_results['files'])
    matched = {}
    for raw_name in vfp.get(parent_uid, []):
        name = raw_name.lstrip('/')
        # Per-file results are keyed by the base64 of the file name.
        lookup = b64encode(name.encode()).decode()
        if lookup in by_file:
            entry = by_file[lookup]
            entry['parent_uid'] = parent_uid
            matched[name] = entry
    return matched
class OptionPlotoptionsNetworkgraphSonificationContexttracksMapping(Options):
    """Accessors for `plotOptions.networkgraph.sonification.contextTracks.mapping`.

    Most methods delegate to `_config_sub_data` to expose a named nested
    option group; `text` is a plain getter/setter pair (see the duplicate-def
    convention used throughout this generated module).
    """
    def frequency(self) -> 'OptionPlotoptionsNetworkgraphSonificationContexttracksMappingFrequency':
        return self._config_sub_data('frequency', OptionPlotoptionsNetworkgraphSonificationContexttracksMappingFrequency)
    def gapBetweenNotes(self) -> 'OptionPlotoptionsNetworkgraphSonificationContexttracksMappingGapbetweennotes':
        return self._config_sub_data('gapBetweenNotes', OptionPlotoptionsNetworkgraphSonificationContexttracksMappingGapbetweennotes)
    def highpass(self) -> 'OptionPlotoptionsNetworkgraphSonificationContexttracksMappingHighpass':
        return self._config_sub_data('highpass', OptionPlotoptionsNetworkgraphSonificationContexttracksMappingHighpass)
    def lowpass(self) -> 'OptionPlotoptionsNetworkgraphSonificationContexttracksMappingLowpass':
        return self._config_sub_data('lowpass', OptionPlotoptionsNetworkgraphSonificationContexttracksMappingLowpass)
    def noteDuration(self) -> 'OptionPlotoptionsNetworkgraphSonificationContexttracksMappingNoteduration':
        return self._config_sub_data('noteDuration', OptionPlotoptionsNetworkgraphSonificationContexttracksMappingNoteduration)
    def pan(self) -> 'OptionPlotoptionsNetworkgraphSonificationContexttracksMappingPan':
        return self._config_sub_data('pan', OptionPlotoptionsNetworkgraphSonificationContexttracksMappingPan)
    def pitch(self) -> 'OptionPlotoptionsNetworkgraphSonificationContexttracksMappingPitch':
        return self._config_sub_data('pitch', OptionPlotoptionsNetworkgraphSonificationContexttracksMappingPitch)
    def playDelay(self) -> 'OptionPlotoptionsNetworkgraphSonificationContexttracksMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionPlotoptionsNetworkgraphSonificationContexttracksMappingPlaydelay)
    def rate(self) -> 'OptionPlotoptionsNetworkgraphSonificationContexttracksMappingRate':
        return self._config_sub_data('rate', OptionPlotoptionsNetworkgraphSonificationContexttracksMappingRate)
    def text(self):
        # Getter (default None).
        return self._config_get(None)
    def text(self, text: str):
        # Setter: stores the value as-is (not a JS snippet).
        self._config(text, js_type=False)
    def time(self) -> 'OptionPlotoptionsNetworkgraphSonificationContexttracksMappingTime':
        return self._config_sub_data('time', OptionPlotoptionsNetworkgraphSonificationContexttracksMappingTime)
    def tremolo(self) -> 'OptionPlotoptionsNetworkgraphSonificationContexttracksMappingTremolo':
        return self._config_sub_data('tremolo', OptionPlotoptionsNetworkgraphSonificationContexttracksMappingTremolo)
    def volume(self) -> 'OptionPlotoptionsNetworkgraphSonificationContexttracksMappingVolume':
        return self._config_sub_data('volume', OptionPlotoptionsNetworkgraphSonificationContexttracksMappingVolume)
def make_test_datasets():
    """Generate assorted HDF5 test fixtures (pandas stores plus raw h5py datasets).

    Writes test_fixed.h5, test_table_no_dc.h5, test_table_dc.h5,
    test_fixed_compressed.h5 (cwd) and test_h5pydata.h5 (under ROOT).
    """
    import h5py
    import numpy as np
    import pandas as pd
    from random import randrange
    n = 100
    # Five int columns, a float column, then short/long random hex-string columns.
    df = pd.DataFrame(dict([('int{0}'.format(i), np.random.randint(0, 10, size=n)) for i in range(5)]))
    df['float'] = np.random.randn(n)
    for i in range(10):
        df['object_1_{0}'.format(i)] = [('%08x' % randrange((16 ** 8))) for _ in range(n)]
    for i in range(7):
        df['object_2_{0}'.format(i)] = [('%15x' % randrange((16 ** 15))) for _ in range(n)]
    df.info()
    # Same frame stored in each pandas HDF flavor (fixed, table, table+data_columns, compressed).
    df.to_hdf('test_fixed.h5', 'data', format='fixed')
    df.to_hdf('test_table_no_dc.h5', 'data', format='table')
    df.to_hdf('test_table_dc.h5', 'data', format='table', data_columns=True)
    df.to_hdf('test_fixed_compressed.h5', 'data', format='fixed', complib='blosc', complevel=9)
    time = np.arange(n)
    x = np.linspace((- 7), 7, n)
    axes_latlon = [('time', time), ('coordinate', np.array(['lat', 'lon'], dtype='S3'))]
    axes_mag = [('time', time), ('direction', np.array(['x', 'y', 'z'], dtype='S1'))]
    latlon = np.vstack(((np.linspace((- 0.0001), 1e-05, n) + 23.8), (np.zeros(n) - 82.3))).T
    # Synthetic 3-axis trace built from tanh/sin shapes (n x 3 after transpose).
    mag_data = np.vstack((((- (1 - (np.tanh(x) ** 2))) * np.sin((2 * x))), ((- (1 - (np.tanh(x) ** 2))) * np.sin((2 * x))), (- (1 - (np.tanh(x) ** 2))))).T
    datasets = ((axes_mag + axes_latlon) + [('magnetic_3_axial', mag_data), ('latlon', latlon)])
    with h5py.File(os.path.join(ROOT, 'test_h5pydata.h5'), 'a') as h5file:
        h5group = h5file.require_group('run1_test1')
        for (data_name, data) in datasets:
            h5group.require_dataset(name=data_name, dtype=data.dtype, shape=data.shape, data=data)
class OptionPlotoptionsPyramid3dSonificationContexttracksMappingLowpassResonance(Options):
    """Config accessors for `plotOptions.pyramid3d.sonification.contextTracks.mapping.lowpass.resonance`.

    Getter/setter pairs share one name per option (see the duplicate-def
    convention used throughout this generated module).
    """
    def mapFunction(self):
        # Getter (default None).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: stores the value as-is (not a JS snippet).
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class FaucetTunnelSameDpOrderedTest(FaucetMultiDPTestBase):
    """Verify a VLAN tunnel ACL comes up on a 2-DP software stack.

    ACL 1 matches ICMP (ip_proto 1), suppresses normal forwarding
    ('allow': 0) and redirects matches into VLAN tunnel 200 terminating on
    switch 0 at host 1's port.
    """
    NUM_DPS = 2
    NUM_HOSTS = 2
    SOFTWARE_ONLY = True
    SWITCH_TO_SWITCH_LINKS = 2
    def acls(self):
        # Tunnel destination: dp = first switch, port = host 1's first port on dp 0.
        return {1: [{'rule': {'dl_type': IPV4_ETH, 'ip_proto': 1, 'actions': {'allow': 0, 'output': [{'tunnel': {'type': 'vlan', 'tunnel_id': 200, 'dp': self.topo.switches_by_id[0], 'port': self.host_port_maps[1][0][0]}}]}}}]}
    def link_acls(self):
        # Apply ACL 1 to link/port 0.
        return {0: [1]}
    def test_tunnel_established(self):
        """Bring the stack up and check tunneled traffic reaches the destination host."""
        self.set_up(stack=True, n_dps=self.NUM_DPS, n_untagged=self.NUM_HOSTS, switch_to_switch_links=self.SWITCH_TO_SWITCH_LINKS)
        self.verify_stack_up()
        src_host = self.net.get(self.topo.hosts_by_id[0])
        dst_host = self.net.get(self.topo.hosts_by_id[1])
        other_host = self.net.get(self.topo.hosts_by_id[2])
        self.verify_tunnel_established(src_host, dst_host, other_host)
class OptionPlotoptionsPackedbubbleSonificationTracksMappingNoteduration(Options):
    """Config accessors for `plotOptions.packedbubble.sonification.tracks.mapping.noteDuration`.

    Getter/setter pairs share one name per option (see the duplicate-def
    convention used throughout this generated module).
    """
    def mapFunction(self):
        # Getter (default None).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: stores the value as-is (not a JS snippet).
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class TestLocationRegistry(unittest.TestCase):
    """Unit tests for TargetRegistry's (target class, locator class) -> solver mapping."""
    def test_location_registry_empty(self):
        """An empty registry raises LocationNotSupported with an empty 'supported' list."""
        class SpecificTarget():
            pass
        class Locator():
            pass
        registry = TargetRegistry()
        with self.assertRaises(LocationNotSupported) as exception_context:
            registry._get_solver(SpecificTarget(), Locator())
        self.assertEqual(exception_context.exception.supported, [])
        self.assertEqual(str(exception_context.exception), f'Location {Locator!r} is not supported for {SpecificTarget!r}. Supported these: []')
    def test_register_location(self):
        """A registered (target, locator) pair resolves to the registered solver."""
        def solver(wrapper, location):
            return 1
        registry = TargetRegistry()
        registry.register_location(target_class=float, locator_class=str, solver=solver)
        self.assertIs(registry._get_solver(2.1, 'dummy'), solver)
    def test_register_location_report_existing(self):
        """An unsupported locator error lists the locator classes that ARE registered."""
        def solver(wrapper, location):
            return 1
        registry = TargetRegistry()
        registry.register_location(target_class=float, locator_class=str, solver=solver)
        with self.assertRaises(LocationNotSupported) as exception_context:
            registry._get_solver(3.4, None)
        self.assertEqual(exception_context.exception.supported, [str])
    def test_get_locations_supported(self):
        """_get_locations returns the set of locator classes registered for a target."""
        registry = TargetRegistry()
        class SpecificEditor():
            pass
        class Locator1():
            pass
        class Locator2():
            pass
        def solver(wrapper, location):
            return 1
        registry.register_location(target_class=SpecificEditor, locator_class=Locator1, solver=solver)
        registry.register_location(target_class=SpecificEditor, locator_class=Locator2, solver=solver)
        self.assertEqual(registry._get_locations(SpecificEditor()), {Locator1, Locator2})
    def test_get_location_help_default(self):
        """Solvers registered without explicit docs get the default documentation text."""
        class Locator():
            pass
        registry = TargetRegistry()
        registry.register_location(target_class=float, locator_class=Locator, solver=(lambda w, l: 1))
        help_text = registry._get_location_doc(target=2.345, locator_class=Locator)
        self.assertEqual(help_text, 'Some default documentation.')
    def test_error_get_interaction_doc(self):
        """Asking for docs on an unregistered locator raises LocationNotSupported."""
        registry = TargetRegistry()
        with self.assertRaises(LocationNotSupported):
            registry._get_location_doc(3.456, int)
class ChunkTracker():
    """SQLite-backed registry mapping chunk ids to their stored positions.

    One row per (id, pos, pos_type); timestamps are unix seconds.
    """
    def __init__(self, root_dir: str):
        """Open (creating directory and schema if needed) chunk_tracker.db under *root_dir*."""
        if (not os.path.exists(root_dir)):
            os.makedirs(root_dir)
        file = os.path.join(root_dir, 'chunk_tracker.db')
        logging.info(f'will init chunk tracker, db={file}')
        self.conn = sqlite3.connect(file)
        self.cursor = self.conn.cursor()
        self.cursor.execute('\n CREATE TABLE IF NOT EXISTS chunks (\n id TEXT NOT NULL,\n pos TEXT NOT NULL,\n pos_type TINYINT NOT NULL,\n insert_time UNSIGNED BIG INT NOT NULL,\n update_time UNSIGNED BIG INT NOT NULL,\n flags INTEGER DEFAULT 0,\n PRIMARY KEY(id, pos, pos_type)\n )\n ')
        self.conn.commit()
    def add_position(self, chunk_id: ChunkID, position: str, position_type: PositionType):
        """Insert (or refresh) one position record for *chunk_id*."""
        logging.debug(f'add chunk position: {chunk_id}, {position}, {position_type}')
        insert_time = update_time = int(time.time())
        self.cursor.execute('\n INSERT OR REPLACE INTO chunks (id, pos, pos_type, insert_time, update_time)\n VALUES (?, ?, ?, ?, ?)\n ', (str(chunk_id), position, position_type.value, insert_time, update_time))
        self.conn.commit()
    def remove_position(self, chunk_id: ChunkID):
        """Delete every position record for *chunk_id*."""
        logging.info(f'remove chunk position: {chunk_id}')
        self.cursor.execute('\n DELETE FROM chunks WHERE id = ?\n ', (str(chunk_id),))
        self.conn.commit()
    def get_position(self, chunk_id: ChunkID) -> List[Tuple[(str, PositionType)]]:
        """Return ALL (pos, pos_type) rows recorded for *chunk_id*."""
        self.cursor.execute('\n SELECT pos, pos_type FROM chunks WHERE id = ?\n ', (str(chunk_id),))
        # BUGFIX: fetchmany() with no argument returns only cursor.arraysize
        # rows (1 by default), silently dropping additional positions even
        # though the declared return type promises the full list.
        return self.cursor.fetchall()
def extractYorasuTranslations(item):
    """Map a Yorasu Translations feed item to a release message.

    Returns None for previews/untyped items, a release message for known
    series (matched by title prefix, then by tag), or False when unmatched.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    # Series recognized by title prefix.
    prefix_map = (
        ('DKFTOD', 'Devil King From The Otherworldly Dimension'),
        ('Hacker', 'Hacker'),
        ('Fallen God Records', 'Fallen God Records'),
    )
    for prefix, series in prefix_map:
        if item['title'].startswith(prefix):
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix)
    # Series recognized by tag (tag text doubles as the series name).
    for tagged_series in ('Godly Model Creator', 'Super Brain Telekinesis', 'Super soldier', 'The Different World Of Demon Lord'):
        if tagged_series in item['tags']:
            return buildReleaseMessageWithType(item, tagged_series, vol, chp, frag=frag, postfix=postfix)
    return False
def test_session_using_server_name(app, client):
    """Session cookie must be scoped to SERVER_NAME's domain and be httponly."""
    app.config.update(SERVER_NAME='example.com')

    # BUGFIX: this block was garbled in the source (missing route decorator,
    # unterminated client.get call, truncated assertion); reconstructed.
    @app.route('/')
    def index():
        flask.session['testing'] = 42
        return 'Hello World'
    # Request against the configured server name so the cookie domain is set.
    rv = client.get('/', 'http://example.com/')
    assert ('domain=.example.com' in rv.headers['set-cookie'].lower())
    assert ('httponly' in rv.headers['set-cookie'].lower())
def _add_vars_option(parser: argparse.ArgumentParser):
parser.add_argument('--vars', type=str, default='{}', help="\n Supply variables to the project. This argument overrides variables\n defined in your dbt_project.yml file. This argument should be a YAML\n string, eg. '{my_variable: my_value}'\n ") |
def eval_globs(globs):
    """Expand shell glob patterns into a flat list of matching files.

    A pattern ending in '/' means "everything inside that directory".
    Patterns that match nothing produce a warning instead of results.
    """
    matched = []
    for pattern in globs:
        if pattern.endswith('/'):
            pattern += '*'
        hits = glob.glob(pattern)
        if hits:
            matched.extend(hits)
        else:
            warn('{} did not match any files'.format(pattern))
    return matched
@pytest.fixture(scope='function')  # BUGFIX: decorator name was stripped, leaving a bare `(scope='function')` syntax error
def privacy_preference_history_fr_provide_service_frontend_only(db: Session, privacy_notice_fr_provide_service_frontend_only) -> Generator:
    """Yield an opt-in privacy preference (user_geography fr_idg) tied to a
    throwaway email identity; deletes both records on teardown."""
    provided_identity_data = {'privacy_request_id': None, 'field_name': 'email', 'hashed_value': ProvidedIdentity.hash_value(''), 'encrypted_value': {'value': ''}}
    provided_identity = ProvidedIdentity.create(db, data=provided_identity_data)
    pref_1 = PrivacyPreferenceHistory.create(db=db, data={'preference': 'opt_in', 'provided_identity_id': provided_identity.id, 'privacy_notice_history_id': privacy_notice_fr_provide_service_frontend_only.privacy_notice_history_id, 'user_geography': 'fr_idg'}, check_name=False)
    yield pref_1
    # Teardown: remove the records created above.
    pref_1.delete(db)
    provided_identity.delete(db)
def main():
    """Build the GPG Sync macOS app bundle and, with --with-codesign, sign and package it."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--with-codesign', action='store_true', dest='with_codesign', help='Codesign the app bundle')
    args = parser.parse_args()
    build_path = os.path.join(root, 'build')
    dist_path = os.path.join(root, 'dist')
    dist_root_path = os.path.join(root, 'dist', 'root')
    app_path = os.path.join(dist_path, 'GPG Sync.app')
    component_pkg_path = os.path.join(root, 'dist', 'GPGSyncComponent.pkg')
    pkg_path = os.path.join(dist_path, 'GPGSync.pkg')
    # Start from a clean slate.
    print(' Deleting old build and dist')
    if os.path.exists(build_path):
        shutil.rmtree(build_path)
    if os.path.exists(dist_path):
        shutil.rmtree(dist_path)
    print(' Building app bundle')
    run(['pyinstaller', os.path.join(root, 'install/pyinstaller.spec'), '--clean'])
    # pyinstaller also leaves a one-dir build next to the .app; drop it.
    shutil.rmtree(os.path.join(dist_path, 'gpgsync'))
    print(f' Finished build app: {app_path}')
    if args.with_codesign:
        print(' Code signing app bundle')
        identity_name_application = 'Developer ID Application: FIRST LOOK PRODUCTIONS, INC. (P24U45L8P5)'
        identity_name_installer = 'Developer ID Installer: FIRST LOOK PRODUCTIONS, INC. (P24U45L8P5)'
        entitlements_plist_path = os.path.join(root, 'install', 'macos-packaging', 'entitlements.plist')
        # Sign every nested binary (.so, .dylib, embedded Python3) before the bundle itself.
        for path in itertools.chain(glob.glob(f'{app_path}/**/*.so', recursive=True), glob.glob(f'{app_path}/**/*.dylib', recursive=True), glob.glob(f'{app_path}/**/Python3', recursive=True), [app_path]):
            codesign(path, entitlements_plist_path, identity_name_application)
        print(f' Signed app bundle: {app_path}')
        # Stage the bundle under /Applications and wrap it in a signed installer pkg.
        os.makedirs(os.path.join(dist_root_path, 'Applications'))
        shutil.move(app_path, os.path.join(dist_root_path, 'Applications'))
        run(['pkgbuild', '--sign', identity_name_installer, '--root', dist_root_path, '--component-plist', os.path.join(root, 'install', 'macos-packaging', 'gpgsync-component.plist'), '--scripts', os.path.join(root, 'install', 'macos-packaging', 'scripts'), component_pkg_path])
        run(['productbuild', '--sign', identity_name_installer, '--package', component_pkg_path, pkg_path])
        # Remove intermediate artifacts, keeping only the final product pkg.
        shutil.rmtree(os.path.join(dist_root_path))
        os.remove(component_pkg_path)
        print(f' Finished building package: {pkg_path}')
    else:
        print(' Skipping code signing')
def test_get_languages_raise_on_wrong_cmd():
    """get_languages must raise FileNotFoundError when the tesseract binary is missing."""
    tesseract_cmd = 'non-existing-binary'
    tessdata_path = system_info.get_tessdata_path()
    with pytest.raises(FileNotFoundError, match='Could not find Tesseract binary'):
        _ = tesseract.get_languages(tesseract_cmd=tesseract_cmd, tessdata_path=tessdata_path)
def parse_offxml(offxml_filename):
    """Parse an OpenFF .offxml force-field file.

    Returns (vdw_list, dihedral_list, vdw_by_atype) built from the single
    vdW and ProperTorsions sections; unexpected child tags are skipped with
    a notice.
    """
    root = ET.parse(offxml_filename).getroot()
    torsion_sections = root.findall('ProperTorsions')
    vdw_sections = root.findall('vdW')
    # The file format carries exactly one of each section.
    assert (len(torsion_sections) == 1)
    assert (len(vdw_sections) == 1)
    torsion_section = torsion_sections[0]
    vdw_section = vdw_sections[0]
    vdw_list = []
    for node in vdw_section:
        if (node.tag != 'Atom'):
            print((' SKIPPING: %s' % node.tag))
            continue
        vdw_list.append(make_vdw_entry(node.attrib))
    atomic_numbers = assign_atypes(vdw_list)
    vdw_by_atype = make_vdw_by_atype(vdw_list, atomic_numbers)
    dihedral_list = []
    for node in torsion_section:
        if (node.tag != 'Proper'):
            print((' SKIPPING: %s' % node.tag))
            continue
        dihedral_list.append(make_dihedral_entry(node.attrib))
    return (vdw_list, dihedral_list, vdw_by_atype)
def confirm_email(bot, update: Update, user, render):
    """Check the user's reply against their confirmation key and confirm the email on match."""
    entered_key = update.message.text.strip()
    if user.confirmation == entered_key:
        user.is_confirmed = True
        user.save()
        update.message.reply_text(text=render('email_is_confirmed'))
    else:
        update.message.reply_text(text=render('confirmation_failure'))
def test_getattr(provider):
    """The provider exposes `dependency` as a providers.Dependency attribute."""
    # Probe with hasattr first, then access directly; both must agree.
    has_dependency = hasattr(provider, 'dependency')
    dependency = provider.dependency
    assert isinstance(dependency, providers.Dependency)
    # Repeated access returns the same object, not a fresh one.
    assert (dependency is provider.dependency)
    assert (has_dependency is True)
    assert (dependency.last_overriding is None)
class OptionPlotoptionsSplineSonificationDefaultinstrumentoptionsMappingPan(Options):
    """Config accessors for `plotOptions.spline.sonification.defaultInstrumentOptions.mapping.pan`.

    Getter/setter pairs share one name per option (see the duplicate-def
    convention used throughout this generated module).
    """
    def mapFunction(self):
        # Getter (default None).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: stores the value as-is (not a JS snippet).
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def ArticleList(request):
    """Render the paginated article listing, optionally filtered by category.

    GET params: `type` (category id filter), `page` (page number; blank or
    non-integer values fall back to page 1).
    """
    article = Article.objects.filter(is_show=True)
    category = Category_Article.objects.all().order_by('order')
    category_id = request.GET.get('type', '')  # renamed from `type` to avoid shadowing the builtin
    page = request.GET.get('page', 1)
    if category_id:
        article = Article.objects.filter(category_id=category_id, is_show=True)
    if (page == ''):
        page = 1
    p = Paginator(article, 10, request=request)
    try:
        people = p.page(page)
    except PageNotAnInteger:
        # BUGFIX: p.page() — the only call that raises PageNotAnInteger —
        # used to sit outside the try block, so this handler was dead code.
        people = p.page(1)
    headlines = Headlines.objects.all()[:30]
    banners = Banners.objects.first()
    return render(request, 'pc/article.html', {'article': people, 'category': category, 'Headlines': headlines, 'banners': banners})
def create_hyperlink(file_name: str, full_name: str, module: str, repo_url: str) -> str:
    """Return *file_name* as a markdown link to its file on the git host.

    Falls back to the bare file name when *full_name* contains 'invalid'.
    """
    logger.debug(f'''Creating git host file hyperlink:
File: {file_name}
Full name: {full_name}
Module: {module}
Repo URL: {repo_url}''')
    # Names flagged as invalid get no link target.
    if ('invalid' in full_name.lower()):
        return file_name
    git_file_link = get_remote_file_url(module, full_name, repo_url)
    return f'[{file_name}]({git_file_link})'
def _create_plot_component():
    """Build a Chaco Plot showing exp(-(x^2+y^2)/100) as an image with pan/zoom tools."""
    # 600x600 grid over x in [0, 10], y in [0, 5].
    xs = linspace(0, 10, 600)
    ys = linspace(0, 5, 600)
    (x, y) = meshgrid(xs, ys)
    z = exp(((- ((x ** 2) + (y ** 2))) / 100))
    pd = ArrayPlotData()
    pd.set_data('imagedata', z)
    plot = Plot(pd)
    img_plot = plot.img_plot('imagedata', xbounds=(0, 10), ybounds=(0, 5), colormap=viridis)[0]
    # Keep a fixed 0.5 aspect ratio while resizing.
    img_plot.index_mapper.aspect_ratio = 0.5
    img_plot.index_mapper.maintain_aspect_ratio = True
    plot.title = 'My First Image Plot'
    plot.padding = 50
    # Pan on the whole plot; box-zoom overlaid on the image renderer.
    plot.tools.append(PanTool(plot))
    zoom = ZoomTool(component=img_plot, tool_mode='box', always_on=False)
    img_plot.overlays.append(zoom)
    return plot
def automl_classification_predict_async_arguments(provider_name: str) -> Dict:
    """Build the test-argument dict for an async AutoML classification predict call.

    Raises NotImplementedError for providers without a configured project id.
    """
    project_ids = {'nyckel': 'function_yfisrgk70k1iuroq'}
    if provider_name not in project_ids:
        raise NotImplementedError(f'Please add a project id for test arguments of provider: {provider_name}')
    return {'project_id': project_ids[provider_name], 'file': file_wrapper}
# BUGFIX(review): the decorator lines below were garbled to bare tuples in the
# source (a syntax error); reconstructed as a click command with options —
# confirm the `click` import at the top of this file.
@click.command()
@click.option('--outage_time', '-o', default=None, help='start time of the outage')
@click.option('--ticket', '-t', default=None, help='fedora infra ticket ID')
@click.option('--rest', default=None, help='additional code')
@click.option('--remove', is_flag=True, show_default=True, default=False, help='removes banner')
def warning_banner(outage_time, ticket, rest, remove):
    """Write the outage banner include file at BANNER_LOCATION (or delete it with --remove)."""
    if remove:
        # --remove is exclusive of the content options.
        if (outage_time or ticket or rest):
            print("Error: can't remove banner with `outage_time` or `ticket` or `rest`", file=sys.stderr)
            return
        if os.path.exists(BANNER_LOCATION):
            os.remove(BANNER_LOCATION)
        return
    if ((outage_time is None) and (rest is None)):
        print('Error: `outage_time` or `rest` should be present.', file=sys.stderr)
        return
    with open(BANNER_LOCATION, 'w', encoding='UTF-8') as banner:
        # Render the banner from the shared template shipped with the frontend.
        banner_path_template = (Path(__file__).parent / '../coprs/templates/banner-include.html')
        with banner_path_template.open() as banner_template:
            template = Template(banner_template.read())
        banner.write(template.render(outage_time=outage_time, ticket=ticket, rest=rest))
def upload_items_to_unicommerce(item_codes: List[ItemCode], client: UnicommerceAPIClient=None) -> List[ItemCode]:
    """Create or update each item on Unicommerce; return the codes that synced successfully."""
    api = client or UnicommerceAPIClient()
    synced = []
    for code in item_codes:
        payload = _build_unicommerce_item(code)
        sku = payload.get('skuCode')
        # Existing SKUs get an update call instead of a create.
        already_exists = bool(api.get_unicommerce_item(sku, log_error=False))
        (_, ok) = api.create_update_item(payload, update=already_exists)
        if ok:
            _handle_ecommerce_item(code)
            synced.append(code)
    return synced
def test_types():
    """Hover over derived-type declarations returns the full type headers."""
    string = write_rpc_request(1, 'initialize', {'rootPath': str((test_dir / 'hover'))})
    file_path = ((test_dir / 'hover') / 'types.f90')
    # One hover request per type declaration (line, character) position.
    string += hover_req(file_path, 3, 25)
    string += hover_req(file_path, 6, 44)
    string += hover_req(file_path, 9, 35)
    (errcode, results) = run_request(string, fortls_args=['-n', '1'])
    assert (errcode == 0)
    # Expected hover markdown: abstract base, abstract extension, concrete extension.
    ref_results = ['```fortran90\nTYPE, ABSTRACT :: base_t\n```', '```fortran90\nTYPE, ABSTRACT, EXTENDS(base_t) :: extends_t\n```', '```fortran90\nTYPE, EXTENDS(extends_t) :: a_t\n```']
    validate_hover(results, ref_results)
def _get_item_price(line_item, taxes_inclusive: bool) -> float:
    """Return the unit price of a line item net of its per-unit discount
    (and per-unit taxes when prices are tax-inclusive)."""
    price = flt(line_item.get('price'))
    # ROBUSTNESS: a missing/zero quantity previously caused ZeroDivisionError;
    # treat it as a single unit.
    qty = cint(line_item.get('quantity')) or 1
    total_discount = _get_total_discount(line_item)
    if (not taxes_inclusive):
        return (price - (total_discount / qty))
    total_taxes = 0.0
    # ROBUSTNESS: tolerate a missing 'tax_lines' key (previously TypeError).
    for tax in (line_item.get('tax_lines') or []):
        total_taxes += flt(tax.get('price'))
    return (price - ((total_taxes + total_discount) / qty))
def extractJoseonworldBlogspotCom(item):
    """Map a joseonworld.blogspot.com feed item to a release message.

    Returns None for previews/untyped items, a typed release message for
    known tags, or False when no tag matches.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    # (tag to look for, series name, translation type)
    known_tags = (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel'))
    for tag, series_name, tl_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionPlotoptionsLineSonificationDefaultinstrumentoptionsMappingHighpass(Options):
    """Accessors for `plotOptions.line.sonification.defaultInstrumentOptions.mapping.highpass`.

    Each method delegates to `_config_sub_data` to expose the named nested
    option group.
    """
    def frequency(self) -> 'OptionPlotoptionsLineSonificationDefaultinstrumentoptionsMappingHighpassFrequency':
        return self._config_sub_data('frequency', OptionPlotoptionsLineSonificationDefaultinstrumentoptionsMappingHighpassFrequency)
    def resonance(self) -> 'OptionPlotoptionsLineSonificationDefaultinstrumentoptionsMappingHighpassResonance':
        return self._config_sub_data('resonance', OptionPlotoptionsLineSonificationDefaultinstrumentoptionsMappingHighpassResonance)
class MFBiasContinuous(nn.Module):
    """Matrix-factorization model over product pairs with per-product biases.

    Score = dot(emb(p1), emb(p2)) + global bias + bias(p1) + bias(p2).
    `forward` returns the raw score; `predict` squashes it through a sigmoid.
    """
    def __init__(self, emb_size, emb_dim, c_vector=1e-06, c_bias=1e-06):
        """emb_size: number of products; emb_dim: embedding width;
        c_vector/c_bias: L2 regularization strengths used by loss()."""
        super().__init__()
        self.emb_size = emb_size
        self.emb_dim = emb_dim
        self.c_vector = c_vector
        self.c_bias = c_bias
        self.product_embedding = nn.Embedding(emb_size, emb_dim)
        self.sig = nn.Sigmoid()
        self.product_bias = nn.Embedding(emb_size, 1)
        # Single learnable global offset.
        self.bias = nn.Parameter(torch.ones(1))
        self.mse = nn.MSELoss()
        logger.info('Model initialized: {}'.format(self))
    def _score(self, product1, product2):
        # Shared scoring path (was duplicated verbatim in forward/predict).
        emb_product1 = self.product_embedding(product1)
        emb_product2 = self.product_embedding(product2)
        interaction = torch.sum((emb_product1 * emb_product2), dim=1, dtype=torch.float)
        bias_product1 = self.product_bias(product1).squeeze()
        bias_product2 = self.product_bias(product2).squeeze()
        biases = ((self.bias + bias_product1) + bias_product2)
        return (interaction + biases)
    def forward(self, product1, product2):
        """Raw (unsquashed) interaction score per pair."""
        return self._score(product1, product2)
    def predict(self, product1, product2):
        """Sigmoid-squashed score in (0, 1)."""
        return self.sig(self._score(product1, product2))
    def loss(self, pred, label):
        """MSE plus L2 priors on the embedding and bias tables."""
        mf_loss = self.mse(pred, label)
        product_prior = (regularize_l2(self.product_embedding.weight) * self.c_vector)
        product_bias_prior = (regularize_l2(self.product_bias.weight) * self.c_bias)
        loss_total = ((mf_loss + product_prior) + product_bias_prior)
        return loss_total
class FromReader(UtilsFromReader):
    """Reader that zeroes out every recognized instruction/pointer region in `ffrom`."""
    def __init__(self, ffrom, b, bl, add, add_generic, ldr, adrp, str_, str_imm_64, data_pointers, code_pointers, b_blocks, bl_blocks, add_blocks, add_generic_blocks, ldr_blocks, adrp_blocks, str_blocks, str_imm_64_blocks, data_pointers_blocks, code_pointers_blocks):
        super().__init__(ffrom)
        # Zero each instruction category (block list paired with its values).
        instruction_groups = (
            (b_blocks, b),
            (bl_blocks, bl),
            (add_blocks, add),
            (add_generic_blocks, add_generic),
            (ldr_blocks, ldr),
            (adrp_blocks, adrp),
            (str_blocks, str_),
            (str_imm_64_blocks, str_imm_64),
        )
        for blocks, values in instruction_groups:
            self._write_zeros_to_from(blocks, values)
        # Pointer tables are optional and are overwritten 8 bytes at a time.
        for blocks, values in ((data_pointers_blocks, data_pointers), (code_pointers_blocks, code_pointers)):
            if blocks is not None:
                self._write_zeros_to_from(blocks, values, overwrite_size=8)
def validate_object(rule):
    """Validate rule['value'] against the schema in rule['object_keys'].

    Raises InvalidParameterException for non-dict/empty values or unexpected
    fields, UnprocessableEntityException for a missing required field
    (one whose spec has 'optional': False). Returns the validated object.
    """
    provided_object = rule['value']
    # IMPROVEMENT: isinstance (not an exact type test) also accepts dict
    # subclasses such as OrderedDict.
    if not isinstance(provided_object, dict):
        raise InvalidParameterException(INVALID_TYPE_MSG.format(**rule))
    if (not provided_object):
        raise InvalidParameterException("'{}' is empty. Please populate object".format(rule['key']))
    expected_keys = rule['object_keys']
    for field in provided_object:
        if field not in expected_keys:
            raise InvalidParameterException("Unexpected field '{}' in parameter {}".format(field, rule['key']))
    for (key, spec) in expected_keys.items():
        # Only specs explicitly marked 'optional': False are required.
        if (key not in provided_object) and (spec.get('optional') is False):
            raise UnprocessableEntityException(f"Required object fields: {list(expected_keys.keys())}")
    return provided_object
# BUGFIX(review): the decorator lines below were garbled to bare expressions
# in the source (`nargs=(-1)` in a bare tuple is a syntax error); reconstructed
# as a click command with positional arguments — confirm the group object
# (`root`) and the `click` import upstream.
@root.command('build-threat-map-entry')
@click.argument('tactic')
@click.argument('technique-ids', nargs=(- 1))
def build_threat_map(tactic: str, technique_ids: Iterable[str]):
    """Print (and return) a rule `threat` mapping TOML snippet for the given tactic/techniques."""
    entry = build_threat_map_entry(tactic, *technique_ids)
    rendered = pytoml.dumps({'rule': {'threat': [entry]}})
    # Drop the leading table-header lines so the snippet can be pasted into an
    # existing rule file.
    cleaned = '\n'.join(rendered.splitlines()[2:])
    print(cleaned)
    return entry
class OptionPlotoptionsColumnrangeSonificationDefaultinstrumentoptionsMappingHighpass(Options):
    """Accessors for `plotOptions.columnrange.sonification.defaultInstrumentOptions.mapping.highpass`.

    Each method delegates to `_config_sub_data` to expose the named nested
    option group.
    """
    def frequency(self) -> 'OptionPlotoptionsColumnrangeSonificationDefaultinstrumentoptionsMappingHighpassFrequency':
        return self._config_sub_data('frequency', OptionPlotoptionsColumnrangeSonificationDefaultinstrumentoptionsMappingHighpassFrequency)
    def resonance(self) -> 'OptionPlotoptionsColumnrangeSonificationDefaultinstrumentoptionsMappingHighpassResonance':
        return self._config_sub_data('resonance', OptionPlotoptionsColumnrangeSonificationDefaultinstrumentoptionsMappingHighpassResonance)
# BUGFIX(review): the source had a bare `_os(*metadata.platforms)` call — a
# stripped decorator (NameError at import). Reconstructed as the RTA-style
# `@common.requires_os` guard; confirm the original decorator name upstream.
@common.requires_os(*metadata.platforms)
def main():
    """Emulate a suspicious write to Mail's SyncedRules.plist, then clean up."""
    common.log('Executing file modification on SyncedRules.plist file.')
    plist_path = Path(f'{Path.home()}/Library/Mobile Documents/com.apple.mail/Data/test/MailData/')
    plist_path.mkdir(exist_ok=True, parents=True)
    output_file = (plist_path / 'SyncedRules.plist')
    with open(output_file, 'w') as f:
        f.write('test')
    # Remove the whole test directory so repeated runs start clean.
    common.remove_directory(f'{Path.home()}/Library/Mobile Documents/com.apple.mail/Data/test/')
def verify_board(minefield):
    """Raise ValueError unless the board is rectangular and uses only ' ' and '*'."""
    expected_width = len(minefield[0])
    # Every row must match the first row's width.
    for row in minefield:
        if len(row) != expected_width:
            raise ValueError('The board is invalid with current input.')
    # Only spaces and mines are legal characters.
    seen = set()
    for row in minefield:
        seen.update(row)
    if seen - set(' *'):
        raise ValueError('The board is invalid with current input.')
# BUGFIX(review): the source had a bare `_toolkit([...])` call — a stripped
# decorator (NameError at import). Reconstructed as the traitsui test guard
# `@requires_toolkit`; confirm the original decorator name upstream.
@requires_toolkit([ToolkitName.qt, ToolkitName.wx])
class TestShellEditor(BaseTestMixin, unittest.TestCase):
    """Smoke tests: the shell editor UI can be created with dict- or str-backed
    locals, shared or not."""
    def setUp(self):
        BaseTestMixin.setUp(self)
    def tearDown(self):
        BaseTestMixin.tearDown(self)
    def smoke_test(self, locals_type, share):
        # Creating the UI without raising is the whole assertion.
        shell_test = ShellTest()
        tester = UITester()
        if (locals_type == 'str'):
            with tester.create_ui(shell_test, dict(view=get_str_view(share))):
                pass
        else:
            with tester.create_ui(shell_test, dict(view=get_dict_view(share))):
                pass
    def test_no_share_dict(self):
        self.smoke_test('dict', False)
    def test_share_dict(self):
        self.smoke_test('dict', True)
    def test_no_share_str(self):
        self.smoke_test('str', False)
    def test_share_str(self):
        self.smoke_test('str', True)
class StplParser(object):
    """Parser for stpl templates: scans the template source and translates it
    into Python code, tracking indentation for embedded code blocks."""
    #: Cache of compiled regex triples, keyed by the syntax string.
    _re_cache = {}
    # Token group 1: any Python string literal (all prefix/quote forms).
    _re_tok = '(\n [urbURB]*\n (?: \'\'(?!\')\n |""(?!")\n |\'{6}\n |"{6}\n |\'(?:[^\\\\\']|\\\\.)+?\'\n |"(?:[^\\\\"]|\\\\.)+?"\n |\'{3}(?:[^\\\\]|\\\\.|\\n)+?\'{3}\n |"{3}(?:[^\\\\]|\\\\.|\\n)+?"{3}\n )\n )'
    # Inline expressions may not span lines: drop the newline alternative.
    _re_inl = _re_tok.replace('|\\n', '')
    _re_tok += "\n # 2: Comments (until end of line, but not the newline itself)\n |(\\#.*)\n\n # 3: Open and close (4) grouping tokens\n |([\\[\\{\\(])\n |([\\]\\}\\)])\n\n # 5,6: Keywords that start or continue a python block (only start of line)\n |^([\\ \\t]*(?:if|for|while|with|try|def|class)\\b)\n |^([\\ \\t]*(?:elif|else|except|finally)\\b)\n\n # 7: Our special 'end' keyword (but only if it stands alone)\n |((?:^|;)[\\ \\t]*end[\\ \\t]*(?=(?:%(block_close)s[\\ \\t]*)?\\r?$|;|\\#))\n\n # 8: A customizable end-of-code-block template token (only end of line)\n |(%(block_close)s[\\ \\t]*(?=\\r?$))\n\n # 9: And finally, a single newline. The 10th token is 'everything else'\n |(\\r?\\n)\n "
    # Matches the start of a code line or code block (group 1 is the escape backslash).
    _re_split = '(?m)^[ \\t]*(\\\\?)((%(line_start)s)|(%(block_start)s))'
    _re_inl = ('%%(inline_start)s((?:%s|[^\'"\\n]+?)*?)%%(inline_end)s' % _re_inl)
    _re_tok = ('(?mx)' + _re_tok)
    _re_inl = ('(?mx)' + _re_inl)
    #: Default template syntax: block open/close, line marker, inline open/close.
    default_syntax = '<% %> % {{ }}'
    def __init__(self, source, syntax=None, encoding='utf8'):
        """Decode *source* and set up one-shot parser state for translate()."""
        (self.source, self.encoding) = (touni(source, encoding), encoding)
        self.set_syntax((syntax or self.default_syntax))
        (self.code_buffer, self.text_buffer) = ([], [])
        (self.lineno, self.offset) = (1, 0)
        (self.indent, self.indent_mod) = (0, 0)
        self.paren_depth = 0
    def get_syntax(self):
        """Return the current syntax string (whitespace-separated tokens)."""
        return self._syntax
    def set_syntax(self, syntax):
        """Set a new syntax string and (re)build the compiled regexes for it."""
        self._syntax = syntax
        self._tokens = syntax.split()
        if (syntax not in self._re_cache):
            names = 'block_start block_close line_start inline_start inline_end'
            etokens = map(re.escape, self._tokens)
            pattern_vars = dict(zip(names.split(), etokens))
            patterns = (self._re_split, self._re_tok, self._re_inl)
            patterns = [re.compile((p % pattern_vars)) for p in patterns]
            self._re_cache[syntax] = patterns
        (self.re_split, self.re_tok, self.re_inl) = self._re_cache[syntax]
    syntax = property(get_syntax, set_syntax)
    def translate(self):
        """Translate the whole template source; return the generated code string."""
        if self.offset:
            raise RuntimeError('Parser is a one time instance.')
        while True:
            m = self.re_split.search(self.source, pos=self.offset)
            if m:
                text = self.source[self.offset:m.start()]
                self.text_buffer.append(text)
                self.offset = m.end()
                if m.group(1):
                    # Escaped code marker: emit the line as literal text (minus the backslash).
                    (line, sep, _) = self.source[self.offset:].partition('\n')
                    self.text_buffer.append((((self.source[m.start():m.start(1)] + m.group(2)) + line) + sep))
                    self.offset += len((line + sep))
                    continue
                self.flush_text()
                self.offset += self.read_code(self.source[self.offset:], multiline=bool(m.group(4)))
            else:
                break
        self.text_buffer.append(self.source[self.offset:])
        self.flush_text()
        return ''.join(self.code_buffer)
    def read_code(self, pysource, multiline):
        """Consume embedded python code from *pysource*; return chars consumed."""
        (code_line, comment) = ('', '')
        offset = 0
        while True:
            m = self.re_tok.search(pysource, pos=offset)
            if (not m):
                code_line += pysource[offset:]
                offset = len(pysource)
                self.write_code(code_line.strip(), comment)
                break
            code_line += pysource[offset:m.start()]
            offset = m.end()
            (_str, _com, _po, _pc, _blk1, _blk2, _end, _cend, _nl) = m.groups()
            if ((self.paren_depth > 0) and (_blk1 or _blk2)):
                # Inside parentheses, block keywords are plain code, not blocks.
                code_line += (_blk1 or _blk2)
                continue
            if _str:
                code_line += _str
            elif _com:
                comment = _com
                if (multiline and _com.strip().endswith(self._tokens[1])):
                    multiline = False
            elif _po:
                self.paren_depth += 1
                code_line += _po
            elif _pc:
                if (self.paren_depth > 0):
                    self.paren_depth -= 1
                code_line += _pc
            elif _blk1:
                # Block-starting keyword: indent following lines once.
                code_line = _blk1
                self.indent += 1
                self.indent_mod -= 1
            elif _blk2:
                # Block-continuing keyword (elif/else/...): dedent this line only.
                code_line = _blk2
                self.indent_mod -= 1
            elif _cend:
                if multiline:
                    multiline = False
                else:
                    code_line += _cend
            elif _end:
                self.indent -= 1
                self.indent_mod += 1
            else:
                # Newline token: flush the collected code line.
                self.write_code(code_line.strip(), comment)
                self.lineno += 1
                (code_line, comment, self.indent_mod) = ('', '', 0)
                if (not multiline):
                    break
        return offset
    def flush_text(self):
        """Emit buffered template text as a _printlist(...) code line."""
        text = ''.join(self.text_buffer)
        del self.text_buffer[:]
        if (not text):
            return
        (parts, pos, nl) = ([], 0, ('\\\n' + (' ' * self.indent)))
        for m in self.re_inl.finditer(text):
            (prefix, pos) = (text[pos:m.start()], m.end())
            if prefix:
                parts.append(nl.join(map(repr, prefix.splitlines(True))))
            if prefix.endswith('\n'):
                parts[(- 1)] += nl
            parts.append(self.process_inline(m.group(1).strip()))
        if (pos < len(text)):
            prefix = text[pos:]
            lines = prefix.splitlines(True)
            # A trailing double backslash suppresses the newline in output.
            if lines[(- 1)].endswith('\\\\\n'):
                lines[(- 1)] = lines[(- 1)][:(- 3)]
            elif lines[(- 1)].endswith('\\\\\r\n'):
                lines[(- 1)] = lines[(- 1)][:(- 4)]
            parts.append(nl.join(map(repr, lines)))
        code = ('_printlist((%s,))' % ', '.join(parts))
        self.lineno += (code.count('\n') + 1)
        self.write_code(code)
    def process_inline(self, chunk):
        """Translate one inline expression; a '!' prefix skips HTML-escaping.

        BUGFIX: this was defined without ``self`` while being invoked as
        ``self.process_inline(...)`` in flush_text, which raised TypeError.
        """
        if (chunk[0] == '!'):
            return ('_str(%s)' % chunk[1:])
        return ('_escape(%s)' % chunk)
    def write_code(self, line, comment=''):
        """Append one line of generated code at the current indentation level."""
        code = (' ' * (self.indent + self.indent_mod))
        code += ((line.lstrip() + comment) + '\n')
        self.code_buffer.append(code)
_write_router.delete('/{project_id}/{snapshot_id}')
def delete_snapshot(project_id: Annotated[(uuid.UUID, PROJECT_ID)], snapshot_id: Annotated[(uuid.UUID, SNAPSHOT_ID)], project_manager: ProjectManager=Depends(get_project_manager), log_event: Callable=Depends(event_logger), user_id: UserID=Depends(get_user_id)):
    """Delete a single snapshot from a project and record the action.

    Args:
        project_id: UUID of the project that owns the snapshot (path param).
        snapshot_id: UUID of the snapshot to delete (path param).
        project_manager: injected manager that performs the deletion.
        log_event: injected event logger callable.
        user_id: ID of the requesting user, passed through to the manager.
    """
    # Delete first, then log: the event is only recorded if the delete
    # call returns without raising.
    project_manager.delete_snapshot(user_id, project_id, snapshot_id)
    log_event('delete_snapshot')
def get_fields(required=None, valid_extra=None):
    """Return the request's JSON body after validating its field names.

    Args:
        required: field names that must all be present; missing any → 400.
        valid_extra: additional field names allowed besides *required*.

    Returns:
        The parsed JSON payload.
    """
    data = req_json()
    if required:
        missing = [field for field in required if field not in data]
        if missing:
            abort(400, f'must have the fields {required}')
    valid = (required or []) + (valid_extra or [])
    unknown = [field for field in data if field not in valid]
    if unknown:
        abort(400, f'can only have the fields {valid}')
    return data
_blueprint.route('/project/<project_id>/delete/<distro_name>/<pkg_name>', methods=['GET', 'POST'])
_required
def delete_project_mapping(project_id, distro_name, pkg_name):
    """Confirm and delete a project's package mapping for a distribution.

    GET renders a confirmation page; a validated POST with ``confirm`` set
    deletes the mapping, publishes a ``project.map.remove`` message, and
    redirects back to the project page.

    :param project_id: identifier of the project.
    :param distro_name: name of the distribution the mapping belongs to.
    :param pkg_name: name of the mapped package.
    """
    project = models.Project.get(Session, project_id)
    if (not project):
        flask.abort(404)
    distro = models.Distro.get(Session, distro_name)
    if (not distro):
        flask.abort(404)
    package = models.Packages.get(Session, project.id, distro.name, pkg_name)
    if (not package):
        flask.abort(404)
    # Existence checks run before the admin check, so unknown mappings 404
    # for everyone; only existing mappings yield 401 for non-admins.
    if (not is_admin()):
        flask.abort(401)
    form = anitya.forms.ConfirmationForm()
    confirm = flask.request.form.get('confirm', False)
    if form.validate_on_submit():
        if confirm:
            # The message is published before the row is deleted; the delete
            # only becomes durable at commit().
            utilities.publish_message(project=project.__json__(), topic='project.map.remove', message=dict(agent=flask.g.user.username, project=project.name, distro=distro.name))
            Session.delete(package)
            Session.commit()
            flask.flash(f'Mapping for {project.name} has been removed')
        # A validated submission without 'confirm' acts as a cancel: redirect
        # back to the project page without deleting anything.
        return flask.redirect(flask.url_for('anitya_ui.project', project_id=project.id))
    return flask.render_template('regex_delete.html', current='projects', project=project, package=package, form=form)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.