code stringlengths 281 23.7M |
|---|
def build_threat_map_entry(tactic: str, *technique_ids: str) -> dict:
    """Build a threat-map entry for a MITRE ATT&CK tactic and its technique IDs.

    Raises ValueError for deprecated technique IDs or IDs that do not belong
    to the given tactic. Redirected (renamed) technique IDs are resolved first.
    """
    techniques_redirect_map = load_techniques_redirect()
    # Restored URL template: sub-technique IDs like T1059.001 map to
    # https://attack.mitre.org/techniques/T1059/001/
    url_base = 'https://attack.mitre.org/{type}/{id}/'
    tactic_id = tactics_map[tactic]
    tech_entries = {}

    def make_entry(_id):
        """Build a single technique/sub-technique reference entry."""
        return {
            'id': _id,
            'name': technique_lookup[_id]['name'],
            'reference': url_base.format(type='techniques', id=_id.replace('.', '/')),
        }

    for tid in technique_ids:
        if tid in deprecated:
            raise ValueError(f'Technique ID: {tid} has been deprecated and should not be used')
        elif tid in techniques_redirect_map:
            # Follow the redirect so renamed techniques keep validating.
            tid = techniques_redirect_map[tid]
        if tid not in matrix[tactic]:
            raise ValueError(f'Technique ID: {tid} does not fall under tactic: {tactic}')
        if '.' in tid:
            # Sub-technique: ensure the parent entry exists, then nest under it.
            parent_technique, _ = tid.split('.', 1)
            tech_entries.setdefault(parent_technique, make_entry(parent_technique))
            tech_entries[parent_technique].setdefault('subtechnique', []).append(make_entry(tid))
        else:
            tech_entries.setdefault(tid, make_entry(tid))

    entry = {
        'framework': 'MITRE ATT&CK',
        'tactic': {'id': tactic_id, 'name': tactic, 'reference': url_base.format(type='tactics', id=tactic_id)},
    }
    if tech_entries:
        entry['technique'] = sorted(tech_entries.values(), key=lambda x: x['id'])
    return entry
class ChannelInterface(QObject):
    """Thin wrapper around gRPC channel creation/teardown for node-manager daemons."""

    # Emitted on failures: (method, uri, grpc_path, exception)
    error = Signal(str, str, str, Exception)

    def __init__(self):
        QObject.__init__(self)
        self._threads = ThreadManager()

    def get_insecure_channel(self, uri):
        """Open an insecure gRPC channel to *uri*; raise if the daemon is unreachable."""
        channel = remote.get_insecure_channel(uri)
        if channel is None:
            raise Exception("Node manager daemon '%s' not reachable" % uri)
        return channel

    def close_channel(self, channel, uri):
        """Close *channel* (no-op when it is already None)."""
        if channel is None:
            return
        rospy.logdebug('close channel to %s' % uri)
        channel.close()

    def clear_cache(self, grpc_path=''):
        # Subclasses may cache per-path state; base implementation keeps none.
        pass

    def stop(self):
        """Release cached state and drop the worker-thread manager."""
        self.clear_cache()
        del self._threads
class StrEnum(str, Enum):
    """Enum whose members are also (and must be) strings (backport-style)."""

    def __new__(cls: Type[_S], *values: str) -> _S:
        # Mirror str()'s signature: (object[, encoding[, errors]]).
        if len(values) > 3:
            raise TypeError('too many arguments for str(): %r' % (values,))
        if len(values) == 1 and not isinstance(values[0], str):
            raise TypeError('%r is not a string' % (values[0],))
        if len(values) >= 2 and not isinstance(values[1], str):
            raise TypeError('encoding must be a string, not %r' % (values[1],))
        if len(values) == 3 and not isinstance(values[2], str):
            raise TypeError('errors must be a string, not %r' % values[2])
        value = str(*values)
        member = str.__new__(cls, value)
        member._value_ = value
        return member

    # Use the plain string representation instead of Enum's "Class.NAME".
    __str__ = str.__str__

    def _generate_next_value_(name: str, start: int, count: int, last_values: List[Any]) -> str:
        """auto() produces the lower-cased member name."""
        return name.lower()
class TestCompileFromStr(unittest.TestCase):
    """compile_str should translate mini-language strings into observer graphs."""

    @staticmethod
    def _observer(name):
        """Default observer for a plain named trait."""
        return NamedTraitObserver(name=name, notify=True, optional=False)

    def test_compile_simple(self):
        expected = [create_graph(self._observer('name'))]
        self.assertEqual(compile_str('name'), expected)

    def test_compile_serial(self):
        # "a.b" chains observers into a single graph.
        expected = [create_graph(self._observer('name1'), self._observer('name2'))]
        self.assertEqual(compile_str('name1.name2'), expected)

    def test_compile_parallel(self):
        # "a,b" produces one graph per branch.
        expected = [
            create_graph(self._observer('name1')),
            create_graph(self._observer('name2')),
        ]
        self.assertEqual(compile_str('name1,name2'), expected)
class SharedStateActionCriticComposer(BaseStateActionCriticComposer):
    """Composes a single state-action critic network shared across all sub-steps/agents.

    :param observation_spaces_dict: Dict of observation spaces per sub-step key.
    :param action_spaces_dict: Dict of action spaces per sub-step key.
    :param networks: Model configuration; exactly one shared network is expected.
    """

    def __init__(self, observation_spaces_dict: Dict[(Union[(str, int)], spaces.Dict)], action_spaces_dict: Dict[(Union[(str, int)], spaces.Dict)], networks: CollectionOfConfigType):
        super().__init__(observation_spaces_dict, action_spaces_dict)
        # A shared critic uses exactly one network config for all sub-steps.
        assert (len(networks) == 1)
        network = networks[0]
        flat_action_space = flat_structured_space(self._action_spaces_dict)
        obs_shapes_flat = flat_structured_shapes(self._obs_shapes)
        critic_output_shapes = dict()
        if all(self._only_discrete_spaces.values()):
            # All-discrete case: one Q-value vector per discrete action head.
            for (act_key, act_space) in flat_action_space.spaces.items():
                critic_output_shapes[(act_key + '_q_values')] = (act_space.n,)
        else:
            # Mixed case: actions are fed as inputs, critic emits a single Q-value.
            for (act_key, act_space) in flat_action_space.spaces.items():
                if isinstance(act_space, spaces.Discrete):
                    obs_shapes_flat[act_key] = (act_space.n,)
                else:
                    obs_shapes_flat[act_key] = act_space.sample().shape
            critic_output_shapes['q_value'] = (1,)
        model_registry = Factory(base_type=nn.Module)
        self._critics = {0: model_registry.instantiate(network, obs_shapes=obs_shapes_flat, output_shapes=critic_output_shapes)}

    # Restored decorators (the corrupted line held only the override argument).
    @property
    @override(BaseStateActionCriticComposer)
    def critic(self) -> TorchSharedStateActionCritic:
        """The composed shared critic, wrapping the single network under key 0."""
        return TorchSharedStateActionCritic(self._critics, num_policies=len(self._obs_shapes), device='cpu', only_discrete_spaces={0: all(self._only_discrete_spaces.values())}, action_spaces_dict={0: flat_structured_space(self._action_spaces_dict)})
class TestMHAImageReader(DataReaderTestBase):
    """Reader tests for the example .mha image file."""

    def setup_reader(self):
        """Attach an ImageReader for foot.mha and record the expected bounds."""
        reader = ImageReader()
        reader.initialize(get_example_data('foot.mha'))
        self.e.add_source(reader)
        self.bounds = (0.0, 255.0, 0.0, 255.0, 0.0, 0.0)

    def check(self, scene, bounds, error=0.0101):
        """Render the outline module and compare its bounds and data spacing."""
        source = scene.children[0]
        outline = source.children[0].children[0]
        outline.render()
        self.assertEqual(True, numpy.allclose(outline.outline_filter.output.bounds, bounds, atol=error))
        self.assertEqual(True, numpy.allclose(source.reader.data_spacing, (1.0, 1.0, 1.0)))

    def test_mha_image_data_reader(self):
        self.check(self.scene, self.bounds)

    def test_save_and_restore(self):
        self.check_saving(self.e, self.scene, self.bounds)

    def test_deepcopied(self):
        self.check_deepcopying(self.scene, self.bounds)
class FlattenConcatPolicyNet(FlattenConcatBaseNet):
    """Policy head on top of the flatten-and-concatenate base network.

    Adds one linear output block per action key, producing action logits.
    """

    def __init__(self, obs_shapes: Dict[(str, Sequence[int])], action_logits_shapes: Dict[(str, Sequence[int])], hidden_units: List[int], non_lin=nn.Module):
        super().__init__(obs_shapes, hidden_units, non_lin)
        for action, shape in action_logits_shapes.items():
            # One linear head per action, fed from the shared latent block.
            head = LinearOutputBlock(in_keys='latent', out_keys=action, in_shapes=self.perception_dict['latent'].out_shapes(), output_units=shape[-1])
            head.apply(make_module_init_normc(std=0.01))
            self.perception_dict[action] = head
        self.net = InferenceBlock(in_keys=list(obs_shapes.keys()), out_keys=list(action_logits_shapes.keys()), in_shapes=list(obs_shapes.values()), perception_blocks=self.perception_dict)

    def forward(self, x):
        """Compute action logits for the given observation dict."""
        return self.net(x)
def test_offset_enriched():
    """Check exterior-facet node-map offsets for an enriched element on an extruded mesh."""
    base = UnitSquareMesh(1, 1)
    mesh = ExtrudedMesh(base, layers=1)
    # P2 x P1 enriched with P1 x DG0 on the triangle-interval product cell.
    quadratic_part = TensorProductElement(FiniteElement('CG', 'triangle', 2), FiniteElement('CG', 'interval', 1))
    constant_part = TensorProductElement(FiniteElement('CG', 'triangle', 1), FiniteElement('DG', 'interval', 0))
    V = FunctionSpace(mesh, quadratic_part + constant_part)
    expected = [2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 2, 2, 2]
    assert (V.exterior_facet_node_map().offset == expected).all()
class TestShapeDomainBuilding(unittest.TestCase):
    """Tests for building shapes, assembling domains and checking BC flags.

    The skip decorators were corrupted in this file (only ``.skip(...)``
    survived); restored here as ``unittest.skip`` so the class is importable.
    """

    @unittest.skip(reason='need to redo after history revision')
    def test_create_shapes(self):
        """Smoke-test every shape constructor in both plain and mprans flavours."""
        domain2D = create_domain2D()
        domain3D = create_domain3D()
        rectangle = create_rectangle(domain2D)
        rectangleRANS = create_rectangle(domain2D, folder='mprans')
        cuboid = create_cuboid(domain3D)
        cuboidRANS = create_cuboid(domain3D, folder='mprans')
        tank2D = create_tank2D(domain2D)
        tank3D = create_tank3D(domain3D)
        custom2D = create_custom2D(domain2D)
        custom2DRANS = create_custom2D(domain2D, folder='mprans')
        custom3D = create_custom3D(domain3D)
        custom3DRANS = create_custom3D(domain3D, folder='mprans')
        path1 = getpath()
        stl = create_stl3D(domain3D, os.path.join(path1, 'STLBlocks.stl'))

    @unittest.skip(reason='need to redo after history revision')
    def test_assemble_domain(self):
        """Assemble domains with many shapes and verify counts, extents and origins."""
        nb_shapes = 10
        domain2D = create_domain2D()
        domain2DRANS = create_domain2D()
        domain3D = create_domain3D()
        domain3DRANS = create_domain3D()
        domainSTL = create_domain3D()
        dim2D = np.array([1.0, 1.0])
        dim3D = np.array([1.0, 1.0, 1.0])
        coords2D = np.array([0.5, 0.5])
        coords3D = np.array([0.5, 0.5, 0.5])
        nb_bc2D = 0
        nb_bc2DRANS = 0
        nb_bc3D = 0
        nb_bc3DRANS = 0
        # Place shapes on a diagonal so the assembled bounding box is predictable.
        for _ in range(nb_shapes):
            coords2D += 1.5
            coords3D += 1.5
            a = create_rectangle(domain2D, dim=dim2D, coords=coords2D)
            nb_bc2D += len(a.BC_list)
            a = create_cuboid(domain3D, dim=dim3D, coords=coords3D)
            nb_bc3D += len(a.BC_list)
            a = create_rectangle(domain2DRANS, dim=dim2D, coords=coords2D, folder='mprans')
            nb_bc2DRANS += len(a.BC_list)
            a = create_cuboid(domain3DRANS, dim=dim3D, coords=coords3D, folder='mprans')
            nb_bc3DRANS += len(a.BC_list)
        # RANS domains additionally get an enclosing tank.
        a = create_tank2D(domain2DRANS, dim=[50.0, 50.0])
        nb_bc2DRANS += len(a.BC_list)
        a = create_tank3D(domain3DRANS, dim=[50.0, 50.0, 50.0])
        nb_bc3DRANS += len(a.BC_list)
        assembleDomain(domain2D)
        assembleDomain(domain3D)
        assembleDomainRANS(domain2DRANS)
        assembleDomainRANS(domain3DRANS)
        x2D = domain2D.x
        x3D = domain3D.x
        x2DRANS = domain2DRANS.x
        x3DRANS = domain3DRANS.x
        L2D = domain2D.L
        L3D = domain3D.L
        L2DRANS = domain2DRANS.L
        L3DRANS = domain3DRANS.L
        npt.assert_equal(len(domain2D.shape_list), nb_shapes)
        npt.assert_equal(len(domain3D.shape_list), nb_shapes)
        # +1 shape: the tank added to each RANS domain.
        npt.assert_equal(len(domain2DRANS.shape_list), (nb_shapes + 1))
        npt.assert_equal(len(domain3DRANS.shape_list), (nb_shapes + 1))
        # +1 flag: boundary condition 0 is reserved.
        npt.assert_equal(len(domain2D.bc), (nb_bc2D + 1))
        npt.assert_equal(len(domain3D.bc), (nb_bc3D + 1))
        npt.assert_equal(len(domain2DRANS.bc), (nb_bc2DRANS + 1))
        npt.assert_equal(len(domain3DRANS.bc), (nb_bc3DRANS + 1))
        npt.assert_equal(L2D, [14.5, 14.5])
        npt.assert_equal(L3D, [14.5, 14.5, 14.5])
        npt.assert_equal(L2DRANS, [50.0, 50.0])
        npt.assert_equal(L3DRANS, [50.0, 50.0, 50.0])
        npt.assert_equal(x2D, [1.5, 1.5])
        npt.assert_equal(x3D, [1.5, 1.5, 1.5])
        npt.assert_equal(x2DRANS, [0.0, 0.0])
        npt.assert_equal(x3DRANS, [0.0, 0.0, 0.0])
        # STL-based domain: flags must cover exactly the named STL boundaries.
        path1 = getpath()
        stl = create_stl3D(domainSTL, os.path.join(path1, 'STLBlocks.stl'))
        assembleDomainRANS(domainSTL)
        STLnames = ['Bed0', 'Concrete0', 'Inlet0', 'Outlet0', 'Top0', 'Wall0']
        nSTLs = len(STLnames)
        npt.assert_equal(nSTLs, max(stl.vertexFlags))
        npt.assert_equal(nSTLs, max(stl.facetFlags))
        npt.assert_equal(nSTLs, max(domainSTL.vertexFlags))
        npt.assert_equal(nSTLs, max(domainSTL.facetFlags))

    @unittest.skip(reason='need to redo after history revision')
    def test_BC_flags(self):
        """Assembled global flags must equal each shape's local flags plus an offset."""
        nb_shapes = 3
        domain2D = create_domain2D()
        domain3D = create_domain3D()
        domain2DRANS = create_domain2D()
        domain3DRANS = create_domain3D()
        domainSTL = create_domain3D()
        flags_v2D = []
        flags_s2D = []
        flags_v3D = []
        flags_f3D = []
        flags_v2DRANS = []
        flags_s2DRANS = []
        flags_v3DRANS = []
        flags_f3DRANS = []
        maxf = 0
        for i in range(nb_shapes):
            a = create_custom2D(domain2D)
            if flags_v2D:
                maxf = np.max([np.max(flags_v2D), np.max(flags_s2D)])
            flags_v2D += (a.vertexFlags + maxf).tolist()
            flags_s2D += (a.segmentFlags + maxf).tolist()
            a = create_custom3D(domain3D)
            if flags_v3D:
                maxf = np.max([np.max(flags_v3D), np.max(flags_f3D)])
            flags_v3D += (a.vertexFlags + maxf).tolist()
            flags_f3D += (a.facetFlags + maxf).tolist()
            a = create_custom2D(domain2DRANS, folder='mprans')
            if flags_v2DRANS:
                maxf = np.max([np.max(flags_v2DRANS), np.max(flags_s2DRANS)])
            flags_v2DRANS += (a.vertexFlags + maxf).tolist()
            flags_s2DRANS += (a.segmentFlags + maxf).tolist()
            a = create_custom3D(domain3DRANS, folder='mprans')
            if flags_v3DRANS:
                maxf = np.max([np.max(flags_v3DRANS), np.max(flags_f3DRANS)])
            flags_v3DRANS += (a.vertexFlags + maxf).tolist()
            flags_f3DRANS += (a.facetFlags + maxf).tolist()
        assembleDomain(domain2D)
        assembleDomain(domain3D)
        assembleDomainRANS(domain2DRANS)
        assembleDomainRANS(domain3DRANS)
        npt.assert_equal(domain2D.vertexFlags, flags_v2D)
        npt.assert_equal(domain2D.segmentFlags, flags_s2D)
        npt.assert_equal(domain3D.vertexFlags, flags_v3D)
        npt.assert_equal(domain3D.facetFlags, flags_f3D)
        npt.assert_equal(domain2DRANS.vertexFlags, flags_v2DRANS)
        npt.assert_equal(domain2DRANS.segmentFlags, flags_s2DRANS)
        npt.assert_equal(domain3DRANS.vertexFlags, flags_v3DRANS)
        npt.assert_equal(domain3DRANS.facetFlags, flags_f3DRANS)
        path1 = getpath()
        stl = create_stl3D(domainSTL, os.path.join(path1, 'STLBlocks.stl'))
        assembleDomainRANS(domainSTL)
        STLnames = ['Bed0', 'Concrete0', 'Inlet0', 'Outlet0', 'Top0', 'Wall0']
        nSTLs = len(STLnames)
        # STL boundary tags map names to 1-based flag values, both locally and globally.
        for (key, value) in stl.boundaryTags.items():
            self.assertTrue((STLnames[(value - 1)] == key))
        for (key, value) in domainSTL.boundaryTags.items():
            self.assertTrue((((stl.name + '_') + STLnames[(value - 1)]) == key))
        npt.assert_equal(nSTLs, len(stl.boundaryTags))
def filter_extender_controller_extender_data(json):
    """Keep only known, non-None extender options from the request payload."""
    option_list = ['aaa_shared_secret', 'access_point_name', 'admin', 'allowaccess', 'at_dial_script', 'authorized', 'bandwidth_limit', 'billing_start_day', 'cdma_aaa_spi', 'cdma_ha_spi', 'cdma_nai', 'conn_status', 'controller_report', 'description', 'device_id', 'dial_mode', 'dial_status', 'enforce_bandwidth', 'ext_name', 'extension_type', 'ha_shared_secret', 'id', 'ifname', 'initiated_update', 'login_password', 'login_password_change', 'mode', 'modem_passwd', 'modem_type', 'modem1', 'modem2', 'multi_mode', 'name', 'override_allowaccess', 'override_enforce_bandwidth', 'override_login_password_change', 'ppp_auth_protocol', 'ppp_echo_request', 'ppp_password', 'ppp_username', 'primary_ha', 'profile', 'quota_limit_mb', 'redial', 'redundant_intf', 'roaming', 'role', 'secondary_ha', 'sim_pin', 'vdom', 'wan_extension', 'wimax_auth_protocol', 'wimax_carrier', 'wimax_realm']
    json = remove_invalid_fields(json)
    # Build the filtered payload in one pass.
    return {attr: json[attr] for attr in option_list if attr in json and json[attr] is not None}
class NewViewAction(Action):
    """Workbench action that creates two example views and docks them."""

    description = 'Create and add a new view'
    name = 'New View'
    tooltip = 'Create and add a new view'

    def perform(self, event):
        """Add one view positioned via its constructor and one via add_view."""
        fred = View(id='my.view.fred', name='Fred', position='right')
        self.window.add_view(fred)
        wilma = View(id='my.view.wilma', name='Wilma')
        self.window.add_view(wilma, position='top')
        return
class OptionPlotoptionsParetoSonificationTracksMappingRate(Options):
    """Rate-mapping options for Pareto sonification tracks.

    Each option is a property pair; the decorators were stripped in this file,
    leaving duplicate bare defs where the setter silently shadowed the getter.
    Restored as @property / @<name>.setter pairs.
    """

    @property
    def mapFunction(self):
        """Mapping function for the rate."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property to map the rate to."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Maximum mapped rate value."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Minimum mapped rate value."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Range the mapping should stay within."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
@reset_factors.command()
@click.pass_context
def execute(ctx):
    """Reset all enrolled MFA factors for the configured Okta user ID.

    Logs and indexes each step; on transport failure it aborts instead of
    dereferencing a missing response.
    """
    error = MODULE.check_options()
    if error:
        return
    msg = f"Attempting to reset MFA factors for user ID {MODULE_OPTIONS['id']['value']}"
    LOGGER.info(msg)
    index_event(ctx.obj.es, module=__name__, event_type='INFO', event=msg)
    click.echo(f'[*] {msg}')
    url = f"{ctx.obj.base_url}/users/{MODULE_OPTIONS['id']['value']}/lifecycle/reset_factors"
    headers = {'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': f'SSWS {ctx.obj.api_token}'}
    params = {}
    payload = {}
    try:
        response = ctx.obj.session.post(url, headers=headers, params=params, json=payload, timeout=7)
    except Exception as e:
        LOGGER.error(e, exc_info=True)
        index_event(ctx.obj.es, module=__name__, event_type='ERROR', event=e)
        click.secho(f'[!] {URL_OR_API_TOKEN_ERROR}', fg='red')
        # Bug fix: the original fell through with response=None and then
        # crashed on response.ok; there is nothing to inspect, so bail out.
        return
    if response.ok:
        msg = f"MFA factors reset for user {MODULE_OPTIONS['id']['value']}"
        LOGGER.info(msg)
        index_event(ctx.obj.es, module=__name__, event_type='INFO', event=msg)
        click.secho(f'[*] {msg}', fg='green')
        ctx.obj.okta.get_user(ctx, MODULE_OPTIONS['id']['value'])
    else:
        msg = f'''Error resetting MFA factors for Okta user
Response Code: {response.status_code} | Response Reason: {response.reason}
Error Code: {response.json().get('errorCode')} | Error Summary: {response.json().get('errorSummary')}'''
        LOGGER.error(msg)
        index_event(ctx.obj.es, module=__name__, event_type='ERROR', event=msg)
        click.secho(f'[!] {msg}', fg='red')
        click.echo("Check that the user's status is ACTIVE and that they have at least one factor enrolled")
    return
@pytest.mark.django_db
def test_summing_period_and_quarterly_in_same_year(client, monkeypatch, helpers, defc_codes, basic_ref_data, late_gtas, quarterly_gtas, basic_faba):
    """Period and quarterly GTAS totals in the same year should be summed together."""
    helpers.patch_datetime_now(monkeypatch, EARLY_YEAR, LATE_MONTH, 25)
    helpers.reset_dabs_cache()
    resp = client.get(OVERVIEW_URL)
    expected_total = (LATE_GTAS_CALCULATIONS['total_budgetary_resources'] + QUARTERLY_GTAS_CALCULATIONS['total_budgetary_resources'])
    assert (resp.data['funding'] == [{'amount': expected_total, 'def_code': 'M'}])
    assert (resp.data['total_budget_authority'] == expected_total)
def test_restructure_cfg_loop_two_back_edges_condition_2(task):
    """Restructure a CFG whose loop has two back edges and check the resulting AST.

    Builds a 7-block CFG (counter loop over i/x with a nested j-condition),
    runs pattern-independent restructuring, then constructs the expected AST
    by hand and compares structure and condition map.
    """
    # CFG: 0 -> 1 (loop head); 1 branches to body (2) or exit (3); 2 branches
    # to 4 or back to 1; 4 selects 5 or 6; 5 -> 6 -> 1 (second back edge).
    task.graph.add_nodes_from((vertices := [BasicBlock(0, instructions=[Assignment(variable(name='i'), Constant(0)), Assignment(variable(name='x'), Constant(42))]), BasicBlock(1, instructions=[Branch(Condition(OperationType.not_equal, [variable(name='i'), Constant(3)]))]), BasicBlock(2, instructions=[Assignment(variable(name='i'), BinaryOperation(OperationType.plus, [variable(name='i'), Constant(1)])), Assignment(variable(name='x'), BinaryOperation(OperationType.minus, [variable(name='x'), variable(name='i')])), Branch(Condition(OperationType.not_equal, [variable(name='x'), Constant(3)]))]), BasicBlock(3, instructions=[Return([variable(name='x')])]), BasicBlock(4, instructions=[Branch(Condition(OperationType.not_equal, [variable(name='j'), Constant(3)]))]), BasicBlock(5, instructions=[Assignment(variable(name='j'), Constant(0))]), BasicBlock(6, instructions=[Assignment(variable(name='j'), BinaryOperation(OperationType.plus, [variable(name='j'), Constant(1)]))])]))
    task.graph.add_edges_from([UnconditionalEdge(vertices[0], vertices[1]), TrueCase(vertices[1], vertices[2]), FalseCase(vertices[1], vertices[3]), TrueCase(vertices[2], vertices[4]), FalseCase(vertices[2], vertices[1]), TrueCase(vertices[4], vertices[5]), FalseCase(vertices[4], vertices[6]), UnconditionalEdge(vertices[5], vertices[6]), UnconditionalEdge(vertices[6], vertices[1])])
    PatternIndependentRestructuring().run(task)
    # Expected AST: x1 = (i != 3), x2 = (x != 3), x3 = (j != 3).
    context = LogicCondition.generate_new_context()
    resulting_ast = AbstractSyntaxTree((seq_node := SeqNode(LogicCondition.initialize_true(context))), {LogicCondition.initialize_symbol('x1', context): Condition(OperationType.not_equal, [variable('i'), Constant(3)]), LogicCondition.initialize_symbol('x2', context): Condition(OperationType.not_equal, [variable('x'), Constant(3)]), LogicCondition.initialize_symbol('x3', context): Condition(OperationType.not_equal, [variable('j'), Constant(3)])})
    # Sequence: init code, while(x1) loop, return.
    code_node_0 = resulting_ast._add_code_node([Assignment(variable('i'), Constant(0)), Assignment(variable('x'), Constant(42))])
    resulting_ast._add_node((while_loop := resulting_ast.factory.create_while_loop_node(LogicCondition.initialize_symbol('x1', context))))
    resulting_ast._add_node((loop_body := resulting_ast.factory.create_seq_node()))
    code_node_2 = resulting_ast._add_code_node([Assignment(variable('i'), BinaryOperation(OperationType.plus, [variable('i'), Constant(1)])), Assignment(variable('x'), BinaryOperation(OperationType.minus, [variable('x'), variable('i')]))])
    # The second back edge (2 -> 1) becomes an "if not x2: continue".
    continue_branch = resulting_ast._add_code_node([Continue()])
    continue_condition = resulting_ast._add_condition_node_with((~ LogicCondition.initialize_symbol('x2', context)), continue_branch)
    code_node_5 = resulting_ast._add_code_node([Assignment(variable(name='j'), Constant(0))])
    node_5_condition = resulting_ast._add_condition_node_with(LogicCondition.initialize_symbol('x3', context), code_node_5)
    code_node_6 = resulting_ast._add_code_node([Assignment(variable('j'), BinaryOperation(OperationType.plus, [variable('j'), Constant(1)]))])
    code_node_3 = resulting_ast._add_code_node([Return([variable('x')])])
    resulting_ast._add_edges_from(((seq_node, code_node_0), (seq_node, while_loop), (while_loop, loop_body), (loop_body, code_node_2), (loop_body, continue_condition), (loop_body, node_5_condition), (loop_body, code_node_6), (seq_node, code_node_3)))
    # Reachability mirrors the execution order the restructurer must preserve.
    resulting_ast._code_node_reachability_graph.add_reachability_from(((code_node_0, code_node_3), (code_node_0, code_node_2), (code_node_0, code_node_6), (code_node_2, code_node_6), (code_node_2, code_node_5), (code_node_2, code_node_3), (code_node_2, continue_branch), (continue_branch, code_node_5), (continue_branch, code_node_6), (code_node_5, code_node_3), (code_node_5, code_node_6), (code_node_6, code_node_3)))
    seq_node.sort_children()
    loop_body.sort_children()
    assert (ASTComparator.compare(task.syntax_tree, resulting_ast) and (task.syntax_tree.condition_map == resulting_ast.condition_map))
class standardize_test_case(unittest.TestCase):
    """Tests for _standardize: CamelCase keys -> snake_case, recursively.

    The expected dict literal was corrupted in this file (substrings starting
    with 'http' were stripped); reconstructed from the input keys at the top.
    """

    def test_standardize(self):
        d = {'CamelCase': 1, 'CamelCamelCase': 1, 'Camel2Camel2Case': 1, 'getHTTPResponseCode': 1, 'get2HTTPResponseCode': 1, 'HTTPResponseCode': 1, 'HTTPResponseCodeXYZ': 1, ' LocationCoordinates ': {'Lat. ': 0.0, 'Lng. ': 0.0}, ' LocationHistoryCoordinates ': [{'Lat. ': 0.0, 'Lng. ': 0.0}, {'Lat. ': 0.0, 'Lng. ': 0.0}]}
        _standardize(d)
        # Keys are snake_cased, trimmed, stripped of punctuation; nested dicts
        # and lists of dicts are standardized in place too.
        r = {'camel_case': 1, 'camel_camel_case': 1, 'camel2_camel2_case': 1, 'get_http_response_code': 1, 'get2_http_response_code': 1, 'http_response_code': 1, 'http_response_code_xyz': 1, 'location_coordinates': {'lat': 0.0, 'lng': 0.0}, 'location_history_coordinates': [{'lat': 0.0, 'lng': 0.0}, {'lat': 0.0, 'lng': 0.0}]}
        self.assertEqual(d, r)
def addTableSeparator(label, colspan, h_size):
    """Append a full-width heading row (<TR><TD colspan=N><Hx>label</Hx>) to the page buffer."""
    global TXBuffer
    # Assemble the row once instead of many small += appends.
    TXBuffer += ('<TR><TD colspan=' + str(colspan) + '><H' + str(h_size) + '>'
                 + str(label)
                 + '</H' + str(h_size) + '></TD></TR>')
def get_credits_data() -> dict:
    """Collect dependency data for the credits page from pyproject.toml and poetry.lock.

    Returns a dict with the project name, sorted direct/indirect dependency
    names, and per-package info (name, home-page) from pip.
    """
    project_dir = Path(__file__).parent.parent
    metadata = toml.load(project_dir / 'pyproject.toml')['tool']['poetry']
    lock_data = toml.load(project_dir / 'poetry.lock')
    project_name = metadata['name']
    poetry_dependencies = chain(metadata['dependencies'].keys(), metadata['dev-dependencies'].keys())
    direct_dependencies = {dep.lower() for dep in poetry_dependencies}
    # 'python' is a version constraint, not an installable package; discard()
    # (unlike remove()) does not raise if it is absent.
    direct_dependencies.discard('python')
    indirect_dependencies = {pkg['name'].lower() for pkg in lock_data['package']}
    indirect_dependencies -= direct_dependencies
    dependencies = direct_dependencies | indirect_dependencies
    packages = {}
    for pkg in search_packages_info(dependencies):
        # Keep only the fields the credits template needs.
        info = {key: pkg[key] for key in ('name', 'home-page')}
        packages[info['name'].lower()] = info
    return {'project_name': project_name, 'direct_dependencies': sorted(direct_dependencies), 'indirect_dependencies': sorted(indirect_dependencies), 'package_info': packages}
class PostServiceTest(RPCTestCase):
    """gRPC round-trip tests for the Post service."""

    def _stub(self):
        """Fresh controller stub bound to the test channel."""
        return post_pb2_grpc.PostControllerStub(self.channel)

    def test_create_post(self):
        stub = self._stub()
        created = stub.Create(post_pb2.Post(title='title', content='content'))
        self.assertEqual(created.title, 'title')
        self.assertEqual(created.content, 'content')
        self.assertEqual(Post.objects.count(), 1)
        # The created post must also show up in a subsequent List call.
        listed = list(stub.List(post_pb2.PostListRequest()))
        self.assertEqual(len(listed), 1)

    def test_list_posts(self):
        Post.objects.create(title='title1', content='content1')
        Post.objects.create(title='title2', content='content2')
        listed = list(self._stub().List(post_pb2.PostListRequest()))
        self.assertEqual(len(listed), 2)
class FTSIndex():
    """SQLite full-text-search index over Anki notes plus add-on ("user") notes.

    Builds (or reuses) a `notes` FTS virtual table in search-data.db, choosing
    the newest FTS module the local SQLite build supports (FTS5 > FTS4 > FTS3).
    """

    def __init__(self, anki_index_data, addon_index_data, force_rebuild=False):
        # Max number of results returned per search; pinned note ids are excluded.
        self.limit = 20
        self.pinned = []
        self.highlighting = True
        self.fields_to_exclude = {}
        self.creation_info = {}
        self.threadPool = QThreadPool()
        config = mw.addonManager.getConfig(__name__)
        self.dir = config['addon.data_folder']
        # Fall back to the application data path if no folder is configured.
        if ((not self.dir) or (len(self.dir.strip()) == 0)):
            self.dir = utility.misc.get_application_data_path()
            config['addon.data_folder'] = self.dir
            mw.addonManager.writeConfig(__name__, config)
        # Remove a stale copy of the DB from the legacy user_files location.
        ex = (utility.misc.get_user_files_folder_path() + 'search-data.db')
        try:
            if os.path.isfile(ex):
                os.remove(ex)
        except:
            pass
        if (not os.path.isdir(self.dir)):
            os.mkdir(self.dir)
        self.creation_info['stopwords_size'] = len(set(config['stopwords']))
        self.creation_info['decks'] = config['decks']
        self.porter = config['usePorterStemmer']
        try:
            self.fields_to_exclude = config['fieldsToExclude']
            self.creation_info['fields_to_exclude_original'] = self.fields_to_exclude
        except KeyError:
            self.fields_to_exclude = {}
        UI.fields_to_exclude = self.fields_to_exclude
        index_data = (anki_index_data + addon_index_data)
        index_up_to_date = ((not force_rebuild) and (not self._should_rebuild(index_data)))
        self.creation_info['index_was_rebuilt'] = (not index_up_to_date)
        if (not index_up_to_date):
            # Rebuild: recreate the virtual table, trying FTS5, then 4, then 3.
            if self.porter:
                sql = 'create virtual table notes using fts%s(nid, text, tags, did, source, mid, refs, tokenize=porter)'
            else:
                sql = 'create virtual table notes using fts%s(nid, text, tags, did, source, mid, refs)'
            cleaned = self._cleanText(index_data)
            file_path = (self.dir + 'search-data.db')
            try:
                os.remove(file_path)
            except OSError:
                pass
            conn = sqlite3.connect(file_path)
            conn.execute('drop table if exists notes')
            try:
                conn.execute((sql % 5))
                self.type = 'SQLite FTS5'
            except:
                try:
                    conn.execute((sql % 4))
                    self.type = 'SQLite FTS4'
                except:
                    conn.execute((sql % 3))
                    self.type = 'SQlite FTS3'
            conn.executemany('INSERT INTO notes VALUES (?,?,?,?,?,?,?)', cleaned)
            if (self.type == 'SQLite FTS5'):
                # FTS5-only maintenance command to merge the index b-trees.
                conn.execute("INSERT INTO notes(notes) VALUES('optimize')")
            conn.commit()
            conn.close()
        else:
            # Reusing an existing DB: only detect which FTS module is available.
            self.type = self._check_fts_version(config['logging'])
        if (not index_up_to_date):
            persist_index_info(self)
        # Free the (potentially large) raw index data held in global state.
        state.index_data = None
    def _should_rebuild(self, index_data):
        """Return True if the persisted index is stale relative to current config/data.

        Compares the stored index info (size, decks, excluded fields, stopwords)
        against the current add-on configuration and the freshly collected data.
        """
        config = mw.addonManager.getConfig(__name__)
        # User explicitly froze the index: never rebuild.
        if config['freezeIndex']:
            return False
        info = get_index_info()
        if (info is None):
            return True
        # One-shot rebuild flag set elsewhere; consume it.
        if info['shouldRebuild']:
            toggle_should_rebuild()
            return True
        if (not config['addon.data_folder']):
            return True
        file_path = os.path.join(config['addon.data_folder'], 'search-data.db')
        if (not os.path.isfile(file_path)):
            return True
        try:
            # Sanity check: the FTS shadow table must have the expected 8 columns.
            conn = sqlite3.connect(file_path)
            row = conn.cursor().execute('SELECT * FROM notes_content ORDER BY id ASC LIMIT 1').fetchone()
            conn.close()
            if ((row is not None) and (len(row) != 8)):
                return True
        except:
            return True
        index_size = len(index_data)
        if (info['size'] != index_size):
            return True
        # Small collections rebuild cheaply; always refresh them.
        if (index_size < config['alwaysRebuildIndexIfSmallerThan']):
            return True
        # Deck selection changed?
        if (len(config['decks']) != len(info['decks'])):
            return True
        for d in config['decks']:
            if (d not in info['decks']):
                return True
        # Excluded-fields mapping changed (models, field lists, or members)?
        if (len(config['fieldsToExclude']) != len(info['fieldsToExclude'])):
            return True
        for (model_name, field_list) in config['fieldsToExclude'].items():
            if (model_name not in info['fieldsToExclude']):
                return True
            if (len(field_list) != len(info['fieldsToExclude'][model_name])):
                return True
            for field_name in field_list:
                if (field_name not in info['fieldsToExclude'][model_name]):
                    return True
        if (len(set(config['stopwords'])) != info['stopwordsSize']):
            return True
        return False
def _check_fts_version(self, logging):
con = sqlite3.connect(':memory:')
cur = con.cursor()
cur.execute('pragma compile_options;')
available_pragmas = [s[0].lower() for s in cur.fetchall()]
con.close()
if logging:
log(('\nSQlite compile options: ' + str(available_pragmas)))
if ('enable_fts5' in available_pragmas):
return 'SQLite FTS5'
return 'SQLite FTS4'
def _cleanText(self, index_data):
return [(row[0], (utility.text.clean_for_indexing(utility.text.remove_fields(row[1], self.fields_to_exclude[row[4]])) if (row[4] in self.fields_to_exclude) else utility.text.clean_for_indexing(row[1])), row[2], row[3], row[1], row[4], row[5]) for row in index_data]
def search(self, text, decks, only_user_notes=False, print_mode='default', knowledge_tree=None):
worker = Worker(self.searchProc, text, decks, only_user_notes, print_mode)
worker.stamp = utility.misc.get_milisec_stamp()
UI.latest = worker.stamp
if knowledge_tree:
worker.signals.result.connect(knowledge_tree.get_search_results_back)
elif (print_mode == 'default'):
worker.signals.result.connect(self.print_output)
elif (print_mode == 'pdf'):
worker.signals.result.connect(self.print_pdf)
elif (print_mode == 'pdf.left'):
worker.signals.result.connect(self.print_pdf_left)
worker.signals.tooltip.connect(UI.show_tooltip)
self.threadPool.start(worker)
    def searchProc(self, text, decks, only_user_notes, print_mode):
        """Execute the FTS query (runs on a worker thread) and collect results.

        Returns a dict with 'results' (list of SiacNote/IndexNote), the cleaned
        'query', timing info and highlighting flag, or None/str for empty queries.
        """
        resDict = {}
        start = time.time()
        orig = text
        text = self.clean(text)
        resDict['time-stopwords'] = int(((time.time() - start) * 1000))
        # Remember the search so print_* callbacks can reference query/orig later.
        self.lastSearch = (text, decks, 'default', orig)
        if self.logging:
            log(('\nFTS index - Received query: ' + text))
            log(('Decks (arg): ' + str(decks)))
            log(('Self.pinned: ' + str(self.pinned)))
            log(('Self.limit: ' + str(self.limit)))
        if (len(text) == 0):
            # Nothing left after cleaning: show a hint instead of querying.
            if (print_mode == 'default'):
                UI.empty_result(('Query was empty after cleaning.<br/><br/><b>Query:</b> <i>%s</i>' % utility.text.trim_if_longer_than(orig, 100).replace('\x1f', '').replace('`', '`')))
                if mw.addonManager.getConfig(__name__)['hideSidebar']:
                    return 'Found 0 notes. Query was empty after cleaning.'
                return None
            elif (print_mode == 'pdf'):
                return None
        start = time.time()
        text = utility.text.expand_by_synonyms(text, self.synonyms)
        resDict['time-synonyms'] = int(((time.time() - start) * 1000))
        resDict['query'] = text
        if utility.text.text_too_small(text):
            if self.logging:
                log(('Returning - Text was < 2 chars: ' + text))
            return {'results': []}
        tokens = text.split(' ')
        # De-duplicate long token lists to keep the OR query manageable.
        if (len(tokens) > 10):
            tokens = set(tokens)
        # 'OR' is a reserved FTS keyword, so user tokens are lower-cased to 'or'.
        if (self.type == 'SQLite FTS5'):
            query = u' OR '.join([('tags:' + s.strip().replace('OR', 'or')) for s in tokens if (not utility.text.text_too_small(s))])
            query += (' OR ' + ' OR '.join([('text:' + s.strip().replace('OR', 'or')) for s in tokens if (not utility.text.text_too_small(s))]))
        else:
            query = ' OR '.join([s.strip().replace('OR', 'or') for s in tokens if (not utility.text.text_too_small(s))])
        if ((len(query) == 0) or (query == ' OR ')):
            if self.logging:
                log(('Returning. Query was: ' + query))
            return {'results': []}
        c = 0
        resDict['decks'] = decks
        # '-1' marks add-on (user) notes and doubles as the "all decks" flag.
        allDecks = ('-1' in decks)
        decks.append('-1')
        rList = list()
        user_note_filter = ("AND mid='-1'" if only_user_notes else '')
        conn = sqlite3.connect((self.dir + 'search-data.db'))
        # NOTE(review): the cleaned query is interpolated directly into the SQL
        # below. FTS MATCH arguments could be bound as parameters instead; as
        # written this relies entirely on clean() to strip dangerous characters
        # — worth hardening.
        if (self.type == 'SQLite FTS5'):
            dbStr = ("select nid, text, tags, did, source, bm25(notes) as score, mid, refs from notes where notes match '%s' %s order by score" % (query, user_note_filter))
        else:
            # FTS3/4 have no bm25(); rank via matchinfo() through a UDF.
            conn.create_function('simple_rank', 1, simple_rank)
            dbStr = ("select nid, text, tags, did, source, simple_rank(matchinfo(notes)) as score, mid, refs from notes where text match '%s' %s order by score desc" % (query, user_note_filter))
        try:
            start = time.time()
            res = conn.execute(dbStr).fetchall()
            resDict['time-query'] = int(((time.time() - start) * 1000))
        except Exception as e:
            print(('Executing match query threw exception: ' + str(e)))
            res = []
        finally:
            conn.close()
        if self.logging:
            log(('dbStr was: ' + dbStr))
            log(('Result length of db query: ' + str(len(res))))
        resDict['highlighting'] = self.highlighting
        # Filter: skip pinned notes and notes outside the selected decks,
        # stop once the result limit is reached.
        for r in res:
            if ((not (str(r[0]) in self.pinned)) and (allDecks or (str(r[3]) in decks))):
                if (str(r[6]) == '-1'):
                    rList.append(SiacNote.from_index(r))
                else:
                    rList.append(IndexNote(r))
                c += 1
                if (c >= self.limit):
                    break
        if self.logging:
            log(('Query was: ' + query))
            log(('Result length (after removing pinned and unselected decks): ' + str(len(rList))))
        resDict['results'] = rList[:min(self.limit, len(rList))]
        self.lastResDict = resDict
        return resDict
def print_output(self, result, stamp):
query_set = None
if (self.highlighting and (self.lastResDict is not None) and ('query' in self.lastResDict) and (self.lastResDict['query'] is not None)):
query_set = set((utility.text.replace_accents_with_vowels(s).lower() for s in self.lastResDict['query'].split(' ')))
if (type(result) is str):
pass
elif (result is not None):
q = (utility.text.trim_if_longer_than(self.lastResDict['query'], 50) if ('query' in self.lastResDict) else '')
if q:
q = q.replace('"', '')
q = f'"{q}"'
UI.print_search_results(['Search', q], result['results'], stamp, timing_info=True, query_set=query_set)
def print_pdf(self, result, stamp):
res = (result['results'] if (result is not None) else [])
UI.print_pdf_search_results(res, self.lastSearch[0], self.lastSearch[3])
def print_pdf_left(self, result, stamp):
    """Print search results into the reader sidebar, highlighting query terms."""
    last = self.lastResDict
    highlight_terms = None
    if last is not None and last.get('query') is not None:
        highlight_terms = {
            utility.text.replace_accents_with_vowels(token).lower()
            for token in last['query'].split(' ')
        }
    if result is None:
        Reader.sidebar.print([], stamp, self.lastSearch[0])
    else:
        Reader.sidebar.print(result['results'], stamp, highlight_terms)
def searchDB(self, text, decks):
    """Run an Anki note search and wrap the hits as IndexNote results.

    :param text:  query string passed straight to Anki's note search
    :param decks: list of deck-id strings; '-1' in the list means "all decks"
    :return: dict with 'result' (IndexNote list, capped at self.limit)
             and 'stamp' (timestamp used to discard stale async results)
    """
    stamp = utility.misc.get_milisec_stamp()
    UI.latest = stamp
    try:
        # Newer Anki exposes find_notes; fall back to the legacy findNotes.
        if hasattr(mw.col, 'find_notes'):
            found = mw.col.find_notes(text)
        else:
            found = mw.col.findNotes(text)
    except:
        # Broad catch: an invalid query should yield no results, not crash.
        found = []
    if (len(found) > 0):
        # '-1' means "all decks": skip the deck filter entirely.
        if (not ('-1' in decks)):
            deckQ = ('(-1, %s)' % ','.join(decks))
        else:
            deckQ = ''
        # Note ids come from Anki itself (integers), so inlining them is safe.
        foundQ = ('(%s)' % ','.join([str(f) for f in found]))
        if deckQ:
            res = mw.col.db.all(('select distinct notes.id, flds, tags, did, notes.mid from notes left join cards on notes.id = cards.nid where nid in %s and did in %s' % (foundQ, deckQ)))
        else:
            res = mw.col.db.all(('select distinct notes.id, flds, tags, did, notes.mid from notes left join cards on notes.id = cards.nid where nid in %s' % foundQ))
        rList = []
        for r in res:
            # Pinned notes are excluded from the result list.
            if (not (str(r[0]) in self.pinned)):
                rList.append(IndexNote((r[0], r[1], r[2], r[3], r[1], (- 1), r[4], '')))
        return {'result': rList[:self.limit], 'stamp': stamp}
    return {'result': [], 'stamp': stamp}
def clean(self, text):
    """Delegate text normalisation for indexing to the shared utility helper."""
    return utility.text.clean(text)
def deleteNote(self, nid):
    """Remove the index row for note id *nid* from the search database.

    Uses a parameterized query instead of ``%`` string interpolation
    (robust against quoting issues / injection). *nid* is coerced to int
    so the integer comparison matches whether callers pass str or int.
    """
    conn = sqlite3.connect(self.dir + 'search-data.db')
    try:
        conn.execute('DELETE FROM notes WHERE CAST(nid AS INTEGER) = ?;', (int(nid),))
        conn.commit()
    finally:
        # Close even if the DELETE raises, so the db file is not left locked.
        conn.close()
def add_user_note(self, note):
    """Insert a user (non-Anki) note into the search index.

    *note* is a tuple ``(id, title, text, source, tags)``. User notes
    are stored with did and mid set to '-1' to distinguish them from
    regular Anki notes.
    """
    body = utility.text.build_user_note_text(title=note[1], text=note[2], source=note[3])
    row = (note[0], utility.text.clean(body), note[4], '-1', body, '-1')
    conn = sqlite3.connect(self.dir + 'search-data.db')
    conn.cursor().execute(
        "INSERT INTO notes (nid, text, tags, did, source, mid, refs) VALUES (?, ?, ?, ?, ?, ?, '')",
        row,
    )
    conn.commit()
    conn.close()
    persist_index_info(self)
def update_user_note(self, note):
    """Re-index an edited user note: drop the old row, then insert anew."""
    note_id = int(note[0])
    self.deleteNote(note_id)
    self.add_user_note(note)
def addNote(self, note):
    """Insert a single Anki note into the search index.

    Fields are joined with Anki's 0x1f field separator. Fields configured
    as excluded for the note's model are stripped from the searchable
    text but kept in the 'source' column.
    """
    content = ' \x1f '.join(note.fields)
    tags = ' '.join(note.tags)
    # note.id is an integer supplied by Anki, so inlining it is safe here.
    did = mw.col.db.all(('select distinct did from notes left join cards on notes.id = cards.nid where nid = %s' % note.id))
    # A note without any cards has no deck; such notes are not indexed.
    if ((did is None) or (len(did) == 0)):
        return
    did = did[0][0]
    source = content
    if (str(note.mid) in self.fields_to_exclude):
        content = utility.text.remove_fields(content, self.fields_to_exclude[str(note.mid)])
    conn = sqlite3.connect((self.dir + 'search-data.db'))
    conn.cursor().execute("INSERT INTO notes (nid, text, tags, did, source, mid, refs) VALUES (?, ?, ?, ?, ?, ?, '')", (note.id, utility.text.clean(content), tags, did, source, note.mid))
    conn.commit()
    conn.close()
    persist_index_info(self)
def updateNote(self, note):
    """Refresh the index entry for an Anki note (delete, then re-add)."""
    self.deleteNote(note.id)
    self.addNote(note)
def get_last_inserted_id(self):
    """Return the largest row id in notes_content (the most recent insert).

    Raises if the table is empty or missing, matching the original
    behaviour (callers only use this immediately after an insert).
    """
    conn = sqlite3.connect(self.dir + 'search-data.db')
    try:
        return conn.execute('SELECT id FROM notes_content ORDER BY id DESC LIMIT 1').fetchone()[0]
    finally:
        # Release the connection even when the query raises.
        conn.close()
def get_number_of_notes(self):
    """Return the number of indexed rows, or 0 if the database is unusable."""
    res = 0
    conn = None
    try:
        conn = sqlite3.connect(self.dir + 'search-data.db')
        res = conn.cursor().execute('select count(*) from notes_content').fetchone()[0]
    except sqlite3.Error:
        # Missing file/table or corrupt db: report an empty index instead
        # of crashing (narrowed from the original bare except).
        res = 0
    finally:
        if conn:
            conn.close()
    return res
def get_tvtk_class_doc(obj):
    """Build a human-readable documentation string for a TVTK object.

    Combines the object's class docstring with a listing of its traits
    (using each trait's tooltip/desc/help text) and its public,
    non-trait-machinery callable methods.
    """
    # ``__doc__`` may be None for undocumented classes; fall back to ''.
    doc = (obj.__doc__ or '') + '\nTraits:\n\n\n'
    ignore = ['trait_added', 'trait_modified']
    for key, trait in obj.traits().items():
        # Skip private/internal names and the bookkeeping traits above.
        if key.startswith('_') or key.endswith('_') or (key in ignore):
            continue
        doc += '\n%s: %s' % (key, (trait.tooltip or trait.desc or trait.help))
    doc += '\nMethods:\n\n\n'
    traits = obj.trait_names()
    for name in dir(obj):
        if (name in traits) or name.startswith('_'):
            continue
        # Filter out trait machinery (any name containing 'trait'),
        # keeping only genuine user-facing methods.
        if (name.find('trait') > (- 1)) and (name != 'update_traits'):
            continue
        func = getattr(obj, name)
        if callable(func):
            doc += '\n' + get_func_doc(func, name)
    return doc
class LoaderOldRc4(Unpacker):
    """Unpacker for old RC4-based APK loader variants.

    Walks Application.onCreate -> the rc4-init caller -> the rc4-init
    method, extracts candidate keys from smali array data, then
    brute-forces the APK's assets until one decrypts to a valid payload.
    """

    def __init__(self, apk_obj, dvms, output_dir):
        super().__init__('loader.rc4.v2', 'Unpacker old rc4 based variants', apk_obj, dvms, output_dir)

    def start_decrypt(self, native_lib: str=''):
        """Drive the full key-recovery + asset brute-force pipeline."""
        self.logger.info('Starting to decrypt')
        self.decrypted_payload_path = None
        application_oncreate = self.find_application_oncreate()
        if not application_oncreate:
            return
        rc4_caller = self.find_caller_rc4_init(application_oncreate)
        if not rc4_caller:
            return
        rc4_inits = self.get_rc4_init_from_caller(rc4_caller)
        for rc4_init in rc4_inits:
            rc4_keys = self.get_rc4_key(rc4_init)
            for rc4_key in rc4_keys:
                # Stop at the first key that successfully decrypts an asset.
                if self.brute_assets(rc4_key) is not None:
                    return

    def get_rc4_key(self, rc4_init_function):
        """Return candidate key byte-arrays embedded in the rc4 init method."""
        (klass_name, method_name) = rc4_init_function.split('->')
        m = self.find_method(klass_name, method_name, descriptor='()V')
        if m:
            self.logger.info(m.get_name())
            array_data = self.get_array_data(m)
            if len(array_data) > 1:
                self.logger.info('Found multiple array data, might be wrong function')
            return array_data
        return []

    def get_rc4_init_from_caller(self, class_func_str) -> list:
        """Extract the method reference(s) the caller uses to invoke rc4 init."""
        (klass_name, method_name) = class_func_str.split('->')
        m = self.find_method(klass_name, method_name, '(Landroid/app/Application;)V')
        if m is None:
            return []
        self.logger.info('Found rc4 init method')
        smali_str = self.get_smali(m)
        match = re.findall('invoke-direct [vp]\\d+, (L[^;]+;->[^\\s]+)\\(\\)V', smali_str)
        if len(match) == 0:
            self.logger.info('Unable to extract variable from target_method')
            self.logger.info('Exiting ...')
            return []
        if len(match) == 1:
            self.logger.info(f'Found variable ! : {match[0]}')
        else:
            self.logger.info('Found multiple functions to call rc4_init ')
        # Return all candidates; start_decrypt tries each one in turn.
        # (The original had an unreachable/ambiguous trailing `return []`.)
        return match

    def find_application_oncreate(self):
        """Locate onCreate of the APK's main Application class."""
        application_smali = self.find_main_application()
        return self.find_method(application_smali, 'onCreate')

    def find_caller_rc4_init(self, target_method):
        """Find the single virtual call in onCreate that passes the Application."""
        smali_str = self.get_smali(target_method)
        match = re.findall('invoke-virtual [vp]\\d+, [vp]\\d+, (L[^;]+;->[^\\s]+)\\(Landroid/app/Application;\\)V\\s+', smali_str)
        if len(match) == 0:
            self.logger.info('Unable to extract variable from target_method')
            self.logger.info('Exiting ...')
            return None
        if len(match) == 1:
            self.logger.info(f'Found variable ! : {match[0]}')
            return match[0]
        else:
            self.logger.info('Something is wrong .. ')
            # BUG FIX: the original logged the literal text '{match}'
            # because the f-string prefix was missing.
            self.logger.info(f'Found multiple ?? : {match}')
            return None

    def brute_assets(self, key: bytes):
        """Try *key* against every file in the APK; return the payload path on success."""
        self.logger.info('Starting brute-force')
        asset_list = self.apk_object.get_files()
        for filepath in asset_list:
            f = self.apk_object.get_file(filepath)
            if self.solve_encryption(f, key, filepath):
                self.logger.info(f'Decryption finished! {self.decrypted_payload_path}')
                return self.decrypted_payload_path
        self.logger.info(f'No valid file found for {key}')
        return None

    def solve_encryption(self, file_data, key: bytes, filepath: str):
        """RC4-decrypt *file_data* (4-byte LE size header + ciphertext) with *key*."""
        arc4 = ARC4(bytes(key))
        filesize = int.from_bytes(file_data[0:4], byteorder='little')
        # A size header larger than the file itself cannot be a valid payload.
        if filesize > len(file_data):
            return False
        decrypted = arc4.decrypt(file_data[4:])
        decrypted = decrypted[:filesize]
        if self.check_and_write_file(decrypted):
            return True
        return False
class ButtonsLineEdit(KeyEventLineEdit, ButtonsWidget):
    """Line edit with inline buttons docked at its right edge."""
    # CSS class used by the ButtonsWidget styling machinery.
    qt_css_class = 'QLineEdit'

    def __init__(self, text=''):
        # Intercept Return/Enter key events (handled by KeyEventLineEdit).
        KeyEventLineEdit.__init__(self, None, text, {Qt.Key_Return, Qt.Key_Enter})
        self.buttons: Iterable[QAbstractButton] = []

    def resizeEvent(self, event: QResizeEvent) -> None:
        """Reposition the buttons and reserve right-side text margin for them."""
        QLineEdit.resizeEvent(self, event)
        self.resizeButtons()
        buttons_width = 0
        for button in self.buttons:
            buttons_width += button.size().width()
        # Keep typed text from running underneath the docked buttons.
        self.setTextMargins(0, 0, buttons_width, 0)
def main():
    """Ansible module entry point: configure FortiOS 'log memory filter'.

    Builds the argument spec from the versioned schema, talks to the
    device over the httpapi connection, warns on schema/firmware version
    mismatch, and exits with the result of the configuration call.
    """
    module_spec = schema_to_module_spec(versioned_schema)
    # This configuration table has no primary-key attribute (mkey).
    mkeyname = None
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'log_memory_filter': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Copy the schema-derived sub-options into the module argument spec.
    for attribute_name in module_spec['options']:
        fields['log_memory_filter']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['log_memory_filter']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=False)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        # Compare the device firmware version against the schema's range.
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'log_memory_filter')
        (is_error, has_changed, result, diff) = fortios_log_memory(module.params, fos)
    else:
        # No persistent connection available: the module cannot run.
        module.fail_json(**FAIL_SOCKET_MSG)
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
def renewables_production_mix(zone_key: ZoneKey, session: Session, logger: Logger) -> ProductionBreakdownList:
    """Fetch today's renewables production mix from the CAMMESA endpoint.

    Queries the endpoint for the current day in Buenos Aires local time
    and maps each returned time-point into a ProductionBreakdownList.
    """
    today = datetime.now().astimezone(tz=ZoneInfo('America/Argentina/Buenos_Aires')).strftime('%d-%m-%Y')
    # 'desde'/'hasta' = from/to: a single-day window.
    params = {'desde': today, 'hasta': today}
    renewables_response = session.get(CAMMESA_RENEWABLES_ENDPOINT, params=params)
    # NOTE(review): assert is stripped under python -O; a raised exception
    # would be more robust for this validation.
    assert (renewables_response.status_code == 200), 'Exception when fetching production for {}: error when calling url={} with payload={}'.format(zone_key, CAMMESA_RENEWABLES_ENDPOINT, params)
    production_list = renewables_response.json()
    renewables_production = ProductionBreakdownList(logger)
    for production_info in production_list:
        # Spanish API keys: biocombustible=biomass, hidraulica=hydro,
        # fotovoltaica=solar, eolica=wind; 'momento' is the timestamp.
        renewables_production.append(zoneKey=zone_key, datetime=arrow.get(production_info['momento']).datetime, production=ProductionMix(biomass=production_info['biocombustible'], hydro=production_info['hidraulica'], solar=production_info['fotovoltaica'], wind=production_info['eolica']), source=SOURCE)
    return renewables_production
class CustomEditor(BaseEnumEditor):
    """wx enum editor that presents the choices as a grid of image buttons."""

    # Optional callback invoked after the user picks a new value.
    update_handler = Any

    def init(self, parent):
        """Create the backing panel and populate it with the image grid."""
        super().init(parent)
        self.control = TraitsUIPanel(parent, (- 1))
        self._create_image_grid()

    def rebuild_editor(self):
        """Discard and rebuild the grid (e.g. after the enum values change)."""
        self.control.SetSizer(None)
        toolkit.destroy_children(self.control)
        self._create_image_grid()

    def _create_image_grid(self):
        """Lay out one ImageControl per enum name, selecting the current value."""
        panel = self.control
        # Multi-column factories get a grid sizer; otherwise stack vertically.
        if (self.factory.cols > 1):
            sizer = wx.GridSizer(0, self.factory.cols, 0, 0)
        else:
            sizer = wx.BoxSizer(wx.VERTICAL)
        factory = self.factory
        cur_value = self.value
        for name in self.names:
            value = self.mapping[name]
            # Bitmap name = prefix + enum name + suffix, resolved against the
            # factory's image path.
            control = ImageControl(panel, bitmap_cache(('%s%s%s' % (factory.prefix, name, factory.suffix)), False, factory._image_path), (value == cur_value), self.update_object)
            control.value = value
            sizer.Add(control, 0, wx.ALL, 2)
            self.set_tooltip(control)
        panel.SetSizerAndFit(sizer)

    def update_object(self, control):
        """Push the clicked control's value into the trait and notify."""
        self.value = control.value
        if (self.update_handler is not None):
            self.update_handler()

    def update_editor(self):
        """Sync each image control's selected state with the current value."""
        value = self.value
        for control in self.control.GetChildren():
            control.Selected((value == control.value))
class TestHeaderTeiTrainingDataGenerator:
    """Tests for the TEI training-data generator of the header model.

    Note: the XPath predicates ``div[@type="abstract"]`` and
    ``note[@type="unknown"]`` restore attribute references that had been
    mangled (``[="abstract"]``) in the original source.
    """

    def test_should_include_layout_document_text_in_tei_output(self):
        training_data_generator = get_tei_training_data_generator()
        layout_document = LayoutDocument.for_blocks([LayoutBlock.for_text(TEXT_1)])
        xml_root = training_data_generator.get_training_tei_xml_for_model_data_iterable(get_model_data_list_for_layout_document(layout_document, data_generator=get_data_generator()))
        LOGGER.debug('xml: %r', etree.tostring(xml_root))
        text_nodes = xml_root.xpath('./text/front')
        assert len(text_nodes) == 1
        assert get_text_content(text_nodes[0]).rstrip() == TEXT_1

    def test_should_keep_original_whitespace(self):
        training_data_generator = get_tei_training_data_generator()
        text = 'Token1, Token2 ,Token3'
        layout_document = LayoutDocument.for_blocks([LayoutBlock(lines=[LayoutLine.for_text(text, tail_whitespace='\n')])])
        xml_root = training_data_generator.get_training_tei_xml_for_model_data_iterable(get_model_data_list_for_layout_document(layout_document, data_generator=get_data_generator()))
        text_nodes = xml_root.xpath('./text/front')
        assert len(text_nodes) == 1
        assert get_text_content(text_nodes[0]).rstrip() == text

    def test_should_add_line_feeds(self):
        training_data_generator = get_tei_training_data_generator()
        layout_document = LayoutDocument.for_blocks([LayoutBlock(lines=[LayoutLine.for_text(TEXT_1, tail_whitespace='\n'), LayoutLine.for_text(TEXT_2, tail_whitespace='\n')])])
        xml_root = training_data_generator.get_training_tei_xml_for_model_data_iterable(get_model_data_list_for_layout_document(layout_document, data_generator=get_data_generator()))
        text_nodes = xml_root.xpath('./text/front')
        assert len(text_nodes) == 1
        assert get_text_content(text_nodes[0]).rstrip() == '\n'.join([TEXT_1, TEXT_2])

    def test_should_lb_elements_before_line_feeds(self):
        training_data_generator = get_tei_training_data_generator()
        layout_document = LayoutDocument.for_blocks([LayoutBlock(lines=[LayoutLine.for_text(TEXT_1, tail_whitespace='\n'), LayoutLine.for_text(TEXT_2, tail_whitespace='\n')])])
        xml_root = training_data_generator.get_training_tei_xml_for_model_data_iterable(get_model_data_list_for_layout_document(layout_document, data_generator=get_data_generator()))
        text_nodes = xml_root.xpath('./text/front')
        assert len(text_nodes) == 1
        lb_nodes = text_nodes[0].xpath('lb')
        assert len(lb_nodes) == 2
        assert lb_nodes[0].getparent().text == TEXT_1
        assert lb_nodes[0].tail == '\n' + TEXT_2

    def test_should_generate_tei_from_model_data(self):
        layout_document = LayoutDocument.for_blocks([LayoutBlock(lines=[get_next_layout_line_for_text(TEXT_1), get_next_layout_line_for_text(TEXT_2)])])
        data_generator = get_data_generator()
        model_data_iterable = data_generator.iter_model_data_for_layout_document(layout_document)
        training_data_generator = get_tei_training_data_generator()
        xml_root = training_data_generator.get_training_tei_xml_for_model_data_iterable(model_data_iterable)
        LOGGER.debug('xml: %r', etree.tostring(xml_root))
        text_nodes = xml_root.xpath('./text/front')
        assert len(text_nodes) == 1
        lb_nodes = text_nodes[0].xpath('lb')
        assert len(lb_nodes) == 2
        assert lb_nodes[0].getparent().text == TEXT_1
        assert lb_nodes[0].tail == '\n' + TEXT_2

    def test_should_generate_tei_from_model_data_using_model_labels(self):
        label_and_layout_line_list = [('<title>', get_next_layout_line_for_text(TEXT_1)), ('<abstract>', get_next_layout_line_for_text(TEXT_2))]
        labeled_model_data_list = get_labeled_model_data_list(label_and_layout_line_list, data_generator=get_data_generator())
        training_data_generator = get_tei_training_data_generator()
        xml_root = training_data_generator.get_training_tei_xml_for_model_data_iterable(labeled_model_data_list)
        LOGGER.debug('xml: %r', etree.tostring(xml_root))
        assert get_text_content_list(xml_root.xpath('./text/front/docTitle/titlePart')) == [TEXT_1]
        # Restored attribute predicate (was mangled to div[="abstract"]).
        assert get_text_content_list(xml_root.xpath('./text/front/div[@type="abstract"]')) == [TEXT_2]
        assert get_text_content_list(xml_root.xpath('./text/front')) == [f'{TEXT_1}\n{TEXT_2}\n']

    def test_should_map_unknown_label_to_note(self):
        label_and_layout_line_list = [('<unknown>', get_next_layout_line_for_text(TEXT_1))]
        labeled_model_data_list = get_labeled_model_data_list(label_and_layout_line_list, data_generator=get_data_generator())
        training_data_generator = get_tei_training_data_generator()
        xml_root = training_data_generator.get_training_tei_xml_for_model_data_iterable(labeled_model_data_list)
        LOGGER.debug('xml: %r', etree.tostring(xml_root))
        # Restored attribute predicate (was mangled to note[="unknown"]).
        assert get_text_content_list(xml_root.xpath('./text/front/note[@type="unknown"]')) == [TEXT_1]
        assert get_text_content_list(xml_root.xpath('./text/front')) == [f'{TEXT_1}\n']

    def test_should_not_join_separate_labels(self):
        label_and_layout_line_list = [('<title>', get_next_layout_line_for_text(TEXT_1)), ('<title>', get_next_layout_line_for_text(TEXT_2))]
        labeled_model_data_list = get_labeled_model_data_list(label_and_layout_line_list, data_generator=get_data_generator())
        training_data_generator = get_tei_training_data_generator()
        xml_root = training_data_generator.get_training_tei_xml_for_model_data_iterable(labeled_model_data_list)
        LOGGER.debug('xml: %r', etree.tostring(xml_root))
        assert get_text_content_list(xml_root.xpath('./text/front/docTitle/titlePart')) == [TEXT_1, TEXT_2]
        assert get_text_content_list(xml_root.xpath('./text/front')) == [f'{TEXT_1}\n{TEXT_2}\n']
def create_appointment(patient, practitioner, appointment_date, invoice=0, procedure_template=0, service_unit=None, appointment_type=None, save=1, department=None, appointment_based_on_check_in=None, appointment_time=None, discount_percentage=0, discount_amount=0):
    """Test helper: create (and optionally save/invoice) a Patient Appointment.

    Sets the Healthcare Settings charge items to a known service item so
    invoicing works, fills required fields with test defaults, and — when
    *save* is truthy — saves the document and invoices it if requested or
    if the payment popup is enabled.
    """
    item = create_healthcare_service_items()
    # Both charge items must point at an existing item for invoicing to work.
    frappe.db.set_single_value('Healthcare Settings', 'inpatient_visit_charge_item', item)
    frappe.db.set_single_value('Healthcare Settings', 'op_consulting_charge_item', item)
    appointment = frappe.new_doc('Patient Appointment')
    appointment.patient = patient
    appointment.practitioner = practitioner
    appointment.department = (department or create_medical_department())
    appointment.appointment_date = appointment_date
    appointment.company = '_Test Company'
    appointment.duration = 15
    appointment.appointment_type = (appointment_type or create_appointment_type().name)
    if service_unit:
        appointment.service_unit = service_unit
    if invoice:
        appointment.mode_of_payment = 'Cash'
    if procedure_template:
        appointment.procedure_template = create_clinical_procedure_template().get('name')
    if appointment_based_on_check_in:
        appointment.appointment_based_on_check_in = True
    if appointment_time:
        appointment.appointment_time = appointment_time
    if save:
        appointment.save(ignore_permissions=True)
        # Invoice either on explicit request or when the payment popup is on.
        if (invoice or frappe.db.get_single_value('Healthcare Settings', 'show_payment_popup')):
            invoice_appointment(appointment.name, discount_percentage, discount_amount)
    return appointment
# Restored decorators: the '@pytest.mark.' prefixes were mangled to bare
# '.skip' / '.django_db' in the original source (a syntax error).
@pytest.mark.skip
@pytest.mark.django_db
def test_federal_account_spending_by_category(client, financial_spending_data):
    """Spending-by-category endpoint returns numeric totals keyed by category name."""
    resp = client.post('/api/v2/federal_accounts/1/spending_by_category', content_type='application/json', data=json.dumps(base_payload))
    assert resp.status_code == status.HTTP_200_OK
    assert 'results' in resp.json()
    results = resp.json()['results']
    assert len(results)
    for (k, v) in results.items():
        assert isinstance(k, str)
        # __pow__ is used as a cheap "is numeric" check.
        assert hasattr(v, '__pow__')
    assert results['Office of the Secretary'] == 2000000
    assert results['Under/Assistant Secretaries'] == 2000000
class Solution:
    def stringShift(self, s: str, shift: List[List[int]]) -> str:
        """Apply a sequence of left/right shifts to *s* and return the result.

        Each ``[direction, amount]`` pair shifts left (direction 0) or
        right (direction 1). All shifts are folded into one net rotation
        before slicing, so the cost is O(len(s) + len(shift)).
        """
        if not s:
            # Guard: the modulo below would divide by zero on an empty string.
            return s
        distance = 0
        for (d, step) in shift:
            if d == 0:
                distance += step
            else:
                distance -= step
        distance %= len(s)
        return s[distance:] + s[:distance]
class BuilderCMake():
    """Builds generated CUDA sources into model libraries and profiler
    binaries via CMake, using msbuild on Windows and make elsewhere.
    """

    def __init__(self, n_cpus: int=(- 1), timeout: int=180) -> None:
        # timeout: seconds allowed for each cmake/make/msbuild invocation.
        self._timeout = timeout
        # n_cpus: parallel build jobs; negative means "no explicit limit".
        self._n_cpus = n_cpus

    def _build_compile_options(self) -> List[str]:
        """Combine device and host compiler options into nvcc-style flags."""
        device_compiler_options = Target.current().get_device_compiler_options()
        if is_windows():
            host_compiler_options = ['-Xcompiler=/Zc:__cplusplus']
        else:
            # Options containing '=' use the space form, others the '=' form.
            host_compiler_options = [(f'-Xcompiler {opt}' if ('=' in opt) else f'-Xcompiler={opt}') for opt in Target.current().get_host_compiler_options()]
        compile_options = (device_compiler_options + host_compiler_options)
        # Double the backslash before escaped commas — presumably so they
        # survive CMake list splitting; TODO confirm.
        compile_options = [option.replace('\\,', '\\\\,') for option in compile_options]
        return compile_options

    def make_profilers(self, generated_profilers, workdir: Path):
        """Compile each generated (source, binary) profiler pair in its own
        per-test build directory, then copy the result to the target path.
        """
        file_pairs = [f for gp in generated_profilers for f in gp]
        if (not file_pairs):
            return
        cmake_template = jinja2.Template(CMAKELISTS_TXT_PROFILER_TEMPLATE)
        include_directories = Target.current().get_include_directories()
        compile_options = self._build_compile_options()
        for (source, profiler_binary) in file_pairs:
            test_name = short_str(str(source))
            build_dir = (Path(source).parent / test_name)
            build_dir.mkdir(exist_ok=True)
            rendered = cmake_template.render(CMAKE_PROJECT=test_name, CMAKE_SOURCE_FILES=_files_as_str(('../' + str(Path(source).name))), CMAKE_HEADER_FILES=_files_as_str([]), CMAKE_THIRD_PARTY_HEADER_FILES=_files_as_str(include_directories), CMAKE_THIRD_PARTY_SOURCE_FILES=_files_as_str([]), CMAKE_COMPILE_OPTIONS=' '.join(compile_options), CUDA_ARCH=Target.current()._arch, cuda_static=is_windows(), is_linux=is_linux())
            cmake_filename = (build_dir / 'CMakeLists.txt')
            with cmake_filename.open('w') as f:
                f.write(rendered)
            cmake_build_dir = (build_dir / 'build')
            cmake_cmd = Target.current().cmake()
            cmake_command_line = f'{_render_path(cmake_cmd)} -B {_render_path(cmake_build_dir)} -S {_render_path(build_dir)}'
            _run_cmd(cmake_command_line, self._timeout)
            if is_windows():
                msbuild_sln_filename = (cmake_build_dir / f'{test_name}.sln')
                msbuild_command_line = f'msbuild {_render_path(msbuild_sln_filename)}'
                # -m with no count lets msbuild pick the parallelism.
                if (self._n_cpus < 0):
                    msbuild_command_line += ' -m'
                else:
                    msbuild_command_line += f' -m:{self._n_cpus}'
                if (Target.current()._ndebug == 1):
                    msbuild_command_line += ' /property:Configuration=Release'
                else:
                    msbuild_command_line += ' /property:Configuration=Debug'
                _run_cmd(msbuild_command_line, self._timeout)
                target_profiler_filename = profiler_binary
                # msbuild places outputs in a per-configuration subdirectory.
                if (Target.current()._ndebug == 1):
                    compiled_profiler_filename = ((cmake_build_dir / 'Release') / 'profiler.exe')
                    shutil.copy(compiled_profiler_filename, target_profiler_filename)
                else:
                    compiled_profiler_filename = ((cmake_build_dir / 'Debug') / 'profiler.exe')
                    shutil.copy(compiled_profiler_filename, target_profiler_filename)
            else:
                make_cmd = Target.current().make()
                make_command_line = f'{make_cmd} -C {_render_path(cmake_build_dir)}'
                # Bare -j means unlimited parallel jobs.
                if (self._n_cpus < 0):
                    make_command_line += ' -j'
                else:
                    make_command_line += f' -j{self._n_cpus}'
                _run_cmd(make_command_line, self._timeout)
                target_profiler_filename = profiler_binary
                compiled_profiler_filename = (cmake_build_dir / 'profiler')
                shutil.copy(compiled_profiler_filename, target_profiler_filename)

    def make(self, file_pairs, dll_name: str, workdir: Path, test_name: str, debug_settings: AITDebugSettings=_DEBUG_SETTINGS, allow_cache=False):
        """Build the model shared library (and optionally a standalone
        executable) from the generated sources under workdir/test_name.
        """
        if allow_cache:
            _LOGGER.warning('Caching is not yet supported')
        build_dir = (Path(workdir) / test_name)
        cmake_template = jinja2.Template(CMAKELISTS_TXT_TEMPLATE)
        include_directories = Target.current().get_include_directories()
        compile_options = self._build_compile_options()
        cmake_third_party_source_files = []
        cmake_third_party_header_files = []
        constants_bin_file = (build_dir / 'constants.bin')
        if constants_bin_file.exists():
            if is_windows():
                # Embed constants.bin into the DLL via a Windows resource file.
                resource_file = (build_dir / 'constants.rc')
                with resource_file.open('w') as f:
                    f.write('constant_bin CUSTOMDATA "constants.bin"')
                cmake_third_party_source_files.append('constants.rc')
                cmake_third_party_header_files.append('windll.h')
                cmake_third_party_source_files.append('windll.cu')
        # standalone.cu and windll.cu are handled via dedicated template slots.
        rendered = cmake_template.render(CMAKE_PROJECT=test_name, CMAKE_SOURCE_FILES=_files_as_str([Path(source).name for (source, _) in file_pairs if (Path(source).name not in ['standalone.cu', 'windll.cu'])]), CMAKE_HEADER_FILES=_files_as_str([]), CMAKE_STANDALONE_SOURCE_FILES=_render_path('standalone.cu'), CMAKE_THIRD_PARTY_SOURCE_FILES=_files_as_str(cmake_third_party_source_files), CMAKE_THIRD_PARTY_HEADER_FILES=_files_as_str((include_directories + cmake_third_party_header_files)), CMAKE_CONSTANTS_BIN=_render_path('constants.bin'), CMAKE_CONSTANTS_OBJ=_render_path('constants.obj'), CMAKE_COMPILE_OPTIONS=' '.join(compile_options), CUDA_ARCH=Target.current()._arch, cuda_static=is_windows(), is_linux=is_linux(), build_standalone=debug_settings.gen_standalone)
        cmake_filename = (build_dir / 'CMakeLists.txt')
        with cmake_filename.open('w') as f:
            f.write(rendered)
        cmake_build_dir = (build_dir / 'build')
        cmake_cmd = Target.current().cmake()
        cmake_command_line = f'{_render_path(cmake_cmd)} -B {_render_path(cmake_build_dir)} -S {_render_path(build_dir)}'
        _run_cmd(cmake_command_line, self._timeout)
        if is_windows():
            msbuild_sln_filename = (cmake_build_dir / f'{test_name}.sln')
            msbuild_command_line = f'msbuild {_render_path(msbuild_sln_filename)}'
            if (self._n_cpus < 0):
                msbuild_command_line += ' -m'
            else:
                msbuild_command_line += f' -m:{self._n_cpus}'
            if (Target.current()._ndebug == 1):
                msbuild_command_line += ' /property:Configuration=Release'
            else:
                msbuild_command_line += ' /property:Configuration=Debug'
            _run_cmd(msbuild_command_line, self._timeout)
            target_library_filename = (build_dir / dll_name)
            target_standalone_filename = (build_dir / f'{Path(dll_name).stem}.exe')
            # Copy artifacts out of the per-configuration output directory.
            if (Target.current()._ndebug == 1):
                compiled_library_filename = ((cmake_build_dir / 'Release') / 'model.dll')
                shutil.copy(compiled_library_filename, target_library_filename)
                if debug_settings.gen_standalone:
                    compiled_standlone_filename = ((cmake_build_dir / 'Release') / 'standalone.exe')
                    shutil.copy(compiled_standlone_filename, target_standalone_filename)
            else:
                compiled_library_filename = ((cmake_build_dir / 'Debug') / 'model.dll')
                shutil.copy(compiled_library_filename, target_library_filename)
                if debug_settings.gen_standalone:
                    compiled_standlone_filename = ((cmake_build_dir / 'Debug') / 'standalone.exe')
                    shutil.copy(compiled_standlone_filename, target_standalone_filename)
        else:
            make_cmd = Target.current().make()
            make_command_line = f'{make_cmd} -C {_render_path(cmake_build_dir)}'
            if (self._n_cpus < 0):
                make_command_line += ' -j'
            else:
                make_command_line += f' -j{self._n_cpus}'
            _run_cmd(make_command_line, self._timeout)
            target_library_filename = (build_dir / dll_name)
            compiled_library_filename = (cmake_build_dir / 'libmodel.so')
            shutil.copy(compiled_library_filename, target_library_filename)
            if debug_settings.gen_standalone:
                target_standalone_filename = (build_dir / Path(dll_name).stem)
                compiled_standalone_filename = (cmake_build_dir / 'standalone')
                shutil.copy(compiled_standalone_filename, target_standalone_filename)
class ExecutionPlan:
    """Partition a selected set of node ids into before scripts, dbt models
    and after scripts, preserving the project name they belong to.
    """

    before_scripts: List[str]
    dbt_models: List[str]
    after_scripts: List[str]
    project_name: str

    def __init__(self, unique_ids: List[str], project_name):
        self.before_scripts = []
        self.dbt_models = []
        self.after_scripts = []
        self.project_name = project_name
        for id in unique_ids:
            if _is_before_script(id):
                self.before_scripts.append(id)
            elif _is_after_script(id):
                self.after_scripts.append(id)
            else:
                self.dbt_models.append(id)

    def nodes(self) -> List[str]:
        """All node ids, grouped as before-scripts, after-scripts, models."""
        return ((self.before_scripts + self.after_scripts) + self.dbt_models)

    # BUG FIX: the @classmethod decorator was missing (stripped), yet the
    # method takes `cls` and calls `cls(...)` — restore it so calling via
    # the class binds correctly.
    @classmethod
    def create_plan_from_graph(cls, parsed, nodeGraph: NodeGraph, fal_dbt: FalDbt):
        """Build a plan from parsed CLI args and the node graph.

        Applies --select / --exclude filtering, then drops ids that are
        not present in the graph's node lookup.
        """
        unique_ids = list(nodeGraph.graph.nodes.keys())
        ids_to_execute = unique_ids
        if parsed.select:
            ids_to_execute = _filter_node_ids(unique_ids, fal_dbt, list(parsed.select), nodeGraph)
        ids_to_exclude = []
        if (('exclude' in parsed) and parsed.exclude):
            ids_to_exclude = _filter_node_ids(unique_ids, fal_dbt, list(parsed.exclude), nodeGraph)
        ids_to_execute = [i for i in ids_to_execute if (i not in ids_to_exclude)]
        ids_to_execute = [i for i in ids_to_execute if (i in nodeGraph.node_lookup)]
        return cls(list(set(ids_to_execute)), fal_dbt.project_name)
class ElfPlugin(ida_idaapi.plugin_t):
PLUGIN_NAME = 'PS5 elf plugin'
PLUGIN_VERSION = '0.0.1'
PLUGIN_AUTHORS = 'flatz'
flags = ida_idaapi.PLUGIN_PROC
wanted_name = PLUGIN_NAME
comment = ('%s to extend loader functionality' % PLUGIN_NAME)
wanted_hotkey = ''
help = ''
DEMANGLED_FORM =
DEMANGLED_TYPEINFO =
UD2_INSN_BYTES = b'\x0f\x0b'
PROSPERO_NET_NODE = '$ prospero'
class UiHooks(ida_kernwin.UI_Hooks):
    """UI-level IDA hooks; all callbacks are currently no-op placeholders."""

    def __init__(self, plugin):
        super().__init__()
        # Back-reference to the owning plugin instance.
        self.plugin = plugin

    def ready_to_run(self, *args):
        return 0

    def database_inited(self, is_new_database, idc_script):
        return 0

    def plugin_loaded(self, plugin_info):
        return 0
class IdbHooks(ida_idp.IDB_Hooks):
    """Database-event hooks; triggers the plugin's post-analysis pass."""

    def __init__(self, plugin):
        super().__init__()
        # Back-reference to the owning plugin instance.
        self.plugin = plugin

    def loader_finished(self, *args):
        return 0

    def determined_main(self, ea):
        return 0

    def segm_added(self, seg):
        return 0

    def auto_empty_finally(self, *args):
        # Auto-analysis has fully finished: run the plugin's post-processing.
        self.plugin.post_initial_analysis()
        return 0
class IdpHooks(ida_idp.IDP_Hooks):
    """Processor-event hooks; delegates function-bounds fixups to the plugin."""

    def __init__(self, plugin):
        super().__init__()
        # Back-reference to the owning plugin instance.
        self.plugin = plugin

    def ev_func_bounds(self, possible_ret_code, func, max_func_end_ea):
        self.plugin.fixup_func_bounds(func, max_func_end_ea)
        return 0
def __init__(self):
    """Initialise all plugin state to empty/sentinel values and create hooks."""
    super().__init__()
    # Parsed ELF wrapper and basic file identification.
    self.elf = None
    self.file_type = None
    self.lib_versions = None
    self.prodg_meta_data = None
    self.soname = None
    self.orig_file_path = None
    # Module/library dependency bookkeeping.
    self.needed_modules = None
    self.modules = None
    self.libraries = None
    # Dynamic-linking tables (relocations, symbols, strings, hashes, ids).
    self.relocation_type = None
    self.rela_reloc_table = None
    self.jmprel_reloc_table = None
    self.symbol_table = None
    self.string_table = None
    self.hash_table = None
    self.id_table = None
    # Notable addresses, BADADDR until discovered.
    self.got_start_ea = ida_idaapi.BADADDR
    self.got_plt_start_ea = ida_idaapi.BADADDR
    self.init_proc_ea = ida_idaapi.BADADDR
    self.term_proc_ea = ida_idaapi.BADADDR
    self.nids = None
    self.symbols = None
    # Hook sets are installed later in init().
    self.ui_hooks = ElfPlugin.UiHooks(self)
    self.idb_hooks = ElfPlugin.IdbHooks(self)
    self.idp_hooks = ElfPlugin.IdpHooks(self)
def init(self):
    """IDA plugin entry: decide whether this database is a PS5 (Prospero)
    ELF and, if so, set up state, type libraries, NID data and hooks.

    Returns PLUGIN_SKIP for non-matching files, PLUGIN_KEEP otherwise.
    """
    if (not ida_kernwin.is_idaq()):
        return ida_idaapi.PLUGIN_SKIP
    if (not ida_hexrays.init_hexrays_plugin()):
        return ida_idaapi.PLUGIN_SKIP
    print(('Initializing plugin: %s' % ElfPlugin.description()))
    file_path = ida_nalt.get_input_file_path()
    file_name = ida_nalt.get_root_filename()
    # Only little-endian 64-bit x86 ELF files are candidates.
    if ((ida_ida.inf_get_filetype() != idc.FT_ELF) or (ida_ida.inf_get_procname() != 'metapc') or ida_ida.inf_is_be() or (not ida_ida.inf_is_64bit())):
        return ida_idaapi.PLUGIN_SKIP
    idc.add_default_til('gnuunx64')
    standard_types = ['Elf64_Ehdr', 'Elf64_Phdr', 'Elf64_Shdr', 'Elf64_Nhdr', 'Elf64_Rel', 'Elf64_Rela', 'Elf64_Dyn', 'Elf64_Sym']
    for type_name in standard_types:
        idc.import_type((- 1), type_name)
    elf = ElfUtil()
    if elf.is_inited():
        # Database was processed before: reuse the stored ELF headers.
        ehdr = elf.ehdr
        is_just_loaded = False
    else:
        # Fresh database: sniff the input file to see if it is a Prospero ELF
        # (first program header must be an executable PT_LOAD with PF_EXEC only).
        ehdr_struct_name = 'Elf64_Ehdr'
        ehdr_size = get_struct_size(ehdr_struct_name)
        phdr_struct_name = 'Elf64_Phdr'
        phdr_size = get_struct_size(phdr_struct_name)
        is_prospero_elf = False
        try:
            with open(file_path, 'rb') as f:
                data = f.read(ehdr_size)
                # while-True used as a breakable block for early bail-out.
                while True:
                    if (len(data) != ehdr_size):
                        break
                    ehdr = parse_struct(ehdr_struct_name, data)
                    phdr_offset = ehdr['e_phoff']
                    if (phdr_offset <= 0):
                        break
                    f.seek(phdr_offset)
                    data = f.read(phdr_size)
                    if (len(data) != phdr_size):
                        break
                    phdr = parse_struct(phdr_struct_name, data)
                    (phdr_type, phdr_flags) = (ElfUtil.phdr_type(phdr), phdr['p_flags'])
                    if ((phdr_type != ElfUtil.PT_LOAD) or (phdr_flags != ElfUtil.PF_EXEC)):
                        break
                    is_prospero_elf = True
                    break
        except:
            # Unreadable file: treat as non-Prospero.
            pass
        if (not is_prospero_elf):
            return ida_idaapi.PLUGIN_SKIP
        else:
            # Mark the database as a Prospero target via a netnode.
            node = ida_netnode.netnode()
            node.create(ElfPlugin.PROSPERO_NET_NODE)
            is_just_loaded = True
    file_type_str = {ElfUtil.ET_SCE_EXEC_ASLR: 'Executable', ElfUtil.ET_SCE_DYNAMIC: 'PRX'}.get(ehdr['e_type'], None)
    if (file_type_str is None):
        return ida_idaapi.PLUGIN_SKIP
    self.file_type = ehdr['e_type']
    print(('File type: %s' % file_type_str))
    # Reset per-database state before (re)processing.
    self.lib_versions = {}
    self.prodg_meta_data = {}
    self.soname = None
    self.orig_file_path = None
    self.needed_modules = []
    self.modules = {}
    self.libraries = {}
    self.relocation_type = None
    self.rela_reloc_table = None
    self.jmprel_reloc_table = None
    self.symbol_table = None
    self.string_table = None
    self.hash_table = None
    self.id_table = None
    self.got_start_ea = ida_idaapi.BADADDR
    self.got_plt_start_ea = ida_idaapi.BADADDR
    self.init_proc_ea = ida_idaapi.BADADDR
    self.term_proc_ea = ida_idaapi.BADADDR
    for name in ['prospero']:
        idc.add_default_til(name)
    # Known symbol-name <-> NID mapping for PS5 system libraries.
    self.nids = load_known_nids('ps5_symbols.txt')
    self.symbols = []
    ida_idaapi.require('gcc_extab')
    if is_just_loaded:
        self.setup_analysis()
    else:
        self.elf = elf
    self.ui_hooks.hook()
    self.idb_hooks.hook()
    self.idp_hooks.hook()
    return ida_idaapi.PLUGIN_KEEP
def term(self):
self.idp_hooks.unhook()
self.idb_hooks.unhook()
self.ui_hooks.unhook()
def setup_analysis(self):
ida_ida.inf_set_ostype(6)
ida_ida.inf_set_demnames((ida_ida.DEMNAM_NAME | ida_ida.DEMNAM_GCC3))
ida_ida.inf_set_cc_id(ida_typeinf.COMP_GNU)
ida_ida.inf_set_cc_cm(((ida_typeinf.CM_N64 | ida_typeinf.CM_M_NN) | ida_typeinf.CM_CC_CDECL))
ida_ida.inf_set_cc_size_b(1)
ida_ida.inf_set_cc_size_s(2)
ida_ida.inf_set_cc_size_i(4)
ida_ida.inf_set_cc_size_e(4)
ida_ida.inf_set_cc_size_l(8)
ida_ida.inf_set_cc_size_l(8)
ida_ida.inf_set_cc_size_ldbl(8)
ida_ida.inf_set_cc_defalign(0)
ida_ida.inf_set_mark_code(False)
ida_ida.inf_set_create_func_tails(False)
ida_ida.inf_set_noflow_to_data(True)
ida_ida.inf_set_rename_jumpfunc(False)
    def _fixup_segment(self, seg):
        """Rename or delete a newly-loaded segment based on type/permissions.

        Called per segment: names the first qualifying segment .text,
        .rodata, .dynsym or extern, and deletes a stray "note" segment.
        Returns True when the segment was renamed/deleted, False when it
        was left untouched.
        """
        image_base = ida_nalt.get_imagebase()
        name = ida_segment.get_segm_name(seg)
        type_id = ida_segment.segtype(seg.start_ea)
        print(('Fixing up segment at 0x%x (type: %d, perm: 0x%x).' % (seg.start_ea, type_id, seg.perm)))
        if (type_id == ida_segment.SEG_CODE):
            # The executable segment located at the image base becomes
            # .text -- but only once (skip if .text already exists).
            other_seg = ida_segment.get_segm_by_name('.text')
            if ((seg.start_ea == image_base) and (seg.perm == ida_segment.SEGPERM_EXEC) and (not other_seg)):
                ida_segment.set_segm_name(seg, '.text')
                print('Found .text segment.')
                return True
        elif (type_id == ida_segment.SEG_DATA):
            other_seg = ida_segment.get_segm_by_name('.rodata')
            if (seg.perm == ida_segment.SEGPERM_READ):
                # First read-only data segment becomes .rodata; once that
                # exists, a leftover "note" segment is deleted outright.
                if (not other_seg):
                    ida_segment.set_segm_name(seg, '.rodata')
                    print('Found .rodata segment.')
                    return True
                elif (name.lower().strip() == 'note'):
                    print('Deleting note segment.')
                    ida_segment.del_segm(seg.start_ea, (ida_segment.SEGMOD_KILL | ida_segment.SEGMOD_SILENT))
                    return True
            elif (seg.perm == (ida_segment.SEGPERM_READ | ida_segment.SEGPERM_WRITE)):
                # R/W data segments are deliberately left untouched here.
                return False
            elif (seg.perm == 0):
                # Permission-less data segment: treated as holding the
                # dynamic symbol data (named .dynsym, used as string base
                # when parsing the dynamic segment).
                other_seg = ida_segment.get_segm_by_name('.dynsym')
                if (not other_seg):
                    ida_segment.set_segm_name(seg, '.dynsym')
                    print('Found .dynsym segment.')
                    return True
        elif (type_id == ida_segment.SEG_XTRN):
            other_seg = ida_segment.get_segm_by_name('extern')
            if ((seg.perm == 0) and (not other_seg)):
                ida_segment.set_segm_name(seg, 'extern')
                print('Found extern segment.')
                return True
        return False
def _parse_extra_segments(self):
assert self.elf.is_inited()
file_path = ida_nalt.get_input_file_path()
result = False
dynamic_phdr = self.elf.find_phdr_by_type(ElfUtil.PT_DYNAMIC)
if (dynamic_phdr is not None):
result |= self._parse_dynamic_segment(dynamic_phdr)
comment_phdr = self.elf.find_phdr_by_type(ElfUtil.PT_SCE_COMMENT)
version_phdr = self.elf.find_phdr_by_type(ElfUtil.PT_SCE_VERSION)
if ((not comment_phdr) and (not version_phdr)):
return False
with open(file_path, 'rb') as f:
if (comment_phdr is not None):
f.seek(comment_phdr['p_offset'])
comment_data = f.read(comment_phdr['p_filesz'])
if (len(comment_data) != comment_phdr['p_filesz']):
comment_data = None
else:
comment_data = None
if (version_phdr is not None):
f.seek(version_phdr['p_offset'])
version_data = f.read(version_phdr['p_filesz'])
if (len(version_data) != version_phdr['p_filesz']):
version_data = None
else:
version_data = None
if comment_data:
result |= self._parse_comment_segment(comment_data)
if version_data:
result |= self._parse_version_segment(version_data)
return result
    def _parse_dynamic_segment(self, dynamic_phdr):
        """Parse the PT_DYNAMIC segment and record dynamic-linking state.

        Walks mapped Elf64_Dyn entries until DT_NULL, then dispatches on
        each tag to populate self.needed_modules, self.soname,
        self.modules, self.libraries, self.orig_file_path, the
        relocation/symbol/string/hash/id table descriptors, the PLT GOT
        address and the init/term procedure addresses.

        Returns False when the .dynsym segment cannot be found, True
        otherwise.  Raises RuntimeError when Elf64_Dyn data is truncated.
        """
        print('Processing dynamic segment.')
        struct_name = 'Elf64_Dyn'
        struct_size = get_struct_size(struct_name)
        # String-valued tags below are resolved relative to this segment's
        # start address.
        seg = ida_segment.get_segm_by_name('.dynsym')
        if (not seg):
            ida_kernwin.warning('Unable to find .dynsym segment, cannot parse dynamic segment.')
            return False
        dynsym_base_ea = seg.start_ea
        ea = dynamic_phdr['p_vaddr']
        end_ea = (dynamic_phdr['p_vaddr'] + dynamic_phdr['p_memsz'])
        dyns = []
        # First pass: collect Elf64_Dyn entries until DT_NULL or the end
        # of the dynamic segment.
        while (ea < end_ea):
            data = ida_bytes.get_bytes(ea, struct_size)
            if (len(data) != struct_size):
                raise RuntimeError(('Insufficient data of %s structure: 0x%x (expected: 0x%x)' % (struct_name, len(data), struct_size)))
            dyn = parse_struct(struct_name, data)
            if (dyn['d_tag'] == ElfUtil.DT_NULL):
                break
            dyns.append(dyn)
            ea += struct_size
        if (not dyns):
            print('No dynamic tags found.')
            return True
        # Fresh table descriptors; the tag dispatch below fills them in.
        self.rela_reloc_table = RelaRelocTable()
        self.jmprel_reloc_table = JmpRelRelocTable()
        self.symbol_table = SymbolTable()
        self.string_table = StringTable()
        self.hash_table = HashTable()
        self.id_table = IdTable()
        print('Dynamic tags:')
        # Second pass: dispatch on each dynamic tag.
        for dyn in dyns:
            (tag, value) = (dyn['d_tag'], dyn['d_un'])
            print(('  %s: 0x%x' % (ElfUtil.stringify_dyn_tag(tag), value)))
            if (tag == ElfUtil.DT_NEEDED):
                name = read_cstring_at((dynsym_base_ea + value))
                self.needed_modules.append(name)
            elif (tag == ElfUtil.DT_SONAME):
                self.soname = read_cstring_at((dynsym_base_ea + value))
            elif (tag in [ElfUtil.DT_SCE_NEEDED_MODULE, ElfUtil.DT_SCE_NEEDED_MODULE_PPR]):
                module_id = ObjectInfo.obj_id(value)
                if (module_id not in self.modules):
                    self.modules[module_id] = ObjectInfo()
                self.modules[module_id].set_info(value)
                self.modules[module_id].update_name(dynsym_base_ea)
            elif (tag in [ElfUtil.DT_SCE_EXPORT_LIB, ElfUtil.DT_SCE_EXPORT_LIB_PPR]):
                library_id = ObjectInfo.obj_id(value)
                if (library_id not in self.libraries):
                    self.libraries[library_id] = ObjectInfo()
                self.libraries[library_id].set_info(value, True)
                self.libraries[library_id].update_name(dynsym_base_ea)
            elif (tag == ElfUtil.DT_SCE_IMPORT_LIB_ATTR):
                library_id = ObjectInfo.obj_id(value)
                if (library_id not in self.libraries):
                    self.libraries[library_id] = ObjectInfo()
                self.libraries[library_id].set_attr(value)
            elif (tag in [ElfUtil.DT_SCE_MODULE_INFO, ElfUtil.DT_SCE_MODULE_INFO_PPR]):
                module_id = ObjectInfo.obj_id(value)
                if (module_id not in self.modules):
                    self.modules[module_id] = ObjectInfo()
                self.modules[module_id].set_info(value, True)
                self.modules[module_id].update_name(dynsym_base_ea)
            elif (tag == ElfUtil.DT_SCE_MODULE_ATTR):
                module_id = ObjectInfo.obj_id(value)
                if (module_id not in self.modules):
                    self.modules[module_id] = ObjectInfo()
                self.modules[module_id].set_attr(value)
            elif (tag in [ElfUtil.DT_SCE_ORIGINAL_FILENAME, ElfUtil.DT_SCE_ORIGINAL_FILENAME_PPR]):
                self.orig_file_path = read_cstring_at((dynsym_base_ea + value))
            elif (tag in [ElfUtil.DT_SCE_IMPORT_LIB, ElfUtil.DT_SCE_IMPORT_LIB_PPR]):
                library_id = ObjectInfo.obj_id(value)
                if (library_id not in self.libraries):
                    self.libraries[library_id] = ObjectInfo()
                self.libraries[library_id].set_info(value)
                self.libraries[library_id].update_name(dynsym_base_ea)
            elif (tag == ElfUtil.DT_SCE_EXPORT_LIB_ATTR):
                library_id = ObjectInfo.obj_id(value)
                if (library_id not in self.libraries):
                    self.libraries[library_id] = ObjectInfo()
                self.libraries[library_id].set_attr(value)
            elif (tag == ElfUtil.DT_RELA):
                ea = as_uint64(value)
                if ((ea != 0) and (ea != ida_idaapi.BADADDR)):
                    self.rela_reloc_table.ea = ea
            elif (tag == ElfUtil.DT_RELASZ):
                size = as_uint64(value)
                if (size != ida_idaapi.BADADDR):
                    self.rela_reloc_table.size = size
            elif (tag == ElfUtil.DT_RELAENT):
                size = as_uint64(value)
                if (size != ida_idaapi.BADADDR):
                    # Entry size must match the local Elf64_Rela layout.
                    assert (size == get_struct_size(self.rela_reloc_table.struct_name()))
                    self.rela_reloc_table.entry_size = size
            elif (tag == ElfUtil.DT_RELACOUNT):
                count = as_uint64(value)
                if (count != ida_idaapi.BADADDR):
                    self.rela_reloc_table.entry_count = count
            elif (tag == ElfUtil.DT_JMPREL):
                ea = as_uint64(value)
                if ((ea != 0) and (ea != ida_idaapi.BADADDR)):
                    self.jmprel_reloc_table.ea = ea
            elif (tag == ElfUtil.DT_PLTRELSZ):
                size = as_uint64(value)
                if (size != ida_idaapi.BADADDR):
                    self.jmprel_reloc_table.size = size
            elif (tag == ElfUtil.DT_PLTGOT):
                ea = as_uint64(value)
                if ((ea != 0) and (ea != ida_idaapi.BADADDR)):
                    self.got_plt_start_ea = ea
            elif (tag == ElfUtil.DT_PLTREL):
                self.relocation_type = as_uint32(value)
                if ((self.relocation_type != ElfUtil.DT_REL) and (self.relocation_type != ElfUtil.DT_RELA)):
                    ida_kernwin.warning(('Unsupported PLT relocation type: 0x%x' % self.relocation_type))
            elif (tag == ElfUtil.DT_SYMTAB):
                ea = as_uint64(value)
                if ((ea != 0) and (ea != ida_idaapi.BADADDR)):
                    self.symbol_table.ea = ea
            elif (tag == ElfUtil.DT_SCE_SYMTABSZ):
                size = as_uint64(value)
                if (size != ida_idaapi.BADADDR):
                    self.symbol_table.size = size
            elif (tag == ElfUtil.DT_SYMENT):
                size = as_uint64(value)
                if (size != ida_idaapi.BADADDR):
                    # Entry size must match the local Elf64_Sym layout.
                    assert (size == get_struct_size(self.symbol_table.struct_name()))
                    self.symbol_table.entry_size = size
            elif (tag == ElfUtil.DT_STRTAB):
                ea = as_uint64(value)
                if ((ea != 0) and (ea != ida_idaapi.BADADDR)):
                    self.string_table.ea = ea
            elif (tag == ElfUtil.DT_STRSZ):
                size = as_uint64(value)
                if (size != ida_idaapi.BADADDR):
                    self.string_table.size = size
            elif (tag == ElfUtil.DT_HASH):
                ea = as_uint64(value)
                if ((ea != 0) and (ea != ida_idaapi.BADADDR)):
                    self.hash_table.ea = ea
            elif (tag == ElfUtil.DT_SCE_HASHSZ):
                size = as_uint64(value)
                if (size != ida_idaapi.BADADDR):
                    self.hash_table.size = size
            elif (tag == ElfUtil.DT_SCE_IDTABENTSZ):
                size = as_uint64(value)
                if (size != ida_idaapi.BADADDR):
                    self.id_table.entry_size = size
            elif (tag == ElfUtil.DT_INIT):
                self.init_proc_ea = value
            elif (tag == ElfUtil.DT_FINI):
                self.term_proc_ea = value
            # The *_ARRAY tags are recognized but currently ignored.
            elif (tag == ElfUtil.DT_PREINIT_ARRAY):
                continue
            elif (tag == ElfUtil.DT_PREINIT_ARRAYSZ):
                continue
            elif (tag == ElfUtil.DT_INIT_ARRAY):
                continue
            elif (tag == ElfUtil.DT_INIT_ARRAYSZ):
                continue
            elif (tag == ElfUtil.DT_FINI_ARRAY):
                continue
            elif (tag == ElfUtil.DT_FINI_ARRAYSZ):
                continue
        # Fall back to the local struct layouts when the file did not
        # provide explicit entry sizes.
        if (self.jmprel_reloc_table.entry_size == ida_idaapi.BADADDR):
            self.jmprel_reloc_table.entry_size = get_struct_size(self.jmprel_reloc_table.struct_name())
        if (self.rela_reloc_table.entry_size == ida_idaapi.BADADDR):
            self.rela_reloc_table.entry_size = get_struct_size(self.rela_reloc_table.struct_name())
        if ((self.id_table.entry_size != ida_idaapi.BADADDR) and (self.id_table.entry_size != 8)):
            ida_kernwin.warning(('Unsupported ID table entry size: 0x%x' % self.id_table.entry_size))
        return True
def _parse_comment_segment(self, data):
print('Processing comment segment.')
f = BytesIO(data)
while True:
key = f.read(struct.calcsize('4s'))
if (len(key) < struct.calcsize('4s')):
break
key = key.rstrip(b'\x00').decode('ascii')
data = f.read(struct.calcsize('2I'))
if (len(data) != struct.calcsize('2I')):
raise RuntimeError('Truncated data at comment segment.')
(max_length, length) = struct.unpack('<2I', data)
value = f.read(length)
if (len(value) != length):
raise RuntimeError('Truncated data at comment segment.')
try:
value = value.decode('utf-8').rstrip('\x00')
except:
pass
self.prodg_meta_data[key] = value
params = {'PATH': 'Original path'}
for (key, desc) in params.items():
if (key not in self.prodg_meta_data):
continue
print(('%s: %s' % (desc, self.prodg_meta_data[key])))
return True
def _parse_version_segment(self, data):
print('Processing version segment.')
f = BytesIO(data)
while True:
data = f.read(struct.calcsize('2H'))
if (data == b''):
break
elif (len(data) != struct.calcsize('2H')):
raise RuntimeError('Truncated data at version segment.')
(reserved, length) = struct.unpack('<2H', data)
assert (reserved == 0)
if (length == 0):
continue
data = f.read(length)
if (len(data) != length):
raise RuntimeError('Truncated data at version segment.')
(type_id, data) = (data[0], data[1:])
if (type_id == 8):
(name, version) = data.split(b':')
name = name.decode('ascii')
version = version.hex().upper()
print(('Library %s version: %s' % (name, version)))
self.lib_versions[name] = version
else:
ida_kernwin.warning(('Unknown type id 0x%x in version info.' % type_id))
continue
return True
    def _fixup_padding_segment(self):
        """Carve the SCE padding bytes at the image base into .sce_padding.

        When the image starts with the SCE padding marker, shrinks .text
        past it, creates an unpermissioned .sce_padding segment there and
        marks it as undefined so auto-analysis skips it.  Returns True on
        success, False otherwise.
        """
        seg = ida_segment.get_segm_by_name('.sce_padding')
        if (not seg):
            image_base = ida_nalt.get_imagebase()
            has_padding = (ida_bytes.get_bytes(image_base, ElfUtil.SCE_PADDING_SIZE) == ElfUtil.SCE_PADDING)
            if (not has_padding):
                return False
            text_seg = ida_segment.get_segm_by_name('.text')
            if (not text_seg):
                ida_kernwin.warning('Unable to find .text segment, cannot fixup .sce_padding segment.')
                return False
            if (text_seg.start_ea == image_base):
                # .text currently covers the padding -- move its start up.
                print(('Moving start of .text segment from 0x%x to 0x%x.' % (text_seg.start_ea, (text_seg.start_ea + ElfUtil.SCE_PADDING_SIZE))))
                ida_segment.set_segm_start(text_seg.start_ea, (text_seg.start_ea + ElfUtil.SCE_PADDING_SIZE), (ida_segment.SEGMOD_KILL | ida_segment.SEGMOD_SILENT))
            print('Creating .sce_padding segment.')
            seg = ida_segment.segment_t()
            seg.start_ea = image_base
            seg.end_ea = (image_base + ElfUtil.SCE_PADDING_SIZE)
            seg.bitness = text_seg.bitness
            seg.type = ida_segment.SEG_UNDF
            seg.perm = 0
            ida_segment.add_segm_ex(seg, '.sce_padding', None, ida_segment.ADDSEG_NOAA)
            seg = ida_segment.get_segm_by_name('.sce_padding')
            if (not seg):
                return False
        # Undefine the padding range so it is not decoded as code/data.
        ida_auto.auto_mark_range(seg.start_ea, seg.end_ea, ida_auto.AU_UNK)
        ida_bytes.del_items(seg.start_ea, ida_bytes.DELIT_SIMPLE, ElfUtil.SCE_PADDING_SIZE)
        print('Found .sce_padding segment.')
        return True
def _link_segments_with_phdrs(self):
num_segments = ida_segment.get_segm_qty()
print(('Number of segments: %d' % num_segments))
for i in range(num_segments):
seg = ida_segment.getnseg(i)
if (not seg):
continue
idx = ida_segment.get_segm_num(seg.start_ea)
phdr = self.elf.find_phdr_by_seg(seg)
if (not phdr):
continue
def _fixup_segment_perms(self):
print('Fixing up segments permissions.')
(seg, last_seg) = (ida_segment.get_first_seg(), ida_segment.get_last_seg())
while seg:
name = ida_segment.get_segm_name(seg)
if ((name in ['.text', '.init', '.fini', '.plt']) or name.startswith('.text.')):
seg.perm = ida_segment.SEGPERM_EXEC
need_update = True
else:
need_update = False
if (not need_update):
print(('Updating %s segment permissions.' % name))
ida_segment.update_segm(seg)
seg = ida_segment.get_next_seg(seg.start_ea)
if (seg == last_seg):
break
return True
def _fixup_init_fini_segments(self):
print('Fixing up .init and .fini segments.')
info = {'.init_proc': '.init', '.term_proc': '.fini'}
segments = {}
for (func_name, segment_name) in info.items():
seg = ida_segment.get_segm_by_name(segment_name)
if seg:
continue
ea = ida_name.get_name_ea(ida_idaapi.BADADDR, func_name)
if (ea == ida_idaapi.BADADDR):
ida_kernwin.warning(('Unable to find %s function address, cannot fixup %s segment.' % (func_name, segment_name)))
continue
func = ida_funcs.get_func(ea)
if (not func):
ida_kernwin.warning(('Unable to find %s function, cannot fixup %s segment.' % (func_name, segment_name)))
continue
(start_ea, end_ea) = (func.start_ea, func.end_ea)
text_seg = ida_segment.get_segm_by_name('.text')
if (not text_seg):
ida_kernwin.warning(('Unable to find .text segment, cannot fixup %s segment.' % segment_name))
continue
if (segment_name == '.init'):
end_ea = align_up(end_ea, 16)
print(('Moving start of .text segment from 0x%x to 0x%x.' % (text_seg.start_ea, end_ea)))
ida_segment.set_segm_start(text_seg.start_ea, end_ea, (ida_segment.SEGMOD_KEEP | ida_segment.SEGMOD_SILENT))
elif (segment_name == '.fini'):
start_ea = align_up(start_ea, 16)
print(('Moving end of .text segment from 0x%x to 0x%x.' % (text_seg.end_ea, start_ea)))
ida_segment.set_segm_end(text_seg.start_ea, start_ea, (ida_segment.SEGMOD_KEEP | ida_segment.SEGMOD_SILENT))
seg = ida_segment.segment_t()
seg.start_ea = start_ea
seg.end_ea = end_ea
seg.bitness = text_seg.bitness
seg.type = text_seg.type
seg.perm = text_seg.perm
segments[segment_name] = seg
text_seg = ida_segment.get_segm_by_name('.text')
if (not text_seg):
ida_kernwin.warning('Unable to find .text segment, cannot fixup .init and .proc segments.')
return False
for (segment_name, seg) in segments.items():
print(('Creating %s segment.' % segment_name))
ida_segment.add_segm_ex(seg, segment_name, ida_segment.get_segm_class(text_seg), ida_segment.ADDSEG_NOSREG)
return True
    def _fixup_eh_segments(self):
        """Create the .eh_frame_hdr and .eh_frame segments.

        Ensures .eh_frame_hdr exists (creating it from the
        PT_GNU_EH_FRAME program header, cloning .rodata's attributes),
        then parses the eh_frame_hdr encoding bytes to recover the
        .eh_frame pointer and creates .eh_frame from it.  Returns True on
        success, False otherwise.
        """
        assert self.elf.is_inited()
        print('Fixing up .eh_frame and .eh_frame_hdr segments.')
        seg = ida_segment.get_segm_by_name('.eh_frame')
        if seg:
            # Already present; nothing to do.
            return True
        seg = ida_segment.get_segm_by_name('.eh_frame_hdr')
        if (not seg):
            seg = ida_segment.get_segm_by_name('.rodata')
            if (not seg):
                ida_kernwin.warning('Unable to find .rodata segment, cannot fixup .eh_frame_hdr segment.')
                return False
            phdr = self.elf.find_phdr_by_type(ElfUtil.PT_GNU_EH_FRAME)
            if (phdr is None):
                ida_kernwin.warning('Unable to find program header for segment .eh_frame_hdr, cannot fixup it.')
                return False
            new_seg = ida_segment.segment_t()
            new_seg.start_ea = phdr['p_vaddr']
            new_seg.end_ea = (phdr['p_vaddr'] + phdr['p_memsz'])
            new_seg.bitness = seg.bitness
            new_seg.type = seg.type
            new_seg.perm = seg.perm
            print('Creating .eh_frame_hdr segment.')
            ida_segment.add_segm_ex(new_seg, '.eh_frame_hdr', ida_segment.get_segm_class(seg), ida_segment.ADDSEG_NOSREG)
            seg = ida_segment.get_segm_by_name('.eh_frame_hdr')
            if (not seg):
                ida_kernwin.warning('Unable to find .eh_frame_hdr segment, cannot fixup .eh_frame segment.')
                return False
        # Decode the eh_frame_hdr: version byte, three DWARF pointer
        # encodings, then the encoded eh_frame pointer itself.
        ea = seg.start_ea
        (exc_data_base_ea, exc_version) = (ea, gcc_extab.format_byte(ea, 'version'))
        ea += struct.calcsize('B')
        (exc_eh_frame_ptr_enc, ea) = gcc_extab.format_enc(ea, 'eh frame ptr encoding')
        (exc_fde_count_enc, ea) = gcc_extab.format_enc(ea, 'fde count encoding')
        (exc_ent_table_enc, ea) = gcc_extab.format_enc(ea, 'ent binary table encoding')
        (exc_eh_frame_ptr, ea) = gcc_extab.read_enc_val(ea, exc_eh_frame_ptr_enc, True, data_ea=exc_data_base_ea)
        # .eh_frame is expected to precede .eh_frame_hdr in memory.
        if ((exc_eh_frame_ptr != ida_idaapi.BADADDR) and (exc_eh_frame_ptr < seg.start_ea)):
            new_seg = ida_segment.segment_t()
            new_seg.start_ea = exc_eh_frame_ptr
            new_seg.end_ea = seg.start_ea
            new_seg.bitness = seg.bitness
            new_seg.type = seg.type
            new_seg.perm = seg.perm
            print('Creating .eh_frame segment.')
            ida_segment.add_segm_ex(new_seg, '.eh_frame', ida_segment.get_segm_class(seg), ida_segment.ADDSEG_NOSREG)
        return True
    def _fixup_param_segment(self):
        """Create and parse the SCE process/module parameter segment.

        Executables get .sce_process_param (PT_SCE_PROCPARAM), PRX
        modules get .sce_module_param (PT_SCE_MODULE_PARAM).  Creates the
        segment from the program header if missing (cloning .rodata's
        attributes), trims it to the structure's self-reported size and
        dispatches the body to the matching handler.  Returns the
        handler's result, or False on failure.
        """
        assert self.elf.is_inited()
        if (self.file_type == ElfUtil.ET_SCE_EXEC_ASLR):
            phdr_type = ElfUtil.PT_SCE_PROCPARAM
            segment_name = '.sce_process_param'
            struct_name = 'sceProcessParam'
            handler_cb = self._fixup_process_param_segment
        else:
            phdr_type = ElfUtil.PT_SCE_MODULE_PARAM
            segment_name = '.sce_module_param'
            struct_name = 'sceModuleParam'
            handler_cb = self._fixup_module_param_segment
        phdr = self.elf.find_phdr_by_type(phdr_type)
        if (phdr is None):
            ida_kernwin.warning(('Unable to find program header for segment %s, cannot fixup it.' % segment_name))
            return False
        seg = ida_segment.get_segm_by_name(segment_name)
        if (not seg):
            seg = ida_segment.get_segm_by_name('.rodata')
            if (not seg):
                ida_kernwin.warning(('Unable to find .rodata segment, cannot fixup %s segment.' % segment_name))
                return False
            new_seg = ida_segment.segment_t()
            new_seg.start_ea = phdr['p_vaddr']
            new_seg.end_ea = align_up((phdr['p_vaddr'] + phdr['p_memsz']), 16)
            new_seg.bitness = seg.bitness
            new_seg.type = seg.type
            new_seg.perm = seg.perm
            print(('Creating %s segment.' % segment_name))
            ida_segment.add_segm_ex(new_seg, segment_name, ida_segment.get_segm_class(seg), ida_segment.ADDSEG_NOSREG)
            seg = ida_segment.get_segm_by_name(segment_name)
            if (not seg):
                ida_kernwin.warning(('Unable to find %s segment, cannot fixup it.' % segment_name))
                return False
        print(('Processing %s segment.' % segment_name))
        # The first qword of the structure is its total size.
        size = ida_bytes.get_qword(seg.start_ea)
        if ((size == ida_idaapi.BADADDR) or (size < struct.calcsize('Q'))):
            ida_kernwin.warning(('Unexpected size of %s structure.' % struct_name))
            return False
        print(('%s structure size: 0x%x' % (struct_name, size)))
        # Trim the segment to the structure's actual (aligned) size.
        end_ea = align_up((seg.start_ea + size), 16)
        if (seg.end_ea != end_ea):
            print(('Moving end of %s segment from 0x%x to 0x%x.' % (segment_name, seg.end_ea, end_ea)))
            ida_segment.set_segm_end(seg.start_ea, end_ea, (ida_segment.SEGMOD_KEEP | ida_segment.SEGMOD_SILENT))
        data = ida_bytes.get_bytes(seg.start_ea, size)
        if (len(data) != size):
            raise RuntimeError(('Insufficient data of %s structure: 0x%x (expected: 0x%x)' % (struct_name, len(data), size)))
        # Hand off the body minus the leading size qword.
        return handler_cb(segment_name, struct_name, data[struct.calcsize('Q'):])
def _fixup_process_param_segment(self, segment_name, struct_name, data):
fmt = '<4s3I5Q'
extra_fmt = '<3Q'
data_size = len(data)
expected_size = struct.calcsize(fmt)
expected_extra_size = struct.calcsize(extra_fmt)
if (data_size < expected_size):
raise RuntimeError(('Unsupported size of %s structure: 0x%x (expected: 0x%x)' % (struct_name, data_size, expected_size)))
elif (data_size > (expected_size + expected_extra_size)):
ida_kernwin.warning(('Size of %s structure is larger than expected: 0x%x (expected: 0x%x)' % (struct_name, data_size, (expected_size + expected_extra_size))))
(magic, entry_count, sdk_version, unk1, process_name_ea, user_main_thread_name_ea, user_main_thread_priority_ea, user_main_thread_stack_size_ea, libc_param_ea) = struct.unpack(fmt, data[:expected_size])
if (magic != ElfUtil.SCE_PROCESS_PARAM_MAGIC):
raise RuntimeError(('Invalid magic in %s structure: 0x%08x' % (struct_name, magic)))
offset = expected_size
data_size -= offset
kernel_mem_param_ea = ida_idaapi.BADADDR
if (data_size >= 8):
(kernel_mem_param_ea,) = struct.unpack('<Q', data[offset:(offset + struct.calcsize('Q'))])
offset += struct.calcsize('Q')
data_size -= struct.calcsize('Q')
kernel_fs_param_ea = ida_idaapi.BADADDR
if (data_size >= 8):
(kernel_fs_param_ea,) = struct.unpack('<Q', data[offset:(offset + struct.calcsize('Q'))])
offset += struct.calcsize('Q')
data_size -= struct.calcsize('Q')
process_preload_enabled_ea = ida_idaapi.BADADDR
if (data_size >= 8):
(process_preload_enabled_ea,) = struct.unpack('<Q', data[offset:(offset + struct.calcsize('Q'))])
offset += struct.calcsize('Q')
data_size -= struct.calcsize('Q')
print('Process info:')
print((' Magic: 0x%s' % magic.hex()))
print((' Entry count: %d' % entry_count))
print((' SDK version: 0x%x' % sdk_version))
print((' Unk1: 0x%x' % unk1))
print((' Process name ea: 0x%x' % process_name_ea))
print((' User main thread ea: 0x%x' % user_main_thread_name_ea))
print((' User main thread priority ea: 0x%x' % user_main_thread_priority_ea))
print((' User main thread stack size ea: 0x%x' % user_main_thread_stack_size_ea))
print((' Libc param ea: 0x%x' % libc_param_ea))
if (kernel_mem_param_ea != ida_idaapi.BADADDR):
print((' Kernel mem param ea: 0x%x' % kernel_mem_param_ea))
if (kernel_fs_param_ea != ida_idaapi.BADADDR):
print((' Kernel fs param ea: 0x%x' % kernel_fs_param_ea))
if (process_preload_enabled_ea != ida_idaapi.BADADDR):
print((' Process preload enabled ea: 0x%x' % process_preload_enabled_ea))
return True
def _fixup_module_param_segment(self, segment_name, struct_name, data):
fmt = '<4sIQ2I'
data_size = len(data)
expected_size = struct.calcsize(fmt)
if (data_size < expected_size):
raise RuntimeError(('Unsupported size of %s structure: 0x%x (expected: 0x%x)' % (struct_name, data_size, expected_size)))
elif (data_size > expected_size):
ida_kernwin.warning(('Size of %s structure is larger than expected: 0x%x (expected: 0x%x)' % (struct_name, data_size, expected_size)))
(magic, entry_count, sdk_version, unk1, unk2) = struct.unpack(fmt, data[:expected_size])
if (magic != ElfUtil.SCE_MODULE_PARAM_MAGIC):
raise RuntimeError(('Invalid magic in %s structure: 0x%08x' % (struct_name, magic)))
print('Module info:')
print((' Magic: 0x%s' % magic.hex()))
print((' Entry count: %d' % entry_count))
print((' SDK version: 0x%x' % sdk_version))
print((' Unk1: 0x%x' % unk1))
print((' Unk2: 0x%x' % unk2))
return True
def _fixup_data_segment(self):
seg = ida_segment.get_segm_by_name('.data')
if seg:
return False
seg = self._find_last_rw_seg()
if (not seg):
ida_kernwin.warning('Unable to find R/W segment, cannot fixup .data segment.')
return False
seg_name = ida_segment.get_segm_name(seg)
if seg_name.startswith('.'):
ida_kernwin.warning('R/W segment starts with dot already, cannot fixup .data segment.')
return False
ida_segment.set_segm_name(seg, '.data')
return True
    def _fixup_extra_segments(self):
        """Rename leftover LOAD data segments to .data and merge neighbors.

        First renames every unnamed R/W "LOAD" DATA-class segment to
        .data, then repeatedly merges adjacent segments that share the
        same name and permissions into one.  Returns True.
        """
        print('Fixing up extra .data segments.')
        (first_seg, last_seg) = (ida_segment.get_first_seg(), ida_segment.get_last_seg())
        seg = first_seg
        # Pass 1: rename loader-default "LOAD" DATA segments to .data.
        while seg:
            name = ida_segment.get_segm_name(seg)
            sclass = ida_segment.get_segm_class(seg)
            idx = ida_segment.get_segm_num(seg.start_ea)
            if ((name.lower() == 'load') and (not name.startswith('.')) and (sclass == 'DATA')):
                print(('Renaming extra R/W %s segment #%03d to .data.' % (name, idx)))
                ida_segment.set_segm_name(seg, '.data')
            seg = ida_segment.get_next_seg(seg.start_ea)
            if (seg == last_seg):
                break
        print('Merging similar neighboring segments.')
        # Pass 2: for each segment, keep absorbing the following segment
        # while it is contiguous and has the same name and permissions.
        seg1 = first_seg
        while seg1:
            name1 = ida_segment.get_segm_name(seg1)
            sclass1 = ida_segment.get_segm_class(seg1)
            idx1 = ida_segment.get_segm_num(seg1.start_ea)
            finished = False
            while (not finished):
                seg2 = ida_segment.get_next_seg(seg1.start_ea)
                if (not seg2):
                    finished = True
                    break
                is_last = (seg2 == last_seg)
                name2 = ida_segment.get_segm_name(seg2)
                sclass2 = ida_segment.get_segm_class(seg2)
                idx2 = ida_segment.get_segm_num(seg2.start_ea)
                if ((name1 != name2) or (seg1.perm != seg2.perm) or (seg1.end_ea != seg2.start_ea)):
                    finished = True
                    break
                print(('Merging segments #%03d(%s) and #%03d(%s): [0x%x;0x%x) / [0x%x;0x%x)' % (idx1, name1, idx2, name2, seg1.start_ea, seg1.end_ea, seg2.start_ea, seg2.end_ea)))
                end_ea = seg2.end_ea
                assert (end_ea >= seg1.end_ea)
                # Delete the absorbed segment, then grow seg1 over it.
                ida_segment.del_segm(seg2.start_ea, ida_segment.SEGMOD_KEEP)
                ida_segment.set_segm_end(seg1.start_ea, end_ea, ida_segment.SEGMOD_KEEP)
                ida_segment.update_segm(seg1)
                if is_last:
                    break
            seg1 = ida_segment.get_next_seg(seg1.start_ea)
            if (seg1 == last_seg):
                break
        return True
    def _fixup_got_segments(self):
        """Create .got.plt and .got segments at their recorded addresses.

        Uses self.got_plt_start_ea (from DT_PLTGOT) and self.got_start_ea
        (recovered while scanning .term_proc); each new segment extends
        from its start address to the end of the enclosing segment and
        clones that segment's attributes.  Returns True when at least one
        address was handled, False otherwise.
        """
        print('Fixing up .got and .got.plt segments.')
        result = False
        if (self.got_plt_start_ea != ida_idaapi.BADADDR):
            print(('Address of .got.plt section: 0x%x' % self.got_plt_start_ea))
            seg = ida_segment.get_segm_by_name('.got.plt')
            if (not seg):
                seg = ida_segment.getseg(self.got_plt_start_ea)
                if (not seg):
                    ida_kernwin.warning('Unable to find segment which includes .got.plt, cannot fixup .got.plt segment.')
                    return False
                new_seg = ida_segment.segment_t()
                new_seg.start_ea = self.got_plt_start_ea
                new_seg.end_ea = seg.end_ea
                new_seg.bitness = seg.bitness
                new_seg.type = seg.type
                new_seg.perm = seg.perm
                print('Creating .got.plt segment.')
                ida_segment.add_segm_ex(new_seg, '.got.plt', ida_segment.get_segm_class(seg), 0)
            else:
                # Segment already exists; nothing to do.
                pass
            result |= True
        if (self.got_start_ea != ida_idaapi.BADADDR):
            print(('Address of .got section: 0x%x' % self.got_start_ea))
            seg = ida_segment.get_segm_by_name('.got')
            if (not seg):
                seg = ida_segment.getseg(self.got_start_ea)
                if (not seg):
                    ida_kernwin.warning('Unable to find segment which includes .got, cannot fixup .got segment.')
                    return False
                new_seg = ida_segment.segment_t()
                new_seg.start_ea = self.got_start_ea
                new_seg.end_ea = seg.end_ea
                new_seg.bitness = seg.bitness
                new_seg.type = seg.type
                new_seg.perm = seg.perm
                print('Creating .got segment.')
                ida_segment.add_segm_ex(new_seg, '.got', ida_segment.get_segm_class(seg), 0)
            else:
                # Segment already exists; nothing to do.
                pass
            result |= True
        return result
def _fixup_ctors_dtors_segments(self):
print('Fixing up .ctors and .dtors segments.')
data_seg = ida_segment.get_segm_by_name('.data')
if (not data_seg):
ida_kernwin.warning('Unable to find .data segment, cannot fixup .ctors and .dtors segments.')
return False
(seg_class, seg_bitness, seg_type, seg_perm) = (ida_segment.get_segm_class(data_seg), data_seg.bitness, data_seg.type, data_seg.perm)
ea_pair = self._fixup_dtors_segment()
if (not ea_pair):
return False
(dtors_start_ea, dtors_end_ea) = ea_pair
if ((dtors_start_ea != ida_idaapi.BADADDR) and (dtors_end_ea != ida_idaapi.BADADDR)):
seg = ida_segment.segment_t()
seg.start_ea = dtors_start_ea
seg.end_ea = dtors_end_ea
seg.bitness = seg_bitness
seg.type = seg_type
seg.perm = seg_perm
print('Creating .dtors segment.')
ida_segment.add_segm_ex(seg, '.dtors', seg_class, 0)
else:
ida_kernwin.warning('Unable to find .dtors segment.')
ea_pair = self._fixup_ctors_segment()
if (not ea_pair):
return False
(ctors_start_ea, ctors_end_ea) = ea_pair
if ((ctors_start_ea != ida_idaapi.BADADDR) and (ctors_end_ea != ida_idaapi.BADADDR)):
seg = ida_segment.segment_t()
seg.start_ea = ctors_start_ea
seg.end_ea = ctors_end_ea
seg.bitness = seg_bitness
seg.type = seg_type
seg.perm = seg_perm
print('Creating .ctors segment.')
ida_segment.add_segm_ex(seg, '.ctors', seg_class, 0)
else:
ida_kernwin.warning('Unable to find .ctors segment.')
return True
    def _fixup_ctors_segment(self):
        """Locate the constructor table that precedes .dtors.

        Scans .init_proc for cmp/mov/lea patterns to recover auxiliary
        addresses, then walks backwards from the qword just below .dtors
        (the NULL terminator, named __CTOR_END__) until a BADADDR-valued
        qword marks the start (named __CTOR_LIST__).  Returns the
        (start_ea, end_ea) pair, or None on failure; raises RuntimeError
        when the table terminator is not zero.
        """
        dtors_seg = ida_segment.get_segm_by_name('.dtors')
        if (not dtors_seg):
            ida_kernwin.warning('Unable to find .dtors segment, cannot fixup .ctors segment.')
            return None
        (dtors_start_ea, dtors_end_ea) = (dtors_seg.start_ea, dtors_seg.end_ea)
        if ((dtors_start_ea == ida_idaapi.BADADDR) or (dtors_end_ea == ida_idaapi.BADADDR)):
            ida_kernwin.warning('Unexpected .dtors segment addresses, cannot fixup .ctors segment.')
            return None
        ea = ida_name.get_name_ea(ida_idaapi.BADADDR, '.init_proc')
        if (ea == ida_idaapi.BADADDR):
            ida_kernwin.warning('Unable to find .init_proc, cannot fixup .ctors segment.')
            return None
        preinit_array_end_ea = ida_idaapi.BADADDR
        got_plt_end_ea = ida_idaapi.BADADDR
        cmp_found = mov_found = lea_found = False
        mov_reg = None
        # Pattern-scan .init_proc: a cmp against a memory operand yields
        # the preinit array end; a mov through a register followed by a
        # lea into that same register yields the .got.plt end.
        for ea in idautils.FuncItems(ea):
            if ((not cmp_found) and check_insn_format(ea, 'cmp', [(ida_ua.o_reg, None), (ida_ua.o_mem, None)])):
                value = idc.get_operand_value(ea, 1)
                if ((value != ida_idaapi.BADADDR) and (preinit_array_end_ea == ida_idaapi.BADADDR)):
                    preinit_array_end_ea = value
                    cmp_found = True
            elif ((not mov_found) and check_insn_format(ea, 'mov', [(ida_ua.o_reg, None), (ida_ua.o_phrase, None)])):
                mov_reg = idc.print_operand(ea, 1).lower().strip().lstrip('[').rstrip(']')
                mov_found = True
            elif ((not lea_found) and mov_found and (mov_reg is not None) and check_insn_format(ea, 'lea', [(ida_ua.o_reg, mov_reg), (ida_ua.o_mem, None)])):
                value = idc.get_operand_value(ea, 1)
                if ((value != ida_idaapi.BADADDR) and (got_plt_end_ea == ida_idaapi.BADADDR)):
                    got_plt_end_ea = value
                    lea_found = True
        # The ctors table ends with a zero qword immediately below .dtors.
        ctors_end_ea = (dtors_start_ea - 8)
        if (ida_bytes.get_qword(ctors_end_ea) != 0):
            raise RuntimeError('Unexpected end of constructors table.')
        ida_bytes.create_qword(ctors_end_ea, 8, True)
        ida_name.set_name(ctors_end_ea, '__CTOR_END__', ida_name.SN_NOCHECK)
        # Walk backwards until the BADADDR sentinel marking the list head.
        ctors_start_ea = (ctors_end_ea - 8)
        while (ida_bytes.get_qword(ctors_start_ea) != ida_idaapi.BADADDR):
            ctors_start_ea -= 8
        ida_bytes.create_qword(ctors_start_ea, 8, True)
        ida_name.set_name(ctors_start_ea, '__CTOR_LIST__', ida_name.SN_NOCHECK)
        ctors_end_ea += 8
        if (preinit_array_end_ea != ida_idaapi.BADADDR):
            ida_name.set_name(preinit_array_end_ea, '_G__preinit_array_end', ida_name.SN_NOCHECK)
        return (ctors_start_ea, ctors_end_ea)
    def _fixup_dtors_segment(self):
        """Locate the destructor table referenced from .term_proc.

        Pattern-scans .term_proc: a cmp of a memory operand against 0
        yields self.got_start_ea; the last lea before an `add reg, 8`
        points just past the table head.  The table starts 16 bytes below
        that lea target (named __DTOR_LIST__) and ends at the first zero
        qword (named __DTOR_END__).  Returns the (start_ea, end_ea) pair,
        or None when .term_proc is missing; raises RuntimeError on
        unexpected instruction patterns or table contents.
        """
        ea = ida_name.get_name_ea(ida_idaapi.BADADDR, '.term_proc')
        if (ea == ida_idaapi.BADADDR):
            ida_kernwin.warning('Unable to find .term_proc, cannot fixup .dtors segment.')
            return None
        last_lea_value = None
        for ea in idautils.FuncItems(ea):
            if check_insn_format(ea, 'cmp', [(ida_ua.o_mem, None), (ida_ua.o_imm, 0)]):
                value = idc.get_operand_value(ea, 0)
                if (value != ida_idaapi.BADADDR):
                    # Side effect: records the .got address for
                    # _fixup_got_segments().
                    self.got_start_ea = value
            elif check_insn_format(ea, 'lea', [(ida_ua.o_reg, None), (ida_ua.o_mem, None)]):
                value = idc.get_operand_value(ea, 1)
                if (value != ida_idaapi.BADADDR):
                    last_lea_value = value
            elif check_insn_format(ea, 'add', [(ida_ua.o_reg, None), (ida_ua.o_imm, None)]):
                value = idc.get_operand_value(ea, 1)
                if (value != 8):
                    continue
                if (last_lea_value is None):
                    raise RuntimeError('Unexpected instructions at .term_proc().')
                break
        if (last_lea_value is None):
            raise RuntimeError('Unexpected instructions at .term_proc().')
        # Table head sits 16 bytes below the lea target and must hold the
        # BADADDR (-1) sentinel.
        dtors_start_ea = (last_lea_value - 16)
        if (ida_bytes.get_qword(dtors_start_ea) != ida_idaapi.BADADDR):
            raise RuntimeError('Unexpected start of destructors table.')
        ida_bytes.create_qword(dtors_start_ea, 8, True)
        ida_name.set_name(dtors_start_ea, '__DTOR_LIST__', ida_name.SN_NOCHECK)
        # Walk forwards until the zero qword terminator.
        dtors_end_ea = (dtors_start_ea + 8)
        while (ida_bytes.get_qword(dtors_end_ea) != 0):
            dtors_end_ea += 8
        ida_bytes.create_qword(dtors_end_ea, 8, True)
        ida_name.set_name(dtors_end_ea, '__DTOR_END__', ida_name.SN_NOCHECK)
        dtors_end_ea += 8
        return (dtors_start_ea, dtors_end_ea)
    def _fixup_bss_segment(self):
        """Create a ``.bss`` segment if one does not already exist.

        Scans the last read-write segment for the first address whose bytes
        are not loaded from the file; everything from there to the end of
        that segment is carved out as ``.bss``.

        Returns:
            True if a ``.bss`` segment was created, False otherwise
            (already present, no RW segment, or no unloaded bytes found).
        """
        seg = ida_segment.get_segm_by_name('.bss')
        if seg:
            # Nothing to do — .bss already exists.
            return False
        data_seg = self._find_last_rw_seg()
        if (not data_seg):
            return False
        data_segment_name = ida_segment.get_segm_name(data_seg)
        bss_start_ea = ida_idaapi.BADADDR
        # Find the first address in the data segment with no file-backed bytes;
        # that is where the zero-initialized region begins.
        ea = data_seg.start_ea
        while ((ea != ida_idaapi.BADADDR) and (ea < data_seg.end_ea)):
            if (not idc.is_loaded(ea)):
                bss_start_ea = ea
                break
            ea = ida_bytes.next_addr(ea)
        if (bss_start_ea == ida_idaapi.BADADDR):
            return False
        bss_end_ea = data_seg.end_ea
        print('Creating .bss segment.')
        # Inherit bitness/permissions from the data segment it is split from.
        seg = ida_segment.segment_t()
        seg.start_ea = bss_start_ea
        seg.end_ea = bss_end_ea
        seg.type = ida_segment.SEG_BSS
        seg.bitness = data_seg.bitness
        seg.perm = data_seg.perm
        ida_segment.add_segm_ex(seg, '.bss', ida_segment.get_segm_class(data_seg), ida_segment.ADDSEG_NOSREG)
        return True
def fixup_func_bounds(self, func, max_func_end_ea):
end_ea = func.end_ea
data = ida_bytes.get_bytes(end_ea, len(ElfPlugin.UD2_INSN_BYTES))
if ((not data) or (data != ElfPlugin.UD2_INSN_BYTES)):
return
end_ea += len(data)
print(('Setting function 0x%x end to 0x%x (old: 0x%x).' % (func.start_ea, end_ea, func.end_ea)))
func.end_ea = end_ea
ida_funcs.reanalyze_function(func, func.start_ea, end_ea, False)
def _fixup_symbols(self):
print('Fixing up symbols.')
if ((not self.symbol_table) or (not self.symbol_table.is_table_loaded())):
ida_kernwin.warning('Symbol table is not loaded, cannot fixup symbols.')
return False
if ((not self.string_table) or (not self.string_table.is_loaded())):
ida_kernwin.warning('String table is not loaded, cannot fixup symbols.')
return False
for i in range(self.symbol_table.get_num_entries()):
entry = self.symbol_table.get_entry(i)
if (entry is None):
ida_kernwin.warning(('No entry for symbol table entry #%d.' % i))
return False
symbol = SymbolTable.Symbol(entry)
self.symbols.append(symbol)
if ((not symbol.is_object()) and (not symbol.is_func())):
continue
mangled_name = self.string_table.get_string(symbol.entry['st_name'])
if (not mangled_name):
continue
(symbol_name_enc, lid_enc, mid_enc) = mangled_name.split('#')
(nid, lid, mid) = (ObjectInfo.decode_nid(symbol_name_enc), ObjectInfo.decode_obj_id(lid_enc), ObjectInfo.decode_obj_id(mid_enc))
if (mid not in self.modules):
ida_kernwin.warning(('No module with ID: 0x%x' % mid))
return False
module_name = self.modules[mid].name
assert (lid in self.libraries)
if (lid not in self.libraries):
ida_kernwin.warning(('No library with ID: 0x%x' % lid))
return False
library_name = self.libraries[lid].name
is_export = self.libraries[lid].is_export
symbol_name = (self.nids[nid] if (nid in self.nids) else nid)
symbol.set_descriptor(module_name, library_name, symbol_name, is_export)
return True
    def _fixup_plt_segment(self):
        """Recover the ``.plt`` segment and name its stubs from jmprel relocations.

        Finds the PLT header through xrefs to ``.got.plt``, creates a ``.plt``
        segment when missing, then walks the jmprel relocation table naming
        each stub, its GOT pointer slot, and the corresponding function, and
        marks them as library functions.

        Returns:
            True on success, False when a required table/segment/pattern
            cannot be found (a warning is emitted in each case).
        """
        print('Fixing up .plt segment.')
        if (not self.jmprel_reloc_table.is_table_loaded()):
            ida_kernwin.warning('Jmprel relocation table is not loaded, cannot fixup .plt segment.')
            return False
        if (not self.string_table.is_loaded()):
            ida_kernwin.warning('String table is not loaded, cannot fixup .plt segment.')
            return False
        jmprel_entry_count = self.jmprel_reloc_table.get_num_entries()
        got_plt_seg = ida_segment.get_segm_by_name('.got.plt')
        if (not got_plt_seg):
            ida_kernwin.warning('Unable to find .got.plt segment, cannot fixup .plt segment.')
            return False
        # The PLT header references the second qword of .got.plt; follow the
        # data xref back to locate the start of the PLT.
        target_ea = (got_plt_seg.start_ea + struct.calcsize('Q'))
        xrefs = list(idautils.XrefsTo(target_ea, ida_xref.XREF_DATA))
        if (not xrefs):
            ida_kernwin.warning('Unable to find xrefs to .got.plt segment, cannot fixup .plt segment.')
            return False
        (xref_type, plt_start_ea) = (xrefs[0].type, xrefs[0].frm)
        assert (xref_type == ida_xref.dr_R)
        # Expected PLT header: push [got+8]; jmp [got+16] (opcodes only; the
        # displacements are ignored).
        base_insns = bytes.fromhex((('FF 35 00 00 00 00' + 'FF 25 00 00 00 00') + ''))
        # Expected per-entry stub: jmp [slot]; push index; jmp header.
        stub_insns = bytes.fromhex(((('FF 25 00 00 00 00' + '68 00 00 00 00') + 'E9 00 00 00 00') + ''))
        data = ida_bytes.get_bytes(plt_start_ea, 2)
        if (data[:2] != base_insns[:2]):
            ida_kernwin.warning('Unexpected .plt segment data, cannot fixup .plt segment.')
            return False
        super_seg = ida_segment.getseg(plt_start_ea)
        if (not super_seg):
            ida_kernwin.warning('Unable to find segment which includes .plt, cannot fixup .plt segment.')
            return False
        # First 'FF 25' (jmp indirect) after the header marks the first stub.
        plt_base_ea = ida_search.find_binary((plt_start_ea + len(base_insns)), super_seg.end_ea, 'FF 25', 16, (ida_search.SEARCH_DOWN | ida_search.SEARCH_CASE))
        if (plt_base_ea == ida_idaapi.BADADDR):
            ida_kernwin.warning('Unable to find .plt base ea, cannot fixup .plt segment.')
            return False
        plt_end_ea = align_up((plt_base_ea + (jmprel_entry_count * len(stub_insns))), 16)
        seg = ida_segment.get_segm_by_name('.plt')
        if (not seg):
            # Carve a .plt segment out of the enclosing segment, inheriting
            # its attributes.
            new_seg = ida_segment.segment_t()
            new_seg.start_ea = plt_start_ea
            new_seg.end_ea = plt_end_ea
            new_seg.bitness = super_seg.bitness
            new_seg.type = super_seg.type
            new_seg.perm = super_seg.perm
            print('Creating .plt segment.')
            ida_segment.add_segm_ex(new_seg, '.plt', ida_segment.get_segm_class(super_seg), 0)
        else:
            pass
        # Netnode used by IDA to associate import names with addresses.
        idaldr_node = ida_netnode.netnode('$ IDALDR node for ids loading $')
        if (not idaldr_node):
            ida_kernwin.warning('Unable to find netnode for imports.')
        for i in range(jmprel_entry_count):
            entry = self.jmprel_reloc_table.get_entry(i)
            if (entry is None):
                ida_kernwin.warning(('No entry for jmprel relocation table entry #%d.' % i))
                return False
            record = JmpRelRelocTable.Record(entry)
            reloc_type = record.get_type()
            if (not (reloc_type in [JmpRelRelocTable.R_AMD64_JUMP_SLOT])):
                ida_kernwin.warning(('Unsupported relocation type 0x%x for jmprel relocation table entry #%d.' % (reloc_type, i)))
                return False
            if (reloc_type != self.relocation_type):
                ida_kernwin.warning(('Mismatched relocation type 0x%x (should be 0x%x) for jmprel relocation table entry #%d.' % (reloc_type, self.relocation_type, i)))
                return False
            symbol_idx = record.get_symbol_idx()
            if (symbol_idx >= len(self.symbols)):
                ida_kernwin.warning(('Symbol index #%d out of range for jmprel relocation table entry #%d.' % (symbol_idx, i)))
                return False
            symbol = self.symbols[symbol_idx]
            if (not symbol.has_descriptor()):
                ida_kernwin.warning(('Symbol #%d has no descriptor for jmprel relocation table entry #%d.' % (symbol_idx, i)))
                return False
            name = symbol.get_name()
            name_ex = symbol.get_name_ex()
            comment = symbol.get_name_comment()
            # '/B...' names the jump stub, '/PG...' the GOT pointer slot.
            stub_name = ('/B%s' % name_ex)
            stub_ptr_name = ('/PG%s' % name_ex)
            stub_ptr_ea = record.entry['r_offset']
            stub_ea = ida_bytes.get_qword(record.entry['r_offset'])
            func_ea = (plt_base_ea + (i * len(stub_insns)))
            ida_name.set_name(stub_ptr_ea, stub_ptr_name, ida_name.SN_NOCHECK)
            ida_bytes.set_cmt(stub_ptr_ea, '', False)
            ida_name.set_name(stub_ea, stub_name, ida_name.SN_NOCHECK)
            ida_bytes.set_cmt(stub_ea, '', False)
            func = ida_funcs.get_func(func_ea)
            if (not func):
                ida_funcs.add_func(func_ea, ida_idaapi.BADADDR)
            ida_name.set_name(func_ea, name, ida_name.SN_NOCHECK)
            ida_bytes.set_cmt(func_ea, comment, False)
            func = ida_funcs.get_func(stub_ea)
            if (not func):
                ida_funcs.add_func(stub_ea, ida_idaapi.BADADDR)
            # Mark both the PLT entry and the stub as library functions.
            ea = ida_name.get_name_ea(func_ea, name)
            if (ea != ida_idaapi.BADADDR):
                func = ida_funcs.get_func(ea)
                if func:
                    func.flags |= ida_funcs.FUNC_LIB
                    ida_funcs.update_func(func)
            ea = ida_name.get_name_ea(stub_ea, stub_name)
            if (ea != ida_idaapi.BADADDR):
                func = ida_funcs.get_func(ea)
                if func:
                    func.flags |= ida_funcs.FUNC_LIB
                    ida_funcs.update_func(func)
            if idaldr_node:
                idaldr_node.supset_ea(stub_ea, stub_name, ida_netnode.stag)
        return True
    def _fixup_relocations(self):
        """Apply names from the rela relocation table to relocated addresses.

        For ``R_AMD64_GLOB_DAT`` and ``R_AMD64_64`` entries with a named
        symbol, the relocation slot is dereferenced and the target address is
        named after the symbol (and registered in the import netnode).
        Unsupported relocation types are reported and skipped.

        Returns:
            True on success; False when a required table is missing or an
            entry cannot be read.
        """
        print('Fixing up relocations.')
        if (not self.rela_reloc_table.is_table_loaded()):
            ida_kernwin.warning('Rela relocation table is not loaded, cannot fixup relocations.')
            return False
        if (not self.string_table.is_loaded()):
            ida_kernwin.warning('String table is not loaded, cannot fixup relocations.')
            return False
        # Netnode used by IDA to associate import names with addresses.
        idaldr_node = ida_netnode.netnode('$ IDALDR node for ids loading $')
        if (not idaldr_node):
            ida_kernwin.warning('Unable to find netnode for imports.')
        rela_entry_count = self.rela_reloc_table.get_num_entries()
        for i in range(rela_entry_count):
            entry = self.rela_reloc_table.get_entry(i)
            if (entry is None):
                ida_kernwin.warning(('No entry for rela relocation table entry #%d.' % i))
                return False
            record = RelaRelocTable.Record(entry)
            reloc_type = record.get_type()
            (ea, addend) = (as_uint64(record.entry['r_offset']), as_uint64(record.entry['r_addend']))
            if (reloc_type in [RelaRelocTable.R_AMD64_GLOB_DAT, RelaRelocTable.R_AMD64_64]):
                symbol_idx = record.get_symbol_idx()
                if (symbol_idx < len(self.symbols)):
                    symbol = self.symbols[symbol_idx]
                    if symbol.has_descriptor():
                        name = symbol.get_name()
                        if name:
                            # Dereference the relocation slot to get the
                            # address the symbol resolves to.
                            ea = ida_bytes.get_qword(ea)
                            if (ea != ida_idaapi.BADADDR):
                                ida_name.set_name(ea, name, ida_name.SN_NOCHECK)
                                if idaldr_node:
                                    idaldr_node.supset_ea(ea, name, ida_netnode.stag)
                        else:
                            continue
                    else:
                        continue
            else:
                print(('Warning! Unsupported relocation type 0x%x for rela relocation table entry #%d.' % (reloc_type, i)))
                continue
        return True
    def _fixup_exports(self):
        """Name exported functions and align entry-point ordinals with symbols.

        Builds a map of existing entry-point ordinals, then for each symbol
        with a descriptor ensures a function exists at its address and applies
        the symbol name (renaming the entry ordinal when one exists).

        Returns:
            True (always; individual symbols are skipped, not fatal).
        """
        print('Fixing up exports.')
        ea_ordinal_map = {}
        for i in range(ida_entry.get_entry_qty()):
            ordinal = ida_entry.get_entry_ordinal(i)
            ea = ida_entry.get_entry(ordinal)
            ea_ordinal_map[ea] = ordinal
        for (i, symbol) in enumerate(self.symbols):
            # NOTE(review): this skips only when the symbol is neither an
            # export NOR an object NOR a function; possibly the intent was
            # 'not export or (not object and not func)' — confirm.
            if ((not symbol.is_export) and (not symbol.is_object()) and (not symbol.is_func())):
                continue
            if (not symbol.has_descriptor()):
                continue
            (ea, size) = (symbol.entry['st_value'], symbol.entry['st_size'])
            if ((ea == 0) or (ea == ida_idaapi.BADADDR)):
                continue
            func = ida_funcs.get_func(ea)
            if (not func):
                # No function here yet: clear any stale items and create one
                # spanning the symbol's size.
                ida_bytes.del_items(ea, ida_bytes.DELIT_SIMPLE, size)
                ida_ua.create_insn(ea)
                ida_funcs.add_func(ea, (ea + size))
            name = symbol.get_name()
            print(('Setting name %s to exported function at 0x%x.' % (name, ea)))
            if (ea in ea_ordinal_map):
                ordinal = ea_ordinal_map[ea]
                ida_entry.rename_entry(ordinal, name, ida_entry.AEF_UTF8)
            else:
                ida_name.set_name(ea, name)
            ida_bytes.set_cmt(ea, '', False)
        return True
def _fixup_dynsym_segment(self):
print('Deleting .dynsym segment.')
seg = ida_segment.get_segm_by_name('.dynsym')
if (not seg):
ida_kernwin.warning('Unable to find .dynsym segment, cannot fixup .dynsym segment.')
return False
ida_segment.del_segm(seg.start_ea, (ida_segment.SEGMOD_KILL | ida_segment.SEGMOD_SILENT))
return True
def _mark_noret_funcs(self):
names = ['exit', 'exit1', 'abort', '__stack_chk_fail', '_ZNSt9bad_allocD0Ev', '_ZNSt9bad_allocD1Ev', '_ZNSt9bad_allocD2Ev', '_ZSt11_Xbad_allocv', '_ZNSt16invalid_argumentD0Ev', '_ZNSt16invalid_argumentD1Ev', '_ZNSt16invalid_argumentD2Ev', '_ZSt18_Xinvalid_argumentPKc', '_ZNSt12length_errorD0Ev', '_ZNSt12length_errorD1Ev', '_ZNSt12length_errorD2Ev', '_ZSt14_Xlength_errorPKc', '_ZNSt12out_of_rangeD0Ev', '_ZNSt12out_of_rangeD1Ev', '_ZNSt12out_of_rangeD2Ev', '_ZSt14_Xout_of_rangePKc', '_ZNSt14overflow_errorD0Ev', '_ZNSt14overflow_errorD1Ev', '_ZNSt14overflow_errorD2Ev', '_ZSt16_Xoverflow_errorPKc', '_ZNSt13runtime_errorD0Ev', '_ZNSt13runtime_errorD1Ev', '_ZNSt13runtime_errorD2Ev', '_ZSt15_Xruntime_errorPKc', '_ZNSt17bad_function_callD0Ev', '_ZNSt17bad_function_callD1Ev', '_ZNSt17bad_function_callD2Ev', '_ZSt19_Xbad_function_callv', '_ZNSt11regex_errorD0Ev', '_ZNSt11regex_errorD1Ev', '_ZNSt11regex_errorD2Ev', '_ZSt10_Rng_abortPKc', '_ZSt19_Throw_future_errorRKSt10error_code', '_ZSt25_Rethrow_future_exceptionPv', '_ZSt25_Rethrow_future_exceptionSt13exception_ptr']
for name in names:
ea = ida_name.get_name_ea(ida_idaapi.BADADDR, name)
if (ea == ida_idaapi.BADADDR):
continue
func = ida_funcs.get_func(ea)
if (not func):
continue
func.flags |= ida_funcs.FUNC_NORET
ida_funcs.update_func(func)
ida_auto.reanalyze_callers(ea, True)
def _find_last_rw_seg(self):
rw_seg = None
(seg, first_seg) = (ida_segment.get_last_seg(), ida_segment.get_first_seg())
while (seg and (seg != first_seg)):
name = ida_segment.get_segm_name(seg)
sclass = ida_segment.get_segm_class(seg)
if (seg.perm == (ida_segment.SEGPERM_READ | ida_segment.SEGPERM_WRITE)):
rw_seg = seg
break
seg = ida_segment.get_prev_seg(seg.start_ea)
return rw_seg
def post_initial_analysis(self):
self.elf = ElfUtil()
if (not self.elf.is_inited()):
raise RuntimeError('Netnode for elf is not initialized.')
print('Performing post initial auto analysis.')
for i in range(ida_segment.get_segm_qty()):
seg = ida_segment.getnseg(i)
if seg:
self._fixup_segment(seg)
self._parse_extra_segments()
self._fixup_segment_perms()
self._link_segments_with_phdrs()
self._fixup_padding_segment()
self._fixup_param_segment()
self._fixup_data_segment()
self._fixup_init_fini_segments()
self._fixup_eh_segments()
self._fixup_ctors_dtors_segments()
self._fixup_got_segments()
self._fixup_bss_segment()
self._fixup_extra_segments()
self._fixup_symbols()
ida_ida.inf_set_rename_jumpfunc(True)
self._fixup_plt_segment()
self._fixup_relocations()
self._fixup_exports()
self._fixup_dynsym_segment()
self._mark_noret_funcs()
if (self.soname is not None):
print(('Name: %s' % self.soname))
if (self.orig_file_path is not None):
print(('Original file path: %s' % self.orig_file_path))
if (self.init_proc_ea != ida_idaapi.BADADDR):
print(('Address of .init_proc function: 0x%x' % self.init_proc_ea))
if (self.term_proc_ea != ida_idaapi.BADADDR):
print(('Address of .term_proc function: 0x%x' % self.term_proc_ea))
if self.needed_modules:
print(('Needed modules: %s' % ', '.join(self.needed_modules)))
for (id, info) in self.modules.items():
print(('Module #%03d: %s' % (id, repr(info))))
for (id, info) in self.libraries.items():
print(('Library #%03d: %s' % (id, repr(info))))
if (self.relocation_type is not None):
print(('Relocation type: 0x%x' % self.relocation_type))
    def run(self, arg):
        """IDA plugin entry point for interactive runs; always refuses.

        The plugin only operates through its loader hooks, so invoking it as
        a script is rejected with a warning.
        """
        ida_kernwin.warning('Running as script is not possible.')
        return False
def description():
return ('%s v%s' % (ElfPlugin.PLUGIN_NAME, ElfPlugin.PLUGIN_VERSION)) |
class Game(Base):
    """ORM model for a single game, with human-readable accessor properties.

    Fix: restored the @property decorators on the accessor methods. The
    `galaxy` f-string interpolates `self.galaxy_template` and
    `self.galaxy_shape` without calling them, which is only correct when
    they are properties — without the decorators it would format bound
    methods.
    """
    __tablename__ = 'game'
    game_id = Column(Integer, primary_key=True)
    game_name = Column(String(50))
    player_country_name = Column(String(100))
    db_galaxy_template = Column(String(100))
    db_galaxy_shape = Column(String(100))
    db_difficulty = Column(String(100))
    db_last_updated = Column(sqlalchemy.DateTime, default=None)
    systems = relationship('System', back_populates='game', cascade='all,delete,delete-orphan')
    countries = relationship('Country', back_populates='game', cascade='all,delete,delete-orphan')
    species = relationship('Species', back_populates='game', cascade='all,delete,delete-orphan')
    game_states = relationship('GameState', back_populates='game', cascade='all,delete,delete-orphan')
    wars = relationship('War', back_populates='game', cascade='all,delete,delete-orphan')
    leaders = relationship('Leader', back_populates='game', cascade='all,delete,delete-orphan')
    @property
    def galaxy(self):
        """Combined galaxy description, e.g. template plus shape."""
        return f'{self.galaxy_template} {self.galaxy_shape}'
    @property
    def galaxy_template(self):
        """Resolved display name of the galaxy template."""
        return game_info.lookup_key(self.db_galaxy_template)
    @property
    def galaxy_shape(self):
        """Resolved display name of the galaxy shape."""
        return game_info.lookup_key(self.db_galaxy_shape)
    @property
    def difficulty(self):
        """Resolved display name of the difficulty setting."""
        return game_info.lookup_key(self.db_difficulty)
    @property
    def last_updated(self):
        """Last-updated timestamp formatted as 'YYYY.MM.DD HH:MM'."""
        return f'{self.db_last_updated:%Y.%m.%d %H:%M}'
@pytest.mark.skipif(not_multi_db, reason='requires different db strings')
class TestMultiDB(BaseTestCase):
    """Checks that two sessions pointed at different databases stay isolated.

    Fix: the bare `.skipif(...)` line was a syntax error — the
    `@pytest.mark` decorator prefix had been lost; restored it.
    """
    def test_users_in_two_databases(self, db_session, db_session2):
        """Users added through each session must be visible only in that DB."""
        for x in range(1, 5):
            add_user(x, 'a', db_session=db_session)
        for x in range(1, 3):
            add_user(x, 'b', db_session=db_session2)
        db_session.flush()
        db_session2.flush()
        db_users = BaseService.all(User, db_session=db_session).all()
        db2_users = BaseService.all(User, db_session=db_session2).all()
        assert (len(db_users) == 4)
        assert (len(db2_users) == 2)
        assert (db_users[0].user_name == 'username_1_db_a')
        assert (db2_users[0].user_name == 'username_1_db_b')
def gradient_editor_factory(parent, trait_editor):
    """Create the toolkit-specific gradient editor widget for *trait_editor*.

    Supports the 'wx' and 'qt4' ETS toolkits; any other toolkit raises
    NotImplementedError.
    """
    tvtk_obj = getattr(trait_editor.object, trait_editor.name)
    toolkit = ETSConfig.toolkit
    if toolkit == 'wx':
        from .wx_gradient_editor import wxGradientEditorWidget
        return wxGradientEditorWidget(parent, tvtk_obj)
    if toolkit == 'qt4':
        from .qt_gradient_editor import QGradientEditorWidget
        return QGradientEditorWidget(None, tvtk_obj)
    raise NotImplementedError('Toolkit %s does not implement gradient_editors.' % toolkit)
class OptionSeriesVectorDataDragdropGuideboxDefault(Options):
    """Style options for the default drag-and-drop guide box.

    Each option is a property: the getter returns the configured value
    (falling back to the documented default) and the setter stores a new
    value via `_config`.

    Fix: restored the @property/@<name>.setter decorators — without them
    each setter definition silently shadowed its getter of the same name,
    leaving the getters unreachable.
    """
    @property
    def className(self):
        """CSS class name of the guide box."""
        return self._config_get('highcharts-drag-box-default')
    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)
    @property
    def color(self):
        """Fill color of the guide box."""
        return self._config_get('rgba(0, 0, 0, 0.1)')
    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)
    @property
    def cursor(self):
        """Mouse cursor shown while dragging."""
        return self._config_get('move')
    @cursor.setter
    def cursor(self, text: str):
        self._config(text, js_type=False)
    @property
    def lineColor(self):
        """Border color of the guide box."""
        return self._config_get('#888')
    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    @property
    def lineWidth(self):
        """Border width of the guide box."""
        return self._config_get(1)
    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    @property
    def zIndex(self):
        """Stacking order of the guide box."""
        return self._config_get(900)
    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
def parse_args(argv):
    """Parse command-line arguments for `ergo create`.

    :param argv: list of argument strings (excluding the program name).
    :return: the parsed argparse.Namespace.
    """
    parser = argparse.ArgumentParser(
        prog='ergo create',
        description='Create a new ergo project.',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('path', help='Path of the project to create.')
    parser.add_argument('-i', '--inputs', dest='num_inputs', action='store', type=int, default=10,
                        help='Number of inputs of the model.')
    parser.add_argument('-o', '--outputs', dest='num_outputs', action='store', type=int, default=2,
                        help='Number of outputs of the model.')
    parser.add_argument('-l', '--layers', dest='hidden', action='store', type=str, default='30, 30',
                        help='Comma separated list of positive integers, one per each hidden layer representing its size.')
    parser.add_argument('-b', '--batch-size', dest='batch_size', action='store', type=int, default=64,
                        help='Batch size parameter for training.')
    parser.add_argument('-e', '--epochs', dest='max_epochs', action='store', type=int, default=50,
                        help='Maximum number of epochs to train the model.')
    return parser.parse_args(argv)
class Fastq(object):
    """A single FASTQ record: name, sequence, secondary name, quality string."""
    def __init__(self, name, seq, name2, qual):
        self.name = name      # read identifier (without the '@' prefix)
        self.seq = seq        # nucleotide sequence
        self.name2 = name2    # identifier repeated on the '+' line
        self.qual = qual      # per-base quality string
    def write_to_file(self, handle):
        """Write the record to *handle* in 4-line FASTQ format.

        Fix: the header line must start with '@' per the FASTQ format; the
        previous code prepended an empty string instead, producing an
        invalid record.
        """
        handle.write(('@' + self.name) + '\n')
        handle.write(self.seq + '\n')
        handle.write(('+' + self.name2) + '\n')
        handle.write(self.qual + '\n')
class LightBlue(Theme.Theme):
    """Light-blue color theme: palette constants consumed by the base Theme."""
    # Qualitative palette used for chart series.
    _charts = ['#009999', '#336699', '#ffdcb9', '#cc99ff', '#b3d9ff', '#ffff99', '#000066', '#b2dfdb', '#80cbc4', '#e8f5e9', '#c8e6c9', '#a5d6a7', '#ffebee', '#ffcdd2', '#ef9a9a', '#f3e5f5', '#e1bee7', '#ce93d8', '#ede7f6', '#d1c4e9', '#b39ddb', '#e8eaf6', '#c5cae9', '#9fa8da', '#fffde7', '#fff9c4', '#fff59d', '#fff3e0', '#ffe0b2', '#ffcc80', '#efebe9', '#d7ccc8', '#bcaaa4']
    # Light-blue shades ordered light to dark.
    _colors = ['#E1F5FE', '#B3E5FC', '#81D4FA', '#4FC3F7', '#29B6F6', '#03A9F4', '#039BE5', '#0288D1', '#0277BD', '#01579B']
    # Two-color (light background, strong accent) pairs per alert state.
    (_warning, _danger, _success) = (['#FFF3CD', '#e2ac00'], ['#F8D7DA', '#C00000'], ['#e8f2ef', '#3bb194'])
class HeaderMenu(Options):
    """Builder for a Tabulator column header context menu.

    Entries live in ``self._attrs`` as a mapping of label HTML to a
    JavaScript action string; a None value marks a separator line.
    """
    def hide(self, label: str='Hide Column', icon: Optional[str]='fas fa-eye-slash', disabled: bool=False):
        """Add a 'hide this column' entry, optionally prefixed with an icon."""
        key = label if icon is None else ('<i class="%s" style="margin-right:5px"></i>%s' % (icon, label))
        self._attrs[key] = 'column.hide()'
        return self
    def separator(self):
        """Add a separator line to the menu."""
        self._attrs['separator_%s' % len(self._attrs)] = None
        return self
    def custom(self, label: str, func: str, icon: str=None, disabled: bool=False):
        """Add a custom entry running *func* (wrapped into a JS handler if bare)."""
        if not func.startswith('function('):
            func = 'function(e, column){%s}' % func
        key = label if icon is None else ('<i class="%s" style="margin-right:5px"></i>%s' % (icon, label))
        self._attrs[key] = func
        return self
    def __str__(self):
        """Render the menu as a comma-separated list of JS object literals."""
        rendered = []
        for label, action in self._attrs.items():
            if action is None and label.startswith('separator_'):
                rendered.append("{'separator': true}")
            else:
                rendered.append("{label: '%s', action: %s}" % (label, action))
        return ', '.join(rendered)
def length_of(m_type, version):
    """Return the wire length in bytes of *m_type* for the given *version*.

    Mixed types are first resolved to their per-version concrete type.
    Exits the process when the type is unknown.
    """
    if m_type in of_g.of_mixed_types:
        m_type = of_g.of_mixed_types[m_type][version]
    base = of_g.of_base_types.get(m_type)
    if base is not None:
        return base['bytes']
    # Fall back to the per-version length table, keyed without the
    # trailing two characters of the type name.
    key = (m_type[:-2], version)
    if key in of_g.base_length:
        return of_g.base_length[key]
    print('Unknown length request', m_type, version)
    sys.exit(1)
def extractDoramtranslationsWordpressCom(item):
    """Map a feed *item* to a release message for known series tags.

    Returns None for previews or posts without chapter/volume info, a
    release message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    for tagname, series_name, tl_type in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def extractGoddessGrantMeaGirlfriend(item):
    """Map a feed *item* to a release message for 'Goddess! Grant Me a Girlfriend!!'.

    Returns None for previews or posts without chapter/volume info, a
    release message for tagged chapters, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    if 'ggmag chapter' not in item['tags']:
        return False
    return buildReleaseMessageWithType(item, 'Goddess! Grant Me a Girlfriend!!', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
class OptionPlotoptionsBulletSonificationContexttracksMappingHighpassFrequency(Options):
    """Mapping options for the highpass filter frequency of a sonification
    context track.

    Fix: restored the @property/@<name>.setter decorators — without them
    each setter definition silently shadowed its getter of the same name,
    leaving the getters unreachable.
    """
    @property
    def mapFunction(self):
        """Mapping function applied to the value."""
        return self._config_get(None)
    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    @property
    def mapTo(self):
        """Data property the mapping is bound to."""
        return self._config_get(None)
    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    @property
    def max(self):
        """Upper bound of the mapped range."""
        return self._config_get(None)
    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)
    @property
    def min(self):
        """Lower bound of the mapped range."""
        return self._config_get(None)
    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)
    @property
    def within(self):
        """Range the mapping operates within."""
        return self._config_get(None)
    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_custom_index_view_in_init_app():
    """init_app() must adopt a custom index view and all of its settings."""
    custom_index = base.AdminIndexView(name='a', category='b', endpoint='c', url='/d', template='e')
    app = Flask(__name__)
    admin = base.Admin()
    admin.init_app(app, index_view=custom_index)
    # The admin object mirrors the index view's endpoint and URL.
    assert admin.endpoint == 'c'
    assert admin.url == '/d'
    assert admin.index_view is custom_index
    # The view keeps its own configuration verbatim.
    assert custom_index.name == 'a'
    assert custom_index.category == 'b'
    assert custom_index._template == 'e'
    # It is registered as the one and only view.
    assert len(admin._views) == 1
    assert admin._views[0] == custom_index
def gen_v4_match_compat(out):
    """Write C '#define' aliases to *out* that coerce v4 and v5 (OpenFlow
    1.3/1.4) match types and helpers onto their v3 (OpenFlow 1.2)
    counterparts. A stopgap, as the emitted comment itself notes."""
    out.write('\n/**\n * Definitions to coerce v4 match (version 1.3) to v3 matches\n * (version 1.2).\n * This is a stopgap and needs to get cleaned up.\n */\n#define of_match_v4_t of_match_v3_t\n#define of_match_v4_init of_match_v3_init\n#define of_match_v4_new of_match_v3_new\n#define of_match_v4_to_match of_match_v3_to_match\n#define of_match_to_wire_match_v4 of_match_to_wire_match_v3\n#define of_match_v4_delete of_match_v3_delete\n\n#define of_match_v5_t of_match_v3_t\n#define of_match_v5_init of_match_v3_init\n#define of_match_v5_new of_match_v3_new\n#define of_match_v5_to_match of_match_v3_to_match\n#define of_match_to_wire_match_v5 of_match_to_wire_match_v3\n#define of_match_v5_delete of_match_v3_delete\n')
def upgrade():
    """Alembic upgrade: rework the message_settings bookkeeping columns.

    Existing rows are deleted first (the new schema is not data-compatible),
    then created_at/enabled/modified_at columns are added, `action` becomes
    required, and the old per-channel status columns are dropped.
    """
    # Wipe rows up front so the NOT NULL constraints below cannot fail.
    op.execute('delete from message_settings')
    op.add_column('message_settings', sa.Column('created_at', sa.DateTime(timezone=True), nullable=True))
    op.add_column('message_settings', sa.Column('enabled', sa.Boolean(), server_default='True', nullable=False))
    op.add_column('message_settings', sa.Column('modified_at', sa.DateTime(timezone=True), nullable=True))
    op.alter_column('message_settings', 'action', existing_type=sa.VARCHAR(), nullable=False)
    op.drop_column('message_settings', 'sent_at')
    op.drop_column('message_settings', 'mail_status')
    op.drop_column('message_settings', 'user_control_status')
    op.drop_column('message_settings', 'notification_status')
class RegReplacePanelTestCommand(sublime_plugin.TextCommand):
    """Parse the rule in the edit panel and run it as a RegReplace test.

    The panel content is converted to a (rule, test-command) pair; the test
    command is sanitized, the referenced rules are gathered from the user
    settings, the in-panel rule is compiled to surface regex errors early,
    and finally the `reg_replace` command is run against the active view.
    """
    # Keys allowed in a test command.
    test_keys = ('find_only', 'multi_pass', 'no_selection', 'regex_full_file_with_selections', 'replacements', 'action', 'options')
    # Keys allowed inside the 'options' sub-dict.
    test_subkeys = ('key', 'scope', 'style')
    # Test-command keys that must hold booleans.
    test_bool_keys = ('find_only', 'multi_pass', 'no_selection', 'regex_full_file_with_selections')
    def process_test_cmd(self, test):
        """Validate and sanitize *test* in place; return True when usable.

        Unknown keys and None-valued optional keys are removed; invalid
        values are reported via error() and abort validation.
        """
        remove = []
        okay = True
        for (k, v) in test.items():
            if (k not in self.test_keys):
                remove.append(k)
            # Bug fix: the original condition was mis-grouped as
            # `(k == 'replacements' and not isinstance(v, list)) or not all(test['replacements'] ...)`,
            # which evaluated `test['replacements']` for every other key and
            # raised KeyError when that key was absent. Validate only when k
            # is 'replacements', and validate v itself.
            elif ((k == 'replacements') and ((not isinstance(v, list)) or (not all((isinstance(x, str) for x in v))))):
                error('You need to specify valid replacements in your sequence for testing!')
                okay = False
                break
            elif (k in self.test_bool_keys):
                if (v is None):
                    remove.append(k)
                elif (not isinstance(v, bool)):
                    # Bug fix: the '%s' placeholder was never filled in.
                    error(('"%s" must be a boolean value!' % k))
                    okay = False
                    break
            elif (k == 'action'):
                if (v is None):
                    remove.append(k)
                elif (not isinstance(v, str)):
                    error('"action" must be a string!')
                    okay = False
                    break
            elif (k == 'options'):
                if (v is None):
                    remove.append(k)
                elif (not isinstance(v, dict)):
                    error('"options" must be a dict!')
                    okay = False
                    break
                else:
                    for (k1, v1) in v.items():
                        if (k1 not in self.test_subkeys):
                            remove.append((k, k1))
                        elif (not isinstance(v1, str)):
                            error(('"%s" must be a string!' % k1))
                            okay = False
                            break
        if okay:
            # Strip everything queued for removal; tuples address sub-keys.
            for r in remove:
                if isinstance(r, tuple):
                    del test[r[0]][r[1]]
                else:
                    del test[r]
        return okay
    def run(self, edit):
        """Convert the panel source, validate it, and launch a test run."""
        (obj, test) = ConvertPythonSrc2Obj().convert(self.view.substr(sublime.Region(0, self.view.size())))
        if ((test is None) or (obj is None)):
            return
        if (not self.process_test_cmd(test)):
            return
        # Collect the already-saved rules referenced by the test sequence.
        test_rules = {}
        rules = sublime.load_settings('reg_replace_rules.sublime-settings').get('replacements', {})
        for x in test['replacements']:
            if (x in rules):
                test_rules[x] = rules[x]
        if (not obj.get('name')):
            error('A valid name must be provided!')
        elif ((obj.get('scope') is None) and (obj.get('find') is None)):
            error('A valid find pattern or scope must be provided!')
        else:
            try:
                # Compile the in-panel pattern up front so regex errors are
                # reported before anything is written to settings.
                if (obj.get('find') is not None):
                    if obj.get('selection_inputs', False):
                        pass
                    elif obj.get('literal', False):
                        flags = 0
                        pattern = re.escape(obj['find'])
                        if obj.get('literal_ignorecase', False):
                            flags = re.I
                        re.compile(pattern, flags)
                    else:
                        extend = sublime.load_settings('reg_replace.sublime-settings').get('extended_back_references', False)
                        if extend:
                            bre.compile_search(obj['find'])
                        else:
                            re.compile(obj['find'])
                test_rules[obj['name']] = obj
                settings = sublime.load_settings('reg_replace_test.sublime-settings')
                settings.set('format', '3.2')
                settings.set('replacements', test_rules)
                window = sublime.active_window()
                if (window is not None):
                    view = window.active_view()
                    if (view is not None):
                        test['use_test_buffer'] = True
                        view.run_command('reg_replace', test)
            except Exception as e:
                error(('Regex compile failed!\n\n%s' % str(e)))
@dataclasses.dataclass
class GptsPlanIdentifier(ResourceIdentifier):
    """Resource identifier composed of a conversation id and a sub-task number.

    Fix: restored the @dataclasses.dataclass decorator. The class declares
    dataclass fields (`dataclasses.field`, bare annotations) and relies on
    __post_init__, neither of which has any effect without the decorator.
    """
    # Separator used to join the identifier parts; excluded from __init__.
    identifier_split: str = dataclasses.field(default='___$$$$___', init=False)
    conv_id: str
    sub_task_num: Optional[str]
    def __post_init__(self):
        """Reject missing parts and parts that embed the separator itself."""
        if ((self.conv_id is None) or (self.sub_task_num is None)):
            raise ValueError('conv_id and sub_task_num cannot be None')
        if any(((self.identifier_split in key) for key in [self.conv_id, self.sub_task_num] if (key is not None))):
            raise ValueError(f'identifier_split {self.identifier_split} is not allowed in conv_id, sub_task_num')
    def str_identifier(self) -> str:
        """Return the joined string form of this identifier."""
        return self.identifier_split.join((key for key in [self.conv_id, self.sub_task_num] if (key is not None)))
    def to_dict(self) -> Dict:
        """Return the identifier parts as a plain dict."""
        return {'conv_id': self.conv_id, 'sub_task_num': self.sub_task_num}
class App(dict):
    """App metadata record with attribute-style access to its dict keys.

    Constructed either from an existing mapping (copied verbatim, no
    defaults applied) or empty, in which case the full set of default
    fields is populated.
    """
    def __init__(self, copydict=None):
        if copydict:
            # Copy-construction: take the given mapping as-is.
            super().__init__(copydict)
            return
        super().__init__()
        # Default field values; the dict literal is rebuilt per call, so the
        # mutable defaults (lists/dicts) are fresh for every instance.
        defaults = {
            'Disabled': None,
            'AntiFeatures': {},
            'Provides': None,
            'Categories': [],
            'License': 'Unknown',
            'AuthorName': None,
            'AuthorEmail': None,
            'AuthorWebSite': None,
            'WebSite': '',
            'SourceCode': '',
            'IssueTracker': '',
            'Translation': '',
            'Changelog': '',
            'Donate': None,
            'FlattrID': None,
            'Liberapay': None,
            'OpenCollective': None,
            'Bitcoin': None,
            'Litecoin': None,
            'Name': None,
            'AutoName': '',
            'Summary': '',
            'Description': '',
            'RequiresRoot': False,
            'RepoType': '',
            'Repo': '',
            'Binaries': None,
            'AllowedAPKSigningKeys': [],
            'MaintainerNotes': '',
            'ArchivePolicy': None,
            'AutoUpdateMode': 'None',
            'UpdateCheckMode': 'None',
            'UpdateCheckIgnore': None,
            'VercodeOperation': [],
            'UpdateCheckName': None,
            'UpdateCheckData': None,
            'CurrentVersion': '',
            'CurrentVersionCode': None,
            'NoSourceSince': '',
            'id': None,
            'metadatapath': None,
            'Builds': [],
            'added': None,
            'lastUpdated': None,
        }
        for key, value in defaults.items():
            self[key] = value
    def __getattr__(self, name):
        """Expose dict keys as attributes."""
        try:
            return self[name]
        except KeyError:
            raise AttributeError('No such attribute: ' + name)
    def __setattr__(self, name, value):
        self[name] = value
    def __delattr__(self, name):
        try:
            del self[name]
        except KeyError:
            raise AttributeError('No such attribute: ' + name)
def get_duns_business_types_mapping():
    """Lazily load the DUNS business-type mapping (code -> terse_label).

    Reads the SAM functional data dictionary CSV into the module-level
    cache dict on first use. Returns an empty dict (without caching) when
    the CSV file is missing.
    """
    if not DUNS_BUSINESS_TYPES_MAPPING:
        if not os.path.exists(SAM_FUNCTIONAL_DATA_DICTIONARY_CSV):
            logger.warning('SAM Functional Data Dictionary CSV not found. DUNS business types not loaded.')
            return {}
        with open(SAM_FUNCTIONAL_DATA_DICTIONARY_CSV, 'r') as sam_data_dict_csv:
            for row in csv.DictReader(sam_data_dict_csv, delimiter=','):
                DUNS_BUSINESS_TYPES_MAPPING[row['code']] = row['terse_label']
    return DUNS_BUSINESS_TYPES_MAPPING
class IVTKWithCrustAndBrowser(SplitApplicationWindow):
    """Application window split between a TVTK scene with a pipeline browser
    (left) and a Python shell pre-bound to useful objects (right)."""
    # Fraction of the window given to the left-hand side.
    ratio = Float(0.7)
    # Split orientation.
    direction = Str('horizontal')
    # The TVTK scene being displayed.
    scene = Instance(Scene)
    # Pipeline browser attached to the scene.
    browser = Instance(PipelineBrowser)
    # Composite widget holding both scene and browser.
    browser_scene = Instance(SceneWithBrowser)
    # The interactive Python shell on the right-hand side.
    python_shell = Instance(PythonShell)
    def __init__(self, **traits):
        """Initialize the window, its title and menu bar."""
        super(IVTKWithCrustAndBrowser, self).__init__(**traits)
        self.title = 'TVTK Scene'
        self.menu_bar_manager = create_ivtk_menu(self)
    def close(self):
        """Close the scene (if any) before closing the window itself."""
        if (self.scene is not None):
            self.scene.close()
        super(IVTKWithCrustAndBrowser, self).close()
    # Window icon.
    icon = Instance(ImageResource, scene_icon)
    def _create_lhs(self, parent):
        """Create the left-hand side: scene plus pipeline browser."""
        self.browser_scene = SceneWithBrowser(parent)
        self.scene = self.browser_scene.scene
        self.browser = self.browser_scene.browser
        return self.browser_scene.control
    def _create_rhs(self, parent):
        """Create the right-hand side: a Python shell with handy bindings."""
        self.python_shell = PythonShell(parent)
        # Bind the scene and browser under both long and short names.
        self.python_shell.bind('scene', self.scene)
        self.python_shell.bind('s', self.scene)
        self.python_shell.bind('browser', self.browser)
        self.python_shell.bind('b', self.browser)
        self.python_shell.bind('tvtk', tvtk)
        return self.python_shell.control
def text(session, *args, **kwargs):
    """Handle one line of raw text input arriving on *session*.

    Idle keep-alive commands only bump the session counters; everything
    else is nick-replaced (when an account/puppet is attached) and handed
    to the command handler.
    """
    if not args:
        return
    txt = args[0]
    if txt is None:
        return
    if txt.strip() in _IDLE_COMMAND:
        # Idle keep-alive: count it, but don't run a command.
        session.update_session_counters(idle=True)
        return
    txt = _maybe_strip_incoming_mxp(txt)
    account = session.account
    if account:
        puppet = session.puppet
        if puppet:
            txt = puppet.nicks.nickreplace(txt, categories='inputline', include_account=True)
        else:
            txt = account.nicks.nickreplace(txt, categories='inputline', include_account=False)
    # 'options' is consumed here; it must not leak into the command handler.
    kwargs.pop('options', None)
    cmdhandler(session, txt, callertype='session', session=session, **kwargs)
    session.update_session_counters()
class TestGetPageNumbersWithUncommonPageDimension():
    """Tests for get_page_numbers_with_uncommon_page_dimension."""
    def test_should_provide_empty_list_for_empty_document(self):
        """A document without pages has no uncommon dimensions."""
        layout_document = LayoutDocument(pages=[])
        result = get_page_numbers_with_uncommon_page_dimension(layout_document)
        assert (result == [])
    def test_should_provide_empty_list_if_all_pages_have_same_dimension(self):
        """Identical coordinates on every page -> nothing is uncommon."""
        layout_document = LayoutDocument(pages=[LayoutPage(blocks=[], meta=LayoutPageMeta(page_number=1, coordinates=LAYOUT_PAGE_COORDINATES_1._replace(page_number=1))), LayoutPage(blocks=[], meta=LayoutPageMeta(page_number=2, coordinates=LAYOUT_PAGE_COORDINATES_1._replace(page_number=2))), LayoutPage(blocks=[], meta=LayoutPageMeta(page_number=3, coordinates=LAYOUT_PAGE_COORDINATES_1._replace(page_number=3)))])
        result = get_page_numbers_with_uncommon_page_dimension(layout_document)
        assert (result == [])
    def test_should_provide_page_number_with_uncomment_page_dimension(self):
        """Only the page with deviating coordinates (page 2) is reported."""
        layout_document = LayoutDocument(pages=[LayoutPage(blocks=[], meta=LayoutPageMeta(page_number=1, coordinates=LAYOUT_PAGE_COORDINATES_1._replace(page_number=1))), LayoutPage(blocks=[], meta=LayoutPageMeta(page_number=2, coordinates=LAYOUT_PAGE_COORDINATES_2._replace(page_number=2))), LayoutPage(blocks=[], meta=LayoutPageMeta(page_number=3, coordinates=LAYOUT_PAGE_COORDINATES_1._replace(page_number=3)))])
        result = get_page_numbers_with_uncommon_page_dimension(layout_document)
        assert (result == [2])
@dataclasses.dataclass(frozen=False)
class CapturedTimeMS():
    """Mutable accumulator for time (ms) spent in external communication.

    Fixes: restored the @dataclasses.dataclass decorator (only the stray
    `(frozen=False)` argument list remained), and replaced the undefined
    `RuntimeErrorException` in __radd__ with the builtin RuntimeError.
    """
    external_communication_time_ms: float = 0.0
    def __add__(self, other):
        """Return a new capture holding the combined time of both operands."""
        return CapturedTimeMS(external_communication_time_ms=(self.external_communication_time_ms + other.external_communication_time_ms))
    def __radd__(self, other):
        # Deliberately reject e.g. sum([...]), which starts from int 0.
        raise RuntimeError('Can only add CapturedTimeMS objects together')
    def reset_time(self) -> None:
        """Reset the accumulated time to zero."""
        self.external_communication_time_ms = 0.0
    def increment_external_communication_time_ms(self, time_ms: float) -> None:
        """Add *time_ms* to the accumulated time."""
        self.external_communication_time_ms += time_ms
class Switch(object):
    """A datapath together with its non-reserved ports."""

    def __init__(self, dp):
        super(Switch, self).__init__()
        self.dp = dp
        self.ports = []

    def add_port(self, ofpport):
        """Track `ofpport` unless it is a reserved OpenFlow port."""
        candidate = Port(self.dp.id, self.dp.ofproto, ofpport)
        if candidate.is_reserved():
            return
        self.ports.append(candidate)

    def del_port(self, ofpport):
        """Drop the tracked port matching `ofpport`."""
        self.ports.remove(Port(ofpport))

    def to_dict(self):
        """Serialize the switch as {'dpid': ..., 'ports': [...]}."""
        return {
            'dpid': dpid_to_str(self.dp.id),
            'ports': [p.to_dict() for p in self.ports],
        }

    def __str__(self):
        pieces = ['Switch<dpid=%s, ' % self.dp.id]
        for p in self.ports:
            pieces.append(str(p) + ' ')
        pieces.append('>')
        return ''.join(pieces)
class TestNoImports():
    """Checks that constructing a Flask app does not import its module."""

    def test_name_with_import_error(self, modules_tmpdir):
        """Flask(import_name) must not import the named module.

        The helper module raises NotImplementedError at import time; if that
        exception surfaces, Flask imported the module and the test must fail.

        Fix: the original built ``AssertionError(...)`` without raising it,
        so this test could never fail; it now raises.
        """
        modules_tmpdir.join('importerror.py').write('raise NotImplementedError()')
        try:
            flask.Flask('importerror')
        except NotImplementedError:
            raise AssertionError('Flask(import_name) is importing import_name.')
def add_PostControllerServicer_to_server(servicer, server):
    """Register PostController RPC handlers on a gRPC server.

    Auto-generated gRPC binding: maps each RPC name to the servicer method
    with its request deserializer / response serializer ('List' is
    server-streaming, the rest are unary-unary; 'Destroy' returns
    google.protobuf.Empty), then installs one generic handler for the
    'blog_proto.PostController' service.
    """
    rpc_method_handlers = {'List': grpc.unary_stream_rpc_method_handler(servicer.List, request_deserializer=blog__proto_dot_post__pb2.PostListRequest.FromString, response_serializer=blog__proto_dot_post__pb2.Post.SerializeToString), 'Create': grpc.unary_unary_rpc_method_handler(servicer.Create, request_deserializer=blog__proto_dot_post__pb2.Post.FromString, response_serializer=blog__proto_dot_post__pb2.Post.SerializeToString), 'Retrieve': grpc.unary_unary_rpc_method_handler(servicer.Retrieve, request_deserializer=blog__proto_dot_post__pb2.PostRetrieveRequest.FromString, response_serializer=blog__proto_dot_post__pb2.Post.SerializeToString), 'Update': grpc.unary_unary_rpc_method_handler(servicer.Update, request_deserializer=blog__proto_dot_post__pb2.Post.FromString, response_serializer=blog__proto_dot_post__pb2.Post.SerializeToString), 'Destroy': grpc.unary_unary_rpc_method_handler(servicer.Destroy, request_deserializer=blog__proto_dot_post__pb2.Post.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString)}
    generic_handler = grpc.method_handlers_generic_handler('blog_proto.PostController', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
class DataSerializer(serializers.ModelSerializer):
    """ModelSerializer for Data rows, exposing the ten generic field columns."""

    # Hidden from serialized output/input; resolved server-side.
    api_key = serializers.HiddenField(default=None)

    class Meta():
        model = Data
        fields = (
            ['id', 'device']
            + ['field_{}'.format(i) for i in range(1, 11)]
            + ['remote_address', 'api_key']
        )
def load_cloudasset_data(engine, config, inventory_index_id):
    """Stream Cloud Asset Inventory dumps from GCS into the database.

    Uses the dump paths from `config`, downloading them first when none are
    configured. On any stream error the partially-imported CAI data is
    cleared and that result is returned; otherwise returns the number of
    imported assets.
    """
    dump_paths = config.get_cai_dump_file_paths()
    gcs_client = storage.StorageClient({})
    total_imported = 0
    if not dump_paths:
        dump_paths = _download_cloudasset_data(config, inventory_index_id)
    for dump_path in dump_paths:
        try:
            LOGGER.debug(f'Streaming CAI dump from GCS {dump_path}.')
            total_imported += _stream_gcs_to_database(dump_path, engine, gcs_client)
        except StreamError as err:
            # Abort on the first failure and roll back the CAI tables.
            LOGGER.error('Error streaming data from GCS to Database: %s', err)
            return _clear_cai_data(engine)
    LOGGER.info('%i assets imported to database.', total_imported)
    engine.execute('pragma optimize;')
    return total_imported
class PlayNextService(threading.Thread):
    """Background thread that watches Kodi playback and shows a
    "play next episode" dialog shortly before the current episode ends."""

    # Set by stop_servcie() to end the run loop.
    stop_thread = False
    # Play monitor supplying `played_information` for the current item.
    monitor = None

    def __init__(self, play_monitor):
        super(PlayNextService, self).__init__()
        self.monitor = play_monitor

    def run(self):
        """Poll the player once per second; trigger the dialog when the
        remaining time drops below the configured trigger time."""
        from .play_utils import get_playing_data
        settings = xbmcaddon.Addon()
        play_next_trigger_time = int(settings.getSetting('play_next_trigger_time'))
        play_next_dialog = None
        play_next_triggered = False
        is_playing = False
        while ((not xbmc.Monitor().abortRequested()) and (not self.stop_thread)):
            player = xbmc.Player()
            if player.isPlaying():
                if (not is_playing):
                    # Playback just started: re-read the trigger-time setting
                    # so changes take effect without restarting the service.
                    settings = xbmcaddon.Addon()
                    play_next_trigger_time = int(settings.getSetting('play_next_trigger_time'))
                    log.debug('New play_next_trigger_time value: {0}', play_next_trigger_time)
                duration = player.getTotalTime()
                position = player.getTime()
                trigger_time = play_next_trigger_time
                time_to_end = (duration - position)
                # Fire only once per item, and only if no dialog is open yet.
                if ((not play_next_triggered) and (trigger_time > time_to_end) and (play_next_dialog is None)):
                    play_next_triggered = True
                    log.debug('play_next_triggered hit at {0} seconds from end', time_to_end)
                    play_data = get_playing_data(self.monitor.played_information)
                    log.debug('play_next_triggered play_data : {0}', play_data)
                    next_episode = play_data.get('next_episode')
                    item_type = play_data.get('item_type')
                    # Only episodes with a known successor get the dialog.
                    if ((next_episode is not None) and (item_type == 'Episode')):
                        settings = xbmcaddon.Addon()
                        plugin_path = settings.getAddonInfo('path')
                        plugin_path_real = xbmcvfs.translatePath(os.path.join(plugin_path))
                        play_next_dialog = PlayNextDialog('PlayNextDialog.xml', plugin_path_real, 'default', '720p')
                        play_next_dialog.set_episode_info(next_episode)
                        if (play_next_dialog is not None):
                            play_next_dialog.show()
                is_playing = True
            else:
                # Playback stopped: reset the trigger and close any open dialog.
                play_next_triggered = False
                if (play_next_dialog is not None):
                    play_next_dialog.stop_auto_close()
                    play_next_dialog.close()
                    del play_next_dialog
                    play_next_dialog = None
                is_playing = False
            if xbmc.Monitor().waitForAbort(1):
                break

    def stop_servcie(self):
        # NOTE(review): method name is misspelled ("servcie") but kept,
        # since external callers use it.
        log.debug('PlayNextService Stop Called')
        self.stop_thread = True
class OptionPlotoptionsVariwideSonificationContexttracksMappingRate(Options):
    """Generated option wrapper for
    `plotOptions.variwide.sonification.contextTracks.mapping.rate`.

    NOTE(review): every option below is defined twice — a zero-argument
    getter immediately followed by a one-argument setter. These are
    presumably decorated with @property / @<name>.setter in the original
    generated source; the decorators are not visible in this extract —
    confirm before modifying.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def _set_localized_text_entry(app, locale, key, f, versionCode=None):
    """Read file `f` and store its (length-limited) text in the app metadata.

    With a `versionCode`, the text becomes that build's 'whatsNew' entry for
    `locale`; otherwise it is stored under `key` in the app's localized dict.
    Text is truncated to the configured character limit for `key`; 'name',
    'summary' and 'video' additionally have surrounding newlines stripped.
    Any error is logged (not raised).
    """
    try:
        limit = config['char_limits'][key]
        if (not versionCode):
            localized = _get_localized_dict(app, locale)
        # Read at most twice the limit; truncation to `limit` happens below.
        with open(f, errors='replace') as fp:
            text = fp.read((limit * 2))
        if versionCode:
            for build in app['Builds']:
                if (build['versionCode'] == versionCode):
                    if ('whatsNew' not in build):
                        build['whatsNew'] = collections.OrderedDict()
                    build['whatsNew'][locale] = text[:limit]
                    return
        # NOTE(review): if versionCode was given but no build matched, we fall
        # through here with `localized` unbound — the resulting NameError is
        # swallowed by the broad except below and merely logged. Confirm this
        # is intended.
        if (len(text) > 0):
            if (key in ('name', 'summary', 'video')):
                localized[key] = text.strip('\n')[:limit]
            else:
                localized[key] = text[:limit]
    except Exception as e:
        logging.error(_('{path}: {error}').format(path=f, error=str(e)))
class Engine(HasStrictTraits):
    """Central Mayavi engine: owns the list of scenes, tracks the current
    scene/object/selection, creates and closes viewers, and supports
    saving/loading whole visualizations via state pickling."""

    # Pickle-state schema version.
    __version__ = 0
    scenes = List(Scene, record=True)
    # What the tree UI shows: scenes, or a "add scene" helper node.
    children_ui_list = Property(record=False)
    name = Str('Mayavi Engine')
    current_scene = Property(Instance(Scene), record=False)
    current_object = Property(record=False)
    current_selection = Property(record=False)
    # Fired (with self) when the engine starts.
    started = Event(record=False)
    # Callable used to create new scene viewers.
    scene_factory = Callable(viewer_factory)
    running = Bool(False, record=False)
    closed = Event()
    recorder = Instance(Recorder, record=False)
    # Weak refs so the engine does not keep dead scenes/objects alive.
    _current_scene = WeakRef(Scene, allow_none=True)
    _current_object = WeakRef(HasTraits, allow_none=True)
    _current_selection = WeakRef(HasTraits, allow_none=True)
    # Maps a tvtk scene to the viewer that hosts it.
    _viewer_ref = Dict
    current_selection_view = View(Item(name='_current_selection', enabled_when='_current_selection is not None', style='custom', springy=True, show_label=False), resizable=True, scrollable=True)

    def __init__(self, **traits):
        super(Engine, self).__init__(**traits)
        preference_manager.root.on_trait_change(self._show_helper_nodes_changed, 'show_helper_nodes')

    def __get_pure_state__(self):
        """Return a picklable dict of state, dropping transient members."""
        d = self.__dict__.copy()
        for x in ['_current_scene', '_current_object', '__sync_trait__', '_viewer_ref', '__traits_listener__']:
            d.pop(x, None)
        return d

    def __set_pure_state__(self, state):
        """Restore pickled state, first matching the live scene count to the
        saved one by closing or creating scenes."""
        n_scene = len(self.scenes)
        n_saved_scene = len(state.scenes)
        for i in range((n_scene - n_saved_scene)):
            self.close_scene(self.scenes[(- 1)])
        for i in range((n_saved_scene - n_scene)):
            self.new_scene()
        state_pickler.set_state(self, state)

    def __getstate__(self):
        return state_pickler.dumps(self)

    def __setstate__(self, str_state):
        # Re-init first so traits machinery exists before state is applied.
        self.__init__()
        state = state_pickler.loads_state(str_state)
        state_pickler.update_state(state)
        self.__set_pure_state__(state)

    def start(self):
        """Register with the global registry and mark the engine running."""
        registry.register_engine(self)
        self.started = self
        self.running = True

    def stop(self):
        registry.unregister_engine(self)
        self.running = False
        self.closed = True

    def add_source(self, src, scene=None):
        """Add a data source to `scene` (default: current scene, creating one
        if needed) and make it the current object."""
        if (scene is not None):
            # Map the passed tvtk scene back to our managed Scene wrapper.
            tvtk_scene = scene.scene
            for sc in self.scenes:
                if (sc.scene == tvtk_scene):
                    scene = sc
                    break
            else:
                error('This scene is not managed by mayavi')
                return
        else:
            scene = self.current_scene
        if (scene is None):
            self.new_scene()
            scene = self.current_scene
        scene.add_child(src)
        self.current_object = src

    def add_filter(self, fil, obj=None):
        """Attach filter/module `fil` to `obj` (default: current object),
        reporting errors when there is no suitable active object."""
        if (obj is None):
            obj = self.current_object
        if (not isinstance(obj, Base)):
            msg = 'No valid current object, please select an active object.'
            error(msg)
            return
        if ((obj is not None) and (not isinstance(obj, Scene))):
            if obj.running:
                obj.add_child(fil)
                self.current_object = fil
            else:
                msg = 'Current object is not active, please select an active object.'
                error(msg)
        elif (obj is None):
            error('Please create a VTK scene and open some data first.')
        else:
            error('No data: cannot use a Filter/Module/ModuleManager.')

    def add_module(self, mod, obj=None):
        """Alias for add_filter — modules attach the same way."""
        self.add_filter(mod, obj=obj)

    def save_visualization(self, file_or_fname):
        """Pickle the whole visualization to a file/filename, with VTK
        warnings temporarily silenced."""
        o = vtk.vtkObject
        w = o.GetGlobalWarningDisplay()
        o.SetGlobalWarningDisplay(0)
        try:
            state = state_pickler.get_state(self)
            # Workaround: convert a streamline seed widget position from
            # numpy scalars to plain floats so it pickles portably; falls
            # back to a plain dump when that path does not exist.
            st = state.scenes[0].children[0].children[0].children[4]
            l_pos = st.seed.widget.position
            st.seed.widget.position = [pos.item() for pos in l_pos]
            saved_state = state_pickler.dumps(state)
            file_or_fname.write(saved_state)
        except (IndexError, AttributeError):
            state_pickler.dump(self, file_or_fname)
        finally:
            o.SetGlobalWarningDisplay(w)

    def load_visualization(self, file_or_fname):
        """Load a previously saved visualization, creating one new scene per
        saved scene; VTK warnings are silenced during the load."""
        o = vtk.vtkObject
        w = o.GetGlobalWarningDisplay()
        o.SetGlobalWarningDisplay(0)
        try:
            state = state_pickler.load_state(file_or_fname)
            state_pickler.update_state(state)
            for scene_state in state.scenes:
                self.new_scene()
                scene = self.scenes[(- 1)]
                if (scene.scene is not None):
                    # Avoid intermediate renders while state is applied.
                    scene.scene.disable_render = True
                state_pickler.update_state(scene_state)
                scene.__set_pure_state__(scene_state)
                scene.render()
        finally:
            o.SetGlobalWarningDisplay(w)

    def open(self, filename, scene=None):
        """Open a data file with a registered reader and add the resulting
        source to `scene`; returns the source on success."""
        passed_scene = scene
        reader = registry.get_file_reader(filename)
        if (reader is None):
            msg = ('No suitable reader found for the file %s' % filename)
            error(msg)
        else:
            src = None
            if (scene is None):
                scene = self.current_scene
            if (scene is None):
                scene = self.new_scene()
            try:
                sc = scene.scene
                if (sc is not None):
                    sc.busy = True
                callable = reader.get_callable()
                if (reader.factory is None):
                    src = callable()
                    src.initialize(filename)
                else:
                    # Factory-style readers build the source themselves.
                    src = callable(filename, self)
                if (src is not None):
                    self.add_source(src, passed_scene)
            finally:
                if (sc is not None):
                    sc.busy = False
            if (src is not None):
                return src

    def record(self, msg):
        """Forward `msg` to the script recorder, if one is attached."""
        r = self.recorder
        if (r is not None):
            r.record(msg)

    def add_scene(self, scene, name=None):
        """Wrap a tvtk scene in a managed Scene, start it, make it current
        and register it with the recorder."""
        if (name is None):
            if hasattr(scene, 'name'):
                name = scene.name
            else:
                name = ('Mayavi Scene %d' % next(scene_id_generator))
        s = Scene(scene=scene, name=name, parent=self)
        s.start()
        recorder = self.recorder
        self.scenes.append(s)
        self.current_scene = s
        if (recorder is not None):
            recorder.register(s)

    def remove_scene(self, scene, **kwargs):
        """Stop and forget the managed Scene wrapping tvtk scene `scene`."""
        s = None
        for (index, x) in enumerate(self.scenes):
            if (x.scene is scene):
                s = x
                break
        if (s is not None):
            s.stop()
            self.scenes.remove(s)
            recorder = self.recorder
            if (recorder is not None):
                recorder.unregister(s)
        if (scene in self._viewer_ref):
            del self._viewer_ref[scene]
        if (scene is self._current_scene):
            self._current_scene = None

    def new_scene(self, viewer=None, name=None, **kwargs):
        """Create (or adopt) a viewer, add its scene, and wire up close /
        activate notifications; returns the viewer."""
        if (viewer is None):
            # Pass through only the kwargs the scene factory accepts.
            factory_kwargs = {}
            factory_kwargs_names = get_args(self.scene_factory)
            for (arg, value) in kwargs.items():
                if (arg in factory_kwargs_names):
                    factory_kwargs[arg] = value
            viewer = self.scene_factory(**factory_kwargs)
            process_ui_events()
        if (name is not None):
            viewer.name = name
        self._viewer_ref[viewer.scene] = viewer
        self.add_scene(viewer.scene)
        if hasattr(viewer, 'on_trait_change'):
            viewer.on_trait_change(self._on_scene_closed, 'closing')
            viewer.on_trait_change(self._on_scene_activated, 'activated')
            if hasattr(viewer, 'title'):
                self.current_scene.sync_trait('name', viewer, 'title')
        return viewer

    def close_scene(self, scene):
        """Remove `scene` and close both the scene and its viewer."""
        viewer = self.get_viewer(scene)
        self.remove_scene(scene.scene)
        if hasattr(scene, 'close'):
            scene.close()
        elif (scene.scene is not None):
            scene.scene.close()
        if ((viewer is not None) and hasattr(viewer, 'close')):
            viewer.close()

    def get_viewer(self, scene):
        """Return the viewer hosting `scene`, or None."""
        return self._viewer_ref.get(scene.scene)

    def dialog_view(self):
        # Subclasses may return a traits View used for dialog editing.
        return None

    def _on_select(self, object):
        """Tree-selection hook: update current selection/object/scene."""
        self.current_selection = object
        self._current_object = object
        try:
            scene = object.scene
            for s in self.scenes:
                if (s.scene == scene):
                    self._current_scene = s
                    break
        except AttributeError:
            pass

    def _get_current_scene(self):
        # Fall back sensibly: only scene -> that one; else last created.
        n_scene = len(self.scenes)
        if (n_scene == 0):
            return None
        elif (n_scene == 1):
            return self.scenes[0]
        elif (self._current_scene is not None):
            return self._current_scene
        elif (n_scene > 1):
            return self.scenes[(- 1)]
        else:
            return None

    def _set_current_scene(self, scene):
        old = self._current_scene
        self._current_scene = scene
        self.trait_property_changed('current_scene', old, scene)

    def _get_current_object(self):
        if (self._current_object is not None):
            return self._current_object
        elif (self.current_scene is not None):
            return self.current_scene
        else:
            return None

    def _set_current_object(self, object):
        old = self._current_object
        self._current_object = object
        self.trait_property_changed('current_object', old, object)

    def _get_current_selection(self):
        return self._current_selection

    def _set_current_selection(self, object):
        old = self._current_selection
        # Only pipeline objects / adder nodes count as selections.
        if (not isinstance(object, (Base, AdderNode))):
            object = None
        self._current_selection = object
        self.trait_property_changed('current_selection', old, object)

    def _on_scene_closed(self, obj, name, old, new):
        self.remove_scene(obj.scene)

    def _on_scene_activated(self, obj, name, old, new):
        """Viewer-activation hook: make the activated scene current."""
        for scene in self.scenes:
            if (scene.scene is obj.scene):
                self.current_scene = scene
                break

    def _closed_fired(self):
        """Cleanup when the engine closes: drop viewers, scenes, listeners."""
        self._viewer_ref.clear()
        self.scenes = []
        preference_manager.root.on_trait_change(self._show_helper_nodes_changed, 'show_helper_nodes', remove=True)
        registry.unregister_engine(self)

    def _show_helper_nodes_changed(self):
        self.trait_property_changed('children_ui_list', [], self.children_ui_list)

    def _get_children_ui_list(self):
        """UI children: a SceneAdderNode helper when empty (and enabled),
        otherwise the scenes themselves."""
        if (preference_manager.root.show_helper_nodes and (len(self.scenes) == 0)):
            return [SceneAdderNode(object=self)]
        else:
            return self.scenes

    # NOTE(review): the bare `_trait_change('scenes[]')` below looks like a
    # stripped `@on_trait_change('scenes[]')` decorator for the following
    # method — confirm against the original source.
    _trait_change('scenes[]')
    def _trigger_children_ui_list(self, old, new):
        self.trait_property_changed('children_ui_list', old, new)

    def _recorder_changed(self, old, new):
        """Emit recorder preamble/footer when a recorder is (un)attached."""
        if (new is not None):
            new.record('# Recorded script from Mayavi2')
            new.record('from numpy import array')
            new.record('try:')
            new.record('    engine = mayavi.engine')
            new.record('except NameError:')
            new.record('    from mayavi.api import Engine')
            new.record('    engine = Engine()')
            new.record('    engine.start()')
            new.record('if len(engine.scenes) == 0:')
            new.record('    engine.new_scene()')
            new.record('# ')
        elif (old is not None):
            old.record('# ')
            old.record('from mayavi.tools.show import show')
            old.record('show()')
def upgrade():
    """Add a nullable, timezone-aware `deleted_at` soft-delete column to
    several tables."""
    for table_name in ('session_types', 'social_links', 'stripe_authorizations'):
        op.add_column(
            table_name,
            sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
        )
def test_get_secret_from_cache_bytes() -> None:
    """Cached bytes secret is returned as-is instead of generating a new one.

    Fix: the cached secret and the returned value are `bytes`, but the cache
    was parameterized `MaskingSecretCache[str]` and the result annotated
    `str`; both now use `bytes` to match the actual values.
    """
    masking_meta_key: Dict[SecretType, MaskingSecretMeta] = {
        SecretType.key: MaskingSecretMeta[bytes](
            masking_strategy=AesEncryptionMaskingStrategy.name,
            generate_secret_func=SecretsUtil.generate_secret_bytes,
        )
    }
    secret_key = MaskingSecretCache[bytes](
        secret=b'\x94Y\xa8Z',
        masking_strategy=AesEncryptionMaskingStrategy.name,
        secret_type=SecretType.key,
    )
    cache_secret(secret_key, request_id)
    result: bytes = SecretsUtil.get_or_generate_secret(
        request_id, SecretType.key, masking_meta_key[SecretType.key]
    )
    assert result == b'\x94Y\xa8Z'
    clear_cache_secrets(request_id)
def st_local_audio(pathname, key):
    """Embed a local audio file in Streamlit via st_player, initially paused."""
    player_options = {
        'progress_interval': 1000,
        'playing': False,
        'muted': False,
        'light': False,
        'play_inline': True,
        'playback_rate': 1,
        'height': 40,
        'config': {'start': 0, 'forceAudio': True, 'forceHLS': True, 'forceSafariHLS': True},
    }
    st_player(local_audio(pathname), **player_options, key=key)
class OptionPlotoptionsHeatmapSonificationTracksActivewhen(Options):
    """Generated option wrapper for
    `plotOptions.heatmap.sonification.tracks.activeWhen`.

    NOTE(review): each option below is defined twice — a zero-argument
    getter immediately followed by a one-argument setter. These are
    presumably decorated with @property / @<name>.setter in the original
    generated source; the decorators are not visible in this extract —
    confirm before modifying.
    """

    def crossingDown(self):
        return self._config_get(None)

    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    def crossingUp(self):
        return self._config_get(None)

    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        return self._config_get(None)

    def prop(self, text: str):
        self._config(text, js_type=False)
class Archive(object):
    """Wrapper that dispatches to a concrete archive implementation chosen
    from the file extension (via the module-level `extension_map`)."""

    def __init__(self, file, ext=''):
        # `file` may be a path string or a file object with a `.name`.
        self._archive = self._archive_cls(file, ext=ext)(file)

    @staticmethod
    def _archive_cls(file, ext=''):
        """Resolve the concrete archive class for `file`.

        Tries the last extension first, then the next-to-last (handles
        double extensions such as `.tar.gz`). `ext` is an extension hint
        appended before lookup. Raises UnrecognizedArchiveFormat when no
        filename can be determined or no class matches.

        Fix: restored `@staticmethod` — without it the bound call in
        `__init__` (`self._archive_cls(file, ext=ext)`) would pass the
        instance as `file` and fail.
        """
        if isinstance(file, string_types):
            filename = file
        else:
            try:
                filename = file.name
            except AttributeError:
                raise UnrecognizedArchiveFormat('File object not a recognized archive format.')
        lookup_filename = (filename + ext)
        (base, tail_ext) = os.path.splitext(lookup_filename.lower())
        cls = extension_map.get(tail_ext)
        if (not cls):
            # Second chance: strip one more extension (e.g. '.tar' of '.tar.gz').
            (base, ext) = os.path.splitext(base)
            cls = extension_map.get(ext)
        if (not cls):
            raise UnrecognizedArchiveFormat(('Path not a recognized archive format: %s' % filename))
        return cls

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()

    def extract(self, dst=''):
        """Extract the archive's contents into directory `dst`."""
        self._archive.extract(dst)

    def list(self):
        """Print/enumerate the archive contents (delegates to the backend)."""
        self._archive.list()

    def filenames(self):
        """Return the member filenames of the archive."""
        return self._archive.filenames()

    def close(self):
        self._archive.close()
def get_high_priority_urls(filter_before=None):
    """Return a {netloc: set(urls)} map of high-priority text pages.

    Queries WebPages for rows at or above high priority that are text, then
    groups the URLs by (blogspot-normalized) netloc.

    NOTE(review): the `filter_before` branch filters on
    db.NuReleaseItem.release_date even though the query selects from
    db.WebPages — verify this is intended, as it looks like it would add an
    implicit cross join rather than restrict WebPages rows.
    """
    print('Loading high priority netlocs')
    with db.session_context() as sess:
        query = sess.query(db.WebPages.url).filter((db.WebPages.priority <= db.DB_HIGH_PRIORITY)).filter((db.WebPages.is_text == True)).yield_per(10000)
        if filter_before:
            query = query.filter((db.NuReleaseItem.release_date >= filter_before))
        page_items = query.all()
        mapdict = {}
        for (row,) in tqdm.tqdm(page_items):
            # Normalize blogspot country-TLD variants to a single netloc.
            itemnl = WebMirror.OutputFilters.util.feedNameLut.patch_blogspot(urllib.parse.urlsplit(row).netloc)
            mapdict.setdefault(itemnl, set())
            mapdict[itemnl].add(row)
        print('High Priority outbound items: ', len(mapdict))
        return mapdict
def compute_bls_to_execution_change_domain(fork_version: bytes, genesis_validators_root: bytes) -> bytes:
    """Build the 32-byte BLS_TO_EXECUTION_CHANGE signing domain.

    The domain is the 4-byte domain type concatenated with the first 28
    bytes of the fork data root. Raises ValueError when `fork_version` is
    not exactly 4 bytes.
    """
    if len(fork_version) != 4:
        raise ValueError(f'Fork version should be in 4 bytes. Got {len(fork_version)}.')
    fork_data_root = compute_fork_data_root(fork_version, genesis_validators_root)
    return DOMAIN_BLS_TO_EXECUTION_CHANGE + fork_data_root[:28]
class BadDate(NotFound):
    """404 raised for a date parameter that is malformed or out of range."""

    def __init__(self, date):
        # Assume the date parsed but has no data; tighten the message if the
        # string is not a canonical YYYY-MM-DD date.
        detail = 'Date is outside the 5 years of data available'
        try:
            if parse_date(date).isoformat() != date:
                raise ValueError()
        except ValueError:
            detail = 'Dates must be in YYYY-MM-DD format'
        super().__init__(detail)
def try_find_text(rom_bytes, start_offset=PAYLOAD_OFFSET, valid_threshold=32) -> tuple[(int, int)]:
    """Heuristically locate a code (.text) region inside a ROM image.

    Walks 32-bit little-endian words from `start_offset`, decoding each as a
    MIPS instruction. A run of more than `valid_threshold` "good"
    instructions (valid, not a nop, and not just the previous opcode
    repeated) marks the start of code; the first invalid word after that
    marks the end.

    Returns (start, end) byte offsets into `rom_bytes`; both stay 0 when no
    run is found, and `end` stays 0 when the code reaches the buffer's end.
    """
    start = end = 0
    good_count = valid_count = 0
    in_text = False
    last_opcode = None
    words = struct.iter_unpack('<I', rom_bytes[start_offset:])
    for (i, (word,)) in enumerate(words):
        insn = rabbitizer.Instruction(word)
        if in_text:
            if (not is_valid(insn)):
                end = (start_offset + (i * WORD_SIZE_BYTES))
                break
        else:
            if is_valid(insn):
                valid_count += 1
                opcode = insn.getOpcodeName()
                # Long repeats of one opcode (or nops) often appear in data,
                # so they don't count towards the "good" run.
                if ((last_opcode != opcode) and (opcode != 'nop')):
                    good_count += 1
            else:
                # Invalid word: restart both counters.
                good_count = valid_count = 0
            if (good_count > valid_threshold):
                in_text = True
                # Back up to the first word of the current valid run.
                start = (start_offset + (((i + 1) - valid_count) * WORD_SIZE_BYTES))
        last_opcode = insn.getOpcodeName()
    return (start, end)
class ModelPipDeleter(ErsiliaBase):
    """Uninstalls the pip package associated with a model id."""

    def __init__(self, config_json=None):
        ErsiliaBase.__init__(self, config_json=config_json, credentials_json=None)

    def pip_uninstall(self, model_id):
        """Run `pip uninstall` for `model_id`, auto-confirming the prompt."""
        # NOTE(review): shell-string interpolation — if model_id can come
        # from untrusted input this is command injection; `pip uninstall -y`
        # would also avoid the `echo y` pipe. Left unchanged to preserve
        # behavior.
        run_command(('echo y | pip uninstall %s' % model_id))

    def delete(self, model_id):
        """Uninstall the model's pip package if it is currently importable."""
        env = Environment()
        if env.has_module(model_id):
            self.logger.info('Uninstalling pip package {0}'.format(model_id))
            self.pip_uninstall(model_id)
class WaitkWhisper(SpeechToTextAgent):
    """Simultaneous speech-to-text agent applying a wait-k policy on top of
    a Whisper model (transcription or translation)."""

    def __init__(self, args):
        super().__init__(args)
        self.waitk_lagging = args.waitk_lagging
        self.source_segment_size = args.source_segment_size
        self.source_language = args.source_language
        self.continuous_write = args.continuous_write
        self.model_size = args.model_size
        self.model = whisper.load_model(self.model_size)
        self.task = args.task
        if (self.task == 'translate'):
            # Whisper's 'translate' task always targets English.
            assert (self.source_language != 'en'), 'source language must be different from en for translation task'

    # NOTE(review): takes no `self` — presumably decorated @staticmethod in
    # the original (the usual SimulEval agent convention); confirm.
    def add_args(parser):
        parser.add_argument('--waitk-lagging', default=1, type=int)
        parser.add_argument('--source-language', default='en', type=str)
        parser.add_argument('--continuous-write', default=1, type=int, help='Max number of words to write at each step')
        parser.add_argument('--model-size', default='tiny', type=str)
        parser.add_argument('--task', default='transcribe', type=str, choices=['transcribe', 'translate'])

    def policy(self, states: Optional[AgentStates]=None):
        """Wait-k policy: read until k segments are buffered, then decode the
        buffered audio and emit up to `continuous_write` new words."""
        if (states is None):
            states = self.states
        if (states.source_sample_rate == 0):
            # No audio received yet.
            length_in_seconds = 0
        else:
            length_in_seconds = (float(len(states.source)) / states.source_sample_rate)
        if (not states.source_finished):
            # Keep reading until at least `waitk_lagging` segments arrived.
            if (((length_in_seconds * 1000) / self.source_segment_size) < self.waitk_lagging):
                return ReadAction()
        # Condition decoding on the words already emitted.
        previous_translation = ' '.join(states.target)
        options = whisper.DecodingOptions(prefix=previous_translation, language=self.source_language, without_timestamps=True, fp16=False)
        audio = whisper.pad_or_trim(numpy.array(states.source).astype('float32'))
        mel = whisper.log_mel_spectrogram(audio).to(self.model.device)
        output = self.model.decode(mel, options)
        prediction = output.text.split()
        if ((not states.source_finished) and (self.continuous_write > 0)):
            # Limit partial output; the full text is emitted once source ends.
            prediction = prediction[:self.continuous_write]
        return WriteAction(content=' '.join(prediction), finished=states.source_finished)
def _fix_inout_args(func, argtypes, paramflags):
    """Wrap a ctypes/COM method so [in, out] parameters round-trip correctly.

    For each paramflag with direction bits 'in|out' (value 3), the wrapper
    obtains a value from positional args, keyword args, or a default-
    constructed instance of the argument's pointee type, normalizes it to
    that ctypes type, and substitutes it into the call. After the call the
    original objects are converted via __ctypes_from_outparam__ and merged
    back into the result.
    """
    SIMPLETYPE = type(ctypes.c_int)
    BYREFTYPE = type(ctypes.byref(ctypes.c_int()))

    def call_with_inout(self, *args, **kw):
        args = list(args)
        # outargs maps the position in func's out-parameter tuple to the
        # object we passed in for an [in, out] parameter.
        outargs = {}
        outnum = 0
        for (i, info) in enumerate(paramflags):
            direction = info[0]
            if ((direction & 3) == 3):
                # [in, out] parameter.
                name = info[1]
                # Pointee type of the POINTER(...) argtype.
                atyp = argtypes[i]._type_
                try:
                    try:
                        v = args[i]
                    except IndexError:
                        v = kw[name]
                except KeyError:
                    # Caller omitted it: default-construct one.
                    v = atyp()
                else:
                    # Coerce the provided value into the ctypes type.
                    if (getattr(v, '_type_', None) is atyp):
                        pass
                    elif (type(atyp) is SIMPLETYPE):
                        v = atyp(v)
                    else:
                        v = atyp.from_param(v)
                        assert (not isinstance(v, BYREFTYPE))
                outargs[outnum] = v
                outnum += 1
                if (len(args) > i):
                    args[i] = v
                else:
                    kw[name] = v
            elif ((direction & 2) == 2):
                # Plain [out] parameter: only advances the out index.
                outnum += 1
        rescode = func(self, *args, **kw)
        if (outnum == 1):
            # Single out value is returned bare, not as a tuple.
            if (len(outargs) == 1):
                rescode = rescode.__ctypes_from_outparam__()
            return rescode
        rescode = list(rescode)
        for (outnum, o) in outargs.items():
            rescode[outnum] = o.__ctypes_from_outparam__()
        return rescode
    return call_with_inout
def get_latest_version(package_name: str, index_url: str, default_version: str):
    """Ask `pip index versions` for the newest release of `package_name`.

    Uses whichever of `python`/`python3` is on PATH to invoke pip against
    `index_url`. Returns the first (newest) listed version, or
    `default_version` when python is missing, the command fails, or no
    'Available versions:' line is found.
    """
    python_command = shutil.which('python') or shutil.which('python3')
    if not python_command:
        print('Python command not found.')
        return default_version

    command = [python_command, '-m', 'pip', 'index', 'versions', package_name, '--index-url', index_url]
    result = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if result.returncode != 0:
        print('Error executing command.')
        print(result.stderr.decode())
        return default_version

    for line in result.stdout.decode().split('\n'):
        if 'Available versions:' in line:
            # pip lists versions newest-first, comma separated.
            available_versions = line.split(':')[1].strip()
            return available_versions.split(',')[0].strip()
    return default_version
class PostgreSQL(_PoolingBroker):
    """Pooled broker that resolves DHCP lease parameters from PostgreSQL."""

    # SQL joining the MAC->host map with its subnet definition. The `extra`
    # columns and the (optionally case-insensitive) MAC comparison are baked
    # in at import time from module-level config.
    _query_mac = 'SELECT\n m.ip, m.hostname,\n s.gateway, s.subnet_mask, s.broadcast_address, s.domain_name, s.domain_name_servers,\n s.ntp_servers, s.lease_time, s.subnet, s.serial{extra}\n FROM maps m, subnets s\n WHERE\n {mac} = %s AND\n m.subnet = s.subnet AND\n m.serial = s.serial\n LIMIT 1'.format(extra=((_extra and ','.join(itertools.chain(('',), ('m.{}'.format(i) for i in config.EXTRA_MAPS), ('s.{}'.format(i) for i in config.EXTRA_SUBNETS)))) or ''), mac=((config.CASE_INSENSITIVE_MACS and 'LOWER(m.mac)') or 'm.mac'))

    def __init__(self):
        """Build connection details from config and initialise the pool."""
        # Imported lazily so the module loads even without psycopg installed.
        import psycopg
        self._module = psycopg
        self._connection_details = {'dbname': config.POSTGRESQL_DATABASE, 'user': config.POSTGRESQL_USERNAME, 'password': config.POSTGRESQL_PASSWORD}
        # A None host means a local (Unix-socket) connection; port/sslmode
        # only apply to TCP connections.
        if (not (config.POSTGRESQL_HOST is None)):
            self._connection_details['host'] = config.POSTGRESQL_HOST
            self._connection_details['port'] = config.POSTGRESQL_PORT
            self._connection_details['sslmode'] = config.POSTGRESQL_SSLMODE
        _PoolingBroker.__init__(self, config.POSTGRESQL_MAXIMUM_CONNECTIONS)
        _logger.debug('PostgreSQL configured; connection-details: {}'.format(self._connection_details))
# NOTE(review): the bare `.parametrize(...)` lines below appear to be stripped
# `@pytest.mark.parametrize` decorators for test_forward — confirm against the
# original file before running.
.parametrize('batch_size', [1, 2])
.parametrize('src_len', [2, 8])
.parametrize('tgt_len', [2, 8])
.parametrize('num_features', [2, 8])
.parametrize('num_encoder_layers', [1, 6])
.parametrize('num_decoder_layers', [1, 6])
.parametrize('num_heads', [1, 6])
.parametrize('dim_feedforward', [2, 8])
def test_forward(batch_size: int, src_len: int, tgt_len: int, num_features: int, num_encoder_layers: int, num_decoder_layers: int, num_heads: int, dim_feedforward: int):
    """Smoke test: Transformer.forward returns (batch, tgt_len, features)."""
    model = Transformer(num_encoder_layers=num_encoder_layers, num_decoder_layers=num_decoder_layers, dim_model=num_features, num_heads=num_heads, dim_feedforward=dim_feedforward)
    src = torch.randn(batch_size, src_len, num_features)
    tgt = torch.randn(batch_size, tgt_len, num_features)
    out = model(src, tgt)
    # Output keeps the batch size and feature width, with target-length rows.
    (_batch_size, seq_len, _num_features) = out.shape
    assert (batch_size == _batch_size)
    assert (seq_len == tgt_len)
    assert (_num_features == num_features)
def stack_torch_dict_list(dict_list: List[Dict[(str, Union[(torch.Tensor, np.ndarray)])]], dim: int=0) -> Dict[(str, torch.Tensor)]:
    """Stack per-key arrays from a list of dicts into one dict of tensors.

    numpy arrays are converted to tensors first; for each key, the values
    collected across `dict_list` are stacked along `dim`. Every dict is
    expected to contribute compatible shapes for a given key.
    """
    collected = defaultdict(list)
    for entry in dict_list:
        for key, value in entry.items():
            tensor = torch.from_numpy(value) if isinstance(value, np.ndarray) else value
            collected[key].append(tensor)
    return {key: torch.stack(tensors, dim=dim) for key, tensors in collected.items()}
class TestConflictTarget(Test):
    """Data-quality test over the "not stable target" count reported by
    ConflictTargetMetric: ERROR without a target, FAIL when any unstable
    rows exist, SUCCESS otherwise."""

    group: ClassVar = DATA_QUALITY_GROUP.id
    name: ClassVar = 'Test number of conflicts in target'
    _metric: ConflictTargetMetric

    def __init__(self, is_critical: bool=True):
        self._metric = ConflictTargetMetric()
        super().__init__(is_critical=is_critical)

    # NOTE(review): accessed as `self.metric.get_result()` in check(), so
    # this is presumably decorated @property in the original — confirm.
    def metric(self):
        return self._metric

    def check(self):
        """Map the metric result to a TestResult with a human description."""
        metric_result = self.metric.get_result()
        if (metric_result.number_not_stable_target is None):
            test_result = TestStatus.ERROR
            description = 'No target in the dataset'
        elif (metric_result.number_not_stable_target > 0):
            test_result = TestStatus.FAIL
            description = f'Not stable target rows count is {metric_result.number_not_stable_target}'
        else:
            test_result = TestStatus.SUCCESS
            description = 'Target is stable'
        return TestResult(name=self.name, description=description, status=test_result, group=self.group)

    def groups(self) -> Dict[(str, str)]:
        # This test defines no extra grouping dimensions.
        return {}
class LeadGenCustomDisclaimer(AbstractObject):
    """Auto-generated Graph API object for a lead-gen custom disclaimer."""

    def __init__(self, api=None):
        super(LeadGenCustomDisclaimer, self).__init__()
        self._isLeadGenCustomDisclaimer = True
        self._api = api

    class Field(AbstractObject.Field):
        # Graph API field names for this object.
        body = 'body'
        checkboxes = 'checkboxes'
        title = 'title'

    # Field name -> Graph API type string, used for (de)serialization.
    _field_types = {'body': 'LeadGenCustomDisclaimerBody', 'checkboxes': 'list<LeadGenLegalContentCheckbox>', 'title': 'string'}

    # NOTE(review): takes `cls` — presumably @classmethod in the original
    # generated SDK code; the decorator is not visible in this extract.
    def _get_field_enum_info(cls):
        field_enum_info = {}
        return field_enum_info
# NOTE(review): the bare `(scope='function')` below appears to be the tail of a
# stripped `@pytest.fixture(scope='function')` decorator — confirm against the
# original file.
(scope='function')
def erasure_policy_string_rewrite_long(db: Session, oauth_client: ClientDetail) -> Generator:
    """Yield an erasure policy whose single rule rewrites user.name with a
    long fixed string; tears the policy/rule/target down afterwards."""
    erasure_policy = Policy.create(db=db, data={'name': 'example erasure policy string rewrite', 'key': 'example_erasure_policy_string_rewrite', 'client_id': oauth_client.id})
    erasure_rule = Rule.create(db=db, data={'action_type': ActionType.erasure.value, 'client_id': oauth_client.id, 'name': 'Erasure Rule', 'policy_id': erasure_policy.id, 'masking_strategy': {'strategy': StringRewriteMaskingStrategy.name, 'configuration': {'rewrite_value': 'some rewrite value that is very long and goes on and on'}}})
    rule_target = RuleTarget.create(db=db, data={'client_id': oauth_client.id, 'data_category': DataCategory('user.name').value, 'rule_id': erasure_rule.id})
    (yield erasure_policy)
    # Teardown: each delete tolerates the row having been removed by the test.
    try:
        rule_target.delete(db)
    except ObjectDeletedError:
        pass
    try:
        erasure_rule.delete(db)
    except ObjectDeletedError:
        pass
    try:
        erasure_policy.delete(db)
    except ObjectDeletedError:
        pass
def lazy_import():
    """Deferred model imports (generated OpenAPI client pattern).

    Importing these at call time instead of module load avoids circular
    imports between model modules; the classes are then published into this
    module's globals for later reference-by-name.
    """
    from fastly.model.included_with_waf_rule import IncludedWithWafRule
    from fastly.model.pagination import Pagination
    from fastly.model.pagination_links import PaginationLinks
    from fastly.model.pagination_meta import PaginationMeta
    from fastly.model.waf_rule_response_data import WafRuleResponseData
    from fastly.model.waf_rules_response_all_of import WafRulesResponseAllOf
    globals()['IncludedWithWafRule'] = IncludedWithWafRule
    globals()['Pagination'] = Pagination
    globals()['PaginationLinks'] = PaginationLinks
    globals()['PaginationMeta'] = PaginationMeta
    globals()['WafRuleResponseData'] = WafRuleResponseData
    globals()['WafRulesResponseAllOf'] = WafRulesResponseAllOf
def filter_query(query, tbl, wkt=None, distance=None, begin=None, end=None):
    """Apply optional spatial and temporal filters to `query` over `tbl`.

    With `wkt` set, restricts rows to geometries within the WKT shape, or —
    when `distance` is also given — within that distance of it. `begin` is
    an inclusive lower bound and `end` an exclusive upper bound on the
    timestamp column. Returns the (possibly) narrowed query.
    """
    if wkt is not None:
        if distance is not None:
            spatial_clause = geoalchemy2.func.ST_DWITHIN(tbl.c.geom, wkt, distance)
        else:
            spatial_clause = geoalchemy2.func.ST_WITHIN(tbl.c.geom, wkt)
        query = query.filter(spatial_clause)
    if begin is not None:
        query = query.filter(begin <= tbl.c.timestamp)
    if end is not None:
        query = query.filter(end > tbl.c.timestamp)
    return query
def upgrade():
    """Convert email columns to case-insensitive citext, creating the
    extension first."""
    op.execute('create extension if not exists citext')
    email_columns = (
        ('speaker', True),
        ('ticket_holders', True),
        ('video_stream_moderators', False),
    )
    for table_name, nullable in email_columns:
        op.alter_column(
            table_name,
            'email',
            existing_type=sa.VARCHAR(),
            type_=citext.CIText(),
            existing_nullable=nullable,
        )
class GuiControlProperty(IntEnum):
    """Per-control style property indices (raygui-style style table)."""
    # Border / base / text colors for the NORMAL interaction state.
    BORDER_COLOR_NORMAL = 0
    BASE_COLOR_NORMAL = 1
    TEXT_COLOR_NORMAL = 2
    # ... for the FOCUSED state.
    BORDER_COLOR_FOCUSED = 3
    BASE_COLOR_FOCUSED = 4
    TEXT_COLOR_FOCUSED = 5
    # ... for the PRESSED state.
    BORDER_COLOR_PRESSED = 6
    BASE_COLOR_PRESSED = 7
    TEXT_COLOR_PRESSED = 8
    # ... for the DISABLED state.
    BORDER_COLOR_DISABLED = 9
    BASE_COLOR_DISABLED = 10
    TEXT_COLOR_DISABLED = 11
    # State-independent layout/text properties.
    BORDER_WIDTH = 12
    TEXT_PADDING = 13
    TEXT_ALIGNMENT = 14
class MessageData(object):
    """Holds a message payload plus an optional list of extra payloads.

    Equality/inequality compare only `data`.

    Fixes:
    - `data_list` used a mutable default argument; it now defaults to None
      (behavior is unchanged — any falsy value was already replaced with a
      fresh list).
    - `__ne__(None)` returned False while `__eq__(None)` also returned
      False, so `x == None` and `x != None` were both False; `!=` against
      None now correctly returns True.
    """

    def __init__(self, data, data_list=None):
        self.data = data
        # Always store a fresh list when no (truthy) list was supplied.
        self.data_list = (data_list if data_list else [])

    def __str__(self):
        return utf8(self.data)

    def __eq__(self, other):
        if (other is not None):
            return (self.data == other.data)
        return False

    def __ne__(self, other):
        if (other is not None):
            return (self.data != other.data)
        return True
class OptionSeriesPyramid3dSonificationTracksMappingTime(Options):
    """Sonification time-mapping options for 3-D pyramid series (generated
    Highcharts wrapper).

    NOTE(review): the original body defined each accessor pair as two plain
    methods with the same name, so the setter silently shadowed the getter
    and reading the option became impossible.  That matches this wrapper's
    usual ``@property`` / ``@<name>.setter`` idiom with the decorators
    stripped; they are restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class JobQueue():
    """Priority queue of tasks with O(log n) push/pop and lazy deletion.

    Implements the classic heapq "mark as removed" recipe: cancelled
    entries stay in the heap, flagged with a sentinel, and are skipped
    when popped.  Tasks are keyed by ``repr(task)``.
    """

    def __init__(self, removed='<removed-task>'):
        self.prio_queue = []               # the heap of [priority, count, task]
        self.entry_finder = {}             # repr(task) -> heap entry
        self.removed = removed             # sentinel marking cancelled entries
        self.counter = itertools.count()   # tie-breaker; keeps FIFO among equals

    def add_task(self, task, priority=0):
        """Insert *task*, replacing any queued entry with the same repr."""
        key = repr(task)
        if key in self.entry_finder:
            self.remove_task(task)
        entry = [priority, next(self.counter), task]
        self.entry_finder[key] = entry
        heappush(self.prio_queue, entry)

    def remove_task(self, task):
        """Cancel *task*; raises KeyError if it is not queued."""
        self.remove_task_by_id(repr(task))

    def remove_task_by_id(self, task_id):
        """Cancel the entry keyed by *task_id* (a task's repr string)."""
        self.entry_finder.pop(task_id)[-1] = self.removed

    def pop_task(self):
        """Pop and return the lowest-priority task; KeyError when empty."""
        while self.prio_queue:
            _prio, _count, task = heappop(self.prio_queue)
            if task is self.removed:
                continue  # lazily discard a cancelled entry
            del self.entry_finder[repr(task)]
            return task
        raise KeyError('pop from an empty priority queue')
def test_task_set_log_configurations(task_definition):
    """set_log_configurations should fill the container's awslogs driver
    and options from (container, driver, option-key, value) tuples."""
    assert len(task_definition.containers[0]['logConfiguration']) == 0
    task_definition.set_log_configurations((
        (u'webserver', u'awslogs', u'awslogs-group', u'service_logs'),
        (u'webserver', u'awslogs', u'awslogs-region', u'eu-central-1'),
    ))
    # Re-read after the call; it should now carry driver + options.
    log_conf = task_definition.containers[0]['logConfiguration']
    assert len(log_conf) > 0
    assert 'logDriver' in log_conf
    assert log_conf['logDriver'] == 'awslogs'
    assert 'options' in log_conf
    options = log_conf['options']
    assert 'awslogs-group' in options
    assert options['awslogs-group'] == 'service_logs'
    assert 'awslogs-region' in options
    assert options['awslogs-region'] == 'eu-central-1'
# NOTE(review): the two lines below were bare expressions in the original
# (missing the leading '@'), so the route was never registered and the
# login check never applied; the decorator form is restored here.
@_ns.route('/package/build', methods=POST)
@_login_required
def package_build():
    """API endpoint: rebuild an existing package of a copr project.

    Returns the created build as JSON; raises ObjectNotFound when the
    package does not exist, InvalidForm on validation failure, and
    BadRequest when the package has no source definition.
    """
    copr = get_copr()
    # Keep list-valued form fields intact while renaming legacy field names.
    data = rename_fields(get_form_compatible_data(preserve=['python_versions', 'chroots', 'exclude_chroots']))
    form = forms.RebuildPackageFactory.create_form_cls(copr.active_chroots)(data, meta={'csrf': False})
    try:
        package = PackagesLogic.get(copr.id, form.package_name.data)[0]
    except IndexError:
        raise ObjectNotFound('No package with name {name} in copr {copr}'.format(name=form.package_name.data, copr=copr.name))
    if not form.validate_on_submit():
        raise InvalidForm(form)
    # Forward only the build options the client actually submitted.
    buildopts = {k: v for (k, v) in form.data.items() if (k in data)}
    try:
        build = PackagesLogic.build_package(flask.g.user, copr, package, form.selected_chroots, copr_dirname=form.project_dirname.data, **buildopts)
    except NoPackageSourceException as e:
        raise BadRequest(str(e))
    db.session.commit()
    return flask.jsonify(build_to_dict(build))
def are_files_equal(file1, file2, delta=1, skip=0):
    """Loosely compare two text files, ignoring line order.

    The first *skip* lines of each file are ignored.  Each remaining line
    of *file2* counts as a match when the same line text occurs anywhere
    in *file1* (duplicate lines collapse), otherwise as a mismatch.
    Returns True when fewer than *delta* mismatches occurred and at least
    (file1-line-count - delta) matches were found.
    """
    with open(file1, 'r') as fh:
        reference_lines = fh.readlines()[skip:]
    known_lines = set(reference_lines)           # membership lookup, O(1)
    reference_count = len(reference_lines)       # counts duplicates too

    matches = 0
    mismatches = 0
    with open(file2, 'r') as fh:
        for line in fh.readlines()[skip:]:
            if line in known_lines:
                matches += 1
            else:
                mismatches += 1

    return mismatches < delta and (reference_count - delta) <= matches
class Range(AttrDict):
    """Dict-like range with lt/lte/gt/gte bounds.

    Accepts a single dict or keyword arguments.  ``value in r`` tests the
    value against all configured bounds (strings instead test for operator
    key membership); ``upper()``/``lower()`` expose each bound together
    with an inclusivity flag.
    """

    # Operator keys mapped to the comparison applied as OPS[k](item, bound).
    OPS = {'lt': operator.lt, 'lte': operator.le, 'gt': operator.gt, 'gte': operator.ge}

    def __init__(self, *args, **kwargs):
        if args and (len(args) > 1 or kwargs or not isinstance(args[0], dict)):
            raise ValueError('Range accepts a single dictionary or a set of keyword arguments.')
        data = args[0] if args else kwargs
        for key in data:
            if key not in self.OPS:
                raise ValueError(f'Range received an unknown operator {key!r}')
        # gt/gte (and lt/lte) are mutually exclusive lower/upper bounds.
        if 'gt' in data and 'gte' in data:
            raise ValueError('You cannot specify both gt and gte for Range.')
        if 'lt' in data and 'lte' in data:
            raise ValueError('You cannot specify both lt and lte for Range.')
        super().__init__(data)

    def __repr__(self):
        bounds = ', '.join('%s=%r' % item for item in self._d_.items())
        return 'Range(%s)' % bounds

    def __contains__(self, item):
        # Strings keep plain dict-key semantics; other values are checked
        # against every configured bound.
        if isinstance(item, str):
            return super().__contains__(item)
        return all(
            self.OPS[op](item, self._d_[op])
            for op in self.OPS
            if op in self._d_
        )

    def upper(self):
        """Return (bound, inclusive) for the upper end, or (None, False)."""
        for op, inclusive in (('lt', False), ('lte', True)):
            if op in self._d_:
                return (self._d_[op], inclusive)
        return (None, False)

    def lower(self):
        """Return (bound, inclusive) for the lower end, or (None, False)."""
        for op, inclusive in (('gt', False), ('gte', True)):
            if op in self._d_:
                return (self._d_[op], inclusive)
        return (None, False)
class Solution():
    def oddEvenJumps(self, A: List[int]) -> int:
        """LeetCode 975 "Odd Even Jump": count starting indices from which
        the last index of A is reachable by alternating jumps.

        An odd jump from i goes to the index j > i holding the smallest
        A[j] >= A[i]; an even jump goes to the index with the largest
        A[j] <= A[i].  Jump targets are precomputed while scanning A from
        the right using sorted insertion lists, then reachability is
        solved by DP from the last index backwards.
        """
        # Sorted lists of (value, position) for indices already seen to
        # the right.  entries1 stores reversed positions, entries2 original
        # positions, so each bisect resolves ties on the correct side.
        entries1 = []
        entries2 = []
        # jumps[i] = (even_jump_target, odd_jump_target); None when absent.
        jumps = {}
        for (i, num) in enumerate(reversed(A)):
            entry1 = (num, i)
            entry2 = (num, ((len(A) - i) - 1))
            index1 = bisect.bisect(entries1, entry1)
            index2 = bisect.bisect(entries2, entry2)
            if (index2 == len(entries2)):
                # No value >= num to the right: odd jump impossible.
                bigger = None
            else:
                bigger = entries2[index2][1]
            if (index1 > 0):
                # Largest value <= num to the right; convert the reversed
                # position stored in entries1 back to an original index.
                smaller = ((len(A) - entries1[(index1 - 1)][1]) - 1)
            else:
                smaller = None
            jumps[((len(A) - i) - 1)] = (smaller, bigger)
            # O(n) list inserts keep both lists sorted (O(n^2) worst case).
            entries1.insert(index1, entry1)
            entries2.insert(index2, entry2)
        cnt = 1
        # track holds (index, flag) states from which the end is reachable:
        # flag False = "reachable when the next jump is odd" (i.e. a valid
        # starting index), flag True = "reachable when the next jump is
        # even".  The last index qualifies with either parity; cnt starts
        # at 1 to count it as a valid start.
        track = {((len(A) - 1), True), ((len(A) - 1), False)}
        for i in range((len(A) - 2), (- 1), (- 1)):
            if ((jumps[i][1], True) in track):
                # Odd jump from i lands on a state that finishes with an
                # even jump next -> i is a valid starting index.
                track.add((i, False))
                cnt += 1
            if ((jumps[i][0], False) in track):
                # Even jump from i lands on a state whose next jump is odd.
                track.add((i, True))
        return cnt
class OptionSeriesColumnSonificationDefaultspeechoptionsPointgrouping(Options):
    """Point-grouping options for column-series sonification speech tracks
    (generated Highcharts wrapper).

    NOTE(review): the original body defined each accessor pair as two plain
    methods with the same name, so the setter silently shadowed the getter.
    That matches this wrapper's usual ``@property`` / ``@<name>.setter``
    idiom with the decorators stripped; they are restored here.
    """

    @property
    def algorithm(self):
        # Default grouping algorithm: 'last'.
        return self._config_get('last')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        # Grouping is enabled by default.
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        # Default grouping timespan: 15.
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        # Default point property used for grouping: 'y'.
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
# NOTE(review): the skipif line was a bare expression in the original --
# the '@pytest.mark' decorator prefix was missing (the 'skipif' name and
# 'reason=' kwarg identify it); the decorator form is restored here.
@pytest.mark.skipif(sys.platform.startswith('darwin'), reason='No flock on MacOS')
def test_proxyfile_not_existing_but_locked(tmpdir, monkeypatch):
    """The proxy script must exit non-zero when the proxy file is empty
    but already flock-ed exclusively by another process."""
    monkeypatch.chdir(tmpdir)
    with open(PROXYFILE_FOR_TESTS, 'w', encoding='utf-8') as proxy_fd:
        # Hold the exclusive lock for the whole subprocess invocation.
        fcntl.flock(proxy_fd, fcntl.LOCK_EX)
        assert (os.stat(PROXYFILE_FOR_TESTS).st_size == 0)
        with pytest.raises(subprocess.CalledProcessError):
            result = subprocess.run([PROXYSCRIPT, '15399', PROXYFILE_FOR_TESTS], check=True, capture_output=True)
            # Only reached if the script unexpectedly succeeds; the output
            # then helps diagnose the failure of pytest.raises.
            print(str(result.stdout))
            print(str(result.stderr))
        fcntl.flock(proxy_fd, fcntl.LOCK_UN)
def build_json_report(all_metrics, worst_offenders):
    """Assemble the JSON-serializable report from collected metrics.

    all_metrics: per-filename dict with 'errors', 'metrics' (file-level)
    and 'functions' (per-function metric dicts) -- schema inferred from
    usage below; confirm against the collector.  worst_offenders: per
    metric name, an ordered list of worst files / (file, function) pairs,
    possibly padded with falsy entries.  Returns {'metrics': ...,
    'worst_case': ...}.
    """
    rv = {'metrics': {}, 'worst_case': {}}
    def format_metrics_result(mres):
        # Map one measurement to a status record: no limit -> measured
        # only; within limit -> ok; over limit with a recorded reason ->
        # justified; over limit otherwise -> fail.
        if (mres['limit'] is None):
            return {'status': 'measured only', 'measure': mres['measure']}
        elif (mres['measure'] <= mres['limit']):
            return {'status': 'checked: ok', 'measure': mres['measure'], 'limit': mres['limit']}
        elif mres['reason']:
            return {'status': 'checked: justified', 'measure': mres['measure'], 'limit': mres['limit'], 'justification': mres['reason']}
        else:
            return {'status': 'checked: fail', 'measure': mres['measure'], 'limit': mres['limit']}
    def format_ml(mlst):
        # Format every metric in a {name: measurement} mapping.
        return {name: format_metrics_result(mlst[name]) for name in mlst}
    for filename in all_metrics:
        # Files that failed to parse/measure are omitted from the report.
        if all_metrics[filename]['errors']:
            continue
        rv['metrics'][filename] = {'file_metrics': format_ml(all_metrics[filename]['metrics']), 'function_metrics': {fn: format_ml(all_metrics[filename]['functions'][fn]) for fn in all_metrics[filename]['functions']}}
    if worst_offenders:
        for file_metric in config.FILE_METRICS:
            if (file_metric not in worst_offenders):
                continue
            rv['worst_case'][file_metric] = []
            for file_name in worst_offenders[file_metric]:
                # A falsy entry marks the end of the populated slots --
                # presumably padding; confirm against the producer.
                if (not file_name):
                    break
                mdata = all_metrics[file_name]['metrics'][file_metric]
                tmp = format_metrics_result(mdata)
                tmp['file'] = file_name
                rv['worst_case'][file_metric].append(tmp)
        for function_metric in config.FUNCTION_METRICS:
            if (function_metric not in worst_offenders):
                continue
            rv['worst_case'][function_metric] = []
            for tup in worst_offenders[function_metric]:
                if (not tup):
                    break
                (file_name, function_name) = tup
                mdata = all_metrics[file_name]['functions'][function_name][function_metric]
                tmp = format_metrics_result(mdata)
                tmp['file'] = file_name
                tmp['function'] = function_name
                rv['worst_case'][function_metric].append(tmp)
    return rv
def run_server(args: Optional[argparse.Namespace]=None, debug: bool=False) -> None:
    """Start the dark-storage uvicorn server, generating an auth token and
    binding to an available port; blocks until the server stops.
    """
    if (args is None):
        args = parse_args()
    # Reuse an externally supplied token, otherwise generate one and export
    # it so child processes see the same token.
    if ('ERT_STORAGE_TOKEN' in os.environ):
        authtoken = os.environ['ERT_STORAGE_TOKEN']
    else:
        authtoken = generate_authtoken()
        os.environ['ERT_STORAGE_TOKEN'] = authtoken
    config_args: Dict[(str, Any)] = {}
    if (args.debug or debug):
        # Auto-reload on source changes in debug mode.
        config_args.update(reload=True, reload_dirs=[os.path.dirname(ert_shared_path)])
        os.environ['ERT_STORAGE_DEBUG'] = '1'
    (_, _, sock) = port_handler.find_available_port(custom_host=args.host)
    connection_info = _create_connection_info(sock, authtoken)
    # NOTE(review): NO_TOKEN is set unconditionally here and again in the
    # debug branch below -- the first assignment disables token auth for
    # every run; confirm whether that is intended.
    os.environ['ERT_STORAGE_NO_TOKEN'] = '1'
    # NOTE(review): os.path.abspath(args.config) is always truthy for a
    # string, so find_ert_config() can never be reached via 'or'; and
    # abspath(None) raises TypeError if args.config is unset -- likely
    # intended as "abspath(args.config) if args.config else
    # find_ert_config()".  Verify against callers before changing.
    os.environ['ERT_STORAGE_RES_CONFIG'] = (os.path.abspath(args.config) or find_ert_config())
    config = uvicorn.Config('ert.dark_storage.app:app', **config_args)
    server = Server(config, json.dumps(connection_info))
    logger = logging.getLogger('ert.shared.storage.info')
    log_level = (logging.INFO if args.verbose else logging.WARNING)
    logger.setLevel(log_level)
    logger.info('Storage server is ready to accept requests. Listening on:')
    for url in connection_info['urls']:
        logger.info(f' {url}')
    # NOTE(review): 'url' here is the loop variable left over from the loop
    # above, i.e. only the LAST URL's docs link is logged -- confirm intent.
    logger.info(f'''
 OpenAPI Docs: {url}/docs''')
    if (args.debug or debug):
        logger.info('\tRunning in NON-SECURE debug mode.\n')
        os.environ['ERT_STORAGE_NO_TOKEN'] = '1'
    # ChangeReload supervises the server process when auto-reload is on.
    if config.should_reload:
        supervisor = ChangeReload(config, target=server.run, sockets=[sock])
        supervisor.run()
    else:
        server.run(sockets=[sock])
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.