code stringlengths 281 23.7M |
|---|
def file_hash(fname, alg='sha256'):
    """Compute the hexadecimal digest of a file's contents.

    Parameters
    ----------
    fname : str
        Path of the file to hash.
    alg : str
        Hash algorithm name; must be a key of ``ALGORITHMS_AVAILABLE``.

    Returns
    -------
    str
        Hexadecimal digest of the file.

    Raises
    ------
    ValueError
        If *alg* is not one of the available algorithms.
    """
    if alg not in ALGORITHMS_AVAILABLE:
        raise ValueError(f"Algorithm '{alg}' not available to the pooch library. Only the following algorithms are available {list(ALGORITHMS_AVAILABLE.keys())}.")
    hasher = ALGORITHMS_AVAILABLE[alg]()
    # Stream the file in 64 KiB chunks so large files never load fully into memory.
    with open(fname, 'rb') as stream:
        for chunk in iter(lambda: stream.read(65536), b''):
            hasher.update(chunk)
    return hasher.hexdigest()
def main():
    """Command-line entry point for the LiteX SoC on the Arty A7-35.

    Parses CLI flags, constructs the SoC and Builder, optionally builds the
    bitstream, optionally loads it onto the board, then exits.
    """
    parser = argparse.ArgumentParser(description='LiteX SoC on Arty A7-35')
    parser.add_argument('--build', action='store_true', help='Build bitstream')
    parser.add_argument('--mode-single', action='store_true', help='Build bitstream')
    parser.add_argument('--load', action='store_true', help='Load bitstream')
    parser.add_argument('--flash', action='store_true', help='Flash Bitstream')
    # BUG FIX: the default used to be 0.0, which contradicts the help text and
    # would configure a 0 Hz system clock; default to 100 MHz as documented.
    parser.add_argument('--sys-clk-freq', default=100e6, help='System clock frequency (default: 100MHz)')
    builder_args(parser)
    soc_core_args(parser)
    args = parser.parse_args()
    # DOUBLE is the default; --mode-single selects the single-rate variant.
    m = mode.SINGLE if args.mode_single else mode.DOUBLE
    soc = BaseSoC(sys_clk_freq=int(float(args.sys_clk_freq)), mode=m, **soc_core_argdict(args))
    builder = Builder(soc, **builder_argdict(args))
    builder.build(run=args.build)
    if args.load:
        prog = soc.platform.create_programmer()
        prog.load_bitstream(os.path.join(builder.gateware_dir, soc.build_name + '.bit'))
    exit()
def application_factory(output: typing.Iterable[bytes]) -> 'WSGIApplication':
    """Wrap *output* in a validated WSGI application.

    The returned application always answers ``200 OK`` with a
    ``text/plain`` content type and streams the items of *output* as the
    response body. The app is wrapped in ``wsgiref.validate.validator`` so
    protocol violations raise immediately.
    """
    def application(environ, start_response):
        start_response('200 OK', [('Content-type', 'text/plain')])
        yield from output
    return wsgiref.validate.validator(application)
def test_using_block_identifier(accounts, tester):
    """Calls inside a multicall pinned with ``block_identifier`` read historic
    state, while calls outside the context read the latest state."""
    brownie.multicall.deploy({'from': accounts[0]})
    addr = accounts[1]
    first_value = ['blahblah', addr, ['yesyesyes', '0x1234']]
    tx = tester.setTuple(first_value)
    second_value = ['fooo', addr, ['nonono', '0x4321']]
    tester.setTuple(second_value)
    # Pin the multicall at the block of the first write: the old tuple must be visible.
    with brownie.multicall(block_identifier=tx.block_number):
        assert tester.getTuple(addr) == first_value
    # Outside the pinned context the second write is visible again.
    assert tester.getTuple(addr) == second_value
def extractYuyuchannelWordpressCom(item):
    """Map a yuyuchannel.wordpress.com feed item to a release message.

    Returns None for previews or items without chapter/volume info, a
    release message for recognised tags, or False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('lost temple', 'Lost Temple', 'translated'),
        ('fengyujiutian', 'Feng Yu Jiu Tian', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class TestRTAs(unittest.TestCase):
    """Sanity checks over the available RTA test definitions."""

    def test_rtas_with_triggered_rules_have_uuid(self):
        """Every RTA must carry a uuid, and each of its SIEM rules a rule_id and rule_name."""
        required_rule_keys = ('rule_id', 'rule_name')
        ordered_tests = sorted(get_available_tests().values(), key=lambda entry: entry['name'])
        for rta_test in ordered_tests:
            self.assertIsNotNone(rta_test.get('uuid'), f"RTA {rta_test.get('name')} missing uuid")
            for rule_info in rta_test.get('siem'):
                for rule_key in required_rule_keys:
                    self.assertIsNotNone(rule_info.get(rule_key), f"RTA {rta_test.get('name')} - {rta_test.get('uuid')} missing {rule_key}")
class Unzip(ExtractorProcessor):
    """Post-processor that extracts a downloaded ZIP archive."""

    suffix = '.unzip'

    def _extract_file(self, fname, extract_dir):
        """Extract *fname* into *extract_dir*, honouring ``self.members``."""
        with ZipFile(fname, 'r') as archive:
            if self.members is None:
                # No member filter: unpack the whole archive.
                get_logger().info("Unzipping contents of '%s' to '%s'", fname, extract_dir)
                archive.extractall(path=extract_dir)
                return
            for member in self.members:
                get_logger().info("Extracting '%s' from '%s' to '%s'", member, fname, extract_dir)
                # Select the member itself plus anything nested under it.
                prefix = os.path.normpath(member)
                selected = [name for name in archive.namelist() if os.path.normpath(name).startswith(prefix)]
                archive.extractall(members=selected, path=extract_dir)
def test_top_level() -> None:
    """A dependency resolved through its declared top-level name is classified
    as neither standard library nor local, with no package attribution."""
    dependency = Dependency('beautifulsoup4', Path('pyproject.toml'))
    dependency.top_levels = {'bs4'}
    built = ModuleBuilder('bs4', {'foo', 'bar'}, frozenset(), [dependency]).build()
    assert built.package is None
    assert built.standard_library is False
    assert built.local_module is False
class OptionPlotoptionsScatter3dSonificationTracksMappingTremolo(Options):
    """Generated config node for the scatter3d sonification tracks mapping `tremolo` options."""
    def depth(self) -> 'OptionPlotoptionsScatter3dSonificationTracksMappingTremoloDepth':
        """Return the child config node for the ``depth`` sub-option."""
        return self._config_sub_data('depth', OptionPlotoptionsScatter3dSonificationTracksMappingTremoloDepth)
    def speed(self) -> 'OptionPlotoptionsScatter3dSonificationTracksMappingTremoloSpeed':
        """Return the child config node for the ``speed`` sub-option."""
        return self._config_sub_data('speed', OptionPlotoptionsScatter3dSonificationTracksMappingTremoloSpeed)
class OptionSeriesAreasplinerangeSonificationDefaultspeechoptionsMappingVolume(Options):
    """Generated config node for the areasplinerange sonification default-speech
    `mapping.volume` options.

    NOTE(review): in the extracted source each option had a bare getter def
    immediately shadowed by a setter def of the same name, leaving the getters
    dead code. They are restored here as @property/@setter pairs, which is the
    convention for these generated Options classes -- confirm against upstream.
    """

    @property
    def mapFunction(self):
        """``mapFunction`` option value (unset by default)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """``mapTo`` option value (unset by default)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """``max`` option value (unset by default)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """``min`` option value (unset by default)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """``within`` option value (unset by default)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class Bill(models.Model):
    """A bill composed of line items (base charges, custom items, fees) and payments."""
    generated_on = models.DateTimeField(auto_now=True)  # auto-updated on each save
    comment = models.TextField(blank=True, null=True)
    def __str__(self):
        return ('Bill %d' % self.id)
    def non_refund_payments(self):
        """Payments with a strictly positive paid amount (i.e. excluding refunds)."""
        return self.payments.filter(paid_amount__gt=0)
    def total_paid(self):
        """Sum of all payment amounts as a Decimal; 0 when there are no payments."""
        payments = self.payments.all()
        if (not payments):
            return 0
        paid = Decimal(0)
        for payment in payments:
            paid = (paid + payment.paid_amount)
        return paid
    def total_owed(self):
        """Amount still owed: billed amount minus total paid."""
        return (self.amount() - self.total_paid())
    def amount(self):
        """Total billed to the payer: every line item except house-paid fees."""
        amount = 0
        for line_item in self.line_items.all():
            if ((not line_item.fee) or (not line_item.paid_by_house)):
                amount = (amount + line_item.amount)
        return amount
    def total_owed_in_cents(self):
        """Amount owed expressed in integer cents (int() truncates, not rounds)."""
        return int((self.total_owed() * 100))
    def subtotal_amount(self):
        """Sum of the non-fee (subtotal) line items."""
        base_fees = self.subtotal_items()
        return sum([item.amount for item in base_fees])
    def subtotal_items(self):
        """Non-fee line items: the base resource charge first, then custom items."""
        base_resource_fee = self.line_items.filter(fee__isnull=True).filter(custom=False)
        addl_fees = self.line_items.filter(fee__isnull=True).filter(custom=True)
        return (list(base_resource_fee) + list(addl_fees))
    def fees(self):
        """All fee line items, as a list."""
        bill_fees = self.line_items.filter(fee__isnull=False)
        return list(bill_fees)
    def house_fees(self):
        """Total of fee line items that are paid by the house."""
        amount = 0
        for line_item in self.line_items.all():
            if (line_item.fee and line_item.paid_by_house):
                amount = (amount + line_item.amount)
        return amount
    def non_house_fees(self):
        """Total of fee line items that are NOT paid by the house."""
        amount = 0
        for line_item in self.line_items.all():
            if (line_item.fee and (not line_item.paid_by_house)):
                amount = (amount + line_item.amount)
        return amount
    def to_house(self):
        """Net amount destined for the house: billed amount minus all fees."""
        return ((self.amount() - self.non_house_fees()) - self.house_fees())
    def is_paid(self):
        """True once payments cover (or exceed) the billed amount."""
        return (self.total_owed() <= 0)
    def time_ordered_payments(self):
        """Payments ordered oldest-first by payment date."""
        return self.payments.order_by('payment_date')
    def payment_date(self):
        """Date of the most recent payment, or None when there are no payments."""
        last_payment = self.payments.order_by('payment_date').reverse().first()
        if last_payment:
            return last_payment.payment_date
        else:
            return None
    def ordered_line_items(self):
        """Line items in display order: base resource item, custom items, then fees."""
        resource_item = self.line_items.filter(custom=False).filter(fee=None)
        custom_items = self.line_items.filter(custom=True)
        fees = self.line_items.filter(fee__isnull=False)
        return ((list(resource_item) + list(custom_items)) + list(fees))
    def is_booking_bill(self):
        """Whether a related ``bookingbill`` attribute is present on this bill."""
        return hasattr(self, 'bookingbill')
    def is_subscription_bill(self):
        """Whether a related ``subscriptionbill`` attribute is present on this bill."""
        return hasattr(self, 'subscriptionbill')
class RangeZeroOneObservationNormalizationStrategy(ObservationNormalizationStrategy):
    """Normalizes observations into [0, 1] via min/max scaling with estimated statistics."""
    # NOTE(review): this bare expression (and the one before normalize_value) looks
    # like the remnant of a stripped decorator such as
    # `@override(ObservationNormalizationStrategy)` -- confirm against upstream.
    (ObservationNormalizationStrategy)
    def estimate_stats(self, observations: List[np.ndarray]) -> StatisticsType:
        """Estimate per-axis min/max statistics over the stacked observations.

        :param observations: observation arrays; stacked with np.vstack, so they
            must be stackable along the first dimension.
        :return: dict with 'min' and 'max' arrays (promoted to float32 arrays
            when the reduction yields scalars).
        """
        array = np.vstack(observations)
        # Keep reduced dims only when reducing along specific axes; the retained
        # leading (stacking) dimension is dropped again below.
        keepdims = (False if (self._axis is None) else True)
        min_val: np.ndarray = np.min(array, axis=self._axis, keepdims=keepdims)
        max_val: np.ndarray = np.max(array, axis=self._axis, keepdims=keepdims)
        if keepdims:
            # Drop the leading dimension kept alive by keepdims.
            min_val = min_val[0]
            max_val = max_val[0]
        if (not isinstance(min_val, np.ndarray)):
            # Full reduction produced Python/NumPy scalars; promote to float32 arrays.
            min_val = np.asarray(min_val, np.float32)
            max_val = np.asarray(max_val, np.float32)
        statistics = {'min': min_val, 'max': max_val}
        return statistics
    (ObservationNormalizationStrategy)
    def normalize_value(self, value: np.ndarray) -> np.ndarray:
        """Min/max-scale *value* into [0, 1] using the previously estimated statistics."""
        return ((value - self._statistics['min']) / (self._statistics['max'] - self._statistics['min']))
def get_cat_by_id(cat_names: list[tuple[(str, int, int)]], cat_id_to_search: int) -> list[tuple[(str, int, int)]]:
    """Return every (name, id, form) entry whose id matches *cat_id_to_search*.

    Args:
        cat_names: Sequence of (cat_name, cat_id, cat_form) tuples.
        cat_id_to_search: Cat id to filter on.

    Returns:
        All matching tuples in their original order; empty when none match.
    """
    # Comprehension instead of the manual append loop (same order, same result).
    return [entry for entry in cat_names if entry[1] == cat_id_to_search]
def parse_arguments():
    """Parse command-line arguments for the EDSM star-data review tool.

    :return: the parsed argparse namespace.
    :raises UsageError: when argument combinations are invalid (no reference
        system without --summary, or --ref/--max-ly without --distance).
    """
    parser = argparse.ArgumentParser(description='Review and validate incoming EDSM star data.', epilog='Confirmed systems are written to tmp/new.systems.csv.')
    parser.add_argument('refSystem', help='*Exact* name of the system you are *currently* in, used as a reference system for distance validations.', type=str, metavar='"REFERENCE SYSTEM"', default=None, nargs='?')
    parser.add_argument('--cmdr', required=False, help='Specify your commander name.', type=str, default=os.environ.get('CMDR', None))
    # --random and --distance are mutually exclusive selection modes.
    grp = parser.add_mutually_exclusive_group()
    # NOTE(review): `if grp:` is always truthy (the group object exists); the
    # guard is redundant but harmless.
    if grp:
        grp.add_argument('--random', action='store_true', required=False, help='Show systems in random order, maximum of --max-systems.')
        grp.add_argument('--distance', action='store_true', required=False, help='Select upto 10 systems by proximity.')
    parser.add_argument('--max-systems', dest='maxSystems', help='Maximum systems to query with --random/--distance.', required=False, type=int, default=10)
    parser.add_argument('--max-ly', dest='maxLy', help='Maximum distance to reference systems (for --distance).', required=False, type=int, default=0)
    parser.add_argument('--add-to-local-db', '-A', action='store_true', required=False, help='Add accepted systems to the local database.', dest='add')
    parser.add_argument('--date', required=False, help='Use specified date (YYYY-MM-DD HH:MM:SS format) for start of update search. Default is to use the last System modified date.', type=str, default=None)
    parser.add_argument('--no-splash', '-NS', required=False, action='store_false', help="Don't display the 'splash' text on startup.", dest='splash', default=True)
    parser.add_argument('--summary', required=False, help='Check for and report on new systems but do no work.', action='store_true')
    parser.add_argument('--detail', '-v', help='Increase level of detail in output.', default=0, required=False, action='count')
    parser.add_argument('--debug', '-w', help='Enable/raise level of diagnostic output.', default=0, required=False, action='count')
    parser.add_argument('--ref', help='Reference system (for --distance).', default=None, dest='refSys', type=str)
    parser.add_argument('--log-edsm', required=False, help='Log the EDSM request and response in tmp/edsm.log.', default=False, dest='logEDSM', action='store_true')
    parser.add_argument('--yes', required=False, help='Answer "y" to autoconfirm all EDSM systems.', default=False, dest='autoOK', action='store_true')
    argv = parser.parse_args(sys.argv[1:])
    # Cross-argument validation beyond what argparse expresses directly.
    if (not argv.summary):
        if (not argv.refSystem):
            raise UsageError('Must specify a reference system name (when not using --summary). Be sure to put the name in double quotes, e.g. "SOL" or "I BOOTIS".\n')
    if ((not argv.distance) and argv.refSys):
        raise UsageError('--ref requires --distance')
    if ((not argv.distance) and argv.maxLy):
        raise UsageError('--max-ly requires --distance')
    return argv
class Client(Entity):
    """A client entity: an external party with member users, projects, and goods."""
    __auto_name__ = False
    __tablename__ = 'Clients'
    __mapper_args__ = {'polymorphic_identity': 'Client'}
    client_id = Column('id', Integer, ForeignKey('Entities.id'), primary_key=True)
    # Proxy over user_role so `client.users` works directly with User instances.
    users = association_proxy('user_role', 'user', creator=(lambda n: ClientUser(user=n)))
    user_role = relationship('ClientUser', back_populates='client', cascade='all, delete-orphan', primaryjoin='Clients.c.id==Client_Users.c.cid', doc='List of users representing the members of this client.')
    # Proxy over project_role so `client.projects` works directly with Project instances.
    projects = association_proxy('project_role', 'project', creator=(lambda p: create_project_client(p)))
    project_role = relationship('ProjectClient', back_populates='client', cascade='all, delete-orphan', primaryjoin='Clients.c.id==Project_Clients.c.client_id')
    goods = relationship('Good', back_populates='client', cascade='all', primaryjoin='Clients.c.id==Goods.c.client_id')
    def __init__(self, users=None, projects=None, **kwargs):
        """Create a Client; *users* and *projects* default to empty lists."""
        super(Client, self).__init__(**kwargs)
        if (users is None):
            users = []
        if (projects is None):
            projects = []
        self.users = users
        self.projects = projects
    def __eq__(self, other):
        """Equal when Entity equality holds and *other* is also a Client."""
        return (super(Client, self).__eq__(other) and isinstance(other, Client))
    def __hash__(self):
        # Must be restated: defining __eq__ suppresses the inherited __hash__.
        return super(Client, self).__hash__()
    def to_tjp(self):
        """TaskJuggler export: clients contribute no tjp output."""
        return ''
    # NOTE(review): this bare ('goods') expression looks like the remnant of a
    # stripped `@validates('goods')` decorator for the validator below -- confirm.
    ('goods')
    def _validate_good(self, key, good):
        """Validate that items assigned to `goods` are stalker Good instances."""
        # Imported locally (presumably to avoid a circular import at module load).
        from stalker.models.budget import Good
        if (not isinstance(good, Good)):
            raise TypeError(('%s.goods attribute should be all stalker.models.budget.Good instances, not %s' % (self.__class__.__name__, good.__class__.__name__)))
        return good
class AutocompleteFormats():
    """Formatting helpers that attach custom jQuery UI rendering to an autocomplete component."""
    def __init__(self, component: primitives.HtmlModel):
        # The HTML component whose underlying jQuery autocomplete gets configured.
        self.component = component
    def groups(self):
        """Override the autocomplete's ``_renderItem`` so each entry shows its label and category."""
        return JsUtils.jsWrap(('\n%s.autocomplete("instance")._renderItem = function( ul, item ) {\n return $( "<li>" )\n .append( "<div>" + item.label + "<br>" + item.category + "</div>" )\n .appendTo( ul );\n}' % self.component.dom.jquery.varId))
def filter_icap_server_group_data(json):
    """Project the payload down to the icap server-group options.

    Only 'ldb_method', 'name' and 'server_list' survive, and only when they
    are present with a non-None value after invalid fields are removed.
    """
    wanted = ('ldb_method', 'name', 'server_list')
    cleaned = remove_invalid_fields(json)
    return {key: cleaned[key] for key in wanted if key in cleaned and cleaned[key] is not None}
def parse_turbo_exstates_re(text):
    """Parse excitation energies from Turbomole ``$excitation_energies_<model>_...`` blocks.

    Returns a dict mapping each model name (e.g. 'CC2') to a float ndarray of
    the energies listed in its block.
    """
    prefix = 'excitation_energies_'
    model_pattern = re.compile(prefix + '(\\w+?)_')
    parsed = {}
    # Data groups in the control file are separated by '$'.
    for section in text.strip().split('$'):
        if not section.startswith(prefix):
            continue
        header, *energy_lines = section.strip().split('\n')
        model = model_pattern.search(header).group(1)
        rows = [line.strip().split() for line in energy_lines]
        # Every data row must be "<state index> <energy>".
        assert all(len(row) == 2 for row in rows)
        parsed[model] = np.array([energy for _, energy in rows], dtype=float)
    return parsed
class BlobStorage(EvidentlyBaseModel, ABC):
    """Abstract interface for storing snapshot blobs (JSON payloads) by id."""

    def open_blob(self, id: BlobID):
        """Open the stored blob identified by *id* for reading."""
        raise NotImplementedError

    def put_blob(self, path: str, obj):
        """Store *obj* under *path*."""
        raise NotImplementedError

    def get_snapshot_blob_id(self, project_id: UUID, snapshot: Snapshot) -> BlobID:
        """Compute the blob id under which a project's snapshot is stored."""
        raise NotImplementedError

    def put_snapshot(self, project_id: UUID, snapshot: Snapshot) -> BlobID:
        """Serialize *snapshot* to JSON, store it, and return its blob id."""
        blob_id = self.get_snapshot_blob_id(project_id, snapshot)
        payload = json.dumps(snapshot.dict(), cls=NumpyEncoder)
        self.put_blob(blob_id, payload)
        return blob_id
class OptionSeriesSolidgaugeSonificationContexttracksMappingTime(Options):
    """Generated config node for the solidgauge sonification context-tracks
    `mapping.time` options.

    NOTE(review): in the extracted source each option had a bare getter def
    immediately shadowed by a setter def of the same name, leaving the getters
    dead code. They are restored here as @property/@setter pairs, which is the
    convention for these generated Options classes -- confirm against upstream.
    """

    @property
    def mapFunction(self):
        """``mapFunction`` option value (unset by default)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """``mapTo`` option value (unset by default)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """``max`` option value (unset by default)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """``min`` option value (unset by default)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """``within`` option value (unset by default)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class ClimetlabTestingSource(FileSource):
    """File source that produces synthetic test data via the GENERATORS registry."""
    def __init__(self, kind, request=None, force=False, extension=None, **kwargs):
        """Create (or fetch from cache) a generated test file.

        :param kind: key into GENERATORS selecting (generator function, default extension).
        :param request: parameters forwarded to the generator; merged with **kwargs.
        :param force: passed to cache_file to force regeneration.
        :param extension: overrides the generator's default file extension.
        """
        super().__init__()
        if (request is None):
            request = {}
        # NOTE(review): extra keyword arguments are merged into the caller's
        # request dict, mutating it in place when one was passed.
        request.update(kwargs)
        (generate, ext) = GENERATORS[kind]
        if (extension is not None):
            ext = extension
        def _generate(target, args):
            # Adapter so the cache layer can invoke the generator with the request dict.
            return generate(target, **args)
        self.path = self.cache_file(_generate, request, hash_extra=kind, extension=ext, force=force)
def initialize_engines():
    """Discover engine module names in the bundled 'engines' directory.

    Scans for ``*.py`` files next to this module and returns their base names,
    skipping empty names and private modules (leading underscore).
    """
    engine_dir = path.join(path.dirname(__file__), 'engines')
    candidates = (path.basename(p).split('.')[0].strip() for p in glob(path.join(engine_dir, '*.py')))
    return [name for name in candidates if name and not name.startswith('_')]
def bulkCmd(snmpEngine, authData, transportTarget, contextData, nonRepeaters, maxRepetitions, *varBinds, **options):
    """Dispatch a SNMP GETBULK request through the command generator.

    Resolves the supplied var-binds via the MIB view processor, configures the
    LCD for the target, and sends a BulkCommandGenerator request. Recognised
    options: ``lookupMib`` (default True), ``cbFun``, ``cbCtx``.

    :return: the result of BulkCommandGenerator().sendVarBinds (the request handle).
    """
    def __cbFun(snmpEngine, sendRequestHandle, errorIndication, errorStatus, errorIndex, varBindTable, cbCtx):
        # Translate the raw var-bind rows back through the MIB before handing
        # them to the user callback.
        (lookupMib, cbFun, cbCtx) = cbCtx
        varBindTable = [VB_PROCESSOR.unmakeVarBinds(snmpEngine.cache, varBindTableRow, lookupMib) for varBindTableRow in varBindTable]
        # NOTE(review): the user callback receives snmpEngine.cache (not the
        # engine itself) as its first argument -- confirm this matches the
        # callback signature callers expect.
        return cbFun(snmpEngine.cache, sendRequestHandle, errorIndication, errorStatus, errorIndex, varBindTable, cbCtx)
    (addrName, paramsName) = LCD.configure(snmpEngine, authData, transportTarget, contextData.contextName)
    varBinds = VB_PROCESSOR.makeVarBinds(snmpEngine.cache, varBinds)
    return cmdgen.BulkCommandGenerator().sendVarBinds(snmpEngine, addrName, contextData.contextEngineId, contextData.contextName, nonRepeaters, maxRepetitions, varBinds, __cbFun, (options.get('lookupMib', True), options.get('cbFun'), options.get('cbCtx')))
# NOTE(review): these two bare string expressions look like the remnants of
# stripped registration decorators for 'cuda.reshape.gen_function' and
# 'cuda.flatten.gen_function' -- confirm against upstream.
('cuda.reshape.gen_function')
('cuda.flatten.gen_function')
def reshape_gen_function(func_attrs, shape_eval_template):
    """Render the CUDA source for a reshape/flatten op from its attributes.

    :param func_attrs: op attribute dict; reads 'name', 'unknown_idx',
        'inputs', and 'outputs'.
    :param shape_eval_template: template rendering the output-shape computation.
    :return: the rendered source string for the generated function.
    """
    func_name = ('ait_' + func_attrs['name'])
    unknown_idx = func_attrs['unknown_idx']
    input_ndim = len(func_attrs['inputs'][0]._attrs['shape'])
    if _is_intvar(func_attrs):
        # NOTE(review): presumably the last input carries the target shape
        # rather than data in the IntVar case, hence inputs-minus-one -- confirm.
        input_ndim = (len(func_attrs['inputs']) - 1)
    output_ndim = len(func_attrs['outputs'][0]._attrs['shape'])
    input_args = INPUT_ARGS_TEMPLATE.render(input_ndim=input_ndim)
    output_args = OUTPUT_ARGS_TEMPLATE.render(output_ndim=output_ndim)
    shape_functions = shape_eval_template.render(indent=' ', dtype='int64_t ', input_ndim=input_ndim, output_ndim=output_ndim, unknown_idx=unknown_idx)
    return SRC_TEMPLATE.render(function_name=func_name, shape_functions=shape_functions.strip(), input_args=input_args.strip(), output_args=output_args.strip())
class Pessoa(Base):
    """ORM model for a person ('pessoa') with related Produto rows."""
    __tablename__ = 'pessoas'
    id = Column(Integer, primary_key=True)
    nome = Column(String)  # person's name
    idade = Column(Integer)  # person's age
    # One-to-many: products reachable here, with a 'pessoas' backref on Produto.
    produtos = relationship(Produto, backref='pessoas')
    def __repr__(self):
        return f'Pessoa(id={self.id}, nome={self.nome}, idade={self.idade})'
def test_supports_trajectory_recording_wrapper():
    """An AgentDeployment whose env is wrapped for trajectory recording feeds
    step records to registered trajectory writers."""
    class TestWriter(TrajectoryWriter):
        # Accumulates the number of step records received across write() calls.
        def __init__(self):
            self.step_count = 0
        def write(self, episode_record: StateTrajectoryRecord):
            self.step_count += len(episode_record.step_records)
            assert (episode_record.renderer is not None)
    step_count = 10
    writer = TestWriter()
    # Register our writer, clearing any writers left over from other tests.
    TrajectoryWriterRegistry.writers = []
    TrajectoryWriterRegistry.register_writer(writer)
    agent_deployment = AgentDeployment(policy=DummyGreedyPolicy(), env=TrajectoryRecordingWrapper.wrap(build_dummy_maze_env()))
    # Drive a separate env copy and feed its states through the deployment.
    test_core_env = build_dummy_maze_env().core_env
    maze_state = test_core_env.reset()
    (reward, done, info) = (None, None, None)
    for i in range(10):
        maze_action = agent_deployment.act(maze_state, reward, done, info)
        (maze_state, reward, done, info) = test_core_env.step(maze_action)
    agent_deployment.close(maze_state, reward, done, info)
    # NOTE(review): 10 loop steps plus one extra record -- presumably the final
    # state flushed on close(); confirm against the wrapper's semantics.
    assert (writer.step_count == (step_count + 1))
def test_notebook_batch_task():
    """The notebook batch task serializes to a container whose args invoke
    pyflyte-execute with the default task resolver and this task's module/name."""
    serialization_settings = flytekit.configuration.SerializationSettings(project='project', domain='domain', version='version', env=None, image_config=ImageConfig(Image(name='name', fqn='image', tag='name')))
    assert (nb_batch.get_container(serialization_settings) is not None)
    assert (nb_batch.get_container(serialization_settings).args == ['pyflyte-execute', '--inputs', '{{.input}}', '--output-prefix', '{{.outputPrefix}}/0', '--raw-output-data-prefix', '{{.rawOutputDataPrefix}}', '--resolver', 'flytekit.core.python_auto_container.default_task_resolver', '--', 'task-module', 'tests.test_task', 'task-name', 'nb_batch'])
# NOTE(review): this bare `_event` expression appears to be the remnant of a
# stripped decorator from the original source -- confirm against upstream.
_event
class AdminsAdded(ThreadEvent):
    """Event: one or more people were made admins of a group thread."""
    # The users that were added as admins.
    added = attr.ib(type=Sequence['_threads.User'])
    # When the action happened.
    at = attr.ib(type=datetime.datetime)
    def _parse(cls, session, data):
        # NOTE(review): takes `cls` and returns `cls(...)` -- presumably a
        # stripped @classmethod decorator; confirm before calling on instances.
        (author, thread, at) = cls._parse_metadata(session, data)
        subject = _threads.User(session=session, id=data['untypedData']['TARGET_ID'])
        return cls(author=author, thread=thread, added=[subject], at=at)
# NOTE(review): this bare tuple expression looks like the argument list of a
# stripped `@mock.patch('bodhi.server.push.initialize_db', mock.MagicMock())`
# decorator -- confirm against upstream.
('bodhi.server.push.initialize_db', mock.MagicMock())
def _make_task(transactional_session_maker, extra_push_args=None):
    """Invoke the bodhi push CLI with the compose task machinery mocked out.

    Runs the push command as user 'bowlofeggs', answering 'y' to the prompts,
    asserts that exactly one compose task was queued, and returns the keyword
    arguments it was queued with.
    """
    if (extra_push_args is None):
        extra_push_args = []
    cli = testing.CliRunner()
    with mock.patch('bodhi.server.push.compose_task') as compose:
        with mock.patch('bodhi.server.push.transactional_session_maker', return_value=transactional_session_maker):
            cli.invoke(push.push, (['--username', 'bowlofeggs'] + extra_push_args), input='y\ny')
            assert (len(compose.delay.mock_calls) == 1)
            return compose.delay.call_args_list[0][1]
def simulate(r, param):
    """Simulate the evolution of Gram-Schmidt log-norms under BKZ reduction.

    :param r: basis profile; converted to log-norms via _extract_log_norms.
    :param param: BKZ parameter object; reads block_size, max_loops, and flags.
    :return: tuple (squared norms after simulation, number of loops performed).

    NOTE(review): `rk` is not defined in this function -- presumably a
    module-level table of precomputed tail log-norms; confirm against upstream.
    """
    r = _extract_log_norms(r)
    d = len(r)
    r1 = copy(r)
    r2 = copy(r)
    # c[beta-1]: expected log-norm correction for block size beta. Small betas
    # come from the rk table, larger ones from the Gaussian-heuristic formula.
    c = [(rk[(- i)] - (sum(rk[(- i):]) / i)) for i in range(1, 46)]
    c += [(((lgamma(((beta / 2.0) + 1)) * (1.0 / beta)) - log(sqrt(pi))) / log(2.0)) for beta in range(46, (param.block_size + 1))]
    # Number of simulated BKZ tours.
    if param.max_loops:
        N = param.max_loops
    else:
        N = d
    for i in range(N):
        # phi stays True until the first index whose norm actually changes.
        phi = True
        for k in range((d - min(45, param.block_size))):
            beta = min(param.block_size, (d - k))
            f = (k + beta)
            # Log-volume of the projected block [k, f).
            logV = (sum(r1[:f]) - sum(r2[:k]))
            lma = ((logV / beta) + c[(beta - 1)])
            if phi:
                if (lma < r1[k]):
                    r2[k] = lma
                    phi = False
            else:
                r2[k] = lma
        # Stop when nothing changed in this tour (fixed point reached).
        if (phi or (r1 == r2)):
            break
        else:
            # Handle the last beta indices using the precomputed tail profile.
            beta = min(45, param.block_size)
            logV = (sum(r1) - sum(r2[:(- beta)]))
            if (param.block_size < 45):
                tmp = (sum(rk[(- param.block_size):]) / param.block_size)
                rk1 = [(r_ - tmp) for r_ in rk[(- param.block_size):]]
            else:
                rk1 = rk
            for (k, r) in zip(range((d - beta), d), rk1):
                r2[k] = ((logV / beta) + r)
            r1 = copy(r2)
        if (param.flags & BKZ.VERBOSE):
            # Report basis-quality statistics for this tour.
            r = OrderedDict()
            r['i'] = i
            for (k, v) in basis_quality(list(map((lambda x: (2.0 ** (2 * x))), r1))).items():
                r[k] = v
            print(pretty_dict(r))
    # Convert log-norms back to squared norms.
    r1 = list(map((lambda x: (2.0 ** (2 * x))), r1))
    return (r1, (i + 1))
def test_base_repr():
    """base_repr: digit rendering, fractional-point placement, sign, and base 16."""
    # default binary rendering
    assert base_repr(6) == '110'
    # fractional digit control
    assert base_repr(6, base=2, n_frac=3) == '.110'
    assert base_repr(6, n_frac=0) == '110.'
    assert base_repr(6, n_frac=-2) == '110##.'
    assert base_repr(6, n_frac=8) == '.'
    # negative values keep their sign
    assert base_repr(-1) == '-1'
    assert base_repr(-6) == '-110'
    assert base_repr(-6, n_frac=1) == '-11.0'
    # hexadecimal
    assert base_repr(30, base=16) == '1E'
    assert base_repr(-30, base=16) == '-1E'
def test_distinguishers_linearise_data_dimensions_internally():
    """CPA distinguisher accepts multi-dimensional data and reports one result per flattened dim."""
    traces = np.array(
        [[0, 1, 2, 3],
         [1, 2, 3, 4],
         [4, 5, 6, 7],
         [2, 3, 4, 5],
         [3, 4, 5, 6]],
        dtype='uint8')
    data = np.array(
        [[[2], [0]],
         [[0], [1]],
         [[3], [0]],
         [[1], [0]],
         [[0], [4]]],
        dtype='uint8')
    distinguisher = scared.CPADistinguisher()
    distinguisher.update(traces, data)
    result = distinguisher.compute()
    # Two data words, one guess dimension, four trace samples.
    assert result.shape == (2, 1, 4)
class IServiceProvider(IUnknown):
    """comtypes wrapper for the COM IServiceProvider interface."""
    _iid_ = GUID('{6D5140C1-7436-11CE-8034-00AA006009FA}')
    if TYPE_CHECKING:
        # Annotation-only stub for the raw COM method comtypes creates at runtime.
        _QueryService = hints.AnnoField()
    def QueryService(self, serviceIID, interface):
        """Query for the service *serviceIID*, returned as a pointer to *interface*.

        :param serviceIID: GUID of the requested service.
        :param interface: comtypes interface class; its _iid_ selects the riid.
        :return: POINTER(interface) to the service object.
        """
        p = POINTER(interface)()
        self._QueryService(byref(serviceIID), byref(interface._iid_), byref(p))
        return p
    _methods_ = [COMMETHOD([], HRESULT, 'QueryService', (['in'], POINTER(GUID), 'guidService'), (['in'], POINTER(GUID), 'riid'), (['in'], POINTER(c_void_p), 'ppvObject'))]
def extractManeatingreincarnationBlogspotCom(item):
    """Map a maneatingreincarnation.blogspot.com feed item to a release message.

    Untagged items are matched by title substring; tagged items by tag.
    Returns None for previews/chapterless items, False when nothing matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or 'preview' in item['title'].lower():
        return None
    if item['tags'] == []:
        titlemap = [
            ("I've Been Reincarnated into a Man Eating Flower ~Revenge~ ~Till the Day I Eat Him~",
             "I've Been Reincarnated into a Man Eating Flower ~Revenge~ ~Till the Day I Eat Him~",
             'translated'),
            ('Tensei Shoujo no Rirekisho', 'Tensei Shoujo no Rirekisho', 'translated'),
            ('Master of Dungeon', 'Master of Dungeon', 'oel'),
        ]
        for titlecomponent, name, tl_type in titlemap:
            if titlecomponent.lower() in item['title'].lower():
                return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def extractNanowaveTranslations(item):
    """Map a Nanowave Translations feed item to a 'High Speed!' release message."""
    # Strip the series prefix before parsing out volume/chapter/fragment info.
    cleaned_title = item['title'].replace("'High Speed! 2:", '')
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(cleaned_title)
    if (not (chp or vol or frag)) or 'preview' in item['title'].lower():
        return None
    if 'high speed! 2 translation' in item['tags']:
        return buildReleaseMessageWithType(item, 'High Speed!', vol, chp, frag=frag, postfix=postfix)
    return False
def render_attrs(obj, names=None, *, resolve_attr=getattr, onmissing=_FAIL, render_value=None, namewidth=None):
    """Yield human-readable "name value" lines for attributes of *obj*.

    :param obj: object whose attributes are rendered.
    :param names: iterable of attribute names; falsy -> all public names from
        dir(obj); passing *obj* itself -> public names from vars(obj)/dir(obj).
    :param resolve_attr: callable (obj, name) -> value; None means getattr.
    :param onmissing: _FAIL to re-raise AttributeError, _IGNORE to skip the
        attribute, anything else is used as the value.
    :param render_value: optional callable (obj, name, value) -> (rendered,
        indent); a non-str rendered value is treated as a sequence of lines.
    :param namewidth: column width for the name field (defaults to 15).
    :raises TypeError: when resolve_attr or render_value is not callable.
    """
    if (resolve_attr is None):
        resolve_attr = getattr
    elif (not callable(resolve_attr)):
        raise TypeError(f'expected resolve_attr to be callable, got {resolve_attr}')
    if (render_value and (not callable(render_value))):
        raise TypeError(f'expected render_value to be callable, got {render_value}')
    if (not namewidth):
        namewidth = 15
    if (not names):
        names = (n for n in dir(obj) if (not n.startswith('_')))
    elif (names is obj):
        # Sentinel: the caller passed the object itself to mean "its own names".
        try:
            names = list(vars(obj))
        except TypeError:
            names = dir(obj)
        names = (n for n in names if (not n.startswith('_')))
    # Continuation lines of multi-line values align under the value column.
    inline_indent = (' ' * (namewidth + 1))
    needsep = False
    for name in names:
        try:
            value = resolve_attr(obj, name)
        except AttributeError:
            if (onmissing is _FAIL):
                raise
            elif (onmissing is _IGNORE):
                continue
            else:
                value = onmissing
        if (render_value is not None):
            (value, indent) = render_value(obj, name, value)
            value = normalize_rendered(value)
            if (not isinstance(value, str)):
                # Multi-line rendered value.
                if (not indent):
                    # Inline style: first line beside the name, rest aligned under it.
                    (yield format_item(name, value[0], namewidth))
                    for line in value[1:]:
                        (yield (inline_indent + line))
                    continue
                # Block style: blank line, "name:" header, then indented lines.
                (yield '')
                (yield f'{name}:')
                for line in value:
                    (yield (indent + line))
                # Request a separating blank line before the next plain item.
                needsep = True
                continue
        if needsep:
            (yield '')
            needsep = False
        value = normalize_rendered(value)
        (yield format_item(name, value, namewidth))
def test_aw_convergence(stress_element, mesh_hierarchy):
    """Convergence test for a mixed stress/displacement elasticity discretisation.

    Perturbs the mesh hierarchy's geometry, solves the system against a
    manufactured solution on each refined mesh, and checks the observed L2
    convergence orders for displacement, stress, and div(stress) against the
    rates expected for the Conforming vs Nonconforming stress element families.
    """
    # Perturb the coarse mesh coordinates, then prolong the perturbation up the
    # hierarchy so every level shares the same distorted geometry.
    mesh = mesh_hierarchy[0]
    V = FunctionSpace(mesh, mesh.coordinates.ufl_element())
    eps = Constant((1 / 2))
    (x, y) = SpatialCoordinate(mesh)
    new = Function(V).interpolate(as_vector([(x + ((eps * sin(((2 * pi) * x))) * sin(((2 * pi) * y)))), (y - ((eps * sin(((2 * pi) * x))) * sin(((2 * pi) * y))))]))
    coords = [new]
    for mesh in mesh_hierarchy[1:]:
        fine = Function(mesh.coordinates.function_space())
        prolong(new, fine)
        coords.append(fine)
        new = fine
    for (mesh, coord) in zip(mesh_hierarchy, coords):
        mesh.coordinates.assign(coord)
    # Material parameters.
    nu = Constant(0.25)
    lam = Constant(1)
    mu = ((lam * (1 - (2 * nu))) / (2 * nu))
    I = Identity(2)
    def A(sig):
        # Compliance operator applied to the stress tensor.
        return ((1 / (2 * mu)) * (sig - ((nu * tr(sig)) * I)))
    def epsilon(u):
        # Symmetric gradient (linearised strain).
        return sym(grad(u))
    l2_u = []
    l2_sigma = []
    l2_div_sigma = []
    element = MixedElement([stress_element, VectorElement('DG', cell=mesh.ufl_cell(), degree=1)])
    for msh in mesh_hierarchy[1:]:
        (x, y) = SpatialCoordinate(msh)
        # Manufactured displacement/stress fields and the residuals they induce.
        uex = as_vector([(sin((pi * x)) * sin((pi * y))), (sin((pi * x)) * sin((pi * y)))])
        sigex = as_tensor([[(cos((pi * x)) * cos(((3 * pi) * y))), (y + (2 * cos(((pi * x) / 2))))], [(y + (2 * cos(((pi * x) / 2)))), ((- sin(((3 * pi) * x))) * cos(((2 * pi) * x)))]])
        comp_r = (A(sigex) - epsilon(uex))
        cons_r = div(sigex)
        V = FunctionSpace(msh, element)
        Uh = Function(V)
        (sigh, uh) = split(Uh)
        (tau, v) = TestFunctions(V)
        (sig, u) = TrialFunctions(V)
        n = FacetNormal(msh)
        # Residual form of the mixed problem, with the manufactured-solution
        # source terms and the displacement boundary term.
        F = (((((((+ inner(A(sigh), tau)) * dx) + (inner(uh, div(tau)) * dx)) + (inner(div(sigh), v) * dx)) - (inner(comp_r, tau) * dx)) - (inner(cons_r, v) * dx)) - (inner(uex, dot(tau, n)) * ds))
        # Bilinear form used only to build the preconditioner.
        Jp = (((inner(A(sig), tau) + inner(div(sig), div(tau))) + inner(u, v)) * dx)
        params = {'mat_type': 'matfree', 'pmat_type': 'nest', 'snes_type': 'ksponly', 'snes_monitor': None, 'ksp_monitor': None, 'ksp_type': 'minres', 'ksp_norm_type': 'preconditioned', 'pc_type': 'fieldsplit', 'pc_fieldsplit_type': 'additive', 'fieldsplit_0_pc_type': 'cholesky', 'fieldsplit_1_pc_type': 'icc', 'ksp_rtol': 1e-16, 'ksp_atol': 1e-25, 'ksp_max_it': 10}
        solve((F == 0), Uh, Jp=Jp, solver_parameters=params)
        # L2 errors for displacement, stress, and div(stress).
        error_u = sqrt(assemble((inner((uex - uh), (uex - uh)) * dx)))
        error_sigma = sqrt(assemble((inner((sigh - sigex), (sigh - sigex)) * dx)))
        error_div_sigma = sqrt(assemble((inner(div((sigh - sigex)), div((sigh - sigex))) * dx)))
        l2_u.append(error_u)
        l2_sigma.append(error_sigma)
        l2_div_sigma.append(error_div_sigma)
    # Expected convergence rates depend on the stress element family.
    if stress_element.family().startswith('Conforming'):
        assert (min(convergence_orders(l2_u)) > 1.9)
        assert (min(convergence_orders(l2_sigma)) > 2.9)
        assert (min(convergence_orders(l2_div_sigma)) > 1.9)
    elif stress_element.family().startswith('Nonconforming'):
        assert (min(convergence_orders(l2_u)) > 1.9)
        assert (min(convergence_orders(l2_sigma)) > 1)
        assert (min(convergence_orders(l2_div_sigma)) > 1.9)
    else:
        raise ValueError("Don't know what the convergence should be")
(scope='module')
def df_enc_category_dtypes():
    """Toy classification frame: categorical predictors var_A/var_B and a binary target."""
    frame = pd.DataFrame({
        'var_A': ['A'] * 6 + ['B'] * 10 + ['C'] * 4,
        'var_B': ['A'] * 10 + ['B'] * 6 + ['C'] * 4,
        'target': [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0],
    })
    # Cast the predictors to pandas' category dtype.
    frame[['var_A', 'var_B']] = frame[['var_A', 'var_B']].astype('category')
    return frame
.parametrize('distance_matrix, expected_distance', [(distance_matrix1, 28), (distance_matrix2, 26), (distance_matrix3, 20)])
def test_return_correct_permutation_distance(distance_matrix, expected_distance):
    """compute_permutation_distance returns the known tour length for a fixed permutation."""
    tour = [0, 2, 3, 1, 4]
    assert compute_permutation_distance(distance_matrix, tour) == expected_distance
def x86_epilogue(blk):
    """Annotate x86 epilogue POP instructions in *blk*.

    Scans the block's BAP statements for register POPs -- a DefStmt whose lhs
    is a register and whose rhs is a memory load -- and, for ESP-relative
    addresses, records the stack temp offset and the restored register.
    """
    for stmt in blk.bap.stmts:
        # A 'POP reg' lifts to: reg := load(addr).
        if ((stmt.insn is not None) and stmt.insn.startswith('POP') and isinstance(stmt, DefStmt) and isinstance(stmt.lhs, RegVar) and isinstance(stmt.rhs, LoadExp)):
            (base_pointer, offset, access) = mem_addr(stmt.rhs.addr, blk, stmt.pc)
            if ((base_pointer is not None) and (base_pointer.base_register == 'ESP')):
                make_temp_offset(base_pointer.base_register, offset, blk, stmt.pc)
                make_giv_reg(stmt.lhs.name, stmt.lhs.index, blk, stmt.pc)
class bsn_flow_checksum_bucket_stats_request(bsn_stats_request):
    """Generated OpenFlow message: BSN flow-checksum bucket stats request.

    NOTE(review): this is generated Python-2-style code -- pack() joins str
    fragments with ''.join and pads with a '\\x00' str literal, which only
    works where struct.pack returns str (Python 2).
    """
    # Fixed wire-format constants for this message type.
    version = 6
    type = 18
    stats_type = 65535
    experimenter = 6035143
    subtype = 10
    def __init__(self, xid=None, flags=None, table_id=None):
        """Initialize fields; flags and table_id default to 0, xid stays None."""
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (table_id != None):
            self.table_id = table_id
        else:
            self.table_id = 0
        return
    def pack(self):
        """Serialize the message to its wire format, backpatching the length field."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        # Placeholder for the 16-bit length; patched below once known.
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        # 4 bytes of padding.
        packed.append(('\x00' * 4))
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        packed.append(struct.pack('!B', self.table_id))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)
    def unpack(reader):
        """Parse a message from *reader*, asserting all fixed fields.

        NOTE(review): defined without self and constructs the object itself --
        presumably a stripped @staticmethod decorator; confirm against upstream.
        """
        obj = bsn_flow_checksum_bucket_stats_request()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 18)
        _length = reader.read('!H')[0]
        # Re-slice the reader so we consume exactly this message's bytes.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 65535)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 10)
        obj.table_id = reader.read('!B')[0]
        return obj
    def __eq__(self, other):
        """Equality over the variable fields (xid, flags, table_id) and exact type."""
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.table_id != other.table_id):
            return False
        return True
    def pretty_print(self, q):
        """Write a human-readable rendering of the message to pretty-printer *q*."""
        q.text('bsn_flow_checksum_bucket_stats_request {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REQ_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('table_id = ')
                q.text(('%#x' % self.table_id))
            q.breakable()
        q.text('}')
class TraceParseTestCase(unittest.TestCase):
    """Parses the vision-transformer and single-rank inference sample traces
    once per class, and validates the parsed DataFrames against the raw
    rank-0 trace files (the "ground truth")."""

    vision_transformer_t: Trace
    vision_transformer_raw_df: pd.DataFrame
    inference_t: Trace
    inference_raw_df: pd.DataFrame

    # BUGFIX: unittest invokes setUpClass on the class, so it must be a
    # classmethod; without the decorator the call fails at collection time.
    @classmethod
    def setUpClass(cls):
        """Parse both trace directories and load their ground-truth frames."""
        super(TraceParseTestCase, cls).setUpClass()
        vision_transformer_trace_dir: str = 'tests/data/vision_transformer'
        inference_trace_dir: str = 'tests/data/inference_single_rank'
        vision_transformer_rank_0_file: str = 'rank-0.json.gz'
        inference_rank_0_file: str = 'inference_rank_0.json.gz'
        max_ranks = 8
        cls.vision_transformer_t = Trace(trace_dir=vision_transformer_trace_dir)
        cls.vision_transformer_t.parse_traces(max_ranks=max_ranks, use_multiprocessing=True)
        cls.vision_transformer_raw_df = cls.prepare_ground_truth_df(vision_transformer_trace_dir, vision_transformer_rank_0_file)
        cls.inference_t = Trace(trace_dir=inference_trace_dir)
        cls.inference_t.parse_traces(max_ranks=max_ranks, use_multiprocessing=True)
        cls.inference_raw_df = cls.prepare_ground_truth_df(inference_trace_dir, inference_rank_0_file)

    # BUGFIX: called as cls.prepare_ground_truth_df(...) from setUpClass,
    # so it must be a classmethod to bind `cls`.
    @classmethod
    def prepare_ground_truth_df(cls, trace_dir, rank_0_file) -> pd.DataFrame:
        """Load the raw rank-0 trace events and drop rows the parser discards."""
        df = pd.DataFrame(parse_trace_dict(os.path.join(trace_dir, rank_0_file))['traceEvents'])
        # Rows lacking duration/category and 'Trace' category spans are not
        # present in the parsed per-rank DataFrames.
        df.dropna(axis=0, subset=['dur', 'cat'], inplace=True)
        df.drop(df[(df['cat'] == 'Trace')].index, inplace=True)
        return df

    def setUp(self) -> None:
        """Expose the two parsed traces and their ground truth per test."""
        self.traces = [self.vision_transformer_t, self.inference_t]
        self.raw_dfs = [self.vision_transformer_raw_df, self.inference_raw_df]
        self.total_ranks = [8, 1]

    def test_trace_load(self) -> None:
        """Parsed names/name-ids must round-trip through the symbol table and
        align/filter must keep timestamps non-negative."""
        for (t, raw_df, total_ranks) in zip(self.traces, self.raw_dfs, self.total_ranks):
            self.assertEqual(len(t.traces), total_ranks)
            sym_id_map = t.symbol_table.get_sym_id_map()
            sym_table = t.symbol_table.get_sym_table()
            rank_0_df_name_id = t.traces[0]['name']
            rank_0_df_name = t.traces[0]['name'].apply((lambda x: sym_table[x]))
            ground_truth_name = raw_df['name']
            ground_truth_name_id = raw_df['name'].apply((lambda x: sym_id_map[x]))
            self.assertListEqual(rank_0_df_name_id.to_list(), ground_truth_name_id.to_list())
            self.assertListEqual(rank_0_df_name.to_list(), ground_truth_name.to_list())
            t.align_and_filter_trace()
            # Filtering drops one ProfilerStep when more than one is present.
            raw_profiler_steps = raw_df['name'].str.contains('ProfilerStep').sum()
            sym_id_map = t.symbol_table.get_sym_id_map()
            profiler_steps = [v for (k, v) in sym_id_map.items() if ('ProfilerStep' in k)]
            filtered_profiler_steps = t.traces[0]['name'].isin(profiler_steps).sum()
            self.assertEqual((filtered_profiler_steps + int((raw_profiler_steps > 1))), raw_profiler_steps)
            self.assertLessEqual(len(t.traces[0]), len(raw_df))
            self.assertGreaterEqual(t.traces[0]['ts'].min(), 0)

    def test_trace_iteration(self) -> None:
        """Each GPU kernel correlated to a CPU op must share its iteration."""
        for t in self.traces:
            df = t.traces[0]
            sym_id_map = t.symbol_table.get_sym_id_map()
            iterations = {f'ProfilerStep#{i}' for i in set(df['iteration'].unique()) if (i != (- 1))}
            valid_gpu_kernels = df.loc[(df['stream'].gt(0) & df['index_correlation'].gt(0))]
            correlated_cpu_ops = df.loc[df.loc[(valid_gpu_kernels.index, 'index_correlation')]]
            gpu_kernels_per_iteration = valid_gpu_kernels.groupby('iteration')['index'].agg('count').to_dict()
            correlated_cpu_ops_per_iteration = correlated_cpu_ops.groupby('iteration')['index'].agg('count').to_dict()
            self.assertTrue(('iteration' in df.columns))
            self.assertTrue(all(((i in sym_id_map) for i in iterations)))
            self.assertDictEqual(gpu_kernels_per_iteration, correlated_cpu_ops_per_iteration)
def find_system_cameras():
    """Scan /sys/class/video4linux and map each camera's reported name to
    its /dev device node. Returns an empty dict when the sysfs tree is
    absent or unreadable."""
    device_root = '/sys/class/video4linux'
    cameras = {}
    if not os.path.exists(device_root):
        return cameras
    for entry in os.listdir(device_root):
        name_path = os.path.join(device_root, entry, 'name')
        try:
            with open(name_path, encoding='utf-8') as handle:
                label = handle.read()
        except Exception:
            # Unreadable entry - skip it rather than fail the whole scan.
            continue
        cameras[label.strip('\n')] = os.path.join('/dev', entry)
    return cameras
def do_link_batch_update_sess(logger, interface, link_batch):
    """Bulk-upsert raw link rows into the database with a degrading retry ladder.

    Validates every item's key set, then tries batches of 50, then batches
    of 5, then one-row-at-a-time (finally with per-row commits) until the
    whole batch is applied. Returns the number of rows touched, or None on
    the slowest path.
    """
    if (not link_batch):
        return
    # --- Input validation: every item must carry exactly these keys. ---
    expected_keys = set(['url', 'starturl', 'netloc', 'distance', 'priority', 'state', 'addtime', 'epoch'])
    for item in link_batch:
        try:
            assert ('url' in item)
            assert ('starturl' in item)
            assert ('netloc' in item)
            assert ('distance' in item)
            assert ('priority' in item)
            assert ('state' in item)
            assert ('addtime' in item)
            assert ('epoch' in item)
        except AssertionError:
            logger.error('Missing key from raw entry: ')
            item_str = pprint.pformat(item)
            for line in item_str.split('\n'):
                logger.error('\t%s', line.rstrip())
            raise
        item_keys = set(item.keys())
        excess_keys = (item_keys - expected_keys)
        try:
            assert (not excess_keys)
        except AssertionError:
            logger.error("Excess key(s) in raw entry: '%s'", excess_keys)
            item_str = pprint.pformat(item)
            for line in item_str.split('\n'):
                logger.error('\t%s', line.rstrip())
            raise
    logger.info('Inserting %s items into DB in batch.', len(link_batch))
    # Work on the raw psycopg2 cursor so we control transactions explicitly.
    raw_cur = interface.connection().connection.cursor()
    # Upsert statement, collapsed to a single line of whitespace.
    per_cmd = '\n\tSELECT upsert_link_raw(\n\t\t\t%(url)s,\n\t\t\t%(starturl)s,\n\t\t\t%(netloc)s,\n\t\t\t%(distance)s,\n\t\t\t%(priority)s,\n\t\t\t%(addtime)s,\n\t\t\t%(state)s,\n\t\t\t%(epoch)s\n\t\t\t);\n\t\t\t'.replace('\t', ' ')
    per_cmd = per_cmd.replace('\n', ' ')
    while ('  ' in per_cmd):
        per_cmd = per_cmd.replace('  ', ' ')
    # End any transaction left open on the shared connection.
    raw_cur.execute('COMMIT;')
    rowcnt = 0
    # --- Attempt 1: batches of 50 under a 2.5s statement timeout. ---
    try:
        for subc in misc.batch(link_batch, 50):
            raw_cur.execute('BEGIN TRANSACTION ISOLATION LEVEL READ COMMITTED;')
            raw_cur.execute('SET statement_timeout TO 2500;')
            psycopg_execute_batch.execute_batch(raw_cur, per_cmd, subc)
            rowcnt += raw_cur.rowcount
            raw_cur.execute('COMMIT;')
        raw_cur.execute('RESET statement_timeout;')
        link_batch = []
        logger.info('Touched AT LEAST %s rows', rowcnt)
        return rowcnt
    except psycopg2.Error:
        logger.error('psycopg2.Error - Failure on bulk insert.')
        for line in traceback.format_exc().split('\n'):
            logger.error(line)
        raw_cur.execute('ROLLBACK;')
    logger.error('Retrying.')
    rowcnt = 0
    # --- Attempt 2: same approach, smaller batches of 5. ---
    try:
        for subc in misc.batch(link_batch, 5):
            raw_cur.execute('BEGIN TRANSACTION ISOLATION LEVEL READ COMMITTED;')
            raw_cur.execute('SET statement_timeout TO 2500;')
            psycopg_execute_batch.execute_batch(raw_cur, per_cmd, subc)
            rowcnt += raw_cur.rowcount
            raw_cur.execute('COMMIT;')
        raw_cur.execute('RESET statement_timeout;')
        link_batch = []
        logger.info('Touched AT LEAST %s rows', rowcnt)
        return rowcnt
    except psycopg2.Error:
        logger.error('psycopg2.Error - Failure on bulk insert.')
        for line in traceback.format_exc().split('\n'):
            logger.error(line)
        raw_cur.execute('ROLLBACK;')
    logger.error('Retrying with per upsert commit.')
    # --- Attempt 3: row-by-row; on further failure, commit after each row. ---
    commit_each = False
    while 1:
        rowcnt = 0
        try:
            raw_cur.execute('BEGIN TRANSACTION ISOLATION LEVEL READ COMMITTED;')
            raw_cur.execute('SET statement_timeout TO 2500;')
            for paramset in link_batch:
                assert isinstance(paramset['starturl'], str)
                if (len(paramset['url']) > 2000):
                    # Column limit guard: skip over-long URLs instead of erroring out.
                    logger.error('URL Is too long to insert into the database!')
                    logger.error("URL: '%s'", paramset['url'])
                else:
                    raw_cur.execute(per_cmd, paramset)
                    rowcnt += raw_cur.rowcount
                if commit_each:
                    raw_cur.execute('COMMIT;')
                    raw_cur.execute('BEGIN TRANSACTION ISOLATION LEVEL READ COMMITTED;')
                    raw_cur.execute('SET statement_timeout TO 2500;')
            raw_cur.execute('COMMIT;')
            break
        except psycopg2.Error:
            if (commit_each is False):
                logger.warning('psycopg2.Error - Retrying with commit each.')
            else:
                logger.warning('psycopg2.Error - Retrying.')
            traceback.print_exc()
            raw_cur.execute('ROLLBACK;')
            commit_each = True
    raw_cur.execute('RESET statement_timeout;')
    logger.info('Changed %s rows', rowcnt)
    return
def test_downside_risk():
    """downside_risk returns 0 for the rising two-asset series and a value
    within 1e-15 of 0 for the declining single-asset series."""
    rising = pd.DataFrame({'1': [1.0, 2.0, 4.0, 8.0], '2': [1.0, 2.0, 3.0, 4.0]})
    assert downside_risk(rising, np.array([0.25, 0.75]), 0.005) == 0
    falling = pd.DataFrame({'1': [7.0, 6.0, 5.0, 4.0, 3.0]})
    assert abs(downside_risk(falling, np.array([1.0]), 0.0) - 0.0) <= 1e-15
def detect_task_name(calling_file: Optional[str], calling_module: Optional[str]) -> str:
    """Derive a task name from the caller's file name or module path.

    Args:
        calling_file: path of the calling script, if known.
        calling_module: dotted module path of the caller, if known.

    Returns:
        The sanitized file stem when a file is given; otherwise the last
        component of the dotted module path.

    Raises:
        ValueError: if neither a file nor a module is provided.
    """
    if calling_file is not None:
        # Strip directory and extension, then sanitize into a valid name.
        stem = os.path.splitext(os.path.basename(calling_file))[0]
        return get_valid_filename(stem)
    if calling_module is not None:
        # Keep only the final component of a dotted module path.
        return calling_module.rsplit('.', 1)[-1]
    # BUGFIX: previously raised a bare ValueError() with no message.
    raise ValueError('Unable to detect task name: neither calling_file nor calling_module was provided')
class CmdEditorBase(_COMMAND_DEFAULT_CLASS):
    """Base parent for line-editor commands: parses the command arguments
    (including optional line ranges) against the caller's active editor
    buffer and stores the parsed state on the command instance."""

    locks = 'cmd:all()'
    help_entry = 'LineEditor'
    editor = None

    def parse(self):
        """Parse self.args into line-range and argument state.

        Populates: linebuffer, nlines, arglist, cline, lstart, lend,
        linerange, lstr, words, args, arg1, arg2.
        """
        editor = self.caller.ndb._eveditor
        if (not editor):
            # No editor attached to the caller yet - (re)load it.
            _load_editor(self.caller)
            editor = self.caller.ndb._eveditor
        self.editor = editor
        linebuffer = self.editor.get_buffer().split('\n')
        nlines = len(linebuffer)
        arglist = [part for part in _RE_GROUP.findall(self.args) if part]
        temp = []
        for arg in arglist:
            # BUGFIX: str.strip returns a new string; previously the result
            # was discarded, leaving the surrounding quotes in place.
            if arg.startswith('"'):
                arg = arg.strip('"')
            elif arg.startswith("'"):
                arg = arg.strip("'")
            temp.append(arg)
        arglist = temp
        words = self.args.split()
        # Default to operating on the last line of the buffer.
        cline = (nlines - 1)
        (lstart, lend) = (cline, (cline + 1))
        linerange = False
        if (arglist and (arglist[0].count(':') == 1)):
            # "start:end" range form (1-based in user input).
            (part1, part2) = arglist[0].split(':')
            if (part1 and part1.isdigit()):
                lstart = min((max(0, int(part1)) - 1), nlines)
                linerange = True
            if (part2 and part2.isdigit()):
                lend = (min((lstart + 1), int(part2)) + 1)
                linerange = True
        elif (arglist and arglist[0].isdigit()):
            # Single-line form.
            lstart = min(max(0, (int(arglist[0]) - 1)), nlines)
            lend = (lstart + 1)
            linerange = True
        if linerange:
            # The range token was consumed; remaining tokens are arguments.
            arglist = arglist[1:]
        lstr = (('line %i' % (lstart + 1)) if ((not linerange) or ((lstart + 1) == lend)) else ('lines %i-%i' % ((lstart + 1), lend)))
        args = ' '.join(arglist)
        (arg1, arg2) = ('', '')
        if (len(arglist) > 1):
            (arg1, arg2) = (arglist[0], ' '.join(arglist[1:]))
        else:
            arg1 = ' '.join(arglist)
        self.linebuffer = linebuffer
        self.nlines = nlines
        self.arglist = arglist
        self.cline = cline
        self.lstart = lstart
        self.lend = lend
        self.linerange = linerange
        self.lstr = lstr
        self.words = words
        self.args = args
        self.arg1 = arg1
        self.arg2 = arg2
def test_send_no_active_handler_1():
    """send_no_active_handler logs a warning with the reason and envelope
    endpoints, and increments the no-active-handler counter by one."""
    handler = ErrorHandler()
    envelope_mock = Mock()
    envelope_mock.protocol_specification_id = '1'
    envelope_mock.sender = '2'
    envelope_mock.to = '3'
    envelope_mock.skill_id = None
    count = handler.no_active_handler_count
    reason = 'reason'
    with patch.object(_default_logger, 'warning') as mock_logger:
        handler.send_no_active_handler(envelope_mock, reason, _default_logger)
    # NOTE(review): the expected message interpolates `envelope_mock.sender`
    # for both Sender= and to= - presumably the second should be
    # `envelope_mock.to`; confirm against ErrorHandler.send_no_active_handler's
    # actual log format before changing.
    mock_logger.assert_any_call(f'Cannot handle envelope: {reason}. Sender={envelope_mock.sender}, to={envelope_mock.sender}.')
    # Exactly one failure should have been recorded.
    assert ((count + 1) == handler.no_active_handler_count)
class IntFlag(int, ReprEnum, Flag):
_boundary_ = KEEP
def __contains__(self, other):
if isinstance(other, int):
other = self.__class__(other)
elif (not isinstance(other, self.__class__)):
raise TypeError(("unsupported operand type(s) for 'in': '%s' and '%s'" % (type(other).__name__, self.__class__.__name__)))
if ((other._value_ == 0) or (self._value_ == 0)):
return False
return ((other._value_ & self._value_) == other._value_) |
def find_writeable_directory(base_dir):
    """Walk ``base_dir`` and return the first subdirectory (as a Path) in
    which a file can be created, or None when no such directory exists.

    BUGFIX: subdirectory names yielded by os.walk are relative to the walk
    ``root``, not to ``base_dir``; the original joined against ``base_dir``
    and therefore probed wrong paths for any nested directory.
    """
    for (root, dirs, _files) in os.walk(base_dir):
        for d in dirs:
            subdir = Path(root) / d
            test_file = subdir / 'test_file'
            try:
                # Probe writability by creating and removing a scratch file.
                with open(test_file, 'w'):
                    pass
                os.remove(test_file)
            except OSError:
                continue
            return subdir
    return None
def validate_text(rule):
    """Validate a text-typed parameter rule and return its sanitised value.

    Enforces str type and min/max bounds, then applies the transformation
    appropriate to the rule's ``text_type`` ('raw', 'sql', 'password',
    'url' or 'search').
    """
    rule['min'] = rule.get('min') or 1
    rule['max'] = rule.get('max') or MAX_ITEMS
    if type(rule['value']) is not str:
        raise InvalidParameterException(INVALID_TYPE_MSG.format(**rule))
    _check_max(rule)
    _check_min(rule)
    text_type = rule['text_type']
    if text_type not in SUPPORTED_TEXT_TYPES:
        msg = "Invalid model {key}: '{text_type}' is not a valid text_type".format(**rule)
        raise Exception(msg + ' Possible types: {}'.format(SUPPORTED_TEXT_TYPES))
    if text_type in ('raw', 'sql', 'password'):
        if text_type != 'raw':
            logger.warning("Caution: text_type '{}' not yet fully implemented".format(text_type))
        val = rule['value']
    elif text_type == 'url':
        val = urllib.parse.quote_plus(rule['value'])
    elif text_type == 'search':
        # Drop control/formatting characters that carry no meaning in a query.
        control_chars = str.maketrans('', '', '\t\x0b\x08\x0c\r\n')
        val = rule['value'].translate(control_chars).strip()
        if val != rule['value']:
            logger.warning('Field {} value was changed from {} to {}'.format(rule['key'], repr(rule['value']), repr(val)))
    return val
class _is(Validator):
    """Regex-gated validator: when ``rule`` is set and does not match the
    value's unicode form, fail with the translated message; otherwise
    delegate to ``check`` (which subclasses override)."""

    message = 'Invalid value'
    rule = None

    def __call__(self, value):
        # A missing rule accepts everything; short-circuit makes the
        # explicit "is not None" re-check unnecessary.
        if self.rule is None or self.rule.match(to_unicode(value) or ''):
            return self.check(value)
        return (value, translate(self.message))

    def check(self, value):
        """Default check: accept the value unchanged with no error."""
        return (value, None)
def fortios_firewall(data, fos, check_mode):
    """Dispatch the internet-service-custom-group task and normalise the
    response into Ansible's (failed, changed, result, diff) shape."""
    fos.do_member_operation('firewall', 'internet-service-custom-group')
    if data['firewall_internet_service_custom_group']:
        resp = firewall_internet_service_custom_group(data, fos, check_mode)
    else:
        fos._module.fail_json(msg=('missing task body: %s' % 'firewall_internet_service_custom_group'))
    if check_mode:
        return resp
    succeeded = is_successful_status(resp)
    # 'changed' mirrors the revision flag when present, else assumes change.
    changed = succeeded and (resp['revision_changed'] if 'revision_changed' in resp else True)
    return (not succeeded), changed, resp, {}
class BaseDataDriftMetricsTest(BaseCheckValueTest, WithDriftOptionsFields, ABC):
    """Base class for data-drift value tests backed by a DataDriftTable
    metric: forwards stattest/threshold options to the metric and repackages
    the metric result into a TestResult with per-column drift parameters."""

    group: ClassVar = DATA_DRIFT_GROUP.id
    _metric: DataDriftTable
    columns: Optional[List[str]]
    feature_importance: Optional[bool]

    def __init__(self, columns: Optional[List[str]]=None, eq: Optional[Numeric]=None, gt: Optional[Numeric]=None, gte: Optional[Numeric]=None, is_in: Optional[List[Union[(Numeric, str, bool)]]]=None, lt: Optional[Numeric]=None, lte: Optional[Numeric]=None, not_eq: Optional[Numeric]=None, not_in: Optional[List[Union[(Numeric, str, bool)]]]=None, stattest: Optional[PossibleStatTestType]=None, cat_stattest: Optional[PossibleStatTestType]=None, num_stattest: Optional[PossibleStatTestType]=None, text_stattest: Optional[PossibleStatTestType]=None, per_column_stattest: Optional[Dict[(str, PossibleStatTestType)]]=None, stattest_threshold: Optional[float]=None, cat_stattest_threshold: Optional[float]=None, num_stattest_threshold: Optional[float]=None, text_stattest_threshold: Optional[float]=None, per_column_stattest_threshold: Optional[Dict[(str, float)]]=None, is_critical: bool=True, feature_importance: Optional[bool]=False):
        """Forward all condition/stattest options to the base test, then build
        the underlying DataDriftTable from the resolved fields."""
        super().__init__(eq=eq, gt=gt, gte=gte, is_in=is_in, lt=lt, lte=lte, not_eq=not_eq, not_in=not_in, is_critical=is_critical, columns=columns, stattest=stattest, cat_stattest=cat_stattest, num_stattest=num_stattest, text_stattest=text_stattest, per_column_stattest=per_column_stattest, stattest_threshold=stattest_threshold, cat_stattest_threshold=cat_stattest_threshold, num_stattest_threshold=num_stattest_threshold, text_stattest_threshold=text_stattest_threshold, per_column_stattest_threshold=per_column_stattest_threshold, feature_importance=feature_importance)
        self._metric = DataDriftTable(columns=self.columns, stattest=self.stattest, cat_stattest=self.cat_stattest, num_stattest=self.num_stattest, text_stattest=self.text_stattest, per_column_stattest=self.per_column_stattest, stattest_threshold=self.stattest_threshold, cat_stattest_threshold=self.cat_stattest_threshold, num_stattest_threshold=self.num_stattest_threshold, text_stattest_threshold=self.text_stattest_threshold, per_column_stattest_threshold=self.per_column_stattest_threshold, feature_importance=self.feature_importance)

    # BUGFIX: check() below reads `self.metric.get_result()` without calling
    # `metric`, so this accessor must be a property.
    @property
    def metric(self):
        """The underlying DataDriftTable metric instance."""
        return self._metric

    def check(self):
        """Run the base check, then attach per-column drift parameters."""
        result = super().check()
        metrics = self.metric.get_result()
        return TestResult(name=result.name, description=result.description, status=TestStatus(result.status), group=self.group, parameters=ColumnsDriftParameters.from_data_drift_table(metrics, self.get_condition()))
class Solution():
    def isStrobogrammatic(self, num: str) -> bool:
        """Return True if ``num`` reads the same after a 180-degree rotation.

        Uses a two-pointer sweep: each outer pair of digits must map to one
        another under rotation (0-0, 1-1, 8-8, 6-9, 9-6); a lone middle
        digit must be self-rotating (0, 1 or 8).
        """
        mirror = {0: 0, 1: 1, 6: 9, 8: 8, 9: 6}
        lo, hi = 0, len(num) - 1
        while lo <= hi:
            if lo == hi:
                # Middle digit of an odd-length number.
                return num[lo] in '018'
            left = ord(num[lo]) - ord('0')
            right = ord(num[hi]) - ord('0')
            if mirror.get(left, -1) != right:
                return False
            lo += 1
            hi -= 1
        return True
def from_extension(extension):
    """Look up the sequence-file type registered for ``extension``.

    The extension must include the leading period and is matched
    case-insensitively.

    Raises:
        ValueError: when the extension lacks a leading period.
        UnknownExtensionError: when no type is registered for it.
    """
    if not extension.startswith('.'):
        raise ValueError('Extensions must begin with a period.')
    try:
        return EXTENSION_TO_TYPE[extension.lower()]
    except KeyError:
        raise UnknownExtensionError('seqmagick does not know how to handle ' + 'files with extensions like this: ' + extension)
class EWM():
    """Exponentially weighted mean/spread tracker built on two EWMA streams:
    one for the raw samples and one for squared clamped percent changes."""

    def __init__(self, span: int, std_regularisation: float):
        self.avg = EWMA(span)  # running mean of raw samples
        self.std = EWMA(span)  # running mean of squared pct-changes
        self._last_x = math.nan  # previous (regularised) sample
        self.std_regularisation = std_regularisation

    def add(self, x):
        """Feed one sample: update the mean and the spread estimate."""
        self.avg.add(x)
        if (self.std_regularisation != 0):
            # Push |x| away from zero by the regularisation amount, keeping sign,
            # so percent changes stay bounded near zero.
            x = (((- 1) if (x < 0) else 1) * (abs(x) + self.std_regularisation))
        ex = self._last_x
        if math.isfinite(ex):
            pct = ((x - ex) / ex)
            pct = min(abs(pct), 1.4)  # clamp outliers
            self.std.add((pct * pct))
        self._last_x = x

    # BUGFIX: z_score reads `self.stddev` without calling it, so this
    # accessor must be a property; previously it divided by a bound method.
    @property
    def stddev(self):
        """Current spread estimate: |avg * std|."""
        return abs((self.avg.value * self.std.value))

    def z_score(self, x):
        """Signed distance of ``x`` from the running mean in stddev units
        (denominator regularised to avoid division by zero)."""
        return ((x - self.avg.value) / (self.stddev + self.std_regularisation))
class FileMessageFactory(MessageFactory):
    """Factory for file-type test messages: sends/edits a gzip document via
    the mock slave channel and verifies the Telegram-side rendering."""

    def send_message(self, slave: MockSlaveChannel, chat: Chat, target: Optional[Message]=None) -> Message:
        """Send document_0.txt.gz as a file message with reactions, commands
        and substitution enabled."""
        return slave.send_file_like_message(MsgType.File, Path('tests/mocks/document_0.txt.gz'), 'application/gzip', chat, target=target, reactions=True, commands=True, substitution=True)

    def compare_message(self, tg_msg: Message, efb_msg: EFBMessage) -> None:
        """Assert file name/size, text, reactions and button count match."""
        assert tg_msg.file
        assert (tg_msg.file.name == efb_msg.filename)
        size = efb_msg.path.stat().st_size
        assert (tg_msg.file.size == size)
        assert (efb_msg.text in tg_msg.raw_text)
        # Reactions are rendered into the raw message text.
        for i in efb_msg.reactions:
            assert (i in tg_msg.raw_text)
        if efb_msg.commands:
            assert (tg_msg.button_count == len(efb_msg.commands))

    def edit_message(self, slave: MockSlaveChannel, message: Message) -> Optional[Message]:
        """Edit only the text of a previously sent file message."""
        return slave.edit_file_like_message_text(message, reactions=True, commands=True, substitution=True)

    def edit_message_media(self, slave: MockSlaveChannel, message: Message) -> Optional[Message]:
        """Replace the attached media with document_1.txt.gz."""
        return slave.edit_file_like_message(message, Path('tests/mocks/document_1.txt.gz'), mime='application/gzip', reactions=True, commands=True, substitution=True)

    def finalize_message(self, tg_msg: Message, efb_msg: EFBMessage):
        """Close the EFB message's file handle if it is still open."""
        if (efb_msg.file and (not efb_msg.file.closed)):
            efb_msg.file.close()
class OptionPlotoptionsDumbbellSonificationContexttracksMappingHighpass(Options):
    """Generated accessors for the highpass-filter mapping option group.

    NOTE(review): sibling generated Options classes in this file pair their
    accessors as @property getters/setters - these may be stripped
    @property decorators; confirm against the generator's output.
    """

    def frequency(self) -> 'OptionPlotoptionsDumbbellSonificationContexttracksMappingHighpassFrequency':
        # Lazily create/return the nested `frequency` option sub-group.
        return self._config_sub_data('frequency', OptionPlotoptionsDumbbellSonificationContexttracksMappingHighpassFrequency)

    def resonance(self) -> 'OptionPlotoptionsDumbbellSonificationContexttracksMappingHighpassResonance':
        # Lazily create/return the nested `resonance` option sub-group.
        return self._config_sub_data('resonance', OptionPlotoptionsDumbbellSonificationContexttracksMappingHighpassResonance)
class YOLOv5():
    """Deprecated wrapper around a YOLOv5 model file.

    Kept for backward compatibility; prefer ``yolov5.load()``.
    """

    def __init__(self, model_path, device=None, load_on_init=True):
        warnings.warn("YOLOv5 class is deprecated and will be removed in future release. Use 'model = yolov5.load()' instead.", DeprecationWarning)
        self.model_path = model_path
        self.device = device
        self.model = None
        if load_on_init:
            # Same loading path as calling load_model() manually.
            self.load_model()

    def load_model(self):
        """Ensure the model directory exists and load the autoshaped model."""
        Path(self.model_path).parents[0].mkdir(parents=True, exist_ok=True)
        self.model = load_model(model_path=self.model_path, device=self.device, autoshape=True)

    def predict(self, image_list, size=640, augment=False):
        """Run inference on a list of images; requires a loaded model."""
        assert (self.model is not None), 'before predict, you need to call .load_model()'
        return self.model(ims=image_list, size=size, augment=augment)
class Collection(BaseModel):
    """Pydantic model describing one dataset collection: its fields, the
    collections it must run/erase after, and grouped input fields."""

    name: str
    skip_processing: Optional[bool] = False
    fields: List[Field]
    after: Set[CollectionAddress] = set()
    erase_after: Set[CollectionAddress] = set()
    grouped_inputs: Set[str] = set()

    # BUGFIX: restored @property - references(), identities(), field() and
    # field_paths_by_category() all index `self.field_dict` without calling
    # it, so this accessor must be a property.
    @property
    def field_dict(self) -> Dict[FieldPath, Field]:
        """Flattened map of field path -> field, collected recursively."""
        return self.recursively_collect_matches((lambda f: True))

    def top_level_field_dict(self) -> Dict[FieldPath, Field]:
        """Map of only the top-level fields, keyed by single-segment path.

        NOTE(review): possibly also a stripped @property upstream - no
        in-file usage to confirm; left as a method.
        """
        return {FieldPath(field.name): field for field in self.fields}

    def recursively_collect_matches(self, func: Callable[[Field], bool]) -> Dict[FieldPath, Field]:
        """Merge the matching sub-fields collected from every field."""
        matches = [field.collect_matching(func) for field in self.fields]
        return merge_dicts(*matches)

    def references(self) -> Dict[FieldPath, List[Tuple[FieldAddress, Optional[EdgeDirection]]]]:
        """Fields that reference other collections, keyed by field path."""
        return {field_path: field.references for (field_path, field) in self.field_dict.items() if field.references}

    def identities(self) -> Dict[FieldPath, str]:
        """Fields marked as identities, keyed by field path."""
        return {field_path: field.identity for (field_path, field) in self.field_dict.items() if field.identity}

    def field(self, field_path: FieldPath) -> Optional[Field]:
        """Return the field at ``field_path``, or None when absent."""
        return (self.field_dict[field_path] if (field_path in self.field_dict) else None)

    def field_paths_by_category(self) -> Dict[FidesKey, List[FieldPath]]:
        """Group field paths by each data category they are tagged with."""
        categories = defaultdict(list)
        for (field_path, field) in self.field_dict.items():
            for category in (field.data_categories or []):
                categories[category].append(field_path)
        return categories

    class Config():
        arbitrary_types_allowed = True
def test_mask_datetime():
    """Hash masking of a datetime with a cached salt is deterministic and
    matches the known digest."""
    masker = HashMaskingStrategy(HashMaskingConfiguration())
    salt_secret = MaskingSecretCache[str](secret='adobo', masking_strategy=HashMaskingStrategy.name, secret_type=SecretType.salt)
    cache_secret(salt_secret, request_id)
    expected = ['a6597d576d8fb7ff58047db31f6c526bf984db454fa2460cdf7cf4f9d72a6d09']
    assert expected == masker.mask([datetime(2000, 1, 1)], request_id)
    clear_cache_secrets(request_id)
class Ponderation():
    """Container for module-weighting ("ponderation") configuration.

    NOTE(review): annotations only, no __init__ - this looks like a
    stripped @dataclass (or similar); confirm before instantiating
    directly.
    """
    # Enabled module names, grouped per key.
    enabled_modules: Dict[(str, List[str])]
    # Parameters applied to every module.
    generic_modules_parameters: Dict[(str, Union[(int, str, bool)])]
    # Per-module parameter overrides, keyed by module name.
    specific_modules_parameters: Dict[(str, Dict[(str, Union[(int, str, bool)])])]
    # Per-key weighting factors.
    weights: Dict[(str, float)]
    # Language mapping table.
    lang_map: Dict[(str, list)]
    # Keyword-algorithm version selector.
    new_keyword_alg: int
def setup_virtualenv(root: Path, version: Optional[Version]=None) -> Path:
    """Create a fresh virtualenv under ``root`` and install usort into it.

    When ``version`` is given, install that usort release; otherwise
    install the local checkout (REPO_ROOT). Returns the path to the
    installed ``usort`` executable's location in the venv's bin dir.
    """
    suffix = f'venv-{version}' if version else 'venv-local'
    venv_path = root / suffix
    venv.create(venv_path, clear=True, with_pip=True)
    if platform.system() == 'Windows':
        bin_dir = venv_path / 'Scripts'
    else:
        bin_dir = venv_path / 'bin'
    python = bin_dir / 'python'
    # Upgrade pip first, then install the requested usort.
    subprocess.run((python, '-m', 'pip', '-q', 'install', '-U', 'pip'), check=True)
    target = f'usort=={version}' if version else REPO_ROOT
    subprocess.run((python, '-m', 'pip', '-q', 'install', '-U', target), check=True)
    return bin_dir / 'usort'
def factorize_naive(n):
    """Return the prime factorization of ``n`` as a list of factors in
    non-decreasing order; numbers below 2 yield an empty list.

    Trial division: divide out 2, then odd candidates; once the candidate's
    square exceeds the remainder, the remainder itself is prime.
    """
    if n < 2:
        return []
    factors = []
    candidate = 2
    while n > 1:
        if n % candidate == 0:
            factors.append(candidate)
            n //= candidate
        elif candidate * candidate >= n:
            # Remaining n has no divisor <= sqrt(n), so it is prime.
            factors.append(n)
            return factors
        else:
            # After 2, only odd candidates need checking.
            candidate += 2 if candidate > 2 else 1
    return factors
def test():
    """Exercise-checker: verify the solution compares doc similarity and
    produced a value in [0, 1].

    BUGFIX: the first assert was `'doc1.similarity(doc2)' or (...)` - a
    non-empty string is always truthy, so the check could never fail. Both
    alternatives must be membership tests against __solution__.
    """
    assert (('doc1.similarity(doc2)' in __solution__) or ('doc2.similarity(doc1)' in __solution__)), 'Estas comparando la similitud entre los dos docs?'
    assert (0 <= float(similarity) <= 1), 'El valor de la similitud debe ser de punto flotante. Lo calculaste correctamente?'
    __msg__.good('Bien hecho!')
def discover_system_id(idl):
    """Poll the OVSDB idl until the Open_vSwitch table exposes a
    'system-id' external-id, or the session disconnects. Returns the
    system id string, or None when the session ends first."""
    system_id = None
    while system_id is None and idl._session.is_connected():
        idl.run()
        rows = idl.tables['Open_vSwitch'].rows
        if rows:
            # The Open_vSwitch table holds a single row; take the first key.
            first_row = rows.get(next(iter(rows.keys())))
            system_id = first_row.external_ids.get('system-id')
    return system_id
# NOTE(review): bare call whose return value is discarded - this looks like a
# stripped decorator (presumably `@_metaclass(abc.ABCMeta)` or
# `@six.add_metaclass(abc.ABCMeta)` applied to the class that follows).
# Confirm against upstream before changing.
_metaclass(abc.ABCMeta)
class ServicePlanTestCase(PyTestServerTestCase):
fixture_path = None
fixture_regex = re.compile('^[^.].*?\\.(:?pysoa)$')
custom_fixtures = ()
model_constants = {}
_all_directives = []
def setup_class(cls):
cls._all_directives = get_all_directives()
super(ServicePlanTestCase, cls).setup_class()
if (not hasattr(cls, '_test_fixture_setup_called')):
setattr(cls, '_test_fixture_setup_called', {})
if (not hasattr(cls, '_test_fixture_setup_succeeded')):
setattr(cls, '_test_fixture_setup_succeeded', {})
def teardown_class(cls):
_test_fixture_setup_called = getattr(cls, '_test_fixture_setup_called', None)
if _test_fixture_setup_called:
last_e = None
for (fixture_name, tear_down_not_called) in six.iteritems(_test_fixture_setup_called):
if tear_down_not_called:
(self, test_fixture) = tear_down_not_called
try:
self._run_directive_hook('tear_down_test_fixture', test_fixture)
self.tear_down_test_fixture(test_fixture)
except Exception as e:
last_e = e
if last_e:
raise last_e
super(ServicePlanTestCase, cls).teardown_class()
def set_up_test_fixture(self, test_fixture, **kwargs):
def tear_down_test_fixture(self, test_fixture, **kwargs):
def setup_method(self):
case_data = getattr(self, '_pytest_first_fixture_case', None)
if case_data:
getattr(self.__class__, '_test_fixture_setup_called')[case_data.fixture_name] = (self, case_data.test_fixture)
self.set_up_test_fixture(case_data.test_fixture)
self._run_directive_hook('set_up_test_fixture', case_data.test_fixture)
getattr(self.__class__, '_test_fixture_setup_succeeded')[case_data.fixture_name] = True
super(ServicePlanTestCase, self).setup_method()
def teardown_method(self):
outer_exception = None
try:
super(ServicePlanTestCase, self).teardown_method()
except BaseException as e:
outer_exception = e
raise
finally:
case_data = getattr(self, '_pytest_last_fixture_case', None)
if case_data:
try:
getattr(self.__class__, '_test_fixture_setup_succeeded')[case_data.fixture_name] = False
getattr(self.__class__, '_test_fixture_setup_called')[case_data.fixture_name] = False
self._run_directive_hook('tear_down_test_fixture', case_data.test_fixture)
self.tear_down_test_fixture(case_data.test_fixture)
except KeyboardInterrupt:
if outer_exception:
raise outer_exception
raise
except BaseException:
if (not outer_exception):
raise
self.add_error(*sys.exc_info())
def set_up_test_case(self, test_case, test_fixture, **kwargs):
def tear_down_test_case(self, test_case, test_fixture, **kwargs):
def set_up_test_case_action(self, action_name, action_case, test_case, test_fixture, **kwargs):
def tear_down_test_case_action(self, action_name, action_case, test_case, test_fixture, **kwargs):
def get_fixture_test_information(cls):
for fixture_file_name in cls.get_fixture_file_names():
(fixture_name, _) = os.path.splitext(os.path.basename(fixture_file_name))
test_fixture_results = []
last_case_data = None
fixture_parser = ServiceTestPlanFixtureParser(fixture_file_name, fixture_name)
test_fixture = fixture_parser.parse_test_fixture()
for test_case in test_fixture:
test_function = cls._create_test_function(description=test_case['description'], fixture_name=fixture_name, test_case=test_case, test_fixture=test_fixture, test_fixture_results=test_fixture_results)
skip = test_case.get('skip', None)
case_data = FixtureTestCaseData(name=test_case['name'], description=test_case['description'], fixture_name=fixture_name, fixture_file=fixture_file_name, line_number=test_case['line_number'], skip=skip, callable=test_function, test_fixture=test_fixture, is_first_fixture_case=(last_case_data is None), is_last_fixture_case=False)
if (not skip):
last_case_data = case_data
(yield case_data)
if last_case_data:
setattr(last_case_data.callable, '_last_fixture_test', True)
last_case_data.is_last_fixture_case = True
def get_fixture_file_names(cls):
if cls.custom_fixtures:
return cls.custom_fixtures
if (not cls.fixture_path):
return []
if (not os.path.isdir(cls.fixture_path)):
raise FixtureLoadError('Tried loading fixtures from "{}", however this path does not exist. Please specify the correct path by setting `cls.fixture_path`.'.format(cls.fixture_path))
fixture_files = []
for (directory_path, _, files) in os.walk(cls.fixture_path):
for fixture_file in files:
if cls.fixture_regex.search(fixture_file):
fixture_files.append(((directory_path + '/') + fixture_file))
if (not fixture_files):
raise FixtureLoadError('Could not find any fixture files in `cls.fixture_path` "{path}" that matched `cls.fixture_regex` "{regex}". To customize this regex, please override `cls.fixture_regex`.'.format(path=cls.fixture_path, regex=cls.fixture_regex.pattern))
return sorted(fixture_files)
def _create_test_function(description, fixture_name, test_case, test_fixture, test_fixture_results):
def test_function(self, *args, **kwargs):
_test_function_frame = True
if (not hasattr(self.__class__, '_test_fixture_setup_succeeded')):
setattr(self.__class__, '_test_fixture_setup_succeeded', {})
_test_fixture_setup_succeeded = getattr(self.__class__, '_test_fixture_setup_succeeded')
if (not _test_fixture_setup_succeeded.get(fixture_name, False)):
raise StatusError('Test fixture {} not set up'.format(fixture_name))
outer_exception = None
try:
self.set_up_test_case(test_case, test_fixture)
self._run_directive_hook('set_up_test_case', test_case, test_fixture)
try:
self._run_test_case(test_case, test_fixture, test_fixture_results, *args, **kwargs)
except BaseException as e:
outer_exception = e
raise
finally:
try:
self._run_directive_hook('tear_down_test_case', test_case, test_fixture)
self.tear_down_test_case(test_case, test_fixture)
except KeyboardInterrupt:
if outer_exception:
raise outer_exception
raise
except BaseException:
if (not outer_exception):
raise
self.add_error(*sys.exc_info())
except BaseException as e:
outer_exception = e
raise
finally:
if getattr(test_function, '_last_fixture_test'):
try:
self._run_directive_hook('assert_test_fixture_results', test_fixture_results, test_fixture)
except KeyboardInterrupt:
if outer_exception:
raise outer_exception
raise
except AssertionError:
if (not outer_exception):
raise
self.add_error(*sys.exc_info())
except BaseException:
if (not outer_exception):
raise
self.add_error(*sys.exc_info())
test_function.__doc__ = description
setattr(test_function, '_last_fixture_test', False)
return test_function
def _run_directive_hook(cls, hook, *args, **kwargs):
if (not cls._all_directives):
raise DirectiveError('FATAL: No directives found!')
for directive_class in cls._all_directives:
directive = directive_class()
if (not hasattr(directive, hook)):
raise DirectiveError('Directive class {} has no method {}.'.format(directive_class.__name__, hook))
getattr(directive, hook)(*args, **kwargs)
def _run_test_case(self, test_case, test_fixture, test_fixture_results, *_, **__):
_run_test_case_frame = True
action_results = {}
action_response_bodies = {}
test_fixture_results.append(action_results)
assert (self.server_class is not None)
assert (self.server_class.action_class_map is not None)
assert (self.server_class.service_name is not None)
for action_path in test_case['actions']:
(action_name, action_index) = action_path.split('.')
action_case = test_case[action_path]
if ('inputs' in action_case):
substitute_variables(action_case['inputs'], action_response_bodies, self.model_constants)
if ('job_control_inputs' in action_case):
substitute_variables(action_case['job_control_inputs'], action_response_bodies, self.model_constants)
if ('job_context_inputs' in action_case):
substitute_variables(action_case['job_context_inputs'], action_response_bodies, self.model_constants)
self.set_up_test_case_action(action_name, action_case, test_case, test_fixture)
self._run_directive_hook('set_up_test_case_action', action_name, action_case, test_case, test_fixture)
stub_context = self._WrapperContextManager()
if ((action_name not in self.server_class.action_class_map) and (action_name not in ('status', 'introspect')) and hasattr(self, '_process_stub_action_{}'.format(action_name))):
stub_context = self._WrapperContextManager(stub_action(self.server_class.service_name, action_name), getattr(self, '_process_stub_action_{}'.format(action_name)))
with stub_context:
job_response = self.client.call_actions(service_name=self.server_class.service_name, actions=[{'action': action_name, 'body': action_case.get('inputs', {})}], raise_job_errors=False, raise_action_errors=False, context=action_case.get('job_context_inputs', {}), control_extra=action_case.get('job_control_inputs', {}))
action_results[action_path] = (job_response.actions[0] if job_response.actions else None)
action_response_bodies[action_path] = (job_response.actions[0].body if job_response.actions else None)
substitute_variables(action_response_bodies, action_response_bodies, self.model_constants)
substitute_variables(action_case, action_response_bodies, self.model_constants)
try:
self._run_directive_hook('assert_test_case_action_results', action_name, action_case, test_case, test_fixture, action_results[action_path], job_response, action_path)
finally:
try:
self._run_directive_hook('tear_down_test_case_action', action_name, action_case, test_case, test_fixture)
finally:
self.tear_down_test_case_action(action_name, action_case, test_case, test_fixture)
self._run_directive_hook('assert_test_case_results', action_results, test_case, test_fixture)
def add_error(exc_type, exc_value, traceback_value):
    """Report a supplementary error to stderr during test teardown.

    Does nothing when neither an exception type nor a value is supplied;
    otherwise writes a one-line summary and, if available, the traceback.
    """
    if not (exc_type or exc_value):
        return
    kind = 'a `{}`'.format(exc_type.__name__) if exc_type else 'an unknown'
    details = exc_value if exc_value else 'No details'
    sys.stderr.write('Additionally, {} error occurred: {}'.format(kind, details))
    sys.stderr.write('\n')
    sys.stderr.flush()
    if traceback_value:
        # Dump the full traceback right after the summary line.
        traceback.print_tb(traceback_value, None, sys.stderr)
        sys.stderr.flush()
class _WrapperContextManager(object):
def __init__(self, stub_action_context=None, mock_action_side_effect=None):
self._stub_action_context = stub_action_context
self._mock_action_side_effect = mock_action_side_effect
def __enter__(self):
if self._stub_action_context:
mock_action = self._stub_action_context.__enter__()
mock_action.side_effect = self._mock_action_side_effect
return mock_action
return None
def __exit__(self, exc_type=None, exc_value=None, traceback=None):
if self._stub_action_context:
return self._stub_action_context.__exit__(exc_type, exc_value, traceback)
return False |
class OptionSeriesVennSonificationTracksMappingTremoloDepth(Options):
    """Generated Highcharts option wrapper for
    ``series.venn.sonification.tracks.mapping.tremoloDepth``.

    NOTE(review): each option name appears twice (a getter-shaped def
    followed by a setter-shaped def).  As written the second def shadows
    the first; the original generated source presumably carried
    ``@property`` / ``@<name>.setter`` decorators that were lost in
    extraction — confirm against the generator output before editing.
    """

    def mapFunction(self):
        # Getter: no explicit default configured (None).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: store as a plain (non-JavaScript) value.
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the call below looks like a stripped class decorator
# (`@_test.with_options_matrix(...)`) that parametrises the fixtures —
# confirm against the original test harness.
_test.with_options_matrix(platform=['anythingreally', 'win32'], executable=['interpreter'], get_executable=[get_executable_verbatim])
class FixArgvTests(Fixtures):
    """Fixture-driven tests for ``runner._fix_argv``.

    Each class-level tuple is one parametrised case:
    (argv, sys.path, MockModule args, expected argv[, options]).
    """

    def _test(self, argv, path, main, expect, *, platform, executable, get_executable):
        # Shared driver: build a fake __main__ module and compare fixed argv.
        module = MockModule(*main)
        self.assertEqual(expect, runner._fix_argv(argv, path, module, executable=executable, platform=platform, get_executable=get_executable))

    # Plain script run from cwd: argv unchanged on POSIX-like platforms.
    plainfile = (['afile.py', '...'], ['/path/to/cwd', '/usr/lib/pythonX.Y'], ['afile.py', '__main__', None], ['afile.py', '...'], options(platform='anythingreally'))
    # Same script on win32: interpreter is prepended.
    plainfile_win = (['afile.py', '...'], ['/path/to/cwd', '/usr/lib/pythonX.Y'], ['afile.py', '__main__', None], ['interpreter afile.py', '...'], options(platform='win32'))
    # Script importable from sys.path: rewritten to `-m` form.
    asmodule = (['/path/to/cwd/afile.py', '...'], ['', '/usr/lib/pythonX.Y'], ['/path/to/cwd/afile.py', '__main__', ''], ['interpreter -m afile', '...'])
    packedmodule = (['/path/to/cwd/apkg/afile.py', '...'], ['', '/usr/lib/pythonX.Y'], ['/path/to/cwd/apkg/afile.py', '__main__', 'apkg'], ['interpreter -m apkg.afile', '...'])
    # Package __main__.py collapses to `-m <package>`.
    packedmain2 = (['/path/to/cwd/apkg/__main__.py', '...'], ['', '/usr/lib/pythonX.Y'], ['/path/to/cwd/apkg/__main__.py', 'apkg.__main__', 'apkg'], ['interpreter -m apkg', '...'])
    packedmain3 = (['/path/to/cwd/apkg/__main__.py', '...'], ['', '/usr/lib/pythonX.Y'], ['/path/to/cwd/apkg/__main__.py', '__main__', 'apkg'], ['interpreter -m apkg', '...'])

    def test_bad_fakemodule(self):
        # A module with bad metadata must leave argv untouched.
        module = BadModule('/path/to/cwd/afile.py', '__main__')
        argv = ['afile.py', '...']
        path = ['', '/usr/lib/pythonX.Y']
        self.assertEqual(['afile.py', '...'], runner._fix_argv(argv, path, module, get_executable=(lambda p: '')))
def start_game():
    """Run one full game of Othello/Reversi on a fresh board.

    Relies on module-level helpers/globals: ``Board``, ``getHumans``,
    ``print_board``, ``turn`` and the cell constants ``EMPTY``,
    ``BLACK`` (rendered 'X') and ``WHITE`` (rendered 'O').
    """
    global board
    board = Board()
    getHumans()
    print_board(board)
    # Keep playing rounds until the board is full or one colour is wiped out.
    while board.count(EMPTY) != 0 and board.count(BLACK) != 0 and board.count(WHITE) != 0:
        turn(BLACK)
        turn(WHITE)
        print_board(board)
        print(f"'X': {board.count(BLACK)}, 'O': {board.count(WHITE)}")
    if board.count(BLACK) > board.count(WHITE):
        print("'X' wins!")
    elif board.count(BLACK) < board.count(WHITE):
        # BUG FIX: white is shown as 'O' everywhere else in this module;
        # the previous message wrongly announced "'Y' wins!".
        print("'O' wins!")
    else:
        # BUG FIX: was the unbalanced string 'Equality)'.
        print('Equality!')
def _to_perf_rep(v: Any) -> Any:
if isinstance(v, dict):
p = PerformanceReport()
for (key, value) in v.items():
value = _to_perf_rep(value)
setattr(p, key, value)
return p
if isinstance(v, list):
for i in range(len(v)):
v[i] = _to_perf_rep(v[i])
return v
return v |
def get_registerable_container_image(img: Optional[Union[(str, ImageSpec)]], cfg: ImageConfig) -> str:
    """Resolve *img* to a concrete, registerable container image name.

    An ImageSpec is built (if needed) and its fully qualified name is
    returned.  A non-empty string may contain ``{{.image.<name>.<attr>}}``
    template groups which are substituted from *cfg*.  An empty/None image
    falls back to ``cfg.default_image``.

    Raises AssertionError for malformed template groups and ValueError
    when no image and no default is available.
    """
    if isinstance(img, ImageSpec):
        # Build is a no-op if the spec was already built.
        ImageBuildEngine.build(img)
        return img.image_name()
    if img is None or img == '':
        if cfg.default_image is None:
            raise ValueError('An image is required for PythonAutoContainer tasks')
        return f'{cfg.default_image.fqn}:{cfg.default_image.tag}'
    matches = _IMAGE_REPLACE_REGEX.findall(img)
    if not matches:
        # Plain image reference, no template groups to substitute.
        return img
    for m in matches:
        if len(m) < 3:
            raise AssertionError(f'Image specification should be of the form <fqn>:<tag> OR <fqn>:{{{{.image.default.version}}}} OR {{.image.xyz.fqn}}:{{.image.xyz.version}} OR {{.image.xyz}} - Received {m}')
        (replace_group, name, attr) = m
        if name is None or name == '':
            raise AssertionError(f'Image format is incorrect {m}')
        img_cfg = cfg.find_image(name)
        if img_cfg is None:
            raise AssertionError(f'Image Config with name {name} not found in the configuration')
        if attr == 'version':
            # Prefer the named image's own tag; fall back to the default's.
            tag = img_cfg.tag if img_cfg.tag is not None else cfg.default_image.tag
            img = img.replace(replace_group, tag)
        elif attr == 'fqn':
            img = img.replace(replace_group, img_cfg.fqn)
        elif attr == '':
            img = img.replace(replace_group, img_cfg.full)
        else:
            raise AssertionError(f'Only fqn and version are supported replacements, {attr} is not supported')
    return img
def extractFrozensLazyBlog(item):
    """Map a Frozen's Lazy Blog feed item to a release message.

    Returns None for items with no volume/chapter/fragment info or for
    preview posts, a release message when a known tag matches, and False
    when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Ignore untagged chatter and preview posts outright.
    if not (chp or vol or frag) or 'preview' in item['title'].lower():
        return None
    tagmap = [('Kuusen Madoushi', 'Kuusen Madoushi', 'translated'), ('Last Embryo', 'Last Embryo', 'translated'), ('Vanquish Overlord', 'Vanquish Overlord', 'translated'), ('Majo no Tabitabi', 'Majo no Tabitabi', 'translated'), ('Okaa-san wa Suki desu ka?', 'Okaa-san wa Suki desu ka?', 'translated'), ('Amagi Brilliant Park', 'Amagi Brilliant Park', 'translated'), ('mondaiji', 'Mondaiji-tachi ga Isekai kara Kuru Sou Desu yo?', 'translated'), ('Rakudai Kishi', 'Rakudai Kishi No Eiyuutan', 'translated'), ('Isekai NEET', 'Did You Think Another World Would Motivate A NEET?', 'translated'), ('Riku and Chise', 'Riku and Chise: The Paperboy and The Princess', 'translated'), ('Granblue Fantasy', 'Granblue Fantasy', 'translated'), ('Miyamoto Sakura', 'Just A Story About Miyamoto Sakura Being Cute', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    # First mapping whose tag appears on the item wins.
    hit = next(((name, tl_type) for tagname, name, tl_type in tagmap if tagname in item['tags']), None)
    if hit is None:
        return False
    name, tl_type = hit
    return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
class Imp(BinaryOp):
    """N-ary implication: ``elems[:-1]`` are antecedents, ``elems[-1]`` the conclusion.

    NOTE(review): the bare ``_coconut_tco`` statements below look like
    stripped ``@_coconut_tco`` (tail-call-optimisation) decorators from
    Coconut-compiled source — confirm before restructuring.
    """
    __slots__ = ()
    # Reuse Expr's hash; __eq__ below is order-insensitive over conditions.
    __hash__ = Expr.__hash__
    opstr = imp_sym
    _coconut_tco
    def __rshift__(self, other):
        # self >> other chains implications, flattening nested Imp objects.
        if isinstance(other, Imp):
            return _coconut_tail_call(Imp, self, *other.elems)
        else:
            return _coconut_tail_call(Imp, self, other)
    _coconut_tco
    def __lshift__(self, other):
        # self << other prepends `other` as a new first antecedent.
        return _coconut_tail_call(Imp, *((other,) + self.elems))
    def conds(self):
        # Antecedents: everything but the last element.
        return self.elems[:(- 1)]
    def concl(self):
        # Conclusion: the last element.
        return self.elems[(- 1)]
    def __eq__(self, other):
        # NOTE(review): `self.concl` / `self.conds` are referenced without
        # call parentheses — as written this compares bound methods, which
        # is only meaningful if these were `@property`s in the original.
        return (isinstance(other, self.__class__) and (self.concl == other.concl) and unorderd_eq(self.conds, other.conds))
    _coconut_tco
    def to_or(self):
        # (a, b -> c)  ==  (~a | ~b | c): implication as a disjunction.
        ors = (tuple(map(Not, self.conds)) + (self.concl,))
        return _coconut_tail_call(Or, *ors)
    _coconut_tco
    def simplify(self, **kwargs):
        # Simplify via the equivalent Or form.
        return _coconut_tail_call(self.to_or().simplify, **kwargs)
    _coconut_tco
    def admits_empty_universe(self):
        return _coconut_tail_call(self.to_or().admits_empty_universe)
def validate_chroot_denylist(_form, field):
    """WTForms validator for a comma-separated list of chroot glob patterns.

    Every pattern must match REGEX_CHROOT_DENYLIST, must match at least one
    currently active chroot, and must not deny-list all active chroots.

    Raises:
        wtforms.ValidationError: on the first pattern violating any rule.
    """
    if not field.data:
        return
    items = [x.strip() for x in field.data.split(',')]
    # Fetch the active chroots once and normalise to a set: the original
    # compared `matched` (a set) against the raw active_names() return
    # value, so the "all chroots denied" check could never fire when that
    # helper returns a list.
    all_chroots = set(MockChrootsLogic.active_names())
    for item in items:
        if not re.match(REGEX_CHROOT_DENYLIST, item):
            raise wtforms.ValidationError('Pattern "{0}" does not match "{1}"'.format(item, REGEX_CHROOT_DENYLIST))
        matched = {chroot for chroot in all_chroots if fnmatch(chroot, item)}
        if not matched:
            raise wtforms.ValidationError('no chroot matched by pattern "{0}"'.format(item))
        if matched == all_chroots:
            raise wtforms.ValidationError('patterns are deny-listing all chroots')
class TlsBulkCertificateData(ModelNormal):
    """Auto-generated (OpenAPI generator) model for TLS bulk-certificate data.

    NOTE(review): the bare ``_property`` and ``_js_args_to_python_args``
    statements below look like decorators (``@cached_property``,
    ``@convert_js_args_to_python_args``) whose ``@`` was lost in
    extraction; ``_from_openapi_data`` takes ``cls`` and was presumably a
    classmethod — confirm against the generator template before editing.
    """
    # No enum constraints or value validations for this model.
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        """Types accepted for properties not listed in attribute_map."""
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        """Map attribute name -> (type,); lazy_import breaks import cycles."""
        lazy_import()
        return {'type': (TypeTlsBulkCertificate,), 'attributes': (TlsBulkCertificateDataAttributes,), 'relationships': (RelationshipsForTlsBulkCertificate,)}
    _property
    def discriminator():
        # No oneOf/anyOf discriminator for this model.
        return None
    attribute_map = {'type': 'type', 'attributes': 'attributes', 'relationships': 'relationships'}
    read_only_vars = {}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Deserialisation constructor used when instantiating from API data."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Bypass OpenApiModel.__new__ dispatch and allocate directly.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when configured to do so.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """User-facing constructor; rejects positional args and read-only vars."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only be set via _from_openapi_data.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class OptionPlotoptionsVectorStatesInactive(Options):
    """Generated Highcharts option wrapper for
    ``plotOptions.vector.states.inactive``.

    NOTE(review): the getter/setter pairs below share a name; the original
    presumably used ``@property`` / ``@<name>.setter`` decorators lost in
    extraction.
    """

    def animation(self) -> 'OptionPlotoptionsVectorStatesInactiveAnimation':
        # Nested sub-option object for the inactive-state animation.
        return self._config_sub_data('animation', OptionPlotoptionsVectorStatesInactiveAnimation)

    def enabled(self):
        # Default: inactive state is enabled.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def opacity(self):
        # Default opacity for inactive series.
        return self._config_get(0.2)

    def opacity(self, num: float):
        self._config(num, js_type=False)
class OptionPlotoptionsColumnrangeSonificationDefaultinstrumentoptionsActivewhen(Options):
    """Generated Highcharts option wrapper for
    ``plotOptions.columnrange.sonification.defaultInstrumentOptions.activeWhen``.

    NOTE(review): each option appears as a getter/setter pair with the same
    name; the ``@property`` / setter decorators appear stripped.
    """

    def crossingDown(self):
        # Threshold triggering activation when crossed downward.
        return self._config_get(None)

    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    def crossingUp(self):
        # Threshold triggering activation when crossed upward.
        return self._config_get(None)

    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        # Name of the point property the thresholds apply to.
        return self._config_get(None)

    def prop(self, text: str):
        self._config(text, js_type=False)
class CupertinoSwitch(ConstrainedControl):
    """An iOS-style toggle switch control (flet).

    NOTE(review): the accessor defs below come in getter/setter pairs with
    identical names, and stray statements such as ``_position.setter`` /
    ``_color.setter`` / ``_change.setter`` look like mangled remains of
    ``@<name>.setter`` decorators (with matching ``@property`` getters) —
    confirm against upstream flet source before restructuring.
    """

    def __init__(self, ref: Optional[Ref]=None, key: Optional[str]=None, width: OptionalNumber=None, height: OptionalNumber=None, left: OptionalNumber=None, top: OptionalNumber=None, right: OptionalNumber=None, bottom: OptionalNumber=None, expand: Union[(None, bool, int)]=None, col: Optional[ResponsiveNumber]=None, opacity: OptionalNumber=None, rotate: RotateValue=None, scale: ScaleValue=None, offset: OffsetValue=None, aspect_ratio: OptionalNumber=None, animate_opacity: AnimationValue=None, animate_size: AnimationValue=None, animate_position: AnimationValue=None, animate_rotation: AnimationValue=None, animate_scale: AnimationValue=None, animate_offset: AnimationValue=None, on_animation_end=None, tooltip: Optional[str]=None, visible: Optional[bool]=None, disabled: Optional[bool]=None, data: Any=None, label: Optional[str]=None, label_position: LabelPosition=LabelPosition.NONE, value: Optional[bool]=None, autofocus: Optional[bool]=None, active_color: Optional[str]=None, focus_color: Optional[str]=None, thumb_color: Optional[str]=None, track_color: Optional[str]=None, on_change=None, on_focus=None, on_blur=None):
        # Layout/animation arguments are forwarded to the base control;
        # switch-specific state is stored via the accessors below.
        ConstrainedControl.__init__(self, ref=ref, key=key, width=width, height=height, left=left, top=top, right=right, bottom=bottom, expand=expand, col=col, opacity=opacity, rotate=rotate, scale=scale, offset=offset, aspect_ratio=aspect_ratio, animate_opacity=animate_opacity, animate_size=animate_size, animate_position=animate_position, animate_rotation=animate_rotation, animate_scale=animate_scale, animate_offset=animate_offset, on_animation_end=on_animation_end, tooltip=tooltip, visible=visible, disabled=disabled, data=data)
        self.value = value
        self.label = label
        self.label_position = label_position
        self.autofocus = autofocus
        self.active_color = active_color
        self.focus_color = focus_color
        self.thumb_color = thumb_color
        self.track_color = track_color
        self.on_change = on_change
        self.on_focus = on_focus
        self.on_blur = on_blur

    def _get_control_name(self):
        # Control type name used by the flet protocol.
        return 'cupertinoswitch'

    def _before_build_command(self):
        # Serialise colour values as JSON attributes before sending.
        super()._before_build_command()
        self._set_attr_json('thumbColor', self.__thumb_color)
        self._set_attr_json('trackColor', self.__track_color)

    def value(self) -> Optional[bool]:
        # Current on/off state (defaults to False).
        return self._get_attr('value', data_type='bool', def_value=False)

    def value(self, value: Optional[bool]):
        self._set_attr('value', value)

    def label(self):
        return self._get_attr('label')

    def label(self, value):
        self._set_attr('label', value)

    def label_position(self) -> LabelPosition:
        return self.__label_position
    _position.setter
    def label_position(self, value: LabelPosition):
        self.__label_position = value
        if isinstance(value, LabelPosition):
            self._set_attr('labelPosition', value.value)
        else:
            # Accept plain strings for backwards compatibility.
            self.__set_label_position(value)

    def __set_label_position(self, value: LabelPositionString):
        self._set_attr('labelPosition', value)

    def autofocus(self) -> Optional[bool]:
        return self._get_attr('autofocus', data_type='bool', def_value=False)

    def autofocus(self, value: Optional[bool]):
        self._set_attr('autofocus', value)

    def active_color(self):
        return self._get_attr('activeColor')
    _color.setter
    def active_color(self, value):
        self._set_attr('activeColor', value)

    def focus_color(self):
        return self._get_attr('focusColor')
    _color.setter
    def focus_color(self, value):
        self._set_attr('focusColor', value)

    def thumb_color(self) -> Optional[str]:
        # Stored locally; serialised in _before_build_command.
        return self.__thumb_color
    _color.setter
    def thumb_color(self, value: Optional[str]):
        self.__thumb_color = value

    def track_color(self) -> Optional[str]:
        return self.__track_color
    _color.setter
    def track_color(self, value: Optional[str]):
        self.__track_color = value

    def on_change(self):
        return self._get_event_handler('change')
    _change.setter
    def on_change(self, handler):
        self._add_event_handler('change', handler)

    def on_focus(self):
        return self._get_event_handler('focus')
    _focus.setter
    def on_focus(self, handler):
        self._add_event_handler('focus', handler)

    def on_blur(self):
        return self._get_event_handler('blur')
    _blur.setter
    def on_blur(self, handler):
        self._add_event_handler('blur', handler)
class bad_action_error_msg(error_msg):
    """Auto-generated (loxi) OpenFlow error message, err_type OFPET_BAD_ACTION.

    Wire constants: version=2 (OpenFlow 1.1), type=1 (OFPT_ERROR), err_type=2.
    NOTE(review): ``pack`` joins chunks with ``''.join`` and ``unpack``
    takes ``reader`` without ``self`` — this is Python-2-era generated code
    (byte strings are ``str``); the original likely had ``@staticmethod``
    on ``unpack``.
    """
    version = 2
    type = 1
    err_type = 2

    def __init__(self, xid=None, code=None, data=None):
        # Defaults: xid stays None, code -> 0, data -> ''.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (code != None):
            self.code = code
        else:
            self.code = 0
        if (data != None):
            self.data = data
        else:
            self.data = ''
        return

    def pack(self):
        """Serialise to wire format; length field is back-patched at index 2."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        # Placeholder for the 16-bit total length, fixed up below.
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.err_type))
        packed.append(struct.pack('!H', self.code))
        packed.append(self.data)
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Parse a message from *reader*; asserts the fixed header constants."""
        obj = bad_action_error_msg()
        _version = reader.read('!B')[0]
        assert (_version == 2)
        _type = reader.read('!B')[0]
        assert (_type == 1)
        _length = reader.read('!H')[0]
        # Restrict further reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _err_type = reader.read('!H')[0]
        assert (_err_type == 2)
        obj.code = reader.read('!H')[0]
        obj.data = str(reader.read_all())
        return obj

    def __eq__(self, other):
        # Field-wise equality on xid, code and data.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.code != other.code):
            return False
        if (self.data != other.data):
            return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump via the pretty-printer *q*."""
        q.text('bad_action_error_msg {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('code = ')
                # Show the symbolic OFPBAC_* name when the code is known.
                value_name_map = {0: 'OFPBAC_BAD_TYPE', 1: 'OFPBAC_BAD_LEN', 2: 'OFPBAC_BAD_EXPERIMENTER', 3: 'OFPBAC_BAD_EXPERIMENTER_TYPE', 4: 'OFPBAC_BAD_OUT_PORT', 5: 'OFPBAC_BAD_ARGUMENT', 6: 'OFPBAC_EPERM', 7: 'OFPBAC_TOO_MANY', 8: 'OFPBAC_BAD_QUEUE', 9: 'OFPBAC_BAD_OUT_GROUP', 10: 'OFPBAC_MATCH_INCONSISTENT', 11: 'OFPBAC_UNSUPPORTED_ORDER', 12: 'OFPBAC_BAD_TAG'}
                if (self.code in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.code], self.code)))
                else:
                    q.text(('%#x' % self.code))
                q.text(',')
                q.breakable()
                q.text('data = ')
                q.pp(self.data)
            q.breakable()
        q.text('}')
class EosUserStatistic(BaseDocType):
    """Elasticsearch-DSL document for per-user, per-security trading statistics.

    Timestamps accept 'yyyyMMdd HHmmss.SSS', 'yyyy-MM-dd' or epoch millis.
    """
    id = Keyword()
    userId = Keyword()
    timestamp = Date(format='yyyyMMdd HHmmss.SSS||yyyy-MM-dd||epoch_millis')
    updateTimestamp = Date(format='yyyyMMdd HHmmss.SSS||yyyy-MM-dd||epoch_millis')
    securityId = Keyword()
    code = Keyword()
    name = Keyword()
    # Aggregated position/turnover figures (units not evident here — see writer).
    cash = Float()
    volume = Float()
    volumeIn = Float()
    volumeOut = Float()
    turnover = Float()
    turnoverIn = Float()
    turnoverOut = Float()
    averagePrice = Float()

    class Meta():
        # Single mapping type, _all disabled, strict mapping (unknown fields rejected).
        doc_type = 'doc'
        all = MetaField(enabled=False)
        dynamic = MetaField('strict')
class Grid(_Grid3D):
# NOTE(review): bare `_allow_deprecated_init` looks like a stripped decorator.
_allow_deprecated_init
def __init__(self, coordsv: np.ndarray, zcornsv: np.ndarray, actnumsv: np.ndarray, dualporo: bool=False, dualperm: bool=False, subgrids: (dict | None)=None, units: (Units | None)=None, filesrc: ((pathlib.Path | str) | None)=None, props: (GridProperties | None)=None, name: (str | None)=None, roxgrid: (Any | None)=None, roxindexer: (Any | None)=None):
    """Create a corner-point grid from COORD/ZCORN/ACTNUM arrays.

    Validates dtypes (float64/float32/int32) and mutually consistent
    shapes, then initialises state via _reset().

    Raises:
        TypeError: on a wrong array dtype.
        ValueError: on inconsistent array shapes.
    """
    coordsv = np.asarray(coordsv)
    zcornsv = np.asarray(zcornsv)
    actnumsv = np.asarray(actnumsv)
    if (coordsv.dtype != np.float64):
        raise TypeError(f'The dtype of the coordsv array must be float64, got {coordsv.dtype}')
    if (zcornsv.dtype != np.float32):
        raise TypeError(f'The dtype of the zcornsv array must be float32, got {zcornsv.dtype}')
    if (actnumsv.dtype != np.int32):
        raise TypeError(f'The dtype of the actnumsv array must be int32, got {actnumsv.dtype}')
    if ((len(coordsv.shape) != 3) or (coordsv.shape[2] != 6)):
        raise ValueError(f'shape of coordsv should be (nx+1,ny+1,6), got {coordsv.shape}')
    if ((len(zcornsv.shape) != 4) or (zcornsv.shape[3] != 4)):
        raise ValueError(f'shape of zcornsv should be (nx+1,ny+1,nz+1, 4), got {zcornsv.shape}')
    if (zcornsv.shape[0:2] != coordsv.shape[0:2]):
        raise ValueError(f'Mismatch between zcornsv and coordsv shape: {zcornsv.shape} vs {coordsv.shape}')
    # ACTNUM is per-cell, so each axis is one less than the node arrays.
    if np.any((np.asarray(zcornsv.shape[0:3]) != (np.asarray(actnumsv.shape) + 1))):
        raise ValueError(f'Mismatch between zcornsv and actnumsv shape: {zcornsv.shape} vs {actnumsv.shape}')
    super().__init__(*actnumsv.shape)
    self._reset(coordsv=coordsv, zcornsv=zcornsv, actnumsv=actnumsv, dualporo=dualporo, dualperm=dualperm, subgrids=subgrids, units=units, filesrc=filesrc, props=props, name=name, roxgrid=roxgrid, roxindexer=roxindexer)
def _reset(self, coordsv: np.ndarray, zcornsv: np.ndarray, actnumsv: np.ndarray, dualporo: bool=False, dualperm: bool=False, subgrids: (dict[(str, (range | list[int]))] | None)=None, units: (Units | None)=None, filesrc: ((pathlib.Path | str) | None)=None, props: (GridProperties | None)=None, name: (str | None)=None, roxgrid: (Any | None)=None, roxindexer: (Any | None)=None) -> None:
    """(Re)initialise all internal state; shared by __init__ and the from_* importers."""
    self._xtgformat = 2
    # Cell dimensions come from ACTNUM (per-cell array).
    self._ncol = actnumsv.shape[0]
    self._nrow = actnumsv.shape[1]
    self._nlay = actnumsv.shape[2]
    self._coordsv = coordsv
    self._zcornsv = zcornsv
    self._actnumsv = actnumsv
    self._dualporo = dualporo
    self._dualperm = dualperm
    self._filesrc = filesrc
    self._props: (GridProperties | None) = (GridProperties(props=[]) if (props is None) else props)
    self._name = name
    self._subgrids = subgrids
    self._ijk_handedness: (Literal[('left', 'right')] | None) = None
    self._dualactnum = None
    if dualporo:
        # Dual porosity: keep the raw DUALACTNUM and collapse >=1 to active.
        self._dualactnum = self.get_actnum(name='DUALACTNUM')
        acttmp = self._dualactnum.copy()
        acttmp.values[(acttmp.values >= 1)] = 1
        self.set_actnum(acttmp)
    self._metadata = xtgeo.MetaDataCPGeometry()
    self._metadata.required = self
    self._roxgrid = roxgrid
    self._roxindexer = roxindexer
    self.units = units
    # Scratch cache for derived data.
    self._tmp: dict = {}
def __repr__(self) -> str:
    # Developer-facing summary: identity plus dimensions and file source.
    logger.info('Invoke __repr__ for grid')
    return f'{self.__class__.__name__} (id={id(self)}) ncol={self._ncol!r}, nrow={self._nrow!r}, nlay={self._nlay!r}, filesrc={self._filesrc!r}'

def __str__(self) -> str:
    # User-facing: the full describe() report as text.
    logger.debug('Invoke __str__ for grid', stack_info=True)
    return (self.describe(flush=False) or '')
# NOTE(review): the duplicate def names below (metadata/name) suggest
# `@property` / `@<name>.setter` decorators stripped during extraction.
def metadata(self) -> xtgeo.MetaDataCPGeometry:
    """Corner-point geometry metadata object."""
    return self._metadata

def metadata(self, obj: xtgeo.MetaDataCPGeometry) -> None:
    # Only a full MetaDataCPGeometry instance may be assigned.
    if (not isinstance(obj, xtgeo.MetaDataCPGeometry)):
        raise ValueError('Input obj not an instance of MetaDataCPGeometry')
    self._metadata = obj

def filesrc(self) -> ((str | pathlib.Path) | None):
    """Path the grid was read from, if any."""
    return self._filesrc

def name(self) -> (str | None):
    """Optional grid name."""
    return self._name

def name(self, name: str) -> None:
    if (not isinstance(name, str)):
        raise ValueError('Input name is not a text string')
    self._name = name
def dimensions(self) -> Dimensions:
    """Grid dimensions as a (ncol, nrow, nlay) named tuple."""
    return Dimensions(self.ncol, self.nrow, self.nlay)

def vectordimensions(self) -> tuple[(int, int, int)]:
    """Lengths of the flattened COORD, ZCORN and ACTNUM vectors."""
    # COORD: 2 points x 3 coords per pillar; ZCORN: 4 corners per cell layer.
    ncoord = ((((self.ncol + 1) * (self.nrow + 1)) * 2) * 3)
    nzcorn = (((self.ncol * self.nrow) * (self.nlay + 1)) * 4)
    ntot = ((self.ncol * self.nrow) * self.nlay)
    return (ncoord, nzcorn, ntot)
def ijk_handedness(self) -> (Literal[('left', 'right')] | None):
    """Estimated IJK handedness ('left'/'right'), or None if undetermined."""
    nflip = _grid_etc1.estimate_flip(self)
    if (nflip == 1):
        self._ijk_handedness = 'left'
    elif (nflip == (- 1)):
        self._ijk_handedness = 'right'
    else:
        # Cannot be determined (degenerate geometry).
        self._ijk_handedness = None
    return self._ijk_handedness
# NOTE(review): `_handedness.setter` looks like a mangled
# `@ijk_handedness.setter` decorator.
_handedness.setter
def ijk_handedness(self, value: Literal[('left', 'right')]) -> None:
    if (value not in ('right', 'left')):
        raise ValueError("The value must be 'right' or 'left'")
    # Physically reverse the row axis so the geometry matches the request.
    self.reverse_row_axis(ijk_handedness=value)
    self._ijk_handedness = value
def subgrids(self) -> (dict[(str, (range | list[int]))] | None):
    """Ordered mapping of subgrid name -> 1-based layer numbers, or None."""
    return (None if (self._subgrids is None) else self._subgrids)

def subgrids(self, sgrids: (dict[(str, (range | list[int]))] | None)) -> None:
    # Setter form (property decorator presumably stripped): validates that
    # the layer ranges exactly tile 1..nlay with unique keys.
    if (sgrids is None):
        self._subgrids = None
        return
    if (not isinstance(sgrids, dict)):
        raise ValueError('Input to subgrids must be an ordered dictionary')
    lengths = 0
    zarr: list[Hashable] = []
    keys: list[Hashable] = []
    for (key, val) in sgrids.items():
        lengths += len(val)
        keys.append(key)
        zarr.extend(val)
    if (lengths != self._nlay):
        raise ValueError(f'Subgrids lengths <{lengths}> not equal NLAY <{self.nlay}>')
    # All layers 1..nlay must be covered exactly once.
    if (set(zarr) != set(range(1, (self._nlay + 1)))):
        raise ValueError(f'Arrays are not valid as the do not sum to vertical range, {zarr}')
    if (len(keys) != len(set(keys))):
        raise ValueError(f'Subgrid keys are not unique: {keys}')
    self._subgrids = sgrids
def nactive(self) -> int:
    """Number of active cells."""
    return len(self.actnum_indices)

def actnum_array(self) -> np.ndarray:
    """ACTNUM values as a plain ndarray (masked entries filled with 0)."""
    actnumv = self.get_actnum().values
    return ma.filled(actnumv, fill_value=0)

def actnum_indices(self) -> np.ndarray:
    """Flat (1D) indices of the active cells."""
    actnumv = self.get_actnum()
    actnumv = np.ravel(actnumv.values)
    return np.flatnonzero(actnumv)

def ntotal(self) -> int:
    """Total number of cells (active and inactive)."""
    return ((self.ncol * self.nrow) * self.nlay)

def dualporo(self) -> bool:
    """True when the grid is a dual-porosity grid."""
    return self._dualporo

def dualperm(self) -> bool:
    """True when the grid is a dual-permeability grid."""
    return self._dualperm
def gridprops(self) -> GridProperties:
    """The attached GridProperties collection."""
    return self._props

def gridprops(self, gprops: GridProperties) -> None:
    # Setter form (decorator presumably stripped).
    if (not isinstance(gprops, GridProperties)):
        raise ValueError('Input must be a GridProperties instance')
    self._props = gprops

def props(self) -> (list[GridProperty] | None):
    """List of attached GridProperty objects, or None."""
    if isinstance(self._props, GridProperties):
        return self._props.props
    if isinstance(self._props, list):
        # Defensive: legacy state where _props was a bare list.
        raise RuntimeError('self._props is a list, not a GridProperties instance')
    return None

def props(self, plist: list[GridProperty]) -> None:
    # Setter: every property must match this grid's dimensions.
    if (not isinstance(plist, list)):
        raise ValueError('Input to props must be a list')
    for gridprop in plist:
        if (gridprop.dimensions != self.dimensions):
            raise IndexError(f'Property NX NY NZ <{gridprop.name}> does not match grid!')
    self._props.props = plist

def propnames(self) -> (list[str] | None):
    """Names of the attached properties, or None when nothing is attached."""
    return (None if (self._props is None) else self._props.names)

def roxgrid(self) -> (Any | None):
    """Underlying RMS/Roxar grid object, if sourced from Roxar."""
    return self._roxgrid

def roxindexer(self) -> (Any | None):
    """Underlying RMS/Roxar grid indexer, if sourced from Roxar."""
    return self._roxindexer
def generate_hash(self, hashmethod: Literal[('md5', 'sha256', 'blake2b')]='md5') -> str:
    """Return a deterministic hash of the grid geometry (dims + arrays)."""
    required = ('_ncol', '_nrow', '_nlay', '_coordsv', '_zcornsv', '_actnumsv')
    gid = ''.join((str(getattr(self, att)) for att in required))
    return generic_hash(gid, hashmethod=hashmethod)
# NOTE(review): the bare tuple below looks like a stripped
# `@deprecation.deprecated(...)` decorator for create_box.
(deprecated_in='2.16', removed_in='4.0', current_version=__version__, details='Use xtgeo.create_box_grid() instead')
def create_box(self, dimension: tuple[(int, int, int)]=(10, 12, 6), origin: tuple[(float, float, float)]=(10.0, 20.0, 1000.0), oricenter: bool=False, increment: tuple[(int, int, int)]=(100, 150, 5), rotation: float=30.0, flip: Literal[(1, (- 1))]=1) -> None:
    """Deprecated: re-initialise this grid as a simple box grid."""
    kwargs = _grid_etc1.create_box(dimension=dimension, origin=origin, oricenter=oricenter, increment=increment, rotation=rotation, flip=flip)
    self._reset(**kwargs)
def to_file(self, gfile: FileLike, fformat: str='roff') -> None:
    """Export the grid to *gfile* in the requested format.

    Supported formats (with aliases): roff (binary), roff_ascii, grdecl,
    bgrdecl, egrid, fegrid.  Raises ValueError for unknown formats.
    """
    _gfile = xtgeo._XTGeoFile(gfile, mode='wb')
    if (not _gfile.memstream):
        # Fail early if the target folder does not exist.
        _gfile.check_folder(raiseerror=OSError)
    valid_formats = {'roff': ['roff', 'roff_binary', 'roff_bin', 'roffbin'], 'roff_ascii': ['roff_ascii', 'roff_asc', 'roffasc'], 'grdecl': ['grdecl'], 'bgrdecl': ['bgrdecl'], 'egrid': ['egrid'], 'fegrid': ['fegrid']}
    if (fformat in valid_formats['roff']):
        _grid_export.export_roff(self, _gfile.name, 'binary')
    elif (fformat in valid_formats['roff_ascii']):
        _grid_export.export_roff(self, _gfile.name, 'ascii')
    elif (fformat in valid_formats['grdecl']):
        # Second arg: 1 = ascii GRDECL, 0 = binary bGRDECL.
        _grid_export.export_grdecl(self, _gfile.name, 1)
    elif (fformat in valid_formats['bgrdecl']):
        _grid_export.export_grdecl(self, _gfile.name, 0)
    elif (fformat in valid_formats['egrid']):
        _grid_export.export_egrid(self, _gfile.name)
    elif (fformat in valid_formats['fegrid']):
        _grid_export.export_fegrid(self, _gfile.name)
    else:
        raise ValueError(f"Invalid file format: {fformat}, valid options are: {', '.join((v for vv in valid_formats.values() for v in vv))}")
def to_hdf(self, gfile: (str | pathlib.Path), compression: (str | None)=None, chunks: (bool | None)=False, subformat: (int | None)=844) -> FileLike:
    """Export the corner-point geometry to HDF5 and return the file object."""
    _gfile = xtgeo._XTGeoFile(gfile, mode='wb', obj=self)
    _gfile.check_folder(raiseerror=OSError)
    _grid_export.export_hdf5_cpgeom(self, _gfile, compression=compression, chunks=chunks, subformat=subformat)
    return _gfile.file

def to_xtgf(self, gfile: (str | pathlib.Path), subformat: (int | None)=844) -> pathlib.Path:
    """Export to the native xtgeo binary format and return the path."""
    _gfile = xtgeo._XTGeoFile(gfile, mode='wb', obj=self)
    _gfile.check_folder(raiseerror=OSError)
    _grid_export.export_xtgcpgeom(self, _gfile, subformat=subformat)
    return _gfile.file

def to_roxar(self, project: str, gname: str, realisation: int=0, info: bool=False, method: str='cpg') -> None:
    """Export the grid into an RMS/Roxar project as *gname*."""
    _grid_roxapi.export_grid_roxapi(self, project, gname, realisation, info=info, method=method)
# NOTE(review): the bare tuples before each importer below look like
# stripped `@deprecation.deprecated(...)` decorators.
(deprecated_in='2.16', removed_in='4.0', current_version=__version__, details='Use xtgeo.grid_from_file() instead')
def from_file(self, gfile: FileLike, fformat: (str | None)=None, **kwargs: Any) -> Grid:
    """Deprecated: import a grid file into this instance (returns self)."""
    def constructor(*args, **kwargs):
        # Re-use this instance instead of building a new Grid.
        self._reset(*args, **kwargs)
        return self
    _handle_import(constructor, gfile, fformat, **kwargs)
    return self
(deprecated_in='2.16', removed_in='4.0', current_version=__version__, details='Use xtgeo.grid_from_file() instead')
def from_hdf(self, gfile: FileLike, ijkrange: (Sequence[int] | None)=None, zerobased: bool=False) -> None:
    """Deprecated: import (a sub-range of) an HDF5 corner-point grid."""
    # NOTE(review): mode='wb' for an import looks odd — confirm upstream.
    gfile = xtgeo._XTGeoFile(gfile, mode='wb', obj=self)
    kwargs = _grid_import_xtgcpgeom.import_hdf5_cpgeom(gfile, ijkrange=ijkrange, zerobased=zerobased)
    self._reset(**kwargs)
(deprecated_in='2.16', removed_in='4.0', current_version=__version__, details='Use xtgeo.grid_from_file() instead')
def from_xtgf(self, gfile: FileLike, mmap: bool=False) -> None:
    """Deprecated: import a native xtgeo binary grid file."""
    gfile = xtgeo._XTGeoFile(gfile, mode='wb', obj=self)
    kwargs = _grid_import_xtgcpgeom.import_xtgcpgeom(gfile, mmap)
    self._reset(**kwargs)
(deprecated_in='2.16', removed_in='4.0', current_version=__version__, details='Use xtgeo.grid_from_roxar() instead')
def from_roxar(self, projectname: str, gname: str, realisation: int=0, dimensions_only: (bool | None)=None, info: bool=False) -> None:
    """Deprecated: import a grid from an RMS/Roxar project."""
    if (dimensions_only is not None):
        warnings.warn("Argument 'dimension_only' is redundant and has no effect It will no longer be supported in xtgeo version 4.0 and can safely be removed.", DeprecationWarning)
    kwargs = _grid_roxapi.import_grid_roxapi(projectname, gname, realisation, info)
    self._reset(**kwargs)
def convert_units(self, units: Units) -> None:
    """Convert the grid geometry to *units* in place.

    Raises:
        ValueError: when the grid has no units set.
    """
    if (self.units is None):
        raise ValueError('convert_units called on unitless grid.')
    if (self.units == units):
        # Already in the requested unit system: nothing to do.
        return
    factor = self.units.conversion_factor(units)
    self._coordsv *= factor
    self._zcornsv *= factor
    self.units = units

def copy(self) -> Grid:
    """Return a deep copy of this grid."""
    logger.info('Copy a Grid instance')
    return _grid_etc1.copy(self)
def describe(self, details: bool=False, flush: bool=True) -> (str | None):
    """Build a human-readable description of the grid.

    With flush=True the text is printed and None is returned; otherwise
    the text is returned.  details=True adds geometry statistics.
    """
    logger.info('Print a description...')
    dsc = XTGDescription()
    dsc.title('Description of Grid instance')
    dsc.txt('Object ID', id(self))
    dsc.txt('File source', self._filesrc)
    dsc.txt('Shape: NCOL, NROW, NLAY', self.ncol, self.nrow, self.nlay)
    dsc.txt('Number of active cells', self.nactive)
    if details:
        # Geometry stats for active cells (cell centers)...
        geom = self.get_geometrics(cellcenter=True, return_dict=True)
        assert isinstance(geom, dict)
        prp1: list[str] = []
        for prp in ('xmin', 'xmax', 'ymin', 'ymax', 'zmin', 'zmax'):
            prp1.append(f'{geom[prp]:10.3f}')
        prp2: list[str] = []
        for prp in ('avg_dx', 'avg_dy', 'avg_dz', 'avg_rotation'):
            prp2.append(f'{geom[prp]:7.4f}')
        # ...and for all cells (cell corners).
        geox = self.get_geometrics(cellcenter=False, allcells=True, return_dict=True)
        assert isinstance(geox, dict)
        prp3: list[str] = []
        for prp in ('xmin', 'xmax', 'ymin', 'ymax', 'zmin', 'zmax'):
            prp3.append(f'{geox[prp]:10.3f}')
        prp4 = []
        for prp in ('avg_dx', 'avg_dy', 'avg_dz', 'avg_rotation'):
            prp4.append(f'{geox[prp]:7.4f}')
        dsc.txt('For active cells, using cell centers:')
        dsc.txt('Xmin, Xmax, Ymin, Ymax, Zmin, Zmax:', *prp1)
        dsc.txt('Avg DX, Avg DY, Avg DZ, Avg rotation:', *prp2)
        dsc.txt('For all cells, using cell corners:')
        dsc.txt('Xmin, Xmax, Ymin, Ymax, Zmin, Zmax:', *prp3)
        dsc.txt('Avg DX, Avg DY, Avg DZ, Avg rotation:', *prp4)
    dsc.txt('Attached grid props objects (names)', self.propnames)
    if details:
        dsc.txt('Attached grid props objects (id)', self.props)
    if self.subgrids:
        dsc.txt('Number of subgrids', len(list(self.subgrids.keys())))
    else:
        dsc.txt('Number of subgrids', 'No subgrids')
    if details:
        dsc.txt('Subgrids details', json.dumps(self.get_subgrids()))
        dsc.txt('Subgrids with values array', self.subgrids)
    if flush:
        dsc.flush()
        return None
    return dsc.astext()
def get_dataframe(self, activeonly: bool=True, ijk: bool=True, xyz: bool=True, doubleformat: bool=False) -> pd.DataFrame:
    """Return a pandas DataFrame of grid data; delegates to self.gridprops."""
    return self.gridprops.get_dataframe(grid=self, activeonly=activeonly, ijk=ijk, xyz=xyz, doubleformat=doubleformat)
# NOTE(review): in the source as given this was a bare `(deprecated_in=...,)`
# keyword tuple — a SyntaxError; the decorator callable was evidently stripped.
# Restored `deprecation.deprecated` (assumes the module-level `import deprecation`
# present at the top of this file — confirm).
@deprecation.deprecated(deprecated_in='2.16', removed_in='4.0', current_version=__version__, details='Method dataframe is deprecated, use get_dataframe instead.')
def dataframe(self, *args: Any, **kwargs: Any) -> pd.DataFrame:
    """Deprecated alias; forwards all arguments to :meth:`get_dataframe`."""
    return self.get_dataframe(*args, **kwargs)
def get_vtk_esg_geometry_data(self) -> tuple[(np.ndarray, np.ndarray, np.ndarray, np.ndarray)]:
    """Return VTK ExplicitStructuredGrid geometry arrays; delegates to _grid_etc1."""
    return _grid_etc1.get_vtk_esg_geometry_data(self)
def get_vtk_geometries(self) -> tuple[(np.ndarray, np.ndarray, np.ndarray)]:
    """Return VTK geometry arrays; delegates to _grid_etc1."""
    return _grid_etc1.get_vtk_geometries(self)
def append_prop(self, prop: GridProperty) -> None:
    """Attach a grid property to this grid; dimensions must match exactly."""
    if prop.dimensions != self.dimensions:
        raise ValueError(f'Dimensions does not match, got: {prop.dimensions} expected: {self.dimensions}.')
    self._props.append_props([prop])
def set_subgrids(self, sdict: dict[str, int] | None) -> None:
    """Set subgrids from a {name: number_of_layers} mapping.

    Each entry becomes a 1-based, contiguous layer range; a None input
    is a no-op.
    """
    if sdict is None:
        return
    if not isinstance(sdict, dict):
        raise ValueError('Input sdict is not an dict')
    subgrids: dict[str, range | list[int]] = {}
    start = 1
    for name, nlayers in sdict.items():
        stop = start + nlayers
        subgrids[name] = range(start, stop)
        start = stop
    self.subgrids = subgrids
def get_subgrids(self) -> dict[str, int] | None:
    """Return {subgrid_name: number_of_layers}, or None when no subgrids exist."""
    if not self.subgrids:
        return None
    return {name: len(layer_range) for name, layer_range in self.subgrids.items()}
def rename_subgrids(self, names: list[str] | tuple[str, ...]) -> None:
    """Rename all subgrids in order, keeping their layer counts."""
    if not isinstance(names, (list, tuple)):
        raise ValueError('Input names not a list or a tuple')
    assert self.subgrids is not None
    if len(names) != len(list(self.subgrids.keys())):
        raise ValueError('Lenght of names list not same as number of subgrids')
    current = self.get_subgrids()
    assert current is not None
    renamed = current.copy()
    for index, old_name in enumerate(self.subgrids.keys()):
        renamed[str(names[index])] = renamed.pop(old_name)
    self.set_subgrids(renamed)
def estimate_design(self, nsub: str | int | None = None) -> dict[str, str | float] | None:
    """Estimate the design of a (sub)grid; delegates to _grid_etc1.

    Args:
        nsub: Subgrid to evaluate, given as a 1-based number or a name.
            Must be given when subgrids exist.

    Returns:
        The estimate dict, or None when the requested subgrid is absent.
    """
    nsubname = None
    if nsub is None and self.subgrids:
        raise ValueError('Subgrids exists, nsub cannot be None')
    if nsub is not None:
        if not self.subgrids:
            return None
        if isinstance(nsub, int):
            try:
                nsubname = list(self.subgrids.keys())[nsub - 1]
            except IndexError:
                return None
        elif isinstance(nsub, str):
            nsubname = nsub
        else:
            raise ValueError('Key nsub of wrong type, must be a number or a name')
        if nsubname not in self.subgrids:
            return None
    return _grid_etc1.estimate_design(self, nsubname)
def estimate_flip(self) -> Literal[(1, (- 1))]:
    """Estimate the grid flip (handedness) as 1 or -1; delegates to _grid_etc1."""
    return _grid_etc1.estimate_flip(self)
def subgrids_from_zoneprop(self, zoneprop: GridProperty) -> dict[str, int] | None:
    """Derive subgrids from a zone property; returns {name: number_of_layers}.

    Each zone value becomes a subgrid named 'zone<value>' covering the
    K-layer range where that zone occurs.
    """
    _, _, k_index = self.get_ijk()
    kvalues = k_index.values
    zvalues = zoneprop.values
    zone_lo = int(zvalues.min())
    zone_hi = int(zvalues.max())
    subgrids: dict[str, range] = {}
    for zone in range(zone_lo, zone_hi + 1):
        kmin = int(kvalues[zvalues == zone].min())
        kmax = int(kvalues[zvalues == zone].max())
        subgrids['zone' + str(zone)] = range(kmin, kmax + 1)
    self.subgrids = subgrids
    return self.get_subgrids()
def get_zoneprop_from_subgrids(self) -> NoReturn:
    """Placeholder: building a zone property from subgrids is not implemented."""
    raise NotImplementedError('Not yet; todo')
def get_actnum_indices(self, order: Literal['C', 'F', 'A', 'K'] = 'C', inverse: bool = False) -> np.ndarray:
    """Return flat indices of active cells (or inactive cells when inverse=True)."""
    flat = np.ravel(self.get_actnum().values.copy(order=order), order=order)
    if inverse:
        # ACTNUM holds 0/1; subtracting 1 maps active -> 0 and inactive -> -1,
        # so flatnonzero then selects the inactive cells.
        flat -= 1
    return np.flatnonzero(flat)
def get_dualactnum_indices(self, order: Literal['C', 'F', 'A', 'K'] = 'C', fracture: bool = False) -> np.ndarray | None:
    """Return flat indices of active matrix (or fracture) cells for dual-porosity grids.

    Returns None when the grid is not dual porosity. Dual ACTNUM codes:
    the fracture system is active for codes {2, 3}; the matrix system for {1, 3}.
    """
    if not self._dualporo:
        return None
    assert self._dualactnum is not None
    flat = np.ravel(self._dualactnum.values.copy(order=order), order=order)
    selected = flat.copy()
    if fracture:
        selected[(flat == 3) | (flat == 2)] = 1
        selected[(flat == 1) | (flat == 0)] = 0
    else:
        selected[(flat == 3) | (flat == 1)] = 1
        selected[(flat == 2) | (flat == 0)] = 0
    return np.flatnonzero(selected)
# NOTE(review): the decorator callable was missing in the source as given
# (bare keyword tuple, a SyntaxError). Restored `deprecation.deprecated`
# (assumes the module-level `import deprecation` — confirm).
@deprecation.deprecated(deprecated_in='2.16', removed_in='4.0', current_version=__version__, details='Use xtgeo.Grid().gridprops instead')
def get_gridproperties(self) -> GridProperties:
    """Deprecated accessor for the attached GridProperties collection."""
    return self._props
def get_prop_by_name(self, name: str) -> GridProperty | None:
    """Return the attached grid property with the given name, or None.

    Raises:
        RuntimeError: if no property objects are attached (self.props is None).
    """
    if self.props is None:
        # typo fix: "gird" -> "grid" in the error message
        raise RuntimeError(f'{self.__class__.__name__} has no grid property objects (self.props is None)')
    for prop in self.props:
        if prop.name == name:
            return prop
    return None
def get_actnum(self, name: str='ACTNUM', asmasked: bool=False, mask: (bool | None)=None, dual: bool=False) -> GridProperty:
    """Return the ACTNUM (active-cell indicator) as a GridProperty.

    Args:
        name: Name for the returned property.
        asmasked: If True, inactive (0) cells are masked in the result.
        mask: Deprecated alias for asmasked.
        dual: If True and the grid is dual porosity, return the dual ACTNUM
            (codes 0..3) instead of the plain 0/1 indicator.
    """
    if (mask is not None):
        # backward compatibility for the deprecated 'mask' keyword
        xtg.warndeprecated('The mask option is deprecated,and will be removed in version 4.0. Use asmasked instead.')
        asmasked = self._evaluate_mask(mask)
    if (dual and self._dualactnum):
        act = self._dualactnum.copy()
    else:
        act = xtgeo.grid3d.GridProperty(ncol=self._ncol, nrow=self._nrow, nlay=self._nlay, values=np.zeros((self._ncol, self._nrow, self._nlay), dtype=np.int32), name=name, discrete=True)
        if (self._xtgformat == 1):
            # xtgformat 1 stores Fortran ordering internally; convert to C order
            values = _gridprop_lowlevel.f2c_order(self, self._actnumsv)
        else:
            values = self._actnumsv
        act.values = values
        act.mask_undef()
    if asmasked:
        act.values = ma.masked_equal(act.values, 0)
    act.codes = {0: '0', 1: '1'}
    if (dual and self._dualactnum):
        act.codes = {0: '0', 1: '1', 2: '2', 3: '3'}
    return act
def set_actnum(self, actnum: GridProperty) -> None:
    """Set the grid ACTNUM from a GridProperty, honoring the internal storage format."""
    if self._xtgformat == 1:
        flattened = actnum.values.ravel()
        self._actnumsv = _gridprop_lowlevel.c2f_order(self, flattened)
    else:
        # fill masked entries with 0 (inactive) and store as int32
        self._actnumsv = np.ma.filled(actnum.values, fill_value=0).astype(np.int32)
def get_dz(self, name: str='dZ', flip: bool=True, asmasked: bool=True, mask: (bool | None)=None, metric: METRIC='z projection') -> GridProperty:
    """Return cell thickness (dZ) as a GridProperty; delegates to _grid_etc1."""
    if (mask is not None):
        # backward compatibility for the deprecated 'mask' keyword
        xtg.warndeprecated('The mask option is deprecated,and will be removed in version 4.0. Use asmasked instead.')
        asmasked = self._evaluate_mask(mask)
    return _grid_etc1.get_dz(self, name=name, flip=flip, asmasked=asmasked, metric=metric)
def get_dx(self, name: str='dX', asmasked: bool=True, metric: METRIC='horizontal') -> GridProperty:
    """Return cell X-direction size (dX) as a GridProperty; delegates to _grid_etc1."""
    return _grid_etc1.get_dx(self, name=name, asmasked=asmasked, metric=metric)
def get_dy(self, name: str='dY', asmasked: bool=True, metric: METRIC='horizontal') -> GridProperty:
    """Return cell Y-direction size (dY) as a GridProperty; delegates to _grid_etc1."""
    return _grid_etc1.get_dy(self, name=name, asmasked=asmasked, metric=metric)
# NOTE(review): the decorator callable was missing in the source as given
# (bare keyword tuple, a SyntaxError). Restored `deprecation.deprecated`
# (assumes the module-level `import deprecation` — confirm).
@deprecation.deprecated(deprecated_in='3.0', removed_in='4.0', current_version=__version__, details='Use xtgeo.Grid.get_dx() and/or xtgeo.Grid.get_dy() instead.')
def get_dxdy(self, names: tuple[str, str] = ('dX', 'dY'), asmasked: bool = False) -> tuple[GridProperty, GridProperty]:
    """Deprecated: return (dX, dY) as a pair of GridProperty objects."""
    return (self.get_dx(name=names[0], asmasked=asmasked), self.get_dy(name=names[1], asmasked=asmasked))
def get_cell_volume(self, ijk: tuple[(int, int, int)]=(1, 1, 1), activeonly: bool=True, zerobased: bool=False, precision: Literal[(1, 2, 4)]=2) -> float:
    """Return the bulk volume of one cell addressed by ijk; delegates to _grid_etc1."""
    return _grid_etc1.get_cell_volume(self, ijk=ijk, activeonly=activeonly, zerobased=zerobased, precision=precision)
def get_bulk_volume(self, name: str='bulkvol', asmasked: bool=True, precision: Literal[(1, 2, 4)]=2) -> GridProperty:
    """Return bulk volume per cell as a GridProperty; delegates to _grid_etc1."""
    return _grid_etc1.get_bulk_volume(self, name=name, asmasked=asmasked, precision=precision)
def get_ijk(self, names: tuple[(str, str, str)]=('IX', 'JY', 'KZ'), asmasked: bool=True, mask: (bool | None)=None, zerobased: bool=False) -> tuple[(GridProperty, GridProperty, GridProperty)]:
    """Return I, J, K cell indices as three GridProperty objects."""
    if (mask is not None):
        # backward compatibility for the deprecated 'mask' keyword
        xtg.warndeprecated('The mask option is deprecated,and will be removed in version 4.0. Use asmasked instead.')
        asmasked = self._evaluate_mask(mask)
    (ixc, jyc, kzc) = _grid_etc1.get_ijk(self, names=names, asmasked=asmasked, zerobased=zerobased)
    return (ixc, jyc, kzc)
def get_ijk_from_points(self, points: Points, activeonly: bool=True, zerobased: bool=False, dataframe: bool=True, includepoints: bool=True, columnnames: tuple[(str, str, str)]=('IX', 'JY', 'KZ'), fmt: Literal[('int', 'float')]='int', undef: int=(- 1)) -> (pd.DataFrame | list):
    """Return cell I, J, K indices for a set of points; delegates to _grid_etc1."""
    return _grid_etc1.get_ijk_from_points(self, points, activeonly=activeonly, zerobased=zerobased, dataframe=dataframe, includepoints=includepoints, columnnames=columnnames, fmt=fmt, undef=undef)
def get_xyz(self, names: tuple[(str, str, str)]=('X_UTME', 'Y_UTMN', 'Z_TVDSS'), asmasked: bool=True, mask: (bool | None)=None) -> tuple[(GridProperty, GridProperty, GridProperty)]:
    """Return cell-center X, Y, Z coordinates as three GridProperty objects."""
    if (mask is not None):
        # backward compatibility for the deprecated 'mask' keyword
        xtg.warndeprecated('The mask option is deprecated,and will be removed in version 4.0. Use asmasked instead.')
        asmasked = self._evaluate_mask(mask)
    return _grid_etc1.get_xyz(self, names=names, asmasked=asmasked)
def get_xyz_cell_corners(self, ijk: tuple[(int, int, int)]=(1, 1, 1), activeonly: bool=True, zerobased: bool=False) -> tuple[(int, ...)]:
    """Return corner coordinates for one cell addressed by ijk; delegates to _grid_etc1."""
    return _grid_etc1.get_xyz_cell_corners(self, ijk=ijk, activeonly=activeonly, zerobased=zerobased)
def get_xyz_corners(self, names: tuple[(str, str, str)]=('X_UTME', 'Y_UTMN', 'Z_TVDSS')) -> tuple[(GridProperty, ...)]:
    """Return corner coordinates for all cells as GridProperty objects; delegates to _grid_etc1."""
    return _grid_etc1.get_xyz_corners(self, names=names)
def get_layer_slice(self, layer: int, top: bool=True, activeonly: bool=True) -> tuple[(np.ndarray, np.ndarray)]:
    """Return a horizontal slice of one layer; delegates to _grid_etc1."""
    return _grid_etc1.get_layer_slice(self, layer, top=top, activeonly=activeonly)
def get_geometrics(self, allcells: bool=False, cellcenter: bool=True, return_dict: bool=False, _ver: Literal[(1, 2)]=1) -> (dict | tuple):
    """Return geometric statistics for the grid; delegates to _grid_etc1."""
    return _grid_etc1.get_geometrics(self, allcells=allcells, cellcenter=cellcenter, return_dict=return_dict, _ver=_ver)
def get_adjacent_cells(self, prop: GridProperty, val1: int, val2: int, activeonly: bool=True) -> GridProperty:
    """Return a property marking cells where prop values val1 and val2 are adjacent; delegates to _grid_etc1."""
    return _grid_etc1.get_adjacent_cells(self, prop, val1, val2, activeonly=activeonly)
def get_gridquality_properties(self) -> GridProperties:
    """Return a set of grid-quality indicator properties; delegates to _grid_etc1."""
    return _grid_etc1.get_gridquality_properties(self)
def activate_all(self) -> None:
    """Mark every cell in the grid as active."""
    all_active = np.ones(self.dimensions, dtype=np.int32)
    if self._xtgformat == 1:
        # xtgformat 1 keeps ACTNUM as a flat array
        all_active = all_active.flatten()
    self._actnumsv = all_active
    self._tmp = {}  # reset internal temporary storage
def inactivate_by_dz(self, threshold: float) -> None:
    """Inactivate cells with thickness below threshold; delegates to _grid_etc1."""
    _grid_etc1.inactivate_by_dz(self, threshold)
    self._tmp = {}  # reset internal temporary storage
def inactivate_inside(self, poly: Polygons, layer_range: (tuple[(int, int)] | None)=None, inside: bool=True, force_close: bool=False) -> None:
    """Inactivate cells inside (or outside, if inside=False) a polygon; delegates to _grid_etc1."""
    _grid_etc1.inactivate_inside(self, poly, layer_range=layer_range, inside=inside, force_close=force_close)
    self._tmp = {}
def inactivate_outside(self, poly: Polygons, layer_range: (tuple[(int, int)] | None)=None, force_close: bool=False) -> None:
    """Inactivate cells outside a polygon; thin wrapper over inactivate_inside."""
    self.inactivate_inside(poly, layer_range=layer_range, inside=False, force_close=force_close)
    self._tmp = {}
def collapse_inactive_cells(self) -> None:
    """Collapse inactive cells vertically; delegates to _grid_etc1."""
    _grid_etc1.collapse_inactive_cells(self)
    self._tmp = {}
def crop(self, colcrop: tuple[(int, int)], rowcrop: tuple[(int, int)], laycrop: tuple[(int, int)], props: ((Literal['all'] | list[GridProperty]) | None)=None) -> None:
    """Crop the grid (and optionally attached properties) to the given ranges; delegates to _grid_etc1."""
    _grid_etc1.crop(self, (colcrop, rowcrop, laycrop), props=props)
    self._tmp = {}  # reset internal temporary storage
def reduce_to_one_layer(self) -> None:
    """Reduce the grid to a single layer; delegates to _grid_etc1."""
    _grid_etc1.reduce_to_one_layer(self)
    self._tmp = {}
def translate_coordinates(self, translate: tuple[(int, int, int)]=(0, 0, 0), flip: tuple[(int, int, int)]=(1, 1, 1)) -> None:
    """Translate (and optionally flip) grid coordinates; delegates to _grid_etc1."""
    _grid_etc1.translate_coordinates(self, translate=translate, flip=flip)
    self._tmp = {}
def reverse_row_axis(self, ijk_handedness: (Literal[('left', 'right')] | None)=None) -> None:
    """Reverse the row (J) axis, optionally targeting a handedness; delegates to _grid_etc1."""
    _grid_etc1.reverse_row_axis(self, ijk_handedness=ijk_handedness)
    self._tmp = {}
def make_zconsistent(self, zsep: (float | int)=1e-05) -> None:
    """Make the grid Z-consistent with a minimum separation; delegates to _grid_etc1."""
    _grid_etc1.make_zconsistent(self, zsep)
    self._tmp = {}
def convert_to_hybrid(self, nhdiv: int=10, toplevel: float=1000.0, bottomlevel: float=1100.0, region: (GridProperty | None)=None, region_number: (int | None)=None) -> None:
    """Convert the grid to a hybrid grid; delegates to _grid_hybrid."""
    _grid_hybrid.make_hybridgrid(self, nhdiv=nhdiv, toplevel=toplevel, bottomlevel=bottomlevel, region=region, region_number=region_number)
    self._tmp = {}  # reset internal temporary storage
def refine_vertically(self, rfactor: ((int | dict) | None), zoneprop: (GridProperty | None)=None) -> None:
    """Refine the grid vertically by a factor (per zone if a dict); delegates to _grid_refine."""
    _grid_refine.refine_vertically(self, rfactor, zoneprop=zoneprop)
    self._tmp = {}
def report_zone_mismatch(self, well: (Well | None)=None, zonelogname: str='ZONELOG', zoneprop: (GridProperty | None)=None, onelayergrid: (tuple | None)=None, zonelogrange: tuple[(int, int)]=(0, 9999), zonelogshift: int=0, depthrange: (tuple | None)=None, perflogname: (str | None)=None, perflogrange: tuple[(int, int)]=(1, 9999), filterlogname: (str | None)=None, filterlogrange: tuple[(float, float)]=(1e-32, 9999.0), resultformat: Literal[(1, 2)]=1) -> ((tuple | dict) | None):
    """Report mismatch between a well zone log and the grid zone property; delegates to _grid_wellzone."""
    return _grid_wellzone.report_zone_mismatch(self, well=well, zonelogname=zonelogname, zoneprop=zoneprop, onelayergrid=onelayergrid, zonelogrange=zonelogrange, zonelogshift=zonelogshift, depthrange=depthrange, perflogname=perflogname, perflogrange=perflogrange, filterlogname=filterlogname, filterlogrange=filterlogrange, resultformat=resultformat)
def get_randomline(
    self,
    fencespec: np.ndarray | Polygons,
    prop: str | GridProperty,
    zmin: float | None = None,
    zmax: float | None = None,
    zincrement: float = 1.0,
    hincrement: float | None = None,
    atleast: int = 5,
    nextend: int = 2,
) -> tuple[float, float, float, float, np.ndarray]:
    """Sample a grid property along a fence (random line); delegates to _grid3d_fence."""
    if not isinstance(fencespec, (np.ndarray, xtgeo.Polygons)):
        raise ValueError('fencespec must be a numpy or a Polygons() object')
    logger.info('Getting randomline...')
    sampled = _grid3d_fence.get_randomline(
        self, fencespec, prop,
        zmin=zmin, zmax=zmax, zincrement=zincrement,
        hincrement=hincrement, atleast=atleast, nextend=nextend,
    )
    logger.info('Getting randomline... DONE')
    return sampled
def _convert_xtgformat2to1(self) -> None:
    """Convert internal storage arrays from xtgformat 2 to 1; delegates to _grid_etc1."""
    _grid_etc1._convert_xtgformat2to1(self)
def _convert_xtgformat1to2(self) -> None:
    """Convert internal storage arrays from xtgformat 1 to 2; delegates to _grid_etc1."""
    _grid_etc1._convert_xtgformat1to2(self)
def _xtgformat1(self) -> None:
    """Ensure the internal storage is on xtgformat 1."""
    self._convert_xtgformat2to1()
def _xtgformat2(self) -> None:
    """Ensure the internal storage is on xtgformat 2."""
    self._convert_xtgformat1to2()
def _download_finance_data_if_need(the_path, the_code):
if (not os.path.exists(the_path)):
from fooltrader.datamanager.china_stock_manager import crawl_finance_data
logger.info('try to download the finance data at first')
crawl_finance_data(start_code=the_code, end_code=the_code) |
def cli(args=None):
    """Command-line entry point: parse arguments and dispatch the run.

    Args:
        args: Argument list; defaults to sys.argv[1:] when None.
    """
    logger.info(f'Inside CLI, args={args!r}')
    parser = basic_argument_parser(requires_output_dir=False)
    parser.add_argument('--eval-only', action='store_true', help='perform evaluation only')
    parser.add_argument('--resume', action='store_true', help='whether to attempt to resume from the checkpoint directory')
    if args is None:
        args = sys.argv[1:]
    run_with_cmdline_args(parser.parse_args(args))
def head_data_forward(model, docs, is_train):
    """Collect sentence ids and coref head token indices for each doc.

    Returns ((sent_ids, head_ids), backprop) where backprop is a no-op
    returning an empty gradient list.
    """
    sent_ids = []
    head_ids = []
    prefix = model.attrs['prefix']
    for doc in docs:
        sent_ids.append(model.ops.asarray2i(get_sentence_ids(doc)))
        doc_heads = []
        for key, span_group in doc.spans.items():
            if not matches_coref_prefix(prefix, key):
                continue
            for span in span_group:
                # only the first token of each span is used as the head
                doc_heads.append(span[0].i)
                if len(span) > 1:
                    warnings.warn(f'Input span has length {len(span)}, but should be 1.')
        head_ids.append(model.ops.asarray2i(doc_heads))

    def backprop(dY):
        return []

    return (sent_ids, head_ids), backprop
class Invite(db.Model):
    """Invitation row linking a user to an event and a session."""
    __tablename__ = 'invites'
    id = db.Column(db.Integer, primary_key=True)
    # ondelete='CASCADE': removing the referenced row removes the invite
    user_id = db.Column(db.Integer, db.ForeignKey('users.id', ondelete='CASCADE'))
    user = db.relationship('User', backref='invite')
    event_id = db.Column(db.Integer, db.ForeignKey('events.id', ondelete='CASCADE'))
    session_id = db.Column(db.Integer, db.ForeignKey('sessions.id', ondelete='CASCADE'))
    session = db.relationship('Session', backref='invite')
    # opaque hash identifying the invite (e.g. in URLs) — required
    hash = db.Column(db.String, nullable=False)
    def __repr__(self):
        """Debug representation keyed on the invited user id."""
        return ('<Invite %r>' % self.user_id)
def add_param(arr_t, param_dtype):
    """Return a Transformation that adds a scalar parameter to each array element."""
    parameters = [
        Parameter('output', Annotation(arr_t, 'o')),
        Parameter('input', Annotation(arr_t, 'i')),
        Parameter('param', Annotation(param_dtype)),
    ]
    snippet = '${output.store_same}(${add}(${input.load_same}, ${param}));'
    add_fn = functions.add(arr_t.dtype, param_dtype, out_dtype=arr_t.dtype)
    return Transformation(parameters, snippet, render_kwds=dict(add=add_fn))
def test_verbose(server):
    # NOTE(review): this test appears corrupted by extraction:
    #  - `runner.invoke(...)` is missing the CLI entry point as its first
    #    argument (CliRunner.invoke expects the command object first), and
    #  - the expected-output list contains a garbled literal around
    #    f'User-Agent: python- ' that is not valid Python as written.
    # Left byte-identical; restore from the upstream project before use.
    url = str(server.url)
    runner = CliRunner()
    result = runner.invoke( [url, '-v'])
    assert (result.exit_code == 0)
    assert (remove_date_header(splitlines(result.output)) == ["* Connecting to '127.0.0.1'", "* Connected to '127.0.0.1' on port 8000", 'GET / HTTP/1.1', f"Host: {server.url.netloc.decode('ascii')}", 'Accept: */*', 'Accept-Encoding: gzip, deflate, br', 'Connection: keep-alive', f'User-Agent: python- '', 'HTTP/1.1 200 OK', 'server: uvicorn', 'content-type: text/plain', 'Transfer-Encoding: chunked', '', 'Hello, world!'])
class DelUser(MethodView):
    """API view that deletes a user together with the user's node bindings."""

    # NOTE(review): in the source as given this was a bare call
    # `_required(constant.PERMISSION_LEVEL_4)` whose result was discarded —
    # almost certainly a stripped `@` decorator (same stripping pattern is
    # visible elsewhere in this file). Restored as a decorator on post().
    @_required(constant.PERMISSION_LEVEL_4)
    def post(self):
        """Handle POST with a JSON body of the form {'username': <name>}."""
        data = request.get_data()
        data = json.loads(data.decode('UTF-8'))
        user_name = data['username']
        user_api = UserTable()
        (ret, msg) = user_api.del_user(user_name)
        if not ret:
            return jsonify({'code': 500, 'data': msg})
        user_node_api = UserNodesTable()
        # NOTE(review): nodes are deleted before get_node_for_user_name is
        # called, so del_list may already be empty — confirm intended order.
        user_node_api.del_user(user_name)
        del_list = user_node_api.get_node_for_user_name(user_name)
        del_data_list = [{'user_name': user_name, 'node_name': node} for node in del_list]
        user_node_api.del_user_node(del_data_list)
        return jsonify({'code': 200, 'data': ''})
class OptionPlotoptionsSolidgaugeSonificationTracks(Options):
    """Options for `plotOptions.solidgauge.sonification.tracks`.

    NOTE(review): in the source as given each getter/setter pair was two
    plain methods with the same name, so the second ``def`` silently
    shadowed the first and every getter was unreachable. Restored the
    ``@property`` / ``@<name>.setter`` pattern this Options API relies on —
    confirm against the generator that produced this file.
    """

    @property
    def activeWhen(self) -> 'OptionPlotoptionsSolidgaugeSonificationTracksActivewhen':
        """Sub-options object for `activeWhen`."""
        return self._config_sub_data('activeWhen', OptionPlotoptionsSolidgaugeSonificationTracksActivewhen)

    @property
    def instrument(self):
        """Instrument setting (default 'piano')."""
        return self._config_get('piano')

    @instrument.setter
    def instrument(self, text: str):
        self._config(text, js_type=False)

    @property
    def mapping(self) -> 'OptionPlotoptionsSolidgaugeSonificationTracksMapping':
        """Sub-options object for `mapping`."""
        return self._config_sub_data('mapping', OptionPlotoptionsSolidgaugeSonificationTracksMapping)

    @property
    def midiName(self):
        """midiName setting (no default)."""
        return self._config_get(None)

    @midiName.setter
    def midiName(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointGrouping(self) -> 'OptionPlotoptionsSolidgaugeSonificationTracksPointgrouping':
        """Sub-options object for `pointGrouping`."""
        return self._config_sub_data('pointGrouping', OptionPlotoptionsSolidgaugeSonificationTracksPointgrouping)

    @property
    def roundToMusicalNotes(self):
        """roundToMusicalNotes flag (default True)."""
        return self._config_get(True)

    @roundToMusicalNotes.setter
    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def showPlayMarker(self):
        """showPlayMarker flag (default True)."""
        return self._config_get(True)

    @showPlayMarker.setter
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def type(self):
        """Track type (default 'instrument')."""
        return self._config_get('instrument')

    @type.setter
    def type(self, text: str):
        self._config(text, js_type=False)
def interpolate_all(geoms, between, kind='linear', align=False, **interpol_kwargs):
    """Interpolate `between` intermediate geometries between consecutive geoms.

    Returns the input unchanged when no interpolation is requested
    (between == 0 or kind is None).
    """
    if between == 0 or kind is None:
        return geoms
    interpolator = INTERPOLATE[kind](geoms, between, align=align, **interpol_kwargs)
    return interpolator.interpolate_all()
def test_memory_version_does_not_exist():
    """InsertMissingDefinitions should raise ValueError for this block.

    The instruction list references memory versions that are never defined
    (e.g. writes_memory jumps from 1 to 3) — presumably the condition under
    test; confirm against the pipeline stage's documentation.
    """
    # five SSA versions of aliased variable x and of temporary v
    aliased_x = [Variable('x', Integer.int32_t(), i, is_aliased=True) for i in range(5)]
    var_v = [Variable('v', Integer.int32_t(), i) for i in range(5)]
    instructions = [Assignment(var_v[1], UnaryOperation(OperationType.address, [aliased_x[0]])), Assignment(ListOperation([]), Call(function_symbol('scanf'), [Constant(), var_v[1]], writes_memory=1)), Assignment(var_v[2], BinaryOperation(OperationType.plus, [aliased_x[1], Constant(5)])), Assignment(aliased_x[2], BinaryOperation(OperationType.multiply, [aliased_x[1], Constant(3)])), Assignment(ListOperation([]), Call(function_symbol('printf'), [Constant(), aliased_x[2]], writes_memory=3)), Assignment(var_v[4], aliased_x[4])]
    cfg = ControlFlowGraph()
    task = DecompilerTask('test', cfg)
    cfg.add_node(BasicBlock(1, instructions))
    with pytest.raises(ValueError):
        InsertMissingDefinitions().run(task)
class _ComputeInstancesRepository(repository_mixins.AggregatedListQueryMixin, repository_mixins.ListQueryMixin, _base_repository.GCPRepository):
    """Repository for the GCE Compute `instances` API component."""
    def __init__(self, **kwargs):
        """Forward kwargs to GCPRepository, fixing component='instances'."""
        super(_ComputeInstancesRepository, self).__init__(component='instances', **kwargs)
    def list(self, resource, zone, **kwargs):
        """List instances in a single zone.

        Calls ListQueryMixin.list explicitly to bypass the aggregated-list
        variant that would otherwise be picked up via the MRO.
        """
        kwargs['zone'] = zone
        return repository_mixins.ListQueryMixin.list(self, resource, **kwargs)
class CandidateDuplicationInterspersed(Candidate):
    """SV candidate: interspersed duplication (a source region copied to a distant destination)."""
    def __init__(self, source_contig, source_start, source_end, dest_contig, dest_start, dest_end, reads, bam, cutpaste=False, genotype='1/1'):
        # NOTE: asserts are used for input validation (project style); they
        # are stripped under `python -O`.
        assert (source_end >= source_start), 'Interspersed duplication source end ({0}:{1}) is smaller than its start ({0}:{2}). From read {3}'.format(source_contig, source_end, source_start, reads)
        assert (dest_end >= dest_start), 'Interspersed duplication destination end ({0}:{1}) is smaller than its start ({0}:{2}). From read {3}'.format(dest_contig, dest_end, dest_start, reads)
        self.source_contig = source_contig
        # clamp coordinates to [0, contig length]
        source_contig_length = bam.get_reference_length(source_contig)
        self.source_start = max(0, source_start)
        self.source_end = min(source_contig_length, source_end)
        self.dest_contig = dest_contig
        dest_contig_length = bam.get_reference_length(dest_contig)
        self.dest_start = max(0, dest_start)
        self.dest_end = min(dest_contig_length, dest_end)
        self.cutpaste = cutpaste  # True when the duplication looks like cut-and-paste
        self.type = 'DUP_INT'
        self.reads = reads
        self.genotype = genotype
    def get_destination(self):
        """Return (contig, start, end) of the insertion destination."""
        return (self.dest_contig, self.dest_start, self.dest_end)
    def get_key(self):
        """Return a (type, contig, start) key identifying this candidate."""
        return (self.type, self.dest_contig, self.dest_start)
    def get_vcf_entry_as_ins(self, sequence_alleles=False, reference=None, read_names=False):
        """Return a VCF line representing this candidate as an INS at the destination."""
        (contig, start, end) = self.get_destination()
        svtype = 'INS'
        filters = []
        if sequence_alleles:
            # REF is the base before the insertion point; ALT appends the duplicated sequence
            ref_allele = reference.fetch(contig, max(0, (start - 1)), start).upper()
            alt_allele = (ref_allele + reference.fetch(self.source_contig, self.source_start, self.source_end).upper())
        else:
            ref_allele = 'N'
            alt_allele = (('<' + svtype) + '>')
        info_template = 'SVTYPE={0};{1}END={2};SVLEN={3}'
        info_string = info_template.format(svtype, ('CUTPASTE;' if self.cutpaste else ''), start, (end - start))
        if read_names:
            info_string += ';READS={0}'.format(','.join(self.reads))
        return '{chrom}\t{pos}\t{id}\t{ref}\t{alt}\t{qual}\t{filter}\t{info}\t{format}\t{samples}'.format(chrom=contig, pos=max(1, start), id='PLACEHOLDERFORID', ref=ref_allele, alt=alt_allele, qual='.', filter=('PASS' if (len(filters) == 0) else ';'.join(filters)), info=info_string, format='GT', samples='{gt}'.format(gt=self.genotype))
    def get_vcf_entry_as_dup(self, read_names=False):
        """Return a VCF line representing this candidate as a DUP:INT at the source."""
        # NOTE(review): get_source() is not defined in this class — presumably
        # inherited from Candidate; confirm in the base class.
        (contig, start, end) = self.get_source()
        svtype = 'DUP:INT'
        filters = []
        info_template = 'SVTYPE={0};{1}END={2};SVLEN={3}'
        info_string = info_template.format(svtype, ('CUTPASTE;' if self.cutpaste else ''), end, (end - start))
        if read_names:
            info_string += ';READS={0}'.format(','.join(self.reads))
        return '{chrom}\t{pos}\t{id}\t{ref}\t{alt}\t{qual}\t{filter}\t{info}\t{format}\t{samples}'.format(chrom=contig, pos=(start + 1), id='PLACEHOLDERFORID', ref='N', alt=(('<' + svtype) + '>'), qual='.', filter=('PASS' if (len(filters) == 0) else ';'.join(filters)), info=info_string, format='GT', samples='{gt}'.format(gt=self.genotype))
class OptionPlotoptionsLineSonificationDefaultspeechoptionsMappingPlaydelay(Options):
    """Options for `plotOptions.line.sonification.defaultSpeechOptions.mapping.playDelay`.

    NOTE(review): in the source as given each getter/setter pair was two
    plain methods with the same name, so the second ``def`` silently
    shadowed the first and every getter was unreachable. Restored the
    ``@property`` / ``@<name>.setter`` pattern this Options API relies on —
    confirm against the generator that produced this file.
    """

    @property
    def mapFunction(self):
        """mapFunction setting (no default)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """mapTo setting (no default)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """max setting (no default)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """min setting (no default)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """within setting (no default)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class ElectionClassDate(db.Model):
    """Read model over the `ofec_election_dates` view/table."""
    __tablename__ = 'ofec_election_dates'
    race_pk = db.Column(db.Integer, primary_key=True)
    office = db.Column(db.String, index=True, doc=docs.OFFICE)
    office_desc = db.Column(db.String, doc=docs.OFFICE_FULL)
    state = db.Column(db.String, index=True, doc=docs.STATE)
    state_desc = db.Column(db.String, doc=docs.STATE)
    district = db.Column(db.Integer, index=True, doc=docs.DISTRICT)
    # some columns map Python names to differing DB column names (first arg)
    election_year = db.Column('election_yr', db.Integer, index=True, doc=docs.ELECTION_YEAR)
    open_seat_flag = db.Column('open_seat_flg', db.String, doc=docs.OPEN_SEAT_FLAG)
    create_date = db.Column(db.Date, doc=docs.CREATE_DATE)
    election_type_id = db.Column(db.String, doc=docs.ELECTION_TYPE)
    cycle_start_date = db.Column('cycle_start_dt', db.Date)
    cycle_end_date = db.Column('cycle_end_dt', db.Date)
    election_date = db.Column('election_dt', db.Date, doc=docs.ELECTION_DATE)
    senate_class = db.Column(db.Integer, index=True, doc=docs.SENATE_CLASS)
class OptionPlotoptionsHeatmapSonificationDefaultinstrumentoptionsMappingHighpass(Options):
    """Options for `plotOptions.heatmap.sonification.defaultInstrumentOptions.mapping.highpass`."""
    def frequency(self) -> 'OptionPlotoptionsHeatmapSonificationDefaultinstrumentoptionsMappingHighpassFrequency':
        """Return the `frequency` sub-options object."""
        return self._config_sub_data('frequency', OptionPlotoptionsHeatmapSonificationDefaultinstrumentoptionsMappingHighpassFrequency)
    def resonance(self) -> 'OptionPlotoptionsHeatmapSonificationDefaultinstrumentoptionsMappingHighpassResonance':
        """Return the `resonance` sub-options object."""
        return self._config_sub_data('resonance', OptionPlotoptionsHeatmapSonificationDefaultinstrumentoptionsMappingHighpassResonance)
def _run_command_side_effect(*args):
    """Mock side effect mapping known git command lines to canned outputs.

    Raises NotImplementedError for any command not in the table so that
    unexpected calls fail loudly in tests.
    """
    _verify_cwd(**args[1]._asdict())
    canned_outputs = {
        'git clone .': "Mocked output: Cloning into '.'...",
        "git rev-list --no-merges --committer='GitHub <>' --max-count=1 HEAD": 'mocked_hash',
        "git rev-list --no-merges --committer='GitHub <>' --max-count=2 mocked_hash": 'mocked_hash\nmocked_hash_the_sequel',
        'git status': "Your branch is up to date with 'some_random_branch'",
        'git remote -v': 'origin :username/reponame.git (fetch)',
        "git rev-list --no-merges --committer='GitHub <>' --max-count=2 expected_baseline_hash": 'expected_baseline_hash\nexpected_previous_commit_hash',
        'git clone :username/reponame.git .': "Cloning into '.'",
        'git tag --list --sort v:refname': '\nv1.14.5\nv1.15.0\nv1.15.1\nv1.15.2\nv1.16.0\n',
    }
    if args[0] in canned_outputs:
        return canned_outputs[args[0]]
    raise NotImplementedError(f'Unhandled input in side effect: {args}')
def write_module(basedir, generated_modules):
    """Write an ``__init__.py`` in *basedir* re-exporting each generated module.

    Creates *basedir* if needed.

    Raises:
        MsgGenerationException: if a non-directory file occupies *basedir*.
    """
    if not os.path.exists(basedir):
        os.makedirs(basedir)
    elif not os.path.isdir(basedir):
        raise MsgGenerationException('file preventing the creating of module directory: %s' % basedir)
    init_path = os.path.join(basedir, '__init__.py')
    with open(init_path, 'w') as f:
        for mod in generated_modules:
            f.write('from .%s import *\n' % mod)
    # removed a dead trailing `os.path.dirname(basedir)` call whose result
    # was discarded (no-op)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.