code stringlengths 281 23.7M |
|---|
def aten_compose_chunk(flat_args_1, chunk, dim):
    """Emulate ``torch.chunk`` via ``aten.split``.

    Computes the per-piece length as ceil(size / chunk) along ``dim`` and
    splits ``flat_args_1`` into pieces of that length (the final piece may
    be shorter).  Uses ``aten.sym_size`` so the size stays symbolic under
    tracing.

    Returns the list of tensor pieces produced by ``aten.split``.
    """
    dim_size = torch.ops.aten.sym_size(flat_args_1, dim)
    # Ceiling division: (size + chunk - 1) // chunk.
    piece_len = (dim_size + chunk - 1) // chunk
    return torch.ops.aten.split.Tensor(flat_args_1, piece_len, dim)
def validate_photo_url(photo_url, required=False):
if ((photo_url is None) and (not required)):
return None
if ((not isinstance(photo_url, str)) or (not photo_url)):
raise ValueError('Invalid photo URL: "{0}". Photo URL must be a non-empty string.'.format(photo_url))
try:
parsed = pa... |
class GymDialogue(Dialogue):
INITIAL_PERFORMATIVES: FrozenSet[Message.Performative] = frozenset({GymMessage.Performative.RESET})
TERMINAL_PERFORMATIVES: FrozenSet[Message.Performative] = frozenset({GymMessage.Performative.CLOSE})
VALID_REPLIES: Dict[(Message.Performative, FrozenSet[Message.Performative])] =... |
class OpsStats():
def __init__(self, name, sub_name):
self.name = name
self.sub_name = sub_name
self.stats_map = {'total_input': OpsCounter('total_input'), 'post_deduping': OpsCounter('post_deduping'), 'post_scoring': OpsCounter('post_scoring'), 'post_filtering': OpsCounter('post_filtering')... |
class OptionPlotoptionsDumbbellStatesSelectMarker(Options):
def enabled(self):
return self._config_get(None)
def enabled(self, flag: bool):
self._config(flag, js_type=False)
def enabledThreshold(self):
return self._config_get(2)
def enabledThreshold(self, num: float):
sel... |
class Buffer(DownstreamNode):
def __init__(self, internal_capacitance, drive_resistance, delays):
self.internal_capacitance = internal_capacitance
self.drive_resistance = drive_resistance
self.delays = delays
self.downstream_cap = None
self.rc_delay = None
def set_sink_wi... |
class Solution():
def permuteUnique(self, nums: List[int]) -> List[List[int]]:
def generate_permutations(candidates, curr, perms):
if (sum(candidates.values()) == 0):
perms.append(list(curr))
return
for n in list(candidates.keys()):
if ... |
_for(CreateIndexOp)
def _add_index(op: CreateIndexOp) -> str:
kw_str = ''
if op.kw:
kw_str = (', %s' % ', '.join([('%s=%r' % (key, val)) for (key, val) in op.kw.items()]))
return ('self.create_index(%(iname)r, %(tname)r, %(idata)s)' % {'tname': op.table_name, 'iname': op.index_name, 'idata': ('%r, e... |
class FuncDefNode():
def __init__(self, var_name_tok, arg_name_toks, body_node, should_auto_return):
self.var_name_tok = var_name_tok
self.arg_name_toks = arg_name_toks
self.body_node = body_node
self.should_auto_return = should_auto_return
if self.var_name_tok:
s... |
class XMPPPerson(XMPPIdentifier, Person):
aclattr = XMPPIdentifier.person
def __eq__(self, other):
if (not isinstance(other, XMPPPerson)):
log.debug('Weird, you are comparing an XMPPPerson to a %s', type(other))
return False
return ((self._domain == other._domain) and (se... |
class Sampler(Module, AutoCSR):
def __init__(self, pads):
self.enable = CSRStorage()
self.pattern = CSRStorage()
self.state = CSRStatus(fields=[CSRField('idle', offset=0), CSRField('trigger', offset=1), CSRField('capture', offset=2)])
self.trig_value = CSRStorage(8)
self.trig... |
def test_task_get_overrides_with_secrets(task_definition):
task_definition.set_secrets((('webserver', 'foo', 'baz'),))
overrides = task_definition.get_overrides()
assert (len(overrides) == 1)
assert (overrides[0]['name'] == 'webserver')
assert (dict(name='foo', valueFrom='baz') in overrides[0]['secr... |
('config_type', ['strict'])
def test_missing_envs_not_required_in_strict_mode(config, json_config_file_3):
with open(json_config_file_3, 'w') as file:
file.write(json.dumps({'section': {'undefined': '${UNDEFINED}'}}))
config.set_json_files([json_config_file_3])
config.load(envs_required=False)
a... |
class Migration(migrations.Migration):
dependencies = [('article', '0007_auto__1429')]
operations = [migrations.AddField(model_name='article_comment', name='url', field=models.CharField(blank=True, default='', max_length=60, null=True)), migrations.AddField(model_name='articlecommentreply', name='url', field=mo... |
def must_be_matrix(r: Requirement) -> bool:
if (r is any_requirement):
return False
if (r is any_real_matrix):
return True
if isinstance(r, AlwaysMatrix):
return True
t = requirement_to_type(r)
if isinstance(t, BMGMatrixType):
return ((t.rows != 1) or (t.columns != 1)... |
('image.frozenmodel', dataset=recipe_args['dataset'], frozen_model_path=('Path to frozen_model.pb', 'positional', None, str), label_map_path=('Path to label_map.pbtxt', 'positional', None, str), source=recipe_args['source'], threshold=('Score threshold', 'option', 't', float, None, 0.5), api=recipe_args['api'], exclude... |
def DllGetClassObject(rclsid, riid, ppv):
COMObject.__run_inprocserver__()
iid = GUID.from_address(riid)
clsid = GUID.from_address(rclsid)
if (not _logging_configured):
_setup_logging(clsid)
try:
_debug('DllGetClassObject(clsid=%s, iid=%s)', clsid, iid)
cls = inproc_find_clas... |
def test_guiding_input(thr):
N = 1000
dtype = numpy.float32
p = PureParallel([Parameter('output', Annotation(Type(dtype, shape=(2, N)), 'o')), Parameter('input', Annotation(Type(dtype, shape=N), 'i'))], '\n float t = ${input.load_idx}(${idxs[0]});\n ${output.store_idx}(0, ${idxs[0]}, t);\n ... |
class AifsPurpose(Purpose):
def __call__(self, config):
def check_dict_value_and_set(dic, key, value):
if (key in dic):
if (dic[key] != value):
raise ValueError(f'Cannot use {key}={dic[key]} with {self} purpose. Must use {value}.')
dic[key] = value... |
def dnsans2log(msg: dns.message.Message) -> str:
question = msg2question(msg)
flags = msg2flags(msg)
return '{} {} {} {}/{}/{} {}/{}/{} {}'.format(question, msg.id, flags, sum_items(msg.answer), sum_items(msg.authority), sum_items(msg.additional), msg.edns, msg.ednsflags, msg.payload, dns.rcode.to_text(msg.... |
class OptionSeriesBulletSonificationContexttracksMappingTremoloDepth(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str):
... |
class StaticAnytraitChangeNotifyWrapper(AbstractStaticChangeNotifyWrapper):
argument_transforms = {0: (lambda obj, name, old, new: ()), 1: (lambda obj, name, old, new: (obj,)), 2: (lambda obj, name, old, new: (obj, name)), 3: (lambda obj, name, old, new: (obj, name, new)), 4: (lambda obj, name, old, new: (obj, name... |
def test_centered_product_with_two_frames_and_default_values(traces):
expected = _read('centered_product_result_frame2_10.npz')
frame = slice(None, 50)
frame_2 = slice(None, 10)
result = scared.preprocesses.high_order.CenteredProduct(frame_1=frame, frame_2=frame_2)(traces)
assert np.array_equal(expe... |
class TumDataLoader(BaseDataset):
def __init__(self, filename, transform):
super().__init__(filename, transform)
def load_depth(self, filename):
depth_np = cv2.imread(filename, cv2.IMREAD_ANYDEPTH)
depth_np = (depth_np.astype(np.float32) / 5000.0)
return depth_np
def get_file... |
def extractLightNovelCafe(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol or frag)) or ('preview' in item['title'].lower())):
return None
if ('Evolution Theory of the Hunter' in item['tags']):
return buildReleaseMessageWithType(item, 'E... |
class GlyphComponentTest():
def test_toXML_no_transform(self):
comp = GlyphComponent()
comp.glyphName = 'a'
comp.flags = ARGS_ARE_XY_VALUES
(comp.x, comp.y) = (1, 2)
assert (getXML(comp.toXML) == ['<component glyphName="a" x="1" y="2" flags="0x2"/>'])
def test_toXML_trans... |
class ValveBadConfTestCase(ValveTestBases.ValveTestNetwork):
CONFIG = ('\ndps:\n s1:\n%s\n interfaces:\n p1:\n number: 1\n native_vlan: 0x100\n' % DP1_CONFIG)
MORE_CONFIG = ('\ndps:\n s1:\n%s\n interfaces:\n p1:\n number: 1\n... |
class FLTrainerConfig():
_target_: str = MISSING
_recursive_: bool = False
epochs: float = 1000.0
do_eval: bool = True
always_keep_trained_model: bool = False
timeout_simulator: TimeOutSimulatorConfig = TimeOutSimulatorConfig()
train_metrics_reported_per_epoch: int = 1
eval_epoch_frequen... |
def slack_enterprise_dataset_config(db: Session, slack_enterprise_connection_config: ConnectionConfig, slack_enterprise_dataset, slack_enterprise_config) -> Generator:
fides_key = slack_enterprise_config['fides_key']
slack_enterprise_connection_config.name = fides_key
slack_enterprise_connection_config.key ... |
def _is_successful(operation):
success = True
if ('error' in operation):
if ('errors' in operation['error']):
for err in operation['error']['errors']:
if (err.get('code') in ['RESOURCE_ALREADY_EXISTS', 'INVALID_FIELD_VALUE']):
LOGGER.warning('Ignoring erro... |
def klDivBeta(a, b, a2, b2):
    """Return KL( Beta(a, b) || Beta(a2, b2) ).

    Uses the closed form
        ln B(a2, b2) - ln B(a, b)
        + (a - a2) * (psi(a) - psi(a + b))
        + (b - b2) * (psi(b) - psi(a + b))
    where B is the Beta function and psi the digamma function.
    """
    from scipy.special import gammaln, psi
    import numpy as np
    p = np.array([a, b])
    q = np.array([a2, b2])
    # Difference of log-normalizers: ln B(a2, b2) - ln B(a, b).
    log_norm = (gammaln(p.sum()) - gammaln(q.sum())
                + gammaln(q).sum() - gammaln(p).sum())
    # Expectation of the log-density ratio under Beta(a, b).
    expectation = ((p - q) * (psi(p) - psi(p.sum()))).sum()
    return log_norm + expectation
def aggregate(conf, fedavg_models, client_models, criterion, metrics, flatten_local_models, fa_val_perf, distillation_sampler, distillation_data_loader, val_data_loader, test_data_loader):
fl_aggregate = conf.fl_aggregate
(_, local_models) = agg_utils.recover_models(conf, client_models, flatten_local_models)
... |
def test_set_transaction_custom_data(elasticapm_client):
elasticapm_client.begin_transaction('test')
elasticapm.set_custom_context({'foo': 'bar'})
elasticapm_client.end_transaction('foo', 200)
transactions = elasticapm_client.events[TRANSACTION]
assert (transactions[0]['context']['custom'] == {'foo'... |
.usefixtures('use_tmpdir')
def test_that_unknown_hooked_job_gives_config_validation_error():
test_config_file_name = 'test.ert'
test_config_contents = dedent('\n NUM_REALIZATIONS 1\n HOOK_WORKFLOW NO_SUCH_JOB PRE_SIMULATION\n ')
with open(test_config_file_name, 'w', encoding='utf-8') a... |
class info():
class distro_name():
def returns_other_by_default(self, cxn):
cxn.run.return_value = Result(connection=cxn, exited=1)
assert (distro_name(cxn) == 'other')
def returns_fedora_if_fedora_release_exists(self, cxn):
def fedora_exists(*args, **kwargs):
... |
class RelationshipTlsCertificateTlsCertificate(ModelNormal):
allowed_values = {}
validations = {}
_property
def additional_properties_type():
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type)
_nullable = False
_property
def openapi_types():
... |
def getRaidLevel(data):
raid = None
raidLevels = ['raid0', 'raid1', 'raid5', 'raid6']
for x in data:
for y in raidLevels:
if x.startswith(y):
raid = x
break
if raid:
return raid
else:
print('No mdraid arrays found')
sys.exit... |
def test_buildVarColorLine_StopMap():
stops = [{'StopOffset': 0.0, 'PaletteIndex': 0, 'Alpha': 0.5, 'VarIndexBase': 1}, {'StopOffset': 1.0, 'PaletteIndex': 1, 'Alpha': 0.3, 'VarIndexBase': 3}]
cline = _build(ot.VarColorLine, {'ColorStop': stops})
assert ([{'StopOffset': cs.StopOffset, 'PaletteIndex': cs.Pal... |
def send_email_for_monthly_fee_payment(user, event_name, previous_month, amount, app_name, link, follow_up=False):
options = {False: MailType.MONTHLY_PAYMENT, True: MailType.MONTHLY_PAYMENT_FOLLOWUP, 'pre_due': MailType.MONTHLY_PAYMENT_PRE_DUE, 'post_due': MailType.MONTHLY_PAYMENT_POST_DUE}
key = options[follow... |
def gen_unified_accessor_tests(out, name):
loxi_utils.gen_c_copy_license(out)
out.write('\n/**\n *\n * AUTOMATICALLY GENERATED FILE. Edits will be lost on regen.\n *\n * Unified simple class instantiation tests for all versions\n */\n\n#include <locitest/test_common.h>\n')
for version in of_g.of_version_ra... |
_type(MrtRecord.TYPE_TABLE_DUMP_V2)
class TableDump2MrtRecord(MrtCommonRecord):
MESSAGE_CLS = TableDump2MrtMessage
SUBTYPE_PEER_INDEX_TABLE = 1
SUBTYPE_RIB_IPV4_UNICAST = 2
SUBTYPE_RIB_IPV4_MULTICAST = 3
SUBTYPE_RIB_IPV6_UNICAST = 4
SUBTYPE_RIB_IPV6_MULTICAST = 5
SUBTYPE_RIB_GENERIC = 6
... |
class OptionSeriesPyramid3dDragdropDraghandle(Options):
def className(self):
return self._config_get('highcharts-drag-handle')
def className(self, text: str):
self._config(text, js_type=False)
def color(self):
return self._config_get('#fff')
def color(self, text: str):
se... |
class FundingSourceDetails(AbstractCrudObject):
def __init__(self, fbid=None, parent_id=None, api=None):
self._isFundingSourceDetails = True
super(FundingSourceDetails, self).__init__(fbid, parent_id, api)
class Field(AbstractObject.Field):
coupon = 'coupon'
display_string = 'dis... |
class OptionSeriesGaugeSonificationDefaultinstrumentoptionsMappingPan(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str):
... |
_frequency(timedelta(hours=72))
def fetch_price(zone_key: ZoneKey, session: (Session | None)=None, target_datetime: (datetime | None)=None, logger=getLogger(__name__)):
(data_objects, date_string, source) = fetch_data(zone_key, session, target_datetime)
price_list = PriceList(logger=logger)
for data_object ... |
class OptionPlotoptionsPieDatalabelsFilter(Options):
def operator(self):
return self._config_get(None)
def operator(self, value: Any):
self._config(value, js_type=False)
def property(self):
return self._config_get(None)
def property(self, text: str):
self._config(text, js... |
('is_partial', [True, False])
def test_regression_1483(instantiate_func: Any, is_partial: bool) -> None:
def gen() -> Any:
(yield 10)
res: ArgsClass = instantiate_func({'_target_': 'tests.instantiate.ArgsClass'}, _partial_=is_partial, gen=gen(), lst=[1, 2])
if is_partial:
pickle.dumps(res.ke... |
def benchmark_module(name: str, inputs: Tensor, outputs: Tensor, pt_mod: torch.nn.Module, ait_mod: AITModule, iters: int=100, permute_inputs: Optional[List[int]]=None):
input_shape = inputs.size()
batch_size = input_shape[0]
args = (inputs,)
t = benchmark_torch_function(iters, pt_mod, *args)
t_pt = ... |
class RetryConfigBase(_typing.TypedDict):
maxRetrySeconds: _typing_extensions.NotRequired[(((int | _params.Expression[int]) | _util.Sentinel) | None)]
maxBackoffSeconds: _typing_extensions.NotRequired[(((int | _params.Expression[int]) | _util.Sentinel) | None)]
maxDoublings: _typing_extensions.NotRequired[(... |
def _config_from_hf(hf_config: Mapping[(str, Any)]) -> LlamaConfig:
kwargs = config_from_hf('Llama', hf_config, HF_CONFIG_KEYS)
n_kv_heads = HFConfigKeys.NUM_KEY_VALUE_HEADS.get_kwarg(kwargs)
if (n_kv_heads == (- 1)):
HFConfigKeys.NUM_KEY_VALUE_HEADS.set_kwarg(HFConfigKeys.NUM_ATTENTION_HEADS, kwarg... |
def new(project_path_str: str='.', ignore_subfolder: bool=False, ignore_existing: bool=False) -> str:
project_path = Path(project_path_str).resolve()
if ((not ignore_existing) and project_path.exists() and list(project_path.glob('*'))):
raise FileExistsError(f'Directory is not empty: {project_path}')
... |
class FidesopsMessage(BaseModel, smart_union=True, arbitrary_types_allowed=True):
action_type: MessagingActionType
body_params: Optional[Union[(ConsentEmailFulfillmentBodyParams, SubjectIdentityVerificationBodyParams, RequestReceiptBodyParams, RequestReviewDenyBodyParams, AccessRequestCompleteBodyParams, Erasur... |
class TestRestStatistic():
.usefixtures('intercom_backend_binding')
def test_status(self, test_client, workload_statistic, unpacking_scheduler, analysis_scheduler):
workload_statistic.update(unpacking_workload=unpacking_scheduler.get_scheduled_workload(), analysis_workload=analysis_scheduler.get_schedul... |
('persistent')
def persistent_variables(node):
assert isinstance(node, (Function_Definition, Script_File))
class Persistent_Visitor(AST_Visitor):
def __init__(self):
self.names = set()
def visit(self, node, n_parent, relation):
if isinstance(node, Persistent_Statement):
... |
def test_isolate_setup_funcs(isolated_client):
def setup_function():
import math
return math.pi
_client(setup_function=setup_function)
def is_tau(setup, by_factor) -> str:
import math
return ((setup * by_factor) == math.tau)
assert is_tau(2)
assert (not is_tau(by_fact... |
class TopicTest(unittest.TestCase):
def test_get_subtopic(self) -> None:
self.assertEqual('/haha', get_subtopic('/haha', 'a'))
self.assertEqual('/a', get_subtopic('/*', 'a'))
self.assertEqual('/a', get_subtopic('/**', 'a'))
self.assertEqual('/a/*', get_subtopic('/*/*', 'a'))
... |
class AWSSecretsManagerGateway(AWSGateway):
def __init__(self, region: str, access_key_id: Optional[str]=None, access_key_data: Optional[str]=None, config: Optional[Dict[(str, Any)]]=None) -> None:
super().__init__(region, access_key_id, access_key_data, config)
self.client: BaseClient = boto3.clien... |
class CFFTableTest(unittest.TestCase):
def setUpClass(cls):
with open(CFF_BIN, 'rb') as f:
cls.cffData = f.read()
with open(CFF_TTX, 'r') as f:
cls.cffXML = strip_ttLibVersion(f.read()).splitlines()
def test_toXML(self):
font = TTFont(sfntVersion='OTTO')
c... |
class IPythonConsoleWindow(Gtk.Window):
__ipv = None
def __init__(self, namespace):
Gtk.Window.__init__(self)
self.set_title(_('IPython Console - Exaile'))
self.set_size_request(750, 550)
self.set_resizable(True)
self.__ipv = IPView(namespace)
self.__ipv.connect('... |
class Migration(migrations.Migration):
dependencies = [('manager', '0006_event_registration_code')]
operations = [migrations.AlterField(model_name='activity', name='type', field=models.CharField(blank=True, choices=[('1', 'Talk'), ('2', 'Workshop'), ('3', 'Lightning talk')], max_length=200, null=True, verbose_n... |
class MORXContextualSubstitutionTest(unittest.TestCase):
def setUpClass(cls):
cls.maxDiff = None
g = (['.notdef'] + [('g.%d' % i) for i in range(1, 910)])
g[80] = 'C'
(g[50], g[52], g[201], g[202]) = ('A', 'B', 'X', 'Y')
(g[600], g[601], g[602], g[900]) = ('A.swash', 'B.swash... |
class TestXYZD65(util.ColorAssertsPyTest):
COLORS = [('red', 'color(xyz-d65 0.41239 0.21264 0.01933)'), ('orange', 'color(xyz-d65 0.54694 0.48173 0.06418)'), ('yellow', 'color(xyz-d65 0.76998 0.92781 0.13853)'), ('green', 'color(xyz-d65 0.07719 0.15438 0.02573)'), ('blue', 'color(xyz-d65 0.18048 0.07219 0.95053)'),... |
def x86_call_args(blk):
if (len(blk.bap.stmts) > 0):
last_stmt_bap = blk.bap.stmts[(- 1)]
if (isinstance(last_stmt_bap, JmpStmt) and isinstance(last_stmt_bap.kind, CallKind)):
tmp_args = dict()
call = last_stmt_bap
for i in range((len(blk.bap.stmts) - 4), (- 1), (... |
def CROSSOVER(ts, timeperiod_fast=50, timeperiod_slow=200, func_fast=SMA, func_slow=SMA, band=0, price='close', prevday=False):
if ((timeperiod_fast < 1) or (timeperiod_slow < 2) or (timeperiod_fast >= timeperiod_slow)):
raise TradeCrossOverError
ts['__sma_fast__'] = (ts[price] if (timeperiod_fast == 1)... |
def test_ref_plain_no_outputs():
r1 = ReferenceEntity(TaskReference('proj', 'domain', 'some.name', 'abc'), inputs=kwtypes(a=str, b=int), outputs={})
with pytest.raises(Exception) as e:
r1(a='fdsa', b=3)
assert ('You must mock this out' in f'{e}')
def wf1(a: str, b: int):
r1(a=a, b=b)
... |
def to_dict(copr):
return {'id': copr.id, 'name': copr.name, 'ownername': copr.owner_name, 'full_name': copr.full_name, 'homepage': copr.homepage, 'contact': copr.contact, 'description': copr.description, 'instructions': copr.instructions, 'devel_mode': copr.devel_mode, 'persistent': copr.persistent, 'unlisted_on_h... |
class TracingTestZipkinV1(AmbassadorTest):
def init(self):
self.target = HTTP()
self.zipkin = Zipkin()
def config(self) -> Generator[(Union[(str, Tuple[(Node, str)])], None, None)]:
(yield (self.target, self.format('\n---\napiVersion: getambassador.io/v3alpha1\nkind: Mapping\nname: trac... |
def test_schema_to_json_schema():
booking_schema = typesystem.Schema({'start_date': typesystem.Date(title='Start date'), 'end_date': typesystem.Date(title='End date'), 'room': typesystem.Choice(title='Room type', choices=[('double', 'Double room'), ('twin', 'Twin room'), ('single', 'Single room')]), 'include_breakf... |
def run():
segmk = Segmaker('design.bits')
print('Loading params')
f = open('params.json')
params = json.load(f)
params = {p['site']: p for p in params}
print('Loading tags')
f = open('params.jl', 'r')
f.readline()
for l in f:
j = json.loads(l)
bus_tags(segmk, j, para... |
def ensure_broker_server_dblink_exists():
if (DEFAULT_DB_ALIAS not in settings.DATABASES):
raise Exception("'{}' database not configured in django settings.DATABASES".format(DEFAULT_DB_ALIAS))
if ('data_broker' not in settings.DATABASES):
raise Exception("'data_broker' database not configured in... |
class TestDataTransformsAutoAug(unittest.TestCase):
def test_rand_aug_transforms(self):
default_cfg = Detectron2GoRunner.get_default_cfg()
img = np.concatenate([(np.random.uniform(0, 1, size=(80, 60, 1)) * 255).astype(np.uint8), (np.random.uniform(0, 1, size=(80, 60, 1)) * 255).astype(np.uint8), (np... |
class BEDSClientDecryptor(BaseStantinkoDecryptor):
def __init__(self):
super(self.__class__, self).__init__()
def parse_response(self, response):
param1 = b'date='
sep = b'&'
param2 = b'data='
i1 = response.find(param1)
i2 = response.find(sep, (i1 + len(param1)))
... |
def fortios_endpoint_control(data, fos, check_mode):
fos.do_member_operation('endpoint-control', 'forticlient-ems')
if data['endpoint_control_forticlient_ems']:
resp = endpoint_control_forticlient_ems(data, fos, check_mode)
else:
fos._module.fail_json(msg=('missing task body: %s' % 'endpoint... |
.parametrize('constructor_args,constructor_kwargs', (([1234, b'abcd'], {}), ([1234], {'b': b'abcd'}), ([], {'a': 1234, 'b': b'abcd'}), ([], {'b': b'abcd', 'a': 1234})))
def test_contract_constructor_gas_estimate_with_arguments_non_strict(w3_non_strict_abi, non_strict_contract_with_constructor_args_factory, constructor_... |
class CompanyNameMarkov():
random_state: numpy.random.RandomState
sic = {}
_metrics.timeit
def __init__(self, seed):
self.random_state = seed
self.namer = MName(NAMES, self.random_state, 3)
_metrics.timeit
def make(self, model):
masks = model['masks']
full_name = ... |
class P100():
def __init__(self, ipAddress, email, password):
self.ipAddress = ipAddress
self.terminalUUID = str(uuid.uuid4())
self.email = email
self.password = password
self.session = None
self.cookie_name = 'TP_SESSIONID'
self.errorCodes = ERROR_CODES
... |
def prompt_loop(app, store, db, session):
print(FACT_ASCII_ART)
print('\nWelcome to the FACT User Management (FACTUM)\n')
initialise_roles(app, store, db)
actions = Actions(session, app, store, db)
while True:
try:
action_completer = WordCompleter(LEGAL_ACTIONS)
actio... |
class OptionSeriesXrangeSonificationTracksMappingPitch(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get('y')
def mapTo(self, text: str):
self._conf... |
class ExtendedNodeSerializer(NodeSerializer):
extra_select = frozendict({'vms': 'SELECT COUNT(*) FROM "vms_vm" WHERE "vms_node"."uuid" = "vms_vm"."node_id"', 'real_vms': 'SELECT COUNT(*) FROM "vms_vm" LEFT OUTER JOIN "vms_slavevm" ON\n ( "vms_vm"."uuid" = "vms_slavevm"."vm_id" ) WHERE "vms_node"."uuid" = "vms_vm... |
class Validator(declarative.Declarative):
_messages = {}
if_missing = NoDefault
repeating = False
compound = False
accept_iterator = False
gettextargs = {}
use_builtins_gettext = True
__singletonmethods__ = ('to_python', 'from_python', 'message', 'all_messages', 'subvalidators')
def ... |
def generate_json(asset_name):
subprocess.run(['cd $FLEDGE_ROOT/data && mkdir -p tests'], shell=True, check=True)
fogbench_template_path = os.path.join(os.path.expandvars('${FLEDGE_ROOT}'), 'data/tests/{}'.format(TEMPLATE_NAME))
with open(fogbench_template_path, 'w') as f:
f.write(('[{"name": "%s", ... |
class TestSmithWatermanDecoder(unittest.TestCase):
def setUp(self):
torch.manual_seed(2)
(B, S, N, M) = (1, 3, 4, 4)
self.theta = torch.rand(B, N, M, requires_grad=True, dtype=torch.float32)
self.Ztheta = torch.rand(B, N, M, requires_grad=True, dtype=torch.float32)
self.Et = ... |
class OptionPlotoptionsFunnelSonificationDefaultinstrumentoptionsMappingTremoloDepth(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self,... |
('Notify.sdlog.error')
('Notify.sdlog.warning')
('Notify.sdlog.info')
def test_warning_not_shown_if_warning_threshold_not_exceeded(mocked_info, mocked_warning, mocked_error):
with TemporaryDirectory() as tmpdir, mock.patch('Notify.LAST_UPDATED_FILE', os.path.join(tmpdir, 'sdw-last-updated')):
just_now = dat... |
def test_launched_jobs(hydra_sweep_runner: TSweepRunner) -> None:
sweep = hydra_sweep_runner(calling_file=None, calling_module='hydra.test_utils.a_module', config_path='configs', config_name='compose.yaml', task_function=None, overrides=['hydra/sweeper=example', 'hydra/launcher=basic', 'foo=1,2'])
with sweep:
... |
class Address():
PREFIX_EVEN = b'\x02'
PREFIX_ODD = b'\x03'
WITNESS_VERSION = 0
class Net(ABC):
def __init__(self):
self.pubaddr1 = None
self.pubaddr1c = None
self.pubaddr3 = None
self.pubaddrbc1_P2WPKH = None
self.pubaddrbc1_P2WSH = No... |
class CTypesData(object):
__metaclass__ = CTypesType
__slots__ = ['__weakref__']
__name__ = '<cdata>'
def __init__(self, *args):
raise TypeError(('cannot instantiate %r' % (self.__class__,)))
def _newp(cls, init):
raise TypeError(("expected a pointer or array ctype, got '%s'" % (cls.... |
class Kolumbus(BikeShareSystem):
meta = {'system': 'kolumbus', 'company': ['Kolumbus']}
def __init__(self, tag, meta, bbox):
super(Kolumbus, self).__init__(tag, meta)
self.bbox = bbox
def update(self, scraper=None):
scraper = (scraper or PyBikesScraper())
data = json.loads(sc... |
class TestVisionModelConverter(AITTestCase):
def test_resnet50(self):
torch.manual_seed(0)
class TestModule(torch.nn.Module):
def __init__(self):
super().__init__()
self.mod = torchvision.models.resnet18()
def forward(self, x):
... |
class gsmtap_umts_rrc_types(IntEnum):
DL_DCCH = 0
UL_DCCH = 1
DL_CCCH = 2
UL_CCCH = 3
PCCH = 4
DL_SHCCH = 5
UL_SHCCH = 6
BCCH_FACH = 7
BCCH_BCH = 8
MCCH = 9
MSCH = 10
HandoverToUTRANCommand = 11
InterRATHandoverInfo = 12
SystemInformation_BCH = 13
System_Infor... |
def check_gv(save_data: list[int], save_stats: dict[(str, Any)], game_version: int) -> dict[(str, Any)]:
if (save_stats['game_version']['Value'] < game_version):
save_data = exit_serialiser(save_data, save_stats)
return {'save_data': save_data, 'exit': True}
else:
return {'save_data': sa... |
def e_B(x):
    """Evaluate the objective for coefficient vector ``x``.

    Builds a quadratic surface S in the design variables ``a`` and ``b``
    (module-level globals — presumably arrays; TODO confirm), clamps it at
    zero, takes one Halley-method refinement step toward a root of
    ``to_B(., h)``, and returns the mean of the refined value raised to
    the 10th power.
    """
    # Quadratic response surface in (a, b), clamped to be non-negative.
    surface = x[0] + x[1] * a + x[2] * b + x[3] * (a ** 2) + x[4] * a * b
    surface = np.maximum(0, surface)
    val = to_B(surface, h)
    d1 = to_B_dS(surface, h)      # first derivative w.r.t. S
    d2 = to_B_dS2(surface, h)     # second derivative w.r.t. S
    # Halley's-method step: S - f*f' / (f'^2 - f*f''/2).
    refined = surface - (val * d1) / ((d1 ** 2) - (val * d2) / 2)
    return np.average(to_B(refined, h) ** 10)
class Bitstream():
def __init__(self, file_name, verbose=False):
self.frame_data = []
self.idcode = 0
self.exp_sign = 0
self.far_min = 0
self.far_maj = 0
self.curr_fdri_write_len = 0
self.curr_crc_check = 0
self.fdri_in_progress = False
with Op... |
def sigint_process_id(process_id: int, wait_for_completion: Callable[([int], None)], logger: Logger, SIGINT_timeout: int=DEFAULT_SIGINT_TIMEOUT) -> None:
try:
try:
os.kill(process_id, signal.SIGINT)
except ProcessLookupError:
logger.info('Process %d has already terminated', p... |
def extractNanigenainamachiCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('i was born as the seventh prince, what should i do?', 'i was born as the seventh prince, what sho... |
class OptionPlotoptionsTreemapSonificationDefaultinstrumentoptionsMappingLowpassResonance(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(... |
class SpecialScenarioDataProcessor(object):
def __init__(self, db_type: str='mysql', host: str='127.0.0.1', port: str=3306, user: Optional[str]=None, passwd: Optional[str]=None) -> Any:
self.db_type = db_type
self.connector = AnyDBConnector(db_type=db_type, host=host, port=port, user=user, passwd=pa... |
def test_tuple_to_dict_action_wrapper():
base_env = DummyTupleEnv()
env = DictActionWrapper.wrap(base_env)
assert isinstance(env.action_space, spaces.Dict)
action = env.action_space.sample()
out_action = env.action(action)
assert env.action_space.contains(env.reverse_action(out_action))
asse... |
.parametrize('excluded_nodes, expected_subgraphs', [([], {frozenset(('B1', 'B2')), frozenset(('E', 'F', 'G'))}), (['A', 'B'], {frozenset(('B1', 'B2', 'C', 'D')), frozenset(('E', 'F', 'G'))})])
def test_chunking_with_execution_plan(excluded_nodes, expected_subgraphs):
graph = to_graph(GRAPH_2)
execution_plan = E... |
class CustomFieldSource(FieldSource, PlanarSource):
field_dataset: Optional[FieldDataset] = pydantic.Field(..., title='Field Dataset', description=':class:`.FieldDataset` containing the desired frequency-domain fields patterns to inject. At least one tangetial field component must be specified.')
_field_dataset... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.