code stringlengths 281 23.7M |
|---|
def test_bazel_options():
job_control = proto_control.JobControl(remote=True, scavenging_benchmark=True)
envoy_source = job_control.source.add()
envoy_source.identity = proto_source.SourceRepository.SourceIdentity.SRCID_ENVOY
envoy_source.source_path = '/home/ubuntu/envoy'
envoy_source.branch = 'mas... |
class Command(DanubeCloudCommand):
help = 'Install or update dependencies according to *etc/requirements-<type>.txt*.'
options = (CommandOption('-q', '--que', '--node', action='store_true', dest='que_only', default=False, help='Install or update compute node related requirements.'), CommandOption('-u', '--updat... |
def test_capture_serverless_lambda_url(event_lurl, context, elasticapm_client):
os.environ['AWS_LAMBDA_FUNCTION_NAME'] = 'test_func'
_serverless
def test_func(event, context):
with capture_span('test_span'):
time.sleep(0.01)
return {'statusCode': 200, 'headers': {'foo': 'bar'}}
... |
class RestoreButtonControl(ButtonControl):
name = 'restore'
title = _('Restore')
description = _('Restore the main window')
fixed = True
__gsignals__ = {'hierarchy-changed': 'override'}
def __init__(self):
ButtonControl.__init__(self)
self.set_image_from_icon_name('window-new')
... |
class KMcbLdesIhpqaHjnBNuvlOYmyASfQTPWVXtEJirGxgRUzwCDko():
def __init__(KMcbLdesIhpqaHjnBNuvlOYmyASfQTPWVXtEJirGxgRUzwCkFD):
KMcbLdesIhpqaHjnBNuvlOYmyASfQTPWVXtEJirGxgRUzwCkFD.atributo = 7
def KMcbLdesIhpqaHjnBNuvlOYmyASfQTPWVXtEJirGxgRUzwCkoD(KMcbLdesIhpqaHjnBNuvlOYmyASfQTPWVXtEJirGxgRUzwCkFD):
... |
class test(testing.TestCase):
def test_simple(self):
(cons, args) = main(nelems=4, etype='square', degree=1, poisson=0.25)
with self.subTest('constraints'):
self.assertAlmostEqual64(cons['u'], '\n eNpjaGBAhSBAZTEAEKAUAQ==')
with self.subTest('displacement'):
... |
class DatabaseAPITestSuite():
def test_database_api_get(self, db: DatabaseAPI) -> None:
db[b'key-1'] = b'value-1'
assert (db.get(b'key-1') == b'value-1')
def test_database_api_item_getter(self, db: DatabaseAPI) -> None:
db[b'key-1'] = b'value-1'
assert (db[b'key-1'] == b'value-1'... |
_exempt
def test80085(request, location_slug):
try:
location = get_location(location_slug)
except:
return HttpResponse(status=200)
logger.debug((' for location: %s' % location))
logger.debug(request.POST)
logger.debug(request.FILES)
header_txt = request.POST.get('message-headers'... |
class PKNullableForeignKeyTests(TestCase):
def setUp(self):
target = ForeignKeyTarget(name='target-1')
target.save()
for idx in range(1, 4):
if (idx == 3):
target = None
source = NullableForeignKeySource(name=('source-%d' % idx), target=target)
... |
def test_hamming_weight_model_shapes_and_type_output_validation(nb_words):
vm = scared.HammingWeight(nb_words)
data = np.random.randint(0, 255, (500, 16), dtype='uint8')
if (nb_words > 16):
with pytest.raises(ValueError):
vm(data)
else:
res = vm(data)
assert (data.sha... |
class set_config(message):
version = 5
type = 9
def __init__(self, xid=None, flags=None, miss_send_len=None):
if (xid != None):
self.xid = xid
else:
self.xid = None
if (flags != None):
self.flags = flags
else:
self.flags = 0
... |
class OptionSeriesHeatmapStatesInactive(Options):
def animation(self) -> 'OptionSeriesHeatmapStatesInactiveAnimation':
return self._config_sub_data('animation', OptionSeriesHeatmapStatesInactiveAnimation)
def enabled(self):
return self._config_get(True)
def enabled(self, flag: bool):
... |
def _test_success_with_all_filters_recipient_location_district(client):
resp = client.post('/api/v2/search/spending_by_geography', content_type='application/json', data=json.dumps({'scope': 'recipient_location', 'geo_layer': 'district', 'filters': non_legacy_filters()}))
assert (resp.status_code == status.HTTP_... |
def deploy_purity_checker(w3):
def deploy_purity_checker():
w3.eth.sendTransaction({'to': PURITY_CHECKER_TX_SENDER, 'value': (10 ** 17)})
tx_hash = w3.eth.sendRawTransaction(PURITY_CHECKER_TX_HEX)
receipt = w3.eth.getTransactionReceipt(tx_hash)
return receipt.contractAddress
retu... |
class add_one(Operator):
def __init__(self):
super().__init__()
self._attrs['op'] = 'add_one'
self._attrs['has_profiler'] = False
self._attrs['nop'] = False
def __call__(self, x: Tensor) -> Tensor:
self._attrs['inputs'] = [x]
self._set_depth()
output_shape... |
(context_settings=get_width())
('--repository', type=str, required=True, help='Snapshot repository')
('--name', type=str, help='Snapshot name', show_default=True, default='curator-%Y%m%d%H%M%S')
('--ignore_unavailable', is_flag=True, show_default=True, help='Ignore unavailable shards/indices.')
('--include_global_state... |
class DataDb():
def __init__(self, page: primitives.PageModel=None):
self.page = page
(self._db_bindings, self.pkgs) = ({}, {})
self.no_sql = NoSql(page)
self.table_names = None
def __settings(self):
db_settings = ({'family': 'sqlite'} if (self.page._dbSettings is None) e... |
_view(('GET', 'POST', 'PUT', 'DELETE'))
_data(permissions=(IsAdminOrReadOnly,))
def vm_define(request, hostname_or_uuid, data=None):
vm = get_vm(request, hostname_or_uuid, sr=('owner', 'node', 'template', 'slavevm'), check_node_status=None, noexists_fail=False, exists_ok=False)
return VmDefineView(request).resp... |
def get_last_opened_notes() -> List[SiacNote]:
conn = _get_connection()
res = conn.execute('select notes.* from notes inner join (select nid, created as nc from notes_opened group by nid order by max(created) desc) as ot on notes.id = ot.nid order by ot.nc desc limit 100').fetchall()
conn.close()
return... |
def get_culprit(frames, include_paths=None, exclude_paths=None):
if (include_paths is None):
include_paths = []
if (exclude_paths is None):
exclude_paths = []
best_guess = None
culprit = None
for frame in frames:
try:
culprit = '.'.join(((f or '<unknown>') for f i... |
def fix_bases_mask(bases_mask, barcode_sequence):
indexes = barcode_sequence.split('-')
reads = []
i = 0
for read in bases_mask.split(','):
new_read = read
if read.startswith('I'):
input_index_length = int(read[1:])
try:
actual_index_length = len(i... |
def extractWwwAddergooleCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type) in t... |
class FieldAddress():
def __init__(self, dataset: str, collection: str, *fields: str):
self.dataset = dataset
self.collection = collection
self.field_path: FieldPath = FieldPath(*fields)
self.value: str = ':'.join((dataset, collection, self.field_path.string_path))
def is_member_... |
def check_data_names(data, data_names):
if isinstance(data_names, str):
data_names = (data_names,)
if (data_names is None):
raise ValueError('Invalid data_names equal to None.')
if (len(data) != len(data_names)):
raise ValueError('Data has {} components but only {} names provided: {}... |
class OptionPlotoptionsNetworkgraphLink(Options):
def color(self):
return self._config_get('rgba(100, 100, 100, 0.5)')
def color(self, text: str):
self._config(text, js_type=False)
def dashStyle(self):
return self._config_get(None)
def dashStyle(self, text: str):
self._co... |
_OP_REGISTRY.register()
def TrivialAugmentWideImageOp(cfg: CfgNode, arg_str: str, is_train: bool) -> List[Union[(aug.Augmentation, Transform)]]:
    """Build a TrivialAugmentWide augmentation from a JSON kwargs string.

    Only valid at training time (`is_train` must be truthy). `arg_str`, when
    given, is a JSON object whose keys become constructor kwargs; `None`
    means "use defaults". Returns a single-element augmentation list.
    """
    assert is_train
    # None -> no overrides; otherwise parse the JSON payload into kwargs.
    if arg_str is None:
        kwargs = {}
    else:
        kwargs = _json_load(arg_str)
    assert isinstance(kwargs, dict)
    return [TrivialAugmentWideImage(**kwargs)]
.skipif((nb_proc > 1), reason='No commandline in MPI')
.xfail((backend_default not in ['pythran', 'python']), reason='Not yet implemented')
def test_install_package(tmpdir, virtualenv):
assert virtualenv.python.endswith('/bin/python')
for name in ('pyproject.toml', 'meson.build', 'meson.options', 'README.md'):
... |
def test_handle_cancellation_failure_event(timer_dsm: TimerDecisionStateMachine):
timer_dsm.state = DecisionState.CANCELLATION_DECISION_SENT
timer_dsm.handle_cancellation_failure_event(HistoryEvent())
assert ('handle_cancellation_failure_event' in timer_dsm.state_history)
assert (timer_dsm.state == Deci... |
def test_compare_data_pattern():
errors = validate_data_with_pattern({'a': 12}, {'a': 13})
assert (not errors)
errors = validate_data_with_pattern({'a': 12}, {'a': 'string'})
assert errors
assert (errors[0] == 'For attribute `a` `str` data type is expected, but `int` was provided!')
errors = val... |
class OptionSeriesDumbbellSonificationDefaultspeechoptionsMappingPlaydelay(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str... |
class OptionPlotoptionsDependencywheelStates(Options):
def hover(self) -> 'OptionPlotoptionsDependencywheelStatesHover':
return self._config_sub_data('hover', OptionPlotoptionsDependencywheelStatesHover)
def inactive(self) -> 'OptionPlotoptionsDependencywheelStatesInactive':
return self._config_... |
def actions_to_str(instructions):
actions = []
for instruction in instructions:
if isinstance(instruction, ofproto_v1_3_parser.OFPInstructionActions):
if (instruction.type == ofproto_v1_3.OFPIT_APPLY_ACTIONS):
for a in instruction.actions:
actions.append(a... |
_converter(acc_ops.conv3d)
def acc_ops_conv3d(target: Target, args: Tuple[(Argument, ...)], kwargs: Dict[(str, Argument)], name: str) -> ConverterOutput:
input_val = ait_ncdhw2ndhwc(kwargs['input'])
if (not isinstance(input_val, AITTensor)):
raise RuntimeError(f'Non-tensor inputs for {name}: {input_val}... |
class OptionSeriesTreegraphSonificationDefaultinstrumentoptionsMappingHighpassResonance(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(se... |
def test_cache_keeps_two_old_transactions(spark_session, provide_config):
pc = DiskPersistenceBackedSparkCache(**provide_config)
insert = spark_session.createDataFrame(data=[[1, 2], [3, 4]], schema='a: int, b: int')
pc[to_dict('d1', 'ri.foundry.main.transaction.-d2e4-ac6e-b00c-29965c42029e', '/d1')] = inser... |
def extractHannitriedHomeBlog(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type) in... |
def try_num_differentiators(raw_string):
num_ref_match = _MULTIMATCH_REGEX.match(raw_string)
if num_ref_match:
(mindex, new_raw_string) = (num_ref_match.group('number'), (num_ref_match.group('name') + num_ref_match.group('args')))
return (int(mindex), new_raw_string)
else:
return (No... |
class OptionSeriesScatter3dDatalabelsTextpath(Options):
def attributes(self):
return self._config_get(None)
def attributes(self, value: Any):
self._config(value, js_type=False)
def enabled(self):
return self._config_get(False)
def enabled(self, flag: bool):
self._config(f... |
.skip(reason='need to run in parallel')
def test_2DparallelLoadPUMI(verbose=0):
comm = Comm.init()
eq(comm.size(), 2)
testDir = os.path.dirname(os.path.abspath(__file__))
domain = Domain.PUMIDomain(dim=2)
Model = (testDir + '/Rectangle.dmg')
Mesh = (testDir + '/Rectangle.smb')
domain.PUMIMes... |
class CompleteTrackParams():
def __init__(self, user_specified_track_params=None):
self.track_defined_params = set()
self.user_specified_track_params = (user_specified_track_params if user_specified_track_params else {})
def internal_user_defined_track_params(self):
set_user_params = set... |
class _CRG(Module, AutoCSR):
def __init__(self, platform, sys_clk_freq):
self.rst = Signal()
self.clock_domains.cd_sys_pll = ClockDomain()
self.clock_domains.cd_sys = ClockDomain()
self.clock_domains.cd_sys4x = ClockDomain(reset_less=True)
self.clock_domains.cd_pll4x = ClockD... |
class VersionMoverTestCase(unittest.TestCase):
original_message_box = None
def show_dialog(self, dialog):
dialog.show()
self.app.exec_()
self.app.connect(self.app, QtCore.SIGNAL('lastWindowClosed()'), self.app, QtCore.SLOT('quit()'))
def create_version(self, task, take_name):
... |
def test_w_prep_fit():
evl = Evaluator(mape_scorer, cv=5, shuffle=False, random_state=100, verbose=True)
with open(os.devnull, 'w') as f, redirect_stdout(f):
evl.fit(X, y, estimators=[OLS()], param_dicts={'ols': {'offset': randint(1, 10)}}, preprocessing={'pr': [Scale()], 'no': []}, n_iter=3)
np.tes... |
def extractUtnovelWordpressCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type) i... |
class TestCatColumnsOutOfListValues(BaseGenerator):
columns: Optional[List[str]]
def __init__(self, columns: Optional[List[str]]=None, is_critical: bool=True):
self.is_critical = is_critical
self.columns = columns
def generate(self, data_definition: DataDefinition) -> List[TestShareOfOutList... |
class RecurrentAgent(RL_Agent):
def __init__(self, model=None, n_actions=None):
super().__init__()
self.model = model
self.n_actions = n_actions
def update(self, state_dict):
self.model.load_state_dict(state_dict)
def initial_state(self, agent_info, B):
return DictTen... |
def setup_module(_):
global standalone
with mock.patch('pysoa.utils.get_python_interpreter_arguments') as mock_get_args:
prev_path_0 = sys.path[0]
mock_get_args.return_value = ['python', '/path/to/module.py']
sys.path[0] = '/path/to/module.py'
try:
from pysoa.server i... |
()
('--runtime', default=DEFAULT_RUNTIME)
('--toolkit', default=DEFAULT_TOOLKIT)
('--environment', default=None)
('--editable/--not-editable', default=False, help="Install main package in 'editable' mode? [default: --not-editable]")
def install(runtime, toolkit, environment, editable):
parameters = get_parameters(... |
class ClassificationClassSeparationPlotResults(MetricResult):
class Config():
dict_exclude_fields = {'current', 'reference'}
pd_exclude_fields = {'current', 'reference'}
target_name: str
current: Optional[ColumnScatterOrAgg] = None
(current_raw, current_agg) = raw_agg_properties('current... |
def connect_signals():
post_init.connect(post_init_treenode, dispatch_uid='post_init_treenode')
post_migrate.connect(post_migrate_treenode, dispatch_uid='post_migrate_treenode')
post_save.connect(post_save_treenode, dispatch_uid='post_save_treenode')
post_delete.connect(post_delete_treenode, dispatch_ui... |
class bsn_lacp_stats_request(bsn_stats_request):
version = 4
type = 18
stats_type = 65535
experimenter = 6035143
subtype = 1
def __init__(self, xid=None, flags=None):
if (xid != None):
self.xid = xid
else:
self.xid = None
if (flags != None):
... |
class OptionSeriesSplineMarkerStates(Options):
def hover(self) -> 'OptionSeriesSplineMarkerStatesHover':
return self._config_sub_data('hover', OptionSeriesSplineMarkerStatesHover)
def normal(self) -> 'OptionSeriesSplineMarkerStatesNormal':
return self._config_sub_data('normal', OptionSeriesSplin... |
def load_results(run_key, conn):
cursor = conn.cursor()
cursor.execute('SELECT query_key, result, time FROM results\n WHERE run_key = ?', (run_key,))
results = []
times = []
for (query_key, result, time) in cursor:
results.append((query_key, json.loads(result)))
times.appe... |
class Info(types.SimpleNamespace):
ATTRS = None
RENDER_NAMES = None
CODE_ATTRS = ['co_name', 'co_filename', 'co_flags', 'co_stacksize', 'co_firstlineno', 'co_lnotab', 'co_code', 'co_nlocals', 'co_argcount', 'co_kwonlyargcount', 'co_consts', 'co_names', 'co_localslots', 'co_varnames', 'co_cellvars', 'co_free... |
class ForwardingRuleScanner(base_scanner.BaseScanner):
def __init__(self, global_configs, scanner_configs, service_config, model_name, snapshot_timestamp, rules):
super(ForwardingRuleScanner, self).__init__(global_configs, scanner_configs, service_config, model_name, snapshot_timestamp, rules)
self.... |
class mbuf(object):
__list = None
__payload_size = 0
offset = 0
def __init__(self):
self.__list = list(bytes(MBUF_AREA_SIZE))
def get_data(self):
return bytes(self.__list[self.offset:self.__payload_size])
def get_part(self, size):
if (size == 1):
return self._... |
class OptionSeriesVariwideSonificationContexttracksMappingLowpass(Options):
def frequency(self) -> 'OptionSeriesVariwideSonificationContexttracksMappingLowpassFrequency':
return self._config_sub_data('frequency', OptionSeriesVariwideSonificationContexttracksMappingLowpassFrequency)
def resonance(self) -... |
def test_adding_a_secret_mount():
config = '\ndaemonset:\n secretMounts:\n - name: elastic-certificates\n secretName: elastic-certificates-name\n path: /usr/share/metricbeat/config/certs\n'
r = helm_template(config)
assert ({'mountPath': '/usr/share/metricbeat/config/certs', 'name': 'elastic-c... |
class TopicTracker(MethodView):
decorators = [login_required]
def get(self):
page = request.args.get('page', 1, type=int)
topics = real(current_user).tracked_topics.outerjoin(TopicsRead, db.and_((TopicsRead.topic_id == Topic.id), (TopicsRead.user_id == real(current_user).id))).outerjoin(Post, (T... |
.parametrize('vm_class, encoded, expected_failure', ((BerlinVM, to_bytes(2), UnrecognizedTransactionType),))
def test_receipt_decode_failure_by_vm(vm_class, encoded, expected_failure):
sedes = vm_class.get_receipt_builder()
with pytest.raises(expected_failure):
rlp.decode(encoded, sedes=sedes) |
def _get_available_ram_freebsd() -> int:
import ctypes.util
libc = ctypes.CDLL(ctypes.util.find_library('libc'), use_errno=True)
sysctlbyname = libc.sysctlbyname
sysctlbyname.restype = ctypes.c_int
sysctlbyname.argtypes = [ctypes.c_char_p, ctypes.c_void_p, ctypes.POINTER(ctypes.c_size_t), ctypes.c_v... |
_os(*metadata.platforms)
def main():
autoit = 'C:\\Users\\Public\\rta.exe'
rcedit = 'C:\\Users\\Public\\rcedit.exe'
common.copy_file(RENAMER, rcedit)
common.copy_file(EXE_FILE, autoit)
common.log('Modifying the OriginalFileName attribute')
common.execute([rcedit, autoit, '--set-version-string', ... |
class Html(ExecutorTopicContinuum, CreateMakeDependencies):
def __init__(self, oconfig):
tracer.debug('Called: html ouput module constructed.')
self._config = Cfg(oconfig)
CreateMakeDependencies.__init__(self)
self.__fd_stack = []
self.__topic_name_set = []
self.__ul_... |
class RecipientCount(APIView):
endpoint_doc = 'usaspending_api/api_contracts/contracts/v2/recipient/count.md'
cache_key_whitelist = ['keyword', 'award_type']
_response()
def post(self, request):
validated_payload = TinyShield(RECIPIENT_MODELS).block(request.data)
return Response({'count'... |
def test_csv():
df = pd.DataFrame({'Name': ['Tom', 'Joseph'], 'Age': [20, 22]})
encoder = basic_dfs.PandasToCSVEncodingHandler()
decoder = basic_dfs.CSVToPandasDecodingHandler()
ctx = context_manager.FlyteContextManager.current_context()
sd = StructuredDataset(dataframe=df)
sd_type = StructuredD... |
class MassMatrixAdapter():
def __init__(self, initial_positions: torch.Tensor, full_mass_matrix: bool=False):
self.mass_inv = torch.ones_like(initial_positions)
self.momentum_dist: dist.Distribution = dist.Normal(0.0, self.mass_inv)
if full_mass_matrix:
self.mass_inv = torch.diag... |
class OptionPlotoptionsArcdiagramSonificationDefaultinstrumentoptionsMappingLowpass(Options):
def frequency(self) -> 'OptionPlotoptionsArcdiagramSonificationDefaultinstrumentoptionsMappingLowpassFrequency':
return self._config_sub_data('frequency', OptionPlotoptionsArcdiagramSonificationDefaultinstrumentopt... |
class SagemakerHPOTask(PythonTask[HPOJob]):
_SAGEMAKER_HYPERPARAMETER_TUNING_JOB_TASK = 'sagemaker_hyperparameter_tuning_job_task'
def __init__(self, name: str, task_config: HPOJob, training_task: Union[(SagemakerCustomTrainingTask, SagemakerBuiltinAlgorithmsTask)], **kwargs):
if ((training_task is None... |
class _Matcher():
__slots__ = ['tag', 'content', 'lower']
def __init__(self, tag, content, lower):
self.tag = tag
if (content and (not self.tag.startswith('__'))):
content = lower(content)
self.content = content
self.lower = lower
def match(self, srtrack):
... |
class RowGroup(Options):
def activate(self):
self.dataSrc = 'group'
return self
def className(self):
return self._config_get()
def className(self, val):
self._config(val)
def dataSrc(self):
return self._config_get()
def dataSrc(self, val):
self._config... |
class TestDataSharder():
def test_random_sharder(self) -> None:
random.seed(1)
random_sharder = instantiate(RandomSharderConfig(num_shards=TestDataSetting.NUM_SHARDS))
for i in range((random_sharder.cfg.num_shards + 1)):
shard = random_sharder.shard_for_row(MockData.provide_data(... |
.network
.skipif((tqdm is None), reason='requires tqdm')
def test_downloader_progressbar_ftp(capsys, ftpserver):
with data_over_ftp(ftpserver, 'tiny-data.txt') as url:
download = FTPDownloader(progressbar=True, port=ftpserver.server_port)
with TemporaryDirectory() as local_store:
outfile... |
class _DurationConfigValue(_ConfigValue):
units = (('us', 1e-06), ('ms', 0.001), ('s', 1), ('m', 60))
def __init__(self, dict_key, allow_microseconds=False, unitless_factor=None, **kwargs) -> None:
self.type = None
used_units = (self.units if allow_microseconds else self.units[1:])
patte... |
.external
.skipif((has_openai_key is False), reason='OpenAI API key not available')
def test_ner_io(nlp: Language):
assert (nlp.pipe_names == ['llm'])
with make_tempdir() as tmpdir:
nlp.to_disk(tmpdir)
nlp2 = spacy.load(tmpdir)
assert (nlp2.pipe_names == ['llm'])
text = 'Marc and Bob bot... |
class BLEScanRequester(asyncio.Protocol):
def __init__(self):
self._supported_commands = None
self._le_features = None
self._initialized = asyncio.Event()
self._uninitialized = True
self.transport = None
self.smac = None
self.sip = None
self.process = ... |
class OptionPlotoptionsBubbleAccessibility(Options):
def description(self):
return self._config_get(None)
def description(self, text: str):
self._config(text, js_type=False)
def descriptionFormat(self):
return self._config_get(None)
def descriptionFormat(self, text: str):
... |
def first_name_capture(records):
logging.info('Applying _first_name_capture generator: making sure ID only contains the first whitespace-delimited word.')
whitespace = re.compile('\\s+')
for record in records:
if whitespace.search(record.description):
(yield SeqRecord(record.seq, id=reco... |
def create_nw_dict(data: List[List[Tuple[(str, str)]]]) -> Dict[(str, List[Tuple[(str, float)]])]:
model = dict()
for sentence in data:
for (i, (_, curr_pos)) in enumerate(sentence):
next_word = (sentence[(i + 1)][0] if ((i + 1) < len(sentence)) else DUMMY)
model.setdefault(next_... |
def _graphs_dangerous_dereference_in_the_same_block_as_target_and_definition_64bit() -> Tuple[(ControlFlowGraph, ControlFlowGraph)]:
in_cfg = ControlFlowGraph()
x = vars('x', 2, int64, aliased=False)
y = vars('y', 2, int32, aliased=True)
ptr = vars('ptr', 1, int64, aliased=False)
c = const(11)
i... |
.parametrize('primary_type, expected_hex', (('Mail', 'a0cedeb2dc280ba39b857546d74f5549c3a1d7bdc2dd96bf881f76108e23dac2'), ('Person', 'b9d8c78acf9b987311de6c7b45bb6a9c8e1bf361fa7fd3467a2163f994c79500')))
def test_hash_struct_type_eip712(primary_type, expected_hex, eip712_example_types):
assert (hash_struct_type(prim... |
class FlippingData():
def load_from(cls, ctx):
return cls(ctx.sender)
def __init__(self, sender: Document):
self._now = datetime.now(timezone.utc)
self._sender = sender
self._sender_hash = make_hash(sender)
self._ensure_required_fields()
def read_args(string):
... |
def runtests(*test_args):
    """Configure Django from SETTINGS, run the 'tests' suite, and exit.

    The process exit code is the number of test failures, so CI treats a
    clean run (zero failures) as success. `test_args` is accepted for
    caller compatibility but unused.
    """
    import django.test.utils
    settings.configure(**SETTINGS)
    django.setup()
    # Resolve the runner class from settings, then run with the same
    # verbosity/interactivity the original invocation used.
    TestRunner = django.test.utils.get_runner(settings)
    runner = TestRunner(verbosity=1, interactive=True, failfast=False)
    sys.exit(runner.run_tests(['tests']))
class TestDispatch(TestCase):
def setUp(self):
pass
def test_basic(self):
dy = Dispatch()
def f(i: int):
return (i + 1)
def f(s: str):
return (s + '1')
try:
def f(x: int):
return NotImplemented
except ValueError:... |
def read_decimal(data, writer_schema=None, reader_schema=None):
scale = writer_schema.get('scale', 0)
precision = writer_schema['precision']
unscaled_datum = int.from_bytes(data, byteorder='big', signed=True)
decimal_context.prec = precision
return decimal_context.create_decimal(unscaled_datum).scal... |
def test_lt_tagger_spaces(NLP):
doc = NLP('Some\nspaces are\tnecessary.')
assert (doc[0].pos != SPACE)
assert (doc[0].pos_ != 'SPACE')
assert (doc[1].pos == SPACE)
assert (doc[1].pos_ == 'SPACE')
assert (doc[1].tag_ == '_SP')
assert (doc[2].pos != SPACE)
assert (doc[3].pos != SPACE)
... |
class ScriptOperationProcessor(FileOperationProcessor):
_script_type: str = None
def get_argv(self, filepath) -> List[str]:
raise NotImplementedError
def process(self, operation: GenericOperation, elyra_run_name: str):
filepath = self.get_valid_filepath(operation.filename)
file_dir =... |
class TestUtils(unittest.TestCase):
def check_bitmap_512(self, value, data):
self.assertEqual(data, ofp.util.pack_bitmap_512(set(value)))
self.assertEqual(ofp.util.unpack_bitmap_512(OFReader(data)), set(value))
def test_bitmap_512(self):
self.check_bitmap_512([0], ((b'\x00' * 63) + b'\x0... |
class TestTriggerTests(BasePyTestCase):
def test_update_status_not_testing(self, *args):
nvr = 'bodhi-2.0-1.fc17'
up = self.db.query(Build).filter_by(nvr=nvr).one().update
up.status = UpdateStatus.pending
post_data = dict(csrf_token=self.get_csrf_token())
res = self.app.post_... |
class GaugePortStatsPollerTest(GaugePollerTest):
def test_send_req(self):
conf = mock.Mock(interval=1)
poller = gauge_pollers.GaugePortStatsPoller(conf, '__name__', mock.Mock())
self.check_send_req(poller, parser.OFPPortStatsRequest)
def test_no_response(self):
poller = gauge_pol... |
def test_doxygen_doc_for_module_use():
string = write_rpc_request(1, 'initialize', {'rootPath': str((test_dir / 'docs'))})
file_path = ((test_dir / 'docs') / 'test_module_and_type_doc.f90')
string += hover_request(file_path, 24, 14)
(errcode, results) = run_request(string)
assert (errcode == 0)
... |
def _get_sizing(vars, sizing, method, optimal_size=None):
if (not isinstance(vars, list)):
vars = [vars]
signed = bool(np.any([v.signed for v in vars]))
if (sizing == 'optimal'):
if (optimal_size is not None):
(signed, _, n_int, n_frac) = optimal_size
else:
si... |
def exposed_rss_db_sync(target=None, days=False, silent=False):
json_file = 'rss_filter_misses-1.json'
config.C_DO_RABBIT = False
write_debug = True
if silent:
config.C_DO_RABBIT = False
if target:
config.C_DO_RABBIT = False
flags.RSS_DEBUG = True
write_debug = False
... |
def find_order(cat_talents: list[dict[(str, Any)]], cat_talent_data: dict[(str, Any)]) -> list[str]:
letters = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']
letter_order: list[str] = []
for talent in cat_talents:
talent_id = talent['id']
for letter in letters:
key = f'abilityID_{lette... |
class OptionPlotoptionsSeriesSonificationDefaultinstrumentoptionsMappingLowpassResonance(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(s... |
_op([NestedForCursorA])
def reorder_loops(proc, nested_loops):
stmt_c = nested_loops._impl
if ((len(stmt_c.body()) != 1) or (not isinstance(stmt_c.body()[0]._node, LoopIR.For))):
raise ValueError(f'expected loop directly inside of {stmt_c._node.iter} loop')
(ir, fwd) = scheduling.DoLiftScope(stmt_c.... |
class GetFragmentNode(template.Node):
def __init__(self, request, fragment, as_var=None):
self.request = template.Variable(request)
self.fragment = template.Variable(fragment)
self.as_var = as_var
def render(self, context):
request = self.request.resolve(context)
fragment... |
def validate_discount_code(discount_code, tickets=None, ticket_holders=None, event_id=None):
from app.models.discount_code import DiscountCode
if (isinstance(discount_code, int) or (isinstance(discount_code, str) and discount_code.isdigit())):
discount_code = safe_query_by_id(DiscountCode, discount_code... |
class _Volume(base._Widget, ExtendedPopupMixin, ProgressBarMixin):
orientations = base.ORIENTATION_HORIZONTAL
defaults: list[tuple[(str, Any, str)]] = [('font', 'sans', 'Default font'), ('fontsize', None, 'Font size'), ('foreground', 'ffffff', 'Font colour'), ('mode', 'bar', "Display mode: 'icon', 'bar', 'both'... |
def save_filter(session: Session, filter: Filter) -> None:
existing = session.query(FilterRecord).filter((FilterRecord.name == filter.name)).first()
filter_json = json.loads(filter.json)
filter_json.pop('name', None)
filter_json.pop('description', None)
filter_record = StoredFilter(filter.name, (fil... |
.django_db
def test_spending_by_subaward_type_success(client):
resp = client.post('/api/v2/search/spending_by_award', content_type='application/json', data=json.dumps({'fields': ['Sub-Award ID'], 'filters': {'award_type_codes': ['10', '06', '07', '08', '09', '11']}, 'subawards': True}))
assert (resp.status_code... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.