code stringlengths 281 23.7M |
|---|
class ElasticNet(Regression):
def __init__(self, degree=1, reg_factor=0.05, l1_ratio=0.5, n_iterations=3000, learning_rate=0.01):
self.degree = degree
self.regularization = l1_l2_regularization(alpha=reg_factor, l1_ratio=l1_ratio)
super(ElasticNet, self).__init__(n_iterations, learning_rate)... |
class MyModel(HasTraits):
n_meridional = Range(0, 30, 6)
n_longitudinal = Range(0, 30, 11)
scene = Instance(MlabSceneModel, ())
plot = Instance(PipelineBase)
_trait_change('n_meridional,n_longitudinal,scene.activated')
def update_plot(self):
(x, y, z, t) = curve(self.n_meridional, self.n... |
.usefixtures('use_tmpdir')
def test_that_sampling_prior_makes_initialized_fs(prior_ensemble):
config_text = dedent('\n NUM_REALIZATIONS 1\n JOBNAME my_case%d\n GEN_KW KW_NAME template.txt kw.txt prior.txt\n ')
Path('template.tmpl').write_text('Not important, name of the file is impor... |
(stability='alpha')
class InMemoryStorage(StorageInterface[(T, T)]):
def __init__(self, serializer: Optional[Serializer]=None):
super().__init__(serializer)
self._data = {}
def save(self, data: T) -> None:
if (not data):
raise StorageError('Data cannot be None')
if (n... |
class GroupApi(CRUDApi):
def __init__(self, config):
super(GroupApi, self).__init__(config, object_type='group')
_id(Group)
def users(self, group, include=None):
return self._get(self._build_url(self.endpoint.users(id=group, include=include, cursor_pagination=True)))
_id(Group)
def m... |
class SimpleShipitTransformerFetcher(Fetcher):
def __init__(self, build_options, manifest, ctx) -> None:
self.build_options = build_options
self.manifest = manifest
self.repo_dir = os.path.join(build_options.scratch_dir, 'shipit', manifest.name)
self.ctx = ctx
def clean(self) -> ... |
class MassStorageHandler(Handler):
name = 'massstorage'
def is_type(self, device, capabilities):
if ('portable_audio_player' in capabilities):
try:
if ('storage' in device.GetProperty('portable_audio_player.access_method.protocols')):
return 10
... |
class TestConfiguration(unittest.TestCase):
_country: AbstractCountry
_configuration: Configuration
def setUpClass(cls) -> None:
TestConfiguration._country = US()
TestConfiguration._configuration = Configuration('./config/test_data.ini', TestConfiguration._country)
def setUp(self) -> Non... |
def _fail_runaway_processes(logger, worker: mp.Process=None, terminator: mp.Process=None, dispatcher: mp.Process=None):
fail_with_runaway_proc = False
if (worker and worker.is_alive() and ps.pid_exists(worker.pid)):
os.kill(worker.pid, signal.SIGKILL)
logger.warning('Dispatched worker process wi... |
def pick_object(region, rv3d, x, y, near, far, objects):
blender_version = bpy.app.version
coord = mathutils.Vector((x, y))
ray_dir = region_2d_to_vector_3d(region, rv3d, coord)
ray_start = (region_2d_to_origin_3d(region, rv3d, coord) + (ray_dir * near))
ray_end = (ray_start + (ray_dir * (far - near... |
class ArgCountError(ArgValidationError):
__slots__ = ('expected', 'actual')
def __init__(self, expected, actual):
self.expected = expected
self.actual = actual
def __unicode__(self):
return ('Expecting %s args, %s found' % (self.expected, self.actual))
def __str__(self):
... |
def _dumpstruct(generic_obj, obj_dump, color, data, output, offset):
palette = []
colors = [(COLOR_RED, COLOR_BG_RED), (COLOR_GREEN, COLOR_BG_GREEN), (COLOR_YELLOW, COLOR_BG_YELLOW), (COLOR_BLUE, COLOR_BG_BLUE), (COLOR_PURPLE, COLOR_BG_PURPLE), (COLOR_CYAN, COLOR_BG_CYAN), (COLOR_WHITE, COLOR_BG_WHITE)]
ci ... |
class NodeMetadata(_common.FlyteIdlEntity):
def __init__(self, name, timeout=None, retries=None, interruptible=None):
self._name = name
self._timeout = (timeout if (timeout is not None) else datetime.timedelta())
self._retries = (retries if (retries is not None) else _RetryStrategy(0))
... |
class FileSessionPipe(BackendStoredSessionPipe):
_fs_transaction_suffix = '.__emt_sess'
_fs_mode = 384
def __init__(self, expire=3600, secure=False, samesite='Lax', domain=None, cookie_name=None, cookie_data=None, filename_template='emt_%s.sess'):
super().__init__(expire=expire, secure=secure, sames... |
class TestSteerer(unittest.TestCase):
def test_nop_not_valid(self):
def main_generator(dut):
(yield dut.steerer.sel[0].eq(STEER_NOP))
(yield dut.steerer.sel[1].eq(STEER_NOP))
(yield from dut.drivers[0].nop())
(yield)
for i in range(2):
... |
def collapse_excgroups() -> typing.Generator[(None, None, None)]:
    """Yield to the caller, unwrapping single-member exception groups on error.

    If the wrapped code raises a ``BaseExceptionGroup`` that holds exactly
    one exception (possibly nested), the lone inner exception is re-raised
    instead of the group, so callers see the underlying error directly.
    """
    try:
        (yield)
    except BaseException as exc:
        unwrapped = exc
        if has_exceptiongroups:
            # Peel off nested one-element groups until we reach either a
            # plain exception or a group with more than one member.
            while isinstance(unwrapped, BaseExceptionGroup) and len(unwrapped.exceptions) == 1:
                (unwrapped,) = unwrapped.exceptions
        raise unwrapped
class NetFlowV5Flow(object):
_PACK_STR = '!IIIHHIIIIHHxBBBHHBB2x'
_MIN_LEN = struct.calcsize(_PACK_STR)
def __init__(self, srcaddr, dstaddr, nexthop, input_, output, dpkts, doctets, first, last, srcport, dstport, tcp_flags, prot, tos, src_as, dst_as, src_mask, dst_mask):
self.srcaddr = srcaddr
... |
class ITimer(Interface):
interval = Range(low=0.0)
repeat = Union(None, Int)
expire = Union(None, Float)
active = Bool()
def timer(cls, **traits):
pass
def single_shot(cls, **traits):
pass
def start(self):
pass
def stop(self):
pass
def perform(self):
... |
class QueryReinforce():
def __init__(self, query: str=None, model_name: str=None, llm_chat: BaseChat=None):
self.query = query
self.model_name = model_name
self.llm_chat = llm_chat
async def rewrite(self) -> List[str]:
from dbgpt._private.chat_util import llm_chat_response_nostre... |
.django_db
def test_date_range_search_counts_with_one_range(client, monkeypatch, elasticsearch_award_index, awards_over_different_date_ranges_with_different_counts):
setup_elasticsearch_test(monkeypatch, elasticsearch_award_index)
request = {'subawards': False, 'auditTrail': 'Award Table - Tab Counts', 'filters... |
def main():
parser = argparse.ArgumentParser(description='Dmenu (or compatible launcher) frontend for Keepass databases')
parser.add_argument('-a', '--autotype', type=str, required=False, help='Override autotype sequence in config.ini')
parser.add_argument('-c', '--config', type=str, required=False, help='F... |
def test_download_model_weights(isolated_client, mock_fal_persistent_dirs):
from fal.toolkit.utils.download_utils import FAL_MODEL_WEIGHTS_DIR
print(FAL_MODEL_WEIGHTS_DIR)
EXAMPLE_FILE_URL = '
expected_path = ((FAL_MODEL_WEIGHTS_DIR / _hash_url(EXAMPLE_FILE_URL)) / 'README.md')
_client()
def dow... |
def get_arg_parser():
import argparse
ap = argparse.ArgumentParser(description='Run a wxpy-ready python console.')
ap.add_argument('bot', type=str, nargs='*', help='One or more variable name(s) for bot(s) to init (default: None).')
ap.add_argument('-c', '--cache', action='store_true', help='Cache sessio... |
class ESP32P4StubLoader(ESP32P4ROM):
FLASH_WRITE_SIZE = 16384
STATUS_BYTES_LENGTH = 2
IS_STUB = True
def __init__(self, rom_loader):
self.secure_download_mode = rom_loader.secure_download_mode
self._port = rom_loader._port
self._trace_enabled = rom_loader._trace_enabled
s... |
class TriggerThread(threading.Thread):
def __init__(self, redischannel, name, zeromqtopic):
threading.Thread.__init__(self)
self.redischannel = redischannel
self.name = name
self.zeromqtopic = zeromqtopic
self.running = True
def stop(self):
self.running = False
... |
def extractXppletranslateHomeBlog(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type... |
def test_modify_id():
with _get_data_folder().joinpath('network-config.yaml').open() as fp:
mainnet = yaml.safe_load(fp)['live'][0]['networks'][0]
cli_networks._modify('mainnet', 'id=foo')
with _get_data_folder().joinpath('network-config.yaml').open() as fp:
foo = yaml.safe_load(fp)['live'][... |
def gen_function(func_attrs, backend_spec):
inputs = func_attrs['inputs']
x = inputs[0]
y = func_attrs['outputs'][0]
x_shape = x._attrs['shape']
input_type = backend_spec.dtype_to_backend_type(x._attrs['dtype'])
output_type = backend_spec.dtype_to_backend_type(y._attrs['dtype'])
if (input_ty... |
def lazy_import():
from fastly.model.billing import Billing
from fastly.model.billing_estimate_lines import BillingEstimateLines
from fastly.model.billing_estimate_lines_line_items import BillingEstimateLinesLineItems
from fastly.model.billing_status import BillingStatus
from fastly.model.billing_to... |
def extractWhitecherryblWordpressCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_t... |
def test_merge_embedded_messages_primitive() -> None:
class Inner(BaseMessage):
foo: Annotated[(int, Field(1, packed=False))] = 0
class Outer(BaseMessage):
inner: Annotated[(Inner, Field(1))] = field(default_factory=Inner)
assert (Outer.loads(b'\n\x02\x08\x01\n\x02\x08\x02') == Outer(inner=I... |
def create_default_config(model: str='gpt-4', stream: bool=True, verbose: bool=True, max_iterations: int=10, system_instruction: str=DEFAULT_SYSTEM_INSTRUCTION, abs_max_tokens: Optional[int]=None, tools: Optional[list]=None, **kwargs) -> dict:
return {'model': model, 'stream': stream, 'verbose': verbose, 'max_itera... |
class Validator(abc.ABC):
def validate(self) -> ValidationReport:
try:
return self.__validate__()
except Exception as e:
return ValidationReport(ValidationResult.SUCCESS, self.name, f'WARNING: {self.name} threw an unexpected error: {e}')
def name(self) -> str:
pas... |
def write_phylip(aln, outfile=None, interleaved=True, relaxed=False):
width = 60
seq_visited = set([])
show_name_warning = False
lenghts = set((len(seq) for seq in list(aln.id2seq.values())))
if (len(lenghts) > 1):
raise Exception('Phylip format requires sequences of equal lenght.')
seql... |
class RunnerOptions():
reference_data_path: str
reference_data_options: DataOptions
reference_data_sampling: Optional[SamplingOptions]
current_data_path: Optional[str]
current_data_options: Optional[DataOptions]
current_data_sampling: Optional[SamplingOptions]
column_mapping: ColumnMapping
... |
def create_custom3D(domain, folder=None):
domain3D = domain
bt3D = {'bottom': 1, 'front': 2, 'right': 3, 'back': 4, 'left': 5, 'top': 6}
vertices3D = [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [1.0, 1.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0], [1.0, 0.0, 1.0], [1.0, 1.0, 1.0], [0.0, 1.0, 1.0]]
vertexFlags3D = [... |
def has_system_permissions(system_auth_data: SystemAuthContainer, authorization: str, security_scopes: SecurityScopes, db: Session) -> Union[(str, System)]:
(token_data, client) = extract_token_and_load_client(authorization, db)
has_model_level_permissions: bool = has_permissions(token_data, client, security_sc... |
class OptionSeriesTimelineAccessibilityPoint(Options):
def dateFormat(self):
return self._config_get(None)
def dateFormat(self, text: str):
self._config(text, js_type=False)
def dateFormatter(self):
return self._config_get(None)
def dateFormatter(self, value: Any):
self._... |
def run(passed_args=None):
log.setup()
parser = argparse.ArgumentParser(usage='radicale [OPTIONS]')
parser.add_argument('--version', action='version', version=VERSION)
parser.add_argument('--verify-storage', action='store_true', help='check the storage for errors and exit')
parser.add_argument('-C',... |
class TestTopicDeleteView(BaseClientTestCase):
(autouse=True)
def setup(self):
self.perm_handler = PermissionHandler()
self.top_level_forum = create_forum()
self.topic = create_topic(forum=self.top_level_forum, poster=self.user)
self.first_post = PostFactory.create(topic=self.top... |
def extractUnluckyslimeWordpressCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_ty... |
def test_ne():
    """Two register maps differing in a single bitfield reset value compare unequal."""
    base_reg = Register('reg_a', 'Register A', 4)
    base_reg.add_bitfields([
        BitField('bf_a', 'Bit field A', lsb=0),
        BitField('bf_b', 'Bit field B', lsb=1),
    ])
    original_map = RegisterMap()
    original_map.add_registers(base_reg)
    # Deep-copy, then flip one reset value; inequality must detect the change.
    modified_map = copy.deepcopy(original_map)
    modified_map['reg_a']['bf_b'].reset = 1
    assert original_map != modified_map
.parametrize(('platform', 'wayland_display', 'xdg_session_type', 'has_wlcopy', 'result'), [('linux', 'Wayland', '', True, True), ('linux', '', 'Gnome Wayland', True, True), ('linux', 'Wayland', 'Gnome Wayland', True, True), ('linux', '', 'Gnome Shell', True, False), ('linux', '', '', True, False), ('linux', 'Wayland', ... |
class ChecksumType(sqlalchemy.types.TypeDecorator):
impl = sqlalchemy.LargeBinary
cache_ok = True
def process_bind_param(self, value: Optional[str], dialect) -> Optional[bytes]:
if (value is not None):
return unhexlify(value)
else:
return None
def process_result_v... |
def hue_quadrature(h: float) -> float:
hp = util.constrain_hue(h)
if (hp <= HUE_QUADRATURE['h'][0]):
hp += 360
i = (bisect.bisect_left(HUE_QUADRATURE['h'], hp) - 1)
(hi, hii) = HUE_QUADRATURE['h'][i:(i + 2)]
(ei, eii) = HUE_QUADRATURE['e'][i:(i + 2)]
Hi = HUE_QUADRATURE['H'][i]
t = (... |
_metaclass(ABCMeta)
class QCScene():
_attributes = {}
name = ''
path = ''
make_index = False
order = 0
def _get_attribute(self, key, manditory=True):
logger = logging.getLogger(__name__)
try:
attribute = self._attributes[key]
except KeyError:
if ma... |
class PandasToBQEncodingHandlers(StructuredDatasetEncoder):
def __init__(self):
super().__init__(pd.DataFrame, BIGQUERY, supported_format='')
def encode(self, ctx: FlyteContext, structured_dataset: StructuredDataset, structured_dataset_type: StructuredDatasetType) -> literals.StructuredDataset:
... |
class TestCoprDir(CoprsTestCase):
('u1')
.usefixtures('f_users', 'f_users_api', 'f_coprs', 'f_builds', 'f_mock_chroots', 'f_other_distgit', 'f_db')
def test_custom_copr_dir(self):
self.web_ui.create_distgit_package('foocopr', 'unused-package')
self.api3.rebuild_package('foocopr:unused', 'unu... |
class SimpleFrozenDict(dict):
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.error = "Can't write to frozen dict. This is likely an internal error."
def __setitem__(self, key, value):
raise NotImplementedError(self.error)
def pop(self, key: Any, d... |
.usefixtures('use_tmpdir')
def test_read_internal_function():
WorkflowCommon.createErtScriptsJob()
workflow_job = WorkflowJob.from_file(name='SUBTRACT', config_file='subtract_script_job')
assert (workflow_job.name == 'SUBTRACT')
assert workflow_job.internal
assert workflow_job.script.endswith('subtr... |
def test_conf_with_stdin(tmp_path, capfd, monkeypatch):
read_toml_opts.cache_clear()
config_path = (tmp_path / '.mdformat.toml')
config_path.write_text('number = true')
monkeypatch.setattr(sys, 'stdin', StringIO('1. one\n1. two\n1. three'))
with mock.patch('mdformat._cli.Path.cwd', return_value=tmp_... |
def get_business_categories_fabs(business_types):
business_category_set = set()
if (business_types in ('R', '23')):
business_category_set.add('small_business')
if (business_types in ('Q', '22')):
business_category_set.add('other_than_small_business')
if (business_category_set & {'small_b... |
class TaskDataOutputMetaData(TaskOutputMetaData):
def __init__(self, sample_df, o_sequence, dtypes, number_of_records, df_description):
super(TaskDataOutputMetaData, self).__init__(o_sequence, elmdpenum.TaskOutputMetaDataTypes.DATA_OUTPUT.value)
self.sample_df = sample_df
self.dtypes = dtype... |
def create_ovmm_env_fn(config: 'DictConfig') -> HabitatOpenVocabManipEnv:
habitat_config = config.habitat
dataset = make_dataset(habitat_config.dataset.type, config=habitat_config.dataset)
env_class_name = _get_env_name(config)
env_class = get_env_class(env_class_name)
habitat_env = env_class(config... |
class PermChecker(commands.Cog):
__version__ = '0.1.0'
def format_help_for_context(self, ctx):
pre_processed = super().format_help_for_context(ctx)
return f'''{pre_processed}
Cog Version: {self.__version__}'''
def __init__(self, bot):
self.bot = bot
_group()
async def permche... |
class MayaviMlabPreferencesPage(PreferencesPage):
category = 'Mayavi'
help_id = ''
name = 'Mlab'
preferences_path = 'mayavi.mlab'
backend = Enum('auto', 'envisage', 'simple', 'test', desc='the mlab backend to use')
background_color = Tuple(Range(0.0, 1.0, 1.0), Range(0.0, 1.0, 1.0), Range(0.0, 1... |
def get_elems_count(timings, slice, site, bel_type):
combinational = 0
sequential = 0
for delay in timings[slice][site][bel_type]:
if ('sequential' in timings[slice][site][bel_type][delay]):
sequential += 1
else:
combinational += 1
return (combinational, sequentia... |
_deserializable
class DocxFileChunker(BaseChunker):
def __init__(self, config: Optional[ChunkerConfig]=None):
if (config is None):
config = ChunkerConfig(chunk_size=1000, chunk_overlap=0, length_function=len)
text_splitter = RecursiveCharacterTextSplitter(chunk_size=config.chunk_size, ch... |
def execute(commands, parameters, stdout=None):
for command in commands:
click.echo('[EXECUTING] {}'.format(command.format(**parameters)))
try:
subprocess.check_call([arg.format(**parameters) for arg in command.split()], stdout=stdout)
except subprocess.CalledProcessError as exc:... |
def test_item_group_set_invalid(monkeypatch: MonkeyPatch):
    """Editing the group with the canned inputs leaves its values unchanged."""
    # Canned answers for input(): consumed one per prompt, in order.
    responses = ['0', '5']
    monkeypatch.setattr('builtins.input', lambda: responses.pop(0))
    group = item.create_item_group(['name_1', 'name_2', 'name_3'], [0, 1, 5], [50, 50, 50], 'value', 'name')
    group.edit()
    # The edit driven by the inputs above must not alter the stored values.
    assert group.values == [0, 1, 5]
('cuda.dual_bmm_rrr_div.func_decl')
def gen_function_decl(func_attrs):
func_name = func_attrs['name']
input_ndims = len(func_attrs['input_accessors'][0].original_shapes)
weight_ndims = len(func_attrs['input_accessors'][1].original_shapes)
return common_bias.FUNC_DECL_TEMPLATE.render(func_name=func_name,... |
class OptionSeriesItemSonificationTracksMappingPlaydelay(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str):
self._c... |
class GripSettings(Settings):
    """Settings for gripping edges.

    absolute_params:
        style: choice of 'wave' or 'bumps'
        outset: whether the grip protrudes outward (default True)
    relative_params:
        depth: grip depth as a relative factor (default 0.3)
    """
    absolute_params = {'style': ('wave', 'bumps'), 'outset': True}
    relative_params = {'depth': 0.3}

    def edgeObjects(self, boxes, chars: str = 'g', add: bool = True):
        """Build the GrippingEdge for *boxes* and register it under *chars*."""
        grip_edges = [GrippingEdge(boxes, self)]
        return self._edgeObjects(grip_edges, boxes, chars, add)
def norm_angle_channel(angle: str) -> float:
if angle.endswith('turn'):
value = (norm_float(angle[:(- 4)]) * CONVERT_TURN)
elif angle.endswith('grad'):
value = (norm_float(angle[:(- 4)]) * CONVERT_GRAD)
elif angle.endswith('rad'):
value = math.degrees(norm_float(angle[:(- 3)]))
e... |
def insert_test_fo(backend_db, uid, file_name='test.zip', size=1, analysis: (dict | None)=None, parent_fw=None, comments=None):
test_fo = create_test_file_object()
test_fo.uid = uid
test_fo.file_name = file_name
test_fo.size = size
test_fo.virtual_file_path = {}
if analysis:
test_fo.proc... |
class OptionSeriesPictorialSonificationDefaultinstrumentoptionsMappingVolume(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: s... |
class PieChartFace(CircleFace):
def __init__(self, radius, data, name='', padding_x=0, padding_y=0, tooltip=None):
super().__init__(self, name=name, color=None, padding_x=padding_x, padding_y=padding_y, tooltip=tooltip)
self.radius = radius
self._max_radius = 0
self._center = (0, 0)
... |
def upgrade():
op.execute("UPDATE users SET deleted_at = current_timestamp, _email = concat(_email, '_') where _email not in (SELECT DISTINCT ON (upper(_email)) _email FROM users);", execution_options=None)
op.execute('create extension citext;', execution_options=None)
op.execute('alter table users alter co... |
.parametrize('with_spiders', [True, False])
def test_jwst_aperture(with_spiders):
    """Check the JWST pupil aperture, parameterized over spider inclusion."""
    # Reference data is stored under a spider-specific name.
    name = 'jwst/pupil'
    if not with_spiders:
        name += '_without_spiders'
    check_aperture(make_jwst_aperture, 6.603464, name, check_normalization=True, check_segmentation=True, with_spiders=with_spiders)
class Capacity(StrictBaseModel):
battery_storage: (NonNegativeInt | None) = Field(None, alias='battery storage')
biomass: (NonNegativeInt | None)
coal: (NonNegativeInt | None)
gas: (NonNegativeInt | None)
geothermal: (NonNegativeInt | None)
hydro_storage: (NonNegativeInt | None) = Field(None, al... |
def test_parse_block_identifier_bytes_and_hex(w3):
block_0 = w3.eth.get_block(0)
block_0_hash = block_0['hash']
block_id_by_hash = parse_block_identifier(w3, block_0_hash)
assert (block_id_by_hash == 0)
block_0_hexstring = w3.to_hex(block_0_hash)
block_id_by_hex = parse_block_identifier(w3, bloc... |
def test_butterworth_returns_correct_value_with_lowpass_filter_type_and_float64_precision(trace):
(b, a) = signal.butter(3, (.0 / (.0 / 2)), 'low')
b = b.astype('float64', copy=False)
a = a.astype('float64', copy=False)
expected = signal.lfilter(b, a, trace)
result = scared.signal_processing.butterw... |
class DataScientistAgent(ConversableAgent):
DEFAULT_SYSTEM_MESSAGE = 'You are a helpful AI assistant who is good at writing SQL for various databases.\n Based on the given data structure information, use the correct {dialect} SQL to analyze and solve the task, subject to the following constraints.\n Data ... |
.parametrize('function', [get_observations])
.parametrize('keyword', ['summary', 'gen_data', 'summary_with_obs', 'gen_data_with_obs'])
.integration_test
def test_direct_dark_performance_with_libres_facade(benchmark, template_config, monkeypatch, function, keyword):
key = {'summary': 'PSUM1', 'gen_data': 'POLY_RES_'... |
def extractMaouTheYuusha(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
if (':' in item['title']):
postfix = item['title'].split(':', 1)[(- 1)]
if (('Maou the Yuusha' in item[... |
class RegularNode(TestCase):
def test_truthiness(self):
self.assertTrue(_graph.RegularNode('test', (), {}, 'meta'))
((sys.version_info < (3, 6)), 'test requires dict with insertion order')
def test_generate_asciitree_nodes(self):
args = (DummyNode('a'), DummyNode('b'), DummyNode())
k... |
def test_workflow_workflow_get_version_single():
factory = WorkerFactory('localhost', 7933, DOMAIN)
worker = factory.new_worker(TASK_LIST)
worker.register_workflow_implementation_type(TestWorkflowGetVersionSingleImpl)
factory.start()
client = WorkflowClient.new_client(domain=DOMAIN)
workflow: Te... |
def update_bnf_table():
storage_client = StorageClient()
bucket = storage_client.get_bucket()
blobs = bucket.list_blobs(prefix='hscic/bnf_codes/')
blobs = sorted(blobs, key=(lambda blob: blob.name), reverse=True)
blob = blobs[0]
bq_client = BQClient('hscic')
table = bq_client.get_table('bnf'... |
class TASFilterTree(FilterTree):
def raw_search(self, tiered_keys, child_layers, filter_string):
if (len(tiered_keys) == 0):
if (child_layers != 0):
if ((child_layers == 2) or (child_layers == (- 1))):
tier2_nodes = self.tier_2_search(tiered_keys, filter_strin... |
('init', help='Initialize a new bench instance in the specified path')
('path')
('--version', '--frappe-branch', 'frappe_branch', default=None, help='Clone a particular branch of frappe')
('--ignore-exist', is_flag=True, default=False, help='Ignore if Bench instance exists.')
('--python', type=str, default='python3', h... |
class PlanePoiseuilleFlow_p2(PlaneBase):
def __init__(self, plane_theta=0.0, plane_phi=old_div(math.pi, 2.0), v_theta=old_div(math.pi, 2.0), v_phi=None, v_norm=1.0, mu=1.0, grad_p=1.0, L=[1.0, 1.0, 1.0]):
PlaneBase.__init__(self, plane_theta, plane_phi, v_theta, v_phi, v_norm, mu, grad_p, L)
def uOfX(se... |
class OptionSeriesPyramid3dSonificationTracksMappingTremoloSpeed(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str):
... |
def main():
module_spec = schema_to_module_spec(versioned_schema)
mkeyname = 'name'
fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path':... |
def common_reverse_passthrough(args):
replace_args = {'-A': '-D', '--append': '--delete', '-I': '-D', '--insert': '--delete', '-N': '-X', '--new-chain': '--delete-chain'}
ret_args = args[:]
for x in replace_args:
try:
idx = ret_args.index(x)
except ValueError:
continu... |
class WindowModalDialog(QDialog, MessageBoxMixin):
def __init__(self, parent, title: Optional[str]=None, hide_close_button: bool=False):
flags = (Qt.WindowSystemMenuHint | Qt.WindowTitleHint)
if (not hide_close_button):
flags |= Qt.WindowCloseButtonHint
QDialog.__init__(self, par... |
class FalModelTask(DBTTask):
script: Union[(FalLocalHookTask, FalIsolatedHookTask)]
def set_run_index(self, index_provider: DynamicIndexProvider) -> None:
super().set_run_index(index_provider)
self.script.set_run_index(index_provider)
def execute(self, args: argparse.Namespace, fal_dbt: FalD... |
class TestCreateIndexParamSource():
def test_create_index_inline_with_body(self):
source = params.CreateIndexParamSource(track.Track(name='unit-test'), params={'index': 'test', 'body': {'settings': {'index.number_of_replicas': 0}, 'mappings': {'doc': {'properties': {'name': {'type': 'keyword'}}}}}})
... |
class OptionSeriesXrangeSonificationTracksMappingPlaydelay(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str):
self.... |
class OptionsSparkLinePie(OptionsSpark):
def sliceColors(self):
return self._config_get(None)
def sliceColors(self, values):
self._config(values)
def offset(self):
return self._config_get(None)
def offset(self, value):
self._config(value)
def borderWidth(self):
... |
def _check_left_operator(node: NodeBase, depth: int) -> bool:
    """Return True when *node*'s enclosing boolean binary operation is '||'.

    Searches up to *depth* levels of parents for boolean BinaryOperation
    nodes and inspects the first one whose left expression is *node* (or
    contains it). Raises StopIteration when no such parent exists —
    presumably callers only invoke this for nodes known to sit inside a
    boolean expression (TODO confirm).
    """
    bool_parents = node.parents(depth, {'nodeType': 'BinaryOperation', 'typeDescriptions.typeString': 'bool'})
    # First boolean parent where `node` sits on the left-hand side.
    enclosing = next(
        parent for parent in bool_parents
        if parent.leftExpression == node or node.is_child_of(parent.leftExpression)
    )
    return enclosing.operator == '||'
class DetoolsBsdiffTest(unittest.TestCase):
def test_bsdiff(self):
datas = [([0], b'', b'', []), ([1, 0], b'1', b'12', [b'\x01', b'\x00', b'\x01', b'2', b'A']), ([4, 0, 1, 2, 3], b'1234', b'', [b'\x00', b'', b'\x11', b'', b'\x01']), ([41, 28, 32, 29, 34, 31, 37, 33, 38, 35, 30, 36, 5, 4, 0, 10, 1, 13, 23, 1... |
def subproc_execute(command: List[str]) -> Tuple[(str, str)]:
try:
result = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, check=True)
return (result.stdout, result.stderr)
except subprocess.CalledProcessError as e:
raise Exception(f'''Command: {e.cmd}... |
def test_database_url_and_parts_defined_ok_if_consistent_none_parts():
with mock.patch.dict(os.environ, {ENV_CODE_VAR: _UnitTestDbPartsNoneConfig.ENV_CODE, 'DATABASE_URL': 'postgres://dummy::12345/fresh_new_db_name', 'USASPENDING_DB_HOST': 'foobar', 'USASPENDING_DB_PORT': '12345', 'USASPENDING_DB_NAME': 'fresh_new_... |
class TestFieldImports():
def is_field(self, name, value):
return (isinstance(value, type) and issubclass(value, Field) and (not name.startswith('_')))
def test_fields(self):
msg = 'Expected `fields.%s` to be imported in `serializers`'
field_classes = [key for (key, value) in inspect.get... |
def add_NotificationServiceServicer_to_server(servicer, server):
rpc_method_handlers = {'sendEvent': grpc.unary_unary_rpc_method_handler(servicer.sendEvent, request_deserializer=notification__service__pb2.SendEventRequest.FromString, response_serializer=notification__service__pb2.SendEventsResponse.SerializeToStrin... |
class InRange(Generic[_T], QueryPredicate):
def __init__(self, column: Column[_T], *, lower: Optional[_T]=None, upper: Optional[_T]=None) -> None:
self._column = column
self._lower: Final[Optional[_T]] = lower
self._upper: Final[Optional[_T]] = upper
def apply(self, query: Query[_Q]) -> ... |
def get_serializable(entity_mapping: OrderedDict, settings: SerializationSettings, entity: FlyteLocalEntity, options: Optional[Options]=None) -> FlyteControlPlaneEntity:
if (entity in entity_mapping):
return entity_mapping[entity]
from flytekit.remote import FlyteLaunchPlan, FlyteTask, FlyteWorkflow
... |
class SharedMemoryModel(Model, metaclass=SharedMemoryModelBase):
objects = SharedMemoryManager()
class Meta(object):
abstract = True
def _get_cache_key(cls, args, kwargs):
result = None
if hasattr(cls._meta, 'pks'):
pk = cls._meta.pks[0]
else:
pk = cls... |
def test_node_not_alive_can_not_be_started():
runner = CliRunner()
with patch('ipfs side_effect=ipfs patch('time.sleep'), patch('subprocess.Popen'):
with pytest.raises(click.ClickException, match='Failed to connect or start ipfs node! Please check ipfs is installed or launched!'):
runner.inv... |
def main():
logging.basicConfig(format=LOGGING_FORMAT)
parser = argparse.ArgumentParser(description='Example with non-optional arguments')
add_debug(parser)
add_app(parser)
add_env(parser)
add_region(parser)
add_properties(parser)
args = parser.parse_args()
logging.getLogger(__packag... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.