code stringlengths 281 23.7M |
|---|
def unmarshal_webhook_request(request: WebhookRequest, spec: SchemaPath, base_url: Optional[str]=None, cls: Optional[WebhookRequestUnmarshallerType]=None, **unmarshaller_kwargs: Any) -> RequestUnmarshalResult:
config = Config(server_base_url=base_url, webhook_request_unmarshaller_cls=(cls or _UNSET), **unmarshaller... |
class ExternalSubscription(Resource):
schema = {'account': 'AccountMini', 'activated_at': datetime, 'app_identifier': str, 'auto_renew': bool, 'canceled_at': datetime, 'created_at': datetime, 'expires_at': datetime, 'external_id': str, 'external_product_reference': 'ExternalProductReferenceMini', 'id': str, 'in_gra... |
class _BackendPathFinder():
def __init__(self, backend_path, backend_module):
self.backend_path = backend_path
self.backend_module = backend_module
(self.backend_parent, _, _) = backend_module.partition('.')
def find_spec(self, fullname, _path, _target=None):
if ('.' in fullname)... |
class CalcChangeModuleChargesCommand(wx.Command):
def __init__(self, fitID, projected, chargeMap, ignoreRestrictions=False, recalc=True):
wx.Command.__init__(self, True, 'Change Module Charges')
self.fitID = fitID
self.projected = projected
self.chargeMap = chargeMap
self.ign... |
def convert_standalone_batchnorms(sess, input_op_names: Union[(str, List[str])], output_op_names: Union[(str, List[str])], bns_folded: List) -> List[tf.Operation]:
list_of_ordered_ops = get_ordered_ops(sess.graph, input_op_names, output_op_names)
converted_bns = []
for op in list_of_ordered_ops:
if ... |
class RiemannianSpace(GeodesicLengthSpace):
def to_tangent(self, pt_a: Point, vec_w: Vector) -> Vector:
def inner_product(self, pt_a: Point, vec_w: Vector, vec_v: Vector):
def squared_norm(self, pt_a: Point, vec_w: Vector):
return self.inner_product(pt_a, vec_w, vec_w)
def norm(self, pt_a: Point... |
def _dump_2e_ints(hijkl: np.ndarray, mos: Union[(range, List[int])], outfile: TextIO, beta: int=0) -> None:
idx_offsets = [1, 1]
for b in range(beta):
idx_offsets[(1 - b)] += len(mos)
hijkl_elements = set()
for elem in itertools.product(mos, repeat=4):
if np.isclose(hijkl[elem], 0.0, ato... |
def euler2mat(euler):
euler = np.asarray(euler, dtype=np.float64)
assert (euler.shape[(- 1)] == 3), 'Invalid shaped euler {}'.format(euler)
(ai, aj, ak) = ((- euler[(..., 2)]), (- euler[(..., 1)]), (- euler[(..., 0)]))
(si, sj, sk) = (np.sin(ai), np.sin(aj), np.sin(ak))
(ci, cj, ck) = (np.cos(ai), n... |
class ImageNetDataLoader():
def __init__(self, tfrecord_dir: str, image_size: int=224, batch_size: int=128, num_epochs: int=1, format_bgr: bool=False, is_training: bool=False, model_type: str='resnet'):
self._image_size = image_size
self._batch_size = batch_size
self._format_bgr = format_bgr... |
class F19_TestCase(F18_TestCase):
def runTest(self):
F18_TestCase.runTest(self)
self.assert_parse('network --device=eth0 --bondslaves=A,B --bondopts=opt1,opt2', 'network --bootproto=dhcp --device=eth0 --bondslaves=A,B --bondopts=opt1,opt2\n')
self.assert_parse('network --device=eth0 --vlani... |
class VerticalTileConfig(Config):
auto_fullscreen = True
groups = [libqtile.config.Group('a'), libqtile.config.Group('b'), libqtile.config.Group('c'), libqtile.config.Group('d')]
layouts = [layout.VerticalTile(columns=2)]
floating_layout = libqtile.resources.default_config.floating_layout
keys = []
... |
def generate_boixo_2018_beyond_classical_v2(qubits: Iterable[cirq.GridQubit], cz_depth: int, seed: int) -> cirq.Circuit:
non_diagonal_gates = [(cirq.X ** (1 / 2)), (cirq.Y ** (1 / 2))]
rand_gen = random.Random(seed).random
circuit = cirq.Circuit()
circuit.append((cirq.H(qubit) for qubit in qubits))
... |
class Titer_paths(TestCase):
def setUp(self):
self.root = os.path.realpath(mkdtemp())
def tearDown(self):
shutil.rmtree(self.root)
def test_empty(self):
assert (list(iter_paths(self.root)) == [])
def test_one_file(self):
(fd, name) = mkstemp(dir=self.root)
os.clos... |
def apply_logging_patch():
if ((sys.version_info.major > 3) or ((sys.version_info.major == 3) and (sys.version_info.minor >= 8))):
return
global _patch_applied
if (not _config.is_cli):
raise ValueError('This patch globally adjusts the logging module. This patch is not to be used within pymed... |
def map_jetson_nano(engine):
    """Build the per-row engine lists for the Jetson Nano board.

    Returns a three-element list: [APE, NVENC+NVDEC, NVJPG+SE], where each
    entry comes from ``add_engine_in_list`` and rows that hold two engines
    are formed by list concatenation.
    """
    def row(label):
        # For this board every add_engine_in_list string argument is the
        # same engine label.
        return add_engine_in_list(label, engine, label, label)

    ape_row = row('APE')
    video_row = row('NVENC') + row('NVDEC')
    misc_row = row('NVJPG') + row('SE')
    return [ape_row, video_row, misc_row]
_predicate(bytes)
class BytesBase64Provider(LoaderProvider, Base64DumperMixin):
def _provide_loader(self, mediator: Mediator, request: LoaderRequest) -> Loader:
def bytes_base64_loader(data):
try:
encoded = data.encode('ascii')
except AttributeError:
r... |
def perturb_utterances(utterances, allowed_durations, args):
perturbed_utterances = []
for u in utterances:
if (u.dur < allowed_durations[0]):
i = 0
elif (u.dur > allowed_durations[(- 1)]):
i = len(allowed_durations)
else:
i = 1
while (i < ... |
def convert_openai_checkpoint_to_pytorch(openai_checkpoint_folder_path, openai_config_file, pytorch_dump_folder_path):
if (openai_config_file == ''):
config = OpenAIGPTConfig()
else:
config = OpenAIGPTConfig.from_json_file(openai_config_file)
model = OpenAIGPTModel(config)
load_tf_weight... |
class FTraceLine():
def __init__(self, t, m='', d=''):
self.length = 0.0
self.fcall = False
self.freturn = False
self.fevent = False
self.fkprobe = False
self.depth = 0
self.name = ''
self.type = ''
self.time = float(t)
if ((not m) and ... |
class QuotientView(discord.ui.View):
message: discord.Message
custom_id = None
def __init__(self, ctx: Context, *, timeout: Optional[float]=30):
super().__init__(timeout=timeout)
self.ctx = ctx
self.bot = ctx.bot
async def interaction_check(self, interaction: discord.Interaction)... |
_task('pytorch_translate_multilingual')
class PytorchTranslateMultilingualTask(PytorchTranslateTask):
def __init__(self, args, source_dictionaries, target_dictionaries):
self.source_dictionaries = source_dictionaries
self.target_dictionaries = target_dictionaries
self.encoder_langs = list(so... |
def infix_notation(base_expr: ParserElement, op_list: List[InfixNotationOperatorSpec], lpar: Union[(str, ParserElement)]=Suppress('('), rpar: Union[(str, ParserElement)]=Suppress(')')) -> ParserElement:
class _FB(FollowedBy):
def parseImpl(self, instring, loc, doActions=True):
self.expr.try_pars... |
def test_blobs_2d_cutting_plane():
(X, Y) = make_blobs(n_samples=80, centers=2, random_state=1)
Y = ((2 * Y) - 1)
X = np.hstack([X, np.ones((X.shape[0], 1))])
(X_train, X_test, Y_train, Y_test) = (X[:40], X[40:], Y[:40], Y[40:])
pbl = BinaryClf(n_features=3)
svm = NSlackSSVM(pbl, check_constrain... |
def test_show_latest_non_decorated(tester: CommandTester, poetry: Poetry, installed: Repository, repo: TestRepository) -> None:
poetry.package.add_dependency(Factory.create_dependency('cachy', '^0.1.0'))
poetry.package.add_dependency(Factory.create_dependency('pendulum', '^2.0.0'))
cachy_010 = get_package('... |
class Subscription(Resource):
schema = {'account': 'AccountMini', 'action_result': dict, 'activated_at': datetime, 'active_invoice_id': str, 'add_ons': ['SubscriptionAddOn'], 'add_ons_total': float, 'auto_renew': bool, 'bank_account_authorized_at': datetime, 'billing_info_id': str, 'canceled_at': datetime, 'collect... |
class ExecutionPlan(TermGraph):
def __init__(self, domain, terms, start_date, end_date, min_extra_rows=0):
super(ExecutionPlan, self).__init__(terms)
specializations = {t: t.specialize(domain) for t in self.graph if isinstance(t, LoadableTerm)}
self.graph = nx.relabel_nodes(self.graph, speci... |
class BusinessEntity(Resource):
    # API resource describing a business entity.
    # `schema` maps each JSON attribute name to the type used when
    # deserializing API responses. String values (e.g. 'Address') name
    # other Resource classes for nested objects — presumably resolved by
    # the Resource base class; verify against Resource's deserializer.
    schema = {'code': str, 'created_at': datetime, 'default_registration_number': str, 'default_vat_number': str, 'id': str, 'invoice_display_address': 'Address', 'name': str, 'object': str, 'subscriber_location_countries': list, 'tax_address': 'Address', 'updated_at': datetime}
class Xor(Codec):
codec_id = 'imagecodecs_xor'
def __init__(self, shape=None, dtype=None, axis=(- 1)):
self.shape = (None if (shape is None) else tuple(shape))
self.dtype = (None if (dtype is None) else numpy.dtype(dtype).str)
self.axis = axis
def encode(self, buf):
if ((self... |
def monitor_namespace(namespace):
pods = list_pods(namespace)
notready_pods = []
for pod in pods:
try:
pod_info = cli.read_namespaced_pod_status(pod, namespace, pretty=True)
except ApiException as e:
logging.error(('Exception when calling Co... |
def test_reportchars_all_error(pytester: Pytester) -> None:
pytester.makepyfile(conftest='\n def pytest_runtest_teardown():\n assert 0\n ', test_simple='\n def test_foo():\n pass\n ')
result = pytester.runpytest('-ra')
result.stdout.fnmatch_lines(['ERROR*tes... |
class AttrVI_ATTR_SUPPRESS_END_EN(BooleanAttribute):
resources = [(constants.InterfaceType.asrl, 'INSTR'), (constants.InterfaceType.gpib, 'INSTR'), (constants.InterfaceType.tcpip, 'INSTR'), (constants.InterfaceType.tcpip, 'SOCKET'), (constants.InterfaceType.usb, 'INSTR'), (constants.InterfaceType.usb, 'RAW'), (cons... |
class AsymmetricSplitOperatorTrotterStep(SplitOperatorTrotterStep):
def trotter_step(self, qubits: Sequence[cirq.Qid], time: float, control_qubit: Optional[cirq.Qid]=None) -> cirq.OP_TREE:
n_qubits = len(qubits)
def two_body_interaction(p, q, a, b) -> cirq.OP_TREE:
(yield rot11(rads=(((-... |
def getTopoSetDictTemplate(topoSetName, topoSetType, box):
return ('\n actions\n (\n {\n name %s;\n type %s;\n action new;\n source boxToPoint;\n sourceInfo\n {\n box (%f %f %f) (%f %f %f);\n }\n }\n );\n ' % (topoSetName, topoS... |
class SawyerCoffeeButtonEnvV2(SawyerXYZEnv):
def __init__(self):
self.max_dist = 0.03
hand_low = ((- 0.5), 0.4, 0.05)
hand_high = (0.5, 1.0, 0.5)
obj_low = ((- 0.1), 0.8, (- 0.001))
obj_high = (0.1, 0.9, (+ 0.001))
goal_low = (obj_low + np.array([(- 0.001), ((- 0.22) ... |
def fake_platforms_file(tmp_path):
    """Yield the path to a temporary ``platforms.txt`` with sample entries.

    Written as a generator so it can serve as a pytest-style fixture: the
    file (two comment header lines plus three satellite rows) is created up
    front under *tmp_path* and its path is handed to the consumer.
    """
    content = ''.join([
        '# Some header lines - line 1\n',
        '# Some header lines - line 2\n',
        'NOAA-21 54234\n',
        'NOAA-20 43013\n',
        'UNKNOWN SATELLITE 99999\n',
    ])
    target = tmp_path / 'platforms.txt'
    with open(target, 'w') as handle:
        handle.write(content)
    yield target
def main():
data_argumentation = json.load(open('argumentation_map.json', 'r', encoding='utf-8'))
original_data = json.load(open('../data/Total_data.json', 'r', encoding='utf-8'))
dev_path = open('../data/dev_natural_perturbation.txt', 'a', encoding='utf-8')
argumentation_dict = open('argumentation_map_... |
class _Kernel32(Protocol):
def CreateIoCompletionPort(self, FileHandle: Handle, ExistingCompletionPort: (CData | AlwaysNull), CompletionKey: int, NumberOfConcurrentThreads: int, /) -> Handle:
...
def CreateEventA(self, lpEventAttributes: AlwaysNull, bManualReset: bool, bInitialState: bool, lpName: Alway... |
class _LazyConfigMapping(OrderedDict):
def __init__(self, mapping):
self._mapping = mapping
self._extra_content = {}
self._modules = {}
def __getitem__(self, key):
if (key in self._extra_content):
return self._extra_content[key]
if (key not in self._mapping):
... |
def convert_deprecated_list(vals: list[str], name: str) -> re.Pattern:
regex_input = '^({})$'.format('|'.join(map(re.escape, vals)))
logger.warning('Your Match with the %s property is using lists which are deprecated, replace Match(%s=%s) with Match(%s=re.compile(r"%s")) after importing the \'re\' module', name... |
def fuzzy_match_filter(t, col, val, negate=False):
trim_t = t[col].str.replace(' ', '')
trim_val = val.replace(' ', '')
if negate:
res = t[(~ trim_t.str.contains(trim_val, regex=False))]
else:
res = t[trim_t.str.contains(trim_val, regex=False)]
res = res.reset_index(drop=True)
re... |
class PrometheusReporter(ProgressReporter):
def __init__(self, prom_pushgateway_addr, prom_job, labels, total_steps_num=None):
self._total_steps_num = total_steps_num
self._completed_steps = 0.0
registry = CollectorRegistry()
self._migration_completion_percent = Gauge('migration_comp... |
.register_api()
_api()
class scatter(DaskStream):
def update(self, x, who=None, metadata=None):
client = default_client()
self._retain_refs(metadata)
future_as_list = (yield client.scatter([x], asynchronous=True, hash=False))
future = future_as_list[0]
f = (yield self._emit(f... |
def test_assert_key_type_value_wrong_type_raises_with_extra_error_text():
info = ContextItemInfo(key='key1', key_in_context=True, expected_type=str, is_expected_type=False, has_value=True)
with pytest.raises(KeyInContextHasNoValueError) as err_info:
Context().assert_key_type_value(info, 'mydesc', 'extra... |
def get_paginated_repositories_for_namespace(namespace_id, page_token=None, page_size=50):
try:
query = Repository.select(Repository.name, Repository.id).where((Repository.state == RepositoryState.NORMAL), (Repository.namespace_user == namespace_id))
(repos, next_page_token) = modelutil.paginate(que... |
class NeighborList(Sequence[_T]):
class Modes(enum.Enum):
edge = enum.auto()
exception = enum.auto()
def __init__(self, items: Sequence[_T]=None, default: Union[(_T, Unset)]=UNSET, mode: Modes=Modes.exception) -> None:
if (not isinstance(mode, self.Modes)):
raise TypeError('M... |
def export_scripting(torch_model):
assert (TORCH_VERSION >= (1, 8))
fields = {'proposal_boxes': Boxes, 'objectness_logits': Tensor, 'pred_boxes': Boxes, 'scores': Tensor, 'pred_classes': Tensor, 'pred_masks': Tensor, 'pred_keypoints': torch.Tensor, 'pred_keypoint_heatmaps': torch.Tensor}
assert (args.format... |
def test_widgetbox_with_systray_reconfigure_screens_box_open(manager_nospawn, minimal_conf_noscreen, backend_name):
if (backend_name == 'wayland'):
pytest.skip('Skipping test on Wayland.')
config = minimal_conf_noscreen
config.screens = [libqtile.config.Screen(top=libqtile.bar.Bar([WidgetBox(widgets... |
class CorLocTest(tf.test.TestCase):
def test_compute_corloc_with_normal_iou_threshold(self):
num_groundtruth_classes = 3
matching_iou_threshold = 0.5
nms_iou_threshold = 1.0
nms_max_output_boxes = 10000
eval1 = per_image_evaluation.PerImageEvaluation(num_groundtruth_classes, ... |
def test_executor_should_write_pep610_url_references_for_wheel_files(tmp_venv: VirtualEnv, pool: RepositoryPool, config: Config, io: BufferedIO, fixture_dir: FixtureDirGetter) -> None:
url = (fixture_dir('distributions') / 'demo-0.1.0-py2.py3-none-any.whl').resolve()
package = Package('demo', '0.1.0', source_ty... |
class BaseRDBMSIndexWriter(StartMixin):
def __init__(self, uri, db, conn, title):
self.uri = uri
self.db = db
self.conn = conn
self.title = title
self.prepare_sql_statements()
def prepare_sql_statements(self):
self.ADD_DATASET_SQL = self.prepare_single(ADD_DATASET... |
class TestAccount(CommandTest):
def test_ooc_look(self):
if (settings.MULTISESSION_MODE < 2):
self.call(account.CmdOOCLook(), '', 'You are out-of-character (OOC).', caller=self.account)
if (settings.MULTISESSION_MODE == 2):
self.call(account.CmdOOCLook(), '', 'Account TestAcc... |
def get_mean(norm_value=255, dataset='activitynet'):
assert (dataset in ['activitynet', 'kinetics'])
if (dataset == 'activitynet'):
return [(114.7748 / norm_value), (107.7354 / norm_value), (99.475 / norm_value)]
elif (dataset == 'kinetics'):
return [(110. / norm_value), (103. / norm_value),... |
class SaveUtils():
def remove_quantization_wrappers(module):
for (module_name, module_ref) in module.named_children():
if isinstance(module_ref, QcQuantizeWrapper):
setattr(module, module_name, module_ref._module_to_wrap)
else:
SaveUtils.remove_quantiz... |
def retrieveLogs():
if sysvals.useftrace:
tracer = sysvals.fgetVal('current_tracer').strip()
if (tracer != 'function_graph'):
doError('ftrace not configured for a boot callgraph')
sysvals.systemInfo(aslib.dmidecode(sysvals.mempath))
sysvals.initTestOutput('boot')
sysvals.writ... |
class LDIFCopy(LDIFParser):
def __init__(self, input_file, output_file, ignored_attr_types=None, max_entries=0, process_url_schemes=None, base64_attrs=None, cols=76, line_sep='\n'):
LDIFParser.__init__(self, input_file, ignored_attr_types, max_entries, process_url_schemes)
self._output_ldif = LDIFWr... |
def create_mc_sharding(sharding_type: str, sharding_infos: List[EmbeddingShardingInfo], env: ShardingEnv, device: Optional[torch.device]=None) -> EmbeddingSharding[(SequenceShardingContext, KeyedJaggedTensor, torch.Tensor, torch.Tensor)]:
if (sharding_type == ShardingType.ROW_WISE.value):
return RwSequenceE... |
def get_prompt(sample, resource):
ref = resource[sample['question_id']]
messages = [{'role': 'system', 'content': 'You are a helpful assistant.'}, {'role': 'user', 'content': ''}]
messages[(- 1)]['content'] = ',:\n1. ,,\n2. ,,,,\n3. ,,\n4. ,\n5. ,,,\n6. ,,'
messages.append({'role': 'assistant', 'content... |
class Decoder(torch.nn.Module):
def __init__(self, out_channels, layers, bridges, norm=NullModule):
super().__init__()
layers = list(layers)
bridges = list(bridges)
assert (len(layers) == len(bridges))
kernel_size = 3
padding = (kernel_size // 2)
num_convs = 2... |
class StaticSids(Filter):
inputs = ()
window_length = 0
params = ('sids',)
def __new__(cls, sids):
sids = frozenset(sids)
return super(StaticSids, cls).__new__(cls, sids=sids)
def _compute(self, arrays, dates, sids, mask):
my_columns = sids.isin(self.params['sids'])
r... |
def test_emit_session_meta_update(session_update, flask_app, mocker, default_game_list):
mock_emit: MagicMock = mocker.patch('flask_socketio.emit')
session_json = {'id': 1, 'name': 'Debug', 'visibility': MultiplayerSessionVisibility.VISIBLE.value, 'users_list': [{'id': 1234, 'name': 'The Name', 'admin': True, '... |
def test_upload_generic_package_as_bytes(tmp_path, project):
path = (tmp_path / file_name)
path.write_text(file_content)
package = project.generic_packages.upload(package_name=package_name, package_version=package_version, file_name=file_name, data=path.read_bytes())
assert isinstance(package, GenericPa... |
class TestLocScaleRVTransform():
.parametrize('rv_size, loc_type, addition', [(None, pt.scalar, True), (2, pt.vector, False), ((2, 1), pt.col, True)])
def test_loc_transform_rv(self, rv_size, loc_type, addition):
loc = loc_type('loc')
if addition:
y_rv = (loc + pt.random.normal(0, 1,... |
def _convert_examples_to_generation_features(examples: List[GenerationExample], tokenizer: PreTrainedTokenizerFast, args: GenerationTrainArguments):
logger.info('tokenize sentences, it could take a lot of time...')
start = time.time()
batch_encoding = tokenizer([example.text for example in examples], max_le... |
.parametrize('max_labels,expected', [(10, [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]), (5, [0.0, '', 2.0, '', 4.0, '', 6.0, '', 8.0, '']), (3, [0.0, '', '', '', 4.0, '', '', '', 8.0, '', '', ''])])
def test_max_labels_linear(max_labels, expected):
colorbar = cm.LinearColormap((['red'] * 10), vmin=0, vmax=9,... |
def main():
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('urdf_file', help='urdf file')
args = parser.parse_args()
pybullet_planning.connect()
with pybullet_planning.LockRenderer():
body = p.loadURDF(args.urdf_file)
aabb = p... |
def test_holder_with_addressof_operator():
a = m.TypeForHolderWithAddressOf.make()
a.print_object_1()
a.print_object_2()
a.print_object_3()
a.print_object_4()
stats = ConstructorStats.get(m.TypeForHolderWithAddressOf)
assert (stats.alive() == 1)
np = m.TypeForHolderWithAddressOf.make()
... |
class DataDrivenTestCase(pytest.Item):
parent: DataSuiteCollector
input: list[str]
output: list[str]
output_inline_start: int
output2: dict[(int, list[str])]
file = ''
line = 0
files: list[tuple[(str, str)]]
test_modules: list[str]
expected_stale_modules: dict[(int, set[str])]
... |
class BasicDataclass():
a: int
b: InitVarInt
c: InitVarInt = field(default=1)
d: str = 'text'
e: list = field(default_factory=list)
f: int = field(default=3, init=False)
g: ClassVar[int]
h: ClassVar[int] = 1
i: int = field(default=4, metadata={'meta': 'data'})
def __post_init__(s... |
def _bng_validate_directory():
bng_exec = os.path.realpath(pf.get_path('bng'))
if bng_exec.endswith('.bat'):
conda_prefix = os.environ.get('CONDA_PREFIX')
if conda_prefix:
return os.path.join(conda_prefix, 'share\\bionetgen\\Validate')
return os.path.join(os.path.dirname(bng_exec... |
def get_peer_id(peer: raw.base.Peer) -> int:
if isinstance(peer, raw.types.PeerUser):
return peer.user_id
if isinstance(peer, raw.types.PeerChat):
return (- peer.chat_id)
if isinstance(peer, raw.types.PeerChannel):
return (MAX_CHANNEL_ID - peer.channel_id)
raise ValueError(f'Peer... |
class Effect1012(BaseEffect):
    """Passive skill effect: boosts the damageMultiplier of modules that
    require Medium Railgun Specialization, scaled by the skill's level."""

    type = 'passive'

    def handler(fit, skill, context, projectionRange, **kwargs):
        def requires_spec(mod):
            return mod.item.requiresSkill('Medium Railgun Specialization')

        # Per-level bonus taken from the skill's own attribute.
        bonus = skill.getModifiedItemAttr('damageMultiplierBonus') * skill.level
        fit.modules.filteredItemBoost(requires_spec, 'damageMultiplier', bonus, **kwargs)
class MusepackInfo(StreamInfo):
_error(IOError, MusepackHeaderError)
def __init__(self, fileobj):
header = fileobj.read(4)
if (len(header) != 4):
raise MusepackHeaderError('not a Musepack file')
if (header[:3] == b'ID3'):
header = fileobj.read(6)
if (l... |
def test_min_and_max_seconds_between_redraws(ansi_bar: ProgressBar, ansi_io: BufferedIO, sleep: Callable[([float], None)]) -> None:
ansi_bar.min_seconds_between_redraws(0.5)
ansi_bar.max_seconds_between_redraws((2 - 1))
ansi_bar.start()
ansi_bar.set_progress(1)
sleep(1)
ansi_bar.set_progress(2)
... |
def vgg_16(inputs, num_classes=1000, is_training=True, dropout_keep_prob=0.5, spatial_squeeze=True, scope='vgg_16', fc_conv_padding='VALID'):
with tf.variable_scope(scope, 'vgg_16', [inputs]) as sc:
end_points_collection = (sc.name + '_end_points')
with slim.arg_scope([slim.conv2d, slim.fully_connec... |
('beeref.view.BeeGraphicsView.reset_previous_transform')
('beeref.view.BeeGraphicsView.pan')
def test_zoom_in_max_zoom_size(pan_mock, reset_mock, view, imgfilename3x3):
item = BeePixmapItem(QtGui.QImage(imgfilename3x3))
view.scale(, )
view.scene.addItem(item)
view.zoom(40, QtCore.QPointF(10.0, 10.0))
... |
class Effect11063(BaseEffect):
    """Passive ship effect: boosts the damageMultiplier of Medium Energy
    Turret modules using the ship's shipBonusABC3 attribute, gated on the
    Amarr Battlecruiser skill."""

    type = 'passive'

    def handler(fit, ship, context, projectionRange, **kwargs):
        def is_medium_energy_turret(mod):
            return mod.item.requiresSkill('Medium Energy Turret')

        bonus = ship.getModifiedItemAttr('shipBonusABC3')
        fit.modules.filteredItemBoost(
            is_medium_energy_turret,
            'damageMultiplier',
            bonus,
            skill='Amarr Battlecruiser',
            **kwargs,
        )
class ChangeVolumeCall(Scaffold):
async def change_volume_call(self, chat_id: Union[(int, str)], volume: int):
if (self._app is None):
raise NoMTProtoClientSet()
if (not self._is_running):
raise ClientNotStarted()
chat_id = (await self._resolve_chat_id(chat_id))
... |
class TestAES():
.parametrize(('key', 'keysize'), [((b'0' * 32), 128), ((b'0' * 48), 192), ((b'0' * 64), 256)])
def test_key_size(self, key, keysize):
cipher = AES(binascii.unhexlify(key))
assert (cipher.key_size == keysize)
def test_invalid_key_size(self):
with pytest.raises(ValueEr... |
(simple_typed_classes(newtypes=False), unstructure_strats)
def test_simple_roundtrip(cls_and_vals, strat):
converter = BaseConverter(unstruct_strat=strat)
(cl, vals, kwargs) = cls_and_vals
assume(((strat is UnstructureStrategy.AS_DICT) or (not kwargs)))
inst = cl(*vals, **kwargs)
assert (inst == con... |
def convert_to_df(all_losses):
d = {'slambda': [], 'llambda': [], 'EleutherAI/gpt-neo-125M': [], 'EleutherAI/gpt-neo-1.3B': [], 'EleutherAI/gpt-neo-2.7B': [], 'data_file': []}
for (i, model) in enumerate(all_losses):
for item in all_losses[model]:
data_file = item['data_file']
su... |
def load_partition_data_mnist(dataset, data_dir, partition_method, partition_alpha, client_number, batch_size, args=None):
(X_train, y_train, X_test, y_test, net_dataidx_map, traindata_cls_counts) = partition_data(dataset, data_dir, partition_method, client_number, partition_alpha)
class_num = len(np.unique(y_t... |
def iram(A: LinearOperator, start_vector: Array=None, eig_n: int=6, max_iters: int=100, tol: float=1e-07, pbar: bool=False):
xnp = A.xnp
np_dtype = get_numpy_dtype(A.dtype)
del pbar
if (start_vector is not None):
v0 = np.array(start_vector, dtype=np_dtype)
def matvec(x):
X = xnp.arra... |
.parametrize('return_back_azimuth', [True, False])
.parametrize('ellipsoid,true_az12,true_az21,expected_distance', [('clrk66', (- 66.), 75., 4164192.708), ('WGS84', (- 66.), 75., 4164074.239)])
def test_geodesic_fwd(ellipsoid, true_az12, true_az21, expected_distance, return_back_azimuth, scalar_and_array):
geod = G... |
.unused
def resize(img, size, interpolation=Image.BILINEAR, max_size=None):
if (not _is_pil_image(img)):
raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
if (not (isinstance(size, int) or (isinstance(size, Sequence) and (len(size) in (1, 2))))):
raise TypeError('Got inappropr... |
def _create_markdown(signatures: (list[str] | None), description: Iterable[Tag], url: str) -> str:
description = _get_truncated_description(description, markdown_converter=DocMarkdownConverter(bullets='', page_url=url), max_length=750, max_lines=13)
description = _WHITESPACE_AFTER_NEWLINES_RE.sub('', descriptio... |
class PointLight(VisualizationFrame):
def __init__(self, *args, **kwargs):
try:
self._color = kwargs['color']
except KeyError:
self._color = 'white'
i = 0
if isinstance(args[i], str):
self._name = args[i]
i += 1
else:
... |
class NINConv(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0):
super(NINConv, self).__init__()
self.conv = nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding, bias=True)
self.activ ... |
def get_referenced_beam_sequence(dicom_dataset, fraction_group_number):
fraction_group_index = get_fraction_group_index(dicom_dataset, fraction_group_number)
fraction_group = dicom_dataset.FractionGroupSequence[fraction_group_index]
referenced_beam_sequence = fraction_group.ReferencedBeamSequence
beam_n... |
class TreeTabConfig(Config):
auto_fullscreen = True
groups = [libqtile.config.Group('a'), libqtile.config.Group('b'), libqtile.config.Group('c'), libqtile.config.Group('d')]
layouts = [layout.TreeTab(sections=['Foo', 'Bar'])]
floating_layout = libqtile.resources.default_config.floating_layout
keys =... |
(((asyncio is None) or ((pgv is None) and (gv is None))), 'AsyncGraphMachine requires asyncio and (py)gaphviz')
class TestAsyncGraphMachine(TestAsync):
def setUp(self):
super(TestAsync, self).setUp()
self.machine_cls = AsyncGraphMachine
self.machine = self.machine_cls(states=['A', 'B', 'C'],... |
def build_nuscenes_dataloader(config, args, val=False, pinet=False, polyline=False, gt_polygon_extraction=False):
(train_data, val_data) = build_nuscenes_datasets(config, args, val=val, pinet=pinet, polyline=polyline, gt_polygon_extraction=gt_polygon_extraction)
if gt_polygon_extraction:
if val:
... |
def eval_particle_forces(model, state, forces):
if (model.particle_radius > 0.0):
wp.launch(kernel=eval_particle_forces_kernel, dim=model.particle_count, inputs=[model.particle_grid.id, state.particle_q, state.particle_qd, forces, model.particle_radius, model.particle_ke, model.particle_kd, model.particle_k... |
def plugin_unloaded():
settings = sublime.load_settings('Terminus.sublime-settings')
preferences = sublime.load_settings('Preferences.sublime-settings')
settings_on_change(settings, ['256color', 'user_theme_colors', 'user_light_theme_colors', 'user_dark_theme_colors', 'theme'], clear=True)
settings_on_c... |
_end_docstrings(INIT_TOKENIZER_DOCSTRING)
class PreTrainedTokenizerFast(PreTrainedTokenizerBase):
vocab_files_names = VOCAB_FILES_NAMES
slow_tokenizer_class: PreTrainedTokenizer = None
can_save_slow_tokenizer: bool = True
def __init__(self, *args, **kwargs):
tokenizer_object = kwargs.pop('tokeni... |
def main():
parser = HfArgumentParser((TrainingArguments,))
sys.argv += ['--output_dir', './examples']
training_args = parser.parse_args_into_dataclasses()[0]
logger.warning(f'Process rank: {training_args.local_rank}, device: {training_args.device}, tpu_num_cores: {training_args.tpu_num_cores}')
for... |
class D2LCallback(Callback):
def __init__(self, model, X_train, y_train, dataset, noise_ratio, epochs=150, pace_type='d2l', init_epoch=5, epoch_win=5, lid_subset_size=1280, lid_k=20, verbose=1):
super(D2LCallback, self).__init__()
self.validation_data = None
self.model = model
self.t... |
class AsyncRunner():
def __init__(self, args: Any) -> None:
self.args = args
self.aiobrowser: Optional[AsyncServiceBrowser] = None
self.aiozc: Optional[AsyncZeroconf] = None
async def async_run(self) -> None:
self.aiozc = AsyncZeroconf(ip_version=ip_version)
services = ['... |
class GPUStats():
def __init__(self, log=True):
self.logger = None
if log:
self.logger = logging.getLogger(__name__)
self.logger.debug('Initializing %s', self.__class__.__name__)
self.plaid = None
self.initialized = False
self.device_count = 0
... |
def test_retarget_tag_wrong_name(initialized_db):
repo = get_repository('devtable', 'history')
(results, _) = list_repository_tag_history(repo, 1, 100, specific_tag_name='latest')
assert (len(results) == 2)
created = retarget_tag('someothername', results[1].manifest, is_reversion=True)
assert (creat... |
class Gromacs(Parametrisation):
type: Literal['Gromacs'] = 'Gromacs'
def is_available(cls) -> bool:
return True
def _improper_torsion_ordering(cls) -> str:
return 'amber'
def _build_system(self, molecule: 'Ligand', input_files: Optional[List[str]]=None) -> System:
top_file = None... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.