code stringlengths 281 23.7M |
|---|
class PasswordDialog(WindowModalDialog):
def __init__(self, parent=None, msg=None):
msg = (msg or _('Please enter your password'))
WindowModalDialog.__init__(self, parent, _('Enter Password'))
self.pw = pw = PasswordLineEdit()
vbox = QVBoxLayout()
vbox.addWidget(QLabel(msg))
... |
def test_columnize_basic():
    """Exercise columnize() on empty, delimited, single, tuple, and range inputs."""
    cases = [
        (([],), '<empty>\n'),
        ((['a', '2', 'c'], 10, ', '), 'a, 2, c\n'),
        ((['oneitem'],), 'oneitem\n'),
        ((('one', 'two', 'three'),), 'one two three\n'),
        ((list(range(4)),), '0 1 2 3\n'),
    ]
    for args, expected in cases:
        assert columnize(*args) == expected
class Cuboid_Collider(Collider):
def __init__(self, width, height, length, **kwargs):
super().__init__(**kwargs)
self.lb = (self.center - vec3((width / 2), (height / 2), (length / 2)))
self.rt = (self.center + vec3((width / 2), (height / 2), (length / 2)))
self.lb_local_basis = self.... |
def _enqueue_blobs_for_replication(manifest, storage, namespace_name):
blobs = registry_model.get_manifest_local_blobs(manifest, storage)
if (blobs is None):
logger.error('Could not lookup blobs for manifest `%s`', manifest.digest)
else:
with queue_replication_batch(namespace_name) as queue_... |
class SplitCreationTest(tf.test.TestCase):
def validate_roots(self, valid_test_roots, spanning_leaves):
(valid_root, test_root) = (valid_test_roots['valid'], valid_test_roots['test'])
num_valid_leaves = len(spanning_leaves[valid_root])
num_test_leaves = len(spanning_leaves[test_root])
... |
def find_implementations(project, resource, offset, resources=None, task_handle=taskhandle.DEFAULT_TASK_HANDLE):
name = worder.get_name_at(resource, offset)
this_pymodule = project.get_pymodule(resource)
pyname = evaluate.eval_location(this_pymodule, offset)
if (pyname is not None):
pyobject = p... |
class Upsample3DBlock(nn.Module):
def __init__(self, in_planes, out_planes, kernel_size, stride):
super(Upsample3DBlock, self).__init__()
assert (kernel_size == 2)
assert (stride == 2)
self.block = nn.Sequential(nn.ConvTranspose3d(in_planes, out_planes, kernel_size=kernel_size, strid... |
def get_args_parser(add_help=True):
import argparse
parser = argparse.ArgumentParser(description='PyTorch Classification Training', add_help=add_help)
parser.add_argument('--fs', default='fsx', type=str)
parser.add_argument('--model', default='resnet18', type=str, help='model name')
parser.add_argum... |
def linkcode_resolve(domain, info):
if ((domain == 'js') or (info['module'] == 'connect4')):
return
assert (domain == 'py'), 'expected only Python objects'
mod = importlib.import_module(info['module'])
if ('.' in info['fullname']):
(objname, attrname) = info['fullname'].split('.')
... |
(config_path='../../exp_config', config_name='config')
def main(global_cfg):
cfg = global_cfg.preprocessing.clf
random.seed(cfg.rnd_seed)
logger.info('Data generation for classification objective')
seed_dir = Path(cfg.seed_dir)
candidate_dir = Path(cfg.candidate_dir)
if (not cfg.output_dir):
... |
def matern_kernel(T, length_scale):
xs = tf.range(T, dtype=tf.float32)
xs_in = tf.expand_dims(xs, 0)
xs_out = tf.expand_dims(xs, 1)
distance_matrix = tf.math.abs((xs_in - xs_out))
distance_matrix_scaled = (distance_matrix / tf.cast(tf.math.sqrt(length_scale), dtype=tf.float32))
kernel_matrix = t... |
class DB():
def __init__(self, plugin):
self.plugin = plugin
def store(self, key, value):
entry = b85encode(json.dumps(value, ensure_ascii=False).encode()).decode()
self.plugin.pyload.db.set_storage(self.plugin.classname, key, entry)
def retrieve(self, key=None, default=None):
... |
def detach_callable(typ: CallableType, class_type_vars: list[TypeVarLikeType]) -> CallableType:
if (not class_type_vars):
return typ
seen_type_vars = set()
for t in (typ.arg_types + [typ.ret_type]):
seen_type_vars |= set(get_type_vars(t))
return typ.copy_modified(variables=(list(typ.vari... |
def wait_for_sync_blockcypher(rpc_client: JSONRPCClient, tolerance: BlockTimeout, sleep: float) -> None:
syncing_str = '\rSyncing ... Current: {} / Target: ~{}'
error_str = 'Could not get blockchain information from blockcypher. Ignoring.'
local_block = rpc_client.block_number()
blockcypher_block = bloc... |
class ExceptionContext():
    """Capture a single expected exception for later inspection.

    Use as a context manager::

        with ExceptionContext(ValueError) as ctx:
            ...  # code expected to raise ValueError
        # ctx.value now holds str() of the raised exception

    The expected exception is suppressed; any other outcome (no exception,
    or an exception of a different type) raises AssertionError.
    """

    def __init__(self, exctype):
        # Exception class the managed block is expected to raise.
        self.exctype = exctype
        # String form of the captured exception; '' until __exit__ runs.
        self.value = ''

    def __enter__(self):
        return self

    def __exit__(self, exctype, excvalue, exctb):
        # Explicit raise instead of a bare `assert`, so the check survives
        # `python -O` and carries a diagnostic message.  AssertionError is
        # kept so existing callers that catch it still work.
        if exctype is not self.exctype:
            raise AssertionError(
                'expected {!r}, got {!r}'.format(self.exctype, exctype))
        self.value = str(excvalue)
        return True  # suppress the expected exception
class BluetoothDevice(_BluetoothBase):
def __init__(self, path, interface, properties_interface, widget):
_BluetoothBase.__init__(self, path, interface, properties_interface, widget)
self._connected = False
self._paired = False
self._status = DeviceState.UNPAIRED
self._adapte... |
def _execute_eql_query(events, query):
schema = eql.Schema.learn(events)
query_results = []
def callback(results):
for event in results.events:
query_results.append(event.data)
engine = eql.PythonEngine()
with schema:
try:
eql_query = eql.parse_query(query, im... |
def log_loaded_dataset(dataset, format, name):
logging.info(f"[*] Loaded dataset '{name}' from '{format}':")
logging.info(f' {dataset.data}')
logging.info(f' undirected: {dataset[0].is_undirected()}')
logging.info(f' num graphs: {len(dataset)}')
total_num_nodes = 0
if hasattr(dataset.data, 'n... |
def download_file_from_google_drive(file_id, root, filename=None, md5=None):
import requests
url = '
root = os.path.expanduser(root)
if (not filename):
filename = file_id
fpath = os.path.join(root, filename)
makedir_exist_ok(root)
if (os.path.isfile(fpath) and check_integrity(fpath, ... |
def writeToFile(name, key, data, iscorpus=False, extension='tsv'):
if (not iscorpus):
ofolder = os.path.join(sys.path[0], 'results', 'stats', 'texts', name)
else:
ofolder = os.path.join(sys.path[0], 'results', 'stats', 'corpora', name)
if (not os.path.exists(ofolder)):
os.makedirs(of... |
.file_utils.add_start_docstrings(_DESCRIPTION, _KWARGS_DESCRIPTION)
class Response_entity_hit(datasets.Metric):
def __init__(self, config_name: Optional[str]=None, keep_in_memory: bool=False, cache_dir: Optional[str]=None, num_process: int=1, process_id: int=0, seed: Optional[int]=None, experiment_id: Optional[str]... |
class Drawer(Factory.RelativeLayout):
state = OptionProperty('closed', options=('closed', 'open', 'opening', 'closing'))
scroll_timeout = NumericProperty(200)
scroll_distance = NumericProperty('9dp')
drag_area = NumericProperty('9dp')
hidden_widget = ObjectProperty(None)
overlay_widget = ObjectP... |
def _render_ologin_error(service_name, error_message=None, register_redirect=False):
user_creation = bool((features.USER_CREATION and features.DIRECT_LOGIN and (not features.INVITE_ONLY_USER_CREATION)))
error_info = {'reason': 'ologinerror', 'service_name': service_name, 'error_message': (error_message or 'Coul... |
class _AlgoL_BFGS_B(_AlgoBase):
_id = 5
_options = [_makeProp('maxcor', 'The maximum number of variable metric corrections used to define\nthe limited memory matrix. (The limited memory BFGS method does\nnot store the full hessian but uses this many terms in an \napproximation to it.)', 'App::PropertyInteger'),... |
def _forward_magic(cls: AsyncAutoWrapperType, attr: Callable[(..., T)]) -> Callable[(..., (Path | T))]:
sentinel = object()
(attr)
def wrapper(self: Path, other: object=sentinel) -> (Path | T):
if (other is sentinel):
return attr(self._wrapped)
if isinstance(other, cls):
... |
class CameraConfiguration(Configuration):
_ALLOWED_FIELDS = ('use_case', 'buffer_count', 'transform', 'display', 'encode', 'colour_space', 'controls', 'main', 'lores', 'raw', 'queue', 'sensor')
_FIELD_CLASS_MAP = {'main': StreamConfiguration, 'lores': StreamConfiguration, 'raw': StreamConfiguration, 'sensor': S... |
def unregistered_custom_token(token_amount, deploy_client, contract_manager) -> TokenAddress:
(contract_proxy, _) = deploy_client.deploy_single_contract(contract_name=CONTRACT_CUSTOM_TOKEN, contract=contract_manager.get_contract(CONTRACT_CUSTOM_TOKEN), constructor_parameters=(token_amount, 2, 'raiden', 'Rd'))
r... |
def prepare_issue_news(issue: Issue):
news = issue.item_set
result = {}
for x in news.filter(status='active').iterator():
if (x.section not in result):
result[x.section] = []
result[x.section].append({'link': x.link, 'title': x.title, 'description': x.description, 'tags': []})
... |
def main_worker(gpu, ngpus_per_node, args):
global best_acc1
args.gpu = gpu
if (args.gpu is not None):
print('Use GPU: {} for training'.format(args.gpu))
print("=> creating model '{}'".format(args.arch))
num_classes = (100 if (args.dataset == 'cifar100') else 10)
use_norm = (True if (arg... |
class SupergroupConfigCallback(AimetCommonSupergroupConfigCallback):
def __init__(self, module_to_quantsim_wrapper_dict: Dict[(torch.nn.Module, QcQuantizeWrapper)]):
self._module_to_quantsim_wrapper_dict = module_to_quantsim_wrapper_dict
def __call__(self, _, op_list: List[Op]):
for (index, op) ... |
class ParamProvider(interfaces.IParamProvider):
    """Resolve a parameter by consulting a chain of delegate providers.

    Delegates are queried in registration order; the first truthy result
    wins.  None is returned when no delegate produces a value.
    """

    def __init__(self, *delegates):
        self._delegates = delegates

    def __call__(self, pyfunc, param_name):
        # First truthy answer from the delegate chain, or None.
        answers = (delegate(pyfunc, param_name) for delegate in self._delegates)
        return next((answer for answer in answers if answer), None)
def create_optimization_profiles(builder, inputs, batch_sizes=[1, 8, 16, 32, 64]):
if all([(inp.shape[0] > (- 1)) for inp in inputs]):
profile = builder.create_optimization_profile()
for inp in inputs:
(fbs, shape) = (inp.shape[0], inp.shape[1:])
profile.set_shape(inp.name, m... |
def test_url_properties():
url = Url('')
assert (str(url) == ''), 'unexpected url'
url.scheme = 'scheme'
assert (str(url) == 'scheme://'), 'unexpected url'
assert (url.get_scheme() == 'scheme'), 'unexpected scheme'
url.set_scheme('tscheme')
assert (url.get_scheme() == 'tscheme'), 'unexpected... |
class ProbNet(nn.Module):
def __init__(self, n_channels, feature_dim):
super(ProbNet, self).__init__()
self.mu = nn.Sequential(nn.Conv2d(n_channels, 64, kernel_size=1), nn.ReLU(), nn.Conv2d(64, feature_dim, kernel_size=1))
self.logvar = nn.Sequential(nn.Conv2d(n_channels, 64, kernel_size=1),... |
class Effect6374(BaseEffect):
    """Passive effect: boosts 'structureDamageAmount' on Logistic Drones
    by the source's 'droneArmorDamageAmountBonus' attribute."""

    type = 'passive'

    def handler(fit, src, context, projectionRange, **kwargs):
        # Apply the boost only to drones in the 'Logistic Drone' group.
        def is_logistic_drone(drone):
            return drone.item.group.name == 'Logistic Drone'
        fit.drones.filteredItemBoost(
            is_logistic_drone,
            'structureDamageAmount',
            src.getModifiedItemAttr('droneArmorDamageAmountBonus'),
            **kwargs)
class BTADDataset(Dataset):
def __init__(self, data_path, classname, resize=256, cropsize=256, is_train=True):
assert (classname in BTAD_CLASS_NAMES), 'class_name: {}, should be in {}'.format(classname, BTAD_CLASS_NAMES)
self.dataset_path = data_path
self.class_name = classname
self.... |
def check_path_overlapping(corner, end_1, end_2):
vec_1 = ((end_1[0] - corner[0]), (end_1[1] - corner[1]))
vec_2 = ((end_2[0] - corner[0]), (end_2[1] - corner[1]))
dist_to_1 = np.sqrt(((vec_1[0] ** 2) + (vec_1[1] ** 2)))
dist_to_2 = np.sqrt(((vec_2[0] ** 2) + (vec_2[1] ** 2)))
if ((dist_to_1 == 0) o... |
def is_accel(event, *accels):
assert accels
if (event.type != Gdk.EventType.KEY_PRESS):
return False
keyval = event.keyval
if (not (keyval & (~ 255))):
keyval = ord(chr(keyval).lower())
default_mod = Gtk.accelerator_get_default_mod_mask()
keymap = Gdk.Keymap.get_default()
for... |
def parse_index(arg, min_val=None, max_val=None):
if ((arg is None) or (arg == '')):
indices = []
elif isinstance(arg, int):
indices = [arg]
elif isinstance(arg, (list, tuple)):
indices = list(arg)
elif isinstance(arg, str):
indices = []
if (arg.lower() not in ['n... |
class TokenCount():
def __init__(self, token):
self.token = token
self.files = {}
def add(self, filename, linenumber):
if (filename not in self.files):
self.files[filename] = FileCount(filename)
self.files[filename].add(linenumber)
def __str__(self):
if (l... |
def test_dataset_frames_subset(zarr_dataset: ChunkedDataset) -> None:
zarr_dataset = zarr_dataset.get_scene_dataset(0)
frame_start = 10
frame_end = 25
zarr_cut = get_frames_subset(zarr_dataset, frame_start, frame_end)
assert (len(zarr_cut.scenes) == 1)
assert (len(zarr_cut.frames) == (frame_end ... |
class ListProductIterator():
def __init__(self, *list_of_products):
self.list_of_products = list_of_products
self.iter = (- 1)
def create_a_list_from_a_query(query):
list_of_instances_of_Product_class = list(map(Product.create_a_instance_of_this_class_from_a_list_of_properties, query))
... |
class VCDBPairDataset(Dataset):
def __init__(self, annotation_path, feature_path='hdfs://haruna/user/lab/wenxin.me/datasets/vcdb/features/imac/vcdb99709_resnet50_imac_pca1024', padding_size=300, random_sampling=False, neg_num=1):
self.feature_path = feature_path
self.padding_size = padding_size
... |
def might_extract_tar(path):
path = Path(path)
dir_name = '.'.join(path.name.split('.')[:(- 2)])
dir_output = (path.parent / dir_name)
if (not dir_output.exists()):
if path.exists():
tf = tarfile.open(str(path))
tf.extractall(path.parent)
else:
logging... |
class ImmutableStruct(type):
_names = []
def __setattr__(self, name, value):
if (name in self._names):
raise PyUnityException(f'Field {name!r} is read-only')
super(ImmutableStruct, self).__setattr__(name, value)
def __delattr__(self, name):
if (name in self._names):
... |
class TestTransformSetOutputFormat(unittest.TestCase):
def setUp(self):
self.tfm = new_transformer()
def test_defaults(self):
actual = self.tfm.output_format
expected = {}
self.assertEqual(expected, actual)
actual_args = self.tfm._output_format_args(self.tfm.output_format... |
class AdministerRepositoryPermission(QuayPermission):
def __init__(self, namespace, name):
admin_need = _RepositoryNeed(namespace, name, 'admin')
org_admin_need = _OrganizationRepoNeed(namespace, 'admin')
self.namespace = namespace
self.repo_name = name
super(AdministerReposi... |
def modpath_to_modname(modpath, hide_init=True, hide_main=False, check=True, relativeto=None):
if (check and (relativeto is None)):
if (not exists(modpath)):
raise ValueError('modpath={} does not exist'.format(modpath))
modpath_ = abspath(expanduser(modpath))
modpath_ = normalize_modpath... |
class DescribeLatentStyle():
def it_can_delete_itself(self, delete_fixture):
(latent_style, latent_styles, expected_xml) = delete_fixture
latent_style.delete()
assert (latent_styles.xml == expected_xml)
assert (latent_style._element is None)
def it_knows_its_name(self, name_get_f... |
def resblock_body(x, num_filters, num_blocks):
x = ZeroPadding2D(((1, 0), (1, 0)))(x)
x = darknet_conv2d_bn_leaky(num_filters, (3, 3), strides=(2, 2))(x)
for i in range(num_blocks):
y = compose(darknet_conv2d_bn_leaky((num_filters // 2), (1, 1)), darknet_conv2d_bn_leaky(num_filters, (3, 3)))(x)
... |
class GamePlayerOpponentIndex(LocalSecondaryIndex):
class Meta():
read_capacity_units = 1
write_capacity_units = 1
table_name = 'GamePlayerOpponentIndex'
host = '
projection = AllProjection()
player_id = UnicodeAttribute(hash_key=True)
winner_id = UnicodeAttribute(ran... |
def filter_test_data(data, max_length=25):
new_test = {'text': [], 'label': []}
for i in range(len(data['test']['text'])):
text = data['test']['text'][i]
label = data['test']['label'][i]
if (len(text.split()) <= max_length):
new_test['text'].append(text)
new_test[... |
(tryfirst=True)
def pytest_collection_modifyitems(config: Config, items: list[Item]) -> None:
if (not config.getoption('randomly_reorganize')):
return
seed = _reseed(config)
modules_items: list[tuple[((ModuleType | None), list[Item])]] = []
for (module, group) in groupby(items, _get_module):
... |
def read_tables(data_dir, bc):
bc.create_table('store', os.path.join(data_dir, 'store/*.parquet'))
bc.create_table('store_sales', os.path.join(data_dir, 'store_sales/*.parquet'))
bc.create_table('date_dim', os.path.join(data_dir, 'date_dim/*.parquet'))
bc.create_table('product_reviews', os.path.join(dat... |
class GLUEProcessor(CLSProcessor):
def __init__(self, data_args, training_args, model_args, tokenizer=None, post_tokenizer=False, keep_raw_data=True):
super().__init__(data_args, training_args, model_args, tokenizer, post_tokenizer=post_tokenizer, keep_raw_data=keep_raw_data)
param = {p.split('=')[0... |
class CallTreeLevelOneItem(CallGraphLevelItemBase):
def __init__(self, glb, params, row, comm_id, comm, parent_item):
super(CallTreeLevelOneItem, self).__init__(glb, params, row, parent_item)
if self.params.have_ipc:
self.data = [comm, '', '', '', '', '', '', '', '', '', '', '']
... |
class TestQueryTree(EndianTest):
def setUp(self):
self.req_args_0 = {'window': }
self.req_bin_0 = b'\x0f\x00\x02\x00\x00\xdf\xe8\x15'
self.reply_args_0 = {'children': [, , , , , , ], 'parent': , 'root': , 'sequence_number': 65105}
self.reply_bin_0 = b'\x01\x00Q\xfe\x07\x00\x00\x00\xd... |
class VGG(nn.Module):
def __init__(self, num_classes=10, depth=16, dropout=0.0, number_net=4):
super(VGG, self).__init__()
self.inplances = 64
self.number_net = number_net
self.conv1 = nn.Conv2d(3, self.inplances, kernel_size=3, padding=1)
self.bn1 = nn.BatchNorm2d(self.inpla... |
def get_train_dataset(p, transform=None):
if (p['train_db_name'] == 'VOCSegmentation'):
from data.dataloaders.pascal_voc import VOC12
dataset = VOC12(split=p['train_db_kwargs']['split'], transform=transform)
else:
raise ValueError('Invalid train dataset {}'.format(p['train_db_name']))
... |
_useless
_canonicalize
_rewriter([Unique])
def local_Unique_second(fgraph, node):
if (not isinstance(node.op, Unique)):
return False
if (node.op.return_index or node.op.return_inverse or node.op.return_counts or (node.op.axis is not None)):
return False
second_var = node.inputs[0]
if (no... |
class LineChart(Chart):
def __init__(self, start_x: any=None, end_x: any=None, upper_y: any=None, lower_y: any=None, log_scale: bool=False, rotate_x_axis=False):
super().__init__(start_x, end_x, upper_y, lower_y)
self.log_scale = log_scale
self._rotate_x_axis = rotate_x_axis
def plot(sel... |
def copy_byte_range(infile, outfile, start=None, stop=None, bufsize=(16 * 1024)):
if (start is not None):
infile.seek(start)
while 1:
to_read = min(bufsize, (((stop + 1) - infile.tell()) if stop else bufsize))
buf = infile.read(to_read)
if (not buf):
break
out... |
class FaAllocationSamples(Object):
FaOneGroup = ''.join(('<?xml version="1.0" encoding="UTF-8"?>', '<ListOfGroups>', '<Group>', '<name>Equal_Quantity</name>', '<ListOfAccts varName="list">', '<String>DU119915</String>', '<String>DU119916</String>', '</ListOfAccts>', '<defaultMethod>EqualQuantity</defaultMethod>', '... |
def iter_stack_frames(frames=None):
    """Yield ``(frame, lineno)`` pairs for the given frame records.

    When *frames* is None or empty, the caller's stack (excluding this
    generator's own frame) is captured instead.  Frames whose locals set
    ``__traceback_hide__`` to a truthy value are skipped.
    """
    records = frames or inspect.stack()[1:]
    for record in records:
        frame, lineno = record[0], record[2]
        frame_locals = getattr(frame, 'f_locals', {})
        if not _getitem_from_frame(frame_locals, '__traceback_hide__'):
            yield frame, lineno
def data_type_to_sparse_type(data_type: DataType) -> SparseType:
if (data_type == DataType.FP32):
return SparseType.FP32
elif (data_type == DataType.FP16):
return SparseType.FP16
elif (data_type == DataType.BF16):
return SparseType.BF16
elif ((data_type == DataType.INT8) or (data... |
class TraceFactory():
    """Registry of trace-collector classes, keyed by their FORMAT_NAME."""

    def __init__(self):
        # Discover every collector by walking the QlBaseTrace subclass tree.
        collectors = {}
        for collector_cls in get_all_subclasses(base.QlBaseTrace):
            collectors[collector_cls.FORMAT_NAME] = collector_cls
        self.trace_collectors = collectors

    def formats(self):
        """Return the registered format names."""
        return self.trace_collectors.keys()

    def get_trace_collector(self, ql, name):
        """Instantiate the collector registered under *name* for *ql*."""
        collector_cls = self.trace_collectors[name]
        return collector_cls(ql)
class DarkStage(nn.Module):
def __init__(self, in_chs, out_chs, stride, dilation, depth, block_ratio=1.0, bottle_ratio=1.0, groups=1, first_dilation=None, avg_down=False, block_fn=BottleneckBlock, block_dpr=None, **block_kwargs):
super(DarkStage, self).__init__()
first_dilation = (first_dilation or ... |
_arg_scope
def max_pool3d(inputs, kernel_size, stride=2, padding='VALID', data_format=DATA_FORMAT_NDHWC, outputs_collections=None, scope=None):
if (data_format not in (DATA_FORMAT_NCDHW, DATA_FORMAT_NDHWC)):
raise ValueError('data_format has to be either NCDHW or NDHWC.')
with ops.name_scope(scope, 'Max... |
def get_qr_map_with_hints(version, mode='binary', error='L', url=''):
qrmap = get_qr_map(version, mode, error, url)
error_map = get_error_reserved_map(version, mode, error, url)
qr = create_qr_from_map(qrmap, url, mode, error)
map_with_hints = QrMap(qr.version)
for y in range(qrmap.size):
fo... |
def main(args: Any=None) -> None:
if (args is None):
args = sys.argv[1:]
parser = create_parser()
args = parser.parse_args(args)
if (not args.source):
print("Error: missing 'checkpoint' or 'pretrained' source.", file=sys.stderr)
parser.print_help()
raise SystemExit(1)
... |
def utcToLocalTime(tokens):
utctime = datetime.datetime.strptime(('%(date)s %(time)s' % tokens), '%Y/%m/%d %H:%M:%S')
localtime = (utctime - datetime.timedelta(0, time.timezone, 0))
(tokens['utcdate'], tokens['utctime']) = (tokens['date'], tokens['time'])
(tokens['localdate'], tokens['localtime']) = str... |
class PooledEmbeddingsAllToAll(nn.Module):
def __init__(self, pg: dist.ProcessGroup, dim_sum_per_rank: List[int], device: Optional[torch.device]=None, callbacks: Optional[List[Callable[([torch.Tensor], torch.Tensor)]]]=None, codecs: Optional[QuantizedCommCodecs]=None) -> None:
super().__init__()
sel... |
def is_ready(base_url: str) -> bool:
try:
result = requests.get(f'{base_url}/api/v1/status').json()
except KeyError:
log.info(f'Server {base_url} returned invalid json data.')
except requests.ConnectionError:
log.info(f'Waiting for the server {base_url} to start.')
except request... |
class DarkThemeColors():
Red = '#F44336'
Pink = '#F48FB1'
Purple = '#CE93D8'
DeepPurple = '#B39DDB'
Indigo = '#9FA8DA'
Blue = '#90CAF9'
LightBlue = '#81D4FA'
Cyan = '#80DEEA'
Teal = '#80CBC4'
Green = '#A5D6A7'
LightGreen = '#C5E1A5'
Lime = '#E6EE9C'
Yellow = '#FFF59D'... |
class GlobalValue():
R1_BJTU = {'1': '39,20', '10': '37,23', '100': '7,21', '101': '8,21', '102': '10,21', '103': '10,20', '104': '8,20', '105': '7,20', '106': '5,20', '107': '7,13', '108': '8,12', '109': '9,11', '11': '36,23', '110': '10,10', '111': '11,9', '112': '16,6', '113': '16,8', '114': '16,9', '115': '16,1... |
_dataframe_method
def reorder_columns(df: pd.DataFrame, column_order: Union[(Iterable[str], pd.Index, Hashable)]) -> pd.DataFrame:
check('column_order', column_order, [list, tuple, pd.Index])
if any(((col not in df.columns) for col in column_order)):
raise IndexError('One or more columns in `column_orde... |
def parseLine(line):
ret = []
(carLogs, tlLogs) = line.split(';')
logs = []
for carLog in carLogs.split(',')[:(- 1)]:
logs.append([float(x) for x in carLog.split(' ')])
ret.append(logs)
logs = []
for tlLog in tlLogs.split(',')[:(- 1)]:
logs.append(tlLog.split(' '))
ret.ap... |
class MultiDownloader(SimpleDownloader):
__name__ = 'MultiDownloader'
__type__ = 'downloader'
__version__ = '0.72'
__status__ = 'stable'
__pattern__ = '^unmatchable$'
__config__ = [('enabled', 'bool', 'Activated', True), ('use_premium', 'bool', 'Use premium account if available', True), ('fallba... |
class MultiHeadedDotAttention_d(nn.Module):
def __init__(self, h, d_model, dropout=0.1, scale=1, project_k_v=1, use_output_layer=1, do_aoa=0, norm_q=0, dropout_aoa=0.3):
super(MultiHeadedDotAttention_d, self).__init__()
assert (((d_model * scale) % h) == 0)
self.d_k = ((d_model * scale) // h... |
def get_input_ids(example, vocab: Dict, columns: List, mask_rate: float=0, return_label_ids: Optional[bool]=True, return_token_type_ids: Optional[bool]=False, affix_bos: Optional[bool]=True, affix_eos: Optional[bool]=True) -> Dict:
assert (return_token_type_ids is False)
input_ids: List[int] = []
token_type... |
def test_lambert_cylindrical_equal_area():
crs = ProjectedCRS(conversion=LambertCylindricalEqualAreaConversion(1, 2, 3, 4))
expected_cf = {'semi_major_axis': 6378137.0, 'semi_minor_axis': crs.ellipsoid.semi_minor_metre, 'inverse_flattening': crs.ellipsoid.inverse_flattening, 'reference_ellipsoid_name': 'WGS 84'... |
def index_tuple_basis(codomain_elements: List[Tuple[(int, ...)]]) -> Bijection:
idx_dict = dict(zip(range(len(codomain_elements)), codomain_elements))
tuple_dict = dict(zip(codomain_elements, range(len(codomain_elements))))
codomain_element_size = len(codomain_elements[0])
def forward(i: int):
r... |
class VGG16(nn.Module):
def __init__(self, align_tf_resize=False, no_top=True, enable_lpips=True):
super().__init__()
self.align_tf_resize = align_tf_resize
self.no_top = no_top
self.enable_lpips = enable_lpips
self.conv11 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1... |
class CheetahPythonLexer(Lexer):
def get_tokens_unprocessed(self, text):
pylexer = PythonLexer(**self.options)
for (pos, type_, value) in pylexer.get_tokens_unprocessed(text):
if ((type_ == Token.Error) and (value == '$')):
type_ = Comment.Preproc
(yield (pos,... |
class AttrVI_ATTR_VXI_TRIG_LINES_EN(RangeAttribute):
    """Descriptor for the VISA attribute VI_ATTR_VXI_TRIG_LINES_EN.

    A readable/writable ViUInt16 (range 0-65535, default 0) available on
    VXI INSTR resources.
    """

    # Only VXI INSTR resources expose this attribute.
    resources = [(constants.InterfaceType.vxi, 'INSTR')]
    py_name = ''
    visa_name = 'VI_ATTR_VXI_TRIG_LINES_EN'
    visa_type = 'ViUInt16'
    default = 0
    # Access flags: readable and writable, not local-only.
    read = True
    write = True
    local = False
    # Valid range for the 16-bit unsigned value; no enumerated values.
    min_value = 0
    max_value = 65535
    values = None
class TVShow(ItemEntity):
isdir: ClassVar[bool] = True
mediatype: ClassVar[str] = 'tvshow'
def __init__(self, *, parent=ItemsCollection, item_data=Dict, **kwargs) -> None:
super().__init__(parent=parent, item_data=item_data)
self.url = self.plugin.routing.build_url('seasons', f'{self.item_id... |
def orders():
states = ['paid', 'pending', 'delivered']
orders = []
for i in range(1, 5000):
id = i
customer_id = random.randint(1, 100)
status = random.choice(states)
amount = random.randint(50, 200)
created_at = get_proper_date()
orders.append([id, customer_... |
def test_bootstrap_with_legacy_macos_default_config(monkeypatch, tmpfolder, macos_config_dir):
monkeypatch.setattr('sys.platform', 'darwin_test')
new_dir = macos_config_dir
assert (not new_dir.exists())
old_dir = info._old_macos_config_dir(new_dir)
old_dir.mkdir(parents=True, exist_ok=False)
(ol... |
def test_existing_account(app, auth_system, login_service):
login_service_lid = 'someexternaluser'
created_user = model.user.create_federated_user('someuser', '', login_service.service_id(), login_service_lid, False)
existing_user_count = database.User.select().count()
with mock_ldap():
result =... |
def me_pow_lb(lamb, thresh, B, ctilde, J, n, gamma):
c1 = ((((4 * (B ** 2)) * J) * (J ** 0.5)) * ctilde)
c2 = (((4 * B) * (J ** 0.5)) * ctilde)
c3 = (((4 * (B ** 2)) * J) * (ctilde ** 2))
assert (gamma > 0)
t1_arg = (((lamb - thresh) ** 2.0) / ((((72 * (B ** 2)) * (c2 ** 2)) * J) * n))
t2_arg = ... |
class ccy(NamedTuple):
code: str
isonumber: str
twoletterscode: str
order: int
name: str
rounding: int
default_country: str
fixeddc: str = 'Act/365'
floatdc: str = 'Act/365'
fixedfreq: str = ''
floatfreq: str = ''
future: str = ''
symbol_raw: str = '\\00a4'
html: ... |
class SddVisitor(TreeNodeVisitor):
def visit_And(self, node, mgr):
lv = self.visit(node.left, mgr)
rv = self.visit(node.right, mgr)
return (lv & rv)
def visit_Or(self, node, mgr):
lv = self.visit(node.left, mgr)
rv = self.visit(node.right, mgr)
return (lv | rv)
... |
class TestScheduler(TestCase):
def test_warmup_multistep(self):
p = nn.Parameter(torch.zeros(0))
opt = torch.optim.SGD([p], lr=5)
multiplier = WarmupParamScheduler(MultiStepParamScheduler([1, 0.1, 0.01, 0.001], milestones=[10, 15, 20], num_updates=30), 0.001, (5 / 30))
sched = LRMult... |
def main():
pybullet_planning.connect()
pybullet_planning.add_data_path()
p.setGravity(0, 0, (- 9.8))
p.resetDebugVisualizerCamera(cameraDistance=1, cameraYaw=120, cameraPitch=(- 30), cameraTargetPosition=(0, 0, 0.3))
p.loadURDF('plane.urdf')
ri = reorientbot.pybullet.PandaRobotInterface()
c... |
class TestObjectIdentifier():
def test_eq(self):
oid1 = x509.ObjectIdentifier('2.999.1')
oid2 = x509.ObjectIdentifier('2.999.1')
assert (oid1 == oid2)
def test_ne(self):
oid1 = x509.ObjectIdentifier('2.999.1')
assert (oid1 != x509.ObjectIdentifier('2.999.2'))
asse... |
class Schedule(str, Enum):
EVERY_15_MIN = '_15_MINUTES_'
EVERY_15_MINS = '_15_MINUTES_'
EVERY_15_MINUTE = '_15_MINUTES_'
EVERY_15_MINUTES = '_15_MINUTES_'
EVERY_30_MIN = '_30_MINUTES_'
EVERY_30_MINS = '_30_MINUTES_'
EVERY_30_MINUTE = '_30_MINUTES_'
EVERY_30_MINUTES = '_30_MINUTES_'
E... |
class OnlineContrastiveLoss(GroupLoss):
def __init__(self, margin: Optional[float]=0.5, distance_metric_name: Distance=Distance.COSINE, mining: Optional[str]='hard'):
mining_types = ['all', 'hard']
if (mining not in mining_types):
raise ValueError(f"Unrecognized mining strategy: {mining}... |
class _VArgsWrapper():
base_func: Callable
def __init__(self, func: Callable, visit_wrapper: Callable[([Callable, str, list, Any], Any)]):
if isinstance(func, _VArgsWrapper):
func = func.base_func
self.base_func = func
self.visit_wrapper = visit_wrapper
update_wrapper... |
def progress(reporter, start_msg):
t1 = time.time()
msg = (start_msg + ' ...')
reporter.update(msg)
try:
(yield)
except:
t2 = time.time()
reporter.report((msg + f' Exception after {SECS(t1, t2)}!'))
raise
else:
t2 = time.time()
reporter.update((msg... |
def read_file(file):
lines = [line.strip('\r\n') for line in file.splitlines()]
size = int(lines[0])
hex = Hex(size)
linei = 1
tiles = (8 * [0])
done = Done(hex.count)
for y in range(size):
line = lines[linei][((size - y) - 1):]
p = 0
for x in range((size + y)):
... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.