code stringlengths 281 23.7M |
|---|
def from_pretrained(model_name_or_path, checkpoint_file='model.pt', data_name_or_path='.', archive_map=None, **kwargs):
from fairseq import checkpoint_utils, file_utils
if (archive_map is not None):
if (model_name_or_path in archive_map):
model_name_or_path = archive_map[model_name_or_path]
... |
_doc(krks.get_veff.__doc__)
def get_veff(ks, cell=None, dm=None, dm_last=0, vhf_last=0, hermi=1, kpts=None, kpts_band=None):
if (cell is None):
cell = ks.cell
if (dm is None):
dm = ks.make_rdm1()
if (kpts is None):
kpts = ks.kpts
if isinstance(kpts, np.ndarray):
return kr... |
def find_path() -> Optional[str]:
if (sys.platform in ['win32', 'win64']):
bat_path = wch.which('electron')
return os.path.join(bat_path, '..\\node_modules\\electron\\dist\\electron.exe')
elif (sys.platform in ['darwin', 'linux']):
return wch.which('electron')
else:
return No... |
def getTrainLoader(rank: int, worldSize: int, datasetPath: StrPath, batchSize: int, logger: Union[(logging.Logger, LoggerBase)]=logging.root):
trainDataset = BasicLMDB(datasetPath, maxTxns=((2 * batchSize) * worldSize), transform=getTrainingPreprocess())
logger.debug('Create training set: %s', trainDataset)
... |
def gen_connections(top):
_inst_conns = defaultdict(set)
nets = top.get_all_value_nets()
adjs = top.get_signal_adjacency_dict()
for (writer, net) in nets:
S = deque([writer])
visited = {writer}
while S:
u = S.pop()
writer_host = u.get_host_component()
... |
def save_checkpoint(args, save_checkpoints_config, extra_name=None, epoch='init', model_state_dict=None, optimizer_state_dict=None, train_metric_info=None, test_metric_info=None, check_epoch_require=True, postfix=None):
if (save_checkpoints_config is None):
logging.info('WARNING: Not save checkpoints......'... |
def bdrloss(prediction, label, radius, device='cpu'):
filt = torch.ones(1, 1, ((2 * radius) + 1), ((2 * radius) + 1))
filt.requires_grad = False
filt = filt.to(device)
bdr_pred = (prediction * label)
pred_bdr_sum = (label * F.conv2d(bdr_pred, filt, bias=None, stride=1, padding=radius))
texture_m... |
class AutodeleteRejected(TourneyButton):
def __init__(self, ctx: Context, letter: str):
super().__init__(emoji=ri(letter))
self.ctx = ctx
async def callback(self, interaction: discord.Interaction):
(await interaction.response.defer())
self.view.record.autodelete_rejected = (not s... |
.utils
def test_check_column_single(dataframe):
    """check_column returns None for present columns, raises ValueError otherwise."""
    # An existing string column passes silently.
    assert check_column(dataframe, 'a') is None

    # Absent columns — whether string- or int-named — raise ValueError.
    for absent in ('b', 2):
        with pytest.raises(ValueError):
            check_column(dataframe, absent)

    # After adding the integer-named column, the same check now passes.
    dataframe[2] = 'asdf'
    assert check_column(dataframe, 2) is None
def rescale_emisscoeff(spec, rescaled, initial, old_mole_fraction, new_mole_fraction, old_path_length_cm, optically_thin, wunit, units, extra, true_path_length):
unit = None
def get_emisscoeff_unit(unit_radiance):
if ('/cm2' in unit_radiance):
return unit_radiance.replace('/cm2', '/cm3')
... |
def summary_plot(traffic_performance, figure_dir, mode_name, num_rounds):
minimum_round = (50 if (num_rounds > 50) else 0)
validation_duration_length = 10
anomaly_threshold = 1.3
for traffic_name in traffic_performance:
(f, ax) = plt.subplots(2, 1, figsize=(12, 9), dpi=100)
performance_t... |
def simxReadStringStream(clientID, signalName, operationMode):
signalLength = ct.c_int()
signalValue = ct.POINTER(ct.c_ubyte)()
if ((sys.version_info[0] == 3) and (type(signalName) is str)):
signalName = signalName.encode('utf-8')
ret = c_ReadStringStream(clientID, signalName, ct.byref(signalVal... |
class CrtcChangeNotify(rq.Event):
_code = None
_fields = rq.Struct(rq.Card8('type'), rq.Card8('sub_code'), rq.Card16('sequence_number'), rq.Card32('timestamp'), rq.Window('window'), rq.Card32('crtc'), rq.Card32('mode'), rq.Card16('rotation'), rq.Pad(2), rq.Int16('x'), rq.Int16('y'), rq.Card16('width'), rq.Card1... |
class TerminusCancelBuildCommand(sublime_plugin.WindowCommand):
def run(self, *args, exec_panel=EXEC_PANEL, **kwargs):
window = self.window
for panel_name in window.panels():
panel_name = panel_name.replace('output.', '')
if (panel_name != exec_panel):
continu... |
def test_request_has_no_auth_header_if_no_token_or_netrc():
with mock.patch.dict(os.environ, {}, clear=True):
client = Gitea(remote_url=':something/somewhere.git')
with requests_mock.Mocker(session=client.session) as m:
m.register_uri('POST', gitea_api_matcher, json={'id': 1}, status_cod... |
class Ex3Job(IndependentJob):
def __init__(self, aggregator, p, data_source, prob_label, rep, job_func, n_locs):
walltime = (60 * 59)
memory = (int(((tr_proportion * sample_size) * 0.01)) + 50)
IndependentJob.__init__(self, aggregator, walltime=walltime, memory=memory)
self.p = p
... |
class _TestClassB(_TestClassA):
def __init__(self, input_shape, arg1, arg2, arg3=3):
assert (input_shape == 'shape')
super().__init__(arg1, arg2, arg3)
def from_config(cls, cfg, input_shape):
args = {'arg1': cfg.ARG1, 'arg2': cfg.ARG2}
args['input_shape'] = input_shape
re... |
class LowRankMixtureCrossNet(torch.nn.Module):
def __init__(self, in_features: int, num_layers: int, num_experts: int=1, low_rank: int=1, activation: Union[(torch.nn.Module, Callable[([torch.Tensor], torch.Tensor)])]=torch.relu) -> None:
super().__init__()
assert (num_experts >= 1), 'num_experts mus... |
class GatesetTomographyFitter():
def __init__(self, result: Result, circuits: List, gateset_basis: Union[(GateSetBasis, str)]='default'):
self.gateset_basis = gateset_basis
if (gateset_basis == 'default'):
self.gateset_basis = default_gateset_basis()
data = TomographyFitter(resul... |
class Visualizer():
def __init__(self, opt, rank=0):
self.rank = rank
self.opt = opt
self.tf_log = (opt.isTrain and opt.tf_log)
self.use_html = (opt.isTrain and (not opt.no_html))
self.win_size = opt.display_winsize
self.name = opt.name
if self.tf_log:
... |
def nearest_next(d1, d2, **kwargs):
suffix = kwargs.get('suffix', '_b')
ties = kwargs.get('ties', None)
(lidx, ridx, dist) = nearest_next_idx(d1, d2, d1.__k__.values, ties)
d1 = d1.reindex(lidx)
d2 = d2.reindex(ridx)
d2 = d2.drop('Chromosome', axis=1)
d1.index = range(len(d1))
d2.index =... |
_torch
class AlignModelTest(ModelTesterMixin, unittest.TestCase):
all_model_classes = ((AlignModel,) if is_torch_available() else ())
fx_compatible = False
test_head_masking = False
test_pruning = False
test_resize_embeddings = False
test_attention_outputs = False
def setUp(self):
se... |
class PendingIOWork():
def __init__(self, ready_for_io: Set[_WritePipeline], io_tasks: Set[asyncio.Task], memory_budget_bytes: int, write_reporter: _WriteReporter) -> None:
self.ready_for_io = ready_for_io
self.io_tasks = io_tasks
self.memory_budget_bytes = memory_budget_bytes
self.w... |
class TagSerializer(serializers.ModelSerializer):
def get_fields(self):
field_mappings = super().get_fields()
fields = self.Meta.model._meta.get_fields()
for field in fields:
if (field.name not in field_mappings):
continue
if isinstance(field, SingleTa... |
def codevxml_read(file, session):
cat = Catalog()
data = cat.load(file)
(cat.type, cat.source, cat.format) = ('material', 'codev', 'codev')
data = et.fromstring(data)
cat.name = data.find('./Name').text
cat.comment = data.find('./ID').text
session.add(cat)
for glass in data.iterfind('./G... |
_exception
def refresh_school_info(homepage_response='', a_task: utils.Atask=object(), sess=object, m_headers={}, m_cookies={}, schl_abbr='', sql_conn=object) -> dict:
user_conf_dict = {}
libid_and_name = {}
sql_param = []
usage_rules_url = a_task.BASE_URL['rules']
html_opentime = utils.get_response... |
class SquadQuestionWithDistractors(SquadQuestion):
def __init__(self, question_id: str, question: List[str], answers: Set[str], paragraph: SquadParagraph, distractors: List[SquadParagraph]):
super().__init__(question_id, question, answers, paragraph)
self.distractors = distractors
def add_distra... |
class SearchTerm():
def __init__(self, termList):
if (type(termList) == type('')):
termList = searchStringToList(termList)
self.type = None
possibleType = termList[0]
if isTypeName(possibleType):
self.type = possibleType[:(- 1)]
termList = termList... |
class GruCellSpec(RnnCellSpec):
def __init__(self, num_units, bais_init=1.0, kernel_initializer='glorot_uniform', recurrent_initializer='orthogonal', candidate_initializer='glorot_uniform', activation='tanh'):
self.num_units = num_units
self.bais_init = bais_init
self.activation = activation... |
class TestLoadCheckpoint(unittest.TestCase):
def setUp(self):
self.cfg_mock = get_mock_cfg(None)
self.patches = {'os.makedirs': MagicMock(), 'os.path.join': MagicMock(), 'os.path.isfile': MagicMock(return_value=True), 'os.path.isabs': MagicMock(return_value=False), 'fairseq.file_io.PathManager.exist... |
def stm32f411_freertos():
    """Run a FreeRTOS demo firmware on an emulated STM32F411 MCU with Qiling."""
    # Load the Intel-HEX demo image on a Cortex-M core using the stm32f411
    # environment description, with debug-level logging.
    machine = Qiling(
        ['../rootfs/mcu/stm32f411/os-demo.hex'],
        archtype=QL_ARCH.CORTEX_M,
        ostype=QL_OS.MCU,
        env=stm32f411,
        verbose=QL_VERBOSE.DEBUG,
    )

    # Instantiate peripherals; USART2 and GPIOA are watched so their
    # register traffic is logged, RCC is created without watching.
    machine.hw.create('usart2').watch()
    machine.hw.create('gpioa').watch()
    machine.hw.create('rcc')

    # NOTE(review): presumably the SysTick tick-to-instruction ratio —
    # confirm the exact semantics against the Qiling hardware docs.
    machine.hw.systick.set_ratio(100)

    # Execute a bounded number of instructions so the demo terminates.
    machine.run(count=200000)
def test_unittest_skip_issue148(pytester: Pytester) -> None:
testpath = pytester.makepyfile('\n import unittest\n\n ("hello")\n class MyTestCase(unittest.TestCase):\n \n def setUpClass(self):\n xxx\n def test_one(self):\n pass\n ... |
def create_plugins_interaction_executor(selected_plugins: List[str], api_key_info: List[Dict], llm: BaseLanguageModel, llm_name: str) -> AgentExecutor:
memory = ConversationReActBufferMemory(memory_key='chat_history', return_messages=True, style='plugin', max_token_limit=10000)
class RunPlugin():
def __... |
def diou_loss(boxes1: torch.Tensor, boxes2: torch.Tensor, reduction: str='none', eps: float=1e-07) -> torch.Tensor:
(x1, y1, x2, y2) = boxes1.unbind(dim=(- 1))
(x1g, y1g, x2g, y2g) = boxes2.unbind(dim=(- 1))
assert (x2 >= x1).all(), 'bad box: x1 larger than x2'
assert (y2 >= y1).all(), 'bad box: y1 larg... |
class exec_evaluator(object):
def __init__(self, api_model, config):
if (api_model == 'llama'):
self.model = llm.Llama_Forward(config)
elif (api_model == 'flan-t5'):
self.model = llm.Flan_T5(config)
def evaluate(self, prompts, eval_template, eval_data, demos_template, few... |
def collect_files(img_dir, gt_dir, ratio):
assert isinstance(img_dir, str)
assert img_dir
assert isinstance(gt_dir, str)
assert gt_dir
assert isinstance(ratio, float)
assert (ratio < 1.0), 'val_ratio should be a float between 0.0 to 1.0'
(ann_list, imgs_list) = ([], [])
for ann_file in o... |
class Serializer(object):
ANCHOR_TEMPLATE = u'id%03d'
def __init__(self, encoding=None, explicit_start=None, explicit_end=None, version=None, tags=None):
self.use_encoding = encoding
self.use_explicit_start = explicit_start
self.use_explicit_end = explicit_end
self.use_version = ... |
def fetch_info_in_venv(venv_python_path: Path) -> Tuple[(List[str], Dict[(str, str)], str)]:
command_str = textwrap.dedent('\n import json\n import os\n import platform\n import sys\n\n impl_ver = sys.implementation.version\n implementation_version = "{0.major}.{0.minor}.{0... |
def test_read_pvgis_tmy_csv(expected, month_year_expected, inputs_expected, meta_expected, csv_meta):
fn = (DATA_DIR / 'tmy_45.000_8.000_2005_2016.csv')
pvgis_data = read_pvgis_tmy(fn, map_variables=False)
_compare_pvgis_tmy_csv(expected, month_year_expected, inputs_expected, meta_expected, csv_meta, pvgis_... |
class VKontakteOpenAPI(BaseAuth):
name = 'vk-openapi'
ID_KEY = 'id'
def get_user_details(self, response):
nickname = (response.get('nickname') or '')
(fullname, first_name, last_name) = self.get_user_names(first_name=response.get('first_name', [''])[0], last_name=response.get('last_name', ['... |
class CallbackList(object):
def __init__(self, callbacks=None, queue_length=10):
callbacks = (callbacks or [])
self.callbacks = [c for c in callbacks]
self.queue_length = queue_length
def append(self, callback):
self.callbacks.append(callback)
def set_params(self, params):
... |
class DefaultTheme(Theme):
DEFAULTS = dict(show=dict(purviews=True, mechanisms=True, cause_effect_links=True, mechanism_purview_links=True, two_faces=True, three_faces=True), labels=dict(postprocessor=None), layout={**dict(autosize=True, showlegend=True, title='', width=1000, height=800, paper_bgcolor='rgba(0, 0, 0... |
def create_client(url: str) -> Generator[(Client, None, None)]:
url_args = _url_to_dict(url)
scheme = url_args['scheme']
if (client_path := CLIENTS.get(scheme)):
(module_path, class_name) = client_path.rsplit('.', 1)
module = import_module(module_path)
client_class = getattr(module, ... |
class Decoder(nn.Module):
def __init__(self, num_classes):
super().__init__()
self.layer1 = UpsamplerBlock(128, 64)
self.layer2 = non_bottleneck_1d(64, 0, 1)
self.layer3 = non_bottleneck_1d(64, 0, 1)
self.layer4 = UpsamplerBlock(64, 32)
self.layer5 = non_bottleneck_1d... |
def FCN8(img_shape, class_n=None):
input_shape = (None, img_shape[0], img_shape[1], img_shape[2], 1)
input_img = Input(shape=input_shape[1:])
conv1_1 = Conv3D(32, 3, padding='same', activation='relu')(input_img)
conv1_2 = Conv3D(32, 3, padding='same', activation='relu')(conv1_1)
pool1 = MaxPooling3D... |
def configure():
set_hook(package_event('reticulate', 'onLoad'), reticulate_config_hook)
if (package_is_installed('reticulate') and roption('radian.enable_reticulate_prompt', True)):
set_hook(package_event('reticulate', 'onLoad'), reticulate_prompt_hook)
session = get_app().session
kb = ... |
class EMA(object):
def __init__(self, model, decay=0.99, update_interval=1, device=torch.device('cpu')):
self.decay = decay
self.update_iterval = update_interval
self.device = device
self.model = model
with torch.no_grad():
if (hasattr(model, 'get_ema_model') and ... |
class PairwiseLoss(SimilarityLoss):
def __init__(self, distance_metric_name: Distance=Distance.COSINE):
super(PairwiseLoss, self).__init__(distance_metric_name=distance_metric_name)
def forward(self, embeddings: Tensor, pairs: Tensor, labels: Tensor, subgroups: Tensor) -> Tensor:
raise NotImplem... |
class FakeHDF5FileHandler2(FakeHDF5FileHandler):
def get_test_content(self, filename, filename_info, filetype_info):
file_content = {}
attrs = []
if ('SO2NRT' in filename):
k = 'HDFEOS/SWATHS/OMPS Column Amount SO2/Data Fields/ColumnAmountSO2_TRM'
file_content[k] = DE... |
def write_area(area: Area) -> dict:
errors = []
nodes = {}
for node in area.nodes:
if node.is_derived_node:
continue
try:
data = write_node(node)
data['connections'] = {target_node.name: write_requirement(area.connections[node][target_node]) for target_nod... |
class Migration(migrations.Migration):
dependencies = [('jobs', '0004_jobfeed_is_activated')]
operations = [migrations.RemoveField(model_name='jobitem', name='salary_currency'), migrations.RemoveField(model_name='jobitem', name='salary_from'), migrations.RemoveField(model_name='jobitem', name='salary_till'), mi... |
class EncodingTests(unittest.TestCase):
def _forms(self):
file = BytesIO(b'<meta charset="iso-8859-1"><form method="POST"><input name="name" value=""><input name="unicode" value=""></form>')
return parse_file(file, ' backwards_compat=False)
def testFillFormEncoding(self):
forms = self._f... |
class TestPolySegment(EndianTest):
def setUp(self):
self.req_args_0 = {'drawable': , 'gc': , 'segments': [{'x1': (- 5123), 'y1': (- 15198), 'x2': (- 21917), 'y2': (- 1992)}]}
self.req_bin_0 = b'B\x00\x05\x00\x18j\x89\x03\xb3\x84\xc4\x0e\xfd\xeb\xa2\xc4c\xaa8\xf8'
def testPackRequest0(self):
... |
class MaskingFormatter(logging.Formatter):
def __init__(self, fmt: Optional[str]=logging.BASIC_FORMAT, datefmt: Optional[str]=None, style: Literal[('%', '{', '$')]='%', validate: bool=True, masked: Optional[str]=None) -> None:
super().__init__(fmt, datefmt, style, validate)
self.masked = masked
... |
def main(client, config):
(item_df, ss_df) = benchmark(read_tables, config=config, compute_result=config['get_read_time'])
f_ss_df = ss_df.loc[ss_df['ss_store_sk'].isin(q01_ss_store_sk_IN)][['ss_item_sk', 'ss_ticket_number']].reset_index(drop=True)
f_item_df = item_df.loc[item_df['i_category_id'].isin(q01_i... |
class DefaultDataParallelWrapper(DataParallelWrapper):
def __init__(self, bucket_cap_mb: int=25, static_graph: bool=True, find_unused_parameters: bool=False, allreduce_comm_precision: Optional[str]=None) -> None:
self._bucket_cap_mb: int = bucket_cap_mb
self._static_graph: bool = static_graph
... |
def load_model(args):
feature_extractor = FeatureExtractor(num_latent=args.num_latent, hidden_size=args.hidden_size)
factor_encoder = FactorEncoder(num_factors=args.num_factor, num_portfolio=args.num_latent, hidden_size=args.hidden_size)
alpha_layer = AlphaLayer(args.hidden_size)
beta_layer = BetaLayer(... |
def load_conv_layer(full_name, value, feature_extractor, unused_weights):
name = full_name.split('conv_layers.')[(- 1)]
items = name.split('.')
layer_id = int(items[0])
type_id = int(items[1])
weight_type = name.split('.')[(- 1)]
if (type_id == 0):
layer_type = 'conv'
elif (type_id =... |
def check_rasterizer(cfg: dict, rasterizer: Rasterizer, zarr_dataset: ChunkedDataset) -> None:
frames = zarr_dataset.frames[:]
for current_frame in [0, 50, (len(frames) - 1)]:
history_num_frames = cfg['model_params']['history_num_frames']
s = get_history_slice(current_frame, history_num_frames, ... |
class RawFeatureVector(BlueprintCircuit):
def __init__(self, feature_dimension: Optional[int]) -> None:
super().__init__()
self._ordered_parameters = ParameterVector('x')
if (feature_dimension is not None):
self.feature_dimension = feature_dimension
def _build(self):
... |
def do_train(cfg, model, resume=False, use_lsj=False):
model.train()
optimizer = build_optimizer(cfg, model)
scheduler = build_lr_scheduler(cfg, optimizer)
checkpointer = DetectionCheckpointer(model, cfg.OUTPUT_DIR, optimizer=optimizer, scheduler=scheduler)
start_iter = (checkpointer.resume_or_load(... |
def metric(value: float, unit: str='', precision: int=3) -> str:
if (not math.isfinite(value)):
return _format_not_finite(value)
exponent = (int(math.floor(math.log10(abs(value)))) if (value != 0) else 0)
if ((exponent >= 33) or (exponent < (- 30))):
return (scientific(value, (precision - 1)... |
class FakeRandomForest(nn.Module):
def __init__(self, input_dim, output_dim):
super(FakeRandomForest, self).__init__()
self.dense = nn.Sequential(nn.Linear(input_dim, 2000), nn.Dropout(), nn.ReLU(), nn.Linear(2000, 200), nn.Dropout(), nn.ReLU(), nn.Linear(200, output_dim), nn.Sigmoid())
def forw... |
_module()
class EnsembleBaronShared2FCBBoxHead(EnsembleBaronConvFCBBoxHead):
def __init__(self, fc_out_channels: int=1024, *args, **kwargs) -> None:
super().__init__(*args, num_shared_convs=0, num_shared_fcs=2, num_cls_convs=0, num_cls_fcs=0, num_reg_convs=0, num_reg_fcs=0, fc_out_channels=fc_out_channels, ... |
def gray_label_encode(angle_label, angle_range, omega=1.0):
assert (((angle_range / omega) % 1) == 0), 'wrong omega'
angle_label = np.array(angle_label, np.int32)
angle_label = np.divide(angle_label, omega)
angle_range /= omega
angle_range = int(angle_range)
angle_label = np.array((- np.round(an... |
def test_assign_controller_action():
prop = OSC.Properties()
prop.add_property('mything', '2')
prop.add_property('theotherthing', 'true')
cnt = OSC.Controller('mycontroller', prop)
aca = OSC.AssignControllerAction(cnt)
prettyprint(aca.get_element(), None)
prop2 = OSC.Properties()
prop2.a... |
def have_ffmpeg():
try:
from . import ffmpeg_lib
if _debug:
print('FFmpeg available, using to load media files.')
found = False
for (release_versions, build_versions) in ffmpeg_lib.release_versions.items():
if (ffmpeg_lib.compat.versions == build_versions):
... |
class TestGateway():
server = None
server_thread = None
def setup_class(cls):
cls.connection = psycopg2.connect(host='localhost', port='5432', user='postgres', password='password', dbname='testdb')
cursor = cls.connection.cursor()
cursor.execute('CREATE TABLE IF NOT EXISTS test_types... |
def get_transform(opt, params, method=Image.BICUBIC, normalize=True):
transform_list = []
if ('resize' in opt.resize_or_crop):
osize = [opt.loadSize, opt.loadSize]
transform_list.append(transforms.Scale(osize, method))
elif ('scale_width' in opt.resize_or_crop):
transform_list.append... |
def check_categories():
categorized_fixtures = set(((((((dimmer.args[1] + strip.args[1]) + plug.args[1]) + bulb.args[1]) + lightstrip.args[1]) + plug_smart.args[1]) + bulb_smart.args[1]))
diff = (set(SUPPORTED_DEVICES) - set(categorized_fixtures))
if diff:
for (file, protocol) in diff:
p... |
class PFToolbar():
def __init__(self, parent):
self.Parent = parent
self.buttons = []
self.toolbarX = 0
self.toolbarY = 0
self.padding = 2
self.hoverLabel = ''
def SetPosition(self, pos):
(self.toolbarX, self.toolbarY) = pos
def AddButton(self, btnBitm... |
class PusherEnv7DOFExp2(mujoco_env.MujocoEnv, utils.EzPickle):
def __init__(self):
utils.EzPickle.__init__(self)
self.randomize_xml('pr2_arm3d_blockpush_new_2.xml')
mujoco_env.MujocoEnv.__init__(self, 'temp.xml', 5)
def _step(self, a):
vec_1 = (self.get_body_com('object') - self.... |
def test_add_latest_should_not_create_duplicate_keys(project_factory: ProjectFactory, repo: TestRepository, command_tester_factory: CommandTesterFactory) -> None:
pyproject_content = ' [tool.poetry]\n name = "simple-project"\n version = "1.2.3"\n description = "Some description."\n authors = [\n ... |
class FakeConfig():
basetemp: Union[(str, Path)]
def trace(self):
return self
def get(self, key):
return (lambda *k: None)
def getini(self, name):
if (name == 'tmp_path_retention_count'):
return 3
elif (name == 'tmp_path_retention_policy'):
return ... |
class Char(_WordRegex):
def __init__(self, charset, asKeyword=False, excludeChars=None):
super().__init__(charset, exact=1, asKeyword=asKeyword, excludeChars=excludeChars)
self.reString = ('[%s]' % _collapseAndEscapeRegexRangeChars(self.initChars))
if asKeyword:
self.reString = (... |
def generate_context_response_pairs(dialog_file, context_response_file, step, max_seq_len, min_seq_len):
print('start generate context/response pairs...')
with open(dialog_file, encoding='utf-8') as fin, open(context_response_file, 'w', encoding='utf-8') as fout:
for l in tqdm(fin):
tokens =... |
class IdentificationEnv(ProxyEnv, Serializable):
def __init__(self, mdp_cls, mdp_args):
Serializable.quick_init(self, locals())
self.mdp_cls = mdp_cls
self.mdp_args = dict(mdp_args)
self.mdp_args['template_args'] = dict(noise=True)
mdp = self.gen_mdp()
super(Identific... |
class Alphabet(object):
def __init__(self, iterable: Iterable[str], offset: int) -> None:
self.instances: List[str] = list(iterable)
self.instance2index: Dict[(str, int)] = {k: (i + offset) for (i, k) in enumerate(self.instances)}
self.offset: int = offset
def get_index(self, instance: s... |
class BookmarkForm(discord.ui.Modal):
bookmark_title = discord.ui.TextInput(label='Choose a title for your bookmark (optional)', placeholder='Type your bookmark title here', default='Bookmark', max_length=50, min_length=0, required=False)
def __init__(self, message: discord.Message):
super().__init__(ti... |
.skipif(TEST_WITH_OPT_DEPS, reason='Only relevant if the optional dependency is not installed')
class TestNoWebhooks():
async def test_no_webhooks(self, bot):
async with Updater(bot=bot, update_queue=asyncio.Queue()) as updater:
with pytest.raises(RuntimeError, match='python-telegram-bot\\[webho... |
class Beam(object):
def __init__(self, size, pad, bos, eos, n_best=1, cuda=False, global_scorer=None, min_length=0, stepwise_penalty=False, block_ngram_repeat=0, exclusion_tokens=set()):
self.size = size
self.tt = (torch.cuda if cuda else torch)
self.scores = self.tt.FloatTensor(size).zero_(... |
def test_one_input(data):
reader = StreamReader()
reader.feed_data(data)
reader.feed_eof()
parser = Response.parse(reader.read_line, reader.read_exact, reader.read_to_eof)
try:
next(parser)
except StopIteration as exc:
assert isinstance(exc.value, Response)
return
exc... |
class GridBasicsPage(HTML5Page):
def __init__(self, view):
super().__init__(view)
self.body.use_layout(Container())
self.add_four()
self.add_twelve()
def add_four(self):
layout = ColumnLayout(ColumnOptions('first', ResponsiveSize(md=6)), ColumnOptions('second', Responsive... |
def extract_numbers_from_str(s):
    """Extract every numeric token from a whitespace-separated string.

    Commas are stripped from each token before parsing (thousands
    separators), so both ``"1,000"`` and ``"1,234.5"`` are recognized.
    Tokens that parse as neither ``int`` nor ``float`` are skipped.

    Args:
        s: Input string; tokens are produced by ``str.split()``.

    Returns:
        List of parsed numbers (ints and floats) in order of appearance.

    Fixes over the previous version: bare ``except:`` clauses are narrowed
    to ``ValueError`` (they used to swallow KeyboardInterrupt etc.), and
    zero values are no longer dropped (the old ``if num:`` truthiness test
    silently discarded 0 and 0.0).
    """
    numbers = []
    for token in s.split():
        cleaned = token.replace(',', '')
        try:
            num = int(cleaned)
        except ValueError:
            try:
                num = float(cleaned)
            except ValueError:
                continue  # not numeric; skip the token
        numbers.append(num)
    return numbers
_guvectorize(['void(uint8[:], uint8[:], boolean[:], uint8[:], uint8[:])'], '(b),(),(),(c)->(c)')
def _format_genotype_bytes(chars: ArrayLike, ploidy: int, phased: bool, _: ArrayLike, out: ArrayLike) -> None:
ploidy = ploidy[0]
sep = (124 if phased[0] else 47)
chars_per_allele = (len(chars) // ploidy)
sl... |
def _open(fn, mode):
compress = fn.endswith(b'.gz')
buffered = (io.BufferedReader if ('r' in mode) else io.BufferedWriter)
if (not compress):
return buffered(open(fn, mode))
if (not _GZIP):
return buffered(gzip.open(fn, mode))
if ('r' in mode):
proc = subprocess.Popen([_bytes... |
(PublisherImpression, PublisherPaidImpression)
class PublisherImpressionAdmin(ImpressionsAdmin):
date_hierarchy = 'date'
readonly_fields = ('date', 'publisher', 'views', 'clicks', 'offers', 'decisions', 'click_to_offer_rate', 'view_to_offer_rate', 'revenue', 'modified', 'created')
list_display = readonly_fi... |
def test_forward_references_in_annotations_are_handled():
class CustomModule(Module):
def provide_x(self) -> 'X':
return X('hello')
def fun(s: 'X') -> 'X':
return s
global X
class X():
def __init__(self, message: str) -> None:
self.message = message
tr... |
def preprocess_request_params(params: Optional[RequestParams], convert_lists: bool=True) -> RequestParams:
if (not params):
return {}
params = normalize_rank_params(params)
params = validate_multiple_choice_params(params)
params = convert_pagination_params(params)
params = convert_bool_param... |
class PKCS12KeyAndCertificates():
def __init__(self, key: (PrivateKeyTypes | None), cert: (PKCS12Certificate | None), additional_certs: list[PKCS12Certificate]):
if ((key is not None) and (not isinstance(key, (rsa.RSAPrivateKey, dsa.DSAPrivateKey, ec.EllipticCurvePrivateKey, ed25519.Ed25519PrivateKey, ed448... |
def get_visible_nodes(graph, agent_id):
state = graph
id2node = {node['id']: node for node in state['nodes']}
rooms_ids = [node['id'] for node in graph['nodes'] if (node['category'] == 'Rooms')]
character = id2node[agent_id]
character_id = character['id']
(inside_of, is_inside, edge_from) = ({},... |
def main():
parser = get_parser()
args = parser.parse_args()
source_path = osp.join(args.source, args.split)
cluster_path = osp.join(args.cluster_dir, (args.split + '.src'))
print(f'data path: {source_path}')
features = np.load((source_path + '.npy'), mmap_mode='r')
sizes = []
offsets = ... |
class Seq2SeqLMOutput(ModelOutput):
loss: Optional[torch.FloatTensor] = None
logits: torch.FloatTensor = None
past_key_values: Optional[Tuple[Tuple[torch.FloatTensor]]] = None
decoder_hidden_states: Optional[Tuple[torch.FloatTensor]] = None
decoder_attentions: Optional[Tuple[torch.FloatTensor]] = No... |
class DataArguments():
img_dir: str = field(default='', metadata={'help': 'Path to the training data.'})
Test_csv_path: str = field(default='./Data/final_train/final_test.csv', metadata={'help': 'Path to the training data.'})
tokenizer_path: str = field(default='./LLAMA/tokenizer', metadata={'help': 'Path t... |
def add_argument(parser, train=True):
parser.add_argument('--name', type=str, default='baseline')
parser.add_argument('--epoch', type=int, default=400)
parser.add_argument('--dst_list', type=str, default='knee_zhao')
parser.add_argument('--split', type=str, default='test')
parser.add_argument('--com... |
class PointwiseConv(nn.Module):
def __init__(self, in_channels, out_channels, stride=1, padding=0, bias=True):
super(PointwiseConv, self).__init__()
self.conv = nn.Conv1d(in_channels=in_channels, out_channels=out_channels, kernel_size=1, stride=stride, padding=padding, bias=bias)
def forward(sel... |
_pipeline_test
_torch
_vision
class VisualQuestionAnsweringPipelineTests(unittest.TestCase):
model_mapping = MODEL_FOR_VISUAL_QUESTION_ANSWERING_MAPPING
def get_test_pipeline(self, model, tokenizer, processor):
vqa_pipeline = pipeline('visual-question-answering', model='hf-internal-testing/tiny-vilt-ran... |
def igpu_read_status(path):
gpu = {}
if os.access((path + '/railgate_enable'), os.R_OK):
with open((path + '/railgate_enable'), 'r') as f:
gpu['railgate'] = (int(f.read()) == 1)
if os.access((path + '/tpc_pg_mask'), os.R_OK):
with open((path + '/tpc_pg_mask'), 'r') as f:
... |
class Choice():
def __init__(self, value, field):
self._value = value
self._value_as_set = None
self.field = field
self.field.bind('value', self)
def get_value(self):
return self._value
def set_value(self, value):
self._value_as_set = value
value = propert... |
class Poll(Object, Update):
def __init__(self, *, client: 'pyrogram.Client'=None, id: str, question: str, options: List['types.PollOption'], total_voter_count: int, is_closed: bool, is_anonymous: bool=None, type: 'enums.PollType'=None, allows_multiple_answers: bool=None, chosen_option_id: Optional[int]=None, correc... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.