code stringlengths 281 23.7M |
|---|
class OptionSeriesVennDataAccessibility(Options):
def description(self):
return self._config_get(None)
def description(self, text: str):
self._config(text, js_type=False)
def enabled(self):
return self._config_get(None)
def enabled(self, flag: bool):
self._config(flag, js... |
class BasePage(create_base_model(MPTTModel), ContentModelMixin):
active = models.BooleanField(_('active'), default=True)
title = models.CharField(_('title'), max_length=200, help_text=_('This title is also used for navigation menu items.'))
slug = models.SlugField(_('slug'), max_length=150, help_text=_('Thi... |
.parametrize('withdrawal,message', ((Withdrawal((- 1), 0, Address((b'\x00' * 20)), 0), 'cannot be negative'), (Withdrawal(0, (- 1), Address((b'\x00' * 20)), 1), 'cannot be negative'), (Withdrawal(0, 0, Address((b'\x00' * 20)), (- 1)), 'cannot be negative'), (Withdrawal((UINT_64_MAX + 1), UINT_64_MAX, Address((b'\x00' *... |
def test_build_failure(f_build_rpm_case, caplog):
config = f_build_rpm_case
config.ssh.unlink_success = True
worker = config.bw
worker.process()
assert_logs_exist(['Backend process error: No success file => build failure', 'Worker failed build, took ', 'Finished build: id=848963 failed=True '], capl... |
def titleFixup(text):
text = text.title()
text = re.sub('\\b(Mc)([a-z])', (lambda match: (match.group(1) + match.group(2).upper())), text)
text = re.sub('\\b(Mac)([bcdfgjklmnpqrstvwxyz])([a-z]{3,})', (lambda m: ((m.group(1) + m.group(2).upper()) + m.group(3))), text)
text = re.sub('\x08(von|van|de|du|of... |
class TestHandlerWithCircularQueueHandler(unittest.TestCase):
Q_SIZE = 1
def setUp(self):
super(TestHandlerWithCircularQueueHandler, self).setUp()
self._server = mockserver.MockRecvServer('localhost')
self._port = self._server.port
def tearDown(self):
self._server.close()
... |
def sign_secure_boot_v2(args):
SIG_BLOCK_MAX_COUNT = 3
contents = args.datafile.read()
sig_block_num = 0
signature_sector = b''
signature = args.signature
pub_key = args.pub_key
if ((len(contents) % SECTOR_SIZE) != 0):
if args.signature:
raise esptool.FatalError('Secure B... |
def extractIsekaisummonMe(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('Railway Hero', 'The Legend of the Railway Hero', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiter... |
class LEDHandler(BPHandler):
def __init__(self, map_dict=None):
self.model = LEDModel
if (map_dict == None):
self.led_map = {}
else:
self.led_map = map_dict
def get_id(self, led_id):
try:
led_id = self.led_map[led_id]
except KeyError:
... |
def extractImperiasplaceWordpressCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('the end of an unrequired love', 'the end of an unrequired love', 'translated'), ("cinderell... |
def extractBookboatblogWordpressCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
if ('Haroon' in item['tags']):
return buildReleaseMessageWithType(item, 'Haroon', vol, chp, frag... |
class HSLuv(HSLish, Space):
BASE = 'luv'
NAME = 'hsluv'
SERIALIZE = ('--hsluv',)
CHANNELS = (Channel('h', 0.0, 360.0, flags=FLG_ANGLE), Channel('s', 0.0, 100.0, bound=True), Channel('l', 0.0, 100.0, bound=True))
CHANNEL_ALIASES = {'hue': 'h', 'saturation': 's', 'lightness': 'l'}
WHITE = WHITES['... |
def sparse_filterbank_serialize(sparse, name, frequencies=None, n_fft=None, sr=None, fmin=None, fmax=None):
(starts, ends, coeffs) = sparse
assert (len(starts) == len(ends))
n_bands = len(starts)
arrays = [cgen.constant_declare((name + '_bands'), val=n_bands), cgen.array_declare((name + '_starts'), len(... |
def filter_bits(site, bits):
if (site == 'IOB_Y0'):
min_bitidx = 64
max_bitidx = 127
elif (site == 'IOB_Y1'):
min_bitidx = 0
max_bitidx = 63
else:
assert False, site
def inner():
for bit in bits:
bitidx = int(bit.split('_')[1])
if (... |
def stripe_charge_user(user, bill, amount_dollars, reference):
logger.debug(('stripe_charge_user(%s, %s, %d, %s)' % (user, bill, amount_dollars, reference)))
amt_cents = int((amount_dollars * 100))
stripe.api_key = settings.STRIPE_SECRET_KEY
charge = stripe.Charge.create(amount=amt_cents, currency='usd'... |
.flaky(reruns=MAX_FLAKY_RERUNS)
def test_run(password_or_none):
runner = CliRunner()
agent_name = 'myagent'
cwd = os.getcwd()
t = tempfile.mkdtemp()
shutil.copytree(Path(ROOT_DIR, 'packages'), Path(t, 'packages'))
password_options = _get_password_option_args(password_or_none)
os.chdir(t)
... |
class FBHideViewCommand(fb.FBCommand):
def name(self):
return 'hide'
def description(self):
return 'Hide a view or layer.'
def args(self):
return [fb.FBCommandArgument(arg='viewOrLayer', type='UIView/NSView/CALayer *', help='The view/layer to hide.')]
def run(self, args, options)... |
def test_dont_add_data_volume_when_persistance_is_disabled():
config = '\npersistence:\n enabled: false\n'
r = helm_template(config)
assert ('volumeClaimTemplates' not in r['statefulset'][uname]['spec'])
assert ({'name': 'elasticsearch-master', 'mountPath': '/usr/share/elasticsearch/data'} not in r['st... |
class BatchDataTest(Tidy3dBaseModel):
task_paths: Dict[(str, str)] = pd.Field(..., title='Data Paths', description='Mapping of task_name to path to corresponding data for each task in batch.')
task_ids: Dict[(str, str)] = pd.Field(..., title='Task IDs', description='Mapping of task_name to task_id for each task... |
def edit_gamatoto_xp(save_stats: dict[(str, Any)]) -> dict[(str, Any)]:
gamatoto_xp = save_stats['gamatoto_xp']
data = get_level_from_xp(gamatoto_xp['Value'], helper.check_data_is_jp(save_stats))
if (data is None):
return save_stats
level = data['level']
helper.colored_text(f'''Gamatoto xp: ... |
class table__g_l_y_f(DefaultTable.DefaultTable):
dependencies = ['fvar']
padding = 1
def decompile(self, data, ttFont):
self.axisTags = ([axis.axisTag for axis in ttFont['fvar'].axes] if ('fvar' in ttFont) else [])
loca = ttFont['loca']
pos = int(loca[0])
nextPos = 0
... |
def get_default_output_device():
import winreg as wr
read_access = (wr.KEY_READ | wr.KEY_WOW64_64KEY)
audio_path = 'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\MMDevices\\Audio\\Render'
audio_key = wr.OpenKeyEx(wr.HKEY_LOCAL_MACHINE, audio_path, 0, read_access)
num_devices = wr.QueryInfoKey(audio_... |
class Vertex_Root():
def __init__(self, graph, name=None):
assert isinstance(graph, Graph)
assert ((name is None) or isinstance(name, str))
self.graph = graph
self.uid = graph.get_next_uid()
self.out_edges = set()
self.in_edges = set()
self.name = name
... |
def send_order_cancel_email(order):
cancel_msg = ''
if order.cancel_note:
cancel_msg = '<br/>Message from the organizer: {cancel_note}'.format(cancel_note=order.cancel_note)
order_url = (((get_settings()['frontend_url'] + '/orders/') + str(order.identifier)) + '/view/')
event_url = ((get_setting... |
class TestEchoBehaviour(BaseSkillTestCase):
path_to_skill = Path(ROOT_DIR, 'packages', 'fetchai', 'skills', 'echo')
is_agent_to_agent_messages = False
def setup(cls):
super().setup()
cls.echo_behaviour = cast(EchoBehaviour, cls._skill.skill_context.behaviours.echo)
cls.logger = cls._... |
def test_cli_arg_codecs():
given_cmd_args = [sys.executable, '-m', 'fastavro', '--codecs']
default_codecs = ('deflate', 'null')
result_output = subprocess.check_output(given_cmd_args).decode()
result_codecs = [line.strip() for line in result_output.splitlines() if line.strip()]
for codec in default_... |
def test_valid_schema_nullable():
spark_schema = foundry_schema_to_spark_schema({'fieldSchemaList': [{'type': 'STRING', 'name': 'roles', 'nullable': None, 'customMetadata': {}}]})
assert (spark_schema == StructType([StructField('roles', StringType(), True, {})]))
spark_schema = foundry_schema_to_spark_schem... |
def get_meter_desc(dp, waiters, meter_id=None, to_user=True):
flags = {dp.ofproto.OFPMF_KBPS: 'KBPS', dp.ofproto.OFPMF_PKTPS: 'PKTPS', dp.ofproto.OFPMF_BURST: 'BURST', dp.ofproto.OFPMF_STATS: 'STATS'}
if (meter_id is None):
meter_id = dp.ofproto.OFPM_ALL
else:
meter_id = UTIL.ofp_meter_from_... |
def extractAddicttranslationsWordpressCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name,... |
class SigningKey(ModelNormal):
allowed_values = {}
validations = {}
_property
def additional_properties_type():
return (bool, date, datetime, dict, float, int, list, str, none_type)
_nullable = False
_property
def openapi_types():
return {'signing_key': (str,)}
_property
... |
class BlackListTest(unittest.TestCase):
def test_empty_black_list(self):
event_listener = ami.EventListener(black_list=[])
for i in range(samples_to_test):
event_name = sample(letters, event_name_size)
self.assertTrue(event_listener.check_event_name(event_name))
def test_... |
.parametrize('features_in', [['Age', 'Marks'], ['Name', 'dob']])
.parametrize('input_features', [None, variables_str, np.array(variables_str)])
def test_new_feature_names_pipe_with_skl_transformer_and_df(df_vartypes, features_in, input_features):
pipe = Pipeline([('imputer', SimpleImputer(strategy='constant')), ('t... |
class ClosePointInTime(Runner):
async def __call__(self, es, params):
pit_op = mandatory(params, 'with-point-in-time-from', self)
pit_id = CompositeContext.get(pit_op)
request_params = params.get('request-params', {})
body = {'id': pit_id}
(await es.close_point_in_time(body=b... |
class TestGetCleanedAbstractText():
def test_should_return_none_if_passed_in_text_was_none(self):
assert (get_cleaned_abstract_text(None) is None)
def test_should_return_empty_str_if_passed_in_text_was_empty(self):
assert (get_cleaned_abstract_text('') == '')
def test_should_return_abstract_... |
(slots=True, repr=False)
class XTxInput(TxInput):
value: Optional[int] = attr.ib(default=None)
x_pubkeys: List[XPublicKey] = attr.ib(default=attr.Factory(list))
threshold: int = attr.ib(default=0)
signatures: List[bytes] = attr.ib(default=attr.Factory(list))
script_type: ScriptType = attr.ib(default... |
class KorParser(BaseOutputParser):
encoder: Encoder
schema_: Object
validator: Optional[Validator] = None
def _type(self) -> str:
return 'KorEncoder'
def parse(self, text: str) -> Extraction:
try:
data = self.encoder.decode(text)
except ParseError as e:
... |
def main():
parser = argparse.ArgumentParser(description='todo test suite')
parser.add_argument('-a', '--all', action='store_true', help='Run functional test in addition to unit tests')
parser.add_argument('-f', '--func', action='store_true', help='Run only functional test')
parser.add_argument('-v', '-... |
class Accessors(Enums):
def bespoke(self, func_name: str, js_funcs: types.JS_FUNCS_TYPES=None, accessor_params: dict=None, profile: types.PROFILE_TYPE=None):
if (js_funcs is None):
self._set_value(func_name, js_type=True)
else:
if (not isinstance(js_funcs, list)):
... |
def get_lookups_tuple(user, source_model, target_model, order_field, id_field):
    """Build ``(id, label)`` choice pairs for target rows referenced by the
    user's visible source records.

    Args:
        user: User whose visibility restricts the source queryset.
        source_model: Model whose rows are filtered through
            ``filter_model_queryset_by_user``.
        target_model: Model the returned ids/labels belong to.
        order_field: Field name used to order the source queryset.
        id_field: Field on ``source_model`` holding the target ids.

    Returns:
        list[tuple]: ``(target_id, str(target_instance))`` pairs in
        ``order_field`` order, with ``None`` ids skipped.

    Raises:
        target_model.DoesNotExist: If a referenced id has no target row.
    """
    queryset = filter_model_queryset_by_user(user, source_model)
    target_ids = queryset.order_by(order_field).values_list(id_field, flat=True).distinct()
    # NOTE(review): this issues one query per id (N+1); target_model.objects.in_bulk
    # would batch it, but that changes behavior for missing ids — left as-is.
    # Renamed the loop variable so it no longer shadows the builtin `id`.
    return [
        (target_id, str(target_model.objects.get(id=target_id)))
        for target_id in target_ids
        if target_id is not None  # skip source rows with no target reference
    ]
def do_program(stdscr: ScreenBase, flags: ScreenFlags, key_bindings: Optional[KeyBindings]=None, curses_api: Optional[CursesApiBase]=None, line_objs: Optional[Dict[(int, LineBase)]]=None) -> None:
if (not key_bindings):
key_bindings = read_key_bindings()
if (not curses_api):
curses_api = CursesA... |
_deserializable
class AzureOpenAILlm(BaseLlm):
def __init__(self, config: Optional[BaseLlmConfig]=None):
super().__init__(config=config)
def get_llm_model_answer(self, prompt):
return AzureOpenAILlm._get_answer(prompt=prompt, config=self.config)
def _get_answer(prompt: str, config: BaseLlmCo... |
class build_ext_options():
def build_options(self):
if hasattr(self.compiler, 'initialize'):
self.compiler.initialize()
self.compiler.platform = sys.platform[:6]
for e in self.extensions:
e.extra_compile_args += COMPILE_OPTIONS.get(self.compiler.compiler_type, COMPILE... |
_meta(equipment.Hakurouken)
class Hakurouken():
def choose_card_text(self, act, cards):
if act.cond(cards):
return (True, '')
else:
return (False, '()')
def effect_string_before(self, act):
return f'{N.char(act.source)}<style=Card.Name></style>,{N.char(act.target)... |
def train(model, train_loader, val_loader, optimizer, init_lr=0.002, checkpoint_dir=None, checkpoint_interval=None, nepochs=None, clip_thresh=1.0):
if use_cuda:
model = model.cuda()
criterion = nn.CrossEntropyLoss()
global global_step, global_epoch
if (hparams.exponential_moving_average is not N... |
def t_OtherLessEqThanN(n):
def t_OtherLessEqThanN(self, src: Character, tl: Sequence[Character]) -> Tuple[(List[Character], bool)]:
tl = [t for t in tl if (not t.dead)]
try:
tl.remove(src)
except ValueError:
pass
return (tl[:n], bool(len(tl)))
t_OtherLessE... |
def clean_parsed_data(d):
if isinstance(d, (dict, collections.OrderedDict)):
d = dict(d)
if (('chapters' in d) and (not isinstance(d['chapters'], list))):
d = clean_parsed_data(d['chapters'])
if isinstance(d, dict):
d = [d]
else:
for key in... |
def read_logs(log_name, root_log_dir, extra_data=None):
log_reader = LogReader()
if isinstance(log_name, str):
config_names = _get_config_names(root_log_dir)
if (config_names.index(log_name) == (- 1)):
return {'status': 'fail', 'msg': 'There is no config named {}.'.format(log_name)}
... |
class Migration(migrations.Migration):
dependencies = [('home', '0004_contentpage')]
operations = [migrations.AddField(model_name='contentpage', name='button_text', field=models.CharField(default='', max_length=50)), migrations.AddField(model_name='contentpage', name='button_url', field=models.CharField(default... |
def test_get_zone_thickness_one_well(testpath):
wlist = [xtgeo.well_from_file((testpath / WFILES1), zonelogname='Zonelog')]
mypoints = Points()
mypoints = xtgeo.points_from_wells(wlist, tops=False, zonelist=[1, 2, 3])
mypoints.zname = 'THICKNESS'
assert (mypoints.dataframe['THICKNESS'][0] == pytest.... |
def main():
parser = ArgumentParser()
parser.add_argument('eggs', nargs='*', help='Eggs to convert')
parser.add_argument('--dest-dir', '-d', default=os.path.curdir, help='Directory to store wheels (default %(default)s)')
parser.add_argument('--verbose', '-v', action='store_true')
args = parser.parse... |
class TestCountryField():
class CountryForm(Form):
country = CountryField()
.parametrize('country', ['XX', 'ZZ'])
def test_invalid(self, country: str):
form = TestCountryField.CountryForm(FormData({'country': country}))
assert (form.validate() is False)
assert (len(form.count... |
_meta(characters.reimu.Tribute)
class Tribute():
name = ''
description = ',,'
def clickable(self):
g = self.game
me = self.me
if self.limit1_skill_used('tribute_tag'):
return False
try:
act = g.action_stack[(- 1)]
except IndexError:
... |
def load_oasis_montaj_grid(fname):
with open(fname, 'rb') as grd_file:
header = _read_header(grd_file.read(512))
_check_ordering(header['ordering'])
_check_sign_flag(header['sign_flag'])
data_type = _get_data_type(header['n_bytes_per_element'], header['sign_flag'])
grid = grd... |
def get_node_type(node):
if (node == BLANK_NODE):
return NODE_TYPE_BLANK
elif (len(node) == 2):
(key, _) = node
nibbles = decode_nibbles(key)
if is_nibbles_terminated(nibbles):
return NODE_TYPE_LEAF
else:
return NODE_TYPE_EXTENSION
elif (len(no... |
class _AwardTypeCodes(_Filter):
underscore_name = 'award_type_codes'
def generate_elasticsearch_query(cls, filter_values: List[str], query_type: _QueryType, **options) -> ES_Q:
award_type_codes_query = []
for filter_value in filter_values:
award_type_codes_query.append(ES_Q('match', ... |
def test_serializer_fields_initialization():
assert hasattr(importable, 'ExampleSerializer')
serializer = importable.ExampleSerializer()
assert ('charfield' in serializer.fields)
assert ('integerfield' in serializer.fields)
assert ('floatfield' in serializer.fields)
assert ('decimalfield' in ser... |
class ViewSetStructureHandler(SetStructureHandler):
def __init__(self, window_layout):
self.window_layout = window_layout
return
def resolve_id(self, id):
window_layout = self.window_layout
window = window_layout.window
view = window.get_view_by_id(id)
if (view is... |
('config_name, overrides, expected', [param('include_nested_group_pkg_header_foo_override_pkg_bar', [], [ResultDefault(config_path='group1/group2/file1', parent='group1/group_item1_pkg_header_foo', package='bar.group2', is_self=False), ResultDefault(config_path='group1/group_item1_pkg_header_foo', parent='include_neste... |
def test_opt_str_validation():
res = telemetry.opt_str_param('')
assert isinstance(res, str)
res = telemetry.opt_str_param('TEST')
assert ('TEST' == res)
res = telemetry.opt_str_param(None)
assert (not res)
with pytest.raises(TypeError) as exc_info:
telemetry.opt_str_param(3)
exc... |
.parametrize('name,hashed', ((None, ('0x' + ('00' * 32))), ('', ('0x' + ('00' * 32))), ('eth', '0x93cdeb708b7545dc668ebd1c33cfd8ed6f04690a0bcc88a93fc4ae'), ('foo.eth', '0xde9b09fd7c5f901e23a3f19fecc54828e9ce86591bd9801b019f84f')))
def test_normal_name_to_hash(name, hashed):
assert (normal_name_to_hash(name).hex() =... |
def test_swag(client, specs_data):
invalid_film = {'_id': '594dba7b2879334e411f3dcc', 'title': 'The Last Airbender', 'director': 'M. Night Shyamalan', 'distributor': 'Paramount Pictures', 'running_time': 103, 'release_date': 'June 30, 2010'}
super_invalid_film = {'title': 'The Last Airbender', 'release_date': 2... |
def get_bounding_box_list_distance(bounding_box_list_1: Sequence[LayoutPageCoordinates], bounding_box_list_2: Sequence[LayoutPageCoordinates]) -> BoundingBoxDistance:
sorted_distances = get_sorted_bounding_box_distances([get_bounding_box_distance(bounding_box_1, bounding_box_2) for bounding_box_1 in bounding_box_li... |
class Extension(extensions.Extension):
def handle_model(self):
cls = self.model
cls.add_to_class('language', models.CharField(_('language'), max_length=10, choices=django_settings.LANGUAGES, default=PRIMARY_LANGUAGE))
cls.add_to_class('translation_of', models.ForeignKey('self', on_delete=mod... |
class TPriv(Protocol):
def encrypt_data(self, localised_key: bytes, engine_id: bytes, engine_boots: int, engine_time: int, data: bytes) -> EncryptionResult:
...
def decrypt_data(self, localised_key: bytes, engine_id: bytes, engine_boots: int, engine_time: int, salt: bytes, data: bytes) -> bytes:
... |
def populate_outlier_dict(connection, session):
global outlier_dict
outliers = connection.execute('select * from {}'.format(TABLES[2]))
requests = session.query(Request).options(joinedload(Request.endpoint)).all()
index = 0
for outlier in outliers:
(req_id, index) = get_request_id(requests, ... |
class SoltrafegoStation(BikeShareStation):
def __init__(self, data):
super(SoltrafegoStation, self).__init__()
self.name = data['name']
self.latitude = float(data['lat'])
self.longitude = float(data['lon'])
self.bikes = int(data['online_docked'])
self.free = int(data[... |
class OptionSeriesSankeySonificationTracksMappingRate(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str):
self._conf... |
def test_cli_disabling_import_hash_parsing():
runner = CliRunner()
result = runner.invoke(ioc_finder.cli_find_iocs, ['imphash 18ddf28a71089acdbab5038f58044c0a', '--no_import_hashes'])
assert (result.exit_code == 0)
json_results = json.loads(result.output.strip())
assert (json_results['md5s'] == [])
... |
class AsyncPsycopgInstrumentation(AsyncDbApi2Instrumentation):
name = 'psycopg_async'
instrument_list = [('psycopg.connection_async', 'AsyncConnection.connect')]
async def call(self, module, method, wrapped, instance, args, kwargs):
signature = 'psycopg.connect_async'
(host, port) = get_dest... |
def add_items(doc, item_data):
for item in item_data:
invoice_item = {}
for (key, value) in item.items():
invoice_item[key] = value
if (key == 'item_code'):
item_code = frappe.db.get_value('Item', {'zenoti_item_code': item['item_code'], 'item_name': item['item... |
class DbBase(dbb.TransactionMixin, metaclass=abc.ABCMeta):
__metaclass__ = abc.ABCMeta
def loggerPath(self):
return None
def __init__(self):
self.log = logging.getLogger(self.loggerPath)
self.log.info('Base DB Interface Starting!')
def openDB(self):
self.log.info('Opening... |
()
_options(Command.SEND_REPORT)
('--slack-file-name', type=str, default=None, help="The report's file name, this is how it will be sent to slack.")
('--aws-profile-name', type=str, default=None, help='AWS profile name')
('--aws-region-name', type=str, default=None, help='AWS region name')
('--aws-access-key-id', type=... |
class ArgCheckSimple(ArgCheckBase):
def arg_check0(self):
self.calls += 1
def arg_check1(self, new):
self.calls += 1
self.tc.assertEqual(new, self.value)
def arg_check2(self, name, new):
self.calls += 1
self.tc.assertEqual(name, 'value')
self.tc.assertEqual(ne... |
.parametrize('estimator', _estimators)
def test_raises_non_fitted_error_when_error_during_fit(estimator):
(X, y) = test_df(categorical=True)
X.loc[(len(X) - 1)] = nan
transformer = clone(estimator)
with pytest.raises(ValueError):
transformer.fit(X, y)
with pytest.raises(NotFittedError):
... |
def extractMutekitranslationsWordpressCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name,... |
def test_gamma():
num_trials = 100
num_runs = 100000
lam = 100.0
theta = (1 / 10.0)
mean = (lam * theta)
sigma = np.sqrt(((lam * (theta ** 2)) / num_trials))
lam_realization = large_gamma((lam * np.ones((num_trials, num_runs))), theta, thresh=1000000.0)
assert (((np.std((np.mean(lam_real... |
class SchedulerBase(object):
def __init__(self, studio=None):
self._studio = None
self.studio = studio
def _validate_studio(self, studio_in):
if (studio_in is not None):
from stalker import Studio
if (not isinstance(studio_in, Studio)):
raise TypeE... |
class BarrierEdge(edges.BaseEdge):
char = 'A'
def __call__(self, length, **kw):
self.edge((0.2 * length))
self.corner(90, (self.thickness / 2))
self.corner((- 90), (self.thickness / 2))
self.edge(((0.6 * length) - (2 * self.thickness)))
self.corner((- 90), (self.thickness... |
class GptsMessage():
conv_id: str
sender: str
receiver: str
role: str
content: str
rounds: Optional[int]
current_gogal: str = None
context: Optional[str] = None
review_info: Optional[str] = None
action_report: Optional[str] = None
model_name: Optional[str] = None
created_... |
def _get_profile(shader_type: ShaderType) -> str:
renderer_type = bgfx.getRendererType()
sys_platform = platform.system()
windows_shader_type = {ShaderType.FRAGMENT: 'ps_', ShaderType.VERTEX: 'vs_', ShaderType.COMPUTE: 'cs_'}
if (sys_platform == 'Darwin'):
return 'metal'
elif (sys_platform =... |
def test_need_more_languages_triggers_messagebox(qapp, monkeypatch, menu_btn_without_lang_man):
menu_btn = menu_btn_without_lang_man
exec_args = []
def mocked_exec(cls):
exec_args.append(cls)
monkeypatch.setattr(menu_button.QtWidgets.QMessageBox, 'exec', mocked_exec)
menu_btn.menu().aboutToS... |
class IntegrationAPI(BaseAPI):
_config_data: (ConfigData | None)
def __init__(self, hass: (HomeAssistant | None), async_on_data_changed: (Callable[([], Awaitable[None])] | None)=None, async_on_status_changed: (Callable[([ConnectivityStatus], Awaitable[None])] | None)=None):
super().__init__(hass, async_... |
def extractDomhanmadraruaBlogspotCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_t... |
def test_pocket_modifiability(pocket):
with pytest.raises(FrozenInstanceError):
pocket.center = np.array([10, 10, 10])
with pytest.raises(FrozenInstanceError):
pocket.contains = set()
with pytest.raises(ValueError, match='assignment destination is read-only'):
pocket.center[0] = 10
... |
def render_examples(render_images=False, out_dir='mayavi/auto'):
if (not os.path.exists(out_dir)):
os.makedirs(out_dir)
example_gallery_file = open(os.path.join(out_dir, 'examples.rst'), 'w')
example_gallery_file.write('\n\n.. _example_gallery:\n\nExample gallery\n\n\n')
example_files = [filenam... |
class IterableAlreadyHandledTest(_TestBase):
def set_site(self):
self.site = IterableSite()
def get_app(self):
return IterableApp(True)
def test_iterable_app_keeps_socket_open_unless_connection_close_sent(self):
self.site.application = self.get_app()
sock = eventlet.connect(s... |
class TestMacsXLSForMacs2010__broad(unittest.TestCase):
def test_load_macs2_xls_file(self):
macsxls = MacsXLS(fp=io.StringIO(MACS2010__broad_data))
self.assertEqual(macsxls.macs_version, '2.0.10.')
self.assertEqual(macsxls.name, 'NW-H3K27ac-chIP_vs_input_E13.5_50bp_bowtie_mm10_BASE_q0.05_bw3... |
def assemble_email_contents(secret_login_code):
email_template = 'signin_code_template.html'
login_link_staging = (' + secret_login_code)
login_link_prod = (' + secret_login_code)
abs_dir = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(abs_dir, email_template)) as fh:
con... |
def _dense_idx_to_jagged_idx(dense_idx: List[int], offsets_list: List[List[int]]) -> int:
assert (len(dense_idx) == (1 + len(offsets_list)))
offset = 0
for (i, (d, offsets)) in enumerate(zip(dense_idx, offsets_list)):
(prev_offset, next_offset) = offsets[(offset + d):((offset + d) + 2)]
grou... |
def verify_state(expected_state: AccountState, state: StateAPI) -> None:
diff = diff_state(expected_state, state)
new_line = '\n'
if diff:
error_messages = []
for (account, field, actual_value, expected_value) in diff:
if (field == 'balance'):
error_messages.appen... |
def pytest_generate_tests(metafunc):
perf_log_shapes = [(4,), (10,), (13,), (4, 4), (7, 7), (10, 10), (4, 4, 4), (5, 5, 7), (7, 7, 7)]
perf_mem_limit = (4 * (2 ** 20))
if ('local_shape_and_axes' in metafunc.fixturenames):
def idgen(val):
(batch, size) = val[0]
return ((str(ba... |
.parametrize('typ,_131,_132', [(str, '131', '132'), (int, 131, 132), (float, 131.0, 132.0)])
def test_aliases_mutiple_none(typ, _131, _132):
aliases_func = normalize('x', type=typ, aliases={'u': _131, 'v': _132})(func_x)
assert (aliases_func('u') == _131)
assert (aliases_func(131) == _131)
assert (alias... |
class BaseSingleSiteMHProposer(BaseProposer):
def __init__(self, target_rv: RVIdentifier):
self.node = target_rv
def propose(self, world: World):
proposal_dist = forward_dist = self.get_proposal_distribution(world)
old_value = world[self.node]
proposed_value = proposal_dist.sampl... |
def reduce_sum(X, lengths, *, threads_per_block=128, num_blocks=128):
_is_float_array(X)
B = len(lengths)
T = X.shape[0]
O = X.shape[1]
_check_lengths(lengths, T)
out = _alloc((B, O), dtype=X.dtype, zeros=True)
if (X.dtype == 'float32'):
reduce_sum_kernel_float((num_blocks,), (thread... |
def test_AHHY_mutate_residues():
mutations = {'A:HIS:2': 'A:HIP:2', 'A:HIS:3': 'A:HIP:3'}
delete_residues = ('A:TYR:4',)
f = open(ahhy_example, 'r')
pdb_string = f.read()
chorizo = LinkedRDKitChorizo(pdb_string, deleted_residues=delete_residues, mutate_res_dict=mutations)
assert (len(chorizo.res... |
def create_default_priority_levels(silent=False):
pl = ['low', 'medium', 'high']
for p in pl:
priority = FlicketPriority.query.filter_by(priority=p).first()
if (not priority):
add_priority = FlicketPriority(priority=p)
db.session.add(add_priority)
if (not sile... |
def test_weekly_working_days_is_calculated_correctly():
wh = WorkingHours()
wh['mon'] = [[1, 2]]
wh['tue'] = [[3, 4]]
wh['wed'] = [[5, 6]]
wh['thu'] = [[7, 8]]
wh['fri'] = [[9, 10]]
wh['sat'] = []
wh['sun'] = []
assert (wh.weekly_working_days == 5)
wh = WorkingHours()
wh['mon... |
class ViTLoraAdapter(fl.Chain, Adapter[ViT]):
def __init__(self, target: ViT, rank: int=16) -> None:
self.rank = rank
with self.setup_adapter(target):
super().__init__(target)
self.sub_adapters: list[tuple[(LoraAdapter, fl.Chain)]] = []
for (linear, parent) in self.target... |
class Migration(migrations.Migration):
dependencies = [('accounts', '0001_initial')]
operations = [migrations.AlterField(model_name='user', name='first_name', field=models.CharField(blank=True, max_length=150, verbose_name='first name')), migrations.AlterField(model_name='user', name='id', field=models.BigAutoF... |
def generate_aggregated_capacity_config_list(capacity_config: list[Any], parent_zone: ZoneKey) -> (list[dict[(str, Any)]] | None):
flat_capacity_config = [item for sublist in capacity_config for item in sublist]
datetime_values = set([item['datetime'] for item in flat_capacity_config])
updated_aggregated_ca... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.