code stringlengths 281 23.7M |
|---|
def clear_memory():
    """Reset the global conversation ``memory`` to the calibration prompt sequence.

    Rebinds the module-level ``memory`` list to the four fixed calibration
    turns (system / user / assistant / system) so any accumulated chat
    history is discarded.
    """
    global memory
    calibration_turns = [
        ('system', CODE_SYSTEM_CALIBRATION_MESSAGE),
        ('user', CODE_USER_CALIBRATION_MESSAGE),
        ('assistant', CODE_ASSISTANT_CALIBRATION_MESSAGE),
        ('system', CONSOLE_OUTPUT_CALIBRATION_MESSAGE),
    ]
    memory = [{'role': role, 'content': content} for role, content in calibration_turns]
def extractRarissimaAvisBlogspotCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('Water Recovery System', 'Water Recovery System', 'translated'), ('PRC', 'PRC', 'translated')... |
class LazyBackend():
def __init__(self, backend: 'Union[BaseECCBackend, Type[BaseECCBackend], str, None]'=None) -> None:
from eth_keys.backends.base import BaseECCBackend
if (backend is None):
pass
elif isinstance(backend, BaseECCBackend):
pass
elif (isinstanc... |
class Traceback(object):
def __init__(self, exc_type, exc_value, tb):
self.exc_type = exc_type
self.exc_value = exc_value
if (not isinstance(exc_type, str)):
exception_type = exc_type.__name__
if (exc_type.__module__ not in ('__builtin__', 'exceptions')):
... |
def create_group(node: (str | nx.DiGraph), properties: dict, node_graph: NodeGraph) -> TaskGroup:
kind = properties['kind']
if isinstance(node, nx.DiGraph):
model_ids = sorted(list(node), key=(lambda node: (node == properties['exit_node'])))
else:
model_ids = [node]
bound_model_name = mo... |
def policy_reader(filename, path, no_check_name=False):
policy = Policy()
if (not filename.endswith('.xml')):
raise FirewallError(errors.INVALID_NAME, ("'%s' is missing .xml suffix" % filename))
policy.name = filename[:(- 4)]
if (not no_check_name):
policy.check_name(policy.name)
pol... |
class flow_delete_strict(flow_mod):
version = 2
type = 14
_command = 4
def __init__(self, xid=None, cookie=None, cookie_mask=None, table_id=None, idle_timeout=None, hard_timeout=None, priority=None, buffer_id=None, out_port=None, out_group=None, flags=None, match=None, instructions=None):
if (xi... |
class Offset(Node):
total = 0
known = 0
unknown = 0
inf = 0
giv = 0
tp_1p = 0
fp_1p = 0
tn_1p = 0
fn_1p = 0
correct = 0
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def __repr__(self):
return 'Offset'
def __str__(self):
... |
def argparseSections(s):
result = []
s = re.split('\\s|:', s)
try:
for part in s:
m = re.match('^(\\d+(\\.\\d+)?)/(\\d+)$', part)
if m:
n = int(m.group(3))
result.extend(([(float(m.group(1)) / n)] * n))
continue
m = ... |
def predict(message, history):
history_langchain_format = []
for (human, ai) in history:
history_langchain_format.append(HumanMessage(content=human))
history_langchain_format.append(AIMessage(content=ai))
history_langchain_format.append(HumanMessage(content=message))
gpt_response = llm(m... |
def delete_resource_type(test_config: FidesConfig, resource_type: str) -> None:
url = test_config.cli.server_url
fides_keys = [resource['fides_key'] for resource in _api.ls(url, resource_type, headers=test_config.user.auth_header).json()]
for fides_key in fides_keys:
_api.delete(url, resource_type, ... |
.parametrize('types,hex_data,expected', ((['string'], 'e6fe67206e', ('no padding needed',)), (['bytes'], '', (b'\x01',)), (['uint256', 'uint256', 'address', 'bytes'], 'f28c10d7d9b40cb5c7ad3cb6506c65da01f2fac2e667dcb9e66e9cc904853d955acef822db058eb8505911ed77f175b99e1531c1a63a169ac75a2daaefa51def28c10d7d9b40c7bc2c873190... |
.parametrize('target_case, format_mode, expected', [('test', False, 'test'), (None, False, 'default_smoother_update'), (None, True, 'default_%d')])
def test_target_case_name(target_case, expected, format_mode, poly_case):
args = Namespace(random_seed=None, current_case='default', target_case=target_case)
assert... |
def test_slave_message_reaction_footer(slave):
assert (not SlaveMessageProcessor.build_reactions_footer({}))
reactions = {ReactionName('__reaction_a__'): [slave.chat_with_alias, slave.chat_without_alias], ReactionName('__reaction_b__'): [slave.chat_with_alias], ReactionName('__reaction_c__'): []}
footer = S... |
('knowledge')
('--address', type=str, default=API_ADDRESS, required=False, show_default=True, help='Address of the Api server(If not set, try to read from environment variable: API_ADDRESS).')
def knowledge_cli_group(address: str):
global API_ADDRESS
if (address == _DEFAULT_API_ADDRESS):
address = os.ge... |
def compare(items_a: Any, items_b: Any) -> bool:
if ((type_no_int(items_a) != type_no_int(items_b)) and (items_a is not None) and (items_b is not None)):
return False
if isinstance(items_a, dict):
return compare_dicts(items_a, items_b)
elif isinstance(items_a, list):
return compare_l... |
class OptionPlotoptionsOrganizationAccessibility(Options):
def description(self):
return self._config_get(None)
def description(self, text: str):
self._config(text, js_type=False)
def descriptionFormat(self):
return self._config_get(None)
def descriptionFormat(self, text: str):
... |
class Rescaler(nn.Module, abc.ABC):
def forward(self, x: torch.Tensor) -> torch.Tensor:
return self.rescale(x)
def reset(self) -> None:
pass
def update(self, x: torch.Tensor) -> None:
pass
def rescale(self, x: torch.Tensor) -> torch.Tensor:
def recover(self, x: torch.Tensor) ... |
def run_gff_decoration(mode, gff_ID_field, is_prodigal, is_blastx, gff_genepred_file, gff_genepred_fasta, gff_outfile, predictor, searcher_name, annotated_hits):
annot_generator = None
if (mode == DECORATE_GFF_NONE):
annot_generator = annotated_hits
elif (mode == DECORATE_GFF_GENEPRED):
if i... |
class TestConfig(unittest.TestCase):
def test_load_configs(self):
for location in ['detectron2', 'detectron2go']:
root_dir = os.path.abspath(reroute_config_path(f'{location}://.'))
files = glob.glob(os.path.join(root_dir, '**/*.yaml'), recursive=True)
files = [f for f in ... |
def test_set_get_del_providers():
p13 = providers.Provider()
container_1 = Container()
container_2 = Container()
container_1.p13 = p13
container_2.p13 = p13
assert (Container.providers == dict(p11=Container.p11, p12=Container.p12))
assert Container.cls_providers, dict(p11=Container.p11, p12=... |
class TestDetail(BaseTestCase):
def test_detail(self):
create_instance(quantity=3)
pk = Example.objects.all()[0].pk
view = DetailView.as_view(model=Example)
response = self.get(view, pk=pk)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.template... |
class TestPrivacyExperience():
def test_get_experiences_by_region(self, db):
(queried_overlay_exp, queried_pc_exp) = PrivacyExperience.get_overlay_and_privacy_center_experience_by_region(db, PrivacyNoticeRegion.us_tx)
assert (queried_overlay_exp is None)
assert (queried_pc_exp is None)
... |
class TestValidateMethod():
def test_non_field_error_validate_method(self):
class ExampleSerializer(serializers.Serializer):
char = serializers.CharField()
integer = serializers.IntegerField()
def validate(self, attrs):
raise serializers.ValidationError('N... |
def get_compiled_contract(contract_path: pathlib.Path, contract_name: str) -> Dict[(str, str)]:
    """Return the solc ``combined.json`` entry for one contract.

    Looks up ``combined.json`` under the directory produced by
    ``derive_compile_path`` and returns the ``'<path>:<name>'`` record
    from its ``contracts`` mapping.
    """
    combined_json_path = derive_compile_path(contract_path) / 'combined.json'
    with combined_json_path.open() as fh:
        compiled = json.load(fh)
    return compiled['contracts'][f'{contract_path}:{contract_name}']
class Refspec(object):
def __init__(self, owner, ptr):
self._owner = owner
self._refspec = ptr
def src(self):
return ffi.string(C.git_refspec_src(self._refspec)).decode()
def dst(self):
return ffi.string(C.git_refspec_dst(self._refspec)).decode()
def force(self):
... |
class FilterGroup(object):
def __init__(self, label):
self.label = label
self.filters = []
def append(self, filter):
self.filters.append(filter)
def non_lazy(self):
filters = []
for item in self.filters:
copy = dict(item)
copy['operation'] = as... |
def _build_goods_datamodel(good_ids: List[str], is_supply: bool) -> DataModel:
good_quantities_attributes = [Attribute(good_id, int, True, 'A good on offer.') for good_id in good_ids]
ledger_id_attribute = Attribute('ledger_id', str, True, 'The ledger for transacting.')
currency_attribute = Attribute('curre... |
_renderer(wrap_type=ColumnQuantileMetric)
class ColumnQuantileMetricRenderer(MetricRenderer):
def _get_counters(metric_result: ColumnQuantileMetricResult) -> BaseWidgetInfo:
counters = [CounterData.float(label='Quantile', value=metric_result.quantile, precision=3), CounterData.float(label='Quantile value (c... |
class EventHandlerWrapper():
__slots__ = ['event', 'f']
def __init__(self, event: str, f: EventHandler):
self.event = event
self.f = f
async def __call__(self, handler: Handler, scope: Scope, receive: Receive, send: Send, event: Event) -> Tuple[(Optional[EventHandler], None)]:
task =... |
class MultipleHeaderFieldL2(base_tests.SimpleDataPlane):
def runTest(self):
logging.info('Running Multiple Header Field L2 test')
of_ports = config['port_map'].keys()
of_ports.sort()
self.assertTrue((len(of_ports) > 1), 'Not enough ports for test')
delete_all_flows(self.contr... |
.usefixtures('use_tmpdir')
def test_that_giving_non_float_values_give_config_validation_error():
test_config_file_name = 'test.ert'
test_config_contents = dedent('\n NUM_REALIZATIONS 1\n ENKF_ALPHA hello\n ')
with open(test_config_file_name, 'w', encoding='utf-8') as fh:
fh.wr... |
def python3_message_handler(addr, data):
global monitor, patch, prefix, output_scale, output_offset
monitor.debug(('addr = %s, data = %s' % (addr, data)))
key = (prefix + addr.replace('/', '.'))
val = EEGsynth.rescale(data, slope=output_scale, offset=output_offset)
patch.setvalue(key, val)
monit... |
def get_format(loc: str) -> Optional[BaseFormat]:
loc = Gio.File.new_for_uri(loc).get_path()
if (not loc):
return None
ext = os.path.splitext(loc)[1]
ext = ext[1:]
ext = ext.lower()
try:
formatclass = formats[ext]
except KeyError:
return None
if (formatclass is No... |
class OriginInspectorHistoricalMetaFilters(ModelNormal):
allowed_values = {}
validations = {}
_property
def additional_properties_type():
return (bool, date, datetime, dict, float, int, list, str, none_type)
_nullable = False
_property
def openapi_types():
return {'region': (... |
def test_qstrings():
assert_parse('hello [world]', ['hello ', TemplateVariable('world', index=0)])
assert_parse('hello [[world]]', ['hello [[world]]'])
assert_parse('hello [[[world]]]', ['hello [[', TemplateVariable('world', index=0), ']]'])
assert_parse('hello [[[[world]]]]', ['hello [[[[world]]]]'])
... |
class Test2(event.Component):
other = event.ComponentProp(None, settable=True)
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.data = []
('other.data')
def track_data(self, *events):
for ev in events:
if (ev.mutation == 'set'):
self.data[... |
def extractSodachipsWordpressCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type)... |
def reconstruct_adapter(flags: Namespace, config: RuntimeConfig, manifest: Manifest, macro_manifest: MacroManifest) -> BaseAdapter:
from dbt.flags import set_flags
from dbt.tracking import do_not_track
do_not_track()
set_flags(flags)
factory.load_plugin(config.credentials.type)
factory.load_plug... |
class S3Downloader(object):
def __init__(self):
pass
def download_from_s3(self, bucket_url, file_name, destination):
s3_url = ((bucket_url + '/') + file_name)
response = requests.get(s3_url)
response.raise_for_status()
with open(destination, 'wb') as f:
f.writ... |
class OptionPlotoptionsBoxplotZones(Options):
def className(self):
return self._config_get(None)
def className(self, text: str):
self._config(text, js_type=False)
def color(self):
return self._config_get(None)
def color(self, text: str):
self._config(text, js_type=False)
... |
def qidianSmartFeedFetch(params, rid, joburl, netloc, job_aggregator_instance):
print('qidianSmartFeedFetch', params, rid, joburl, netloc)
sess = db.get_db_session(flask_sess_if_possible=False)
have = sess.query(db.QidianFeedPostMeta).order_by(desc(db.QidianFeedPostMeta.id)).limit(500).all()
meta_dict =... |
def test_use_map_contains():
    """Verify membership semantics of the use map built by ``define_use_map``.

    Variables with an explicit SSA label (3 and 1) must be present; the
    unlabeled variant of ``v`` must not be.
    """
    _, use_map = define_use_map()
    assert Variable('u', Integer.int32_t()) in use_map
    assert Variable('v', Integer.int32_t(), 3) in use_map
    assert Variable('v', Integer.int32_t(), 1) in use_map
    assert Variable('v', Integer.int32_t()) not in use_map
def buildopts_from_args(args, progress_callback=None):
buildopts = {'timeout': args.timeout, 'chroots': args.chroots, 'background': args.background, 'progress_callback': progress_callback}
if (args.enable_net is not None):
buildopts['enable_net'] = ON_OFF_MAP[args.enable_net]
for opt in ['exclude_ch... |
_metaclass(abc.ABCMeta)
class ServerTransport(Transport):
def __init__(self, service_name, metrics=noop_metrics, instance_index=1):
super(ServerTransport, self).__init__(service_name, metrics)
self.instance_index = instance_index
def receive_request_message(self):
def send_response_message(s... |
def filter_bounds(things, key, *point_bounds):
def default_getter(thing):
if isinstance(thing, BikeShareStation):
return (thing.latitude, thing.longitude)
return (thing[0], thing[1])
key = (key or default_getter)
bounds = []
for pb in point_bounds:
if (isinstance(pb, ... |
class ResponseException(Exception):
def __init__(self, message, status=500, error_class=None, error_type=None, **kwargs):
super(ResponseException, self).__init__(message)
self.status = status
self.errorType = error_type
self.extraInfo = {}
for key in kwargs:
self.... |
def get_xy_value_lists(self: GridProperty, grid: Optional[(Grid | GridProperty)]=None, mask: Optional[bool]=None) -> XYValueLists:
if (grid is None):
raise RuntimeError('Missing grid object')
if (not isinstance(grid, xtgeo.grid3d.Grid)):
raise RuntimeError('The input grid is not a XTGeo Grid ins... |
_all_methods(bind_proxy)
class BuildProxy(BaseProxy):
def get(self, build_id):
endpoint = '/build/{0}'.format(build_id)
response = self.request.send(endpoint=endpoint)
return munchify(response)
def get_source_chroot(self, build_id):
endpoint = '/build/source-chroot/{0}'.format(bu... |
def _parse_schema_with_repo(schema, repo, named_schemas, write_hint, injected_schemas):
try:
schema_copy = deepcopy(named_schemas)
return parse_schema(schema, named_schemas=named_schemas, _write_hint=write_hint)
except UnknownType as error:
missing_subject = error.name
try:
... |
.use_numba
.parametrize('weights', [None, np.ones((8, 8))])
def test_gb_eqs_small_data(coordinates_small, data_small, weights):
eqs = EquivalentSourcesGB(depth=1000.0, damping=None, window_size=1000.0, random_state=42)
eqs.fit(coordinates_small, data_small, weights=weights)
npt.assert_allclose(data_small, e... |
class TableAlterType(BaseAlterType):
CHANGE_ROW_FORMAT = 'change_row_format'
CHANGE_TABLE_KEY_BLOCK_SIZE = 'change_table_key_block_size'
CHANGE_TABLE_CHARSET = 'change_table_charset'
CHANGE_TABLE_COLLATE = 'change_table_collate'
CHANGE_TABLE_COMMENT = 'change_table_comment'
CHANGE_ENGINE = 'chan... |
class RandomizeEnvTransform():
def __init__(self, transform, randomize_environments=False):
self.apply = transform
self.randomize_environments = randomize_environments
def __call__(self, x: torch.Tensor, N: Optional[int]=None):
x = ToTensorIfNot()(x)
single_img = (x.ndim == 3)
... |
class ImproveCode(_BaseCodeTool):
def run(self, code: str, suggestions: List[str]):
func = 'def generate_improved_code(suggestions: str, code: str) -> str:'
desc = 'Improves the provided code based on the suggestions provided, making no other changes.'
return ai_function(func, desc, [code, s... |
class LoggingMixin(InterceptedStreamsMixin):
def __init__(self, *args, **kwargs):
stdout_logfile_path = kwargs.pop('stdout_logfile_path', construct_logger_file_path('geth', 'stdout'))
stderr_logfile_path = kwargs.pop('stderr_logfile_path', construct_logger_file_path('geth', 'stderr'))
super(... |
class RegressionQualityMetricResults(MetricResult):
class Config():
dict_exclude_fields = {'hist_for_plot', 'vals_for_plots', 'me_hist_for_plot'}
pd_exclude_fields = {'hist_for_plot', 'vals_for_plots', 'me_hist_for_plot'}
columns: DatasetColumns
current: MoreRegressionMetrics
reference: ... |
def download_zip(path=settings.FREEZE_ZIP_PATH, name=settings.FREEZE_ZIP_NAME):
response = StreamingHttpResponse(FileWrapper(open(path), 8192), content_type='application/zip')
response['Content-Length'] = os.path.getsize(path)
response['Content-Disposition'] = f'attachment; filename={name}'
return respo... |
def validate_model(model, val_loader):
print('Validating the model')
model.eval()
y_true = []
y_pred = []
fnames = []
running_loss = 0.0
criterion = nn.CrossEntropyLoss()
with torch.no_grad():
for (step, (mfcc, mfcc_lengths, mel, mol, lid, fname)) in enumerate(val_loader):
... |
(name=IS_ROUTE_REFLECTOR_CLIENT)
def validate_is_route_reflector_client(is_route_reflector_client):
    """Validate that the route-reflector-client flag is a real boolean.

    Returns the flag unchanged when it is a ``bool``; otherwise raises
    ``ConfigValueError`` describing the offending value.
    """
    if isinstance(is_route_reflector_client, bool):
        return is_route_reflector_client
    raise ConfigValueError(desc=('Invalid is_route_reflector_client(%s)' % is_route_reflector_client))
def start_and_end_dates_from_fyq(fiscal_year, fiscal_quarter):
if (fiscal_quarter == 1):
start_date = datetime.date((fiscal_year - 1), 10, 1)
end_date = datetime.date((fiscal_year - 1), 12, 31)
elif (fiscal_quarter == 2):
start_date = datetime.date(fiscal_year, 1, 1)
end_date = d... |
(name='vrf.update')
def update_vrf(**kwargs):
route_dist = kwargs.get(ROUTE_DISTINGUISHER)
vrf_id = kwargs.get(ConfWithId.ID)
vrf_rf = kwargs.get(VRF_RF)
vrf_conf = CORE_MANAGER.vrfs_conf.get_vrf_conf(route_dist, vrf_rf, vrf_id=vrf_id)
if (not vrf_conf):
create_vrf(**kwargs)
else:
... |
def extract_manifest_url(result):
if (('manifest_url' in result) and get_adaptive_type_from_url(result['manifest_url'])):
return result['manifest_url']
if ('requested_formats' not in result):
return None
for entry in result['requested_formats']:
if (('manifest_url' in entry) and ('vc... |
class table_features_stats_reply(stats_reply):
version = 5
type = 19
stats_type = 12
def __init__(self, xid=None, flags=None, entries=None):
if (xid != None):
self.xid = xid
else:
self.xid = None
if (flags != None):
self.flags = flags
e... |
class Profiles():
    """Loader for the predefined opensnitch firewall profiles stored on disk."""

    @staticmethod
    def load_predefined_profiles():
        """Load every ``*.profile`` JSON file from the system profiles directory.

        Returns:
            list[dict]: one ``{basename: parsed_json}`` dict per profile file;
            an empty list when the directory is absent or empty.
        """
        # NOTE: the original definition had neither `self` nor @staticmethod,
        # so calling it on an instance raised TypeError; @staticmethod fixes
        # that while keeping class-level calls working unchanged.
        profile_paths = glob.glob('/etc/opensnitchd/system-fw.d/profiles/*.profile')
        p = []
        for pr_path in profile_paths:
            with open(pr_path) as f:
                p.append({os.path.basename(pr_path): json.load(f)})
        return p
class Stories(AbstractObject):
def __init__(self, api=None):
super(Stories, self).__init__()
self._isStories = True
self._api = api
class Field(AbstractObject.Field):
creation_time = 'creation_time'
media_id = 'media_id'
media_type = 'media_type'
post_id =... |
class HexEdge(SuperEnum):
    # Enumerates the six edges of a hexagonal tile, counter-clockwise from east.
    # Each member is a (id, title, short, arrow) tuple matching __keys__.
    # NOTE(review): the 'arrow' strings are empty here — presumably arrow
    # glyphs were lost to a text-encoding issue; confirm against upstream.
    __keys__ = ['id', 'title', 'short', 'arrow']
    east = (1, 'East', 'E', '')
    north_east = (2, 'North East', 'NE', '')
    north_west = (3, 'North West', 'NW', '')
    west = (4, 'West', 'W', '')
    south_west = (5, 'South West', 'SW', '')
    south_east = (6, 'South East', 'SE', '')
class OptionPlotoptionsPolygonTooltipDatetimelabelformats(Options):
def day(self):
return self._config_get('%A, %e %b %Y')
def day(self, text: str):
self._config(text, js_type=False)
def hour(self):
return self._config_get('%A, %e %b, %H:%M')
def hour(self, text: str):
se... |
(suppress_health_check=[HealthCheck.function_scoped_fixture])
.parametrize('fformat', ['grdecl', 'bgrdecl'])
(grid_properties())
def test_read_write_roundtrip(tmp_path, fformat, grid_property):
filename = (tmp_path / f'gridprop.{fformat}')
grid_property.to_file(filename, fformat=fformat, name=grid_property.name... |
def test_not_force_defaults_text_explicit_empty_value():
html = '<input type="text" name="text-1" class="my_text" value="i like this text" />'
expected_html = '<input type="text" name="text-1" class="my_text" value="" />'
rendered_html = htmlfill.render(html, defaults={'text-1': ''}, force_defaults=False)
... |
class TestPyTorchEstimator(unittest.TestCase):
def setUp(self) -> None:
add_dl_on_flink_jar()
self.env = StreamExecutionEnvironment.get_execution_environment()
self.env.add_jars('file://{}'.format(find_jar_path()))
self.t_env = StreamTableEnvironment.create(self.env)
self.sta... |
def run_server(application: WSGIApplication, *, threads: int=4, host: Optional[str]=None, port: int=8080, unix_socket: Optional[str]=None, unix_socket_perms: int=384, backlog: int=1024, static: Iterable[Tuple[(str, str)]]=(), static_cors: Optional[str]=None, script_name: str='', shutdown_timeout: float=60.0, **kwargs: ... |
('/query', methods=['POST'])
def query():
data = request.get_json()
question = data.get('question')
if question:
try:
response = App().query(question)
return (jsonify({'data': response}), 200)
except Exception:
logging.exception(f'Failed to query question=... |
class gemm_rcr_bias_softmax(gemm_rcr_softmax):
def __init__(self):
super().__init__()
self._attrs['op'] = 'gemm_rcr_bias_softmax'
def _infer_shapes(self, a: Tensor, b: Tensor, bias: Tensor):
bias_shape = bias._attrs['shape']
if (len(bias_shape) != 1):
raise RuntimeErr... |
class OptionSeriesSolidgaugeSonificationDefaultspeechoptionsMappingTime(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str):
... |
def setMayaTweaks():
base_palette = QtGui.QApplication.palette()
LIGHT_COLOR = QtGui.QColor(100, 100, 100)
MID_COLOR = QtGui.QColor(68, 68, 68)
tab_palette = QtGui.QPalette(base_palette)
tab_palette.setBrush(QtGui.QPalette.Window, QtGui.QBrush(LIGHT_COLOR))
tab_palette.setBrush(QtGui.QPalette.Bu... |
_eh
class AyaRoundfanHandler(THBEventHandler):
interested = ['action_after']
execute_after = ['DyingHandler']
card_usage = 'drop'
def handle(self, evt_type, act):
if ((evt_type == 'action_after') and isinstance(act, Damage)):
if (not act.succeeded):
return act
... |
def test_read_byte(la: LogicAnalyzer, slave: I2CSlave):
la.capture(2, block=False)
slave.read_byte(REGISTER_ADDRESS)
la.stop()
(scl, sda) = la.fetch_data()
assert (len(scl) == (((((SCL_START + (SCL_WRITE * 2)) + SCL_RESTART) + SCL_WRITE) + SCL_READ) + SCL_STOP))
assert (len(sda) == (((((SDA_STAR... |
class DcAdmin(admin.ModelAdmin):
    """Admin configuration for Dc objects: list columns, filtering, and save hook."""

    form = DcAdminForm
    list_display = ('name', 'alias', 'owner', 'access')
    list_filter = ('access',)

    def save_model(self, request, obj, form, change):
        """Copy the validated JSON payload onto the object, then delegate the save."""
        obj.json = form.cleaned_data['json']
        return super().save_model(request, obj, form, change)
class io_dict_base64_test_case(io_dict_test_case):
def test_from_base64_with_valid_data(self):
j = 'eyJhIjogMSwgImIiOiAyLCAiYyI6IDN9'
d = IODict.from_base64(j)
self.assertTrue(isinstance(d, dict))
self.assertEqual(d, {'a': 1, 'b': 2, 'c': 3})
d = IODict(j, format='base64')
... |
.WebInterfaceUnitTestConfig(database_mock_class=DbMock)
class TestAppBinarySearch():
def test_app_binary_search_get(self, test_client):
response = test_client.get('/database/binary_search').data.decode()
assert ('<h3 class="mb-3">Binary Pattern Search</h3>' in response)
def test_app_binary_searc... |
class TestCreateRegionMask():
def test_shape(self):
_test_eq(region_mask(1, 5, 7, 10).shape, (1, 10))
_test_eq(region_mask(2, 3, 7, 12).shape, (2, 12))
_test_eq(region_mask(4, 0, 3, 3).shape, (4, 3))
def test_max(self):
with patch('torch.rand', side_effect=[torch.Tensor([[[[1.0]]... |
def extractSparrowtranslations(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
if ('WATTT' in item['tags']):
return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, ... |
class GsuiteMemberIterator(ResourceIterator):
def iter(self):
gsuite = self.client
try:
for (data, _) in gsuite.iter_gsuite_group_members(self.resource['id']):
if (data['type'] == 'USER'):
(yield FACTORIES['gsuite_user_member'].create_new(data))
... |
class ChosenInlineResult(JsonDeserializable):
def de_json(cls, json_string):
if (json_string is None):
return None
obj = cls.check_json(json_string)
obj['from_user'] = User.de_json(obj.pop('from'))
if ('location' in obj):
obj['location'] = Location.de_json(obj... |
class LatencyTests(unittest.TestCase):
def test_latency_default_zero(self):
l = Latency()
self.assertEqual((l._sys.numerator, l._sys.denominator), (0, 1))
def test_latency_sys_init(self):
for n in [0, 1, 2, 4, 9]:
l = Latency(sys=n)
self.assertEqual((l._sys.numera... |
def extractInterimmtlBlogspotCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('Man Man Qing Luo', 'Man Man Qing Luo', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous... |
def gen_good_prec1():
def good_prec1(n: size, m: size, x: f32[(n, m)], y: f32[(n, m)], res: f64[(n, m)]):
for i in seq(0, n):
rloc: f64[m]
xloc: f32[m]
yloc: f32[m]
for j in seq(0, m):
xloc[j] = x[(i, j)]
for j in seq(0, m):
... |
def display_search_results(user_query_text, google_findings, links):
    """Render the query as a heading and each finding with its source link.

    Does nothing when the query text is empty. ``links[i]`` is assumed to be
    the source URL for ``google_findings[i]``.
    """
    if not len(user_query_text) > 0:
        return
    st.markdown('---')
    st.markdown(f'# {user_query_text}')
    for idx, finding in enumerate(google_findings):
        st.markdown(markdown_litteral(finding) + f' [Source]({links[idx]})')
def checkForEmptyStationList(category, focusPlace, stationList, jumps):
if stationList:
return
if jumps:
raise NoDataError('Local database has no price data for any stations within {} jumps of {} ({})'.format(jumps, focusPlace.name(), category))
if isinstance(focusPlace, System):
rai... |
.parametrize('opt_cls, opt_kwargs_, neb_kwargs_, ref_cycle, between', [(SteepestDescent, {}, {}, 30, 5), (SteepestDescent, {}, {}, 32, 10), (ConjugateGradient, {}, {}, 44, 5), (QuickMin, {'dt': 0.1}, {}, 27, 5), (FIRE, {'dt_max': 0.2}, {}, 42, 5), (LBFGS, {'gamma_mult': True}, {}, 12, 5)])
def test_anapot_neb(opt_cls, ... |
def __add_arrows_prefix(df: pd.DataFrame, column: str):
def get_arrow(v):
if (v == 0):
return ''
arrow_down = ''
arrow_up = ''
arrow = (arrow_down if (v > 0) else arrow_up)
return f'{arrow} {abs(v)}'
if df.empty:
return df
ndf = df.copy()
ndf[c... |
def init(**context):
options = {}
if context['options']:
options.update(dict([split(x, ':') for x in split(context['options'], ',')]))
if ('dbtype' not in options):
raise error.SnmpsimError('database type not specified')
db = __import__(options['dbtype'], globals(), locals(), options['db... |
class TestReset():
def test_reset_password_status_codes(self, openapi_dict):
route = openapi_dict['paths']['/reset-password']['post']
assert (list(route['responses'].keys()) == ['200', '400', '422'])
def test_forgot_password_status_codes(self, openapi_dict):
route = openapi_dict['paths']... |
def convert_dataset(dataset_from_flink: tf.data.TFRecordDataset, input_col_names: List[str], input_types: List[str], feature_cols: List[str], label_col: str, batch_size: int) -> tf.data.Dataset:
default_tensor_map = {'STRING': tf.constant([''], dtype=tf.string), 'INT_32': tf.constant([0], dtype=tf.int32), 'INT_64':... |
def get_mwfn_exc_str(energies, Xa, Ya=None, Xb=None, Yb=None, thresh=0.001):
spin_a = ('A' if (Xb is not None) else '')
assert (len(energies) == (len(Xa) + 1)), 'Found too few energies. Is the GS energy missing?'
exc_energies = ((energies[1:] - energies[0]) * AU2EV)
(nstates, occ_mos, _) = Xa.shape
... |
def check_database(tdb, name, x, y, z):
cur = tdb.query('\n SELECT name, pos_x, pos_y, pos_z\n FROM System\n WHERE pos_x BETWEEN ? and ?\n AND pos_y BETWEEN ? and ?\n AND pos_z BETWEEN ? and ?\n ', [(x - 0.5), (x + 0.5), (y - 0.5), (y + 0.5), (z - 0.5), (z + 0.5)])
... |
def example(page):
gender = ft.RadioGroup(content=ft.Row([ft.Radio(value='female', label='Female'), ft.Radio(value='male', label='Male'), ft.Radio(value='not_specified', label='Not specified')]))
choice_of_instrument = ft.Dropdown(label='Choice of instrument', options=[ft.dropdown.Option('Piano'), ft.dropdown.O... |
class MercuryChassis():
def __init__(self):
self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._sock.connect(('192.168.123.167', 9000))
self.recv = threading.Thread(target=self.check_move_end, daemon=True)
self.recv.start()
self.move_end = False
def check... |
.parametrize('elasticapm_client', [{'transaction_max_spans': 5}], indirect=True)
def test_structlog_processor_span(elasticapm_client):
transaction = elasticapm_client.begin_transaction('test')
with capture_span('test') as span:
event_dict = {}
new_dict = structlog_processor(None, None, event_dic... |
def gen_width_warning_str(freq_res, bwl):
output = '\n'.join(['', ('WARNING: Lower-bound peak width limit is < or ~= the frequency resolution: ' + '{:1.2f} <= {:1.2f}'.format(bwl, freq_res)), ('\tLower bounds below frequency-resolution have no effect ' + '(effective lower bound is the frequency resolution).'), '\tT... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.