code stringlengths 281 23.7M |
|---|
def hide_password(config: dict) -> Dict:
    """Return a deep copy of *config* with password values masked.

    Handles two layouts: a mapping of option-name -> option-dict, or a
    single option-dict carrying ``type``/``value`` keys directly.
    The input mapping is never mutated.
    """
    masked = copy.deepcopy(config)
    try:
        # Assume a collection of option dicts first.
        for option in masked.values():
            if option['type'] == 'password':
                option['value'] = '****'
    except TypeError:
        # Fall back: *masked* itself is a single option dict, so the values
        # iterated above were plain strings and subscripting them raised.
        if masked['type'] == 'password':
            masked['value'] = '****'
    return masked
# NOTE(review): the line below looks like a pytest decorator that lost its
# '@pytest.mark' prefix during extraction ('@pytest.mark.parametrize(...)')
# — confirm against the original file.
.parametrize('currentlayout_manager', [lazy_fixture('temp_icons')], indirect=True)
def test_currentlayouticon_bad_icon(currentlayout_manager, logger):
    """Loading a bad icon must emit a WARNING record during setup."""
    recs = logger.get_records('setup')
    assert recs
    assert (recs[0].levelname == 'WARNING')
    assert recs[0].msg.startswith('Failed to load icon')
class OptionPlotoptionsSolidgaugeSonificationDefaultspeechoptionsMappingVolume(Options):
    # NOTE(review): every getter/setter pair below shares a name, so the
    # second definition shadows the first.  In the original (generated)
    # source these were presumably decorated with ``@property`` /
    # ``@<name>.setter``; the decorators appear stripped during extraction
    # — confirm against the generator output before restoring them.
    def mapFunction(self):
        # Read the configured mapping function (default: None).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Store the mapping function; js_type=False marks it a plain value.
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class Scanner():
    """CLI driver that loads and runs a scan module against a crawl database.

    Option parsing happens in two passes: the first honours only -h and -m
    (so custom module paths are registered before the scanner name is
    validated), the second applies the remaining options, overriding
    defaults read from the database's crawl info.
    """

    def get_modules_file(self, path):
        """Return module names (file basenames without extension) of every
        ``*.py`` file in *path*, skipping dunder files like __init__.py."""
        files = glob.glob(os.path.join(path, '*.py'))
        return [os.path.basename(m).split('.')[0] for m in files if (not m.endswith('__.py'))]

    def __init__(self, argv, db_file=None):
        """Parse *argv*, load crawl settings from the database and run the
        selected scanner.

        argv: getopt-style list: options followed by <scanner> <db_file>
            and optional scanner-specific arguments.
        db_file: optional database path; when given it may replace the
            positional <db_file> argument.
        Exits the process on usage errors (sys.exit).
        """
        self.scanners = self.get_modules_file(os.path.join(getrealdir(__file__), 'scanners'))
        num_threads = None
        request_types = None
        display_progress = True
        modules_path = None
        proxy = None
        cookies = None
        user_agent = None
        extra_headers = None
        try:
            (opts, args) = getopt.getopt(argv, 'hn:r:vm:p:U:c:E:')
        except getopt.GetoptError as err:
            print(str(err))
            sys.exit(1)
        # First pass: -h and -m only, so custom module paths are available
        # before the scanner name is validated below.
        for (o, v) in opts:
            if (o == '-h'):
                self.usage()
                sys.exit(0)
            elif (o == '-m'):
                modules_path = v
                self.scanners.extend(self.get_modules_file(modules_path))
                sys.path.append(modules_path)
        if (len(args) < 2):
            # The positional db_file may be omitted when one was passed in.
            if ((not db_file) or (len(args) == 0)):
                self.usage()
                sys.exit(1)
            args.append(db_file)
        self.scanner = args[0]
        self.db_file = (args[1] if (not db_file) else db_file)
        db = Database(self.db_file)
        crawl_info = db.get_crawl_info()
        try:
            proxy = json.loads(crawl_info['proxy'])
            cookies = json.loads(crawl_info['cookies'])
            extra_headers = json.loads(crawl_info['extra_headers'])
            if (not extra_headers):
                extra_headers = {}
            user_agent = crawl_info['user_agent']
        except KeyError:
            # Older databases may lack these columns; fall back to defaults
            # and command-line overrides.
            print('Unable to read proxy, cookies and user_agent from db.. maybe db created vith an old version . . .')
        # Second pass: remaining options override values from the database.
        for (o, v) in opts:
            if (o == '-n'):
                num_threads = int(v)
            elif (o == '-v'):
                display_progress = False
            elif (o == '-r'):
                request_types = v
            elif (o == '-p'):
                if (v == '0'):
                    proxy = None
                else:
                    try:
                        proxy = parse_proxy_string(v)
                    except Exception as e:
                        print(e)
                        sys.exit(1)
            elif (o == '-c'):
                try:
                    cookies = parse_cookie_string(v)
                # BUG FIX: was a bare `except:`, which also swallowed
                # SystemExit / KeyboardInterrupt.
                except Exception:
                    print('Unable to decode cookies')
                    sys.exit(1)
            elif (o == '-U'):
                user_agent = v
            elif (o == '-E'):
                if (not extra_headers):
                    extra_headers = {}
                (hn, hv) = v.split('=', 1)
                extra_headers[hn] = hv
        scanner_argv = args[2:]
        if self.scanner not in self.scanners:
            print(('Available scanners are:\n %s' % '\n '.join(sorted(self.scanners))))
            sys.exit(1)
        if (not os.path.exists(self.db_file)):
            print(('No such file %s' % self.db_file))
            sys.exit(1)
        try:
            mod = importlib.import_module(('core.scan.scanners.%s' % self.scanner))
        except Exception as e:
            if modules_path:
                # Fall back to a module living in the custom -m path.
                try:
                    mod = importlib.import_module(self.scanner)
                except Exception as e1:
                    raise e1
            else:
                raise e
        try:
            # Scanner classes are named after their module, capitalized.
            run = getattr(mod, self.scanner.title())
            run(self.db_file, num_threads, request_types, display_progress, scanner_argv, proxy, cookies, user_agent, extra_headers)
        except Exception as e:
            print(('Error : %s' % e))
            return
        print('Scan finished')

    def usage(self):
        """Print CLI usage text, including the discovered scanner names."""
        print((('\nUsage: scan [options] <scanner> <db_file> [scanner_options]\nOptions: \n -h this help\n -n THREADS number of parallel threads\n -r REQUEST_TYPES comma separated list of request types to pass to the scanner\n -m PATH path to custom modules dir\n -v verbose mode\n -p PROXY | 0 proxy, set to 0 to disable default (default: crawler)\n -U USERAGENT user agent (default: crawler)\n -c STRING | PATH cookies (default: request)\n -E HEADER extra http headers (default: crawler)\n\nScanner Options: \n those are scanner-specific options (if available), you should try -h ..\n\nAvailable scanners are:\n - ' + '\n - '.join(sorted(self.scanners))) + '\n\n'))
# NOTE(review): the two lines below look like Flask route decorators that
# lost their leading '@' during extraction ('@_ns.route(...)'); as written
# they are bare expressions evaluated at import time — confirm upstream.
_ns.route('/get-build-task/<task_id>/')
_ns.route('/get-build-task/<task_id>')
def get_build_task(task_id):
    """Return the JSON representation of a single build task.

    On CoprHttpException the error message is returned as JSON with the
    exception's HTTP status code instead of propagating the exception.
    """
    try:
        task = BuildsLogic.get_build_task(task_id)
        build_record = get_build_record(task)
        return flask.jsonify(build_record)
    except CoprHttpException as ex:
        jsonout = flask.jsonify({'msg': str(ex)})
        jsonout.status_code = ex.code
        return jsonout
def test_get_latest_removed_firmware(comparison_db, backend_db, admin_db):
    """Deleting one firmware of a compared pair removes the comparison result."""
    fw_one, fw_two, compare_dict, _ = _create_comparison()
    backend_db.insert_multiple_objects(fw_one, fw_two)
    comparison_db.add_comparison_result(compare_dict)
    page = comparison_db.page_comparison_results(limit=10)
    assert page != [], 'A compare result should be available'
    # Removing one side of the comparison must invalidate the result.
    admin_db.delete_firmware(fw_two.uid)
    page = comparison_db.page_comparison_results(limit=10)
    assert page == [], 'No compare result should be available'
class MainLibTests(TestCase):
    """Tests for memory_analyzer's root / PID-validation helpers.

    NOTE(review): the bare string lines such as
    ('memory_analyzer.memory_analyzer.os.geteuid') appear to be
    '@mock.patch(...)' decorators whose '@mock.patch' prefix was stripped
    during extraction — confirm against the original file.  Each test's
    mock_* parameters correspond to those patches (bottom-up order).
    """
    ('memory_analyzer.memory_analyzer.os.geteuid')
    def test_is_root_passes(self, mock_geteuid):
        # euid 0 means root.
        mock_geteuid.return_value = 0
        self.assertTrue(memory_analyzer.is_root())
    ('memory_analyzer.memory_analyzer.os.geteuid')
    def test_is_root_fails(self, mock_geteuid):
        mock_geteuid.return_value = 42
        self.assertFalse(memory_analyzer.is_root())
    ('memory_analyzer.memory_analyzer.os.kill')
    def test_validate_pids_with_valid_pids(self, mock_kill):
        ctx = param = mock.MagicMock()
        mock_kill.side_effect = fake_kill
        self.assertEqual([42], memory_analyzer.validate_pids(ctx, param, [42]))
    ('memory_analyzer.memory_analyzer.os.geteuid')
    ('memory_analyzer.memory_analyzer.os.kill')
    def test_validate_pids_kills_with_signal_value_zero(self, mock_kill, mock_geteuid):
        # Signal 0 only probes the process; no actual kill is delivered.
        mock_geteuid.return_value = 42
        ctx = param = mock.MagicMock()
        memory_analyzer.validate_pids(ctx, param, [42, 314])
        mock_kill.assert_has_calls([mock.call(42, 0), mock.call(314, 0)])
    ('memory_analyzer.memory_analyzer.os.geteuid')
    ('memory_analyzer.memory_analyzer.os.kill')
    def test_validate_pids_with_an_invalid_pid_and_no_root(self, mock_kill, mock_geteuid):
        # EPERM without root privileges should surface as a UsageError.
        mock_geteuid.return_value = 42
        ctx = param = mock.MagicMock()
        mock_kill.side_effect = partial(fake_kill, errno=errno.EPERM)
        with self.assertRaises(click.UsageError):
            memory_analyzer.validate_pids(ctx, param, [42, 314])
    ('memory_analyzer.memory_analyzer.os.geteuid')
    ('memory_analyzer.memory_analyzer.os.kill')
    def test_validate_pids_with_an_invalid_pid_and_error_is_not_permission_related(self, mock_kill, mock_geteuid):
        # ESRCH (no such process) should surface as a BadParameter.
        mock_geteuid.return_value = 42
        ctx = param = mock.MagicMock()
        mock_kill.side_effect = partial(fake_kill, errno=errno.ESRCH)
        with self.assertRaises(click.BadParameter):
            memory_analyzer.validate_pids(ctx, param, [314])
    def test_check_positive_int_valid(self):
        ctx = param = mock.MagicMock()
        self.assertEqual(0, memory_analyzer.check_positive_int(ctx, param, 0))
        self.assertEqual(42, memory_analyzer.check_positive_int(ctx, param, 42))
    def test_check_positive_int_invalid(self):
        ctx = param = mock.MagicMock()
        with self.assertRaises(click.BadParameter):
            memory_analyzer.check_positive_int(ctx, param, (- 1))
class TestRateLimiter():
    """Unit tests for ``bodhi_logging.RateLimiter``."""

    @staticmethod
    def _make_record(lineno=3):
        """Build an INFO record attributed to /my/file.py:<lineno>."""
        return test_log.makeRecord('test_name', logging.INFO, '/my/file.py', lineno, 'beep boop', tuple(), None)

    def test_filter_new_record(self):
        """A record never seen before passes the filter."""
        limiter = bodhi_logging.RateLimiter()
        assert limiter.filter(self._make_record())

    def test_filter_false(self):
        """A record inside the rate window is suppressed."""
        record = self._make_record()
        limiter = bodhi_logging.RateLimiter(rate=2)
        limiter._sent['/my/file.py:3'] = record.created - 1
        assert not limiter.filter(record)

    def test_rate_is_used(self):
        """A record exactly at the rate boundary passes."""
        record = self._make_record()
        limiter = bodhi_logging.RateLimiter(rate=2)
        limiter._sent['/my/file.py:3'] = record.created - 2
        assert limiter.filter(record)

    def test_rate_limited(self):
        """A second emission of the same record is filtered out."""
        record = self._make_record()
        limiter = bodhi_logging.RateLimiter(rate=60)
        assert limiter.filter(record)
        assert not limiter.filter(record)

    def test_different_lines(self):
        """Records from different source lines are limited independently."""
        limiter = bodhi_logging.RateLimiter()
        assert limiter.filter(self._make_record(3))
        assert limiter.filter(self._make_record(4))
class AccountSerializer(TypeclassSerializerMixin, serializers.ModelSerializer):
    """Serializer for DefaultAccount objects.

    Several fields are exposed through SerializerMethodField; only the
    getter for ``session_ids`` is defined in this class.
    """
    attributes = serializers.SerializerMethodField()
    nicks = serializers.SerializerMethodField()
    db_key = serializers.CharField(required=False)
    session_ids = serializers.SerializerMethodField()
    tags = serializers.SerializerMethodField()
    aliases = serializers.SerializerMethodField()
    permissions = serializers.SerializerMethodField()

    def get_session_ids(self, obj):
        """Return the session ids of all of the account's active sessions.

        BUG FIX: DRF invokes ``get_<field>(obj)`` as a *bound* method, so the
        signature must be ``(self, obj)``.  The original omitted ``self``,
        which made the serializer instance bind to the ``obj`` slot and the
        real argument raise TypeError at serialization time.
        """
        return [sess.sessid for sess in obj.sessions.all() if hasattr(sess, 'sessid')]

    class Meta():
        model = DefaultAccount
        fields = (['username', 'session_ids', 'nicks'] + TypeclassSerializerMixin.shared_fields)
        read_only_fields = ['id']
class GLU(Activation):
    """Gated Linear Unit.

    Splits the trailing dimension of the input in half and gates the first
    half with ``activation`` applied to the second half.
    """

    def __init__(self, activation: Activation) -> None:
        super().__init__()
        self.activation = activation

    def __repr__(self):
        return f'{self.__class__.__name__}(activation={self.activation})'

    def forward(self, x: Tensor) -> Tensor:
        # The split below requires an even trailing dimension.
        assert x.shape[-1] % 2 == 0, 'Non-batch input dimension must be divisible by 2'
        value, gate = x.chunk(2, dim=-1)
        return value * self.activation(gate)
def main(args=None):
    """Write a HiCExplorer threshold file.

    The output contains two header lines followed by one
    ``<bin-start>\\t<threshold>`` row per genomic position covered by the
    requested range at the given resolution.
    """
    args = parse_arguments().parse_args(args)
    with open(args.outFileName, 'w') as out:
        header = ("# Threshold file of HiCExplorer's hicCreateThresholdFile version "
                  + str(__version__) + '\n'
                  + '# Standard threshold {}\n'.format(args.thresholdValue))
        out.write(header)
        # Mirror a positive range start onto the negative side.
        if args.range[0] > 0:
            args.range[0] = -args.range[0]
        for position in range(args.range[0], args.range[1] + args.resolution, args.resolution):
            out.write('{}\t{}\n'.format(position, args.thresholdValue))
class AgentConfig(ABC, BaseModel):
    """Base configuration model for an agent (pydantic BaseModel)."""
    tools: List[Tool] = []
    instructions: str = ''
    description: str = ''
    system_template: str = ''
    model: str = 'gpt-4'
    stream: bool = False
    verbose: bool = False
    max_iterations: int = 50
    abs_max_tokens: int = 8192
    max_token_percentage: float = 0.9
    # NOTE(review): hard-codes 0.9 * 8192, duplicating abs_max_tokens and
    # max_token_percentage above — overriding either of those fields does
    # NOT update this value.  It is also unannotated, unlike its siblings
    # (in pydantic that means it is a class attribute, not a model field);
    # confirm whether it should be a derived value instead.
    max_tokens = int((0.9 * 8192))
    temperature: float = 0.7
    class Config():
        # Allow non-pydantic types (e.g. Tool) as field values.
        arbitrary_types_allowed = True
    provider = LLMProvider.OPENAI
class BaseInstance(BaseClass):
    """Trait type whose value must be an instance of a given class, or —
    depending on the ``adapt`` policy — an object adaptable to it."""

    # Default adaptation policy ('yes' / 'no' / 'default'); subclasses override.
    adapt_default = 'no'

    def __init__(self, klass=None, factory=None, args=None, kw=None, allow_none=True, adapt=None, module=None, **metadata):
        """Create the trait.

        klass: class (or dotted class name) values must be instances of.
        factory: callable (or name) producing the default value, called with
            ``args`` / ``kw``.  Several abbreviated calling conventions are
            supported and normalised below.
        allow_none: whether None is an acceptable value.
        adapt: 'yes', 'no' or 'default' — adaptation behaviour for values
            that are not already instances.
        module: module name used to resolve a string ``klass``.
        Raises TraitError on missing class or malformed arguments.
        """
        if (klass is None):
            raise TraitError(('A %s trait must have a class specified.' % self.__class__.__name__))
        metadata.setdefault('copy', 'deep')
        metadata.setdefault('instance_handler', '_instance_changed_handler')
        adapt = (adapt or self.adapt_default)
        if (adapt not in AdaptMap):
            raise TraitError("'adapt' must be 'yes', 'no' or 'default'.")
        # Normalise the abbreviated calling conventions into the canonical
        # (klass, factory, args, kw) form.
        if isinstance(factory, tuple):
            if (args is None):
                # Instance(klass, args_tuple): factory defaults to klass.
                (args, factory) = (factory, klass)
            elif isinstance(args, dict):
                # Instance(factory, args_tuple, kw_dict) passed positionally.
                (factory, args, kw) = (klass, factory, args)
        elif ((kw is None) and isinstance(factory, dict)):
            # Instance(klass, kw_dict): factory defaults to klass.
            (kw, factory) = (factory, klass)
        elif (((args is not None) or (kw is not None)) and (factory is None)):
            factory = klass
        self._allow_none = allow_none
        self.adapt = AdaptMap[adapt]
        self.module = (module or get_module_name())
        if isinstance(klass, str):
            # Deferred class: resolved lazily by resolve_class().
            self.klass = klass
        else:
            if (not isinstance(klass, type)):
                # An instance was passed; use its class.
                klass = klass.__class__
            self.klass = klass
            self.init_fast_validate()
        value = factory
        if (factory is not None):
            if (args is None):
                args = ()
            if (kw is None):
                if isinstance(args, dict):
                    kw = args
                    args = ()
                else:
                    kw = {}
            elif (not isinstance(kw, dict)):
                raise TraitError("The 'kw' argument must be a dictionary.")
            if ((not callable(factory)) and (not isinstance(factory, str))):
                if ((len(args) > 0) or (len(kw) > 0)):
                    raise TraitError("'factory' must be callable")
            else:
                # The default is produced lazily via create_default_value().
                self.default_value_type = DefaultValue.callable_and_args
                value = (self.create_default_value, (factory, *args), kw)
        self.default_value = value
        super().__init__(value, **metadata)

    def validate(self, object, name, value):
        """Validate *value*, adapting it to ``self.klass`` when permitted.

        adapt == 0 requires an exact instance; adapt == 1 requires a
        successful adaptation; otherwise the default value is returned when
        adaptation fails.
        """
        from traits.adaptation.api import adapt
        if (value is None):
            if self._allow_none:
                return value
            self.validate_failed(object, name, value)
        if isinstance(self.klass, str):
            # Late resolution of a class given by name.
            self.resolve_class(object, name, value)
        if (self.adapt == 0):
            if isinstance(value, self.klass):
                return value
            else:
                self.validate_failed(object, name, value)
        result = adapt(value, self.klass, None)
        if (result is not None):
            return result
        if isinstance(value, self.klass):
            return value
        if (self.adapt == 1):
            self.validate_failed(object, name, value)
        else:
            # adapt == 'default': fall back to the trait's default value.
            result = self.default_value
            if (self.default_value_type == DefaultValue.callable_and_args):
                return result[0](*result[1], **result[2])
            else:
                return result

    def info(self):
        """Return a human-readable description used in error messages."""
        klass = self.klass
        if (not isinstance(klass, str)):
            klass = klass.__name__
        if (self.adapt == 0):
            result = class_of(klass)
        else:
            result = ('an implementor of, or can be adapted to implement, %s' % klass)
        if self._allow_none:
            return (result + ' or None')
        return result

    def clone(self, default_value=NoDefaultSpecified, **metadata):
        """Clone the trait, optionally overriding ``allow_none``."""
        allow_none = metadata.pop('allow_none', None)
        clone_of_self = super().clone(default_value=default_value, **metadata)
        if (allow_none is not None):
            clone_of_self._allow_none = allow_none
        return clone_of_self

    def create_editor(self):
        """Return a TraitsUI InstanceEditor configured from trait metadata."""
        from traitsui.api import InstanceEditor
        return InstanceEditor(label=(self.label or ''), view=(self.view or ''), kind=(self.kind or 'live'))

    def create_default_value(self, *args, **kw):
        """Instantiate the default: args[0] is the factory class (or name)."""
        klass = args[0]
        if isinstance(klass, str):
            klass = self.validate_class(self.find_class(klass))
            if (klass is None):
                raise TraitError(('Unable to locate class: ' + args[0]))
        return klass(*args[1:], **kw)

    def allow_none(self):
        """Switch the trait to accept None and refresh fast validation."""
        self._allow_none = True
        self.init_fast_validate()

    def init_fast_validate(self):
        # Hook for subclasses to set up C-level fast validation; no-op here.
        pass

    def resolve_class(self, object, name, value):
        """Resolve a string ``klass`` to a real class, then re-wire the
        (possibly container-wrapped) trait to use fast validation."""
        super().resolve_class(object, name, value)
        self.init_fast_validate()
        trait = object.base_trait(name)
        handler = trait.handler
        if (handler is not self):
            set_validate = getattr(handler, 'set_validate', None)
            if (set_validate is not None):
                # Container traits expose set_validate to refresh themselves.
                set_validate()
            else:
                item_trait = getattr(handler, 'item_trait', None)
                if ((item_trait is not None) and (item_trait.handler is self)):
                    # This trait is the item handler of a container trait.
                    trait = item_trait
                    handler = self
                else:
                    return
        if (handler.fast_validate is not None):
            trait.set_validate(handler.fast_validate)
class TestSfxUnpacker(TestUnpackerBase):
    """Tests for the SFX (self-extracting archive) unpacker plugin."""

    def test_unpacker_selection_generic(self):
        # The plugin must be selected for both ELF and PE mime types.
        for mime_type in ('application/x-executable', 'application/x-dosexec'):
            self.check_unpacker_selection(mime_type, 'SFX')

    def test_normal_elf_is_skipped(self):
        files, meta_data = self.unpacker.extract_files_from_file(
            os.path.join(TEST_DATA_DIR, 'test_elf_normal'), self.tmp_dir.name
        )
        assert not files, 'no file should be extracted'
        assert 'will not be extracted' in meta_data['output']

    def test_normal_pe_with_rsrc_directory(self):
        files, meta_data = self.unpacker.extract_files_from_file(
            os.path.join(TEST_DATA_DIR, 'test_rsrc'), self.tmp_dir.name
        )
        assert not files, 'no file should be extracted'
        assert 'will not be extracted' in meta_data['output']

    def test_with_self_extracting_archives(self):
        # Genuine SFX payloads (ELF and PE) must unpack the standard set.
        for sample in ('test_elf_sfx', 'test_pe_sfx'):
            self.check_unpacking_of_standard_unpack_set(
                os.path.join(TEST_DATA_DIR, sample), additional_prefix_folder='get_files_test', output=True
            )
class DataLoaderVisWrapper():
    """Wraps a train data loader and periodically writes visualizations of
    the loaded batches to tensorboard, up to a configured image budget."""

    def __init__(self, cfg, tbx_writer, data_loader, visualizer: Optional[Type[VisualizerWrapper]]=None):
        self.tbx_writer = tbx_writer
        self.data_loader = data_loader
        self._visualizer = visualizer(cfg) if visualizer else VisualizerWrapper(cfg)
        self.log_frequency = cfg.TENSORBOARD.TRAIN_LOADER_VIS_WRITE_PERIOD
        self.log_limit = cfg.TENSORBOARD.TRAIN_LOADER_VIS_MAX_IMAGES
        self.batch_log_limit = cfg.TENSORBOARD.TRAIN_LOADER_VIS_MAX_BATCH_IMAGES
        for limit in (self.log_frequency, self.log_limit, self.batch_log_limit):
            assert limit >= 0
        # Total number of images still allowed to be written.
        self._remaining = self.log_limit

    def __iter__(self):
        for batch in self.data_loader:
            self._maybe_write_vis(batch)
            yield batch

    def _maybe_write_vis(self, data):
        try:
            storage = get_event_storage()
        except AssertionError:
            # Not inside a training loop -> nothing to log.
            return
        if self.log_frequency == 0 or storage.iter % self.log_frequency != 0 or self._remaining <= 0:
            return
        # Respect both the per-batch cap and the remaining global budget.
        length = min(len(data), self.batch_log_limit, self._remaining)
        data = data[:length]
        self._remaining -= length
        for i, per_image in enumerate(data):
            vis_image = self._visualizer.visualize_train_input(per_image)
            tag = [f'train_loader_batch_{storage.iter}']
            if 'dataset_name' in per_image:
                tag.append(per_image['dataset_name'])
            if 'file_name' in per_image:
                tag.extend([f'img_{i}', per_image['file_name']])
            if isinstance(vis_image, dict):
                # One image per key, suffixed onto the tag path.
                for key in vis_image:
                    self.tbx_writer._writer.add_image(tag='/'.join(tag + [key]), img_tensor=vis_image[key], global_step=storage.iter, dataformats='HWC')
            else:
                self.tbx_writer._writer.add_image(tag='/'.join(tag), img_tensor=vis_image, global_step=storage.iter, dataformats='HWC')
# NOTE(review): the line below looks like a pytest marker that lost its
# '@pytest.mark' prefix during extraction ('@pytest.mark.skipif(...)') —
# confirm against the original file.
.skipif((WatchFilesReload is None), reason='watchfiles not available')
def test_should_watch_one_dir_cwd(mocker, reload_directory_structure):
    """When all reload dirs live under the cwd, only the cwd is watched."""
    mock_watch = mocker.patch('uvicorn.supervisors.watchfilesreload.watch')
    app_dir = (reload_directory_structure / 'app')
    app_first_dir = (reload_directory_structure / 'app_first')
    with as_cwd(reload_directory_structure):
        config = Config(app='tests.test_config:asgi_app', reload=True, reload_dirs=[str(app_dir), str(app_first_dir)])
        WatchFilesReload(config, target=run, sockets=[])
        mock_watch.assert_called_once()
        assert (mock_watch.call_args[0] == (Path.cwd(),))
class Outport2(base_tests.SimpleDataPlane):
    """Verify the switch ignores the out_port field when a flow is added
    and later modified (out_port is only meaningful for delete/modify
    matching, not for flow installation)."""

    def runTest(self):
        logging.info('Running Outport2 test ')
        # BUG FIX: dict.keys() returns a view on Python 3, which has no
        # .sort(); sorted() is equivalent and works on Python 2 as well.
        of_ports = sorted(config['port_map'].keys())
        self.assertTrue((len(of_ports) > 1), 'Not enough ports for test')
        # NOTE(review): of_ports[2] below requires at least 3 ports, while
        # the guard above only ensures 2 — confirm the intended minimum.
        delete_all_flows(self.controller)
        logging.info('Adding and modifying flow with out_port fields set')
        logging.info('Expecting switch to ignore out_port')
        # Install a wildcard flow, then verify / exercise it before and
        # after modifying its action list.
        (pkt, match) = wildcard_all_except_ingress(self, of_ports)
        verify_tablestats(self, expect_active=1)
        send_packet(self, pkt, of_ports[0], of_ports[1])
        modify_flow_action(self, of_ports, match)
        verify_tablestats(self, expect_active=1)
        send_packet(self, pkt, of_ports[0], of_ports[2])
class OptionPlotoptionsFunnelOnpoint(Options):
    # NOTE(review): the duplicated ``id`` definitions below suggest stripped
    # ``@property`` / ``@id.setter`` decorators (the second def shadows the
    # first as written) — confirm against the generator output.
    def connectorOptions(self) -> 'OptionPlotoptionsFunnelOnpointConnectoroptions':
        # Sub-configuration object for the connector.
        return self._config_sub_data('connectorOptions', OptionPlotoptionsFunnelOnpointConnectoroptions)
    def id(self):
        return self._config_get(None)
    def id(self, text: str):
        self._config(text, js_type=False)
    def position(self) -> 'OptionPlotoptionsFunnelOnpointPosition':
        # Sub-configuration object for the position.
        return self._config_sub_data('position', OptionPlotoptionsFunnelOnpointPosition)
class OptionSeriesTreegraphSonificationDefaultspeechoptions(Options):
    # NOTE(review): the duplicated getter/setter definitions below suggest
    # stripped ``@property`` / ``@<name>.setter`` decorators (each second
    # def shadows the first as written) — confirm against the generator.
    def activeWhen(self) -> 'OptionSeriesTreegraphSonificationDefaultspeechoptionsActivewhen':
        return self._config_sub_data('activeWhen', OptionSeriesTreegraphSonificationDefaultspeechoptionsActivewhen)
    def language(self):
        # Speech language (default: 'en-US').
        return self._config_get('en-US')
    def language(self, text: str):
        self._config(text, js_type=False)
    def mapping(self) -> 'OptionSeriesTreegraphSonificationDefaultspeechoptionsMapping':
        return self._config_sub_data('mapping', OptionSeriesTreegraphSonificationDefaultspeechoptionsMapping)
    def pointGrouping(self) -> 'OptionSeriesTreegraphSonificationDefaultspeechoptionsPointgrouping':
        return self._config_sub_data('pointGrouping', OptionSeriesTreegraphSonificationDefaultspeechoptionsPointgrouping)
    def preferredVoice(self):
        return self._config_get(None)
    def preferredVoice(self, text: str):
        self._config(text, js_type=False)
    def showPlayMarker(self):
        # Whether to show the play marker (default: True).
        return self._config_get(True)
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)
    def type(self):
        # Track type (default: 'speech').
        return self._config_get('speech')
    def type(self, text: str):
        self._config(text, js_type=False)
def create_transaction(transaction, block, transaction_index, is_pending, overrides=None):
    """Fill in derived transaction fields and ensure the result has a hash.

    When the filled transaction lacks a ``'hash'`` key, a fake RLP hash is
    attached via ``assoc`` (so the filled mapping itself is not mutated).
    """
    filled = _fill_transaction(transaction, block, transaction_index, is_pending, overrides)
    if 'hash' not in filled:
        return assoc(filled, 'hash', fake_rlp_hash(filled))
    return filled
def extractErokitsune707TumblrCom(item):
    """Parser for 'erokitsune707.tumblr.com' feed items.

    Returns a release message for items carrying a known tag, None for
    previews or untitled chapters, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def main():
    """Entry point for the Flatpak NPM generator.

    Scans one or more package lockfiles and writes the flatpak-builder
    sources JSON to the requested output file.
    """
    parser = argparse.ArgumentParser(description='Flatpak NPM generator')
    parser.add_argument('lockfile', type=str)
    parser.add_argument('-o', type=str, dest='sourcesOutFile', default='generated-sources.json')
    parser.add_argument('--production', action='store_true', default=False)
    parser.add_argument('--recursive', action='store_true', default=False)
    parser.add_argument('--npm3', action='store_true', default=False)
    args = parser.parse_args()

    include_devel = not args.production
    npm3 = args.npm3
    sourcesOutFile = args.sourcesOutFile

    if args.recursive:
        # Walk the tree for every lockfile matching the given name.
        import glob
        lockfiles = glob.iglob(('**/%s' % args.lockfile), recursive=True)
    else:
        lockfiles = [args.lockfile]

    # Initial shell source links the build dir into a stable location.
    sources = [{'type': 'shell', 'commands': ['ln -fs `pwd` /var/tmp/build-dir']}]
    patches = []
    seen = {}
    for lockfile in lockfiles:
        print(('Scanning "%s" ' % lockfile), file=sys.stderr)
        with open(lockfile, 'r') as f:
            root = json.loads(f.read())
        s = getModuleSources(root, None, seen, include_devel=include_devel, npm3=npm3)
        sources.extend(s['sources'])
        patches.extend(s['patches'])
        print((' ... %d new sources' % len(s['sources'])), file=sys.stderr)
    print(('%d total sources' % len(sources)), file=sys.stderr)
    if patches:
        sources.append({'type': 'shell', 'commands': patches})
    print(('Writing to "%s"' % sourcesOutFile))
    with open(sourcesOutFile, 'w') as f:
        f.write(json.dumps(sources, indent=4, sort_keys=True))
class StatusLayer(AbstractOverlay):
    """Chaco overlay that renders an SVG status icon over (or under) a plot
    component, with an optional timed fade-out."""
    filename = Str()
    document = Instance(SVGDocument)
    # Fallback document size; replaced by the SVG's own size when available.
    doc_width = 48.0
    doc_height = 48.0
    type = Enum('overlay', 'underlay')
    # Alignment: center, upper-right, upper-left, lower-left, lower-right.
    align = Enum('c', 'ur', 'ul', 'll', 'lr')
    scale_factor = Float(0.5)
    alpha = Float(1.0)
    fade_out_time = Float(50)
    fade_out_steps = Int(10)

    def __init__(self, component, *args, **kw):
        super().__init__(component, *args, **kw)
        if (self.document is None):
            # Default to the bundled error icon when no file was given.
            if (self.filename == ''):
                self.filename = os.path.join(os.path.dirname(__file__), 'data', 'Dialog-error.svg')
            tree = etree.parse(self.filename)
            root = tree.getroot()
            self.document = SVGDocument(root, renderer=KivaRenderer)
        if hasattr(self.document, 'getSize'):
            self.doc_width = self.document.getSize()[0]
            self.doc_height = self.document.getSize()[1]

    def overlay(self, other_component, gc, view_bounds=None, mode='normal'):
        """Draw the SVG scaled relative to the component, honouring align."""
        with gc:
            gc.set_alpha(self.alpha)
            plot_width = self.component.width
            plot_height = self.component.height
            origin_x = self.component.padding_left
            # NOTE(review): the y origin uses padding_top — confirm this is
            # intended (padding_bottom might be expected for a y origin).
            origin_y = self.component.padding_top
            # Scale against the narrower direction of the GC.
            if (gc.height() < gc.width()):
                scale = ((plot_height / self.doc_height) * self.scale_factor)
            else:
                scale = ((plot_width / self.doc_width) * self.scale_factor)
            scale_width = (scale * self.doc_width)
            scale_height = (scale * self.doc_height)
            if (self.align == 'ur'):
                gc.translate_ctm((origin_x + (plot_width - scale_width)), (origin_y + plot_height))
            elif (self.align == 'lr'):
                gc.translate_ctm((origin_x + (plot_width - scale_width)), (origin_y + scale_height))
            elif (self.align == 'ul'):
                gc.translate_ctm(origin_x, (origin_y + plot_height))
            elif (self.align == 'll'):
                gc.translate_ctm(origin_x, (origin_y + scale_height))
            else:
                # Centered placement.
                gc.translate_ctm((origin_x + ((plot_width - scale_width) / 2)), (origin_y + ((plot_height + scale_height) / 2)))
            # Negative y scale flips the SVG's y-down coordinates.
            gc.scale_ctm(scale, (- scale))
            self.document.render(gc)

    def fade_out(self):
        """Start a timer that steps alpha down and finally detaches the layer."""
        interval = (self.fade_out_time / self.fade_out_steps)
        self.timer = Timer(interval, self._fade_out_step)

    def _fade_out_step(self):
        # Timer callback: once fully transparent, remove this layer from the
        # component and reset alpha for possible reuse.
        if (self.alpha <= 0):
            if (self.type == 'overlay'):
                self.component.overlays.remove(self)
            else:
                self.component.underlays.remove(self)
            self.alpha = 1.0
            # Presumably signals the Timer to stop firing — confirm the
            # Timer callback contract.
            raise StopIteration
        else:
            self.alpha -= 0.1
        self.component.request_redraw()
def _get_kwargs(file: str, *, client: Client) -> Dict[(str, Any)]:
url = '{}/files/file/{file}'.format(client.base_url, file=file)
headers: Dict[(str, str)] = client.get_headers()
cookies: Dict[(str, Any)] = client.get_cookies()
return {'method': 'get', 'url': url, 'headers': headers, 'cookies': cookies, 'timeout': client.get_timeout(), 'follow_redirects': client.follow_redirects} |
# NOTE(review): '.unit' below looks like a pytest marker that lost its
# '@pytest.mark' prefix during extraction ('@pytest.mark.unit') — confirm.
.unit
def test_get_evaluation_policies_with_no_key(test_config: FidesConfig) -> None:
    """With no fides key given, local and server policies are combined and
    the server fetch excludes the locally-known keys."""
    server_policy_1 = Policy(fides_key='fides_key_1', rules=[])
    server_policy_2 = Policy(fides_key='fides_key_2', rules=[])
    local_policy_1 = Policy(fides_key='fides_key_3', rules=[])
    local_policy_2 = Policy(fides_key='fides_key_4', rules=[])
    get_all_server_policies_mock = MagicMock(return_value=[server_policy_1, server_policy_2])
    with patch('fides.core.evaluate.get_all_server_policies', get_all_server_policies_mock):
        policies = evaluate.get_evaluation_policies(local_policies=[local_policy_1, local_policy_2], evaluate_fides_key='', url='url', headers={})
    assert (len(policies) == 4)
    get_all_server_policies_mock.assert_called_with(url='url', headers={}, exclude=['fides_key_3', 'fides_key_4'])
def add_self_loops(pruned_ngrams):
    """Augment the n-gram lists with self-loop variants.

    For every order-o gram (o < max order), each non-boundary token is
    duplicated in place to form an (o+1)-gram, which is appended to the
    next order's list if it is not already known.  ``pruned_ngrams`` is a
    list indexed by order-1 and is modified in place and returned.
    """
    max_order = len(pruned_ngrams)
    known = {gram for grams in pruned_ngrams for gram in grams}
    for order in range(1, max_order):
        for gram in pruned_ngrams[order - 1]:
            for pos, token in enumerate(gram):
                # Never duplicate sentence-boundary markers.
                if token == START_IDX or token == END_IDX:
                    continue
                looped = gram[:pos] + (token,) + gram[pos:]
                if looped not in known:
                    pruned_ngrams[order].append(looped)
                    known.add(looped)
    return pruned_ngrams
# NOTE(review): '_deserializable' below appears to be a decorator that lost
# its prefix during extraction (likely '@register_deserializable') —
# confirm against the original file.
_deserializable
class SitemapChunker(BaseChunker):
    """Chunker for sitemap content using a recursive character splitter."""
    def __init__(self, config: Optional[ChunkerConfig]=None):
        # Defaults: 500-char chunks, no overlap, measured with len().
        if (config is None):
            config = ChunkerConfig(chunk_size=500, chunk_overlap=0, length_function=len)
        text_splitter = RecursiveCharacterTextSplitter(chunk_size=config.chunk_size, chunk_overlap=config.chunk_overlap, length_function=config.length_function)
        super().__init__(text_splitter)
# NOTE(review): the tuple below looks like the argument list of a stripped
# '@mark.parametrize(...)' decorator — confirm against the original file.
('is_partial,expected_params', [(True, Parameters([1, 2, 3])), (False, partial(Parameters))])
def test_instantiate_adam_conf(instantiate_func: Any, is_partial: bool, expected_params: Any) -> None:
    """Instantiating AdamConf without params must fail; with params the
    result must match a directly constructed Adam."""
    with raises(InstantiationException, match="Error in call to target 'tests\\.instantiate\\.Adam':\\nTypeError\\(.*\\)"):
        instantiate_func(AdamConf())
    adam_params = expected_params
    res = instantiate_func(AdamConf(lr=0.123), params=adam_params)
    expected = Adam(lr=0.123, params=adam_params)
    if is_partial:
        # functools.partial results need structural comparison.
        partial_equal(res.params, expected.params)
    else:
        assert (res.params == expected.params)
    assert (res.lr == expected.lr)
    assert (list(res.betas) == list(expected.betas))
    assert (res.eps == expected.eps)
    assert (res.weight_decay == expected.weight_decay)
    assert (res.amsgrad == expected.amsgrad)
class TestListField(FieldValues):
    """Value fixtures and extra tests for ListField(child=IntegerField)."""
    valid_inputs = [
        ([1, 2, 3], [1, 2, 3]),
        (['1', '2', '3'], [1, 2, 3]),
        ([], []),
    ]
    invalid_inputs = [
        ('not a list', ['Expected a list of items but got type "str".']),
        ([1, 2, 'error', 'error'], {2: ['A valid integer is required.'], 3: ['A valid integer is required.']}),
        ({'one': 'two'}, ['Expected a list of items but got type "dict".']),
    ]
    outputs = [
        ([1, 2, 3], [1, 2, 3]),
        (['1', '2', '3'], [1, 2, 3]),
    ]
    field = serializers.ListField(child=serializers.IntegerField())

    def test_no_source_on_child(self):
        """`source=` on the child field must be rejected."""
        with pytest.raises(AssertionError) as exc_info:
            serializers.ListField(child=serializers.IntegerField(source='other'))
        assert str(exc_info.value) == (
            'The `source` argument is not meaningful when applied to a `child=` field. '
            'Remove `source=` from the field declaration.'
        )

    def test_collection_types_are_invalid_input(self):
        """Mappings are not acceptable list input."""
        field = serializers.ListField(child=serializers.CharField())
        with pytest.raises(serializers.ValidationError) as exc_info:
            field.to_internal_value({'one': 'two'})
        assert exc_info.value.detail == ['Expected a list of items but got type "dict".']

    def test_constructor_misuse_raises(self):
        """`child` must be passed as a keyword argument."""
        with pytest.raises(TypeError):
            serializers.ListField(serializers.CharField())
class ServiceAuthorizationsResponse(ModelComposed):
allowed_values = {}
validations = {}
_property
def additional_properties_type():
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type)
_nullable = False
_property
def openapi_types():
lazy_import()
return {'links': (PaginationLinks,), 'meta': (PaginationMeta,), 'data': ([ServiceAuthorizationResponseData],)}
_property
def discriminator():
return None
attribute_map = {'links': 'links', 'meta': 'meta', 'data': 'data'}
read_only_vars = {}
_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs):
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
composed_info = validate_get_composed_info(constant_args, kwargs, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
discarded_args = composed_info[3]
for (var_name, var_value) in kwargs.items():
if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
continue
setattr(self, var_name, var_value)
return self
    # Internal attributes that are set directly on the instance; everything
    # else is routed through the composed-schema setattr machinery.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances'])
    # NOTE(review): bare name; looks like a stripped `@` decorator for __init__.
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Initialize the model from user-supplied keyword arguments.

        Positional arguments raise ApiTypeError; attempting to set an
        attribute listed in `read_only_vars` raises ApiAttributeError.
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Track visited classes to break cycles during composed-schema validation.
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
            # NOTE(review): the read-only check runs *after* setattr, so the
            # attribute is assigned before the error is raised -- this matches
            # the generator's output but looks intentional-to-verify.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
    # NOTE(review): bare name; looks like a stripped cached-property-style
    # `@` decorator for the method below.
    _property
    def _composed_schemas():
        """Return the allOf/anyOf/oneOf schema composition for this model (lazy imports)."""
        lazy_import()
        return {'anyOf': [], 'allOf': [Pagination, ServiceAuthorizationsResponseAllOf], 'oneOf': []}
def create_bi_wn_dict(data: List[List[Tuple[(str, str)]]]) -> Dict[(str, List[Tuple[(str, float)]])]:
    """Build a bigram word->POS probability table from tagged sentences.

    For each token, its POS tag is counted under the *following* word (the
    last token in a sentence counts under the PREV_DUMMY sentinel).  The raw
    counts are converted to probabilities via to_probs().
    """
    counts = dict()
    for sentence in data:
        last_index = len(sentence) - 1
        for i, (_, pos_tag) in enumerate(sentence):
            # Key each tag by the word that follows it; sentence-final tokens
            # fall back to the PREV_DUMMY boundary marker.
            following_word = PREV_DUMMY if i == last_index else sentence[i + 1][0]
            counts.setdefault(following_word, Counter()).update([pos_tag])
    return to_probs(counts)
class OptionSeriesLollipopSonificationTracksMappingPan(Options):
    """Generated Highcharts option wrapper for
    `series.lollipop.sonification.tracks.mapping.pan`.

    Each option appears as a getter/setter pair.  NOTE(review): these pairs
    normally carry @property / @<name>.setter decorators in this kind of
    wrapper; they appear stripped in this copy, so as written each second
    definition shadows the first -- confirm against the generator output.
    """
    def mapFunction(self):
        # Getter: mapping function (no default).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: data property to map to (no default).
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class CloudWorkspace(WorkspaceView):
    """Workspace view backed by the hosted (cloud) metadata service.

    Only metadata lives in the cloud; blob/data/auth backends are no-ops.
    """
    # API token used to authenticate against the cloud service.
    token: str
    # NOTE(review): this string literal is truncated/unterminated in this copy
    # -- it should hold the default service base URL; restore from upstream.
    URL: str = '
    def __init__(self, token: str, team_id: Optional[STR_UUID]=None, url: str=None):
        """Create the view; `url` overrides the default endpoint, `team_id`
        may be a UUID or its string form."""
        self.token = token
        self.url = (url or self.URL)
        # The cloud metadata storage authenticates via the 'app.at' cookie.
        meta = CloudMetadataStorage(base_url=self.url, token=self.token, cookie_name='app.at')
        pm = ProjectManager(metadata=meta, blob=NoopBlobStorage(), data=NoopDataStorage(), auth=NoopAuthManager())
        super().__init__(None, pm, (UUID(team_id) if isinstance(team_id, str) else team_id))
def _build_file_filters():
    """Create the Gtk file filters offered by open dialogs.

    Returns a two-element list: a filter for supported image formats followed
    by an all-files fallback.
    """
    image_filter = Gtk.FileFilter()
    image_filter.set_name(_('Image Files'))
    if os.name == 'nt':
        # On Windows, match by file extension rather than mimetype.
        for extension in get_supported_input_format_exts():
            image_filter.add_pattern('*%s' % extension)
    else:
        for mime in get_supported_input_format_mimetypes():
            image_filter.add_mime_type(mime)
    catch_all_filter = Gtk.FileFilter()
    catch_all_filter.set_name(_('All Files'))
    catch_all_filter.add_pattern('*')
    return [image_filter, catch_all_filter]
_entries.HANDLERS.register(DOMAIN)
# NOTE(review): the line above looks like a truncated
# `@config_entries.HANDLERS.register(DOMAIN)` class decorator -- confirm
# against upstream before relying on registration.
class HPPrinterFlowHandler(config_entries.ConfigFlow):
    """Config flow handler for the HP Printer integration."""
    VERSION = 1
    CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
    def __init__(self):
        super().__init__()
        self._config_flow = ConfigFlowManager()
    # NOTE(review): defined without `self`; upstream presumably marks this
    # @staticmethod/@callback -- the decorators appear stripped here.
    def async_get_options_flow(config_entry):
        """Return the options flow handler for an existing config entry."""
        return HPPrinterOptionsFlowHandler(config_entry)
    async def async_step_user(self, user_input=None):
        """Handle the user-initiated configuration step.

        Shows the form on the first call (user_input is None); validates the
        submitted data and creates the entry once input is provided.
        """
        _LOGGER.debug(f'Starting async_step_user of {DOMAIN}')
        errors = None
        self._config_flow.initialize(self.hass)
        if (user_input is not None):
            self._config_flow.update_data(user_input, True)
            name = self._config_flow.config_data.name
            ha = get_ha(self.hass, name)
            if (ha is None):
                errors = (await self._config_flow.valid_login())
            else:
                _LOGGER.warning(f'{DEFAULT_NAME} ({name}) already configured')
                return self.async_abort(reason='already_configured', description_placeholders=user_input)
            # Bug fix: entry creation must only happen after user input was
            # provided and validated.  Previously this block also ran on the
            # initial call (user_input=None), where `name` was unbound, and
            # crashed with a NameError instead of showing the form.
            if (errors is None):
                _LOGGER.info(f'Storing configuration data: {user_input}')
                return self.async_create_entry(title=name, data=user_input)
        data_schema = self._config_flow.get_default_data()
        return self.async_show_form(step_id='user', data_schema=data_schema, errors=errors)
    async def async_step_import(self, info):
        """Import a config entry from configuration.yaml."""
        _LOGGER.debug(f'Starting async_step_import of {DOMAIN}')
        return self.async_create_entry(title='HPPrinter (import from configuration.yaml)', data=info)
class ImageDisplayer(object):
    """Show images in a single persistent, resizable OpenCV window."""

    # Fixed name so repeated displays reuse the same window.
    _WINDOW_NAME = 'cv2_display_window'

    def __init__(self):
        self._window_name = self._WINDOW_NAME
        cv2.namedWindow(self._window_name, cv2.WINDOW_NORMAL)

    def display(self, image, wait_key_ms=1):
        """Render *image* and pump the GUI event loop for wait_key_ms milliseconds."""
        cv2.imshow(self._window_name, image)
        cv2.waitKey(wait_key_ms)

    def __del__(self):
        # Best-effort teardown when the displayer is garbage-collected.
        cv2.destroyWindow(self._window_name)
def test_get_referenced_versions_returns_a_list_of_Version_instances_even_with_representations(create_test_data, create_pymel, create_maya_env, store_local_session):
    """get_referenced_versions() must resolve references switched to their
    'ASS' representation back to the underlying base Versions, without
    duplicates."""
    test_data = create_test_data
    pymel = create_pymel
    env = create_maya_env
    # Build a simple asset scene and publish it under several takes/versions.
    pymel.newFile(force=True)
    root_node = pymel.nt.Transform(name='Asset2_Take1')
    cube = pymel.polyCube(name='Box1')[0]
    pymel.parent(cube, root_node)
    pymel.runtime.DeleteHistory()
    env.save_as(test_data['asset2_model_main_v003'])
    env.save_as(test_data['asset2_model_take1_v001'])
    env.save_as(test_data['asset2_model_take1_v002'])
    # Generate all representations (including 'ASS') for the saved versions.
    from anima.dcc.mayaEnv.repr_tools import RepresentationGenerator
    generator = RepresentationGenerator()
    generator.version = test_data['asset2_model_take1_v001']
    generator.generate_all()
    generator.version = test_data['asset2_model_take1_v002']
    generator.generate_all()
    generator.version = test_data['asset2_model_main_v003']
    generator.generate_all()
    # Reference some versions multiple times, then flip every reference to
    # its 'ASS' representation.
    env.open(test_data['asset2_model_take1_v003'])
    env.reference(test_data['asset2_model_take1_v001'])
    env.reference(test_data['asset2_model_take1_v002'])
    env.reference(test_data['asset2_model_take1_v002'])
    env.reference(test_data['asset2_model_take1_v001'])
    env.reference(test_data['asset2_model_main_v003'])
    for ref in pymel.listReferences():
        ref.to_repr('ASS')
    referenced_versions = env.get_referenced_versions()
    expected = [test_data['asset2_model_main_v003'], test_data['asset2_model_take1_v001'], test_data['asset2_model_take1_v002']]
    by_name = lambda v: v.name
    assert sorted(referenced_versions, key=by_name) == sorted(expected, key=by_name)
# NOTE(review): the five lines below look like click decorators whose leading
# `@<group>.command` / `@click.argument` / `@click.option` text was lost during
# extraction; as written they are not valid Python -- restore from upstream.
()
('input_dir', type=click.Path(exists=True, file_okay=False))
('--recursive/--no-recursive', default=True, help='Search recursively')
('--visualize/--no-visualize', default=True, help='Visualize the distribution')
('--num-workers', default=os.cpu_count(), help='Number of workers for parallel processing')
def frequency(input_dir: str, recursive: bool, visualize: bool, num_workers: int):
    """Count note occurrences across all .wav files under input_dir and
    optionally plot the distribution as a bar chart."""
    import librosa
    from matplotlib import pyplot as plt
    input_dir = Path(input_dir)
    files = list_files(input_dir, {'.wav'}, recursive=recursive)
    logger.info(f'Found {len(files)} files, calculating frequency')
    counter = Counter()
    # Fan the per-file note counting out over worker processes.
    with ProcessPoolExecutor(max_workers=num_workers) as executor:
        tasks = [executor.submit(count_notes_from_file, file) for file in tqdm(files, desc='Preparing')]
        for i in tqdm(as_completed(tasks), desc='Collecting infos', total=len(tasks)):
            assert (i.exception() is None), i.exception()
            counter += i.result()
    data = sorted(counter.items(), key=(lambda kv: kv[1]), reverse=True)
    for (note, count) in data:
        logger.info(f'{note}: {count}')
    if (not visualize):
        return
    # Re-sort the notes into MIDI pitch order for the chart's x axis.
    x_axis_order = librosa.midi_to_note(list(range(300)))
    data = sorted(counter.items(), key=(lambda kv: x_axis_order.index(kv[0])))
    plt.rcParams['figure.figsize'] = [10, 4]
    plt.rcParams['figure.autolayout'] = True
    plt.bar([x[0] for x in data], [x[1] for x in data])
    plt.xticks(rotation=90)
    plt.title('Notes distribution')
    plt.xlabel('Notes')
    plt.ylabel('Count')
    plt.grid(axis='y', alpha=0.75)
    plt.grid(axis='x', alpha=0.75)
    # Annotate bars accounting for at least 0.1% of all notes.
    total = sum((x[1] for x in data))
    for (i, v) in enumerate((x[1] for x in data)):
        if ((v / total) < 0.001):
            continue
        plt.text((i - 1), (v + 1), f'{((v / total) * 100):.2f}%', color='black', fontweight='bold')
    plt.show()
def updateMeters(dom, game, myTurn):
    """Refresh both player meters (score, turn total, dice) in the DOM.

    When myTurn is None (spectator view), meter A is always player 1 and
    meter B player 2; otherwise meter A shows the local player and meter B
    the opponent.
    """
    if myTurn is None:
        # Spectator: fixed player assignment; only the current player shows
        # live turn/dice values.
        a, b = 1, 2
        turnA, diceA = (game.turn, game.dice) if game.current == 1 else (0, 0)
        turnB, diceB = (game.turn, game.dice) if game.current == 2 else (0, 0)
    else:
        a = game.current if myTurn else getOpponent(game.current)
        b = getOpponent(a)
        turnA, diceA = (game.turn, game.dice) if myTurn else (0, 0)
        turnB, diceB = (0, 0) if myTurn else (game.turn, game.dice)
    updateMeter(dom, 'A', game.scores[a], turnA, diceA)
    updateMeter(dom, 'B', game.scores[b], turnB, diceB)
class IamServiceGroup(ModelComposed):
    """Generated OpenAPI composed model for an IAM service group.

    NOTE(review): the bare `_property` / `_js_args_to_python_args` lines look
    like decorators whose leading `@` was lost during extraction -- confirm
    against the generator's output.
    """
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        """Types accepted for properties not declared in the schema."""
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        """Mapping of attribute name to its accepted (type, ...) tuple."""
        lazy_import()
        return {'created_at': (datetime, none_type), 'updated_at': (datetime, none_type), 'id': (str,), 'object': (str,), 'name': (str,), 'description': (str,), 'services_count': (int,)}
    _property
    def discriminator():
        """Return the discriminator for polymorphic deserialization (none here)."""
        return None
    # Mapping of python attribute names to the JSON keys used on the wire.
    attribute_map = {'created_at': 'created_at', 'updated_at': 'updated_at', 'id': 'id', 'object': 'object', 'name': 'name', 'description': 'description', 'services_count': 'services_count'}
    # Server-populated attributes; settable only via _from_openapi_data.
    read_only_vars = {'created_at', 'updated_at'}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Build an instance from deserialized API data (keyword args only)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Construct the instance directly, bypassing __init__.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Track visited classes to break cycles during composed-schema validation.
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            # Skip kwargs rejected by the composed schemas when unknown keys
            # should be discarded.
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
        return self
    # Internal attributes set directly on the instance.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Initialize from user-supplied keyword arguments; read-only
        attributes raise ApiAttributeError."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
    _property
    def _composed_schemas():
        """Return the allOf/anyOf/oneOf schema composition for this model."""
        lazy_import()
        return {'anyOf': [], 'allOf': [IamServiceGroupAllOf, TimestampsNoDelete], 'oneOf': []}
class SyncedSlidersWrong(SyncedSlidersBase):
    """Demo variant that keeps two sliders in sync the *wrong* way: each
    watcher pushes its latest value into the other slider."""
    TITLE = 'Synced sliders, done wrong'
    # NOTE(review): the bare ('slider1.value') / ('slider2.value') lines look
    # like stripped watcher/`depends`-style decorators for the methods below
    # -- confirm against upstream.
    ('slider1.value')
    def __slider1(self, *events):
        # Mirror the most recent slider1 value onto slider2.
        self.slider2.set_value(events[(- 1)].new_value)
    ('slider2.value')
    def __slider2(self, *events):
        # Mirror the most recent slider2 value onto slider1.
        self.slider1.set_value(events[(- 1)].new_value)
class Proxy(object):
    """Scraper for proxylist-style pages that embed base64-encoded
    'host:port' pairs inside javascript Proxy('...') calls."""
    def __init__(self):
        # NOTE(review): string literal truncated/unterminated in this copy --
        # it should hold the page-number templated listing URL; restore from
        # upstream.
        self.url = '
        # Matches Proxy('<base64 host:port>') occurrences, case-insensitive.
        self.re_ip_port_encode_pattern = re.compile("Proxy\\(\\'([\\w\\d=+]+)\\'\\)", re.I)
        self.cur_proxy = None
        self.proxies = []
        self.result = []
    # NOTE(review): looks like a stripped `@retry(stop_max_attempt_number=3)`
    # decorator for extract_proxy; as written this line is not valid Python
    # -- confirm against upstream.
    (stop_max_attempt_number=3)
    def extract_proxy(self, page_num):
        """Fetch one listing page and return decoded proxy dicts.

        On failure, rotates self.cur_proxy to the next entry in self.proxies
        and re-raises (so a retry decorator can attempt again); returns []
        once no fallback proxies remain.
        """
        try:
            rp = requests.get(self.url.format(page=page_num), proxies=self.cur_proxy, timeout=10)
            re_ip_port_encode_result = self.re_ip_port_encode_pattern.findall(rp.text)
            if (not re_ip_port_encode_result):
                raise Exception('empty')
        except Exception as e:
            logger.error('[-] Request page {page} error: {error}'.format(page=page_num, error=str(e)))
            # Rotate to the next proxy and re-raise; the while/else returns []
            # only when the proxy list is exhausted.
            while self.proxies:
                new_proxy = self.proxies.pop(0)
                self.cur_proxy = {new_proxy['type']: ('%s:%s' % (new_proxy['host'], new_proxy['port']))}
                raise e
            else:
                return []
        re_ip_port_result = []
        for each_result in re_ip_port_encode_result:
            # Entries are base64-encoded 'host:port' strings.
            decode_ip_port = base64.b64decode(each_result).decode('utf-8')
            (host, port) = decode_ip_port.split(':')
            re_ip_port_result.append({'host': host, 'port': int(port), 'from': 'proxylist'})
        return re_ip_port_result
    def start(self):
        """Walk listing pages 1..9, accumulating results until a page is empty."""
        for page in range(1, 10):
            page_result = self.extract_proxy(page)
            time.sleep(3)
            if (not page_result):
                return
            self.result.extend(page_result)
# NOTE(review): the line below looks like a stripped
# `@pytest.mark.parametrize(...)` decorator (the leading `@pytest.mark` text
# was lost); as written it is not valid Python -- restore from upstream.
.parametrize('to_parse, output', [(UnaryOperation(OperationType.negate, [var_x.copy()]), '(~ )'), (UnaryOperation(OperationType.cast, [var_x.copy()]), ''), (UnaryOperation(OperationType.logical_not, [Variable('x', Integer(1))]), '(! )')])
def test_unary_operation(converter, to_parse, output):
    """Each unary operation must convert to the expected logic-world term."""
    w = converter._world
    assert (converter.convert(to_parse) == w.from_string(output))
class TestCheckBalance(unittest.TestCase):
    """Unit tests for BstBalance.check_balance()."""

    def test_check_balance_empty(self):
        # An empty tree must be handled without raising.
        BstBalance(None).check_balance()

    def test_check_balance(self):
        # A single node, then a balanced set of inserts, stays balanced.
        tree = BstBalance(Node(5))
        self.assertEqual(tree.check_balance(), True)
        for key in (3, 8, 1, 4):
            tree.insert(key)
        self.assertEqual(tree.check_balance(), True)
        # A right-heavy chain (8 -> 9 -> 10) breaks the balance.
        tree = BstBalance(Node(5))
        for key in (3, 8, 9, 10):
            tree.insert(key)
        self.assertEqual(tree.check_balance(), False)
        # A fuller tree rooted at 3 balances out again.
        tree = BstBalance(Node(3))
        for key in (2, 1, 5, 4, 6, 7):
            tree.insert(key)
        self.assertEqual(tree.check_balance(), True)
        print('Success: test_check_balance')
class OptionSeriesTreegraphStatesSelectMarker(Options):
    """Generated Highcharts option wrapper for
    `series.treegraph.states.select.marker`.

    Each option appears as a getter/setter pair; the getter's argument to
    _config_get is the documented Highcharts default.  NOTE(review): these
    pairs normally carry @property / @<name>.setter decorators; they appear
    stripped in this copy, so as written each second definition shadows the
    first -- confirm against the generator output.
    """
    def enabled(self):
        return self._config_get(None)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def enabledThreshold(self):
        # Default: markers enabled when closest point distance >= 2px.
        return self._config_get(2)
    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)
    def fillColor(self):
        return self._config_get(None)
    def fillColor(self, text: str):
        self._config(text, js_type=False)
    def height(self):
        return self._config_get(None)
    def height(self, num: float):
        self._config(num, js_type=False)
    def lineColor(self):
        return self._config_get('#ffffff')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        return self._config_get(0)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def radius(self):
        return self._config_get(4)
    def radius(self, num: float):
        self._config(num, js_type=False)
    def width(self):
        return self._config_get(None)
    def width(self, num: float):
        self._config(num, js_type=False)
class OptionSeriesXrange(Options):
    """Generated Highcharts option wrapper for `series.xrange`.

    Scalar options appear as getter/setter pairs: the getter's argument to
    _config_get is the documented Highcharts default; the setter writes the
    value.  Nested option groups return typed sub-wrappers via
    _config_sub_data.  NOTE(review): the getter/setter pairs normally carry
    @property / @<name>.setter decorators; they appear stripped in this
    copy, so as written each setter definition shadows its getter -- confirm
    against the generator output.
    """
    def accessibility(self) -> 'OptionSeriesXrangeAccessibility':
        return self._config_sub_data('accessibility', OptionSeriesXrangeAccessibility)
    def allowPointSelect(self):
        return self._config_get(False)
    def allowPointSelect(self, flag: bool):
        self._config(flag, js_type=False)
    def animation(self):
        return self._config_get(True)
    def animation(self, flag: bool):
        self._config(flag, js_type=False)
    def animationLimit(self):
        return self._config_get(None)
    def animationLimit(self, num: float):
        self._config(num, js_type=False)
    def borderColor(self):
        return self._config_get('#ffffff')
    def borderColor(self, text: str):
        self._config(text, js_type=False)
    def borderRadius(self):
        return self._config_get(3)
    def borderRadius(self, num: float):
        self._config(num, js_type=False)
    def borderWidth(self):
        return self._config_get('undefined')
    def borderWidth(self, num: float):
        self._config(num, js_type=False)
    def centerInCategory(self):
        return self._config_get(False)
    def centerInCategory(self, flag: bool):
        self._config(flag, js_type=False)
    def className(self):
        return self._config_get(None)
    def className(self, text: str):
        self._config(text, js_type=False)
    def clip(self):
        return self._config_get(True)
    def clip(self, flag: bool):
        self._config(flag, js_type=False)
    def color(self):
        return self._config_get(None)
    def color(self, text: str):
        self._config(text, js_type=False)
    def colorAxis(self):
        return self._config_get(0)
    def colorAxis(self, num: float):
        self._config(num, js_type=False)
    def colorByPoint(self):
        return self._config_get(True)
    def colorByPoint(self, flag: bool):
        self._config(flag, js_type=False)
    def colorIndex(self):
        return self._config_get(None)
    def colorIndex(self, num: float):
        self._config(num, js_type=False)
    def colorKey(self):
        return self._config_get('y')
    def colorKey(self, text: str):
        self._config(text, js_type=False)
    def colors(self):
        return self._config_get(None)
    def colors(self, value: Any):
        self._config(value, js_type=False)
    def connectEnds(self):
        return self._config_get(None)
    def connectEnds(self, flag: bool):
        self._config(flag, js_type=False)
    def connectNulls(self):
        return self._config_get(False)
    def connectNulls(self, flag: bool):
        self._config(flag, js_type=False)
    def cursor(self):
        return self._config_get(None)
    def cursor(self, text: str):
        self._config(text, js_type=False)
    def custom(self):
        return self._config_get(None)
    def custom(self, value: Any):
        self._config(value, js_type=False)
    def dashStyle(self):
        return self._config_get('Solid')
    def dashStyle(self, text: str):
        self._config(text, js_type=False)
    def data(self) -> 'OptionSeriesXrangeData':
        return self._config_sub_data('data', OptionSeriesXrangeData)
    def dataLabels(self) -> 'OptionSeriesXrangeDatalabels':
        return self._config_sub_data('dataLabels', OptionSeriesXrangeDatalabels)
    def description(self):
        return self._config_get(None)
    def description(self, text: str):
        self._config(text, js_type=False)
    def dragDrop(self) -> 'OptionSeriesXrangeDragdrop':
        return self._config_sub_data('dragDrop', OptionSeriesXrangeDragdrop)
    def enableMouseTracking(self):
        return self._config_get(True)
    def enableMouseTracking(self, flag: bool):
        self._config(flag, js_type=False)
    def events(self) -> 'OptionSeriesXrangeEvents':
        return self._config_sub_data('events', OptionSeriesXrangeEvents)
    def grouping(self):
        return self._config_get(True)
    def grouping(self, flag: bool):
        self._config(flag, js_type=False)
    def groupPadding(self):
        return self._config_get(0.2)
    def groupPadding(self, num: float):
        self._config(num, js_type=False)
    def groupZPadding(self):
        return self._config_get(1)
    def groupZPadding(self, num: float):
        self._config(num, js_type=False)
    def id(self):
        return self._config_get(None)
    def id(self, text: str):
        self._config(text, js_type=False)
    def inactiveOtherPoints(self):
        return self._config_get(False)
    def inactiveOtherPoints(self, flag: bool):
        self._config(flag, js_type=False)
    def includeInDataExport(self):
        return self._config_get(None)
    def includeInDataExport(self, flag: bool):
        self._config(flag, js_type=False)
    def index(self):
        return self._config_get(None)
    def index(self, num: float):
        self._config(num, js_type=False)
    def keys(self):
        return self._config_get(None)
    def keys(self, value: Any):
        self._config(value, js_type=False)
    def label(self) -> 'OptionSeriesXrangeLabel':
        return self._config_sub_data('label', OptionSeriesXrangeLabel)
    def legendIndex(self):
        return self._config_get(None)
    def legendIndex(self, num: float):
        self._config(num, js_type=False)
    def legendSymbol(self):
        return self._config_get('rectangle')
    def legendSymbol(self, text: str):
        self._config(text, js_type=False)
    def linecap(self):
        # Bug fix: the default is the Highcharts string 'round'; previously
        # this returned the Python builtin `round` function.
        return self._config_get('round')
    def linecap(self, value: Any):
        self._config(value, js_type=False)
    def lineWidth(self):
        return self._config_get(2)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def linkedTo(self):
        return self._config_get(None)
    def linkedTo(self, text: str):
        self._config(text, js_type=False)
    def marker(self) -> 'OptionSeriesXrangeMarker':
        return self._config_sub_data('marker', OptionSeriesXrangeMarker)
    def maxPointWidth(self):
        return self._config_get(None)
    def maxPointWidth(self, num: float):
        self._config(num, js_type=False)
    def minPointLength(self):
        return self._config_get(0)
    def minPointLength(self, num: float):
        self._config(num, js_type=False)
    def name(self):
        return self._config_get(None)
    def name(self, text: str):
        self._config(text, js_type=False)
    def onPoint(self) -> 'OptionSeriesXrangeOnpoint':
        return self._config_sub_data('onPoint', OptionSeriesXrangeOnpoint)
    def opacity(self):
        return self._config_get(1)
    def opacity(self, num: float):
        self._config(num, js_type=False)
    def partialFill(self) -> 'OptionSeriesXrangePartialfill':
        return self._config_sub_data('partialFill', OptionSeriesXrangePartialfill)
    def point(self) -> 'OptionSeriesXrangePoint':
        return self._config_sub_data('point', OptionSeriesXrangePoint)
    def pointDescriptionFormat(self):
        return self._config_get(None)
    def pointDescriptionFormat(self, value: Any):
        self._config(value, js_type=False)
    def pointDescriptionFormatter(self):
        return self._config_get(None)
    def pointDescriptionFormatter(self, value: Any):
        self._config(value, js_type=False)
    def pointPadding(self):
        return self._config_get(0.1)
    def pointPadding(self, num: float):
        self._config(num, js_type=False)
    def pointWidth(self):
        return self._config_get(None)
    def pointWidth(self, num: float):
        self._config(num, js_type=False)
    def relativeXValue(self):
        return self._config_get(False)
    def relativeXValue(self, flag: bool):
        self._config(flag, js_type=False)
    def selected(self):
        return self._config_get(False)
    def selected(self, flag: bool):
        self._config(flag, js_type=False)
    def shadow(self):
        return self._config_get(False)
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)
    def showCheckbox(self):
        return self._config_get(False)
    def showCheckbox(self, flag: bool):
        self._config(flag, js_type=False)
    def showInLegend(self):
        return self._config_get(None)
    def showInLegend(self, flag: bool):
        self._config(flag, js_type=False)
    def skipKeyboardNavigation(self):
        return self._config_get(None)
    def skipKeyboardNavigation(self, flag: bool):
        self._config(flag, js_type=False)
    def sonification(self) -> 'OptionSeriesXrangeSonification':
        return self._config_sub_data('sonification', OptionSeriesXrangeSonification)
    def stack(self):
        return self._config_get(None)
    def stack(self, num: float):
        self._config(num, js_type=False)
    def states(self) -> 'OptionSeriesXrangeStates':
        return self._config_sub_data('states', OptionSeriesXrangeStates)
    def step(self):
        return self._config_get(None)
    def step(self, value: Any):
        self._config(value, js_type=False)
    def stickyTracking(self):
        return self._config_get(False)
    def stickyTracking(self, flag: bool):
        self._config(flag, js_type=False)
    def tooltip(self) -> 'OptionSeriesXrangeTooltip':
        return self._config_sub_data('tooltip', OptionSeriesXrangeTooltip)
    def turboThreshold(self):
        return self._config_get(1000)
    def turboThreshold(self, num: float):
        self._config(num, js_type=False)
    def type(self):
        return self._config_get(None)
    def type(self, text: str):
        self._config(text, js_type=False)
    def visible(self):
        return self._config_get(True)
    def visible(self, flag: bool):
        self._config(flag, js_type=False)
    def xAxis(self):
        return self._config_get(0)
    def xAxis(self, num: float):
        self._config(num, js_type=False)
    def yAxis(self):
        return self._config_get(0)
    def yAxis(self, num: float):
        self._config(num, js_type=False)
    def zIndex(self):
        return self._config_get(None)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
    def zoneAxis(self):
        return self._config_get('y')
    def zoneAxis(self, text: str):
        self._config(text, js_type=False)
    def zones(self) -> 'OptionSeriesXrangeZones':
        return self._config_sub_data('zones', OptionSeriesXrangeZones)
def extractRainycornertlWordpressCom(item):
    """Map a feed item to a release message by its tags.

    Returns None for previews / items with no chapter-or-volume info,
    a release message for a recognised tag, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tag, series_name, tl_type in tagmap:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def check_working_tree_is_dirty() -> None:
    """Exit with status 1 (after printing the diffstat) if `git diff` reports
    uncommitted changes; print a confirmation otherwise."""
    print('Checking whether the Git working tree is dirty...')
    diff_stat = subprocess.check_output(['git', 'diff', '--stat'])
    if not diff_stat:
        print('All good!')
        return
    print('Git working tree is dirty:')
    print(diff_stat.decode('utf-8'))
    sys.exit(1)
class OptionPlotoptionsDependencywheelSonificationDefaultinstrumentoptionsMappingHighpass(Options):
    """Generated Highcharts option wrapper for the highpass-filter mapping in
    `plotOptions.dependencywheel.sonification.defaultInstrumentOptions`."""
    def frequency(self) -> 'OptionPlotoptionsDependencywheelSonificationDefaultinstrumentoptionsMappingHighpassFrequency':
        # Nested sub-options for the highpass cutoff frequency mapping.
        return self._config_sub_data('frequency', OptionPlotoptionsDependencywheelSonificationDefaultinstrumentoptionsMappingHighpassFrequency)
    def resonance(self) -> 'OptionPlotoptionsDependencywheelSonificationDefaultinstrumentoptionsMappingHighpassResonance':
        # Nested sub-options for the highpass resonance mapping.
        return self._config_sub_data('resonance', OptionPlotoptionsDependencywheelSonificationDefaultinstrumentoptionsMappingHighpassResonance)
class Command(DanubeCloudCommand):
    """Management command: run the manual cleanup routine after a failed task."""
    help = 'Manual cleanup after a failed task.'
    args = '<API function name> [API function parameter1=value1 [parameter2=value2] ...]'
    # API views this command knows how to clean up after.
    api_view_names = frozenset(['vm_migrate'])
    # NOTE(review): defined without `self` yet called as self.get_vm(...);
    # upstream presumably marks this @staticmethod -- decorator appears
    # stripped in this copy.
    def get_vm(**params):
        """Look up a Vm by 'uuid' or 'hostname'; raise CommandError if neither is given."""
        from vms.models import Vm
        if ('uuid' in params):
            query = {'uuid': params['uuid']}
        elif ('hostname' in params):
            query = {'hostname': params['hostname']}
        else:
            raise CommandError('Missing "hostname" or "uuid" parameter')
        return Vm.objects.get(**query)
    # NOTE(review): also missing `self`; presumably @staticmethod upstream.
    def run_cleanup(api_view_name, method, obj=None, **api_view_params):
        """Invoke api.task.cleanup.task_cleanup with a synthesized task result."""
        from que.utils import generate_internal_task_id
        from api.task.cleanup import task_cleanup
        api_view_params['view'] = api_view_name
        api_view_params['method'] = method
        # task_cleanup expects the apiview parameters under result['meta'].
        result = {'meta': {'apiview': api_view_params}}
        task_id = generate_internal_task_id()
        return task_cleanup(result, task_id, None, obj=obj)
    def handle(self, api_view_name, *args, **options):
        """Entry point: validate the view name, parse key=value args, run cleanup."""
        if (api_view_name not in self.api_view_names):
            raise CommandError('Unsupported API function')
        # Remaining positional args are 'key=value' pairs.
        params = dict((i.split('=') for i in args))
        if api_view_name.startswith('vm_'):
            obj = self.get_vm(**params)
        else:
            obj = None
        method = params.get('method', 'PUT').upper()
        self.run_cleanup(api_view_name, method, obj=obj, **params)
        self.display('Done.', color='green')
def kinetic3d_10(ax, da, A, bx, db, B):
    """Kinetic-energy integral term for a (1|0) Gaussian pair in 3D.

    Appears to be auto-generated code (common-subexpression x0..x10 naming)
    from a symbolic derivation.  NOTE(review): the meaning of the individual
    factors is not documented here -- confirm against the generator/source
    derivation before modifying.

    ax, bx -- Gaussian exponents; da, db -- contraction/normalization factors;
    A, B   -- 3-component centers (array-like).
    Returns a (3, 1) float numpy array (one row per Cartesian component).
    """
    result = numpy.zeros((3, 1), dtype=float)
    # x0 = 1/(ax+bx); x1, x2, x4 are center-of-charge displaced coordinates.
    x0 = ((ax + bx) ** (- 1.0))
    x1 = (((- x0) * ((ax * A[0]) + (bx * B[0]))) + A[0])
    x2 = (((- x0) * ((ax * A[1]) + (bx * B[1]))) + A[1])
    x3 = (ax * (x0 + (2.0 * (x2 ** 2))))
    x4 = (((- x0) * ((ax * A[2]) + (bx * B[2]))) + A[2])
    x5 = (bx * x0)
    x6 = ((- 2.0) * x5)
    x7 = (((ax * (x0 + (2.0 * (x4 ** 2)))) + x6) - 3.0)
    x8 = (ax * x5)
    # x9 carries the Gaussian product prefactor exp(-ax*bx/(ax+bx) * |A-B|^2).
    x9 = (((((5. * ax) * da) * db) * (x0 ** 1.5)) * numpy.exp(((- x8) * ((((A[0] - B[0]) ** 2) + ((A[1] - B[1]) ** 2)) + ((A[2] - B[2]) ** 2)))))
    x10 = (ax * (x0 + (2.0 * (x1 ** 2))))
    result[(0, 0)] = numpy.sum(((x1 * x9) * (((ax * (x0 + (2.0 * (x1 ** 2)))) + x3) + x7)))
    result[(1, 0)] = numpy.sum(((x2 * x9) * (((ax * (x0 + (2.0 * (x2 ** 2)))) + x10) + x7)))
    result[(2, 0)] = numpy.sum(((x4 * x9) * (((((ax * (x0 + (2.0 * (x4 ** 2)))) + x10) + x3) + x6) - 3.0)))
    return result
class DWARFStructs(object):
def __init__(self, little_endian, dwarf_format, address_size, dwarf_version=2):
assert ((dwarf_format == 32) or (dwarf_format == 64))
assert ((address_size == 8) or (address_size == 4))
self.little_endian = little_endian
self.dwarf_format = dwarf_format
self.address_size = address_size
self.dwarf_version = dwarf_version
self._create_structs()
def initial_length_field_size(self):
return (4 if (self.dwarf_format == 32) else 12)
    def _create_structs(self):
        """Bind primitive field constructors for the configured endianness,
        then build all compound DWARF structs."""
        if self.little_endian:
            # Little-endian primitives (UL*/SL*).
            self.Dwarf_uint8 = ULInt8
            self.Dwarf_uint16 = ULInt16
            self.Dwarf_uint32 = ULInt32
            self.Dwarf_uint64 = ULInt64
            # Section offsets are 4 bytes in 32-bit DWARF, 8 in 64-bit DWARF.
            self.Dwarf_offset = (ULInt32 if (self.dwarf_format == 32) else ULInt64)
            # Target addresses follow the target's address size.
            self.Dwarf_target_addr = (ULInt32 if (self.address_size == 4) else ULInt64)
            self.Dwarf_int8 = SLInt8
            self.Dwarf_int16 = SLInt16
            self.Dwarf_int32 = SLInt32
            self.Dwarf_int64 = SLInt64
        else:
            # Big-endian primitives (UB*/SB*).
            self.Dwarf_uint8 = UBInt8
            self.Dwarf_uint16 = UBInt16
            self.Dwarf_uint32 = UBInt32
            self.Dwarf_uint64 = UBInt64
            self.Dwarf_offset = (UBInt32 if (self.dwarf_format == 32) else UBInt64)
            self.Dwarf_target_addr = (UBInt32 if (self.address_size == 4) else UBInt64)
            self.Dwarf_int8 = SBInt8
            self.Dwarf_int16 = SBInt16
            self.Dwarf_int32 = SBInt32
            self.Dwarf_int64 = SBInt64
        # Compound structs depend on the primitives bound above.
        self._create_initial_length()
        self._create_leb128()
        self._create_cu_header()
        self._create_abbrev_declaration()
        self._create_dw_form()
        self._create_lineprog_header()
        self._create_callframe_entry_headers()
        self._create_aranges_header()
def _create_initial_length(self):
def _InitialLength(name):
return _InitialLengthAdapter(Struct(name, self.Dwarf_uint32('first'), If((lambda ctx: (ctx.first == )), self.Dwarf_uint64('second'), elsevalue=None)))
self.Dwarf_initial_length = _InitialLength
    def _create_leb128(self):
        """Bind the unsigned/signed LEB128 variable-length integer parsers."""
        self.Dwarf_uleb128 = _ULEB128
        self.Dwarf_sleb128 = _SLEB128
    def _create_cu_header(self):
        """Build the compilation-unit header struct (.debug_info)."""
        self.Dwarf_CU_header = Struct('Dwarf_CU_header', self.Dwarf_initial_length('unit_length'), self.Dwarf_uint16('version'), self.Dwarf_offset('debug_abbrev_offset'), self.Dwarf_uint8('address_size'))
    def _create_abbrev_declaration(self):
        """Build the abbreviation-declaration struct (.debug_abbrev): a tag,
        a has-children flag, and attribute specs terminated by the
        (DW_AT_null, DW_FORM_null) sentinel pair."""
        self.Dwarf_abbrev_declaration = Struct('Dwarf_abbrev_entry', Enum(self.Dwarf_uleb128('tag'), **ENUM_DW_TAG), Enum(self.Dwarf_uint8('children_flag'), **ENUM_DW_CHILDREN), RepeatUntilExcluding((lambda obj, ctx: ((obj.name == 'DW_AT_null') and (obj.form == 'DW_FORM_null'))), Struct('attr_spec', Enum(self.Dwarf_uleb128('name'), **ENUM_DW_AT), Enum(self.Dwarf_uleb128('form'), **ENUM_DW_FORM))))
    def _create_dw_form(self):
        """Build the DW_FORM -> field-parser dispatch table used when reading
        attribute values."""
        self.Dwarf_dw_form = dict(DW_FORM_addr=self.Dwarf_target_addr(''), DW_FORM_block1=self._make_block_struct(self.Dwarf_uint8), DW_FORM_block2=self._make_block_struct(self.Dwarf_uint16), DW_FORM_block4=self._make_block_struct(self.Dwarf_uint32), DW_FORM_block=self._make_block_struct(self.Dwarf_uleb128), DW_FORM_data1=self.Dwarf_uint8(''), DW_FORM_data2=self.Dwarf_uint16(''), DW_FORM_data4=self.Dwarf_uint32(''), DW_FORM_data8=self.Dwarf_uint64(''), DW_FORM_sdata=self.Dwarf_sleb128(''), DW_FORM_udata=self.Dwarf_uleb128(''), DW_FORM_string=CString(''), DW_FORM_strp=self.Dwarf_offset(''), DW_FORM_flag=self.Dwarf_uint8(''), DW_FORM_ref1=self.Dwarf_uint8(''), DW_FORM_ref2=self.Dwarf_uint16(''), DW_FORM_ref4=self.Dwarf_uint32(''), DW_FORM_ref8=self.Dwarf_uint64(''), DW_FORM_ref_udata=self.Dwarf_uleb128(''), DW_FORM_ref_addr=self.Dwarf_offset(''), DW_FORM_indirect=self.Dwarf_uleb128(''), DW_FORM_flag_present=StaticField('', 0), DW_FORM_sec_offset=self.Dwarf_offset(''), DW_FORM_exprloc=self._make_block_struct(self.Dwarf_uleb128), DW_FORM_ref_sig8=self.Dwarf_uint64(''), DW_FORM_GNU_strp_alt=self.Dwarf_offset(''), DW_FORM_GNU_ref_alt=self.Dwarf_offset(''), DW_AT_GNU_all_call_sites=self.Dwarf_uleb128(''))
def _create_aranges_header(self):
    # Header of an address-range table in .debug_aranges: unit length,
    # version, back-reference offset into .debug_info, and the sizes (in
    # bytes) of an address and of a segment selector.
    self.Dwarf_aranges_header = Struct('Dwarf_aranges_header', self.Dwarf_initial_length('unit_length'), self.Dwarf_uint16('version'), self.Dwarf_offset('debug_info_offset'), self.Dwarf_uint8('address_size'), self.Dwarf_uint8('segment_size'))
def _create_lineprog_header(self):
    # File-table entry of the line-number program: a name, and — only when
    # the name is non-empty (an empty name terminates the table) — the
    # directory index, modification time, and file length.
    self.Dwarf_lineprog_file_entry = Struct('file_entry', CString('name'), If((lambda ctx: (len(ctx.name) != 0)), Embed(Struct('', self.Dwarf_uleb128('dir_index'), self.Dwarf_uleb128('mtime'), self.Dwarf_uleb128('length')))))
    # Line-number program header (.debug_line). The
    # maximum_operations_per_instruction field only exists in DWARF >= 4
    # (defaulting to 1 otherwise); standard_opcode_lengths has
    # opcode_base - 1 entries; the include-directory and file-entry tables
    # are each terminated by an empty name, excluded from the results.
    self.Dwarf_lineprog_header = Struct('Dwarf_lineprog_header', self.Dwarf_initial_length('unit_length'), self.Dwarf_uint16('version'), self.Dwarf_offset('header_length'), self.Dwarf_uint8('minimum_instruction_length'), If((lambda ctx: (ctx['version'] >= 4)), self.Dwarf_uint8('maximum_operations_per_instruction'), 1), self.Dwarf_uint8('default_is_stmt'), self.Dwarf_int8('line_base'), self.Dwarf_uint8('line_range'), self.Dwarf_uint8('opcode_base'), Array((lambda ctx: (ctx['opcode_base'] - 1)), self.Dwarf_uint8('standard_opcode_lengths')), RepeatUntilExcluding((lambda obj, ctx: (obj == b'')), CString('include_directory')), RepeatUntilExcluding((lambda obj, ctx: (len(obj.name) == 0)), self.Dwarf_lineprog_file_entry))
def _create_callframe_entry_headers(self):
    # CIE header for call-frame information (pre-v4 layout, no
    # address_size/segment_size fields).
    self.Dwarf_CIE_header = Struct('Dwarf_CIE_header', self.Dwarf_initial_length('length'), self.Dwarf_offset('CIE_id'), self.Dwarf_uint8('version'), CString('augmentation'), self.Dwarf_uleb128('code_alignment_factor'), self.Dwarf_sleb128('data_alignment_factor'), self.Dwarf_uleb128('return_address_register'))
    # .eh_frame CIEs always use the pre-v4 layout, so capture it before the
    # DWARF-4 override below.
    self.EH_CIE_header = self.Dwarf_CIE_header
    if (self.dwarf_version == 4):
        # DWARF 4 adds address_size and segment_size to the CIE header.
        self.Dwarf_CIE_header = Struct('Dwarf_CIE_header', self.Dwarf_initial_length('length'), self.Dwarf_offset('CIE_id'), self.Dwarf_uint8('version'), CString('augmentation'), self.Dwarf_uint8('address_size'), self.Dwarf_uint8('segment_size'), self.Dwarf_uleb128('code_alignment_factor'), self.Dwarf_sleb128('data_alignment_factor'), self.Dwarf_uleb128('return_address_register'))
    # FDE header: back-pointer to the owning CIE plus the covered
    # [initial_location, initial_location + address_range) span.
    self.Dwarf_FDE_header = Struct('Dwarf_FDE_header', self.Dwarf_initial_length('length'), self.Dwarf_offset('CIE_pointer'), self.Dwarf_target_addr('initial_location'), self.Dwarf_target_addr('address_range'))
def _make_block_struct(self, length_field):
    """Build a parser for a length-prefixed block of raw bytes.

    *length_field* is the constructor for the length prefix (uint8,
    uint16, uint32 or uleb128); the payload is parsed as uint8 elements.
    """
    prefix = length_field('')
    return PrefixedArray(subcon=self.Dwarf_uint8('elem'), length_field=prefix)
@pytest.mark.parametrize('config_fixture', [None, 'mock_config_changed_db_engine_settings'])
def test_get_task_session(config_fixture, request):
    """Sessions created by DatabaseTask must use the configured pool sizing.

    Runs once against the default configuration and once with the
    'mock_config_changed_db_engine_settings' fixture applied.
    FIX(review): the decorator name was stripped in this copy (the line
    began with bare `.parametrize(`); restored as @pytest.mark.parametrize.
    """
    if config_fixture is not None:
        # Materialize the named fixture so its config overrides take effect.
        request.getfixturevalue(config_fixture)
    pool_size = CONFIG.database.task_engine_pool_size
    max_overflow = CONFIG.database.task_engine_max_overflow
    t = DatabaseTask()
    session: Session = t.get_new_session()
    engine: Engine = session.get_bind()
    pool: QueuePool = engine.pool
    assert pool.size() == pool_size
    assert pool._max_overflow == max_overflow
class TrafficMatrix(object):
    """A traffic-demand matrix mapping (origin, destination) pairs to volumes.

    Flows are stored as a nested dict ``self.flow[origin][destination] ->
    volume``. Self-flows (origin == destination) may be stored, but they are
    excluded from flows()/od_pairs() and hence from iteration and len().
    """

    def __init__(self, volume_unit='Mbps', flows=None):
        """Create a matrix with the given unit; *flows* is an optional
        pre-built nested dict. Raises ValueError for an unknown unit."""
        if volume_unit not in capacity_units:
            raise ValueError('The volume_unit argument is not valid')
        self.attrib = {'volume_unit': volume_unit}
        # Per-instance dict; a shared mutable default is deliberately avoided.
        self.flow = flows if flows is not None else {}

    def __iter__(self):
        return iter(self.flows())

    def __len__(self):
        return len(self.flows())

    def __contains__(self, item):
        origin, destination = item
        return origin in self.flow and destination in self.flow[origin]

    def __getitem__(self, key):
        origin, destination = key
        return self.flow[origin][destination]

    def __setitem__(self, key, value):
        origin, destination = key
        self.flow.setdefault(origin, {})[destination] = value

    def __delitem__(self, key):
        origin, destination = key
        del self.flow[origin][destination]
        # Drop the origin bucket once it holds no destinations.
        if not self.flow[origin]:
            del self.flow[origin]

    def flows(self):
        """Return {(o, d): volume} for all non-self flows."""
        return {(o, d): self.flow[o][d] for o in self.flow for d in self.flow[o] if o != d}

    def od_pairs(self):
        """Return the list of (origin, destination) pairs with o != d."""
        return [(o, d) for o in self.flow for d in self.flow[o] if o != d]

    def add_flow(self, origin, destination, volume):
        """Set the volume of a flow; identical to ``tm[o, d] = volume``."""
        self[origin, destination] = volume

    def pop_flow(self, origin, destination):
        """Remove a flow and return its volume; KeyError if it is absent.

        Unlike ``del tm[o, d]``, an emptied origin bucket is kept.
        """
        if (origin not in self.flow) or (destination not in self.flow[origin]):
            raise KeyError(('There is no flow from %s to %s' % (str(origin), str(destination))))
        return self.flow[origin].pop(destination)
@pytest.fixture()
def setup_to_fail():
    """Patch rescue.service so ExecStart drops to sulogin, then continues
    to the default target; restore the original unit file on teardown.

    NOTE(review): the decorator name was lost in this copy (a bare `()`
    remained). The setup/yield/teardown shape matches @pytest.fixture() —
    confirm against the original test module.
    """
    # Keep a pristine copy so teardown can restore it.
    shutil.copy('/usr/lib/systemd/system/rescue.service', '/usr/lib/systemd/system/rescue.service.bak')
    shellexec('sed -i -- \'/^ExecStart=/ s|^.*|ExecStart=-/bin/sh -c "/usr/sbin/sulogin; /usr/bin/systemctl --no-block default"|\' /usr/lib/systemd/system/rescue.service')
    print(shellexec('grep ExecStart= /usr/lib/systemd/system/rescue.service'))
    yield None
    # Teardown: put the original unit file back.
    shutil.move('/usr/lib/systemd/system/rescue.service.bak', '/usr/lib/systemd/system/rescue.service')
class OptionPlotoptionsScatterSonificationContexttracksMappingHighpassFrequency(Options):
    """Generated Highcharts option wrapper for the highpass-frequency mapping.

    FIX(review): each option below appeared as two plain ``def``s with the
    same name, so the setter silently overwrote the getter. They were clearly
    @property / @<name>.setter pairs whose decorators were stripped;
    restored here. Getters return the configured value (default None);
    setters store the value via self._config.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
@retries(max_attempts=5, wait=2, exceptions=SpinnakerSecurityGroupError)
def get_security_group_id(name='', env='', region=''):
    """Look up the Spinnaker security-group ID for *name* in *env*/*region*.

    NOTE(review): the decorator name was lost in this copy (only the
    argument list remained); the shape matches the project's @retries
    decorator — confirm the import before merging.

    Raises:
        SpinnakerSecurityGroupError: when the group is not found; the
            retry wrapper retries on this before finally propagating it.
    """
    vpc_id = get_vpc_id(env, region)
    LOG.info('Find %s sg in %s [%s] in %s', name, env, region, vpc_id)
    uri = '/securityGroups/{0}/{1}/{2}?vpcId={3}'.format(env, region, name, vpc_id)
    response = gate_request(uri=uri)
    assert response.ok
    result = response.json()
    try:
        security_group_id = result['id']
    except KeyError as key_error:
        msg = 'Security group ({0}) not found'.format(name)
        # Chain the KeyError so the original lookup failure stays visible.
        raise SpinnakerSecurityGroupError(msg) from key_error
    LOG.info('Found: %s', security_group_id)
    return security_group_id
class LowLightTest(srdata.SRData):
    """Test split of a low-light enhancement dataset: paired normal/low
    images plus two auxiliary image sets (AVA 'good' and real low-light)."""

    def __init__(self, args, name='LowLightTest', train=True, benchmark=False):
        super(LowLightTest, self).__init__(args, name=name, train=train, benchmark=benchmark)

    def _set_filesystem(self, dir_data):
        # NOTE(review): self.apath is hard-coded below, overriding whatever
        # dir_data-based path the parent set up — verify this is intended.
        super(LowLightTest, self)._set_filesystem(dir_data)
        self.apath = '/data1/yangwenhan/datasets/'
        self.dir_hr = os.path.join(self.apath, 'Our_normal_test')
        self.dir_lr = os.path.join(self.apath, 'Our_low_test')
        self.dir_hq = os.path.join(self.apath, 'AVA_good_2')
        self.dir_lrr = os.path.join(self.apath, 'Low_real_test_2_rs')
        # One extension per image directory; the mapping order is defined by
        # the parent class's scan logic.
        self.ext = ('.png', '.png', '.png', '.jpg')

    def _scan(self):
        # Parent yields file lists unpacked here as (hr, lr, hq, lrr); each
        # is trimmed to the configured [begin, end] index range (1-based).
        (names_hr, names_lr, names_hq, names_lrr) = super(LowLightTest, self)._scan()
        names_hr = names_hr[(self.begin - 1):self.end]
        names_lr = names_lr[(self.begin - 1):self.end]
        names_lrr = names_lrr[(self.begin - 1):self.end]
        names_hq = names_hq[(self.begin - 1):self.end]
        # NOTE(review): the returned order (hr, lr, lrr, hq) differs from
        # the unpacking order above (hr, lr, hq, lrr) — confirm callers
        # expect lrr before hq.
        return (names_hr, names_lr, names_lrr, names_hq)
class Config:
    """CLI configuration: merges explicit constructor arguments with values
    loaded from ``<config_dir>/config.yml``; explicit arguments win.

    FIX(review): the @staticmethod/@classmethod/@property decorators were
    missing in this copy even though every call site uses them as such —
    e.g. ``self._first_not_none(a, b)`` would pass ``self`` as the first
    value (so it always returned ``self``), and ``self.has_s3`` was read
    without parentheses (always truthy as a bound method). Restored here.
    """

    _SLACK = 'slack'
    _AWS = 'aws'
    _GOOGLE = 'google'
    _AZURE = 'azure'
    _CONFIG_FILE_NAME = 'config.yml'
    # dbt quoting control environment variables.
    _DATABASE_QUOTING = 'DATABASE_QUOTING'
    _SCHEMA_QUOTING = 'SCHEMA_QUOTING'
    _IDENTIFIER_QUOTING = 'IDENTIFIER_QUOTING'
    _QUOTING_KEY_MAPPING = {'database': _DATABASE_QUOTING, 'schema': _SCHEMA_QUOTING, 'identifier': _IDENTIFIER_QUOTING}
    _QUOTING_VALID_KEYS = set(_QUOTING_KEY_MAPPING.keys())
    _QUOTING_ENV_VARS = set(_QUOTING_KEY_MAPPING.values())
    DEFAULT_CONFIG_DIR = str(Path.home() / '.edr')
    DEFAULT_TARGET_PATH = os.getcwd() + '/edr_target'

    def __init__(self, config_dir: str=DEFAULT_CONFIG_DIR, profiles_dir: Optional[str]=None, project_dir: Optional[str]=None, profile_target: Optional[str]=None, project_profile_target: Optional[str]=None, target_path: str=DEFAULT_TARGET_PATH, dbt_quoting: Optional[bool]=None, update_bucket_website: Optional[bool]=None, slack_webhook: Optional[str]=None, slack_token: Optional[str]=None, slack_channel_name: Optional[str]=None, slack_group_alerts_by: Optional[str]=None, timezone: Optional[str]=None, aws_profile_name: Optional[str]=None, aws_region_name: Optional[str]=None, aws_access_key_id: Optional[str]=None, aws_secret_access_key: Optional[str]=None, s3_endpoint_url: Optional[str]=None, s3_bucket_name: Optional[str]=None, google_project_name: Optional[str]=None, google_service_account_path: Optional[str]=None, gcs_bucket_name: Optional[str]=None, gcs_timeout_limit: Optional[int]=None, azure_connection_string: Optional[str]=None, azure_container_name: Optional[str]=None, report_url: Optional[str]=None, env: str='dev'):
        self.config_dir = config_dir
        self.profiles_dir = profiles_dir
        self.project_dir = project_dir
        self.profile_target = profile_target
        self.project_profile_target = project_profile_target
        self.env = env
        self.env_vars = dict()
        self.env_vars.update(self._parse_dbt_quoting_to_env_vars(dbt_quoting))
        config = self._load_configuration()
        # Resolution order everywhere below: explicit argument, then
        # config.yml value, then default.
        self.target_dir = self._first_not_none(target_path, config.get('target-path'), os.getcwd())
        os.makedirs(os.path.abspath(self.target_dir), exist_ok=True)
        os.environ['DBT_LOG_PATH'] = os.path.abspath(target_path)
        self.update_bucket_website = self._first_not_none(update_bucket_website, config.get('update_bucket_website'), False)
        self.timezone = self._first_not_none(timezone, config.get('timezone'))
        slack_config = config.get(self._SLACK, {})
        self.slack_webhook = self._first_not_none(slack_webhook, slack_config.get('notification_webhook'))
        self.slack_token = self._first_not_none(slack_token, slack_config.get('token'))
        self.slack_channel_name = self._first_not_none(slack_channel_name, slack_config.get('channel_name'))
        self.is_slack_workflow = self._first_not_none(slack_config.get('workflows'), False)
        self.slack_group_alerts_by = self._first_not_none(slack_group_alerts_by, slack_config.get('group_alerts_by'), GroupingType.BY_ALERT.value)
        aws_config = config.get(self._AWS, {})
        self.aws_profile_name = self._first_not_none(aws_profile_name, aws_config.get('profile_name'))
        self.aws_region_name = self._first_not_none(aws_region_name, aws_config.get('region_name'))
        self.s3_endpoint_url = self._first_not_none(s3_endpoint_url, aws_config.get('s3_endpoint_url'))
        self.s3_bucket_name = self._first_not_none(s3_bucket_name, aws_config.get('s3_bucket_name'))
        self.aws_access_key_id = aws_access_key_id
        self.aws_secret_access_key = aws_secret_access_key
        google_config = config.get(self._GOOGLE, {})
        self.google_project_name = self._first_not_none(google_project_name, google_config.get('project_name'))
        self.google_service_account_path = self._first_not_none(google_service_account_path, google_config.get('service_account_path'))
        self.gcs_bucket_name = self._first_not_none(gcs_bucket_name, google_config.get('gcs_bucket_name'))
        self.gcs_timeout_limit = self._first_not_none(gcs_timeout_limit, google_config.get('gcs_timeout_limit'))
        azure_config = config.get(self._AZURE, {})
        self.azure_connection_string = self._first_not_none(azure_connection_string, azure_config.get('azure_connection_string'))
        self.azure_container_name = self._first_not_none(azure_container_name, azure_config.get('azure_container_name'))
        self.report_url = self._first_not_none(report_url, aws_config.get('report_url'), google_config.get('report_url'), azure_config.get('report_url'))
        self.anonymous_tracking_enabled = config.get('anonymous_usage_tracking', True)

    def _load_configuration(self) -> dict:
        """Load config.yml from the config dir (created if missing);
        return an empty dict when the file does not exist or is empty."""
        if not os.path.exists(self.config_dir):
            os.makedirs(self.config_dir)
        config_file_path = os.path.join(self.config_dir, self._CONFIG_FILE_NAME)
        if not os.path.exists(config_file_path):
            return {}
        return OrderedYaml().load(config_file_path) or {}

    @property
    def has_send_report_platform(self):
        """True when at least one report destination is configured."""
        return (self.slack_token and self.slack_channel_name) or self.has_s3 or self.has_gcs or self.has_blob

    @property
    def has_slack(self) -> bool:
        return self.slack_webhook or (self.slack_token and self.slack_channel_name)

    @property
    def has_s3(self):
        return self.s3_bucket_name

    @property
    def has_blob(self):
        return self.azure_container_name

    @property
    def has_gcloud(self):
        """True when a service-account path is set or default Google
        application credentials are available."""
        if self.google_service_account_path:
            return True
        try:
            google.auth.default()
            return True
        except DefaultCredentialsError:
            return False

    @property
    def has_gcs(self):
        return self.gcs_bucket_name and self.has_gcloud

    def validate_monitor(self):
        """Raise InvalidArgumentsError unless Slack is properly configured."""
        self._validate_timezone()
        if not self.has_slack:
            raise InvalidArgumentsError('Either a Slack token and a channel or a Slack webhook is required.')

    def validate_send_report(self):
        """Raise InvalidArgumentsError unless a report destination exists."""
        if not self.has_send_report_platform:
            raise InvalidArgumentsError('You must provide a platform to upload the report to (Slack token / S3 / GCS).')

    def _validate_timezone(self):
        if self.timezone and not tz.gettz(self.timezone):
            raise InvalidArgumentsError('An invalid timezone was provided.')

    @staticmethod
    def _first_not_none(*values):
        """Return the first value that is not None, or None if all are."""
        return next((v for v in values if v is not None), None)

    @classmethod
    def _parse_dbt_quoting_to_env_vars(cls, dbt_quoting):
        """Translate a dbt_quoting spec ('all', 'none', or a comma-separated
        subset of database/schema/identifier) into env-var assignments.

        Raises InvalidArgumentsError for unknown keys; returns {} for None.
        """
        if dbt_quoting is None:
            return {}
        if dbt_quoting == 'all':
            return {env_var: 'True' for env_var in cls._QUOTING_ENV_VARS}
        elif dbt_quoting == 'none':
            return {env_var: 'False' for env_var in cls._QUOTING_ENV_VARS}
        dbt_quoting_keys = {part.strip() for part in dbt_quoting.split(',')}
        if not dbt_quoting_keys.issubset(cls._QUOTING_VALID_KEYS):
            raise InvalidArgumentsError(('Invalid quoting specification: %s' % dbt_quoting))
        # Selected keys are True, everything else explicitly False.
        env_vars = {env_var: 'False' for env_var in cls._QUOTING_ENV_VARS}
        env_vars.update({cls._QUOTING_KEY_MAPPING[key]: 'True' for key in dbt_quoting_keys})
        return env_vars
def locate_user_project_dir() -> Optional[str]:
    """Return the current working directory if it is a dbt project root
    (contains dbt_project.yml), otherwise None."""
    cwd = Path.cwd()
    return str(cwd) if (cwd / 'dbt_project.yml').exists() else None
def does_file_need_fix(filepath: str) -> bool:
    """Return True when *filepath* is a .py file whose license header is
    missing, preceded by unexpected lines, or different from
    ``license_header_lines``."""
    # Only Python source files are subject to fixing.
    if (not filepath.endswith('.py')):
        return False
    with open(filepath) as f:
        first_license_line = None
        for line in f:
            if (line == license_header_lines[0]):
                # Found the start of the expected header; remember it and
                # leave the loop so `f` continues from the next line.
                first_license_line = line
                break
            elif (line not in lines_to_keep):
                # Unexpected content before the header (anything other than
                # allowed prefix lines) means the file needs fixing.
                return True
        # Compare the expected header against the matched first line followed
        # by the remaining lines of the file. If the header was never found,
        # first_license_line is None and the very first comparison fails, so
        # the file is correctly reported as needing a fix.
        for (header_line, line) in zip(license_header_lines, chain((first_license_line,), f)):
            if (line != header_line):
                return True
    return False
class _Constraint(NamedTuple):
    # NOTE(review): this chunk looks extraction-mangled — a NamedTuple with
    # no visible fields whose body is a group of constraint classes. Nesting
    # them inside _Constraint is the only arrangement that keeps this span
    # syntactically valid; confirm the structure against the original module.

    class Average(_BaseConstraint):
        # Bound handling is inherited from _BaseConstraint.

        def __init__(self, attribute: Attribute):
            super().__init__(attribute)

    class Gap(_BaseConstraint):

        def __init__(self, attribute: Attribute):
            super().__init__(attribute)

        def check_satisfaction(self, value):
            """Return True when every element of *value* lies within the
            configured (optional) lower/upper bounds."""
            res = True
            if self.has_upper_bound():
                if (max(value) > self.upper_bound):
                    return False
            if self.has_lower_bound():
                if (min(value) < self.lower_bound):
                    return False
            return res

    class Median(_BaseConstraint):

        def __init__(self, attribute: Attribute):
            super().__init__(attribute)

    class Span(_BaseConstraint):

        def __init__(self, attribute: Attribute):
            super().__init__(attribute)
def _get_all_overrides_from_hydra() -> List[Dict[(str, str)]]:
    """Collect override dicts from Hydra's 'main' config source.

    Returns one ``{'env': ...}`` dict per entry in the ``env`` group and,
    when present, one ``{'env': ..., 'configuration': ...}`` dict per
    ``env_configuration`` entry (named ``<env>-<configuration>``).
    """
    sources = GlobalHydra.instance().config_loader().get_sources()
    main_source = [src for src in sources if src.provider == 'main'][0]
    collected = []
    for env_name in main_source.list('env', results_filter=None):
        collected.append(dict(env=env_name))
    if main_source.exists('env_configuration'):
        for entry in main_source.list('env_configuration', results_filter=None):
            env_name, configuration = entry.split('-')
            collected.append(dict(env=env_name, configuration=configuration))
    return collected
def test_empty():
    """With no ignore patterns configured, no path is reported as ignored."""
    ignored = get_ignore([], [])
    sample_paths = (
        'amazing-file.txt',
        'module.pyc',
        'one.rst',
        'two.rst',
        'one.md',
        'foo/random.txt',
        'bar/__pycache__/file.pyc',
    )
    for path in sample_paths:
        assert not ignored(path)
class LeakyReLU(Fixed):
    """Leaky-ReLU bijection with a fixed negative slope of 0.01."""

    def _forward(self, x: torch.Tensor, params: Optional[Sequence[torch.Tensor]]) -> Tuple[(torch.Tensor, Optional[torch.Tensor])]:
        out = F.leaky_relu(x)
        return (out, self._log_abs_det_jacobian(x, out, params))

    def _inverse(self, y: torch.Tensor, params: Optional[Sequence[torch.Tensor]]) -> Tuple[(torch.Tensor, Optional[torch.Tensor])]:
        # slope 100 = 1 / 0.01 undoes the forward pass's negative-branch scale
        recovered = F.leaky_relu(y, negative_slope=100.0)
        return (recovered, self._log_abs_det_jacobian(recovered, y, params))

    def _log_abs_det_jacobian(self, x: torch.Tensor, y: torch.Tensor, params: Optional[Sequence[torch.Tensor]]) -> torch.Tensor:
        # log|dy/dx| is 0 on the non-negative branch, log(0.01) otherwise.
        negative_branch = torch.ones_like(x) * math.log(0.01)
        return torch.where((x >= 0.0), torch.zeros_like(x), negative_branch)
def test_get_llm_model_answer_empty_prompt(config, mocker):
    """An empty prompt must still be forwarded verbatim to _get_answer."""
    patched_get_answer = mocker.patch('embedchain.llm.jina.JinaLlm._get_answer', return_value='Test answer')
    llm = JinaLlm(config)
    result = llm.get_llm_model_answer('')
    assert result == 'Test answer'
    patched_get_answer.assert_called_once_with('', config)
def test_set_get_clip_units():
    """clip_units round-trips valid names and rejects bad types/values.

    FIX(review): the TypeError assignment below had no right-hand side in
    this copy (a syntax error); any non-string value exercises that branch.
    """
    with Drawing() as ctx:
        ctx.clip_units = 'object_bounding_box'
        assert (ctx.clip_units == 'object_bounding_box')
        with raises(TypeError):
            # Non-string values are rejected with TypeError.
            ctx.clip_units = 1
        with raises(ValueError):
            # Unknown unit names are rejected with ValueError.
            ctx.clip_units = 'not-a-clip_unit'
class TestGetConnectionSecretSchema():
    """API tests for the connection-type secret-schema endpoint.

    FIX(review): the base_url fixture's decorator name was stripped in this
    copy (only ``(scope='function')`` remained); restored as
    @pytest.fixture(scope='function').
    """

    @pytest.fixture(scope='function')
    def base_url(self, oauth_client: ClientDetail, policy) -> str:
        # URL template with a {connection_type} placeholder filled per test.
        return (V1_URL_PREFIX + CONNECTION_TYPE_SECRETS)

    def test_get_connection_secret_schema_not_authenticated(self, api_client, base_url):
        resp = api_client.get(base_url.format(connection_type='sentry'), headers={})
        assert (resp.status_code == 401)

    def test_get_connection_secret_schema_forbidden(self, api_client, base_url, generate_auth_header):
        # CONNECTION_READ is the wrong scope; CONNECTION_TYPE_READ is needed.
        auth_header = generate_auth_header(scopes=[CONNECTION_READ])
        resp = api_client.get(base_url.format(connection_type='sentry'), headers=auth_header)
        assert (resp.status_code == 403)

    def test_get_connection_secret_schema_not_found(self, api_client: TestClient, generate_auth_header, base_url):
        auth_header = generate_auth_header(scopes=[CONNECTION_TYPE_READ])
        resp = api_client.get(base_url.format(connection_type='connection_type_we_do_not_support'), headers=auth_header)
        assert (resp.status_code == 404)
        assert (resp.json()['detail'] == "No connection type found with name 'connection_type_we_do_not_support'.")

    def test_get_connection_secret_schema_bigquery(self, api_client: TestClient, generate_auth_header, base_url) -> None:
        auth_header = generate_auth_header(scopes=[CONNECTION_TYPE_READ])
        resp = api_client.get(base_url.format(connection_type='bigquery'), headers=auth_header)
        assert (resp.json() == {'title': 'BigQuerySchema', 'description': 'Schema to validate the secrets needed to connect to BigQuery', 'type': 'object', 'properties': {'keyfile_creds': {'title': 'Keyfile Creds', 'description': 'The contents of the key file that contains authentication credentials for a service account in GCP.', 'sensitive': True, 'allOf': [{'$ref': '#/definitions/KeyfileCreds'}]}, 'dataset': {'title': 'BigQuery Dataset', 'description': 'The dataset within your BigQuery project that contains the tables you want to access.', 'type': 'string'}}, 'required': ['keyfile_creds', 'dataset'], 'definitions': {'KeyfileCreds': {'title': 'KeyfileCreds', 'description': 'Schema that holds BigQuery keyfile key/vals', 'type': 'object', 'properties': {'type': {'title': 'Type', 'type': 'string'}, 'project_id': {'title': 'Project ID', 'type': 'string'}, 'private_key_id': {'title': 'Private Key ID', 'type': 'string'}, 'private_key': {'title': 'Private Key', 'sensitive': True, 'type': 'string'}, 'client_email': {'title': 'Client Email', 'type': 'string', 'format': 'email'}, 'client_id': {'title': 'Client ID', 'type': 'string'}, 'auth_uri': {'title': 'Auth URI', 'type': 'string'}, 'token_uri': {'title': 'Token URI', 'type': 'string'}, 'auth_provider_x509_cert_url': {'title': 'Auth Provider X509 Cert URL', 'type': 'string'}, 'client_x509_cert_url': {'title': 'Client X509 Cert URL', 'type': 'string'}}, 'required': ['project_id']}}})

    def test_get_connection_secret_schema_dynamodb(self, api_client: TestClient, generate_auth_header, base_url) -> None:
        auth_header = generate_auth_header(scopes=[CONNECTION_TYPE_READ])
        resp = api_client.get(base_url.format(connection_type='dynamodb'), headers=auth_header)
        assert (resp.json() == {'title': 'DynamoDBSchema', 'description': 'Schema to validate the secrets needed to connect to an Amazon DynamoDB cluster', 'type': 'object', 'properties': {'region_name': {'title': 'Region', 'description': 'The AWS region where your DynamoDB table is located (ex. us-west-2).', 'type': 'string'}, 'aws_access_key_id': {'title': 'Access Key ID', 'description': 'Part of the credentials that provide access to your AWS account.', 'type': 'string'}, 'aws_secret_access_key': {'title': 'Secret Access Key', 'description': 'Part of the credentials that provide access to your AWS account.', 'sensitive': True, 'type': 'string'}}, 'required': ['region_name', 'aws_access_key_id', 'aws_secret_access_key']})

    def test_get_connection_secret_schema_mariadb(self, api_client: TestClient, generate_auth_header, base_url) -> None:
        auth_header = generate_auth_header(scopes=[CONNECTION_TYPE_READ])
        resp = api_client.get(base_url.format(connection_type='mariadb'), headers=auth_header)
        assert (resp.json() == {'title': 'MariaDBSchema', 'description': 'Schema to validate the secrets needed to connect to a MariaDB Database', 'type': 'object', 'properties': {'host': {'title': 'Host', 'description': 'The hostname or IP address of the server where the database is running.', 'type': 'string'}, 'port': {'default': 3306, 'title': 'Port', 'description': 'The network port number on which the server is listening for incoming connections (default: 3306).', 'type': 'integer'}, 'username': {'title': 'Username', 'description': 'The user account used to authenticate and access the database.', 'type': 'string'}, 'password': {'title': 'Password', 'description': 'The password used to authenticate and access the database.', 'sensitive': True, 'type': 'string'}, 'dbname': {'title': 'Database', 'description': 'The name of the specific database within the database server that you want to connect to.', 'type': 'string'}}, 'required': ['host', 'dbname']})

    def test_get_connection_secret_schema_mongodb(self, api_client: TestClient, generate_auth_header, base_url) -> None:
        auth_header = generate_auth_header(scopes=[CONNECTION_TYPE_READ])
        resp = api_client.get(base_url.format(connection_type='mongodb'), headers=auth_header)
        assert (resp.json() == {'title': 'MongoDBSchema', 'description': 'Schema to validate the secrets needed to connect to a MongoDB Database', 'type': 'object', 'properties': {'host': {'title': 'Host', 'description': 'The hostname or IP address of the server where the database is running.', 'type': 'string'}, 'port': {'default': 27017, 'title': 'Port', 'description': 'The network port number on which the server is listening for incoming connections (default: 27017).', 'type': 'integer'}, 'username': {'title': 'Username', 'description': 'The user account used to authenticate and access the database.', 'type': 'string'}, 'password': {'title': 'Password', 'description': 'The password used to authenticate and access the database.', 'sensitive': True, 'type': 'string'}, 'defaultauthdb': {'title': 'Default Auth DB', 'description': 'Used to specify the default authentication database.', 'type': 'string'}}, 'required': ['host', 'username', 'password', 'defaultauthdb']})

    def test_get_connection_secret_schema_mssql(self, api_client: TestClient, generate_auth_header, base_url) -> None:
        auth_header = generate_auth_header(scopes=[CONNECTION_TYPE_READ])
        resp = api_client.get(base_url.format(connection_type='mssql'), headers=auth_header)
        assert (resp.json() == {'title': 'MicrosoftSQLServerSchema', 'description': 'Schema to validate the secrets needed to connect to a MS SQL Database\n\nconnection string takes the format:\nmssql+pymssql://[username]:[password][host]:[port]/[dbname]', 'type': 'object', 'properties': {'host': {'title': 'Host', 'description': 'The hostname or IP address of the server where the database is running.', 'type': 'string'}, 'port': {'default': 1433, 'title': 'Port', 'description': 'The network port number on which the server is listening for incoming connections (default: 1433).', 'type': 'integer'}, 'username': {'title': 'Username', 'description': 'The user account used to authenticate and access the database.', 'type': 'string'}, 'password': {'title': 'Password', 'description': 'The password used to authenticate and access the database.', 'sensitive': True, 'type': 'string'}, 'dbname': {'title': 'Database', 'description': 'The name of the specific database within the database server that you want to connect to.', 'type': 'string'}}, 'required': ['host', 'username', 'password', 'dbname']})

    def test_get_connection_secret_schema_mysql(self, api_client: TestClient, generate_auth_header, base_url) -> None:
        auth_header = generate_auth_header(scopes=[CONNECTION_TYPE_READ])
        resp = api_client.get(base_url.format(connection_type='mysql'), headers=auth_header)
        assert (resp.json() == {'title': 'MySQLSchema', 'description': 'Schema to validate the secrets needed to connect to a MySQL Database', 'type': 'object', 'properties': {'host': {'title': 'Host', 'description': 'The hostname or IP address of the server where the database is running.', 'type': 'string'}, 'port': {'default': 3306, 'title': 'Port', 'description': 'The network port number on which the server is listening for incoming connections (default: 3306).', 'type': 'integer'}, 'username': {'title': 'Username', 'description': 'The user account used to authenticate and access the database.', 'type': 'string'}, 'password': {'title': 'Password', 'description': 'The password used to authenticate and access the database.', 'sensitive': True, 'type': 'string'}, 'dbname': {'title': 'Database', 'description': 'The name of the specific database within the database server that you want to connect to.', 'type': 'string'}, 'ssh_required': {'title': 'SSH required', 'description': 'Indicates whether an SSH tunnel is required for the connection. Enable this option if your MySQL server is behind a firewall and requires SSH tunneling for remote connections.', 'default': False, 'type': 'boolean'}}, 'required': ['host', 'dbname']})

    def test_get_connection_secret_schema_postgres(self, api_client: TestClient, generate_auth_header, base_url) -> None:
        auth_header = generate_auth_header(scopes=[CONNECTION_TYPE_READ])
        resp = api_client.get(base_url.format(connection_type='postgres'), headers=auth_header)
        assert (resp.json() == {'title': 'PostgreSQLSchema', 'description': 'Schema to validate the secrets needed to connect to a PostgreSQL Database', 'type': 'object', 'properties': {'host': {'title': 'Host', 'description': 'The hostname or IP address of the server where the database is running.', 'type': 'string'}, 'port': {'default': 5432, 'title': 'Port', 'description': 'The network port number on which the server is listening for incoming connections (default: 5432).', 'type': 'integer'}, 'username': {'title': 'Username', 'description': 'The user account used to authenticate and access the database.', 'type': 'string'}, 'password': {'title': 'Password', 'description': 'The password used to authenticate and access the database.', 'sensitive': True, 'type': 'string'}, 'dbname': {'title': 'Database', 'description': 'The name of the specific database within the database server that you want to connect to.', 'type': 'string'}, 'db_schema': {'title': 'Schema', 'description': 'The default schema to be used for the database connection (defaults to public).', 'type': 'string'}, 'ssh_required': {'title': 'SSH required', 'description': 'Indicates whether an SSH tunnel is required for the connection. Enable this option if your PostgreSQL server is behind a firewall and requires SSH tunneling for remote connections.', 'default': False, 'type': 'boolean'}}, 'required': ['host', 'dbname']})

    def test_get_connection_secret_schema_redshift(self, api_client: TestClient, generate_auth_header, base_url) -> None:
        auth_header = generate_auth_header(scopes=[CONNECTION_TYPE_READ])
        resp = api_client.get(base_url.format(connection_type='redshift'), headers=auth_header)
        assert (resp.json() == {'title': 'RedshiftSchema', 'description': 'Schema to validate the secrets needed to connect to an Amazon Redshift cluster', 'type': 'object', 'properties': {'host': {'title': 'Host', 'description': 'The hostname or IP address of the server where the database is running.', 'type': 'string'}, 'port': {'default': 5439, 'title': 'Port', 'description': 'The network port number on which the server is listening for incoming connections (default: 5439).', 'type': 'integer'}, 'user': {'title': 'Username', 'description': 'The user account used to authenticate and access the database.', 'type': 'string'}, 'password': {'title': 'Password', 'description': 'The password used to authenticate and access the database.', 'sensitive': True, 'type': 'string'}, 'database': {'title': 'Database', 'description': 'The name of the specific database within the database server that you want to connect to.', 'type': 'string'}, 'db_schema': {'title': 'Schema', 'description': 'The default schema to be used for the database connection (defaults to public).', 'type': 'string'}, 'ssh_required': {'title': 'SSH required', 'description': 'Indicates whether an SSH tunnel is required for the connection. Enable this option if your Redshift database is behind a firewall and requires SSH tunneling for remote connections.', 'default': False, 'type': 'boolean'}}, 'required': ['host', 'user', 'password', 'database']})

    def test_get_connection_secret_schema_snowflake(self, api_client: TestClient, generate_auth_header, base_url) -> None:
        auth_header = generate_auth_header(scopes=[CONNECTION_TYPE_READ])
        resp = api_client.get(base_url.format(connection_type='snowflake'), headers=auth_header)
        assert (resp.json() == {'title': 'SnowflakeSchema', 'description': 'Schema to validate the secrets needed to connect to Snowflake', 'type': 'object', 'properties': {'account_identifier': {'title': 'Account Name', 'description': 'The unique identifier for your Snowflake account.', 'type': 'string'}, 'user_login_name': {'title': 'Username', 'description': 'The user account used to authenticate and access the database.', 'type': 'string'}, 'password': {'title': 'Password', 'description': 'The password used to authenticate and access the database.', 'sensitive': True, 'type': 'string'}, 'warehouse_name': {'title': 'Warehouse', 'description': 'The name of the Snowflake warehouse where your queries will be executed.', 'type': 'string'}, 'database_name': {'title': 'Database', 'description': 'The name of the Snowflake database you want to connect to.', 'type': 'string'}, 'schema_name': {'title': 'Schema', 'description': 'The name of the Snowflake schema within the selected database.', 'type': 'string'}, 'role_name': {'title': 'Role', 'description': 'The Snowflake role to assume for the session, if different than Username.', 'type': 'string'}}, 'required': ['account_identifier', 'user_login_name', 'password', 'warehouse_name', 'database_name', 'schema_name']})

    def test_get_connection_secret_schema_hubspot(self, api_client: TestClient, generate_auth_header, base_url) -> None:
        auth_header = generate_auth_header(scopes=[CONNECTION_TYPE_READ])
        resp = api_client.get(base_url.format(connection_type='hubspot'), headers=auth_header)
        assert (resp.json() == {'title': 'hubspot_schema', 'description': 'Hubspot secrets schema', 'type': 'object', 'properties': {'domain': {'title': 'Domain', 'description': 'Your HubSpot domain', 'default': 'api.hubapi.com', 'sensitive': False, 'type': 'string'}, 'private_app_token': {'title': 'Private app token', 'description': 'Your HubSpot Private Apps access token', 'sensitive': True, 'type': 'string'}}, 'required': ['private_app_token']})

    def test_get_connection_secrets_manual_webhook(self, api_client: TestClient, generate_auth_header, base_url):
        auth_header = generate_auth_header(scopes=[CONNECTION_TYPE_READ])
        resp = api_client.get(base_url.format(connection_type='manual_webhook'), headers=auth_header)
        assert (resp.status_code == 200)
        assert (resp.json() == {'title': 'ManualWebhookSchema', 'description': 'Secrets for manual webhooks. No secrets needed at this time.', 'type': 'object', 'properties': {}})
class Adder(MethodView):
    """Flask MethodView that validates and enqueues hashcat cracking jobs.

    Supports two entry paths: restoring a previously-seen session (job_id
    supplied) and creating a brand-new job (hashes posted in the request).
    Before the real job is queued, a short "speed check" job may be queued
    on a dedicated Redis queue to benchmark and drive brain decisions.
    """

    def __init__(self):
        # Main RQ queue for crack jobs plus a dedicated queue for speed checks.
        self.crack_q = crackqueue.Queuer()
        self.q = self.crack_q.q_connect()
        self.log_dir = CRACK_CONF['files']['log_dir']
        rconf = CRACK_CONF['redis']
        self.redis_con = Redis(rconf['host'], rconf['port'])
        self.speed_q = Queue('speed_check', connection=self.redis_con, serializer=JSONSerializer)

    def mode_check(self, mode):
        """Return `mode` as an int if it is a supported hashcat hash mode, else False."""
        modes_dict = dict(hash_modes.HModes.modes_dict())
        logger.debug('Checking hash mode is supported: {}'.format(mode))
        if (str(mode) in modes_dict.keys()):
            return int(mode)
        else:
            return False

    def get_restore(self, log_dir, job_id):
        """Load saved job details (<job_id>.json) for a restore.

        Returns the parsed status dict, or False on any validation/IO error.
        NOTE(review): the `log_dir` parameter is ignored -- `self.log_dir`
        is used instead; confirm whether the parameter is vestigial.
        """
        logger.debug('Checking for restore value')
        if job_id.isalnum():
            job_file = valid.val_filepath(path_string=self.log_dir, file_string='{}.json'.format(job_id))
            logger.debug('Using session file: {}'.format(job_file))
            try:
                with open(job_file) as fh_job_file:
                    try:
                        # NOTE(review): json.loads raises JSONDecodeError
                        # (ValueError), which neither inner handler catches;
                        # malformed JSON falls through to the outer
                        # `except Exception` below -- confirm intended.
                        status_json = json.loads(fh_job_file.read())
                        logger.debug('Restoring job details: {}'.format(status_json))
                        return status_json
                    except IOError as err:
                        logger.warning('Invalid job ID: {}'.format(err))
                        return False
                    except TypeError as err:
                        logger.warning('Invalid job ID: {}'.format(err))
                        return False
            except IOError as err:
                logger.warning('Restore file Error: {}'.format(err))
                return False
            except Exception as err:
                logger.warning('Restore file Error: {}'.format(err))
                return False
        else:
            logger.warning('Invalid job ID')
            return False

    def session_check(self, log_dir, job_id):
        """Return True if a file for `job_id` exists under `log_dir`, else False.

        NOTE(review): `uuid` here is the module, not a class, so
        isinstance(job_id, uuid) would raise TypeError if ever evaluated;
        it is short-circuited whenever job_id.isalnum() is True -- confirm
        this was meant to be e.g. uuid.UUID.
        """
        logger.debug('Checking for existing session')
        log_dir = Path(log_dir)
        sess_id = False
        if (job_id.isalnum() or isinstance(job_id, uuid)):
            try:
                # Any log-dir entry containing the job id counts as a session.
                for f in Path.iterdir(log_dir):
                    if (str(job_id) in str(f)):
                        sess_id = True
                        break
            except ValueError as err:
                logger.debug('Invalid session ID: {}'.format(err))
                sess_id = False
            except Exception as err:
                logger.warning('Invalid session: {}'.format(err))
                sess_id = False
        else:
            logger.debug('Invalid session ID provided')
            sess_id = False
        if (sess_id is not False):
            logger.debug('Existing session found')
        # Returns None when no session was found (falsy to callers).
        return sess_id

    def speed_check(self, q_args=None):
        """Queue a benchmark "speed check" job derived from `q_args`.

        Uses a fixed 6-char mask ('?a?a?a?a?a?a') and a '<session>_speed'
        session name. Returns True when queued, False when no args given.
        """
        logger.debug('Running speed check')
        if q_args:
            speed_args = {}
            speedq_args = {}
            speed_args['hash_file'] = q_args['kwargs']['hash_file']
            speed_session = '{}_speed'.format(q_args['kwargs']['session'])
            speed_args['speed_session'] = speed_session
            speed_args['session'] = q_args['kwargs']['session']
            speed_args['wordlist'] = q_args['kwargs']['wordlist']
            speed_args['wordlist2'] = q_args['kwargs']['wordlist2']
            speed_args['hash_mode'] = q_args['kwargs']['hash_mode']
            speed_args['username'] = q_args['kwargs']['username']
            speed_args['name'] = q_args['kwargs']['name']
            speed_args['brain'] = q_args['kwargs']['brain']
            speed_args['attack_mode'] = q_args['kwargs']['attack_mode']
            speed_args['mask'] = '?a?a?a?a?a?a'
            speed_args['pot_path'] = q_args['kwargs']['pot_path']
            speedq_args['kwargs'] = speed_args
            speedq_args['job_id'] = speed_session
            self.crack_q.q_add(self.speed_q, speedq_args, timeout=600)
            logger.debug('Queuing speed check')
            return True
        return False

    def queue_job(self, args, job_id=None):
        """Validate `args` and enqueue a crack job; returns (body, status).

        With a `job_id`, restores a previous session from its saved JSON
        details; without one, creates a new session from the posted hashes
        and options. On success returns (job_id, 202).
        """
        if job_id:
            # ---- Restore path: rebuild hc_args from the saved session ----
            if job_id.isalnum():
                if self.session_check(self.log_dir, job_id):
                    logger.debug('Valid session found')
                    started = rq.registry.StartedJobRegistry(queue=self.q)
                    cur_list = started.get_job_ids()
                    q_dict = self.crack_q.q_monitor(self.q)
                    # Reject duplicates that are already running or queued.
                    if (job_id in cur_list):
                        logger.error('Job is already running')
                        return (jsonify({'msg': 'Job is already running'}), 500)
                    if (job_id in q_dict['Queued Jobs'].keys()):
                        logger.error('Job is already queued')
                        return (jsonify({'msg': 'Job is already queued'}), 500)
                    outfile = str(valid.val_filepath(path_string=self.log_dir, file_string='{}.cracked'.format(job_id)))
                    hash_file = str(valid.val_filepath(path_string=self.log_dir, file_string='{}.hashes'.format(job_id)))
                    pot_path = str(valid.val_filepath(path_string=self.log_dir, file_string='crackq.pot'))
                    job_deets = self.get_restore(self.log_dir, job_id)
                    job = self.q.fetch_job(job_id)
                    if (not job_deets):
                        logger.debug('Job restor error. Never started')
                        return (jsonify({'msg': 'Error restoring job'}), 500)
                    elif (not job_deets['restore']):
                        logger.debug('Job not previously started, restore = 0')
                        job_deets['restore'] = 0
                    elif (job_deets['restore'] == 0):
                        logger.debug('Job not previously started, restore = 0')
                    wordlist = None
                    wordlist2 = None
                    rules = None
                    # Only accept wordlists/rules declared in the server config.
                    if ('wordlist' in job_deets):
                        if (job_deets['wordlist'] in CRACK_CONF['wordlists']):
                            wordlist = CRACK_CONF['wordlists'][job_deets['wordlist']]
                    if ('wordlist2' in job_deets):
                        if job_deets['wordlist2']:
                            if (job_deets['wordlist2'] in CRACK_CONF['wordlists']):
                                wordlist2 = CRACK_CONF['wordlists'][job_deets['wordlist2']]
                    if ('rules' in job_deets):
                        rules = check_rules(job_deets['rules'])
                        if (rules is False):
                            return (jsonify({'msg': 'Invalid rules selected'}), 500)
                    mask_file = check_mask(job_deets['mask'])
                    # NOTE(review): here the whole check_mask() result is used,
                    # while the new-job path below uses mask_file[0] -- confirm
                    # the asymmetry is intentional.
                    mask = (mask_file if mask_file else job_deets['mask'])
                    try:
                        timeout = job_deets['timeout']
                    except KeyError as err:
                        logger.warning('No timeout info in job details, using default')
                        timeout = 1814400
                    # NOTE(review): 'username' is read only when the key 'user'
                    # (not 'username') is present -- looks like a key mismatch;
                    # confirm against upstream.
                    hc_args = {'hash_file': hash_file, 'session': job_id, 'wordlist': wordlist, 'wordlist2': wordlist2, 'mask': mask, 'mask_file': (True if mask_file else False), 'attack_mode': int(job_deets['attack_mode']), 'hash_mode': int(job_deets['hash_mode']), 'outfile': outfile, 'rules': rules, 'restore': job_deets['restore'], 'username': (job_deets['username'] if ('user' in job_deets) else None), 'increment': (job_deets['increment'] if ('increment' in job_deets) else None), 'increment_min': (job_deets['increment_min'] if ('increment_min' in job_deets) else None), 'increment_max': (job_deets['increment_max'] if ('increment_max' in job_deets) else None), 'brain': (False if ('disable_brain' in job_deets) else True), 'name': (job_deets['name'] if ('name' in job_deets) else None), 'pot_path': pot_path}
                    job = self.q.fetch_job(job_id)
                    job.meta['CrackQ State'] = 'Run/Restored'
                    job.save_meta()
                else:
                    return (jsonify(ERR_INVAL_JID), 500)
            else:
                return (jsonify(ERR_INVAL_JID), 500)
        else:
            # ---- New-job path: write hashes, validate options ----
            logger.debug('Creating new session')
            job_id = uuid.uuid4().hex
            add_jobid(job_id)
            outfile = str(valid.val_filepath(path_string=self.log_dir, file_string='{}.cracked'.format(job_id)))
            hash_file = str(valid.val_filepath(path_string=self.log_dir, file_string='{}.hashes'.format(job_id)))
            pot_path = str(valid.val_filepath(path_string=self.log_dir, file_string='crackq.pot'))
            try:
                attack_mode = int(args['attack_mode'])
            except TypeError:
                attack_mode = None
            try:
                logger.debug('Writing hashes to file: {}'.format(hash_file))
                with open(hash_file, 'w') as hash_fh:
                    for hash_l in args['hash_list']:
                        hash_fh.write((hash_l.rstrip() + '\n'))
            except KeyError as err:
                logger.debug('No hash list provided: {}'.format(err))
                return (jsonify({'msg': 'No hashes provided'}), 500)
            except IOError as err:
                logger.debug('Unable to write to hash file: {}'.format(err))
                return (jsonify({'msg': 'System error'}), 500)
            try:
                args['hash_mode']
                check_m = self.mode_check(args['hash_mode'])
            except KeyError:
                check_m = False
            logger.debug('Hash mode check: {}'.format(check_m))
            if (check_m is not False):
                try:
                    mode = int(check_m)
                except TypeError as err:
                    logger.error('Incorrect type supplied for hash_mode:\n{}'.format(err))
                    return (jsonify({'msg': 'Invalid hash mode selected'}), 500)
            else:
                return (jsonify({'msg': 'Invalid hash mode selected'}), 500)
            # Attack mode 3 (mask-only) needs no wordlist; mode 1 needs two.
            if (attack_mode != 3):
                if (args['wordlist'] in CRACK_CONF['wordlists']):
                    wordlist = CRACK_CONF['wordlists'][args['wordlist']]
                else:
                    return (jsonify({'msg': 'Invalid wordlist selected'}), 500)
            if (attack_mode == 1):
                if ('wordlist2' in args):
                    if (args['wordlist2'] in CRACK_CONF['wordlists']):
                        wordlist2 = CRACK_CONF['wordlists'][args['wordlist2']]
                else:
                    return (jsonify({'msg': 'Combinator mode requires 2 wordlists'}), 500)
            else:
                wordlist2 = None
            try:
                mask_file = check_mask(args['mask_file'])
            except KeyError:
                mask_file = None
            try:
                mask = args['mask']
            except KeyError:
                mask = None
            # A provided mask file takes precedence over an inline mask.
            mask = (mask_file[0] if mask_file else mask)
            rules = check_rules(args['rules'])
            if (rules is False):
                return ({'msg': 'Invalid rules selected'}, 500)
            try:
                username = args['username']
            except KeyError as err:
                logger.debug('Username value not provided')
                username = False
            try:
                increment = args['increment']
            except KeyError as err:
                logger.debug('Increment value not provided')
                increment = False
            try:
                increment_min = args['increment_min']
            except KeyError as err:
                logger.debug('Increment min value not provided')
                increment_min = None
            try:
                increment_max = args['increment_max']
            except KeyError as err:
                logger.debug('Increment max value not provided')
                increment_max = None
            try:
                if args['disable_brain']:
                    logger.debug('Brain disabled')
                    brain = False
                else:
                    brain = True
            except KeyError as err:
                logger.debug('Brain not disabled: {}'.format(err))
                brain = True
            try:
                name = args['name']
            except KeyError as err:
                logger.debug('Name value not provided')
                name = None
            try:
                potcheck = args['potcheck']
            except KeyError as err:
                logger.debug('Potcheck value not provided')
                potcheck = False
            # Default job timeout: 21 days; config may lock or user may set it.
            timeout = 1814400
            if ('jobtimeout' in CRACK_CONF):
                if (not CRACK_CONF['jobtimeout']['Modify']):
                    logger.debug('Timeout modification not permitted')
                    timeout = CRACK_CONF['jobtimeout']['Value']
                elif ('timeout' in args):
                    timeout = args['timeout']
            hc_args = {'hash_file': hash_file, 'session': job_id, 'wordlist': (wordlist if (attack_mode != 3) else None), 'wordlist2': (wordlist2 if (attack_mode == 1) else None), 'mask': (mask if (attack_mode > 2) else None), 'mask_file': (True if mask_file else False), 'attack_mode': attack_mode, 'hash_mode': mode, 'outfile': outfile, 'rules': rules, 'username': username, 'increment': increment, 'increment_min': increment_min, 'increment_max': increment_max, 'brain': brain, 'name': name, 'pot_path': pot_path, 'restore': 0, 'potcheck': potcheck}
        q_args = {'job_id': job_id, 'kwargs': hc_args}
        try:
            q = self.crack_q.q_connect()
            try:
                # Queue a speed check first unless a restored job already
                # completed its brain check.
                if (hc_args['restore'] > 0):
                    job = self.q.fetch_job(job_id)
                    if job.meta['brain_check']:
                        logger.debug('Brain check previously complete')
                    elif (job.meta['brain_check'] is None):
                        self.speed_check(q_args=q_args)
                        time.sleep(3)
                    else:
                        logger.debug('Restored job, disabling speed check')
                else:
                    logger.debug('Job not a restore, queuing speed_check')
                    self.speed_check(q_args=q_args)
                    time.sleep(3)
            except KeyError as err:
                logger.debug('Job not a restore, queuing speed_check')
                self.speed_check(q_args=q_args)
                time.sleep(3)
            self.crack_q.q_add(q, q_args, timeout=timeout)
            logger.debug('API Job {} added to queue'.format(job_id))
            logger.debug('Job Details: {}'.format(q_args))
            job = self.q.fetch_job(job_id)
            if ('task_id' in args):
                job.meta['task_id'] = args['task_id']
            job.meta['email_count'] = 0
            if ('notify' in args):
                job.meta['notify'] = args['notify']
            else:
                job.meta['notify'] = False
            # Store a notification address when the account has a valid email.
            if current_user.email:
                if email_check(current_user.email):
                    job.meta['email'] = str(current_user.email)
                    job.meta['last_seen'] = str(current_user.last_seen)
            elif email_check(current_user.username):
                job.meta['email'] = current_user.username
                job.meta['last_seen'] = str(current_user.last_seen)
            job.meta['CrackQ State'] = 'Run/Restored'
            job.meta['Speed Array'] = []
            job.save_meta()
            return (job_id, 202)
        except KeyError as err:
            logger.warning('Key missing from meta data:\n{}'.format(err))
            return (job_id, 202)
        except TypeError as err:
            logger.warning('Type error in job meta data:\n{}'.format(err))
            return (job_id, 202)

    # NOTE(review): the bare `_required` below looks like a decorator (e.g.
    # `@login_required`) whose '@' prefix was lost during extraction -- as
    # written it would raise NameError at class creation; confirm upstream.
    _required
    def post(self):
        """POST handler: validate the JSON payload and enqueue the job."""
        try:
            marsh_schema = parse_json_schema().load(request.json)
            args = marsh_schema
        except ValidationError as errors:
            logger.debug('Validation error: {}'.format(errors))
            return (errors.messages, 500)
        try:
            job_id = args['job_id'].hex
        except KeyError:
            logger.debug('No job ID provided')
            job_id = None
        except AttributeError:
            logger.debug('No job ID provided')
            job_id = None
        enqueue_result = self.queue_job(args, job_id=job_id)
        return enqueue_result
def prob_product_constant(E1, E2):
    """Return the normalization constant of the product of two 2x2-covariance
    Gaussian-like densities, broadcast over the batch dimensions of E1 and E2.

    E1 and E2 are tensors whose trailing two axes are 2x2 matrices; the two
    batches are combined by unsqueezing complementary axes.
    """
    # Axes to unsqueeze so the two batches broadcast against each other.
    ax1 = len(E1.shape) - 2
    ax2 = len(E2.shape) - 3

    def entry_sum(row, col):
        # Broadcast sum of one matrix entry across the two batches.
        return E1[..., row, col].unsqueeze(ax1) + E2[..., row, col].unsqueeze(ax2)

    root1 = lin_alg.det2x2(E1) ** 0.25
    root2 = lin_alg.det2x2(E2) ** 0.25
    # Determinant of (E1 + E2), written out for 2x2, under a safe sqrt.
    denom = safe_sqrt(entry_sum(0, 0) * entry_sum(1, 1) - torch.square(entry_sum(0, 1)))
    return 2.0 * root1.unsqueeze(ax1) * root2.unsqueeze(ax2) / denom
class LazyKernel():
    """Wrapper around a Cupy RawKernel that defers compilation to first use.

    Compilation happens via an explicit callback when one was supplied,
    otherwise by looking the kernel up by name in the module-level KERNELS.
    """

    name: str
    _kernel: Optional['cupy.RawKernel']
    _compile_callback: Optional[Callable[([], 'cupy.RawKernel')]]
    __slots__ = ['name', '_kernel', '_compile_callback']

    def __init__(self, name: str, *, compile_callback: Optional[Callable[([], 'cupy.RawKernel')]]=None) -> None:
        self.name = name
        self._compile_callback = compile_callback
        self._kernel = None

    def __call__(self, *args, **kwargs):
        # Lazily compile on the first invocation, then delegate.
        self._compile_kernel()
        self._kernel(*args, **kwargs)

    def _compile_kernel(self):
        """Populate self._kernel, raising ValueError if nothing can compile it."""
        if self._kernel is not None:
            return  # already compiled
        callback = self._compile_callback
        if callback is not None:
            self._kernel = callback()
        elif KERNELS is not None:
            self._kernel = KERNELS.get_function(self.name)
        if self._kernel is None:
            raise ValueError(f"couldn't compile Cupy kernel '{self.name}'")
def extractRyuxenjiBlogspotCom(item):
    """Map a ryuxenji.blogspot.com feed item to a release message.

    Returns None for previews or items without a chapter/volume, a release
    message for recognized series tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    # tag -> (series name, translation type); insertion order preserved.
    series_map = {
        'My Mr. Mermaid': ('My Mr. Mermaid', 'translated'),
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in series_map.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionPlotoptionsBoxplotSonificationDefaultinstrumentoptionsMappingLowpass(Options):
    """Low-pass filter mapping options for boxplot sonification instruments.

    NOTE(review): these accessors look like @property getters whose
    decorators were stripped during extraction -- confirm against the
    generated options source.
    """

    def frequency(self) -> 'OptionPlotoptionsBoxplotSonificationDefaultinstrumentoptionsMappingLowpassFrequency':
        """Sub-options accessor for the low-pass filter frequency mapping."""
        return self._config_sub_data('frequency', OptionPlotoptionsBoxplotSonificationDefaultinstrumentoptionsMappingLowpassFrequency)

    def resonance(self) -> 'OptionPlotoptionsBoxplotSonificationDefaultinstrumentoptionsMappingLowpassResonance':
        """Sub-options accessor for the low-pass filter resonance mapping."""
        return self._config_sub_data('resonance', OptionPlotoptionsBoxplotSonificationDefaultinstrumentoptionsMappingLowpassResonance)
def execute():
    """Patch: migrate legacy single `medical_code` fields into
    Codification Table child rows for every clinical doctype that has one.
    """
    frappe.reload_doctype('Codification Table', force=True)
    frappe.reload_doctype('Medical Code Standard', force=True)
    doctypes = ['Lab Test', 'Clinical Procedure', 'Therapy Session', 'Lab Test Template', 'Clinical Procedure Template', 'Therapy Type']
    for dt in doctypes:
        # Skip doctypes that never had the legacy column.
        if not frappe.db.has_column(dt, 'medical_code'):
            continue
        rows = frappe.db.get_all(dt, filters={'medical_code': ['!=', '']}, fields=['name', 'medical_code'])
        frappe.reload_doctype(dt, force=True)
        for row in rows:
            child = {
                'doctype': 'Codification Table',
                'parent': row['name'],
                'parentfield': 'codification_table',
                'parenttype': dt,
                'medical_code': row['medical_code'],
                'medical_code_standard': frappe.db.get_value('Medical Code', row['medical_code'], 'medical_code_standard'),
            }
            frappe.get_doc(child).insert()
def get_notes_by_created_date(index, editor, decks, limit, sortOrder) -> List[IndexNote]:
    """Return up to `limit` notes ordered by creation (nid), excluding pinned.

    `decks` restricts the query to specific deck ids unless it contains '-1'
    (meaning "all decks") or is empty.
    """
    order_key = 'lastCreated' if sortOrder == 'desc' else 'firstCreated'
    index.lastSearch = (None, decks, order_key, limit)
    # NOTE(review): deck ids are interpolated straight into the SQL -- fine
    # only if `decks` is always internally generated; confirm.
    deckQ = ''
    if decks and '-1' not in decks:
        deckQ = ('(%s)' % ','.join(decks))
    if deckQ:
        res = mw.col.db.all(('select distinct notes.id, flds, tags, did, mid from notes left join cards on notes.id = cards.nid where did in %s order by nid %s limit %s' % (deckQ, sortOrder, limit)))
    else:
        res = mw.col.db.all(('select distinct notes.id, flds, tags, did, mid from notes left join cards on notes.id = cards.nid order by nid %s limit %s' % (sortOrder, limit)))
    pinned = index.pinned
    return [
        IndexNote((r[0], r[1], r[2], r[3], r[1], -1, r[4], ''))
        for r in res
        if str(r[0]) not in pinned
    ]
def enable(exaile):
    """Enable the streamripper plugin for Exaile.

    Probes for the `streamripper` binary first; if it cannot be executed the
    plugin signals unavailability by raising NotImplementedError.

    Raises:
        NotImplementedError: when streamripper is not installed/executable.
    """
    try:
        # Probe only; -1 == subprocess.PIPE, so output is captured and discarded.
        subprocess.call(['streamripper'], stdout=(- 1), stderr=(- 1))
    except OSError:
        # Bug fix: the original had an unreachable `return False` after this
        # raise; dead code removed (behavior unchanged).
        raise NotImplementedError('Streamripper is not available.')
    if exaile.loading:
        # Defer real setup until Exaile has finished loading.
        event.add_callback(_enable, 'exaile_loaded')
    else:
        _enable(None, exaile, None)
class TestRemoveNoOpConcats(unittest.TestCase):
    """Tests for the graph pass that removes no-op concatenate operators."""

    def test_remove_no_op_concats_no_ops(self):
        """Concats with a single effective input are removed."""
        self._test_remove_no_op_concats_impl(input_shapes=[[2, 4, 6]], should_keep_concat=False, test_name='test_remove_no_op_concats_single_non_empty')
        self._test_remove_no_op_concats_impl(input_shapes=[[0], [3], [0]], should_keep_concat=False, test_name='test_remove_no_op_concats_single_non_empty_and_double_empty')

    def test_remove_no_op_concats_no_ops_all_empty(self):
        """Concats whose inputs are all empty are removed."""
        self._test_remove_no_op_concats_impl(input_shapes=[[0, 0, 0]], should_keep_concat=False, test_name='test_remove_no_op_concats_single_empty')
        self._test_remove_no_op_concats_impl(input_shapes=[[0, 0, 0], [0, 0, 0]], should_keep_concat=False, test_name='test_remove_no_op_concats_double_empty')

    def test_remove_no_op_concats_meaningful(self):
        """Concats with multiple non-empty inputs must be kept."""
        self._test_remove_no_op_concats_impl(input_shapes=[[3, 5], [3, 5]], should_keep_concat=True, test_name='test_remove_no_op_concats_double_non_empty')
        self._test_remove_no_op_concats_impl(input_shapes=[[3], [0], [5]], should_keep_concat=True, test_name='test_remove_no_op_concats_two_non_empty_and_empty')
        # A dynamic dim with lower bound 0 is not provably empty -> keep.
        int_var = IntVar([0, 10])
        self._test_remove_no_op_concats_impl(input_shapes=[[int_var, 3], [int_var, 5]], should_keep_concat=True, concat_dim=1, test_name='test_remove_no_op_concats_zero_lower_bound_int_var')

    def test_remove_no_op_concats_exceptions(self):
        """Mismatched ranks / dim sizes must raise from the concat op."""
        with self.assertRaises(RuntimeError):
            self._test_remove_no_op_concats_impl(input_shapes=[[2, 4], [0]], should_keep_concat=False, test_name='test_remove_no_op_concats_same_rank')
        with self.assertRaises(RuntimeError):
            self._test_remove_no_op_concats_impl(input_shapes=[[2, 4], [0, 0]], should_keep_concat=False, test_name='test_remove_no_ops_concat_same_dim_sizes')

    def _test_remove_no_op_concats_impl(self, input_shapes: Sequence[Sequence[int]], should_keep_concat: bool, test_name: str, concat_dim: int=0):
        """Build a small graph around a concat, compile it, and check both
        numerical parity with PyTorch and whether the concat op survived.
        """
        inputs = [Tensor(shape=shape, name=f'input_{i}', is_input=True) for (i, shape) in enumerate(input_shapes)]
        concatenated = ops.concatenate()(inputs, dim=concat_dim)
        c = Tensor(shape=[1], name='input_const', is_input=True)
        model_output = ((concatenated * c) + (concatenated / c))
        model_output._attrs['name'] = 'output_0'
        model_output._attrs['is_output'] = True
        # Resolve dynamic dims to their upper bounds for the PyTorch reference.
        input_shapes = [[(d.upper_bound() if isinstance(d, IntVar) else d) for d in shape] for shape in input_shapes]
        inputs_pt = {f'input_{i}': get_random_torch_tensor(shape=shape) for (i, shape) in enumerate(input_shapes)}
        concatenated_pt = torch.concat(list(inputs_pt.values()), dim=concat_dim)
        c_pt = get_random_torch_tensor(shape=[1])
        Y_pt = ((concatenated_pt * c_pt) + (concatenated_pt / c_pt))
        Y_ait = torch.empty_like(Y_pt)
        with compile_model(model_output, detect_target(), './tmp', test_name) as module:
            module.run_with_tensors({**inputs_pt, 'input_const': c_pt}, {'output_0': Y_ait})
        # Fix: use assertEqual -- assertEquals is a deprecated alias.
        self.assertEqual(graph_has_op(module.debug_sorted_graph, 'concatenate'), should_keep_concat)
        self.assertTrue(torch.allclose(Y_pt, Y_ait, atol=0.01, rtol=0.01))
class UnixTwistedTransport(DgramTwistedTransport):
    """Datagram transport over UNIX domain sockets using Twisted's reactor."""

    ADDRESS_TYPE = UnixTransportAddress
    _lport = None

    def openClientMode(self, iface=''):
        """Connect a UNIX datagram client socket; returns self for chaining."""
        try:
            self._lport = reactor.connectUNIXDatagram(iface, self)
        except Exception as why:
            raise error.CarrierError(why)
        return self

    def openServerMode(self, iface):
        """Listen on a UNIX datagram socket at `iface`; returns self."""
        try:
            self._lport = reactor.listenUNIXDatagram(iface, self)
        except Exception as why:
            raise error.CarrierError(why)
        return self

    def closeTransport(self):
        """Stop listening (ignoring the deferred's result) and close the base."""
        port = self._lport
        if port is not None:
            deferred = port.stopListening()
            if deferred:
                # Swallow the callback value; we only need the port stopped.
                deferred.addCallback(lambda x: None)
        DgramTwistedTransport.closeTransport(self)
class OptionSeriesSplineSonificationContexttracksPointgrouping(Options):
    """Point-grouping options for spline-series sonification context tracks.

    NOTE(review): each same-named method pair below looks like a @property
    getter plus its @<name>.setter whose decorators were stripped during
    extraction (otherwise the later def would shadow the earlier one) --
    confirm against the generated options source.
    """

    def algorithm(self):
        """Grouping algorithm; defaults to 'minmax'."""
        return self._config_get('minmax')

    def algorithm(self, text: str):
        """Set the grouping algorithm."""
        self._config(text, js_type=False)

    def enabled(self):
        """Whether point grouping is enabled; defaults to True."""
        return self._config_get(True)

    def enabled(self, flag: bool):
        """Enable or disable point grouping."""
        self._config(flag, js_type=False)

    def groupTimespan(self):
        """Timespan (ms) covered by each group; defaults to 15."""
        return self._config_get(15)

    def groupTimespan(self, num: float):
        """Set the group timespan."""
        self._config(num, js_type=False)

    def prop(self):
        """Point property used for grouping; defaults to 'y'."""
        return self._config_get('y')

    def prop(self, text: str):
        """Set the point property used for grouping."""
        self._config(text, js_type=False)
def iter_parent_by_semantic_type_recursively(semantic_content_iterable: Iterable[SemanticContentWrapper], type_: Type[T_SemanticContentWrapper], parent_content: SemanticContentWrapper) -> Iterable[SemanticContentWrapper]:
    """Yield, for each subtree, the parent of the first child matching `type_`.

    At each level the scan stops as soon as a direct child of the requested
    type is found (its parent is yielded); mixed-content children are searched
    recursively with themselves as the parent.
    """
    for child in semantic_content_iterable:
        if isinstance(child, type_):
            yield parent_content
            # First match at this level ends the scan of this generator.
            return
        if isinstance(child, SemanticMixedContentWrapper):
            yield from iter_parent_by_semantic_type_recursively(
                child.mixed_content, type_=type_, parent_content=child
            )
def make_function_strided_args(func_attrs, dim_info_dict, default_mm_info, is_permute=False):
    """Render CUTLASS bmm problem arguments honoring strided tensor accessors.

    For each input/output, the batch stride, leading dimension and element
    offset default to `default_mm_info` and are overridden from the op's
    TensorAccessors when the tensor view is strided/non-contiguous.

    Returns a tuple (problem_args, problem_args_cutlass_3x,
    input_addr_calculator, output_addr_calculator) of rendered code strings.
    """
    backend_spec = CUDASpec()
    elem_input_type = backend_spec.dtype_to_lib_type(func_attrs['inputs'][0]._attrs['dtype'])
    elem_output_type = backend_spec.dtype_to_lib_type(func_attrs['outputs'][0]._attrs['dtype'])
    # Defaults for contiguous inputs; overridden below from accessors.
    input_a_batch_stride_dim = default_mm_info.a_batch_stride
    input_a_stride_lda_dim = default_mm_info.lda
    input_a_offset = 0
    input_b_batch_stride_dim = default_mm_info.b_batch_stride
    input_b_stride_ldb_dim = default_mm_info.ldb
    input_b_offset = 0
    has_bias = (len(func_attrs['inputs']) == 3)
    if ('input_accessors' in func_attrs):
        input_a_accessor = func_attrs['input_accessors'][0]
        input_b_accessor = func_attrs['input_accessors'][1]
        if input_a_accessor.is_from_strided_tensor:
            input_a_offset = input_a_accessor.offset
            if (not input_a_accessor.is_contiguous):
                a_dims = reverse_dim_info_mapping(dim_info_dict, gemm_common.Source.INPUT, 0)
                input_a_batch_stride_dim = input_a_accessor.gen_stride_str(0, a_dims)
                input_a_stride_lda_dim = input_a_accessor.stride(1)
        if input_b_accessor.is_from_strided_tensor:
            input_b_offset = input_b_accessor.offset
            if (not input_b_accessor.is_contiguous):
                b_dims = reverse_dim_info_mapping(dim_info_dict, gemm_common.Source.INPUT, 1)
                input_b_batch_stride_dim = input_b_accessor.gen_stride_str(0, b_dims)
                input_b_stride_ldb_dim = input_b_accessor.stride(1)
        if has_bias:
            # Strided bias views are not supported by this template path.
            input_bias_accessor = func_attrs['input_accessors'][2]
            assert (not input_bias_accessor.is_from_strided_tensor), f"strided bias is not supported for op {func_attrs['name']}"
    input_addr_calculator = common.INPUT_ADDR_CALCULATOR.render(input_a_batch_stride_dim=input_a_batch_stride_dim, input_a_stride_dim=input_a_stride_lda_dim, input_a_offset_val=input_a_offset, input_b_batch_stride_dim=input_b_batch_stride_dim, input_b_stride_dim=input_b_stride_ldb_dim, input_b_offset_val=input_b_offset)
    # Permute variants swap which stride feeds the output batch stride.
    if is_permute:
        output_batch_stride_dim = default_mm_info.bias_batch_stride
        c_batch_stride = default_mm_info.c_batch_stride
    else:
        output_batch_stride_dim = default_mm_info.c_batch_stride
        c_batch_stride = 'output_batch_stride'
    output_stride_ldc_dim = default_mm_info.ldc
    output_offset = 0
    if ('output_accessors' in func_attrs):
        output_accessor = func_attrs['output_accessors'][0]
        if output_accessor.is_from_strided_tensor:
            output_offset = output_accessor.offset
            if (not output_accessor.is_contiguous):
                c_dims = reverse_dim_info_mapping(dim_info_dict, gemm_common.Source.OUTPUT, 0)
                output_batch_stride_dim = output_accessor.gen_stride_str(0, c_dims)
                output_stride_ldc_dim = output_accessor.stride(1)
    output_addr_calculator = OUTPUT_ADDR_CALCULATOR.render(output_batch_stride_dim=output_batch_stride_dim, output_stride_dim=output_stride_ldc_dim, output_offset_val=output_offset)
    # Pointers are pre-offset; strides refer to the runtime variables emitted
    # by the address-calculator snippets above.
    bmm_problem_info = Bmm_problem_info(alpha_value=default_mm_info.alpha_value, beta_value=default_mm_info.beta_value, a_ptr=f'({elem_input_type}*)({default_mm_info.a_ptr}) + input_a_offset', b_ptr=f'({elem_input_type}*)({default_mm_info.b_ptr}) + input_b_offset', bias_ptr=f'({elem_output_type}*)({default_mm_info.bias_ptr})', c_ptr=f'({elem_output_type}*)({default_mm_info.c_ptr}) + output_offset', a_batch_stride='input_a_batch_stride', b_batch_stride='input_b_batch_stride', bias_batch_stride=f'{default_mm_info.bias_batch_stride}', c_batch_stride=c_batch_stride, lda='input_a_stride', ldb='input_b_stride', ldbias=f'{default_mm_info.ldbias}', ldc='output_stride', a_row_major=default_mm_info.a_row_major, b_row_major=default_mm_info.b_row_major, c_row_major=default_mm_info.c_row_major)
    a_shapes = func_attrs['input_accessors'][0].original_shapes
    b_shapes = func_attrs['input_accessors'][1].original_shapes
    d_shapes = None
    if has_bias:
        d_shapes = func_attrs['input_accessors'][2].original_shapes
    _update_stride_info(bmm_problem_info, a_shapes, b_shapes, d_shapes)
    problem_args = PROBLEM_ARGS_TEMPLATE.render(mm_info=bmm_problem_info)
    problem_args_cutlass_3x = PROBLEM_ARGS_TEMPLATE_CUTLASS_3X.render(mm_info=bmm_problem_info)
    return (problem_args, problem_args_cutlass_3x, input_addr_calculator, output_addr_calculator)
class PubsubMessageDataToBinaryString(PropertyPreprocessor):
    """Preprocessor that base64-encodes each Pub/Sub message's `data` field.

    Dict payloads are JSON-serialized first; string payloads are encoded
    directly. Messages without a `data` key pass through untouched.
    """

    type = 'pubsub_message_data_to_binary_string'

    def imports(self):
        """Modules the generated code needs at runtime."""
        return {'modules': ['json', 'base64']}

    def process_arg(self, arg, node, raw_args):
        """Encode the `data` field of every message in `arg` (a list of dicts)."""
        processed = []
        for message in arg:
            if 'data' in message:
                message['data'] = self._process_data_arg(message['data'])
            processed.append(message)
        return processed

    def _process_data_arg(self, arg):
        """Return `arg` (dict or str) as a base64-encoded ASCII string."""
        try:
            payload = self._json_handler(arg) if self._is_dict(arg) else arg
            return base64.b64encode(payload.encode('utf-8')).decode('ascii')
        except Exception as e:
            # Wrap with context so the failing preprocessor/argument is visible.
            raise Exception('Error in preprocessor {} for argument `{}`: {}'.format(self.type, arg, str(e)))

    def _json_handler(self, arg):
        """Serialize a dict payload to a JSON string."""
        return json.dumps(arg)

    def _is_dict(self, arg):
        """True when the payload is a dict (and so must be JSON-serialized)."""
        return isinstance(arg, dict)
def delete_run(session: Session, id: str) -> None:
    """Delete the run with the given `id` plus every row that references it.

    Raises:
        EmptyDeletionError: when no run with that id exists.
    """
    if session.query(RunColumn).filter(RunColumn.id == id).delete() == 0:
        raise EmptyDeletionError(f'No run with `id` "{id}" exists.')
    # Cascade manually over every table keyed by run_id.
    for model in (IssueInstance, TraceFrame, RunOrigin, MetaRunToRunAssoc):
        session.query(model).filter(model.run_id == id).delete()
    session.commit()
def build_docker_version_notifier() -> Callable:
    """Build an async notifier that alerts at most once per new docker image.

    The last-notified version is persisted across restarts in a PersistedDict
    so the user is not re-notified for a version they already saw.
    """
    last_notified_version = PersistedDict('/tmp/exorde/docker_version_notification.json')

    async def docker_version_notifier(live_configuration: LiveConfiguration, command_line_arguments: argparse.Namespace) -> None:
        """Compare the running image version to the live config and notify."""
        current_img_version = os.environ.get('EXORDE_DOCKER_IMG_VERSION', None)
        if (not current_img_version):
            # Not running from the docker image; nothing to compare.
            return
        nonlocal last_notified_version
        live_version = live_configuration.get('docker_version', None)
        if (not live_version):
            logging.warning('no docker version specified in LiveConfiguration')
            return
        if (live_version != current_img_version):
            # Bug fix: the original compared the whole PersistedDict object to
            # the version string (always unequal), so a notification fired on
            # every call; compare the stored marker instead, and use `is None`
            # rather than `== None`.
            if ((last_notified_version['last_notification'] is None) or (last_notified_version['last_notification'] != live_version)):
                (await send_notification(command_line_arguments, 'A new exorde image is available'))
                last_notified_version['last_notification'] = live_version

    return docker_version_notifier
# NOTE(review): the three bare statements below look like decorators (route
# registration and permission checks) whose '@' prefixes were lost during
# extraction -- as written they are NameErrors at import; confirm upstream.
_routes.route('/<string:event_identifier>/reorder-speakers', methods=['POST'])
_event_id
_coorganizer
def reorder_speakers(event_id):
    """Bulk-reorder the speakers of one event.

    With `?reset` in the query string, zeroes every speaker's order for the
    event. Otherwise expects a JSON list of {speaker, order} items validated
    by SpeakerReorderSchema; every referenced speaker must belong to this
    event. Returns JSON {'success': True, 'updates': ...}.

    Raises:
        UnprocessableEntityError: when the payload fails schema validation.
        ForbiddenError: when speakers span other events.
    """
    if ('reset' in request.args):
        updates = Speaker.query.filter((Speaker.event_id == event_id)).update({Speaker.order: 0}, synchronize_session=False)
        db.session.commit()
        return jsonify({'success': True, 'updates': updates})
    (data, errors) = SpeakerReorderSchema(many=True).load(request.json)
    if errors:
        raise UnprocessableEntityError({'pointer': '/data', 'errors': errors}, 'Data in incorrect format')
    speaker_ids = {item['speaker'] for item in data}
    # All referenced speakers must map to exactly this single event.
    event_ids = db.session.query(distinct(Speaker.event_id)).filter(Speaker.id.in_(speaker_ids)).all()
    if ((len(event_ids) != 1) or (event_ids[0][0] != event_id)):
        raise ForbiddenError({'pointer': 'event_id'}, 'All speakers should be of single event which user has co-organizer access to')
    result = group_by(data, 'order')
    updates = {}
    # One bulk UPDATE per distinct target order value.
    for (order, items) in result.items():
        speaker_ids = {item['speaker'] for item in items}
        result = Speaker.query.filter(Speaker.id.in_(speaker_ids)).update({Speaker.order: order}, synchronize_session=False)
        updates[order] = result
    db.session.commit()
    return jsonify({'success': True, 'updates': updates})
class UserDialog(QtWidgets.QDialog):
    """Qt dialog for creating a new Stalker user (name/login/email/password)."""

    def __init__(self, parent=None):
        super(UserDialog, self).__init__(parent=parent)
        self.setWindowTitle('User Dialog')
        # Drop the "?" context-help button from the title bar.
        self.setWindowFlags((self.windowFlags() ^ QtCore.Qt.WindowContextHelpButtonHint))
        self.create_widgets()
        self.create_layouts()
        self.create_connections()

    def create_widgets(self):
        """Build all child widgets (header, inputs, buttons)."""
        self.dialog_label = QtWidgets.QLabel()
        self.dialog_label.setStyleSheet('color: rgb(71, 143, 202);\nfont: 18pt;')
        self.dialog_label.setText('Create User')
        self.line = QtWidgets.QFrame()
        self.line.setFrameShape(QtWidgets.QFrame.HLine)
        self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.name_line_edit = QtWidgets.QLineEdit()
        self.name_line_edit.setPlaceholderText('Enter Name')
        self.login_line_edit = QtWidgets.QLineEdit()
        self.login_line_edit.setPlaceholderText('stalker')
        self.email_line_edit = QtWidgets.QLineEdit()
        self.email_line_edit.setPlaceholderText('')
        self.password_line_edit = QtWidgets.QLineEdit()
        self.password_line_edit.setPlaceholderText('******')
        self.ok_button = QtWidgets.QPushButton('OK')
        self.cancel_button = QtWidgets.QPushButton('Cancel')

    def create_layouts(self):
        """Arrange widgets: header on top, form in the middle, buttons below."""
        form_layout = QtWidgets.QFormLayout()
        form_layout.addRow('Name', self.name_line_edit)
        form_layout.addRow('Login', self.login_line_edit)
        form_layout.addRow('Email', self.email_line_edit)
        form_layout.addRow('Password', self.password_line_edit)
        button_layout = QtWidgets.QHBoxLayout()
        button_layout.addStretch()
        button_layout.addWidget(self.ok_button)
        button_layout.addWidget(self.cancel_button)
        label_layout = QtWidgets.QVBoxLayout()
        label_layout.addWidget(self.dialog_label)
        label_layout.addWidget(self.line)
        label_layout.addStretch()
        main_layout = QtWidgets.QVBoxLayout(self)
        main_layout.addLayout(label_layout)
        main_layout.addLayout(form_layout)
        main_layout.addLayout(button_layout)
        self.resize(328, 184)

    def create_connections(self):
        """Wire button clicks to their handlers."""
        self.ok_button.clicked.connect(self.check_user_pass)
        self.cancel_button.clicked.connect(self.close)

    def check_user_pass(self):
        """Validate field lengths, then create the user."""
        self.user_name = self.name_line_edit.text()
        self.user_login = self.login_line_edit.text()
        self.user_email = self.email_line_edit.text()
        self.user_password = self.password_line_edit.text()
        # Minimal sanity check: every field needs at least 2 characters.
        if ((len(self.user_name) < 2) or (len(self.user_login) < 2) or (len(self.user_email) < 2) or (len(self.user_password) < 2)):
            QtWidgets.QMessageBox.warning(self, 'Warning', 'Please, fill out user information completely!')
        else:
            self.create_user()

    def create_user(self):
        """Persist a new User, guarding against duplicate email/login."""
        from stalker.db.session import DBSession
        from stalker import User
        new_user = User(name='{0}'.format(self.user_name), login='{0}'.format(self.user_login), email='{0}'.format(self.user_email), password='{0}'.format(self.user_password))
        if (not (User.query.filter_by(email=self.user_email).scalar() is None)):
            QtWidgets.QMessageBox.warning(self, 'Warning', 'The email address you entered already belongs to an existing user , Please re-enter your e-mail address!')
        elif (not (User.query.filter_by(login=self.user_login).scalar() is None)):
            QtWidgets.QMessageBox.warning(self, 'Warning', "The user '{0}' already exists, Please enter new username!".format(self.user_login))
        else:
            try:
                DBSession.save(new_user)
            except BaseException as e:
                DBSession.rollback()
                QtWidgets.QMessageBox.critical(self, 'Error', str(e))
            else:
                # Bug fix: the success message and close previously ran even
                # after a save failure (right after the error dialog); they now
                # run only when the save actually succeeded.
                QtWidgets.QMessageBox.information(self, 'Success', "User '{0}' successfully created!".format(self.user_login))
                self.close()
class _GlobSplit(Generic[AnyStr]):
    """Split a glob pattern into a list of ``_GlobPart`` path segments.

    Each part records whether it is "magic" (needs regex matching), a
    ``**`` globstar, directory-only, or a drive/root component.  Splitting
    is driven by ``/`` (and ``\\`` on Windows-style patterns) while
    honouring escapes, ``[...]`` sequences and extglob groups.
    """
    def __init__(self, pattern: AnyStr, flags: int) -> None:
        # Capture pattern and derive behaviour switches from the flag bits.
        self.pattern = pattern
        self.unix = _wcparse.is_unix_style(flags)
        self.flags = flags
        self.no_abs = bool((flags & _wcparse._NOABSOLUTE))
        self.globstar = bool((flags & GLOBSTAR))
        self.matchbase = bool((flags & MATCHBASE))
        self.extmatchbase = bool((flags & _wcparse._EXTMATCHBASE))
        self.tilde = bool((flags & GLOBTILDE))
        if _wcparse.is_negative(self.pattern, flags):
            # NOTE(review): keeps only the first character of a negative
            # pattern — looks intentional upstream, but verify against the
            # library this was taken from before relying on it.
            self.pattern = self.pattern[0:1]
        if (flags & NEGATE):
            flags ^= NEGATE
        self.flags = flags
        self.extend = bool((flags & EXTMATCH))
        if (not self.unix):
            # Windows-style: back-slashes separate and drives are detected.
            self.win_drive_detect = True
            self.bslash_abort = True
            self.sep = '\\'
        else:
            self.win_drive_detect = False
            self.bslash_abort = False
            self.sep = '/'
        # Characters that make a segment "magic" for the current flags.
        self.magic_symbols = _wcparse._get_magic_symbols(pattern, self.unix, self.flags)[0]
    def is_magic(self, name: AnyStr) -> bool:
        """Return True if *name* contains any magic (pattern) symbol."""
        for c in self.magic_symbols:
            if (c in name):
                return True
        return False
    def _sequence(self, i: util.StringIter) -> None:
        """Consume a ``[...]`` character class; raise StopIteration if invalid.

        A path separator inside a sequence is not allowed and aborts parsing.
        """
        c = next(i)
        if (c == '!'):
            c = next(i)
        if (c in ('^', '-', '[')):
            c = next(i)
        while (c != ']'):
            if (c == '\\'):
                try:
                    self._references(i, True)
                except _wcparse.PathNameException as e:
                    raise StopIteration from e
            elif (c == '/'):
                raise StopIteration
            c = next(i)
    def _references(self, i: util.StringIter, sequence: bool=False) -> str:
        """Consume a back-slash escape; return the escaped char if it is a separator.

        Inside a sequence, escaping a separator raises ``PathNameException``.
        """
        value = ''
        c = next(i)
        if (c == '\\'):
            if (sequence and self.bslash_abort):
                raise _wcparse.PathNameException
            value = c
        elif (c == '/'):
            if sequence:
                raise _wcparse.PathNameException
            value = c
        else:
            # Any other escaped character: nothing special to report.
            pass
        return value
    def parse_extend(self, c: str, i: util.StringIter) -> bool:
        """Try to consume an extglob group ``?(...)``/``*(...)`` etc.

        Returns True on success; on failure rewinds the iterator so the
        characters are re-parsed as literals.
        """
        success = True
        index = i.index
        list_type = c
        try:
            c = next(i)
            if (c != '('):
                raise StopIteration
            while (c != ')'):
                c = next(i)
                # Extglob groups may nest.
                if (self.extend and (c in _wcparse.EXT_TYPES) and self.parse_extend(c, i)):
                    continue
                if (c == '\\'):
                    try:
                        self._references(i)
                    except StopIteration:
                        pass
                elif (c == '['):
                    index = i.index
                    try:
                        self._sequence(i)
                    except StopIteration:
                        i.rewind((i.index - index))
        except StopIteration:
            # Not a valid group: restore position and report failure.
            success = False
            c = list_type
            i.rewind((i.index - index))
        return success
    def store(self, value: AnyStr, l: list[_GlobPart], dir_only: bool) -> None:
        """Append *value* as a ``_GlobPart``, compiling magic segments.

        Consecutive globstars collapse into one part; empty non-leading
        segments are dropped.
        """
        if (l and (value in (b'', ''))):
            return
        globstar = ((value in (b'**', '**')) and self.globstar)
        magic = self.is_magic(value)
        if magic:
            v = cast(Pattern[AnyStr], _wcparse._compile(value, self.flags))
        else:
            v = value
        if (globstar and l and l[(- 1)].is_globstar):
            l[(- 1)] = _GlobPart(v, magic, globstar, dir_only, False)
        else:
            l.append(_GlobPart(v, magic, globstar, dir_only, False))
    def split(self) -> list[_GlobPart]:
        """Split the pattern into parts and return them.

        Bytes patterns are processed via a latin-1 round trip so one code
        path handles both ``str`` and ``bytes``.  Raises ``ValueError`` for
        absolute patterns when ``_NOABSOLUTE`` is set.
        """
        split_index = []
        parts = []
        start = (- 1)
        if isinstance(self.pattern, bytes):
            is_bytes = True
            pattern = self.pattern.decode('latin-1')
        else:
            is_bytes = False
            pattern = self.pattern
        i = util.StringIter(pattern)
        if self.win_drive_detect:
            # Peel off a Windows drive (or UNC root) before the main scan.
            (root_specified, drive, slash, end) = _wcparse._get_win_drive(pattern)
            if (drive is not None):
                parts.append(_GlobPart((drive.encode('latin-1') if is_bytes else drive), False, False, True, True))
                start = (end - 1)
                i.advance(start)
            elif ((drive is None) and root_specified):
                parts.append(_GlobPart((b'\\' if is_bytes else '\\'), False, False, True, True))
                if pattern.startswith('/'):
                    start = 0
                    i.advance(1)
                else:
                    start = 1
                    i.advance(2)
        elif ((not self.win_drive_detect) and pattern.startswith('/')):
            # Unix absolute pattern: record the root part.
            parts.append(_GlobPart((b'/' if is_bytes else '/'), False, False, True, True))
            start = 0
            i.advance(1)
        # Main scan: remember every separator position (with an offset that
        # tells how many chars the separator itself consumed).
        for c in i:
            if (self.extend and (c in _wcparse.EXT_TYPES) and self.parse_extend(c, i)):
                continue
            if (c == '\\'):
                index = i.index
                value = ''
                try:
                    value = self._references(i)
                    if ((self.bslash_abort and (value == '\\')) or (value == '/')):
                        split_index.append(((i.index - 2), 1))
                except StopIteration:
                    i.rewind((i.index - index))
            elif (c == '/'):
                split_index.append(((i.index - 1), 0))
            elif (c == '['):
                index = i.index
                try:
                    self._sequence(i)
                except StopIteration:
                    i.rewind((i.index - index))
        # Emit one directory part per recorded separator, then the tail.
        for (split, offset) in split_index:
            value = pattern[(start + 1):split]
            self.store(cast(AnyStr, (value.encode('latin-1') if is_bytes else value)), parts, True)
            start = (split + offset)
        if (start < len(pattern)):
            value = pattern[(start + 1):]
            if value:
                self.store(cast(AnyStr, (value.encode('latin-1') if is_bytes else value)), parts, False)
        if (len(pattern) == 0):
            parts.append(_GlobPart((pattern.encode('latin-1') if is_bytes else pattern), False, False, False, False))
        # MATCHBASE/_EXTMATCHBASE: prepend '**' so a bare name matches anywhere.
        if ((self.extmatchbase and (not parts[0].is_drive)) or (self.matchbase and (len(parts) == 1) and (not parts[0].dir_only))):
            self.globstar = True
            parts.insert(0, _GlobPart((b'**' if is_bytes else '**'), True, True, True, False))
        if (self.no_abs and parts and parts[0].is_drive):
            raise ValueError('The pattern must be a relative path pattern')
        return parts
class Metrics(Base):
    """ORM row holding a single metric value, keyed by (metric, label)."""
    __tablename__ = 'metrics'
    # Composite primary key: metric name plus a discriminating label.
    metric = Column(String, primary_key=True)
    label = Column(String, primary_key=True)
    value = Column(Float)

    @classmethod
    def update(cls, session, metric, label, value, initial_value=None):
        """Upsert the (metric, label) row.

        Tries an UPDATE first; if no row matched, INSERTs a new row seeded
        with ``initial_value`` (defaulting to ``value``).  The caller owns
        the session/transaction lifecycle (no commit here).
        """
        # Fix: the first parameter is the class, so this must be declared a
        # classmethod — without the decorator a call via the class would
        # have bound the session object to ``cls``.
        rows_affected = session.query(cls).filter(and_((cls.metric == metric), (cls.label == label))).update({'value': value})
        if (not rows_affected):
            if (initial_value is None):
                initial_value = value
            session.add(cls(metric=metric, label=label, value=initial_value))
def extractEnsigsWritings(item):
    """Map a release feed *item* to a release message for known OEL series.

    Returns None for previews or items with no volume/chapter/fragment info,
    a release message for recognised tags, and False when no tag matches.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol or frag)) or ('preview' in item['title'].lower()):
        return None
    # Tag -> canonical series title lookup; first match wins.
    series_by_tag = (
        ('Sword-shisho', 'I was a Sword when I Reincarnated!'),
        ('Gentle Demon', 'Demon Noble Girl ~Tale of a Gentle Demon~'),
        ('Undead(?) Life', 'Life(?) as an Undead'),
    )
    for tag, series in series_by_tag:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type='oel')
    return False
class ListOfAllSavedSubjects():
    """Use-case wrapper that fetches all saved subjects through the subject-example client."""
    class Request():
        # Placeholder request object; this use case takes no parameters.
        pass
    def __init__(self, domain: str, port: str, api_key: str):
        # Underlying HTTP client configured for the target service.
        self.add_example_of_subject = AddExampleOfSubjectClient(api_key=api_key, domain=domain, port=port)
    def execute(self) -> dict:
        """Issue the GET request and return the decoded response dict."""
        response: dict = self.add_example_of_subject.get()
        return response
class DDR3SPDData():
    """Decode geometry and timing parameters from a DDR3 SPD EEPROM dump.

    ``spd_data`` is the raw SPD byte sequence; fields are decoded per the
    JEDEC DDR3 SPD layout (MTB/FTB timebases, byte offsets 4-38).
    """
    memtype = 'DDR3'
    # Supported data rates (MT/s) a module can be binned into.
    _speedgrades = [800, 1066, 1333, 1600, 1866, 2133]

    def __init__(self, spd_data):
        self.get_geometry(spd_data)
        self.init_timebase(spd_data)
        self.get_timings(spd_data)

    def get_geometry(self, data):
        """Decode bank/row/column counts from SPD bytes 4 and 5."""
        bankbits = {0: 3, 1: 4, 2: 5, 3: 6}[_read_field(data[4], nbits=3, shift=4)]
        rowbits = {0: 12, 1: 13, 2: 14, 3: 15, 4: 16}[_read_field(data[5], nbits=3, shift=3)]
        colbits = {0: 9, 1: 10, 2: 11, 3: 12}[_read_field(data[5], nbits=3, shift=0)]
        self.nbanks = (2 ** bankbits)
        self.nrows = (2 ** rowbits)
        self.ncols = (2 ** colbits)

    def get_timings(self, spd_data):
        """Decode the timing bytes and populate technology/speedgrade timings."""
        b = spd_data
        tck_min = self.txx_ns(mtb=b[12], ftb=b[34])
        taa_min = self.txx_ns(mtb=b[16], ftb=b[35])
        twr_min = self.txx_ns(mtb=b[17])
        trcd_min = self.txx_ns(mtb=b[18], ftb=b[36])
        trrd_min = self.txx_ns(mtb=b[19])
        trp_min = self.txx_ns(mtb=b[20], ftb=b[37])
        tras_min = self.txx_ns(mtb=_word(_lsn(b[21]), b[22]))
        trc_min = self.txx_ns(mtb=_word(_msn(b[21]), b[23]), ftb=b[38])
        trfc_min = self.txx_ns(mtb=_word(b[25], b[24]))
        twtr_min = self.txx_ns(mtb=b[26])
        trtp_min = self.txx_ns(mtb=b[27])
        tfaw_min = self.txx_ns(mtb=_word(_lsn(b[28]), b[29]))
        # Fix: tREFI is the 64 ms refresh window spread over 8192 refreshes,
        # i.e. 64e6 ns / 8192 ~= 7812.5 ns; the previous `.0 / 8192` was 0.
        technology_timings = _TechnologyTimings(tREFI=(64e6 / 8192), tWTR=(4, twtr_min), tCCD=(4, None), tRRD=(4, trrd_min), tZQCS=(64, 80))
        speedgrade_timings = _SpeedgradeTimings(tRP=trp_min, tRCD=trcd_min, tWR=twr_min, tRFC=(None, trfc_min), tFAW=(None, tfaw_min), tRAS=tras_min)
        self.speedgrade = str(self.speedgrade_freq(tck_min))
        self.technology_timings = technology_timings
        self.speedgrade_timings = {self.speedgrade: speedgrade_timings, 'default': speedgrade_timings}

    def init_timebase(self, data):
        """Compute the fine (FTB) and medium (MTB) timebases in ns from bytes 9-11."""
        fine_timebase_dividend = _read_field(data[9], nbits=4, shift=4)
        fine_timebase_divisor = _read_field(data[9], nbits=4, shift=0)
        fine_timebase_ps = (fine_timebase_dividend / fine_timebase_divisor)
        self.fine_timebase_ns = (fine_timebase_ps * 0.001)
        medium_timebase_dividend = data[10]
        medium_timebase_divisor = data[11]
        self.medium_timebase_ns = (medium_timebase_dividend / medium_timebase_divisor)

    def txx_ns(self, mtb, ftb=0):
        """Convert an MTB count plus a signed FTB correction to nanoseconds."""
        ftb = _twos_complement(ftb, 8)  # FTB bytes are 8-bit two's complement
        return ((mtb * self.medium_timebase_ns) + (ftb * self.fine_timebase_ns))

    @classmethod
    def speedgrade_freq(cls, tck_ns):
        """Map a minimum clock period (ns) to the nearest known speedgrade (MT/s).

        Raises ValueError when the implied transfer rate matches no bin.
        """
        # Fix: first parameter is the class, so declare it a classmethod.
        freq_mhz = ((1 / (tck_ns * 1e-09)) / 1000000.0)
        freq_mhz *= 2  # DDR: two transfers per clock
        for f in cls._speedgrades:
            max_error = 2
            if (abs((freq_mhz - f)) < max_error):
                return f
        raise ValueError('Transfer rate = {:.2f} does not correspond to any speedgrade'.format(freq_mhz))
class OptionSeriesColumnrangeSonificationContexttracksMappingLowpass(Options):
    """Option group for the lowpass-filter mapping of a columnrange sonification context track.

    NOTE(review): sibling option classes in this file follow a stripped
    ``@property`` pattern; these accessors are presumably properties in the
    original source — confirm before calling them as plain methods.
    """
    def frequency(self) -> 'OptionSeriesColumnrangeSonificationContexttracksMappingLowpassFrequency':
        # Lazily-created sub-options object for the lowpass cutoff frequency.
        return self._config_sub_data('frequency', OptionSeriesColumnrangeSonificationContexttracksMappingLowpassFrequency)
    def resonance(self) -> 'OptionSeriesColumnrangeSonificationContexttracksMappingLowpassResonance':
        # Lazily-created sub-options object for the lowpass resonance.
        return self._config_sub_data('resonance', OptionSeriesColumnrangeSonificationContexttracksMappingLowpassResonance)
class LPDDR4SimulationPads(SimulationPads):
    """Simulation pad set for an LPDDR4 interface (control, command and data groups)."""
    def layout(self, databits=16):
        """Return the pad layout; data-strobe/mask widths scale with ``databits`` (one per byte)."""
        nbytes = databits // 8
        control = [SimPad('clk', 1), SimPad('cke', 1), SimPad('odt', 1), SimPad('reset_n', 1), SimPad('cs', 1), SimPad('ca', 6)]
        data = [SimPad('dq', databits, io=True), SimPad('dqs', nbytes, io=True), SimPad('dmi', nbytes, io=True)]
        return control + data
class StripeAuthorizationSchema(StripeAuthorizationSchemaPublic):
    """Authenticated JSON:API schema for Stripe authorizations.

    Extends the public schema with the write-only ``stripe_auth_code`` field.
    """
    class Meta():
        # JSON:API resource configuration (type name, detail view routing,
        # and dasherized attribute names).
        type_ = 'stripe-authorization'
        self_view = 'v1.stripe_authorization_detail'
        self_view_kwargs = {'id': '<id>'}
        inflect = dasherize
    # Write-only and required: accepted on input, never serialized back out.
    stripe_auth_code = fields.Str(load_only=True, required=True)
def _configure_and_start_service(service_branch, fledge_url, remove_directories):
    """Install the notification C service from *service_branch*, then enable it via the Fledge REST API.

    Fails the test (assert False) when the install script errors; always
    cleans up the downloaded service sources.  Asserts the POST to
    ``/fledge/service`` succeeds and echoes the service name back.
    """
    import http.client  # local import: used for the REST call below
    try:
        subprocess.run(['$FLEDGE_ROOT/tests/system/python/scripts/install_c_service {} {}'.format(service_branch, SERVICE)], shell=True, check=True, stdout=subprocess.DEVNULL)
    except subprocess.CalledProcessError:
        assert False, '{} installation failed'.format(SERVICE)
    finally:
        remove_directories('/tmp/fledge-service-{}'.format(SERVICE))
    # Fix: this line was truncated to a bare `conn =` (syntax error);
    # restore the HTTP connection to the Fledge API host.
    conn = http.client.HTTPConnection(fledge_url)
    data = {'name': SERVICE_NAME, 'type': 'notification', 'enabled': 'true'}
    conn.request('POST', '/fledge/service', json.dumps(data))
    r = conn.getresponse()
    assert (200 == r.status)
    r = r.read().decode()
    jdoc = json.loads(r)
    assert (2 == len(jdoc))
    assert (SERVICE_NAME == jdoc['name'])
class TestDateTime(TestData):
    """Round-trip test for Elasticsearch date formats.

    Indexes documents whose field names equal their ES date-format names,
    then checks the detected format for every field matches.

    NOTE(review): ``setup_class``/``teardown_class`` lack ``@classmethod``
    and ``get_time_values_from_datetime`` lacks ``@staticmethod`` — pytest
    and class-level access still call them correctly, but the decorators
    were presumably stripped; confirm against the original source.
    """
    # Two sample timestamps (with explicit UTC offset) used to build docs.
    times = ['2019-11-26T19:58:15.246+0000', '1970-01-01T00:00:03.000+0000']
    time_index_name = 'test_time_formats'
    def setup_class(cls):
        """Create the test index with one date field per supported format."""
        es = ES_TEST_CLIENT
        if es.indices.exists(index=cls.time_index_name):
            es.indices.delete(index=cls.time_index_name)
        dts = [datetime.strptime(time, '%Y-%m-%dT%H:%M:%S.%f%z') for time in cls.times]
        time_formats_docs = [TestDateTime.get_time_values_from_datetime(dt) for dt in dts]
        # Each field is mapped as `date` with its own named format.
        mappings = {'properties': {}}
        for (field_name, field_value) in time_formats_docs[0].items():
            mappings['properties'][field_name] = {}
            mappings['properties'][field_name]['type'] = 'date'
            mappings['properties'][field_name]['format'] = field_name
        index = 'test_time_formats'
        es.options(ignore_status=[400, 404]).indices.delete(index=index)
        es.indices.create(index=index, mappings=mappings)
        for (i, time_formats) in enumerate(time_formats_docs):
            es.index(index=index, id=i, document=time_formats)
        es.indices.refresh(index=index)
    def teardown_class(cls):
        """Drop the test index."""
        es = ES_TEST_CLIENT
        es.indices.delete(index=cls.time_index_name)
    def test_all_formats(self):
        """Every mapped field should report its own name as its date format."""
        ed_field_mappings = FieldMappings(client=ES_TEST_CLIENT, index_pattern=self.time_index_name)
        # Renaming a field must not affect format detection for the others.
        ed_field_mappings.rename({'strict_year_month': 'renamed_strict_year_month'})
        for format_name in self.time_formats.keys():
            es_date_format = ed_field_mappings.date_field_format(format_name)
            assert (format_name == es_date_format)
    def get_time_values_from_datetime(dt: datetime) -> dict:
        """Render *dt* in every supported ES date format, keyed by format name."""
        time_formats = {'epoch_millis': int((dt.timestamp() * 1000)), 'epoch_second': int(dt.timestamp()), 'strict_date_optional_time': (dt.strftime('%Y-%m-%dT%H:%M:%S.%f')[:(- 3)] + dt.strftime('%z')), 'basic_date': dt.strftime('%Y%m%d'), 'basic_date_time': (dt.strftime('%Y%m%dT%H%M%S.%f')[:(- 3)] + dt.strftime('%z')), 'basic_date_time_no_millis': dt.strftime('%Y%m%dT%H%M%S%z'), 'basic_ordinal_date': dt.strftime('%Y%j'), 'basic_ordinal_date_time': (dt.strftime('%Y%jT%H%M%S.%f')[:(- 3)] + dt.strftime('%z')), 'basic_ordinal_date_time_no_millis': dt.strftime('%Y%jT%H%M%S%z'), 'basic_time': (dt.strftime('%H%M%S.%f')[:(- 3)] + dt.strftime('%z')), 'basic_time_no_millis': dt.strftime('%H%M%S%z'), 'basic_t_time': (dt.strftime('T%H%M%S.%f')[:(- 3)] + dt.strftime('%z')), 'basic_t_time_no_millis': dt.strftime('T%H%M%S%z'), 'basic_week_date': dt.strftime('%GW%V%u'), 'basic_week_date_time': (dt.strftime('%GW%V%uT%H%M%S.%f')[:(- 3)] + dt.strftime('%z')), 'basic_week_date_time_no_millis': dt.strftime('%GW%V%uT%H%M%S%z'), 'strict_date': dt.strftime('%Y-%m-%d'), 'date': dt.strftime('%Y-%m-%d'), 'strict_date_hour': dt.strftime('%Y-%m-%dT%H'), 'date_hour': dt.strftime('%Y-%m-%dT%H'), 'strict_date_hour_minute': dt.strftime('%Y-%m-%dT%H:%M'), 'date_hour_minute': dt.strftime('%Y-%m-%dT%H:%M'), 'strict_date_hour_minute_second': dt.strftime('%Y-%m-%dT%H:%M:%S'), 'date_hour_minute_second': dt.strftime('%Y-%m-%dT%H:%M:%S'), 'strict_date_hour_minute_second_fraction': dt.strftime('%Y-%m-%dT%H:%M:%S.%f')[:(- 3)], 'date_hour_minute_second_fraction': dt.strftime('%Y-%m-%dT%H:%M:%S.%f')[:(- 3)], 'strict_date_hour_minute_second_millis': dt.strftime('%Y-%m-%dT%H:%M:%S.%f')[:(- 3)], 'date_hour_minute_second_millis': dt.strftime('%Y-%m-%dT%H:%M:%S.%f')[:(- 3)], 'strict_date_time': (dt.strftime('%Y-%m-%dT%H:%M:%S.%f')[:(- 3)] + dt.strftime('%z')), 'date_time': (dt.strftime('%Y-%m-%dT%H:%M:%S.%f')[:(- 3)] + dt.strftime('%z')), 'strict_date_time_no_millis': dt.strftime('%Y-%m-%dT%H:%M:%S%z'), 
        'date_time_no_millis': dt.strftime('%Y-%m-%dT%H:%M:%S%z'), 'strict_hour': dt.strftime('%H'), 'hour': dt.strftime('%H'), 'strict_hour_minute': dt.strftime('%H:%M'), 'hour_minute': dt.strftime('%H:%M'), 'strict_hour_minute_second': dt.strftime('%H:%M:%S'), 'hour_minute_second': dt.strftime('%H:%M:%S'), 'strict_hour_minute_second_fraction': dt.strftime('%H:%M:%S.%f')[:(- 3)], 'hour_minute_second_fraction': dt.strftime('%H:%M:%S.%f')[:(- 3)], 'strict_hour_minute_second_millis': dt.strftime('%H:%M:%S.%f')[:(- 3)], 'hour_minute_second_millis': dt.strftime('%H:%M:%S.%f')[:(- 3)], 'strict_ordinal_date': dt.strftime('%Y-%j'), 'ordinal_date': dt.strftime('%Y-%j'), 'strict_ordinal_date_time': (dt.strftime('%Y-%jT%H:%M:%S.%f')[:(- 3)] + dt.strftime('%z')), 'ordinal_date_time': (dt.strftime('%Y-%jT%H:%M:%S.%f')[:(- 3)] + dt.strftime('%z')), 'strict_ordinal_date_time_no_millis': dt.strftime('%Y-%jT%H:%M:%S%z'), 'ordinal_date_time_no_millis': dt.strftime('%Y-%jT%H:%M:%S%z'), 'strict_time': (dt.strftime('%H:%M:%S.%f')[:(- 3)] + dt.strftime('%z')), 'time': (dt.strftime('%H:%M:%S.%f')[:(- 3)] + dt.strftime('%z')), 'strict_time_no_millis': dt.strftime('%H:%M:%S%z'), 'time_no_millis': dt.strftime('%H:%M:%S%z'), 'strict_t_time': (dt.strftime('T%H:%M:%S.%f')[:(- 3)] + dt.strftime('%z')), 't_time': (dt.strftime('T%H:%M:%S.%f')[:(- 3)] + dt.strftime('%z')), 'strict_t_time_no_millis': dt.strftime('T%H:%M:%S%z'), 't_time_no_millis': dt.strftime('T%H:%M:%S%z'), 'strict_week_date': dt.strftime('%G-W%V-%u'), 'week_date': dt.strftime('%G-W%V-%u'), 'strict_week_date_time': (dt.strftime('%G-W%V-%uT%H:%M:%S.%f')[:(- 3)] + dt.strftime('%z')), 'week_date_time': (dt.strftime('%G-W%V-%uT%H:%M:%S.%f')[:(- 3)] + dt.strftime('%z')), 'strict_week_date_time_no_millis': dt.strftime('%G-W%V-%uT%H:%M:%S%z'), 'week_date_time_no_millis': dt.strftime('%G-W%V-%uT%H:%M:%S%z'), 'strict_weekyear': dt.strftime('%G'), 'weekyear': dt.strftime('%G'), 'strict_weekyear_week': dt.strftime('%G-W%V'), 'weekyear_week': 
        dt.strftime('%G-W%V'), 'strict_weekyear_week_day': dt.strftime('%G-W%V-%u'), 'weekyear_week_day': dt.strftime('%G-W%V-%u'), 'strict_year': dt.strftime('%Y'), 'year': dt.strftime('%Y'), 'strict_year_month': dt.strftime('%Y-%m'), 'year_month': dt.strftime('%Y-%m'), 'strict_year_month_day': dt.strftime('%Y-%m-%d'), 'year_month_day': dt.strftime('%Y-%m-%d')}
        return time_formats
    # Expected strftime pattern per ES date-format name, used by the test.
    time_formats = {'epoch_millis': '%Y-%m-%dT%H:%M:%S.%f', 'epoch_second': '%Y-%m-%dT%H:%M:%S', 'strict_date_optional_time': '%Y-%m-%dT%H:%M:%S.%f%z', 'basic_date': '%Y%m%d', 'basic_date_time': '%Y%m%dT%H%M%S.%f', 'basic_date_time_no_millis': '%Y%m%dT%H%M%S%z', 'basic_ordinal_date': '%Y%j', 'basic_ordinal_date_time': '%Y%jT%H%M%S.%f%z', 'basic_ordinal_date_time_no_millis': '%Y%jT%H%M%S%z', 'basic_time': '%H%M%S.%f%z', 'basic_time_no_millis': '%H%M%S%z', 'basic_t_time': 'T%H%M%S.%f%z', 'basic_t_time_no_millis': 'T%H%M%S%z', 'basic_week_date': '%GW%V%u', 'basic_week_date_time': '%GW%V%uT%H%M%S.%f%z', 'basic_week_date_time_no_millis': '%GW%V%uT%H%M%S%z', 'date': '%Y-%m-%d', 'strict_date': '%Y-%m-%d', 'strict_date_hour': '%Y-%m-%dT%H', 'date_hour': '%Y-%m-%dT%H', 'strict_date_hour_minute': '%Y-%m-%dT%H:%M', 'date_hour_minute': '%Y-%m-%dT%H:%M', 'strict_date_hour_minute_second': '%Y-%m-%dT%H:%M:%S', 'date_hour_minute_second': '%Y-%m-%dT%H:%M:%S', 'strict_date_hour_minute_second_fraction': '%Y-%m-%dT%H:%M:%S.%f', 'date_hour_minute_second_fraction': '%Y-%m-%dT%H:%M:%S.%f', 'strict_date_hour_minute_second_millis': '%Y-%m-%dT%H:%M:%S.%f', 'date_hour_minute_second_millis': '%Y-%m-%dT%H:%M:%S.%f', 'strict_date_time': '%Y-%m-%dT%H:%M:%S.%f%z', 'date_time': '%Y-%m-%dT%H:%M:%S.%f%z', 'strict_date_time_no_millis': '%Y-%m-%dT%H:%M:%S%z', 'date_time_no_millis': '%Y-%m-%dT%H:%M:%S%z', 'strict_hour': '%H', 'hour': '%H', 'strict_hour_minute': '%H:%M', 'hour_minute': '%H:%M', 'strict_hour_minute_second': '%H:%M:%S', 'hour_minute_second': '%H:%M:%S', 'strict_hour_minute_second_fraction': '%H:%M:%S.%f', 'hour_minute_second_fraction': '%H:%M:%S.%f', 'strict_hour_minute_second_millis': '%H:%M:%S.%f', 'hour_minute_second_millis': '%H:%M:%S.%f', 'strict_ordinal_date': '%Y-%j', 'ordinal_date': '%Y-%j', 'strict_ordinal_date_time': '%Y-%jT%H:%M:%S.%f%z', 'ordinal_date_time': '%Y-%jT%H:%M:%S.%f%z', 'strict_ordinal_date_time_no_millis': '%Y-%jT%H:%M:%S%z', 'ordinal_date_time_no_millis': 
    '%Y-%jT%H:%M:%S%z', 'strict_time': '%H:%M:%S.%f%z', 'time': '%H:%M:%S.%f%z', 'strict_time_no_millis': '%H:%M:%S%z', 'time_no_millis': '%H:%M:%S%z', 'strict_t_time': 'T%H:%M:%S.%f%z', 't_time': 'T%H:%M:%S.%f%z', 'strict_t_time_no_millis': 'T%H:%M:%S%z', 't_time_no_millis': 'T%H:%M:%S%z', 'strict_week_date': '%G-W%V-%u', 'week_date': '%G-W%V-%u', 'strict_week_date_time': '%G-W%V-%uT%H:%M:%S.%f%z', 'week_date_time': '%G-W%V-%uT%H:%M:%S.%f%z', 'strict_week_date_time_no_millis': '%G-W%V-%uT%H:%M:%S%z', 'week_date_time_no_millis': '%G-W%V-%uT%H:%M:%S%z', 'strict_weekyear_week_day': '%G-W%V-%u', 'weekyear_week_day': '%G-W%V-%u', 'strict_year': '%Y', 'year': '%Y', 'strict_year_month': '%Y-%m', 'year_month': '%Y-%m', 'strict_year_month_day': '%Y-%m-%d', 'year_month_day': '%Y-%m-%d'}
def topics(request):
    """Render the topics page: the user's own topics plus public topics from others.

    Anonymous visitors get only the public topics (``topics`` is None).
    """
    user = request.user
    if user.is_authenticated:
        own_topics = Topic.objects.filter(owner=user).order_by('date_added')
        public_topics = Topic.objects.filter(public=True).exclude(owner=user).order_by('date_added')
    else:
        own_topics = None
        public_topics = Topic.objects.filter(public=True).order_by('date_added')
    return render(request, 'learning_logs/topics.html', {'topics': own_topics, 'public_topics': public_topics})
def eye(n: int, m: Optional[int]=None, k: int=0) -> Matrix:
    """Return an n-by-m matrix of floats with ones on the k-th diagonal.

    ``m`` defaults to ``n`` (square); positive ``k`` shifts the diagonal
    right, negative shifts it down, mirroring numpy.eye semantics.
    """
    if (m is None):
        m = n
    # Number of positions the shifted diagonal actually occupies.
    diag_len = (m if ((n > m) and (k < 0)) else (m - abs(k)))
    rows = []
    for row in range(n):
        col = (row + k)
        diag_pos = (row if (k >= 0) else col)
        one = 1 if (0 <= diag_pos < diag_len) else 0
        left = clamp(col, 0, m)
        right = clamp(((m - col) - one), 0, m)
        rows.append((([0.0] * left) + ([1.0] * one)) + ([0.0] * right))
    return rows
class OptionsEditable(DataClass):
    """Editable-behaviour options backed by the ``_attrs`` dict.

    NOTE(review): each name is defined twice (getter form then setter form);
    this matches a stripped ``@property``/``@x.setter`` pattern seen across
    the file — as written, the later (setter) definition shadows the earlier
    one. Confirm against the original source before changing.
    """
    def add(self):
        return self._attrs['add']
    def add(self, val):
        self._attrs['add'] = val
    def remove(self):
        return self._attrs['remove']
    def remove(self, val):
        self._attrs['remove'] = val
    def updateGroup(self):
        return self._attrs['updateGroup']
    def updateGroup(self, val):
        self._attrs['updateGroup'] = val
    def updateTime(self):
        return self._attrs['updateTime']
    def updateTime(self, val):
        self._attrs['updateTime'] = val
    def overrideItems(self):
        return self._attrs['overrideItems']
    def overrideItems(self, val):
        self._attrs['overrideItems'] = val
# Fix: the bare `.parametrize(...)` had lost its `@pytest.mark` prefix
# (a stripped decorator — same artifact appears elsewhere in this file),
# leaving a syntax error; `pytest` is presumed imported at file top.
@pytest.mark.parametrize('input, expected', [['a b c d', 'A b c...'], ['a b c d', 'A b c...'], ['a b', 'A b'], ['a b c', 'A b c'], ['1 2 3', '1 2 3'], ['A b c', 'A b c'], [''.join(('a' for _ in range(35))), (('A' + ''.join(('a' for _ in range(31)))) + '...')], [''.join(('a' for _ in range(27))), ('A' + ''.join(('a' for _ in range(26))))]])
def test_get_subject(input, expected):
    """get_subject() capitalizes the first character and truncates long input with '...'."""
    assert (get_subject(input) == expected)
def train(args, model, device, train_loader, optimizer, epoch):
    """Run one training epoch and return the dataset-average NLL loss.

    Logs progress every ``args.log_interval`` batches; ``args.dry_run``
    stops after the first logged batch (for smoke testing).
    """
    model.train()
    train_loss = 0
    for (batch_idx, (data, target)) in enumerate(train_loader):
        (data, target) = (data.to(device), target.to(device))
        optimizer.zero_grad()
        output = model(data)
        loss = F.nll_loss(output, target)
        loss.backward()
        optimizer.step()
        if ((batch_idx % args.log_interval) == 0):
            logger.info('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(epoch, (batch_idx * len(data)), len(train_loader.dataset), ((100.0 * batch_idx) / len(train_loader)), loss.item()))
            # dry_run: bail out after the first logged batch (note: this
            # batch's loss is then not added to the running total).
            if args.dry_run:
                break
        # Weight the batch-mean loss by batch size for a per-sample average.
        train_loss += (loss.item() * len(data))
    return (train_loss / len(train_loader.dataset))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.