code stringlengths 281 23.7M |
|---|
def test_mem_from_cgroup_files_dont_exist(elasticapm_client, tmpdir):
    """CPUMetricSet reports the cgroup memory *limit* but no *usage* when
    memory.usage_in_bytes is missing.

    The fake cgroup tree below creates memory.limit_in_bytes and memory.stat,
    but never creates memory.usage_in_bytes -- hence "files dont exist" in the
    test name: only the limit metric can be collected.
    """
    # Fake /proc and cgroup files rooted in the pytest tmpdir.
    proc_stat_self = os.path.join(tmpdir.strpath, 'self-stat')
    proc_stat = os.path.join(tmpdir.strpath, 'stat')
    proc_meminfo = os.path.join(tmpdir.strpath, 'meminfo')
    cgroup_memory_limit = os.path.join(tmpdir.strpath, 'memory', 'memory.limit_in_bytes')
    cgroup_memory_stat = os.path.join(tmpdir.strpath, 'memory', 'memory.stat')
    proc_self_cgroup = os.path.join(tmpdir.strpath, 'cgroup')
    os.mkdir(os.path.join(tmpdir.strpath, 'memory'))
    proc_self_mount = os.path.join(tmpdir.strpath, 'mountinfo')
    # The mountinfo entry points the cgroup memory controller at our tmpdir.
    for (path, content) in ((proc_stat, TEMPLATE_PROC_STAT_DEBIAN.format(user=0, idle=0)), (proc_stat_self, TEMPLATE_PROC_STAT_SELF.format(utime=0, stime=0)), (proc_meminfo, TEMPLATE_PROC_MEMINFO), (cgroup_memory_limit, TEMPLATE_CGROUP_MEM_LIMIT_IN_BYTES_LIMITED), (cgroup_memory_stat, TEMPLATE_CGROUP_MEM_STAT), (proc_self_cgroup, '9:memory:/slice'), (proc_self_mount, (('39 30 0:35 / ' + tmpdir.strpath) + '/memory rw,nosuid,nodev,noexec,relatime shared:10 - cgroup cgroup rw,seclabel,memory\n'))):
        with open(path, mode='w') as f:
            f.write(content)
    metricset = CPUMetricSet(MetricsRegistry(elasticapm_client), sys_stats_file=proc_stat, process_stats_file=proc_stat_self, memory_stats_file=proc_meminfo, proc_self_cgroup=proc_self_cgroup, mount_info=proc_self_mount)
    # Limit file exists, usage file does not.
    assert (metricset.cgroup_files.limit is not None)
    assert (metricset.cgroup_files.usage is None)
    data = next(metricset.collect())
    assert ('system.process.cgroup.memory.mem.limit.bytes' in data['samples'])
    assert ('system.process.cgroup.memory.mem.usage.bytes' not in data['samples'])
def genesis_state(address_with_balance, balance, address_with_bytecode, bytecode, address_with_storage):
    """Build a three-account genesis state for chain fixtures.

    One account carries only a balance, one only bytecode, and one only a
    pre-populated storage of 100 identity-mapped slots.
    """
    def account(balance=0, code=b'', storage=None):
        # All accounts start at nonce 0; storage defaults to empty.
        return {
            'balance': balance,
            'code': code,
            'nonce': 0,
            'storage': {} if storage is None else storage,
        }

    return {
        address_with_balance: account(balance=balance),
        address_with_bytecode: account(code=bytecode),
        address_with_storage: account(storage={slot: slot for slot in range(100)}),
    }
def test_get_device_name_dict(backend_db, frontend_db):
    """get_device_name_dict() groups device names by device class, then vendor."""
    # Two distinct names for (class1, vendor1); single names elsewhere.
    insert_test_fw(backend_db, 'fw1', vendor='vendor1', device_class='class1', device_name='name1')
    insert_test_fw(backend_db, 'fw2', vendor='vendor1', device_class='class1', device_name='name2')
    insert_test_fw(backend_db, 'fw3', vendor='vendor1', device_class='class2', device_name='name1')
    insert_test_fw(backend_db, 'fw4', vendor='vendor2', device_class='class1', device_name='name1')
    device_name_dict = frontend_db.get_device_name_dict()
    # DB result order is not guaranteed: sort the only multi-element list
    # in place before comparing against the expected literal.
    device_name_dict.get('class1', {}).get('vendor1', []).sort()
    assert (device_name_dict == {'class1': {'vendor1': ['name1', 'name2'], 'vendor2': ['name1']}, 'class2': {'vendor1': ['name1']}})
class TestNewlinePreservation(unittest.TestCase):
    """Check that read_manifest/dump_manifest round-trips preserve whether
    the manifest file ends with a trailing newline."""

    def _roundtrip(self, directory, name, contents):
        """Write *contents* to a file, round-trip it through read/dump, and
        return the (before, after) trailing-newline flags."""
        path = os.path.join(directory, name)
        with open(path, 'w') as fh:
            fh.write(contents)
        with open(path, 'r') as fh:
            before = _check_newline(fh)
        dump_manifest(read_manifest(path), path)
        with open(path, 'r') as fh:
            after = _check_newline(fh)
        return before, after

    def test_newline(self):
        # A manifest that ends with a newline must keep it after dumping.
        with tempfile.TemporaryDirectory() as workdir:
            before, after = self._roundtrip(workdir, 'trailingnewline.json', MANIFEST_WITH_NEWLINE)
            self.assertTrue(before)
            self.assertTrue(after)

    def test_no_newline(self):
        # A manifest without a trailing newline must stay that way.
        with tempfile.TemporaryDirectory() as workdir:
            before, after = self._roundtrip(workdir, 'notrailingnewline.json', MANIFEST_NO_NEWLINE)
            self.assertFalse(before)
            self.assertFalse(after)
def parse_hmmsearch_file(pfam_file):
    """Parse a tab-separated hmmsearch result table into a query->Pfam mapping.

    Each non-comment line must have exactly 10 tab-separated fields:
    pfam, query, evalue, score, qlen, hmmfrom, hmmto, seqfrom, seqto, qcov.
    Only the first two fields are used; the strict unpack deliberately
    validates the column count.

    Args:
        pfam_file: path to the hmmsearch output file.

    Returns:
        dict mapping each query id to the set of Pfam accessions hit.

    Raises:
        ValueError: if a data line does not have exactly 10 columns.
    """
    pfams = {}
    with open(pfam_file, 'r') as pfamf:
        for line in pfamf:
            # Skip comment lines and (fix) blank lines, which previously
            # crashed the 10-field unpack on files with a trailing newline.
            if line.startswith('#') or not line.strip():
                continue
            (pfam, query, evalue, score, qlen, hmmfrom, hmmto, seqfrom, seqto, qcov) = map(str.strip, line.split('\t'))
            # setdefault replaces the original if/else membership dance.
            pfams.setdefault(query, set()).add(pfam)
    return pfams
def invent_brands_from_generic_bnf_code(generic_code, num_brands=5):
    """Fabricate up to 9 branded BNF codes from a generic BNF code.

    Each invented brand reuses the generic code's 9-character chemical
    prefix, inserts a synthetic product code ('B0'..'B8'), and repeats the
    two-character strength-and-form segment twice (product and generic
    equivalent positions), yielding 15-character codes.
    """
    assert 0 <= num_brands <= 9
    chemical_prefix = generic_code[:9]
    strength_form = generic_code[13:15]
    return [
        f'{chemical_prefix}B{brand_index}{strength_form}{strength_form}'
        for brand_index in range(num_brands)
    ]
def get_episodes(html, url):
    """Extract pixiv episodes from *html*, trying page-type specific parsers
    first and falling back to scraping an embedded JSON search-result blob.

    Returns a list of Episode objects; the scraped list is reversed so the
    result is ordered oldest-first.
    """
    # Dedicated parsers for search pages and their AJAX endpoints.
    if is_search_page(url):
        return get_episodes_from_search(html, url)
    if is_search_ajax(url):
        return get_episodes_from_search_ajax(html, url)
    # Newer page layouts: AJAX result payloads and init-data blobs;
    # fall through silently when the markers are absent.
    try:
        return get_episodes_from_ajax_result(html, url)
    except DataNotFound:
        pass
    try:
        return get_episodes_from_init_data(html, url)
    except DataNotFound:
        pass
    # May raise if the page is actually a login wall.
    check_login_html(html)
    s = []
    # NOTE(review): there is no whitespace between id="..." and data-items="
    # in this pattern -- presumably that matches the markup as served, but
    # worth confirming against a live search-result page.
    match = re.search('id="js-mount-point-search-result-list"data-items="([^"]+)', html)
    if match:
        # The attribute value is HTML-escaped JSON describing each illust.
        data = unescape(match.group(1))
        for illust in json.loads(data):
            s.append(Episode('{illustId} - {illustTitle}'.format_map(illust), urljoin(url, '/member_illust.php?mode=medium&illust_id={illustId}'.format_map(illust))))
    # Single-image pages yield one pseudo-episode pointing at the page itself.
    if ('member_illust.php?mode=medium&illust_id' in url):
        s.append(Episode('image', url))
    return s[::(- 1)]
class PortStats(base_tests.SimpleProtocol):
    """Request port stats for all ports (OFPP_ANY) and log each entry.

    A smoke test: it only checks the exchange completes, logging the
    returned entries without asserting on their contents.
    """

    def runTest(self):
        request = ofp.message.port_stats_request(port_no=ofp.OFPP_ANY)
        logging.info('Sending port stats request')
        stats = get_stats(self, request)
        logging.info('Received %d port stats entries', len(stats))
        for entry in stats:
            logging.info(entry.show())
def pytest_configure():
    """Configure a minimal in-memory Django settings module for the test run.

    Optional integrations (oauth, provider, guardian) are enabled only when
    their packages are importable; missing packages are silently skipped.
    """
    from django.conf import settings
    # Core settings: sqlite in-memory DB, DRF + serializer-extensions apps.
    settings.configure(DEBUG_PROPAGATE_EXCEPTIONS=True, DATABASES={'default': {'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:'}}, SITE_ID=1, SECRET_KEY='not very secret in tests', USE_I18N=True, USE_L10N=True, STATIC_URL='/static/', ROOT_URLCONF='tests.urls', TEMPLATE_LOADERS=('django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader'), MIDDLEWARE_CLASSES=('django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware'), INSTALLED_APPS=('django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', 'rest_framework.authtoken', 'rest_framework_serializer_extensions', 'tests'), PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher', 'django.contrib.auth.hashers.PBKDF2PasswordHasher', 'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher', 'django.contrib.auth.hashers.BCryptPasswordHasher', 'django.contrib.auth.hashers.MD5PasswordHasher', 'django.contrib.auth.hashers.CryptPasswordHasher'))
    # OAuth 1 support (requires both oauth2 and oauth_provider).
    try:
        import oauth2
        import oauth_provider
    except ImportError:
        pass
    else:
        settings.INSTALLED_APPS += ('oauth_provider',)
    # OAuth 2 support via django-oauth2-provider.
    try:
        import provider
    except ImportError:
        pass
    else:
        settings.INSTALLED_APPS += ('provider', 'provider.oauth2')
    # Object-level permissions via django-guardian.
    try:
        import guardian
    except ImportError:
        pass
    else:
        settings.ANONYMOUS_USER_ID = (- 1)
        settings.AUTHENTICATION_BACKENDS = ('django.contrib.auth.backends.ModelBackend', 'guardian.backends.ObjectPermissionBackend')
        settings.INSTALLED_APPS += ('guardian',)
    # django.setup() exists only on Django >= 1.7; older versions lack it.
    try:
        import django
        django.setup()
    except AttributeError:
        pass
def LLM(prompt, mode='text', gpt4=False):
    """Send *prompt* to the OpenAI chat-completions API and return the reply text.

    Args:
        prompt: user text to send; must not exceed MAX_PROMPT characters.
        mode: 'text', 'debug' or 'install' select a system calibration
            message; 'code' replays the running conversation in `memory`.
        gpt4: when True use the GPT-4 model, otherwise GPT-3.5.

    Returns:
        The model's reply, cleaned for 'code'/'debug'/'install' modes.

    Raises:
        ValueError: if the prompt is too long or *mode* is not recognized.
    """
    global memory
    if len(prompt) > MAX_PROMPT:
        raise ValueError(f'prompt ({len(prompt)}) too large (max {MAX_PROMPT})')
    if mode == 'text':
        messages = [{'role': 'system', 'content': LLM_SYSTEM_CALIBRATION_MESSAGE}, {'role': 'user', 'content': prompt}]
    elif mode == 'code':
        # 'code' mode continues the accumulated conversation kept in `memory`.
        messages = memory
    elif mode == 'debug':
        messages = [{'role': 'system', 'content': DEBUG_SYSTEM_CALIBRATION_MESSAGE}, {'role': 'user', 'content': prompt}]
    elif mode == 'install':
        messages = [{'role': 'system', 'content': INSTALL_SYSTEM_CALIBRATION_MESSAGE}, {'role': 'user', 'content': prompt}]
    else:
        # BUG FIX: an unrecognized mode previously fell through and crashed
        # below with NameError on `messages`; fail fast with a clear error.
        raise ValueError(f'unknown mode: {mode!r}')
    response = openai_client.chat.completions.create(model=('gpt-4-1106-preview' if gpt4 else 'gpt-3.5-turbo-1106'), messages=messages)
    response_content = response.choices[0].message.content
    # Post-process the raw model output depending on the mode.
    if mode in ('code', 'debug'):
        response_content = clean_code_string(response_content)
    elif mode == 'install':
        response_content = clean_install_string(response_content)
    return response_content
def test_inline_df():
    """The `sql ^` / `sql_val ^` DSL should resolve `df` from the enclosing
    local scope, including self-joins under aliases."""
    df = pd.DataFrame({'x': [1, 2, 3], 'y': [4, 2, 2]})
    # Plain select returns every row of the inlined frame.
    assert (len((sql ^ 'SELECT * FROM df')) == 3)
    # Self-join on x = y: y values 2,2 match x value 2 twice.
    assert (len((sql ^ 'SELECT * FROM df a, df b WHERE a.x = b.y')) == 2)
    # sql_val returns a scalar; chained left joins keep all 3 base rows
    # plus the extra matches.
    assert ((sql_val ^ '\n    SELECT COUNT() FROM (\n        SELECT * FROM df a\n        LEFT JOIN df b ON a.x = b.y\n        LEFT JOIN df c ON b.x = c.y\n    )\n    ') == 5)
def fuzzgadgets(ch, method, properties, body):
    """AMQP callback: build and run a randomized USB HID gadget via configfs.

    The message *body* is a '!!'-separated string of gadget parameters; any
    field equal to the literal string 'None' is replaced with random bytes.
    A shell script is generated, made executable and executed, the gadget is
    left up for a few seconds, then torn down with removegadget.sh.
    """
    # FIX: renamed `min`/`max` (shadowed builtins) to min_len/max_len.
    (vid, pid, dclass, serial, man, prod, min_len, max_len) = body.decode('utf-8').split('!!')
    chmodgadget = 'chmod a+x tempgadget.sh'
    creategadget = './tempgadget.sh'
    removegadget = './removegadget.sh'
    cprint('Creating new gadget', color='blue')
    with open('tempgadget.sh', 'w') as gdt:
        basedir = '/sys/kernel/config/usb_gadget'
        gdt.write('#!/bin/bash\n')
        gdt.write('modprobe libcomposite\n')
        gdt.write('cd /sys/kernel/config/usb_gadget/\n')
        gdt.write('mkdir g && cd g\n')
        gdt.write('mkdir -p /sys/kernel/config/usb_gadget/g/strings/0x409/\n')
        gdt.write('mkdir -p /sys/kernel/config/usb_gadget/g/functions/hid.usb0/\n')
        gdt.write('mkdir -p /sys/kernel/config/usb_gadget/g/configs/c.1/strings/0x409/\n')
        # Descriptor fields: use the supplied value, or random bytes for 'None'.
        gdt.write(('echo %s > %s/g/idVendor\n' % ((hex(int(vid)) if (vid != 'None') else f'0x{binascii.hexlify(os.urandom(2)).decode()}'), basedir)))
        gdt.write(('echo %s > %s/g/idProduct\n' % ((hex(int(pid)) if (pid != 'None') else f'0x{binascii.hexlify(os.urandom(2)).decode()}'), basedir)))
        gdt.write(('echo %s > %s/g/bcdDevice\n' % ('0x200', basedir)))
        gdt.write(('echo %s > %s/g/bcdUSB\n' % ('0x0058', basedir)))
        gdt.write(('echo %s > %s/g/bDeviceClass\n' % ((hex(int(dclass)) if (dclass != 'None') else f'0x{binascii.hexlify(os.urandom(1)).decode()}'), basedir)))
        gdt.write(('echo %s > %s/g/bDeviceSubClass\n' % (f'0x{binascii.hexlify(os.urandom(1)).decode()}', basedir)))
        gdt.write(('echo %s > %s/g/bDeviceProtocol\n' % (f'0x{binascii.hexlify(os.urandom(1)).decode()}', basedir)))
        gdt.write(('echo %s > %s/g/bMaxPacketSize0\n' % (f'0x{binascii.hexlify(os.urandom(1)).decode()}', basedir)))
        # FIX: check against the literal 'None' like every other field; the old
        # `if serial` was always truthy because the string 'None' is non-empty.
        # The random branch deliberately appends printf-style fuzz tokens.
        gdt.write(('echo %s > %s/g/strings/0x409/serialnumber\n' % ((serial if (serial != 'None') else f'{binascii.hexlify(os.urandom(50))}%c%c%c%s%s%s%d%p'), basedir)))
        gdt.write(("echo '%s' > %s/g/strings/0x409/manufacturer\n" % ((man if (man != 'None') else f'0x{binascii.hexlify(os.urandom(1)).decode()}'), basedir)))
        gdt.write(("echo '%s' > %s/g/strings/0x409/product\n" % ((prod if (prod != 'None') else f'0x{binascii.hexlify(os.urandom(50)).decode()}'), basedir)))
        gdt.write(('echo %s > %s/g/configs/c.1/MaxPower\n' % (f'0x{binascii.hexlify(os.urandom(1)).decode()}', basedir)))
        gdt.write(('echo %s > %s/g/configs/c.1/bmAttributes\n' % ('0x80', basedir)))
        gdt.write(("echo 'Default Configuration' > %s/g/configs/c.1/strings/0x409/configuration\n" % basedir))
        gdt.write(('echo %s > %s/g/functions/hid.usb0/protocol\n' % (f'0x{binascii.hexlify(os.urandom(1)).decode()}', basedir)))
        gdt.write(('echo 256 > %s/g/functions/hid.usb0/report_length\n' % basedir))
        gdt.write(('echo %s > %s/g/functions/hid.usb0/subclass\n' % (f'0x{binascii.hexlify(os.urandom(1)).decode()}', basedir)))
        # Random HID report descriptor: collection open (a1) ... close (c0),
        # with a random-length random payload between min_len and max_len.
        s = f'a1{binascii.hexlify(os.urandom(randint(int(min_len), int(max_len)))).decode()}c0'
        gdt.write(("echo '%s' | xxd -r -ps > %s/g/functions/hid.usb0/report_desc\n" % (s, basedir)))
        gdt.write(('ln -s %s/g/functions/hid.usb0 %s/g/configs/c.1\n' % (basedir, basedir)))
        gdt.write('udevadm settle -t 5 || :\n')
        gdt.write(('ls /sys/class/udc/ > %s/g/UDC\n' % basedir))
        # FIX: removed redundant gdt.close(); the `with` block closes the file.
    cprint('Running the gadget', color='blue')
    os.system(chmodgadget)
    os.system(creategadget)
    sleep(4)
    cprint('removing the gadget', color='green')
    os.system(removegadget)
def _validate_array(datum, schema, named_schemas, parent_ns, raise_errors, options):
    """Return True if *datum* is a non-string sequence (or array.array)
    whose every item validates against the schema's 'items' type."""
    # Strings are Sequences too, but never valid Avro arrays.
    if isinstance(datum, str) or not isinstance(datum, (Sequence, array.array)):
        return False
    item_schema = schema['items']
    return all(
        _validate(datum=item, schema=item_schema, named_schemas=named_schemas,
                  field=parent_ns, raise_errors=raise_errors, options=options)
        for item in datum
    )
class queue_stats_request(stats_request):
    """OpenFlow v1.2 (version=3) OFPST_QUEUE stats request.

    Loxigen-style generated message class: header fields plus the
    port_no/queue_id selectors identifying which queue(s) to query.
    """
    version = 3
    type = 18        # OFPT_STATS_REQUEST
    stats_type = 5   # OFPST_QUEUE

    def __init__(self, xid=None, flags=None, port_no=None, queue_id=None):
        # xid stays None when unset (assigned later by the connection layer);
        # the remaining fields default to 0.
        self.xid = xid
        self.flags = flags if flags is not None else 0
        self.port_no = port_no if port_no is not None else 0
        self.queue_id = queue_id if queue_id is not None else 0

    def pack(self):
        """Serialize the message; the length field is back-patched last.

        NOTE(review): this uses str ('\\x00' * 4) and ''.join -- Python 2
        era code where str is bytes; under Python 3 pack() would need a
        bytes-based rewrite.
        """
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))  # pad
        packed.append(util.pack_port_no(self.port_no))
        packed.append(struct.pack('!L', self.queue_id))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    # BUG FIX: `unpack` takes only the reader and is called unbound
    # elsewhere in loxi code; without @staticmethod the reader would bind
    # as `self`. Upstream loxigen output marks it @staticmethod.
    @staticmethod
    def unpack(reader):
        """Deserialize a queue_stats_request from *reader*; asserts the
        header identifies this exact message type."""
        obj = queue_stats_request()
        _version = reader.read('!B')[0]
        assert (_version == 3)
        _type = reader.read('!B')[0]
        assert (_type == 18)
        _length = reader.read('!H')[0]
        # Constrain further reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 5)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)  # pad
        obj.port_no = util.unpack_port_no(reader)
        obj.queue_id = reader.read('!L')[0]
        return obj

    def __eq__(self, other):
        # Field-wise equality; differing subclasses never compare equal.
        return (type(self) == type(other)
                and self.xid == other.xid
                and self.flags == other.flags
                and self.port_no == other.port_no
                and self.queue_id == other.queue_id)

    def pretty_print(self, q):
        """Write a human-readable rendering of the message to printer *q*."""
        q.text('queue_stats_request {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('port_no = ')
                q.text(util.pretty_port(self.port_no))
                q.text(',')
                q.breakable()
                q.text('queue_id = ')
                q.text(('%#x' % self.queue_id))
                q.breakable()
        q.text('}')
class OptionPlotoptionsAreaSonificationTracksMappingVolume(Options):
    """Options for `plotOptions.area.sonification.tracks.mapping.volume`.

    BUG FIX: each getter/setter pair was written as two plain methods with
    the same name, so the setter definition silently shadowed the getter.
    Restored the @property / @<name>.setter pairing backed by the Options
    config store.
    """

    @property
    def mapFunction(self):
        """Mapping function applied to the value (read from config)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data point property the volume is mapped to."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped volume range."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped volume range."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Range ('series', 'chart', ...) the mapping is computed within."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionPlotoptionsErrorbarOnpointPosition(Options):
    """Options for `plotOptions.errorbar.onPoint.position`.

    BUG FIX: each getter/setter pair was written as two plain methods with
    the same name, so the setter definition silently shadowed the getter.
    Restored the @property / @<name>.setter pairing backed by the Options
    config store.
    """

    @property
    def offsetX(self):
        """Horizontal offset, in pixels, from the anchored point."""
        return self._config_get(None)

    @offsetX.setter
    def offsetX(self, num: float):
        self._config(num, js_type=False)

    @property
    def offsetY(self):
        """Vertical offset, in pixels, from the anchored point."""
        return self._config_get(None)

    @offsetY.setter
    def offsetY(self, num: float):
        self._config(num, js_type=False)

    @property
    def x(self):
        """Absolute x position (overrides the anchored point's x)."""
        return self._config_get(None)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        """Absolute y position (overrides the anchored point's y)."""
        return self._config_get(None)

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)
class Media():
    """Factory for media components (video, audio, YouTube embed, camera)
    attached to a page object."""

    def __init__(self, ui):
        # Keep a handle to the page so components can be registered on it.
        self.page = ui.page

    def video(self, value: str='', align: str='center', path: str=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, profile: types.PROFILE_TYPE=None, options: dict=None):
        """Create an HTML video component; autoplay by default, centered via
        auto margins when align == 'center'."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        dft_options = {'autoplay': True}
        if (options is not None):
            dft_options.update(options)
        html_media = html.HtmlMedia.Media(self.page, value, path, width, height, html_code, profile, dft_options)
        if (align == 'center'):
            html_media.style.css.margin = 'auto'
            html_media.style.css.display = 'block'
        html.Html.set_component_skin(html_media)
        return html_media

    def audio(self, value: str='', path: str=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, profile: types.PROFILE_TYPE=None, options: dict=None):
        """Create an HTML audio component; autoplay by default."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        dft_options = {'autoplay': True}
        if (options is not None):
            dft_options.update(options)
        html_audio = html.HtmlMedia.Audio(self.page, value, path, width, height, html_code, profile, dft_options)
        html.Html.set_component_skin(html_audio)
        return html_audio

    def youtube(self, link: str, align: str='center', width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, profile: types.PROFILE_TYPE=None, options: dict=None):
        """Create a YouTube iframe component, normalizing plain watch links
        to /embed/ URLs first."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        dflt_options = {'width': '420', 'height': '315', 'type': 'text/html'}
        if ('/embed/' not in link):
            link = html.HtmlMedia.Youtube.get_embed_link(link)
        if (options is not None):
            dflt_options.update(options)
        html_youtube = html.HtmlMedia.Youtube(self.page, link, width, height, html_code, profile, dflt_options)
        html_youtube.style.css.text_align = align
        html.Html.set_component_skin(html_youtube)
        return html_youtube

    def camera(self, align: str='center', width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, profile: types.PROFILE_TYPE=None, options: dict=None):
        """Create a webcam capture component; autoplay by default, centered
        via auto margins when align == 'center'."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        dft_options = {'autoplay': True}
        if (options is not None):
            dft_options.update(options)
        html_media = html.HtmlMedia.Camera(self.page, width, height, html_code, profile, dft_options)
        if (align == 'center'):
            html_media.style.css.margin = 'auto'
            html_media.style.css.display = 'block'
        html.Html.set_component_skin(html_media)
        return html_media
def display_library_view(params):
    """Fetch an Emby library view's metadata and dispatch rendering to the
    handler matching its CollectionType; unknown types are ignored."""
    node_id = params.get('view_id')
    view_info_url = ('{server}/emby/Users/{userid}/Items/' + node_id)
    data_manager = DataManager()
    view_info = data_manager.get_content(view_info_url)
    log.debug('VIEW_INFO : {0}', view_info)
    # Map each collection type to its renderer instead of an if/elif chain.
    renderers = {
        'movies': display_movies_type,
        'tvshows': display_tvshow_type,
        'homevideos': display_homevideos_type,
        'music': display_music_type,
        'musicvideos': display_musicvideos_type,
        'livetv': display_livetv_type,
    }
    renderer = renderers.get(view_info.get('CollectionType', None))
    if renderer is not None:
        renderer(params, view_info)
class MockDeviceDB(BaseDeviceDB):
    """In-memory DeviceDB stub that serves nine synthetic devices for tests."""

    def __init__(self, service: 'MockService') -> None:
        super().__init__(service)
        # Pre-build devices test-dev-1 .. test-dev-9.
        self.mock_devices = [self.mock_dev(i) for i in range(1, 10)]

    def mock_dev(self, idx: int) -> DeviceInfo:
        """Build one synthetic device: two IPv6 + two IPv4 addresses, a vendor
        derived from idx (vendor1 for idx < 5, vendor2 after), role idx%3."""
        addrs = ['fd01:db00:11:{:04x}::a'.format(idx), 'fd01:db00:11:{:04x}::b'.format(idx), '10.10.{}.11'.format(idx), '10.10.{}.12'.format(idx)]
        addrs = [DeviceIP(a, a, False) for a in addrs]
        vendor_id = ((idx // 5) + 1)
        vendor = ('vendor%d' % vendor_id)
        # First address doubles as the primary/management address.
        return DeviceInfo(self.service, ('test-dev-%d' % idx), addrs, addrs[0], self.service.vendors.get(vendor), ('role%d' % (idx % 3)), 'ch_model')

    async def _fetch_device_data(self, name_filter=None, hostname=None) -> typing.List[DeviceInfo]:
        # Filters are accepted for interface compatibility but ignored:
        # the full mock set is always returned.
        self.logger.info('got devices: %s', self.mock_devices)
        return self.mock_devices
def _create_plot_component():
    """Build a container with two linked Bessel-function plots comparing the
    'hold' and 'connectedhold' render styles.

    Returns an HPlotContainer holding both plots; they share the same data
    and the same 2D range so panning/zooming stays in sync.
    """
    # Sample J0 (Bessel function of the first kind, order 0) on [-2, 10].
    x = linspace((- 2.0), 10.0, 40)
    pd = ArrayPlotData(index=x, y0=jn(0, x))
    plot1 = Plot(pd, title='render_style = hold', padding=50, border_visible=True, overlay_border=True)
    plot1.legend.visible = True
    lineplot = plot1.plot(('index', 'y0'), name='j_0', color='red', render_style='hold')
    attach_tools(plot1)
    # Second plot reuses plot1's range so both views stay synchronized.
    plot2 = Plot(pd, range2d=plot1.range2d, title='render_style = connectedhold', padding=50, border_visible=True, overlay_border=True)
    plot2.plot(('index', 'y0'), color='blue', render_style='connectedhold')
    attach_tools(plot2)
    container = HPlotContainer()
    container.add(plot1)
    container.add(plot2)
    return container
class ConcatModelBuilder(BaseModelBuilder):
    """Model builder that encodes each observation with a per-modality block,
    concatenates the embeddings, and optionally stacks hidden and recurrent
    blocks on top before emitting the latent representation."""

    def __init__(self, modality_config: Dict[(str, Union[(str, Dict[(str, Any)])])], observation_modality_mapping: Dict[(str, str)], shared_embedding_keys: Optional[Union[(List[str], Dict[(str, List[str])])]]):
        self._check_modality_config(modality_config)
        super().__init__(modality_config, observation_modality_mapping, shared_embedding_keys)
        # Per-modality perception block classes and their constructor kwargs,
        # populated only for modalities with a non-empty config.
        self.obs_to_block: Dict[(str, PerceptionBlock)] = dict()
        self.block_params = dict()
        for (modality, config) in self.modality_config.items():
            if (config != {}):
                self.obs_to_block[modality] = Factory(PerceptionBlock).type_from_name(config['block_type'])
                self.block_params[modality] = config['block_params']

    # NOTE(review): bare expression below looks like a mangled decorator
    # (e.g. `@override(BaseModelBuilder)`) that lost its '@' -- as written it
    # just evaluates the name and has no effect; confirm against upstream.
    (BaseModelBuilder)
    def from_observation_space(self, observation_space: spaces.Dict) -> InferenceBlock:
        """Assemble the full perception network for *observation_space* and
        wrap it in an InferenceBlock keyed on the latent output."""
        sample = observation_space.sample()
        perception_dict = dict()
        in_keys = list()
        # 1) One encoder block per mapped observation key.
        for obs_key in observation_space.spaces.keys():
            if (obs_key not in self.observation_modality_mapping):
                BColors.print_colored(f"ConcatModelBuilder: The observation '{obs_key}' could not be found in the model_builder.observation_modality_mapping and wont be considered as an input to the network.", BColors.WARNING)
                continue
            in_keys.append(obs_key)
            modality = self.observation_modality_mapping[obs_key]
            block_type = self.obs_to_block[modality]
            params = self.block_params[modality]
            net = block_type(in_keys=obs_key, out_keys=f'{obs_key}_{block_type.__name__}', in_shapes=sample[obs_key].shape, **params)
            perception_dict[f'{obs_key}_{block_type.__name__}'] = net
        # 2) Concatenate all encoder outputs. If no hidden/recurrent blocks
        # follow, the concat output is already the latent.
        out_key = ConcatModelBuilderKeys.CONCAT
        if ((ConcatModelBuilderKeys.HIDDEN not in self.obs_to_block) and (ConcatModelBuilderKeys.RECURRENCE not in self.obs_to_block)):
            out_key = ConcatModelBuilderKeys.LATENT
        latent_keys = list(perception_dict.keys())
        latent_shapes = [net.out_shapes()[0] for net in perception_dict.values()]
        net = ConcatenationBlock(in_keys=latent_keys, out_keys=out_key, in_shapes=latent_shapes, concat_dim=(- 1))
        perception_dict[out_key] = net
        # 3) Optional hidden block; it emits the latent unless recurrence follows.
        if (ConcatModelBuilderKeys.HIDDEN in self.obs_to_block):
            in_key = out_key
            out_key = ConcatModelBuilderKeys.HIDDEN
            if (ConcatModelBuilderKeys.RECURRENCE not in self.obs_to_block):
                out_key = ConcatModelBuilderKeys.LATENT
            block_type = self.obs_to_block[ConcatModelBuilderKeys.HIDDEN]
            net = block_type(in_keys=in_key, out_keys=out_key, in_shapes=perception_dict[in_key].out_shapes(), **self.block_params[ConcatModelBuilderKeys.HIDDEN])
            perception_dict[out_key] = net
        # 4) Optional recurrent block; always emits the latent.
        if (ConcatModelBuilderKeys.RECURRENCE in self.obs_to_block):
            in_key = out_key
            out_key = ConcatModelBuilderKeys.LATENT
            block_type = self.obs_to_block[ConcatModelBuilderKeys.RECURRENCE]
            net = block_type(in_keys=in_key, out_keys=out_key, in_shapes=perception_dict[in_key].out_shapes(), **self.block_params[ConcatModelBuilderKeys.RECURRENCE])
            perception_dict[out_key] = net
        in_shapes = [sample[obs_key].shape for obs_key in in_keys]
        net = InferenceBlock(in_keys=in_keys, out_keys=ConcatModelBuilderKeys.LATENT, in_shapes=in_shapes, perception_blocks=perception_dict)
        return net

    def _check_modality_config(self, modality_config: Dict):
        """Validate the modality config: HIDDEN and RECURRENCE keys must be
        present (an empty dict {} means 'disabled'), and every enabled entry
        must define exactly 'block_type' and 'block_params'."""
        assert (ConcatModelBuilderKeys.RECURRENCE in modality_config), f'make sure to specify a block for {ConcatModelBuilderKeys.RECURRENCE}!'
        assert (ConcatModelBuilderKeys.HIDDEN in modality_config), f'make sure to specify a block for {str(ConcatModelBuilderKeys.HIDDEN)}!'
        for (key, value) in modality_config.items():
            if (value != {}):
                assert (list(value.keys()) == ['block_type', 'block_params']), f"{self.__class__.__name__} requires 'block_type' and 'block_params' as arguments for modality '{key}'."
class DWARFInfo(object):
    """Parsed access to the DWARF debug sections of a file.

    Each *_sec argument is a section descriptor (stream/size/address); any
    may be None when the section is absent. Accessors return None for
    missing sections where applicable.
    """

    def __init__(self, config, debug_info_sec, debug_aranges_sec, debug_abbrev_sec, debug_frame_sec, eh_frame_sec, debug_str_sec, debug_loc_sec, debug_ranges_sec, debug_line_sec):
        self.config = config
        self.debug_info_sec = debug_info_sec
        self.debug_aranges_sec = debug_aranges_sec
        self.debug_abbrev_sec = debug_abbrev_sec
        self.debug_frame_sec = debug_frame_sec
        self.eh_frame_sec = eh_frame_sec
        self.debug_str_sec = debug_str_sec
        self.debug_loc_sec = debug_loc_sec
        self.debug_ranges_sec = debug_ranges_sec
        self.debug_line_sec = debug_line_sec
        # Generic structs for section-level parsing; per-CU structs are built
        # separately since DWARF format and address size can differ per CU.
        self.structs = DWARFStructs(little_endian=self.config.little_endian, dwarf_format=32, address_size=self.config.default_address_size)
        # Abbrev tables can be shared between CUs: cache by section offset.
        self._abbrevtable_cache = {}

    def has_debug_info(self):
        """Whether a .debug_info section is present."""
        return bool(self.debug_info_sec)

    def iter_CUs(self):
        """Yield CompileUnit objects, parsed lazily from .debug_info."""
        return self._parse_CUs_iter()

    def get_abbrev_table(self, offset):
        """Return the (cached) AbbrevTable at *offset* in .debug_abbrev."""
        dwarf_assert((offset < self.debug_abbrev_sec.size), ("Offset '0x%x' to abbrev table out of section bounds" % offset))
        if (offset not in self._abbrevtable_cache):
            self._abbrevtable_cache[offset] = AbbrevTable(structs=self.structs, stream=self.debug_abbrev_sec.stream, offset=offset)
        return self._abbrevtable_cache[offset]

    def get_string_from_table(self, offset):
        """Return the NUL-terminated string at *offset* in .debug_str."""
        return parse_cstring_from_stream(self.debug_str_sec.stream, offset)

    def line_program_for_CU(self, CU):
        """Return the CU's line program, or None if it has no DW_AT_stmt_list."""
        top_DIE = CU.get_top_DIE()
        if ('DW_AT_stmt_list' in top_DIE.attributes):
            return self._parse_line_program_at_offset(top_DIE.attributes['DW_AT_stmt_list'].value, CU.structs)
        else:
            return None

    def has_CFI(self):
        """Whether .debug_frame call-frame info is present."""
        return (self.debug_frame_sec is not None)

    def CFI_entries(self):
        """Return the list of CFI entries from .debug_frame."""
        cfi = CallFrameInfo(stream=self.debug_frame_sec.stream, size=self.debug_frame_sec.size, address=self.debug_frame_sec.address, base_structs=self.structs)
        return cfi.get_entries()

    def has_EH_CFI(self):
        """Whether .eh_frame call-frame info is present."""
        return (self.eh_frame_sec is not None)

    def EH_CFI_entries(self):
        """Return the list of CFI entries from .eh_frame."""
        cfi = CallFrameInfo(stream=self.eh_frame_sec.stream, size=self.eh_frame_sec.size, address=self.eh_frame_sec.address, base_structs=self.structs, for_eh_frame=True)
        return cfi.get_entries()

    def get_aranges(self):
        """Return an ARanges object from .debug_aranges, or None if absent."""
        if self.debug_aranges_sec:
            return ARanges(self.debug_aranges_sec.stream, self.debug_aranges_sec.size, self.structs)
        else:
            return None

    def location_lists(self):
        """Return a LocationLists object from .debug_loc, or None if absent."""
        if self.debug_loc_sec:
            return LocationLists(self.debug_loc_sec.stream, self.structs)
        else:
            return None

    def range_lists(self):
        """Return a RangeLists object from .debug_ranges, or None if absent."""
        if self.debug_ranges_sec:
            return RangeLists(self.debug_ranges_sec.stream, self.structs)
        else:
            return None

    def _parse_CUs_iter(self):
        """Walk .debug_info, yielding one CompileUnit per header found."""
        if (self.debug_info_sec is None):
            return
        offset = 0
        while (offset < self.debug_info_sec.size):
            cu = self._parse_CU_at_offset(offset)
            # Advance past the CU: its unit_length plus the length field itself.
            offset = ((offset + cu['unit_length']) + cu.structs.initial_length_field_size())
            (yield cu)

    def _parse_CU_at_offset(self, offset):
        """Parse and return the CompileUnit whose header starts at *offset*."""
        # Peek the initial length: per the DWARF spec, the escape value
        # 0xFFFFFFFF selects 64-bit DWARF format, anything else 32-bit.
        initial_length = struct_parse(self.structs.Dwarf_uint32(''), self.debug_info_sec.stream, offset)
        # BUG FIX: original read `initial_length == )` -- the 0xFFFFFFFF
        # escape constant was missing, leaving a syntax error.
        dwarf_format = (64 if (initial_length == 0xFFFFFFFF) else 32)
        # Parse with a provisional 4-byte address size, then re-build the
        # structs if the header declares 8-byte addresses.
        cu_structs = DWARFStructs(little_endian=self.config.little_endian, dwarf_format=dwarf_format, address_size=4)
        cu_header = struct_parse(cu_structs.Dwarf_CU_header, self.debug_info_sec.stream, offset)
        if (cu_header['address_size'] == 8):
            cu_structs = DWARFStructs(little_endian=self.config.little_endian, dwarf_format=dwarf_format, address_size=8)
        cu_die_offset = self.debug_info_sec.stream.tell()
        dwarf_assert(self._is_supported_version(cu_header['version']), ("Expected supported DWARF version. Got '%s'" % cu_header['version']))
        return CompileUnit(header=cu_header, dwarfinfo=self, structs=cu_structs, cu_offset=offset, cu_die_offset=cu_die_offset)

    def _is_supported_version(self, version):
        """DWARF versions 2 through 4 are supported."""
        return (2 <= version <= 4)

    def _parse_line_program_at_offset(self, debug_line_offset, structs):
        """Parse and return the LineProgram at *debug_line_offset* in .debug_line."""
        lineprog_header = struct_parse(structs.Dwarf_lineprog_header, self.debug_line_sec.stream, debug_line_offset)
        end_offset = ((debug_line_offset + lineprog_header['unit_length']) + structs.initial_length_field_size())
        return LineProgram(header=lineprog_header, stream=self.debug_line_sec.stream, structs=structs, program_start_offset=self.debug_line_sec.stream.tell(), program_end_offset=end_offset)
def test_no_flash_fullscreen_false_server_doesnt_flash_fullscreen_windows(no_flash_fullscreen_server: FlashServer) -> None:
    """With flash_fullscreen disabled, refocusing a fullscreen window must
    produce zero flashes."""
    with new_watched_window() as (window, watcher):
        set_fullscreen(window)
        with server_running(no_flash_fullscreen_server):
            # Leave and re-enter focus so a flash would trigger if enabled.
            switch_workspace(1)
            change_focus(window)
    assert (watcher.count_flashes() == 0)
def header(logo_and_title=True):
    """Render the Moseca page chrome: first-run session-state init, page
    config, the horizontal nav menu, and (optionally) the logo + title row."""
    # One-time session-state initialization on the first run.
    if ('first_run' not in st.session_state):
        st.session_state.first_run = True
        for key in ['selected_value', 'filename', 'executed', 'play_karaoke', 'url', 'random_song', 'last_dir', 'player_restart']:
            st.session_state[key] = None
        st.session_state.video_options = []
        st.session_state.tot_delay = 0
    if ('search_results' not in st.session_state):
        st.session_state.search_results = []
    # No page recorded yet: bounce to the default page.
    if ('page' not in st.session_state):
        switch_page(DEFAULT_PAGE)
    st.set_page_config(page_title='Moseca - Music Separation and Karaoke - Free and Open Source alternative to lalal.ai, splitter.ai or media.io vocal remover.', page_icon='img/logo_moseca.png', layout='wide', initial_sidebar_state='collapsed')
    st.markdown(CSS, unsafe_allow_html=True)
    options = ['Separate', 'Karaoke', 'About']
    page = option_menu(menu_title=None, options=options, icons=['play-btn-fill', 'file-earmark-music', 'info-circle'], default_index=options.index(st.session_state.get('page', DEFAULT_PAGE)), orientation='horizontal', styles={'nav-link': {'padding-left': '1.5rem', 'padding-right': '1.5rem'}}, key='')
    # Navigate when the menu selection differs from the current page.
    if (page != st.session_state.get('page', DEFAULT_PAGE)):
        log.info(f'Go to {page}')
        switch_page(page)
    if logo_and_title:
        # Logo and title centered via spacer columns on either side.
        head = st.columns([5, 1, 3, 5])
        with head[1]:
            st.image('img/logo_moseca.png', use_column_width=False, width=80)
        with head[2]:
            st.markdown('<h1>moseca</h1><p><b>Music Source Separation & Karaoke</b></p>', unsafe_allow_html=True)
class TestDynamicNotifiers(unittest.TestCase):
    """Tests for dynamic trait notifiers: method and function listeners of
    every supported arity (0-4 args), notifier priority ordering, exception
    routing through the pushed handler, and garbage-collectability."""

    def setUp(self):
        # Capture notifier exceptions instead of letting them propagate.
        self.exceptions = []
        trait_notifiers.push_exception_handler(self._handle_exception)

    def tearDown(self):
        trait_notifiers.pop_exception_handler()

    def _handle_exception(self, obj, name, old, new):
        # Record (obj, trait, old, new) for every failing notifier call.
        self.exceptions.append((obj, name, old, new))

    def test_dynamic_notifiers_methods(self):
        """Method listeners with 0..4 parameters receive the expected subset
        of (obj, name, old, new)."""
        obj = DynamicNotifiers(ok=2)
        obj.ok = 3
        # Two assignments (constructor + attribute set) -> two notifications.
        self.assertEqual(len(obj.rebind_calls_0), 2)
        expected_1 = [2, 3]
        self.assertEqual(expected_1, obj.rebind_calls_1)
        expected_2 = [('ok', 2), ('ok', 3)]
        self.assertEqual(expected_2, obj.rebind_calls_2)
        expected_3 = [(obj, 'ok', 2), (obj, 'ok', 3)]
        self.assertEqual(expected_3, obj.rebind_calls_3)
        expected_4 = [(obj, 'ok', 0, 2), (obj, 'ok', 2, 3)]
        self.assertEqual(expected_4, obj.rebind_calls_4)

    def test_dynamic_notifiers_methods_failing(self):
        """All five failing method listeners fire, and each failure is routed
        to the pushed exception handler."""
        obj = DynamicNotifiers()
        obj.fail = 1
        self.assertCountEqual([0, 1, 2, 3, 4], obj.exceptions_from)
        self.assertEqual(([(obj, 'fail', 0, 1)] * 5), self.exceptions)

    def test_dynamic_notifiers_functions(self):
        """Function listeners with 0..4 parameters receive the expected
        subset of (obj, name, old, new)."""
        calls_0 = []

        def function_listener_0():
            calls_0.append(())
        calls_1 = []

        def function_listener_1(new):
            calls_1.append((new,))
        calls_2 = []

        def function_listener_2(name, new):
            calls_2.append((name, new))
        calls_3 = []

        def function_listener_3(obj, name, new):
            calls_3.append((obj, name, new))
        calls_4 = []

        def function_listener_4(obj, name, old, new):
            calls_4.append((obj, name, old, new))
        obj = DynamicNotifiers()
        obj.on_trait_change(function_listener_0, 'ok')
        obj.on_trait_change(function_listener_1, 'ok')
        obj.on_trait_change(function_listener_2, 'ok')
        obj.on_trait_change(function_listener_3, 'ok')
        obj.on_trait_change(function_listener_4, 'ok')
        obj.ok = 2
        obj.ok = 3
        expected_0 = [(), ()]
        self.assertEqual(expected_0, calls_0)
        expected_1 = [(2.0,), (3.0,)]
        self.assertEqual(expected_1, calls_1)
        expected_2 = [('ok', 2.0), ('ok', 3.0)]
        self.assertEqual(expected_2, calls_2)
        expected_3 = [(obj, 'ok', 2.0), (obj, 'ok', 3.0)]
        self.assertEqual(expected_3, calls_3)
        expected_4 = [(obj, 'ok', 0.0, 2.0), (obj, 'ok', 2.0, 3.0)]
        self.assertEqual(expected_4, calls_4)

    def test_priority_notifiers_first(self):
        """priority=True listeners are notified before normal ones,
        regardless of registration order."""
        obj = DynamicNotifiers()
        expected_high = set([1, 3])
        expected_low = set([0, 2])
        obj.on_trait_change(obj.low_priority_first, 'priority_test')
        obj.on_trait_change(obj.high_priority_first, 'priority_test', priority=True)
        obj.on_trait_change(obj.low_priority_second, 'priority_test')
        obj.on_trait_change(obj.high_priority_second, 'priority_test', priority=True)
        obj.priority_test = None
        # First two notifications must be the high-priority pair.
        high = set(obj.prioritized_notifications[:2])
        low = set(obj.prioritized_notifications[2:])
        self.assertSetEqual(expected_high, high)
        self.assertSetEqual(expected_low, low)

    def test_dynamic_notifiers_functions_failing(self):
        """All five failing function listeners fire; together with the five
        failing method listeners that makes 10 routed exceptions."""
        obj = DynamicNotifiers()
        exceptions_from = []

        def failing_function_listener_0():
            exceptions_from.append(0)
            raise Exception('error')

        def failing_function_listener_1(new):
            exceptions_from.append(1)
            raise Exception('error')

        def failing_function_listener_2(name, new):
            exceptions_from.append(2)
            raise Exception('error')

        def failing_function_listener_3(obj, name, new):
            exceptions_from.append(3)
            raise Exception('error')

        def failing_function_listener_4(obj, name, old, new):
            exceptions_from.append(4)
            raise Exception('error')
        obj.on_trait_change(failing_function_listener_0, 'fail')
        obj.on_trait_change(failing_function_listener_1, 'fail')
        obj.on_trait_change(failing_function_listener_2, 'fail')
        obj.on_trait_change(failing_function_listener_3, 'fail')
        obj.on_trait_change(failing_function_listener_4, 'fail')
        obj.fail = 1
        self.assertEqual([0, 1, 2, 3, 4], exceptions_from)
        self.assertCountEqual([0, 1, 2, 3, 4], obj.exceptions_from)
        # 5 function listeners + 5 method listeners on 'fail' -> 10 failures.
        self.assertEqual(([(obj, 'fail', 0, 1)] * 10), self.exceptions)

    def test_object_can_be_garbage_collected(self):
        """Registering a listener must not keep the object alive."""
        import weakref

        def listener():
            pass
        obj = DynamicNotifiers()
        obj.on_trait_change(listener, 'ok')
        obj_collected = []

        def obj_collected_callback(weakref):
            obj_collected.append(True)
        obj_weakref = weakref.ref(obj, obj_collected_callback)
        del obj
        self.assertEqual(obj_collected, [True])
        self.assertIsNone(obj_weakref())

    def test_unhashable_object_can_be_garbage_collected(self):
        """Same as above, but for objects that are not hashable (including
        nested/contained listeners)."""
        import weakref

        def listener():
            pass
        obj = UnhashableDynamicNotifiers()
        obj.on_trait_change(listener, 'a_list:ok')
        # Nested unhashable object listening through the container trait.
        obj.a_list.append(UnhashableDynamicNotifiers())
        obj_collected = []

        def obj_collected_callback(weakref):
            obj_collected.append(True)
        obj_weakref = weakref.ref(obj, obj_collected_callback)
        del obj
        self.assertEqual(obj_collected, [True])
        self.assertIsNone(obj_weakref())

    def test_creating_notifiers_dont_create_cyclic_garbage(self):
        """Constructing a notifier-bearing object must leave no cycles for
        the garbage collector to reclaim."""
        gc.collect()
        DynamicNotifiers()
        self.assertEqual(gc.collect(), 0)
def test_writePlist_to_path(tmpdir, pl_no_builtin_types):
    """Data written with the legacy writePlist API loads back unchanged
    when read with use_builtin_types disabled."""
    old_plistlib = pytest.importorskip('fontTools.ufoLib.plistlib')
    out_path = tmpdir / 'test.plist'
    old_plistlib.writePlist(pl_no_builtin_types, str(out_path))
    with out_path.open('rb') as stream:
        reloaded = plistlib.load(stream, use_builtin_types=False)
    assert reloaded == pl_no_builtin_types
class ColumnValuePlot(Metric[ColumnValuePlotResults]):
    """Metric producing value-over-time (or value-over-index) plot data for a
    single numerical column, for both the current and reference datasets.
    """

    # Name of the numerical column to plot.
    column_name: str

    def __init__(self, column_name: str, options: AnyOptions=None):
        self.column_name = column_name
        super().__init__(options=options)

    def calculate(self, data: InputData) -> ColumnValuePlotResults:
        """Build scatter data (raw mode) or binned/aggregated line data.

        Raises:
            ValueError: if the column is missing from either dataset, the
                reference dataset is absent, or the column is not numerical.
        """
        if (self.column_name not in data.current_data.columns):
            raise ValueError(f"Column '{self.column_name}' should present in the current dataset")
        if (data.reference_data is None):
            raise ValueError('Reference data should be present')
        if (self.column_name not in data.reference_data.columns):
            raise ValueError(f"Column '{self.column_name}' should present in the reference dataset")
        dataset_columns = process_columns(data.current_data, data.column_mapping)
        # Accept the column only if it is a declared numerical feature, or it
        # is the target/prediction column of a regression task (or otherwise
        # numeric by dtype).
        if (not ((self.column_name in dataset_columns.num_feature_names) or ((self.column_name == dataset_columns.utility_columns.target) and ((data.column_mapping.task == 'regression') or is_numeric_dtype(data.current_data[self.column_name]))) or (isinstance(dataset_columns.utility_columns.prediction, str) and (self.column_name == dataset_columns.utility_columns.prediction) and ((data.column_mapping.task == 'regression') or is_numeric_dtype(data.current_data[self.column_name]))))):
            raise ValueError('Expected numerical feature')
        datetime_column_name = dataset_columns.utility_columns.date
        curr_df = data.current_data.copy()
        ref_df = data.reference_data.copy()
        # Drop NaN/inf rows and sort by datetime (or index) for plotting.
        curr_df = self._make_df_for_plot(curr_df, self.column_name, datetime_column_name)
        ref_df = self._make_df_for_plot(ref_df, self.column_name, datetime_column_name)
        if self.get_options().render_options.raw_data:
            # Raw mode: return full per-point scatter data.
            return ColumnValuePlotResults(column_name=self.column_name, datetime_column_name=datetime_column_name, current=column_scatter_from_df(curr_df, True), reference=column_scatter_from_df(ref_df, True))
        prefix = None
        if (datetime_column_name is not None):
            # Aggregate over a shared time period chosen from both datasets.
            (prefix, freq) = choose_agg_period(curr_df[datetime_column_name], ref_df[datetime_column_name])
            (curr_plot, _) = prepare_df_for_time_index_plot(curr_df, self.column_name, datetime_column_name, prefix, freq)
            (ref_plot, _) = prepare_df_for_time_index_plot(ref_df, self.column_name, datetime_column_name, prefix, freq)
        else:
            # No datetime column: bin the combined index into 150 buckets so
            # both datasets share the same x-axis bins.
            (_, bins) = pd.cut(curr_df.index.union(ref_df.index), 150, retbins=True)
            (curr_plot, _) = prepare_df_for_time_index_plot(curr_df, self.column_name, datetime_column_name, bins=bins)
            (ref_plot, _) = prepare_df_for_time_index_plot(ref_df, self.column_name, datetime_column_name, bins=bins)
        return ColumnValuePlotResults(column_name=self.column_name, datetime_column_name=datetime_column_name, current={'current': curr_plot}, reference={'reference': ref_plot}, prefix=prefix)

    def _make_df_for_plot(self, df, column_name: str, datetime_column_name: Optional[str]):
        """Drop rows with NaN/inf in the plotted (and datetime) columns and
        sort by datetime when available, else by index."""
        result = df.replace([np.inf, (- np.inf)], np.nan)
        if (datetime_column_name is not None):
            result.dropna(axis=0, how='any', inplace=True, subset=[column_name, datetime_column_name])
            return result.sort_values(datetime_column_name)
        result.dropna(axis=0, how='any', inplace=True, subset=[column_name])
        return result.sort_index()
def create_or_update_messaging_config(db: Session, config: MessagingConfigRequest) -> MessagingConfigResponse:
    """Upsert a messaging config row from the request and echo it back as a
    response model."""
    payload = {
        'key': config.key,
        'name': config.name,
        'service_type': config.service_type.value,
    }
    if config.details:
        payload['details'] = config.details.__dict__
    record: MessagingConfig = MessagingConfig.create_or_update(db=db, data=payload)
    return MessagingConfigResponse(
        name=record.name,
        key=record.key,
        service_type=record.service_type.value,
        details=record.details,
    )
def extract_transform_load(task: TaskSpec) -> None:
    """Run one ETL partition: extract records, transform them, then bulk-load
    into Elasticsearch.

    Cooperates with the module-level ``abort`` event: skips work when a prior
    partition failed, and sets it when this partition fails first.
    """
    if abort.is_set():
        logger.warning(format_log(f'Skipping partition #{task.partition_number} due to previous error', name=task.name))
        return
    start = perf_counter()
    msg = f'Started processing on partition #{task.partition_number}: {task.name}'
    logger.info(format_log(msg, name=task.name))
    client = instantiate_elasticsearch_client()
    try:
        records = task.transform_func(task, extract_records(task))
        if abort.is_set():
            # BUG FIX: previously this re-logged the stale "Started
            # processing" message; log the actual reason for stopping.
            msg = f'Prematurely ending partition #{task.partition_number} due to error in another process'
            logger.warning(format_log(msg, name=task.name))
            return
        if len(records) > 0:
            (success, fail) = load_data(task, records, client)
        else:
            logger.info(format_log('No records to index', name=task.name))
            (success, fail) = (0, 0)
        # Shared multiprocessing counters: guard each update with its lock.
        with total_doc_success.get_lock():
            total_doc_success.value += success
        with total_doc_fail.get_lock():
            total_doc_fail.value += fail
    except Exception:
        if abort.is_set():
            # Another process already failed; downgrade to a warning.
            msg = f'Partition #{task.partition_number} failed after an error was previously encountered'
            logger.warning(format_log(msg, name=task.name))
        else:
            # First failure: log with traceback and signal everyone to stop.
            logger.exception(format_log(f'{task.name} failed!', name=task.name))
            abort.set()
    else:
        msg = f'Partition #{task.partition_number} was successfully processed in {(perf_counter() - start):.2f}s'
        logger.info(format_log(msg, name=task.name))
def extractShmeimeiiWordpressCom(item):
    """Map a shmeimeii.wordpress.com feed item to a release message.

    Returns None for non-chapter or preview posts, a release message when a
    known tag matches, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class DateTimeField(WritableField):
    """Serializer field for ``datetime`` values.

    Parses incoming values against ``input_formats`` (ISO 8601 and/or
    strptime patterns) and renders outgoing values with ``format``.
    """

    type_name = 'DateTimeField'
    type_label = 'datetime'
    widget = widgets.DateTimeInput
    form_field_class = forms.DateTimeField
    default_error_messages = {'invalid': _('Datetime has wrong format. Use one of these formats instead: %s')}
    # Value used to represent "no value".
    empty = None
    # Class-level defaults come from the API settings.
    input_formats = api_settings.DATETIME_INPUT_FORMATS
    format = api_settings.DATETIME_FORMAT

    def __init__(self, input_formats=None, format=None, *args, **kwargs):
        # Per-instance overrides; fall back to the class-level settings.
        self.input_formats = (self.input_formats if (input_formats is None) else input_formats)
        self.format = (self.format if (format is None) else format)
        super(DateTimeField, self).__init__(*args, **kwargs)

    def from_native(self, value):
        """Parse *value* into a ``datetime`` (``None`` for empty input).

        Raises:
            ValidationError: if no configured input format matches.
        """
        if (value in validators.EMPTY_VALUES):
            return None
        if isinstance(value, datetime.datetime):
            return value
        if isinstance(value, datetime.date):
            # Promote a bare date to midnight; when USE_TZ is active a naive
            # datetime is ambiguous, so warn and attach the default timezone.
            value = datetime.datetime(value.year, value.month, value.day)
            if settings.USE_TZ:
                warnings.warn(('DateTimeField received a naive datetime (%s) while time zone support is active.' % value), RuntimeWarning)
                default_timezone = timezone.get_default_timezone()
                value = timezone.make_aware(value, default_timezone)
            return value
        # Try each configured input format in order; first success wins.
        for fmt in self.input_formats:
            if (fmt.lower() == ISO_8601):
                try:
                    parsed = parse_datetime(value)
                except (ValueError, TypeError):
                    pass
                else:
                    if (parsed is not None):
                        return parsed
            else:
                try:
                    parsed = datetime.datetime.strptime(value, fmt)
                except (ValueError, TypeError):
                    pass
                else:
                    return parsed
        msg = (self.error_messages['invalid'] % readable_datetime_formats(self.input_formats))
        raise ValidationError(msg)

    def to_native(self, value):
        """Render *value* with ``self.format``; ``None`` format passes the
        datetime object through unchanged."""
        if ((value is None) or (self.format is None)):
            return value
        if (self.format.lower() == ISO_8601):
            ret = value.isoformat()
            # Normalise a UTC offset to the compact 'Z' suffix.
            if ret.endswith('+00:00'):
                ret = (ret[:(- 6)] + 'Z')
            return ret
        return value.strftime(self.format)
def dichotomic_pattern_mining(seq2pat_pos: Seq2Pat, seq2pat_neg: Seq2Pat, min_frequency_pos: Num=0.3, min_frequency_neg: Num=0.3) -> Dict[(str, List[list])]:
    """Mine frequent patterns from positive and negative sequence sets and
    aggregate them four ways.

    Args:
        seq2pat_pos: miner over the positive sequences.
        seq2pat_neg: miner over the negative sequences.
        min_frequency_pos: minimum frequency for positive patterns.
        min_frequency_neg: minimum frequency for negative patterns.

    Returns:
        Dict mapping each DichotomicAggregation name (intersection, union,
        unique_neg, unique_pos) to a sorted list of patterns.
    """
    patterns_pos = seq2pat_pos.get_patterns(min_frequency=min_frequency_pos)
    patterns_neg = seq2pat_neg.get_patterns(min_frequency=min_frequency_neg)
    # Strip trailing frequency counts so patterns can be compared as pure
    # sequences, then build each tuple-set once (the original recomputed
    # the sets for every aggregation).
    pos_set = set(map(tuple, drop_frequency(patterns_pos)))
    neg_set = set(map(tuple, drop_frequency(patterns_neg)))
    aggregation_to_patterns = {
        DichotomicAggregation.intersection: sorted(map(list, pos_set & neg_set)),
        DichotomicAggregation.union: sorted(map(list, pos_set | neg_set)),
        DichotomicAggregation.unique_neg: sorted(map(list, neg_set - pos_set)),
        DichotomicAggregation.unique_pos: sorted(map(list, pos_set - neg_set)),
    }
    return aggregation_to_patterns
class News(JsPackage):
    """JavaScript wrapper for a news component bound to its DOM element."""

    def __init__(self, component: primitives.HtmlModel, js_code: str=None, set_var: bool=True, is_py_data: bool=True, page: primitives.PageModel=None):
        # Prefer an explicit js_code handle, else the component's own code.
        self.htmlCode = component.htmlCode if js_code is None else js_code
        self.varName = "document.getElementById('%s')" % self.htmlCode
        self.varData = ''
        self.__var_def = None
        self.component = component
        self.page = page
        self._js = []
        self._jquery = None

    def reset(self):
        """Clear the component's inner HTML."""
        return JsObjects.JsVoid("%(varName)s.innerHTML = '' " % {'varName': self.varName})
class Piece():
    """A draggable circular game piece on an 8x8 board rendered with pygame.

    Positions are in board units (0..8); drawing converts to pixels via the
    module-level ``width``/``height``/``screen`` globals.  A move is staged
    relative to (``start_x``, ``start_y``) and committed by ``confirm`` or
    rolled back by ``cancel``.
    """

    def __init__(self, x_pos, y_pos, color):
        diameter = 0.7
        self.x = x_pos
        self.y = y_pos
        self.radius = (diameter / 2)
        self.grabbed = False
        self.targeted = False
        self.color = color
        # Anchor of the current (uncommitted) move.
        self.start_x = self.x
        self.start_y = self.y
        text_scale = 0.85
        self.letter = 'X'
        # Font size scales with the piece diameter on a 640px reference board.
        self.font = pygame.font.SysFont(get_fontname(), int((((diameter / 8) * 640) * text_scale)))
        self.text = self.font.render(self.letter, True, (255, 255, 255))
        self.direction = False
        self.turn = 0
        self.deleted = False

    def set_letter(self, letter):
        """Set the displayed letter, rendering in the inverted piece colour
        normally, or green while grabbed."""
        self.letter = letter
        if (not self.grabbed):
            self.text = self.font.render(self.letter, True, ((255 - self.color[0]), (255 - self.color[1]), (255 - self.color[2])))
        else:
            self.text = self.font.render(self.letter, True, (0, 255, 0))

    def can_promote(self):
        """Whether this piece may promote; subclasses override."""
        return False

    def calc_paths(self, pieces):
        """Hook for subclasses: compute legal movement paths."""
        pass

    def draw_paths(self, pieces):
        """Hook for subclasses: render movement paths."""
        pass

    def target(self):
        """Mark this piece as a capture target (letter drawn red)."""
        self.targeted = True
        self.text = self.font.render(self.letter, True, (255, 0, 0))

    def untarget(self):
        """Clear capture targeting and restore the normal letter colour."""
        self.targeted = False
        self.set_letter(self.letter)

    def draw(self):
        """Render the piece circle and its letter at (x, y)."""
        x = int(((self.x / 8) * width))
        # Screen y grows downwards; board y grows upwards.
        y = (height - int(((self.y / 8) * height)))
        draw_circle(screen, x, y, int(((self.radius / 8) * width)), self.color)
        screen.blit(self.text, ((x - (self.text.get_width() // 2)), ((y - 2) - (self.text.get_height() // 2))))

    def try_grab(self, pos):
        """Grab the piece when *pos* (board coords) falls inside its circle."""
        if (dist(pos, (self.x, self.y)) < self.radius):
            self.grabbed = True
            self.text = self.font.render(self.letter, True, (0, 255, 0))

    def cancel(self, pieces):
        """Abort the staged move: untarget everything and snap back to the
        move anchor."""
        if self.grabbed:
            self.grabbed = False
            for piece in pieces:
                if piece.targeted:
                    piece.untarget()
            self.direction = False
            self.text = self.font.render(self.letter, True, ((255 - self.color[0]), (255 - self.color[1]), (255 - self.color[2])))
            self.x = self.start_x
            self.y = self.start_y

    def confirm(self, pieces):
        """Commit the staged move: delete targeted pieces, move the anchor,
        and advance the turn counter."""
        if self.grabbed:
            self.grabbed = False
            for piece in pieces:
                if piece.targeted:
                    # Captured pieces are parked far off-board at x=100.
                    piece.deleted = True
                    piece.x = 100
                    piece.start_x = 100
            self.direction = False
            self.text = self.font.render(self.letter, True, ((255 - self.color[0]), (255 - self.color[1]), (255 - self.color[2])))
            self.start_x = self.x
            self.start_y = self.y
            self.turn += 1

    def ungrab(self, pieces):
        """Drop the piece: cancel when it barely moved, otherwise run a
        modal confirm loop (click the piece or Return to confirm; any other
        click or Escape cancels).  Blocks inside its own event loop."""
        if self.grabbed:
            # Treat a sub-millimetre displacement as "no move".
            if ((abs((self.x - self.start_x)) < (1 / 1000)) and (abs((self.y - self.start_y)) < (1 / 1000))):
                self.cancel(pieces)
                return
            font = pygame.font.SysFont('oldenglishtext', int(80))
            confirm_text = font.render('Confirm?', True, (0, 0, 0))
            draw_center_text(confirm_text)
            pygame.display.flip()
            while True:
                for event in pygame.event.get():
                    if (event.type == pygame.QUIT):
                        pygame.quit()
                        quit()
                    if (event.type == pygame.MOUSEBUTTONUP):
                        if (dist(to_game_coords(pygame.mouse.get_pos()), (self.x, self.y)) < self.radius):
                            self.confirm(pieces)
                            return
                        else:
                            self.cancel(pieces)
                            return
                    elif (event.type == pygame.KEYDOWN):
                        if (event.key == pygame.K_RETURN):
                            self.confirm(pieces)
                            return
                        elif (event.key == pygame.K_ESCAPE):
                            self.cancel(pieces)
                            return

    def overlaps(self, piece):
        """Whether this piece's circle intersects *piece*'s circle.

        NOTE(review): uses ``self.radius * 2`` — assumes all pieces share
        one radius; confirm before introducing mixed sizes.
        """
        return (dist((self.x, self.y), (piece.x, piece.y)) < (self.radius * 2))

    def slide(self, dx, dy, pieces, capture=True, fake=False):
        """Slide the piece by (dx, dy) from its move anchor, clamped to the
        board and stopped by blocking pieces; optionally resolve captures.

        With ``capture`` only friendly pieces block (enemies are handled by
        ``slide_attack``); with ``fake`` positions are computed without
        changing any targeting state.
        """
        all_pieces = pieces
        if capture:
            # Only pieces ahead of the movement direction can block.
            pieces = [p for p in pieces if (((((p.x - self.start_x) * dx) + ((p.y - self.start_y) * dy)) > 0) and (p != self) and (p.color == self.color))]
            if fake:
                pieces = [p for p in pieces if (((((p.x - self.start_x) * dx) + ((p.y - self.start_y) * dy)) > 0) and (p != self) and (p.color == self.color) and (p.targeted == False))]
        else:
            pieces = [p for p in pieces if (((((p.x - self.start_x) * dx) + ((p.y - self.start_y) * dy)) > 0) and (p != self))]
        angle = math.atan2(dy, dx)
        if ((0 <= self.start_x <= 8) and (0 <= self.start_y <= 8)):
            # Clamp the displacement so the piece stays inside the board,
            # preserving direction via the dy/dx (or dx/dy) ratio.
            if (abs(dx) > 0):
                if (((self.start_x + dx) + self.radius) > 8):
                    ratio = (dy / dx)
                    dx = ((8 - self.start_x) - self.radius)
                    dy = (ratio * ((8 - self.start_x) - self.radius))
                if (((self.start_x + dx) - self.radius) < 0):
                    ratio = (dy / dx)
                    dx = ((- self.start_x) + self.radius)
                    dy = (ratio * ((- self.start_x) + self.radius))
            if (abs(dy) > 0):
                if (((self.start_y + dy) + self.radius) > 8):
                    ratio = (dx / dy)
                    dy = ((8 - self.start_y) - self.radius)
                    dx = (ratio * ((8 - self.start_y) - self.radius))
                if (((self.start_y + dy) - self.radius) < 0):
                    ratio = (dx / dy)
                    dy = ((- self.start_y) + self.radius)
                    dx = (ratio * ((- self.start_y) + self.radius))
        first_block = False
        # Sentinels: any real blocker is closer than infinity.  (These two
        # assignments arrived truncated in the source; restored.)
        block_dist = math.inf
        block_perp_dist = math.inf
        full_dist = math.sqrt(((dx ** 2) + (dy ** 2)))
        new_dist = full_dist
        for piece in pieces:
            # Perpendicular distance from the blocker to the movement line.
            h = abs(((math.cos(angle) * (self.y - piece.y)) - (math.sin(angle) * (self.x - piece.x))))
            if (h < (piece.radius * 2)):
                proj_dist = math.sqrt(((dist((self.start_x, self.start_y), (piece.x, piece.y)) ** 2) - (h ** 2)))
                if (proj_dist < block_dist):
                    block_dist = proj_dist
                    block_perp_dist = h
                    first_block = piece
        if first_block:
            if (math.sqrt(((dx ** 2) + (dy ** 2))) > block_dist):
                # Stop just in front of the first blocking piece.
                new_dist = (block_dist - math.sqrt(((4 * (self.radius ** 2)) - (block_perp_dist ** 2))))
        if (abs(full_dist) > 0):
            self.x = (self.start_x + ((dx * new_dist) / full_dist))
            self.y = (self.start_y + ((dy * new_dist) / full_dist))
        # Second pass: resolve any residual overlaps left after clamping.
        new_new_dist = new_dist
        for piece in pieces:
            if self.overlaps(piece):
                block_perp_dist = abs(((math.cos(angle) * (self.y - piece.y)) - (math.sin(angle) * (self.x - piece.x))))
                block_dist = math.sqrt(((dist((self.start_x, self.start_y), (piece.x, piece.y)) ** 2) - (block_perp_dist ** 2)))
                new_new_dist = (block_dist - math.sqrt(((4 * (self.radius ** 2)) - (block_perp_dist ** 2))))
                if (new_new_dist < new_dist):
                    new_dist = new_new_dist
        if (abs(full_dist) > 0):
            self.x = (self.start_x + ((dx * new_dist) / full_dist))
            self.y = (self.start_y + ((dy * new_dist) / full_dist))
        else:
            self.x = self.start_x
            self.y = self.start_y
        if capture:
            self.slide_attack((self.x - self.start_x), (self.y - self.start_y), all_pieces, fake=fake)

    def slide_attack(self, dx, dy, pieces, fake=False):
        """Find the first enemy piece along the movement ray, clamp the slide
        so the attacker stops against it, and mark captures (unless *fake*,
        which only computes positions without touching targeting)."""
        angle = math.atan2(dy, dx)
        all_pieces = pieces
        # Only enemy pieces ahead of the movement direction can be hit.
        pieces = [p for p in pieces if (((((p.x - self.start_x) * dx) + ((p.y - self.start_y) * dy)) > 0) and (p != self) and (p.color != self.color))]
        first_piece_hit = False
        # Sentinels: any real hit is closer than infinity.  (These two
        # assignments arrived truncated in the source; restored.)
        first_hit_dist = math.inf
        perp_dist = math.inf
        full_dist = math.sqrt(((dx ** 2) + (dy ** 2)))
        new_dist = full_dist
        for piece in pieces:
            h = abs(((math.cos(angle) * (self.y - piece.y)) - (math.sin(angle) * (self.x - piece.x))))
            if (h < (piece.radius * 2)):
                d = dist((piece.x, piece.y), (self.start_x, self.start_y))
                # Distance along the ray at which the two circles first touch.
                hit_dist = (math.sqrt(((d ** 2) - (h ** 2))) - math.sqrt(((4 * (piece.radius ** 2)) - (h ** 2))))
                if (hit_dist < first_hit_dist):
                    first_hit_dist = hit_dist
                    perp_dist = h
                    first_piece_hit = piece
        if (not fake):
            for piece in all_pieces:
                piece.untarget()
        if first_piece_hit:
            if self.overlaps(first_piece_hit):
                if (not fake):
                    first_piece_hit.target()
            # NOTE(review): ``piece.radius`` below is the last loop piece,
            # not ``first_piece_hit.radius``; identical only while all
            # pieces share one radius — confirm before changing.
            elif (dist((self.x, self.y), (self.start_x, self.start_y)) > (first_hit_dist + (2 * math.sqrt(((4 * (piece.radius ** 2)) - (perp_dist ** 2)))))):
                new_dist = (first_hit_dist + (2 * math.sqrt(((4 * (piece.radius ** 2)) - (perp_dist ** 2)))))
                if (not fake):
                    first_piece_hit.target()
        if (abs(full_dist) > 0):
            self.x = (self.start_x + ((dx * new_dist) / full_dist))
            self.y = (self.start_y + ((dy * new_dist) / full_dist))
        if (not fake):
            for piece in pieces:
                if self.overlaps(piece):
                    piece.target()

    def select_path(self, start, paths, point):
        """Pick the path direction from *paths* closest to *point*, plus a
        signed distance along it.

        NOTE(review): the ``dot_prod``/``min_l`` computation below uses the
        *last* loop ``path``/``h`` rather than ``min_path``/``min_h``;
        behaviour preserved as-is — confirm the intent before changing.
        """
        min_h = 9999999
        min_path = None
        for path in paths:
            h = (abs(((path[0] * (start[1] - point[1])) - ((start[0] - point[0]) * path[1]))) / math.sqrt(((path[0] ** 2) + (path[1] ** 2))))
            if (h < min_h):
                min_h = h
                min_path = path
            dot_prod = ((path[0] * (point[0] - start[0])) + (path[1] * (point[1] - start[1])))
            if (dot_prod == 0):
                min_l = 0
            else:
                min_l = ((math.sqrt(((dist(point, start) ** 2) - (h ** 2))) * dot_prod) / abs(dot_prod))
        return (min_path, min_l)

    def draw_moves(self, pieces):
        """Hook for subclasses: render available moves."""
        pass
class GitDriver():
    """Thin convenience wrapper around the ``git`` CLI for a single repo."""

    def __init__(self, repodir):
        self.repodir = Path(repodir)

    def init(self):
        """Create the repository directory and run ``git init`` in it."""
        self.repodir.mkdir()
        self.run_command('init')

    def log(self):
        """Return commit subjects, oldest first."""
        return self.run_command('log', '--format=%s', '--reverse').strip().splitlines()

    def grep_log(self, pattern):
        """Return hashes of commits whose message contains *pattern*
        (matched as a fixed string, not a regex)."""
        return self.run_command('log', '--format=%H', '-F', '--grep', pattern).strip().splitlines()

    def details(self, commit_hash):
        """Return (subject, parent_hashes) for *commit_hash*."""
        raw = self.run_command('show', '-s', '--format=%s%n%P', commit_hash)
        subject, parent_line = raw.splitlines()
        return (subject, parent_line.split())

    def run_command(self, *args):
        """Run git with *args* inside the repo and return captured stdout;
        raises CalledProcessError on a non-zero exit."""
        completed = subprocess.run(('git', '--no-pager') + args, cwd=str(self.repodir), check=True, text=True, capture_output=True)
        return completed.stdout
class _Decoration(base.PaddingMixin):
    """Base class for widget decorations.

    NOTE(review): ``height``/``parent_length``/``width``/``drawer``/``ctx``
    are read as attributes elsewhere in this class (``self.parent_length``
    inside ``width``, ``self.drawer``/``self.ctx`` inside
    ``set_source_rgb``), so their ``@property`` decorators — missing in the
    source — have been restored.
    """

    defaults = [('padding', 0, 'Default padding'), ('extrawidth', 0, 'Add additional width to the end of the decoration'), ('ignore_extrawidth', False, 'Ignores additional width added by decoration. Useful when stacking decorations on top of a PowerLineDecoration.')]

    def __init__(self, **config):
        base.PaddingMixin.__init__(self, **config)
        self.add_defaults(_Decoration.defaults)
        self._extrawidth = self.extrawidth

    def __eq__(self, other):
        # Equal iff same concrete type and identical user-supplied config.
        return ((type(self) is type(other)) and (self._user_config == other._user_config))

    def _configure(self, parent: base._Widget) -> None:
        self.parent = parent

    def single_or_four(self, value, name: str):
        """Normalise *value* to a ``[N, E, S, W]`` list.

        Accepts a scalar or a list/tuple of length 1 or 4; anything else is
        logged and replaced with zeros.
        """
        if isinstance(value, (float, int)):
            n = e = s = w = value
        elif isinstance(value, (tuple, list)):
            if (len(value) == 1):
                n = e = s = w = value[0]
            elif (len(value) == 4):
                (n, e, s, w) = value
            else:
                logger.info('%s should be a single number or a list of 1 or 4 values', name)
                n = e = s = w = 0
        else:
            logger.info('%s should be a single number or a list of 1 or 4 values', name)
            n = e = s = w = 0
        return [n, e, s, w]

    def clone(self) -> _Decoration:
        """Return a shallow copy of this decoration."""
        return copy.copy(self)

    @property
    def height(self) -> int:
        if self.parent.bar.horizontal:
            return self.parent.bar.height
        return self.parent.height

    @property
    def parent_length(self):
        # Recalculate for auto-sized widgets, else use the fixed length.
        if (self.parent.length_type == bar.CALCULATED):
            return int(self.parent.calculate_length())
        return self.parent._length

    @property
    def width(self) -> int:
        if self.parent.bar.horizontal:
            if self.ignore_extrawidth:
                return self.parent_length
            else:
                return self.parent.width
        return self.parent.bar.width

    @property
    def drawer(self) -> Drawer:
        return self.parent.drawer

    @property
    def ctx(self) -> Context:
        return self.drawer.ctx

    def set_source_rgb(self, colour) -> None:
        """Set the drawing colour on the parent widget's cairo context."""
        self.drawer.set_source_rgb(colour, ctx=self.ctx)
def test_validator_tangential_field():
    """A field dataset stripped of its tangential components must be
    rejected by the CustomFieldSource validator."""
    stripped = FIELD_SRC.field_dataset.copy(update=dict(Ex=None, Ez=None, Hx=None, Hz=None))
    with pytest.raises(pydantic.ValidationError):
        td.CustomFieldSource(size=SIZE, source_time=ST, field_dataset=stripped)
def prompt_for_pkce_token(client_id: str, redirect_uri: str, scope=None) -> RefreshingToken:
    """Interactively obtain a PKCE user token: open the Spotify login page
    in a browser and exchange the pasted redirect URL for a token."""
    credentials = RefreshingCredentials(client_id, redirect_uri=redirect_uri)
    auth = UserAuth(credentials, scope=scope, pkce=True)
    print('Opening browser for Spotify login...')
    webbrowser.open(auth.url)
    pasted = input('Please paste redirect URL: ').strip()
    return auth.request_token(url=pasted)
class EnumParamType(click.Choice):
    """click parameter type whose choices are the values of an Enum and
    whose converted result is the Enum member itself."""

    def __init__(self, enum_type: typing.Type[enum.Enum]):
        choices = [str(member.value) for member in enum_type]
        super().__init__(choices)
        self._enum_type = enum_type

    def convert(self, value: typing.Any, param: typing.Optional[click.Parameter], ctx: typing.Optional[click.Context]) -> enum.Enum:
        # Already a member (e.g. programmatic default): pass through.
        if isinstance(value, self._enum_type):
            return value
        validated = super().convert(value, param, ctx)
        return self._enum_type(validated)
def test_import_zmap_and_xyz(testpath):
    """The same polygon set imported from zmap, xyz and csv files must
    yield identical geometry columns."""
    poly_zmap = xtgeo.polygons_from_file(testpath / PFILE1A, fformat='zmap')
    poly_xyz = xtgeo.polygons_from_file(testpath / PFILE1B)
    poly_csv = xtgeo.polygons_from_file(testpath / PFILE1C)
    assert poly_zmap.nrow == poly_xyz.nrow
    assert poly_xyz.nrow == poly_csv.nrow
    for col in ['X_UTME', 'Y_UTMN', 'Z_TVDSS', 'POLY_ID']:
        assert np.allclose(poly_zmap.dataframe[col].values, poly_xyz.dataframe[col].values)
class Record(object):
    """Base class for a record held in the client-side store.

    Subclasses are expected to define ``_table`` (the backing table name);
    instances register themselves with the client's monitor when one is
    configured.
    """

    # Key under which child records are listed; overridden by subclasses.
    child_list_key = None

    def __init__(self, client, id, *args, **kwargs):
        self._client = client
        self._id = extract_id(id)
        self._callbacks = []
        if (self._client._monitor is not None):
            self._client._monitor.subscribe(self)

    @property
    def id(self):
        """The record's unique identifier.

        NOTE(review): ``@property`` restored here (and on ``role``) — the
        rest of the class reads ``self.id`` as a plain attribute, which
        could not have worked with the bare-method form in the source.
        """
        return self._id

    @property
    def role(self):
        """The current role for this record, as reported by the store."""
        return self._client._store.get_role(self._table, self.id)

    def _str_fields(self):
        # Field names included in __str__; subclasses may extend.
        return ['id']

    def __str__(self):
        return ', '.join(['{}={}'.format(field, repr(getattr(self, field))) for field in self._str_fields() if getattr(self, field, '')])

    def __repr__(self):
        return '<{} ({})>'.format(self.__class__.__name__, self)

    def refresh(self):
        """Force a re-fetch of this record's data from the server."""
        self._get_record_data(force_refresh=True)

    def _convert_diff_to_changelist(self, difference, old_val, new_val):
        """Translate a dictdiffer-style diff into
        ('changed_value', path, (old, new)) tuples, one per changed path."""
        changed_values = set()
        for (operation, path, values) in deepcopy(difference):
            path = (path.split('.') if isinstance(path, str) else path)
            if (operation in ['add', 'remove']):
                path.append(values[0][0])
            # Trim trailing list indices so the path addresses the container.
            while isinstance(path[(- 1)], int):
                path.pop()
            changed_values.add('.'.join(map(str, path)))
        return [('changed_value', path, (get_by_path(path, old_val), get_by_path(path, new_val))) for path in changed_values]

    def add_callback(self, callback, callback_id=None, extra_kwargs={}):
        """Register *callback* for changes to this record; returns the
        store's callback object.

        NOTE(review): ``extra_kwargs={}`` is a shared mutable default; it is
        only forwarded (never mutated) here, but confirm downstream before
        relying on that.
        """
        assert callable(callback), "The callback must be a 'callable' object, such as a function."
        callback_obj = self._client._store.add_callback(self, callback, callback_id=callback_id, extra_kwargs=extra_kwargs)
        self._callbacks.append(callback_obj)
        return callback_obj

    def remove_callbacks(self, callback_or_callback_id_prefix=None):
        """Remove one callback (by object or id prefix) or, with ``None``,
        every callback registered on this record."""
        # BUG FIX: the original concatenated a str with the argument, which
        # raised TypeError for the default None (the remove-all path).
        print('removing callback {}'.format(callback_or_callback_id_prefix))
        if (callback_or_callback_id_prefix is None):
            for callback_obj in list(self._callbacks):
                self._client._store.remove_callbacks(self._table, self.id, callback_or_callback_id_prefix=callback_obj)
            self._callbacks = []
        else:
            self._client._store.remove_callbacks(self._table, self.id, callback_or_callback_id_prefix=callback_or_callback_id_prefix)
            if (callback_or_callback_id_prefix in self._callbacks):
                self._callbacks.remove(callback_or_callback_id_prefix)

    def _get_record_data(self, force_refresh=False):
        return self._client.get_record_data(self._table, self.id, force_refresh=force_refresh)

    def get(self, path=[], default=None, force_refresh=False):
        """Return the value at *path* within the record data, or *default*."""
        return get_by_path(path, self._get_record_data(force_refresh=force_refresh), default=default)

    def set(self, path, value):
        """Submit a transaction setting *path* to *value* on this record."""
        self._client.submit_transaction(build_operation(id=self.id, path=path, args=value, table=self._table))

    def __eq__(self, other):
        return (self.id == other.id)

    def __ne__(self, other):
        return (self.id != other.id)

    def __hash__(self):
        return hash(self.id)
class HandshakeV10():
    """Server-side MySQL protocol HandshakeV10 packet (initial greeting)."""

    def __init__(self):
        self.server_version = '5.0.2'
        # Capabilities advertised to the client.
        self.capability = CapabilitySet((Capability.LONG_PASSWORD, Capability.LONG_FLAG, Capability.CONNECT_WITH_DB, Capability.PROTOCOL_41, Capability.TRANSACTIONS, Capability.SECURE_CONNECTION, Capability.PLUGIN_AUTH))
        self.status = StatusSet((Status.STATUS_AUTOCOMMIT,))
        self.character_set = CharacterSet.utf8
        self.auth_plugin = 'mysql_clear_password'

    def write(self, stream):
        """Serialize the handshake to *stream* in MySQL wire format.

        Layout: protocol version 0x0a, NUL-terminated server version,
        connection id, auth-plugin-data part 1, capability flags split into
        two 2-byte halves around charset/status, filler bytes, then the
        capability-dependent tail (extra auth data + plugin name).
        """
        capability = struct.pack('<I', self.capability.int)
        status = struct.pack('<H', self.status.int)
        packet = [b'\n', self.server_version.encode('ascii'), b'\x00', (b'\x00' * 4), (b'\x01' * 8), b'\x00', capability[:2], bytes((self.character_set.value,)), status, capability[2:], b'\x00', (b'\x01' * 10)]
        if (Capability.SECURE_CONNECTION in self.capability):
            # auth-plugin-data part 2 (12 bytes) plus terminating NUL.
            packet.append((b'\x00' * 13))
        if (Capability.PLUGIN_AUTH in self.capability):
            packet.extend((self.auth_plugin.encode('ascii'), b'\x00'))
        p = b''.join(packet)
        stream.write(p)
def test_same_instruction_with_different_memory_version():
    """InsertMissingDefinitions must emit aliased-variable copy definitions
    at every memory-version bump.

    The relative order of the per-version x/y copies is unspecified, so the
    assertion accepts either interleaving.
    """
    aliased_x = [Variable('x', Integer.int32_t(), i, is_aliased=True) for i in range(5)]
    aliased_y = [Variable('y', Integer.int32_t(), i, is_aliased=True) for i in range(5)]
    esi = Variable('esi', Integer.int32_t(), 3)
    var_v = [Variable('v', Integer.int32_t(), i) for i in range(5)]
    # Each store/call bumps the memory version (writes_memory=1..4).
    instructions = [Assignment(UnaryOperation(OperationType.dereference, [esi], writes_memory=1), Constant(3)), Assignment(var_v[1], aliased_x[1]), Assignment(ListOperation([]), Call(function_symbol('printf'), [Constant(), var_v[1]], writes_memory=2)), Assignment(var_v[2], aliased_y[2]), Assignment(ListOperation([]), Call(function_symbol('printf'), [Constant(), var_v[2]], writes_memory=3)), Assignment(UnaryOperation(OperationType.dereference, [esi], writes_memory=4), Constant(3)), Assignment(var_v[3], Constant(3)), Return([Constant(3)])]
    cfg = ControlFlowGraph()
    cfg.add_node(BasicBlock(1, instructions))
    task = DecompilerTask('test', cfg)
    InsertMissingDefinitions().run(task)
    # Accept either x-before-y or y-before-x ordering of the inserted copies.
    assert ((list(task.graph.instructions) == [Assignment(UnaryOperation(OperationType.dereference, [esi], writes_memory=1), Constant(3)), Assignment(aliased_x[1], aliased_x[0]), Assignment(aliased_y[1], aliased_y[0]), Assignment(var_v[1], aliased_x[1]), Assignment(ListOperation([]), Call(function_symbol('printf'), [Constant(), var_v[1]], writes_memory=2)), Assignment(aliased_x[2], aliased_x[1]), Assignment(aliased_y[2], aliased_y[1]), Assignment(var_v[2], aliased_y[2]), Assignment(ListOperation([]), Call(function_symbol('printf'), [Constant(), var_v[2]], writes_memory=3)), Assignment(aliased_x[3], aliased_x[2]), Assignment(aliased_y[3], aliased_y[2]), Assignment(UnaryOperation(OperationType.dereference, [esi], writes_memory=4), Constant(3)), Assignment(aliased_x[4], aliased_x[3]), Assignment(aliased_y[4], aliased_y[3]), Assignment(var_v[3], Constant(3)), Return([Constant(3)])]) or (list(task.graph.instructions) == [Assignment(UnaryOperation(OperationType.dereference, [esi], writes_memory=1), Constant(3)), Assignment(aliased_y[1], aliased_y[0]), Assignment(aliased_x[1], aliased_x[0]), Assignment(var_v[1], aliased_x[1]), Assignment(ListOperation([]), Call(function_symbol('printf'), [Constant(), var_v[1]], writes_memory=2)), Assignment(aliased_y[2], aliased_y[1]), Assignment(aliased_x[2], aliased_x[1]), Assignment(var_v[2], aliased_y[2]), Assignment(ListOperation([]), Call(function_symbol('printf'), [Constant(), var_v[2]], writes_memory=3)), Assignment(aliased_y[3], aliased_y[2]), Assignment(aliased_x[3], aliased_x[2]), Assignment(UnaryOperation(OperationType.dereference, [esi], writes_memory=4), Constant(3)), Assignment(aliased_y[4], aliased_y[3]), Assignment(aliased_x[4], aliased_x[3]), Assignment(var_v[3], Constant(3)), Return([Constant(3)])]))
class RelationshipMemberCustomer(ModelNormal):
    """Generated OpenAPI model for a customer relationship member.

    NOTE(review): the decorators in this class arrived corrupted in the
    source (`_property`, `_js_args_to_python_args`); they have been restored
    to the standard openapi-generator pattern (`@cached_property`,
    `@classmethod` + `@convert_js_args_to_python_args`) — confirm against
    this project's ``model_utils`` module.
    """

    allowed_values = {}
    validations = {}

    @cached_property
    def additional_properties_type():
        """Accept any type for undeclared (additional) properties."""
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    @cached_property
    def openapi_types():
        """Map attribute names to their declared tuple of accepted types."""
        lazy_import()
        return {'type': (TypeCustomer,), 'id': (str,)}

    @cached_property
    def discriminator():
        return None

    # JSON field name -> python attribute name (identical here).
    attribute_map = {'type': 'type', 'id': 'id'}
    # Attributes only the server may set.
    read_only_vars = {'id'}
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from raw API data; read-only attrs are allowed here."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop keys not declared in the schema.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Construct from keyword args; read-only attrs raise here (use
        ``_from_openapi_data`` for server-originated data)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class MockSecretHandler(SecretHandler):
    """Test double for SecretHandler: every lookup resolves to the canned
    'acook' test certificate instead of hitting a real secret store."""

    def load_secret(self, resource: 'IRResource', secret_name: str, namespace: str) -> Optional[SecretInfo]:
        """Ignore the requested secret and return the fallback self-signed cert."""
        certs = TLSCerts['acook']
        return SecretInfo(
            'fallback-self-signed-cert',
            'ambassador',
            'mocked-fallback-secret',
            certs.pubcert,
            certs.privkey,
            decode_b64=False,
        )
def reduce(pitch_motif: PitchLine, duration_motif: DurationLine, start: int, end: int, position: str) -> Tuple[(PitchLine, DurationLine)]:
    """Collapse the notes in the inclusive span [start, end].

    The removed notes' total duration is merged into the neighbouring note
    on the given side ('left' or 'right'); any other `position` simply drops
    the span's duration. The input duration list is not mutated.
    """
    merged_duration = sum(duration_motif[start:end + 1])
    durations = deepcopy(duration_motif)
    if position == 'left':
        durations[start - 1] += merged_duration
    elif position == 'right':
        durations[end + 1] += merged_duration
    reduced_durations = durations[:start] + durations[end + 1:]
    reduced_pitches = pitch_motif[:start] + pitch_motif[end + 1:]
    return (reduced_pitches, reduced_durations)
def wb_labels_to_csv(wb_labels_txt, csv_out=None):
    """Convert a Connectome Workbench label text file to a flat table.

    Workbench label files alternate a label-name line with a value line
    (``<int_value> <R> <G> <B> <alpha>``). Pairs of rows are pivoted into
    one record per label.

    :param wb_labels_txt: path to the Workbench label text file.
    :param csv_out: optional output CSV path; when given the table is
        written there and 0 is returned, otherwise the DataFrame is returned.
    """
    # FIX: `delim_whitespace=True` is deprecated (removed in pandas 3.0);
    # `sep=r'\s+'` is the documented equivalent.
    labels = pd.read_csv(wb_labels_txt, header=None, names=['lab', 'R', 'G', 'B', 'alpha'], sep=r'\s+')
    n_pairs = labels.shape[0] // 2
    # Tag alternating rows: 'one' = name row, 'two' = value row.
    labels['A_stack'] = (['one', 'two'] * n_pairs)
    # Pair index so each name/value couple pivots into a single record.
    labels['B_stack'] = pd.Series(range(n_pairs)).repeat(2).values
    labels_p = labels.pivot(index='B_stack', columns='A_stack')
    label_df = pd.DataFrame({'labelname': labels_p['lab']['one'], 'int_value': labels_p['lab']['two'], 'red': labels_p['R']['two'], 'green': labels_p['G']['two'], 'blue': labels_p['B']['two'], 'alpha': labels_p['alpha']['two']})
    if csv_out:
        label_df.to_csv(csv_out, index=False, columns=['int_value', 'labelname', 'red', 'green', 'blue', 'alpha'])
        return 0
    return label_df
# NOTE(review): stripped decorators — originally a Flask route decorator and
# a login/permission decorator (leading `@`/object prefixes lost in
# extraction); left byte-identical here.
('/feed-filters/merge-parsers')
_required
def mergeFeedParsers():
    """Render the merge page for two feed parsers selected via query args.

    Expects integer query parameters `f1` and `f2` identifying two distinct
    existing RssFeedEntry rows; renders an error page otherwise.
    """
    # BUG FIX: the original `(not 'f1' in args) and ('f2' in args)` only
    # errored when f1 was missing while f2 was present — both are required.
    if ('f1' not in request.args) or ('f2' not in request.args):
        return render_template('error.html', title='Viewer', message='This function has to have feeds to merge as parameters!')
    try:
        f1 = int(request.args['f1'])
        f2 = int(request.args['f2'])
    except ValueError:
        return render_template('error.html', title='Viewer', message='Feed IDs must be integers!')
    if f1 == f2:
        return render_template('error.html', title='Viewer', message='Cannot merge a feed with itself!')
    feed1 = g.session.query(db.RssFeedEntry).filter(db.RssFeedEntry.id == f1).scalar()
    feed2 = g.session.query(db.RssFeedEntry).filter(db.RssFeedEntry.id == f2).scalar()
    # BUG FIX: must error when EITHER feed is missing (original only fired
    # when feed1 was missing and feed2 present).
    if (not feed1) or (not feed2):
        return render_template('error.html', title='Viewer', message='One of the feed IDs has no feed!!')
    return render_template('rss-pages/feed_filter_merge.html', feed1=feed1, feed2=feed2)
class StringTest(AnyTraitTest):
    """Trait-coercion test fixture: a string trait accepts any value that
    ``str()`` accepts (i.e. everything), coercing it to its string form."""

    def setUp(self):
        # Object under test; the inherited AnyTraitTest machinery exercises
        # it against the class-level value lists below.
        self.obj = StringTrait()

    # Expected default value of the trait under test.
    _default_value = 'string'
    # All of these coerce successfully via str() — including containers.
    _good_values = [10, (- 10), 10.1, (- 10.1), '10', '-10', '10L', '-10L', '10.1', '-10.1', 'string', 1j, [10], ['ten'], {'ten': 10}, (10,), None]
    # Nothing is rejected by a plain string trait.
    _bad_values = []

    def coerce(self, value):
        # Mirror of the trait's own coercion, used to compute expected values.
        return str(value)
def resource_img_upload_to(instance, filename):
    """Django ``upload_to`` callable: build a unique media-relative path.

    Replaces the uploaded file's name with a UUID (keeping a lowercased
    extension) under the ``rooms/`` media subdirectory, creating that
    directory if needed.

    :param instance: model instance owning the file (unused).
    :param filename: original uploaded filename.
    :return: path relative to MEDIA_ROOT.
    """
    ext = filename.split('.')[-1]
    filename = '%s.%s' % (uuid.uuid4(), ext.lower())
    upload_path = 'rooms/'
    upload_abs_path = os.path.join(settings.MEDIA_ROOT, upload_path)
    # FIX: exist_ok avoids the check-then-create race of the original
    # `if not exists: makedirs` pair.
    os.makedirs(upload_abs_path, exist_ok=True)
    return os.path.join(upload_path, filename)
.skip_ci
('xtb')
def test_kick():
geom = geom_loader('lib:benzene_and_chlorine.xyz')
stoc_kwargs = {'cycle_size': 10, 'radius': 1.25, 'seed': , 'max_cycles': 5}
stoc = Kick(geom, **stoc_kwargs)
stoc.run()
assert (stoc.cur_cycle == 4)
assert (len(stoc.new_geoms) == 9)
assert (min(stoc.new_energies) == pytest.approx((- 24.9688182))) |
def sync_branch(new_branch, branch_commits, message):
    """Update the checked-out branch `new_branch` from `branch_commits`.

    First tries a fast-forward merge from each candidate source branch;
    if none succeeds, hard-syncs the index/work tree to the first candidate
    and commits the result, preserving that branch's author date.
    """
    for branch in branch_commits:
        # `git merge --ff-only` exits 0 on success or no-op, non-zero when a
        # fast-forward is impossible.
        if (not subprocess.call(['git', 'merge', branch, '--ff-only'], encoding='utf-8')):
            log.debug("merged '{0}' fast forward into '{1}' or noop".format(branch, new_branch))
            return
    # No fast-forward possible: take the tree of the first candidate branch.
    branch = next(iter(branch_commits))
    log.debug("resetting branch '{0}' to contents of '{1}'".format(new_branch, branch))
    subprocess.check_call(['git', 'read-tree', '-m', '-u', branch], encoding='utf-8')
    # Capture the source branch's author date to reuse on the new commit.
    date = subprocess.check_output(['git', 'show', branch, '-q', '--format=%ai'], encoding='utf-8')
    # `git diff --cached --exit-code` exits non-zero iff something is staged.
    if subprocess.call(['git', 'diff', '--cached', '--exit-code'], encoding='utf-8'):
        subprocess.check_call(['git', 'commit', '--no-verify', '-m', message, '--date', date], encoding='utf-8')
    else:
        log.debug("nothing to commit into branch '{0}'".format(new_branch))
class PrivateComputationPCF2LocalTestStageFlow(PrivateComputationBaseStageFlow):
    """Local-test PCF2 stage flow.

    Each enum member bundles the lifecycle statuses for one pipeline stage;
    `_order_` fixes the stage execution order. `get_stage_service` returns
    the PCF2-specific services for the attribution/aggregation stages and
    defers to the base-flow default for everything else.
    """
    # Explicit enum member ordering (execution order of the stages).
    _order_ = 'CREATED PID_SHARD PID_PREPARE ID_MATCH ID_MATCH_POST_PROCESS ID_SPINE_COMBINER RESHARD PCF2_ATTRIBUTION PCF2_AGGREGATION AGGREGATE'
    CREATED = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.CREATION_INITIALIZED, started_status=PrivateComputationInstanceStatus.CREATION_STARTED, completed_status=PrivateComputationInstanceStatus.CREATED, failed_status=PrivateComputationInstanceStatus.CREATION_FAILED, is_joint_stage=False)
    PID_SHARD = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.PID_SHARD_INITIALIZED, started_status=PrivateComputationInstanceStatus.PID_SHARD_STARTED, completed_status=PrivateComputationInstanceStatus.PID_SHARD_COMPLETED, failed_status=PrivateComputationInstanceStatus.PID_SHARD_FAILED, is_joint_stage=False)
    PID_PREPARE = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.PID_PREPARE_INITIALIZED, started_status=PrivateComputationInstanceStatus.PID_PREPARE_STARTED, completed_status=PrivateComputationInstanceStatus.PID_PREPARE_COMPLETED, failed_status=PrivateComputationInstanceStatus.PID_PREPARE_FAILED, is_joint_stage=False)
    # ID_MATCH is the only retryable stage and carries a PID run timeout.
    ID_MATCH = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.ID_MATCHING_INITIALIZED, started_status=PrivateComputationInstanceStatus.ID_MATCHING_STARTED, completed_status=PrivateComputationInstanceStatus.ID_MATCHING_COMPLETED, failed_status=PrivateComputationInstanceStatus.ID_MATCHING_FAILED, is_joint_stage=True, is_retryable=True, timeout=DEFAULT_RUN_PID_TIMEOUT_IN_SEC)
    ID_MATCH_POST_PROCESS = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.ID_MATCHING_POST_PROCESS_INITIALIZED, started_status=PrivateComputationInstanceStatus.ID_MATCHING_POST_PROCESS_STARTED, completed_status=PrivateComputationInstanceStatus.ID_MATCHING_POST_PROCESS_COMPLETED, failed_status=PrivateComputationInstanceStatus.ID_MATCHING_POST_PROCESS_FAILED, is_joint_stage=False)
    ID_SPINE_COMBINER = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.ID_SPINE_COMBINER_INITIALIZED, started_status=PrivateComputationInstanceStatus.ID_SPINE_COMBINER_STARTED, completed_status=PrivateComputationInstanceStatus.ID_SPINE_COMBINER_COMPLETED, failed_status=PrivateComputationInstanceStatus.ID_SPINE_COMBINER_FAILED, is_joint_stage=False)
    RESHARD = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.RESHARD_INITIALIZED, started_status=PrivateComputationInstanceStatus.RESHARD_STARTED, completed_status=PrivateComputationInstanceStatus.RESHARD_COMPLETED, failed_status=PrivateComputationInstanceStatus.RESHARD_FAILED, is_joint_stage=False)
    PCF2_ATTRIBUTION = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.PCF2_ATTRIBUTION_INITIALIZED, started_status=PrivateComputationInstanceStatus.PCF2_ATTRIBUTION_STARTED, completed_status=PrivateComputationInstanceStatus.PCF2_ATTRIBUTION_COMPLETED, failed_status=PrivateComputationInstanceStatus.PCF2_ATTRIBUTION_FAILED, is_joint_stage=True)
    PCF2_AGGREGATION = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.PCF2_AGGREGATION_INITIALIZED, started_status=PrivateComputationInstanceStatus.PCF2_AGGREGATION_STARTED, completed_status=PrivateComputationInstanceStatus.PCF2_AGGREGATION_COMPLETED, failed_status=PrivateComputationInstanceStatus.PCF2_AGGREGATION_FAILED, is_joint_stage=True)
    AGGREGATE = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.AGGREGATION_INITIALIZED, started_status=PrivateComputationInstanceStatus.AGGREGATION_STARTED, completed_status=PrivateComputationInstanceStatus.AGGREGATION_COMPLETED, failed_status=PrivateComputationInstanceStatus.AGGREGATION_FAILED, is_joint_stage=True)

    def get_stage_service(self, args: PrivateComputationStageServiceArgs) -> PrivateComputationStageService:
        """Map a stage enum member to the service that executes it.

        PCF2 attribution/aggregation get dedicated services; all other
        stages use the base flow's default resolution.
        """
        if (self is self.PCF2_ATTRIBUTION):
            return PCF2AttributionStageService(args.onedocker_binary_config_map, args.mpc_svc)
        elif (self is self.PCF2_AGGREGATION):
            return PCF2AggregationStageService(args.onedocker_binary_config_map, args.mpc_svc)
        else:
            return self.get_default_stage_service(args)
class OptionSeriesWindbarbSonificationTracksMappingRate(Options):
    """Rate-mapping options for windbarb sonification tracks.

    FIX: the `@property`/`@<name>.setter` decorators were lost, leaving
    duplicate `def` pairs where the setter silently replaced the getter;
    restored the getter/setter property pattern these Options classes use.
    """

    @property
    def mapFunction(self):
        """Mapping function for the track; None when unset."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property to map to; None when unset."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range; None when unset."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range; None when unset."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Scope the min/max are computed within; None when unset."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def launch_green_threads():
    """Run the benchmark with green threads: one acceptor plus CONCURRENCY
    writers, all joined through a single GreenPool."""
    # One slot per writer and per accepted handler, plus the acceptor itself.
    pool = eventlet.GreenPool((CONCURRENCY * 2) + 1)
    listener = eventlet.green.socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    listener.bind(('localhost', 0))
    listener.listen(50)
    # Bind to an ephemeral port, then read the port number back.
    addr = ('localhost', listener.getsockname()[1])
    pool.spawn_n(green_accepter, listener, pool)
    for _ in range(CONCURRENCY):
        pool.spawn_n(writer, addr, eventlet.green.socket.socket)
    pool.waitall()
# NOTE(review): stripped click decorators — originally e.g.
# `@<group>.command('restore_es_snapshot_downtime')` and
# `@click.argument(...)` lines (leading `@`/object names lost in extraction).
.command('restore_es_snapshot_downtime')
('repo_name', default=None, required=False)
('snapshot_name', default=None, required=False)
('index_name', default=None, required=False)
def restore_es_snapshot_downtime_cli(repo_name, snapshot_name, index_name):
    """CLI shim: delegate the Elasticsearch snapshot restore (with downtime)
    to the legal_docs implementation."""
    legal_docs.restore_es_snapshot_downtime(repo_name, snapshot_name, index_name)
def update_popup(view, content, md=True, css=None, wrapper_class=None, template_vars=None, template_env_options=None, **kwargs):
    """Refresh the content of an already-visible Sublime Text popup.

    Renders `content` (Markdown by default) to HTML and pushes it into the
    view's popup. When popups are globally disabled nothing happens; when
    rendering fails, the failure is logged and a fallback page is shown.
    """
    if _get_setting('mdpopups.disable', False):
        _debug('Popups disabled', WARNING)
        return

    try:
        html = _create_html(
            view, content, md, css,
            css_type=POPUP,
            wrapper_class=wrapper_class,
            template_vars=template_vars,
            template_env_options=template_env_options,
        )
    except Exception:
        _log(traceback.format_exc())
        html = IDK  # module-level fallback markup
    view.update_popup(html)
# NOTE(review): stripped decorators — the first line lost the `@` of a Flask
# route decorator; `_event_id` / `_coorganizer` are presumably permission
# decorators whose prefixes were lost in extraction.
_routes.route('/events/<string:event_identifier>/export/speakers/csv', methods=['POST'], endpoint='export_speakers_csv')
_event_id
_coorganizer
def export_speakers_csv(event_id):
    """Kick off an asynchronous CSV export of an event's speakers.

    Queues a Celery task (optionally filtered by the JSON `status` field),
    records an export job for it, and returns the task-status URL.
    """
    # Imported lazily to avoid a circular import at module load time.
    from .helpers.tasks import export_speakers_csv_task
    status = request.json.get('status')
    task = export_speakers_csv_task.delay(event_id, status)
    create_export_job(task.id, event_id)
    return jsonify(task_url=url_for('tasks.celery_task', task_id=task.id))
# NOTE(review): event-interface class mangled by extraction — the bare
# `_epoch_stats(...)` / `_stats_grouping(...)` lines below are decorators for
# the `def` that FOLLOWS each of them (their leading `@define...` prefix was
# lost), and the methods' docstring-only bodies were dropped entirely. Code
# kept byte-identical; restore the decorators/bodies from upstream.
class ImitationEvents(ABC):
# Each event aggregates its `value` with np.nanmean per epoch, grouped by
# the listed keys (step id, agent id and, where present, subspace name).
_epoch_stats(np.nanmean)
_stats_grouping('step_id', 'agent_id')
def policy_loss(self, step_id: Union[(str, int)], agent_id: int, value: float):
_epoch_stats(np.nanmean)
_stats_grouping('step_id', 'agent_id')
def policy_entropy(self, step_id: Union[(str, int)], agent_id: int, value: float):
_epoch_stats(np.nanmean)
_stats_grouping('step_id', 'agent_id')
def policy_l2_norm(self, step_id: Union[(str, int)], agent_id: int, value: float):
_epoch_stats(np.nanmean)
_stats_grouping('step_id', 'agent_id')
def policy_grad_norm(self, step_id: Union[(str, int)], agent_id: int, value: float):
# Per-subspace accuracy/rank metrics for discrete action spaces.
_epoch_stats(np.nanmean)
_stats_grouping('step_id', 'subspace_name', 'agent_id')
def discrete_accuracy(self, step_id: Union[(str, int)], agent_id: int, subspace_name: str, value: int):
_epoch_stats(np.nanmean)
_stats_grouping('step_id', 'subspace_name', 'agent_id')
def discrete_top_5_accuracy(self, step_id: Union[(str, int)], agent_id: int, subspace_name: str, value: int):
_epoch_stats(np.nanmean)
_stats_grouping('step_id', 'subspace_name', 'agent_id')
def discrete_top_10_accuracy(self, step_id: Union[(str, int)], agent_id: int, subspace_name: str, value: int):
_epoch_stats(np.nanmean)
_stats_grouping('step_id', 'subspace_name', 'agent_id')
def discrete_action_rank(self, step_id: Union[(str, int)], agent_id: int, subspace_name: str, value: int):
_epoch_stats(np.nanmean)
_stats_grouping('step_id', 'subspace_name', 'agent_id')
def multi_binary_accuracy(self, step_id: Union[(str, int)], agent_id: int, subspace_name: str, value: int):
_epoch_stats(np.nanmean)
_stats_grouping('step_id', 'subspace_name', 'agent_id')
def box_mean_abs_deviation(self, step_id: Union[(str, int)], agent_id: int, subspace_name: str, value: int):
# Ungrouped scalar: number of training iterations in the epoch.
_epoch_stats(np.nanmean)
def training_iterations(self, value: int):
class Command(DanubeCloudCommand):
    """Management command: create a PostgreSQL database dump via pg_dump."""
    args = '[DB name]'
    help = 'Create database dump.'
    default_verbosity = 2
    options = (CommandOption('-d', '--database', action='store', dest='database', help='Nominates a specific database to dump. Defaults to the "default" database.'), CommandOption('-a', '--data-only', action='store_true', dest='data_only', default=False, help='Dump only the data, not the schema.'), CommandOption('-s', '--schema-only', action='store_true', dest='schema_only', default=False, help='Dump only the schema, no data.'), CommandOption('-i', '--inserts', action='store_true', dest='inserts', default=False, help='Dump data as INSERT commands with column names.'))

    def handle(self, db_name=DEFAULT_DB_ALIAS, data_only=False, schema_only=False, inserts=False, **options):
        """Build and run the pg_dump command line for the chosen database.

        Raises CommandError for an unknown database alias or when both
        --data-only and --schema-only are requested.
        """
        db_name = (options.get('database') or db_name)
        try:
            db = self.settings.DATABASES[db_name]
        except KeyError:
            raise CommandError('Invalid database name!')
        cmd = ('PGPASSWORD="%(PASSWORD)s" pg_dump -U %(USER)s -h %(HOST)s -p %(PORT)s -d %(NAME)s' % db)
        if (schema_only and data_only):
            # FIX: the original silently `pass`ed here, producing a FULL dump
            # when the user asked for two mutually exclusive partial modes.
            raise CommandError('--data-only and --schema-only are mutually exclusive!')
        elif schema_only:
            cmd += ' -s'
        elif data_only:
            cmd += ' -a'
        if inserts:
            cmd += ' --inserts'
        # stderr stays separate so pg_dump diagnostics don't pollute the dump.
        self.local(cmd, stderr_to_stdout=False)
def cbFun(snmpEngine, sendRequesthandle, errorIndication, errorStatus, errorIndex, varBindTable, cbCtx):
    """pysnmp walk callback: print errors or the received variable bindings.

    Returns True to keep walking; returns None (stop) on any error.
    """
    # Transport/engine level failure — report and stop.
    if errorIndication:
        print(errorIndication)
        return
    # SNMP protocol error — point at the offending varbind if identifiable.
    if errorStatus:
        offender = (errorIndex and varBindTable[-1][int(errorIndex) - 1][0]) or '?'
        print('%s at %s' % (errorStatus.prettyPrint(), offender))
        return
    for row in varBindTable:
        for (oid, val) in row:
            print('%s = %s' % (oid.prettyPrint(), val.prettyPrint()))
    return True
class OptionPlotoptionsPyramidSonificationDefaultspeechoptionsMapping(Options):
    """Default speech-option mappings for pyramid sonification.

    FIX: the `@property`/`@text.setter` decorators were lost, leaving two
    plain `def text` definitions where the setter silently replaced the
    getter; restored the property pattern these Options classes use.
    """

    @property
    def pitch(self) -> 'OptionPlotoptionsPyramidSonificationDefaultspeechoptionsMappingPitch':
        """Sub-options for pitch mapping."""
        return self._config_sub_data('pitch', OptionPlotoptionsPyramidSonificationDefaultspeechoptionsMappingPitch)

    @property
    def playDelay(self) -> 'OptionPlotoptionsPyramidSonificationDefaultspeechoptionsMappingPlaydelay':
        """Sub-options for play-delay mapping."""
        return self._config_sub_data('playDelay', OptionPlotoptionsPyramidSonificationDefaultspeechoptionsMappingPlaydelay)

    @property
    def rate(self) -> 'OptionPlotoptionsPyramidSonificationDefaultspeechoptionsMappingRate':
        """Sub-options for speech-rate mapping."""
        return self._config_sub_data('rate', OptionPlotoptionsPyramidSonificationDefaultspeechoptionsMappingRate)

    @property
    def text(self):
        """Text to announce; None when unset."""
        return self._config_get(None)

    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)

    @property
    def time(self) -> 'OptionPlotoptionsPyramidSonificationDefaultspeechoptionsMappingTime':
        """Sub-options for time mapping."""
        return self._config_sub_data('time', OptionPlotoptionsPyramidSonificationDefaultspeechoptionsMappingTime)

    @property
    def volume(self) -> 'OptionPlotoptionsPyramidSonificationDefaultspeechoptionsMappingVolume':
        """Sub-options for volume mapping."""
        return self._config_sub_data('volume', OptionPlotoptionsPyramidSonificationDefaultspeechoptionsMappingVolume)
class MImageResource(HasTraits):
    """Mixin for image resources: locates an image on a search path and
    loads it, falling back to a shared 'image_not_found' placeholder."""

    # Class-wide cache for the fallback 'image_not_found' resource.
    _image_not_found = None

    def __init__(self, name, search_path=None):
        self.name = name
        # Normalize search_path to a list of directories.
        if isinstance(search_path, str):
            _path = [search_path]
        elif isinstance(search_path, Sequence):
            _path = search_path
        elif (search_path is not None):
            # Single non-string, non-sequence value (e.g. a path-like object).
            _path = [search_path]
        else:
            _path = [resource_path()]
        # Always append the resource module's own directory as a last resort.
        self.search_path = (_path + [resource_module()])

    def create_image(self, size=None):
        """Load and return this image, or the 'not found' placeholder."""
        ref = self._get_ref(size)
        if (ref is not None):
            image = ref.load()
        else:
            image = self._get_image_not_found_image()
        return image

    def _get_ref(self, size=None):
        """Locate the image once via the resource manager and cache the ref."""
        # NOTE(review): `self._ref` is not initialized in __init__ here —
        # presumably a trait defaulting to None; confirm on the class.
        if (self._ref is None):
            # Imported lazily to avoid import cycles at module load time.
            from pyface.resource_manager import resource_manager
            self._ref = resource_manager.locate_image(self.name, self.search_path, size)
        return self._ref

    def _get_image_not_found_image(self):
        """Return the placeholder image, refusing to recurse on itself."""
        not_found = self._get_image_not_found()
        if (self is not not_found):
            image = not_found.create_image()
        else:
            raise ValueError("cannot locate the file for 'image_not_found'")
        return image

    def _get_image_not_found(cls):
        # NOTE(review): takes `cls` — almost certainly a @classmethod whose
        # decorator was lost in extraction; when called as
        # self._get_image_not_found(), the instance is bound as `cls`.
        if (cls._image_not_found is None):
            # Imported lazily to avoid import cycles.
            from pyface.image_resource import ImageResource
            cls._image_not_found = ImageResource('image_not_found')
        return cls._image_not_found
class OptionPlotoptionsStreamgraphSonificationTracksMappingHighpassFrequency(Options):
    """Highpass-frequency mapping options for streamgraph sonification.

    FIX: the `@property`/`@<name>.setter` decorators were lost, leaving
    duplicate `def` pairs where the setter silently replaced the getter;
    restored the getter/setter property pattern these Options classes use.
    """

    @property
    def mapFunction(self):
        """Mapping function for the track; None when unset."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property to map to; None when unset."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range; None when unset."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range; None when unset."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Scope the min/max are computed within; None when unset."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def handle_results(report, appname, file_version, item_location, application_cve, application_secure):
    """Log, print and record one vulnerable-application finding.

    Best-effort: any failure is logged with a traceback and swallowed so a
    single bad record does not abort the scan.
    """
    try:
        logging.debug('%s with version %s from %s with vulnerability %s. This installation should be updated to at least version %s.', appname, file_version, item_location, application_cve, application_secure)
        timestamp = get_timestamp()
        print('%s Found: %s %s %s (%s)' % (timestamp, item_location, file_version, application_secure, appname))
        report.add(appname, item_location, file_version, application_secure, application_cve)
    except Exception:
        logging.error(traceback.format_exc())
class TestBstSecondLargest(unittest.TestCase):
    """Exercise Solution.find_second_largest on several BST shapes."""

    def test_bst_second_largest(self):
        # An uninitialized tree cannot have a second-largest node.
        bst = Solution(None)
        self.assertRaises(TypeError, bst.find_second_largest)
        # Larger tree: 30 is the max, so its parent 20 is second largest.
        # (Insertion order preserved from the original test; unused node
        # handles removed.)
        bst = Solution(Node(10))
        for key in (5, 15, 3, 8, 12):
            bst.insert(key)
        node20 = bst.insert(20)
        for key in (2, 4):
            bst.insert(key)
        bst.insert(30)
        self.assertEqual(bst.find_second_largest(), node20)
        # Max (10) is the root here, so the second largest is the maximum of
        # its left subtree: 7.
        bst = Solution(Node(10))
        bst.insert(5)
        bst.insert(3)
        node7 = bst.insert(7)
        self.assertEqual(bst.find_second_largest(), node7)
        print('Success: test_bst_second_largest')
class _Border(_BaseRow):
    """A horizontal border row between (or around) two table rows.

    Renders the proper box-drawing characters: corner/T pieces at the outer
    edges, and intersections/Ts wherever the previous or next row has a
    vertical cell boundary at that column.
    """

    def __init__(self, prev, next, table, borders, window_too_small=None, align=HorizontalAlign.JUSTIFY, padding=D.exact(0), padding_char=None, padding_style='', width=None, height=None, z_index=None, modal=False, key_bindings=None, style=''):
        # At least one neighbouring row must exist (top/bottom/inner border).
        assert (prev or next)
        self.prev = prev
        self.next = next
        self.table = table
        self.borders = borders
        # NOTE(review): `self.columns` is not defined in this class —
        # presumably a property on _BaseRow derived from `table`; confirm.
        children = ([_HorizontalBorder(borders=borders)] * self.columns)
        super().__init__(children=children, window_too_small=window_too_small, align=align, padding=padding, padding_char=padding_char, padding_style=padding_style, width=width, height=(height or 1), z_index=z_index, modal=modal, key_bindings=key_bindings, style=style)

    def has_borders(self, row):
        """Yield, per column boundary, whether `row` has a vertical border
        there (None for the two outer edges, which are always drawn)."""
        (yield None)
        if (not row):
            # No row on that side: no inner vertical borders.
            (yield from ([False] * (self.columns - 1)))
        else:
            c = 0
            for child in row.children:
                # A merged cell spans child.merge columns with no inner border.
                (yield from ([False] * (child.merge - 1)))
                (yield True)
                c += child.merge
            # Remaining (unoccupied) columns all carry borders.
            (yield from ([True] * (self.columns - c)))
        (yield None)

    def _all_children(self):
        def get():
            result = []
            # Flexible spacer on the left for center/right alignment.
            if (self.align in (HorizontalAlign.CENTER, HorizontalAlign.RIGHT)):
                result.append(Window(width=D(preferred=0)))

            def char(i, pc=False, nc=False):
                # Pick the box-drawing character for boundary `i`, given
                # whether the previous (pc) / next (nc) row has a vertical
                # border at that position.
                if (i == 0):
                    if (self.prev and self.next):
                        return self.borders.LEFT_T
                    elif self.prev:
                        return self.borders.BOTTOM_LEFT
                    else:
                        return self.borders.TOP_LEFT
                if (i == self.columns):
                    if (self.prev and self.next):
                        return self.borders.RIGHT_T
                    elif self.prev:
                        return self.borders.BOTTOM_RIGHT
                    else:
                        return self.borders.TOP_RIGHT
                if (pc and nc):
                    return self.borders.INTERSECT
                elif pc:
                    return self.borders.BOTTOM_T
                elif nc:
                    return self.borders.TOP_T
                else:
                    return self.borders.HORIZONTAL

            pcs = self.has_borders(self.prev)
            ncs = self.has_borders(self.next)
            # Interleave junction characters with horizontal border segments.
            for (i, (child, pc, nc)) in enumerate(zip(self.children, pcs, ncs)):
                result.append(_UnitBorder(char=char(i, pc, nc)))
                result.append(child)
            result.append(_UnitBorder(char=char(self.columns)))
            # Flexible spacer on the right for center/left alignment.
            if (self.align in (HorizontalAlign.CENTER, HorizontalAlign.LEFT)):
                result.append(Window(width=D(preferred=0)))
            return result
        # Cache keyed on the current children tuple.
        return self._children_cache.get(tuple(self.children), get)
.feature('unit')
.story('services', 'core', 'scheduler')
class TestSchedulerExceptions():
def test_NotReadyError(self):
with pytest.raises(NotReadyError) as excinfo:
raise NotReadyError()
assert (excinfo.type is NotReadyError)
assert issubclass(excinfo.type, RuntimeError)
def test_DuplicateRequestError(self):
with pytest.raises(DuplicateRequestError) as excinfo:
raise DuplicateRequestError()
assert (excinfo.type is DuplicateRequestError)
assert issubclass(excinfo.type, RuntimeError)
def test_TaskNotRunningError(self):
task_id = uuid.uuid4()
with pytest.raises(TaskNotRunningError) as excinfo:
raise TaskNotRunningError(task_id)
assert (excinfo.type is TaskNotRunningError)
assert issubclass(excinfo.type, RuntimeError)
assert str(excinfo).endswith('Task is not running: {}'.format(str(task_id)))
def test_TaskNotFoundError(self):
task_id = uuid.uuid4()
with pytest.raises(TaskNotFoundError) as excinfo:
raise TaskNotFoundError(task_id)
assert (excinfo.type is TaskNotFoundError)
assert issubclass(excinfo.type, ValueError)
assert str(excinfo).endswith('Task not found: {}'.format(task_id))
def test_ScheduleNotFoundError(self):
schedule_id = uuid.uuid4()
with pytest.raises(ScheduleNotFoundError) as excinfo:
raise ScheduleNotFoundError(schedule_id)
assert (excinfo.type is ScheduleNotFoundError)
assert issubclass(excinfo.type, ValueError)
assert str(excinfo).endswith('Schedule not found: {}'.format(schedule_id))
def test_ScheduleProcessNameNotFound(self):
with pytest.raises(ScheduleProcessNameNotFoundError) as excinfo:
raise ScheduleProcessNameNotFoundError()
assert (excinfo.type is ScheduleProcessNameNotFoundError)
assert issubclass(excinfo.type, ValueError) |
class IngestClient(NamespacedClient):
    """Generated async client for the Elasticsearch ``_ingest`` APIs.

    NOTE(review): the bare ``_rewrite_parameters(...)`` lines below lost
    their leading ``@`` in extraction — each is the decorator of the method
    that follows it.
    """

    _rewrite_parameters()
    async def delete_pipeline(self, *, id: str, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None) -> ObjectApiResponse[t.Any]:
        """DELETE /_ingest/pipeline/{id} — delete an ingest pipeline."""
        if (id in SKIP_IN_PATH):
            raise ValueError("Empty value passed for parameter 'id'")
        __path = f'/_ingest/pipeline/{_quote(id)}'
        __query: t.Dict[(str, t.Any)] = {}
        # Only explicitly-supplied options become query parameters.
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (human is not None):
            __query['human'] = human
        if (master_timeout is not None):
            __query['master_timeout'] = master_timeout
        if (pretty is not None):
            __query['pretty'] = pretty
        if (timeout is not None):
            __query['timeout'] = timeout
        __headers = {'accept': 'application/json'}
        return (await self.perform_request('DELETE', __path, params=__query, headers=__headers))

    _rewrite_parameters()
    async def geo_ip_stats(self, *, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
        """GET /_ingest/geoip/stats — GeoIP downloader statistics."""
        __path = '/_ingest/geoip/stats'
        __query: t.Dict[(str, t.Any)] = {}
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (human is not None):
            __query['human'] = human
        if (pretty is not None):
            __query['pretty'] = pretty
        __headers = {'accept': 'application/json'}
        return (await self.perform_request('GET', __path, params=__query, headers=__headers))

    _rewrite_parameters()
    async def get_pipeline(self, *, id: t.Optional[str]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, summary: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
        """GET /_ingest/pipeline[/{id}] — fetch one or all pipelines."""
        # With no id, the endpoint returns every pipeline.
        if (id not in SKIP_IN_PATH):
            __path = f'/_ingest/pipeline/{_quote(id)}'
        else:
            __path = '/_ingest/pipeline'
        __query: t.Dict[(str, t.Any)] = {}
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (human is not None):
            __query['human'] = human
        if (master_timeout is not None):
            __query['master_timeout'] = master_timeout
        if (pretty is not None):
            __query['pretty'] = pretty
        if (summary is not None):
            __query['summary'] = summary
        __headers = {'accept': 'application/json'}
        return (await self.perform_request('GET', __path, params=__query, headers=__headers))

    _rewrite_parameters()
    async def processor_grok(self, *, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
        """GET /_ingest/processor/grok — list built-in grok patterns."""
        __path = '/_ingest/processor/grok'
        __query: t.Dict[(str, t.Any)] = {}
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (human is not None):
            __query['human'] = human
        if (pretty is not None):
            __query['pretty'] = pretty
        __headers = {'accept': 'application/json'}
        return (await self.perform_request('GET', __path, params=__query, headers=__headers))

    _rewrite_parameters(body_fields=('description', 'meta', 'on_failure', 'processors', 'version'), parameter_aliases={'_meta': 'meta'})
    async def put_pipeline(self, *, id: str, description: t.Optional[str]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, if_version: t.Optional[int]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, meta: t.Optional[t.Mapping[(str, t.Any)]]=None, on_failure: t.Optional[t.Sequence[t.Mapping[(str, t.Any)]]]=None, pretty: t.Optional[bool]=None, processors: t.Optional[t.Sequence[t.Mapping[(str, t.Any)]]]=None, timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, version: t.Optional[int]=None, body: t.Optional[t.Dict[(str, t.Any)]]=None) -> ObjectApiResponse[t.Any]:
        """PUT /_ingest/pipeline/{id} — create or update a pipeline."""
        if (id in SKIP_IN_PATH):
            raise ValueError("Empty value passed for parameter 'id'")
        __path = f'/_ingest/pipeline/{_quote(id)}'
        __query: t.Dict[(str, t.Any)] = {}
        __body: t.Dict[(str, t.Any)] = (body if (body is not None) else {})
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (human is not None):
            __query['human'] = human
        if (if_version is not None):
            __query['if_version'] = if_version
        if (master_timeout is not None):
            __query['master_timeout'] = master_timeout
        if (pretty is not None):
            __query['pretty'] = pretty
        if (timeout is not None):
            __query['timeout'] = timeout
        # Individual body fields are only honoured when no raw body is given.
        if (not __body):
            if (description is not None):
                __body['description'] = description
            if (meta is not None):
                __body['_meta'] = meta
            if (on_failure is not None):
                __body['on_failure'] = on_failure
            if (processors is not None):
                __body['processors'] = processors
            if (version is not None):
                __body['version'] = version
        __headers = {'accept': 'application/json', 'content-type': 'application/json'}
        return (await self.perform_request('PUT', __path, params=__query, headers=__headers, body=__body))

    _rewrite_parameters(body_fields=('docs', 'pipeline'))
    async def simulate(self, *, id: t.Optional[str]=None, docs: t.Optional[t.Sequence[t.Mapping[(str, t.Any)]]]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, pipeline: t.Optional[t.Mapping[(str, t.Any)]]=None, pretty: t.Optional[bool]=None, verbose: t.Optional[bool]=None, body: t.Optional[t.Dict[(str, t.Any)]]=None) -> ObjectApiResponse[t.Any]:
        """POST /_ingest/pipeline[/{id}]/_simulate — dry-run documents
        through an existing pipeline (by id) or an inline pipeline."""
        if (id not in SKIP_IN_PATH):
            __path = f'/_ingest/pipeline/{_quote(id)}/_simulate'
        else:
            __path = '/_ingest/pipeline/_simulate'
        __query: t.Dict[(str, t.Any)] = {}
        __body: t.Dict[(str, t.Any)] = (body if (body is not None) else {})
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (human is not None):
            __query['human'] = human
        if (pretty is not None):
            __query['pretty'] = pretty
        if (verbose is not None):
            __query['verbose'] = verbose
        # Individual body fields are only honoured when no raw body is given.
        if (not __body):
            if (docs is not None):
                __body['docs'] = docs
            if (pipeline is not None):
                __body['pipeline'] = pipeline
        __headers = {'accept': 'application/json', 'content-type': 'application/json'}
        return (await self.perform_request('POST', __path, params=__query, headers=__headers, body=__body))
def _parse_table_summary(conn: RDBMSDatabase, summary_template: str, table_name: str) -> str:
    """Render a one-line natural-language summary of a database table.

    Fills `summary_template` with the table name and a comma-separated
    column list (column comments in parentheses), then appends index keys
    and the table comment when available. A failing comment lookup is
    treated as "no comment".
    """
    columns = [
        f"{column['name']} ({column.get('comment')})" if column.get('comment') else f"{column['name']}"
        for column in conn.get_columns(table_name)
    ]
    column_str = ', '.join(columns)
    index_keys = [
        f"{index_key['name']}(`{', '.join(index_key['column_names'])}`) "
        for index_key in conn.get_indexes(table_name)
    ]
    table_str = summary_template.format(table_name=table_name, columns=column_str)
    if index_keys:
        index_key_str = ', '.join(index_keys)
        table_str += f', and index keys: {index_key_str}'
    try:
        comment = conn.get_table_comment(table_name)
    except Exception:
        comment = dict(text=None)
    if comment.get('text'):
        table_str += f", and table comment: {comment.get('text')}"
    return table_str
class WorkflowContextImpl(WorkflowContext):
    """WorkflowContext backed by a MetadataManager for state persistence."""

    def __init__(self, namespace: str, workflow: Workflow, metadata_manager: MetadataManager):
        super().__init__(namespace, workflow)
        # Metadata store used to look up workflows and their states.
        self._metadata_manager = metadata_manager

    def get_state(self, state_descriptor: StateDescriptor) -> State:
        """Fetch (creating on first access) the persisted state described by
        `state_descriptor` for this context's workflow."""
        workflow_meta = self._metadata_manager.get_workflow_by_name(namespace=self.namespace, name=self.workflow.name)
        return self._metadata_manager.get_or_create_workflow_state(workflow_id=workflow_meta.id, descriptor=state_descriptor)
def parameter_count_table(model: nn.Module, max_depth: int=3) -> str:
    """Render a markdown table of per-(sub)module parameter counts.

    Walks the hierarchical counts from `parameter_count` down to `max_depth`
    levels, showing exact shapes for leaf parameters and human-readable
    K/M/G sizes for aggregated module counts.
    """
    count: typing.DefaultDict[(str, int)] = parameter_count(model)
    param_shape: typing.Dict[(str, typing.Tuple)] = {k: tuple(v.shape) for (k, v) in model.named_parameters()}
    table: typing.List[typing.Tuple] = []

    def format_size(x: int) -> str:
        # FIX: the giga-scale literals had been corrupted to `.0` (0.0),
        # which made the first branch divide by zero; restored to 1e8/1e9,
        # matching the M (1e5/1e6) and K (1e2/1e3) branches.
        if (x > 100000000.0):
            return '{:.1f}G'.format((x / 1000000000.0))
        if (x > 100000.0):
            return '{:.1f}M'.format((x / 1000000.0))
        if (x > 100.0):
            return '{:.1f}K'.format((x / 1000.0))
        return str(x)

    def fill(lvl: int, prefix: str) -> None:
        # Emit rows for names exactly `lvl` dots deep under `prefix`, then
        # recurse one level deeper for each of them.
        if (lvl >= max_depth):
            return
        for (name, v) in count.items():
            if ((name.count('.') == lvl) and name.startswith(prefix)):
                indent = (' ' * (lvl + 1))
                if (name in param_shape):
                    table.append(((indent + name), (indent + str(param_shape[name]))))
                else:
                    table.append(((indent + name), (indent + format_size(v))))
                fill((lvl + 1), (name + '.'))

    # The '' key holds the grand total for the whole model.
    table.append(('model', format_size(count.pop(''))))
    fill(0, '')
    # Temporarily preserve leading spaces so tabulate keeps the indentation.
    old_ws = tabulate.PRESERVE_WHITESPACE
    tabulate.PRESERVE_WHITESPACE = True
    tab = tabulate.tabulate(table, headers=['name', '#elements or shape'], tablefmt='pipe')
    tabulate.PRESERVE_WHITESPACE = old_ws
    return tab
class OptionPlotoptionsBubbleLabelStyle(Options):
    """Label style options for bubble series.

    FIX: the `@property`/`@<name>.setter` decorators were lost, leaving
    duplicate `def` pairs where the setter silently replaced the getter;
    restored the getter/setter property pattern these Options classes use.
    """

    @property
    def fontSize(self):
        """Label font size; defaults to '0.8em'."""
        return self._config_get('0.8em')

    @fontSize.setter
    def fontSize(self, num: float):
        self._config(num, js_type=False)

    @property
    def fontWeight(self):
        """Label font weight; defaults to 'bold'."""
        return self._config_get('bold')

    @fontWeight.setter
    def fontWeight(self, text: str):
        self._config(text, js_type=False)
# NOTE(review): stripped decorator — originally `@<blueprint>_page.route(...)`
# (leading `@` and blueprint name prefix lost in extraction).
_page.route('/table/edit', methods=['POST'])
def table_edit():
    """Persist one edited table cell from the JSON request body.

    Validates the session UUID, stores the new value under
    `all_data['extra_data']` (creating the per-row dict on first edit) and
    mirrors it into `all_data['data']` when that row/field already exists.
    """
    try:
        res = check_uuid(all_data['uuid'], request.json['uuid'])
        # FIX: identity comparison with None instead of `!= None`.
        if res is not None:
            return jsonify(res)
        # FIX: renamed local from `id` to avoid shadowing the builtin.
        row_id = request.json['id']
        field = request.json['field']
        new_field_value = request.json['new_field_value']
        if row_id in all_data['extra_data']:
            all_data['extra_data'][row_id][field] = new_field_value
        else:
            all_data['extra_data'][row_id] = {field: new_field_value}
        if (row_id in all_data['data']) and (field in all_data['data'][row_id]):
            all_data['data'][row_id][field] = new_field_value
        return jsonify(status='success', msg='')
    except Exception as e:
        print(e)
        import traceback
        traceback.print_exc()
        return jsonify(status='fail', msg='Unknown error fail to save edit results.')
def test_jobpaths(tmp_path: Path) -> None:
    """JobPaths builds '<jobid>_<task>_log.out' paths and expands %A/%a folder patterns."""
    default_task = utils.JobPaths(tmp_path, '123')
    assert default_task.stdout == tmp_path / '123_0_log.out'
    task_one = utils.JobPaths(tmp_path, '123', 1)
    assert task_one.stdout == tmp_path / '123_1_log.out'
    array_folder = tmp_path / 'array-%A-index-%a'
    expected = tmp_path / 'array-456-index-3' / '456_3_0_log.out'
    assert utils.JobPaths(array_folder, '456_3').stdout == expected
def calculate_db_score(start_date):
    """Total the gamification score accumulated since *start_date* from DB counters.

    Each optional component (wrong answers, timeboxes, decks, learned, matured)
    contributes 0 when its corresponding user setting is falsy; the result is
    clamped to be non-negative.
    """
    settings = ah.user_settings
    if settings['keep_log']:
        ah.log.debug('Begin function')
    correct = int(db_helper.correct_answer_count(start_date))
    wrong = int(db_helper.wrong_answer_count(start_date) / settings['tries_eq']) if settings['tries_eq'] else 0
    timebox = int(db_helper.timebox_count(start_date) * settings['timeboxpoints']) if settings['timeboxpoints'] else 0
    decks = int(db_helper.decks_count(start_date) * settings['deckpoints']) if settings['deckpoints'] else 0
    learned = int(db_helper.learned_count(start_date) / settings['learned_eq']) if settings['learned_eq'] else 0
    matured = int(db_helper.matured_count(start_date) / settings['matured_eq']) if settings['matured_eq'] else 0
    # Never report a negative score.
    score = max(correct + wrong + timebox + decks + learned + matured, 0)
    if settings['keep_log']:
        ah.log.debug('End function returning: %s' % score)
    return score
# NOTE(review): '_renderer(wrap_type=ColumnRegExpMetric)' reads like a class
# decorator whose leading '@' was stripped from this source — confirm.
_renderer(wrap_type=ColumnRegExpMetric)
class ColumnRegExpMetricRenderer(MetricRenderer):
    """Render a ColumnRegExpMetric result as counters plus top-mismatch tables."""
    # NOTE(review): defined without 'self' yet invoked as self._get_counters(...);
    # likely a @staticmethod whose decorator was stripped — confirm.
    def _get_counters(dataset_name: str, metrics: DataIntegrityValueByRegexpStat) -> BaseWidgetInfo:
        # Counter widget: total value count plus mismatch count and percentage.
        percents = round(((metrics.number_of_not_matched * 100) / metrics.number_of_rows), 3)
        counters = [CounterData(label='Number of Values', value=f'{metrics.number_of_rows}'), CounterData(label='Mismatched', value=f'{metrics.number_of_not_matched} ({percents}%)')]
        return counter(counters=counters, title=f'{dataset_name.capitalize()} dataset')
    # NOTE(review): same stripped-@staticmethod pattern as _get_counters — confirm.
    def _get_table_stat(dataset_name: str, top: int, metrics: DataIntegrityValueByRegexpStat) -> BaseWidgetInfo:
        # Table widget of the top mismatched values and their counts.
        return table_data(title=f'{dataset_name.capitalize()} Dataset: top {top} mismatched values', column_names=['Value', 'Count'], data=metrics.table_of_not_matched.items())
    def render_html(self, obj: ColumnRegExpMetric) -> List[BaseWidgetInfo]:
        """Assemble the header, counters, and (tabbed, when a reference dataset exists) tables."""
        metric_result = obj.get_result()
        column_name = metric_result.column_name
        result = [header_text(label=f"RegExp Match for column '{column_name}'."), self._get_counters('current', metric_result.current)]
        if (metric_result.reference is not None):
            result.append(self._get_counters('reference', metric_result.reference))
        current_table = self._get_table_stat('current', metric_result.top, metric_result.current)
        if (metric_result.reference is not None):
            # With a reference dataset, present current/reference tables side by side as tabs.
            tables_tabs = [TabData(title='Current dataset', widget=current_table), TabData(title='Reference dataset', widget=self._get_table_stat('reference', metric_result.top, metric_result.reference))]
            tables = widget_tabs(tabs=tables_tabs)
        else:
            tables = current_table
        result.append(tables)
        return result
class SignalGeneratorNode(FunctionGeneratorNode):
    """Node that streams samples from its generator onto SAMPLE_TOPIC until shut down."""
    SAMPLE_TOPIC = lg.Topic(SignalSampleMessage)
    def setup(self) -> None:
        # Event used to ask the publisher loop to stop.
        self._shutdown = asyncio.Event()
    def cleanup(self) -> None:
        self._shutdown.set()
    # NOTE(review): the bare '(SAMPLE_TOPIC)' below looks like a stripped
    # '@lg.publisher(SAMPLE_TOPIC)' decorator — confirm against the original.
    (SAMPLE_TOPIC)
    async def publish_samples(self) -> lg.AsyncPublisher:
        """Yield (topic, message) pairs from the generator, pausing between samples."""
        while (not self._shutdown.is_set()):
            sample_message = self._generator.next_sample()
            (yield (self.SAMPLE_TOPIC, SignalSampleMessage(timestamp=sample_message.timestamp, sample=sample_message.data)))
            # Throttle the publishing loop.
            (await asyncio.sleep(PUBLISHER_SLEEP_SECS))
class BackendService(resource.Resource):
    """Represents a Compute Engine BackendService resource, built from API response dicts."""
    def __init__(self, **kwargs):
        """Populate attributes from keyword fields (see from_dict for the API-key mapping)."""
        super(BackendService, self).__init__(resource_id=kwargs.get('id'), resource_type=resource.ResourceType.BACKEND_SERVICE, name=kwargs.get('name'), display_name=kwargs.get('name'))
        self.full_name = kwargs.get('full_name')
        self.affinity_cookie_ttl_sec = kwargs.get('affinity_cookie_ttl_sec')
        self.backends = kwargs.get('backends')
        self.cdn_policy = kwargs.get('cdn_policy')
        self.connection_draining = kwargs.get('connection_draining')
        self.creation_timestamp = kwargs.get('creation_timestamp')
        self.description = kwargs.get('description')
        self.enable_cdn = kwargs.get('enable_cdn')
        self.health_checks = kwargs.get('health_checks')
        self.iap = kwargs.get('iap')
        self.load_balancing_scheme = kwargs.get('load_balancing_scheme')
        self.port = kwargs.get('port')
        self.port_name = kwargs.get('port_name')
        self.project_id = kwargs.get('project_id')
        self.protocol = kwargs.get('protocol')
        self.region = kwargs.get('region')
        self.resource_id = kwargs.get('id')
        self.session_affinity = kwargs.get('session_affinity')
        self.timeout_sec = kwargs.get('timeout_sec')
        # Cached JSON representation; lazily rebuilt by json() when absent.
        self._json = kwargs.get('raw_backend_service')
    # NOTE(review): takes 'cls' but no @classmethod decorator is visible; the
    # decorator appears stripped from this source — confirm.
    def from_dict(cls, full_name, backend_service, project_id=None):
        """Build a BackendService from a GCP API response dict."""
        kwargs = {'project_id': project_id, 'id': backend_service.get('id'), 'full_name': full_name, 'creation_timestamp': backend_service.get('creationTimestamp'), 'name': backend_service.get('name'), 'description': backend_service.get('description'), 'affinity_cookie_ttl_sec': backend_service.get('affinityCookieTtlSec'), 'backends': backend_service.get('backends', []), 'cdn_policy': backend_service.get('cdnPolicy', {}), 'connection_draining': backend_service.get('connectionDraining', {}), 'enable_cdn': backend_service.get('enableCDN'), 'health_checks': backend_service.get('healthChecks', []), 'iap': backend_service.get('iap', {}), 'load_balancing_scheme': backend_service.get('loadBalancingScheme'), 'port': backend_service.get('port'), 'port_name': backend_service.get('portName'), 'protocol': backend_service.get('protocol'), 'region': backend_service.get('region'), 'session_affinity': backend_service.get('sessionAffinity'), 'timeout_sec': backend_service.get('timeoutSec'), 'raw_backend_service': json.dumps(backend_service, sort_keys=True)}
        return cls(**kwargs)
    # NOTE(review): no 'self'/'cls' parameter; likely a stripped @staticmethod — confirm.
    def from_json(full_name, json_string, project_id=None):
        """Build a BackendService from its JSON string representation."""
        backend_service = json.loads(json_string)
        return BackendService.from_dict(full_name, backend_service, project_id)
    def _create_json_str(self):
        """Serialize this resource back to a canonical (sorted-key) JSON string."""
        resource_dict = {'id': self.id, 'full_name': self.full_name, 'creationTimestamp': self.creation_timestamp, 'name': self.name, 'description': self.description, 'affinityCookieTtlSec': self.affinity_cookie_ttl_sec, 'backends': self.backends, 'cdnPolicy': self.cdn_policy, 'connectionDraining': self.connection_draining, 'enableCDN': self.enable_cdn, 'healthChecks': self.health_checks, 'iap': self.iap, 'loadBalancingScheme': self.load_balancing_scheme, 'port': self.port, 'portName': self.port_name, 'protocol': self.protocol, 'region': self.region, 'sessionAffinity': self.session_affinity, 'timeoutSec': self.timeout_sec}
        # Drop falsy values (None, empty containers, 0, False) to keep the output minimal.
        resource_dict = dict(((k, v) for (k, v) in list(resource_dict.items()) if v))
        return json.dumps(resource_dict, sort_keys=True)
    # NOTE(review): likely a stripped @property (same for key below) — confirm.
    def json(self):
        """Return the JSON representation, building and caching it on first use."""
        if (not self._json):
            self._json = self._create_json_str()
        return self._json
    def key(self):
        """Return the lookup Key for this resource."""
        return Key.from_args(self.project_id, self.name, region=self.region)
class ExcludeFromFileTestCase(IncludeExcludeMixIn, unittest.TestCase):
    """Tests for transform.exclude_from_file using the shared include/exclude fixtures."""

    def test_filter(self):
        """Only the sequences at fixture indices 2 and 4 survive the exclusion file."""
        survivors = list(transform.exclude_from_file(self.sequences, self.handle))
        self.assertEqual(2, len(survivors))
        self.assertEqual([self.sequences[2], self.sequences[4]], survivors)
class Migration(migrations.Migration):
    """Convert several legacy JSON columns over to models.JSONField."""

    dependencies = [('frontend', '0080_replace_nullboolean')]

    operations = [
        migrations.AlterField(model_name='maillog', name='metadata', field=models.JSONField(blank=True, null=True)),
        migrations.AlterField(model_name='measureglobal', name='cost_savings', field=models.JSONField(blank=True, null=True)),
        migrations.AlterField(model_name='measureglobal', name='percentiles', field=models.JSONField(blank=True, null=True)),
        migrations.AlterField(model_name='measurevalue', name='cost_savings', field=models.JSONField(blank=True, null=True)),
        migrations.AlterField(model_name='practicestatistics', name='star_pu', field=models.JSONField(blank=True, null=True)),
    ]
class BuildTest(TestCase):
    """Smoke test for prescribing_for_orgs against a generated matrixstore fixture."""
    # NOTE(review): takes 'cls' but no @classmethod decorator is visible (same
    # for tearDownClass below); the decorators appear stripped — confirm.
    def setUpTestData(cls):
        # Six months of generated data, plus a CCG with six practices of its own.
        factory = DataFactory()
        factory.create_all(start_date='2018-06-01', num_months=6, num_practices=6, num_presentations=6)
        ccg = PCT.objects.create(code='ABC', org_type='CCG')
        for i in range(6):
            ccg.practice_set.create(code=f'ABC00{i}', setting=4)
        # Patch the global matrixstore; the returned callable undoes it in tearDownClass.
        cls._remove_patch = patch_global_matrixstore(matrixstore_from_data_factory(factory))
    def test_build_smoke_test(self):
        # One row per practice over the queried date range.
        df = prescribing_for_orgs('2018-06-01', '2018-09-01', 'practice')
        assert (len(df) == 6)
    def tearDownClass(cls):
        cls._remove_patch()
        super().tearDownClass()
def attribute_around_surface_asymmetric():
    """Slice cube attributes in an asymmetric window (10 above / 20 below) around a surface,
    writing one ijxyz file per attribute."""
    cube_path = EXPATH1 / 'ib_test_cube2.segy'
    surface_path = EXPATH2 / 'h1.dat'
    shift_above, shift_below = 10, 20
    cube = xtgeo.cube_from_file(cube_path)
    surface = xtgeo.surface_from_file(surface_path, fformat='ijxyz', template=cube)
    # Window bounds: copies of the surface shifted up and down.
    upper = surface.copy()
    lower = surface.copy()
    upper.values -= shift_above
    lower.values += shift_below
    if DEBUG:
        upper.describe()
        lower.describe()
    attributes = surface.slice_cube_window(cube, attribute='all', sampling='trilinear', zsurf=upper, other=lower)
    for attr_name in attributes:
        if DEBUG:
            attributes[attr_name].describe()
        attributes[attr_name].to_file(TMPDIR / ('myfile_asymmetric_' + attr_name + '.dat'), fformat='ijxyz')
class OptionSeriesBarSonificationContexttracksMappingVolume(Options):
    """Volume mapping options for bar-series sonification context tracks."""
    # NOTE(review): each option below appears as a same-named getter/setter pair;
    # the @property / @<name>.setter decorators appear to have been stripped from
    # this source (as written, the later def shadows the earlier) — confirm
    # against the original generated code.
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class TestLinearDisplayP3Serialize(util.ColorAssertsPyTest):
    """Serialization cases for the linear Display P3 color space."""
    # Each tuple: (input color string, to_string() options, expected serialization).
    COLORS = [('color(--display-p3-linear 0 0.3 0.75 / 0.5)', {}, 'color(--display-p3-linear 0 0.3 0.75 / 0.5)'), ('color(--display-p3-linear 0 0.3 0.75)', {'alpha': True}, 'color(--display-p3-linear 0 0.3 0.75 / 1)'), ('color(--display-p3-linear 0 0.3 0.75 / 0.5)', {'alpha': False}, 'color(--display-p3-linear 0 0.3 0.75)'), ('color(--display-p3-linear none 0.3 0.75)', {}, 'color(--display-p3-linear 0 0.3 0.75)'), ('color(--display-p3-linear none 0.3 0.75)', {'none': True}, 'color(--display-p3-linear none 0.3 0.75)'), ('color(--display-p3-linear 1.2 0.2 0)', {}, 'color(--display-p3-linear 1 0.23331 0.01276)'), ('color(--display-p3-linear 1.2 0.2 0)', {'fit': False}, 'color(--display-p3-linear 1.2 0.2 0)')]
    # NOTE(review): the bare '.parametrize(...)' below looks like a stripped
    # '@pytest.mark.parametrize'-style decorator — confirm against the original.
    .parametrize('color1,options,color2', COLORS)
    def test_colors(self, color1, options, color2):
        """Each input should serialize to the expected string under the given options."""
        self.assertEqual(Color(color1).to_string(**options), color2)
class BackupSerializer(_HideNodeSerializer):
    """Serializer for Backup objects: only 'note' is updatable; everything else is read-only."""
    _model_ = Backup
    _update_fields_ = ('note',)
    _default_fields_ = ('hostname', 'vm', 'dc', 'name', 'disk_id')
    hostname = s.CharField(source='vm_hostname', read_only=True)
    vm_uuid = s.CharField(source='vm.uuid', read_only=True)
    vm = s.CharField(source='vm', required=False, read_only=True)
    dc = s.CharField(source='dc', read_only=True)
    define = s.CharField(source='define.name', read_only=True)
    name = s.RegexField('^[A-Za-z0-9][A-Za-z0-9\\._-]*$', max_length=24, min_length=1)
    disk_id = s.IntegerField(source='array_disk_id', max_value=DISK_ID_MAX_BHYVE, min_value=DISK_ID_MIN)
    type = s.IntegerChoiceField(choices=Backup.TYPE, read_only=True)
    node = s.CharField(source='node.hostname', read_only=True)
    zpool = s.CharField(source='zpool.zpool', read_only=True)
    created = s.DateTimeField(read_only=True, required=False)
    status = s.IntegerChoiceField(choices=Backup.STATUS, read_only=True, required=False)
    size = s.IntegerField(read_only=True)
    time = s.IntegerField(read_only=True)
    file_path = s.CharField(read_only=True)
    note = s.SafeCharField(max_length=128, required=False)
    def __init__(self, request, instance, node_view=False, *args, **kwargs):
        """Hide the 'dc' field unless serializing for the node-level view."""
        super(BackupSerializer, self).__init__(request, instance, *args, **kwargs)
        if (not node_view):
            del self.fields['dc']
class Ops(enum.Enum):
    """EVM opcodes; each member's value is the instruction's one-byte opcode."""
    # Arithmetic
    ADD = 0x01
    MUL = 0x02
    SUB = 0x03
    DIV = 0x04
    SDIV = 0x05
    MOD = 0x06
    SMOD = 0x07
    ADDMOD = 0x08
    MULMOD = 0x09
    EXP = 0x0A
    SIGNEXTEND = 0x0B
    # Comparison & bitwise logic
    LT = 0x10
    GT = 0x11
    SLT = 0x12
    SGT = 0x13
    EQ = 0x14
    ISZERO = 0x15
    AND = 0x16
    OR = 0x17
    XOR = 0x18
    NOT = 0x19
    BYTE = 0x1A
    # Hashing
    KECCAK = 0x20
    # Environment information
    ADDRESS = 0x30
    BALANCE = 0x31
    ORIGIN = 0x32
    CALLER = 0x33
    CALLVALUE = 0x34
    CALLDATALOAD = 0x35
    CALLDATASIZE = 0x36
    CALLDATACOPY = 0x37
    CODESIZE = 0x38
    CODECOPY = 0x39
    GASPRICE = 0x3A
    EXTCODESIZE = 0x3B
    EXTCODECOPY = 0x3C
    RETURNDATASIZE = 0x3D
    RETURNDATACOPY = 0x3E
    # Block information
    BLOCKHASH = 0x40
    COINBASE = 0x41
    TIMESTAMP = 0x42
    NUMBER = 0x43
    DIFFICULTY = 0x44
    GASLIMIT = 0x45
    # Control flow
    STOP = 0x00
    JUMP = 0x56
    JUMPI = 0x57
    PC = 0x58
    GAS = 0x5A
    JUMPDEST = 0x5B
    # Storage
    SLOAD = 0x54
    SSTORE = 0x55
    # Stack operations
    POP = 0x50
    PUSH1 = 0x60
    PUSH2 = 0x61
    PUSH3 = 0x62
    PUSH4 = 0x63
    PUSH5 = 0x64
    PUSH6 = 0x65
    PUSH7 = 0x66
    PUSH8 = 0x67
    PUSH9 = 0x68
    PUSH10 = 0x69
    PUSH11 = 0x6A
    PUSH12 = 0x6B
    PUSH13 = 0x6C
    PUSH14 = 0x6D
    PUSH15 = 0x6E
    PUSH16 = 0x6F
    PUSH17 = 0x70
    PUSH18 = 0x71
    PUSH19 = 0x72
    PUSH20 = 0x73
    PUSH21 = 0x74
    PUSH22 = 0x75
    PUSH23 = 0x76
    PUSH24 = 0x77
    PUSH25 = 0x78
    PUSH26 = 0x79
    PUSH27 = 0x7A
    PUSH28 = 0x7B
    PUSH29 = 0x7C
    PUSH30 = 0x7D
    PUSH31 = 0x7E
    PUSH32 = 0x7F
    DUP1 = 0x80
    DUP2 = 0x81
    DUP3 = 0x82
    DUP4 = 0x83
    DUP5 = 0x84
    DUP6 = 0x85
    DUP7 = 0x86
    DUP8 = 0x87
    DUP9 = 0x88
    DUP10 = 0x89
    DUP11 = 0x8A
    DUP12 = 0x8B
    DUP13 = 0x8C
    DUP14 = 0x8D
    DUP15 = 0x8E
    DUP16 = 0x8F
    SWAP1 = 0x90
    SWAP2 = 0x91
    SWAP3 = 0x92
    SWAP4 = 0x93
    SWAP5 = 0x94
    SWAP6 = 0x95
    SWAP7 = 0x96
    SWAP8 = 0x97
    SWAP9 = 0x98
    SWAP10 = 0x99
    SWAP11 = 0x9A
    SWAP12 = 0x9B
    SWAP13 = 0x9C
    SWAP14 = 0x9D
    SWAP15 = 0x9E
    SWAP16 = 0x9F
    # Memory
    MLOAD = 0x51
    MSTORE = 0x52
    MSTORE8 = 0x53
    MSIZE = 0x59
    # Logging
    LOG0 = 0xA0
    LOG1 = 0xA1
    LOG2 = 0xA2
    LOG3 = 0xA3
    LOG4 = 0xA4
    # System operations
    CREATE = 0xF0
    RETURN = 0xF3
    CALL = 0xF1
    CALLCODE = 0xF2
    DELEGATECALL = 0xF4
    STATICCALL = 0xFA
    REVERT = 0xFD
    SELFDESTRUCT = 0xFF
def assert_single_freq_in_range(field_name: str):
    """Build a validator that requires the named FieldDataset to hold exactly one
    frequency, lying within the 'source_time' frequency range."""
    # NOTE(review): the bare parenthesized line below is syntactically invalid and
    # reads like a stripped '@pydantic.validator(field_name, always=True,
    # allow_reuse=True)'-style decorator — confirm against the original source.
    (field_name, always=True, allow_reuse=True)
    def _single_frequency_in_range(cls, val: FieldDataset, values: dict) -> FieldDataset:
        # Nothing to validate when the field is unset.
        if (val is None):
            return val
        source_time = get_value(key='source_time', values=values)
        (fmin, fmax) = source_time.frequency_range()
        # Every field component must carry exactly one in-range frequency.
        for (name, scalar_field) in val.field_components.items():
            freqs = scalar_field.f
            if (len(freqs) != 1):
                raise SetupError(f"'{field_name}.{name}' must have a single frequency, contains {len(freqs)} frequencies.")
            freq = float(freqs[0])
            if ((freq < fmin) or (freq > fmax)):
                raise SetupError(f"'{field_name}.{name}' contains frequency: {freq:.2e} Hz, which is outside of the 'source_time' frequency range [{fmin:.2e}-{fmax:.2e}] Hz.")
        return val
    return _single_frequency_in_range
class FlytePathResolver():
    """Thread-safe, class-wide registry mapping flyte:// URIs to concrete remote paths."""
    protocol = 'flyte://'
    # Shared URI -> remote-path map, guarded by _lock.
    _flyte_path_to_remote_map: typing.Dict[(str, str)] = {}
    _lock = threading.Lock()
    # NOTE(review): takes 'cls' but no @classmethod decorator is visible (same
    # for add_mapping below); the decorators appear stripped — confirm.
    def resolve_remote_path(cls, flyte_uri: str) -> typing.Optional[str]:
        """Return the remote path registered for flyte_uri, or None when unknown."""
        with cls._lock:
            if (flyte_uri in cls._flyte_path_to_remote_map):
                return cls._flyte_path_to_remote_map[flyte_uri]
            return None
    def add_mapping(cls, flyte_uri: str, remote_path: str):
        """Register (or overwrite) the remote path for flyte_uri."""
        with cls._lock:
            cls._flyte_path_to_remote_map[flyte_uri] = remote_path
class DIAYNActionModel(nn.Module):
    """Policy-conditioned action model: a shared hidden layer feeding one
    softmax action head per policy; forward selects the head named by idx_policy."""

    def __init__(self, n_observations, n_actions, n_hidden, n_policies):
        super().__init__()
        self.linear = nn.Linear(n_observations, n_hidden)
        self.linear2 = nn.Linear(n_hidden, n_actions * n_policies)
        self.n_policies = n_policies
        self.n_actions = n_actions

    def forward(self, frame, idx_policy):
        """Return per-sample action probabilities for the selected policy head."""
        hidden = torch.tanh(self.linear(frame))
        logits = self.linear2(hidden)
        batch = logits.size(0)
        # All heads at once: (batch, n_policies, n_actions), then pick each
        # sample's own policy head.
        logits = logits.view(batch, self.n_policies, self.n_actions)
        selected = logits[torch.arange(batch), idx_policy]
        return torch.softmax(selected, dim=-1)
def get_f_tran_mod(f, N, C):
    """Split force f into components parallel and perpendicular to direction N,
    and build a modified translational force from them.

    Returns (f_tran, f_parallel, f_perp).
    """
    parallel = f.dot(N) * N
    perp = f - parallel
    if C < 0:
        # Negative curvature: scale the inverted parallel component by lambda.
        scale = get_lambda(parallel)
        tran = perp - scale * parallel
    elif get_rms(perp) < 2 * EVANG2AUBOHR:
        # Small perpendicular force: damp it and step fully against the parallel part.
        tran = 0.5 * perp - parallel
    else:
        tran = perp - 0.5 * parallel
    return (tran, parallel, perp)
def test_get_canonical_transaction_by_index(chain, tx):
    """After importing a block containing tx, it is retrievable by (block_number, index)."""
    # Some chain fixtures expose apply_transaction; others build whole blocks.
    if hasattr(chain, 'apply_transaction'):
        (new_block, _, computation) = chain.apply_transaction(tx)
        computation.raise_if_error()
    else:
        (new_block, receipts, computations) = chain.build_block_with_transactions_and_withdrawals([tx])
        assert (len(computations) == 1)
        computations[0].raise_if_error()
    block_import_result = chain.import_block(new_block)
    block = block_import_result.imported_block
    assert (block.transactions == (tx,))
    # The transaction lands in block number 1 at index 0.
    assert (chain.get_canonical_transaction_index(tx.hash) == (1, 0))
    assert (chain.get_canonical_transaction_by_index(1, 0) == tx)
class HasDynamicViews(HasTraits):
    """Mix-in providing traitsui views that are composed dynamically from
    registered DynamicView declarations and contributed sub-elements."""
    # Registry of dynamic view declarations, keyed by view name.
    _dynamic_view_registry = Dict(Str, Instance(DynamicView))
    def trait_view(self, name=None, view_element=None):
        """Resolve a view: a named dynamic view (or the default one) when registered,
        otherwise defer to the standard HasTraits lookup."""
        result = None
        if (not isinstance(name, ViewElement)):
            # No usable name: fall back to the declaration marked use_as_default, if any.
            if ((view_element is None) and ((name is None) or (len(name) < 1))):
                for (dname, declaration) in self._dynamic_view_registry.items():
                    if declaration.use_as_default:
                        result = self._compose_dynamic_view(dname)
                        break
            elif ((view_element is None) and (name in self._dynamic_view_registry)):
                result = self._compose_dynamic_view(name)
        if (result is None):
            result = super().trait_view(name, view_element)
        return result
    def declare_dynamic_view(self, declaration):
        """Register a DynamicView declaration under its name."""
        self._dynamic_view_registry[declaration.name] = declaration
    def _build_dynamic_sub_element(self, definition, sub_elements):
        """Instantiate the definition's container class from the collected elements."""
        logger.debug('\tBuilding dynamic sub-element [%s] with elements [%s]', definition.name, sub_elements)
        return definition.klass(*sub_elements, **definition.keywords)
    def _build_dynamic_view(self, declaration, sub_elements, handler):
        """Instantiate the View from collected elements and an optional delegating handler."""
        logger.debug('\tBuilding dynamic view [%s] with elements [%s]', declaration.name, sub_elements)
        return View(*sub_elements, id=declaration.id, handler=handler, **declaration.keywords)
    def _compose_dynamic_sub_element(self, definition):
        """Collect the contributed elements for a sub-element definition and build it."""
        logger.debug('Composing dynamic sub-element named [%s] for [%s]', definition.name, self)
        elements = self._get_dynamic_elements(definition.name)
        return self._build_dynamic_sub_element(definition, elements)
    def _compose_dynamic_view(self, name):
        """Collect contributed elements and handlers for the named declaration, then build the view."""
        logger.debug('Composing dynamic view [%s] for [%s]', name, self)
        declaration = self._dynamic_view_registry[name]
        elements = self._get_dynamic_elements(declaration.name)
        handler = None
        handlers = self._get_dynamic_handlers(declaration.name, elements)
        if (len(handlers) > 0):
            handler = DelegatingHandler(sub_handlers=handlers)
        return self._build_dynamic_view(declaration, elements, handler)
    def _get_dynamic_elements(self, name):
        """Gather this object's view sub-elements contributed to the named dynamic view,
        keeping the highest-priority contribution per order slot, sorted by order."""
        # Contributions are marked by '_<name>_order' / '_<name>_priority' attributes.
        name = name.replace(' ', '_')
        order_trait_name = ('_%s_order' % name)
        priority_trait_name = ('_%s_priority' % name)
        all_elements = [self.trait_view(g) for g in self.trait_views(klass=ViewSubElement)]
        elements = [e for e in all_elements if (hasattr(e, order_trait_name) and (getattr(e, order_trait_name) is not None))]
        # For duplicate order values, keep the element with the higher priority.
        filtered = {}
        for e in elements:
            order = getattr(e, order_trait_name)
            priority = (getattr(e, priority_trait_name) or 0)
            current = filtered.setdefault(order, e)
            if (current is not e):
                current_priority = getattr(current, priority_trait_name)
                if (current_priority < priority):
                    filtered[order] = e
        ordering = sorted(filtered)
        elements = [filtered[order] for order in ordering]
        # Expand nested dynamic sub-elements in place, preserving position.
        for i in range(len(elements)):
            if isinstance(elements[i], DynamicViewSubElement):
                e = elements.pop(i)
                composed = self._compose_dynamic_sub_element(e)
                elements.insert(i, composed)
        return elements
    def _get_dynamic_handlers(self, name, elements):
        """Collect the non-None '_<name>_handler' attributes contributed by the elements."""
        name = name.replace(' ', '_')
        handler_name = ('_%s_handler' % name)
        handlers = [getattr(e, handler_name) for e in elements if (hasattr(e, handler_name) and (getattr(e, handler_name) is not None))]
        logger.debug('\tFound sub-handlers: %s', handlers)
        return handlers
class OptionsChartShared(abc.ABC):
    """Abstract shared axis-option API for chart components."""
    def __init__(self, component: primitives.HtmlModel, page: primitives.PageModel=None):
        # Bind the owning component; default the page from the component when not given.
        (self.component, self.page) = (component, page)
        if (page is None):
            self.page = component.page
    # NOTE(review): the methods below had no bodies in this source — likely
    # abstract stubs whose @abc.abstractmethod decorators or docstring bodies
    # were stripped. Docstrings are added so the class parses; confirm against
    # the original.
    def x_format(self, js_funcs, profile: Union[(dict, bool)]=None):
        """x-axis format hook (no implementation visible in this source)."""
    def x_format_money(self, symbol='', digit=0, thousand_sep='.', decimal_sep=',', fmt='%v %s', factor=None, alias=''):
        """x-axis money-format hook (no implementation visible in this source)."""
    def x_format_number(self, factor=1000, alias=None, digits=0, thousand_sep='.'):
        """x-axis number-format hook (no implementation visible in this source)."""
    def x_label(self, value: str):
        """x-axis label hook (no implementation visible in this source)."""
    def x_tick_count(self, num):
        """x-axis tick-count hook (no implementation visible in this source)."""
    def y_format(self, js_funcs, profile: Union[(dict, bool)]=None):
        """y-axis format hook (no implementation visible in this source)."""
    def y_format_money(self, symbol: str='', digit: int=0, thousand_sep: str='.', decimal_sep: str=',', fmt: str='%v %s', factor: int=None, alias: str=''):
        """y-axis money-format hook (no implementation visible in this source)."""
    def y_format_number(self, factor=1000, alias=None, digits=0, thousand_sep='.'):
        """y-axis number-format hook (no implementation visible in this source)."""
    def y_label(self, value):
        """y-axis label hook (no implementation visible in this source)."""
    def y_tick_count(self, num):
        """y-axis tick-count hook (no implementation visible in this source)."""
def test_transaction_name_from_class_based_view(client, django_elasticapm_client):
    """The recorded transaction name should use the class-based view's dotted path."""
    tracing_mw = ['elasticapm.contrib.django.middleware.TracingMiddleware']
    with override_settings(**middleware_setting(django.VERSION, tracing_mw)):
        client.get(reverse('elasticapm-class-based'))
    recorded = django_elasticapm_client.events[TRANSACTION][0]
    assert recorded['name'] == 'GET tests.contrib.django.testapp.views.ClassBasedView'
def _sanitize(key, value, **kwargs):
    """Return *value*, replaced with MASK when *key* matches a sanitized field pattern.

    None passes through, dict values are returned untouched, and a missing or
    empty key disables masking. Patterns default to BASE_SANITIZE_FIELD_NAMES
    unless 'sanitize_field_names' is supplied in kwargs.
    """
    if 'sanitize_field_names' in kwargs:
        patterns = kwargs['sanitize_field_names']
    else:
        patterns = BASE_SANITIZE_FIELD_NAMES
    # Pass-throughs: nothing to mask for None or dicts, and without a key
    # there is nothing to match against.
    if value is None or isinstance(value, dict) or not key:
        return value
    lowered = key.lower().strip()
    if any(pattern.match(lowered) for pattern in patterns):
        return MASK
    return value
def main(page: ft.Page):
    """Flet demo page: a pie chart whose hovered section grows and restyles its title."""
    normal_radius = 50
    hover_radius = 60
    normal_title_style = ft.TextStyle(size=16, color=ft.colors.WHITE, weight=ft.FontWeight.BOLD)
    hover_title_style = ft.TextStyle(size=22, color=ft.colors.WHITE, weight=ft.FontWeight.BOLD, shadow=ft.BoxShadow(blur_radius=2, color=ft.colors.BLACK54))
    def on_chart_event(e: ft.PieChartEvent):
        # Enlarge the hovered section; reset every other section to normal.
        for (idx, section) in enumerate(chart.sections):
            if (idx == e.section_index):
                section.radius = hover_radius
                section.title_style = hover_title_style
            else:
                section.radius = normal_radius
                section.title_style = normal_title_style
        chart.update()
    # `chart` is captured by the on_chart_event closure above.
    chart = ft.PieChart(sections=[ft.PieChartSection(40, title='40%', title_style=normal_title_style, color=ft.colors.BLUE, radius=normal_radius), ft.PieChartSection(30, title='30%', title_style=normal_title_style, color=ft.colors.YELLOW, radius=normal_radius), ft.PieChartSection(15, title='15%', title_style=normal_title_style, color=ft.colors.PURPLE, radius=normal_radius), ft.PieChartSection(15, title='15%', title_style=normal_title_style, color=ft.colors.GREEN, radius=normal_radius)], sections_space=0, center_space_radius=40, on_chart_event=on_chart_event, expand=True)
    page.add(chart)
def sinkhorn_tensorized(a, x, b, y, p=2, blur=0.05, reach=None, diameter=None, scaling=0.5, cost=None, debias=True, potentials=False, **kwargs):
    """Sinkhorn divergence between two weighted point clouds, tensorized backend.

    NOTE(review): this source had several identifiers stripped (blank slots in
    the signature, the scaling_parameters/sinkhorn_cost argument lists), leaving
    invalid syntax — presumably non-ASCII names from the upstream geomloss
    implementation. They are restored here with ASCII names (a, b, eps,
    eps_list, rho); confirm against geomloss's sinkhorn_tensorized.

    Args:
        a: (B, N) weights of the source measure.
        x: (B, N, D) support points of the source measure.
        b: (B, M) weights of the target measure.
        y: (B, M, D) support points of the target measure.
        p: exponent of the default ground cost when `cost` is None.
        blur: target entropic blur scale.
        reach: unbalanced-transport parameter (None = balanced).
        diameter: upper bound on the point-cloud diameter; estimated when None.
        scaling: ratio of the epsilon-scaling annealing schedule.
        cost: optional custom cost routine overriding `cost_routines[p]`.
        debias: compute the debiased Sinkhorn divergence.
        potentials: return the dual potentials instead of the cost.
    """
    (B, N, D) = x.shape
    (_, M, _) = y.shape
    if (cost is None):
        cost = cost_routines[p]
    # The symmetric (x,x)/(y,y) costs are only needed for the debiasing terms.
    (C_xx, C_yy) = ((cost(x, x.detach()), cost(y, y.detach())) if debias else (None, None))
    (C_xy, C_yx) = (cost(x, y.detach()), cost(y, x.detach()))
    (diameter, eps, eps_list, rho) = scaling_parameters(x, y, p, blur, reach, diameter, scaling)
    (a_x, b_y, a_y, b_x) = sinkhorn_loop(softmin_tensorized, log_weights(a), log_weights(b), C_xx, C_yy, C_xy, C_yx, eps_list, rho, debias=debias)
    return sinkhorn_cost(eps, rho, a, b, a_x, b_y, a_y, b_x, batch=True, debias=debias, potentials=potentials)
class OptionPlotoptionsLineSonificationDefaultinstrumentoptionsMappingTime(Options):
    """Time mapping options for line-series sonification default instruments."""
    # NOTE(review): each option below appears as a same-named getter/setter pair;
    # the @property / @<name>.setter decorators appear to have been stripped from
    # this source (as written, the later def shadows the earlier) — confirm
    # against the original generated code.
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def _is_jupyter():
    """Return True when running inside a Jupyter (ZMQ) or Google Colab kernel."""
    try:
        shell_name = get_ipython().__class__.__name__
    except NameError:
        # get_ipython only exists inside IPython; plain Python is not a notebook.
        return False
    if shell_name == 'ZMQInteractiveShell':
        return True
    if shell_name == 'TerminalInteractiveShell':
        return False
    return 'google.colab' in str(get_ipython())
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.