code stringlengths 281 23.7M |
|---|
def twilio_conversations_dataset_config(db: Session, twilio_conversations_connection_config: ConnectionConfig, twilio_conversations_dataset: Dict[str, Any]) -> Generator:
    """Fixture: persist a Twilio Conversations dataset and yield its DatasetConfig.

    Renames the connection config to the dataset's fides_key, creates the
    backing CtlDataset row, yields the DatasetConfig for the test, then
    deletes both rows on teardown.
    """
    fides_key = twilio_conversations_dataset['fides_key']
    connection_config = twilio_conversations_connection_config
    connection_config.name = fides_key
    connection_config.key = fides_key
    connection_config.save(db=db)
    ctl_dataset = CtlDataset.create_from_dataset_dict(db, twilio_conversations_dataset)
    dataset_config = DatasetConfig.create(
        db=db,
        data={
            'connection_config_id': connection_config.id,
            'fides_key': fides_key,
            'ctl_dataset_id': ctl_dataset.id,
        },
    )
    yield dataset_config
    # Teardown: remove the rows created above.
    dataset_config.delete(db=db)
    ctl_dataset.delete(db=db)
.parametrize('add_svg_table, gids, retain_gids, expected_xml', [(simple_svg_table_glyph_ids_on_children, '2,4-6', False, _lines(' <svgDoc endGlyphID="1" startGlyphID="1">\n <![CDATA[<svg xmlns=" id="glyph1" d="M2,2"/></svg>]]>\n </svgDoc>\n <svgDoc endGlyphID="2" startGlyphID="2">\n <![CDATA[<svg xmlns=" id="glyph2" d="M4,4"/></svg>]]>\n </svgDoc>\n <svgDoc endGlyphID="3" startGlyphID="3">\n <![CDATA[<svg xmlns=" id="glyph3" d="M5,5"/></svg>]]>\n </svgDoc>\n <svgDoc endGlyphID="4" startGlyphID="4">\n <![CDATA[<svg xmlns=" id="glyph4" d="M6,6"/></svg>]]>\n </svgDoc>\n ')), (simple_svg_table_glyph_ids_on_roots, '2,4-6', False, _lines(' <svgDoc endGlyphID="1" startGlyphID="1">\n <![CDATA[<svg xmlns=" id="glyph1"><path d="M2,2"/></svg>]]>\n </svgDoc>\n <svgDoc endGlyphID="2" startGlyphID="2">\n <![CDATA[<svg xmlns=" id="glyph2"><path d="M4,4"/></svg>]]>\n </svgDoc>\n <svgDoc endGlyphID="3" startGlyphID="3">\n <![CDATA[<svg xmlns=" id="glyph3"><path d="M5,5"/></svg>]]>\n </svgDoc>\n <svgDoc endGlyphID="4" startGlyphID="4">\n <![CDATA[<svg xmlns=" id="glyph4"><path d="M6,6"/></svg>]]>\n </svgDoc>\n ')), (simple_svg_table_glyph_ids_on_children, '2,4-6', True, _lines(' <svgDoc endGlyphID="2" startGlyphID="2">\n <![CDATA[<svg xmlns=" id="glyph2" d="M2,2"/></svg>]]>\n </svgDoc>\n <svgDoc endGlyphID="4" startGlyphID="4">\n <![CDATA[<svg xmlns=" id="glyph4" d="M4,4"/></svg>]]>\n </svgDoc>\n <svgDoc endGlyphID="5" startGlyphID="5">\n <![CDATA[<svg xmlns=" id="glyph5" d="M5,5"/></svg>]]>\n </svgDoc>\n <svgDoc endGlyphID="6" startGlyphID="6">\n <![CDATA[<svg xmlns=" id="glyph6" d="M6,6"/></svg>]]>\n </svgDoc>\n ')), (simple_svg_table_glyph_ids_on_roots, '2,4-6', True, _lines(' <svgDoc endGlyphID="2" startGlyphID="2">\n <![CDATA[<svg xmlns=" id="glyph2"><path d="M2,2"/></svg>]]>\n </svgDoc>\n <svgDoc endGlyphID="4" startGlyphID="4">\n <![CDATA[<svg xmlns=" id="glyph4"><path d="M4,4"/></svg>]]>\n </svgDoc>\n <svgDoc endGlyphID="5" startGlyphID="5">\n <![CDATA[<svg xmlns=" 
id="glyph5"><path d="M5,5"/></svg>]]>\n </svgDoc>\n <svgDoc endGlyphID="6" startGlyphID="6">\n <![CDATA[<svg xmlns=" id="glyph6"><path d="M6,6"/></svg>]]>\n </svgDoc>\n '))])
def test_subset_single_glyph_per_svg(empty_svg_font, add_svg_table, tmp_path, gids, retain_gids, expected_xml):
    """Subsetting a font with an SVG table keeps one svgDoc per retained glyph
    and renumbers (or retains) glyph ids as requested."""
    font = add_svg_table(empty_svg_font)
    input_path = tmp_path / 'TestSVG.ttf'
    font.save(input_path)
    output_path = input_path.with_suffix('.subset.ttf')
    retain_flag = '--retain_gids' if retain_gids else '--no-retain_gids'
    subset.main([
        str(input_path),
        f'--output-file={output_path}',
        f'--gids={gids}',
        retain_flag,
    ])
    subsetted = TTFont(output_path)
    assert getXML(subsetted['SVG '].toXML, subsetted) == expected_xml
class OptionSeriesErrorbarSonificationDefaultspeechoptions(Options):
    """Generated accessor wrapper for the Highcharts
    `series.errorbar.sonification.defaultSpeechOptions` config subtree.

    NOTE(review): each getter/setter pair below shares a name, so as written
    the later `def` shadows the earlier one and the getters are unreachable.
    This looks like stripped `@property` / `@<name>.setter` decorators from a
    code generator — confirm against the original source before relying on it.
    """

    def activeWhen(self) -> 'OptionSeriesErrorbarSonificationDefaultspeechoptionsActivewhen':
        # Accessor for the nested activeWhen sub-configuration object.
        return self._config_sub_data('activeWhen', OptionSeriesErrorbarSonificationDefaultspeechoptionsActivewhen)

    def language(self):
        # Getter; default 'en-US'. (Shadowed by the setter below — see class note.)
        return self._config_get('en-US')

    def language(self, text: str):
        # Setter for the speech language.
        self._config(text, js_type=False)

    def mapping(self) -> 'OptionSeriesErrorbarSonificationDefaultspeechoptionsMapping':
        return self._config_sub_data('mapping', OptionSeriesErrorbarSonificationDefaultspeechoptionsMapping)

    def pointGrouping(self) -> 'OptionSeriesErrorbarSonificationDefaultspeechoptionsPointgrouping':
        return self._config_sub_data('pointGrouping', OptionSeriesErrorbarSonificationDefaultspeechoptionsPointgrouping)

    def preferredVoice(self):
        # Getter; default None (browser default voice).
        return self._config_get(None)

    def preferredVoice(self, text: str):
        self._config(text, js_type=False)

    def showPlayMarker(self):
        # Getter; default True.
        return self._config_get(True)

    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    def type(self):
        # Getter; default 'speech'.
        return self._config_get('speech')

    def type(self, text: str):
        self._config(text, js_type=False)
class Application(dbus.service.Object):
    """Root D-Bus object exposing the GATT services via the
    org.freedesktop.DBus.ObjectManager interface (BlueZ GATT server pattern)."""

    def __init__(self, bus):
        self.path = '/'
        self.services = []
        dbus.service.Object.__init__(self, bus, self.path)
        self.add_service(HeartRateService(bus, 0))
        self.add_service(BatteryService(bus, 1))
        self.add_service(TestService(bus, 2))

    def get_path(self):
        return dbus.ObjectPath(self.path)

    def add_service(self, service):
        self.services.append(service)

    # FIX: the decorator prefix was lost (the source had a bare
    # `.method(DBUS_OM_IFACE, ...)`, which is a syntax error). Restored to the
    # standard dbus-python export decorator used by BlueZ examples.
    @dbus.service.method(DBUS_OM_IFACE, out_signature='a{oa{sa{sv}}}')
    def GetManagedObjects(self):
        """Return {object_path: {interface: {property: value}}} for every
        service, characteristic, and descriptor registered here."""
        response = {}
        print('GetManagedObjects')
        for service in self.services:
            response[service.get_path()] = service.get_properties()
            chrcs = service.get_characteristics()
            for chrc in chrcs:
                response[chrc.get_path()] = chrc.get_properties()
                descs = chrc.get_descriptors()
                for desc in descs:
                    response[desc.get_path()] = desc.get_properties()
        return response
def main():
    """Read titles from the file named by sys.argv[1], convert each to
    simplified Chinese, derive its Pinyin, and print one output record per
    successfully converted title, logging progress every _LOG_EVERY results."""
    previous_title = None
    result_count = 0
    with open(sys.argv[1]) as f:
        for line in f:
            title = _TO_SIMPLIFIED_CHINESE.convert(line.strip())
            if is_good_title(title, previous_title):
                # Apply per-syllable overrides before joining.
                pinyin = [_PINYIN_FIXES.get(item, item) for item in lazy_pinyin(title)]
                pinyin = _PINYIN_SEPARATOR.join(pinyin)
                if (pinyin == title):
                    # lazy_pinyin returned the input unchanged: no conversion happened.
                    logging.info(f'Failed to convert to Pinyin. Ignoring: {pinyin}')
                    continue
                print(make_output(title, pinyin))
                result_count += 1
                if ((result_count % _LOG_EVERY) == 0):
                    log_count(result_count)
                # NOTE(review): indentation was lost in extraction; this line is
                # placed inside the is_good_title branch — confirm it does not
                # belong at loop level in the original.
                previous_title = title
    log_count(result_count)
# FIX: the decorator prefix was lost (the source had a bare `.parametrize(...)`,
# which is a syntax error). Restored the standard pytest marker.
@pytest.mark.parametrize('index, expected_cost', [(1, 35), (2, 53), (3, 25), (4, 31)])
def test_create_node_from_parent(cost_matrix, index, expected_cost):
    """A child node built from the root must sit at level 1, carry the reduced
    cost for its index, and extend the parent's path by one step."""
    parent = Node.from_cost_matrix(cost_matrix=cost_matrix)
    response = Node.from_parent(parent=parent, index=index)
    assert (response.level == 1)
    assert (response.index == index)
    assert (response.cost == expected_cost)
    assert (response.path == [parent.index, response.index])
class TextIOTransformer(TypeTransformer[typing.TextIO]):
    """Flyte type transformer mapping `typing.TextIO` to a single text blob.

    Serialization (`to_literal`) is intentionally unimplemented; only
    deserialization to a local file handle is supported.
    """

    def __init__(self):
        super().__init__(name='TextIO', t=typing.TextIO)

    def _blob_type(self) -> _core_types.BlobType:
        # Single-dimensional blob tagged with the text/plain MIME type.
        return _core_types.BlobType(
            format=mimetypes.types_map['.txt'],
            dimensionality=_core_types.BlobType.BlobDimensionality.SINGLE,
        )

    def get_literal_type(self, t: typing.TextIO) -> LiteralType:
        return _type_models.LiteralType(blob=self._blob_type())

    def to_literal(self, ctx: FlyteContext, python_val: typing.TextIO, python_type: Type[typing.TextIO], expected: LiteralType) -> Literal:
        raise NotImplementedError('Implement handle for TextIO')

    def to_python_value(self, ctx: FlyteContext, lv: Literal, expected_python_type: Type[typing.TextIO]) -> typing.TextIO:
        # Download the blob to a scratch path and hand back an open text handle.
        target = ctx.file_access.get_random_local_path()
        ctx.file_access.get_data(lv.scalar.blob.uri, target, is_multipart=False)
        return open(target, 'r')
# FIX: the decorator prefixes were lost (the source had bare tuples with
# keyword arguments, which is a syntax error). Restored the @mock.patch stack;
# bottom-most decorator maps to the first test parameter, matching the
# (mocked_info, mocked_error, mocked_read) order.
# NOTE(review): confirm the original spelled it `mock.patch` (vs bare `patch`).
@mock.patch('Updater.read_dom0_update_flag_from_disk', return_value={'last_status_update': str(datetime.now().strftime(updater.DATE_FORMAT)), 'status': UpdateStatus.REBOOT_REQUIRED.value})
@mock.patch('Updater.sdlog.error')
@mock.patch('Updater.sdlog.info')
def test_last_required_reboot_performed_failed(mocked_info, mocked_error, mocked_read):
    """When the flag on disk says a reboot is still required, the check must
    report False without logging an error."""
    result = updater.last_required_reboot_performed()
    assert (result is False)
    assert (not mocked_error.called)
def main(page: Page):
    """Flet demo page: a static icon plus a horizontally scrolling row of
    30 remote placeholder images."""
    page.title = 'Images Example'
    page.theme_mode = 'light'
    page.padding = 50
    page.update()
    img = Image(src='/icons/icon-512.png', width=100, height=100, fit='contain')
    images = Row(expand=1, wrap=False, scroll='always')
    page.add(img, images)
    for i in range(0, 30):
        # FIX: the remote image URL literal was mangled in the source (broken
        # f-string). Restored from the upstream flet images example —
        # NOTE(review): confirm against the original.
        images.controls.append(
            Image(
                src=f'https://picsum.photos/200/200?{i}',
                width=200,
                height=200,
                fit='none',
                repeat='noRepeat',
                border_radius=border_radius.all(10),
            )
        )
    page.update()
class HashingFile():
    """File wrapper that hashes and counts every byte written through it.

    `digest()` returns a plain hexdigest for md5, and the wheel RECORD style
    `<hashtype>=<urlsafe-b64>` string for every other algorithm.
    """

    def __init__(self, path, mode, hashtype='sha256'):
        self.fd = open(path, mode)
        self.hashtype = hashtype
        self.hash = hashlib.new(hashtype)
        self.length = 0

    def write(self, data):
        # Keep hash and byte count in sync with what actually hits the file.
        self.hash.update(data)
        self.length += len(data)
        self.fd.write(data)

    def close(self):
        self.fd.close()

    def digest(self):
        if self.hashtype == 'md5':
            return self.hash.hexdigest()
        raw = self.hash.digest()
        return self.hashtype + '=' + native(urlsafe_b64encode(raw))

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.fd.close()
def bind(app, schedule=True, include_dashboard=True):
    """Attach the monitoring-dashboard blueprint to a Flask app.

    NOTE(review): indentation was lost in extraction and has been
    reconstructed here — confirm the placement of register_blueprint and the
    atexit hook against the original module.
    """
    blueprint.name = config.blueprint_name
    config.app = app
    if (not app.secret_key):
        log('WARNING: You should provide a security key.')
        app.secret_key = 'my-secret-key'
    if include_dashboard:
        # Importing the view modules registers their routes on the blueprint.
        from flask_monitoringdashboard.views import deployment, custom, endpoint, outlier, request, profiler, version, auth, reporting, telemetry
        import flask_monitoringdashboard.views
        from flask_monitoringdashboard.core.measurement import init_measurement
        from flask_monitoringdashboard.core.cache import init_cache
        from flask_monitoringdashboard.core import custom_graph
        # record_once: runs exactly once, when the blueprint is registered.
        blueprint.record_once((lambda _state: init_measurement()))
        blueprint.record_once((lambda _state: init_cache()))
        if schedule:
            custom_graph.init(app)
    app.register_blueprint(blueprint, url_prefix=('/' + config.link))
    import atexit
    from flask_monitoringdashboard.core.cache import flush_cache
    atexit.register(flush_cache)
    if (not include_dashboard):
        # NOTE(review): the bare `_request` below looks like the residue of a
        # stripped `@app.teardown_request` decorator — confirm against the
        # original before relying on teardown being wired up.
        _request
        def teardown(_):
            flush_cache()
def search_repo(search_term: str, qualifier_dict: Dict):
    """Search GitHub repositories for `search_term`, optionally narrowed by
    `key:value` qualifiers taken from `qualifier_dict`.

    Returns the paginated repository result from the GitHub client.
    """
    client = get_github_client()
    qualifiers = ' '.join('{}:{}'.format(key, value) for key, value in qualifier_dict.items())
    if qualifiers:
        query = '{} {}'.format(search_term, qualifiers)
    else:
        query = search_term
    return client.search_repositories(query)
class ServiceRegistryTestCase(unittest.TestCase):
    """Tests for the Envisage service registry: registration, required and
    optional lookup, query strings, per-service properties, unregistration,
    minimize/maximize selection, and lazily imported service factories.

    NOTE(review): the bare `(IFoo)` expressions before some nested classes look
    like the residue of stripped `@provides(IFoo)` class decorators — confirm
    against the original test module. Indentation was lost in extraction and
    has been reconstructed.
    """

    def setUp(self):
        # Fresh registry per test; drop any cached test 'foo' module so the
        # lazy-import tests observe a clean sys.modules.
        self.service_registry = Application(service_registry=ServiceRegistry())
        if ((PKG + '.foo') in sys.modules):
            del sys.modules[(PKG + '.foo')]

    def test_should_get_required_service(self):
        class Foo(HasTraits):
            price = Int
        foo = Foo()
        self.service_registry.register_service(Foo, foo)
        service = self.service_registry.get_required_service(Foo)
        self.assertIs(foo, service)

    def test_should_get_exception_if_required_service_is_missing(self):
        class IFoo(Interface):
            price = Int
        with self.assertRaises(NoSuchServiceError):
            self.service_registry.get_required_service(IFoo)

    def test_imported_service_factory(self):
        # Factory registered by dotted name is imported and called on demand.
        class IFoo(Interface):
            price = Int
        self.service_registry.register_service(HasTraits, (PKG + '.test_service_registry.service_factory'), {'price': 100})
        service = self.service_registry.get_service(HasTraits, 'price <= 100')
        self.assertNotEqual(None, service)
        self.assertEqual(HasTraits, type(service))
        self.assertEqual(100, service.price)
        # A second lookup must return the cached instance, not a new one.
        service2 = self.service_registry.get_service(HasTraits, 'price <= 100')
        self.assertTrue((service is service2))

    def test_function_service_factory(self):
        class IFoo(Interface):
            price = Int
        # NOTE(review): likely stripped `@provides(IFoo)` — see class docstring.
        (IFoo)
        class Foo(HasTraits):
            price = Int
        def foo_factory(**properties):
            return Foo(**properties)
        self.service_registry.register_service(IFoo, foo_factory, {'price': 100})
        service = self.service_registry.get_service(IFoo, 'price <= 100')
        self.assertNotEqual(None, service)
        self.assertEqual(Foo, type(service))
        # Factory result is cached: same object on repeat lookup.
        service2 = self.service_registry.get_service(IFoo, 'price <= 100')
        self.assertTrue((service is service2))

    def test_lazy_function_service_factory(self):
        # The factory's module must only be imported when the matching
        # protocol is actually requested.
        def foo_factory(**properties):
            from envisage.tests.foo import Foo
            foo_factory.foo = Foo()
            return foo_factory.foo
        i_foo = (PKG + '.i_foo.IFoo')
        foo = (PKG + '.foo')
        self.service_registry.register_service(i_foo, foo_factory)
        if (foo in sys.modules):
            del sys.modules[foo]
        self.assertTrue((foo not in sys.modules))
        # Looking up an unrelated protocol must not trigger the import.
        services = self.service_registry.get_services('bogus.IBogus')
        self.assertTrue((foo not in sys.modules))
        services = self.service_registry.get_services(i_foo)
        self.assertEqual([foo_factory.foo], services)
        self.assertTrue((foo in sys.modules))
        del sys.modules[foo]

    def test_lazy_bound_method_service_factory(self):
        # Same laziness guarantee when the factory is a bound method.
        i_foo = (PKG + '.i_foo.IFoo')
        foo = (PKG + '.foo')
        class ServiceProvider(HasTraits):
            def foo_factory(self, **properties):
                from envisage.tests.foo import Foo
                self.foo = Foo()
                return self.foo
        sp = ServiceProvider()
        self.service_registry.register_service(i_foo, sp.foo_factory)
        if (foo in sys.modules):
            del sys.modules[foo]
        self.assertTrue((foo not in sys.modules))
        services = self.service_registry.get_services('bogus.IBogus')
        self.assertTrue((foo not in sys.modules))
        services = self.service_registry.get_services(i_foo)
        self.assertEqual([sp.foo], services)
        self.assertTrue((foo in sys.modules))
        del sys.modules[foo]

    def test_get_services(self):
        class IFoo(Interface):
            pass
        # NOTE(review): likely stripped `@provides(IFoo)` — see class docstring.
        (IFoo)
        class Foo(HasTraits):
            pass
        foo = Foo()
        self.service_registry.register_service(IFoo, foo)
        foo = Foo()
        self.service_registry.register_service(IFoo, foo)
        services = self.service_registry.get_services(IFoo)
        self.assertEqual(2, len(services))
        class IBar(Interface):
            pass
        services = self.service_registry.get_services(IBar)
        self.assertEqual([], services)

    def test_get_services_with_strings(self):
        # Protocols may be referenced by dotted name instead of class object.
        from envisage.tests.foo import Foo
        protocol_name = 'envisage.tests.foo.IFoo'
        self.service_registry.register_service(protocol_name, Foo())
        self.service_registry.register_service(protocol_name, Foo())
        services = self.service_registry.get_services(protocol_name)
        self.assertEqual(2, len(services))

    def test_get_services_with_query(self):
        class IFoo(Interface):
            price = Int
        # NOTE(review): likely stripped `@provides(IFoo)` — see class docstring.
        (IFoo)
        class Foo(HasTraits):
            price = Int
        foo = Foo(price=100)
        self.service_registry.register_service(IFoo, foo)
        # Explicit registration properties override the object's own traits
        # for query evaluation.
        goo = Foo(price=10)
        self.service_registry.register_service(IFoo, goo, {'price': 200})
        services = self.service_registry.get_services(IFoo, 'color == "red"')
        self.assertEqual([], services)
        services = self.service_registry.get_services(IFoo, 'price <= 100')
        self.assertEqual([foo], services)
        services = self.service_registry.get_services(IFoo, 'price >= 100')
        self.assertTrue((foo in services))
        self.assertTrue((goo in services))
        self.assertEqual(2, len(services))
        class IBar(Interface):
            pass
        services = self.service_registry.get_services(IBar, 'price <= 100')
        self.assertEqual([], services)

    def test_get_service(self):
        class IFoo(Interface):
            pass
        # NOTE(review): likely stripped `@provides(IFoo)` — see class docstring.
        (IFoo)
        class Foo(HasTraits):
            pass
        foo = Foo()
        self.service_registry.register_service(IFoo, foo)
        goo = Foo()
        self.service_registry.register_service(IFoo, goo)
        # With multiple matches, any one of them may be returned.
        service = self.service_registry.get_service(IFoo)
        self.assertTrue(((foo is service) or (goo is service)))
        class IBar(Interface):
            pass
        service = self.service_registry.get_service(IBar)
        self.assertEqual(None, service)

    def test_get_service_with_query(self):
        class IFoo(Interface):
            price = Int
        # NOTE(review): likely stripped `@provides(IFoo)` — see class docstring.
        (IFoo)
        class Foo(HasTraits):
            price = Int
        foo = Foo(price=100)
        self.service_registry.register_service(IFoo, foo)
        goo = Foo(price=10)
        self.service_registry.register_service(IFoo, goo, {'price': 200})
        service = self.service_registry.get_service(IFoo, 'price < 100')
        self.assertEqual(None, service)
        service = self.service_registry.get_service(IFoo, 'price <= 100')
        self.assertEqual(foo, service)
        service = self.service_registry.get_service(IFoo, 'price >= 100')
        self.assertTrue(((foo is service) or (goo is service)))
        class IBar(Interface):
            pass
        service = self.service_registry.get_service(IBar, 'price <= 100')
        self.assertEqual(None, service)

    def test_get_and_set_service_properties(self):
        class IFoo(Interface):
            price = Int
        # NOTE(review): likely stripped `@provides(IFoo)` — see class docstring.
        (IFoo)
        class Foo(HasTraits):
            price = Int
        foo = Foo(price=100)
        foo_id = self.service_registry.register_service(IFoo, foo)
        goo = Foo(price=10)
        goo_id = self.service_registry.register_service(IFoo, goo, {'price': 200})
        foo_properties = self.service_registry.get_service_properties(foo_id)
        self.assertEqual({}, foo_properties)
        goo_properties = self.service_registry.get_service_properties(goo_id)
        self.assertEqual(200, goo_properties['price'])
        # Mutating the returned dicts alone must not change the registry;
        # the explicit set call below is what applies the update.
        foo_properties['price'] = 300
        goo_properties['price'] = 500
        self.service_registry.set_service_properties(foo_id, foo_properties)
        self.service_registry.set_service_properties(goo_id, goo_properties)
        foo_properties = self.service_registry.get_service_properties(foo_id)
        self.assertEqual(300, foo_properties['price'])
        goo_properties = self.service_registry.get_service_properties(goo_id)
        self.assertEqual(500, goo_properties['price'])
        # Unknown service ids must raise.
        with self.assertRaises(ValueError):
            self.service_registry.get_service_properties((- 1))
        with self.assertRaises(ValueError):
            self.service_registry.set_service_properties((- 1), {})

    def test_unregister_service(self):
        class IFoo(Interface):
            price = Int
        # NOTE(review): likely stripped `@provides(IFoo)` — see class docstring.
        (IFoo)
        class Foo(HasTraits):
            price = Int
        foo = Foo(price=100)
        foo_id = self.service_registry.register_service(IFoo, foo)
        goo = Foo(price=10)
        goo_id = self.service_registry.register_service(IFoo, goo, {'price': 200})
        service = self.service_registry.get_service(IFoo, 'price < 100')
        self.assertEqual(None, service)
        service = self.service_registry.get_service(IFoo, 'price <= 100')
        self.assertEqual(foo, service)
        service = self.service_registry.get_service(IFoo, 'price >= 100')
        self.assertTrue(((foo is service) or (goo is service)))
        # After unregistering, the matching lookups stop finding the service.
        self.service_registry.unregister_service(foo_id)
        service = self.service_registry.get_service(IFoo, 'price <= 100')
        self.assertEqual(None, service)
        self.service_registry.unregister_service(goo_id)
        service = self.service_registry.get_service(IFoo, 'price >= 100')
        self.assertEqual(None, service)
        with self.assertRaises(ValueError):
            self.service_registry.unregister_service((- 1))

    def test_minimize_and_maximize(self):
        class IFoo(Interface):
            price = Int
        # NOTE(review): likely stripped `@provides(IFoo)` — see class docstring.
        (IFoo)
        class Foo(HasTraits):
            price = Int
        x = Foo(price=10)
        y = Foo(price=5)
        z = Foo(price=100)
        for foo in [x, y, z]:
            self.service_registry.register_service(IFoo, foo)
        # minimize/maximize select the service with the smallest/largest
        # value of the named property.
        service = self.service_registry.get_service(IFoo, minimize='price')
        self.assertNotEqual(None, service)
        self.assertEqual(Foo, type(service))
        self.assertEqual(y, service)
        service = self.service_registry.get_service(IFoo, maximize='price')
        self.assertNotEqual(None, service)
        self.assertEqual(Foo, type(service))
        self.assertEqual(z, service)
def combine_date_range_queryset(date_dicts, table, min_start, max_end, dt_format='%Y-%m-%d', is_subaward=False):
    """Merge the requested time periods into non-overlapping date ranges, one
    merged set per distinct (gte, lte) date-type pair, then build a queryset.

    Missing start/end dates fall back to min_start/max_end.
    """
    # One (gte, lte) pair per requested period; explicit gte/lte types win,
    # otherwise the single 'date_type' (default 'action_date') is used twice.
    date_type_pairs = []
    for period in date_dicts:
        gte_type = period.get('gte_date_type')
        lte_type = period.get('lte_date_type')
        if gte_type is not None and lte_type is not None:
            date_type_pairs.append((gte_type, lte_type))
        else:
            default_type = period.get('date_type', 'action_date')
            date_type_pairs.append((default_type, default_type))
    final_ranges = []
    for gte_type, lte_type in set(date_type_pairs):
        parsed_ranges = [
            (
                datetime.strptime(period.get('start_date', None) or min_start, dt_format),
                datetime.strptime(period.get('end_date', None) or max_end, dt_format),
            )
            for period in date_dicts
        ]
        for start, end in merge_date_ranges(parsed_ranges):
            final_ranges.append({
                'start_date': start,
                'end_date': end,
                'date_type_dict': {'gte': gte_type, 'lte': lte_type},
            })
    return date_list_to_queryset(final_ranges, table, is_subaward=is_subaward)
def lazy_import():
    """Import the fastly model classes on demand and publish them at module
    scope (defers the imports to avoid circular-import problems at load time)."""
    from fastly.model.logging_address_and_port import LoggingAddressAndPort
    from fastly.model.logging_common_response import LoggingCommonResponse
    from fastly.model.service_id_and_version_string import ServiceIdAndVersionString
    from fastly.model.timestamps import Timestamps
    globals().update(
        LoggingAddressAndPort=LoggingAddressAndPort,
        LoggingCommonResponse=LoggingCommonResponse,
        ServiceIdAndVersionString=ServiceIdAndVersionString,
        Timestamps=Timestamps,
    )
class OptionSeriesHeatmapStatesHoverMarker(Options):
    """Generated accessor wrapper for the Highcharts
    `series.heatmap.states.hover.marker` config subtree.

    NOTE(review): each getter/setter pair below shares a name, so as written
    the later `def` shadows the earlier one and the getters are unreachable.
    This looks like stripped `@property` / `@<name>.setter` decorators from a
    code generator — confirm against the original source before relying on it.
    """

    def enabled(self):
        # Getter; default None. (Shadowed by the setter below — see class note.)
        return self._config_get(None)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def enabledThreshold(self):
        # Getter; default 2.
        return self._config_get(2)

    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)

    def fillColor(self):
        return self._config_get(None)

    def fillColor(self, text: str):
        self._config(text, js_type=False)

    def height(self):
        return self._config_get(None)

    def height(self, num: float):
        self._config(num, js_type=False)

    def lineColor(self):
        # Getter; default '#ffffff'.
        return self._config_get('#ffffff')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        # Getter; default 0.
        return self._config_get(0)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def radius(self):
        # Getter; default 4.
        return self._config_get(4)

    def radius(self, num: float):
        self._config(num, js_type=False)

    def width(self):
        return self._config_get(None)

    def width(self, num: float):
        self._config(num, js_type=False)
class NavierStokes_SC(ResGradQuad_SC):
    """Residual/gradient-based shock capturing for Navier-Stokes with optional
    lagging that can be switched on after nStepsToDelay steps.

    Component 0 (pressure) is skipped everywhere; numerical diffusion is only
    computed for components 1..nc-1 (the momentum equations).
    """

    def __init__(self, coefficients, nd, shockCapturingFactor=0.25, lag=True, nStepsToDelay=None):
        ResGradQuad_SC.__init__(self, coefficients, nd, shockCapturingFactor, lag)
        self.nStepsToDelay = nStepsToDelay
        self.nSteps = 0

    def calculateNumericalDiffusion(self, q):
        for ci in range(1, self.nc):
            # Drop into the debugger on NaNs in the residual or the result —
            # deliberate debugging aid kept from the original.
            if numpy.isnan(q[('pdeResidual', ci)]).any():
                import pdb
                print("NaN's in res")
                pdb.set_trace()
            cshockCapturing.calculateNumericalDiffusionResGradQuad(self.shockCapturingFactor, self.mesh.elementDiametersArray, q[('pdeResidual', ci)], q[('grad(u)', ci)], self.numDiff[ci])
            if numpy.isnan(self.numDiff[ci]).any():
                import pdb
                print("NaN's in numDiff")
                pdb.set_trace()

    def initializeElementQuadrature(self, mesh, t, cq):
        self.mesh = mesh
        self.numDiff = {}
        self.numDiff_last = {}
        self.cq_numDiff = {}
        for ci in range(1, self.nc):
            if self.lag:
                # Lagged: keep last-step diffusion aliased to the quadrature
                # dict and accumulate the new values separately.
                self.numDiff_last[ci] = cq[('numDiff', ci, ci)]
                self.numDiff[ci] = numpy.zeros(cq[('u', ci)].shape, 'd')
            elif (self.lag == False) and (self.nStepsToDelay is not None):
                # Delayed lagging: remember the quadrature arrays so we can
                # switch over in updateShockCapturingHistory.
                self.cq_numDiff[ci] = cq[('numDiff', ci, ci)]
                self.numDiff[ci] = cq[('numDiff', ci, ci)]
            else:
                self.numDiff[ci] = cq[('numDiff', ci, ci)]

    def updateShockCapturingHistory(self):
        self.nSteps += 1
        if self.lag:
            for ci in range(1, self.nc):
                self.numDiff_last[ci][:] = self.numDiff[ci]
        if (self.lag == False) and (self.nStepsToDelay is not None) and (self.nSteps > self.nStepsToDelay):
            # Delay elapsed: switch to lagging from now on.
            self.lag = True
            # BUG FIX: these were reset to lists (`[]`) but are assigned by
            # component id below, which would raise IndexError on the first
            # delayed switch; they are dicts everywhere else in this class.
            self.numDiff = {}
            self.numDiff_last = {}
            for ci in range(1, self.nc):
                self.numDiff_last[ci] = self.cq_numDiff[ci]
                self.numDiff[ci] = numpy.zeros(self.cq_numDiff[ci].shape, 'd')
class TestCreateScript(BaseEvenniaTest):
    """Tests for evennia's create.create_script covering repeat counts,
    persistence, and delayed start."""

    def test_create_script(self):
        # Baseline: a repeating, non-persistent script is created and active.
        class TestScriptA(DefaultScript):
            def at_script_creation(self):
                self.key = 'test_script'
                self.interval = 10
                self.persistent = False
        script = create.create_script(TestScriptA, key='test_script')
        assert (script is not None)
        assert (script.interval == 10)
        assert (script.key == 'test_script')
        script.stop()

    def test_create_script_w_repeats_equal_1(self):
        # A single-repeat script without start_delay fires immediately and is
        # already inactive by the time create_script returns.
        class TestScriptB(DefaultScript):
            def at_script_creation(self):
                self.key = 'test_script'
                self.interval = 10
                self.repeats = 1
                self.persistent = False
        script = create.create_script(TestScriptB, key='test_script')
        assert script
        assert (not script.is_active)

    def test_create_script_w_repeats_equal_1_persisted(self):
        # Same as above but persistent=True: still inactive after the single run.
        class TestScriptB1(DefaultScript):
            def at_script_creation(self):
                self.key = 'test_script'
                self.interval = 10
                self.repeats = 1
                self.persistent = True
        script = create.create_script(TestScriptB1, key='test_script')
        assert script
        assert (not script.is_active)

    def test_create_script_w_repeats_equal_2(self):
        # Two repeats: the script survives creation and keeps its settings.
        class TestScriptC(DefaultScript):
            def at_script_creation(self):
                self.key = 'test_script'
                self.interval = 10
                self.repeats = 2
                self.persistent = False
        script = create.create_script(TestScriptC, key='test_script')
        assert (script is not None)
        assert (script.interval == 10)
        assert (script.repeats == 2)
        assert (script.key == 'test_script')
        script.stop()

    def test_create_script_w_repeats_equal_1_and_delayed(self):
        # start_delay=True keeps a single-repeat script alive until the first
        # (delayed) firing.
        class TestScriptD(DefaultScript):
            def at_script_creation(self):
                self.key = 'test_script'
                self.interval = 10
                self.start_delay = True
                self.repeats = 1
                self.persistent = False
        script = create.create_script(TestScriptD, key='test_script')
        assert (script is not None)
        assert (script.interval == 10)
        assert (script.repeats == 1)
        assert (script.key == 'test_script')
        script.stop()
class TGMsgType(Enum):
    """Telegram message types handled by the channel.

    Values are the type names used elsewhere in the codebase.
    NOTE(review): `VideoNote` maps to 'Video_note' while every other member
    value matches its name exactly — confirm the underscore casing is intended.
    """
    Text = 'Text'
    Audio = 'Audio'
    Document = 'Document'
    Photo = 'Photo'
    Sticker = 'Sticker'
    AnimatedSticker = 'AnimatedSticker'
    VideoSticker = 'VideoSticker'
    Video = 'Video'
    Voice = 'Voice'
    Contact = 'Contact'
    Location = 'Location'
    Venue = 'Venue'
    System = 'System'
    Game = 'Game'
    VideoNote = 'Video_note'
    Animation = 'Animation'
    Poll = 'Poll'
    Dice = 'Dice'
class OptionSeriesAreasplineSonificationTracksMappingLowpass(Options):
    """Generated accessor wrapper for the Highcharts
    `series.areaspline.sonification.tracks.mapping.lowpass` config subtree:
    exposes the nested frequency and resonance sub-configurations."""

    def frequency(self) -> 'OptionSeriesAreasplineSonificationTracksMappingLowpassFrequency':
        # Accessor for the nested frequency sub-configuration object.
        return self._config_sub_data('frequency', OptionSeriesAreasplineSonificationTracksMappingLowpassFrequency)

    def resonance(self) -> 'OptionSeriesAreasplineSonificationTracksMappingLowpassResonance':
        # Accessor for the nested resonance sub-configuration object.
        return self._config_sub_data('resonance', OptionSeriesAreasplineSonificationTracksMappingLowpassResonance)
class FLModel(IFLModel):
    """Wraps a torch nn.Module for the federated-learning framework: forward
    pass with cross-entropy loss, batch construction, and device placement."""

    def __init__(self, model: nn.Module, device: Optional[str]=None):
        self.model = model
        # Target device string (e.g. 'cuda'); None means leave tensors where
        # they are.
        self.device = device

    def fl_forward(self, batch) -> FLBatchMetrics:
        # batch is a dict with 'features' and 'labels' tensors (see
        # fl_create_training_batch below).
        features = batch['features']
        batch_label = batch['labels']
        # Flatten labels to 1-D int64 targets for cross_entropy.
        stacked_label = batch_label.view((- 1)).long().clone().detach()
        if (self.device is not None):
            features = features.to(self.device)
        output = self.model(features)
        if (self.device is not None):
            (output, batch_label, stacked_label) = (output.to(self.device), batch_label.to(self.device), stacked_label.to(self.device))
        loss = F.cross_entropy(output, stacked_label)
        num_examples = self.get_num_examples(batch)
        # Detach to CPU before packaging metrics so no autograd graph (or GPU
        # memory) is retained by the returned object.
        output = output.detach().cpu()
        stacked_label = stacked_label.detach().cpu()
        del features
        return FLBatchMetrics(loss=loss, num_examples=num_examples, predictions=output, targets=stacked_label, model_inputs=[])

    def fl_create_training_batch(self, **kwargs):
        features = kwargs.get('features', None)
        labels = kwargs.get('labels', None)
        return UserData.fl_training_batch(features, labels)

    def fl_get_module(self) -> nn.Module:
        return self.model

    def fl_cuda(self) -> None:
        # Moves the module to self.device (despite the name, whatever device
        # was configured).
        self.model = self.model.to(self.device)

    def get_eval_metrics(self, batch) -> FLBatchMetrics:
        # Same forward pass, but without building an autograd graph.
        with torch.no_grad():
            return self.fl_forward(batch)

    def get_num_examples(self, batch) -> int:
        return UserData.get_num_examples(batch['labels'])
class OptionPlotoptionsBulletSonificationContexttracksMappingPitch(Options):
    """Generated accessor wrapper for the Highcharts
    `plotOptions.bullet.sonification.contextTracks.mapping.pitch` subtree.

    NOTE(review): each getter/setter pair below shares a name, so as written
    the later `def` shadows the earlier one and the getters are unreachable.
    This looks like stripped `@property` / `@<name>.setter` decorators from a
    code generator — confirm against the original source before relying on it.
    """

    def mapFunction(self):
        # Getter; default None. (Shadowed by the setter below — see class note.)
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter; default 'y'.
        return self._config_get('y')

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Getter; default note 'c6'.
        return self._config_get('c6')

    def max(self, text: str):
        self._config(text, js_type=False)

    def min(self):
        # Getter; default note 'c2'.
        return self._config_get('c2')

    def min(self, text: str):
        self._config(text, js_type=False)

    def scale(self):
        return self._config_get(None)

    def scale(self, value: Any):
        self._config(value, js_type=False)

    def within(self):
        # Getter; default 'yAxis'.
        return self._config_get('yAxis')

    def within(self, text: str):
        self._config(text, js_type=False)
def test_globals_not_propagated_1():
    """Expression propagation must not fold a global variable's definition
    into a later use: the second instruction stays unchanged."""
    glob_x = GlobalVariable('global_x', ssa_label=0)
    var_y = Variable('y', ssa_label=0)
    block_instructions = [
        _assign(glob_x, Constant(5)),
        _assign(var_y, _add(glob_x, Constant(5))),
    ]
    expected = _assign(var_y, _add(glob_x, Constant(5)))
    cfg = ControlFlowGraph()
    cfg.add_node(BasicBlock(0, block_instructions))
    _run_expression_propagation(cfg)
    assert list(cfg.instructions)[1] == expected
class StateGen(object):
    """Iterator over `sites` that hands out exactly `states_per_site` fuzz
    states per site: callers pull states via next_state() while on a site, and
    advancing to the next site asserts the full quota was consumed.

    NOTE(review): indentation was lost in extraction; the nesting inside
    __next__ has been reconstructed — confirm against the original.
    """

    def __init__(self, sites, states_per_site):
        self.sites = sites
        self.states_per_site = states_per_site
        # Position/state bookkeeping; populated in __iter__.
        self.curr_site_idx = 0
        self.curr_state = None
        self.states = None
        self.curr_site = None

    def __iter__(self):
        # Guard against re-entrant iteration: must not already be mid-run.
        assert (self.curr_state is None)
        assert (self.states is None)
        assert (self.curr_state is None)
        self.curr_site_idx = 0
        self.curr_state = None
        self.states = util.gen_fuzz_states((len(self.sites) * self.states_per_site))
        self.curr_site = iter(self.sites)
        return self

    def __next__(self):
        next_site = next(self.curr_site)
        self.curr_site_idx += 1
        if (self.curr_state is not None):
            # Drain any states the caller did not consume for the previous
            # site, so every site accounts for exactly states_per_site states.
            while (self.curr_state < self.states_per_site):
                self.next_state()
            assert (self.curr_state == self.states_per_site), self.curr_state
        self.curr_state = 0
        return next_site

    def next_state(self):
        self.curr_state += 1
        try:
            state = next(self.states)
        except StopIteration:
            # Exhausting the generator early means the quota math is wrong.
            assert False, 'Insufficent states, at state {} for site {}'.format(self.curr_state, self.curr_site_idx)
        return state
class EnterSendQTextEdit(QTextEdit):
    """QTextEdit that fires a configurable action on Enter.

    Ctrl+Enter / Alt+Enter insert a literal newline; Shift+Enter is swallowed;
    a plain Enter triggers `self.action` (default: `self.show`, replaceable via
    keyenter_connect).
    """

    def __init__(self, parent=None):
        super().__init__(parent)
        # NOTE(review): this shadows QWidget.parent(); kept as-is for
        # compatibility with existing callers.
        self.parent = parent
        self.action = self.show

    def keyPressEvent(self, e):
        if e.key() == Qt.Key_Return:
            try:
                if QApplication.keyboardModifiers() in (Qt.ShiftModifier, Qt.ControlModifier, Qt.AltModifier):
                    if QApplication.keyboardModifiers() in (Qt.ControlModifier, Qt.AltModifier):
                        self.insertPlainText('\n')
                    else:
                        # Shift+Enter: deliberately ignored.
                        pass
                else:
                    self.action()
            # FIX: was a bare `except:` — that also swallows SystemExit and
            # KeyboardInterrupt. Narrowed to Exception; handling unchanged.
            except Exception:
                print('', sys.exc_info())
            return
        super().keyPressEvent(e)

    def keyenter_connect(self, action):
        """Replace the callable invoked on a plain Enter press."""
        self.action = action
def _create_plot_component():
    """Build a chaco image plot of z = sin(x) * y with a lasso selection tool
    and its overlay attached; returns the Plot."""
    x_bounds = (-2 * pi, 2 * pi, 600)
    y_bounds = (-1.5 * pi, 1.5 * pi, 300)
    grid_x, grid_y = meshgrid(linspace(*x_bounds), linspace(*y_bounds))
    z = sin(grid_x) * grid_y
    data = ArrayPlotData()
    data.set_data('imagedata', z)
    plot = Plot(data)
    img_plot = plot.img_plot(
        'imagedata',
        xbounds=x_bounds[:2],
        ybounds=y_bounds[:2],
        colormap=viridis,
    )[0]
    plot.title = 'Image Plot with Lasso'
    plot.padding = 50
    # Wire the lasso tool and its overlay onto the image renderer.
    selection = LassoSelection(component=img_plot)
    selection.observe(lasso_updated, 'disjoint_selections')
    overlay = LassoOverlay(lasso_selection=selection, component=img_plot)
    img_plot.tools.append(selection)
    img_plot.overlays.append(overlay)
    return plot
# FIX: the decorator prefix was lost (the source had a bare `.parametrize(...)`,
# which is a syntax error). Restored the standard pytest marker.
@pytest.mark.parametrize('directions,quantities,expected_x,expected_y', [
    ([Dir.LEFT], [1], (- 20), 0),
    ([Dir.RIGHT], [1], 20, 0),
    ([Dir.UP], [1], 0, 20),
    ([Dir.DOWN], [1], 0, (- 20)),
    ([Dir.UPRIGHT], [1], 10, (10 * np.sqrt(3))),
    ([Dir.DOWNRIGHT], [1], 10, ((- 10) * np.sqrt(3))),
    ([Dir.UPLEFT], [1], (- 10), (10 * np.sqrt(3))),
    ([Dir.DOWNLEFT], [1], (- 10), ((- 10) * np.sqrt(3))),
    ([Dir.LEFT, Dir.RIGHT], [1, 1], 0, 0),
    ([Dir.UP, Dir.DOWN], [1, 1], 0, 0),
    ([Dir.UPLEFT, Dir.DOWNRIGHT], [1, 1], 0, 0),
    ([Dir.UPRIGHT, Dir.DOWNLEFT], [1, 1], 0, 0),
])
def test_get_translation(directions, quantities, expected_x, expected_y, radius):
    """Summing Jump.eval translations over the given direction/quantity pairs
    must land on the expected (x, y) offset; opposite pairs cancel."""
    (x, y) = (0, 0)
    for (direction, quantity) in zip(directions, quantities):
        trans = ([direction] * quantity)
        (dx, dy) = Jump.eval(trans, radius)
        x += dx
        y += dy
    assert (x == expected_x)
    assert (y == expected_y)
class VendorTests(unittest.TestCase):
    """Unit tests for the Vendor QuickBooks model."""

    def test_unicode(self):
        # str() of a vendor is its display name.
        v = Vendor()
        v.DisplayName = 'test'
        self.assertEqual(str(v), 'test')

    def test_to_ref(self):
        # to_ref() carries name, object type, and id.
        v = Vendor()
        v.DisplayName = 'test'
        v.Id = 100
        reference = v.to_ref()
        self.assertEqual(reference.name, 'test')
        self.assertEqual(reference.type, 'Vendor')
        self.assertEqual(reference.value, 100)

    def test_valid_object_name(self):
        model = Vendor()
        qb_client = QuickBooks()
        self.assertTrue(qb_client.isvalid_object_name(model.qbo_object_name))
def test_validate_should_not_parse_schema_if_it_was_parsed_already():
    """A schema referencing an already-registered named type ('B') must
    validate records without re-parsing that type."""
    named_schemas = {}
    schema_b = {
        'name': 'B',
        'type': 'record',
        'fields': [{'name': 'bar', 'type': 'string'}],
    }
    parse_schema(schema_b, named_schemas)
    schema_a = {
        'name': 'A',
        'type': 'record',
        'fields': [{'name': 'b', 'type': 'B'}],
    }
    a_schema = parse_schema(schema_a, named_schemas)
    records = [{'b': {'bar': 'bar'}}]
    validate_many(records, a_schema)
    validate(records[0], a_schema)
# FIX: the decorator prefixes were lost (the source had a bare `()`,
# `_migration_options`, and a tuple with keyword arguments — the last being a
# syntax error). Restored the click decorator stack.
# NOTE(review): confirm the command/option decorator names against the original
# CLI module.
@click.command()
@_migration_options
@click.option('--sql-output', type=str, default=None, help='Generate SQL script for migration instead of applying it. ex: --sql-output=upgrade.sql')
def upgrade(alembic_ini_path: str, script_location: str, sql_output: str):
    """Apply pending database migrations, or write the equivalent SQL to a
    file when --sql-output is given (no changes applied in that case)."""
    # Imported lazily to keep CLI startup cheap.
    from dbgpt.util._db_migration_utils import upgrade_database, generate_sql_for_upgrade
    (alembic_cfg, db_manager) = _get_migration_config(alembic_ini_path, script_location)
    if sql_output:
        generate_sql_for_upgrade(alembic_cfg, db_manager.engine, output_file=sql_output)
    else:
        upgrade_database(alembic_cfg, db_manager.engine)
class DominanceHandler(THBEventHandler):
    """Tracks which card suits a Dominance skill owner launches during a turn
    and, once all four suits were seen, offers to fire DominanceAction."""
    # Game events this handler reacts to.
    interested = ['action_after', 'action_apply']

    def handle(self, evt_type, act):
        """Dispatch on event type and action class; always returns the action."""
        if ((evt_type == 'action_apply') and isinstance(act, PrepareStage)):
            # Turn preparation: reset the per-turn suit bookkeeping tags.
            t = act.target.tags
            t['dominance_suits'] = set()
            t['dominance_suit_SPADE'] = False
            t['dominance_suit_CLUB'] = False
            t['dominance_suit_HEART'] = False
            t['dominance_suit_DIAMOND'] = False
        elif ((evt_type == 'action_apply') and isinstance(act, LaunchCard)):
            if (not act.source.has_skill(Dominance)):
                return act
            card = act.card
            # Only physical cards have a meaningful suit.
            if (not card.is_card(PhysicalCard)):
                return act
            suit = card.suit
            if (suit not in (Card.SPADE, Card.CLUB, Card.HEART, Card.DIAMOND)):
                return act
            try:
                # Record the suit both in the set and as an individual flag.
                t = act.source.tags
                t['dominance_suits'].add(suit)
                t[('dominance_suit_%s' % Card.SUIT_REV[suit])] = True
            except AttributeError:
                # 'dominance_suits' may be absent/None before PrepareStage ran;
                # tracking is best-effort, so skip silently.
                pass
        elif ((evt_type == 'action_after') and isinstance(act, FinalizeStage)):
            tgt = act.target
            if (not tgt.has_skill(Dominance)):
                return act
            # Fire only when all four suits were played this turn.
            if (len((tgt.tags['dominance_suits'] or set())) != 4):
                return act
            g = self.game
            # Let the player decline the skill activation.
            if (not g.user_input([tgt], ChooseOptionInputlet(self, (False, True)))):
                return act
            g.process_action(DominanceAction(tgt))
        return act
def command_line(argv):
    """Drive the command line: optionally emit a fixer file, process the
    requested files, and return the list of processed paths."""
    arguments = parse_command_line(argv)
    if arguments.generate:
        generate_fixer_file(arguments.generate)
    processed = edit_files(
        arguments.patterns,
        expressions=arguments.expressions,
        functions=arguments.functions,
        executables=arguments.executables,
        start_dirs=arguments.start_dirs,
        max_depth=arguments.max_depth,
        dry_run=arguments.dry_run,
        output=arguments.output,
        encoding=arguments.encoding,
        newline=arguments.newline,
    )
    # Close the output stream unless it is one of the process-wide streams.
    output_is_std_stream = arguments.output in (sys.stdout, sys.stderr)
    if (not output_is_std_stream) and isinstance(arguments.output, io.IOBase):
        arguments.output.close()
    return processed
class OptionSeriesSankeyNodes(Options):
    """Option wrapper for Highcharts `series.sankey.nodes` settings.

    NOTE(review): every option appears twice as a getter/setter pair with the
    same name, so the second def shadows the first at class-creation time; the
    `@property` / `@<name>.setter` decorators look stripped — confirm against
    the generator that produced this file.
    """

    def color(self):
        return self._config_get(None)

    def color(self, text: str):
        self._config(text, js_type=False)

    def colorIndex(self):
        return self._config_get(None)

    def colorIndex(self, num: float):
        self._config(num, js_type=False)

    def column(self):
        return self._config_get(None)

    def column(self, num: float):
        self._config(num, js_type=False)

    def dataLabels(self) -> 'OptionSeriesSankeyNodesDatalabels':
        # Nested options object for the node data labels.
        return self._config_sub_data('dataLabels', OptionSeriesSankeyNodesDatalabels)

    def id(self):
        return self._config_get(None)

    def id(self, text: str):
        self._config(text, js_type=False)

    def level(self):
        return self._config_get(None)

    def level(self, num: float):
        self._config(num, js_type=False)

    def name(self):
        return self._config_get(None)

    def name(self, text: str):
        self._config(text, js_type=False)

    def offset(self):
        # Unlike the other options, offset defaults to 0 rather than None.
        return self._config_get(0)

    def offset(self, num: float):
        self._config(num, js_type=False)

    def offsetHorizontal(self):
        return self._config_get(None)

    def offsetHorizontal(self, num: float):
        self._config(num, js_type=False)

    def offsetVertical(self):
        return self._config_get(None)

    def offsetVertical(self, num: float):
        self._config(num, js_type=False)
def test_assign_mixed_multiple_shaped():
    """Subtracting two mixed functions subtracts each component independently."""
    mesh = UnitTriangleMesh()
    V = VectorFunctionSpace(mesh, 'DG', 0)
    Q = FunctionSpace(mesh, 'P', 1)
    P = FunctionSpace(mesh, 'RT', 2)
    X = TensorFunctionSpace(mesh, 'DG', 1)
    Z = (((V * Q) * P) * X)
    first = Function(Z)
    second = Function(Z)
    # Fill each component dat with distinct values so the difference is nonzero.
    for index, value in enumerate(([1, 2], 3, 4, [[6, 7], [8, 9]])):
        first.dat[index].data[:] = value
    for index, value in enumerate(([10, 11], 12, 13, [[15, 16], [17, 18]])):
        second.dat[index].data[:] = value
    difference = assemble((first - second))
    for out, a, b in zip(difference.subfunctions, first.subfunctions, second.subfunctions):
        assert np.allclose(out.dat.data_ro, (a.dat.data_ro - b.dat.data_ro))
def command_exist(cmd):
    """Return True if *cmd* names a supported, installed tool that yields output.

    Only ``nvme``, ``flash_manager`` and ``smartctl`` are accepted.  The command
    is probed by running it with its output discarded; a missing executable or
    an empty reply from ``cmdline`` counts as not available.
    """
    # Guard against empty strings (IndexError) and unsupported executables.
    if (not cmd) or cmd.split()[0] not in ('nvme', 'flash_manager', 'smartctl'):
        return False
    args = shlex.split(cmd)
    try:
        # subprocess.DEVNULL avoids leaking the file handles that
        # open(os.devnull, 'wb') created on every call.
        subprocess.call(args, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    except OSError:
        print('%s command not installed' % cmd)
        return False
    # The tool must actually produce some output to be considered usable.
    return cmdline(cmd).decode('UTF-8') != ''
def _get_displayed_page_numbers(current, final):
assert (current >= 1)
assert (final >= current)
if (final <= 5):
return list(range(1, (final + 1)))
included = {1, (current - 1), current, (current + 1), final}
if (current <= 4):
included.add(2)
included.add(3)
if (current >= (final - 3)):
included.add((final - 1))
included.add((final - 2))
included = [idx for idx in sorted(included) if (0 < idx <= final)]
if (current > 4):
included.insert(1, None)
if (current < (final - 3)):
included.insert((len(included) - 1), None)
return included |
def test_load_mappings(mocker):
    """load_mappings reads the mappings file and deserializes its content."""
    path = 'mappings.yml'
    expected = {'airflow.task.Foo': 'diagrams.node.Bar'}
    opener = mocker.patch('builtins.open', mocker.mock_open(read_data=json.dumps(expected)))
    assert load_mappings(file=path) == expected
    opener.assert_called_once_with(path)
def extractDorkzillatranslationsCom(item):
    """Map a dorkzillatranslations.com feed item onto a release message.

    Returns None for previews or unparseable titles, False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tagmap = [
        ('get to know about wife fan', 'Get to know about Wife Fan', 'translated'),
        ('i have four dads after transmigrating in a novel', 'I have four dads after transmigrating in a novel', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def replace_batch_norm(module: torch.fx.GraphModule) -> torch.fx.GraphModule:
    """Rewrite ``_native_batch_norm_legit_functional`` nodes into ``aten.batch_norm``.

    For each matched node, a ``batch_norm`` call is inserted after its first
    getitem user, all uses are redirected to it, and the copy_ nodes consuming
    the running-stats outputs are dropped from the graph outputs.  Returns a
    PassResult carrying the module and whether anything was modified.
    """
    batch_node_list = []
    for n in module.graph.nodes:
        if (n.target == torch.ops.aten._native_batch_norm_legit_functional.default):
            batch_node_list.append(n)
        if (n.target == 'output'):
            # NOTE(review): output_node stays unbound if the graph has no
            # output node — assumes every fx graph passed here has one.
            output_node = n
    if (len(batch_node_list) > 0):
        modified = True
    else:
        modified = False
    for n in batch_node_list:
        new_op = torch.ops.aten.batch_norm
        new_args = list(n.args)
        # Extra trailing positional argument for aten.batch_norm — presumably
        # cudnn_enabled=False; TODO confirm against the aten signature.
        new_args.append(False)
        new_args = tuple(new_args)
        user_list = [x for x in n.users]
        user_list_copy_node = []
        # users[1] / users[2] select the running-stats outputs; their single
        # consumers are copy_-style nodes that become dead after the rewrite.
        user_list_copy_node.append(next(iter(user_list[1].users)))
        user_list_copy_node.append(next(iter(user_list[2].users)))
        getitem_node = user_list[0]
        with module.graph.inserting_after(getitem_node):
            new_node = module.graph.create_node('call_function', new_op, args=new_args, kwargs=n.kwargs)
            getitem_node.replace_all_uses_with(new_node)
        # Remove the now-dead copy nodes from the graph's output tuple.
        output_args = output_node.args[0]
        new_output_args = [x for x in output_args if (x not in user_list_copy_node)]
        output_node.args = (new_output_args,)
    module.graph.eliminate_dead_code()
    module.recompile()
    return PassResult(module, modified)
def font_to_toolkit_font(font):
    """Translate an abstract font description into a ``wx.Font``."""
    size = int(font.size)
    # Pick the wx family for the first generic family name, if any.
    for family in font.family:
        if (family in generic_family_to_wx_family):
            default_family = generic_family_to_wx_family[family]
            break
    else:
        default_family = wx.FONTFAMILY_DEFAULT
    weight = weight_to_wx_weight[font.weight_]
    style = style_to_wx_style[font.style]
    underline = ('underline' in font.decorations)
    # Fallback font using only the family; refined below if a face matches.
    wx_font = wx.Font(size, default_family, style, weight, underline)
    for face in font.family:
        if (face in generic_family_to_wx_family):
            # NOTE(review): this stops the face search at the first generic
            # family name; 'continue' (skip generics, keep trying later faces)
            # may have been intended — confirm against upstream.
            break
        wx_font = wx.Font(size, default_family, style, weight, underline, face)
        # Keep the face only if wx actually resolved it.
        if (wx_font.GetFaceName().lower() == face.lower()):
            break
    wx_font.SetStrikethrough(('strikethrough' in font.decorations))
    return wx_font
def listen() -> None:
    """Wire the output-option widgets to their update handlers and refresh
    them whenever a source/target component changes."""
    OUTPUT_PATH_TEXTBOX.change(update_output_path, inputs=OUTPUT_PATH_TEXTBOX)
    OUTPUT_IMAGE_QUALITY_SLIDER.change(update_output_image_quality, inputs=OUTPUT_IMAGE_QUALITY_SLIDER)
    OUTPUT_VIDEO_ENCODER_DROPDOWN.select(update_output_video_encoder, inputs=OUTPUT_VIDEO_ENCODER_DROPDOWN)
    OUTPUT_VIDEO_QUALITY_SLIDER.change(update_output_video_quality, inputs=OUTPUT_VIDEO_QUALITY_SLIDER)
    refresh_outputs = [OUTPUT_IMAGE_QUALITY_SLIDER, OUTPUT_VIDEO_ENCODER_DROPDOWN, OUTPUT_VIDEO_QUALITY_SLIDER]
    multi_component_names: List[ComponentName] = ['source_image', 'target_image', 'target_video']
    for component_name in multi_component_names:
        component = get_ui_component(component_name)
        if not component:
            continue
        for event_name in ('upload', 'change', 'clear'):
            getattr(component, event_name)(remote_update, outputs=refresh_outputs)
# NOTE(review): '.parametrize' below looks like a '@pytest.mark' decorator
# whose prefix was lost during processing — confirm against version control.
.parametrize('flex', [False, True])
def test_prefix_id_transform(dash_duo, flex):
    """Component ids get the 'x' prefix applied by PrefixIdTransform."""
    app = _get_basic_dash_proxy(transforms=[PrefixIdTransform(prefix='x')])
    _bind_basic_callback(app, flex)
    _bind_basic_clientside_callback(app)
    # The rendered element ids must carry the 'x-' prefix.
    _basic_dash_proxy_test(dash_duo, app, ['x-log_server', 'x-log_client'], 'x-btn')
class ABCIApplication(object):
    """Generated gRPC convenience stubs for ``tendermint.abci.ABCIApplication``.

    Each function issues a single unary-unary RPC for the corresponding ABCI
    method via ``grpc.experimental.unary_unary``.

    NOTE(review): these defs take ``(request, target, ...)`` with no ``self``;
    grpc-generated experimental API stubs are normally decorated with
    ``@staticmethod`` — the decorators appear stripped; confirm against the
    generated ``*_pb2_grpc.py``.
    """

    def Echo(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tendermint.abci.ABCIApplication/Echo', tendermint_dot_abci_dot_types__pb2.RequestEcho.SerializeToString, tendermint_dot_abci_dot_types__pb2.ResponseEcho.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    def Flush(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tendermint.abci.ABCIApplication/Flush', tendermint_dot_abci_dot_types__pb2.RequestFlush.SerializeToString, tendermint_dot_abci_dot_types__pb2.ResponseFlush.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    def Info(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tendermint.abci.ABCIApplication/Info', tendermint_dot_abci_dot_types__pb2.RequestInfo.SerializeToString, tendermint_dot_abci_dot_types__pb2.ResponseInfo.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    def SetOption(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tendermint.abci.ABCIApplication/SetOption', tendermint_dot_abci_dot_types__pb2.RequestSetOption.SerializeToString, tendermint_dot_abci_dot_types__pb2.ResponseSetOption.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    def DeliverTx(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tendermint.abci.ABCIApplication/DeliverTx', tendermint_dot_abci_dot_types__pb2.RequestDeliverTx.SerializeToString, tendermint_dot_abci_dot_types__pb2.ResponseDeliverTx.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    def CheckTx(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tendermint.abci.ABCIApplication/CheckTx', tendermint_dot_abci_dot_types__pb2.RequestCheckTx.SerializeToString, tendermint_dot_abci_dot_types__pb2.ResponseCheckTx.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    def Query(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tendermint.abci.ABCIApplication/Query', tendermint_dot_abci_dot_types__pb2.RequestQuery.SerializeToString, tendermint_dot_abci_dot_types__pb2.ResponseQuery.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    def Commit(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tendermint.abci.ABCIApplication/Commit', tendermint_dot_abci_dot_types__pb2.RequestCommit.SerializeToString, tendermint_dot_abci_dot_types__pb2.ResponseCommit.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    def InitChain(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tendermint.abci.ABCIApplication/InitChain', tendermint_dot_abci_dot_types__pb2.RequestInitChain.SerializeToString, tendermint_dot_abci_dot_types__pb2.ResponseInitChain.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    def BeginBlock(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tendermint.abci.ABCIApplication/BeginBlock', tendermint_dot_abci_dot_types__pb2.RequestBeginBlock.SerializeToString, tendermint_dot_abci_dot_types__pb2.ResponseBeginBlock.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    def EndBlock(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tendermint.abci.ABCIApplication/EndBlock', tendermint_dot_abci_dot_types__pb2.RequestEndBlock.SerializeToString, tendermint_dot_abci_dot_types__pb2.ResponseEndBlock.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    def ListSnapshots(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tendermint.abci.ABCIApplication/ListSnapshots', tendermint_dot_abci_dot_types__pb2.RequestListSnapshots.SerializeToString, tendermint_dot_abci_dot_types__pb2.ResponseListSnapshots.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    def OfferSnapshot(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tendermint.abci.ABCIApplication/OfferSnapshot', tendermint_dot_abci_dot_types__pb2.RequestOfferSnapshot.SerializeToString, tendermint_dot_abci_dot_types__pb2.ResponseOfferSnapshot.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    def LoadSnapshotChunk(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tendermint.abci.ABCIApplication/LoadSnapshotChunk', tendermint_dot_abci_dot_types__pb2.RequestLoadSnapshotChunk.SerializeToString, tendermint_dot_abci_dot_types__pb2.ResponseLoadSnapshotChunk.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    def ApplySnapshotChunk(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/tendermint.abci.ABCIApplication/ApplySnapshotChunk', tendermint_dot_abci_dot_types__pb2.RequestApplySnapshotChunk.SerializeToString, tendermint_dot_abci_dot_types__pb2.ResponseApplySnapshotChunk.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
def test_chained_context_exception_suppressed(elasticapm_client):
    """`raise ... from None` must suppress the implicit exception cause."""
    try:
        try:
            1 / 0
        except ZeroDivisionError:
            # Explicitly sever the chain so no 'cause' should be recorded.
            raise ValueError('bla') from None
    except ValueError:
        elasticapm_client.capture_exception()
    captured = elasticapm_client.events[ERROR][0]
    assert captured['exception']['type'] == 'ValueError'
    assert 'cause' not in captured['exception']
class EsMetricsStore(MetricsStore):
    """Metrics store that persists metric documents in an Elasticsearch index."""

    def __init__(self, cfg, client_factory_class=EsClientFactory, index_template_provider_class=IndexTemplateProvider, clock=time.Clock, meta_info=None):
        """Create the store and its ES client; no index is touched until open()."""
        MetricsStore.__init__(self, cfg=cfg, clock=clock, meta_info=meta_info)
        self._index = None
        self._client = client_factory_class(cfg).create()
        self._index_template_provider = index_template_provider_class(cfg)
        # Buffer of documents to be bulk-indexed on flush().
        self._docs = None

    def open(self, race_id=None, race_timestamp=None, track_name=None, challenge_name=None, car_name=None, ctx=None, create=False):
        """Open the store for a race, optionally creating the metrics index."""
        self._docs = []
        MetricsStore.open(self, race_id, race_timestamp, track_name, challenge_name, car_name, ctx, create)
        self._index = self.index_name()
        if create:
            self._client.put_template('rally-metrics', self._get_template())
            if (not self._client.exists(index=self._index)):
                self._client.create_index(index=self._index)
            else:
                self.logger.info('[%s] already exists.', self._index)
        else:
            # When only reading, prefer a migrated ('.new') index if present.
            new_name = self._migrated_index_name(self._index)
            if self._client.exists(index=new_name):
                self._index = new_name
        # Make previously indexed docs visible to subsequent searches.
        self._client.refresh(index=self._index)

    def index_name(self):
        """Return the monthly metrics index name derived from the race timestamp."""
        ts = time.from_iso8601(self._race_timestamp)
        return ('rally-metrics-%04d-%02d' % (ts.year, ts.month))

    def _migrated_index_name(self, original_name):
        # Naming convention used for migrated indices.
        return f'{original_name}.new'

    def _get_template(self):
        return self._index_template_provider.metrics_template()

    def flush(self, refresh=True):
        """Bulk-index any buffered documents and optionally refresh the index."""
        if self._docs:
            sw = time.StopWatch()
            sw.start()
            self._client.bulk_index(index=self._index, items=self._docs)
            sw.stop()
            self.logger.info('Successfully added %d metrics documents for race timestamp=[%s], track=[%s], challenge=[%s], car=[%s] in [%f] seconds.', len(self._docs), self._race_timestamp, self._track, self._challenge, self._car, sw.total_time())
        self._docs = []
        if refresh:
            self._client.refresh(index=self._index)

    def _add(self, doc):
        # Documents are buffered until flush().
        self._docs.append(doc)

    def _get(self, name, task, operation_type, sample_type, node_name, mapper):
        """Fetch all matching metric docs (up to 10k) and map each _source."""
        query = {'query': self._query_by_name(name, task, operation_type, sample_type, node_name), 'track_total_hits': True, 'size': 10000}
        self.logger.debug('Issuing get against index=[%s], query=[%s].', self._index, query)
        result = self._client.search(index=self._index, body=query)
        es_count = result['hits']['total']['value']
        self.logger.debug('Metrics query found [%s] results.', es_count)
        if (es_count != len(result['hits']['hits'])):
            # More matches exist than the fixed 10k page could return.
            self.logger.warning('Metrics query returned [%d] out of [%s] matching docs.', len(result['hits']['hits']), es_count)
        return [mapper(v['_source']) for v in result['hits']['hits']]

    def get_one(self, name, sample_type=None, node_name=None, task=None, mapper=(lambda doc: doc['value']), sort_key=None, sort_reverse=False):
        """Return the first matching metric value (optionally sorted), or None."""
        order = ('desc' if sort_reverse else 'asc')
        query = {'query': self._query_by_name(name, task, None, sample_type, node_name), 'size': 1}
        if sort_key:
            query['sort'] = [{sort_key: {'order': order}}]
        self.logger.debug('Issuing get against index=[%s], query=[%s].', self._index, query)
        result = self._client.search(index=self._index, body=query)
        hits = result['hits']['total']
        if isinstance(hits, dict):
            # ES >= 7 wraps the total as {'value': ..., 'relation': ...}.
            hits = hits['value']
        self.logger.debug('Metrics query produced [%s] results.', hits)
        if (hits > 0):
            return mapper(result['hits']['hits'][0]['_source'])
        else:
            return None

    def get_error_rate(self, task, operation_type=None, sample_type=None):
        """Return the fraction of failed requests for a task in [0.0, 1.0]."""
        query = {'query': self._query_by_name('service_time', task, operation_type, sample_type, None), 'size': 0, 'aggs': {'error_rate': {'terms': {'field': 'meta.success'}}}}
        self.logger.debug('Issuing get_error_rate against index=[%s], query=[%s]', self._index, query)
        result = self._client.search(index=self._index, body=query)
        buckets = result['aggregations']['error_rate']['buckets']
        self.logger.debug('Query returned [%d] buckets.', len(buckets))
        count_success = 0
        count_errors = 0
        for bucket in buckets:
            k = bucket['key_as_string']
            doc_count = int(bucket['doc_count'])
            self.logger.debug('Processing key [%s] with [%d] docs.', k, doc_count)
            if (k == 'true'):
                count_success = doc_count
            elif (k == 'false'):
                count_errors = doc_count
            else:
                self.logger.warning('Unrecognized bucket key [%s] with [%d] docs.', k, doc_count)
        if (count_errors == 0):
            return 0.0
        elif (count_success == 0):
            return 1.0
        else:
            return (count_errors / (count_errors + count_success))

    def get_stats(self, name, task=None, operation_type=None, sample_type=None):
        """Return the ES stats aggregation (count/min/max/avg/sum) for a metric."""
        query = {'query': self._query_by_name(name, task, operation_type, sample_type, None), 'size': 0, 'aggs': {'metric_stats': {'stats': {'field': 'value'}}}}
        self.logger.debug('Issuing get_stats against index=[%s], query=[%s]', self._index, query)
        result = self._client.search(index=self._index, body=query)
        return result['aggregations']['metric_stats']

    def get_percentiles(self, name, task=None, operation_type=None, sample_type=None, percentiles=None):
        """Return an OrderedDict of percentile -> value, or None if no hits."""
        if (percentiles is None):
            percentiles = [99, 99.9, 100]
        query = {'query': self._query_by_name(name, task, operation_type, sample_type, None), 'size': 0, 'aggs': {'percentile_stats': {'percentiles': {'field': 'value', 'percents': percentiles}}}}
        self.logger.debug('Issuing get_percentiles against index=[%s], query=[%s]', self._index, query)
        result = self._client.search(index=self._index, body=query)
        hits = result['hits']['total']
        if isinstance(hits, dict):
            # ES >= 7 wraps the total as {'value': ..., 'relation': ...}.
            hits = hits['value']
        self.logger.debug('get_percentiles produced %d hits', hits)
        if (hits > 0):
            raw = result['aggregations']['percentile_stats']['values']
            # Percentile keys arrive as strings; sort them numerically.
            return collections.OrderedDict(sorted(raw.items(), key=(lambda t: float(t[0]))))
        else:
            return None

    def _query_by_name(self, name, task, operation_type, sample_type, node_name):
        """Build the bool-filter query for a metric name plus optional facets."""
        q = {'bool': {'filter': [{'term': {'race-id': self._race_id}}, {'term': {'name': name}}]}}
        if task:
            q['bool']['filter'].append({'term': {'task': task}})
        if operation_type:
            q['bool']['filter'].append({'term': {'operation-type': operation_type}})
        if sample_type:
            q['bool']['filter'].append({'term': {'sample-type': sample_type.name.lower()}})
        if node_name:
            q['bool']['filter'].append({'term': {'meta.node_name': node_name}})
        return q

    def to_externalizable(self, clear=False):
        # Data already lives in Elasticsearch; nothing to serialize.
        return None

    def __str__(self):
        return 'Elasticsearch metrics store'
def format_primitive_literal(t, value):
    """Format *value* as a Java literal of primitive type *t*.

    Looks up signedness/width/cast info in ``java_primitives_info`` and raises
    when the value does not fit in the type's bit width.
    """
    (signed, bits, cast_needed) = java_primitives_info[t]
    if (t == 'boolean'):
        # Accept both Python truthiness and the strings 'False'/'false'.
        return ('true' if (bool(value) and (value not in ('False', 'false'))) else 'false')
    # Renamed from 'max' to avoid shadowing the builtin.
    max_value = ((1 << bits) - 1)
    if (value > max_value):
        raise Exception(('Value %s too large for type %s' % (value, t)))
    if signed:
        max_pos = ((1 << (bits - 1)) - 1)
        if (value > max_pos):
            # Value only fits as an unsigned bit pattern of the signed type.
            if (t == 'long'):
                # NOTE(review): this renders the magnitude (2**bits - value)
                # rather than the negative two's-complement literal
                # (value - 2**bits); confirm this matches the intended output.
                return str(((1 << bits) - value))
            else:
                # Hex plus a cast keeps the bit pattern without a sign error.
                return ('(%s) 0x%x' % (t, value))
    return ('%s0x%x%s' % ((('(%s) ' % t) if cast_needed else ''), value, ('L' if (t == 'long') else '')))
class PresidentialCoverage(ApiBaseTest):
    """API tests for the PresidentialCoverageView endpoint."""

    def _make_coverage(self, election_year, end_date):
        # One coverage fixture row per call; keeps the tests readable.
        factories.PresidentialCoverageFactory(candidate_id='P', election_year=election_year, coverage_end_date=end_date)

    def test_without_filter(self):
        """An unfiltered query returns every coverage row."""
        fixtures = ([(2016, datetime.date(2016, 12, 31))] * 2) + ([(2020, datetime.date(2018, 12, 31))] * 2)
        for year, end in fixtures:
            self._make_coverage(year, end)
        results = self._results(api.url_for(PresidentialCoverageView))
        self.assertEqual(len(results), 4)

    def test_filters(self):
        """Filtering by election year or candidate id narrows the result set."""
        fixtures = ([(2016, datetime.date(2016, 12, 31))] * 2) + ([(2020, datetime.date(2018, 12, 31))] * 4)
        for year, end in fixtures:
            self._make_coverage(year, end)
        filter_fields = (('election_year', [2020]), ('candidate_id', ['P', 'P']))
        original_count = self._response(api.url_for(PresidentialCoverageView))['pagination']['count']
        for field, example in filter_fields:
            page = api.url_for(PresidentialCoverageView, **{field: example})
            self.assertGreater(len(self._results(page)), 0)
            self.assertGreater(original_count, self._response(page)['pagination']['count'])
def extractTheMustangTranslator(item):
    """Map a Mustang Translator feed item onto a release message.

    Returns None for previews or unparseable titles, False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    if 'The Six Immortals' not in item['tags']:
        return False
    return buildReleaseMessageWithType(item, 'The Six Immortals', vol, chp, frag=frag, postfix=postfix)
def extractMistreamWordpressCom(item):
    """Map a mistream.wordpress.com feed item onto a release message.

    Returns None for previews or unparseable titles, False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tagmap = [
        ('IQ', "I'm Scattering IQ to the Protagonist", 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesAreasplineSonificationTracksMappingTremolo(Options):
    """Option wrapper for the `tremolo` mapping of areaspline sonification tracks.

    NOTE(review): accessors in this generated API are normally decorated with
    `@property`; the decorators appear stripped — confirm against the generator.
    """

    def depth(self) -> 'OptionSeriesAreasplineSonificationTracksMappingTremoloDepth':
        # Sub-options controlling tremolo depth.
        return self._config_sub_data('depth', OptionSeriesAreasplineSonificationTracksMappingTremoloDepth)

    def speed(self) -> 'OptionSeriesAreasplineSonificationTracksMappingTremoloSpeed':
        # Sub-options controlling tremolo speed.
        return self._config_sub_data('speed', OptionSeriesAreasplineSonificationTracksMappingTremoloSpeed)
# NOTE(review): '.EventDecorator()' below looks like a profiling decorator
# whose module prefix was lost during processing — confirm against VCS.
.EventDecorator()
def inject(fine, coarse):
    """Inject a fine-grid function into a coarse-grid function, one mesh
    level at a time, writing the result into ``coarse``.

    Handles three cases: mixed spaces (component-wise via transfer managers),
    Real spaces (a plain vector copy), and ordinary spaces (a par_loop with a
    generated injection kernel per refinement level).
    """
    check_arguments(coarse, fine)
    Vf = fine.function_space()
    Vc = coarse.function_space()
    # Mixed spaces: inject component-wise via each subspace's transfer manager.
    if (len(Vc) > 1):
        if (len(Vc) != len(Vf)):
            raise ValueError('Mixed spaces have different lengths')
        for (in_, out) in zip(fine.subfunctions, coarse.subfunctions):
            manager = firedrake.dmhooks.get_transfer_manager(in_.function_space().dm)
            manager.inject(in_, out)
        return
    # Real spaces hold a single value; injection degenerates to a copy.
    if ((Vc.ufl_element().family() == 'Real') or (Vf.ufl_element().family() == 'Real')):
        assert (Vc.ufl_element().family() == 'Real')
        assert (Vf.ufl_element().family() == 'Real')
        with coarse.dat.vec_wo as dest, fine.dat.vec_ro as src:
            src.copy(dest)
        return
    (kernel, dg) = kernels.inject_kernel(Vf, Vc)
    (hierarchy, coarse_level) = utils.get_level(ufl_expr.extract_unique_domain(coarse))
    if (dg and (not hierarchy.nested)):
        raise NotImplementedError("Sorry, we can't do supermesh projections yet!")
    (_, fine_level) = utils.get_level(ufl_expr.extract_unique_domain(fine))
    refinements_per_level = hierarchy.refinements_per_level
    # Number of single-level injections needed to bridge fine -> coarse.
    repeat = ((fine_level - coarse_level) * refinements_per_level)
    next_level = (fine_level * refinements_per_level)
    element = Vc.ufl_element()
    meshes = hierarchy._meshes
    for j in range(repeat):
        next_level -= 1
        if (j == (repeat - 1)):
            # Last hop: write directly into the caller's coarse function.
            coarse.dat.zero()
            next = coarse
            Vc = next.function_space()
        else:
            # Intermediate hop: inject into a temporary on the next mesh.
            Vc = firedrake.FunctionSpace(meshes[next_level], element)
            next = firedrake.Function(Vc)
        if (not dg):
            # Continuous case: evaluate fine function at coarse node locations.
            node_locations = utils.physical_node_locations(Vc)
            fine_coords = Vf.mesh().coordinates
            coarse_node_to_fine_nodes = utils.coarse_node_to_fine_node_map(Vc, Vf)
            coarse_node_to_fine_coords = utils.coarse_node_to_fine_node_map(Vc, fine_coords.function_space())
            # Ensure halo data is up to date before reading in the par_loop.
            for d in [fine, fine_coords]:
                d.dat.global_to_local_begin(op2.READ)
                d.dat.global_to_local_end(op2.READ)
            op2.par_loop(kernel, next.node_set, next.dat(op2.INC), node_locations.dat(op2.READ), fine.dat(op2.READ, coarse_node_to_fine_nodes), fine_coords.dat(op2.READ, coarse_node_to_fine_coords))
        else:
            # DG case: iterate coarse cells and gather from fine cell nodes.
            coarse_coords = Vc.mesh().coordinates
            fine_coords = Vf.mesh().coordinates
            coarse_cell_to_fine_nodes = utils.coarse_cell_to_fine_node_map(Vc, Vf)
            coarse_cell_to_fine_coords = utils.coarse_cell_to_fine_node_map(Vc, fine_coords.function_space())
            # Ensure halo data is up to date before reading in the par_loop.
            for d in [fine, fine_coords]:
                d.dat.global_to_local_begin(op2.READ)
                d.dat.global_to_local_end(op2.READ)
            op2.par_loop(kernel, Vc.mesh().cell_set, next.dat(op2.INC, next.cell_node_map()), fine.dat(op2.READ, coarse_cell_to_fine_nodes), fine_coords.dat(op2.READ, coarse_cell_to_fine_coords), coarse_coords.dat(op2.READ, coarse_coords.cell_node_map()))
        # The freshly injected level becomes the "fine" side of the next hop.
        fine = next
        Vf = Vc
    return coarse
def test_message_identifier(message_identifier):
    """The message identifier exposes its conversation id, index and type."""
    expected = {'conv_uid': 'conv1', 'index': 1, 'identifier_type': 'message'}
    assert message_identifier.conv_uid == expected['conv_uid']
    assert message_identifier.index == expected['index']
    assert message_identifier.identifier_type == expected['identifier_type']
    assert message_identifier.str_identifier == 'message___conv1___1'
    assert message_identifier.to_dict() == expected
class OptionSeriesWaterfallSonificationTracksMappingLowpass(Options):
    """Option wrapper for the `lowpass` mapping of waterfall sonification tracks.

    NOTE(review): accessors in this generated API are normally decorated with
    `@property`; the decorators appear stripped — confirm against the generator.
    """

    def frequency(self) -> 'OptionSeriesWaterfallSonificationTracksMappingLowpassFrequency':
        # Sub-options controlling the lowpass filter frequency.
        return self._config_sub_data('frequency', OptionSeriesWaterfallSonificationTracksMappingLowpassFrequency)

    def resonance(self) -> 'OptionSeriesWaterfallSonificationTracksMappingLowpassResonance':
        # Sub-options controlling the lowpass filter resonance.
        return self._config_sub_data('resonance', OptionSeriesWaterfallSonificationTracksMappingLowpassResonance)
class TestSessionStop():
    """Behaviour of Session.stop() across the session states."""

    # NOTE(review): '.parametrize' below looks like a '@pytest.mark' decorator
    # whose prefix was lost during processing — confirm against VCS.
    .parametrize('state', [State.INITIAL, State.ENDED, State.STOPPED, State.STARTED])
    def test_not_stop_when_session_is_not_running(self, state, session):
        """stop() is a no-op unless the session is actually running."""
        session.state = state
        assert (not session.stop())

    def test_stops_when_session_is_running(self, session, bus, mocker):
        """Stopping a running session emits SESSION_INTERRUPT with its payload."""
        subscriber = mocker.Mock()
        bus.connect(Events.SESSION_INTERRUPT, subscriber, False)
        session.ready()
        session.start()
        result = session.stop()
        assert (result is True)
        # A freshly started pomodoro: 25 minutes, none completed yet.
        payload = SessionPayload(type=SessionType.POMODORO, duration=(25 * 60), pomodoros=0)
        subscriber.assert_called_once_with(Events.SESSION_INTERRUPT, payload=payload)
class OptionSeriesXrangeSonificationTracksMappingLowpassResonance(Options):
    """Option wrapper for `lowpass.resonance` mapping of x-range sonification tracks.

    NOTE(review): every option appears twice as a getter/setter pair with the
    same name, so the second def shadows the first at class-creation time; the
    `@property` / `@<name>.setter` decorators look stripped — confirm against
    the generator that produced this file.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def table(data: Union[(Collection, Dict)], header: Optional[Iterable]=None, footer: Optional[Iterable]=None, divider: bool=False, widths: Union[(Iterable[int], Literal['auto'])]='auto', max_col: int=30, spacing: int=3, aligns: Optional[Union[(Iterable[Literal[('r', 'c', 'l')]], Literal[('r', 'c', 'l')])]]=None, multiline: bool=False, env_prefix: str='WASABI', color_values: Optional[Dict]=None, fg_colors: Optional[Iterable]=None, bg_colors: Optional[Iterable]=None) -> str:
    """Render `data` (rows, or a dict rendered as key/value rows) as a
    formatted text table.

    header/footer: optional extra rows; divider: draw a dashed row after the
    header and before the footer; widths: per-column widths or 'auto';
    multiline: cell values may be lists/tuples, spread over several lines.
    fg_colors/bg_colors: per-column colors, looked up in COLORS (optionally
    extended by color_values). Returns the table surrounded by newlines.
    """
    if ((fg_colors is not None) or (bg_colors is not None)):
        # Resolve symbolic color names to codes; unknown names pass through.
        colors = dict(COLORS)
        if (color_values is not None):
            colors.update(color_values)
        if (fg_colors is not None):
            fg_colors = [colors.get(fg_color, fg_color) for fg_color in fg_colors]
        if (bg_colors is not None):
            bg_colors = [colors.get(bg_color, bg_color) for bg_color in bg_colors]
    if isinstance(data, dict):
        data = list(data.items())
    if multiline:
        # Expand rows whose cells are sequences into one physical row per
        # element, padding ragged columns with '' and separating logical
        # rows with a blank row.
        zipped_data = []
        for (i, item) in enumerate(data):
            vals = [(v if isinstance(v, (list, tuple)) else [v]) for v in item]
            zipped_data.extend(list(zip_longest(*vals, fillvalue='')))
            if (i < (len(data) - 1)):
                zipped_data.append(tuple(['' for _ in item]))  # was `for i in item`: shadowed the loop index
        data = zipped_data
    if (widths == 'auto'):
        widths = _get_max_widths(data, header, footer, max_col)
    settings = {'widths': widths, 'spacing': spacing, 'aligns': aligns, 'env_prefix': env_prefix, 'fg_colors': fg_colors, 'bg_colors': bg_colors}
    divider_row = row([('-' * width) for width in widths], **settings)
    rows = []
    if header:
        rows.append(row(header, **settings))
        if divider:
            rows.append(divider_row)
    for item in data:  # index was unused; plain iteration
        rows.append(row(item, **settings))
    if footer:
        if divider:
            rows.append(divider_row)
        rows.append(row(footer, **settings))
    return '\n{}\n'.format('\n'.join(rows))
class JsDomsTransforms:
    """Builder of JavaScript statements that set CSS `transform` on the
    element referenced by `selector`.

    Each method returns the JS source string; nothing is executed here.
    """

    def __init__(self, page, selector):
        # `page` is kept for API symmetry with sibling builders; only
        # `selector` is used when emitting JS.
        (self.page, self.selector) = (page, selector)

    def initial(self):
        """JS expression for the element's `initial` transform keyword."""
        return ('%s.initial' % self.selector)

    def inherit(self):
        """JS expression for the element's `inherit` transform keyword."""
        # BUG FIX: previously returned '%s.initial' (copy-paste from initial()).
        return ('%s.inherit' % self.selector)

    def matrix(self, scale_x: float, skew_y: float, skew_x: float, scale_y: float, translate_x: float, translate_y: float):
        """transform: matrix(a, b, c, d, tx, ty)."""
        return ("%s.style.transform = 'matrix(%s, %s, %s, %s, %s, %s)'" % (self.selector, scale_x, skew_y, skew_x, scale_y, translate_x, translate_y))

    def translateX(self, x: int, unit: str='px'):
        return ("%s.style.transform = 'translateX(%s%s)'" % (self.selector, x, unit))

    def translateY(self, y: int, unit: str='px'):
        return ("%s.style.transform = 'translateY(%s%s)'" % (self.selector, y, unit))

    def translate(self, x: int, y: int, unit: str='px'):
        return ("%s.style.transform = 'translate(%s%s, %s%s)'" % (self.selector, x, unit, y, unit))

    def perspective(self, d: int, unit: str='px'):
        return ("%s.style.transform = 'perspective(%s%s)'" % (self.selector, d, unit))

    def scale(self, x: float, y: float):
        return ("%s.style.transform = 'scale(%s, %s)'" % (self.selector, x, y))

    def scaleX(self, x: float):
        return ("%s.style.transform = 'scaleX(%s)'" % (self.selector, x))

    def scaleY(self, y: float):
        return ("%s.style.transform = 'scaleY(%s)'" % (self.selector, y))

    def skew(self, angle_x: float, angle_y: float=0, unit: str='deg'):
        return ("%s.style.transform = 'skew(%s%s, %s%s)'" % (self.selector, angle_x, unit, angle_y, unit))

    def skewX(self, angle: float, unit: str='deg'):
        return ("%s.style.transform = 'skewX(%s%s)'" % (self.selector, angle, unit))

    def skewY(self, angle: float, unit: str='deg'):
        return ("%s.style.transform = 'skewY(%s%s)'" % (self.selector, angle, unit))

    def rotate(self, r: float, unit: str='deg'):
        return ("%s.style.transform = 'rotate(%s%s)'" % (self.selector, r, unit))

    def rotate3d(self, x: float, y: float, z: float, a: float, unit: str='deg'):
        """transform: rotate3d(x, y, z, angle) -- only the angle carries a unit."""
        return ("%s.style.transform = 'rotate3d(%s, %s, %s, %s%s)'" % (self.selector, x, y, z, a, unit))

    def rotateX(self, r: float, unit: str='deg'):
        return ("%s.style.transform = 'rotateX(%s%s)'" % (self.selector, r, unit))

    def rotateY(self, r: float, unit: str='deg'):
        return ("%s.style.transform = 'rotateY(%s%s)'" % (self.selector, r, unit))

    def rotateZ(self, r: float, unit: str='deg'):
        return ("%s.style.transform = 'rotateZ(%s%s)'" % (self.selector, r, unit))
def test_affine_index_range10():
    """index_range_analysis of an affine loop bound: k iterates over
    [0, 2*i + 3*j) with i bounded by [0, 5] and j by [0, 7]."""
    # NOTE(review): `bar` presumably carried a DSL decorator (e.g. @proc)
    # that was stripped during flattening -- the .find() API below implies it.
    def bar():
        for i in seq(0, 6):
            for j in seq(0, 8):
                for k in seq(0, ((2 * i) + (3 * j))):
                    pass
    # AST node of the innermost loop's upper-bound expression.
    e = bar.find('for k in _:_').hi()._impl._node
    i_sym = bar.find('for i in _:_')._impl._node.iter
    j_sym = bar.find('for j in _:_')._impl._node.iter
    # With i<=5 and j<=7, 2*i + 3*j spans [0, 2*5 + 3*7] == [0, 31].
    e_range = index_range_analysis(e, {i_sym: (0, 5), j_sym: (0, 7)})
    assert (e_range == (0, 31))
('cuda.perm102_bmm_rcr_bias.func_decl')  # NOTE(review): bare string -- looks like a stripped registry decorator; confirm upstream.
def gen_function_decl(func_attrs):
    """Render the C++ function declaration for the perm102 bmm_rcr_bias op.

    Dimensionality is taken from the original shapes of the first two
    input accessors (input tensor and weight tensor respectively).
    """
    input_shapes = func_attrs['input_accessors'][0].original_shapes
    weight_shapes = func_attrs['input_accessors'][1].original_shapes
    return common_bias.FUNC_DECL_TEMPLATE.render(
        func_name=func_attrs['name'],
        input_ndims=len(input_shapes),
        weight_ndims=len(weight_shapes),
    )
class IdvDownloadValidator(DownloadValidatorBase):
    """Validator for IDV (Indefinite Delivery Vehicle) download requests.

    Extends the base tinyshield models with `award_id` and `limit`, then
    normalizes the request into the canonical IDV download payload.
    """
    name = 'idv'

    def __init__(self, request_data: dict):
        super().__init__(request_data)
        # award_id accepts either a numeric internal id or a raw-text
        # generated id; limit is clamped to the configured maximum.
        self.tinyshield_models.extend([{'key': 'award_id', 'name': 'award_id', 'type': 'any', 'models': [{'type': 'integer'}, {'type': 'text', 'text_type': 'raw'}], 'optional': False, 'allow_nulls': False}, {'name': 'limit', 'key': 'limit', 'type': 'integer', 'min': 0, 'max': settings.MAX_DOWNLOAD_LIMIT, 'default': settings.MAX_DOWNLOAD_LIMIT}])
        self._json_request = request_data
        # Run tinyshield validation over the raw request.
        self._json_request = self.get_validated_request()
        # Resolve the award id into (internal id, piid); extra tuple members unused here.
        (award_id, piid, _, _, _) = _validate_award_id(self._json_request.pop('award_id'))
        filters = {'idv_award_id': award_id, 'award_type_codes': tuple((set(contract_type_mapping) | set(idv_type_mapping)))}
        # Fixed download configuration for IDV bundles (orders, transaction
        # history and federal account funding, plus README and dictionary).
        self._json_request.update({'account_level': 'treasury_account', 'download_types': ['idv_orders', 'idv_transaction_history', 'idv_federal_account_funding'], 'include_file_description': {'source': settings.IDV_DOWNLOAD_README_FILE_PATH, 'destination': 'readme.txt'}, 'piid': piid, 'is_for_idv': True, 'filters': filters, 'include_data_dictionary': True})
class TlsDomainData(ModelNormal):
    """Generated OpenAPI model for a TLS domain resource.

    NOTE(review): the bare `_property` and `_js_args_to_python_args` lines
    below look like stripped decorators (presumably @cached_property and a
    kwargs-conversion decorator from the generated client) -- as written they
    are plain name expressions and would raise NameError; confirm upstream.
    `_from_openapi_data` also appears to have lost its @classmethod decorator.
    """
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        # Accept any JSON-compatible type for unknown properties.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Attribute name -> accepted type tuple (lazy to avoid import cycles).
        lazy_import()
        return {'id': (str,), 'type': (TypeTlsDomain,), 'relationships': (RelationshipsForTlsDomain,)}
    _property
    def discriminator():
        return None
    attribute_map = {'id': 'id', 'type': 'type', 'relationships': 'relationships'}
    read_only_vars = {'id'}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Deserialize server data into an instance (read-only vars allowed)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when the configuration says so.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """User-facing constructor; rejects read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # NOTE(review): in upstream generated clients this read-only check
            # usually precedes setattr; here it runs after -- confirm intent.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class AsyncEthereumTesterProvider(AsyncBaseProvider):
    """Async web3 provider backed by an in-memory EthereumTester chain.

    Useful for tests: requests are dispatched synchronously against the
    eth-tester API endpoint table, no network involved.
    """
    middlewares = (async_attrdict_middleware, async_buffered_gas_estimate_middleware, async_default_transaction_fields_middleware, async_ethereum_tester_middleware)

    def __init__(self) -> None:
        super().__init__()
        # Imported locally to avoid a hard dependency on eth-tester for
        # users who never instantiate this provider.
        from eth_tester import EthereumTester
        from web3.providers.eth_tester.defaults import API_ENDPOINTS
        self.ethereum_tester = EthereumTester()
        self.api_endpoints = API_ENDPOINTS

    async def make_request(self, method: RPCEndpoint, params: Any) -> RPCResponse:
        # Delegates to the module-level synchronous dispatcher; async only
        # to satisfy the AsyncBaseProvider interface.
        return _make_request(method, params, self.api_endpoints, self.ethereum_tester)

    async def is_connected(self, show_traceback: bool=False) -> Literal[True]:
        # The in-memory backend is always reachable.
        return True
class TestOFPTableStats(unittest.TestCase):
    """Tests for OFPTableStats (OpenFlow 1.2 flow-table statistics).

    NOTE(review): in the original file every bare `field =` assignment below
    had lost its right-hand side, making the class a SyntaxError.  The values
    have been refilled with arbitrary numbers that fit the field widths of
    OFP_TABLE_STATS_PACK_STR -- presumably '!B7x32sQQIIQQQQIIIIQQ'
    (uint8 table_id, 32-byte name, uint64 match/wildcards, uint32
    write/apply_actions, uint64 set-field and metadata masks, uint32
    instructions/config/max_entries/active_count, uint64 lookup/matched
    counts).  Confirm the exact values against upstream history.
    """

    def test_init(self):
        # Arbitrary in-range values; __init__ just stores them as attributes.
        table_id = 91
        name = 'name'
        match = 0x0123456789abcdef
        wildcards = 0x0123456789abcdef
        write_actions = 0x01234567
        apply_actions = 0x89abcdef
        write_setfields = 0x0123456789abcdef
        apply_setfields = 0x0123456789abcdef
        metadata_match = 0x0123456789abcdef
        metadata_write = 0x0123456789abcdef
        instructions = 0x01234567
        config = 0x89abcdef
        max_entries = 0x01234567
        active_count = 0x89abcdef
        lookup_count = 0x0123456789abcdef
        matched_count = 0x0123456789abcdef
        res = OFPTableStats(table_id, name, match, wildcards, write_actions, apply_actions, write_setfields, apply_setfields, metadata_match, metadata_write, instructions, config, max_entries, active_count, lookup_count, matched_count)
        eq_(table_id, res.table_id)
        eq_(name, res.name)
        eq_(match, res.match)
        eq_(wildcards, res.wildcards)
        eq_(write_actions, res.write_actions)
        eq_(apply_actions, res.apply_actions)
        eq_(write_setfields, res.write_setfields)
        eq_(apply_setfields, res.apply_setfields)
        eq_(metadata_match, res.metadata_match)
        eq_(metadata_write, res.metadata_write)
        eq_(instructions, res.instructions)
        eq_(config, res.config)
        eq_(max_entries, res.max_entries)
        eq_(active_count, res.active_count)
        eq_(lookup_count, res.lookup_count)
        eq_(matched_count, res.matched_count)

    def _test_parser(self, table_id, name, match, wildcards, write_actions, apply_actions, write_setfields, apply_setfields, metadata_match, metadata_write, instructions, config, max_entries, active_count, lookup_count, matched_count):
        """Pack the fields, parse them back, and check a round trip."""
        fmt = ofproto.OFP_TABLE_STATS_PACK_STR
        buf = pack(fmt, table_id, name, match, wildcards, write_actions, apply_actions, write_setfields, apply_setfields, metadata_match, metadata_write, instructions, config, max_entries, active_count, lookup_count, matched_count)
        res = OFPTableStats.parser(buf, 0)
        eq_(table_id, res.table_id)
        # The wire format NUL-pads the 32-byte name field.
        eq_(name, res.name.replace(b'\x00', b''))
        eq_(match, res.match)
        eq_(wildcards, res.wildcards)
        eq_(write_actions, res.write_actions)
        eq_(apply_actions, res.apply_actions)
        eq_(write_setfields, res.write_setfields)
        eq_(apply_setfields, res.apply_setfields)
        eq_(metadata_match, res.metadata_match)
        eq_(metadata_write, res.metadata_write)
        eq_(instructions, res.instructions)
        eq_(config, res.config)
        eq_(max_entries, res.max_entries)
        eq_(active_count, res.active_count)
        eq_(lookup_count, res.lookup_count)
        eq_(matched_count, res.matched_count)

    def test_parser_mid(self):
        # Mid-range arbitrary values.
        table_id = 91
        name = b'name'
        match = 0x0123456789abcdef
        wildcards = 0x0123456789abcdef
        write_actions = 0x01234567
        apply_actions = 0x89abcdef
        write_setfields = 0x0123456789abcdef
        apply_setfields = 0x0123456789abcdef
        metadata_match = 0x0123456789abcdef
        metadata_write = 0x0123456789abcdef
        instructions = 0x01234567
        config = 0x89abcdef
        max_entries = 0x01234567
        active_count = 0x89abcdef
        lookup_count = 0x0123456789abcdef
        matched_count = 0x0123456789abcdef
        self._test_parser(table_id, name, match, wildcards, write_actions, apply_actions, write_setfields, apply_setfields, metadata_match, metadata_write, instructions, config, max_entries, active_count, lookup_count, matched_count)

    def test_parser_max(self):
        # Type-maximum values for every field width.
        table_id = 255
        name = (b'a' * 32)
        match = 0xffffffffffffffff
        wildcards = 0xffffffffffffffff
        write_actions = 0xffffffff
        apply_actions = 0xffffffff
        write_setfields = 0xffffffffffffffff
        apply_setfields = 0xffffffffffffffff
        metadata_match = 0xffffffffffffffff
        metadata_write = 0xffffffffffffffff
        instructions = 0xffffffff
        config = 0xffffffff
        max_entries = 0xffffffff
        active_count = 0xffffffff
        lookup_count = 0xffffffffffffffff
        matched_count = 0xffffffffffffffff
        self._test_parser(table_id, name, match, wildcards, write_actions, apply_actions, write_setfields, apply_setfields, metadata_match, metadata_write, instructions, config, max_entries, active_count, lookup_count, matched_count)

    def test_parser_min(self):
        # All-zero minimum values.
        table_id = 0
        name = b''
        match = 0
        wildcards = 0
        write_actions = 0
        apply_actions = 0
        write_setfields = 0
        apply_setfields = 0
        metadata_match = 0
        metadata_write = 0
        instructions = 0
        config = 0
        max_entries = 0
        active_count = 0
        lookup_count = 0
        matched_count = 0
        self._test_parser(table_id, name, match, wildcards, write_actions, apply_actions, write_setfields, apply_setfields, metadata_match, metadata_write, instructions, config, max_entries, active_count, lookup_count, matched_count)

    def _test_parser_p(self, ofpxmt, ofpit, ofptc):
        """Round trip using protocol constants for the bitmap-like fields."""
        table_id = 91
        name = b'name'
        match = ofpxmt
        wildcards = ofpxmt
        write_actions = 0x01234567
        apply_actions = 0x89abcdef
        write_setfields = ofpxmt
        apply_setfields = ofpxmt
        metadata_match = 0x0123456789abcdef
        metadata_write = 0x0123456789abcdef
        instructions = ofpit
        config = ofptc
        max_entries = 0x01234567
        active_count = 0x89abcdef
        lookup_count = 0x0123456789abcdef
        matched_count = 0x0123456789abcdef
        self._test_parser(table_id, name, match, wildcards, write_actions, apply_actions, write_setfields, apply_setfields, metadata_match, metadata_write, instructions, config, max_entries, active_count, lookup_count, matched_count)

    def test_parser_p1(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_IN_PORT, ofproto.OFPIT_GOTO_TABLE, ofproto.OFPTC_TABLE_MISS_CONTINUE)

    def test_parser_p2(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_IN_PHY_PORT, ofproto.OFPIT_WRITE_METADATA, ofproto.OFPTC_TABLE_MISS_DROP)

    def test_parser_p3(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_METADATA, ofproto.OFPIT_WRITE_ACTIONS, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p4(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_ETH_DST, ofproto.OFPIT_APPLY_ACTIONS, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p5(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_ETH_SRC, ofproto.OFPIT_CLEAR_ACTIONS, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p6(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_ETH_TYPE, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p7(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_VLAN_VID, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p8(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_VLAN_PCP, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p9(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_IP_DSCP, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p10(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_IP_ECN, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p11(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_IP_PROTO, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p12(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_IPV4_SRC, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p13(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_IPV4_DST, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p14(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_TCP_SRC, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p15(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_TCP_DST, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p16(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_UDP_SRC, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p17(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_UDP_DST, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p18(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_SCTP_SRC, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p19(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_SCTP_DST, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p20(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_ICMPV4_TYPE, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p21(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_ICMPV4_CODE, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p22(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_ARP_OP, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p23(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_ARP_SPA, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p24(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_ARP_TPA, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p25(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_ARP_SHA, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p26(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_ARP_THA, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p27(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_IPV6_SRC, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p28(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_IPV6_DST, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p29(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_IPV6_FLABEL, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p30(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_ICMPV6_TYPE, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p31(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_ICMPV6_CODE, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p32(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_IPV6_ND_TARGET, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p33(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_IPV6_ND_SLL, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p34(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_IPV6_ND_TLL, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p35(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_MPLS_LABEL, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)

    def test_parser_p36(self):
        self._test_parser_p(ofproto.OFPXMT_OFB_MPLS_TC, ofproto.OFPIT_EXPERIMENTER, ofproto.OFPTC_TABLE_MISS_MASK)
class OptionSeriesAreaSonificationDefaultinstrumentoptionsMappingFrequency(Options):
    """Generated Highcharts option proxy for
    `series.area.sonification.defaultInstrumentOptions.mapping.frequency`.

    NOTE(review): getter/setter pairs share a name; the @property /
    @<name>.setter decorators appear stripped -- as written each later def
    replaces the earlier one. Restore the decorators before use.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class SparseCrossDenseAttention(fl.Residual):
    """Residual cross-attention from sparse prompt tokens to the dense
    image embedding, as used in the mask decoder."""

    def __init__(self, embedding_dim: int, num_heads: int=8, device: ((Device | str) | None)=None, dtype: (DType | None)=None) -> None:
        self.embedding_dim = embedding_dim
        self.num_heads = num_heads
        # Query: sparse embedding (with its own residual connection).
        query_branch = fl.Residual(fl.UseContext(context='mask_decoder', key='sparse_embedding'))
        # Key: dense embedding plus its positional encoding.
        key_branch = fl.Sum(
            fl.UseContext(context='mask_decoder', key='dense_embedding'),
            fl.UseContext(context='mask_decoder', key='dense_positional_embedding'),
        )
        # Value: the dense embedding itself.
        value_branch = fl.UseContext(context='mask_decoder', key='dense_embedding')
        super().__init__(
            fl.Parallel(query_branch, key_branch, value_branch),
            fl.Attention(
                embedding_dim=embedding_dim,
                inner_dim=(embedding_dim // 2),
                num_heads=num_heads,
                is_optimized=False,
                device=device,
                dtype=dtype,
            ),
        )
def _call_view_op(view_op: Callable, view_output_shape: List[IntVar], input_tensor: Tensor) -> Tensor:
view_op_type = view_op._attrs['op']
if (view_op_type == 'reshape'):
output = view_op(input_tensor, view_output_shape)
elif (view_op_type == 'flatten'):
output = view_op(input_tensor)
else:
raise AssertionError(f'unsupported view_op_type={view_op_type!r}')
return output |
def extractMlkynovelWordpressCom(item):
    """Build a release message from an mlkynovel.wordpress.com feed item.

    Returns None for previews or titles without a chapter/volume, a release
    message when a known tag matches, and False otherwise.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    title_lower = item['title'].lower()
    if not (chp or vol) or 'preview' in title_lower:
        return None
    # (feed tag, release name, translation type)
    tagmap = (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel'))
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def avoid_duplicates_in_column(dict_, tablename, column):
    """Make every entry of ``dict_[tablename][column]`` unique in place.

    Each group of duplicated values is renamed to 'value (0)', 'value (1)',
    ... in index order.  Raises ValueError if duplicates remain afterwards
    (e.g. a renamed value collides with an existing one).
    """
    dup_mask = dict_[tablename][column].duplicated(keep=False)
    for dup_value in dict_[tablename][column].loc[dup_mask].unique():
        positions = dict_[tablename][column].index[(dict_[tablename][column] == dup_value)]
        # Append a running counter to each occurrence of the duplicate.
        dict_[tablename][column].loc[positions] = [(dup_value + (' (%i)' % n)) for n in range(len(positions))]
    if sum(dict_[tablename][column].duplicated()):
        raise ValueError("The renaming by 'double + int' was not appropriate to remove all duplicates.")
class OptionSonificationGlobaltracksMappingHighpassResonance(Options):
    """Generated Highcharts option proxy for
    `sonification.globalTracks.mapping.highpass.resonance`.

    NOTE(review): getter/setter pairs share a name; the @property /
    @<name>.setter decorators appear stripped -- as written each later def
    replaces the earlier one. Restore the decorators before use.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def value(self):
        return self._config_get(None)
    def value(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
@pytest.mark.usefixtures('use_tmpdir')
def test_workflow():
    """Load a workflow file referencing a known job and check its parsing.

    NOTE(review): the original file had a bare `.usefixtures('use_tmpdir')`
    line -- a stripped decorator that made this function a SyntaxError.
    Restored as @pytest.mark.usefixtures (pytest is already used below).
    """
    WorkflowCommon.createExternalDumpJob()
    dump_job = WorkflowJob.from_file('dump_job', name='DUMP')
    # A job whose config file does not exist must fail to load.
    with pytest.raises(ConfigValidationError, match='Could not open config_file'):
        _ = WorkflowJob.from_file('knock_job', name='KNOCK')
    workflow = Workflow.from_file('dump_workflow', None, {'DUMP': dump_job})
    assert (len(workflow) == 2)
    # First entry: DUMP invoked with its two arguments.
    (job, args) = workflow[0]
    assert (args[0] == 'dump1')
    assert (args[1] == 'dump_text_1')
    (job, args) = workflow[1]
    assert (job.name == 'DUMP')
def convolutionOfGaussianCovariance(x1, E1, x2, E2):
    """Pairwise Gaussian convolution kernel between two point sets.

    Evaluates N(x1_i - x2_j; 0, E1_i + E2_j) for every pair (i, j), i.e.
    the density of the convolution of two Gaussians at the difference of
    their means.  Assumes 2x2 covariances (uses lin_alg.inv2x2); the last
    axis of x1/x2 is the spatial dimension -- TODO confirm shapes.
    """
    # Pairwise mean differences and covariance sums via broadcasting.
    delta = (x1.unsqueeze(2) - x2.unsqueeze(1)).float()
    cov_sum = E1.unsqueeze(2) + E2.unsqueeze(1)
    (cov_sum_inv, cov_sum_det) = lin_alg.inv2x2(cov_sum)
    delta_sq = torch.square(delta)
    cross_term = delta[..., 0] * delta[..., 1]
    # Quadratic form 0.5 * delta^T (E1+E2)^{-1} delta, written out for 2-D.
    quad = 0.5 * ((cov_sum_inv[..., 0, 0] * delta_sq[..., 0]) + ((2 * cov_sum_inv[..., 0, 1]) * cross_term) + (cov_sum_inv[..., 1, 1] * delta_sq[..., 1]))
    k = x1.shape[-1]
    # Gaussian normalization constant 1 / sqrt((2*pi)^k * det).
    norm = 1.0 / torch.sqrt(((2 * np.pi) ** k) * cov_sum_det)
    return norm * torch.exp(-quad)
class PanelPermission(db.Model):
    """Admin-panel access permission, linked many-to-many to CustomSysRole
    through the `roles_panels` association table."""
    __tablename__ = 'panel_permissions'
    id = db.Column(db.Integer, primary_key=True)
    # Name of the admin panel this permission applies to.
    panel_name = db.Column(db.String)
    custom_system_roles = db.relationship('CustomSysRole', secondary=roles_panels, backref=db.backref('panel_permissions', lazy='dynamic'))
    # Whether the linked roles can access the panel (defaults to allowed).
    can_access = db.Column(db.Boolean, default=True)

    def __repr__(self):
        return '<PanelPerm {!r} for {!r}>'.format(self.custom_system_roles, self.panel_name)
('xtb')  # NOTE(review): bare string -- looks like a stripped decorator (calculator marker/fixture); confirm upstream.
def test_opt_linear_dihedrals():
    """RFO-optimize a geometry whose redundant internals contain dihedrals
    that can become (near-)linear, and pin the convergence trajectory."""
    geom = geom_loader('lib:dihedral_gen_test.cjson', coord_type='redund')
    geom.set_calculator(XTB())
    opt_kwargs = {'thresh': 'gau_tight'}
    opt = RFOptimizer(geom, **opt_kwargs)
    opt.run()
    assert opt.is_converged
    # Exact cycle count pins the optimizer trajectory; brittle but intentional.
    assert (opt.cur_cycle == 13)
    assert (geom.energy == pytest.approx((- 10.)))
class OptionSubtitleStyle(Options):
    """Generated Highcharts option proxy for `subtitle.style`.

    NOTE(review): getter/setter pairs share a name; the @property /
    @<name>.setter decorators appear stripped -- as written each later def
    replaces the earlier one. Restore the decorators before use.
    """
    def color(self):
        # Default subtitle color.
        return self._config_get('#666666')
    def color(self, text: str):
        self._config(text, js_type=False)
    def fontSize(self):
        # Default subtitle font size.
        return self._config_get('0.8em')
    def fontSize(self, num: float):
        self._config(num, js_type=False)
_os(*metadata.platforms)  # NOTE(review): presumably a stripped OS-gating decorator -- confirm upstream.
def main():
    """RTA scenario: run a copy of the test binary masquerading as svchost.exe.

    Copies EXE_FILE to two public paths, executes the fake svchost with an
    argument chain referencing the second copy, then cleans both files up.
    """
    fake_svchost = 'C:\\Users\\Public\\svchost.exe'
    rta_copy = 'C:\\Users\\Public\\rta.exe'
    common.copy_file(EXE_FILE, rta_copy)
    common.copy_file(EXE_FILE, fake_svchost)
    # Short timeout + kill: we only need the process launch to be observed.
    common.execute([fake_svchost, 'echo', 'WdiSystemHost', ';', rta_copy], timeout=5, kill=True)
    common.remove_files(rta_copy, fake_svchost)
class TestOAuthRegistry(base.BasePyTestCase):
    """Tests for registering OAuth clients, both explicitly and from settings."""

    def test_register(self):
        """Explicit kwargs are stored on the registered client."""
        oauth = OAuth()
        oauth.register('dev', client_id='dev', client_secret='dev', client_kwargs={'scope': 'openid email profile', 'token_endpoint_auth_method': 'client_secret_post'})
        assert (oauth.dev.name == 'dev')
        assert (oauth.dev.client_id == 'dev')
        assert (oauth.dev.client_secret == 'dev')

    def test_register_from_settings(self):
        """Credentials omitted from register() are picked up from the
        `oidc.<name>.*` configuration settings."""
        self.config.add_settings({'oidc.dev.client_id': 'test-client-id', 'oidc.dev.client_secret': 'test-client-secret'})
        oauth = OAuth()
        oauth.register('dev', client_kwargs={'scope': 'openid email profile', 'token_endpoint_auth_method': 'client_secret_post'})
        assert (oauth.dev.name == 'dev')
        assert (oauth.dev.client_id == 'test-client-id')
        assert (oauth.dev.client_secret == 'test-client-secret')
class TestBarcodes(unittest.TestCase):
    """Tests for Barcodes: counting and mismatch-grouping of index sequences."""

    def test_barcodes(self):
        # NOTE(review): this FASTQ literal looks garbled by extraction (the
        # record separators render as '\-...' instead of '@...' header lines);
        # it cannot be altered here without changing test behavior -- confirm
        # against upstream before trusting the exact fixture.
        fastq_data = io.StringIO(u'-700511R:233:C446JACXX:6:1101:1241:2242 1:N:0:CCGTCCAT\nGAAACGCGGCACAGA\n+\n<BBFFBFFBBFFF7B\-700511R:233:C446JACXX:6:1101:1280:2080 1:N:0:GTCNNCAT\nCGAGCTCGAATTCAT\n+\n<0<<BFF<0BFFFII\-700511R:233:C446JACXX:6:1101:1241:2242 1:N:0:CCGTGCAT\nGAAACGCGGCACAGA\n+\n<BBFFBFFBBFFF7B\n')
        b = Barcodes()
        b.load(fp=fastq_data)
        # All three index sequences are seen once each.
        self.assertEqual(b.sequences(), ['CCGTCCAT', 'CCGTGCAT', 'GTCNNCAT'])
        self.assertEqual(b.count_for('CCGTCCAT'), 1)
        self.assertEqual(b.count_for('CCGTGCAT'), 1)
        self.assertEqual(b.count_for('GTCNNCAT'), 1)
        # count_for accepts multiple barcodes and sums them.
        self.assertEqual(b.count_for('CCGTCCAT', 'CCGTGCAT'), 2)
        self.assertEqual(b.count_for('ATCTGCAT'), 0)
        # Grouping tolerates single mismatches by default.
        self.assertEqual(b.group('CCGTCCAT'), ['CCGTCCAT', 'CCGTGCAT'])
        self.assertEqual(b.group('GTCNNCAT'), [])
        self.assertEqual(b.group('GTCNNCAT', max_mismatches=2), ['GTCNNCAT'])
        group = b.group('CCGTCCAT')
        self.assertEqual(b.count_for(*group), 2)
class GenQuarterlyData:
    """Deterministic synthetic quarterly-fundamentals generator.

    Produces `quart_cnt` quarterly rows per ticker, seeded from the index
    string so repeated calls with the same index yield identical data.
    """

    def __init__(self):
        self.quarterly_columns = ['marketcap', 'ebit', 'debt', 'netinc', 'ncf', 'fcf', 'revenue', 'ebitda']
        self.quart_cnt = 50

    def load(self, index):
        """Return a DataFrame with quart_cnt rows per ticker in `index`."""
        per_ticker = self.quart_cnt
        df = pd.DataFrame()
        df['ticker'] = [ticker for ticker in index for _ in range(per_ticker)]
        # Quarterly dates stepping back 90 days from a fixed anchor.
        quarter_dates = [(np.datetime64('2020-01-17') - (90 * np.timedelta64(q, 'D'))) for q in range(per_ticker)]
        df['date'] = quarter_dates * len(index)
        # Deterministic seed derived from the index contents.
        np.random.seed(int_hash_of_str(str(index)))
        n_rows = per_ticker * len(index)
        for col in self.quarterly_columns:
            # Market cap is strictly positive; all other columns may be negative.
            lower = 1000 if (col == 'marketcap') else (- 100000.0)
            df[col] = np.random.uniform(lower, 100000.0, n_rows)
        return df
def hydrogen_bde(geom, mult, diss_geom, diss_mult, calc_getter, thermo_calc_getter):
    """Homolytic hydrogen bond-dissociation enthalpy.

    BDE = H(dissociated fragment) + H(free H atom) - H(parent), with all
    species neutral.  -0.5 Hartree is the exact electronic energy of an
    isolated hydrogen atom.
    """
    h_atom_energy = (- 0.5)
    charge = 0
    parent_enthalpy = calc_H(geom, calc_getter, thermo_calc_getter, charge, mult)
    fragment_enthalpy = calc_H(diss_geom, calc_getter, thermo_calc_getter, charge, diss_mult)
    print('\tH', parent_enthalpy)
    print('\tdiss_H', fragment_enthalpy)
    bde = (fragment_enthalpy + h_atom_energy) - parent_enthalpy
    print('\tBDE', bde)
    return bde
class OptionSeriesPolygonOnpoint(Options):
    """Generated Highcharts option proxy for `series.polygon.onPoint`.

    NOTE(review): the paired `id` defs share one name; the @property /
    @id.setter decorators appear stripped -- as written the setter-shaped
    def replaces the getter. Restore the decorators before use.
    """
    def connectorOptions(self) -> 'OptionSeriesPolygonOnpointConnectoroptions':
        # Nested `connectorOptions` option group.
        return self._config_sub_data('connectorOptions', OptionSeriesPolygonOnpointConnectoroptions)
    def id(self):
        return self._config_get(None)
    def id(self, text: str):
        self._config(text, js_type=False)
    def position(self) -> 'OptionSeriesPolygonOnpointPosition':
        # Nested `position` option group.
        return self._config_sub_data('position', OptionSeriesPolygonOnpointPosition)
class OptionSeriesNetworkgraphSonificationTracksMappingHighpassResonance(Options):
    """Generated Highcharts option proxy for
    `series.networkgraph.sonification.tracks.mapping.highpass.resonance`.

    NOTE(review): getter/setter pairs share a name; the @property /
    @<name>.setter decorators appear stripped -- as written each later def
    replaces the earlier one. Restore the decorators before use.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class CustomFormFactory(BaseFactory):
    """factory_boy factory producing a CustomForms row: a required,
    non-fixed 'First Name' text field on the attendee form of event 1."""

    class Meta():
        model = CustomForms

    # Creates the related event after the form instance.
    event = factory.RelatedFactory(EventFactoryBasic)
    form = 'attendee'
    name = 'First Name'
    field_identifier = 'firstname'
    type = 'text'
    is_required = False
    is_included = False
    is_fixed = False
    event_id = 1
def get_cat_cannon_data() -> dict[(int, dict[(str, Any)])]:
    """Parse cat-cannon entries from the save stream via the module-level
    `next_int` reader.

    Returns a mapping of cannon id -> {'levels': {...}, 'unlock_flag',
    'len_val'}.  Foundation and style levels exist only in the newer
    (len_val == 4) record layout; older records default them to 0.
    """
    cannon_data: dict[(int, dict[(str, Any)])] = {}
    entry_count = next_int(4)
    for _ in range(entry_count):
        cannon_id = next_int(4)
        len_val = next_int(4)
        unlock_flag = next_int(4)
        effect_level = next_int(4)
        foundation_level = style_level = 0
        if len_val == 4:
            foundation_level = next_int(4)
            style_level = next_int(4)
        cannon_data[cannon_id] = {
            'levels': {'effect': effect_level, 'foundation': foundation_level, 'style': style_level},
            'unlock_flag': unlock_flag,
            'len_val': len_val,
        }
    return cannon_data
def compute_gas_limit(parent_header: BlockHeaderAPI, genesis_gas_limit: int) -> int:
    """Compute a block's gas limit from its parent via an EMA-style rule.

    Decays the parent limit and raises it in proportion to parent usage,
    never dropping below GAS_LIMIT_MINIMUM; while still below the genesis
    limit, the limit instead drifts upward toward it.  Raises ValueError
    for a genesis limit below the protocol minimum.
    """
    if genesis_gas_limit < GAS_LIMIT_MINIMUM:
        raise ValueError(f'The `genesis_gas_limit` value must be greater than the GAS_LIMIT_MINIMUM. Got {genesis_gas_limit}. Must be greater than {GAS_LIMIT_MINIMUM}')
    if parent_header is None:
        # Genesis block: no parent to derive from.
        return genesis_gas_limit
    decay = parent_header.gas_limit // GAS_LIMIT_EMA_DENOMINATOR
    usage_increase = 0
    if parent_header.gas_used:
        usage_increase = ((parent_header.gas_used * GAS_LIMIT_USAGE_ADJUSTMENT_NUMERATOR) // GAS_LIMIT_USAGE_ADJUSTMENT_DENOMINATOR) // GAS_LIMIT_EMA_DENOMINATOR
    candidate = max(GAS_LIMIT_MINIMUM, (parent_header.gas_limit - decay) + 1 + usage_increase)
    if candidate < GAS_LIMIT_MINIMUM:
        return GAS_LIMIT_MINIMUM
    if candidate < genesis_gas_limit:
        # Still below the genesis target: drift upward instead.
        return (parent_header.gas_limit + decay) - 1
    return candidate
class FintocError(Exception):
    """API error built from an error payload dict.

    Message layout (optional pieces included only when present):
        <type>: <code> (<param>)
        <message>
        Check the docs for more info: <doc_url>
    """

    def __init__(self, error_data):
        error_code = error_data.get('code')
        error_param = error_data.get('param')
        error_doc_url = error_data.get('doc_url')
        # Start from the error type and append optional fragments.
        message = error_data.get('type')
        if error_code is not None:
            message += f': {error_code}'
        if error_param is not None:
            message += f' ({error_param})'
        # The body line is unconditional (a missing field renders as 'None',
        # matching the original f-string behavior).
        message += f"\n{error_data.get('message')}"
        if error_doc_url is not None:
            message += f'\nCheck the docs for more info: {error_doc_url}'
        super().__init__(message)
class IlluminaSample():
    """A sample directory from an Illumina sequencing run.

    Collects the fastq.gz files belonging to one sample, derives the sample
    name (from the explicit `name`, the prefixed directory name, or the
    fastq file names), and tracks whether the data are paired-end.
    """

    def __init__(self, dirn, fastqs=None, name=None, prefix='Sample_'):
        """dirn: sample directory; fastqs: optional explicit fastq list
        (otherwise *.fastq.gz in dirn); name: explicit sample name;
        prefix: directory-name prefix to strip when deriving the name."""
        self.dirn = dirn
        self.fastq = []
        self.paired_end = False
        if (fastqs is None):
            fastqs = [f for f in os.listdir(self.dirn) if f.endswith('.fastq.gz')]
        else:
            # Normalize to basenames so explicit paths behave like listings.
            fastqs = [os.path.basename(f) for f in fastqs]
        self.sample_prefix = prefix
        if (name is not None):
            self.name = name
        elif (self.sample_prefix and os.path.basename(dirn).startswith(self.sample_prefix)):
            # Derive the name by stripping the directory prefix.
            self.name = os.path.basename(dirn)[len(self.sample_prefix):]
        else:
            # Fall back to the sample name encoded in the first fastq file.
            self.sample_prefix = ''
            self.name = IlluminaFastq(fastqs[0]).sample_name
            if (self.name == 'Undetermined'):
                # Undetermined reads are labelled per lane when possible.
                try:
                    self.name = ('lane%d' % IlluminaFastq(fastqs[0]).lane_number)
                except TypeError:
                    self.name = 'undetermined'
        logging.debug(('\tSample: %s' % self.name))
        for f in fastqs:
            self.add_fastq(f)
            logging.debug(('\tFastq : %s' % f))
        if (not self.fastq):
            logging.debug(('\tUnable to find fastq.gz files for %s' % self.name))

    def add_fastq(self, fastq):
        """Register a fastq file, keeping the list sorted and flagging
        paired-end data when an R2 read is seen."""
        self.fastq.append(fastq)
        self.fastq = sorted(self.fastq)
        if (not self.paired_end):
            fq = IlluminaFastq(fastq)
            if (fq.read_number == 2):
                self.paired_end = True

    def fastq_subset(self, read_number=None, full_path=False):
        """Return the sorted fastqs for one read number, optionally as
        full paths; raises IlluminaDataError for undetermined read numbers."""
        fastqs = []
        for fastq in self.fastq:
            fq = IlluminaFastq(fastq)
            if (fq.read_number is None):
                raise IlluminaDataError(('Unable to determine read number for %s' % fastq))
            if (fq.read_number == read_number):
                if full_path:
                    fastqs.append(os.path.join(self.dirn, fastq))
                else:
                    fastqs.append(fastq)
        return sorted(fastqs)

    def __repr__(self):
        return str(self.name)
class TestYAMLData(object):
    """Driver for data-driven YAML tests.

    Each test file is a multi-document YAML stream: an optional leading
    metadata mapping (``type``, ``yaml_version``, ``python``) followed by
    tagged documents (YAMLData / Output / Assert / Python) describing the
    input, expected output and/or assertions.
    """

    def yaml(self, yaml_version=None):
        """Return a round-trip YAML instance with quote preservation on."""
        from srsly.ruamel_yaml import YAML
        y = YAML()
        y.preserve_quotes = True
        if yaml_version:
            y.version = yaml_version
        return y

    def docs(self, path):
        """Load all documents from *path* with the test-data classes registered."""
        from srsly.ruamel_yaml import YAML
        tyaml = YAML(typ='safe', pure=True)
        tyaml.register_class(YAMLData)
        tyaml.register_class(Python)
        tyaml.register_class(Output)
        tyaml.register_class(Assert)
        return list(tyaml.load_all(path))

    def yaml_load(self, value, yaml_version=None):
        """Load *value* and return the ``(yaml_instance, data)`` pair."""
        yaml = self.yaml(yaml_version=yaml_version)
        data = yaml.load(value)
        return (yaml, data)

    def round_trip(self, input, output=None, yaml_version=None):
        """Load then dump *input*; the dump must equal *output* (or *input* itself)."""
        from srsly.ruamel_yaml.compat import StringIO
        (yaml, data) = self.yaml_load(input.value, yaml_version=yaml_version)
        buf = StringIO()
        yaml.dump(data, buf)
        expected = (input.value if (output is None) else output.value)
        value = buf.getvalue()
        if PY2:
            # StringIO yields bytes on Python 2; compare as text.
            value = value.decode('utf-8')
        print('value', value)
        assert (value == expected)

    def load_assert(self, input, confirm, yaml_version=None):
        """Load *input* as ``d`` and exec each line of *confirm* as an assertion.

        *confirm* is either a mapping with ``range``/``lines`` keys (each
        line is asserted for every ``idx`` in the range) or a plain block of
        assertion expressions, one per line.
        """
        from srsly.ruamel_yaml.compat import Mapping
        d = self.yaml_load(input.value, yaml_version=yaml_version)[1]  # noqa: F841 -- referenced by the exec'd assertions
        print('confirm.value', confirm.value, type(confirm.value))
        if isinstance(confirm.value, Mapping):
            r = range(confirm.value['range'])
            lines = confirm.value['lines'].splitlines()
            for idx in r:  # noqa: B007 -- ``idx`` is referenced by the exec'd lines
                for line in lines:
                    line = ('assert ' + line)
                    print(line)
                    exec(line)
        else:
            for line in confirm.value.splitlines():
                line = ('assert ' + line)
                print(line)
                exec(line)

    def run_python(self, python, data, tmpdir):
        """Save the Python snippet into *tmpdir*, run it, compare its output to *data*."""
        from .roundtrip import save_and_run
        assert (save_and_run(python.value, base_dir=tmpdir, output=data.value) == 0)

    def test_yaml_data(self, yaml, tmpdir):
        """Entry point: classify the documents and dispatch to rt / python_run / load_assert."""
        from srsly.ruamel_yaml.compat import Mapping
        idx = 0
        typ = None
        yaml_version = None
        docs = self.docs(yaml)
        # Optional leading metadata mapping.
        if isinstance(docs[0], Mapping):
            d = docs[0]
            typ = d.get('type')
            yaml_version = d.get('yaml_version')
            if ('python' in d):
                if (not check_python_version(d['python'])):
                    pytest.skip('unsupported version')
            idx += 1
        data = output = confirm = python = None
        for doc in docs[idx:]:
            if isinstance(doc, Output):
                output = doc
            elif isinstance(doc, Assert):
                confirm = doc
            elif isinstance(doc, Python):
                python = doc
                if (typ is None):
                    typ = 'python_run'
            elif isinstance(doc, YAMLData):
                data = doc
            else:
                print('no handler for type:', type(doc), repr(doc))
                raise AssertionError()
        # Infer the test type when the metadata did not name one.
        if (typ is None):
            if ((data is not None) and (output is not None)):
                typ = 'rt'
            elif ((data is not None) and (confirm is not None)):
                typ = 'load_assert'
            else:
                assert (data is not None)
                typ = 'rt'
        print('type:', typ)
        if (data is not None):
            print('data:', data.value, end='')
        print('output:', (output.value if (output is not None) else output))
        if (typ == 'rt'):
            self.round_trip(data, output, yaml_version=yaml_version)
        elif (typ == 'python_run'):
            self.run_python(python, (output if (output is not None) else data), tmpdir)
        elif (typ == 'load_assert'):
            self.load_assert(data, confirm, yaml_version=yaml_version)
        else:
            print('\nrun type unknown:', typ)
            raise AssertionError()
# Fix: the two decorator lines began with bare ``.skipif(`` / ``.parametrize(``,
# which is invalid syntax -- the ``@pytest.mark`` prefix was stripped; restored.
@pytest.mark.skipif((starlette_version_tuple < (0, 14)), reason='trailing slash behaviour new in 0.14')
@pytest.mark.parametrize('url,expected', (('/hi/shay/with/slash', 'GET /hi/{name}/with/slash'), ('/hi/shay/without/slash/', 'GET /hi/{name}/without/slash/'), ('/sub/subsub/hihi/shay/', 'GET /sub/subsub/hihi/{name}/')))
def test_trailing_slash_redirect_detection(app, elasticapm_client, url, expected):
    """A 307 trailing-slash redirect must still produce a transaction named after the matched route."""
    client = TestClient(app)
    response = client.get(url, allow_redirects=False)
    assert (response.status_code == 307)
    assert (len(elasticapm_client.events[constants.TRANSACTION]) == 1)
    for transaction in elasticapm_client.events[constants.TRANSACTION]:
        assert (transaction['name'] == expected)
class RepoDriver():
    """Drives repeated benchmark runs against a source repository.

    A background ExecutablesBuilder thread fills ``work_queue`` with built
    commits; this driver drains the queue and launches one BenchmarkDriver
    per queued commit.
    """

    def __init__(self, **kwargs):
        # raw_args lets a caller replay a pre-built argv instead of sys.argv.
        raw_args = kwargs.get('raw_args', None)
        (self.args, self.unknowns) = parser.parse_known_args(raw_args)
        self.repo = getRepo(self.args.repo, self.args.repo_dir)
        self.queue_lock = threading.Lock()
        self.work_queue = deque()
        self.executables_builder = ExecutablesBuilder(self.repo, self.work_queue, self.queue_lock, raw_args=raw_args)

    def run(self):
        """Start the builder thread, run all queued suites, return the run status."""
        getLogger().info(('Start benchmark run %s' % datetime.datetime.now().strftime('%Y_%m_%d_%H_%M')))
        self.executables_builder.start()
        self._runBenchmarkSuites()
        return getRunStatus()

    def _runBenchmarkSuites(self):
        # Give the builder a head start before polling the queue.
        time.sleep(10)
        if self.args.interval:
            # Continuous mode: keep polling until an external stop is signalled.
            while (not stopRun(self.args.status_file)):
                self._runBenchmarkSuitesInQueue()
                time.sleep(self.args.interval)
        else:
            # One-shot mode: drain the queue while the builder is still alive.
            while self.executables_builder.is_alive():
                time.sleep(10)
                self._runBenchmarkSuitesInQueue()

    def _runBenchmarkSuitesInQueue(self):
        same_host = self.args.same_host
        while ((not stopRun(self.args.status_file)) and self.work_queue):
            self.queue_lock.acquire()
            repo_info = self.work_queue.popleft()
            # On a shared host the lock is held across the whole benchmark run
            # (serialising runs with the builder); otherwise it only guards
            # the queue pop.
            # NOTE(review): if _runOneBenchmarkSuite raises while same_host is
            # set, the lock is never released -- confirm intended behaviour.
            if (not same_host):
                self.queue_lock.release()
            self._runOneBenchmarkSuite(repo_info)
            if same_host:
                self.queue_lock.release()

    def _runOneBenchmarkSuite(self, repo_info):
        """Run BenchmarkDriver once for *repo_info* and record the status."""
        raw_args = self._getRawArgs(repo_info)
        if (not _runIndividual(self.args.interval, self.args.regression, self.args.ab_testing)):
            # Pause so the run doesn't compete with the background builder.
            time.sleep(10)
        app = BenchmarkDriver(raw_args=raw_args)
        app.run()
        # NOTE(review): exit status is hard-coded to 0; the ``>> 8`` below
        # suggests a wait()-style status was once captured here -- confirm.
        ret = 0
        setRunStatus((ret >> 8))
        if (self.args.commit_file and self.args.regression):
            with open(self.args.commit_file, 'w') as file:
                file.write(repo_info['treatment']['commit'])
        getLogger().info(('One benchmark run {} for '.format(('successful' if (ret == 0) else 'failed')) + repo_info['treatment']['commit']))

    def _getRawArgs(self, repo_info):
        """Build the argv list forwarded to BenchmarkDriver for one commit."""
        platform = repo_info['platform']
        del repo_info['platform']
        unknowns = self.unknowns
        # Merge an externally supplied --info JSON blob into repo_info and
        # strip the flag (and its value) from the pass-through args.
        if ('--info' in unknowns):
            info_idx = unknowns.index('--info')
            info = json.loads(unknowns[(info_idx + 1)])
            deepMerge(repo_info, info)
            del unknowns[(info_idx + 1)]
            del unknowns[info_idx]
        info = json.dumps(repo_info)
        raw_args = []
        raw_args.extend(['--platform', getString(platform), '--framework', getString(self.args.framework), '--info', info])
        raw_args.extend(unknowns)
        if self.args.env:
            raw_args.append('--env')
            env_vars = self.args.env.split()
            for env_var in env_vars:
                raw_args.append(env_var)
        return raw_args
class vrrp(packet_base.PacketBase):
    """Base class for VRRP packets; version-specific subclasses register
    themselves via ``register_vrrp_version`` and are dispatched through
    ``_VRRP_VERSIONS``.

    NOTE(review): several methods below take ``packet_``, ``version`` or
    ``cls`` as their first parameter instead of ``self``; in the upstream
    packet library these are @staticmethod/@classmethod and the decorators
    appear to have been stripped from this copy -- confirm before calling
    them through instances.
    """

    _VERSION_PACK_STR = '!B'  # single byte holding the version/type nibbles
    _IPV4_ADDRESS_PACK_STR_RAW = '4s'
    _IPV4_ADDRESS_PACK_STR = ('!' + _IPV4_ADDRESS_PACK_STR_RAW)
    _IPV4_ADDRESS_LEN = struct.calcsize(_IPV4_ADDRESS_PACK_STR)
    _IPV6_ADDRESS_LEN = 16
    _IPV6_ADDRESS_PACK_STR_RAW = ('%ds' % _IPV6_ADDRESS_LEN)
    _IPV6_ADDRESS_PACK_STR = ('!' + _IPV6_ADDRESS_PACK_STR_RAW)
    # Recomputed from the pack string; same value (16) as the literal above.
    _IPV6_ADDRESS_LEN = struct.calcsize(_IPV6_ADDRESS_PACK_STR)
    _VRRP_VERSIONS = {}  # version number -> implementing subclass
    _SEC_IN_MAX_ADVER_INT_UNIT = {}  # version number -> max_adver_int units per second

    def get_payload(packet_):
        """Return the (ip, vrrp) protocol pair found in *packet_*, or (None, None)."""
        may_ip = None
        may_vrrp = None
        idx = 0
        for protocol in packet_:
            if (isinstance(protocol, ipv4.ipv4) or isinstance(protocol, ipv6.ipv6)):
                may_ip = protocol
                try:
                    # VRRP must immediately follow the IP header.
                    if isinstance(packet_.protocols[(idx + 1)], vrrp):
                        may_vrrp = packet_.protocols[(idx + 1)]
                finally:
                    # Stop at the first IP header either way; a ``break`` in
                    # ``finally`` also discards an IndexError from a
                    # truncated protocol list.
                    break
            idx += 1
        if (may_ip and may_vrrp):
            return (may_ip, may_vrrp)
        else:
            return (None, None)

    def register_vrrp_version(cls, version, sec_in_max_adver_int_unit):
        """Class-decorator factory: register the decorated class for *version*."""
        def _register_vrrp_version(cls_):
            cls._VRRP_VERSIONS[version] = cls_
            cls._SEC_IN_MAX_ADVER_INT_UNIT[version] = sec_in_max_adver_int_unit
            return cls_
        return _register_vrrp_version

    def sec_to_max_adver_int(version, seconds):
        """Convert *seconds* into the version-specific Max Adver Int field units."""
        return int((seconds * vrrp._SEC_IN_MAX_ADVER_INT_UNIT[version]))

    def max_adver_int_to_sec(version, max_adver_int):
        """Convert a Max Adver Int field value back to seconds."""
        return (float(max_adver_int) / vrrp._SEC_IN_MAX_ADVER_INT_UNIT[version])

    def __init__(self, version, type_, vrid, priority, count_ip, max_adver_int, checksum, ip_addresses, auth_type=None, auth_data=None):
        super(vrrp, self).__init__()
        self.version = version
        self.type = type_
        self.vrid = vrid
        self.priority = priority
        self.count_ip = count_ip
        self.max_adver_int = max_adver_int
        self.checksum = checksum
        self.ip_addresses = ip_addresses
        # count_ip must agree with the supplied address list.
        assert (len(list(ip_addresses)) == self.count_ip)
        # auth fields are presumably only meaningful for the VRRP version
        # that carries authentication -- confirm; None otherwise.
        self.auth_type = auth_type
        self.auth_data = auth_data
        # Address family is inferred from the first virtual IP address.
        self._is_ipv6 = is_ipv6(list(self.ip_addresses)[0])
        self.identification = 0  # rolling IPv4 Identification counter

    def checksum_ok(self, ipvx, vrrp_buf):
        """Delegate checksum validation to the version-specific subclass."""
        cls_ = self._VRRP_VERSIONS[self.version]
        return cls_.checksum_ok(self, ipvx, vrrp_buf)

    def max_adver_int_in_sec(self):
        """Return this packet's advertisement interval in seconds."""
        return self.max_adver_int_to_sec(self.version, self.max_adver_int)

    def is_ipv6(self):
        # NOTE(review): upstream this is a @property; ``create_packet`` below
        # tests ``self.is_ipv6`` without calling it, which is always truthy
        # for a plain bound method -- confirm the decorator was stripped.
        return self._is_ipv6

    def __len__(self):
        # Length is version-specific.
        cls_ = self._VRRP_VERSIONS[self.version]
        return cls_.__len__(self)

    def create_version(version, type_, vrid, priority, max_adver_int, ip_addresses, auth_type=None, auth_data=None):
        """Factory: build a version-specific VRRP instance with defaults filled in."""
        cls_ = vrrp._VRRP_VERSIONS.get(version, None)
        if (not cls_):
            raise ValueError(('unknown VRRP version %d' % version))
        if (priority is None):
            priority = VRRP_PRIORITY_BACKUP_DEFAULT
        count_ip = len(ip_addresses)
        if (max_adver_int is None):
            max_adver_int = cls_.sec_to_max_adver_int(VRRP_MAX_ADVER_INT_DEFAULT_IN_SEC)
        # checksum is passed as None here; presumably filled in at
        # serialization time by the subclass -- confirm.
        return cls_(version, type_, vrid, priority, count_ip, max_adver_int, None, ip_addresses, auth_type=auth_type, auth_data=auth_data)

    def get_identification(self):
        """Return the next 16-bit IPv4 Identification value, skipping 0."""
        self.identification += 1
        self.identification &= 65535
        if (self.identification == 0):
            self.identification += 1
            self.identification &= 65535
        return self.identification

    def create_packet(self, primary_ip_address, vlan_id=None):
        """Build a complete ethernet/[vlan]/ip/vrrp Packet for this instance."""
        if self.is_ipv6:  # NOTE(review): always truthy if is_ipv6 is a plain method -- see is_ipv6 above
            traffic_class = 192  # high IP precedence for control traffic (presumably CS6 -- confirm)
            flow_label = 0
            payload_length = (ipv6.ipv6._MIN_LEN + len(self))
            e = ethernet.ethernet(VRRP_IPV6_DST_MAC_ADDRESS, vrrp_ipv6_src_mac_address(self.vrid), ether.ETH_TYPE_IPV6)
            ip = ipv6.ipv6(6, traffic_class, flow_label, payload_length, inet.IPPROTO_VRRP, VRRP_IPV6_HOP_LIMIT, primary_ip_address, VRRP_IPV6_DST_ADDRESS)
        else:
            header_length = (ipv4.ipv4._MIN_LEN // 4)  # IHL in 32-bit words
            total_length = 0  # presumably filled in by the ipv4 serializer -- confirm
            tos = 192
            identification = self.get_identification()
            e = ethernet.ethernet(VRRP_IPV4_DST_MAC_ADDRESS, vrrp_ipv4_src_mac_address(self.vrid), ether.ETH_TYPE_IP)
            ip = ipv4.ipv4(4, header_length, tos, total_length, identification, 0, 0, VRRP_IPV4_TTL, inet.IPPROTO_VRRP, 0, primary_ip_address, VRRP_IPV4_DST_ADDRESS)
        p = packet.Packet()
        p.add_protocol(e)
        if (vlan_id is not None):
            # Insert an 802.1Q tag between the ethernet and IP headers.
            vlan_ = vlan.vlan(0, 0, vlan_id, e.ethertype)
            e.ethertype = ether.ETH_TYPE_8021Q
            p.add_protocol(vlan_)
        p.add_protocol(ip)
        p.add_protocol(self)
        return p

    def parser(cls, buf):
        """Peek at the version nibble of *buf* and dispatch to the subclass parser."""
        (version_type,) = struct.unpack_from(cls._VERSION_PACK_STR, buf)
        (version, _type) = vrrp_from_version_type(version_type)
        cls_ = cls._VRRP_VERSIONS[version]
        return cls_.parser(buf)

    def serialize_static(vrrp_, prev):
        """Serialize *vrrp_* (dispatching on its version) following header *prev*."""
        assert isinstance(vrrp_, vrrp)
        cls = vrrp._VRRP_VERSIONS[vrrp_.version]
        return cls.serialize_static(vrrp_, prev)

    def serialize(self, payload, prev):
        return self.serialize_static(self, prev)

    def is_valid_ttl(ipvx):
        """Return True if the IP header's TTL/hop-limit equals the VRRP constant."""
        version = ipvx.version
        if (version == 4):
            return (ipvx.ttl == VRRP_IPV4_TTL)
        if (version == 6):
            return (ipvx.hop_limit == VRRP_IPV6_HOP_LIMIT)
        raise ValueError(('invalid ip version %d' % version))

    def is_valid(self):
        """Validate via the version-specific subclass; False for unknown versions."""
        cls = self._VRRP_VERSIONS.get(self.version, None)
        if (cls is None):
            return False
        return cls.is_valid(self)
class OptionSeriesErrorbarSonificationContexttracksMappingFrequency(Options):
    """Audio ``frequency`` mapping options for an errorbar-series
    sonification context track.

    Fix: each getter/setter pair was declared as two plain ``def``s with the
    same name, so the second definition shadowed the first and the getters
    were unreachable dead code. Restored the ``@property``/``@<name>.setter``
    decorators this getter/setter pattern requires, so the options are read
    and written as attributes (``obj.max`` / ``obj.max = 1``).
    """

    @property
    def mapFunction(self):
        """Configured ``mapFunction`` value (default None)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Configured ``mapTo`` value (default None)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Configured ``max`` value (default None)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Configured ``min`` value (default None)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Configured ``within`` value (default None)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesPolygonSonificationTracksMappingRate(Options):
    """Audio ``rate`` mapping options for a polygon-series sonification track.

    Fix: each getter/setter pair was declared as two plain ``def``s with the
    same name, so the second definition shadowed the first and the getters
    were unreachable dead code. Restored the ``@property``/``@<name>.setter``
    decorators this getter/setter pattern requires.
    """

    @property
    def mapFunction(self):
        """Configured ``mapFunction`` value (default None)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Configured ``mapTo`` value (default None)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Configured ``max`` value (default None)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Configured ``min`` value (default None)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Configured ``within`` value (default None)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class TestPreferences():
    """GUI tests for the OpenSnitch PreferencesDialog (driven via qtbot).

    NOTE(review): test_load_ui_settings asserts values (duration index 4,
    timeout 30, ...) that match what test_save_popups_settings saved, even
    though setup_method resets the settings file -- the tests look
    order/state dependent; confirm against the dialog's persistence layer.
    """

    def reset_settings(self):
        """Remove the on-disk settings file so the dialog starts with defaults."""
        try:
            os.remove((os.environ['HOME'] + '/.config/opensnitch/settings.conf'))
        except Exception:
            # A missing settings file is fine -- nothing to reset.
            pass

    def setup_method(self):
        # Fresh dialog for every test.
        white_icon = QtGui.QIcon('../res/icon-white.svg')
        self.reset_settings()
        self.prefs = PreferencesDialog(appicon=white_icon)
        self.prefs.show()

    def run(self, qtbot):
        """Open the modal dialog and click Apply then Accept after 500 ms."""
        def handle_dialog():
            qtbot.mouseClick(self.prefs.applyButton, QtCore.Qt.LeftButton)
            qtbot.mouseClick(self.prefs.acceptButton, QtCore.Qt.LeftButton)
        # exec_() blocks the test, so the clicks must be queued beforehand.
        QtCore.QTimer.singleShot(500, handle_dialog)
        self.prefs.exec_()

    def test_save_popups_settings(self, qtbot):
        """Changing popup-related widgets and accepting must persist the values."""
        qtbot.addWidget(self.prefs)
        self.prefs.comboUIAction.setCurrentIndex(Config.ACTION_ALLOW_IDX)
        self.prefs.comboUITarget.setCurrentIndex(2)
        self.prefs.comboUIDuration.setCurrentIndex(4)
        self.prefs.comboUIDialogPos.setCurrentIndex(2)
        self.prefs.spinUITimeout.setValue(30)
        self.prefs.showAdvancedCheck.setChecked(True)
        self.prefs.uidCheck.setChecked(True)
        self.run(qtbot)
        assert ((self.prefs._cfg.getInt(self.prefs._cfg.DEFAULT_ACTION_KEY) == Config.ACTION_ALLOW_IDX) and (self.prefs.comboUIAction.currentText() == Config.ACTION_ALLOW))
        assert (self.prefs._cfg.getInt(self.prefs._cfg.DEFAULT_TARGET_KEY) == 2)
        assert (self.prefs._cfg.getInt(self.prefs._cfg.DEFAULT_DURATION_KEY) == 4)
        assert (self.prefs._cfg.getInt(self.prefs._cfg.DEFAULT_TIMEOUT_KEY) == 30)
        assert (self.prefs._cfg.getInt(self.prefs._cfg.DEFAULT_POPUP_POSITION) == 2)
        assert (self.prefs._cfg.getBool(self.prefs._cfg.DEFAULT_POPUP_ADVANCED) == True)
        assert (self.prefs._cfg.getBool(self.prefs._cfg.DEFAULT_POPUP_ADVANCED_UID) == True)

    def test_save_ui_settings(self, qtbot):
        """UI-tab settings (rules visibility, hidden columns) must persist."""
        self.prefs.checkUIRules.setChecked(True)
        self.prefs.comboUIRules.setCurrentIndex(1)
        self.prefs.checkHideNode.setChecked(False)
        self.prefs.checkHideProto.setChecked(False)
        self.run(qtbot)
        assert ((self.prefs._cfg.getBool(self.prefs._cfg.DEFAULT_IGNORE_RULES) == True) and (self.prefs._cfg.getInt(self.prefs._cfg.DEFAULT_IGNORE_TEMPORARY_RULES) == 1))
        # Presumably the column ids that remain visible after unhiding
        # Node and Protocol -- confirm against the stats view.
        cols = self.prefs._cfg.getSettings(Config.STATS_SHOW_COLUMNS)
        assert (cols == ['0', '2', '3', '5', '6'])

    def test_save_node_settings(self, qtbot, capsys):
        """Node-tab changes must be pushed to the node as a single notification."""
        self.prefs.comboNodeAction.setCurrentIndex(Config.ACTION_ALLOW_IDX)
        self.prefs.comboNodeMonitorMethod.setCurrentIndex(2)
        self.prefs.comboNodeLogLevel.setCurrentIndex(5)
        self.prefs.checkNodeLogUTC.setChecked(False)
        self.prefs.checkNodeLogMicro.setChecked(True)
        self.prefs.checkInterceptUnknown.setChecked(True)
        self.prefs.tabWidget.setCurrentIndex(self.prefs.TAB_NODES)
        self.prefs._node_needs_update = True
        self.run(qtbot)
        assert (len(self.prefs._notifications_sent) == 1)
        for n in self.prefs._notifications_sent:
            conf = json.loads(self.prefs._notifications_sent[n].data)
            assert (conf['InterceptUnknown'] == True)
            # Monitor-method combo index 2 presumably maps to 'audit' -- confirm.
            assert (conf['ProcMonitorMethod'] == 'audit')
            assert (conf['LogLevel'] == 5)
            assert (conf['LogUTC'] == False)
            assert (conf['LogMicro'] == True)
            assert (conf['DefaultAction'] == 'allow')

    def test_load_ui_settings(self, qtbot, capsys):
        """Cancelling must discard widget changes; reopening reloads saved values."""
        self.prefs.checkUIRules.setChecked(False)
        self.prefs.comboUIRules.setCurrentIndex(0)
        self.prefs.comboUITarget.setCurrentIndex(0)
        self.prefs.comboUIDuration.setCurrentIndex(0)
        self.prefs.checkHideNode.setChecked(True)
        self.prefs.checkHideProto.setChecked(True)
        def handle_dialog():
            # Cancel instead of Apply/Accept: nothing should be saved.
            qtbot.mouseClick(self.prefs.cancelButton, QtCore.Qt.LeftButton)
        QtCore.QTimer.singleShot(500, handle_dialog)
        self.prefs.exec_()
        self.prefs.show()
        print(self.prefs._cfg.getBool(self.prefs._cfg.DEFAULT_IGNORE_RULES))
        assert ((self.prefs.comboUIAction.currentIndex() == Config.ACTION_ALLOW_IDX) and (self.prefs.comboUIAction.currentText() == Config.ACTION_ALLOW))
        assert (self.prefs.checkUIRules.isChecked() == True)
        assert (self.prefs.comboUIRules.currentIndex() == 1)
        assert (self.prefs.comboUITarget.currentIndex() == 2)
        assert ((self.prefs.comboUIDuration.currentIndex() == 4) and (self.prefs.comboUIDuration.currentText() == Config.DURATION_30m))
        assert (self.prefs.comboUIDialogPos.currentIndex() == 2)
        assert (self.prefs.spinUITimeout.value() == 30)
# Fix: the two lines above the def were a bare ``('pyscf')`` expression and a
# top-level ``.parametrize(...)`` (a syntax error) -- stripped decorators.
# NOTE(review): ``('pyscf')`` most likely was ``@using('pyscf')`` (the
# calculator-availability marker used by these tests) -- confirm upstream.
@using('pyscf')
@pytest.mark.parametrize('step_length', [0.1, 0.2, 0.3])
def test_hcn_iso_gs2(step_length):
    """Run the Gonzalez-Schlegel IRC from the HCN/HNC TS; both branches must converge."""
    geom = geom_loader('lib:hcn_iso_hf_sto3g_ts_opt.xyz')
    calc = PySCF(basis='sto3g', verbose=0)
    geom.set_calculator(calc)
    irc_kwargs = {'step_length': step_length, 'displ_energy': 0.0005}
    irc = GonzalezSchlegel(geom, **irc_kwargs)
    irc.run()
    assert irc.forward_is_converged
    assert irc.backward_is_converged
def create_langchain_prompt(schema: Object, encoder: Encoder, type_descriptor: TypeDescriptor, *, validator: Optional[Validator]=None, input_formatter: InputFormatter=None, instruction_template: Optional[PromptTemplate]=None) -> ExtractionPromptTemplate:
    """Build an extraction prompt template for *schema*.

    The returned template pairs the schema/encoder with a KorParser so the
    model output can be decoded and, when a *validator* is given, validated.
    A custom *instruction_template* replaces the default instructions.
    """
    output_parser = KorParser(encoder=encoder, validator=validator, schema_=schema)
    template = instruction_template or DEFAULT_INSTRUCTION_TEMPLATE
    return ExtractionPromptTemplate(
        input_variables=['text'],
        output_parser=output_parser,
        encoder=encoder,
        node=schema,
        input_formatter=input_formatter,
        type_descriptor=type_descriptor,
        instruction_template=template,
    )
def gen_imports(function_list):
    """Print ``cimport`` lines for the non-C types used by *function_list*.

    Types with a known home module (per ``import_dict``) are grouped per
    module and printed as ``from flint.flintlib.<mod> cimport a, b``.

    Returns:
        set: the types that could not be resolved to a module.
    """
    imports = defaultdict(list)
    # Everything the functions reference, minus plain C types.
    needed = get_all_types(function_list) - c_types
    unresolved = set()  # idiomatic spelling of the original ``set([])``
    for typ in needed:
        if typ in import_dict:
            imports[import_dict[typ]].append(typ)
        else:
            unresolved.add(typ)
    for module, names in imports.items():
        # f-string replaces the original triple string concatenation.
        print(f"from flint.flintlib.{module} cimport {', '.join(names)}")
    return unresolved
class NodeToken(Node):
    """Expression-AST node holding a single token, with optional left/right children.

    ``get_code`` emits target code for literals, identifiers, binary and
    boolean operators, and (compound) assignments.
    NOTE(review): the emitted '<'/'>'/'<<' fragments look like data-pointer
    moves in a Brainfuck-targeting compiler -- confirm against the code
    generators referenced here.
    """

    def __init__(self, ids_map_list, left=None, token=None, right=None):
        Node.__init__(self, ids_map_list)
        self.left = left
        self.right = right
        self.token = token

    def get_code(self, current_pointer, *args, **kwargs):
        """Generate code for this subtree with the data pointer at *current_pointer*."""
        if (is_token_literal(self.token) or (self.token.type == Token.ID)):
            # Leaf node: literals and identifiers must have no children.
            assert ((self.left is None) and (self.right is None))
            if (self.token.type == Token.ID):
                return get_token_ID_code(self.ids_map_list, self.token, current_pointer)
            else:
                return get_literal_token_code(self.token)
        elif (self.token.type in [Token.BINOP, Token.RELOP, Token.BITWISE_SHIFT, Token.BITWISE_AND, Token.BITWISE_OR, Token.BITWISE_XOR]):
            # Binary operator: left operand at the current cell, right
            # operand at the next cell.
            code = self.left.get_code(current_pointer)
            code += self.right.get_code((current_pointer + 1))
            # '<<' presumably moves the pointer back to the operands -- confirm.
            code += '<<'
            # Pass the right operand's token along when it is a simple token
            # (some operators emit different code for literal operands).
            right_token = None
            if isinstance(self.right, NodeToken):
                right_token = self.right.token
            code += get_op_between_literals_code(self.token, right_token)
            return code
        elif (self.token.type in [Token.AND, Token.OR]):
            # Short-circuiting boolean operators have a dedicated generator.
            return get_op_boolean_operator_code(self, current_pointer)
        elif (self.token.type == Token.ASSIGN):
            # Assignment target must be an identifier.
            assert (self.left.token.type == Token.ID)
            if (self.token.data == '='):
                # Plain assignment: evaluate RHS, then copy into the variable.
                code = self.right.get_code(current_pointer)
                code += '<'
                code += get_copy_to_variable_code(self.ids_map_list, self.left.token, current_pointer)
                code += '>'
                return code
            else:
                # Compound assignment (a += b etc.): rewrite as a = a <op> b
                # and generate code for the equivalent plain assignment.
                assert (self.token.data in ['+=', '-=', '*=', '/=', '%=', '<<=', '>>=', '&=', '|=', '^='])
                op_node = self.assign_token_to_op_token(self.token)
                op_node.left = self.left
                op_node.right = self.right
                assign_token = Token(Token.ASSIGN, self.token.line, self.token.column, data='=')
                assignment_node = NodeToken(self.ids_map_list, left=self.left, token=assign_token, right=op_node)
                return assignment_node.get_code(current_pointer)
class KeRulesEngine(bre.BaseRulesEngine):
    """Rules engine for KE cluster scanning.

    Wraps a KeRuleBook built from the rule-definition file; the book is
    constructed lazily (or rebuilt on demand) under a lock.
    """

    def __init__(self, rules_file_path, snapshot_timestamp=None):
        """Record the snapshot timestamp and defer rule-book construction."""
        super(KeRulesEngine, self).__init__(rules_file_path=rules_file_path)
        self.rule_book = None
        self.snapshot_timestamp = snapshot_timestamp
        self._lock = threading.Lock()

    def build_rule_book(self, global_configs=None):
        """(Re)build the rule book from the loaded rule definitions."""
        with self._lock:
            self.rule_book = KeRuleBook(self._load_rule_definitions())

    def find_violations(self, ke_cluster, force_rebuild=False):
        """Return the violations for *ke_cluster*, building the book first if needed."""
        must_build = force_rebuild or self.rule_book is None
        if must_build:
            self.build_rule_book()
        return self.rule_book.find_violations(ke_cluster)
class OptionPlotoptionsColumnrangeSonificationContexttracksMappingPlaydelay(Options):
    """Audio ``playDelay`` mapping options for a columnrange-series
    sonification context track.

    Fix: each getter/setter pair was declared as two plain ``def``s with the
    same name, so the second definition shadowed the first and the getters
    were unreachable dead code. Restored the ``@property``/``@<name>.setter``
    decorators this getter/setter pattern requires.
    """

    @property
    def mapFunction(self):
        """Configured ``mapFunction`` value (default None)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Configured ``mapTo`` value (default None)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Configured ``max`` value (default None)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Configured ``min`` value (default None)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Configured ``within`` value (default None)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionPlotoptionsFunnelSonificationContexttracksMapping(Options):
    """Mapping options for a funnel-series sonification context track.

    Fix: ``text`` was declared as two plain ``def``s with the same name, so
    the getter was shadowed and unreachable, and the sub-option accessors
    follow the same attribute-style pattern. Restored the ``@property``
    (and ``@text.setter``) decorators this getter/setter pattern requires.
    """

    @property
    def frequency(self) -> 'OptionPlotoptionsFunnelSonificationContexttracksMappingFrequency':
        """Sub-options for the ``frequency`` mapping."""
        return self._config_sub_data('frequency', OptionPlotoptionsFunnelSonificationContexttracksMappingFrequency)

    @property
    def gapBetweenNotes(self) -> 'OptionPlotoptionsFunnelSonificationContexttracksMappingGapbetweennotes':
        """Sub-options for the ``gapBetweenNotes`` mapping."""
        return self._config_sub_data('gapBetweenNotes', OptionPlotoptionsFunnelSonificationContexttracksMappingGapbetweennotes)

    @property
    def highpass(self) -> 'OptionPlotoptionsFunnelSonificationContexttracksMappingHighpass':
        """Sub-options for the ``highpass`` filter mapping."""
        return self._config_sub_data('highpass', OptionPlotoptionsFunnelSonificationContexttracksMappingHighpass)

    @property
    def lowpass(self) -> 'OptionPlotoptionsFunnelSonificationContexttracksMappingLowpass':
        """Sub-options for the ``lowpass`` filter mapping."""
        return self._config_sub_data('lowpass', OptionPlotoptionsFunnelSonificationContexttracksMappingLowpass)

    @property
    def noteDuration(self) -> 'OptionPlotoptionsFunnelSonificationContexttracksMappingNoteduration':
        """Sub-options for the ``noteDuration`` mapping."""
        return self._config_sub_data('noteDuration', OptionPlotoptionsFunnelSonificationContexttracksMappingNoteduration)

    @property
    def pan(self) -> 'OptionPlotoptionsFunnelSonificationContexttracksMappingPan':
        """Sub-options for the ``pan`` mapping."""
        return self._config_sub_data('pan', OptionPlotoptionsFunnelSonificationContexttracksMappingPan)

    @property
    def pitch(self) -> 'OptionPlotoptionsFunnelSonificationContexttracksMappingPitch':
        """Sub-options for the ``pitch`` mapping."""
        return self._config_sub_data('pitch', OptionPlotoptionsFunnelSonificationContexttracksMappingPitch)

    @property
    def playDelay(self) -> 'OptionPlotoptionsFunnelSonificationContexttracksMappingPlaydelay':
        """Sub-options for the ``playDelay`` mapping."""
        return self._config_sub_data('playDelay', OptionPlotoptionsFunnelSonificationContexttracksMappingPlaydelay)

    @property
    def rate(self) -> 'OptionPlotoptionsFunnelSonificationContexttracksMappingRate':
        """Sub-options for the ``rate`` mapping."""
        return self._config_sub_data('rate', OptionPlotoptionsFunnelSonificationContexttracksMappingRate)

    @property
    def text(self):
        """Configured ``text`` value (default None)."""
        return self._config_get(None)

    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)

    @property
    def time(self) -> 'OptionPlotoptionsFunnelSonificationContexttracksMappingTime':
        """Sub-options for the ``time`` mapping."""
        return self._config_sub_data('time', OptionPlotoptionsFunnelSonificationContexttracksMappingTime)

    @property
    def tremolo(self) -> 'OptionPlotoptionsFunnelSonificationContexttracksMappingTremolo':
        """Sub-options for the ``tremolo`` mapping."""
        return self._config_sub_data('tremolo', OptionPlotoptionsFunnelSonificationContexttracksMappingTremolo)

    @property
    def volume(self) -> 'OptionPlotoptionsFunnelSonificationContexttracksMappingVolume':
        """Sub-options for the ``volume`` mapping."""
        return self._config_sub_data('volume', OptionPlotoptionsFunnelSonificationContexttracksMappingVolume)
# NOTE(review): the lines below are non-code residue from a dataset web page
# ("Subsets and Splits" / "No community queries yet / The top public SQL
# queries from the community will appear here once available.") -- not part
# of this module; flagged for removal.