code stringlengths 281 23.7M |
|---|
def get_register_erc1155() -> Contract:
    """Load the fetchai ERC1155 contract package and return it from the registry.

    Registers the contract spec on first use; subsequent calls reuse the
    already-registered spec.
    """
    erc1155_dir = Path(ROOT_DIR, 'packages', 'fetchai', 'contracts', 'erc1155')
    config = load_component_configuration(ComponentType.CONTRACT, erc1155_dir)
    config._directory = erc1155_dir
    config = cast(ContractConfig, config)
    public_id = str(config.public_id)
    # Register lazily: only the first call for a given public id loads the spec.
    if public_id not in contract_registry.specs:
        Contract.from_config(config)
    return contract_registry.make(public_id)
class AdAssetFeedSpecLinkURL(AbstractObject):
    """Graph API `AdAssetFeedSpecLinkURL` node (auto-generated SDK object).

    Plain data container: `Field` enumerates the API field names and
    `_field_types` maps each field to the SDK type string used when
    deserializing API responses.
    """

    def __init__(self, api=None):
        super(AdAssetFeedSpecLinkURL, self).__init__()
        # Marker attribute used by the SDK to identify this node type.
        self._isAdAssetFeedSpecLinkURL = True
        self._api = api

    class Field(AbstractObject.Field):
        # API field names exposed by this node.
        adlabels = 'adlabels'
        carousel_see_more_url = 'carousel_see_more_url'
        deeplink_url = 'deeplink_url'
        display_url = 'display_url'
        url_tags = 'url_tags'
        website_url = 'website_url'

    # Field name -> SDK type string, consumed by the response deserializer.
    _field_types = {
        'adlabels': 'list<AdAssetFeedSpecAssetLabel>',
        'carousel_see_more_url': 'string',
        'deeplink_url': 'string',
        'display_url': 'string',
        'url_tags': 'string',
        'website_url': 'string',
    }

    @classmethod
    def _get_field_enum_info(cls):
        # BUG FIX: the method takes `cls` but was not declared a classmethod,
        # so a call through an instance would have bound the instance as `cls`.
        field_enum_info = {}
        return field_enum_info
def init_async_web3(provider: 'AsyncBaseProvider'=cast('AsyncBaseProvider', default), middlewares: Optional[Sequence[Tuple[('AsyncMiddleware', str)]]]=()) -> 'AsyncWeb3':
    """Build an `AsyncWeb3` instance with an `AsyncEth` module and no ENS.

    The `name_to_address` middleware is stripped (it would recurse through
    ENS resolution) and a `stalecheck` middleware is appended if absent.
    """
    from web3 import AsyncWeb3 as AsyncWeb3Main
    from web3.eth import AsyncEth as AsyncEthMain
    # BUG FIX: the original popped from the list while iterating it with
    # enumerate(), which skips the element following each removal. Filter
    # into a fresh list instead.
    middlewares = [(mw, name) for (mw, name) in middlewares if name != 'name_to_address']
    if 'stalecheck' not in (name for (_mw, name) in middlewares):
        middlewares.append((_async_ens_stalecheck_middleware, 'stalecheck'))
    if provider is default:
        # No explicit provider: let AsyncWeb3 fall back to its default.
        return AsyncWeb3Main(middlewares=middlewares, ens=None, modules={'eth': AsyncEthMain})
    return AsyncWeb3Main(provider, middlewares=middlewares, ens=None, modules={'eth': AsyncEthMain})
def test_ignore_on_order_event_update(client, db, user, jwt):
    """A PATCH that tries to move an order to a different event is ignored:
    the request succeeds (200) but the order keeps its original event."""
    order_id = create_order(db, user)
    order = Order.query.get(order_id)
    original_event = order.event
    other_event = EventFactoryBasic()
    db.session.commit()
    payload = {'data': {'type': 'order', 'id': order_id, 'relationships': {'event': {'data': {'id': str(other_event.id), 'type': 'event'}}}}}
    response = client.patch(f'/v1/orders/{order_id}', content_type='application/vnd.api+json', headers=jwt, data=json.dumps(payload))
    db.session.refresh(order)
    assert response.status_code == 200
    # The relationship change must not have been applied.
    assert order.event == original_event
class u5Ex(object):
    """Quadratic test solution u(x) = x0^2 + x1^2 with its analytic gradient.

    The *XT variants take a time argument but the solution is steady, so
    time is ignored.
    """

    def __init__(self):
        pass

    def uOfX(self, x):
        # u = x0^2 + x1^2 (only the first two components are used)
        x0, x1 = x[0], x[1]
        return x0 * x0 + x1 * x1

    def uOfXT(self, X, T):
        # Steady solution: independent of T.
        return self.uOfX(X)

    def duOfX(self, X):
        # grad u = (2*x0, 2*x1), returned as a length-2 numpy array.
        return numpy.reshape(X[0:2], (2,)) * 2.0

    def duOfXT(self, X, T):
        # Gradient is time-independent as well.
        return self.duOfX(X)
def test_ge_runtimebatchrequest_sqlite_config():
    """Run a Great Expectations task against a runtime SQL batch request on
    the sqlite datasource (query supplied at call time)."""
    ge_task = GreatExpectationsTask(
        name='test4',
        datasource_name='sqlite_data',
        inputs=kwtypes(dataset=str),
        expectation_suite_name='sqlite.movies',
        data_connector_name='sqlite_data_connector',
        data_asset_name='sqlite_data',
        task_config=BatchRequestConfig(batch_identifiers={'pipeline_stage': 'validation'}),
    )

    def runtime_sqlite_wf():
        ge_task(dataset='SELECT * FROM movies')

    runtime_sqlite_wf()
def extractChenguangsorchardBlogspotCom(item):
    """Release parser for chenguangsorchard.blogspot.com feed items.

    Returns None for previews or items without a chapter/volume, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    # First matching tag wins.
    match = next((entry for entry in tagmap if entry[0] in item['tags']), None)
    if match is None:
        return False
    _tagname, name, tl_type = match
    return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
class Solution():
    """Locate the duplicated value in `nums` where values index back into
    the list, using Floyd's tortoise-and-hare cycle detection."""

    def findDuplicate(self, nums: List[int]) -> int:
        """Return the duplicated number, or -1 for lists with < 2 elements."""
        if len(nums) < 2:
            return -1
        # Phase 1: advance at 1x and 2x speed until the pointers meet
        # somewhere inside the cycle.
        tortoise = nums[0]
        hare = nums[nums[0]]
        while tortoise != hare:
            tortoise = nums[tortoise]
            hare = nums[nums[hare]]
        # Phase 2: restart one pointer at the origin; moving both at 1x,
        # they meet at the cycle entrance, i.e. the duplicate.
        hare = 0
        while hare != tortoise:
            hare = nums[hare]
            tortoise = nums[tortoise]
        return tortoise
def filter_firewall_internet_service_ipbl_reason_data(json):
    """Project the payload down to the fields this module supports.

    NOTE: the parameter shadows the `json` builtin module name; kept as-is
    because callers may pass it by keyword.
    """
    option_list = ['id', 'name']
    json = remove_invalid_fields(json)
    # Keep only supported, non-null attributes.
    return {
        attribute: json[attribute]
        for attribute in option_list
        if attribute in json and json[attribute] is not None
    }
class InlineResponse2002(ModelNormal):
    """Auto-generated OpenAPI model with a single `expires_at` property.

    NOTE(review): the bare `_property` and `_js_args_to_python_args` lines
    below look like stripped decorators from the generator (likely
    `@cached_property` and `@convert_js_args_to_python_args`); as written
    they are bare name expressions and would raise NameError when the class
    body executes — confirm against the generated original.
    """
    # Enum constraints and validation rules (none for this model).
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        # Types accepted for properties not declared in `openapi_types`.
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    # The model itself may not be null in payloads.
    _nullable = False
    _property
    def openapi_types():
        # Declared property name -> tuple of accepted types.
        return {'expires_at': (str,)}
    _property
    def discriminator():
        # No polymorphic discriminator for this model.
        return None
    # Python attribute name -> JSON key name.
    attribute_map = {'expires_at': 'expires_at'}
    # Properties that may only be set from server data, never by callers.
    read_only_vars = {'expires_at'}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Deserialization constructor: builds an instance from server data.

        NOTE(review): takes `cls` and calls `super().__new__` — presumably a
        @classmethod in the generated original; confirm.
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Bypass __init__ (which rejects read-only vars) via __new__.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Track composition chain to break cycles in composed schemas.
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when the configuration says so
            # and no additional properties are allowed.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    # Internal attributes that are always settable regardless of read-only rules.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Client-side constructor; rejects read-only properties.

        Raises ApiTypeError for positional args and ApiAttributeError when a
        read-only var (e.g. `expires_at`) is supplied.
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # NOTE(review): the attribute is set *before* the read-only check
            # raises — matches the indentation as given; confirm intended.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def test_imperative_tuples():
    """Imperatively wire two tuple-returning tasks into a workflow, verify
    the outputs, and verify that referencing unknown task outputs raises
    KeyError."""
    def t1() -> (int, str):
        return (3, 'three')

    def t3(a: int, b: str) -> typing.Tuple[(int, str)]:
        return ((a + 2), ('world' + b))

    wb = ImperativeWorkflow(name='my.workflow.a')
    node_a = wb.add_entity(t1)
    node_b = wb.add_entity(t3, a=node_a.outputs['o0'], b=node_a.outputs['o1'])
    wb.add_workflow_output('wf0', node_b.outputs['o0'], python_type=int)
    wb.add_workflow_output('wf1', node_b.outputs['o1'], python_type=str)
    assert wb() == (5, 'worldthree')
    # Unknown output names must be rejected when wiring.
    with pytest.raises(KeyError):
        wb = ImperativeWorkflow(name='my.workflow.b')
        node_a = wb.add_entity(t1)
        wb.add_entity(t3, a=node_a.outputs['bad'], b=node_a.outputs['o2'])
class ProgramSelect(InteractiveEntityBase, SelectEntity):
    """Home Assistant select entity exposing a Home Connect appliance's
    available programs.

    NOTE(review): most of these accessors (unique_id, icon, options, ...)
    look like stripped @property methods from the original integration —
    confirm against the upstream source.
    """
    def unique_id(self) -> str:
        # Entity id is derived from the appliance's haId.
        return f'{self.haId}_programs'
    def translation_key(self) -> str:
        return 'programs'
    def name_ext(self) -> str:
        return 'Programs'
    def icon(self) -> str:
        # Per-device-type icon; None lets HA fall back to the default.
        if (self._appliance.type in DEVICE_ICON_MAP):
            return DEVICE_ICON_MAP[self._appliance.type]
        return None
    def device_class(self) -> str:
        return f'{DOMAIN}__programs'
    def available(self) -> bool:
        # Available only when programs exist and remote control is either
        # unreported or reported active.
        return (super().available and self._appliance.available_programs and (('BSH.Common.Status.RemoteControlActive' not in self._appliance.status) or self._appliance.status['BSH.Common.Status.RemoteControlActive'].value))
    def options(self) -> list[str]:
        """Selectable options: server-translated names or raw program keys."""
        if self._appliance.available_programs:
            if (self._conf[CONF_TRANSLATION_MODE] == CONF_TRANSLATION_MODE_SERVER):
                # Fall back to the key when the server gave no name.
                return [(program.name if program.name else program.key) for program in self._appliance.available_programs.values()]
            return list(self._appliance.available_programs.keys())
        return []
    def current_option(self) -> str:
        """Currently applied program (name or key), or None when unknown."""
        current_program = self._appliance.get_applied_program()
        if current_program:
            if (self._appliance.available_programs and (current_program.key in self._appliance.available_programs)):
                CL.debug(_LOGGER, CL.LogMode.VERBOSE, 'Current selected program is %s', current_program.key)
                return (current_program.name if (current_program.name and (self._conf[CONF_TRANSLATION_MODE] == CONF_TRANSLATION_MODE_SERVER)) else current_program.key)
            CL.debug(_LOGGER, CL.LogMode.VERBOSE, 'Current program %s is not in available_programs', current_program.key)
        else:
            CL.debug(_LOGGER, CL.LogMode.VERBOSE, 'Current program is None')
        return None
    async def async_select_option(self, option: str) -> None:
        """Select a program on the appliance; wraps API errors in HomeAssistantError."""
        try:
            if (self._conf[CONF_TRANSLATION_MODE] == CONF_TRANSLATION_MODE_SERVER):
                # Map the translated display name back to its program key.
                # NOTE(review): `program` may be None if the name is not
                # found, which would raise AttributeError below — confirm.
                program = next((p for p in self._appliance.available_programs.values() if (p.name == option)), None)
                (await self._appliance.async_select_program(program_key=program.key))
            else:
                (await self._appliance.async_select_program(program_key=option))
        except HomeConnectError as ex:
            if ex.error_description:
                raise HomeAssistantError(f'Failed to set the selected program: {ex.error_description} ({ex.code} - {self._key}={option})')
            raise HomeAssistantError(f'Failed to set the selected program ({ex.code} - {self._key}={option})')
    async def async_on_update(self, appliance: Appliance, key: str, value) -> None:
        # Any appliance update may change options/current program: refresh HA state.
        self.async_write_ha_state()
def downgrade():
    """Alembic downgrade: revert data-use values on privacy declarations
    and on ctl policy rules using the downgrade mapping."""
    conn: Connection = op.get_bind()
    logger.info('Downgrading data use on privacy declaration')
    update_privacy_declaration_data_uses(conn, data_use_downgrades)
    logger.info('Downgrading ctl policy rule data uses')
    update_ctl_policy_data_uses(conn, data_use_downgrades)
def main():
    """Driver comparing pool execution strategies; the branch to run is
    hard-coded via the `if 1:` selector below.

    NOTE(review): relies on module-level `loop`, `g`, `REPS` and `Pool` —
    not visible here; confirm they are defined in the enclosing file.
    """
    async def run():
        # Fan out REPS calls of g concurrently and print the gathered results.
        result = (await asyncio.gather(*[pool.run_async(g, i) for i in range(REPS)]))
        print(result)
    async def map_async():
        async for result in pool.map_async(g, range(REPS), chunksize=1, timeout=None):
            pass
        # Prints only the *last* yielded result (loop variable after the loop).
        print(result)
    pool = Pool()
    # Hard-coded branch selector: only map_async() runs; the other two
    # branches are kept as dead alternatives for manual switching.
    if 1:
        loop.run_until_complete(map_async())
    elif 1:
        loop.run_until_complete(run())
    else:
        for r in pool.map(g, range(REPS)):
            pass
        # As above: prints only the last mapped value.
        print(r)
def make_python_patch():
    """Repackage the embeddable Windows Python zip with pywin32 included.

    Downloads the python nuget-style zip and the pywin32 installer (skipping
    downloads that are already cached on disk), extracts both, moves
    pywin32's PLATLIB into the python tree's site-packages, re-zips, and
    optionally uploads the result when '--upload' is on the command line.

    Relies on module-level config: pywin32_version, major_minor_version,
    version, revision, pywin32_base, download_url, upload_base, and the
    unzip_cmd/zip_cmd helpers.
    """
    pywin32_filename = ('pywin32-%s.win-amd64-py%s.exe' % (pywin32_version, major_minor_version))
    filename = ('python-%s-amd64.zip' % version)
    out_filename = ('python-%s-%s-amd64+pywin32.zip' % (version, revision))
    # Download inputs only when missing (cached between runs).
    if (not os.path.exists(pywin32_filename)):
        url = (pywin32_base + pywin32_filename)
        print(('Downloading pywin32: ' + url))
        urllib.request.urlretrieve(url, pywin32_filename)
    if (not os.path.exists(filename)):
        print(('Downloading python: ' + download_url))
        urllib.request.urlretrieve(download_url, filename)
    os.mkdir('python-nuget')
    check_call((unzip_cmd() + [os.path.abspath(filename)]), cwd='python-nuget')
    os.mkdir('pywin32')
    # The pywin32 .exe is a self-extracting archive; unzip exits 1 on the
    # installer stub bytes, which is acceptable.
    rtn = subprocess.call((unzip_cmd() + [os.path.abspath(pywin32_filename)]), cwd='pywin32')
    assert (rtn in [0, 1])
    os.mkdir(os.path.join('python-nuget', 'lib'))
    # BUG FIX: was 'toolss' — that placed site-packages outside the 'tools'
    # tree that gets zipped below, so pywin32 was missing from the output.
    shutil.move(os.path.join('pywin32', 'PLATLIB'), os.path.join('python-nuget', 'tools', 'Lib', 'site-packages'))
    check_call((zip_cmd() + [os.path.join('..', '..', out_filename), '.']), cwd='python-nuget/tools')
    shutil.rmtree('python-nuget')
    shutil.rmtree('pywin32')
    print(('Created: %s' % out_filename))
    if ('--upload' in sys.argv):
        # '-n' = no-clobber: never overwrite an existing uploaded artifact.
        upload_url = (upload_base + out_filename)
        print(('Uploading: ' + upload_url))
        cmd = ['gsutil', 'cp', '-n', out_filename, upload_url]
        print(' '.join(cmd))
        check_call(cmd)
class SolidRunDefinition():
    """Parse a SOLiD run-definition file.

    The file is tab-delimited: a line starting with 'version' is followed by
    one line of header values; a line starting with 'sampleName' carries the
    sample-table column names and every subsequent line is one sample row.
    """

    def __init__(self, run_definition_file):
        """Load and parse `run_definition_file`.

        On IOError the error is logged and the instance is left empty
        (and evaluates False).
        """
        self.file = run_definition_file
        # Header values (populated from the line after the 'version' line).
        self.version = None
        self.userId = None
        self.runType = None
        self.isMultiplexing = None
        self.runName = None
        self.runDesc = None
        self.mask = None
        self.protocol = None
        # Sample table: column names and one row (list of str) per sample.
        self.header_fields = []
        self.data = []
        try:
            self.populate()
        except IOError as ex:
            logging.error("Failed to populate SolidRunDefinition: '%s'" % ex)

    def __nonzero__(self):
        # Python 2 compatibility shim delegating to __bool__.
        return self.__bool__()

    def __bool__(self):
        """True once at least one sample row has been parsed."""
        return (len(self.data) != 0)

    def fields(self):
        """Return the sample-table column names."""
        return self.header_fields

    def nSamples(self):
        """Return the number of sample rows."""
        return len(self.data)

    def getDataItem(self, field, i):
        """Return column `field` of sample row `i`, or None (logged) when
        the field is not in the header."""
        try:
            pos = self.header_fields.index(field)
        except ValueError:
            logging.error("Field '%s' not found in '%s'" % (field, self.file))
            return None
        return self.data[i][pos]

    def populate(self):
        """Parse self.file into the header attributes and sample rows.

        Raises IOError (OSError) if the file cannot be opened.
        """
        reading_header = False
        reading_samples = False
        # BUG FIX: use a context manager so the handle is closed even when
        # parsing raises; the original only closed on the success path.
        with io.open(self.file, 'rt') as f:
            for line in f:
                if line.startswith('version'):
                    # The *next* line carries the header values.
                    reading_header = True
                elif reading_header:
                    data = line.strip().split('\t')
                    self.version = data[0]
                    self.userId = data[1]
                    self.runType = data[2]
                    self.isMultiplexing = data[3]
                    self.runName = data[4]
                    self.runDesc = data[5]
                    self.mask = data[6]
                    self.protocol = data[7]
                    reading_header = False
                if line.startswith('sampleName'):
                    # Column names of the sample table.
                    self.header_fields.extend(line.strip().split('\t'))
                    reading_samples = True
                elif reading_samples:
                    self.data.append(line.strip().split('\t'))
class OptionPlotoptionsScatter3dTooltip(Options):
    """Generated option wrapper for Highcharts `plotOptions.scatter3d.tooltip`.

    Each option appears as a getter (returns the configured value or the
    Highcharts default passed to `_config_get`) and a setter of the same
    name that stores the value via `_config`.

    NOTE(review): the paired defs look like stripped @property / @x.setter
    decorators from the generator — as written each second def simply
    shadows the first; confirm against the generated original.
    """
    def clusterFormat(self):
        return self._config_get('Clustered points: {point.clusterPointsAmount}')
    def clusterFormat(self, text: str):
        self._config(text, js_type=False)
    def dateTimeLabelFormats(self) -> 'OptionPlotoptionsScatter3dTooltipDatetimelabelformats':
        # Sub-options object (datetime label formats per time unit).
        return self._config_sub_data('dateTimeLabelFormats', OptionPlotoptionsScatter3dTooltipDatetimelabelformats)
    def distance(self):
        # Tooltip distance from the point, in px (Highcharts default 16).
        return self._config_get(16)
    def distance(self, num: float):
        self._config(num, js_type=False)
    def followPointer(self):
        return self._config_get(False)
    def followPointer(self, flag: bool):
        self._config(flag, js_type=False)
    def followTouchMove(self):
        return self._config_get(True)
    def followTouchMove(self, flag: bool):
        self._config(flag, js_type=False)
    def footerFormat(self):
        return self._config_get('')
    def footerFormat(self, text: str):
        self._config(text, js_type=False)
    def format(self):
        return self._config_get('undefined')
    def format(self, text: str):
        self._config(text, js_type=False)
    def headerFormat(self):
        return self._config_get('<span style="color:{point.color}"></span> <span style="font-size: 0.8em"> {series.name}</span><br/>')
    def headerFormat(self, text: str):
        self._config(text, js_type=False)
    def nullFormat(self):
        return self._config_get(None)
    def nullFormat(self, text: str):
        self._config(text, js_type=False)
    def nullFormatter(self):
        return self._config_get(None)
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)
    def pointFormat(self):
        return self._config_get('x: <b>{point.x}</b><br/>y: <b>{point.y}</b><br/>z: <b>{point.z}</b><br/>')
    def pointFormat(self, text: str):
        self._config(text, js_type=False)
    def pointFormatter(self):
        return self._config_get(None)
    def pointFormatter(self, value: Any):
        self._config(value, js_type=False)
    def valueDecimals(self):
        return self._config_get(None)
    def valueDecimals(self, num: float):
        self._config(num, js_type=False)
    def valuePrefix(self):
        return self._config_get(None)
    def valuePrefix(self, text: str):
        self._config(text, js_type=False)
    def valueSuffix(self):
        return self._config_get(None)
    def valueSuffix(self, text: str):
        self._config(text, js_type=False)
    def xDateFormat(self):
        return self._config_get(None)
    def xDateFormat(self, text: str):
        self._config(text, js_type=False)
class PcapWriter():
    """Write UDP-encapsulated payloads (e.g. GSMTAP) into a pcap file.

    Each payload is wrapped in a fabricated Ethernet/IPv4/UDP header and
    addressed to base_address + radio_id; control-plane and user-plane
    payloads go to `port_cp` and `port_up` respectively.

    BUG FIX: the original had redacted constants (empty `base_address`,
    missing pcap magic, missing IPv4 source address) that made the class a
    SyntaxError, and used `ts=datetime.datetime.now()` defaults which are
    evaluated once at definition time, freezing the timestamp.
    """

    def __init__(self, filename, port_cp=4729, port_up=47290):
        self.port_cp = port_cp
        self.port_up = port_up
        # IPv4 identification counter, wraps at 16 bits.
        self.ip_id = 0
        # Base IPv4 address 127.0.0.1; radio_id is added to it for the
        # destination so each radio gets a distinct loopback address.
        self.base_address = 0x7f000001
        self.pcap_file = open(filename, 'wb')
        # Dummy Ethernet header, EtherType 0x0800 (IPv4).
        self.eth_hdr = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00'
        # pcap global header: magic 0xa1b2c3d4, v2.4, snaplen 65535,
        # linktype 1 (Ethernet).
        pcap_global_hdr = struct.pack('<LHHLLLL', 0xa1b2c3d4, 2, 4, 0, 0, 65535, 1)
        self.pcap_file.write(pcap_global_hdr)

    def __enter__(self):
        return self

    def write_pkt(self, sock_content, port, radio_id=0, ts=None):
        """Write one packet to UDP `port`; `ts` defaults to the current time."""
        if ts is None:
            ts = datetime.datetime.now()
        # Captured length = payload + UDP(8) + IPv4(20) + Ethernet(14).
        total_len = ((len(sock_content) + 8) + 20) + 14
        pcap_hdr = struct.pack('<LLLL', int(ts.timestamp()), ts.microsecond, total_len, total_len)
        if radio_id <= 0:
            dest_address = self.base_address
        else:
            dest_address = self.base_address + radio_id
        # IPv4: ver/IHL 0x45, DF flag, TTL 64, proto 17 (UDP); checksum and
        # UDP checksum are left at 0xffff (not computed).
        ip_hdr = struct.pack('!BBHHBBBBHLL', 69, 0, (len(sock_content) + 8) + 20, self.ip_id, 64, 0, 64, 17, 65535, self.base_address, dest_address)
        udp_hdr = struct.pack('!HHHH', 13337, port, len(sock_content) + 8, 65535)
        self.pcap_file.write(pcap_hdr + self.eth_hdr + ip_hdr + udp_hdr + sock_content)
        self.ip_id += 1
        if self.ip_id > 65535:
            self.ip_id = 0

    def write_cp(self, sock_content, radio_id=0, ts=None):
        """Write a control-plane packet (port_cp)."""
        self.write_pkt(sock_content, self.port_cp, radio_id, ts)

    def write_up(self, sock_content, radio_id=0, ts=None):
        """Write a user-plane packet (port_up)."""
        self.write_pkt(sock_content, self.port_up, radio_id, ts)

    def __exit__(self, exc_type, exc_value, traceback):
        self.pcap_file.close()
class TestAutoValue(TestCase):
    """Exercise auto()/_init_/_order_/__new__ interactions across Enum,
    IntEnum, Flag and IntFlag.

    Throughout: plain Enums number members 1, 2, 3, ... while Flags assign
    powers of two (1, 2, 4, ...).

    NOTE(review): `_order_` plus value-less members under `_init_` implies
    these classes come from the `aenum` package (its extended Enum API),
    not stdlib `enum` — confirm against the file's imports.
    """
    def test_bare(self):
        """auto() with no _init_/__new__: sequential for Enum/IntEnum,
        powers of two for Flag/IntFlag."""
        class BareEnum(Enum):
            _order_ = 'ONE TWO THREE'
            ONE = auto()
            TWO = auto()
            THREE = auto()
        self.assertEqual(BareEnum.THREE.value, 3)
        class BareIntEnum(IntEnum):
            _order_ = 'ONE TWO THREE'
            ONE = auto()
            TWO = auto()
            THREE = auto()
        self.assertEqual(BareIntEnum.THREE, 3)
        class BareFlag(Flag):
            _order_ = 'ONE TWO THREE'
            ONE = auto()
            TWO = auto()
            THREE = auto()
        self.assertEqual(BareFlag.THREE.value, 4)
        class BareIntFlag(IntFlag):
            _order_ = 'ONE TWO THREE'
            ONE = auto()
            TWO = auto()
            THREE = auto()
        self.assertEqual(BareIntFlag.THREE, 4)
    def test_init_only_final(self):
        """_init_ = 'value description': the member tuple supplies only the
        description, so the value is auto-generated."""
        class InitEnumValue(Enum):
            _init_ = 'value description'
            _order_ = 'ONE TWO THREE'
            ONE = 'the loneliest number'
            TWO = 'the number with you'
            THREE = 'a triangle'
        self.assertEqual(InitEnumValue.THREE.value, 3)
        self.assertEqual(InitEnumValue.THREE.description, 'a triangle')
        class InitEnum(Enum):
            _init_ = 'value description'
            _order_ = 'ONE TWO THREE'
            ONE = 'the loneliest number'
            TWO = 'the number with you'
            THREE = 'a triangle'
        self.assertEqual(InitEnum.THREE.value, 3)
        self.assertEqual(InitEnum.THREE.description, 'a triangle')
        class InitIntEnum(IntEnum):
            _init_ = 'value description'
            _order_ = 'ONE TWO THREE'
            ONE = 'the loneliest number'
            TWO = 'the number with you'
            THREE = 'a triangle'
        self.assertEqual(InitIntEnum.THREE, 3)
        self.assertEqual(InitIntEnum.THREE.description, 'a triangle')
        class InitFlag(Flag):
            _init_ = 'value description'
            _order_ = 'ONE TWO THREE'
            ONE = 'the loneliest number'
            TWO = 'the number with you'
            THREE = 'a triangle'
        self.assertEqual(InitFlag.THREE.value, 4)
        self.assertEqual(InitFlag.THREE.description, 'a triangle')
        class InitIntFlag(IntFlag):
            _init_ = 'value description'
            _order_ = 'ONE TWO THREE'
            ONE = 'the loneliest number'
            TWO = 'the number with you'
            THREE = 'a triangle'
        self.assertEqual(InitIntFlag.THREE, 4)
        self.assertEqual(InitIntFlag.THREE.description, 'a triangle')
    def test_init_only_inherit(self):
        """_init_ declared on an empty base class is inherited by subclasses
        that define the members."""
        class InitInheritEnum(Enum):
            _init_ = 'value description'
        class InitEnum(InitInheritEnum):
            _order_ = 'ONE TWO THREE'
            ONE = 'the loneliest number'
            TWO = 'the number with you'
            THREE = 'a triangle'
        self.assertEqual(InitEnum.THREE.value, 3)
        self.assertEqual(InitEnum.THREE.description, 'a triangle')
        class InitInheritValueEnum(Enum):
            _init_ = 'value description'
        class InitEnum(InitInheritValueEnum):
            _order_ = 'ONE TWO THREE'
            ONE = 'the loneliest number'
            TWO = 'the number with you'
            THREE = 'a triangle'
        self.assertEqual(InitEnum.THREE.value, 3)
        self.assertEqual(InitEnum.THREE.description, 'a triangle')
        class InitIntEnum(int, InitInheritValueEnum):
            _order_ = 'ONE TWO THREE'
            ONE = 'the loneliest number'
            TWO = 'the number with you'
            THREE = 'a triangle'
        self.assertEqual(InitIntEnum.THREE, 3)
        self.assertEqual(InitIntEnum.THREE.description, 'a triangle')
        class InitInheritValueFlag(Flag):
            _init_ = 'value description'
        class InitFlag(InitInheritValueFlag):
            _order_ = 'ONE TWO THREE'
            ONE = 'the loneliest number'
            TWO = 'the number with you'
            THREE = 'a triangle'
        self.assertEqual(InitFlag.THREE.value, 4)
        self.assertEqual(InitFlag.THREE.description, 'a triangle')
        class InitIntFlag(int, InitInheritValueFlag):
            _order_ = 'ONE TWO THREE'
            ONE = 'the loneliest number'
            TWO = 'the number with you'
            THREE = 'a triangle'
        self.assertEqual(InitIntFlag.THREE, 4)
        self.assertEqual(InitIntFlag.THREE.description, 'a triangle')
    def test_new_only_final(self):
        """A custom __new__ on the final class receives the auto value and
        may attach extra attributes (`proof`)."""
        class NewFinalEnum(Enum):
            _order_ = 'ONE TWO THREE'
            def __new__(cls, value):
                member = object.__new__(cls)
                member._value_ = value
                member.proof = 'NFE1'
                return member
            ONE = auto()
            TWO = auto()
            THREE = auto()
        self.assertEqual(NewFinalEnum.THREE.value, 3)
        self.assertEqual(NewFinalEnum.TWO.proof, 'NFE1')
        class NewFinalIntEnum(IntEnum):
            _order_ = 'ONE TWO THREE'
            def __new__(cls, value):
                member = int.__new__(cls, value)
                member._value_ = value
                member.proof = 'NFE2'
                return member
            ONE = auto()
            TWO = auto()
            THREE = auto()
        self.assertEqual(NewFinalIntEnum.THREE, 3)
        self.assertEqual(NewFinalIntEnum.TWO.proof, 'NFE2')
        class NewFinalFlag(Flag):
            _order_ = 'ONE TWO THREE'
            def __new__(cls, value):
                member = object.__new__(cls)
                member._value_ = value
                member.proof = 'NFE3'
                return member
            ONE = auto()
            TWO = auto()
            THREE = auto()
        self.assertEqual(NewFinalFlag.THREE.value, 4)
        self.assertEqual(NewFinalFlag.TWO.proof, 'NFE3')
        class NewFinalIntFlag(IntFlag):
            _order_ = 'ONE TWO THREE'
            def __new__(cls, value):
                member = int.__new__(cls, value)
                member._value_ = value
                member.proof = 'NFE4'
                return member
            ONE = auto()
            TWO = auto()
            THREE = auto()
        self.assertEqual(NewFinalIntFlag.THREE, 4)
        self.assertEqual(NewFinalIntFlag.TWO.proof, 'NFE4')
        class NewFinalStrEnum(str, Enum):
            _order_ = 'AllReset Bright FG_Cyan BG_Black'
            def __new__(cls, value, code, description):
                # Member's str value is the ANSI escape; the enum value is
                # still auto-generated and passed in as `value`.
                str_value = ('\x1b[%sm' % code)
                obj = str.__new__(cls, str_value)
                obj._value_ = value
                obj.code = code
                obj.description = description
                return obj
            __str__ = str.__str__
            AllReset = ('0', 'reset all (colors and brightness)')
            Bright = ('1', 'bright lights!')
            FG_Cyan = ('36', 'cyan')
            BG_Black = ('40', 'black')
        self.assertEqual(NewFinalStrEnum.FG_Cyan.value, 3)
        self.assertEqual(NewFinalStrEnum.BG_Black.value, 4)
        self.assertEqual(NewFinalStrEnum.AllReset.code, '0')
        self.assertEqual(NewFinalStrEnum.Bright.description, 'bright lights!')
        class NewFinalStrFlag(str, Flag):
            _order_ = 'AllReset Bright FG_Cyan BG_Black'
            def __new__(cls, value, code, description):
                str_value = ('\x1b[%sm' % code)
                obj = str.__new__(cls, str_value)
                obj._value_ = value
                obj.code = code
                obj.description = description
                return obj
            __str__ = str.__str__
            AllReset = ('0', 'reset all (colors and brightness)')
            Bright = ('1', 'bright lights!')
            FG_Cyan = ('36', 'cyan')
            BG_Black = ('40', 'black')
        self.assertEqual(NewFinalStrFlag.FG_Cyan.value, 4)
        self.assertEqual(NewFinalStrFlag.BG_Black.value, 8)
        self.assertEqual(NewFinalStrFlag.AllReset.code, '0')
        self.assertEqual(NewFinalStrFlag.Bright.description, 'bright lights!')
    def test_new_only_inherited(self):
        """An inherited __new__ runs for subclass members; here it doubles
        the generated value, branching on the member type."""
        class NewInheritEnum(Enum):
            def __new__(cls, value):
                if (cls._member_type_ is int):
                    member = int.__new__(cls, (value * 2))
                else:
                    member = object.__new__(cls)
                member._value_ = (value * 2)
                member.proof = 'NIE'
                return member
        class NewFinalEnum(NewInheritEnum):
            _order_ = 'ONE TWO THREE'
            ONE = auto()
            TWO = auto()
            THREE = auto()
        self.assertEqual(NewFinalEnum.THREE.value, 6)
        self.assertEqual(NewFinalEnum.TWO.proof, 'NIE')
        class NewFinalIntEnum(int, NewInheritEnum):
            _order_ = 'ONE TWO THREE'
            ONE = auto()
            TWO = auto()
            THREE = auto()
        self.assertEqual(NewFinalIntEnum.THREE, 6)
        self.assertEqual(NewFinalIntEnum.TWO.proof, 'NIE')
        class NewInheritFlag(Flag):
            def __new__(cls, value):
                if (cls._member_type_ is int):
                    member = int.__new__(cls, (value * 2))
                else:
                    member = object.__new__(cls)
                member._value_ = (value * 2)
                member.proof = 'NIE'
                return member
        class NewFinalFlag(NewInheritFlag):
            _order_ = 'ONE TWO THREE'
            ONE = auto()
            TWO = auto()
            THREE = auto()
        self.assertEqual(NewFinalFlag.THREE.value, 8)
        self.assertEqual(NewFinalFlag.TWO.proof, 'NIE')
        class NewFinalIntFlag(int, NewInheritFlag):
            _order_ = 'ONE TWO THREE'
            ONE = auto()
            TWO = auto()
            THREE = auto()
        self.assertEqual(NewFinalIntFlag.THREE, 8)
        self.assertEqual(NewFinalIntFlag.TWO.proof, 'NIE')
    def test_init_new_only(self):
        """_init_ and __new__ together on the final class: __new__ consumes
        the value, _init_ binds the remaining arg as `description`."""
        class InitNewEnum(Enum):
            _init_ = 'value description'
            _order_ = 'ONE TWO THREE'
            def __new__(cls, value, *args):
                member = object.__new__(cls)
                member._value_ = value
                member.proof = 'INE1'
                return member
            ONE = 'the loneliest number'
            TWO = 'the number with you'
            THREE = 'a triangle'
        self.assertEqual(InitNewEnum.THREE.value, 3)
        self.assertEqual(InitNewEnum.THREE.description, 'a triangle')
        self.assertEqual(InitNewEnum.TWO.proof, 'INE1')
        class InitNewIntEnum(IntEnum):
            _init_ = 'value description'
            _order_ = 'ONE TWO THREE'
            def __new__(cls, value, *args):
                member = int.__new__(cls, value)
                member._value_ = value
                member.proof = 'INE2'
                return member
            ONE = 'the loneliest number'
            TWO = 'the number with you'
            THREE = 'a triangle'
        self.assertEqual(InitNewIntEnum.THREE, 3)
        self.assertEqual(InitNewIntEnum.THREE.description, 'a triangle')
        self.assertEqual(InitNewIntEnum.TWO.proof, 'INE2')
        class InitNewFlag(Flag):
            _init_ = 'value description'
            _order_ = 'ONE TWO THREE'
            def __new__(cls, value, *args):
                member = object.__new__(cls)
                member._value_ = value
                member.proof = 'INE3'
                return member
            ONE = 'the loneliest number'
            TWO = 'the number with you'
            THREE = 'a triangle'
        self.assertEqual(InitNewFlag.THREE.value, 4)
        self.assertEqual(InitNewFlag.THREE.description, 'a triangle')
        self.assertEqual(InitNewFlag.TWO.proof, 'INE3')
        class InitNewIntFlag(IntFlag):
            _init_ = 'value description'
            _order_ = 'ONE TWO THREE'
            def __new__(cls, value, *args):
                member = int.__new__(cls, value)
                member._value_ = value
                member.proof = 'INE4'
                return member
            ONE = 'the loneliest number'
            TWO = 'the number with you'
            THREE = 'a triangle'
        self.assertEqual(InitNewIntFlag.THREE, 4)
        self.assertEqual(InitNewIntFlag.THREE.description, 'a triangle')
        self.assertEqual(InitNewIntFlag.TWO.proof, 'INE4')
    def test_init_new_inherit(self):
        """_init_ and __new__ defined on a base class apply to members of
        subclasses."""
        class InitNew(Enum):
            _init_ = 'value description'
            def __new__(cls, value, *args):
                member = object.__new__(cls)
                member._value_ = value
                member.proof = 'IN'
                return member
        class InitNewEnum(InitNew):
            _order_ = 'ONE TWO THREE'
            ONE = 'the loneliest number'
            TWO = 'the number with you'
            THREE = 'a triangle'
        self.assertEqual(InitNewEnum.THREE.value, 3)
        self.assertEqual(InitNewEnum.THREE.description, 'a triangle')
        self.assertEqual(InitNewEnum.TWO.proof, 'IN')
        class InitNewInt(Enum):
            _init_ = 'value description'
            def __new__(cls, value, *args):
                member = int.__new__(cls, value)
                member._value_ = value
                member.proof = 'IN'
                return member
        class InitNewIntEnum(int, InitNewInt):
            _order_ = 'ONE TWO THREE'
            ONE = 'the loneliest number'
            TWO = 'the number with you'
            THREE = 'a triangle'
        self.assertEqual(InitNewIntEnum.THREE, 3)
        self.assertEqual(InitNewIntEnum.THREE.description, 'a triangle')
        self.assertEqual(InitNewIntEnum.TWO.proof, 'IN')
        class InitNewFlagBase(Flag):
            _init_ = 'value description'
            def __new__(cls, value, *args):
                member = object.__new__(cls)
                member._value_ = value
                member.proof = 'IN'
                return member
        class InitNewFlag(InitNewFlagBase):
            _order_ = 'ONE TWO THREE'
            ONE = 'the loneliest number'
            TWO = 'the number with you'
            THREE = 'a triangle'
        self.assertEqual(InitNewFlag.THREE.value, 4)
        self.assertEqual(InitNewFlag.THREE.description, 'a triangle')
        self.assertEqual(InitNewFlag.TWO.proof, 'IN')
        class InitNewIntFlagBase(int, Flag):
            _init_ = 'value description'
            def __new__(cls, value, *args):
                member = int.__new__(cls, value)
                member._value_ = value
                member.proof = 'IN'
                return member
        class InitNewIntFlag(InitNewIntFlagBase):
            _order_ = 'ONE TWO THREE'
            ONE = 'the loneliest number'
            TWO = 'the number with you'
            THREE = 'a triangle'
        self.assertEqual(InitNewIntFlag.THREE, 4)
        self.assertEqual(InitNewIntFlag.THREE.description, 'a triangle')
        self.assertEqual(InitNewIntFlag.TWO.proof, 'IN')
class CookieTokenAuthTests(mixins.CookieTokenAuthMixin, CookieTestCase):
    """Cookie-based JWT auth tests; the mixin drives the `tokenAuth`
    GraphQL mutation below against this test case's schema."""
    # GraphQL mutation executed by the mixin's tests.
    query = '\n    mutation TokenAuth($username: String!, $password: String!) {\n      tokenAuth(username: $username, password: $password) {\n        token\n        payload\n        refreshToken\n        refreshExpiresIn\n      }\n    }'
    # Mutations registered on the schema for refresh-token flows.
    refresh_token_mutations = {'token_auth': graphql_jwt.ObtainJSONWebToken}
class TestReportingDates(ApiBaseTest):
    """API tests for the reporting-dates endpoint: filter parameters and
    report-type cleanup."""

    def test_reporting_dates_filters(self):
        """Each supported filter parameter must return at least one row."""
        factories.ReportTypeFactory(report_type='YE', report_type_full='Year End')
        factories.ReportDateFactory(due_date=datetime.datetime(2014, 1, 2))
        factories.ReportDateFactory(report_year=2015)
        factories.ReportDateFactory(report_type='YE')
        factories.ReportDateFactory(create_date=datetime.datetime(2014, 3, 2))
        factories.ReportDateFactory(update_date=datetime.datetime(2014, 4, 2))
        cases = (
            ('min_due_date', '2014-01-02'),
            ('report_year', 2015),
            ('report_type', 'YE'),
            ('min_create_date', '2014-03-02'),
            ('max_update_date', '2014-04-02'),
        )
        for field, example in cases:
            url = api.url_for(ReportingDatesView, **{field: example})
            assert len(self._results(url)) > 0

    def test_clean_report_type(self):
        """The curly-brace annotation is stripped from report_type_full."""
        factories.ReportTypeFactory(report_type='Q1', report_type_full='April Quarterly {One of 4 valid Report Codes on Form 5, RptCode}')
        report_date = factories.ReportDateFactory(report_type='Q1', due_date=datetime.datetime(2015, 1, 2))
        db.session.flush()
        assert report_date.report_type_full == 'April Quarterly'
# NOTE(review): bare "_oriented" is a plain name expression and will raise
# NameError at import time; it looks like the remnant of a mangled
# "@ti.data_oriented" decorator (this class owns Taichi fields/kernels) --
# confirm against the original source.
_oriented
class ParticleSystem():
    """Holds all per-particle state for an SPH simulation.

    Reads fluid blocks, rigid blocks, and voxelized rigid bodies from the
    scene config, allocates the Taichi fields that store particle state,
    and provides grid-based neighborhood search via a counting sort.
    """

    def __init__(self, config: SimConfig, GGUI=False):
        # config: scene configuration object; GGUI: allocate extra
        # visualization buffers when True.
        self.cfg = config
        self.GGUI = GGUI
        # Domain bounds; the defaults are immediately overwritten from the
        # config, so the literal values below are effectively dead.
        self.domain_start = np.array([0.0, 0.0, 0.0])
        self.domain_start = np.array(self.cfg.get_cfg('domainStart'))
        self.domain_end = np.array([1.0, 1.0, 1.0])
        # NOTE(review): "domian_end" is a typo for "domain_end". It is used
        # consistently below (domain_size), so behavior is unaffected, but the
        # correctly spelled default above is never read again.
        self.domian_end = np.array(self.cfg.get_cfg('domainEnd'))
        self.domain_size = (self.domian_end - self.domain_start)
        self.dim = len(self.domain_size)
        # 2D or 3D only -- but see grid_num/flatten_grid_index below, which
        # index three axes and therefore effectively assume dim == 3.
        assert (self.dim > 1)
        self.simulation_method = self.cfg.get_cfg('simulationMethod')
        # Material tags stored per particle in self.material.
        self.material_solid = 0
        self.material_fluid = 1
        # Default radius, immediately overridden from the config.
        self.particle_radius = 0.01
        self.particle_radius = self.cfg.get_cfg('particleRadius')
        self.particle_diameter = (2 * self.particle_radius)
        # Kernel support radius (4x particle radius).
        self.support_radius = (self.particle_radius * 4.0)
        # Reference particle volume (0.8 packing factor times diameter^dim).
        self.m_V0 = (0.8 * (self.particle_diameter ** self.dim))
        # Number of currently active particles (0-dim Taichi field).
        self.particle_num = ti.field(int, shape=())
        # Background grid for neighbor search: cell edge = support radius.
        self.grid_size = self.support_radius
        self.grid_num = np.ceil((self.domain_size / self.grid_size)).astype(int)
        print('grid size: ', self.grid_num)
        # Presumably boundary padding consumed by the solvers -- verify at call sites.
        self.padding = self.grid_size
        # objectId -> config dict for every object in the scene.
        self.object_collection = dict()
        # objectIds of mesh-based rigid bodies (filled in the last loop below).
        self.object_id_rigid_body = set()
        # ---- Count particles per object so the fields can be sized ----
        fluid_blocks = self.cfg.get_fluid_blocks()
        fluid_particle_num = 0
        for fluid in fluid_blocks:
            particle_num = self.compute_cube_particle_num(fluid['start'], fluid['end'])
            fluid['particleNum'] = particle_num
            self.object_collection[fluid['objectId']] = fluid
            fluid_particle_num += particle_num
        rigid_blocks = self.cfg.get_rigid_blocks()
        rigid_particle_num = 0
        for rigid in rigid_blocks:
            particle_num = self.compute_cube_particle_num(rigid['start'], rigid['end'])
            rigid['particleNum'] = particle_num
            self.object_collection[rigid['objectId']] = rigid
            rigid_particle_num += particle_num
        rigid_bodies = self.cfg.get_rigid_bodies()
        for rigid_body in rigid_bodies:
            # Voxelize the mesh once and cache the sample points in the config dict.
            voxelized_points_np = self.load_rigid_body(rigid_body)
            rigid_body['particleNum'] = voxelized_points_np.shape[0]
            rigid_body['voxelizedPoints'] = voxelized_points_np
            self.object_collection[rigid_body['objectId']] = rigid_body
            rigid_particle_num += voxelized_points_np.shape[0]
        self.fluid_particle_num = fluid_particle_num
        self.solid_particle_num = rigid_particle_num
        self.particle_max_num = (fluid_particle_num + rigid_particle_num)
        self.num_rigid_bodies = (len(rigid_blocks) + len(rigid_bodies))
        print(f'Current particle num: {self.particle_num[None]}, Particle max num: {self.particle_max_num}')
        # Rest centers of mass per rigid object (sized with extra room for fluid blocks).
        if (self.num_rigid_bodies > 0):
            self.rigid_rest_cm = ti.Vector.field(self.dim, dtype=float, shape=(self.num_rigid_bodies + len(fluid_blocks)))
        # ---- Grid fields (flattened; multiplies three axes, so 3D only) ----
        self.grid_particles_num = ti.field(int, shape=int(((self.grid_num[0] * self.grid_num[1]) * self.grid_num[2])))
        self.grid_particles_num_temp = ti.field(int, shape=int(((self.grid_num[0] * self.grid_num[1]) * self.grid_num[2])))
        self.prefix_sum_executor = ti.algorithms.PrefixSumExecutor(self.grid_particles_num.shape[0])
        # ---- Per-particle state fields ----
        self.object_id = ti.field(dtype=int, shape=self.particle_max_num)
        self.x = ti.Vector.field(self.dim, dtype=float, shape=self.particle_max_num)    # position
        self.x_0 = ti.Vector.field(self.dim, dtype=float, shape=self.particle_max_num)  # initial/rest position
        self.v = ti.Vector.field(self.dim, dtype=float, shape=self.particle_max_num)    # velocity
        self.acceleration = ti.Vector.field(self.dim, dtype=float, shape=self.particle_max_num)
        self.m_V = ti.field(dtype=float, shape=self.particle_max_num)  # particle volume
        self.m = ti.field(dtype=float, shape=self.particle_max_num)    # particle mass
        self.density = ti.field(dtype=float, shape=self.particle_max_num)
        self.pressure = ti.field(dtype=float, shape=self.particle_max_num)
        self.material = ti.field(dtype=int, shape=self.particle_max_num)
        self.color = ti.Vector.field(3, dtype=int, shape=self.particle_max_num)
        self.is_dynamic = ti.field(dtype=int, shape=self.particle_max_num)
        # Extra per-particle state needed by DFSPH (simulationMethod == 4).
        if (self.cfg.get_cfg('simulationMethod') == 4):
            self.dfsph_factor = ti.field(dtype=float, shape=self.particle_max_num)
            self.density_adv = ti.field(dtype=float, shape=self.particle_max_num)
        # ---- Double buffers consumed by counting_sort ----
        self.object_id_buffer = ti.field(dtype=int, shape=self.particle_max_num)
        self.x_buffer = ti.Vector.field(self.dim, dtype=float, shape=self.particle_max_num)
        self.x_0_buffer = ti.Vector.field(self.dim, dtype=float, shape=self.particle_max_num)
        self.v_buffer = ti.Vector.field(self.dim, dtype=float, shape=self.particle_max_num)
        self.acceleration_buffer = ti.Vector.field(self.dim, dtype=float, shape=self.particle_max_num)
        self.m_V_buffer = ti.field(dtype=float, shape=self.particle_max_num)
        self.m_buffer = ti.field(dtype=float, shape=self.particle_max_num)
        self.density_buffer = ti.field(dtype=float, shape=self.particle_max_num)
        self.pressure_buffer = ti.field(dtype=float, shape=self.particle_max_num)
        self.material_buffer = ti.field(dtype=int, shape=self.particle_max_num)
        self.color_buffer = ti.Vector.field(3, dtype=int, shape=self.particle_max_num)
        self.is_dynamic_buffer = ti.field(dtype=int, shape=self.particle_max_num)
        if (self.cfg.get_cfg('simulationMethod') == 4):
            self.dfsph_factor_buffer = ti.field(dtype=float, shape=self.particle_max_num)
            self.density_adv_buffer = ti.field(dtype=float, shape=self.particle_max_num)
        # Per-particle flattened grid id plus scratch arrays for the sort.
        self.grid_ids = ti.field(int, shape=self.particle_max_num)
        self.grid_ids_buffer = ti.field(int, shape=self.particle_max_num)
        self.grid_ids_new = ti.field(int, shape=self.particle_max_num)
        # Visualization buffers (allocated only for GGUI rendering).
        self.x_vis_buffer = None
        if self.GGUI:
            self.x_vis_buffer = ti.Vector.field(self.dim, dtype=float, shape=self.particle_max_num)
            self.color_vis_buffer = ti.Vector.field(3, dtype=float, shape=self.particle_max_num)
        # ---- Instantiate all configured objects into the particle fields ----
        for fluid in fluid_blocks:
            obj_id = fluid['objectId']
            offset = np.array(fluid['translation'])
            start = (np.array(fluid['start']) + offset)
            end = (np.array(fluid['end']) + offset)
            scale = np.array(fluid['scale'])
            velocity = fluid['velocity']
            density = fluid['density']
            color = fluid['color']
            # Fluid cubes are always dynamic (is_dynamic=1, material=1).
            self.add_cube(object_id=obj_id, lower_corner=start, cube_size=((end - start) * scale), velocity=velocity, density=density, is_dynamic=1, color=color, material=1)
        for rigid in rigid_blocks:
            obj_id = rigid['objectId']
            offset = np.array(rigid['translation'])
            start = (np.array(rigid['start']) + offset)
            end = (np.array(rigid['end']) + offset)
            scale = np.array(rigid['scale'])
            velocity = rigid['velocity']
            density = rigid['density']
            color = rigid['color']
            is_dynamic = rigid['isDynamic']
            # Rigid cubes keep their configured dynamic flag (material=0).
            self.add_cube(object_id=obj_id, lower_corner=start, cube_size=((end - start) * scale), velocity=velocity, density=density, is_dynamic=is_dynamic, color=color, material=0)
        for rigid_body in rigid_bodies:
            obj_id = rigid_body['objectId']
            self.object_id_rigid_body.add(obj_id)
            num_particles_obj = rigid_body['particleNum']
            voxelized_points_np = rigid_body['voxelizedPoints']
            is_dynamic = rigid_body['isDynamic']
            if is_dynamic:
                velocity = np.array(rigid_body['velocity'], dtype=np.float32)
            else:
                # Static bodies get zero velocity regardless of config.
                velocity = np.array([0.0 for _ in range(self.dim)], dtype=np.float32)
            density = rigid_body['density']
            color = np.array(rigid_body['color'], dtype=np.int32)
            # Rigid-body particles: zero pressure, material tag 0 (solid).
            self.add_particles(obj_id, num_particles_obj, np.array(voxelized_points_np, dtype=np.float32), np.stack([velocity for _ in range(num_particles_obj)]), (density * np.ones(num_particles_obj, dtype=np.float32)), np.zeros(num_particles_obj, dtype=np.float32), np.array([0 for _ in range(num_particles_obj)], dtype=np.int32), (is_dynamic * np.ones(num_particles_obj, dtype=np.int32)), np.stack([color for _ in range(num_particles_obj)]))

    def build_solver(self):
        """Instantiate the solver selected by the config (0 = WCSPH, 4 = DFSPH)."""
        solver_type = self.cfg.get_cfg('simulationMethod')
        if (solver_type == 0):
            return WCSPHSolver(self)
        elif (solver_type == 4):
            return DFSPHSolver(self)
        else:
            raise NotImplementedError(f'Solver type {solver_type} has not been implemented.')

    def add_particle(self, p, obj_id, x, v, density, pressure, material, is_dynamic, color):
        """Write the full state of a single particle into slot p."""
        self.object_id[p] = obj_id
        self.x[p] = x
        self.x_0[p] = x
        self.v[p] = v
        self.density[p] = density
        self.m_V[p] = self.m_V0
        # Mass = reference volume times the particle's density.
        self.m[p] = (self.m_V0 * density)
        self.pressure[p] = pressure
        self.material[p] = material
        self.is_dynamic[p] = is_dynamic
        self.color[p] = color

    def add_particles(self, object_id: int, new_particles_num: int, new_particles_positions: ti.types.ndarray(), new_particles_velocity: ti.types.ndarray(), new_particle_density: ti.types.ndarray(), new_particle_pressure: ti.types.ndarray(), new_particles_material: ti.types.ndarray(), new_particles_is_dynamic: ti.types.ndarray(), new_particles_color: ti.types.ndarray()):
        """Append a batch of particles; thin wrapper over _add_particles."""
        self._add_particles(object_id, new_particles_num, new_particles_positions, new_particles_velocity, new_particle_density, new_particle_pressure, new_particles_material, new_particles_is_dynamic, new_particles_color)

    # NOTE(review): the ti.types.ndarray() annotations suggest this was a
    # @ti.kernel in the original; the decorator appears to have been lost.
    def _add_particles(self, object_id: int, new_particles_num: int, new_particles_positions: ti.types.ndarray(), new_particles_velocity: ti.types.ndarray(), new_particle_density: ti.types.ndarray(), new_particle_pressure: ti.types.ndarray(), new_particles_material: ti.types.ndarray(), new_particles_is_dynamic: ti.types.ndarray(), new_particles_color: ti.types.ndarray()):
        # Fill slots [particle_num, particle_num + new_particles_num).
        for p in range(self.particle_num[None], (self.particle_num[None] + new_particles_num)):
            v = ti.Vector.zero(float, self.dim)
            x = ti.Vector.zero(float, self.dim)
            for d in ti.static(range(self.dim)):
                v[d] = new_particles_velocity[((p - self.particle_num[None]), d)]
                x[d] = new_particles_positions[((p - self.particle_num[None]), d)]
            self.add_particle(p, object_id, x, v, new_particle_density[(p - self.particle_num[None])], new_particle_pressure[(p - self.particle_num[None])], new_particles_material[(p - self.particle_num[None])], new_particles_is_dynamic[(p - self.particle_num[None])], ti.Vector([new_particles_color[((p - self.particle_num[None]), i)] for i in range(3)]))
        self.particle_num[None] += new_particles_num

    def pos_to_index(self, pos):
        """Map a position to its integer grid-cell coordinates."""
        return (pos / self.grid_size).cast(int)

    def flatten_grid_index(self, grid_index):
        # Row-major flattening of a 3-axis cell index (assumes dim == 3).
        return ((((grid_index[0] * self.grid_num[1]) * self.grid_num[2]) + (grid_index[1] * self.grid_num[2])) + grid_index[2])

    def get_flatten_grid_index(self, pos):
        """Position -> flattened grid-cell id."""
        return self.flatten_grid_index(self.pos_to_index(pos))

    def is_static_rigid_body(self, p):
        """True when particle p is solid material and not dynamic."""
        return ((self.material[p] == self.material_solid) and (not self.is_dynamic[p]))

    def is_dynamic_rigid_body(self, p):
        """True when particle p is solid material and dynamic."""
        return ((self.material[p] == self.material_solid) and self.is_dynamic[p])

    # NOTE(review): uses ti.grouped/ti.atomic_add, so this was presumably a
    # @ti.kernel in the original; the decorator appears to have been lost.
    def update_grid_id(self):
        """Recompute each particle's flattened grid id and per-cell counts."""
        for I in ti.grouped(self.grid_particles_num):
            self.grid_particles_num[I] = 0
        for I in ti.grouped(self.x):
            grid_index = self.get_flatten_grid_index(self.x[I])
            self.grid_ids[I] = grid_index
            ti.atomic_add(self.grid_particles_num[grid_index], 1)
        # Keep a copy of the raw counts; counting_sort consumes it.
        for I in ti.grouped(self.grid_particles_num):
            self.grid_particles_num_temp[I] = self.grid_particles_num[I]

    def counting_sort(self):
        """Reorder every per-particle field by grid cell.

        Expects grid_particles_num to already hold the inclusive prefix sums
        (initialize_particle_system runs the prefix-sum pass in between) while
        grid_particles_num_temp still holds the raw per-cell counts, which the
        atomic decrements below consume.
        """
        for i in range(self.particle_max_num):
            # Walk indices in reverse.
            I = ((self.particle_max_num - 1) - i)
            base_offset = 0
            if ((self.grid_ids[I] - 1) >= 0):
                # Number of particles in all earlier cells (prefix sum).
                base_offset = self.grid_particles_num[(self.grid_ids[I] - 1)]
            self.grid_ids_new[I] = ((ti.atomic_sub(self.grid_particles_num_temp[self.grid_ids[I]], 1) - 1) + base_offset)
        # Scatter every field into its sorted slot in the buffer copies...
        for I in ti.grouped(self.grid_ids):
            new_index = self.grid_ids_new[I]
            self.grid_ids_buffer[new_index] = self.grid_ids[I]
            self.object_id_buffer[new_index] = self.object_id[I]
            self.x_0_buffer[new_index] = self.x_0[I]
            self.x_buffer[new_index] = self.x[I]
            self.v_buffer[new_index] = self.v[I]
            self.acceleration_buffer[new_index] = self.acceleration[I]
            self.m_V_buffer[new_index] = self.m_V[I]
            self.m_buffer[new_index] = self.m[I]
            self.density_buffer[new_index] = self.density[I]
            self.pressure_buffer[new_index] = self.pressure[I]
            self.material_buffer[new_index] = self.material[I]
            self.color_buffer[new_index] = self.color[I]
            self.is_dynamic_buffer[new_index] = self.is_dynamic[I]
            if ti.static((self.simulation_method == 4)):
                self.dfsph_factor_buffer[new_index] = self.dfsph_factor[I]
                self.density_adv_buffer[new_index] = self.density_adv[I]
        # ...then copy the buffers back into the primary fields.
        for I in ti.grouped(self.x):
            self.grid_ids[I] = self.grid_ids_buffer[I]
            self.object_id[I] = self.object_id_buffer[I]
            self.x_0[I] = self.x_0_buffer[I]
            self.x[I] = self.x_buffer[I]
            self.v[I] = self.v_buffer[I]
            self.acceleration[I] = self.acceleration_buffer[I]
            self.m_V[I] = self.m_V_buffer[I]
            self.m[I] = self.m_buffer[I]
            self.density[I] = self.density_buffer[I]
            self.pressure[I] = self.pressure_buffer[I]
            self.material[I] = self.material_buffer[I]
            self.color[I] = self.color_buffer[I]
            self.is_dynamic[I] = self.is_dynamic_buffer[I]
            if ti.static((self.simulation_method == 4)):
                self.dfsph_factor[I] = self.dfsph_factor_buffer[I]
                self.density_adv[I] = self.density_adv_buffer[I]

    def initialize_particle_system(self):
        """Rebuild the neighbor-search structure: bucket, prefix-sum, sort."""
        self.update_grid_id()
        self.prefix_sum_executor.run(self.grid_particles_num)
        self.counting_sort()

    def for_all_neighbors(self, p_i, task: ti.template(), ret: ti.template()):
        """Invoke task(p_i, p_j, ret) for every particle p_j within support_radius of p_i."""
        center_cell = self.pos_to_index(self.x[p_i])
        # Visit the 3^dim block of grid cells surrounding the particle's cell.
        for offset in ti.grouped(ti.ndrange(*((((- 1), 2),) * self.dim))):
            grid_index = self.flatten_grid_index((center_cell + offset))
            # grid_particles_num holds prefix sums after the sort, so a cell's
            # particle range is [prefix(cell-1), prefix(cell)).
            for p_j in range(self.grid_particles_num[ti.max(0, (grid_index - 1))], self.grid_particles_num[grid_index]):
                if ((p_i[0] != p_j) and ((self.x[p_i] - self.x[p_j]).norm() < self.support_radius)):
                    task(p_i, p_j, ret)

    def copy_to_numpy(self, np_arr: ti.types.ndarray(), src_arr: ti.template()):
        """Copy the first particle_num entries of a Taichi field into np_arr."""
        for i in range(self.particle_num[None]):
            np_arr[i] = src_arr[i]

    # NOTE(review): mutable default argument -- safe only because the list is
    # never mutated here, but a None default would be the conventional fix.
    def copy_to_vis_buffer(self, invisible_objects=[]):
        """Fill the GGUI buffers with every object not listed as invisible."""
        if (len(invisible_objects) != 0):
            self.x_vis_buffer.fill(0.0)
            self.color_vis_buffer.fill(0.0)
        for obj_id in self.object_collection:
            if (obj_id not in invisible_objects):
                self._copy_to_vis_buffer(obj_id)

    def _copy_to_vis_buffer(self, obj_id: int):
        """Copy positions and normalized colors of one object into the GGUI buffers."""
        assert self.GGUI
        for i in range(self.particle_max_num):
            if (self.object_id[i] == obj_id):
                self.x_vis_buffer[i] = self.x[i]
                # Colors are stored 0-255; GGUI expects floats in [0, 1].
                self.color_vis_buffer[i] = (self.color[i] / 255.0)

    def dump(self, obj_id):
        """Return the positions and velocities of one object's particles as numpy arrays."""
        np_object_id = self.object_id.to_numpy()
        mask = (np_object_id == obj_id).nonzero()
        np_x = self.x.to_numpy()[mask]
        np_v = self.v.to_numpy()[mask]
        return {'position': np_x, 'velocity': np_v}

    def load_rigid_body(self, rigid_body):
        """Load, transform, and voxelize a rigid-body mesh; return its sample points."""
        obj_id = rigid_body['objectId']
        mesh = tm.load(rigid_body['geometryFile'])
        mesh.apply_scale(rigid_body['scale'])
        offset = np.array(rigid_body['translation'])
        # Degrees -> radians using a hard-coded pi approximation.
        angle = (((rigid_body['rotationAngle'] / 360) * 2) * 3.1415926)
        direction = rigid_body['rotationAxis']
        # Rotate about the mesh's vertex centroid.
        rot_matrix = tm.transformations.rotation_matrix(angle, direction, mesh.vertices.mean(axis=0))
        mesh.apply_transform(rot_matrix)
        mesh.vertices += offset
        # Keep an untouched copy plus rest-pose data for the solver.
        mesh_backup = mesh.copy()
        rigid_body['mesh'] = mesh_backup
        rigid_body['restPosition'] = mesh_backup.vertices
        rigid_body['restCenterOfMass'] = mesh_backup.vertices.mean(axis=0)
        # NOTE(review): is_success is never checked, and the first voxelized
        # assignment below is immediately overwritten by the .fill() variant.
        is_success = tm.repair.fill_holes(mesh)
        voxelized_mesh = mesh.voxelized(pitch=self.particle_diameter)
        voxelized_mesh = mesh.voxelized(pitch=self.particle_diameter).fill()
        voxelized_points_np = voxelized_mesh.points
        print(f'rigid body {obj_id} num: {voxelized_points_np.shape[0]}')
        return voxelized_points_np

    def compute_cube_particle_num(self, start, end):
        """Number of particles a cube [start, end) yields at particle_diameter spacing."""
        num_dim = []
        for i in range(self.dim):
            num_dim.append(np.arange(start[i], end[i], self.particle_diameter))
        return reduce((lambda x, y: (x * y)), [len(n) for n in num_dim])

    def add_cube(self, object_id, lower_corner, cube_size, material, is_dynamic, color=(0, 0, 0), density=None, pressure=None, velocity=None):
        """Fill an axis-aligned cube with particles and append them to the system.

        density defaults to 1000.0 and pressure to 0.0 when not provided.
        """
        num_dim = []
        for i in range(self.dim):
            num_dim.append(np.arange(lower_corner[i], (lower_corner[i] + cube_size[i]), self.particle_diameter))
        num_new_particles = reduce((lambda x, y: (x * y)), [len(n) for n in num_dim])
        print('particle num ', num_new_particles)
        # Regular lattice of positions, flattened to (num_new_particles, dim).
        new_positions = np.array(np.meshgrid(*num_dim, sparse=False, indexing='ij'), dtype=np.float32)
        new_positions = new_positions.reshape((- 1), reduce((lambda x, y: (x * y)), list(new_positions.shape[1:]))).transpose()
        print('new position shape ', new_positions.shape)
        if (velocity is None):
            velocity_arr = np.full_like(new_positions, 0, dtype=np.float32)
        else:
            velocity_arr = np.array([velocity for _ in range(num_new_particles)], dtype=np.float32)
        material_arr = np.full_like(np.zeros(num_new_particles, dtype=np.int32), material)
        is_dynamic_arr = np.full_like(np.zeros(num_new_particles, dtype=np.int32), is_dynamic)
        color_arr = np.stack([np.full_like(np.zeros(num_new_particles, dtype=np.int32), c) for c in color], axis=1)
        density_arr = np.full_like(np.zeros(num_new_particles, dtype=np.float32), (density if (density is not None) else 1000.0))
        pressure_arr = np.full_like(np.zeros(num_new_particles, dtype=np.float32), (pressure if (pressure is not None) else 0.0))
        self.add_particles(object_id, num_new_particles, new_positions, velocity_arr, density_arr, pressure_arr, material_arr, is_dynamic_arr, color_arr)
def _secho_task_execution_status(status, nl=True):
    """Echo a task-execution phase name, color-coded by outcome.

    red = terminal failure, yellow = pending/in-flight, green = success,
    blue = anything else.
    """
    color_groups = (
        ('red', {_core_execution_models.TaskExecutionPhase.ABORTED,
                 _core_execution_models.TaskExecutionPhase.FAILED}),
        ('yellow', {_core_execution_models.TaskExecutionPhase.QUEUED,
                    _core_execution_models.TaskExecutionPhase.UNDEFINED,
                    _core_execution_models.TaskExecutionPhase.RUNNING}),
        ('green', {_core_execution_models.TaskExecutionPhase.SUCCEEDED}),
    )
    fg = 'blue'  # fallback for phases not covered above
    for color, phases in color_groups:
        if status in phases:
            fg = color
            break
    _click.secho('{:10} '.format(_tt(_core_execution_models.TaskExecutionPhase.enum_to_string(status))), bold=True, fg=fg, nl=nl)
# NOTE(review): bare "_cache" is a plain name expression and will raise
# NameError at import time; it looks like the remnant of a mangled caching
# decorator (e.g. @lru_cache, which would de-duplicate the warning per type)
# -- confirm against the original source.
_cache
def display_pickle_warning(python_type: str):
    """Log a warning that *python_type* will fall back to pickle transport."""
    logger.warning(f'Unsupported Type {python_type} found, Flyte will default to use PickleFile as the transport. Pickle can only be used to send objects between the exact same version of Python, and we strongly recommend to use python type that flyte support.')
class CustomAudienceTestCase(unittest.TestCase):
    """Tests for CustomAudience.format_params normalization, hashing, and multi-key schemas."""

    def test_format_params(self):
        # All three spellings normalize to "test" and must share one hash.
        payload = customaudience.CustomAudience.format_params(
            customaudience.CustomAudience.Schema.email_hash,
            [' test ', 'test', '..test..'],
        )
        expected_hash = '9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08'
        hashed = payload['payload']['data']
        assert hashed[0] == expected_hash
        assert hashed[1] == hashed[0]
        assert hashed[2] == hashed[1]

    def test_fail_when_no_app_ids(self):
        # The uid schema is invalid without app ids and must raise.
        self.assertRaises(
            exceptions.FacebookBadObjectError,
            lambda: customaudience.CustomAudience.format_params(
                customaudience.CustomAudience.Schema.uid, ['123123']),
        )

    def test_format_params_pre_hashed(self):
        # Pre-hashed values must pass through untouched.
        digest = hashlib.sha256('test'.encode('utf8')).hexdigest()
        payload = customaudience.CustomAudience.format_params(
            customaudience.CustomAudience.Schema.email_hash, [digest], pre_hashed=True)
        assert payload['payload']['data'][0] == digest

    def test_multi_key_params(self):
        # extern_id passes through raw; fn/email/ln normalize to "test" and hash.
        schema = [
            customaudience.CustomAudience.Schema.MultiKeySchema.extern_id,
            customaudience.CustomAudience.Schema.MultiKeySchema.fn,
            customaudience.CustomAudience.Schema.MultiKeySchema.email,
            customaudience.CustomAudience.Schema.MultiKeySchema.ln,
        ]
        payload = customaudience.CustomAudience.format_params(
            schema, [['abc123def', ' TEST ', 'test', '..test..']], is_raw=True)
        sha_test = '9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08'
        assert payload['payload']['data'][0] == ['abc123def', sha_test, sha_test, sha_test]

    def test_extern_id_key_single(self):
        # extern_id-only rows are passed through unchanged.
        payload = customaudience.CustomAudience.format_params(
            [customaudience.CustomAudience.Schema.MultiKeySchema.extern_id],
            [['abc123def'], ['abc234def'], ['abc345def'], ['abc456def']], is_raw=True)
        assert payload['payload']['data'] == [['abc123def'], ['abc234def'], ['abc345def'], ['abc456def']]
def _re_wrap_length(cmp, l):
if (cmp == '=='):
return '(?:[^ ]{{{}}}+)'.format(l)
elif (cmp == '!='):
return '(?:[^ ]{{1,{}}}+|[^ ]{{{},}}+)'.format((l - 1), (l + 1))
elif (cmp == '>='):
return '(?:[^ ]{{{},}}+)'.format(l)
elif (cmp == '<='):
return '(?:[^ ]{{1,{}}}+)'.format(l)
elif (cmp == '>'):
return '(?:[^ ]{{{},}}+)'.format((l + 1))
elif (cmp == '<'):
return '(?:[^ ]{{1,{}}}+)'.format((l - 1))
else:
raise ValueError(Errors.E126.format(bad=cmp, good=_COMPARISON_PREDICATES)) |
class DataTypeProxyModel(QSortFilterProxyModel):
    """Case-insensitive filter proxy that can also hide rows by metadata value."""

    def __init__(self, parent, model):
        QSortFilterProxyModel.__init__(self, parent)
        self.__show_summary_keys = True
        self.__show_block_keys = True
        self.__show_gen_kw_keys = True
        self.__show_gen_data_keys = True
        self.__show_custom_pca_keys = True
        # metadata key -> {value -> visible flag}; rows whose metadata matches
        # a value flagged invisible are filtered out.
        self._metadata_filters = {}
        self.setFilterCaseSensitivity(Qt.CaseInsensitive)
        self.setSourceModel(model)

    def filterAcceptsRow(self, index, q_model_index):
        """A row passes only if the text filter accepts it AND no metadata filter hides it."""
        if not QSortFilterProxyModel.filterAcceptsRow(self, index, q_model_index):
            return False
        model = self.sourceModel()
        key = model.itemAt(model.index(index, 0, q_model_index))
        for meta_key, value_flags in self._metadata_filters.items():
            for value, visible in value_flags.items():
                if not visible and meta_key in key['metadata'] and key['metadata'][meta_key] == value:
                    return False
        return True

    def sourceModel(self) -> 'DataTypeKeysListModel':
        # Only narrows the return annotation; delegates to the base class.
        return QSortFilterProxyModel.sourceModel(self)

    def setFilterOnMetadata(self, key, value, visible):
        """Show or hide rows whose metadata `key` equals `value`, then re-filter."""
        self._metadata_filters.setdefault(key, {})[value] = visible
        self.invalidateFilter()
class OptionPlotoptionsParetoAccessibility(Options):
    """Generated accessibility options for Pareto series plots.

    NOTE(review): every option appears as two same-named defs (a getter and a
    setter). As plain methods the second def silently overrides the first, so
    these were presumably @property / @<name>.setter pairs whose decorators
    were lost -- confirm against the generator output.
    """

    def description(self):
        # Getter: currently configured value (None when unset).
        return self._config_get(None)

    def description(self, text: str):
        # Setter: stored as plain data, not emitted as JavaScript.
        self._config(text, js_type=False)

    def descriptionFormat(self):
        return self._config_get(None)

    def descriptionFormat(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        return self._config_get(None)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def exposeAsGroupOnly(self):
        return self._config_get(None)

    def exposeAsGroupOnly(self, flag: bool):
        self._config(flag, js_type=False)

    def keyboardNavigation(self) -> 'OptionPlotoptionsParetoAccessibilityKeyboardnavigation':
        # Nested option group, created lazily by the base class.
        return self._config_sub_data('keyboardNavigation', OptionPlotoptionsParetoAccessibilityKeyboardnavigation)

    def point(self) -> 'OptionPlotoptionsParetoAccessibilityPoint':
        return self._config_sub_data('point', OptionPlotoptionsParetoAccessibilityPoint)
class op(bpy.types.Operator):
    """Blender operator: align selected UV islands/faces to world axes."""

    bl_idname = 'uv.textools_island_align_world'
    bl_label = 'Align World'
    bl_description = 'Align selected UV islands or faces to world / gravity directions'
    bl_options = {'REGISTER', 'UNDO'}

    # When set, each face is aligned independently instead of per island.
    bool_face: bpy.props.BoolProperty(name='Per Face', default=False, description='Process each face independently.')
    # World axis to align to; '-1' auto-detects.
    axis: bpy.props.EnumProperty(items=[('-1', 'Auto', 'Detect World axis to align to.'), ('0', 'X', 'Align to the X axis of the World.'), ('1', 'Y', 'Align to the Y axis of the World.'), ('2', 'Z', 'Align to the Z axis of the World.')], name='Axis', default='-1')

    # NOTE(review): takes `cls`, so this was presumably decorated with
    # @classmethod in the original (Blender requires poll to be a
    # classmethod) -- the decorator appears to have been lost.
    def poll(cls, context):
        # Available only in the UV editor, in edit mode, with UV layers
        # present and UV sync selection disabled.
        if (bpy.context.area.ui_type != 'UV'):
            return False
        if (not bpy.context.active_object):
            return False
        if (bpy.context.active_object.mode != 'EDIT'):
            return False
        if (not bpy.context.object.data.uv_layers):
            return False
        if bpy.context.scene.tool_settings.use_uv_select_sync:
            return False
        return True

    def execute(self, context):
        # Run main() once per selected object sharing the edit session.
        utilities_uv.multi_object_loop(main, self, context)
        return {'FINISHED'}
class OptionPlotoptionsFunnel3dSonificationDefaultinstrumentoptionsMappingHighpassFrequency(Options):
    """Generated sonification highpass-frequency mapping options for funnel3d plots.

    NOTE(review): as in the sibling generated classes, each option appears as
    two same-named defs (getter then setter); the second def overrides the
    first unless the original @property / setter decorators are restored --
    confirm against the generator output.
    """

    def mapFunction(self):
        # Getter: currently configured value (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stored as plain data, not emitted as JavaScript.
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class TCFVendorLegitimateInterestsRecord(UserSpecificConsentDetails, CommonVendorFields):
    """Schema for a TCF vendor's legitimate-interest purposes plus the user's consent state."""

    # Purposes this vendor processes under a legitimate-interest legal basis.
    purpose_legitimate_interests: List[EmbeddedPurpose] = []

    # NOTE(review): bare "_validator" will raise NameError; it looks like the
    # remnant of a mangled pydantic decorator (e.g. @root_validator) applied
    # to add_default_preference -- confirm against the original source.
    _validator
    def add_default_preference(cls, values: Dict[(str, Any)]) -> Dict[(str, Any)]:
        # Legitimate-interest records default the user's preference to opt_in.
        values['default_preference'] = UserConsentPreference.opt_in
        return values
class GraphDiffSummary(FidesSchema):
    """Counters summarizing how a dataset graph changed between two runs."""

    # Collection totals before and after the change.
    prev_collection_count: int = 0
    curr_collection_count: int = 0
    # Collections added to / removed from the graph.
    added_collection_count: int = 0
    removed_collection_count: int = 0
    # Edges added to / removed from the graph.
    added_edge_count: int = 0
    removed_edge_count: int = 0
    # Collections already handled by prior access / erasure processing.
    already_processed_access_collection_count: int = 0
    already_processed_erasure_collection_count: int = 0
    # Added edges that were skipped (presumably because they touch
    # already-processed collections -- confirm against the producer).
    skipped_added_edge_count: int = 0
class TransformOddAlignmentCase(unittest.TestCase):
def setUpClass(cls):
    # NOTE(review): takes `cls`, so this was presumably decorated with
    # @classmethod in the original; the decorator appears to have been lost.
    # Fix the torch RNG seed so tensor initialization is reproducible.
    torch.manual_seed(0)
def _create_permute_bmm_graph(self, A_shape, B_shape, bmm_type, const_A=None, const_B=None):
    """Build Y = <bmm_type>(A, B) and return the output tensor.

    bmm_type names an op in `ops` (e.g. 'bmm_rrr'). When const_A/const_B are
    given they are used as the operands; otherwise fp16 graph inputs named
    'input_0'/'input_1' are created from A_shape/B_shape. The output tensor
    is named 'target_bmm_tensor' so tests can locate it after compilation.
    """
    OP = getattr(ops, bmm_type, None)
    assert (OP is not None)
    A = (const_A if const_A else Tensor(shape=A_shape, dtype='float16', name='input_0', is_input=True))
    B = (const_B if const_B else Tensor(shape=B_shape, dtype='float16', name='input_1', is_input=True))
    Y = OP()(A, B)
    Y._attrs['name'] = 'target_bmm_tensor'
    return Y
def _extract_src_op(self, tensors):
ret = []
for tensor in tensors:
if (len(tensor.src_ops()) != 1):
ret.append(None)
else:
ret.append(list(tensor.src_ops())[0])
return ret
def _test_permute_bmm_A(self, B, shape_A, shape_B, origin_bmm, target_bmm, is_const, is_elementwise=False, strided_output=True, test_prefix=''):
    """Compile a bmm graph and check the A-operand alignment transform.

    Verifies that origin_bmm is rewritten to target_bmm (inserting a
    permute021 on A when the flavor changes), then validates numerics
    against a torch reference for every batch size in B.
    """
    # Output rows/cols depend on whether each operand is row- or
    # column-major ('r'/'c' in the bmm flavor name).
    M = (shape_A[(- 2)] if (origin_bmm[(- 3)] == 'r') else shape_A[(- 1)])
    N = (shape_B[(- 1)] if (origin_bmm[(- 2)] == 'r') else shape_B[(- 2)])
    for b in B:
        (const_A, const_B) = (None, None)
        if is_elementwise:
            # Feed A through an elementwise ADD so it is not a plain input.
            const_A = Tensor(shape=shape_A, dtype='float16', name='input_0', is_input=True)
            const_A = ops.elementwise(FuncEnum.ADD)(const_A, const_A)
        elif is_const:
            # Bind A as a compile-time constant (batch dim fixed to 1).
            const_A_data = torch.randn(_extract_shape(1, shape_A)).half().cuda()
            const_A = Tensor(shape=_extract_shape(1, shape_A), name='input_0')
            const_A._bind_data(_TorchConstantTensorData(const_A_data))
        bmm_tensor = self._create_permute_bmm_graph(shape_A, shape_B, origin_bmm, const_A=const_A, const_B=const_B)
        if strided_output:
            output = ops.elementwise(FuncEnum.COS)(bmm_tensor)
        else:
            output = bmm_tensor
        output._attrs['name'] = 'output_0'
        output._attrs['is_output'] = True
        target = detect_target()
        module = compile_model(output, target, './tmp', f'{test_prefix}alignment_permute_bmm_A_{b}_{origin_bmm}_to_{target_bmm}_{is_const}')
        # ---- Inspect the optimized graph ----
        exist_new_bmm = False
        for tensor in module.debug_sorted_graph:
            src_ops = tensor.src_ops()
            if (len(src_ops) == 0):
                continue
            if (not is_elementwise):
                self.assertEqual(len(src_ops), 1, 'constructed graph should only have single-source op tensors')
            src_op = list(tensor.src_ops())[0]
            if src_op._attrs['op'].startswith('bmm'):
                if (not is_elementwise):
                    self.assertEqual(src_op._attrs['op'], target_bmm)
                exist_new_bmm = True
                if is_const:
                    # Constant A: permute is folded away, nothing to inspect.
                    continue
                inputs_op = self._extract_src_op(src_op._attrs['inputs'])
                if (origin_bmm == target_bmm):
                    # Unchanged flavor: A must NOT have been permuted.
                    if (not is_elementwise):
                        self.assertNotEqual(inputs_op[0]._attrs['op'], 'permute021')
                else:
                    # Flavor changed: A must come from a permute021.
                    self.assertEqual(inputs_op[0]._attrs['op'], 'permute021')
        self.assertTrue(exist_new_bmm, "Can't find converted bmm op in graph")
        # ---- Numeric check against torch ----
        if is_const:
            X_pt = const_A_data
        else:
            X_pt = torch.randn(_extract_shape(b, shape_A)).cuda().half()
        X_pt_in = X_pt
        if (origin_bmm[(- 3)] == 'c'):
            X_pt_in = torch.permute(X_pt, [0, 2, 1])
        W_pt = torch.randn(_extract_shape(b, shape_B)).cuda().half()
        W_pt_in = W_pt
        if (origin_bmm[(- 2)] == 'c'):
            W_pt_in = torch.permute(W_pt, [0, 2, 1])
        if is_elementwise:
            # Graph doubles A; doubling W here is numerically equivalent
            # since 2*(X @ W) == (X+X) @ W == X @ (W+W).
            W_pt_in = torch.add(W_pt_in, W_pt_in)
        Y_pt = torch.matmul(X_pt_in, W_pt_in)
        if strided_output:
            Y_pt = torch.cos(Y_pt)
        inputs = {'input_1': W_pt}
        if (not is_const):
            inputs['input_0'] = X_pt
        y = torch.empty([b, M, N]).cuda().half()
        module.run_with_tensors(inputs, [y])
        self.assertTrue(torch.allclose(Y_pt, y, atol=0.1, rtol=0.1))
def test_permute_bmm_A(self):
    """Exercise A-operand permute handling across const / dynamic / elementwise cases."""
    B = [1, 3]
    batch_dim = shape_utils.gen_int_var_min_max(B)
    scenarios = [
        ([batch_dim, 8, 7], [batch_dim, 7, 16], 'bmm_rrr', 'bmm_crr', {'is_const': True}),
        ([8, 7], [batch_dim, 7, 16], 'bmm_rrr', 'bmm_crr', {'is_const': True, 'test_prefix': '2d_broadcast_'}),
        ([batch_dim, 8, 7], [batch_dim, 7, 16], 'bmm_rrr', 'bmm_crr', {'is_const': False}),
        ([batch_dim, 8, 7], [batch_dim, 7, 16], 'bmm_rrr', 'bmm_rrr', {'is_const': False, 'is_elementwise': True}),
        ([batch_dim, 8, 7], [batch_dim, 8, 16], 'bmm_crr', 'bmm_rrr', {'is_const': False, 'strided_output': False}),
        ([batch_dim, 8, 7], [batch_dim, 8, 16], 'bmm_crr', 'bmm_crr', {'is_const': False, 'is_elementwise': True}),
    ]
    for shape_a, shape_b, origin, target, extra_kwargs in scenarios:
        self._test_permute_bmm_A(B, shape_a, shape_b, origin, target, **extra_kwargs)
def _test_permute_bmm_B(self, B, shape_A, shape_B, origin_bmm, target_bmm, is_const, is_elementwise=False, strided_output=True):
    """Compile a bmm graph and check the B-operand alignment transform.

    Mirror of _test_permute_bmm_A for the second operand: verifies that
    origin_bmm is rewritten to target_bmm (inserting a permute021 on B when
    the flavor changes), then validates numerics against torch for every
    batch size in B.
    """
    M = (shape_A[(- 2)] if (origin_bmm[(- 3)] == 'r') else shape_A[(- 1)])
    N = (shape_B[(- 1)] if (origin_bmm[(- 2)] == 'r') else shape_B[(- 2)])
    for b in B:
        (const_A, const_B) = (None, None)
        if is_elementwise:
            # Feed B through an elementwise ADD so it is not a plain input.
            const_B = Tensor(shape=shape_B, dtype='float16', name='input_1', is_input=True)
            const_B = ops.elementwise(FuncEnum.ADD)(const_B, const_B)
        elif is_const:
            # Bind B as a compile-time constant (batch dim fixed to 1).
            const_B_data = torch.randn(_extract_shape(1, shape_B)).half().cuda()
            const_B = Tensor(shape=_extract_shape(1, shape_B), name='input_1')
            const_B._bind_data(_TorchConstantTensorData(const_B_data))
        bmm_tensor = self._create_permute_bmm_graph(shape_A, shape_B, origin_bmm, const_A=const_A, const_B=const_B)
        if strided_output:
            output = ops.elementwise(FuncEnum.COS)(bmm_tensor)
        else:
            output = bmm_tensor
        output._attrs['name'] = 'output_0'
        output._attrs['is_output'] = True
        target = detect_target()
        module = compile_model(output, target, './tmp', f'alignment_permute_bmm_B_{b}_{origin_bmm}_to_{target_bmm}_{is_const}')
        # ---- Inspect the optimized graph ----
        exist_new_bmm = False
        for tensor in module.debug_sorted_graph:
            src_ops = tensor.src_ops()
            if (len(src_ops) == 0):
                continue
            if (not is_elementwise):
                self.assertEqual(len(src_ops), 1, 'constructed graph should only have single-source op tensors')
            src_op = list(tensor.src_ops())[0]
            if src_op._attrs['op'].startswith('bmm'):
                self.assertEqual(src_op._attrs['op'], target_bmm)
                exist_new_bmm = True
                if is_const:
                    # Constant B: permute is folded away, nothing to inspect.
                    continue
                inputs_op = self._extract_src_op(src_op._attrs['inputs'])
                if (origin_bmm == target_bmm):
                    # Unchanged flavor: B must NOT have been permuted.
                    if (not is_elementwise):
                        self.assertNotEqual(inputs_op[1]._attrs['op'], 'permute021')
                else:
                    # Flavor changed: B must come from a permute021.
                    self.assertEqual(inputs_op[1]._attrs['op'], 'permute021')
        self.assertTrue(exist_new_bmm, "Can't find converted bmm op in graph")
        # ---- Numeric check against torch ----
        if is_const:
            W_pt = const_B_data
        else:
            W_pt = torch.randn(_extract_shape(b, shape_B)).cuda().half()
        W_pt_in = W_pt
        if (origin_bmm[(- 2)] == 'c'):
            W_pt_in = torch.permute(W_pt, [0, 2, 1])
        X_pt = torch.randn(_extract_shape(b, shape_A)).cuda().half()
        X_pt_in = X_pt
        if (origin_bmm[(- 3)] == 'c'):
            X_pt_in = torch.permute(X_pt, [0, 2, 1])
        if is_elementwise:
            # Matches the graph's B-doubling elementwise ADD.
            W_pt_in = torch.add(W_pt_in, W_pt_in)
        Y_pt = torch.matmul(X_pt_in, W_pt_in)
        if strided_output:
            Y_pt = torch.cos(Y_pt)
        inputs = {'input_0': X_pt}
        if (not is_const):
            inputs['input_1'] = W_pt
        y = torch.empty([b, M, N]).cuda().half()
        module.run_with_tensors(inputs, [y])
        self.assertTrue(torch.allclose(Y_pt, y, atol=0.1, rtol=0.1))
def test_permute_bmm_B(self):
    """Exercise B-operand permute handling across const / dynamic / elementwise cases."""
    B = [1, 3]
    batch_dim = shape_utils.gen_int_var_min_max(B)
    scenarios = [
        ([batch_dim, 7, 8], [batch_dim, 12, 7], 'bmm_ccr', 'bmm_crr', {'is_const': True}),
        ([batch_dim, 8, 16], [16, 7], 'bmm_rrr', 'bmm_rcr', {'is_const': True}),
        ([batch_dim, 7, 8], [batch_dim, 16, 7], 'bmm_ccr', 'bmm_crr', {'is_const': False}),
        ([batch_dim, 7, 8], [batch_dim, 16, 7], 'bmm_ccr', 'bmm_ccr', {'is_const': False, 'is_elementwise': True}),
        ([batch_dim, 8, 16], [batch_dim, 16, 7], 'bmm_rrr', 'bmm_rcr', {'is_const': False, 'strided_output': False}),
        ([batch_dim, 8, 16], [batch_dim, 16, 7], 'bmm_rrr', 'bmm_rrr', {'is_const': False, 'is_elementwise': True}),
    ]
    for shape_a, shape_b, origin, target, extra_kwargs in scenarios:
        self._test_permute_bmm_B(B, shape_a, shape_b, origin, target, **extra_kwargs)
def test_permute_bmm_epilogue(self):
    """bmm_crr followed by ADD should fuse into bmm_rrr_add with a permuted A."""
    B = [1, 3]
    M = 7
    K = 8
    N = 16
    batch_dim = shape_utils.gen_int_var_min_max(B)
    shape_A = [batch_dim, K, M]  # column-major A for bmm_crr
    shape_B = [batch_dim, K, N]
    shape_D = [batch_dim, M, N]  # bias/epilogue operand
    D = Tensor(shape=shape_D, dtype='float16', name='input_2', is_input=True)
    bmm_tensor = self._create_permute_bmm_graph(shape_A, shape_B, 'bmm_crr')
    add_tensor = ops.elementwise(FuncEnum.ADD)(bmm_tensor, D)
    output = ops.elementwise(FuncEnum.COS)(add_tensor)
    output._attrs['name'] = 'output_0'
    output._attrs['is_output'] = True
    target = detect_target()
    module = compile_model(output, target, './tmp', 'alignment_permute_bmm_epilogue')
    # ---- Inspect the optimized graph: expect bmm_rrr_add fed by permute021 ----
    exist_new_bmm = False
    for tensor in module.debug_sorted_graph:
        src_ops = tensor.src_ops()
        if (len(src_ops) == 0):
            continue
        self.assertEqual(len(src_ops), 1, 'constructed graph should only have single-source op tensors')
        src_op = list(tensor.src_ops())[0]
        if src_op._attrs['op'].startswith('bmm'):
            self.assertEqual(src_op._attrs['op'], 'bmm_rrr_add')
            exist_new_bmm = True
            inputs_op = self._extract_src_op(src_op._attrs['inputs'])
            self.assertEqual(inputs_op[0]._attrs['op'], 'permute021')
    self.assertTrue(exist_new_bmm, "Can't find converted bmm op in graph")
    # ---- Numeric check against torch for each batch size ----
    for b in B:
        X_pt = torch.randn(b, K, M).cuda().half()
        W_pt = torch.randn(b, K, N).cuda().half()
        D_pt = torch.randn(b, M, N).cuda().half()
        Y_pt = torch.cos((torch.matmul(torch.permute(X_pt, [0, 2, 1]), W_pt) + D_pt))
        y = torch.empty([b, M, N]).cuda().half()
        module.run_with_tensors({'input_0': X_pt, 'input_1': W_pt, 'input_2': D_pt}, [y])
        self.assertTrue(torch.allclose(Y_pt, y, atol=0.1, rtol=0.1))
def test_bmm_pad_special_case(self):
    """bmm with an odd inner dimension (K=3) that requires alignment padding.

    Verifies the padding pass leaves the op type as bmm_rcr (no layout
    rewrite), then checks the compiled output against eager PyTorch for
    every batch size.
    """
    B = [1, 3]
    M = 2
    K = 3
    N = 6
    batch_dim = shape_utils.gen_int_var_min_max(B)
    shape_A = [batch_dim, M, K]
    shape_B = [batch_dim, N, K]
    bmm_tensor = self._create_permute_bmm_graph(shape_A, shape_B, 'bmm_rcr')
    output = ops.elementwise(FuncEnum.COS)(bmm_tensor)
    output._attrs['name'] = 'output_0'
    output._attrs['is_output'] = True
    target = detect_target()
    module = compile_model(output, target, './tmp', 'alignment_pad_bmm')
    # The optimized graph must still contain a bmm_rcr op.
    exist_new_bmm = False
    for tensor in module.debug_sorted_graph:
        src_ops = tensor.src_ops()
        if (len(src_ops) == 0):
            continue
        self.assertEqual(len(src_ops), 1, 'constructed graph should only have single-source op tensors')
        src_op = list(tensor.src_ops())[0]
        if src_op._attrs['op'].startswith('bmm'):
            self.assertEqual(src_op._attrs['op'], 'bmm_rcr')
            exist_new_bmm = True
    self.assertTrue(exist_new_bmm, "Can't find converted bmm op in graph")
    for b in B:
        X_pt = torch.randn(b, M, K).cuda().half()
        W_pt = torch.randn(b, N, K).cuda().half()
        Y_pt = torch.cos(torch.matmul(X_pt, torch.permute(W_pt, [0, 2, 1])))
        y = torch.empty([b, M, N]).cuda().half()
        module.run_with_tensors({'input_0': X_pt, 'input_1': W_pt}, [y])
        self.assertTrue(torch.allclose(Y_pt, y, atol=0.1, rtol=0.1))
@pytest.mark.parametrize('condition_args,expected', [({}, ''), ({'gte': 10}, 'gte=10'), ({'gte': 10.5}, 'gte=10.5'), ({'gt': 10}, 'gt=10'), ({'gt': 10, 'lt': 40}, 'gt=10 and lt=40'), ({'eq': 8}, 'eq=8'), ({'not_eq': 10}, 'not_eq=10'), ({'lte': 10}, 'lte=10'), ({'lt': 10}, 'lt=10'), ({'is_in': [10, 20, 30]}, 'is_in=[10, 20, 30]'), ({'not_in': [10, 20, 30]}, 'not_in=[10, 20, 30]')])
def test_value_condition_str(condition_args, expected):
    """str(TestValueCondition) renders each configured bound as 'name=value',
    joined with ' and '.

    NOTE(review): the decorator's `@pytest.mark` prefix was missing in the
    pasted source (`.parametrize(...)` alone is a syntax error); restored.
    """
    condition = TestValueCondition(**condition_args)
    assert str(condition) == expected
def cidr_validator(value, return_ip_interface=False):
    """Validate an IPv4 address or IPv4 CIDR string.

    Raises ValidationError on malformed input, prefix lengths outside 1..32,
    or reserved addresses. When return_ip_interface is true, returns the
    parsed ipaddress interface object.
    """
    try:
        if '/' in value:
            addr_part, mask_part = value.split('/')
            prefix_len = int(mask_part)
        else:
            addr_part, prefix_len = value, 32
        addr_ok = bool(validators.ipv4_re.match(addr_part))
        mask_ok = 1 <= prefix_len <= 32
        if not (addr_ok and mask_ok):
            raise ValueError
        iface = ipaddress.ip_interface(six.text_type(value))
        # Reject reserved ranges outright.
        if iface.is_reserved:
            raise ValueError
    except ValueError:
        raise ValidationError(_('Enter a valid IPv4 address or IPv4 network.'))
    if return_ip_interface:
        return iface
def show_results(model_name: str, dir_name_output: str, file_sources: List):
    """Render separated-audio results in Streamlit, one tab per source file.

    For every source found under <out_path>/<model_name>/<dir_name_output>,
    shows a waveform image next to an audio player.
    """
    sources = get_sources(((out_path / Path(model_name)) / dir_name_output), file_sources)
    # One tab per stem, labelled via the label_sources lookup table.
    tab_sources = st.tabs([f'**{label_sources.get(k)}**' for k in sources.keys()])
    for (i, (file, pathname)) in enumerate(sources.items()):
        with tab_sources[i]:
            cols = st.columns(2)
            with cols[0]:
                # assumes exported stems are mp3 — TODO confirm upstream export format
                auseg = load_audio_segment(pathname, 'mp3')
                st.image(plot_audio(auseg, 32767, file=file, model_name=model_name, dir_name_output=dir_name_output), use_column_width='always')
            with cols[1]:
                st.audio(str(pathname))
    log.info(f'Displaying results for {dir_name_output} - {model_name}')
class DBApi():
    """Thin pymysql wrapper bound to the Flask app's configured MySQL server.

    Usable as a context manager: the connection and cursor opened in
    __init__ are closed on __exit__ / close().
    """

    def __init__(self, database='') -> None:
        # Fall back to the app-configured default database when none is given.
        if not database:
            database = current_app.config['DATABASE']
        port = int(current_app.config['PORT'])
        self.db = pymysql.connect(host=current_app.config['HOST'], user=current_app.config['USERNAME'], password=current_app.config['PASSWORD'], db=database, port=port)
        self.cur = self.db.cursor()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()

    def create_db(self, db_name):
        """Create a new database named `db_name`.

        NOTE: identifiers cannot be bound as query parameters, so `db_name`
        is interpolated into the SQL — only pass trusted, programmatic names.
        """
        cur = self.db.cursor()
        sql = f'create database {db_name}'
        cur.execute(sql)

    def create_tb(self):
        """Create the `users` table this API operates on."""
        cur = self.db.cursor()
        sql = 'CREATE TABLE users (\n id INT UNSIGNED NOT NULL AUTO_INCREMENT,\n username VARCHAR(64) NOT NULL,\n password CHAR(56) NOT NULL,\n quota BIGINT NOT NULL DEFAULT 0,\n download BIGINT UNSIGNED NOT NULL DEFAULT 0,\n upload BIGINT UNSIGNED NOT NULL DEFAULT 0,\n PRIMARY KEY (id),\n INDEX (password)\n );\n '
        cur.execute(sql)

    def select(self):
        """Return all rows from `users`.

        Bug fix: the original fetched the rows and discarded them (no return).
        """
        cur = self.db.cursor()
        sql = 'select * from users'
        cur.execute(sql)
        return cur.fetchall()

    def insert_user(self, username, password):
        """Insert a user with a SHA-224-hashed password and unlimited quota.

        Returns '' without inserting when the hashed password already exists
        (password hashes double as lookup keys in this schema).
        """
        password = sha224(password.encode()).hexdigest()
        sql = 'select password from users;'
        self.cur.execute(sql)
        results = [row[0] for row in self.cur.fetchall() if row]
        if password in results:
            return ''
        # Parameterized to prevent SQL injection via `username`
        # (the original interpolated it into an f-string).
        self.cur.execute('insert into users (username,password,quota) values (%s,%s,-1);', (username, password))
        self.db.commit()

    def close(self):
        # Close the cursor before its owning connection.
        if self.cur:
            self.cur.close()
        if self.db:
            self.db.close()
def handle_done_response_ocr_async(result, client, job_id) -> AsyncResponseType[OcrAsyncDataClass]:
    """Collect the JSON shards of a finished Google Vision async OCR job from
    GCS and convert them into the provider-agnostic OcrAsyncDataClass.
    """
    # The finished operation points at the GCS folder holding the output shards.
    gcs_destination_uri = result['response']['responses'][0]['outputConfig']['gcsDestination']['uri']
    match = re.match('gs://([^/]+)/(.+)', gcs_destination_uri)
    bucket_name = match.group(1)
    prefix = match.group(2)
    bucket = client.get_bucket(bucket_name)
    # Skip "directory" placeholder blobs (names ending in '/').
    blob_list = [blob for blob in list(bucket.list_blobs(prefix=prefix)) if (not blob.name.endswith('/'))]
    original_response = {'responses': []}
    pages: List[Page] = []
    for blob in blob_list:
        output = blob
        json_string = output.download_as_bytes()
        response = json.loads(json_string)
        # NOTE(review): the loop variable shadows `response` (the parsed shard).
        # Works because the iterable is evaluated once up front, but fragile.
        for response in response['responses']:
            original_response['responses'].append(response['fullTextAnnotation'])
            for page in response['fullTextAnnotation']['pages']:
                lines: Sequence[Line] = []
                for block in page['blocks']:
                    # NOTE(review): `words` resets per block while lines are
                    # emitted per paragraph, so each Line carries the words of
                    # all preceding paragraphs in the block — looks unintended;
                    # confirm whether the reset should be per paragraph.
                    words: Sequence[Word] = []
                    for paragraph in block['paragraphs']:
                        line_boxes = BoundingBox.from_normalized_vertices(paragraph['boundingBox']['normalizedVertices'])
                        for word in paragraph['words']:
                            word_boxes = BoundingBox.from_normalized_vertices(word['boundingBox']['normalizedVertices'])
                            # A word's text is the concatenation of its symbols.
                            word_text = ''
                            for symbol in word['symbols']:
                                word_text += symbol['text']
                            words.append(Word(text=word_text, bounding_box=word_boxes, confidence=word['confidence']))
                        lines.append(Line(text=' '.join([word.text for word in words]), words=words, bounding_box=line_boxes, confidence=paragraph['confidence']))
                pages.append(OcrAsyncPage(lines=lines))
    raw_text = ''.join([res['text'] for res in original_response['responses']])
    return AsyncResponseType(provider_job_id=job_id, original_response=original_response, standardized_response=OcrAsyncDataClass(raw_text=raw_text, pages=pages, number_of_pages=len(pages)))
@pytest.fixture(scope='function')
def unlinked_dataset(db: Session):
    """Persist a standalone CtlDataset (not linked to any system) for a test,
    deleting it again on teardown.

    NOTE(review): the decorator's `@pytest.fixture` prefix was missing in the
    pasted source (`(scope='function')` alone is a syntax error); restored to
    the standard fixture form.
    """
    ds = Dataset(fides_key='unlinked_dataset', organization_fides_key='default_organization', name='Unlinked Dataset', description='Example dataset created in test fixtures', collections=[{'name': 'subscriptions', 'fields': [{'name': 'id', 'data_categories': ['system.operations']}, {'name': 'email', 'data_categories': ['user.contact.email'], 'fidesops_meta': {'identity': 'email'}}]}])
    dataset = CtlDataset(**ds.dict())
    db.add(dataset)
    db.commit()
    # Hand the persisted row to the test, then clean it up.
    yield dataset
    dataset.delete(db)
def visualize2(facts):
    """Render the fact database as a graphviz graph.

    One subgraph per block, one box node per statement (labelled with its
    reconstructed source form), and an edge per FollowsFact from the later
    statement back to the earlier one.
    """
    graph = Digraph()
    graph.attr('graph', fontname='mono')
    graph.attr('node', fontname='mono')
    # Index statements by their owning block, and give each block a subgraph.
    stmt_to_block = {}
    for f in facts:
        if isinstance(f, BlockStmtFact):
            stmt_to_block[f.id_stmt] = f.id_block
    block_to_graph = {}
    for f in facts:
        if isinstance(f, BlockFact):
            block_to_graph[f.id_block] = Digraph()
    node_style = {'shape': 'box'}
    for fact in facts:
        if not isinstance(fact, statement_facts):
            continue
        sub: Digraph = block_to_graph[stmt_to_block[fact.id]]
        if isinstance(fact, AssignFact):
            label = f'{fact.id} = {fact.var_id}'
        elif isinstance(fact, ConstFact):
            label = f'{fact.id} = {fact.value}'
        elif isinstance(fact, BinaryOpFact):
            label = f'{fact.id} = {fact.id_lhs} {fact.op} {fact.id_rhs}'
        else:
            raise NotImplementedError(fact)
        sub.node(fact.id, label, **node_style)
    for fact in facts:
        if isinstance(fact, FollowsFact):
            block_to_graph[stmt_to_block[fact.id_next]].edge(fact.id_next, fact.id_prev)
    for sub in block_to_graph.values():
        graph.subgraph(sub)
    return graph
def reduce_max(X, lengths, *, threads_per_block=128, num_blocks=128):
    """Per-sequence column-wise max over the rows of X.

    Rows of X are partitioned into len(lengths) consecutive sequences; for
    each sequence and column this returns the max value and the index at
    which it occurred, as the pair (maxes, which).
    """
    _is_float_array(X)
    num_seqs = len(lengths)
    total_rows, width = X.shape[0], X.shape[1]
    _check_lengths(lengths, total_rows, min_length=1)
    out_shape = (num_seqs, width)
    maxes = _alloc(out_shape, dtype=X.dtype, zeros=False)
    which = _alloc(out_shape, dtype='i', zeros=False)
    # Pick the kernel matching X's precision.
    kernel = reduce_max_kernel_float if X.dtype == 'float32' else reduce_max_kernel_double
    kernel((num_blocks,), (threads_per_block,), (maxes, which, X, lengths, num_seqs, total_rows, width))
    return (maxes, which)
def _plotCrossCaseStatistics(axes: 'Axes', plot_config: 'PlotConfig', data: CcsData, index: int):
    """Plot the configured statistics markers for one case at x=index.

    Each visible statistics style contributes markers: 'mean' and 'p50' a
    single point, 'std' the mean +/- one standard deviation, and the range
    styles ('min-max', 'p10-p90', 'p33-p67') their two bounds. Refactored
    from six near-identical copy-pasted stanzas into one data-driven helper;
    values are still computed lazily, only for visible styles.
    """
    axes.set_xlabel(plot_config.xLabel())
    axes.set_ylabel(plot_config.yLabel())

    def _marks(style_key, *values_of):
        # Draw one marker per lazily-evaluated value, using style_key's style.
        style = plot_config.getStatisticsStyle(style_key)
        if not style.isVisible():
            return
        for value_of in values_of:
            axes.plot([index], value_of(), alpha=style.alpha, linestyle='', color=style.color, marker=style.marker, markersize=style.size)

    _marks('mean', lambda: data['mean'][index])
    _marks('p50', lambda: data['p50'][index])
    _marks('std', lambda: (data['mean'][index] + data['std'][index]), lambda: (data['mean'][index] - data['std'][index]))
    _marks('min-max', lambda: data['min'][index], lambda: data['max'][index])
    _marks('p10-p90', lambda: data['p10'][index], lambda: data['p90'][index])
    _marks('p33-p67', lambda: data['p33'][index], lambda: data['p67'][index])
class BackendResult():
    """Result returned by a backend call.

    Dict payloads are unpacked into backend name plus optional
    BackendRequest/BackendResponse wrappers; any other payload is kept
    verbatim under the name 'raw'.
    """

    def __init__(self, bres):
        if isinstance(bres, dict):
            self.name = cast(str, bres.get('backend'))
            self.request = BackendRequest(bres['request']) if 'request' in bres else None
            self.response = BackendResponse(bres['response']) if 'response' in bres else None
        else:
            # Non-dict payloads are stored untouched.
            self.name = 'raw'
            self.request = None
            self.response = bres

    def as_dict(self) -> Dict[(str, Any)]:
        """Serialize to a plain dict, omitting absent request/response."""
        result = {'name': self.name}
        if self.request:
            result['request'] = dictify(self.request)
        if self.response:
            result['response'] = dictify(self.response)
        return result
class OptionPlotoptionsTimelineDragdropGuideboxDefault(Options):
    """Generated Highcharts option wrapper for the default drag guide box.

    NOTE(review): in the pasted original every getter was immediately
    shadowed by a same-named setter definition, leaving only the setters.
    The @property / @<name>.setter decorators restored here are the only
    reading under which both definitions survive; confirm against the
    generator's other output.
    """

    @property
    def className(self):
        # CSS class applied to the guide box.
        return self._config_get('highcharts-drag-box-default')

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        # Fill color of the guide box.
        return self._config_get('rgba(0, 0, 0, 0.1)')

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def cursor(self):
        # Mouse cursor shown while dragging.
        return self._config_get('move')

    @cursor.setter
    def cursor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineColor(self):
        return self._config_get('#888')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        return self._config_get(1)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def zIndex(self):
        return self._config_get(900)

    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
def main(page: ft.Page):
    """Flet line-chart demo: a refresh button toggles between two data sets
    and between interactive/non-interactive chart modes.

    NOTE(review): `s` inside toggle_data is not defined anywhere in this
    function — the first click will raise NameError; presumably a state
    holder object was lost from this paste. Confirm against the original.
    """
    # First data set: thick, fully opaque, mostly curved series.
    data_1 = [ft.LineChartData(data_points=[ft.LineChartDataPoint(1, 1), ft.LineChartDataPoint(3, 1.5), ft.LineChartDataPoint(5, 1.4), ft.LineChartDataPoint(7, 3.4), ft.LineChartDataPoint(10, 2), ft.LineChartDataPoint(12, 2.2), ft.LineChartDataPoint(13, 1.8)], stroke_width=8, color=ft.colors.LIGHT_GREEN, curved=True, stroke_cap_round=True), ft.LineChartData(data_points=[ft.LineChartDataPoint(1, 1), ft.LineChartDataPoint(3, 2.8), ft.LineChartDataPoint(7, 1.2), ft.LineChartDataPoint(10, 2.8), ft.LineChartDataPoint(12, 2.6), ft.LineChartDataPoint(13, 3.9)], color=ft.colors.PINK, below_line_bgcolor=ft.colors.with_opacity(0, ft.colors.PINK), stroke_width=8, curved=True, stroke_cap_round=True), ft.LineChartData(data_points=[ft.LineChartDataPoint(1, 2.8), ft.LineChartDataPoint(3, 1.9), ft.LineChartDataPoint(6, 3), ft.LineChartDataPoint(10, 1.3), ft.LineChartDataPoint(13, 2.5)], color=ft.colors.CYAN, stroke_width=8, curved=True, stroke_cap_round=True)]
    # Second data set: thinner, semi-transparent series with higher peaks.
    data_2 = [ft.LineChartData(data_points=[ft.LineChartDataPoint(1, 1), ft.LineChartDataPoint(3, 4), ft.LineChartDataPoint(5, 1.8), ft.LineChartDataPoint(7, 5), ft.LineChartDataPoint(10, 2), ft.LineChartDataPoint(12, 2.2), ft.LineChartDataPoint(13, 1.8)], stroke_width=4, color=ft.colors.with_opacity(0.5, ft.colors.LIGHT_GREEN), stroke_cap_round=True), ft.LineChartData(data_points=[ft.LineChartDataPoint(1, 1), ft.LineChartDataPoint(3, 2.8), ft.LineChartDataPoint(7, 1.2), ft.LineChartDataPoint(10, 2.8), ft.LineChartDataPoint(12, 2.6), ft.LineChartDataPoint(13, 3.9)], color=ft.colors.with_opacity(0.5, ft.colors.PINK), below_line_bgcolor=ft.colors.with_opacity(0.2, ft.colors.PINK), stroke_width=4, curved=True, stroke_cap_round=True), ft.LineChartData(data_points=[ft.LineChartDataPoint(1, 3.8), ft.LineChartDataPoint(3, 1.9), ft.LineChartDataPoint(6, 5), ft.LineChartDataPoint(10, 3.3), ft.LineChartDataPoint(13, 4.5)], color=ft.colors.with_opacity(0.5, ft.colors.CYAN), stroke_width=4, stroke_cap_round=True)]
    # Chart shell: labelled left/bottom axes, starts on data_1 with max_y=4.
    chart = ft.LineChart(data_series=data_1, border=ft.Border(bottom=ft.BorderSide(4, ft.colors.with_opacity(0.5, ft.colors.ON_SURFACE))), left_axis=ft.ChartAxis(labels=[ft.ChartAxisLabel(value=1, label=ft.Text('1m', size=14, weight=ft.FontWeight.BOLD)), ft.ChartAxisLabel(value=2, label=ft.Text('2m', size=14, weight=ft.FontWeight.BOLD)), ft.ChartAxisLabel(value=3, label=ft.Text('3m', size=14, weight=ft.FontWeight.BOLD)), ft.ChartAxisLabel(value=4, label=ft.Text('4m', size=14, weight=ft.FontWeight.BOLD)), ft.ChartAxisLabel(value=5, label=ft.Text('5m', size=14, weight=ft.FontWeight.BOLD)), ft.ChartAxisLabel(value=6, label=ft.Text('6m', size=14, weight=ft.FontWeight.BOLD))], labels_size=40), bottom_axis=ft.ChartAxis(labels=[ft.ChartAxisLabel(value=2, label=ft.Container(ft.Text('SEP', size=16, weight=ft.FontWeight.BOLD, color=ft.colors.with_opacity(0.5, ft.colors.ON_SURFACE)), margin=ft.margin.only(top=10))), ft.ChartAxisLabel(value=7, label=ft.Container(ft.Text('OCT', size=16, weight=ft.FontWeight.BOLD, color=ft.colors.with_opacity(0.5, ft.colors.ON_SURFACE)), margin=ft.margin.only(top=10))), ft.ChartAxisLabel(value=12, label=ft.Container(ft.Text('DEC', size=16, weight=ft.FontWeight.BOLD, color=ft.colors.with_opacity(0.5, ft.colors.ON_SURFACE)), margin=ft.margin.only(top=10)))], labels_size=32), tooltip_bgcolor=ft.colors.with_opacity(0.8, ft.colors.BLUE_GREY), min_y=0, max_y=4, min_x=0, max_x=14, expand=True)

    def toggle_data(e):
        # Swap data set, rescale the y axis, and flip interactivity.
        if s.toggle:
            chart.data_series = data_2
            chart.data_series[2].point = True
            chart.max_y = 6
            chart.interactive = False
        else:
            chart.data_series = data_1
            chart.max_y = 4
            chart.interactive = True
        s.toggle = (not s.toggle)
        chart.update()
    page.add(ft.IconButton(ft.icons.REFRESH, on_click=toggle_data), chart)
class OptionSeriesItemSonificationPointgrouping(Options):
    """Generated Highcharts wrapper for series.sonification.pointGrouping.

    NOTE(review): each getter in the pasted original was shadowed by a
    same-named setter; restored the @property / @<name>.setter decorators
    that these generated wrappers normally carry.
    """

    @property
    def algorithm(self):
        # Grouping algorithm; Highcharts default is 'minmax'.
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        # Group window in milliseconds; default 15.
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        # Point property used for grouping; default 'y'.
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
def read(self, genre, method=None, command=None, _class=None):
    """Read one reply from the robot, over socket (method set) or serial.

    Serial replies are assembled by a small framing state machine: two 0xfe
    header bytes, a length byte, the payload, and (for Mercury) a trailing
    CRC. Times out after `wait_time` seconds and returns whatever bytes were
    collected (possibly b'').

    NOTE(review): statement nesting was reconstructed from a flattened
    paste — verify the framing branches against the upstream source.
    """
    datas = b''
    data_len = (- 1)
    k = 0          # bytes read so far (position counter for header pairing)
    pre = 0        # position of the last header byte seen
    t = time.time()
    wait_time = 0.1
    if (genre == ProtocolCode.GO_ZERO):
        # Homing can take a long time; wait much longer for its reply.
        wait_time = 120
    if (method is not None):
        # ---- socket transport ----
        if (genre == 177):
            # Keep reading until the password prompt appears.
            while True:
                data = self.sock.recv(1024)
                if (b'password' in data):
                    break
        elif (genre == 192):
            # NOTE(review): `data` is used before assignment on the first
            # iteration of this branch — looks like a latent NameError.
            while True:
                data += self.sock.recv(1024)
                if (len(data) == 6):
                    break
        else:
            try:
                self.sock.settimeout(0.5)
                data = self.sock.recv(1024)
                if isinstance(data, str):
                    # Python 2 sockets return str; re-encode as hex bytes.
                    datas = bytearray()
                    for i in data:
                        datas += hex(ord(i))
            except:
                data = b''
        if (check_python_version() == 2):
            self.log.debug('_read: {}'.format([hex(ord(d)) for d in data]))
        else:
            self.log.debug('_read: {}'.format([hex(d) for d in data]))
        return data
    else:
        # ---- serial transport ----
        if (genre == ProtocolCode.GET_SSID_PWD):
            # Free-form reply: just drain whatever is waiting.
            time.sleep(0.1)
            if (self._serial_port.inWaiting() > 0):
                datas = self._serial_port.read(self._serial_port.inWaiting())
            return datas
        elif (genre == ProtocolCode.GET_ACCEI_DATA):
            wait_time = 1
        # Byte-at-a-time framing loop, bounded by wait_time.
        while (True and ((time.time() - t) < wait_time)):
            data = self._serial_port.read()
            k += 1
            if (_class == 'Mercury'):
                # Mercury frames end with a 2-byte CRC once the payload is in.
                if (data_len == 3):
                    datas += data
                    crc = self._serial_port.read(2)
                    if (DataProcessor.crc_check(datas) == [v for v in crc]):
                        datas += crc
                        break
            if ((data_len == 1) and (data == b'\xfa')):
                # Frame terminator; if it's just our own command echo, restart.
                datas += data
                if ([i for i in datas] == command):
                    datas = b''
                    data_len = (- 1)
                    k = 0
                    pre = 0
                    continue
                break
            elif (len(datas) == 2):
                # Third byte after the 0xfe 0xfe header is the payload length.
                data_len = struct.unpack('b', data)[0]
                datas += data
            elif ((len(datas) > 2) and (data_len > 0)):
                # Payload byte.
                datas += data
                data_len -= 1
            elif (data == b'\xfe'):
                # Header detection: need two consecutive 0xfe bytes.
                if (datas == b''):
                    datas += data
                    pre = k
                elif ((k - 1) == pre):
                    datas += data
                else:
                    datas = b'\xfe'
                    pre = k
            else:
                # Out-of-frame noise: reset the accumulator.
                datas = b''
        if (check_python_version() == 2):
            self.log.debug('_read: {}'.format([hex(ord(data)) for data in datas]))
        else:
            self.log.debug('_read: {}'.format([hex(data) for data in datas]))
        return datas
def normalize_severity_args(args):
    """Expand a comma-separated severity spec into canonical severity names.

    'all' yields every known severity, 'none' yields an empty list;
    otherwise each comma-separated token is stripped, upper-cased, and
    one-letter abbreviations (C/H/M/L/O/I) are expanded to full names.
    """
    translate = dict(C='CRITICAL', H='HIGH', M='MEDIUM', L='LOW', O='OPTIMIZATION', I='INFO')
    if args == 'all':
        return translate.values()
    if args == 'none':
        return []
    normalized = [token.strip().upper() for token in args.split(',')]
    # Tokens longer than one character are assumed to already be full names.
    return [name if len(name) > 1 else translate[name] for name in normalized]
def batch_pictures():
    """Manual smoke test: push one image batch through the ZMQ pipeline.

    Loads config.yaml from the repo root, sends a single-request batch built
    from test.jpg to the pipeline's input socket, then blocks forever logging
    whatever arrives on the output socket.
    """
    cur_path = pathlib.Path(__file__)
    config_path = (cur_path.parent.parent / 'config.yaml')
    config = dm.Config.parse_file(config_path, content_type='yaml')
    ctx = zmq.Context()
    # PUSH to the pipeline input; PULL (bind) the pipeline output.
    sock_sender = ctx.socket(zmq.PUSH)
    sock_sender.connect(config.zmq_input_address)
    sock_receiver = ctx.socket(zmq.PULL)
    sock_receiver.bind(config.zmq_output_address)
    # test.jpg is resolved relative to the current working directory.
    image = Image.open('test.jpg')
    data = np.asarray(image)
    request_info = dm.RequestInfo(input=data, parameters={})
    batch = MinimalBatchObject(uid='test', requests_info=[request_info], model=stub_model)
    sock_sender.send_pyobj(batch)
    logger.info('Start listening')
    # Intentionally endless — this is an interactive debugging tool.
    while True:
        result = sock_receiver.recv_pyobj()
        logger.info(f'Result batch {result}')
class BenchSoC(SoCCore):
    """LiteDRAM benchmark SoC targeting the Xilinx KCU105 board.

    NOTE(review): several numeric literals appear truncated in this paste —
    `int(.0)` for the default sys clock, `iodelay_clk_freq=.0`,
    `clk_freq=.0` for the uartbone, and the empty `size=` argument (a
    syntax error as written). Restore the original frequencies and main-RAM
    size from upstream before using this file.
    """

    def __init__(self, uart='crossover', sys_clk_freq=int(.0), with_bist=False, with_analyzer=False):
        platform = xilinx_kcu105.Platform()
        # Base SoC with a writable integrated ROM so the BIOS can be reloaded.
        SoCCore.__init__(self, platform, clk_freq=sys_clk_freq, ident='LiteDRAM bench on KCU105', integrated_rom_size=65536, integrated_rom_mode='rw', uart_name=uart)
        self.submodules.crg = _CRG(platform, sys_clk_freq)
        # DDR4 PHY over byte lanes 0-7 of the board's ddram pads.
        self.submodules.ddrphy = usddrphy.USDDRPHY(pads=PHYPadsReducer(platform.request('ddram'), [0, 1, 2, 3, 4, 5, 6, 7]), memtype='DDR4', sys_clk_freq=sys_clk_freq, iodelay_clk_freq=.0)
        # NOTE(review): `size=` lost its value in the paste.
        self.add_sdram('sdram', phy=self.ddrphy, module=EDY4016A(sys_clk_freq, '1:4'), origin=self.mem_map['main_ram'], size=, with_bist=with_bist)
        if (uart != 'serial'):
            # Host bridge over UART when the serial port is not used as console.
            self.add_uartbone(clk_freq=.0, baudrate=115200, cd='uart')
        # Etherbone over the board's 1000BASE-X SFP for host access.
        self.submodules.ethphy = KU_1000BASEX(self.crg.cd_eth.clk, data_pads=self.platform.request('sfp', 0), sys_clk_freq=self.clk_freq)
        self.comb += self.platform.request('sfp_tx_disable_n', 0).eq(1)
        self.platform.add_platform_command('set_property SEVERITY {{Warning}} [get_drc_checks REQP-1753]')
        self.add_etherbone(phy=self.ethphy)
        if with_analyzer:
            from litescope import LiteScopeAnalyzer
            # Capture the DFI bus for debugging.
            analyzer_signals = [self.ddrphy.dfi]
            self.submodules.analyzer = LiteScopeAnalyzer(analyzer_signals, depth=256, clock_domain='sys', csr_csv='analyzer.csv')
        from litex.soc.cores.led import LedChaser
        self.submodules.leds = LedChaser(pads=platform.request_all('user_led'), sys_clk_freq=sys_clk_freq)
class OptionSeriesWaterfallSonificationDefaultinstrumentoptionsMappingFrequency(Options):
    """Generated Highcharts wrapper for the sonification frequency mapping.

    NOTE(review): each getter in the pasted original was shadowed by a
    same-named setter; restored the @property / @<name>.setter decorators
    these generated wrappers normally carry.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
@pytest.mark.parametrize('tuple_type,python_value,solidity_abi_encoded,_', CORRECT_TUPLE_ENCODINGS)
def test_abi_encode_for_multiple_types_as_list(tuple_type, python_value, solidity_abi_encoded, _):
    """Encoding a tuple's components as a separated type list must match the
    Solidity ABI encoding fixture.

    NOTE(review): the decorator's `@pytest.mark` prefix was missing in the
    pasted source (`.parametrize(...)` alone is a syntax error); restored.
    """
    abi_type = parse(tuple_type)
    if abi_type.arrlist is not None:
        pytest.skip('ABI coding functions do not support array types')
    # Flatten the tuple into its component type strings and encode positionally.
    separated_list_of_types = [t.to_type_str() for t in abi_type.components]
    eth_abi_encoded = encode(separated_list_of_types, python_value)
    assert eth_abi_encoded == solidity_abi_encoded
def dumpdexInit(packageName):
    """Prepare the on-device dump directory for `packageName` via adb shell.

    Creates /data/data/<pkg>/files//dump_dex_<pkg> (double slash preserved
    from the original path construction), opens permissions on it and the
    files directory, and returns the concatenated command output, one
    newline-terminated line per command.
    """
    files_dir = f'/data/data/{packageName}/files/'
    dump_dir = f'{files_dir}/dump_dex_{packageName}'
    outputs = [
        adbshellCmd(f'mkdir -p {dump_dir}'),
        adbshellCmd(f'chmod 0777 {files_dir}'),
        adbshellCmd(f'chmod 0777 {dump_dir}'),
    ]
    return ''.join(line + '\n' for line in outputs)
def test_chroma_db_collection_reset():
    """Resetting one ChromaDB-backed app must not wipe sibling collections
    that share the same on-disk directory: only app1's count drops to 0.
    """
    # Four apps, four distinct collections, all stored under 'test-db'.
    db1 = ChromaDB(config=ChromaDbConfig(allow_reset=True, dir='test-db'))
    app1 = App(config=AppConfig(collect_metrics=False), db=db1)
    app1.set_collection_name('one_collection')
    db2 = ChromaDB(config=ChromaDbConfig(allow_reset=True, dir='test-db'))
    app2 = App(config=AppConfig(collect_metrics=False), db=db2)
    app2.set_collection_name('two_collection')
    db3 = ChromaDB(config=ChromaDbConfig(allow_reset=True, dir='test-db'))
    app3 = App(config=AppConfig(collect_metrics=False), db=db3)
    app3.set_collection_name('three_collection')
    db4 = ChromaDB(config=ChromaDbConfig(allow_reset=True, dir='test-db'))
    app4 = App(config=AppConfig(collect_metrics=False), db=db4)
    app4.set_collection_name('four_collection')
    # One vector per collection, so every count starts at 1.
    app1.db.collection.add(embeddings=[0, 0, 0], ids=['1'])
    app2.db.collection.add(embeddings=[0, 0, 0], ids=['2'])
    app3.db.collection.add(embeddings=[0, 0, 0], ids=['3'])
    app4.db.collection.add(embeddings=[0, 0, 0], ids=['4'])
    app1.db.reset()
    assert (app1.db.count() == 0)
    assert (app2.db.count() == 1)
    assert (app3.db.count() == 1)
    assert (app4.db.count() == 1)
    # Cleanup so later tests start from empty collections.
    app2.db.reset()
    app3.db.reset()
    app4.db.reset()
def extractThebrilliantjadeinaiWordpressCom(item):
    """Build a release message for posts from thebrilliantjadeinai.wordpress.com.

    Returns None for previews or posts without volume/chapter info, a release
    message when a known tag or title fragment matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    title_lower = item['title'].lower()
    if not (chp or vol) or 'preview' in title_lower:
        return None
    tagmap = [("Marielle Clarac's Engagement", "Marielle Clarac's Engagement", 'translated'), ('The Magician wants Normality', 'The Magician wants Normality', 'translated'), ('TMWN', 'The Magician wants Normality', 'translated'), ('I am a princess responsible for settling circumstances', 'I am a princess responsible for settling circumstances', 'translated')]
    for tag, series, release_kind in tagmap:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=release_kind)
    # Title-based matching only applies to uncategorised posts.
    if item['tags'] != ['Senza categoria']:
        return False
    titlemap = [('The Magician Wants Normality', 'The Magician Wants Normality', 'translated'), ('Master of Dungeon', 'Master of Dungeon', 'oel')]
    for fragment, series, release_kind in titlemap:
        if fragment.lower() in title_lower:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=release_kind)
    return False
class Window(lg.Node):
    """LabGraph node that hosts two stacked pyqtgraph line plots in one window."""

    # Plot configurations injected by the graph definition.
    PLOT: LinePlot
    PLOT_2: LinePlot

    def run_plot(self) -> None:
        """Build both plots into a GraphicsWindow, one per row, and enter the
        Qt event loop (blocks until the window closes)."""
        win = pg.GraphicsWindow()
        win.addItem(self.PLOT.build())
        win.nextRow()
        win.addItem(self.PLOT_2.build())
        QtGui.QApplication.instance().exec_()

    def cleanup(self) -> None:
        """Stop both plots and quit the Qt application."""
        self.PLOT.stop()
        self.PLOT_2.stop()
        QtGui.QApplication.instance().quit()
class DistributedDrQV2Test(absltest.TestCase):
    """Smoke test: the distributed DrQ-V2 agent launches and the learner steps."""

    def test_distributed_drq_v2_run(self):
        """Build the distributed program on a fake pixel environment, launch
        it in multithreaded test mode, and drive the learner manually."""

        def environment_factory(seed, testing):
            # The fake environment ignores seed/testing flags.
            del seed, testing
            # Pixel observations, one continuous action dim in [-1, 1].
            environment = fakes.Environment(spec=specs.EnvironmentSpec(observations=specs.Array((84, 84, 3), dtype=np.uint8), actions=specs.BoundedArray((1,), dtype=np.float32, minimum=(- 1), maximum=1.0), rewards=specs.Array((), dtype=np.float32), discounts=specs.BoundedArray((), dtype=np.float32, minimum=0.0, maximum=1.0)))
            return environment
        # Tiny networks and a small replay config to keep the test fast.
        agent = drq_v2_lib.DistributedDrQV2(seed=0, environment_factory=environment_factory, network_factory=(lambda spec: networks.make_networks(spec, hidden_size=10, latent_size=10)), num_actors=2, config=config.DrQV2Config(batch_size=32, min_replay_size=32, max_replay_size=1000, samples_per_insert=32.0, samples_per_insert_tolerance_rate=0.1), max_actor_steps=None)
        program = agent.build()
        # Take over the learner node so it can be stepped manually below.
        (learner_node,) = program.groups['learner']
        learner_node.disable_run()
        lp.launch(program, launch_type='test_mt')
        learner: acme.Learner = learner_node.create_handle().dereference()
        for _ in range(5):
            learner.step()
class ImportJobList(ResourceList):
    """JSON:API list endpoint exposing the current user's import jobs."""

    def query(self, kwargs):
        # Restrict results to jobs owned by the authenticated user.
        query_ = self.session.query(ImportJob)
        query_ = query_.filter_by(user_id=current_user.id)
        return query_
    # `query` is defined above so data_layer can reference it directly.
    decorators = (jwt_required,)
    schema = ImportJobSchema
    data_layer = {'session': db.session, 'model': ImportJob, 'methods': {'query': query}}
def get_cuda_version_from_nvidia_smi():
    """Return the CUDA version reported by `nvidia-smi` (e.g. '12.2'), or None.

    Returns None when nvidia-smi is not installed, exits non-zero, or its
    output lacks a 'CUDA Version: X.Y' field.
    """
    try:
        output = subprocess.check_output(['nvidia-smi']).decode('utf-8')
    except (OSError, subprocess.SubprocessError):
        # nvidia-smi missing or failed. The original bare `except:` also
        # swallowed KeyboardInterrupt/SystemExit — narrowed here.
        return None
    match = re.search('CUDA Version:\\s+(\\d+\\.\\d+)', output)
    return match.group(1) if match else None
def sort_frame_processors(frame_processors: List[str]) -> list[str]:
    """Return every available frame-processor module name, ordered so the
    requested processors come first (in their requested order) and all
    remaining processors follow."""
    available = list_module_names('facefusion/processors/frame/modules')

    def rank(name: str) -> int:
        # Requested names keep their requested position; others sort last.
        if name in frame_processors:
            return frame_processors.index(name)
        return len(frame_processors)

    return sorted(available, key=rank)
class ConditionEncoder(Chain):
    """Convolutional encoder that embeds a 3-channel condition image into a
    320-channel feature map via progressive stride-2 downsampling.
    """

    def __init__(self, device: ((Device | str) | None)=None, dtype: (DType | None)=None) -> None:
        # Channel widths of the successive downsampling stages.
        self.out_channels = (16, 32, 96, 256)
        # Stem conv (3 -> 16) + SiLU, then per stage a stride-1 conv followed
        # by a stride-2 conv raising the channel count (each with SiLU), and
        # finally a 3x3 projection from 256 to 320 channels.
        super().__init__(Chain(Conv2d(in_channels=3, out_channels=self.out_channels[0], kernel_size=3, stride=1, padding=1, device=device, dtype=dtype), SiLU()), *(Chain(Conv2d(in_channels=self.out_channels[i], out_channels=self.out_channels[i], kernel_size=3, padding=1, device=device, dtype=dtype), SiLU(), Conv2d(in_channels=self.out_channels[i], out_channels=self.out_channels[(i + 1)], kernel_size=3, stride=2, padding=1, device=device, dtype=dtype), SiLU()) for i in range((len(self.out_channels) - 1))), Conv2d(in_channels=self.out_channels[(- 1)], out_channels=320, kernel_size=3, padding=1, device=device, dtype=dtype))
def int2c2e3d_33(ax, da, A, bx, db, B):
result = numpy.zeros((10, 10), dtype=float)
x0 = (ax + bx)
x1 = (x0 ** (- 1.0))
x2 = ((- x1) * ((ax * A[0]) + (bx * B[0])))
x3 = (x2 + B[0])
x4 = (bx ** (- 1.0))
x5 = (((ax * bx) * x1) * ((((A[0] - B[0]) ** 2) + ((A[1] - B[1]) ** 2)) + ((A[2] - B[2]) ** 2)))
x6 = boys(4, x5)
x7 = (x1 * x6)
x8 = (ax ** (- 1.0))
x9 = boys(3, x5)
x10 = (x8 * x9)
x11 = ((- x10) + x7)
x12 = (x11 * x4)
x13 = (x3 ** 2)
x14 = boys(5, x5)
x15 = (x12 - (((2.0 * x13) * x14) * x8))
x16 = (- x15)
x17 = (- x3)
x18 = (2.0 * x17)
x19 = ((x12 * x18) + (x16 * x3))
x20 = (x2 + A[0])
x21 = (2.0 * x20)
x22 = (x1 * x9)
x23 = boys(2, x5)
x24 = (x22 - (x23 * x8))
x25 = (x24 * x4)
x26 = ((((2.0 * x13) * x6) * x8) - x25)
x27 = (x1 * x26)
x28 = (x26 * x3)
x29 = (x21 * x4)
x30 = ((x1 * x23) - (x8 * boys(1, x5)))
x31 = (x30 * x4)
x32 = ((((2.0 * x13) * x8) * x9) - x31)
x33 = (3.0 * x1)
x34 = (x33 * x4)
x35 = (2.0 * x8)
x36 = (bx ** (- 2.0))
x37 = (x24 * x36)
x38 = ((((- x1) * x19) + (x18 * x37)) + (x28 * x4))
x39 = (x38 * x8)
x40 = (x1 * x14)
x41 = (x6 * x8)
x42 = (x4 * (x40 - x41))
x43 = boys(6, x5)
x44 = ((x18 * x42) + (x3 * ((((2.0 * x13) * x43) * x8) - x42)))
x45 = (x20 * x44)
x46 = (x1 * x16)
x47 = (x20 * x4)
x48 = (x47 * ((2.0 * x45) + (3.0 * x46)))
x49 = (x3 * x35)
x50 = (x49 * x7)
x51 = ((x16 * x20) + x50)
x52 = (x30 * x36)
x53 = (2.0 * x13)
x54 = (((((- x10) * x4) * x53) + x27) + x52)
x55 = (x54 * x8)
x56 = (x21 * x3)
x57 = (x1 * x4)
x58 = (x35 * x57)
x59 = (((17. * da) * db) * (x0 ** (- 0.5)))
x60 = (0. * x59)
x61 = ((- x1) * ((ax * A[1]) + (bx * B[1])))
x62 = (x61 + B[1])
x63 = (- x62)
x64 = (x12 * x63)
x65 = (x13 * x35)
x66 = (x14 * x65)
x67 = ((x62 * x66) + x64)
x68 = (x25 * x63)
x69 = (x41 * x53)
x70 = (x62 * x69)
x71 = (x68 + x70)
x72 = (2.0 * x10)
x73 = (x57 * x72)
x74 = (x62 * x73)
x75 = (x37 * x63)
x76 = ((((- x1) * x67) + (x4 * x70)) + x75)
x77 = (x42 * x63)
x78 = (x43 * x65)
x79 = ((x62 * x78) + x77)
x80 = (x40 * x49)
x81 = ((x20 * x79) + (x62 * x80))
x82 = (x35 * x62)
x83 = ((x14 * x56) + x7)
x84 = (x1 * x83)
x85 = (x4 * x84)
x86 = (x47 * x83)
x87 = (((- x4) * x9) + x7)
x88 = (x17 * x8)
x89 = (x87 * x88)
x90 = ((- x63) * x89)
x91 = (x47 * x7)
x92 = (x1 * x8)
x93 = 2.
x94 = (0. * x59)
x95 = (x93 * x94)
x96 = ((- x1) * ((ax * A[2]) + (bx * B[2])))
x97 = (x96 + B[2])
x98 = (- x97)
x99 = (x12 * x98)
x100 = ((x66 * x97) + x99)
x101 = (x25 * x98)
x102 = (x69 * x97)
x103 = (x101 + x102)
x104 = (x73 * x97)
x105 = (x37 * x98)
x106 = ((((- x1) * x100) + (x102 * x4)) + x105)
x107 = (x42 * x98)
x108 = (x107 + (x78 * x97))
x109 = ((x108 * x20) + (x80 * x97))
x110 = (x35 * x97)
x111 = ((- x89) * x98)
x112 = (x62 ** 2)
x113 = (((((- 2.0) * x112) * x6) * x8) + x25)
x114 = (- x113)
x115 = (x1 * x114)
x116 = (2.0 * x112)
x117 = (x10 * x116)
x118 = ((x115 - (x117 * x4)) + x52)
x119 = (x118 * x8)
x120 = (x14 * x35)
x121 = (x112 * x120)
x122 = (x12 - x121)
x123 = (x20 ** 2)
x124 = (x123 * x4)
x125 = (2.0 * x124)
x126 = ((- x117) + x31)
x127 = (x29 * x3)
x128 = ((- x11) * x4)
x129 = (x63 ** 2)
x130 = (x120 * x129)
x131 = ((- x24) * x4)
x132 = (2.0 * x41)
x133 = (x129 * x132)
x134 = ((x1 * (x128 + x130)) - (x4 * (x131 + x133)))
x135 = (x134 * x88)
x136 = (- x122)
x137 = (x1 * x136)
x138 = ((((2.0 * x112) * x43) * x8) - x42)
x139 = (x47 * (x137 + (x138 * x56)))
x140 = (x137 * x47)
x141 = (x60 * x93)
x142 = (x4 * x6)
x143 = (((x63 * x88) * x98) * ((- x142) + x40))
x144 = (x62 * x97)
x145 = (x144 * x47)
x146 = (x63 * x8)
x147 = (((- x146) * x87) * x98)
x148 = (0.5 * x1)
x149 = (x22 * x4)
x150 = (x62 * x8)
x151 = (3. * x94)
x152 = (x151 * x8)
x153 = (x97 ** 2)
x154 = (((((- 2.0) * x153) * x6) * x8) + x25)
x155 = (- x154)
x156 = (x1 * x155)
x157 = (x153 * x72)
x158 = ((x156 - (x157 * x4)) + x52)
x159 = (x158 * x8)
x160 = (x12 - (x120 * x153))
x161 = (x1 * x154)
x162 = ((- x157) + x31)
x163 = (x162 * x57)
x164 = (x98 ** 2)
x165 = (x120 * x164)
x166 = (x132 * x164)
x167 = ((x1 * (x128 + x165)) - (x4 * (x131 + x166)))
x168 = (x167 * x88)
x169 = (- x160)
x170 = (x1 * x169)
x171 = (((((- 2.0) * x153) * x43) * x8) + x42)
x172 = (- x171)
x173 = (x47 * (x170 + (x172 * x56)))
x174 = (x170 * x47)
x175 = ((x136 * x62) + (2.0 * x64))
x176 = (x114 * x62)
x177 = ((((- x1) * x175) + (x176 * x4)) + (2.0 * x75))
x178 = (x177 * x8)
x179 = ((x138 * x62) + (2.0 * x77))
x180 = (0.5 * x20)
x181 = (- x20)
x182 = (3.0 * x128)
x183 = (3.0 * x131)
x184 = ((x1 * (x130 + x182)) - (x4 * (x133 + x183)))
x185 = (x146 * x184)
x186 = ((x121 * x97) + x99)
x187 = ((x116 * x41) * x97)
x188 = ((((- x1) * x186) + x105) + (x187 * x4))
x189 = (x188 * x8)
x190 = (x107 + (((x112 * x35) * x43) * x97))
x191 = (x8 * x98)
x192 = (x181 * x191)
x193 = (x146 * x167)
x194 = ((x169 * x97) + (2.0 * x99))
x195 = (x155 * x97)
x196 = ((((- x1) * x194) + (2.0 * x105)) + (x195 * x4))
x197 = (x196 * x8)
x198 = ((2.0 * x107) + (x172 * x97))
x199 = ((x1 * (x165 + x182)) - (x4 * (x166 + x183)))
x200 = (x61 + A[1])
x201 = (- x200)
x202 = (x17 ** 2)
x203 = (x120 * x202)
x204 = (x132 * x202)
x205 = ((x1 * (x182 + x203)) - (x4 * (x183 + x204)))
x206 = (x200 * x4)
x207 = (x33 * x51)
x208 = ((0. * x59) * x93)
x209 = (2.0 * x200)
x210 = (x32 * x57)
x211 = (x209 * x4)
x212 = ((- x8) * ((((- x1) * ((x209 * x67) + x27)) + x210) + (x211 * x71)))
x213 = ((x209 * x79) + x46)
x214 = (x20 * x213)
x215 = (x209 * x62)
x216 = ((x14 * x215) + x7)
x217 = (x1 * x216)
x218 = (x217 * x49)
x219 = (x1 * ((x215 * x6) + x22))
x220 = (0. * x59)
x221 = (x200 * x47)
x222 = ((x1 * (x128 + x203)) - (x4 * (x131 + x204)))
x223 = (x206 * x97)
x224 = (0. * x59)
x225 = ((x136 * x200) + (x7 * x82))
x226 = (x1 * x225)
x227 = (x3 * x8)
x228 = ((x138 * x200) + (x40 * x82))
x229 = (0. * x59)
x230 = (x227 * (((x142 * x215) + x149) - x217))
x231 = ((x215 * x43) + x40)
x232 = (x8 * x97)
x233 = 1.
x234 = (x229 * x233)
x235 = ((((- x167) * x17) * x201) * x8)
x236 = (- x126)
x237 = ((- x8) * ((((- x1) * ((3.0 * x115) + (x175 * x209))) + (x211 * (x176 + (2.0 * x68)))) + (x236 * x34)))
x238 = (x179 * x200)
x239 = ((3.0 * x137) + (2.0 * x238))
x240 = (x144 * x35)
x241 = (x101 + x187)
x242 = ((- x8) * ((((- x1) * ((x186 * x200) + (x240 * x7))) + (x144 * x73)) + (x206 * x241)))
x243 = ((x190 * x200) + (x240 * x40))
x244 = (((x1 * ((x160 * x215) + x161)) - ((x154 * x211) * x62)) - x163)
x245 = (x96 + A[2])
x246 = (- x245)
x247 = (x245 * x4)
x248 = (x245 * x47)
x249 = (2.0 * x245)
x250 = (x249 * x4)
x251 = ((- x8) * ((((- x1) * ((x100 * x249) + x27)) + (x103 * x250)) + x210))
x252 = (- x251)
x253 = ((x108 * x249) + x46)
x254 = (x20 * x253)
x255 = (x249 * x97)
x256 = ((x14 * x255) + x7)
x257 = (x1 * x256)
x258 = (x257 * x49)
x259 = (x1 * (x22 + (x255 * x6)))
x260 = ((((- x134) * x17) * x246) * x8)
x261 = (((x142 * x255) + x149) - x257)
x262 = (x227 * x261)
x263 = ((x255 * x43) + x40)
x264 = ((x110 * x7) + (x169 * x245))
x265 = (x1 * x264)
x266 = ((x104 + (x155 * x247)) - x265)
x267 = ((x110 * x40) + (x172 * x245))
x268 = ((- x8) * ((((- x1) * (x115 + (x186 * x249))) + (x236 * x57)) + (x241 * x250)))
x269 = (- x268)
x270 = (x137 + (x190 * x249))
x271 = (((((((2.0 * x1) * x4) * x8) * x9) * x97) - (x1 * (((((2.0 * x1) * x6) * x8) * x97) - (x160 * x245)))) - (x154 * x247))
x272 = (x271 * x8)
x273 = (0.5 * x272)
x274 = (((((2.0 * x1) * x14) * x8) * x97) - (x171 * x245))
x275 = (x8 * (((x1 * ((3.0 * x156) + (x194 * x249))) + (x162 * x34)) - (x250 * ((2.0 * x101) + x195))))
x276 = (- x275)
x277 = (x198 * x245)
x278 = ((3.0 * x170) + (2.0 * x277))
x279 = ((- x54) * x8)
x280 = (x200 ** 2)
x281 = (x280 * x4)
x282 = (2.0 * x281)
x283 = (x38 * x8)
x284 = (((- x282) * x44) - x283)
x285 = (x76 * x8)
x286 = (x206 * x213)
x287 = (x206 * x46)
x288 = ((x285 + x286) + x287)
x289 = (x206 * x216)
x290 = (x206 * x3)
x291 = (((x289 * x3) + (x290 * x7)) + x90)
x292 = (x106 * x8)
x293 = (((- x108) * x282) - x292)
x294 = (x282 * x3)
x295 = (x111 + ((x14 * x294) * x97))
x296 = ((- x118) * x8)
x297 = (x35 * x4)
x298 = ((x211 * x225) + (x219 * x297))
x299 = ((x135 + ((x211 * x228) * x3)) + (x218 * x4))
x300 = (x290 * x97)
x301 = ((x143 + (x231 * x300)) + (x300 * x40))
x302 = ((x147 + (x223 * x7)) + (x289 * x97))
x303 = (x224 * x233)
x304 = (x303 * x8)
x305 = ((- x158) * x8)
x306 = (x168 + (x172 * x294))
x307 = (x177 * x8)
x308 = (x206 * x239)
x309 = (3.0 * x4)
x310 = (((x226 * x309) + x307) + x308)
x311 = (x20 * x208)
x312 = (x188 * x8)
x313 = (x217 * x4)
x314 = (x20 * x224)
x315 = (((x170 * x206) + x193) + (x206 * (x170 + (x172 * x215))))
x316 = (x20 * x229)
x317 = (x196 * x8)
x318 = (x198 * x281)
x319 = (x20 * x95)
x320 = (x217 * x227)
x321 = (x247 * x303)
x322 = (x227 * x257)
x323 = (x206 * x303)
x324 = (x20 * x3)
x325 = (x1 * ((x215 * x256) + x259))
x326 = ((x215 * x263) + x257)
x327 = (0.5 * x265)
x328 = (x200 * x270)
x329 = (x150 * x257)
x330 = (x303 * x47)
x331 = (x267 * x62)
x332 = ((x245 ** 2) * x4)
x333 = (2.0 * x332)
x334 = (x1 * ((x16 * x333) + x279))
x335 = (x332 * x44)
x336 = ((- x283) - (2.0 * x335))
x337 = ((- x285) - (x333 * x79))
x338 = (x3 * x333)
x339 = (((x14 * x338) * x62) + x90)
x340 = (x339 * x92)
x341 = (((x247 * x253) + (x247 * x46)) + x292)
x342 = (x247 * x256)
x343 = (x247 * x3)
x344 = ((x111 + (x3 * x342)) + (x343 * x7))
x345 = (x1 * ((x136 * x333) + x296))
x346 = (x135 + (x138 * x338))
x347 = (x343 * x62)
x348 = ((x143 + (x263 * x347)) + (x347 * x40))
x349 = ((x147 + ((x247 * x62) * x7)) + (x342 * x62))
x350 = ((x250 * x264) + (x259 * x297))
x351 = (x1 * (x305 + x350))
x352 = ((x168 + ((x250 * x267) * x3)) + (x258 * x4))
x353 = (x179 * x332)
x354 = (((x137 * x247) + x189) + (x247 * x270))
x355 = (((x247 * x278) + (x265 * x309)) + x317)
x356 = (0.5 * x200)
x357 = (x205 * x88)
x358 = (x191 * x201)
x359 = (x257 * x82)
x360 = ((- x178) - (2.0 * x353))
x361 = ((x193 + (x250 * x331)) + (x359 * x4))
x362 = (0.5 * x245)
result[(0, 0)] = numpy.sum((x60 * (((x21 * (((x34 * x51) + x39) + x48)) + (x33 * (((x29 * x51) - x55) + (x58 * (x22 + (x56 * x6)))))) + (x35 * ((((- x1) * ((x19 * x21) + (3.0 * x27))) + (x29 * ((x18 * x25) + x28))) + (x32 * x34))))))
result[(0, 1)] = numpy.sum(((0.5 * x95) * (((x20 * (((x29 * x81) + (x76 * x8)) + (x82 * x85))) + ((2.0 * x8) * ((((- x1) * ((x20 * x67) + (x50 * x62))) + (x3 * x74)) + (x47 * x71)))) + ((2.0 * x92) * (((x62 * x86) + (x62 * x91)) + x90)))))
result[(0, 2)] = numpy.sum(((0.5 * x95) * (((x20 * (((x106 * x8) + (x109 * x29)) + (x110 * x85))) + ((2.0 * x8) * ((((- x1) * ((x100 * x20) + (x50 * x97))) + (x103 * x47)) + (x104 * x3)))) + ((2.0 * x92) * ((x111 + (x86 * x97)) + (x91 * x97))))))
result[(0, 3)] = numpy.sum(((- x141) * (((x1 * (x119 + (x122 * x125))) - (x21 * ((x135 + x139) + x140))) + (x35 * ((((- x1) * ((x1 * x113) + (x122 * x56))) + (x113 * x127)) + (x126 * x57))))))
result[(0, 4)] = numpy.sum((x152 * (((x148 * (((x125 * x14) * x144) + x147)) + ((x150 * x97) * (((x142 * x56) + x149) - x84))) + (x20 * ((x143 + (x145 * x40)) + (x145 * (x40 + (x43 * x56
result[(0, 5)] = numpy.sum(((- x141) * (((x1 * ((x125 * x160) + x159)) - (x21 * ((x168 + x173) + x174))) + (x35 * ((((- x1) * ((x160 * x56) + x161)) + (x127 * x154)) + x163)))))
result[(0, 6)] = numpy.sum((x94 * ((x180 * ((x125 * x179) + x178)) - (x181 * x185))))
result[(0, 7)] = numpy.sum(((- x95) * ((x134 * x192) - (x180 * ((x125 * x190) + x189)))))
result[(0, 8)] = numpy.sum(((- x95) * ((((x167 * x181) * x63) * x8) - (x180 * (((x125 * x172) * x62) + x193)))))
result[(0, 9)] = numpy.sum((x94 * ((x180 * ((x125 * x198) + x197)) - (x192 * x199))))
result[(1, 0)] = numpy.sum((x208 * ((((((- x17) * x201) * x205) * x8) + (x200 * x48)) + (x206 * x207))))
result[(1, 1)] = numpy.sum((x220 * (((- x212) + (x29 * (x214 + x218))) + (x58 * ((x216 * x56) + x219)))))
result[(1, 2)] = numpy.sum(((0.5 * x224) * ((((2.0 * x109) * x221) - (((x201 * x222) * x8) * x98)) + (((2.0 * x223) * x8) * x84))))
result[(1, 3)] = numpy.sum((x229 * (((x226 * x47) + (x227 * (((x114 * x206) - x226) + x74))) + (x47 * (x226 + (x228 * x56))))))
result[(1, 4)] = numpy.sum(((x232 * x234) * (((x217 * x47) + x230) + (x47 * (x217 + (x231 * x56))))))
result[(1, 5)] = numpy.sum((x229 * (((x173 * x200) + (x174 * x200)) + x235)))
result[(1, 6)] = numpy.sum((x141 * ((x125 * x239) - x237)))
result[(1, 7)] = numpy.sum(((0.5 * x224) * (((2.0 * x124) * x243) - x242)))
result[(1, 8)] = numpy.sum(((- x220) * ((x125 * ((x1 * x160) + (x171 * x215))) - (x244 * x8))))
result[(1, 9)] = numpy.sum(((0.5 * x95) * ((((2.0 * x123) * x198) * x206) - (((x199 * x201) * x8) * x98))))
result[(2, 0)] = numpy.sum((x208 * ((((((- x17) * x205) * x246) * x8) + (x207 * x247)) + (x245 * x48))))
result[(2, 1)] = numpy.sum(((0.5 * x224) * (((((2.0 * x150) * x247) * x84) - (((x222 * x246) * x63) * x8)) + ((2.0 * x248) * x81))))
result[(2, 2)] = numpy.sum((x220 * ((x252 + (x29 * (x254 + x258))) + (x58 * ((x256 * x56) + x259)))))
result[(2, 3)] = numpy.sum((x229 * (((x139 * x245) + (x140 * x245)) + x260)))
result[(2, 4)] = numpy.sum(((x150 * x234) * (((x257 * x47) + x262) + (x47 * (x257 + (x263 * x56))))))
result[(2, 5)] = numpy.sum((x229 * (((x227 * x266) + (x265 * x47)) + (x47 * (x265 + (x267 * x56))))))
result[(2, 6)] = numpy.sum(((0.5 * x95) * ((((2.0 * x124) * x179) * x245) - (((x184 * x246) * x63) * x8))))
result[(2, 7)] = numpy.sum((x220 * ((x125 * x270) + x269)))
result[(2, 8)] = numpy.sum(((x224 * x62) * ((x124 * x274) + x273)))
result[(2, 9)] = numpy.sum((x141 * ((x125 * x278) + x276)))
result[(3, 0)] = numpy.sum(((- x141) * ((x21 * x284) - (x33 * ((x16 * x282) + x279)))))
result[(3, 1)] = numpy.sum((x224 * ((x180 * x288) + (x291 * x92))))
result[(3, 2)] = numpy.sum((x224 * (((- x180) * x293) + (x295 * x92))))
result[(3, 3)] = numpy.sum((x220 * ((x1 * (x296 + x298)) + (x21 * x299))))
result[(3, 4)] = numpy.sum((x304 * ((x148 * x302) + (x20 * x301))))
result[(3, 5)] = numpy.sum((x220 * ((x1 * ((x169 * x282) + x305)) + (x21 * x306))))
result[(3, 6)] = numpy.sum((x310 * x311))
result[(3, 7)] = numpy.sum(((0.5 * x314) * ((((2.0 * x206) * x243) + ((2.0 * x232) * x313)) + x312)))
result[(3, 8)] = numpy.sum((x315 * x316))
result[(3, 9)] = numpy.sum(((0.5 * x319) * (x317 + (2.0 * x318))))
result[(4, 0)] = numpy.sum(((((0.5 * x151) * x206) * x245) * ((2.0 * x45) + (3.0 * x46))))
result[(4, 1)] = numpy.sum(((0.5 * x321) * (x214 + (2.0 * x320))))
result[(4, 2)] = numpy.sum(((0.5 * x323) * (x254 + (2.0 * x322))))
result[(4, 3)] = numpy.sum(((0.5 * x321) * (x226 + ((2.0 * x228) * x324))))
result[(4, 4)] = numpy.sum(((((0.25 * x4) * x59) * x8) * (x325 + (x326 * x56))))
result[(4, 5)] = numpy.sum((x323 * ((x274 * x324) + x327)))
result[(4, 6)] = numpy.sum((((0.5 * x151) * x248) * ((((- 3.0) * x1) * x122) + (2.0 * x238))))
result[(4, 7)] = numpy.sum(((0.5 * x330) * (x328 + (2.0 * x329))))
result[(4, 8)] = numpy.sum((x330 * ((x200 * x331) + x327)))
result[(4, 9)] = numpy.sum((((0.5 * x151) * x221) * ((3.0 * x170) + (2.0 * x277))))
result[(5, 0)] = numpy.sum((x141 * (((- x21) * x336) + (3.0 * x334))))
result[(5, 1)] = numpy.sum((x224 * (((- x180) * x337) + x340)))
result[(5, 2)] = numpy.sum((x224 * ((x180 * x341) + (x344 * x92))))
result[(5, 3)] = numpy.sum((x220 * ((x21 * x346) + x345)))
result[(5, 4)] = numpy.sum((x304 * ((x148 * x349) + (x20 * x348))))
result[(5, 5)] = numpy.sum((x220 * ((x21 * x352) + x351)))
result[(5, 6)] = numpy.sum(((0.5 * x319) * (x307 + (2.0 * x353))))
result[(5, 7)] = numpy.sum((x316 * x354))
result[(5, 8)] = numpy.sum(((0.5 * x314) * ((x193 + ((2.0 * x247) * x331)) + ((2.0 * x329) * x4))))
result[(5, 9)] = numpy.sum((x311 * x355))
result[(6, 0)] = numpy.sum(((- x94) * ((x201 * x357) + (x284 * x356))))
result[(6, 1)] = numpy.sum(((- x141) * (((x1 * ((x15 * x282) + x55)) - ((2.0 * x200) * x288)) + (2.0 * x212))))
result[(6, 2)] = numpy.sum(((- x95) * ((x222 * x358) + (x293 * x356))))
result[(6, 3)] = numpy.sum((x95 * ((((- x227) * - 2.0) * x1) * x4) * x62) * x8) * x9) + (x1 * (((((2.0 * x1) * x6) * x62) * x8) - (x122 * x200)))) + (x113 * x206))) + (x291 * x92)) + (x299 * x356))))
result[(6, 4)] = numpy.sum((x152 * (((x148 * x295) + (x200 * x301)) + (x230 * x97))))
result[(6, 5)] = numpy.sum((x95 * (x235 + (x306 * x356))))
result[(6, 6)] = numpy.sum(((- x60) * ((((- x209) * x310) + (2.0 * x237)) + (x33 * (x119 - x298)))))
result[(6, 7)] = numpy.sum(((0.5 * x95) * (((x200 * (((x110 * x313) + (x211 * x243)) + x312)) - (2.0 * x242)) + ((2.0 * x302) * x92))))
result[(6, 8)] = numpy.sum((x141 * ((((- x1) * (x159 + (x160 * x282))) + (x209 * x315)) + ((2.0 * x244) * x8))))
result[(6, 9)] = numpy.sum(((- x94) * ((x199 * x358) - (x356 * (x197 + (2.0 * x318))))))
result[(7, 0)] = numpy.sum(((0.5 * x95) * (((((- x17) * x205) * x246) * x8) + (((2.0 * x247) * x280) * x44))))
result[(7, 1)] = numpy.sum((x229 * ((((((- x222) * x246) * x63) * x8) + (x245 * x286)) + (x245 * x287))))
result[(7, 2)] = numpy.sum((x220 * (x252 + (x253 * x282))))
result[(7, 3)] = numpy.sum(((0.5 * x224) * ((((((- x134) * x17) * x246) * x8) + (((2.0 * x228) * x245) * x290)) + ((2.0 * x247) * x320))))
result[(7, 4)] = numpy.sum(((x227 * x234) * (((x150 * x261) + (x206 * x257)) + (x206 * x326))))
result[(7, 5)] = numpy.sum(((x224 * x3) * (x273 + (x274 * x281))))
result[(7, 6)] = numpy.sum((x208 * ((((((- x184) * x246) * x63) * x8) + ((3.0 * x226) * x247)) + (x245 * x308))))
result[(7, 7)] = numpy.sum((x220 * (((x211 * (x328 + x359)) + x269) + (x297 * x325))))
result[(7, 8)] = numpy.sum((x229 * (((x150 * x266) + (x206 * x265)) + (x206 * ((x209 * x331) + x265)))))
result[(7, 9)] = numpy.sum((x141 * (x276 + (x278 * x282))))
result[(8, 0)] = numpy.sum((((0.5 * x200) * x95) * ((2.0 * x335) + x39)))
result[(8, 1)] = numpy.sum((x220 * (((- x209) * x337) + x334)))
result[(8, 2)] = numpy.sum(((x200 * x229) * x341))
result[(8, 3)] = numpy.sum((x224 * (x340 + (x346 * x356))))
result[(8, 4)] = numpy.sum((x304 * ((x148 * x344) + (x200 * x348))))
result[(8, 5)] = numpy.sum((((0.5 * x200) * x224) * ((x168 + ((2.0 * x267) * x343)) + ((2.0 * x322) * x4))))
result[(8, 6)] = numpy.sum((x141 * (((- x209) * x360) + (3.0 * x345))))
result[(8, 7)] = numpy.sum((x224 * ((x349 * x92) + (x354 * x356))))
result[(8, 8)] = numpy.sum((x220 * ((x209 * x361) + x351)))
result[(8, 9)] = numpy.sum(((x200 * x208) * x355))
result[(9, 0)] = numpy.sum(((- x94) * ((x246 * x357) + (x336 * x362))))
result[(9, 1)] = numpy.sum(((- x95) * (((x146 * x222) * x246) + (x337 * x362))))
result[(9, 2)] = numpy.sum(((- x141) * (((x1 * ((x15 * x333) + x55)) - ((2.0 * x245) * x341)) + (2.0 * x251))))
result[(9, 3)] = numpy.sum((x95 * (x260 + (x346 * x362))))
result[(9, 4)] = numpy.sum((x152 * (((x148 * x339) + (x245 * x348)) + (x262 * x62))))
result[(9, 5)] = numpy.sum((x95 * (((x227 * x271) + (x344 * x92)) + (x352 * x362))))
result[(9, 6)] = numpy.sum(((- x94) * ((x185 * x246) + (x360 * x362))))
result[(9, 7)] = numpy.sum(((- x141) * (((x1 * (x119 + (x122 * x333))) - ((2.0 * x245) * x354)) + (2.0 * x268))))
result[(9, 8)] = numpy.sum((x95 * (((x272 * x62) + (x349 * x92)) + (x361 * x362))))
result[(9, 9)] = numpy.sum(((- x60) * ((((- x249) * x355) + (2.0 * x275)) + (x33 * (x159 - x350)))))
return result |
class ModelTester(ErsiliaBase):
def __init__(self, model_id, config_json=None):
ErsiliaBase.__init__(self, config_json=config_json, credentials_json=None)
self.model_id = model_id
self.model_size = 0
self.tmp_folder = tempfile.mkdtemp(prefix='ersilia-')
self._info = self._read_information()
self._input = self._info['card']['Input']
self.RUN_FILE = 'run.sh'
self.information_check = False
self.single_input = False
self.example_input = False
self.consistent_output = False
self.run_using_bash = False
def _read_information(self):
json_file = os.path.join(self._dest_dir, self.model_id, INFORMATION_FILE)
self.logger.debug('Reading model information from {0}'.format(json_file))
if (not os.path.exists(json_file)):
raise texc.InformationFileNotExist(self.model_id)
with open(json_file, 'r') as f:
data = json.load(f)
return data
"\n This function uses the fuzzy wuzzy package to compare the differences between outputs when \n they're strings and not floats. The fuzz.ratio gives the percent of similarity between the two outputs.\n Example: two strings that are the exact same will return 100\n "
def _compare_output_strings(self, output1, output2):
if ((output1 is None) and (output2 is None)):
return 100
else:
return fuzz.ratio(output1, output2)
"\n To compare outputs, we are stating that numbers generated by the models need to be within 5% of each \n other in order to be considered consistent. This function returns true if the outputs are within that \n 5% threshold (meaning they're consistent), and false if they are not (meaning they are not consistent).\n "
def _is_below_difference_threshold(self, output1, output2):
if ((output1 == 0.0) or (output2 == 0.0)):
return (output1 == output2)
elif ((output1 is None) or (output2 is None)):
return (output1 == output2)
else:
return ((100 * (abs((output1 - output2)) / ((output1 + output2) / 2))) < DIFFERENCE_THRESHOLD)
'\n When the user specifies an output file, the file will show the user how big the model is. This function \n calculates the size of the model to allow this. \n '
def _set_model_size(self, directory):
for (dirpath, dirnames, filenames) in os.walk(directory):
for filename in filenames:
file_path = os.path.join(dirpath, filename)
self.model_size += os.path.getsize(file_path)
'\n This helper method was taken from the run.py file, and just prints the output for the user \n '
def _print_output(self, result, output):
echo('Printing output...')
if isinstance(result, types.GeneratorType):
for r in result:
if (r is not None):
if (output is not None):
with open(output.name, 'w') as file:
json.dump(r, output.name)
else:
echo(json.dumps(r, indent=4))
elif (output is not None):
message = echo('Something went wrong', fg='red')
with open(output.name, 'w') as file:
json.dump(message, output.name)
else:
echo('Something went wrong', fg='red')
else:
echo(result)
'\n This helper method checks that the model ID is correct.\n '
def _check_model_id(self, data):
print('Checking model ID...')
if (data['card']['Identifier'] != self.model_id):
raise texc.WrongCardIdentifierError(self.model_id)
'\n This helper method checks that the slug field is non-empty.\n '
def _check_model_slug(self, data):
print('Checking model slug...')
if (not data['card']['Slug']):
raise texc.EmptyField('slug')
'\n This helper method checks that the description field is non-empty.\n '
def _check_model_description(self, data):
print('Checking model description...')
if (not data['card']['Description']):
raise texc.EmptyField('Description')
'\n This helper method checks that the model task is one of the following valid entries:\n - Classification\n - Regression\n - Generative\n - Representation\n - Similarity\n - Clustering\n - Dimensionality reduction\n '
def _check_model_task(self, data):
print('Checking model task...')
valid_tasks = ['Classification', 'Regression', 'Generative', 'Representation', 'Similarity', 'Clustering', 'Dimensionality reduction']
sep = ', '
tasks = []
if (sep in data['card']['Task']):
tasks = data['card']['Task'].split(sep)
else:
tasks = data['card']['Task']
for task in tasks:
if (task not in valid_tasks):
raise texc.InvalidEntry('Task')
'\n This helper method checks that the input field is one of the following valid entries:\n - Compound\n - Protein\n - Text\n '
def _check_model_input(self, data):
print('Checking model input...')
valid_inputs = [['Compound'], ['Protein'], ['Text']]
if (data['card']['Input'] not in valid_inputs):
raise texc.InvalidEntry('Input')
'\n This helper method checks that the input shape field is one of the following valid entries:\n - Single\n - Pair\n - List\n - Pair of Lists\n - List of Lists\n '
def _check_model_input_shape(self, data):
print('Checking model input shape...')
valid_input_shapes = ['Single', 'Pair', 'List', 'Pair of Lists', 'List of Lists']
if (data['card']['Input Shape'] not in valid_input_shapes):
raise texc.InvalidEntry('Input Shape')
'\n This helper method checks the the output is one of the following valid entries:\n - Boolean\n - Compound\n - Descriptor\n - Distance\n - Experimental value\n - Image\n - Other value\n - Probability\n - Protein\n - Score\n - Text\n '
def _check_model_output(self, data):
print('Checking model output...')
valid_outputs = ['Boolean', 'Compound', 'Descriptor', 'Distance', 'Experimental value', 'Image', 'Other value', 'Probability', 'Protein', 'Score', 'Text']
sep = ', '
outputs = []
if (sep in data['card']['Output']):
outputs = data['card']['Output'].split(sep)
else:
outputs = data['card']['Output']
for output in outputs:
if (output not in valid_outputs):
raise texc.InvalidEntry('Output')
'\n This helper method checks that the output type is one of the following valid entries:\n - String\n - Float\n - Integer\n '
def _check_model_output_type(self, data):
print('Checking model output type...')
valid_output_types = [['String'], ['Float'], ['Integer']]
if (data['card']['Output Type'] not in valid_output_types):
raise texc.InvalidEntry('Output Type')
'\n This helper method checks that the output shape is one of the following valid entries:\n - Single\n - List\n - Flexible List\n - Matrix\n - Serializable Object\n '
def _check_model_output_shape(self, data):
print('Checking model output shape...')
valid_output_shapes = ['Single', 'List', 'Flexible List', 'Matrix', 'Serializable Object']
if (data['card']['Output Shape'] not in valid_output_shapes):
raise texc.InvalidEntry('Output Shape')
"\n Check the model information to make sure it's correct. Performs the following checks:\n - Checks that model ID is correct\n - Checks that model slug is non-empty\n - Checks that model description is non-empty\n - Checks that the model task is valid\n - Checks that the model input, input shape is valid\n - Checks that the model output, output type, output shape is valid\n "
_ersilia_exception
def check_information(self, output):
self.logger.debug('Checking that model information is correct')
print(((BOLD + 'Beginning checks for {0} model information:'.format(self.model_id)) + RESET))
json_file = os.path.join(self._dest_dir, self.model_id, INFORMATION_FILE)
with open(json_file, 'r') as f:
data = json.load(f)
self._check_model_id(data)
self._check_model_slug(data)
self._check_model_description(data)
self._check_model_task(data)
self._check_model_input(data)
self._check_model_input_shape(data)
self._check_model_output(data)
self._check_model_output_type(data)
self._check_model_output_shape(data)
print('SUCCESS! Model information verified.\n')
if (output is not None):
self.information_check = True
'\n Runs the model on a single smiles string and prints to the user if no output is specified.\n '
_ersilia_exception
def check_single_input(self, output):
session = Session(config_json=None)
service_class = session.current_service_class()
input = 'COc1ccc2c(NC(=O)Nc3cccc(C(F)(F)F)n3)ccnc2c1'
click.echo(((BOLD + 'Testing model on single smiles input...\n') + RESET))
mdl = ErsiliaModel(self.model_id, service_class=service_class, config_json=None)
result = mdl.run(input=input, output=output, batch_size=100)
if (output is not None):
self.single_input = True
else:
self._print_output(result, output)
"\n Generates an example input of 5 smiles using the 'example' command, and then tests the model on that input and prints it\n to the consol if no output file is specified by the user.\n "
_ersilia_exception
def check_example_input(self, output):
session = Session(config_json=None)
service_class = session.current_service_class()
eg = ExampleGenerator(model_id=self.model_id)
input = eg.example(n_samples=NUM_SAMPLES, file_name=None, simple=True)
click.echo(((BOLD + "\nTesting model on input of 5 smiles given by 'example' command...\n") + RESET))
mdl = ErsiliaModel(self.model_id, service_class=service_class, config_json=None)
result = mdl.run(input=input, output=output, batch_size=100)
if (output is not None):
self.example_input = True
else:
self._print_output(result, output)
"\n Gets an example input of 5 smiles using the 'example' command, and then runs this same input on the \n model twice. Then, it checks if the outputs are consistent or not and specifies that to the user. If \n it is not consistent, an InconsistentOutput error is raised. Lastly, it makes sure that the number of \n outputs equals the number of inputs. \n "
_ersilia_exception
def check_consistent_output(self):
click.echo(((BOLD + '\nConfirming model produces consistent output...') + RESET))
session = Session(config_json=None)
service_class = session.current_service_class()
eg = ExampleGenerator(model_id=self.model_id)
input = eg.example(n_samples=NUM_SAMPLES, file_name=None, simple=True)
mdl1 = ErsiliaModel(self.model_id, service_class=service_class, config_json=None)
mdl2 = ErsiliaModel(self.model_id, service_class=service_class, config_json=None)
result = mdl1.run(input=input, output=None, batch_size=100)
result2 = mdl2.run(input=input, output=None, batch_size=100)
zipped = list(zip(result, result2))
for (item1, item2) in zipped:
output1 = item1['output']
output2 = item2['output']
keys1 = list(output1.keys())
keys2 = list(output2.keys())
for (key1, key2) in zip(keys1, keys2):
if (not isinstance(output1[key1], type(output2[key2]))):
for (item1, item2) in zipped:
print(item1)
print(item2)
print('\n')
raise texc.InconsistentOutputTypes(self.model_id)
if (output1[key1] is None):
continue
elif isinstance(output1[key1], (float, int)):
if (not self._is_below_difference_threshold(output1[key1], output2[key2])):
for (item1, item2) in zipped:
print(item1)
print(item2)
print('\n')
raise texc.InconsistentOutputs(self.model_id)
elif isinstance(output1[key1], list):
ls1 = output1[key1]
ls2 = output2[key2]
for (elem1, elem2) in zip(ls1, ls2):
if isinstance(elem1, float):
if (not self._is_below_difference_threshold(elem1, elem2)):
for (item1, item2) in zipped:
print(item1)
print(item2)
print('\n')
raise texc.InconsistentOutputs(self.model_id)
elif (self._compare_output_strings(elem1, elem2) <= 95):
print('output1 value:', elem1)
print('output2 value:', elem2)
raise texc.InconsistentOutputs(self.model_id)
elif (self._compare_output_strings(output1[key1], output2[key2]) <= 95):
print('output1 value:', output1[key1])
print('output2 value:', output2[key2])
raise texc.InconsistentOutputs(self.model_id)
self.consistent_output = True
print('Model output is consistent!')
click.echo(((BOLD + '\nConfirming there are same number of outputs as inputs...') + RESET))
print('Number of inputs:', NUM_SAMPLES)
print('Number of outputs:', len(zipped))
if (NUM_SAMPLES != len(zipped)):
raise texc.MissingOutputs()
else:
echo('Number of outputs and inputs are equal!\n')
def default_env():
if ('CONDA_DEFAULT_ENV' in os.environ):
return os.environ['CONDA_DEFAULT_ENV']
else:
return BASE
def conda_prefix(is_base):
o = run_command_check_output('which conda').rstrip()
if o:
o = os.path.abspath(os.path.join(o, '..', '..'))
return o
if is_base:
o = run_command_check_output('echo $CONDA_PREFIX').rstrip()
return o
else:
o = run_command_check_output('echo $CONDA_PREFIX_1').rstrip()
return o
def is_base(self):
default_env = self.default_env()
if (default_env == 'base'):
return True
else:
return False
def _compare_tolerance(self, value1, value2, tolerance_percentage):
diff = abs((value1 - value2))
tolerance = ((tolerance_percentage / 100) * max(abs(value1), abs(value2)))
return (diff <= tolerance)
def _compare_string_similarity(self, str1, str2, similarity_threshold):
similarity = fuzz.ratio(str1, str2)
return (similarity >= similarity_threshold)
def read_csv(self, file_path):
data = []
with open(file_path, 'r') as file:
lines = file.readlines()
header = lines[0].strip().split(',')
for line in lines[1:]:
values = line.strip().split(',')
data.append(dict(zip(header, values)))
return data
_ersilia_exception
def run_bash(self):
click.echo(((BOLD + 'Calculating model size...') + RESET))
with tempfile.TemporaryDirectory() as temp_dir:
self._set_model_size(os.path.join(self.conda_prefix(self.is_base()), '../eos/dest/{0}'.format(self.model_id)))
size_kb = (self.model_size / 1024)
size_mb = (size_kb / 1024)
size_gb = (size_mb / 1024)
print('\nModel Size:')
print('KB:', size_kb)
print('MB:', size_mb)
print('GB:', size_gb)
click.echo(((BOLD + '\nRunning the model bash script...') + RESET))
eg = ExampleGenerator(model_id=self.model_id)
input = eg.example(n_samples=NUM_SAMPLES, file_name=None, simple=True)
ex_file = os.path.abspath(os.path.join(temp_dir, 'example_file.csv'))
with open(ex_file, 'w') as f:
f.write('smiles')
for item in input:
f.write((str(item) + '\n'))
if (not os.path.exists(os.path.join(self.conda_prefix(self.is_base()), '../eos/dest/{0}/model/framework/run.sh'.format(self.model_id)))):
print('Check halted. Either run.sh file does not exist, or model was not fetched via --from_github or --from_s3.')
return
subdirectory_path = os.path.join(self.conda_prefix(self.is_base()), '../eos/dest/{0}/model/framework'.format(self.model_id))
os.chdir(subdirectory_path)
try:
run_path = os.path.abspath(os.path.join(self.conda_prefix(self.is_base()), '../eos/dest/{0}/model/framework/'.format(self.model_id)))
tmp_script = os.path.abspath(os.path.join(temp_dir, 'script.sh'))
arg1 = os.path.join(temp_dir, 'bash_output.csv')
output_log = os.path.abspath(os.path.join(temp_dir, 'output.txt'))
error_log = os.path.abspath(os.path.join(temp_dir, 'error.txt'))
bash_script = '\n source {0}/etc/profile.d/conda.sh \n conda activate {1}\n cd {2}\n bash run.sh . {3} {4} > {5} 2> {6}\n conda deactivate\n '.format(self.conda_prefix(self.is_base()), self.model_id, run_path, ex_file, arg1, output_log, error_log)
with open(tmp_script, 'w') as f:
f.write(bash_script)
print("Executing 'bash run.sh'...")
try:
subprocess.run(['bash', tmp_script], capture_output=True, text=True, check=True)
print('Bash execution completed!\n')
except subprocess.CalledProcessError as e:
print('Error encountered while running the bash script.')
with open(output_log, 'r') as output_file:
output_content = output_file.read()
print('Captured Output:')
print(output_content)
with open(error_log, 'r') as error_file:
error_content = error_file.read()
print('Captured Error:')
print(error_content)
except Exception as e:
print(f'Error while activating the conda environment: {e}')
print('Executing ersilia run...')
output_file = os.path.abspath(os.path.join(temp_dir, 'ersilia_output.csv'))
session = Session(config_json=None)
service_class = session.current_service_class()
mdl = ErsiliaModel(self.model_id, service_class=service_class, config_json=None)
result = mdl.run(input=ex_file, output=output_file, batch_size=100)
print('Ersilia run completed!\n')
ersilia_run = self.read_csv(output_file)
remove_cols = ['key', 'input']
for row in ersilia_run:
for col in remove_cols:
if (col in row):
del row[col]
bash_run = self.read_csv(arg1)
print('Bash output:\n', bash_run)
print('\nErsilia output:\n', ersilia_run)
ersilia_columns = set()
for row in ersilia_run:
ersilia_columns.update(row.keys())
bash_columns = set()
for row in bash_run:
bash_columns.update(row.keys())
common_columns = (ersilia_columns & bash_columns)
for column in common_columns:
for i in range(len(ersilia_run)):
if (isinstance(ersilia_run[i][column], (float, int)) and isinstance(ersilia_run[i][column], (float, int))):
if (not all((self._compare_tolerance(a, b, DIFFERENCE_THRESHOLD) for (a, b) in zip(ersilia_run[i][column], bash_run[i][column])))):
click.echo(((BOLD + '\nBash run and Ersilia run produce inconsistent results.') + RESET))
print('Error in the following column: ', column)
print(ersilia_run[i][column])
print(bash_run[i][column])
raise texc.InconsistentOutputs(self.model_id)
elif (isinstance(ersilia_run[i][column], str) and isinstance(ersilia_run[i][column], str)):
if (not all((self._compare_string_similarity(a, b, 95) for (a, b) in zip(ersilia_run[i][column], bash_run[i][column])))):
click.echo(((BOLD + '\nBash run and Ersilia run produce inconsistent results.') + RESET))
print('Error in the following column: ', column)
print(ersilia_run[i][column])
print(bash_run[i][column])
raise texc.InconsistentOutputs(self.model_id)
elif (isinstance(ersilia_run[i][column], bool) and isinstance(ersilia_run[i][column], bool)):
if (not ersilia_run[i][column].equals(bash_run[i][column])):
click.echo(((BOLD + '\nBash run and Ersilia run produce inconsistent results.') + RESET))
print('Error in the following column: ', column)
print(ersilia_run[i][column])
print(bash_run[i][column])
raise texc.InconsistentOutputs(self.model_id)
click.echo(((BOLD + '\nSUCCESS! Bash run and Ersilia run produce consistent results.') + RESET))
def make_output(self, output, time):
    """Write the collected test information to a JSON file.

    Records the model size (KB/MB/GB), the time the tests took (seconds),
    and whether the basic checks, single-input run, example-input run,
    output-consistency check and bash run completed without error.
    """
    kb = self.model_size / 1024
    mb = kb / 1024
    gb = mb / 1024
    summary = {
        'model size': {'KB': kb, 'MB': mb, 'GB': gb},
        'time to run tests (seconds)': time,
        'basic checks passed': self.information_check,
        'single input run without error': self.single_input,
        'example input run without error': self.example_input,
        'outputs consistent': self.consistent_output,
        'bash run without error': self.run_using_bash,
    }
    with open(output, 'w') as handle:
        json.dump(summary, handle, indent=4)
def run(self, output_file):
    """Run the full check suite and optionally write a JSON summary.

    Executes the information, single-input, example-input, consistency and
    bash checks in order, then writes timing and results to *output_file*
    when one is given.
    """
    started = time.time()
    self.check_information(output_file)
    self.check_single_input(output_file)
    self.check_example_input(output_file)
    self.check_consistent_output()
    self.run_bash()
    elapsed = time.time() - started
    if output_file is not None:
        self.make_output(output_file, elapsed)
@pytest.mark.django_db(transaction=True)  # restored from the mangled fragment ".django_db(transaction=True)"
def test_threaded_data_loader():
    """Exercise ThreadedDataLoader's collision behaviors end to end.

    Loads two CSV fixtures against TreasuryAppropriationAccount and verifies
    the 'update', 'delete', 'skip' and 'skip_and_complain' collision modes.
    """
    field_map = {'treasury_account_identifier': 'ACCT_NUM', 'account_title': 'GWA_TAS_NAME'}
    loader = ThreadedDataLoader(model_class=TreasuryAppropriationAccount, field_map=field_map, collision_field='treasury_account_identifier', collision_behavior='update')
    file_path_1 = str(settings.APP_DIR / 'data' / 'testing_data' / 'tas_list_1.csv')
    file_1_account_title = 'Compensation of Members and Related Administrative Expenses, Senat'
    file_path_2 = str(settings.APP_DIR / 'data' / 'testing_data' / 'tas_list_2.csv')
    file_2_account_title = 'Update Test Name'
    # Initial load inserts the row from file 1.
    loader.load_from_file(file_path_1)
    gwa_tas = TreasuryAppropriationAccount.objects.get(treasury_account_identifier='53021')
    assert gwa_tas.account_title == file_1_account_title
    # Set a field file 2 does not touch, to prove updates preserve it.
    gwa_tas.beginning_period_of_availability = 2004
    gwa_tas.save()
    # 'update': title refreshed from file 2, untouched field survives.
    loader.load_from_file(file_path_2)
    gwa_tas = TreasuryAppropriationAccount.objects.get(treasury_account_identifier='53021')
    assert gwa_tas.account_title == file_2_account_title
    assert gwa_tas.beginning_period_of_availability == '2004'  # NOTE(review): stored as text by the model — confirm
    # 'delete': existing row is dropped and re-created from file 1.
    loader.collision_behavior = 'delete'
    loader.load_from_file(file_path_1)
    gwa_tas = TreasuryAppropriationAccount.objects.get(treasury_account_identifier='53021')
    assert gwa_tas.beginning_period_of_availability is None
    assert gwa_tas.account_title == file_1_account_title
    # 'skip': file 2 is ignored for the existing row.
    loader.collision_behavior = 'skip'
    loader.load_from_file(file_path_2)
    gwa_tas = TreasuryAppropriationAccount.objects.get(treasury_account_identifier='53021')
    assert gwa_tas.account_title == file_1_account_title
    # 'skip_and_complain': also ignored (with a complaint logged).
    loader.collision_behavior = 'skip_and_complain'
    loader.load_from_file(file_path_2)
    gwa_tas = TreasuryAppropriationAccount.objects.get(treasury_account_identifier='53021')
    assert gwa_tas.account_title == file_1_account_title
class OptionSeriesAreasplineTooltip(Options):
    """Tooltip options for the Highcharts 'areaspline' series.

    Each option is a property getter returning the configured value (or its
    Highcharts default) paired with a setter that records the new value.
    NOTE(review): the original chunk had duplicate getter/setter method names
    with no decorators, so each setter silently shadowed its getter; the
    @property/@<name>.setter decorators restored here follow the library's
    generated-options pattern — confirm against a sibling options module.
    """

    @property
    def clusterFormat(self):
        """Format string for clustered points."""
        return self._config_get('Clustered points: {point.clusterPointsAmount}')

    @clusterFormat.setter
    def clusterFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def dateTimeLabelFormats(self) -> 'OptionSeriesAreasplineTooltipDatetimelabelformats':
        """Sub-options object controlling datetime label formats."""
        return self._config_sub_data('dateTimeLabelFormats', OptionSeriesAreasplineTooltipDatetimelabelformats)

    @property
    def distance(self):
        """Tooltip distance from the point in pixels."""
        return self._config_get(16)

    @distance.setter
    def distance(self, num: float):
        self._config(num, js_type=False)

    @property
    def followPointer(self):
        """Whether the tooltip follows the mouse pointer."""
        return self._config_get(False)

    @followPointer.setter
    def followPointer(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def followTouchMove(self):
        """Whether the tooltip follows the finger on touch move."""
        return self._config_get(True)

    @followTouchMove.setter
    def followTouchMove(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def footerFormat(self):
        """String appended after the tooltip body."""
        return self._config_get('')

    @footerFormat.setter
    def footerFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def format(self):
        """Overall tooltip format string."""
        return self._config_get('undefined')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def headerFormat(self):
        """String prepended before the tooltip body."""
        return self._config_get(None)

    @headerFormat.setter
    def headerFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def nullFormat(self):
        """Format string used for null points."""
        return self._config_get(None)

    @nullFormat.setter
    def nullFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def nullFormatter(self):
        """Callback formatter used for null points."""
        return self._config_get(None)

    @nullFormatter.setter
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def pointFormat(self):
        """Per-point HTML format string."""
        return self._config_get(None)

    @pointFormat.setter
    def pointFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointFormatter(self):
        """Callback formatter for the point line."""
        return self._config_get(None)

    @pointFormatter.setter
    def pointFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def valueDecimals(self):
        """Number of decimals shown for the y value."""
        return self._config_get(None)

    @valueDecimals.setter
    def valueDecimals(self, num: float):
        self._config(num, js_type=False)

    @property
    def valuePrefix(self):
        """String prepended to each y value."""
        return self._config_get(None)

    @valuePrefix.setter
    def valuePrefix(self, text: str):
        self._config(text, js_type=False)

    @property
    def valueSuffix(self):
        """String appended to each y value."""
        return self._config_get(None)

    @valueSuffix.setter
    def valueSuffix(self, text: str):
        self._config(text, js_type=False)

    @property
    def xDateFormat(self):
        """Date format for the tooltip header on datetime axes."""
        return self._config_get(None)

    @xDateFormat.setter
    def xDateFormat(self, text: str):
        self._config(text, js_type=False)
class Utils(BotPlugin):
    """Assorted utility chat commands: echo, sender identity, command history,
    log tailing and a Markdown rendering test."""

    def echo(self, _, args):
        """Return the arguments verbatim (simple round-trip/connectivity check)."""
        return args

    def whoami(self, msg, args):
        """Describe the message sender (or the identifier given in *args*) as a Markdown table."""
        if args:
            frm = self.build_identifier(str(args).strip('"'))
        else:
            frm = msg.frm
        resp = ''
        if self.bot_config.GROUPCHAT_NICK_PREFIXED:
            # Leading blank lines keep the table from being glued to the nick prefix.
            resp += '\n\n'
        resp += '| key | value\n'
        resp += '| | \n'
        resp += f'''| person | `{frm.person}`
'''
        resp += f'''| nick | `{frm.nick}`
'''
        resp += f'''| fullname | `{frm.fullname}`
'''
        resp += f'''| client | `{frm.client}`
'''
        resp += f'''| email | `{frm.email}`
'''
        # Room identifiers (MUC occupants) carry an extra 'room' attribute.
        if hasattr(frm, 'room'):
            resp += f'''
`room` is {frm.room}
'''
        resp += f'''
- string representation is '{frm}'
'''
        resp += f'''- class is '{frm.__class__.__name__}'
'''
        return resp

    # NOTE(review): mangled decorator fragment — presumably @botcmd(historize=False); confirm.
    (historize=False)
    def history(self, msg, args):
        """List the sender's recent commands, most recent numbered lowest."""
        answer = []
        user_cmd_history = self._bot.cmd_history[msg.frm.person]
        length = len(user_cmd_history)
        for i in range(0, length):
            c = user_cmd_history[i]
            answer.append(f'{(length - i):2d}:{self._bot.prefix}{c[0]} {c[1]}')
        return '\n'.join(answer)

    # NOTE(review): mangled decorator fragment — presumably @botcmd(admin_only=True); confirm.
    (admin_only=True)
    def log_tail(self, msg, args):
        """Return the last *args* (default 40) lines of the bot log as a code block."""
        n = 40
        if args.isdigit():
            n = int(args)
        if self.bot_config.BOT_LOG_FILE:
            with open(self.bot_config.BOT_LOG_FILE) as f:
                return (('```\n' + tail(f, n)) + '\n```')
        return 'No log is configured, please define BOT_LOG_FILE in config.py'

    def render_test(self, _, args):
        """Return the bundled test.md so backends can verify Markdown rendering."""
        with open(path.join(path.dirname(path.realpath(__file__)), 'test.md')) as f:
            return f.read()
class TestOFPVendorStatsReply(unittest.TestCase):
    """Tests for OFPVendorStatsReply.parser (OpenFlow 1.0 vendor stats reply)."""

    class Datapath(object):
        # Minimal datapath stand-in carrying the protocol modules the parser needs.
        ofproto = ofproto
        ofproto_parser = ofproto_v1_0_parser

    c = OFPVendorStatsReply(Datapath)

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_init(self):
        pass

    def test_parser(self):
        """Build a raw OFPT_STATS_REPLY / OFPST_VENDOR message and verify every parsed field."""
        version = {'buf': b'\x01', 'val': ofproto.OFP_VERSION}
        msg_type = {'buf': b'\x11', 'val': ofproto.OFPT_STATS_REPLY}
        msg_len = {'buf': b'\x00\x18', 'val': ofproto.OFP_STATS_MSG_SIZE + 12}
        # Bug fix: the expected xid value was missing ("'val': }" was a syntax
        # error). It must equal the big-endian packed bytes 0x94C4D2CD.
        xid = {'buf': b'\x94\xc4\xd2\xcd', 'val': 2495926989}
        buf = version['buf'] + msg_type['buf'] + msg_len['buf'] + xid['buf']
        type_ = {'buf': b'\xff\xff', 'val': ofproto.OFPST_VENDOR}
        flags = {'buf': b'0\xd9', 'val': 12505}
        buf += type_['buf'] + flags['buf']
        # The vendor-specific payload is passed through to the body untouched.
        specific_data = b'specific_data'
        buf += specific_data
        res = OFPVendorStatsReply.parser(object, version['val'], msg_type['val'], msg_len['val'], xid['val'], buf)
        eq_(version['val'], res.version)
        eq_(msg_type['val'], res.msg_type)
        eq_(msg_len['val'], res.msg_len)
        eq_(xid['val'], res.xid)
        eq_(type_['val'], res.type)
        eq_(flags['val'], res.flags)
        body = res.body[0]
        eq_(specific_data, body)

    def test_serialize(self):
        pass
class AppEngineClient(object):
    """Wrapper over the App Engine Admin API repository client.

    Each accessor returns {} (single resources) or [] (lists) when the
    underlying resource is missing, and wraps any other API failure in
    ApiExecutionError.
    """

    def __init__(self, global_configs, **kwargs):
        """Build the repository client with rate-limit and caching settings from *global_configs*."""
        (max_calls, quota_period) = api_helpers.get_ratelimiter_config(global_configs, API_NAME)
        # Discovery-doc caching is off unless explicitly enabled in the configs.
        cache_discovery = (global_configs['cache_discovery'] if ('cache_discovery' in global_configs) else False)
        self.repository = AppEngineRepositoryClient(quota_max_calls=max_calls, quota_period=quota_period, use_rate_limiter=kwargs.get('use_rate_limiter', True), cache_discovery=cache_discovery, cache=global_configs.get('cache'))

    def get_app(self, project_id):
        """Return the App Engine application for *project_id*, or {} if not found."""
        try:
            results = self.repository.apps.get(project_id)
            LOGGER.debug('Getting information about an application, project_id = %s, result = %s', project_id, results)
            return results
        except (errors.HttpError, HttpLib2Error) as e:
            # A 404 simply means App Engine is not enabled for this project.
            if _is_status_not_found(e):
                return {}
            raise api_errors.ApiExecutionError(project_id, e)

    def get_service(self, project_id, service_id):
        """Return one service of the project, or {} if not found."""
        try:
            results = self.repository.app_services.get(project_id, target=service_id)
            LOGGER.debug('Getting information about a specific service, project_id = %s, service_id = %s, results = %s', project_id, service_id, results)
            return results
        except (errors.HttpError, HttpLib2Error) as e:
            if _is_status_not_found(e):
                return {}
            raise api_errors.ApiExecutionError(project_id, e)

    def list_services(self, project_id):
        """Return all services of the project as a flat list, or [] if not found."""
        try:
            paged_results = self.repository.app_services.list(project_id)
            flattened_results = api_helpers.flatten_list_results(paged_results, 'services')
            LOGGER.debug('Listing services of a project, project_id = %s, flattened_results = %s', project_id, flattened_results)
            return flattened_results
        except (errors.HttpError, HttpLib2Error) as e:
            if _is_status_not_found(e):
                return []
            raise api_errors.ApiExecutionError(project_id, e)

    def get_version(self, project_id, service_id, version_id):
        """Return one version of a service, or {} if not found."""
        try:
            results = self.repository.service_versions.get(project_id, target=version_id, services_id=service_id)
            LOGGER.debug('Getting information about a specific version of a service, project_id = %s, service_id = %s, version_id = %s, results = %s', project_id, service_id, version_id, results)
            return results
        except (errors.HttpError, HttpLib2Error) as e:
            if _is_status_not_found(e):
                return {}
            raise api_errors.ApiExecutionError(project_id, e)

    def list_versions(self, project_id, service_id):
        """Return all versions of a service as a flat list, or [] if not found."""
        try:
            paged_results = self.repository.service_versions.list(project_id, services_id=service_id)
            flattened_results = api_helpers.flatten_list_results(paged_results, 'versions')
            LOGGER.debug('Listing versions of a given service, project_id = %s, service_id = %s, flattened_results = %s', project_id, service_id, flattened_results)
            return flattened_results
        except (errors.HttpError, HttpLib2Error) as e:
            if _is_status_not_found(e):
                return []
            raise api_errors.ApiExecutionError(project_id, e)

    def get_instance(self, project_id, service_id, version_id, instances_id):
        """Return one instance of a service version, or {} if not found."""
        try:
            results = self.repository.version_instances.get(project_id, target=instances_id, services_id=service_id, versions_id=version_id)
            LOGGER.debug('Getting information about a specific instance of a service, project_id = %s, service_id = %s, version_id = %s, instance_id = %s, results = %s', project_id, service_id, version_id, instances_id, results)
            return results
        except (errors.HttpError, HttpLib2Error) as e:
            if _is_status_not_found(e):
                return {}
            raise api_errors.ApiExecutionError(project_id, e)

    def list_instances(self, project_id, service_id, version_id):
        """Return all instances of a service version as a flat list, or [] on 404/501."""
        try:
            paged_results = self.repository.version_instances.list(project_id, services_id=service_id, versions_id=version_id)
            flattened_results = api_helpers.flatten_list_results(paged_results, 'instances')
            LOGGER.debug('Listing instances of a given service and version, project_id = %s, service_id = %s, version_id = %s, flattened_results = %s', project_id, service_id, version_id, flattened_results)
            return flattened_results
        except (errors.HttpError, HttpLib2Error) as e:
            # NOTE(review): 501 (Not Implemented) is treated as "no instances" —
            # presumably for environments that do not support listing; confirm.
            if (e.resp.status == 501):
                LOGGER.debug(e)
                return []
            if _is_status_not_found(e):
                return []
            raise api_errors.ApiExecutionError(project_id, e)
def test_jenkins_dir(host):
assert host.file('/jenkins').is_directory
assert (host.file('/jenkins').mode == 493)
assert (host.file('/jenkins').user == 'jenkins')
assert (host.file('/jenkins').group == 'jenkins')
assert host.file('/jenkins/config.xml').is_file
assert (host.file('/jenkins/config.xml').user == 'jenkins')
assert (host.file('/jenkins/config.xml').group == 'jenkins') |
def test_default_notation_1():
    """With the 'default' naming notation, variable names stay untouched."""
    context = LogicCondition.generate_new_context()
    always_true = LogicCondition.initialize_true(context)
    var = Variable('var_0', I32)
    root = CodeNode(Assignment(var, Constant(0)), always_true)
    ast = AbstractSyntaxTree(root, {})
    _run_vng(ast, _generate_options(notation='default'))
    assert var.name == 'var_0'
@pytest.mark.skipif('pandas' not in sys.modules, reason='Pandas is not installed.')  # restored mangled ".skipif" fragment
def test_structured_dataset_in_dataclass():
    """Round-trip StructuredDatasets nested in dataclasses (top level, nested,
    in a list and in a dict) through the dataclass transformer and verify the
    dataframes and parquet file formats survive."""
    import pandas as pd
    from dataclasses import dataclass
    from pandas._testing import assert_frame_equal
    df = pd.DataFrame({'Name': ['Tom', 'Joseph'], 'Age': [20, 22]})
    # Annotated alias: a parquet-formatted dataset with a declared column schema.
    People = Annotated[StructuredDataset, 'parquet', kwtypes(Name=str, Age=int)]

    # @dataclass restored: the classes are constructed with keyword arguments
    # below, which requires the generated __init__.
    @dataclass
    class InnerDatasetStruct(DataClassJsonMixin):
        a: StructuredDataset
        b: typing.List[Annotated[StructuredDataset, 'parquet']]
        c: typing.Dict[str, Annotated[StructuredDataset, kwtypes(Name=str, Age=int)]]

    @dataclass
    class DatasetStruct(DataClassJsonMixin):
        a: People
        b: InnerDatasetStruct

    sd = StructuredDataset(dataframe=df, file_format='parquet')
    o = DatasetStruct(a=sd, b=InnerDatasetStruct(a=sd, b=[sd], c={'hello': sd}))
    ctx = FlyteContext.current_context()
    tf = DataclassTransformer()
    lt = tf.get_literal_type(DatasetStruct)
    lv = tf.to_literal(ctx, o, DatasetStruct, lt)
    ot = tf.to_python_value(ctx, lv=lv, expected_python_type=DatasetStruct)
    # Every nested dataset must deserialize back to the original dataframe...
    assert_frame_equal(df, ot.a.open(pd.DataFrame).all())
    assert_frame_equal(df, ot.b.a.open(pd.DataFrame).all())
    assert_frame_equal(df, ot.b.b[0].open(pd.DataFrame).all())
    assert_frame_equal(df, ot.b.c['hello'].open(pd.DataFrame).all())
    # ...and keep its parquet file format.
    assert 'parquet' == ot.a.file_format
    assert 'parquet' == ot.b.a.file_format
    assert 'parquet' == ot.b.b[0].file_format
    assert 'parquet' == ot.b.c['hello'].file_format
class KMSGateway(AWSGateway):
    """Thin wrapper around the boto3 KMS client for signing and verifying messages."""

    def __init__(self, region: str, access_key_id: Optional[str]=None, access_key_data: Optional[str]=None, config: Optional[Dict[(str, Any)]]=None) -> None:
        super().__init__(region, access_key_id, access_key_data, config)
        self.client: BaseClient = boto3.client('kms', region_name=self.region, **self.config)

    # @_handler restored: the original chunk carried a bare '_handler' line,
    # which evaluated (and discarded) the decorator instead of applying it.
    @_handler
    def sign(self, key_id: str, message: str, message_type: str, grant_tokens: List[str], signing_algorithm: str) -> str:
        """Sign *message* with KMS key *key_id*; return the base64-encoded signature."""
        response = self.client.sign(KeyId=key_id, Message=message.encode(), MessageType=message_type, GrantTokens=grant_tokens, SigningAlgorithm=signing_algorithm)
        signature = b64encode(response['Signature']).decode()
        return signature

    @_handler
    def verify(self, key_id: str, message: str, message_type: str, signature: str, signing_algorithm: str, grant_tokens: List[str]) -> bool:
        """Verify a base64-encoded *signature* for *message*; return KMS's SignatureValid flag."""
        b64_signature = b64decode(signature.encode())
        response = self.client.verify(KeyId=key_id, Message=message.encode(), MessageType=message_type, Signature=b64_signature, SigningAlgorithm=signing_algorithm, GrantTokens=grant_tokens)
        return response['SignatureValid']
def named_color_function(obj: 'Color', func: str, alpha: Optional[bool], precision: int, fit: Union[(str, bool)], none: bool, percent: bool, legacy: bool, scale: float) -> str:
    """Serialize *obj* as a named CSS color function (e.g. ``rgb()``/``rgba()``).

    Legacy output uses comma separators and an 'a'-suffixed function name when
    alpha is present; modern output uses spaces and a slash before alpha.
    """
    alpha_value = get_alpha(obj, alpha, none, legacy)
    has_alpha = alpha_value is not None
    pieces = ['{}{}('.format(func, 'a' if (legacy and has_alpha) else EMPTY)]
    channels = obj._space.CHANNELS
    for index, coord in enumerate(get_coords(obj, fit, none, legacy)):
        channel = channels[index]
        as_percent = bool(channel.flags & FLG_PERCENT) or (percent and bool(channel.flags & FLG_OPT_PERCENT))
        # Only plain (non-percent, non-angle) channels are scaled.
        if not as_percent and not (channel.flags & FLG_ANGLE):
            coord *= scale
        if index:
            pieces.append(COMMA if legacy else SPACE)
        span = channel.span if as_percent else 0.0
        offset = channel.offset if as_percent else 0.0
        pieces.append(util.fmt_float(coord, precision, span, offset))
    if has_alpha:
        separator = COMMA if legacy else SLASH
        pieces.append('{}{})'.format(separator, util.fmt_float(alpha_value, max(precision, util.DEF_PREC))))
    else:
        pieces.append(')')
    return EMPTY.join(pieces)
def unwrap_workflow_schedule_response(response):
    """Convert a service response into a workflow schedule meta object.

    Returns None when the resource does not exist; raises AIFlowException for
    any other non-success return code.
    """
    code = response.return_code
    if code == str(SUCCESS):
        proto = Parse(response.data, WorkflowScheduleProto())
        return ProtoToMeta.proto_to_workflow_schedule_meta(proto)
    if code == str(RESOURCE_DOES_NOT_EXIST):
        return None
    raise AIFlowException(response.error_msg)
class TestRallyRepository:
    """Tests for repo.RallyRepository: cloning/fetching and the branch/tag
    selection logic used to match a distribution version.

    NOTE(review): the original chunk carried bare fragments such as
    ('esrally.utils.git.fetch', autospec=True) above each test; they are
    restored here as @mock.patch decorators, matching each method's injected
    mock parameters (the bottom-most decorator binds the first argument).
    """

    @mock.patch('esrally.utils.io.exists', autospec=True)
    @mock.patch('esrally.utils.git.is_working_copy', autospec=True)
    def test_fails_in_offline_mode_if_not_a_git_repo(self, is_working_copy, exists):
        """Offline + existing directory that is not a git repo -> error with init hint."""
        is_working_copy.return_value = False
        exists.return_value = True
        with pytest.raises(exceptions.SystemSetupError) as exc:
            repo.RallyRepository(remote_url=None, root_dir='/rally-resources', repo_name='unit-test', resource_name='unittest-resources', offline=True)
        assert exc.value.args[0] == '[/rally-resources/unit-test] must be a git repository.\n\nPlease run:\ngit -C /rally-resources/unit-test init'

    @mock.patch('esrally.utils.io.exists', autospec=True)
    @mock.patch('esrally.utils.git.is_working_copy', autospec=True)
    def test_fails_in_offline_mode_if_not_existing(self, is_working_copy, exists):
        """Offline + missing directory -> error."""
        is_working_copy.return_value = False
        exists.return_value = False
        with pytest.raises(exceptions.SystemSetupError) as exc:
            repo.RallyRepository(remote_url=None, root_dir='/rally-resources', repo_name='unit-test', resource_name='unittest-resources', offline=True)
        assert exc.value.args[0] == 'Expected a git repository at [/rally-resources/unit-test] but the directory does not exist.'

    @mock.patch('esrally.utils.git.is_working_copy', autospec=True)
    def test_does_nothing_if_working_copy_present(self, is_working_copy):
        """An existing working copy without a remote URL needs no clone or fetch."""
        is_working_copy.return_value = True
        r = repo.RallyRepository(remote_url=None, root_dir='/rally-resources', repo_name='unit-test', resource_name='unittest-resources', offline=True)
        assert not r.remote

    @mock.patch('esrally.utils.git.is_working_copy', autospec=True)
    @mock.patch('esrally.utils.git.clone', autospec=True)
    def test_clones_initially(self, clone, is_working_copy):
        """No working copy yet -> the repository is cloned from the remote."""
        is_working_copy.return_value = False
        r = repo.RallyRepository(remote_url='.org/rally-resources', root_dir='/rally-resources', repo_name='unit-test', resource_name='unittest-resources', offline=False)
        assert r.remote
        clone.assert_called_with(src='/rally-resources/unit-test', remote='.org/rally-resources')

    @mock.patch('esrally.utils.git.is_working_copy', autospec=True)
    @mock.patch('esrally.utils.git.fetch', autospec=True)
    def test_fetches_if_already_cloned(self, fetch, is_working_copy):
        """Existing working copy with a remote -> fetch from origin."""
        is_working_copy.return_value = True
        repo.RallyRepository(remote_url='.org/rally-resources', root_dir='/rally-resources', repo_name='unit-test', resource_name='unittest-resources', offline=False)
        fetch.assert_called_with(src='/rally-resources/unit-test', remote='origin')

    @mock.patch('esrally.utils.git.is_working_copy', autospec=True)
    @mock.patch('esrally.utils.git.fetch')
    def test_does_not_fetch_if_suppressed(self, fetch, is_working_copy):
        """fetch=False suppresses the remote fetch."""
        is_working_copy.return_value = True
        r = repo.RallyRepository(remote_url='.org/rally-resources', root_dir='/rally-resources', repo_name='unit-test', resource_name='unittest-resources', offline=False, fetch=False)
        assert r.remote
        assert fetch.call_count == 0

    @mock.patch('esrally.utils.git.is_working_copy', autospec=True)
    @mock.patch('esrally.utils.git.fetch')
    def test_ignores_fetch_errors(self, fetch, is_working_copy):
        """A failing fetch is tolerated (best-effort update)."""
        fetch.side_effect = exceptions.SupplyError('Testing error')
        is_working_copy.return_value = True
        r = repo.RallyRepository(remote_url='.org/rally-resources', root_dir='/rally-resources', repo_name='unit-test', resource_name='unittest-resources', offline=False)
        assert r.remote
        fetch.assert_called_with(src='/rally-resources/unit-test', remote='origin')

    @mock.patch('esrally.utils.git.head_revision')
    @mock.patch('esrally.utils.git.is_working_copy', autospec=True)
    @mock.patch('esrally.utils.git.fetch', autospec=True)
    @mock.patch('esrally.utils.git.branches', autospec=True)
    @mock.patch('esrally.utils.git.checkout', autospec=True)
    @mock.patch('esrally.utils.git.rebase', autospec=True)
    def test_updates_from_remote(self, rebase, checkout, branches, fetch, is_working_copy, head_revision):
        """update() checks out the remote branch matching the version and rebases onto it."""
        branches.return_value = ['1', '2', '5', 'master']
        is_working_copy.return_value = True
        head_revision.return_value = '123a'
        r = repo.RallyRepository(remote_url='.org/rally-resources', root_dir='/rally-resources', repo_name='unit-test', resource_name='unittest-resources', offline=random.choice([True, False]))
        r.update(distribution_version='1.7.3')
        branches.assert_called_with('/rally-resources/unit-test', remote=True)
        rebase.assert_called_with('/rally-resources/unit-test', remote='origin', branch='1')
        checkout.assert_called_with('/rally-resources/unit-test', branch='1')

    @mock.patch('esrally.utils.git.head_revision')
    @mock.patch('esrally.utils.git.is_working_copy', autospec=True)
    @mock.patch('esrally.utils.git.fetch', autospec=True)
    @mock.patch('esrally.utils.git.branches', autospec=True)
    @mock.patch('esrally.utils.git.checkout', autospec=True)
    @mock.patch('esrally.utils.git.rebase')
    @mock.patch('esrally.utils.git.current_branch')
    def test_updates_locally(self, curr_branch, rebase, checkout, branches, fetch, is_working_copy, head_revision):
        """Without a remote, update() checks out a local branch and never rebases."""
        curr_branch.return_value = '5'
        branches.return_value = ['1', '2', '5', 'master']
        is_working_copy.return_value = True
        head_revision.return_value = '123a'
        r = repo.RallyRepository(remote_url=None, root_dir='/rally-resources', repo_name='unit-test', resource_name='unittest-resources', offline=False)
        r.update(distribution_version='6.0.0')
        branches.assert_called_with('/rally-resources/unit-test', remote=False)
        assert rebase.call_count == 0
        checkout.assert_called_with('/rally-resources/unit-test', branch='master')

    @mock.patch('esrally.utils.git.head_revision')
    @mock.patch('esrally.utils.git.is_working_copy', autospec=True)
    @mock.patch('esrally.utils.git.fetch', autospec=True)
    @mock.patch('esrally.utils.git.tags', autospec=True)
    @mock.patch('esrally.utils.git.branches', autospec=True)
    @mock.patch('esrally.utils.git.checkout', autospec=True)
    @mock.patch('esrally.utils.git.rebase')
    @mock.patch('esrally.utils.git.current_branch')
    def test_fallback_to_tags(self, curr_branch, rebase, checkout, branches, tags, fetch, is_working_copy, head_revision):
        """When no branch matches the version, update() falls back to a matching tag."""
        curr_branch.return_value = 'master'
        branches.return_value = ['5', 'master']
        tags.return_value = ['v1', 'v1.7', 'v2']
        is_working_copy.return_value = True
        head_revision.return_value = '123a'
        r = repo.RallyRepository(remote_url=None, root_dir='/rally-resources', repo_name='unit-test', resource_name='unittest-resources', offline=False)
        r.update(distribution_version='1.7.4')
        branches.assert_called_with('/rally-resources/unit-test', remote=False)
        assert rebase.call_count == 0
        tags.assert_called_with('/rally-resources/unit-test')
        checkout.assert_called_with('/rally-resources/unit-test', branch='v1.7')

    @mock.patch('esrally.utils.git.is_working_copy', autospec=True)
    @mock.patch('esrally.utils.git.fetch', autospec=True)
    @mock.patch('esrally.utils.git.tags', autospec=True)
    @mock.patch('esrally.utils.git.branches', autospec=True)
    @mock.patch('esrally.utils.git.checkout')
    @mock.patch('esrally.utils.git.rebase')
    def test_does_not_update_unknown_branch_remotely(self, rebase, checkout, branches, tags, fetch, is_working_copy):
        """No matching branch or tag anywhere -> SystemSetupError, nothing checked out."""
        branches.return_value = ['1', '2', '5', 'master']
        tags.return_value = []
        is_working_copy.return_value = True
        r = repo.RallyRepository(remote_url='.org/rally-resources', root_dir='/rally-resources', repo_name='unit-test', resource_name='unittest-resources', offline=False)
        assert r.remote
        with pytest.raises(exceptions.SystemSetupError) as exc:
            r.update(distribution_version='4.0.0')
        assert exc.value.args[0] == 'Cannot find unittest-resources for distribution version 4.0.0'
        calls = [mock.call('/rally-resources/unit-test', remote=True), mock.call('/rally-resources/unit-test', remote=False)]
        branches.assert_has_calls(calls)
        tags.assert_called_with('/rally-resources/unit-test')
        assert checkout.call_count == 0
        assert rebase.call_count == 0

    @mock.patch('esrally.utils.git.head_revision')
    @mock.patch('esrally.utils.git.is_working_copy', autospec=True)
    @mock.patch('esrally.utils.git.fetch', autospec=True)
    @mock.patch('esrally.utils.git.tags', autospec=True)
    @mock.patch('esrally.utils.git.branches', autospec=True)
    @mock.patch('esrally.utils.git.checkout', autospec=True)
    @mock.patch('esrally.utils.git.rebase')
    @mock.patch('esrally.utils.git.current_branch')
    def test_does_not_update_unknown_branch_remotely_local_fallback(self, curr_branch, rebase, checkout, branches, tags, fetch, is_working_copy, head_revision):
        """No matching remote branch, but a matching local branch -> local checkout."""
        curr_branch.return_value = 'master'
        branches.side_effect = ['5', ['1', '2', '5', 'master']]
        tags.return_value = []
        is_working_copy.return_value = True
        # Bug fix: was 'retun_value', which silently set an unused attribute on
        # the mock instead of configuring its return value.
        head_revision.return_value = '123a'
        r = repo.RallyRepository(remote_url='.org/rally-resources', root_dir='/rally-resources', repo_name='unit-test', resource_name='unittest-resources', offline=False)
        r.update(distribution_version='1.7.3')
        calls = [mock.call('/rally-resources/unit-test', remote=True), mock.call('/rally-resources/unit-test', remote=False)]
        branches.assert_has_calls(calls)
        assert tags.call_count == 0
        checkout.assert_called_with('/rally-resources/unit-test', branch='1')
        assert rebase.call_count == 0

    @mock.patch('esrally.utils.git.is_working_copy', autospec=True)
    @mock.patch('esrally.utils.git.fetch', autospec=True)
    @mock.patch('esrally.utils.git.tags', autospec=True)
    @mock.patch('esrally.utils.git.branches', autospec=True)
    @mock.patch('esrally.utils.git.checkout')
    @mock.patch('esrally.utils.git.rebase')
    def test_does_not_update_unknown_branch_locally(self, rebase, checkout, branches, tags, fetch, is_working_copy):
        """No remote and no matching local branch -> SystemSetupError."""
        branches.return_value = ['1', '2', '5', 'master']
        tags.return_value = []
        is_working_copy.return_value = True
        r = repo.RallyRepository(remote_url=None, root_dir='/rally-resources', repo_name='unit-test', resource_name='unittest-resources', offline=False)
        with pytest.raises(exceptions.SystemSetupError) as exc:
            r.update(distribution_version='4.0.0')
        assert exc.value.args[0] == 'Cannot find unittest-resources for distribution version 4.0.0'
        branches.assert_called_with('/rally-resources/unit-test', remote=False)
        assert checkout.call_count == 0
        assert rebase.call_count == 0

    @mock.patch('esrally.utils.git.is_working_copy', autospec=True)
    @mock.patch('esrally.utils.git.fetch', autospec=True)
    @mock.patch('esrally.utils.git.checkout', autospec=True)
    def test_checkout_revision(self, checkout, fetch, is_working_copy):
        """checkout() delegates straight to git.checkout with the given revision."""
        is_working_copy.return_value = True
        r = repo.RallyRepository(remote_url=None, root_dir='/rally-resources', repo_name='unit-test', resource_name='unittest-resources', offline=False)
        r.checkout('abcdef123')
        checkout.assert_called_with('/rally-resources/unit-test', branch='abcdef123')
def _deployment_test():
    """Smoke test for a torchquad deployment.

    Enables CUDA with double precision, constructs every integrator, and runs
    one small 1-D integration per integrator to confirm the install works.
    """
    import torch
    set_log_level('INFO')
    logger.info('')
    logger.info(' TESTING DEPLOYMENT ')
    logger.info('')
    logger.info('')
    logger.info('Testing CUDA init... ')
    enable_cuda()
    set_precision('double')
    logger.info('Done.')
    logger.info('')
    logger.info('')
    logger.info('Initializing integrators... ')
    tp = Trapezoid()
    sp = Simpson()
    boole = Boole()
    mc = MonteCarlo()
    vegas = VEGAS()
    logger.info('Done.')

    def some_test_function(x):
        # Simple smooth integrand: x^2 * e^x.
        return (torch.exp(x) * torch.pow(x, 2))

    logger.info('')
    logger.info('')
    logger.info('Testing integrate functions... ')
    tp.integrate(some_test_function, dim=1, N=101)
    sp.integrate(some_test_function, dim=1, N=101)
    boole.integrate(some_test_function, dim=1, N=101)
    mc.integrate(some_test_function, dim=1, N=101)
    # VEGAS uses a larger sample budget than the deterministic rules here.
    vegas.integrate(some_test_function, dim=1, N=300)
    logger.info('Done.')
    logger.info('')
    logger.info('')
    logger.info(' ALL DONE. ')
    logger.info('')
class GraphClient(NamespacedClient):
    """Client for the Elasticsearch Graph explore API."""

    # @ restored: the original chunk evaluated _rewrite_parameters(...) as a
    # bare expression and discarded it, so body-field rewriting never applied.
    @_rewrite_parameters(body_fields=('connections', 'controls', 'query', 'vertices'))
    def explore(self, *, index: t.Union[(str, t.Sequence[str])], connections: t.Optional[t.Mapping[(str, t.Any)]]=None, controls: t.Optional[t.Mapping[(str, t.Any)]]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, pretty: t.Optional[bool]=None, query: t.Optional[t.Mapping[(str, t.Any)]]=None, routing: t.Optional[str]=None, timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, vertices: t.Optional[t.Sequence[t.Mapping[(str, t.Any)]]]=None, body: t.Optional[t.Dict[(str, t.Any)]]=None) -> ObjectApiResponse[t.Any]:
        """Explore graph vertices and connections for the given index.

        Query-string options go into the URL; connections/controls/query/vertices
        go into the request body unless an explicit *body* is supplied.
        """
        if index in SKIP_IN_PATH:
            raise ValueError("Empty value passed for parameter 'index'")
        __path = f'/{_quote(index)}/_graph/explore'
        __query: t.Dict[(str, t.Any)] = {}
        __body: t.Dict[(str, t.Any)] = body if (body is not None) else {}
        if error_trace is not None:
            __query['error_trace'] = error_trace
        if filter_path is not None:
            __query['filter_path'] = filter_path
        if human is not None:
            __query['human'] = human
        if pretty is not None:
            __query['pretty'] = pretty
        if routing is not None:
            __query['routing'] = routing
        if timeout is not None:
            __query['timeout'] = timeout
        # Only populate body fields when the caller did not pass a raw body.
        if not __body:
            if connections is not None:
                __body['connections'] = connections
            if controls is not None:
                __body['controls'] = controls
            if query is not None:
                __body['query'] = query
            if vertices is not None:
                __body['vertices'] = vertices
        if not __body:
            __body = None
        __headers = {'accept': 'application/json'}
        if __body is not None:
            __headers['content-type'] = 'application/json'
        return self.perform_request('POST', __path, params=__query, headers=__headers, body=__body)
def test_just_three_padded_mol():
    """Parse a three-residue (MET-SER-LEU) PDB into a LinkedRDKitChorizo and
    verify residue bookkeeping, PDBQT export, and the N-terminal padded mol."""
    f = open(just_three_residues, 'r')
    pdb_string = f.read()
    # Mark MET 15 as the N-terminus of the chain.
    termini = {':MET:15': 'N'}
    chorizo = LinkedRDKitChorizo(pdb_string, termini=termini)
    assert (len(chorizo.residues) == 3)
    assert (len(chorizo.getIgnoredResidues()) == 0)
    expected_suggested_mutations = {}
    assert (chorizo.suggested_mutations == expected_suggested_mutations)
    # Expected residue records: id, raw PDB text, previous residue id, next residue id.
    expected_residue_data = {':MET:15': ChorizoResidue(':MET:15', 'ATOM    220  N   MET    15      14.163  15.881  16.252  1.00  0.00           N  \nATOM    221  H   MET    15      14.428  15.654  17.200  1.00  0.00           H  \nATOM    222  CA  MET    15      13.351  17.068  16.029  1.00  0.00           C  \nATOM    223  HA  MET    15      12.366  16.779  15.662  1.00  0.00           H  \nATOM    224  CB  MET    15      13.177  17.850  17.335  1.00  0.00           C  \nATOM    225  HB2 MET    15      14.168  18.063  17.735  1.00  0.00           H  \nATOM    226  HB3 MET    15      12.671  18.787  17.102  1.00  0.00           H  \nATOM    227  CG  MET    15      12.367  17.079  18.369  1.00  0.00           C  \nATOM    228  HG2 MET    15      11.362  16.943  17.969  1.00  0.00           H  \nATOM    229  HG3 MET    15      12.839  16.106  18.503  1.00  0.00           H  \nATOM    230  SD  MET    15      12.262  17.912  19.967  1.00  0.00           S  \nATOM    231  CE  MET    15      11.025  19.105  19.625  1.00  0.00           C  \nATOM    232  HE1 MET    15      10.104  18.601  19.332  1.00  0.00           H  \nATOM    233  HE2 MET    15      10.842  19.705  20.516  1.00  0.00           H  \nATOM    234  HE3 MET    15      11.358  19.752  18.814  1.00  0.00           H  \nATOM    235  C   MET    15      13.900  17.930  14.890  1.00  0.00           C  \nATOM    236  O   MET    15      13.137  18.390  14.061  1.00  0.00           O  \n', None, ':SER:16'), ':SER:16': ChorizoResidue(':SER:16', 'ATOM    237  N   SER    16      15.208  18.136  14.839  1.00  0.00           N  \nATOM    238  H   SER    16      15.818  17.724  15.530  1.00  0.00           H  \nATOM    239  CA  SER    16      15.812  18.990  13.787  1.00  0.00           C  \nATOM    240  HA  SER    16      15.275  19.938  13.751  1.00  0.00           H  \nATOM    241  CB  SER    16      17.291  19.276  14.049  1.00  0.00           C  \nATOM    242  HB2 SER    16      17.812  18.327  14.172  1.00  0.00           H  \nATOM    243  HB3 SER    16      17.701  19.806  13.189  1.00  0.00           H  \nATOM    244  OG  SER    16      17.464  20.070  15.224  1.00  0.00           O  \nATOM    245  HG  SER    16      18.399  20.234  15.364  1.00  0.00           H  \nATOM    246  C   SER    16      15.682  18.311  12.430  1.00  0.00           C  \nATOM    247  O   SER    16      15.436  18.983  11.441  1.00  0.00           O  \n', ':MET:15', ':LEU:17'), ':LEU:17': ChorizoResidue(':LEU:17', 'ATOM    248  N   LEU    17      15.835  16.986  12.389  1.00  0.00           N  \nATOM    249  H   LEU    17      16.162  16.471  13.194  1.00  0.00           H  \nATOM    250  CA  LEU    17      15.588  16.249  11.147  1.00  0.00           C  \nATOM    251  HA  LEU    17      16.256  16.610  10.366  1.00  0.00           H  \nATOM    252  CB  LEU    17      15.838  14.741  11.351  1.00  0.00           C  \nATOM    253  HB2 LEU    17      16.705  14.733  12.011  1.00  0.00           H  \nATOM    254  HB3 LEU    17      14.953  14.441  11.912  1.00  0.00           H  \nATOM    255  CG  LEU    17      16.069  13.752  10.185  1.00  0.00           C  \nATOM    256  HG  LEU    17      15.182  13.764   9.551  1.00  0.00           H  \nATOM    257  CD1 LEU    17      17.278  14.110   9.362  1.00  0.00           C  \nATOM    258 HD11 LEU    17      18.165  14.098   9.995  1.00  0.00           H  \nATOM    259 HD12 LEU    17      17.397  13.386   8.556  1.00  0.00           H  \nATOM    260 HD13 LEU    17      17.148  15.106   8.938  1.00  0.00           H  \nATOM    261  CD2 LEU    17      16.290  12.344  10.717  1.00  0.00           C  \nATOM    262 HD21 LEU    17      15.413  12.027  11.282  1.00  0.00           H  \nATOM    263 HD22 LEU    17      16.451  11.661   9.883  1.00  0.00           H  \nATOM    264 HD23 LEU    17      17.164  12.334  11.368  1.00  0.00           H  \nATOM    265  C   LEU    17      14.162  16.490  10.648  1.00  0.00           C  \nATOM    266  O   LEU    17      13.948  16.756   9.478  1.00  0.00           O  \n', ':SER:16', None)}
    for residue_id in chorizo.residues:
        residue_object = chorizo.residues[residue_id]
        expected_object = expected_residue_data[residue_id]
        assert (residue_object.residue_id == expected_object.residue_id)
        assert (residue_object.pdb_text == expected_object.pdb_text)
        assert (residue_object.previous_id == expected_object.previous_id)
        assert (residue_object.next_id == expected_object.next_id)
        # NOTE(review): `is not None` would be the idiomatic form here.
        assert (residue_object.rdkit_mol != None)
    pdbqt_strings = PDBQTWriterLegacy.write_string_from_linked_rdkit_chorizo(chorizo)
    (rigid_part, movable_part) = pdbqt_strings
    # Joining the split lines drops newlines before measuring the rigid block length.
    rigid_part = ''.join(rigid_part.splitlines())
    assert (len(rigid_part) == 2212)
    assert (len(movable_part) == 0)
    expected_termini = {':MET:15': 'N'}
    assert (chorizo.termini == expected_termini)
    # Padded mol for MET 15: the flag list marks which padded-mol atoms belong
    # to the real residue, and the map links padded-mol atom indices back to
    # atom indices of the residue's own rdkit mol.
    (met15_padded, is_actual_res, atom_index_map) = chorizo.get_padded_mol(':MET:15')
    met15_resmol = chorizo.residues[':MET:15'].rdkit_mol
    expected_is_actual_res = [False, False, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, False, False, False, False]
    expected_atom_index_map = {2: 0, 3: 1, 4: 2, 5: 4, 6: 3, 7: 8, 8: 9, 9: 10, 10: 11, 11: 12, 12: 13, 13: 14, 14: 15, 15: 16, 16: 17, 17: 18, 18: 5, 19: 6, 20: 7}
    assert met15_resmol
    assert met15_padded
    assert (is_actual_res == expected_is_actual_res)
    assert (atom_index_map == expected_atom_index_map)
def _nuke_set_zero_margins(widget_object):
    """Strip the default layout margins Nuke wraps around a custom panel.

    Scans every live Qt widget for children whose class matches
    *widget_object*'s class, collects up to three levels of their parent
    widgets, and zeroes the contents margins of each child of those
    parents so the panel sits flush against its frame.
    """
    target_cls_name = widget_object.__class__.__name__
    ancestors = []
    for candidate in QtWidgets.QApplication.allWidgets():
        for child in candidate.children():
            if child.__class__.__name__ != target_cls_name:
                continue
            ancestors.append(candidate.parentWidget())
            try:
                grandparent = candidate.parentWidget().parentWidget()
                ancestors.append(grandparent)
                ancestors.append(grandparent.parentWidget())
            except AttributeError:
                # Ran off the top of the widget hierarchy; keep what we have.
                pass
    for ancestor in ancestors:
        if ancestor is None:
            continue
        for descendant in ancestor.children():
            try:
                descendant.setContentsMargins(0, 0, 0, 0)
            except AttributeError:
                # Not every QObject child supports contents margins.
                pass
class _AsyncWrapperForSyncCallbackManagerForRun(AsyncCallbackManagerForLLMRun):
    """Async adapter that forwards every LLM-run callback to a sync manager.

    Lets code written against the async callback interface drive an
    existing synchronous ``CallbackManagerForLLMRun`` while preserving the
    same run/tag/metadata context.
    """

    def __init__(self, callback_manager: 'CallbackManagerForLLMRun'):
        # Mirror the sync manager's run metadata so handlers observe the
        # same context regardless of which interface fires.
        super().__init__(
            run_id=callback_manager.run_id,
            handlers=callback_manager.handlers,
            inheritable_handlers=callback_manager.inheritable_handlers,
            parent_run_id=callback_manager.parent_run_id,
            tags=callback_manager.tags,
            inheritable_tags=callback_manager.inheritable_tags,
            metadata=callback_manager.metadata,
            inheritable_metadata=callback_manager.inheritable_metadata,
        )
        self.callback_manager = callback_manager

    async def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
        """Forward a streamed token to the sync manager."""
        return self.callback_manager.on_llm_new_token(token, **kwargs)

    async def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
        """Forward the final LLM result to the sync manager."""
        return self.callback_manager.on_llm_end(response, **kwargs)

    async def on_llm_error(self, error: Union[(Exception, KeyboardInterrupt)], **kwargs: Any) -> None:
        """Forward an LLM failure to the sync manager."""
        return self.callback_manager.on_llm_error(error, **kwargs)

    async def on_text(self, text: str, **kwargs: Any) -> None:
        """Forward arbitrary text output to the sync manager."""
        return self.callback_manager.on_text(text, **kwargs)

    async def on_retry(self, retry_state: 'RetryCallState', **kwargs: Any) -> None:
        """Forward a retry notification to the sync manager."""
        return self.callback_manager.on_retry(retry_state, **kwargs)
def gen_function_single_thread(fused_func_metadata, input_names, output_names, type_converter) -> str:
    """Generate the single-thread expression body for a fused elementwise op.

    Walks the fused op's sub-functions in order, building a textual
    expression for every intermediate tensor, and returns the statements
    that assign the final expressions to ``output_names``.

    Args:
        fused_func_metadata: Metadata for the fused op (``sub_funcs``,
            dtypes, original inputs/outputs, fp32-accumulation flag).
        input_names: Variable names aligned with ``original_inputs``.
        output_names: Variable names aligned with ``original_outputs``.
        type_converter: Nested mapping ``{src_type: {dst_type: cast_fn}}``
            of dtype conversion function names.

    Returns:
        The generated statements, one per line, as a single string.

    Raises:
        RuntimeError: If no expression can be generated for a node.
    """
    tensor_to_expr: Dict[(Tensor, str)] = {}
    float32_t = fused_func_metadata.float32_t
    body = ''
    # Seed the expression table with the fused op's inputs, upcasting to
    # fp32 when accumulation in fp32 is requested.
    for (tensor, name) in zip(fused_func_metadata.original_inputs, input_names):
        if (fused_func_metadata.use_fp32_acc and (fused_func_metadata.op_t != float32_t)):
            input_converter = type_converter.get(fused_func_metadata.op_t).get(float32_t)
            name = '{}({})'.format(input_converter, name)
        tensor_to_expr[tensor] = name
    tmp_output_idx: int = 0
    for func_metadata in fused_func_metadata.sub_funcs:
        params: List[str] = []
        input_converter = None
        output_converter = None
        func_op_t = func_metadata.op_t
        if (not fused_func_metadata.use_fp32_acc):
            # Without fp32 accumulation, a sub-function operating in a
            # different dtype needs explicit casts around its call.
            if (func_op_t != fused_func_metadata.op_t):
                input_converter = type_converter.get(fused_func_metadata.op_t).get(func_op_t)
                output_converter = type_converter.get(func_op_t).get(fused_func_metadata.op_t)
                # NOTE: fixed misspelling "convertion" -> "conversion" in
                # the two assertion messages below.
                assert (input_converter is not None), 'Unsupported conversion from {} to {}'.format(fused_func_metadata.op_t, func_op_t)
                assert (output_converter is not None), 'Unsupported conversion from {} to {}'.format(func_op_t, fused_func_metadata.op_t)
        for arg in func_metadata.args:
            if (arg in tensor_to_expr):
                param = tensor_to_expr[arg]
                params.append(('{}({})'.format(input_converter, param) if (input_converter is not None) else param))
            elif arg.is_a_const_num():
                if math.isinf(arg._attrs['value']):
                    arg_str = 'CUDART_INF_F'
                else:
                    arg_str = str(arg._attrs['value'])
                # Vector-of-2 dtypes (names ending in '2') take the scalar twice.
                if (func_op_t[(- 1)] == '2'):
                    params.append('{}({},{})'.format(func_op_t, arg_str, arg_str))
                else:
                    params.append('{}({})'.format(func_op_t, arg_str))
            else:
                raise RuntimeError('Cannot generate expression for node {}, ops: {}'.format(arg, func_metadata))
        assert (len(func_metadata.outputs) == 1), 'Operator has more than 1 output! Operator: {}'.format(func_metadata)
        output = func_metadata.outputs[0]
        func_def = '{}({})'.format(func_metadata.func_name, ','.join(params))
        func_def = ('{}({})'.format(output_converter, func_def) if (output_converter is not None) else func_def)
        if (len(output._attrs['dst_ops']) > 1):
            # Multiple consumers: materialize into a temporary so the
            # expression is evaluated only once.
            name = ('tmp_' + str(tmp_output_idx))
            tmp_output_idx += 1
            temp_t = (float32_t if fused_func_metadata.use_fp32_acc else fused_func_metadata.op_t)
            body += '{} {} = {};\n'.format(temp_t, name, func_def)
            tensor_to_expr[output] = name
        else:
            tensor_to_expr[output] = func_def
    # Emit final assignments, downcasting from fp32 accumulation if needed.
    for (tensor, name) in zip(fused_func_metadata.original_outputs, output_names):
        if (tensor not in tensor_to_expr):
            raise RuntimeError('Cannot generate expression for node {}, outputs: {}'.format(tensor, fused_func_metadata.original_outputs))
        expr = tensor_to_expr[tensor]
        if (fused_func_metadata.use_fp32_acc and (fused_func_metadata.op_t != float32_t)):
            output_converter = type_converter.get(float32_t).get(fused_func_metadata.op_t)
            expr = '{}({})'.format(output_converter, expr)
        body += '{} = {};\n'.format(name, expr)
    return body
def test_json_interface(newproject):
    """A JSON interface compiles alongside a contract without becoming a build artifact."""
    contract_file = newproject._path.joinpath('contracts/Foo.vy')
    with contract_file.open('w') as fp:
        fp.write(CONTRACT)
    interface_file = newproject._path.joinpath('interfaces/Bar.json')
    with interface_file.open('w') as fp:
        json.dump(ABI, fp)
    newproject.load()
    build_dir = newproject._path.joinpath('build/contracts')
    # The contract compiles into the build folder and onto the project object.
    assert build_dir.joinpath('Foo.json').exists()
    assert hasattr(newproject, 'Foo')
    # The JSON interface is exposed only via `interface` -- never built
    # and never attached directly to the project.
    assert not build_dir.joinpath('Bar.json').exists()
    assert not hasattr(newproject, 'Bar')
    assert hasattr(newproject.interface, 'Bar')
class AddMetaTest(TestModelMixin, TestBase):
    """Tests for attaching metadata to revisions via ``reversion.add_meta``."""

    databases = {'default', 'mysql', 'postgres'}

    def testAddMeta(self):
        # Meta added inside a revision block is stored with that revision.
        with reversion.create_revision():
            reversion.add_meta(TestMeta, name='meta v1')
            obj = TestModel.objects.create()
        self.assertSingleRevision((obj,), meta_names=('meta v1',))

    def testAddMetaNoBlock(self):
        # Adding meta outside any revision block is an error.
        with self.assertRaises(reversion.RevisionManagementError):
            reversion.add_meta(TestMeta, name='meta v1')

    def testAddMetaMultDb(self):
        # Meta is recorded on every database with an open revision block,
        # and on none of the others (here: not on 'default').
        with reversion.create_revision(using='mysql'):
            with reversion.create_revision(using='postgres'):
                obj = TestModel.objects.create()
                reversion.add_meta(TestMeta, name='meta v1')
        self.assertNoRevision()
        self.assertSingleRevision((obj,), meta_names=('meta v1',), using='mysql')
        self.assertSingleRevision((obj,), meta_names=('meta v1',), using='postgres')
def test_should_burn_gas_with_revert(computation):
    """A REVERT must leave remaining gas refundable rather than burning it."""
    # Sanity check: the fixture starts with its full gas allowance.
    assert (computation.get_gas_remaining() == 100)
    # Raising Revert inside the computation context exits via the VM's
    # error path; presumably the computation's __exit__ records and
    # suppresses the VMError, since execution continues past this block.
    with computation:
        raise Revert('Triggered VMError for tests')
    # Unlike other VM errors, a revert flags gas as not-to-burn and
    # leaves the gas accounting untouched.
    assert (not computation.should_burn_gas)
    assert (computation.get_gas_used() == 0)
    assert (computation.get_gas_remaining() == 100)
class OptionSeriesOrganizationSonificationContexttracksMappingHighpassResonance(Options):
    """Accessors for the sonification context-track highpass-resonance mapping options.

    NOTE(review): every name below is defined twice (getter form, then
    setter form) without @property / @<name>.setter decorators, so the
    second `def` shadows the first and only the setter form is reachable
    at runtime. Presumably decorators were intended -- confirm against
    the code generator that produced this class.
    """
    def mapFunction(self):
        # Getter form; shadowed by the setter definition below.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter form; shadowed by the setter definition below.
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Getter form; shadowed by the setter definition below.
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Getter form; shadowed by the setter definition below.
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # Getter form; shadowed by the setter definition below.
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
@_models('spacy.Dolly.v1')
def dolly_hf(name: Dolly.MODEL_NAMES, config_init: Optional[Dict[(str, Any)]]=SimpleFrozenDict(), config_run: Optional[Dict[(str, Any)]]=SimpleFrozenDict()) -> Callable[([Iterable[str]], Iterable[str])]:
    """Build a Dolly HuggingFace model callable for the spacy registry.

    BUG FIX: the registration call was a bare expression statement
    (``_models('spacy.Dolly.v1')`` on its own line), which discards the
    decorator the registry factory returns and never registers this
    function; restored as a decorator.

    Args:
        name: Dolly model variant to load.
        config_init: Keyword arguments for model construction.
        config_run: Keyword arguments applied at inference time.

    Returns:
        A callable mapping an iterable of prompts to an iterable of responses.
    """
    return Dolly(name=name, config_init=config_init, config_run=config_run)
class App():
    """Manages a generated React application's router and view exports.

    Holds component/import registries and mutates the app's ``index.js``
    in place to wire new routes.
    """

    def __init__(self, server):
        """Initialize empty registries bound to *server*.

        Args:
            server: Object exposing ``app_path`` / ``views_path`` /
                ``assets_path`` / ``page``, used for routing and exporting.
        """
        (self.imports, self.comps) = ({}, {})
        (self.vars, self.__map_var_names, self.server, self.__components) = ({}, {}, server, None)

    def route(self, component: str, alias: str):
        """Register *component* in index.js under the URL path ``/<alias>``.

        Rewrites the router file in place; no-op when the component name
        already appears in the file.

        Raises:
            ValueError: If index.js has no ``<Route `` line to anchor the insert.
        """
        index_router = Path(self.server.app_path, 'index.js')
        if (not index_router.exists()):
            logging.warning('Creating a router file...')
        route = ''
        if index_router.exists():
            with open(index_router) as f:
                route = f.read()
        if (component not in route):
            routes = route.split("import { Route, Link, BrowserRouter as Router } from 'react-router-dom';")
            # BUG FIX: str.split always returns at least one element, so the
            # original `if routes:` was always true and `routes[1]` raised
            # IndexError when the router import marker was missing. Only
            # take this branch when the marker was actually found.
            if (len(routes) > 1):
                routes[0] = ("%s\nimport %s from '%s';\n" % (routes[0].strip(), component, ('./%s/%s.component' % (self.server.app_path.name, component))))
                dis_route = routes[1].split('\n')
            else:
                routes.append(("import %s from '%s';\n" % (component, ('./%s/%s.component' % (self.server.app_path.name, component)))))
                dis_route = []
            for (i, line) in enumerate(dis_route):
                if line.strip().startswith('<Route '):
                    break
            else:
                raise ValueError('Issue with file, please update the index.js manually')
            route_end = ((dis_route[:i] + [(' <Route path="/%s" component={%s} />' % (alias, component))]) + dis_route[i:])
            with open(index_router, 'w') as f:
                f.write('\n'.join((([routes[0]] + ["import { Route, Link, BrowserRouter as Router } from 'react-router-dom';"]) + route_end)))

    def export(self, selector: str):
        """Write the page's views for *selector* to the server's views path."""
        logging.info(('export %s to: %s' % (selector, self.server.views_path)))
        self.server.views_path.mkdir(parents=True, exist_ok=True)
        add_to_app(self.server.page._props['schema'].values(), self.server.app_path, folder=self.server.assets_path.name)
        to_view(self.server.page, selector, self.server.views_path)
class StateContext():
    """Async context manager giving mutable access to a user's state data.

    On enter, yields a deep copy of the stored data; on exit, persists
    the (possibly mutated) copy back via ``obj.save``.
    """

    def __init__(self, obj, chat_id, user_id):
        self.obj = obj  # storage backend exposing async get_data/save
        self.data = None  # working copy, populated on __aenter__
        self.chat_id = chat_id
        self.user_id = user_id

    async def __aenter__(self):
        # Deep-copy so mutations don't leak into the store before save().
        self.data = copy.deepcopy((await self.obj.get_data(self.chat_id, self.user_id)))
        return self.data

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        # BUG FIX: the original returned save()'s result; a truthy return
        # from __aexit__ silently suppresses any exception raised inside
        # the `async with` block. Save, then return None so exceptions
        # propagate normally.
        await self.obj.save(self.chat_id, self.user_id, self.data)
        return None
def test_config_entry_precedence():
    """An environment variable must take precedence over the config-file value."""
    c = ConfigEntry(LegacyConfigEntry('platform', 'url', str))
    # With no env var set and no config file supplied, nothing resolves.
    assert (c.read() is None)
    old_environ = dict(os.environ)
    try:
        os.environ['FLYTE_PLATFORM_URL'] = 'xyz'
        cfg = get_config_file(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'configs/good.config'))
        # Even though the file defines platform.url, the env var wins.
        assert (c.read(cfg) == 'xyz')
    finally:
        # BUG FIX: restore the environment even if an assertion fails, and
        # mutate os.environ in place -- rebinding `os.environ` to a plain
        # dict (as the original did) would not update the real process
        # environment and breaks putenv syncing for later code.
        os.environ.clear()
        os.environ.update(old_environ)
def interactive_map(obj, **kwargs):
    """Render *obj* on a web-mercator plot and return an interactive map.

    NOTE(review): **kwargs is accepted but currently unused -- confirm
    whether it was meant to be forwarded to new_plot/plot_map.
    """
    size_cm = 100 * 3
    svg_tmp = temp_file('.svg')
    plot = new_plot(
        projection='web-mercator',
        width=6 * 1024,
        width_cm=size_cm,
        height_cm=size_cm,
        frame=False,
        foreground=False,
        background=False,
    )
    plot.plot_map(obj)
    bbox = plot.save(svg_tmp.path)
    return make_map(svg_tmp.path, bbox)
class OptionPlotoptionsColumnrangeStatesHover(Options):
    """Accessors for the columnrange series' hover-state options.

    NOTE(review): except for ``animation``, every name below is defined
    twice (getter form, then setter form) without @property /
    @<name>.setter decorators, so the second `def` shadows the first and
    only the setter form is reachable at runtime. Presumably decorators
    were intended -- confirm against the generator for this class.
    """
    def animation(self) -> 'OptionPlotoptionsColumnrangeStatesHoverAnimation':
        # Lazily materializes the nested animation options object.
        return self._config_sub_data('animation', OptionPlotoptionsColumnrangeStatesHoverAnimation)
    def borderColor(self):
        # Getter form; shadowed by the setter definition below.
        return self._config_get(None)
    def borderColor(self, text: str):
        self._config(text, js_type=False)
    def brightness(self):
        # Getter form (default 0.1); shadowed by the setter below.
        return self._config_get(0.1)
    def brightness(self, num: float):
        self._config(num, js_type=False)
    def color(self):
        # Getter form; shadowed by the setter definition below.
        return self._config_get(None)
    def color(self, text: str):
        self._config(text, js_type=False)
    def enabled(self):
        # Getter form (default True); shadowed by the setter below.
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
def test_receive_before_accept(test_client_factory):
    """Calling receive() on a websocket in CONNECTING state raises RuntimeError."""
    async def app(scope: Scope, receive: Receive, send: Send) -> None:
        websocket = WebSocket(scope, receive=receive, send=send)
        (await websocket.accept())
        # Force the state machine back to CONNECTING to simulate a
        # receive() issued before the handshake was accepted.
        websocket.client_state = WebSocketState.CONNECTING
        (await websocket.receive())
    client = test_client_factory(app)
    # The invalid receive() surfaces as a RuntimeError on the client side
    # when the client sends its first message.
    with pytest.raises(RuntimeError):
        with client.websocket_connect('/') as websocket:
            websocket.send({'type': 'websocket.send'})
class OptionSeriesSunburstSonificationTracksMappingTremoloSpeed(Options):
    """Accessors for the sunburst sonification tracks' tremolo-speed mapping options.

    NOTE(review): every name below is defined twice (getter form, then
    setter form) without @property / @<name>.setter decorators, so the
    second `def` shadows the first and only the setter form is reachable
    at runtime. Presumably decorators were intended -- confirm against
    the code generator that produced this class.
    """
    def mapFunction(self):
        # Getter form; shadowed by the setter definition below.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter form; shadowed by the setter definition below.
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Getter form; shadowed by the setter definition below.
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Getter form; shadowed by the setter definition below.
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # Getter form; shadowed by the setter definition below.
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def extractMehtranslationsCom(item):
    """Map a mehtranslations.com feed item to a release message.

    Returns None for previews or untagged chapters, a release message for
    recognized series tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    # (tag in feed, canonical series name, translation type)
    tagmap = [
        ('paw in paw', 'Paw in Paw, Lets Satisfy Our Desire for Dogs', 'translated'),
        ('the favoured genius', 'Space and Rebirth: The Favoured Genius Doctor and Businesswoman', 'translated'),
        ('the favoured genius doctor and businesswoman', 'Space and Rebirth: The Favoured Genius Doctor and Businesswoman', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class BranchesAlreadyExistError(FoundryAPIError):
    """Raised when creating a branch that already exists on a dataset."""

    def __init__(self, dataset_rid: str, branch_rid: str, response: (requests.Response | None)=None):
        """Build the error message from the rids and any server response text.

        Args:
            dataset_rid: Resource id of the dataset.
            branch_rid: Resource id of the branch that already exists.
            response: Optional HTTP response; its body text is appended
                to the message when present.
        """
        detail = '' if response is None else response.text
        super().__init__(f'Branch {branch_rid} already exists in {dataset_rid}.\n' + detail)
        self.dataset_rid = dataset_rid
        self.branch_rid = branch_rid
        self.response = response
class OptionPlotoptionsFunnel3dSonificationDefaultinstrumentoptionsMappingVolume(Options):
    """Accessors for the funnel3d default-instrument volume mapping options.

    NOTE(review): every name below is defined twice (getter form, then
    setter form) without @property / @<name>.setter decorators, so the
    second `def` shadows the first and only the setter form is reachable
    at runtime. Presumably decorators were intended -- confirm against
    the code generator that produced this class.
    """
    def mapFunction(self):
        # Getter form; shadowed by the setter definition below.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter form; shadowed by the setter definition below.
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Getter form; shadowed by the setter definition below.
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Getter form; shadowed by the setter definition below.
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # Getter form; shadowed by the setter definition below.
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
@mock.patch('aea.cli.utils.config.get_or_create_cli_config')
@mock.patch('aea.cli.utils.generic.yaml.dump')
@mock.patch('aea.cli.utils.click_utils.open_file', mock.mock_open())
class UpdateCLIConfigTestCase(TestCase):
    """Test update_cli_config with file I/O, YAML dump, and config loading patched out.

    BUG FIX: the three patch targets were bare expression statements
    (stripped decorators) that patched nothing; restored as class
    decorators. mock.patch injects mocks bottom-up, matching the test
    method's (dump_mock, icf_mock) signature: yaml.dump -> dump_mock,
    get_or_create_cli_config -> icf_mock; open_file is given an explicit
    mock_open replacement and injects no argument.
    """

    def testupdate_cli_config_positive(self, dump_mock, icf_mock):
        """update_cli_config loads the existing config once and dumps it once."""
        update_cli_config({'some': 'config'})
        icf_mock.assert_called_once()
        dump_mock.assert_called_once()
class StartResponseMockLite():
    """Minimal WSGI ``start_response`` stand-in that records its invocations."""

    def __init__(self):
        self._num_calls = 0
        # Most recently captured call arguments.
        self.status = self.headers = self.exc_info = None

    def __call__(self, status, headers, exc_info=None):
        """Capture this call's arguments, replacing any earlier ones."""
        self._num_calls += 1
        self.status = status
        self.headers = headers
        self.exc_info = exc_info

    def call_count(self):
        """Return how many times the mock has been invoked."""
        return self._num_calls
def _add_tests():
    """Dynamically attach one REST test per (OFP version, recorded JSON case).

    Reads ``ofctl_rest_json/<ver>.json`` next to this module and registers
    a test method on ``Test_ofctl_rest`` for every request description
    found; versions without a JSON file are skipped.
    """
    _ofp_vers = {'of10': ofproto_v1_0.OFP_VERSION, 'of12': ofproto_v1_2.OFP_VERSION, 'of13': ofproto_v1_3.OFP_VERSION, 'of14': ofproto_v1_4.OFP_VERSION, 'of15': ofproto_v1_5.OFP_VERSION}
    this_dir = os.path.dirname(sys.modules[__name__].__file__)
    ofctl_rest_json_dir = os.path.join(this_dir, 'ofctl_rest_json/')
    for ofp_ver, ofp_version_const in _ofp_vers.items():
        json_path = os.path.join(ofctl_rest_json_dir, (ofp_ver + '.json'))
        # Skip versions that have no recorded JSON test cases.
        if not os.path.exists(json_path):
            continue
        # BUG FIX: the original used json.load(open(...)) and never closed
        # the file handle; close it deterministically.
        with open(json_path) as json_file:
            _test_cases = json.load(json_file)
        for test in _test_cases:
            method = test['method']
            path = test['path']
            body = test.get('body', {})
            name = ((((('test_ofctl_rest_' + method) + '_') + ofp_ver) + '_') + path)
            f = functools.partial(Test_ofctl_rest._test, name=name, dp=DummyDatapath(ofp_version_const), method=test['method'], path=test['path'], body=body)
            test_lib.add_method(Test_ofctl_rest, name, f)
class StringifyObject(PropertyPreprocessor):
    """Preprocessor that renders every value of a dict argument as a string."""

    type = 'stringify_object'

    def imports(self):
        """Modules the generated code needs for this preprocessor."""
        return {'modules': ['json']}

    def process_arg(self, arg, node, raw_args):
        """Return a copy of *arg* with each value stringified.

        Empty strings become 'null', booleans become lowercase literals,
        containers are JSON-encoded, and everything else is str()-ified.
        """
        return {key: self._stringify(value) for (key, value) in arg.items()}

    @staticmethod
    def _stringify(value):
        """Render one value using the stringification rules above."""
        if value == '':
            return 'null'
        if isinstance(value, bool):
            return str(value).lower()
        if isinstance(value, (dict, list, tuple)):
            return json.dumps(value)
        return str(value)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.