code stringlengths 281 23.7M |
|---|
def test_prepare_transaction_replacement_not_higher_gas_price_raises(w3):
    """A replacement transaction whose gas price is not strictly higher than
    the current transaction's must be rejected with ValueError.

    The second price (10) presumably equals the current transaction's gas
    price (see SIMPLE_CURRENT_TRANSACTION), covering the equal-price case.
    """
    current_transaction = SIMPLE_CURRENT_TRANSACTION
    new_transaction = {'value': 1, 'gasPrice': 5}
    for gas_price in (5, 10):
        new_transaction['gasPrice'] = gas_price
        with pytest.raises(ValueError):
            prepare_replacement_transaction(w3, current_transaction, new_transaction)
def benchmark_only_pipeline():
    """Fixture (generator): temporarily replace the 'benchmark-only' pipeline
    with a fresh mock-backed one, restoring the original on teardown.

    Yields:
        racecontrol.Pipeline: the throw-away pipeline for the test to use.
    """
    test_pipeline_name = 'benchmark-only'
    original = racecontrol.pipelines[test_pipeline_name]
    pipeline = racecontrol.Pipeline(test_pipeline_name, 'Pipeline intended for unit-testing', mock.Mock())
    try:
        yield pipeline
    finally:
        # BUG FIX: restore even when the consuming test raises; without the
        # finally, an exception in the test left the mock-backed pipeline
        # registered for every subsequent test.
        racecontrol.pipelines[test_pipeline_name] = original
class StockFinanceReportEventSpider(scrapy.Spider):
    """Scrapy spider that collects finance-report publication events per stock
    and writes them to a CSV file.

    NOTE(review): several string literals in this copy look truncated/garbled —
    the xpath '//*[="con02-7"]' is missing its attribute name (presumably @id),
    the 'url' value `(' + href)` lost its prefix, and the URL template in
    get_finance_report_event_url is cut off. Confirm against the original
    source before relying on this block.
    """
    name = 'stock_finance_report_event'
    # Polite crawl rate plus the project's error-capturing middleware.
    custom_settings = {'DOWNLOAD_DELAY': 2, 'CONCURRENT_REQUESTS_PER_DOMAIN': 8, 'SPIDER_MIDDLEWARES': {'fooltrader.middlewares.FoolErrorMiddleware': 1000}}

    def start_requests(self):
        # A single security can be injected via settings; otherwise crawl
        # every security returned by get_security_list().
        security_item = self.settings.get('security_item')
        if (security_item is not None):
            for request in self.yield_request(security_item):
                (yield request)
        else:
            for (_, item) in get_security_list().iterrows():
                for request in self.yield_request(item):
                    (yield request)

    def yield_request(self, item):
        # One request per report period type: first-quarter (yjdbg),
        # half-year (zqbg), third-quarter (sjdbg) and annual (ndbg).
        for period_type in ['yjdbg', 'zqbg', 'sjdbg', 'ndbg']:
            url = self.get_finance_report_event_url(item['code'], period_type)
            (yield Request(url=url, headers=DEFAULT_KDATA_HEADER, meta={'item': item, 'period_type': period_type}, callback=self.download_fi_report_event_data))

    def report_period_from_title(title, period_type, report_event_date):
        # NOTE(review): defined without `self` yet called via
        # self.report_period_from_title(...) below — presumably a stripped
        # @staticmethod decorator; confirm against the original source.
        try:
            # Prefer the 4-digit year embedded in the report title ...
            year = re.match('.*(\\d{4}).*', title).group(1)
            report_event_year = pd.Timestamp(report_event_date).date().year
            # ... but reject it when implausibly far from the event date.
            if ((int(year) < (int(report_event_year) - 2)) or (int(year) > int(report_event_year))):
                raise Exception('wrong report year')
        except Exception as e:
            # Fall back to the event's year; annual reports (ndbg) describe
            # the previous year.
            year = pd.Timestamp(report_event_date).date().year
            if (period_type == 'ndbg'):
                year -= 1
        # Map the period type to that report's period-end date.
        if (period_type == 'yjdbg'):
            return '{}-03-31'.format(year)
        elif (period_type == 'zqbg'):
            return '{}-06-30'.format(year)
        elif (period_type == 'sjdbg'):
            return '{}-09-30'.format(year)
        elif (period_type == 'ndbg'):
            return '{}-12-31'.format(year)

    def download_fi_report_event_data(self, response):
        security_item = response.meta['item']
        period_type = response.meta['period_type']
        path = get_finance_report_event_path(security_item)
        df = pd.DataFrame()
        try:
            # Publication dates and report links sit in the same table cell;
            # the two extracts are expected to line up index-for-index.
            report_timestamps = response.xpath('//*[="con02-7"]/table[2]/tr/td[2]//ul/text()').extract()
            report_timestamps = [date.strip() for date in report_timestamps if date.strip()]
            report_contents = response.xpath('//*[="con02-7"]/table[2]/tr/td[2]//ul//a').extract()
            for (i, tr) in enumerate(report_contents):
                # NOTE(review): xpath('//') is missing its expression
                # (presumably '//@href') — garbled in this copy.
                href = Selector(text=tr).xpath('//').extract()[0]
                title = Selector(text=tr).xpath('//text()').extract()[0]
                report_period = self.report_period_from_title(title, period_type, report_timestamps[i])
                df = df.append({'securityId': security_item['id'], 'timestamp': report_timestamps[i], 'url': (' + href), 'title': title, 'reportPeriod': report_period}, ignore_index=True)
            if (not df.empty):
                df = df.drop_duplicates()
                df = index_df_with_time(df)
                df.to_csv(path, index=False)
        except Exception as e:
            self.logger.exception('error when getting k data url={}'.format(response.url))

    def from_crawler(cls, crawler, *args, **kwargs):
        # NOTE(review): takes `cls` — presumably a stripped @classmethod.
        # Hooks spider_closed to the crawler's close signal.
        spider = super(StockFinanceReportEventSpider, cls).from_crawler(crawler, *args, **kwargs)
        crawler.signals.connect(spider.spider_closed, signal=signals.spider_closed)
        return spider

    def spider_closed(self, spider, reason):
        spider.logger.info('Spider closed: %s,%s\n', spider.name, reason)

    def get_finance_report_event_url(self, code, report_period):
        # NOTE(review): the URL format string is truncated in this copy —
        # the line below is syntactically broken in the original dump too.
        return ' report_period)
def extractMarcell13BlogWordpressCom(item):
    """Map a parsed feed item to a release message.

    Returns None for previews or titles with no volume/chapter info, a built
    release message for a recognised tag, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    is_preview = 'preview' in item['title'].lower()
    if not (chp or vol) or is_preview:
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionPlotoptionsFunnelSonificationDefaultspeechoptionsPointgrouping(Options):
    """Generated Highcharts option wrapper for
    plotOptions.funnel.sonification.defaultSpeechOptions.pointGrouping.

    NOTE(review): each option appears twice (getter then setter) under the
    same name — the @property/@<name>.setter decorators were presumably
    stripped from this copy; as written the later def shadows the earlier.
    Confirm against the generated original.
    """

    def algorithm(self):
        # Grouping algorithm; defaults to 'last'.
        return self._config_get('last')

    def algorithm(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        # Point grouping is enabled by default.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def groupTimespan(self):
        # Grouping window; default 15 (units per Highcharts docs — TODO confirm).
        return self._config_get(15)

    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        # Point property used for grouping; defaults to 'y'.
        return self._config_get('y')

    def prop(self, text: str):
        self._config(text, js_type=False)
def erase_flash(esp, args):
    """Erase the chip's entire flash, refusing when security features are on.

    Raises FatalError when flash encryption or secure boot is active and
    --force was not given (ESP8266 and secure-download mode skip the check,
    since the feature queries are unavailable there).
    """
    security_check_needed = (
        not args.force
        and esp.CHIP_NAME != 'ESP8266'
        and not esp.secure_download_mode
    )
    if security_check_needed:
        if esp.get_flash_encryption_enabled() or esp.get_secure_boot_enabled():
            raise FatalError('Active security features detected, erasing flash is disabled as a safety measure. Use --force to override, please use with caution, otherwise it may brick your device!')
    print('Erasing flash (this may take a while)...')
    started_at = time.time()
    esp.erase_flash()
    print('Chip erase completed successfully in %.1fs' % (time.time() - started_at))
('function_length')
def function_length(node):
    """Return the number of source lines spanned by *node*.

    Script files have no function length, so they yield None. A terminated
    function is measured from its `function` token to its `end` token; an
    unterminated one (only legal inside a function file) runs to the start
    of the next function, or to the end of the file.
    """
    assert isinstance(node, (Function_Definition, Script_File))
    if isinstance(node, Script_File):
        return None
    if node.t_end:
        # Normal case: the function has an explicit terminator.
        return node.t_end.location.line - node.t_fun.location.line + 1
    # Unterminated: measure up to the next sibling (or end of file).
    n_cu = node.n_parent
    if not isinstance(n_cu, Function_File):
        raise ICE('unterminated function must be a child of a function file')
    siblings = n_cu.l_functions
    next_idx = siblings.index(node) + 1
    if next_idx < len(siblings):
        return siblings[next_idx].t_fun.location.line - node.t_fun.location.line
    return n_cu.file_length - node.t_fun.location.line + 1
_ocx_not_available
_not_currently_in_session
def test_GetCodeListByConditionAsStream(entrypoint):
    """Stream a condition's code list plus its info table and verify that
    every returned code appears in the info frame.

    NOTE(review): condition_name is ' ' and the info column key is '' in this
    copy — these literals look stripped; confirm against the original fixture.
    """
    entrypoint.EnsureConditionLoaded()
    conditions = entrypoint.GetConditionNameListAsList()
    condition_name = ' '
    assert (condition_name in [item[1] for item in conditions])
    stream = entrypoint.GetCodeListByConditionAsStream(condition_name, with_info=True)
    # First event carries the ';'-separated code list, the second the info table.
    condition_event = next(stream)
    info_event = next(stream)
    stream.cancel()
    codes = condition_event.arguments[1].string_value.strip(';').split(';')
    records = [values.values for values in info_event.multi_data.values]
    columns = info_event.multi_data.names
    info = pd.DataFrame.from_records(records, columns=columns)
    assert (len(codes) > 0)
    assert (info.shape[0] > 0)
    info_codes = info[''].tolist()
    for code in codes:
        assert (code in info_codes)
def run():
    """Build design segment tags from params.jl and write the segdata.

    Reads one JSON record per line (after skipping the header line), feeds
    each my_XADC instance's parameters to bus_tags, then compiles and writes
    the segmaker output.
    """
    segmk = Segmaker('design.bits')
    print('Loading tags')
    # BUG FIX: use a context manager so the file is closed even when a line
    # fails to parse — the original leaked the handle.
    with open('params.jl', 'r') as f:
        f.readline()  # skip header line
        for line in f:
            record = json.loads(line)
            ps = record['params']
            assert (record['module'] == 'my_XADC')
            site = verilog.unquote(ps['LOC'])
            bus_tags(segmk, ps, site)
    segmk.compile()
    segmk.write()
def send_verification_code_to_user(db: Session, request: (ConsentRequest | PrivacyRequest), to_identity: (Identity | None)) -> str:
    """Generate an identity-verification code, cache it on the request, and
    dispatch it to the user via the configured messaging service.

    Returns the generated code so callers (e.g. tests) can assert on it.
    """
    proxy = ConfigProxy(db)
    code = generate_id_verification_code()
    request.cache_identity_verification_code(code)
    body_params = SubjectIdentityVerificationBodyParams(
        verification_code=code,
        verification_code_ttl_seconds=CONFIG.redis.identity_verification_code_ttl_seconds,
    )
    dispatch_message(
        db,
        action_type=MessagingActionType.SUBJECT_IDENTITY_VERIFICATION,
        to_identity=to_identity,
        service_type=proxy.notifications.notification_service_type,
        message_body_params=body_params,
    )
    return code
def downgrade():
    """Revert the price-list/goods migration: drop the columns and the three
    tables added by the corresponding upgrade()."""
    with op.batch_alter_table('Budgets', schema=None) as batch_op:
        batch_op.drop_column('parent_id')
    with op.batch_alter_table('BudgetEntries', schema=None) as batch_op:
        for column_name in ('unit', 'realized_total', 'price', 'msrp', 'cost'):
            batch_op.drop_column(column_name)
    # Same table order as the original migration (association table first).
    for table_name in ('PriceList_Goods', 'Goods', 'PriceLists'):
        op.drop_table(table_name)
(st.floats(allow_nan=False, allow_infinity=False), valid_derrf_parameters())
def test_that_derrf_corresponds_scaled_binned_normal_cdf(x, arg):
    """trans_derrf must equal a scaled, binned normal CDF.

    The expectation is the normal CDF of x (shifted by -skew, scaled by
    width), snapped onto _steps quantile bins and mapped onto [_min, _max].
    """
    (_steps, _min, _max, _skew, _width) = arg
    q_values = np.linspace(start=0, stop=1, num=_steps)
    q_checks = np.linspace(start=0, stop=1, num=(_steps + 1))[1:]
    p = norm.cdf(x, loc=(- _skew), scale=_width)
    bin_index = np.digitize(p, q_checks, right=True)
    expected = q_values[bin_index]
    expected = (_min + (expected * (_max - _min)))
    if ((expected > _max) or (expected < _min)):
        # BUG FIX: np.clip returns the clipped value — the original discarded
        # the result, so out-of-range expectations were never clamped.
        expected = np.clip(expected, _min, _max)
    result = TransferFunction.trans_derrf(x, arg)
    assert np.isclose(result, expected)
def init_embedded(level, sentry_dsn, release):
    """Configure root logging for embedded use: an optional Sentry handler
    for errors plus a stdout handler at *level*."""
    patch_gevent_hub_print_exception()
    root = logging.getLogger()
    root.setLevel(0)  # pass everything; handlers do the filtering
    hdlr: Any
    # Silence raven's own error logger.
    logging.getLogger('sentry.errors').setLevel(1000)
    if sentry_dsn:
        client = raven.Client(sentry_dsn, transport=GeventedHTTPTransport, release=release)
        hdlr = SentryHandler(client)
        hdlr.setLevel(logging.ERROR)
        root.addHandler(hdlr)
    hdlr = logging.StreamHandler(sys.stdout)
    hdlr.setLevel(level)
    # Colours only where the platform is known to support ANSI output.
    hdlr.setFormatter(ServerLogFormatter(use_color=(sys.platform == 'linux')))
    root.addHandler(hdlr)
    root.info(datetime.datetime.now().strftime('%Y-%m-%d %H:%M'))
    root.info('')
class WafRuleRevisionAttributes(ModelNormal):
    """Generated OpenAPI model for a WAF rule revision's attributes.

    NOTE(review): the bare `_property` and `_js_args_to_python_args` lines
    below look like stripped decorators (@cached_property/@classmethod-style
    wrappers from the generated original); `_from_openapi_data` takes `cls`
    and `__init__`'s trailing read-only check sits after setattr — both are
    presumably artifacts of the stripping. Confirm against the generator
    output before editing behavior.
    """
    # Closed value set for the 'state' attribute.
    allowed_values = {('state',): {'LATEST': 'latest', 'OUTDATED': 'outdated'}}
    validations = {}

    _property
    def additional_properties_type():
        # Any JSON-compatible type is accepted for unknown keys.
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    _property
    def openapi_types():
        # Attribute name -> accepted type tuple.
        return {'message': (str,), 'modsec_rule_id': (int,), 'paranoia_level': (int,), 'revision': (int,), 'severity': (int,), 'source': (str,), 'state': (str,), 'vcl': (str,)}

    _property
    def discriminator():
        return None

    attribute_map = {'message': 'message', 'modsec_rule_id': 'modsec_rule_id', 'paranoia_level': 'paranoia_level', 'revision': 'revision', 'severity': 'severity', 'source': 'source', 'state': 'state', 'vcl': 'vcl'}
    read_only_vars = {'message', 'modsec_rule_id', 'paranoia_level', 'severity', 'source', 'state', 'vcl'}
    _composed_schemas = {}

    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Construct an instance from server data (read-only vars allowed)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when the configuration says so.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Construct an instance from user input (read-only vars rejected)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # NOTE(review): raising after setattr leaves the attribute set;
            # generated originals usually check before assigning — confirm.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def extractTaxingcorn117StranslationsCom(item):
    """Parse a feed item into a release message.

    None means skip (preview / no chapter info); False means no tag matched.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def parse_ip_address_and_port(addr, default_port=22):
    """Extract (IPAddress, port) from *addr* via the known address regexes.

    Falls back to *default_port* when the matching pattern captures no port.
    Raises netaddr.AddrFormatError when no regex matches at all.
    """
    for regexp in _IP_ADDRESS_RE:
        match = regexp.search(addr)
        if (not match):
            continue
        ip_addr = netaddr.IPAddress(match.group('ip_addr'))
        try:
            port = match.group('port')
        except IndexError:
            # This pattern defines no 'port' group at all.
            port = None
        if (port is not None):
            # Drop the leading separator (e.g. ':') captured by the group.
            port = int(port[1:])
        else:
            port = default_port
        return (ip_addr, port)
    # BUG FIX: the original used '{1}' with a single positional argument,
    # which raised IndexError instead of the intended AddrFormatError.
    raise netaddr.AddrFormatError('Could not extract IP address and port from `{0}`'.format(addr))
class DoraConfig():
    """Dora output/bookkeeping configuration.

    NOTE(review): annotated fields with defaults plus field(default_factory=…)
    strongly suggest a stripped @dataclass decorator in this copy — confirm
    against the original before instantiating.
    """
    dir: Path = Path('./outputs')  # root folder for all outputs
    exclude: tp.List[str] = field(default_factory=list)  # fnmatch patterns, see is_excluded()
    git_save: bool = False
    shared: tp.Optional[Path] = None
    grid_package: tp.Optional[str] = None
    history: str = 'history.json'
    xps: str = 'xps'
    shep: ShepConfig = field(default_factory=ShepConfig)
    rendezvous_file: str = 'rendezvous.txt'
    use_rendezvous: bool = False
    _grids: str = 'grids'
    _codes: str = 'codes'

    def is_excluded(self, arg_name: str) -> bool:
        """Return True when *arg_name* matches any pattern in self.exclude."""
        for pattern in self.exclude:
            if fnmatch(arg_name, pattern):
                return True
        return False

    def __setattr__(self, name, value):
        # 'dir' and 'shared' are always stored as absolute Paths
        # (presumably so later cwd changes cannot break them — confirm).
        if (name in ['dir', 'shared']):
            # Imported lazily to avoid a circular import with git_save.
            from .git_save import to_absolute_path
            if (value is not None):
                value = Path(to_absolute_path(value))
        super().__setattr__(name, value)
class PresidentialCoverage(db.Model):
    """SQLAlchemy model mapping the read-only
    public.ofec_presidential_coverage_date_vw view (per-candidate coverage
    end dates by election year)."""
    __table_args__ = {'schema': 'public'}
    __tablename__ = 'ofec_presidential_coverage_date_vw'
    # Surrogate primary key supplied by the view.
    idx = db.Column(db.Integer, primary_key=True)
    candidate_id = db.Column(db.String, doc=docs.CANDIDATE_ID_PRESIDENTIAL)
    coverage_end_date = db.Column(db.DateTime, doc=docs.COVERAGE_END_DATE)
    election_year = db.Column(db.Integer, doc=docs.ELECTION_YEAR)
class TaskService():
    """Thin facade over the Celery app for querying and launching tasks.

    NOTE(review): the methods take no `self`/`cls` — presumably stripped
    @staticmethod decorators in this copy; confirm against the original.
    """

    def get(pk: str) -> (AsyncResult | None):
        """Look up a task result by id; None when the task failed.

        Raises NotFoundError when the backend has no such task.
        NOTE(review): the NotFoundError msg literal is empty here — it looks
        stripped from the original source.
        """
        try:
            result = celery_app.AsyncResult(pk)
        except (BackendGetMetaError, NotRegistered):
            raise NotFoundError(msg='')
        if result.failed():
            return None
        return result

    def gets() -> dict:
        """Return all registered tasks except Celery's internal 'celery.*' ones."""
        filtered_tasks = {}
        tasks = celery_app.tasks
        for (key, value) in tasks.items():
            if (not key.startswith('celery.')):
                filtered_tasks[key] = value
        return filtered_tasks

    def run(*, module: str, args: (list | None)=None, kwargs: (dict | None)=None) -> AsyncResult:
        """Dispatch task *module* asynchronously and return its AsyncResult."""
        task = celery_app.send_task(module, args, kwargs)
        return task
class CapabilityHeader(QHeaderView):
    """Qt header view that paints capability/robot images into its sections
    and emits a description request when a section is clicked."""

    # Emitted with (title, description_html) on section click.
    description_requested_signal = Signal(str, str)

    def __init__(self, orientation, parent=None):
        QHeaderView.__init__(self, orientation, parent)
        # One dict per section:
        # {'cfgs', 'name', 'displayed_name', 'type', 'description', 'images'}.
        self._data = []
        if (orientation == Qt.Horizontal):
            self.setDefaultAlignment((Qt.AlignHCenter | Qt.AlignBottom))
        elif (orientation == Qt.Vertical):
            self.setDefaultAlignment((Qt.AlignLeft | Qt.AlignBottom))
        self.controlWidget = []

    def index(self, name):
        """Return the section index whose entry is named *name*, or -1."""
        for (index, entry) in enumerate(self._data):
            if (entry['name'] == name):
                return index
        return (- 1)

    def paintSection(self, painter, rect, logicalIndex):
        """Paint the default section, then overlay one or two scaled images."""
        painter.save()
        QHeaderView.paintSection(self, painter, rect, logicalIndex)
        painter.restore()
        if ((logicalIndex in range(len(self._data))) and self._data[logicalIndex]['images']):
            if (len(self._data[logicalIndex]['images']) == 1):
                # Single image: fill the section, keeping 20px for the label.
                pix = self._data[logicalIndex]['images'][0]
                pix = pix.scaled(rect.width(), (rect.height() - 20), Qt.KeepAspectRatio, Qt.SmoothTransformation)
                self.style().drawItemPixmap(painter, rect, 5, pix)
            elif (len(self._data[logicalIndex]['images']) > 1):
                # Multiple images: stack the first two vertically.
                new_rect = QRect(rect.left(), rect.top(), rect.width(), ((rect.height() - 20) / 2.0))
                pix = self._data[logicalIndex]['images'][0]
                pix = pix.scaled(new_rect.width(), new_rect.height(), Qt.KeepAspectRatio, Qt.SmoothTransformation)
                self.style().drawItemPixmap(painter, new_rect, 5, pix)
                new_rect = QRect(rect.left(), (rect.top() + new_rect.height()), rect.width(), new_rect.height())
                pix = self._data[logicalIndex]['images'][1]
                pix = pix.scaled(new_rect.width(), new_rect.height(), Qt.KeepAspectRatio, Qt.SmoothTransformation)
                self.style().drawItemPixmap(painter, new_rect, 5, pix)

    def mousePressEvent(self, event):
        """Emit the clicked section's description, rendered to HTML via
        docutils when available (raw text otherwise)."""
        QHeaderView.mousePressEvent(self, event)
        index = self.logicalIndexAt(event.pos())
        if (index in range(len(self._data))):
            suffix = 'Capability'
            if (self.orientation() == Qt.Horizontal):
                suffix = 'Robot'
            title = ' - '.join([self._data[index]['name'], suffix])
            text = self._data[index]['description']
            try:
                from docutils import examples
                text = examples.html_body(text)
            except Exception:
                # docutils missing or rst error: log and fall back to raw text.
                import traceback
                rospy.logwarn('Error while generate description for %s: %s', self._data[index]['name'], traceback.format_exc(1))
            self.description_requested_signal.emit(title, text)

    def setDescription(self, index, cfg, name, displayed_name, robot_type, description, images):
        """Overwrite the stored description of section *index*, if present."""
        if (index < len(self._data)):
            obj = self._data[index]
            if (cfg not in obj['cfgs']):
                obj['cfgs'].append(cfg)
            obj['name'] = name
            if displayed_name:
                obj['displayed_name'] = displayed_name
            obj['type'] = robot_type
            obj['description'] = replace_paths(description)
            if images:
                del obj['images'][:]
                for image_path in images:
                    img = interpret_path(image_path)
                    # Relative paths are resolved against the package dir.
                    if (img and (img[0] != os.path.sep)):
                        img = os.path.join(nm.settings().PACKAGE_DIR, image_path)
                    if os.path.isfile(img):
                        obj['images'].append(QPixmap(img))

    def update_description(self, index, cfg, name, displayed_name, robot_type, description, images):
        """Like setDescription, but only fills fields that are still empty."""
        if (index < len(self._data)):
            obj = self._data[index]
            if (cfg not in obj['cfgs']):
                obj['cfgs'].append(cfg)
            if (not obj['name']):
                obj['name'] = name
            if (not obj['displayed_name']):
                obj['displayed_name'] = displayed_name
            if (not obj['type']):
                obj['type'] = robot_type
            if (not obj['description']):
                obj['description'] = replace_paths(description)
            if (not obj['images']):
                for image_path in images:
                    img = interpret_path(image_path)
                    if (img and (img[0] != os.path.sep)):
                        img = os.path.join(nm.settings().PACKAGE_DIR, image_path)
                    if os.path.isfile(img):
                        obj['images'].append(QPixmap(img))

    def removeDescription(self, index):
        """Drop the stored entry for section *index*."""
        if (index < len(self._data)):
            self._data.pop(index)

    def insertItem(self, index):
        """Insert an empty entry at *index* (append when out of range)."""
        new_dict = {'cfgs': [], 'name': '', 'displayed_name': '', 'type': '', 'description': '', 'images': []}
        if (index < len(self._data)):
            self._data.insert(index, new_dict)
        else:
            self._data.append(new_dict)

    def insertSortedItem(self, name, displayed_name):
        """Insert a new entry keeping case-insensitive displayed_name order;
        return the index it was placed at."""
        new_dict = {'cfgs': [], 'name': name, 'displayed_name': displayed_name, 'type': '', 'description': '', 'images': []}
        for (index, item) in enumerate(self._data):
            if (item['displayed_name'].lower() > displayed_name.lower()):
                self._data.insert(index, new_dict)
                return index
        self._data.append(new_dict)
        return (len(self._data) - 1)

    def removeCfg(self, cfg):
        """Remove *cfg* from all entries; return the indexes that changed."""
        result = []
        for (index, d) in enumerate(self._data):
            if (cfg in d['cfgs']):
                d['cfgs'].remove(cfg)
                result.append(index)
        return result

    def count(self):
        """Number of stored section entries."""
        return len(self._data)

    def getConfigs(self, index):
        """Copy of the cfg list for section *index* ([] when out of range)."""
        result = []
        if (index < len(self._data)):
            result = list(self._data[index]['cfgs'])
        return result
class OptionSeriesVariwideDatalabels(Options):
    """Generated Highcharts option wrapper for series.variwide.dataLabels.

    Each option appears as a getter (returning the configured/default value)
    followed by a setter writing through self._config. NOTE(review): the
    @property/@<name>.setter decorators were presumably stripped from this
    copy — as written each later def shadows its predecessor; confirm
    against the generated original. Defaults in the getters mirror the
    Highcharts dataLabels defaults.
    """

    def align(self):
        return self._config_get('undefined')
    def align(self, text: str):
        self._config(text, js_type=False)
    def allowOverlap(self):
        return self._config_get(False)
    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)
    def animation(self) -> 'OptionSeriesVariwideDatalabelsAnimation':
        # Nested option group.
        return self._config_sub_data('animation', OptionSeriesVariwideDatalabelsAnimation)
    def backgroundColor(self):
        return self._config_get(None)
    def backgroundColor(self, text: str):
        self._config(text, js_type=False)
    def borderColor(self):
        return self._config_get(None)
    def borderColor(self, text: str):
        self._config(text, js_type=False)
    def borderRadius(self):
        return self._config_get(0)
    def borderRadius(self, num: float):
        self._config(num, js_type=False)
    def borderWidth(self):
        return self._config_get(0)
    def borderWidth(self, num: float):
        self._config(num, js_type=False)
    def className(self):
        return self._config_get(None)
    def className(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        return self._config_get(None)
    def color(self, text: str):
        self._config(text, js_type=False)
    def crop(self):
        return self._config_get(True)
    def crop(self, flag: bool):
        self._config(flag, js_type=False)
    def defer(self):
        return self._config_get(True)
    def defer(self, flag: bool):
        self._config(flag, js_type=False)
    def enabled(self):
        return self._config_get(False)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def filter(self) -> 'OptionSeriesVariwideDatalabelsFilter':
        # Nested option group.
        return self._config_sub_data('filter', OptionSeriesVariwideDatalabelsFilter)
    def format(self):
        return self._config_get('point.value')
    def format(self, text: str):
        self._config(text, js_type=False)
    def formatter(self):
        return self._config_get(None)
    def formatter(self, value: Any):
        self._config(value, js_type=False)
    def inside(self):
        return self._config_get(None)
    def inside(self, flag: bool):
        self._config(flag, js_type=False)
    def nullFormat(self):
        return self._config_get(None)
    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)
    def nullFormatter(self):
        return self._config_get(None)
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)
    def overflow(self):
        return self._config_get('justify')
    def overflow(self, text: str):
        self._config(text, js_type=False)
    def padding(self):
        return self._config_get(5)
    def padding(self, num: float):
        self._config(num, js_type=False)
    def position(self):
        return self._config_get('center')
    def position(self, text: str):
        self._config(text, js_type=False)
    def rotation(self):
        return self._config_get(0)
    def rotation(self, num: float):
        self._config(num, js_type=False)
    def shadow(self):
        return self._config_get(False)
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)
    def shape(self):
        return self._config_get('square')
    def shape(self, text: str):
        self._config(text, js_type=False)
    def style(self):
        return self._config_get(None)
    def style(self, value: Any):
        self._config(value, js_type=False)
    def textPath(self) -> 'OptionSeriesVariwideDatalabelsTextpath':
        # Nested option group.
        return self._config_sub_data('textPath', OptionSeriesVariwideDatalabelsTextpath)
    def useHTML(self):
        return self._config_get(False)
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)
    def verticalAlign(self):
        return self._config_get('undefined')
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)
    def x(self):
        return self._config_get(0)
    def x(self, num: float):
        self._config(num, js_type=False)
    def y(self):
        return self._config_get('undefined')
    def y(self, num: float):
        self._config(num, js_type=False)
    def zIndex(self):
        return self._config_get(6)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
def extractMionlineWordpressCom(item):
    """Turn a parsed feed item into a release message.

    Returns None for previews or titles with no vol/chapter info, a release
    message for a recognised tag, and False otherwise.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if not (chp or vol) or 'preview' in title.lower():
        return None
    known_tags = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in known_tags:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class Demo(lg.Graph):
    """LabGraph demo graph: a noise source fans out into a rolling averager,
    an amplifier and an attenuator; all four streams feed a serializer whose
    output is published through a WebSocket API server node."""
    WS_SERVER_NODE: WSAPIServerNode
    SERIALIZER: Serializer
    NOISE_GENERATOR: NoiseGenerator
    ROLLING_AVERAGER: RollingAverager
    AMPLIFIER: Amplifier
    ATTENUATOR: Attenuator

    def set_topology(self, topology: SerializedGraph, sub_pub_map: Dict) -> None:
        # Stored for setup(); must be called before the graph starts.
        self._topology = topology
        self._sub_pub_match = sub_pub_map

    def setup(self) -> None:
        """Configure every node from the module-level constants."""
        self.WS_SERVER_NODE.configure(WSAPIServerConfig(app_id=APP_ID, ip=WS_SERVER.DEFAULT_IP, port=ENUMS.WS_SERVER.DEFAULT_PORT, api_version=ENUMS.WS_SERVER.DEFAULT_API_VERSION, num_messages=(- 1), enums=ENUMS(), sample_rate=SAMPLE_RATE))
        self.SERIALIZER.configure(SerializerConfig(data=self._topology, sub_pub_match=self._sub_pub_match, sample_rate=SAMPLE_RATE, stream_name=STREAM.LABGRAPH_MONITOR, stream_id=STREAM.LABGRAPH_MONITOR_ID))
        self.NOISE_GENERATOR.configure(NoiseGeneratorConfig(sample_rate=float(SAMPLE_RATE), num_features=NUM_FEATURES))
        self.ROLLING_AVERAGER.configure(RollingConfig(window=WINDOW))
        self.AMPLIFIER.configure(AmplifierConfig(out_in_ratio=OUT_IN_RATIO))
        self.ATTENUATOR.configure(AttenuatorConfig(attenuation=ATTENUATION))

    def connections(self) -> lg.Connections:
        # Noise output fans into all three transforms and the serializer;
        # the serializer output feeds the WS server topic.
        return ((self.NOISE_GENERATOR.NOISE_GENERATOR_OUTPUT, self.ROLLING_AVERAGER.ROLLING_AVERAGER_INPUT), (self.NOISE_GENERATOR.NOISE_GENERATOR_OUTPUT, self.AMPLIFIER.AMPLIFIER_INPUT), (self.NOISE_GENERATOR.NOISE_GENERATOR_OUTPUT, self.ATTENUATOR.ATTENUATOR_INPUT), (self.NOISE_GENERATOR.NOISE_GENERATOR_OUTPUT, self.SERIALIZER.SERIALIZER_INPUT_1), (self.ROLLING_AVERAGER.ROLLING_AVERAGER_OUTPUT, self.SERIALIZER.SERIALIZER_INPUT_2), (self.AMPLIFIER.AMPLIFIER_OUTPUT, self.SERIALIZER.SERIALIZER_INPUT_3), (self.ATTENUATOR.ATTENUATOR_OUTPUT, self.SERIALIZER.SERIALIZER_INPUT_4), (self.SERIALIZER.SERIALIZER_OUTPUT, self.WS_SERVER_NODE.topic))

    def process_modules(self) -> Tuple[(lg.Module, ...)]:
        """Modules that each run in their own process."""
        return (self.NOISE_GENERATOR, self.ROLLING_AVERAGER, self.AMPLIFIER, self.ATTENUATOR, self.SERIALIZER, self.WS_SERVER_NODE)
def extractWhitehemlocktranslationsWordpressCom(item):
    """Build a release message from a parsed feed item.

    None = skip (preview / no chapter info); False = no known tag matched.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    should_skip = not (chp or vol) or 'preview' in item['title'].lower()
    if should_skip:
        return None
    tagmap = [
        ('Male God is Chasing My Brother', 'Male God is Chasing My Brother', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# NOTE(review): the bare tuple below looks like the argument list of a
# stripped skip decorator (e.g. @unittest.skipUnless) — confirm against the
# original source.
(('reconf' in cfgdiff.supported_formats), 'requires reconfigure')
class ReconfigureDiffTestCase(CfgDiffTestCase):
    """Diff tests exercised through the `reconfigure` parser backend."""

    def setUp(self):
        # Imported dynamically so the module loads even when reconfigure is
        # not installed.
        self.configs = __import__('reconfigure.configs', fromlist=['reconfigure'])

    def test_reconf_same(self):
        self._test_same(cfgdiff.ReconfigureDiff, './tests/test_same_1-a.ini', './tests/test_same_1-b.ini', self.configs.SambaConfig)

    def test_reconf_different(self):
        self._test_different(cfgdiff.ReconfigureDiff, './tests/test_different_1-a.ini', './tests/test_different_1-b.ini', self.configs.SambaConfig)

    def test_reconf_same_bind(self):
        self._test_same(cfgdiff.ReconfigureDiff, './tests/test_same_1-a.isc', './tests/test_same_1-b.isc', self.configs.BIND9Config)

    def test_reconf_different_bind(self):
        self._test_different(cfgdiff.ReconfigureDiff, './tests/test_different_1-a.isc', './tests/test_different_1-b.isc', self.configs.BIND9Config)
def render() -> None:
    """Create the frame-processor option widgets and register them as UI
    components; each widget's visibility tracks whether its processor is in
    facefusion.globals.frame_processors."""
    global FACE_SWAPPER_MODEL_DROPDOWN
    global FACE_ENHANCER_MODEL_DROPDOWN
    global FACE_ENHANCER_BLEND_SLIDER
    global FRAME_ENHANCER_MODEL_DROPDOWN
    global FRAME_ENHANCER_BLEND_SLIDER
    global FACE_DEBUGGER_ITEMS_CHECKBOX_GROUP
    FACE_SWAPPER_MODEL_DROPDOWN = gradio.Dropdown(label=wording.get('face_swapper_model_dropdown_label'), choices=frame_processors_choices.face_swapper_models, value=frame_processors_globals.face_swapper_model, visible=('face_swapper' in facefusion.globals.frame_processors))
    FACE_ENHANCER_MODEL_DROPDOWN = gradio.Dropdown(label=wording.get('face_enhancer_model_dropdown_label'), choices=frame_processors_choices.face_enhancer_models, value=frame_processors_globals.face_enhancer_model, visible=('face_enhancer' in facefusion.globals.frame_processors))
    FACE_ENHANCER_BLEND_SLIDER = gradio.Slider(label=wording.get('face_enhancer_blend_slider_label'), value=frame_processors_globals.face_enhancer_blend, step=(frame_processors_choices.face_enhancer_blend_range[1] - frame_processors_choices.face_enhancer_blend_range[0]), minimum=frame_processors_choices.face_enhancer_blend_range[0], maximum=frame_processors_choices.face_enhancer_blend_range[(- 1)], visible=('face_enhancer' in facefusion.globals.frame_processors))
    FRAME_ENHANCER_MODEL_DROPDOWN = gradio.Dropdown(label=wording.get('frame_enhancer_model_dropdown_label'), choices=frame_processors_choices.frame_enhancer_models, value=frame_processors_globals.frame_enhancer_model, visible=('frame_enhancer' in facefusion.globals.frame_processors))
    # BUG FIX: this slider's visibility was keyed on 'face_enhancer' — it must
    # follow the frame_enhancer processor like the dropdown above.
    FRAME_ENHANCER_BLEND_SLIDER = gradio.Slider(label=wording.get('frame_enhancer_blend_slider_label'), value=frame_processors_globals.frame_enhancer_blend, step=(frame_processors_choices.frame_enhancer_blend_range[1] - frame_processors_choices.frame_enhancer_blend_range[0]), minimum=frame_processors_choices.frame_enhancer_blend_range[0], maximum=frame_processors_choices.frame_enhancer_blend_range[(- 1)], visible=('frame_enhancer' in facefusion.globals.frame_processors))
    FACE_DEBUGGER_ITEMS_CHECKBOX_GROUP = gradio.CheckboxGroup(label=wording.get('face_debugger_items_checkbox_group_label'), choices=frame_processors_choices.face_debugger_items, value=frame_processors_globals.face_debugger_items, visible=('face_debugger' in facefusion.globals.frame_processors))
    register_ui_component('face_swapper_model_dropdown', FACE_SWAPPER_MODEL_DROPDOWN)
    register_ui_component('face_enhancer_model_dropdown', FACE_ENHANCER_MODEL_DROPDOWN)
    register_ui_component('face_enhancer_blend_slider', FACE_ENHANCER_BLEND_SLIDER)
    register_ui_component('frame_enhancer_model_dropdown', FRAME_ENHANCER_MODEL_DROPDOWN)
    register_ui_component('frame_enhancer_blend_slider', FRAME_ENHANCER_BLEND_SLIDER)
    register_ui_component('face_debugger_items_checkbox_group', FACE_DEBUGGER_ITEMS_CHECKBOX_GROUP)
# NOTE(review): `_control_type` below looks like a stripped class decorator
# (registering this control-format class) — confirm against the original.
_control_type
class ControlFormatI(stringify.StringifyMixin):
    """LLC I-format (information transfer) control field, packed as a
    big-endian 16-bit word.

    Per serialize() the layout is: bits 15..9 send sequence number, bit 8
    the format discriminator (0 for I), bits 7..1 receive sequence number,
    bit 0 the poll/final bit.
    """
    TYPE = 0
    _PACK_STR = '!H'
    _PACK_LEN = struct.calcsize(_PACK_STR)

    def __init__(self, send_sequence_number=0, pf_bit=0, receive_sequence_number=0):
        super(ControlFormatI, self).__init__()
        self.send_sequence_number = send_sequence_number
        self.pf_bit = pf_bit
        self.receive_sequence_number = receive_sequence_number

    def parser(cls, buf):
        # NOTE(review): takes `cls` — presumably a stripped @classmethod.
        assert (len(buf) >= cls._PACK_LEN)
        (control,) = struct.unpack_from(cls._PACK_STR, buf)
        assert (((control >> 8) & 1) == cls.TYPE)
        send_sequence_number = ((control >> 9) & 127)
        # NOTE(review): pf_bit is read from bit 8 here, but serialize() writes
        # it to bit 0 — looks inconsistent in this copy; confirm against the
        # original source.
        pf_bit = ((control >> 8) & 1)
        receive_sequence_number = ((control >> 1) & 127)
        # Return the parsed field plus the unconsumed remainder of buf.
        return (cls(send_sequence_number, pf_bit, receive_sequence_number), buf[cls._PACK_LEN:])

    def serialize(self):
        # Re-pack the fields into the 16-bit control word.
        control = ((((self.send_sequence_number << 9) | (self.TYPE << 8)) | (self.receive_sequence_number << 1)) | self.pf_bit)
        return struct.pack(self._PACK_STR, control)
def tex_Cases(head, args, **kwargs):
    """Render a Cases(...) expression as a LaTeX \\begin{cases} block.

    Each element of *args* must be a Tuple(value, condition); the Otherwise
    sentinel condition renders as \\text{otherwise}.
    """
    in_small = kwargs.get('in_small', False)
    s = '\\begin{cases} '
    # NOTE(review): displaystyle is never set to True in this copy, so the
    # \displaystyle branch below is dead — possibly logic lost in transit;
    # confirm against the original source.
    displaystyle = False
    for arg in args:
        assert (arg.head() == Tuple)
        (v, c) = arg.args()
        v = v.latex(in_small=in_small)
        if (c == Otherwise):
            c = '\\text{otherwise}'
        else:
            c = c.latex(in_small=in_small)
        if displaystyle:
            s += ('\\displaystyle{%s}, & \\displaystyle{%s}\\\\' % (v, c))
        else:
            s += ('%s, & %s\\\\' % (v, c))
    s += ' \\end{cases}'
    return s
def setup_temp_env(tmp_path: Path):
    """Build a throwaway config + data-directory layout under *tmp_path*.

    Returns the CLI arguments pointing the program at the generated config.
    """
    config_path = tmp_path / 'jobs.json'
    ideas_root = tmp_path / 'ideas'
    # Materialize a local copy of the results repository.
    target = _utils.GitHubTarget.from_url(IDEAS_GIT_URL)
    target.ensure_local(str(ideas_root))
    # Rewrite the packaged config so every path points into tmp_path.
    cfg = json.loads(CONFIG_FILENAME.read_text())
    cfg['data_dir'] = str(tmp_path / 'BENCH')
    cfg['results_repo_root'] = str(ideas_root)
    config_path.write_text(json.dumps(cfg))
    # Pre-create the directory structure the jobs machinery expects.
    (tmp_path / 'BENCH' / 'REQUESTS').mkdir(parents=True)
    queues_dir = tmp_path / 'BENCH' / 'QUEUES'
    queues_dir.mkdir(parents=True)
    for worker in ('linux', 'mac'):
        (queues_dir / worker).mkdir()
    return ['--config', str(config_path)]
class OptionSeriesColumnrangeSonificationDefaultspeechoptionsMappingPitch(Options):
    """Pitch mapping options for column-range series sonification.

    Fix: each option was written as a getter/setter pair of plain methods with
    the same name, so the setter definition silently shadowed the getter.
    Restored the @property/@<name>.setter decorators so both are reachable.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get('undefined')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get('undefined')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        return self._config_get('undefined')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def within(self):
        return self._config_get('undefined')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
class MyBuddySocket(MyBuddyCommandGenerator):
    """TCP transport for the MyBuddy robot.

    Command frames are built by the MyBuddyCommandGenerator base class; this
    class only moves bytes over a socket (or a remote serial bridge when
    connect() is used).
    """
    # Module-level I/O helpers bound as methods.
    _write = write
    _read = read

    def __init__(self, ip, netport=9000):
        # ip/netport: address of the robot-side socket server.
        super(MyBuddySocket, self).__init__()
        self.calibration_parameters = calibration_parameters
        self.SERVER_IP = ip
        self.SERVER_PORT = netport
        self.rasp = False
        self.sock = self.connect_socket()

    def connect(self, serialport='/dev/ttyAMA0', baudrate='1000000', timeout='0.1'):
        """Ask the remote side to open its local serial port (Raspberry Pi bridge)."""
        self.rasp = True
        self._write(serialport, 'socket')
        self._write(baudrate, 'socket')
        self._write(timeout, 'socket')

    def connect_socket(self):
        """Open and return the TCP connection used by all commands."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.connect((self.SERVER_IP, self.SERVER_PORT))
        return sock

    def _mesg(self, genre, *args, **kwargs):
        """Send one command frame; decode and scale the reply according to *genre*."""
        (real_command, has_reply) = super(MyBuddySocket, self)._mesg(genre, *args, **kwargs)
        self._write(self._flatten(real_command), 'socket')
        if has_reply:
            data = self._read(genre, 'socket')
            res = self._process_received(data, genre, arm=12)
            if (res == []):
                return None
            # Commands whose reply is a single scalar (status flags / values).
            if (genre in [ProtocolCode.ROBOT_VERSION, ProtocolCode.IS_POWER_ON, ProtocolCode.IS_CONTROLLER_CONNECTED, ProtocolCode.IS_PAUSED, ProtocolCode.IS_IN_POSITION, ProtocolCode.IS_MOVING, ProtocolCode.IS_SERVO_ENABLE, ProtocolCode.IS_ALL_SERVO_ENABLE, ProtocolCode.GET_SERVO_DATA, ProtocolCode.GET_DIGITAL_INPUT, ProtocolCode.GET_GRIPPER_VALUE, ProtocolCode.IS_GRIPPER_MOVING, ProtocolCode.GET_SPEED, ProtocolCode.GET_ENCODER, ProtocolCode.GET_BASIC_INPUT, ProtocolCode.GET_TOF_DISTANCE, ProtocolCode.GET_END_TYPE, ProtocolCode.GET_MOVEMENT_TYPE, ProtocolCode.GET_REFERENCE_FRAME, ProtocolCode.GET_JOINT_MIN_ANGLE, ProtocolCode.GET_JOINT_MAX_ANGLE, ProtocolCode.SetHTSGripperTorque, ProtocolCode.GetHTSGripperTorque, ProtocolCode.GetGripperProtectCurrent, ProtocolCode.InitGripper, ProtocolCode.SET_FOUR_PIECES_ZERO]):
                return self._process_single(res)
            elif (genre in [ProtocolCode.GET_ANGLES]):
                return [self._int2angle(angle) for angle in res]
            elif (genre in [ProtocolCode.GET_COORDS, ProtocolCode.GET_TOOL_REFERENCE, ProtocolCode.GET_WORLD_REFERENCE]):
                if res:
                    r = []
                    # First three reply values are coordinates, last three are angles.
                    for idx in range(3):
                        r.append(self._int2coord(res[idx]))
                    for idx in range(3, 6):
                        r.append(self._int2angle(res[idx]))
                    return r
                else:
                    return res
            elif (genre in [ProtocolCode.GET_SERVO_VOLTAGES]):
                return [self._int2coord(angle) for angle in res]
            else:
                return res
        return None

    def get_radians(self, id):
        """Return the joint angles of arm *id* in radians (rounded to 3 places)."""
        angles = self._mesg(ProtocolCode.GET_ANGLES, id, has_reply=True)
        return [round((angle * (math.pi / 180)), 3) for angle in angles]

    def send_radians(self, id, radians, speed):
        """Command joint angles of arm *id*, given in radians."""
        degrees = [self._angle2int((radian * (180 / math.pi))) for radian in radians]
        return self._mesg(ProtocolCode.SEND_ANGLES, id, degrees, speed)

    def set_gpio_mode(self, mode):
        # 'BCM' maps to protocol value 0, any other mode to 1.
        self.calibration_parameters(gpiomode=mode)
        if (mode == 'BCM'):
            return self._mesg(ProtocolCode.SET_GPIO_MODE, 0)
        else:
            return self._mesg(ProtocolCode.SET_GPIO_MODE, 1)

    def set_gpio_out(self, pin_no, mode):
        # Configure pin direction: 'in' -> 0, anything else (output) -> 1.
        if (mode == 'in'):
            return self._mesg(ProtocolCode.SET_GPIO_UP, pin_no, 0)
        else:
            return self._mesg(ProtocolCode.SET_GPIO_UP, pin_no, 1)

    def set_gpio_output(self, pin_no, state):
        return self._mesg(ProtocolCode.SET_GPIO_OUTPUT, pin_no, state)

    def get_gpio_in(self, pin_no):
        return self._mesg(ProtocolCode.GET_GPIO_IN, pin_no)

    def wait(self, t):
        """Sleep *t* seconds; returns self so calls can be chained."""
        time.sleep(t)
        return self

    def close(self):
        self.sock.close()

    def sync_send_angles(self, id, degrees, speed, timeout=15):
        """Send angles and poll until the arm reports in-position or *timeout* elapses."""
        t = time.time()
        self.send_angles(id, degrees, speed)
        while ((time.time() - t) < timeout):
            f = self.is_in_position(id, degrees, 0)
            if (f == 1):
                break
            time.sleep(0.1)
        return self

    def sync_send_coords(self, id, coords, speed, mode=0, timeout=15):
        """Send coords and poll until the arm reports in-position or *timeout* elapses."""
        t = time.time()
        self.send_coords(id, coords, speed, mode)
        while ((time.time() - t) < timeout):
            if (self.is_in_position(id, coords, 1) == 1):
                break
            time.sleep(0.1)
        return self
# Fix: the decorator lost its '@pytest.mark' prefix during extraction, leaving
# invalid syntax; restored it here.
@pytest.mark.parametrize('plugin_name', KNOWN_PLUGINS)
def test_smoke_extract(plugin_name):
    """Every known plugin must yield a non-empty {code: message} mapping."""
    codes = extract(plugin_name)
    assert codes
    for (code, msg) in codes.items():
        assert isinstance(code, str), 'bad code type'
        assert isinstance(msg, str), 'bad message type'
        assert code[0].isalpha(), 'code must start from letter'
        assert code[0].isupper(), 'code must be uppercase'
def test_gso_coherence_gram_matrix():
    """GSO computed from a basis and from its Gram matrix must agree."""
    EPSILON = 0.0001
    for (m, n) in dimensions:
        for int_type in int_types:
            # Skip the slowest combinations.
            if ((m > 20) and (int_type == 'long')):
                continue
            A = make_integer_matrix(m, n, int_type=int_type).transpose()
            G = tools.compute_gram(A)
            for float_type in float_types:
                M_A = GSO.Mat(copy(A), float_type=float_type, gram=False, flags=GSO.INT_GRAM)
                M_A.update_gso()
                M_G = GSO.Mat(copy(G), float_type=float_type, gram=True, flags=GSO.INT_GRAM)
                M_G.update_gso()
                # The integer Gram entries must match exactly.
                for i in range(m):
                    for j in range(i):
                        assert (M_A.get_int_gram(i, j) == G[(i, j)])
                for i in range(m):
                    # Fix: this diagonal comparison was missing its 'assert' (the
                    # expression was evaluated and discarded) and used the stale
                    # loop index 'j' on the right-hand side instead of 'i'.
                    assert (M_A.get_r(i, i) == pytest.approx(M_G.get_r(i, i), rel=EPSILON))
                    for j in range(i):
                        assert (M_A.get_r(i, j) == pytest.approx(M_G.get_r(i, j), rel=EPSILON))
                        assert (M_A.get_mu(i, j) == pytest.approx(M_G.get_mu(i, j), rel=EPSILON))
# Fix: the decorator lost its '@pytest.mark' prefix during extraction, leaving
# invalid syntax; restored it here.
@pytest.mark.django_db
def test_RkAdjustRanking(Q, auth_header):
    """RkAdjustRanking mutation should return positive scoreBefore values."""
    import system.models
    system.models.Setting(key='ranking-season', value='1').save()
    from player.tests import PlayerFactory
    from game.tests import GameFactory
    PlayerFactory.create()
    PlayerFactory.create()
    g = GameFactory.create()
    # NOTE(review): this early return disables everything below (the test is
    # effectively a fixture smoke-check) -- confirm whether it is intentional.
    return
    game = {'gameId': g.id, 'name': 'foo!', 'type': 'THBattle2v2', 'flags': {}, 'players': [1, 2], 'winners': [1], 'deserters': [], 'startedAt': '2020-12-02T15:43:05Z', 'duration': 333}
    rst = Q('\n mutation TestRkAdjustRanking($game: GameInput!) {\n RkAdjustRanking(game: $game) {\n scoreBefore\n }\n }\n ', variables={'game': game}, headers=auth_header)
    assert ('errors' not in rst)
    assert (rst['data']['RkAdjustRanking'][0]['scoreBefore'] > 0)
    rst = Q('\n mutation TestRkAdjustRanking($game: GameInput!) {\n RkAdjustRanking(game: $game) {\n scoreBefore\n }\n }\n ', variables={'game': game}, headers=auth_header)
    assert ('errors' not in rst)
    assert (rst['data']['RkAdjustRanking'][0]['scoreBefore'] > 0)
def visualize_tokens(doc: spacy.tokens.Doc, *, attrs: List[str]=TOKEN_ATTRS, title: Optional[str]='Token attributes', key: Optional[str]=None) -> None:
    """Render a table of per-token attributes, with a multiselect to pick columns."""
    if title:
        st.header(title)
    expander = st.expander('Select token attributes')
    chosen = expander.multiselect('Token attributes', options=attrs, default=list(attrs), key=f'{key}_tokens_attr_select')
    # One row per token, one stringified column per selected attribute.
    rows = []
    for token in doc:
        rows.append([str(getattr(token, name)) for name in chosen])
    st.dataframe(pd.DataFrame(rows, columns=chosen))
def select_jobs(jobs: ToJobsType, criteria: Optional[Union[(str, Sequence[str])]]=None) -> Iterator[_job.Job]:
    """Yield jobs matching *criteria* (currently a single slice-like string)."""
    if isinstance(jobs, Jobs):
        jobs = list(jobs.iter_all())
    # Without criteria everything passes through unchanged.
    if not criteria:
        yield from jobs
        return
    criteria = [criteria] if isinstance(criteria, str) else list(criteria)
    # Only one criterion is supported for now.
    if len(criteria) > 1:
        raise NotImplementedError(criteria)
    selection = _utils.get_slice(criteria[0])
    # Slicing requires a concrete sequence.
    if not isinstance(jobs, (list, tuple)):
        jobs = list(jobs)
    yield from jobs[selection]
class VirtualRoot(InputDefault):
    """Synthetic root node of the defaults tree.

    It carries no config of its own, so most InputDefault accessors are
    deliberately left unimplemented; only the handful of predicates the tree
    machinery actually queries on the root return concrete values.
    """
    def is_virtual(self) -> bool:
        return True
    def is_self(self) -> bool:
        return False
    def is_optional(self) -> bool:
        raise NotImplementedError()
    def get_group_path(self) -> str:
        raise NotImplementedError()
    def get_config_path(self) -> str:
        # Placeholder path used in error messages and tree dumps.
        return '<root>'
    def get_final_package(self, default_to_package_header: bool=True) -> str:
        raise NotImplementedError()
    def _relative_group_path(self) -> str:
        raise NotImplementedError()
    def get_name(self) -> str:
        raise NotImplementedError()
    def is_missing(self) -> bool:
        return False
    def _get_attributes(self) -> List[str]:
        raise NotImplementedError()
    def _get_flags(self) -> List[str]:
        raise NotImplementedError()
    def __repr__(self) -> str:
        return 'VirtualRoot()'
    def resolve_interpolation(self, known_choices: DictConfig) -> None:
        raise NotImplementedError()
    def is_override(self) -> bool:
        return False
    def is_external_append(self) -> bool:
        return False
class Navs(models.Model):
    """A navigation/bookmark entry with collect/digg counters and a review status.

    NOTE(review): all verbose_name/help_text strings (and the status choice
    labels) appear to have been stripped to '' -- presumably originally
    non-ASCII labels; restore from the original source if available.
    """
    nid = models.AutoField(primary_key=True)
    title = models.CharField(max_length=32, verbose_name='')
    abstract = models.CharField(max_length=128, verbose_name='', null=True)
    href = models.URLField(verbose_name='')
    icon_href = models.URLField(verbose_name='', help_text='', null=True, blank=True)
    # auto_now: refreshed on every save, not only on creation.
    create_date = models.DateTimeField(verbose_name='', auto_now=True)
    collects_count = models.IntegerField(verbose_name='', default=0)
    digg_count = models.IntegerField(verbose_name='', default=0)
    # 0/1/2 review states; labels lost (see class docstring).
    status_choice = ((0, ''), (1, ''), (2, ''))
    status = models.IntegerField(verbose_name='', choices=status_choice, default=0)
    def color_state(self):
        # Render the status as a colored <span> for the admin changelist.
        if (self.status == 0):
            assign_state_name = ''
            color_code = '#ec921e'
        elif (self.status == 1):
            color_code = 'green'
            assign_state_name = ''
        else:
            color_code = 'red'
            assign_state_name = ''
        return format_html('<span style="color:{};">{}</span>', color_code, assign_state_name)
    color_state.short_description = ''
    tag = models.ManyToManyField(to='NavTags', verbose_name='')
    def __str__(self):
        return self.title
    class Meta():
        verbose_name_plural = ''
# NOTE(review): these decorator lines lost their '@click.command'/'@click.option'
# prefixes during extraction (the bare option tuples were invalid syntax);
# restored best-effort -- confirm against the original script.
@click.command()
@click.option('--config', default='configs/exp_cn_hubert_soft_finetune.py')
@click.option('--checkpoint', default='logs/DiffSVC/oonzyobz/checkpoints/epoch=1249-step=5000-valid_loss=0.31.ckpt')
def main(config: str, checkpoint: str):
    """Export a trained DiffSVC/DiffSinger checkpoint to the 'exported' directory."""
    Path('exported').mkdir(exist_ok=True)
    device = 'cpu'
    config = Config.fromfile(config)
    model = load_checkpoint(config, checkpoint, device, model_cls=DiffSingerLightning)
    model = model.model
    logger.info('Model loaded.')
    if (config.model.type == 'DiffSinger'):
        # Fix: corrected the 'DiffSingeer' typo in the log message.
        logger.info('DiffSinger model does not have feature extractor. Skip exporting.')
    else:
        export_feature_extractor(config, device)
    export_feature_embedding(model, device)
    export_diffusion(config, model, device)
def data_over_ftp(server, fname):
    """Copy a packaged data file into the FTP anon root and yield its URL."""
    source_path = str(Path(__file__).parent / 'data' / fname)
    target_path = os.path.join(server.anon_root, fname)
    try:
        shutil.copyfile(source_path, target_path)
        yield f'ftp://localhost/{fname}'
    finally:
        # Always remove the file from the server root, even if the test failed.
        if os.path.exists(target_path):
            os.remove(target_path)
def get_line_context(line: str) -> (tuple[(str, None)] | tuple[(str, str)]):
    """Classify the Fortran completion context at the end of *line*.

    Returns a (tag, name) pair. *name* is only populated for 'mod_mems'
    (the module whose members are being completed); otherwise it is None.
    """
    (last_level, sections) = get_paren_level(line)
    lev1_end = sections[(- 1)].end
    # Variable declaration statement?
    test_match = read_var_def(line)
    if (test_match is not None):
        if (test_match[0] == 'var'):
            # Bare type declaration with the cursor right after it -> attribute/keyword position.
            if ((test_match[1].var_names is None) and (lev1_end == len(line))):
                return ('var_key', None)
            # "procedure(...) :: p => target" style binding.
            if ((test_match[1].var_type == 'PROCEDURE') and (line.find('=>') > 0)):
                return ('pro_link', None)
        return ('var_only', None)
    # USE statement?
    test_match = read_use_stmt(line)
    if (test_match is not None):
        if (len(test_match[1].only_list) > 0):
            # "use mod, only: ..." -> complete members of that module.
            return ('mod_mems', test_match[1].mod_name)
        else:
            return ('mod_only', None)
    if FRegex.PRO_LINK.match(line):
        return ('pro_link', None)
    # Scope open/close lines offer no completions.
    if (FRegex.SCOPE_DEF.match(line) or FRegex.END.match(line)):
        return ('skip', None)
    if FRegex.IMPORT.match(line):
        return ('import', None)
    if FRegex.VIS.match(line):
        return ('vis', None)
    type_def = False
    if FRegex.TYPE_DEF.match(line):
        type_def = True
    # Cursor inside a "call foo(" argument list.
    if ((lev1_end == len(line)) and FRegex.CALL.match(last_level)):
        return ('call', None)
    # A single paren group not at line start: maybe "type(...)"/"class(...)"
    # or a procedure statement.
    if ((len(sections) == 1) and (sections[0].start >= 1)):
        (test_str, _) = get_paren_level(line[:(sections[0].start - 1)])
        if (FRegex.TYPE_STMNT.match(test_str) or (type_def and FRegex.EXTENDS.search(test_str))):
            return ('type_only', None)
        if FRegex.PROCEDURE_STMNT.match(test_str):
            return ('int_only', None)
    if FRegex.INT_STMNT.match(line):
        return ('first', None)
    if type_def:
        return ('skip', None)
    else:
        return ('default', None)
def try_update(latest_version: str):
    """Try each known Python launcher in turn until one succeeds.

    Falls back to printing manual-update instructions when every attempt fails.
    """
    # Fix: collapsed three copy-pasted attempt blocks into a single loop.
    for python_cmd in ('py', 'python3', 'python'):
        if update(latest_version, python_cmd):
            return
    helper.colored_text('Update failed\nYou may need to manually update with py -m pip install -U battle-cats-save-editor', base=helper.RED)
class PlanetStats(Base):
    """Per-planet demographic/economic snapshot tied to one CountryData row."""
    __tablename__ = 'planetstats'
    planet_stats_id = Column(Integer, primary_key=True)
    # Owning country snapshot; indexed because stats are queried per country.
    countrydata_id = Column(ForeignKey(CountryData.country_data_id), index=True)
    planet_id = Column(ForeignKey(Planet.planet_id))
    pop_count = Column(Integer)
    happiness = Column(Float)
    power = Column(Float)
    crime = Column(Float)
    migration = Column(Float)
    free_amenities = Column(Float)
    free_housing = Column(Float)
    stability = Column(Float)
    planet = relationship(Planet)
    country_data = relationship(CountryData, back_populates='pop_stats_planets')
class OptionPlotoptionsVennSonificationDefaultinstrumentoptionsMappingPitch(Options):
    """Pitch mapping options for Venn-series sonification.

    Fix: each option was written as a getter/setter pair of plain methods with
    the same name, so the setter definition silently shadowed the getter.
    Restored the @property/@<name>.setter decorators so both are reachable.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get('y')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get('c6')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        return self._config_get('c2')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def scale(self):
        return self._config_get(None)

    @scale.setter
    def scale(self, value: Any):
        self._config(value, js_type=False)

    @property
    def within(self):
        return self._config_get('yAxis')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
class TraceParent(object):
    """Parsed W3C Trace Context 'traceparent' header plus optional 'tracestate'."""
    __slots__ = ('version', 'trace_id', 'span_id', 'trace_options', 'tracestate', 'tracestate_dict', 'is_legacy')

    def __init__(self, version: int, trace_id: str, span_id: str, trace_options: 'TracingOptions', tracestate: Optional[str]=None, is_legacy: bool=False) -> None:
        self.version: int = version
        self.trace_id: str = trace_id
        self.span_id: str = span_id
        self.trace_options: TracingOptions = trace_options
        self.is_legacy: bool = is_legacy
        self.tracestate: Optional[str] = tracestate
        # Parsed view of the vendor-specific 'es=' entry of tracestate.
        self.tracestate_dict = self._parse_tracestate(tracestate)

    def copy_from(self, version: int=None, trace_id: str=None, span_id: str=None, trace_options: 'TracingOptions'=None, tracestate: str=None):
        """Return a copy, overriding every field passed as non-None."""
        return TraceParent((version or self.version), (trace_id or self.trace_id), (span_id or self.span_id), (trace_options or self.trace_options), (tracestate or self.tracestate))

    def to_string(self) -> str:
        """Serialize as the 'version-traceid-spanid-flags' header value."""
        return '{:02x}-{}-{}-{:02x}'.format(self.version, self.trace_id, self.span_id, self.trace_options.asByte)

    def to_ascii(self) -> bytes:
        return self.to_string().encode('ascii')

    def to_binary(self) -> bytes:
        # Layout: version, field-id 0, trace_id, field-id 1, span_id, field-id 2, flags.
        return b''.join([self.version.to_bytes(1, byteorder='big'), (0).to_bytes(1, byteorder='big'), bytes.fromhex(self.trace_id), (1).to_bytes(1, byteorder='big'), bytes.fromhex(self.span_id), (2).to_bytes(1, byteorder='big'), self.trace_options.asByte.to_bytes(1, byteorder='big')])

    # Fix: the alternate constructors below all take 'cls' and are invoked on
    # the class, but their @classmethod decorators were missing, so e.g.
    # TraceParent.new(...) would raise a TypeError.
    @classmethod
    def new(cls, transaction_id: str, is_sampled: bool) -> 'TraceParent':
        """Create a fresh TraceParent with a random 128-bit trace id."""
        return cls(version=constants.TRACE_CONTEXT_VERSION, trace_id=('%032x' % random.getrandbits(128)), span_id=transaction_id, trace_options=TracingOptions(recorded=is_sampled))

    @classmethod
    def from_string(cls, traceparent_string: str, tracestate_string: Optional[str]=None, is_legacy: bool=False) -> Optional['TraceParent']:
        """Parse a traceparent header value; return None on any malformed field."""
        try:
            parts = traceparent_string.split('-')
            (version, trace_id, span_id, trace_flags) = parts[:4]
        except ValueError:
            logger.debug('Invalid traceparent header format, value %s', traceparent_string)
            return
        try:
            version = int(version, 16)
            # 0xff is forbidden by the spec.
            if (version == 255):
                raise ValueError()
        except ValueError:
            logger.debug('Invalid version field, value %s', version)
            return
        try:
            tracing_options = TracingOptions()
            tracing_options.asByte = int(trace_flags, 16)
        except ValueError:
            logger.debug('Invalid trace-options field, value %s', trace_flags)
            return
        return TraceParent(version, trace_id, span_id, tracing_options, tracestate_string, is_legacy)

    @classmethod
    def from_headers(cls, headers: dict, header_name: str=constants.TRACEPARENT_HEADER_NAME, legacy_header_name: str=constants.TRACEPARENT_LEGACY_HEADER_NAME, tracestate_header_name: str=constants.TRACESTATE_HEADER_NAME) -> Optional['TraceParent']:
        """Extract a TraceParent from a header mapping (or header-pair list)."""
        tracestate = cls.merge_duplicate_headers(headers, tracestate_header_name)
        if (header_name in headers):
            return TraceParent.from_string(headers[header_name], tracestate, is_legacy=False)
        elif (legacy_header_name in headers):
            # Fix: a value found under the legacy header name must be flagged
            # as legacy (was is_legacy=False).
            return TraceParent.from_string(headers[legacy_header_name], tracestate, is_legacy=True)
        else:
            return None

    @classmethod
    def from_binary(cls, data: bytes) -> Optional['TraceParent']:
        """Parse the 29-byte binary traceparent representation."""
        if (len(data) != 29):
            logger.debug('Invalid binary traceparent format, length is %d, should be 29, value %r', len(data), data)
            return
        # The three one-byte field identifiers must be 0, 1 and 2.
        if ((int.from_bytes(data[1:2], byteorder='big') != 0) or (int.from_bytes(data[18:19], byteorder='big') != 1) or (int.from_bytes(data[27:28], byteorder='big') != 2)):
            logger.debug('Invalid binary traceparent format, field identifiers not correct, value %r', data)
            return
        version = int.from_bytes(data[0:1], byteorder='big')
        trace_id = str(binascii.hexlify(data[2:18]), encoding='ascii')
        span_id = str(binascii.hexlify(data[19:27]), encoding='ascii')
        try:
            tracing_options = TracingOptions()
            tracing_options.asByte = int.from_bytes(data[28:29], byteorder='big')
        except ValueError:
            logger.debug('Invalid trace-options field, value %r', data[28:29])
            return
        return TraceParent(version, trace_id, span_id, tracing_options)

    @classmethod
    def merge_duplicate_headers(cls, headers, key):
        """Join repeated header values with commas; supports mappings and pair lists."""
        if isinstance(headers, list):
            return ','.join([item[1] for item in headers if (item[0] == key)])
        elif ((not hasattr(headers, 'get')) and hasattr(headers, '__iter__')):
            return ','.join([item[1] for item in headers if (item[0] == key)])
        return headers.get(key)

    def _parse_tracestate(self, tracestate) -> Dict[(str, str)]:
        """Parse the 'es=' entry of tracestate into a key/value dict."""
        if (not tracestate):
            return {}
        if ('es=' not in tracestate):
            return {}
        ret = {}
        try:
            state = re.search('(?:,|^)\\s*es=([^,]*?)\\s*(?:,|$)', tracestate).group(1).split(';')
        except (AttributeError, IndexError):
            # Fix: re.search() returns None when 'es=' only occurs inside
            # another key (e.g. 'foes=x'), which raised an uncaught
            # AttributeError on .group().
            return {}
        for keyval in state:
            if (not keyval):
                continue
            (key, _, val) = keyval.partition(':')
            ret[key] = val
        return ret

    def _set_tracestate(self):
        """Re-serialize tracestate_dict into the 'es=' entry, keeping other vendors."""
        elastic_value = ';'.join(['{}:{}'.format(k, v) for (k, v) in self.tracestate_dict.items()])
        # The spec caps a single tracestate entry value length.
        if (len(elastic_value) > 256):
            logger.debug('Modifications to TraceState would violate length limits, ignoring.')
            raise TraceStateFormatException()
        elastic_state = 'es={}'.format(elastic_value)
        if (not self.tracestate):
            return elastic_state
        else:
            # Strip any existing 'es=' entry, then re-append ours at the end.
            otherstate = re.sub('(?:,|^)\\s*es=([^,]*?)\\s*(?:,|$)', '', self.tracestate)
            otherstate = otherstate.lstrip(',')
            otherstate = re.sub(',,', ',', otherstate)
            if otherstate:
                return '{},{}'.format(otherstate.rstrip(','), elastic_state)
            else:
                return elastic_state

    def add_tracestate(self, key, val) -> None:
        """Add/replace one key in the 'es=' tracestate entry, validating characters."""
        key = str(key)
        val = str(val)
        # Reserved separator characters are not allowed in keys or values.
        for bad in (':', ';', ',', '='):
            if ((bad in key) or (bad in val)):
                logger.debug("New tracestate key/val pair contains invalid character '{}', ignoring.".format(bad))
                return
        # Only printable ASCII is permitted.
        for c in itertools.chain(key, val):
            if ((ord(c) < 32) or (ord(c) > 126)):
                logger.debug("Modifications to TraceState would introduce invalid character '{}', ignoring.".format(c))
                return
        oldval = self.tracestate_dict.pop(key, None)
        self.tracestate_dict[key] = val
        try:
            self.tracestate = self._set_tracestate()
        except TraceStateFormatException:
            # Roll the dict back so it stays in sync with self.tracestate.
            if (oldval is not None):
                self.tracestate_dict[key] = oldval
            else:
                self.tracestate_dict.pop(key)
def test_finding_functions_with_literals():
    """ref()/source() calls with literal arguments must surface as jinja deps."""
    program = cleandoc("\n mdl = ref('model_a')\n ref('model_b')\n ref('package', 'model_c')\n src = source('db', 'table_a')\n source('db', 'table_b')\n ")
    tree = ast.parse(program)
    output = generate_dbt_dependencies(tree)
    expected_snippets = (
        "{{ ref('model_a') }}",
        "{{ ref('model_b') }}",
        "{{ ref('package', 'model_c') }}",
        "{{ source('db', 'table_a') }}",
        "{{ source('db', 'table_b') }}",
    )
    for snippet in expected_snippets:
        assert (snippet in output)
def main(page: ft.Page):
    """App entry point: a text box that filters NAMES into a list as you type."""
    page.title = 'Autocomplete search names'
    # One pre-built tile per name so filtering only shuffles references around.
    tiles = {name: ft.ListTile(title=ft.Text(name), leading=ft.Icon(ft.icons.ACCESSIBILITY), on_click=printer) for name in NAMES}

    def on_search_change(event):
        query = event.control.value.lower()
        # Empty query shows nothing; otherwise case-insensitive substring match.
        matches.controls = [tiles.get(name) for name in NAMES if (query in name.lower())] if query else []
        page.update()

    search_box = ft.TextField(label='Search name:', on_change=on_search_change)
    matches = ft.ListView(expand=1, spacing=10, padding=20)
    page.add(search_box, matches)
# NOTE(review): the two decorator lines below lost their prefixes during
# extraction (the bare argument tuples were invalid syntax). Restored
# best-effort as a task registration plus a task lock -- confirm the exact
# decorator names against the original module.
@cq.task(name='api.node.image.tasks.node_img_sources_sync', base=InternalTask)
@task_lock(timeout=3600, wait_for_release=True)
def node_img_sources_sync(task_id, sender, **kwargs):
    """Push the current image-server source list to every online node."""
    new_img_sources = ImageVm().sources
    for node in Node.all():
        # Offline nodes cannot be updated now; they are skipped, not queued.
        if (not node.is_online()):
            logger.warn('Excluding node %s from updating imgadm sources because it is not in online state', node)
            continue
        run_node_img_sources_sync(node, new_img_sources=new_img_sources)
def summary_by_id(event_id):
    """Aggregate order amounts and ticket counts per status for one event."""
    amounts_by_status = Order.query.filter_by(event_id=event_id).with_entities(Order.status, func.sum(Order.amount)).group_by(Order.status).all()
    counts_by_status = TicketHolder.query.join(Order).filter((Order.event_id == event_id)).with_entities(Order.status, func.count()).group_by(Order.status).all()
    summary = {}
    for status in ('placed', 'completed', 'pending'):
        summary[status] = status_summary(amounts_by_status, counts_by_status, status)
    return summary
class OptionPlotoptionsDependencywheelSonificationDefaultinstrumentoptionsMappingNoteduration(Options):
    """Note-duration mapping options for dependency-wheel sonification.

    Fix: each option was written as a getter/setter pair of plain methods with
    the same name, so the setter definition silently shadowed the getter.
    Restored the @property/@<name>.setter decorators so both are reachable.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class FaucetRouterConfigReloadTest(FaucetConfigReloadTestBase):
    """Config-reload test: adding an inter-VLAN router must cold-start FAUCET."""
    def test_router_config_reload(self):
        # Add a router joining VLANs 100 and 200, then push the new config.
        conf = self._get_faucet_conf()
        conf['routers'] = {'router-1': {'vlans': [100, 200]}}
        # A router addition cannot be hot-applied, so a cold start is expected.
        self.reload_conf(conf, self.faucet_config_path, restart=True, cold_start=True, change_expected=True)
def _run_timeit(func, number):
    """Time *number* calls of *func*, honoring an optional benchmark manager.

    If the callable carries a '_benchmark_manager' attribute, it is entered as
    a context manager and its value is passed to each call.
    """
    gc.collect()
    manager = getattr(func, '_benchmark_manager', None)
    try:
        if manager is None:
            return timeit.Timer(func).timeit(number=number)
        with manager(number) as ctx:
            return timeit.Timer(lambda: func(ctx)).timeit(number=number)
    finally:
        # Yield to the eventlet hub so pending callbacks can run.
        eventlet.sleep(0.01)
class FmtNumber():
    """Builders for JavaScript number-formatting assignments on a selector."""

    def __init__(self, page: primitives.PageModel, selector: str, value):
        self.page = page
        self._val = value
        self.selector = selector

    def toFixed(self, value: Optional[int]=None):
        """JS toFixed(); called without an argument when *value* is None."""
        arg = '' if value is None else value
        return JsObjects.JsObjects.get('%s = %s.toFixed(%s)' % (self.selector, self._val, arg))

    def toPrecision(self, value: Optional[int]=None):
        """JS toPrecision(); called without an argument when *value* is None."""
        arg = '' if value is None else value
        return JsObjects.JsObjects.get('%s = %s.toPrecision(%s)' % (self.selector, self._val, arg))

    def toExponential(self):
        """JS toExponential() with no argument."""
        return JsObjects.JsObjects.get('%s = %s.toExponential()' % (self.selector, self._val))
def _replace_contraction_with_cast(instruction: Instruction):
    """Rewrite contraction-marked cast subexpressions as plain casts.

    NOTE(review): this mutates subexpressions while iterating the generator
    from _find_cast_subexpressions -- assumed safe because substitute()
    swaps a node in place rather than restructuring the tree being walked;
    confirm against the Instruction implementation.
    """
    for expr in _find_cast_subexpressions(instruction):
        if expr.contraction:
            expr.contraction = False
        # A cast of a cast to the identical type is redundant; collapse it.
        if (_is_cast(expr.operand) and (expr.operand.type == expr.type)):
            instruction.substitute(expr, expr.operand)
# Fix: the decorator lost its '@mark.parametrize' prefix during extraction,
# leaving invalid syntax; restored it here (hydra's tests use
# 'from pytest import mark, param', consistent with the bare 'param' below).
@mark.parametrize('config_name,overrides,expected', [param('legacy_override_hydra2', [], DefaultsTreeNode(node=VirtualRoot(), children=[DefaultsTreeNode(node=ConfigDefault(path='hydra/config'), children=[GroupDefault(group='help', value='custom1'), GroupDefault(group='output', value='disabled'), ConfigDefault(path='_self_')]), DefaultsTreeNode(node=ConfigDefault(path='legacy_override_hydra2'), children=[ConfigDefault(path='_self_')])]), id='legacy_override_hydra+external')])
def test_legacy_hydra_overrides_from_primary_config_2(config_name: str, overrides: List[str], expected: DefaultsTreeNode, recwarn: Any) -> None:
    """Legacy hydra overrides must build the expected tree and emit two warnings."""
    _test_defaults_tree_impl(config_name=config_name, input_overrides=overrides, expected=expected, prepend_hydra=True)
    assert (len(recwarn) == 2)
    assert ('Invalid overriding of hydra/help:' in recwarn.list[0].message.args[0])
    assert ('Invalid overriding of hydra/output:' in recwarn.list[1].message.args[0])
def upgrade():
    """Forward migration: create the 'classified_traces' table.

    One row per classified EVM call trace; (transaction_hash, trace_address)
    uniquely identifies a trace within a transaction.
    """
    op.create_table('classified_traces', sa.Column('classified_at', sa.TIMESTAMP, server_default=sa.func.now()), sa.Column('transaction_hash', sa.String(66), nullable=False), sa.Column('block_number', sa.Numeric, nullable=False), sa.Column('classification', sa.String(256), nullable=False), sa.Column('trace_type', sa.String(256), nullable=False), sa.Column('trace_address', sa.String(256), nullable=False), sa.Column('protocol', sa.String(256), nullable=True), sa.Column('abi_name', sa.String(1024), nullable=True), sa.Column('function_name', sa.String(2048), nullable=True), sa.Column('function_signature', sa.String(2048), nullable=True), sa.Column('inputs', sa.JSON, nullable=True), sa.Column('from_address', sa.String(256), nullable=True), sa.Column('to_address', sa.String(256), nullable=True), sa.Column('gas', sa.Numeric, nullable=True), sa.Column('value', sa.Numeric, nullable=True), sa.Column('gas_used', sa.Numeric, nullable=True), sa.Column('error', sa.String(256), nullable=True), sa.PrimaryKeyConstraint('transaction_hash', 'trace_address'))
class OFPGroupStats(StringifyMixin):
    """OpenFlow group statistics reply body (one group entry)."""

    def __init__(self, length=None, group_id=None, ref_count=None, packet_count=None, byte_count=None, duration_sec=None, duration_nsec=None, bucket_stats=None):
        super(OFPGroupStats, self).__init__()
        self.length = length
        self.group_id = group_id
        self.ref_count = ref_count
        self.packet_count = packet_count
        self.byte_count = byte_count
        self.duration_sec = duration_sec
        self.duration_nsec = duration_nsec
        self.bucket_stats = bucket_stats

    # Fix: parser takes 'cls' and is invoked on the class, but the
    # @classmethod decorator was missing.
    @classmethod
    def parser(cls, buf, offset):
        """Parse one group-stats entry (plus trailing bucket counters) from *buf*."""
        group = struct.unpack_from(ofproto.OFP_GROUP_STATS_PACK_STR, buf, offset)
        group_stats = cls(*group)
        group_stats.bucket_stats = []
        if (group_stats.length is not None):
            # Bytes between the fixed header and 'length' are per-bucket counters.
            total_len = (group_stats.length + offset)
            offset += ofproto.OFP_GROUP_STATS_SIZE
            while (total_len > offset):
                b = OFPBucketCounter.parser(buf, offset)
                group_stats.bucket_stats.append(b)
                offset += ofproto.OFP_BUCKET_COUNTER_SIZE
        return group_stats
def main():
    """Configure a throwaway Django settings module from CLI options and run
    the 'test_watson' test suite, exiting non-zero on failures."""
    parser = OptionParser()
    parser.add_option('-v', '--verbosity', action='store', dest='verbosity', default='1', type='choice', choices=['0', '1', '2', '3'], help='Verbosity level; 0=minimal output, 1=normal output, 2=all output')
    parser.add_option('--noinput', action='store_false', dest='interactive', default=True, help='Tells Django to NOT prompt the user for input of any kind.')
    parser.add_option('--failfast', action='store_true', dest='failfast', default=False, help='Tells Django to stop running the test suite after first failed test.')
    parser.add_option('-d', '--database', action='store', dest='database', default='sqlite', type='choice', choices=list(AVAILABLE_DATABASES.keys()), help='Select database backend for tests. Available choices: {}'.format(', '.join(AVAILABLE_DATABASES.keys())))
    (options, args) = parser.parse_args()
    from django.conf import settings
    # Fix: hoisted out of the 'if' branch -- the 'else' branch below also reads
    # this name and previously raised NameError when options.database was falsy.
    database_default_host = '127.0.0.1'
    if options.database:
        database_setting = AVAILABLE_DATABASES[options.database]
        if (options.database == 'sqlite'):
            database_default_name = os.path.join(os.path.dirname(__file__), 'db.sqlite3')
        else:
            database_default_name = 'test_project'
        # Environment variables override the per-backend defaults.
        database_setting.update(dict(NAME=os.environ.get('DB_NAME', database_default_name), HOST=os.environ.get('DB_HOST', database_default_host), USER=os.environ.get('DB_USER', ''), PASSWORD=os.environ.get('DB_PASSWORD', '')))
    else:
        database_setting = dict(ENGINE=os.environ.get('DB_ENGINE', 'django.db.backends.sqlite3'), HOST=os.environ.get('DB_HOST', database_default_host), NAME=os.environ.get('DB_NAME', os.path.join(os.path.dirname(__file__), 'db.sqlite3')), USER=os.environ.get('DB_USER', ''), PASSWORD=os.environ.get('DB_PASSWORD', ''))
    settings.configure(DEBUG=False, DATABASES={'default': database_setting}, ROOT_URLCONF='test_watson.urls', INSTALLED_APPS=('django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.messages', 'django.contrib.staticfiles', 'django.contrib.admin', 'watson', 'test_watson'), MIDDLEWARE_CLASSES=('django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware'), MIDDLEWARE=('django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware'), USE_TZ=True, STATIC_URL='/static/', TEST_RUNNER='django.test.runner.DiscoverRunner', TEMPLATES=[{'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': ['templates'], 'OPTIONS': {'context_processors': ['django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages']}, 'APP_DIRS': True}], SECRET_KEY='fake-key')
    import django
    try:
        django.setup()
    except AttributeError:
        # Older Django versions have no setup(); safe to ignore.
        pass
    from django.test.utils import get_runner
    TestRunner = get_runner(settings)
    test_runner = TestRunner(verbosity=int(options.verbosity), interactive=options.interactive, failfast=options.failfast)
    failures = test_runner.run_tests(['test_watson'])
    if failures:
        sys.exit(failures)
# NOTE(review): the decorator prefix was lost during extraction; a bare
# fixture-name argument matches the '@mark.usefixtures' pytest idiom --
# confirm against the upstream test module.
@mark.usefixtures('resourceclosing_container')
def test_closing_resource_bypass_marker_injection():
    """Each call through the Closing marker must re-init and shut down the resource."""
    resourceclosing.Service.reset_counter()
    result_1 = resourceclosing.test_function(service=Closing[Provide['service']])
    assert isinstance(result_1, resourceclosing.Service)
    assert (result_1.init_counter == 1)
    assert (result_1.shutdown_counter == 1)
    result_2 = resourceclosing.test_function(service=Closing[Provide['service']])
    assert isinstance(result_2, resourceclosing.Service)
    assert (result_2.init_counter == 2)
    assert (result_2.shutdown_counter == 2)
    # A fresh instance must be provided on every call.
    assert (result_1 is not result_2)
class InfosReceiptParserDataClass(BaseModel):
    """Normalized receipt/invoice fields returned by OCR receipt parsers."""
    invoice_number: Optional[StrictStr] = None
    invoice_total: Optional[float] = None
    invoice_subtotal: Optional[float] = None
    barcodes: Sequence[BarCode] = Field(default_factory=list)
    category: Optional[StrictStr] = None
    # Dates/times are kept as raw provider strings, not parsed datetimes.
    date: Optional[StrictStr] = None
    due_date: Optional[StrictStr] = None
    time: Optional[StrictStr] = None
    # NOTE(review): instance defaults shared at class level -- assumed safe
    # because pydantic copies field defaults per instance; confirm.
    customer_information: CustomerInformation = CustomerInformation()
    merchant_information: MerchantInformation = MerchantInformation()
    payment_information: PaymentInformation = PaymentInformation()
    locale: Locale = Locale()
    taxes: Sequence[Taxes] = Field(default_factory=list)
    # Free-form provider-specific extras.
    receipt_infos: Dict[(str, object)] = Field(default_factory=dict)
    item_lines: Sequence[ItemLines] = Field(default_factory=list)
def to_file(self, pfile, fformat='roff', name=None, append=False, dtype=None, fmt=None):
logger.info('Export property to file %s as %s', pfile, fformat)
fobj = xtgeo._XTGeoFile(pfile, mode='rb')
if (not fobj.memstream):
fobj.check_folder(raiseerror=OSError)
if (name is None):
name = self.name
if ('roff' in fformat):
binary = True
if ('asc' in fformat):
binary = False
if append:
logger.warning('Append is not implemented for roff format, defaulting to write.')
export_roff(self, fobj.name, name, binary=binary)
elif (fformat == 'grdecl'):
export_grdecl(self, fobj.name, name, append=append, binary=False, dtype=dtype, fmt=fmt)
elif (fformat == 'bgrdecl'):
export_grdecl(self, fobj.name, name, append=append, binary=True, dtype=dtype)
elif (fformat == 'xtgcpprop'):
export_xtgcpprop(self, fobj.name)
else:
raise ValueError(f'Cannot export, invalid fformat: {fformat}') |
class TransactionReceipt():
    """A broadcast transaction and, once mined, its receipt.

    Attributes are filled from `eth_getTransaction` on creation and from
    `eth_getTransactionReceipt` once the transaction confirms.
    """
    block_number = None  # block the tx was mined in (None while pending)
    contract_address: Optional[str] = None  # deployment address, if a contract creation
    contract_name = None  # name of the target/deployed contract, when known
    fn_name = None  # name of the invoked function, when known
    gas_used = None  # gas consumed, from the receipt
    logs: Optional[List] = None  # raw event logs, from the receipt
    nonce = None  # sender account nonce of this tx
    sender = None  # account that broadcast the transaction
    txid: str  # transaction hash as a hex string
    txindex = None  # index of the tx within its block
    type: int  # EIP-2718 transaction type (2 == EIP-1559)
    def __init__(self, txid: Union[(str, bytes)], sender: Any=None, silent: bool=True, required_confs: int=1, is_blocking: bool=True, name: str='', revert_data: Optional[Tuple]=None) -> None:
        """Instantiate from a transaction hash and begin awaiting confirmation.

        Args:
            txid: transaction hash (hex string or bytes).
            sender: account that broadcast the tx, if known.
            silent: suppress console output when True.
            required_confs: confirmations to wait for.
            is_blocking: when True, block until `required_confs` is reached.
            name: "Contract.function" label used for display.
            revert_data: optional (revert message, program counter, revert type)
                tuple captured before broadcast for a known-reverting tx.
        """
        self._silent = silent
        if isinstance(txid, bytes):
            txid = HexBytes(txid).hex()
        # set once the tx is confirmed or known to be dropped
        self._confirmed = threading.Event()
        self._call_cost = 0
        # lazily populated trace data — see _get_trace / _expand_trace
        self._trace_exc: Optional[Exception] = None
        self._trace_origin: Optional[str] = None
        self._raw_trace: Optional[List] = None
        self._trace: Optional[List] = None
        self._events: Optional[EventDict] = None
        self._return_value: Any = None
        self._revert_msg: Optional[str] = None
        self._dev_revert_msg: Optional[str] = None
        self._modified_state: Optional[bool] = None
        self._new_contracts: Optional[List] = None
        self._internal_transfers: Optional[List[Dict]] = None
        self._subcalls: Optional[List[Dict]] = None
        self.sender = sender
        # -1 == pending; replaced with the receipt status once mined
        self.status = Status((- 1))
        self.txid = str(txid)
        self.contract_name = None
        self.fn_name = name
        if (name and ('.' in name)):
            (self.contract_name, self.fn_name) = name.split('.', maxsplit=1)
        (self._revert_msg, self._revert_pc, revert_type) = (revert_data or (None, None, None))
        if ((self._revert_msg is None) and (revert_type not in ('revert', 'invalid_opcode'))):
            self._revert_msg = revert_type
        if (self._revert_pc is not None):
            self._dev_revert_msg = (build._get_dev_revert(self._revert_pc) or None)
        tx: Dict = web3.eth.get_transaction(HexBytes(self.txid))
        self._set_from_tx(tx)
        if (not self._silent):
            output_str = ''
            if (self.type == 2):
                # EIP-1559 transaction: display the fee caps in gwei
                max_gas = (tx['maxFeePerGas'] / (10 ** 9))
                priority_gas = (tx['maxPriorityFeePerGas'] / (10 ** 9))
                output_str = f" Max fee: {color('bright blue')}{max_gas}{color} gwei Priority fee: {color('bright blue')}{priority_gas}{color} gwei"
            elif (self.gas_price is not None):
                gas_price = (self.gas_price / (10 ** 9))
                output_str = f" Gas price: {color('bright blue')}{gas_price}{color} gwei"
            print(f"{output_str} Gas limit: {color('bright blue')}{self.gas_limit}{color} Nonce: {color('bright blue')}{self.nonce}{color}")
        # await confirmation on a daemon thread so non-blocking use can continue
        confirm_thread = threading.Thread(target=self._await_confirmation, args=(tx['blockNumber'], required_confs), daemon=True)
        confirm_thread.start()
        if (is_blocking and ((required_confs > 0) or tx['blockNumber'])):
            confirm_thread.join()
def __repr__(self) -> str:
color_str = {(- 2): 'dark white', (- 1): 'bright yellow', 0: 'bright red', 1: ''}[self.status]
return f"<Transaction '{color(color_str)}{self.txid}{color}'>"
def __hash__(self) -> int:
return hash(self.txid)
_property
def events(self) -> Optional[EventDict]:
if (self._events is None):
if self.status:
addrs = ({log.address for log in self.logs} if self.logs else set())
contracts = {addr: state._find_contract(addr) for addr in addrs}
self._events = _decode_logs(self.logs, contracts=contracts)
else:
self._get_trace()
initial_address = str((self.receiver or self.contract_address))
self._events = _decode_trace(self._raw_trace, initial_address)
return self._events
_property
def internal_transfers(self) -> Optional[List]:
if (not self.status):
return []
if (self._internal_transfers is None):
self._expand_trace()
return self._internal_transfers
_property
def modified_state(self) -> Optional[bool]:
if (not self.status):
self._modified_state = False
elif (self._modified_state is None):
self._get_trace()
return self._modified_state
_property
def new_contracts(self) -> Optional[List]:
if (not self.status):
return []
if (self._new_contracts is None):
self._expand_trace()
return self._new_contracts
_property
def return_value(self) -> Optional[str]:
if (not self.status):
return None
if (self._return_value is None):
self._get_trace()
return self._return_value
_property
def revert_msg(self) -> Optional[str]:
if self.status:
return None
if (self._revert_msg is None):
self._get_trace()
elif (self.contract_address and (self._revert_msg == 'out of gas')):
self._get_trace()
return self._revert_msg
_property
def dev_revert_msg(self) -> Optional[str]:
if self.status:
return None
if (self._dev_revert_msg is None):
self._get_trace()
return (self._dev_revert_msg or None)
_property
def subcalls(self) -> Optional[List]:
if (self._subcalls is None):
self._expand_trace()
subcalls = filter((lambda s: (not _is_call_to_precompile(s))), self._subcalls)
return list(subcalls)
_property
def trace(self) -> Optional[List]:
if (self._trace is None):
self._expand_trace()
return self._trace
def timestamp(self) -> Optional[int]:
if (self.status < 0):
return None
return web3.eth.get_block(self.block_number)['timestamp']
def confirmations(self) -> int:
if (not self.block_number):
return 0
return ((web3.eth.block_number - self.block_number) + 1)
    def replace(self, increment: Optional[float]=None, gas_price: Optional[Wei]=None, silent: Optional[bool]=None) -> 'TransactionReceipt':
        """Rebroadcast this pending transaction with a different gas price.

        Exactly one of `increment` (multiplier on the current gas price) or
        `gas_price` (absolute new price) must be given. Only valid while the
        transaction is still pending. Returns the replacement receipt.
        """
        if ((increment is None) and (gas_price is None)):
            raise ValueError('Must give one of `increment` or `gas_price`')
        if ((gas_price is not None) and (increment is not None)):
            raise ValueError('Cannot set `increment` and `gas_price` together')
        if (self.status > (- 1)):
            raise ValueError('Transaction has already confirmed')
        if (self.gas_price is not None):
            if (increment is not None):
                gas_price = Wei((self.gas_price * increment))
            else:
                gas_price = Wei(gas_price)
        (max_fee, priority_fee) = (None, None)
        # EIP-1559 tx: use the computed price as the fee cap and bump priority by 10%
        if ((self.max_fee is not None) and (self.priority_fee is not None)):
            max_fee = gas_price
            priority_fee = Wei((self.priority_fee * 1.1))
            gas_price = None
        if (silent is None):
            silent = self._silent
        sender = self.sender
        # a bare address cannot sign — resolve to a local account object
        if isinstance(sender, EthAddress):
            from brownie import accounts
            if (sender in accounts):
                sender = accounts.at(sender)
            else:
                raise ValueError('Sender address not in `accounts`')
        return sender.transfer(self.receiver, self.value, gas_limit=self.gas_limit, gas_price=gas_price, max_fee=max_fee, priority_fee=priority_fee, data=self.input, nonce=self.nonce, required_confs=0, silent=silent)
    def wait(self, required_confs: int) -> None:
        """Block until the transaction has at least `required_confs` confirmations.

        Marks the receipt as dropped (status -2) if the tx disappears and the
        sender's nonce has already advanced past it.
        """
        if (required_confs < 1):
            return
        # NOTE(review): `confirmations` is read attribute-style here — this
        # only works if it is exposed as a property on the class; confirm.
        if (self.confirmations > required_confs):
            print(f'This transaction already has {self.confirmations} confirmations.')
            return
        while True:
            try:
                tx: Dict = web3.eth.get_transaction(self.txid)
                break
            except TransactionNotFound:
                if (self.nonce is not None):
                    sender_nonce = web3.eth.get_transaction_count(str(self.sender))
                    if (sender_nonce > self.nonce):
                        # nonce consumed by another tx: this one was replaced/dropped
                        self.status = Status((- 2))
                        self._confirmed.set()
                        return
            time.sleep(1)
        self._await_confirmation(tx['blockNumber'], required_confs)
    def _raise_if_reverted(self, exc: Any) -> None:
        """Raise `exc`, augmented with source/revert details, if the tx reverted.

        No-op for successful transactions and in console mode.
        """
        if (self.status or (CONFIG.mode == 'console')):
            return
        if (not web3.supports_traces):
            # without debug_traceTransaction we cannot enrich the error
            raise (exc or ValueError('Execution reverted'))
        if (self._dev_revert_msg is None):
            self._expand_trace()
        if self.contract_address:
            # reverted deployment: no meaningful source to display
            source = ''
        elif CONFIG.argv['revert']:
            source = self._traceback_string()
        else:
            source = self._error_string(1)
        contract = state._find_contract(self.receiver)
        if contract:
            # extract a trailing "dev:" comment from the reverting source line
            marker = ('//' if (contract._build['language'] == 'Solidity') else '#')
            line = self._traceback_string().split('\n')[(- 1)]
            if ((marker + ' dev: ') in line):
                self._dev_revert_msg = line[(line.index(marker) + len(marker)):(- 5)].strip()
        raise exc._with_attr(source=source, revert_msg=self._revert_msg, dev_revert_msg=self._dev_revert_msg)
    def _await_confirmation(self, block_number: int=None, required_confs: int=1) -> None:
        """Poll until the tx is mined and has `required_confs` confirmations.

        Runs on the daemon thread started by __init__. Marks the receipt as
        dropped (status -2) when the sender's nonce advances past this tx
        without a receipt appearing.
        """
        block_number = (block_number or self.block_number)
        nonce_time = 0.0
        sender_nonce = 0
        while True:
            # refresh the sender nonce at most every 15s to detect dropped txs
            if ((time.time() - nonce_time) > 15):
                sender_nonce = web3.eth.get_transaction_count(str(self.sender))
                nonce_time = time.time()
            try:
                receipt = web3.eth.get_transaction_receipt(HexBytes(self.txid))
            except TransactionNotFound:
                receipt = None
            if ((receipt is not None) and (receipt['blockHash'] is not None)):
                break
            if (sender_nonce > self.nonce):
                self.status = Status((- 2))
                self._confirmed.set()
                return
            if ((not block_number) and (not self._silent) and (required_confs > 0)):
                if (required_confs == 1):
                    sys.stdout.write(f''' Waiting for confirmation... {_marker[0]}
''')
                else:
                    sys.stdout.write(f''' Required confirmations: {color('bright yellow')}0/{required_confs}{color} {_marker[0]}
''')
                _marker.rotate(1)
                sys.stdout.flush()
            time.sleep(1)
        # silence any other pending txs with the same nonce — they were replaced
        for dropped_tx in state.TxHistory().filter(sender=self.sender, nonce=self.nonce, key=(lambda k: (k != self))):
            dropped_tx._silent = True
        self.block_number = receipt['blockNumber']
        remaining_confs = required_confs
        while ((remaining_confs > 0) and (required_confs > 1)):
            try:
                receipt = web3.eth.get_transaction_receipt(self.txid)
                self.block_number = receipt['blockNumber']
            except TransactionNotFound:
                # a reorg un-mined the tx: restart confirmation tracking
                if (not self._silent):
                    sys.stdout.write(f'''
{color('red')}Transaction was lost...{color}{(' ' * 8)}''')
                    sys.stdout.flush()
                tx = web3.eth.get_transaction(self.txid)
                self.block_number = None
                return self._await_confirmation(tx['blockNumber'], required_confs)
            if ((required_confs - self.confirmations) != remaining_confs):
                remaining_confs = (required_confs - self.confirmations)
                if (not self._silent):
                    sys.stdout.write(f'''
Required confirmations: {color('bright yellow')}{self.confirmations}/{required_confs}{color} ''')
                    if (remaining_confs == 0):
                        sys.stdout.write('\n')
                    sys.stdout.flush()
            if (remaining_confs > 0):
                time.sleep(1)
        self._set_from_receipt(receipt)
        # evaluate coverage before setting the confirmation event
        if (CONFIG.argv['coverage'] and (not coverage._check_cached(self.coverage_hash)) and self.trace):
            self._expand_trace()
        if ((not self._silent) and (required_confs > 0)):
            print(self._confirm_output())
        self._confirmed.set()
        # mark same-nonce siblings as dropped now that this tx confirmed
        for dropped_tx in state.TxHistory().filter(sender=self.sender, nonce=self.nonce, key=(lambda k: (k != self))):
            dropped_tx.status = Status((- 2))
            dropped_tx._confirmed.set()
    def _set_from_tx(self, tx: Dict) -> None:
        """Populate attributes available from `eth_getTransaction` (pre-receipt)."""
        if (not self.sender):
            self.sender = EthAddress(tx['from'])
        # `to` is absent or None for contract deployments
        self.receiver = (EthAddress(tx['to']) if tx.get('to', None) else None)
        self.value = Wei(tx['value'])
        self.gas_price = tx.get('gasPrice')
        self.max_fee = tx.get('maxFeePerGas')
        self.priority_fee = tx.get('maxPriorityFeePerGas')
        self.gas_limit = tx['gas']
        self.input = tx['input']
        self.nonce = tx['nonce']
        # the type field may be missing (legacy tx) or hex-encoded
        self.type = int(HexBytes(tx.get('type', 0)).hex(), 16)
        if self.fn_name:
            return
        # best effort: resolve contract/function names from known contracts
        try:
            contract = state._find_contract(tx.get('to'))
            if (contract is not None):
                self.contract_name = contract._name
                self.fn_name = contract.get_method(tx['input'])
        except ContractNotFound:
            pass
    def _set_from_receipt(self, receipt: Dict) -> None:
        """Populate attributes available once the transaction is mined."""
        self.block_number = receipt['blockNumber']
        self.txindex = receipt['transactionIndex']
        self.gas_used = receipt['gasUsed']
        self.logs = receipt['logs']
        self.status = Status(receipt['status'])
        # EIP-1559: the price actually paid may differ from the submitted caps
        if ('effectiveGasPrice' in receipt):
            self.gas_price = receipt['effectiveGasPrice']
        self.contract_address = receipt['contractAddress']
        if (self.contract_address and (not self.contract_name)):
            self.contract_name = 'UnknownContract'
        # deterministic fingerprint of this tx, used as a coverage cache key
        base = f'{self.nonce}{self.block_number}{self.sender}{self.receiver}{self.value}{self.input}{int(self.status)}{self.gas_used}{self.txindex}'
        self.coverage_hash = sha1(base.encode()).hexdigest()
        if self.fn_name:
            state.TxHistory()._gas(self._full_name(), receipt['gasUsed'], (self.status == Status(1)))
    def _confirm_output(self) -> str:
        """Build the console summary printed once the transaction confirms."""
        status = ''
        if (not self.status):
            # the revert reason is only resolvable when the node supports traces
            revert_msg = (self.revert_msg if web3.supports_traces else None)
            status = f"({color('bright red')}{(revert_msg or 'reverted')}{color}) "
        result = f'''
{self._full_name()} confirmed {status} Block: {color('bright blue')}{self.block_number}{color} Gas used: {color('bright blue')}{self.gas_used}{color} ({color('bright blue')}{(self.gas_used / self.gas_limit):.2%}{color})'''
        if ((self.type == 2) and (self.gas_price is not None)):
            result += f" Gas price: {color('bright blue')}{(self.gas_price / (10 ** 9))}{color} gwei"
        if (self.status and self.contract_address):
            result += f'''
{self.contract_name} deployed at: {color('bright blue')}{self.contract_address}{color}'''
        return (result + '\n')
    def _get_trace(self) -> None:
        """Fetch the raw trace via `debug_traceTransaction` and post-process it.

        Populates `_raw_trace` and, depending on status, revert/return data.
        Normalizes client-specific quirks (hex-encoded stack/gas/pc values).
        """
        if (self._raw_trace is not None):
            return
        self._raw_trace = []
        # a plain ETH transfer has nothing to trace
        if ((self.input == '0x') and (self.gas_used == 21000)):
            self._modified_state = False
            self._trace = []
            return
        if (not web3.supports_traces):
            raise RPCRequestError('Node client does not support `debug_traceTransaction`')
        try:
            trace = web3.provider.make_request('debug_traceTransaction', (self.txid, {'disableStorage': (CONFIG.mode != 'console'), 'enableMemory': True}))
        except (requests.exceptions.Timeout, requests.exceptions.ConnectionError) as e:
            msg = f'Encountered a {type(e).__name__} while requesting '
            msg += '`debug_traceTransaction`. The local RPC client has likely crashed.'
            if CONFIG.argv['coverage']:
                msg += ' If the error persists, add the `skip_coverage` marker to this test.'
            raise RPCRequestError(msg) from None
        if ('error' in trace):
            self._modified_state = None
            self._trace_exc = RPCRequestError(trace['error']['message'])
            raise self._trace_exc
        self._raw_trace = trace = trace['result']['structLogs']
        if (not trace):
            self._modified_state = False
            return
        # some clients return stack entries as '0x'-prefixed hex strings
        # rather than zero-padded words — detect from the first usable step
        fix_stack = False
        for step in trace:
            if (not step['stack']):
                continue
            check = step['stack'][0]
            if (not isinstance(check, str)):
                break
            if check.startswith('0x'):
                fix_stack = True
                break
        fix_gas = isinstance(trace[0]['gas'], str)
        if (fix_stack or fix_gas):
            for step in trace:
                if fix_stack:
                    step['stack'] = [HexBytes(s).hex()[2:].zfill(64) for s in step['stack']]
                if fix_gas:
                    # hex-encoded gas/gasCost/pc values are normalized to ints
                    step['gas'] = int(step['gas'], 16)
                    if isinstance(step['gasCost'], str):
                        step['gasCost'] = int.from_bytes(HexBytes(step['gasCost']), 'big', signed=True)
                    if isinstance(step['pc'], str):
                        step['pc'] = int(step['pc'], 16)
        if self.status:
            self._confirmed_trace(trace)
        else:
            self._reverted_trace(trace)
def _confirmed_trace(self, trace: Sequence) -> None:
self._modified_state = next((True for i in trace if (i['op'] == 'SSTORE')), False)
if ((trace[(- 1)]['op'] != 'RETURN') or self.contract_address):
return
contract = state._find_contract(self.receiver)
if contract:
data = _get_memory(trace[(- 1)], (- 1))
fn = contract.get_method_object(self.input)
if (not fn):
warn(f'Unable to find function on {contract} for input {self.input}')
return
self._return_value = fn.decode_output(data)
    def _reverted_trace(self, trace: Sequence) -> None:
        """Inspect the trace of a reverted tx to find the revert reason and any
        developer `dev:` revert comment.
        """
        self._modified_state = False
        if self.contract_address:
            # failed deployment: check for the EIP-170 runtime size limit
            step = next((i for i in trace if (i['op'] == 'CODECOPY')), None)
            if ((step is not None) and (int(step['stack'][(- 3)], 16) > 24577)):
                self._revert_msg = 'exceeds EIP-170 size limit'
                self._dev_revert_msg = ''
        if (self._dev_revert_msg is not None):
            return
        # iterate over REVERT/INVALID opcodes from last to first
        for step in (i for i in trace[::(- 1)] if (i['op'] in ('REVERT', 'INVALID'))):
            if ((step['op'] == 'REVERT') and int(step['stack'][(- 2)], 16)):
                # revert carried return data — decode the error payload
                data = _get_memory(step, (- 1))
                selector = data[:4].hex()
                if (selector == '0x4e487b71'):
                    # Solidity 0.8 Panic(uint256)
                    error_code = int(data[4:].hex(), 16)
                    if (error_code in SOLIDITY_ERROR_CODES):
                        self._revert_msg = SOLIDITY_ERROR_CODES[error_code]
                    else:
                        self._revert_msg = f'Panic (error code: {error_code})'
                elif (selector == '0x08c379a0'):
                    # standard Error(string)
                    self._revert_msg = decode(['string'], data[4:])[0]
                else:
                    self._revert_msg = f'typed error: {data.hex()}'
            elif self.contract_address:
                self._revert_msg = ('invalid opcode' if (step['op'] == 'INVALID') else '')
                self._dev_revert_msg = ''
                return
            # check for a dev revert string using the program counter
            dev_revert = (build._get_dev_revert(step['pc']) or None)
            if (dev_revert is not None):
                self._dev_revert_msg = dev_revert
                if (self._revert_msg is None):
                    self._revert_msg = dev_revert
            else:
                # none found — expand the trace and consult the contract's pcMap
                self._expand_trace()
                try:
                    contract = state._find_contract(step['address'])
                    pc_map = contract._build['pcMap']
                    if ('first_revert' in pc_map[step['pc']]):
                        idx = (trace.index(step) - 4)
                        if (trace[idx]['pc'] != (step['pc'] - 4)):
                            step = trace[idx]
                    if ('optimizer_revert' in pc_map[step['pc']]):
                        # walk back past optimizer-merged reverts to real source
                        idx = (trace.index(step) - 1)
                        while (trace[(idx + 1)]['op'] != 'JUMPDEST'):
                            if (trace[idx]['source'] != step['source']):
                                idx = trace.index(step)
                                break
                            idx -= 1
                        while (not trace[idx]['source']):
                            idx -= 1
                        step['source'] = trace[idx]['source']
                        step = trace[idx]
                    if ('dev' in pc_map[step['pc']]):
                        self._dev_revert_msg = pc_map[step['pc']]['dev']
                    else:
                        # look for a trailing dev: comment on the source line
                        source = contract._sources.get(step['source']['filename'])
                        offset = step['source']['offset'][1]
                        line = source[offset:].split('\n')[0]
                        marker = ('//' if (contract._build['language'] == 'Solidity') else '#')
                        revert_str = line[(line.index(marker) + len(marker)):].strip()
                        if revert_str.startswith('dev:'):
                            self._dev_revert_msg = revert_str
                    if (self._revert_msg is None):
                        self._revert_msg = (self._dev_revert_msg or '')
                    return
                except (KeyError, AttributeError, TypeError, ValueError):
                    pass
            if (self._revert_msg is not None):
                if (self._dev_revert_msg is None):
                    self._dev_revert_msg = ''
                return
        # fall back to classifying by the final revert-style opcode
        op = next((i['op'] for i in trace[::(- 1)] if (i['op'] in ('REVERT', 'INVALID'))), None)
        self._revert_msg = ('invalid opcode' if (op == 'INVALID') else '')
    def _expand_trace(self) -> None:
        """Annotate each raw trace step and derive call/coverage information.

        Adds address, contractName, fn, jumpDepth and source to every step,
        and populates `_subcalls`, `_new_contracts`, `_internal_transfers`
        plus coverage data keyed by `coverage_hash`.
        """
        if (self._raw_trace is None):
            self._get_trace()
        if (self._trace is not None):
            # already expanded (possibly as a side effect of _get_trace)
            return
        self._trace = trace = self._raw_trace
        self._new_contracts = []
        self._internal_transfers = []
        self._subcalls = []
        if (self.contract_address or (not trace)):
            coverage._add_transaction(self.coverage_hash, {})
            return
        # normalize depth numbering: geth starts at 1, ganache at 0
        if (trace[0]['depth'] == 1):
            self._trace_origin = 'geth'
            self._call_cost = ((self.gas_used - trace[0]['gas']) + trace[(- 1)]['gas'])
            for t in trace:
                t['depth'] = (t['depth'] - 1)
        else:
            self._trace_origin = 'ganache'
            if (trace[0]['gasCost'] >= 21000):
                # gas costs are shifted by one step in this client — realign
                self._call_cost = trace[0]['gasCost']
                for i in range((len(trace) - 1)):
                    trace[i]['gasCost'] = trace[(i + 1)]['gasCost']
                trace[(- 1)]['gasCost'] = 0
            else:
                self._call_cost = ((self.gas_used - trace[0]['gas']) + trace[(- 1)]['gas'])
        # last_map caches per-depth context (address, pc map, call stack, ...)
        last_map = {0: _get_last_map(self.receiver, self.input[:10])}
        coverage_eval: Dict = {last_map[0]['name']: {}}
        precompile_contract = re.compile('0x0{38}(?:0[1-9]|1[0-8])')
        call_opcodes = ('CALL', 'STATICCALL', 'DELEGATECALL')
        for i in range(len(trace)):
            is_depth_increase = (trace[i]['depth'] > trace[(i - 1)]['depth'])
            is_subcall = (trace[(i - 1)]['op'] in call_opcodes)
            if (is_depth_increase or is_subcall):
                # the previous step entered a call — resolve the callee
                step = trace[(i - 1)]
                if (step['op'] in ('CREATE', 'CREATE2')):
                    # contract creation: the new address appears on the stack
                    # once execution returns to this depth
                    out = next((x for x in trace[i:] if (x['depth'] == step['depth'])))
                    address = out['stack'][(- 1)][(- 40):]
                    sig = f"<{step['op']}>"
                    calldata = None
                    self._new_contracts.append(EthAddress(address))
                    if int(step['stack'][(- 1)], 16):
                        self._add_internal_xfer(step['address'], address, step['stack'][(- 1)])
                else:
                    # regular call: pull calldata out of memory via the stack args
                    stack_idx = ((- 4) if (step['op'] in ('CALL', 'CALLCODE')) else (- 3))
                    offset = int(step['stack'][stack_idx], 16)
                    length = int(step['stack'][(stack_idx - 1)], 16)
                    calldata = HexBytes(''.join(step['memory']))[offset:(offset + length)]
                    sig = calldata[:4].hex()
                    address = step['stack'][(- 2)][(- 40):]
                if is_depth_increase:
                    last_map[trace[i]['depth']] = _get_last_map(address, sig)
                    coverage_eval.setdefault(last_map[trace[i]['depth']]['name'], {})
                self._subcalls.append({'from': step['address'], 'to': EthAddress(address), 'op': step['op']})
                if (step['op'] in ('CALL', 'CALLCODE')):
                    self._subcalls[(- 1)]['value'] = int(step['stack'][(- 3)], 16)
                if (is_depth_increase and calldata and last_map[trace[i]['depth']].get('function')):
                    fn = last_map[trace[i]['depth']]['function']
                    self._subcalls[(- 1)]['function'] = fn._input_sig
                    try:
                        zip_ = zip(fn.abi['inputs'], fn.decode_input(calldata))
                        inputs = {i[0]['name']: i[1] for i in zip_}
                        self._subcalls[(- 1)]['inputs'] = inputs
                    except Exception:
                        self._subcalls[(- 1)]['calldata'] = calldata.hex()
                elif (calldata or is_subcall):
                    self._subcalls[(- 1)]['calldata'] = calldata.hex()
                if (precompile_contract.search(str(self._subcalls[(- 1)]['from'])) is not None):
                    # fold calls made via precompiles into the outer frame
                    caller = self._subcalls.pop((- 2))['from']
                    self._subcalls[(- 1)]['from'] = caller
            # annotate this step from the cached per-depth context
            last = last_map[trace[i]['depth']]
            trace[i].update(address=last['address'], contractName=last['name'], fn=last['internal_calls'][(- 1)], jumpDepth=last['jumpDepth'], source=False)
            opcode = trace[i]['op']
            if ((opcode == 'CALL') and int(trace[i]['stack'][(- 3)], 16)):
                self._add_internal_xfer(last['address'], trace[i]['stack'][(- 2)][(- 40):], trace[i]['stack'][(- 3)])
            try:
                pc = last['pc_map'][trace[i]['pc']]
            except (KeyError, TypeError):
                continue
            if (trace[i]['depth'] and (opcode in ('RETURN', 'REVERT', 'INVALID', 'SELFDESTRUCT'))):
                # the frame is ending — record its outcome on the subcall entry
                subcall: dict = next((i for i in self._subcalls[::(- 1)] if (i['to'] == last['address'])))
                if (opcode == 'RETURN'):
                    returndata = _get_memory(trace[i], (- 1))
                    if returndata:
                        fn = last['function']
                        try:
                            return_values = fn.decode_output(returndata)
                            if (len(fn.abi['outputs']) == 1):
                                return_values = (return_values,)
                            subcall['return_value'] = return_values
                        except Exception:
                            subcall['returndata'] = returndata.hex()
                    else:
                        subcall['return_value'] = None
                elif (opcode == 'SELFDESTRUCT'):
                    subcall['selfdestruct'] = True
                else:
                    if (opcode == 'REVERT'):
                        data = _get_memory(trace[i], (- 1))
                        if (len(data) > 4):
                            try:
                                subcall['revert_msg'] = decode(['string'], data[4:])[0]
                            except Exception:
                                subcall['revert_msg'] = data.hex()
                    if (('revert_msg' not in subcall) and ('dev' in pc)):
                        subcall['revert_msg'] = pc['dev']
            if ('path' not in pc):
                continue
            trace[i]['source'] = {'filename': last['path_map'][pc['path']], 'offset': pc['offset']}
            if ('fn' not in pc):
                continue
            # evaluate coverage for this step
            if last['coverage']:
                if (pc['path'] not in coverage_eval[last['name']]):
                    coverage_eval[last['name']][pc['path']] = [set(), set(), set()]
                if ('statement' in pc):
                    coverage_eval[last['name']][pc['path']][0].add(pc['statement'])
                if ('branch' in pc):
                    if (pc['op'] != 'JUMPI'):
                        last['active_branches'].add(pc['branch'])
                    elif (('active_branches' not in last) or (pc['branch'] in last['active_branches'])):
                        # key 1 == branch not taken, key 2 == branch taken
                        key = (1 if (trace[(i + 1)]['pc'] == (trace[i]['pc'] + 1)) else 2)
                        coverage_eval[last['name']][pc['path']][key].add(pc['branch'])
                        if ('active_branches' in last):
                            last['active_branches'].remove(pc['branch'])
            # track internal jumps to maintain the per-depth call stack
            if ('jump' in pc):
                if (pc['jump'] == 'i'):
                    try:
                        fn = last['pc_map'][trace[(i + 1)]['pc']]['fn']
                    except (KeyError, IndexError):
                        continue
                    if (fn != last['internal_calls'][(- 1)]):
                        last['internal_calls'].append(fn)
                        last['jumpDepth'] += 1
                elif (last['jumpDepth'] > 0):
                    del last['internal_calls'][(- 1)]
                    last['jumpDepth'] -= 1
        coverage._add_transaction(self.coverage_hash, dict(((k, v) for (k, v) in coverage_eval.items() if v)))
def _add_internal_xfer(self, from_: str, to: str, value: str) -> None:
if (not value.startswith('0x')):
value = f'0x{value}'
self._internal_transfers.append({'from': EthAddress(from_), 'to': EthAddress(to), 'value': Wei(value)})
def _full_name(self) -> str:
if (self.contract_name and self.fn_name):
return f'{self.contract_name}.{self.fn_name}'
return (self.fn_name or 'Transaction')
    def info(self) -> None:
        """Print a verbose human-readable summary of the transaction."""
        result = f'''Tx Hash: {self.txid}
From: {self.sender}
'''
        if (self.contract_address and self.status):
            result += f'''New {self.contract_name} address: {self.contract_address}
'''
        else:
            result += f'''To: {self.receiver}
Value: {self.value}
'''
        if ((self.input != '0x') and int(self.input, 16)):
            result += f'''Function: {self._full_name()}
'''
        result += f'''Block: {self.block_number}
Gas Used: {self.gas_used} / {self.gas_limit} ({(self.gas_used / self.gas_limit):.1%})
'''
        if self.events:
            # group consecutive events by emitting address and render as a tree
            events = list(self.events)
            call_tree: List = ['']
            while events:
                # index of the first event from a different address
                idx = next((events.index(i) for i in events if (i.address != events[0].address)), len(events))
                contract = state._find_contract(events[0].address)
                if contract:
                    try:
                        name = contract.name()
                    except Exception:
                        name = contract._name
                    sub_tree: List = [f'{name} ({events[0].address})']
                else:
                    sub_tree = [f'{events[0].address}']
                for event in events[:idx]:
                    sub_tree.append([event.name, *(f'{k}: {v}' for (k, v) in event.items())])
                call_tree.append(sub_tree)
                events = events[idx:]
            event_tree = build_tree([call_tree], multiline_pad=0, pad_depth=[0, 1])
            result = f'''{result}
Events In This Transaction
{event_tree}'''
        result = color.highlight(result)
        status = ''
        if (not self.status):
            status = f"({color('bright red')}{(self.revert_msg or 'reverted')}{color})"
        print(f'''Transaction was Mined {status}
{result}''')
    def _get_trace_gas(self, start: int, stop: int) -> Tuple[(int, int)]:
        """Return (internal_gas, total_gas) consumed between trace steps
        [start, stop).

        `internal_gas` excludes gas spent inside deeper frames; refunds for
        storage-slot clears and SELFDESTRUCT are deducted.
        """
        total_gas = 0
        internal_gas = 0
        is_internal = True
        trace = self.trace
        for i in range(start, stop):
            # detect entering / returning to the frame that `start` belongs to
            if (is_internal and (not _step_compare(trace[i], trace[start]))):
                is_internal = False
                if (trace[i]['depth'] > trace[start]['depth']):
                    # entered a deeper external call: its call-op cost belongs
                    # to the outer total, not this frame's internal gas
                    internal_gas -= trace[(i - 1)]['gasCost']
            elif ((not is_internal) and _step_compare(trace[i], trace[start])):
                is_internal = True
            total_gas += trace[i]['gasCost']
            if is_internal:
                internal_gas += trace[i]['gasCost']
            # refund for clearing a storage slot
            if ((trace[i]['op'] == 'SSTORE') and (int(trace[i]['stack'][(- 2)], 16) == 0)):
                total_gas -= 15000
                if is_internal:
                    internal_gas -= 15000
            # refund for selfdestruct
            if (trace[i]['op'] == 'SELFDESTRUCT'):
                total_gas -= 24000
                if is_internal:
                    internal_gas -= 24000
        if ((start > 0) and (trace[start]['depth'] > trace[(start - 1)]['depth'])):
            # include the cost of the call opcode that entered this frame
            total_gas += trace[(start - 1)]['gasCost']
            internal_gas += trace[(start - 1)]['gasCost']
        return (internal_gas, total_gas)
_inspection
def call_trace(self, expand: bool=False) -> None:
trace = self.trace
key = _step_internal(trace[0], trace[(- 1)], 0, len(trace), self._get_trace_gas(0, len(self.trace)))
call_tree: List = [[key]]
active_tree: List = [call_tree[0]]
trace_index = ([(0, 0, 0)] + [(i, trace[i]['depth'], trace[i]['jumpDepth']) for i in range(1, len(trace)) if (not _step_compare(trace[i], trace[(i - 1)]))])
subcalls = self.subcalls[::(- 1)]
for (i, (idx, depth, jump_depth)) in enumerate(trace_index[1:], start=1):
last = trace_index[(i - 1)]
if ((depth == last[1]) and (jump_depth < last[2])):
active_tree.pop()
continue
elif (depth < last[1]):
active_tree = active_tree[:(- (last[2] + 1))]
continue
if (depth > last[1]):
end = next((x[0] for x in trace_index[(i + 1):] if (x[1] < depth)), len(trace))
(total_gas, internal_gas) = self._get_trace_gas(idx, end)
key = _step_external(trace[idx], trace[(end - 1)], idx, end, (total_gas, internal_gas), subcalls.pop(), expand)
elif ((depth == last[1]) and (jump_depth > last[2])):
end = next((x[0] for x in trace_index[(i + 1):] if ((x[1] < depth) or ((x[1] == depth) and (x[2] < jump_depth)))), len(trace))
(total_gas, internal_gas) = self._get_trace_gas(idx, end)
key = _step_internal(trace[idx], trace[(end - 1)], idx, end, (total_gas, internal_gas))
active_tree[(- 1)].append([key])
active_tree.append(active_tree[(- 1)][(- 1)])
print(f'''Call trace for '{color('bright blue')}{self.txid}{color}':
Initial call cost [{color('bright yellow')}{self._call_cost} gas{color}]''')
print(build_tree(call_tree).rstrip())
def traceback(self) -> None:
print((self._traceback_string() or ''))
_inspection
def _traceback_string(self) -> str:
if (self.status == 1):
return ''
trace = self.trace
try:
idx = next((i for i in range(len(trace)) if (trace[i]['op'] in ('REVERT', 'INVALID'))))
trace_range = range(idx, (- 1), (- 1))
except StopIteration:
return ''
try:
result = [next((i for i in trace_range if trace[i]['source']))]
except StopIteration:
return ''
(depth, jump_depth) = (trace[idx]['depth'], trace[idx]['jumpDepth'])
while True:
try:
idx = next((i for i in trace_range if ((trace[i]['depth'] < depth) or ((trace[i]['depth'] == depth) and (trace[i]['jumpDepth'] < jump_depth)))))
result.append(idx)
(depth, jump_depth) = (trace[idx]['depth'], trace[idx]['jumpDepth'])
except StopIteration:
break
return (f'''{color}Traceback for '{color('bright blue')}{self.txid}{color}':
''' + '\n'.join((self._source_string(i, 0) for i in result[::(- 1)])))
def error(self, pad: int=3) -> None:
print((self._error_string(pad) or ''))
_inspection
def _error_string(self, pad: int=3) -> str:
if (self.status == 1):
return ''
if self._revert_pc:
(highlight, linenos, path, fn_name) = build._get_error_source_from_pc(self._revert_pc)
if highlight:
return _format_source(highlight, linenos, path, self._revert_pc, (- 1), fn_name)
self._revert_pc = None
trace = self.trace
trace_range = range((len(trace) - 1), (- 1), (- 1))
try:
idx = next((i for i in trace_range if (trace[i]['op'] in {'REVERT', 'INVALID'})))
idx = next((i for i in trace_range if trace[i]['source']))
return self._source_string(idx, pad)
except StopIteration:
return ''
def source(self, idx: int, pad: int=3) -> None:
print((self._source_string(idx, pad) or ''))
_inspection
def _source_string(self, idx: int, pad: int) -> str:
trace = self.trace[idx]
if (not trace.get('source', None)):
return ''
contract = state._find_contract(self.trace[idx]['address'])
(source, linenos) = highlight_source(contract._sources.get(trace['source']['filename']), trace['source']['offset'], pad)
if (not source):
return ''
return _format_source(source, linenos, trace['source']['filename'], trace['pc'], self.trace.index(trace), trace['fn']) |
class FixUncleanShutdownComponent(Application):
    """CLI component that cleans up pid files and dangling processes left
    behind by a previous unclean shutdown.
    """
    logger = logging.getLogger('trinity.components.fix_unclean_shutdown.FixUncleanShutdown')

    # Fix: both methods take `cls` and reference other members via the class,
    # so they are classmethods; the @classmethod decorators were missing.
    @classmethod
    def configure_parser(cls, arg_parser: ArgumentParser, subparser: _SubParsersAction) -> None:
        """Register the `fix-unclean-shutdown` subcommand on the CLI parser."""
        attach_parser = subparser.add_parser('fix-unclean-shutdown', help='close any dangling processes from a previous unclean shutdown')
        attach_parser.set_defaults(func=cls.fix_unclean_shutdown)

    @classmethod
    def fix_unclean_shutdown(cls, args: Namespace, trinity_config: TrinityConfig) -> None:
        """Kill the processes recorded in leftover pid files, remove the pid
        files, then clear any dangling IPC files."""
        cls.logger.info('Cleaning up unclean shutdown...')
        cls.logger.info('Searching for process id files in %s...', trinity_config.data_dir)
        pidfiles = tuple(trinity_config.pid_dir.glob('*.pid'))
        if (len(pidfiles) > 1):
            cls.logger.info('Found %d processes from a previous run. Closing...', len(pidfiles))
        elif (len(pidfiles) == 1):
            cls.logger.info('Found 1 process from a previous run. Closing...')
        else:
            cls.logger.info('Found 0 processes from a previous run. No processes to kill.')
        for pidfile in pidfiles:
            process_id = int(pidfile.read_text())
            kill_process_id_gracefully(process_id, time.sleep, cls.logger)
            try:
                pidfile.unlink()
                cls.logger.info('Manually removed %s after killing process id %d', pidfile, process_id)
            except FileNotFoundError:
                # the killed process removed its own pidfile on exit
                cls.logger.debug('pidfile %s was gone after killing process id %d', pidfile, process_id)
        remove_dangling_ipc_files(cls.logger, trinity_config.ipc_dir)
class OptionPlotoptionsHeatmapSonificationTracksMappingNoteduration(Options):
    """Option accessors for the noteduration mapping configuration.

    Fix: each option was declared as two plain `def`s with the same name, so
    the no-arg getter was silently shadowed by the one-arg setter. Restored
    the clearly intended @property / setter protocol.
    """

    @property
    def mapFunction(self):
        """Getter for the `mapFunction` option."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Getter for the `mapTo` option."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Getter for the `max` option."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Getter for the `min` option."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Getter for the `within` option."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class InputShapeBaseInformationError(ErsiliaError):
    """Raised when a supplied Ersilia input shape is not one of the allowed values."""

    def __init__(self):
        self.message = 'Wrong Ersilia input shape'
        allowed = ', '.join(_read_default_fields('Input Shape'))
        self.hints = 'Only one of the following shapes is allowed: {}'.format(allowed)
        ErsiliaError.__init__(self, self.message, self.hints)
class OptionSeriesOrganizationSonificationContexttracksMappingLowpassResonance(Options):
    """Option accessors for the lowpass resonance mapping configuration.

    Fix: each option was declared as two plain `def`s with the same name, so
    the no-arg getter was silently shadowed by the one-arg setter. Restored
    the clearly intended @property / setter protocol.
    """

    @property
    def mapFunction(self):
        """Getter for the `mapFunction` option."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Getter for the `mapTo` option."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Getter for the `max` option."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Getter for the `min` option."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Getter for the `within` option."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class TestGenerateProtocolFailsWhenDirectoryAlreadyExists():
    """`aea generate protocol` must fail when a directory with the protocol's
    name already exists inside the agent's `protocols/` folder."""

    def setup_class(cls):
        # NOTE(review): no @classmethod decorator is visible; pytest still
        # passes the class object to setup_class — confirm the decorator was
        # stripped rather than intentionally omitted.
        cls.runner = CliRunner()
        cls.agent_name = 'myagent'
        cls.protocol_name = 't_protocol'
        cls.cwd = os.getcwd()
        cls.t = tempfile.mkdtemp()
        # Copy the packages registry and the sample spec into the temp sandbox.
        dir_path = Path('packages')
        tmp_dir = (cls.t / dir_path)
        src_dir = (cls.cwd / Path(ROOT_DIR, dir_path))
        shutil.copytree(str(src_dir), str(tmp_dir))
        shutil.copyfile(Path(CUR_PATH, 'data', 'sample_specification.yaml'), Path(cls.t, 'sample_specification.yaml'))
        # Spec path is relative to the agent directory we chdir into below.
        cls.path_to_specification = str(Path('..', 'sample_specification.yaml'))
        os.chdir(cls.t)
        result = cls.runner.invoke(cli, [*CLI_LOG_OPTION, 'init', '--author', AUTHOR])
        assert (result.exit_code == 0)
        cls.create_result = cls.runner.invoke(cli, [*CLI_LOG_OPTION, 'create', '--local', cls.agent_name], standalone_mode=False)
        os.chdir(cls.agent_name)
        # Pre-create the protocol directory to trigger the expected failure.
        Path(cls.t, cls.agent_name, 'protocols', cls.protocol_name).mkdir(exist_ok=False, parents=True)
        cls.result = cls.runner.invoke(cli, [*CLI_LOG_OPTION, 'generate', 'protocol', cls.path_to_specification], standalone_mode=False)
        os.chdir(cls.cwd)

    def test_create_agent_exit_code_equal_to_0(self):
        # Agent creation itself must have succeeded.
        assert (self.create_result.exit_code == 0)

    def test_exit_code_equal_to_1(self):
        # The generate command must have failed.
        assert (self.result.exit_code == 1)

    def test_error_message_protocol_already_existing(self):
        s = ('Protocol is NOT generated. The following error happened while generating the protocol:\n' + "A directory with name '{}' already exists. Aborting...".format(self.protocol_name))
        assert (self.result.exception.message == s)

    def test_resource_directory_exists(self):
        # The pre-existing directory must not have been removed by the failure.
        assert Path(self.t, self.agent_name, 'protocols', self.protocol_name).exists()

    def teardown_class(cls):
        # Best-effort cleanup of the temp sandbox.
        try:
            shutil.rmtree(cls.t)
        except (OSError, IOError):
            pass
def extractShinSekaiYori(item):
    """Build a release message for 'Shin Sekai yori' from a feed item, or
    return None for previews / items without a chapter or volume number."""
    # Concatenate all 'chapter' tags and the title into one parseable string.
    pieces = [tag for tag in item['tags'] if 'chapter' in tag.lower()]
    pieces.append(item['title'])
    chapter_text = ''.join(' ' + piece for piece in pieces)
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(chapter_text)
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    if frag:
        # Fragments are encoded as tenths (e.g. 5 -> chapter x.5).
        frag = frag / 10
    return buildReleaseMessageWithType(item, 'Shin Sekai yori', vol, chp, frag=frag, postfix=postfix)
class Command(BaseRevisionCommand):
    """Management command that creates an initial revision for every
    registered object that does not yet have one."""
    help = 'Creates initial revisions for a given app [and model].'

    def add_arguments(self, parser):
        """Add --comment, --batch-size and --meta on top of the base revision options."""
        super().add_arguments(parser)
        parser.add_argument('--comment', action='store', default='Initial version.', help="Specify the comment to add to the revisions. Defaults to 'Initial version'.")
        parser.add_argument('--batch-size', action='store', type=int, default=500, help='For large sets of data, revisions will be populated in batches. Defaults to 500.')
        parser.add_argument('--meta', action='store', default={}, type=json.loads, help='Specify meta models and corresponding values for each initial revision as JSONeg. --meta "{"core.RevisionMeta", {"hello": "world"}}"')

    def handle(self, *app_labels, **options):
        """Create initial revisions in batches for all matching models."""
        verbosity = options['verbosity']
        using = options['using']
        model_db = options['model_db']
        comment = options['comment']
        batch_size = options['batch_size']
        meta = options['meta']
        # Resolve meta model labels up front so bad labels fail fast.
        meta_models = []
        for label in meta.keys():
            try:
                model = apps.get_model(label)
                meta_models.append(model)
            except LookupError:
                raise CommandError(f'Unknown model: {label}')
        meta_values = meta.values()
        using = (using or router.db_for_write(Revision))
        # Only iterate lazily when the backend supports server-side cursors
        # (PostgreSQL); otherwise chunk by primary key to bound memory use.
        server_side_cursors = (not connections[using].settings_dict.get('DISABLE_SERVER_SIDE_CURSORS'))
        use_iterator = ((connections[using].vendor in ('postgresql',)) and server_side_cursors)
        with transaction.atomic(using=using):
            for model in self.get_models(options):
                if (verbosity >= 1):
                    self.stdout.write('Creating revisions for {name}'.format(name=model._meta.verbose_name))
                created_count = 0
                # Objects that do not yet have a Version row.
                live_objs = _safe_subquery('exclude', model._default_manager.using(model_db), model._meta.pk.name, Version.objects.using(using).get_for_model(model, model_db=model_db), 'object_id')
                live_objs = live_objs.order_by()
                if use_iterator:
                    total = live_objs.count()
                    if total:
                        for obj in live_objs.iterator(batch_size):
                            self.create_revision(obj, using, meta, meta_models, meta_values, comment, model_db)
                            created_count += 1
                            if (not (created_count % batch_size)):
                                self.batch_complete(verbosity, created_count, total)
                else:
                    ids = list(live_objs.values_list('pk', flat=True))
                    total = len(ids)
                    for i in range(0, total, batch_size):
                        chunked_ids = ids[i:(i + batch_size)]
                        objects = live_objs.in_bulk(chunked_ids)
                        for obj in objects.values():
                            self.create_revision(obj, using, meta, meta_models, meta_values, comment, model_db)
                            created_count += 1
                        self.batch_complete(verbosity, created_count, total)
                if (verbosity >= 1):
                    self.stdout.write('- Created {total} / {total}'.format(total=total))

    def create_revision(self, obj, using, meta, meta_models, meta_values, comment, model_db):
        """Create one revision for `obj`, attaching any requested meta rows.

        NOTE: `create_revision(...)` below resolves to the module-level context
        manager import, not to this method (class attributes are not in scope
        inside method bodies).
        """
        with create_revision(using=using):
            if meta:
                for (model, values) in zip(meta_models, meta_values):
                    add_meta(model, **values)
            set_comment(comment)
            add_to_revision(obj, model_db=model_db)

    def batch_complete(self, verbosity, created_count, total):
        """Reset Django's query log between batches and report progress."""
        reset_queries()
        if (verbosity >= 2):
            self.stdout.write('- Created {created_count} / {total}'.format(created_count=created_count, total=total))
def _get_active_updates(request):
    """Return the requesting user's pending/testing updates, newest first."""
    active_states = (models.UpdateStatus.pending, models.UpdateStatus.testing)
    status_filter = sa.sql.or_(*[(models.Update.status == state) for state in active_states])
    user = models.User.get(request.identity.name)
    return (
        models.Update.query
        .filter(status_filter)
        .filter(models.Update.user == user)
        .order_by(models.Update.date_submitted.desc())
        .all()
    )
# NOTE(review): the bare `_required` below looks like the tail of a stripped
# `@login_required` decorator — confirm against the original source.
_required
def new_entry(request, topic_id):
    """Add a new entry to a topic owned by the current user.

    GET renders a blank EntryForm; POST validates and saves, then redirects
    back to the topic page.
    """
    topic = Topic.objects.get(id=topic_id)
    # Raises (404) if the topic does not belong to the requesting user.
    check_topic_owner(topic, request.user)
    if (request.method != 'POST'):
        form = EntryForm()
    else:
        form = EntryForm(data=request.POST)
        if form.is_valid():
            # Attach the topic before the first save.
            new_entry = form.save(commit=False)
            new_entry.topic = topic
            new_entry.save()
            return redirect('learning_logs:topic', topic_id=topic_id)
    # Fall through on GET or invalid POST: re-render with the (bound) form.
    context = {'topic': topic, 'form': form}
    return render(request, 'learning_logs/new_entry.html', context)
def _run_make_cmds(cmds, timeout, build_dir, allow_cache=True):
    """Run `cmds` as one '&&'-joined shell invocation, honoring the build cache.

    Args:
        cmds: list of shell command strings.
        timeout: seconds to wait for the build before killing it.
        build_dir: build directory, used for cache keys and error context.
        allow_cache: when True, skip the build if a cached result exists and
            store a fresh cache entry on success.

    Raises:
        RuntimeError: if the build exits non-zero.
        subprocess.TimeoutExpired: if the build exceeds `timeout`.
    """
    _LOGGER.debug(f'make cmds={cmds!r}')
    if allow_cache:
        (cached_results_available, store_cache_key) = build_cache.BUILD_CACHE.retrieve_build_cache(cmds, build_dir)
    else:
        (cached_results_available, store_cache_key) = (False, None)
    if (not cached_results_available):
        proc = subprocess.Popen([' && '.join(cmds)], shell=True, env=os.environ.copy(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        # Keep defined so the `finally` block never hits an unbound name even
        # if communicate() fails with something other than TimeoutExpired.
        (out, err) = (b'', b'')
        try:
            # BUG FIX: `proc.communicate(timeout)` passed the timeout as the
            # positional `input` argument, so no timeout was ever applied.
            # It must be passed as the keyword `timeout=`.
            (out, err) = proc.communicate(timeout=timeout)
            if ((proc.returncode == 0) and (store_cache_key is not None)):
                build_cache.BUILD_CACHE.store_build_cache(cmds, build_dir, store_cache_key)
        except subprocess.TimeoutExpired:
            # Kill the runaway build, collect whatever output it produced,
            # then propagate the timeout to the caller.
            proc.kill()
            (out, err) = proc.communicate()
            raise
        finally:
            stdout = out.decode()
            stderr = err.decode()
            if (proc.returncode != 0):
                _LOGGER.info(f'''make stdout:
{stdout}''')
                _LOGGER.info(f'''make stderr:
{stderr}''')
                _log_error_context(stderr, build_dir)
                raise RuntimeError('Build has failed.')
            else:
                _LOGGER.debug(f'''make stdout:
{stdout}''')
                _LOGGER.debug(f'''make stderr:
{stderr}''')
# NOTE(review): the bare `_processor` below looks like the tail of a stripped
# template-context-processor decorator — confirm against the original source.
_processor
def context_get_head_injects():
    """Template context processor exposing an async generator of <head> HTML
    snippets contributed by loaded plugins."""
    async def get_head_injects():
        # Yield each plugin's injected head markup, if the plugin provides one.
        for plugin in PluginHandler.get_loaded_plugin_values():
            if plugin.meta.get_injected_head:
                (yield (await plugin.meta.get_injected_head()))
    return dict(get_head_injects=get_head_injects)
class OptionSonificationGlobaltracksMappingPan(Options):
    """Config proxy for the Highcharts
    `sonification.globalTracks.mapping.pan` option.

    NOTE(review): each name appears twice (getter then setter); the
    @property / @<name>.setter decorators appear to have been stripped from
    this generated file — confirm against the generator output.
    """
    def mapFunction(self):
        # Getter: no default configured.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: store the raw value (not emitted as a JS expression).
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def value(self):
        return self._config_get(None)
    def value(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class System(Base, FidesBase):
    """SQLAlchemy model for a Fides System (`ctl_systems`): a service or
    application that processes data, with its privacy/legal metadata."""
    __tablename__ = 'ctl_systems'

    # Free-form metadata blobs.
    meta = Column(JSON)
    fidesctl_meta = Column(JSON)
    system_type = Column(String)
    administrating_department = Column(String)
    # Data-flow descriptions in and out of the system.
    egress = Column(JSON)
    ingress = Column(JSON)
    vendor_id = Column(String)
    previous_vendor_id = Column(String)
    dataset_references = Column(ARRAY(String), server_default='{}', nullable=False)
    processes_personal_data = Column(BOOLEAN(), server_default='t', nullable=False)
    exempt_from_privacy_regulations = Column(BOOLEAN(), server_default='f', nullable=False)
    reason_for_exemption = Column(String)
    uses_profiling = Column(BOOLEAN(), server_default='f', nullable=False)
    legal_basis_for_profiling = Column(ARRAY(String), server_default='{}')
    does_international_transfers = Column(BOOLEAN(), server_default='f', nullable=False)
    legal_basis_for_transfers = Column(ARRAY(String), server_default='{}')
    requires_data_protection_assessments = Column(BOOLEAN(), server_default='f', nullable=False)
    dpa_location = Column(String)
    dpa_progress = Column(String)
    privacy_policy = Column(String)
    legal_name = Column(String)
    legal_address = Column(String)
    responsibility = Column(ARRAY(String), server_default='{}')
    dpo = Column(String)
    joint_controller_info = Column(String)
    data_security_practices = Column(String)
    cookie_max_age_seconds = Column(BIGINT)
    uses_cookies = Column(BOOLEAN(), default=False, server_default='f', nullable=False)
    cookie_refresh = Column(BOOLEAN(), default=False, server_default='f', nullable=False)
    uses_non_cookie_access = Column(BOOLEAN(), default=False, server_default='f', nullable=False)
    legitimate_interest_disclosure_url = Column(String)
    # Relationships to privacy declarations, stewards, connections and cookies.
    privacy_declarations = relationship('PrivacyDeclaration', cascade='all, delete', back_populates='system', lazy='selectin')
    data_stewards = relationship('FidesUser', secondary='systemmanager', back_populates='systems', lazy='selectin')
    connection_configs = relationship('ConnectionConfig', back_populates='system', cascade='all, delete', uselist=False, lazy='selectin')
    cookies = relationship('Cookies', back_populates='system', lazy='selectin', uselist=True, viewonly=True)
    user_id = Column(String, nullable=True)

    def get_data_uses(cls: Type[System], systems: List[System], include_parents: bool=True) -> set[str]:
        """Collect the set of data-use keys declared across `systems`,
        optionally expanded to include each use's parents.

        NOTE(review): the `cls` first parameter suggests a stripped
        @classmethod decorator — confirm against the original source.
        """
        data_uses = set()
        for system in systems:
            for declaration in system.privacy_declarations:
                if (data_use := declaration.data_use):
                    if include_parents:
                        data_uses.update(DataUse.get_parent_uses_from_key(data_use))
                    else:
                        data_uses.add(data_use)
        return data_uses
class IGUI(Interface):
    """Interface for a toolkit-specific GUI event-loop facade (traits-based).

    NOTE(review): the method bodies below were stripped to bare signatures;
    docstrings are supplied here so each definition has a body. Several
    methods take `cls` or no `self`, suggesting stripped @classmethod /
    @staticmethod decorators — confirm against the original interface.
    """
    # True while the application cursor shows a busy state.
    busy = Bool(False)
    # True once the event loop has been started.
    started = Bool(False)
    # Toolkit-specific location where GUI state is persisted.
    state_location = Str()

    def __init__(self, splash_screen=None):
        """Initialize the GUI, optionally showing *splash_screen* during startup."""

    def allow_interrupt():
        """Override SIGINT handling so Ctrl-C can interrupt the event loop."""

    def invoke_after(cls, millisecs, callable, *args, **kw):
        """Invoke *callable* on the GUI thread after *millisecs* milliseconds."""

    def invoke_later(cls, callable, *args, **kw):
        """Invoke *callable* on the GUI thread at the next opportunity."""

    def set_trait_after(cls, millisecs, obj, trait_name, new):
        """Set *obj.trait_name* to *new* on the GUI thread after *millisecs* ms."""

    def set_trait_later(cls, obj, trait_name, new):
        """Set *obj.trait_name* to *new* on the GUI thread at the next opportunity."""

    def process_events(allow_user_events=True):
        """Process pending toolkit events, optionally including user input events."""

    def set_busy(busy=True):
        """Show or clear the busy cursor."""

    def start_event_loop(self):
        """Enter the toolkit's main event loop."""

    def stop_event_loop(self):
        """Exit the toolkit's main event loop."""
def test_onnx_pytorch():
    """End-to-end flytekit ONNX check: export SuperResolutionNet to ONNX via the
    PyTorch2ONNX type, then run inference with onnxruntime on a test image.

    NOTE(review): the URL string literals in this block were truncated in the
    source (unterminated quotes broke the syntax); they are restored here from
    the PyTorch super-resolution ONNX export tutorial — confirm against
    upstream. The @task/@workflow decorators also appear to have been stripped.
    """
    def train() -> Annotated[(PyTorch2ONNX, PyTorch2ONNXConfig(args=torch.randn(1, 1, 224, 224, requires_grad=True), export_params=True, opset_version=10, do_constant_folding=True, input_names=['input'], output_names=['output'], dynamic_axes={'input': {0: 'batch_size'}, 'output': {0: 'batch_size'}}))]:
        torch_model = SuperResolutionNet(upscale_factor=3)
        # Pretrained weights from the PyTorch ONNX tutorial.
        model_url = 'https://s3.amazonaws.com/pytorch/test_data/export/superres_epoch100-44c6958e.pth'
        # Load onto CPU unless CUDA is available.
        map_location = (lambda storage, loc: storage)
        if torch.cuda.is_available():
            map_location = None
        torch_model.load_state_dict(model_zoo.load_url(model_url, map_location=map_location))
        return PyTorch2ONNX(model=torch_model)

    def onnx_predict(model_file: ONNXFile) -> JPEGImageFile:
        """Run the exported model on the luma channel of a test cat image and
        reassemble a super-resolved RGB JPEG."""
        ort_session = onnxruntime.InferenceSession(model_file.download())
        img = Image.open(requests.get('https://raw.githubusercontent.com/pytorch/tutorials/main/_static/img/cat.jpg', stream=True).raw)
        resize = transforms.Resize([224, 224])
        img = resize(img)
        # The model operates on the Y (luma) channel only.
        img_ycbcr = img.convert('YCbCr')
        (img_y, img_cb, img_cr) = img_ycbcr.split()
        to_tensor = transforms.ToTensor()
        img_y = to_tensor(img_y)
        img_y.unsqueeze_(0)
        ort_inputs = {ort_session.get_inputs()[0].name: (img_y.detach().cpu().numpy() if img_y.requires_grad else img_y.cpu().numpy())}
        ort_outs = ort_session.run(None, ort_inputs)
        img_out_y = ort_outs[0]
        img_out_y = Image.fromarray(np.uint8((img_out_y[0] * 255.0).clip(0, 255)[0]), mode='L')
        # Upscale chroma to match the super-resolved luma, then merge back to RGB.
        final_img = Image.merge('YCbCr', [img_out_y, img_cb.resize(img_out_y.size, Image.BICUBIC), img_cr.resize(img_out_y.size, Image.BICUBIC)]).convert('RGB')
        img_path = (Path(flytekit.current_context().working_directory) / 'cat_superres_with_ort.jpg')
        final_img.save(img_path)
        return JPEGImageFile(path=str(img_path))

    def wf() -> JPEGImageFile:
        model = train()
        return onnx_predict(model_file=model)

    print(wf())
def parse_timedelta(s: Optional[str]) -> datetime.timedelta:
    """Parse an 'H[:M[:S]]' duration string into a timedelta.

    Falls back to DEFAULT_EVENT_DURATION when `s` is None or empty.

    Raises:
        ValueError: if any part is not an integer (e.g. 'abc' or '1:xx').
    """
    s = (s or DEFAULT_EVENT_DURATION)
    try:
        parts = s.split(':')
        hours = int(parts[0])
        minutes = (int(parts[1]) if (len(parts) > 1) else 0)
        seconds = (int(parts[2]) if (len(parts) > 2) else 0)
    except ValueError as exc:
        # Narrowed from a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; chain the cause for easier debugging.
        raise ValueError(f'duration was wrongly formatted: {s}') from exc
    return datetime.timedelta(hours=hours, minutes=minutes, seconds=seconds)
class OwnerTestSerializer(ExtensionsModelSerializer):
    """Test serializer for Owner exposing expandable organization, cars and
    identity sub-serializers."""
    class Meta():
        model = test_models.Owner
        fields = ('id', 'name')
    # `source='*'` feeds the whole Owner instance to the identity serializer;
    # `id_source=False` suppresses the companion *_id field.
    expandable_fields = dict(organization=OrganizationTestSerializer, cars=dict(serializer=SkuTestSerializer, many=True), identity=dict(serializer=OwnerIdentityTestSerializer, id_source=False, source='*'))
class OptionPlotoptionsBarDragdropDraghandle(Options):
    """Config proxy for the Highcharts `plotOptions.bar.dragDrop.dragHandle`
    option (styling of the drag handles shown on draggable points).

    NOTE(review): each name appears twice (getter then setter); the
    @property / @<name>.setter decorators appear to have been stripped from
    this generated file — confirm against the generator output.
    """
    def className(self):
        # Default CSS class for the drag handle element.
        return self._config_get('highcharts-drag-handle')
    def className(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        # Default fill color.
        return self._config_get('#fff')
    def color(self, text: str):
        self._config(text, js_type=False)
    def cursor(self):
        return self._config_get(None)
    def cursor(self, text: str):
        self._config(text, js_type=False)
    def lineColor(self):
        return self._config_get('rgba(0, 0, 0, 0.6)')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        return self._config_get(1)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def pathFormatter(self):
        return self._config_get(None)
    def pathFormatter(self, value: Any):
        self._config(value, js_type=False)
    def zIndex(self):
        # Default stacking order for the handle.
        return self._config_get(901)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
class ScannerIndexTest(ForsetiTestCase):
    """Unit tests for the ScannerIndex DAO: creation, completion, warnings
    and error recording against a file-backed test database."""

    def setUp(self):
        ForsetiTestCase.setUp(self)
        (self.engine, self.dbfile) = create_test_engine_with_file()
        scanner_dao.ScannerIndex.__table__.create(bind=self.engine)
        session_maker = sessionmaker()
        self.session = session_maker(bind=self.engine)

    def tearDown(self):
        # Remove the on-disk test database.
        os.unlink(self.dbfile)
        ForsetiTestCase.tearDown(self)

    # NOTE(review): the bare `.object(...)` lines below look like stripped
    # `@mock.patch.object(...)` decorators — confirm against the original.
    .object(date_time, 'get_utc_now_datetime')
    def test_scanner_index_create(self, mock_date_time):
        """A new index row gets a microtimestamp id, timestamp and CREATED state."""
        utc_now = datetime.utcnow()
        mock_date_time.return_value = utc_now
        expected_id = date_time.get_utc_now_microtimestamp(utc_now)
        db_row = scanner_dao.ScannerIndex.create(expected_id)
        self.assertEqual(expected_id, db_row.id)
        self.assertEqual(utc_now, db_row.created_at_datetime)
        self.assertEqual(IndexState.CREATED, db_row.scanner_status)

    .object(date_time, 'get_utc_now_datetime')
    def test_scanner_index_complete(self, mock_date_time):
        """complete() stamps the completion time and flips status to SUCCESS."""
        start = datetime.utcnow()
        end = (start + timedelta(minutes=5))
        # First call (create) returns `start`, second (complete) returns `end`.
        mock_date_time.side_effect = [start, end]
        expected_id = date_time.get_utc_now_microtimestamp(start)
        db_row = scanner_dao.ScannerIndex.create(expected_id)
        self.assertEqual(expected_id, db_row.id)
        db_row.complete()
        self.assertEqual(end, db_row.completed_at_datetime)
        self.assertEqual(IndexState.SUCCESS, db_row.scanner_status)

    def test_scanner_index_add_warning(self):
        """Warnings accumulate newline-terminated in scanner_index_warnings."""
        db_row = scanner_dao.ScannerIndex.create('aaa')
        db_row.add_warning(self.session, '1st warning')
        db_row.add_warning(self.session, '2nd warning')
        self.assertEqual('1st warning\n2nd warning\n', db_row.scanner_index_warnings)

    def test_scanner_index_set_error(self):
        """set_error stores the message in scanner_index_errors."""
        db_row = scanner_dao.ScannerIndex.create('aaa')
        db_row.set_error(self.session, 'scanner error!')
        self.assertEqual('scanner error!', db_row.scanner_index_errors)
def test_libp2pconnection_mixed_ip_address():
    """Hosts must be either all private or all public for the check to pass."""
    cases = [
        ([], True),
        (['127.0.0.1', '127.0.0.1'], True),
        (['localhost', '127.0.0.1'], True),
        (['10.0.0.1', '127.0.0.1'], False),
        (['fetch.ai', '127.0.0.1'], False),
        (['104.26.2.97', '127.0.0.1'], False),
        (['fetch.ai', 'acn.fetch.ai'], True),
    ]
    for hosts, expected in cases:
        assert _ip_all_private_or_all_public(hosts) is expected
class OptionSeriesVariwideStatesSelectHalo(Options):
    """Config proxy for the Highcharts `series.variwide.states.select.halo`
    option (halo drawn around a selected point).

    NOTE(review): each name appears twice (getter then setter); the
    @property / @<name>.setter decorators appear to have been stripped from
    this generated file — confirm against the generator output.
    """
    def attributes(self):
        return self._config_get(None)
    def attributes(self, value: Any):
        self._config(value, js_type=False)
    def opacity(self):
        # Default halo opacity.
        return self._config_get(0.25)
    def opacity(self, num: float):
        self._config(num, js_type=False)
    def size(self):
        # Default halo size in pixels.
        return self._config_get(10)
    def size(self, num: float):
        self._config(num, js_type=False)
class SioConnection():
    """Minimal socket.io-style connection; subclasses must provide `emit`."""

    async def emit(self, msg_type: str, msg: str, user_id: str, session_id: str, message_id: str):
        """Send a message. Must be overridden with a real transport."""
        # Raise the idiomatic exception for an abstract operation; it is an
        # Exception subclass, so existing `except Exception` callers still work.
        raise NotImplementedError('Not implemented!')

    async def safe_emit(self, msg_type: str, msg: str, user_id: str, session_id: str, msg_id: str):
        """Emit, swallowing (and logging) any failure — emits are best-effort."""
        try:
            (await self.emit(msg_type, msg, user_id, session_id, msg_id))
        except Exception:
            # Narrowed from a bare `except:` so asyncio.CancelledError and
            # KeyboardInterrupt are no longer silently swallowed.
            logger.debug('Failed to safe emit text')
def _raise_contract_error(response_error_data: str) -> None:
    """Decode an eth_call error payload and raise the matching typed exception.

    Dispatches on the payload prefix: a 'Reverted ' text prefix maps to
    ContractLogicError; otherwise the first 10 chars ('0x' + 4-byte selector)
    select OffchainLookup (EIP-3668), ContractPanicError (Panic(uint256)), or
    ContractCustomError for any selector other than the standard
    Error(string). Standard Error(string) payloads fall through unraised.
    """
    if response_error_data.startswith('Reverted '):
        reason_string = _parse_error_with_reverted_prefix(response_error_data)
        raise ContractLogicError(f'execution reverted: {reason_string}', data=response_error_data)
    elif (response_error_data[:10] == OFFCHAIN_LOOKUP_FUNC_SELECTOR):
        # ABI-decode the OffchainLookup fields that follow the selector.
        parsed_data_as_bytes = to_bytes(hexstr=response_error_data[10:])
        abi_decoded_data = abi.decode(list(OFFCHAIN_LOOKUP_FIELDS.values()), parsed_data_as_bytes)
        offchain_lookup_payload = dict(zip(OFFCHAIN_LOOKUP_FIELDS.keys(), abi_decoded_data))
        raise OffchainLookup(offchain_lookup_payload, data=response_error_data)
    elif (response_error_data[:10] == PANIC_ERROR_FUNC_SELECTOR):
        # The panic code lives in the last two hex characters of the payload.
        panic_error_code = response_error_data[(- 2):]
        raise ContractPanicError(PANIC_ERROR_CODES[panic_error_code], data=response_error_data)
    elif ((len(response_error_data) >= 10) and (not (response_error_data[:10] == SOLIDITY_ERROR_FUNC_SELECTOR))):
        # Any other selector is a user-defined (custom) solidity error.
        raise ContractCustomError(response_error_data, data=response_error_data)
class FloatTypeConverter(DataTypeConverter[float]):
    """Converter for float values; unconvertible inputs map to None."""

    def __init__(self) -> None:
        super().__init__(name='float', empty_val=0.0)

    def to_value(self, other: Any) -> Optional[float]:
        """Coerce *other* to float, or return None if it cannot be converted."""
        try:
            result = float(other)
        except (ValueError, TypeError):
            return None
        return result
class ScheduleMixinTester(unittest.TestCase):
    """Verify ScheduleMixin works identically on two mixed-in declarative
    classes and that instances do not share schedule state."""

    def setUp(self):
        super(ScheduleMixinTester, self).setUp()
        # Common constructor kwargs: a 10-day window in UTC.
        self.kwargs = {'name': 'ozgur', 'start': datetime.datetime(2013, 3, 20, 4, 0, tzinfo=pytz.utc), 'end': datetime.datetime(2013, 3, 30, 4, 0, tzinfo=pytz.utc), 'duration': datetime.timedelta(10)}

    def test_mixin_setup_is_working_properly(self):
        """start/end/duration round-trip, end follows start changes, and a
        second mixed-in instance leaves the first untouched."""
        new_A = DeclSchedMixA(**self.kwargs)
        assert (new_A.start == self.kwargs['start'])
        assert (new_A.end == self.kwargs['end'])
        assert (new_A.duration == self.kwargs['duration'])
        # Moving `start` shifts `end` to preserve the duration.
        new_A.start = datetime.datetime(2013, 3, 30, 10, 0, tzinfo=pytz.utc)
        assert (new_A.start == datetime.datetime(2013, 3, 30, 10, 0, tzinfo=pytz.utc))
        assert (new_A.end == datetime.datetime(2013, 4, 9, 10, 0, tzinfo=pytz.utc))
        assert (new_A.duration == datetime.timedelta(10))
        a_start = new_A.start
        a_end = new_A.end
        a_duration = new_A.duration
        # Creating a second instance must not mutate the first one's schedule.
        new_B = DeclSchedMixB(**self.kwargs)
        assert (new_B.start == self.kwargs['start'])
        assert (new_B.end == self.kwargs['end'])
        assert (new_B.duration == self.kwargs['duration'])
        assert (new_A.start == a_start)
        assert (new_A.end == a_end)
        assert (new_A.duration == a_duration)
class Events(object):
    """Registry of all client-side event hubs, keyed by attribute name.

    Each attribute is an EventHub parameterized by its payload type;
    __setattr__ auto-names every hub as '<core-repr>::<attr>' for logging.
    """

    def __init__(self, core: Core) -> None:
        self.core = core
        self.core_initialized = EventHub[Core]()
        # Server connection lifecycle.
        self.server_command = _ServerCommandMapping(core)
        self.server_connected = EventHub[bool]()
        self.server_refused = EventHub[bool]()
        self.server_dropped = EventHub[bool]()
        self.version_mismatch = EventHub[bool]()
        self.server_error = EventHub[str]()
        self.server_info = EventHub[str]()
        # Lobby state.
        self.lobby_users = EventHub[Sequence[wire.model.User]]()
        self.lobby_status = EventHub[wire.LobbyStatus]()
        # Observation.
        self.observe_request = EventHub[int]()
        self.observer_enter = EventHub[Tuple[(int, int)]]()
        self.observer_leave = EventHub[Tuple[(int, int)]]()
        # Game room lifecycle.
        self.game_joined = EventHub[Game]()
        self.set_game_param = EventHub[wire.SetGameParam]()
        self.player_presence = EventHub[Tuple[(Game, List[Tuple[(int, wire.PresenceState)]])]]()
        self.game_left = EventHub[Game]()
        self.room_users = EventHub[Tuple[(Game, Sequence[wire.model.User])]]()
        self.game_started = EventHub[Game]()
        self.game_crashed = EventHub[Game]()
        self.client_game_finished = EventHub[Game]()
        self.game_ended = EventHub[Game]()
        # Authentication.
        self.auth_success = EventHub[int]()
        self.auth_error = EventHub[str]()

    def __setattr__(self, name: str, v: Any) -> None:
        # Tag anything with a `name` attribute (EventHubs) for readable logs.
        if hasattr(v, 'name'):
            v.name = f'{repr(self.core)}::{name}'
        object.__setattr__(self, name, v)
# NOTE(review): this is an Exo-DSL instruction definition, not plain Python;
# the bare string below looks like the C template of a stripped @instr(...)
# decorator — confirm against the original Exo source.
('{dst_data} = _mm512_maskz_loadu_ps(((1 << {N}) - 1), &{src_data});')
def mm512_maskz_loadu_ps(N: size, dst: ([f32][16] AVX512), src: ([f32][N] DRAM)):
    # Semantics spec for the masked load: copy the first N contiguous floats
    # of `src` into the first N lanes of the 16-lane AVX512 register `dst`.
    assert (stride(src, 0) == 1)
    assert (stride(dst, 0) == 1)
    assert (N <= 16)
    for i in seq(0, 16):
        if (i < N):
            dst[i] = src[i]
def test_hic_transfer_covariance():
    """hicTransform --method covariance must reproduce the stored reference matrix."""
    # Closed-but-kept temp file so hicTransform can write to its path.
    outfile = NamedTemporaryFile(suffix='covariance_.h5', delete=False)
    outfile.close()
    args = '--matrix {} --outFileName {} --method covariance'.format(original_matrix, outfile.name).split()
    # `compute` retries the tool up to 5 times (flaky-run guard).
    compute(hicTransform.main, args, 5)
    test = hm.hiCMatrix((ROOT + 'hicTransform/covariance.h5'))
    new = hm.hiCMatrix(outfile.name)
    nt.assert_array_almost_equal(test.matrix.data, new.matrix.data, decimal=DELTA_DECIMAL)
    os.unlink(outfile.name)
def test_transaction_metrics(django_elasticapm_client, client):
    """One successful request through the tracing middleware must record
    exactly one transaction with duration, result, name and outcome set."""
    with override_settings(**middleware_setting(django.VERSION, ['elasticapm.contrib.django.middleware.TracingMiddleware'])):
        # No transactions before the request.
        assert (len(django_elasticapm_client.events[TRANSACTION]) == 0)
        client.get(reverse('elasticapm-no-error'))
        assert (len(django_elasticapm_client.events[TRANSACTION]) == 1)
        transactions = django_elasticapm_client.events[TRANSACTION]
        assert (len(transactions) == 1)
        transaction = transactions[0]
        assert (transaction['duration'] > 0)
        assert (transaction['result'] == 'HTTP 2xx')
        assert (transaction['name'] == 'GET tests.contrib.django.testapp.views.no_error')
        assert (transaction['outcome'] == 'success')
def _setup_locale() -> None:
    """Bind the 'exaile' gettext domain for both the `locale` and gettext modules."""
    # locale.textdomain / bindtextdomain are missing on some platforms
    # (e.g. Windows), so guard each call against AttributeError.
    try:
        locale.textdomain('exaile')
    except AttributeError:
        pass
    gettextmod.textdomain('exaile')
    locale_path = _get_locale_path()
    if locale_path is None:
        return
    try:
        locale.bindtextdomain('exaile', locale_path)
    except AttributeError:
        pass
    gettextmod.bindtextdomain('exaile', locale_path)
class OpenIdCredentialManager(CredentialManager):
    """CredentialManager for OpenID providers supporting realm-qualified
    password grants and rate-limited token refresh with callbacks."""

    def __init__(self, service_information: ServiceInformation, proxies: Optional[dict]=None):
        super().__init__(service_information, proxies)
        # Callables invoked after each successful token refresh.
        self.refresh_callbacks = []

    def _grant_password_request_realm(self, login: str, password: str, realm: str) -> dict:
        # Form payload for an OAuth2 password grant carrying a `realm` field.
        return {'grant_type': 'password', 'username': login, 'scope': ' '.join(self.service_information.scopes), 'password': password, 'realm': realm}

    def init_with_user_credentials_realm(self, login: str, password: str, realm: str):
        """Obtain initial tokens using login/password within the given realm."""
        self._token_request(self._grant_password_request_realm(login, password, realm), True)

    def _is_token_expired(response: Response) -> bool:
        # NOTE(review): no `self` parameter — presumably a stripped
        # @staticmethod decorator; confirm against the original source.
        if (response.status_code == HTTPStatus.UNAUTHORIZED.value):
            logger.info('token expired, renew')
            try:
                json_data = response.json()
                # Provider signals expiry via this specific message.
                return (json_data.get('moreInformation') == 'Token is invalid')
            except ValueError:
                # Non-JSON 401 body: treat as not-expired.
                return False
        else:
            return False

    def access_token(self):
        # NOTE(review): looks like a stripped @property — confirm.
        return self._access_token

    # NOTE(review): `_limit(6, 1800)` looks like the tail of a stripped
    # rate-limit decorator (e.g. max 6 refreshes per 1800 s) — confirm.
    _limit(6, 1800)
    def refresh_token_now(self):
        """Refresh the token immediately; return True on success, False on
        request failure (logged, not raised). Fires refresh callbacks."""
        try:
            self._refresh_token()
            for refresh_callback in self.refresh_callbacks:
                refresh_callback()
            return True
        except RequestException as e:
            logger.error("Can't refresh token %s", e)
            return False
# NOTE(review): the bare tuple below looks like the argument list of a
# stripped @prodigy.recipe(...) decorator — confirm against the original.
('ner.match', dataset=('The dataset to use', 'positional', None, str), spacy_model=('The base model', 'positional', None, str), source=('The source data as a JSONL file', 'positional', None, str), patterns=('Optional match patterns', 'option', 'p', str), exclude=('Names of datasets to exclude', 'option', 'e', split_string), resume=('Resume from existing dataset and update matcher', 'flag', 'R', bool))
def ner_match(dataset: str, spacy_model: str, source: str, patterns: Optional[str]=None, exclude: Optional[List[str]]=None, resume: bool=False):
    """Prodigy recipe: stream pattern-matched NER candidates for annotation.

    Loads `spacy_model`, builds a PatternMatcher from `patterns`, optionally
    resumes by updating the matcher from already-annotated examples, and
    returns the recipe components dict for the 'ner' view.
    """
    nlp = spacy.load(spacy_model)
    matcher = PatternMatcher(nlp).from_disk(patterns)
    if resume:
        DB = connect()
        if (dataset and (dataset in DB)):
            # Teach the matcher from previously accepted/rejected examples.
            existing = DB.get_dataset(dataset)
            matcher.update(existing)
    stream = JSONL(source)
    # Matcher yields (score, example); drop the score for the UI stream.
    stream = (eg for (score, eg) in matcher(stream))
    return {'view_id': 'ner', 'dataset': dataset, 'stream': stream, 'exclude': exclude, 'config': {'lang': nlp.lang}}
class IDHelperTestCase(unittest.TestCase):
    """Tests for the per-class object counter and unique-id helpers."""

    def test_object_counter(self):
        """Counters are tracked per class (and per non-class key) independently."""
        from traits.api import WeakRef

        class Bogus(object):
            weak = WeakRef

        class Foo(object):
            foo = 3

        foo = Foo()
        # get_count does not increment; next_count does.
        self.assertEqual(object_counter.get_count(Bogus), 0)
        self.assertEqual(object_counter.next_count(Bogus), 1)
        self.assertEqual(object_counter.next_count(Bogus), 2)
        self.assertEqual(object_counter.get_count(Bogus), 2)
        # An instance key gets its own independent counter.
        self.assertEqual(object_counter.next_count(foo), 1)
        self.assertEqual(object_counter.next_count(Bogus), 3)

    def test_get_unique_id(self):
        """Unique ids are '<ClassName>_<n>' with n increasing per class."""
        class Bogus(object):
            pass

        bogus_1 = Bogus()
        bogus_2 = Bogus()
        self.assertEqual(get_unique_id(bogus_1), 'Bogus_1')
        self.assertEqual(get_unique_id(bogus_2), 'Bogus_2')
def draw_result_img(img_disp, ith_img, humans, dict_id2skeleton, skeleton_detector, multiperson_classifier):
    """Render skeletons, action labels, the frame counter and classifier
    scores onto a copy of the display image and return it."""
    (r, c) = img_disp.shape[0:2]
    # Resize to a fixed display height, preserving aspect ratio
    # (img_disp_desired_rows is a module-level constant).
    desired_cols = int(((1.0 * c) * (img_disp_desired_rows / r)))
    img_disp = cv2.resize(img_disp, dsize=(desired_cols, img_disp_desired_rows))
    skeleton_detector.draw(img_disp, humans)
    if len(dict_id2skeleton):
        # NOTE(review): `dict_id2label` is not a parameter or local of this
        # function — presumably a module-level global populated by the caller;
        # confirm, otherwise this raises NameError at runtime.
        for (id, label) in dict_id2label.items():
            skeleton = dict_id2skeleton[id]
            # Undo the y normalization before drawing (scale_h is module-level).
            skeleton[1::2] = (skeleton[1::2] / scale_h)
            lib_plot.draw_action_result(img_disp, id, skeleton, label)
    img_disp = lib_plot.add_white_region_to_left_of_image(img_disp)
    cv2.putText(img_disp, ('Frame:' + str(ith_img)), (20, 20), fontScale=1.5, fontFace=cv2.FONT_HERSHEY_PLAIN, color=(0, 0, 0), thickness=2)
    if len(dict_id2skeleton):
        # Draw the score panel for the person with the smallest tracked id.
        classifier_of_a_person = multiperson_classifier.get_classifier(id='min')
        classifier_of_a_person.draw_scores_onto_image(img_disp)
    return img_disp
class ASMStarPC(ASMPatchPC):
    """ASM patch preconditioner whose patches are the star of each mesh
    entity at a chosen depth (all DOFs in the transitive closure)."""
    _prefix = 'pc_star_'

    def get_patches(self, V):
        """Return a list of PETSc index sets, one per (owned) mesh entity at
        the configured depth, collecting all DOFs in that entity's star."""
        mesh = V._mesh
        mesh_dm = mesh.topology_dm
        if mesh.cell_set._extruded:
            warning('applying ASMStarPC on an extruded mesh')
        # Entity depth to build stars around and DOF ordering within a patch,
        # both read from PETSc options under this PC's prefix.
        depth = PETSc.Options().getInt((self.prefix + 'construct_dim'), default=0)
        ordering = PETSc.Options().getString((self.prefix + 'mat_ordering_type'), default='natural')
        # Cache each subspace's local index arrays up front.
        V_local_ises_indices = []
        for (i, W) in enumerate(V):
            V_local_ises_indices.append(V.dof_dset.local_ises[i].indices)
        ises = []
        (start, end) = mesh_dm.getDepthStratum(depth)
        for seed in range(start, end):
            # Skip ghost (non-owned) entities.
            if (mesh_dm.getLabelValue('pyop2_ghost', seed) != (- 1)):
                continue
            # The star: transitive closure in the support direction.
            (pt_array, _) = mesh_dm.getTransitiveClosure(seed, useCone=False)
            pt_array = order_points(mesh_dm, pt_array, ordering, self.prefix)
            indices = []
            for (i, W) in enumerate(V):
                section = W.dm.getDefaultSection()
                for p in pt_array.tolist():
                    dof = section.getDof(p)
                    if (dof <= 0):
                        continue
                    off = section.getOffset(p)
                    # Expand the (offset, dof) range by the subspace block size.
                    W_indices = slice((off * W.value_size), (W.value_size * (off + dof)))
                    indices.extend(V_local_ises_indices[i][W_indices])
            iset = PETSc.IS().createGeneral(indices, comm=PETSc.COMM_SELF)
            ises.append(iset)
        return ises
class UI_MT_op_color_dropdown_convert_to(Menu):
    """Blender dropdown menu listing the Color-ID conversion operators."""
    bl_idname = 'UI_MT_op_color_dropdown_convert_to'
    bl_label = 'To'
    bl_description = 'Convert Color IDs into ...'

    def draw(self, context):
        """Populate the menu with the texture-atlas and vertex-color converters."""
        layout = self.layout
        layout.operator(op_color_convert_texture.op.bl_idname, text='Texture Atlas', icon_value=icon_get('op_color_convert_texture'))
        layout.operator(op_color_convert_vertex_colors.op.bl_idname, text='Vertex Colors', icon_value=icon_get('op_color_convert_vertex_colors'))
def mark_range_as_read(nid: int, start: int, end: int, pages_total: int):
    """Record pages start..end (inclusive) of note `nid` as read, skipping
    pages that already have a read entry.

    Args:
        nid: note id the pages belong to.
        start: first page of the range (inclusive).
        end: last page of the range (inclusive).
        pages_total: total page count stored alongside each entry.
    """
    now = _date_now_str()
    conn = _get_connection()
    try:
        # Parameterized query instead of an f-string — avoids SQL injection
        # and lets sqlite cache the statement plan.
        res = conn.execute('select page from read where nid = ? and page > -1', (nid,)).fetchall()
        # Set for O(1) membership instead of scanning a list per page.
        already_read = {r[0] for r in res}
        to_insert = [(nid, p, now, pages_total) for p in range(start, (end + 1)) if p not in already_read]
        conn.executemany('insert into read (nid, page, created, pagestotal) values (?,?,?,?)', to_insert)
        conn.commit()
    finally:
        # Close the connection even if the insert fails.
        conn.close()
# NOTE(review): the bare `.parametrize(...)` below looks like a stripped
# `@pytest.mark.parametrize` decorator — confirm against the original source.
.parametrize('enum_member, code', [(TypeOfGrid.CORNER_POINT, 0), (TypeOfGrid.UNSTRUCTURED, 1), (TypeOfGrid.COMPOSITE, 2), (TypeOfGrid.BLOCK_CENTER, 3)])
def test_type_of_grid_alternate_values(enum_member, code):
    """Alternate codes round-trip: member -> code and code -> member."""
    assert (enum_member.alternate_value == code)
    assert (TypeOfGrid.alternate_code(code) == enum_member)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.