class AdAssetFeedSpec(AbstractObject):
def __init__(self, api=None):
super(AdAssetFeedSpec, self).__init__()
self._isAdAssetFeedSpec = True
self._api = api
class Field(AbstractObject.Field):
ad_formats = 'ad_formats'
additional_data = 'additional_data'
app_product_page_id = 'app_product_page_id'
asset_customization_rules = 'asset_customization_rules'
autotranslate = 'autotranslate'
bodies = 'bodies'
call_to_action_types = 'call_to_action_types'
call_to_actions = 'call_to_actions'
captions = 'captions'
carousels = 'carousels'
descriptions = 'descriptions'
events = 'events'
groups = 'groups'
images = 'images'
link_urls = 'link_urls'
message_extensions = 'message_extensions'
onsite_destinations = 'onsite_destinations'
optimization_type = 'optimization_type'
reasons_to_shop = 'reasons_to_shop'
shops_bundle = 'shops_bundle'
titles = 'titles'
videos = 'videos'
class CallToActionTypes():
add_to_cart = 'ADD_TO_CART'
apply_now = 'APPLY_NOW'
audio_call = 'AUDIO_CALL'
book_now = 'BOOK_NOW'
book_travel = 'BOOK_TRAVEL'
buy = 'BUY'
buy_now = 'BUY_NOW'
buy_tickets = 'BUY_TICKETS'
call = 'CALL'
call_me = 'CALL_ME'
call_now = 'CALL_NOW'
confirm = 'CONFIRM'
contact = 'CONTACT'
contact_us = 'CONTACT_US'
donate = 'DONATE'
donate_now = 'DONATE_NOW'
download = 'DOWNLOAD'
event_rsvp = 'EVENT_RSVP'
find_a_group = 'FIND_A_GROUP'
find_your_groups = 'FIND_YOUR_GROUPS'
follow_news_storyline = 'FOLLOW_NEWS_STORYLINE'
follow_page = 'FOLLOW_PAGE'
follow_user = 'FOLLOW_USER'
get_directions = 'GET_DIRECTIONS'
get_offer = 'GET_OFFER'
get_offer_view = 'GET_OFFER_VIEW'
get_promotions = 'GET_PROMOTIONS'
get_quote = 'GET_QUOTE'
get_showtimes = 'GET_SHOWTIMES'
get_started = 'GET_STARTED'
inquire_now = 'INQUIRE_NOW'
install_app = 'INSTALL_APP'
install_mobile_app = 'INSTALL_MOBILE_APP'
learn_more = 'LEARN_MORE'
like_page = 'LIKE_PAGE'
listen_music = 'LISTEN_MUSIC'
listen_now = 'LISTEN_NOW'
message_page = 'MESSAGE_PAGE'
mobile_download = 'MOBILE_DOWNLOAD'
no_button = 'NO_BUTTON'
open_instant_app = 'OPEN_INSTANT_APP'
open_link = 'OPEN_LINK'
order_now = 'ORDER_NOW'
pay_to_access = 'PAY_TO_ACCESS'
play_game = 'PLAY_GAME'
play_game_on_facebook = 'PLAY_GAME_ON_FACEBOOK'
purchase_gift_cards = 'PURCHASE_GIFT_CARDS'
raise_money = 'RAISE_MONEY'
record_now = 'RECORD_NOW'
refer_friends = 'REFER_FRIENDS'
request_time = 'REQUEST_TIME'
say_thanks = 'SAY_THANKS'
see_more = 'SEE_MORE'
sell_now = 'SELL_NOW'
send_a_gift = 'SEND_A_GIFT'
send_gift_money = 'SEND_GIFT_MONEY'
send_updates = 'SEND_UPDATES'
share = 'SHARE'
shop_now = 'SHOP_NOW'
sign_up = 'SIGN_UP'
sotto_subscribe = 'SOTTO_SUBSCRIBE'
start_order = 'START_ORDER'
subscribe = 'SUBSCRIBE'
swipe_up_product = 'SWIPE_UP_PRODUCT'
swipe_up_shop = 'SWIPE_UP_SHOP'
update_app = 'UPDATE_APP'
use_app = 'USE_APP'
use_mobile_app = 'USE_MOBILE_APP'
video_annotation = 'VIDEO_ANNOTATION'
video_call = 'VIDEO_CALL'
visit_pages_feed = 'VISIT_PAGES_FEED'
watch_more = 'WATCH_MORE'
watch_video = 'WATCH_VIDEO'
whatsapp_message = 'WHATSAPP_MESSAGE'
woodhenge_support = 'WOODHENGE_SUPPORT'
_field_types = {'ad_formats': 'list<string>', 'additional_data': 'AdAssetFeedAdditionalData', 'app_product_page_id': 'string', 'asset_customization_rules': 'list<AdAssetFeedSpecAssetCustomizationRule>', 'autotranslate': 'list<string>', 'bodies': 'list<AdAssetFeedSpecBody>', 'call_to_action_types': 'list<CallToActionTypes>', 'call_to_actions': 'list<AdAssetFeedSpecCallToAction>', 'captions': 'list<AdAssetFeedSpecCaption>', 'carousels': 'list<AdAssetFeedSpecCarousel>', 'descriptions': 'list<AdAssetFeedSpecDescription>', 'events': 'list<AdAssetFeedSpecEvents>', 'groups': 'list<AdAssetFeedSpecGroupRule>', 'images': 'list<AdAssetFeedSpecImage>', 'link_urls': 'list<AdAssetFeedSpecLinkURL>', 'message_extensions': 'list<AdAssetMessageExtensions>', 'onsite_destinations': 'list<AdAssetOnsiteDestinations>', 'optimization_type': 'string', 'reasons_to_shop': 'bool', 'shops_bundle': 'bool', 'titles': 'list<AdAssetFeedSpecTitle>', 'videos': 'list<AdAssetFeedSpecVideo>'}
    @classmethod
    def _get_field_enum_info(cls):
field_enum_info = {}
field_enum_info['CallToActionTypes'] = AdAssetFeedSpec.CallToActionTypes.__dict__.values()
return field_enum_info |
def int3c2e3d_sph_010(ax, da, A, bx, db, B, cx, dc, C):
result = numpy.zeros((1, 3, 1), dtype=float)
x0 = (A[0] - B[0])
x1 = (ax + bx)
x2 = (cx + x1)
x3 = (x2 ** (- 1.0))
x4 = (x1 ** (- 1.0))
x5 = ((- x4) * ((ax * A[0]) + (bx * B[0])))
x6 = (x5 + C[0])
x7 = ((- x4) * ((ax * A[1]) + (bx * B[1])))
x8 = (x7 + C[1])
x9 = ((- x4) * ((ax * A[2]) + (bx * B[2])))
x10 = (x9 + C[2])
x11 = (((cx * x1) * x3) * (((x10 ** 2) + (x6 ** 2)) + (x8 ** 2)))
x12 = (boys(0, x11) / cx)
x13 = (x3 * boys(1, x11))
x14 = (A[1] - B[1])
x15 = (A[2] - B[2])
x16 = ((((((34. * da) * db) * dc) * (x2 ** (- 0.5))) * x4) * numpy.exp(((((- ax) * bx) * x4) * (((x0 ** 2) + (x14 ** 2)) + (x15 ** 2)))))
result[(0, 0, 0)] = numpy.sum((x16 * (((x0 * x12) - (x12 * (x5 + A[0]))) + (x13 * x6))))
result[(0, 1, 0)] = numpy.sum((x16 * (((x12 * x14) - (x12 * (x7 + A[1]))) + (x13 * x8))))
result[(0, 2, 0)] = numpy.sum((x16 * (((x10 * x13) + (x12 * x15)) - (x12 * (x9 + A[2])))))
    return result
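A small call-shape sketch for the generated integral routine above. It relies on the routine's module-level names numpy and boys; here boys is a stand-in built from SciPy's confluent hypergeometric function, and the exponents and centers are illustrative values only.
import numpy
from scipy.special import hyp1f1
def boys(n, x):
    # Boys function F_n(x) = \int_0^1 t^{2n} exp(-x t^2) dt via Kummer's function;
    # a stand-in for whatever implementation the original module imports.
    return hyp1f1(n + 0.5, n + 1.5, -x) / (2 * n + 1)
A = numpy.array([0.0, 0.0, 0.0])
B = numpy.array([0.0, 0.0, 1.4])
C = numpy.array([0.7, 0.0, 0.7])
# Unit exponents and contraction coefficients; the call returns a (1, 3, 1) array.
print(int3c2e3d_sph_010(1.0, 1.0, A, 1.0, 1.0, B, 1.0, 1.0, C).shape)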
class XGBoost(object):
def __init__(self, n_estimators=200, learning_rate=0.001, min_samples_split=2, min_impurity=1e-07, max_depth=2):
self.n_estimators = n_estimators
self.learning_rate = learning_rate
self.min_samples_split = min_samples_split
self.min_impurity = min_impurity
self.max_depth = max_depth
self.bar = progressbar.ProgressBar(widgets=bar_widgets)
self.loss = LogisticLoss()
self.trees = []
for _ in range(n_estimators):
tree = XGBoostRegressionTree(min_samples_split=self.min_samples_split, min_impurity=min_impurity, max_depth=self.max_depth, loss=self.loss)
self.trees.append(tree)
def fit(self, X, y):
y = to_categorical(y)
y_pred = np.zeros(np.shape(y))
for i in self.bar(range(self.n_estimators)):
tree = self.trees[i]
y_and_pred = np.concatenate((y, y_pred), axis=1)
tree.fit(X, y_and_pred)
update_pred = tree.predict(X)
y_pred -= np.multiply(self.learning_rate, update_pred)
def predict(self, X):
y_pred = None
for tree in self.trees:
update_pred = tree.predict(X)
if (y_pred is None):
y_pred = np.zeros_like(update_pred)
y_pred -= np.multiply(self.learning_rate, update_pred)
y_pred = (np.exp(y_pred) / np.sum(np.exp(y_pred), axis=1, keepdims=True))
y_pred = np.argmax(y_pred, axis=1)
        return y_pred
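A minimal usage sketch for the boosting class above, assuming its project-level helpers (np, to_categorical, XGBoostRegressionTree, LogisticLoss, progressbar, bar_widgets) are importable from the surrounding code; the toy data is illustrative.
import numpy as np
rng = np.random.RandomState(0)
X_demo = rng.rand(40, 3)
y_demo = (X_demo[:, 0] > 0.5).astype(int)  # simple separable binary labels
model = XGBoost(n_estimators=10, learning_rate=0.5, max_depth=2)
model.fit(X_demo, y_demo)                  # fits one regression tree per boosting round
preds = model.predict(X_demo)              # argmax over softmax of the summed tree outputs
print((preds == y_demo).mean())            # training accuracy on the toy data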
def award_types(row):
pulled_from = row.get('pulled_from', None)
idv_type = row.get('idv_type', None)
type_of_idc = row.get('type_of_idc', None)
type_of_idc_description = row.get('type_of_idc_description', None)
if (pulled_from != 'IDV'):
award_type = row.get('contract_award_type')
elif ((idv_type == 'B') and (type_of_idc is not None)):
award_type = 'IDV_B_{}'.format(type_of_idc)
elif ((idv_type == 'B') and (type_of_idc_description == 'INDEFINITE DELIVERY / REQUIREMENTS')):
award_type = 'IDV_B_A'
elif ((idv_type == 'B') and (type_of_idc_description == 'INDEFINITE DELIVERY / INDEFINITE QUANTITY')):
award_type = 'IDV_B_B'
elif ((idv_type == 'B') and (type_of_idc_description == 'INDEFINITE DELIVERY / DEFINITE QUANTITY')):
award_type = 'IDV_B_C'
else:
award_type = 'IDV_{}'.format(idv_type)
if (pulled_from != 'IDV'):
award_type_desc = row.get('contract_award_type_desc')
elif ((idv_type == 'B') and (type_of_idc_description not in (None, 'NAN'))):
award_type_desc = type_of_idc_description
elif (idv_type == 'B'):
award_type_desc = 'INDEFINITE DELIVERY CONTRACT'
else:
award_type_desc = row.get('idv_type_description')
    return (award_type, award_type_desc)
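A hedged example of calling award_types with dict-like rows; the keys come from the function body and the values are illustrative.
idv_row = {'pulled_from': 'IDV', 'idv_type': 'B', 'type_of_idc': '1', 'type_of_idc_description': 'INDEFINITE DELIVERY / REQUIREMENTS'}
print(award_types(idv_row))      # ('IDV_B_1', 'INDEFINITE DELIVERY / REQUIREMENTS')
award_row = {'pulled_from': 'AWARD', 'contract_award_type': 'A', 'contract_award_type_desc': 'BPA CALL'}
print(award_types(award_row))    # ('A', 'BPA CALL')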
class Context():
agent_config: AgentConfig
def __init__(self, cwd: str, verbosity: str, registry_path: Optional[str]) -> None:
self.config = {}
self.cwd = cwd
self.verbosity = verbosity
self.clean_paths: List = []
self._registry_path = registry_path
    @property
    def registry_path(self) -> str:
if self._registry_path:
registry_path = Path(self._registry_path)
if (not (registry_path.exists() and registry_path.is_dir())):
raise ValueError(f'Registry path directory provided ({self._registry_path}) can not be found. Current work dir is {self.cwd}')
return str(registry_path)
registry_path = (Path(self.cwd) / DEFAULT_REGISTRY_NAME).absolute()
if registry_path.is_dir():
return str(registry_path)
registry_path = ((Path(self.cwd) / '..') / DEFAULT_REGISTRY_NAME).absolute()
if registry_path.is_dir():
return str(registry_path)
raise ValueError(f'Registry path not provided and local registry `{DEFAULT_REGISTRY_NAME}` not found in current ({self.cwd}) and parent directory.')
    @property
    def skip_aea_validation(self) -> bool:
return self.config.get('skip_aea_validation', True)
    @property
    def agent_loader(self) -> ConfigLoader:
return ConfigLoader.from_configuration_type(PackageType.AGENT, skip_aea_validation=self.skip_aea_validation)
    @property
    def protocol_loader(self) -> ConfigLoader:
return ConfigLoader.from_configuration_type(PackageType.PROTOCOL, skip_aea_validation=self.skip_aea_validation)
    @property
    def connection_loader(self) -> ConfigLoader:
return ConfigLoader.from_configuration_type(PackageType.CONNECTION, skip_aea_validation=self.skip_aea_validation)
    @property
    def skill_loader(self) -> ConfigLoader:
return ConfigLoader.from_configuration_type(PackageType.SKILL, skip_aea_validation=self.skip_aea_validation)
    @property
    def contract_loader(self) -> ConfigLoader:
return ConfigLoader.from_configuration_type(PackageType.CONTRACT, skip_aea_validation=self.skip_aea_validation)
def set_config(self, key: str, value: Any) -> None:
self.config[key] = value
logger.debug(' config[{}] = {}'.format(key, value))
    @staticmethod
    def _get_item_dependencies(item_type: str, public_id: PublicId) -> Dependencies:
item_type_plural = (item_type + 's')
default_config_file_name = _get_default_configuration_file_name_from_type(item_type)
path = Path(VENDOR, public_id.author, item_type_plural, public_id.name, default_config_file_name)
if (not path.exists()):
path = Path(item_type_plural, public_id.name, default_config_file_name)
config_loader = ConfigLoader.from_configuration_type(item_type)
with open_file(path) as fp:
config = config_loader.load(fp)
deps = cast(Dependencies, config.dependencies)
return deps
def get_dependencies(self) -> Dependencies:
protocol_dependencies = [self._get_item_dependencies(PROTOCOL, protocol_id) for protocol_id in self.agent_config.protocols]
connection_dependencies = [self._get_item_dependencies(CONNECTION, connection_id) for connection_id in self.agent_config.connections]
skill_dependencies = [self._get_item_dependencies(SKILL, skill_id) for skill_id in self.agent_config.skills]
contract_dependencies = [self._get_item_dependencies(CONTRACT, contract_id) for contract_id in self.agent_config.contracts]
all_dependencies = [self.agent_config.dependencies, *protocol_dependencies, *connection_dependencies, *skill_dependencies, *contract_dependencies]
result = merge_dependencies_list(*all_dependencies)
return result
def dump_agent_config(self) -> None:
with open(os.path.join(self.cwd, DEFAULT_AEA_CONFIG_FILE), 'w', encoding='utf-8') as f:
self.agent_loader.dump(self.agent_config, f) |
def collect_queries(traversal: Traversal, resources: TaskResources) -> Dict[(CollectionAddress, str)]:
def collect_queries_fn(tn: TraversalNode, data: Dict[(CollectionAddress, str)]) -> None:
if (not tn.is_root_node()):
data[tn.address] = GraphTask(tn, resources).generate_dry_run_query()
env: Dict[(CollectionAddress, str)] = {}
traversal.traverse(env, collect_queries_fn)
return env |
def copy_current_request_context(f: t.Callable) -> t.Callable:
top = _request_ctx_stack.top
if (top is None):
raise RuntimeError('This decorator can only be used when a request context is active, such as within a view function.')
reqctx = top.copy()
def wrapper(*args, **kwargs):
with reqctx:
return f(*args, **kwargs)
    return update_wrapper(wrapper, f)
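A usage sketch for the helper above, which mirrors flask.copy_current_request_context; it follows the standard Flask pattern of keeping the request context available in a worker thread, and the app and view names are illustrative.
from threading import Thread
from flask import Flask, request
app = Flask(__name__)
@app.route('/work')
def start_work():
    @copy_current_request_context
    def do_work():
        # The copied request context is pushed by the wrapper, so `request` still resolves here.
        print(request.path)
    Thread(target=do_work).start()
    return 'started'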
class OptionSeriesAreasplinerangeSonificationDefaultinstrumentoptionsMappingHighpass(Options):
    @property
    def frequency(self) -> 'OptionSeriesAreasplinerangeSonificationDefaultinstrumentoptionsMappingHighpassFrequency':
return self._config_sub_data('frequency', OptionSeriesAreasplinerangeSonificationDefaultinstrumentoptionsMappingHighpassFrequency)
    @property
    def resonance(self) -> 'OptionSeriesAreasplinerangeSonificationDefaultinstrumentoptionsMappingHighpassResonance':
return self._config_sub_data('resonance', OptionSeriesAreasplinerangeSonificationDefaultinstrumentoptionsMappingHighpassResonance) |
class SteepestDescent(BacktrackingOptimizer):
def __init__(self, geometry, alpha=0.1, **kwargs):
super().__init__(geometry, alpha=alpha, **kwargs)
def prepare_opt(self):
self.log('no backtracking in cycle 0')
def optimize(self):
if (self.is_cos and self.align):
self.procrustes()
self.forces.append(self.geometry.forces)
self.energies.append(self.geometry.energy)
if (self.cur_cycle > 0):
self.skip = self.backtrack(self.forces[(- 1)], self.forces[(- 2)])
step = (self.alpha * self.forces[(- 1)])
step = self.scale_by_max_step(step)
return step |
def stack_plot_cmdline():
parser = argparse.ArgumentParser(description='Plot stack plot')
parser.add_argument('--display', action='store_true', help='Display plot')
parser.add_argument('--outfile', default='stack_plot.png', type=str, help='Output file to store results (default: %(default)s)')
parser.add_argument('--max-n', default=20, type=int, help='Max number of dataseries (will roll everything else into "other") (default: %(default)s)')
parser.add_argument('--normalize', action='store_true', help='Normalize the plot to 100%%')
parser.add_argument('input_fn')
kwargs = vars(parser.parse_args())
stack_plot(**kwargs) |
class CSRFCookie(CSRF):
def setup_form(self, form: Form):
self.form_meta = form.meta
return super().setup_form(form)
def generate_csrf_token(self, csrf_token_field: Field):
csrf_token: (str | None) = self.get_challenge_csrf_token()
if (csrf_token is None):
csrf_token = secrets.token_urlsafe()
self.request.scope[CSRF_ATTRIBUTE_NAME] = csrf_token
return csrf_token
def validate_csrf_token(self, form: Form, field: Field):
challenge_csrf_token = self.get_challenge_csrf_token()
if ((field.data is None) or (challenge_csrf_token is None) or (not secrets.compare_digest(field.data, challenge_csrf_token))):
raise validators.ValidationError(field.gettext('CSRF failed.'))
def get_challenge_csrf_token(self) -> (str | None):
return self.request.cookies.get(self.form_meta.csrf_cookie_name)
    @property
    def request(self) -> Request:
try:
return getattr(self.form_meta, 'request')
except AttributeError as e:
raise CSRFCookieMissingRequest() from e |
@pytest.mark.parametrize(('packaged', 'data', 'expected_show_message_args', 'expected_log', 'expected_triggered'), [(True, b'/releases/tag/v9.9.9"', ['9.9.9'], 'Newest version: 9.9.9', True), (True, b'/releases/tag/v0.0.0"', [], 'Newest version: 0.0.0', True), (False, b'"version": "9.9.9"', ['9.9.9'], 'Newest version: 9.9.9', True), (True, b"doesn't include version", [], 'Could not detect remote', False), (True, 'not-decodable', [], 'Parsing response of update check failed', False)])
def test_on_download_finished(caplog, qtbot, monkeypatch, packaged, data, expected_show_message_args, expected_log, expected_triggered):
checker = update_check.UpdateChecker(parent=None, packaged=packaged)
args = []
def mocked_show_update_message(version):
args.append(version)
monkeypatch.setattr(checker, '_show_update_message', mocked_show_update_message)
def _mocked_downloader_get(cls, url: str, timeout: float):
cls.com.on_download_finished.emit(data, url)
monkeypatch.setattr(downloader.Downloader, 'get', _mocked_downloader_get)
with caplog.at_level(logging.DEBUG), qtbot.wait_signal(checker.com.on_version_checked, timeout=200, raising=False) as result:
checker.com.check.emit()
assert (result.signal_triggered == expected_triggered)
assert (args == expected_show_message_args)
assert (expected_log in caplog.text) |
def drop_outliers(cnarr, width, factor):
if (not len(cnarr)):
return cnarr
outlier_mask = np.concatenate([smoothing.rolling_outlier_quantile(subarr['log2'], width, 0.95, factor) for (_chrom, subarr) in cnarr.by_chromosome()])
n_outliers = outlier_mask.sum()
if n_outliers:
logging.info('Dropped %d outlier bins:\n%s%s', n_outliers, cnarr[outlier_mask].data.head(20), ('\n...' if (n_outliers > 20) else ''))
return cnarr[(~ outlier_mask)] |
def test_comp_no_signature_help():
string = write_rpc_request(1, 'initialize', {'rootPath': str(test_dir)})
file_path = (test_dir / 'test_prog.f08')
string += comp_request(file_path, 12, 6)
(errcode, results) = run_request(string)
assert (errcode == 0)
exp_results = ([1, 'myfun', 'DOUBLE PRECISION FUNCTION myfun(n, xval)', 'myfun(${1:n}, ${2:xval})'],)
assert (len(exp_results) == (len(results) - 1))
for (i, ref) in enumerate(exp_results):
validate_comp(results[(i + 1)], ref) |
class TransitionList():
def __init__(self, transitions: 'Iterable | None'=None):
self.transitions = (list(transitions) if transitions else [])
def __repr__(self):
return f'{type(self).__name__}({self.transitions!r})'
def __or__(self, other: 'TransitionList | Iterable'):
return TransitionList(self.transitions).add_transitions(other)
def add_transitions(self, transition: 'Transition | TransitionList | Iterable'):
if isinstance(transition, TransitionList):
transition = transition.transitions
transitions = ensure_iterable(transition)
for transition in transitions:
self.transitions.append(transition)
return self
def __getitem__(self, index: int):
return self.transitions[index]
def __len__(self):
return len(self.transitions)
def _add_callback(self, callback, name, is_event=False, **kwargs):
for transition in self.transitions:
list_obj = getattr(transition, name)
list_obj._add_unbounded_callback(callback, is_event=is_event, transitions=self, **kwargs)
return callback
def __call__(self, f):
return self._add_callback(f, 'on', is_event=True)
def before(self, f: Callable):
return self._add_callback(f, 'before')
def after(self, f: Callable):
return self._add_callback(f, 'after')
def on(self, f: Callable):
return self._add_callback(f, 'on')
def cond(self, f: Callable):
return self._add_callback(f, 'cond')
def unless(self, f: Callable):
return self._add_callback(f, 'cond', expected_value=False)
def validators(self, f: Callable):
return self._add_callback(f, 'validators')
def add_event(self, event: str):
for transition in self.transitions:
transition.add_event(event)
def unique_events(self) -> List[str]:
tmp_ordered_unique_events_as_keys_on_dict = {}
for transition in self.transitions:
for event in transition.events:
tmp_ordered_unique_events_as_keys_on_dict[event] = True
return list(tmp_ordered_unique_events_as_keys_on_dict.keys()) |
class FBComponentsPrintCommand(fb.FBCommand):
def name(self):
return 'pcomponents'
def description(self):
return 'Print a recursive description of components found starting from <aView>.'
def options(self):
return [fb.FBCommandArgument(short='-u', long='--up', arg='upwards', boolean=True, default=False, help='Print only the component hierarchy found on the first superview that has them, carrying the search up to its window.'), fb.FBCommandArgument(short='-v', long='--show-views', arg='showViews', type='BOOL', default='YES', help="Prints the component hierarchy and does not print the views if the supplied argument is 'NO'. Supply either a 'YES' or a 'NO'. The default is to show views.")]
def args(self):
return [fb.FBCommandArgument(arg='aView', type='UIView* or CKComponent*', help='The view or component from which the search for components begins.', default='(id)[[UIApplication sharedApplication] keyWindow]')]
def run(self, arguments, options):
upwards = ('YES' if options.upwards else 'NO')
showViews = ('YES' if (options.showViews == 'YES') else 'NO')
view = fb.evaluateInputExpression(arguments[0])
if (not viewHelpers.isView(view)):
view = fb.evaluateExpression(('((CKComponent *)%s).viewContext.view' % view))
print(fb.describeObject((((((('[CKComponentHierarchyDebugHelper componentHierarchyDescriptionForView:(UIView *)' + view) + ' searchUpwards:') + upwards) + ' showViews:') + showViews) + ']'))) |
def iter_beacon_config_blocks(fobj: BinaryIO, xor_keys=None, xordecode=True, all_xor_keys=False) -> Iterator[Tuple[(bytes, dict)]]:
found = False
xor_keys = (xor_keys or DEFAULT_XOR_KEYS)
logger.debug(f'xor_keys: {xor_keys!r}')
if ((not found) and xordecode):
try:
fxor = cast(BinaryIO, XorEncodedFile.from_file(fobj))
for xorkey in xor_keys:
for config_block in find_beacon_config_bytes(fxor, xorkey):
found = True
(yield (config_block, {'xorkey': xorkey, 'xorencoded': True}))
except ValueError:
pass
if (not found):
for xorkey in xor_keys:
for config_block in find_beacon_config_bytes(fobj, xorkey):
found = True
(yield (config_block, {'xorkey': xorkey, 'xorencoded': False}))
if ((not found) and all_xor_keys):
logger.debug('config_block not found, trying all xor keys...')
if xordecode:
try:
fxor = XorEncodedFile.from_file(fobj)
except ValueError:
fxor = fobj
left_xor_keys = make_byte_list(exclude=xor_keys)
bytes_counter = collections.Counter()
for chunk in iter(functools.partial(fxor.read, io.DEFAULT_BUFFER_SIZE), b''):
fourgrams = grouper(chunk, n=4, fillvalue=0)
bytes_counter.update((gram[0] for gram in fourgrams if (gram[0] == gram[1] == gram[2] == gram[3])))
most_common_bytes = [p8(x[0]) for x in bytes_counter.most_common()]
left_xor_keys.sort(key=(lambda x: (most_common_bytes.index(x) if (x in most_common_bytes) else 256)))
logger.debug(f'left xor keys to try: {left_xor_keys}')
(yield from iter_beacon_config_blocks(fobj, left_xor_keys, xordecode=xordecode, all_xor_keys=False)) |
class GoogleOAuth2(BaseOAuth2[GoogleOAuth2AuthorizeParams]):
display_name = 'Google'
logo_svg = LOGO_SVG
def __init__(self, client_id: str, client_secret: str, scopes: Optional[List[str]]=BASE_SCOPES, name='google'):
super().__init__(client_id, client_secret, AUTHORIZE_ENDPOINT, ACCESS_TOKEN_ENDPOINT, ACCESS_TOKEN_ENDPOINT, REVOKE_TOKEN_ENDPOINT, name=name, base_scopes=scopes)
async def get_id_email(self, token: str) -> Tuple[(str, Optional[str])]:
        async with self.get_httpx_client() as client:
response = (await client.get(PROFILE_ENDPOINT, params={'personFields': 'emailAddresses'}, headers={**self.request_headers, 'Authorization': f'Bearer {token}'}))
if (response.status_code >= 400):
raise GetIdEmailError(response.json())
data = cast(Dict[(str, Any)], response.json())
user_id = data['resourceName']
user_email = next((email['value'] for email in data['emailAddresses'] if email['metadata']['primary']))
        return (user_id, user_email)
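A hedged call sketch for the client above; the client id, secret, and token are placeholders, and a real Google OAuth2 access token is required for the request to succeed.
import asyncio
async def demo():
    client = GoogleOAuth2('CLIENT_ID', 'CLIENT_SECRET')
    user_id, email = await client.get_id_email('ACCESS_TOKEN')  # raises GetIdEmailError on a 4xx/5xx response
    print(user_id, email)
asyncio.run(demo())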
def check_full_report(ids_to_ignore=[]):
success = True
command = ['safety', 'check', '--full-report']
for ignored_id in ids_to_ignore:
command.extend(['-i', ignored_id])
for filename in glob.glob('**/*requirements.txt', recursive=True):
print('Checking {}'.format(filename))
result = subprocess.run((command + ['-r', filename]))
if (result.returncode != 0):
success = False
    return (0 if success else 1)
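A short, hedged invocation sketch; propagating the return value as the process exit code and the ignored advisory IDs are assumptions, not part of the original helper.
import sys
if __name__ == '__main__':
    sys.exit(check_full_report(ids_to_ignore=['12345', '67890']))  # illustrative advisory IDs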
def test_greenpipe_read_overwrite():
old_data = b'existing data...'
new_data = b'overwrite with mode=r+'
with tempfile.NamedTemporaryFile() as f:
with greenio.GreenPipe(f.name, 'wb') as writer:
writer.write(old_data)
with greenio.GreenPipe(f.name, 'r+b') as writer:
writer.write(new_data)
actual = tests.read_file(f.name)
assert (actual == new_data) |
class OptionPlotoptionsTreemapSonificationContexttracksMappingGapbetweennotes(Options):
    @property
    def mapFunction(self):
        return self._config_get(None)
    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    @property
    def mapTo(self):
        return self._config_get(None)
    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    @property
    def max(self):
        return self._config_get(None)
    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)
    @property
    def min(self):
        return self._config_get(None)
    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)
    @property
    def within(self):
        return self._config_get(None)
    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False) |
def test_from_file(tmp_path):
with pytest.raises(ValueError, match='MZ header not found for: .*'):
xordecode.XorEncodedFile.from_file(io.BytesIO(b'testing'))
xf = xordecode.XorEncodedFile(io.BytesIO(b'\x00\x00\x00\x00SSSStest'))
assert (xf.read() == b'test')
xf = xordecode.XorEncodedFile(io.BytesIO(b'\x01\x02\x03\x04SSSStestABCD'))
assert (xf.read() == (utils.xor(b'test', b'\x01\x02\x03\x04') + utils.xor(b'test', b'ABCD')))
p = (tmp_path / 'small')
p.write_bytes(b'foo')
with p.open('rb') as f:
xf = xordecode.XorEncodedFile(f)
assert (xf.read() == b'') |
@register_deserializable
class WhatsAppBot(BaseBot):
def __init__(self):
try:
self.flask = importlib.import_module('flask')
self.twilio = importlib.import_module('twilio')
except ModuleNotFoundError:
raise ModuleNotFoundError('The required dependencies for WhatsApp are not installed. Please install with `pip install --upgrade "embedchain[whatsapp]"`') from None
super().__init__()
def handle_message(self, message):
if message.startswith('add '):
response = self.add_data(message)
else:
response = self.ask_bot(message)
return response
def add_data(self, message):
data = message.split(' ')[(- 1)]
try:
self.add(data)
response = f'Added data from: {data}'
except Exception:
logging.exception(f'Failed to add data {data}.')
response = 'Some error occurred while adding data.'
return response
def ask_bot(self, message):
try:
response = self.query(message)
except Exception:
logging.exception(f'Failed to query {message}.')
response = 'An error occurred. Please try again!'
return response
def start(self, host='0.0.0.0', port=5000, debug=True):
app = self.flask.Flask(__name__)
def signal_handler(sig, frame):
logging.info('\nGracefully shutting down the WhatsAppBot...')
sys.exit(0)
signal.signal(signal.SIGINT, signal_handler)
        @app.route('/chat', methods=['POST'])
def chat():
incoming_message = self.flask.request.values.get('Body', '').lower()
response = self.handle_message(incoming_message)
twilio_response = self.twilio.twiml.messaging_response.MessagingResponse()
twilio_response.message(response)
return str(twilio_response)
app.run(host=host, port=port, debug=debug) |
@pytest.mark.django_db
def test_category_funding_agency_subawards(agency_test_data):
test_payload = {'category': 'funding_agency', 'subawards': True, 'page': 1, 'limit': 50}
spending_by_category_logic = FundingAgencyViewSet().perform_search(test_payload, {})
expected_response = {'category': 'funding_agency', 'limit': 50, 'page_metadata': {'page': 1, 'next': None, 'previous': None, 'hasNext': False, 'hasPrevious': False}, 'results': [{'amount': 150, 'name': 'Funding Toptier Agency 4', 'code': 'TA4', 'id': 1004}], 'messages': [get_time_period_message()]}
assert (expected_response == spending_by_category_logic) |
class OptionSeriesBellcurveSonificationContexttracksMappingRate(Options):
    @property
    def mapFunction(self):
        return self._config_get(None)
    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    @property
    def mapTo(self):
        return self._config_get(None)
    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    @property
    def max(self):
        return self._config_get(None)
    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)
    @property
    def min(self):
        return self._config_get(None)
    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)
    @property
    def within(self):
        return self._config_get(None)
    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False) |
def _get_callable_type():
if (minor == 6):
from typing import CallableMeta as _VariadicGenericAlias
elif ((minor > 6) and (minor < 9)):
from typing import _VariadicGenericAlias
elif (minor > 8):
from typing import _CallableType as _VariadicGenericAlias
else:
raise RuntimeError(f'Attempting to use spock with python version `3.{minor}` which is unsupported')
return _VariadicGenericAlias |
class VmQGASerializer(s.Serializer):
params = s.ArrayField(max_items=1)
def __init__(self, request, command, *args, **kwargs):
self.request = request
self.command = command
super(VmQGASerializer, self).__init__(*args, **kwargs)
def get_full_command(self):
assert (self.data is not None)
return ([self.command] + self.object['params'])
def detail_dict(self, **kwargs):
res = super(VmQGASerializer, self).detail_dict(**kwargs)
res['command'] = self.command
return res |
def get_nr_primary_components(responses: npt.NDArray[np.float_], threshold: float) -> int:
data_matrix = (responses - responses.mean(axis=0))
(_, singulars, _) = np.linalg.svd(data_matrix.astype(float), full_matrices=False)
variance_ratio = (np.cumsum((singulars ** 2)) / np.sum((singulars ** 2)))
    return len([1 for i in variance_ratio[:(- 1)] if (i < threshold)])
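A minimal sketch of the variance-threshold helper above on synthetic low-rank data; the data shape and threshold are illustrative.
import numpy as np
rng = np.random.default_rng(0)
latent = rng.normal(size=(100, 2))                 # responses live mostly in a 2-D subspace
responses = latent @ rng.normal(size=(2, 5)) + 0.01 * rng.normal(size=(100, 5))
# Counts how many leading cumulative-variance ratios stay below the threshold.
print(get_nr_primary_components(responses, threshold=0.99))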
class TestInitialWithCallable():
def setup_method(self):
def initial_value():
return 123
class TestSerializer(serializers.Serializer):
initial_field = serializers.IntegerField(initial=initial_value)
self.serializer = TestSerializer()
def test_initial_should_accept_callable(self):
assert (self.serializer.data == {'initial_field': 123}) |
@pytest.mark.unit
class TestFilterResults():
def test_select_and_save_field(self):
final_results = {}
flat = {'A': 'a', 'B': 'b', 'C': ['d', 'e', 'f'], 'D': ['g', 'h', 'i', 'j'], 'E': {'F': 'g', 'H': 'i', 'J': {'K': {'L': {'M': ['m', 'n', 'o'], 'P': 'p'}}, 'N': {'O': 'o'}}}, 'F': [{'G': 'g', 'H': 'h'}, {'G': 'h', 'H': 'i'}, {'G': 'i', 'H': 'j'}], 'H': [[{'M': [1, 2, 3], 'N': 'n'}, {'M': [3, 2, 1], 'N': 'o'}, {'M': [1, 1, 1], 'N': 'p'}], [{'M': [4, 5, 6], 'N': 'q'}, {'M': [2, 2, 2], 'N': 's'}, {'M': [], 'N': 'u'}], [{'M': [7, 8, 9], 'N': 'w'}, {'M': [6, 6, 6], 'N': 'y'}, {'M': [2], 'N': 'z'}]], 'I': {'X': [{'J': 'j', 'K': ['k']}, {'J': 'm', 'K': ['', 'customer-']}], 'Y': [{'J': 'l', 'K': ['n']}, {'J': 'm', 'K': ['']}], 'Z': [{'J': 'm', 'K': ['n']}]}, 'J': {'K': {'L': {'M': {'N': {'O': ['', ''], 'P': ['', '']}}}}}, 'K': [{'L': 'l', 'M': 'm'}, {'L': 'n', 'M': 'o'}]}
assert (select_and_save_field(final_results, flat, FieldPath('A')) == {'A': 'a'})
assert (select_and_save_field(final_results, flat, FieldPath('C')) == {'A': 'a', 'C': ['d', 'e', 'f']})
assert (select_and_save_field(final_results, flat, FieldPath('D')) == {'A': 'a', 'C': ['d', 'e', 'f'], 'D': ['g', 'h', 'i', 'j']})
assert (select_and_save_field(final_results, flat, FieldPath('E', 'F')) == {'A': 'a', 'C': ['d', 'e', 'f'], 'D': ['g', 'h', 'i', 'j'], 'E': {'F': 'g'}})
assert (select_and_save_field(final_results, flat, FieldPath('E', 'F', 'Z', 'X')) == {'A': 'a', 'C': ['d', 'e', 'f'], 'D': ['g', 'h', 'i', 'j'], 'E': {'F': 'g'}})
assert (select_and_save_field(final_results, flat, FieldPath('E', 'J', 'K', 'L', 'M')) == {'A': 'a', 'C': ['d', 'e', 'f'], 'D': ['g', 'h', 'i', 'j'], 'E': {'F': 'g', 'J': {'K': {'L': {'M': ['m', 'n', 'o']}}}}})
assert (select_and_save_field(final_results, flat, FieldPath('F', 'G')) == {'A': 'a', 'C': ['d', 'e', 'f'], 'D': ['g', 'h', 'i', 'j'], 'E': {'F': 'g', 'J': {'K': {'L': {'M': ['m', 'n', 'o']}}}}, 'F': [{'G': 'g'}, {'G': 'h'}, {'G': 'i'}]})
assert (select_and_save_field(final_results, flat, FieldPath('H', 'N')) == {'A': 'a', 'C': ['d', 'e', 'f'], 'D': ['g', 'h', 'i', 'j'], 'E': {'F': 'g', 'J': {'K': {'L': {'M': ['m', 'n', 'o']}}}}, 'F': [{'G': 'g'}, {'G': 'h'}, {'G': 'i'}], 'H': [[{'N': 'n'}, {'N': 'o'}, {'N': 'p'}], [{'N': 'q'}, {'N': 's'}, {'N': 'u'}], [{'N': 'w'}, {'N': 'y'}, {'N': 'z'}]]})
assert (select_and_save_field(final_results, flat, FieldPath('H', 'M')) == {'A': 'a', 'C': ['d', 'e', 'f'], 'D': ['g', 'h', 'i', 'j'], 'E': {'F': 'g', 'J': {'K': {'L': {'M': ['m', 'n', 'o']}}}}, 'F': [{'G': 'g'}, {'G': 'h'}, {'G': 'i'}], 'H': [[{'N': 'n', 'M': [1, 2, 3]}, {'N': 'o', 'M': [3, 2, 1]}, {'N': 'p', 'M': [1, 1, 1]}], [{'N': 'q', 'M': [4, 5, 6]}, {'N': 's', 'M': [2, 2, 2]}, {'N': 'u', 'M': []}], [{'N': 'w', 'M': [7, 8, 9]}, {'N': 'y', 'M': [6, 6, 6]}, {'N': 'z', 'M': [2]}]]})
assert (select_and_save_field(final_results, flat, FieldPath('I', 'X', 'J')) == {'A': 'a', 'C': ['d', 'e', 'f'], 'D': ['g', 'h', 'i', 'j'], 'E': {'F': 'g', 'J': {'K': {'L': {'M': ['m', 'n', 'o']}}}}, 'F': [{'G': 'g'}, {'G': 'h'}, {'G': 'i'}], 'H': [[{'N': 'n', 'M': [1, 2, 3]}, {'N': 'o', 'M': [3, 2, 1]}, {'N': 'p', 'M': [1, 1, 1]}], [{'N': 'q', 'M': [4, 5, 6]}, {'N': 's', 'M': [2, 2, 2]}, {'N': 'u', 'M': []}], [{'N': 'w', 'M': [7, 8, 9]}, {'N': 'y', 'M': [6, 6, 6]}, {'N': 'z', 'M': [2]}]], 'I': {'X': [{'J': 'j'}, {'J': 'm'}]}})
assert (select_and_save_field(final_results, flat, FieldPath('I', 'X', 'K')) == {'A': 'a', 'C': ['d', 'e', 'f'], 'D': ['g', 'h', 'i', 'j'], 'E': {'F': 'g', 'J': {'K': {'L': {'M': ['m', 'n', 'o']}}}}, 'F': [{'G': 'g'}, {'G': 'h'}, {'G': 'i'}], 'H': [[{'N': 'n', 'M': [1, 2, 3]}, {'N': 'o', 'M': [3, 2, 1]}, {'N': 'p', 'M': [1, 1, 1]}], [{'N': 'q', 'M': [4, 5, 6]}, {'N': 's', 'M': [2, 2, 2]}, {'N': 'u', 'M': []}], [{'N': 'w', 'M': [7, 8, 9]}, {'N': 'y', 'M': [6, 6, 6]}, {'N': 'z', 'M': [2]}]], 'I': {'X': [{'J': 'j', 'K': ['k']}, {'J': 'm', 'K': ['', 'customer-']}]}})
assert (select_and_save_field(final_results, flat, FieldPath('I', 'Y', 'K')) == {'A': 'a', 'C': ['d', 'e', 'f'], 'D': ['g', 'h', 'i', 'j'], 'E': {'F': 'g', 'J': {'K': {'L': {'M': ['m', 'n', 'o']}}}}, 'F': [{'G': 'g'}, {'G': 'h'}, {'G': 'i'}], 'H': [[{'N': 'n', 'M': [1, 2, 3]}, {'N': 'o', 'M': [3, 2, 1]}, {'N': 'p', 'M': [1, 1, 1]}], [{'N': 'q', 'M': [4, 5, 6]}, {'N': 's', 'M': [2, 2, 2]}, {'N': 'u', 'M': []}], [{'N': 'w', 'M': [7, 8, 9]}, {'N': 'y', 'M': [6, 6, 6]}, {'N': 'z', 'M': [2]}]], 'I': {'X': [{'J': 'j', 'K': ['k']}, {'J': 'm', 'K': ['', 'customer-']}], 'Y': [{'K': ['n']}, {'K': ['']}]}})
assert (select_and_save_field(final_results, flat, FieldPath('J', 'K', 'L', 'M', 'N', 'O')) == {'A': 'a', 'C': ['d', 'e', 'f'], 'D': ['g', 'h', 'i', 'j'], 'E': {'F': 'g', 'J': {'K': {'L': {'M': ['m', 'n', 'o']}}}}, 'F': [{'G': 'g'}, {'G': 'h'}, {'G': 'i'}], 'H': [[{'N': 'n', 'M': [1, 2, 3]}, {'N': 'o', 'M': [3, 2, 1]}, {'N': 'p', 'M': [1, 1, 1]}], [{'N': 'q', 'M': [4, 5, 6]}, {'N': 's', 'M': [2, 2, 2]}, {'N': 'u', 'M': []}], [{'N': 'w', 'M': [7, 8, 9]}, {'N': 'y', 'M': [6, 6, 6]}, {'N': 'z', 'M': [2]}]], 'I': {'X': [{'J': 'j', 'K': ['k']}, {'J': 'm', 'K': ['', 'customer-']}], 'Y': [{'K': ['n']}, {'K': ['']}]}, 'J': {'K': {'L': {'M': {'N': {'O': ['', '']}}}}}})
assert (select_and_save_field(final_results, flat, FieldPath('B')) == {'A': 'a', 'C': ['d', 'e', 'f'], 'D': ['g', 'h', 'i', 'j'], 'E': {'F': 'g', 'J': {'K': {'L': {'M': ['m', 'n', 'o']}}}}, 'F': [{'G': 'g'}, {'G': 'h'}, {'G': 'i'}], 'H': [[{'N': 'n', 'M': [1, 2, 3]}, {'N': 'o', 'M': [3, 2, 1]}, {'N': 'p', 'M': [1, 1, 1]}], [{'N': 'q', 'M': [4, 5, 6]}, {'N': 's', 'M': [2, 2, 2]}, {'N': 'u', 'M': []}], [{'N': 'w', 'M': [7, 8, 9]}, {'N': 'y', 'M': [6, 6, 6]}, {'N': 'z', 'M': [2]}]], 'I': {'X': [{'J': 'j', 'K': ['k']}, {'J': 'm', 'K': ['', 'customer-']}], 'Y': [{'K': ['n']}, {'K': ['']}]}, 'J': {'K': {'L': {'M': {'N': {'O': ['', '']}}}}}, 'B': 'b'})
assert (select_and_save_field(final_results, flat, FieldPath('K', 'L')) == {'A': 'a', 'C': ['d', 'e', 'f'], 'D': ['g', 'h', 'i', 'j'], 'E': {'F': 'g', 'J': {'K': {'L': {'M': ['m', 'n', 'o']}}}}, 'F': [{'G': 'g'}, {'G': 'h'}, {'G': 'i'}], 'H': [[{'N': 'n', 'M': [1, 2, 3]}, {'N': 'o', 'M': [3, 2, 1]}, {'N': 'p', 'M': [1, 1, 1]}], [{'N': 'q', 'M': [4, 5, 6]}, {'N': 's', 'M': [2, 2, 2]}, {'N': 'u', 'M': []}], [{'N': 'w', 'M': [7, 8, 9]}, {'N': 'y', 'M': [6, 6, 6]}, {'N': 'z', 'M': [2]}]], 'I': {'X': [{'J': 'j', 'K': ['k']}, {'J': 'm', 'K': ['', 'customer-']}], 'Y': [{'K': ['n']}, {'K': ['']}]}, 'J': {'K': {'L': {'M': {'N': {'O': ['', '']}}}}}, 'B': 'b', 'K': [{'L': 'l'}, {'L': 'n'}]})
    @pytest.mark.parametrize('orig, expected', [({'A': {'B': {'C': 0}, 'G': {'H': None}}, 'I': 0, 'J': False}, {'A': {'B': {'C': 0}, 'G': {'H': None}}, 'I': 0, 'J': False}), ({'A': [], 'B': [], 'C': False}, {'C': False}), ({'A': {}, 'B': {}, 'C': {}}, {}), ({'A': {'B': {'C': []}, 'G': {'H': None}}, 'I': 0, 'J': False}, {'A': {'G': {'H': None}}, 'I': 0, 'J': False}), ({'A': {'B': {'C': []}, 'G': {'H': {'I': {}}}}, 'J': 0}, {'J': 0}), ({'A': [{'B': 'C', 'D': {}}, {'B': 'G', 'D': {}}, {'B': 'J', 'D': {'J': 'K'}}]}, {'A': [{'B': 'C'}, {'B': 'G'}, {'B': 'J', 'D': {'J': 'K'}}]}), ({}, {}), ({'A': {}}, {}), ({'A': [[{'B': 'C', 'D': [{'F': {}}, {'G': []}]}, {'B': 'D'}, {'B': 'G'}]]}, {'A': [[{'B': 'C'}, {'B': 'D'}, {'B': 'G'}]]}), ([{'A': {'B': {'C': 0}, 'G': {'H': None}}, 'I': 0, 'J': False}, {'K': 'L'}], [{'A': {'B': {'C': 0}, 'G': {'H': None}}, 'I': 0, 'J': False}, {'K': 'L'}]), ([{'A': [], 'B': [], 'C': False}], [{'C': False}]), ([{'A': {}, 'B': {}, 'C': {}}, {'D': 'E'}], [{'D': 'E'}]), ([{'A': {'B': {'C': []}, 'G': {'H': None}}, 'I': 0, 'J': False}], [{'A': {'G': {'H': None}}, 'I': 0, 'J': False}]), ([{'A': {'B': {'C': []}, 'G': {'H': {'I': {}}}}, 'J': 0}], [{'J': 0}]), ([{'A': [{'B': 'C', 'D': {}}, {'B': 'G', 'D': {}}, {'B': 'J', 'D': {'J': 'K'}}]}], [{'A': [{'B': 'C'}, {'B': 'G'}, {'B': 'J', 'D': {'J': 'K'}}]}]), ([{}], []), ([{'A': {}}], []), ([{'A': [[{'B': 'C', 'D': [{'F': {}}, {'G': []}]}, {'B': 'D'}, {'B': 'G'}]]}], [{'A': [[{'B': 'C'}, {'B': 'D'}, {'B': 'G'}]]}])])
def test_remove_empty_containers(self, orig, expected):
results = copy.deepcopy(orig)
remove_empty_containers(results)
assert (results == expected)
def test_filter_data_categories(self):
access_request_results = {'postgres_example:supplies': [{'foods': {'vegetables': True, 'fruits': {'apples': True, 'oranges': False, 'berries': {'strawberries': True, 'blueberries': False}}, 'grains': {'rice': False, 'wheat': True}}, 'clothing': True}]}
data_category_fields = {CollectionAddress('postgres_example', 'supplies'): {'A': [FieldPath('foods', 'fruits', 'apples'), FieldPath('clothing')], 'B': [FieldPath('foods', 'vegetables')], 'C': [FieldPath('foods', 'grains', 'rice'), FieldPath('foods', 'grains', 'wheat')], 'D': [], 'E': [FieldPath('foods', 'fruits', 'berries', 'strawberries'), FieldPath('foods', 'fruits', 'oranges')]}}
only_a_categories = filter_data_categories(copy.deepcopy(access_request_results), {'A'}, data_category_fields)
assert (only_a_categories == {'postgres_example:supplies': [{'foods': {'fruits': {'apples': True}}, 'clothing': True}]})
only_b_categories = filter_data_categories(copy.deepcopy(access_request_results), {'B'}, data_category_fields)
assert (only_b_categories == {'postgres_example:supplies': [{'foods': {'vegetables': True}}]})
only_c_categories = filter_data_categories(copy.deepcopy(access_request_results), {'C'}, data_category_fields)
assert (only_c_categories == {'postgres_example:supplies': [{'foods': {'grains': {'rice': False, 'wheat': True}}}]})
only_d_categories = filter_data_categories(copy.deepcopy(access_request_results), {'D'}, data_category_fields)
assert (only_d_categories == {})
only_e_categories = filter_data_categories(copy.deepcopy(access_request_results), {'E'}, data_category_fields)
assert (only_e_categories == {'postgres_example:supplies': [{'foods': {'fruits': {'oranges': False, 'berries': {'strawberries': True}}}}]})
def test_filter_data_categories_arrays(self):
access_request_results = {'postgres_example:flights': [{'people': {'passenger_ids': [222, 445, 311, 4444], 'pilot_ids': [123, 12, 112]}, 'flight_number': 101}]}
data_category_fields = {CollectionAddress('postgres_example', 'flights'): {'A': [FieldPath('people', 'passenger_ids')], 'B': [FieldPath('people', 'pilot_ids')]}}
only_a_category = filter_data_categories(copy.deepcopy(access_request_results), {'A'}, data_category_fields)
assert (only_a_category == {'postgres_example:flights': [{'people': {'passenger_ids': [222, 445, 311, 4444]}}]})
only_b_category = filter_data_categories(copy.deepcopy(access_request_results), {'B'}, data_category_fields)
assert (only_b_category == {'postgres_example:flights': [{'people': {'pilot_ids': [123, 12, 112]}}]})
def test_filter_data_categories_limited_results(self):
jane_results = {'mongo_test:customer_details': [{'_id': ObjectId('61f2bc8d6362fd78d72d8791'), 'customer_id': 3.0, 'gender': 'female', 'birthday': datetime(1990, 2, 28, 0, 0)}], 'postgres_example:order_item': [], 'postgres_example:report': [], 'postgres_example:orders': [{'customer_id': 3, 'id': 'ord_ddd-eee', 'shipping_address_id': 4}], 'postgres_example:employee': [], 'postgres_example:address': [{'city': 'Example Mountain', 'house': 1111, 'id': 4, 'state': 'TX', 'street': 'Example Place', 'zip': '54321'}], 'postgres_example:visit': [], 'postgres_example:product': [], 'postgres_example:customer': [{'address_id': 4, 'created': datetime(2020, 4, 1, 11, 47, 42), 'email': '', 'id': 3, 'name': 'Jane Customer'}], 'postgres_example:service_request': [], 'postgres_example:payment_card': [{'billing_address_id': 4, 'ccn': , 'code': 222, 'customer_id': 3, 'id': 'pay_ccc-ccc', 'name': 'Example Card 3', 'preferred': False}], 'mongo_test:customer_feedback': [], 'postgres_example:login': [{'customer_id': 3, 'id': 8, 'time': datetime(2021, 1, 6, 1, 0)}], 'mongo_test:internal_customer_profile': []}
target_categories = {'user'}
data_category_fields = {CollectionAddress.from_string('postgres_example:address'): {'user.contact.address.city': [FieldPath('city')], 'user.contact.address.street': [FieldPath('house'), FieldPath('street')], 'system.operations': [FieldPath('id')], 'user.contact.address.state': [FieldPath('state')], 'user.contact.address.postal_code': [FieldPath('zip')]}, CollectionAddress.from_string('postgres_example:customer'): {'system.operations': [FieldPath('address_id'), FieldPath('created')], 'user.contact.email': [FieldPath('email')], 'user.unique_id': [FieldPath('id')], 'user.name': [FieldPath('name')]}, CollectionAddress.from_string('postgres_example:employee'): {'system.operations': [FieldPath('address_id')], 'user.contact.email': [FieldPath('email')], 'user.unique_id': [FieldPath('id')], 'user.name': [FieldPath('name')]}, CollectionAddress.from_string('postgres_example:login'): {'user.unique_id': [FieldPath('customer_id')], 'system.operations': [FieldPath('id')], 'user.sensor': [FieldPath('time')]}, CollectionAddress.from_string('postgres_example:order_item'): {'system.operations': [FieldPath('order_id'), FieldPath('product_id'), FieldPath('quantity')]}, CollectionAddress.from_string('postgres_example:orders'): {'user.unique_id': [FieldPath('customer_id')], 'system.operations': [FieldPath('id'), FieldPath('shipping_address_id')]}, CollectionAddress.from_string('postgres_example:payment_card'): {'system.operations': [FieldPath('billing_address_id'), FieldPath('id')], 'user.financial.bank_account': [FieldPath('ccn')], 'user.financial': [FieldPath('code'), FieldPath('name')], 'user.unique_id': [FieldPath('customer_id')], 'user': [FieldPath('preferred')]}, CollectionAddress.from_string('postgres_example:product'): {'system.operations': [FieldPath('id'), FieldPath('name'), FieldPath('price')]}, CollectionAddress.from_string('postgres_example:report'): {'user.contact.email': [FieldPath('email')], 'system.operations': [FieldPath('id'), FieldPath('month'), FieldPath('name'), FieldPath('total_visits'), FieldPath('year')]}, CollectionAddress.from_string('postgres_example:service_request'): {'user.contact.email': [FieldPath('alt_email')], 'system.operations': [FieldPath('closed'), FieldPath('email'), FieldPath('id'), FieldPath('opened')], 'user.unique_id': [FieldPath('employee_id')]}, CollectionAddress.from_string('postgres_example:visit'): {'user.contact.email': [FieldPath('email')], 'system.operations': [FieldPath('last_visit')]}, CollectionAddress.from_string('mongo_test:customer_details'): {'system.operations': [FieldPath('_id')], 'user.demographic.date_of_birth': [FieldPath('birthday')], 'user.unique_id': [FieldPath('customer_id')], 'user.demographic.gender': [FieldPath('gender')], 'user.job_title': [FieldPath('workplace_info', 'position')]}, CollectionAddress.from_string('mongo_test:customer_feedback'): {'system.operations': [FieldPath('_id')], 'user.contact.phone_number': [FieldPath('customer_information', 'phone')], 'user': [FieldPath('message'), FieldPath('rating')]}, CollectionAddress.from_string('mongo_test:internal_customer_profile'): {'user': [FieldPath('derived_interests')]}}
filtered_results = filter_data_categories(copy.deepcopy(jane_results), target_categories, data_category_fields)
expected_results = {'mongo_test:customer_details': [{'birthday': datetime(1990, 2, 28, 0, 0), 'gender': 'female', 'customer_id': 3.0}], 'postgres_example:orders': [{'customer_id': 3}], 'postgres_example:address': [{'state': 'TX', 'street': 'Example Place', 'zip': '54321', 'house': 1111, 'city': 'Example Mountain'}], 'postgres_example:customer': [{'id': 3, 'name': 'Jane Customer', 'email': ''}], 'postgres_example:payment_card': [{'code': 222, 'name': 'Example Card 3', 'customer_id': 3, 'preferred': False, 'ccn': }], 'postgres_example:login': [{'time': datetime(2021, 1, 6, 1, 0), 'customer_id': 3}]}
print(filtered_results)
print(('-' * 10))
print(expected_results)
assert (filtered_results == expected_results)
def test_unpack_fides_connector_results_key_error(self, loguru_caplog):
unpack_fides_connector_results(connector_results=[{'test': 'bad'}], filtered_access_results={'test': [{'test': 't'}]}, rule_key='bad', node_address='nothing')
assert ('Did not find a result entry' in loguru_caplog.text) |
@pytest.mark.external
def test_example_1_classifier():
with util.make_tempdir() as tmpdir:
        cfg_str = '\n [nlp]\n lang = "en"\n pipeline = ["llm"]\n\n [components]\n\n [components.llm]\n factory = "llm"\n\n [components.llm.task]\n @llm_tasks = "spacy.TextCat.v2"\n labels = ["COMPLIMENT", "INSULT"]\n\n [components.llm.model]\n @llm_models = "spacy.GPT-3-5.v2"\n '
with open((tmpdir / 'cfg'), 'w') as text_file:
text_file.write(cfg_str)
nlp = assemble((tmpdir / 'cfg'))
doc = nlp('You look gorgeous!')
print(doc.cats) |
def upgrade():
op.execute('ALTER TABLE events RENAME has_organizer_info TO has_owner_info')
op.execute('ALTER TABLE events RENAME organizer_description TO owner_description')
op.execute('ALTER TABLE events RENAME organizer_name TO owner_name')
op.execute('ALTER TABLE events_version RENAME has_organizer_info TO has_owner_info')
op.execute('ALTER TABLE events_version RENAME organizer_description TO owner_description')
op.execute('ALTER TABLE events_version RENAME organizer_name TO owner_name')
op.execute("INSERT INTO roles(name, title_name) SELECT 'owner', 'Owner' WHERE NOT EXISTS (SELECT id FROM roles WHERE name='owner')")
op.execute("UPDATE users_events_roles SET role_id=(SELECT id FROM roles WHERE name='owner') WHERE id IN (SELECT DISTINCT ON(event_id, role_id) id FROM users_events_roles WHERE role_id=(SELECT id FROM roles WHERE name='organizer'))") |
@jit(nopython=True, cache=const.numba_cache)
def get_u(rvw, R, phi, s):
if (s == const.rolling):
return np.array([1, 0, 0], dtype=np.float64)
rel_vel = physics_utils.rel_velocity(rvw, R)
if (rel_vel == 0).all():
return np.array([1, 0, 0], dtype=np.float64)
return ptmath.coordinate_rotation(ptmath.unit_vector(rel_vel), (- phi)) |
def set_metadata(track: BasicTrack, filename: str, playlist_info=None, **kwargs):
logger.info('Setting tags...')
artwork_url = track.artwork_url
user = track.user
if (not artwork_url):
artwork_url = user.avatar_url
response = None
if kwargs.get('original_art'):
new_artwork_url = artwork_url.replace('large', 'original')
try:
response = requests.get(new_artwork_url, stream=True)
if (response.headers['Content-Type'] not in ('image/png', 'image/jpeg', 'image/jpg')):
response = None
except Exception:
pass
if (response is None):
new_artwork_url = artwork_url.replace('large', 't500x500')
response = requests.get(new_artwork_url, stream=True)
if (response.headers['Content-Type'] not in ('image/png', 'image/jpeg', 'image/jpg')):
response = None
if (response is None):
logger.error(f'Could not get cover art at {new_artwork_url}')
with tempfile.NamedTemporaryFile() as out_file:
if response:
shutil.copyfileobj(response.raw, out_file)
out_file.seek(0)
track.date = track.created_at.strftime('%Y-%m-%d %H::%M::%S')
track.artist = user.username
if kwargs.get('extract_artist'):
            for dash in [' - ', ' − ', ' – ', ' — ', ' ― ']:  # plain hyphen plus Unicode minus, en dash, em dash, and horizontal bar
if (dash in track.title):
artist_title = track.title.split(dash)
track.artist = artist_title[0].strip()
track.title = artist_title[1].strip()
break
mutagen_file = mutagen.File(filename)
mutagen_file.delete()
if track.description:
if (mutagen_file.__class__ == mutagen.flac.FLAC):
mutagen_file['description'] = track.description
elif ((mutagen_file.__class__ == mutagen.mp3.MP3) or (mutagen_file.__class__ == mutagen.wave.WAVE)):
mutagen_file['COMM'] = mutagen.id3.COMM(encoding=3, lang='ENG', text=track.description)
elif (mutagen_file.__class__ == mutagen.mp4.MP4):
mutagen_file['cmt'] = track.description
if response:
if (mutagen_file.__class__ == mutagen.flac.FLAC):
p = mutagen.flac.Picture()
p.data = out_file.read()
p.mime = 'image/jpeg'
p.type = mutagen.id3.PictureType.COVER_FRONT
mutagen_file.add_picture(p)
elif ((mutagen_file.__class__ == mutagen.mp3.MP3) or (mutagen_file.__class__ == mutagen.wave.WAVE)):
mutagen_file['APIC'] = mutagen.id3.APIC(encoding=3, mime='image/jpeg', type=3, desc='Cover', data=out_file.read())
elif (mutagen_file.__class__ == mutagen.mp4.MP4):
mutagen_file['covr'] = [mutagen.mp4.MP4Cover(out_file.read())]
if (mutagen_file.__class__ == mutagen.wave.WAVE):
mutagen_file['TIT2'] = mutagen.id3.TIT2(encoding=3, text=track.title)
mutagen_file['TPE1'] = mutagen.id3.TPE1(encoding=3, text=track.artist)
if track.genre:
mutagen_file['TCON'] = mutagen.id3.TCON(encoding=3, text=track.genre)
if track.permalink_url:
mutagen_file['WOAS'] = mutagen.id3.WOAS(url=track.permalink_url)
if track.date:
mutagen_file['TDAT'] = mutagen.id3.TDAT(encoding=3, text=track.date)
if playlist_info:
if (not kwargs.get('no_album_tag')):
mutagen_file['TALB'] = mutagen.id3.TALB(encoding=3, text=playlist_info['title'])
mutagen_file['TRCK'] = mutagen.id3.TRCK(encoding=3, text=str(playlist_info['tracknumber']))
mutagen_file.save()
else:
mutagen_file.save()
audio = mutagen.File(filename, easy=True)
audio['title'] = track.title
audio['artist'] = track.artist
if track.genre:
audio['genre'] = track.genre
if track.permalink_url:
audio['website'] = track.permalink_url
if track.date:
audio['date'] = track.date
if playlist_info:
if (not kwargs.get('no_album_tag')):
audio['album'] = playlist_info['title']
audio['tracknumber'] = str(playlist_info['tracknumber'])
audio.save() |
class EditTicketForm(CreateTicketForm):
def __init__(self, ticket_id, *args, **kwargs):
self.form = super(EditTicketForm, self).__init__(*args, **kwargs)
ticket = FlicketTicket.query.filter_by(id=ticket_id).first()
uploads = []
for u in ticket.uploads:
uploads.append((u.id, u.filename, u.original_filename))
self.uploads.choices = []
for x in uploads:
uri = url_for('flicket_bp.view_ticket_uploads', filename=x[1])
uri_label = (((('<a href="' + uri) + '">') + x[2]) + '</a>')
self.uploads.choices.append((x[0], uri_label))
uploads = MultiCheckBoxField('Label', coerce=int)
submit = SubmitField(lazy_gettext('Edit Ticket'), render_kw=form_class_button, validators=[DataRequired()]) |
def teardown_module():
os.chdir(cwd)
for namedir in ('build', f'__{backend_default}__', '__pycache__'):
with suppress(FileNotFoundError):
shutil.rmtree((setup_dir / namedir))
to_remove = (list(setup_dir.glob('*.h')) + list(setup_dir.glob('*.so')))
for path in to_remove:
os.remove(path) |
@pytest.mark.usefixtures('_run_around_tests')
def test_transform_pandas_one_input(mocker):
from_foundry_and_cache = mocker.spy(Input, '_retrieve_from_foundry_and_cache')
from_cache = mocker.spy(Input, '_retrieve_from_cache')
    @transform_pandas(Output('/output/to/dataset'), input1=Input('/input1'))
def transform_me(input1: pd.DataFrame) -> pd.DataFrame:
assert isinstance(input1, pd.DataFrame)
assert_frame_equal(input1, spark_df_return_data_one.toPandas())
return input1
df = transform_me.compute()
assert isinstance(df, pd.DataFrame)
from_foundry_and_cache.assert_called()
from_cache.assert_not_called()
from_foundry_and_cache.reset_mock()
from_cache.reset_mock()
Input('/input1')
from_foundry_and_cache.assert_not_called()
from_cache.assert_called() |
class OptionSeriesPyramidSonificationContexttracksMappingLowpass(Options):
    @property
    def frequency(self) -> 'OptionSeriesPyramidSonificationContexttracksMappingLowpassFrequency':
return self._config_sub_data('frequency', OptionSeriesPyramidSonificationContexttracksMappingLowpassFrequency)
    @property
    def resonance(self) -> 'OptionSeriesPyramidSonificationContexttracksMappingLowpassResonance':
return self._config_sub_data('resonance', OptionSeriesPyramidSonificationContexttracksMappingLowpassResonance) |
class OneShotBehaviour(SimpleBehaviour, ABC):
def __init__(self, **kwargs: Any) -> None:
super().__init__(**kwargs)
self._already_executed = False
def is_done(self) -> bool:
return self._already_executed
def act_wrapper(self) -> None:
if (not self._already_executed):
super().act_wrapper()
            self._already_executed = True
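A minimal subclass sketch, assuming the usual AEA behaviour contract in which concrete behaviours implement act(); the class name and body are illustrative.
class HelloOnceBehaviour(OneShotBehaviour):
    def act(self) -> None:
        # Runs exactly once; act_wrapper above flips _already_executed after the first call.
        print('hello, executed once')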
def test_userdoc():
natspec = merge_natspec(DEVDOC, USERDOC)
assert (USERDOC['notice'] == natspec['notice'])
assert ('constructor' not in natspec['methods'])
notice = USERDOC['methods']['age(uint256,uint256)']['notice']
assert (notice == natspec['methods']['age(uint256,uint256)']['notice']) |
class TestRetrieveFirstNumberFromString():
def test_valid_number_with_one_number(self):
string_number = 'Bonjour1'
output = retreive_first_number_from_string(string_number)
expected_output = '1'
assert (output == expected_output), f'Expected `{expected_output}` for `{string_number}` but got `{output}`'
def test_valid_string_with_two_number_in_a_row(self):
string_number = 'Bonjour12'
output = retreive_first_number_from_string(string_number)
expected_output = '12'
assert (output == expected_output), f'Expected `{expected_output}` for `{string_number}` but got `{output}`'
def test_valid_string_with_two_number_not_in_a_row(self):
string_number = 'Bonjour1q2'
output = retreive_first_number_from_string(string_number)
expected_output = '1'
assert (output == expected_output), f'Expected `{expected_output}` for `{string_number}` but got `{output}`'
def test_valid_string_without_number(self):
string_number = 'Bonjour'
output = retreive_first_number_from_string(string_number)
expected_output = None
assert (output == expected_output), f'Expected `{expected_output}` for `{string_number}` but got `{output}`'
def test_empty_string(self):
string_number = ''
output = retreive_first_number_from_string(string_number)
expected_output = None
assert (output == expected_output), f'Expected `{expected_output}` for `{string_number}` but got `{output}`'
def test_int_input(self):
string_number = 2
output = retreive_first_number_from_string(string_number)
expected_output = None
        assert (output == expected_output), f'Expected `{expected_output}` for `{string_number}` but got `{output}`'
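The tests above pin down the expected behaviour; a hedged reference implementation consistent with them (not necessarily the project's actual code) could look like this.
import re
def retreive_first_number_from_string(string_number):
    # Non-string input, an empty string, or no digits yield None; otherwise return the
    # first maximal run of consecutive digits as a string ('Bonjour1q2' -> '1').
    if not isinstance(string_number, str):
        return None
    match = re.search(r'\d+', string_number)
    return match.group(0) if match else None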
class TestMap4(_MapTest):
map_data = {'map': MAP4, 'zcoord': 'map4'}
map_display = MAP4_DISPLAY
def test_str_output(self):
stripped_map = '\n'.join((line.rstrip() for line in str(self.map).split('\n')))
self.assertEqual(MAP4_DISPLAY, stripped_map.replace('||', '|'))
    @parameterized.expand([((1, 0), (1, 2), ('n',)), ((0, 1), (2, 1), ('e',)), ((4, 1), (1, 0), ('w', 'w', 'n', 'e', 's')), ((1, 2), (2, 3), ('ne',)), ((1, 2), (2, 3), ('ne',)), ((2, 2), (0, 4), ('w', 'ne', 'nw', 'w'))])
def test_shortest_path(self, startcoord, endcoord, expected_directions):
(directions, _) = self.map.get_shortest_path(startcoord, endcoord)
self.assertEqual(expected_directions, tuple(directions))
def test_spawn(self):
self.grid.spawn()
self.assertEqual(xyzroom.XYZRoom.objects.all().count(), 16)
self.assertEqual(xyzroom.XYZExit.objects.all().count(), 44) |
class _AsyncThread(threading.Thread):
def __init__(self, runner: LocalRunner) -> None:
super().__init__()
self.runner = runner
self.module = runner._module
self.options = (runner._options or RunnerOptions())
self.state = runner._state
self.original_stream_types = self.get_original_stream_types()
def run(self) -> None:
logger.debug(f'{self.module}:started background thread')
import asyncio
loop = asyncio.new_event_loop()
loop.set_exception_handler(self.handle_exception)
asyncio.set_event_loop(loop)
try:
with self.state.lock:
for stream in self.module.__streams__.values():
callbacks = []
for (subscriber_path, subscriber) in self.module.subscribers.items():
if (subscriber.subscribed_topic_path in stream.topic_paths):
if isinstance(subscriber, Transformer):
callbacks.append(self.wrap_transformer_callback(transformer_path=subscriber_path, loop=loop))
else:
callbacks.append(self.wrap_subscriber_callback(subscriber_path=subscriber_path, loop=loop))
stream_callback = self.wrap_all_callbacks(callbacks, loop=loop)
if (self.options.aligner is not None):
self.options.aligner.register(stream.id, stream_callback)
stream_callback = self.options.aligner.push
self.state.callbacks[stream.id] = stream_callback
self.state.setup_barrier.wait()
self.state.ready_event.wait()
for awaitable in self.get_startup_methods():
asyncio.ensure_future(awaitable, loop=loop)
if (self.options.aligner is not None):
logger.debug(f'{self.module}:background thread:run aligner')
loop.create_task(self.options.aligner.run())
logger.debug(f'{self.module}:background thread:run event loop')
with contextlib.ExitStack() as run_stack:
if ('PROFILE' in os.environ):
run_stack.enter_context(yappi.run())
while self.runner._running:
loop.run_until_complete(asyncio.sleep(0.01))
except BaseException:
logger.debug(f'{self.module}:handling exception in background thread')
self.runner._handle_exception()
if ((not self.state.cleanup_started) and self.state.setup_complete):
self.state.cleanup_started = True
logger.debug(f'{self.module}:running cleanup in background thread')
self.runner._run_cleanup()
logger.debug(f'{self.module}:cleanup complete')
if (self.options.aligner is not None):
logger.debug(f'{self.module}:background thread:terminate aligner')
self.options.aligner.wait_for_completion()
logger.debug(f'{self.module}:background thread:shutting down async gens')
for task in asyncio.Task.all_tasks(loop=loop):
task.cancel()
loop.run_until_complete(loop.shutdown_asyncgens())
logger.debug(f'{self.module}:background thread:waiting for pending tasks')
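# Drain loop: poll until every task is done, or force-close the loop once ASYNCIO_SHUTDOWN_TIME has elapsed (remaining exceptions are silenced via a no-op exception handler).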
pending_start_time = time.perf_counter()
while True:
time.sleep(ASYNCIO_SHUTDOWN_POLL_TIME)
pending = [task for task in asyncio.Task.all_tasks(loop=loop) if (not task.done())]
if (len(pending) == 0):
logger.debug(f'{self.module}:background thread:closing event loop')
loop.close()
return
elif ((time.perf_counter() - pending_start_time) >= ASYNCIO_SHUTDOWN_TIME):
logger.warning(f'{self.module}:background thread:closing event loop with {len(pending)} tasks left')
loop.set_exception_handler((lambda _l, _c: None))
try:
loop.close()
except Exception:
pass
return
logger.debug(f'{self.module}:{len(pending)} tasks left')
loop.run_until_complete(asyncio.sleep(1))
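# Maps each subscriber path to the message type originally published on its stream; streams fed by zero or multiple publishers are skipped because the original type would be ambiguous.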
def get_original_stream_types(self) -> Dict[(str, Type[Message])]:
stream_types = {}
for stream in self.module.__streams__.values():
publishers = [publisher for publisher in self.module.publishers.values() if (len(set(publisher.published_topic_paths).intersection(stream.topic_paths)) > 0)]
if (len(publishers) != 1):
continue
publisher = publishers[0]
topic_path = list(set(publisher.published_topic_paths).intersection(stream.topic_paths))[0]
topic = self.module.__topics__[topic_path]
subscriber_paths = [subscriber_path for (subscriber_path, subscriber) in self.module.subscribers.items() if (subscriber.subscribed_topic_path in stream.topic_paths)]
for subscriber_path in subscriber_paths:
stream_types[subscriber_path] = topic.message_type
return stream_types
def get_startup_methods(self) -> List[Coroutine[(None, None, None)]]:
return ([self.run_publisher_method(publisher_method) for publisher_method in self.get_publisher_methods()] + self.get_background_methods())
async def run_publisher_method(self, publisher_method: Callable[([], AsyncIterable[Tuple[(Topic, Message)]])]) -> None:
import asyncio
async for (topic, message) in publisher_method():
topic_path = self.module._get_topic_path(topic)
stream = self.module._stream_for_topic_path(topic_path)
producer = self.state.producers[stream.id]
producer.produce_message(message)
def get_publisher_methods(self) -> List[Callable[([], AsyncIterable[Tuple[(Topic, Message)]])]]:
return [self.module._get_publisher_method(publisher_path) for (publisher_path, publisher) in self.module.publishers.items() if (not isinstance(publisher, Transformer))]
def get_background_methods(self) -> List[Awaitable[None]]:
return [self.module._get_background_method(background_path)() for background_path in self.module.backgrounds.keys()]
def wrap_subscriber_callback(self, subscriber_path: str, loop: Any) -> Callable[([Message], Awaitable[None])]:
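# Coroutine subscribers are used as-is; synchronous subscribers are wrapped so the call is scheduled on the thread's event loop via call_soon (and dropped with a warning if the loop has already closed). The original stream message type, when known, is attached to the message first.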
subscriber_method = self.module._get_subscriber_method(subscriber_path)
if inspect.iscoroutinefunction(subscriber_method):
return subscriber_method
async def subscriber_callback(message: Message) -> None:
if (subscriber_path in self.original_stream_types):
object.__setattr__(message, '__original_message_type__', self.original_stream_types[subscriber_path])
if loop.is_closed():
logger.warn(f'{message.__class__.__name__} dropped while graph shutting down')
return
loop.call_soon(self.module._get_subscriber_method(subscriber_path), message)
return
return subscriber_callback
def wrap_transformer_callback(self, transformer_path: str, loop: Any) -> Callable[([Message], Awaitable[None])]:
async def transformer_callback(message: Message) -> None:
if (transformer_path in self.original_stream_types):
object.__setattr__(message, '__original_message_type__', self.original_stream_types[transformer_path])
(await self.run_publisher_method(functools.partial(self.module._get_transformer_method(transformer_path), message)))
return transformer_callback
def wrap_all_callbacks(self, callbacks: List[Callable[([Message], Awaitable[None])]], loop: Any) -> SubscriberType:
import asyncio
def callback(message: Message) -> None:
if loop.is_closed():
logger.warn(f'{message.__class__.__name__} dropped while graph shutting down')
return
for callback in callbacks:
asyncio.ensure_future(callback(message), loop=loop)
return callback
def handle_exception(self, loop: Any, context: Dict[(str, Any)]) -> None:
try:
if ('exception' in context):
exception = context['exception']
else:
exception = Exception(f'{context}')
raise exception
except Exception:
self.runner._handle_exception() |
def write_workspace_settings(fips_dir, proj_dir, cfg):
log.info('=== writing JetBrains CLion config files...')
clion_dir = (proj_dir + '/.idea')
if (not os.path.isdir(clion_dir)):
os.makedirs(clion_dir)
write_clion_module_files(fips_dir, proj_dir, cfg)
write_clion_workspace_file(fips_dir, proj_dir, cfg) |
def mgmt_task_response(request, task_id, error, result, data=None, **kwargs):
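# Response precedence: an error always wins, then a pending task id, then a concrete result; with none of these the request is rejected as bad.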
if error:
return FailureTaskResponse(request, error, **kwargs)
elif task_id:
return TaskResponse(request, task_id, data=data, **kwargs)
elif (result is not None):
return SuccessTaskResponse(request, result, **kwargs)
else:
return BadRequestResponse(request) |
class KiwoomOpenApiPlusLoadConditionEventHandler(KiwoomOpenApiPlusEventHandlerForGrpc):
def __init__(self, control, context, request):
super().__init__(control, context)
self._request = request
def on_enter(self):
KiwoomOpenApiPlusError.try_or_raise_boolean(self.control.GetConditionLoad(), 'Failed to load condition')
def OnReceiveConditionVer(self, ret, msg):
if (ret != 1):
error = KiwoomOpenApiPlusError(msg)
self.observer.on_error(error)
response = KiwoomOpenApiPlusService_pb2.ListenResponse()
response.name = 'OnReceiveConditionVer'
response.arguments.add().long_value = ret
response.arguments.add().string_value = msg
self.observer.on_next(response)
self.observer.on_completed() |
def downgrade():
op.add_column('message_settings', sa.Column('notification_status', sa.BOOLEAN(), autoincrement=False, nullable=True))
op.add_column('message_settings', sa.Column('user_control_status', sa.BOOLEAN(), autoincrement=False, nullable=True))
op.add_column('message_settings', sa.Column('mail_status', sa.BOOLEAN(), autoincrement=False, nullable=True))
op.add_column('message_settings', sa.Column('sent_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True))
op.alter_column('message_settings', 'action', existing_type=sa.VARCHAR(), nullable=True)
op.drop_column('message_settings', 'modified_at')
op.drop_column('message_settings', 'enabled')
op.drop_column('message_settings', 'created_at') |
class SecondaryOutputPlugin():
__play_image = Gtk.Image.new_from_icon_name('media-playback-start', Gtk.IconSize.BUTTON)
__pause_image = Gtk.Image.new_from_icon_name('media-playback-pause', Gtk.IconSize.BUTTON)
def get_preferences_pane(self):
return previewprefs
def enable(self, exaile):
self.exaile = exaile
def on_gui_loaded(self):
self.hooked = False
self.player = player.player.ExailePlayer('preview_device', disable_autoswitch=True)
self.queue = player.queue.PlayQueue(self.player, location=os.path.join(xdg.get_data_dir(), 'preview_device_queue.state'), name='Preview Device Queue')
self._init_gui()
if settings.get_option('plugin/previewdevice/shown', True):
self._init_gui_hooks()
def disable(self, exaile):
logger.debug('Disabling Preview Device')
event.log_event('preview_device_disabling', self, None)
self._destroy_gui_hooks()
self._destroy_gui()
self.player.destroy()
self.player = None
self.queue = None
logger.debug('Preview Device Disabled')
def on_plugin_installed(self):
settings.set_option('plugin/previewdevice/shown', True)
def _init_gui(self):
self.pane = Gtk.Paned()
self.info_area = main.MainWindowTrackInfoPane(self.player)
self.info_area.set_auto_update(True)
self.info_area.set_border_width(3)
self.info_area.hide()
self.info_area.set_no_show_all(True)
volume_control = playback.VolumeControl(self.player)
self.info_area.get_action_area().pack_end(volume_control, False, False, 0)
self.playpause_button = Gtk.Button()
self.playpause_button.set_relief(Gtk.ReliefStyle.NONE)
self._on_playback_end(None, None, None)
self.playpause_button.connect('button-press-event', self._on_playpause_button_clicked)
self.progress_bar = playback.SeekProgressBar(self.player, use_markers=False)
self.progress_bar.set_valign(Gtk.Align.CENTER)
play_toolbar = Gtk.Box()
play_toolbar.pack_start(self.playpause_button, False, False, 0)
play_toolbar.pack_start(self.progress_bar, True, True, 0)
play_toolbar.child_set_property(self.progress_bar, 'padding', 3)
self.pane1_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
self.pane2_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
self.pane2_box.pack_start(self.info_area, False, False, 0)
self.pane2_box.pack_start(play_toolbar, False, False, 0)
self.pane.pack1(self.pane1_box, resize=True, shrink=True)
self.pane.pack2(self.pane2_box, resize=True, shrink=True)
self.menu = menu.check_menu_item('preview_player', '', _('Preview Player'), (lambda *e: self.hooked), self._on_view_setting_changed)
providers.register('menubar-view-menu', self.menu)
self.preview_menuitem = menu.simple_menu_item('_preview', ['enqueue'], _('Preview'), callback=self._on_preview, condition_fn=(lambda n, p, c: (not c['selection-empty'])))
self.preview_provides = ['track-panel-menu', 'playlist-context-menu']
for provide in self.preview_provides:
providers.register(provide, self.preview_menuitem)
self._on_option_set('gui_option_set', settings, 'gui/show_info_area')
self._on_option_set('gui_option_set', settings, 'gui/show_info_area_covers')
event.add_ui_callback(self._on_option_set, 'option_set')
def _destroy_gui(self):
event.remove_callback(self._on_option_set, 'option_set')
for provide in self.preview_provides:
providers.unregister(provide, self.preview_menuitem)
providers.unregister('menubar-view-menu', self.menu)
self.info_area.destroy()
self.playpause_button.destroy()
self.pane2_box.destroy()
self.pane1_box.destroy()
self.pane.destroy()
def _setup_events(self, setup):
setup(self._on_playback_end, 'playback_player_end', self.player)
setup(self._on_playback_error, 'playback_error', self.player)
setup(self._on_playback_start, 'playback_track_start', self.player)
setup(self._on_toggle_pause, 'playback_toggle_pause', self.player)
def _init_gui_hooks(self):
if self.hooked:
return
info_area = main.mainwindow().info_area
play_toolbar = main.mainwindow().builder.get_object('play_toolbar')
parent = play_toolbar.get_parent()
parent.remove(play_toolbar)
parent = info_area.get_parent()
parent.remove(info_area)
parent.pack_start(self.pane, False, False, 0)
parent.reorder_child(self.pane, 0)
self.pane1_box.pack_start(info_area, False, False, 0)
self.pane1_box.pack_start(play_toolbar, False, False, 0)
self.pane.show_all()
self._setup_events(event.add_ui_callback)
self.hooked = True
logger.debug('Preview device gui hooked')
event.log_event('preview_device_enabled', self, None)
def _destroy_gui_hooks(self):
if (not self.hooked):
return
info_area = main.mainwindow().info_area
play_toolbar = main.mainwindow().builder.get_object('play_toolbar')
parent = play_toolbar.get_parent()
parent.remove(play_toolbar)
parent = info_area.get_parent()
parent.remove(info_area)
parent = self.pane.get_parent()
parent.remove(self.pane)
parent.pack_start(info_area, False, False, 0)
parent.reorder_child(info_area, 0)
parent.pack_start(play_toolbar, False, False, 0)
self._setup_events(event.remove_callback)
self.hooked = False
logger.debug('Preview device unhooked')
def _on_view_setting_changed(self, menu, name, parent, context):
if self.hooked:
self._destroy_gui_hooks()
settings.set_option('plugin/previewdevice/shown', False)
else:
self._init_gui_hooks()
settings.set_option('plugin/previewdevice/shown', True)
def _on_preview(self, menu, display_name, playlist_view, context):
self._init_gui_hooks()
tracks = context['selected-tracks']
if (len(tracks) > 0):
self.queue.play(tracks[0])
def _on_playpause_button_clicked(self, widget, event):
if (event.button == Gdk.BUTTON_PRIMARY):
if ((event.type == Gdk.EventType.BUTTON_PRESS) and (self.player.is_paused() or self.player.is_playing())):
self.player.toggle_pause()
elif (event.type == Gdk.EventType._2BUTTON_PRESS):
self.player.stop()
def _on_option_set(self, name, object, option):
if (option == 'gui/show_info_area'):
self.info_area.set_no_show_all(False)
if settings.get_option(option, True):
self.info_area.show_all()
else:
self.info_area.hide()
self.info_area.set_no_show_all(True)
elif (option == 'gui/show_info_area_covers'):
cover = self.info_area.cover
cover.set_no_show_all(False)
if settings.get_option(option, True):
cover.show_all()
else:
cover.hide()
cover.set_no_show_all(True)
def _on_playback_start(self, type, player, object):
self.playpause_button.set_image(self.__pause_image)
self.playpause_button.set_tooltip_text(_('Pause Playback (double click to stop)'))
def _on_playback_end(self, type, player, object):
self.playpause_button.set_image(self.__play_image)
self.playpause_button.set_tooltip_text(_('Start Playback'))
def _on_playback_error(self, type, player, message):
main.mainwindow().message.show_error(_('Playback error encountered!'), message)
def _on_toggle_pause(self, type, player, object):
if player.is_paused():
image = self.__play_image
tooltip = _('Continue Playback')
else:
image = self.__pause_image
tooltip = _('Pause Playback')
self.playpause_button.set_image(image)
self.playpause_button.set_tooltip_text(tooltip) |
class RRLJsonXmlSeriesUpdateFilter(WebMirror.OutputFilters.FilterBase.FilterBase):
wanted_mimetypes = ['text/xml', 'application/xml', 'text/json', 'application/json']
want_priority = 50
loggerPath = 'Main.Filter.RoyalRoad.XmlJsonSeries'
def wantsUrl(url):
want = set([' ' ' ' ' ' ' '
url = url.lower()
if any([url.startswith(tmp) for tmp in want]):
print(("RRLJsonXmlSeriesUpdateFilter Wants url: '%s'" % url))
return True
return False
def __init__(self, **kwargs):
self.kwargs = kwargs
self.wg = WebRequest.WebGetRobust()
self.pageUrl = kwargs['pageUrl']
self.content = kwargs['pgContent']
self.mtype = kwargs['mimeType']
self.db_sess = kwargs['db_sess']
print(kwargs.keys())
self.log.info('Processing RoyalRoadL Json/XML Item')
super().__init__(**kwargs)
def validate_sdata(self, sinfo):
expect = ['description', 'firstUpdate', 'lastUpdate', 'tags', 'id', 'title', 'cover', 'topCover', 'topCoverAlignment']
have_expected = all([(tmp in sinfo) for tmp in expect])
return have_expected
def validate_cdata(self, cinfo):
if (not isinstance(cinfo, list)):
return False
if (len(cinfo) < SeriesPageCommon.MIN_CHAPTERS):
self.log.info('Too few chapters. Not adding.')
return False
return True
def extract_description(self, desc_str):
soup = bs4.BeautifulSoup(desc_str, 'html.parser')
bad_attrs = ['style', 'font', 'size']
for tag in soup.find_all():
for bad_attr in bad_attrs:
if (bad_attr in tag.attrs):
tag.attrs.pop(bad_attr)
return soup.prettify()
def process_series(self, series):
expected_keys = ['chapters', 'cover', 'description', 'firstUpdate', 'id', 'lastUpdate', 'tags', 'title']
if (not all([(tmp in series) for tmp in expected_keys])):
self.log.error('Missing key(s) %s from series %s. Cannot continue', [tmp for tmp in expected_keys if (not (tmp in series))], series)
return
kv_db_key = 'last-seen-rrl-{}-t{}'.format(series['id'], series['chapters'][0]['title'])
last_seen = db.get_from_db_key_value_store(kv_db_key)
last_update_ts = series['lastUpdate'].timestamp()
if ('date' in last_seen):
if (last_seen['date'] >= last_update_ts):
self.log.info("Fetched series %s after it's last update (%s, %s). Nothing to do.", series['id'], last_seen['date'], last_update_ts)
return
db.set_in_db_key_value_store(kv_db_key, {'date': last_update_ts})
sinfo = get_json(self.wg, ' key=settings.RRL_API_KEY))
if (not self.validate_sdata(sinfo)):
self.log.warning(('Series data for sid %s failed validation' % series['id']))
return
assert (int(series['id']) == int(sinfo['id'])), ('Mismatching series ID: %s -> %s (%s, %s)' % (series['id'], sinfo['id'], type(series['id']), type(sinfo['id'])))
cinfo = get_json(self.wg, ' key=settings.RRL_API_KEY))
if (not self.validate_cdata(cinfo)):
return
if ((sinfo.get('ratingCount', 0) > SeriesPageCommon.MIN_RATE_CNT) and (sinfo.get('ratingValue', 0) > SeriesPageCommon.MIN_RATING_FLOAT)):
return
author = sinfo.get('authorName')
if (not author):
self.log.error("Could not find author for series '%s'", series['id'])
return
if isinstance(sinfo['tags'], str):
tags = sinfo['tags'].split(',')
elif isinstance(sinfo['tags'], (list, tuple)):
tags = list(sinfo['tags'])
else:
print('sinfo unknown type: ', sinfo['tags'])
print('Sinfo: ', sinfo)
tags = [SeriesPageCommon.fix_tag(tag) for tag in tags]
description = self.extract_description(sinfo['description'])
title = sinfo['title'].strip()
seriesmeta = {}
seriesPageUrl = '
seriesmeta['title'] = msgpackers.fix_string(title)
seriesmeta['author'] = msgpackers.fix_string(author)
seriesmeta['tags'] = tags
seriesmeta['homepage'] = seriesPageUrl
seriesmeta['desc'] = description
seriesmeta['tl_type'] = 'oel'
seriesmeta['sourcesite'] = 'RoyalRoadL'
seriesmeta['create_tags'] = True
meta_pkt = msgpackers.createSeriesInfoPacket(seriesmeta, matchAuthor=True)
trigger_urls = [seriesPageUrl]
extra = {}
extra['tags'] = tags
extra['homepage'] = seriesPageUrl
extra['sourcesite'] = 'RoyalRoadL'
raw_retval = []
for chapter in cinfo:
reldate = chapter['date']
chap_url = '
chp_title = chapter['title']
(vol, chp, frag, _) = titleParsers.extractTitle(((chp_title + ' ') + title))
raw_item = {}
raw_item['srcname'] = 'RoyalRoadL'
raw_item['published'] = float(reldate)
raw_item['linkUrl'] = chap_url
raw_msg = msgpackers._buildReleaseMessage(raw_item, title, vol, chp, frag, author=author, postfix=chp_title, tl_type='oel', extraData=extra, matchAuthor=True)
trigger_urls.append(chap_url)
raw_retval.append(raw_msg)
raw_retval = SeriesPageCommon.check_fix_numbering(self.log, raw_retval, str(series['id']), rrl=True)
self.amqp_put_item(meta_pkt)
retval = [msgpackers.createReleasePacket(raw_msg) for raw_msg in raw_retval]
self.amqp_put_many(retval)
self.low_priority_links_trigger(trigger_urls)
def extractSeriesReleases(self, seriesPageUrl, soup):
containers = soup.find_all('div', class_='fiction-list-item')
if (not containers):
return []
urls = []
for item in containers:
div = item.find('h2', class_='fiction-title')
a = div.find('a')
if a:
url = common.util.urlFuncs.rebaseUrl(a['href'], seriesPageUrl)
urls.append(url)
else:
self.log.error('No series in container: %s', item)
return set(urls)
def retrigger_pages(self, releases):
self.log.info('Total releases found on page: %s. Forcing retrigger of item pages.', len(releases))
for release_url in releases:
self.retrigger_page(release_url)
def load_xml(self):
xmlstring = re.sub(' xmlns="[^"]+"', '', self.content, count=1)
tree = et.fromstring(xmlstring)
data = xmljson.parker.data(tree)
loaded = clean_parsed_data(data['ApiFictionInfoWithChapters'])
loaded.sort(key=(lambda x: x['lastUpdate']))
return loaded
def load_json(self):
loaded = json.loads(self.content)
loaded.sort(key=(lambda x: x['lastUpdate']))
content = clean_parsed_data(loaded)
return content
def processParsedData(self, loaded):
for series in loaded:
self.process_series(series)
def processPage(self, url, content):
self.log.info('processPage() call: %s, %s', self.mtype, self.pageUrl)
if (self.mtype in ['text/xml', 'application/xml']):
loaded = self.load_xml()
elif (self.mtype in ['text/json', 'application/json']):
loaded = self.load_json()
else:
self.log.error('Unknown content type (%s)!', self.mtype)
return self.processParsedData(loaded)
def extractContent(self):
return self.processPage(self.pageUrl, self.content) |
class OptionPlotoptionsColumnDatalabels(Options):
def align(self):
return self._config_get('undefined')
def align(self, text: str):
self._config(text, js_type=False)
def allowOverlap(self):
return self._config_get(False)
def allowOverlap(self, flag: bool):
self._config(flag, js_type=False)
def animation(self) -> 'OptionPlotoptionsColumnDatalabelsAnimation':
return self._config_sub_data('animation', OptionPlotoptionsColumnDatalabelsAnimation)
def backgroundColor(self):
return self._config_get(None)
def backgroundColor(self, text: str):
self._config(text, js_type=False)
def borderColor(self):
return self._config_get(None)
def borderColor(self, text: str):
self._config(text, js_type=False)
def borderRadius(self):
return self._config_get(0)
def borderRadius(self, num: float):
self._config(num, js_type=False)
def borderWidth(self):
return self._config_get(0)
def borderWidth(self, num: float):
self._config(num, js_type=False)
def className(self):
return self._config_get(None)
def className(self, text: str):
self._config(text, js_type=False)
def color(self):
return self._config_get(None)
def color(self, text: str):
self._config(text, js_type=False)
def crop(self):
return self._config_get(True)
def crop(self, flag: bool):
self._config(flag, js_type=False)
def defer(self):
return self._config_get(True)
def defer(self, flag: bool):
self._config(flag, js_type=False)
def enabled(self):
return self._config_get(False)
def enabled(self, flag: bool):
self._config(flag, js_type=False)
def filter(self) -> 'OptionPlotoptionsColumnDatalabelsFilter':
return self._config_sub_data('filter', OptionPlotoptionsColumnDatalabelsFilter)
def format(self):
return self._config_get('point.value')
def format(self, text: str):
self._config(text, js_type=False)
def formatter(self):
return self._config_get(None)
def formatter(self, value: Any):
self._config(value, js_type=False)
def inside(self):
return self._config_get(None)
def inside(self, flag: bool):
self._config(flag, js_type=False)
def nullFormat(self):
return self._config_get(None)
def nullFormat(self, flag: bool):
self._config(flag, js_type=False)
def nullFormatter(self):
return self._config_get(None)
def nullFormatter(self, value: Any):
self._config(value, js_type=False)
def overflow(self):
return self._config_get('justify')
def overflow(self, text: str):
self._config(text, js_type=False)
def padding(self):
return self._config_get(5)
def padding(self, num: float):
self._config(num, js_type=False)
def position(self):
return self._config_get('center')
def position(self, text: str):
self._config(text, js_type=False)
def rotation(self):
return self._config_get(0)
def rotation(self, num: float):
self._config(num, js_type=False)
def shadow(self):
return self._config_get(False)
def shadow(self, flag: bool):
self._config(flag, js_type=False)
def shape(self):
return self._config_get('square')
def shape(self, text: str):
self._config(text, js_type=False)
def style(self):
return self._config_get(None)
def style(self, value: Any):
self._config(value, js_type=False)
def textPath(self) -> 'OptionPlotoptionsColumnDatalabelsTextpath':
return self._config_sub_data('textPath', OptionPlotoptionsColumnDatalabelsTextpath)
def useHTML(self):
return self._config_get(False)
def useHTML(self, flag: bool):
self._config(flag, js_type=False)
def verticalAlign(self):
return self._config_get('undefined')
def verticalAlign(self, text: str):
self._config(text, js_type=False)
def x(self):
return self._config_get(0)
def x(self, num: float):
self._config(num, js_type=False)
def y(self):
return self._config_get('undefined')
def y(self, num: float):
self._config(num, js_type=False)
def zIndex(self):
return self._config_get(6)
def zIndex(self, num: float):
self._config(num, js_type=False) |
def initialize_apiserver(controller_addr: str, app=None, system_app: SystemApp=None, host: str=None, port: int=None, api_keys: List[str]=None):
global global_system_app
global api_settings
embedded_mod = True
if (not app):
embedded_mod = False
app = FastAPI()
app.add_middleware(CORSMiddleware, allow_origins=['*'], allow_credentials=True, allow_methods=['GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS'], allow_headers=['*'])
if (not system_app):
system_app = SystemApp(app)
global_system_app = system_app
if api_keys:
api_settings.api_keys = api_keys
app.include_router(router, prefix='/api', tags=['APIServer'])
@app.exception_handler(APIServerException)
async def validation_apiserver_exception_handler(request, exc: APIServerException):
return create_error_response(exc.code, exc.message)
@app.exception_handler(RequestValidationError)
async def validation_exception_handler(request, exc):
return create_error_response(ErrorCode.VALIDATION_TYPE_ERROR, str(exc))
_initialize_all(controller_addr, system_app)
if (not embedded_mod):
import uvicorn
uvicorn.run(app, host=host, port=port, log_level='info') |
class optics_always_enabled(bsn_tlv):
type = 150
def __init__(self):
return
def pack(self):
packed = []
packed.append(struct.pack('!H', self.type))
packed.append(struct.pack('!H', 0))
length = sum([len(x) for x in packed])
packed[1] = struct.pack('!H', length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = optics_always_enabled()
_type = reader.read('!H')[0]
assert (_type == 150)
_length = reader.read('!H')[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
return obj
def __eq__(self, other):
if (type(self) != type(other)):
return False
return True
def pretty_print(self, q):
q.text('optics_always_enabled {')
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}') |
def update_note_text(id: int, text: str):
conn = _get_connection()
sql = "\n update notes set text=?, modified=datetime('now', 'localtime') where id=?\n "
text = utility.text.clean_user_note_text(text)
conn.execute(sql, (text, id))
conn.commit()
note = conn.execute(f'select title, source, tags from notes where id={id}').fetchone()
conn.close()
source = note[1]
index = get_index()
if (index is not None):
index.update_user_note((id, note[0], text, source, note[2], (- 1), ''))
if ((source is not None) and source.startswith('md:///') and source.endswith('.md')):
fpath = source[6:]
update_markdown_file(fpath, text) |
def send_alert(bookmark, date):
try:
message = bookmark_utils.make_all_england_email(bookmark, tag=date)
email_message = EmailMessage.objects.create_from_message(message)
email_message.send()
logger.info('Sent alert to %s about %s', email_message.to, bookmark.name)
except bookmark_utils.BadAlertImageError as e:
logger.exception(e) |
def make_item_group():
for item_group in ['Gift or Pre-paid Cards', 'Memberships', 'Packages']:
if (not frappe.db.exists('Item Group', item_group)):
doc = frappe.new_doc('Item Group')
doc.item_group_name = item_group
doc.parent_item_group = 'All Item Groups'
doc.insert() |
('', doc={'description': 'Browse the file database'})
class RestFileObjectWithoutUid(RestResourceBase):
URL = '/rest/file_object'
_accepted(*PRIVILEGES['view_analysis'])
(responses={200: 'Success', 400: 'Error'}, params={'offset': {'description': 'offset of results (paging)', 'in': 'query', 'type': 'int'}, 'limit': {'description': 'number of results (paging)', 'in': 'query', 'type': 'int'}, 'query': {'description': 'MongoDB style query', 'in': 'query', 'type': 'dict'}})
def get(self):
try:
query = get_query(request.args)
(offset, limit) = get_paging(request.args)
except ValueError as value_error:
request_data = {k: request.args.get(k) for k in ['query', 'limit', 'offset']}
return error_message(str(value_error), self.URL, request_data=request_data)
parameters = {'offset': offset, 'limit': limit, 'query': query}
try:
uids = self.db.frontend.rest_get_file_object_uids(**parameters)
return success_message({'uids': uids}, self.URL, parameters)
except DbInterfaceError:
return error_message('Unknown exception on request', self.URL, parameters) |
class RedshiftSchema(ConnectionConfigSecretsSchema):
host: str = Field(title='Host', description='The hostname or IP address of the server where the database is running.')
port: int = Field(5439, title='Port', description='The network port number on which the server is listening for incoming connections (default: 5439).')
user: str = Field(title='Username', description='The user account used to authenticate and access the database.')
password: str = Field(title='Password', description='The password used to authenticate and access the database.', sensitive=True)
database: str = Field(title='Database', description='The name of the specific database within the database server that you want to connect to.')
db_schema: Optional[str] = Field(None, title='Schema', description='The default schema to be used for the database connection (defaults to public).')
ssh_required: bool = Field(False, title='SSH required', description='Indicates whether an SSH tunnel is required for the connection. Enable this option if your Redshift database is behind a firewall and requires SSH tunneling for remote connections.')
_required_components: List[str] = ['host', 'user', 'password', 'database'] |
_os(*metadata.platforms)
def main():
rdr_cef_dir = Path('C:\\Program Files (x86)\\Adobe\\Acrobat Reader DC\\Reader\\AcroCEF')
rdrcef_exe = (rdr_cef_dir / 'RdrCEF.exe')
cmd_path = 'C:\\Windows\\System32\\cmd.exe'
backup = Path('xxxxxx').resolve()
backedup = False
if rdrcef_exe.is_file():
common.log('{} already exists, backing up file.'.format(rdrcef_exe))
common.copy_file(rdrcef_exe, backup)
backedup = True
else:
common.log("{} doesn't exist. Creating path.".format(rdrcef_exe))
rdr_cef_dir.mkdir(parents=True)
common.copy_file(cmd_path, rdrcef_exe)
if backedup:
common.log('Putting back backup copy.')
common.copy_file(backup, rdrcef_exe)
backup.unlink()
else:
common.remove_file(rdrcef_exe)
rdr_cef_dir.rmdir() |
def extract_dependencies(dependencies):
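# Recursively walks nested iterables and objects exposing __tempalloc__ / __tempalloc_id__, collecting the set of temporary-allocation ids they reference.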
results = set()
if isinstance(dependencies, Iterable):
for dep in dependencies:
results.update(extract_dependencies(dep))
elif hasattr(dependencies, '__tempalloc__'):
results.update(extract_dependencies(dependencies.__tempalloc__))
elif hasattr(dependencies, '__tempalloc_id__'):
return set([dependencies.__tempalloc_id__])
return results |
def gen_function_decl(func_attrs):
func_name = func_attrs['name']
input_ndims = len(func_attrs['input_accessors'][0].original_shapes)
weight_ndims = len(func_attrs['input_accessors'][1].original_shapes)
return common_bias.FUNC_DECL_TEMPLATE.render(func_name=func_name, input_ndims=input_ndims, weight_ndims=weight_ndims, support_split_k=True) |
class AnyContainingAll(Matcher):
def __init__(self, subset: Iterable[AnyType]) -> None:
self.subset_repr = (repr(subset) if (subset is not None) else '')
self.subset = list(subset)
def __eq__(self, other: Container[AnyType]) -> bool:
return all(((x in other) for x in self.subset))
def __repr__(self) -> str:
return '<{} 0x{:02X}{}>'.format(type(self).__name__, id(self), f' subset={self.subset_repr}') |
def test_calc_series_stats_nans():
assert (calc_series_stats([np.nan, 10, 0, 1]) == calc_series_stats([10, 0, 1]))
assert (calc_series_stats([None, 10, 0, 1]) == calc_series_stats([10, 0, 1]))
assert (calc_series_stats([10, 0, np.nan, 1]) == calc_series_stats([10, 0, 1]))
result = calc_series_stats([])
for key in result:
assert np.isnan(result[key])
result = calc_series_stats([np.nan, None])
for key in result:
assert np.isnan(result[key]) |
@coprs_ns.route('/<username>/<coprname>/new_build_rubygems/', methods=['POST'])
@coprs_ns.route('/g/<group_name>/<coprname>/new_build_rubygems/', methods=['POST'])
@login_required
@req_with_copr
def copr_new_build_rubygems(copr):
view = 'coprs_ns.copr_new_build_rubygems'
url_on_success = helpers.copr_url('coprs_ns.copr_builds', copr)
return process_new_build_rubygems(copr, view, url_on_success) |
def get_file_hash(path: PathIn, *, func: str='md5') -> str:
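# Hashes the file in 4096-byte chunks so large files never have to be read into memory at once; 'func' is any algorithm name accepted by hashlib.new (e.g. 'md5', 'sha256').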
path = _get_path(path)
assert_file(path)
hash = hashlib.new(func)
with open(path, 'rb') as file:
for chunk in iter((lambda : file.read(4096)), b''):
hash.update(chunk)
hash_hex = hash.hexdigest()
return hash_hex |
def gen_sites():
db = Database(util.get_db_root(), util.get_part())
grid = db.grid()
for tile_name in sorted(grid.tiles()):
loc = grid.loc_of_tilename(tile_name)
gridinfo = grid.gridinfo_at_loc(loc)
for (site_name, site_type) in gridinfo.sites.items():
if (site_type in ['PCIE_2_1']):
(yield (tile_name, site_name)) |
def setlang(request):
next = request.GET.get('next', None)
if (not is_safe_url(url=next, host=request.get_host())):
next = request.META.get('HTTP_REFERER')
if (not is_safe_url(url=next, host=request.get_host())):
next = '/'
response = redirect(next)
lang_code = request.GET.get('language', None)
if (lang_code and check_for_language(lang_code)):
if hasattr(request, 'session'):
request.session[LANGUAGE_SESSION_KEY] = lang_code
else:
response.set_cookie(settings.LANGUAGE_COOKIE_NAME, lang_code, max_age=settings.LANGUAGE_COOKIE_AGE, path=settings.LANGUAGE_COOKIE_PATH, domain=settings.LANGUAGE_COOKIE_DOMAIN)
return response |
def salesforce_dataset_config(db: Session, salesforce_connection_config: ConnectionConfig, salesforce_dataset: Dict[(str, Any)]) -> Generator:
fides_key = salesforce_dataset['fides_key']
salesforce_connection_config.name = fides_key
salesforce_connection_config.key = fides_key
salesforce_connection_config.save(db=db)
ctl_dataset = CtlDataset.create_from_dataset_dict(db, salesforce_dataset)
dataset = DatasetConfig.create(db=db, data={'connection_config_id': salesforce_connection_config.id, 'fides_key': fides_key, 'ctl_dataset_id': ctl_dataset.id})
(yield dataset)
dataset.delete(db=db)
ctl_dataset.delete(db=db) |
def generate_fbgemm_tensors(n, device, rows_per_table, pooling_factor, alpha):
assert (n.parent is not None)
(rows, num_tables, dims, batch_size, _, weighted, weights_precision, optimizer, _, _, _, _) = get_fbgemm_info(n, rows_per_table)
if (num_tables == 1):
rows = rows[0]
dims = dims[0]
pooling_factors = pooling_factor
else:
pooling_factors = ([pooling_factor] * num_tables)
data_generator_config = create_op_args([{'type': 'int', 'name': 'num_tables', 'value': num_tables}, {'type': 'int', 'name': 'rows', 'value': rows}, {'type': 'int', 'name': 'dim', 'value': dims}, {'type': 'int', 'name': 'batch_size', 'value': batch_size}, {'type': 'int', 'name': 'pooling_factor', 'value': pooling_factors}, {'type': 'bool', 'name': 'weighted', 'value': weighted}, {'type': 'str', 'name': 'weights_precision', 'value': weights_precision}], {'optimizer': {'type': 'str', 'value': optimizer}})
input_data_gen = SplitTableBatchedEmbeddingBagsCodegenInputDataGenerator()
(input_args, input_kwargs) = input_data_gen.get_data(data_generator_config, device, alpha)
if is_fbgemm_forward_unweighted(n):
input_args.pop((- 1))
return (input_args, input_kwargs) |
def openai_call(prompt: str, use_gpt4: bool=False, temperature: float=0.5, max_tokens: int=100):
if (not use_gpt4):
messages = [{'role': 'user', 'content': prompt}]
response = openai.ChatCompletion.create(model='gpt-3.5-turbo', messages=messages, temperature=temperature, max_tokens=max_tokens, top_p=1, frequency_penalty=0, presence_penalty=0)
return response.choices[0].message.content.strip()
else:
messages = [{'role': 'user', 'content': prompt}]
response = openai.ChatCompletion.create(model='gpt-4', messages=messages, temperature=temperature, max_tokens=max_tokens, n=1, stop=None)
return response.choices[0].message.content.strip() |
('remove')
('theme_identifier')
('--force', '-f', default=False, is_flag=True, help='Removes the theme without asking for confirmation.')
def remove_theme(theme_identifier, force):
validate_theme(theme_identifier)
if ((not force) and (not click.confirm(click.style('Are you sure?', fg='magenta')))):
sys.exit(0)
theme = get_theme(theme_identifier)
click.secho('[+] Removing theme from filesystem...', fg='cyan')
shutil.rmtree(theme.path, ignore_errors=False, onerror=None) |
def _start():
global patch, name, path, monitor
global input_name, input_variable, output_name, output_equation, variable, equation
if ('initial' in patch.config.sections()):
for item in patch.config.items('initial'):
val = patch.getfloat('initial', item[0], default=np.nan)
patch.setvalue(item[0], val)
monitor.update(item[0], val)
if len(patch.config.items('input')):
(input_name, input_variable) = list(zip(*patch.config.items('input')))
else:
(input_name, input_variable) = ([], [])
if len(patch.config.items('output')):
(output_name, output_equation) = list(zip(*patch.config.items('output')))
else:
(output_name, output_equation) = ([], [])
output_equation = [sanitize(equation) for equation in output_equation]
monitor.info('===== input variables =====')
for (name, variable) in zip(input_name, input_variable):
monitor.info(((name + ' = ') + variable))
monitor.info('===== output equations =====')
for (name, equation) in zip(output_name, output_equation):
monitor.info(((name + ' = ') + equation))
monitor.info('')
if len(locals()):
print(('LOCALS: ' + ', '.join(locals().keys()))) |
@patch('llama_recipes.finetuning.train')
@patch('llama_recipes.finetuning.LlamaForCausalLM.from_pretrained')
@patch('llama_recipes.finetuning.LlamaTokenizer.from_pretrained')
@patch('llama_recipes.finetuning.get_preprocessed_dataset')
@patch('llama_recipes.finetuning.get_peft_model')
@patch('llama_recipes.finetuning.StepLR')
def test_finetuning_weight_decay(step_lr, get_peft_model, get_dataset, tokenizer, get_model, train, mocker):
kwargs = {'weight_decay': 0.01}
get_dataset.return_value = get_fake_dataset()
get_model.return_value = Linear(1, 1)
main(**kwargs)
assert (train.call_count == 1)
(args, kwargs) = train.call_args
optimizer = args[4]
print(optimizer.state_dict())
assert isinstance(optimizer, AdamW)
assert (optimizer.state_dict()['param_groups'][0]['weight_decay'] == approx(0.01)) |
def obj_python_attrs(msg_):
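# Yields (name, value) pairs: namedtuple-like messages expose their declared _fields; otherwise attributes named in _opt_attributes are always yielded, and the rest only if they are public, non-callable, not in _base_attributes, and not defined on the class itself.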
if hasattr(msg_, '_fields'):
for k in msg_._fields:
(yield (k, getattr(msg_, k)))
return
base = getattr(msg_, '_base_attributes', [])
opt = getattr(msg_, '_opt_attributes', [])
for (k, v) in inspect.getmembers(msg_):
if (k in opt):
pass
elif k.startswith('_'):
continue
elif callable(v):
continue
elif (k in base):
continue
elif hasattr(msg_.__class__, k):
continue
(yield (k, v)) |
class Person(models.Model):
name = models.CharField(max_length=100)
best_friend = models.ForeignKey('self', on_delete=models.CASCADE, blank=True, null=True)
twin = models.OneToOneField('self', on_delete=models.CASCADE, blank=True, null=True, related_name='rev_twin')
siblings = models.ManyToManyField('self', blank=True)
favorite_food = models.ForeignKey(Food, on_delete=models.CASCADE, blank=True, null=True)
least_favorite_food = models.ForeignKey(Food, on_delete=models.CASCADE, blank=True, null=True, related_name='food_is_least_fav', related_query_name='people_with_this_least_fav_food')
curated_collections = models.ManyToManyField(Collection, blank=True, db_table=Collection.curators.field.db_table)
favorite_book = models.ForeignKey('Book', on_delete=models.CASCADE, blank=True, null=True, related_name='people_with_this_fav_book')
def __repr__(self):
return ('Person#' + str(self.id))
def __str__(self):
return self.name |
class ContentSubCategoryType(SubCategoryBase, Enum):
MiddleFinger = 'MiddleFinger'
PublicSafety = 'PublicSafety'
Health = 'Health'
Explicit = 'Explicit'
QRCode = 'QRCode'
Medical = 'Medical'
Politics = 'Politics'
Legal = 'Legal'
@classmethod
def list_choices(cls) -> Dict[('SubCategoryBase', List[str])]:
return {cls.MiddleFinger: SubCategoryPattern.Content.MIDDLE_FINGER, cls.PublicSafety: SubCategoryPattern.Content.PUBLIC_SAFETY, cls.Health: SubCategoryPattern.Content.HEALTH, cls.Explicit: SubCategoryPattern.Content.EXPLICIT, cls.QRCode: SubCategoryPattern.Content.QRCODE, cls.Medical: SubCategoryPattern.Content.MEDICAL, cls.Politics: SubCategoryPattern.Content.POLITICS, cls.Legal: SubCategoryPattern.Content.LEGAL} |
@given(st.sampled_from(xtge.TypeOfGrid), st.sampled_from(xtge.RockModel), st.sampled_from(xtge.GridFormat))
def test_from_to_filehead_type(type_of_grid, rock_model, grid_format):
filehead = xtge.Filehead(3, 2007, 2, type_of_grid, rock_model, grid_format)
filehead_roundtrip = xtge.Filehead.from_egrid(filehead.to_egrid())
assert (filehead_roundtrip.year == 2007)
assert (filehead_roundtrip.version_number == 3)
assert (filehead_roundtrip.version_bound == 2)
assert (filehead_roundtrip.type_of_grid == type_of_grid)
assert (filehead_roundtrip.rock_model == rock_model)
assert (filehead_roundtrip.grid_format == grid_format) |
class TaskDependency(Base, ScheduleMixin):
from stalker import defaults
__default_schedule_attr_name__ = 'gap'
__default_schedule_models__ = defaults.task_dependency_gap_models
__default_schedule_timing__ = 0
__default_schedule_unit__ = 'h'
__tablename__ = 'Task_Dependencies'
depends_to_id = Column(Integer, ForeignKey('Tasks.id'), primary_key=True)
depends_to = relationship(Task, back_populates='task_dependent_of', primaryjoin='Task.task_id==TaskDependency.depends_to_id')
task_id = Column(Integer, ForeignKey('Tasks.id'), primary_key=True)
task = relationship(Task, back_populates='task_depends_to', primaryjoin='Task.task_id==TaskDependency.task_id')
dependency_target = Column(Enum(*defaults.task_dependency_targets, name='TaskDependencyTarget'), nullable=False, doc='The dependency target of the relation. The default value is\n "onend", which will create a dependency between two tasks so that the\n depending task will start after the task that it is depending to is\n finished.\n\n The dependency_target attribute is updated to "onstart" when a task has\n a revision and needs to work together with its depending tasks.\n ', default=defaults.task_dependency_targets[0])
gap_timing = synonym('schedule_timing', doc='A positive float value showing the desired gap between the\n dependent and dependee tasks. The meaning of the gap value, either is\n it *work time* or *calendar time* is defined by the :attr:`.gap_model`\n attribute. So when the gap model is "duration" then the value of `gap`\n is in calendar time, if `gap` is "length" then it is considered as work\n time.\n ')
gap_unit = synonym('schedule_unit')
gap_model = synonym('schedule_model', doc='An enumeration value one of ["length", "duration"]. The value of\n this attribute defines if the :attr:`.gap` value is in *Work Time* or\n *Calendar Time*. The default value is "length" so the gap value defines\n a time interval in work time.\n ')
def __init__(self, task=None, depends_to=None, dependency_target=None, gap_timing=0, gap_unit='h', gap_model='length'):
ScheduleMixin.__init__(self, schedule_timing=gap_timing, schedule_unit=gap_unit, schedule_model=gap_model)
self.task = task
self.depends_to = depends_to
self.dependency_target = dependency_target
@validates('task')
def _validate_task(self, key, task):
if (task is not None):
if (not isinstance(task, Task)):
raise TypeError(('%s.task should be an instance of stalker.models.task.Task, not %s' % (self.__class__.__name__, task.__class__.__name__)))
return task
@validates('depends_to')
def _validate_depends_to(self, key, dep):
if (dep is not None):
if (not isinstance(dep, Task)):
raise TypeError(('%s.depends_to should be an instance of stalker.models.task.Task, not %s' % (self.__class__.__name__, dep.__class__.__name__)))
return dep
@validates('dependency_target')
def _validate_dependency_target(self, key, dep_target):
from stalker import defaults
if (dep_target is None):
dep_target = defaults.task_dependency_targets[0]
from stalker import __string_types__
if (not isinstance(dep_target, __string_types__)):
raise TypeError(('%s.dependency_target should be a string with a value one of %s, not %s' % (self.__class__.__name__, defaults.task_dependency_targets, dep_target.__class__.__name__)))
if (dep_target not in defaults.task_dependency_targets):
raise ValueError(("%s.dependency_target should be one of %s, not '%s'" % (self.__class__.__name__, defaults.task_dependency_targets, dep_target)))
return dep_target
def to_tjp(self):
from jinja2 import Template
template_variables = {'task': self.task, 'depends_to': self.depends_to, 'dependency_target': self.dependency_target, 'gap_timing': self.gap_timing, 'gap_unit': self.gap_unit, 'gap_model': self.gap_model}
from stalker import defaults
temp = Template(defaults.tjp_task_dependency_template, trim_blocks=True)
return temp.render(template_variables) |
def extractToukatlsWordpressCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type) in tagmap:
if (tagname in item['tags']):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False |
def test_multiple_returns_multiple_stackchecks():
cfg = ControlFlowGraph()
cfg.add_nodes_from([(n0 := BasicBlock(0, instructions=[])), (n1 := BasicBlock(1, instructions=[Branch(Condition(OperationType.less, [Variable('a'), Constant(0)]))])), (n2 := BasicBlock(2, instructions=[Branch(Condition(OperationType.less, [Variable('b'), Constant(1)]))])), (n3 := BasicBlock(3, instructions=[Branch(Condition(OperationType.equal, [Variable('canary'), Constant(0)]))])), (n4 := BasicBlock(4, instructions=[Branch(Condition(OperationType.equal, [Variable('canary'), Constant(0)]))])), (n5 := BasicBlock(5, instructions=[Return([Constant(0)])])), (n6 := BasicBlock(6, instructions=[Assignment(ListOperation([]), Call(ImportedFunctionSymbol('__stack_chk_fail', 0), []))])), (n7 := BasicBlock(7, instructions=[Return([Constant(1)])])), (n8 := BasicBlock(8, instructions=[Assignment(ListOperation([]), Call(ImportedFunctionSymbol('__stack_chk_fail', 0), []))]))])
cfg.add_edges_from([UnconditionalEdge(n0, n1), TrueCase(n1, n2), FalseCase(n1, n3), TrueCase(n2, n1), FalseCase(n2, n4), TrueCase(n3, n7), FalseCase(n3, n8), TrueCase(n4, n6), FalseCase(n4, n5)])
_run_remove_stack_canary(cfg)
assert (set(cfg) == {n0, n1, n2, n3, n4, n5, n7})
assert (n3.instructions == [])
assert (n4.instructions == [])
assert isinstance(cfg.get_edge(n0, n1), UnconditionalEdge)
assert isinstance(cfg.get_edge(n1, n2), TrueCase)
assert isinstance(cfg.get_edge(n1, n3), FalseCase)
assert isinstance(cfg.get_edge(n2, n1), TrueCase)
assert isinstance(cfg.get_edge(n2, n4), FalseCase)
assert isinstance(cfg.get_edge(n3, n7), UnconditionalEdge)
assert isinstance(cfg.get_edge(n4, n5), UnconditionalEdge) |
@inherits(GatherWebTargets)
class AquatoneScan(luigi.Task):
threads = luigi.Parameter(default=defaults.get('threads', ''))
scan_timeout = luigi.Parameter(default=defaults.get('aquatone-scan-timeout', ''))
requirements = ['aquatone', 'masscan']
exception = True
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.db_mgr = pipeline.models.db_manager.DBManager(db_location=self.db_location)
self.results_subfolder = (Path(self.results_dir) / 'aquatone-results')
def requires(self):
meets_requirements(self.requirements, self.exception)
args = {'results_dir': self.results_dir, 'rate': self.rate, 'target_file': self.target_file, 'top_ports': self.top_ports, 'interface': self.interface, 'ports': self.ports, 'exempt_list': self.exempt_list, 'db_location': self.db_location}
return GatherWebTargets(**args)
def output(self):
return SQLAlchemyTarget(connection_string=self.db_mgr.connection_string, target_table='screenshot', update_id=self.task_id)
def _get_similar_pages(self, url, results):
similar_pages = None
for (cluster_id, cluster) in results.get('pageSimilarityClusters').items():
if (url not in cluster):
continue
similar_pages = list()
for similar_url in cluster:
if (similar_url == url):
continue
similar_pages.append(self.db_mgr.get_or_create(Screenshot, url=similar_url))
return similar_pages
def parse_results(self):
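# Reads aquatone_session.json and upserts Endpoint/Header/Port/Screenshot records: each page's status code and headers are attached to its endpoint, screenshots are stored as raw bytes, and pages that aquatone grouped into the same similarity cluster are linked together.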
try:
with open((self.results_subfolder / 'aquatone_session.json')) as f:
results = json.load(f)
except FileNotFoundError as e:
logging.error(e)
return
for (page, page_dict) in results.get('pages').items():
headers = list()
url = page_dict.get('url')
endpoint = self.db_mgr.get_or_create(Endpoint, url=url)
if (not endpoint.status_code):
status = page_dict.get('status').split(maxsplit=1)
if (len(status) > 1):
(endpoint.status_code, _) = status
else:
endpoint.status_code = status[0]
for header_dict in page_dict.get('headers'):
header = self.db_mgr.get_or_create(Header, name=header_dict.get('name'), value=header_dict.get('value'))
if (endpoint not in header.endpoints):
header.endpoints.append(endpoint)
headers.append(header)
endpoint.headers = headers
parsed_url = urlparse(url)
ip_or_hostname = parsed_url.hostname
tgt = self.db_mgr.get_or_create_target_by_ip_or_hostname(ip_or_hostname)
endpoint.target = tgt
if (not page_dict.get('hasScreenshot')):
self.db_mgr.add(endpoint)
continue
port = (parsed_url.port if parsed_url.port else 80)
port = self.db_mgr.get_or_create(Port, protocol='tcp', port_number=port)
image = (self.results_subfolder / page_dict.get('screenshotPath')).read_bytes()
screenshot = self.db_mgr.get_or_create(Screenshot, url=url)
screenshot.port = port
screenshot.endpoint = endpoint
screenshot.target = screenshot.endpoint.target
screenshot.image = image
similar_pages = self._get_similar_pages(url, results)
if (similar_pages is not None):
screenshot.similar_pages = similar_pages
self.db_mgr.add(screenshot)
self.output().touch()
self.db_mgr.close()
def run(self):
self.results_subfolder.mkdir(parents=True, exist_ok=True)
command = [tools.get('aquatone').get('path'), '-scan-timeout', self.scan_timeout, '-threads', self.threads, '-silent', '-out', self.results_subfolder]
aquatone_input_file = (self.results_subfolder / 'input-from-webtargets')
with open(aquatone_input_file, 'w') as f:
for target in self.db_mgr.get_all_web_targets():
f.write(f'''{target}
''')
with open(aquatone_input_file) as target_list:
subprocess.run(command, stdin=target_list)
aquatone_input_file.unlink()
self.parse_results() |
def check_extra_coords_names(coordinates, extra_coords_names):
if isinstance(extra_coords_names, str):
extra_coords_names = (extra_coords_names,)
if (extra_coords_names is None):
raise ValueError((('Invalid extra_coords_names equal to None. ' + 'When passing one or more extra coordinate, ') + 'extra_coords_names cannot be None.'))
if (len(coordinates[2:]) != len(extra_coords_names)):
raise ValueError((("Invalid extra_coords_names '{}'. ".format(extra_coords_names) + 'Number of extra coordinates names must match the number of ') + "additional coordinates ('{}').".format(len(coordinates[2:]))))
return extra_coords_names |
@pytest.mark.usefixtures('dummyG')
@pytest.mark.parametrize('sort_by', ('value', None))
@pytest.mark.parametrize('group_by', ('group', None))
def test_arc(dummyG, group_by, sort_by):
(pos, nt) = get_pos_df(dummyG, layouts.arc, group_by=group_by, sort_by=sort_by)
assert (pos['x'].min() == 0)
assert (pos['x'].max() == (2 * (len(nt) - 1)))
assert all((pos['y'] == 0.0)) |
class IPAUtils():
def __init__(self, BASIS_G, BASIS_Q, primefield):
self.MODULUS = primefield.MODULUS
self.BASIS_G = BASIS_G
self.BASIS_Q = BASIS_Q
self.WIDTH = primefield.WIDTH
self.DOMAIN = primefield.DOMAIN
self.primefield = primefield
def hash_to_field(self, x):
return (int.from_bytes(hash(x), 'little') % self.MODULUS)
def pedersen_commit(self, a):
return Point().msm(self.BASIS_G, [Scalar().from_int(x) for x in a])
def pedersen_commit_sparse(self, values):
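# For very sparse inputs (fewer than 5 non-zero values) accumulate the scalar multiplications of the relevant basis points directly via GLV; otherwise fall back to a full multi-scalar multiplication over the selected basis points.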
if (len(values) < 5):
if (len(values) == 0):
return Point().mul(0)
else:
it = iter(values.items())
(k, v) = next(it)
r = self.BASIS_G[k].dup().glv(v)
for (k, v) in it:
r = r.add(self.BASIS_G[k].dup().glv(v))
return r
return Point().msm([self.BASIS_G[i] for i in values.keys()], [Scalar().from_int(x) for x in values.values()])
def pedersen_commit_basis(self, a, basis):
return Point().msm(basis, [Scalar().from_int(x) for x in a])
def f_g_coefs(self, xinv_vec):
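# Coefficient i is the product of the round-challenge inverses selected by the binary digits of i, i.e. the scalar by which the i-th original basis point contributes to the fully folded basis.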
f_g_coefs = []
for i in range(len(self.DOMAIN)):
binary = [int(x) for x in bin(i)[2:].rjust(len(xinv_vec), '0')]
coef = 1
for (xinv, b) in zip(xinv_vec, binary):
if (b == 1):
coef = ((coef * xinv) % self.MODULUS)
f_g_coefs.append(coef)
return f_g_coefs
def check_ipa_proof(self, C, z, y, proof):
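# IPA verification: bind the claimed evaluation y into the commitment (C + y*q), replay the per-round challenges from the L/R points, fold the commitment as C + x*C_L + xinv*C_R each round, then check the result equals a_l*g_l + (a_l*b_l)*q, where g_l and b_l are the fully folded basis point and barycentric vector and a_l is the final scalar in the proof.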
n = len(self.DOMAIN)
m = (n // 2)
b = self.primefield.barycentric_formula_constants(z)
w = self.hash_to_field([C, z, y])
q = self.BASIS_Q.dup().glv(w)
current_commitment = C.dup().add(q.dup().glv(y))
current_basis = self.BASIS_G
i = 0
xs = []
xinvs = []
while (n > 1):
(C_L, C_R) = [Point().deserialize(C) for C in proof[i]]
x = self.hash_to_field([C_L, C_R])
xinv = self.primefield.inv(x)
xs.append(x)
xinvs.append(xinv)
current_commitment = current_commitment.dup().add(C_L.dup().glv(x)).add(C_R.dup().glv(xinv))
n = m
m = (n // 2)
i = (i + 1)
f_g_coefs = self.f_g_coefs(xinvs)
g_l = Point().msm(self.BASIS_G, f_g_coefs)
b_l = self.inner_product(b, f_g_coefs)
a_l = proof[(- 1)][0]
a_l_times_b_l = ((a_l * b_l) % self.MODULUS)
computed_commitment = g_l.glv(a_l).add(q.glv(a_l_times_b_l))
return (current_commitment == computed_commitment)
def inner_product(self, a, b):
return (sum((((x * y) % self.MODULUS) for (x, y) in zip(a, b))) % self.MODULUS)
def evaluate_and_compute_ipa_proof(self, C, f_eval, z):
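# Prover side of the same argument: repeatedly split a and b in half, commit to the cross inner products (C_L, C_R), derive the challenge from those commitments, and fold a, b and the basis until a single scalar a[0] remains; the proof is the list of per-round commitments plus that scalar, returned alongside the evaluation y.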
assert (len(f_eval) == len(self.DOMAIN))
n = len(self.DOMAIN)
m = (n // 2)
a = f_eval[:]
b = self.primefield.barycentric_formula_constants(z)
y = self.inner_product(a, b)
proof = []
w = self.hash_to_field([C, z, y])
q = self.BASIS_Q.dup().glv(w)
current_basis = self.BASIS_G
while (n > 1):
a_L = a[:m]
a_R = a[m:]
b_L = b[:m]
b_R = b[m:]
z_L = self.inner_product(a_R, b_L)
z_R = self.inner_product(a_L, b_R)
C_L = self.pedersen_commit_basis(a_R, current_basis[:m]).add(q.dup().glv(z_L))
C_R = self.pedersen_commit_basis(a_L, current_basis[m:]).add(q.dup().glv(z_R))
proof.append([C_L.serialize(), C_R.serialize()])
x = self.hash_to_field([C_L, C_R])
xinv = self.primefield.inv(x)
a = [((v + (x * w)) % self.MODULUS) for (v, w) in zip(a_L, a_R)]
b = [((v + (xinv * w)) % self.MODULUS) for (v, w) in zip(b_L, b_R)]
current_basis = [v.dup().add(w.dup().glv(xinv)) for (v, w) in zip(current_basis[:m], current_basis[m:])]
n = m
m = (n // 2)
proof.append([a[0]])
return (y, proof) |
class InternationalPhoneNumber(FancyValidator):
strip = True
default_cc = None
_mark_chars_re = re.compile("[_.!~*'/]")
_preTransformations = [(re.compile('^(\\(?)(?:00\\s*)(.+)$'), '%s+%s'), (re.compile('^\\(\\s*(\\+?\\d+)\\s*(\\d+)\\s*\\)(.+)$'), '(%s%s)%s'), (re.compile('^\\((\\+?[-\\d]+)\\)\\s?(\\d.+)$'), '%s-%s'), (re.compile('^(?:1-)(\\d+.+)$'), '+1-%s'), (re.compile('^(\\+\\d+)\\s+\\(0\\)\\s*(\\d+.+)$'), '%s-%s'), (re.compile('^([0+]\\d+)[-\\s](\\d+)$'), '%s-%s'), (re.compile('^([0+]\\d+)[-\\s](\\d+)[-\\s](\\d+)$'), '%s-%s-%s')]
_ccIncluder = [(re.compile('^\\(?0([1-9]\\d*)[-)](\\d.*)$'), '+%d-%s-%s')]
_postTransformations = [(re.compile('^(\\+\\d+)[-\\s]\\(?(\\d+)\\)?[-\\s](\\d+.+)$'), '%s-%s-%s'), (re.compile('^(.+)\\s(\\d+)$'), '%s-%s')]
_phoneIsSane = re.compile('^(\\+[1-9]\\d*)-([\\d\\-]+)$')
messages = dict(phoneFormat=_('Please enter a number, with area code, in the form +##-###-#######.'))
def _perform_rex_transformation(self, value, transformations):
for (rex, trf) in transformations:
match = rex.search(value)
if match:
value = (trf % match.groups())
return value
def _prepend_country_code(self, value, transformations, country_code):
for (rex, trf) in transformations:
match = rex.search(value)
if match:
return (trf % ((country_code,) + match.groups()))
return value
def _convert_to_python(self, value, state):
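# Normalisation pipeline: require ASCII input, map punctuation marks to dashes, tidy spacing around dashes, apply the pre-transformations, optionally inject the default country code, apply the post-transformations, strip remaining spaces, and finally require the '+CC-digits' shape enforced by _phoneIsSane.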
self.assert_string(value, state)
try:
value = value.encode('ascii', 'strict')
except UnicodeEncodeError:
raise Invalid(self.message('phoneFormat', state), value, state)
value = value.decode('ascii')
value = self._mark_chars_re.sub('-', value)
for (f, t) in [(' ', ' '), ('--', '-'), (' - ', '-'), ('- ', '-'), (' -', '-')]:
value = value.replace(f, t)
value = self._perform_rex_transformation(value, self._preTransformations)
if self.default_cc:
if callable(self.default_cc):
cc = self.default_cc()
else:
cc = self.default_cc
value = self._prepend_country_code(value, self._ccIncluder, cc)
value = self._perform_rex_transformation(value, self._postTransformations)
value = value.replace(' ', '')
if (not self._phoneIsSane.search(value)):
raise Invalid(self.message('phoneFormat', state), value, state)
return value |
def test_media_types():
media_type = MediaType('*/*')
assert (media_type.precedence == 0)
assert (str(media_type) == '*/*')
media_type = MediaType('text/*')
assert (media_type.precedence == 1)
assert (str(media_type) == 'text/*')
media_type = MediaType('text/html')
assert (media_type.precedence == 2)
assert (str(media_type) == 'text/html')
media_type = MediaType('text/html; q=0.9')
assert (media_type.precedence == 2)
assert (str(media_type) == 'text/html; q=0.9') |
class TestSnippetsFile(util.MdCase):
extension = ['pymdownx.snippets']
extension_configs = {'pymdownx.snippets': {'base_path': os.path.join(BASE, '_snippets', 'b.txt')}}
def test_user(self):
self.check_markdown('\n --8<-- "b.txt"\n ', '\n <p>Snippet</p>\n ', True) |
@pytest.mark.parametrize('x,y,w,p', [([1, 2, 3], np.ones((10, 10)), None, None), ([[1], [1]], np.ones((1, 1)), None, None), ([[1, 2, 3], [1, 2, 3]], np.ones((4, 3)), None, None), ([[1, 2, 3], [1, 2, 3]], np.ones((3, 3, 3)), None, None), ([[1, 2, 3], [1, 2, 3]], np.ones((3, 3)), [1, 2, 3], None), ([[1, 2, 3], [1, 2, 3]], np.ones((3, 3)), [[1, 2, 3]], None), ([[1, 2, 3], [1, 2, 3]], np.ones((3, 3)), [[1, 2], [1, 2]], None), ([[1, 2, 3], [1, 2, 3]], np.ones((3, 3)), None, [0.5, 0.4, 0.2]), (np.array([[1, 2, 3], [4, 5, 6]]), np.ones((3, 3)), None, None), ([np.arange(6).reshape(2, 3), np.arange(6).reshape(2, 3)], np.ones((6, 6)), None, None)])
def test_invalid_data(x, y, w, p):
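# Every malformed (x, y, w, p) combination must be rejected at construction time.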
with pytest.raises((ValueError, TypeError)):
csaps.NdGridCubicSmoothingSpline(x, y, w, p) |
class WitnessHandshaker(Handshaker[WitnessProtocol]):
protocol_class = WitnessProtocol
async def do_handshake(self, multiplexer: MultiplexerAPI, protocol: WitnessProtocol) -> WitnessHandshakeReceipt:
self.logger.debug('Performing %s handshake with %s', protocol, multiplexer.remote)
return WitnessHandshakeReceipt(protocol) |
class OptionSeriesVariablepieSonificationTracksMappingHighpass(Options):
def frequency(self) -> 'OptionSeriesVariablepieSonificationTracksMappingHighpassFrequency':
return self._config_sub_data('frequency', OptionSeriesVariablepieSonificationTracksMappingHighpassFrequency)
def resonance(self) -> 'OptionSeriesVariablepieSonificationTracksMappingHighpassResonance':
return self._config_sub_data('resonance', OptionSeriesVariablepieSonificationTracksMappingHighpassResonance) |
class AccessTokenDatabaseMock(AccessTokenDatabase[AccessTokenModel]):
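# Minimal in-memory stand-in for an access token database, keyed by token string.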
store: Dict[(str, AccessTokenModel)]
def __init__(self):
self.store = {}
async def get_by_token(self, token: str, max_age: Optional[datetime]=None) -> Optional[AccessTokenModel]:
try:
access_token = self.store[token]
if ((max_age is not None) and (access_token.created_at < max_age)):
return None
return access_token
except KeyError:
return None
async def create(self, create_dict: Dict[(str, Any)]) -> AccessTokenModel:
access_token = AccessTokenModel(**create_dict)
self.store[access_token.token] = access_token
return access_token
async def update(self, access_token: AccessTokenModel, update_dict: Dict[(str, Any)]) -> AccessTokenModel:
for (field, value) in update_dict.items():
setattr(access_token, field, value)
self.store[access_token.token] = access_token
return access_token
async def delete(self, access_token: AccessTokenModel) -> None:
try:
del self.store[access_token.token]
except KeyError:
pass |
class Expected(_coconut.collections.namedtuple('Expected', ('result', 'error')), object):
__slots__ = ()
_coconut_is_data = True
__match_args__ = ('result', 'error')
_coconut_data_defaults = {0: None, 1: None}
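# Either-like container: holds a successful result or an error, never both (enforced in __new__).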
def __add__(self, other):
return _coconut.NotImplemented
def __mul__(self, other):
return _coconut.NotImplemented
def __rmul__(self, other):
return _coconut.NotImplemented
__ne__ = _coconut.object.__ne__
def __eq__(self, other):
return ((self.__class__ is other.__class__) and _coconut.tuple.__eq__(self, other))
def __hash__(self):
return (_coconut.tuple.__hash__(self) ^ hash(self.__class__))
def __new__(cls, result=_coconut_sentinel, error=None):
if ((result is not _coconut_sentinel) and (error is not None)):
raise _coconut.TypeError('Expected cannot have both a result and an error')
if ((result is _coconut_sentinel) and (error is None)):
raise _coconut.TypeError('Expected must have either a result or an error')
if (result is _coconut_sentinel):
result = None
return _coconut.tuple.__new__(cls, (result, error))
def __bool__(self):
return (self.error is None)
def __fmap__(self, func):
return (self.__class__(func(self.result)) if self else self)
def and_then(self, func):
return self.__fmap__(func).join()
def join(self):
if (not self):
return self
if (not _coconut.isinstance(self.result, Expected)):
raise _coconut.TypeError('Expected.join() requires an Expected[Expected[_]]')
return self.result
def map_error(self, func):
return (self if self else self.__class__(error=func(self.error)))
def handle(self, err_type, handler):
if ((not self) and _coconut.isinstance(self.error, err_type)):
return self.__class__(handler(self.error))
return self
def expect_error(self, *err_types):
if ((not self) and (not _coconut.isinstance(self.error, err_types))):
raise self.error
return self
def unwrap(self):
if (not self):
raise self.error
return self.result
def or_else(self, func):
if self:
return self
got = func(self.error)
if (not _coconut.isinstance(got, Expected)):
raise _coconut.TypeError('Expected.or_else() requires a function that returns an Expected')
return got
def result_or_else(self, func):
return (self.result if self else func(self.error))
def result_or(self, default):
return (self.result if self else default) |
def _get_cmd_runner(strace_argv, cmd_argv, *, _strace=shutil.which('strace')):
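# Build the full strace command line once so it can be logged and reused by every invocation.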
argv = [_strace, *strace_argv, *cmd_argv]
logger.info(f'# {shlex.join(argv)}')
def run_cmd(*, capture=True):
proc = subprocess.run(argv, stdout=(subprocess.PIPE if capture else None), stderr=(subprocess.STDOUT if capture else None), text=capture, check=True)
return proc.stdout
return run_cmd |
class OptionPlotoptionsBubbleTooltipDatetimelabelformats(Options):
@property
def day(self):
return self._config_get('%A, %e %b %Y')
@day.setter
def day(self, text: str):
self._config(text, js_type=False)
@property
def hour(self):
return self._config_get('%A, %e %b, %H:%M')
@hour.setter
def hour(self, text: str):
self._config(text, js_type=False)
@property
def millisecond(self):
return self._config_get('%A, %e %b, %H:%M:%S.%L')
@millisecond.setter
def millisecond(self, text: str):
self._config(text, js_type=False)
@property
def minute(self):
return self._config_get('%A, %e %b, %H:%M')
@minute.setter
def minute(self, text: str):
self._config(text, js_type=False)
@property
def month(self):
return self._config_get('%B %Y')
@month.setter
def month(self, text: str):
self._config(text, js_type=False)
@property
def second(self):
return self._config_get('%A, %e %b, %H:%M:%S')
@second.setter
def second(self, text: str):
self._config(text, js_type=False)
@property
def week(self):
return self._config_get('Week from %A, %e %b %Y')
@week.setter
def week(self, text: str):
self._config(text, js_type=False)
@property
def year(self):
return self._config_get('%Y')
@year.setter
def year(self, text: str):
self._config(text, js_type=False) |
@pytest.mark.parametrize(('provider', 'feature', 'subfeature', 'phase'), global_features(filter=only_async, return_phase=True)['ungrouped_providers'])
class TestGetAsyncJobResult():
def test_output_fake(self, mocker: MockerFixture, provider, feature, subfeature, phase):
mocker.patch('edenai_apis.interface.validate_all_provider_constraints', return_value={})
final_result = compute_output(provider, feature, subfeature, {}, fake=True, phase=phase)
assert (final_result['provider'] == provider)
assert (final_result['status'] == 'success') |
def example():
async def check_item_clicked(e):
e.control.checked = (not e.control.checked)
(await e.control.update_async())
pb = ft.PopupMenuButton(items=[ft.PopupMenuItem(text='Item 1'), ft.PopupMenuItem(icon=ft.icons.POWER_INPUT, text='Check power'), ft.PopupMenuItem(content=ft.Row([ft.Icon(ft.icons.HOURGLASS_TOP_OUTLINED), ft.Text('Item with a custom content')]), on_click=(lambda _: print('Button with a custom content clicked!'))), ft.PopupMenuItem(), ft.PopupMenuItem(text='Checked item', checked=False, on_click=check_item_clicked)])
return pb |
class TransactionSearchTimePeriod(AbstractTimePeriod):
def __init__(self, default_start_date: str, default_end_date: str):
self._default_start_date = default_start_date
self._default_end_date = default_end_date
self._filter_value = {}
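# Transaction search stores 'date_signed' filters under the 'award_date_signed' field.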
self._date_type_transaction_search_map = {'date_signed': 'award_date_signed'}
@property
def filter_value(self):
return self._filter_value
@filter_value.setter
def filter_value(self, filter_value):
self._filter_value = filter_value
def start_date(self):
return (self._filter_value.get('start_date') or self._default_start_date)
def end_date(self):
return (self._filter_value.get('end_date') or self._default_end_date)
def gte_date_type(self):
return self._return_date_type()
def lte_date_type(self):
return self._return_date_type()
def gte_date_range(self):
return [{f'{self.gte_date_type()}': {'gte': self.start_date()}}]
def lte_date_range(self):
return [{f'{self.lte_date_type()}': {'lte': self.end_date()}}]
def _return_date_type(self) -> str:
ret_date_type = self._filter_value.get('date_type', 'action_date')
if (ret_date_type in self._date_type_transaction_search_map):
ret_date_type = self._date_type_transaction_search_map[ret_date_type]
return ret_date_type |
def check_if_updates_and_builds_set(ctx: click.core.Context, param: click.core.Option, value: str) -> str:
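# Click callback: --updates and --builds are mutually exclusive; exit with an error if both are supplied.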
if ((value is not None) and (((param.name == 'builds') and ctx.params.get('updates', False)) or ((param.name == 'updates') and ctx.params.get('builds', False)))):
click.echo('ERROR: Must specify only one of --updates or --builds', err=True)
sys.exit(1)
return value |
@pytest.mark.parametrize('test_file_high_nonce', ['ttNonce/TransactionWithHighNonce64Minus1.json', 'ttNonce/TransactionWithHighNonce64.json', 'ttNonce/TransactionWithHighNonce64Plus1.json'])
def test_high_nonce(test_file_high_nonce: str) -> None:
test = load_byzantium_transaction(test_dir, test_file_high_nonce)
tx = rlp.decode_to(Transaction, test['tx_rlp'])
assert (validate_transaction(tx) == False) |
class StatusBody():
__slots__ = ('_status_code', '_msgs')
class StatusCode(Enum):
SUCCESS = 0
ERROR_UNSUPPORTED_VERSION = 1
ERROR_UNEXPECTED_PAYLOAD = 2
ERROR_GENERIC = 3
ERROR_DECODE = 4
ERROR_WRONG_AGENT_ADDRESS = 10
ERROR_WRONG_PUBLIC_KEY = 11
ERROR_INVALID_PROOF = 12
ERROR_UNSUPPORTED_LEDGER = 13
ERROR_UNKNOWN_AGENT_ADDRESS = 20
ERROR_AGENT_NOT_READY = 21
def __int__(self) -> int:
return self.value
def __init__(self, status_code: StatusCode, msgs: List[str]) -> None:
self._status_code = status_code
self._msgs = msgs
@property
def status_code(self) -> 'StatusCode':
return self._status_code
@property
def msgs(self) -> List[str]:
return self._msgs
@staticmethod
def encode(status_body_protobuf_object: Any, status_body_object: 'StatusBody') -> None:
status_body_protobuf_object.code = int(status_body_object.status_code)
status_body_protobuf_object.msgs.extend(status_body_object.msgs)
@classmethod
def decode(cls, status_body_protobuf_object: Any) -> 'StatusBody':
status_body = cls(status_code=cls.StatusCode(status_body_protobuf_object.code), msgs=status_body_protobuf_object.msgs)
return status_body
def __eq__(self, other: Any) -> bool:
return (isinstance(other, StatusBody) and (self.status_code == other.status_code) and (self.msgs == other.msgs)) |
class OptionPlotoptionsPackedbubbleMarkerStatesSelect(Options):
@property
def enabled(self):
return self._config_get(True)
@enabled.setter
def enabled(self, flag: bool):
self._config(flag, js_type=False)
@property
def fillColor(self):
return self._config_get('#cccccc')
@fillColor.setter
def fillColor(self, text: str):
self._config(text, js_type=False)
@property
def lineColor(self):
return self._config_get('#000000')
@lineColor.setter
def lineColor(self, text: str):
self._config(text, js_type=False)
@property
def lineWidth(self):
return self._config_get(2)
@lineWidth.setter
def lineWidth(self, num: float):
self._config(num, js_type=False)
@property
def radius(self):
return self._config_get(None)
@radius.setter
def radius(self, num: float):
self._config(num, js_type=False) |