code stringlengths 281 23.7M |
|---|
def extractKaparinTranslations(item):
    """Map a Kaparin Translations feed item to a release message.

    Returns a release message for the 'WATTT' series when the title parses to
    a chapter/volume number and is not a preview; otherwise False.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if not (chp or vol) or 'preview' in title.lower():
        return False
    if 'WATTT' not in item['tags']:
        return False
    return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
def get_material(mode):
    """Return the Blender material configured for *mode*.

    Appends the material from the bundled .blend resources on first use;
    returns None when the mode declares no material name.
    """
    name = modes[mode].material
    if name == '':
        return None
    # Bevel/thickness modes ship their materials in the 2.80-specific file.
    if 'bevel' in mode or 'thickness' in mode:
        blend_file = 'resources/materials_2.80.blend'
    else:
        blend_file = 'resources/materials.blend'
    path = os.path.join(os.path.dirname(__file__), blend_file, 'Material')
    if bpy.data.materials.get(name) is None:
        bpy.ops.wm.append(filename=name, directory=path, link=False, autoselect=False)
    return bpy.data.materials.get(name)
class TestGaussianPrivacyEngine():
    """Tests for GaussianPrivacyEngine: RDP privacy accounting and Gaussian
    noise addition on TwoFC toy models."""

    def _init_privacy_engine(self, alphas=None, noise_multiplier=1.0, target_delta=1e-05, users_per_round=10, num_total_users=10, global_model_parameter_val=5.0, noise_seed=0):
        """Create a GaussianPrivacyEngine attached to a constant-filled TwoFC model.

        ``alphas`` defaults to the RDP orders 1.1 .. 10.9.  Fixed: the
        original used a mutable list literal as the default argument; a None
        sentinel now builds the list per call (same values, no shared state).
        """
        if alphas is None:
            alphas = [1 + x / 10.0 for x in range(1, 100)]
        privacy_setting = PrivacySetting(alphas=alphas, noise_multiplier=noise_multiplier, target_delta=target_delta, noise_seed=noise_seed)
        privacy_engine = GaussianPrivacyEngine(privacy_setting=privacy_setting, users_per_round=users_per_round, num_total_users=num_total_users)
        global_model = utils.TwoFC()
        global_model.fill_all(global_model_parameter_val)
        privacy_engine.attach(global_model)
        return privacy_engine

    def _calc_eps(self, sample_rate, noise_multiplier, steps, alphas, delta):
        """Compute a reference epsilon directly via the RDP accountant."""
        rdp = privacy_analysis.compute_rdp(q=sample_rate, noise_multiplier=noise_multiplier, steps=steps, orders=alphas)
        (eps, _) = privacy_analysis.get_privacy_spent(orders=alphas, rdp=rdp, delta=delta)
        return eps

    def test_privacy_analysis_alpha_in_alphas(self):
        # The reported optimal alpha must be one of the configured orders.
        privacy_engine = self._init_privacy_engine()
        privacy_budget = privacy_engine.get_privacy_spent()
        assertTrue(privacy_budget.alpha in privacy_engine.alphas)

    def test_privacy_analysis_epsilon_reasonable(self):
        # With noise, epsilon is positive; with no noise, it is infinite.
        privacy_engine = self._init_privacy_engine()
        privacy_budget = privacy_engine.get_privacy_spent()
        assertTrue(privacy_budget.epsilon > 0)
        privacy_engine.noise_multiplier = 0
        privacy_budget = privacy_engine.get_privacy_spent()
        assertTrue(privacy_budget.epsilon == float('inf'))

    def test_privacy_analysis_epsilon(self):
        """Engine-reported epsilon matches the direct RDP computation."""
        alphas = [1 + x / 10.0 for x in range(1, 100)]
        noise_multiplier = 1.5
        target_delta = 1e-05
        num_users = 1000
        num_users_per_round = 50
        steps = num_users // num_users_per_round
        user_sampling_rate = num_users_per_round / num_users
        privacy_engine = self._init_privacy_engine(alphas=alphas, noise_multiplier=noise_multiplier, target_delta=target_delta, num_total_users=num_users, users_per_round=num_users_per_round)
        model_diff = utils.TwoFC()
        for _ in range(steps):
            privacy_engine.add_noise(model_diff, 1.0)
        privacy_budget = privacy_engine.get_privacy_spent()
        eps = self._calc_eps(user_sampling_rate, noise_multiplier, steps, alphas, target_delta)
        assertEqual(privacy_budget.epsilon, eps)

    def test_noise_added(self):
        """add_noise must actually perturb the model parameters."""
        model_diff = utils.TwoFC()
        model_diff.fill_all(1.0)
        model_diff_before_noise = FLModelParamUtils.clone(model_diff)
        privacy_engine = self._init_privacy_engine()
        privacy_engine.add_noise(model_diff, sensitivity=0.5)
        # A non-empty mismatch report means the parameters changed.
        mismatched = utils.verify_models_equivalent_after_training(model_diff_before_noise, model_diff)
        assertNotEqual(mismatched, '')

    def test_deterministic_noise_addition(self):
        """Equal seeds reproduce identical noise; different seeds differ."""
        model_diff = utils.TwoFC()
        model_diff.fill_all(1.0)
        model_diff_another_seed = FLModelParamUtils.clone(model_diff)
        model_diff_same_seed = FLModelParamUtils.clone(model_diff)
        privacy_engine = self._init_privacy_engine(noise_seed=1003)
        privacy_engine.add_noise(model_diff, sensitivity=0.5)
        privacy_engine = self._init_privacy_engine(noise_seed=2000)
        privacy_engine.add_noise(model_diff_another_seed, sensitivity=0.5)
        mismatched = utils.verify_models_equivalent_after_training(model_diff, model_diff_another_seed)
        assertNotEqual(mismatched, '')
        privacy_engine = self._init_privacy_engine(noise_seed=1003)
        privacy_engine.add_noise(model_diff_same_seed, sensitivity=0.5)
        mismatched = utils.verify_models_equivalent_after_training(model_diff, model_diff_same_seed)
        assertEqual(mismatched, '')

    def test_not_attached_validator(self):
        """add_noise raises PrivacyEngineNotAttachedException until attach()."""
        model_diff = utils.TwoFC()
        model_diff.fill_all(1.0)
        privacy_setting = PrivacySetting(alphas=[1 + x / 10.0 for x in range(1, 100)], noise_multiplier=1.0, target_delta=1e-06)
        privacy_engine = GaussianPrivacyEngine(privacy_setting=privacy_setting, users_per_round=1, num_total_users=1)
        sensitivity = 0.5
        with assertRaises(PrivacyEngineNotAttachedException):
            privacy_engine.add_noise(model_diff, sensitivity)
        raised_exception = False
        global_model = utils.TwoFC()
        global_model.fill_all(5.0)
        privacy_engine.attach(global_model)
        try:
            privacy_engine.add_noise(model_diff, sensitivity)
        except PrivacyEngineNotAttachedException:
            raised_exception = True
        assertFalse(raised_exception)
def extractLittleShanksTranslations(item):
    """Map a Little Shanks feed item to a release message.

    Returns None for previews or titles with no parseable numbering, a
    release message when the 'Rebirth Thief' tag is present, else False.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    has_numbering = chp or vol or frag
    if not has_numbering or 'preview' in title.lower():
        return None
    if 'Rebirth Thief' in item['tags']:
        return buildReleaseMessageWithType(item, 'Rebirth of the Thief Who Roamed The World', vol, chp, frag=frag, postfix=postfix)
    return False
def get_databases():
    """Build DataBase objects from the [database] section of the config.

    Each ``database_N`` entry picks up the matching ``keyfile_N``,
    ``password_N`` and ``autotype_default_N`` values; when a
    ``password_cmd_N`` entry exists, the command is run and its stdout
    (if any) overrides the static password.
    """
    args_dict = dict(keepmenu.CONF.items('database'))
    databases = []
    for key, dbname in args_dict.items():
        if not key.startswith('database'):
            continue
        idx = key.rsplit('_', 1)[-1]
        keyfile = args_dict.get(f'keyfile_{idx}')
        passw = args_dict.get(f'password_{idx}')
        autotype = args_dict.get(f'autotype_default_{idx}')
        cmd_entry = args_dict.get(f'password_cmd_{idx}')
        if cmd_entry is not None:
            cmd = expanduser(cmd_entry)
            res = subprocess.run(shlex.split(cmd), capture_output=True, check=False, encoding=keepmenu.ENC)
            if res.stderr:
                # Surface the command failure and abort the whole lookup.
                dmenu_err(f'Password command error: {res.stderr}')
                sys.exit()
            elif res.stdout:
                passw = res.stdout.rstrip('\n')
        if dbname:
            databases.append(DataBase(dbase=dbname, kfile=keyfile, pword=passw, atype=autotype))
    return databases
class OptionPlotoptionsSankeySonificationContexttracksActivewhen(Options):
    """Accessors for the sankey sonification contextTracks ``activeWhen`` options.

    Fixed: each plain ``def`` getter was immediately shadowed by a same-named
    setter ``def`` (decorators evidently stripped), leaving only setter
    behavior; the ``@property``/``@<name>.setter`` pairs restore the intended
    get/set access without changing any underlying ``_config`` call.
    """

    @property
    def crossingDown(self):
        """Value threshold for downward crossings (no configured default)."""
        return self._config_get(None)

    @crossingDown.setter
    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    @property
    def crossingUp(self):
        """Value threshold for upward crossings (no configured default)."""
        return self._config_get(None)

    @crossingUp.setter
    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    @property
    def max(self):
        """Upper bound of the active range (no configured default)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the active range (no configured default)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        """Name of the point property being tracked (no configured default)."""
        return self._config_get(None)

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
class OptionXaxisTitleStyle(Options):
    """Accessors for the x-axis title style options.

    Fixed: the plain ``def`` getters were shadowed by same-named setter
    ``def``s (decorators evidently stripped); ``@property``/``@<name>.setter``
    pairs restore the intended get/set access.
    """

    @property
    def color(self):
        """Title text color; defaults to '#666666'."""
        return self._config_get('#666666')

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def fontSize(self):
        """Title font size; defaults to '0.8em'."""
        return self._config_get('0.8em')

    @fontSize.setter
    def fontSize(self, num: float):
        self._config(num, js_type=False)
# NOTE(review): module-level side effect — applies `_all_methods` to
# `bind_proxy`; both names are presumably defined earlier in this file
# (outside this view) — confirm before moving or removing.
_all_methods(bind_proxy)
class BuildChrootProxy(BaseProxy):
    """Proxy for the ``/build-chroot`` API endpoints."""

    def _fetch(self, endpoint, params):
        # Shared request/munchify round trip used by every endpoint below.
        response = self.request.send(endpoint=endpoint, params=params)
        return munchify(response)

    def get(self, build_id, chrootname):
        """Return a single build-chroot identified by build id and chroot name."""
        return self._fetch('/build-chroot', {'build_id': build_id, 'chrootname': chrootname})

    def get_list(self, build_id, pagination=None):
        """Return the chroot list for a build, merging optional pagination params."""
        return self._fetch('/build-chroot/list', {'build_id': build_id, **(pagination or {})})

    def get_build_config(self, build_id, chrootname):
        """Return the build configuration for the given build-chroot."""
        return self._fetch('/build-chroot/build-config', {'build_id': build_id, 'chrootname': chrootname})

    def get_built_packages(self, build_id, chrootname):
        """Return the packages produced by the given build-chroot."""
        return self._fetch('/build-chroot/built-packages', {'build_id': build_id, 'chrootname': chrootname})
class RMTTestReqNote(object):
    """Tests for the ReqNote rewrite hook."""

    def rmttest_positive_01(self):
        """A requirement without a 'Note' entry rewrites to ('Note', None)."""
        config, req = create_parameters()
        rewriter = ReqNote(config)
        name, value = rewriter.rewrite('Note-test', req)
        assert name == 'Note'
        assert value is None

    def rmttest_positive_02(self):
        """An explicit 'Note' entry is passed through unchanged."""
        config, _ = create_parameters()
        rewriter = ReqNote(config)
        name, value = rewriter.rewrite('Note-test', {'Note': 'something'})
        assert name == 'Note'
        assert value == 'something'
def test_simple_model_can_from_dict():
    """Serialized weights load only into compatibly-shaped models."""
    model = Maxout(5, 10, nP=2).initialize()
    serialized = model.to_dict()
    # The originating model and an identically-shaped sibling both accept it.
    assert model.can_from_dict(serialized)
    assert Maxout(5, 10, nP=2).can_from_dict(serialized)
    # A transposed shape is rejected.
    assert not Maxout(10, 5, nP=2).can_from_dict(serialized)
    # A model with an unset input dimension can still take the dict.
    assert Maxout(5, nP=2).can_from_dict(serialized)
def _generate_apidocs_aea_modules() -> None:
    """Generate a markdown API doc under API_DIR for every module in AEA_DIR."""
    for module_path in filter(is_not_dir, Path(AEA_DIR).rglob('*')):
        print(f'Processing {module_path}... ', end='')
        if should_skip(module_path):
            continue
        parts = module_path.parts
        stem = module_path.stem
        # The on-disk doc path drops the repository root directory, while the
        # dotted import path keeps it.
        doc_file = API_DIR / Path(*parts[1:-1]) / f'{stem}.md'
        dotted_path = '.'.join(parts[:-1]) + '.' + stem
        make_pydoc(dotted_path, doc_file)
def make_tx(input_txo_1: Output, inclusion_proof_1, input_txo_2: Output, inclusion_proof_2) -> (Transaction, Output, Output):
    """Build a send/receive transaction pair spending up to two input notes.

    Half of the total input value (integer division) funds a fresh output
    note; the rest minus a random fee becomes the change note.  Returns the
    merged transaction together with the output and change notes.
    """
    fee = Field.random(1, 10)
    total_in = 0
    for txo in (input_txo_1, input_txo_2):
        if txo is not None:
            total_in += txo.v
    output_val = Field(int(total_in) // 2)
    change_val = total_in - fee - output_val
    output_txo = Output.new(output_val)
    change_txo = Output.new(change_val)
    builder = TxSend.builder().value(output_val).fee(fee)
    builder = builder.input_txo(input_txo_1, inclusion_proof_1)
    builder = builder.input_txo(input_txo_2, inclusion_proof_2)
    builder = builder.change_txo(change_txo).metadata(ERC20_ADDRESS, EXPIRATION)
    tx_send = builder.sig_salt(Field.random(1, SNARK_SCALAR_FIELD)).build()
    tx_receive = TxReceive.builder().request(tx_send).output_txo(output_txo).sig_salt(Field.random(1, SNARK_SCALAR_FIELD)).build()
    tx = tx_send.merge(tx_receive.response)
    return (tx, output_txo, change_txo)
class AutumnWindEffect(GenericAction):
    """Drop one card from the target, chosen by the source or at random."""

    def apply_action(self):
        src, tgt = self.source, self.target
        g = self.game
        zone_names = ('cards', 'showncards', 'equips')
        # Let the source pick a card from any of the target's zones; fall
        # back to a random pick if no choice was made.
        card = g.user_input([src], ChoosePeerCardInputlet(self, tgt, zone_names))
        if not card:
            zones = [getattr(tgt, zone) for zone in zone_names]
            card = random_choose_card(g, zones)
        if not card:
            return False
        self.card = card
        # Reveal the dropped card to everyone except its owner, then drop it.
        g.players.exclude(tgt).reveal(card)
        g.process_action(DropCards(src, tgt, cards=[card]))
        return True

    def is_valid(self):
        tgt = self.target
        if tgt.dead:
            return False
        # Valid only while the target still holds at least one card anywhere.
        return any(getattr(tgt, zone) for zone in ('cards', 'showncards', 'equips'))
class DefinedVarsCollector(ast.NodeVisitor):
    """AST visitor collecting every name a module defines or imports.

    Collected names are added to the caller-owned ``defined_vars`` set;
    class definitions are additionally forwarded to
    ``visit_potential_constraint_def``, which may populate
    ``defined_constraints``.
    """

    def __init__(self, defined_vars, defined_constraints):
        # Both collections are owned by the caller and mutated in place.
        self.defined_vars = defined_vars
        self.defined_constraints = defined_constraints

    def visit_Name(self, node):
        # Fixed idiom: isinstance instead of `type(...) is` (equivalent here,
        # since ast.Store is never subclassed in practice).
        if isinstance(node.ctx, ast.Store):
            self.defined_vars.add(node.id)
        super().generic_visit(node)

    def visit_FunctionDef(self, node: ast.FunctionDef) -> Any:
        self.defined_vars.add(node.name)
        super().generic_visit(node)

    def visit_ClassDef(self, node: ast.ClassDef) -> Any:
        self.defined_vars.add(node.name)
        # A class may declare a constraint; delegate its detection.
        visit_potential_constraint_def(node, self)
        super().generic_visit(node)

    def visit_Import(self, node: ast.Import) -> Any:
        for alias in node.names:
            # `import a as b` defines b; plain `import a` defines a.
            self.defined_vars.add(alias.asname or alias.name)
        super().generic_visit(node)

    def visit_ImportFrom(self, node: ast.ImportFrom) -> Any:
        for alias in node.names:
            self.defined_vars.add(alias.asname or alias.name)
        super().generic_visit(node)

    def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> Any:
        self.defined_vars.add(node.name)
        super().generic_visit(node)

    def visit_Call(self, node: ast.Call) -> Any:
        return super().generic_visit(node)
def fetch_production_capacity(zone_key: ZoneKey, target_datetime: datetime, session: Session) -> (dict[(str, Any)] | None):
    """Fetch per-mode production capacity for Germany and align it to
    ``target_datetime`` via ``update_capacity_breakdown``.

    Raises:
        ValueError: when the upstream HTTP request fails.
    """
    r: Response = session.get(REQUEST_URL)
    if not r.ok:
        raise ValueError(f'Failed to fetch capacity data for DE at {target_datetime.date()}')
    data = r.json()
    # One timestamped series per production type.  Fixed idiom: pair each
    # timestamp with its value directly instead of double-indexing both
    # lists with range(len(...)).
    all_capacity = {}
    for production_type in data['production_types']:
        all_capacity[production_type['name']] = {
            int(ts): convert_to_mw(value)
            for ts, value in zip(data['time'], production_type['data'])
        }
    capacity = update_capacity_breakdown(all_capacity, target_datetime)
    logger.info(f'''Fetched capacity for {zone_key} on {target_datetime.date()}:
{capacity}''')
    return capacity
# NOTE(review): stray no-op expression — looks like the remnant of a stripped
# decorator (e.g. a fixture) on the function below; confirm against upstream.
()
def graph_with_input_arguments_different_variable_types_2(arg1, arg2, variable_v, variable_u, variable_x, variable_y) -> Tuple[(List[BasicBlock], ControlFlowGraph)]:
    """Build an 8-block fixture CFG whose Phi functions mix arguments,
    constants and variables of different types.

    Each parameter is an indexable collection of SSA versions (e.g.
    ``arg2[0]`` is version 0 of the second argument) — presumably supplied
    by a fixture; confirm against the surrounding test module.

    Returns the basic-block list and the assembled ControlFlowGraph.
    """
    # 13 instructions: branches (0, 2, 9), Phis (1, 4, 6-8), assignments
    # (3, 5, 10, 11) and a final Return (12), distributed over 8 blocks below.
    instructions = [Branch(Condition(OperationType.less, [arg2[0], arg1[0]])), Phi(arg2[2], [arg2[0], arg1[0]]), Branch(Condition(OperationType.greater, [arg1[0], BinaryOperation(OperationType.plus, [arg2[2], arg2[2]])])), Assignment(arg2[3], BinaryOperation(OperationType.minus, [arg1[0], arg2[2]])), Phi(arg2[4], [arg2[2], arg2[3]]), Assignment(variable_v[1], BinaryOperation(OperationType.plus, [BinaryOperation(OperationType.minus, [arg1[0], arg2[4]]), Constant(1)])), Phi(variable_u[2], [Constant(1), variable_u[5]]), Phi(variable_v[2], [variable_v[1], variable_v[2]]), Phi(variable_x[2], [Constant(1), variable_y[1]]), Branch(Condition(OperationType.less_or_equal, [variable_u[2], arg2[4]])), Assignment(variable_u[5], BinaryOperation(OperationType.plus, [variable_u[2], Constant(1)])), Assignment(variable_y[1], BinaryOperation(OperationType.divide, [BinaryOperation(OperationType.multiply, [UnaryOperation(OperationType.cast, [variable_v[2]], vartype=Integer.int64_t()), variable_x[2]]), variable_u[2]])), Return([variable_x[2]])]
    nodes = [BasicBlock(i) for i in range(8)]
    nodes[0].instructions = [instructions[0]]
    nodes[1].instructions = []
    nodes[2].instructions = instructions[1:3]
    nodes[3].instructions = [instructions[3]]
    nodes[4].instructions = instructions[4:6]
    nodes[5].instructions = instructions[6:10]
    nodes[6].instructions = instructions[10:12]
    nodes[7].instructions = [instructions[12]]
    # Wire each Phi to the predecessor block each operand originates from.
    instructions[1]._origin_block = {nodes[1]: arg2[0], nodes[0]: arg1[0]}
    instructions[4]._origin_block = {nodes[2]: arg2[2], nodes[3]: arg2[3]}
    instructions[6]._origin_block = {nodes[4]: Constant(1), nodes[6]: variable_u[5]}
    instructions[7]._origin_block = {nodes[4]: variable_v[1], nodes[6]: variable_v[2]}
    instructions[8]._origin_block = {nodes[4]: Constant(1), nodes[6]: variable_y[1]}
    # Diamond over blocks 0-4, then a loop between blocks 5 and 6, exiting to 7.
    cfg = ControlFlowGraph()
    cfg.add_edges_from([TrueCase(nodes[0], nodes[1]), FalseCase(nodes[0], nodes[2]), UnconditionalEdge(nodes[1], nodes[2]), FalseCase(nodes[2], nodes[3]), TrueCase(nodes[2], nodes[4]), UnconditionalEdge(nodes[3], nodes[4]), UnconditionalEdge(nodes[4], nodes[5]), TrueCase(nodes[5], nodes[6]), FalseCase(nodes[5], nodes[7]), UnconditionalEdge(nodes[6], nodes[5])])
    return (nodes, cfg)
def test_history(testbot):
    """History is kept per sender: a second identity starts with an empty,
    independent history, and the original sender's history survives the swap.
    """
    assert ('up' in testbot.exec_command('!uptime'))
    assert ('uptime' in testbot.exec_command('!history'))
    # Switch the bot's sender to a different identity.
    orig_sender = testbot.bot.sender
    testbot.bot.sender = testbot.bot.build_identifier('non_default_person')
    # The new sender has no history yet, so !history produces no reply.
    testbot.push_message('!history')
    with pytest.raises(Empty):
        testbot.pop_message(timeout=1)
    assert ('should be a separate history' in testbot.exec_command('!echo should be a separate history'))
    assert ('should be a separate history' in testbot.exec_command('!history'))
    # Restore the original sender; its earlier history is intact.
    testbot.bot.sender = orig_sender
    assert ('uptime' in testbot.exec_command('!history'))
class LMQLModule(object):
    """Wrapper around a compiled LMQL query file that can be imported as a
    Python module, carrying the originating LMQL source alongside it.
    """

    def __init__(self, compiled_file, lmql_code=None, output_variables=None):
        self.compiled_file = compiled_file
        self._code = None  # lazily-read source text of the compiled file
        self.lmql_code = lmql_code
        self.output_variables = (output_variables or [])

    def load(self):
        """Import the compiled file as a module, tagging query functions and
        the module itself with the originating LMQL code."""
        sys.path.append(os.path.dirname(self.compiled_file))
        m = __import__(os.path.basename(self.compiled_file[:-3]))
        for v in m.__dict__.values():
            if type(v) is lmql_runtime.LMQLQueryFunction:
                v.lmql_code = self.lmql_code
        setattr(m, 'code', self.code)
        setattr(m, 'lmql_code', self.lmql_code)
        return m

    def __str__(self):
        with open(self.compiled_file, 'r') as f:
            return f.read()

    @property
    def code(self):
        """Source text of the compiled file (read once, then cached).

        Fixed: this was a plain method, so ``load()``'s ``self.code`` stored a
        bound method on the module instead of the file's text; restoring the
        (evidently stripped) @property yields the intended string value.
        """
        if self._code is None:
            with open(self.compiled_file, 'r') as f:
                self._code = f.read()
        return self._code
class ResetStrategy(object):
    """Base strategy for toggling serial control lines to reset an attached chip.

    Subclasses implement the actual reset sequence in ``__call__``; the
    helpers below wrap pyserial-style DTR/RTS controls.
    """

    def __init__(self, port, reset_delay=DEFAULT_RESET_DELAY):
        self.port = port
        self.reset_delay = reset_delay

    def __call__(self):
        # Fixed: the original `def __call__():` omitted `self`, so invoking
        # an instance raised TypeError.  The base sequence is a no-op.
        pass

    def _setDTR(self, state):
        self.port.setDTR(state)

    def _setRTS(self, state):
        self.port.setRTS(state)
        # Re-apply DTR after changing RTS — presumably a workaround for
        # drivers that clobber DTR on RTS writes; confirm against upstream.
        self.port.setDTR(self.port.dtr)

    def _setDTRandRTS(self, dtr=False, rts=False):
        """Set both control lines in a single ioctl instead of two serial calls."""
        status = struct.unpack('I', fcntl.ioctl(self.port.fileno(), TIOCMGET, struct.pack('I', 0)))[0]
        if dtr:
            status |= TIOCM_DTR
        else:
            status &= ~TIOCM_DTR
        if rts:
            status |= TIOCM_RTS
        else:
            status &= ~TIOCM_RTS
        fcntl.ioctl(self.port.fileno(), TIOCMSET, struct.pack('I', status))
def exposed_streaming_incremental_delete_invalid_urls():
    """Walk web_pages rows in 'fetching'/'processing' state from the highest
    id downward in chunks and delete rows whose URL is either uncleanable or
    matches a bad-netloc rule.  Progress samples are written to the tqdm bar
    roughly every 5000 bad URLs.  Ctrl-C aborts cleanly.
    """
    print('Purge invalid URLs')
    rulemgr = RuleManager()
    bad_tot = 1
    step = 2500  # id-range width scanned per query
    # NOTE(review): duplicate initialization — bad_tot was already set above.
    bad_tot = 1
    out_sampler = 1
    try:
        # 15-hour statement timeout: the full id-range scan can run very long.
        with db.session_context(name='query_sess', override_timeout_ms=(((1000 * 60) * 60) * 15)) as sess:
            print('Counting items in table')
            print('Getting minimum row in need or update..')
            start = sess.execute("SELECT min(id), max(id) FROM web_pages WHERE (state = 'fetching' OR state = 'processing')")
            (start, stop) = list(start)[0]
            if (start is None):
                print('No rows to reset!')
                return
            print('Minimum row ID: ', start, 'Maximum row ID: ', stop)
            # Iterate id windows from stop down toward start, `step` ids at a time.
            pbar = tqdm.tqdm(range(stop, start, (- step)), position=0)
            for idx in pbar:
                ids = sess.query(db.WebPages.id, db.WebPages.url).filter((db.WebPages.id >= idx)).filter((db.WebPages.id <= (idx + step))).all()
                bad_urls = []
                for (_, url) in ids:
                    if (not urlFuncs.cleanUrl(url)):
                        bad_urls.append(url)
                        # Sample one log line per ~5000 hits to avoid log spam.
                        if (out_sampler == 5000):
                            pbar.write(('Unclean URL: %s' % (url,)))
                            out_sampler = 0
                        bad_tot += 1
                        out_sampler += 1
                    else:
                        parsed = urllib.parse.urlparse(url)
                        nl = parsed.netloc
                        if rulemgr.is_bad(nl, url):
                            bad_urls.append(url)
                            if (out_sampler == 5000):
                                pbar.write(('Bad URL: %s' % (url,)))
                                out_sampler = 0
                            bad_tot += 1
                            out_sampler += 1
                if bad_urls:
                    # Delete and commit per chunk so progress survives aborts.
                    delete_internal_urls(sess=sess, urls=bad_urls, chunk_size=10)
                    sess.commit()
    except KeyboardInterrupt:
        print('Interrupt!')
def extractLazysakuratranslationsCom(item):
    """Parse a lazysakuratranslations.com feed item into a release message.

    Returns None for previews or unnumbered titles, a release message for a
    recognized tag, and False otherwise.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if not (chp or vol) or 'preview' in title.lower():
        return None
    # tag -> (canonical series name, translation type)
    series_by_tag = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tag, (name, tl_type) in series_by_tag.items():
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_get_features_to_drop():
    """Features whose importance falls below the probe(s) are flagged to drop."""
    # One probe with importance 10: among the real features only var3 (9)
    # scores below it.
    selector = ProbeFeatureSelection(estimator=LogisticRegression(), n_probes=1)
    selector.feature_importances_ = pd.Series(
        [11, 12, 9, 10], index=['var1', 'var2', 'var3', 'probe'])
    selector.probe_features_ = pd.DataFrame({'probe': [1] * 5})
    selector.variables_ = ['var1', 'var2', 'var3']
    assert selector._get_features_to_drop() == ['var3']

    # Two probes (importances 10 and 8): only var4 (8.7) is flagged.
    selector = ProbeFeatureSelection(estimator=LogisticRegression(), n_probes=2)
    selector.feature_importances_ = pd.Series(
        [11, 12, 10, 8.7, 10, 8],
        index=['var1', 'var2', 'var3', 'var4', 'probe1', 'probe2'])
    selector.probe_features_ = pd.DataFrame({'probe1': [1] * 5, 'probe2': [1] * 5})
    selector.variables_ = ['var1', 'var2', 'var3', 'var4']
    assert selector._get_features_to_drop() == ['var4']
def construct_command(operation: dict) -> list:
    """Translate an operation description dict into a bpftrace-style argv list.

    Value-taking options are emitted as ``[flag, value]`` pairs, boolean
    options only when present and truthy, and each include directory gets its
    own ``-I``.  Flag order is fixed and matches the original implementation.

    Fixes: table-driven instead of repeated ``'k' in op and op['k']`` checks,
    and no longer shadows the builtin ``dir`` in the include-directory loop.
    """
    cmd = []
    # Options whose dict value becomes the flag's argument.
    for key, flag in (('bufferingMode', '-B'), ('format', '-f'), ('outputFile', '-o')):
        if key in operation:
            cmd += [flag, operation[key]]
    if operation.get('debugInfo'):
        cmd.append('-d')
    if operation.get('verboseDebugInfo'):
        cmd.append('-dd')
    if 'program' in operation:
        cmd += ['-e', operation['program']]
    for include_dir in operation.get('includeDir', ()):
        cmd += ['-I', include_dir]
    # Boolean flags emitted only when truthy.
    for key, flag in (('usdtFileActivation', '--usdt-file-activation'),
                      ('unsafe', '--unsafe'),
                      ('quiet', '-q'),
                      ('verbose', '-v'),
                      ('noWarnings', '--no-warnings')):
        if operation.get(key):
            cmd.append(flag)
    return cmd
class Page(AbstractCrudObject):
    def __init__(self, fbid=None, parent_id=None, api=None):
        """Initialize a Page CRUD node.

        The ``_isPage`` marker is set before delegating to the base
        initializer; presumably shared AbstractCrudObject code paths use it
        to distinguish Page instances — confirm against the base class.
        """
        self._isPage = True
        super(Page, self).__init__(fbid, parent_id, api)
class Field(AbstractObject.Field):
about = 'about'
access_token = 'access_token'
ad_campaign = 'ad_campaign'
affiliation = 'affiliation'
app_id = 'app_id'
artists_we_like = 'artists_we_like'
attire = 'attire'
awards = 'awards'
band_interests = 'band_interests'
band_members = 'band_members'
best_page = 'best_page'
bio = 'bio'
birthday = 'birthday'
booking_agent = 'booking_agent'
built = 'built'
business = 'business'
can_checkin = 'can_checkin'
can_post = 'can_post'
category = 'category'
category_list = 'category_list'
checkins = 'checkins'
company_overview = 'company_overview'
connected_instagram_account = 'connected_instagram_account'
connected_page_backed_instagram_account = 'connected_page_backed_instagram_account'
contact_address = 'contact_address'
copyright_whitelisted_ig_partners = 'copyright_whitelisted_ig_partners'
country_page_likes = 'country_page_likes'
cover = 'cover'
culinary_team = 'culinary_team'
current_location = 'current_location'
delivery_and_pickup_option_info = 'delivery_and_pickup_option_info'
description = 'description'
description_html = 'description_html'
differently_open_offerings = 'differently_open_offerings'
directed_by = 'directed_by'
display_subtext = 'display_subtext'
displayed_message_response_time = 'displayed_message_response_time'
does_viewer_have_page_permission_link_ig = 'does_viewer_have_page_permission_link_ig'
emails = 'emails'
engagement = 'engagement'
fan_count = 'fan_count'
featured_video = 'featured_video'
features = 'features'
followers_count = 'followers_count'
food_styles = 'food_styles'
founded = 'founded'
general_info = 'general_info'
general_manager = 'general_manager'
genre = 'genre'
global_brand_page_name = 'global_brand_page_name'
global_brand_root_id = 'global_brand_root_id'
has_added_app = 'has_added_app'
has_lead_access = 'has_lead_access'
has_transitioned_to_new_page_experience = 'has_transitioned_to_new_page_experience'
has_whatsapp_business_number = 'has_whatsapp_business_number'
has_whatsapp_enterprise_number_using_cloud_api = 'has_whatsapp_enterprise_number_using_cloud_api'
has_whatsapp_number = 'has_whatsapp_number'
hometown = 'hometown'
hours = 'hours'
id = 'id'
impressum = 'impressum'
influences = 'influences'
instagram_business_account = 'instagram_business_account'
is_always_open = 'is_always_open'
is_chain = 'is_chain'
is_community_page = 'is_community_page'
is_eligible_for_branded_content = 'is_eligible_for_branded_content'
is_eligible_for_disable_connect_ig_btn_for_non_page_admin_am_web = 'is_eligible_for_disable_connect_ig_btn_for_non_page_admin_am_web'
is_messenger_bot_get_started_enabled = 'is_messenger_bot_get_started_enabled'
is_messenger_platform_bot = 'is_messenger_platform_bot'
is_owned = 'is_owned'
is_permanently_closed = 'is_permanently_closed'
is_published = 'is_published'
is_unclaimed = 'is_unclaimed'
is_verified = 'is_verified'
is_webhooks_subscribed = 'is_webhooks_subscribed'
keywords = 'keywords'
leadgen_tos_acceptance_time = 'leadgen_tos_acceptance_time'
leadgen_tos_accepted = 'leadgen_tos_accepted'
leadgen_tos_accepting_user = 'leadgen_tos_accepting_user'
link = 'link'
location = 'location'
members = 'members'
merchant_id = 'merchant_id'
merchant_review_status = 'merchant_review_status'
messaging_feature_status = 'messaging_feature_status'
messenger_ads_default_icebreakers = 'messenger_ads_default_icebreakers'
messenger_ads_default_quick_replies = 'messenger_ads_default_quick_replies'
messenger_ads_quick_replies_type = 'messenger_ads_quick_replies_type'
mini_shop_storefront = 'mini_shop_storefront'
mission = 'mission'
mpg = 'mpg'
name = 'name'
name_with_location_descriptor = 'name_with_location_descriptor'
network = 'network'
new_like_count = 'new_like_count'
offer_eligible = 'offer_eligible'
overall_star_rating = 'overall_star_rating'
owner_business = 'owner_business'
page_about_story = 'page_about_story'
page_token = 'page_token'
parent_page = 'parent_page'
parking = 'parking'
payment_options = 'payment_options'
personal_info = 'personal_info'
personal_interests = 'personal_interests'
pharma_safety_info = 'pharma_safety_info'
phone = 'phone'
pickup_options = 'pickup_options'
place_type = 'place_type'
plot_outline = 'plot_outline'
preferred_audience = 'preferred_audience'
press_contact = 'press_contact'
price_range = 'price_range'
privacy_info_url = 'privacy_info_url'
produced_by = 'produced_by'
products = 'products'
promotion_eligible = 'promotion_eligible'
promotion_ineligible_reason = 'promotion_ineligible_reason'
public_transit = 'public_transit'
rating_count = 'rating_count'
recipient = 'recipient'
record_label = 'record_label'
release_date = 'release_date'
restaurant_services = 'restaurant_services'
restaurant_specialties = 'restaurant_specialties'
schedule = 'schedule'
screenplay_by = 'screenplay_by'
season = 'season'
single_line_address = 'single_line_address'
starring = 'starring'
start_info = 'start_info'
store_code = 'store_code'
store_location_descriptor = 'store_location_descriptor'
store_number = 'store_number'
studio = 'studio'
supports_donate_button_in_live_video = 'supports_donate_button_in_live_video'
talking_about_count = 'talking_about_count'
temporary_status = 'temporary_status'
unread_message_count = 'unread_message_count'
unread_notif_count = 'unread_notif_count'
unseen_message_count = 'unseen_message_count'
user_access_expire_time = 'user_access_expire_time'
username = 'username'
verification_status = 'verification_status'
voip_info = 'voip_info'
website = 'website'
were_here_count = 'were_here_count'
whatsapp_number = 'whatsapp_number'
written_by = 'written_by'
class Attire():
casual = 'Casual'
dressy = 'Dressy'
unspecified = 'Unspecified'
class FoodStyles():
afghani = 'Afghani'
american_new_ = 'American (New)'
american_traditional_ = 'American (Traditional)'
asian_fusion = 'Asian Fusion'
barbeque = 'Barbeque'
brazilian = 'Brazilian'
breakfast = 'Breakfast'
british = 'British'
brunch = 'Brunch'
buffets = 'Buffets'
burgers = 'Burgers'
burmese = 'Burmese'
cajun_creole = 'Cajun/Creole'
caribbean = 'Caribbean'
chinese = 'Chinese'
creperies = 'Creperies'
cuban = 'Cuban'
delis = 'Delis'
diners = 'Diners'
ethiopian = 'Ethiopian'
fast_food = 'Fast Food'
filipino = 'Filipino'
fondue = 'Fondue'
food_stands = 'Food Stands'
french = 'French'
german = 'German'
greek_and_mediterranean = 'Greek and Mediterranean'
hawaiian = 'Hawaiian'
himalayan_nepalese = 'Himalayan/Nepalese'
hot_dogs = 'Hot Dogs'
indian_pakistani = 'Indian/Pakistani'
irish = 'Irish'
italian = 'Italian'
japanese = 'Japanese'
korean = 'Korean'
latin_american = 'Latin American'
mexican = 'Mexican'
middle_eastern = 'Middle Eastern'
moroccan = 'Moroccan'
pizza = 'Pizza'
russian = 'Russian'
sandwiches = 'Sandwiches'
seafood = 'Seafood'
singaporean = 'Singaporean'
soul_food = 'Soul Food'
southern = 'Southern'
spanish_basque = 'Spanish/Basque'
steakhouses = 'Steakhouses'
sushi_bars = 'Sushi Bars'
taiwanese = 'Taiwanese'
tapas_bars = 'Tapas Bars'
tex_mex = 'Tex-Mex'
thai = 'Thai'
turkish = 'Turkish'
vegan = 'Vegan'
vegetarian = 'Vegetarian'
vietnamese = 'Vietnamese'
class PickupOptions():
curbside = 'CURBSIDE'
in_store = 'IN_STORE'
other = 'OTHER'
class TemporaryStatus():
differently_open = 'DIFFERENTLY_OPEN'
no_data = 'NO_DATA'
operating_as_usual = 'OPERATING_AS_USUAL'
temporarily_closed = 'TEMPORARILY_CLOSED'
class PermittedTasks():
advertise = 'ADVERTISE'
analyze = 'ANALYZE'
cashier_role = 'CASHIER_ROLE'
create_content = 'CREATE_CONTENT'
manage = 'MANAGE'
manage_jobs = 'MANAGE_JOBS'
manage_leads = 'MANAGE_LEADS'
messaging = 'MESSAGING'
moderate = 'MODERATE'
moderate_community = 'MODERATE_COMMUNITY'
pages_messaging = 'PAGES_MESSAGING'
pages_messaging_subscriptions = 'PAGES_MESSAGING_SUBSCRIPTIONS'
profile_plus_advertise = 'PROFILE_PLUS_ADVERTISE'
profile_plus_analyze = 'PROFILE_PLUS_ANALYZE'
profile_plus_create_content = 'PROFILE_PLUS_CREATE_CONTENT'
profile_plus_facebook_access = 'PROFILE_PLUS_FACEBOOK_ACCESS'
profile_plus_full_control = 'PROFILE_PLUS_FULL_CONTROL'
profile_plus_manage = 'PROFILE_PLUS_MANAGE'
profile_plus_manage_leads = 'PROFILE_PLUS_MANAGE_LEADS'
profile_plus_messaging = 'PROFILE_PLUS_MESSAGING'
profile_plus_moderate = 'PROFILE_PLUS_MODERATE'
profile_plus_moderate_delegate_community = 'PROFILE_PLUS_MODERATE_DELEGATE_COMMUNITY'
profile_plus_revenue = 'PROFILE_PLUS_REVENUE'
read_page_mailboxes = 'READ_PAGE_MAILBOXES'
view_monetization_insights = 'VIEW_MONETIZATION_INSIGHTS'
class Tasks():
advertise = 'ADVERTISE'
analyze = 'ANALYZE'
cashier_role = 'CASHIER_ROLE'
create_content = 'CREATE_CONTENT'
manage = 'MANAGE'
manage_jobs = 'MANAGE_JOBS'
manage_leads = 'MANAGE_LEADS'
messaging = 'MESSAGING'
moderate = 'MODERATE'
moderate_community = 'MODERATE_COMMUNITY'
pages_messaging = 'PAGES_MESSAGING'
pages_messaging_subscriptions = 'PAGES_MESSAGING_SUBSCRIPTIONS'
profile_plus_advertise = 'PROFILE_PLUS_ADVERTISE'
profile_plus_analyze = 'PROFILE_PLUS_ANALYZE'
profile_plus_create_content = 'PROFILE_PLUS_CREATE_CONTENT'
profile_plus_facebook_access = 'PROFILE_PLUS_FACEBOOK_ACCESS'
profile_plus_full_control = 'PROFILE_PLUS_FULL_CONTROL'
profile_plus_manage = 'PROFILE_PLUS_MANAGE'
profile_plus_manage_leads = 'PROFILE_PLUS_MANAGE_LEADS'
profile_plus_messaging = 'PROFILE_PLUS_MESSAGING'
profile_plus_moderate = 'PROFILE_PLUS_MODERATE'
profile_plus_moderate_delegate_community = 'PROFILE_PLUS_MODERATE_DELEGATE_COMMUNITY'
profile_plus_revenue = 'PROFILE_PLUS_REVENUE'
read_page_mailboxes = 'READ_PAGE_MAILBOXES'
view_monetization_insights = 'VIEW_MONETIZATION_INSIGHTS'
class Alignment():
left = 'LEFT'
right = 'RIGHT'
class EntryPointIcon():
chat_angular_icon = 'CHAT_ANGULAR_ICON'
chat_round_icon = 'CHAT_ROUND_ICON'
messenger_icon = 'MESSENGER_ICON'
none = 'NONE'
class EntryPointLabel():
ask_us = 'ASK_US'
chat = 'CHAT'
help = 'HELP'
none = 'NONE'
class GreetingDialogDisplay():
hide = 'HIDE'
show = 'SHOW'
welcome_message = 'WELCOME_MESSAGE'
class GuestChatMode():
disabled = 'DISABLED'
enabled = 'ENABLED'
class MobileChatDisplay():
app_switch = 'APP_SWITCH'
chat_tab = 'CHAT_TAB'
class BackdatedTimeGranularity():
day = 'day'
hour = 'hour'
min = 'min'
month = 'month'
none = 'none'
year = 'year'
class Formatting():
markdown = 'MARKDOWN'
plaintext = 'PLAINTEXT'
class PlaceAttachmentSetting():
value_1 = '1'
value_2 = '2'
class PostSurfacesBlacklist():
value_1 = '1'
value_2 = '2'
value_3 = '3'
value_4 = '4'
value_5 = '5'
class PostingToRedspace():
disabled = 'disabled'
enabled = 'enabled'
class TargetSurface():
story = 'STORY'
timeline = 'TIMELINE'
class UnpublishedContentType():
ads_post = 'ADS_POST'
draft = 'DRAFT'
inline_created = 'INLINE_CREATED'
published = 'PUBLISHED'
reviewable_branded_content = 'REVIEWABLE_BRANDED_CONTENT'
scheduled = 'SCHEDULED'
scheduled_recurring = 'SCHEDULED_RECURRING'
class MessagingType():
message_tag = 'MESSAGE_TAG'
response = 'RESPONSE'
update = 'UPDATE'
class NotificationType():
no_push = 'NO_PUSH'
regular = 'REGULAR'
silent_push = 'SILENT_PUSH'
class SenderAction():
mark_seen = 'MARK_SEEN'
react = 'REACT'
typing_off = 'TYPING_OFF'
typing_on = 'TYPING_ON'
unreact = 'UNREACT'
class SuggestionAction():
accept = 'ACCEPT'
dismiss = 'DISMISS'
impression = 'IMPRESSION'
class Platform():
instagram = 'INSTAGRAM'
messenger = 'MESSENGER'
class Model:
    # Allowed string values for the ``model`` (language model) enum parameter.
    arabic = 'ARABIC'
    chinese = 'CHINESE'
    croatian = 'CROATIAN'
    custom = 'CUSTOM'
    danish = 'DANISH'
    dutch = 'DUTCH'
    english = 'ENGLISH'
    french_standard = 'FRENCH_STANDARD'
    georgian = 'GEORGIAN'
    german_standard = 'GERMAN_STANDARD'
    greek = 'GREEK'
    hebrew = 'HEBREW'
    hungarian = 'HUNGARIAN'
    irish = 'IRISH'
    italian_standard = 'ITALIAN_STANDARD'
    korean = 'KOREAN'
    norwegian_bokmal = 'NORWEGIAN_BOKMAL'
    polish = 'POLISH'
    portuguese = 'PORTUGUESE'
    romanian = 'ROMANIAN'
    spanish = 'SPANISH'
    swedish = 'SWEDISH'
    vietnamese = 'VIETNAMESE'
class DeveloperAction:
    # Allowed string values for the ``developer_action`` enum parameter.
    enable_followup_message = 'ENABLE_FOLLOWUP_MESSAGE'
class SubscribedFields:
    # Allowed string values for the webhook ``subscribed_fields`` enum parameter.
    affiliation = 'affiliation'
    attire = 'attire'
    awards = 'awards'
    bio = 'bio'
    birthday = 'birthday'
    category = 'category'
    checkins = 'checkins'
    company_overview = 'company_overview'
    conversations = 'conversations'
    culinary_team = 'culinary_team'
    current_location = 'current_location'
    description = 'description'
    email = 'email'
    feature_access_list = 'feature_access_list'
    feed = 'feed'
    founded = 'founded'
    general_info = 'general_info'
    general_manager = 'general_manager'
    group_feed = 'group_feed'
    hometown = 'hometown'
    hours = 'hours'
    inbox_labels = 'inbox_labels'
    invalid_topic_placeholder = 'invalid_topic_placeholder'
    invoice_access_bank_slip_events = 'invoice_access_bank_slip_events'
    invoice_access_invoice_change = 'invoice_access_invoice_change'
    invoice_access_invoice_draft_change = 'invoice_access_invoice_draft_change'
    invoice_access_onboarding_status_active = 'invoice_access_onboarding_status_active'
    leadgen = 'leadgen'
    leadgen_fat = 'leadgen_fat'
    live_videos = 'live_videos'
    local_delivery = 'local_delivery'
    location = 'location'
    mcom_invoice_change = 'mcom_invoice_change'
    members = 'members'
    mention = 'mention'
    merchant_review = 'merchant_review'
    message_context = 'message_context'
    message_deliveries = 'message_deliveries'
    message_echoes = 'message_echoes'
    message_mention = 'message_mention'
    message_reactions = 'message_reactions'
    message_reads = 'message_reads'
    messages = 'messages'
    messaging_account_linking = 'messaging_account_linking'
    messaging_appointments = 'messaging_appointments'
    messaging_checkout_updates = 'messaging_checkout_updates'
    messaging_customer_information = 'messaging_customer_information'
    messaging_direct_sends = 'messaging_direct_sends'
    messaging_fblogin_account_linking = 'messaging_fblogin_account_linking'
    messaging_feedback = 'messaging_feedback'
    messaging_game_plays = 'messaging_game_plays'
    messaging_handovers = 'messaging_handovers'
    messaging_in_thread_lead_form_submit = 'messaging_in_thread_lead_form_submit'
    messaging_optins = 'messaging_optins'
    messaging_optouts = 'messaging_optouts'
    messaging_payments = 'messaging_payments'
    messaging_policy_enforcement = 'messaging_policy_enforcement'
    messaging_postbacks = 'messaging_postbacks'
    messaging_pre_checkouts = 'messaging_pre_checkouts'
    messaging_referrals = 'messaging_referrals'
    mission = 'mission'
    name = 'name'
    otp_verification = 'otp_verification'
    page_about_story = 'page_about_story'
    page_change_proposal = 'page_change_proposal'
    page_upcoming_change = 'page_upcoming_change'
    parking = 'parking'
    payment_options = 'payment_options'
    personal_info = 'personal_info'
    personal_interests = 'personal_interests'
    phone = 'phone'
    picture = 'picture'
    price_range = 'price_range'
    product_review = 'product_review'
    products = 'products'
    public_transit = 'public_transit'
    publisher_subscriptions = 'publisher_subscriptions'
    ratings = 'ratings'
    registration = 'registration'
    send_cart = 'send_cart'
    standby = 'standby'
    user_action = 'user_action'
    video_text_question_responses = 'video_text_question_responses'
    videos = 'videos'
    website = 'website'
def get_endpoint(cls):
    """Return the default edge name used when listing nodes of this type."""
    return 'accounts'
def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this node (``GET /``), refreshing the current object in place."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'account_linking_token': 'string'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Page, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def api_update(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Update this node (``POST /``), refreshing the current object in place."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'about': 'string', 'accept_crossposting_handshake': 'list<map>', 'allow_spherical_photo': 'bool', 'attire': 'attire_enum', 'begin_crossposting_handshake': 'list<map>', 'bio': 'string', 'category_list': 'list<string>', 'company_overview': 'string', 'contact_address': 'Object', 'cover': 'string', 'culinary_team': 'string', 'delivery_and_pickup_option_info': 'list<string>', 'description': 'string', 'differently_open_offerings': 'map', 'directed_by': 'string', 'displayed_message_response_time': 'string', 'emails': 'list<string>', 'focus_x': 'float', 'focus_y': 'float', 'food_styles': 'list<food_styles_enum>', 'general_info': 'string', 'general_manager': 'string', 'genre': 'string', 'hours': 'map', 'ignore_coordinate_warnings': 'bool', 'impressum': 'string', 'is_always_open': 'bool', 'is_permanently_closed': 'bool', 'is_published': 'bool', 'is_webhooks_subscribed': 'bool', 'location': 'Object', 'menu': 'string', 'mission': 'string', 'no_feed_story': 'bool', 'no_notification': 'bool', 'offset_x': 'int', 'offset_y': 'int', 'parking': 'map', 'payment_options': 'map', 'phone': 'string', 'pickup_options': 'list<pickup_options_enum>', 'plot_outline': 'string', 'price_range': 'string', 'public_transit': 'string', 'restaurant_services': 'map', 'restaurant_specialties': 'map', 'scrape': 'bool', 'service_details': 'string', 'spherical_metadata': 'map', 'start_info': 'Object', 'store_location_descriptor': 'string', 'temporary_status': 'temporary_status_enum', 'website': 'string', 'zoom_scale_x': 'float', 'zoom_scale_y': 'float'}
    enums = {'attire_enum': Page.Attire.__dict__.values(), 'food_styles_enum': Page.FoodStyles.__dict__.values(), 'pickup_options_enum': Page.PickupOptions.__dict__.values(), 'temporary_status_enum': Page.TemporaryStatus.__dict__.values()}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Page, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_ab_tests(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch the ``/ab_tests`` edge, returning PagePostExperiment objects."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.pagepostexperiment import PagePostExperiment
    param_types = {}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/ab_tests', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=PagePostExperiment, api_type='EDGE', response_parser=ObjectParser(target_class=PagePostExperiment, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_ab_test(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Create a post experiment via the ``/ab_tests`` edge (``POST``)."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.pagepostexperiment import PagePostExperiment
    param_types = {'control_video_id': 'string', 'description': 'string', 'duration': 'unsigned int', 'experiment_video_ids': 'list<string>', 'name': 'string', 'optimization_goal': 'optimization_goal_enum', 'scheduled_experiment_timestamp': 'unsigned int'}
    enums = {'optimization_goal_enum': PagePostExperiment.OptimizationGoal.__dict__.values()}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/ab_tests', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=PagePostExperiment, api_type='EDGE', response_parser=ObjectParser(target_class=PagePostExperiment, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_acknowledge_order(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Acknowledge commerce orders via the ``/acknowledge_orders`` edge (``POST``)."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'idempotency_key': 'string', 'orders': 'list<map>'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/acknowledge_orders', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Page, api_type='EDGE', response_parser=ObjectParser(target_class=Page, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_ads_posts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch the ``/ads_posts`` edge, returning PagePost objects."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.pagepost import PagePost
    param_types = {'exclude_dynamic_ads': 'bool', 'include_inline_create': 'bool', 'since': 'datetime', 'until': 'datetime'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/ads_posts', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=PagePost, api_type='EDGE', response_parser=ObjectParser(target_class=PagePost, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def delete_agencies(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Remove an agency business from this page (``DELETE /agencies``)."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'business': 'string'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='DELETE', endpoint='/agencies', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_agencies(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch the ``/agencies`` edge, returning Business objects."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.business import Business
    param_types = {}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/agencies', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Business, api_type='EDGE', response_parser=ObjectParser(target_class=Business, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_agency(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Grant an agency business tasks on this page (``POST /agencies``)."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'business': 'string', 'permitted_tasks': 'list<permitted_tasks_enum>'}
    enums = {'permitted_tasks_enum': Page.PermittedTasks.__dict__.values()}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/agencies', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Page, api_type='EDGE', response_parser=ObjectParser(target_class=Page, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_albums(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch the ``/albums`` edge, returning Album objects."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.album import Album
    param_types = {}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/albums', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Album, api_type='EDGE', response_parser=ObjectParser(target_class=Album, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_ar_experience(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch the ``/ar_experience`` edge (untyped result objects)."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/ar_experience', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def delete_assigned_users(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Remove a user's assignment from this page (``DELETE /assigned_users``)."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'user': 'int'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='DELETE', endpoint='/assigned_users', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_assigned_users(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch the ``/assigned_users`` edge, returning AssignedUser objects."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.assigneduser import AssignedUser
    param_types = {'business': 'string'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/assigned_users', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AssignedUser, api_type='EDGE', response_parser=ObjectParser(target_class=AssignedUser, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_assigned_user(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Assign a user tasks on this page (``POST /assigned_users``)."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'tasks': 'list<tasks_enum>', 'user': 'int'}
    enums = {'tasks_enum': Page.Tasks.__dict__.values()}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/assigned_users', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Page, api_type='EDGE', response_parser=ObjectParser(target_class=Page, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def delete_blocked(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Unblock a user or profile from this page (``DELETE /blocked``)."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'asid': 'string', 'psid': 'int', 'uid': 'int', 'user': 'int'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='DELETE', endpoint='/blocked', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_blocked(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch the ``/blocked`` edge, returning Profile objects."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.profile import Profile
    param_types = {'uid': 'int', 'user': 'int'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/blocked', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Profile, api_type='EDGE', response_parser=ObjectParser(target_class=Profile, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_blocked(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Block users or profiles from this page (``POST /blocked``)."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'asid': 'list<string>', 'psid': 'list<int>', 'uid': 'list<string>', 'user': 'list<string>'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/blocked', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_business_datum(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Upload business data to the ``/business_data`` edge (``POST``)."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'data': 'list<string>', 'partner_agent': 'string', 'processing_type': 'string'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/business_data', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_business_projects(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch the ``/businessprojects`` edge (untyped result objects)."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'business': 'string'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/businessprojects', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_call_to_actions(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch the ``/call_to_actions`` edge, returning PageCallToAction objects."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.pagecalltoaction import PageCallToAction
    param_types = {}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/call_to_actions', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=PageCallToAction, api_type='EDGE', response_parser=ObjectParser(target_class=PageCallToAction, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_canvas_elements(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch the ``/canvas_elements`` edge, returning CanvasBodyElement objects."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.canvasbodyelement import CanvasBodyElement
    param_types = {}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/canvas_elements', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=CanvasBodyElement, api_type='EDGE', response_parser=ObjectParser(target_class=CanvasBodyElement, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_canvas_element(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Create a canvas body element (``POST /canvas_elements``)."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.canvasbodyelement import CanvasBodyElement
    param_types = {'canvas_button': 'Object', 'canvas_carousel': 'Object', 'canvas_footer': 'Object', 'canvas_header': 'Object', 'canvas_lead_form': 'Object', 'canvas_photo': 'Object', 'canvas_product_list': 'Object', 'canvas_product_set': 'Object', 'canvas_store_locator': 'Object', 'canvas_template_video': 'Object', 'canvas_text': 'Object', 'canvas_video': 'Object'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/canvas_elements', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=CanvasBodyElement, api_type='EDGE', response_parser=ObjectParser(target_class=CanvasBodyElement, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_canvases(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch the ``/canvases`` edge, returning Canvas objects."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.canvas import Canvas
    param_types = {'is_hidden': 'bool', 'is_published': 'bool'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/canvases', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Canvas, api_type='EDGE', response_parser=ObjectParser(target_class=Canvas, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_canvase(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Create a Canvas (``POST /canvases``).

    Name kept as ``create_canvase`` — it is the generated SDK's public API.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.canvas import Canvas
    param_types = {'background_color': 'string', 'body_element_ids': 'list<string>', 'enable_swipe_to_open': 'bool', 'is_hidden': 'bool', 'is_published': 'bool', 'name': 'string', 'source_template_id': 'string'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/canvases', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Canvas, api_type='EDGE', response_parser=ObjectParser(target_class=Canvas, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_chat_plugin(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch the ``/chat_plugin`` edge, returning ChatPlugin objects."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.chatplugin import ChatPlugin
    param_types = {}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/chat_plugin', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=ChatPlugin, api_type='EDGE', response_parser=ObjectParser(target_class=ChatPlugin, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_chat_plugin(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Configure this page's chat plugin (``POST /chat_plugin``)."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'alignment': 'alignment_enum', 'desktop_bottom_spacing': 'unsigned int', 'desktop_side_spacing': 'unsigned int', 'entry_point_icon': 'entry_point_icon_enum', 'entry_point_label': 'entry_point_label_enum', 'greeting_dialog_display': 'greeting_dialog_display_enum', 'guest_chat_mode': 'guest_chat_mode_enum', 'mobile_bottom_spacing': 'unsigned int', 'mobile_chat_display': 'mobile_chat_display_enum', 'mobile_side_spacing': 'unsigned int', 'theme_color': 'string', 'welcome_screen_greeting': 'string'}
    enums = {'alignment_enum': Page.Alignment.__dict__.values(), 'entry_point_icon_enum': Page.EntryPointIcon.__dict__.values(), 'entry_point_label_enum': Page.EntryPointLabel.__dict__.values(), 'greeting_dialog_display_enum': Page.GreetingDialogDisplay.__dict__.values(), 'guest_chat_mode_enum': Page.GuestChatMode.__dict__.values(), 'mobile_chat_display_enum': Page.MobileChatDisplay.__dict__.values()}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/chat_plugin', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Page, api_type='EDGE', response_parser=ObjectParser(target_class=Page, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_commerce_eligibility(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch the ``/commerce_eligibility`` edge, returning PageCommerceEligibility objects."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.pagecommerceeligibility import PageCommerceEligibility
    param_types = {}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/commerce_eligibility', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=PageCommerceEligibility, api_type='EDGE', response_parser=ObjectParser(target_class=PageCommerceEligibility, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_commerce_merchant_settings(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch the ``/commerce_merchant_settings`` edge, returning CommerceMerchantSettings objects."""
    from facebook_business.utils import api_utils
    # Callbacks are honored only when the request is added to a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.commercemerchantsettings import CommerceMerchantSettings
    param_types = {}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/commerce_merchant_settings', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=CommerceMerchantSettings, api_type='EDGE', response_parser=ObjectParser(target_class=CommerceMerchantSettings, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_commerce_orders(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this Page's /commerce_orders edge (GET).

    Supports filtering by state and update-time window; see the edge's
    declared parameter types below.
    """
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.commerceorder import CommerceOrder
    # Declared parameter types and enum domains for this edge.
    edge_params = {'filters': 'list<filters_enum>', 'state': 'list<state_enum>', 'updated_after': 'datetime', 'updated_before': 'datetime'}
    edge_enums = {'filters_enum': CommerceOrder.Filters.__dict__.values(), 'state_enum': CommerceOrder.State.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/commerce_orders',
        api=self._api,
        param_checker=TypeChecker(edge_params, edge_enums),
        target_class=CommerceOrder,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=CommerceOrder, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_commerce_payouts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this Page's /commerce_payouts edge (GET), optionally bounded by a time window."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.commercepayout import CommercePayout
    # Declared parameter types for this edge; no enum-valued parameters.
    edge_params = {'end_time': 'datetime', 'start_time': 'datetime'}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/commerce_payouts',
        api=self._api,
        param_checker=TypeChecker(edge_params, {}),
        target_class=CommercePayout,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=CommercePayout, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_commerce_transactions(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this Page's /commerce_transactions edge (GET)."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.commerceordertransactiondetail import CommerceOrderTransactionDetail
    # Declared parameter types for this edge; no enum-valued parameters.
    edge_params = {'end_time': 'datetime', 'payout_reference_id': 'string', 'start_time': 'datetime'}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/commerce_transactions',
        api=self._api,
        param_checker=TypeChecker(edge_params, {}),
        target_class=CommerceOrderTransactionDetail,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=CommerceOrderTransactionDetail, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_conversations(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this Page's /conversations edge (GET) as UnifiedThread objects."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.unifiedthread import UnifiedThread
    # Declared parameter types and enum domains for this edge.
    edge_params = {'folder': 'string', 'platform': 'platform_enum', 'tags': 'list<string>', 'user_id': 'string'}
    edge_enums = {'platform_enum': UnifiedThread.Platform.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/conversations',
        api=self._api,
        param_checker=TypeChecker(edge_params, edge_enums),
        target_class=UnifiedThread,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=UnifiedThread, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_copyright_manual_claim(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Create a manual copyright claim via POST /copyright_manual_claims."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    # Declared parameter types and enum domains for this edge.
    edge_params = {'action': 'action_enum', 'action_reason': 'action_reason_enum', 'countries': 'Object', 'match_content_type': 'match_content_type_enum', 'matched_asset_id': 'string', 'reference_asset_id': 'string', 'selected_segments': 'list<map>'}
    edge_enums = {'action_enum': ['BLOCK', 'CLAIM_AD_EARNINGS', 'MANUAL_REVIEW', 'MONITOR', 'REQUEST_TAKEDOWN'], 'action_reason_enum': ['ARTICLE_17_PREFLAGGING', 'ARTIST_OBJECTION', 'OBJECTIONABLE_CONTENT', 'PREMIUM_MUSIC_VIDEO', 'PRERELEASE_CONTENT', 'PRODUCT_PARAMETERS', 'RESTRICTED_CONTENT', 'UNAUTHORIZED_COMMERCIAL_USE'], 'match_content_type_enum': ['AUDIO_ONLY', 'VIDEO_AND_AUDIO', 'VIDEO_ONLY']}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/copyright_manual_claims',
        api=self._api,
        param_checker=TypeChecker(edge_params, edge_enums),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_crosspost_whitelisted_pages(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this Page's /crosspost_whitelisted_pages edge (GET)."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    # No typed parameters on this edge.
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/crosspost_whitelisted_pages',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_custom_labels(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this Page's /custom_labels edge (GET)."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.pageusermessagethreadlabel import PageUserMessageThreadLabel
    # No typed parameters on this edge.
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/custom_labels',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=PageUserMessageThreadLabel,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=PageUserMessageThreadLabel, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_custom_label(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Create a message-thread label via POST /custom_labels."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.pageusermessagethreadlabel import PageUserMessageThreadLabel
    # Declared parameter types for this edge; no enum-valued parameters.
    edge_params = {'name': 'string', 'page_label_name': 'string'}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/custom_labels',
        api=self._api,
        param_checker=TypeChecker(edge_params, {}),
        target_class=PageUserMessageThreadLabel,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=PageUserMessageThreadLabel, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def delete_custom_user_settings(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Delete per-user settings via DELETE /custom_user_settings."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    # Declared parameter types and enum domains for this edge.
    edge_params = {'params': 'list<params_enum>', 'psid': 'string'}
    edge_enums = {'params_enum': ['PERSISTENT_MENU']}
    request = FacebookRequest(
        node_id=self['id'],
        method='DELETE',
        endpoint='/custom_user_settings',
        api=self._api,
        param_checker=TypeChecker(edge_params, edge_enums),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_custom_user_settings(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read per-user settings via GET /custom_user_settings."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.customusersettings import CustomUserSettings
    # Declared parameter types for this edge; no enum-valued parameters.
    edge_params = {'psid': 'string'}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/custom_user_settings',
        api=self._api,
        param_checker=TypeChecker(edge_params, {}),
        target_class=CustomUserSettings,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=CustomUserSettings, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_custom_user_setting(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Set per-user settings (e.g. a persistent menu) via POST /custom_user_settings."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    # Declared parameter types for this edge; no enum-valued parameters.
    edge_params = {'persistent_menu': 'list<Object>', 'psid': 'string'}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/custom_user_settings',
        api=self._api,
        param_checker=TypeChecker(edge_params, {}),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_dataset(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this Page's /dataset edge (GET)."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.dataset import Dataset
    # No typed parameters on this edge.
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/dataset',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=Dataset,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Dataset, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_events(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this Page's /events edge (GET)."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.event import Event
    # Declared parameter types and enum domains for this edge.
    edge_params = {'event_state_filter': 'list<event_state_filter_enum>', 'include_canceled': 'bool', 'time_filter': 'time_filter_enum', 'type': 'type_enum'}
    edge_enums = {'event_state_filter_enum': Event.EventStateFilter.__dict__.values(), 'time_filter_enum': Event.TimeFilter.__dict__.values(), 'type_enum': Event.Type.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/events',
        api=self._api,
        param_checker=TypeChecker(edge_params, edge_enums),
        target_class=Event,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Event, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_extend_thread_control(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Extend messaging thread control via POST /extend_thread_control."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    # Declared parameter types for this edge; no enum-valued parameters.
    edge_params = {'duration': 'unsigned int', 'recipient': 'Object'}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/extend_thread_control',
        api=self._api,
        param_checker=TypeChecker(edge_params, {}),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_fantasy_games(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this Page's /fantasy_games edge (GET)."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    # No typed parameters on this edge.
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/fantasy_games',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_feed(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this Page's /feed edge (GET) as PagePost objects."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.pagepost import PagePost
    # Declared parameter types and enum domains for this edge.
    edge_params = {'include_hidden': 'bool', 'limit': 'unsigned int', 'show_expired': 'bool', 'with': 'with_enum'}
    edge_enums = {'with_enum': PagePost.With.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/feed',
        api=self._api,
        param_checker=TypeChecker(edge_params, edge_enums),
        target_class=PagePost,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=PagePost, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_feed(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Publish a post to this Page via POST /feed.

    Builds a FacebookRequest for the edge, attaches `params` and
    `fields`, then either queues it on `batch`, returns it unsent when
    `pending` is truthy, or executes it immediately and returns the
    parsed response.
    """
    from facebook_business.utils import api_utils
    # The success/failure hooks are honored only for batched calls.
    if ((batch is None) and ((success is not None) or (failure is not None))):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    # Declared parameter types for this edge (auto-generated from the API schema).
    param_types = {'actions': 'Object', 'adaptive_type': 'string', 'album_id': 'string', 'android_key_hash': 'string', 'animated_effect_id': 'unsigned int', 'application_id': 'string', 'asked_fun_fact_prompt_id': 'unsigned int', 'asset3d_id': 'unsigned int', 'associated_id': 'string', 'attach_place_suggestion': 'bool', 'attached_media': 'list<Object>', 'audience_exp': 'bool', 'backdated_time': 'datetime', 'backdated_time_granularity': 'backdated_time_granularity_enum', 'call_to_action': 'Object', 'caption': 'string', 'child_attachments': 'list<Object>', 'client_mutation_id': 'string', 'composer_entry_picker': 'string', 'composer_entry_point': 'string', 'composer_entry_time': 'unsigned int', 'composer_session_events_log': 'string', 'composer_session_id': 'string', 'composer_source_surface': 'string', 'composer_type': 'string', 'connection_class': 'string', 'content_attachment': 'string', 'coordinates': 'Object', 'cta_link': 'string', 'cta_type': 'string', 'description': 'string', 'direct_share_status': 'unsigned int', 'enforce_link_ownership': 'bool', 'expanded_height': 'unsigned int', 'expanded_width': 'unsigned int', 'feed_targeting': 'Object', 'formatting': 'formatting_enum', 'fun_fact_prompt_id': 'unsigned int', 'fun_fact_toastee_id': 'unsigned int', 'has_nickname': 'bool', 'height': 'unsigned int', 'holiday_card': 'string', 'home_checkin_city_id': 'Object', 'image_crops': 'map', 'implicit_with_tags': 'list<int>', 'instant_game_entry_point_data': 'string', 'ios_bundle_id': 'string', 'is_backout_draft': 'bool', 'is_boost_intended': 'bool', 'is_explicit_location': 'bool', 'is_explicit_share': 'bool', 'is_group_linking_post': 'bool', 'is_photo_container': 'bool', 'link': 'string', 'location_source_id': 'string', 'manual_privacy': 'bool', 'message': 'string', 'multi_share_end_card': 'bool', 'multi_share_optimized': 'bool', 'name': 'string', 'nectar_module': 'string', 'object_attachment': 'string', 'offer_like_post_id': 'unsigned int', 'og_action_type_id': 'string', 
    'og_hide_object_attachment': 'bool', 'og_icon_id': 'string', 'og_object_id': 'string', 'og_phrase': 'string', 'og_set_profile_badge': 'bool', 'og_suggestion_mechanism': 'string', 'page_recommendation': 'string', 'picture': 'string', 'place': 'Object', 'place_attachment_setting': 'place_attachment_setting_enum', 'place_list': 'string', 'place_list_data': 'list', 'post_surfaces_blacklist': 'list<post_surfaces_blacklist_enum>', 'posting_to_redspace': 'posting_to_redspace_enum', 'privacy': 'string', 'prompt_id': 'string', 'prompt_tracking_string': 'string', 'properties': 'Object', 'proxied_app_id': 'string', 'publish_event_id': 'unsigned int', 'published': 'bool', 'quote': 'string', 'react_mode_metadata': 'string', 'ref': 'list<string>', 'referenceable_image_ids': 'list<string>', 'referral_id': 'string', 'scheduled_publish_time': 'datetime', 'source': 'string', 'sponsor_id': 'string', 'sponsor_relationship': 'unsigned int', 'suggested_place_id': 'Object', 'tags': 'list<int>', 'target_surface': 'target_surface_enum', 'targeting': 'Object', 'text_format_metadata': 'string', 'text_format_preset_id': 'string', 'text_only_place': 'string', 'throwback_camera_roll_media': 'string', 'thumbnail': 'file', 'time_since_original_post': 'unsigned int', 'title': 'string', 'tracking_info': 'string', 'unpublished_content_type': 'unpublished_content_type_enum', 'user_selected_tags': 'bool', 'video_start_time_ms': 'unsigned int', 'viewer_coordinates': 'Object', 'width': 'unsigned int'}
    # Enum-valued parameters resolve against the Page class's constant groups.
    enums = {'backdated_time_granularity_enum': Page.BackdatedTimeGranularity.__dict__.values(), 'formatting_enum': Page.Formatting.__dict__.values(), 'place_attachment_setting_enum': Page.PlaceAttachmentSetting.__dict__.values(), 'post_surfaces_blacklist_enum': Page.PostSurfacesBlacklist.__dict__.values(), 'posting_to_redspace_enum': Page.PostingToRedspace.__dict__.values(), 'target_surface_enum': Page.TargetSurface.__dict__.values(), 'unpublished_content_type_enum': Page.UnpublishedContentType.__dict__.values()}
    request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/feed', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Page, api_type='EDGE', response_parser=ObjectParser(target_class=Page, api=self._api))
    request.add_params(params)
    request.add_fields(fields)
    # Batched call: enqueue and return the request handle.
    if (batch is not None):
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    # Pending: hand back the unsent request for the caller to execute.
    elif pending:
        return request
    else:
        self.assure_call()
        return request.execute()
def get_global_brand_children(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this Page's /global_brand_children edge (GET)."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    # No typed parameters on this edge.
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/global_brand_children',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_groups(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this Page's /groups edge (GET)."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.group import Group
    # Declared parameter types for this edge; no enum-valued parameters.
    edge_params = {'admin_only': 'bool', 'parent': 'string'}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/groups',
        api=self._api,
        param_checker=TypeChecker(edge_params, {}),
        target_class=Group,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Group, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_image_copyrights(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this Page's /image_copyrights edge (GET)."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.imagecopyright import ImageCopyright
    # No typed parameters on this edge.
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/image_copyrights',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=ImageCopyright,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=ImageCopyright, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_image_copyright(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Register an image copyright via POST /image_copyrights."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.imagecopyright import ImageCopyright
    # Declared parameter types and enum domains for this edge.
    edge_params = {'artist': 'string', 'attribution_link': 'string', 'creator': 'string', 'custom_id': 'string', 'description': 'string', 'filename': 'string', 'geo_ownership': 'list<geo_ownership_enum>', 'original_content_creation_date': 'unsigned int', 'reference_photo': 'string', 'title': 'string'}
    edge_enums = {'geo_ownership_enum': ImageCopyright.GeoOwnership.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/image_copyrights',
        api=self._api,
        param_checker=TypeChecker(edge_params, edge_enums),
        target_class=ImageCopyright,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=ImageCopyright, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_indexed_videos(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this Page's /indexed_videos edge (GET) as AdVideo objects."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.advideo import AdVideo
    # No typed parameters on this edge.
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/indexed_videos',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AdVideo,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AdVideo, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_insights(self, fields=None, params=None, is_async=False, batch=None, success=None, failure=None, pending=False):
    """Read this Page's /insights edge (GET).

    When `is_async` is truthy, delegates to get_insights_async instead.
    Summary data is excluded (include_summary=False).
    """
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.insightsresult import InsightsResult
    # Asynchronous variant is handled by a sibling method.
    if is_async:
        return self.get_insights_async(fields, params, batch, success, failure, pending)
    # Declared parameter types and enum domains for this edge.
    edge_params = {'breakdown': 'list<Object>', 'date_preset': 'date_preset_enum', 'metric': 'list<Object>', 'period': 'period_enum', 'show_description_from_api_doc': 'bool', 'since': 'datetime', 'until': 'datetime'}
    edge_enums = {'date_preset_enum': InsightsResult.DatePreset.__dict__.values(), 'period_enum': InsightsResult.Period.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/insights',
        api=self._api,
        param_checker=TypeChecker(edge_params, edge_enums),
        target_class=InsightsResult,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=InsightsResult, api=self._api),
        include_summary=False,
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_instagram_accounts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this Page's /instagram_accounts edge (GET)."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.instagramuser import InstagramUser
    # No typed parameters on this edge.
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/instagram_accounts',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=InstagramUser,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=InstagramUser, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_instant_articles_stats(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this Page's /instant_articles_stats edge (GET)."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.instantarticlesstats import InstantArticlesStats
    # Declared parameter types for this edge; no enum-valued parameters.
    edge_params = {'metrics_list': 'list<map>', 'page_list': 'list<string>', 'since': 'datetime', 'until': 'datetime'}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/instant_articles_stats',
        api=self._api,
        param_checker=TypeChecker(edge_params, {}),
        target_class=InstantArticlesStats,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=InstantArticlesStats, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_lead_gen_forms(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this Page's /leadgen_forms edge (GET)."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.leadgenform import LeadgenForm
    # No typed parameters on this edge.
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/leadgen_forms',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=LeadgenForm,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=LeadgenForm, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_lead_gen_form(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Create a lead-generation form via POST /leadgen_forms."""
    from facebook_business.utils import api_utils
    # Callbacks are only meaningful when the call is batched.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.leadgenform import LeadgenForm
    # Declared parameter types and enum domains for this edge.
    edge_params = {'allow_organic_lead_retrieval': 'bool', 'block_display_for_non_targeted_viewer': 'bool', 'context_card': 'Object', 'cover_photo': 'file', 'custom_disclaimer': 'Object', 'follow_up_action_url': 'string', 'is_for_canvas': 'bool', 'is_optimized_for_quality': 'bool', 'locale': 'locale_enum', 'name': 'string', 'privacy_policy': 'Object', 'question_page_custom_headline': 'string', 'questions': 'list<Object>', 'thank_you_page': 'Object', 'tracking_parameters': 'map'}
    edge_enums = {'locale_enum': LeadgenForm.Locale.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/leadgen_forms',
        api=self._api,
        param_checker=TypeChecker(edge_params, edge_enums),
        target_class=LeadgenForm,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=LeadgenForm, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_likes(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a GET to this node's /likes edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'target_id': 'string'}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/likes',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_live_videos(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a GET to this node's /live_videos edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.livevideo import LiveVideo
    param_types = {'broadcast_status': 'list<broadcast_status_enum>', 'source': 'source_enum'}
    enums = {'broadcast_status_enum': LiveVideo.BroadcastStatus.__dict__.values(), 'source_enum': LiveVideo.Source.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/live_videos',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=LiveVideo,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=LiveVideo, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_live_video(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a POST to this node's /live_videos edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.livevideo import LiveVideo
    param_types = {'content_tags': 'list<string>', 'crossposting_actions': 'list<map>', 'custom_labels': 'list<string>', 'description': 'string', 'enable_backup_ingest': 'bool', 'encoding_settings': 'string', 'event_params': 'Object', 'fisheye_video_cropped': 'bool', 'front_z_rotation': 'float', 'game_show': 'map', 'is_audio_only': 'bool', 'is_spherical': 'bool', 'original_fov': 'unsigned int', 'privacy': 'string', 'projection': 'projection_enum', 'published': 'bool', 'schedule_custom_profile_image': 'file', 'spatial_audio_format': 'spatial_audio_format_enum', 'status': 'status_enum', 'stereoscopic_mode': 'stereoscopic_mode_enum', 'stop_on_delete_stream': 'bool', 'stream_type': 'stream_type_enum', 'targeting': 'Object', 'title': 'string'}
    enums = {'projection_enum': LiveVideo.Projection.__dict__.values(), 'spatial_audio_format_enum': LiveVideo.SpatialAudioFormat.__dict__.values(), 'status_enum': LiveVideo.Status.__dict__.values(), 'stereoscopic_mode_enum': LiveVideo.StereoscopicMode.__dict__.values(), 'stream_type_enum': LiveVideo.StreamType.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/live_videos',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=LiveVideo,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=LiveVideo, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def delete_locations(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a DELETE to this node's /locations edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'location_page_ids': 'list<string>', 'store_numbers': 'list<unsigned int>'}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='DELETE',
        endpoint='/locations',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_locations(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a GET to this node's /locations edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/locations',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_location(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a POST to this node's /locations edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'always_open': 'bool', 'delivery_and_pickup_option_info': 'list<string>', 'differently_open_offerings': 'map', 'hours': 'map', 'ignore_warnings': 'bool', 'location': 'Object', 'location_page_id': 'string', 'old_store_number': 'unsigned int', 'page_username': 'string', 'permanently_closed': 'bool', 'phone': 'string', 'pickup_options': 'list<pickup_options_enum>', 'place_topics': 'list<string>', 'price_range': 'string', 'store_code': 'string', 'store_location_descriptor': 'string', 'store_name': 'string', 'store_number': 'unsigned int', 'temporary_status': 'temporary_status_enum', 'website': 'string'}
    enums = {'pickup_options_enum': Page.PickupOptions.__dict__.values(), 'temporary_status_enum': Page.TemporaryStatus.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/locations',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_media_fingerprints(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a GET to this node's /media_fingerprints edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.mediafingerprint import MediaFingerprint
    param_types = {'universal_content_id': 'string'}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/media_fingerprints',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=MediaFingerprint,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=MediaFingerprint, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_media_fingerprint(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a POST to this node's /media_fingerprints edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.mediafingerprint import MediaFingerprint
    param_types = {'fingerprint_content_type': 'fingerprint_content_type_enum', 'metadata': 'list', 'source': 'string', 'title': 'string', 'universal_content_id': 'string'}
    enums = {'fingerprint_content_type_enum': MediaFingerprint.FingerprintContentType.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/media_fingerprints',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=MediaFingerprint,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=MediaFingerprint, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_message_attachment(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a POST to this node's /message_attachments edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'message': 'Object', 'platform': 'platform_enum'}
    enums = {'platform_enum': ['INSTAGRAM', 'MESSENGER']}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/message_attachments',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_message(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a POST to this node's /messages edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'message': 'Object', 'messaging_type': 'messaging_type_enum', 'notification_type': 'notification_type_enum', 'payload': 'string', 'persona_id': 'string', 'recipient': 'Object', 'sender_action': 'sender_action_enum', 'suggestion_action': 'suggestion_action_enum', 'tag': 'Object', 'thread_control': 'Object'}
    enums = {'messaging_type_enum': Page.MessagingType.__dict__.values(), 'notification_type_enum': Page.NotificationType.__dict__.values(), 'sender_action_enum': Page.SenderAction.__dict__.values(), 'suggestion_action_enum': Page.SuggestionAction.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/messages',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_messaging_feature_review(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a GET to this node's /messaging_feature_review edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.messagingfeaturereview import MessagingFeatureReview
    param_types = {}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/messaging_feature_review',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=MessagingFeatureReview,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=MessagingFeatureReview, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_messenger_lead_forms(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a GET to this node's /messenger_lead_forms edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.messengeradspartialautomatedsteplist import MessengerAdsPartialAutomatedStepList
    param_types = {}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/messenger_lead_forms',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=MessengerAdsPartialAutomatedStepList,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=MessengerAdsPartialAutomatedStepList, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_messenger_lead_form(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a POST to this node's /messenger_lead_forms edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'account_id': 'unsigned int', 'block_send_api': 'bool', 'exit_keyphrases': 'string', 'handover_app_id': 'unsigned int', 'handover_summary': 'bool', 'privacy_url': 'string', 'reminder_text': 'string', 'step_list': 'list<map>', 'stop_question_message': 'string', 'template_name': 'string', 'tracking_parameters': 'map'}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/messenger_lead_forms',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def delete_messenger_profile(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a DELETE to this node's /messenger_profile edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'fields': 'list<fields_enum>', 'platform': 'platform_enum'}
    enums = {'fields_enum': ['ACCOUNT_LINKING_URL', 'COMMANDS', 'DESCRIPTION', 'GET_STARTED', 'GREETING', 'HOME_URL', 'ICE_BREAKERS', 'PAYMENT_SETTINGS', 'PERSISTENT_MENU', 'PLATFORM', 'SUBJECT_TO_NEW_EU_PRIVACY_RULES', 'TARGET_AUDIENCE', 'TITLE', 'WHITELISTED_DOMAINS'], 'platform_enum': Page.Platform.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='DELETE',
        endpoint='/messenger_profile',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_messenger_profile(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a GET to this node's /messenger_profile edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.messengerprofile import MessengerProfile
    param_types = {'platform': 'platform_enum'}
    enums = {'platform_enum': Page.Platform.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/messenger_profile',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=MessengerProfile,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=MessengerProfile, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_messenger_profile(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a POST to this node's /messenger_profile edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'account_linking_url': 'string', 'commands': 'list<Object>', 'description': 'list<Object>', 'get_started': 'Object', 'greeting': 'list<Object>', 'ice_breakers': 'list<map>', 'payment_settings': 'Object', 'persistent_menu': 'list<Object>', 'platform': 'platform_enum', 'target_audience': 'Object', 'title': 'list<Object>', 'whitelisted_domains': 'list<string>'}
    enums = {'platform_enum': Page.Platform.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/messenger_profile',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_nlp_config(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a POST to this node's /nlp_configs edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'api_version': 'Object', 'custom_token': 'string', 'model': 'model_enum', 'n_best': 'unsigned int', 'nlp_enabled': 'bool', 'other_language_support': 'map', 'verbose': 'bool'}
    enums = {'model_enum': Page.Model.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/nlp_configs',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_notification_message_tokens(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a GET to this node's /notification_message_tokens edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.userpageonetimeoptintokensettings import UserPageOneTimeOptInTokenSettings
    param_types = {}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/notification_message_tokens',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=UserPageOneTimeOptInTokenSettings,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=UserPageOneTimeOptInTokenSettings, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_notification_messages_dev_support(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a POST to this node's /notification_messages_dev_support edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'developer_action': 'developer_action_enum', 'recipient': 'Object'}
    enums = {'developer_action_enum': Page.DeveloperAction.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/notification_messages_dev_support',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_page_backed_instagram_accounts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a GET to this node's /page_backed_instagram_accounts edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.instagramuser import InstagramUser
    param_types = {}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/page_backed_instagram_accounts',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=InstagramUser,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=InstagramUser, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_page_backed_instagram_account(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a POST to this node's /page_backed_instagram_accounts edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.instagramuser import InstagramUser
    param_types = {}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/page_backed_instagram_accounts',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=InstagramUser,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=InstagramUser, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_page_whatsapp_number_verification(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a POST to this node's /page_whatsapp_number_verification edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'verification_code': 'string', 'whatsapp_number': 'string'}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/page_whatsapp_number_verification',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_pass_thread_control(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a POST to this node's /pass_thread_control edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'metadata': 'string', 'recipient': 'Object', 'target_app_id': 'int'}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/pass_thread_control',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_pass_thread_metadatum(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a POST to this node's /pass_thread_metadata edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'metadata': 'string', 'recipient': 'Object', 'target_app_id': 'int'}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/pass_thread_metadata',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_personas(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a GET to this node's /personas edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.persona import Persona
    param_types = {}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/personas',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Persona,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Persona, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_persona(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a POST to this node's /personas edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.persona import Persona
    param_types = {'name': 'string', 'profile_picture_url': 'string'}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/personas',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Persona,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Persona, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_photo_story(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a POST to this node's /photo_stories edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'photo_id': 'string'}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/photo_stories',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_photos(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a GET to this node's /photos edge.

    Returns the un-executed FacebookRequest when batched or when
    ``pending`` is True; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    # Callbacks are honoured only by batched requests; warn if misused.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.photo import Photo
    param_types = {'biz_tag_id': 'unsigned int', 'business_id': 'string', 'type': 'type_enum'}
    enums = {'type_enum': Photo.Type.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/photos',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Photo,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Photo, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_photo(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Publish a photo to the Page via POST /photos (or queue/defer the request)."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.photo import Photo
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'aid': 'string', 'allow_spherical_photo': 'bool', 'alt_text_custom': 'string', 'android_key_hash': 'string', 'application_id': 'string', 'attempt': 'unsigned int', 'audience_exp': 'bool', 'backdated_time': 'datetime', 'backdated_time_granularity': 'backdated_time_granularity_enum', 'caption': 'string', 'composer_session_id': 'string', 'direct_share_status': 'unsigned int', 'feed_targeting': 'Object', 'filter_type': 'unsigned int', 'full_res_is_coming_later': 'bool', 'initial_view_heading_override_degrees': 'unsigned int', 'initial_view_pitch_override_degrees': 'unsigned int', 'initial_view_vertical_fov_override_degrees': 'unsigned int', 'ios_bundle_id': 'string', 'is_explicit_location': 'bool', 'is_explicit_place': 'bool', 'location_source_id': 'string', 'manual_privacy': 'bool', 'message': 'string', 'name': 'string', 'nectar_module': 'string', 'no_story': 'bool', 'offline_id': 'unsigned int', 'og_action_type_id': 'string', 'og_icon_id': 'string', 'og_object_id': 'string', 'og_phrase': 'string', 'og_set_profile_badge': 'bool', 'og_suggestion_mechanism': 'string', 'parent_media_id': 'unsigned int', 'place': 'Object', 'privacy': 'string', 'profile_id': 'int', 'proxied_app_id': 'string', 'published': 'bool', 'qn': 'string', 'scheduled_publish_time': 'unsigned int', 'spherical_metadata': 'map', 'sponsor_id': 'string', 'sponsor_relationship': 'unsigned int', 'tags': 'list<Object>', 'target_id': 'int', 'targeting': 'Object', 'temporary': 'bool', 'time_since_original_post': 'unsigned int', 'uid': 'int', 'unpublished_content_type': 'unpublished_content_type_enum', 'url': 'string', 'user_selected_tags': 'bool', 'vault_image_id': 'string'}
    enums = {'backdated_time_granularity_enum': Photo.BackdatedTimeGranularity.__dict__.values(), 'unpublished_content_type_enum': Photo.UnpublishedContentType.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/photos',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Photo,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Photo, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_picture(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch the Page's profile picture source via GET /picture."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.profilepicturesource import ProfilePictureSource
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'breaking_change': 'breaking_change_enum', 'height': 'int', 'redirect': 'bool', 'type': 'type_enum', 'width': 'int'}
    enums = {'breaking_change_enum': ProfilePictureSource.BreakingChange.__dict__.values(), 'type_enum': ProfilePictureSource.Type.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/picture',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=ProfilePictureSource,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=ProfilePictureSource, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_picture(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Set/update the Page's profile picture via POST /picture."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.profilepicturesource import ProfilePictureSource
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'android_key_hash': 'string', 'burn_media_effect': 'bool', 'caption': 'string', 'composer_session_id': 'string', 'frame_entrypoint': 'string', 'has_umg': 'bool', 'height': 'unsigned int', 'ios_bundle_id': 'string', 'media_effect_ids': 'list<int>', 'media_effect_source_object_id': 'int', 'msqrd_mask_id': 'string', 'photo': 'string', 'picture': 'string', 'profile_pic_method': 'string', 'profile_pic_source': 'string', 'proxied_app_id': 'int', 'qn': 'string', 'reuse': 'bool', 'scaled_crop_rect': 'Object', 'set_profile_photo_shield': 'string', 'sticker_id': 'int', 'sticker_source_object_id': 'int', 'suppress_stories': 'bool', 'width': 'unsigned int', 'x': 'unsigned int', 'y': 'unsigned int'}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/picture',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=ProfilePictureSource,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=ProfilePictureSource, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_posts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """List the Page's posts via GET /posts as PagePost objects."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.pagepost import PagePost
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'include_hidden': 'bool', 'limit': 'unsigned int', 'q': 'string', 'show_expired': 'bool', 'with': 'with_enum'}
    enums = {'with_enum': PagePost.With.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/posts',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=PagePost,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=PagePost, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_product_catalogs(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """List product catalogs linked to the Page via GET /product_catalogs."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.productcatalog import ProductCatalog
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/product_catalogs',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=ProductCatalog,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=ProductCatalog, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_published_posts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """List only the Page's published posts via GET /published_posts."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.pagepost import PagePost
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'include_hidden': 'bool', 'limit': 'unsigned int', 'show_expired': 'bool', 'with': 'with_enum'}
    enums = {'with_enum': PagePost.With.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/published_posts',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=PagePost,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=PagePost, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_ratings(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch the Page's ratings/recommendations via GET /ratings."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.recommendation import Recommendation
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/ratings',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Recommendation,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Recommendation, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_release_thread_control(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Release Messenger thread control back to the primary receiver (POST /release_thread_control)."""
    from facebook_business.utils import api_utils
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'recipient': 'Object'}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/release_thread_control',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_request_thread_control(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Request Messenger thread control from the current owner (POST /request_thread_control)."""
    from facebook_business.utils import api_utils
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'metadata': 'string', 'recipient': 'Object'}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/request_thread_control',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_roles(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """List users with a role on the Page via GET /roles."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.user import User
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'include_deactivated': 'bool', 'uid': 'int'}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/roles',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=User,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=User, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_rtb_dynamic_posts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """List the Page's RTB dynamic posts via GET /rtb_dynamic_posts."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.rtbdynamicpost import RTBDynamicPost
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/rtb_dynamic_posts',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=RTBDynamicPost,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=RTBDynamicPost, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_scheduled_posts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """List the Page's scheduled (not yet published) posts via GET /scheduled_posts."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.pagepost import PagePost
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/scheduled_posts',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=PagePost,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=PagePost, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_secondary_receivers(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """List secondary-receiver apps for Messenger handover via GET /secondary_receivers."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.application import Application
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'platform': 'platform_enum'}
    enums = {'platform_enum': Application.Platform.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/secondary_receivers',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Application,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Application, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_settings(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the Page's settings via GET /settings."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.pagesettings import PageSettings
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/settings',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=PageSettings,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=PageSettings, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_setting(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Update a Page setting via POST /settings."""
    from facebook_business.utils import api_utils
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'option': 'Object'}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/settings',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_shop_setup_status(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the commerce shop setup status via GET /shop_setup_status."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.commercemerchantsettingssetupstatus import CommerceMerchantSettingsSetupStatus
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/shop_setup_status',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=CommerceMerchantSettingsSetupStatus,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=CommerceMerchantSettingsSetupStatus, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_stories(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """List the Page's stories via GET /stories, optionally filtered by time window and status."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.stories import Stories
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'since': 'datetime', 'status': 'list<status_enum>', 'until': 'datetime'}
    enums = {'status_enum': Stories.Status.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/stories',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Stories,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Stories, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def delete_subscribed_apps(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Unsubscribe the calling app from the Page via DELETE /subscribed_apps."""
    from facebook_business.utils import api_utils
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='DELETE',
        endpoint='/subscribed_apps',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_subscribed_apps(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """List apps subscribed to the Page's updates via GET /subscribed_apps."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.application import Application
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/subscribed_apps',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Application,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Application, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_subscribed_app(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Subscribe the calling app to Page webhook fields via POST /subscribed_apps."""
    from facebook_business.utils import api_utils
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'subscribed_fields': 'list<subscribed_fields_enum>'}
    enums = {'subscribed_fields_enum': Page.SubscribedFields.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/subscribed_apps',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_tabs(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """List the Page's tabs via GET /tabs."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.tab import Tab
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'tab': 'list<string>'}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/tabs',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Tab,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Tab, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_tagged(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """List posts in which the Page is tagged via GET /tagged."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.pagepost import PagePost
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/tagged',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=PagePost,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=PagePost, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_take_thread_control(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Take Messenger thread control from a secondary receiver (POST /take_thread_control)."""
    from facebook_business.utils import api_utils
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'metadata': 'string', 'recipient': 'Object'}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/take_thread_control',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_thread_owner(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Query which app currently owns a Messenger thread via GET /thread_owner."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.pagethreadowner import PageThreadOwner
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'recipient': 'string'}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/thread_owner',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=PageThreadOwner,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=PageThreadOwner, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_threads(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """List the Page's message threads via GET /threads."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.unifiedthread import UnifiedThread
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'folder': 'string', 'platform': 'platform_enum', 'tags': 'list<string>', 'user_id': 'string'}
    enums = {'platform_enum': UnifiedThread.Platform.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/threads',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=UnifiedThread,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=UnifiedThread, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_unlink_account(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Unlink a PSID-linked account from the Page via POST /unlink_accounts."""
    from facebook_business.utils import api_utils
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'psid': 'string'}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/unlink_accounts',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_video_copyright_rules(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """List the Page's video copyright match rules via GET /video_copyright_rules."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.videocopyrightrule import VideoCopyrightRule
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'selected_rule_id': 'string', 'source': 'source_enum'}
    enums = {'source_enum': VideoCopyrightRule.Source.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/video_copyright_rules',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=VideoCopyrightRule,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=VideoCopyrightRule, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_video_copyright_rule(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Create a video copyright match rule via POST /video_copyright_rules."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.videocopyrightrule import VideoCopyrightRule
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'condition_groups': 'list<Object>', 'name': 'string'}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/video_copyright_rules',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=VideoCopyrightRule,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=VideoCopyrightRule, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_video_copyright(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Register a video copyright (reference file / ownership) via POST /video_copyrights."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.videocopyright import VideoCopyright
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'attribution_id': 'string', 'content_category': 'content_category_enum', 'copyright_content_id': 'string', 'excluded_ownership_countries': 'list<string>', 'excluded_ownership_segments': 'list<Object>', 'is_reference_disabled': 'bool', 'is_reference_video': 'bool', 'monitoring_type': 'monitoring_type_enum', 'ownership_countries': 'list<string>', 'rule_id': 'string', 'tags': 'list<string>', 'whitelisted_ids': 'list<string>', 'whitelisted_ig_user_ids': 'list<string>'}
    enums = {'content_category_enum': VideoCopyright.ContentCategory.__dict__.values(), 'monitoring_type_enum': VideoCopyright.MonitoringType.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/video_copyrights',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=VideoCopyright,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=VideoCopyright, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_video_lists(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """List the Page's video playlists via GET /video_lists."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.videolist import VideoList
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/video_lists',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=VideoList,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=VideoList, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_video_reels(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """List the Page's reels via GET /video_reels, optionally bounded by since/until."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.advideo import AdVideo
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'since': 'datetime', 'until': 'datetime'}
    enums = {}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/video_reels',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=AdVideo,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AdVideo, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_video_reel(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Create/publish a reel on the Page via POST /video_reels (phased upload supported)."""
    from facebook_business.utils import api_utils
    from facebook_business.adobjects.advideo import AdVideo
    # Per-call callbacks are honored only when the request joins a batch.
    if batch is None and not (success is None and failure is None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'description': 'string', 'feed_targeting': 'Object', 'place': 'string', 'scheduled_publish_time': 'datetime', 'targeting': 'Object', 'title': 'string', 'upload_phase': 'upload_phase_enum', 'video_id': 'string', 'video_state': 'video_state_enum'}
    enums = {'upload_phase_enum': AdVideo.UploadPhase.__dict__.values(), 'video_state_enum': AdVideo.VideoState.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/video_reels',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=AdVideo,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AdVideo, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_video_story(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST /video_stories: create a video story on this node.

    With `batch`, the request is queued on the batch and returned; with
    `pending`, it is returned unexecuted; otherwise it executes immediately.
    `success`/`failure` callbacks only take effect for batch calls.
    """
    from facebook_business.utils import api_utils
    if ((batch is None) and ((success is not None) or (failure is not None))):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'description': 'string', 'feed_targeting': 'Object', 'place': 'string', 'scheduled_publish_time': 'datetime', 'targeting': 'Object', 'title': 'string', 'upload_phase': 'upload_phase_enum', 'video_id': 'string', 'video_state': 'video_state_enum'}
    enums = {'upload_phase_enum': ['FINISH', 'START'], 'video_state_enum': ['DRAFT', 'PUBLISHED', 'SCHEDULED']}
    request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/video_stories', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
    request.add_params(params)
    request.add_fields(fields)
    if (batch is not None):
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    elif pending:
        return request
    else:
        self.assure_call()
        return request.execute()
def get_videos(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET /videos: fetch this node's videos as AdVideo objects.

    With `batch`, the request is queued on the batch and returned; with
    `pending`, it is returned unexecuted; otherwise it executes immediately.
    `success`/`failure` callbacks only take effect for batch calls.
    """
    from facebook_business.utils import api_utils
    if ((batch is None) and ((success is not None) or (failure is not None))):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.advideo import AdVideo
    param_types = {'type': 'type_enum'}
    enums = {'type_enum': AdVideo.Type.__dict__.values()}
    request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/videos', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AdVideo, api_type='EDGE', response_parser=ObjectParser(target_class=AdVideo, api=self._api))
    request.add_params(params)
    request.add_fields(fields)
    if (batch is not None):
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    elif pending:
        return request
    else:
        self.assure_call()
        return request.execute()
def create_video(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST /videos: upload/create a video on this node (returns AdVideo).

    With `batch`, the request is queued on the batch and returned; with
    `pending`, it is returned unexecuted; otherwise it executes immediately.
    `success`/`failure` callbacks only take effect for batch calls.
    """
    from facebook_business.utils import api_utils
    if ((batch is None) and ((success is not None) or (failure is not None))):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.advideo import AdVideo
    param_types = {'ad_breaks': 'list', 'adaptive_type': 'string', 'animated_effect_id': 'unsigned int', 'application_id': 'string', 'asked_fun_fact_prompt_id': 'unsigned int', 'audio_story_wave_animation_handle': 'string', 'backdated_post': 'list', 'call_to_action': 'Object', 'composer_entry_picker': 'string', 'composer_entry_point': 'string', 'composer_entry_time': 'unsigned int', 'composer_session_events_log': 'string', 'composer_session_id': 'string', 'composer_source_surface': 'string', 'composer_type': 'string', 'container_type': 'container_type_enum', 'content_category': 'content_category_enum', 'content_tags': 'list<string>', 'creative_tools': 'string', 'crossposted_video_id': 'string', 'custom_labels': 'list<string>', 'description': 'string', 'direct_share_status': 'unsigned int', 'embeddable': 'bool', 'end_offset': 'unsigned int', 'expiration': 'Object', 'fbuploader_video_file_chunk': 'string', 'feed_targeting': 'Object', 'file_size': 'unsigned int', 'file_url': 'string', 'fisheye_video_cropped': 'bool', 'formatting': 'formatting_enum', 'fov': 'unsigned int', 'front_z_rotation': 'float', 'fun_fact_prompt_id': 'unsigned int', 'fun_fact_toastee_id': 'unsigned int', 'guide': 'list<list<unsigned int>>', 'guide_enabled': 'bool', 'has_nickname': 'bool', 'holiday_card': 'string', 'initial_heading': 'unsigned int', 'initial_pitch': 'unsigned int', 'instant_game_entry_point_data': 'string', 'is_boost_intended': 'bool', 'is_explicit_share': 'bool', 'is_group_linking_post': 'bool', 'is_voice_clip': 'bool', 'location_source_id': 'string', 'manual_privacy': 'bool', 'multilingual_data': 'list<Object>', 'no_story': 'bool', 'offer_like_post_id': 'unsigned int', 'og_action_type_id': 'string', 'og_icon_id': 'string', 'og_object_id': 'string', 'og_phrase': 'string', 'og_suggestion_mechanism': 'string', 'original_fov': 'unsigned int', 'original_projection_type': 'original_projection_type_enum', 'publish_event_id': 'unsigned int', 'published': 'bool', 'react_mode_metadata': 
'string', 'reference_only': 'bool', 'referenced_sticker_id': 'string', 'replace_video_id': 'string', 'scheduled_publish_time': 'unsigned int', 'secret': 'bool', 'slideshow_spec': 'map', 'social_actions': 'bool', 'source': 'string', 'source_instagram_media_id': 'string', 'specified_dialect': 'string', 'spherical': 'bool', 'sponsor_id': 'string', 'sponsor_relationship': 'unsigned int', 'start_offset': 'unsigned int', 'swap_mode': 'swap_mode_enum', 'targeting': 'Object', 'text_format_metadata': 'string', 'throwback_camera_roll_media': 'string', 'thumb': 'file', 'time_since_original_post': 'unsigned int', 'title': 'string', 'transcode_setting_properties': 'string', 'universal_video_id': 'string', 'unpublished_content_type': 'unpublished_content_type_enum', 'upload_phase': 'upload_phase_enum', 'upload_session_id': 'string', 'upload_setting_properties': 'string', 'video_asset_id': 'string', 'video_file_chunk': 'string', 'video_id_original': 'string', 'video_start_time_ms': 'unsigned int', 'waterfall_id': 'string'}
    enums = {'container_type_enum': AdVideo.ContainerType.__dict__.values(), 'content_category_enum': AdVideo.ContentCategory.__dict__.values(), 'formatting_enum': AdVideo.Formatting.__dict__.values(), 'original_projection_type_enum': AdVideo.OriginalProjectionType.__dict__.values(), 'swap_mode_enum': AdVideo.SwapMode.__dict__.values(), 'unpublished_content_type_enum': AdVideo.UnpublishedContentType.__dict__.values(), 'upload_phase_enum': AdVideo.UploadPhase.__dict__.values()}
    request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/videos', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AdVideo, api_type='EDGE', response_parser=ObjectParser(target_class=AdVideo, api=self._api))
    request.add_params(params)
    request.add_fields(fields)
    if (batch is not None):
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    elif pending:
        return request
    else:
        self.assure_call()
        return request.execute()
def get_visitor_posts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET /visitor_posts: fetch posts by visitors to this Page as PagePost objects.

    With `batch`, the request is queued on the batch and returned; with
    `pending`, it is returned unexecuted; otherwise it executes immediately.
    `success`/`failure` callbacks only take effect for batch calls.
    """
    from facebook_business.utils import api_utils
    if ((batch is None) and ((success is not None) or (failure is not None))):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.pagepost import PagePost
    param_types = {'include_hidden': 'bool', 'limit': 'unsigned int', 'show_expired': 'bool', 'with': 'with_enum'}
    enums = {'with_enum': PagePost.With.__dict__.values()}
    request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/visitor_posts', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=PagePost, api_type='EDGE', response_parser=ObjectParser(target_class=PagePost, api=self._api))
    request.add_params(params)
    request.add_fields(fields)
    if (batch is not None):
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    elif pending:
        return request
    else:
        self.assure_call()
        return request.execute()
def delete_welcome_message_flows(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """DELETE /welcome_message_flows: remove a welcome-message flow by `flow_id`.

    With `batch`, the request is queued on the batch and returned; with
    `pending`, it is returned unexecuted; otherwise it executes immediately.
    `success`/`failure` callbacks only take effect for batch calls.
    """
    from facebook_business.utils import api_utils
    if ((batch is None) and ((success is not None) or (failure is not None))):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'flow_id': 'string'}
    enums = {}
    request = FacebookRequest(node_id=self['id'], method='DELETE', endpoint='/welcome_message_flows', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
    request.add_params(params)
    request.add_fields(fields)
    if (batch is not None):
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    elif pending:
        return request
    else:
        self.assure_call()
        return request.execute()
def get_welcome_message_flows(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET /welcome_message_flows: fetch welcome-message flows for this node.

    With `batch`, the request is queued on the batch and returned; with
    `pending`, it is returned unexecuted; otherwise it executes immediately.
    `success`/`failure` callbacks only take effect for batch calls.
    """
    from facebook_business.utils import api_utils
    if ((batch is None) and ((success is not None) or (failure is not None))):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.ctxpartnerappwelcomemessageflow import CTXPartnerAppWelcomeMessageFlow
    param_types = {'app_id': 'string', 'flow_id': 'string'}
    enums = {}
    request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/welcome_message_flows', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=CTXPartnerAppWelcomeMessageFlow, api_type='EDGE', response_parser=ObjectParser(target_class=CTXPartnerAppWelcomeMessageFlow, api=self._api))
    request.add_params(params)
    request.add_fields(fields)
    if (batch is not None):
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    elif pending:
        return request
    else:
        self.assure_call()
        return request.execute()
def create_welcome_message_flow(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST /welcome_message_flows: create or update a welcome-message flow.

    With `batch`, the request is queued on the batch and returned; with
    `pending`, it is returned unexecuted; otherwise it executes immediately.
    `success`/`failure` callbacks only take effect for batch calls.
    """
    from facebook_business.utils import api_utils
    if ((batch is None) and ((success is not None) or (failure is not None))):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'eligible_platforms': 'list<eligible_platforms_enum>', 'flow_id': 'string', 'name': 'string', 'welcome_message_flow': 'list<Object>'}
    enums = {'eligible_platforms_enum': ['INSTAGRAM', 'MESSENGER']}
    request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/welcome_message_flows', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
    request.add_params(params)
    request.add_fields(fields)
    if (batch is not None):
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    elif pending:
        return request
    else:
        self.assure_call()
        return request.execute()
# Maps every Page field name to its Graph API value type; consumed by the
# SDK machinery when parsing responses and type-checking field values.
_field_types = {'about': 'string', 'access_token': 'string', 'ad_campaign': 'AdSet', 'affiliation': 'string', 'app_id': 'string', 'artists_we_like': 'string', 'attire': 'string', 'awards': 'string', 'band_interests': 'string', 'band_members': 'string', 'best_page': 'Page', 'bio': 'string', 'birthday': 'string', 'booking_agent': 'string', 'built': 'string', 'business': 'Object', 'can_checkin': 'bool', 'can_post': 'bool', 'category': 'string', 'category_list': 'list<PageCategory>', 'checkins': 'unsigned int', 'company_overview': 'string', 'connected_instagram_account': 'IGUser', 'connected_page_backed_instagram_account': 'IGUser', 'contact_address': 'MailingAddress', 'copyright_whitelisted_ig_partners': 'list<string>', 'country_page_likes': 'unsigned int', 'cover': 'CoverPhoto', 'culinary_team': 'string', 'current_location': 'string', 'delivery_and_pickup_option_info': 'list<string>', 'description': 'string', 'description_html': 'string', 'differently_open_offerings': 'list<map<string, bool>>', 'directed_by': 'string', 'display_subtext': 'string', 'displayed_message_response_time': 'string', 'does_viewer_have_page_permission_link_ig': 'bool', 'emails': 'list<string>', 'engagement': 'Engagement', 'fan_count': 'unsigned int', 'featured_video': 'AdVideo', 'features': 'string', 'followers_count': 'unsigned int', 'food_styles': 'list<string>', 'founded': 'string', 'general_info': 'string', 'general_manager': 'string', 'genre': 'string', 'global_brand_page_name': 'string', 'global_brand_root_id': 'string', 'has_added_app': 'bool', 'has_lead_access': 'HasLeadAccess', 'has_transitioned_to_new_page_experience': 'bool', 'has_whatsapp_business_number': 'bool', 'has_whatsapp_enterprise_number_using_cloud_api': 'bool', 'has_whatsapp_number': 'bool', 'hometown': 'string', 'hours': 'map<string, string>', 'id': 'string', 'impressum': 'string', 'influences': 'string', 'instagram_business_account': 'IGUser', 'is_always_open': 'bool', 'is_chain': 'bool', 'is_community_page': 'bool', 
'is_eligible_for_branded_content': 'bool', 'is_eligible_for_disable_connect_ig_btn_for_non_page_admin_am_web': 'bool', 'is_messenger_bot_get_started_enabled': 'bool', 'is_messenger_platform_bot': 'bool', 'is_owned': 'bool', 'is_permanently_closed': 'bool', 'is_published': 'bool', 'is_unclaimed': 'bool', 'is_verified': 'bool', 'is_webhooks_subscribed': 'bool', 'keywords': 'Object', 'leadgen_tos_acceptance_time': 'datetime', 'leadgen_tos_accepted': 'bool', 'leadgen_tos_accepting_user': 'User', 'link': 'string', 'location': 'Location', 'members': 'string', 'merchant_id': 'string', 'merchant_review_status': 'string', 'messaging_feature_status': 'MessagingFeatureStatus', 'messenger_ads_default_icebreakers': 'list<string>', 'messenger_ads_default_quick_replies': 'list<string>', 'messenger_ads_quick_replies_type': 'string', 'mini_shop_storefront': 'Shop', 'mission': 'string', 'mpg': 'string', 'name': 'string', 'name_with_location_descriptor': 'string', 'network': 'string', 'new_like_count': 'unsigned int', 'offer_eligible': 'bool', 'overall_star_rating': 'float', 'owner_business': 'Business', 'page_about_story': 'PageAboutStory', 'page_token': 'string', 'parent_page': 'Page', 'parking': 'PageParking', 'payment_options': 'PagePaymentOptions', 'personal_info': 'string', 'personal_interests': 'string', 'pharma_safety_info': 'string', 'phone': 'string', 'pickup_options': 'list<string>', 'place_type': 'string', 'plot_outline': 'string', 'preferred_audience': 'Targeting', 'press_contact': 'string', 'price_range': 'string', 'privacy_info_url': 'string', 'produced_by': 'string', 'products': 'string', 'promotion_eligible': 'bool', 'promotion_ineligible_reason': 'string', 'public_transit': 'string', 'rating_count': 'unsigned int', 'recipient': 'string', 'record_label': 'string', 'release_date': 'string', 'restaurant_services': 'PageRestaurantServices', 'restaurant_specialties': 'PageRestaurantSpecialties', 'schedule': 'string', 'screenplay_by': 'string', 'season': 'string', 
'single_line_address': 'string', 'starring': 'string', 'start_info': 'PageStartInfo', 'store_code': 'string', 'store_location_descriptor': 'string', 'store_number': 'unsigned int', 'studio': 'string', 'supports_donate_button_in_live_video': 'bool', 'talking_about_count': 'unsigned int', 'temporary_status': 'string', 'unread_message_count': 'unsigned int', 'unread_notif_count': 'unsigned int', 'unseen_message_count': 'unsigned int', 'user_access_expire_time': 'datetime', 'username': 'string', 'verification_status': 'string', 'voip_info': 'VoipInfo', 'website': 'string', 'were_here_count': 'unsigned int', 'whatsapp_number': 'string', 'written_by': 'string'}
@classmethod
def _get_field_enum_info(cls):
    """Return a mapping of enum-typed Page field names to their allowed values.

    Fix: the method takes `cls` and was clearly written as a classmethod
    (matching the SDK's generated-code convention) but the `@classmethod`
    decorator was missing, so calling it on the class would fail.
    """
    field_enum_info = {}
    field_enum_info['Attire'] = Page.Attire.__dict__.values()
    field_enum_info['FoodStyles'] = Page.FoodStyles.__dict__.values()
    field_enum_info['PickupOptions'] = Page.PickupOptions.__dict__.values()
    field_enum_info['TemporaryStatus'] = Page.TemporaryStatus.__dict__.values()
    field_enum_info['PermittedTasks'] = Page.PermittedTasks.__dict__.values()
    field_enum_info['Tasks'] = Page.Tasks.__dict__.values()
    field_enum_info['Alignment'] = Page.Alignment.__dict__.values()
    field_enum_info['EntryPointIcon'] = Page.EntryPointIcon.__dict__.values()
    field_enum_info['EntryPointLabel'] = Page.EntryPointLabel.__dict__.values()
    field_enum_info['GreetingDialogDisplay'] = Page.GreetingDialogDisplay.__dict__.values()
    field_enum_info['GuestChatMode'] = Page.GuestChatMode.__dict__.values()
    field_enum_info['MobileChatDisplay'] = Page.MobileChatDisplay.__dict__.values()
    field_enum_info['BackdatedTimeGranularity'] = Page.BackdatedTimeGranularity.__dict__.values()
    field_enum_info['Formatting'] = Page.Formatting.__dict__.values()
    field_enum_info['PlaceAttachmentSetting'] = Page.PlaceAttachmentSetting.__dict__.values()
    field_enum_info['PostSurfacesBlacklist'] = Page.PostSurfacesBlacklist.__dict__.values()
    field_enum_info['PostingToRedspace'] = Page.PostingToRedspace.__dict__.values()
    field_enum_info['TargetSurface'] = Page.TargetSurface.__dict__.values()
    field_enum_info['UnpublishedContentType'] = Page.UnpublishedContentType.__dict__.values()
    field_enum_info['MessagingType'] = Page.MessagingType.__dict__.values()
    field_enum_info['NotificationType'] = Page.NotificationType.__dict__.values()
    field_enum_info['SenderAction'] = Page.SenderAction.__dict__.values()
    field_enum_info['SuggestionAction'] = Page.SuggestionAction.__dict__.values()
    field_enum_info['Platform'] = Page.Platform.__dict__.values()
    field_enum_info['Model'] = Page.Model.__dict__.values()
    field_enum_info['DeveloperAction'] = Page.DeveloperAction.__dict__.values()
    field_enum_info['SubscribedFields'] = Page.SubscribedFields.__dict__.values()
    return field_enum_info
# NOTE(review): the line above this view originally read only `_bad_request`,
# which as a bare expression is a no-op. It is almost certainly a decorator
# whose leading `@` was lost in transit — restored here; confirm the exact
# decorator name against the module's other views.
@_bad_request
def price_per_unit_by_presentation(request, entity_code, bnf_code):
    """Render the price-per-unit page for one presentation at one org.

    Redirects to the primary substitutable BNF code when a non-primary code
    is requested, resolves the org (PCT for 3/5-char codes, Practice for
    6-char codes), and builds the bubble-chart API URL for the template.
    """
    date = _specified_or_last_date(request, 'prescribing')
    presentation = get_object_or_404(Presentation, pk=bnf_code)
    primary_code = _get_primary_substitutable_bnf_code(bnf_code)
    if (bnf_code != primary_code):
        # Canonicalise the URL onto the primary substitutable code.
        url = request.get_full_path().replace(bnf_code, primary_code)
        return HttpResponseRedirect(url)
    if (len(entity_code) in [3, 5]):
        entity = get_object_or_404(PCT, code=entity_code)
    elif (len(entity_code) == 6):
        entity = get_object_or_404(Practice, code=entity_code)
    # NOTE(review): any other entity_code length leaves `entity` unbound and
    # raises NameError below — presumably URL routing guarantees 3/5/6; verify.
    params = {'format': 'json', 'bnf_code': presentation.bnf_code, 'highlight': entity.code, 'date': date.strftime('%Y-%m-%d')}
    bubble_data_url = _build_api_url('bubble', params)
    context = {'entity': entity, 'entity_name': entity.cased_name, 'entity_name_and_status': entity.name_and_status, 'highlight': entity.code, 'highlight_name': entity.cased_name, 'name': presentation.product_name, 'bnf_code': presentation.bnf_code, 'presentation': presentation, 'dmd_info': presentation.dmd_info(), 'date': date, 'by_presentation': True, 'bubble_data_url': bubble_data_url}
    return render(request, 'price_per_unit.html', context)
class MayaviOffscreen(MayaviApp):
    # Off-screen variant of the Mayavi application: renders without a GUI.

    def _script_default(self):
        # Traits default initializer for the `script` trait: builds a Script
        # backed by an OffScreenEngine instead of an on-screen one.
        from mayavi.plugins.script import Script
        from mayavi.core.off_screen_engine import OffScreenEngine
        engine = OffScreenEngine()
        engine.start()
        s = Script(engine=engine)
        return s

    def setup_logger(self):
        # Route log output to the per-user ETS application-data directory.
        # The call below is to the module-level setup_logger() helper, not
        # recursion into this method.
        from traits.etsconfig.api import ETSConfig
        path = join(ETSConfig.application_data, 'mayavi_e3', 'mayavi.log')
        path = abspath(path)
        logger = logging.getLogger()
        setup_logger(logger, path, mode=self.log_mode)

    def main(self, argv=None):
        # Entry point: parse CLI args, configure logging, run the app.
        if (argv is None):
            argv = []
        self.parse_command_line(argv)
        self.setup_logger()
        self.run()
class initialize():
    """Deprecated shim for ``hydra.experimental.initialize``.

    Delegates to :func:`hydra.initialize`; emits a deprecation warning on
    Hydra < 1.2 and raises ImportError from 1.2 onwards.
    """

    def __init__(self, config_path: Optional[str]=_UNSPECIFIED_, job_name: Optional[str]=None, caller_stack_depth: int=1) -> None:
        from hydra import initialize as real_initialize

        msg = 'hydra.experimental.initialize() is no longer experimental. Use hydra.initialize()'
        if version.base_at_least('1.2'):
            raise ImportError(msg)
        deprecation_warning(message=msg)
        # +1 to account for this wrapper frame on the call stack.
        self.delegate = real_initialize(
            config_path=config_path,
            job_name=job_name,
            caller_stack_depth=caller_stack_depth + 1,
        )

    def __enter__(self, *args: Any, **kwargs: Any) -> None:
        self.delegate.__enter__(*args, **kwargs)

    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
        self.delegate.__exit__(exc_type, exc_val, exc_tb)

    def __repr__(self) -> str:
        return 'hydra.experimental.initialize()'
def plot_boxes(*, curr_for_plots: Boxes, ref_for_plots: Optional[Boxes], color_options: ColorOptions):
    """Build a grouped plotly box-plot figure of per-class prediction boxes.

    Draws one 'current' trace and, when reference data is supplied, one
    'reference' trace, colored from `color_options`.

    Bug fixes: the reference trace previously used `curr_for_plots.mins` as
    its lower fence (copy-paste error); the y-axis title had a typo
    ('Prerdictions').
    """
    current_color = color_options.get_current_data_color()
    reference_color = color_options.get_reference_data_color()
    fig = go.Figure()
    fig.add_trace(go.Box(lowerfence=curr_for_plots.mins, q1=curr_for_plots.lowers, q3=curr_for_plots.uppers, median=curr_for_plots.means, upperfence=curr_for_plots.maxs, name='current', marker_color=current_color))
    if (ref_for_plots is not None):
        # Use the REFERENCE data's minima here (was curr_for_plots.mins).
        fig.add_trace(go.Box(lowerfence=ref_for_plots.mins, q1=ref_for_plots.lowers, q3=ref_for_plots.uppers, median=ref_for_plots.means, upperfence=ref_for_plots.maxs, name='reference', marker_color=reference_color))
    fig.update_layout(boxmode='group')
    fig.update_layout(yaxis_title='Predictions', xaxis_title='Class')
    return fig
def run():
    """Match atoms of a reactant structure onto a product structure by
    comparing their bond graphs (subgraph isomorphism via networkx)."""
    # Reactant: build a bond graph with per-node element labels.
    fn = '07_ref_rx_phosphine_def2tzvp_reopt.xyz'
    geom = geom_from_xyz_file(fn)
    bm = get_bond_mat(geom)
    print(bm)
    node_attrs = {i: {'atom': atom} for (i, atom) in enumerate(geom.atoms)}
    g = nx.from_numpy_array(bm)
    nx.set_node_attributes(g, node_attrs)
    # Product: same construction.
    prod_fn = '01_ref_rx_product_opt.xyz'
    prod = geom_from_xyz_file(prod_fn)
    pbm = get_bond_mat(prod)
    gp = nx.from_numpy_array(pbm)
    pnode_attrs = {i: {'atom': atom} for (i, atom) in enumerate(prod.atoms)}
    nx.set_node_attributes(gp, pnode_attrs)
    # Find all embeddings of the reactant graph inside the product graph.
    gm = isomorphism.GraphMatcher(gp, g)
    si = gm.subgraph_is_isomorphic()
    sims = list(gm.subgraph_isomorphisms_iter())
    llens = [len(_) for _ in sims]
    pprint(sims)
    print(llens)
    # Keep only mappings that are the identity (atom i maps to atom i);
    # note the inner comprehension's (i, j) shadows the outer index i.
    ms = [i for (i, d) in enumerate(sims) if all([(i == j) for (i, j) in d.items()])]
    mapping = sims[ms[0]]
    pprint(mapping)
    pass
class DeptBase(SchemaBase):
    # Base schema for a department record.
    name: str  # department name (required)
    parent_id: (int | None) = Field(default=None, description='ID')  # parent department ID; None for a root department
    sort: int = Field(default=0, ge=0, description='')  # display/sort order, non-negative
    leader: (str | None) = None  # name of the department head, if any
    phone: (CustomPhoneNumber | None) = None  # contact phone, validated by CustomPhoneNumber
    email: (EmailStr | None) = None  # contact e-mail, validated by pydantic's EmailStr
    status: StatusType = Field(default=StatusType.enable)  # enabled/disabled flag
class OpticalSystem(OpticalElement):
    """A sequential composition of optical elements.

    Forward propagation applies the elements in order; backward propagation
    applies them in reverse. Transformation matrices are accumulated by
    left-multiplying each element's matrix onto the running product.

    Fix: the `optical_elements` accessor pair was missing its `@property`
    decorator and had a garbled `@optical_elements.setter` line, so the
    second `def optical_elements` silently shadowed the first.
    """

    def __init__(self, optical_elements):
        # Goes through the property setter below, which copies into a list.
        self.optical_elements = optical_elements

    def forward(self, wavefront):
        """Propagate `wavefront` through all elements in order."""
        wf = wavefront
        for optical_element in self.optical_elements:
            wf = optical_element.forward(wf)
        return wf

    def backward(self, wavefront):
        """Propagate `wavefront` through all elements in reverse order."""
        wf = wavefront
        for optical_element in reversed(self.optical_elements):
            wf = optical_element.backward(wf)
        return wf

    def get_transformation_matrix_forward(self, wavelength=1):
        """Product of the elements' forward matrices (applied in order)."""
        matrix = np.array(1)
        for optical_element in self.optical_elements:
            matrix = np.dot(optical_element.get_transformation_matrix_forward(wavelength), matrix)
        return matrix

    def get_transformation_matrix_backward(self, wavelength=1):
        """Product of the elements' backward matrices (applied in reverse)."""
        matrix = np.array(1)
        for optical_element in reversed(self.optical_elements):
            matrix = np.dot(optical_element.get_transformation_matrix_backward(wavelength), matrix)
        return matrix

    @property
    def optical_elements(self):
        """The list of elements making up this system."""
        return self._optical_elements

    @optical_elements.setter
    def optical_elements(self, optical_elements):
        # Copy into a list so later mutation of the caller's sequence
        # does not affect this system.
        self._optical_elements = list(optical_elements)
def init(target_bytes: bytes):
    """Prime the global spimdisasm context with the configured symbol sets.

    Always installs the default banned symbols; libultra, iQue and hardware
    register symbols are added only when enabled in `options.opts`.
    (`target_bytes` is accepted for interface compatibility but unused here.)
    """
    ctx = symbols.spim_context
    ctx.fillDefaultBannedSymbols()
    segment = ctx.globalSegment
    if options.opts.libultra_symbols:
        segment.fillLibultraSymbols()
    if options.opts.ique_symbols:
        segment.fillIQueSymbols()
    if options.opts.hardware_regs:
        segment.fillHardwareRegs(True)
class PyTorchEstimator(Estimator):
    """Estimator that trains a PyTorch model on a Flink table via dl-on-flink.

    Fixes: `load` takes `cls` and is now a proper `@classmethod`;
    `_pickle_model_factory`, `_get_input_type` and `_get_column_names` take
    no `self`/`cls` and are now `@staticmethod`s (previously they would fail
    when called on an instance); misleading local `tf_model_factory` renamed.
    """

    def __init__(self, statement_set: StatementSet, model: torch.nn.Module, loss: Loss, optimizer: OPTIMIZER_CREATOR_T, worker_num: int, feature_cols: List[str], label_col: str, max_epochs: int=1, lr_scheduler_creator: Optional[LR_SCHEDULER_CREATOR_T]=None, batch_size: Optional[int]=32, cluster_config_properties: Optional[Mapping[(str, str)]]=None):
        self.batch_size = batch_size
        self.lr_scheduler_creator = lr_scheduler_creator
        self.max_epochs = max_epochs
        self.label_col = label_col
        self.feature_cols = feature_cols
        self.worker_num = worker_num
        self.optimizer_creator = optimizer
        self.loss = loss
        self.model = model
        self.statement_set = statement_set
        self.cluster_config_properties = (cluster_config_properties if (cluster_config_properties is not None) else {})

    def fit(self, *inputs: Table) -> 'PyTorchModel':
        """Configure a training cluster for the single input table and return
        a PyTorchModel handle; raises ValueError for != 1 input tables."""
        if (len(inputs) != 1):
            raise ValueError('Only one input table is allowed.')
        self._verify_input_table(inputs[0])
        input_table = inputs[0]
        pytorch_cluster_config_builder = PyTorchClusterConfig.new_builder()
        predict_col_data_type = input_table.get_schema().get_field_data_type(self.label_col)
        pytorch_cluster_config_builder.set_world_size(self.worker_num).set_node_entry(pytorch_train_entry).set_property(INPUT_COL_NAMES, self._get_column_names(input_table.get_schema())).set_property(FEATURE_COLS, ','.join(self.feature_cols)).set_property(LABEL_COL, self.label_col).set_property(BATCH_SIZE, str(self.batch_size)).set_property(INPUT_TYPES, self._get_input_type(input_table.get_schema())).set_property(MAX_EPOCHS, str(self.max_epochs))
        for (k, v) in self.cluster_config_properties.items():
            pytorch_cluster_config_builder.set_property(k, v)
        model_factory = SimplePyTorchModelFactory(model=self.model, loss=self.loss, optimizer_creator=self.optimizer_creator, lr_scheduler_creator=self.lr_scheduler_creator)
        pytorch_cluster_config_builder.set_property(MODEL_FACTORY_BASE64, self._pickle_model_factory(model_factory))
        return PyTorchModel(pytorch_cluster_config_builder=pytorch_cluster_config_builder, predict_col_data_type=predict_col_data_type, statement_set=self.statement_set, input_table=inputs[0])

    def save(self, path: str) -> None:
        raise Exception('PyTorch does not support save and load')

    @classmethod
    def load(cls, env: StreamExecutionEnvironment, path: str) -> 'PyTorchEstimator':
        raise Exception('PyTorch does not support save and load')

    def get_param_map(self) -> Dict[('Param[Any]', Any)]:
        return {}

    def _verify_input_table(self, table: Table):
        # Ensure every configured feature column and the label column exist
        # in the table's schema.
        for feature_col in self.feature_cols:
            assert (feature_col in table.get_schema().get_field_names()), f'''{feature_col} not in the given input table:
{table.get_schema()}'''
        assert (self.label_col in table.get_schema().get_field_names()), f'''{self.label_col} not in the given input table:
{table.get_schema()}'''

    @staticmethod
    def _pickle_model_factory(model_factory: PyTorchModelFactory) -> str:
        # Serialize the factory so it can travel in a string config property.
        return base64.encodebytes(pickle.dumps(model_factory)).decode('utf-8')

    @staticmethod
    def _get_input_type(schema: TableSchema) -> str:
        # Translate each Flink column type into its dl-on-flink wire name.
        data_types = schema.get_field_data_types()
        dl_on_flink_types = []
        for data_type in data_types:
            data_type = type(data_type)
            if (data_type not in FLINK_TYPE_TO_DL_ON_FLINK_TYPE):
                raise TypeError(f'Unsupported type of column {data_type}')
            dl_on_flink_types.append(FLINK_TYPE_TO_DL_ON_FLINK_TYPE[data_type])
        return ','.join(dl_on_flink_types)

    @staticmethod
    def _get_column_names(schema: TableSchema):
        return ','.join(schema.get_field_names())
def gen_function_call(func_attrs, backend_spec, indent=' '):
    """Render the source snippet that invokes the generated split() function.

    Collects per-output shape definitions (deduplicated by name), the input
    dims, the split sizes and the output mask flags, then fills in
    FUNC_CALL_TEMPLATE.
    """
    inp = func_attrs['inputs'][0]
    outs = func_attrs['outputs']
    # One shape definition per distinct output name.
    shape_defs = []
    shape_names = []
    for out in outs:
        shape_name = '{}_shape'.format(out._attrs['name'])
        if shape_name in shape_names:
            continue
        dim_refs = ', '.join(('&' + dim._attrs['name']) for dim in out._attrs['shape'])
        shape_defs.append(OUTPUT_SHAPE_DEF_TEMPLATE.render(indent=' ', output_shape_name=shape_name, output_dim_refs=dim_refs, index_type=backend_spec.index_type))
        shape_names.append(shape_name)
    return FUNC_CALL_TEMPLATE.render(
        indent=indent,
        outputs=',\n '.join(out._attrs['name'] for out in outs),
        output_shape_defs=''.join(shape_defs),
        output_shapes=', '.join(shape_names),
        output_masks=', '.join(('true' if (mask is True) else 'false') for mask in func_attrs['output_masks']),
        input_dims=', '.join(dim._attrs['name'] for dim in inp._attrs['shape']),
        func_name=func_attrs['name'],
        input_name=inp._attrs['name'],
        input_ptr=inp._attrs['name'],
        split_dim=func_attrs['split_dim'],
        real_num_splits=len(outs),
        all_num_splits=len(func_attrs['output_masks']),
        rank=len(inp._attrs['shape']),
        num_splits=len(outs),
        split_sizes=', '.join(str(size) for size in func_attrs['split_sizes']),
        index_type=backend_spec.index_type,
    )
def get_input_data(config, dir):
    """Resolve sample (name, directory) pairs.

    If `config` is given, read the `[samples]` section of the ini file
    (case-preserving keys) and validate that each entry is a directory;
    otherwise treat every entry directly under `dir` as one sample.

    Raises:
        IOError: if a configured sample path is not a directory.

    Fixes: validation previously used `assert` inside a bare `except:` —
    assert is stripped under -O and the bare except hid every other error;
    also removed the redundant second abspath/expanduser pass.
    """
    if (config is not None):
        conf = configparser.ConfigParser()
        conf.optionxform = str  # preserve the case of sample names
        conf.read(config)
        groups = [(name, os.path.abspath(os.path.expanduser(path))) for (name, path) in conf.items('samples')]
        for (name, path) in groups:
            if not os.path.isdir(path):
                raise IOError('{} is not a directory'.format(path))
    else:
        groups = []
        for name in glob.glob(os.path.join(dir, '*')):
            groups.append((os.path.basename(name), name))
    return groups
def test_analysis_raise_exception_if_convergence_step_is_not_positive_integer(template_klass, sf, building_container):
    """convergence_step must be a positive integer: a non-int raises
    TypeError, zero or negative values raise ValueError."""
    bad_steps = [(TypeError, 'foo'), (ValueError, 0), (ValueError, (- 12))]
    for expected_exception, step in bad_steps:
        with pytest.raises(expected_exception):
            template_klass(container_building=building_container, reverse_selection_function=sf, model=scared.Monobit(5), convergence_step=step, selection_function=sf)
class OptionSeriesBubbleSonificationContexttracksMappingGapbetweennotes(Options):
    """Sonification gap-between-notes mapping options.

    Fix: each getter/setter pair shared a name with no decorators, so the
    setter definition silently replaced the getter and the getters were
    unreachable. Restored the `@property`/`@<name>.setter` pattern used by
    this options framework.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class SetDefaultWeightWidthSlantTest(object):
    """Tests for set_default_weight_width_slant: mapping variable-font axis
    positions onto OS/2 usWeightClass / usWidthClass and post.italicAngle.

    Fix: the `.parametrize` lines were `@pytest.mark.parametrize` decorators
    with the `@pytest.mark` prefix lost; without them the test bodies are
    syntactically broken and never parametrized.
    """

    @pytest.mark.parametrize('location, expected', [({'wght': 0}, 1), ({'wght': 1}, 1), ({'wght': 100}, 100), ({'wght': 1000}, 1000), ({'wght': 1001}, 1000)])
    def test_wght(self, ttFont, location, expected):
        # usWeightClass is clamped to [1, 1000].
        set_default_weight_width_slant(ttFont, location)
        assert (ttFont['OS/2'].usWeightClass == expected)

    @pytest.mark.parametrize('location, expected', [({'wdth': 0}, 1), ({'wdth': 56}, 1), ({'wdth': 57}, 2), ({'wdth': 62.5}, 2), ({'wdth': 75}, 3), ({'wdth': 87.5}, 4), ({'wdth': 100}, 5), ({'wdth': 112.5}, 6), ({'wdth': 125}, 7), ({'wdth': 150}, 8), ({'wdth': 200}, 9), ({'wdth': 201}, 9), ({'wdth': 1000}, 9)])
    def test_wdth(self, ttFont, location, expected):
        # wdth percentages bucket into usWidthClass 1..9.
        set_default_weight_width_slant(ttFont, location)
        assert (ttFont['OS/2'].usWidthClass == expected)

    @pytest.mark.parametrize('location, expected', [({'slnt': (- 91)}, (- 90)), ({'slnt': (- 90)}, (- 90)), ({'slnt': 0}, 0), ({'slnt': 11.5}, 11.5), ({'slnt': 90}, 90), ({'slnt': 91}, 90)])
    def test_slnt(self, ttFont, location, expected):
        # italicAngle is clamped to [-90, 90].
        set_default_weight_width_slant(ttFont, location)
        assert (ttFont['post'].italicAngle == expected)

    def test_all(self, ttFont):
        set_default_weight_width_slant(ttFont, {'wght': 500, 'wdth': 150, 'slnt': (- 12.0)})
        assert (ttFont['OS/2'].usWeightClass == 500)
        assert (ttFont['OS/2'].usWidthClass == 8)
        assert (ttFont['post'].italicAngle == (- 12.0))
class serienRecCheckForRecording():
epgrefresh_instance = None
__instance = None
def __init__(self):
assert (not serienRecCheckForRecording.__instance), 'serienRecCheckForRecording is a singleton class!'
serienRecCheckForRecording.__instance = self
self.session = None
self.database = None
self.manuell = False
self.tvplaner_manuell = False
self.newSeriesOrEpisodesFound = False
self.senderListe = {}
self.markers = []
self.messageList = []
self.speedStartTime = 0
self.speedEndTime = 0
self.countSerien = 0
self.countActivatedSeries = 0
self.noOfRecords = 0
self.emailData = None
self.uhrzeit = None
self.daypage = 0
self.tempDB = None
self.autoCheckFinished = False
    def initialize(self, session, manuell, tvplaner_manuell=False):
        """Reset all per-run state, then either start a manual search or arm
        the automatic search timer / EPGRefresh hook.

        :param session: enigma2 GUI session object
        :param manuell: True when the search was triggered manually
        :param tvplaner_manuell: True when the manual search should also
            process the TV-Planer e-mail
        """
        self.session = session
        self.manuell = manuell
        self.tvplaner_manuell = tvplaner_manuell
        self.database = None
        self.newSeriesOrEpisodesFound = False
        self.senderListe = {}
        self.markers = []
        self.messageList = []
        self.speedStartTime = 0
        self.speedEndTime = 0
        self.countSerien = 0
        self.countActivatedSeries = 0
        self.noOfRecords = int(config.plugins.serienRec.NoOfRecords.value)
        self.emailData = None
        self.daypage = 0
        self.tempDB = None
        self.autoCheckFinished = False
        print(('[SerienRecorder] Initialize checkForRecording manual: %s (with TV-Planer: %s)' % (str(manuell), str(tvplaner_manuell))))
        SRLogger.checkFileAccess()
        lt = time.localtime()
        self.uhrzeit = time.strftime('%a, %d.%m.%Y - %H:%M:%S', lt)
        SRLogger.writeLog(("\n' %s '" % self.uhrzeit), True)
        # Tear down any previously armed auto-check timer before re-arming.
        global refreshTimer
        if refreshTimer:
            refreshTimer.stop()
            refreshTimer = None
        global refreshTimerConnection
        if refreshTimerConnection:
            refreshTimerConnection = None
        # Log the configured auto-check mode:
        # '0' = off, '1' = fixed daily time, '2' = after EPGRefresh.
        if (config.plugins.serienRec.autochecktype.value == '0'):
            SRLogger.writeLog('Automatischer Timer-Suchlauf ist deaktiviert - nur manuelle Timersuche', True)
        elif (config.plugins.serienRec.autochecktype.value == '1'):
            SRLogger.writeLog('Automatischer Timer-Suchlauf ist aktiviert - er wird zur gewahlten Uhrzeit gestartet', True)
        elif (config.plugins.serienRec.autochecktype.value == '2'):
            SRLogger.writeLog('Automatischer Timer-Suchlauf ist aktiviert - er wird nach dem EPGRefresh ausgefuhrt', True)
        if ((not self.manuell) and (config.plugins.serienRec.autochecktype.value == '1') and config.plugins.serienRec.timeUpdate.value):
            # Arm a one-shot timer for the configured daily check time plus a
            # random extra delay (presumably to spread server load - confirm).
            deltatime = self.getNextAutoCheckTimer(lt)
            refreshTimer = eTimer()
            if isDreamOS():
                # DreamOS API: the connection object must be kept alive.
                refreshTimerConnection = refreshTimer.timeout.connect(self.startCheck)
            else:
                refreshTimer.callback.append(self.startCheck)
            refreshTimer.start((((deltatime * 60) + random.randint(0, (int(config.plugins.serienRec.maxDelayForAutocheck.value) * 60))) * 1000), True)
            print('[SerienRecorder] Timer-Suchlauf Uhrzeit-Timer gestartet.')
            print(('[SerienRecorder] Verbleibende Zeit: %s Stunden' % TimeHelpers.td2HHMMstr(datetime.timedelta(minutes=(deltatime + int(config.plugins.serienRec.maxDelayForAutocheck.value))))))
            SRLogger.writeLog(('Verbleibende Zeit bis zum nachsten automatischen Timer-Suchlauf: %s Stunden\n' % TimeHelpers.td2HHMMstr(datetime.timedelta(minutes=(deltatime + int(config.plugins.serienRec.maxDelayForAutocheck.value))))), True)
        if self.manuell:
            # Manual run: search immediately, then clear the manual flags.
            print('[SerienRecorder] checkRecTimer manuell.')
            self.startCheck()
            self.manuell = False
            self.tvplaner_manuell = False
        else:
            # Automatic run: hook into EPGRefresh (if installed) so that
            # mode '2' can trigger a search after each EPG refresh.
            try:
                from Plugins.Extensions.EPGRefresh.EPGRefresh import epgrefresh
                self.epgrefresh_instance = epgrefresh
                config.plugins.serienRec.autochecktype.addNotifier(self.setEPGRefreshCallback)
            except Exception as e:
                SRLogger.writeLog(('EPGRefresh plugin nicht installiert! ' + str(e)), True)
def isAutoCheckFinished(self):
return self.autoCheckFinished
def setAutoCheckFinished(self, finished):
self.autoCheckFinished = finished
def getNextAutoCheckTimer(lt):
current_time = ((lt.tm_hour * 60) + lt.tm_min)
next_autocheck_time = ((config.plugins.serienRec.deltime.value[0] * 60) + config.plugins.serienRec.deltime.value[1])
if (current_time < next_autocheck_time):
delta_time = (next_autocheck_time - current_time)
else:
delta_time = abs(((1440 - current_time) + next_autocheck_time))
return delta_time
    def setEPGRefreshCallback(self, configentry=None):
        """Register or remove the EPGRefresh finish-notifier depending on the
        configured auto-check type ('2' = run after EPGRefresh).

        Installed as a config notifier, hence the unused ``configentry``.
        """
        try:
            if self.epgrefresh_instance:
                if (config.plugins.serienRec.autochecktype.value == '2'):
                    self.epgrefresh_instance.addFinishNotifier(self.startCheck)
                else:
                    self.epgrefresh_instance.removeFinishNotifier(self.startCheck)
        except Exception as e:
            # add/removeFinishNotifier may be missing in old EPGRefresh
            # versions; the platform probe below decides whether to warn.
            try:
                from Tools.HardwareInfoVu import HardwareInfoVu
                # Import succeeded (presumably a Vu+ image) - the warning is
                # suppressed here; confirm the intended platform behaviour.
                pass
            except:
                SRLogger.writeLog(('Um die EPGRefresh Optionen nutzen zu konnen, muss mindestens die EPGRefresh Version 2.1.1 installiert sein. ' + str(e)), True)
def getMarkerCover(self):
self.database = SRDatabase(serienRecDataBaseFilePath)
markers = self.database.getAllMarkers(False)
for marker in markers:
(ID, Serie, Info, Url, AufnahmeVerzeichnis, AlleStaffelnAb, alleSender, Vorlaufzeit, Nachlaufzeit, AnzahlAufnahmen, preferredChannel, useAlternativeChannel, AbEpisode, TimerForSpecials, ErlaubteSTB, ErlaubteStaffelCount, fsID) = marker
getCover(self, Serie, fsID, True)
def startCheck(self):
self.database = SRDatabase(serienRecDataBaseFilePath)
self.autoCheckFinished = False
print('[SerienRecorder] Starting check')
lt = time.localtime()
self.uhrzeit = time.strftime('%a, %d.%m.%Y - %H:%M:%S', lt)
global refreshTimer
global refreshTimerConnection
print('[SerienRecorder] Check file access for log file and backup folder')
SRLogger.checkFileAccess()
if (config.plugins.serienRec.AutoBackup.value != '0'):
os.path.exists(config.plugins.serienRec.BackupPath.value)
SRLogger.writeLog(("\n' %s '" % self.uhrzeit), True)
if ((not self.manuell) and (not initDB())):
self.askForDSB()
return
if ((not self.database.hasMarkers()) and (not config.plugins.serienRec.tvplaner.value)):
SRLogger.writeLog(("\n' Timer-Suchlauf gestartet am %s '" % self.uhrzeit), True)
print('[SerienRecorder] check: Tabelle Serien-Marker leer.')
SRLogger.writeLog('Es sind keine Serien-Marker vorhanden - Timer-Suchlauf kann nicht ausgefuhrt werden.', True)
SRLogger.writeLog("' Timer-Suchlauf beendet '", True)
self.askForDSB()
return
if (not self.database.hasChannels()):
SRLogger.writeLog(("\n' Timer-Suchlauf gestartet am %s '" % self.uhrzeit), True)
print('[SerienRecorder] check: Tabelle Channels leer.')
SRLogger.writeLog('Es wurden keine Sender zugeordnet - Timer-Suchlauf kann nicht ausgefuhrt werden.', True)
SRLogger.writeLog("' Timer-Suchlauf beendet '", True)
self.askForDSB()
return
if refreshTimer:
refreshTimer.stop()
refreshTimer = None
if refreshTimerConnection:
refreshTimerConnection = None
print('[SerienRecorder] Auto-Check Timer stop.')
SRLogger.writeLog('Automatischer Timer-Suchlauf Uhrzeit-Timer angehalten.', True)
self.speedStartTime = time.time()
print(('[SerienRecorder] Stopwatch Start: ' + str(self.speedStartTime)))
if ((config.plugins.serienRec.autochecktype.value == '1') and config.plugins.serienRec.timeUpdate.value):
deltatime = self.getNextAutoCheckTimer(lt)
refreshTimer = eTimer()
if isDreamOS():
refreshTimerConnection = refreshTimer.timeout.connect(self.startCheck)
else:
refreshTimer.callback.append(self.startCheck)
refreshTimer.start((((deltatime * 60) + random.randint(0, (int(config.plugins.serienRec.maxDelayForAutocheck.value) * 60))) * 1000), True)
print('[SerienRecorder] Auto-Check Uhrzeit-Timer gestartet.')
print(('[SerienRecorder] Verbleibende Zeit: %s Stunden' % TimeHelpers.td2HHMMstr(datetime.timedelta(minutes=(deltatime + int(config.plugins.serienRec.maxDelayForAutocheck.value))))))
SRLogger.writeLog('Automatischer Timer-Suchlauf Uhrzeit-Timer gestartet.', True)
SRLogger.writeLog(('Verbleibende Zeit: %s Stunden' % TimeHelpers.td2HHMMstr(datetime.timedelta(minutes=(deltatime + int(config.plugins.serienRec.maxDelayForAutocheck.value))))), True)
if (config.plugins.serienRec.AutoBackup.value == 'before'):
if config.plugins.serienRec.createCompressedBackup.value:
createCompressedBackup(self.manuell)
else:
createBackup(self.manuell)
SRLogger.reset()
from .SerienRecorderTVPlaner import resetTVPlanerHTMLBackup
resetTVPlanerHTMLBackup()
self.database.removeExpiredTimerConflicts()
self.database.removeExpiredUndoTimer()
check_type = ''
if config.plugins.serienRec.tvplaner.value:
check_type += 'TV-Planer '
elif (config.plugins.serienRec.autochecktype == '2'):
check_type += 'EPG-Refresh '
if self.manuell:
check_type += 'manuell'
else:
check_type += 'auto'
print(("' Timer-Suchlauf gestartet am %s (%s) '" % (self.uhrzeit, check_type)))
SRLogger.writeLog(("\n' Timer-Suchlauf gestartet am %s (%s) '\n" % (self.uhrzeit, check_type)), True)
if config.plugins.serienRec.writeLogVersion.value:
SRLogger.writeLog(('Box-Typ: %s' % STBHelpers.getSTBType()), True)
SRLogger.writeLog(('Image: %s' % STBHelpers.getImageVersionString()), True)
pos = config.skin.primary_skin.value.rfind('/')
if (pos != (- 1)):
skin = config.skin.primary_skin.value[:pos]
else:
skin = 'Default Skin'
SRLogger.writeLog(('Box-Skin: %s (%s x %s)\n' % (skin, str(getDesktop(0).size().width()), str(getDesktop(0).size().height()))), True)
SRLogger.writeLog(('SerienRecorder Version: %s' % config.plugins.serienRec.showversion.value), True)
SRLogger.writeLog(('Datenbank Schema Version: %s' % str(self.database.getVersion())), True)
if config.plugins.serienRec.enableWebinterface.value:
SRLogger.writeLog(('Schnittstellen Version: %s' % SRAPIVERSION), True)
SRLogger.writeLog(('SerienRecorder Box ID: %s' % str(config.plugins.serienRec.BoxID.value)), True)
sMsg = '\nDEBUG Filter: '
if config.plugins.serienRec.writeLogChannels.value:
sMsg += 'Senderliste '
if config.plugins.serienRec.writeLogAllowedEpisodes.value:
sMsg += 'Episoden '
if config.plugins.serienRec.writeLogAdded.value:
sMsg += 'Added '
if config.plugins.serienRec.writeLogDisk.value:
sMsg += 'Disk '
if config.plugins.serienRec.writeLogTimeRange.value:
sMsg += 'Tageszeit '
if config.plugins.serienRec.writeLogTimeLimit.value:
sMsg += 'Zeitlimit '
if config.plugins.serienRec.writeLogTimerDebug.value:
sMsg += 'Timer '
SRLogger.writeLog(sMsg, True)
self.markers = []
self.messageList = []
print('[SerienRecorder] Check internet connection')
from .SerienRecorderHelpers import testWebConnection
if (not testWebConnection()):
SRLogger.writeLog('\nKeine Verbindung ins Internet. Suchlauf wurde abgebrochen!!\n', True)
self.speedEndTime = time.time()
speedTime = (self.speedEndTime - self.speedStartTime)
SRLogger.writeLog(("' Timer-Suchlauf beendet ( Ausfuhrungsdauer: %3.2f Sek.) '" % speedTime), True)
print(("[SerienRecorder] ' Timer-Suchlauf beendet ( Ausfuhrungsdauer: %3.2f Sek.) '" % speedTime))
SRLogger.backup()
from .SerienRecorderTVPlaner import backupTVPlanerHTML
backupTVPlanerHTML()
self.autoCheckFinished = True
if (config.plugins.serienRec.AutoBackup.value == 'after'):
if config.plugins.serienRec.createCompressedBackup.value:
createCompressedBackup(self.manuell)
else:
createBackup(self.manuell)
self.askForDSB()
return
print('[SerienRecorder] Check configured recording directories')
try:
SRLogger.writeLog('\nPrufe konfigurierte Aufnahmeverzeichnisse:', True)
recordDirectories = self.database.getRecordDirectories(config.plugins.serienRec.savetopath.value)
for directory in recordDirectories:
SRLogger.writeLog((" ' %s '" % directory), True)
os.path.exists(directory)
except:
SRLogger.writeLog('Es konnten nicht alle Aufnahmeverzeichnisse gefunden werden', True)
if ((not self.manuell) and (config.plugins.serienRec.firstscreen.value == '0')):
print('[SerienRecorder] Update series planer data')
from twisted.internet import reactor
from .SerienRecorderSeriesPlanner import serienRecSeriesPlanner
seriesPlanner = serienRecSeriesPlanner()
reactor.callFromThread(seriesPlanner.updatePlannerData)
self.startCheckTransmissions()
def startCheckTransmissions(self):
print('[SerienRecorder] Start check transmissions')
self.database = SRDatabase(serienRecDataBaseFilePath)
self.tempDB = SRTempDatabase()
self.tempDB.initialize()
self.senderListe = {}
stbChannelList = STBHelpers.buildSTBChannelList()
for s in self.database.getChannels():
self.senderListe[s[0].lower()] = (s[0], STBHelpers.getChannelByRef(stbChannelList, s[2]), s[2], STBHelpers.getChannelByRef(stbChannelList, s[4]), s[4], s[5])
webChannels = self.database.getActiveChannels()
SRLogger.writeLog(('\nAnzahl aktiver Websender: %d' % len(webChannels)), True)
epgTimeSpan = 'Deaktiviert'
if config.plugins.serienRec.eventid.value:
epgTimeSpan = (' %d Minuten' % config.plugins.serienRec.epgTimeSpan.value)
SRLogger.writeLog(('Eingestellte EPG Suchgrenzen: %s' % epgTimeSpan), True)
current_time = int(time.time())
future_time = (int(config.plugins.serienRec.checkfordays.value) * 86400)
future_time += int(current_time)
search_start = time.strftime('%a, %d.%m.%Y - %H:%M', time.localtime(int(current_time)))
search_end = time.strftime('%a, %d.%m.%Y - %H:%M', time.localtime(int(future_time)))
search_rerun_end = time.strftime('%a, %d.%m.%Y - %H:%M', time.localtime((future_time + ((int(config.plugins.serienRec.TimeSpanForRegularTimer.value) - int(config.plugins.serienRec.checkfordays.value)) * 86400))))
SRLogger.writeLog(('Berucksichtige Ausstrahlungstermine zwischen %s und %s' % (search_start, search_end)), True)
if self.database.hasForceRecording(config.plugins.serienRec.forceRecording.value):
SRLogger.writeLog(('Berucksichtige Wiederholungen zwischen %s und %s' % (search_start, search_rerun_end)), True)
self.emailData = None
if (config.plugins.serienRec.tvplaner.value and ((not self.manuell) or self.tvplaner_manuell)):
print('[SerienRecorder] Parsing TV-Planer e-mail')
try:
from .SerienRecorderTVPlaner import getEmailData
emailParserThread = backgroundThread(getEmailData)
emailParserThread.start()
emailParserThread.join()
self.emailData = emailParserThread.result
del emailParserThread
except:
SRLogger.writeLog('TV-Planer Verarbeitung fehlgeschlagen!', True)
print('[SerienRecorder] TV-Planer exception!')
self.emailData = None
print(('[SerienRecorder] lastFullCheckTime %s' % time.strftime('%a, %d.%m.%Y - %H:%M', time.localtime(int(config.plugins.serienRec.tvplaner_last_full_check.value)))))
if (self.emailData is None):
self.markers = self.database.getMarkers(config.plugins.serienRec.BoxID.value, config.plugins.serienRec.NoOfRecords.value)
config.plugins.serienRec.tvplaner_last_full_check.value = int(time.time())
config.plugins.serienRec.tvplaner_last_full_check.save()
configfile.save()
if (config.plugins.serienRec.tvplaner.value and ((not self.manuell) and self.tvplaner_manuell)):
if config.plugins.serienRec.showMessageOnTVPlanerError.value:
timeout = config.plugins.serienRec.showMessageTimeout.value
if (config.plugins.serienRec.showMessageTimeout.value == 0):
timeout = (- 1)
self.messageList.append(('Beim Abrufen der TV-Planer E-Mail ist ein Fehler aufgetreten - es wurde ein voller Suchlauf durchgefuhrt.\nWeitere Informationen wurden ins Log geschrieben.', MessageBox.TYPE_INFO, timeout, 'tvplaner-error'))
Notifications.AddPopup('Beim Abrufen der TV-Planer E-Mail ist ein Fehler aufgetreten - es wurde ein voller Suchlauf durchgefuhrt.\nWeitere Informationen wurden ins Log geschrieben.', MessageBox.TYPE_INFO, timeout=timeout, id='tvplaner-error')
fullCheck = '- keine TV-Planer Daten - voller Suchlauf'
else:
fullCheck = '- voller Suchlauf'
elif (config.plugins.serienRec.tvplaner_full_check.value and ((int(config.plugins.serienRec.tvplaner_last_full_check.value) + ((int(config.plugins.serienRec.checkfordays.value) - 1) * 86400)) < int(time.time()))):
self.markers = self.database.getMarkers(config.plugins.serienRec.BoxID.value, config.plugins.serienRec.NoOfRecords.value)
config.plugins.serienRec.tvplaner_last_full_check.value = int(time.time())
config.plugins.serienRec.tvplaner_last_full_check.save()
configfile.save()
fullCheck = '- Zeit abgelaufen - voller Suchlauf'
else:
self.markers = self.database.getMarkers(config.plugins.serienRec.BoxID.value, config.plugins.serienRec.NoOfRecords.value, list(self.emailData.keys()))
fullCheck = '- nur Serien der TV-Planer E-Mail'
self.countSerien = 0
self.countActivatedSeries = 0
self.noOfRecords = int(config.plugins.serienRec.NoOfRecords.value)
if (len(self.markers) > 0):
while True:
if (config.plugins.serienRec.tvplaner.value and config.plugins.serienRec.tvplaner_skipSerienServer.value):
SRLogger.writeLog('\nGema den globalen Einstellungen werden Timer nur aus den Terminen der TV-Planer E-Mail angelegt.\n', True)
global transmissionFailed
transmissionFailed = False
self.tempDB.cleanUp()
if (not (config.plugins.serienRec.tvplaner.value and config.plugins.serienRec.tvplaner_skipSerienServer.value)):
SRLogger.writeLog(("\n' Verarbeite Daten vom Server %s '\n" % fullCheck), True)
print('[SerienRecorder] Processing data from Serien-Server')
if PY2:
import Queue
else:
import queue as Queue
jobQueue = Queue.Queue()
resultQueue = Queue.Queue()
for (serienTitle, SerieUrl, SerieStaffel, SerieSender, AbEpisode, AnzahlAufnahmen, SerieEnabled, excludedWeekdays, skipSeriesServer, markerType, fsID, forceRecording) in self.markers:
if config.plugins.serienRec.tvplaner.value:
if (skipSeriesServer is None):
if config.plugins.serienRec.tvplaner_skipSerienServer.value:
continue
elif skipSeriesServer:
SRLogger.writeLog(("' %s ' - Fur diesen Serien-Marker sollen Timer nur aus den Terminen der TV-Planer E-Mail angelegt werden." % serienTitle), True)
continue
else:
SRLogger.writeLog(("' %s ' - Fur diesen Serien-Marker sollen Timer aus den Terminen des Serien-Servers angelegt werden." % serienTitle), True)
if (markerType == 1):
print(("[SerienRecorder] ' %s - TV-Planer Film wird ignoriert '" % serienTitle))
continue
self.countSerien += 1
if SerieEnabled:
limitedChannels = False
if ('Alle' in SerieSender):
markerChannels = webChannels
else:
markerChannels = SerieSender
SRLogger.writeLogFilter('channel', ("' %s ' - Fur diesen Serien-Marker sind die Sender eingeschrankt - es werden nicht alle Ausstrahlungstermine berucksichtigt." % serienTitle))
limitedChannels = True
self.countActivatedSeries += 1
seriesID = SerieUrl
if (config.plugins.serienRec.forceRecording.value or bool(forceRecording)):
timeSpan = int(config.plugins.serienRec.TimeSpanForRegularTimer.value)
else:
timeSpan = int(config.plugins.serienRec.checkfordays.value)
jobQueue.put((seriesID, fsID, timeSpan, markerChannels, serienTitle, SerieStaffel, AbEpisode, AnzahlAufnahmen, current_time, future_time, excludedWeekdays, limitedChannels))
else:
SRLogger.writeLog(("' %s ' - Dieser Serien-Marker ist deaktiviert - es werden keine Timer angelegt." % serienTitle), True)
if ((- 2) in SerieStaffel):
SRLogger.writeLog(("' %s ' - Dieser Serien-Marker steht auf manuell - es werden keine Timer automatisch angelegt." % serienTitle), True)
elif (((- 1) in SerieStaffel) and (0 in SerieStaffel)):
dummy = 0
else:
SRLogger.writeLogFilter('allowedEpisodes', ("' %s ' - Fur diesen Serien-Marker sind die Staffeln eingeschrankt - es werden nicht alle Ausstrahlungstermine berucksichtigt." % serienTitle))
for i in range(4):
worker = downloadTransmissionsThread(i, jobQueue, resultQueue)
worker.setDaemon(True)
worker.start()
jobQueue.join()
number_of_server_transmissions = 0
number_of_server_series = 0
while (not resultQueue.empty()):
(transmissionFailed, transmissions, seriesID, fsID, serienTitle, SerieStaffel, AbEpisode, AnzahlAufnahmen, current_time, future_time, excludedWeekdays, limitedChannels) = resultQueue.get()
self.processTransmission(transmissions, seriesID, fsID, serienTitle, SerieStaffel, AbEpisode, AnzahlAufnahmen, current_time, future_time, limitedChannels, 0, excludedWeekdays, 0)
if transmissions:
number_of_server_transmissions += len(transmissions)
if (len(transmissions) > 0):
number_of_server_series += 1
resultQueue.task_done()
SRLogger.writeLog(("\nEs wurden ' %d ' Ausstrahlungstermine fur ' %d ' Serien vom SerienServer abgerufen." % (number_of_server_transmissions, number_of_server_series)), True)
(number_of_considered_transmissions, number_of_considered_series) = self.tempDB.countTransmissions(0)
SRLogger.writeLog(("Berucksichtigt werden ' %d ' Ausstrahlungstermine fur ' %d ' Serien.\n" % (number_of_considered_transmissions, number_of_considered_series)), True)
break
if (config.plugins.serienRec.tvplaner.value and (self.emailData is not None)):
SRLogger.writeLog("\n' Verarbeite Daten aus TV-Planer E-Mail '\n", True)
print('[SerienRecorder] Processing data from TV-Planer e-mail')
if PY2:
import Queue
else:
import queue as Queue
jobQueue = Queue.Queue()
resultQueue = Queue.Queue()
for (serienTitle, SerieUrl, SerieStaffel, SerieSender, AbEpisode, AnzahlAufnahmen, SerieEnabled, excludedWeekdays, skipSeriesServer, markerType, fsID, forceRecording) in self.database.getMarkers(config.plugins.serienRec.BoxID.value, config.plugins.serienRec.NoOfRecords.value, list(self.emailData.keys())):
if SerieEnabled:
limitedChannels = False
if ('Alle' in SerieSender):
markerChannels = {x: x for x in webChannels}
else:
markerChannels = {x: x for x in SerieSender}
SRLogger.writeLogFilter('channels', ("' %s ' - Fur diesen Serien-Marker sind die Sender eingeschrankt - es werden nicht alle Ausstrahlungstermine berucksichtigt." % serienTitle))
limitedChannels = True
jobQueue.put((markerChannels, SerieUrl, fsID, serienTitle, SerieStaffel, AbEpisode, AnzahlAufnahmen, current_time, future_time, excludedWeekdays, markerType, limitedChannels))
else:
SRLogger.writeLog(("' %s ' - Dieser Serien-Marker ist deaktiviert - es werden keine Timer angelegt." % serienTitle), True)
if ((- 2) in SerieStaffel):
SRLogger.writeLog(("' %s ' - Fur diesen Serien-Marker sind die Staffeln auf 'manuell' gestellt - es werden keine Timer automatisch angelegt." % serienTitle), True)
elif (((- 1) in SerieStaffel) and (0 in SerieStaffel)):
dummy = 0
else:
SRLogger.writeLogFilter('allowedEpisodes', ("' %s ' - Fur diesen Serien-Marker sind die Staffeln eingeschrankt - es werden nicht alle Ausstrahlungstermine berucksichtigt." % serienTitle))
for i in range(4):
worker = processEMailDataThread(i, self.emailData, jobQueue, resultQueue)
worker.setDaemon(True)
worker.start()
jobQueue.join()
number_of_planer_transmissions = 0
number_of_planer_series = 0
while (not resultQueue.empty()):
(transmissions, seriesID, fsID, serienTitle, SerieStaffel, AbEpisode, AnzahlAufnahmen, current_time, future_time, excludedWeekdays, markerType, limitedChannels) = resultQueue.get()
self.processTransmission(transmissions, seriesID, fsID, serienTitle, SerieStaffel, AbEpisode, AnzahlAufnahmen, current_time, future_time, limitedChannels, 1, excludedWeekdays, markerType)
if transmissions:
number_of_planer_transmissions += len(transmissions)
if (len(transmissions) > 0):
number_of_planer_series += 1
resultQueue.task_done()
SRLogger.writeLog(("\nEs wurden ' %d ' Ausstrahlungstermine fur ' %d ' Serien aus der TV-Planer E-Mail ausgelesen." % (number_of_planer_transmissions, number_of_planer_series)), True)
(number_of_considered_transmissions, number_of_considered_series) = self.tempDB.countTransmissions(1)
SRLogger.writeLog(("Berucksichtigt werden ' %d ' Ausstrahlungstermine fur ' %d ' Serien.\n" % (number_of_considered_transmissions, number_of_considered_series)), True)
self.createTimer()
self.checkFinal()
    def createTimer(self):
        """Create/update the actual recording timers from the collected
        transmissions, log statistics and show end-of-run notifications."""
        from .SerienRecorderTimer import serienRecTimer
        timer = serienRecTimer()
        timer.setTempDB(self.tempDB)
        timer.activate()
        # One search pass per allowed number of parallel recordings.
        for x in range(self.noOfRecords):
            timer.search(x)
        current_time = int(time.time())
        timer.adjustEPGtimes(current_time)
        SRLogger.writeLog('\n', True)
        self.database.rebuild()
        self.tempDB.rebuild()
        self.autoCheckFinished = True
        # getCounts() also returns the popup messages collected by the timer.
        (countTimer, countTimerUpdate, countNotActiveTimer, countTimerFromWishlist, countBoxOnlyTimer, self.messageList) = timer.getCounts()
        self.speedEndTime = time.time()
        print(('[SerienRecorder] Stopwatch End: ' + str(self.speedEndTime)))
        speedTime = (self.speedEndTime - self.speedStartTime)
        if config.plugins.serienRec.eventid.value:
            SRLogger.writeLog(('%s/%s Serie(n) sind vorgemerkt, dafur wurde(n) %s Timer erstellt und %s Timer aktualisiert.' % (str(self.countActivatedSeries), str(self.countSerien), str(countTimer), str(countTimerUpdate))), True)
            print(('[SerienRecorder] %s/%s Serie(n) sind vorgemerkt, dafur wurde(n) %s Timer erstellt und %s Timer aktualisiert.' % (str(self.countActivatedSeries), str(self.countSerien), str(countTimer), str(countTimerUpdate))))
        else:
            SRLogger.writeLog(('%s/%s Serie(n) sind vorgemerkt, dafur wurde(n) %s Timer erstellt.' % (str(self.countActivatedSeries), str(self.countSerien), str(countTimer))), True)
            print(('[SerienRecorder] %s/%s Serie(n) sind vorgemerkt, dafur wurde(n) %s Timer erstellt.' % (str(self.countActivatedSeries), str(self.countSerien), str(countTimer))))
        if (countNotActiveTimer > 0):
            SRLogger.writeLog(('%s Timer wurde(n) wegen Konflikten deaktiviert erstellt!' % str(countNotActiveTimer)), True)
            print(('[SerienRecorder] %s Timer wurde(n) wegen Konflikten deaktiviert erstellt!' % str(countNotActiveTimer)))
        if (countTimerFromWishlist > 0):
            SRLogger.writeLog(('%s Timer wurde(n) vom Merkzettel erstellt!' % str(countTimerFromWishlist)), True)
            print(('[SerienRecorder] %s Timer wurde(n) vom Merkzettel erstellt!' % str(countTimerFromWishlist)))
        if (countBoxOnlyTimer > 0):
            SRLogger.writeLog(('%s Timer wurde(n) auf Wunsch nicht in der SerienRecorder Datenbank gespeichert.' % str(countBoxOnlyTimer)), True)
            print(('[SerienRecorder] %s Timer wurde(n) auf Wunsch nicht in der SerienRecorder Datenbank gespeichert.' % str(countBoxOnlyTimer)))
        SRLogger.writeLog(("' Timer-Suchlauf beendet (Ausfuhrungsdauer: %3.2f Sek.) '" % speedTime), True)
        print(("[SerienRecorder] ' Timer-Suchlauf beendet (Ausfuhrungsdauer: %3.2f Sek.) '" % speedTime))
        if (not self.manuell):
            # Popup notifications for automatic runs:
            # '1' = short notice, '2' = detailed statistics.
            if (config.plugins.serienRec.showNotification.value == '1'):
                Notifications.AddPopup('SerienRecorder Suchlauf fur neue Timer wurde beendet.', MessageBox.TYPE_INFO, timeout=3, id='Suchlauf wurde beendet')
            elif (config.plugins.serienRec.showNotification.value == '2'):
                statisticMessage = ('Serien vorgemerkt: %s/%s\nTimer erstellt: %s\nTimer aktualisiert: %s\nTimer mit Konflikten: %s\nTimer vom Merkzettel: %s' % (str(self.countActivatedSeries), str(self.countSerien), str(countTimer), str(countTimerUpdate), str(countNotActiveTimer), str(countTimerFromWishlist)))
                newSeasonOrEpisodeMessage = ''
                if self.newSeriesOrEpisodesFound:
                    newSeasonOrEpisodeMessage = '\n\nNeuer Serien- oder Staffelbeginn gefunden.'
                Notifications.AddPopup(('SerienRecorder Suchlauf fur neue Timer wurde beendet.\n\n%s%s' % (statisticMessage, newSeasonOrEpisodeMessage)), MessageBox.TYPE_INFO, timeout=10, id='Suchlauf wurde beendet')
            if (config.plugins.serienRec.channelUpdateNotification.value == '1'):
                # Warn when the server-side channel list is newer than ours.
                from .SerienRecorderChannelScreen import checkChannelListTimelineness
                channelListUpToDate = checkChannelListTimelineness(self.database)
                if (not channelListUpToDate):
                    Notifications.AddPopup('Die Senderliste wurde auf dem Serien-Server aktualisiert.\nSie muss auch im SerienRecorder aktualisiert werden.', MessageBox.TYPE_INFO, timeout=0, id='Senderliste aktualisieren')
    def checkFinal(self):
        """Finish a search run: clean up TV-Planer movie markers, back up
        log/database, log remaining times and release the DB handles."""
        print('[SerienRecorder] checkFinal')
        if (config.plugins.serienRec.tvplaner.value and config.plugins.serienRec.tvplaner_movies.value):
            # Movie markers are only valid for a single TV-Planer run.
            try:
                self.database.removeMovieMarkers()
                print("[SerienRecorder] ' TV-Planer FilmMarker geloscht '")
            except:
                SRLogger.writeLog("' TV-Planer FilmMarker loschen fehlgeschlagen '", True)
                print("[SerienRecorder] ' TV-Planer FilmMarker loschen fehlgeschlagen '")
        global transmissionFailed
        if transmissionFailed:
            # A failed download forces a full search on the next run by
            # resetting the last-full-check timestamp.
            config.plugins.serienRec.tvplaner_last_full_check.value = int(0)
            config.plugins.serienRec.tvplaner_last_full_check.save()
            configfile.save()
        if (config.plugins.serienRec.AutoBackup.value == 'after'):
            if config.plugins.serienRec.createCompressedBackup.value:
                createCompressedBackup(self.manuell)
            else:
                createBackup(self.manuell)
        SRLogger.backup()
        from .SerienRecorderTVPlaner import backupTVPlanerHTML
        backupTVPlanerHTML()
        self.autoCheckFinished = True
        print('[SerienRecorder] checkFinal: autoCheckFinished')
        if (config.plugins.serienRec.autochecktype.value == '1'):
            # Log the time remaining until the next scheduled auto-check.
            lt = time.localtime()
            deltatime = self.getNextAutoCheckTimer(lt)
            SRLogger.writeLog(('\nVerbleibende Zeit bis zum nachsten automatischen Timer-Suchlauf: %s Stunden\n' % TimeHelpers.td2HHMMstr(datetime.timedelta(minutes=(deltatime + int(config.plugins.serienRec.maxDelayForAutocheck.value))))), True)
        if (config.plugins.serienRec.tvplaner.value and config.plugins.serienRec.tvplaner_full_check.value):
            # Days left until the next forced full search (clamped at 0).
            autoCheckDays = (((int(config.plugins.serienRec.tvplaner_last_full_check.value) + ((int(config.plugins.serienRec.checkfordays.value) - 1) * 86400)) - int(time.time())) / 86400)
            if (autoCheckDays < 0):
                autoCheckDays = 0
            SRLogger.writeLog(('Verbleibende Zeit bis zum nachsten vollen Timer-Suchlauf: %d Tage' % autoCheckDays), True)
        # Drop handles so the next run starts from a clean state.
        self.tempDB = None
        self.database = None
        self.askForDSB()
def processTransmission(self, data, serien_wlid, serien_fsid, serien_name, staffeln, AbEpisode, AnzahlAufnahmen, current_time, future_time, limitedChannels, source, excludedWeekdays=None, markerType=0):
    """Filter the fetched transmissions of one series marker and queue the
    surviving entries as timer candidates in the temporary database.

    data -- iterable of (name, channel, start, end, season, episode, title,
            status) tuples, or None when fetching/parsing failed.
    staffeln -- allowed-seasons list; special entries interpreted below:
            -2 = nothing allowed (manual only), -1 together with 0 = everything
            allowed, -1 alone = highest listed season and upward, 0 = specials
            starting at episode `AbEpisode`.
    source/markerType -- stored verbatim with each queued transmission.
    """
    if (data is None):
        SRLogger.writeLog(('Fehler beim Abrufen und Verarbeiten der Ausstrahlungstermine [%s]' % serien_name), True)
        return
    print(('[SerienRecorder] processTransmissions: (%d) %r [%d]' % (source, toStr(serien_name), len(data))))
    if ((len(data) == 0) and limitedChannels):
        SRLogger.writeLogFilter('channels', ("' %s ' - Fur diesen Serien-Marker wurden keine Ausstrahlungstermine gefunden, die Sender sind am Marker eingeschrankt." % serien_name))
    # Per-marker allowed time-of-day window (falls back to the global values).
    (from_time, to_time) = self.database.getTimeSpan(serien_fsid, config.plugins.serienRec.globalFromTime.value, config.plugins.serienRec.globalToTime.value)
    if (self.noOfRecords < AnzahlAufnahmen):
        self.noOfRecords = AnzahlAufnahmen
    TimeSpan_time = int(future_time)
    if self.database.getForceRecording(serien_fsid, config.plugins.serienRec.forceRecording.value):
        # Force-recording markers look further into the future.
        TimeSpan_time += ((int(config.plugins.serienRec.TimeSpanForRegularTimer.value) - int(config.plugins.serienRec.checkfordays.value)) * 86400)
    self.tempDB.beginTransaction()
    for (current_serien_name, sender, startzeit, endzeit, staffel, episode, title, status) in data:
        start_unixtime = startzeit
        end_unixtime = endzeit
        if ((not staffel) and (not episode)):
            # No season/episode numbering available: use the "S/00" placeholder.
            staffel = 'S'
            episode = '00'
        seasonEpisodeString = ('S%sE%s' % (str(staffel).zfill(2), str(episode).zfill(2)))
        label_serie = ('%s - %s - %s' % (serien_name, seasonEpisodeString, title))
        # Filter 1: time-of-day window (skipped for force-recording markers).
        if (not self.database.getForceRecording(serien_fsid, config.plugins.serienRec.forceRecording.value)):
            if ((int(from_time) > 0) or (int(to_time) < ((23 * 60) + 59))):
                # Minutes since midnight for start and end of the transmission.
                start_time = ((time.localtime(int(start_unixtime)).tm_hour * 60) + time.localtime(int(start_unixtime)).tm_min)
                end_time = ((time.localtime(int(end_unixtime)).tm_hour * 60) + time.localtime(int(end_unixtime)).tm_min)
                if (not TimeHelpers.allowedTimeRange(from_time, to_time, start_time)):
                    print(('[SerienRecorder] processTransmissions time range ignore: %r' % serien_name))
                    timeRangeConfigured = ('%s:%s - %s:%s' % (str((int(from_time) // 60)).zfill(2), str((int(from_time) % 60)).zfill(2), str((int(to_time) // 60)).zfill(2), str((int(to_time) % 60)).zfill(2)))
                    timeRangeTransmission = ('%s:%s - %s:%s' % (str((int(start_time) // 60)).zfill(2), str((int(start_time) % 60)).zfill(2), str((int(end_time) // 60)).zfill(2), str((int(end_time) % 60)).zfill(2)))
                    SRLogger.writeLogFilter('timeRange', ("' %s ' - Sendung (%s) nicht in Zeitspanne (%s)" % (label_serie, timeRangeTransmission, timeRangeConfigured)))
                    continue
        # Filter 2: web channel must be mapped to an enabled box service.
        # NOTE(review): `status` from the data tuple is overwritten here by the
        # channel status, so the transmission's own status is unused below —
        # confirm that is intended.
        (webChannel, stbChannel, stbRef, altstbChannel, altstbRef, status) = self.checkChannel(sender)
        if ((stbRef == '') and (altstbRef == '')):
            SRLogger.writeLogFilter('channels', ("' %s ' - Box-Sender nicht gefunden ' ' %s '" % (label_serie, webChannel)))
            continue
        if (int(status) == 0):
            SRLogger.writeLogFilter('channels', ("' %s ' - Box-Sender deaktiviert ' %s '" % (label_serie, webChannel)))
            continue
        # Filter 3: season/episode allow-list (see docstring for the special values).
        serieAllowed = False
        if ((- 2) in staffeln):
            serieAllowed = False
        elif (((- 1) in staffeln) and (0 in staffeln)):
            serieAllowed = True
        elif str(staffel).isdigit():
            if (int(staffel) == 0):
                # Season 0 (specials): only episodes >= AbEpisode are allowed.
                if str(episode).isdigit():
                    if (int(episode) < int(AbEpisode)):
                        if config.plugins.serienRec.writeLogAllowedEpisodes.value:
                            # Build a human-readable version of the allow-list for the log.
                            liste = staffeln[:]
                            liste.sort()
                            liste.reverse()
                            if ((- 1) in staffeln):
                                liste.remove((- 1))
                                liste[0] = ('ab %s' % liste[0])
                            liste.reverse()
                            liste.insert(0, ('0 ab E%s' % str(AbEpisode).zfill(2)))
                            SRLogger.writeLogFilter('allowedEpisodes', ("' %s ' - Episode nicht erlaubt ' %s ' ' %s '" % (label_serie, seasonEpisodeString, str(liste).replace("'", '').replace('"', ''))))
                    else:
                        serieAllowed = True
            elif (int(staffel) in staffeln):
                serieAllowed = True
            elif ((- 1) in staffeln):
                # -1: allow the highest listed season and everything above it.
                if (int(staffel) >= max(staffeln)):
                    serieAllowed = True
        elif self.database.getSpecialsAllowed(serien_fsid):
            # Non-numeric season id counts as a "special".
            serieAllowed = True
        # Bookmark override: episodes on the watch list are always recorded.
        vomMerkzettel = False
        if (not serieAllowed):
            if self.database.hasBookmark(serien_fsid, staffel, episode):
                SRLogger.writeLog(("' %s ' - Timer vom Merkzettel wird angelegt %s" % (label_serie, stbChannel)), True)
                serieAllowed = True
                vomMerkzettel = True
        if (not serieAllowed):
            if config.plugins.serienRec.writeLogAllowedEpisodes.value:
                # Same human-readable allow-list, logged for a rejected season.
                liste = staffeln[:]
                liste.sort()
                liste.reverse()
                if ((- 1) in staffeln):
                    liste.remove((- 1))
                    liste[0] = ('ab %s' % liste[0])
                liste.reverse()
                if str(episode).isdigit():
                    if (int(episode) < int(AbEpisode)):
                        liste.insert(0, ('0 ab E%s' % str(AbEpisode).zfill(2)))
                if ((- 2) in staffeln):
                    liste.remove((- 2))
                    liste.insert(0, 'Manuell')
                SRLogger.writeLogFilter('allowedEpisodes', ("' %s ' - Staffel nicht erlaubt ' %s ' ' %s '" % (label_serie, seasonEpisodeString, str(liste).replace("'", '').replace('"', ''))))
            continue
        # Passed all filters: queue the transmission for timer creation.
        updateFromEPG = self.database.getUpdateFromEPG(serien_fsid, config.plugins.serienRec.eventid.value)
        (dirname, dirname_serie) = getDirname(self.database, serien_name, serien_fsid, staffel)
        self.tempDB.addTransmission([(current_time, future_time, serien_name, serien_wlid, serien_fsid, markerType, staffel, episode, seasonEpisodeString, title, label_serie, webChannel, stbChannel, stbRef, start_unixtime, end_unixtime, altstbChannel, altstbRef, dirname, AnzahlAufnahmen, from_time, to_time, int(vomMerkzettel), excludedWeekdays, updateFromEPG, source)])
    self.tempDB.commitTransaction()
def askForDSB(self):
    """After an automatic run, decide whether to prompt for (or directly enter)
    standby/deep standby, depending on the configured timeout and current state."""
    # Manual runs and the "do nothing afterwards" setting never shut down.
    if self.manuell:
        return
    if config.plugins.serienRec.afterAutocheck.value == '0':
        return
    timeout = config.plugins.serienRec.DSBTimeout.value
    if timeout <= 0 or Screens.Standby.inStandby:
        # No dialog possible/needed: act immediately.
        self.gotoDeepStandby(True)
        return
    print('[SerienRecorder] Try to display shutdown notification...')
    try:
        notificationText = 'Soll der SerienRecorder die Box in den Ruhemodus (Standby) schalten?'
        if config.plugins.serienRec.afterAutocheck.value == '2':
            notificationText = 'Soll der SerienRecorder die Box ausschalten (Deep-Standby)?'
        Notifications.AddNotificationWithCallback(self.gotoDeepStandby, MessageBox, text=notificationText, type=MessageBox.TYPE_YESNO, timeout=timeout, default=True)
    except Exception as e:
        print('[SerienRecorder] Could not display shutdown notification - shutdown box without notification... (%s)' % str(e))
        self.gotoDeepStandby(True)
def gotoDeepStandby(self, answer):
    """Callback of the shutdown dialog: enter standby or deep standby when confirmed."""
    if not answer:
        return
    if config.plugins.serienRec.afterAutocheck.value == '2':
        # Deep standby requested.
        if NavigationInstance.instance.RecordTimer.isRecording():
            print('[SerienRecorder] A running recording prevents Deep-Standby')
            SRLogger.writeLog('Eine laufende Aufnahme verhindert den Deep-Standby')
            return
        # Remove our popups before powering off.
        for message in self.messageList:
            Notifications.RemovePopup(message[3])
        print('[SerienRecorder] Going into Deep-Standby')
        SRLogger.writeLog('Gehe in den Deep-Standby')
        if Screens.Standby.inStandby:
            quitMainloop(1)
        else:
            Notifications.AddNotificationWithID('Shutdown', Screens.Standby.TryQuitMainloop, 1)
    elif not Screens.Standby.inStandby:
        # Plain standby.
        print('[SerienRecorder] Going into standby')
        SRLogger.writeLog('Gehe in den Standby')
        Notifications.AddNotification(Screens.Standby.Standby)
def checkChannel(self, channel):
    """Look up a web channel (case-insensitive) in the channel mapping.

    Returns (webChannel, stbChannel, stbRef, altstbChannel, altstbRef, status);
    unknown channels yield empty references and status '0' (disabled).
    """
    key = channel.lower()
    if key in self.senderListe:
        (webChannel, stbChannel, stbRef, altstbChannel, altstbRef, status) = self.senderListe[key]
        return (webChannel, stbChannel, stbRef, altstbChannel, altstbRef, status)
    return (channel, '', '', '', '', '0')
def dataError(error):
    """Log a data retrieval/processing failure to the console."""
    message = '[SerienRecorder] Es ist ein Fehler aufgetreten - die Daten konnten nicht abgerufen/verarbeitet werden: (%s)' % error
    print(message)
class Terminal(CmdModules):
    """Interactive weevely terminal built on cmd.Cmd.

    Module commands are attached dynamically by _load_modules(); any input that
    is not a known command is forwarded to the active remote shell (sh or php).
    """

    def __init__(self, session):
        cmd.Cmd.__init__(self)
        self.session = session
        self.prompt = 'weevely> '
        self._load_modules()
        self._load_history()
        # Welcome banner rendered with connection info and the active shell.
        self.intro = template.Template(messages.terminal.welcome_to_s).render(path=self.session.get('path'), conn_info=session.get_connection_info(), version=messages.version, default_shell=self.session.get('default_shell'))

    def emptyline(self):
        # Do nothing on empty input (cmd.Cmd would repeat the last command).
        pass

    def precmd(self, line):
        """Hook run before each command: log it and ensure a remote shell is up."""
        dlog.info(('>>>> %s' % line))
        # Terminal-local commands need no remote shell.
        if ((not line) or any((line.startswith(cmnd) for cmnd in (':set', ':unset', ':show', ':help')))):
            return line
        # (Re)establish the sh channel if it is idle or php is not running.
        if ((self.session['shell_sh']['status'] == Status.IDLE) or (self.session['shell_php']['status'] != Status.RUN)):
            self.session['default_shell'] = None
            self.session['shell_php']['status'] = Status.IDLE
            try:
                self.session['shell_sh']['status'] = modules.loaded['shell_sh'].setup()
            except ChannelException as e:
                log.error(str(e))
                return ''
        # Prefer shell_sh over shell_php as the default shell.
        for shell in ('shell_sh', 'shell_php'):
            if (self.session[shell]['status'] == Status.RUN):
                self.session['default_shell'] = shell
                break
        if (not self.session.get('default_shell')):
            log.error(messages.terminal.backdoor_unavailable)
            return ''
        # Lazily gather basic target information once per session.
        if (not self.session['system_info']['results'].get('hostname')):
            modules.loaded['system_info'].run_argv(['-info', 'hostname'])
        if (not self.session['system_info']['results'].get('whoami')):
            modules.loaded['system_info'].run_argv(['-info', 'whoami'])
        if (not self.session['file_cd']['results'].get('cwd')):
            self.do_file_cd('.')
        return line

    def postcmd(self, stop, line):
        """Hook run after each command: refresh the prompt to reflect the active shell."""
        default_shell = self.session.get('default_shell')
        if (not default_shell):
            self.prompt = 'weevely> '
        else:
            if (default_shell == 'shell_sh'):
                prompt = '$'
            elif (default_shell == 'shell_php'):
                prompt = 'PHP>'
            else:
                prompt = '?'
            self.prompt = ('%s %s ' % (self.session.get_connection_info(), prompt))

    def default(self, line):
        """Execute unrecognized input on the remote default shell and log the output."""
        if (not line):
            return
        default_shell = self.session.get('default_shell')
        if (not default_shell):
            return
        result = modules.loaded[default_shell].run_argv([line])
        if (not result):
            return
        # Strip a single trailing newline before logging.
        result = (result[:(- 1)] if (isinstance(result, str) and result.endswith('\n')) else result)
        log.info(result)

    def do_show(self, line, cmd):
        """Print stored session settings matching `line`."""
        self.session.print_to_user(line)

    def do_set(self, line, cmd):
        """Set a session variable: ':set <name> <value...>' (value may contain spaces)."""
        try:
            args = shlex.split(line)
        except Exception as e:
            # shlex fails e.g. on unbalanced quotes; report and keep going.
            import traceback
            log.debug(traceback.format_exc())
            log.warning((messages.generic.error_parsing_command_s % str(e)))
        else:
            if (len(args) < 2):
                log.warning(messages.terminal.set_usage)
            elif (len(args) >= 2):
                # Everything after the name is joined back into one value.
                args[1] = ' '.join(args[1:])
                self.session.set(args[0], args[1])

    def do_unset(self, line, cmd):
        """Unset a session variable: ':unset <name>'."""
        if (not line):
            log.warning(messages.terminal.unset_usage)
        else:
            self.session.unset(line)

    def _load_modules(self):
        """Attach every loaded module (and its aliases) as do_*/help_* commands.

        Note: attributes are set on the Terminal class itself, so all instances
        share the registered commands.
        """
        for (module_name, module_class) in modules.loaded.items():
            setattr(Terminal, ('do_%s' % module_name), module_class.run_cmdline)
            for alias in module_class.aliases:
                setattr(Terminal, ('do_alias_%s' % alias), module_class.run_alias)
                setattr(Terminal, ('help_%s' % alias), module_class.help)
            setattr(Terminal, ('help_%s' % module_name), module_class.help)

    def _load_history(self):
        """Create the history file if missing and hook up readline persistence."""
        # Touch the file so read_history_file has something to open.
        open(config.history_path, 'a').close()
        readline.set_history_length(100)
        try:
            readline.read_history_file(config.history_path)
        except IOError:
            pass
        # Persist history on interpreter exit.
        atexit.register(readline.write_history_file, config.history_path)
class OptionSeriesNetworkgraphStatesSelectMarker(Options):
    """Generated option wrapper for Highcharts
    `series.networkgraph.states.select.marker` settings.

    NOTE(review): each option appears as a getter/setter pair with the same
    name; upstream these are presumably @property/@<name>.setter pairs whose
    decorators were stripped during extraction — as written the later
    definition shadows the earlier one. Confirm against the generator output.
    The argument to _config_get is the documented Highcharts default.
    """

    def enabled(self):
        return self._config_get(None)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def enabledThreshold(self):
        # Default: markers enabled only below this point-density threshold.
        return self._config_get(2)

    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)

    def fillColor(self):
        return self._config_get(None)

    def fillColor(self, text: str):
        self._config(text, js_type=False)

    def height(self):
        return self._config_get(None)

    def height(self, num: float):
        self._config(num, js_type=False)

    def lineColor(self):
        # Default marker border color.
        return self._config_get('#ffffff')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        return self._config_get(0)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def radius(self):
        return self._config_get(4)

    def radius(self, num: float):
        self._config(num, js_type=False)

    def width(self):
        return self._config_get(None)

    def width(self, num: float):
        self._config(num, js_type=False)
def test_analysis_config_iter_config_dict_initialisation():
    """Iterative-run settings supplied in the config dict must show up on AnalysisConfig."""
    case_fmt = 'case_%d'
    config_dict = {
        ConfigKeys.NUM_REALIZATIONS: 10,
        ConfigKeys.ITER_CASE: case_fmt,
        ConfigKeys.ITER_COUNT: 42,
        ConfigKeys.ITER_RETRY_COUNT: 24,
    }
    analysis_config = AnalysisConfig.from_dict(config_dict)
    assert analysis_config.case_format_is_set() is True
    assert analysis_config.case_format == case_fmt
    assert analysis_config.num_iterations == 42
    assert analysis_config.num_retries_per_iter == 24
class RobotWebServer(object):
    """Minimal HTTP front-end for a robot.

    Serves `handler_class` on `port_number` until interrupted, then closes the
    socket and stops every motor.
    """

    def __init__(self, robot, handler_class, port_number=8000):
        self.content_server = None
        self.handler_class = handler_class
        # NOTE(review): this assigns the robot onto the handler CLASS, so it is
        # shared by every server using the same class — confirm intended.
        self.handler_class.robot = robot
        self.port_number = port_number

    def run(self):
        """Serve forever; on Ctrl-C or any error, close the socket and stop all motors."""
        try:
            log.info(('Started HTTP server (content) on port %d' % self.port_number))
            self.content_server = HTTPServer(('', self.port_number), self.handler_class)
            self.content_server.serve_forever()
        except (KeyboardInterrupt, Exception) as e:
            log.exception(e)
        # Cleanup: serve_forever() only returns via shutdown() or the handled
        # exception above, so this runs on every exit path reached here.
        if self.content_server:
            self.content_server.socket.close()
            self.content_server = None
        for motor in list_motors():
            motor.stop()
def create_customer(doc):
    """Create a Customer record from a Patient document and link it back.

    Customer group/territory fall back to Selling Settings when the patient has
    none. The insert ignores permissions and mandatory fields; the new customer
    name is written onto the Patient record and announced via a msgprint alert.
    """
    customer = frappe.get_doc({'doctype': 'Customer', 'customer_name': doc.patient_name, 'customer_group': (doc.customer_group or frappe.db.get_single_value('Selling Settings', 'customer_group')), 'territory': (doc.territory or frappe.db.get_single_value('Selling Settings', 'territory')), 'customer_type': 'Individual', 'default_currency': doc.default_currency, 'default_price_list': doc.default_price_list, 'language': doc.language, 'image': doc.image}).insert(ignore_permissions=True, ignore_mandatory=True)
    frappe.db.set_value('Patient', doc.name, 'customer', customer.name)
    frappe.msgprint(_('Customer {0} created and linked to Patient').format(customer.name), alert=True)
class OptionPlotoptionsSankeySonificationTracksMappingHighpassFrequency(Options):
    """Generated option wrapper for Highcharts
    `plotOptions.sankey.sonification.tracks.mapping.highpass.frequency`.

    NOTE(review): same stripped @property/@setter pattern as the other
    generated Options classes — duplicate names mean the later definition
    shadows the earlier as written; confirm against the generator output.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class StateMemoryStorage(StateStorageBase):
    """In-memory FSM state storage.

    Layout: ``self.data[chat_id][user_id] == {'state': <state>, 'data': {...}}``.
    """

    def __init__(self) -> None:
        self.data = {}

    async def set_state(self, chat_id, user_id, state):
        """Store a state (enum members are reduced to their name); always returns True."""
        if hasattr(state, 'name'):
            state = state.name
        if chat_id not in self.data:
            self.data[chat_id] = {user_id: {'state': state, 'data': {}}}
            return True
        users = self.data[chat_id]
        if user_id in users:
            users[user_id]['state'] = state
        else:
            users[user_id] = {'state': state, 'data': {}}
        return True

    async def delete_state(self, chat_id, user_id):
        """Remove the user's record; private chats (chat_id == user_id) drop the whole chat."""
        users = self.data.get(chat_id)
        if users and users.get(user_id):
            del users[user_id]
            if chat_id == user_id:
                del self.data[chat_id]
            return True
        return False

    async def get_state(self, chat_id, user_id):
        """Return the stored state, or None when unknown."""
        users = self.data.get(chat_id)
        if users and users.get(user_id):
            return users[user_id]['state']
        return None

    async def get_data(self, chat_id, user_id):
        """Return the stored data dict, or None when unknown."""
        users = self.data.get(chat_id)
        if users and users.get(user_id):
            return users[user_id]['data']
        return None

    async def reset_data(self, chat_id, user_id):
        """Replace the user's data dict with a fresh empty one."""
        users = self.data.get(chat_id)
        if users and users.get(user_id):
            users[user_id]['data'] = {}
            return True
        return False

    async def set_data(self, chat_id, user_id, key, value):
        """Set one key in the user's data; raises if the record does not exist."""
        users = self.data.get(chat_id)
        if users and users.get(user_id):
            users[user_id]['data'][key] = value
            return True
        raise RuntimeError('chat_id {} and user_id {} does not exist'.format(chat_id, user_id))

    def get_interactive_data(self, chat_id, user_id):
        """Return a StateContext wrapper for `async with`-style data editing."""
        return StateContext(self, chat_id, user_id)

    async def save(self, chat_id, user_id, data):
        # Assumes the record already exists (raises KeyError otherwise).
        self.data[chat_id][user_id]['data'] = data
class Solution():
    """LeetCode 1178 "Number of Valid Words for Each Puzzle".

    A word is valid for a puzzle when every letter of the word occurs in the
    puzzle and the word contains the puzzle's first letter. Words and puzzles
    are encoded as 26-bit letter masks; for each puzzle all submasks of the
    remaining letters are enumerated.
    """

    def findNumOfValidWords(self, words: List[str], puzzles: List[str]) -> List[int]:
        """Return, per puzzle, the number of valid words."""
        freq = Counter(self.mask(w) for w in words)
        answer = []
        for puzzle in puzzles:
            first_bit = self.mask(puzzle[0])
            rest = self.mask(puzzle[1:])
            # Words consisting solely of the first letter.
            total = freq[first_bit]
            sub = rest
            # Enumerate every non-empty submask of `rest`, always OR-ing in the
            # mandatory first letter.
            while sub:
                total += freq[sub | first_bit]
                sub = (sub - 1) & rest
            answer.append(total)
        return answer

    def mask(self, word: str) -> int:
        """Encode a lowercase word as a bitmask over 'a'..'z'."""
        bits = 0
        for letter in word:
            bits |= 1 << (ord(letter) - ord('a'))
        return bits
def test_dstdirs_to_youngest_phase() -> None:
    """Per destination dir, the youngest (lowest) phase among its jobs is reported."""
    specs = [
        ('/plots1', job.Phase(1, 5)),
        ('/plots2', job.Phase(1, 1)),
        ('/plots2', job.Phase(3, 1)),
        ('/plots2', job.Phase(2, 1)),
        ('/plots3', job.Phase(4, 1)),
    ]
    jobs = [job_w_dstdir_phase(dstdir, phase) for (dstdir, phase) in specs]
    expected = {'/plots1': job.Phase(1, 5), '/plots2': job.Phase(1, 1), '/plots3': job.Phase(4, 1)}
    assert manager.dstdirs_to_youngest_phase(jobs) == expected
class Generator(nn.Module):
    """InfoGAN-style generator.

    Latent noise, class labels and a continuous code are concatenated, mapped
    through a linear layer to a 128-channel feature map at img_size/4, and then
    decoded by two 2x upsampling conv stages into a Tanh image of shape
    (channels, img_size, img_size).
    """

    def __init__(self, latent_dim: int, n_classes: int, code_dim: int, img_size: int, channels: int) -> None:
        super().__init__()
        in_features = latent_dim + n_classes + code_dim
        # Two Upsample(x2) stages below restore the full resolution.
        self.init_size: int = img_size // 4
        self.l1: nn.Sequential = nn.Sequential(nn.Linear(in_features, 128 * self.init_size ** 2))
        layers = [
            nn.BatchNorm2d(128),
            nn.Upsample(scale_factor=2),
            nn.Conv2d(128, 128, 3, stride=1, padding=1),
            nn.BatchNorm2d(128, 0.8),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Upsample(scale_factor=2),
            nn.Conv2d(128, 64, 3, stride=1, padding=1),
            nn.BatchNorm2d(64, 0.8),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Conv2d(64, channels, 3, stride=1, padding=1),
            nn.Tanh(),
        ]
        self.conv_blocks: nn.Sequential = nn.Sequential(*layers)

    def forward(self, noise: torch.Tensor, labels: torch.Tensor, code: torch.Tensor) -> torch.Tensor:
        """Concatenate the conditioning inputs along the last dim and decode an image."""
        z = torch.cat((noise, labels, code), -1)
        hidden = self.l1(z)
        hidden = hidden.view(hidden.shape[0], 128, self.init_size, self.init_size)
        return self.conv_blocks(hidden)
class Color(metaclass=ColorMeta):
    """A color: a color-space plugin plus a coordinate list (alpha last).

    Behavior is extended through pluggable backends registered via register():
    color spaces, delta-E methods, gamut fitting, chromatic adaptation,
    contrast, filters, interpolation and CCT.
    """

    # Plugin registries (plugin name -> plugin instance).
    CS_MAP = {}
    DE_MAP = {}
    FIT_MAP = {}
    CAT_MAP = {}
    CONTRAST_MAP = {}
    FILTER_MAP = {}
    INTERPOLATE_MAP = {}
    CCT_MAP = {}
    # Default settings for the various operations (subclasses may override).
    PRECISION = util.DEF_PREC
    FIT = util.DEF_FIT
    INTERPOLATE = util.DEF_INTERPOLATE
    DELTA_E = util.DEF_DELTA_E
    HARMONY = util.DEF_HARMONY
    AVERAGE = util.DEF_AVERAGE
    CHROMATIC_ADAPTATION = util.DEF_CHROMATIC_ADAPTATION
    CONTRAST = util.DEF_CONTRAST
    CCT = util.DEF_CCT
    POWERLESS = False
    CARRYFORWARD = False
    # Safety bound when resolving conversion chains between spaces.
    _MAX_CONVERT_ITERATIONS = 10
def __init__(self, color: ColorInput, data: Optional[VectorLike]=None, alpha: float=util.DEF_ALPHA, **kwargs: Any) -> None:
    """Initialize from a color string, another Color, a mapping, or a space name plus coordinates."""
    (self._space, self._coords) = self._parse(color, data, alpha, **kwargs)

def __len__(self) -> int:
    # Number of color channels plus one for alpha.
    return (len(self._space.CHANNELS) + 1)

# NOTE(review): the duplicated `...` bodies below look like typing @overload
# stubs whose decorators were lost in extraction; as written only the last
# definition of each dunder is effective — confirm against upstream.
def __getitem__(self, i: Union[(str, int)]) -> float:
    ...

def __getitem__(self, i: slice) -> Vector:
    ...

def __getitem__(self, i: Union[(str, int, slice)]) -> Union[(float, Vector)]:
    """Index channels by name, integer index, or slice (alpha is the last element)."""
    return (self._coords[self._space.get_channel_index(i)] if isinstance(i, str) else self._coords[i])

def __setitem__(self, i: Union[(str, int)], v: float) -> None:
    ...

def __setitem__(self, i: slice, v: Vector) -> None:
    ...

def __setitem__(self, i: Union[(str, int, slice)], v: Union[(float, Vector)]) -> None:
    """Assign channels by name, index, or slice; values are clamped to each channel's declared limits."""
    space = self._space
    if isinstance(i, slice):
        for (index, value) in zip(range(len(self._coords))[i], v):
            self._coords[index] = alg.clamp(float(value), *space.channels[index].limit)
    else:
        index = (space.get_channel_index(i) if isinstance(i, str) else i)
        self._coords[index] = alg.clamp(float(v), *space.channels[index].limit)

def __eq__(self, other: Any) -> bool:
    """Equal only for the exact same class, same color space, and coordinates equal per util.cmp_coords."""
    return ((type(other) == type(self)) and (other.space() == self.space()) and util.cmp_coords(other[:], self[:]))
def _parse(cls, color: ColorInput, data: Optional[VectorLike]=None, alpha: float=util.DEF_ALPHA, **kwargs: Any) -> Tuple[(Space, List[float])]:
    """Resolve any accepted color input into (space plugin, coordinate list incl. alpha).

    Accepts a space name with explicit `data`, a serialized color string,
    another Color instance, or a mapping with 'space'/'coords'/'alpha' keys.
    NOTE(review): the `cls`-first methods in this class appear to be
    @classmethods whose decorators were stripped in extraction — confirm upstream.
    """
    if isinstance(color, str):
        if (data is not None):
            # Space name + raw coordinates.
            s = color
            space_class = cls.CS_MAP.get(s)
            if (not space_class):
                raise ValueError("'{}' is not a registered color space".format(s))
            num_channels = len(space_class.CHANNELS)
            num_data = len(data)
            if (num_data < num_channels):
                # Pad missing channels with NaN (undefined).
                data = (list(data) + ([alg.nan] * (num_channels - num_data)))
            # Clamp each channel (and alpha) to its declared hard limits.
            coords = [alg.clamp(float(v), *c.limit) for (c, v) in zipl(space_class.CHANNELS, data)]
            coords.append(alg.clamp(float(alpha), *space_class.channels[(- 1)].limit))
            obj = (space_class, coords)
        else:
            # Plain string: parse as a serialized/CSS color.
            m = cls._match(color, fullmatch=True)
            if (m is None):
                raise ValueError("'{}' is not a valid color".format(color))
            coords = [alg.clamp(float(v), *c.limit) for (c, v) in zipl(m[0].CHANNELS, m[1])]
            coords.append(alg.clamp(float(m[2]), *m[0].channels[(- 1)].limit))
            obj = (m[0], coords)
    elif isinstance(color, Color):
        # Another Color: reuse its coordinates under this class's registry.
        space_class = cls.CS_MAP.get(color.space())
        if (not space_class):
            raise ValueError("'{}' is not a registered color space".format(color.space()))
        obj = (space_class, color[:])
    elif isinstance(color, Mapping):
        # Dict form: recurse with the unpacked pieces.
        obj = cls._parse(color['space'], color['coords'], color.get('alpha', 1.0))
    else:
        raise TypeError("'{}' is an unrecognized type".format(type(color)))
    return obj

def _match(cls, string: str, start: int=0, fullmatch: bool=False) -> Optional[Tuple[('Space', Vector, float, int, int)]]:
    """Match a color string; return (space, coords, alpha, start, end) or None.

    Tries the generic `color(...)` form first, then each space's own syntax.
    """
    if (string[start:(start + 6)].lower() == 'color('):
        for space_class in cls.CS_MAP.values():
            if (not space_class.COLOR_FORMAT):
                continue
            m = parse.parse_css(space_class, string, start, fullmatch, True)
            if (m is not None):
                return (space_class, m[0][0], m[0][1], start, m[1])
    for space_class in cls.CS_MAP.values():
        m2 = space_class.match(string, start, fullmatch)
        if (m2 is not None):
            return (space_class, m2[0][0], m2[0][1], start, m2[1])
    return None

def match(cls, string: str, start: int=0, fullmatch: bool=False) -> Optional[ColorMatch]:
    """Public match API: wrap a successful parse in ColorMatch(color, start, end)."""
    m = cls._match(string, start, fullmatch)
    if (m is not None):
        return ColorMatch(cls(m[0].NAME, m[1], m[2]), m[3], m[4])
    return None

def _is_this_color(cls, obj: Any) -> bool:
    """Exact-class check (no subclasses)."""
    return (type(obj) is cls)

def _is_color(cls, obj: Any) -> bool:
    """Any Color instance (subclasses included)."""
    return isinstance(obj, Color)
def register(cls, plugin: Union[(Plugin, Sequence[Plugin])], *, overwrite: bool=False, silent: bool=False) -> None:
    """Register one plugin or a sequence of plugins in the matching registry.

    overwrite: replace an existing plugin with the same name.
    silent: suppress the duplicate-name / reserved-name errors.
    Raises TypeError for unknown plugin types and ValueError on name clashes.
    The conversion-chain cache is cleared whenever a Space plugin was touched.
    """
    reset_convert_cache = False
    mapping = None
    p = None
    for i in ([plugin] if (not isinstance(plugin, Sequence)) else plugin):
        # Pick the registry that matches the plugin type.
        if isinstance(i, Space):
            mapping = cls.CS_MAP
            reset_convert_cache = True
            p = i
        elif isinstance(i, DeltaE):
            mapping = cls.DE_MAP
            p = i
        elif isinstance(i, CAT):
            mapping = cls.CAT_MAP
            p = i
        elif isinstance(i, Filter):
            mapping = cls.FILTER_MAP
            p = i
        elif isinstance(i, ColorContrast):
            mapping = cls.CONTRAST_MAP
            p = i
        elif isinstance(i, Interpolate):
            mapping = cls.INTERPOLATE_MAP
            p = i
        elif isinstance(i, CCT):
            mapping = cls.CCT_MAP
            p = i
        elif isinstance(i, Fit):
            mapping = cls.FIT_MAP
            p = i
            # 'clip' is built in and may never be overridden.
            if (p.NAME == 'clip'):
                if reset_convert_cache:
                    cls._get_convert_chain.cache_clear()
                if (not silent):
                    # Fix: the message previously contained an unfilled '{}' placeholder.
                    raise ValueError("'{}' is a reserved name for gamut mapping/reduction and cannot be overridden".format(p.NAME))
                continue
        else:
            if reset_convert_cache:
                cls._get_convert_chain.cache_clear()
            raise TypeError("Cannot register plugin of type '{}'".format(type(i)))
        if (((p.NAME != '*') and (p.NAME not in mapping)) or overwrite):
            mapping[p.NAME] = p
        elif (not silent):
            if reset_convert_cache:
                cls._get_convert_chain.cache_clear()
            raise ValueError("A plugin of name '{}' already exists or is not allowed".format(p.NAME))
    if reset_convert_cache:
        cls._get_convert_chain.cache_clear()
def deregister(cls, plugin: Union[(str, Sequence[str])], *, silent: bool=False) -> None:
    """Remove plugins by 'category:name' specifier(s).

    Categories: space, delta-e, cat, filter, contrast, interpolate, cct, fit.
    A bare '*' clears every registry; 'category:*' clears one category.
    The built-in 'fit:clip' cannot be removed. `silent` suppresses
    missing-plugin / reserved-name errors.
    """
    reset_convert_cache = False
    if isinstance(plugin, str):
        plugin = [plugin]
    mapping = None
    for p in plugin:
        if (p == '*'):
            # Wipe everything and stop.
            cls.CS_MAP.clear()
            cls.DE_MAP.clear()
            cls.CAT_MAP.clear()
            cls.FILTER_MAP.clear()
            cls.CONTRAST_MAP.clear()
            cls.INTERPOLATE_MAP.clear()
            cls.CCT_MAP.clear()
            cls.FIT_MAP.clear()
            return
        # NOTE(review): raises ValueError (unpack) when the specifier has no ':'.
        (ptype, name) = p.split(':', 1)
        if (ptype == 'space'):
            mapping = cls.CS_MAP
            # Removing a space invalidates cached conversion chains.
            reset_convert_cache = True
        elif (ptype == 'delta-e'):
            mapping = cls.DE_MAP
        elif (ptype == 'cat'):
            mapping = cls.CAT_MAP
        elif (ptype == 'filter'):
            mapping = cls.FILTER_MAP
        elif (ptype == 'contrast'):
            mapping = cls.CONTRAST_MAP
        elif (ptype == 'interpolate'):
            mapping = cls.INTERPOLATE_MAP
        elif (ptype == 'cct'):
            mapping = cls.CCT_MAP
        elif (ptype == 'fit'):
            mapping = cls.FIT_MAP
            if (name == 'clip'):
                if reset_convert_cache:
                    cls._get_convert_chain.cache_clear()
                if (not silent):
                    raise ValueError("'{}' is a reserved name gamut mapping/reduction and cannot be removed".format(name))
                continue
        else:
            if reset_convert_cache:
                cls._get_convert_chain.cache_clear()
            raise ValueError("The plugin category of '{}' is not recognized".format(ptype))
        if (name == '*'):
            mapping.clear()
        elif (name in mapping):
            del mapping[name]
        elif (not silent):
            if reset_convert_cache:
                cls._get_convert_chain.cache_clear()
            raise ValueError("A plugin of name '{}' under category '{}' could not be found".format(name, ptype))
    if reset_convert_cache:
        cls._get_convert_chain.cache_clear()
def random(cls, space: str, *, limits: Optional[Sequence[Optional[Sequence[float]]]]=None) -> 'Color':
    """Generate a random color in `space`.

    `limits` may give per-channel (low, high) overrides; None entries (or a
    list shorter than the channel count) fall back to the channel's declared
    low/high range.
    """
    cs = cls.CS_MAP[space]
    num_chan = len(cs.CHANNELS)
    if (limits is None):
        limits = []
    length = len(limits)
    coords = []
    for i in range(num_chan):
        chan = (limits[i] if (i < length) else None)
        if (chan is None):
            chan = cs.channels[i]
            (a, b) = (chan.low, chan.high)
        else:
            (a, b) = chan
        # `random` resolves to the global random module here (the method name
        # is not visible inside its own body).
        coords.append(random.uniform(a, b))
    obj = cls(space, coords)
    # Normalize hue-based spaces so achromatic results get a NaN hue.
    if hasattr(obj._space, 'hue_index'):
        obj.normalize()
    return obj

def blackbody(cls, space: str, temp: float, duv: float=0.0, *, scale: bool=True, scale_space: Optional[str]=None, method: Optional[str]=None, **kwargs: Any) -> 'Color':
    """Create a color from a blackbody temperature and optional Duv offset via the configured CCT plugin."""
    cct = temperature.cct(method, cls)
    color = cct.from_cct(cls, space, temp, duv, scale, scale_space, **kwargs)
    return color

def cct(self, *, method: Optional[str]=None, **kwargs: Any) -> Vector:
    """Return this color's correlated color temperature data via the configured CCT plugin."""
    cct = temperature.cct(method, self)
    return cct.to_cct(self, **kwargs)
def to_dict(self, *, nans: bool=True) -> Mapping[(str, Any)]:
    """Serialize to {'space', 'coords', 'alpha'}; `nans=False` resolves undefined channels first."""
    return {'space': self.space(), 'coords': self.coords(nans=nans), 'alpha': self.alpha(nans=nans)}

def normalize(self, *, nans: bool=True) -> 'Color':
    """Normalize in place and return self.

    Resolves NaN channels to concrete values, re-NaNs the hue of achromatic
    colors in hue-based spaces (when `nans`), and forces an undefined alpha to 0.
    """
    self[:(- 1)] = self.coords(nans=False)
    if (nans and hasattr(self._space, 'hue_index') and self.is_achromatic()):
        i = self._space.hue_index()
        self[i] = alg.nan
    alpha = self[(- 1)]
    self[(- 1)] = (0.0 if math.isnan(alpha) else alpha)
    return self

def is_nan(self, name: str) -> bool:
    """True when the named channel is undefined (NaN)."""
    return math.isnan(self.get(name))

def _handle_color_input(self, color: ColorInput) -> 'Color':
    """Coerce any accepted color input into an instance of this Color subclass."""
    if isinstance(color, (str, Mapping)):
        return self.new(color)
    elif self._is_color(color):
        # Reuse the object only when it is exactly this class.
        return (color if self._is_this_color(color) else self.new(color))
    else:
        raise TypeError("Unexpected type '{}'".format(type(color)))
def space(self) -> str:
    """Return the name of the color's current color space."""
    return self._space.NAME

def new(self, color: ColorInput, data: Optional[VectorLike]=None, alpha: float=util.DEF_ALPHA, **kwargs: Any) -> 'Color':
    """Create a new color of the same (sub)class as this one."""
    color_cls = type(self)
    return color_cls(color, data, alpha, **kwargs)

def clone(self) -> 'Color':
    """Return a copy of this color (same space, coordinates and alpha)."""
    channels = self[:(- 1)]
    alpha = self[(- 1)]
    return self.new(self.space(), channels, alpha)
def convert(self, space: str, *, fit: Union[(bool, str)]=False, in_place: bool=False, norm: bool=True) -> 'Color':
    """Convert this color to `space`.

    fit: True or a fit-method name gamut-maps the result when out of gamut.
    in_place: mutate this color instead of returning a clone.
    norm: NaN the hue afterwards when the result is achromatic.
    """
    if fit:
        method = (None if (not isinstance(fit, str)) else fit)
        if (not self.in_gamut(space, tolerance=0.0)):
            # Convert first (without fitting), then gamut-map in the target space.
            converted = self.convert(space, in_place=in_place, norm=norm)
            return converted.fit(space, method=method)
    if (space == self.space()):
        # Already in the target space: no-op (or a copy when not in place).
        return (self if in_place else self.clone())
    (c, coords) = convert.convert(self, space)
    this = (self if in_place else self.clone())
    this._space = c
    this._coords[:(- 1)] = coords
    if (norm and hasattr(this._space, 'hue_index') and this.is_achromatic()):
        this[this._space.hue_index()] = alg.nan
    return this

def is_achromatic(self) -> bool:
    """True when the color is achromatic; falls back to an XYZ D65 check when the space cannot decide."""
    value = self._space.is_achromatic(self.coords(nans=False))
    if (value is None):
        xyz = self.convert('xyz-d65')
        return bool(xyz._space.is_achromatic(xyz[:(- 1)]))
    return value

def mutate(self, color: ColorInput, data: Optional[VectorLike]=None, alpha: float=util.DEF_ALPHA, **kwargs: Any) -> 'Color':
    """Replace this color's space and coordinates from new input (no conversion back). Returns self."""
    (self._space, self._coords) = self._parse(color, data=data, alpha=alpha, **kwargs)
    return self

def update(self, color: ColorInput, data: Optional[VectorLike]=None, alpha: float=util.DEF_ALPHA, *, norm: bool=True, **kwargs: Any) -> 'Color':
    """Like mutate(), but convert the result back to this color's original space. Returns self."""
    space = self.space()
    (self._space, self._coords) = self._parse(color, data=data, alpha=alpha, **kwargs)
    if (self._space.NAME != space):
        self.convert(space, in_place=True, norm=norm)
    return self

def _hotswap(self, color: 'Color') -> 'Color':
    """Adopt another color's space/coordinates directly — no parsing or conversion."""
    (self._space, self._coords) = (self.CS_MAP[color.space()], color[:])
    return self

def to_string(self, **kwargs: Any) -> str:
    """Serialize using the current space's own string formatter."""
    return self._space.to_string(self, **kwargs)

def __repr__(self) -> str:
    """Generic `color(space coords / alpha)` representation."""
    return 'color({} {} / {})'.format(self._space._serialize()[0], ' '.join([util.fmt_float(coord, util.DEF_PREC) for coord in self[:(- 1)]]), util.fmt_float(self[(- 1)], util.DEF_PREC))

__str__ = __repr__
def white(self, cspace: str='xyz') -> Vector:
    """Return the current space's reference white in `cspace` (chromaticity pair, or full XYZ)."""
    value = self.convert_chromaticity('xy-1931', cspace, self._space.WHITE)
    return (value if (cspace == 'xyz') else value[:(- 1)])

def uv(self, mode: str='1976', *, white: Optional[VectorLike]=None) -> Vector:
    """Return (u', v') ('1976') or (u, v) ('1960') chromaticity coordinates."""
    return self.split_chromaticity(('uv-' + mode))[:(- 1)]

def xy(self, *, white: Optional[VectorLike]=None) -> Vector:
    """Return CIE 1931 (x, y) chromaticity coordinates."""
    return self.split_chromaticity('xy-1931')[:(- 1)]

def split_chromaticity(self, cspace: str='uv-1976', *, white: Optional[VectorLike]=None) -> Vector:
    """Return luminance-chromaticity coordinates [c1, c2, Y], adapted to `white` (default: the space's own)."""
    if (white is None):
        white = self._space.WHITE
    xyz = self.convert('xyz-d65')
    coords = self.chromatic_adaptation(xyz._space.WHITE, white, xyz.coords(nans=False))
    if (cspace == 'xyz'):
        raise ValueError('XYZ is not a luminant-chromaticity color space.')
    # NOTE(review): the 'xy_1931' literal below (underscore) never matches the
    # hyphenated names used everywhere else ('xy-1931'), so the shortcut branch
    # is dead and xy-1931 requests always go through convert_chromaticity —
    # confirm the intended spelling upstream before changing it.
    return (self.convert_chromaticity('xyz', cspace, coords, white=white) if (cspace != 'xy_1931') else coords)

def chromaticity(cls, space: str, coords: VectorLike, cspace: str='uv-1976', *, scale: bool=False, scale_space: Optional[str]=None, white: Optional[VectorLike]=None) -> 'Color':
    """Construct a color in `space` from chromaticity coordinates given in `cspace`.

    scale: for RGB-ish `scale_space` (default srgb-linear), rescale so the
    result sits within the RGB cube.
    """
    if (scale_space is None):
        scale_space = 'srgb-linear'
    if (white is None):
        white = cls.CS_MAP[space].WHITE
    if (cspace == 'xyz'):
        raise ValueError('XYZ is not a luminant-chromaticity color space.')
    # Convert the chromaticity pair (+ optional Y) to XYZ, then adapt to D65.
    coords = cls.convert_chromaticity(cspace, 'xyz', coords, white=white)
    color = cls('xyz-d65', cls.chromatic_adaptation(white, cls.CS_MAP['xyz-d65'].WHITE, coords))
    if (scale and isinstance(cls.CS_MAP[scale_space], RGBish)):
        color.convert(scale_space, in_place=True)
        color[:(- 1)] = util.rgb_scale(color.coords())
    if (space != color.space()):
        color.convert(space, in_place=True)
    return color
def convert_chromaticity(cls, cspace1: str, cspace2: str, coords: VectorLike, *, white: Optional[VectorLike]=None) -> Vector:
    """Convert between chromaticity representations ('xy-1931', 'uv-1960', 'uv-1976', 'xyz').

    2-element chromaticity inputs get an implied luminance Y = 1. Returns
    [c1, c2, Y], or full XYZ when `cspace2` is 'xyz'.
    """
    if (cspace1 not in SUPPORTED_CHROMATICITY_SPACES):
        raise ValueError("Unexpected chromaticity space '{}'".format(cspace1))
    if (cspace2 not in SUPPORTED_CHROMATICITY_SPACES):
        raise ValueError("Unexpected chromaticity space '{}'".format(cspace2))
    l = len(coords)
    if (((cspace1 == 'xyz') and (l != 3)) or (l not in (2, 3))):
        raise ValueError('Unexpected number of coordinates ({}) for {}'.format(l, cspace1))
    if (cspace1 == cspace2):
        # Identity conversion: just normalize to the 3-element form.
        return ((list(coords) + [1]) if (l == 2) else list(coords))
    # XYZ input is first reduced to xyY (white used when luminance is zero).
    if (cspace1 == 'xyz'):
        coords = util.xyz_to_xyY(coords, (([0.0] * 2) if (white is None) else white))
        cspace1 = 'xy-1931'
        if (cspace2 == cspace1):
            return coords
    # Split the chromaticity pair from the luminance.
    (pair, Y) = ((coords[:(- 1)], coords[(- 1)]) if (l == 3) else (coords, 1.0))
    target = cspace2
    # XYZ output is derived from the xy-1931 pair at the end.
    if (cspace2 == 'xyz'):
        cspace2 = 'xy-1931'
    # Convert the pair, using xy-1931 as the hub representation.
    if ((cspace1 == 'xy-1931') and (cspace2 != 'xy-1931')):
        pair = (util.xy_to_uv_1960(pair) if (cspace2 == 'uv-1960') else util.xy_to_uv(pair))
    elif (cspace1 == 'uv-1960'):
        pair = (util.uv_1960_to_xy(pair) if (cspace2 == 'xy-1931') else util.xy_to_uv(util.uv_1960_to_xy(pair)))
    elif (cspace1 == 'uv-1976'):
        pair = (util.uv_to_xy(pair) if (cspace2 == 'xy-1931') else util.xy_to_uv_1960(util.uv_to_xy(pair)))
    if (target == 'xyz'):
        return util.xy_to_xyz(pair, Y)
    return (list(pair) + [Y])

def chromatic_adaptation(cls, w1: VectorLike, w2: VectorLike, xyz: VectorLike, *, method: Optional[str]=None) -> Vector:
    """Adapt XYZ values from white point `w1` to `w2` using the named (or default) CAT plugin.

    NOTE(review): the error message interpolates `method`, which is None when
    the class default name itself is missing from CAT_MAP.
    """
    adapter = cls.CAT_MAP.get((method if (method is not None) else cls.CHROMATIC_ADAPTATION))
    if (not adapter):
        raise ValueError("'{}' is not a supported CAT".format(method))
    return adapter.adapt(tuple(w1), tuple(w2), xyz)
def clip(self, space: Optional[str]=None) -> 'Color':
    """Clip the channels to the gamut of `space` (default: the current
    space) and return the color converted back to its original space."""
    source = self.space()
    target = source if space is None else space
    clipped = self.convert(target, in_place=True, norm=False)
    gamut.clip_channels(clipped)
    return clipped.convert(source, in_place=True)
def fit(self, space: Optional[str]=None, *, method: Optional[str]=None, **kwargs: Any) -> 'Color':
    """Gamut-map the color into `space` using `method` (default: class FIT),
    returning the color converted back to its original space."""
    algorithm = self.FIT if method is None else method
    # 'clip' is a simple special case with its own entry point.
    if algorithm == 'clip':
        return self.clip(space)
    source = self.space()
    target = source if space is None else space
    mapping = self.FIT_MAP.get(algorithm)
    if not mapping:
        raise ValueError(f"'{algorithm}' gamut mapping is not currently supported")
    self.convert(target, in_place=True, norm=False)
    # Already in gamut (zero tolerance): clipping merely trims float noise.
    if self.in_gamut(tolerance=0):
        gamut.clip_channels(self)
    else:
        mapping.fit(self, **kwargs)
    return self.convert(source, in_place=True)
def in_gamut(self, space: Optional[str]=None, *, tolerance: float=util.DEF_FIT_TOLERANCE) -> bool:
    """Return whether the color fits in the given space's gamut
    (default: its own space) within `tolerance`."""
    target = space if space is not None else self.space()
    color = self if target == self.space() else self.convert(target, norm=False)
    # Some spaces delegate gamut checking to a reference space first.
    check_space = color._space.GAMUT_CHECK
    if check_space is not None:
        if not color.convert(check_space, norm=False).in_gamut(tolerance=tolerance):
            return False
    return gamut.verify(color, tolerance)
def in_pointer_gamut(self, *, tolerance: float=util.DEF_FIT_TOLERANCE) -> bool:
    """Check whether the color lies within the Pointer gamut (real surface colors)."""
    return gamut.pointer.in_pointer_gamut(self, tolerance)
def fit_pointer_gamut(self) -> 'Color':
    """Fit the color into the Pointer gamut, returning the adjusted color."""
    return gamut.pointer.fit_pointer_gamut(self)
def mask(self, channel: Union[str, Sequence[str]], *, invert: bool=False, in_place: bool=False) -> 'Color':
    """Set the named channel(s) to NaN (undefined); with `invert`, mask
    every channel *except* the named ones instead."""
    target = self if in_place else self.clone()
    aliases = self._space.CHANNEL_ALIASES
    # Resolve aliases up front into a set of canonical channel names.
    if isinstance(channel, str):
        selected = {aliases.get(channel, channel)}
    else:
        selected = {aliases.get(c, c) for c in channel}
    for name in self._space.channels:
        if (name in selected) != bool(invert):
            target[name] = alg.nan
    return target
def mix(self, color: ColorInput, percent: float=util.DEF_MIX, *, in_place: bool=False, **interpolate_args: Any) -> 'Color':
    """Mix this color with another by interpolating at `percent`."""
    domain = interpolate_args.get('domain')
    if domain is not None:
        # Normalize any user-supplied interpolation domain.
        interpolate_args['domain'] = interpolate.normalize_domain(domain)
    if not (self._is_color(color) or isinstance(color, (str, Mapping))):
        raise TypeError("Unexpected type '{}'".format(type(color)))
    mixed = self.interpolate([self, color], **interpolate_args)(percent)
    return self._hotswap(mixed) if in_place else mixed
def steps(cls, colors: Sequence[Union[ColorInput, interpolate.stop, Callable[..., float]]], *, steps: int=2, max_steps: int=1000, max_delta_e: float=0, delta_e: Optional[str]=None, **interpolate_args: Any) -> List['Color']:
    """Generate a list of discrete color steps along an interpolation of
    `colors`, optionally constrained by a maximum delta E."""
    domain = interpolate_args.get('domain')
    if domain is not None:
        # Normalize any user-supplied interpolation domain.
        interpolate_args['domain'] = interpolate.normalize_domain(domain)
    interpolator = cls.interpolate(colors, **interpolate_args)
    return interpolator.steps(steps, max_steps, max_delta_e, delta_e)
def discrete(cls, colors: Sequence[Union[(ColorInput, interpolate.stop, Callable[(..., float)])]], *, space: Union[(str, None)]=None, out_space: Union[(str, None)]=None, steps: Union[(int, None)]=None, max_steps: int=1000, max_delta_e: float=0, delta_e: Union[(str, None)]=None, domain: Optional[List[float]]=None, **interpolate_args: Any) -> Interpolator:
    """Create an interpolation discretized into a fixed number of color bands.

    When `steps` is not provided, the number of bands is derived from the
    input sequence.
    """
    # NOTE(review): `(not callable(c)) or (not isinstance(c, interpolate.stop))`
    # is true for *every* element (nothing is both callable and a `stop`), so
    # this always counts all items. If the intent was to count only non-easing
    # entries, the condition should likely be just `not callable(c)` -- confirm
    # against upstream before changing.
    num = (sum((((not callable(c)) or (not isinstance(c, interpolate.stop))) for c in colors)) if (steps is None) else steps)
    i = cls.interpolate(colors, space=space, **interpolate_args)
    i.discretize(num, max_steps, max_delta_e, delta_e)
    # Apply domain/output-space adjustments after discretization.
    if (domain is not None):
        i.domain(domain)
    if (out_space is not None):
        i.out_space(out_space)
    return i
def interpolate(cls, colors: Sequence[Union[(ColorInput, interpolate.stop, Callable[(..., float)])]], *, space: Optional[str]=None, out_space: Optional[str]=None, progress: Optional[Union[(Mapping[(str, Callable[(..., float)])], Callable[(..., float)])]]=None, hue: str=util.DEF_HUE_ADJ, premultiplied: bool=True, extrapolate: bool=False, domain: Optional[List[float]]=None, method: str='linear', padding: Optional[Union[(float, Tuple[(float, float)])]]=None, carryforward: Optional[bool]=None, powerless: Optional[bool]=None, **kwargs: Any) -> Interpolator:
    """Create an `Interpolator` over `colors` using the named `method`,
    falling back to the class defaults for `carryforward` and `powerless`
    when they are not explicitly provided."""
    return interpolate.interpolator(method, cls, colors=colors, space=space, out_space=out_space, progress=progress, hue=hue, premultiplied=premultiplied, extrapolate=extrapolate, domain=domain, padding=padding, carryforward=(carryforward if (carryforward is not None) else cls.CARRYFORWARD), powerless=(powerless if (powerless is not None) else cls.POWERLESS), **kwargs)
def average(cls, colors: Iterable[ColorInput], *, space: Optional[str]=None, out_space: Optional[str]=None, premultiplied: bool=True, powerless: Optional[bool]=None, **kwargs: Any) -> 'Color':
    """Average an iterable of colors in `space` (default: class AVERAGE),
    returning the result converted to `out_space` (default: `space`)."""
    target = cls.AVERAGE if space is None else space
    output = target if out_space is None else out_space
    pless = cls.POWERLESS if powerless is None else powerless
    result = average.average(cls, colors, target, premultiplied, pless)
    return result.convert(output, in_place=True)
def filter(self, name: str, amount: Optional[float]=None, *, space: Optional[str]=None, out_space: Optional[str]=None, in_place: bool=False, **kwargs: Any) -> 'Color':
    """Apply the named filter (with optional `amount`) to the color."""
    return filters.filters(self, name, amount, space, out_space, in_place, **kwargs)
def harmony(self, name: str, *, space: Optional[str]=None, out_space: Optional[str]=None, **kwargs: Any) -> List['Color']:
    """Return the named color-harmony set (e.g. complement, triad) computed
    in `space` (default: class HARMONY), converted to `out_space`.

    Bug fix: the body forwarded `**kwargs` to `harmonies.harmonize()`, but
    the signature never accepted them, so every call raised NameError.
    `**kwargs` is now accepted (backward compatible) and passed through.
    """
    if space is None:
        space = self.HARMONY
    if out_space is None:
        out_space = space
    return [c.convert(out_space, in_place=True) for c in harmonies.harmonize(self, name, space, **kwargs)]
def compose(self, backdrop: Union[ColorInput, Sequence[ColorInput]], *, blend: Union[str, bool]=True, operator: Union[str, bool]=True, space: Optional[str]=None, out_space: Optional[str]=None, in_place: bool=False) -> 'Color':
    """Alpha-composite this color over one or more backdrop colors."""
    # A string is itself a Sequence, so treat it as a single color input.
    if isinstance(backdrop, Sequence) and not isinstance(backdrop, str):
        layers = [self._handle_color_input(b) for b in backdrop]
    else:
        layers = [self._handle_color_input(backdrop)]
    composed = compositing.compose(self, layers, blend, operator, space, out_space)
    return self._hotswap(composed) if in_place else composed
def delta_e(self, color: ColorInput, *, method: Optional[str]=None, **kwargs: Any) -> float:
    """Compute the color difference to `color` using the given (or class
    default) distancing algorithm."""
    other = self._handle_color_input(color)
    algorithm = self.DELTA_E if method is None else method
    delta = self.DE_MAP.get(algorithm)
    if not delta:
        raise ValueError("'{}' is not currently a supported distancing algorithm.".format(algorithm))
    return delta.distance(self, other, **kwargs)
def distance(self, color: ColorInput, *, space: str='lab') -> float:
    """Return the Euclidean distance to `color` measured in `space`."""
    return distance.distance_euclidean(self, self._handle_color_input(color), space=space)
def closest(self, colors: Sequence[ColorInput], *, method: Optional[str]=None, **kwargs: Any) -> 'Color':
    """Return the color from `colors` closest to this one per `method`."""
    return distance.closest(self, colors, method=method, **kwargs)
def luminance(self, *, white: Optional[VectorLike]=cat.WHITES['2deg']['D65']) -> float:
    """Return the relative luminance (XYZ Y), chromatically adapted to `white`.

    When `white` is None, the color's own space white point is used.
    NOTE(review): the default is a shared module-level object -- assumed
    immutable (tuple); confirm `cat.WHITES` entries are never mutated.
    """
    if (white is None):
        white = self._space.WHITE
    xyz = self.convert('xyz-d65')
    # Adapt from D65 to the requested white point; Y is index 1.
    coords = self.chromatic_adaptation(xyz._space.WHITE, white, xyz.coords(nans=False))
    return coords[1]
def contrast(self, color: ColorInput, method: Optional[str]=None) -> float:
    """Compute the contrast between this color and another using `method`."""
    other = self._handle_color_input(color)
    return contrast.contrast(method, self, other)
# Overload stub (an @overload decorator was likely present upstream):
# a single channel name returns a single float.
def get(self, name: str, *, nans: bool=True) -> float:
    ...
# Overload stub (an @overload decorator was likely present upstream):
# a list/tuple of channel names returns a list of floats.
def get(self, name: Union[(List[str], Tuple[(str, ...)])], *, nans: bool=True) -> List[float]:
    ...
def get(self, name: Union[(str, List[str], Tuple[(str, ...)])], *, nans: bool=True) -> Union[(float, List[float])]:
    """Get one channel value (str) or several (list/tuple of names).

    A name may be prefixed with a color space ('space.channel') to read the
    channel after conversion to that space. With `nans=False`, undefined
    (NaN) channels are resolved to concrete values first.
    """
    if isinstance(name, str):
        if ('.' in name):
            # 'space.channel': convert first, then read the channel.
            (space, channel) = name.split('.', 1)
            if nans:
                return self.convert(space)[channel]
            else:
                obj = self.convert(space, norm=nans)
                i = obj._space.get_channel_index(channel)
                return obj._space.resolve_channel(i, obj._coords)
        elif nans:
            return self[name]
        else:
            i = self._space.get_channel_index(name)
            return self._space.resolve_channel(i, self._coords)
    else:
        # Bulk access: track the current space so consecutive names in the
        # same space reuse one conversion instead of converting per name.
        original_space = current_space = self.space()
        obj = self
        values = []
        for n in name:
            (space, channel) = (n.split('.', 1) if ('.' in n) else (original_space, n))
            if (space != current_space):
                obj = (self if (space == original_space) else self.convert(space, norm=nans))
                current_space = space
            if nans:
                values.append(obj[channel])
            else:
                i = obj._space.get_channel_index(channel)
                values.append(obj._space.resolve_channel(i, obj._coords))
        return values
def set(self, name: Union[(str, Dict[(str, Union[(float, Callable[(..., float)])])])], value: Optional[Union[(float, Callable[(..., float)])]]=None, *, nans: bool=True) -> 'Color':
    """Set one channel ('name', value) or many ({'name': value, ...}).

    Names may be space-prefixed ('space.channel'); values may be plain
    numbers or callables receiving the current channel value. Returns
    `self` to allow chaining. Raises ValueError on a missing positional
    value (str form) or when a dict is combined with a positional value.
    """
    if (value is None):
        if isinstance(name, str):
            raise ValueError("Missing the positional 'value' argument for channel '{}'".format(name))
        # Dict form: apply all updates on a clone, converting lazily as the
        # target space changes, then fold the result back into self.
        original_space = current_space = self.space()
        obj = self.clone()
        for (k, v) in name.items():
            (space, channel) = (k.split('.', 1) if ('.' in k) else (original_space, k))
            if (space != current_space):
                obj.convert(space, in_place=True, norm=nans)
                current_space = space
            if (not callable(v)):
                obj[channel] = v
            else:
                # Callables receive the current value (NaN-resolved if requested).
                i = obj._space.get_channel_index(channel)
                obj[channel] = v((obj[i] if nans else obj._space.resolve_channel(i, obj._coords)))
        self.update(obj)
    else:
        if isinstance(name, dict):
            raise ValueError("A dict of channels and values cannot be used with the positional 'value' parameter")
        if ('.' in name):
            # Space-prefixed single channel: set in the converted copy and
            # update self from it.
            (space, channel) = name.split('.', 1)
            obj = self.convert(space, norm=nans)
            if (not callable(value)):
                obj[channel] = value
            else:
                i = obj._space.get_channel_index(channel)
                obj[channel] = value((obj[i] if nans else obj._space.resolve_channel(i, obj._coords)))
            return self.update(obj)
        if (not callable(value)):
            self[name] = value
        else:
            i = self._space.get_channel_index(name)
            self[name] = value((self[i] if nans else self._space.resolve_channel(i, self._coords)))
    return self
def coords(self, *, nans: bool=True) -> Vector:
    """Return the color coordinates without alpha; with `nans=False`,
    undefined channels are resolved to concrete values."""
    if not nans:
        return [self._space.resolve_channel(i, self._coords) for i in range(len(self._coords) - 1)]
    return self[:-1]
def alpha(self, *, nans: bool=True) -> float:
    """Return the alpha channel; with `nans=False`, resolve NaN to a value."""
    return self[-1] if nans else self._space.resolve_channel(-1, self._coords)
def example():
    """Build a flet AnimatedSwitcher demo that toggles between two containers."""
    hello_box = ft.Container(ft.Text('Hello!', style=ft.TextThemeStyle.HEADLINE_MEDIUM), alignment=ft.alignment.center, width=200, height=200, bgcolor=ft.colors.GREEN)
    bye_box = ft.Container(ft.Text('Bye!', size=50), alignment=ft.alignment.center, width=200, height=200, bgcolor=ft.colors.YELLOW)
    switcher = ft.AnimatedSwitcher(
        hello_box,
        transition=ft.AnimatedSwitcherTransition.SCALE,
        duration=500,
        reverse_duration=100,
        switch_in_curve=ft.AnimationCurve.BOUNCE_OUT,
        switch_out_curve=ft.AnimationCurve.BOUNCE_IN,
    )

    async def animate(e):
        # Swap to the other container and push the change to the page.
        switcher.content = bye_box if (switcher.content == hello_box) else hello_box
        await switcher.update_async()

    return ft.Column(controls=[switcher, ft.ElevatedButton('Animate!', on_click=animate)])
class Stability_ComputeNode(ComputeNode):
    """Compute node that renders text-to-image tasks via the Stability AI API.

    Fix: replaced the deprecated `logging.warn` alias with `logging.warning`.
    """
    _instance = None

    def get_instance(cls):
        # Singleton accessor.
        # NOTE(review): takes `cls` -- likely a @classmethod whose decorator
        # is missing in this chunk; confirm upstream.
        if (cls._instance is None):
            cls._instance = Stability_ComputeNode()
        return cls._instance

    def declare_user_config(cls):
        """Register the user-config entries this node needs (API key, model, output dir)."""
        user_config = AIStorage.get_instance().get_user_config()
        user_config.add_user_config('stability_api_key', 'stability api key', False, None)
        user_config.add_user_config('stability_model', 'stability model name', True, 'stable-diffusion-512-v2-1')
        if (os.getenv('TEXT2IMG_OUTPUT_DIR') is None):
            # Default the output dir under the user's home when not set via env.
            home_dir = Path.home()
            output_dir = Path.joinpath(home_dir, 'text2img_output')
            Path.mkdir(output_dir, exist_ok=True)
            user_config.add_user_config('text2img_output_dir', 'text2image output dir', True, output_dir)
        if (os.getenv('STABILITY_DEFAULT_MODEL') is None):
            user_config.add_user_config('stability_default_model', 'stability default model', True, 'stable-diffusion-512-v2-1')

    def __init__(self):
        super().__init__()
        self.is_start = False  # guards against starting the worker loop twice
        self.node_id = 'stability_node'
        self.api_key = ''
        self.default_model = ''
        self.task_queue = Queue()

    async def initial(self):
        """Resolve configuration (env vars take precedence over user config)
        and start the background worker. Returns False if no API key."""
        if (os.getenv('STABILITY_API_KEY') is not None):
            self.api_key = os.getenv('STABILITY_API_KEY')
        else:
            self.api_key = AIStorage.get_instance().get_user_config().get_value('stability_api_key')
        if (self.api_key is None):
            logger.error('stability api key is None!')
            return False
        if (os.getenv('STABILITY_DEFAULT_MODEL') is not None):
            self.default_model = os.getenv('STABILITY_DEFAULT_MODEL')
        else:
            self.default_model = AIStorage.get_instance().get_user_config().get_value('stability_default_model')
        if (self.default_model is None):
            self.default_model = 'stable-diffusion-512-v2-1'
        if (os.getenv('TEXT2IMG_OUTPUT_DIR') is not None):
            self.output_dir = os.getenv('TEXT2IMG_OUTPUT_DIR')
        else:
            self.output_dir = AIStorage.get_instance().get_user_config().get_value('text2img_output_dir')
        if (self.output_dir is None):
            self.output_dir = './'
        self.output_dir = os.path.abspath(self.output_dir)
        self.start()
        return True

    async def push_task(self, task: ComputeTask, proiority: int=0):
        """Queue a task for processing.

        NOTE: parameter name 'proiority' [sic] is kept for call compatibility;
        it is currently unused.
        """
        logger.info(f'stability_node push task: {task.display()}')
        self.task_queue.put_nowait(task)

    async def remove_task(self, task_id: str):
        # Cancellation is not supported once a task is queued.
        pass

    def _run_task(self, task: ComputeTask):
        """Synchronously run one text-to-image task and return a ComputeTaskResult."""
        task.state = ComputeTaskState.RUNNING
        result = ComputeTaskResult()
        result.result_code = ComputeTaskResultCode.ERROR
        result.set_from_task(task)
        model_name = task.params['model_name']
        prompt = task.params['prompt']
        negative_prompt = task.params['negative_prompt']
        logging.info(f'call stability {self.default_model} prompts: {prompt}, negative_prompt: {negative_prompt}')
        api = None
        try:
            api = client.StabilityInference(key=self.api_key, verbose=True, engine=model_name)
        except Exception as e:
            task.error_str = f'create stability client failed: {e}'
            result.error_str = f'create stability client failed: {e}'
            # `logging.warn` is a deprecated alias; use `warning`.
            logging.warning(task.error_str)
            task.state = ComputeTaskState.ERROR
            return result
        answers = api.generate(prompt=prompt, seed=0, steps=30, cfg_scale=7.0, width=512, height=512, samples=1, sampler=generation.SAMPLER_K_DPMPP_2M)
        for resp in answers:
            for artifact in resp.artifacts:
                if (artifact.finish_reason == generation.FILTER):
                    # The API refused to return an image (safety filter).
                    err_msg = "request activated the API's safety filters"
                    logging.warning(err_msg)
                    task.error_str = err_msg
                    result.error_str = err_msg
                    task.state = ComputeTaskState.ERROR
                    return result
                if (artifact.type == generation.ARTIFACT_IMAGE):
                    # Persist the first returned image as <task_id>.png.
                    img = Image.open(io.BytesIO(artifact.binary))
                    file_name = os.path.join(self.output_dir, (task.task_id + '.png'))
                    img.save(file_name)
                    task.state = ComputeTaskState.DONE
                    result.result_code = ComputeTaskResultCode.OK
                    result.worker_id = self.node_id
                    result.result = {'file': file_name}
                    return result
        task.error_str = 'Unknown error!'
        result.error_str = 'Unknown error!'
        task.state = ComputeTaskState.ERROR
        return result

    def start(self):
        """Start the background task-processing loop (idempotent)."""
        if self.is_start:
            return
        self.is_start = True

        async def _run_task_loop():
            while True:
                logger.info('stability_node is waiting for task...')
                task = (await self.task_queue.get())
                logger.info(f'stability_node get task: {task.display()}')
                # NOTE(review): the result is produced but not delivered to any
                # waiter/callback here -- confirm how results are consumed.
                result = self._run_task(task)
        asyncio.create_task(_run_task_loop())

    def display(self) -> str:
        return f'Stability_ComputeNode: {self.node_id}'

    def get_task_state(self, task_id: str):
        # Not implemented.
        pass

    def get_capacity(self):
        # Not implemented.
        pass

    def is_support(self, task: ComputeTask) -> bool:
        """Only text-to-image tasks are supported by this node."""
        return (task.task_type == ComputeTaskType.TEXT_2_IMAGE)

    def is_local(self) -> bool:
        # This node calls a remote service.
        return False
class SetActiveAttributeFactory(PipeFactory):
    """Pipeline factory wrapping `filters.SetActiveAttribute`.

    Each trait adapts the corresponding `*_name` trait on the wrapped filter,
    selecting which point/cell scalars, vectors, or tensors are marked active
    on the dataset.
    """
    point_scalars = String(adapts='point_scalars_name', help='The name of the active point scalars')
    point_vectors = String(adapts='point_vectors_name', help='The name of the active point vectors')
    point_tensors = String(adapts='point_tensors_name', help='The name of the active point tensors')
    cell_scalars = String(adapts='cell_scalars_name', help='The name of the active cell scalars')
    cell_vectors = String(adapts='cell_vectors_name', help='The name of the active cell vectors')
    cell_tensors = String(adapts='cell_tensors_name', help='The name of the active cell tensors')
    # The underlying filter instance this factory configures.
    _target = Instance(filters.SetActiveAttribute, ())
class PokerEngine():
    """Runs rounds of Texas hold'em for a table of players: blinds, dealing,
    betting rounds, showdown ranking, and pot payouts.

    Fixes:
    - `n_players_with_moves`, `n_active_players`, `n_all_in_players`,
      `all_bets`, and `more_betting_needed` are accessed as attributes
      throughout this class (`self.more_betting_needed` in a while condition,
      `sum(self.all_bets)`, `self.n_players_with_moves > 1`), but were defined
      as plain methods -- which makes those expressions compare/iterate bound
      methods (TypeError, or an always-truthy loop condition). Restored
      `@property` on all five.
    - Removed a stray `f"` literal from a debug f-string.
    - Corrected the `_compute_payouts` parameter annotation.
    """

    def __init__(self, table: PokerTable, small_blind: int, big_blind: int):
        self.table = table
        self.small_blind = small_blind
        self.big_blind = big_blind
        self.evaluator = Evaluator()
        self.state = PokerGameState.new_hand(self.table)
        self.wins_and_losses = []

    def play_one_round(self):
        """Play a complete hand: setup, deal/bet streets, showdown, cleanup."""
        self.round_setup()
        self._all_dealing_and_betting_rounds()
        self.compute_winners()
        self._round_cleanup()

    def round_setup(self):
        """Reset the pot, assign seat order, and post the blinds."""
        self.table.pot.reset()
        self._assign_order_to_players()
        self._assign_blinds()

    def _all_dealing_and_betting_rounds(self):
        """Deal hole cards, flop, turn, and river with a betting round after each."""
        self.table.dealer.deal_private_cards(self.table.players)
        self._betting_round(first_round=True)
        self.table.dealer.deal_flop(self.table)
        self._betting_round()
        self.table.dealer.deal_turn(self.table)
        self._betting_round()
        self.table.dealer.deal_river(self.table)
        self._betting_round()

    def compute_winners(self):
        """Rank the remaining hands and pay out every (side) pot."""
        ranked_player_groups = self._rank_players_by_best_hand()
        payouts = self._compute_payouts(ranked_player_groups)
        self._payout_players(payouts)
        logger.debug('Winnings computation complete. Players:')
        for player in self.table.players:
            logger.debug(f'{player}')

    def _round_cleanup(self):
        self._move_blinds()

    def _get_players_in_pot(self, player_group, pot):
        """Players from `player_group` who contributed to `pot`, in seat order."""
        return sorted([player for player in player_group if (player in pot)], key=operator.attrgetter('order'))

    def _process_side_pot(self, player_group, pot):
        """Split one pot among the winners in `player_group` who are in it.

        Chips are divided evenly; any indivisible remainder goes one chip at
        a time to the earliest-seated winners. Returns {} when no winner is
        in this pot.
        """
        payouts = collections.Counter()
        players_in_pot = self._get_players_in_pot(player_group, pot)
        n_players = len(players_in_pot)
        if (not n_players):
            return {}
        n_total = sum(pot.values())
        n_per_player = (n_total // n_players)
        n_remainder = (n_total - (n_players * n_per_player))
        for player in players_in_pot:
            payouts[player] += n_per_player
        for i in range(n_remainder):
            payouts[players_in_pot[i]] += 1
        return payouts

    def _compute_payouts(self, ranked_player_groups: List[List[Player]]):
        """For each side pot, pay the best-ranked group that participated."""
        payouts = collections.Counter()
        for pot in self.table.pot.side_pots:
            for player_group in ranked_player_groups:
                pot_payouts = self._process_side_pot(player_group, pot)
                if pot_payouts:
                    payouts.update(pot_payouts)
                    break
        return payouts

    def _payout_players(self, payouts: Dict[Player, int]):
        """Reset the pot and credit each player's winnings."""
        self.table.pot.reset()
        for (player, winnings) in payouts.items():
            player.add_chips(winnings)

    def _rank_players_by_best_hand(self) -> List[List[Player]]:
        """Group active players by hand rank, best group first."""
        table_cards = [card.eval_card for card in self.table.community_cards]
        grouped_players = collections.defaultdict(list)
        for player in self.table.players:
            if player.is_active:
                hand_cards = [card.eval_card for card in player.cards]
                rank = self.evaluator.evaluate(table_cards, hand_cards)
                hand_class = self.evaluator.get_rank_class(rank)
                hand_desc = self.evaluator.class_to_string(hand_class).lower()
                # Fixed: the message previously contained a stray `f"` literal.
                logger.debug(f'Rank #{rank} {player} {hand_desc}')
                grouped_players[rank].append(player)
        ranked_player_groups: List[List[Player]] = []
        # Lower rank value means a better hand; iterate best-first.
        for rank in sorted(grouped_players.keys()):
            ranked_player_groups.append(grouped_players[rank])
        return ranked_player_groups

    def _assign_order_to_players(self):
        for (player_i, player) in enumerate(self.table.players):
            player.order = player_i

    def _assign_blinds(self):
        """Post small and big blinds from seats 0 and 1."""
        self.table.players[0].add_to_pot(self.small_blind)
        self.table.players[1].add_to_pot(self.big_blind)
        logger.debug(f'Assigned blinds to players {self.table.players[:2]}')

    def _move_blinds(self):
        """Rotate the seating order by one so the blinds move on."""
        # NOTE(review): deepcopy replaces the Player objects on the table --
        # confirm no external references to the old instances are kept.
        players = copy.deepcopy(self.table.players)
        players.append(players.pop(0))
        logger.debug(f'Rotated players from {self.table.players} to {players}')
        self.table.set_players(players)

    def _players_in_order_of_betting(self, first_round: bool) -> List[Player]:
        # Pre-flop, action starts after the big blind (seat 2 onward).
        if first_round:
            return (self.table.players[2:] + self.table.players[:2])
        return self.table.players

    def _all_active_players_take_action(self, first_round: bool):
        for player in self._players_in_order_of_betting(first_round):
            if player.is_active:
                self.state = player.take_action(self.state)

    def _bet_until_everyone_has_bet_evenly(self):
        first_round = True
        logger.debug('Started round of betting.')
        while (first_round or self.more_betting_needed):
            self._all_active_players_take_action(first_round)
            first_round = False
            logger.debug(f'> Betting iter, total: {sum(self.all_bets)}')

    def _betting_round(self, first_round: bool=False):
        """Run one street of betting if anyone can still act."""
        if (self.n_players_with_moves > 1):
            self._bet_until_everyone_has_bet_evenly()
            logger.debug(f'Finished round of betting, {self.n_active_players} active players, {self.n_all_in_players} all in players.')
        else:
            logger.debug('Skipping betting as no players are free to bet.')
        self._post_betting_analysis()

    def _post_betting_analysis(self):
        """Sanity-check chip conservation after a betting round."""
        logger.debug(f'Pot at the end of betting: {self.table.pot}')
        logger.debug('Players at the end of betting:')
        for player in self.table.players:
            logger.debug(f'{player}')
        total_n_chips = (self.table.pot.total + sum((p.n_chips for p in self.table.players)))
        n_chips_correct = (total_n_chips == self.table.total_n_chips_on_table)
        pot_correct = (self.table.pot.total == sum((p.n_bet_chips for p in self.table.players)))
        if ((not n_chips_correct) or (not pot_correct)):
            raise ValueError('Bad logic - total n_chips are not the same as at the start of the game')

    @property
    def n_players_with_moves(self) -> int:
        """Number of players who can still act (active and not all-in)."""
        return sum(((p.is_active and (not p.is_all_in)) for p in self.table.players))

    @property
    def n_active_players(self) -> int:
        """Number of players still in the hand."""
        return sum((p.is_active for p in self.table.players))

    @property
    def n_all_in_players(self) -> int:
        """Number of active players who are all-in."""
        return sum(((p.is_active and p.is_all_in) for p in self.table.players))

    @property
    def all_bets(self) -> List[int]:
        """Current bet sizes for every player at the table."""
        return [p.n_bet_chips for p in self.table.players]

    @property
    def more_betting_needed(self) -> bool:
        """True while active, non-all-in players have unequal bets."""
        active_complete_bets = []
        for player in self.table.players:
            if (player.is_active and (not player.is_all_in)):
                active_complete_bets.append(player.n_bet_chips)
        all_bets_equal = all([(x == active_complete_bets[0]) for x in active_complete_bets])
        return (not all_bets_equal)
class MH_Style(command_line.MISS_HIT_Back_End):
    """MISS_HIT back-end implementing the MATLAB style checker."""

    def __init__(self):
        super().__init__('MH Style')

    def process_wp(cls, wp):
        """Run all style rules (and optional autofix) on one work package.

        NOTE(review): takes `cls`; likely a @classmethod whose decorator is
        not visible in this chunk -- confirm upstream.
        """
        # Unpack options for this work package.
        rule_set = wp.extra_options['rule_set']
        autofix = wp.options.fix
        fd_tree = wp.extra_options['fd_tree']
        debug_validate_links = wp.options.debug_validate_links
        rule_lib = build_library(wp.cfg, rule_set)
        content = wp.get_content()
        lexer = MATLAB_Lexer(wp.cfg.language, wp.mh, content, wp.filename, wp.blockname)
        if (not wp.cfg.pragmas):
            lexer.process_pragmas = False
        # Nothing to check in an effectively empty file.
        if (len(lexer.text.strip()) == 0):
            return MH_Style_Result(wp)
        # Whole-file rules, then per-line rules.
        for rule in rule_lib['on_file']:
            rule.apply(wp.mh, wp.cfg, lexer.filename, lexer.text, lexer.context_line)
        for (line_no, line) in enumerate(lexer.context_line, 1):
            for rule in rule_lib['on_line']:
                rule.apply(wp.mh, wp.cfg, lexer.filename, line_no, line)
        if autofix:
            lexer.correct_tabs(wp.cfg.style_config['tab_width'])
        # Tokenize; lex errors end processing for this file.
        try:
            tbuf = Token_Buffer(lexer, wp.cfg)
        except Error:
            return MH_Style_Result(wp)
        # Parse; parse errors leave parse_tree as None so later stages can
        # still run token-level analysis.
        try:
            parser = MATLAB_Parser(wp.mh, tbuf, wp.cfg)
            parse_tree = parser.parse_file()
            parse_tree.sty_check_naming(wp.mh, wp.cfg)
            parse_docstrings(wp.mh, wp.cfg, parse_tree, tbuf)
            if debug_validate_links:
                tbuf.debug_validate_links()
            if fd_tree:
                # Optionally dump the parse tree for debugging.
                fd_tree.write(('-- Parse tree for %s\n' % wp.filename))
                parse_tree.pp_node(fd_tree)
                fd_tree.write('\n\n')
        except Error:
            parse_tree = None
        stage_3_analysis(mh=wp.mh, cfg=wp.cfg, tbuf=tbuf, is_embedded=isinstance(wp, work_package.Embedded_MATLAB_WP), fixed=(parse_tree is not None), valid_code=(parse_tree is not None))
        if parse_tree:
            stage_4_analysis(mh=wp.mh, cfg=wp.cfg, parse_tree=parse_tree, is_embedded=isinstance(wp, work_package.Embedded_MATLAB_WP))
        if autofix:
            if (not parse_tree):
                # Refuse to rewrite files we could not fully parse.
                wp.mh.error(lexer.get_file_loc(), 'file is not auto-fixed because it contains parse errors', fatal=False)
            else:
                wp.write_modified(tbuf.replay())
        return MH_Style_Result(wp)
class OptionPlotoptionsXrangeSonificationContexttracksMappingLowpass(Options):
    """Generated Highcharts option wrapper for the sonification low-pass
    filter mapping of xrange context tracks."""

    def frequency(self) -> 'OptionPlotoptionsXrangeSonificationContexttracksMappingLowpassFrequency':
        """Return (creating if needed) the nested `frequency` option group."""
        # NOTE(review): sub-config accessors like this are typically
        # @property-decorated in generated wrappers -- confirm upstream.
        return self._config_sub_data('frequency', OptionPlotoptionsXrangeSonificationContexttracksMappingLowpassFrequency)

    def resonance(self) -> 'OptionPlotoptionsXrangeSonificationContexttracksMappingLowpassResonance':
        """Return (creating if needed) the nested `resonance` option group."""
        return self._config_sub_data('resonance', OptionPlotoptionsXrangeSonificationContexttracksMappingLowpassResonance)
class FalAdapterMixin(TeleportAdapter, metaclass=AdapterMeta):
    """dbt adapter mixin that runs Python models through fal, optionally
    moving relation data via 'teleport' (local files or S3) instead of the
    warehouse connection.
    """
    ConnectionManager = FalConnectionManager

    def __init__(self, config, db_adapter: BaseAdapter):
        self.config = config
        self._db_adapter = db_adapter
        # Cache of relation name -> external storage path for teleported data.
        self._relation_data_location_cache: DataLocation = DataLocation({})
        if self.is_teleport():
            self._wrapper = wrap_db_adapter(self._db_adapter, self.credentials.teleport)

    def type(cls):
        # NOTE(review): takes `cls`; likely a @classmethod whose decorator is
        # missing in this chunk.
        return 'fal_experimental'

    def storage_formats(cls):
        # NOTE(review): likely a @classmethod (see `type` above).
        return ['csv', 'parquet']

    def is_teleport(self) -> bool:
        """True when teleport credentials are configured."""
        return (getattr(self.credentials, 'teleport', None) is not None)

    def manifest(self) -> Manifest:
        # NOTE(review): passed without parentheses in submit_python_job and
        # _invalidate_db_cache below -- almost certainly a @property whose
        # decorator was lost; confirm upstream.
        return ManifestLoader.get_full_manifest(self.config)

    def macro_manifest(self) -> MacroManifest:
        # NOTE(review): also passed without parentheses below; likely a @property.
        return self._db_adapter.load_macro_manifest()

    # NOTE(review): the bare call below looks like a mangled decorator for
    # submit_python_job (e.g. `@telemetry.log_api(...)`) -- confirm upstream.
    _call('experimental_submit_python_job', config=True)

    def submit_python_job(self, parsed_model: dict, compiled_code: str) -> AdapterResponse:
        """Run a compiled Python model locally or in an isolated environment,
        with or without teleport, and return an adapter response."""
        config_dict = parsed_model['config']
        environment_name = config_dict.get('fal_environment', self.credentials.default_environment)
        machine_type = config_dict.get('fal_machine', 'S')
        (environment, is_local) = fetch_environment(self.config.project_root, environment_name, machine_type, self.credentials)
        telemetry.log_api('experimental_submit_python_job_config', config=self.config, additional_props={'is_teleport': self.is_teleport(), 'environment_is_local': is_local})
        if self.is_teleport():
            teleport_info = self._build_teleport_info()
            # NOTE(review): result_table_path is computed but never used.
            if is_local:
                result_table_path = run_with_teleport(code=compiled_code, teleport_info=teleport_info, locations=self._relation_data_location_cache, config=db_adapter_config(self.config))
            else:
                result_table_path = run_in_environment_with_teleport(environment, compiled_code, teleport_info=teleport_info, locations=self._relation_data_location_cache, config=db_adapter_config(self.config), adapter_type=self._db_adapter.type())
            # Register the produced table back with the warehouse adapter.
            relation = self._db_adapter.Relation.create(parsed_model['database'], parsed_model['schema'], parsed_model['alias'])
            self._sync_result_table(relation)
            return AdapterResponse('OK')
        else:
            if is_local:
                return run_with_adapter(compiled_code, self._db_adapter, self.config)
            with self._invalidate_db_cache():
                return run_in_environment_with_adapter(environment, compiled_code, db_adapter_config(self.config), self.manifest, self.macro_manifest, self._db_adapter.type())

    def _invalidate_db_cache(self) -> Iterator[None]:
        # Used via `with self._invalidate_db_cache():` above; a generator
        # needs @contextlib.contextmanager for that -- NOTE(review): decorator
        # likely lost in this chunk.
        try:
            (yield)
        finally:
            # Reload the adapter's relation cache after external execution.
            reload_adapter_cache(self._db_adapter, self.manifest)

    def credentials(self):
        # Accessed as an attribute (`self.credentials.teleport`, `.default_environment`)
        # throughout -- NOTE(review): almost certainly a @property whose
        # decorator was lost; confirm upstream.
        python_creds: FalCredentials = self.config.python_adapter_credentials
        assert (python_creds is not None)
        return python_creds

    def teleport_from_external_storage(self, relation: BaseRelation, relation_path: str, teleport_info: TeleportInfo):
        """Record where a relation's data lives in external storage."""
        rel_name = teleport_info.relation_name(relation)
        self._relation_data_location_cache[rel_name] = relation_path

    def teleport_to_external_storage(self, relation: BaseRelation, teleport_info: TeleportInfo):
        """Return the external storage path a relation would be written to."""
        return teleport_info.build_relation_path(relation)

    def _build_teleport_info(self):
        """Construct a TeleportInfo from the configured credentials (local or S3)."""
        teleport_creds = self.credentials.teleport
        assert teleport_creds
        teleport_format = TeleportAdapter.find_format(self, self._wrapper)
        if (teleport_creds.type == TeleportTypeEnum.LOCAL):
            assert teleport_creds.local_path
            return LocalTeleportInfo(teleport_format, teleport_creds, teleport_creds.local_path)
        elif (teleport_creds.type == TeleportTypeEnum.REMOTE_S3):
            assert teleport_creds.s3_bucket
            return S3TeleportInfo(teleport_format, teleport_creds, teleport_creds.s3_bucket, 'teleport')
        else:
            raise NotImplementedError(f'Teleport credentials of type {teleport_creds.type} not supported')

    def sync_teleport_relation(self, relation: BaseRelation):
        """Push a relation from the warehouse to external storage and record it."""
        teleport_info = self._build_teleport_info()
        data_path = self._wrapper.teleport_to_external_storage(relation, teleport_info)
        self.teleport_from_external_storage(relation, data_path, teleport_info)

    def _sync_result_table(self, relation: BaseRelation):
        """Pull a produced result table from external storage into the warehouse."""
        teleport_info = self._build_teleport_info()
        data_path = self.teleport_to_external_storage(relation, teleport_info)
        self._wrapper.teleport_from_external_storage(relation, data_path, teleport_info)
def pohlig(E, H, P) -> int:
    """Solve the elliptic-curve discrete log H = d*P via Pohlig-Hellman.

    For each prime power e = p^k dividing the order of E, project H and P
    into the subgroup of order e, solve the small discrete log with
    baby-step/giant-step, then recombine the residues with the CRT.

    NOTE(review): relies on SageMath globals (`factor`, `bsgs`, `CRT_list`);
    assumes the curve order is smooth enough for bsgs to be feasible.
    NOTE(review): the names are swapped relative to their contents --
    `bases` holds the per-subgroup dlog residues and `resids` holds the
    moduli; kept as-is (comment-only edit).
    """
    bases = []
    resids = []
    # Iterate the (prime, exponent) factorization of the group order.
    for (i, j) in factor(E.order()):
        e = (i ** j)
        logging.info(f' pohlig: {e}')
        # Multiply by the cofactor to land in the order-e subgroup.
        t = (E.order() // e)
        tH = (H * t)
        tP = (P * t)
        # Baby-step/giant-step in the additive group, search range [0, e).
        dlog = bsgs(tP, tH, (0, e), '+')
        bases.append(dlog)
        resids.append(e)
    # Combine d mod each prime power into d mod the full order.
    return CRT_list(bases, resids)
class ExcludeFieldsSerializerMixinTests(SerializerMixinTestCase):
    """Tests for the `exclude` serializer-context option.

    Field names use `__` to traverse nested serializers
    (e.g. 'skus__owners__email').

    NOTE(review): the expected dicts below are shallow copies (`dict(...)`),
    so nested `del` statements mutate the children of
    `self.expected_complete_data` -- this relies on the fixture being rebuilt
    for every test (e.g. in setUp); confirm.
    """

    def serialize(self, **context):
        """Serialize the fixture model with the given serializer context."""
        return CarModelTestSerializer(self.carmodel_model_s, context=context).data

    def test_no_exclude_implicit(self):
        self.assertDictEqual(self.serialize(), self.expected_complete_data)

    def test_no_exclude_explicit(self):
        self.assertDictEqual(self.serialize(exclude=set()), self.expected_complete_data)

    def test_exclude_single_root_field(self):
        expected = dict(self.expected_complete_data)
        del expected['id']
        self.assertDictEqual(self.serialize(exclude={'id'}), expected)

    def test_exclude_multiple_root_fields(self):
        expected = dict(self.expected_complete_data)
        del expected['id']
        del expected['name']
        self.assertDictEqual(self.serialize(exclude={'id', 'name'}), expected)

    def test_exclude_serializer_foreign_key_field(self):
        expected = dict(self.expected_complete_data)
        del expected['manufacturer']
        self.assertDictEqual(self.serialize(exclude={'manufacturer'}), expected)

    def test_exclude_serializer_many_field(self):
        expected = dict(self.expected_complete_data)
        del expected['skus']
        self.assertDictEqual(self.serialize(exclude={'skus'}), expected)

    def test_exclude_single_child_field_foreign_key(self):
        expected = dict(self.expected_complete_data)
        del expected['manufacturer']['id']
        self.assertDictEqual(self.serialize(exclude={'manufacturer__id'}), expected)

    def test_exclude_multiple_child_fields_foreign_key(self):
        expected = dict(self.expected_complete_data)
        del expected['manufacturer']['id']
        del expected['manufacturer']['name']
        self.assertDictEqual(self.serialize(exclude={'manufacturer__id', 'manufacturer__name'}), expected)

    def test_exclude_single_child_field_many(self):
        # Exclusion applies to every element of a many-serializer.
        expected = dict(self.expected_complete_data)
        del expected['skus'][0]['id']
        del expected['skus'][1]['id']
        self.assertDictEqual(self.serialize(exclude={'skus__id'}), expected)

    def test_exclude_multiple_child_fields_many(self):
        expected = dict(self.expected_complete_data)
        del expected['skus'][0]['id']
        del expected['skus'][0]['variant']
        del expected['skus'][1]['id']
        del expected['skus'][1]['variant']
        self.assertDictEqual(self.serialize(exclude={'skus__id', 'skus__variant'}), expected)

    def test_exclude_double_nested_field(self):
        expected = dict(self.expected_complete_data)
        del expected['skus'][0]['owners'][0]['email']
        self.assertDictEqual(self.serialize(exclude={'skus__owners__email'}), expected)

    def test_exclude_complex(self):
        # Mix of root, foreign-key child, and many-child exclusions.
        expected = dict(self.expected_complete_data)
        del expected['name']
        del expected['manufacturer']['id']
        del expected['skus'][0]['variant']
        del expected['skus'][1]['variant']
        self.assertDictEqual(self.serialize(exclude={'name', 'manufacturer__id', 'skus__variant'}), expected)

    def test_missing_root_field(self):
        # Unknown field names must be rejected, not silently ignored.
        with self.assertRaises(ValueError):
            self.serialize(exclude={'not_found'})

    def test_missing_child_field_foreign_key(self):
        with self.assertRaises(ValueError):
            self.serialize(exclude={'manufacturer__not_found'})

    def test_missing_child_key_many(self):
        with self.assertRaises(ValueError):
            self.serialize(exclude={'skus__not_found'})

    def test_field_ordering_unchanged_root(self):
        # Exclusion order must not affect the serialized field order.
        root_1 = self.serialize(exclude=('id', 'manufacturer'))
        root_2 = self.serialize(exclude=('manufacturer', 'id'))
        self.assertEqual(root_1.keys(), root_2.keys())
        self.assertEqual(list(root_1.keys()), ['name', 'skus'])

    def test_field_ordering_unchanged_nested(self):
        child_1 = self.serialize(exclude=('skus__variant',))
        child_2 = self.serialize(exclude=('skus__owners',))
        keys_1 = child_1['skus'][0].keys()
        self.assertEqual(list(keys_1), ['id', 'owners'])
        keys_2 = child_2['skus'][0].keys()
        self.assertEqual(list(keys_2), ['id', 'variant'])
class FirewallOfsList(dict):
    """Mapping of datapath ID -> registered firewall OpenFlow switch.

    Fix: narrowed a bare `except:` (which also swallowed KeyboardInterrupt
    and SystemExit) to `except Exception`.
    """

    def __init__(self):
        super(FirewallOfsList, self).__init__()

    def get_ofs(self, dp_id):
        """Return {dpid: switch} for `dp_id`, or all switches for REST_ALL.

        Raises ValueError when no switch is connected, the switch ID is
        malformed, or the requested switch is not registered.
        """
        if (len(self) == 0):
            raise ValueError('firewall sw is not connected.')
        dps = {}
        if (dp_id == REST_ALL):
            dps = self
        else:
            try:
                dpid = dpid_lib.str_to_dpid(dp_id)
            except Exception:
                # Malformed switch ID string.
                raise ValueError('Invalid switchID.')
            if (dpid in self):
                dps = {dpid: self[dpid]}
            else:
                msg = ('firewall sw is not connected. : switchID=%s' % dp_id)
                raise ValueError(msg)
        return dps
class Status(object):
    """A named, optionally valued status record.

    Instances compare and hash by (name, value), support PNML
    (de)serialisation through Tree, and combine with `+` only when equal.
    """

    def __init__(self, name, value=None):
        """Store the status *name* and optional *value*."""
        self._name = name
        self._value = value

    # PNML tag used by the Tree (de)serialisation hooks below.
    __pnmltag__ = 'status'

    def __pnmldump__(self):
        """Serialise to a PNML Tree: <status><name/><value/></status>."""
        return Tree(self.__pnmltag__, None, Tree('name', self._name), Tree('value', None, Tree.from_obj(self._value)))

    def __pnmlload__(cls, tree):
        # NOTE(review): takes `cls` but carries no @classmethod decorator;
        # presumably the PNML loader invokes it on the class object -- confirm
        # before adding the decorator, as that would change the call shape.
        return cls(tree.child('name').data, tree.child('value').child().to_obj())

    def copy(self):
        """Return a fresh instance with the same name and value."""
        return self.__class__(self._name, self._value)

    def __str__(self):
        if self._value is None:
            return str(self._name)
        return '%s(%s)' % (self._name, self._value)

    def __repr__(self):
        if self._value is None:
            return '%s(%s)' % (self.__class__.__name__, repr(self._name))
        return '%s(%s,%s)' % (self.__class__.__name__, repr(self._name), repr(self._value))

    def __hash__(self):
        return hash((self._name, self._value))

    def __eq__(self, other):
        # Narrowed from a bare `except:`: a missing attribute on *other*
        # simply means "not Status-like"; any other exception (including
        # KeyboardInterrupt/SystemExit) must propagate.
        try:
            return (self._name, self._value) == (other._name, other._value)
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def __add__(self, other):
        """Combine two equal statuses into a copy; unequal ones conflict."""
        if self == other:
            return self.copy()
        raise ConstraintError('incompatible status')

    def name(self):
        return self._name

    def value(self):
        return self._value

    def merge(self, net, nodes, name=None):
        """Hook called when status-equivalent nodes merge; default no-op."""
        pass
class ASDLSyntaxError(Exception):
    """Syntax error in an ASDL specification.

    Subclasses Exception so the error can actually be raised and caught;
    the previous definition had no base class, which makes
    `raise ASDLSyntaxError(...)` a TypeError on Python 3.
    """

    def __init__(self, lineno, token=None, msg=None):
        self.lineno = lineno  # 1-based line number of the offending input
        self.token = token    # offending token, used when msg is None
        self.msg = msg        # explicit message; overrides the token form

    def __str__(self):
        if self.msg is None:
            return ("Error at '%s', line %d" % (self.token, self.lineno))
        return ('%s, line %d' % (self.msg, self.lineno))
class Game(object):
    """Top-level pygame 'Creeps' game: field, UI boards, walls, grid pathfinding
    and the main event loop.

    All grid arithmetic uses `//` (floor division). The original used `/`,
    which is floor division on Python 2 but true division on Python 3, where
    the resulting floats break `range()` in draw_grid and the GridPath sizes.
    `//` is behaviour-identical on both versions.
    """
    BG_TILE_IMG = 'images/brick_tile.png'
    (SCREEN_WIDTH, SCREEN_HEIGHT) = (580, 500)
    GRID_SIZE = 20            # pixels per grid cell
    FIELD_SIZE = (400, 400)   # playing field size in pixels
    CREEP_FILENAMES = [('images/bluecreep_0.png', 'images/bluecreep_45.png'), ('images/greencreep_0.png', 'images/greencreep_45.png'), ('images/yellowcreep_0.png', 'images/yellowcreep_45.png'), ('images/pinkcreep_0.png', 'images/pinkcreep_45.png')]
    MAX_N_CREEPS = 50

    def __init__(self):
        """Initialise pygame, the UI boxes, creeps, walls and the path grid."""
        pygame.init()
        self.screen = pygame.display.set_mode((self.SCREEN_WIDTH, self.SCREEN_HEIGHT), 0, 32)
        self.tile_img = pygame.image.load(self.BG_TILE_IMG).convert_alpha()
        self.tile_img_rect = self.tile_img.get_rect()
        self.field_border_width = 4
        field_outer_width = (self.FIELD_SIZE[0] + (2 * self.field_border_width))
        field_outer_height = (self.FIELD_SIZE[1] + (2 * self.field_border_width))
        self.field_rect_outer = Rect(20, 60, field_outer_width, field_outer_height)
        self.field_bgcolor = Color(109, 41, 1, 100)
        self.field_border_color = Color(0, 0, 0)
        self.field_box = Box(self.screen, rect=self.field_rect_outer, bgcolor=self.field_bgcolor, border_width=self.field_border_width, border_color=self.field_border_color)
        # Title board above the field.
        self.tboard_text = ['The amazing Creeps!']
        self.tboard_rect = Rect(20, 20, field_outer_width, 30)
        self.tboard_bgcolor = Color(50, 20, 0)
        self.tboard = MessageBoard(self.screen, rect=self.tboard_rect, bgcolor=self.tboard_bgcolor, border_width=4, border_color=Color('black'), text=self.tboard_text, font=('tahoma', 18), font_color=Color('yellow'))
        # Message board to the right of the field (creep count etc.).
        self.mboard_text = []
        self.mboard_rect = Rect(440, 60, 120, 60)
        self.mboard_bgcolor = Color(50, 20, 0)
        self.mboard = MessageBoard(self.screen, rect=self.mboard_rect, bgcolor=self.mboard_bgcolor, border_width=4, border_color=Color('black'), text=self.mboard_text, font=('verdana', 16), font_color=Color('white'))
        self.clock = pygame.time.Clock()
        self.paused = False
        self.creep_images = [(pygame.image.load(f1).convert_alpha(), pygame.image.load(f2).convert_alpha()) for (f1, f2) in self.CREEP_FILENAMES]
        explosion_img = pygame.image.load('images/explosion1.png').convert_alpha()
        self.explosion_images = [explosion_img, pygame.transform.rotate(explosion_img, 90)]
        self.field_rect = self.get_field_rect()
        # Entrance (top-left) and exit (bottom-right) portals, 2x2 cells each.
        self.entrance_rect = Rect(self.field_rect.left, self.field_rect.top, (self.GRID_SIZE * 2), (self.GRID_SIZE * 2))
        self.exit_rect = Rect((self.field_rect.right - (self.GRID_SIZE * 2)), (self.field_rect.bottom - (self.GRID_SIZE * 2)), (self.GRID_SIZE * 2), (self.GRID_SIZE * 2))
        self.creeps = pygame.sprite.Group()
        self.spawn_new_creep()
        self.creep_spawn_timer = Timer(500, self.spawn_new_creep)
        self.create_walls()
        # Floor division keeps these ints; they size GridPath and feed range().
        self.grid_nrows = (self.FIELD_SIZE[1] // self.GRID_SIZE)
        self.grid_ncols = (self.FIELD_SIZE[0] // self.GRID_SIZE)
        self.goal_coord = ((self.grid_nrows - 1), (self.grid_ncols - 1))
        self.gridpath = GridPath(nrows=self.grid_nrows, ncols=self.grid_ncols, goal=self.goal_coord)
        for wall in self.walls:
            self.gridpath.set_blocked(wall)
        self.options = dict(draw_grid=False)

    def create_walls(self):
        """Populate self.walls with the fixed (row, col) wall layout."""
        walls_list = []
        for r in range(0, 15):
            walls_list.append((r, 6))
            if (r != 7):
                walls_list.append((r, 3))
                walls_list.append((r, 4))
            if (r > 4):
                walls_list.append((r, 1))
        for r in range(9, 20):
            walls_list.append((r, 10))
        for c in range(14, 18):
            walls_list.append((15, c))
        self.walls = dict().fromkeys(walls_list, True)

    def next_on_path(self, coord):
        """Return the next grid coordinate on the path from *coord* to the goal."""
        return self.gridpath.get_next(coord)

    def xy2coord(self, pos):
        """Convert a screen (x, y) position to a (row, col) grid coordinate."""
        (x, y) = ((pos[0] - self.field_rect.left), (pos[1] - self.field_rect.top))
        return ((int(y) // self.GRID_SIZE), (int(x) // self.GRID_SIZE))

    def coord2xy_mid(self, coord):
        """Convert a (row, col) grid coordinate to the pixel midpoint of its cell."""
        (nrow, ncol) = coord
        return (((self.field_rect.left + (ncol * self.GRID_SIZE)) + (self.GRID_SIZE // 2)), ((self.field_rect.top + (nrow * self.GRID_SIZE)) + (self.GRID_SIZE // 2)))

    def is_goal_coord(self, coord):
        """True when *coord* is the goal cell creeps are walking towards."""
        return (coord == self.goal_coord)

    # Running count of creeps spawned so far, capped at MAX_N_CREEPS.
    _spawned_creep_count = 0

    def spawn_new_creep(self):
        """Add one creep at the entrance cell, unless the cap is reached."""
        if (self._spawned_creep_count >= self.MAX_N_CREEPS):
            return
        self.creeps.add(Creep(screen=self.screen, game=self, creep_images=choice(self.creep_images), explosion_images=self.explosion_images, field=self.field_rect, init_position=((self.field_rect.left + (self.GRID_SIZE // 2)), (self.field_rect.top + (self.GRID_SIZE // 2))), init_direction=(1, 1), speed=0.05))
        self._spawned_creep_count += 1

    def get_field_rect(self):
        """Return the field rect inside the box border."""
        return self.field_box.get_internal_rect()

    def draw_background(self):
        """Tile the whole screen with the background brick image."""
        img_rect = self.tile_img.get_rect()
        nrows = (int((self.screen.get_height() / img_rect.height)) + 1)
        ncols = (int((self.screen.get_width() / img_rect.width)) + 1)
        for y in range(nrows):
            for x in range(ncols):
                img_rect.topleft = ((x * img_rect.width), (y * img_rect.height))
                self.screen.blit(self.tile_img, img_rect)

    def draw_portals(self):
        """Draw the translucent entrance (green) and exit (red) overlays."""
        entrance_sf = pygame.Surface((self.entrance_rect.w, self.entrance_rect.h))
        entrance_sf.fill(Color(80, 200, 80))
        entrance_sf.set_alpha(150)
        self.screen.blit(entrance_sf, self.entrance_rect)
        exit_sf = pygame.Surface((self.exit_rect.w, self.exit_rect.h))
        exit_sf.fill(Color(200, 80, 80))
        exit_sf.set_alpha(150)
        self.screen.blit(exit_sf, self.exit_rect)

    def draw_grid(self):
        """Draw the debug grid lines over the field."""
        for y in range((self.grid_nrows + 1)):
            pygame.draw.line(self.screen, Color(50, 50, 50), (self.field_rect.left, ((self.field_rect.top + (y * self.GRID_SIZE)) - 1)), ((self.field_rect.right - 1), ((self.field_rect.top + (y * self.GRID_SIZE)) - 1)))
        for x in range((self.grid_ncols + 1)):
            pygame.draw.line(self.screen, Color(50, 50, 50), (((self.field_rect.left + (x * self.GRID_SIZE)) - 1), self.field_rect.top), (((self.field_rect.left + (x * self.GRID_SIZE)) - 1), (self.field_rect.bottom - 1)))

    def draw_walls(self):
        """Draw each wall cell as a diamond, linked to adjacent wall cells."""
        wallcolor = Color(140, 140, 140)
        for wall in self.walls:
            (nrow, ncol) = wall
            pos_x = ((self.field_rect.left + (ncol * self.GRID_SIZE)) + (self.GRID_SIZE // 2))
            pos_y = ((self.field_rect.top + (nrow * self.GRID_SIZE)) + (self.GRID_SIZE // 2))
            radius = 3
            pygame.draw.polygon(self.screen, wallcolor, [((pos_x - radius), pos_y), (pos_x, (pos_y + radius)), ((pos_x + radius), pos_y), (pos_x, (pos_y - radius))])
            # Connect this wall to its neighbours below and to the right.
            if (((nrow + 1), ncol) in self.walls):
                pygame.draw.line(self.screen, wallcolor, (pos_x, pos_y), (pos_x, (pos_y + self.GRID_SIZE)), 3)
            if ((nrow, (ncol + 1)) in self.walls):
                pygame.draw.line(self.screen, wallcolor, (pos_x, pos_y), ((pos_x + self.GRID_SIZE), pos_y), 3)

    def draw(self):
        """Redraw the whole frame: background, field, boards, creeps, portals."""
        self.draw_background()
        self.field_box.draw()
        if self.options['draw_grid']:
            self.draw_grid()
        self.draw_walls()
        self.tboard.draw()
        self.mboard.text = self.mboard_text
        self.mboard.draw()
        for creep in self.creeps:
            creep.draw()
        self.draw_portals()

    def run(self):
        """Main loop: handle events, update creeps, and render at 30 FPS."""
        while True:
            time_passed = self.clock.tick(30)
            # Skip pathological frames (e.g. after dragging the window).
            if (time_passed > 100):
                continue
            for event in pygame.event.get():
                if (event.type == pygame.QUIT):
                    self.quit()
                elif (event.type == pygame.KEYDOWN):
                    if (event.key == pygame.K_SPACE):
                        self.paused = (not self.paused)
                    elif (event.key == pygame.K_g):
                        # Ctrl+G toggles the debug grid overlay.
                        if (pygame.key.get_mods() & pygame.KMOD_CTRL):
                            self.options['draw_grid'] = (not self.options['draw_grid'])
                elif ((event.type == pygame.MOUSEBUTTONDOWN) and (event.button == 1)):
                    for creep in self.creeps:
                        creep.mouse_click_event(event.pos)
            if (not self.paused):
                msg1 = ('Creeps: %d' % len(self.creeps))
                msg2 = ''
                self.mboard_text = [msg1, msg2]
                self.creep_spawn_timer.update(time_passed)
                for creep in self.creeps:
                    creep.update(time_passed)
                self.draw()
            pygame.display.flip()

    def quit(self):
        """Terminate the process."""
        sys.exit()
# NOTE(review): the two decorators below were reconstructed from corrupted
# remnants `('/stop_match', methods=...)` (a SyntaxError as written) and
# `_origin(allow_headers=['*'])`. Confirm the Flask app object name and the
# CORS decorator (flask_cors.cross_origin) used elsewhere in this module.
@app.route('/stop_match', methods=['GET', 'POST'])
@cross_origin(allow_headers=['*'])
def stop_match():
    """Stop the running match: tell socket clients to shut down, reset state."""
    global match_details, live_video_process
    try:
        # Ask every connected socket client to terminate itself.
        socket_io.emit('kill_self', {'data': 'Sleep'})
        match_details = constants.MATCH_DETAILS_TEMPLATE
        return (jsonify({'response': 'Success'}), 200)
    except Exception as exception:
        # Best-effort endpoint: report failure as HTTP 500 instead of raising.
        print('Exception', exception)
        return (jsonify({'response': 'Error'}), 500)
class BadgeFormList(ResourceList):
    """flask-rest-jsonapi list resource for badge forms (GET only)."""

    def query(self, view_kwargs):
        """Build the base queryset, filtered by badge_id + event or by event only."""
        query_ = self.session.query(BadgeForms)
        if view_kwargs.get('badge_id'):
            # NOTE(review): branches on 'badge_id' but looks up 'event_id' --
            # presumably both kwargs are present on this route; confirm
            # against the URL rule that maps to this resource.
            events = safe_query_kwargs(Event, view_kwargs, 'event_id')
            query_ = self.session.query(BadgeForms).filter_by(event_id=events.id)
            query_ = query_.filter_by(badge_id=view_kwargs.get('badge_id'))
        else:
            query_ = event_query(query_, view_kwargs)
        return query_

    def after_get(badge_forms):
        # Data-layer hook (deliberately no `self`: installed through the
        # `methods` dict in data_layer below). Enriches each returned item
        # with its badge fields, ordered by ascending id.
        for item in badge_forms['data']:
            badgeFields = []
            badgeFieldForms = BadgeFieldForms.query.filter_by(badge_form_id=item['id']).filter_by(badge_id=item['attributes']['badge-id']).order_by(BadgeFieldForms.id.asc()).all()
            for badgeFieldForm in badgeFieldForms:
                badgeFields.append(badgeFieldForm.convert_to_dict())
            item['attributes']['badge-fields'] = badgeFields
        return badge_forms

    # flask-rest-jsonapi resource configuration.
    view_kwargs = True
    decorators = (jwt_required,)
    methods = ['GET']
    schema = BadgeFormSchema
    data_layer = {'session': db.session, 'model': BadgeForms, 'methods': {'query': query, 'after_get': after_get}}
class OptionPlotoptionsSplineDatalabels(Options):
    """Highcharts `plotOptions.spline.dataLabels` configuration options.

    Each option is a property pair: the getter returns the configured value
    (falling back to the Highcharts default passed to `_config_get`), the
    setter stores the value via `_config`. `animation`, `filter` and
    `textPath` expose nested option objects via `_config_sub_data`.

    NOTE(review): the @property/@<name>.setter decorators were restored
    here -- the source contained duplicate bare `def`s for every option,
    which made each setter silently shadow its getter.
    """

    @property
    def align(self):
        return self._config_get('center')

    @align.setter
    def align(self, text: str):
        self._config(text, js_type=False)

    @property
    def allowOverlap(self):
        return self._config_get(False)

    @allowOverlap.setter
    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def animation(self) -> 'OptionPlotoptionsSplineDatalabelsAnimation':
        """Nested animation options object."""
        return self._config_sub_data('animation', OptionPlotoptionsSplineDatalabelsAnimation)

    @property
    def backgroundColor(self):
        return self._config_get(None)

    @backgroundColor.setter
    def backgroundColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderColor(self):
        return self._config_get(None)

    @borderColor.setter
    def borderColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderRadius(self):
        return self._config_get(0)

    @borderRadius.setter
    def borderRadius(self, num: float):
        self._config(num, js_type=False)

    @property
    def borderWidth(self):
        return self._config_get(0)

    @borderWidth.setter
    def borderWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def className(self):
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def crop(self):
        return self._config_get(True)

    @crop.setter
    def crop(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def defer(self):
        return self._config_get(True)

    @defer.setter
    def defer(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def enabled(self):
        return self._config_get(False)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def filter(self) -> 'OptionPlotoptionsSplineDatalabelsFilter':
        """Nested filter options object."""
        return self._config_sub_data('filter', OptionPlotoptionsSplineDatalabelsFilter)

    @property
    def format(self):
        return self._config_get('point.value')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def formatter(self):
        return self._config_get(None)

    @formatter.setter
    def formatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def inside(self):
        return self._config_get(None)

    @inside.setter
    def inside(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def nullFormat(self):
        return self._config_get(None)

    @nullFormat.setter
    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def nullFormatter(self):
        return self._config_get(None)

    @nullFormatter.setter
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def overflow(self):
        return self._config_get('justify')

    @overflow.setter
    def overflow(self, text: str):
        self._config(text, js_type=False)

    @property
    def padding(self):
        return self._config_get(5)

    @padding.setter
    def padding(self, num: float):
        self._config(num, js_type=False)

    @property
    def position(self):
        return self._config_get('center')

    @position.setter
    def position(self, text: str):
        self._config(text, js_type=False)

    @property
    def rotation(self):
        return self._config_get(0)

    @rotation.setter
    def rotation(self, num: float):
        self._config(num, js_type=False)

    @property
    def shadow(self):
        return self._config_get(False)

    @shadow.setter
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def shape(self):
        return self._config_get('square')

    @shape.setter
    def shape(self, text: str):
        self._config(text, js_type=False)

    @property
    def style(self):
        return self._config_get(None)

    @style.setter
    def style(self, value: Any):
        self._config(value, js_type=False)

    @property
    def textPath(self) -> 'OptionPlotoptionsSplineDatalabelsTextpath':
        """Nested textPath options object."""
        return self._config_sub_data('textPath', OptionPlotoptionsSplineDatalabelsTextpath)

    @property
    def useHTML(self):
        return self._config_get(False)

    @useHTML.setter
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def verticalAlign(self):
        return self._config_get('bottom')

    @verticalAlign.setter
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)

    @property
    def x(self):
        return self._config_get(0)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        return self._config_get(0)

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)

    @property
    def zIndex(self):
        return self._config_get(6)

    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
def extract_quotes_li(tree: lxml.html.HtmlElement, max_quotes: int, headings: Optional[List[Text]]=None, word_blacklist: Optional[List[Text]]=None) -> List[Text]:
    """Collect up to *max_quotes* quotes from list items / definition lists.

    Sections are gated by their h2/h3 headings: content is skipped until a
    heading accepted by check_skip_heading() is seen. Candidate text must
    additionally pass is_quote() against *word_blacklist*.
    """
    remove_toc(tree)
    wanted_headings = headings or []
    blacklist = word_blacklist or []
    quotes: List[Text] = []
    # If the page has any headings at all, skip content until the first one.
    skipping = bool(tree.xpath('//h2|//h3'))
    for node in tree.xpath('//div/ul/li|//div/dl|//h2|//h3'):
        if node.tag in ('h2', 'h3'):
            skipping = check_skip_heading(node, wanted_headings)
            continue
        if skipping:
            continue
        candidate = extract_potential_quote(node)
        if not candidate:
            continue
        if is_quote(candidate, blacklist):
            quotes.append(candidate)
            if len(quotes) == max_quotes:
                break
    return quotes
class ProfileCacheDB():
    """SQLite-backed cache of kernel profiling results (gemm/conv/conv3d/norm).

    Only CacheMode.LOCAL is implemented: `path` must point at an SQLite file.
    Passing `uri` selects CacheMode.REMOTE, which raises NotImplementedError.
    """

    def __init__(self, target: str, path: str=None, uri: str=None, port: str=None):
        self._target = target
        self._mode = CacheMode.LOCAL
        # Set after an INSERT; the next query commits pending rows first.
        self._db_commit_flag = False
        self._gemm_cache_version = ait_cache_version()
        self._conv_cache_version = ait_cache_version()
        self._conv3d_cache_version = ait_cache_version()
        if uri is not None:
            self._mode = CacheMode.REMOTE
        if self._mode == CacheMode.LOCAL:
            assert path is not None
            self._con = sqlite3.connect(path)
            self._cur = self._con.cursor()
            self._init_db()
        else:
            raise NotImplementedError

    def _init_db(self):
        """Create every cache table that does not exist yet."""
        self._create_gemm_table()
        self._create_conv_table()
        self._create_conv3d_table()
        self._create_norm_table()

    # The three cache versions are properties: every other method in this
    # class reads them attribute-style (e.g. `version=self.gemm_cache_version`
    # when rendering SQL), so plain methods would embed a bound-method repr
    # in table names and queries.
    @property
    def gemm_cache_version(self) -> int:
        return self._gemm_cache_version

    @property
    def conv_cache_version(self) -> int:
        return self._conv_cache_version

    @property
    def conv3d_cache_version(self) -> int:
        return self._conv3d_cache_version

    def _create_gemm_table(self):
        """Create the versioned gemm table when missing."""
        version = self.gemm_cache_version
        if not self._table_exists('gemm', version):
            _LOGGER.info('Temporarily keeping the old gemm cache versions if exist')
            _LOGGER.info(f'Creating a new gemm table with version={version!r}')
            sql = GEMM_INIT_TEMPLATE.render(dev=self._target, version=version)
            self._cur.execute(sql)
            self._con.commit()

    def _create_conv_table(self):
        """Create the versioned conv table when missing."""
        version = self.conv_cache_version
        if not self._table_exists('conv', version):
            _LOGGER.info('Temporarily keeping the old conv cache versions if exist')
            _LOGGER.info(f'Creating a new conv table with version={version!r}')
            sql = CONV_INIT_TEMPLATE.render(dev=self._target, version=version)
            self._cur.execute(sql)
            self._con.commit()

    def _create_conv3d_table(self):
        """Create the versioned conv3d table when missing."""
        version = self.conv3d_cache_version
        if not self._table_exists('conv3d', version):
            _LOGGER.info('Temporarily keeping the old conv3d cache versions if exist')
            _LOGGER.info(f'Creating a new conv3d table with version={version!r}')
            sql = CONV3D_INIT_TEMPLATE.render(dev=self._target, version=version)
            self._cur.execute(sql)
            self._con.commit()

    def _create_norm_table(self):
        """Create the (unversioned) normalization table; template is idempotent."""
        sql = NORM_INIT_TEMPLATE.render(dev=self._target)
        self._cur.execute(sql)
        self._con.commit()

    def _table_exists(self, table_kind, cache_version):
        """True when '<target>_<kind>_<version>' exists in the database."""
        table_name = f'{self._target}_{table_kind}_{cache_version}'
        sql = CHECK_TABLE_EXISTENCE_TEMPLATE.render(table_name=table_name)
        self._cur.execute(sql)
        tables = self._cur.fetchall()
        if tables:
            _LOGGER.info(f'table_name={table_name!r} exists in the db')
            return True
        else:
            _LOGGER.info(f'table_name={table_name!r} does not exist in the db, possible version mismatch!')
            return False

    def _delete_existing_table(self, table_kind):
        """Drop the single existing '<target>_<kind>*' table, if any."""
        sql = QUERY_ALL_TABLES_TEMPLATE.render()
        self._cur.execute(sql)
        all_tables = self._cur.fetchall()
        if len(all_tables) == 0:
            _LOGGER.info('deleting table: skip empty table')
            return
        target_tables = [table[0] for table in all_tables if table[0].startswith(f'{self._target}_{table_kind}')]
        assert (len(target_tables) != 0), f'no {table_kind} table exists'
        assert (len(target_tables) == 1), f'expected only one {table_kind} table but got target_tables={target_tables!r}'
        _LOGGER.info(f'deleting table target_tables[0]={target_tables[0]!r}')
        self._cur.execute(f'DROP TABLE {target_tables[0]}')

    def _query(self, sql: str) -> Tuple[(str, int)]:
        """Run *sql* and return the first row, or None when nothing matched."""
        if self._mode == CacheMode.LOCAL:
            # Flush any insert made since the last query before reading.
            if self._db_commit_flag:
                self._con.commit()
                self._db_commit_flag = False
            self._cur.execute(sql)
            out = self._cur.fetchall()
            if len(out) == 0:
                return None
            return out[0]
        raise NotImplementedError

    def query_gemm(self, args: Dict[(str, Any)]) -> Tuple[(str, int)]:
        """Look up a cached gemm profiling record."""
        sql = GEMM_QUERY_TEMPLATE.render(dev=self._target, version=self.gemm_cache_version, **args)
        return self._query(sql)

    def query_conv(self, args: Dict[(str, Any)]) -> Tuple[(str, int)]:
        """Look up a cached conv profiling record."""
        sql = CONV_QUERY_TEMPLATE.render(dev=self._target, version=self.conv_cache_version, **args)
        return self._query(sql)

    def query_conv3d(self, args: Dict[(str, Any)]) -> Tuple[(str, int)]:
        """Look up a cached conv3d profiling record."""
        sql = CONV3D_QUERY_TEMPLATE.render(dev=self._target, version=self.conv3d_cache_version, **args)
        return self._query(sql)

    def query_normalization(self, args: Dict[(str, Any)]) -> Tuple[(str, int)]:
        """Look up a cached normalization profiling record."""
        sql = NORM_QUERY_TEMPLATE.render(dev=self._target, **args)
        return self._query(sql)

    def _insert(self, query_sql: str, insert_sql: str) -> None:
        """Insert a record unless an identical one already exists."""
        if self._mode == CacheMode.LOCAL:
            self._cur.execute(query_sql)
            out = self._cur.fetchall()
            if len(out) == 0:
                self._cur.execute(insert_sql)
                # Defer the commit until the next query or __del__.
                self._db_commit_flag = True
            else:
                _LOGGER.info(('Ignore repeat profile_record: ' + query_sql))

    def insert_gemm(self, args: Dict[(str, Any)]) -> None:
        """Insert a gemm profiling record (deduplicated on its key columns)."""
        query_sql = GEMM_QUERY_TEMPLATE.render(dev=self._target, version=self.gemm_cache_version, dtype_a=args['dtype_a'], dtype_b=args['dtype_b'], dtype_c=args['dtype_c'], dtype_acc=args['dtype_acc'], major_a=args['major_a'], major_b=args['major_b'], major_c=args['major_c'], op_type=args['op_type'], device=args['device'], epilogue=args['epilogue'], split_k=args['split_k'], pshape=args['pshape'], exec_entry_sha1=args['exec_entry_sha1'])
        insert_sql = GEMM_INSERT_TEMPLATE.render(dev=self._target, version=self.gemm_cache_version, **args)
        self._insert(query_sql, insert_sql)

    def insert_conv(self, args: Dict[(str, Any)]) -> None:
        """Insert a conv profiling record (deduplicated on its key columns)."""
        query_sql = CONV_QUERY_TEMPLATE.render(dev=self._target, version=self.conv_cache_version, dtype_a=args['dtype_a'], dtype_b=args['dtype_b'], dtype_c=args['dtype_c'], dtype_acc=args['dtype_acc'], major_a=args['major_a'], major_b=args['major_b'], major_c=args['major_c'], kh=args['kh'], kw=args['kw'], co=args['co'], strideh=args['strideh'], padh=args['padh'], dilateh=args['dilateh'], stridew=args['stridew'], padw=args['padw'], dilatew=args['dilatew'], op_type=args['op_type'], device=args['device'], epilogue=args['epilogue'], split_k=args['split_k'], exec_entry_sha1=args['exec_entry_sha1'])
        insert_sql = CONV_INSERT_TEMPLATE.render(dev=self._target, version=self.conv_cache_version, **args)
        self._insert(query_sql, insert_sql)

    def insert_conv3d(self, args: Dict[(str, Any)]) -> None:
        """Insert a conv3d profiling record (deduplicated on its key columns)."""
        query_sql = CONV3D_QUERY_TEMPLATE.render(dev=self._target, version=self.conv3d_cache_version, dtype_a=args['dtype_a'], dtype_b=args['dtype_b'], dtype_c=args['dtype_c'], dtype_acc=args['dtype_acc'], major_a=args['major_a'], major_b=args['major_b'], major_c=args['major_c'], kd=args['kd'], kh=args['kh'], kw=args['kw'], co=args['co'], stride_d=args['stride_d'], stride_h=args['stride_h'], stride_w=args['stride_w'], pad_d=args['pad_d'], pad_h=args['pad_h'], pad_w=args['pad_w'], dilate_d=args['dilate_d'], dilate_h=args['dilate_h'], dilate_w=args['dilate_w'], op_type=args['op_type'], device=args['device'], epilogue=args['epilogue'], split_k=args['split_k'], exec_entry_sha1=args['exec_entry_sha1'])
        insert_sql = CONV3D_INSERT_TEMPLATE.render(dev=self._target, version=self.conv3d_cache_version, **args)
        self._insert(query_sql, insert_sql)

    def insert_normalization(self, args: Dict[(str, Any)]) -> None:
        """Insert a normalization profiling record (deduplicated on its key)."""
        query_sql = NORM_QUERY_TEMPLATE.render(dev=self._target, dtype_in=args['dtype_in'], dtype_acc=args['dtype_acc'], dtype_out=args['dtype_out'], rank=args['rank'], op_type=args['op_type'], device=args['device'], exec_entry_sha1=args['exec_entry_sha1'])
        insert_sql = NORM_INSERT_TEMPLATE.render(dev=self._target, **args)
        self._insert(query_sql, insert_sql)

    def __del__(self):
        # Guard: if __init__ raised (e.g. NotImplementedError for REMOTE
        # mode), `_con` was never created and a bare access would raise
        # AttributeError during interpreter teardown.
        con = getattr(self, '_con', None)
        if con is not None:
            con.commit()
            con.close()
# NOTE(review): decorator reconstructed from the SyntaxError remnant
# `.parametrize('endpoint__monitor_level', [1])` -- confirm it was
# @pytest.mark.parametrize (pytest-factoryboy fixture-override syntax).
@pytest.mark.parametrize('endpoint__monitor_level', [1])
def test_set_rule_post(dashboard_user, endpoint, session):
    """POSTing a new monitor level via /dashboard/api/set_rule persists it."""
    response = dashboard_user.post('dashboard/api/set_rule', data={'name': endpoint.name, 'value': 3})
    assert (response.status_code == 200)
    assert (response.data == b'OK')
    # Re-fetch the endpoint to prove the change reached the database.
    endpoint = session.query(Endpoint).get(endpoint.id)
    assert (endpoint.monitor_level == 3)
# NOTE(review): the bare tuple below is almost certainly the remnant of a
# stripped decorator such as @patch.object(EcsClient, '__init__') that
# supplies the mocked `client` argument -- confirm and restore.
(EcsClient, '__init__')
def test_scale_action(client):
    """scale() sets the desired count through describe + update, once each."""
    action = ScaleAction(client, CLUSTER_NAME, SERVICE_NAME)
    updated_service = action.scale(5)
    assert isinstance(updated_service, EcsService)
    client.describe_services.assert_called_once_with(cluster_name=CLUSTER_NAME, service_name=SERVICE_NAME)
    client.update_service.assert_called_once_with(cluster=action.service.cluster, service=action.service.name, desired_count=5, task_definition=action.service.task_definition)
def extractSlavetranslationWordpressCom(item):
    """Parse a 'slavetranslation.wordpress.com' feed item into a release message.

    Returns False when the title carries no volume/chapter info, looks like
    a preview, or matches no known tag. (The early exit previously returned
    None, unlike the sibling extractors which consistently return False.)
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if ((not (chp or vol)) or ('preview' in item['title'].lower())):
        return False
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for (tagname, name, tl_type) in tagmap:
        if (tagname in item['tags']):
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# NOTE(review): `_loss('alpha_reg')` looks like the remnant of a stripped
# registration decorator (e.g. @register_loss('alpha_reg')) -- confirm.
_loss('alpha_reg')
class AlphaRegLoss(Loss):
    """Alpha-matte regularisation loss: weighted L1 (annealed) plus L0 terms."""

    def __init__(self, alpha: float, inputs: List[str], lambda_alpha_l1: float, lambda_alpha_l0: float, l1_end_step: int):
        super().__init__(alpha, inputs=inputs)
        self.lambda_alpha_l1 = lambda_alpha_l1  # weight of the L1 component
        self.lambda_alpha_l0 = lambda_alpha_l0  # weight of the L0 component
        self.l1_end_step = l1_end_step          # step after which L1 is dropped

    def forward(self, pred, global_step):
        """Return alpha * regularisation of *pred*, annealing the L1 term."""
        lambda_alpha_l1 = self.lambda_alpha_l1
        if (global_step >= self.l1_end_step):
            # Past l1_end_step only the L0 component remains.
            lambda_alpha_l1 = 0
        return (self.alpha * cal_alpha_reg(pred, lambda_alpha_l1, self.lambda_alpha_l0))
def create_session_name(node=''):
    """Derive a screen-session name for *node*, encoding '/' as SLASH_SEP."""
    if node is None:
        return ''
    # Double any separator chars already present so they survive the '/'
    # substitution, then encode every namespace slash as the separator.
    doubled_sep = '%s%s' % (SLASH_SEP, SLASH_SEP)
    name = rospy.names.ns_join('/', node).replace(SLASH_SEP, doubled_sep)
    name = name.replace('/', SLASH_SEP)
    overflow = len(name) - SCREEN_NAME_MAX_CHARS
    if overflow > 0:
        # Too long for screen: keep the tail and mark the truncation.
        name = '_~%s' % name[overflow - 2:]
    return name
def log_fortianalyzer3_filter(data, fos):
    """Push the log.fortianalyzer3 'filter' configuration through the fos API."""
    vdom = data['vdom']
    # Drop unset keys, then convert snake_case keys to the hyphenated
    # names the FortiOS API expects.
    raw_config = data['log_fortianalyzer3_filter']
    payload = underscore_to_hyphen(filter_log_fortianalyzer3_filter_data(raw_config))
    return fos.set('log.fortianalyzer3', 'filter', data=payload, vdom=vdom)
.feature('unit')
.story('services', 'south', 'ingest')
class TestIngest():
def setup_method(self):
Ingest._core_management_host = ''
Ingest._core_management_port = 0
Ingest.readings_storage_async = None
Ingest.storage_async = None
Ingest._readings_stats = 0
Ingest._discarded_readings_stats = 0
Ingest._sensor_stats = {}
Ingest._write_statistics_task = None
Ingest._write_statistics_sleep_task = None
Ingest._stop = False
Ingest._started = False
Ingest._readings_lists = None
Ingest._current_readings_list_index = 0
Ingest._insert_readings_tasks = None
Ingest._readings_list_batch_size_reached = None
Ingest._readings_list_not_empty = None
Ingest._readings_lists_not_full = None
Ingest._insert_readings_wait_tasks = None
Ingest._last_insert_time = 0
Ingest._readings_list_size = 0
Ingest._write_statistics_frequency_seconds = 5
Ingest._readings_buffer_size = 500
Ingest._max_concurrent_readings_inserts = 5
Ingest._readings_insert_batch_size = 100
Ingest._readings_insert_batch_timeout_seconds = 1
Ingest._max_readings_insert_batch_connection_idle_seconds = 60
Ingest._max_readings_insert_batch_reconnect_wait_seconds = 10
Ingest.category = 'South'
Ingest.default_config = {'readings_buffer_size': {'description': 'The maximum number of readings to buffer in memory', 'type': 'integer', 'default': str(Ingest._readings_buffer_size)}, 'max_concurrent_readings_inserts': {'description': 'The maximum number of concurrent processes that send batches of readings to storage', 'type': 'integer', 'default': str(Ingest._max_concurrent_readings_inserts)}, 'readings_insert_batch_size': {'description': 'The maximum number of readings in a batch of inserts', 'type': 'integer', 'default': str(Ingest._readings_insert_batch_size)}, 'readings_insert_batch_timeout_seconds': {'description': 'The number of seconds to wait for a readings list to reach the minimum batch size', 'type': 'integer', 'default': str(Ingest._readings_insert_batch_timeout_seconds)}, 'max_readings_insert_batch_connection_idle_seconds': {'description': 'Close storage connections used to insert readings when idle for this number of seconds', 'type': 'integer', 'default': str(Ingest._max_readings_insert_batch_connection_idle_seconds)}, 'max_readings_insert_batch_reconnect_wait_seconds': {'description': 'The maximum number of seconds to wait before reconnecting to storage when inserting readings', 'type': 'integer', 'default': str(Ingest._max_readings_insert_batch_reconnect_wait_seconds)}}
.asyncio
async def test_read_config(self, mocker):
Ingest.storage_async = MagicMock(spec=StorageClientAsync)
Ingest.readings_storage_async = MagicMock(spec=ReadingsStorageClientAsync)
mocker.patch.object(MicroserviceManagementClient, '__init__', return_value=None)
create_cfg = mocker.patch.object(MicroserviceManagementClient, 'create_configuration_category', return_value=None)
get_cfg = mocker.patch.object(MicroserviceManagementClient, 'get_configuration_category', return_value=get_cat(Ingest.default_config))
mocker.patch.object(MicroserviceManagementClient, 'create_child_category', return_value=None)
Ingest._parent_service = MagicMock(_core_microservice_management_client=MicroserviceManagementClient())
(await Ingest._read_config())
assert (1 == create_cfg.call_count)
assert (1 == get_cfg.call_count)
new_config = get_cat(Ingest.default_config)
assert (Ingest._readings_buffer_size == int(new_config['readings_buffer_size']['value']))
assert (Ingest._max_concurrent_readings_inserts == int(new_config['max_concurrent_readings_inserts']['value']))
assert (Ingest._readings_insert_batch_size == int(new_config['readings_insert_batch_size']['value']))
assert (Ingest._readings_insert_batch_timeout_seconds == int(new_config['readings_insert_batch_timeout_seconds']['value']))
assert (Ingest._max_readings_insert_batch_connection_idle_seconds == int(new_config['max_readings_insert_batch_connection_idle_seconds']['value']))
assert (Ingest._max_readings_insert_batch_reconnect_wait_seconds == int(new_config['max_readings_insert_batch_reconnect_wait_seconds']['value']))
.asyncio
async def test_read_config_filter(self, mocker):
mock_config = {'filter': {'type': 'JSON', 'default': '{"pipeline": ["scale"]}', 'description': 'Filter pipeline', 'value': '{"pipeline": ["scale"]}'}, 'dataPointsPerSec': {'order': '2', 'description': 'Data points per second', 'displayName': 'Data points per second', 'type': 'integer', 'default': '1', 'value': '1'}, 'assetName': {'order': '1', 'description': 'Name of Asset', 'displayName': 'Asset name', 'type': 'string', 'default': 'sinusoid', 'value': 'sinusoid'}, 'plugin': {'description': 'Sinusoid Plugin', 'type': 'string', 'default': 'sinusoid', 'readonly': 'true', 'value': 'sinusoid'}}
Ingest.storage_async = MagicMock(spec=StorageClientAsync)
Ingest.readings_storage_async = MagicMock(spec=ReadingsStorageClientAsync)
mocker.patch.object(MicroserviceManagementClient, '__init__', return_value=None)
create_cfg = mocker.patch.object(MicroserviceManagementClient, 'create_configuration_category', return_value=None)
get_cfg = mocker.patch.object(MicroserviceManagementClient, 'get_configuration_category', return_value=get_cat(Ingest.default_config))
mocker.patch.object(MicroserviceManagementClient, 'create_child_category', return_value=None)
Ingest._parent_service = MagicMock(_core_microservice_management_client=MicroserviceManagementClient(), _name='test')
Ingest._parent_service.config = mock_config
log_warning = mocker.patch.object(ingest._LOGGER, 'warning')
(await Ingest._read_config())
assert (1 == log_warning.call_count)
calls = [call('South Service [%s] does not support the use of a filter pipeline.', 'test')]
log_warning.assert_has_calls(calls, any_order=True)
.asyncio
async def test_start(self, mocker):
class mock_stat():
def __init__(self):
pass
async def register(self, key, desc):
return None
async def mock_create(storage):
return mock_stat()
if ((sys.version_info.major == 3) and (sys.version_info.minor >= 8)):
_rv1 = (await mock_coro())
_rv2 = (await mock_create(None))
else:
_rv1 = asyncio.ensure_future(mock_coro())
_rv2 = asyncio.ensure_future(mock_create(None))
mocker.patch.object(StorageClientAsync, '__init__', return_value=None)
mocker.patch.object(ReadingsStorageClientAsync, '__init__', return_value=None)
log_warning = mocker.patch.object(ingest._LOGGER, 'warning')
mocker.patch.object(MicroserviceManagementClient, '__init__', return_value=None)
create_cfg = mocker.patch.object(MicroserviceManagementClient, 'create_configuration_category', return_value=None)
get_cfg = mocker.patch.object(MicroserviceManagementClient, 'get_configuration_category', return_value=get_cat(Ingest.default_config))
mocker.patch.object(MicroserviceManagementClient, 'get_asset_tracker_events', return_value={'track': []})
mocker.patch.object(MicroserviceManagementClient, 'create_child_category', return_value=None)
mocker.patch.object(statistics, 'create_statistics', return_value=_rv2)
parent_service = MagicMock(_core_microservice_management_client=MicroserviceManagementClient())
mocker.patch.object(Ingest, '_write_statistics', return_value=_rv1)
mocker.patch.object(Ingest, '_insert_readings', return_value=_rv1)
(await Ingest.start(parent=parent_service))
assert (1 == create_cfg.call_count)
assert (1 == get_cfg.call_count)
assert (Ingest._stop is False)
assert (Ingest._started is True)
assert (Ingest._readings_list_size == int((Ingest._readings_buffer_size / Ingest._max_concurrent_readings_inserts)))
assert (Ingest._last_insert_time is 0)
assert (Ingest._max_concurrent_readings_inserts == len(Ingest._insert_readings_wait_tasks))
assert (Ingest._max_concurrent_readings_inserts == len(Ingest._readings_list_batch_size_reached))
assert (Ingest._max_concurrent_readings_inserts == len(Ingest._readings_list_not_empty))
assert (Ingest._max_concurrent_readings_inserts == len(Ingest._readings_lists))
assert (0 == log_warning.call_count)
# NOTE(review): ".asyncio" below is a mangled "@pytest.mark.asyncio" decorator
# left by source extraction; restore the "@pytest.mark" prefix when repairing
# this file.  The same applies to every ".asyncio"/".skip" line in this class.
.asyncio
async def test_stop(self, mocker):
"""Start the Ingest service, stop it, and verify internal state is torn down."""
class mock_stat():
# Minimal stand-in for the statistics object returned by create_statistics.
def __init__(self):
pass
async def register(self, key, desc):
return None
async def mock_create(storage):
return mock_stat()
# On Python 3.8+ mocks can return awaited results directly; older versions
# need a real future/task so the production code can await the return value.
if ((sys.version_info.major == 3) and (sys.version_info.minor >= 8)):
_rv1 = (await mock_coro())
_rv2 = (await mock_create(None))
else:
_rv1 = asyncio.ensure_future(mock_coro())
_rv2 = asyncio.ensure_future(mock_create(None))
# Stub out every external collaborator: storage clients, core management
# client (config categories, asset tracker) and the statistics module.
mocker.patch.object(StorageClientAsync, '__init__', return_value=None)
mocker.patch.object(ReadingsStorageClientAsync, '__init__', return_value=None)
log_exception = mocker.patch.object(ingest._LOGGER, 'exception')
mocker.patch.object(MicroserviceManagementClient, '__init__', return_value=None)
create_cfg = mocker.patch.object(MicroserviceManagementClient, 'create_configuration_category', return_value=None)
get_cfg = mocker.patch.object(MicroserviceManagementClient, 'get_configuration_category', return_value=get_cat(Ingest.default_config))
mocker.patch.object(MicroserviceManagementClient, 'get_asset_tracker_events', return_value={'track': []})
mocker.patch.object(MicroserviceManagementClient, 'create_child_category', return_value=None)
mocker.patch.object(statistics, 'create_statistics', return_value=_rv2)
parent_service = MagicMock(_core_microservice_management_client=MicroserviceManagementClient())
mocker.patch.object(Ingest, '_write_statistics', return_value=_rv1)
mocker.patch.object(Ingest, '_insert_readings', return_value=_rv1)
(await Ingest.start(parent=parent_service))
# Give the background tasks a moment to spin up before stopping.
(await asyncio.sleep(1))
(await Ingest.stop())
assert (1 == create_cfg.call_count)
assert (1 == get_cfg.call_count)
# stop() must flip the state flags and release every internal collection.
assert (Ingest._stop is True)
assert (Ingest._started is False)
assert (Ingest._insert_readings_wait_tasks is None)
assert (Ingest._insert_readings_tasks is None)
assert (Ingest._readings_lists is None)
assert (Ingest._readings_list_batch_size_reached is None)
assert (Ingest._readings_list_not_empty is None)
assert (Ingest._readings_lists_not_full is None)
assert (0 == log_exception.call_count)
.asyncio
async def test_increment_discarded_readings(self, mocker):
"""increment_discarded_readings bumps the discarded-readings counter."""
Ingest.increment_discarded_readings()
# NOTE(review): assumes _discarded_readings_stats is 0 before this test --
# this relies on class-level state / test ordering; confirm isolation.
assert (1 == Ingest._discarded_readings_stats)
# NOTE(review): ".skip"/".asyncio" are mangled "@pytest.mark.*" decorators.
.skip(reason='This method uses a while True loop. Investigate as to how to write unit test for an infinite loop.')
.asyncio
async def test__insert_readings(self, mocker):
"""Placeholder: _insert_readings runs an infinite loop, so it is skipped."""
pass
# NOTE(review): ".skip"/".asyncio" are mangled "@pytest.mark.*" decorators.
.skip(reason='This method uses a while True loop. Investigate as to how to write unit test for an infinite loop.')
.asyncio
async def test_write_statistics(self, mocker):
"""Placeholder: _write_statistics runs an infinite loop, so it is skipped."""
pass
.asyncio
async def test_is_available_at_start(self, mocker):
"""is_available() is True while at least one readings list has free space."""
# One insert worker, list capacity 2, one queued reading -> space remains.
Ingest._max_concurrent_readings_inserts = 1
Ingest._readings_list_size = 2
Ingest._current_readings_list_index = 0
Ingest._readings_lists = []
Ingest._readings_lists.append([])
Ingest._readings_lists[0].append((await mock_coro()))
log_warning = mocker.patch.object(ingest._LOGGER, 'warning')
mocker.patch.object(Ingest, '_write_statistics', return_value=(await mock_coro()))
mocker.patch.object(Ingest, '_insert_readings', return_value=(await mock_coro()))
retval = Ingest.is_available()
assert (retval is True)
assert (0 == log_warning.call_count)
.asyncio
async def test_is_available_at_stop(self, mocker):
"""is_available() is False once the service has been flagged to stop."""
Ingest._max_concurrent_readings_inserts = 1
Ingest._readings_list_size = 2
Ingest._current_readings_list_index = 0
Ingest._readings_lists = []
Ingest._readings_lists.append([])
Ingest._readings_lists[0].append((await mock_coro()))
log_warning = mocker.patch.object(ingest._LOGGER, 'warning')
# The stop flag takes precedence over any free list space.
# NOTE(review): this mutates class-level state without restoring it.
Ingest._stop = True
mocker.patch.object(Ingest, '_write_statistics', return_value=(await mock_coro()))
mocker.patch.object(Ingest, '_insert_readings', return_value=(await mock_coro()))
retval = Ingest.is_available()
assert (retval is False)
assert (0 == log_warning.call_count)
.asyncio
async def test_is_available_when_all_lists_full(self, mocker):
"""is_available() is False and warns when every list is at capacity."""
Ingest._max_concurrent_readings_inserts = 1
Ingest._readings_list_size = 2
Ingest._current_readings_list_index = 0
Ingest._readings_lists = []
Ingest._readings_lists.append([])
# Fill the single list to its capacity of two readings.
Ingest._readings_lists[0].append((await mock_coro()))
Ingest._readings_lists[0].append((await mock_coro()))
log_warning = mocker.patch.object(ingest._LOGGER, 'warning')
mocker.patch.object(Ingest, '_write_statistics', return_value=(await mock_coro()))
mocker.patch.object(Ingest, '_insert_readings', return_value=(await mock_coro()))
retval = Ingest.is_available()
assert (retval is False)
assert (1 == log_warning.call_count)
log_warning.assert_called_with('The ingest service is unavailable %s', 0)
.asyncio
async def test_add_readings_all_ok(self, mocker):
"""A valid reading is accepted and lands in the current readings list."""
data = {'timestamp': '2017-01-02T01:02:03.23232Z-05:00', 'asset': 'pump1', 'readings': {'velocity': '500', 'temperature': {'value': '32', 'unit': 'kelvin'}}}
Ingest._max_concurrent_readings_inserts = 1
Ingest._readings_list_size = 2
Ingest._current_readings_list_index = 0
Ingest._readings_lists = []
Ingest._readings_lists.append([])
Ingest._readings_list_not_empty = []
Ingest._readings_list_not_empty.append(asyncio.Event())
Ingest._started = True
mocker.patch.object(Ingest, '_write_statistics', return_value=(await mock_coro()))
mocker.patch.object(Ingest, '_insert_readings', return_value=(await mock_coro()))
mocker.patch.object(MicroserviceManagementClient, '__init__', return_value=None)
mocker.patch.object(MicroserviceManagementClient, 'create_asset_tracker_event', return_value=None)
assert (0 == len(Ingest._readings_lists[0]))
# Presumably asset names are tracked upper-cased in _sensor_stats -- confirm.
assert ('PUMP1' not in list(Ingest._sensor_stats.keys()))
(await Ingest.add_readings(asset=data['asset'], timestamp=data['timestamp'], readings=data['readings']))
assert (1 == len(Ingest._readings_lists[0]))
.asyncio
async def test_add_readings_if_stop(self, mocker):
"""Readings are rejected with a warning once the service is stopping."""
data = {'timestamp': '2017-01-02T01:02:03.23232Z-05:00', 'asset': 'pump1', 'readings': {'velocity': '500', 'temperature': {'value': '32', 'unit': 'kelvin'}}}
Ingest._max_concurrent_readings_inserts = 1
Ingest._readings_list_size = 2
Ingest._current_readings_list_index = 0
Ingest._readings_lists = []
Ingest._readings_lists.append([])
Ingest._readings_list_not_empty = []
Ingest._readings_list_not_empty.append(asyncio.Event())
# NOTE(review): mutates class-level state without restoring it afterwards.
Ingest._stop = True
log_warning = mocker.patch.object(ingest._LOGGER, 'warning')
mocker.patch.object(Ingest, '_write_statistics', return_value=(await mock_coro()))
mocker.patch.object(Ingest, '_insert_readings', return_value=(await mock_coro()))
assert (0 == len(Ingest._readings_lists[0]))
(await Ingest.add_readings(asset=data['asset'], timestamp=data['timestamp'], readings=data['readings']))
# The reading is dropped, not queued.
assert (0 == len(Ingest._readings_lists[0]))
assert (1 == log_warning.call_count)
log_warning.assert_called_with('The South Service is stopping')
.asyncio
async def test_add_readings_not_started(self, mocker):
"""add_readings raises RuntimeError when the service was never started."""
data = {'timestamp': '2017-01-02T01:02:03.23232Z-05:00', 'asset': 'pump1', 'readings': {'velocity': '500', 'temperature': {'value': '32', 'unit': 'kelvin'}}}
Ingest._max_concurrent_readings_inserts = 1
Ingest._readings_list_size = 2
Ingest._current_readings_list_index = 0
Ingest._readings_lists = []
Ingest._readings_lists.append([])
Ingest._readings_list_not_empty = []
Ingest._readings_list_not_empty.append(asyncio.Event())
Ingest._started = False
mocker.patch.object(Ingest, '_write_statistics', return_value=(await mock_coro()))
mocker.patch.object(Ingest, '_insert_readings', return_value=(await mock_coro()))
assert (0 == len(Ingest._readings_lists[0]))
with pytest.raises(RuntimeError):
(await Ingest.add_readings(asset=data['asset'], timestamp=data['timestamp'], readings=data['readings']))
# Nothing was queued.
assert (0 == len(Ingest._readings_lists[0]))
.asyncio
async def test_add_readings_incorrect_data_values(self, mocker):
"""Bad asset/timestamp/readings arguments raise ValueError or TypeError."""
data = {'timestamp': '2017-01-02T01:02:03.23232Z-05:00', 'asset': 'pump1', 'readings': {'velocity': '500', 'temperature': {'value': '32', 'unit': 'kelvin'}}}
Ingest._max_concurrent_readings_inserts = 1
Ingest._readings_list_size = 2
Ingest._current_readings_list_index = 0
Ingest._readings_lists = []
Ingest._readings_lists.append([])
Ingest._readings_list_not_empty = []
Ingest._readings_list_not_empty.append(asyncio.Event())
Ingest._started = True
mocker.patch.object(Ingest, '_write_statistics', return_value=(await mock_coro()))
mocker.patch.object(Ingest, '_insert_readings', return_value=(await mock_coro()))
assert (0 == len(Ingest._readings_lists[0]))
# Missing values raise ValueError; wrong types raise TypeError.
with pytest.raises(ValueError):
(await Ingest.add_readings(asset=None, timestamp=data['timestamp'], readings=data['readings']))
with pytest.raises(TypeError):
(await Ingest.add_readings(asset=123, timestamp=None, readings=data['readings']))
with pytest.raises(ValueError):
(await Ingest.add_readings(asset=data['asset'], timestamp=None, readings=data['readings']))
with pytest.raises(TypeError):
(await Ingest.add_readings(asset=data['asset'], timestamp=data['timestamp'], readings=123))
# No invalid reading made it into the queue.
assert (0 == len(Ingest._readings_lists[0]))
.asyncio
async def test_add_readings_when_one_list_becomes_full(self, mocker):
"""When a list reaches capacity, the next reading goes into the next list."""
data = {'timestamp': '2017-01-02T01:02:03.23232Z-05:00', 'asset': 'pump1', 'readings': {'velocity': '500', 'temperature': {'value': '32', 'unit': 'kelvin'}}}
# Two insert workers, each list holding a single reading.
Ingest._max_concurrent_readings_inserts = 2
Ingest._readings_list_size = 1
Ingest._readings_insert_batch_size = 1
Ingest._current_readings_list_index = 0
Ingest._readings_lists = []
Ingest._readings_lists.append([])
Ingest._readings_lists.append([])
Ingest._readings_list_not_empty = []
Ingest._readings_list_not_empty.append(asyncio.Event())
Ingest._readings_list_not_empty.append(asyncio.Event())
Ingest._readings_list_batch_size_reached = []
Ingest._readings_list_batch_size_reached.append(asyncio.Event())
Ingest._readings_list_batch_size_reached.append(asyncio.Event())
Ingest._started = True
mocker.patch.object(Ingest, '_write_statistics', return_value=(await mock_coro()))
mocker.patch.object(Ingest, '_insert_readings', return_value=(await mock_coro()))
mocker.patch.object(MicroserviceManagementClient, '__init__', return_value=None)
mocker.patch.object(MicroserviceManagementClient, 'create_asset_tracker_event', return_value=None)
assert (0 == len(Ingest._readings_lists[0]))
assert ('PUMP1' not in list(Ingest._sensor_stats.keys()))
(await Ingest.add_readings(asset=data['asset'], timestamp=data['timestamp'], readings=data['readings']))
(await Ingest.add_readings(asset=data['asset'], timestamp=data['timestamp'], readings=data['readings']))
# The two readings were distributed across both lists.
assert (1 == len(Ingest._readings_lists[0]))
assert (1 == len(Ingest._readings_lists[1]))
def _showLayer(layer):
    """Render a CALayer offscreen via lldb expressions and display the image.

    Bails out early (with a message) when the layer has zero width or height.
    """
    layer_expr = '({})'.format(layer)
    size_expr = '((CGRect)[(id){} bounds]).size'.format(layer_expr)
    width = float(fb.evaluateExpression('(CGFloat)({}.width)'.format(size_expr)))
    height = float(fb.evaluateExpression('(CGFloat)({}.height)'.format(size_expr)))
    if width == 0.0 or height == 0.0:
        print('Nothing to see here - the size of this element is {} x {}.'.format(width, height))
        return
    # Render the layer into a fresh image context at the device scale.
    fb.evaluateEffect('UIGraphicsBeginImageContextWithOptions({}, NO, 0.0)'.format(size_expr))
    fb.evaluateEffect('[(id){} renderInContext:(void *)UIGraphicsGetCurrentContext()]'.format(layer_expr))
    result = fb.evaluateExpressionValue('(UIImage *)UIGraphicsGetImageFromCurrentImageContext()')
    error = result.GetError()
    if error is not None and str(error) != 'success':
        print(error)
    else:
        _showImage(result.GetValue())
    # Always balance the Begin call above.
    fb.evaluateEffect('UIGraphicsEndImageContext()')
def build_jobs_list(main_job_names: typing.Sequence[str], releases: typing.Sequence[str], options: dict) -> typing.List[Job]:
    """Expand (job name, release) pairs into instantiated Job objects.

    Each job's dependencies are instantiated (or reused) recursively and
    appended ahead of their dependents, so the returned list is in a valid
    execution order.  ``options['buffer_output']`` is set as a side effect.
    """
    requested = []
    for name in main_job_names:
        job_class = AVAILABLE_JOBS[name]
        for release in releases:
            if release in job_class.skip_releases:
                continue
            if job_class.only_releases is not None and release not in job_class.only_releases:
                continue
            requested.append((job_class, dict(release=release)))
    # Buffer job output only when jobs may actually run interleaved.
    options['buffer_output'] = options['concurrency'] != 1 and len(requested) > 1
    jobs = []
    seen = {}

    def instantiate(job_class, job_args, parent=None):
        # Key on the class plus a canonical rendering of the kwargs so that
        # identical jobs are shared instead of duplicated.
        key = (job_class, ','.join(f'{k}={job_args[k]}' for k in sorted(job_args)))
        job = seen.get(key)
        is_new = job is None
        if is_new:
            job = job_class(**job_args, options=options)
            seen[key] = job
        if parent is not None:
            parent.depends_on.append(job)
        for dep_class, dep_args in job.get_dependencies():
            instantiate(dep_class, dep_args, parent=job)
        # Append after the dependencies so prerequisites come first.
        if is_new:
            jobs.append(job)

    for job_class, job_args in requested:
        instantiate(job_class, job_args)
    return jobs
def _downgrade_vfp_entries():
"""Rewrite FileObjectEntry.virtual_file_paths back to the legacy
root_uid -> ['uid|uid|...|path'] format built from the inclusion table
and the VirtualFilePath rows.
"""
bind = op.get_bind()
session = orm.Session(bind=bind)
# Map each child UID to the set of its parent UIDs.
child_to_parents = {}
for (parent, child) in session.execute(select(included_files_table.c.parent_uid, included_files_table.c.child_uid)):
child_to_parents.setdefault(child, set()).add(parent)
# uid -> list of UID chains leading to uid (presumably root-to-file chains
# from _generate_full_path -- confirm its semantics).
full_paths = {uid: _generate_full_path(uid, child_to_parents) for uid in child_to_parents}
root_uids = set()
for (uid, uid_path_list) in full_paths.items():
query = select(VirtualFilePath.file_path, VirtualFilePath.parent_uid).filter((VirtualFilePath.file_uid == uid))
# parent_uid -> all file paths of this file inside that parent.
path_dict = {}
for (path, parent_uid) in session.execute(query):
path_dict.setdefault(parent_uid, []).append(path)
vfp_dict = {}
for uid_list in uid_path_list:
# First element is the chain's root; second-to-last is the direct parent.
(root_uid, parent_uid) = (uid_list[0], uid_list[(- 2)])
root_uids.add(root_uid)
vfp_dict.setdefault(root_uid, [])
for path in path_dict[parent_uid]:
# Legacy format: pipe-joined chain (minus the file itself) plus the path.
vfp_dict[root_uid].append('|'.join((uid_list[:(- 1)] + [path])))
fo_entry = session.get(FileObjectEntry, uid)
fo_entry.virtual_file_paths = vfp_dict
# Root objects reference themselves in the legacy format.
for uid in root_uids:
fo_entry = session.get(FileObjectEntry, uid)
fo_entry.virtual_file_paths = {uid: [uid]}
session.commit()
class EfRegister(Register):
    """Register of SGR-based text styles (bold, italic, underline, ...)."""

    def __init__(self):
        super().__init__()
        self.renderfuncs[Sgr] = renderfunc.sgr
        # Short and long aliases for the basic SGR attributes; each alias
        # gets its own Style/Sgr instance, mirroring the original definitions.
        aliases = (
            ('b', 1), ('bold', 1),
            ('dim', 2),
            ('i', 3), ('italic', 3),
            ('u', 4), ('underl', 4),
            ('blink', 5),
            ('inverse', 7),
            ('hidden', 8),
            ('strike', 9),
        )
        for attr, code in aliases:
            setattr(self, attr, Style(Sgr(code)))
        # "rs" resets every attribute that the styles above can set.
        self.rs = Style(Sgr(22), Sgr(23), Sgr(24), Sgr(25), Sgr(27), Sgr(28), Sgr(29))
def test_clean_pop(app):
"""Both teardown callbacks fire when the request context pops, even though
the request-teardown handler raises, and no app context survives afterwards.

NOTE(review): the bare "_request" / "_appcontext" lines are mangled
"@app.teardown_request" / "@app.teardown_appcontext" decorators lost in
extraction -- restore them when repairing this file.
"""
app.testing = False
called = []
_request
def teardown_req(error=None):
# Deliberately raises to exercise error handling during request teardown.
(1 / 0)
_appcontext
def teardown_app(error=None):
called.append('TEARDOWN')
try:
with app.test_request_context():
called.append(flask.current_app.name)
except ZeroDivisionError:
pass
assert (called == ['flask_test', 'TEARDOWN'])
assert (not flask.current_app)
class Ticket(SoftDeletionModel):
"""SQLAlchemy model for an event ticket: pricing, sales window, ordering
limits, and relationships to orders, tags and access/discount codes.

NOTE(review): several methods below read like they originally carried
"@property" decorators that were lost in extraction -- e.g.
raise_if_unavailable tests "not self.is_available", which is always False
for a bound method.  Confirm against the upstream source before use.
"""
__tablename__ = 'tickets'
# A ticket name is unique per event per soft-deletion state.
__table_args__ = (db.UniqueConstraint('name', 'event_id', 'deleted_at', name='name_event_deleted_at_uc'),)
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String, nullable=False)
description = db.Column(db.String)
is_description_visible = db.Column(db.Boolean)
type = db.Column(db.String, nullable=False)
quantity = db.Column(db.Integer, default=100)
position = db.Column(db.Integer, default=1)
price = db.Column(db.Float)
min_price = db.Column(db.Float, default=0, nullable=False)
max_price = db.Column(db.Float, default=0)
is_fee_absorbed = db.Column(db.Boolean, default=False)
sales_starts_at = db.Column(db.DateTime(timezone=True), nullable=False)
sales_ends_at = db.Column(db.DateTime(timezone=True), nullable=False)
is_hidden = db.Column(db.Boolean, default=False)
min_order = db.Column(db.Integer, default=1)
max_order = db.Column(db.Integer, default=10)
is_checkin_restricted = db.Column(db.Boolean)
auto_checkin_enabled = db.Column(db.Boolean, default=False)
event_id = db.Column(db.Integer, db.ForeignKey('events.id', ondelete='CASCADE'))
event = db.relationship('Event', backref='tickets_')
tags = db.relationship('TicketTag', secondary=ticket_tags_table, backref='tickets')
order_ticket = db.relationship('OrderTicket', backref='ticket', passive_deletes=True)
access_codes = db.relationship('AccessCode', secondary=access_codes_tickets, backref='tickets')
discount_codes = db.relationship('DiscountCode', secondary=discount_codes_tickets, backref='tickets')
form_id = db.Column(db.String)
badge_id = db.Column(db.String)
def has_order_tickets(self):
"""Return True when any non-deleted order references this ticket."""
# Imported locally, presumably to avoid a circular import -- confirm.
from app.api.helpers.db import get_count
orders = Order.id.in_(OrderTicket.query.with_entities(OrderTicket.order_id).filter_by(ticket_id=self.id).all())
count = get_count(Order.query.filter(orders).filter((Order.status != 'deleted')))
return bool((count > 0))
def has_completed_order_tickets(self):
"""Return True when any completed or placed order references this ticket."""
order_tickets = OrderTicket.query.filter_by(ticket_id=self.id)
count = 0
for order_ticket in order_tickets:
order = Order.query.filter_by(id=order_ticket.order_id).first()
if ((order.status == 'completed') or (order.status == 'placed')):
count += 1
return bool((count > 0))
def tags_csv(self):
"""Return this ticket's tag names as a comma-separated string."""
tag_names = [tag.name for tag in self.tags]
return ','.join(tag_names)
def has_current_orders(self):
"""Return True when an active (completed/placed/pending/initializing)
order holds this ticket."""
return db.session.query(Order.query.join(TicketHolder).filter((TicketHolder.ticket_id == self.id), or_((Order.status == 'completed'), (Order.status == 'placed'), (Order.status == 'pending'), (Order.status == 'initializing'))).exists()).scalar()
def reserved_count(self):
"""Return the number of sold plus currently reserved tickets."""
from app.api.attendees import get_sold_and_reserved_tickets_count
return get_sold_and_reserved_tickets_count(self.id)
def is_available(self):
"""Return True while reservations are below the quantity cap."""
return (self.reserved_count < self.quantity)
def raise_if_unavailable(self):
"""Raise ConflictError when the ticket is sold out."""
if (not self.is_available):
raise ConflictError({'id': self.id}, f'Ticket "{self.name}" already sold out')
def __repr__(self):
return ('<Ticket %r>' % self.name)
class ContactResponse(ModelComposed):
"""OpenAPI-generator composed model: Contact + ContactResponseAllOf + Timestamps.

NOTE(review): the bare "_property" and "_js_args_to_python_args" lines are
mangled decorators -- in openapi-generator output these are
"@cached_property" and "@convert_js_args_to_python_args" respectively, and
"_from_openapi_data" normally also carries "@classmethod".  Restore them
when repairing this file.
"""
# Enumerated values accepted for each constrained attribute.
allowed_values = {('contact_type',): {'PRIMARY': 'primary', 'BILLING': 'billing', 'TECHNICAL': 'technical', 'SECURITY': 'security', 'EMERGENCY': 'emergency', 'GENERAL_COMPLIANCE': 'general compliance'}}
validations = {}
_property
def additional_properties_type():
"""Types accepted for additional (undeclared) properties."""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type)
_nullable = False
_property
def openapi_types():
"""Map attribute name -> tuple of accepted types."""
lazy_import()
return {'user_id': (str, none_type), 'contact_type': (str,), 'name': (str, none_type), 'email': (str, none_type), 'phone': (str, none_type), 'customer_id': (str, none_type), 'created_at': (datetime, none_type), 'deleted_at': (datetime, none_type), 'updated_at': (datetime, none_type), 'id': (str,)}
_property
def discriminator():
return None
attribute_map = {'user_id': 'user_id', 'contact_type': 'contact_type', 'name': 'name', 'email': 'email', 'phone': 'phone', 'customer_id': 'customer_id', 'created_at': 'created_at', 'deleted_at': 'deleted_at', 'updated_at': 'updated_at', 'id': 'id'}
read_only_vars = {'created_at', 'deleted_at', 'updated_at', 'id'}
_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs):
"""Instantiate from server-provided data; read-only attrs are allowed."""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
# Distribute kwargs across the composed (allOf) schema instances.
composed_info = validate_get_composed_info(constant_args, kwargs, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
discarded_args = composed_info[3]
for (var_name, var_value) in kwargs.items():
if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
continue
setattr(self, var_name, var_value)
return self
required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances'])
_js_args_to_python_args
def __init__(self, *args, **kwargs):
"""Instantiate from user-supplied kwargs; read-only attrs are rejected."""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
composed_info = validate_get_composed_info(constant_args, kwargs, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
discarded_args = composed_info[3]
for (var_name, var_value) in kwargs.items():
if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
continue
setattr(self, var_name, var_value)
# Server-managed attributes may not be set by callers.
if (var_name in self.read_only_vars):
raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
_property
def _composed_schemas():
"""Schema composition, resolved lazily to tolerate circular imports."""
lazy_import()
return {'anyOf': [], 'allOf': [Contact, ContactResponseAllOf, Timestamps], 'oneOf': []}
class TimeBasedResultsCache(ResultsCacheBase):
    """Results cache whose entries expire after a fixed time-to-live.

    Note: the backing dicts are class attributes, so every instance shares
    one cache (matching the original implementation).
    """

    _cache = {}
    _cache_expiration = {}
    # Entries live for one hour.
    _default_expiration_in_seconds = 3600

    def get(self, key):
        """Return the cached value for *key*, or None when absent or expired.

        Expired entries are evicted from both dicts as a side effect.
        """
        expires_at = self._cache_expiration.get(key)
        if expires_at is None or key not in self._cache:
            return None
        if expires_at < datetime.datetime.utcnow():
            # Stale: drop the value and its expiration timestamp.
            self._cache.pop(key, None)
            self._cache_expiration.pop(key, None)
            return None
        return self._cache[key]

    def set(self, key, value):
        """Store *value* under *key* with a fresh expiration timestamp."""
        self._cache[key] = value
        ttl = datetime.timedelta(seconds=self._default_expiration_in_seconds)
        self._cache_expiration[key] = datetime.datetime.utcnow() + ttl
class SimpleGridModel(GridModel):
    """A GridModel backed by a plain 2D sequence ``data``.

    ``rows`` and ``columns`` optionally provide per-row/per-column labels and
    read-only flags; when they are None, labels default to 1-based indices
    and every row/column is writable.
    """

    # The 2D sequence of cell values (a list of rows).
    data = Any()

    # Optional row/column metadata.
    rows = Union(None, List(Instance(GridRow)))
    columns = Union(None, List(Instance(GridColumn)))

    def get_column_count(self):
        """Return the column count (metadata length, else width of row 0)."""
        if self.columns is not None:
            return len(self.columns)
        return len(self.data[0])

    def get_column_name(self, index):
        """Return the label for column *index*; '' when metadata is too short."""
        if self.columns is not None:
            try:
                return self.columns[index].label
            except IndexError:
                return ''
        return str(index + 1)

    def get_cols_drag_value(self, cols):
        """Return drag data for *cols*: one column's values, or a list of them."""
        if len(cols) == 1:
            return self.__get_data_column(cols[0])
        return [self.__get_data_column(col) for col in cols]

    def is_column_read_only(self, index):
        """Return True when column metadata marks *index* read-only."""
        read_only = False
        if self.columns is not None:
            try:
                read_only = self.columns[index].read_only
            except IndexError:
                pass
        return read_only

    def get_row_count(self):
        """Return the row count (metadata length, else len(data))."""
        if self.rows is not None:
            return len(self.rows)
        return len(self.data)

    def get_row_name(self, index):
        """Return the label for row *index*; falls back to the 1-based index."""
        if self.rows is not None:
            try:
                return self.rows[index].label
            except IndexError:
                return str(index + 1)
        return str(index + 1)

    def get_rows_drag_value(self, rows):
        """Return drag data for *rows*: one row's values, or a list of them."""
        if len(rows) == 1:
            return self.__get_data_row(rows[0])
        return [self.__get_data_row(row) for row in rows]

    def is_row_read_only(self, index):
        """Return True when row metadata marks *index* read-only."""
        read_only = False
        if self.rows is not None:
            try:
                read_only = self.rows[index].read_only
            except IndexError:
                pass
        return read_only

    def get_value(self, row, col):
        """Return the value at (row, col), or '' when out of range."""
        try:
            return self.data[row][col]
        except IndexError:
            return ''

    def is_cell_empty(self, row, col):
        """Return True when (row, col) is out of range or holds None."""
        if row >= self.get_row_count() or col >= self.get_column_count():
            return True
        try:
            return self.get_value(row, col) is None
        except IndexError:
            return True

    def get_cell_context_menu(self, row, col):
        """Return a context menu offering a Copy action for the cell."""
        return MenuManager(Group(_CopyAction(self, row, col, name='Copy'), id='Group'))

    def is_cell_editable(self, row, col):
        """Every cell is editable at the cell level."""
        return True

    def _set_value(self, row, col, value):
        """Set (row, col) to *value*, growing the data by one zero-filled row
        when *row* is just past the end.  Returns the rows added (0 or 1)."""
        new_rows = 0
        try:
            self.data[row][col] = value
        except IndexError:
            # BUGFIX: was self.GetNumberCols() -- a wx grid-table API this
            # model does not define; use the model's own column count.
            self.data.append([0] * self.get_column_count())
            self.data[row][col] = value
            new_rows = 1
        return new_rows

    def _delete_rows(self, pos, num_rows):
        """Delete *num_rows* rows starting at *pos*; the count is clamped to
        the rows actually available.  Returns the number removed."""
        if (pos + num_rows) >= self.get_row_count():
            # BUGFIX: was self.get_rows_count() -- a typo (AttributeError).
            num_rows = self.get_row_count() - pos
        # BUGFIX: was `del self.data[(pos, pos + num_rows)]`, which indexes
        # the list with a tuple (TypeError); a slice deletion was intended.
        del self.data[pos:(pos + num_rows)]
        return num_rows

    def __get_data_column(self, col):
        """Return column *col* as a list, padding missing cells with None."""
        coldata = []
        for row in range(self.get_row_count()):
            try:
                coldata.append(self.get_value(row, col))
            except IndexError:
                coldata.append(None)
        return coldata

    def __get_data_row(self, row):
        """Return row *row* as a list, padding missing cells with None."""
        rowdata = []
        for col in range(self.get_column_count()):
            try:
                rowdata.append(self.get_value(row, col))
            except IndexError:
                rowdata.append(None)
        return rowdata
def sentence_split_and_markup(text, max=50, lang='auto', speaker_lang=None):
    """Split *text* into sentences and wrap each chunk in language tags.

    ``max`` is the sentence-length limit (``<= 0`` disables splitting);
    ``lang='mix'`` passes the text through untouched.
    """
    # If the speaker supports exactly one language, force it unless the
    # caller asked for automatic or mixed handling.
    if speaker_lang is not None and len(speaker_lang) == 1:
        if lang.upper() not in ['AUTO', 'MIX'] and lang.lower() != speaker_lang[0]:
            logging.debug(
                f'lang "{lang}" is not in speaker_lang {speaker_lang},automatically set lang={speaker_lang[0]}')
            lang = speaker_lang[0]

    def _markup(chunk):
        # AUTO delegates per-language tagging; otherwise tag with the
        # requested language explicitly.
        if lang.upper() == 'AUTO':
            return markup_language(chunk, speaker_lang)
        return f'[{lang.upper()}]{chunk}[{lang.upper()}]'

    sentences_list = []
    if lang.upper() == 'MIX':
        sentences_list.append(text)
    elif max <= 0:
        sentences_list.append(_markup(text))
    else:
        for sentence in sentence_split(text, max):
            if check_is_none(sentence):
                continue
            sentences_list.append(_markup(sentence))
    for sentence in sentences_list:
        logging.debug(sentence)
    return sentences_list
class Twente(flx.Widget):
"""Flexx widget plotting average monthly temperatures with a smoothing slider.

NOTE(review): _update_plot presumably carried a "@flx.reaction(...)"
decorator (reacting to month/smoothing changes) that was lost in
extraction -- confirm against the upstream example.
"""
def init(self):
# Layout: centered control column (month picker + smoothing slider)
# next to the plot area.
with flx.HFix():
flx.Widget(flex=1)
with flx.VBox(flex=0, minsize=200):
with flx.GroupWidget(title='Plot options'):
flx.Label(text='Month')
self.month = flx.ComboBox(options=months, selected_index=12, style='width: 100%')
self.smoothing_label = flx.Label(text='Smoothing')
self.smoothing = flx.Slider(max=20, step=2, text='{value} samples')
flx.Widget(flex=3)
with flx.VBox(flex=4):
self.plot = flx.PlotWidget(flex=1, xdata=years, yrange=((- 5), 20), title='Average monthly temperature', xlabel='year', ylabel=u'temperature (C)')
flx.Widget(flex=1)
def _update_plot(self):
# Centered moving average of the selected month's series, with the
# window clamped at both ends of the data.
smoothing = self.smoothing.value
yy1 = data[self.month.selected_index]
yy2 = []
sm2 = int((smoothing / 2))
for i in range(len(yy1)):
val = 0
n = 0
for j in range(max(0, (i - sm2)), min(len(yy1), ((i + sm2) + 1))):
val += yy1[j]
n += 1
if (n == 0):
yy2.append(yy1[i])
else:
yy2.append((val / n))
self.plot.set_data(self.plot.xdata, yy2)
class NeighborsConfListener(BaseConfListener):
    """Abstract listener for neighbor-configuration add/remove events.

    Subclasses must override both event handlers below.
    """

    def __init__(self, neighbors_conf):
        super(NeighborsConfListener, self).__init__(neighbors_conf)
        # Wire both configuration events to the (abstract) handlers.
        handlers = (
            (NeighborsConf.ADD_NEIGH_CONF_EVT, self.on_add_neighbor_conf),
            (NeighborsConf.REMOVE_NEIGH_CONF_EVT, self.on_remove_neighbor_conf),
        )
        for event, handler in handlers:
            neighbors_conf.add_listener(event, handler)

    def on_add_neighbor_conf(self, evt):
        raise NotImplementedError('This method should be overridden.')

    def on_remove_neighbor_conf(self, evt):
        raise NotImplementedError('This method should be overridden.')
def test_non_symmetric_custom_medium_to_gds(tmp_path):
    """Export a non-symmetric CustomMedium structure to GDS and verify the
    thresholded permittivity region's extent and point membership.

    (Removed a leftover debug ``print(data.min(), data.max())`` from the
    original test; assertions are unchanged.)
    """
    geometry = td.Box(size=(1, 2, 1), center=(0.5, 0, 2.5))
    (nx, ny, nz) = (150, 80, 180)
    x = np.linspace(0, 2, nx)
    y = np.linspace((- 1), 1, ny)
    z = np.linspace(2, 3, nz)
    f = np.array([td.C_0])
    (mx, my, mz, _) = np.meshgrid(x, y, z, f, indexing='ij', sparse=True)
    # Permittivity grows linearly with x and quadratically away from z=2;
    # it is constant in y.
    data = (((1 + mx) + (0 * my)) + ((mz - 2) ** 2))
    eps_diagonal_data = td.ScalarFieldDataArray(data, coords=dict(x=x, y=y, z=z, f=f))
    eps_components = {f'eps_{d}{d}': eps_diagonal_data for d in 'xyz'}
    eps_dataset = td.PermittivityDataset(**eps_components)
    medium = td.CustomMedium(eps_dataset=eps_dataset, name='my_medium')
    structure = td.Structure(geometry=geometry, medium=medium)
    fname = str((tmp_path / 'structure-non-symmetric.gds'))
    structure.to_gds_file(fname, y=0, permittivity_threshold=2.0, frequency=td.C_0)
    # Round-trip through gdstk and check bounding box plus sample points.
    cell = gdstk.read_gds(fname).cells[0]
    assert np.allclose(cell.bounding_box(), ((0, 2), (1, 3)), atol=0.1)
    assert (gdstk.inside([(0.1, 2.1), (0.5, 2.5), (0.9, 2.9)], cell.polygons) == (False, False, True))
class OptionSeriesColumnrangeSonificationDefaultinstrumentoptionsMappingPlaydelay(Options):
"""Generated wrapper for the Highcharts sonification "playDelay" mapping
options on columnrange series.

NOTE(review): each same-named pair of defs below was originally a
"@property" getter followed by its "@<name>.setter"; the decorators were
lost in extraction, so as written the second definition silently
overrides the getter.  Restore the decorators when repairing this file.
"""
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str):
self._config(text, js_type=False)
def max(self):
return self._config_get(None)
def max(self, num: float):
self._config(num, js_type=False)
def min(self):
return self._config_get(None)
def min(self, num: float):
self._config(num, js_type=False)
def within(self):
return self._config_get(None)
def within(self, value: Any):
self._config(value, js_type=False)
# NOTE(review): the bare tuple/string lines below (and the similar lines inside
# the class) are mangled "@mock.patch(...)" decorators lost in extraction --
# class-level ones apply to every test; method-level ones to the test below.
('builtins.open', mock_open(read_data='opened_file'))
('aea.cli.registry.push.check_is_author_logged_in')
('aea.cli.registry.push.list_missing_packages', return_value=[])
('aea.cli.registry.utils._rm_tarfiles')
('aea.cli.registry.push.os.getcwd', return_value='cwd')
('aea.cli.registry.push._compress_dir')
('aea.cli.registry.push.load_yaml', return_value={'description': 'some-description', 'version': PublicIdMock.DEFAULT_VERSION, 'author': 'some_author', 'name': 'some_name', 'protocols': ['some/protocol:0.1.2']})
('aea.cli.registry.push.request_api', return_value={'public_id': 'public-id'})
class PushItemTestCase(TestCase):
"""Tests for aea.cli.registry.push.push_item."""
('aea.cli.registry.push.os.path.exists', return_value=True)
('aea.cli.registry.push.is_readme_present', return_value=True)
def test_push_item_positive(self, is_readme_present_mock, path_exists_mock, request_api_mock, load_yaml_mock, compress_mock, getcwd_mock, rm_tarfiles_mock, check_is_author_logged_in_mock, *_):
"""push_item uploads both the package file and the readme when present."""
public_id = PublicIdMock(name='some_name', author='some_author', version='{}'.format(PublicIdMock.DEFAULT_VERSION))
push_item(ContextMock(), 'some-type', public_id)
request_api_mock.assert_called_once_with('POST', '/some-types/create', data={'name': 'some_name', 'description': 'some-description', 'version': PublicIdMock.DEFAULT_VERSION, 'protocols': ['some/protocol:0.1.2']}, is_auth=True, files={'file': open('file.1'), 'readme': open('file.2')})
('aea.cli.registry.push.os.path.exists', return_value=True)
('aea.cli.registry.push.is_readme_present', return_value=True)
def test_push_dependency_fail(self, is_readme_present_mock, path_exists_mock, request_api_mock, load_yaml_mock, compress_mock, getcwd_mock, rm_tarfiles_mock, check_is_author_logged_in_mock, *_):
"""push_item aborts with ClickException when dependencies are missing."""
public_id = PublicIdMock(name='some_name', author='some_author', version='{}'.format(PublicIdMock.DEFAULT_VERSION))
with patch('aea.cli.registry.push.list_missing_packages', return_value=[('some', PublicId.from_str('some/pack:0.1.0'))]):
with pytest.raises(ClickException, match='Found missing dependencies! Push canceled!'):
push_item(ContextMock(), 'some-type', public_id)
('aea.cli.registry.push.os.path.exists', return_value=True)
('aea.cli.registry.push.is_readme_present', return_value=False)
def test_push_item_positive_without_readme(self, is_readme_present_mock, path_exists_mock, request_api_mock, *mocks):
"""Only the package file is uploaded when no readme exists."""
public_id = PublicIdMock(name='some_name', author='some_author', version='{}'.format(PublicIdMock.DEFAULT_VERSION))
push_item(ContextMock(), 'some-type', public_id)
request_api_mock.assert_called_once_with('POST', '/some-types/create', data={'name': 'some_name', 'description': 'some-description', 'version': PublicIdMock.DEFAULT_VERSION, 'protocols': ['some/protocol:0.1.2']}, is_auth=True, files={'file': open('opened_file', 'r')})
('aea.cli.registry.push.os.path.exists', return_value=False)
def test_push_item_item_not_found(self, path_exists_mock, request_api_mock, load_yaml_mock, compress_mock, getcwd_mock, rm_tarfiles_mock, check_is_author_logged_in_mock, *_):
"""push_item raises ClickException when the item directory is missing."""
with self.assertRaises(ClickException):
push_item(ContextMock(), 'some-type', PublicIdMock())
request_api_mock.assert_not_called()
class OptionSeriesSunburstSonificationTracksMappingTime(Options):
    """Config accessors for the sunburst series sonification ``time`` mapping.

    Each option appears as a getter/setter pair sharing one name: the getter
    reads via ``self._config_get(None)`` (default ``None``) and the setter
    writes via ``self._config(value, js_type=False)``.

    NOTE(review): as written, each second def simply shadows the first —
    the ``@property`` / ``@<name>.setter`` decorators were most likely
    stripped during extraction; confirm against the original source.
    """
    def mapFunction(self):
        # Getter: custom mapping function (default None).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: store a mapping function; not serialized as raw JS.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: target property name to map to.
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Getter: upper bound of the mapped range.
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Getter: lower bound of the mapped range.
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # Getter: scope the mapping applies within.
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the two lines below look like stripped click decorators
# (e.g. ``@_group.command('coverage')`` and an ``@click.option(...)`` call
# whose '@click.option' prefix was lost) — as written the second is not
# valid syntax; confirm against the original source.
_group.command('coverage')
('-o', '--os-filter', default='all', help='Filter rule coverage summary by OS. (E.g. windows) Default: all')
def rta_coverage(os_filter: str):
    """Print a summary of which detection rules are covered by triggered RTA rules,
    optionally filtered by operating system."""
    all_rules = RuleCollection.default()
    triggered_rules = get_triggered_rules()
    # Map triggered RTA rules onto the full rule set to compute coverage.
    coverage_map = build_coverage_map(triggered_rules, all_rules)
    all_rule_count = len(all_rules.rules)
    # NOTE(review): 'print_converage_summary' looks like a typo for
    # 'print_coverage_summary' carried over from the defining module — verify.
    print_converage_summary(coverage_map, all_rule_count, os_filter)
# NOTE(review): "(scope='function')" below looks like a stripped
# ``@pytest.fixture(scope='function')`` decorator — as written it is not
# valid Python; confirm against the original source.
(scope='function')
def privacy_request_awaiting_consent_email_send(db: Session, consent_policy: Policy) -> PrivacyRequest:
    """Yield a PrivacyRequest in the ``awaiting_email_send`` state for a consent
    policy; the request is deleted from the DB on fixture teardown."""
    privacy_request = _create_privacy_request_for_policy(db, consent_policy)
    privacy_request.status = PrivacyRequestStatus.awaiting_email_send
    privacy_request.save(db)
    (yield privacy_request)
    # Teardown: remove the request once the test using the fixture completes.
    privacy_request.delete(db)
class OptionSeriesHistogramSonificationDefaultinstrumentoptionsMappingTime(Options):
    """Config accessors for the histogram series default-instrument ``time`` mapping.

    Each option is a getter/setter pair sharing one name: the getter reads via
    ``self._config_get(None)`` (default ``None``) and the setter writes via
    ``self._config(value, js_type=False)``.

    NOTE(review): as written, each second def shadows the first — the
    ``@property`` / ``@<name>.setter`` decorators were most likely stripped
    during extraction; confirm against the original source.
    """
    def mapFunction(self):
        # Getter: custom mapping function (default None).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: store a mapping function; not serialized as raw JS.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: target property name to map to.
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Getter: upper bound of the mapped range.
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Getter: lower bound of the mapped range.
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # Getter: scope the mapping applies within.
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def delete_snapshot(repo_name, snapshot_name):
    """Delete a named snapshot from an Elasticsearch snapshot repository.

    Args:
        repo_name: Name of the snapshot repository.
        snapshot_name: Name of the snapshot to delete.

    Both arguments are required; if either is missing/falsy, an error is
    logged and nothing is deleted. Failures from the ES client are caught
    and logged rather than propagated (best-effort semantics preserved).
    """
    # Guard clause: bail out early instead of nesting the whole body.
    if not (repo_name and snapshot_name):
        logger.error(' Please provide both snapshot and repository names.')
        return
    # Ensure the repository is registered/configured before deleting from it.
    configure_snapshot_repository(repo_name)
    try:
        logger.info(' Deleting snapshot {0} from {1} ...'.format(snapshot_name, repo_name))
        es_client = create_es_client()
        es_client.snapshot.delete(repository=repo_name, snapshot=snapshot_name)
        logger.info(' The snapshot {0} from {1} is deleted successfully.'.format(snapshot_name, repo_name))
    except Exception as err:
        # Fixed typo in log message: 'occured' -> 'occurred'.
        logger.error(' Error occurred in delete_snapshot.{0}'.format(err))
def generate_lists(install_dir):
    """Render a ``loci/src/<list>.c`` source file for every distinct list
    oftype used by any data member across all unified class versions."""
    # Gather every list-valued oftype exactly once.
    list_types = {
        member.oftype
        for unified_cls in loxi_globals.unified.classes
        for versioned_cls in unified_cls.version_classes.values()
        for member in versioned_cls.members
        if isinstance(member, ir.OFDataMember) and loxi_utils.oftype_is_list(member.oftype)
    }
    # Deterministic output order: render in sorted oftype order.
    for list_type in sorted(list_types):
        cls, e_cls = loxi_utils_legacy.list_name_extract(list_type)
        e_cls = e_cls[:-2]  # trim the trailing two characters of the element class name
        e_uclass = loxi_globals.unified.class_by_name(e_cls)
        with template_utils.open_output(install_dir, 'loci/src/%s.c' % cls) as out:
            util.render_template(
                out,
                'list.c',
                cls=cls,
                e_cls=e_cls,
                e_uclass=e_uclass,
                wire_length_get=class_metadata_dict[e_cls].wire_length_get,
            )
            c_code_gen.gen_new_function_definitions(out, cls)
class CacheActiveDirectoryView(ActiveDirectoryView):
    """Offline ActiveDirectoryView backed by on-disk cache files instead of LDAP.

    Each *_FILTER class attribute is a lambda producing a cache-query spec:
    a dict with the cache ``files`` to read, an optional ``fmt`` ('json' or
    'lst'), and an optional per-record ``filter`` callable. Lambdas returning
    ``None`` mark queries this offline view does not support (``query``
    raises for them).
    """
    USER_LOCKED_FILTER = (lambda _: {'files': ['users_locked']})
    GROUPS_FILTER = (lambda _: {'files': ['groups']})
    USER_ALL_FILTER = (lambda _: {'files': ['users_all']})
    USER_SPN_FILTER = (lambda _: {'files': ['users_spn']})
    COMPUTERS_FILTER = (lambda _: {'files': ['machines']})
    ANR = (lambda _, u: {'files': ['users_all', 'groups', 'machines'], 'filter': (lambda record: eq_anr(record, u))})
    GROUP_DN_FILTER = (lambda _, g: {'fmt': 'json', 'files': ['groups'], 'filter': (lambda x: eq(x['sAMAccountName'], g))})
    ACCOUNTS_IN_GROUP_FILTER = (lambda _, p, g: {'fmt': 'json', 'files': ['users_all', 'groups', 'machines'], 'filter': (lambda x: ((('primaryGroupID' in x) and eq(p, x['primaryGroupID'])) or (('memberOf' in x) and (g in x['memberOf']))))})
    ACCOUNT_IN_GROUPS_FILTER = (lambda _, u: {'fmt': 'json', 'files': ['users_all', 'groups', 'machines'], 'filter': (lambda x: eq(x['sAMAccountName'], u))})
    DISTINGUISHED_NAME = (lambda _, n: {'fmt': 'json', 'files': ['users_all', 'groups', 'machines'], 'filter': (lambda x: eq(x['distinguishedName'], n))})
    PRIMARY_GROUP_ID = (lambda _, i: {'fmt': 'json', 'files': ['users_all', 'groups', 'machines'], 'filter': (lambda x: x['objectSid'].endswith(f'-{i}'))})
    AUTH_POLICIES_FILTER = (lambda _: {'files': ['auth_policies']})
    SILOS_FILTER = (lambda _: {'files': ['silos']})
    SILO_FILTER = (lambda _, s: {'fmt': 'json', 'files': ['silos'], 'filter': (lambda x: eq(x['cn'], s))})
    # The following queries are unsupported offline (query() raises on None).
    DOMAIN_INFO_FILTER = (lambda _: None)
    GPO_INFO_FILTER = (lambda _: None)
    OU_FILTER = (lambda _: None)
    PSO_INFO_FILTER = (lambda _: None)
    TRUSTS_INFO_FILTER = (lambda _: None)
    ZONES_FILTER = (lambda _: None)
    ZONE_FILTER = (lambda _: None)
    USER_ACCOUNT_CONTROL_FILTER = (lambda _, __: None)
    USER_ACCOUNT_CONTROL_FILTER_NEG = (lambda _, __: None)
    # NOTE(review): USER_LOCKED_FILTER is defined twice in this class body;
    # this later None definition overrides the 'users_locked' spec above,
    # effectively disabling the locked-users query — confirm intent.
    USER_LOCKED_FILTER = (lambda _: None)
    SMSA_FILTER = (lambda _: None)
    SHADOW_PRINCIPALS_FILTER = (lambda _: None)
    UNCONSTRAINED_DELEGATION_FILTER = (lambda _: None)
    CONSTRAINED_DELEGATION_FILTER = (lambda _: None)
    RESOURCE_BASED_CONSTRAINED_DELEGATION_FILTER = (lambda _: None)
    ALL_DELEGATIONS_FILTER = (lambda _: None)
    class CacheActiveDirectoryException(Exception):
        # Raised when a query is not supported by the offline cache view.
        pass
    class CacheActiveDirectoryDirNotFoundException(Exception):
        # Raised when the cache directory does not exist.
        pass
    def __init__(self, cache_dir='.', prefix='ldeep_'):
        """Open an existing cache directory of ``<prefix>_<name>.{json,lst}``
        files and derive the domain FQDN / base DN from the cached policy.

        Raises:
            CacheActiveDirectoryDirNotFoundException: if ``cache_dir`` is missing.
        """
        if (not path.exists(cache_dir)):
            raise self.CacheActiveDirectoryDirNotFoundException(f"{cache_dir} doesn't exist.")
        self.path = cache_dir
        self.prefix = prefix
        (self.fqdn, self.base_dn) = self.__get_domain_info()
        # Default to returning all attributes; throttle/page_size are
        # irrelevant offline but kept for interface parity with LDAP views.
        self.attributes = ALL
        self.throttle = 0
        self.page_size = 0
    def set_all_attributes(self, attributes=ALL):
        """Set which attributes subsequent JSON queries should retain."""
        self.attributes = attributes
    def all_attributes(self):
        """Return the currently selected attribute set."""
        return self.attributes
    def set_controls(self, controls):
        # LDAP controls have no meaning for the file-backed view; no-op.
        pass
    def query(self, cachefilter, attributes=[], base=None, scope=None, **filter_args):
        """Run a cache-query spec (see class docstring) against the cache files.

        Returns a list of JSON records (fmt='json') or stripped lines
        (fmt='lst'), optionally narrowed by the spec's 'filter' callable.
        Raises CacheActiveDirectoryException when the spec is None.
        """
        def scrub_json_from_key(obj, func):
            # Recursively delete any dict key / list element for which
            # func(...) is true (used to drop nTSecurityDescriptor blobs).
            if isinstance(obj, dict):
                for key in list(obj.keys()):
                    if func(key):
                        del obj[key]
                    else:
                        scrub_json_from_key(obj[key], func)
            elif isinstance(obj, list):
                # Iterate in reverse so deletions don't shift pending indices.
                for k in reversed(range(len(obj))):
                    if func(obj[k]):
                        del obj[k]
                    else:
                        scrub_json_from_key(obj[k], func)
        if (cachefilter is None):
            raise self.CacheActiveDirectoryException('Cache query not supported.')
        # Format priority: explicit spec 'fmt', else json when all
        # attributes are requested, else plain line lists.
        if ('fmt' in cachefilter):
            fmt = cachefilter['fmt']
        elif (ALL_ATTRIBUTES in attributes):
            fmt = 'json'
        else:
            fmt = 'lst'
        data = []
        for fil in cachefilter['files']:
            filename = '{prefix}_{file}.{ext}'.format(prefix=self.prefix, file=fil, ext=fmt)
            if (fmt == 'json'):
                # Parsed JSON is memoized per path in FILE_CONTENT_DICT.
                if (path.join(self.path, filename) in FILE_CONTENT_DICT):
                    json = FILE_CONTENT_DICT[path.join(self.path, filename)]
                else:
                    fp = open(path.join(self.path, filename))
                    json = json_load(fp)
                    if ('ntSecurityDescriptor' not in self.attributes):
                        scrub_json_from_key(json, (lambda x: (x == 'nTSecurityDescriptor')))
                    FILE_CONTENT_DICT[path.join(self.path, filename)] = json
                if ('filter' in cachefilter):
                    for record in json:
                        if cachefilter['filter'](record):
                            data.append(record)
                else:
                    data += json
            else:
                # NOTE(review): for 'lst' files the open file object itself is
                # cached; a second query on the same file reuses an exhausted
                # handle and would yield no lines — confirm against original.
                if (path.join(self.path, filename) in FILE_CONTENT_DICT):
                    fp = FILE_CONTENT_DICT[path.join(self.path, filename)]
                else:
                    fp = open(path.join(self.path, filename))
                    FILE_CONTENT_DICT[path.join(self.path, filename)] = fp
                if ('filter' in cachefilter):
                    for line in fp:
                        # .lst rows only carry an account name per line.
                        x = {'sAMAccountName': line.strip()}
                        if cachefilter['filter'](x):
                            data += [line.strip()]
                else:
                    data += map((lambda x: x.strip()), fp.readlines())
        return data
    def resolve_sid(self, sid):
        """Resolve a SID to its cached record(s), or a well-known-SID name.

        Raises ActiveDirectoryInvalidSID (inherited) if the SID is invalid
        or unknown.
        """
        if (sid in WELL_KNOWN_SIDS):
            return WELL_KNOWN_SIDS[sid]
        elif validate_sid(sid):
            results = self.query({'fmt': 'json', 'files': ['users_all', 'groups', 'machines'], 'filter': (lambda x: (x['objectSid'] == sid))})
            if results:
                return results
        raise self.ActiveDirectoryInvalidSID(f'SID: {sid}')
    def resolve_guid(self, guid):
        """Resolve a GUID to its cached record(s); raise if invalid/unknown."""
        if validate_guid(guid):
            results = self.query({'fmt': 'json', 'files': ['users_all', 'groups', 'machines'], 'filter': (lambda x: (x['objectGUID'] == guid))})
            if results:
                return results
        raise self.ActiveDirectoryInvalidGUID(f'GUID: {guid}')
    def get_sddl(self, *kwargs):
        # Security-descriptor retrieval requires a live connection.
        raise NotImplementedError
    def __get_domain_info(self):
        """Parse the cached domain policy file for the base DN and derive the
        domain FQDN from it (DC=a,DC=b -> a.b).

        NOTE(review): returns None implicitly if no 'distinguishedName:' line
        is found, which would make __init__'s tuple unpacking fail — assumes
        the cache file always contains one.
        """
        filename = '{prefix}_domain_policy.lst'.format(prefix=self.prefix)
        with open(path.join(self.path, filename)) as fp:
            for line in fp:
                if line.startswith('distinguishedName:'):
                    base = line.split(' ')[1].strip()
                    domain = base.replace('DC=', '.')[1:].replace(',', '')
                    return (domain, base)
class TestAccount(BaseEvenniaCommandTest):
    """Smoke tests for the default Evennia account-level commands
    (OOC look, IC/OOC switching, password, character management, quelling).

    NOTE(review): the large tuple-of-tuples below looks like a stripped
    parameterized-test decorator (e.g. ``@parameterized.expand([...])``)
    supplying (multisession_mode, auto_puppet, max_nr_chars, expected_result)
    for test_ooc_look; as written it is a no-op expression statement —
    confirm against the original source.
    """
    ([(0, True, 1, 'You are out-of-character'), (1, True, 1, 'You are out-of-character'), (2, True, 1, 'You are out-of-character'), (3, True, 1, 'You are out-of-character'), (0, False, 1, 'Account TestAccount'), (1, False, 1, 'Account TestAccount'), (2, False, 1, 'Account TestAccount'), (3, False, 1, 'Account TestAccount'), (0, True, 2, 'Account TestAccount'), (1, True, 2, 'Account TestAccount'), (2, True, 2, 'Account TestAccount'), (3, True, 2, 'Account TestAccount'), (0, False, 2, 'Account TestAccount'), (1, False, 2, 'Account TestAccount'), (2, False, 2, 'Account TestAccount'), (3, False, 2, 'Account TestAccount')])
    def test_ooc_look(self, multisession_mode, auto_puppet, max_nr_chars, expected_result):
        """OOC look output depends on multisession mode, auto-puppeting and char limit."""
        self.account.characters.add(self.char1)
        self.account.unpuppet_all()
        with self.settings(MULTISESSION=multisession_mode):
            with patch('evennia.commands.default.account._MAX_NR_CHARACTERS', new=max_nr_chars):
                with patch('evennia.commands.default.account._AUTO_PUPPET_ON_LOGIN', new=auto_puppet):
                    self.call(account.CmdOOCLook(), '', expected_result, caller=self.account)
    def test_ooc(self):
        self.call(account.CmdOOC(), '', 'You go OOC.', caller=self.account)
    def test_ic(self):
        """IC command puppets an owned character."""
        self.account.characters.add(self.char1)
        self.account.unpuppet_object(self.session)
        self.call(account.CmdIC(), 'Char', 'You become Char.', caller=self.account, receiver=self.char1)
    def test_ic__other_object(self):
        """IC command can also puppet a non-character object the account owns."""
        self.account.characters.add(self.obj1)
        self.account.unpuppet_object(self.session)
        self.call(account.CmdIC(), 'Obj', 'You become Obj.', caller=self.account, receiver=self.obj1)
    def test_ic__nonaccess(self):
        """IC on an unknown/unpermitted target is rejected."""
        self.account.unpuppet_object(self.session)
        self.call(account.CmdIC(), 'Nonexistent', 'That is not a valid character choice.', caller=self.account, receiver=self.account)
    def test_password(self):
        self.call(account.CmdPassword(), 'testpassword = testpassword', 'Password changed.', caller=self.account)
    def test_option(self):
        self.call(account.CmdOption(), '', 'Client settings', caller=self.account)
    def test_who(self):
        self.call(account.CmdWho(), '', 'Accounts:', caller=self.account)
    def test_quit(self):
        self.call(account.CmdQuit(), '', 'Quitting. Hope to see you again, soon.', caller=self.account)
    def test_sessions(self):
        self.call(account.CmdSessions(), '', 'Your current session(s):', caller=self.account)
    def test_color_test(self):
        self.call(account.CmdColorTest(), 'ansi', 'ANSI colors:', caller=self.account)
    def test_char_create(self):
        self.call(account.CmdCharCreate(), 'Test1=Test char', 'Created new character Test1. Use ic Test1 to enter the game', caller=self.account)
    def test_char_delete(self):
        """Char deletion prompts for confirmation and honors delete locks."""
        self.account.characters.add(self.char1)
        self.call(account.CmdCharDelete(), 'Char', "This will permanently destroy 'Char'. This cannot be undone. Continue yes/[no]?", caller=self.account)
        # Drop to Player permissions so the delete lock is actually enforced.
        self.account.permissions.add('Player')
        self.account.permissions.remove('Developer')
        self.char1.locks.add('delete:none()')
        self.call(account.CmdCharDelete(), 'Char', 'You do not have permission to delete this character.', caller=self.account)
        # Re-grant deletion to this specific account id and retry.
        self.char1.locks.add(('delete:pid(%i)' % self.account.id))
        self.call(account.CmdCharDelete(), 'Char', "This will permanently destroy 'Char'. This cannot be undone. Continue yes/[no]?", caller=self.account)
    def test_quell(self):
        self.call(account.CmdQuell(), '', "Quelling to current puppet's permissions (developer).", caller=self.account)
class SGDGrafting(Grafting):
    """Grafting variant for plain SGD: gradients pass through untouched."""

    def __init__(self, param: Tensor):
        # No extra state beyond what the Grafting base tracks.
        super().__init__(param)

    def precondition(self, grad: Tensor, iteration: int) -> Tensor:
        """Return the gradient unchanged — SGD applies no preconditioning."""
        return grad

    def direction_norm(self, grad: Tensor, iteration: int) -> Tensor:
        """Return the norm of the raw gradient as the step-direction norm."""
        return torch.linalg.norm(grad)
class Connection(ConnectionAPI, Service):
    """A running devp2p peer connection.

    Wraps a multiplexer over an established transport, streams each
    sub-protocol's messages to registered handlers, manages attached
    behaviors and named 'logic' APIs, and exposes handshake receipts.

    NOTE(review): the bare ``_property`` lines before several defs below
    look like ``@property`` decorators mangled during extraction; as
    written they are plain name expressions and the following defs are
    ordinary methods — confirm against the original source.
    """
    # Handler sets keyed by sub-protocol class.
    _protocol_handlers: DefaultDict[(Type[ProtocolAPI], Set[HandlerFn])]
    # Handlers invoked for every message regardless of command type.
    _msg_handlers: Set[HandlerFn]
    # Handler sets keyed by command class.
    _command_handlers: DefaultDict[(Type[CommandAPI[Any]], Set[HandlerFn])]
    # Named LogicAPI objects attached via add_logic().
    _logics: Dict[(str, LogicAPI)]
    def __init__(self, multiplexer: MultiplexerAPI, devp2p_receipt: DevP2PReceipt, protocol_receipts: Sequence[HandshakeReceiptAPI], is_dial_out: bool) -> None:
        """Store handshake artifacts and set up handler registries.

        Raises immediately (via the multiplexer) if the underlying stream
        already errored before this connection was constructed.
        """
        self.logger = get_logger('p2p.connection.Connection')
        self._multiplexer = multiplexer
        self._multiplexer.raise_if_streaming_error()
        self._devp2p_receipt = devp2p_receipt
        self.protocol_receipts = tuple(protocol_receipts)
        self.is_dial_out = is_dial_out
        self._protocol_handlers = collections.defaultdict(set)
        self._command_handlers = collections.defaultdict(set)
        self._msg_handlers = set()
        # Gate: protocol streams are buffered until start_protocol_streams().
        self._handlers_ready = asyncio.Event()
        self.behaviors_applied = asyncio.Event()
        self._logics = {}
    def __str__(self) -> str:
        return f'Connection-{self.session}'
    def __repr__(self) -> str:
        return f'<Connection {self.session!r} {self._multiplexer!r} dial_out={self.is_dial_out}>'
    def is_streaming_messages(self) -> bool:
        """True once start_protocol_streams() has released message delivery."""
        return self._handlers_ready.is_set()
    def start_protocol_streams(self) -> None:
        """Release buffered protocol streams to the registered handlers."""
        self._handlers_ready.set()
    async def run_behaviors(self, behaviors: Tuple[(BehaviorAPI, ...)]) -> None:
        """Apply each applicable behavior, signal readiness, then wait until
        either a behavior exits or this service finishes; a lost peer
        connection cancels the whole connection."""
        async with contextlib.AsyncExitStack() as stack:
            futures: List[asyncio.Task[Any]] = [create_task(self.manager.wait_finished(), 'Connection/run_behaviors/wait_finished')]
            for behavior in behaviors:
                if behavior.should_apply_to(self):
                    behavior_exit = (await stack.enter_async_context(behavior.apply(self)))
                    futures.append(behavior_exit)
            self.behaviors_applied.set()
            async with cleanup_tasks(*futures):
                try:
                    # post_apply runs only after *all* behaviors are applied.
                    for behavior in behaviors:
                        behavior.post_apply()
                    (await wait_first(futures, max_wait_after_cancellation=2))
                except PeerConnectionLost:
                    self.manager.cancel()
    async def run_peer(self, peer: 'BasePeer') -> None:
        """Run the peer's behaviors, then the peer service itself, waiting
        (with a timeout) until the peer is started and ready."""
        self.manager.run_daemon_task(self.run_behaviors, peer.get_behaviors())
        (await self.behaviors_applied.wait())
        self.manager.run_daemon_child_service(peer)
        (await asyncio.wait_for(peer.manager.wait_started(), timeout=PEER_READY_TIMEOUT))
        (await asyncio.wait_for(peer.ready.wait(), timeout=PEER_READY_TIMEOUT))
    _property
    def is_dial_in(self) -> bool:
        # Inbound connection iff we did not dial out.
        return (not self.is_dial_out)
    _property
    def remote(self) -> NodeAPI:
        return self._multiplexer.remote
    _property
    def session(self) -> SessionAPI:
        return self._multiplexer.session
    def is_alive(self) -> bool:
        """True while the service runs and the transport is not closing."""
        return (self.manager.is_running and (not self._multiplexer.is_closing))
    def __del__(self) -> None:
        # Best-effort: stop the multiplexer stream when GC'd.
        self._multiplexer.cancel_streaming()
    async def run(self) -> None:
        """Main service loop: fan each protocol's messages out to handlers
        and wait for the multiplexer to finish streaming.

        A MalformedMessage triggers a BAD_PROTOCOL disconnect; in all cases
        the service is cancelled on exit.
        """
        self._multiplexer.raise_if_streaming_error()
        for protocol in self._multiplexer.get_protocols():
            self.manager.run_daemon_task(self._feed_protocol_handlers, protocol)
        try:
            (await self._multiplexer.wait_streaming_finished())
        except PeerConnectionLost:
            pass
        except MalformedMessage as err:
            self.logger.debug('Disconnecting peer %s for sending MalformedMessage: %s', self.remote, err, exc_info=True)
            try:
                self.get_base_protocol().send(Disconnect(DisconnectReason.BAD_PROTOCOL))
            except PeerConnectionLost:
                self.logger.debug('%s went away while trying to disconnect for MalformedMessage', self)
        finally:
            self.manager.cancel()
    async def _feed_protocol_handlers(self, protocol: ProtocolAPI) -> None:
        """Deliver each incoming command on *protocol* to the relevant
        protocol-, command- and message-level handlers (as daemon tasks).

        Waits up to 10s for start_protocol_streams() before delivering.
        """
        try:
            (await asyncio.wait_for(self._handlers_ready.wait(), timeout=10))
        except asyncio.TimeoutError as err:
            self.logger.warning('Timedout waiting for handler ready signal')
            raise asyncio.TimeoutError('The handlers ready event was never set. Ensure that `Connection.start_protocol_streams()` is being called') from err
        async for cmd in self._multiplexer.stream_protocol_messages(protocol):
            self.logger.debug2('Handling command: %s', type(cmd))
            # Copy the handler sets so handlers may (un)subscribe mid-dispatch.
            protocol_handlers = set(self._protocol_handlers[type(protocol)])
            for proto_handler_fn in protocol_handlers:
                self.logger.debug2('Running protocol handler %s for protocol=%s command=%s', proto_handler_fn, protocol, type(cmd))
                self.manager.run_task(proto_handler_fn, self, cmd)
            command_handlers = set(self._command_handlers[type(cmd)])
            command_handlers.update(self._msg_handlers)
            for cmd_handler_fn in command_handlers:
                self.logger.debug2('Running command handler %s for protocol=%s command=%s', cmd_handler_fn, protocol, type(cmd))
                self.manager.run_task(cmd_handler_fn, self, cmd)
        # Stream ended: if nobody cancelled us, flag the impending daemon-exit crash.
        if (self._multiplexer.is_closing and (not self.manager.is_cancelled)):
            try:
                (await asyncio.wait_for(self.manager.wait_finished(), timeout=2))
            except asyncio.TimeoutError:
                if (not self.manager.is_cancelled):
                    self.logger.error('stream_protocol_messages() terminated but %s was never cancelled, this will cause the Connection to crash with a DaemonTaskExit', self)
    def add_protocol_handler(self, protocol_class: Type[ProtocolAPI], handler_fn: HandlerFn) -> SubscriptionAPI:
        """Subscribe *handler_fn* to all messages of *protocol_class*;
        returns a Subscription whose cancel removes it. Raises
        UnknownProtocol for protocols not on this connection."""
        if (not self._multiplexer.has_protocol(protocol_class)):
            raise UnknownProtocol(f'Protocol {protocol_class} was not found int he connected protocols: {self._multiplexer.get_protocols()}')
        self._protocol_handlers[protocol_class].add(handler_fn)
        cancel_fn = functools.partial(self._protocol_handlers[protocol_class].remove, handler_fn)
        return Subscription(cancel_fn)
    def add_msg_handler(self, handler_fn: HandlerFn) -> SubscriptionAPI:
        """Subscribe *handler_fn* to every incoming message."""
        self._msg_handlers.add(handler_fn)
        cancel_fn = functools.partial(self._msg_handlers.remove, handler_fn)
        return Subscription(cancel_fn)
    def add_command_handler(self, command_type: Type[CommandAPI[Any]], handler_fn: HandlerFn) -> SubscriptionAPI:
        """Subscribe *handler_fn* to a specific command type; raises
        UnknownProtocolCommand if no connected protocol supports it."""
        for protocol in self._multiplexer.get_protocols():
            if protocol.supports_command(command_type):
                self._command_handlers[command_type].add(handler_fn)
                cancel_fn = functools.partial(self._command_handlers[command_type].remove, handler_fn)
                return Subscription(cancel_fn)
        else:
            raise UnknownProtocolCommand(f'Command {command_type} was not found in the connected protocols: {self._multiplexer.get_protocols()}')
    def add_logic(self, name: str, logic: LogicAPI) -> SubscriptionAPI:
        """Attach a named logic API; raises DuplicateAPI on name clash.
        The returned Subscription detaches it."""
        if (name in self._logics):
            raise DuplicateAPI(f"There is already an API registered under the name '{name}': {self._logics[name]}")
        self.logger.debug("Adding '%s' logic to %s", name, self)
        self._logics[name] = logic
        cancel_fn = functools.partial(self.remove_logic, name)
        return Subscription(cancel_fn)
    def remove_logic(self, name: str) -> None:
        """Detach the named logic API (KeyError if absent)."""
        self.logger.debug("Removing '%s' logic from %s", name, self)
        self._logics.pop(name)
    def has_logic(self, name: str) -> bool:
        """Whether a logic is registered under *name*; requires a live connection."""
        if (not self.is_alive):
            raise PeerConnectionLost('Cannot look up subprotocol when connection is not alive')
        return (name in self._logics)
    def get_logic(self, name: str, logic_type: Type[TLogic]) -> TLogic:
        """Fetch the named logic, type-checked against *logic_type*.

        Raises UnknownAPI if missing, TypeError on a type mismatch.
        """
        if (not self.has_logic(name)):
            raise UnknownAPI(f"No '{name}' logic registered on {self}. Registered ones are: {self._logics.keys()} ")
        logic = self._logics[name]
        if isinstance(logic, logic_type):
            return logic
        else:
            raise TypeError(f'Wrong logic type. expected: {logic_type} got: {type(logic)}')
    def get_multiplexer(self) -> MultiplexerAPI:
        return self._multiplexer
    def get_base_protocol(self) -> BaseP2PProtocol:
        return self._multiplexer.get_base_protocol()
    def get_p2p_receipt(self) -> DevP2PReceipt:
        return self._devp2p_receipt
    def has_protocol(self, protocol_identifier: Union[(ProtocolAPI, Type[ProtocolAPI])]) -> bool:
        return self._multiplexer.has_protocol(protocol_identifier)
    def get_protocols(self) -> Tuple[(ProtocolAPI, ...)]:
        return self._multiplexer.get_protocols()
    def get_protocol_by_type(self, protocol_type: Type[TProtocol]) -> TProtocol:
        return self._multiplexer.get_protocol_by_type(protocol_type)
    def get_protocol_for_command_type(self, command_type: Type[CommandAPI[Any]]) -> ProtocolAPI:
        return self._multiplexer.get_protocol_for_command_type(command_type)
    def get_receipt_by_type(self, receipt_type: Type[THandshakeReceipt]) -> THandshakeReceipt:
        """Return the first handshake receipt of *receipt_type*; raise
        ReceiptNotFound if none matches."""
        for receipt in self.protocol_receipts:
            if isinstance(receipt, receipt_type):
                return receipt
        else:
            raise ReceiptNotFound(f'Receipt not found: {receipt_type}')
    _property
    def remote_capabilities(self) -> Capabilities:
        return self._devp2p_receipt.capabilities
    _property
    def remote_p2p_version(self) -> int:
        return self._devp2p_receipt.version
    _property
    def negotiated_p2p_version(self) -> int:
        return self.get_base_protocol().version
    _property
    def remote_public_key(self) -> keys.PublicKey:
        return keys.PublicKey(self._devp2p_receipt.remote_public_key)
    _property
    def client_version_string(self) -> str:
        return self._devp2p_receipt.client_version_string
    _property
    def safe_client_version_string(self) -> str:
        """Client version string truncated to <=256 chars and made printable
        (repr-escaped when the truncation leaves unprintable characters)."""
        if (len(self.client_version_string) <= 256):
            return self.client_version_string
        truncated_client_version_string = (self.client_version_string[:253] + '...')
        if truncated_client_version_string.isprintable():
            return truncated_client_version_string
        else:
            return repr(truncated_client_version_string)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.