code stringlengths 281 23.7M |
|---|
def extractDthoursonpalmerCom(item):
    """Build a release message for a dthoursonpalmer.com feed item.

    Returns None for previews or items without a volume/chapter, a release
    message for recognized tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    # (tag to look for, series name, translation type)
    for tag, series, tl_type in [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def downgrade():
    """Reverse the migration: drop the created_at/updated_at indexes."""
    # (index name, owning table), in the original drop order.
    for index_name, table in (
        ('ix_privacypreferencehistory_created_at', 'privacypreferencehistory'),
        ('ix_currentprivacypreference_updated_at', 'currentprivacypreference'),
        ('ix_currentprivacypreference_created_at', 'currentprivacypreference'),
    ):
        op.drop_index(op.f(index_name), table_name=table)
# NOTE(review): the six lines below look like stripped click decorators
# (@_flyte_cli.command, @_project_identifier_option, ...) — confirm against
# the original source before running this file.
_flyte_cli.command('register-project', cls=_FlyteSubCommand)
_project_identifier_option
_project_name_option
_project_description_option
_host_option
_insecure_option
def register_project(identifier, name, description, host, insecure):
    """CLI handler: register a new project with the Flyte admin service."""
    _welcome_message()
    client = _get_client(host, insecure)
    client.register_project(_Project(identifier, name, description))
    _click.echo('Registered project [id: {}, name: {}, description: {}]'.format(identifier, name, description))
class Result():
    """Outcome of a single test Query: status, headers, body and backend
    data, plus an assertion helper (`check`) for the test harness."""

    # Decoded response body; None when the raw result carried no 'body'.
    body: Optional[bytes]

    def __init__(self, query, res):
        """Bind this result to *query* and unpack the raw result dict *res*."""
        self.query = query
        query.result = self
        self.parent = query.parent
        self.status = res.get('status')
        self.headers = res.get('headers')
        self.messages = res.get('messages')
        self.tls = res.get('tls')
        if ('body' in res):
            # The body is transported base64-encoded; decode back to raw bytes.
            self.body = base64.decodebytes(bytes(res['body'], 'ASCII'))
        else:
            self.body = None
        self.text = res.get('text')
        self.json = res.get('json')
        # A JSON payload is treated as an echo from the test backend.
        self.backend = (BackendResult(self.json) if self.json else None)
        self.error = res.get('error')

    def __repr__(self):
        return str(self.as_dict())

    def check(self):
        """Assert this result matches the query's expectations.

        Honors skip/xfail markers first; then checks either the expected
        error substring(s) or the expected status code(s).
        """
        if self.query.skip:
            pytest.skip(self.query.skip)
        if self.query.xfail:
            pytest.xfail(self.query.xfail)
        if (not self.query.ignore_result):
            if (self.query.error is not None):
                found = False
                # query.error may be one substring or a list; any match passes.
                errors = self.query.error
                if isinstance(self.query.error, str):
                    errors = [self.query.error]
                if (self.error is not None):
                    for error in errors:
                        if (error in self.error):
                            found = True
                            break
                assert found, '{}: expected error to contain any of {}; got {} instead'.format(self.query.url, ', '.join([("'%s'" % x) for x in errors]), (("'%s'" % self.error) if self.error else 'no error'))
            elif isinstance(self.query.expected, list):
                if (self.status not in self.query.expected):
                    # Dump cluster state before the assertion fires, for triage.
                    self.parent.log_kube_artifacts()
                assert (self.status in self.query.expected), ('%s: expected status code %s, got %s instead with error %s' % (self.query.url, self.query.expected, self.status, self.error))
            else:
                if (self.query.expected != self.status):
                    self.parent.log_kube_artifacts()
                assert (self.query.expected == self.status), ('%s: expected status code %s, got %s instead with error %s' % (self.query.url, self.query.expected, self.status, self.error))

    def as_dict(self) -> Dict[(str, Any)]:
        """Serialize this result for logging/debugging output."""
        od = {'query': self.query.as_json(), 'status': self.status, 'error': self.error, 'headers': self.headers}
        if (self.backend and self.backend.name):
            od['backend'] = self.backend.as_dict()
        else:
            od['json'] = self.json
            od['text'] = self.text
        return od
def test_owly_short_method():
    """owly.short() should return the short URL from the mocked API response."""
    # NOTE(review): relies on module-level fixtures `owly`, `expanded`,
    # `shorten` and an active `responses` mock defined elsewhere in the file.
    params = urlencode({'apiKey': 'TEST_KEY', 'longUrl': expanded})
    body = json.dumps({'results': {'shortUrl': shorten}})
    mock_url = f'{owly.api_url}shorten?{params}'
    responses.add(responses.GET, mock_url, body=body, match_querystring=True)
    shorten_result = owly.short(expanded)
    assert (shorten_result == shorten)
def mock_gids_in_passwd_pass(self, cmd):
    """Fake a completed-process result for commands touching /etc/group or
    /etc/passwd: two numeric ids plus a trailing empty line; anything else
    yields empty stdout. stderr is always empty and the exit code is 0.
    """
    known_files = ('/etc/group', '/etc/passwd')
    if any(marker in cmd for marker in known_files):
        stdout_lines = ['1000', '1001', '']
    else:
        stdout_lines = ['']
    return SimpleNamespace(stdout=stdout_lines, stderr=[''], returncode=0)
class LPDDR4Output():
    """Bundle of PHY-side signals for an LPDDR4 interface.

    Widths are expressed in `nphases` (phases per controller cycle); the
    clock/DQ/DMI/DQS paths carry 2 bits per phase.
    NOTE(review): exact serialization semantics depend on the PHY that
    consumes these signals — confirm against it.
    """
    def __init__(self, nphases, databits):
        self.clk = Signal((2 * nphases))
        self.cke = Signal(nphases)
        self.odt = Signal(nphases)
        self.reset_n = Signal(nphases)
        self.cs = Signal(nphases)
        # 6-bit command/address bus, one Signal per CA line.
        self.ca = [Signal(nphases) for _ in range(6)]
        # Data-mask/inversion: one signal per byte lane (…_o out, …_i in).
        self.dmi_o = [Signal((2 * nphases)) for _ in range((databits // 8))]
        self.dmi_i = [Signal((2 * nphases)) for _ in range((databits // 8))]
        self.dmi_oe = Signal()  # output enable for the DMI lanes
        # Data bus: one signal per data bit.
        self.dq_o = [Signal((2 * nphases)) for _ in range(databits)]
        self.dq_i = [Signal((2 * nphases)) for _ in range(databits)]
        self.dq_oe = Signal()  # output enable for the DQ lanes
        # Data strobes: one per byte lane.
        self.dqs_o = [Signal((2 * nphases)) for _ in range((databits // 8))]
        self.dqs_i = [Signal((2 * nphases)) for _ in range((databits // 8))]
        self.dqs_oe = Signal()  # output enable for the DQS lanes
class TokenCollection(object):
    """Ordered collection of lexer token definitions.

    Registered token names are exposed as attributes (``coll.NAME`` returns
    the name string); `patterns` holds the compiled regex per name and
    `tokens` keeps (regex, factory) pairs in registration order.
    """

    def __init__(self):
        self.tokens = []    # (regex, factory-or-None) in registration order
        self.lookup = {}    # token name -> token name, for attribute access
        self.patterns = {}  # token name -> compiled regex

    def __getattr__(self, attr):
        # Invoked only when normal attribute lookup fails; resolve token names.
        try:
            return self.lookup[attr]
        except KeyError:
            # BUG FIX: a dict lookup raises KeyError, not AttributeError.
            # The original caught AttributeError, so any missing attribute
            # leaked a KeyError out of attribute access, breaking
            # hasattr()/getattr(..., default).
            pass
        # Fall back to default lookup, which raises a proper AttributeError.
        return object.__getattribute__(self, attr)

    def add(self, regex, name):
        """Register *regex*; anonymous when *name* is None, otherwise a named
        token that produces Token(name, ...) instances on match."""
        if (name is None):
            self.tokens.append((regex, None))
        else:
            self.lookup[name] = name
            self.patterns[name] = re.compile(regex)
            self.tokens.append((regex, (lambda s, t: Token(name, t, s.match))))
class MetaDataSelect(widgets.Select):
    """Django Select widget whose options carry extra metadata.

    Choices are expected as tuples: (value, metadata[, html_properties]).
    The metadata is JSON-serialized into a ``data-meta`` attribute on each
    rendered <option>.
    """
    def render_option(self, selected_choices, option_value, option_label):
        try:
            # Optional third element: extra HTML attributes for the <option>.
            properties = ' '.join((('%s="%s"' % kv) for kv in iteritems(option_value[2])))
        except IndexError:
            properties = ''
        metadata = option_value[1]
        option_value = force_text(option_value[0])
        if (option_value in selected_choices):
            selected_html = mark_safe(' selected="selected"')
            if (not self.allow_multiple_selected):
                # Single-select: only the first matching option stays selected.
                selected_choices.remove(option_value)
        else:
            selected_html = ''
        return format_html(u"<option value='{0}'{1} data-meta='{2}' {3}>{4}</option>", option_value, selected_html, mark_safe(dumps(metadata, indent=None).replace("'", "\\'")), mark_safe(properties), force_text(option_label))
# NOTE(review): this call looks like a stripped @_bgp_error_metadata decorator
# for the class below — confirm against the original source.
_bgp_error_metadata(code=RUNTIME_CONF_ERROR_CODE, sub_code=4, def_desc='Incorrect Value for configuration.')
class ConfigValueError(RuntimeConfigError):
    """Raised when a configuration setting has an invalid value.

    Builds the description from the optional CONF_NAME/CONF_VALUE kwargs,
    falling back to an explicit 'desc' kwarg when neither is given.
    """
    def __init__(self, **kwargs):
        conf_name = kwargs.get(CONF_NAME)
        conf_value = kwargs.get(CONF_VALUE)
        if (conf_name and conf_value):
            super(ConfigValueError, self).__init__(desc=('Incorrect Value %s for configuration: %s' % (conf_value, conf_name)))
        elif conf_name:
            super(ConfigValueError, self).__init__(desc=('Incorrect Value for configuration: %s' % conf_name))
        else:
            # Neither name nor value supplied: pass through the caller's
            # description (may be None).
            super(ConfigValueError, self).__init__(desc=kwargs.get('desc'))
def enlarge_bins(bin_intervals):
    """Make consecutive bins on the same chromosome contiguous, in place.

    Each entry is a (chrom, start, end, extra) tuple. The first bin of each
    chromosome (except a trailing single-bin chromosome) is extended back to
    position 0, and any gap between adjacent same-chromosome bins is closed
    by moving both edges to the gap midpoint. Returns the same (mutated) list.
    """
    if not bin_intervals:
        # Robustness: the original indexed bin_intervals[-1] and raised
        # IndexError on an empty list.
        return bin_intervals
    chr_start = True
    for idx in range(len(bin_intervals) - 1):
        (chrom, start, end, extra) = bin_intervals[idx]
        (chrom_next, start_next, end_next, extra_next) = bin_intervals[idx + 1]
        if chr_start:
            # First bin of this chromosome: anchor it at position 0.
            start = 0
            chr_start = False
        bin_intervals[idx] = (chrom, start, end, extra)
        if (chrom == chrom_next) and (end != start_next):
            # Close the gap: both bins meet at (roughly) the midpoint.
            middle = start_next - int((start_next - end) / 2)
            bin_intervals[idx] = (chrom, start, middle, extra)
            bin_intervals[idx + 1] = (chrom, middle, end_next, extra_next)
        if chrom != chrom_next:
            chr_start = True
    # The original also re-unpacked and re-assigned the last element, which
    # was a no-op and has been removed.
    return bin_intervals
def path_relative_to_dir(path, dir):
    """Return the suffix of *path* starting at the path component *dir*.

    Example: path_relative_to_dir('/a/b/c/d', 'b') -> 'b/c/d'.
    Raises RuntimeError when *dir* is not a component of *path*.
    """
    partial_path_elems = []
    while True:
        (head, tail) = os.path.split(path)
        partial_path_elems.append(tail)
        if tail == dir:
            break
        if (not head) or (head == path):
            # `head == path` happens at a filesystem root (os.path.split('/')
            # returns ('/', '')), where splitting makes no further progress;
            # without this check the original looped forever on absolute
            # paths that do not contain *dir*. Debug print()s removed.
            raise RuntimeError('no dir in path')
        path = head
    return os.path.join(*reversed(partial_path_elems))
class AnyFileSearcher(AbstractSearcher):
    """Searcher that looks for compiled MIB files under a directory, trying
    each extension in `exts` (populated by subclasses)."""
    # Candidate file extensions; subclasses override.
    exts = []
    def __init__(self, path):
        self._path = os.path.normpath(decode(path))
    def __str__(self):
        return ('%s{"%s"}' % (self.__class__.__name__, self._path))
    def fileExists(self, mibname, mtime, rebuild=False):
        """Report (via exceptions, per the pysmi searcher protocol) whether a
        compiled *mibname* at least as new as *mtime* exists.

        Raises PySmiFileNotModifiedError when an up-to-date file is found,
        PySmiFileNotFoundError when no usable file exists, PySmiSearcherError
        on stat failure; returns None when *rebuild* forces recompilation.
        """
        if rebuild:
            ((debug.logger & debug.flagSearcher) and debug.logger(('pretend %s is very old' % mibname)))
            return
        mibname = decode(mibname)
        basename = os.path.join(self._path, mibname)
        for sfx in self.exts:
            f = (basename + sfx)
            if ((not os.path.exists(f)) or (not os.path.isfile(f))):
                ((debug.logger & debug.flagSearcher) and debug.logger(('%s not present or not a file' % f)))
                continue
            try:
                # Index 8 of os.stat() is st_mtime.
                fileTime = os.stat(f)[8]
            except OSError:
                raise error.PySmiSearcherError(('failure opening compiled file %s: %s' % (f, sys.exc_info()[1])), searcher=self)
            ((debug.logger & debug.flagSearcher) and debug.logger(('found %s, mtime %s' % (f, time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime(fileTime))))))
            if (fileTime >= mtime):
                # Compiled artifact is newer than the source MIB: no rebuild.
                raise error.PySmiFileNotModifiedError()
        raise error.PySmiFileNotFoundError(('no compiled file %s found' % mibname), searcher=self)
# NOTE(review): looks like a stripped decorator (API route/doc registration)
# for the class below — confirm against the original source.
('', doc={'description': 'Initiate a comparison'})
class RestComparePut(RestResourceBase):
    """REST endpoint that starts a comparison between stored objects."""
    URL = '/rest/compare'
    # NOTE(review): the next two lines look like stripped decorators
    # (privilege check and @api.expect(compare_model)) — confirm.
    _accepted(*PRIVILEGES['compare'])
    (compare_model)
    def put(self):
        """Validate the payload and queue the compare task.

        Returns 200 when the comparison already exists (and redo is unset),
        404 when a referenced object is missing, 202 once the task is queued.
        """
        data = self.validate_payload_data(compare_model)
        compare_id = normalize_compare_id(';'.join(data['uid_list']))
        with get_shared_session(self.db.comparison) as comparison_db:
            if (comparison_db.comparison_exists(compare_id) and (not data['redo'])):
                return error_message('Compare already exists. Use "redo" to force re-compare.', self.URL, request_data=request.json, return_code=200)
            if (not comparison_db.objects_exist(compare_id)):
                # List exactly which referenced uids are absent.
                missing_uids = ', '.join((uid for uid in convert_compare_id_to_list(compare_id) if (not comparison_db.exists(uid))))
                return error_message(f'Some objects are not found in the database: {missing_uids}', self.URL, request_data=request.json, return_code=404)
        self.intercom.add_compare_task(compare_id, force=data['redo'])
        return success_message({'message': 'Compare started. Please use GET to get the results.'}, self.URL, request_data=request.json, return_code=202)
def validate_datapoint_format(datapoint: 'dict[str, Any]', kind: str, zone_key: 'ZoneKey'):
    """Raise ValidationError if *datapoint* lacks any key required for *kind*.

    Annotations are string literals so the function definition does not
    require `Any`/`ZoneKey` to be importable at definition time.
    """
    standard_keys = ['datetime', 'source']
    keys_dict = {'production': (['zoneKey', 'production'] + standard_keys), 'consumption': (['zoneKey', 'consumption'] + standard_keys), 'exchange': (['sortedZoneKeys', 'netFlow'] + standard_keys), 'price': (['zoneKey', 'currency', 'price'] + standard_keys), 'consumptionForecast': (['zoneKey', 'value'] + standard_keys), 'productionPerModeForecast': (['zoneKey', 'production'] + standard_keys), 'generationForecast': (['zoneKey', 'value'] + standard_keys), 'exchangeForecast': (['zoneKey', 'netFlow'] + standard_keys)}
    # Compute the missing keys once, instead of rebuilding the full missing
    # list for every individual absent key as the original did.
    missing = [key for key in keys_dict[kind] if key not in datapoint]
    if missing:
        raise ValidationError('{} - data point does not have the required keys: {} is missing'.format(zone_key, missing))
class BasePlot(object):
    """Base class for matplotlib-backed plots: colormap handling, canvas
    setup and the show/save/close lifecycle.

    NOTE(review): `contourlevels` and `colormap` are each defined twice
    (getter + setter signatures), `pagesize` has a bare getter, and
    `define_any_colormap` / `get_any_colormap_as_table` take no `self` —
    this strongly suggests @property/@<name>.setter/@staticmethod decorators
    were stripped from this source; confirm against the original file.
    """
    def __init__(self):
        clsname = f'{type(self).__module__}.{type(self).__name__}'
        logger.info(clsname)
        self._contourlevels = 3                    # default number of contour levels
        self._colormap = _get_colormap('viridis')  # default colormap
        self._ax = None                            # current matplotlib Axes
        self._tight = False                        # apply tight_layout on show/save
        self._showok = True                        # False when there is nothing to plot
        self._fig = None                           # current matplotlib Figure
        self._allfigs = []                         # every created figure, for close()
        self._pagesize = 'A4'
        logger.info('Ran __init__ ...')
    # Getter — presumably a stripped @property.
    def contourlevels(self):
        return self._contourlevels
    # Setter — presumably a stripped @contourlevels.setter.
    def contourlevels(self, num):
        self._contourlevels = num
    # Getter — presumably a stripped @property.
    def colormap(self):
        return self._colormap
    # Setter — presumably a stripped @colormap.setter; accepts an existing
    # LinearSegmentedColormap or a colormap name.
    def colormap(self, cmap):
        import matplotlib as mpl
        if isinstance(cmap, mpl.colors.LinearSegmentedColormap):
            self._colormap = cmap
        elif isinstance(cmap, str):
            logger.info('Definition of a colormap from string name: %s', cmap)
            self.define_colormap(cmap)
        else:
            raise ValueError('Input incorrect')
        logger.info('Colormap: %s', self._colormap)
    # Getter — presumably a stripped @property.
    def pagesize(self):
        return self._pagesize
    # NOTE(review): no `self` parameter — presumably a stripped @staticmethod.
    def define_any_colormap(cfile, colorlist=None):
        """Resolve *cfile* (None, a keyword, an RMS color file path, or a
        matplotlib map name) into a colormap; optionally subset the colors
        via *colorlist* indices."""
        import matplotlib as mpl
        import matplotlib.pyplot as plt
        valid_maps = sorted((m for m in plt.cm.datad))
        logger.info('Valid color maps: %s', valid_maps)
        colors = []
        # Fallback colormap if nothing below matches.
        cmap = _get_colormap('rainbow')
        if (cfile is None):
            cfile = 'rainbow'
            cmap = _get_colormap('rainbow')
        elif (cfile == 'xtgeo'):
            colors = _ctable.xtgeocolors()
            cmap = mpl.colors.LinearSegmentedColormap.from_list(cfile, colors, N=len(colors))
            cmap.name = 'xtgeo'
        elif (cfile == 'random40'):
            colors = _ctable.random40()
            cmap = mpl.colors.LinearSegmentedColormap.from_list(cfile, colors, N=len(colors))
            cmap.name = 'random40'
        elif (cfile == 'randomc'):
            colors = _ctable.randomc(256)
            cmap = mpl.colors.LinearSegmentedColormap.from_list(cfile, colors, N=len(colors))
            cmap.name = 'randomc'
        elif (isinstance(cfile, str) and ('rms' in cfile)):
            # Treat as an RMS color table file on disk.
            colors = _ctable.colorsfromfile(cfile)
            cmap = mpl.colors.LinearSegmentedColormap.from_list('rms', colors, N=len(colors))
            cmap.name = cfile
        elif (cfile in valid_maps):
            cmap = _get_colormap(cfile)
            for i in range(cmap.N):
                colors.append(cmap(i))
        else:
            xtg.warnuser(f'Trying to access as color map not installed in this version of matplotlib: <{cfile}>. Revert to <rainbow>')
            cmap = _get_colormap('rainbow')
            for i in range(cmap.N):
                colors.append(cmap(i))
        ctable = []
        if colorlist:
            # Subset/reorder colors by index; out-of-range indices fall back
            # to the first color with a warning.
            for entry in colorlist:
                if (entry < len(colors)):
                    ctable.append(colors[entry])
                else:
                    logger.warning('Color list out of range')
                    ctable.append(colors[0])
            cmap = mpl.colors.LinearSegmentedColormap.from_list(ctable, colors, N=len(colors))
            cmap.name = 'user'
        return cmap
    # NOTE(review): no `self` parameter — presumably a stripped @staticmethod.
    def get_any_colormap_as_table(cmap):
        """Return the colormap's colors as a list of RGBA tuples."""
        return [cmap(i) for i in range(cmap.N)]
    def get_colormap_as_table(self):
        """Return the current colormap's colors as a list of RGBA tuples."""
        return self.get_any_colormap_as_table(self._colormap)
    def define_colormap(self, cfile, colorlist=None):
        """Set the instance colormap (and contour level count) from *cfile*."""
        logger.info('Defining colormap')
        cmap = self.define_any_colormap(cfile, colorlist=colorlist)
        self.contourlevels = cmap.N
        self._colormap = cmap
    def canvas(self, title=None, subtitle=None, infotext=None, figscaling=1.0):
        """Create a new A4-landscape-proportioned figure/axes pair."""
        import matplotlib.pyplot as plt
        # 11.69 x 8.27 inches is A4 landscape.
        (self._fig, self._ax) = plt.subplots(figsize=((11.69 * figscaling), (8.27 * figscaling)))
        self._allfigs.append(self._fig)
        if (title is not None):
            self._fig.suptitle(title, fontsize=18)
        if (subtitle is not None):
            self._ax.set_title(subtitle, size=14)
        if (infotext is not None):
            self._fig.text(0.01, 0.02, infotext, ha='left', va='center', fontsize=8)
    def show(self):
        """Display the current figure; return True if something was shown."""
        if self._tight:
            self._fig.tight_layout()
        if self._showok:
            import matplotlib.pyplot as plt
            logger.info('Calling plt show method...')
            plt.show()
            return True
        logger.warning('Nothing to plot (well outside Z range?)')
        return False
    def close(self):
        """Close every figure created by this instance."""
        import matplotlib.pyplot as plt
        for fig in self._allfigs:
            plt.close(fig)
    def savefig(self, filename, fformat='png', last=True, **kwargs):
        """Save the current figure; closes all figures when *last* is True.
        Returns True on success, False when there was nothing to plot."""
        if self._tight:
            self._fig.tight_layout()
        if self._showok:
            import matplotlib.pyplot as plt
            plt.savefig(filename, format=fformat, **kwargs)
            if last:
                self.close()
            return True
        logger.warning('Nothing to plot (well outside Z range?)')
        return False
def use_oserdese2(p, luts, connects):
    """Emit a randomized OSERDESE2 primitive instantiation (Verilog) to
    *connects*, recording the chosen parameters in dict *p*.

    Randomizes data rates, widths, inversion flags and clock usage so a
    fuzzer can cover the OSERDESE2 configuration space; input/output nets
    are drawn from *luts*.
    """
    p['oddr_mux_config'] = 'none'
    p['tddr_mux_config'] = 'none'
    p['DATA_RATE_OQ'] = verilog.quote(random.choice(('SDR', 'DDR')))
    p['DATA_RATE_TQ'] = verilog.quote(random.choice(('BUF', 'SDR', 'DDR')))
    # Legal DATA_WIDTH values depend on the OQ data rate.
    if (verilog.unquote(p['DATA_RATE_OQ']) == 'SDR'):
        data_widths = [2, 3, 4, 5, 6, 7, 8]
    else:
        data_widths = [4, 6, 8]
    p['DATA_WIDTH'] = random.choice(data_widths)
    # TRISTATE_WIDTH 4 is only legal for the DDR/DDR width-4 combination.
    if ((p['DATA_WIDTH'] == 4) and (verilog.unquote(p['DATA_RATE_OQ']) == 'DDR') and (verilog.unquote(p['DATA_RATE_TQ']) == 'DDR')):
        tristate_width = 4
    else:
        tristate_width = 1
    p['SERDES_MODE'] = verilog.quote(random.choice(('MASTER', 'SLAVE')))
    p['TRISTATE_WIDTH'] = tristate_width
    p['OSERDES_MODE'] = verilog.quote(random.choice(('MASTER', 'SLAVE')))
    if p['io']:
        # IO site: wire the tristate feedback/output and T1-T4/TCE inputs.
        p['TFB'] = '.TFB(tfb_{site}),'.format(**p)
        p['TQ'] = '.TQ({twire}),'.format(**p)
        p['t1net'] = luts.get_next_output_net()
        p['t2net'] = luts.get_next_output_net()
        p['t3net'] = luts.get_next_output_net()
        p['t4net'] = luts.get_next_output_net()
        p['tcenet'] = luts.get_next_output_net()
        for idx in range(4):
            p['IS_T{}_INVERTED'.format((idx + 1))] = random.randint(0, 1)
    else:
        # Non-IO site: leave the tristate ports unconnected.
        p['TFB'] = '.TFB(),'
        p['TQ'] = '.TQ(),'
        p['t1net'] = ''
        p['t2net'] = ''
        p['t3net'] = ''
        p['t4net'] = ''
        p['tcenet'] = ''
        for idx in range(4):
            p['IS_T{}_INVERTED'.format((idx + 1))] = 0
    p['SRVAL_OQ'] = random.randint(0, 1)
    p['SRVAL_TQ'] = random.randint(0, 1)
    p['INIT_OQ'] = random.randint(0, 1)
    p['INIT_TQ'] = random.randint(0, 1)
    for idx in range(8):
        p['IS_D{}_INVERTED'.format((idx + 1))] = random.randint(0, 1)
    p['IS_CLK_INVERTED'] = random.randint(0, 1)
    p['IS_CLKDIV_INVERTED'] = random.randint(0, 1)
    # CLK/CLKDIV are optionally connected; absent ports are simply omitted.
    clk_connections = ''
    p['CLK_USED'] = random.randint(0, 1)
    p['CLKDIV_USED'] = random.randint(0, 1)
    if p['CLK_USED']:
        clk_connections += '\n        .CLK({}),'.format(luts.get_next_output_net())
    if p['CLKDIV_USED']:
        clk_connections += '\n        .CLKDIV({}),'.format(luts.get_next_output_net())
    print('\n    (* KEEP, DONT_TOUCH, LOC = "{ologic_loc}" *)\n    OSERDESE2 #(\n        .SERDES_MODE({OSERDES_MODE}),\n        .DATA_RATE_TQ({DATA_RATE_TQ}),\n        .DATA_RATE_OQ({DATA_RATE_OQ}),\n        .DATA_WIDTH({DATA_WIDTH}),\n        .TRISTATE_WIDTH({TRISTATE_WIDTH}),\n        .SRVAL_OQ({SRVAL_OQ}),\n        .SRVAL_TQ({SRVAL_TQ}),\n        .INIT_OQ({INIT_OQ}),\n        .INIT_TQ({INIT_TQ}),\n        .IS_T1_INVERTED({IS_T1_INVERTED}),\n        .IS_T2_INVERTED({IS_T2_INVERTED}),\n        .IS_T3_INVERTED({IS_T3_INVERTED}),\n        .IS_T4_INVERTED({IS_T4_INVERTED}),\n        .IS_D1_INVERTED({IS_D1_INVERTED}),\n        .IS_D2_INVERTED({IS_D2_INVERTED}),\n        .IS_D3_INVERTED({IS_D3_INVERTED}),\n        .IS_D4_INVERTED({IS_D4_INVERTED}),\n        .IS_D5_INVERTED({IS_D5_INVERTED}),\n        .IS_D6_INVERTED({IS_D6_INVERTED}),\n        .IS_D7_INVERTED({IS_D7_INVERTED}),\n        .IS_D8_INVERTED({IS_D8_INVERTED}),\n        .IS_CLK_INVERTED({IS_CLK_INVERTED}),\n        .IS_CLKDIV_INVERTED({IS_CLKDIV_INVERTED})\n    ) oserdese2_{site} (\n        .OQ({owire}),\n        {TFB}\n        {TQ}\n        {clk_connections}\n        .D1({d1net}),\n        .D2({d2net}),\n        .D3({d3net}),\n        .D4({d4net}),\n        .D5({d5net}),\n        .D6({d6net}),\n        .D7({d7net}),\n        .D8({d8net}),\n        .OCE({ocenet}),\n        .RST({rstnet}),\n        .T1({t1net}),\n        .T2({t2net}),\n        .T3({t3net}),\n        .T4({t4net}),\n        .TCE({tcenet})\n    );'.format(clk_connections=clk_connections, rstnet=luts.get_next_output_net(), d1net=luts.get_next_output_net(), d2net=luts.get_next_output_net(), d3net=luts.get_next_output_net(), d4net=luts.get_next_output_net(), d5net=luts.get_next_output_net(), d6net=luts.get_next_output_net(), d7net=luts.get_next_output_net(), d8net=luts.get_next_output_net(), ocenet=luts.get_next_output_net(), ofb_wire=luts.get_next_input_net(), **p), file=connects)
# NOTE(review): the line below and the one before __init__ look like stripped
# dependency-injection decorators (e.g. @register.factory(...) and
# @inject(bus=..., config=..., plugin_engine=...)) — confirm against the
# original source; as written they are not valid statements.
('tomate.ui.preference.extension', scope=SingletonScope)
class ExtensionTab():
    """Preferences tab that lists plugins and lets the user toggle them and
    open per-plugin settings."""
    (bus='tomate.bus', config='tomate.config', plugin_engine='tomate.plugin')
    def __init__(self, bus: Bus, config: Config, plugin_engine: PluginEngine):
        self._plugins = plugin_engine
        self._config = config
        self._bus = bus
        self.toplevel = None  # parent window for plugin settings dialogs
        # Tree view over the plugin model: toggle | icon | markup detail.
        self.plugin_model = Gtk.ListStore(*PluginGrid.MODEL)
        self.plugin_list = Gtk.TreeView(headers_visible=False, model=self.plugin_model, name='plugin.list')
        self.plugin_list.get_selection().connect('changed', self._on_plugin_changed)
        self.plugin_list.get_selection().set_mode(Gtk.SelectionMode.BROWSE)
        renderer = Gtk.CellRendererToggle()
        renderer.connect('toggled', self._on_plugin_toggle)
        column = Gtk.TreeViewColumn('Active', renderer, active=PluginGrid.ACTIVE)
        self.plugin_list.append_column(column)
        renderer = Gtk.CellRendererPixbuf()
        column = Gtk.TreeViewColumn('Icon', renderer, pixbuf=PluginGrid.ICON)
        self.plugin_list.append_column(column)
        renderer = Gtk.CellRendererText(wrap_mode=Pango.WrapMode.WORD, wrap_width=250)
        column = Gtk.TreeViewColumn('Detail', renderer, markup=PluginGrid.DETAIL)
        self.plugin_list.append_column(column)
        plugin_list_container = Gtk.ScrolledWindow(shadow_type=Gtk.ShadowType.IN)
        plugin_list_container.add(self.plugin_list)
        # Settings button is enabled only for active plugins with settings.
        self.settings_button = Gtk.Button.new_from_icon_name('preferences-system', Gtk.IconSize.MENU)
        self.settings_button.set_properties(name='plugin.settings', sensitive=False)
        self.settings_button.connect('clicked', self._on_plugin_settings_clicked)
        settings_button_container = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=6)
        settings_button_container.pack_end(self.settings_button, False, False, 0)
        self.widget = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=6)
        self.widget.pack_start(plugin_list_container, True, True, 0)
        self.widget.pack_start(settings_button_container, False, False, 0)
        self.widget.show_all()
    def _on_plugin_changed(self, selection):
        # Selection change: enable settings only if the plugin is enabled
        # AND has a settings dialog.
        (model, selected) = selection.get_selected()
        if (selected is not None):
            grid_plugin = PluginGrid.from_iter(model, selected)
            self.settings_button.props.sensitive = (grid_plugin.is_enable & grid_plugin.has_settings)
    def _on_plugin_toggle(self, _, path):
        # Checkbox toggled: flip the plugin state and (de)activate it.
        plugin = PluginGrid.from_path(self.plugin_model, path)
        plugin.toggle()
        logger.debug('action=toggle plugin=%s enable=%s', plugin.name, plugin.is_enable)
        if plugin.is_enable:
            self._activate(plugin)
        else:
            self._deactivate(plugin)
    def _on_plugin_settings_clicked(self, _):
        (model, selected) = self.plugin_list.get_selection().get_selected()
        grid_plugin = PluginGrid.from_iter(model, selected)
        logger.debug('action=open_plugin_settings plugin=%s', grid_plugin.name)
        grid_plugin.open_settings(self.toplevel)
    def _activate(self, plugin):
        self._plugins.activate(plugin.name)
        self.settings_button.props.sensitive = plugin.has_settings
    def _deactivate(self, plugin):
        self._plugins.deactivate(plugin.name)
        self.settings_button.props.sensitive = False
    def set_toplevel(self, widget: Gtk.Widget) -> None:
        # Remember the parent window so settings dialogs are modal to it.
        self.toplevel = widget
    def refresh(self):
        """Rebuild the plugin list from the plugin engine."""
        logger.debug('action=refresh_plugins has_plugins=%s', self._plugins.has_plugins())
        self._clear()
        for plugin in self._plugins.all():
            self._add(plugin)
        if self._plugins.has_plugins():
            self._select_first()
    def _add(self, plugin):
        logger.debug('action=add_plugin plugin=%s', plugin.name)
        self.plugin_model.append(PluginGrid.create_row(plugin, self._config))
    def _select_first(self):
        self.plugin_list.get_selection().select_iter(self.plugin_model.get_iter_first())
    def _clear(self):
        logger.debug('action=clear_plugin_list')
        self.plugin_model.clear()
class TestObjectPropertiesClass(DefaultObject):
    """Fixture typeclass exercising the *Property descriptor variants
    (attributes, tags, aliases, permissions, tag categories)."""
    attr1 = AttributeProperty(default='attr1')
    attr2 = AttributeProperty(default='attr2', category='attrcategory')
    # autocreate=False: the attribute is not persisted until explicitly set.
    attr3 = AttributeProperty(default='attr3', autocreate=False)
    attr4 = SubAttributeProperty(default='attr4')
    cusattr = CustomizedProperty(default=5)
    tag1 = TagProperty()
    tag2 = TagProperty(category='tagcategory')
    tag3 = SubTagProperty()
    testalias = AliasProperty()
    testperm = PermissionProperty()
    # Plain class attributes, to verify descriptors don't interfere with them.
    awaretest = 5
    settest = 0
    tagcategory1 = TagCategoryProperty('category_tag1')
    tagcategory2 = TagCategoryProperty('category_tag1', 'category_tag2', 'category_tag3')
    # NOTE(review): no decorator visible — this may have been a stripped
    # @property or framework hook; confirm against the original source.
    def base_property(self):
        self.property_initialized = True
class DE94(DeltaE):
    """CIE94 (ΔE*94) color-difference metric.

    kl/k1/k2 are the standard weighting parameters (graphic-arts defaults
    kl=1, k1=0.045, k2=0.015); `space` must resolve to a CIE Lab space.
    """
    NAME = '94'
    def __init__(self, kl: float=1, k1: float=0.045, k2: float=0.015, space: str='lab-d65'):
        self.kl = kl
        self.k1 = k1
        self.k2 = k2
        self.space = space
    def distance(self, color: Color, sample: Color, kl: (float | None)=None, k1: (float | None)=None, k2: (float | None)=None, space: (str | None)=None, **kwargs: Any) -> float:
        """Return the ΔE*94 distance between *color* and *sample*.

        Per-call kl/k1/k2/space override the instance defaults. Raises
        ValueError if *space* is not a CIE Lab color space.
        """
        if (kl is None):
            kl = self.kl
        if (k1 is None):
            k1 = self.k1
        if (k2 is None):
            k2 = self.k2
        if (space is None):
            space = self.space
        if (not isinstance(color.CS_MAP[space], CIELab)):
            raise ValueError('Distance color space must be a CIE Lab color space.')
        (l1, a1, b1) = color.convert(space).coords(nans=False)
        (l2, a2, b2) = sample.convert(space).coords(nans=False)
        # Chroma: C* = sqrt(a² + b²).
        c1 = math.sqrt(((a1 ** 2) + (b1 ** 2)))
        c2 = math.sqrt(((a2 ** 2) + (b2 ** 2)))
        dl = (l1 - l2)
        dc = (c1 - c2)
        da = (a1 - a2)
        db = (b1 - b2)
        # ΔH² derived from Δa², Δb², ΔC² (avoids computing hue angles);
        # mathematically non-negative up to floating-point error.
        dh = (((da ** 2) + (db ** 2)) - (dc ** 2))
        # Weighting functions; reference chroma is taken from the first color.
        sl = 1
        sc = (1 + (k1 * c1))
        sh = (1 + (k2 * c1))
        kc = 1
        kh = 1
        # ΔE94 = sqrt((ΔL/(kL·SL))² + (ΔC/(kC·SC))² + ΔH²/(kH·SH)²).
        # The last term uses dh (= ΔH²) directly, hence no outer square on it.
        return math.sqrt(((((dl / (kl * sl)) ** 2) + ((dc / (kc * sc)) ** 2)) + (dh / ((kh * sh) ** 2))))
# NOTE(review): looks like a stripped @pytest.mark.parametrize decorator —
# confirm against the original source; as written this line is invalid.
.parametrize('sync_mode, expected_full_db, expected_node_class', (('light', False, LightNode), ('fast', True, FullNode), ('full', True, FullNode), ('warp', True, FullNode)))
def test_sync_mode_effect_on_db_and_node_type(sync_mode, expected_full_db, expected_node_class):
    """Each sync mode must select the matching node class and database mode."""
    trinity_config = TrinityConfig(network_id=1)
    eth1_app_config = Eth1AppConfig(trinity_config, sync_mode)
    assert (eth1_app_config.sync_mode == sync_mode)
    assert (eth1_app_config.node_class == expected_node_class)
    if expected_full_db:
        assert (eth1_app_config.database_mode is Eth1DbMode.FULL)
    else:
        assert (eth1_app_config.database_mode is Eth1DbMode.LIGHT)
def get_right_audio_support_and_sampling_rate(audio_format: str, sampling_rate: int, list_audio_formats: List):
    """Resolve the file extension and the closest supported service audio
    format for the requested *audio_format* / *sampling_rate*.

    Falls back to 'mp3' when no format is given. Returns (extension,
    matching format name or None).
    """
    if (not audio_format):
        audio_format = 'mp3'
    right_extension_sampling = next(filter((lambda x: (x[0] == audio_format)), audio_format_list_extensions), None)
    samplings = right_extension_sampling[2]
    if sampling_rate:
        # Pick the supported rate closest to the requested one.
        nearest_sampling = min(samplings, key=(lambda x: abs((x - sampling_rate))))
    else:
        # BUG FIX: the original unconditionally overwrote the min() result
        # with the middle entry, making the requested sampling_rate a dead
        # store; the middle entry is now only the no-preference default.
        nearest_sampling = samplings[floor((len(samplings) / 2))]
    extension = right_extension_sampling[1]
    # The service names wav formats 'riff'(-pcm).
    if ('wav' in audio_format):
        audio_format = audio_format.replace('wav', 'riff')
    if (audio_format == 'riff'):
        audio_format = 'riff-pcm'
    right_audio_format = [format for format in list_audio_formats if all(((formt in format.lower()) for formt in audio_format.split('-')))]
    right_audio_format = next(filter((lambda x: ((f'{nearest_sampling}Hz' in x) or (f'{int((nearest_sampling / 1000))}Khz' in x))), right_audio_format), None)
    return (extension, right_audio_format)
def _build_argument_parser():
description = 'Wrapper script to run rms.'
usage = 'The script must be invoked with minimum three positional arguments:\n\n rms iens project workflow \n\nOptional arguments supported: \n target file [-t][--target-file]\n run path [-r][--run-path] default=rms/model\n import path [-i][--import-path] default=./ \n export path [-e][--export-path] default=./ \n version [-v][--version]\n'
parser = argparse.ArgumentParser(description=description, usage=usage)
parser.add_argument('iens', type=int, help='Realization number')
parser.add_argument('project', help='The RMS project we are running')
parser.add_argument('workflow', help='The rms workflow we intend to run')
parser.add_argument('-r', '--run-path', default='rms/model', help='The directory which will be used as cwd when running rms')
parser.add_argument('-t', '--target-file', default=None, help='name of file which should be created/updated by rms')
parser.add_argument('-i', '--import-path', default='./', help='the prefix of all relative paths when rms is importing')
parser.add_argument('-e', '--export-path', default='./', help='the prefix of all relative paths when rms is exporting')
parser.add_argument('-v', '--version', default=None, help='The version of rms to use')
parser.add_argument('-a', '--allow-no-env', action='store_true', help='Allow RMS to run without a site configured environment')
return parser |
def downgrade():
    """Reverse the migration: drop each table's indexes, then the table,
    in the original dependency-safe order."""
    # (table, [index names]) — exact drop order of the original migration.
    drop_plan = (
        ('ruletarget', ['ix_ruletarget_key', 'ix_ruletarget_id']),
        ('rule', ['ix_rule_key', 'ix_rule_id']),
        ('privacyrequest', ['ix_privacyrequest_status', 'ix_privacyrequest_id', 'ix_privacyrequest_external_id']),
        ('policy', ['ix_policy_key', 'ix_policy_id']),
        ('datasetconfig', ['ix_datasetconfig_id', 'ix_datasetconfig_fides_key']),
        ('storageconfig', ['ix_storageconfig_type', 'ix_storageconfig_name', 'ix_storageconfig_key', 'ix_storageconfig_id']),
        ('executionlog', ['ix_executionlog_status', 'ix_executionlog_privacy_request_id', 'ix_executionlog_id', 'ix_executionlog_dataset_name', 'ix_executionlog_collection_name', 'ix_executionlog_action_type']),
        ('connectionconfig', ['ix_connectionconfig_name', 'ix_connectionconfig_key', 'ix_connectionconfig_id']),
        ('client', ['ix_client_id']),
    )
    for table, index_names in drop_plan:
        for index_name in index_names:
            op.drop_index(op.f(index_name), table_name=table)
        op.drop_table(table)
def import_optional_dependency(name: str, extra: str='', raise_on_missing: bool=True, on_version: str='raise') -> Optional['ModuleType']:
    """Import module *name*, optionally enforcing a minimum version.

    Parameters:
        name: module name to import.
        extra: extra text interpolated into the missing-dependency message.
        raise_on_missing: raise ImportError when absent; otherwise return None.
        on_version: 'raise', 'warn' or 'ignore' — what to do when the
            installed version is older than the minimum in VERSIONS.

    Returns the module, or None (module missing with raise_on_missing=False,
    or too-old version with on_version='warn').
    """
    try:
        module = importlib.import_module(name)
    except ImportError:
        if raise_on_missing:
            # `message` is a module-level template defined elsewhere in this file.
            raise ImportError(message.format(name=name, extra=extra)) from None
        else:
            return None
    minimum_version = VERSIONS.get(name)
    if minimum_version:
        version = _get_version(module)
        if (packaging.version.parse(version) < packaging.version.Version(minimum_version)):
            # NOTE(review): assert is stripped under `python -O`; an explicit
            # ValueError would validate on_version more safely.
            assert (on_version in {'warn', 'raise', 'ignore'})
            msg = version_message.format(minimum_version=minimum_version, name=name, actual_version=version)
            if (on_version == 'warn'):
                warnings.warn(msg, UserWarning)
                return None
            elif (on_version == 'raise'):
                raise ImportError(msg)
    # on_version == 'ignore', version acceptable, or no minimum registered.
    return module
def _check_if_items_are_defined(method):
(method)
def check_items(self, *args, **kwargs):
if (getattr(self, 'items') is None):
raise KeyError('No items are defined in Pagination. Perhaps you forgot to specify it when creating Pagination instance?')
return method(self, *args, **kwargs)
return check_items |
class StatsLabel(QLabel):
    """QLabel that renders its text wrapped in a fixed-size HTML font tag."""
    def __init__(self, text, align_right=False, parent=None):
        # Start empty; the overridden setText below applies the HTML template.
        super(StatsLabel, self).__init__('', parent)
        self.format_text = '<font size=16>%s</font>'
        self.setText(text)
        if align_right:
            self.setAlignment(Qt.AlignRight)
        else:
            self.setAlignment(Qt.AlignLeft)
    def setText(self, text):
        # Every text update goes through the HTML font template.
        super(StatsLabel, self).setText((self.format_text % text))
def main():
    """CLI entry point: parse arguments and run the db-file consistency check."""
    import argparse
    parser = argparse.ArgumentParser(description='Parse a db file, checking for consistency')
    # `db_root_arg` adds the shared --db-root style option (defined elsewhere).
    db_root_arg(parser)
    parser.add_argument('--verbose', action='store_true', help='')
    parser.add_argument('--strict', action='store_true', help='Complain on unresolved entries (ex: <0 candidates>, <const0>)')
    parser.add_argument('fin', help='')
    # Output file is optional; `run` decides behavior when it is omitted.
    parser.add_argument('fout', nargs='?', help='')
    args = parser.parse_args()
    run(args.fin, args.fout, strict=args.strict, verbose=args.verbose)
class TestTransactionTable():
    """Exercises TransactionTable CRUD, flag, proof and description handling
    against a real database context.

    NOTE(review): the bare '.timeout(8)' expressions between methods look like
    stripped '@pytest.mark.timeout(8)' decorators (pytest-timeout plugin) —
    restore the '@pytest.mark' prefix against the original source.
    """

    def setup_class(cls):
        # One shared db context/store for the whole class; rows are wiped
        # per-test in setup_method.
        cls.db_context = _db_context()
        cls.store = TransactionTable(cls.db_context)
        # Random hash that is never inserted — used to probe "missing row" paths.
        cls.tx_hash = os.urandom(32)

    def teardown_class(cls):
        cls.store.close()
        cls.db_context.close()

    def setup_method(self):
        # Start every test from an empty Transactions table.
        db = self.store._db
        db.execute(f'DELETE FROM Transactions')
        db.commit()

    def _get_store_hashes(self) -> List[bytes]:
        """Return the tx hash of every row currently stored."""
        return [row[0] for row in self.store.read_metadata()]

    def test_proof_serialization(self):
        """A TxProof survives a pack/unpack round trip unchanged."""
        proof1 = TxProof(position=10, branch=[os.urandom(32) for i in range(10)])
        raw = self.store._pack_proof(proof1)
        proof2 = self.store._unpack_proof(raw)
        assert (proof1.position == proof2.position)
        assert (proof1.branch == proof2.branch)

    .timeout(8)
    def test_create1(self):
        """Creating a row stores its state flag, metadata and bytedata."""
        bytedata_1 = os.urandom(10)
        tx_hash = bitcoinx.double_sha256(bytedata_1)
        metadata_1 = TxData(height=None, fee=None, position=None, date_added=1, date_updated=1)
        with SynchronousWriter() as writer:
            self.store.create([(tx_hash, metadata_1, bytedata_1, TxFlags.StateDispatched, None)], completion_callback=writer.get_callback())
            assert writer.succeeded()
        (_tx_hash, flags, _metadata) = self.store.read_metadata(tx_hashes=[tx_hash])[0]
        assert (TxFlags.StateDispatched == (flags & TxFlags.STATE_MASK))
        (_tx_hash, bytedata_2, _flags, metadata_2) = self.store.read(tx_hashes=[tx_hash])[0]
        assert (metadata_1 == metadata_2)
        assert (bytedata_1 == bytedata_2)

    .timeout(8)
    def test_create2(self) -> None:
        """Bulk-creating rows makes all of their hashes readable back."""
        to_add = []
        for i in range(10):
            tx_bytes = os.urandom(10)
            tx_hash = bitcoinx.double_sha256(tx_bytes)
            tx_data = TxData(height=1, fee=2, position=None, date_added=1, date_updated=1)
            to_add.append((tx_hash, tx_data, tx_bytes, TxFlags.Unset, None))
        with SynchronousWriter() as writer:
            self.store.create(to_add, completion_callback=writer.get_callback())
            assert writer.succeeded()
        existing_tx_hashes = set(self._get_store_hashes())
        added_tx_hashes = set((t[0] for t in to_add))
        assert (added_tx_hashes == existing_tx_hashes)

    .timeout(8)
    def test_update(self):
        """Updating rows replaces metadata while preserving any bytedata."""
        to_add = []
        for i in range(10):
            tx_bytes = os.urandom(10)
            tx_hash = bitcoinx.double_sha256(tx_bytes)
            tx_data = TxData(height=None, fee=2, position=None, date_added=1, date_updated=1)
            # Alternate rows with and without bytedata to exercise both paths.
            if (i % 2):
                to_add.append((tx_hash, tx_data, tx_bytes, TxFlags.HasByteData, None))
            else:
                to_add.append((tx_hash, tx_data, None, TxFlags.Unset, None))
        with SynchronousWriter() as writer:
            self.store.create(to_add, completion_callback=writer.get_callback())
            assert writer.succeeded()
        to_update = []
        for (tx_hash, metadata, tx_bytes, flags, description) in to_add:
            tx_metadata = TxData(height=1, fee=2, position=None, date_added=1, date_updated=1)
            to_update.append((tx_hash, tx_metadata, tx_bytes, flags))
        with SynchronousWriter() as writer:
            self.store.update(to_update, completion_callback=writer.get_callback())
            assert writer.succeeded()
        # Cross-check every stored row against the update that produced it.
        for (get_tx_hash, bytedata_get, flags_get, metadata_get) in self.store.read():
            for (update_tx_hash, update_metadata, update_tx_bytes, update_flags) in to_update:
                if (update_tx_hash == get_tx_hash):
                    assert (metadata_get == update_metadata)
                    assert (bytedata_get == update_tx_bytes)
                    continue

    .timeout(8)
    def test_update__entry_with_set_bytedata_flag(self):
        """Clearing bytedata while HasByteData is set must be rejected."""
        tx_bytes = os.urandom(10)
        tx_hash = bitcoinx.double_sha256(tx_bytes)
        tx_data = TxData(height=None, fee=2, position=None, date_added=1, date_updated=1)
        row = (tx_hash, tx_data, tx_bytes, TxFlags.HasByteData, None)
        with SynchronousWriter() as writer:
            self.store.create([row], completion_callback=writer.get_callback())
            assert writer.succeeded()
        with pytest.raises(AssertionError):
            self.store.update([(tx_hash, tx_data, None, TxFlags.HasByteData)])

    .timeout(8)
    def test_update__entry_with_unset_bytedata_flag(self):
        """Providing bytedata without the HasByteData flag must be rejected."""
        tx_bytes = os.urandom(10)
        tx_hash = bitcoinx.double_sha256(tx_bytes)
        tx_data = TxData(height=None, fee=2, position=None, date_added=1, date_updated=1)
        row = (tx_hash, tx_data, tx_bytes, TxFlags.HasByteData, None)
        with SynchronousWriter() as writer:
            self.store.create([row], completion_callback=writer.get_callback())
            assert writer.succeeded()
        with pytest.raises(AssertionError):
            self.store.update([(tx_hash, tx_data, tx_bytes, TxFlags.Unset)])

    .timeout(8)
    def test_update__entry_with_magic_bytedata_and_set_flag(self):
        """The 'leave bytedata untouched' sentinel requires HasByteData set."""
        tx_bytes = os.urandom(10)
        tx_hash = bitcoinx.double_sha256(tx_bytes)
        tx_data = TxData(height=None, fee=2, position=None, date_added=1, date_updated=1)
        row = (tx_hash, tx_data, tx_bytes, TxFlags.HasByteData, None)
        with SynchronousWriter() as writer:
            self.store.create([row], completion_callback=writer.get_callback())
            assert writer.succeeded()
        with pytest.raises(AssertionError):
            self.store.update([(tx_hash, tx_data, MAGIC_UNTOUCHED_BYTEDATA, TxFlags.Unset)])

    .timeout(8)
    def test_update__with_valid_magic_bytedata(self):
        """The sentinel leaves the original bytedata intact after an update."""
        tx_bytes = os.urandom(10)
        tx_hash = bitcoinx.double_sha256(tx_bytes)
        tx_data = TxData(height=None, fee=2, position=None, date_added=1, date_updated=1)
        row = (tx_hash, tx_data, tx_bytes, TxFlags.HasByteData, None)
        with SynchronousWriter() as writer:
            self.store.create([row], completion_callback=writer.get_callback())
            assert writer.succeeded()
        with SynchronousWriter() as writer:
            self.store.update([(tx_hash, tx_data, MAGIC_UNTOUCHED_BYTEDATA, TxFlags.HasByteData)], completion_callback=writer.get_callback())
            assert writer.succeeded()
        rows = self.store.read()
        assert (1 == len(rows))
        (get_tx_hash, bytedata_get, flags_get, metadata_get) = rows[0]
        assert (tx_bytes == bytedata_get)
        assert ((flags_get & TxFlags.HasByteData) != 0)

    .timeout(8)
    def test_update_flags(self):
        """update_flags applies (flags, mask) pairs correctly."""
        bytedata = os.urandom(10)
        tx_hash = bitcoinx.double_sha256(bytedata)
        metadata = TxData(height=1, fee=2, position=None, date_added=1, date_updated=1)
        with SynchronousWriter() as writer:
            self.store.create([(tx_hash, metadata, bytedata, TxFlags.Unset, None)], completion_callback=writer.get_callback())
            assert writer.succeeded()
        # Creation derives the Has* flags from the provided data.
        expected_flags = ((TxFlags.HasByteData | TxFlags.HasFee) | TxFlags.HasHeight)
        (_tx_hash, flags, _metadata) = self.store.read_metadata(tx_hashes=[tx_hash])[0]
        assert (expected_flags == flags), f'expected {expected_flags!r}, got {TxFlags.to_repr(flags)}'
        # Masking in a state flag preserves the masked bits.
        flags = TxFlags.StateReceived
        mask = ((TxFlags.METADATA_FIELD_MASK | TxFlags.HasByteData) | TxFlags.HasProofData)
        date_updated = 1
        with SynchronousWriter() as writer:
            self.store.update_flags([(tx_hash, flags, mask, date_updated)], completion_callback=writer.get_callback())
            assert writer.succeeded()
        (_tx_hash, flags_get, _metadata) = self.store.read_metadata(tx_hashes=[tx_hash])[0]
        expected_flags |= TxFlags.StateReceived
        assert (expected_flags == flags_get), f'{TxFlags.to_repr(expected_flags)} != {TxFlags.to_repr(flags_get)}'
        # An Unset mask wipes everything except the newly applied flags.
        flags = TxFlags.StateReceived
        mask = TxFlags.Unset
        date_updated = 1
        with SynchronousWriter() as writer:
            self.store.update_flags([(tx_hash, flags, mask, date_updated)], completion_callback=writer.get_callback())
            assert writer.succeeded()
        (_tx_hash, flags, _metadata) = self.store.read_metadata(tx_hashes=[tx_hash])[0]
        assert (TxFlags.StateReceived == flags)

    .timeout(8)
    def test_delete(self) -> None:
        """Deleting all created hashes empties the store."""
        to_add = []
        for i in range(10):
            bytedata = os.urandom(10)
            tx_hash = bitcoinx.double_sha256(bytedata)
            metadata = TxData(height=1, fee=2, position=None, date_added=1, date_updated=1)
            to_add.append((tx_hash, metadata, bytedata, TxFlags.Unset, None))
        with SynchronousWriter() as writer:
            self.store.create(to_add, completion_callback=writer.get_callback())
            assert writer.succeeded()
        add_hashes = set((t[0] for t in to_add))
        get_hashes = set(self._get_store_hashes())
        assert (add_hashes == get_hashes)
        with SynchronousWriter() as writer:
            self.store.delete(add_hashes, completion_callback=writer.get_callback())
            assert writer.succeeded()
        get_hashes = self._get_store_hashes()
        assert (0 == len(get_hashes))

    .timeout(8)
    def test_get_all_pending(self):
        """Rows created from real transaction hex are all retrievable."""
        get_tx_hashes = set([])
        for tx_hex in (tx_hex_1, tx_hex_2):
            bytedata = bytes.fromhex(tx_hex)
            tx_hash = bitcoinx.double_sha256(bytedata)
            metadata = TxData(height=1, fee=2, position=None, date_added=1, date_updated=1)
            with SynchronousWriter() as writer:
                self.store.create([(tx_hash, metadata, bytedata, TxFlags.Unset, None)], completion_callback=writer.get_callback())
                assert writer.succeeded()
            get_tx_hashes.add(tx_hash)
        result_tx_hashes = set(self._get_store_hashes())
        assert (get_tx_hashes == result_tx_hashes)

    .timeout(8)
    def test_read(self):
        """read() filters by hash list and by (flags, mask) combinations."""
        to_add = []
        for i in range(10):
            tx_bytes = os.urandom(10)
            tx_hash = bitcoinx.double_sha256(tx_bytes)
            tx_data = TxData(height=None, fee=2, position=None, date_added=1, date_updated=1)
            to_add.append((tx_hash, tx_data, tx_bytes, TxFlags.HasFee, None))
        with SynchronousWriter() as writer:
            self.store.create(to_add, completion_callback=writer.get_callback())
            assert writer.succeeded()
        tx_hash_1 = to_add[0][0]
        matches = self.store.read(tx_hashes=[tx_hash_1])
        assert (tx_hash_1 == matches[0][0])
        assert self.store.read(TxFlags.HasByteData, TxFlags.HasByteData, [tx_hash_1])
        # Unknown hash yields no rows.
        matches = self.store.read(tx_hashes=[b'aaaa'])
        assert (0 == len(matches))
        # Flags without a mask match every row created with HasFee.
        matches = self.store.read(flags=TxFlags.HasFee)
        assert (10 == len(matches))
        matches = self.store.read(flags=TxFlags.Unset, mask=TxFlags.HasHeight)
        assert (10 == len(matches))
        matches = self.store.read(flags=TxFlags.HasFee, mask=TxFlags.HasFee)
        assert (10 == len(matches))
        matches = self.store.read(flags=TxFlags.Unset, mask=TxFlags.HasFee)
        assert (0 == len(matches))

    .timeout(8)
    def test_read_metadata(self) -> None:
        """read_metadata returns only rows for known hashes, with their data."""
        all_tx_hashes = []
        datas = []
        for i in range(5):
            bytedata = os.urandom(10)
            tx_hash = bitcoinx.double_sha256(bytedata)
            metadata = TxData(height=(i * 100), fee=(i * 1000), position=None, date_added=1, date_updated=1)
            datas.append((tx_hash, metadata, bytedata, TxFlags.Unset, None))
            all_tx_hashes.append(tx_hash)
        with SynchronousWriter() as writer:
            self.store.create(datas, completion_callback=writer.get_callback())
            assert writer.succeeded()
        # b'' is an unknown hash — only the two real hashes should match.
        select_tx_hashes = [all_tx_hashes[0], all_tx_hashes[3], b'']
        rowdatas = self.store.read_metadata(tx_hashes=select_tx_hashes)
        assert (len(rowdatas) == 2)
        for rowdata in rowdatas:
            tx_hash = rowdata[0]
            tx_flags = rowdata[1]
            metadata = rowdata[2]
            rowidx = all_tx_hashes.index(tx_hash)
            assert (metadata.height == (rowidx * 100))
            assert (metadata.fee == (rowidx * 1000))
            assert (metadata.position is None)

    .timeout(8)
    def test_update_metadata(self) -> None:
        """update_metadata rewrites the height/fee fields named in its mask."""
        tx_hashes = []
        datas = []
        for i in range(5):
            bytedata = os.urandom(10)
            tx_hash = bitcoinx.double_sha256(bytedata)
            metadata = TxData(height=(i * 100), fee=(i * 1000), position=None, date_added=1, date_updated=1)
            datas.append((tx_hash, metadata, bytedata, TxFlags.Unset, None))
            tx_hashes.append(tx_hash)
        with SynchronousWriter() as writer:
            self.store.create(datas, completion_callback=writer.get_callback())
            assert writer.succeeded()
        updates = []
        for i in range(5):
            tx_hash = tx_hashes[i]
            metadata = TxData(height=(i * 200), fee=(i * 2000), position=None, date_added=1, date_updated=1)
            updates.append((tx_hash, metadata, (TxFlags.HasHeight | TxFlags.HasFee)))
        with SynchronousWriter() as writer:
            self.store.update_metadata(updates, completion_callback=writer.get_callback())
            assert writer.succeeded()
        select_tx_hashes = [tx_hashes[0], tx_hashes[3], b'']
        rowdatas = self.store.read_metadata(tx_hashes=select_tx_hashes)
        assert (len(rowdatas) == 2)
        for rowdata in rowdatas:
            tx_hash = rowdata[0]
            tx_flags = rowdata[1]
            metadata = rowdata[2]
            rowidx = tx_hashes.index(tx_hash)
            assert (metadata.height == (rowidx * 200))
            assert (metadata.fee == (rowidx * 2000))
            assert (metadata.position is None)

    .timeout(8)
    def test_proof(self):
        """A stored merkle proof reads back for its hash and no other."""
        bytedata = os.urandom(10)
        tx_hash = bitcoinx.double_sha256(bytedata)
        metadata = TxData(height=1, fee=2, position=None, date_added=1, date_updated=1)
        with SynchronousWriter() as writer:
            self.store.create([(tx_hash, metadata, bytedata, 0, None)], completion_callback=writer.get_callback())
            assert writer.succeeded()
        position1 = 10
        merkle_branch1 = [os.urandom(32) for i in range(10)]
        proof = TxProof(position1, merkle_branch1)
        date_updated = 1
        with SynchronousWriter() as writer:
            self.store.update_proof([(tx_hash, proof, date_updated)], completion_callback=writer.get_callback())
            assert writer.succeeded()
        # The class-level random hash has no proof.
        rows = self.store.read_proof([self.tx_hash])
        assert (len(rows) == 0)
        (db_tx_hash, (tx_position2, merkle_branch2)) = self.store.read_proof([tx_hash])[0]
        assert (db_tx_hash == tx_hash)
        assert (position1 == tx_position2)
        assert (merkle_branch1 == merkle_branch2)

    .timeout(8)
    def test_labels(self):
        """Descriptions can be set, replaced with None, and filtered by hash."""
        bytedata_1 = os.urandom(10)
        tx_hash_1 = bitcoinx.double_sha256(bytedata_1)
        metadata_1 = TxData(height=1, fee=2, position=None, date_added=1, date_updated=1)
        bytedata_2 = os.urandom(10)
        tx_hash_2 = bitcoinx.double_sha256(bytedata_2)
        metadata_2 = TxData(height=1, fee=2, position=None, date_added=1, date_updated=1)
        with SynchronousWriter() as writer:
            self.store.create([(tx_hash_1, metadata_1, bytedata_1, 0, None), (tx_hash_2, metadata_2, bytedata_2, 0, None)], completion_callback=writer.get_callback())
            assert writer.succeeded()
        with SynchronousWriter() as writer:
            self.store.update_descriptions([('tx 1', tx_hash_1)], completion_callback=writer.get_callback())
            assert writer.succeeded()
        rows = self.store.read_descriptions()
        assert (len(rows) == 1)
        assert (len([(r[1] == 'tx 1') for r in rows if (r[0] == tx_hash_1)]) == 1)
        # Clearing tx 1's description while setting tx 2's in one call.
        with SynchronousWriter() as writer:
            self.store.update_descriptions([(None, tx_hash_1), ('tx 2', tx_hash_2)], completion_callback=writer.get_callback())
            assert writer.succeeded()
        rows = self.store.read_descriptions([tx_hash_2])
        assert (len(rows) == 1)
        assert ((rows[0][0] == tx_hash_2) and (rows[0][1] == 'tx 2'))
        rows = self.store.read_descriptions([self.tx_hash])
        assert (len(rows) == 0)
# NOTE(review): the two bare string expressions below look like stripped
# '@mock.patch(...)' decorators for the test — restore the '@mock.patch'
# prefix (the bottom decorator binds to the first argument, `read`).
('copr_cli.main.next_page')
('configparser.ConfigParser.read')
def test_list_packages(read, next_page, capsys):
    """List packages for a project and compare the JSON printed to stdout."""
    # Pretend no config files exist so client defaults are used.
    read.return_value = []
    response_data = json.loads(read_res('list_packages_response.json'))
    expected_output = read_res('list_packages_expected.json')
    # NOTE(review): the URL argument appears truncated here — the positional
    # after responses.GET should be the mocked endpoint URL followed by
    # json=response_data; verify against the original source.
    responses.add(responses.GET, ' json=response_data, status=202)
    # Stop pagination after the first page.
    next_page.return_value = None
    main.main(argv=['list-packages', 'praiskup/ping'])
    (out, _) = capsys.readouterr()
    assert (json.loads(out) == json.loads(expected_output))
def dump(config: Dataclass, stream=None, omit_defaults: bool=False, **kwargs):
    """Serialize *config* and hand it to ``save_config``.

    When *omit_defaults* is true, entries whose values equal the dataclass
    defaults are stripped from the encoded mapping before saving, producing
    a minimal configuration file.

    Extra keyword arguments are forwarded to ``save_config`` unchanged.
    """
    encoded = encode(config)
    if omit_defaults:
        default_values = encode(utils.get_defaults_dict(config))
        encoded = utils.remove_matching(encoded, default_values)
    return save_config(encoded, stream, **kwargs)
# NOTE(review): stripped decorator — likely '@pytest.mark.xfail(...)';
# restore the '@pytest.mark' prefix.
.xfail(reason='we use two spaces, Apple uses tabs')
def test_apple_formatting(parametrized_pl):
    """Round-trip TESTDATA through loads/dumps and expect byte-identical
    output (known to fail: indentation style differs from Apple's)."""
    (pl, use_builtin_types) = parametrized_pl
    # The fixture's parsed plist is discarded; reload TESTDATA with the
    # requested type mode so the round trip starts from the raw bytes.
    pl = plistlib.loads(TESTDATA, use_builtin_types=use_builtin_types)
    data = plistlib.dumps(pl, use_builtin_types=use_builtin_types)
    assert (data == TESTDATA)
class PanelSlide(Panel):
    """Collapsible panel with a clickable title bar and expand/collapse icon.

    NOTE(review): `panel`, `options` and `dom` below read like they were
    originally decorated with '@property' — confirm against upstream.
    """
    name = 'Slide Panel'
    _option_cls = OptPanel.OptionPanelSliding
    tag = 'div'

    def __init__(self, page: primitives.PageModel, components: Optional[List[Html.Html]], title: Union[(Html.Html, str)], color: Optional[str], width: types.SIZE_TYPE, height: types.SIZE_TYPE, html_code: Optional[str], helper, options: types.OPTION_TYPE, profile: types.PROFILE_TYPE):
        # Icon font family must be loaded for the expand/collapse glyphs.
        self.requirements = (page.icons.family,)
        super(PanelSlide, self).__init__(page, components, None, color, width, height, html_code, helper, options, profile)
        self.add_helper(helper)
        self.icon = self.page.ui.icon('').css({'display': 'inline-block', 'margin': '0 5px 5px 0', 'line-height': ('%spx' % Defaults.LINE_HEIGHT), 'font-size': ('%spx' % Defaults.BIG_ICONS)})
        # Accept either a ready-made component or a plain string title.
        if hasattr(title, 'options'):
            self.text = title
            self.text.options.managed = False
            self.text.style.css.display = 'inline-block'
        else:
            self.text = self.page.ui.title(title, html_code=('%s_title' % self.htmlCode)).css({'display': 'inline-block', 'margin': 0})
            self.text.style.css.font_size = self.page.body.style.globals.font.normal((- 2))
        self.title = self.page.ui.div([self.icon, self.text])
        self.title.options.managed = False
        self.title.style.css.white_space = 'nowrap'
        self.title.style.css.padding = '0 5px 0 0'
        # Title row becomes the first value; click handler lists start empty.
        (self._vals, self.__clicks, self.__clicks_open) = (([self.title] + self._vals), [], [])

    def panel(self):
        # The content area (index 0 is the title row).
        return self.val[1]

    def options(self) -> OptPanel.OptionPanelSliding:
        return super().options

    def dom(self) -> JsHtmlPanels.JsHtmlSlidingPanel:
        # Lazily created JS DOM wrapper for this component.
        if (self._dom is None):
            self._dom = JsHtmlPanels.JsHtmlSlidingPanel(self, page=self.page)
        return self._dom

    def click(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=None, source_event: Optional[str]=None, on_ready: bool=False):
        """Register JS snippets to run on every toggle click."""
        if (not isinstance(js_funcs, list)):
            js_funcs = [js_funcs]
        self.__clicks = js_funcs
        return self

    def open(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=None, on_ready: str=False):
        """Register JS snippets to run only when the panel is being opened."""
        if (not isinstance(js_funcs, list)):
            js_funcs = [js_funcs]
        # Guard on the current icon class: only fire when currently expanded.
        self.__clicks_open = [self.page.js.if_((self.icon.dom.content.toString().indexOf(self.options.icon_expanded.split(' ')[(- 1)]) >= 0), js_funcs, profile=profile).toStr()]
        return self

    def __add__(self, component: Html.Html):
        # `panel += component` appends to the content area.
        self.val[1] += component
        return self

    def __str__(self):
        self.title.style.css.text_align = self.options.title_align
        if (self.options.title_align == 'right'):
            self.text.style.css.margin_right = 5
        # Pick initial icon and visibility from the expanded option; remember
        # the current/next icon pair for the toggle handler.
        if self.options.expanded:
            icon_change = self.options.icon_closed
            icon_current = self.options.icon_expanded
            self.icon.set_icon(self.options.icon_expanded)
        else:
            icon_change = self.options.icon_expanded
            icon_current = self.options.icon_closed
            self._vals[1].style.css.display = 'none'
            self.icon.set_icon(self.options.icon_closed)
        if (self.options.icon_position == 'right'):
            self.icon.style.css.float = 'right'
        # Base toggle behaviour plus icon swap, then user handlers.
        click_frg = [self.page.js.getElementsByName(('panel_%s' % self.htmlCode)).first.toggle()]
        if (icon_change and icon_current):
            click_frg.append(self.icon.dom.switchClass(icon_current, icon_change))
        if (self.options.click_type == 'title'):
            self.title.style.css.cursor = 'pointer'
            self.title.click(((self.__clicks + click_frg) + self.__clicks_open))
        elif (self.options.click_type == 'icon'):
            self.icon.click(((self.__clicks + click_frg) + self.__clicks_open))
        str_div = ''.join([(v.html() if hasattr(v, 'html') else str(v)) for v in self.val])
        return ('<%s %s>%s</%s>%s' % (self.tag, self.get_attrs(css_class_names=self.style.get_classes()), str_div, self.tag, self.helper))
class FlexEdge(BaseEdge):
    """Edge that renders a living-hinge (flex cut) pattern of dashed
    vertical lines across the edge length."""
    char = 'X'
    description = 'Flex cut'

    def __call__(self, x, h, **kw):
        # dist: horizontal spacing between cut lines; connection: uncut
        # bridge between dashes; width: target dash length.
        dist = self.settings.distance
        connection = self.settings.connection
        width = self.settings.width
        burn = self.boxes.burn
        h += (2 * burn)
        lines = int((x // dist))
        # Center the pattern by splitting the remainder on both sides.
        leftover = (x - (lines * dist))
        sections = max(int(((h - connection) // width)), 1)
        sheight = (((h - connection) / sections) - connection)
        self.ctx.stroke()
        for i in range(1, lines):
            pos = ((i * dist) + (leftover / 2))
            # Odd columns start their dashes at the bottom edge; even columns
            # are phase-shifted so neighbouring cuts interleave.
            if (i % 2):
                self.ctx.move_to(pos, 0)
                self.ctx.line_to(pos, (connection + sheight))
                for j in range(((sections - 1) // 2)):
                    self.ctx.move_to(pos, ((((2 * j) + 1) * sheight) + (((2 * j) + 2) * connection)))
                    self.ctx.line_to(pos, (((2 * j) + 3) * (sheight + connection)))
                if (not (sections % 2)):
                    # Even section count: close with a dash reaching the top.
                    self.ctx.move_to(pos, ((h - sheight) - connection))
                    self.ctx.line_to(pos, h)
            elif (sections % 2):
                # Even column, odd section count: dashes run top-down.
                self.ctx.move_to(pos, h)
                self.ctx.line_to(pos, ((h - connection) - sheight))
                for j in range(((sections - 1) // 2)):
                    self.ctx.move_to(pos, (h - ((((2 * j) + 1) * sheight) + (((2 * j) + 2) * connection))))
                    self.ctx.line_to(pos, (h - (((2 * j) + 3) * (sheight + connection))))
            else:
                # Even column, even section count.
                for j in range((sections // 2)):
                    self.ctx.move_to(pos, ((h - connection) - ((2 * j) * (sheight + connection))))
                    self.ctx.line_to(pos, (h - ((2 * (j + 1)) * (sheight + connection))))
        self.ctx.stroke()
        # Finally draw the edge baseline itself and advance the turtle.
        self.ctx.move_to(0, 0)
        self.ctx.line_to(x, 0)
        self.ctx.translate(*self.ctx.get_current_point())
# NOTE(review): stripped BDD step decorator — likely behave's '@then(...)'
# or '@step(...)' wrapping this pattern; restore the decorator prefix.
(u'build of {distgit} DistGit namespaced {package_name} package from {committish} {committish_type} in {namespace} is done')
def step_build_from_fork(context, distgit, package_name, committish, committish_type, namespace):
    """Run a namespaced DistGit build via the CLI and wait for it to succeed."""
    _ = committish_type  # captured by the step pattern but unused here
    distgit = distgit.lower()
    build = context.cli.run_build(['build-distgit', '--name', package_name, '--distgit', distgit, '--commit', committish, '--namespace', namespace, context.last_project_name])
    context.cli.wait_success_build(build)
class EvAdventureNPC(LivingMixin, DefaultCharacter):
    """Non-player character whose ability scores all derive from hit dice.

    NOTE(review): the stat methods below (strength .. hp_max) read like they
    were originally decorated with '@property' — confirm against upstream.
    """
    is_pc = False
    # Core stats stored as Evennia Attributes (autocreate off: defaults are
    # served without writing a db row until explicitly set).
    hit_dice = AttributeProperty(default=1, autocreate=False)
    armor = AttributeProperty(default=1, autocreate=False)
    morale = AttributeProperty(default=9, autocreate=False)
    hp_multiplier = AttributeProperty(default=4, autocreate=False)
    hp = AttributeProperty(default=None, autocreate=False)
    allegiance = AttributeProperty(default=Ability.ALLEGIANCE_HOSTILE, autocreate=False)
    is_idle = AttributeProperty(default=False, autocreate=False)
    weapon = AttributeProperty(default=get_bare_hands, autocreate=False)
    coins = AttributeProperty(default=1, autocreate=False)
    group = TagProperty('npcs')

    def strength(self):
        # All six abilities equal the NPC's hit dice.
        return self.hit_dice

    def dexterity(self):
        return self.hit_dice

    def constitution(self):
        return self.hit_dice

    def intelligence(self):
        return self.hit_dice

    def wisdom(self):
        return self.hit_dice

    def charisma(self):
        return self.hit_dice

    def hp_max(self):
        return (self.hit_dice * self.hp_multiplier)

    def at_object_creation(self):
        """Initialize hit points and group tag when the NPC is first created."""
        self.hp = self.hp_max
        self.tags.add('npcs', category='group')

    def at_attacked(self, attacker, **kwargs):
        # Hook: called when attacked; subclasses override for reactions.
        pass

    def ai_next_action(self, **kwargs):
        # Hook: AI decision point; subclasses implement behaviour.
        pass
def check_password_formatting(form, field):
    """WTForms-style validator enforcing minimum password strength.

    Appends a human-readable message to ``field.errors`` for every rule
    the candidate password violates.

    Args:
        form: the enclosing form (unused; kept for the validator signature).
        field: field whose ``data`` holds the candidate password and whose
            ``errors`` list is extended in place.

    Returns:
        bool: True when the password satisfies all rules, else False.
    """
    ok = True
    min_length = 6  # renamed from `min` to avoid shadowing the builtin
    if (len(field.data) < min_length):
        field.errors.append('Password must be more than {} characters.'.format(min_length))
        ok = False
    # The message requires BOTH cases. The original used `and`, which only
    # rejected passwords containing no letters at all (De Morgan bug) —
    # e.g. 'abcdefgh' passed despite having no uppercase character.
    if ((not any((s.isupper() for s in field.data))) or (not any((s.islower() for s in field.data)))):
        field.errors.append('Password must contain upper and lower characters.')
        ok = False
    return ok
class CGPoint(object):
    """Debugger summary helper rendering a CGPoint-like value as '{x, y}'.

    The wrapped *element* must expose GetChildMemberWithName(name), and the
    returned children must expose GetValue().
    """

    def __init__(self, element):
        super(CGPoint, self).__init__()
        self.element = element

    def summary(self):
        """Return the point formatted as '{x, y}' with float coordinates."""
        coords = [
            float(self.element.GetChildMemberWithName(axis).GetValue())
            for axis in ('x', 'y')
        ]
        return '{{{}, {}}}'.format(coords[0], coords[1])
# NOTE(review): the bare '_production' looks like a stripped '@_production'
# class decorator — confirm against the original source.
_production
class AstNode(AstNodeBase, ProductionOps):
    """Base AST node wired into the attribute-grammar production framework."""
    # Numeric node id as assigned by the compiler's AST output.
    id: int
    # Synthesized/inherited attributes are resolved lazily by the framework.
    cfg = synthesized(default=UndefinedAttribute('CFG not defined'))
    stage1_context: Stage1Context = inherited()
    stage2_context: Stage2Context = inherited()
    _solc_version = inherited()

    def resolve_reference(self, node_id):
        """Look up another AST node by numeric id; None when absent."""
        return self.root().ast_nodes_by_id().get(node_id, None)

    def __str__(self):
        # Human-readable location string for diagnostics.
        return f'{self.__class__.__qualname__} [{self.src}; line {self.src_line}]'
def test_one_form(M, f):
    """Assembling action(M, f) yields a Cofunction whose per-subfunction
    data sums match the expected 0.5-scaled space sizes."""
    one_form = assemble(action(M, f))
    assert isinstance(one_form, Cofunction)
    # NOTE(review): the loop variable shadows the `f` argument — intentional
    # here since the argument is no longer needed, but worth renaming.
    for f in one_form.subfunctions:
        if (f.function_space().rank == 2):
            # Tensor-valued space: the expected sum scales with the shape total.
            assert (abs((f.dat.data.sum() - (0.5 * sum(f.function_space().shape)))) < 1e-12)
        else:
            assert (abs((f.dat.data.sum() - (0.5 * f.function_space().value_size))) < 1e-12)
def utf8(s, errors='replace'):
    """Coerce *s* to a text (unicode) string.

    Bytes are decoded as UTF-8 using the *errors* policy; any other
    non-string object is converted via ``str``. Works on both Python 2
    and Python 3.

    Args:
        s: value to coerce (bytes, text, or arbitrary object).
        errors: codec error handler name (default 'replace').

    Returns:
        A text string.
    """
    if (sys.version_info[0] <= 2):
        # Python 2: `str` is a byte string; decode it (and buffers) as UTF-8.
        if isinstance(s, (str, buffer)):
            return unicode(s, 'utf-8', errors=errors)
        elif (not isinstance(s, unicode)):
            return unicode(str(s))
    elif isinstance(s, bytes):
        # Fix: honour the *errors* argument here too — the Python 2 branch
        # already did, but this path previously used strict decoding and
        # raised UnicodeDecodeError on invalid UTF-8.
        return s.decode('utf-8', errors=errors)
    elif (not isinstance(s, str)):
        return str(s)
    return s
class TestBaseTCFQuery():
    """Tests for the TCF base query: purpose mapping and purpose overrides.

    NOTE(review): the bare '.usefixtures(...)' and '.parametrize(...)'
    expressions look like stripped '@pytest.mark.*' decorators — restore
    the '@pytest.mark' prefix.
    """

    .usefixtures('emerse_system')
    def test_get_matching_privacy_declarations_enable_purpose_override_is_false(self, db):
        """Without overrides, declarations map to their canonical TCF purposes."""
        (declarations, _, _) = get_tcf_base_query_and_filters(db)
        assert (declarations.count() == 13)
        mapping = {declaration.data_use: declaration.purpose for declaration in declarations}
        assert (mapping == {'marketing.advertising.serving': None, 'essential.service.security': None, 'essential.fraud_detection': None, 'analytics.reporting.campaign_insights': 9, 'analytics.reporting.content_performance': 8, 'analytics.reporting.ad_performance': 7, 'marketing.advertising.frequency_capping': 2, 'marketing.advertising.first_party.contextual': 2, 'marketing.advertising.negative_targeting': 2, 'marketing.advertising.first_party.targeted': 4, 'marketing.advertising.third_party.targeted': 4, 'marketing.advertising.profiling': 3, 'functional.storage': 1})

    .parametrize('override_fixture', ['enable_override_vendor_purposes', 'enable_override_vendor_purposes_api_set'])
    def test_privacy_declaration_purpose_overrides(self, override_fixture, request, db, emerse_system):
        """Purpose overrides drop excluded purposes and rewrite legal bases,
        except for declarations marked as having an inflexible legal basis."""
        request.getfixturevalue(override_fixture)
        # Pin purposes 7 and 9 as inflexible so overrides cannot touch them.
        purpose_7_decl = next((decl for decl in emerse_system.privacy_declarations if (decl.data_use == 'analytics.reporting.ad_performance')))
        purpose_7_decl.flexible_legal_basis_for_processing = False
        purpose_7_decl.save(db)
        purpose_9_decl = next((decl for decl in emerse_system.privacy_declarations if (decl.data_use == 'analytics.reporting.campaign_insights')))
        purpose_9_decl.flexible_legal_basis_for_processing = False
        purpose_9_decl.save(db)
        TCFPurposeOverride.create(db, data={'purpose': 1, 'is_included': True, 'required_legal_basis': 'Consent'})
        TCFPurposeOverride.create(db, data={'purpose': 2, 'is_included': False})
        TCFPurposeOverride.create(db, data={'purpose': 3, 'is_included': True, 'required_legal_basis': None})
        TCFPurposeOverride.create(db, data={'purpose': 4, 'is_included': True, 'required_legal_basis': 'Legitimate interests'})
        TCFPurposeOverride.create(db, data={'purpose': 7, 'is_included': True, 'required_legal_basis': 'Consent'})
        TCFPurposeOverride.create(db, data={'purpose': 9, 'is_included': False})
        (declarations, _, _) = get_tcf_base_query_and_filters(db)
        # Purpose 7 keeps 'Legitimate interests' despite the Consent override
        # because its legal basis was marked inflexible above.
        legal_basis_overrides = {declaration.purpose: declaration.legal_basis_for_processing for declaration in declarations if declaration.purpose}
        assert (legal_basis_overrides == {8: 'Legitimate interests', 7: 'Legitimate interests', 4: 'Legitimate interests', 3: 'Consent', 1: 'Consent'})
        original_legal_basis = {declaration.purpose: declaration.original_legal_basis_for_processing for declaration in declarations if declaration.purpose}
        assert (original_legal_basis == {8: 'Legitimate interests', 7: 'Legitimate interests', 4: 'Consent', 3: 'Consent', 1: 'Consent'})
        # Purposes 2 and 9 are excluded, shrinking the 13 declarations to 9.
        assert (declarations.count() == 9)
class FlattenConcatStateActionValueNet(FlattenConcatBaseNet):
    """State-action value network: flatten-and-concat encoder from the base
    class, plus one linear output head per requested output key."""

    def __init__(self, obs_shapes: Dict[(str, Sequence[int])], output_shapes: Dict[(str, Sequence[int])], hidden_units: List[int], non_lin: nn.Module):
        # Base class builds the shared 'latent' perception block.
        super().__init__(obs_shapes, hidden_units, non_lin)
        # Small init std keeps initial value estimates close to zero.
        module_init = make_module_init_normc(std=0.01)
        for (output_key, output_shape) in output_shapes.items():
            # One linear head per output, fed from the shared latent block;
            # only the last dimension of the output shape sizes the layer.
            self.perception_dict[output_key] = LinearOutputBlock(in_keys='latent', out_keys=output_key, in_shapes=self.perception_dict['latent'].out_shapes(), output_units=output_shape[(- 1)])
            self.perception_dict[output_key].apply(module_init)
        # Compose all blocks into a single inference graph.
        self.net = InferenceBlock(in_keys=list(obs_shapes.keys()), out_keys=list(output_shapes.keys()), in_shapes=list(obs_shapes.values()), perception_blocks=self.perception_dict)

    def forward(self, x):
        """Map an observation dict to the configured output dict."""
        return self.net(x)
class MidiThread(threading.Thread):
    """Background thread that emits MIDI clock messages while enabled.

    Relies on module-level ``midiport``, ``clock``, ``monitor`` and ``patch``
    objects being configured before ``run`` executes. When disabled (or no
    port is open) the loop idles, polling at the configured delay.
    """

    def __init__(self):
        threading.Thread.__init__(self)
        self.running = True   # cleared by stop() to terminate run()
        self.enabled = False  # toggled via setEnabled()

    def setEnabled(self, enabled):
        """Turn clock emission on or off without stopping the thread."""
        self.enabled = enabled

    def stop(self):
        """Disable output and ask the run loop to terminate."""
        # Fix: the original assigned `self.enables` — a typo that left
        # `self.enabled` untouched, so a stopped thread still looked enabled.
        self.enabled = False
        self.running = False

    def run(self):
        msg = mido.Message('clock')
        while self.running:
            if (self.enabled and midiport):
                monitor.debug('midi beat')
                # Send one clock message per tick of the shared clock source.
                for tick in clock:
                    tick.wait()
                    midiport.send(msg)
            else:
                # Idle until (re-)enabled; delay comes from the patch config.
                time.sleep(patch.getfloat('general', 'delay'))
class AccountChangeForm(UserChangeForm):
    """Django admin form for editing AccountDB entries, adding Evennia's
    typeclass, lock and cmdset fields on top of the stock user form."""

    class Meta(object):
        model = AccountDB
        fields = '__all__'

    # Username allows word chars plus '. +-' on top of Django's default.
    username = forms.RegexField(label='Username', max_length=30, regex='^[\\w. +-]+$', widget=forms.TextInput(attrs={'size': '30'}), error_messages={'invalid': 'This value may contain only letters, spaces, numbers and /./+/-/_ characters.'}, help_text='30 characters or fewer. Letters, spaces, digits and /./+/-/_ only.')
    # Choices are loaded lazily so newly-imported typeclasses appear.
    db_typeclass_path = forms.ChoiceField(label='Typeclass', help_text="This is the Python-path to the class implementing the actual account functionality. You usually don't need to change this from the default.<BR>If your custom class is not found here, it may not be imported into Evennia yet.", choices=(lambda : adminutils.get_and_load_typeclasses(parent=AccountDB)))
    db_lock_storage = forms.CharField(label='Locks', required=False, widget=forms.Textarea(attrs={'cols': '100', 'rows': '2'}), help_text="Locks limit access to the entity. Written on form `type:lockdef;type:lockdef...<BR>(Permissions (used with the perm() lockfunc) are Tags with the 'permission' type)")
    db_cmdset_storage = forms.CharField(label='CommandSet', initial=settings.CMDSET_ACCOUNT, widget=forms.TextInput(attrs={'size': '78'}), required=False)
    is_superuser = forms.BooleanField(label='Superuser status', required=False, help_text='Superusers bypass all in-game locks and has all permissions without explicitly assigning them. Usually only one superuser (user #1) is needed and only a superuser can create another superuser.<BR>Only Superusers can change the user/group permissions below.')

    def clean_username(self):
        """Reject usernames that case-insensitively collide with another
        account, while allowing a case-only rename of the same account."""
        username = self.cleaned_data['username']
        if (username.upper() == self.instance.username.upper()):
            return username
        elif AccountDB.objects.filter(username__iexact=username):
            raise forms.ValidationError('An account with that name already exists.')
        return self.cleaned_data['username']

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Inject the configured default cmdset path into the help text.
        account_cmdset = settings.CMDSET_ACCOUNT
        self.fields['db_cmdset_storage'].help_text = f"Path to Command-set path. Most non-character objects don't need a cmdset and can leave this field blank. Default cmdset-path<BR> for Accounts is <strong>{account_cmdset}</strong> ."
def show_schedule_dialog(parent_window):
    """Open the schedule dialog for the currently read note and persist any
    change to its reminder, refreshing state and notifying hooks."""
    index = get_index()
    original_sched = Reader.note.reminder
    nid = Reader.note_id
    dialog = ScheduleDialog(Reader.note, parent_window)
    if dialog.exec_():
        schedule = dialog.schedule()
        if (schedule != original_sched):
            update_reminder(nid, schedule)
            prio = get_priority(nid)
            # Unprioritized notes lose their queue position when rescheduled.
            if ((not prio) or (prio == 0)):
                null_position(nid)
            # Reload the note so Reader reflects the stored reminder.
            Reader.note = get_note(nid)
            # Distinguish clearing an existing schedule from updating one.
            if ((original_sched is not None) and (original_sched != '') and ((schedule == '') or (schedule is None))):
                tooltip(f'Removed schedule.')
            else:
                tooltip(f'Updated schedule.')
            run_hooks('updated-schedule')
class Options(object):
    """Runtime options resolved from a raw configuration mapping.

    Keys missing from *options* fall back to their documented defaults.
    """

    def __init__(self, options: Dict[(str, Any)]):
        # (attribute, default) pairs; unset keys take the default value.
        fallbacks = (
            ('node', 'localhost'),
            ('listen', ''),
            ('backend', ''),
            ('interconnect', ''),
            ('archive_path', ''),
            ('disables', []),
            ('testing', False),
        )
        for attr, default in fallbacks:
            setattr(self, attr, options.get(attr, default))
class Place(ctlarg):
    """AST node binding a name to a place, carrying source position info."""
    _fields = ('name', 'place')
    _attributes = ('lineno', 'col_offset')

    def __init__(self, name, place, lineno=0, col_offset=0, **ARGS):
        ctlarg.__init__(self, **ARGS)
        # Field values.
        self.name, self.place = name, place
        # Source coordinates are normalised to plain ints.
        self.lineno, self.col_offset = int(lineno), int(col_offset)
def push_urls_into_table(mapdict):
    """Insert one NewNetlocTracker row per previously-unseen netloc.

    Args:
        mapdict: mapping of netloc -> iterable of URLs seen for that netloc.
            The shortest URL is stored as the netloc's example URL.
    """
    with db.session_context() as db_sess:
        pbar = tqdm.tqdm(mapdict.items())
        for (netloc, urls) in pbar:
            # Skip netlocs that are already tracked (or empty strings).
            have_item = db_sess.query(db.NewNetlocTracker).filter((db.NewNetlocTracker.netloc == netloc)).scalar()
            if ((not have_item) and netloc):
                urls = list(urls)
                # Shortest URL first — used as the representative example.
                urls.sort(key=(lambda x: len(x)))
                pbar.write(('New Url: %s -> %s' % (netloc, urls[0])))
                new = db.NewNetlocTracker(netloc=netloc, example_url=urls[0])
                db_sess.add(new)
        # Single commit after the loop keeps this one transaction.
        db_sess.commit()
def main(finder, args, source=None):
    """CLI entry point: generate API documentation for the SNAKES sources.

    *args* holds the target directory followed by optional exclusion
    patterns; *source* defaults to the installed `snakes` package path.
    Argument/usage errors print the usage text via `die`; other validation
    failures print their message via `die`.
    """
    try:
        source = source if source is not None else os.path.dirname(snakes.__file__)
        target, exclude = args[0], args[1:]
        if not os.path.isdir(source):
            raise Exception('could not find SNAKES sources')
        if not os.path.isdir(target):
            raise Exception('no directory %r' % target)
    except (ValueError, IndexError):
        die('Usage: python -m snakes.utils.apidoc TARGET [EXCLUDE...]\n TARGET target directory to write files\n EXCLUDE pattern to exclude modules (not file names)')
    except Exception as error:
        die(str(error))
    finder(source, target, exclude).process()
class TestFilterMatch(ApiBaseTest):
    """Tests for filters.filter_match: positive matches and '-'-prefixed
    (or negative-number) exclusion semantics."""

    filter_match_fields = [('filer_type', models.CommitteeReports.means_filed)]

    def setUp(self):
        super(TestFilterMatch, self).setUp()
        # Calendar dates across three categories, including one with no id.
        self.dates = [factories.CalendarDateFactory(event_id=123, calendar_category_id=1, summary='July Quarterly Report Due'), factories.CalendarDateFactory(event_id=321, calendar_category_id=1, summary='TX Primary Runoff'), factories.CalendarDateFactory(event_id=111, calendar_category_id=2, summary='EC Reporting Period'), factories.CalendarDateFactory(event_id=222, calendar_category_id=2, summary='IE Reporting Period'), factories.CalendarDateFactory(event_id=333, calendar_category_id=3, summary='Executive Session'), factories.CalendarDateFactory(calendar_category_id=3, summary='Missing ID')]
        # Reports with explicit, alternate and missing filing means.
        self.reports = [factories.ReportsHouseSenateFactory(means_filed='e-file'), factories.ReportsHouseSenateFactory(means_filed='paper'), factories.ReportsHouseSenateFactory()]

    def test_filter_match(self):
        """Exact-match filtering returns only rows with the given value."""
        query_dates = filters.filter_match(models.CalendarDate.query, {'event_id': 123}, CalendarDatesView.filter_match_fields)
        self.assertEqual(set(query_dates.all()), set((each for each in self.dates if (each.event_id == 123))))
        query_reports = filters.filter_match(models.CommitteeReportsHouseSenate.query, {'filer_type': 'e-file'}, self.filter_match_fields)
        self.assertEqual(set(query_reports.all()), set((each for each in self.reports if (each.means_filed == 'e-file'))))

    def test_filter_match_exclude(self):
        """Negated filters (-321 / '-paper') exclude matching rows but keep
        rows where the column is NULL."""
        query_dates = filters.filter_match(models.CalendarDate.query, {'event_id': (- 321)}, CalendarDatesView.filter_match_fields)
        self.assertEqual(set(query_dates.all()), set((each for each in self.dates if ((each.event_id != 321) or (each.event_id is None)))))
        query_reports = filters.filter_match(models.CommitteeReportsHouseSenate.query, {'filer_type': '-paper'}, self.filter_match_fields)
        self.assertEqual(set(query_reports.all()), set((each for each in self.reports if (each.means_filed != 'paper'))))
class Test_rans2p(object):
    """End-to-end run of the cylinder RANS2P example, comparing the saved
    velocity field against stored reference values."""

    def setup_class(cls):
        # Allow importing the example modules that live alongside this test.
        cls._scriptdir = os.path.dirname(os.path.abspath(__file__))
        sys.path.insert(0, cls._scriptdir)

    def teardown_class(cls):
        sys.path.remove(cls._scriptdir)
        pass

    def setup_method(self, method):
        self.aux_names = []

    def teardown_method(self, method):
        # Remove output files produced by the solver run.
        FileList = ['cylinder_rans2p_T1_rans2p.h5', 'cylinder_rans2p_T1_rans2p.xmf']
        for file in FileList:
            if os.path.isfile(file):
                os.remove(file)
            else:
                pass

    def test_ex2(self):
        self.compare_name = 'T1_rans2p'
        self.example_setting('T=0.01 onlySaveFinalSolution=True')

    def example_setting(self, pre_setting):
        """Run the multi-model example under the given context options and
        compare the u field at t2 with the reference CSV."""
        Context.contextOptionsString = pre_setting
        from . import cylinder_so as my_so
        reload(my_so)
        opts.profile = False
        opts.gatherArchive = True
        pList = []
        nList = []
        sList = []
        # Import the physics (p) and numerics (n) module for every model.
        for (pModule, nModule) in my_so.pnList:
            pList.append(importlib.import_module(('.' + pModule), 'proteus.tests.cylinder2D.conforming_rans2p'))
            nList.append(importlib.import_module(('.' + nModule), 'proteus.tests.cylinder2D.conforming_rans2p'))
            if (pList[(- 1)].name == None):
                pList[(- 1)].name = pModule
            # Reload so repeated runs pick up fresh module state.
            reload(pList[(- 1)])
            reload(nList[(- 1)])
        # Fall back to default simulation settings when none are provided.
        if (my_so.sList == []):
            for i in range(len(my_so.pnList)):
                s = default_s
                sList.append(s)
        else:
            sList = my_so.sList
        my_so.name += ('_rans2p_' + self.compare_name)
        ns = proteus.NumericalSolution.NS_base(my_so, pList, nList, sList, opts)
        self.aux_names.append(ns.modelList[0].name)
        ns.calculateSolution(my_so.name)
        # Compare the flattened u_t2 dataset with the stored reference.
        actual = tables.open_file((my_so.name + '.h5'))
        expected_path = ((('comparison_files/' + 'comparison_') + self.compare_name) + '_u_t2.csv')
        np.testing.assert_almost_equal(np.fromfile(os.path.join(self._scriptdir, expected_path), sep=','), np.array(actual.root.u_t2).flatten(), decimal=6)
        actual.close()
class Analyser(MTModule):
    """Base class for analysers.

    An analyser reads elements from the storage locations named in
    ``config['elements_in']``, runs :meth:`analyse_element` over each one
    (with retries), and writes the resulting elements back to storage.
    Subclasses implement :meth:`analyse_element` and may override
    :meth:`pre_analyse` / :meth:`post_analyse` for per-run setup/teardown.

    NOTE(review): the bare strings ('pre-analyse'), ('analyse') and
    ('post-analyse') in the class body look like stripped decorator
    arguments -- confirm against the upstream project before removing them.
    """

    # Set to True when an element exhausts its retries; suppresses the final
    # meta write in `start_analysing`.
    errored = False

    def __init__(self, config, module, storage=None):
        """Validate the analyser name, storage and 'elements_in' config.

        Raises:
            InvalidAnalyserConfigError: when the module name, storage object
                or 'elements_in' config entry is missing or invalid.
        """
        super().__init__(config, module, storage)
        if (not isinstance(module, str)) or (module == ''):
            raise InvalidAnalyserConfigError('You must provide a name for your analyser')
        if not isinstance(storage, Storage):
            raise InvalidAnalyserConfigError('You must provide a valid storage object')
        if 'elements_in' not in config:
            raise InvalidAnalyserConfigError("The config must contain an 'elements_in' indicating the analyser's input.")
        if (not config['elements_in']) or (not isinstance(config['elements_in'], list)):
            raise InvalidAnalyserConfigError("The 'elements_in' must be a list containing at least one string")

    def analyse_element(self, element: LocalElement, config) -> Union[LocalElement, None]:
        """Analyse a single element. Subclasses must override.

        Return a new element to store, or None to skip this element.
        """
        return NotImplemented

    def pre_analyse(self, config):
        """Hook run once before analysis starts; default is a no-op.

        BUGFIX: this method previously had no body at all, which is a syntax
        error in Python; it now defaults to a no-op like `post_analyse`.
        """
        return None

    def post_analyse(self, config):
        """Hook run once after all elements are analysed; may return a final
        element to be written next to each input selector. Default: no-op."""
        return None

    def start_analysing(self):
        """Run the full pre-analyse / analyse / post-analyse pipeline and,
        when no element failed permanently, record run metadata on disk."""
        self.logger(f"Running analysis {('in parallel' if self.in_parallel else 'serially')}")
        self.__pre_analyse()
        self.__analyse()
        self.__post_analyse()
        cfg = self.get_full_config()
        if not self.errored:
            self.disk.write_meta(f'{self.get_selector()}/{self.name}', {'etype': self.out_etype.__repr__(), 'config': cfg, 'stage': {'name': self.name, 'module': 'analyser'}})
        self.flush_logs()

    ('pre-analyse')
    def __pre_analyse(self):
        self.pre_analyse(self.config)

    def __analyse(self):
        try:
            elements = self.disk.read_elements(self.config['elements_in'])
        except Exception as exc:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are not swallowed; the original cause is chained for debugging.
            raise InvalidAnalyserElements(f"The 'elements_in' you specified does not exist on the storage specified.") from exc
        if len(elements) == 0:
            raise InvalidAnalyserElements('No elements could be found at the location you tried to select or passed in.')
        if self.in_parallel:
            # Hand over a generator so parallel workers can stream elements.
            self.analyse((e for e in elements))
        else:
            self.analyse(elements)

    def get_dest_q(self):
        # In parallel mode the destination queue is a shared proxy value.
        return (self.dest_q.value if self.in_parallel else self.dest_q)

    def set_dest_q(self, value):
        if self.in_parallel:
            self.dest_q.value = value
        else:
            self.dest_q = value

    ('analyse')
    def analyse(self, elements: Union[Generator[LocalElement, None, None], List[LocalElement]]):
        """Run `analyse_element` (with retries) over every element, routing
        output to '<selector>/<analyser name>'."""
        for element in elements:
            og_query = super(type(self.disk), self.disk).read_query(element.query)
            self.set_dest_q(f'{og_query[0]}/{self.name}')
            self.__attempt_analyse(5, element)
        self.disk.delete_local_on_write = False

    def get_selector(self):
        """Concatenate the selector names of all input queries."""
        sel = ''
        for q in self.config['elements_in']:
            (selname, _) = super(type(self.disk), self.disk).read_query(q)
            sel += selname
        return sel

    ('post-analyse')
    def __post_analyse(self):
        # NOTE(review): post_analyse is called with the analysed elements even
        # though its parameter is named `config` -- confirm intended signature.
        analysed_els = self.disk.read_elements([self.get_dest_q()])
        outel = self.post_analyse(analysed_els)
        if outel is None:
            return
        # Write the final element next to every input selector.
        successes = []
        for q in self.config['elements_in']:
            (selname, _) = super(type(self.disk), self.disk).read_query(q)
            success = self.disk.write_element(f'{selname}/{self.name}', outel)
            successes.append(success)
        if not all(successes):
            raise ElementShouldRetryError("Some instances of the final element produced via 'post_analyse' failed to save.")

    def __attempt_analyse(self, attempts, element):
        """Analyse one element, retrying up to `attempts` times on
        ElementShouldRetryError; skip/unknown errors are logged and dropped."""
        try:
            new_element = self.analyse_element(element, self.config)
            if new_element is None:
                # The analyser deliberately produced nothing for this element.
                return
            success = self.disk.write_element(self.get_dest_q(), new_element)
            if not success:
                raise ElementShouldRetryError('Unsuccessful storage')
        except ElementShouldSkipError as e:
            self.error_logger(str(e), element)
        except ElementShouldRetryError as e:
            self.error_logger(str(e), element)
            if attempts > 1:
                return self.__attempt_analyse((attempts - 1), element)
            self.error_logger('failed after maximum retries - skipping element', element)
            self.errored = True
        except Exception as e:
            # In development surface the error; in production log and move on.
            if self.is_dev():
                raise e
            self.error_logger(f'{str(e)}: skipping element', element)
            print(traceback.format_exc())
class CompareRoutes(ComponentBase):
    """Web routes for creating, viewing and managing firmware comparisons.

    NOTE(review): the bare tuples like ('/compare/<compare_id>', GET) and the
    bare `_accepted(...)` calls in the class body look like stripped route /
    authorization decorators -- confirm against the upstream project before
    restructuring this class.
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    _accepted(*PRIVILEGES['compare'])
    ('/compare/<compare_id>', GET)
    def show_compare_result(self, compare_id):
        """Render a finished comparison result, or a wait page while the
        comparison is still being computed."""
        compare_id = normalize_compare_id(compare_id)
        with get_shared_session(self.db.comparison) as comparison_db:
            if (not comparison_db.objects_exist(compare_id)):
                return render_template('compare/error.html', error='Not all UIDs found in the DB')
            result = comparison_db.get_comparison_result(compare_id)
        if (not result):
            # No stored result yet: the comparison is still running.
            return render_template('compare/wait.html', compare_id=compare_id)
        download_link = self._create_ida_download_if_existing(result, compare_id)
        uid_list = convert_compare_id_to_list(compare_id)
        (plugin_views, plugins_without_view) = self._get_compare_plugin_views(result)
        compare_view = _get_compare_view(plugin_views)
        self._fill_in_empty_fields(result, compare_id)
        return render_template_string(compare_view, result=result, uid_list=uid_list, download_link=download_link, plugins_without_view=plugins_without_view)

    # NOTE(review): takes no `self` -- presumably @staticmethod upstream.
    def _fill_in_empty_fields(result, compare_id):
        # Ensure every compared UID has an entry in each 'general' field so
        # templates can render a complete table.
        compare_uids = compare_id.split(';')
        for key in result['general']:
            for uid in compare_uids:
                if (uid not in result['general'][key]):
                    result['general'][key][uid] = ''

    def _get_compare_plugin_views(self, compare_result):
        """Collect (plugin, view) pairs for plugins that ship a template and
        the names of plugins that do not."""
        (views, plugins_without_view) = ([], [])
        with suppress(KeyError):
            used_plugins = list(compare_result['plugins'].keys())
            with get_shared_session(self.db.template) as template_db:
                for plugin in used_plugins:
                    view = template_db.get_view(plugin)
                    if view:
                        views.append((plugin, view))
                    else:
                        plugins_without_view.append(plugin)
        return (views, plugins_without_view)

    _accepted(*PRIVILEGES['submit_analysis'])
    ('/compare', GET)
    def start_compare(self):
        """Start (or re-start with force_recompare) a comparison for the UIDs
        collected in the session basket."""
        uid_dict = get_comparison_uid_dict_from_session()
        if (len(uid_dict) < 2):
            return render_template('compare/error.html', error='No UIDs found for comparison')
        comparison_id = convert_uid_list_to_compare_id(list(uid_dict))
        session['uids_for_comparison'] = None
        redo = (True if request.args.get('force_recompare') else None)
        with get_shared_session(self.db.comparison) as comparison_db:
            if (not comparison_db.objects_exist(comparison_id)):
                return render_template('compare/error.html', error='Not all UIDs found in the DB')
            if ((not redo) and comparison_db.comparison_exists(comparison_id)):
                # Result already cached: jump straight to it.
                return redirect(url_for('show_compare_result', compare_id=comparison_id))
        self.intercom.add_compare_task(comparison_id, force=redo)
        return render_template('compare/wait.html', compare_id=comparison_id)

    # NOTE(review): takes no `self` -- presumably @staticmethod upstream.
    def _create_ida_download_if_existing(result, compare_id):
        # Only offer an IDA database download when the plugin produced one.
        if (isinstance(result, dict) and result.get('plugins', {}).get('Ida_Diff_Highlighting', {}).get('idb_binary')):
            return f'/ida-download/{compare_id}'
        return None

    _accepted(*PRIVILEGES['compare'])
    ('/database/browse_compare', GET)
    def browse_comparisons(self):
        """Paginated listing of all stored comparison results."""
        with get_shared_session(self.db.comparison) as comparison_db:
            (page, per_page) = extract_pagination_from_request(request)[0:2]
            try:
                compare_list = comparison_db.page_comparison_results(skip=(per_page * (page - 1)), limit=per_page)
            except Exception as exception:
                error_message = f'Could not query database: {type(exception)}'
                logging.error(error_message, exc_info=True)
                return render_template('error.html', message=error_message)
            total = comparison_db.get_total_number_of_results()
        pagination = get_pagination(page=page, per_page=per_page, total=total, record_name='compare results')
        return render_template('database/compare_browse.html', compare_list=compare_list, page=page, per_page=per_page, pagination=pagination)

    _accepted(*PRIVILEGES['submit_analysis'])
    ('/comparison/add/<uid>', GET)
    ('/comparison/add/<uid>/<root_uid>', GET)
    def add_to_compare_basket(self, uid, root_uid=None):
        """Add a file UID (with optional root firmware UID) to the session's
        comparison basket and return to its analysis page."""
        compare_uid_list = get_comparison_uid_dict_from_session()
        compare_uid_list[uid] = root_uid
        session.modified = True
        return redirect(url_for('show_analysis', uid=uid, root_uid=root_uid))

    _accepted(*PRIVILEGES['submit_analysis'])
    ('/comparison/remove/<analysis_uid>/<compare_uid>', GET)
    ('/comparison/remove/<analysis_uid>/<compare_uid>/<root_uid>', GET)
    def remove_from_compare_basket(self, analysis_uid, compare_uid, root_uid=None):
        """Remove a single UID from the comparison basket."""
        compare_uid_list = get_comparison_uid_dict_from_session()
        if (compare_uid in compare_uid_list):
            session['uids_for_comparison'].pop(compare_uid)
            session.modified = True
        return redirect(url_for('show_analysis', uid=analysis_uid, root_uid=root_uid))

    _accepted(*PRIVILEGES['submit_analysis'])
    ('/comparison/remove_all/<analysis_uid>', GET)
    ('/comparison/remove_all/<analysis_uid>/<root_uid>', GET)
    def remove_all_from_compare_basket(self, analysis_uid, root_uid=None):
        """Empty the comparison basket entirely."""
        compare_uid_list = get_comparison_uid_dict_from_session()
        compare_uid_list.clear()
        session.modified = True
        return redirect(url_for('show_analysis', uid=analysis_uid, root_uid=root_uid))

    _accepted(*PRIVILEGES['compare'])
    ('/comparison/text_files', GET)
    def start_text_file_comparison(self):
        """Start a text diff between exactly two files from the basket."""
        uids_dict = get_comparison_uid_dict_from_session()
        if (len(uids_dict) != 2):
            return render_template('compare/error.html', error=f"Can't compare {len(uids_dict)} files. You must select exactly 2 files.")
        ((uid_1, root_uid_1), (uid_2, root_uid_2)) = list(uids_dict.items())
        uids_dict.clear()
        session.modified = True
        return redirect(url_for('compare_text_files', uid_1=uid_1, uid_2=uid_2, root_uid_1=root_uid_1, root_uid_2=root_uid_2))

    _accepted(*PRIVILEGES['compare'])
    ('/comparison/text_files/<uid_1>/<uid_2>', GET)
    ('/comparison/text_files/<uid_1>/<uid_2>/<root_uid_1>/<root_uid_2>', GET)
    def compare_text_files(self, uid_1: str, uid_2: str, root_uid_1: (str | None)=None, root_uid_2: (str | None)=None):
        """Render a unified diff of two text files; both must have a finished
        file_type analysis with a text mime type."""
        diff_files = [self._get_data_for_file_diff(uid_1, root_uid_1), self._get_data_for_file_diff(uid_2, root_uid_2)]
        uids_with_missing_file_type = ', '.join((f.uid for f in diff_files if (f.mime is None)))
        if uids_with_missing_file_type:
            return render_template('compare/error.html', error=f'file_type analysis is not finished for {uids_with_missing_file_type}')
        if any(((not f.mime.startswith('text')) for f in diff_files)):
            return render_template('compare/error.html', error=f"Can't compare non-text mimetypes. ({diff_files[0].mime} vs {diff_files[1].mime})")
        diff_str = self.intercom.get_file_diff((uid_1, uid_2))
        if (diff_str is None):
            return render_template('compare/error.html', error='File(s) not found.')
        return render_template('compare/text_files.html', diffstr=diff_str, hid0=diff_files[0].fw_hid, hid1=diff_files[1].fw_hid)

    def _get_data_for_file_diff(self, uid: str, root_uid: (str | None)) -> FileDiffData:
        """Look up the mime type and human-readable firmware id for a file,
        resolving the root firmware when none was supplied."""
        with get_shared_session(self.db.frontend) as frontend_db:
            fo = frontend_db.get_object(uid)
            if (root_uid in [None, 'None']):
                root_uid = frontend_db.get_root_uid(fo.uid)
            fw_hid = frontend_db.get_object(root_uid).get_hid()
        mime = fo.processed_analysis.get('file_type', {}).get('result', {}).get('mime')
        return FileDiffData(uid, mime, fw_hid)
def main():
    """Evaluate a trained OCR model on the 'Eval' dataset and log its metrics."""
    global_config = config['Global']
    valid_dataloader = build_dataloader(config, 'Eval', device, logger)
    post_process_class = build_post_process(config['PostProcess'], global_config)
    arch_config = config['Architecture']
    if hasattr(post_process_class, 'character'):
        # The head's output size must match the post-processor's character set.
        char_num = len(getattr(post_process_class, 'character'))
        if arch_config['algorithm'] in ['Distillation']:
            # Distillation wraps several sub-models; patch each head.
            for key in arch_config['Models']:
                arch_config['Models'][key]['Head']['out_channels'] = char_num
        else:
            arch_config['Head']['out_channels'] = char_num
    model = build_model(config['Architecture'])
    use_srn = (arch_config['algorithm'] == 'SRN')
    model_type = arch_config.get('model_type')
    best_model_dict = load_dygraph_params(config, model, logger, None)
    if len(best_model_dict):
        logger.info('metric in ckpt ')
        for k, v in best_model_dict.items():
            logger.info('{}:{}'.format(k, v))
    eval_class = build_metric(config['Metric'])
    metric = program.eval(model, valid_dataloader, post_process_class, eval_class, model_type, use_srn)
    logger.info('metric eval ')
    for k, v in metric.items():
        logger.info('{}:{}'.format(k, v))
def _check_min(rule):
value = rule['value']
if (rule['type'] in ('integer', 'float')):
if (value < rule['min']):
raise UnprocessableEntityException(BELOW_MINIMUM_MSG.format(**rule))
if (rule['type'] in ('text', 'enum', 'array', 'object')):
if (len(value) < rule['min']):
raise UnprocessableEntityException((BELOW_MINIMUM_MSG.format(**rule) + ' items')) |
class SphinxBuilder():
    """Wrapper around a SphinxTestApp that builds a project and exposes its
    output, status and doctrees for test assertions.

    NOTE(review): `src_path`, `out_path`, `status` and `warnings` read like
    stripped @property methods (the comparison `self.warnings == ''` in
    `build` only makes sense for a property) -- confirm against upstream.
    """

    def __init__(self, app: SphinxTestApp, src_path: Path):
        self.app = app
        self._src_path = src_path

    def src_path(self) -> Path:
        # Source directory the app builds from.
        return self._src_path

    def out_path(self) -> Path:
        # Build output directory of the Sphinx app.
        return Path(self.app.outdir)

    def build(self, assert_pass=True):
        """Build the project; when `assert_pass`, fail on any warning and
        report the captured status output. Returns self for chaining."""
        self.app.build()
        if assert_pass:
            assert (self.warnings == ''), self.status
        return self

    def status(self):
        # Captured status (stdout) stream of the build.
        return self.app._status.getvalue()

    def warnings(self):
        # Captured warning stream of the build.
        return self.app._warning.getvalue()

    def get_doctree(self, docname: str, post_transforms: bool=False) -> nodes.document:
        """Return the doctree for `docname` with source paths normalised to
        extension-less, project-relative POSIX form."""
        doctree = self.app.env.get_doctree(docname)
        if post_transforms:
            self.app.env.apply_post_transforms(doctree, docname)
        for node in findall(doctree)(include_self=True):
            if (not (('source' in node) and node['source'])):
                continue
            node['source'] = Path(node['source']).relative_to(self.src_path).as_posix()
            # Strip the source extension so .rst and .md builds compare equal.
            if node['source'].endswith('.rst'):
                node['source'] = node['source'][:(- 4)]
            elif node['source'].endswith('.md'):
                node['source'] = node['source'][:(- 3)]
        if (doctree.children and isinstance(doctree.children[0], nodes.section)):
            # Clear classes on the first section (presumably to make doctree
            # snapshots stable across builds -- confirm).
            doctree.children[0]['classes'] = []
        return doctree
def test_field_io():
    """Round-trip a circular-aperture field through every supported format."""
    grid = make_pupil_grid(128)
    field = make_circular_aperture(1)(grid)
    for fmt in ('asdf', 'fits', 'fits.gz', 'pkl', 'pickle'):
        fname = 'field_test.' + fmt
        write_field(field, fname)
        restored = read_field(fname)
        # Both the values and the grid (compared via its hash) must survive.
        assert np.allclose(field, restored)
        assert hash(field.grid) == hash(restored.grid)
        os.remove(fname)
def geocode_filter_subaward_locations(scope: str, values: list) -> Q:
    """Build a Django Q filter for subaward locations.

    Args:
        scope: 'sub_legal_entity' or 'sub_place_of_perform' -- selects the
            scope-specific column names from the mapping below.
        values: location filter objects; nested by create_nested_object into
            {country: {state_code | 'city' | 'zip': ...}}.

    Returns:
        A Q object OR-ing one clause per country in the filter.
    """
    or_queryset = Q()
    # Generic location field -> per-scope model column name. Note that the
    # 'current_congressional_code' values are already full column names, so
    # they are NOT prefixed with `scope` when used below.
    location_mappings = {'country_code': {'sub_legal_entity': 'country_code', 'sub_place_of_perform': 'country_co'}, 'zip5': {'sub_legal_entity': 'zip5', 'sub_place_of_perform': 'zip5'}, 'city_name': {'sub_legal_entity': 'city_name', 'sub_place_of_perform': 'city_name'}, 'state_code': {'sub_legal_entity': 'state_code', 'sub_place_of_perform': 'state_code'}, 'county_code': {'sub_legal_entity': 'county_code', 'sub_place_of_perform': 'county_code'}, 'congressional_code': {'sub_legal_entity': 'congressional', 'sub_place_of_perform': 'congressio'}, 'current_congressional_code': {'sub_legal_entity': 'sub_legal_entity_congressional_current', 'sub_place_of_perform': 'sub_place_of_performance_congressional_current'}}
    # Collapse the mapping to the columns for the requested scope only.
    location_mappings = {location_type: field_dict[scope] for (location_type, field_dict) in location_mappings.items()}
    nested_values = create_nested_object(values)
    for (country, state_zip) in nested_values.items():
        country_qs = None
        if (country != ALL_FOREIGN_COUNTRIES):
            country_qs = Q(**{f"{scope}_{location_mappings['country_code']}__exact": country})
        state_qs = Q()
        for (state_zip_key, location_values) in state_zip.items():
            # The second nesting level is either a state code or the literal
            # keys 'city' / 'zip' for country-wide city/zip filters.
            if (state_zip_key == 'city'):
                state_inner_qs = Q(**{f"{scope}_{location_mappings['city_name']}__in": location_values})
            elif (state_zip_key == 'zip'):
                state_inner_qs = Q(**{f"{scope}_{location_mappings['zip5']}__in": location_values})
            else:
                state_inner_qs = Q(**{f"{scope}_{location_mappings['state_code']}__exact": state_zip_key.upper()})
                county_qs = Q()
                district_qs = Q()
                city_qs = Q()
                if location_values['county']:
                    county_qs = Q(**{f"{scope}_{location_mappings['county_code']}__in": location_values['county']})
                if location_values['district_current']:
                    district_qs = Q(**{f"{location_mappings['current_congressional_code']}__in": location_values['district_current']})
                if location_values['district_original']:
                    # NOTE(review): this overwrites a 'district_current' clause
                    # when both are present -- confirm that is intended.
                    district_qs = Q(**{f"{scope}_{location_mappings['congressional_code']}__in": location_values['district_original']})
                if location_values['city']:
                    city_qs = Q(**{f"{scope}_{location_mappings['city_name']}__in": location_values['city']})
                # Within a state, county/district/city refinements are OR-ed.
                state_inner_qs &= ((county_qs | district_qs) | city_qs)
            state_qs |= state_inner_qs
        if country_qs:
            or_queryset |= (country_qs & state_qs)
        else:
            or_queryset |= state_qs
    return or_queryset
class TestMaxReactionsConfigVariable(BaseConfigTestVariable):
    """Config-variable test case for the agent's `max_reactions` option."""

    # Option name in the configuration and the attribute it maps to.
    OPTION_NAME = 'max_reactions'
    CONFIG_ATTR_NAME = 'max_reactions'
    # Values expected to validate vs. be rejected by the base test case.
    GOOD_VALUES = [1, 10]
    INCORRECT_VALUES = ['sTrING?', (- 1), 0, 1.1]
    REQUIRED = False
    # AEA builder attribute and its default when the option is omitted.
    AEA_ATTR_NAME = 'max_reactions'
    AEA_DEFAULT_VALUE = AEABuilder.DEFAULT_MAX_REACTIONS
def pairing(Q: Optimized_Point3D[FQ2], P: Optimized_Point3D[FQ], final_exponentiate: bool=True) -> FQ12:
    """Compute the pairing e(Q, P) via the Miller loop.

    Both points must lie on their respective curves; pairing with the point
    at infinity yields the identity element of FQ12.
    """
    assert is_on_curve(Q, b2)
    assert is_on_curve(P, b)
    p_is_infinity = P[-1] == P[-1].zero()
    q_is_infinity = Q[-1] == Q[-1].zero()
    if p_is_infinity or q_is_infinity:
        return FQ12.one()
    return miller_loop(Q, P, final_exponentiate=final_exponentiate)
()  # NOTE(review): likely a stripped decorator (e.g. @frappe.whitelist()) -- confirm upstream
def get_procedure_prescribed(patient):
    """Return procedure prescriptions for `patient` that have no appointment
    booked yet, ordered by encounter creation (newest first)."""
    return frappe.db.sql('\n\t\t\tSELECT\n\t\t\t\tpp.name, pp.procedure, pp.parent, ct.practitioner,\n\t\t\t\tct.encounter_date, pp.practitioner, pp.date, pp.department\n\t\t\tFROM\n\t\t\t\t`tabPatient Encounter` ct, `tabProcedure Prescription` pp\n\t\t\tWHERE\n\t\t\t\tct.patient=%(patient)s and pp.parent=ct.name and pp.appointment_booked=0\n\t\t\tORDER BY\n\t\t\t\tct.creation desc\n\t\t', {'patient': patient})
class RegMap():
    """Register-map accessors for a memory-mapped peripheral.

    NOTE(review): the paired `data`/`ctrl`/`start` definitions share names and
    look like stripped @property / @<name>.setter decorators; they are kept in
    their original order (so, undecorated, the later setter definition wins)
    to avoid changing behaviour -- confirm against the generator's output.
    """

    DATA_ADDR = 0
    DATA_VAL_POS = 0
    # BUGFIX: the mask value was missing entirely (a syntax error). CTRL,
    # STATUS and START use full-width masks (16/8/1 bits); DATA is assumed to
    # be a full 32-bit register -- TODO confirm against the hardware map.
    DATA_VAL_MSK = 4294967295
    CTRL_ADDR = 4
    CTRL_VAL_POS = 0
    CTRL_VAL_MSK = 65535
    STATUS_ADDR = 8
    STATUS_VAL_POS = 0
    STATUS_VAL_MSK = 255
    START_ADDR = 256
    START_VAL_POS = 0
    START_VAL_MSK = 1

    def __init__(self, interface):
        # `interface` must provide read(addr) and write(addr, val).
        self._if = interface

    def data(self):
        return self._if.read(self.DATA_ADDR)

    def data(self, val):
        self._if.write(self.DATA_ADDR, val)

    def data_bf(self):
        return _RegData(self)

    def ctrl(self):
        return self._if.read(self.CTRL_ADDR)

    def ctrl(self, val):
        self._if.write(self.CTRL_ADDR, val)

    def ctrl_bf(self):
        return _RegCtrl(self)

    def status(self):
        return self._if.read(self.STATUS_ADDR)

    def status_bf(self):
        return _RegStatus(self)

    def start(self):
        # Write-only register: reads always report 0.
        return 0

    def start(self, val):
        self._if.write(self.START_ADDR, val)

    def start_bf(self):
        return _RegStart(self)
class ApplicationDefault(Base):
    """Credential wrapper around Google Application Default Credentials,
    resolved lazily on first use."""

    def __init__(self):
        super(ApplicationDefault, self).__init__()
        self._g_credential = None

    def get_credential(self):
        """Return the underlying google-auth credential, loading it on demand."""
        self._load_credential()
        return self._g_credential

    def project_id(self):
        """Return the project id discovered alongside the credential."""
        self._load_credential()
        return self._project_id

    def _load_credential(self):
        # Resolve ADC only once; later calls reuse the cached credential.
        if self._g_credential:
            return
        self._g_credential, self._project_id = google.auth.default(scopes=_scopes)
class FaucetIPv6TupleTest(FaucetIPv4TupleTest):
    """IPv6 variant of the exact-match tuple test: reuses the IPv4 test logic
    with IPv6 ethertype, network base and seed ACL configuration."""

    # Rule-count parameter consumed by the inherited test logic.
    MAX_RULES = 1024
    ETH_TYPE = IPV6_ETH
    NET_BASE = ipaddress.IPv6Network('fc00::00/64')
    # Seed ACL template; eth_type 34525 == 0x86DD (IPv6).
    START_ACL_CONFIG = '\nacls:\n 1:\n exact_match: True\n rules:\n - rule:\n actions: {allow: 1}\n eth_type: 34525\n ip_proto: 6\n ipv6_dst: ::1\n ipv6_src: ::1\n tcp_dst: 65535\n tcp_src: 65535\n'
class TypeWafActiveRule(ModelSimple):
    """Generated enum-like model holding the single allowed value
    'waf_active_rule'.

    NOTE(review): the bare `_property` and `_js_args_to_python_args` lines in
    the class body look like stripped decorators from the code generator --
    confirm against the generated upstream client before editing.
    """

    # Only one value is permitted for this type discriminator.
    allowed_values = {('value',): {'WAF_ACTIVE_RULE': 'waf_active_rule'}}
    validations = {}
    additional_properties_type = None
    _nullable = False

    _property
    def openapi_types():
        # Maps attribute name -> accepted types.
        return {'value': (str,)}

    _property
    def discriminator():
        return None

    attribute_map = {}
    read_only_vars = set()
    _composed_schemas = None
    # Internal attribute names that bypass the model's __setattr__ machinery.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Accept the value positionally or as `value=`; defaults to
        'waf_active_rule' when omitted."""
        _path_to_item = kwargs.pop('_path_to_item', ())
        if ('value' in kwargs):
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            value = 'waf_active_rule'
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            # Anything left over in *args is an error.
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        self.value = value
        if kwargs:
            raise ApiTypeError(('Invalid named arguments=%s passed to %s. Remove those invalid named arguments.' % (kwargs, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))

    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Alternate constructor used when deserializing server data;
        mirrors __init__ but builds the instance via __new__."""
        _path_to_item = kwargs.pop('_path_to_item', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if ('value' in kwargs):
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            value = 'waf_active_rule'
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        self.value = value
        if kwargs:
            raise ApiTypeError(('Invalid named arguments=%s passed to %s. Remove those invalid named arguments.' % (kwargs, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        return self
class HSI(HSV):
    """HSI (hue, saturation, intensity) color space; conversions go through
    the sRGB base space."""

    BASE = 'srgb'  # conversions route through sRGB
    NAME = 'hsi'
    SERIALIZE = ('--hsi',)
    # Hue is an angle in degrees; saturation and intensity are bounded [0, 1].
    CHANNELS = (Channel('h', 0.0, 360.0, flags=FLG_ANGLE), Channel('s', 0.0, 1.0, bound=True), Channel('i', 0.0, 1.0, bound=True))
    CHANNEL_ALIASES = {'hue': 'h', 'saturation': 's', 'intensity': 'i'}
    WHITE = WHITES['2deg']['D65']
    # Gamut checks are performed against sRGB.
    GAMUT_CHECK = 'srgb'
    CLIP_SPACE = None

    def to_base(self, coords: Vector) -> Vector:
        """Convert HSI coordinates to the sRGB base space."""
        return hsi_to_srgb(coords)

    def from_base(self, coords: Vector) -> Vector:
        """Convert sRGB base-space coordinates to HSI."""
        return srgb_to_hsi(coords)
def extractNotfoundtlWordpressCom(item):
    """Map a notfoundtl.wordpress.com feed item to a release message.

    Returns None for previews or items without volume/chapter info, a release
    message when a known tag matches, and False otherwise.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if (not chp and not vol) or ('preview' in title.lower()):
        return None
    releases = {'PRC': ('PRC', 'translated'), 'Loiterous': ('Loiterous', 'oel')}
    for tagname, (name, tl_type) in releases.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class Event(EventT):
    """A received message together with its deserialized key and value.

    Provides helpers to send/forward derived messages and to acknowledge the
    underlying message; usable as an async context manager that acks on exit.
    """

    def __init__(self, app: AppT, key: K, value: V, headers: Optional[HeadersArg], message: Message) -> None:
        self.app: AppT = app
        self.key: K = key
        self.value: V = value
        self.message: Message = message
        if (headers is not None):
            if (not isinstance(headers, dict)):
                # Normalise header sequences (e.g. list of pairs) to a dict.
                self.headers = dict(headers)
            else:
                self.headers = headers
        else:
            self.headers = {}
        self.acked: bool = False

    async def send(self, channel: Union[str, ChannelT], key: K=USE_EXISTING_KEY, value: V=USE_EXISTING_VALUE, partition: Optional[int]=None, timestamp: Optional[float]=None, headers: Any=USE_EXISTING_HEADERS, schema: Optional[SchemaT]=None, key_serializer: CodecArg=None, value_serializer: CodecArg=None, callback: Optional[MessageSentCallback]=None, force: bool=False) -> Awaitable[RecordMetadata]:
        """Send a message to `channel`, defaulting key/value/headers to this
        event's *deserialized* ones (sentinel defaults mean "reuse")."""
        if (key is USE_EXISTING_KEY):
            key = self.key
        if (value is USE_EXISTING_VALUE):
            value = self.value
        if (headers is USE_EXISTING_HEADERS):
            headers = self.headers
        return (await self._send(channel, key, value, partition, timestamp, headers, schema, key_serializer, value_serializer, callback, force=force))

    async def forward(self, channel: Union[str, ChannelT], key: K=USE_EXISTING_KEY, value: V=USE_EXISTING_VALUE, partition: Optional[int]=None, timestamp: Optional[float]=None, headers: Any=USE_EXISTING_HEADERS, schema: Optional[SchemaT]=None, key_serializer: CodecArg=None, value_serializer: CodecArg=None, callback: Optional[MessageSentCallback]=None, force: bool=False) -> Awaitable[RecordMetadata]:
        """Like `send`, but defaults come from the *raw* message (its
        original key/value/headers), not the deserialized event."""
        if (key is USE_EXISTING_KEY):
            key = self.message.key
        if (value is USE_EXISTING_VALUE):
            value = self.message.value
        if (headers is USE_EXISTING_HEADERS):
            headers = self.message.headers
            if (not headers):
                headers = None
        return (await self._send(channel, key, value, partition, timestamp, headers, schema, key_serializer, value_serializer, callback, force=force))

    async def _send(self, channel: Union[str, ChannelT], key: K=None, value: V=None, partition: Optional[int]=None, timestamp: Optional[float]=None, headers: HeadersArg=None, schema: Optional[SchemaT]=None, key_serializer: CodecArg=None, value_serializer: CodecArg=None, callback: Optional[MessageSentCallback]=None, force: bool=False) -> Awaitable[RecordMetadata]:
        # Delegate to the app's attachment machinery, which may defer the send
        # until the current offset is committed (unless `force`).
        return (await cast(_App, self.app)._attachments.maybe_put(channel, key, value, partition, timestamp, headers, schema, key_serializer, value_serializer, callback, force=force))

    def _attach(self, channel: Union[ChannelT, str], key: K=None, value: V=None, partition: Optional[int]=None, timestamp: Optional[float]=None, headers: HeadersArg=None, schema: Optional[SchemaT]=None, key_serializer: CodecArg=None, value_serializer: CodecArg=None, callback: Optional[MessageSentCallback]=None) -> Awaitable[RecordMetadata]:
        # Attach a pending send to this event's source message.
        return cast(_App, self.app)._attachments.put(self.message, channel, key, value, partition=partition, timestamp=timestamp, headers=headers, schema=schema, key_serializer=key_serializer, value_serializer=value_serializer, callback=callback)

    def ack(self) -> bool:
        """Acknowledge the underlying message with the app's consumer."""
        return self.message.ack(self.app.consumer)

    def __repr__(self) -> str:
        return f'<{type(self).__name__}: k={self.key!r} v={self.value!r}>'

    async def __aenter__(self) -> EventT:
        return self

    async def __aexit__(self, _exc_type: Type[BaseException]=None, _exc_val: BaseException=None, _exc_tb: TracebackType=None) -> Optional[bool]:
        # Leaving the context always acks the message, even on error.
        self.ack()
        return None
class TestText(util.PluginTestCase):
    """Spell-check the text filter against source files in several encodings."""

    def setup_fs(self):
        """Write a pyspelling config that converts all sources to UTF-8."""
        config = self.dedent("\n matrix:\n - name: text\n sources:\n - '{}/**/*.txt'\n aspell:\n lang: en\n d: en_US\n hunspell:\n d: en_US\n pipeline:\n - pyspelling.filters.text:\n convert_encoding: utf-8\n ").format(self.tempdir)
        self.mktemp('.text.yml', config, 'utf-8')

    def _assert_encoding_spellcheck(self, encoding):
        # Write misspelled and correct words in the given encoding and expect
        # only the misspelled ones to be reported.
        bad_words = ['helo', 'begn']
        good_words = ['yes', 'word']
        self.mktemp('test.txt', '\n'.join(bad_words + good_words), encoding)
        self.assert_spellcheck('.text.yml', bad_words)

    def test_text(self):
        self._assert_encoding_spellcheck('utf-8')

    def test_text_utf16(self):
        self._assert_encoding_spellcheck('utf-16')

    def test_text_utf32(self):
        self._assert_encoding_spellcheck('utf-32')
class TestGetUsers():
    """Integration tests for auth.get_users with mixed identifier types.

    NOTE(review): `_map_user_record_to_uid_email_phones` takes no `self` but
    is called via `self._...(user)`, so it is presumably decorated with
    @staticmethod upstream -- confirm.
    """

    def _map_user_record_to_uid_email_phones(user_record):
        # Reduce a user record to the comparable fields used by these tests.
        return {'uid': user_record.uid, 'email': user_record.email, 'phone_number': user_record.phone_number}

    def test_multiple_uid_types(self, new_user_record_list, new_user_with_provider):
        """Looking up by uid, email, phone and provider in one call returns
        all matching users."""
        get_users_results = auth.get_users([auth.UidIdentifier(new_user_record_list[0].uid), auth.EmailIdentifier(new_user_record_list[1].email), auth.PhoneIdentifier(new_user_record_list[2].phone_number), auth.ProviderIdentifier(new_user_with_provider.provider_data[0].provider_id, new_user_with_provider.provider_data[0].uid)])
        # Sort both sides by uid so ordering differences don't fail the test.
        actual = sorted([self._map_user_record_to_uid_email_phones(user) for user in get_users_results.users], key=(lambda user: user['uid']))
        expected = sorted([self._map_user_record_to_uid_email_phones(user) for user in (new_user_record_list + [new_user_with_provider])], key=(lambda user: user['uid']))
        assert (actual == expected)

    def test_existing_and_non_existing_users(self, new_user_record_list):
        """Unknown uids are silently omitted from the results."""
        get_users_results = auth.get_users([auth.UidIdentifier(new_user_record_list[0].uid), auth.UidIdentifier('uid_that_doesnt_exist'), auth.UidIdentifier(new_user_record_list[2].uid)])
        actual = sorted([self._map_user_record_to_uid_email_phones(user) for user in get_users_results.users], key=(lambda user: user['uid']))
        expected = sorted([self._map_user_record_to_uid_email_phones(user) for user in [new_user_record_list[0], new_user_record_list[2]]], key=(lambda user: user['uid']))
        assert (actual == expected)

    def test_non_existing_users(self):
        """A lookup with only unknown ids yields empty users and echoes the
        identifiers in not_found."""
        not_found_ids = [auth.UidIdentifier('non-existing user')]
        get_users_results = auth.get_users(not_found_ids)
        assert (get_users_results.users == [])
        assert (get_users_results.not_found == not_found_ids)

    def test_de_dups_duplicate_users(self, new_user):
        """Duplicate identifiers for the same user yield a single result."""
        get_users_results = auth.get_users([auth.UidIdentifier(new_user.uid), auth.UidIdentifier(new_user.uid)])
        actual = [self._map_user_record_to_uid_email_phones(user) for user in get_users_results.users]
        expected = [self._map_user_record_to_uid_email_phones(new_user)]
        assert (actual == expected)
def create_engine(db_uri, print_sql, auto_create):
    """Create the database interface matching the scheme of `db_uri`.

    Args:
        db_uri: connection URI (sqlite path, or scheme://user:pass@host/...).
        print_sql: forwarded to the interface; echo generated SQL.
        auto_create: for sqlite, allow creating a missing database file.

    Raises:
        ConnectError: sqlite file missing and auto_create is False.
        ValueError: malformed path/query parts for the given scheme.
        NotImplementedError: unsupported or multiple schemes.
    """
    if db_uri.startswith(_SQLITE_SCHEME):
        path = db_uri[len(_SQLITE_SCHEME):]
        # In-memory databases never need to exist on disk.
        if not auto_create and path != ':memory:' and not Path(path).exists():
            raise ConnectError(("File %r doesn't exist. To create it, set auto_create to True" % path))
        return SqliteInterface(path, print_sql=print_sql)
    dsn = dsnparse.parse(db_uri)
    if len(dsn.schemes) > 1:
        raise NotImplementedError("Preql doesn't support multiple schemes")
    (scheme,) = dsn.schemes
    if scheme == 'snowflake':
        # Path is either /database (schema in query) or /database/schema.
        if len(dsn.paths) == 1:
            (database,) = dsn.paths
            schema = dsn.query['schema']
        elif len(dsn.paths) == 2:
            database, schema = dsn.paths
        else:
            raise ValueError(f"Too many parts in path. Expected format: '{HELP_SNOWFLAKE_URI_FORMAT}'")
        try:
            warehouse = dsn.query['warehouse']
        except KeyError:
            raise ValueError(f"Must provide warehouse. Expected format: '{HELP_SNOWFLAKE_URI_FORMAT}'")
        return SnowflakeInterface(dsn.host, dsn.user, dsn.password, warehouse=warehouse, database=database, schema=schema, print_sql=print_sql)
    if scheme == 'presto':
        # Path is either /catalog (schema optional in query) or /catalog/schema.
        if len(dsn.paths) == 1:
            (catalog,) = dsn.paths
            schema = dsn.query.get('schema')
        elif len(dsn.paths) == 2:
            catalog, schema = dsn.paths
        else:
            raise ValueError(f"Too many parts in path. Expected format: '{HELP_PRESTO_URI_FORMAT}'")
        return PrestoInterface(dsn.host, dsn.port, dsn.user, dsn.password, catalog=catalog, schema=schema, print_sql=print_sql)
    # Remaining schemes take at most one path component (the database name).
    if len(dsn.paths) == 0:
        path = ''
    elif len(dsn.paths) == 1:
        (path,) = dsn.paths
    else:
        raise ValueError(('Bad value for uri, too many paths: %s' % db_uri))
    host_port_interfaces = {'postgres': PostgresInterface, 'postgresql': PostgresInterface, 'mysql': MysqlInterface, 'redshift': RedshiftInterface, 'oracle': OracleInterface}
    if scheme in host_port_interfaces:
        interface_cls = host_port_interfaces[scheme]
        return interface_cls(dsn.host, dsn.port, path, dsn.user, dsn.password, print_sql=print_sql)
    if scheme == 'git':
        return GitInterface(path, print_sql=print_sql)
    if scheme == 'duck':
        return DuckInterface(path, print_sql=print_sql)
    if scheme == 'bigquery':
        return BigQueryInterface(dsn.host, path, print_sql=print_sql)
    raise NotImplementedError(f'Scheme {dsn.scheme} currently not supported')
def test_bulk_all_errors_from_chunk_are_raised_on_failure(sync_client):
    """Every failed action in a chunk must surface in ``BulkIndexError.errors``."""
    # Strict integer mapping so that string values fail to index.
    sync_client.indices.create(
        index='i',
        body={
            'mappings': {'properties': {'a': {'type': 'integer'}}},
            'settings': {'number_of_shards': 1, 'number_of_replicas': 0},
        },
    )
    sync_client.cluster.health(wait_for_status='yellow')

    bad_docs = [{'a': 'b'}, {'a': 'c'}]
    try:
        for ok, _ in helpers.streaming_bulk(sync_client, bad_docs, index='i', raise_on_error=True):
            assert ok
    except helpers.BulkIndexError as exc:
        # Both failures from the chunk are reported, not only the first.
        assert len(exc.errors) == 2
    else:
        assert False, 'exception should have been raised'
class OptionPlotoptionsVariablepieSonificationTracksActivewhen(Options):
    """Accessor wrapper for the sonification track ``activeWhen`` options.

    NOTE(review): each same-named getter/setter pair here was clearly a
    ``@property`` / ``@<name>.setter`` pair whose decorators were lost;
    without them the second ``def`` silently shadowed the getter, leaving
    only a setter-shaped method. The decorators are restored below.
    """

    @property
    def crossingDown(self):
        """Current ``crossingDown`` option value (None when unset)."""
        return self._config_get(None)

    @crossingDown.setter
    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    @property
    def crossingUp(self):
        """Current ``crossingUp`` option value (None when unset)."""
        return self._config_get(None)

    @crossingUp.setter
    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    @property
    def max(self):
        """Current ``max`` option value (None when unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Current ``min`` option value (None when unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        """Current ``prop`` option value (None when unset)."""
        return self._config_get(None)

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
class flow_add(flow_mod):
    """OFPT_FLOW_MOD message (OpenFlow wire version 4 / OF 1.3) with the
    OFPFC_ADD command.

    Generated-style serializer: ``pack``/``unpack`` mirror the on-wire
    struct layout field by field.
    NOTE(review): string-based packing (``''.join``, NUL padding as str)
    indicates this was written for Python 2 — verify before running on 3.
    """
    version = 4      # OpenFlow protocol version byte
    type = 14        # OFPT_FLOW_MOD message type
    _command = 0     # OFPFC_ADD
    def __init__(self, xid=None, cookie=None, cookie_mask=None, table_id=None, idle_timeout=None, hard_timeout=None, priority=None, buffer_id=None, out_port=None, out_group=None, flags=None, match=None, instructions=None):
        # All numeric fields default to 0, match/instructions to empty
        # defaults; xid alone is left as None (assigned later by the
        # connection layer when unset).
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (cookie != None):
            self.cookie = cookie
        else:
            self.cookie = 0
        if (cookie_mask != None):
            self.cookie_mask = cookie_mask
        else:
            self.cookie_mask = 0
        if (table_id != None):
            self.table_id = table_id
        else:
            self.table_id = 0
        if (idle_timeout != None):
            self.idle_timeout = idle_timeout
        else:
            self.idle_timeout = 0
        if (hard_timeout != None):
            self.hard_timeout = hard_timeout
        else:
            self.hard_timeout = 0
        if (priority != None):
            self.priority = priority
        else:
            self.priority = 0
        if (buffer_id != None):
            self.buffer_id = buffer_id
        else:
            self.buffer_id = 0
        if (out_port != None):
            self.out_port = out_port
        else:
            self.out_port = 0
        if (out_group != None):
            self.out_group = out_group
        else:
            self.out_group = 0
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (match != None):
            self.match = match
        else:
            self.match = ofp.match()
        if (instructions != None):
            self.instructions = instructions
        else:
            self.instructions = []
        return
    def pack(self):
        """Serialize this message to its wire representation.

        The 16-bit length field is emitted as 0 first, then back-patched
        at index 2 once the total size is known.
        """
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!Q', self.cookie))
        packed.append(struct.pack('!Q', self.cookie_mask))
        packed.append(struct.pack('!B', self.table_id))
        packed.append(util.pack_fm_cmd(self._command))
        packed.append(struct.pack('!H', self.idle_timeout))
        packed.append(struct.pack('!H', self.hard_timeout))
        packed.append(struct.pack('!H', self.priority))
        packed.append(struct.pack('!L', self.buffer_id))
        packed.append(util.pack_port_no(self.out_port))
        packed.append(struct.pack('!L', self.out_group))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 2))  # pad to 8-byte alignment before match
        packed.append(self.match.pack())
        packed.append(loxi.generic_util.pack_list(self.instructions))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)
    def unpack(reader):
        """Parse a flow_add from *reader* and return the populated object.

        NOTE(review): takes ``reader`` (not ``self``) — presumably meant
        to carry a ``@staticmethod`` decorator, as is usual for generated
        loxi unpackers; confirm against the generator output.
        """
        obj = flow_add()
        _version = reader.read('!B')[0]
        assert (_version == 4)
        _type = reader.read('!B')[0]
        assert (_type == 14)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Restrict further reads to this message's declared length.
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        obj.cookie = reader.read('!Q')[0]
        obj.cookie_mask = reader.read('!Q')[0]
        obj.table_id = reader.read('!B')[0]
        __command = util.unpack_fm_cmd(reader)
        assert (__command == 0)
        obj.idle_timeout = reader.read('!H')[0]
        obj.hard_timeout = reader.read('!H')[0]
        obj.priority = reader.read('!H')[0]
        obj.buffer_id = reader.read('!L')[0]
        obj.out_port = util.unpack_port_no(reader)
        obj.out_group = reader.read('!L')[0]
        obj.flags = reader.read('!H')[0]
        reader.skip(2)  # alignment padding emitted by pack()
        obj.match = ofp.match.unpack(reader)
        obj.instructions = loxi.generic_util.unpack_list(reader, ofp.instruction.instruction.unpack)
        return obj
    def __eq__(self, other):
        """Field-by-field equality against another flow_add."""
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.cookie != other.cookie):
            return False
        if (self.cookie_mask != other.cookie_mask):
            return False
        if (self.table_id != other.table_id):
            return False
        if (self.idle_timeout != other.idle_timeout):
            return False
        if (self.hard_timeout != other.hard_timeout):
            return False
        if (self.priority != other.priority):
            return False
        if (self.buffer_id != other.buffer_id):
            return False
        if (self.out_port != other.out_port):
            return False
        if (self.out_group != other.out_group):
            return False
        if (self.flags != other.flags):
            return False
        if (self.match != other.match):
            return False
        if (self.instructions != other.instructions):
            return False
        return True
    def pretty_print(self, q):
        """Write a human-readable dump of this message to pretty-printer *q*."""
        q.text('flow_add {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('cookie = ')
                q.text(('%#x' % self.cookie))
                q.text(',')
                q.breakable()
                q.text('cookie_mask = ')
                q.text(('%#x' % self.cookie_mask))
                q.text(',')
                q.breakable()
                q.text('table_id = ')
                q.text(('%#x' % self.table_id))
                q.text(',')
                q.breakable()
                q.text('idle_timeout = ')
                q.text(('%#x' % self.idle_timeout))
                q.text(',')
                q.breakable()
                q.text('hard_timeout = ')
                q.text(('%#x' % self.hard_timeout))
                q.text(',')
                q.breakable()
                q.text('priority = ')
                q.text(('%#x' % self.priority))
                q.text(',')
                q.breakable()
                q.text('buffer_id = ')
                q.text(('%#x' % self.buffer_id))
                q.text(',')
                q.breakable()
                q.text('out_port = ')
                q.text(util.pretty_port(self.out_port))
                q.text(',')
                q.breakable()
                q.text('out_group = ')
                q.text(('%#x' % self.out_group))
                q.text(',')
                q.breakable()
                q.text('flags = ')
                # Symbolic names for the OFPFF_* flag bits.
                value_name_map = {1: 'OFPFF_SEND_FLOW_REM', 2: 'OFPFF_CHECK_OVERLAP', 4: 'OFPFF_RESET_COUNTS', 8: 'OFPFF_NO_PKT_COUNTS', 16: 'OFPFF_NO_BYT_COUNTS', 128: 'OFPFF_BSN_SEND_IDLE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('match = ')
                q.pp(self.match)
                q.text(',')
                q.breakable()
                q.text('instructions = ')
                q.pp(self.instructions)
            q.breakable()
        q.text('}')
def _graphs_with_dangerous_reference_use() -> Tuple[(ControlFlowGraph, ControlFlowGraph)]:
    """Build an (input, expected-output) CFG pair for an expression-propagation test.

    ``y`` is aliased and has its address passed to ``scanf``, so value
    propagation across that call is unsafe.  The expected output only
    rewrites the pre-call assignment (``x[1] = x[0] + 11`` instead of
    ``x[1] = y[0]``) and the final return (``ret y[0]``).
    NOTE(review): ``vars``/``const`` here are test helpers from this
    module, not the Python builtins — confirm against the imports.
    """
    in_cfg = ControlFlowGraph()
    x = vars('x', 2, aliased=False)
    y = vars('y', 2, aliased=True)
    c = const(11)
    in_node = BasicBlock(0, [_call('rand', [x[0]], []), _assign(y[0], _add(x[0], c[5])), _assign(x[1], y[0]), _call('scanf', [], [_addr(y[0])]), _assign(y[1], y[0]), _ret(y[1])])
    in_cfg.add_node(in_node)
    out_cfg = ControlFlowGraph()
    out_node = BasicBlock(0, [_call('rand', [x[0]], []), _assign(y[0], _add(x[0], c[5])), _assign(x[1], _add(x[0], c[5])), _call('scanf', [], [_addr(y[0])]), _assign(y[1], y[0]), _ret(y[0])])
    out_cfg.add_node(out_node)
    return (in_cfg, out_cfg)
def copyright(ctx):
    """Report every .py file under ROOT_DIR whose head mentions a copyright.

    Walks the tree, skipping hidden/underscore-prefixed directories,
    __pycache__, and build/dist output; for each match, prints the file
    and the matching line number(s) within the first 200 characters.

    ``ctx`` is the task-runner context argument and is unused here.
    """
    for (dirpath, dirnames, filenames) in os.walk(ROOT_DIR):
        reldirpath = os.path.relpath(dirpath, ROOT_DIR)
        if ((reldirpath[0] in '._') or reldirpath.endswith('__pycache__')):
            continue
        if (os.path.split(reldirpath)[0] in ('build', 'dist')):
            continue
        for fname in filenames:
            if (not fname.endswith('.py')):
                continue
            filename = os.path.join(dirpath, fname)
            # Use a context manager so the handle is closed deterministically
            # (the original leaked an open file per scanned module).
            with open(filename, 'rt', encoding='utf-8') as fh:
                text = fh.read()
            # Only the head of the file is inspected for the notice.
            head = text[:200]
            if ('copyright' in head.lower()):
                print(('Copyright in %s%s%s' % (reldirpath, os.path.sep, fname)))
                for (i, line) in enumerate(head.splitlines()):
                    if ('copyright' in line.lower()):
                        print(('  line %i: %s' % ((i + 1), line)))
def create_user(username, password, email=None, name=None, job_title=None, locale=None, disabled=None):
    """Persist a new FlicketUser, storing only the hashed password."""
    hashed = hash_password(password)
    new_user = FlicketUser(
        username=username,
        email=email,
        name=name,
        password=hashed,
        job_title=job_title,
        date_added=datetime.datetime.now(),
        locale=locale,
        disabled=disabled,
    )
    db.session.add(new_user)
    db.session.commit()
def cleanup(fips_dir, proj_dir):
    """Delete the project's CLion settings directory (.idea) after confirmation.

    :param fips_dir: fips installation directory (unused, kept for the
        common generator-interface signature)
    :param proj_dir: project root that may contain a ``.idea`` directory
    """
    # os.path.join instead of string concatenation keeps the path portable.
    clion_dir = os.path.join(proj_dir, '.idea')
    if os.path.isdir(clion_dir):
        log.info(((log.RED + 'Please confirm to delete the following directory:') + log.DEF))
        log.info('  {}'.format(clion_dir))
        if util.confirm(((log.RED + 'Delete this directory?') + log.DEF)):
            # Re-check: the directory may have vanished while waiting for input.
            if os.path.isdir(clion_dir):
                log.info('  deleting {}'.format(clion_dir))
                shutil.rmtree(clion_dir)
            log.info('Done.')
        else:
            log.info('Nothing deleted, done.')
    else:
        log.info('Nothing to delete.')
def extractBuzyhoneybeeBlog(item):
    """Build a release message for a Buzyhoneybee Blog feed item.

    Returns None for previews or unnumbered posts, False when no known
    tag matches.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    # Nothing to report without a volume/chapter number or for previews.
    if (not chp and not vol) or 'preview' in title.lower():
        return None
    for tagname, name, tl_type in (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(
                item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OrderingDottedRelatedSerializer(serializers.ModelSerializer):
    """Serializer exposing fields reached through the dotted ``related_object`` source."""

    related_text = serializers.CharField(source='related_object.text')
    related_title = serializers.CharField(source='related_object.title')

    class Meta:
        model = OrderingFilterRelatedModel
        fields = ('related_text', 'related_title', 'index')
@pytest.mark.parametrize('sending_elasticapm_client', [{'api_request_time': '2s'}], indirect=True)
def test_send_timer(sending_elasticapm_client, caplog):
    """A configured api_request_time is honored and flushing logs 'Sent request'.

    NOTE(review): the decorator above had lost its ``@pytest.mark`` prefix
    (a bare ``.parametrize(...)`` line is a syntax error); restored here,
    assuming ``pytest`` is imported at the top of this test module.
    """
    with caplog.at_level('DEBUG', 'elasticapm.transport'):
        assert (sending_elasticapm_client.config.api_request_time.total_seconds() == 2)
        sending_elasticapm_client.begin_transaction('test_type')
        sending_elasticapm_client.end_transaction('test')
        sending_elasticapm_client._transport.flush()
        assert_any_record_contains(caplog.records, 'Sent request')
class LinearAD_SteadyState(AnalyticalSolutions.SteadyState):
    """Steady-state analytical solution of a linear advection-diffusion problem.

    ``b`` is the constant advection vector and ``a`` the diffusion
    coefficient.  NOTE(review): the mutable default ``b=[1.0, 0, 0]`` is
    only read here, but callers mutating it would share state across calls.
    """
    from math import exp, sqrt
    def __init__(self, b=[1.0, 0, 0], a=0.5):
        self.b_ = b
        self.a_ = a
        # bn = |b|, the magnitude of the advection vector.
        bn = sqrt((((b[0] ** 2) + (b[1] ** 2)) + (b[2] ** 2)))
        self.bn = bn
        if (bn != 0.0):
            self.D_ = old_div(1.0, (exp(old_div(bn, a)) - 1.0))
        else:
            self.D_ = 0.0
        self.C_ = ((- self.D_) * exp(old_div(bn, a)))
    def uOfX(self, X):
        """Evaluate the solution at spatial point X (indexable 3-vector)."""
        x = X
        if (self.D_ != 0.0):
            # Exponential profile along the advection direction.
            return (((- self.D_) * exp(old_div((((self.b_[0] * x[0]) + (self.b_[1] * x[1])) + (self.b_[2] * x[2])), self.a_))) - self.C_)
        else:
            # NOTE(review): this branch divides by self.bn, which is 0.0
            # whenever D_ == 0.0 per __init__ — verify intended behavior.
            return (1.0 - old_div((((self.b_[0] * x[0]) + (self.b_[1] * x[1])) + (self.b_[2] * x[2])), self.bn))
def test_wave_load_tablefile(wave, mocker, tmp_path):
    """Loading a waveform from a JSON table file must match the builtin 'tria' shape.

    SI1 gets the generator's builtin triangle function; SI2 is loaded from
    an equivalent table written to a temp JSON file via the CLI, and the
    resulting waveform tables must be identical.
    """
    wavegen = WaveformGenerator(mocker.Mock())
    wavegen.load_function('SI1', 'tria')
    def tria(x):
        # Reference triangle wave scaled to the output range.
        return (AnalogOutput.RANGE[1] * (abs(((x % 4) - 2)) - 1))
    # 512 samples over one span of the waveform.
    span = [(- 1), 3]
    x = np.arange(span[0], span[1], ((span[1] - span[0]) / 512))
    table_tmp_json = str((tmp_path / 'table.json'))
    with open(table_tmp_json, 'w') as json_file:
        json.dump(tria(x).tolist(), json_file)
    cli.cmdline(['wave', 'load', 'SI2', '--table-file', table_tmp_json])
    assert (AnalogOutput('SI1').waveform_table == AnalogOutput('SI2').waveform_table)
class OptionPlotoptionsPyramid3dSonificationDefaultinstrumentoptionsMappingVolume(Options):
    """Accessor wrapper for the sonification instrument volume-mapping options.

    NOTE(review): each same-named getter/setter pair here was clearly a
    ``@property`` / ``@<name>.setter`` pair whose decorators were lost;
    without them the second ``def`` silently shadowed the getter, leaving
    only a setter-shaped method. The decorators are restored below.
    """

    @property
    def mapFunction(self):
        """Current ``mapFunction`` option value (None when unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Current ``mapTo`` option value (None when unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Current ``max`` option value (None when unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Current ``min`` option value (None when unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Current ``within`` option value (None when unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class ConfigurationWindow():
    """GTK window for editing SUSI speech/hotword/auth settings.

    Widgets are loaded from ``glade_files/configure.glade``; each
    ``init_*`` method syncs one widget to the value stored in ``susicfg``,
    and the nested ``Handler`` writes user changes back.
    """
    def __init__(self) -> None:
        super().__init__()
        builder = Gtk.Builder()
        builder.add_from_file(os.path.join(TOP_DIR, 'glade_files/configure.glade'))
        self.window = builder.get_object('configuration_window')
        self.stt_combobox = builder.get_object('stt_combobox')
        self.tts_combobox = builder.get_object('tts_combobox')
        self.auth_switch = builder.get_object('auth_switch')
        self.hotword_combobox = builder.get_object('hotword_combobox')
        self.wake_button_switch = builder.get_object('wake_button_switch')
        # Populate widgets from the saved configuration before wiring signals.
        self.init_auth_switch()
        self.init_tts_combobox()
        self.init_stt_combobox()
        self.init_hotword_switch()
        self.init_wake_button_switch()
        builder.connect_signals(ConfigurationWindow.Handler(self))
        self.window.set_resizable(False)
    def show_window(self):
        """Show the window and enter the GTK main loop (blocks)."""
        self.window.show_all()
        Gtk.main()
    def exit_window(self):
        """Destroy the window and quit the GTK main loop."""
        self.window.destroy()
        Gtk.main_quit()
    def init_tts_combobox(self):
        """Select the row for the configured TTS engine (falling back to flite)."""
        default_tts = susicfg.get('tts')
        if (default_tts == 'google'):
            self.tts_combobox.set_active(TTS_GOOGLE)
        elif (default_tts == 'flite'):
            self.tts_combobox.set_active(TTS_FLITE)
        elif (default_tts == 'watson'):
            self.tts_combobox.set_active(TTS_WATSON)
        else:
            self.tts_combobox.set_active(TTS_FLITE)
            susicfg.set('tts', 'flite')
    def init_stt_combobox(self):
        """Select the row for the configured STT engine (falling back to deepspeech-local)."""
        default_stt = susicfg.get('stt')
        if (default_stt == 'google'):
            self.stt_combobox.set_active(STT_GOOGLE)
        elif (default_stt == 'watson'):
            self.stt_combobox.set_active(STT_WATSON)
        elif (default_stt == 'bing'):
            self.stt_combobox.set_active(STT_BING)
        elif (default_stt == 'deepspeech-local'):
            self.stt_combobox.set_active(STT_DEEPSPEECH)
        elif (default_stt == 'vosk'):
            self.stt_combobox.set_active(STT_VOSK)
        else:
            # BUG FIX: this branch previously updated tts_combobox (a
            # copy-paste from init_tts_combobox), leaving the STT combobox
            # unselected for unknown engine values.
            self.stt_combobox.set_active(STT_DEEPSPEECH)
            susicfg.set('stt', 'deepspeech-local')
    def init_auth_switch(self):
        """Reflect whether SUSI runs in authenticated mode."""
        usage_mode = susicfg.get('susi.mode')
        if (usage_mode == 'authenticated'):
            self.auth_switch.set_active(True)
        else:
            self.auth_switch.set_active(False)
    def init_hotword_switch(self):
        """Select the configured hotword engine, probing for Snowboy when unset."""
        default_hotword = susicfg.get('hotword.engine')
        if (default_hotword == 'Snowboy'):
            self.hotword_combobox.set_active(HOTWORD_SNOWBOY)
        elif (default_hotword == 'PocketSphinx'):
            self.hotword_combobox.set_active(HOTWORD_POCKETSPHINX)
        elif (default_hotword == 'None'):
            self.hotword_combobox.set_active(HOTWORD_NONE)
        else:
            # Unknown value: prefer Snowboy when importable, else PocketSphinx.
            try:
                import snowboy
                self.hotword_combobox.set_active(HOTWORD_SNOWBOY)
                susicfg.set('hotword.engine', 'Snowboy')
            except ImportError:
                self.hotword_combobox.set_active(HOTWORD_POCKETSPHINX)
                susicfg.set('hotword.engine', 'PocketSphinx')
    def init_wake_button_switch(self):
        """Enable the wake-button switch only when GPIO support is available."""
        try:
            import RPi.GPIO
            if (susicfg.get('wakebutton') == 'enabled'):
                self.wake_button_switch.set_active(True)
            else:
                self.wake_button_switch.set_active(False)
        except ImportError:
            self.wake_button_switch.set_sensitive(False)
        except RuntimeError:
            # RPi.GPIO raises RuntimeError off-device.
            self.wake_button_switch.set_sensitive(False)
    class Handler():
        """Signal handlers that persist widget changes back into susicfg."""
        def __init__(self, config_window):
            self.config_window = config_window
        def on_delete_window(self, *args):
            self.config_window.exit_window()
        def on_stt_combobox_changed(self, combo: Gtk.ComboBox):
            selection = combo.get_active()
            if (selection == STT_DEEPSPEECH):
                # CONSISTENCY FIX: every other reference to this engine uses
                # the hyphenated key 'deepspeech-local' (was 'deepspeech_local').
                susicfg.set('stt', 'deepspeech-local')
            elif (selection == STT_VOSK):
                susicfg.set('stt', 'vosk')
            elif (selection == STT_GOOGLE):
                susicfg.set('stt', 'google')
            elif (selection == STT_WATSON):
                # Watson needs credentials; revert the selection if cancelled.
                credential_dialog = WatsonCredentialsDialog(self.config_window.window)
                response = credential_dialog.run()
                if (response == Gtk.ResponseType.OK):
                    username = credential_dialog.username_field.get_text()
                    password = credential_dialog.password_field.get_text()
                    susicfg.set('stt', 'watson')
                    susicfg.set('watson.stt.user', username)
                    susicfg.set('watson.stt.pass', password)
                else:
                    self.config_window.init_stt_combobox()
                credential_dialog.destroy()
            elif (selection == STT_BING):
                credential_dialog = BingCredentialDialog(self.config_window.window)
                response = credential_dialog.run()
                if (response == Gtk.ResponseType.OK):
                    api_key = credential_dialog.api_key_field.get_text()
                    susicfg.set('stt', 'bing')
                    susicfg.set('bing.api', api_key)
                else:
                    self.config_window.init_stt_combobox()
                credential_dialog.destroy()
        def on_tts_combobox_changed(self, combo):
            selection = combo.get_active()
            if (selection == TTS_GOOGLE):
                susicfg.set('tts', 'google')
            elif (selection == TTS_FLITE):
                susicfg.set('tts', 'flite')
            elif (selection == TTS_WATSON):
                credential_dialog = WatsonCredentialsDialog(self.config_window.window)
                response = credential_dialog.run()
                if (response == Gtk.ResponseType.OK):
                    username = credential_dialog.username_field.get_text()
                    password = credential_dialog.password_field.get_text()
                    susicfg.set('tts', 'watson')
                    susicfg.set('watson.tts.user', username)
                    susicfg.set('watson.tts.pass', password)
                    susicfg.set('watson.tts.voice', 'en-US_AllisonVoice')
                else:
                    self.config_window.init_tts_combobox()
                credential_dialog.destroy()
        def on_auth_switch_active_notify(self, switch, gparam):
            if switch.get_active():
                # Require a successful login before leaving the switch on.
                login_window = LoginWindow()
                login_window.show_window()
                if (susicfg.get('susi.mode') == 'authenticated'):
                    switch.set_active(True)
                else:
                    switch.set_active(False)
        def on_hotword_combobox_changed(self, combo: Gtk.ComboBox):
            selection = combo.get_active()
            if (selection == HOTWORD_SNOWBOY):
                susicfg.set('hotword.engine', 'Snowboy')
            elif (selection == HOTWORD_POCKETSPHINX):
                susicfg.set('hotword.engine', 'PocketSphinx')
            elif (selection == HOTWORD_NONE):
                susicfg.set('hotword.engine', 'None')
        def on_wake_button_switch_active_notify(self, switch, gparam):
            if switch.get_active():
                susicfg.set('wakebutton', 'enabled')
            else:
                susicfg.set('wakebutton', 'disabled')
class OefSearchDialogues(BaseOefSearchDialogues):
    """Dialogue tracker for OEF search conversations.

    This agent always plays the AGENT role, regardless of which side sent
    the first message.
    """
    def __init__(self, self_address: Address, **kwargs) -> None:
        def role_from_first_message(message: Message, receiver_address: Address) -> BaseDialogue.Role:
            # Fixed role: every dialogue is entered as AGENT.
            return OefSearchDialogue.Role.AGENT
        BaseOefSearchDialogues.__init__(self, self_address=self_address, role_from_first_message=role_from_first_message)
class PyPIChecker(Checker):
    """External-data checker that polls the PyPI JSON API for new releases."""
    CHECKER_DATA_TYPE = 'pypi'
    # Schema of the per-source checker-data block; only 'name' is required.
    CHECKER_DATA_SCHEMA = {'type': 'object', 'properties': {'name': {'type': 'string'}, 'packagetype': {'type': 'string', 'enum': ['sdist', 'bdist_wheel']}, 'versions': OPERATORS_SCHEMA, 'stable-only': {'type': 'boolean'}}, 'required': ['name']}
    async def check(self, external_data: ExternalBase):
        """Query PyPI and record the newest matching download on *external_data*.

        Honors the optional version constraints, package type
        (default 'sdist') and stable-only flag from the checker data.
        Raises CheckerQueryError when no matching download exists.
        """
        package_name = external_data.checker_data['name']
        package_type = external_data.checker_data.get('packagetype', 'sdist')
        constraints = [(o, Version(v)) for (o, v) in external_data.checker_data.get('versions', {}).items()]
        stable_only = external_data.checker_data.get('stable-only', True)
        async with self.session.get(f'{PYPI_INDEX}/{package_name}/json') as response:
            pypi_data = (await response.json())
        # With constraints, scan all releases; otherwise only the latest.
        if constraints:
            releases = pypi_data['releases']
        else:
            releases = {pypi_data['info']['version']: pypi_data['urls']}
        downloads = list(_filter_downloads(releases, constraints, package_type, stable_only))
        try:
            # Last entry is the newest matching download.
            (pypi_version, pypi_download, pypi_date) = downloads[(- 1)]
        except IndexError as err:
            raise CheckerQueryError(f"Couldn't find {package_type} for package {package_name}") from err
        checksum = MultiDigest.from_source(pypi_download['digests'])
        new_version = ExternalFile(url=pypi_download['url'], checksum=checksum, size=pypi_download['size'], version=pypi_version.orig_str, timestamp=pypi_date)
        external_data.set_new_version(new_version)
class DebugSolverFrame():
    """Records solver commands (bind / tuplebind / assume) for later display.

    Each entry in ``commands`` is a tuple tagged by its first element.
    """
    def __init__(self):
        self.commands = []
    def str_lines(self, show_smt=False):
        """Render the recorded commands as printable lines.

        When ``show_smt`` is set, each command is followed by indented
        SMTLIB dumps of its formula(s).
        """
        lines = []
        for c in self.commands:
            if (c[0] == 'bind'):
                (cmd, names, rhs, smt) = c
                # First binding carries the 'bind ' keyword; the following
                # ones are aligned under it with spaces.
                cmd = 'bind '
                for (nm, r) in zip(names, rhs):
                    lines.append(f'{cmd}{nm} = {r}')
                    if show_smt:
                        lines.append(f'    smt {SMT.to_smtlib(smt)}')
                    cmd = '     '
            elif (c[0] == 'tuplebind'):
                (cmd, names, rhs, smt) = c
                nms = ','.join([str(n) for n in names])
                lines.append(f'bind {nms} = {rhs}')
                # Tuple binds carry one SMT formula per component.
                assert (type(smt) is tuple)
                if show_smt:
                    for s in smt:
                        lines.append(f'    smt {SMT.to_smtlib(s)}')
            elif (c[0] == 'assume'):
                (cmd, e, smt) = c
                lines.append(f'assume {e}')
                if show_smt:
                    lines.append(f'    smt {SMT.to_smtlib(smt)}')
            else:
                assert False, 'bad case'
        return lines
    def add_bind(self, names, rhs, smt):
        """Record a (possibly multi-name) bind command."""
        self.commands.append(('bind', names, rhs, smt))
    def add_tuple_bind(self, names, rhs, smt):
        """Record a tuple bind; *smt* must be a tuple of formulas."""
        self.commands.append(('tuplebind', names, rhs, smt))
    def add_assumption(self, e, smt_e):
        """Record an assumption with its SMT translation."""
        self.commands.append(('assume', e, smt_e))
def extractTheklreadsBlogspotCom(item):
    """Build a release message for a theklreads.blogspot.com feed item.

    Returns None for previews or unnumbered posts, False when no known
    tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    has_numbering = bool(chp or vol)
    if not has_numbering or 'preview' in item['title'].lower():
        return None
    tag_rules = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tag, series_name, tl_type in tag_rules:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp,
                                               frag=frag, postfix=postfix,
                                               tl_type=tl_type)
    return False
class ConfigLoader(ABC):
    """Abstract interface for loading and querying Hydra-style configurations.

    NOTE(review): these stub methods had plainly lost their
    ``@abstractmethod`` decorators (an ABC with concrete ``...`` bodies
    enforces nothing on subclasses); restored here, assuming
    ``abstractmethod`` is imported at the top of the file alongside ``ABC``.
    """

    @abstractmethod
    def load_configuration(self, config_name: Optional[str], overrides: List[str], run_mode: RunMode, from_shell: bool=True, validate_sweep_overrides: bool=True) -> DictConfig:
        """Compose the primary configuration with the given overrides."""
        ...

    @abstractmethod
    def load_sweep_config(self, master_config: DictConfig, sweep_overrides: List[str]) -> DictConfig:
        """Compose one sweep job's configuration from the master config."""
        ...

    @abstractmethod
    def get_search_path(self) -> ConfigSearchPath:
        """Return the config search path in use."""
        ...

    @abstractmethod
    def get_sources(self) -> List[ConfigSource]:
        """Return the configured config sources."""
        ...

    @abstractmethod
    def list_groups(self, parent_name: str) -> List[str]:
        """List child config groups of *parent_name*."""
        ...

    @abstractmethod
    def get_group_options(self, group_name: str, results_filter: Optional[ObjectType]=ObjectType.CONFIG, config_name: Optional[str]=None, overrides: Optional[List[str]]=None) -> List[str]:
        """List the selectable options inside a config group."""
        ...

    @abstractmethod
    def compute_defaults_list(self, config_name: Optional[str], overrides: List[str], run_mode: RunMode) -> Any:
        """Compute the resolved defaults list for a config."""
        ...
@pytest.mark.parametrize('text,input_strings,result_strings,result_offsets', [('Felipe and Jaime went to the library.', ['Felipe', 'Jaime', 'library'], ['Felipe', 'Jaime', 'library'], [(0, 6), (11, 16), (29, 36)]), ('The Manila Observatory was founded in 1865 in Manila.', ['Manila', 'The Manila Observatory'], ['Manila', 'Manila', 'The Manila Observatory'], [(4, 10), (46, 52), (0, 22)]), ('Take the road from downtown and turn left at the public market.', ['public market', 'downtown'], ['public market', 'downtown'], [(49, 62), (19, 27)])])
def test_ensure_offsets_correspond_to_substrings(text, input_strings, result_strings, result_offsets):
    """find_substrings must return offsets that slice back to the input strings.

    NOTE(review): the decorator above had lost its ``@pytest.mark`` prefix
    (a bare ``.parametrize(...)`` line is a syntax error); restored here,
    assuming ``pytest`` is imported at the top of this test module.
    """
    offsets = find_substrings(text, input_strings)
    assert (result_offsets == offsets)
    # Slicing the text by each offset pair must reproduce the strings.
    found_substrings = [text[start:end] for (start, end) in offsets]
    assert (result_strings == found_substrings)
def create():
    """Create a new environment directory, optionally applying a template.

    Exits with a help/error message when the name is missing, the caller
    is not root, or the environment already exists.
    """
    envs_root = getRootDir.getEnvsDir()
    template = getFlag.getFlag('-t')
    env_name = getArg.getArg(0)
    # Guard clauses: validate inputs before touching the filesystem.
    if not env_name:
        sys.exit(text.createHelper)
    if not isRoot.isRoot():
        sys.exit(text.notRoot)
    target = envs_root + env_name
    if not createDir(target):
        sys.exit(text.envAlreadyExists)
    copyBaseFiles(target)
    if template:
        executeTemplate(template)
    print(text.envCreated)
class MetaHandler(type):
    """Metaclass that collects EventHandlerWrapper attributes on each class.

    Populates three class attributes:
      _declared_events_  - wrappers declared directly on the class body
      _all_events_       - wrappers merged across the MRO, nearest wins
      _events_handlers_  - event -> wrapper lookup table
    """
    def __new__(cls, name, bases, attrs):
        new_class = type.__new__(cls, name, bases, attrs)
        declared_events = OrderedDict()
        all_events = OrderedDict()
        events = []
        # Collect wrappers declared directly in this class body.
        for (key, value) in list(attrs.items()):
            if isinstance(value, EventHandlerWrapper):
                events.append((key, value))
        declared_events.update(events)
        new_class._declared_events_ = declared_events
        # Merge inherited declarations: walk the MRO from most-distant base
        # toward this class so nearer definitions overwrite farther ones.
        for base in reversed(new_class.__mro__[1:]):
            if hasattr(base, '_declared_events_'):
                all_events.update(base._declared_events_)
        all_events.update(declared_events)
        new_class._all_events_ = all_events
        new_class._events_handlers_ = {el.event: el for el in new_class._all_events_.values()}
        return new_class
def extractRinxlyWordpressCom(item):
    """Build a release message for a rinxly.wordpress.com feed item.

    Returns None for previews or unnumbered posts, False when no known
    tag matches.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if (not chp and not vol) or 'preview' in title.lower():
        # No chapter/volume info, or an unwanted preview post.
        return None
    for tag_name, series, release_type in (
            ('PRC', 'PRC', 'translated'),
            ('Loiterous', 'Loiterous', 'oel')):
        if tag_name in item['tags']:
            return buildReleaseMessageWithType(
                item, series, vol, chp,
                frag=frag, postfix=postfix, tl_type=release_type)
    return False
@pytest.mark.parametrize('original,translated', EVM_EQUIVALENTS.items())
def test_compile_input_json_evm_translates(solc5source, original, translated):
    """Each EVM-version alias must be accepted by generate/compile_from_input_json.

    NOTE(review): the decorator above had lost its ``@pytest.mark`` prefix
    (a bare ``.parametrize(...)`` line is a syntax error); restored here,
    assuming ``pytest`` is imported at the top of this test module.
    ``translated`` is supplied by the parametrization but unused in the body.
    """
    compiler.set_solc_version('0.5.7')
    input_json = compiler.generate_input_json({'path.sol': solc5source}, True, 200, original)
    compiler.compile_from_input_json(input_json)
class NotificationEvent(BaseObject):
    """Notification event record built on BaseObject.

    Extra keyword arguments become attributes verbatim.  Attributes left
    as None are removed from the dirty set so they are not treated as
    user-supplied values.  NOTE(review): ``id`` and ``type`` intentionally
    shadow builtins to mirror the API field names.
    """
    def __init__(self, api=None, body=None, id=None, recipients=None, subject=None, type=None, via=None, **kwargs):
        self.api = api
        self.body = body
        self.id = id
        self.recipients = recipients
        self.subject = subject
        self.type = type
        self.via = via
        for (key, value) in kwargs.items():
            setattr(self, key, value)
        # Un-mark attributes that were never given a value; KeyError means
        # the attribute was not dirty in the first place.
        for key in self.to_dict():
            if (getattr(self, key) is None):
                try:
                    self._dirty_attributes.remove(key)
                except KeyError:
                    continue
class Business(BusinessMixin, AbstractCrudObject):
def __init__(self, fbid=None, parent_id=None, api=None):
self._isBusiness = True
super(Business, self).__init__(fbid, parent_id, api)
class Field(AbstractObject.Field):
block_offline_analytics = 'block_offline_analytics'
collaborative_ads_managed_partner_business_info = 'collaborative_ads_managed_partner_business_info'
collaborative_ads_managed_partner_eligibility = 'collaborative_ads_managed_partner_eligibility'
collaborative_ads_partner_premium_options = 'collaborative_ads_partner_premium_options'
created_by = 'created_by'
created_time = 'created_time'
extended_updated_time = 'extended_updated_time'
id = 'id'
is_hidden = 'is_hidden'
link = 'link'
name = 'name'
payment_account_id = 'payment_account_id'
primary_page = 'primary_page'
profile_picture_uri = 'profile_picture_uri'
timezone_id = 'timezone_id'
two_factor_type = 'two_factor_type'
updated_by = 'updated_by'
updated_time = 'updated_time'
user_access_expire_time = 'user_access_expire_time'
verification_status = 'verification_status'
vertical = 'vertical'
vertical_id = 'vertical_id'
class TwoFactorType():
admin_required = 'admin_required'
all_required = 'all_required'
none = 'none'
class Vertical():
advertising = 'ADVERTISING'
automotive = 'AUTOMOTIVE'
consumer_packaged_goods = 'CONSUMER_PACKAGED_GOODS'
ecommerce = 'ECOMMERCE'
education = 'EDUCATION'
energy_and_utilities = 'ENERGY_AND_UTILITIES'
entertainment_and_media = 'ENTERTAINMENT_AND_MEDIA'
financial_services = 'FINANCIAL_SERVICES'
gaming = 'GAMING'
government_and_politics = 'GOVERNMENT_AND_POLITICS'
health = 'HEALTH'
luxury = 'LUXURY'
marketing = 'MARKETING'
non_profit = 'NON_PROFIT'
organizations_and_associations = 'ORGANIZATIONS_AND_ASSOCIATIONS'
other = 'OTHER'
professional_services = 'PROFESSIONAL_SERVICES'
restaurant = 'RESTAURANT'
retail = 'RETAIL'
technology = 'TECHNOLOGY'
telecom = 'TELECOM'
travel = 'TRAVEL'
class PermittedTasks():
advertise = 'ADVERTISE'
analyze = 'ANALYZE'
cashier_role = 'CASHIER_ROLE'
create_content = 'CREATE_CONTENT'
manage = 'MANAGE'
manage_jobs = 'MANAGE_JOBS'
manage_leads = 'MANAGE_LEADS'
messaging = 'MESSAGING'
moderate = 'MODERATE'
moderate_community = 'MODERATE_COMMUNITY'
pages_messaging = 'PAGES_MESSAGING'
pages_messaging_subscriptions = 'PAGES_MESSAGING_SUBSCRIPTIONS'
profile_plus_advertise = 'PROFILE_PLUS_ADVERTISE'
profile_plus_analyze = 'PROFILE_PLUS_ANALYZE'
profile_plus_create_content = 'PROFILE_PLUS_CREATE_CONTENT'
profile_plus_facebook_access = 'PROFILE_PLUS_FACEBOOK_ACCESS'
profile_plus_full_control = 'PROFILE_PLUS_FULL_CONTROL'
profile_plus_manage = 'PROFILE_PLUS_MANAGE'
profile_plus_manage_leads = 'PROFILE_PLUS_MANAGE_LEADS'
profile_plus_messaging = 'PROFILE_PLUS_MESSAGING'
profile_plus_moderate = 'PROFILE_PLUS_MODERATE'
profile_plus_moderate_delegate_community = 'PROFILE_PLUS_MODERATE_DELEGATE_COMMUNITY'
profile_plus_revenue = 'PROFILE_PLUS_REVENUE'
read_page_mailboxes = 'READ_PAGE_MAILBOXES'
view_monetization_insights = 'VIEW_MONETIZATION_INSIGHTS'
class SurveyBusinessType():
advertiser = 'ADVERTISER'
agency = 'AGENCY'
app_developer = 'APP_DEVELOPER'
publisher = 'PUBLISHER'
class PagePermittedTasks():
advertise = 'ADVERTISE'
analyze = 'ANALYZE'
cashier_role = 'CASHIER_ROLE'
create_content = 'CREATE_CONTENT'
manage = 'MANAGE'
manage_jobs = 'MANAGE_JOBS'
manage_leads = 'MANAGE_LEADS'
messaging = 'MESSAGING'
moderate = 'MODERATE'
moderate_community = 'MODERATE_COMMUNITY'
pages_messaging = 'PAGES_MESSAGING'
pages_messaging_subscriptions = 'PAGES_MESSAGING_SUBSCRIPTIONS'
profile_plus_advertise = 'PROFILE_PLUS_ADVERTISE'
profile_plus_analyze = 'PROFILE_PLUS_ANALYZE'
profile_plus_create_content = 'PROFILE_PLUS_CREATE_CONTENT'
profile_plus_facebook_access = 'PROFILE_PLUS_FACEBOOK_ACCESS'
profile_plus_full_control = 'PROFILE_PLUS_FULL_CONTROL'
profile_plus_manage = 'PROFILE_PLUS_MANAGE'
profile_plus_manage_leads = 'PROFILE_PLUS_MANAGE_LEADS'
profile_plus_messaging = 'PROFILE_PLUS_MESSAGING'
profile_plus_moderate = 'PROFILE_PLUS_MODERATE'
profile_plus_moderate_delegate_community = 'PROFILE_PLUS_MODERATE_DELEGATE_COMMUNITY'
profile_plus_revenue = 'PROFILE_PLUS_REVENUE'
read_page_mailboxes = 'READ_PAGE_MAILBOXES'
view_monetization_insights = 'VIEW_MONETIZATION_INSIGHTS'
class SubverticalV2():
    # Business sub-vertical classification values (v2), the finer-grained
    # companion to VerticalV2 below. Constant holder consumed via
    # __dict__.values() by generated enum validation; intentionally no
    # class docstring so __doc__ is not injected into the value set.
    # NOTE: several values carry server-side misspellings ([sic] below);
    # they are the wire-format strings and must NOT be "corrected" here.
    accounting_and_tax = 'ACCOUNTING_AND_TAX'
    activities_and_leisure = 'ACTIVITIES_AND_LEISURE'
    air = 'AIR'
    apparel_and_accessories = 'APPAREL_AND_ACCESSORIES'
    arts_and_heritage_and_education = 'ARTS_AND_HERITAGE_AND_EDUCATION'
    ar_or_vr_gaming = 'AR_OR_VR_GAMING'
    audio_streaming = 'AUDIO_STREAMING'
    auto = 'AUTO'
    auto_insurance = 'AUTO_INSURANCE'
    auto_rental = 'AUTO_RENTAL'
    baby = 'BABY'
    ballot_initiative_or_referendum = 'BALLOT_INITIATIVE_OR_REFERENDUM'
    beauty = 'BEAUTY'
    beauty_and_fashion = 'BEAUTY_AND_FASHION'
    beer_and_wine_and_liquor_and_malt_beverages = 'BEER_AND_WINE_AND_LIQUOR_AND_MALT_BEVERAGES'
    bookstores = 'BOOKSTORES'
    broadcast_television = 'BROADCAST_TELEVISION'
    business_consultants = 'BUSINESS_CONSULTANTS'
    buying_agency = 'BUYING_AGENCY'
    cable_and_satellite = 'CABLE_AND_SATELLITE'
    cable_television = 'CABLE_TELEVISION'
    call_center_and_messaging_services = 'CALL_CENTER_AND_MESSAGING_SERVICES'
    candidate_or_politician = 'CANDIDATE_OR_POLITICIAN'
    career = 'CAREER'
    career_and_tech = 'CAREER_AND_TECH'
    casual_dining = 'CASUAL_DINING'
    chronic_conditions_and_medical_causes = 'CHRONIC_CONDITIONS_AND_MEDICAL_CAUSES'
    civic_influencers = 'CIVIC_INFLUENCERS'
    clinical_trials = 'CLINICAL_TRIALS'
    coffee = 'COFFEE'
    computer_and_software_and_hardware = 'COMPUTER_AND_SOFTWARE_AND_HARDWARE'
    console_and_cross_platform_gaming = 'CONSOLE_AND_CROSS_PLATFORM_GAMING'
    consulting = 'CONSULTING'
    consumer_electronics = 'CONSUMER_ELECTRONICS'
    counseling_and_psychotherapy = 'COUNSELING_AND_PSYCHOTHERAPY'
    creative_agency = 'CREATIVE_AGENCY'
    credit_and_financing_and_mortages = 'CREDIT_AND_FINANCING_AND_MORTAGES'  # [sic] "MORTAGES"
    cruises_and_marine = 'CRUISES_AND_MARINE'
    culture_and_lifestyle = 'CULTURE_AND_LIFESTYLE'
    data_analytics_and_data_management = 'DATA_ANALYTICS_AND_DATA_MANAGEMENT'
    dating_and_technology_apps = 'DATING_AND_TECHNOLOGY_APPS'
    department_store = 'DEPARTMENT_STORE'
    desktop_software = 'DESKTOP_SOFTWARE'
    dieting_and_fitness_programs = 'DIETING_AND_FITNESS_PROGRAMS'
    digital_native_education_or_training = 'DIGITAL_NATIVE_EDUCATION_OR_TRAINING'
    drinking_places = 'DRINKING_PLACES'
    education_resources = 'EDUCATION_RESOURCES'
    ed_tech = 'ED_TECH'
    elearning_and_massive_online_open_courses = 'ELEARNING_AND_MASSIVE_ONLINE_OPEN_COURSES'
    election_commission = 'ELECTION_COMMISSION'
    electronics_and_appliances = 'ELECTRONICS_AND_APPLIANCES'
    engineering_and_design = 'ENGINEERING_AND_DESIGN'
    environment_and_animal_welfare = 'ENVIRONMENT_AND_ANIMAL_WELFARE'
    esports = 'ESPORTS'
    events = 'EVENTS'
    farming_and_ranching = 'FARMING_AND_RANCHING'
    file_storage_and_cloud_and_data_services = 'FILE_STORAGE_AND_CLOUD_AND_DATA_SERVICES'
    finance = 'FINANCE'
    fin_tech = 'FIN_TECH'
    fishing_and_hunting_and_forestry_and_logging = 'FISHING_AND_HUNTING_AND_FORESTRY_AND_LOGGING'
    fitness = 'FITNESS'
    food = 'FOOD'
    footwear = 'FOOTWEAR'
    for_profit_colleges_and_universities = 'FOR_PROFIT_COLLEGES_AND_UNIVERSITIES'
    full_service_agency = 'FULL_SERVICE_AGENCY'
    government_controlled_entity = 'GOVERNMENT_CONTROLLED_ENTITY'
    government_department_or_agency = 'GOVERNMENT_DEPARTMENT_OR_AGENCY'
    government_official = 'GOVERNMENT_OFFICIAL'
    government_owned_media = 'GOVERNMENT_OWNED_MEDIA'
    grocery_and_drug_and_convenience = 'GROCERY_AND_DRUG_AND_CONVENIENCE'
    head_of_state = 'HEAD_OF_STATE'
    health_insurance = 'HEALTH_INSURANCE'
    health_systems_and_practitioners = 'HEALTH_SYSTEMS_AND_PRACTITIONERS'
    health_tech = 'HEALTH_TECH'
    home_and_furniture_and_office = 'HOME_AND_FURNITURE_AND_OFFICE'
    home_improvement = 'HOME_IMPROVEMENT'
    home_insurance = 'HOME_INSURANCE'
    home_tech = 'HOME_TECH'
    hotel_and_accomodation = 'HOTEL_AND_ACCOMODATION'  # [sic] "ACCOMODATION"
    household_goods_durable = 'HOUSEHOLD_GOODS_DURABLE'
    household_goods_non_durable = 'HOUSEHOLD_GOODS_NON_DURABLE'
    hr_and_financial_management = 'HR_AND_FINANCIAL_MANAGEMENT'
    humanitarian_or_disaster_relief = 'HUMANITARIAN_OR_DISASTER_RELIEF'
    independent_expenditure_group = 'INDEPENDENT_EXPENDITURE_GROUP'
    insurance_tech = 'INSURANCE_TECH'
    international_organizaton = 'INTERNATIONAL_ORGANIZATON'  # [sic] "ORGANIZATON"
    investment_bank_and_brokerage = 'INVESTMENT_BANK_AND_BROKERAGE'
    issue_advocacy = 'ISSUE_ADVOCACY'
    legal = 'LEGAL'
    life_insurance = 'LIFE_INSURANCE'
    logistics_and_transportation_and_fleet_management = 'LOGISTICS_AND_TRANSPORTATION_AND_FLEET_MANAGEMENT'
    manufacturing = 'MANUFACTURING'
    medical_devices_and_supplies_and_equipment = 'MEDICAL_DEVICES_AND_SUPPLIES_AND_EQUIPMENT'
    medspa_and_elective_surgeries_and_alternative_medicine = 'MEDSPA_AND_ELECTIVE_SURGERIES_AND_ALTERNATIVE_MEDICINE'
    mining_and_quarrying = 'MINING_AND_QUARRYING'
    mobile_gaming = 'MOBILE_GAMING'
    movies = 'MOVIES'
    museums_and_parks_and_libraries = 'MUSEUMS_AND_PARKS_AND_LIBRARIES'
    music = 'MUSIC'
    network_security_products = 'NETWORK_SECURITY_PRODUCTS'
    news_and_current_events = 'NEWS_AND_CURRENT_EVENTS'
    non_prescription = 'NON_PRESCRIPTION'
    not_for_profit_colleges_and_universities = 'NOT_FOR_PROFIT_COLLEGES_AND_UNIVERSITIES'
    office = 'OFFICE'
    office_or_business_supplies = 'OFFICE_OR_BUSINESS_SUPPLIES'
    oil_and_gas_and_consumable_fuel = 'OIL_AND_GAS_AND_CONSUMABLE_FUEL'
    online_only_publications = 'ONLINE_ONLY_PUBLICATIONS'
    package_or_freight_delivery = 'PACKAGE_OR_FREIGHT_DELIVERY'
    party_independent_expenditure_group_us = 'PARTY_INDEPENDENT_EXPENDITURE_GROUP_US'
    payment_processing_and_gateway_solutions = 'PAYMENT_PROCESSING_AND_GATEWAY_SOLUTIONS'
    pc_gaming = 'PC_GAMING'
    people = 'PEOPLE'
    personal_care = 'PERSONAL_CARE'
    pet = 'PET'
    photography_and_filming_services = 'PHOTOGRAPHY_AND_FILMING_SERVICES'
    pizza = 'PIZZA'
    planning_agency = 'PLANNING_AGENCY'
    political_party_or_committee = 'POLITICAL_PARTY_OR_COMMITTEE'
    prescription = 'PRESCRIPTION'
    professional_associations = 'PROFESSIONAL_ASSOCIATIONS'
    property_and_casualty = 'PROPERTY_AND_CASUALTY'
    quick_service = 'QUICK_SERVICE'
    radio = 'RADIO'
    railroads = 'RAILROADS'
    real_estate = 'REAL_ESTATE'
    real_money_gaming = 'REAL_MONEY_GAMING'
    recreational = 'RECREATIONAL'
    religious = 'RELIGIOUS'
    reseller = 'RESELLER'
    residential_and_long_term_care_facilities_and_outpatient_care_centers = 'RESIDENTIAL_AND_LONG_TERM_CARE_FACILITIES_AND_OUTPATIENT_CARE_CENTERS'
    retail_and_credit_union_and_commercial_bank = 'RETAIL_AND_CREDIT_UNION_AND_COMMERCIAL_BANK'
    ride_sharing_or_taxi_services = 'RIDE_SHARING_OR_TAXI_SERVICES'
    safety_services = 'SAFETY_SERVICES'
    scholarly = 'SCHOLARLY'
    school_and_early_children_edcation = 'SCHOOL_AND_EARLY_CHILDREN_EDCATION'  # [sic] "EDCATION"
    social_media = 'SOCIAL_MEDIA'
    software_as_a_service = 'SOFTWARE_AS_A_SERVICE'
    sporting = 'SPORTING'
    sporting_and_outdoor = 'SPORTING_AND_OUTDOOR'
    sports = 'SPORTS'
    superstores = 'SUPERSTORES'
    # T1/T2/T3 values: automotive distribution tiers (manufacturer,
    # dealer association, dealer/agency level) -- presumably; verify
    # against the Graph API vertical documentation.
    t1_automotive_manufacturer = 'T1_AUTOMOTIVE_MANUFACTURER'
    t1_motorcycle = 'T1_MOTORCYCLE'
    t2_dealer_associations = 'T2_DEALER_ASSOCIATIONS'
    t3_auto_agency = 'T3_AUTO_AGENCY'
    t3_auto_resellers = 'T3_AUTO_RESELLERS'
    t3_dealer_groups = 'T3_DEALER_GROUPS'
    t3_franchise_dealer = 'T3_FRANCHISE_DEALER'
    t3_independent_dealer = 'T3_INDEPENDENT_DEALER'
    t3_parts_and_services = 'T3_PARTS_AND_SERVICES'
    t3_portals = 'T3_PORTALS'
    telecommunications_equipment_and_accessories = 'TELECOMMUNICATIONS_EQUIPMENT_AND_ACCESSORIES'
    telephone_service_providers_and_carriers = 'TELEPHONE_SERVICE_PROVIDERS_AND_CARRIERS'
    ticketing = 'TICKETING'
    tobacco = 'TOBACCO'
    tourism_and_travel_services = 'TOURISM_AND_TRAVEL_SERVICES'
    tourism_board = 'TOURISM_BOARD'
    toy_and_hobby = 'TOY_AND_HOBBY'
    trade_school = 'TRADE_SCHOOL'
    travel_agencies_and_guides_and_otas = 'TRAVEL_AGENCIES_AND_GUIDES_AND_OTAS'
    utilities_and_energy_equipment_and_services = 'UTILITIES_AND_ENERGY_EQUIPMENT_AND_SERVICES'
    veterinary_clinics_and_services = 'VETERINARY_CLINICS_AND_SERVICES'
    video_streaming = 'VIDEO_STREAMING'
    virtual_services = 'VIRTUAL_SERVICES'
    vitamins_or_wellness = 'VITAMINS_OR_WELLNESS'
    warehousing_and_storage = 'WAREHOUSING_AND_STORAGE'
    water_and_soft_drink_and_baverage = 'WATER_AND_SOFT_DRINK_AND_BAVERAGE'  # [sic] "BAVERAGE"
    website_designers_or_graphic_designers = 'WEBSITE_DESIGNERS_OR_GRAPHIC_DESIGNERS'
    wholesale = 'WHOLESALE'
    wireless_services = 'WIRELESS_SERVICES'
class VerticalV2():
    # Top-level business vertical classification values (v2); each has
    # finer-grained companions in SubverticalV2 above. Constant holder
    # consumed via __dict__.values() by generated enum validation;
    # intentionally no class docstring so __doc__ is not injected into
    # the value set.
    advertising_and_marketing = 'ADVERTISING_AND_MARKETING'
    agriculture = 'AGRICULTURE'
    automotive = 'AUTOMOTIVE'
    banking_and_credit_cards = 'BANKING_AND_CREDIT_CARDS'
    business_to_business = 'BUSINESS_TO_BUSINESS'
    consumer_packaged_goods = 'CONSUMER_PACKAGED_GOODS'
    ecommerce = 'ECOMMERCE'
    education = 'EDUCATION'
    energy_and_natural_resources_and_utilities = 'ENERGY_AND_NATURAL_RESOURCES_AND_UTILITIES'
    entertainment_and_media = 'ENTERTAINMENT_AND_MEDIA'
    gaming = 'GAMING'
    government = 'GOVERNMENT'
    healthcare_and_pharmaceuticals_and_biotech = 'HEALTHCARE_AND_PHARMACEUTICALS_AND_BIOTECH'
    insurance = 'INSURANCE'
    non_profit = 'NON_PROFIT'
    organizations_and_associations = 'ORGANIZATIONS_AND_ASSOCIATIONS'
    politics = 'POLITICS'
    professional_services = 'PROFESSIONAL_SERVICES'
    publishing = 'PUBLISHING'
    restaurants = 'RESTAURANTS'
    retail = 'RETAIL'
    technology = 'TECHNOLOGY'
    telecom = 'TELECOM'
    travel = 'TRAVEL'
class ActionSource():
    # Where a conversion action took place (in-store vs. online).
    # Constant holder consumed via __dict__.values() by generated enum
    # validation; intentionally no class docstring (see note on the
    # sibling constant classes).
    physical_store = 'PHYSICAL_STORE'
    website = 'WEBSITE'
def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET this Business node; returns the request when batched/pending, else the refreshed object."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored by the batch machinery.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Business, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def api_update(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST field updates to this Business node; returns the request when batched/pending."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored by the batch machinery.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'entry_point': 'string', 'name': 'string', 'primary_page': 'string', 'timezone_id': 'unsigned int', 'two_factor_type': 'two_factor_type_enum', 'vertical': 'vertical_enum'}
    enums = {'two_factor_type_enum': Business.TwoFactorType.__dict__.values(), 'vertical_enum': Business.Vertical.__dict__.values()}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Business, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_access_token(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST to the /access_token edge of this Business."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored by the batch machinery.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'app_id': 'string', 'fbe_external_business_id': 'string', 'scope': 'list<Permission>', 'system_user_name': 'string'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/access_token', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Business, api_type='EDGE', response_parser=ObjectParser(target_class=Business, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def delete_ad_accounts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """DELETE against the /ad_accounts edge of this Business."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored by the batch machinery.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'adaccount_id': 'string'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='DELETE', endpoint='/ad_accounts', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_ad_studies(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /ad_studies edge; results parse as AdStudy objects."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored by the batch machinery.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.adstudy import AdStudy
    param_types = {}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/ad_studies', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AdStudy, api_type='EDGE', response_parser=ObjectParser(target_class=AdStudy, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_ad_study(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST a new AdStudy to the /ad_studies edge of this Business."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored by the batch machinery.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.adstudy import AdStudy
    param_types = {'cells': 'list<Object>', 'client_business': 'string', 'confidence_level': 'float', 'cooldown_start_time': 'int', 'description': 'string', 'end_time': 'int', 'name': 'string', 'objectives': 'list<Object>', 'observation_end_time': 'int', 'start_time': 'int', 'type': 'type_enum', 'viewers': 'list<int>'}
    enums = {'type_enum': AdStudy.Type.__dict__.values()}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/ad_studies', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AdStudy, api_type='EDGE', response_parser=ObjectParser(target_class=AdStudy, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_ad_account(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST a new AdAccount via the /adaccount edge of this Business."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored by the batch machinery.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.adaccount import AdAccount
    param_types = {'ad_account_created_from_bm_flag': 'bool', 'currency': 'string', 'end_advertiser': 'Object', 'funding_id': 'string', 'invoice': 'bool', 'invoice_group_id': 'string', 'invoicing_emails': 'list<string>', 'io': 'bool', 'media_agency': 'string', 'name': 'string', 'partner': 'string', 'po_number': 'string', 'timezone_id': 'unsigned int'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/adaccount', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AdAccount, api_type='EDGE', response_parser=ObjectParser(target_class=AdAccount, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_add_phone_number(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST a phone number to the /add_phone_numbers edge of this Business."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored by the batch machinery.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'phone_number': 'string'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/add_phone_numbers', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Business, api_type='EDGE', response_parser=ObjectParser(target_class=Business, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_ad_network_application(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST a new Application to the /adnetwork_applications edge."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored by the batch machinery.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.application import Application
    param_types = {'name': 'string'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/adnetwork_applications', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Application, api_type='EDGE', response_parser=ObjectParser(target_class=Application, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_ad_network_analytics(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /adnetworkanalytics edge (synchronous analytics query)."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored by the batch machinery.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.adnetworkanalyticssyncqueryresult import AdNetworkAnalyticsSyncQueryResult
    param_types = {'aggregation_period': 'aggregation_period_enum', 'breakdowns': 'list<breakdowns_enum>', 'filters': 'list<map>', 'limit': 'unsigned int', 'metrics': 'list<metrics_enum>', 'ordering_column': 'ordering_column_enum', 'ordering_type': 'ordering_type_enum', 'since': 'datetime', 'until': 'datetime'}
    enums = {'aggregation_period_enum': AdNetworkAnalyticsSyncQueryResult.AggregationPeriod.__dict__.values(), 'breakdowns_enum': AdNetworkAnalyticsSyncQueryResult.Breakdowns.__dict__.values(), 'metrics_enum': AdNetworkAnalyticsSyncQueryResult.Metrics.__dict__.values(), 'ordering_column_enum': AdNetworkAnalyticsSyncQueryResult.OrderingColumn.__dict__.values(), 'ordering_type_enum': AdNetworkAnalyticsSyncQueryResult.OrderingType.__dict__.values()}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/adnetworkanalytics', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AdNetworkAnalyticsSyncQueryResult, api_type='EDGE', response_parser=ObjectParser(target_class=AdNetworkAnalyticsSyncQueryResult, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_ad_network_analytic(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST to the /adnetworkanalytics edge (kick off an async analytics query)."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored by the batch machinery.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.adnetworkanalyticssyncqueryresult import AdNetworkAnalyticsSyncQueryResult
    param_types = {'aggregation_period': 'aggregation_period_enum', 'breakdowns': 'list<breakdowns_enum>', 'filters': 'list<Object>', 'limit': 'int', 'metrics': 'list<metrics_enum>', 'ordering_column': 'ordering_column_enum', 'ordering_type': 'ordering_type_enum', 'since': 'datetime', 'until': 'datetime'}
    enums = {'aggregation_period_enum': AdNetworkAnalyticsSyncQueryResult.AggregationPeriod.__dict__.values(), 'breakdowns_enum': AdNetworkAnalyticsSyncQueryResult.Breakdowns.__dict__.values(), 'metrics_enum': AdNetworkAnalyticsSyncQueryResult.Metrics.__dict__.values(), 'ordering_column_enum': AdNetworkAnalyticsSyncQueryResult.OrderingColumn.__dict__.values(), 'ordering_type_enum': AdNetworkAnalyticsSyncQueryResult.OrderingType.__dict__.values()}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/adnetworkanalytics', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Business, api_type='EDGE', response_parser=ObjectParser(target_class=Business, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_ad_network_analytics_results(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /adnetworkanalytics_results edge for previously queued query IDs."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored by the batch machinery.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.adnetworkanalyticsasyncqueryresult import AdNetworkAnalyticsAsyncQueryResult
    param_types = {'query_ids': 'list<string>'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/adnetworkanalytics_results', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AdNetworkAnalyticsAsyncQueryResult, api_type='EDGE', response_parser=ObjectParser(target_class=AdNetworkAnalyticsAsyncQueryResult, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_ads_reporting_mmm_reports(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /ads_reporting_mmm_reports edge of this Business."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored by the batch machinery.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'filtering': 'list<map>'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/ads_reporting_mmm_reports', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_ads_reporting_mmm_schedulers(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /ads_reporting_mmm_schedulers edge of this Business."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored by the batch machinery.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/ads_reporting_mmm_schedulers', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_ads_pixels(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /adspixels edge; results parse as AdsPixel objects."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored by the batch machinery.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.adspixel import AdsPixel
    param_types = {'id_filter': 'string', 'name_filter': 'string', 'sort_by': 'sort_by_enum'}
    enums = {'sort_by_enum': AdsPixel.SortBy.__dict__.values()}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/adspixels', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AdsPixel, api_type='EDGE', response_parser=ObjectParser(target_class=AdsPixel, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_ads_pixel(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST a new AdsPixel to the /adspixels edge of this Business."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored by the batch machinery.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.adspixel import AdsPixel
    param_types = {'is_crm': 'bool', 'name': 'string'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/adspixels', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AdsPixel, api_type='EDGE', response_parser=ObjectParser(target_class=AdsPixel, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def delete_agencies(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """DELETE against the /agencies edge of this Business."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored by the batch machinery.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'business': 'string'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='DELETE', endpoint='/agencies', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_agencies(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /agencies edge; results parse as Business objects."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored by the batch machinery.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/agencies', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Business, api_type='EDGE', response_parser=ObjectParser(target_class=Business, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_an_placements(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /an_placements edge; results parse as AdPlacement objects."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored by the batch machinery.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.adplacement import AdPlacement
    param_types = {}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/an_placements', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AdPlacement, api_type='EDGE', response_parser=ObjectParser(target_class=AdPlacement, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_block_list_draft(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST a publisher-URL file to the /block_list_drafts edge."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored by the batch machinery.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'publisher_urls_file': 'file'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/block_list_drafts', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Business, api_type='EDGE', response_parser=ObjectParser(target_class=Business, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_business_asset_groups(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /business_asset_groups edge of this Business."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored by the batch machinery.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.businessassetgroup import BusinessAssetGroup
    param_types = {}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/business_asset_groups', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=BusinessAssetGroup, api_type='EDGE', response_parser=ObjectParser(target_class=BusinessAssetGroup, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_business_invoices(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /business_invoices edge; results parse as OmegaCustomerTrx objects."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored by the batch machinery.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.omegacustomertrx import OmegaCustomerTrx
    param_types = {'end_date': 'string', 'invoice_id': 'string', 'issue_end_date': 'string', 'issue_start_date': 'string', 'root_id': 'unsigned int', 'start_date': 'string', 'type': 'type_enum'}
    enums = {'type_enum': OmegaCustomerTrx.Type.__dict__.values()}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/business_invoices', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=OmegaCustomerTrx, api_type='EDGE', response_parser=ObjectParser(target_class=OmegaCustomerTrx, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_business_users(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /business_users edge; results parse as BusinessUser objects."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored by the batch machinery.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.businessuser import BusinessUser
    param_types = {}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/business_users', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=BusinessUser, api_type='EDGE', response_parser=ObjectParser(target_class=BusinessUser, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_business_user(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Add a user to this business via the POST /business_users edge."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.businessuser import BusinessUser
    allowed_params = {'email': 'string', 'role': 'role_enum'}
    allowed_enums = {'role_enum': BusinessUser.Role.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/business_users',
        api=self._api,
        param_checker=TypeChecker(allowed_params, allowed_enums),
        target_class=BusinessUser,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=BusinessUser, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_business_projects(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch this business's projects via the GET /businessprojects edge."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/businessprojects',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_claim_custom_conversion(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Claim a custom conversion via the POST /claim_custom_conversions edge."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.customconversion import CustomConversion
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/claim_custom_conversions',
        api=self._api,
        param_checker=TypeChecker({'custom_conversion_id': 'string'}, {}),
        target_class=CustomConversion,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=CustomConversion, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_client_ad_accounts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch client ad accounts via the GET /client_ad_accounts edge."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.adaccount import AdAccount
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/client_ad_accounts',
        api=self._api,
        param_checker=TypeChecker({'search_query': 'string'}, {}),
        target_class=AdAccount,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AdAccount, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_client_apps(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch client apps via the GET /client_apps edge."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.application import Application
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/client_apps',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=Application,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Application, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_client_app(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Attach a client app via the POST /client_apps edge."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/client_apps',
        api=self._api,
        param_checker=TypeChecker({'app_id': 'Object'}, {}),
        target_class=Business,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Business, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_client_offsite_signal_container_business_objects(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch client offsite-signal container business objects (GET edge)."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/client_offsite_signal_container_business_objects',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_client_pages(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch client pages via the GET /client_pages edge."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.page import Page
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/client_pages',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_client_page(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Attach a client page via the POST /client_pages edge."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    allowed_params = {'page_id': 'int', 'permitted_tasks': 'list<permitted_tasks_enum>'}
    allowed_enums = {'permitted_tasks_enum': Business.PermittedTasks.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/client_pages',
        api=self._api,
        param_checker=TypeChecker(allowed_params, allowed_enums),
        target_class=Business,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Business, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_client_pixels(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch client pixels via the GET /client_pixels edge."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.adspixel import AdsPixel
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/client_pixels',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AdsPixel,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AdsPixel, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_client_product_catalogs(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch client product catalogs via the GET /client_product_catalogs edge."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.productcatalog import ProductCatalog
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/client_product_catalogs',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=ProductCatalog,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=ProductCatalog, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_client_whats_app_business_accounts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch client WhatsApp Business accounts (GET /client_whatsapp_business_accounts)."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.whatsappbusinessaccount import WhatsAppBusinessAccount
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/client_whatsapp_business_accounts',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=WhatsAppBusinessAccount,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=WhatsAppBusinessAccount, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def delete_clients(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Remove a client business via the DELETE /clients edge."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    request = FacebookRequest(
        node_id=self['id'],
        method='DELETE',
        endpoint='/clients',
        api=self._api,
        param_checker=TypeChecker({'business': 'string'}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_clients(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch client businesses via the GET /clients edge."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/clients',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=Business,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Business, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_collaborative_ads_collaboration_requests(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch collaboration requests (GET /collaborative_ads_collaboration_requests)."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.cpascollaborationrequest import CPASCollaborationRequest
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/collaborative_ads_collaboration_requests',
        api=self._api,
        param_checker=TypeChecker({'status': 'string'}, {}),
        target_class=CPASCollaborationRequest,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=CPASCollaborationRequest, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_collaborative_ads_collaboration_request(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Create a collaboration request (POST /collaborative_ads_collaboration_requests)."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.cpascollaborationrequest import CPASCollaborationRequest
    allowed_params = {
        'brands': 'list<string>',
        'contact_email': 'string',
        'contact_first_name': 'string',
        'contact_last_name': 'string',
        'phone_number': 'string',
        'receiver_business': 'string',
        'requester_agency_or_brand': 'requester_agency_or_brand_enum',
        'sender_client_business': 'string',
    }
    allowed_enums = {'requester_agency_or_brand_enum': CPASCollaborationRequest.RequesterAgencyOrBrand.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/collaborative_ads_collaboration_requests',
        api=self._api,
        param_checker=TypeChecker(allowed_params, allowed_enums),
        target_class=CPASCollaborationRequest,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=CPASCollaborationRequest, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_collaborative_ads_suggested_partners(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch suggested CPAS partners (GET /collaborative_ads_suggested_partners)."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.cpasadvertiserpartnershiprecommendation import CPASAdvertiserPartnershipRecommendation
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/collaborative_ads_suggested_partners',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=CPASAdvertiserPartnershipRecommendation,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=CPASAdvertiserPartnershipRecommendation, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_commerce_merchant_settings(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch commerce merchant settings (GET /commerce_merchant_settings)."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.commercemerchantsettings import CommerceMerchantSettings
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/commerce_merchant_settings',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=CommerceMerchantSettings,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=CommerceMerchantSettings, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_cpas_business_setup_config(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch the CPAS business setup config (GET /cpas_business_setup_config)."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.cpasbusinesssetupconfig import CPASBusinessSetupConfig
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/cpas_business_setup_config',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=CPASBusinessSetupConfig,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=CPASBusinessSetupConfig, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_cpas_business_setup_config(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Create a CPAS business setup config (POST /cpas_business_setup_config)."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.cpasbusinesssetupconfig import CPASBusinessSetupConfig
    allowed_params = {
        'accepted_collab_ads_tos': 'bool',
        'ad_accounts': 'list<string>',
        'business_capabilities_status': 'map',
        'capabilities_compliance_status': 'map',
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/cpas_business_setup_config',
        api=self._api,
        param_checker=TypeChecker(allowed_params, {}),
        target_class=CPASBusinessSetupConfig,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=CPASBusinessSetupConfig, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_cpas_merchant_config(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch the CPAS merchant config (GET /cpas_merchant_config)."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.cpasmerchantconfig import CPASMerchantConfig
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/cpas_merchant_config',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=CPASMerchantConfig,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=CPASMerchantConfig, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_creative_folder(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Create a creative folder via the POST /creative_folders edge."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.businesscreativefolder import BusinessCreativeFolder
    allowed_params = {'description': 'string', 'name': 'string', 'parent_folder_id': 'string'}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/creative_folders',
        api=self._api,
        param_checker=TypeChecker(allowed_params, {}),
        target_class=BusinessCreativeFolder,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=BusinessCreativeFolder, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_credit_cards(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch credit cards via the GET /creditcards edge."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.creditcard import CreditCard
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/creditcards',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=CreditCard,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=CreditCard, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_custom_conversion(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Create a custom conversion via the POST /customconversions edge."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.customconversion import CustomConversion
    allowed_params = {
        'advanced_rule': 'string',
        'custom_event_type': 'custom_event_type_enum',
        'default_conversion_value': 'float',
        'description': 'string',
        'event_source_id': 'string',
        'name': 'string',
        'rule': 'string',
    }
    allowed_enums = {'custom_event_type_enum': CustomConversion.CustomEventType.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/customconversions',
        api=self._api,
        param_checker=TypeChecker(allowed_params, allowed_enums),
        target_class=CustomConversion,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=CustomConversion, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_draft_negative_keyword_list(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Upload a draft negative keyword list (POST /draft_negative_keyword_lists)."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/draft_negative_keyword_lists',
        api=self._api,
        param_checker=TypeChecker({'negative_keyword_list_file': 'file'}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_event_source_groups(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch event source groups via the GET /event_source_groups edge."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.eventsourcegroup import EventSourceGroup
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/event_source_groups',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=EventSourceGroup,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=EventSourceGroup, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_event_source_group(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Create an event source group via the POST /event_source_groups edge."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.eventsourcegroup import EventSourceGroup
    allowed_params = {'event_sources': 'list<string>', 'name': 'string'}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/event_source_groups',
        api=self._api,
        param_checker=TypeChecker(allowed_params, {}),
        target_class=EventSourceGroup,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=EventSourceGroup, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_extended_credit_applications(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch extended credit applications (GET /extendedcreditapplications)."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/extendedcreditapplications',
        api=self._api,
        param_checker=TypeChecker({'only_show_pending': 'bool'}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_extended_credits(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch extended credits via the GET /extendedcredits edge."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.extendedcredit import ExtendedCredit
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/extendedcredits',
        api=self._api,
        param_checker=TypeChecker({'order_by_is_owned_credential': 'bool'}, {}),
        target_class=ExtendedCredit,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=ExtendedCredit, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_image(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Upload a business image via the POST /images edge."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.businessimage import BusinessImage
    allowed_params = {
        'ad_placements_validation_only': 'bool',
        'bytes': 'string',
        'creative_folder_id': 'string',
        'name': 'string',
        'validation_ad_placements': 'list<validation_ad_placements_enum>',
    }
    allowed_enums = {'validation_ad_placements_enum': BusinessImage.ValidationAdPlacements.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/images',
        api=self._api,
        param_checker=TypeChecker(allowed_params, allowed_enums),
        target_class=BusinessImage,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=BusinessImage, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_initiated_audience_sharing_requests(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch initiated audience sharing requests (GET /initiated_audience_sharing_requests)."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.businessassetsharingagreement import BusinessAssetSharingAgreement
    allowed_params = {'recipient_id': 'string', 'request_status': 'request_status_enum'}
    allowed_enums = {'request_status_enum': BusinessAssetSharingAgreement.RequestStatus.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/initiated_audience_sharing_requests',
        api=self._api,
        param_checker=TypeChecker(allowed_params, allowed_enums),
        target_class=BusinessAssetSharingAgreement,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=BusinessAssetSharingAgreement, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def delete_instagram_accounts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Detach an Instagram account via the DELETE /instagram_accounts edge."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    request = FacebookRequest(
        node_id=self['id'],
        method='DELETE',
        endpoint='/instagram_accounts',
        api=self._api,
        param_checker=TypeChecker({'instagram_account': 'string'}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_instagram_accounts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch Instagram accounts via the GET /instagram_accounts edge."""
    from facebook_business.utils import api_utils
    # Callbacks only fire for batched calls; warn if supplied without a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.instagramuser import InstagramUser
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/instagram_accounts',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=InstagramUser,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=InstagramUser, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_instagram_business_accounts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET on this Business's /instagram_business_accounts edge, parsed as IGUser.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.iguser import IGUser
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/instagram_business_accounts',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=IGUser,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=IGUser, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def delete_managed_businesses(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """DELETE on this Business's /managed_businesses edge.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    request = FacebookRequest(
        node_id=self['id'],
        method='DELETE',
        endpoint='/managed_businesses',
        api=self._api,
        param_checker=TypeChecker({'existing_client_business_id': 'string'}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_managed_business(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST on this Business's /managed_businesses edge, parsed as Business.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {
        'child_business_external_id': 'string',
        'existing_client_business_id': 'string',
        'name': 'string',
        'sales_rep_email': 'string',
        'survey_business_type': 'survey_business_type_enum',
        'survey_num_assets': 'unsigned int',
        'survey_num_people': 'unsigned int',
        'timezone_id': 'unsigned int',
        'vertical': 'vertical_enum',
    }
    enums = {
        'survey_business_type_enum': Business.SurveyBusinessType.__dict__.values(),
        'vertical_enum': Business.Vertical.__dict__.values(),
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/managed_businesses',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Business,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Business, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_managed_partner_business_setup(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST on this Business's /managed_partner_business_setup edge, parsed as Business.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {
        'active_ad_account_id': 'string',
        'active_page_id': 'int',
        'partner_facebook_page_url': 'string',
        'partner_registration_countries': 'list<string>',
        'seller_email_address': 'string',
        'seller_external_website_url': 'string',
        'template': 'list<map>',
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/managed_partner_business_setup',
        api=self._api,
        param_checker=TypeChecker(param_types, {}),
        target_class=Business,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Business, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def delete_managed_partner_businesses(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """DELETE on this Business's /managed_partner_businesses edge.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {
        'child_business_external_id': 'string',
        'child_business_id': 'string',
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='DELETE',
        endpoint='/managed_partner_businesses',
        api=self._api,
        param_checker=TypeChecker(param_types, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_managed_partner_business(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST on this Business's /managed_partner_businesses edge.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {
        'ad_account_currency': 'string',
        'catalog_id': 'string',
        'child_business_external_id': 'string',
        'credit_limit': 'unsigned int',
        'line_of_credit_id': 'string',
        'name': 'string',
        'no_ad_account': 'bool',
        'page_name': 'string',
        'page_profile_image_url': 'string',
        'partition_type': 'partition_type_enum',
        'partner_facebook_page_url': 'string',
        'partner_registration_countries': 'list<string>',
        'sales_rep_email': 'string',
        'seller_external_website_url': 'string',
        'seller_targeting_countries': 'list<string>',
        'skip_partner_page_creation': 'bool',
        'survey_business_type': 'survey_business_type_enum',
        'survey_num_assets': 'unsigned int',
        'survey_num_people': 'unsigned int',
        'timezone_id': 'unsigned int',
        'vertical': 'vertical_enum',
    }
    # Enum values are inlined literals here (generated code), not class attrs.
    enums = {
        'partition_type_enum': ['AUTH', 'FIXED', 'FIXED_WITHOUT_PARTITION'],
        'survey_business_type_enum': ['ADVERTISER', 'AGENCY', 'APP_DEVELOPER', 'PUBLISHER'],
        'vertical_enum': ['ADVERTISING', 'AUTOMOTIVE', 'CONSUMER_PACKAGED_GOODS', 'ECOMMERCE', 'EDUCATION', 'ENERGY_AND_UTILITIES', 'ENTERTAINMENT_AND_MEDIA', 'FINANCIAL_SERVICES', 'GAMING', 'GOVERNMENT_AND_POLITICS', 'HEALTH', 'LUXURY', 'MARKETING', 'NON_PROFIT', 'ORGANIZATIONS_AND_ASSOCIATIONS', 'OTHER', 'PROFESSIONAL_SERVICES', 'RESTAURANT', 'RETAIL', 'TECHNOLOGY', 'TELECOM', 'TRAVEL'],
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/managed_partner_businesses',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_negative_keyword_lists(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET on this Business's /negative_keyword_lists edge.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/negative_keyword_lists',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_offline_conversion_data_sets(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET on this Business's /offline_conversion_data_sets edge.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.offlineconversiondataset import OfflineConversionDataSet
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/offline_conversion_data_sets',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=OfflineConversionDataSet,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=OfflineConversionDataSet, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_offline_conversion_data_set(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST on this Business's /offline_conversion_data_sets edge.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.offlineconversiondataset import OfflineConversionDataSet
    param_types = {
        'auto_assign_to_new_accounts_only': 'bool',
        'description': 'string',
        'enable_auto_assign_to_accounts': 'bool',
        'is_mta_use': 'bool',
        'name': 'string',
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/offline_conversion_data_sets',
        api=self._api,
        param_checker=TypeChecker(param_types, {}),
        target_class=OfflineConversionDataSet,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=OfflineConversionDataSet, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_open_bridge_configurations(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET on this Business's /openbridge_configurations edge.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.openbridgeconfiguration import OpenBridgeConfiguration
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/openbridge_configurations',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=OpenBridgeConfiguration,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=OpenBridgeConfiguration, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_open_bridge_configuration(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST on this Business's /openbridge_configurations edge.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.openbridgeconfiguration import OpenBridgeConfiguration
    param_types = {
        'access_key': 'string',
        'active': 'bool',
        'endpoint': 'string',
        'fallback_domain': 'string',
        'fallback_domain_enabled': 'bool',
        'host_business_id': 'unsigned int',
        'host_external_id': 'string',
        'pixel_id': 'unsigned int',
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/openbridge_configurations',
        api=self._api,
        param_checker=TypeChecker(param_types, {}),
        target_class=OpenBridgeConfiguration,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=OpenBridgeConfiguration, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_owned_ad_accounts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET on this Business's /owned_ad_accounts edge, parsed as AdAccount.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.adaccount import AdAccount
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/owned_ad_accounts',
        api=self._api,
        param_checker=TypeChecker({'search_query': 'string'}, {}),
        target_class=AdAccount,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AdAccount, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_owned_ad_account(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST on this Business's /owned_ad_accounts edge, parsed as Business.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/owned_ad_accounts',
        api=self._api,
        param_checker=TypeChecker({'adaccount_id': 'string'}, {}),
        target_class=Business,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Business, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_owned_apps(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET on this Business's /owned_apps edge, parsed as Application.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.application import Application
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/owned_apps',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=Application,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Application, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_owned_app(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST on this Business's /owned_apps edge, parsed as Business.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/owned_apps',
        api=self._api,
        param_checker=TypeChecker({'app_id': 'Object'}, {}),
        target_class=Business,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Business, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def delete_owned_businesses(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """DELETE on this Business's /owned_businesses edge.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    request = FacebookRequest(
        node_id=self['id'],
        method='DELETE',
        endpoint='/owned_businesses',
        api=self._api,
        param_checker=TypeChecker({'client_id': 'string'}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_owned_businesses(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET on this Business's /owned_businesses edge, parsed as Business.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {
        'child_business_external_id': 'string',
        'client_user_id': 'int',
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/owned_businesses',
        api=self._api,
        param_checker=TypeChecker(param_types, {}),
        target_class=Business,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Business, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_owned_business(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST on this Business's /owned_businesses edge, parsed as Business.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {
        'child_business_external_id': 'string',
        'name': 'string',
        'page_permitted_tasks': 'list<page_permitted_tasks_enum>',
        'sales_rep_email': 'string',
        'shared_page_id': 'string',
        'survey_business_type': 'survey_business_type_enum',
        'survey_num_assets': 'unsigned int',
        'survey_num_people': 'unsigned int',
        'timezone_id': 'unsigned int',
        'vertical': 'vertical_enum',
    }
    enums = {
        'page_permitted_tasks_enum': Business.PagePermittedTasks.__dict__.values(),
        'survey_business_type_enum': Business.SurveyBusinessType.__dict__.values(),
        'vertical_enum': Business.Vertical.__dict__.values(),
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/owned_businesses',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Business,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Business, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_owned_instagram_accounts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET on this Business's /owned_instagram_accounts edge, parsed as InstagramUser.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.instagramuser import InstagramUser
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/owned_instagram_accounts',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=InstagramUser,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=InstagramUser, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_owned_offsite_signal_container_business_objects(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET on this Business's /owned_offsite_signal_container_business_objects edge.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/owned_offsite_signal_container_business_objects',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_owned_pages(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET on this Business's /owned_pages edge, parsed as Page.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.page import Page
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/owned_pages',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_owned_page(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST on this Business's /owned_pages edge, parsed as Business.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {
        'code': 'string',
        'entry_point': 'string',
        'page_id': 'int',
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/owned_pages',
        api=self._api,
        param_checker=TypeChecker(param_types, {}),
        target_class=Business,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Business, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_owned_pixels(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET on this Business's /owned_pixels edge, parsed as AdsPixel.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.adspixel import AdsPixel
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/owned_pixels',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AdsPixel,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AdsPixel, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_owned_product_catalogs(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET on this Business's /owned_product_catalogs edge, parsed as ProductCatalog.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.productcatalog import ProductCatalog
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/owned_product_catalogs',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=ProductCatalog,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=ProductCatalog, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_owned_product_catalog(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST on this Business's /owned_product_catalogs edge, parsed as ProductCatalog.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.productcatalog import ProductCatalog
    param_types = {
        'catalog_segment_filter': 'Object',
        'catalog_segment_product_set_id': 'string',
        'da_display_settings': 'Object',
        'destination_catalog_settings': 'map',
        'flight_catalog_settings': 'map',
        'name': 'string',
        'parent_catalog_id': 'string',
        'partner_integration': 'map',
        'store_catalog_settings': 'map',
        'vertical': 'vertical_enum',
    }
    enums = {'vertical_enum': ProductCatalog.Vertical.__dict__.values()}
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/owned_product_catalogs',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=ProductCatalog,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=ProductCatalog, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_owned_whats_app_business_accounts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET on this Business's /owned_whatsapp_business_accounts edge.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.whatsappbusinessaccount import WhatsAppBusinessAccount
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/owned_whatsapp_business_accounts',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=WhatsAppBusinessAccount,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=WhatsAppBusinessAccount, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def delete_pages(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """DELETE on this Business's /pages edge.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    request = FacebookRequest(
        node_id=self['id'],
        method='DELETE',
        endpoint='/pages',
        api=self._api,
        param_checker=TypeChecker({'page_id': 'int'}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_partner_account_linking(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET on this Business's /partner_account_linking edge.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/partner_account_linking',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_partner_premium_option(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST on this Business's /partner_premium_options edge.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {
        'catalog_segment_id': 'string',
        'enable_basket_insight': 'bool',
        'enable_extended_audience_retargeting': 'bool',
        'partner_business_id': 'string',
        'retailer_custom_audience_config': 'map',
        'vendor_id': 'string',
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/partner_premium_options',
        api=self._api,
        param_checker=TypeChecker(param_types, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_pending_client_ad_accounts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET on this Business's /pending_client_ad_accounts edge.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.businessadaccountrequest import BusinessAdAccountRequest
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/pending_client_ad_accounts',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=BusinessAdAccountRequest,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=BusinessAdAccountRequest, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_pending_client_apps(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET on this Business's /pending_client_apps edge.

    Returns the FacebookRequest when batched or pending; otherwise executes.
    """
    from facebook_business.utils import api_utils
    # success/failure callbacks only fire when the request joins a batch.
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.businessapplicationrequest import BusinessApplicationRequest
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/pending_client_apps',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=BusinessApplicationRequest,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=BusinessApplicationRequest, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_pending_client_pages(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /pending_client_pages edge of this node.

    Returns the FacebookRequest when batched or `pending`, otherwise
    executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.businesspagerequest import BusinessPageRequest
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/pending_client_pages',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=BusinessPageRequest,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=BusinessPageRequest, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_pending_owned_ad_accounts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /pending_owned_ad_accounts edge of this node.

    Returns the FacebookRequest when batched or `pending`, otherwise
    executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.businessadaccountrequest import BusinessAdAccountRequest
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/pending_owned_ad_accounts',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=BusinessAdAccountRequest,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=BusinessAdAccountRequest, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_pending_owned_pages(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /pending_owned_pages edge of this node.

    Returns the FacebookRequest when batched or `pending`, otherwise
    executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.businesspagerequest import BusinessPageRequest
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/pending_owned_pages',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=BusinessPageRequest,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=BusinessPageRequest, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_pending_shared_offsite_signal_container_business_objects(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /pending_shared_offsite_signal_container_business_objects edge.

    Returns the FacebookRequest when batched or `pending`, otherwise
    executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/pending_shared_offsite_signal_container_business_objects',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_pending_users(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /pending_users edge of this node (filterable by `email`).

    Returns the FacebookRequest when batched or `pending`, otherwise
    executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.businessrolerequest import BusinessRoleRequest
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/pending_users',
        api=self._api,
        param_checker=TypeChecker({'email': 'string'}, {}),
        target_class=BusinessRoleRequest,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=BusinessRoleRequest, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_picture(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /picture edge of this node (profile picture source).

    Returns the FacebookRequest when batched or `pending`, otherwise
    executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.profilepicturesource import ProfilePictureSource
    param_types = {
        'breaking_change': 'breaking_change_enum',
        'height': 'int',
        'redirect': 'bool',
        'type': 'type_enum',
        'width': 'int',
    }
    enums = {
        'breaking_change_enum': ProfilePictureSource.BreakingChange.__dict__.values(),
        'type_enum': ProfilePictureSource.Type.__dict__.values(),
    }
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/picture',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=ProfilePictureSource,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=ProfilePictureSource, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_pixel_to(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST to the /pixel_tos edge of this node.

    Returns the FacebookRequest when batched or `pending`, otherwise
    executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    req = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/pixel_tos',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_pre_verified_numbers(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /preverified_numbers edge of this node.

    Returns the FacebookRequest when batched or `pending`, otherwise
    executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.whatsappbusinesspreverifiedphonenumber import WhatsAppBusinessPreVerifiedPhoneNumber
    param_types = {
        'code_verification_status': 'code_verification_status_enum',
        'phone_number': 'string',
    }
    enums = {
        'code_verification_status_enum': WhatsAppBusinessPreVerifiedPhoneNumber.CodeVerificationStatus.__dict__.values(),
    }
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/preverified_numbers',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=WhatsAppBusinessPreVerifiedPhoneNumber,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=WhatsAppBusinessPreVerifiedPhoneNumber, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_received_audience_sharing_requests(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /received_audience_sharing_requests edge of this node.

    Returns the FacebookRequest when batched or `pending`, otherwise
    executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.businessassetsharingagreement import BusinessAssetSharingAgreement
    param_types = {
        'initiator_id': 'string',
        'request_status': 'request_status_enum',
    }
    enums = {
        'request_status_enum': BusinessAssetSharingAgreement.RequestStatus.__dict__.values(),
    }
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/received_audience_sharing_requests',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=BusinessAssetSharingAgreement,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=BusinessAssetSharingAgreement, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_setup_managed_partner_ad_account(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST to the /setup_managed_partner_adaccounts edge of this node.

    Returns the FacebookRequest when batched or `pending`, otherwise
    executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {
        'credit_line_id': 'string',
        'marketplace_business_id': 'string',
        'subvertical_v2': 'subvertical_v2_enum',
        'vendor_id': 'string',
        'vertical_v2': 'vertical_v2_enum',
    }
    enums = {
        'subvertical_v2_enum': Business.SubverticalV2.__dict__.values(),
        'vertical_v2_enum': Business.VerticalV2.__dict__.values(),
    }
    req = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/setup_managed_partner_adaccounts',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=Business,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Business, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def delete_share_pre_verified_numbers(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """DELETE on the /share_preverified_numbers edge of this node.

    Returns the FacebookRequest when batched or `pending`, otherwise
    executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {
        'partner_business_id': 'string',
        'preverified_id': 'string',
    }
    req = FacebookRequest(
        node_id=self['id'],
        method='DELETE',
        endpoint='/share_preverified_numbers',
        api=self._api,
        param_checker=TypeChecker(param_types, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_share_pre_verified_number(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST to the /share_preverified_numbers edge of this node.

    Returns the FacebookRequest when batched or `pending`, otherwise
    executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {
        'partner_business_id': 'string',
        'preverified_id': 'string',
    }
    req = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/share_preverified_numbers',
        api=self._api,
        param_checker=TypeChecker(param_types, {}),
        target_class=Business,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Business, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_system_user_access_token(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST to the /system_user_access_tokens edge of this node.

    Returns the FacebookRequest when batched or `pending`, otherwise
    executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {
        'asset': 'list<unsigned int>',
        'fetch_only': 'bool',
        'scope': 'list<Permission>',
        'set_token_expires_in_60_days': 'bool',
        'system_user_id': 'unsigned int',
    }
    req = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/system_user_access_tokens',
        api=self._api,
        param_checker=TypeChecker(param_types, {}),
        target_class=Business,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Business, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_system_users(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /system_users edge of this node.

    Returns the FacebookRequest when batched or `pending`, otherwise
    executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.systemuser import SystemUser
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/system_users',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=SystemUser,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=SystemUser, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_system_user(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST to the /system_users edge of this node.

    Returns the FacebookRequest when batched or `pending`, otherwise
    executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.systemuser import SystemUser
    param_types = {
        'name': 'string',
        'role': 'role_enum',
        'system_user_id': 'int',
    }
    enums = {
        'role_enum': SystemUser.Role.__dict__.values(),
    }
    req = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/system_users',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=SystemUser,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=SystemUser, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_third_party_measurement_report_dataset(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /third_party_measurement_report_dataset edge of this node.

    Returns the FacebookRequest when batched or `pending`, otherwise
    executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/third_party_measurement_report_dataset',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_video(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST to the /videos edge of this node to upload/create an AdVideo.

    Returns the FacebookRequest when batched or `pending`, otherwise
    executes the call and returns its result. The very large `param_types`
    map is generated from the Graph API schema for video uploads.
    """
    from facebook_business.utils import api_utils
    # Callbacks are only honored by batch execution; warn on misuse.
    if ((batch is None) and ((success is not None) or (failure is not None))):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.advideo import AdVideo
    # Generated parameter schema: API param name -> declared wire type.
    param_types = {'ad_placements_validation_only': 'bool', 'adaptive_type': 'string', 'animated_effect_id': 'unsigned int', 'application_id': 'string', 'asked_fun_fact_prompt_id': 'unsigned int', 'audio_story_wave_animation_handle': 'string', 'chunk_session_id': 'string', 'composer_entry_picker': 'string', 'composer_entry_point': 'string', 'composer_entry_time': 'unsigned int', 'composer_session_events_log': 'string', 'composer_session_id': 'string', 'composer_source_surface': 'string', 'composer_type': 'string', 'container_type': 'container_type_enum', 'content_category': 'content_category_enum', 'creative_folder_id': 'string', 'creative_tools': 'string', 'description': 'string', 'embeddable': 'bool', 'end_offset': 'unsigned int', 'fbuploader_video_file_chunk': 'string', 'file_size': 'unsigned int', 'file_url': 'string', 'fisheye_video_cropped': 'bool', 'formatting': 'formatting_enum', 'fov': 'unsigned int', 'front_z_rotation': 'float', 'fun_fact_prompt_id': 'unsigned int', 'fun_fact_toastee_id': 'unsigned int', 'guide': 'list<list<unsigned int>>', 'guide_enabled': 'bool', 'has_nickname': 'bool', 'holiday_card': 'string', 'initial_heading': 'unsigned int', 'initial_pitch': 'unsigned int', 'instant_game_entry_point_data': 'string', 'is_boost_intended': 'bool', 'is_group_linking_post': 'bool', 'is_voice_clip': 'bool', 'location_source_id': 'string', 'offer_like_post_id': 'unsigned int', 'og_action_type_id': 'string', 'og_icon_id': 'string', 'og_object_id': 'string', 'og_phrase': 'string', 'og_suggestion_mechanism': 'string', 'original_fov': 'unsigned int', 'original_projection_type': 'original_projection_type_enum', 'publish_event_id': 'unsigned int', 'react_mode_metadata': 'string', 'referenced_sticker_id': 'string', 'replace_video_id': 'string', 'slideshow_spec': 'map', 'source': 'string', 'source_instagram_media_id': 'string', 'spherical': 'bool', 'start_offset': 'unsigned int', 'swap_mode': 'swap_mode_enum', 'text_format_metadata': 'string', 
    'throwback_camera_roll_media': 'string', 'thumb': 'file', 'time_since_original_post': 'unsigned int', 'title': 'string', 'transcode_setting_properties': 'string', 'unpublished_content_type': 'unpublished_content_type_enum', 'upload_phase': 'upload_phase_enum', 'upload_session_id': 'string', 'upload_setting_properties': 'string', 'validation_ad_placements': 'list<validation_ad_placements_enum>', 'video_file_chunk': 'string', 'video_id_original': 'string', 'video_start_time_ms': 'unsigned int', 'waterfall_id': 'string'}
    # Allowed values for each enum-typed parameter above.
    enums = {'container_type_enum': AdVideo.ContainerType.__dict__.values(), 'content_category_enum': AdVideo.ContentCategory.__dict__.values(), 'formatting_enum': AdVideo.Formatting.__dict__.values(), 'original_projection_type_enum': AdVideo.OriginalProjectionType.__dict__.values(), 'swap_mode_enum': AdVideo.SwapMode.__dict__.values(), 'unpublished_content_type_enum': AdVideo.UnpublishedContentType.__dict__.values(), 'upload_phase_enum': AdVideo.UploadPhase.__dict__.values(), 'validation_ad_placements_enum': AdVideo.ValidationAdPlacements.__dict__.values()}
    request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/videos', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AdVideo, api_type='EDGE', response_parser=ObjectParser(target_class=AdVideo, api=self._api))
    request.add_params(params)
    request.add_fields(fields)
    if (batch is not None):
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    elif pending:
        return request
    else:
        self.assure_call()
        return request.execute()
# Generated mapping of API field name -> declared type; consumed by the SDK's
# generic field (de)serialization machinery for this node.
_field_types = {'block_offline_analytics': 'bool', 'collaborative_ads_managed_partner_business_info': 'ManagedPartnerBusiness', 'collaborative_ads_managed_partner_eligibility': 'BusinessManagedPartnerEligibility', 'collaborative_ads_partner_premium_options': 'BusinessPartnerPremiumOptions', 'created_by': 'Object', 'created_time': 'datetime', 'extended_updated_time': 'datetime', 'id': 'string', 'is_hidden': 'bool', 'link': 'string', 'name': 'string', 'payment_account_id': 'string', 'primary_page': 'Page', 'profile_picture_uri': 'string', 'timezone_id': 'unsigned int', 'two_factor_type': 'string', 'updated_by': 'Object', 'updated_time': 'datetime', 'user_access_expire_time': 'datetime', 'verification_status': 'string', 'vertical': 'string', 'vertical_id': 'unsigned int'}
def _get_field_enum_info(cls):
    """Return the mapping of enum group name -> iterable of allowed values."""
    return {
        'TwoFactorType': Business.TwoFactorType.__dict__.values(),
        'Vertical': Business.Vertical.__dict__.values(),
        'PermittedTasks': Business.PermittedTasks.__dict__.values(),
        'SurveyBusinessType': Business.SurveyBusinessType.__dict__.values(),
        'PagePermittedTasks': Business.PagePermittedTasks.__dict__.values(),
        'SubverticalV2': Business.SubverticalV2.__dict__.values(),
        'VerticalV2': Business.VerticalV2.__dict__.values(),
        'ActionSource': Business.ActionSource.__dict__.values(),
    }
def extractDaoIst(item):
    """Build a release message for a feed item from the 'Dao Ist' group.

    Returns None for preview posts or items without a chapter/volume,
    False when no known series tag matches.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if 'preview' in title.lower() or not (chp or vol):
        return None
    # Feed tag -> canonical series name; every series here is a translation.
    series_by_tag = {
        'zombie master': 'Zombie Master',
        "Master's Smile": "Master's Smile",
        'I became the Villainesss Brother': "I became the Villainess's Brother",
    }
    for tag, series in series_by_tag.items():
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type='translated')
    return False
def parse_args(argv=None):
    """Parse command-line arguments for the rollout-statistics plotting script.

    Args:
        argv: optional list of argument strings; defaults to sys.argv[1:]
            (added for testability, backward compatible).

    Returns:
        argparse.Namespace with `config`, `experiment_name_and_event_dir`,
        and `save_figs`.
    """
    # Bug fix: the original passed the description as the first positional
    # argument of ArgumentParser, which is `prog` (the program name shown in
    # usage), not `description`.
    parser = argparse.ArgumentParser(description='Script for plotting rollout statistics.')
    # Positional args are required; a `default` on them is dead code and was
    # removed.
    parser.add_argument('config', type=str, help='path to plot config file (args get updated).')
    parser.add_argument('experiment_name_and_event_dir', type=str, nargs='+', help='list of experiments to plot passed as [[name, path_to_event_log_dir], [name, path_to_event_log_dir], ...].')
    parser.add_argument('--save_figs', action='store_true', help='save generated figures.')
    return parser.parse_args(argv)
# NOTE(review): the three lines below are dataset-viewer UI boilerplate that
# was accidentally captured during extraction; they are not part of the code.
# Subsets and Splits
# No community queries yet
# The top public SQL queries from the community will appear here once available.