code stringlengths 281 23.7M |
|---|
def add_rebuild(subparsers):
    """Register the 'rebuild' subcommand, dispatching to runner.rebuild_pipelines."""
    parser = subparsers.add_parser(
        'rebuild',
        help=runner.rebuild_pipelines.__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.set_defaults(func=runner.rebuild_pipelines)
    parser.add_argument('-a', '--all', action='store_true', help='Rebuild all Pipelines')
    # $REBUILD_PROJECT supplies the default when no project is given on the CLI.
    parser.add_argument(
        'project',
        nargs='?',
        default=os.getenv('REBUILD_PROJECT'),
        help='Project to rebuild, overrides $REBUILD_PROJECT',
    )
def _add_queue_cli(add_cmd: Callable, add_hidden: bool=True) -> Callable:
    """Register the 'queue' command group and its subcommands.

    ``add_cmd`` is a parser-factory callable; when ``add_hidden`` is false
    nothing is registered and a no-op handler is returned instead.
    Returns ``handle_args``, a post-processor for the parsed namespace.
    """
    if (not add_hidden):
        return (lambda *a, **k: None)
    sub = add_cmd('queue', help='Manage the job queue')
    queue = sub.add_subparsers(dest='queue_cmd', title='subcommands')
    sub = add_cmd('info', queue, help='Print a summary of the state of the jobs queue')
    # --without-log / --with-log share dest 'withlog' (store_false vs. const True).
    sub.add_argument('--without-log', dest='withlog', action='store_false', help='do not show any of the job queue log file')
    sub.add_argument('--with-log', dest='withlog', action='store_const', const=True, help='also show last 10 lines of the job queue log file')
    sub.add_argument('queueid', nargs='?', help='The queue to show')
    sub = add_cmd('pause', queue, help='Do not let queued jobs run')
    sub.add_argument('queueid')
    sub = add_cmd('unpause', queue, help='Let queued jobs run')
    sub.add_argument('queueid')
    sub = add_cmd('list', queue, help='List the queued jobs')
    sub.add_argument('queueid', nargs='?', help='The queue to list')
    sub = add_cmd('push', queue, help='Add a job to the back of the queue')
    sub.add_argument('reqid')
    sub = add_cmd('pop', queue, help='Get the next job from the front of the queue')
    sub.add_argument('queueid')
    sub = add_cmd('move', queue, help='Move a job up or down in the queue')
    sub.add_argument('reqid')
    sub.add_argument('position', help='the new index for the job (1-based or relative)')
    sub = add_cmd('remove', queue, help='Remove a job from the queue')
    sub.add_argument('reqid')
    def handle_args(args, parser):
        """Rewrite args for queue commands: cmd becomes 'queue-<action>' and,
        for 'move', position is parsed into (int, relative '+'/'-'/None)."""
        if (args.cmd != 'queue'):
            return
        ns = vars(args)
        action = ns.pop('queue_cmd')
        args.cmd = f'queue-{action}'
        if (action == 'move'):
            pos = args.position
            # Bare '+'/'-' mean "move one step"; a leading sign makes the
            # move relative; otherwise the position is an absolute index.
            if (pos == '+'):
                pos = '1'
                relative = '+'
            elif (pos == '-'):
                pos = '1'
                relative = '-'
            elif pos.startswith('+'):
                pos = pos[1:]
                relative = '+'
            elif pos.startswith('-'):
                pos = pos[1:]
                relative = '-'
            else:
                relative = None
            if (not pos.isdigit()):
                parser.error('position must be positive int')
            pos = int(pos)
            if (pos == 0):
                parser.error('position must be positive int')
            args.position = pos
            args.relative = relative
    return handle_args
# NOTE(review): the bare `.dict(...)` expression here and the bare string
# expressions before three methods below were stripped mock decorators;
# restored so the patches apply and the `mock_acls` parameters are supplied.
@mock.patch.dict('bodhi.server.validators.config', {'admin_packager_groups': ['provenpackager'], 'qa_groups': ['fedora-ci-users']})
class TestValidateQAAcls(BasePyTestCase):
    """Tests for validators.validate_qa_acls."""

    def get_mock_request(self):
        """Build a mock request carrying a real update and user from the db."""
        update = self.db.query(models.Build).filter_by(nvr='bodhi-2.0-1.fc17').one().update
        user = self.db.query(models.User).filter_by(id=1).one()
        mock_request = mock.Mock()
        mock_request.identity = user
        mock_request.db = self.db
        mock_request.errors = Errors()
        mock_request.validated = {'update': update}
        mock_request.buildinfo = {'bodhi-2.0-1.fc17': {}}
        return mock_request

    def test_validate_qa_acls_no_identity(self):
        """Anonymous requests are rejected with a cookie-scoped error."""
        mock_request = self.get_mock_request()
        mock_request.identity = None
        validators.validate_qa_acls(mock_request)
        error = [{'location': 'cookies', 'name': 'user', 'description': 'No ACLs for anonymous user'}]
        assert (mock_request.errors == error)

    def test_validate_qa_acls_archived_release(self):
        """Updates on archived releases cannot be edited."""
        mock_request = self.get_mock_request()
        mock_request.validated['update'].release.state = models.ReleaseState.archived
        validators.validate_qa_acls(mock_request)
        error = [{'location': 'body', 'name': 'update', 'description': 'cannot edit Update for an archived Release'}]
        assert (mock_request.errors == error)

    def test_validate_qa_acls_no_update(self):
        """A request without a validated update cannot be ACL-checked."""
        mock_request = self.get_mock_request()
        mock_request.validated.pop('update', None)
        validators.validate_qa_acls(mock_request)
        error = [{'location': 'body', 'name': 'update', 'description': 'ACL validation mechanism was unable to determine ACLs.'}]
        assert (mock_request.errors == error)

    @mock.patch('bodhi.server.validators.validate_acls')
    def test_validate_qa_acls_admin_group(self, mock_acls):
        """Admin-packager-group members bypass the regular ACL check."""
        user = self.db.query(models.User).filter_by(id=1).one()
        group = self.db.query(models.Group).filter_by(name='provenpackager').one()
        user.groups.pop(0)
        user.groups.append(group)
        self.db.flush()
        mock_request = self.get_mock_request()
        validators.validate_qa_acls(mock_request)
        assert (not len(mock_request.errors))
        mock_acls.assert_not_called()

    @mock.patch('bodhi.server.validators.validate_acls')
    def test_validate_qa_acls_ci_group(self, mock_acls):
        """QA-group members bypass the regular ACL check."""
        user = self.db.query(models.User).filter_by(id=1).one()
        group = self.db.query(models.Group).filter_by(name='fedora-ci-users').one()
        user.groups.pop(0)
        user.groups.append(group)
        self.db.flush()
        mock_request = self.get_mock_request()
        validators.validate_qa_acls(mock_request)
        assert (not len(mock_request.errors))
        mock_acls.assert_not_called()

    @mock.patch('bodhi.server.validators.validate_acls')
    def test_validate_qa_acls_fallback(self, mock_acls):
        """Users in no special group fall back to the standard ACL check."""
        mock_request = self.get_mock_request()
        validators.validate_qa_acls(mock_request)
        assert (not len(mock_request.errors))
        mock_acls.assert_called_once()
def plot_auc(label, score, title):
    """Plot precision and recall as functions of the decision threshold.

    *label*/*score* are passed straight to precision_recall_curve. The x
    axis is inverted so high thresholds appear on the left.
    """
    (precision, recall, thresholds) = precision_recall_curve(label, score)
    plt.figure(figsize=(15, 5))
    plt.grid()
    # precision/recall have one more element than thresholds; drop the
    # first value so both arrays align with the thresholds axis.
    plt.plot(thresholds, precision[1:], color='r', label='Precision')
    plt.plot(thresholds, recall[1:], color='b', label='Recall')
    plt.gca().invert_xaxis()
    plt.legend(loc='lower right')
    plt.xlabel('Threshold (0.00 - 1.00)')
    plt.ylabel('Precision / Recall')
    _ = plt.title(title)
class OptionSeriesLineDatalabelsFilter(Options):
    """series.line.dataLabels.filter option group.

    NOTE(review): the duplicated method names were getter/setter pairs whose
    ``@property`` decorators were stripped — without them the second def
    silently shadows the getter. Restored below.
    """

    @property
    def operator(self):
        """Filter comparison operator (None when unset)."""
        return self._config_get(None)

    @operator.setter
    def operator(self, value: Any):
        self._config(value, js_type=False)

    @property
    def property(self):
        """Point property the filter applies to (None when unset)."""
        return self._config_get(None)

    @property.setter
    def property(self, text: str):
        self._config(text, js_type=False)
class OptionPlotoptionsBellcurveSonificationContexttracksMapping(Options):
    """Sonification mapping options for bell-curve context tracks.

    NOTE(review): these accessors were ``@property`` getters (plus a
    ``text`` setter) whose decorators were stripped — the bare ``text``
    setter shadowed its getter. Restored below.
    """

    @property
    def frequency(self) -> 'OptionPlotoptionsBellcurveSonificationContexttracksMappingFrequency':
        return self._config_sub_data('frequency', OptionPlotoptionsBellcurveSonificationContexttracksMappingFrequency)

    @property
    def gapBetweenNotes(self) -> 'OptionPlotoptionsBellcurveSonificationContexttracksMappingGapbetweennotes':
        return self._config_sub_data('gapBetweenNotes', OptionPlotoptionsBellcurveSonificationContexttracksMappingGapbetweennotes)

    @property
    def highpass(self) -> 'OptionPlotoptionsBellcurveSonificationContexttracksMappingHighpass':
        return self._config_sub_data('highpass', OptionPlotoptionsBellcurveSonificationContexttracksMappingHighpass)

    @property
    def lowpass(self) -> 'OptionPlotoptionsBellcurveSonificationContexttracksMappingLowpass':
        return self._config_sub_data('lowpass', OptionPlotoptionsBellcurveSonificationContexttracksMappingLowpass)

    @property
    def noteDuration(self) -> 'OptionPlotoptionsBellcurveSonificationContexttracksMappingNoteduration':
        return self._config_sub_data('noteDuration', OptionPlotoptionsBellcurveSonificationContexttracksMappingNoteduration)

    @property
    def pan(self) -> 'OptionPlotoptionsBellcurveSonificationContexttracksMappingPan':
        return self._config_sub_data('pan', OptionPlotoptionsBellcurveSonificationContexttracksMappingPan)

    @property
    def pitch(self) -> 'OptionPlotoptionsBellcurveSonificationContexttracksMappingPitch':
        return self._config_sub_data('pitch', OptionPlotoptionsBellcurveSonificationContexttracksMappingPitch)

    @property
    def playDelay(self) -> 'OptionPlotoptionsBellcurveSonificationContexttracksMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionPlotoptionsBellcurveSonificationContexttracksMappingPlaydelay)

    @property
    def rate(self) -> 'OptionPlotoptionsBellcurveSonificationContexttracksMappingRate':
        return self._config_sub_data('rate', OptionPlotoptionsBellcurveSonificationContexttracksMappingRate)

    @property
    def text(self):
        """Mapped text value (None when unset)."""
        return self._config_get(None)

    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)

    @property
    def time(self) -> 'OptionPlotoptionsBellcurveSonificationContexttracksMappingTime':
        return self._config_sub_data('time', OptionPlotoptionsBellcurveSonificationContexttracksMappingTime)

    @property
    def tremolo(self) -> 'OptionPlotoptionsBellcurveSonificationContexttracksMappingTremolo':
        return self._config_sub_data('tremolo', OptionPlotoptionsBellcurveSonificationContexttracksMappingTremolo)

    @property
    def volume(self) -> 'OptionPlotoptionsBellcurveSonificationContexttracksMappingVolume':
        return self._config_sub_data('volume', OptionPlotoptionsBellcurveSonificationContexttracksMappingVolume)
def test_schema_validation():
    """Exercise person.validate_or_error on coerced, invalid and missing input."""
    (value, error) = person.validate_or_error({'name': 'Tom', 'age': '123'})
    assert (not error)
    assert (value == {'name': 'Tom', 'age': 123})
    # NOTE(review): the identical validation is repeated — possibly checking
    # that validation is repeatable/side-effect free; confirm intent.
    (value, error) = person.validate_or_error({'name': 'Tom', 'age': '123'})
    assert (not error)
    assert (value == {'name': 'Tom', 'age': 123})
    (value, error) = person.validate_or_error({'name': 'Tom', 'age': 'abc'})
    assert (dict(error) == {'age': 'Must be a number.'})
    (value, error) = person.validate_or_error({'name': 'Tom'})
    assert (dict(error) == {'age': 'This field is required.'})
class OptionPlotoptionsPictorialSonificationContexttracksMappingTime(Options):
    """Time mapping options for pictorial sonification context tracks.

    NOTE(review): each getter/setter pair below had its ``@property`` /
    ``@<name>.setter`` decorators stripped (setters shadowed getters).
    Restored. ``max``/``min`` intentionally mirror the JS API names.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def meta_data_upload_handler(save_stats: dict[(str, Any)], path: str) -> tuple[(Union[(None, dict[(str, Any)])], dict[(str, Any)])]:
    """Upload save meta data and return (server_response_or_None, save_stats).

    prepare_upload() assembles the upload payload; when it fails (returns
    None) no upload is attempted and (None, save_stats) is returned.
    """
    data = prepare_upload(save_stats, path)
    if (data is None):
        return (None, save_stats)
    # prepare_upload may also refresh save_stats as part of its tuple.
    (token, inquiry_code, save_data, playtime, managed_items, save_stats) = data
    helper.colored_text('Uploading meta data...', helper.GREEN)
    upload_data = upload_metadata_v2(token, inquiry_code, save_data, playtime, managed_items, helper.calculate_user_rank(save_stats))
    return (upload_data, save_stats)
def extractStormytranslationsHomeBlog(item):
    """Map a blog post to a release message.

    Returns None for previews or titles with no chapter/volume, a release
    message for a known series tag, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    known_series = [
        ('Quick Transmigration: Perfect Destiny', 'Quick Transmigration: Perfect Destiny', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, tl_type in known_series:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class TrainDataService(object):
    """In-memory seat reservation service backed by a JSON train dump."""

    def __init__(self, json_data):
        # Mapping of train_id -> train record (each with a 'seats' dict).
        self.trains = json.loads(json_data)

    def data_for_train(self, train_id):
        """Return the JSON-serialized record for *train_id* ('null' if unknown)."""
        return json.dumps(self.trains.get(train_id))

    def reserve(self, train_id, seats, booking_reference):
        """Book the JSON-encoded seat list under *booking_reference*.

        Returns the updated train JSON on success, or an error string when a
        seat is unknown or already booked under a different reference.
        """
        train = self.trains.get(train_id)
        requested = json.loads(seats)
        # Validate every requested seat before mutating anything.
        for seat in requested:
            if seat not in train['seats']:
                return 'seat not found {0}'.format(seat)
            current = train['seats'][seat]['booking_reference']
            if current and current != booking_reference:
                return 'already booked with reference: {0}'.format(current)
        for seat in requested:
            train['seats'][seat]['booking_reference'] = booking_reference
        return self.data_for_train(train_id)

    def reset(self, train_id):
        """Clear every booking reference on *train_id* and return its JSON."""
        for seat in self.trains.get(train_id)['seats'].values():
            seat['booking_reference'] = ''
        return self.data_for_train(train_id)
def check_update(l=None, config=None):
    """Check GitHub for a newer release and optionally download it.

    :param l: logger to use; a fresh ``logger.Logger()`` when omitted.
    :param config: settings mapping; honours ``update_beta_channel`` (use the
        dev channel) and ``auto_update == 'download'`` (fetch the archive).
    """
    if (not l):
        l = logger.Logger()
    if config is None:
        # Avoid the shared mutable-default pitfall (original used ``config={}``).
        config = {}
    dev = config.get('update_beta_channel', False)
    download_update = (config.get('auto_update') == 'download')
    l.debug((i18n.UPDATE_CHANNEL % ((dev and i18n.UPDATE_DEV_CHANNEL) or i18n.UPDATE_RELEASE_CHANNEL)))
    s = requests.Session()
    g = GithubUpdater(s)
    try:
        info = g.get_latest_release(dev)
        # BUGFIX: the version constant lives on ``const`` — the original
        # compared against a bare ``VERSION_UPDATE`` name right after
        # checking ``hasattr(const, 'VERSION_UPDATE')``.
        if (hasattr(const, 'VERSION_UPDATE') and (const.VERSION_UPDATE == info.update_id)):
            l.debug(i18n.UPDATE_NO_UPDATE)
            return
        l.info((i18n.UPDATE_AVAILABLE % (info.ts, info.message, info.update_id)))
        if (not download_update):
            l.info(i18n.UPDATE_DOWNLOAD_MANUALLY)
            return
        resp = s.get(info.download_link)
        z = resp.content
        # NOTE(review): on Python 3 this needs io.BytesIO since resp.content
        # is bytes — confirm the target runtime before changing.
        with zipfile.ZipFile(StringIO(z)) as zf:
            make_src_update_file(zf, g.get_src_path_in_archive(info), info)
        l.info(i18n.UPDATE_COMPLETE)
    except MemoryError as ex:
        l.warn((i18n.UPDATE_FAILED % str(ex)))
class TestDefaultTickGenerator(unittest.TestCase):
    """Unit tests for DefaultTickGenerator.get_ticks."""

    def setUp(self):
        self.tick_generator = DefaultTickGenerator()

    def test_default_tick_generator(self):
        bounds_high = 1.0
        bounds_low = 0.0
        step = 0.1
        ticks = self.tick_generator.get_ticks(
            data_low=0,
            data_high=1,
            bounds_low=bounds_low,
            bounds_high=bounds_high,
            interval=step,
        )
        # 0.0, 0.1, ..., 1.0 -> (range / interval) + 1 ticks.
        self.assertEqual(len(ticks), ((bounds_high - bounds_low) / step) + 1)
class GameItem(AbstractCrudObject):
    """Graph API GameItem node (generated-SDK style CRUD object)."""

    def __init__(self, fbid=None, parent_id=None, api=None):
        self._isGameItem = True
        super(GameItem, self).__init__(fbid, parent_id, api)

    class Field(AbstractObject.Field):
        # Field names exposed by the GameItem node.
        count = 'count'
        created = 'created'
        ext_id = 'ext_id'
        id = 'id'
        item_def = 'item_def'
        owner = 'owner'
        status = 'status'
        updated = 'updated'

    class Action():
        # Enum values accepted for game-item actions.
        consume = 'CONSUME'
        drop = 'DROP'
        mark = 'MARK'

    def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Fetch this node.

        Returns the FacebookRequest when batched or *pending*, otherwise
        executes the request and returns its result.
        """
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=GameItem, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    # Declared field types used by the SDK's type checker.
    _field_types = {'count': 'int', 'created': 'datetime', 'ext_id': 'string', 'id': 'string', 'item_def': 'string', 'owner': 'User', 'status': 'string', 'updated': 'datetime'}

    @classmethod
    def _get_field_enum_info(cls):
        # BUGFIX(review): declared with a ``cls`` first parameter but missing
        # its @classmethod decorator; restored.
        field_enum_info = {}
        field_enum_info['Action'] = GameItem.Action.__dict__.values()
        return field_enum_info
class DummyChecker(Checker, register=False):
    """No-op checker for tests: opts into every input and never flags it."""

    def get_json_schema(self, external_data):
        # No schema to validate against.
        return None

    @classmethod
    def should_check(cls, external_data):
        # NOTE(review): the ``cls`` first parameter indicates this was a
        # @classmethod whose decorator was stripped; restored.
        return True

    async def check(self, external_data):
        logging.debug('Phony checker checking external data %s and all is always good', external_data.filename)
def process_input(fname):
    """Parse an email Message from *fname* (sys.stdin or a filesystem path).

    stdin is slurped as bytes (via .buffer when available); paths are opened
    in binary mode. If the parsed message lacks both 'From' and 'Date'
    headers, the first line is repaired and parsing retried.
    """
    if (fname is sys.stdin):
        try:
            # Text-mode stdin exposes underlying bytes via .buffer.
            stdin_file = fname.buffer.read()
        except AttributeError:
            stdin_file = fname.read()
    else:
        stdin_file = None
    if (stdin_file is not None):
        fobj = io.BytesIO(stdin_file)
    else:
        fobj = io.open(fname, mode='rb')
    f_keep = fobj
    # utf8=True lets headers be emitted as UTF-8 rather than encoded words.
    mesg = BytesParser(policy=policy.default.clone(utf8=True)).parse(fobj)
    if (('From' not in mesg.keys()) and ('Date' not in mesg.keys())):
        # Probably a mangled leading line (mbox-style); fix and reparse.
        fobj = _fix_first_line(f_keep)
        mesg = BytesParser(policy=policy.default.clone(utf8=True)).parse(fobj)
    try:
        fobj.close()
    except NameError:
        # NOTE(review): fobj is always bound by this point, so this guard
        # looks vestigial — confirm before removing.
        pass
    close_stdin()
    return mesg
class client():
    """Tiny in-memory stand-in for a redis-style client used in tests.

    State lives in the module-level globals ``store``/``latest`` so all
    instances share it, mimicking a shared server.
    """

    def __init__(self):
        # BUGFIX(review): the original assigned a *local* ``store = {}``
        # that was discarded immediately; initialize the shared globals
        # the other methods actually use.
        global store, latest
        store = {}
        latest = None

    def set(self, key, val):
        """Store *val* under *key*."""
        global store, latest
        store[key] = val
        return

    def get(self, key):
        """Return the stored value, or None when absent."""
        global store, latest
        if (key in store):
            return store[key]
        else:
            return None

    def publish(self, key, val):
        """Store *val* and record *key* as the most recently published channel."""
        global store, latest
        store[key] = val
        latest = key

    def pubsub(self):
        # Delegates to a module-level pubsub() factory defined elsewhere.
        return pubsub()

    def exists(self, key):
        return (key in store)
class BaseTestCertRequestError():
    """Shared base for CertRequest validation-error tests.

    Subclasses override the class attributes (typically
    ERROR_MESSAGE_PATTERN plus one bad input) and inherit test_error, which
    asserts that constructing a CertRequest raises a matching ValueError.
    """
    PUBLIC_KEY = 'a_public_key'
    IDENTIFIER = 'an_identifier'
    LEDGER_ID = 'a_ledger_id'
    NOT_BEFORE = '2020-01-01'
    NOT_AFTER = '2020-01-02'
    MESSAGE_FORMAT = '{public_key}'
    PATH = 'some/path'
    # Regex the expected ValueError message must match; set by subclasses.
    ERROR_MESSAGE_PATTERN = ''
    def test_error(self):
        with pytest.raises(ValueError, match=self.ERROR_MESSAGE_PATTERN):
            CertRequest(self.PUBLIC_KEY, self.IDENTIFIER, self.LEDGER_ID, self.NOT_BEFORE, self.NOT_AFTER, self.MESSAGE_FORMAT, self.PATH)
class EpisodeGlobber(GlobBase):
    """Glob handler for 'episode'-style number tokens."""

    KEYS = ['episode', 'eps', 'ep']

    def attach_token(self, before, target, after):
        """Fold *target* into an episode token based on its surroundings.

        Whether a ChapterToken already exists before (including target) or
        after decides if the episode number becomes a fragment, a volume,
        or a chapter. Returns the updated (before, target, after).
        """
        have_chapter_before = any([isinstance(itm, ChapterToken) for itm in (before + [target])])
        have_chapter_after = any([isinstance(itm, ChapterToken) for itm in after])
        (before, prec, intervening) = self.get_preceeding_text(before)
        if (target == ' '):
            # Bare whitespace target: put the consumed text back untouched.
            if prec:
                before.append(prec)
            if intervening:
                before.append(intervening)
        elif (prec and (prec.lower() in self.KEYS)):
            if isinstance(target, CompoundToken):
                # (A redundant re-check of the elif guard was removed here —
                # prec is already known to be a KEYS word.)
                if have_chapter_before:
                    target.specialize_into(CompoundFragmentFragmentToken, prec, intervening)
                elif have_chapter_after:
                    target.specialize_into(CompoundVolumeVolumeToken, prec, intervening)
                else:
                    target.specialize_into(CompoundVolChapterToken, prec, intervening)
            elif isinstance(target, str):
                if have_chapter_before:
                    target = FragmentToken(prec, intervening, target)
                elif have_chapter_after:
                    target = VolumeToken(prec, intervening, target)
                else:
                    target = ChapterToken(prec, intervening, target)
            else:
                raise TypeError('Attempting to attach EpisodeToken to a non-string/CompoundToken instance!')
        else:
            # Preceding text is not an episode keyword: restore it.
            if prec:
                before.append(prec)
            if intervening:
                before.append(intervening)
        return (before, target, after)
class OptionSeriesVariablepieEvents(Options):
    """Event-handler options for variable-pie series.

    NOTE(review): every getter/setter pair below had its ``@property`` /
    ``@<name>.setter`` decorators stripped (setters shadowed getters).
    Restored.
    """

    @property
    def afterAnimate(self):
        return self._config_get(None)

    @afterAnimate.setter
    def afterAnimate(self, value: Any):
        self._config(value, js_type=False)

    @property
    def checkboxClick(self):
        return self._config_get(None)

    @checkboxClick.setter
    def checkboxClick(self, value: Any):
        self._config(value, js_type=False)

    @property
    def click(self):
        return self._config_get(None)

    @click.setter
    def click(self, value: Any):
        self._config(value, js_type=False)

    @property
    def hide(self):
        return self._config_get(None)

    @hide.setter
    def hide(self, value: Any):
        self._config(value, js_type=False)

    @property
    def legendItemClick(self):
        return self._config_get(None)

    @legendItemClick.setter
    def legendItemClick(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mouseOut(self):
        return self._config_get(None)

    @mouseOut.setter
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mouseOver(self):
        return self._config_get(None)

    @mouseOver.setter
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)

    @property
    def show(self):
        return self._config_get(None)

    @show.setter
    def show(self, value: Any):
        self._config(value, js_type=False)
def bb_strategy(ohlcv):
    """Bollinger-band signal strategy.

    Entries where close crosses above the upper band, exits where it
    crosses below the lower band. Parameters are read from the function
    attributes ``bb_strategy.window`` / ``bb_strategy.nstd``.

    NOTE(review): the original had a stripped decorator ``(window=14,
    nstd=2)`` above the def that evidently attached these parameters as
    function attributes; reproduced explicitly below the definition.
    """
    window = bb_strategy.window
    nstd = bb_strategy.nstd
    mean = ohlcv.close.rolling(window).mean()
    std = ohlcv.close.rolling(window).std()
    up = (mean + (nstd * std))
    dn = (mean - (nstd * std))
    entries = (ohlcv.close > up)
    exits = (ohlcv.close < dn)
    # Reuse the computed bands rather than recomputing them for the figure.
    figure = {'overlaps': {'up': up, 'dn': dn}}
    return (entries, exits, figure)

# Default strategy parameters (previously supplied by the stripped decorator).
bb_strategy.window = 14
bb_strategy.nstd = 2
# NOTE(review): the bare ``.django_db`` line was the tail of a stripped
# ``@pytest.mark.django_db`` decorator; restored.
@pytest.mark.django_db
def test_quarterly_followed_by_monthly():
    """A later monthly submission must win final-balances over the earlier
    quarterly one for the same toptier code and fiscal year."""
    # Monthly window (period 10) revealed after the quarterly window (period 9).
    baker.make('submissions.DABSSubmissionWindowSchedule', id=11, submission_fiscal_year=2020, submission_fiscal_month=10, is_quarter=False, submission_due_date='2020-08-21', submission_reveal_date='2020-08-22')
    baker.make('submissions.DABSSubmissionWindowSchedule', id=22, submission_fiscal_year=2020, submission_fiscal_month=9, is_quarter=True, submission_due_date='2020-08-14', submission_reveal_date='2020-08-15')
    baker.make('submissions.SubmissionAttributes', submission_id=1, toptier_code='A', reporting_fiscal_year=2020, reporting_fiscal_period=10, quarter_format_flag=False, submission_window_id=11)
    baker.make('submissions.SubmissionAttributes', submission_id=2, toptier_code='A', reporting_fiscal_year=2020, reporting_fiscal_period=9, quarter_format_flag=True, submission_window_id=22)
    call_command('populate_is_final_balances_for_fy')
    monthly_submission = SubmissionAttributes.objects.get(submission_id=1)
    quarterly_submission = SubmissionAttributes.objects.get(submission_id=2)
    assert monthly_submission.is_final_balances_for_fy
    assert (not quarterly_submission.is_final_balances_for_fy)
class Ext():
    """msgpack extension-type container: an application-specific (type, data)
    pair, with Python 2/3 compatible validation gated on _IS_PY3."""
    def __init__(self, type, data):
        # Application ext type codes must fit in 0..127.
        if ((not (type.__class__ == int)) or (not ((type >= 0) and (type <= 127)))):
            raise TypeError('ext type out of range')
        elif (_IS_PY3 and (not (data.__class__ == bytes))):
            raise TypeError("ext data is not type 'bytes'")
        elif ((not _IS_PY3) and (not (data.__class__ == str))):
            raise TypeError("ext data is not type 'str'")
        self.type = type
        self.data = data
    def __eq__(self, other):
        # NOTE(review): defining __eq__ without __hash__ makes instances
        # unhashable on Python 3 — confirm whether hashing is needed.
        return (isinstance(other, self.__class__) and (self.type == other.type) and (self.data == other.data))
    def __ne__(self, other):
        return (not self.__eq__(other))
    def __str__(self):
        # Renders "Ext Object (Type: 0xNN, Data: aa bb ...)" with at most
        # the first 8 data bytes shown.
        s = ('Ext Object (Type: 0x%02x, Data: ' % self.type)
        # NOTE(review): xrange exists only on Python 2 (or via a compat shim
        # defined elsewhere in this module) — verify target runtime.
        for i in xrange(min(len(self.data), 8)):
            if (i > 0):
                s += ' '
            # Indexing bytes yields int on py3 but str on py2; handle both.
            if isinstance(self.data[i], int):
                s += ('%02x' % self.data[i])
            else:
                s += ('%02x' % ord(self.data[i]))
        if (len(self.data) > 8):
            s += ' ...'
        s += ')'
        return s
def test_builder_to_disk_uses_default_cwd(manifest_dir, monkeypatch):
    """write_to_disk() without an explicit path must write into the cwd."""
    monkeypatch.chdir(manifest_dir)
    build({}, package_name('package'), manifest_version('ethpm/3'), version('1.0.0'), write_to_disk(), validate())
    # The manifest file is named after the package version.
    actual_manifest = (manifest_dir / '1.0.0.json').read_text()
    assert (actual_manifest == MINIFIED_MANIFEST)
class UserProvider():
    """Provider-linked identity data for a user (uid + provider id, plus
    optional email, display name and photo URL).

    NOTE(review): stray ``_id.setter`` / ``_name.setter`` / ``_url.setter``
    lines in the source show that the getter/setter pairs below had lost
    their ``@property`` / ``@<name>.setter`` decorators; restored.
    """

    def __init__(self, uid, provider_id, email=None, display_name=None, photo_url=None):
        # Assignments run through the validating property setters below.
        self.uid = uid
        self.provider_id = provider_id
        self.email = email
        self.display_name = display_name
        self.photo_url = photo_url

    @property
    def uid(self):
        return self._uid

    @uid.setter
    def uid(self, uid):
        self._uid = _auth_utils.validate_uid(uid, required=True)

    @property
    def provider_id(self):
        return self._provider_id

    @provider_id.setter
    def provider_id(self, provider_id):
        self._provider_id = _auth_utils.validate_provider_id(provider_id, required=True)

    @property
    def email(self):
        return self._email

    @email.setter
    def email(self, email):
        self._email = _auth_utils.validate_email(email)

    @property
    def display_name(self):
        return self._display_name

    @display_name.setter
    def display_name(self, display_name):
        self._display_name = _auth_utils.validate_display_name(display_name)

    @property
    def photo_url(self):
        return self._photo_url

    @photo_url.setter
    def photo_url(self, photo_url):
        self._photo_url = _auth_utils.validate_photo_url(photo_url)

    def to_dict(self):
        """Return the wire-format payload dict, omitting None fields."""
        payload = {'rawId': self.uid, 'providerId': self.provider_id, 'displayName': self.display_name, 'email': self.email, 'photoUrl': self.photo_url}
        return {k: v for (k, v) in payload.items() if (v is not None)}
class SpeakerImageSizeDetail(ResourceDetail):
    """JSON-API detail resource for the speaker image-size settings."""
    def before_get(self, args, kwargs):
        # The speaker image-size settings live in a single fixed row (id 2).
        kwargs['id'] = 2
    # PATCH requires admin; GET carries no permission decorator.
    decorators = (api.has_permission('is_admin', methods='PATCH', id='2'),)
    methods = ['GET', 'PATCH']
    schema = SpeakerImageSizeSchema
    data_layer = {'session': db.session, 'model': ImageSizes}
class ProxyFunctionSpace(FunctionSpace):
    """FunctionSpace proxy that wraps itself in WithGeometry when built on a
    geometric mesh (one that is not its own topology)."""
    def __new__(cls, mesh, element, name=None):
        topology = mesh.topology
        self = super(ProxyFunctionSpace, cls).__new__(cls)
        if (mesh is not topology):
            # Geometric mesh: hand back a geometry-aware wrapper instead.
            return WithGeometry.create(self, mesh)
        else:
            return self
    def __repr__(self):
        return ('%sProxyFunctionSpace(%r, %r, name=%r, index=%r, component=%r)' % (str(self.identifier).capitalize(), self.mesh(), self.ufl_element(), self.name, self.index, self.component))
    def __str__(self):
        return ('%sProxyFunctionSpace(%s, %s, name=%s, index=%s, component=%s)' % (str(self.identifier).capitalize(), self.mesh(), self.ufl_element(), self.name, self.index, self.component))
    # Label for the proxy kind; set by factories/subclasses.
    identifier = None
    # When True, building a dat (and hence a Function) on this space fails.
    no_dats = False
    def make_dat(self, *args, **kwargs):
        """Create the underlying dat unless this proxy forbids it."""
        if self.no_dats:
            raise ValueError(("Can't build Function on %s function space" % self.identifier))
        return super(ProxyFunctionSpace, self).make_dat(*args, **kwargs)
# NOTE(review): the two bare ``.integration_mongodb`` / ``.integration``
# lines were the tails of stripped ``@pytest.mark.*`` decorators; restored.
@pytest.mark.integration_mongodb
@pytest.mark.integration
def test_mongo_example_data(integration_mongodb_connector):
    """Sanity-check the seeded example data in the integration MongoDB."""
    db = integration_mongodb_connector['mongo_test']
    collection_names = set(db.collection_names())
    # Every expected collection must be present (extras are tolerated).
    assert ({'payment_card', 'orders', 'customer', 'employee', 'product', 'reports', 'customer_details', 'composite_pk_test'}.difference(collection_names) == set())
    assert (db.customer.count() == 3)
    assert (db.payment_card.count() == 2)
    assert (db.orders.count() == 4)
    assert (db.employee.count() == 2)
    assert (db.product.count() == 3)
    assert (db.reports.count() == 4)
def get_all_configs_from_hydra(default_conf: str, all_hydra_config_modules: List[str]) -> List[pytest.param]:
    """Collect pytest params for every hydra config module x override combo.

    Each param packs (config_module, default_conf, overrides) with a
    readable test id joined from the override values.
    """
    configs = []
    for config_module in all_hydra_config_modules:
        with initialize_config_module(config_module):
            for overrides in _get_all_overrides_from_hydra():
                config = pytest.param(config_module, default_conf, overrides, id='-'.join(overrides.values()))
                configs.append(config)
    return configs
def get_images(html, url):
    """Resolve the image URL(s) on a gallery page.

    Raises PauseDownloadError when the page shows the login marker. Prefers
    the /image/source/<id> detail page (following it for a data-src image),
    falling back to the /image/source?id=<id> link.
    """
    if '<!-- Login -->' in html:
        raise PauseDownloadError("You didn't login!")
    source_match = re.search('href="(/image/source/\\d+)', html)
    if not source_match:
        # Fallback: direct source link with a query-string id.
        fallback = re.search('href="(/image/source\\?id=\\d+)', html).group(1)
        return [urljoin(url, fallback)]
    source_page_url = urljoin(url, source_match.group(1))
    source_page = grabhtml(source_page_url)
    image_match = re.search('data-src="([^"]+)', source_page)
    return [image_match.group(1) if image_match else source_page_url]
class ValveOfTestCase(unittest.TestCase):
    """Tests for valve_of flow-reordering helpers."""
    def test_reorder_dupe(self):
        """Duplicate flows collapse to a single entry."""
        flow = valve_of.output_port(1)
        flows = [flow, flow, flow]
        reordered = valve_of.valve_flowreorder(flows, use_barriers=False)
        self.assertEqual(1, len(reordered))
    def test_delete_order(self):
        """Global group/flow deletes lead (followed by a barrier), the
        table-scoped delete does not survive, and plain flows come last."""
        global_groupdel = valve_of.groupdel(group_id=valve_of.ofp.OFPG_ALL)
        global_flowdel = valve_of.flowmod(cookie=None, hard_timeout=None, idle_timeout=None, match_fields=None, out_port=None, table_id=valve_of.ofp.OFPTT_ALL, inst=(), priority=0, command=valve_of.ofp.OFPFC_DELETE, out_group=valve_of.ofp.OFPG_ANY)
        # Same delete but scoped to a single table (table_id=9).
        flowdel = valve_of.flowmod(cookie=None, hard_timeout=None, idle_timeout=None, match_fields=None, out_port=None, table_id=9, inst=(), priority=0, command=valve_of.ofp.OFPFC_DELETE, out_group=valve_of.ofp.OFPG_ANY)
        flow = valve_of.output_port(1)
        flows = [flowdel, flow, flow, flow, global_flowdel, global_groupdel]
        reordered = valve_of.valve_flowreorder(flows, use_barriers=True)
        reordered_str = [str(r) for r in reordered]
        self.assertTrue(valve_of.is_global_groupdel(reordered[0]), msg=reordered)
        self.assertTrue(valve_of.is_global_flowdel(reordered[1]), msg=reordered)
        self.assertEqual(str(valve_of.barrier()), str(reordered[2]), msg=reordered)
        self.assertTrue((str(flowdel) not in reordered_str), msg=reordered)
        self.assertEqual(str(flow), reordered_str[(- 1)], msg=reordered)
def prepare_rfft_input(arr):
    """Build a Transformation packing a real array for RFFT input: adjacent
    pairs along the last axis become (re, im) of a complex array of half
    the length (output shape arr.shape[:-1] + (arr.shape[-1]//2,))."""
    res = Type(dtypes.complex_for(arr.dtype), (arr.shape[:(- 1)] + ((arr.shape[(- 1)] // 2),)))
    # Mako template: even-indexed input -> real part, odd-indexed -> imaginary.
    return Transformation([Parameter('output', Annotation(res, 'o')), Parameter('input', Annotation(arr, 'i'))], '\n <%\n batch_idxs = " ".join((idx + ", ") for idx in idxs[:-1])\n %>\n ${input.ctype} re = ${input.load_idx}(${batch_idxs} ${idxs[-1]} * 2);\n ${input.ctype} im = ${input.load_idx}(${batch_idxs} ${idxs[-1]} * 2 + 1);\n ${output.store_same}(COMPLEX_CTR(${output.ctype})(re, im));\n ', connectors=['output'])
def extractBreakingOffTheEngagementBringItOn(item):
    """Map a release post to a release message.

    Returns None for previews or titles with no chapter/volume, a release
    message for the known series tag, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    known_series = [
        ('Breaking off the engagement .. Bring it on!', 'Breaking off the engagement .. Bring it on!', 'translated'),
    ]
    for tag, series_name, tl_type in known_series:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# NOTE(review): the bare name below looks like the remnant of a stripped
# decorator (e.g. ``@..._member_required``) — confirm and restore the '@'.
_member_required
def edit_plain_text_document(request, uuid, metadata_uuid=None):
    """View to create or edit a plaintext file attached to a Document.

    GET renders the edit form (prefilled when metadata_uuid names existing
    metadata); POST replaces the existing metadata's content in place, or
    creates and links a new BinaryMetadata when none was given.
    """
    uuid = UUID(uuid)
    if (metadata_uuid is not None):
        metadata_uuid = UUID(metadata_uuid)
    try:
        doc = Document.objects.all().get(uuid=uuid)
    except Document.DoesNotExist:
        raise Http404('Document with this uuid does not exist.')
    has_metadata = None
    if (metadata_uuid is not None):
        try:
            has_metadata = doc.binary_metadata.all().get(metadata__uuid=metadata_uuid)
        except DocumentHasBinaryMetadata.DoesNotExist:
            raise Http404(f'Document {doc} does not have associated metadata with uuid {metadata_uuid}.')
    if (request.method == 'POST'):
        form = TextStorageEdit(request.POST)
        if form.is_valid():
            uuid = form.cleaned_data['uuid']
            filename = form.cleaned_data['filename']
            blob = str.encode(form.cleaned_data['content'])
            content_type = form.cleaned_data['content_type']
            if has_metadata:
                # Editing existing metadata: build a throwaway record to
                # compute name/data, copy those over, then discard it.
                m = BinaryMetadata.new_file(blob, len(blob), uuid=None, content_type=content_type, filename=filename)
                has_metadata.metadata.name = m.name
                has_metadata.metadata.data = m.data
                has_metadata.metadata.set_last_modified()
                has_metadata.set_last_modified()
                doc.set_last_modified()
                m.delete()
            else:
                # New file: persist the metadata and link it to the document.
                m = BinaryMetadata.new_file(blob, len(blob), uuid=uuid, content_type=content_type, filename=filename)
                m.save()
                (has, _) = DocumentHasBinaryMetadata.objects.get_or_create(name=STORAGE_NAME, document=doc, metadata=m)
                has.save()
            messages.add_message(request, messages.SUCCESS, f"""Added file "{(filename if filename else m.uuid)}" {(m.uuid if filename else '')} for {doc}""")
            return redirect(doc)
    elif has_metadata:
        # GET with existing metadata: prefill the form from the stored file.
        _t = has_metadata.metadata.try_get_content_type()
        if (_t is None):
            return HttpResponseBadRequest(request, f'{has_metadata.metadata} is not a plaintext file.')
        filename = _t['filename']
        _type = _t['content_type']
        try:
            content = has_metadata.metadata.data.decode(encoding='utf-8', errors='strict')
        except:
            # NOTE(review): bare except — narrows poorly; likely intended
            # UnicodeDecodeError. Confirm before tightening.
            return HttpResponseBadRequest(request, f'{has_metadata.metadata} is not a plaintext file.')
        form = TextStorageEdit(initial={'uuid': has_metadata.metadata.uuid, 'content_type': _type, 'filename': filename, 'content': content})
    else:
        form = TextStorageEdit()
    context = {'document': doc, 'form': form, 'add_document_form': AddDocument()}
    template = loader.get_template('plain_text_edit.html')
    return HttpResponse(template.render(context, request))
class IngressStatusTest1(AmbassadorTest):
    """End-to-end check that an Ingress gets its status loadBalancer updated."""
    # Status payload pushed via kubestatus and expected back on the Ingress.
    status_update = {'loadBalancer': {'ingress': [{'ip': '42.42.42.42'}]}}
    def init(self):
        self.target = HTTP()
    def manifests(self) -> str:
        return ('\n---\napiVersion: networking.k8s.io/v1\nkind: Ingress\nmetadata:\n  annotations:\n    kubernetes.io/ingress.class: ambassador\n    getambassador.io/ambassador-id: {self.ambassador_id}\n  name: {self.path.k8s}\nspec:\n  rules:\n  - paths:\n    - backend:\n        service:\n          name: {self.target.path.k8s}\n          port:\n            number: 80\n      path: /{self.name}/\n      pathType: Prefix\n' + super().manifests())
    def queries(self):
        # NOTE(review): 'True or' disables the non-darwin gate — looks like a
        # debug toggle left enabled; confirm intent.
        if (True or (sys.platform != 'darwin')):
            text = json.dumps(self.status_update)
            update_cmd = [KUBESTATUS_PATH, 'Service', '-n', 'default', '-f', f'metadata.name={self.path.k8s}', '-u', '/dev/fd/0']
            subprocess.run(update_cmd, input=text.encode('utf-8'), timeout=10)
            # Give the status push a moment to propagate.
            time.sleep(1)
        (yield Query(self.url((self.name + '/'))))
        (yield Query(self.url(f'need-normalization/../{self.name}/')))
    def check(self):
        if (not parse_bool(os.environ.get('AMBASSADOR_PYTEST_INGRESS_TEST', 'false'))):
            pytest.xfail('AMBASSADOR_PYTEST_INGRESS_TEST not set, xfailing...')
        # NOTE(review): 'False and' disables this darwin xfail — another
        # leftover toggle; confirm intent.
        if (False and (sys.platform == 'darwin')):
            pytest.xfail('not supported on Darwin')
        for r in self.results:
            if r.backend:
                assert (r.backend.name == self.target.path.k8s), (r.backend.name, self.target.path.k8s)
                assert r.backend.request
                assert (r.backend.request.headers['x-envoy-original-path'][0] == f'/{self.name}/')
        ingress_cmd = ['tools/bin/kubectl', 'get', '-n', 'default', '-o', 'json', 'ingress', self.path.k8s]
        ingress_run = subprocess.Popen(ingress_cmd, stdout=subprocess.PIPE)
        (ingress_out, _) = ingress_run.communicate()
        ingress_json = json.loads(ingress_out)
        assert (ingress_json['status'] == self.status_update), f"Expected Ingress status to be {self.status_update}, got {ingress_json['status']} instead"
def scan_dates(pfile: _XTGeoFile, maxdates: int=MAXDATES, dataframe: bool=False) -> (list | pd.DataFrame):
    """Scan a restart-style file for (SEQNUM, DATE) pairs.

    Returns a list of tuples, or a DataFrame with SEQNUM/DATE columns when
    *dataframe* is true.

    NOTE(review): ``maxdates`` is accepted but never used — confirm whether
    scanning should stop after that many dates.
    """
    dates = []
    seqnum = (- 1)
    for item in resfo.lazy_read(pfile.file):
        kw = item.read_keyword().strip()
        data = item.read_array()
        if (kw == 'SEQNUM'):
            seqnum = data[0]
            continue
        # Only record a date for the INTEHEAD that follows a SEQNUM record.
        if ((kw == 'INTEHEAD') and (seqnum != (- 1))):
            # Builds YYYYMMDD from entries 66/65/64 (presumably
            # year/month/day per the INTEHEAD layout — verify).
            date = int(f'{data[66]}{data[65]:02d}{data[64]:02d}')
            dates.append((seqnum, date))
            seqnum = (- 1)
    return (pd.DataFrame.from_records(dates, columns=['SEQNUM', 'DATE']) if dataframe else dates)
class Worker():
    """Python-side wrapper that emits the JavaScript for an HTML5 Web Worker.

    All methods build JavaScript fragments (or register them on the page's
    builders); nothing executes in Python.  ``self._selector`` is the name of
    the JavaScript variable holding the worker object.
    """
    def __init__(self, html_code: Optional[str]=None, src: Optional[Union[(str, primitives.PageModel)]]=None, server: bool=False):
        # src is the owning page object; server selects the file-backed
        # (non-blob) worker flavour in connect().
        (self.page, self.__server) = (src, server)
        # JS variable name for the worker; auto-generated when not supplied.
        self._selector = (html_code or ('worker_%s' % id(self)))
        # Pre-declare the JS variable so connect()/postMessage() can assign it.
        self.page._props['js']['builders'].add(('var %s' % self._selector))
    def message(self):
        """JS accessor for the payload of the current worker message event."""
        return JsObjects.JsObject.JsObject.get('event.data')
    def connect(self, script: Optional[str]=None, content: Optional[str]=None, url: Optional[str]=None):
        """Attach the worker code from a file path, inline content, or URL.

        Without a server (or with inline ``content``) the code is injected as
        a <script> tag, turned into a Blob and the worker is built from the
        Blob URL.  Otherwise the worker is created directly from ``script``.
        """
        if ((not self.__server) or (content is not None)):
            # Drop any previously injected script tag for this worker, then
            # rebuild it so the Blob always reflects the latest code.
            script_content = [('if(document.getElementById("js_%(id)s") != null){document.getElementById("js_%(id)s").remove()}' % {'id': self._selector}), 'var wkScript = document.createElement("script")', ('wkScript.setAttribute("id", "js_%s")' % self._selector)]
            if (script is not None):
                # Inline the file's content (newlines stripped to keep the
                # generated JS a single string literal).
                with open(script) as f:
                    script_content.append(('wkScript.textContent = "%s"' % f.read().strip().replace('\n', '')))
            elif (url is not None):
                script_content.append(('wkScript.setAttribute("src", %s)' % JsUtils.jsConvertData(url, None)))
            else:
                script_content.append(('wkScript.textContent = "%s"' % content.strip().replace('\n', '')))
            script_content.append('document.head.appendChild(wkScript)')
            # Build a Blob from the injected script tag and spawn the worker
            # from its object URL.
            self.page.properties.js.add_builders(('\n%(content)s; var blob_%(selector)s = new Blob([document.querySelector(\'#js_%(selector)s\').textContent ], {type: "text/javascript"})\n%(selector)s = new Worker(window.URL.createObjectURL(blob_%(selector)s))' % {'content': JsUtils.jsConvertFncs(script_content, toStr=True), 'selector': self._selector}))
        else:
            self.page.properties.js.add_builders(("%s = new Worker('%s')" % (self._selector, script)))
        # NOTE(review): this return re-creates the worker from `script` even
        # on the blob path above (where script may be None) — confirm this is
        # the intended framework contract before relying on the return value.
        return JsObjects.JsVoid(("%s = new Worker('%s')" % (self._selector, script)))
    def postMessage(self, data, components: List[primitives.HtmlModel]=None):
        """Emit JS posting ``data`` (optionally merged with component values) to the worker."""
        if (components is not None):
            data = JsData.Datamap(components=components, attrs=data)
        else:
            data = JsUtils.jsConvertData(data, None)
        return JsObjects.JsVoid(('%s.postMessage(%s)' % (self._selector, data)))
    def on(self, event_type: str, js_funcs: Union[(list, str)], profile: Optional[Union[(dict, bool)]]=None):
        """Register an event listener on the worker at page-ready time."""
        self.page.js.onReady(self.addEventListener(event_type, js_funcs, profile))
    def addEventListener(self, event_type: str, js_funcs: Union[(list, str)], profile: Optional[Union[(dict, bool)]]=None):
        """Return the JS attaching ``js_funcs`` to the given worker event."""
        return JsObjects.JsVoid(("%(varName)s.addEventListener('%(eventType)s', function (event) {%(data)s})" % {'varName': self._selector, 'eventType': event_type, 'data': JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)}))
    def receive(self, js_funcs: Union[(list, str)], profile: Optional[Union[(dict, bool)]]=None):
        """Return the JS setting the worker's onmessage handler."""
        return JsObjects.JsVoid(('%(varName)s.onmessage = function (event) {%(data)s}' % {'varName': self._selector, 'data': JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)}))
    def terminate(self):
        """Return the JS stopping the worker and clearing its variable."""
        return JsObjects.JsVoid(('%s.terminate(); %s = undefined' % (self._selector, self._selector)))
    def close(self):
        """Alias for terminate()."""
        return self.terminate()
class Application(AbstractCrudObject):
    def __init__(self, fbid=None, parent_id=None, api=None):
        """Initialize an Application node wrapper.

        Args:
            fbid: Facebook object id of the application, if known.
            parent_id: Deprecated parent node id, forwarded to the base class.
            api: FacebookAdsApi instance to use for calls.
        """
        # Marker attribute used by the SDK to identify Application instances.
        self._isApplication = True
        super(Application, self).__init__(fbid, parent_id, api)
    class Field(AbstractObject.Field):
        """String constants naming the fields exposed by the Application node."""
        aam_rules = 'aam_rules'
        an_ad_space_limit = 'an_ad_space_limit'
        an_platforms = 'an_platforms'
        android_key_hash = 'android_key_hash'
        android_sdk_error_categories = 'android_sdk_error_categories'
        app_domains = 'app_domains'
        app_events_config = 'app_events_config'
        app_events_feature_bitmask = 'app_events_feature_bitmask'
        app_events_session_timeout = 'app_events_session_timeout'
        app_install_tracked = 'app_install_tracked'
        app_name = 'app_name'
        app_signals_binding_ios = 'app_signals_binding_ios'
        app_type = 'app_type'
        auth_dialog_data_help_url = 'auth_dialog_data_help_url'
        auth_dialog_headline = 'auth_dialog_headline'
        auth_dialog_perms_explanation = 'auth_dialog_perms_explanation'
        auth_referral_default_activity_privacy = 'auth_referral_default_activity_privacy'
        auth_referral_enabled = 'auth_referral_enabled'
        auth_referral_extended_perms = 'auth_referral_extended_perms'
        auth_referral_friend_perms = 'auth_referral_friend_perms'
        auth_referral_response_type = 'auth_referral_response_type'
        auth_referral_user_perms = 'auth_referral_user_perms'
        auto_event_mapping_android = 'auto_event_mapping_android'
        auto_event_mapping_ios = 'auto_event_mapping_ios'
        auto_event_setup_enabled = 'auto_event_setup_enabled'
        auto_log_app_events_default = 'auto_log_app_events_default'
        auto_log_app_events_enabled = 'auto_log_app_events_enabled'
        business = 'business'
        canvas_fluid_height = 'canvas_fluid_height'
        canvas_fluid_width = 'canvas_fluid_width'
        canvas_url = 'canvas_url'
        category = 'category'
        client_config = 'client_config'
        company = 'company'
        configured_ios_sso = 'configured_ios_sso'
        contact_email = 'contact_email'
        created_time = 'created_time'
        creator_uid = 'creator_uid'
        daily_active_users = 'daily_active_users'
        daily_active_users_rank = 'daily_active_users_rank'
        deauth_callback_url = 'deauth_callback_url'
        default_share_mode = 'default_share_mode'
        description = 'description'
        financial_id = 'financial_id'
        gdpv4_chrome_custom_tabs_enabled = 'gdpv4_chrome_custom_tabs_enabled'
        gdpv4_enabled = 'gdpv4_enabled'
        gdpv4_nux_content = 'gdpv4_nux_content'
        gdpv4_nux_enabled = 'gdpv4_nux_enabled'
        has_messenger_product = 'has_messenger_product'
        hosting_url = 'hosting_url'
        icon_url = 'icon_url'
        id = 'id'
        ios_bundle_id = 'ios_bundle_id'
        ios_sdk_dialog_flows = 'ios_sdk_dialog_flows'
        ios_sdk_error_categories = 'ios_sdk_error_categories'
        ios_sfvc_attr = 'ios_sfvc_attr'
        ios_supports_native_proxy_auth_flow = 'ios_supports_native_proxy_auth_flow'
        ios_supports_system_auth = 'ios_supports_system_auth'
        ipad_app_store_id = 'ipad_app_store_id'
        iphone_app_store_id = 'iphone_app_store_id'
        latest_sdk_version = 'latest_sdk_version'
        link = 'link'
        logging_token = 'logging_token'
        logo_url = 'logo_url'
        migrations = 'migrations'
        mobile_profile_section_url = 'mobile_profile_section_url'
        mobile_web_url = 'mobile_web_url'
        monthly_active_users = 'monthly_active_users'
        monthly_active_users_rank = 'monthly_active_users_rank'
        name = 'name'
        namespace = 'namespace'
        object_store_urls = 'object_store_urls'
        owner_business = 'owner_business'
        page_tab_default_name = 'page_tab_default_name'
        page_tab_url = 'page_tab_url'
        photo_url = 'photo_url'
        privacy_policy_url = 'privacy_policy_url'
        profile_section_url = 'profile_section_url'
        property_id = 'property_id'
        protected_mode_rules = 'protected_mode_rules'
        real_time_mode_devices = 'real_time_mode_devices'
        restrictions = 'restrictions'
        restrictive_data_filter_params = 'restrictive_data_filter_params'
        restrictive_data_filter_rules = 'restrictive_data_filter_rules'
        sdk_update_message = 'sdk_update_message'
        seamless_login = 'seamless_login'
        secure_canvas_url = 'secure_canvas_url'
        secure_page_tab_url = 'secure_page_tab_url'
        server_ip_whitelist = 'server_ip_whitelist'
        smart_login_bookmark_icon_url = 'smart_login_bookmark_icon_url'
        smart_login_menu_icon_url = 'smart_login_menu_icon_url'
        social_discovery = 'social_discovery'
        subcategory = 'subcategory'
        suggested_events_setting = 'suggested_events_setting'
        supported_platforms = 'supported_platforms'
        supports_apprequests_fast_app_switch = 'supports_apprequests_fast_app_switch'
        supports_attribution = 'supports_attribution'
        supports_implicit_sdk_logging = 'supports_implicit_sdk_logging'
        suppress_native_ios_gdp = 'suppress_native_ios_gdp'
        terms_of_service_url = 'terms_of_service_url'
        url_scheme_suffix = 'url_scheme_suffix'
        user_support_email = 'user_support_email'
        user_support_url = 'user_support_url'
        website_url = 'website_url'
        weekly_active_users = 'weekly_active_users'
    class SupportedPlatforms():
        """Enum values for the supported_platforms field."""
        amazon = 'AMAZON'
        android = 'ANDROID'
        canvas = 'CANVAS'
        gameroom = 'GAMEROOM'
        instant_game = 'INSTANT_GAME'
        ipad = 'IPAD'
        iphone = 'IPHONE'
        mobile_web = 'MOBILE_WEB'
        oculus = 'OCULUS'
        samsung = 'SAMSUNG'
        supplementary_images = 'SUPPLEMENTARY_IMAGES'
        web = 'WEB'
        windows = 'WINDOWS'
        xiaomi = 'XIAOMI'
    class AnPlatforms():
        """Enum values for the an_platforms (Audience Network platforms) field."""
        android = 'ANDROID'
        desktop = 'DESKTOP'
        galaxy = 'GALAXY'
        instant_articles = 'INSTANT_ARTICLES'
        ios = 'IOS'
        mobile_web = 'MOBILE_WEB'
        oculus = 'OCULUS'
        unknown = 'UNKNOWN'
        xiaomi = 'XIAOMI'
    class Platform():
        """Enum values for the platform parameter (used by create_app_indexing)."""
        android = 'ANDROID'
        ios = 'IOS'
    class RequestType():
        """Enum values for the request_type parameter (used by create_app_indexing)."""
        app_indexing = 'APP_INDEXING'
        button_sampling = 'BUTTON_SAMPLING'
        plugin = 'PLUGIN'
    class MutationMethod():
        """Enum values for mutation_method parameters."""
        add = 'ADD'
        delete = 'DELETE'
        replace = 'REPLACE'
    class PostMethod():
        """Enum values for post_method parameters."""
        codeless = 'CODELESS'
        eymt = 'EYMT'
    class LoggingSource():
        """Enum values for logging_source parameters."""
        detection = 'DETECTION'
        messenger_bot = 'MESSENGER_BOT'
    class LoggingTarget():
        """Enum values for logging_target parameters."""
        app = 'APP'
        app_and_page = 'APP_AND_PAGE'
        page = 'PAGE'
def get_endpoint(cls):
return 'adnetwork_applications'
def api_create(self, parent_id, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.adobjects.business import Business
return Business(api=self._api, fbid=parent_id).create_ad_network_application(fields, params, batch, success, failure, pending)
def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if ((batch is None) and ((success is not None) or (failure is not None))):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {'advertiser_id': 'string'}
enums = {}
request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Application, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
request.add_params(params)
request.add_fields(fields)
if (batch is not None):
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def api_update(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if ((batch is None) and ((success is not None) or (failure is not None))):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {'allow_cycle_app_secret': 'bool', 'an_platforms': 'list<an_platforms_enum>', 'app_domains': 'list<string>', 'app_name': 'string', 'app_type': 'bool', 'auth_dialog_headline': 'string', 'auth_dialog_perms_explanation': 'string', 'auth_referral_default_activity_privacy': 'string', 'auth_referral_enabled': 'bool', 'auth_referral_extended_perms': 'list<string>', 'auth_referral_friend_perms': 'list<string>', 'auth_referral_response_type': 'string', 'auth_referral_user_perms': 'list<string>', 'canvas_fluid_height': 'bool', 'canvas_fluid_width': 'bool', 'canvas_url': 'string', 'contact_email': 'string', 'deauth_callback_url': 'string', 'mobile_web_url': 'string', 'namespace': 'string', 'page_tab_default_name': 'string', 'privacy_policy_url': 'string', 'restrictions': 'string', 'secure_canvas_url': 'string', 'secure_page_tab_url': 'string', 'server_ip_whitelist': 'list<string>', 'terms_of_service_url': 'string', 'url_scheme_suffix': 'string', 'user_support_email': 'string', 'user_support_url': 'string', 'website_url': 'string'}
enums = {'an_platforms_enum': Application.AnPlatforms.__dict__.values()}
request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Application, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
request.add_params(params)
request.add_fields(fields)
if (batch is not None):
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def delete_accounts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if ((batch is None) and ((success is not None) or (failure is not None))):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {'type': 'type_enum', 'uid': 'int'}
enums = {'type_enum': ['test-users']}
request = FacebookRequest(node_id=self['id'], method='DELETE', endpoint='/accounts', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
request.add_params(params)
request.add_fields(fields)
if (batch is not None):
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_accounts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if ((batch is None) and ((success is not None) or (failure is not None))):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {'type': 'type_enum'}
enums = {'type_enum': ['test-users']}
request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/accounts', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
request.add_params(params)
request.add_fields(fields)
if (batch is not None):
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def create_account(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if ((batch is None) and ((success is not None) or (failure is not None))):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {'installed': 'bool', 'minor': 'bool', 'name': 'string', 'owner_access_token': 'string', 'permissions': 'list<Permission>', 'type': 'type_enum', 'uid': 'int'}
enums = {'type_enum': ['test-users']}
request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/accounts', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
request.add_params(params)
request.add_fields(fields)
if (batch is not None):
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def create_activity(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if ((batch is None) and ((success is not None) or (failure is not None))):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {'advertiser_id': 'string', 'advertiser_tracking_enabled': 'bool', 'anon_id': 'string', 'app_user_id': 'string', 'application_tracking_enabled': 'bool', 'attribution': 'string', 'auto_publish': 'bool', 'bundle_id': 'string', 'bundle_short_version': 'string', 'bundle_version': 'string', 'campaign_ids': 'string', 'click_id': 'string', 'consider_views': 'bool', 'custom_events': 'list<Object>', 'custom_events_file': 'file', 'data_processing_options': 'list<string>', 'data_processing_options_country': 'unsigned int', 'data_processing_options_state': 'unsigned int', 'device_token': 'string', 'event': 'event_enum', 'extinfo': 'Object', 'include_dwell_data': 'bool', 'include_video_data': 'bool', 'install_referrer': 'string', 'install_timestamp': 'unsigned int', 'installer_package': 'string', 'limited_data_use': 'bool', 'migration_bundle': 'string', 'page_id': 'unsigned int', 'page_scoped_user_id': 'unsigned int', 'receipt_data': 'string', 'ud': 'map', 'url_schemes': 'list<string>', 'user_id': 'string', 'user_id_type': 'user_id_type_enum', 'vendor_id': 'string', 'windows_attribution_id': 'string'}
enums = {'event_enum': ['CUSTOM_APP_EVENTS', 'DEFERRED_APP_LINK', 'MOBILE_APP_INSTALL'], 'user_id_type_enum': ['INSTANT_GAMES_PLAYER_ID']}
request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/activities', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
request.add_params(params)
request.add_fields(fields)
if (batch is not None):
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_ad_placement_groups(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if ((batch is None) and ((success is not None) or (failure is not None))):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {}
enums = {}
request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/ad_placement_groups', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
request.add_params(params)
request.add_fields(fields)
if (batch is not None):
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_ad_network_placements(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if ((batch is None) and ((success is not None) or (failure is not None))):
api_utils.warning('`success` and `failure` callback only work for batch call.')
from facebook_business.adobjects.adplacement import AdPlacement
param_types = {'request_id': 'string'}
enums = {}
request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/adnetwork_placements', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AdPlacement, api_type='EDGE', response_parser=ObjectParser(target_class=AdPlacement, api=self._api))
request.add_params(params)
request.add_fields(fields)
if (batch is not None):
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_ad_network_analytics(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if ((batch is None) and ((success is not None) or (failure is not None))):
api_utils.warning('`success` and `failure` callback only work for batch call.')
from facebook_business.adobjects.adnetworkanalyticssyncqueryresult import AdNetworkAnalyticsSyncQueryResult
param_types = {'aggregation_period': 'aggregation_period_enum', 'breakdowns': 'list<breakdowns_enum>', 'filters': 'list<map>', 'limit': 'unsigned int', 'metrics': 'list<metrics_enum>', 'ordering_column': 'ordering_column_enum', 'ordering_type': 'ordering_type_enum', 'since': 'datetime', 'until': 'datetime'}
enums = {'aggregation_period_enum': AdNetworkAnalyticsSyncQueryResult.AggregationPeriod.__dict__.values(), 'breakdowns_enum': AdNetworkAnalyticsSyncQueryResult.Breakdowns.__dict__.values(), 'metrics_enum': AdNetworkAnalyticsSyncQueryResult.Metrics.__dict__.values(), 'ordering_column_enum': AdNetworkAnalyticsSyncQueryResult.OrderingColumn.__dict__.values(), 'ordering_type_enum': AdNetworkAnalyticsSyncQueryResult.OrderingType.__dict__.values()}
request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/adnetworkanalytics', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AdNetworkAnalyticsSyncQueryResult, api_type='EDGE', response_parser=ObjectParser(target_class=AdNetworkAnalyticsSyncQueryResult, api=self._api))
request.add_params(params)
request.add_fields(fields)
if (batch is not None):
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def create_ad_network_analytic(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if ((batch is None) and ((success is not None) or (failure is not None))):
api_utils.warning('`success` and `failure` callback only work for batch call.')
from facebook_business.adobjects.adnetworkanalyticssyncqueryresult import AdNetworkAnalyticsSyncQueryResult
param_types = {'aggregation_period': 'aggregation_period_enum', 'breakdowns': 'list<breakdowns_enum>', 'filters': 'list<Object>', 'limit': 'int', 'metrics': 'list<metrics_enum>', 'ordering_column': 'ordering_column_enum', 'ordering_type': 'ordering_type_enum', 'since': 'datetime', 'until': 'datetime'}
enums = {'aggregation_period_enum': AdNetworkAnalyticsSyncQueryResult.AggregationPeriod.__dict__.values(), 'breakdowns_enum': AdNetworkAnalyticsSyncQueryResult.Breakdowns.__dict__.values(), 'metrics_enum': AdNetworkAnalyticsSyncQueryResult.Metrics.__dict__.values(), 'ordering_column_enum': AdNetworkAnalyticsSyncQueryResult.OrderingColumn.__dict__.values(), 'ordering_type_enum': AdNetworkAnalyticsSyncQueryResult.OrderingType.__dict__.values()}
request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/adnetworkanalytics', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Application, api_type='EDGE', response_parser=ObjectParser(target_class=Application, api=self._api))
request.add_params(params)
request.add_fields(fields)
if (batch is not None):
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_ad_network_analytics_results(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if ((batch is None) and ((success is not None) or (failure is not None))):
api_utils.warning('`success` and `failure` callback only work for batch call.')
from facebook_business.adobjects.adnetworkanalyticsasyncqueryresult import AdNetworkAnalyticsAsyncQueryResult
param_types = {'query_ids': 'list<string>'}
enums = {}
request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/adnetworkanalytics_results', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AdNetworkAnalyticsAsyncQueryResult, api_type='EDGE', response_parser=ObjectParser(target_class=AdNetworkAnalyticsAsyncQueryResult, api=self._api))
request.add_params(params)
request.add_fields(fields)
if (batch is not None):
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_aem_attribution(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if ((batch is None) and ((success is not None) or (failure is not None))):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {'advertiser_ids': 'list<string>', 'fb_content_data': 'string'}
enums = {}
request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/aem_attribution', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
request.add_params(params)
request.add_fields(fields)
if (batch is not None):
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_aem_conversion_configs(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if ((batch is None) and ((success is not None) or (failure is not None))):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {'advertiser_ids': 'list<string>'}
enums = {}
request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/aem_conversion_configs', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
request.add_params(params)
request.add_fields(fields)
if (batch is not None):
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_aem_conversion_filter(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if ((batch is None) and ((success is not None) or (failure is not None))):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {'catalog_id': 'string', 'fb_content_ids': 'string'}
enums = {}
request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/aem_conversion_filter', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
request.add_params(params)
request.add_fields(fields)
if (batch is not None):
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def create_aem_conversion(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if ((batch is None) and ((success is not None) or (failure is not None))):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {'aem_conversions': 'list<map>'}
enums = {}
request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/aem_conversions', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
request.add_params(params)
request.add_fields(fields)
if (batch is not None):
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def create_aem_skan_readiness(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if ((batch is None) and ((success is not None) or (failure is not None))):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {'app_id': 'int', 'is_aem_ready': 'bool', 'is_app_aem_install_ready': 'bool', 'is_app_aem_ready': 'bool', 'is_skan_ready': 'bool', 'message': 'string'}
enums = {}
request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/aem_skan_readiness', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
request.add_params(params)
request.add_fields(fields)
if (batch is not None):
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_agencies(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if ((batch is None) and ((success is not None) or (failure is not None))):
api_utils.warning('`success` and `failure` callback only work for batch call.')
from facebook_business.adobjects.business import Business
param_types = {}
enums = {}
request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/agencies', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Business, api_type='EDGE', response_parser=ObjectParser(target_class=Business, api=self._api))
request.add_params(params)
request.add_fields(fields)
if (batch is not None):
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def create_aggregate_revenue(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if ((batch is None) and ((success is not None) or (failure is not None))):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {'ecpms': 'list<string>', 'query_ids': 'list<string>', 'request_id': 'string', 'sync_api': 'bool'}
enums = {}
request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/aggregate_revenue', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
request.add_params(params)
request.add_fields(fields)
if (batch is not None):
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_android_dialog_configs(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if ((batch is None) and ((success is not None) or (failure is not None))):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {}
enums = {}
request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/android_dialog_configs', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
request.add_params(params)
request.add_fields(fields)
if (batch is not None):
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_app_capi_settings(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if ((batch is None) and ((success is not None) or (failure is not None))):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {}
enums = {}
request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/app_capi_settings', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
request.add_params(params)
request.add_fields(fields)
if (batch is not None):
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_app_event_types(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if ((batch is None) and ((success is not None) or (failure is not None))):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {}
enums = {}
request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/app_event_types', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
request.add_params(params)
request.add_fields(fields)
if (batch is not None):
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def create_app_indexing(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST to the /app_indexing edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    arg_specs = {
        'app_version': 'string',
        'device_session_id': 'string',
        'extra_info': 'string',
        'platform': 'platform_enum',
        'request_type': 'request_type_enum',
        'tree': 'map',
    }
    arg_enums = {
        'platform_enum': Application.Platform.__dict__.values(),
        'request_type_enum': Application.RequestType.__dict__.values(),
    }
    req = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/app_indexing',
        api=self._api,
        param_checker=TypeChecker(arg_specs, arg_enums),
        target_class=Application,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Application, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_app_indexing_session(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST to the /app_indexing_session edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    arg_specs = {'device_session_id': 'string', 'extinfo': 'string'}
    req = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/app_indexing_session',
        api=self._api,
        param_checker=TypeChecker(arg_specs, {}),
        target_class=Application,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Application, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_app_installed_groups(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /app_installed_groups edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the GET call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.group import Group
    arg_specs = {'group_id': 'string'}
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/app_installed_groups',
        api=self._api,
        param_checker=TypeChecker(arg_specs, {}),
        target_class=Group,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Group, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_app_push_device_token(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST to the /app_push_device_token edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    arg_specs = {'device_id': 'string', 'device_token': 'string', 'platform': 'platform_enum'}
    arg_enums = {'platform_enum': Application.Platform.__dict__.values()}
    req = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/app_push_device_token',
        api=self._api,
        param_checker=TypeChecker(arg_specs, arg_enums),
        target_class=Application,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Application, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_app_assets(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /appassets edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the GET call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/appassets',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_asset(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST to the /assets edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    arg_specs = {'asset': 'file', 'comment': 'string', 'type': 'string'}
    req = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/assets',
        api=self._api,
        param_checker=TypeChecker(arg_specs, {}),
        target_class=Application,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Application, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_authorized_ad_accounts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /authorized_adaccounts edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the GET call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.adaccount import AdAccount
    arg_specs = {'business': 'string'}
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/authorized_adaccounts',
        api=self._api,
        param_checker=TypeChecker(arg_specs, {}),
        target_class=AdAccount,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AdAccount, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_button_auto_detection_device_selection(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /button_auto_detection_device_selection edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the GET call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    arg_specs = {'device_id': 'string'}
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/button_auto_detection_device_selection',
        api=self._api,
        param_checker=TypeChecker(arg_specs, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_cloudbridge_settings(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /cloudbridge_settings edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the GET call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/cloudbridge_settings',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_codeless_event_mapping(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST to the /codeless_event_mappings edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    arg_specs = {
        'mappings': 'list<map>',
        'mutation_method': 'mutation_method_enum',
        'platform': 'platform_enum',
        'post_method': 'post_method_enum',
    }
    arg_enums = {
        'mutation_method_enum': Application.MutationMethod.__dict__.values(),
        'platform_enum': Application.Platform.__dict__.values(),
        'post_method_enum': Application.PostMethod.__dict__.values(),
    }
    req = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/codeless_event_mappings',
        api=self._api,
        param_checker=TypeChecker(arg_specs, arg_enums),
        target_class=Application,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Application, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_da_checks(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /da_checks edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the GET call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.dacheck import DACheck
    arg_specs = {'checks': 'list<string>', 'connection_method': 'connection_method_enum'}
    arg_enums = {'connection_method_enum': DACheck.ConnectionMethod.__dict__.values()}
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/da_checks',
        api=self._api,
        param_checker=TypeChecker(arg_specs, arg_enums),
        target_class=DACheck,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=DACheck, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_events(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /events edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the GET call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.event import Event
    arg_specs = {'include_canceled': 'bool', 'type': 'type_enum'}
    arg_enums = {'type_enum': Event.Type.__dict__.values()}
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/events',
        api=self._api,
        param_checker=TypeChecker(arg_specs, arg_enums),
        target_class=Event,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Event, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_iap_purchases(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /iap_purchases edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the GET call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    arg_specs = {'order_id': 'string'}
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/iap_purchases',
        api=self._api,
        param_checker=TypeChecker(arg_specs, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_insights_push_schedule(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /insights_push_schedule edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the GET call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/insights_push_schedule',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_ios_dialog_configs(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /ios_dialog_configs edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the GET call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/ios_dialog_configs',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_linked_dataset(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /linked_dataset edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the GET call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/linked_dataset',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_mmp_auditing(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST to the /mmp_auditing edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    arg_specs = {
        'advertiser_id': 'string',
        'attribution': 'string',
        'attribution_model': 'string',
        'auditing_token': 'string',
        'click_attr_window': 'unsigned int',
        'custom_events': 'list<Object>',
        'decline_reason': 'string',
        'event': 'string',
        'event_reported_time': 'unsigned int',
        'fb_ad_id': 'unsigned int',
        'fb_click_time': 'unsigned int',
        'fb_view_time': 'unsigned int',
        'is_fb': 'bool',
        'used_install_referrer': 'bool',
        'view_attr_window': 'unsigned int',
    }
    req = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/mmp_auditing',
        api=self._api,
        param_checker=TypeChecker(arg_specs, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_mobile_sdk_gk(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /mobile_sdk_gk edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the GET call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    arg_specs = {
        'device_id': 'string',
        'extinfo': 'Object',
        'os_version': 'string',
        'platform': 'platform_enum',
        'sdk_version': 'string',
    }
    arg_enums = {'platform_enum': ['ANDROID', 'IOS']}
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/mobile_sdk_gk',
        api=self._api,
        param_checker=TypeChecker(arg_specs, arg_enums),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_monetized_digital_store_objects(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /monetized_digital_store_objects edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the GET call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/monetized_digital_store_objects',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_monetized_digital_store_object(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST to the /monetized_digital_store_objects edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    arg_specs = {'content_id': 'string', 'store': 'string'}
    req = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/monetized_digital_store_objects',
        api=self._api,
        param_checker=TypeChecker(arg_specs, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_object_types(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /object_types edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the GET call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.nullnode import NullNode
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/object_types',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=NullNode,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=NullNode, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_objects(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /objects edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the GET call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.nullnode import NullNode
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/objects',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=NullNode,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=NullNode, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_occludes_popup(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST to the /occludespopups edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    arg_specs = {'flash': 'bool', 'unity': 'bool'}
    req = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/occludespopups',
        api=self._api,
        param_checker=TypeChecker(arg_specs, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_page_activity(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST to the /page_activities edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    arg_specs = {
        'advertiser_tracking_enabled': 'bool',
        'application_tracking_enabled': 'bool',
        'custom_events': 'list<Object>',
        'logging_source': 'logging_source_enum',
        'logging_target': 'logging_target_enum',
        'page_id': 'unsigned int',
        'page_scoped_user_id': 'unsigned int',
    }
    arg_enums = {
        'logging_source_enum': Application.LoggingSource.__dict__.values(),
        'logging_target_enum': Application.LoggingTarget.__dict__.values(),
    }
    req = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/page_activities',
        api=self._api,
        param_checker=TypeChecker(arg_specs, arg_enums),
        target_class=Application,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Application, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_payment_currency(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST to the /payment_currencies edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    arg_specs = {'currency_url': 'string'}
    req = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/payment_currencies',
        api=self._api,
        param_checker=TypeChecker(arg_specs, {}),
        target_class=Application,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Application, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_permissions(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /permissions edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the GET call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    arg_specs = {
        'android_key_hash': 'string',
        'ios_bundle_id': 'string',
        'permission': 'list<Permission>',
        'proxied_app_id': 'int',
        'status': 'list<status_enum>',
    }
    arg_enums = {'status_enum': ['live', 'unapproved']}
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/permissions',
        api=self._api,
        param_checker=TypeChecker(arg_specs, arg_enums),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_products(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /products edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the GET call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    arg_specs = {'product_ids': 'list<string>'}
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/products',
        api=self._api,
        param_checker=TypeChecker(arg_specs, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_purchases(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /purchases edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the GET call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/purchases',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_roles(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /roles edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the GET call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/roles',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_server_domain_infos(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /server_domain_infos edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the GET call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/server_domain_infos',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_subscribed_domains(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /subscribed_domains edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the GET call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/subscribed_domains',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_subscribed_domain(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST to the /subscribed_domains edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    arg_specs = {'subscribe': 'list<string>', 'unsubscribe': 'list<string>'}
    req = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/subscribed_domains',
        api=self._api,
        param_checker=TypeChecker(arg_specs, {}),
        target_class=Application,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Application, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_subscribed_domains_phishing(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /subscribed_domains_phishing edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the GET call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    req = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/subscribed_domains_phishing',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_subscribed_domains_phishing(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST to the /subscribed_domains_phishing edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    arg_specs = {'subscribe': 'list<string>', 'unsubscribe': 'list<string>'}
    req = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/subscribed_domains_phishing',
        api=self._api,
        param_checker=TypeChecker(arg_specs, {}),
        target_class=Application,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Application, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def delete_subscriptions(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """DELETE against the /subscriptions edge of this application.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    supplied; otherwise executes the call and returns its result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are only honored when the request joins a batch.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    arg_specs = {'fields': 'list<string>', 'object': 'string'}
    req = FacebookRequest(
        node_id=self['id'],
        method='DELETE',
        endpoint='/subscriptions',
        api=self._api,
        param_checker=TypeChecker(arg_specs, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_subscription(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Build a POST request for the /subscriptions edge (webhook setup).

    Returns the FacebookRequest when batched or pending; otherwise
    executes it immediately and returns the parsed response.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are dispatched by the batch machinery only.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    req = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/subscriptions',
        api=self._api,
        param_checker=TypeChecker(
            {'callback_url': 'string', 'fields': 'list<string>', 'include_values': 'bool', 'object': 'string', 'verify_token': 'string'},
            {},
        ),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_upload(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Build a POST request for the /uploads edge (upload session).

    Returns the FacebookRequest when batched or pending; otherwise
    executes it immediately and returns the parsed response.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        # Callbacks are dispatched by the batch machinery only.
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    req = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/uploads',
        api=self._api,
        param_checker=TypeChecker(
            {'file_length': 'unsigned int', 'file_name': 'Object', 'file_type': 'Object', 'session_type': 'session_type_enum'},
            {'session_type_enum': ['attachment']},
        ),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
# Graph API field name -> declared type string; consumed by the SDK's
# response parser / type checker for Application nodes.
_field_types = {'aam_rules': 'string', 'an_ad_space_limit': 'unsigned int', 'an_platforms': 'list<string>', 'android_key_hash': 'list<string>', 'android_sdk_error_categories': 'list<Object>', 'app_domains': 'list<string>', 'app_events_config': 'Object', 'app_events_feature_bitmask': 'unsigned int', 'app_events_session_timeout': 'unsigned int', 'app_install_tracked': 'bool', 'app_name': 'string', 'app_signals_binding_ios': 'list<Object>', 'app_type': 'unsigned int', 'auth_dialog_data_help_url': 'string', 'auth_dialog_headline': 'string', 'auth_dialog_perms_explanation': 'string', 'auth_referral_default_activity_privacy': 'string', 'auth_referral_enabled': 'unsigned int', 'auth_referral_extended_perms': 'list<string>', 'auth_referral_friend_perms': 'list<string>', 'auth_referral_response_type': 'string', 'auth_referral_user_perms': 'list<string>', 'auto_event_mapping_android': 'list<Object>', 'auto_event_mapping_ios': 'list<Object>', 'auto_event_setup_enabled': 'bool', 'auto_log_app_events_default': 'bool', 'auto_log_app_events_enabled': 'bool', 'business': 'Business', 'canvas_fluid_height': 'bool', 'canvas_fluid_width': 'unsigned int', 'canvas_url': 'string', 'category': 'string', 'client_config': 'map', 'company': 'string', 'configured_ios_sso': 'bool', 'contact_email': 'string', 'created_time': 'datetime', 'creator_uid': 'string', 'daily_active_users': 'string', 'daily_active_users_rank': 'unsigned int', 'deauth_callback_url': 'string', 'default_share_mode': 'string', 'description': 'string', 'financial_id': 'string', 'gdpv4_chrome_custom_tabs_enabled': 'bool', 'gdpv4_enabled': 'bool', 'gdpv4_nux_content': 'string', 'gdpv4_nux_enabled': 'bool', 'has_messenger_product': 'bool', 'hosting_url': 'string', 'icon_url': 'string', 'id': 'string', 'ios_bundle_id': 'list<string>', 'ios_sdk_dialog_flows': 'Object', 'ios_sdk_error_categories': 'list<Object>', 'ios_sfvc_attr': 'bool', 'ios_supports_native_proxy_auth_flow': 'bool', 'ios_supports_system_auth': 'bool', 
'ipad_app_store_id': 'string', 'iphone_app_store_id': 'string', 'latest_sdk_version': 'Object', 'link': 'string', 'logging_token': 'string', 'logo_url': 'string', 'migrations': 'map<string, bool>', 'mobile_profile_section_url': 'string', 'mobile_web_url': 'string', 'monthly_active_users': 'string', 'monthly_active_users_rank': 'unsigned int', 'name': 'string', 'namespace': 'string', 'object_store_urls': 'Object', 'owner_business': 'Business', 'page_tab_default_name': 'string', 'page_tab_url': 'string', 'photo_url': 'string', 'privacy_policy_url': 'string', 'profile_section_url': 'string', 'property_id': 'string', 'protected_mode_rules': 'Object', 'real_time_mode_devices': 'list<string>', 'restrictions': 'Object', 'restrictive_data_filter_params': 'string', 'restrictive_data_filter_rules': 'string', 'sdk_update_message': 'string', 'seamless_login': 'int', 'secure_canvas_url': 'string', 'secure_page_tab_url': 'string', 'server_ip_whitelist': 'string', 'smart_login_bookmark_icon_url': 'string', 'smart_login_menu_icon_url': 'string', 'social_discovery': 'unsigned int', 'subcategory': 'string', 'suggested_events_setting': 'string', 'supported_platforms': 'list<SupportedPlatforms>', 'supports_apprequests_fast_app_switch': 'Object', 'supports_attribution': 'bool', 'supports_implicit_sdk_logging': 'bool', 'suppress_native_ios_gdp': 'bool', 'terms_of_service_url': 'string', 'url_scheme_suffix': 'string', 'user_support_email': 'string', 'user_support_url': 'string', 'website_url': 'string', 'weekly_active_users': 'string'}
def _get_field_enum_info(cls):
    """Return a mapping of enum class names to their allowed values."""
    enum_holders = (
        'SupportedPlatforms',
        'AnPlatforms',
        'Platform',
        'RequestType',
        'MutationMethod',
        'PostMethod',
        'LoggingSource',
        'LoggingTarget',
    )
    # Each holder is a plain class whose attributes enumerate the values.
    return {holder: getattr(Application, holder).__dict__.values() for holder in enum_holders}
def falcon_generate_output(model, tokenizer, params, device, context_len=2048):
    """Stream text generated by a Falcon model for ``params['prompt']``.

    Runs ``model.generate`` on a worker thread and yields the accumulated
    output string after every new token arrives from the streamer.
    NOTE(review): ``device`` and ``context_len`` are unused here — confirm intent.
    """
    tokenizer.bos_token_id = 1
    print(params)
    # NOTE(review): this stop *string* is shadowed by the StopOnTokens
    # instance bound to `stop` below and is never applied to the output —
    # looks unintentional; confirm against the upstream fork.
    stop = params.get('stop', '###')
    prompt = params['prompt']
    query = prompt
    print('Query Message: ', query)
    input_ids = tokenizer(query, return_tensors='pt').input_ids
    input_ids = input_ids.to(model.device)
    streamer = TextIteratorStreamer(tokenizer, timeout=10.0, skip_prompt=True, skip_special_tokens=True)
    tokenizer.bos_token_id = 1  # NOTE(review): redundant — already set above
    # Token ids that terminate generation (0 presumably the EOS/pad id —
    # TODO confirm for the loaded tokenizer).
    stop_token_ids = [0]
    class StopOnTokens(StoppingCriteria):
        # Stop as soon as the most recently generated token is a stop id.
        def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor, **kwargs) -> bool:
            for stop_id in stop_token_ids:
                if (input_ids[0][(- 1)] == stop_id):
                    return True
            return False
    stop = StopOnTokens()
    generate_kwargs = dict(input_ids=input_ids, max_new_tokens=512, temperature=1.0, do_sample=True, top_k=1, streamer=streamer, repetition_penalty=1.7, stopping_criteria=StoppingCriteriaList([stop]))
    # Generate on a background thread so this generator can consume the
    # streamer incrementally.
    t = Thread(target=model.generate, kwargs=generate_kwargs)
    t.start()
    out = ''
    for new_text in streamer:
        out += new_text
        (yield out)
def test_data_quality_test_value_quantile() -> None:
    """TestColumnQuantile fails for lt=1 at q=0.7 and passes for lt=0.7 at q=0.2."""
    current = pd.DataFrame({'feature1': [0, 1, 2, 3], 'target': [0, 0, 0, 1], 'prediction': [0, 0, 1, 1]})
    # The 0.7-quantile of feature1 is >= 1, so this suite must fail.
    failing = TestSuite(tests=[TestColumnQuantile(column_name='feature1', quantile=0.7, lt=1)])
    failing.run(current_data=current, reference_data=None, column_mapping=ColumnMapping())
    assert not failing
    # The 0.2-quantile is below 0.7, so this suite must pass and render.
    passing = TestSuite(tests=[TestColumnQuantile(column_name='feature1', quantile=0.2, lt=0.7)])
    passing.run(current_data=current, reference_data=None, column_mapping=ColumnMapping())
    passing._inner_suite.raise_for_error()
    assert passing
    assert passing.show()
    assert passing.json()
def recv_closure(sock, hdrlen, fmts, verbose=False):
    """Return a receive helper bound to *sock*.

    The returned function reads *nbytes* bytes (defaulting to *hdrlen*)
    from the socket. With a *fmt* key it unpacks them via the struct
    format ``fmts[fmt]``; otherwise it decodes stripped ASCII text.
    When *verbose*, each message (and optional expectation) is printed.
    """
    def recv_msg(nbytes=None, fmt=None, expect=''):
        raw = sock.recv(hdrlen if nbytes is None else nbytes)
        if fmt:
            msg = struct.unpack(fmts[fmt], raw)
        else:
            msg = raw.decode('ascii').strip()
        if verbose:
            suffix = f", expected '{expect}'" if expect else expect
            print(f'RECEIVED: {msg}{suffix}')
        return msg
    return recv_msg
class OutputTypeBaseInformationError(ErsiliaError):
    """Raised when model metadata declares an output type outside the defaults."""
    def __init__(self):
        allowed = ', '.join(_read_default_fields('Output Type'))
        self.message = 'Wrong Ersilia output type'
        self.hints = 'Only output types allowed: {}. More than one output type can be added in list format'.format(allowed)
        super().__init__(self.message, self.hints)
def test_setting_fullnameOverride():
    """fullnameOverride must rename both the daemonset and the deployment."""
    config = "\ndeployment:\n enabled: true\nfullnameOverride: 'filebeat-custom'\n"
    custom_name = 'filebeat-custom'
    r = helm_template(config)
    # Both workload kinds carry the overridden name and the shared pod spec.
    for kind in ('daemonset', 'deployment'):
        assert custom_name in r[kind]
        pod_spec = r[kind][custom_name]['spec']['template']['spec']
        assert pod_spec['containers'][0]['name'] == project
        assert pod_spec['serviceAccountName'] == name
    # The data hostPath volume is derived from the overridden name.
    volumes = r['daemonset'][custom_name]['spec']['template']['spec']['volumes']
    expected_volume = {'name': 'data', 'hostPath': {'path': (('/var/lib/' + custom_name) + '-default-data'), 'type': 'DirectoryOrCreate'}}
    assert expected_volume in volumes
class bsn_debug_counter_stats_reply(bsn_stats_reply):
    """Big Switch Networks experimenter stats reply carrying debug counters.

    Generated loxigen-style wire codec: a standard OpenFlow stats-reply
    header (version 6), the BSN experimenter id/subtype, then a packed
    list of bsn_debug_counter_stats_entry structs.
    NOTE(review): pack() joins str fragments ('\\x00' padding, ''.join) —
    this codec targets Python 2 where str is the byte type.
    """
    version = 6
    type = 19  # stats reply message type
    stats_type = 65535  # experimenter stats
    experimenter = 6035143  # Big Switch Networks experimenter id
    subtype = 12
    def __init__(self, xid=None, flags=None, entries=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (entries != None):
            self.entries = entries
        else:
            self.entries = []
        return
    def pack(self):
        """Serialize to wire format, back-patching the length field."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))  # 4-byte pad
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)  # patch the placeholder at index 2
        return ''.join(packed)
    def unpack(reader):
        """Deserialize from *reader*; asserts all fixed header fields."""
        obj = bsn_debug_counter_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        # Constrain further reads to this message (4 bytes already consumed).
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 65535)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)  # pad
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 12)
        obj.entries = loxi.generic_util.unpack_list(reader, ofp.common.bsn_debug_counter_stats_entry.unpack)
        return obj
    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.entries != other.entries):
            return False
        return True
    def pretty_print(self, q):
        """Render a human-readable dump via the pretty-printer *q*."""
        q.text('bsn_debug_counter_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('entries = ')
                q.pp(self.entries)
            q.breakable()
        q.text('}')
def compare_envs(state, eids, socket, env_path=DEFAULT_ENV_PATH):
    """Merge the plot windows of several environments for side-by-side
    comparison and stream the merged windows to *socket*.

    Windows are matched across envs by title; every trace is renamed with
    a '<envIndex>_' prefix, a legend table mapping env ids to indices is
    appended, and only windows that actually gained traces from a second
    env are kept.

    Args:
        state: in-memory mapping of env id -> env dict (mutated as cache).
        eids: env ids to compare.
        socket: websocket-like object; merged windows are sent via
            ``write_message`` and its ``eid`` attribute is set to *eids*.
        env_path: directory holding serialized '<eid>.json' envs, or None.
    """
    logging.info('comparing envs')
    # Stable numeric label per env id, used to prefix trace names.
    eidNums = {e: str(i) for (i, e) in enumerate(eids)}
    env = {}
    envs = {}
    # Resolve each env from memory, or lazily from its JSON file on disk.
    for eid in eids:
        if (eid in state):
            envs[eid] = state.get(eid)
        elif (env_path is not None):
            # BUGFIX: was os.path.join(env_path, eid.strip(), '.json'),
            # which yields '<env_path>/<eid>/.json' — saved envs never
            # loaded. Build '<env_path>/<eid>.json' instead.
            p = os.path.join(env_path, (eid.strip() + '.json'))
            if os.path.exists(p):
                with open(p, 'r') as fn:
                    env = tornado.escape.json_decode(fn.read())
                state[eid] = env
                envs[eid] = env
    # Seed the result with a copy of the first env and rename its window
    # ids with a '_compare' suffix so they cannot clash with live windows.
    res = copy.deepcopy(envs[list(envs.keys())[0]])
    name2Wid = {res['jsons'][wid].get('title', None): (wid + '_compare') for wid in res.get('jsons', {}) if ('title' in res['jsons'][wid])}
    for wid in list(res['jsons'].keys()):
        res['jsons'][(wid + '_compare')] = res['jsons'][wid]
        res['jsons'][wid] = None
        res['jsons'].pop(wid)
    # Fold every env's plot traces into the title-matched merged window.
    for (ix, eid) in enumerate(sorted(envs.keys())):
        env = envs[eid]
        for wid in env.get('jsons', {}).keys():
            win = env['jsons'][wid]
            if (win.get('type', None) != 'plot'):
                continue
            if ('content' not in win):
                continue
            if ('title' not in win):
                continue
            title = win['title']
            if ((title not in name2Wid) or (title == '')):
                continue
            destWid = name2Wid[title]
            destWidJson = res['jsons'][destWid]
            if (ix == 0):
                # First env: initialize the merged window in place.
                if ('name' not in destWidJson['content']['data'][0]):
                    continue
                destWidJson['has_compare'] = False
                destWidJson['content']['layout']['showlegend'] = True
                destWidJson['contentID'] = get_rand_id()
                for (dataIdx, data) in enumerate(destWidJson['content']['data']):
                    if ('name' not in data):
                        break
                    destWidJson['content']['data'][dataIdx]['name'] = '{}_{}'.format(eidNums[eid], data['name'])
            else:
                # Later envs: append prefixed copies of their traces.
                if ('name' not in destWidJson['content']['data'][0]):
                    continue
                destWidJson['has_compare'] = True
                for (_dataIdx, data) in enumerate(win['content']['data']):
                    data = copy.deepcopy(data)
                    if ('name' not in data):
                        # Unnamed traces cannot be attributed to an env.
                        destWidJson['has_compare'] = False
                        break
                    data['name'] = '{}_{}'.format(eidNums[eid], data['name'])
                    destWidJson['content']['data'].append(data)
    # Drop windows that never received traces from a second env.
    for destWid in list(res['jsons'].keys()):
        if (('has_compare' not in res['jsons'][destWid]) or (not res['jsons'][destWid]['has_compare'])):
            del res['jsons'][destWid]
    # Append a text window with the env id -> numeric prefix legend.
    tableRows = ['<tr> <td> {} </td> <td> {} </td> </tr>'.format(v, eidNums[v]) for v in eidNums]
    tbl = '"<style>\n table, th, td {{\n border: 1px solid black;\n }}\n </style>\n <table> {} </table>'.format(' '.join(tableRows))
    res['jsons']['window_compare_legend'] = {'command': 'window', 'version': 1, 'id': 'window_compare_legend', 'title': 'compare_legend', 'inflate': True, 'width': None, 'height': None, 'contentID': 'compare_legend', 'content': tbl, 'type': 'text', 'layout': {'title': 'compare_legend'}, 'i': 1, 'has_compare': True}
    if ('reload' in res):
        socket.write_message(json.dumps({'command': 'reload', 'data': res['reload']}))
    # Send windows in their saved order ('i' first), then the layout command.
    jsons = list(res.get('jsons', {}).values())
    windows = sorted(jsons, key=(lambda k: (('i' not in k), k.get('i', None))))
    for v in windows:
        socket.write_message(v)
    socket.write_message(json.dumps({'command': 'layout'}))
    socket.eid = eids
def test_cbor_json(config_env: Dict):
    """Verify the CBOR payload decodes to (or deliberately differs from) the reference JSON."""
    if CONFIG_ERROR in config_env:
        fail(f'Config Error: {config_env[CONFIG_ERROR]}')
    expected = config_env[EXPECTED_RESULTS]
    if EXPECTED_VALID_JSON not in expected:
        skip(f'Test not requested: {EXPECTED_VALID_JSON}')
    if not ({CBOR, JSON} <= config_env.keys()):
        skip(f'Test dataset does not contain {CBOR} and/or {JSON}')
    decoded = loads(unhexlify(config_env[CBOR]), object_hook=_object_hook)
    if expected[EXPECTED_DECODE]:
        # HCERT payloads nest the certificate under claim key 1.
        if PAYLOAD_HCERT in decoded:
            decoded = decoded[PAYLOAD_HCERT][1]
        assert _ordered(decoded) == _ordered(config_env[JSON])
    else:
        assert _ordered(decoded) != _ordered(config_env[JSON])
def _create_version():
    """Return the package version, with the git revision when available.

    Reads VERSION.txt next to this file; if the directory is a git
    checkout, appends the short commit hash. Every failure is tolerated
    so the caller always gets a string ('Unknown' in the worst case).
    """
    import os
    from subprocess import STDOUT, CalledProcessError, check_output
    version = 'Unknown'
    root = os.path.dirname(os.path.abspath(__file__))
    try:
        with open(os.path.join(root, 'VERSION.txt'), 'r') as f:
            version = f.read().strip()
    except IOError as err:
        print(err)
    try:
        # Argument list instead of a shell string: no shell involved,
        # same output (best practice for subprocess).
        rev = check_output(['git', 'rev-parse', '--short', 'HEAD'], cwd=root, stderr=STDOUT).strip().decode()
        version = ('%s (rev %s)' % (version, rev))
    except (IOError, CalledProcessError, OSError):
        # Not a git checkout / git missing — ship the plain version.
        pass
    return version
class StatusDialog(QDialog):
    """Modal dialog hosting *widget* with Run/Close buttons and a status bar.

    Emits ``run`` when the Run button is clicked. While the Close button
    is disabled (e.g. a run in progress), Escape is swallowed so the
    dialog cannot be dismissed.
    """
    close = Signal()
    run = Signal()
    def __init__(self, title: str, widget: QWidget, parent: QObject=None):
        QDialog.__init__(self, parent)
        self.setWindowTitle(title)
        self.setModal(True)
        # Strip context-help and close decorations from the title bar so
        # the dialog can only be left through its own buttons.
        self.setWindowFlags((self.windowFlags() | Qt.CustomizeWindowHint))
        self.setWindowFlags((self.windowFlags() & (~ Qt.WindowContextHelpButtonHint)))
        self.setWindowFlags((self.windowFlags() & (~ Qt.WindowCloseButtonHint)))
        layout = QVBoxLayout()
        layout.addWidget(widget)
        run_button = QPushButton('Run')
        run_button.setAutoDefault(True)
        run_button.setObjectName('RUN')
        run_button.clicked.connect(self.run)
        self._close_button = QPushButton('Close')
        self._close_button.setAutoDefault(False)
        self._close_button.setObjectName('CLOSE')
        self._close_button.clicked.connect(self.accept)
        self._status_bar = QStatusBar()
        button_layout = QHBoxLayout()
        button_layout.addStretch()
        button_layout.addWidget(run_button)
        button_layout.addWidget(self._close_button)
        layout.addWidget(self._status_bar)
        layout.addLayout(button_layout)
        self.setLayout(layout)
    def keyPressEvent(self, q_key_event):
        # Swallow Escape while closing is disallowed; defer otherwise.
        if ((not self._close_button.isEnabled()) and (q_key_event.key() == Qt.Key_Escape)):
            pass
        else:
            QDialog.keyPressEvent(self, q_key_event)
    def enable_button(self, caption, enabled: bool=True):
        """Enable/disable the button whose objectName matches *caption*.

        BUGFIX: object names are registered upper-case ('RUN', 'CLOSE');
        the previous str(caption).capitalize() lookup produced 'Run'/'Close'
        and could never find a button.
        """
        button = cast(QPushButton, self.findChild(QPushButton, str(caption).upper()))
        if (button is not None):
            button.setEnabled(enabled)
    def enable_buttons(self, enabled: bool=True):
        """Enable/disable every push button in the dialog."""
        buttons = cast(List[QPushButton], self.findChildren(QPushButton))
        for button in buttons:
            button.setEnabled(enabled)
    (RunModelEvent)  # NOTE(review): looks like a stripped @Slot(RunModelEvent) decorator — confirm against VCS
    def progress_update(self, event: RunModelEvent):
        """Reflect run-model progress events in the status bar."""
        if isinstance(event, RunModelStatusEvent):
            self._status_bar.showMessage(f'{event.msg}')
        elif isinstance(event, RunModelTimeEvent):
            self._status_bar.showMessage(f'Estimated remaining time {event.remaining_time:.2f}s')
    ()  # NOTE(review): likely a stripped @Slot() decorator — confirm against VCS
    def clear_status(self):
        """Clear any message currently shown in the status bar."""
        self._status_bar.clearMessage()
class InputsBuilder(DataBuilder, ABC):
    """DataBuilder that turns raw sentences into stacked model input tensors."""
    def src_inputs(self, srcs: List[str]) -> torch.LongTensor:
        """Encode EOS-terminated source sentences into stacked (ids, mask)."""
        terminated = [f'{sent} {self.tokenizer.eos_token}' for sent in srcs]
        (ids, padding_mask) = self.encode(terminated)
        return torch.stack([ids, padding_mask], dim=1).long()
    def tgt_inputs(self, tgts: List[str]) -> torch.LongTensor:
        """Target-side encoding is provided by concrete subclasses."""
        raise NotImplementedError
class ActionInputDirective(ActionDirective):
    """Test-fixture directive that supplies an input value to an action case,
    optionally routed into the job 'control' or 'context' slot."""
    def name(cls):
        # Directive keyword as written in fixture files.
        return 'input'
    def get_full_grammar(cls):
        # Grammar: [job (control|context)] input [<data type>] : <name> : <value>
        return (((((((super(ActionInputDirective, cls).get_full_grammar() + Optional((Literal('job') + oneOf(('control', 'context'))('job_slot')))) + Literal('input')) + Optional(DataTypeGrammar)) + ':') + VarNameGrammar) + ':') + VarValueGrammar)
    def ingest_from_parsed_test_fixture(self, action_case, test_case, parse_results, file_name, line_number):
        """Store the parsed value under inputs / job_control_inputs /
        job_context_inputs depending on the job slot."""
        parsed_data_type_value = get_parsed_data_type_value(parse_results, parse_results.value)
        path = 'inputs'
        if (parse_results.job_slot == 'control'):
            path = ('job_control_' + path)
        if (parse_results.job_slot == 'context'):
            path = ('job_context_' + path)
        path_put(action_case, '{}.{}'.format(path, parse_results.variable_name), parsed_data_type_value)
    def assert_test_case_action_results(*args, **kwargs):
        # Input directives contribute no assertions to the test case.
        pass
class Test(unittest.TestCase):
    """Connectivity tests for fnss's simple deterministic topologies
    (line, k-ary tree, ring, star, full mesh, dumbbell, chord)."""
    def setUp(self):
        pass
    def tearDown(self):
        pass
    def test_line_topology(self):
        # n nodes, n-1 edges, consecutive nodes connected both ways.
        def test_line_connectivity(n):
            G = fnss.line_topology(n)
            self.assertEqual(n, G.number_of_nodes())
            self.assertEqual((n - 1), G.number_of_edges())
            for i in range(n):
                if (i <= (n - 2)):
                    self.assertTrue(G.has_edge(i, (i + 1)))
                if (i >= 1):
                    self.assertTrue(G.has_edge(i, (i - 1)))
        self.assertRaises(ValueError, fnss.line_topology, 0)
        self.assertRaises(ValueError, fnss.line_topology, (- 1))
        test_line_connectivity(8)
        test_line_connectivity(11)
    def test_k_ary_tree_topology(self):
        # Complete k-ary tree of height h: degree and depth invariants.
        def test_K_ary_tree_connectivity(k, h):
            expected_degree = {'root': k, 'intermediate': (k + 1), 'leaf': 1}
            G = fnss.k_ary_tree_topology(k, h)
            self.assertEqual(sum(((k ** d) for d in range((h + 1)))), G.number_of_nodes())
            self.assertEqual(sum(((k ** d) for d in range(1, (h + 1)))), G.number_of_edges())
            degree = G.degree()
            for v in G.nodes():
                v_type = G.node[v]['type']
                v_depth = G.node[v]['depth']
                self.assertEqual(expected_degree[v_type], degree[v])
                neighbors = G.neighbors(v)
                # Neighbors must sit exactly one level above or below.
                for u in neighbors:
                    u_depth = G.node[u]['depth']
                    if (u < v):
                        self.assertEqual(u_depth, (v_depth - 1))
                    elif (u > v):
                        self.assertEqual(u_depth, (v_depth + 1))
                    else:
                        self.fail(('Node %s has a self-loop' % str(v)))
        self.assertRaises(ValueError, fnss.k_ary_tree_topology, 0, 3)
        self.assertRaises(ValueError, fnss.k_ary_tree_topology, 3, 0)
        self.assertRaises(ValueError, fnss.k_ary_tree_topology, (- 1), 3)
        self.assertRaises(ValueError, fnss.k_ary_tree_topology, 3, (- 1))
        test_K_ary_tree_connectivity(3, 5)
        test_K_ary_tree_connectivity(5, 3)
        test_K_ary_tree_connectivity(2, 1)
    def test_ring_topology(self):
        # n nodes, n edges, each node linked to both cyclic neighbors.
        def test_ring_connectivity(n):
            G = fnss.ring_topology(n)
            self.assertEqual(n, G.number_of_nodes())
            self.assertEqual(n, G.number_of_edges())
            for i in range(n):
                self.assertTrue(G.has_edge(i, ((i + 1) % n)))
                self.assertTrue(G.has_edge(i, ((i - 1) % n)))
        self.assertRaises(ValueError, fnss.ring_topology, 0)
        self.assertRaises(ValueError, fnss.ring_topology, (- 1))
        self.assertRaises(TypeError, fnss.ring_topology, 'String')
        test_ring_connectivity(10)
        test_ring_connectivity(21)
    def test_star_topology(self):
        # Node 0 is the root; nodes 1..n are leaves attached to it.
        def test_star_connectivity(n):
            G = fnss.star_topology(n)
            self.assertEqual((n + 1), G.number_of_nodes())
            self.assertEqual(n, G.number_of_edges())
            self.assertEqual('root', G.node[0]['type'])
            for i in range(1, (n + 1)):
                self.assertEqual('leaf', G.node[i]['type'])
                self.assertTrue(G.has_edge(i, 0))
                self.assertTrue(G.has_edge(0, i))
        self.assertRaises(ValueError, fnss.star_topology, 0)
        self.assertRaises(ValueError, fnss.star_topology, (- 1))
        self.assertRaises(TypeError, fnss.star_topology, 'String')
        test_star_connectivity(10)
        test_star_connectivity(21)
    def test_full_mesh_topology(self):
        # Every distinct pair of the n nodes is connected.
        def test_full_mesh_connectivity(n):
            G = fnss.full_mesh_topology(n)
            self.assertEqual(n, G.number_of_nodes())
            self.assertEqual(((n * (n - 1)) // 2), G.number_of_edges())
            for i in range(n):
                for j in range(n):
                    if (i != j):
                        self.assertTrue(G.has_edge(i, j))
        self.assertRaises(ValueError, fnss.full_mesh_topology, 0)
        self.assertRaises(ValueError, fnss.full_mesh_topology, (- 1))
        self.assertRaises(TypeError, fnss.full_mesh_topology, 'String')
        test_full_mesh_connectivity(10)
        test_full_mesh_connectivity(21)
    def test_dumbbell_topology(self):
        # m-node left bell, n-node core path, m-node right bell.
        def test_dumbbell_connectivity(m, n):
            G = fnss.dumbbell_topology(m, n)
            self.assertEqual(((2 * m) + n), G.number_of_nodes())
            self.assertEqual((((2 * m) + n) - 1), G.number_of_edges())
            for i in range(m):
                self.assertTrue(G.has_edge(i, m))
                self.assertEqual('left_bell', G.node[i]['type'])
            for i in range(m, (m + n)):
                self.assertTrue(G.has_edge(i, (i + 1)))
                self.assertEqual('core', G.node[i]['type'])
            for i in range((m + n), ((2 * m) + n)):
                self.assertTrue(G.has_edge(((m + n) - 1), i))
                self.assertEqual('right_bell', G.node[i]['type'])
        self.assertRaises(ValueError, fnss.dumbbell_topology, 0, 0)
        self.assertRaises(ValueError, fnss.dumbbell_topology, (- 1), 1)
        self.assertRaises(ValueError, fnss.dumbbell_topology, 1, 3)
        self.assertRaises(TypeError, fnss.dumbbell_topology, 'String', 4)
        self.assertRaises(TypeError, fnss.dumbbell_topology, 4, 'String')
        test_dumbbell_connectivity(15, 12)
        test_dumbbell_connectivity(2, 1)
    def test_chord_topology(self):
        # Chord ring of 2**m nodes with r successor links per node.
        def test_chord_connectivity(m, r):
            G = fnss.chord_topology(m, r)
            n = (2 ** m)
            self.assertEqual(len(G), n)
            if (r <= 2):
                # Finger-table-only case: every node has exactly m links.
                for i in G.nodes():
                    self.assertEqual(len(G.adj[i]), m)
            else:
                for i in G.nodes():
                    for j in range((i + 1), ((i + r) + 1)):
                        self.assertTrue(G.has_edge(i, (j % n)))
        test_chord_connectivity(2, 1)
        test_chord_connectivity(3, 1)
        test_chord_connectivity(4, 1)
        test_chord_connectivity(5, 1)
        test_chord_connectivity(5, 2)
        test_chord_connectivity(5, 3)
        test_chord_connectivity(3, 7)
        self.assertRaises(ValueError, fnss.chord_topology, 0, 3)
        self.assertRaises(ValueError, fnss.chord_topology, 1, 3)
        self.assertRaises(ValueError, fnss.chord_topology, (- 1), 3)
        self.assertRaises(ValueError, fnss.chord_topology, 5, (- 1))
        self.assertRaises(ValueError, fnss.chord_topology, 5, 0)
        self.assertRaises(ValueError, fnss.chord_topology, 3, 8)
        self.assertRaises(TypeError, fnss.chord_topology, 5, None)
        self.assertRaises(TypeError, fnss.chord_topology, None, 3)
        self.assertRaises(TypeError, fnss.chord_topology, 5, '1')
def test_migrate_gen_data(data, forecast, tmp_path):
    """Migrated GEN_DATA responses must match the source netCDF group."""
    group = '/REAL_0/GEN_DATA'
    keys = ('SNAKE_OIL_WPR_DIFF', 'SNAKE_OIL_OPR_DIFF', 'SNAKE_OIL_GPR_DIFF')
    with open_storage((tmp_path / 'storage'), mode='w') as storage:
        responses = [SummaryConfig(name=key, input_file='some_file', keys=['some_key']) for key in keys]
        experiment = storage.create_experiment(responses=responses)
        ensemble = experiment.create_ensemble(name='default_0', ensemble_size=5)
        bf._migrate_gen_data(ensemble, forecast)
        # Compare every data variable (not the dimension coordinates).
        for key in (set(data[group].variables) - set(data[group].dimensions)):
            expect = np.array(data[group][key]).flatten()
            actual = ensemble.load_responses(key, (0,))['values'].data.flatten()
            assert (list(expect) == list(actual)), key
class TestParseCommaSeparatedValue():
    """parse_comma_separated_value splits on commas and trims whitespace."""
    def test_should_return_empty_list_for_empty_string(self):
        assert parse_comma_separated_value('') == []
    def test_should_return_empty_list_for_blank_string(self):
        assert parse_comma_separated_value(' ') == []
    def test_should_parse_single_value(self):
        assert parse_comma_separated_value('one') == ['one']
    def test_should_parse_multiple_values(self):
        expected = ['one', 'two', 'three']
        assert parse_comma_separated_value('one,two,three') == expected
    def test_should_remove_spaces(self):
        expected = ['one', 'two', 'three']
        assert parse_comma_separated_value(' one , two , three ') == expected
class ActionMenu(xbmcgui.WindowXMLDialog):
    """Kodi modal menu dialog that lets the user pick one action item.

    An ActionAutoClose worker thread closes the menu after a period of
    inactivity; every user action resets its timer.
    """
    selected_action = None      # list item chosen by the user, if any
    action_items = None         # items to display, set via setActionItems()
    auto_close_thread = None    # background inactivity timer
    listControl = None          # list control (id 3000) from the window XML
    action_exitkeys_id = None
    def __init__(self, *args, **kwargs):
        log.debug('ActionMenu: __init__')
        # NOTE(review): initializes xbmcgui.WindowXML, not the
        # WindowXMLDialog base this class actually inherits — confirm
        # this is intentional.
        xbmcgui.WindowXML.__init__(self)
        self.auto_close_thread = ActionAutoClose(self)
        self.auto_close_thread.start()
    def onInit(self):
        """Populate the list control once the window XML is loaded."""
        log.debug('ActionMenu: onInit')
        self.action_exitkeys_id = [10, 13]
        self.listControl = self.getControl(3000)
        self.listControl.addItems(self.action_items)
        self.setFocus(self.listControl)
    def onFocus(self, control_id):
        pass
    def doAction(self, action_id):
        pass
    def onMessage(self, message):
        log.debug('ActionMenu: onMessage: {0}', message)
    def onAction(self, action):
        # Action ids 10 and 92 — presumably PREVIOUS_MENU and NAV_BACK —
        # dismiss the menu; anything else just resets the auto-close timer.
        if (action.getId() == 10):
            self.auto_close_thread.stop()
            self.close()
        elif (action.getId() == 92):
            self.auto_close_thread.stop()
            self.close()
        else:
            self.auto_close_thread.set_last()
        log.debug('ActionMenu: onAction: {0}', action.getId())
    def onClick(self, control_id):
        """Record the selected item and close when the list is clicked."""
        if (control_id == 3000):
            self.selected_action = self.listControl.getSelectedItem()
            log.debug('ActionMenu: Selected Item: {0}', self.selected_action)
            self.auto_close_thread.stop()
            self.close()
    def setActionItems(self, action_items):
        # Must be called before the dialog is shown (used in onInit).
        self.action_items = action_items
    def getActionItem(self):
        return self.selected_action
class OutOfSsaTranslation(object):
def __init__(self):
    """Start with no CFG and empty liveness/interference state."""
    # Control-flow graph under translation, set by __call__.
    self.cfg = None
    # Liveness sets and interference data, filled in by __call__.
    self._live_in = None
    self._live_out = None
    self._interference_graph = None
    self._use_map = None
    self._def_map = None
    # Instruction -> containing basic block.
    self._stmt_block_map = None
    # Phi resource -> its congruence class (Sreedhar et al.).
    self._phi_congruence_class = None
    # Number of copies inserted so far.
    self._copy_counter = 0
def __call__(self, cfg, liveness):
    """Run out-of-SSA translation on *cfg* using precomputed *liveness*."""
    self.cfg = cfg
    self._interference_graph = liveness.interference_graph
    self._live_out = liveness._live_out_block
    self._live_in = liveness._live_in_block
    self._use_map = liveness._use_map
    self._def_map = liveness._def_map
    self._phi_congruence_class = {}
    # Remember which basic block each instruction belongs to.
    self._stmt_block_map = {
        instr: bb
        for bb in self.cfg
        for instr in self.cfg.get_node_instructions(bb)
    }
    self.perform()
def perform(self):
    """Translate out of SSA: mark phi destinations live-in, break phi
    interference, then strip phi nodes and trivial self-copies."""
    logging.debug('out of ssa')
    for i in self.cfg.instructions:
        if isinstance(i, expressions.Phi):
            # A phi's destination is live at the top of its block.
            self._live_in[self._stmt_block_map[i]].add(i.dst)
    self._break_phi_interference()
    utils.show_flow_graph(self.cfg, 'out ssa copies')
    # Detach def/use subscriptions before rewriting instruction lists.
    for i in self.cfg.instructions:
        for d in i.defs:
            d.unsubscribe()
        for u in i.uses:
            u.unsubscribe()
    # Rebuild every block without phi instructions and self-copies.
    for bb in self.cfg:
        instructions = self.cfg.get_node_instructions(bb)
        new_instructions = []
        for i in instructions:
            if (not (self._is_copy(i) or isinstance(i, expressions.Phi))):
                new_instructions.append(i)
        self.cfg.set_node_instructions(bb, new_instructions)
def perform2(self):
    """Alternative driver using Sreedhar-style phi interference elimination.

    NOTE(review): shares its cleanup tail with perform(); candidate for a
    common helper.
    """
    logging.info('perform')
    self._eliminate_phi_resource_interference()
    utils.show_flow_graph(self.cfg, 'out ssa copies')
    # Detach def/use subscriptions before rewriting instruction lists.
    for i in self.cfg.instructions:
        for d in i.defs:
            d.unsubscribe()
        for u in i.uses:
            u.unsubscribe()
    # Rebuild every block without phi instructions and self-copies.
    for bb in self.cfg:
        instructions = self.cfg.get_node_instructions(bb)
        new_instructions = []
        for i in instructions:
            if (not (self._is_copy(i) or isinstance(i, expressions.Phi))):
                new_instructions.append(i)
        self.cfg.set_node_instructions(bb, new_instructions)
def _is_copy(self, instr):
    """True for assignments whose source and destination coincide."""
    if not isinstance(instr, expressions.Assignment):
        return False
    return instr.src == instr.dst
def _eliminate_phi_resource_interference(self):
    """Sreedhar et al.: insert copies so no two resources of a phi
    interfere, then merge each phi's resources into one congruence class."""
    self._init_phi_congruence_classes()
    for instr in self.cfg.instructions:
        if isinstance(instr, expressions.Phi):
            current_block = self._stmt_block_map[instr]
            candidate_resource_set = set()
            unresolved_neighbor_map = {}
            # All resources of this phi: destination plus every source.
            phi_resources = [instr.dst]
            phi_resources.extend(instr.src)
            for x in phi_resources:
                unresolved_neighbor_map[x] = set()
            # Examine every unordered resource pair for class interference.
            for pair in itertools.combinations(phi_resources, 2):
                (x_i, x_j) = pair
                if self._phi_congruence_classes_interfere(x_i, x_j):
                    li = self._get_orig_block(current_block, x_i, instr)
                    lj = self._get_orig_block(current_block, x_j, instr)
                    self._determine_copies(x_i, li, x_j, lj, candidate_resource_set)
            for x in candidate_resource_set:
                self._insert_copy(x, instr)
            # Union the congruence classes of all of this phi's resources;
            # every resource ends up sharing the merged set object.
            current_phi_congruence_class = set()
            for x in phi_resources:
                current_phi_congruence_class.update(self._phi_congruence_class[x])
                self._phi_congruence_class[x] = current_phi_congruence_class
def _insert_copy(self, x, instr):
    """Insert a copy for phi resource *x* of phi *instr* (Sreedhar et al.).

    Phi sources are copied at the end of the predecessor block they flow
    in from; the phi destination gets a fresh name defined right after
    the block's phis. Liveness and interference are patched locally.
    """
    logging.info(instr)
    current_block = self._stmt_block_map[instr]
    if (x in instr.src):
        # x is a phi source: copy it in its originating predecessor.
        orig_block = self._get_orig_block(current_block, x, instr)
        copy = expressions.Var(instr.dst.name, instr.dst.type)
        copy_instr = expressions.Assignment(copy, x)
        self._copy_counter += 1
        self.cfg.get_node_instructions(orig_block).append(copy_instr)
        # NOTE(review): maps the copy to the phi *instruction*, while
        # every other _stmt_block_map entry maps to a block — confirm
        # this isn't meant to be orig_block.
        self._stmt_block_map[copy_instr] = instr
        instr.src.remove(x)
        instr.src.append(copy)
        self._phi_congruence_class[copy] = set([copy])
        # If no successor still needs x (neither live-in nor used by a
        # phi there), x can be dropped from the predecessor's live-out.
        change = False
        for s in self.cfg.successors(orig_block):
            if ((x not in self._live_in[s]) and (not self._used_in_phi_k_j(x, s))):
                change = True
        if change:
            if (x in self._live_out[orig_block]):
                self._live_out[orig_block].remove(x)
        # The new copy interferes with everything live out of its block.
        for e in self._live_out[orig_block]:
            self._interference_graph.add_edge(copy, e)
        self._stmt_block_map[instr] = current_block
    else:
        # x is the phi destination: rename it and define the original
        # from the new name just after the phis of the current block.
        xnew = expressions.Var(x.name, x.type)
        self._copy_counter += 1
        xnew_copy = expressions.Assignment(x, xnew)
        instructions = self.cfg.get_node_instructions(current_block)
        index = 0
        # Find the first non-phi instruction (xrange: Python 2 module).
        for i in xrange(len(instructions)):
            if (not isinstance(instructions[i], expressions.Phi)):
                index = i
                break
        self.cfg.get_node_instructions(current_block).insert(index, xnew_copy)
        instr.dst = xnew
        self._phi_congruence_class[xnew] = set([xnew])
        self._live_in[current_block].add(xnew)
        # The renamed destination interferes with everything live-in here.
        for e in self._live_in[current_block]:
            self._interference_graph.add_edge(xnew, e)
        self._stmt_block_map[instr] = current_block
        self._stmt_block_map[xnew_copy] = current_block
def _get_orig_block(self, current_block, phi_arg, phi_instr):
    """Return the predecessor of ``current_block`` whose live-out set
    carries ``phi_arg`` (the block the argument flows in from), or the
    sentinel 0 when no predecessor supplies it."""
    carriers = (p for p in self.cfg.predecessors(current_block)
                if phi_arg in self._live_out[p])
    return next(carriers, 0)
def _used_in_phi_k_j(self, var, j):
    """True when ``var`` appears as target or argument of any phi in block ``j``."""
    phis = (instr for instr in self.cfg.get_node_instructions(j)
            if isinstance(instr, expressions.Phi))
    return any(var == phi.dst or var in phi.src for phi in phis)
def _init_phi_congruence_classes(self):
    """Seed every phi resource (target and each argument) with a singleton
    congruence class containing only itself."""
    for instr in self.cfg.instructions:
        if not isinstance(instr, expressions.Phi):
            continue
        for resource in itertools.chain([instr.dst], instr.src):
            self._phi_congruence_class[resource] = {resource}
def _phi_congruence_classes_interfere(self, i, j):
    """True when any member of ``i``'s phi congruence class interferes
    with any member of ``j``'s.

    Improvements: returns as soon as one interfering pair is found instead
    of scanning the entire cross product, and drops the redundant
    ``repeat=1`` argument to ``itertools.product``.
    """
    for (y_i, y_j) in itertools.product(self._phi_congruence_class[i],
                                        self._phi_congruence_class[j]):
        if self._interference_graph.are_interfering(y_i, y_j):
            return True
    return False
def _determine_copies(self, i, li, j, lj, candidate_resource_set):
    """Decide which of the interfering phi resources ``i``/``j`` (from
    blocks ``li``/``lj``) must receive a copy, per the case analysis of
    Sreedhar's phi-translation algorithm.

    Bug fix: the original's second branch repeated the first branch's
    condition (``res1 and not res2``), so "case2" was unreachable and ``j``
    could never be selected alone; it now tests the symmetric condition.
    """
    if self._interference_graph.are_interfering(i, j):
        # Direct interference between the two resources: copy both.
        candidate_resource_set.add(i)
        candidate_resource_set.add(j)
    # res1/res2: intersection of one resource's congruence class with the
    # other resource's live-out set (truthy when non-empty).
    res1 = self._intersection_of_phi_and_live_out(i, lj)
    res2 = self._intersection_of_phi_and_live_out(j, li)
    if (res1 and (not res2)):
        logging.info('case1')
        candidate_resource_set.add(i)
    elif ((not res1) and res2):
        logging.info('case2')
        candidate_resource_set.add(j)
    elif ((not res1) and (not res2)):
        # NOTE(review): the paper defers this ("unresolved") case; copying
        # both is the original's conservative choice, kept here.
        logging.info('case3')
        candidate_resource_set.add(i)
        candidate_resource_set.add(j)
    else:
        logging.info('case4')
        candidate_resource_set.add(i)
        candidate_resource_set.add(j)
def _intersection_of_phi_and_live_out(self, x, ly):
    """Members of ``x``'s phi congruence class that are live out of block
    ``ly`` (truthy result means the sets overlap)."""
    return self._phi_congruence_class[x] & self._live_out[ly]
def _break_phi_interference(self):
    """Sreedhar-style phi translation: for each phi, insert copies until no
    two of its resources have interfering congruence classes, then merge
    the classes; finally drop the unconstraining singleton classes."""
    self._initialize_phi_congruence_classes()
    for instr in self.cfg.instructions:
        if not isinstance(instr, expressions.Phi):
            continue
        phi = instr
        candidate_resource_set = set()
        unresolved_neighbor_map = {resource: set() for resource in phi.resources}
        self._add_basic_block_information_to_phi(phi)
        for (xi, xj) in self._pairs(phi.resources):
            if self._congruence_classes_interfere(xi, xj):
                self._determine_candidates(xi, xj, phi,
                                           candidate_resource_set,
                                           unresolved_neighbor_map)
        self._process_unresolved_resources(candidate_resource_set)
        for resource in candidate_resource_set:
            self._insert_copy2(resource, phi)
        self._merge_phi_congruence_class(phi)
    self._nullify_singleton_phi_congruence_classes()
def _initialize_phi_congruence_classes(self):
    """Give every resource of every phi its own singleton congruence class."""
    phis = (s for s in self.cfg.instructions if isinstance(s, expressions.Phi))
    for phi in phis:
        for resource in phi.resources:
            self._phi_congruence_class[resource] = {resource}
def _process_unresolved_resources(self, candidate_resource_set):
    # Intentionally a no-op: the "unresolved resources" phase of Sreedhar's
    # algorithm (deciding which deferred pairs still need copies) is not
    # implemented; all decisions are currently made eagerly upstream.
    pass
def _merge_phi_congruence_class(self, phi):
    """Union the congruence classes of all of ``phi``'s resources and point
    every resource at the single merged (shared) set."""
    merged = set()
    for resource in phi.resources:
        merged |= self._phi_congruence_class[resource]
    for resource in phi.resources:
        self._phi_congruence_class[resource] = merged
def _nullify_singleton_phi_congruence_classes(self):
    """Drop congruence classes with a single member; singletons impose no
    coalescing constraint."""
    singletons = [resource
                  for resource, congruence_class in self._phi_congruence_class.items()
                  if len(congruence_class) == 1]
    for resource in singletons:
        del self._phi_congruence_class[resource]
def _insert_copy2(self, resource, phi):
    """Insert a copy that detaches ``resource`` from ``phi``.

    For a phi argument, the copy goes at the end of the predecessor block
    that supplies it; for the phi target, right after the phi group of the
    phi's own block.  Liveness, interference edges and congruence classes
    are patched for the fresh variable.
    """
    if (resource != phi.dst):
        # Argument case: new_var = resource, placed in the source block.
        new_var = self._create_copy_var(resource)
        copy = self._make_copy(new_var, resource)
        resource_block = phi.get_resource_basic_block(resource)
        self._replace_phi_argument(resource, new_var, phi)
        self._insert_copy_at_the_end(resource_block, copy)
        self._phi_congruence_class[new_var] = {new_var}
        # NOTE(review): live_out aliases the set *before* the old argument
        # may be removed below, so the edges built afterwards see the
        # updated set -- confirm this ordering is intended.
        live_out = self._live_out[resource_block]
        self._try_to_remove_old_argument_from_live_out(resource_block, resource)
        self._build_interference_edges(new_var, live_out)
    else:
        # Target case: resource = new_var, phi now defines new_var.
        new_var = self._create_copy_var(resource)
        copy = self._make_copy(resource, new_var)
        current_block = self._stmt_block_map[phi]
        self._insert_phi_target_copy(copy, current_block)
        phi.dst = new_var
        self._phi_congruence_class[new_var] = {new_var}
        self._live_in[current_block].remove(resource)
        self._live_in[current_block].add(new_var)
        self._build_interference_edges(new_var, self._live_in[current_block])
def _insert_phi_target_copy(self, copy, block):
    """Insert ``copy`` immediately after the block's phi instructions
    (position = number of phis in the block)."""
    instructions = self.cfg.get_node_instructions(block)
    insert_at = sum(1 for instr in instructions
                    if isinstance(instr, expressions.Phi))
    instructions.insert(insert_at, copy)
def _insert_copy_at_the_end(self, block, copy):
    """Append ``copy`` as the last instruction of ``block``."""
    self.cfg.get_node_instructions(block).append(copy)
def _try_to_remove_old_argument_from_live_out(self, resource_block, resource):
    """Drop ``resource`` from live-out of ``resource_block`` unless a
    successor still needs it.

    Bug fix: the original initialized ``can_remove = True`` and then set it
    to ``True`` again inside the loop, so the liveness test had no effect
    and the resource was always removed.  The flag is now *cleared* when
    the resource is still live into a successor or referenced by a phi.

    NOTE(review): the phi-usage test inspects ``resource_block`` rather
    than the successor ``s`` (preserved from the original) -- confirm
    against the algorithm's definition.
    """
    can_remove = True
    for s in self.cfg.successors(resource_block):
        if ((resource in self._live_in[s]) or self._is_used_in_phi_in_block(resource, resource_block)):
            can_remove = False
            break
    if can_remove:
        # discard (not remove) so an already-absent resource is harmless.
        self._live_out[resource_block].discard(resource)
def _is_used_in_phi_in_block(self, resource, block):
    """True when ``resource`` is an argument of some phi in ``block``."""
    return any(isinstance(instr, expressions.Phi) and resource in instr.src
               for instr in self.cfg.get_node_instructions(block))
@staticmethod
def _create_copy_var(original):
    """Create a fresh variable with the same name and type as ``original``.

    Fix: restored the evidently-missing ``@staticmethod`` -- the method is
    invoked as ``self._create_copy_var(resource)``, and without the
    decorator ``self`` binds to ``original`` and the call raises TypeError.
    """
    return expressions.Var(original.name, original.type)
@staticmethod
def _make_copy(dst, src):
    """Build the copy assignment ``dst = src``.

    Fix: restored the evidently-missing ``@staticmethod`` -- callers use
    ``self._make_copy(new_var, resource)``, which would otherwise pass
    ``self`` as ``dst`` and raise TypeError for the extra argument.
    """
    return expressions.Assignment(dst, src)
def _build_interference_edges(self, var, live_set):
    """Add an interference edge between ``var`` and each member of
    ``live_set`` it does not already interfere with."""
    for other in live_set:
        if not self._interference_graph.are_interfering(var, other):
            self._interference_graph.add_edge(var, other)
@staticmethod
def _replace_phi_argument(old, new, phi):
    """Swap phi argument ``old`` for ``new`` in place, preserving its
    position in ``phi.src``.

    Fix: restored the evidently-missing ``@staticmethod`` -- callers use
    ``self._replace_phi_argument(resource, new_var, phi)``, which would
    otherwise raise TypeError for the extra positional argument.
    """
    old_index = phi.src.index(old)
    phi.src.insert(old_index, new)
    phi.src.remove(old)
def _determine_candidates(self, i, j, phi, candidate_resource_set, unresolved_neighbor_map):
    """Dispatch the candidate test for one interfering resource pair.

    Bug fix: ``_determine_candidates_for_target_and_source`` expects the
    phi *target* as its first argument, but the original always passed
    ``(i, j)``, swapping the roles whenever ``j`` was the target.
    """
    if ((i != phi.dst) and (j != phi.dst)):
        self._determine_candidates_for_sources(i, j, phi, candidate_resource_set, unresolved_neighbor_map)
    elif (i == phi.dst):
        self._determine_candidates_for_target_and_source(i, j, phi, candidate_resource_set, unresolved_neighbor_map)
    else:
        self._determine_candidates_for_target_and_source(j, i, phi, candidate_resource_set, unresolved_neighbor_map)
def _determine_candidates_for_sources(self, i, j, phi, candidate_resource_set, unresolved_neighbor_map):
    """Case analysis for two interfering phi *source* resources.

    Bug fix: the third branch tested ``intersection_empty_ij`` twice; the
    second operand is now ``intersection_empty_ji`` as clearly intended
    (the original was saved only by branch ordering).

    NOTE(review): despite the names, ``_intersection_empty`` returns the
    intersection itself, so these flags are truthy when the intersection
    is NON-empty; the mapping of branches onto the paper's cases 3/4
    should be re-checked against Sreedhar et al.
    """
    phi_i = self._phi_congruence_class[i]
    phi_j = self._phi_congruence_class[j]
    live_out_i = self._live_out[phi.get_resource_basic_block(i)]
    live_out_j = self._live_out[phi.get_resource_basic_block(j)]
    intersection_empty_ij = self._intersection_empty(phi_i, live_out_j)
    intersection_empty_ji = self._intersection_empty(phi_j, live_out_i)
    if (intersection_empty_ij and (not intersection_empty_ji)):
        candidate_resource_set.add(i)
    elif ((not intersection_empty_ij) and intersection_empty_ji):
        candidate_resource_set.add(j)
    elif (intersection_empty_ij and intersection_empty_ji):
        # Unresolved pair: defer the decision, remember the neighbors.
        unresolved_neighbor_map[i].add(j)
        unresolved_neighbor_map[j].add(i)
    elif ((not intersection_empty_ij) and (not intersection_empty_ji)):
        candidate_resource_set.add(i)
        candidate_resource_set.add(j)
def _determine_candidates_for_target_and_source(self, t, s, phi, candidate_resource_set, unresolved_neighbor_map):
    """Case analysis when one interfering resource is the phi target ``t``
    (living in the phi's own block) and the other a source ``s``.

    Bug fix: the third branch tested ``intersection_empty_ij`` twice; it
    now uses ``intersection_empty_ji``.

    NOTE(review): ``live_in_s`` is taken from ``t``'s block (the phi
    block); the variable name suggests ``s``'s block was meant -- confirm
    which the algorithm requires.
    """
    phi_t = self._phi_congruence_class[t]
    phi_s = self._phi_congruence_class[s]
    live_out_t = self._live_out[phi.get_resource_basic_block(t)]
    live_in_s = self._live_in[phi.get_resource_basic_block(t)]
    intersection_empty_ij = self._intersection_empty(phi_t, live_in_s)
    intersection_empty_ji = self._intersection_empty(phi_s, live_out_t)
    if (intersection_empty_ij and (not intersection_empty_ji)):
        candidate_resource_set.add(t)
    elif ((not intersection_empty_ij) and intersection_empty_ji):
        candidate_resource_set.add(s)
    elif (intersection_empty_ij and intersection_empty_ji):
        # Unresolved pair: defer the decision, remember the neighbors.
        unresolved_neighbor_map[s].add(t)
        unresolved_neighbor_map[t].add(s)
    elif ((not intersection_empty_ij) and (not intersection_empty_ji)):
        candidate_resource_set.add(s)
        candidate_resource_set.add(t)
def _intersection_empty(self, x, y):
    # Misleading name: this returns the intersection itself, so the result
    # is truthy when the intersection is NON-empty.  Callers rely on that
    # truthiness, so the name cannot be changed safely here.
    return x & y
def _congruence_classes_interfere(self, xi, xj):
    """True when any member of ``xi``'s congruence class interferes with
    any member of ``xj``'s."""
    return any(self._interference_graph.are_interfering(yi, yj)
               for yi in self._phi_congruence_class[xi]
               for yj in self._phi_congruence_class[xj])
def _add_basic_block_information_to_phi(self, phi):
    """Annotate ``phi`` with the block of each of its resources: the target
    lives in the phi's own block, each argument in the predecessor whose
    live-out set carries it."""
    target_block = self._stmt_block_map[phi]
    phi.set_resource_basic_block(phi.dst, target_block)
    predecessors = self.cfg.predecessors(target_block)
    for argument in phi.src:
        argument_block = self._get_phi_argument_block(argument, predecessors)
        phi.set_resource_basic_block(argument, argument_block)
@staticmethod
def _pairs(iterable):
    """All unordered pairs of elements of ``iterable``.

    Fix: restored the evidently-missing ``@staticmethod`` -- the method is
    invoked as ``self._pairs(phi.resources)``, which would otherwise pass
    ``self`` as ``iterable`` and fail.
    """
    return itertools.combinations(iterable, 2)
def _get_phi_argument_block(self, phi_argument, phi_block_predecessors):
    """Predecessor block whose live-out set contains ``phi_argument``, or
    None when no predecessor supplies it."""
    return next((p for p in phi_block_predecessors
                 if phi_argument in self._live_out[p]), None)
def _rename_congruence_classes(self):
    # TODO: not implemented -- would rename every member of each phi
    # congruence class to one representative variable before copy removal.
    pass
def _remove_copies(self):
    # TODO: not implemented -- would delete copies made redundant by the
    # congruence-class renaming pass.
    pass
class RateLimiter(ModelNormal):
    # NOTE(review): OpenAPI-generator model, heavily corrupted by extraction.
    # Several literal lines below are NOT valid Python (URL string values were
    # truncated, one tuple key lost its name), and the bare ``_property`` /
    # ``_js_args_to_python_args`` names are decorators that lost their ``@``
    # (``@cached_property`` and ``@convert_js_args_to_python_args`` in the
    # generated original).  Regenerate this class from the OpenAPI spec
    # rather than hand-patching it.
    # Enumerated values per attribute (corrupted literal preserved verbatim).
    allowed_values = {(' {'HEAD': 'HEAD', 'OPTIONS': 'OPTIONS', 'GET': 'GET', 'POST': 'POST', 'PUT': 'PUT', 'PATCH': 'PATCH', 'DELETE': 'DELETE', 'TRACE': 'TRACE'}, ('window_size',): {'one_second': 1, 'ten_seconds': 10, 'one_minute': 60}, ('action',): {'RESPONSE': 'response', 'RESPONSE_OBJECT': 'response_object', 'LOG_ONLY': 'log_only'}, ('logger_type',): {'AZUREBLOB': 'azureblob', 'BIGQUERY': 'bigquery', 'CLOUDFILES': 'cloudfiles', 'DATADOG': 'datadog', 'DIGITALOCEAN': 'digitalocean', 'ELASTICSEARCH': 'elasticsearch', 'FTP': 'ftp', 'GCS': 'gcs', 'GOOGLEANALYTICS': 'googleanalytics', 'HEROKU': 'heroku', 'HONEYCOMB': 'honeycomb', 'HTTP': ' 'HTTPS': ' 'KAFKA': 'kafka', 'KINESIS': 'kinesis', 'LOGENTRIES': 'logentries', 'LOGGLY': 'loggly', 'LOGSHUTTLE': 'logshuttle', 'NEWRELIC': 'newrelic', 'NEWRELICOTLP': 'newrelicotlp', 'OPENSTACK': 'openstack', 'PAPERTRAIL': 'papertrail', 'PUBSUB': 'pubsub', 'S3': 's3', 'SCALYR': 'scalyr', 'SFTP': 'sftp', 'SPLUNK': 'splunk', 'STACKDRIVER': 'stackdriver', 'SUMOLOGIC': 'sumologic', 'SYSLOG': 'syslog'}}
    # Per-attribute validation rules (corrupted literal preserved verbatim).
    validations = {('name',): {'max_length': 255, 'min_length': 1}, ('uri_dictionary_name',): {'max_length': 255, 'min_length': 1}, (' {'min_items': 1}, ('rps_limit',): {'inclusive_maximum': 10000, 'inclusive_minimum': 10}, ('client_key',): {'min_items': 1}, ('penalty_box_duration',): {'inclusive_maximum': 60, 'inclusive_minimum': 1}, ('action',): {'min_length': 1}, ('response_object_name',): {'max_length': 255, 'min_length': 1}}
    _property
    def additional_properties_type():
        # Unknown keys may carry any primitive/container type.
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Attribute name -> accepted types (corrupted literal preserved verbatim).
        return {'name': (str,), 'uri_dictionary_name': (str, none_type), ' ([str],), 'rps_limit': (int,), 'window_size': (int,), 'client_key': ([str],), 'penalty_box_duration': (int,), 'action': (str,), 'response': ({str: (str,)}, none_type), 'response_object_name': (str, none_type), 'logger_type': (str,), 'feature_revision': (int,)}
    _property
    def discriminator():
        return None
    # Python attribute -> JSON key (corrupted literal preserved verbatim).
    attribute_map = {'name': 'name', 'uri_dictionary_name': 'uri_dictionary_name', ' ' 'rps_limit': 'rps_limit', 'window_size': 'window_size', 'client_key': 'client_key', 'penalty_box_duration': 'penalty_box_duration', 'action': 'action', 'response': 'response', 'response_object_name': 'response_object_name', 'logger_type': 'logger_type', 'feature_revision': 'feature_revision'}
    read_only_vars = {}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Build an instance from raw API data (read-only attrs allowed)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when additional properties are disallowed.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Public constructor; rejects positional args and read-only attrs."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only be set via _from_openapi_data.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def bot_data_in(session, *args, **kwargs):
    """Handle text arriving on a connected session.

    Idle keep-alives only bump the idle counters; any other input is
    stripped of incoming MXP markup and dispatched to the account's
    command handler.
    """
    text = args[0] if args else None
    if text is None:
        return
    if text.strip() in _IDLE_COMMAND:
        session.update_session_counters(idle=True)
        return
    text = _maybe_strip_incoming_mxp(text)
    kwargs.pop('options', None)
    session.account.execute_cmd(session=session, txt=text, **kwargs)
    session.update_session_counters()
class UserPassesTests(ResolveInfoTestCase):
    """Exercise the ``user_passes_test`` decorator for both outcomes."""

    def test_user_passes_test(self):
        # A predicate matching the current user lets the resolver run.
        predicate = lambda u: u.pk == self.user.pk
        resolver = decorators.user_passes_test(predicate)(lambda info: None)
        self.assertIsNone(resolver(self.info_mock(self.user)))

    def test_permission_denied(self):
        # A predicate that can never match raises PermissionDenied.
        predicate = lambda u: u.pk == (self.user.pk + 1)
        resolver = decorators.user_passes_test(predicate)(lambda info: None)
        with self.assertRaises(exceptions.PermissionDenied):
            resolver(self.info_mock(self.user))
# NOTE(review): the line below is a decorator that lost its ``@`` and name
# during extraction -- almost certainly ``@pytest.fixture(scope='module')``.
# As written, ``(scope='module')`` is a syntax error and must be restored
# before this module can import.
(scope='module')
def coordinator(tmp_path_factory, monkey_class, bot_token, bot_admins) -> ehforwarderbot.coordinator:
    """Module-scoped fixture: wires a mock slave channel and a Telegram
    master channel into a temporary EFB data dir, yields the coordinator,
    then stops polling on all channels at teardown."""
    tmp_path = tmp_path_factory.mktemp('etm_test')
    monkey_class.setenv('EFB_DATA_PATH', str(tmp_path))
    config_path = ehforwarderbot.utils.get_config_path()
    dump_config(config_path, {'master_channel': TelegramChannel.channel_id, 'slave_channels': ['tests.mocks.slave'], 'middlewares': []})
    ehforwarderbot.coordinator.add_channel(MockSlaveChannel())
    channel_config_path = ehforwarderbot.utils.get_config_path(TelegramChannel.channel_id)
    dump_config(channel_config_path, {'token': bot_token, 'admins': bot_admins})
    ehforwarderbot.coordinator.add_channel(TelegramChannel())
    (yield ehforwarderbot.coordinator)
    # Teardown: stop the master bot and every slave channel.
    ehforwarderbot.coordinator.master.stop_polling()
    for i in ehforwarderbot.coordinator.slaves.values():
        i.stop_polling()
class SelectionFunction():
    """Wrap a callable plus an optional "words" selection applied along the
    last axis of its output.

    The wrapped function's keyword arguments are filled from metadata
    passed to ``__call__``; previously-seen values are cached and reused
    when a later call omits them.
    """

    def __init__(self, function, words=None):
        """
        Args:
            function: callable producing an ndarray-like result.
            words: int, slice, Ellipsis, list or ndarray selecting entries
                along the output's last axis (default: keep everything).

        Raises:
            TypeError: if ``function`` is not callable or ``words`` has an
                unsupported type.
        """
        self._set_words(words)
        if not callable(function):
            raise TypeError(f'function must be a callable, not {type(function)}.')
        self._signature = inspect.signature(function)
        self._function = function
        self._base_kwargs = {}   # cached metadata values per parameter name
        self._ref_shape = None   # shape of the last metadata value seen

    def _set_words(self, words):
        """Validate and normalize the words selection (None becomes Ellipsis,
        lists become uint8 arrays)."""
        if (words is not None) and (not isinstance(words, (int, slice, type(...), list, _np.ndarray))):
            raise TypeError(f'words should be instance of int, slice, Ellipsis, list or ndarray, not {type(words)}.')
        if isinstance(words, list):
            # NOTE(review): uint8 silently wraps negative or >255 indices --
            # confirm word indices are always small and non-negative.
            words = _np.array(words, dtype='uint8')
        if isinstance(words, _np.ndarray) and (words.dtype.kind not in ('u', 'i')):
            raise TypeError(f'words should be an unsigned integer ndarray, not {words.dtype}.')
        self.words = words if (words is not None) else ...

    def __call__(self, **kwargs):
        """Evaluate the wrapped function on metadata ``kwargs`` and apply
        the words selection along the last axis.

        Raises:
            SelectionFunctionError: on missing arguments, mismatched output
                shape, or an out-of-range words selection.
        """
        for (name, arg) in self._signature.parameters.items():
            try:
                self._base_kwargs[name] = kwargs[name]
                self._ref_shape = kwargs[name].shape
            except KeyError as e:
                # Missing now, but a previously cached value is acceptable.
                if name not in self._base_kwargs:
                    raise SelectionFunctionError(f'Missing values in metadata {list(kwargs.keys())} for expected argument {e} of selection function {self}.')
        values = self._function(**self._base_kwargs)
        if values.shape[0] != self._ref_shape[0]:
            raise SelectionFunctionError(f'Shape of selection function output should begin with {self._ref_shape[0]}, not {values.shape[0]}.')
        if self.words is not None:
            # Select along the last axis by swapping it to the front and back.
            # (words is Ellipsis when unset, which indexes as a no-op.)
            try:
                values = values.swapaxes(0, (- 1))[self.words].swapaxes(0, (- 1))
            except IndexError:
                raise SelectionFunctionError(f"Words selection {self.words} can't be applied for this selection function with shape {values.shape}.")
        return values

    def _words_str(self):
        """Human-readable words selection ('All' when everything is kept)."""
        if not isinstance(self.words, Ellipsis.__class__):
            return str(self.words)
        return 'All'

    def __str__(self):
        # Bug fix: the original interpolated the bound method
        # ``self._words_str`` (printing '<bound method ...>') instead of
        # calling it.
        template_str = f'''Selection function:
    Function        : {self._function.__name__}
    Function args   : {list(self._signature.parameters.keys())}
    Words selection : {self._words_str()}
'''
        return template_str
class TestEngine(unittest.TestCase):
    """Shared harness for engine tests: caches parsed analytics and lazily
    loads the example events/queries fixtures.

    Fix: restored the evidently-stripped ``@classmethod`` decorators --
    these methods all take ``cls`` and are used as class methods (e.g.
    ``filter(cls.filter_queries, queries)`` fails on plain functions).
    """
    engine_name = 'base'
    query_cache = {}          # (query_text, is_case_sensitive) -> analytic
    schema = EMPTY_SCHEMA
    queries_file = QUERIES_FILE
    events_file = EVENTS_FILE
    __events = None           # lazy cache for get_events()

    @classmethod
    def get_analytic(cls, query_text, is_case_sensitive=None):
        """Parse ``query_text`` into an analytic, memoized per case mode."""
        cache_key = (query_text, is_case_sensitive)
        with cls.schema:
            if cache_key not in cls.query_cache:
                analytic_info = {'metadata': {'id': 'query-{:d}'.format(len(cls.query_cache)), 'name': query_text}, 'query': query_text}
                cls.query_cache[cache_key] = parse_analytic(analytic_info)
            return cls.query_cache[cache_key]

    @classmethod
    def get_events(cls):
        """Load and cache the example events fixture."""
        if cls.__events is None:
            with open(cls.events_file, 'r') as f:
                data = json.load(f)
            cls.__events = [Event.from_data(d) for d in data]
        return cls.__events

    @classmethod
    def filter_queries(cls, q):
        """Hook for subclasses to skip queries; keeps everything by default."""
        return True

    @classmethod
    def get_example_queries(cls, match_case_sensitive=False):
        """Load the queries fixture, parse each query in every case mode it
        declares, and return the entries matching ``match_case_sensitive``."""
        with open(cls.queries_file, 'r') as f:
            queries = []
            for q in toml.load(f)['queries']:
                case_settings = []
                if ('case_sensitive' not in q) and ('case_insensitive' not in q):
                    # No explicit flags: exercise the query in both modes.
                    case_sensitive = True
                    case_insensitive = True
                else:
                    case_sensitive = (q.get('case_sensitive') is True)
                    case_insensitive = (q.get('case_insensitive') is True)
                if case_sensitive:
                    case_settings.append(True)
                if case_insensitive:
                    case_settings.append(False)
                assert (len(case_settings) > 0), q
                for cs in case_settings:
                    analytic = cls.get_analytic(q['query'], cs)
                    analytic.metadata['_info'] = q.copy()
                    q['analytic'] = analytic
                    if (cs == match_case_sensitive):
                        queries.append(q)
        return list(filter(cls.filter_queries, queries))

    @classmethod
    def get_example_analytics(cls, match_case_sensitive=False):
        """Parsed analytics only, from get_example_queries()."""
        return [q['analytic'] for q in cls.get_example_queries(match_case_sensitive=match_case_sensitive)]

    def validate_results(self, actual, expected, query=None):
        """Assert engine output matches, with a readable failure message."""
        self.assertListEqual(actual, expected, 'Got {} but expected {} for analytic {}'.format(actual, expected, query))

    def test_valid_analytics(self):
        """Smoke test: every example query parses."""
        self.get_example_queries()
class TestFirstFixtures(PluginTestingOrderOfOperationsTestCase):
    """Order-of-operations fixture tests against FirstStubServer.

    NOTE(review): the ``_process_stub_action_*`` helpers take only ``body``
    and no ``self``; their ``@staticmethod`` (or stub-action registration)
    decorators appear to have been lost during extraction -- confirm before
    invoking them through an instance.
    """
    server_class = FirstStubServer
    server_settings = {}
    fixture_path = (os.path.dirname(__file__) + '/first_fixtures')
    # Named constants injected into the fixture files.
    model_constants = {'test_first_user': {'username': 'beamerblvd'}, 'test_users': [{'username': 'guitar-king'}, {'username': 'allison.agd'}]}
    def _process_stub_action_stubbed_out(body):
        # Echo a synthetic user record for the requested user_id.
        return {'user': {'id': body['user_id'], 'username': 'user_{}'.format(body['user_id'])}}
    def _process_stub_action_stub_job_error(body):
        # Simulate a job-level failure carrying two error records.
        raise JobError(errors=[Error(code='CAT_ERROR', message='Your cat broke the vase'), Error(code='DOG_ERROR', message='Your dog ate the couch')])
def lambda_handler(event, context):
    """AWS Lambda entry point: fetch review words filtered by the optional
    ``list_id``/``date_range`` query parameters and reply with CORS
    headers."""
    query = event.get('queryStringParameters') or {}
    review_words = review_word_service.get_review_words(
        list_id=query.get('list_id', None),
        date_range=query.get('date_range', None),
    )
    headers = {'Access-Control-Allow-Methods': 'GET,OPTIONS', 'Access-Control-Allow-Origin': '*'}
    return {'statusCode': 200, 'headers': headers, 'body': json.dumps(review_words)}
def _construct_file_key(request_id: str, config: StorageConfig) -> str:
    """Build the upload key ``<request_id>.<ext>``; only request-id naming
    is supported.

    Raises:
        ValueError: for any other configured naming scheme.
    """
    naming = config.details.get(StorageDetails.NAMING.value, FileNaming.request_id.value)
    if naming == FileNaming.request_id.value:
        return f'{request_id}.{get_extension(config.format)}'
    raise ValueError(f'File naming of {naming} not supported')
def extractMintywoodBlogspotCom(item):
    """Map a mintywood.blogspot.com feed item to a release message.

    Returns None for previews or items without volume/chapter info, and
    False when no known tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag in feed, series name, translation type)
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def getsupportlevel(of=0):
    """Detect how well the host OS is supported.

    Returns the level *name* from ``supportedsys`` when ``of`` is 0,
    otherwise the numeric level (0 = unsupported).
    """
    global supportedsys  # read-only; kept for parity with the original
    if getosname(0) == 'linux':
        import linux_os
    lvl = 0
    if rpieGlobals.osinuse == 'linux':
        # Debian-family gets the richest support, with board-specific tiers.
        if linux_os.is_command_found('dpkg') and linux_os.is_command_found('apt'):
            lvl = 1
            if linux_os.checkRPI():
                lvl = 10
            elif linux_os.checkRockPI():
                lvl = 9
            elif linux_os.checkOPI():
                lvl = 3
        elif linux_os.is_command_found('pacman'):
            lvl = 2
        elif linux_os.is_command_found('apk'):
            lvl = 4
    return supportedsys[lvl] if of == 0 else lvl
class OptionSeriesAreasplinerangeSonificationContexttracksMappingNoteduration(Options):
    """Generated Highcharts options accessor.

    NOTE(review): each option below is defined twice (getter then setter);
    the generated original decorated these with ``@property`` and
    ``@<name>.setter``, which were lost during extraction -- as written the
    second ``def`` simply overwrites the first.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the call below lost its ``@`` during extraction; in Coconut's
# generated runtime it is the decorator
# ``@_coconut.functools.wraps(_coconut.itertools.tee)``.  As written it is a
# no-op expression statement.
_coconut.functools.wraps(_coconut.itertools.tee)
def tee(iterable, n=2):
    """Coconut's tee: like itertools.tee, but returns the iterable itself
    (n times) when it is safely re-iterable, and tries shallow copies of
    indexable/sized objects before falling back to itertools.tee."""
    if (n < 0):
        raise _coconut.ValueError('tee: n cannot be negative')
    elif (n == 0):
        return ()
    elif (n == 1):
        return (iterable,)
    elif _coconut.isinstance(iterable, _coconut.reiterables):
        # Re-iterable: hand out the same object n times.
        return ((iterable,) * n)
    else:
        if ((_coconut.getattr(iterable, '__getitem__', None) is not None) or _coconut.isinstance(iterable, (_coconut.tee_type, _coconut.abc.Sized, _coconut.abc.Container))):
            # Try to satisfy the request with shallow copies.
            existing_copies = [iterable]
            while (_coconut.len(existing_copies) < n):
                try:
                    copy = _coconut.copy.copy(iterable)
                except _coconut.TypeError:
                    # Not copyable after all; fall through to itertools.tee.
                    break
                else:
                    existing_copies.append(copy)
            else:
                return _coconut.tuple(existing_copies)
        return _coconut.itertools.tee(iterable, n)
def write_df_to_relation(adapter: BigQueryAdapter, data: pd.DataFrame, relation: BaseRelation) -> AdapterResponse:
    """Load ``data`` into the BigQuery table behind ``relation`` (replacing
    its contents) and report the resulting row count."""
    assert (adapter.type() == 'bigquery')
    project: str = relation.database
    dataset: str = relation.schema
    table: str = relation.identifier
    with new_connection(adapter, 'fal-bigquery:write_df_to_relation') as conn:
        connection_manager: BigQueryConnectionManager = adapter.connections
        client: bigquery.Client = conn.handle
        table_ref = bigquery.TableReference(bigquery.DatasetReference(project, dataset), table)
        # WRITE_TRUNCATE replaces existing rows; the empty schema list lets
        # BigQuery infer the schema from the dataframe's parquet encoding.
        job_config = bigquery.LoadJobConfig(schema=[], source_format='PARQUET', write_disposition=WriteDisposition.WRITE_TRUNCATE)
        with connection_manager.exception_handler('START JOB'):
            job = client.load_table_from_dataframe(data, table_ref, job_config=job_config)
        # Fall back to a 5-minute timeout when none is configured.
        timeout = (connection_manager.get_job_execution_timeout_seconds(conn) or 300)
        with connection_manager.exception_handler('LOAD TABLE'):
            adapter.poll_until_job_completes(job, timeout)
        query_table = client.get_table(job.destination)
        num_rows = query_table.num_rows
        return AdapterResponse('OK', rows_affected=num_rows)
class TestPermutation(unittest.TestCase):
    """Table of cases for an is-permutation predicate.

    NOTE(review): ``test_permutation`` takes an extra ``func`` parameter,
    so it cannot run under plain unittest; a parameterizing decorator that
    injected the implementation under test was probably lost during
    extraction -- confirm.
    """
    def test_permutation(self, func):
        # None/empty inputs are not permutations; the comparison is
        # case-sensitive and whitespace counts as a character.
        self.assertEqual(func(None, 'foo'), False)
        self.assertEqual(func('', 'foo'), False)
        self.assertEqual(func('Nib', 'bin'), False)
        self.assertEqual(func('act', 'cat'), True)
        self.assertEqual(func('a ct', 'ca t'), True)
        self.assertEqual(func('dog', 'doggo'), False)
        print('Success: test_permutation')
class FindDup(LoopIR.LoopIR_Do):
    """Visitor that detects whether the *same* statement object appears
    twice in a LoopIR proc.

    Bug fix: the original defined a ``result`` method that was immediately
    shadowed by the ``self.result`` attribute assigned in ``__init__``,
    making the method unreachable.  ``result`` is now a read-only property
    backed by ``_result``; external reads of ``obj.result`` keep working.
    """

    def __init__(self, proc):
        self._result = False
        self.env = []   # every statement object visited so far
        super().__init__(proc)

    @property
    def result(self):
        """True when a duplicate statement object was found."""
        return self._result

    def do_s(self, s):
        # Identity comparison on purpose: a duplicate means the identical
        # statement object is reachable twice, not a structural equal.
        for e in self.env:
            if s is e:
                self._result = True
                print(s)
        self.env.append(s)
        super().do_s(s)
class BulkUpdateDictionaryListRequest(ModelNormal):
    # NOTE(review): OpenAPI-generator model.  The bare ``_property`` and
    # ``_js_args_to_python_args`` names below are decorators that lost
    # their ``@`` during extraction (``@cached_property`` and
    # ``@convert_js_args_to_python_args`` in the generated original).
    # Regenerate from the OpenAPI spec rather than hand-patching.
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        # Unknown keys may carry any primitive/container type.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Attribute name -> accepted types.
        lazy_import()
        return {'items': ([BulkUpdateDictionaryItem],)}
    _property
    def discriminator():
        return None
    # Python attribute -> JSON key.
    attribute_map = {'items': 'items'}
    read_only_vars = {}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Build an instance from raw API data (read-only attrs allowed)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when additional properties are disallowed.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Public constructor; rejects positional args and read-only attrs."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only be set via _from_openapi_data.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
# NOTE(review): the bare name below is a decorator that lost its ``@``
# during extraction -- in Django this is ``@sync_and_async_middleware``,
# marking the factory as providing both sync and async variants.
_and_async_middleware
def apps_middleware(get_response):
    """Middleware factory that points each request at the dynamically built
    apps URLconf; returns an async wrapper when ``get_response`` is a
    coroutine function, otherwise a sync one."""
    if iscoroutinefunction(get_response):
        async def middleware(request):
            request.urlconf = (await apps_urlconf_async())
            return (await get_response(request))
    else:
        def middleware(request):
            request.urlconf = apps_urlconf()
            return get_response(request)
    return middleware
class OptionPlotoptionsPolygonSonificationContexttracksMappingPlaydelay(Options):
    """Generated Highcharts options accessor.

    NOTE(review): each option below is defined twice (getter then setter);
    the generated original decorated these with ``@property`` and
    ``@<name>.setter``, which were lost during extraction -- as written the
    second ``def`` simply overwrites the first.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionPlotoptionsArearangeSonificationDefaultinstrumentoptionsMappingTremolo(Options):
    """Generated Highcharts options accessor for tremolo sub-options.

    NOTE(review): these were presumably ``@property``-decorated in the
    generated original; the decorators were lost during extraction.
    """
    def depth(self) -> 'OptionPlotoptionsArearangeSonificationDefaultinstrumentoptionsMappingTremoloDepth':
        # Lazily materialized sub-configuration object.
        return self._config_sub_data('depth', OptionPlotoptionsArearangeSonificationDefaultinstrumentoptionsMappingTremoloDepth)
    def speed(self) -> 'OptionPlotoptionsArearangeSonificationDefaultinstrumentoptionsMappingTremoloSpeed':
        return self._config_sub_data('speed', OptionPlotoptionsArearangeSonificationDefaultinstrumentoptionsMappingTremoloSpeed)
class FaucetStackTopoChangeTest(FaucetMultiDPTestBase):
    """Verify stacking emits a STACK_TOPO_CHANGE event whose graph contains
    one node per datapath."""
    # NOTE(review): NUM_HOSTS is referenced below but not defined here --
    # presumably inherited from FaucetMultiDPTestBase; confirm.
    NUM_DPS = 3
    def test_graph_object(self):
        """Bring up a 3-DP stack and inspect the topology-change event."""
        self.set_up(stack=True, n_dps=self.NUM_DPS, n_tagged=self.NUM_HOSTS, switch_to_switch_links=2)
        self._enable_event_log()
        self.verify_stack_up()
        stack_event_found = False
        with open(self.event_log, 'r', encoding='utf-8') as event_log_file:
            # Each event log line is a standalone JSON document.
            for event_log_line in event_log_file.readlines():
                event = json.loads(event_log_line.strip())
                if ('STACK_TOPO_CHANGE' in event):
                    stack_event_found = True
                    graph = event.get('STACK_TOPO_CHANGE').get('graph')
                    self.assertTrue(graph)
                    # One graph node per datapath in the stack.
                    node_count = len(graph.get('nodes'))
                    self.assertEqual(node_count, 3, ('Number of nodes in graph object is %s (!=3)' % node_count))
        self.assertTrue(stack_event_found)
def _hdf5_tree_editor(selected=''):
    """Build a read-only TraitsUI TreeEditor for the HDF5 node hierarchy
    (files root -> file -> nested groups -> leaf arrays); ``selected`` is
    the name of the trait bound to the current selection."""
    return ui.TreeEditor(nodes=[ui.TreeNode(node_for=[Hdf5FilesNode], auto_open=True, children='files', label='name', view=no_view), ui.TreeNode(node_for=[Hdf5FileNode], auto_open=True, children='groups_and_arrays', label='name', view=no_view), ui.TreeNode(node_for=[Hdf5GroupNode], auto_open=False, children='groups_and_arrays', label='name', view=no_view), ui.TreeNode(node_for=[Hdf5ArrayNode], auto_open=False, children='', label='name', view=no_view)], editable=False, selected=selected)
def train(model, train_loader, val_loader, optimizer, init_lr=0.002, checkpoint_dir=None, checkpoint_interval=None, nepochs=None, clip_thresh=1.0):
    """Train ``model`` with a discretized mixture-of-logistics loss.

    Relies on module globals: ``use_cuda``, ``hparams``, ``logfile_name``,
    ``learning_rate_decay``, ``save_checkpoint``, ``log_value`` and the
    mutated counters ``global_step``/``global_epoch``.

    Improvements over the original: removed unused locals (``linear_dim``,
    ``l``, ``running_loss_mol1/2``, ``grad_norm``) and the no-op
    ``loss_weight = 1`` scaling, and the per-epoch log file is now managed
    with a ``with`` block (opened only when written).
    ``val_loader`` is accepted for interface compatibility but unused here.
    """
    global global_step, global_epoch
    model.train()
    if use_cuda:
        model = model.cuda()
    criterion = DiscretizedMixturelogisticLoss()
    if hparams.exponential_moving_average is not None:
        # Track an exponential moving average of all trainable parameters.
        ema = ExponentialMovingAverage(hparams.ema_decay)
        for name, param in model.named_parameters():
            if param.requires_grad:
                ema.register(name, param.data)
    else:
        ema = None
    while global_epoch < nepochs:
        running_loss = 0.0
        for step, (mel, x) in tqdm(enumerate(train_loader)):
            # Stepwise learning-rate schedule.
            current_lr = learning_rate_decay(init_lr, global_step)
            for param_group in optimizer.param_groups:
                param_group['lr'] = current_lr
            optimizer.zero_grad()
            mel, x = Variable(mel), Variable(x)
            if use_cuda:
                mel, x = mel.cuda(), x.cuda()
            logits, targets = model(mel, x)
            loss = criterion(logits.transpose(1, 2), targets)
            loss.backward(retain_graph=False)
            # Clip gradients for stability (norm value itself is unused).
            torch.nn.utils.clip_grad_norm_(model.parameters(), clip_thresh)
            optimizer.step()
            if ema is not None:
                for name, param in model.named_parameters():
                    if name in ema.shadow:
                        ema.update(name, param.data)
            if (global_step % checkpoint_interval) == 0:
                save_checkpoint(model, optimizer, global_step, checkpoint_dir, global_epoch, ema=ema)
                print('Saved ema')
            log_value('loss', float(loss.item()), global_step)
            global_step += 1
            running_loss += loss.item()
        averaged_loss = running_loss / len(train_loader)
        log_value('loss (per epoch)', averaged_loss, global_epoch)
        with open(logfile_name, 'a') as h:
            h.write('Loss after epoch ' + str(global_epoch) + ': ' + format(running_loss / len(train_loader)) + '\n')
        global_epoch += 1
# NOTE(review): the two bare lines below are decorators that lost their
# ``@`` during extraction (most likely a click command registration such as
# ``@cli.command()`` plus a shared error-handling decorator); restore them
# before running this module.
()
_decorator
def addresses():
    """Fetch the account's addresses and print them either as raw JSON
    (when ``JSON_OUTPUT`` is set) or as a formatted table."""
    addresses_data = API_CLIENT.get_addresses().get('data')
    if JSON_OUTPUT:
        _print_json(addresses_data)
        return
    # Table columns and the matching extractor for each address record.
    headers = ['Type', 'Country', 'City', 'Zip code', 'Street', 'Number', 'Address line 1', 'Address line 2', 'Created', 'Updated']
    keys = ['type', 'countryName', 'cityName', 'zipCode', 'streetName', 'houseNumberBlock', 'addressLine1', 'addressLine2', _datetime_extractor('created'), _datetime_extractor('updated')]
    table = _create_table_from_dict(headers, keys, addresses_data, numalign='right')
    click.echo(table)
def offset_single(mu):
    """Plot fullspace responses vs. offset for a single cutoff ``mu``.

    Uses enclosing-scope values ``rel_error``, ``noise_floor``, ``oresp``,
    ``offs``, ``freq`` and ``model`` plus the helpers ``add_noise`` and
    ``figure`` -- presumably defined in the surrounding script (TODO confirm).
    """
    inp = {'rel_error': rel_error, 'noise_floor': noise_floor, 'mu': mu}
    # Same response contaminated with two different noise models.
    onoise_reim = add_noise(oresp, 'gaussian_correlated', **inp)
    onoise_comp = add_noise(oresp, 'white_noise', **inp)
    (fig, axs) = figure(offs, oresp, onoise_reim, onoise_comp)
    # Raw f-string: '\Omega' and '\,' are LaTeX commands, not Python escapes
    # (the non-raw form emits an invalid-escape DeprecationWarning).
    fig.suptitle(rf"Inline $E_{{xx}}$; $s_z=r_z=0$; $f=${freq} Hz; fullspace of {model['res']} $\Omega\,$m; $\mu=${mu}", fontsize=20)
    for i in range(3):
        axs[(0, i)].set_ylim([1e-19, 3e-10])
    for i in range(4):
        axs[(1, i)].set_xlabel('Offset (m)')
        axs[(1, i)].set_yscale('log')
        axs[(1, i)].set_ylim([0.01, 1000000.0])
def register(registry):
    """Register the table-editor query handlers with *registry*."""
    _SimpleEditorWithCell.register(registry)
    interaction_handlers = [
        (Selected, _query_table_editor_selected),
        (SelectedIndices, _query_table_editor_selected_indices),
    ]
    for interaction, handler in interaction_handlers:
        registry.register_interaction(target_class=SimpleEditor, interaction_class=interaction, handler=handler)
class TSFCKernel(Cached):
    """Memory- and disk-cached wrapper around TSFC form compilation.

    NOTE(review): the ``_cache_*`` methods take ``cls`` first, which suggests
    stripped ``@classmethod`` decorators -- confirm against the ``Cached``
    base-class protocol.
    """
    # In-memory cache shared by all instances, keyed on (digest, comm handle).
    _cache = {}
    # On-disk cache root; overridable via FIREDRAKE_TSFC_KERNEL_CACHE_DIR.
    _cachedir = environ.get('FIREDRAKE_TSFC_KERNEL_CACHE_DIR', path.join(tempfile.gettempdir(), ('firedrake-tsfc-kernel-cache-uid%d' % getuid())))
    def _cache_lookup(cls, key):
        """Return the cached kernel for ``key``, falling back to the disk cache."""
        (key, comm) = key
        commkey = comm.py2f()
        assert (commkey != MPI.COMM_NULL.py2f())
        return (cls._cache.get((key, commkey)) or cls._read_from_disk(key, comm))
    def _read_from_disk(cls, key, comm):
        """Load a pickled kernel from the shared disk cache.

        Rank 0 reads the gzipped file and broadcasts the raw bytes; every
        rank then unpickles and memoises the result.  Raises KeyError when
        the object is absent (val stays None on all ranks).
        """
        if (comm.rank == 0):
            cache = cls._cachedir
            # Two-level layout: the first two hex digits shard the directory.
            (shard, disk_key) = (key[:2], key[2:])
            filepath = os.path.join(cache, shard, disk_key)
            val = None
            if os.path.exists(filepath):
                try:
                    with gzip.open(filepath, 'rb') as f:
                        val = f.read()
                except zlib.error:
                    # Corrupt or partially-written file: treat as a miss.
                    pass
            comm.bcast(val, root=0)
        else:
            val = comm.bcast(None, root=0)
        if (val is None):
            raise KeyError(f'Object with key {key} not found')
        return cls._cache.setdefault((key, comm.py2f()), pickle.loads(val))
    def _cache_store(cls, key, val):
        """Memoise ``val`` and (on rank 0) persist it to the disk cache atomically."""
        (key, comm) = key
        cls._cache[(key, comm.py2f())] = val
        _ensure_cachedir(comm=comm)
        if (comm.rank == 0):
            val._key = key
            (shard, disk_key) = (key[:2], key[2:])
            filepath = os.path.join(cls._cachedir, shard, disk_key)
            # NOTE(review): this local deliberately shadows the imported
            # `tempfile` module for the rest of the branch.
            tempfile = os.path.join(cls._cachedir, shard, ('%s_p%d.tmp' % (disk_key, os.getpid())))
            os.makedirs(os.path.join(cls._cachedir, shard), exist_ok=True)
            with gzip.open(tempfile, 'wb') as f:
                pickle.dump(val, f, 0)
            # Write-to-temp then rename makes the cache update atomic.
            os.rename(tempfile, filepath)
        comm.barrier()
    def _cache_key(cls, form, name, parameters, coefficient_numbers, constant_numbers, interface, diagonal=False):
        """Digest every compilation input into an (md5 hexdigest, comm) key."""
        return (md5(((((((form.signature() + name) + str(sorted(parameters.items()))) + str(coefficient_numbers)) + str(constant_numbers)) + str(type(interface))) + str(diagonal)).encode()).hexdigest(), form.ufl_domains()[0].comm)
    def __init__(self, form, name, parameters, coefficient_numbers, constant_numbers, interface, diagonal=False):
        """Compile ``form`` with TSFC and wrap each kernel as PyOP2 KernelInfo.

        ``_initialized`` short-circuits re-construction on cache hits.
        """
        if self._initialized:
            return
        tree = tsfc_compile_form(form, prefix=name, parameters=parameters, interface=interface, diagonal=diagonal, log=PETSc.Log.isActive())
        kernels = []
        for kernel in tree:
            # Map TSFC-local coefficient indices back to form-level numbers.
            coefficient_numbers_per_kernel = tuple(((coefficient_numbers[index], subindices) for (index, subindices) in kernel.coefficient_numbers))
            constant_numbers_per_kernel = constant_numbers
            events = (kernel.event,)
            pyop2_kernel = as_pyop2_local_kernel(kernel.ast, kernel.name, len(kernel.arguments), flop_count=kernel.flop_count, events=events)
            kernels.append(KernelInfo(kernel=pyop2_kernel, integral_type=kernel.integral_type, oriented=kernel.oriented, subdomain_id=kernel.subdomain_id, domain_number=kernel.domain_number, coefficient_numbers=coefficient_numbers_per_kernel, constant_numbers=constant_numbers_per_kernel, needs_cell_facets=False, pass_layer_arg=False, needs_cell_sizes=kernel.needs_cell_sizes, arguments=kernel.arguments, events=events))
        self.kernels = tuple(kernels)
        self._initialized = True
def extractTrashscnWordpressCom(item):
    """Build a release message for trashscn.wordpress.com feed items.

    Returns None for preview posts or items without a chapter/volume,
    False when no known tag matches.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    known_tags = (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel'))
    for tagname, series_name, tl_type in known_tags:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class _BroadcastTo():
    """Iterator yielding the elements of ``array`` broadcast from shape ``old``
    to shape ``new``, one value at a time.

    The flattened source is replayed in chunks: each run of ``length``
    elements is emitted ``expand`` times, and whole passes over the data are
    replayed ``repeat`` times -- emulating numpy-style broadcasting without
    materialising the result.
    """
    def __init__(self, array: ArrayLike, old: Shape, new: Shape) -> None:
        # Cursor state used by __next__; properly initialised in reset().
        self._loop1 = 0
        self._loop2 = 0
        self._chunk_subindex = 0
        self._chunk_max = 0
        self._chunk_index = 0
        self._chunk = []
        self.data = ravel(array)
        self.shape = new
        # Broadcasting to a shape containing 0 yields no elements at all.
        self.empty = (0 in new)
        self.different = (old != new)
        if self.empty:
            self.amount = self.length = self.expand = self.repeat = 0
        elif self.different:
            if (len(old) > 1):
                self.amount = prod(old[:(- 1)])
                self.length = old[(- 1)]
            else:
                self.amount = old[(- 1)]
                self.length = 1
            # Per-axis expansion factors (0 stays 0 for degenerate axes).
            diff = [(int((x / y)) if y else y) for (x, y) in zip(new, old)]
            repeat = (prod(diff[:(- 1)]) if (len(old) > 1) else 1)
            expand = diff[(- 1)]
            if ((len(diff) > 1) and (diff[(- 2)] > 1)):
                # Expanding a middle axis swaps the roles of the two factors.
                self.repeat = expand
                self.expand = repeat
            else:
                self.repeat = repeat
                self.expand = expand
        else:
            # Shapes already match: a single straight pass over the data.
            self.amount = len(self.data)
            self.length = 1
            self.expand = 1
            self.repeat = 1
        self.reset()
    def reset(self) -> None:
        """Rewind the iterator to the first broadcast element."""
        self._loop1 = self.repeat
        self._loop2 = self.expand
        self._chunk_subindex = 0
        self._chunk_max = (self.amount * self.length)
        self._chunk_index = 0
    def __next__(self) -> float:
        """Return the next broadcast element, or raise StopIteration."""
        if self._loop1:
            d = self.data[(self._chunk_index + self._chunk_subindex)]
            self._chunk_subindex += 1
            if (self._chunk_subindex >= self.length):
                # Finished one run of `length` source elements.
                self._loop2 -= 1
                self._chunk_subindex = 0
                if (not self._loop2):
                    # Run emitted `expand` times; advance to the next run.
                    self._chunk_index += self.length
                    self._loop2 = self.expand
                if (self._chunk_index >= self._chunk_max):
                    # Completed a full pass over the data; maybe replay it.
                    self._loop1 -= 1
                    if self._loop1:
                        self._chunk_index = 0
            return d
        raise StopIteration
    def __iter__(self) -> Iterator[float]:
        return self
class Wallet(MixinMeta):
    """Wallet mixin: moves currency between a per-user wallet and the Red bank.

    NOTE(review): the bare ``()`` / ``_disabled_check()`` / ``_only()`` /
    ``(name='set')`` lines interleaved below look like decorator expressions
    whose ``@`` was lost (decompilation residue) -- confirm against the
    original source.
    """
    async def walletdisabledcheck(self, ctx):
        """Return True when the wallet feature is enabled in the current scope."""
        if (await bank.is_global()):
            return (not (await self.config.disable_wallet()))
        return (not (await self.config.guild(ctx.guild).disable_wallet()))
    async def walletdeposit(self, ctx, user, amount):
        """Add ``amount`` to the wallet, capping at the configured maximum.

        Clamps to the cap and then raises ValueError when it was exceeded.
        """
        conf = (await self.configglobalcheckuser(user))
        main_conf = (await self.configglobalcheck(ctx))
        wallet = (await conf.wallet())
        max_bal = (await main_conf.wallet_max())
        amount = (wallet + amount)
        if (amount <= max_bal):
            (await conf.wallet.set(amount))
        else:
            # Clamp to the cap, then signal the overflow to the caller.
            (await conf.wallet.set(max_bal))
            raise ValueError
    async def walletremove(self, user, amount):
        """Subtract ``amount`` from the wallet, flooring at zero."""
        conf = (await self.configglobalcheckuser(user))
        wallet = (await conf.wallet())
        if (amount < wallet):
            (await conf.wallet.set((wallet - amount)))
        else:
            (await conf.wallet.set(0))
    async def walletwithdraw(self, user, amount):
        """Subtract ``amount``; raise ValueError on insufficient funds."""
        conf = (await self.configglobalcheckuser(user))
        wallet = (await conf.wallet())
        if (amount < wallet):
            (await conf.wallet.set((wallet - amount)))
        else:
            raise ValueError
    async def walletset(self, user, amount):
        """Set the wallet balance to ``amount`` unconditionally."""
        conf = (await self.configglobalcheckuser(user))
        (await conf.wallet.set(amount))
    async def bankdeposit(self, ctx, user, amount):
        """Move cash from the wallet into the bank, honouring the bank's cap."""
        conf = (await self.configglobalcheckuser(user))
        wallet = (await conf.wallet())
        deposit = abs(amount)
        if (deposit > wallet):
            return (await ctx.send('You have insufficent funds to complete this deposit.'))
        try:
            (await bank.deposit_credits(user, deposit))
            msg = f'You have succesfully deposited {deposit} {(await bank.get_currency_name(ctx.guild))} into your bank account.'
        except BalanceTooHigh as e:
            # Deposit only as much as the bank account can still hold.
            deposit = (e.max_balance - (await bank.get_balance(user)))
            (await bank.deposit_credits(user, deposit))
            msg = f'Your transaction was limited to {deposit} {e.currency_name} as your bank account has reached the max balance.'
        (await self.walletset(user, (wallet - deposit)))
        return (await ctx.send(msg))
    async def walletbalance(self, user):
        """Return the user's current wallet balance."""
        conf = (await self.configglobalcheckuser(user))
        return (await conf.wallet())
    async def bankwithdraw(self, ctx, user, amount):
        """Move cash from the bank into the wallet, honouring the wallet cap."""
        conf = (await self.configglobalcheckuser(user))
        mainconf = (await self.configglobalcheck(ctx))
        max_bal = (await mainconf.wallet_max())
        wallet = (await conf.wallet())
        try:
            if ((wallet + amount) > max_bal):
                return (await ctx.send(f'You have attempted to withdraw more cash than the maximum balance allows. The maximum balance is {humanize_number(max_bal)} {(await bank.get_currency_name(ctx.guild))}.'))
            (await bank.withdraw_credits(user, amount))
            (await self.walletset(user, (wallet + amount)))
            return (await ctx.send(f'You have succesfully withdrawn {humanize_number(amount)} {(await bank.get_currency_name(ctx.guild))} from your bank account.'))
        except ValueError:
            return (await ctx.send('You have insufficent funds to complete this withdrawal.'))
    ()
    _disabled_check()
    _only()
    async def wallet(self, ctx):
        # Command-group stub; subcommands carry the behaviour.
        ()
    _only()
    async def balance(self, ctx, user: discord.Member=None):
        """Show a member's wallet balance (defaults to the invoker)."""
        if (user is None):
            user = ctx.author
        balance = (await self.walletbalance(user))
        currency = (await bank.get_currency_name(ctx.guild))
        (await ctx.send(f"{user.display_name}'s wallet balance is {humanize_number(balance)} {currency}"))
    ()
    _only()
    async def leaderboard(self, ctx, top: int=10):
        """Paginated leaderboard of the richest wallets (10 per page)."""
        if (top < 1):
            top = 10
        guild = ctx.guild
        if (await bank.is_global()):
            raw_accounts = (await self.config.all_users())
            if (guild is not None):
                # Drop accounts of users no longer in this guild.
                tmp = raw_accounts.copy()
                for acc in tmp:
                    if (not guild.get_member(acc)):
                        del raw_accounts[acc]
        else:
            raw_accounts = (await self.config.all_members(guild))
        walletlist = sorted(raw_accounts.items(), key=(lambda x: x[1]['wallet']), reverse=True)[:top]
        try:
            # Column width derived from the largest balance shown.
            bal_len = len(str(walletlist[0][1]['wallet']))
        except IndexError:
            return (await ctx.send('There are no users with a wallet balance.'))
        pound_len = len(str(len(walletlist)))
        header = '{pound:{pound_len}}{score:{bal_len}}{name:2}\n'.format(pound='#', name='Name', score='Score', bal_len=(bal_len + 6), pound_len=(pound_len + 3))
        highscores = []
        pos = 1
        temp_msg = header
        for acc in walletlist:
            try:
                name = guild.get_member(acc[0]).display_name
            except AttributeError:
                # Member left; only the bot owner sees the raw id.
                user_id = (f'({acc[0]})' if (await ctx.bot.is_owner(ctx.author)) else '')
                name = f'{user_id}'
            balance = acc[1]['wallet']
            if (acc[0] != ctx.author.id):
                temp_msg += f'''{pos}. {balance: <{(bal_len + 5)}} {name}
'''
            else:
                temp_msg += f'''{pos}. {balance: <{(bal_len + 5)}} <<{ctx.author.display_name}>>
'''
            if ((pos % 10) == 0):
                highscores.append(box(temp_msg, lang='md'))
                temp_msg = header
            pos += 1
        if (temp_msg != header):
            highscores.append(box(temp_msg, lang='md'))
        if highscores:
            (await menu(ctx, highscores, DEFAULT_CONTROLS))
    _disabled_check()
    _global_setting_admin()
    _only()
    (name='set')
    async def _walletset(self, ctx, user: discord.Member, amount: int):
        """Admin command: set a member's wallet balance outright."""
        conf = (await self.configglobalcheck(ctx))
        maxw = (await conf.wallet_max())
        if (amount > maxw):
            return (await ctx.send(f"{user.display_name}'s wallet balance cannot rise above {humanize_number(maxw)} {(await bank.get_currency_name(ctx.guild))}."))
        (await self.walletset(user, amount))
        (await ctx.send(f"{ctx.author.display_name} has set {user.display_name}'s wallet balance to {humanize_number(amount)} {(await bank.get_currency_name(ctx.guild))}."))
    ()
    _disabled_check()
    _only()
    (1, 5, commands.BucketType.user)
    async def deposit(self, ctx, amount: Union[(int, str)]):
        """Deposit an amount (or 'all') from the wallet into the bank."""
        cdcheck = (await self.cdcheck(ctx, 'depositcd'))
        if isinstance(cdcheck, tuple):
            embed = (await self.cdnotice(ctx.author, cdcheck[1], 'deposit'))
            return (await ctx.send(embed=embed))
        if isinstance(amount, str):
            if (amount != 'all'):
                return (await ctx.send('You must provide a valid number or the string `all`.'))
            amount = (await self.walletbalance(ctx.author))
        (await self.bankdeposit(ctx, ctx.author, amount))
    ()
    _disabled_check()
    _only()
    (1, 5, commands.BucketType.user)
    async def withdraw(self, ctx, amount: int):
        """Withdraw an amount from the bank into the wallet."""
        cdcheck = (await self.cdcheck(ctx, 'withdrawcd'))
        if isinstance(cdcheck, tuple):
            embed = (await self.cdnotice(ctx.author, cdcheck[1], 'withdraw'))
            return (await ctx.send(embed=embed))
        (await self.bankwithdraw(ctx, ctx.author, amount))
def filter_firewall_ssh_host_key_data(json):
    """Return only the known firewall_ssh_host_key options that are present and non-None."""
    option_list = ['hostname', 'ip', 'name', 'nid', 'port', 'public_key', 'status', 'type', 'usage']
    json = remove_invalid_fields(json)
    # Keep recognised keys whose values are actually set.
    return {option: json[option] for option in option_list if option in json and json[option] is not None}
def test_eliminate_dead_code_forwarding4():
    """Cursors into an `if` removed by eliminate_dead_code must not forward."""
    # NOTE(review): `x: (f32 DRAM)` looks like a garbled Exo annotation
    # (normally `x: f32 @ DRAM`), and the inner proc presumably carried a
    # stripped @proc decorator -- confirm against the original test file.
    def foo():
        x: (f32 DRAM)
        for i in seq(0, 8):
            # Condition is statically false, so the whole `if` is dead code.
            if ((i + 3) < (- 1)):
                x = 0.0
        pass
    loop_cursor = foo.find_loop('i')
    if_cursor = loop_cursor.body()[0]
    if_true_stmt = if_cursor.body()[0]
    foo = eliminate_dead_code(foo, 'if _:_ #0')
    # The loop survives and forwards to the transformed procedure...
    loop_cursor = foo.forward(loop_cursor)
    # ...but cursors into the deleted `if` are invalidated.
    with pytest.raises(InvalidCursorError, match=''):
        if_cursor = foo.forward(if_cursor)
    with pytest.raises(InvalidCursorError, match=''):
        if_true_stmt = foo.forward(if_true_stmt)
    assert isinstance(loop_cursor, ForCursor)
    assert (len(loop_cursor.body()) == 1)
    assert isinstance(loop_cursor.body()[0], PassCursor)
def test_search_config_file_sub_directories(workdir):
    """SearchConfigDir walks up from a subdirectory until .mu_repo (or .git) is found."""
    a_dir = os.path.join(workdir, 'a')
    b_dir = os.path.join(a_dir, 'b')
    c_dir = os.path.join(b_dir, 'c')
    os.makedirs(c_dir)
    config_file = os.path.join(a_dir, '.mu_repo')
    with open(config_file, 'w'):
        pass
    expected = os.path.dirname(config_file)
    # Any directory at or below `a` resolves to `a`; above it, nothing is found.
    assert (mu_repo.SearchConfigDir(c_dir) == expected)
    assert (mu_repo.SearchConfigDir(b_dir) == expected)
    assert (mu_repo.SearchConfigDir(a_dir) == expected)
    assert (mu_repo.SearchConfigDir(workdir) is None)
    # The recursion limit stops the walk before reaching the config directory.
    assert (mu_repo.SearchConfigDir(c_dir, recurse_limit=1) is None)
    assert (mu_repo.SearchConfigDir(a_dir, recurse_limit=0) == expected)
    # A .git directory also terminates the upward search.
    os.makedirs(os.path.join(b_dir, '.git'))
    assert (mu_repo.SearchConfigDir(c_dir) == b_dir)
class WriterContext(threading.Thread):
    """Writer thread for read/write-lock tests.

    After sleeping ``init_sleep_time`` seconds, acquires the writer side of
    ``rw_lock``, sleeps ``sleep_time`` seconds while holding it, appends
    ``to_write`` to ``buffer_``, and records wall-clock timestamps in
    ``entry_time`` / ``exit_time``.
    """

    def __init__(self, buffer_, rw_lock, init_sleep_time, sleep_time, to_write):
        super().__init__()
        self._buffer = buffer_
        self._rw_lock = rw_lock
        self._init_sleep_time = init_sleep_time
        self._sleep_time = sleep_time
        self._to_write = to_write
        # Timestamps taken just after acquiring and just after writing.
        self.entry_time = None
        self.exit_time = None

    def run(self):
        time.sleep(self._init_sleep_time)
        with self._rw_lock.writer_context():
            self.entry_time = time.time()
            time.sleep(self._sleep_time)
            self._buffer.append(self._to_write)
            self.exit_time = time.time()
.django_db
def test_nonzero_obligation_prevents_filter_out_negated_values(client, elasticsearch_account_index, basic_faba_with_object_class, monkeypatch, helpers):
    """Negative USSGL down-adjustments must not net out rows whose summed obligation is non-zero.

    NOTE(review): the leading `.django_db` above looks like a stripped
    `@pytest.mark.django_db` decorator -- confirm against the original file.
    """
    oc = major_object_class_with_children('001', [1])
    (baker.make('references.DisasterEmergencyFundCode', code='L', group_name=COVID_19_GROUP_NAME),)
    # Two FABA rows under DEFC L: obligations 1 and 5 with offsetting
    # down-adjustments, summing to obligation 6 and outlay 0.
    baker.make('awards.FinancialAccountsByAwards', disaster_emergency_fund=DisasterEmergencyFundCode.objects.filter(code='L').first(), submission=SubmissionAttributes.objects.all().first(), object_class=oc[0], transaction_obligated_amount=1, gross_outlay_amount_by_award_cpe=100, ussgl487200_down_adj_pri_ppaid_undel_orders_oblig_refund_cpe=(- 3), ussgl497200_down_adj_pri_paid_deliv_orders_oblig_refund_cpe=(- 6), award_id=None, piid='123', fain=None, uri=None, distinct_award_key='123||')
    baker.make('awards.FinancialAccountsByAwards', disaster_emergency_fund=DisasterEmergencyFundCode.objects.filter(code='L').first(), submission=SubmissionAttributes.objects.all().first(), object_class=oc[0], transaction_obligated_amount=5, gross_outlay_amount_by_award_cpe=(- 110), ussgl487200_down_adj_pri_ppaid_undel_orders_oblig_refund_cpe=3, ussgl497200_down_adj_pri_paid_deliv_orders_oblig_refund_cpe=16, award_id=None, piid='123', fain=None, uri=None, distinct_award_key='123||')
    setup_elasticsearch_test(monkeypatch, elasticsearch_account_index)
    helpers.patch_datetime_now(monkeypatch, 2022, 12, 31)
    helpers.reset_dabs_cache()
    resp = helpers.post_for_spending_endpoint(client, url, query='001 name', def_codes=['L'], spending_type='award')
    expected_results = [{'id': '001', 'code': '001', 'description': '001 name', 'award_count': 1, 'obligation': 6.0, 'outlay': 0, 'children': [{'id': '1', 'code': '0001', 'description': '0001 name', 'award_count': 1, 'obligation': 6.0, 'outlay': 0}]}]
    assert (resp.status_code == status.HTTP_200_OK)
    assert (resp.json()['results'] == expected_results)
    expected_totals = {'award_count': 1, 'obligation': 6.0, 'outlay': 0}
    assert (resp.json()['totals'] == expected_totals)
class DownloadDecoder(BaseDecoder):
    """Decoder that streams an HTTP response body to a file on disk."""
    media_type = '*/*'

    def __init__(self, download_dir=None):
        # Without an explicit directory, download to a temp location and
        # delete the file once the caller closes it.
        self._delete_on_close = download_dir is None
        self.download_dir = download_dir

    def decode(self, response):
        """Write the response content to disk and return a DownloadedFile."""
        base_url = response.url
        content_type = response.headers.get('content-type')
        content_disposition = response.headers.get('content-disposition')
        fd, temp_path = tempfile.mkstemp(suffix='.download')
        with os.fdopen(fd, 'wb') as handle:
            for chunk in response.iter_content(chunk_size=4096):
                handle.write(chunk)
        filename = _get_filename(base_url, content_type, content_disposition)
        target_dir = self.download_dir
        if target_dir is None:
            target_dir = os.path.dirname(temp_path)
        destination = os.path.join(target_dir, filename)
        if destination != temp_path:
            # Avoid clobbering an existing file, then move into place.
            destination = _unique_output_path(destination)
            shutil.move(temp_path, destination)
        stream = open(destination, 'rb')
        result = DownloadedFile(stream, destination, delete=self._delete_on_close)
        result.basename = filename
        return result
class OptionPlotoptionsTimelineSonificationContexttracksMappingGapbetweennotes(Options):
    """Generated accessor class for the gapBetweenNotes mapping options.

    NOTE(review): every option appears as a getter/setter pair sharing one
    name -- the @property / @name.setter decorators were likely stripped
    (as written, only the second definition survives at runtime); confirm
    against the upstream generated source.
    """
    def mapFunction(self):
        # Getter: no default.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def webtext2019zh():
    """Convert the web_text_zh QA dump into a tab-separated pretraining file.

    Reads every file under ./QA/web_text_zh/, keeps answers with star >= 5
    and content of at least 5 characters (after URL removal), and writes
    (content, dataset_name) rows to ./pretrain_datasets/output/webtext2019zh.txt.
    """
    # Dataset name is derived from this function's own name.
    datasets_name = sys._getframe().f_code.co_name
    out_path = './pretrain_datasets/output/{}.txt'.format(datasets_name)
    base_dir = './QA/web_text_zh/'
    # Context managers close the output and each input deterministically
    # (the original leaked the writer's file handle).
    with open(out_path, 'w') as out_file:
        writer = csv.writer(out_file, delimiter='\t')
        for name in os.listdir(base_dir):
            dir_in = base_dir + name
            with open(dir_in) as in_file:
                for line in tqdm(in_file):
                    line = json.loads(line)
                    title = line['title']
                    desc = line['desc']
                    # Join title and description with a literal ' \n ' marker
                    # when a description exists.
                    if desc:
                        prompt = (title + ' \\n ') + desc
                    else:
                        prompt = title
                    content = remove_url(prompt + line['content'])
                    # Quality filters: high-star answers, non-trivial length.
                    if int(line['star']) < 5:
                        continue
                    if len(content) < 5:
                        continue
                    writer.writerow([content, datasets_name])
class OptionSeriesAreasplineSonificationDefaultspeechoptionsPointgrouping(Options):
    """Generated accessor class for areaspline sonification point-grouping options.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name -- the @property / @name.setter decorators were likely stripped
    (as written, only the second definition survives at runtime); confirm
    against the upstream generated source.
    """
    def algorithm(self):
        # Getter: defaults to 'last'.
        return self._config_get('last')
    def algorithm(self, text: str):
        self._config(text, js_type=False)
    def enabled(self):
        # Getter: grouping is enabled by default.
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def groupTimespan(self):
        # Getter: default grouping window of 15 (ms).
        return self._config_get(15)
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)
    def prop(self):
        # Getter: group on the 'y' property by default.
        return self._config_get('y')
    def prop(self, text: str):
        self._config(text, js_type=False)
class BLEEventHandler(btle.DefaultDelegate):
    """bluepy delegate that forwards key-press notifications to a callback."""

    def __init__(self, keypressed_callback, KPHANDLE):
        btle.DefaultDelegate.__init__(self)
        # Handle of the characteristic whose notifications we react to.
        self.keypressed_handle = KPHANDLE
        self.keypressed_callback = keypressed_callback

    def handleNotification(self, cHandle, data):
        # Ignore notifications from any other characteristic.
        if cHandle != self.keypressed_handle:
            return
        self.keypressed_callback(data[0])
class UfbtSdkDeployer():
    """Downloads and unpacks a Flipper SDK into the local uFBT state directory."""
    # Per-deployment metadata file stored inside the current SDK directory.
    UFBT_STATE_FILE_NAME = 'ufbt_state.json'
    def __init__(self, ufbt_state_dir: str, toolchain_dir: str=None):
        self.ufbt_state_dir = Path(ufbt_state_dir)
        self.download_dir = (self.ufbt_state_dir / 'download')
        self.current_sdk_dir = (self.ufbt_state_dir / 'current')
        if toolchain_dir:
            self.toolchain_dir = (self.ufbt_state_dir / toolchain_dir)
        else:
            # Default toolchain location, overridable via FBT_TOOLCHAIN_PATH.
            self.toolchain_dir = (Path(os.environ.get('FBT_TOOLCHAIN_PATH', self.ufbt_state_dir.absolute())) / STATE_DIR_TOOLCHAIN_SUBDIR)
        self.state_file = (self.current_sdk_dir / self.UFBT_STATE_FILE_NAME)
    def get_previous_task(self) -> Optional[SdkDeployTask]:
        """Reconstruct the task of the last successful deployment, or None."""
        if (not os.path.exists(self.state_file)):
            return None
        with open(self.state_file, 'r') as f:
            ufbt_state = json.load(f)
        log.debug(f'get_previous_task() loaded state: ufbt_state={ufbt_state!r}')
        return SdkDeployTask.from_dict(ufbt_state)
    def deploy(self, task: SdkDeployTask) -> bool:
        """Fetch and extract the SDK described by ``task``; return success.

        Skips all work when the already-deployed SDK matches the requested
        version and hardware target (unless ``task.force`` is set).
        """
        log.info(f'Deploying SDK for {task.hw_target}')
        sdk_loader = SdkLoaderFactory.create_for_task(task, self.download_dir)
        sdk_target_dir = self.current_sdk_dir.absolute()
        log.info(f'uFBT SDK dir: {sdk_target_dir}')
        if ((not task.force) and os.path.exists(sdk_target_dir)):
            with open(self.state_file, 'r') as f:
                ufbt_state = json.load(f)
            # Some version channels must always redeploy.
            if (ufbt_state.get('version') in sdk_loader.ALWAYS_UPDATE_VERSIONS):
                log.info('Cannot determine current SDK version, updating')
            elif ((ufbt_state.get('version') == sdk_loader.get_metadata().get('version')) and (ufbt_state.get('hw_target') == task.hw_target)):
                log.info('SDK is up-to-date')
                return True
        try:
            sdk_component_path = sdk_loader.get_sdk_component(task.hw_target)
        except Exception as e:
            log.error(f'Failed to fetch SDK for {task.hw_target}: {e}')
            return False
        # Wipe any previous deployment before extracting the new archive.
        shutil.rmtree(sdk_target_dir, ignore_errors=True)
        ufbt_state = {'hw_target': task.hw_target, **sdk_loader.get_metadata()}
        log.info('Deploying SDK')
        with ZipFile(sdk_component_path, 'r') as zip_file:
            zip_file.extractall(sdk_target_dir)
        with open(self.state_file, 'w') as f:
            json.dump(ufbt_state, f, indent=4)
        log.info('SDK deployed.')
        return True
class TestLinearA98RGBPoperties(util.ColorAsserts, unittest.TestCase):
    """Channel accessor tests for the linear a98 RGB color space."""

    SAMPLE = 'color(--a98-rgb-linear 0.1 0.2 0.3)'

    def test_r(self):
        """Red channel reads back its value and accepts assignment."""
        color = Color(self.SAMPLE)
        self.assertEqual(color['r'], 0.1)
        color['r'] = 0.2
        self.assertEqual(color['r'], 0.2)

    def test_g(self):
        """Green channel reads back its value and accepts assignment."""
        color = Color(self.SAMPLE)
        self.assertEqual(color['g'], 0.2)
        color['g'] = 0.1
        self.assertEqual(color['g'], 0.1)

    def test_b(self):
        """Blue channel reads back its value and accepts assignment."""
        color = Color(self.SAMPLE)
        self.assertEqual(color['b'], 0.3)
        color['b'] = 0.1
        self.assertEqual(color['b'], 0.1)

    def test_alpha(self):
        """Alpha defaults to 1 and accepts assignment."""
        color = Color(self.SAMPLE)
        self.assertEqual(color['alpha'], 1)
        color['alpha'] = 0.5
        self.assertEqual(color['alpha'], 0.5)
def greens_func_numpy(east, north, mindist):
    """Biharmonic-spline Green's function r**2 * (ln(r) - 1), vectorised.

    Each distance is padded by ``mindist`` to keep the log away from zero.
    For r < 1 the algebraically equivalent form r * (ln(r**r) - r) is used,
    since r**r -> 1 as r -> 0 and keeps the logarithm well behaved.
    """
    radius = np.sqrt((east ** 2) + (north ** 2))
    radius += mindist
    output = np.empty_like(radius)
    near = radius < 1
    far = ~near
    # r * (ln(r**r) - r) == r**2 * (ln(r) - 1), but safe for tiny r.
    output[near] = radius[near] * (np.log(radius[near] ** radius[near]) - radius[near])
    output[far] = (radius[far] ** 2) * (np.log(radius[far]) - 1)
    return output
class ZoomFastFourierTransform(FourierTransform):
    """Fourier transform between two regular Cartesian grids via chirp-Z transforms.

    Per-dimension CZTs, phase shifts and quadrature weights are built lazily
    and cached, keyed on the complex dtype of the transformed field.
    """
    def __init__(self, input_grid, output_grid):
        """Validate and store the grids; CZT setup is deferred to first use.

        Raises ValueError for non-regular / non-Cartesian grids or when the
        grids' dimensionalities differ.
        """
        if ((not input_grid.is_regular) or (not input_grid.is_('cartesian'))):
            raise ValueError('The input grid should be regularly spaced in Cartesian coordinates.')
        if ((not output_grid.is_regular) or (not output_grid.is_('cartesian'))):
            raise ValueError('The output grid should be regularly spaced in Cartesian coordinates.')
        if (input_grid.ndim != output_grid.ndim):
            raise ValueError('The input_grid must have the same dimensions as the output_grid.')
        self.input_grid = input_grid
        self.output_grid = output_grid
        # Sentinel: forces _compute_shifts_and_weights() on first transform.
        self._current_dtype = None
    def _compute_shifts_and_weights(self, dtype):
        """(Re)build CZTs, phase shifts and weights when the dtype changes."""
        (float_dtype, complex_dtype) = _get_float_and_complex_dtype(dtype)
        if (complex_dtype != self._current_dtype):
            # CZT ratio/starting-point parameters for forward and backward.
            w = np.exp((((- 1j) * self.output_grid.delta) * self.input_grid.delta))
            a = np.exp(((1j * self.output_grid.zero) * self.input_grid.delta))
            inv_w = np.exp(((1j * self.input_grid.delta) * self.output_grid.delta))
            inv_a = np.exp((((- 1j) * self.input_grid.zero) * self.output_grid.delta))
            # One chirp-Z transform per dimension, forward and inverse.
            self.czts = [ChirpZTransform(n, m, ww, aa) for (n, m, ww, aa) in zip(self.input_grid.dims, self.output_grid.dims, w, a)]
            self.inv_czts = [ChirpZTransform(n, m, ww, aa) for (n, m, ww, aa) in zip(self.output_grid.dims, self.input_grid.dims, inv_w, inv_a)]
            # Phase ramps accounting for the grids' zero offsets.
            self.shifts = [np.exp((((- 1j) * x) * x0)) for (x, x0) in zip(self.output_grid.separated_coords, self.input_grid.zero)]
            self.inv_shifts = [np.exp(((1j * x) * x0)) for (x, x0) in zip(self.input_grid.separated_coords, self.output_grid.zero)]
            self.shifts = [s.astype(complex_dtype, copy=False) for s in self.shifts]
            self.inv_shifts = [s.astype(complex_dtype, copy=False) for s in self.inv_shifts]
            # Quadrature weights; the backward pass carries the 1/(2*pi)^ndim.
            self.input_weights = self.input_grid.weights.astype(float_dtype)
            self.output_weights = (self.output_grid.weights / ((2 * np.pi) ** self.output_grid.ndim)).astype(float_dtype)
            self._current_dtype = complex_dtype
    def forward(self, field):
        """Forward transform of ``field`` onto the output grid."""
        self._compute_shifts_and_weights(field.dtype)
        f = (field * self.input_weights).shaped
        # Apply the CZT and phase shift one axis at a time.
        for (i, (czt, shift)) in enumerate(zip(self.czts, self.shifts)):
            f = np.moveaxis(f, (- i), 0)
            f = (czt(f) * shift)
            f = np.moveaxis(f, (- i), 0)
        shape = (tuple(field.tensor_shape) + ((- 1),))
        return Field(f.reshape(shape), self.output_grid)
    def backward(self, field):
        """Inverse transform of ``field`` back onto the input grid."""
        self._compute_shifts_and_weights(field.dtype)
        f = (field * self.output_weights).shaped
        for (i, (czt, shift)) in enumerate(zip(self.inv_czts, self.inv_shifts)):
            f = np.moveaxis(f, (- i), 0)
            f = (czt(f) * shift)
            f = np.moveaxis(f, (- i), 0)
        shape = (tuple(field.tensor_shape) + ((- 1),))
        return Field(f.reshape(shape), self.input_grid)
def test_no_exo_floor_div_after_divide_loop_with_guard(golden):
    """C generated after divide_loop / cut_loop / divide_loop matches the golden output.

    NOTE(review): the inner procedure presumably carried a stripped @proc
    decorator -- confirm against the original test file.
    """
    def foo(N: size, x: f32[N]):
        for i in seq(0, N):
            x[i] = 0.0
    # Stage 0: split the loop by 8.
    foo = divide_loop(foo, foo.find_loop('i'), 8, ('io', 'ii'))
    (c_file, h_file) = compile_procs_to_strings([foo], 'test.h')
    code0 = f'''{h_file}
{c_file}'''
    # Stage 1: peel off the last (possibly partial) outer iteration.
    foo = cut_loop(foo, foo.find_loop('io'), '((N + 7) / (8)) - 1')
    (c_file, h_file) = compile_procs_to_strings([foo], 'test.h')
    code1 = f'''{h_file}
{c_file}'''
    # Stage 2: split the outer loop again by 4.
    foo = divide_loop(foo, foo.find_loop('io'), 4, ('ioo', 'ioi'))
    (c_file, h_file) = compile_procs_to_strings([foo], 'test.h')
    code2 = f'''{h_file}
{c_file}'''
    code = f'''{code0}
{code1}
{code2}
'''
    assert (code == golden)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.