code stringlengths 281 23.7M |
|---|
class TestHelloWorld(BaseSkillTestCase):
    """Skill tests for the fetchai hello_world behaviour."""

    path_to_skill = Path(ROOT_DIR, 'packages', 'fetchai', 'skills', 'hello_world')
    is_agent_to_agent_messages = False

    def setup_method(self):
        """Load the skill with a custom greeting injected via config overrides."""
        self.message = 'Hello Something Custom!'
        overrides = {'behaviours': {'hello_world': {'args': {'message': self.message}}}}
        super().setup(config_overrides=overrides)
        self.hello_world_behaviour = cast(HelloWorld, self._skill.skill_context.behaviours.hello_world)
        self.logger = self._skill.skill_context.logger

    def test_act(self):
        """act() returns None and logs the configured message at INFO level."""
        with patch.object(self.logger, 'log') as logger_mock:
            assert self.hello_world_behaviour.act() is None
            logger_mock.assert_any_call(logging.INFO, self.message)
class Versions():
    """Reports the versions of the GUI, grpc and protobuf packages."""

    @staticmethod
    def get():
        """Return a ``(gui_version, grpc_version, protobuf_version)`` tuple.

        Falls back to ``('none', 'none', 'none')`` when any of the
        dependencies is unavailable.

        Fix: the method took no ``self`` and was undecorated, so calling it
        on an instance raised TypeError — now a @staticmethod. The bare
        ``except:`` (which also swallowed KeyboardInterrupt/SystemExit) is
        narrowed to ``except Exception``.
        """
        try:
            from google.protobuf import __version__ as proto_version
            from grpc import _grpcio_metadata as grpcmeta
            return (gui_version, grpcmeta.__version__, proto_version)
        except Exception:
            # Broad by design: grpc/protobuf may be missing (ImportError)
            # and gui_version is resolved from module globals (NameError
            # when absent).
            return ('none', 'none', 'none')
def lazy_import():
    """Import the Fastly model classes on demand and publish them in this
    module's globals, breaking circular-import issues at module load time."""
    from fastly.model.logging_common_response import LoggingCommonResponse
    from fastly.model.logging_generic_common_response import LoggingGenericCommonResponse
    from fastly.model.logging_openstack_additional import LoggingOpenstackAdditional
    from fastly.model.service_id_and_version_string import ServiceIdAndVersionString
    from fastly.model.timestamps import Timestamps
    # Register each freshly imported class under its own name.
    for model_cls in (LoggingCommonResponse, LoggingGenericCommonResponse,
                      LoggingOpenstackAdditional, ServiceIdAndVersionString,
                      Timestamps):
        globals()[model_cls.__name__] = model_cls
def parseIntentFilter(parcel: ParcelParser, parent: Field) -> None:
    """Parse a serialized Android IntentFilter out of ``parcel`` into ``parent``.

    Field order mirrors IntentFilter.writeToParcel: actions, several
    optional string vectors (each guarded by an int32 null-marker),
    parcelable vectors, then trailing int32 fields.
    """
    def read_optional_string_vector(field_name: str) -> None:
        # An int32 marker precedes each optional vector; a non-zero value
        # means the vector is present.
        marker = parcel.parse_field(field_name + '-nullcheck', '', parcel.readInt32, parent)
        if marker.content:
            parcel.parse_field(field_name, '', parcel.readString16Vector, parent)

    parcel.parse_field('mActions', '', parcel.readString16Vector, parent)
    for optional_field in ('mCategories', 'mDataSchemes', 'mStaticDataTypes',
                           'mDataTypes', 'mMimeGroups'):
        read_optional_string_vector(optional_field)
    parcel.parse_field('mDataSchemeSpecificParts', '', functools.partial(parcel.readParcelableVectorWithoutNullChecks, 'PatternMatcher'), parent)
    parcel.parse_field('mDataAuthorities', '', functools.partial(parcel.readParcelableVectorWithoutNullChecks, 'AuthorityEntry'), parent)
    parcel.parse_field('mDataPaths', '', functools.partial(parcel.readParcelableVectorWithoutNullChecks, 'PatternMatcher'), parent)
    for int_field in ('mPriority', 'mHasStaticPartialTypes', 'mHasDynamicPartialTypes',
                      'setAutoVerify', 'setVisibilityToInstantApp', 'mOrder'):
        parcel.parse_field(int_field, 'int32', parcel.readInt32, parent)
def main(stream=None):
    """Command-line entry point: parse arguments and dump the requested
    DWARF sections of an ELF file to ``stream`` (default stdout).

    :param stream: optional output stream; defaults to sys.stdout.
    """
    argparser = argparse.ArgumentParser(usage='usage: %(prog)s [options] <elf-file>', description=SCRIPT_DESCRIPTION, add_help=False, prog='readelf.py')
    argparser.add_argument('file', nargs='?', default=None, help='ELF file to parse')
    argparser.add_argument('-H', '--help', action='store_true', dest='help', help='Display this information')
    argparser.add_argument('--verbose', action='store_true', dest='verbose', help='For compatibility with dwarfdump. Non-verbose mode is not implemented.')
    # Fix: args.show_traceback is read in the error handler below, but the
    # option was never declared, so any ELFError raised AttributeError
    # instead of printing the error message.
    argparser.add_argument('--traceback', action='store_true', dest='show_traceback', help='Dump the Python traceback on ELF errors')
    sections = ('info', 'loclists', 'rnglists')
    for section in sections:
        argparser.add_argument(('--debug-%s' % section), action='store_true', dest=section, help=('Display the contents of DWARF debug_%s section.' % section))
    args = argparser.parse_args()
    if (args.help or (not args.file)):
        argparser.print_help()
        sys.exit(0)
    # Rename two DWARF tags to their DWARFv5 spellings so output matches
    # what dwarfdump produces.
    del ENUM_DW_TAG['DW_TAG_template_type_param']
    del ENUM_DW_TAG['DW_TAG_template_value_param']
    ENUM_DW_TAG['DW_TAG_template_type_parameter'] = 47
    ENUM_DW_TAG['DW_TAG_template_value_parameter'] = 48
    with open(args.file, 'rb') as file:
        try:
            readelf = ReadElf(args.file, file, (stream or sys.stdout))
            if args.info:
                readelf.dump_info()
            if args.loclists:
                readelf.dump_loclists()
            if args.rnglists:
                readelf.dump_rnglists()
        except ELFError as ex:
            sys.stdout.flush()
            sys.stderr.write(('ELF error: %s\n' % ex))
            if args.show_traceback:
                traceback.print_exc()
            sys.exit(1)
def extractSplatanovelWordpressCom(item):
    """Build a release message from a splatanovel.wordpress.com feed item.

    Returns None for previews / items without a chapter or volume,
    a release message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag to look for, series name, translation type)
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, series_name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def send_progress(monitor):
    """Send a playback-progress update for the current item to the Emby server.

    :param monitor: Kodi monitor whose ``played_information`` holds the
        per-item playback state.
    """
    play_data = get_playing_data(monitor.played_information)
    if play_data is None:
        return
    log.debug('Sending Progress Update')
    player = xbmc.Player()
    play_time = player.getTime()
    total_play_time = player.getTotalTime()
    # NOTE(review): 'currentPossition' is misspelled, but other code may
    # read this key, so it is kept unchanged.
    play_data['currentPossition'] = play_time
    play_data['duration'] = total_play_time
    play_data['currently_playing'] = True
    item_id = play_data.get('item_id')
    if item_id is None:
        return
    source_id = play_data.get('source_id')
    # Fix: the multipliers were missing entirely (`int((play_time * ))` was
    # a syntax error).  Emby expresses positions in ticks of 100ns,
    # i.e. seconds * 10,000,000.
    ticks = int(play_time * 10000000)
    duration = int(total_play_time * 10000000)
    paused = play_data.get('paused', False)
    playback_type = play_data.get('playback_type')
    play_session_id = play_data.get('play_session_id', '')
    live_stream_id = play_data.get('live_stream_id', '')
    playlist = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
    playlist_position = playlist.getposition()
    playlist_size = playlist.size()
    volume, muted = get_volume()
    postdata = {'QueueableMediaTypes': 'Video', 'CanSeek': True, 'ItemId': item_id, 'MediaSourceId': source_id, 'IsPaused': paused, 'IsMuted': muted, 'PlayMethod': playback_type, 'PlaySessionId': play_session_id, 'LiveStreamId': live_stream_id, 'PlaylistIndex': playlist_position, 'PlaylistLength': playlist_size, 'VolumeLevel': volume}
    if (duration is not None) and (duration > 0):
        postdata['RunTimeTicks'] = duration
    postdata['PositionTicks'] = ticks
    log.debug('Sending POST progress started: {0}', postdata)
    url = '{server}/emby/Sessions/Playing/Progress'
    download_utils.download_url(url, post_body=postdata, method='POST')
class NotebookTab(Gtk.EventBox):
    """Tab widget for a SmartNotebook page: label, optional icon, a close
    button, and inline renaming (double-click swaps the label for an entry).

    Supports vertical orientation (rotated label) for side tab bars.
    """
    # Allow the notebook to reorder this tab by dragging.
    reorderable = True
    def __init__(self, notebook: SmartNotebook, page: 'NotebookPage', vertical: bool=False):
        Gtk.EventBox.__init__(self)
        # Draw on the parent's background instead of an own window.
        self.set_visible_window(False)
        self.closable = True
        self.notebook = notebook
        self.page = page
        self.page.tab_menu.attach_to_widget(self.page, None)
        self.connect('button-press-event', self.on_button_press)
        self.vertical = vertical
        if vertical:
            box = Gtk.Box(spacing=2, orientation=Gtk.Orientation.VERTICAL)
        else:
            box = Gtk.Box(spacing=2)
        self.add(box)
        apply_css(box)
        # Icon stays hidden until set_icon() supplies a pixbuf.
        self.icon = Gtk.Image()
        self.icon.set_no_show_all(True)
        apply_css(self.icon)
        self.label = Gtk.Label(label=self.page.get_page_name())
        self.label.set_tooltip_text(self.page.get_page_name())
        apply_css(self.label)
        if vertical:
            # Rotate the label so it reads along a side tab bar.
            self.label.set_angle(90)
            self.label.props.valign = Gtk.Align.CENTER
            self.label.set_max_width_chars(20)
        else:
            self.label.props.halign = Gtk.Align.CENTER
            self.label.set_ellipsize(Pango.EllipsizeMode.END)
            self.adjust_label_width(Gtk.PositionType.TOP)
        if self.can_rename():
            # Hidden entry that replaces the label during inline rename.
            self.entry = entry = Gtk.Entry()
            entry.set_width_chars(self.label.get_max_width_chars())
            entry.set_text(self.label.get_text())
            # NOTE(review): this Border is constructed but never applied to
            # the entry — looks like dead code; confirm before removing.
            border = Gtk.Border.new()
            border.left = 1
            border.right = 1
            entry.connect('activate', self.on_entry_activate)
            entry.connect('focus-out-event', self.on_entry_focus_out_event)
            entry.connect('key-press-event', self.on_entry_key_press_event)
            entry.set_no_show_all(True)
            apply_css(entry)
        self.button = button = Gtk.Button()
        button.set_relief(Gtk.ReliefStyle.NONE)
        button.set_halign(Gtk.Align.CENTER)
        button.set_valign(Gtk.Align.CENTER)
        button.set_focus_on_click(False)
        button.set_tooltip_text(_('Close Tab'))
        button.add(Gtk.Image.new_from_icon_name('window-close', Gtk.IconSize.MENU))
        button.connect('clicked', self.close)
        button.connect('button-press-event', self.on_button_press)
        apply_css(button)
        # Pack order depends on orientation: the close button leads in
        # vertical mode and trails in horizontal mode.
        if vertical:
            box.pack_start(button, False, False, 0)
            box.pack_end(self.icon, False, False, 0)
            box.pack_end(self.label, True, True, 0)
            if self.can_rename():
                box.pack_end(self.entry, True, True, 0)
        else:
            box.pack_start(self.icon, False, False, 0)
            box.pack_start(self.label, True, True, 0)
            if self.can_rename():
                box.pack_start(self.entry, True, True, 0)
            box.pack_end(button, False, False, 0)
        page.set_tab(self)
        page.connect('name-changed', self.on_name_changed)
        box.show_all()
    def adjust_label_width(self, tab_pos: Gtk.PositionType) -> None:
        """Shrink the label for top/bottom tab bars; widen it otherwise.

        No-op in vertical mode (the label width is fixed at creation).
        """
        if self.vertical:
            return
        if (tab_pos in (Gtk.PositionType.TOP, Gtk.PositionType.BOTTOM)):
            self.label.set_width_chars(4)
        else:
            self.label.set_width_chars(20)
    def set_icon(self, pixbuf: Optional[GdkPixbuf.Pixbuf]) -> None:
        """Show the given pixbuf in the tab, or hide the icon if None."""
        if (pixbuf is None):
            self.icon.set_property('visible', False)
        else:
            self.icon.set_from_pixbuf(pixbuf)
            self.icon.set_property('visible', True)
    def set_closable(self, closable: bool) -> None:
        """Enable or disable closing; greys out the close button."""
        self.closable = closable
        self.button.set_sensitive(closable)
    def on_button_press(self, widget, event):
        """Double-click starts a rename, middle-click closes the tab,
        context-menu trigger pops the tab menu.  Always claims the event."""
        if ((event.button == Gdk.BUTTON_PRIMARY) and (event.type == Gdk.EventType._2BUTTON_PRESS)):
            self.start_rename()
        elif (event.button == Gdk.BUTTON_MIDDLE):
            self.close()
        elif event.triggers_context_menu():
            self.page.tab_menu.popup(None, None, None, None, event.button, event.time)
        # Returning True stops further handling even for unmatched buttons.
        return True
    def on_entry_activate(self, entry):
        """Enter pressed in the rename entry: commit the rename."""
        self.entry.props.editing_canceled = False
        self.end_rename()
    def on_entry_focus_out_event(self, widget, event):
        """Focus left the entry: commit unless the rename was cancelled
        (activate() re-triggers on_entry_activate)."""
        if (not self.entry.props.editing_canceled):
            widget.activate()
    def on_entry_key_press_event(self, widget, event):
        """Escape cancels the in-progress rename."""
        if (event.keyval == Gdk.KEY_Escape):
            self.entry.props.editing_canceled = True
            self.end_rename()
            return True
    def on_name_changed(self, *args):
        """Keep the tab label in sync with the page's name."""
        self.label.set_text(self.page.get_page_name())
    def start_rename(self):
        """Swap the label/close button for the entry and focus it."""
        if (not self.can_rename()):
            return
        self.entry.set_text(self.page.get_page_name())
        self.label.hide()
        self.button.hide()
        self.entry.show()
        self.entry.select_region(0, (- 1))
        self.entry.grab_focus()
    def end_rename(self, cancel=False):
        """Finish a rename: apply the entry text (unless cancelled or blank)
        and restore the label/close button.

        NOTE(review): the ``cancel`` parameter is never used — cancellation
        is tracked via ``entry.props.editing_canceled`` instead.
        """
        name = self.entry.get_text()
        if ((name.strip() != '') and (not self.entry.props.editing_canceled)):
            self.page.set_page_name(name)
            self.label.set_text(name)
            self.label.set_tooltip_text(name)
        self.entry.hide()
        self.label.show()
        self.button.show()
        self.entry.props.editing_canceled = False
    def can_rename(self):
        """A page is renameable iff it exposes set_page_name()."""
        return hasattr(self.page, 'set_page_name')
    def close(self, *args):
        """Close the tab unless a 'closing' handler vetoes it (truthy emit)."""
        if (self.closable and (not self.page.emit('closing'))):
            self.notebook.remove_page(self.notebook.page_num(self.page))
class OptionSeriesTreemapTraverseupbuttonPosition(Options):
    """Position options for the treemap 'traverse up' button.

    Fix: each accessor pair was defined as two plain methods with the same
    name, so the setter definition silently shadowed the getter and the
    getters were unreachable.  Restored as property/setter pairs.
    """

    @property
    def align(self):
        """Horizontal alignment of the button. Defaults to 'right'."""
        return self._config_get('right')

    @align.setter
    def align(self, text: str):
        self._config(text, js_type=False)

    @property
    def verticalAlign(self):
        """Vertical alignment of the button. Defaults to 'top'."""
        return self._config_get('top')

    @verticalAlign.setter
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)

    @property
    def x(self):
        """Horizontal pixel offset of the button. Defaults to -10."""
        return self._config_get((- 10))

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        """Vertical pixel offset of the button. Defaults to 10."""
        return self._config_get(10)

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)
def test_extra_field_order():
    """Extra form fields must render after the model's own fields."""
    app, db, admin = setup()
    Model1, _ = create_models(db)
    view = CustomModelView(Model1, form_extra_fields={'extra_field': fields.StringField('Extra Field')})
    admin.add_view(view)
    client = app.test_client()
    response = client.get('/admin/model1/new/')
    assert response.status_code == 200
    page = response.data.decode('utf-8')
    assert 'Extra Field' in page
    extra_pos = page.find('Extra Field')
    model_pos = page.find('Test1')
    # The model's own field ('Test1') must appear before the extra field.
    assert model_pos < extra_pos
class KShotNWaySequence(DeterministicSequence):
    """Sequence producing k-shot / n-way episodic batches: each batch holds
    ``n_way`` labels with ``k_shot`` samples per label.
    """
    def __init__(self, annotations, batch_size, k_shot, n_way, **kwargs):
        # batch size is fully determined by k_shot * n_way; any explicit
        # value that disagrees is overridden with a warning.
        if ((batch_size is not None) and (batch_size != (n_way * k_shot))):
            warnings.warn('batch_size was set but not consistent with k_shot and n_way. Value is overridden.')
        batch_size = (k_shot * n_way)
        # Ordering is controlled by on_epoch_end, so shuffling is disabled.
        if kwargs.get('shuffle'):
            warnings.warn('KShotNWaySequence does not use the shuffle attribute')
            kwargs['shuffle'] = False
        self.k_shot = k_shot
        self.n_way = n_way
        super().__init__(annotations, batch_size, **kwargs)
        # label -> list of row indexes carrying that label.
        self.label_to_indexes = dict(self.annotations[0].reset_index().groupby('label', as_index=False).agg({'index': list})[['label', 'index']].values)
    def on_epoch_end(self):
        # Per label: shuffle rows, then assign each run of k_shot rows a
        # unique '<label>-<i>' group id plus the group's actual length.
        # NOTE(review): pd.np was removed in pandas >= 1.0 — this code
        # requires an old pandas; consider importing numpy directly.
        self.annotations[0] = self.annotations[0].groupby('label').apply((lambda group: group.sample(frac=1).assign(k_shot_index=[((group.name + '-') + str(index)) for index in pd.np.repeat(list(range(math.ceil((len(group) / self.k_shot)))), self.k_shot)][:len(group)]))).reset_index('label', drop=True).groupby('k_shot_index').apply((lambda group: group.assign(k_shot_len=len(group))))
        # Keep only complete k-shot groups, permute them group-wise, and
        # flatten back to a row-index order.
        indexes_with_k_shots = pd.np.array(pd.np.random.permutation(self.annotations[0].reset_index().loc[(lambda df: (df.k_shot_len == self.k_shot))].groupby('k_shot_index', as_index=False).agg({'index': list})['index'].values).tolist()).flatten()
        # Incomplete groups are appended at the end (they fall outside the
        # floor(len/batch_size) batches served by __len__).
        other_indexes = self.annotations[0].index.difference(indexes_with_k_shots)
        indexes = (indexes_with_k_shots.tolist() + other_indexes.tolist())
        self.annotations[0] = self.annotations[0].loc[indexes]
        self.targets = self.targets.loc[indexes]
    def __len__(self):
        # Partial trailing batches are dropped.
        return math.floor((len(self.annotations[0]) / self.batch_size))
def array_serializer_generator(msg_context, package, type_, name, serialize, is_numpy):
    """Yield Python source lines that (de)serialize a ROS message array field.

    :param msg_context: message registry used to resolve embedded types
    :param package: package the containing message lives in
    :param type_: full field type string, e.g. 'int32[4]' or 'string[]'
    :param name: field name within the message
    :param serialize: True to emit serialization code, False for deserialization
    :param is_numpy: True to emit numpy-based (de)serialization
    :raises MsgGenerationException: if type_ is not an array type
    """
    (base_type, is_array, array_len) = msgs.parse_type(type_)
    if (not is_array):
        raise MsgGenerationException(('Invalid array spec: %s' % type_))
    # variable-length arrays (no fixed array_len) carry a length prefix
    var_length = (array_len is None)
    if (base_type in ['char', 'uint8']):
        # char/uint8 arrays are wire-encoded like strings
        for y in string_serializer_generator(package, type_, name, serialize):
            (yield y)
        return
    var = (_serial_context + name)
    if var_length:
        # emit the length-prefix (de)serialization first
        for y in len_serializer_generator(var, False, serialize):
            (yield y)
        length = None
    else:
        length = array_len
    if is_simple(base_type):
        # simple primitive types: a single struct pack/unpack handles the
        # whole array
        if var_length:
            # pattern depends on the runtime length, so build it dynamically
            pattern = compute_struct_pattern([base_type])
            (yield ("pattern = '<%%s%s'%%length" % pattern))
            if serialize:
                if is_numpy:
                    (yield pack_numpy(var))
                else:
                    (yield pack2('pattern', ('*' + var)))
            else:
                (yield 'start = end')
                (yield 's = struct.Struct(pattern)')
                (yield 'end += s.size')
                if is_numpy:
                    dtype = NUMPY_DTYPE[base_type]
                    (yield unpack_numpy(var, 'length', dtype, 'str[start:end]'))
                else:
                    (yield unpack3(var, 's', 'str[start:end]'))
        else:
            # fixed length: the struct pattern is known at generation time
            pattern = ('%s%s' % (length, compute_struct_pattern([base_type])))
            if serialize:
                if is_numpy:
                    (yield pack_numpy(var))
                else:
                    (yield pack(pattern, ('*' + var)))
            else:
                (yield 'start = end')
                (yield ('end += %s' % struct.calcsize(('<%s' % pattern))))
                if is_numpy:
                    dtype = NUMPY_DTYPE[base_type]
                    (yield unpack_numpy(var, length, dtype, 'str[start:end]'))
                else:
                    (yield unpack(var, pattern, 'str[start:end]'))
        # bools deserialize as ints; coerce them back to bool
        # NOTE(review): the inner base_type check is redundant — the outer
        # condition already guarantees it.
        if ((not serialize) and (base_type == 'bool')):
            if (base_type == 'bool'):
                (yield ('%s = list(map(bool, %s))' % (var, var)))
    else:
        # complex types (strings / embedded messages): emit a per-element loop
        loop_var = ('val%s' % len(_context_stack))
        if (base_type == 'string'):
            push_context('')
            factory = string_serializer_generator(package, base_type, loop_var, serialize)
        else:
            # element fields are accessed as '<loop_var>.<field>'
            push_context(('%s.' % loop_var))
            factory = serializer_generator(msg_context, make_python_safe(get_registered_ex(msg_context, base_type)), serialize, is_numpy)
        if serialize:
            if (array_len is not None):
                # fixed-size arrays are validated before writing
                (yield ('if len(%s) != %s:' % (var, array_len)))
                (yield (INDENT + ('self._check_types(ValueError("Expecting %%s items but found %%s when writing \'%%s\'" %% (%s, len(%s), \'%s\')))' % (array_len, var, var))))
            (yield ('for %s in %s:' % (loop_var, var)))
        else:
            (yield ('%s = []' % var))
            if var_length:
                (yield 'for i in range(0, length):')
            else:
                (yield ('for i in range(0, %s):' % length))
            if (base_type != 'string'):
                # construct an empty message instance to deserialize into
                (yield (INDENT + ('%s = %s' % (loop_var, compute_constructor(msg_context, package, base_type)))))
        # body of the per-element loop comes from the nested generator
        for y in factory:
            (yield (INDENT + y))
        if (not serialize):
            (yield (INDENT + ('%s.append(%s)' % (var, loop_var))))
        pop_context()
class SyncFolder(Core):
    """Folder synchronisation between a local directory and a remote drive folder.

    ``flag`` selects the direction: ``None`` = two-way merge, ``True`` =
    push local to remote, ``False`` = pull remote to local.

    NOTE(review): the log-message f-strings look mangled (non-ASCII text
    appears to have been stripped during extraction); they are preserved
    byte-for-byte here.
    """

    def sync_folder(self, local_folder: str, remote_folder: str, flag: Optional[bool]=None, file_filter: Callable[([Union[(str, BaseFile)]], bool)]=(lambda x: False), ignore_content: bool=False, follow_delete: bool=False, drive_id: str=None):
        """Entry point: ensure the local folder exists, then synchronise.

        :param local_folder: local directory path
        :param remote_folder: remote folder file id
        :param flag: direction (None=two-way, True=push local, False=pull remote)
        :param file_filter: predicate; entries for which it returns True are skipped
        :param ignore_content: if True, equal sizes count as equal files (no sha1)
        :param follow_delete: if True, delete destination files missing on the source
        :param drive_id: optional drive id
        """
        if (flag is None):
            self._auth.log.info('sync_folder: ')
        elif flag:
            self._auth.log.info('sync_folder: ')
        else:
            self._auth.log.info('sync_folder: ')
        if (not os.path.exists(local_folder)):
            self._auth.log.warning(',: %s', local_folder)
            os.makedirs(local_folder)
        self.__sync_folder(local_folder, remote_folder, flag, file_filter, ignore_content, follow_delete, drive_id)

    def __sync_folder(self, local_folder: str, remote_folder: str, flag: Optional[bool], file_filter: Callable[([Union[(str, BaseFile)]], bool)], ignore_content: bool, follow_delete: bool, drive_id: str):
        """Collect filtered local and remote listings, then dispatch on flag."""
        local_files = {}
        for f in os.listdir(local_folder):
            local_file = os.path.join(local_folder, f)
            if (os.path.isfile(local_file) and file_filter(local_file)):
                self._auth.log.debug(f' {local_file}')
                continue
            local_files[f] = local_file
        remote_files: Dict[(str, BaseFile)] = {}
        for f in self._core_get_file_list(GetFileListRequest(remote_folder, drive_id=drive_id)):
            remote_file = f.name
            if ((f.type == 'file') and file_filter(f)):
                self._auth.log.debug(f' {remote_file}')
                continue
            remote_files[remote_file] = f
        if (flag is None):
            self.__sync_all(drive_id, file_filter, flag, local_files, local_folder, remote_files, remote_folder, ignore_content, follow_delete)
        elif flag:
            self.__sync_local(drive_id, local_files, remote_files, remote_folder, flag, file_filter, local_folder, ignore_content, follow_delete)
        else:
            self.__sync_remote(drive_id, local_files, remote_files, local_folder, flag, file_filter, ignore_content, follow_delete)

    def __sync_all(self, drive_id, file_filter, flag, local_files, local_folder, remote_files, remote_folder, ignore_content, follow_delete):
        """Two-way merge: newer side wins; leftovers on either side are copied over."""
        for f in list(local_files):
            local_file = local_files.pop(f)
            if os.path.isdir(local_file):
                if (f in remote_files):
                    remote_file = remote_files.pop(f)
                    if (remote_file.type == 'file'):
                        # name exists as a file remotely but a folder locally
                        self._auth.log.warning(f':,, {f}')
                        continue
                else:
                    self._auth.log.debug(f',,, {f}')
                    remote_file = self._core_create_folder(CreateFolderRequest(name=f, parent_file_id=remote_folder, drive_id=drive_id, check_name_mode='overwrite'))
                self.__sync_folder(local_file, remote_file.file_id, flag, file_filter, ignore_content, follow_delete, drive_id)
                continue
            if (f in remote_files):
                remote_file = remote_files.pop(f)
                if (remote_file.type == 'folder'):
                    # name exists as a folder remotely but a file locally
                    self._auth.log.warning(f':,, {f}')
                    continue
                local_size = os.path.getsize(local_file)
                if (local_size == remote_file.size):
                    if ignore_content:
                        self._auth.log.warning(f': {f}')
                        continue
                    local_sha1 = self._core_sha1(local_file).lower()
                    if (local_sha1 == remote_file.content_hash.lower()):
                        self._auth.log.debug(f' {f} ,sha1,')
                        continue
                # contents differ: the newer modification time wins
                local_time = os.path.getmtime(local_file)
                local_time = int(local_time)
                remote_time = remote_file.updated_at
                remote_time = utc_str_to_timestamp(remote_time)
                if (local_time > remote_time):
                    self._auth.log.debug(f', {f}')
                    self.upload_file(file_path=local_file, parent_file_id=remote_folder, name=f, drive_id=drive_id, check_name_mode='overwrite')
                elif (local_time < remote_time):
                    self._auth.log.debug(f',, {f}')
                    os.remove(local_file)
                    self.download_files([remote_file], local_folder)
            else:
                # local-only file: push it to the remote
                self._auth.log.debug(f', {f}')
                self.upload_file(file_path=local_file, parent_file_id=remote_folder, name=f, drive_id=drive_id, check_name_mode='overwrite')
        if (len(remote_files) != 0):
            # remote-only entries: pull them down
            self._auth.log.debug(f', {len(remote_files)} {list(remote_files.keys())}')
            for remote_file in remote_files.values():
                if (remote_file.type == 'file'):
                    self.download_files([remote_file], local_folder)
                else:
                    self.download_folder(remote_file.file_id, local_folder)

    def __sync_remote(self, drive_id, local_files, remote_files, local_folder, flag, file_filter, ignore_content, follow_delete):
        """Pull: make the local folder mirror the remote one."""
        for f in list(remote_files):
            remote_file = remote_files.pop(f)
            if (remote_file.type == 'folder'):
                if (f in local_files):
                    local_file = local_files.pop(f)
                    if os.path.isfile(local_file):
                        # type conflict: local file vs remote folder
                        self._auth.log.warning(f':,, {f}')
                else:
                    local_file = os.path.join(local_folder, f)
                    self._auth.log.debug(f',,, {local_file}')
                if (not os.path.exists(local_file)):
                    os.mkdir(local_file)
                self.__sync_folder(local_file, remote_file.file_id, flag, file_filter, ignore_content, follow_delete, drive_id)
                continue
            if (f in local_files):
                local_file = local_files.pop(f)
                if os.path.isdir(local_file):
                    # local folder shadows a remote file: replace it
                    self._auth.log.debug(',,,')
                    shutil.rmtree(local_file)
                    self.download_files([remote_file], local_folder)
                    continue
                remote_size = remote_file.size
                if (remote_size == os.path.getsize(local_file)):
                    if ignore_content:
                        self._auth.log.warning(f': {f}')
                        continue
                    local_sha1 = self._core_sha1(local_file).lower()
                    if (local_sha1 == remote_file.content_hash.lower()):
                        self._auth.log.debug(f' {local_file} ,sha1,')
                        continue
                # differing content: remote wins in pull mode
                os.remove(local_file)
                self.download_files([remote_file], local_folder)
            else:
                local_file = os.path.join(local_folder, f)
                self._auth.log.debug(f', {local_file}')
                self.download_files([remote_file], local_folder)
        if (follow_delete and (len(local_files) != 0)):
            # local-only leftovers are deleted when follow_delete is set
            self._auth.log.debug(f', {len(local_files)} {list(local_files.keys())}')
            for local_file in local_files.values():
                if os.path.isfile(local_file):
                    os.remove(local_file)
                else:
                    shutil.rmtree(local_file)

    def __sync_local(self, drive_id, local_files, remote_files, remote_folder, flag, file_filter, local_folder, ignore_content, follow_delete):
        """Push: make the remote folder mirror the local one."""
        for f in list(local_files):
            local_file = local_files.pop(f)
            if os.path.isdir(local_file):
                if (f in remote_files):
                    remote_file = remote_files.pop(f)
                    if (remote_file.type == 'file'):
                        # type conflict: remote file vs local folder
                        self._auth.log.warning(f':,, {f}')
                else:
                    self._auth.log.debug(f',,, {f}')
                    remote_file = self._core_create_folder(CreateFolderRequest(name=f, parent_file_id=remote_folder, drive_id=drive_id, check_name_mode='overwrite'))
                self.__sync_folder(local_file, remote_file.file_id, flag, file_filter, ignore_content, follow_delete, drive_id)
                continue
            if (f in remote_files):
                remote_file = remote_files.pop(f)
                if (remote_file.type == 'folder'):
                    # remote folder shadows a local file: trash it, then upload
                    self._auth.log.debug(f',,, {f}')
                    self._core_move_file_to_trash(MoveFileToTrashRequest(file_id=remote_file.file_id, drive_id=drive_id))
                    self._auth.log.debug(f' {f}')
                    self.upload_file(file_path=local_file, parent_file_id=remote_folder, name=f, drive_id=drive_id, check_name_mode='overwrite')
                    continue
                local_size = os.path.getsize(local_file)
                if (local_size == remote_file.size):
                    if ignore_content:
                        self._auth.log.warning(f': {f}')
                        continue
                    local_sha1 = self._core_sha1(local_file).lower()
                    if (local_sha1 == remote_file.content_hash.lower()):
                        self._auth.log.debug(f' {f} ,sha1,')
                        continue
                    else:
                        self.upload_file(file_path=local_file, parent_file_id=remote_folder, name=f, drive_id=drive_id, check_name_mode='overwrite')
                else:
                    self.upload_file(file_path=local_file, parent_file_id=remote_folder, name=f, drive_id=drive_id, check_name_mode='overwrite')
            else:
                self._auth.log.debug(f', {f}')
                self.upload_file(file_path=local_file, parent_file_id=remote_folder, name=f, drive_id=drive_id, check_name_mode='overwrite')
        if (follow_delete and (len(remote_files) != 0)):
            # remote-only leftovers are trashed when follow_delete is set
            self._auth.log.debug(f', {len(remote_files)} {list(remote_files.keys())}')
            self.batch_move_to_trash([remote_file.file_id for remote_file in remote_files.values()])

    def _core_sha1(self, param):
        """Return the lowercase hex SHA-1 of the file at ``param``.

        Fix: this was declared as ``def _core_sha1(param)`` without ``self``,
        but every call site invokes it as ``self._core_sha1(path)``, which
        raised TypeError — ``self`` restored.
        """
        sha1 = hashlib.sha1()
        with open(param, 'rb') as f:
            # Hash in 8 KiB chunks to keep memory bounded for large files.
            while True:
                data = f.read(8192)
                if (not data):
                    break
                sha1.update(data)
        return sha1.hexdigest()
def serve_ports(ports_socket, start_free_ports, min_free_ports):
    """Serve TCP port allocations over a UNIX socket.

    Commands (one per connection, as 'COMMAND,NAME'): GETSERIAL returns a
    monotonically increasing serial; GETPORT leases a free port to NAME;
    PUTPORTS returns all of NAME's ports to the pool; LISTPORTS lists the
    ports currently held by NAME.  Runs forever.

    :param ports_socket: path of the UNIX socket to bind
    :param start_free_ports: number of ports to pre-allocate at startup
    :param min_free_ports: queue size to top back up to after each GETPORT
    """
    ports_q = collections.deque()
    free_ports = set()
    # port -> timestamp it was last (re)queued; used to avoid re-leasing a
    # port before the OS has fully released it.
    port_age = {}
    serialno = 0
    def get_port():
        """Find an unused, non-reserved port by binding an ephemeral socket."""
        while True:
            free_socket = socket.socket()
            free_socket.bind(('', 0))
            free_port = free_socket.getsockname()[1]
            free_socket.close()
            if (free_port < 1024):
                continue
            if (free_port in RESERVED_FOR_TESTS_PORTS):
                continue
            if (free_port in free_ports):
                continue
            break
        free_ports.add(free_port)
        port_age[free_port] = time.time()
        return free_port
    def queue_free_ports(min_queue_size):
        """Top the queue up to min_queue_size fresh ports."""
        while (len(ports_q) < min_queue_size):
            port = get_port()
            ports_q.append(port)
            port_age[port] = time.time()
    queue_free_ports(start_free_ports)
    ports_by_name = collections.defaultdict(set)
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    sock.bind(ports_socket)
    sock.listen(1)
    # Until the first PUTPORTS, leased ports can be handed out without the
    # MIN_PORT_AGE cooling-off delay.
    cold_start = True
    while True:
        (connection, _) = sock.accept()
        (command, name) = receive_sock_line(connection).split(',')
        response = None
        if (command == GETSERIAL):
            serialno += 1
            response = serialno
        elif (command == PUTPORTS):
            ports_returned = 0
            for port in ports_by_name[name]:
                ports_returned += 1
                ports_q.append(port)
                port_age[port] = time.time()
            del ports_by_name[name]
            response = ports_returned
            if ports_returned:
                cold_start = False
        elif (command == GETPORT):
            # Rotate the queue until a sufficiently aged port surfaces.
            while True:
                port = ports_q.popleft()
                if (((time.time() - port_age[port]) > MIN_PORT_AGE) or cold_start):
                    break
                ports_q.append(port)
                time.sleep(1)
            ports_by_name[name].add(port)
            response = port
            queue_free_ports(min_free_ports)
        elif (command == LISTPORTS):
            response = list(ports_by_name[name])
        if (response is not None):
            response_str = ''
            # Single ints are wrapped so all responses are newline-separated
            # lists of unsigned ints.
            if isinstance(response, int):
                response = [response]
            response_str = ''.join([('%u\n' % i) for i in response])
            connection.sendall(response_str.encode())
        connection.close()
class DynamicPostChildAttachment(AbstractObject):
    """Graph API node wrapper for a child attachment of a dynamic post.

    Auto-generated SDK-style code: field names and types mirror the Graph
    API schema and should not be edited by hand.
    """
    def __init__(self, api=None):
        super(DynamicPostChildAttachment, self).__init__()
        self._isDynamicPostChildAttachment = True
        self._api = api
    class Field(AbstractObject.Field):
        # Graph API field names exposed by this node.
        description = 'description'
        image_url = 'image_url'
        link = 'link'
        place_id = 'place_id'
        product_id = 'product_id'
        title = 'title'
    # Graph API type of each field, used for (de)serialization.
    _field_types = {'description': 'string', 'image_url': 'string', 'link': 'string', 'place_id': 'string', 'product_id': 'string', 'title': 'string'}
    def _get_field_enum_info(cls):
        # No enum-typed fields on this node.
        # NOTE(review): takes `cls` but is not decorated @classmethod — the
        # upstream SDK generator emits @classmethod here; confirm.
        field_enum_info = {}
        return field_enum_info
class Solution():
    def merge(self, intervals: List[List[int]]) -> List[List[int]]:
        """Merge all overlapping intervals and return the merged list.

        Sorts the input in place by start point, then folds each interval
        into the running result: extend the last merged interval on
        overlap, otherwise start a new one.
        """
        intervals.sort()
        merged = []
        for idx, current in enumerate(intervals):
            if idx == 0:
                # Seed the result with the first interval (aliases the
                # input element, matching the original behaviour).
                merged.append(current)
            elif current[0] > merged[-1][1]:
                # Disjoint from the last merged interval: start a new one.
                merged.append(list(current))
            else:
                # Overlapping: extend the last interval's right edge.
                merged[-1][1] = max(merged[-1][1], current[1])
        return merged
class OptionPlotoptionsPackedbubbleSonificationDefaultspeechoptionsMapping(Options):
    """Mapping options for the default speech track of packedbubble sonification.

    Fix: ``text`` was defined twice (getter then setter), so the setter
    silently shadowed the getter and the getter was unreachable.  Restored
    as property/setter pairs; the sub-option accessors are read-only
    properties for consistency with the accessor style.
    """

    @property
    def pitch(self) -> 'OptionPlotoptionsPackedbubbleSonificationDefaultspeechoptionsMappingPitch':
        """Sub-options mapping speech pitch."""
        return self._config_sub_data('pitch', OptionPlotoptionsPackedbubbleSonificationDefaultspeechoptionsMappingPitch)

    @property
    def playDelay(self) -> 'OptionPlotoptionsPackedbubbleSonificationDefaultspeechoptionsMappingPlaydelay':
        """Sub-options mapping play delay."""
        return self._config_sub_data('playDelay', OptionPlotoptionsPackedbubbleSonificationDefaultspeechoptionsMappingPlaydelay)

    @property
    def rate(self) -> 'OptionPlotoptionsPackedbubbleSonificationDefaultspeechoptionsMappingRate':
        """Sub-options mapping speech rate."""
        return self._config_sub_data('rate', OptionPlotoptionsPackedbubbleSonificationDefaultspeechoptionsMappingRate)

    @property
    def text(self):
        """Text spoken for the data point. Defaults to None."""
        return self._config_get(None)

    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)

    @property
    def time(self) -> 'OptionPlotoptionsPackedbubbleSonificationDefaultspeechoptionsMappingTime':
        """Sub-options mapping playback time."""
        return self._config_sub_data('time', OptionPlotoptionsPackedbubbleSonificationDefaultspeechoptionsMappingTime)

    @property
    def volume(self) -> 'OptionPlotoptionsPackedbubbleSonificationDefaultspeechoptionsMappingVolume':
        """Sub-options mapping speech volume."""
        return self._config_sub_data('volume', OptionPlotoptionsPackedbubbleSonificationDefaultspeechoptionsMappingVolume)
class TargetingGeoLocationCity(AbstractObject):
    """Graph API node wrapper for a city in ad geo-location targeting.

    Auto-generated SDK-style code: field names and types mirror the Graph
    API schema and should not be edited by hand.
    """
    def __init__(self, api=None):
        super(TargetingGeoLocationCity, self).__init__()
        self._isTargetingGeoLocationCity = True
        self._api = api
    class Field(AbstractObject.Field):
        # Graph API field names exposed by this node.
        country = 'country'
        distance_unit = 'distance_unit'
        key = 'key'
        name = 'name'
        radius = 'radius'
        region = 'region'
        region_id = 'region_id'
    # Graph API type of each field, used for (de)serialization.
    _field_types = {'country': 'string', 'distance_unit': 'string', 'key': 'string', 'name': 'string', 'radius': 'unsigned int', 'region': 'string', 'region_id': 'string'}
    def _get_field_enum_info(cls):
        # No enum-typed fields on this node.
        # NOTE(review): takes `cls` but is not decorated @classmethod — the
        # upstream SDK generator emits @classmethod here; confirm.
        field_enum_info = {}
        return field_enum_info
def template_def(signature, code):
    """Compile ``code`` into a Mako template def with the given signature.

    ``signature`` may be a funcsigs.Signature or an iterable of parameter
    names (converted to positional-or-keyword parameters).  Returns the
    callable '_func' def extracted from the rendered template.
    """
    if not isinstance(signature, funcsigs.Signature):
        positional = funcsigs.Parameter.POSITIONAL_OR_KEYWORD
        signature = funcsigs.Signature([funcsigs.Parameter(name, kind=positional) for name in signature])
    template_src = "<%def name='_func{}'>\n{}\n</%def>".format(signature, code)
    return template_from(template_src).get_def('_func')
class PeriodicRunTaskEvent(SchedulingEvent):
    """Scheduling event requesting a periodic run of one task within a
    workflow execution."""

    def __init__(self, workflow_execution_id: int, task_name: str):
        super().__init__(SchedulingEventType.PERIODIC_RUN_TASK)
        # Context payload is JSON so it can be passed through the scheduler.
        payload = {
            EventContextConstant.WORKFLOW_EXECUTION_ID: workflow_execution_id,
            EventContextConstant.TASK_NAME: task_name,
        }
        self.context = json.dumps(payload)
def get_normalized_bounding_box_for_page_coordinates_and_page_meta(coordinates: LayoutPageCoordinates, page_meta: LayoutPageMeta) -> LayoutPageCoordinates:
    """Scale a bounding box to unit page coordinates.

    x/width are divided by the page width and y/height by the page height;
    the page number is added to the normalized y so pages stack on one
    continuous vertical axis.
    """
    page_coordinates = page_meta.coordinates
    assert page_coordinates is not None
    normalized_x = coordinates.x / page_coordinates.width
    normalized_y = coordinates.y / page_coordinates.height + page_meta.page_number
    normalized_width = coordinates.width / page_coordinates.width
    normalized_height = coordinates.height / page_coordinates.height
    return LayoutPageCoordinates(x=normalized_x, y=normalized_y, width=normalized_width, height=normalized_height, page_number=coordinates.page_number)
def stream_content_to_json(buffer):
    """Parse a streamed text buffer into JSON values.

    Tries to parse the whole buffer first; on failure falls back to
    consuming complete newline-terminated lines, leaving any trailing
    partial line in the returned buffer.

    Fixes: ``len(pos)`` was called on an int (TypeError) — the intended
    check is on the stripped line; ``buffer[pos]`` kept a single character
    instead of the remainder (``buffer[pos:]``); spurious str/bytes
    ``.decode()``/``.encode()`` round-trips removed (``buffer`` is str).

    :param buffer: accumulated text, possibly ending mid-line
    :return: dict with 'buffer' (unconsumed remainder) and 'values'
        (list of {'content', 'raw'} dicts)
    """
    values = []
    try:
        content = parse_json(buffer)
        values.append({'content': content, 'raw': buffer})
        buffer = ''
    except Exception:
        # Not (yet) a complete JSON document: consume whole lines only.
        while True:
            pos = buffer.find('\n') + 1
            if pos <= 0:
                break
            line = buffer[0:pos].strip()
            if len(line) > 0:
                values.append({'content': line, 'raw': line})
            buffer = buffer[pos:]
    return {'buffer': buffer, 'values': values}
def zendesk_erasure_data(zendesk_client: ZendeskClient, zendesk_erasure_identity_email: str) -> Generator:
    """Fixture: create a throwaway Zendesk user plus a ticket for that user
    and yield both for erasure tests."""
    user_response = zendesk_client.create_user(zendesk_erasure_identity_email)
    assert user_response.ok
    user = user_response.json()['user']
    ticket_response = zendesk_client.create_ticket(user['id'])
    assert ticket_response.ok
    ticket = ticket_response.json()['ticket']
    yield (ticket, user)
@_custom_acc_mapper_fn(op_and_target=('call_method', 'std'), arg_replacement_tuples=[('input', 'input'), ('dim', 'dim'), ('unbiased', 'unbiased', this_arg_is_optional), ('keepdim', 'keepdim', this_arg_is_optional), ('dtype', 'dtype', this_arg_is_optional)])
@_custom_acc_mapper_fn(op_and_target=('call_function', torch.std), arg_replacement_tuples=[('input', 'input'), ('dim', 'dim'), ('unbiased', 'unbiased', this_arg_is_optional), ('keepdim', 'keepdim', this_arg_is_optional), ('dtype', 'dtype', this_arg_is_optional)])
def std_mapper(node, mod):
    """Lower ``std`` into primitive acc ops: sqrt(mean((x - mean(x))^2)).

    Fix: the two ``_custom_acc_mapper_fn(...)`` registrations were written
    as bare calls whose return value (the decorator) was discarded, so
    ``std_mapper`` was never registered — restored as stacked decorators.

    :param node: the fx node being replaced
    :param mod: the owning module (unused)
    :return: the node producing the final sqrt result
    """
    input_node = node.kwargs['input']
    dim = node.kwargs.get('dim')
    keepdim = node.kwargs.get('keepdim')
    assert ((dim is not None) and (keepdim is not None)), 'We currently do not support `std` with dim=None and keepdim=None'
    with node.graph.inserting_before(node):
        # mean(x) with keepdim=True so it broadcasts in the subtraction
        mean_kwargs = {'input': input_node, 'dim': dim, 'keepdim': True}
        mean_node = node.graph.call_function(mean, kwargs=mean_kwargs)
        mean_node.meta['type'] = torch.Tensor
        # x - mean(x)
        sub_kwargs = {'input': input_node, 'other': mean_node}
        sub_node = node.graph.call_function(sub, kwargs=sub_kwargs)
        sub_node.meta['type'] = torch.Tensor
        # (x - mean(x)) ** 2
        pow_kwargs = {'input': sub_node, 'exponent': 2.0}
        pow_node = node.graph.call_function(pow, kwargs=pow_kwargs)
        pow_node.meta['type'] = torch.Tensor
        # mean of squared deviations, honoring the caller's keepdim
        post_mean_kwargs = {'input': pow_node, 'dim': dim, 'keepdim': keepdim}
        post_mean_node = node.graph.call_function(mean, kwargs=post_mean_kwargs)
        post_mean_node.meta['type'] = torch.Tensor
        # sqrt of the variance
        sqrt_kwargs = {'input': post_mean_node}
        sqrt_node = node.graph.call_function(sqrt, kwargs=sqrt_kwargs)
        sqrt_node.meta['type'] = torch.Tensor
    output_node = sqrt_node
    output_node.meta = node.meta.copy()
    return output_node
def filter_data(base: Any, updates: Any) -> Any:
 """Recursively diff *updates* against *base*.

 Returns SAME_MARK when the two values are effectively identical,
 otherwise the minimal structure from *updates* that differs from *base*.
 """
 # A type change always wins: report the new value wholesale.
 if not isinstance(updates, type(base)):
  return updates
 # Leaf (non-dict) values: equal -> marker, different -> the new value.
 if not isinstance(base, dict):
  return SAME_MARK if base == updates else updates
 # Dict case: keys only present in updates are kept as-is ...
 result = {key: updates[key] for key in set(updates) - set(base)}
 # ... shared keys are kept only when their values actually changed.
 for key in set(updates) & set(base):
  diff = filter_data(base[key], updates[key])
  if diff is not SAME_MARK:
   result[key] = diff
 return result if result else SAME_MARK
class ProductionTestCase(unittest.TestCase):
 """Exercises validate_production() against the p1..p9 fixture datapoints."""

 def _assert_rejected(self, datapoint, zone_key, msg):
  """Shared helper: an invalid datapoint must raise from validate_production."""
  with self.assertRaises(Exception, msg=msg):
   validate_production(datapoint, zone_key)

 def test_no_datetime(self):
  self._assert_rejected(p1, 'FR', 'Datetime key must be present!')

 def test_no_zoneKey(self):
  self._assert_rejected(p2, 'FR', 'zoneKey is required!')

 def test_bad_datetime(self):
  self._assert_rejected(p3, 'FR', 'datetime object is required!')

 def test_zoneKey_mismatch(self):
  self._assert_rejected(p4, 'FR', 'zoneKey mismatch must be caught!')

 def test_future_not_allowed(self):
  self._assert_rejected(p5, 'FR', 'Datapoints from the future are not valid!')

 def test_missing_types(self):
  self._assert_rejected(p6, 'FR', 'Coal/Oil/Unknown are required!')

 def test_missing_types_allowed(self):
  # A falsy return means the datapoint is accepted without those types.
  self.assertFalse(validate_production(p7, 'CH'), msg="CH, NO, AU-TAS, US-NEISO don't require Coal/Oil/Unknown!")

 def test_negative_production(self):
  self._assert_rejected(p8, 'FR', 'Negative generation should be rejected!')

 def test_good_datapoint(self):
  self.assertFalse(validate_production(p9, 'FR'), msg='This datapoint is good!')
def check_out_of_sync(proj_dir):
 """Report whether *proj_dir* has uncommitted changes or branches that
 diverge from their remote revisions.

 Logs details for anything found and returns True when out of sync.
 """
 check_exists_with_error()
 out_of_sync = False
 dirty, status_output = has_uncommitted_files(proj_dir)
 if dirty:
  out_of_sync = True
  log.warn("'{}' has uncommitted changes:".format(proj_dir))
  log.info(status_output)
 branches = get_branches(proj_dir)
 if not branches:
  log.warn("'{}' no remote branches found".format(proj_dir))
 # Emit the "branches out of sync" header only once, before the first hit.
 header_emitted = False
 for local_branch in branches:
  remote_branch = branches[local_branch]
  remote_rev = get_remote_rev(proj_dir, remote_branch)
  if not remote_rev:
   continue
  local_rev = get_local_rev(proj_dir, local_branch)
  if remote_rev == local_rev:
   continue
  out_of_sync = True
  if not header_emitted:
   log.warn("'{}' branches out of sync:".format(proj_dir))
   header_emitted = True
  log.info(' {}: {}'.format(local_branch, local_rev))
  log.info(' {}: {}'.format(remote_branch, remote_rev))
 return out_of_sync
class EnKFMain:
 """Thin facade over an ErtConfig: lazily builds the update configuration
 and runs hooked workflows."""

 def __init__(self, config: 'ErtConfig', read_only: bool=False) -> None:
  """Store the configuration; the update configuration is built on demand."""
  self.ert_config = config
  self._update_configuration: Optional[UpdateConfiguration] = None

 # BUG FIX: the original had a bare `_configuration.setter` expression
 # (a NameError at class-definition time) and no @property on the getter;
 # restored the standard property/setter pair so both accessors work.
 @property
 def update_configuration(self) -> UpdateConfiguration:
  # Lazily derive one global update step from all observations/parameters.
  if (not self._update_configuration):
   self._update_configuration = UpdateConfiguration.global_update_step(list(self.ert_config.observations.keys()), self.ert_config.ensemble_config.parameters)
  return self._update_configuration

 @update_configuration.setter
 def update_configuration(self, user_config: List[UpdateStep]) -> None:
  # Validate the user-supplied steps against the config before accepting.
  config = UpdateConfiguration(update_steps=user_config)
  config.context_validate(list(self.ert_config.observations.keys()), self.ert_config.ensemble_config.parameters)
  self._update_configuration = config

 def __repr__(self) -> str:
  return f'EnKFMain(size: {self.ert_config.model_config.num_realizations}, config: {self.ert_config})'

 def runWorkflows(self, runtime: HookRuntime, storage: Optional[StorageAccessor]=None, ensemble: Optional[EnsembleAccessor]=None) -> None:
  """Run every workflow hooked at *runtime*, blocking until each finishes."""
  for workflow in self.ert_config.hooked_workflows[runtime]:
   WorkflowRunner(workflow, self, storage, ensemble).run_blocking()
def _decode(e: bytes, separator: bytes=SEPARATOR) -> Envelope:
 """Decode raw bytes of the form to,sender,protocol_id,message,'' into an Envelope.

 :param e: the serialized envelope bytes
 :param separator: field separator used when the envelope was encoded
 :raises ValueError: if there are fewer than 5 fields or the trailing
  field is not empty / a newline
 """
 split = e.split(separator)
 if ((len(split) < 5) or (split[(- 1)] not in [b'', b'\n'])):
  raise ValueError('Expected at least 5 values separated by commas and last value being empty or new line, got {}'.format(len(split)))
 to = split[0].decode('utf-8').strip().lstrip('\x00')
 sender = split[1].decode('utf-8').strip()
 protocol_specification_id = PublicId.from_str(split[2].decode('utf-8').strip())
 # BUG FIX: re-join the message with the *same* separator used for splitting;
 # the original used the module-level SEPARATOR even when a custom separator
 # was passed, corrupting messages that legitimately contain it.
 message = separator.join(split[3:(- 1)])
 if (b'\\x' in message):
  # Message was escape-encoded; undo the escaping back to raw bytes.
  message = codecs.decode(message, 'unicode-escape').encode('utf-8')
 return Envelope(to=to, sender=sender, protocol_specification_id=protocol_specification_id, message=message)
('ecs_deploy.cli.get_client')
def test_deploy_new_tag(get_client, runner):
 """Deploying with -t latest retags every container and rolls the task definition."""
 get_client.return_value = EcsTestClient('acces_key', 'secret_key')
 result = runner.invoke(cli.deploy, (CLUSTER_NAME, SERVICE_NAME, '-t', 'latest'))
 assert result.exit_code == 0
 assert not result.exception
 expected_snippets = (
  u'Deploying based on task definition: test-task:1',
  u'Updating task definition',
  u'Changed image of container "webserver" to: "webserver:latest" (was: "webserver:123")',
  u'Changed image of container "application" to: "application:latest" (was: "application:123")',
  u'Successfully created revision: 2',
  u'Successfully deregistered revision: 1',
  u'Successfully changed task definition to: test-task:2',
  u'Deployment successful',
 )
 for snippet in expected_snippets:
  assert snippet in result.output
class conv2d(Operator):
 """Conv2d operator for NHWC tensors.

 Infers output shapes from the input/weight shapes and the stride/pad/
 dilate/group attributes, and profiles generated kernels per workload
 (backed by an on-disk profile cache) to pick the fastest algorithm
 before final code generation.
 """

 def __init__(self, stride, pad, dilate=1, group=1) -> None:
  """Record conv hyper-parameters and codegen templates in ``_attrs``."""
  super().__init__()
  self._attrs['op'] = 'conv2d'
  self._attrs['stride'] = stride
  self._attrs['pad'] = pad
  self._attrs['dilate'] = dilate
  self._attrs['group'] = group
  self._attrs['has_profiler'] = True
  self._attrs['epilogue_alignment'] = 1
  self._attrs['epilogue'] = 'LinearCombination'
  self._attrs['workspace'] = 0
  self._attrs['split_k'] = None
  # Templates consumed by the backend code generators and profiler.
  self.shape_eval_template = SHAPE_FUNC_TEMPLATE
  self.shape_save_template = SHAPE_ASSIGNMENT_TEMPLATE
  self.exec_key_template = EXEC_KEY_TEMPLATE
  self.exec_dyn_key_template = EXEC_DYN_KEY_TEMPLATE
  self.exec_cond_template = EXEC_COND_TEMPLATE
 def _get_params_factory(self):
  """Expand stride/pad/dilate into per-axis (h, w) entries."""
  params_factory = {}
  (params_factory['strideh'], params_factory['stridew']) = _maybe_int_to_tuple(self._attrs['stride'], 'Stride')
  (params_factory['padh'], params_factory['padw']) = _maybe_int_to_tuple(self._attrs['pad'], 'Pad')
  (params_factory['dilateh'], params_factory['dilatew']) = _maybe_int_to_tuple(self._attrs['dilate'], 'Dilation')
  return params_factory
 def _infer_shape(self, x: List[int], w: List[int]) -> List[int]:
  """Compute one concrete output shape [NO, HO, WO, CO].

  Renders the shape-eval template into Python source and exec()s it;
  the generated code leaves NO/HO/WO/CO in ``output``.
  """
  if (x[3] != (w[3] * self._attrs['group'])):
   raise RuntimeError('X/W Shape mismatch for conv2d')
  eval_func = self.shape_eval_template.render(indent='', dtype='', div='//', x_dim0=x[0], x_dim1=x[1], x_dim2=x[2], x_dim3=x[3], w_dim0=w[0], w_dim1=w[1], w_dim2=w[2], **self._get_params_factory())
  output = {}
  exec(eval_func, output)
  return [int(output['NO']), int(output['HO']), int(output['WO']), int(output['CO'])]
 def _infer_shapes(self, x: Tensor, w: Tensor) -> List[int]:
  """Build the (possibly dynamic) output shape for input x and weight w.

  Enumerates every combination of x's candidate dim values, infers each
  static output shape, and wraps the unique results in IntVars; also
  propagates symbolic output H/W expressions.
  """
  x_shape_values = [var._attrs['values'] for var in x._attrs['shape']]
  x_shapes = itertools.product(*x_shape_values)
  # Weight dims are static: take the single value of each.
  w_shape = [var._attrs['values'][0] for var in w._attrs['shape']]
  self._attrs['CO'] = w_shape[0]
  self._attrs['KH'] = w_shape[1]
  self._attrs['KW'] = w_shape[2]
  y_shapes = []
  for x_shape in x_shapes:
   y_shape = self._infer_shape(x_shape, w_shape)
   y_shapes.append(y_shape)
  def unique(vector):
   return sorted(set(vector))
  output_shape = [x._attrs['shape'][0], shape_utils.gen_int_var(unique([d[1] for d in y_shapes])), shape_utils.gen_int_var(unique([d[2] for d in y_shapes])), shape_utils.gen_int_var(unique([d[3] for d in y_shapes]))]
  # Symbolic H/W follow the standard conv arithmetic using the effective
  # (dilated) kernel extents.
  in_h = x._attrs['shape'][1]._attrs['symbolic_value']
  in_w = x._attrs['shape'][2]._attrs['symbolic_value']
  (dilate_h, dilate_w) = _maybe_int_to_tuple(self._attrs['dilate'], 'Dilation')
  (stride_h, stride_w) = _maybe_int_to_tuple(self._attrs['stride'], 'Stride')
  (pad_h, pad_w) = _maybe_int_to_tuple(self._attrs['pad'], 'Pad')
  KHEff = (((w_shape[1] - 1) * dilate_h) + 1)
  KWEff = (((w_shape[2] - 1) * dilate_w) + 1)
  out_h = ((((in_h + (2 * pad_h)) - KHEff) // stride_h) + 1)
  out_w = ((((in_w + (2 * pad_w)) - KWEff) // stride_w) + 1)
  output_shape[1]._attrs['symbolic_value'] = out_h
  output_shape[2]._attrs['symbolic_value'] = out_w
  return output_shape
 def _invert_exec_key(self, key):
  """Recover the integer dims encoded in an exec-path key string."""
  tmp = re.findall('(\\d+)', key)
  return [int(x) for x in tmp]
 def _gen_exec_key(self, shape: List[int]):
  """Render the static exec-path key for one concrete input shape."""
  return self.exec_key_template.render(x_dim0=shape[0], x_dim1=shape[1], x_dim2=shape[2], x_dim3=shape[3]).replace('\n', '')
 def _gen_dyn_exec_key(self, dim0_lb, dim0_ub, dim1_lb, dim1_ub, dim2_lb, dim2_ub, dim3):
  """Render an exec-path key covering a range of dynamic input dims."""
  return self.exec_dyn_key_template.render(x_dim0_lb=dim0_lb, x_dim0_ub=dim0_ub, x_dim1_lb=dim1_lb, x_dim1_ub=dim1_ub, x_dim2_lb=dim2_lb, x_dim2_ub=dim2_ub, x_dim3=dim3).replace('\n', '')
 def _extract_exec_path(self, x: Tensor):
  """Populate _attrs['exec_path'] with one (unprofiled) entry per workload.

  Dims 1..3 are collapsed to their max candidate value; only dim0
  (batch) keeps all of its candidate values.
  """
  x_shape_values = [var._attrs['values'] for var in x._attrs['shape']]
  self._attrs['dim_lower_bounds'] = [min(vals) for vals in x_shape_values]
  x_shape_values = ([x_shape_values[0]] + [[max(vs)] for vs in x_shape_values[1:]])
  x_shapes = itertools.product(*x_shape_values)
  self._attrs['exec_path'] = OrderedDict()
  for x_shape in x_shapes:
   key = self._gen_exec_key(x_shape)
   # Empty string marks "algorithm not chosen yet"; filled by profiling.
   self._attrs['exec_path'][key] = ''
 def _extract_epilogue_alignment(self, output_shape: List[IntVar]) -> None:
  """Pick the max vectorized-epilogue alignment from the static channel dim."""
  epilogue_dim = output_shape[(- 1)]
  if (not isinstance(epilogue_dim, IntImm)):
   raise RuntimeError('Conv output last dimension must be static!')
  self._attrs['epilogue_alignment'] = alignment.find_max_alignment(number=epilogue_dim._attrs['values'][0], dtype=self._attrs['inputs'][0]._attrs['dtype'])
 def __call__(self, x: Tensor, w: Tensor) -> List[Tensor]:
  """Wire x and w into this op and return the produced output tensor."""
  self._attrs['inputs'] = [x, w]
  self._set_depth()
  output_shape = self._infer_shapes(x, w)
  self._extract_exec_path(x)
  self._extract_epilogue_alignment(output_shape)
  output = Tensor(output_shape, src_ops={self}, dtype=x._attrs['dtype'])
  self._attrs['outputs'] = [output]
  return output
 def _get_op_attributes(self) -> Dict[(str, Any)]:
  """Return the constructor kwargs needed to recreate this op."""
  target_attrs = ['dilate', 'group', 'pad', 'stride']
  attr = {}
  for target_attr in target_attrs:
   if (target_attr in self._attrs):
    attr[target_attr] = self._attrs[target_attr]
  return attr
 def _should_build_profiler(self) -> bool:
  """Decide whether profiler binaries must be generated and built.

  Dynamic input dims always require building profilers; otherwise the
  profile cache is consulted and building is skipped when every
  workload hits the cache.
  """
  force_cache = environ.force_profiler_cache()
  if self._has_dynamic_input_dims():
   if force_cache:
    raise RuntimeError('We cannot force to use the cache as dynamic dims require us to generate and build the profilers')
   return True
  if force_cache:
   return False
  target = backend.target.Target.current()
  workloads = list(self._attrs['exec_path'].keys())
  build_profiler = True
  if (not target.use_dummy_profiling_results()):
   # Any op instance works for deriving the cache-query dtypes/layouts.
   tmp_key = next(iter(self._attrs['op_instance'].keys()))
   tmp_op = self._attrs['op_instance'][tmp_key]
   build_profiler = False
   for wkl in workloads:
    exec_entry_sha1 = sha1(wkl.encode('utf-8')).hexdigest()
    split_k = (1 if (self._attrs['split_k'] is None) else self._attrs['split_k'])
    query = ConvQueryEntry(dtype_a=(tmp_op.A.element.value - 1), dtype_b=(tmp_op.B.element.value - 1), dtype_c=(tmp_op.C.element.value - 1), dtype_acc=(tmp_op.accumulator_type().value - 1), major_a=tmp_op.A.layout.value, major_b=tmp_op.B.layout.value, major_c=tmp_op.C.layout.value, kh=self._attrs['KH'], kw=self._attrs['KW'], co=self._attrs['CO'], op_type=self._attrs['op'], device=target._arch, epilogue=tmp_op.epilogue_functor.value, split_k=split_k, exec_entry_sha1=exec_entry_sha1, **self._get_params_factory())
    cache_value = target.query_profile_cache('conv', query.__dict__)
    if ((cache_value is not None) and (not target.force_profile())):
     _LOGGER.info(f"Load profiling result for {self._attrs['name']} from cache: {cache_value}")
     (best_algo, workspace) = cache_value
     self._attrs['exec_path'][wkl] = best_algo
     self._attrs['workspace'] = max(self._attrs['workspace'], workspace)
    else:
     # One cache miss is enough to require building the profilers.
     build_profiler = True
  return build_profiler
 def gen_profiler(self, workdir: str=None, dynamic_profiling_strategy=DynamicProfileStrategy.HINTS) -> None:
  """Generate profiler sources for this op when profiling is required."""
  target = backend.target.Target.current()
  # Populate _attrs['op_instance'] via the target-specific config hook.
  func_key = '{target}.{op}.config'.format(target=target.name(), op=self._attrs['op'])
  func = registry.get(func_key)
  func(self._attrs, dtype=self._attrs['inputs'][0]._attrs['dtype'])
  if self._should_build_profiler():
   x_shapes = [self._invert_exec_key(exec_key) for exec_key in self._attrs['exec_path']]
   self._attrs['op_instance'] = filter_op_instances(func_attrs=self._attrs, x_shapes=x_shapes)
   return generate_profiler_sources(func_attrs=self._attrs, op_class='conv', workdir=workdir, shape_template=self.shape_eval_template)
 def _gen_profile_cmd(self, profiler_prefix, cfg, x_shape):
  """Build the argv for one profiler run (shape, kernel and conv params)."""
  exe_path = os.path.join(profiler_prefix, cfg)
  if (not os.access(exe_path, os.X_OK)):
   raise RuntimeError(('Profiler %s is not executable' % exe_path))
  cmd = [exe_path]
  params = self._get_params_factory()
  cmd.append(x_shape[0])
  cmd.append(x_shape[1])
  cmd.append(x_shape[2])
  cmd.append(x_shape[3])
  cmd.append(self._attrs['KH'])
  cmd.append(self._attrs['KW'])
  cmd.append(self._attrs['CO'])
  cmd.append(params['strideh'])
  cmd.append(params['padh'])
  cmd.append(params['dilateh'])
  cmd.append(params['stridew'])
  cmd.append(params['padw'])
  cmd.append(params['dilatew'])
  cmd.append(self._attrs['group'])
  command = [str(x) for x in cmd]
  return command
 def _profile_single_workload(self, profiler_prefix, exec_key, devices, force_cache):
  """Profile one workload, preferring the profile cache; returns (algo, workspace).

  Runs the profiler binary only on a cache miss (and never in CI /
  force_cache modes), then stores the winner back into the cache.
  """
  target = backend.target.Target.current()
  tmp_key = next(iter(self._attrs['op_instance'].keys()))
  tmp_op = self._attrs['op_instance'][tmp_key]
  exec_entry_sha1 = sha1(exec_key.encode('utf-8')).hexdigest()
  split_k = (1 if (self._attrs['split_k'] is None) else self._attrs['split_k'])
  query = ConvQueryEntry(dtype_a=(tmp_op.A.element.value - 1), dtype_b=(tmp_op.B.element.value - 1), dtype_c=(tmp_op.C.element.value - 1), dtype_acc=(tmp_op.accumulator_type().value - 1), major_a=tmp_op.A.layout.value, major_b=tmp_op.B.layout.value, major_c=tmp_op.C.layout.value, kh=self._attrs['KH'], kw=self._attrs['KW'], co=self._attrs['CO'], op_type=self._attrs['op'], device=target._arch, epilogue=tmp_op.epilogue_functor.value, split_k=split_k, exec_entry_sha1=exec_entry_sha1, **self._get_params_factory())
  cache_value = target.query_profile_cache('conv', query.__dict__)
  if ((cache_value is not None) and (not target.force_profile())):
   _LOGGER.info('Load profiling result from cache.')
   return cache_value
  # NOTE(review): the two raises below pass multiple args to the exception,
  # producing a tuple-style message — probably intended as one string.
  if ((cache_value is None) and force_cache):
   op_type = self._attrs['op']
   raise RuntimeError('force_cache is enabled but we could not find the following cache ', f'available on device target._arch={target._arch!r}, op_type={op_type!r}, exec_entry_sha1={exec_entry_sha1!r}')
  if target.use_dummy_profiling_results():
   op_type = self._attrs['op']
   raise Exception('This is a CI run but we could not find the following cache ', f'''available on device {target._arch}
''', f'''{op_type} {exec_entry_sha1}.
''', 'Please adjust target.select_minimal_algo function.')
  profiler_filename = get_profiler_filename(self._attrs, 'conv')
  runner = backend.profiler_runner.Runner(devices, self._attrs['name'], timeout=180)
  x_shape = self._invert_exec_key(exec_key)
  command = self._gen_profile_cmd(profiler_prefix, profiler_filename, x_shape)
  runner.push(profiler_filename, command)
  runner.join()
  result = runner.pull()
  if (len(result) == 0):
   raise RuntimeError(f'Profile workload: {exec_key} failed. Results: {result}.')
  # Pick the fastest profiled config.
  out = min(result, key=itemgetter(1))
  best_algo = out[1].op_config
  workspace = out[1].workspace
  cache_record = ConvRecordEntry(exec_entry=exec_key, exec_entry_sha1=exec_entry_sha1, dtype_a=(tmp_op.A.element.value - 1), dtype_b=(tmp_op.B.element.value - 1), dtype_c=(tmp_op.C.element.value - 1), dtype_acc=(tmp_op.accumulator_type().value - 1), major_a=tmp_op.A.layout.value, major_b=tmp_op.B.layout.value, major_c=tmp_op.C.layout.value, kh=self._attrs['KH'], kw=self._attrs['KW'], co=self._attrs['CO'], op_type=self._attrs['op'], epilogue=tmp_op.epilogue_functor.value, device=target._arch, algo=best_algo, workspace=workspace, split_k=split_k, **self._get_params_factory())
  Target.current().insert_profile_cache('conv', cache_record.__dict__)
  return (best_algo, workspace)
 def _has_dynamic_input_dims(self):
  """True when any input dim is not a static IntImm."""
  for input_tensor in self._attrs['inputs']:
   for dim in input_tensor._attrs['shape']:
    if (not isinstance(dim, IntImm)):
     return True
  return False
 def profile(self, workdir='./', devices=None, dynamic_profiling_strategy=DynamicProfileStrategy.HINTS):
  """Profile all workloads; adds a dynamic-dim pass when shapes are dynamic."""
  if (devices is None):
   devices = [0]
  self._profile_static(workdir, devices)
  if self._has_dynamic_input_dims():
   if (dynamic_profiling_strategy != DynamicProfileStrategy.HINTS):
    raise NotImplementedError('conv2d only supports HINTS dynamic profiling strategy for now! Current strategy: {}'.format(dynamic_profiling_strategy))
   self._profile_dynamic_dim(workdir)
 def _profile_static(self, workdir, devices):
  """Select an algorithm for every static workload in exec_path."""
  workloads = list(self._attrs['exec_path'].keys())
  profiler_prefix = os.path.join(workdir, 'profiler', self._attrs['op'])
  target = backend.target.Target.current()
  if ('op_instance' not in self._attrs):
   # gen_profiler was skipped; run the config hook now.
   func_key = '{target}.{op}.config'.format(target=target.name(), op=self._attrs['op'])
   func = registry.get(func_key)
   func(self._attrs, dtype=self._attrs['inputs'][0]._attrs['dtype'])
  force_cache = environ.force_profiler_cache()
  for wkl in workloads:
   _LOGGER.info('Profile: {name}: {wkl}'.format(name=self._attrs['name'], wkl=wkl))
   if (target.use_dummy_profiling_results() and (not force_cache)):
    # CI path: pick a deterministic minimal algorithm, skip real profiling.
    algo = target.select_minimal_algo(list(self._attrs['op_instance'].keys()))
    _LOGGER.info(f'Select minimal algo {algo} for CI')
    self._attrs['exec_path'][wkl] = algo
    self._attrs['workspace'] = 102400
   elif (self._attrs['exec_path'][wkl] == ''):
    (best_algo, workspace) = self._profile_single_workload(profiler_prefix, wkl, devices, force_cache)
    self._attrs['exec_path'][wkl] = best_algo
    self._attrs['workspace'] = max(self._attrs['workspace'], workspace)
 def _profile_dynamic_dim(self, workdir):
  """Collapse per-batch exec paths into ranged keys for a dynamic batch dim.

  When neighbouring batch sizes prefer different algorithms, a binary
  search over the batch dim finds the crossover point between the two.
  """
  def _extract_dynamic_dim(exec_keys):
   # Transpose the per-key dims into one list per dimension index.
   var_dims = [[], [], [], []]
   for key in exec_keys:
    dims = self._invert_exec_key(key)
    for (i, v) in enumerate(dims):
     var_dims[i].append(v)
   return var_dims
  dim_lbs = self._attrs['dim_lower_bounds']
  dims = _extract_dynamic_dim(self._attrs['exec_path'].keys())
  dim0_lb = dim_lbs[0]
  dim1_lb = dim_lbs[1]
  dim2_lb = dim_lbs[2]
  dim1_ub = dims[1][0]
  dim2_ub = dims[2][0]
  dim3 = dims[3][0]
  num_exec_path = len(self._attrs['exec_path'])
  if (num_exec_path < 1):
   return
  algos = list(self._attrs['exec_path'].values())
  if ((num_exec_path == 1) or (len(set(algos)) <= 1)):
   # Single workload or all workloads agree: one ranged key suffices.
   new_exec_paths = OrderedDict()
   dim0_ub = max(dims[0])
   new_key = self._gen_dyn_exec_key(dim0_lb, dim0_ub, dim1_lb, dim1_ub, dim2_lb, dim2_ub, dim3)
   new_exec_paths[new_key] = algos[0]
   self._attrs['exec_path'] = new_exec_paths
   return
  target = backend.target.Target.current()
  if target.use_dummy_profiling_results():
   return
  profiler_prefix = os.path.join(workdir, 'profiler', self._attrs['op'])
  runner = backend.profiler_runner.Runner([0], self._attrs['name'])
  # Each region is a batch interval whose endpoints prefer different algos.
  regions = []
  for i in range((len(dims[0]) - 1)):
   regions.append([dims[0][i], dims[0][(i + 1)], algos[i], algos[(i + 1)]])
  special_cases = OrderedDict()
  new_exec_paths = OrderedDict()
  for (lb, ub, lb_algo, ub_algo) in regions:
   # Binary-search the crossover batch size between lb_algo and ub_algo.
   mid = ((lb + ub) // 2)
   origin_lb = lb
   origin_ub = ub
   last_mid = mid
   while ((mid > lb) and (mid < ub)):
    mid = ((lb + ub) // 2)
    mid_shape = [mid, dim1_ub, dim2_ub, dim3]
    _LOGGER.info('current: lb_algo: {lb_algo}, LB:{lb} MID:{mid} UB:{ub}'.format(lb_algo=lb_algo, lb=lb, mid=mid, ub=ub))
    profiler_filename = get_profiler_filename(self._attrs, 'conv')
    profiler_cmd = self._gen_profile_cmd(profiler_prefix, profiler_filename, mid_shape)
    runner.push(idx=profiler_filename, cmd=profiler_cmd, return_ops=[str(lb_algo), str(ub_algo)])
    runner.join()
    result = runner.pull()
    result_dict = {res.op_config: res for res in result[0][1]}
    assert (len(result_dict) >= 1)
    if (len(result_dict) == 1):
     # Only one algo ran successfully at mid; it must be lb_algo.
     assert (str(ub_algo) not in result_dict)
     lb = (mid + 1)
    else:
     lb_time = result_dict[str(lb_algo)].duration
     ub_time = result_dict[str(ub_algo)].duration
     if (lb_time < ub_time):
      lb = (mid + 1)
     else:
      ub = (mid - 1)
    last_mid = mid
    mid = ((lb + ub) // 2)
   # Split the region at the found crossover point.
   lo_region_key = self._gen_dyn_exec_key(origin_lb, last_mid, dim1_lb, dim1_ub, dim2_lb, dim2_ub, dim3)
   up_region_key = self._gen_dyn_exec_key(last_mid, origin_ub, dim1_lb, dim1_ub, dim2_lb, dim2_ub, dim3)
   new_exec_paths[lo_region_key] = lb_algo
   new_exec_paths[up_region_key] = ub_algo
   special_cases.update(new_exec_paths)
  self._attrs['exec_path'] = special_cases
 def gen_function(self) -> str:
  """Delegate final source generation to the target-specific backend hook."""
  target = backend.target.Target.current()
  func_key = '{target}.{op}.gen_function'.format(target=target.name(), op=self._attrs['op'])
  func = registry.get(func_key)
  return func(self._attrs, self.exec_cond_template, self.shape_eval_template, self.shape_save_template)
class GenericOutputAdapter(ResponseRefactor):
 """Adapt raw API JSON results into output files (json/csv/tsv/h5) guided
 by the model's output schema."""

 def __init__(self, config_json):
  ResponseRefactor.__init__(self, config_json=config_json)
  self.api_schema = None
  self._schema = None
  self._array_types = set(['array', 'numeric_array', 'string_array', 'mixed_array'])
 # BUG FIX: _is_string and _extension take no `self`, yet are invoked as
 # `self._is_string(...)` / `self._extension(...)`; without @staticmethod the
 # bound call passes `self` as the argument and raises TypeError.
 @staticmethod
 def _is_string(output):
  """True when *output* is exactly a str."""
  if (type(output) is str):
   return True
  else:
   return False
 @staticmethod
 def _extension(filename):
  """Return the text after the last dot of *filename*."""
  return filename.split('.')[(- 1)]
 def _has_extension(self, output, extension):
  """True when *output* is a string ending in .*extension*."""
  if (not self._is_string(output)):
   return False
  ext = output.split('.')[(- 1)]
  if (ext == extension):
   return True
  else:
   return False
 def __pure_dtype(self, k):
  """Declared type of output key *k* from the api schema."""
  t = self._schema[k]['type']
  return t
 def __array_shape(self, k):
  """Declared length (first shape entry) of array output key *k*."""
  s = self._schema[k]['shape']
  return s[0]
 def __meta_by_key(self, k):
  """Per-key metadata (e.g. column labels) from the api schema."""
  return self._schema[k]['meta']
 def __cast_values(self, vals, dtypes, output_keys):
  """Flatten values: arrays are spliced in (None-padded), scalars appended."""
  v = []
  for (v_, t_, k_) in zip(vals, dtypes, output_keys):
   self.logger.debug(v_)
   self.logger.debug(t_)
   self.logger.debug(k_)
   if (t_ in self._array_types):
    if (v_ is None):
     # Missing array result: fill with Nones to keep column count stable.
     v_ = ([None] * self.__array_shape(k_))
    v += v_
   else:
    v += [v_]
  return v
 def _guess_pure_dtype_if_absent(self, vals):
  """Infer a dtype from the values when the schema does not declare one."""
  pdt = PureDataTyper(vals)
  dtype = pdt.get_type()
  self.logger.debug('Guessed pure datatype: {0}'.format(dtype))
  if (dtype is None):
   return None
  else:
   return dtype['type']
 def __expand_output_keys(self, vals, output_keys):
  """Expand array-typed output keys into one column name per element.

  With multiple output keys, expanded names are prefixed with the key via
  FEATURE_MERGE_PATTERN; with a single key the metadata names are used as-is.
  """
  output_keys_expanded = []
  if (len(output_keys) == 1):
   merge_key = False
  else:
   merge_key = True
  # Cache per-key dtypes so guessing only happens once per key.
  current_pure_dtype = {}
  for (v, ok) in zip(vals, output_keys):
   self.logger.debug('Data: {0}'.format(ok))
   self.logger.debug('Values: {0}'.format(v))
   m = self.__meta_by_key(ok)
   if (ok not in current_pure_dtype):
    self.logger.debug('Getting pure dtype for {0}'.format(ok))
    t = self.__pure_dtype(ok)
    self.logger.debug('This is the pure datatype: {0}'.format(t))
    if (t is None):
     t = self._guess_pure_dtype_if_absent(v)
     self.logger.debug('Guessed absent pure datatype: {0}'.format(t))
    current_pure_dtype[ok] = t
   else:
    t = current_pure_dtype[ok]
   self.logger.debug('Datatype: {0}'.format(t))
   if (t in self._array_types):
    self.logger.debug('Datatype has been matched: {0} over {1}'.format(t, self._array_types))
    assert (m is not None)
    if (v is not None):
     if (len(m) > len(v)):
      self.logger.debug('Metadata {0} is longer than values {1}'.format(len(m), len(v)))
      v = (list(v) + ([None] * (len(m) - len(v))))
     assert (len(m) == len(v))
    if merge_key:
     self.logger.debug('Merge key is {0}'.format(merge_key))
     output_keys_expanded += ['{0}{1}{2}'.format(ok, FEATURE_MERGE_PATTERN, m_) for m_ in m]
    else:
     self.logger.debug('No merge key')
     output_keys_expanded += ['{0}'.format(m_) for m_ in m]
   else:
    output_keys_expanded += [ok]
  return output_keys_expanded
 def _get_outputshape_from_airtable(self, model_id):
  """Look up the model's 'Output Shape' field in Airtable (default ' ')."""
  airtable_interface = AirtableInterface(config_json=self.config_json)
  output_shape = ' '
  for record in airtable_interface.items():
   model_idi = record['fields']['Identifier']
   try:
    if (model_idi == model_id):
     output_shape = record['fields']['Output Shape']
   except KeyError:
    self.logger.warning('The Output Shape field is empty')
  return output_shape
 def _to_dataframe(self, result, model_id):
  """Convert a JSON result string into a DataFrame keyed by (key, input)."""
  output_shape = self._get_outputshape_from_airtable(model_id)
  result = json.loads(result)
  R = []
  output_keys = None
  output_keys_expanded = None
  for r in result:
   inp = r['input']
   out = r['output']
   if (output_shape == 'Flexible List'):
    # Flexible outputs are kept as one serialized JSON column.
    vals = [json.dumps(out)]
    output_keys_expanded = ['outcome']
   else:
    if (output_keys is None):
     output_keys = [k for k in out.keys()]
    vals = [out[k] for k in output_keys]
    dtypes = [self.__pure_dtype(k) for k in output_keys]
    are_dtypes_informative = False
    for dtype in dtypes:
     if (dtype is not None):
      are_dtypes_informative = True
    if (output_keys_expanded is None):
     output_keys_expanded = self.__expand_output_keys(vals, output_keys)
    if (not are_dtypes_informative):
     t = self._guess_pure_dtype_if_absent(vals)
     if (len(output_keys) == 1):
      dtypes = [t]
    vals = self.__cast_values(vals, dtypes, output_keys)
   R += [([inp['key'], inp['input']] + vals)]
  columns = (['key', 'input'] + output_keys_expanded)
  df = DataFrame(data=R, columns=columns)
  return df
 def meta(self):
  """Return the inferred meta, or log an error when none is available yet."""
  if (self._meta is None):
   self.logger.error('Meta not available, run some adapations first and it will be inferred atomatically')
  else:
   return self._meta
 def merge(self, subfiles, output_file):
  """Concatenate *subfiles* (all the same format) into *output_file*.

  JSON files are merged as lists; delimited files keep only the first
  file's header row.
  """
  self.logger.debug('Merging {0} files into {1}'.format(len(subfiles), output_file))
  extensions = set([self._extension(x) for x in (subfiles + [output_file])])
  assert (len(extensions) == 1)
  if self._has_extension(output_file, 'json'):
   data = []
   for subfile in subfiles:
    with open(subfile, 'r') as f:
     data += json.load(f)
   with open(output_file, 'w') as f:
    json.dump(data, f, indent=4)
  else:
   with open(output_file, 'w') as fo:
    use_header = True
    for subfile in subfiles:
     with open(subfile, 'r') as fi:
      if (not use_header):
       next(fi)
      for l in fi:
       fo.write(l)
      use_header = False
 def adapt(self, result, output, model_id=None, api_name=None):
  """Write *result* to *output* in the format implied by its extension.

  Lazily resolves the api schema when model_id/api_name are given;
  returns the (unmodified) result string.
  """
  if ((model_id is not None) and (api_name is not None) and (self.api_schema is None)):
   self.api_schema = ApiSchema(model_id=model_id, config_json=self.config_json)
  if (self.api_schema is not None):
   if self.api_schema.isfile():
    self._schema = self.api_schema.get_output_by_api(api_name)
   else:
    self.api_schema = None
  if ((output is not None) and (self._schema is None)):
   raise Exception
  if self._has_extension(output, 'json'):
   data = json.loads(result)
   with open(output, 'w') as f:
    json.dump(data, f, indent=4)
  if self._has_extension(output, 'csv'):
   df = self._to_dataframe(result, model_id)
   df.write(output)
  if self._has_extension(output, 'tsv'):
   df = self._to_dataframe(result, model_id)
   df.write(output, delimiter='\t')
  if self._has_extension(output, 'h5'):
   df = self._to_dataframe(result, model_id)
   df.write(output)
  return result
class OptionPlotoptionsHeatmapSonificationDefaultinstrumentoptionsMappingPan(Options):
 """Accessors for the Highcharts `...sonification.defaultInstrumentOptions.mapping.pan` option group.

 NOTE(review): every option below appears as a getter/setter pair bearing the
 same name; upstream these are almost certainly decorated with @property /
 @<name>.setter, which seems to have been stripped here — as written, each
 setter definition shadows its getter. Confirm against the code generator
 that emits this file.
 """
 def mapFunction(self):
  # Getter: mapping function name (None when unset).
  return self._config_get(None)
 def mapFunction(self, value: Any):
  # Setter: raw value, not JS-typed.
  self._config(value, js_type=False)
 def mapTo(self):
  # Getter: data property the pan is mapped to.
  return self._config_get(None)
 def mapTo(self, text: str):
  self._config(text, js_type=False)
 def max(self):
  # Getter: upper bound of the mapped pan range.
  return self._config_get(None)
 def max(self, num: float):
  self._config(num, js_type=False)
 def min(self):
  # Getter: lower bound of the mapped pan range.
  return self._config_get(None)
 def min(self, num: float):
  self._config(num, js_type=False)
 def within(self):
  # Getter: scope the min/max mapping applies within.
  return self._config_get(None)
 def within(self, value: Any):
  self._config(value, js_type=False)
def test_load_data(web_page_loader):
 """load_data must return cleaned content plus a doc_id derived from content+url."""
 # BUG FIX: the original line was a truncated string literal (`page_url = '`),
 # a syntax error. Restored with a placeholder URL; the exact original URL
 # was lost — any well-formed URL works since the HTTP get is mocked below.
 page_url = 'https://example.com/page'
 mock_response = Mock()
 mock_response.status_code = 200
 mock_response.content = '\n <html>\n <head>\n <title>Test Page</title>\n </head>\n <body>\n <div id="content">\n <p>This is some test content.</p>\n </div>\n </body>\n </html>\n '
 with patch('embedchain.loaders.web_page.WebPageLoader._session.get', return_value=mock_response):
  result = web_page_loader.load_data(page_url)
  content = web_page_loader._get_clean_content(mock_response.content, page_url)
  # doc_id is the sha256 of cleaned content concatenated with the url.
  expected_doc_id = hashlib.sha256((content + page_url).encode()).hexdigest()
  assert (result['doc_id'] == expected_doc_id)
  expected_data = [{'content': content, 'meta_data': {'url': page_url}}]
  assert (result['data'] == expected_data)
# NOTE(review): the line below is not valid Python on its own — it looks like a
# stripped `@pytest.fixture(scope='module')` decorator; confirm and restore.
(scope='module')
def _pgs():
 # Module-scoped PostgreSQL-backed database with all test models defined.
 app = App(__name__)
 # POSTGRES_URI comes from the environment; auto_connect opens the pool eagerly.
 db = Database(app, config=sdict(uri=f"postgres://{os.environ.get('POSTGRES_URI')}", auto_connect=True))
 db.define_models(SourceCustom, SourceMulti, DestCustomCustom, DestCustomMulti, DestMultiCustom, DestMultiMulti, DoctorCustom, PatientCustom, AppointmentCustom, DoctorMulti, PatientMulti, AppointmentMulti, SymptomCustom, SymptomMulti)
 return db
def add_mode_of_payments(payment_mode, account, company):
 """Create the 'Mode of Payment' record named *payment_mode* if missing,
 attaching the given default account for *company*."""
 if frappe.db.get_value('Mode of Payment', payment_mode):
  # Record already exists; nothing to create.
  return
 doc = frappe.new_doc('Mode of Payment')
 doc.mode_of_payment = payment_mode
 doc.enabled = 1
 doc.type = 'General'
 doc.set('accounts', [])
 add_payment_mode_accounts(doc, account, company)
 doc.insert()
('script,expected', [param('tests/test_apps/passes_callable_class_to_hydra_main/my_app.py', dedent(' 123\n my_app\n '), id='passes_callable_class_to_hydra_main')])
def test_pass_callable_class_to_hydra_main(tmpdir: Path, script: str, expected: str) -> None:
 """Run the app script in an isolated run dir and compare its stdout."""
 overrides = ['hydra.run.dir=' + str(tmpdir), 'hydra.job.chdir=True']
 stdout, _err = run_python_script([script] + overrides)
 assert_text_same(stdout, expected)
def _resolve_imports(api_to_calls: ImportToCallSiteDict, wrapper_set: WrapperSet, export_hashes: Optional[Dict[(int, int)]], md: Cs, process_controller: ProcessController) -> None:
 """Resolve each import wrapper to the real API address and record its call sites.

 For every (call site, wrapper) pair: reuse a previously resolved wrapper if
 possible; on x86-32 with export hashes available, try matching the wrapper's
 function hash against known exports first; otherwise fall back to emulating
 the wrapper via resolve_wrapped_api. Results accumulate in *api_to_calls*.
 """
 arch = process_controller.architecture
 page_size = process_controller.page_size
 def get_data(addr: int, size: int) -> bytes:
  # Read target-process memory; on failure retry with the read clamped
  # to the end of the current page.
  try:
   return process_controller.read_process_memory(addr, size)
  except ReadProcessMemoryError:
   size = (page_size - (addr % page_size))
   return process_controller.read_process_memory(addr, size)
 # wrapper address -> resolved API address (memoization across call sites).
 resolved_wrappers: Dict[(int, int)] = {}
 # Wrappers that failed hashing once; skipped on subsequent call sites.
 problematic_wrappers = set()
 for (call_addr, call_size, instr_was_jmp, wrapper_addr, _) in wrapper_set:
  resolved_addr = resolved_wrappers.get(wrapper_addr)
  if (resolved_addr is not None):
   LOG.debug('Already resolved wrapper: %s -> %s', hex(wrapper_addr), hex(resolved_addr))
   api_to_calls[resolved_addr].append((call_addr, call_size, instr_was_jmp))
   continue
  if (wrapper_addr in problematic_wrappers):
   LOG.debug('Skipping unresolved wrapper')
   continue
  # Fast path (x86-32 only): compare the wrapper's code hash against the
  # precomputed export hashes.
  if ((export_hashes is not None) and (arch == Architecture.X86_32)):
   try:
    import_hash = compute_function_hash(md, wrapper_addr, get_data, process_controller)
   except Exception as ex:
    LOG.debug('Failure for wrapper at %s: %s', hex(wrapper_addr), str(ex))
    problematic_wrappers.add(wrapper_addr)
    continue
   if (import_hash != EMPTY_FUNCTION_HASH):
    LOG.debug('Hash: %s', hex(import_hash))
    resolved_addr = export_hashes.get(import_hash)
    if (resolved_addr is not None):
     LOG.debug('Hash matched')
     LOG.debug('Resolved API: %s -> %s', hex(wrapper_addr), hex(resolved_addr))
     resolved_wrappers[wrapper_addr] = resolved_addr
     api_to_calls[resolved_addr].append((call_addr, call_size, instr_was_jmp))
     continue
  # Slow path: resolve by following/emulating the wrapper from the call site.
  resolved_addr = resolve_wrapped_api(call_addr, process_controller, (call_addr + call_size))
  if (resolved_addr is not None):
   LOG.debug('Resolved API: %s -> %s', hex(wrapper_addr), hex(resolved_addr))
   resolved_wrappers[wrapper_addr] = resolved_addr
   api_to_calls[resolved_addr].append((call_addr, call_size, instr_was_jmp))
  else:
   problematic_wrappers.add(wrapper_addr)
# NOTE(review): the call below looks like a stripped decorator registering this
# class for the ROUTER_ID ESI type — confirm against the original BGP module.
_type(EvpnEsi.ROUTER_ID)
class EvpnRouterIDEsi(EvpnEsi):
 """EVPN Router-ID Ethernet Segment Identifier (type 4).

 Value layout (_VALUE_PACK_STR '!4sIx'): 4-byte router id, 4-byte local
 discriminator, 1 pad byte.
 """
 _TYPE_NAME = 'router_id'
 _VALUE_PACK_STR = '!4sIx'
 _VALUE_FIELDS = ['router_id', 'local_disc']
 _TYPE = {'ascii': ['router_id']}
 def __init__(self, router_id, local_disc, type_=None):
  super(EvpnRouterIDEsi, self).__init__(type_)
  # Dotted-quad router id string and integer local discriminator.
  self.router_id = router_id
  self.local_disc = local_disc
 # NOTE(review): takes `cls` — presumably decorated @classmethod upstream;
 # the decorator appears to have been stripped here.
 def parse_value(cls, buf):
  """Unpack the value bytes into constructor kwargs."""
  (router_id, local_disc) = struct.unpack_from(cls._VALUE_PACK_STR, buf)
  return {'router_id': addrconv.ipv4.bin_to_text(router_id), 'local_disc': local_disc}
 def serialize_value(self):
  """Pack router_id (as binary IPv4) and local_disc into wire format."""
  return struct.pack(self._VALUE_PACK_STR, addrconv.ipv4.text_to_bin(self.router_id), self.local_disc)
def workflow_method(func=None, name=None, workflow_id=None, workflow_id_reuse_policy=WorkflowIdReusePolicy.AllowDuplicateFailedOnly, execution_start_to_close_timeout_seconds=7200, task_start_to_close_timeout_seconds=10, task_list=None):
 """Decorator marking a function as a workflow entry point.

 Usable bare (@workflow_method) or with arguments (@workflow_method(...)).
 Stores the settings on the function's _workflow_method metadata object.
 """
 def wrapper(fn):
  # Reuse existing metadata so repeated decoration only updates fields.
  if not hasattr(fn, '_workflow_method'):
   fn._workflow_method = WorkflowMethod()
  meta = fn._workflow_method
  meta._name = name if name else get_workflow_method_name(fn)
  meta._workflow_id = workflow_id
  meta._workflow_id_reuse_policy = workflow_id_reuse_policy
  meta._execution_start_to_close_timeout_seconds = execution_start_to_close_timeout_seconds
  meta._task_start_to_close_timeout_seconds = task_start_to_close_timeout_seconds
  meta._task_list = task_list
  return fn
 # Bare usage passes the function directly; parameterized usage returns wrapper.
 if func and inspect.isfunction(func):
  return wrapper(func)
 return wrapper
def test_generate_elasticsearch_query():
    """Required PSC code prefixes are ORed/ANDed into a single query_string
    over product_or_service_code.keyword, with excluded prefixes negated."""
    assert (ESPSCCodes.generate_elasticsearch_query({'require': [['Product', '1', '1111'], ['Research and Development']], 'exclude': [['Product', '1'], ['Research and Development', 'A', 'A5']]}, _QueryType.AWARDS).to_dict() == {'query_string': {'query': '((((A*)) AND (((NOT (A* AND A5*)))))) OR ((((1* AND 1111))))', 'default_field': 'product_or_service_code.keyword'}})
def test_retrieve(additionals, utils):
    """Index then retrieve through the empty selector; each element must end
    up with an image file on disk under the 'empty' query directory."""
    selector = additionals.emptySelector
    selector.start_indexing()
    selector.start_retrieving()
    query_dir = selector.disk.read_query('empty')
    for element in ('el1', 'el2', 'el3'):
        assert os.path.isfile(query_dir / f'{element}/image.jpeg')
class WinterSnow(GraphCanvas.Canvas):
    """Full-window canvas skin animating falling snow.

    The animation runs entirely in JavaScript: flakes live on
    ``window.flakes`` and are advanced once per frame via
    ``requestAnimationFrame``; the mouse position repels nearby flakes.
    """
    name = 'Skin Winter Snow'
    _option_cls = OptSkins.OptionsSkin

    def cursors(self):
        # This skin defines no custom cursors.
        pass

    # JS bootstrap: polyfill requestAnimationFrame, size the canvas to the
    # window (and keep it sized on resize), track the mouse, create
    # window.flakeCount flakes with random position/size/speed/opacity,
    # then kick off the startSnow() render loop.
    _js__builder__ = '\n var requestAnimationFrame = window.requestAnimationFrame || window.mozRequestAnimationFrame || window.webkitRequestAnimationFrame || window.msRequestAnimationFrame ||\n function(callback){window.setTimeout(callback, 1000 / 60)}; window.requestAnimationFrame = requestAnimationFrame;\n window.flakes = []; window.flakeCount = 400; var mX = -100; var mY = -100;\n htmlObj.width = window.innerWidth; htmlObj.height = window.innerHeight;\n htmlObj.addEventListener("mousemove", function(e) {mX = e.clientX, mY = e.clientY});\n window.addEventListener("resize", function(){htmlObj.width = window.innerWidth; htmlObj.height = window.innerHeight})\n for (var i = 0; i < window.flakeCount; i++) {\n var x = Math.floor(Math.random() * htmlObj.width); var y = Math.floor(Math.random() * htmlObj.height);\n var size = (Math.random() * 3) + 2; var speed = (Math.random() * 1) + 0.5;\n var opacity = (Math.random() * 0.5) + 0.3;\n window.flakes.push({speed: speed, velY: speed, velX: 0, x: x, y: y, size: size, stepSize: (Math.random()) / 30,\n step: 0, opacity: opacity})};\n startSnow()\n'

    def __str__(self):
        # Register the builder plus two page-level JS constructors:
        # resetSnow() respawns a flake at the top with fresh random attributes,
        # startSnow() is the per-frame draw/update loop.
        self.page.properties.js.add_builders(self.refresh())
        self.page.properties.js.add_constructor('resetSnow', ('\nfunction resetSnow(flake){\n var canvas = document.getElementById("%s");\n flake.x = Math.floor(Math.random() * canvas.width); flake.y = 0; flake.size = (Math.random() * 3) + 2;\n flake.speed = (Math.random() * 1) + 0.5; flake.velY = flake.speed; flake.velX = 0;\n flake.opacity = (Math.random() * 0.5) + 0.3}' % self.htmlCode))
        self.page.properties.js.add_constructor('startSnow', ('\nfunction startSnow() { var mX = -100; var mY = -100;\n var canvas = document.getElementById("%s"); var ctx = canvas.getContext("2d"); \n ctx.clearRect(0, 0, canvas.width, canvas.height);\n for (var i = 0; i < window.flakeCount; i++) {\n var flake = window.flakes[i]; var x = mX; var y = mY; var minDist = 150; var x2 = flake.x; var y2 = flake.y;\n var dist = Math.sqrt((x2 - x) * (x2 - x) + (y2 - y) * (y2 - y)); var dx = x2 - x; var dy = y2 - y;\n if (dist < minDist) {\n var force = minDist / (dist * dist); var xcomp = (x - x2) / dist; var ycomp = (y - y2) / dist; \n var deltaV = force / 2; flake.velX -= deltaV * xcomp; flake.velY -= deltaV * ycomp} \n else {\n flake.velX *= .98; if (flake.velY <= flake.speed){flake.velY = flake.speed};\n flake.velX += Math.cos(flake.step += .05) * flake.stepSize}\n ctx.fillStyle = "rgba(255,255,255," + flake.opacity + ")";\n flake.y += flake.velY; flake.x += flake.velX;\n if (flake.y >= canvas.height || flake.y <= 0) {resetSnow(flake)}\n if (flake.x >= canvas.width || flake.x <= 0) {resetSnow(flake)}\n ctx.beginPath(); ctx.arc(flake.x, flake.y, flake.size, 0, Math.PI * 2); ctx.fill()}\n requestAnimationFrame(startSnow)}' % self.htmlCode))
        return ('<canvas %s>Your browser does not support the HTML5 canvas tag.</canvas>' % self.get_attrs(css_class_names=self.style.get_classes()))
def verbose_option(dest='verbose', default=0, flag_value=1, callback=None, expose_value=False, show_default=False):
    """Click-style decorator factory adding --verbose / --no-verbose handling.

    The wrapped command receives the computed verbosity under `dest` only
    when `expose_value` is True; `callback` (if callable) is invoked with
    (ctx, param, value) before the underlying command runs.
    """
    def decorator(f):
        # NOTE(review): the next three lines look like decorators of
        # `new_func` that lost their '@' during extraction
        # (`@_context`, `@_flag_option(...)`, `@_verbose_flag_option()`);
        # confirm against the original source.
        _context
        _flag_option(default=default, flag_value=flag_value, show_default=show_default)
        _verbose_flag_option()
        def new_func(ctx, *args, **kwargs):
            # Both flags are always injected by the option decorators above.
            verbose = kwargs.pop('verbose')
            no_verbose = kwargs.pop('no_verbose')
            if no_verbose:
                # --no-verbose wins over any --verbose count.
                verbose = 0
            if expose_value:
                kwargs[dest] = verbose
            if callable(callback):
                param = ctx.params.get('verbose')
                value = verbose
                callback(ctx, param, value)
            return ctx.invoke(f, *args, **kwargs)
        return update_wrapper_with_click_params(new_func, f)
    return decorator
class SystemChatMember(ChatMember):
    """Chat member that represents the system itself (notifications, status
    messages) rather than a real user."""

    # Fallback unique ID used when the caller supplies neither uid nor id.
    SYSTEM_ID = ChatID('__system__')

    def __init__(self, chat: 'Chat', *, name: str='', alias: Optional[str]=None, id: ChatID=ChatID(''), uid: ChatID=ChatID(''), vendor_specific: Dict[(str, Any)]=None, description: str='', middleware: Optional[Middleware]=None):
        if not name:
            # Localized default display name.
            name = translator.gettext('System')
        if not uid:
            uid = id or self.SYSTEM_ID
        super().__init__(chat, name=name, alias=alias, id=id, uid=uid, vendor_specific=vendor_specific, description=description, middleware=middleware)
class FridaProcessController(ProcessController):
    """ProcessController backed by a Frida session.

    All introspection and memory access is delegated to the RPC exports of
    the injected Frida script; addresses cross the RPC boundary encoded as
    hex strings and are converted back with ``int(x, 16)``.
    """

    def __init__(self, pid: int, main_module_name: str, frida_session: frida.core.Session, frida_script: frida.core.Script):
        frida_rpc = frida_script.exports
        super().__init__(pid, main_module_name, _str_to_architecture(frida_rpc.get_architecture()), frida_rpc.get_pointer_size(), frida_rpc.get_page_size())
        self._frida_rpc = frida_rpc
        self._frida_session = frida_session
        # Lazily populated by enumerate_exported_functions().
        self._exported_functions_cache: Optional[Dict[(int, Dict[(str, Any)])]] = None

    def find_module_by_address(self, address: int) -> Optional[Dict[(str, Any)]]:
        """Return the module descriptor containing `address`, or None."""
        value: Optional[Dict[(str, Any)]] = self._frida_rpc.find_module_by_address(hex(address))
        return value

    def find_range_by_address(self, address: int, include_data: bool=False) -> Optional[MemoryRange]:
        """Return the memory range containing `address` (optionally with its
        bytes), or None when unmapped."""
        value: Optional[Dict[(str, Any)]] = self._frida_rpc.find_range_by_address(hex(address))
        if (value is None):
            return None
        return self._frida_range_to_mem_range(value, include_data)

    def find_export_by_name(self, module_name: str, export_name: str) -> Optional[int]:
        """Return the address of `export_name` in `module_name`, or None."""
        export_address: Optional[str] = self._frida_rpc.find_export_by_name(module_name, export_name)
        if (export_address is None):
            return None
        return int(export_address, 16)

    def enumerate_modules(self) -> List[str]:
        """Return the names of all modules loaded in the target process."""
        value: List[str] = self._frida_rpc.enumerate_modules()
        return value

    def enumerate_module_ranges(self, module_name: str, include_data: bool=False) -> List[MemoryRange]:
        """Return the memory ranges of `module_name`, optionally with data."""
        def convert_range(dict_range: Dict[(str, Any)]) -> MemoryRange:
            return self._frida_range_to_mem_range(dict_range, include_data)
        value: List[Dict[(str, Any)]] = self._frida_rpc.enumerate_module_ranges(module_name)
        return list(map(convert_range, value))

    def enumerate_exported_functions(self, update_cache: bool=False) -> Dict[(int, Dict[(str, Any)])]:
        """Return {address: export-descriptor} for the main module.

        Results are cached after the first call; pass `update_cache=True`
        to force a refresh.
        """
        if ((self._exported_functions_cache is None) or update_cache):
            value: List[Dict[(str, Any)]] = self._frida_rpc.enumerate_exported_functions(self.main_module_name)
            exports_dict = {int(e['address'], 16): e for e in value}
            self._exported_functions_cache = exports_dict
            return exports_dict
        return self._exported_functions_cache

    def allocate_process_memory(self, size: int, near: int) -> int:
        """Allocate `size` bytes in the target, near address `near`."""
        buffer_addr = self._frida_rpc.allocate_process_memory(size, near)
        return int(buffer_addr, 16)

    def query_memory_protection(self, address: int) -> str:
        """Return the protection string (e.g. 'rwx') of the page at `address`.

        Raises QueryProcessMemoryError when the RPC call fails.
        """
        try:
            protection: str = self._frida_rpc.query_memory_protection(hex(address))
            return protection
        except frida.core.RPCException as rpc_exception:
            raise QueryProcessMemoryError from rpc_exception

    def set_memory_protection(self, address: int, size: int, protection: str) -> bool:
        """Change protection of [address, address+size); True on success."""
        result: bool = self._frida_rpc.set_memory_protection(hex(address), size, protection)
        return result

    def read_process_memory(self, address: int, size: int) -> bytes:
        """Read `size` bytes from the target at `address`.

        Reads are chunked (MAX_DATA_CHUNK_SIZE) to stay within the RPC
        transport limits.  Raises ReadProcessMemoryError on failure.
        """
        read_data = bytearray(size)
        try:
            for offset in range(0, size, MAX_DATA_CHUNK_SIZE):
                chunk_size = min(MAX_DATA_CHUNK_SIZE, (size - offset))
                data = self._frida_rpc.read_process_memory(hex((address + offset)), chunk_size)
                if (data is None):
                    raise ReadProcessMemoryError('read_process_memory failed (invalid parameters?)')
                read_data[offset:(offset + chunk_size)] = data
            return bytes(read_data)
        except frida.core.RPCException as rpc_exception:
            raise ReadProcessMemoryError from rpc_exception

    def write_process_memory(self, address: int, data: List[int]) -> None:
        """Write `data` (list of byte values) into the target at `address`.

        Raises WriteProcessMemoryError on failure.
        """
        try:
            self._frida_rpc.write_process_memory(hex(address), data)
        except frida.core.RPCException as rpc_exception:
            raise WriteProcessMemoryError from rpc_exception

    def terminate_process(self) -> None:
        """Notify the script that dumping finished, then kill and detach."""
        self._frida_rpc.notify_dumping_finished()
        frida.kill(self.pid)
        self._frida_session.detach()

    def _frida_range_to_mem_range(self, dict_range: Dict[(str, Any)], with_data: bool) -> MemoryRange:
        """Convert a Frida range dict into a MemoryRange, optionally reading
        the range's bytes from the target."""
        base = int(dict_range['base'], 16)
        size = dict_range['size']
        data = None
        if with_data:
            data = self.read_process_memory(base, size)
        return MemoryRange(base=base, size=size, protection=dict_range['protection'], data=data)
class AssistedHealHandler(THBEventHandler):
    """After a successful TryRevive, lets the reviver trigger AssistedHeal
    on the revived target (if the target has the skill and the reviver
    opts in)."""

    interested = ['action_after']

    def handle(self, evt_type, act):
        # Only react to a completed TryRevive action.
        if evt_type != 'action_after' or not isinstance(act, TryRevive):
            return act
        if not act.succeeded:
            return act
        assert act.revived_by
        target = act.target
        if not target.has_skill(AssistedHeal):
            return act
        g = self.game
        reviver = act.revived_by
        self.good_person = reviver
        # Ask the reviver whether to perform the assisted heal.
        if g.user_input([reviver], ChooseOptionInputlet(self, (False, True))):
            g.process_action(AssistedHealAction(reviver, target))
        return act
class CourseCreatedList(mixins.CreateModelMixin, mixins.UpdateModelMixin, viewsets.ReadOnlyModelViewSet):
    """DRF viewset over created courses: read-only listing/retrieval plus
    create and update, excluding soft-deleted rows."""
    queryset = Courses.objects.filter(is_delete=False)  # hide soft-deleted courses
    serializer_class = CreatedCourseSerializers
    pagination_class = StandardResultsSetPagination
    # JWT-authenticated; only the owner may modify, others read-only.
    permission_classes = (IsAuthenticated, IsOwnerOrReadOnly)
    authentication_classes = [JSONWebTokenAuthentication]
    filter_backends = (DjangoFilterBackend,)
    filter_class = CoursesFilter
def undervolt(config):
    """Apply the configured undervolt offsets to every voltage plane.

    Offsets are read from the power-source-specific section
    (UNDERVOLT.<SOURCE>) with fallback to the generic UNDERVOLT section.
    In debug mode each written value is read back and compared.
    """
    if ((('UNDERVOLT.{:s}'.format(power['source']) not in config) and ('UNDERVOLT' not in config)) or ('UNDERVOLT' in UNSUPPORTED_FEATURES)):
        return
    for plane in VOLTAGE_PLANES:
        write_offset_mv = config.getfloat('UNDERVOLT.{:s}'.format(power['source']), plane, fallback=config.getfloat('UNDERVOLT', plane, fallback=0.0))
        write_value = calc_undervolt_msr(plane, write_offset_mv)
        writemsr('MSR_OC_MAILBOX', write_value)
        if args.debug:
            # Keep only the low 32 bits (the offset payload) so the value is
            # comparable with what get_undervolt() reads back.  The original
            # line was truncated to a bare `write_value &=`.
            write_value &= 0xFFFFFFFF
            read_value = get_undervolt(plane)[plane]
            read_offset_mv = calc_undervolt_mv(read_value)
            match = (OK if (write_value == read_value) else ERR)
            log('[D] Undervolt plane {:s} - write {:.0f} mV ({:#x}) - read {:.0f} mV ({:#x}) - match {}'.format(plane, write_offset_mv, write_value, read_offset_mv, read_value, match))
class Annotation():
    """Describes a computation parameter: its type plus its role.

    Roles: 'i' (input array), 'o' (output array), 'io' (both), 's' (scalar).
    If no role is given it is derived from the type's shape and `constant`.
    """

    def __init__(self, type_, role=None, constant=False):
        self.type = Type.from_value(type_)
        if (role is None):
            # Scalars default to 's'; constant arrays are input-only;
            # other arrays default to input/output.
            if (len(self.type.shape) == 0):
                role = 's'
            elif constant:
                role = 'i'
            else:
                role = 'io'
        assert (role in ('i', 'o', 'io', 's'))
        self.role = role
        self.constant = constant
        if (role == 's'):
            self.array = False
            self.input = False
            self.output = False
        else:
            self.array = True
            self.input = ('i' in role)
            self.output = ('o' in role)

    def __eq__(self, other):
        # Fix: return NotImplemented for foreign types instead of raising
        # AttributeError when `other` lacks type/role/constant attributes.
        if not isinstance(other, Annotation):
            return NotImplemented
        return ((self.type == other.type) and (self.role == other.role) and (self.constant == other.constant))

    def can_be_argument_for(self, annotation):
        """True if a value with this annotation may be passed where
        `annotation` is expected (types compatible, roles permitting)."""
        if (not self.type.compatible_with(annotation.type)):
            return False
        if (self.role == annotation.role):
            return True
        # An 'io' value can stand in for any array-role parameter.
        if ((self.role == 'io') and annotation.array):
            return True
        return False

    def __repr__(self):
        if self.array:
            return 'Annotation({type_}, role={role}{constant})'.format(type_=self.type, role=repr(self.role), constant=(', constant' if self.constant else ''))
        else:
            return 'Annotation({dtype})'.format(dtype=self.type.dtype)

    def __process_modules__(self, process):
        # Rebuild with the same role/constant, letting `process` rewrite
        # the type (used by module-processing machinery).
        ann = Annotation(self.type, role=self.role, constant=self.constant)
        ann.type = process(ann.type)
        return ann
def generate(logfilenames: typing.List[str], file: typing.TextIO) -> None:
    """Parse the given log files and write one CSV row per entry to `file`.

    The header comes from Row.names(); each parsed info record is converted
    through Row.from_info() into a name->value dict.
    """
    writer = csv.DictWriter(file, fieldnames=Row.names())
    writer.writeheader()
    for info in parse_logs(logfilenames):
        writer.writerow(Row.from_info(info=info).name_dict())
def test_refresh_reload_plugins(preference, plugin_engine):
    """Preference plugin list reflects the engine: two plugins after collect,
    zero after all are removed and the view is refreshed."""
    plugin_engine.collect()
    preference.run()
    plugin_list = Q.select(preference.widget, Q.props('name', 'plugin.list'))
    assert (Q.map(plugin_list, TV.model, len) == 2)
    # Snapshot before mutating: iterating plugin_engine.all() directly while
    # calling remove() risks skipping entries if all() returns a live view
    # (presumably it returns a copy — TODO confirm; list() is safe either way).
    for plugin in list(plugin_engine.all()):
        plugin_engine.remove(plugin)
    preference.run()
    assert (Q.map(plugin_list, TV.model, len) == 0)
class OptionPlotoptionsSolidgaugeSonificationDefaultspeechoptionsMappingPitch(Options):
    """Generated Highcharts option wrapper for
    plotOptions.solidgauge.sonification.defaultSpeechOptions.mapping.pitch.

    NOTE(review): each option below appears as a getter/setter pair bearing
    the same name; the '@property' / '@<name>.setter' decorators were most
    likely lost during extraction — as written, the second definition
    silently overrides the first.  Confirm against the generated original.
    """
    def mapFunction(self):
        # Getter; no documented default.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter; stored as a plain value, not a JS snippet.
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get('undefined')

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get('undefined')

    def max(self, text: str):
        self._config(text, js_type=False)

    def min(self):
        return self._config_get('undefined')

    def min(self, text: str):
        self._config(text, js_type=False)

    def within(self):
        return self._config_get('undefined')

    def within(self, text: str):
        self._config(text, js_type=False)
def get_flow_stats(dp, waiters, flow=None, to_user=True):
    """Send an OFPFlowStatsRequest built from the `flow` dict and return the
    collected flow entries wrapped in a {dpid: [...]} mapping.

    When `to_user` is set, fields are converted to user-facing strings.
    """
    flow = flow if flow else {}
    ofproto = dp.ofproto
    table_id = UTIL.ofp_table_from_user(flow.get('table_id', ofproto.OFPTT_ALL))
    flags = str_to_int(flow.get('flags', 0))
    out_port = UTIL.ofp_port_from_user(flow.get('out_port', ofproto.OFPP_ANY))
    out_group = UTIL.ofp_group_from_user(flow.get('out_group', ofproto.OFPG_ANY))
    cookie = str_to_int(flow.get('cookie', 0))
    cookie_mask = str_to_int(flow.get('cookie_mask', 0))
    match = to_match(dp, flow.get('match', {}))
    priority = str_to_int(flow.get('priority', -1))

    request = dp.ofproto_parser.OFPFlowStatsRequest(dp, flags, table_id, out_port, out_group, cookie, cookie_mask, match)
    msgs = []
    ofctl_utils.send_stats_request(dp, request, waiters, msgs, LOG)

    flows = []
    for msg in msgs:
        for entry in msg.body:
            # Filter by priority only when one was explicitly requested.
            if priority >= 0 and entry.priority != priority:
                continue
            record = {
                'priority': entry.priority,
                'cookie': entry.cookie,
                'idle_timeout': entry.idle_timeout,
                'hard_timeout': entry.hard_timeout,
                'byte_count': entry.byte_count,
                'duration_sec': entry.duration_sec,
                'duration_nsec': entry.duration_nsec,
                'packet_count': entry.packet_count,
                'length': entry.length,
                'flags': entry.flags,
            }
            if to_user:
                record['actions'] = actions_to_str(entry.instructions)
                record['match'] = match_to_str(entry.match)
                record['table_id'] = UTIL.ofp_table_to_user(entry.table_id)
            else:
                record['actions'] = entry.instructions
                record['instructions'] = entry.instructions
                record['match'] = entry.match
                record['table_id'] = entry.table_id
            flows.append(record)
    return wrap_dpid_dict(dp, flows, to_user)
def test_analyzers_added_to_object():
    """A custom analyzer registered on an Index must show up in its
    _analysis mapping under its own (randomized) name."""
    analyzer_name = ''.join(choice(string.ascii_letters) for _ in range(100))
    custom_analyzer = analyzer(analyzer_name, tokenizer='standard', filter='standard')
    idx = Index('i', using='alias')
    idx.analyzer(custom_analyzer)
    assert idx._analysis['analyzer'][analyzer_name] == {'filter': ['standard'], 'type': 'custom', 'tokenizer': 'standard'}
# NOTE(review): the bare '()' below looks like a decorator that lost its name
# and '@' during extraction (presumably `@frappe.whitelist()`); confirm
# against the original source.
()
def check_fee_validity(appointment, date=None, practitioner=None):
    """Return the applicable Fee Validity doc for an appointment, if any.

    Returns None when free follow-ups are disabled, when nothing matches and
    the appointment is unsaved, or when no validity can be created/found.
    """
    if (not frappe.db.get_single_value('Healthcare Settings', 'enable_free_follow_ups')):
        return
    if isinstance(appointment, str):
        # The appointment may arrive as a JSON string (e.g. from a JS call).
        appointment = json.loads(appointment)
        appointment = frappe.get_doc(appointment)
    date = (getdate(date) if date else appointment.appointment_date)
    filters = {'practitioner': (practitioner if practitioner else appointment.practitioner), 'patient': appointment.patient, 'valid_till': ('>=', date), 'start_date': ('<=', date)}
    if (appointment.status != 'Cancelled'):
        filters['status'] = 'Active'
    else:
        # For cancelled appointments only the validity tied to this very
        # appointment is relevant.
        filters['patient_appointment'] = appointment.name
    validity = frappe.db.exists('Fee Validity', filters)
    if (not validity):
        if appointment.get('__islocal'):
            # Unsaved appointment: nothing to look up or create yet.
            return
        else:
            # Fall back to fetching/creating a validity for the saved doc.
            validity = (get_fee_validity(appointment.get('name'), date) or None)
            if (validity and len(validity)):
                return frappe.get_doc('Fee Validity', validity[0].get('name'))
            return
    validity = frappe.get_doc('Fee Validity', validity)
    return validity
class OptionSeriesTreegraphStatesInactive(Options):
    """Generated Highcharts option wrapper for
    series.treegraph.states.inactive.

    NOTE(review): `enabled`/`opacity` appear as getter/setter pairs with the
    same name — the '@property' / '@<name>.setter' decorators were most
    likely stripped during extraction; confirm against the original.
    """
    def animation(self) -> 'OptionSeriesTreegraphStatesInactiveAnimation':
        # Nested sub-options object for the inactive-state animation.
        return self._config_sub_data('animation', OptionSeriesTreegraphStatesInactiveAnimation)

    def enabled(self):
        # Default: inactive state enabled.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def opacity(self):
        # Default opacity applied to inactive points.
        return self._config_get(0.2)

    def opacity(self, num: float):
        self._config(num, js_type=False)
# NOTE(review): the line below looks like a pytest marker that lost its
# '@pytest.mark' prefix (presumably `@pytest.mark.timeout(8)`); confirm
# against the original source.
.timeout(8)
def test_table_keyinstances_crud(db_context: DatabaseContext) -> None:
    """End-to-end CRUD coverage for KeyInstanceTable.

    Exercises, in order: FK failures before parent rows exist, successful
    creates, duplicate-PK failure, reads (full and filtered), updates of
    derivation data / flags / descriptions, and delete.
    """
    table = KeyInstanceTable(db_context)
    assert ([] == table.read())
    # Pin the timestamp so created rows are deterministic.
    table._get_current_timestamp = (lambda : 10)
    KEYINSTANCE_ID = 0
    ACCOUNT_ID = 10
    MASTERKEY_ID = 20
    DERIVATION_DATA1 = b'111'
    DERIVATION_DATA2 = b'222'
    line1 = KeyInstanceRow((KEYINSTANCE_ID + 1), (ACCOUNT_ID + 1), (MASTERKEY_ID + 1), DerivationType.BIP32, DERIVATION_DATA1, ScriptType.P2PKH, True, None)
    line2 = KeyInstanceRow((KEYINSTANCE_ID + 2), (ACCOUNT_ID + 1), (MASTERKEY_ID + 1), DerivationType.HARDWARE, DERIVATION_DATA2, ScriptType.P2PKH, True, None)
    # No master key row yet -> foreign-key constraint must fail.
    with pytest.raises(sqlite3.IntegrityError):
        with SynchronousWriter() as writer:
            table.create([line1], completion_callback=writer.get_callback())
            assert (not writer.succeeded())
    with MasterKeyTable(db_context) as mktable:
        with SynchronousWriter() as writer:
            mktable.create([MasterKeyRow((MASTERKEY_ID + 1), None, 2, b'111')], completion_callback=writer.get_callback())
            assert writer.succeeded()
    # Master key exists but the account row is still missing -> still fails.
    with pytest.raises(sqlite3.IntegrityError):
        with SynchronousWriter() as writer:
            table.create([line1], completion_callback=writer.get_callback())
            assert (not writer.succeeded())
    with AccountTable(db_context) as acctable:
        with SynchronousWriter() as writer:
            acctable.create([AccountRow((ACCOUNT_ID + 1), (MASTERKEY_ID + 1), ScriptType.P2PKH, 'name')], completion_callback=writer.get_callback())
            assert writer.succeeded()
    # Both parent rows present -> creates succeed.
    with SynchronousWriter() as writer:
        table.create([line1], completion_callback=writer.get_callback())
        assert writer.succeeded()
    with SynchronousWriter() as writer:
        table.create([line2], completion_callback=writer.get_callback())
        assert writer.succeeded()
    # Re-creating line1 violates the primary key.
    with pytest.raises(sqlite3.IntegrityError):
        with SynchronousWriter() as writer:
            table.create([line1], completion_callback=writer.get_callback())
            assert (not writer.succeeded())
    # Read back both rows and verify round-trip equality.
    db_lines = table.read()
    assert (2 == len(db_lines))
    db_line1 = [db_line for db_line in db_lines if (db_line[0] == line1[0])][0]
    assert (line1 == db_line1)
    db_line2 = [db_line for db_line in db_lines if (db_line[0] == line2[0])][0]
    assert (line2 == db_line2)
    date_updated = 20
    # Update derivation data on line1 and clear the flag on line2.
    with SynchronousWriter() as writer:
        table.update_derivation_data([(b'234', line1[0])], date_updated, completion_callback=writer.get_callback())
        assert writer.succeeded()
    with SynchronousWriter() as writer:
        table.update_flags([(False, line2[0])], date_updated, completion_callback=writer.get_callback())
        assert writer.succeeded()
    db_lines = table.read()
    assert (2 == len(db_lines))
    db_line1 = [db_line for db_line in db_lines if (db_line[0] == line1[0])][0]
    assert (b'234' == db_line1[4])
    db_line2 = [db_line for db_line in db_lines if (db_line[0] == line2[0])][0]
    assert (not db_line2[6])
    # Filtered read by key id.
    db_lines = table.read(key_ids=[(KEYINSTANCE_ID + 1)])
    assert (1 == len(db_lines))
    assert ((KEYINSTANCE_ID + 1) == db_lines[0].keyinstance_id)
    # Delete line2; only line1 (with updated data) remains.
    with SynchronousWriter() as writer:
        table.delete([line2[0]], completion_callback=writer.get_callback())
        assert writer.succeeded()
    db_lines = table.read()
    assert (1 == len(db_lines))
    assert (db_lines[0].keyinstance_id == line1.keyinstance_id)
    assert (db_lines[0].description is None)
    assert (db_lines[0].derivation_data == b'234')
    # Set and verify a description on the surviving row.
    with SynchronousWriter() as writer:
        table.update_descriptions([('line1', line1.keyinstance_id)], completion_callback=writer.get_callback())
        assert writer.succeeded()
    rows = table.read()
    assert (len(rows) == 1)
    assert (rows[0].keyinstance_id == line1[0])
    assert (rows[0].description == 'line1')
    table.close()
class YAMLObj1(object):
    """Ruamel-YAML constructor/representer pair for the '!obj:' tag family."""
    yaml_tag = u'!obj:'

    # NOTE(review): first parameter is `cls` but no @classmethod decorator is
    # present — likely stripped during extraction; confirm.
    def from_yaml(cls, loader, suffix, node):
        """Build an Obj1 from a '!obj:<suffix>' node; mapping nodes only."""
        import srsly.ruamel_yaml
        obj1 = Obj1(suffix)
        if isinstance(node, srsly.ruamel_yaml.MappingNode):
            obj1.add_node(loader.construct_mapping(node))
        else:
            # Sequence/scalar payloads are not supported for this tag.
            raise NotImplementedError
        return obj1

    # NOTE(review): same missing-@classmethod concern as from_yaml above.
    def to_yaml(cls, dumper, data):
        # Emit as a scalar tagged '!obj:<suffix>' whose value is data.dump().
        return dumper.represent_scalar((cls.yaml_tag + data._suffix), data.dump())
def _butterworth_args_check(data, frequency, cutoff, filter_type, order, axis, precision):
if (not isinstance(data, _np.ndarray)):
raise TypeError(f"'data' should be a numpy ndarray, not {type(data)}.")
if ((not isinstance(frequency, int)) and (not isinstance(frequency, float))):
raise TypeError(f"'frequency' should be an of int or float type, not {type(frequency)}.")
if (frequency <= 0):
raise ValueError("'frequency' should be positive.")
if hasattr(cutoff, '__iter__'):
cutoff = tuple(cutoff)
else:
cutoff = (cutoff,)
for value in cutoff:
if ((not isinstance(value, int)) and (not isinstance(value, float))):
raise TypeError(f"'cutoff' should be a value or a collection of values of int or float type, not {type(value)}.")
if (value <= 0):
raise ValueError("'cutoff' value(s) should be positive.")
if (not isinstance(filter_type, FilterType)):
raise TypeError(f"'filter_type' should be an of FilterType type, not {type(filter_type)}.")
if (not isinstance(order, int)):
raise TypeError(f"'order' should be an of int type, not {type(order)}.")
if (order <= 0):
raise ValueError("'order' should be positive.")
if (not isinstance(axis, int)):
raise TypeError(f"'axis' should be an of int type, not {type(axis)}.")
try:
precision = _np.dtype(precision)
except TypeError:
raise TypeError(f'precision should be a valid dtype, not {type(precision)}.')
return (data, frequency, cutoff, filter_type, order, axis, precision) |
class LifespanManager():
    """Drives an ASGI app's lifespan protocol for testing.

    Used as an async context manager: entering runs the app in the
    background and sends 'lifespan.startup'; exiting sends
    'lifespan.shutdown' and awaits completion, each under its own timeout.
    """

    def __init__(self, app: ASGIApp, startup_timeout: typing.Optional[float]=5, shutdown_timeout: typing.Optional[float]=5) -> None:
        # Shared lifespan state exposed to the app via the state middleware.
        self._state: typing.Dict[(str, typing.Any)] = {}
        self.app = state_middleware(app, self._state)
        self.startup_timeout = startup_timeout
        self.shutdown_timeout = shutdown_timeout
        self._concurrency_backend = detect_concurrency_backend()
        self._startup_complete = self._concurrency_backend.create_event()
        self._shutdown_complete = self._concurrency_backend.create_event()
        # Capacity 2: at most one startup and one shutdown message queued.
        self._receive_queue = self._concurrency_backend.create_queue(capacity=2)
        # Tracks whether the app ever called receive(); used to distinguish
        # "app does not speak lifespan" from genuine app crashes.
        self._receive_called = False
        self._app_exception: typing.Optional[BaseException] = None
        self._exit_stack = AsyncExitStack()

    async def startup(self) -> None:
        """Send 'lifespan.startup' and wait (with timeout) for completion.

        Re-raises any exception the app raised while starting up.
        """
        (await self._receive_queue.put({'type': 'lifespan.startup'}))
        (await self._concurrency_backend.run_and_fail_after(self.startup_timeout, self._startup_complete.wait))
        if self._app_exception:
            raise self._app_exception

    async def shutdown(self) -> None:
        """Send 'lifespan.shutdown' and wait (with timeout) for completion."""
        (await self._receive_queue.put({'type': 'lifespan.shutdown'}))
        (await self._concurrency_backend.run_and_fail_after(self.shutdown_timeout, self._shutdown_complete.wait))

    async def receive(self) -> Message:
        # Handed to the app as its ASGI `receive` callable.
        self._receive_called = True
        return (await self._receive_queue.get())

    async def send(self, message: Message) -> None:
        # Handed to the app as its ASGI `send` callable.
        if (not self._receive_called):
            raise LifespanNotSupported("Application called send() before receive(). Is it missing `assert scope['type'] == ' or similar?")
        if (message['type'] == 'lifespan.startup.complete'):
            self._startup_complete.set()
        elif (message['type'] == 'lifespan.shutdown.complete'):
            self._shutdown_complete.set()

    async def run_app(self) -> None:
        """Run the app with a lifespan scope; record any crash and unblock
        both waiters so startup()/shutdown() don't hang."""
        scope: Scope = {'type': 'lifespan'}
        try:
            (await self.app(scope, self.receive, self.send))
        except BaseException as exc:
            self._app_exception = exc
            # Release both events so the waiting side can observe the error.
            self._startup_complete.set()
            self._shutdown_complete.set()
            if (not self._receive_called):
                raise LifespanNotSupported("Application failed before making its first call to 'receive()'. We expect this to originate from a statement similar to `assert scope['type'] == 'type'`. If that is not the case, then this crash is unexpected and there is probably more debug output in the cause traceback.") from exc
            raise

    async def __aenter__(self) -> 'LifespanManager':
        (await self._exit_stack.__aenter__())
        # Start the app in the background before signalling startup.
        (await self._exit_stack.enter_async_context(self._concurrency_backend.run_in_background(self.run_app)))
        try:
            (await self.startup())
            return self
        except BaseException:
            # Tear down the background task before propagating.
            (await self._exit_stack.aclose())
            raise

    async def __aexit__(self, exc_type: typing.Optional[typing.Type[BaseException]]=None, exc_value: typing.Optional[BaseException]=None, traceback: typing.Optional[TracebackType]=None) -> typing.Optional[bool]:
        # Only attempt a clean shutdown when the body exited without error.
        if (exc_type is None):
            self._exit_stack.push_async_callback(self.shutdown)
        return (await self._exit_stack.__aexit__(exc_type, exc_value, traceback))
def start_proj(length, name, price, area, capacity):
    """Drive remote menu option 1 ('start project') by answering each prompt
    in order over the global pwntools tube `io`."""
    io.sendlineafter('Exit\n', '1')
    answers = (
        ('name: ', str(length)),
        ('name: ', name),
        ('price: ', str(price)),
        ('area: ', str(area)),
        ('capacity: ', str(capacity)),
    )
    for prompt, value in answers:
        io.sendlineafter(prompt, value)
class OptionSeriesBarSonificationContexttracksMappingLowpass(Options):
    """Generated Highcharts option wrapper for
    series.bar.sonification.contextTracks.mapping.lowpass.

    NOTE(review): these accessors were most likely '@property'-decorated in
    the generated original; the decorators appear stripped — confirm.
    """
    def frequency(self) -> 'OptionSeriesBarSonificationContexttracksMappingLowpassFrequency':
        # Nested sub-options for the lowpass filter frequency mapping.
        return self._config_sub_data('frequency', OptionSeriesBarSonificationContexttracksMappingLowpassFrequency)

    def resonance(self) -> 'OptionSeriesBarSonificationContexttracksMappingLowpassResonance':
        # Nested sub-options for the lowpass filter resonance mapping.
        return self._config_sub_data('resonance', OptionSeriesBarSonificationContexttracksMappingLowpassResonance)
def goodbye_email(use):
    """Send the departure ('thank you for staying') email for a stay/use.

    Renders the location's DEPARTURE template with absolute booking URLs
    and hands the result to mailgun_send().
    """
    domain = Site.objects.get_current().domain
    location = use.location
    # Fix: the URL scheme string was truncated in the original
    # (`(' + domain)`); restored to absolute 'https://' URLs.
    c = {
        'first_name': use.user.first_name,
        'location': use.location,
        'booking_url': ('https://' + domain + urlresolvers.reverse('booking_detail', args=(location.slug, use.booking.id))),
        'new_booking_url': ('https://' + domain + urlresolvers.reverse('location_stay', args=(location.slug,))),
    }
    (text_content, html_content) = render_templates(c, location, LocationEmailTemplate.DEPARTURE)
    subject = ('[%s] Thank you for staying with us' % location.email_subject_prefix)
    mailgun_data = {'from': use.location.from_email(), 'to': [use.user.email], 'subject': subject, 'text': text_content}
    # Attach the HTML body only when the template produced one.
    if html_content:
        mailgun_data['html'] = html_content
    return mailgun_send(mailgun_data)
class BankRestClient(Bank):
    """Bank module client implemented over the Cosmos REST (LCD) API.

    Each method issues a GET against the bank v1beta1 endpoints and parses
    the JSON response into the corresponding protobuf response type.
    """
    API_URL = '/cosmos/bank/v1beta1'

    def __init__(self, rest_api: RestClient):
        # Underlying HTTP client; `get(url, request, used_params)` serializes
        # remaining request fields as query parameters.
        self._rest_api = rest_api

    def Balance(self, request: QueryBalanceRequest) -> QueryBalanceResponse:
        """Query the balance of one denom for an address."""
        response = self._rest_api.get(f'{self.API_URL}/balances/{request.address}/by_denom?denom={request.denom}', request, ['address', 'denom'])
        return Parse(response, QueryBalanceResponse())

    def AllBalances(self, request: QueryAllBalancesRequest) -> QueryAllBalancesResponse:
        """Query all denom balances for an address."""
        response = self._rest_api.get(f'{self.API_URL}/balances/{request.address}', request, ['address'])
        return Parse(response, QueryAllBalancesResponse())

    def TotalSupply(self, request: QueryTotalSupplyRequest) -> QueryTotalSupplyResponse:
        """Query the total supply of all denoms."""
        response = self._rest_api.get(f'{self.API_URL}/supply', request)
        return Parse(response, QueryTotalSupplyResponse())

    def SupplyOf(self, request: QuerySupplyOfRequest) -> QuerySupplyOfResponse:
        """Query the supply of a single denom."""
        response = self._rest_api.get(f'{self.API_URL}/supply/{request.denom}')
        return Parse(response, QuerySupplyOfResponse())

    def Params(self, request: QueryParamsRequest) -> QueryParamsResponse:
        """Query the bank module parameters."""
        response = self._rest_api.get(f'{self.API_URL}/params')
        return Parse(response, QueryParamsResponse())

    def DenomMetadata(self, request: QueryDenomMetadataRequest) -> QueryDenomMetadataResponse:
        """Query client metadata for a single denom."""
        response = self._rest_api.get(f'{self.API_URL}/denoms_metadata/{request.denom}')
        return Parse(response, QueryDenomMetadataResponse())

    def DenomsMetadata(self, request: QueryDenomsMetadataRequest) -> QueryDenomsMetadataResponse:
        """Query client metadata for all registered denoms."""
        response = self._rest_api.get(f'{self.API_URL}/denoms_metadata', request)
        return Parse(response, QueryDenomsMetadataResponse())
def profile(name, env, filename=None, verbose=False):
    """Profile benchmark `name` from `env` over 100k iterations.

    With `verbose` a pprofile (line-level) run is attempted; otherwise
    cProfile is used.  When `filename` is given, results are written to
    '<name>-<filename>' instead of stdout.
    """
    if filename:
        filename = name + '-' + filename
        print('Profiling %s ==> %s' % (name, filename))
    else:
        filename = None

    title = name + ' profile'
    banner = '=' * len(title)
    print()
    print(banner)
    print(title)
    print(banner)

    func = create_bench(name, env)
    gc.collect()
    num_iterations = 100000
    if PYPY:
        # Warm the JIT so the measured run reflects compiled code.
        print('JIT warmup...')
        for _ in range(num_iterations * JIT_WARMING_MULTIPLIER):
            func()
        print('Ready.')

    code = 'for x in range({0}): func()'.format(num_iterations)
    if not verbose:
        cProfile.runctx(code, locals(), globals(), sort='tottime', filename=filename)
    elif pprofile is None:
        print('pprofile not found. Please install pprofile and try again.')
    else:
        pprofile.runctx(code, locals(), globals(), filename=filename)
def upload(access_token: str, title: str, creator_name: str, description: str='', files: Collection[Union[(Path, str)]]=(), affiliation: str=None, orcid: str=None, gnd: str=None, upload_type: str='other', license: str='CC-BY-4.0', keywords: Collection[str]=(), related_identifiers: Collection[dict]=(), community_identifer: str=None, use_sandbox: bool=True, publish: bool=False):
    """Create a Zenodo deposition, attach metadata and files, and optionally
    publish it.

    Raises FileNotFoundError (before any upload starts) if any entry in
    `files` does not exist.  Prints the resulting deposition URL.
    """
    zenodo = Zenodo(access_token, use_sandbox=use_sandbox)
    r = zenodo.create_deposition()
    creator = {'name': creator_name}
    if (affiliation is not None):
        creator['affiliation'] = affiliation
    if (orcid is not None):
        creator['orcid'] = orcid
    if (gnd is not None):
        creator['gnd'] = gnd
    data = {'metadata': {'upload_type': upload_type, 'publication_date': datetime.now().strftime('%Y-%m-%d'), 'title': title, 'description': description, 'creators': [creator], 'license': license, 'keywords': list(keywords), 'related_identifiers': list(related_identifiers)}}
    if (community_identifer is not None):
        # Fix: the Zenodo deposit API expects `communities` inside the
        # `metadata` object; the original set it at the payload top level,
        # where it is ignored.
        data['metadata']['communities'] = [{'identifier': community_identifer}]
    zenodo.update_metadata(r, data)
    # Validate every file first so a missing one fails before any upload.
    for f in files:
        if (not Path(f).exists()):
            raise FileNotFoundError(f'{f} is not found')
    for f in files:
        print(f'Now uploading {f}...')
        zenodo.upload_file(r, f)
    if publish:
        r = zenodo.publish(r)
        url = r.json()['links']['latest_html']
        print(f'Successfully published. Go to {url}')
    else:
        url = r.json()['links']['html']
        print(f'Successfully uploaded, but not published yet. Go to {url}')
class TestExceptionHandlers():
    """API behaviour when the redis cache is disabled: privacy-request
    creation fails with 500 and the health endpoint reports no cache."""
    # NOTE(review): the line below looks like a pytest marker that lost its
    # '@pytest.mark' prefix (`@pytest.mark.usefixtures(...)`); confirm
    # against the original source.
    .usefixtures('mock_config_redis_disabled')
    def test_redis_disabled(self, api_client: TestClient, generate_auth_header):
        auth_header = generate_auth_header([CLIENT_CREATE])
        request_body = [{'requested_at': '2021-08-30T16:09:37.359Z', 'identity': {'email': 'customer-'}, 'policy_key': 'my_separate_policy'}]
        # Creating a privacy request without a cache must return a 500 with
        # an explanatory message.
        expected_response = {'message': 'Application redis cache required, but it is currently disabled! Please update your application configuration to enable integration with a redis cache.'}
        response = api_client.post((V1_URL_PREFIX + PRIVACY_REQUESTS), headers=auth_header, json=request_body)
        response_body = json.loads(response.text)
        assert (500 == response.status_code)
        assert (expected_response == response_body)
        # The health endpoint still responds 200 but reports the cache state.
        expected_response = 'no cache configured'
        response = api_client.get(HEALTH)
        response_body = json.loads(response.text)
        assert (200 == response.status_code)
        assert (expected_response == response_body['cache'])
def extractShirohane(item):
    """Parse a Shirohane feed item into a release message.

    Returns None when no chapter/volume was parsed or the item is a
    preview, a release message for known tags, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    title_lower = item['title'].lower()
    if not (chp or vol) or 'preview' in title_lower:
        return None
    if 'WATTT' in item['tags']:
        return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
    return False
class PlotScrollBar(NativeScrollBar):
    """A scroll bar that keeps a plot's data-space range in sync with itself.

    The bar tracks one data axis ('index' or 'value') of a plot's mapper:
    dragging the bar pans the mapped range, and data/bounds updates on the
    plot re-derive the bar's total range, page size, and tick size.
    """

    # Which data-space axis of the plot this scroll bar controls.
    axis = Enum('index', 'value')
    # The plot being scrolled; falls back to self.component when unset.
    plot = Property
    # The mapper whose range is scrolled; defaults to plot.<axis>_mapper.
    mapper = Property
    # Shadow storage behind the 'plot' property.
    _plot = Any()
    # Shadow storage behind the 'mapper' property.
    _mapper = Any()
    # Cached screen-axis index (0 or 1); None means "derive from self.axis".
    _axis_index = Optional(Int)

    def force_data_update(self):
        """Force a recomputation of the scroll range from the data sources."""
        self._handle_dataspace_update()

    def overlay(self, component, gc, view_bounds=None, mode='default'):
        """Draw the scroll bar as an overlay on *component*."""
        self.do_layout()
        self._draw_mainlayer(gc, view_bounds, 'default')

    def _draw_plot(self, gc, view_bounds=None, mode='default'):
        """Draw the scroll bar when rendered as a plot layer."""
        self._draw_mainlayer(gc, view_bounds, 'default')

    def _do_layout(self):
        """Size and position the bar along the mapper's screen bounds."""
        if getattr(self.plot, '_layout_needed', False):
            self.plot.do_layout()
        axis = self._determine_axis()
        (low, high) = self.mapper.screen_bounds
        self.bounds[axis] = (high - low)
        self.position[axis] = low
        self._widget_moved = True

    def _get_abs_coords(self, x, y):
        # Prefer the container's coordinate transform when one exists.
        if (self.container is not None):
            return self.container.get_absolute_coords(x, y)
        else:
            return self.component.get_absolute_coords(x, y)

    def _handle_dataspace_update(self):
        """Recompute the scroll range and tick size from the mapper's sources."""
        range = self.mapper.range
        # Only consider data sources that actually contain points.
        bounds_list = [source.get_bounds() for source in range.sources if (source.get_size() > 0)]
        (mins, maxes) = zip(*bounds_list)
        dmin = min(mins)
        dmax = max(maxes)
        view = float((range.high - range.low))
        # The total scrollable extent covers both the data and the current view.
        totalmin = min(range.low, dmin)
        totalmax = max(range.high, dmax)
        scrollrange = ((totalmax - totalmin) - view)
        # Aim for roughly 20 ticks across the scrollable extent.
        if (round((scrollrange / 20.0)) > 0.0):
            ticksize = (scrollrange / round((scrollrange / 20.0)))
        else:
            ticksize = 1
        foo = (totalmin, totalmax, view, ticksize)
        # trait_setq sets quietly, avoiding re-triggering notification handlers;
        # the scroll position is clamped into the new [totalmin, totalmax-view].
        self.trait_setq(range=foo, scroll_position=max(min(self.scroll_position, (totalmax - view)), totalmin))
        self._scroll_updated = True
        self.request_redraw()

    def _scroll_position_changed(self):
        """Pan the mapped data range to follow the new scroll position."""
        super()._scroll_position_changed()
        range = self.mapper.range
        view_width = (range.high - range.low)
        new_scroll_pos = self.scroll_position
        range.set_bounds(new_scroll_pos, (new_scroll_pos + view_width))

    def _component_changed(self, old, new):
        # An explicitly assigned plot takes precedence over the component.
        if (self._plot is not None):
            return
        if (old is not None):
            self._modify_plot_listeners(old, 'detach')
        if (new is not None):
            self._modify_plot_listeners(new, 'attach')
            self._update_mapper_listeners()

    def __plot_changed(self, old, new):
        # Move listeners from the previous plot (or component fallback)
        # to the new plot (or back to the component when plot is cleared).
        if (old is not None):
            self._modify_plot_listeners(old, 'detach')
        elif (self.component is not None):
            self._modify_plot_listeners(self.component, 'detach')
        if (new is not None):
            self._modify_plot_listeners(new, 'attach')
            self._update_mapper_listeners()
        elif (self.component is not None):
            self._modify_plot_listeners(self.component, 'attach')
            self._update_mapper_listeners()

    def _modify_plot_listeners(self, plot, action='attach'):
        """Attach or detach bounds/position observers on *plot*."""
        if (action == 'attach'):
            remove = False
        else:
            remove = True
        plot.observe(self._component_bounds_handler, 'bounds.items', remove=remove)
        plot.observe(self._component_pos_handler, 'position.items', remove=remove)

    def _component_bounds_handler(self, event):
        # Geometry changed: refresh the data-space range and re-layout.
        self._handle_dataspace_update()
        self._widget_moved = True

    def _component_pos_handler(self, event):
        # Position changed: refresh the data-space range and re-layout.
        self._handle_dataspace_update()
        self._widget_moved = True

    def _update_mapper_listeners(self):
        # Intentionally a no-op; kept as a hook for subclasses/future wiring.
        pass

    def _handle_mapper_updated(self):
        self._handle_dataspace_update()

    def _get_plot(self):
        # Property getter: explicit plot wins, else fall back to component.
        if (self._plot is not None):
            return self._plot
        else:
            return self.component

    def _set_plot(self, val):
        self._plot = val

    def _get_mapper(self):
        # Property getter: explicit mapper wins, else plot.<axis>_mapper.
        if (self._mapper is not None):
            return self._mapper
        else:
            return getattr(self.plot, (self.axis + '_mapper'))

    def _set_mapper(self, new_mapper):
        self._mapper = new_mapper

    def _get_axis_index(self):
        if (self._axis_index is None):
            return self._determine_axis()
        else:
            return self._axis_index

    def _set_axis_index(self, val):
        self._axis_index = val

    def _get_axis_coord(self, event, axis='index'):
        """Return the event coordinate along (or across) the tracked axis."""
        event_pos = (event.x, event.y)
        if (axis == 'index'):
            return event_pos[self.axis_index]
        else:
            return event_pos[(1 - self.axis_index)]

    def _determine_axis(self):
        """Map (self.axis, plot orientation) to a screen-axis index (0=x, 1=y)."""
        if (self.axis == 'index'):
            if (self.plot.orientation == 'h'):
                return 0
            else:
                return 1
        elif (self.plot.orientation == 'h'):
            return 1
        else:
            return 0
class TensorOpsTest(unittest.TestCase):
    """Tests for tensorops gradient helpers on a Gaussian log-density.

    Restored: the matmul ``@`` operators had been stripped from the quadratic
    forms (adjacent tensor expressions are a syntax error). With
    f = -(x - mu)^T P (x - mu) / 2 the analytic gradient is -P (x - mu) and
    the Hessian is -P, which is what the assertions check.
    """

    def test_gradients(self) -> None:
        """gradients() returns the exact gradient and full Hessian, dtype-preserving."""
        for type_ in [torch.float32, torch.float64]:
            x = torch.randn(3, requires_grad=True, dtype=type_)
            prec = torch.Tensor([[1, 0.1, 0], [0.1, 2, 0.5], [0, 0.5, 3]]).to(type_)
            mu = torch.randn(3, dtype=type_)
            f = (-(x - mu)) @ prec @ (x - mu) / 2
            (grad, hess) = tensorops.gradients(f, x)
            self.assertTrue(grad.allclose((-(x - mu)) @ prec))
            self.assertTrue(hess.allclose(-prec))
            self.assertEqual(grad.dtype, type_, 'gradient dtype must match input')
            self.assertEqual(hess.dtype, type_, 'hessian dtype must match input')

    def test_simplex_gradients(self) -> None:
        """simplex_gradients() returns the gradient and a diagonal Hessian estimate."""
        for type_ in [torch.float32, torch.float64]:
            x = torch.randn(3, requires_grad=True, dtype=type_)
            prec = torch.Tensor([[1, 0.1, 0], [0.1, 2, 0.5], [0, 0.5, 3]]).to(type_)
            # Expected diagonal Hessian approximation used by simplex_gradients.
            prec_diag = torch.Tensor([1.0, 1.9, 3.0]).to(type_)
            mu = torch.randn(3, dtype=type_)
            f = (-(x - mu)) @ prec @ (x - mu) / 2
            (grad, hess) = tensorops.simplex_gradients(f, x)
            self.assertTrue(grad.allclose((-(x - mu)) @ prec))
            self.assertTrue(hess.allclose(-prec_diag))
            self.assertEqual(grad.dtype, type_, 'gradient dtype must match input')
            self.assertEqual(hess.dtype, type_, 'hessian dtype must match input')

    def test_halfspace_gradients(self) -> None:
        """halfspace_gradients() returns the gradient and the exact Hessian diagonal."""
        for type_ in [torch.float32, torch.float64]:
            x = torch.randn(3, requires_grad=True, dtype=type_)
            prec = torch.Tensor([[1, 0.1, 0], [0.1, 2, 0.5], [0, 0.5, 3]]).to(type_)
            prec_diag = torch.Tensor([1.0, 2.0, 3.0]).to(type_)
            mu = torch.randn(3, dtype=type_)
            f = (-(x - mu)) @ prec @ (x - mu) / 2
            (grad, hess) = tensorops.halfspace_gradients(f, x)
            self.assertTrue(grad.allclose((-(x - mu)) @ prec))
            self.assertTrue(hess.allclose(-prec_diag))
            self.assertEqual(grad.dtype, type_, 'gradient dtype must match input')
            self.assertEqual(hess.dtype, type_, 'hessian dtype must match input')

    def test_gradients_negative(self) -> None:
        """gradients() rejects non-scalar outputs with a ValueError."""
        x = torch.randn(3, requires_grad=True)
        with self.assertRaises(ValueError) as cm:
            tensorops.gradients((2 * x), x)
        self.assertTrue(('output tensor must have exactly one element' in str(cm.exception)))
class OptionSeriesVariwideSonificationContexttracksMappingTremoloDepth(Options):
    """Config accessors for the sonification tremolo-depth mapping options.

    Restored: each pair of same-named defs (one no-arg getter calling
    ``_config_get``, one setter calling ``_config``) had lost its
    ``@property`` / ``@<name>.setter`` decorators, so the second definition
    silently shadowed the getter. Re-decorating restores the intended
    property-pair pattern used throughout these Options classes.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def construct_anonymization_context(text: str) -> str:
    """Build the LLM prompt that asks for PII detection and redaction of *text*.

    The input is embedded between triple backticks inside a fixed instruction
    template; the template text is returned unchanged otherwise.
    """
    instruction_template = f'''Please analyze the following text and identify any personal and sensitive information contained within it. For each instance of personal information, please provide a category and a confidence score. Categories could include, but should not be limited to, names, addresses, phone numbers, email addresses, social security numbers, enterprise name, any private information and credit card numbers. Use a confidence score between 0 and 1 to indicate how certain you are that the identified information is actually personal information.
The text is included between three backticks.
First write the redacted Text by replacing each character of identified entity with `*`, then extract the entity, finally extract the confidence Score between 0.0-1.0.
The category must be one of the following: "name", "address", "phonenumber", "email", "social security number", "organization", "credit card number", "other".
The text:
```{text}```
Your output:'
'''
    return instruction_template
# Restored: the bare "('ecs_deploy.cli.get_client')" line was a stripped
# @patch decorator; assumes `patch` (unittest.mock) is imported at file top.
@patch('ecs_deploy.cli.get_client')
def test_scale_with_timeout(get_client, runner):
    """scale exits non-zero and reports a timeout when waiting exceeds --timeout."""
    get_client.return_value = EcsTestClient('acces_key', 'secret_key', wait=2)
    result = runner.invoke(cli.scale, (CLUSTER_NAME, SERVICE_NAME, '2', '--timeout', '1'))
    assert (result.exit_code == 1)
    # NOTE(review): the original assertion string was truncated/unterminated;
    # asserting on the stable message prefix — confirm full expected text.
    assert (u'Scaling failed due to timeout. Please see: ' in result.output)
class TestStrategy():
    """Backtesting tests for Strategy: data casting, order execution, dividends,
    equity computation, and order-posting helpers.

    Restored: every leading-dot ``.parametrize(...)`` line was a stripped
    ``@pytest.mark.parametrize`` decorator (a bare leading-dot expression is a
    syntax error). Assumes ``pytest`` is imported at the top of this file.
    """

    @pytest.mark.parametrize(['step_dates_path', 'df_path', 'expected_path'], [('data/step_dates1.csv', 'data/df1.csv', 'data/expected1.csv'), ('data/step_dates2.csv', 'data/df2.csv', 'data/expected2.csv'), ('data/step_dates3.csv', 'data/df3.csv', 'data/expected3.csv')])
    def test__cast_data(self, step_dates_path, df_path, expected_path):
        """_cast_data realigns raw rows onto the strategy's step dates."""
        step_dates = [np.datetime64(x) for x in pd.read_csv(step_dates_path)['date']]
        df = pd.read_csv(df_path)
        df['date'] = df['date'].astype(np.datetime64)
        expected = pd.read_csv(expected_path)
        expected['date'] = expected['date'].astype(np.datetime64)
        strategy = Strategy()
        strategy.step_dates = step_dates
        cast_df = strategy._cast_data(df)
        pd.testing.assert_frame_equal(cast_df, expected)

    @pytest.mark.parametrize(['step_dates_path', 'df_path', 'expected_path', 'date_col', 'price_col', 'return_col', 'return_format'], [('data/step_dates1.csv', 'data/df1.csv', 'data/expected4.csv', 'date', 'price', 'return', 'ratio'), ('data/step_dates1.csv', 'data/df5.csv', 'data/expected5.csv', 'date', 'price', 'return', 'ratio'), ('data/step_dates2.csv', 'data/df6.csv', 'data/expected6.csv', 'date', 'price', 'return', 'ratio'), ('data/step_dates3.csv', 'data/df7.csv', 'data/expected7.csv', 'date', 'price', 'return', 'ratio'), ('data/step_dates3.csv', 'data/df8.csv', 'data/expected7.csv', 'Date', 'Close', 'return', 'ratio'), ('data/step_dates1.csv', 'data/df9.csv', 'data/expected5.csv', 'date', 'price', 'return', 'change'), ('data/step_dates1.csv', 'data/df10.csv', 'data/expected5.csv', 'date', 'price', 'adj_price', 'price')])
    def test__check_create_ticker_data(self, step_dates_path, df_path, expected_path, date_col, price_col, return_col, return_format):
        """_check_create_ticker_data lazily loads and normalizes per-ticker data."""
        step_dates = [np.datetime64(x) for x in pd.read_csv(step_dates_path)['date']]
        df = pd.read_csv(df_path)
        df[date_col] = df[date_col].astype(np.datetime64)
        expected = pd.read_csv(expected_path)
        expected['date'] = expected['date'].astype(np.datetime64)
        data_loader = DfData(df)
        strategy = Strategy()
        strategy.data_loader = data_loader
        strategy.step_dates = step_dates
        strategy.date_col = date_col
        strategy.price_col = price_col
        strategy.return_col = return_col
        strategy.return_format = return_format
        strategy._check_create_ticker_data('AAPL')
        assert ('AAPL' in strategy._data)
        result = strategy._data['AAPL']
        need_cols = ['price', 'return', 'missed', 'closed', 'prev_price']
        pd.testing.assert_frame_equal(result[need_cols], expected[need_cols])

    @pytest.mark.parametrize(['step_dates_path', 'df_path', 'step_idx', 'portfolio', 'cash', 'comission', 'direction', 'expected'], [('data/step_dates1.csv', 'data/df1.csv', 4, {'AAPL': 10}, 168.7, 0.0, Order.BUY, 1), ('data/step_dates1.csv', 'data/df1.csv', 4, {'AAPL': 10}, 168.6, 0.0, Order.BUY, 0), ('data/step_dates1.csv', 'data/df1.csv', 4, {'AAPL': 10}, 168.7, 0.1, Order.BUY, 0), ('data/step_dates1.csv', 'data/df1.csv', 4, {'AAPL': 10}, 168.7, 0.1, Order.SELL, 10), ('data/step_dates1.csv', 'data/df1.csv', 16, {'AAPL': 10}, 168.7, 0.0, Order.BUY, None), ('data/step_dates2.csv', 'data/df2.csv', 16, {'AAPL': 10}, 168.7, 0.0, Order.BUY, None), ('data/step_dates3.csv', 'data/df3.csv', 1, {'AAPL': 10}, 175.0, 0.0, Order.BUY, 10), ('data/step_dates3.csv', 'data/df3.csv', 1, {'AAPL': 10}, 175.0, 0.0025, Order.BUY, 9)])
    def test__aposteriori_next_step_max_size(self, step_dates_path, df_path, step_idx, portfolio, cash, direction, comission, expected):
        """Max executable order size given next-step price, cash, and commission."""
        step_dates = [np.datetime64(x) for x in pd.read_csv(step_dates_path)['date']]
        df = pd.read_csv(df_path)
        df['date'] = df['date'].astype(np.datetime64)
        data_loader = DfData(df)
        strategy = Strategy()
        strategy.data_loader = data_loader
        strategy.step_dates = step_dates
        strategy.date_col = 'date'
        strategy.price_col = 'price'
        strategy.return_col = 'return'
        strategy.return_format = 'ratio'
        strategy.comission = comission
        strategy._cash = cash
        strategy._check_create_ticker_data('AAPL')
        strategy.portfolio = portfolio
        strategy.step_idx = step_idx
        order = {'ticker': 'AAPL', 'direction': direction}
        result = strategy._aposteriori_next_step_max_size(order)
        assert (result == expected)

    @pytest.mark.parametrize(['step_dates_path', 'df_path', 'step_idx', 'portfolio', 'cash', 'comission', 'direction', 'size', 'allow_partial', 'creation_date', 'lifetime', 'expected_portfolio', 'expected__cash', 'expected_location', 'expected_size', 'expected_price', 'expected_status', 'expected_execution_date'], [('data/step_dates1.csv', 'data/df1.csv', 4, {'AAPL': 10}, 168.7, 0.0, Order.BUY, 1, True, np.datetime64('2015-02-27'), np.timedelta64(3, 'D'), {'AAPL': 11}, 0.0, 'orders', 1, 168.7, Order.COMPLETED, np.datetime64('2015-03-02')), ('data/step_dates1.csv', 'data/df1.csv', 4, {'AAPL': 10}, 168.7, 0.0, Order.BUY, 3, True, np.datetime64('2015-02-27'), np.timedelta64(3, 'D'), {'AAPL': 11}, 0.0, 'orders', 1, 168.7, Order.PARTIAL, np.datetime64('2015-03-02')), ('data/step_dates1.csv', 'data/df1.csv', 4, {'AAPL': 10}, ((168.7 * 3) + 10.0), 0.0, Order.BUY, 3, True, np.datetime64('2015-02-27'), np.timedelta64(3, 'D'), {'AAPL': 13}, 10.0, 'orders', 3, 168.7, Order.COMPLETED, np.datetime64('2015-03-02')), ('data/step_dates1.csv', 'data/df1.csv', 4, {'AAPL': 10}, (168.7 * 3.0), 0.01, Order.BUY, 3, True, np.datetime64('2015-02-27'), np.timedelta64(3, 'D'), {'AAPL': 12}, (168.7 * (3 - (2 * 1.01))), 'orders', 2, 168.7, Order.PARTIAL, np.datetime64('2015-03-02')), ('data/step_dates1.csv', 'data/df1.csv', 4, {'AAPL': 10}, 10, 0.0, Order.SELL, 2, True, np.datetime64('2015-02-27'), np.timedelta64(3, 'D'), {'AAPL': 8}, (10 + (168.7 * 2)), 'orders', 2, 168.7, Order.COMPLETED, np.datetime64('2015-03-02')), ('data/step_dates1.csv', 'data/df1.csv', 4, {'AAPL': 10}, 10, 0.0, Order.SELL, 12, True, np.datetime64('2015-02-27'), np.timedelta64(3, 'D'), {'AAPL': 0}, (10 + (168.7 * 10)), 'orders', 10, 168.7, Order.PARTIAL, np.datetime64('2015-03-02')), ('data/step_dates1.csv', 'data/df1.csv', 4, {'AAPL': 10}, 10, 0.0, Order.SELL, 12, False, np.datetime64('2015-02-27'), np.timedelta64(3, 'D'), {'AAPL': 10}, 10, '_active_orders', 12, np.nan, np.nan, np.nan), ('data/step_dates1.csv', 'data/df1.csv', 4, 
    {'AAPL': 10}, 168.7, 0.0, Order.BUY, 3, False, np.datetime64('2015-02-27'), np.timedelta64(3, 'D'), {'AAPL': 10}, 168.7, '_active_orders', 3, np.nan, np.nan, np.nan), ('data/step_dates1.csv', 'data/df1.csv', 4, {'AAPL': 10}, 168.6, 0.0, Order.BUY, 3, True, np.datetime64('2015-02-27'), np.timedelta64(3, 'D'), {'AAPL': 10}, 168.6, '_active_orders', 3, np.nan, np.nan, np.nan), ('data/step_dates1.csv', 'data/df1.csv', 4, {'AAPL': 10}, 168.7, 0.01, Order.BUY, 3, True, np.datetime64('2015-02-27'), np.timedelta64(3, 'D'), {'AAPL': 10}, 168.7, '_active_orders', 3, np.nan, np.nan, np.nan), ('data/step_dates1.csv', 'data/df1.csv', 12, {'AAPL': 10}, 243.3, 0.0, Order.BUY, 1, True, np.datetime64('2015-03-16'), np.timedelta64(3, 'D'), {'AAPL': 10}, 243.3, '_active_orders', 1, np.nan, np.nan, np.nan), ('data/step_dates1.csv', 'data/df1.csv', 11, {'AAPL': 10}, 243.3, 0.0, Order.BUY, 1, True, np.datetime64('2015-03-15'), np.timedelta64(4, 'D'), {'AAPL': 11}, 0.0, 'orders', 1, 243.3, Order.COMPLETED, np.datetime64('2015-03-16')), ('data/step_dates1.csv', 'data/df1.csv', 11, {'AAPL': 10}, 243.3, 0.0, Order.BUY, 1, True, np.datetime64('2015-03-12'), np.timedelta64(4, 'D'), {'AAPL': 11}, 0.0, 'orders', 1, 243.3, Order.COMPLETED, np.datetime64('2015-03-16')), ('data/step_dates1.csv', 'data/df1.csv', 11, {'AAPL': 10}, 243.3, 0.0, Order.BUY, 1, True, np.datetime64('2015-03-09'), np.timedelta64(4, 'D'), {'AAPL': 10}, 243.3, 'orders', 1, np.nan, Order.EXPIRED, np.nan)])
    def test__execute_market_order(self, step_dates_path, df_path, step_idx, portfolio, cash, comission, direction, size, allow_partial, creation_date, lifetime, expected_portfolio, expected__cash, expected_location, expected_size, expected_price, expected_status, expected_execution_date):
        """Market-order execution: fills, partial fills, expiry, and cash updates."""
        step_dates = [np.datetime64(x) for x in pd.read_csv(step_dates_path)['date']]
        df = pd.read_csv(df_path)
        df['date'] = df['date'].astype(np.datetime64)
        data_loader = DfData(df)
        strategy = Strategy()
        strategy.data_loader = data_loader
        strategy.step_dates = step_dates
        strategy.date_col = 'date'
        strategy.price_col = 'price'
        strategy.return_col = 'return'
        strategy.return_format = 'ratio'
        strategy.comission = comission
        strategy.latency = np.timedelta64(1, 'h')
        strategy._cash = cash
        strategy._check_create_ticker_data('AAPL')
        strategy.portfolio = portfolio
        strategy.step_idx = step_idx
        strategy.step_date = strategy.step_dates[strategy.step_idx]
        order = {'ticker': 'AAPL', 'direction': direction, 'size': size, 'allow_partial': allow_partial, 'creation_date': creation_date, 'submit_date': (creation_date + strategy.latency), 'lifetime': lifetime}
        strategy._execute_market_order(order)
        if (expected_location == '_active_orders'):
            # Still-pending orders must not yet carry execution metadata;
            # fill them with nan so the shared assertions below can run.
            assert (len(strategy._active_orders) == 1)
            assert (len(strategy.orders) == 0)
            result = strategy._active_orders[0]
            assert ('execution_date' not in result)
            result['execution_date'] = np.nan
            assert ('price' not in result)
            result['price'] = np.nan
            assert ('status' not in result)
            result['status'] = np.nan
        if (expected_location == 'orders'):
            assert (len(strategy.orders) == 1)
            assert (len(strategy._active_orders) == 0)
            result = strategy.orders[0]
        assert (strategy.portfolio == expected_portfolio)
        np.testing.assert_almost_equal(strategy._cash, expected__cash)
        np.testing.assert_almost_equal(result['size'], expected_size)
        np.testing.assert_almost_equal(result['price'], expected_price)
        np.testing.assert_equal(result['status'], expected_status)
        np.testing.assert_equal(result['execution_date'], expected_execution_date)

    @pytest.mark.parametrize(['step_dates_path', 'df_pathes', 'step_idx', 'portfolio', 'cash', 'expected__cash'], [('data/step_dates1.csv', ['data/df1.csv', 'data/df2.csv'], 4, {'AAPL0': 2, 'AAPL1': 10}, 120, 120.0), ('data/step_dates1.csv', ['data/df1.csv', 'data/df2.csv'], 9, {'AAPL0': 2, 'AAPL1': 10}, 120, 120.0), ('data/step_dates1.csv', ['data/df1.csv', 'data/df2.csv'], 9, {'AAPL0': 0, 'AAPL1': 10}, 120, 120.0), ('data/step_dates1.csv', ['data/df1.csv', 'data/df5.csv'], 5, {'AAPL0': 2, 'AAPL1': 10}, 120, 285.5), ('data/step_dates1.csv', ['data/df6.csv', 'data/df5.csv'], 5, {'AAPL0': 2, 'AAPL1': 10}, 0.0, 169.26)])
    def test__receive_dividends(self, step_dates_path, df_pathes, step_idx, portfolio, cash, expected__cash):
        """_receive_dividends credits cash for dividend-paying held tickers."""
        step_dates = [np.datetime64(x) for x in pd.read_csv(step_dates_path)['date']]
        dfs = {}
        for (k, df_path) in enumerate(df_pathes):
            df = pd.read_csv(df_path)
            df['date'] = df['date'].astype(np.datetime64)
            dfs['AAPL{}'.format(k)] = df
        data_loader = DfData(dfs)
        strategy = Strategy()
        strategy.data_loader = data_loader
        strategy.step_dates = step_dates
        strategy.date_col = 'date'
        strategy.price_col = 'price'
        strategy.return_col = 'return'
        strategy.return_format = 'ratio'
        strategy._cash = cash
        strategy.portfolio = portfolio
        strategy.step_idx = step_idx
        strategy.step_date = strategy.step_dates[strategy.step_idx]
        for (k, df_path) in enumerate(df_pathes):
            strategy._check_create_ticker_data('AAPL{}'.format(k))
        strategy._receive_dividends()
        np.testing.assert_almost_equal(strategy._cash, expected__cash)

    @pytest.mark.parametrize(['step_dates_path', 'df_pathes', 'step_idx', 'portfolio', 'cash', 'expected_equity'], [('data/step_dates1.csv', ['data/df1.csv', 'data/df2.csv'], 4, {'AAPL0': 2, 'AAPL1': 10}, 120, 634.0), ('data/step_dates1.csv', ['data/df1.csv', 'data/df2.csv'], 9, {'AAPL0': 2, 'AAPL1': 10}, 120, 746.4), ('data/step_dates1.csv', ['data/df1.csv', 'data/df2.csv'], 9, {'AAPL0': 0, 'AAPL1': 10}, 120, 262.0), ('data/step_dates1.csv', ['data/df1.csv', 'data/df5.csv'], 5, {'AAPL0': 2, 'AAPL1': 10}, 120, 2144.4), ('data/step_dates1.csv', ['data/df6.csv', 'data/df5.csv'], 5, {'AAPL0': 2, 'AAPL1': 10}, 0.0, 1713.8)])
    def test__calc_equity(self, step_dates_path, df_pathes, step_idx, portfolio, cash, expected_equity):
        """_calc_equity sums cash plus mark-to-market value of the portfolio."""
        step_dates = [np.datetime64(x) for x in pd.read_csv(step_dates_path)['date']]
        dfs = {}
        for (k, df_path) in enumerate(df_pathes):
            df = pd.read_csv(df_path)
            df['date'] = df['date'].astype(np.datetime64)
            dfs['AAPL{}'.format(k)] = df
        data_loader = DfData(dfs)
        strategy = Strategy()
        strategy.data_loader = data_loader
        strategy.step_dates = step_dates
        strategy.date_col = 'date'
        strategy.price_col = 'price'
        strategy.return_col = 'return'
        strategy.return_format = 'ratio'
        strategy._cash = cash
        strategy.portfolio = portfolio
        strategy.step_idx = step_idx
        strategy.step_date = strategy.step_dates[strategy.step_idx]
        for (k, df_path) in enumerate(df_pathes):
            strategy._check_create_ticker_data('AAPL{}'.format(k))
        equity = strategy._calc_equity()
        np.testing.assert_almost_equal(equity, expected_equity)

    @pytest.mark.parametrize(['step_dates_path', 'df_pathes', 'step_idx', 'portfolio', 'expected_orders'], [('data/step_dates3.csv', ['data/df2.csv', 'data/df6.csv'], 10, {'AAPL0': 2, 'AAPL1': 10}, [{'ticker': 'AAPL0', 'direction': Order.SELL, 'size': 2}, {'ticker': 'AAPL1', 'direction': Order.SELL, 'size': 10}]), ('data/step_dates3.csv', ['data/df2.csv', 'data/df5.csv'], 8, {'AAPL0': 2, 'AAPL1': 10}, [{'ticker': 'AAPL1', 'direction': Order.SELL, 'size': 10}]), ('data/step_dates3.csv', ['data/df2.csv', 'data/df5.csv'], 7, {'AAPL0': 2, 'AAPL1': 10}, [])])
    def test__post_close_orders(self, step_dates_path, df_pathes, step_idx, portfolio, expected_orders):
        """_post_close_orders posts sell orders for tickers whose data closed."""
        step_dates = [np.datetime64(x) for x in pd.read_csv(step_dates_path)['date']]
        dfs = {}
        for (k, df_path) in enumerate(df_pathes):
            df = pd.read_csv(df_path)
            df['date'] = df['date'].astype(np.datetime64)
            dfs['AAPL{}'.format(k)] = df
        data_loader = DfData(dfs)
        strategy = Strategy()
        strategy.latency = np.timedelta64(1, 'h')
        strategy.data_loader = data_loader
        strategy.step_dates = step_dates
        strategy.date_col = 'date'
        strategy.price_col = 'price'
        strategy.return_col = 'return'
        strategy.return_format = 'ratio'
        strategy.portfolio = portfolio
        strategy.step_idx = step_idx
        strategy.step_date = strategy.step_dates[strategy.step_idx]
        strategy.verbose = False
        for (k, df_path) in enumerate(df_pathes):
            strategy._check_create_ticker_data('AAPL{}'.format(k))
        strategy._post_close_orders()
        assert (len(strategy._active_orders) == len(expected_orders))
        for (order, expected) in zip(strategy._active_orders, expected_orders):
            np.testing.assert_equal(order['ticker'], expected['ticker'])
            np.testing.assert_equal(order['direction'], expected['direction'])
            np.testing.assert_equal(order['size'], expected['size'])

    @pytest.mark.parametrize(['step_dates_path', 'df_path', 'step_idx', 'direction', 'value', 'expected_orders'], [('data/step_dates1.csv', 'data/df1.csv', 3, Order.BUY, 1000.0, [{'ticker': 'AAPL', 'direction': Order.BUY, 'size': 6}]), ('data/step_dates1.csv', 'data/df1.csv', 3, Order.BUY, 960.0, [{'ticker': 'AAPL', 'direction': Order.BUY, 'size': 5}]), ('data/step_dates1.csv', 'data/df1.csv', 8, Order.BUY, 1000.0, [{'ticker': 'AAPL', 'direction': Order.BUY, 'size': 5}]), ('data/step_dates1.csv', 'data/df1.csv', 8, Order.SELL, 1000.0, [{'ticker': 'AAPL', 'direction': Order.SELL, 'size': 5}])])
    def test_post_order_value(self, step_dates_path, df_path, step_idx, direction, value, expected_orders):
        """post_order_value converts a cash value into a sized market order."""
        step_dates = [np.datetime64(x) for x in pd.read_csv(step_dates_path)['date']]
        df = pd.read_csv(df_path)
        df['date'] = df['date'].astype(np.datetime64)
        data_loader = DfData(df)
        strategy = Strategy()
        strategy.latency = np.timedelta64(1, 'h')
        strategy.data_loader = data_loader
        strategy.step_dates = step_dates
        strategy.date_col = 'date'
        strategy.price_col = 'price'
        strategy.return_col = 'return'
        strategy.return_format = 'ratio'
        strategy.step_idx = step_idx
        strategy.step_date = strategy.step_dates[strategy.step_idx]
        strategy.verbose = False
        strategy.post_order_value(ticker='AAPL', direction=direction, order_type=Order.MARKET, value=value, lifetime=np.timedelta64(300, 'D'), allow_partial=True)
        assert (len(strategy._active_orders) == len(expected_orders))
        for (order, expected) in zip(strategy._active_orders, expected_orders):
            np.testing.assert_equal(order['ticker'], expected['ticker'])
            np.testing.assert_equal(order['direction'], expected['direction'])
            np.testing.assert_equal(order['size'], expected['size'])

    @pytest.mark.parametrize(['step_dates_path', 'step_idx', 'portfolio', 'size', 'expected_orders'], [('data/step_dates1.csv', 4, {'AAPL': 3}, 10, [{'ticker': 'AAPL', 'direction': Order.BUY, 'size': 7}]), ('data/step_dates1.csv', 4, {'AAPL': 0}, 10, [{'ticker': 'AAPL', 'direction': Order.BUY, 'size': 10}]), ('data/step_dates1.csv', 4, {'AAPL': 10}, 4, [{'ticker': 'AAPL', 'direction': Order.SELL, 'size': 6}]), ('data/step_dates1.csv', 4, {}, 10, [{'ticker': 'AAPL', 'direction': Order.BUY, 'size': 10}]), ('data/step_dates1.csv', 4, {'AAPL': 10}, 10, [])])
    def test_post_portfolio_size(self, step_dates_path, step_idx, portfolio, size, expected_orders):
        """post_portfolio_size posts the delta order to reach a target position."""
        step_dates = [np.datetime64(x) for x in pd.read_csv(step_dates_path)['date']]
        strategy = Strategy()
        strategy.step_dates = step_dates
        strategy.step_date = strategy.step_dates[0]
        strategy.latency = np.timedelta64(1, 'h')
        strategy.portfolio = portfolio
        strategy.post_portfolio_size(ticker='AAPL', size=size, lifetime=np.timedelta64(300, 'D'), allow_partial=True)
        assert (len(strategy._active_orders) == len(expected_orders))
        for (order, expected) in zip(strategy._active_orders, expected_orders):
            np.testing.assert_equal(order['ticker'], expected['ticker'])
            np.testing.assert_equal(order['direction'], expected['direction'])
            np.testing.assert_equal(order['size'], expected['size'])

    @pytest.mark.parametrize(['step_dates_path', 'df_path', 'step_idx', 'portfolio', 'value', 'expected_orders'], [('data/step_dates1.csv', 'data/df1.csv', 4, {'AAPL': 0}, 1000.0, [{'ticker': 'AAPL', 'direction': Order.BUY, 'size': 6}]), ('data/step_dates1.csv', 'data/df1.csv', 4, {'AAPL': 0}, 20.0, []), ('data/step_dates1.csv', 'data/df1.csv', 4, {'AAPL': 6}, 1000.0, []), ('data/step_dates1.csv', 'data/df11.csv', 2, {}, 1000.0, []), ('data/step_dates1.csv', 'data/df1.csv', 12, {}, 1000.0, [{'ticker': 'AAPL', 'direction': Order.BUY, 'size': 4}]), ('data/step_dates1.csv', 'data/df1.csv', 4, {'AAPL': 2}, 1000.0, [{'ticker': 'AAPL', 'direction': Order.BUY, 'size': 4}]), ('data/step_dates1.csv', 'data/df1.csv', 4, {'AAPL': 20}, 1000.0, [{'ticker': 'AAPL', 'direction': Order.SELL, 'size': 14}])])
    def test_post_portfolio_value(self, step_dates_path, df_path, step_idx, portfolio, value, expected_orders):
        """post_portfolio_value sizes the delta order to reach a target value."""
        step_dates = [np.datetime64(x) for x in pd.read_csv(step_dates_path)['date']]
        df = pd.read_csv(df_path)
        df['date'] = df['date'].astype(np.datetime64)
        data_loader = DfData(df)
        strategy = Strategy()
        strategy.latency = np.timedelta64(1, 'h')
        strategy.portfolio = portfolio
        strategy.data_loader = data_loader
        strategy.step_dates = step_dates
        strategy.date_col = 'date'
        strategy.price_col = 'price'
        strategy.return_col = 'return'
        strategy.return_format = 'ratio'
        strategy.step_idx = step_idx
        strategy.step_date = strategy.step_dates[strategy.step_idx]
        strategy.verbose = False
        strategy.post_portfolio_value(ticker='AAPL', value=value, lifetime=np.timedelta64(300, 'D'), allow_partial=True)
        assert (len(strategy._active_orders) == len(expected_orders))
        for (order, expected) in zip(strategy._active_orders, expected_orders):
            np.testing.assert_equal(order['ticker'], expected['ticker'])
            np.testing.assert_equal(order['direction'], expected['direction'])
            np.testing.assert_equal(order['size'], expected['size'])

    @pytest.mark.parametrize(['step_dates_path', 'df_path', 'step_idx', 'portfolio', 'step_equity', 'part', 'expected_orders'], [('data/step_dates1.csv', 'data/df1.csv', 4, {'AAPL': 0}, 1000.0, 1.0, [{'ticker': 'AAPL', 'direction': Order.BUY, 'size': 6}]), ('data/step_dates1.csv', 'data/df1.csv', 4, {'AAPL': 0}, 1000.0, 0.5, [{'ticker': 'AAPL', 'direction': Order.BUY, 'size': 3}]), ('data/step_dates1.csv', 'data/df1.csv', 4, {'AAPL': 0}, 1000.0, 0.3, [{'ticker': 'AAPL', 'direction': Order.BUY, 'size': 2}]), ('data/step_dates1.csv', 'data/df1.csv', 4, {'AAPL': 1}, 1000.0, 0.3, [{'ticker': 'AAPL', 'direction': Order.BUY, 'size': 1}]), ('data/step_dates1.csv', 'data/df1.csv', 4, {'AAPL': 5}, 1000.0, 0.3, [{'ticker': 'AAPL', 'direction': Order.SELL, 'size': 3}])])
    def test_post_portfolio_part(self, step_dates_path, df_path, step_idx, portfolio, step_equity, part, expected_orders):
        """post_portfolio_part targets a fraction of current equity in one ticker."""
        step_dates = [np.datetime64(x) for x in pd.read_csv(step_dates_path)['date']]
        df = pd.read_csv(df_path)
        df['date'] = df['date'].astype(np.datetime64)
        data_loader = DfData(df)
        strategy = Strategy()
        strategy.latency = np.timedelta64(1, 'h')
        strategy.equity = ([0] * len(step_dates))
        strategy.equity[step_idx] = step_equity
        strategy.portfolio = portfolio
        strategy.data_loader = data_loader
        strategy.step_dates = step_dates
        strategy.date_col = 'date'
        strategy.price_col = 'price'
        strategy.return_col = 'return'
        strategy.return_format = 'ratio'
        strategy.step_idx = step_idx
        strategy.step_date = strategy.step_dates[strategy.step_idx]
        strategy.verbose = False
        strategy.post_portfolio_part(ticker='AAPL', part=part, lifetime=np.timedelta64(300, 'D'), allow_partial=True)
        assert (len(strategy._active_orders) == len(expected_orders))
        for (order, expected) in zip(strategy._active_orders, expected_orders):
            np.testing.assert_equal(order['ticker'], expected['ticker'])
            np.testing.assert_equal(order['direction'], expected['direction'])
            np.testing.assert_equal(order['size'], expected['size'])

    class Strategy1(Strategy):
        # A fixed scripted strategy used by the end-to-end backtest below.
        def step(self):
            if (self.step_idx == 0):
                self.post_order(ticker='AAPL0', size=10, direction=Order.BUY, lifetime=np.timedelta64(3, 'D'), allow_partial=True)
                self.post_order(ticker='AAPL1', size=5, direction=Order.BUY, lifetime=np.timedelta64(3, 'D'), allow_partial=True)
            if (self.step_idx == 7):
                self.post_portfolio_part(ticker='AAPL1', part=0.99, allow_partial=True)
            if (self.step_idx == 16):
                self.post_order(ticker='AAPL0', size=20, direction=Order.SELL, lifetime=np.timedelta64(3, 'D'), allow_partial=True)

    @pytest.mark.parametrize(['step_dates_path', 'df_pathes', 'strategy', 'cash', 'comission', 'latency', 'expected_orders', 'expected_equity', 'expected_cash', 'expected_portfolio'], [('data/step_dates1.csv', ['data/df6.csv', 'data/df3.csv'], Strategy1(), 500.0, 0.0, np.timedelta64(1, 'h'), [{'ticker': 'AAPL0', 'direction': Order.BUY, 'size': 10, 'creation_date': np.datetime64('2015-02-18'), 'execution_date': np.datetime64('2015-02-20'), 'status': Order.COMPLETED, 'price': 16.0}, {'ticker': 'AAPL1', 'direction': Order.BUY, 'size': 5, 'creation_date': np.datetime64('2015-02-18'), 'execution_date': np.nan, 'status': Order.EXPIRED, 'price': np.nan}, {'ticker': 'AAPL1', 'direction': Order.BUY, 'size': 28, 'creation_date': np.datetime64('2015-03-05'), 'execution_date': np.datetime64('2015-03-12'), 'status': Order.COMPLETED, 'price': 12.3}, {'ticker': 'AAPL1', 'direction': Order.SELL, 'size': 28, 'creation_date': np.datetime64('2015-03-17'), 'execution_date': np.datetime64('2015-03-18'), 'status': Order.COMPLETED, 'price': 19.3}, {'ticker': 'AAPL0', 'direction': Order.SELL, 'size': 10, 'creation_date': np.datetime64('2015-03-21'), 'execution_date': np.datetime64('2015-03-23'), 'status': Order.PARTIAL, 'price': 19.6}], [500.0, 500.0, 455.0, 498.0, 498.0, 492.8, 477.8, 496.8, 500.8, 500.8, 540.8, 500.8, 500.8, 500.8, 696.8, 737.8, 719.1, 769.1, 769.1, 769.1], [500, 340.0, 340.0, 340.0, 340.0, 358.8, 358.8, 358.8, 358.8, 358.8, 14.4, 14.4, 14.4, 14.4, 554.8, 554.8, 573.1, 769.1, 769.1, 769.1], {'AAPL0': 0, 'AAPL1': 0}), ('data/step_dates1.csv', ['data/df6.csv', 'data/df3.csv'], Strategy1(), 100.0, 0.0, np.timedelta64(49, 'h'), [{'ticker': 'AAPL0', 'direction': Order.BUY, 'size': 8, 'creation_date': np.datetime64('2015-02-18'), 'execution_date': np.datetime64('2015-02-23'), 'status': Order.PARTIAL, 'price': 11.5}, {'ticker': 'AAPL1', 'direction': Order.BUY, 'size': 5, 'creation_date': np.datetime64('2015-02-18'), 'execution_date': np.nan, 'status': Order.EXPIRED, 'price': np.nan}, 
    {'ticker': 'AAPL1', 'direction': Order.BUY, 'size': 1, 'creation_date': np.datetime64('2015-03-05'), 'execution_date': np.datetime64('2015-03-12'), 'status': Order.COMPLETED, 'price': 12.3}, {'ticker': 'AAPL1', 'direction': Order.SELL, 'size': 1, 'creation_date': np.datetime64('2015-03-17'), 'execution_date': np.datetime64('2015-03-18'), 'status': Order.COMPLETED, 'price': 19.3}, {'ticker': 'AAPL0', 'direction': Order.SELL, 'size': 8, 'creation_date': np.datetime64('2015-03-21'), 'execution_date': np.datetime64('2015-03-24'), 'status': Order.PARTIAL, 'price': 20.0}], [100.0, 100.0, 100.0, 134.4, 134.4, 130.24, 118.24, 133.44, 136.64, 136.64, 168.64, 136.64, 136.64, 136.64, 143.64, 176.44, 161.48, 201.48, 204.68, 204.68], [100.0, 100.0, 8.0, 8.0, 8.0, 23.04, 23.04, 23.04, 23.04, 23.04, 10.74, 10.74, 10.74, 10.74, 30.04, 30.04, 44.68, 44.68, 204.68, 204.68], {'AAPL0': 0, 'AAPL1': 0})])
    def test_backtest_simple_strategy(self, step_dates_path, df_pathes, strategy, cash, comission, latency, expected_orders, expected_equity, expected_cash, expected_portfolio):
        """End-to-end backtest: completed orders, equity curve, cash, portfolio."""
        step_dates = [np.datetime64(x) for x in pd.read_csv(step_dates_path)['date']]
        dfs = {}
        for (k, df_path) in enumerate(df_pathes):
            df = pd.read_csv(df_path)
            df['date'] = df['date'].astype(np.datetime64)
            dfs['AAPL{}'.format(k)] = df
        data_loader = DfData(dfs)
        strategy.backtest(data_loader=data_loader, date_col='date', price_col='price', return_col='return', return_format='ratio', step_dates=step_dates, cash=cash, comission=comission, latency=latency)
        assert (len(strategy.orders) == len(expected_orders))
        # Fixed: the original zipped strategy._active_orders here, which is
        # empty after a finished backtest, so these assertions never ran.
        # The expectations carry price/status/execution_date, which only
        # settled orders in strategy.orders have.
        for (order, expected) in zip(strategy.orders, expected_orders):
            np.testing.assert_equal(order['ticker'], expected['ticker'])
            np.testing.assert_equal(order['direction'], expected['direction'])
            np.testing.assert_equal(order['size'], expected['size'])
            np.testing.assert_almost_equal(order['price'], expected['price'])
            np.testing.assert_equal(order['creation_date'], expected['creation_date'])
            np.testing.assert_equal(order['execution_date'], expected['execution_date'])
        np.testing.assert_array_almost_equal(strategy.equity, expected_equity)
        np.testing.assert_array_almost_equal(strategy.cash, expected_cash)
        assert (strategy.portfolio == expected_portfolio)
def get_metadata_files(vercodes):
    """Map each appid in *vercodes* to its ``metadata/<appid>.yml`` file.

    Logs a critical message for every unknown appid and raises
    FDroidException if any appid had no metadata file.
    """
    missing_any = False
    files = []
    for appid in vercodes.keys():
        candidate = Path('metadata') / ('%s.yml' % appid)
        if candidate.exists():
            files.append(candidate)
        else:
            missing_any = True
            logging.critical(_('No such package: %s') % appid)
    if missing_any:
        raise FDroidException(_('Found invalid appids in arguments'))
    return files
def otsu_mask(voxel_sims: Float[(torch.Tensor, 'n')], num_bins: int=100) -> Tuple[(Float[(torch.Tensor, 'n')], float)]:
    """Binarize similarity scores via Otsu's method.

    Normalizes the scores to [0, 1], histograms them into *num_bins*, picks
    the threshold maximizing the between-class variance, and returns the
    boolean mask of scores at/above the threshold plus the threshold itself
    (in normalized units).
    """
    lo, hi = voxel_sims.min(), voxel_sims.max()
    normed = (voxel_sims - lo) / (hi - lo)
    bin_centers = torch.linspace(0, 1, num_bins).to(voxel_sims.device)
    counts = torch.histc(normed, bins=num_bins, min=0, max=1)
    p = counts / counts.sum()
    omega = torch.cumsum(p, dim=0)
    mu = torch.cumsum(p * bin_centers, dim=0)
    # Between-class variance; epsilon guards the empty/full-class endpoints.
    sigma_b = ((mu[-1] * omega) - mu) ** 2 / ((omega * (1 - omega)) + 1e-09)
    threshold = bin_centers[torch.argmax(sigma_b)]
    mask = (normed >= threshold).squeeze()
    return (mask, threshold)
class OptionPlotoptionsWindbarbStates(Options):
    """Accessors for the Highcharts ``plotOptions.windbarb.states`` group.

    Each method lazily materializes the corresponding sub-option object via
    ``_config_sub_data``.
    NOTE(review): in this copy each accessor is a plain method; they look
    like they were ``@property`` getters originally — confirm upstream.
    """
    def hover(self) -> 'OptionPlotoptionsWindbarbStatesHover':
        # Sub-options stored under the 'hover' key.
        return self._config_sub_data('hover', OptionPlotoptionsWindbarbStatesHover)
    def inactive(self) -> 'OptionPlotoptionsWindbarbStatesInactive':
        # Sub-options stored under the 'inactive' key.
        return self._config_sub_data('inactive', OptionPlotoptionsWindbarbStatesInactive)
    def normal(self) -> 'OptionPlotoptionsWindbarbStatesNormal':
        # Sub-options stored under the 'normal' key.
        return self._config_sub_data('normal', OptionPlotoptionsWindbarbStatesNormal)
    def select(self) -> 'OptionPlotoptionsWindbarbStatesSelect':
        # Sub-options stored under the 'select' key.
        return self._config_sub_data('select', OptionPlotoptionsWindbarbStatesSelect)
def register_user():
    """Register a new user from the JSON request body.

    Validates against ``register_user_schema``; when the global ``vuln``
    flag is set, deliberately honors a client-supplied ``admin`` field
    (intentional mass-assignment vulnerability for this demo app).
    """
    payload = request.get_json()
    existing = User.query.filter_by(username=payload.get('username')).first()
    if existing:
        return Response(error_message_helper('User already exists. Please Log in.'), 200, mimetype='application/json')
    try:
        jsonschema.validate(payload, register_user_schema)
        if vuln and ('admin' in payload):
            user = User(username=payload['username'], password=payload['password'], email=payload['email'], admin=bool(payload['admin']))
        else:
            user = User(username=payload['username'], password=payload['password'], email=payload['email'])
        db.session.add(user)
        db.session.commit()
        ok_body = {'status': 'success', 'message': 'Successfully registered. Login to receive an auth token.'}
        return Response(json.dumps(ok_body), 200, mimetype='application/json')
    except jsonschema.exceptions.ValidationError as exc:
        return Response(error_message_helper(exc.message), 400, mimetype='application/json')
class SavepathParser(object):
    """Expands %-placeholders in a configured savepath template.

    Execution-time placeholders (%time_execution(fmt), %timestamp_execution)
    are substituted once in __init__; per-download placeholders (%time_dl,
    %domain, %url_file_name, md5 variants, ...) are substituted in
    get_savepath().
    """
    helper = None                 # external helper object, kept as-is
    cfg_savepath = None           # template with execution-time tokens resolved
    relative_to_path = None       # base for resolving relative savepaths
    format_relative_path = None   # whether to strip a leading './' / '.\'
    working_path = None           # replacement for the %working_path token

    def __init__(self, cfg_savepath, relative_to_path, format_relative_path, helper, working_path):
        self.helper = helper
        timestamp_execution = int(time.time())
        cfg_savepath = re.sub('%time_execution\\(([^\\)]+)\\)', (lambda match: self.time_replacer(match, timestamp_execution)), cfg_savepath)
        cfg_savepath = re.sub('%timestamp_execution', str(timestamp_execution), cfg_savepath)
        self.cfg_savepath = cfg_savepath
        self.relative_to_path = relative_to_path
        self.format_relative_path = format_relative_path
        self.working_path = working_path

    # BUGFIX: @staticmethod restored on the helpers below. They take no
    # `self`/`cls` and are invoked both as SavepathParser.x(...) and through
    # an instance (e.g. self.time_replacer(...) in __init__), which without
    # the decorator would pass the instance as the first positional argument.
    @staticmethod
    def time_replacer(match, timestamp):
        """Format *timestamp* (UTC) using the strftime pattern in group(1)."""
        return time.strftime(match.group(1), time.gmtime(timestamp))

    @staticmethod
    def append_md5_if_too_long(component, size):
        """Shrink *component* to at most *size* chars.

        If it fits, return it unchanged. If size > 32, keep a prefix and
        append '_<md5hex>'; otherwise return a truncated md5 hex digest.
        """
        if (len(component) > size):
            if (size > 32):
                component_size = ((size - 32) - 1)
                return ('%s_%s' % (component[:component_size], hashlib.md5(component.encode('utf-8')).hexdigest()))
            else:
                return hashlib.md5(component.encode('utf-8')).hexdigest()[:size]
        else:
            return component

    def get_savepath(self, url, savepath=None):
        """Expand all per-download placeholders for *url* and return the
        sanitized savepath (defaults to the configured template)."""
        timestamp = int(time.time())
        if (not savepath):
            savepath = self.cfg_savepath
        savepath = re.sub(re_working_path, (lambda match: self.working_path), savepath)
        savepath = re.sub(re_time_dl, (lambda match: SavepathParser.time_replacer(match, timestamp)), savepath)
        savepath = re.sub(re_timstamp_dl, str(timestamp), savepath)
        # Domain / subdomain / directory / file-name tokens, each with plain,
        # md5 and append-md5 variants; the group(1) value is a length limit.
        savepath = re.sub(re_domain, (lambda match: UrlExtractor.get_allowed_domain(url, False)[:int(match.group(1))]), savepath)
        savepath = re.sub(re_appendmd5_domain, (lambda match: SavepathParser.append_md5_if_too_long(UrlExtractor.get_allowed_domain(url, False), int(match.group(1)))), savepath)
        savepath = re.sub(re_md5_domain, (lambda match: hashlib.md5(UrlExtractor.get_allowed_domain(url, False).encode('utf-8')).hexdigest()[:int(match.group(1))]), savepath)
        savepath = re.sub(re_full_domain, (lambda match: UrlExtractor.get_allowed_domain(url)[:int(match.group(1))]), savepath)
        savepath = re.sub(re_appendmd5_full_domain, (lambda match: SavepathParser.append_md5_if_too_long(UrlExtractor.get_allowed_domain(url), int(match.group(1)))), savepath)
        savepath = re.sub(re_md5_full_domain, (lambda match: hashlib.md5(UrlExtractor.get_allowed_domain(url).encode('utf-8')).hexdigest()[:int(match.group(1))]), savepath)
        savepath = re.sub(re_subdomains, (lambda match: UrlExtractor.get_subdomain(url)[:int(match.group(1))]), savepath)
        savepath = re.sub(re_appendmd5_subdomains, (lambda match: SavepathParser.append_md5_if_too_long(UrlExtractor.get_subdomain(url), int(match.group(1)))), savepath)
        savepath = re.sub(re_md5_subdomains, (lambda match: hashlib.md5(UrlExtractor.get_subdomain(url).encode('utf-8')).hexdigest()[:int(match.group(1))]), savepath)
        savepath = re.sub(re_url_dir, (lambda match: UrlExtractor.get_url_directory_string(url)[:int(match.group(1))]), savepath)
        savepath = re.sub(re_appendmd5_url_dir, (lambda match: SavepathParser.append_md5_if_too_long(UrlExtractor.get_url_directory_string(url), int(match.group(1)))), savepath)
        savepath = re.sub(re_md5_url_dir, (lambda match: hashlib.md5(UrlExtractor.get_url_directory_string(url).encode('utf-8')).hexdigest()[:int(match.group(1))]), savepath)
        savepath = re.sub(re_url_file, (lambda match: UrlExtractor.get_url_file_name(url)[:int(match.group(1))]), savepath)
        savepath = re.sub(re_md5_url_file, (lambda match: hashlib.md5(UrlExtractor.get_url_file_name(url).encode('utf-8')).hexdigest()[:int(match.group(1))]), savepath)
        # %max_url_file_name needs the absolute path length to budget the
        # remaining room for the file-name component.
        abs_savepath = self.get_abs_path(savepath)
        savepath = re.sub(re_max_url_file, (lambda match: UrlExtractor.get_url_file_name(url)[:SavepathParser.get_max_url_file_name_length(abs_savepath)]), savepath)
        savepath = re.sub(re_appendmd5_max_url_file, (lambda match: SavepathParser.append_md5_if_too_long(UrlExtractor.get_url_file_name(url), SavepathParser.get_max_url_file_name_length(abs_savepath))), savepath)
        return SavepathParser.remove_not_allowed_chars(savepath)

    @staticmethod
    def remove_not_allowed_chars(savepath):
        """Replace filesystem-unsafe chars (< > : " | ? *) with '_',
        leaving any drive prefix untouched."""
        split_savepath = os.path.splitdrive(savepath)
        savepath_without_invalid_chars = re.sub('<|>|:|\\"|\\||\\?|\\*', '_', split_savepath[1])
        return (split_savepath[0] + savepath_without_invalid_chars)

    @staticmethod
    def get_abs_path_static(savepath, relative_to_path):
        """Resolve *savepath* to an absolute path, relative paths being
        anchored at *relative_to_path*."""
        if os.path.isabs(savepath):
            return os.path.abspath(savepath)
        else:
            return os.path.abspath(os.path.join(relative_to_path, savepath))

    def get_abs_path(self, savepath):
        return self.get_abs_path_static(savepath, self.relative_to_path)

    @staticmethod
    def get_base_path(path):
        """Return the longest leading directory of *path* that contains no
        unexpanded '%' placeholder."""
        if ('%' not in path):
            return path
        path = os.path.split(path)[0]
        while ('%' in path):
            path = os.path.split(path)[0]
        return path

    def get_formatted_relative_path(self, path):
        """Strip a leading './' or '.\\' when format_relative_path is set."""
        if (self.format_relative_path and (path.startswith('./') or path.startswith('.\\'))):
            return path[2:]
        else:
            return path

    @staticmethod
    def get_max_url_file_name_length(savepath):
        """Per-occurrence character budget for %max_url_file_name tokens so
        the full path stays under the 260-char Windows limit.

        Raises ZeroDivisionError if *savepath* contains no such token.
        """
        number_occurrences = savepath.count('%max_url_file_name')
        number_occurrences += savepath.count('%appendmd5_max_url_file_name')
        savepath_copy = savepath
        size_without_max_url_file_name = len(savepath_copy.replace('%max_url_file_name', '').replace('%appendmd5_max_url_file_name', ''))
        max_size = ((260 - 1) - size_without_max_url_file_name)
        # BUGFIX: integer division — the result is used as a slice bound,
        # and a float index would raise TypeError.
        max_size_per_occurrence = (max_size // number_occurrences)
        return max_size_per_occurrence

    @staticmethod
    def get_filename(savepath):
        """Final path component, handling Windows-style separators too."""
        return ntpath.basename(savepath)
class JITOption(click.Option):
    """A click Option whose default, help and prompt may be zero-argument
    callables, re-evaluated just-in-time whenever click asks for them."""

    def __init__(self, param_decls: Union[(str, Sequence[str])], default: Union[(Callable[([], Any)], None, Any)]=None, help: Union[(Callable[([], str)], str, None)]=None, prompt: Union[(Callable[([], str)], str, None)]=None, **kwargs: Any):
        # Keep the (possibly callable) originals so they can be re-resolved.
        self.callable_default = default
        self.callable_help = help
        self.callable_prompt = prompt
        if isinstance(param_decls, str):
            param_decls = [_value_of(param_decls)]
        # FIX: __init__ should not `return` the superclass call's result.
        super().__init__(param_decls=param_decls, default=_value_of(default), help=_value_of(help), prompt=_value_of(prompt), **kwargs)

    def prompt_for_value(self, ctx: click.Context) -> Any:
        """Resolve the prompt lazily before delegating to click."""
        self.prompt = _value_of(self.callable_prompt)
        return super().prompt_for_value(ctx)

    def get_help_record(self, ctx: click.Context) -> Tuple[(str, str)]:
        """Resolve the help text lazily before delegating to click."""
        self.help = _value_of(self.callable_help)
        return super().get_help_record(ctx)

    def get_default(self, ctx: click.Context) -> Any:
        """Resolve the default lazily before delegating to click."""
        self.default = _value_of(self.callable_default)
        return super().get_default(ctx)
def matrix_lines(et, pos, pos_cloned, edge_color: Iterable, alpha: Iterable, lw: Iterable, aes_kw: Dict):
    """Build one Circle patch per edge row of *et* (matrix-style layout).

    Each row's source/target positions are looked up and collapsed to a
    single (x, y) point; `edge_color`/`alpha`/`lw` are indexed by row label.
    """
    patches = []
    for (r, d) in et.iterrows():
        start = d['source']
        end = d['target']
        # BUGFIX(review): the original referenced an undefined `pos_y`
        # (NameError); `pos_cloned` is the only otherwise-unused mapping in
        # the signature — confirm intent against the caller.
        (x_start, y_start) = pos_cloned[start]
        (x_end, y_end) = pos[end]
        (x, y) = (max(x_start, y_start), max(x_end, y_end))
        kw = {'fc': edge_color[r], 'alpha': alpha[r], 'radius': lw[r], 'zorder': 10}
        kw.update(aes_kw)
        patch = Circle(xy=(x, y), **kw)
        patches.append(patch)
    return patches
def do_the_useful_stuff(process_counter, task_id, worker_id, sleep):
    """Worker body used by the pool tests: records progress markers in redis
    and fails/returns early depending on FAIL_* environment variables."""
    env = os.environ
    if 'FAIL_EARLY' in env:
        raise Exception('sorry')
    conn = get_redis_connection(REDIS_OPTS)
    conn.hset(worker_id, 'started', 1)
    if 'FAIL_STARTED_PID' in env:
        return 0
    conn.hset(worker_id, 'PID', os.getpid())
    if 'FAIL_STARTED' in env:
        raise Exception('sorry')
    time.sleep(sleep)
    # Every 8th task reports status 2, all others status 1.
    status = 2 if (process_counter % 8) == 0 else 1
    conn.hset(worker_id, 'status', str(status))
    return 0
def test_complex_list_columns():
    """A dotted column_list entry ('model1.test1') renders the related
    model's value in the list view."""
    app, db, admin = setup()
    with app.app_context():
        M1, M2 = create_models(db)
        parent = M1('model1_val1')
        db.session.add(parent)
        db.session.add(M2('model2_val1', model1=parent))
        db.session.commit()
        view = CustomModelView(M2, db.session, column_list=['model1.test1'])
        admin.add_view(view)
        client = app.test_client()
        response = client.get('/admin/model2/')
        assert response.status_code == 200
        page = response.data.decode('utf-8')
        assert 'model1_val1' in page
def add_elem_types_to_mm_info(mm_info, func_attrs):
    """Return a copy of *mm_info* whose pointers are wrapped in casts to the
    CUDA element types derived from the op's input/output dtypes."""
    spec = CUDASpec()
    in_type = spec.dtype_to_lib_type(func_attrs['inputs'][0]._attrs['dtype'])
    out_type = spec.dtype_to_lib_type(func_attrs['outputs'][0]._attrs['dtype'])
    return dataclasses.replace(
        mm_info,
        a_ptr=f'({in_type}*)({mm_info.a_ptr})',
        b_ptr=f'({in_type}*)({mm_info.b_ptr})',
        bias_ptr=f'({out_type}*)({mm_info.bias_ptr})',
        c_ptr=f'({out_type}*)({mm_info.c_ptr})',
    )
# RESTORED(review): the decorator names below were stripped to bare tuples
# in this copy of the file (invalid syntax); reconstructed from the argument
# shapes — confirm against upstream.
@click.command()
@click.argument('bounds', nargs=4, type=float, metavar='MINX MINY MAXX MAXY')
@click.option('-s', '--step', type=float, help='Step between lines (in projected units)', required=True)
@click.option('-j', '--crs', type=str, default=None, help=crs_help2)
@click.option('-o', '--output', default=sys.stdout, type=click.File('wb'), help='Defaults to stdout')
def graticule(bounds, step, crs, output):
    """Write a GeoJSON graticule covering BOUNDS at the given step."""
    click.echo(_graticule.geojson(bounds, step, crs), file=output)
def error(parent, message=None, markup=None):
    """Show a modal GTK error dialog over *parent* and block until closed.

    Exactly one of *message* (plain text) or *markup* (Pango markup) must
    be provided; raises ValueError when both are None.
    """
    if message is None and markup is None:
        raise ValueError('message or markup must be specified')
    dialog = Gtk.MessageDialog(buttons=Gtk.ButtonsType.CLOSE, message_type=Gtk.MessageType.ERROR, modal=True, transient_for=parent)
    if markup is not None:
        dialog.set_markup(markup)
    else:
        dialog.props.text = message
    dialog.run()
    dialog.destroy()
(tags=['audit'], description=docs.NAME_SEARCH)
class AuditCandidateNameSearch(utils.Resource):
    """Full-text search over audit candidates; newest 20 matches first."""
    # (query-arg name, fulltext column) pairs consumed by filter_fulltext.
    filter_fulltext_fields = [('q', models.AuditCandidateSearch.fulltxt)]
    # NOTE(review): the two bare calls below look like stripped decorators
    # (arg parsing + response marshalling for get()) — confirm upstream.
    _kwargs(args.names)
    _with(schemas.AuditCandidateSearchListSchema())
    def get(self, **kwargs):
        # Apply the fulltext filter from the parsed query args, then cap
        # the result set at the 20 most recent rows (descending id).
        query = filters.filter_fulltext(models.AuditCandidateSearch.query, kwargs, self.filter_fulltext_fields)
        query = query.order_by(sa.desc(models.AuditCandidateSearch.id)).limit(20)
        return {'results': query.all()}
_production
class Expression(AstNode, VarStateMixin):
    """AST node for an expression, exposing its typeDescriptions strings."""
    # Synthesized attribute: accessing it before it is computed fails loudly.
    expression_value = synthesized(default=fail_on_access)
    # Inherited attribute: whether the expression's value is consumed.
    expression_value_used = inherited(default=True, implicit_pushdown=False)
    def type_string(self) -> str:
        # Human-readable type, e.g. the 'typeString' entry of the node's
        # typeDescriptions mapping.
        return self.type_descriptions['typeString']
    def type_identifier(self) -> str:
        # Canonical identifier from the same typeDescriptions mapping.
        return self.type_descriptions['typeIdentifier']
def _remove_spaces(line: str) -> str:
quotes = False
comment = False
new_line = ''
for c in line:
if ((c == ' ') and (not quotes)):
if comment:
new_line += c
else:
if ((c == "'") or (c == '"')):
quotes = (not quotes)
c = "'"
elif (c == '#'):
comment = True
new_line += c
return new_line |
class TestGeneral(BaseEvenniaCommandTest):
    """Smoke tests for the default `general` commands (plus one `building`
    teleport). Each test drives a command via self.call(), which matches
    the command's output against the expected string; fixtures such as
    self.char1/self.char2/self.room1 come from BaseEvenniaCommandTest.
    """
    def test_look(self):
        rid = self.room1.id
        self.call(general.CmdLook(), 'here', 'Room(#{})\nroom_desc'.format(rid))
    def test_look_no_location(self):
        self.char1.location = None
        self.call(general.CmdLook(), '', 'You have no location to look at!')
    def test_look_nonexisting(self):
        self.call(general.CmdLook(), 'yellow sign', "Could not find 'yellow sign'.")
    def test_home(self):
        self.call(general.CmdHome(), '', 'You are already home')
    def test_go_home(self):
        # Move away first so that `home` has somewhere to return from.
        self.call(building.CmdTeleport(), '/quiet Room2')
        self.call(general.CmdHome(), '', "There's no place like home")
    def test_no_home(self):
        self.char1.home = None
        self.call(general.CmdHome(), '', 'You have no home')
    def test_inventory(self):
        self.call(general.CmdInventory(), '', 'You are not carrying anything.')
    def test_pose(self):
        # Mock char2's msg so the pose broadcast can be asserted directly.
        self.char2.msg = Mock()
        self.call(general.CmdPose(), 'looks around', 'Char looks around')
        self.char2.msg.assert_called_with(text=('Char looks around', {'type': 'pose'}), from_obj=self.char1)
    def test_nick(self):
        # Create a nick in each of the three categories, then verify lookups.
        self.call(general.CmdNick(), 'testalias = testaliasedstring1', "Inputline-nick 'testalias' mapped to 'testaliasedstring1'.")
        self.call(general.CmdNick(), '/account testalias = testaliasedstring2', "Account-nick 'testalias' mapped to 'testaliasedstring2'.")
        self.call(general.CmdNick(), '/object testalias = testaliasedstring3', "Object-nick 'testalias' mapped to 'testaliasedstring3'.")
        self.assertEqual('testaliasedstring1', self.char1.nicks.get('testalias'))
        self.assertEqual('testaliasedstring2', self.char1.nicks.get('testalias', category='account'))
        self.assertEqual(None, self.char1.account.nicks.get('testalias', category='account'))
        self.assertEqual('testaliasedstring3', self.char1.nicks.get('testalias', category='object'))
    def test_nick_list(self):
        self.call(general.CmdNick(), '/list', 'No nicks defined.')
        self.call(general.CmdNick(), 'test1 = Hello', "Inputline-nick 'test1' mapped to 'Hello'.")
        self.call(general.CmdNick(), '/list', 'Defined Nicks:')
    def test_get_and_drop(self):
        self.call(general.CmdGet(), 'Obj', 'You pick up an Obj.')
        self.call(general.CmdDrop(), 'Obj', 'You drop an Obj.')
    def test_give(self):
        # Giving fails until the object is picked up; both 'to' and '='
        # separators are accepted.
        self.call(general.CmdGive(), 'Obj to Char2', "You aren't carrying Obj.")
        self.call(general.CmdGive(), 'Obj = Char2', "You aren't carrying Obj.")
        self.call(general.CmdGet(), 'Obj', 'You pick up an Obj.')
        self.call(general.CmdGive(), 'Obj to Char2', 'You give')
        self.call(general.CmdGive(), 'Obj = Char', 'You give', caller=self.char2)
    def test_mux_command(self):
        # Inline command used to exercise MuxCommand switch matching,
        # including prefix abbreviation and ambiguity reporting.
        class CmdTest(MuxCommand):
            key = 'test'
            switch_options = ('test', 'testswitch', 'testswitch2')
            def func(self):
                self.msg('Switches matched: {}'.format(self.switches))
        self.call(CmdTest(), '/test/testswitch/testswitch2', "Switches matched: ['test', 'testswitch', 'testswitch2']")
        self.call(CmdTest(), '/test', "Switches matched: ['test']")
        self.call(CmdTest(), '/test/testswitch', "Switches matched: ['test', 'testswitch']")
        self.call(CmdTest(), '/testswitch/testswitch2', "Switches matched: ['testswitch', 'testswitch2']")
        self.call(CmdTest(), '/testswitch', "Switches matched: ['testswitch']")
        self.call(CmdTest(), '/testswitch2', "Switches matched: ['testswitch2']")
        self.call(CmdTest(), '/t', 'test: Ambiguous switch supplied: Did you mean /test or /testswitch or /testswitch2?|Switches matched: []')
        self.call(CmdTest(), '/tests', 'test: Ambiguous switch supplied: Did you mean /testswitch or /testswitch2?|Switches matched: []')
    def test_say(self):
        self.call(general.CmdSay(), 'Testing', 'You say, "Testing"')
    def test_whisper(self):
        self.call(general.CmdWhisper(), 'Obj = Testing', 'You whisper to Obj, "Testing"', caller=self.char2)
    def test_access(self):
        self.call(general.CmdAccess(), '', 'Permission Hierarchy (climbing):')
class task_():
    """Tests for fabric.task: Invoke-level kwargs plus Fabric extensions."""

    def accepts_Invoke_level_kwargs(self):
        def body(c, parts):
            # RESTORED(review): this docstring was stripped in this copy;
            # the assertion below requires it — confirm upstream wording.
            """I am a docstring"""
            pass
        t = fabric.task(name='dadbod', aliases=['heavenly', 'check', 'shop'], default=True, help={'parts': 'See: the sum of'}, iterable=['parts'])(body)
        assert (t.body is body)
        assert (t.__doc__ == 'I am a docstring')
        assert (t.name == 'dadbod')
        assert ('heavenly' in t.aliases)
        assert t.is_default
        assert ('parts' in t.help)
        assert ('parts' in t.iterable)

    def returns_Fabric_level_Task_instance(self):
        assert isinstance(fabric.task(Mock()), fabric.Task)

    def does_not_touch_klass_kwarg_if_explicitly_given(self):
        class SubFabTask(fabric.Task):
            pass
        assert isinstance(fabric.task(klass=SubFabTask)(Mock()), SubFabTask)

    class hosts_kwarg():
        """The `hosts` kwarg accepts strings, kwarg dicts, or a mix."""

        def _run(self, hosts):
            # RESTORED(review): decorator was mangled to a bare
            # `(hosts=hosts)` statement in this copy — confirm upstream.
            @fabric.task(hosts=hosts)
            def mytask(c):
                pass
            assert (mytask.hosts == hosts)

        def values_may_be_connection_first_posarg_strings(self):
            self._run(['host1', '', 'host3:2222'])

        def values_may_be_Connection_constructor_kwarg_dicts(self):
            self._run([{'host': 'host1'}, {'host': 'host2', 'user': 'user'}, {'host': 'host3', 'port': 2222}])

        def values_may_be_mixed(self):
            self._run([{'host': 'host1'}, ''])
def _get_highest_punk_bid_per_index(punk_bids: List[PunkBid], punk_index: int) -> Optional[PunkBid]:
    """Return the highest-priced bid for *punk_index*, or None if there is
    no bid for that index. On equal prices the earliest bid wins."""
    best = None
    for bid in punk_bids:
        if bid.punk_index != punk_index:
            continue
        if best is None or bid.price > best.price:
            best = bid
    return best
class HttpDataTransform():
    """Applies a malleable-profile-style chain of transform steps to C2 data,
    embedding it in an HttpRequest (transform) or extracting it back out
    (recover)."""

    def __init__(self, steps: List[TransformStep], reverse: bool=False, build: str=None) -> None:
        # BUGFIX: copy the incoming list — the original aliased `steps`
        # directly, so the BUILD insert/append below mutated the caller's
        # list (and, when not reversed, tsteps aliased it outright).
        self.tsteps: List[TransformStep] = list(steps)
        self.rsteps: List[TransformStep] = steps[::(- 1)]
        if reverse:
            (self.tsteps, self.rsteps) = (self.rsteps, self.tsteps)
        if (build is not None):
            build_step = ('BUILD', build)
            self.tsteps.insert(0, build_step)
            self.rsteps.append(build_step)

    def transform(self, c2data: C2Data, request: Optional[HttpRequest]=None) -> HttpRequest:
        """Run the forward step chain over *c2data*, folding the result into
        (a copy of) *request*. NOTE: the 'mask' step uses random.getrandbits,
        so output is nondeterministic; params/headers dicts of the passed
        request are mutated in place."""
        request = (request or HttpRequest(method=b'', uri=b'', body=b'', params={}, headers={}))
        uri = request.uri
        params = request.params
        headers = request.headers
        body = request.body
        data: bytes = b''
        for (step, step_val) in self.tsteps:
            step = step.lower()
            if (step == 'append'):
                # An int value means "append that many filler bytes".
                if isinstance(step_val, int):
                    step_val = (b'X' * step_val)
                assert isinstance(step_val, bytes)
                data = (data + step_val)
            elif (step == 'prepend'):
                if isinstance(step_val, int):
                    step_val = (b'X' * step_val)
                assert isinstance(step_val, bytes)
                data = (step_val + data)
            elif (step == 'base64'):
                data = base64.b64encode(data)
            elif (step == 'base64url'):
                data = base64.urlsafe_b64encode(data)
            elif (step == 'netbios'):
                data = netbios_encode(data).lower()
            elif (step == 'netbiosu'):
                data = netbios_encode(data).upper()
            elif (step == 'mask'):
                # 4-byte random XOR mask, prepended to the masked payload.
                mask = p32be(random.getrandbits(32))
                data = (mask + xor(data, mask))
            elif (step == 'print'):
                body = data
            elif (step == 'header'):
                assert isinstance(step_val, bytes)
                headers[step_val] = data
            elif ((step == '_header') or (step == '_hostheader')):
                # Static header: value comes from the profile, not the data.
                assert isinstance(step_val, bytes)
                (key, _, val) = step_val.partition(b': ')
                headers[key] = val
            elif (step == 'uri_append'):
                uri += data
            elif ((step == 'parameter') or (step == '_parameter')):
                assert isinstance(step_val, bytes)
                params[step_val] = data
            elif (step == 'build'):
                # Seed `data` from one of the C2Data fields.
                if (step_val == 'output'):
                    data = (c2data.output or b'')
                elif (step_val == 'id'):
                    data = (c2data.id or b'')
                elif (step_val == 'metadata'):
                    data = (c2data.metadata or b'')
            else:
                raise ValueError('Unknown transform step with value: {}'.format((step, step_val)))
        return request._replace(body=body, params=params, uri=uri, headers=headers)

    def recover(self, msg: Union[(HttpRequest, HttpResponse)]) -> Union[(ClientC2Data, ServerC2Data)]:
        """Invert the step chain, pulling the C2 data fields back out of
        *msg*.

        RECONSTRUCTED(review): this method's parameter name and the
        message-field reads were mangled in this copy (and two bare
        `@overload` stubs were dropped); the extraction sites below mirror
        the corresponding stores in transform() — confirm upstream.
        """
        assert isinstance(msg, (HttpRequest, HttpResponse)), 'argument should be a HttpRequest or HttpResponse'
        build_metadata = None
        build_output = None
        build_id = None
        data = b''
        for (step, step_val) in self.rsteps:
            step = step.lower()
            if (step == 'append'):
                # Inverse of append: strip that many trailing bytes.
                if isinstance(step_val, bytes):
                    step_val = len(step_val)
                assert isinstance(step_val, int)
                data = data[:(- step_val)]
            elif (step == 'prepend'):
                if isinstance(step_val, bytes):
                    step_val = len(step_val)
                assert isinstance(step_val, int)
                data = data[step_val:]
            elif (step == 'base64'):
                # Extra '==' tolerates stripped padding.
                data = base64.b64decode((data + b'=='))
            elif (step == 'base64url'):
                data = base64.urlsafe_b64decode((data + b'=='))
            elif (step == 'netbios'):
                data = netbios_decode(data.upper())
            elif (step == 'netbiosu'):
                data = netbios_decode(data)
            elif (step == 'mask'):
                # First 4 bytes are the XOR mask.
                data = xor(data[4:], data[:4])
            elif (step == 'print'):
                data = msg.body
            elif (step == 'uri_append'):
                assert isinstance(msg, HttpRequest)
                # Assumes the data makes up the (remaining) URI — confirm.
                data = msg.uri
            elif (step == 'header'):
                assert isinstance(step_val, bytes)
                data = msg.headers[step_val]
            elif (step == 'parameter'):
                assert isinstance(msg, HttpRequest)
                assert isinstance(step_val, bytes)
                data = msg.params[step_val]
            elif (step == 'build'):
                if (step_val == 'output'):
                    build_output = data
                elif (step_val == 'id'):
                    build_id = data
                elif (step_val == 'metadata'):
                    build_metadata = data
            elif (step in ('_header', '_hostheader')):
                # Static headers carry no embedded data; nothing to recover.
                pass
            else:
                raise ValueError('Unknown recover step with value: {}'.format((step, step_val)))
        if isinstance(msg, HttpRequest):
            return ClientC2Data(output=build_output, id=build_id, metadata=build_metadata)
        return ServerC2Data(output=build_output, id=build_id, metadata=build_metadata)
def fetch_historical_consumption(target_datetime: datetime, session: Session, logger: Logger=getLogger(__name__)):
    """Fetch Vietnam (VN) consumption data for *target_datetime* from
    nldc.evn.vn and return it as a list of consumption events."""
    raw_rows = fetch_historical_data(session=session, target_datetime=target_datetime)
    consumption = TotalConsumptionList(logger)
    for row in raw_rows:
        # Timestamps are shifted back 30 minutes (interval-end -> midpoint).
        stamp = datetime.fromisoformat(row['thoiGian']).replace(tzinfo=tz) - timedelta(minutes=30)
        consumption.append(datetime=stamp, consumption=row['congSuat'], zoneKey=ZoneKey('VN'), source='nldc.evn.vn')
    return consumption.to_list()
_runner
def directory(c, runner, path, user=None, group=None, mode=None):
    """Ensure *path* exists via *runner*, optionally setting owner and mode.

    When *user* is given without *group*, the group defaults to the user.
    """
    runner('mkdir -p {}'.format(path))
    if user is not None:
        owner_group = group if group else user
        runner('chown {}:{} {}'.format(user, owner_group, path))
    if mode is not None:
        runner('chmod {} {}'.format(mode, path))
def _evolve(shot: System, dt: float):
    """Advance the motion state of every ball in *shot* by *dt* seconds."""
    # Keys are unused, so iterate values directly (was `.items()`).
    for ball in shot.balls.values():
        (rvw, s) = evolve.evolve_state_motion(state=ball.state.s, rvw=ball.state.rvw, R=ball.params.R, m=ball.params.m, u_s=ball.params.u_s, u_sp=ball.params.u_sp, u_r=ball.params.u_r, g=ball.params.g, t=dt)
        ball.state = BallState(rvw, s, (shot.t + dt))
class OptionSeriesNetworkgraphTooltip(Options):
    """Accessors for the Highcharts ``series.networkgraph.tooltip`` options.

    Methods come in getter/setter pairs of the same name: the getter reads
    the stored value via ``_config_get(default)``, the setter writes it via
    ``_config(value, js_type=False)``.
    NOTE(review): duplicate method names suggest ``@property`` /
    ``@<name>.setter`` decorators were stripped in this copy — confirm
    against the generator that produced this file.
    """
    def clusterFormat(self):
        return self._config_get('Clustered points: {point.clusterPointsAmount}')
    def clusterFormat(self, text: str):
        self._config(text, js_type=False)
    def dateTimeLabelFormats(self) -> 'OptionSeriesNetworkgraphTooltipDatetimelabelformats':
        # Lazily materialized sub-option group.
        return self._config_sub_data('dateTimeLabelFormats', OptionSeriesNetworkgraphTooltipDatetimelabelformats)
    def distance(self):
        return self._config_get(16)
    def distance(self, num: float):
        self._config(num, js_type=False)
    def followPointer(self):
        return self._config_get(False)
    def followPointer(self, flag: bool):
        self._config(flag, js_type=False)
    def followTouchMove(self):
        return self._config_get(True)
    def followTouchMove(self, flag: bool):
        self._config(flag, js_type=False)
    def footerFormat(self):
        return self._config_get('')
    def footerFormat(self, text: str):
        self._config(text, js_type=False)
    def format(self):
        return self._config_get('undefined')
    def format(self, text: str):
        self._config(text, js_type=False)
    def headerFormat(self):
        return self._config_get(None)
    def headerFormat(self, text: str):
        self._config(text, js_type=False)
    def nullFormat(self):
        return self._config_get(None)
    def nullFormat(self, text: str):
        self._config(text, js_type=False)
    def nullFormatter(self):
        return self._config_get(None)
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)
    def pointFormat(self):
        return self._config_get(None)
    def pointFormat(self, text: str):
        self._config(text, js_type=False)
    def pointFormatter(self):
        return self._config_get(None)
    def pointFormatter(self, value: Any):
        self._config(value, js_type=False)
    def valueDecimals(self):
        return self._config_get(None)
    def valueDecimals(self, num: float):
        self._config(num, js_type=False)
    def valuePrefix(self):
        return self._config_get(None)
    def valuePrefix(self, text: str):
        self._config(text, js_type=False)
    def valueSuffix(self):
        return self._config_get(None)
    def valueSuffix(self, text: str):
        self._config(text, js_type=False)
    def xDateFormat(self):
        return self._config_get(None)
    def xDateFormat(self, text: str):
        self._config(text, js_type=False)
.django_db
def test_statement_timeout_successfully_times_out():
    """A statement timeout shorter than pg_sleep must abort the query, and
    must do so well before the sleep would have completed."""
    test_timeout_in_seconds = 0.5
    pg_sleep_in_seconds = 10
    # RESTORED(review): `_db_timeout(...)` was a bare call in this copy; it
    # must decorate the inner function for the timeout to apply — confirm
    # against upstream.
    @_db_timeout(test_timeout_in_seconds)
    def test_timeout_success():
        with connection.cursor() as cursor:
            cursor.execute(f'SELECT pg_sleep({pg_sleep_in_seconds:.2f})')
    start = perf_counter()
    try:
        test_timeout_success()
    except Exception:
        # Timed out: we must have returned well before the full sleep.
        assert ((perf_counter() - start) < pg_sleep_in_seconds)
    else:
        # No exception means the timeout never fired.
        assert False
def return_memory_map_for_pid(pid):
    """Walk the virtual memory regions of *pid* via VirtualQueryEx.

    Returns {pid: set(region_info)}; the set is empty when the process
    cannot be opened. Regions whose field [5] equals 65536 are skipped.
    """
    memory_map = {pid: set()}
    process_handle = open_process(pid)
    if not process_handle:
        return memory_map
    page_start = 0
    while True:
        info = virtual_query_ex(process_handle, page_start)
        if info == 0:
            break  # query failed: end of address space
        if info[5] != 65536:
            memory_map[pid].add(copy.deepcopy(info))
        next_address = info[0] + info[4]
        if next_address <= page_start:
            break  # no forward progress; avoid an infinite loop
        page_start = next_address
    return memory_map
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.