code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
def hasTOC(self): <NEW_LINE> <INDENT> if not self.leadout: <NEW_LINE> <INDENT> logger.debug('no leadout, no TOC') <NEW_LINE> return False <NEW_LINE> <DEDENT> for t in self.tracks: <NEW_LINE> <INDENT> if 1 not in t.indexes.keys(): <NEW_LINE> <INDENT> logger.debug('no index 1, no TOC') <NEW_LINE> return False <NEW_LINE> <DEDENT> if t.indexes[1].absolute is None: <NEW_LINE> <INDENT> logger.debug('no absolute index 1, no TOC') <NEW_LINE> return False <NEW_LINE> <DEDENT> <DEDENT> return True | Check if the Table has a complete TOC.
a TOC is a list of all tracks and their Index 01, with absolute
offsets, as well as the leadout. | 625941c28c0ade5d55d3e94d |
def truncate(self, s, limit=100, suffix='...'): <NEW_LINE> <INDENT> if len(s) <= limit: <NEW_LINE> <INDENT> return s <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return ' '.join(s[:limit+1].split(' ')[0:-1]) + suffix | Smart truncate
Inputs:
M (tuple) The text in many languages
Output:
The text in the current language | 625941c273bcbd0ca4b2c00b |
def get_sdk_version(module: ModuleType) -> str: <NEW_LINE> <INDENT> return getattr(module, '__version__', 'undefined') | Check the version of azure.functions sdk.
Parameters
----------
module: ModuleType
The azure.functions SDK module
Returns
-------
str
The SDK version that our customer has installed. | 625941c207d97122c417881c |
def check_attachment_permission(self, action, username, resource, perm): <NEW_LINE> <INDENT> if resource.parent.realm == 'blog': <NEW_LINE> <INDENT> if action == 'ATTACHMENT_VIEW': <NEW_LINE> <INDENT> return 'BLOG_VIEW' in perm(resource.parent) <NEW_LINE> <DEDENT> if action in ['ATTACHMENT_CREATE', 'ATTACHMENT_DELETE']: <NEW_LINE> <INDENT> if 'BLOG_MODIFY_ALL' in perm(resource.parent): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif 'BLOG_MODIFY_OWN' in perm(resource.parent): <NEW_LINE> <INDENT> bp = BlogPost(self.env, resource.parent.id) <NEW_LINE> if bp.author == username: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return False | Respond to the various actions into the legacy attachment
permissions used by the Attachment module. | 625941c21b99ca400220aa45 |
def __getitem__(self, moment): <NEW_LINE> <INDENT> assert moment > 0 <NEW_LINE> import scipy.stats <NEW_LINE> dist = scipy.stats.norm(loc=self.mean, scale=self.stddev) <NEW_LINE> for k in range(len(self.memoized_moments), moment + 1): <NEW_LINE> <INDENT> m_k_minus_2 = self.memoized_moments[k - 2] if k > 1 else np.double(0.0) <NEW_LINE> m_k_minus_1 = self.memoized_moments[k - 1] <NEW_LINE> numerator = (np.power(self.maxval, k - 1) * dist.pdf(self.maxval) - np.power(self.minval, k - 1) * dist.pdf(self.minval)) <NEW_LINE> denominator = dist.cdf(self.maxval) - dist.cdf(self.minval) <NEW_LINE> m = ((k - 1) * self.stddev**2 * m_k_minus_2 + self.mean * m_k_minus_1 - self.stddev * numerator / denominator) <NEW_LINE> assert abs(m) < 1e50 <NEW_LINE> self.memoized_moments.append(m) <NEW_LINE> <DEDENT> return self.memoized_moments[moment] | Calculates the truncated normal moments.
Args:
moment: The number for the moment.
Returns:
The value for the given moment.
Uses the recurrence relation described in:
http://www.smp.uq.edu.au/people/YoniNazarathy/teaching_projects
/studentWork/EricOrjebin_TruncatedNormalMoments.pdf | 625941c2cad5886f8bd26f6e |
def choose(): <NEW_LINE> <INDENT> inputs = input_mod.get_inputs(['Enter a number: '], '') <NEW_LINE> option = inputs[0] <NEW_LINE> if option == "1": <NEW_LINE> <INDENT> show_table(table) <NEW_LINE> <DEDENT> elif option == "2": <NEW_LINE> <INDENT> add(table) <NEW_LINE> <DEDENT> elif option == "3": <NEW_LINE> <INDENT> remove(table, id_) <NEW_LINE> <DEDENT> elif option == "4": <NEW_LINE> <INDENT> update(table, id_) <NEW_LINE> <DEDENT> elif option == "5": <NEW_LINE> <INDENT> ui.print_result(get_lowest_price_item_id(table), 'The id of cheapest game is: ') <NEW_LINE> <DEDENT> elif option == "6": <NEW_LINE> <INDENT> ui.print_table(get_items_sold_between_ERP(table), title_list) <NEW_LINE> <DEDENT> elif option == '0': <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> while option not in OPTION: <NEW_LINE> <INDENT> raise KeyError <NEW_LINE> menu_control() | Function gets input from user
and starts options from module.
No arg
Returns nothing | 625941c2bd1bec0571d905c3 |
def clean_email(self): <NEW_LINE> <INDENT> UserModel = get_user_model() <NEW_LINE> email = self.cleaned_data["email"] <NEW_LINE> self.users_cache = UserModel._default_manager.filter( business_email__iexact=email) <NEW_LINE> if not len(self.users_cache): <NEW_LINE> <INDENT> raise ValidationError(self.fields['email'].error_messages['unknown']) <NEW_LINE> <DEDENT> if not any(user.is_active for user in self.users_cache): <NEW_LINE> <INDENT> raise ValidationError(self.fields['email'].error_messages['unknown']) <NEW_LINE> <DEDENT> return email | Validates that an active user exists with the given email address. | 625941c2a17c0f6771cbdfe7 |
def test_team_membership_str_repr_is_user_belongs_to_team(self): <NEW_LINE> <INDENT> membership = self.team_a_admin_membership <NEW_LINE> expected = f"{membership.user.username} belongs to {membership.team.name}" <NEW_LINE> self.assertEqual(str(membership), expected) | Does TeamMember have the expected str repr? | 625941c215fb5d323cde0aa1 |
def _get_servers(self): <NEW_LINE> <INDENT> return self.__servers | Getter method for servers, mapped from YANG variable /system/ntp/servers (container)
YANG Description: Enclosing container for the list of NTP servers | 625941c26e29344779a625a8 |
def new_url(**kwargs): <NEW_LINE> <INDENT> url_base = "/axapi/v3/file" <NEW_LINE> f_dict = {} <NEW_LINE> return url_base.format(**f_dict) | Return the URL for creating a resource | 625941c2d268445f265b4e03 |
def create_dict_class(module_class): <NEW_LINE> <INDENT> class_ = None <NEW_LINE> module_name, class_name = module_class.rsplit(".", 1) <NEW_LINE> try: <NEW_LINE> <INDENT> module_ = importlib.import_module(module_name) <NEW_LINE> try: <NEW_LINE> <INDENT> class_ = getattr(module_, class_name) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> logging.error('Class does not exist: {}'.format(class_name)) <NEW_LINE> <DEDENT> <DEDENT> except ImportError: <NEW_LINE> <INDENT> logging.error('Module does not exist: {}'.format(module_name)) <NEW_LINE> <DEDENT> return class_ | Return the dictionary class
:param module_class: the name of module and class in the form of "module.class"
:return: class object of the dictionary | 625941c28c0ade5d55d3e94e |
def plotToStringIO(self): <NEW_LINE> <INDENT> return graph.plot_1d(self.outArray, self.fitArray, self.color, self.ginf) | Plots the graph to a StringIO object and returns it.
Return values:
graphString: a StringIO object representing the graph. | 625941c276e4537e8c351605 |
def generate_role_namespace(role_name, role_version): <NEW_LINE> <INDENT> return '%s-%s' % (role_name, role_version) | Creates a unique namespace for the given role name and version.
The returned namespace can be later converted back into its name and
version using parse_role_namespace.
:type role_name: str
:type role_version: str or int
:rtype: str | 625941c260cbc95b062c64d7 |
def show1dmodel(x, thk=None, xlab=None, zlab="z in m", islog=True, z0=0, **kwargs): <NEW_LINE> <INDENT> if xlab is None: <NEW_LINE> <INDENT> xlab = "$\\rho$ in $\\Omega$m" <NEW_LINE> <DEDENT> if thk is None: <NEW_LINE> <INDENT> nl = int(np.floor((len(x) - 1) / 2.)) + 1 <NEW_LINE> thk = np.asarray(x)[:nl - 1] <NEW_LINE> x = np.asarray(x)[nl - 1:nl * 2 - 1] <NEW_LINE> <DEDENT> z1 = np.concatenate(([0], np.cumsum(thk))) + z0 <NEW_LINE> z = np.concatenate((z1, [z1[-1] * 1.2])) <NEW_LINE> nl = len(x) <NEW_LINE> px = np.zeros((nl * 2, 1)) <NEW_LINE> pz = np.zeros((nl * 2, 1)) <NEW_LINE> for i in range(nl): <NEW_LINE> <INDENT> px[2 * i] = x[i] <NEW_LINE> px[2 * i + 1] = x[i] <NEW_LINE> pz[2 * i + 1] = z[i + 1] <NEW_LINE> if i < nl - 1: <NEW_LINE> <INDENT> pz[2 * i + 2] = z[i + 1] <NEW_LINE> <DEDENT> <DEDENT> if islog: <NEW_LINE> <INDENT> plt.semilogx(px, pz, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> plt.plot(px, pz, **kwargs) <NEW_LINE> <DEDENT> plt.ion() <NEW_LINE> plt.grid(which='both') <NEW_LINE> plt.xlim((np.min(x) * 0.9, np.max(x) * 1.1)) <NEW_LINE> plt.ylim((max(z1) * 1.15, 0.)) <NEW_LINE> plt.xlabel(xlab) <NEW_LINE> plt.ylabel(zlab) <NEW_LINE> plt.show() <NEW_LINE> return | Show 1d block model defined by value and thickness vectors. | 625941c224f1403a92600afd |
def loc_detail(request, pk, *args, **kwargs): <NEW_LINE> <INDENT> location = Location.objects.get(pk=pk) <NEW_LINE> overlaplist = location.overlapswith.all() <NEW_LINE> context = {} <NEW_LINE> context['location'] = location <NEW_LINE> context['overlaplist'] = overlaplist <NEW_LINE> return render_to_response('loc_detail.html', context) | Displays the location's detail. Used when one clicks on a linked location. | 625941c201c39578d7e74dd0 |
def __init__(self, preprocess=20, prefix='#', **kwargs): <NEW_LINE> <INDENT> super().__init__(preprocess=preprocess, **kwargs) <NEW_LINE> self.prefix = prefix | :param str prefix: the character(s) to begin a comment | 625941c223849d37ff7b3025 |
def transform(self, src, dst, registry, value): <NEW_LINE> <INDENT> return self[(src, dst)].transform(src, dst, registry, value) | Transform the value, finding the rule in the chained context.
(A rule in last context will take precedence)
:raises: KeyError if the rule is not found. | 625941c25510c4643540f37e |
def _get_content_info_line_helper(self, ref, info, lines, props_re, get_link_target): <NEW_LINE> <INDENT> mode_type_map = { '100644': 'file', '100755': 'file', '120000': 'symlink', '160000': 'dataset', } <NEW_LINE> for line in lines: <NEW_LINE> <INDENT> if not line: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> inf = {} <NEW_LINE> props = props_re.match(line) <NEW_LINE> if not props: <NEW_LINE> <INDENT> path = ut.PurePosixPath(line) <NEW_LINE> inf['gitshasum'] = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> path = ut.PurePosixPath(props.group('fname')) <NEW_LINE> <DEDENT> if props: <NEW_LINE> <INDENT> inf['gitshasum'] = props.group('sha') <NEW_LINE> inf['type'] = mode_type_map.get( props.group('type'), props.group('type')) <NEW_LINE> if get_link_target and inf['type'] == 'symlink' and ((ref is None and '.git/annex/objects' in ut.Path( get_link_target(str(self.pathobj / path)) ).as_posix()) or (ref and '.git/annex/objects' in get_link_target( u'{}:{}'.format( ref, str(path)))) ): <NEW_LINE> <INDENT> inf['type'] = 'file' <NEW_LINE> <DEDENT> if ref and inf['type'] == 'file': <NEW_LINE> <INDENT> inf['bytesize'] = int(props.group('size')) <NEW_LINE> <DEDENT> <DEDENT> path = self.pathobj.joinpath(path) <NEW_LINE> if 'type' not in inf: <NEW_LINE> <INDENT> inf['type'] = 'symlink' if path.is_symlink() else 'directory' if path.is_dir() else 'file' <NEW_LINE> <DEDENT> info[path] = inf | Internal helper of get_content_info() to parse Git output | 625941c25e10d32532c5eebc |
def run(self): <NEW_LINE> <INDENT> _enemies = self.getenemies(self.getcurrplayer()) <NEW_LINE> _friends = self.getfriends(self.getcurrplayer()) <NEW_LINE> self._killable = [] <NEW_LINE> for j in _enemies: <NEW_LINE> <INDENT> if (j[0], j[1] - 1) in _friends and (j[0], j[1] + 1) not in self.getallpawns() and j[1] + 1 < self.getsize(): <NEW_LINE> <INDENT> self._killable.append((j[0], j[1] + 1)) <NEW_LINE> <DEDENT> if (j[0], j[1] + 1) in _friends and (j[0], j[1] - 1) not in self.getallpawns() and j[1] - 1 > 0: <NEW_LINE> <INDENT> self._killable.append((j[0], j[1] - 1)) <NEW_LINE> <DEDENT> if (j[0] + 1, j[1]) in _friends and (j[0] - 1, j[1]) not in self.getallpawns() and j[0] - 1 >= 0: <NEW_LINE> <INDENT> self._killable.append((j[0] - 1, j[1])) <NEW_LINE> <DEDENT> if (j[0] - 1, j[1]) in _friends and (j[0] + 1, j[1]) not in self.getallpawns() and j[0] + 1 < self.getsize(): <NEW_LINE> <INDENT> self._killable.append((j[0] + 1, j[1])) | checks if a pawn is killable and returns coordinates to kill it | 625941c2cad5886f8bd26f6f |
def _segment_request(path, data): <NEW_LINE> <INDENT> key = SEGMENT_IO_WRITE_KEY_PROD if _is_prod() else SEGMENT_IO_WRITE_KEY_DEV <NEW_LINE> try: <NEW_LINE> <INDENT> http.post('{}/{}'.format(SEGMENT_URL, path), json=data, auth=HTTPBasicAuth(key, ''), timeout=(1, 1)) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.exception(e) | Send a segment.io HTTP request
:param path: URL path
:type path: str
:param data: json POST data
:type data: dict
:rtype: None | 625941c20383005118ecf579 |
def setup_openstack_api(): <NEW_LINE> <INDENT> if 'OS_AUTH_URL' not in os.environ: <NEW_LINE> <INDENT> print("Please source a cloud rc file") <NEW_LINE> exit(1) <NEW_LINE> <DEDENT> auth_args = { 'auth_url': os.environ['OS_AUTH_URL'], 'project_name': 'admin', 'username': os.environ['OS_USERNAME'], 'password': os.environ['OS_PASSWORD'], 'verify': False } <NEW_LINE> requests.packages.urllib3.disable_warnings() <NEW_LINE> return connection.Connection(**auth_args) | Grab auth info from the environment. | 625941c2f7d966606f6a9f97 |
def __init__(self, date, ranking_list): <NEW_LINE> <INDENT> self.year = date <NEW_LINE> self.tournaments = [] <NEW_LINE> self.participants = [] <NEW_LINE> self.ranking_points = [] <NEW_LINE> self.ranking_points = ranking_list | Constructor for season
:param str date: The name of the season
:param List<int> ranking_list: list of ranking points | 625941c2091ae35668666ef7 |
@scheduler.scheduled_job('cron', id='send_everyday_reservation', hour=9) <NEW_LINE> def _send_everyday_reservation(): <NEW_LINE> <INDENT> with db_session_manager.with_session() as db_session: <NEW_LINE> <INDENT> today = get_now() <NEW_LINE> start_time = format_date(today.replace(days=-1)) <NEW_LINE> end_time = format_date(today) <NEW_LINE> manager = ServiceReservationManager(db_session) <NEW_LINE> manager.send_reservation_by_time(start_time, end_time) <NEW_LINE> logger.info('daily_send_everyday_reservation') | 获取前一天报名学员的信息
(约课日期、学员账号、姓名、手机、时段、课程开始日期、课程结束日期) | 625941c216aa5153ce36240d |
def update_object(obj1, obj2): <NEW_LINE> <INDENT> d1 = obj1.__dict__ <NEW_LINE> d2 = obj2.__dict__ <NEW_LINE> for k in d1.iterkeys(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> d1[k] = d2[k] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> obj1.__dict__ = d1 <NEW_LINE> return obj1 | 只更新obj1中有的属性 | 625941c2fff4ab517eb2f3d0 |
def main(args): <NEW_LINE> <INDENT> if len(sys.argv) > 1: <NEW_LINE> <INDENT> youtube.init() <NEW_LINE> init(poll_rate) <NEW_LINE> new_thread(animate) <NEW_LINE> if args.my_playlists: <NEW_LINE> <INDENT> playlists = youtube.get_my_playlists() <NEW_LINE> stop_anim() <NEW_LINE> for (i, playlist) in enumerate(playlists): <NEW_LINE> <INDENT> print(i, ') ', playlist['title'], sep='') <NEW_LINE> <DEDENT> integer = input("\nEnter playlist number: ") <NEW_LINE> playlist = playlists[integer] <NEW_LINE> save_playlist(playlist) <NEW_LINE> if args.download: <NEW_LINE> <INDENT> download(playlist, playlist['title']) <NEW_LINE> <DEDENT> <DEDENT> elif args.id: <NEW_LINE> <INDENT> stop_anim() <NEW_LINE> playlist = youtube.get_playlist(args.id) <NEW_LINE> save_playlist(playlist) <NEW_LINE> <DEDENT> elif args.channel: <NEW_LINE> <INDENT> stop_anim() <NEW_LINE> playlist = youtube.get_uploads_playlist(args.channel) <NEW_LINE> save_playlist(playlist) <NEW_LINE> <DEDENT> elif args.list: <NEW_LINE> <INDENT> stop_anim() <NEW_LINE> print([playlist['title'].encode('utf-8') for playlist in saved_playlists]) <NEW_LINE> <DEDENT> with open('update_list.dat', 'wb') as update_list_dat: <NEW_LINE> <INDENT> pickle.dump(saved_playlists, update_list_dat) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> parser.print_help() | Main logic and CLI handler. | 625941c276d4e153a657eac5 |
def convert_time(time): <NEW_LINE> <INDENT> units = [ (24 * 60 * 60, 'days'), (60 * 60, 'hours'), (60, 'minutes'), (1, 'seconds'), ] <NEW_LINE> if time == 0: <NEW_LINE> <INDENT> return ('0', 'seconds') <NEW_LINE> <DEDENT> for unit in units: <NEW_LINE> <INDENT> if time >= unit[0]: <NEW_LINE> <INDENT> return ('{0}'.format(time // unit[0]), unit[1]) | Convert a time in seconds into the biggest unit | 625941c2c4546d3d9de729c7 |
def _setLevelFromSliderValue(self, value): <NEW_LINE> <INDENT> item = self.item() <NEW_LINE> if item is not None: <NEW_LINE> <INDENT> volume = item.parent() <NEW_LINE> if volume is not None: <NEW_LINE> <INDENT> dataRange = volume.getDataRange() <NEW_LINE> if dataRange is not None: <NEW_LINE> <INDENT> sliderMin, sliderMax = self._LEVEL_SLIDER_RANGE <NEW_LINE> offset = (value - sliderMin) / (sliderMax - sliderMin) <NEW_LINE> dataMin, dataMax = dataRange[0], dataRange[-1] <NEW_LINE> level = dataMin + (dataMax - dataMin) * offset <NEW_LINE> item.setLevel(level) | Convert slider value to isolevel.
:param int value: | 625941c2eab8aa0e5d26daec |
def call(name, function, *args, **kwargs): <NEW_LINE> <INDENT> thin_dest_path = _generate_tmp_path() <NEW_LINE> mkdirp_thin_argv = ['mkdir', '-p', thin_dest_path] <NEW_LINE> ret = __salt__['dockerng.run_all'](name, subprocess.list2cmdline(mkdirp_thin_argv)) <NEW_LINE> if ret['retcode'] != 0: <NEW_LINE> <INDENT> return {'result': False, 'comment': ret['stderr']} <NEW_LINE> <DEDENT> if function is None: <NEW_LINE> <INDENT> raise CommandExecutionError('Missing function parameter') <NEW_LINE> <DEDENT> thin_path = salt.utils.thin.gen_thin(__opts__['cachedir']) <NEW_LINE> with io.open(thin_path, 'rb') as file: <NEW_LINE> <INDENT> _client_wrapper('put_archive', name, thin_dest_path, file) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> salt_argv = [ 'python', os.path.join(thin_dest_path, 'salt-call'), '--metadata', '--local', '--out', 'json', '-l', 'quiet', '--', function ] + list(args) + ['{0}={1}'.format(key, value) for (key, value) in kwargs.items() if not key.startswith('__')] <NEW_LINE> ret = __salt__['dockerng.run_all'](name, subprocess.list2cmdline(map(str, salt_argv))) <NEW_LINE> if ret['retcode'] != 0: <NEW_LINE> <INDENT> raise CommandExecutionError(ret['stderr']) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> data = salt.utils.find_json(ret['stdout']) <NEW_LINE> local = data.get('local', data) <NEW_LINE> if isinstance(local, dict): <NEW_LINE> <INDENT> if 'retcode' in local: <NEW_LINE> <INDENT> __context__['retcode'] = local['retcode'] <NEW_LINE> <DEDENT> <DEDENT> return local.get('return', data) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> return {'result': False, 'comment': 'Can\'t parse container command output'} <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> rm_thin_argv = ['rm', '-rf', thin_dest_path] <NEW_LINE> __salt__['dockerng.run_all'](name, subprocess.list2cmdline(rm_thin_argv)) | Executes a salt function inside a container
CLI Example:
.. code-block:: bash
salt myminion dockerng.call test.ping
salt myminion test.arg arg1 arg2 key1=val1
The container does not need to have Salt installed, but Python
is required.
.. versionadded:: Carbon | 625941c27b25080760e393ef |
def change_channel(self, channel): <NEW_LINE> <INDENT> if self.last_channel == channel: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.fc.chanSet(channel, True) <NEW_LINE> self.last_channel = channel <NEW_LINE> self.backend.change_channel(channel) <NEW_LINE> self.__change_state(State.TUNING) | Select the correct dvbstreamer instance, change the channel
and set the primary mrl. | 625941c2e5267d203edcdc34 |
def test_sync_app_port_group_mismatch(self): <NEW_LINE> <INDENT> self.fake_marathon.add_app({ 'id': '/my-app_1', 'labels': { 'HAPROXY_GROUP': 'external', 'HAPROXY_0_GROUP': 'internal', 'HAPROXY_0_VHOST': 'example.com', 'MARATHON_ACME_0_DOMAIN': 'example.com', }, 'portDefinitions': [ {'port': 9000, 'protocol': 'tcp', 'labels': {}} ] }) <NEW_LINE> marathon_acme = self.mk_marathon_acme() <NEW_LINE> d = marathon_acme.sync() <NEW_LINE> assert_that(d, succeeded(Equals([]))) <NEW_LINE> assert_that( self.fake_marathon_api.check_called_get_apps(), Equals(True)) <NEW_LINE> assert_that(self.cert_store.as_dict(), succeeded(Equals({}))) <NEW_LINE> assert_that(self.fake_marathon_lb.check_signalled_usr1(), Equals(False)) | When a sync is run and Marathon has an app and that app has a matching
group but mismatching port group, then no certificates should be
fetched and marathon-lb should not be notified. | 625941c24a966d76dd550fa3 |
def end_subroutine(self): <NEW_LINE> <INDENT> self.sub_list.append(copy.copy(self.current_sequence)) <NEW_LINE> self.current_sequence = [] <NEW_LINE> self.is_subroutine = False | ends the current subroutine
appends current_sequence to sub_list
flushes current_sequence | 625941c2442bda511e8be3b0 |
def _create_table_objects(self): <NEW_LINE> <INDENT> class_mappings = { "entry": EntryTable, "r_ele": REleTable, "sense": SenseTable, "audit": AuditTable, "lsource": LSourceTable, "gloss": GlossTable, "links": LinksTable, "bibl": BiblTable, "entity": EntityTable, } <NEW_LINE> kv_tables = [ "k_ele", "ke_pri", "re_restr", "re_pri", "etym", "stagk", "stagr", "xref", "ant", "s_inf", "example", "pri", ] <NEW_LINE> kv_entity_tables = [ "ke_inf", "re_inf", "dial", "field", "misc", "pos", ] <NEW_LINE> for tbl in kv_tables: <NEW_LINE> <INDENT> class_mappings[tbl] = KeyValueTable <NEW_LINE> <DEDENT> for tbl in kv_entity_tables: <NEW_LINE> <INDENT> class_mappings[tbl] = KeyEntityTable <NEW_LINE> <DEDENT> table_mappings = {} <NEW_LINE> for tbl, cls in class_mappings.iteritems(): <NEW_LINE> <INDENT> table_mappings[tbl] = cls(self.cursor, tbl) <NEW_LINE> <DEDENT> return table_mappings | Creates table objects.
Returns a dictionary of table name to table object. | 625941c22ae34c7f2600d0c7 |
def create_data_iter(self, ds, batch_size): <NEW_LINE> <INDENT> data_iter = iter(ds) <NEW_LINE> def prepare_tf_data(xs): <NEW_LINE> <INDENT> def _prepare(x): <NEW_LINE> <INDENT> return x.reshape((self.num_shards, -1) + x.shape[1:]) <NEW_LINE> <DEDENT> return jax.tree_map(_prepare, xs) <NEW_LINE> <DEDENT> def to_numpy(xs): <NEW_LINE> <INDENT> return jax.tree_map( lambda x: x._numpy(), xs) <NEW_LINE> <DEDENT> def maybe_pad_batch(batch): <NEW_LINE> <INDENT> batch_pad = batch_size - batch['inputs'].shape[0] <NEW_LINE> unpadded_mask_shape = batch['inputs'].shape[0] <NEW_LINE> if batch_pad == 0: <NEW_LINE> <INDENT> if 'weights' not in batch: <NEW_LINE> <INDENT> batch['weights'] = onp.ones(unpadded_mask_shape, dtype=onp.float32) <NEW_LINE> <DEDENT> return batch <NEW_LINE> <DEDENT> def zero_pad(array): <NEW_LINE> <INDENT> pad_with = [(0, batch_pad)] + [(0, 0)] * (array.ndim - 1) <NEW_LINE> return onp.pad(array, pad_with, mode='constant') <NEW_LINE> <DEDENT> padded_batch = jax.tree_map(zero_pad, batch) <NEW_LINE> padded_batch_mask = zero_pad( onp.ones(unpadded_mask_shape, dtype=onp.float32)) <NEW_LINE> if 'weights' in padded_batch: <NEW_LINE> <INDENT> padded_batch['weights'] *= padded_batch_mask <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> padded_batch['weights'] = padded_batch_mask <NEW_LINE> <DEDENT> return padded_batch <NEW_LINE> <DEDENT> it = map(to_numpy, data_iter) <NEW_LINE> it = map(maybe_pad_batch, it) <NEW_LINE> it = map(prepare_tf_data, it) <NEW_LINE> it = jax_utils.prefetch_to_device(it, 2) <NEW_LINE> return it | Create an iterator from a tf dataset.
Args:
ds: tfds dataset; Dataset which we want to build an iterator for.
batch_size: int; Batch size for the given dataset split.
Returns:
Data iter for the given dataset. | 625941c27d847024c06be24f |
def _escape(self): <NEW_LINE> <INDENT> self.canvas.buttons.unpress_all() <NEW_LINE> self._release_mouse() | Release the mouse and remove all key presses to the camera doesn't fly off into the distance. | 625941c2925a0f43d2549e0a |
def iterConnections(connections: PipelineTaskConnections, connectionType: Union[str, Iterable[str]] ) -> typing.Generator[BaseConnection, None, None]: <NEW_LINE> <INDENT> if isinstance(connectionType, str): <NEW_LINE> <INDENT> connectionType = (connectionType,) <NEW_LINE> <DEDENT> for name in itertools.chain.from_iterable(getattr(connections, ct) for ct in connectionType): <NEW_LINE> <INDENT> yield getattr(connections, name) | Creates an iterator over the selected connections type which yields
all the defined connections of that type.
Parameters
----------
connections: `PipelineTaskConnections`
An instance of a `PipelineTaskConnections` object that will be iterated
over.
connectionType: `str`
The type of connections to iterate over, valid values are inputs,
outputs, prerequisiteInputs, initInputs, initOutputs.
Yields
-------
connection: `BaseConnection`
A connection defined on the input connections object of the type
supplied. The yielded value Will be an derived type of
`BaseConnection`. | 625941c2d4950a0f3b08c2e6 |
def get_redirect_target(invalid_targets=(), request=None): <NEW_LINE> <INDENT> if request is None: <NEW_LINE> <INDENT> request = get_request() <NEW_LINE> <DEDENT> check_target = request.values.get('_redirect_target') or request.args.get('next') or request.environ.get('HTTP_REFERER') <NEW_LINE> if not check_target: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> check_target = check_target.lstrip('/') <NEW_LINE> blog_url = request.app.cfg['blog_url'] <NEW_LINE> blog_parts = urlparse(blog_url) <NEW_LINE> check_parts = urlparse(urljoin(blog_url, check_target)) <NEW_LINE> if blog_parts[:2] != check_parts[:2]: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> current_parts = urlparse(urljoin(blog_url, request.path)) <NEW_LINE> if check_parts[:5] == current_parts[:5]: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for invalid in invalid_targets: <NEW_LINE> <INDENT> if check_parts[:5] == urlparse(urljoin(blog_url, invalid))[:5]: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> return check_target | Check the request and get the redirect target if possible.
If not this function returns just `None`. The return value of this
function is suitable to be passed to `_redirect` | 625941c210dbd63aa1bd2b39 |
@app.route("/verify/", methods=["POST"]) <NEW_LINE> @login_required <NEW_LINE> def verify(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> signatureA = request.files.get("signatureA") <NEW_LINE> signatureB = request.files.get("signatureB") <NEW_LINE> security_lvl = request.form.get("security") <NEW_LINE> filenameA = secure_filename(signatureA.filename) <NEW_LINE> signature_pathA = os.path.join(app.config['UPLOAD_FOLDER'], filenameA) <NEW_LINE> signatureA.save(signature_pathA) <NEW_LINE> filenameB = secure_filename(signatureB.filename) <NEW_LINE> signature_pathB = os.path.join(app.config['UPLOAD_FOLDER'], filenameB) <NEW_LINE> signatureB.save(signature_pathB) <NEW_LINE> security_lvl = int(security_lvl) <NEW_LINE> time_a = datetime.now() <NEW_LINE> dist, decision, same_percent, forg_percent, diff_percent = compare_signatures(signature_pathA, signature_pathB, security_lvl) <NEW_LINE> current_user.total_tests = current_user.total_tests + 1 <NEW_LINE> db.session.commit() <NEW_LINE> if(decision == 1): <NEW_LINE> <INDENT> current_user.sign_matched = current_user.sign_matched + 1 <NEW_LINE> db.session.commit() <NEW_LINE> <DEDENT> time_b = datetime.now() <NEW_LINE> current_user.total_time_taken = (time_b-time_a).total_seconds() + current_user.total_time_taken <NEW_LINE> db.session.commit() <NEW_LINE> test_ = Test(user_id=current_user.id, res_dist=dist, res_decsn=bool(decision), res_same_per=same_percent, res_forg_per=forg_percent, res_diff_per=diff_percent, signature_1=filenameA, signature_2=filenameB) <NEW_LINE> db.session.add(test_) <NEW_LINE> db.session.commit() <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> flash(u'An error occured, please try again!', 'error') <NEW_LINE> return redirect("/") <NEW_LINE> <DEDENT> if DEBUG: <NEW_LINE> <INDENT> print("type(signatureA): ", type(signatureA)) <NEW_LINE> print("type(signatureB): ", type(signatureB)) <NEW_LINE> print("type(security_lvl): ", type(security_lvl)) <NEW_LINE> <DEDENT> return 
render_template("result.html", dist=dist, decision=bool(decision), same_percent=same_percent, forg_percent=forg_percent, diff_percent=diff_percent, username=current_user.username) | accepts POST of json data
data = {
"signature_image" : image
"uuid" : uuid
} | 625941c2b5575c28eb68df94 |
def test_all_disabled(self): <NEW_LINE> <INDENT> filth_replacer = FilthReplacer(include_type=False, include_hash=False, include_count=False) <NEW_LINE> self.assertEqual( filth_replacer.filth_label(scrubadub.filth.TaggedEvaluationFilth(0, 1, 'a', comparison_type='phone')), 'FILTH' ) | Test making labels when everything is disabled | 625941c2d10714528d5ffc76 |
def post_ui_openwindow_information(self, target_id, **kwargs): <NEW_LINE> <INDENT> kwargs['_return_http_data_only'] = True <NEW_LINE> if kwargs.get('callback'): <NEW_LINE> <INDENT> return self.post_ui_openwindow_information_with_http_info(target_id, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> (data) = self.post_ui_openwindow_information_with_http_info(target_id, **kwargs) <NEW_LINE> return data | Open Information Window
Open the information window for a character, corporation or alliance inside the client ---
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.post_ui_openwindow_information(target_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int target_id: The target to open (required)
:param str datasource: The server name you would like data from
:param str token: Access token to use if unable to set a header
:param str user_agent: Client identifier, takes precedence over headers
:param str x_user_agent: Client identifier, takes precedence over User-Agent
:return: None
If the method is called asynchronously,
returns the request thread. | 625941c25fcc89381b1e1652 |
def get_data(self, **context): <NEW_LINE> <INDENT> context.update({'username': self.object.get_username(), 'full_name': self.object.get_full_name(), self.redirect_field_name: self.get_success_url()}) <NEW_LINE> return super(LoginView, self).get_data(**context) | Add to the JSON context.
:param context: A json response context.
:type context: dict
:rtype: dict | 625941c29f2886367277a824 |
def tournament_select(pop, t_size, k): <NEW_LINE> <INDENT> selected = candidate() <NEW_LINE> for i in xrange(k): <NEW_LINE> <INDENT> sample = random.sample(pop, t_size) <NEW_LINE> for c in sample: <NEW_LINE> <INDENT> if selected.fitness < c.fitness: <NEW_LINE> <INDENT> selected = c <NEW_LINE> <DEDENT> elif selected.fitness == c.fitness: <NEW_LINE> <INDENT> selected = random.choice([c, selected]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return selected | Selects a candidate psuedo-randomly from the population in a way
that preserves diversity, but also favors higher fittness.
:Returns: the selected candidate. | 625941c2ad47b63b2c509f15 |
def head(input_list): <NEW_LINE> <INDENT> return input_list[0] | Return the first element of the input list . | 625941c2bf627c535bc13164 |
def properly_paired(self, samfile=None): <NEW_LINE> <INDENT> MTSam_file = os.path.join(os.path.dirname(self.bam), "MT.bam") <NEW_LINE> pairedreads = pysam.AlignmentFile(MTSam_file, "wb", template=self.samfile) <NEW_LINE> tid = self.samfile.get_tid("chrM") <NEW_LINE> MTSam = samfile <NEW_LINE> properly_n, diff_chr_n = 0, 0 <NEW_LINE> MT_n = 0 <NEW_LINE> for read in MTSam: <NEW_LINE> <INDENT> MT_n += 1 <NEW_LINE> chr = read.reference_id <NEW_LINE> rnext = read.next_reference_id <NEW_LINE> if read.is_proper_pair: <NEW_LINE> <INDENT> properly_n = properly_n + 1 <NEW_LINE> if chr==tid and rnext==tid: <NEW_LINE> <INDENT> diff_chr_n = diff_chr_n + 1 <NEW_LINE> pairedreads.write(read) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.sample_meta_infor["MT_reads"] = MT_n <NEW_LINE> self.sample_meta_infor["properly_paired"] = properly_n <NEW_LINE> self.sample_meta_infor["sameChrom_paired"] = diff_chr_n <NEW_LINE> return(MTSam_file) | get properly paired read rnext "=" | 625941c22c8b7c6e89b35757 |
def screenview(self, screen_name, app_name, page_params=None, misc_params=(), **kwargs): <NEW_LINE> <INDENT> page = PageViewParameters(page_params, screen_name=screen_name) <NEW_LINE> app = AppTrackingParameters(name=app_name, **kwargs) <NEW_LINE> return self.request(HIT_TYPE_SCREENVIEW, page, app, *misc_params) | Generates and sends a screen view.
:param screen_name: Screen name.
:type screen_name: unicode | str
:param app_name: App name.
:type app_name: unicode | str
:param page_params: Optional additional page view parameters.
:type page_params: dict | server_tracking.google.parameters.PageViewParameters
:param misc_params: Miscellaneous parameters to add to the hit.
:type misc_params: tuple[server_tracking.parameters.UrlGenerator]
:param kwargs: Raw url parameters to update the generated url with.
:return: Varies, see :meth:`request`.
:rtype: bool | unicode | str | 625941c2a8ecb033257d3063 |
def __init__(self, device, tool, cleanup_test_files): <NEW_LINE> <INDENT> super(LinkerTestRunner, self).__init__(device, tool, cleanup_test_files) | Creates a new LinkerTestRunner.
Args:
device: Attached android device.
tool: Name of the Valgrind tool.
cleanup_test_files: Whether or not to cleanup test files on device. | 625941c2cc40096d615958e6 |
def metadata(abbr, __metadata=__metadata): <NEW_LINE> <INDENT> abbr = abbr.lower() <NEW_LINE> if abbr in __metadata: <NEW_LINE> <INDENT> return __metadata[abbr] <NEW_LINE> <DEDENT> rv = db.metadata.find_one({'_id': abbr}) <NEW_LINE> __metadata[abbr] = rv <NEW_LINE> return rv | Grab the metadata for the given two-letter abbreviation. | 625941c2b545ff76a8913dac |
def month_year_picker(request): <NEW_LINE> <INDENT> locale = current_locale() <NEW_LINE> return { 'mypicker_months_short': get_month_names('abbreviated', locale=locale), 'mypicker_months_long': get_month_names('wide', locale=locale) } | Adds localized date info for the month-year picker widget. | 625941c28e71fb1e9831d740 |
def _threadedRepublishData(self, *args): <NEW_LINE> <INDENT> if _Debug: <NEW_LINE> <INDENT> print('[DHT NODE] SINGLE republishData called, node: %r' % self.id) <NEW_LINE> <DEDENT> expiredKeys = [] <NEW_LINE> for key in self._dataStore.keys(): <NEW_LINE> <INDENT> if _Debug: <NEW_LINE> <INDENT> print('[DHT NODE] SINGLE %r' % key) <NEW_LINE> <DEDENT> if key == 'nodeState': <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> now = int(time.time()) <NEW_LINE> itemData = self._dataStore.getItem(key) <NEW_LINE> originallyPublished = itemData['originallyPublished'] <NEW_LINE> originalPublisherID = itemData['originalPublisherID'] <NEW_LINE> lastPublished = itemData['lastPublished'] <NEW_LINE> expireSeconds = itemData['expireSeconds'] <NEW_LINE> age = now - originallyPublished <NEW_LINE> if originalPublisherID == self.id: <NEW_LINE> <INDENT> if age >= constants.dataExpireTimeout: <NEW_LINE> <INDENT> twisted.internet.reactor.callFromThread( self.iterativeStore, key=key, value=itemData['value'], expireSeconds=expireSeconds, ) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if age >= constants.dataExpireTimeout: <NEW_LINE> <INDENT> expiredKeys.append(key) <NEW_LINE> <DEDENT> elif now - lastPublished >= constants.replicateInterval: <NEW_LINE> <INDENT> twisted.internet.reactor.callFromThread( self.iterativeStore, key=key, value=self._dataStore[key], originalPublisherID=originalPublisherID, age=age, expireSeconds=expireSeconds, ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for key in expiredKeys: <NEW_LINE> <INDENT> del self._dataStore[key] | Republishes and expires any stored data (i.e. stored C{(key, value
pairs)} that need to be republished/expired.
This method should run in a deferred thread | 625941c2d53ae8145f87a209 |
def get_one_user(self, user_id): <NEW_LINE> <INDENT> if isinstance(user_id, int) is False: <NEW_LINE> <INDENT> return "User Id should be a number" <NEW_LINE> <DEDENT> for user in range(len(users)): <NEW_LINE> <INDENT> if user_id != users[user]["user_id"]: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> return users[user] | fetch a specific user | 625941c294891a1f4081ba3e |
def test_analyze(scan_object): <NEW_LINE> <INDENT> print('\n >>> Start Analyze()... \n') <NEW_LINE> import talos <NEW_LINE> import glob <NEW_LINE> r = talos.Reporting(scan_object) <NEW_LINE> list_of_files = glob.glob('./test_latest/' + '/*.csv') <NEW_LINE> r = talos.Reporting(list_of_files[-1]) <NEW_LINE> r = talos.Analyze(scan_object) <NEW_LINE> r.best_params('val_loss', ['val_acc']) <NEW_LINE> r.correlate('val_loss', ['val_acc']) <NEW_LINE> r.data <NEW_LINE> r.high('val_acc') <NEW_LINE> r.low('val_acc') <NEW_LINE> r.plot_box('first_neuron', 'val_acc') <NEW_LINE> r.plot_corr('val_loss', ['val_acc']) <NEW_LINE> r.plot_hist('val_acc') <NEW_LINE> r.plot_kde('val_acc') <NEW_LINE> r.plot_line('val_acc') <NEW_LINE> r.plot_regs('val_acc', 'val_loss') <NEW_LINE> r.rounds() <NEW_LINE> r.rounds2high('val_acc') <NEW_LINE> r.table('val_loss', ['val_acc']) <NEW_LINE> print('finish Analyze() \n') | Tests all the attributes available in the Reporting() object | 625941c282261d6c526ab432 |
def test_load(self): <NEW_LINE> <INDENT> config = Configuration() <NEW_LINE> config.load(r"test_data\config.txt") <NEW_LINE> self.assertListEqual(config.sections(), ["DEFAULT", "Test"]) <NEW_LINE> self.assertEqual(config.DEFAULT["localhost"], "192.168.0.1") <NEW_LINE> self.assertEqual(config.DEFAULT["port"], 8080) <NEW_LINE> self.assertEqual(config.DEFAULT["connection_timeout"], 60) <NEW_LINE> self.assertEqual(config.Test["key1"], 100) <NEW_LINE> self.assertEqual(config.Test["key2"], 123.456) <NEW_LINE> self.assertEqual(config.Test["key3"], "True") <NEW_LINE> self.assertEqual(config.Test["key4"], "123") <NEW_LINE> self.assertEqual(config.Test["key5"], r"C:\test\nope\red\中文\英文.jpg") <NEW_LINE> self.assertEqual(config.Test["key6"], False) <NEW_LINE> self.assertListEqual(config.Test["key7"], [1, -2, 3]) <NEW_LINE> self.assertListEqual(config.Test["key8"], [1.1, -2.2, 3.3]) <NEW_LINE> self.assertListEqual(config.Test["key9"], ["1", "1.1", "True", "helloworld"]) <NEW_LINE> self.assertListEqual(config.Test["key10"], ["C:\windows", r"C:\中文"]) <NEW_LINE> self.assertListEqual(config.Test["key11"], [True, False, True, False]) <NEW_LINE> self.assertListEqual(config.Test["key12"], []) | 测试Configuration.load()方法
| 625941c25fc7496912cc3913 |
def confirm_next(self, seq): <NEW_LINE> <INDENT> for n, i in enumerate(seq): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if self.items[self.pos + n] != i: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> except IndexError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True | Returns True if each item in seq matches each corresponding item
from the current position onward. | 625941c2009cb60464c63349 |
def init_menu(self) -> None: <NEW_LINE> <INDENT> self.output_type_group = QActionGroup(self.menuOutput_Type) <NEW_LINE> self.output_type_group.addAction(self.actionPretty) <NEW_LINE> self.output_type_group.addAction(self.actionLatex) <NEW_LINE> self.output_type_group.addAction(self.actionNormal) <NEW_LINE> self.output_type_group.setExclusive(True) <NEW_LINE> self.output_type_group.triggered.connect(self.change_output_type) <NEW_LINE> action_bindings = { "actionUnicode": self.toggle_unicode, "actionLinewrap": self.toggle_line_wrap, "actionScientific_Notation": self.toggle_use_scientific, "actionAccuracy": self.change_accuracy, "actionTab_List": self.open_tab_list, "actionCopy_Exact_Answer": self.copy_exact_ans, "actionCopy_Approximate_Answer": self.copy_approx_ans, "actionNext_Tab": self.next_tab, "actionPrevious_Tab": self.previous_tab, "actionLatexFs": self.change_latex_fs, "actionUseLatex": self.toggle_use_latex, } <NEW_LINE> checkable_actions = { "actionUseLatex": self.use_latex, "actionUnicode": self.use_unicode, "actionLinewrap": self.line_wrap, } <NEW_LINE> for action in ( self.menuSettings.actions() + self.menuCopy.actions() + self.menuTab.actions() ): <NEW_LINE> <INDENT> object_name = action.objectName() <NEW_LINE> if object_name in action_bindings.keys(): <NEW_LINE> <INDENT> action.triggered.connect(action_bindings[object_name]) <NEW_LINE> <DEDENT> if object_name in checkable_actions.keys(): <NEW_LINE> <INDENT> if checkable_actions[object_name]: <NEW_LINE> <INDENT> action.setChecked(True) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> _translate = QCoreApplication.translate <NEW_LINE> if self.use_scientific: <NEW_LINE> <INDENT> self.actionScientific_Notation.setText( _translate("MainWindow", f"Scientific Notation - {self.use_scientific}") ) <NEW_LINE> <DEDENT> self.actionAccuracy.setText( _translate("MainWindow", f"Accuracy - {self.accuracy}") ) <NEW_LINE> self.actionLatexFs.setText( _translate("MainWindow", f"LaTeX font-size - {self.latex_fs}") ) <NEW_LINE> if 
self.output_type == 1: <NEW_LINE> <INDENT> self.actionPretty.setChecked(True) <NEW_LINE> <DEDENT> elif self.output_type == 2: <NEW_LINE> <INDENT> self.actionLatex.setChecked(True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.actionNormal.setChecked(True) | For the QActionGroup Output Type -> Pretty - Latex - Normal.
This couldn't be done in Qt Designer since AFAIK it doesn't support QActionGroup. | 625941c2f548e778e58cd512 |
def SH_to_RH(self, T: float, p: float = atm(), SH: float = 0.) -> Optional[float]: <NEW_LINE> <INDENT> return self.relative_humidity(T, p, hum_ratio=SH) | Args:
T:
temperature [K]
p:
pressure [Pa]
SH:
specific humidity [kg water/kg dry air]
Returns:
relative humidity [/]
OR
None if parameters out of range | 625941c20c0af96317bb817e |
def check_input(starting_files): <NEW_LINE> <INDENT> if not isinstance(starting_files, list): <NEW_LINE> <INDENT> raise TypeError("starting_files must be a list of files") <NEW_LINE> <DEDENT> for in_file in starting_files: <NEW_LINE> <INDENT> if not isinstance(in_file, str): <NEW_LINE> <INDENT> raise TypeError("file must be a string (" + str(in_file) + ")") <NEW_LINE> <DEDENT> if not os.path.exists(in_file): <NEW_LINE> <INDENT> raise Exception('Input file does not exists -> ' + in_file) <NEW_LINE> <DEDENT> <DEDENT> return None | Function that checks if a list of input files exists or not
Parameters
----------
starting_files : list
List of paths that are going to be evaluated for existance
Returns
-------
None | 625941c2cc0a2c11143dce26 |
def get_input_tree(self): <NEW_LINE> <INDENT> algorithm = fastICA() <NEW_LINE> algorithm.trait.bound = self.INTERFACE_ATTRIBUTES_ONLY <NEW_LINE> tree = algorithm.interface[self.INTERFACE_ATTRIBUTES] <NEW_LINE> for node in tree: <NEW_LINE> <INDENT> if node['name'] == 'time_series': <NEW_LINE> <INDENT> node['conditions'] = FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'], operations=["=="], values=[4]) <NEW_LINE> <DEDENT> <DEDENT> return tree | Return a list of lists describing the interface to the analyzer. This
is used by the GUI to generate the menus and fields necessary for defining a simulation. | 625941c23c8af77a43ae3734 |
def apply_filters_gcmc(p_lhs_emb,masked_filter_set): <NEW_LINE> <INDENT> filter_l_emb, filter_r_emb = 0,0 <NEW_LINE> for filter_ in masked_filter_set: <NEW_LINE> <INDENT> if filter_ is not None: <NEW_LINE> <INDENT> filter_l_emb += filter_(p_lhs_emb) <NEW_LINE> <DEDENT> <DEDENT> return filter_l_emb | Doesnt Have Masked Filters yet | 625941c2925a0f43d2549e0b |
def setBias(self, x): <NEW_LINE> <INDENT> self._bias = x <NEW_LINE> [obj.setBias(x) for obj in self._base_objs] | Replace the `bias` attribute.
0 is even, positive is towards first point, negative is towards
the second point.
Parameters:
x : float
New `bias` attribute. | 625941c2435de62698dfdbe2 |
def setDefault(key, value): <NEW_LINE> <INDENT> defaultsFromFile = AppKit.NSUserDefaults.standardUserDefaults() <NEW_LINE> defaultsFromFile.setObject_forKey_(value, key) | Set a value to the user defaults for a given key. | 625941c23cc13d1c6d3c7311 |
def serialize_numpy(self, buff, numpy): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> _x = self.camera_frame <NEW_LINE> length = len(_x) <NEW_LINE> if python3 or type(_x) == unicode: <NEW_LINE> <INDENT> _x = _x.encode('utf-8') <NEW_LINE> length = len(_x) <NEW_LINE> <DEDENT> buff.write(struct.pack('<I%ss'%length, length, _x)) <NEW_LINE> _x = self <NEW_LINE> buff.write(_struct_2I.pack(_x.stamp.secs, _x.stamp.nsecs)) <NEW_LINE> length = len(self.polygon.points) <NEW_LINE> buff.write(_struct_I.pack(length)) <NEW_LINE> for val1 in self.polygon.points: <NEW_LINE> <INDENT> _x = val1 <NEW_LINE> buff.write(_struct_3f.pack(_x.x, _x.y, _x.z)) <NEW_LINE> <DEDENT> <DEDENT> except struct.error as se: self._check_types(se) <NEW_LINE> except TypeError as te: self._check_types(te) | serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module | 625941c25fcc89381b1e1653 |
def GetPointer(self): <NEW_LINE> <INDENT> return _itkBinaryErodeImageFilterPython.itkBinaryErodeImageFilterID2ID2SE2_GetPointer(self) | GetPointer(self) -> itkBinaryErodeImageFilterID2ID2SE2 | 625941c24f6381625f1149d1 |
def delete_registration_tags(self, registration_id, tags, **kwargs): <NEW_LINE> <INDENT> kwargs['_return_http_data_only'] = True <NEW_LINE> if kwargs.get('async_req'): <NEW_LINE> <INDENT> return self.delete_registration_tags_with_http_info(registration_id, tags, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> (data) = self.delete_registration_tags_with_http_info(registration_id, tags, **kwargs) <NEW_LINE> return data | Delete tags from a Registration # noqa: E501
Deletes the specified tags from the registration. Deleting tags that do not exist will still result in a success. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_registration_tags(registration_id, tags, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str registration_id: (required)
:param TagListSchema tags: (required)
:return: None
If the method is called asynchronously,
returns the request thread. | 625941c2e1aae11d1e749c4b |
def get_app_json(self): <NEW_LINE> <INDENT> self.app_json = self._get_empty_app_json() <NEW_LINE> self.app_json['id'] = '{}/{}'.format(self.group_name, self.service_name) <NEW_LINE> for key in self.service_info: <NEW_LINE> <INDENT> method_name = '_parse_{}'.format(key) <NEW_LINE> if hasattr(self, method_name): <NEW_LINE> <INDENT> logging.info('Parsing key "%s"', key) <NEW_LINE> method_to_call = getattr(self, method_name) <NEW_LINE> method_to_call(key) <NEW_LINE> <DEDENT> <DEDENT> return self.app_json | Gets the app.json for the service in docker-compose | 625941c266656f66f7cbc140 |
def addrule(self, rule): <NEW_LINE> <INDENT> log.debug(LOG_FILTER, "enable %s ", rule) <NEW_LINE> for r in self.rules: <NEW_LINE> <INDENT> assert r.sid != rule.sid <NEW_LINE> <DEDENT> self.rules.append(rule) | Append given rule to rule list. | 625941c2ec188e330fd5a739 |
def exit_program(): <NEW_LINE> <INDENT> exit(0) | Will exit program when called | 625941c2c432627299f04bda |
def update_fileinfo_table(FileInfo, fileinfo_dict): <NEW_LINE> <INDENT> session = get_session() <NEW_LINE> query = session.query(FileInfo.id) .filter(FileInfo.imagename == fileinfo_dict['imagename']).all() <NEW_LINE> if query == []: <NEW_LINE> <INDENT> id_num = '' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> id_num = query[0][0] <NEW_LINE> <DEDENT> session.close() <NEW_LINE> insert_or_update(FileInfo, fileinfo_dict, id_num) | Insert or update a record in a file information table.
Each row needs be populated before the linked rows in the Phot
table. They are linked by 'imagename'.
Parameters:
FileInfo : Table
The table in which to insert 'fileinfo_dict'.
fileinfo_dict : dict
A dictionary containing the file information. Each key of
'fileinfo_dict' corresponds to a column in a FileInfo
table of the database.
Returns:
nothing
Outputs:
nothing
Notes:
Based on M.Bourque's `update_database.py` functions. | 625941c256ac1b37e6264169 |
def __mul__(self, otherObject: object) -> OclWrapper_Any: <NEW_LINE> <INDENT> return oclWrapper_Factory(self._wrapped * otherObject) | __mul__ method.
Note:
Delegates the __mul__ method to the wrapped object and creates an OclWrapper_Any.
Args:
otherObject (object): The other object to mul this one.
Returns:
An OclWrapper_Any wrapping the result of the operation on the wrapped object and the other object.
>>> print(oclWrapper_Factory(1) * 2)
2
>>> print(oclWrapper_Factory(1) * oclWrapper_Factory(2))
2 | 625941c23346ee7daa2b2d00 |
def __init__(self, data={}): <NEW_LINE> <INDENT> if not self.myParam: <NEW_LINE> <INDENT> self.myParam = O_Parametre(data) | Comments | 625941c226238365f5f0ee02 |
def createVocabList(dataSet): <NEW_LINE> <INDENT> vocabSet = set([]) <NEW_LINE> for document in dataSet: <NEW_LINE> <INDENT> vocabSet = vocabSet | set(document) <NEW_LINE> <DEDENT> return list(vocabSet) | 构造词集
:param dataSet:
:return: | 625941c2f9cc0f698b140593 |
@receiver(pre_save, sender=CipherSuite) <NEW_LINE> def complete_cs_instance(sender, instance, *args, **kwargs): <NEW_LINE> <INDENT> if instance.hex_byte_1 == '0x13' or instance.hex_byte_2 == '0xC6' or instance.hex_byte_2 == '0xC7': <NEW_LINE> <INDENT> name = instance.name <NEW_LINE> (prt,_,rst) = name.replace("_", " ").partition(" ") <NEW_LINE> (enc,_,hsh) = rst.rpartition(" ") <NEW_LINE> aut = "-" <NEW_LINE> kex = "-" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if re.search("EXPORT", instance.name): <NEW_LINE> <INDENT> name = instance.name.replace('EXPORT_', '') <NEW_LINE> export_cipher = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> name = instance.name <NEW_LINE> export_cipher = False <NEW_LINE> <DEDENT> (prt,_,rst) = name.replace("_", " ").partition(" ") <NEW_LINE> (kex,_,rst) = rst.partition("WITH") <NEW_LINE> if export_cipher: <NEW_LINE> <INDENT> prt += " EXPORT" <NEW_LINE> <DEDENT> (kex,_,aut) = kex.partition(" ") <NEW_LINE> (enc,_,hsh) = rst.rpartition(" ") <NEW_LINE> if re.match(r'\d+', hsh.strip()) or re.match(r'CCM\Z', hsh.strip()): <NEW_LINE> <INDENT> enc += " " + hsh <NEW_LINE> hsh = "SHA256" <NEW_LINE> <DEDENT> if kex.strip() == "PSK" and aut.strip() == "DHE": <NEW_LINE> <INDENT> kex = "DHE" <NEW_LINE> aut = "PSK" <NEW_LINE> <DEDENT> <DEDENT> aead_flag = False <NEW_LINE> if re.search(r'GCM|POLY1305|CCM', enc, re.IGNORECASE): <NEW_LINE> <INDENT> aead_flag = True <NEW_LINE> <DEDENT> if not aut: <NEW_LINE> <INDENT> instance.auth_algorithm, _ = AuthAlgorithm.objects.get_or_create( short_name=kex.strip() ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> instance.auth_algorithm, _ = AuthAlgorithm.objects.get_or_create( short_name=aut.strip() ) <NEW_LINE> <DEDENT> instance.kex_algorithm, _ = KexAlgorithm.objects.get_or_create( short_name=kex.strip() ) <NEW_LINE> instance.protocol_version, _ = ProtocolVersion.objects.get_or_create( short_name=prt.strip() ) <NEW_LINE> instance.hash_algorithm, _ = HashAlgorithm.objects.get_or_create( short_name=hsh.strip() 
) <NEW_LINE> instance.enc_algorithm, _ = EncAlgorithm.objects.update_or_create( short_name=enc.strip(), defaults={'aead_algorithm': aead_flag} ) | Derives related algorithms form instance.name of the cipher suites. | 625941c24527f215b584c3ef |
def set_List(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'List', value) | Set the value of the List input for this Choreo. ((required, string) The recipient list to be added to the specified newsletter.) | 625941c221bff66bcd6848eb |
def load(self, filename, ctx=None, allow_missing=False, ignore_extra=False, restore_prefix=''): <NEW_LINE> <INDENT> if restore_prefix: <NEW_LINE> <INDENT> for name in self.keys(): <NEW_LINE> <INDENT> assert name.startswith(restore_prefix), "restore_prefix is '%s' but Parameters name '%s' does not start " "with '%s'"%(restore_prefix, name, restore_prefix) <NEW_LINE> <DEDENT> <DEDENT> lprefix = len(restore_prefix) <NEW_LINE> loaded = [(k[4:] if k.startswith('arg:') or k.startswith('aux:') else k, v) for k, v in ndarray.load(filename).items()] <NEW_LINE> arg_dict = {restore_prefix+k: v for k, v in loaded} <NEW_LINE> if not allow_missing: <NEW_LINE> <INDENT> for name in self.keys(): <NEW_LINE> <INDENT> assert name in arg_dict, "Parameter '%s' is missing in file '%s', which contains parameters: %s. " "Please make sure source and target networks have the same prefix."%( name[lprefix:], filename, _brief_print_list(arg_dict.keys())) <NEW_LINE> <DEDENT> <DEDENT> for name in arg_dict: <NEW_LINE> <INDENT> if name not in self._params: <NEW_LINE> <INDENT> assert ignore_extra, "Parameter '%s' loaded from file '%s' is not present in ParameterDict, " "choices are: %s. Set ignore_extra to True to ignore. " "Please make sure source and target networks have the same prefix."%( name[lprefix:], filename, _brief_print_list(self._params.keys())) <NEW_LINE> continue <NEW_LINE> <DEDENT> self[name]._load_init(arg_dict[name], ctx) | Load parameters from file.
Parameters
----------
filename : str
Path to parameter file.
ctx : Context or list of Context
Context(s) initialize loaded parameters on.
allow_missing : bool, default False
Whether to silently skip loading parameters not represents in the file.
ignore_extra : bool, default False
Whether to silently ignore parameters from the file that are not
present in this ParameterDict.
restore_prefix : str, default ''
prepend prefix to names of stored parameters before loading. | 625941c23346ee7daa2b2d01 |
def includeme(config): <NEW_LINE> <INDENT> config.add_route( 'companies', "/companies", ) <NEW_LINE> config.add_route( 'company', '/companies/{id:\d+}', traverse='/companies/{id}' ) <NEW_LINE> config.add_view( CompanyList, route_name='companies', renderer='companies.mako', permission='manage', ) <NEW_LINE> config.add_view( CompanyAdd, route_name='companies', renderer="base/formpage.mako", request_param="action=add", permission="manage", ) <NEW_LINE> config.add_view( company_index, route_name='company', renderer='company_index.mako', request_param='action=index', permission='edit', ) <NEW_LINE> config.add_view( company_view, route_name='company', renderer='company.mako', permission="view", ) <NEW_LINE> config.add_view( CompanyEdit, route_name='company', renderer='base/formpage.mako', request_param='action=edit', permission="edit", ) <NEW_LINE> config.add_view( company_enable, route_name='company', request_param='action=enable', permission="edit", ) <NEW_LINE> config.add_view( company_disable, route_name='company', request_param='action=disable', permission="edit", ) <NEW_LINE> config.add_view( company_remove_employee_view, route_name="company", request_param='action=remove', permission="manage", ) <NEW_LINE> for panel, request_param in ( ('company_tasks', 'action=tasks_html',), ('company_events', 'action=events_html',), ): <NEW_LINE> <INDENT> config.add_view( make_panel_wrapper_view(panel), route_name='company', renderer="panel_wrapper.mako", request_param=request_param, permission="edit", ) | Add all company related views | 625941c231939e2706e4ce02 |
def compute_sale_purchase(self, treasury_id, date_from): <NEW_LINE> <INDENT> start_date_form = fields.Date.from_string(date_from) <NEW_LINE> start_month = start_date_form.month <NEW_LINE> start_date = '%s-%s-01'%(start_date_form.year, start_month) <NEW_LINE> end_date_form = start_date_form + relativedelta(months=12) <NEW_LINE> end_date = fields.Date.to_string(end_date_form) <NEW_LINE> match_month = {} <NEW_LINE> x = 1 <NEW_LINE> while x <= 12: <NEW_LINE> <INDENT> match_month[start_month] = 'month%s'%(x) <NEW_LINE> start_month = (start_date_form + relativedelta(months=x)).month <NEW_LINE> x += 1 <NEW_LINE> <DEDENT> revenue, expenses, balance = self.compute_revenue(start_date, end_date, match_month) <NEW_LINE> revenue, expenses, balance = self.compute_expense(start_date, end_date, revenue, expenses, balance, match_month) <NEW_LINE> old_lines_ids = self.search([('treasury_id', '=', treasury_id)]) <NEW_LINE> if old_lines_ids: <NEW_LINE> <INDENT> for line in old_lines_ids: <NEW_LINE> <INDENT> if line.type == 'revenue': <NEW_LINE> <INDENT> line.write(revenue) <NEW_LINE> <DEDENT> elif line.type == 'expense': <NEW_LINE> <INDENT> line.write(expenses) <NEW_LINE> <DEDENT> elif line.type == 'balance_sheet': <NEW_LINE> <INDENT> line.write(balance) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> vals = {'name': _('Revenue'), 'sequence': 1, 'type': 'revenue', 'treasury_id': treasury_id} <NEW_LINE> vals.update(revenue) <NEW_LINE> self.create(vals) <NEW_LINE> vals = {'name': _('Expenses'), 'sequence': 2, 'type': 'expense', 'treasury_id': treasury_id} <NEW_LINE> vals.update(expenses) <NEW_LINE> self.create(vals) <NEW_LINE> vals = {'name': _('Balance sheet'), 'sequence': 3, 'type': 'balance_sheet', 'treasury_id': treasury_id} <NEW_LINE> vals.update(balance) <NEW_LINE> self.create(vals) <NEW_LINE> <DEDENT> return True | Fonction permettant de calculer et de créer les lignes de ventes et achats en cours et
du bilan des ventes et achats
:param treasury_id: L'id de la trésorerie à assigner à la ligne
:type treasury_id: integer
:param date_from: Date de début pour la recherche des achats et ventes
:type date_from: string
:return: True
:rtype: boolean | 625941c231939e2706e4ce03 |
def p_estat(p): <NEW_LINE> <INDENT> pass | estat : statement_b
| eword | 625941c27d43ff24873a2c35 |
def get_all_collections_for_all_databases(self, system=False): <NEW_LINE> <INDENT> return { db: self.get_collections(db) for db in self.get_databases(system=system) } | Return a dict of database names and their collections
- system: if True, include the system dbs 'admin', 'config', and 'local' | 625941c266673b3332b92027 |
def make2D(var,style,name): <NEW_LINE> <INDENT> hist = ROOT.TH2F(name,name,var[3][0],var[3][1],var[3][2],var[3][3],var[3][4],var[3][5]) <NEW_LINE> if style["fill"]: <NEW_LINE> <INDENT> style["fill"].Copy(hist) <NEW_LINE> <DEDENT> if style["line"]: <NEW_LINE> <INDENT> style["line"].Copy(hist) <NEW_LINE> <DEDENT> if style["marker"]: <NEW_LINE> <INDENT> style["marker"].Copy(hist) <NEW_LINE> <DEDENT> hist.GetYaxis().SetTitle(var[2].split(":")[0]) <NEW_LINE> hist.GetYaxis().SetTitleSize(0.07) <NEW_LINE> hist.GetYaxis().SetTitleFont(42) <NEW_LINE> hist.GetYaxis().SetTitleOffset(1.2) <NEW_LINE> hist.GetXaxis().SetTitle(var[2].split(":")[1]) <NEW_LINE> hist.GetXaxis().SetLabelFont(42) <NEW_LINE> hist.GetYaxis().SetLabelSize(0.05) <NEW_LINE> hist.GetXaxis().SetTitleOffset(1.1) <NEW_LINE> hist.SetTitle(style["Label"]) <NEW_LINE> return hist | A functon to make a 1D histogram and set it's style | 625941c25510c4643540f37f |
def basic_auth_handler( url: str, method: str, timeout: Optional[float], headers: List[Tuple[str, str]], data: bytes, username: str = None, password: str = None, ) -> Callable[[], None]: <NEW_LINE> <INDENT> def handle(): <NEW_LINE> <INDENT> if username is not None and password is not None: <NEW_LINE> <INDENT> auth_value = f'{username}:{password}'.encode() <NEW_LINE> auth_token = base64.b64encode(auth_value) <NEW_LINE> auth_header = b'Basic ' + auth_token <NEW_LINE> headers.append(('Authorization', auth_header)) <NEW_LINE> <DEDENT> default_handler(url, method, timeout, headers, data)() <NEW_LINE> <DEDENT> return handle | Handler that implements HTTP/HTTPS connections with Basic Auth.
Sets auth headers using supplied 'username' and 'password', if set.
Used by the push_to_gateway functions. Can be re-used by other handlers. | 625941c2dc8b845886cb54ca |
def stopTimeSelection(self): <NEW_LINE> <INDENT> diff = time.time() - self.startRefSelection <NEW_LINE> self.globalSelection += diff | Tracks Selection Time | 625941c2462c4b4f79d1d666 |
def _update_node(self, node_id, next_bit, child_bit, child_params, dry_run=False): <NEW_LINE> <INDENT> assert next_bit in (0, 1) <NEW_LINE> a, b = self.arr_a[node_id], self.arr_b[node_id] <NEW_LINE> lpe = self.arr_lpe[node_id] <NEW_LINE> if next_bit == 0: <NEW_LINE> <INDENT> new_lpe = lpe + np.log((a + 0.5) / (a + b + 1)) <NEW_LINE> new_a, new_b = a + 1, b <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_lpe = lpe + np.log((b + 0.5) / (a + b + 1)) <NEW_LINE> new_a, new_b = a, b + 1 <NEW_LINE> <DEDENT> children = self.arr_children[node_id, :] <NEW_LINE> if all(children == self.NO_CHILD): <NEW_LINE> <INDENT> new_lpw = new_lpe <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> child_lpw_sum = 0 <NEW_LINE> for s in range(self.context_syms): <NEW_LINE> <INDENT> child_lpw_sum += child_params.lpw if child_bit == s else self.get_lpw(children[s], 0) <NEW_LINE> <DEDENT> new_lpw = np.logaddexp(np.log(self.lpw_weight) + new_lpe, np.log(1 - self.lpw_weight) + child_lpw_sum) <NEW_LINE> <DEDENT> if not dry_run: <NEW_LINE> <INDENT> self.arr_a[node_id] = new_a <NEW_LINE> self.arr_b[node_id] = new_b <NEW_LINE> self.arr_lpe[node_id] = new_lpe <NEW_LINE> self.arr_lpw[node_id] = new_lpw <NEW_LINE> <DEDENT> return NodeParams(new_a, new_b, new_lpe, new_lpw) | Update a, b, lpe, lpw for the given node. | 625941c2498bea3a759b9a46 |
def _load_translator_from_file( self, module_name, file_name, directory = '', search_delimiters = '_', suffix = '.qm' ): <NEW_LINE> <INDENT> from camelot.core.resources import resource_string <NEW_LINE> file_name_parts = [ file_name ] <NEW_LINE> head, tail = os.path.split( file_name_parts[0] ) <NEW_LINE> while tail: <NEW_LINE> <INDENT> file_name_parts[0] = tail <NEW_LINE> file_name_parts = [ head ] + file_name_parts <NEW_LINE> head, tail = os.path.split( file_name_parts[0] ) <NEW_LINE> <DEDENT> file_name_parts_possibilities = [] <NEW_LINE> for file_name_part in file_name_parts: <NEW_LINE> <INDENT> part_possibilities = [] <NEW_LINE> for search_delimiter in search_delimiters: <NEW_LINE> <INDENT> delimited_parts = file_name_part.split( search_delimiter ) <NEW_LINE> for i in range( len( delimited_parts ) ): <NEW_LINE> <INDENT> part_possibility = search_delimiter.join( delimited_parts[:len(delimited_parts)-i] ) <NEW_LINE> part_possibilities.append( part_possibility ) <NEW_LINE> <DEDENT> <DEDENT> file_name_parts_possibilities.append( part_possibilities ) <NEW_LINE> <DEDENT> file_names = [] <NEW_LINE> for parts_possibility in itertools.product( *file_name_parts_possibilities ): <NEW_LINE> <INDENT> file_name = os.path.join( *parts_possibility ) <NEW_LINE> file_names.append( file_name ) <NEW_LINE> file_names.append( file_name + suffix ) <NEW_LINE> <DEDENT> translations = None <NEW_LINE> for file_name in file_names: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> logger.debug( u'try %s'%file_name ) <NEW_LINE> translations = resource_string( module_name, os.path.join(directory,file_name) ) <NEW_LINE> break <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> if translations: <NEW_LINE> <INDENT> _translations_data_.append( translations ) <NEW_LINE> translator = QtCore.QTranslator() <NEW_LINE> if not hasattr( translator, 'loadFromData' ): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if translator.loadFromData( translations ): <NEW_LINE> 
<INDENT> logger.info("add translation %s" % (directory + file_name)) <NEW_LINE> return translator | Tries to create a translator based on a file stored within a module.
The file is loaded through the pkg_resources, to enable loading it from
within a Python egg. This method tries to mimic the behavior of
:meth:`QtCore.QTranslator.load` while looking for an appropriate
translation file.
:param module_name: the name of the module in which to look for
the translation file with pkg_resources.
:param file_name: the filename of the the tranlations file, without
suffix
:param directory: the directory, relative to the module in which
to look for translation files
:param suffix: the suffix of the filename
:param search_delimiters: list of characters by which to split the file
name to search for variations of the file name
:return: :keyword:None if unable to load the file, otherwise a
:obj:`QtCore.QTranslator` object.
This method tries to load all file names with or without suffix, and
with or without the part after the search delimiter. | 625941c2d6c5a10208143fdf |
def load_pretrained_model_mat(colornet_wpath): <NEW_LINE> <INDENT> print('Loading pretrained model... (it could take a while)') <NEW_LINE> base_model = VGG16(weights=colornet_wpath) <NEW_LINE> model = Model(input=base_model.input, output=base_model.get_layer('block4_pool').output) <NEW_LINE> print('Model loaded!') <NEW_LINE> return(model) | Load Emil's pretrained model | 625941c299cbb53fe6792b7d |
def run_prediction(config, locations=None): <NEW_LINE> <INDENT> if config.info: <NEW_LINE> <INDENT> print("Starting prediction process....") <NEW_LINE> <DEDENT> if locations is not None: <NEW_LINE> <INDENT> cache_m = CacheManager(locations=locations) <NEW_LINE> <DEDENT> if config.print_pred: <NEW_LINE> <INDENT> print_prediction(config) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> build_ensemble = False <NEW_LINE> predictor = Predictor(config, build_ensemble=build_ensemble) <NEW_LINE> predictor.run() | Main training function, to work as a new process | 625941c250485f2cf553cd2f |
def run_cmd(self, cmd, args, errorclass): <NEW_LINE> <INDENT> cmdargs = self.process_cmd_args(cmd, args) <NEW_LINE> try: <NEW_LINE> <INDENT> p=subprocess.Popen( cmdargs, env=ZFS_ENV, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) <NEW_LINE> <DEDENT> except OSError as e: <NEW_LINE> <INDENT> if errorclass != None and e.errno == errno.ENOENT: <NEW_LINE> <INDENT> raise errorclass() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise e <NEW_LINE> <DEDENT> <DEDENT> out,err=p.communicate() <NEW_LINE> rc=p.returncode <NEW_LINE> logging.debug('command %s returned result code %d' % (str([cmdargs]),rc)) <NEW_LINE> return (out,err,rc) | wrap subprocess.Popen with the ZFS environment
instantiates a subprocess object, and raises the specified errorclass if
the subprocess call raises an OSError with errno of 2 (ENOENT)
See :py:func:`ZfsCommandRunner.run_cmd` for a description of the
parameters and the return values | 625941c2cdde0d52a9e52fc7 |
def compute_integral_constants(self, T, P, lattice_sz, kappa): <NEW_LINE> <INDENT> Pfactor = self.hydrate_size(T_0, P, 1.0, kappa, dim='linear') <NEW_LINE> a_factor = (lattice_sz/self.Hs.a_norm)*self.lattice_Tfactor*Pfactor <NEW_LINE> for ii in range(len(self.Hs.R['sm'])): <NEW_LINE> <INDENT> self.R_sm[ii] = self.Hs.R['sm'][ii + 1]*a_factor <NEW_LINE> <DEDENT> for ii in range(len(self.Hs.R['lg'])): <NEW_LINE> <INDENT> self.R_lg[ii] = self.Hs.R['lg'][ii + 1]*a_factor | Function to compute integral for langmuir constant calculation
Parameters
----------
T : float
Temperature in Kelvin
P : float
Pressure in bar
lattice_sz : float
Size of filled hydrate lattice
kappa : float
Compressibility of filled hydrate | 625941c2187af65679ca50b4 |
def __init__(self, image: bytearray, app_descriptor_offset: int, byte_order: str): <NEW_LINE> <INDENT> self._image = bytearray(image) <NEW_LINE> self._offset = int(app_descriptor_offset) <NEW_LINE> self._byte_order = str(byte_order) <NEW_LINE> if AppDescriptor.unpack_from(self._image, self._byte_order, self._offset) is None: <NEW_LINE> <INDENT> raise ValueError("The provided image does not contain an app descriptor at the specified offset") <NEW_LINE> <DEDENT> if len(self._image) % AppDescriptor.ALIGNMENT != 0 or self._offset % AppDescriptor.ALIGNMENT != 0: <NEW_LINE> <INDENT> raise ValueError("Bad alignment") <NEW_LINE> <DEDENT> if len(self._image) <= AppDescriptor.SIZE or not (0 <= self._offset < len(self._image)): <NEW_LINE> <INDENT> raise ValueError("Bad sizing") | Do not construct instances manually. Use the factory method instead. | 625941c29c8ee82313fbb70b |
def display_plots(self): <NEW_LINE> <INDENT> plots=self.plotgroup.plots <NEW_LINE> self.zoomed_images = [ImageTk.PhotoImage(p.bitmap.image) for p in plots] <NEW_LINE> old_canvases = self.canvases <NEW_LINE> self.canvases = [Canvas(self.plot_container, width=image.width(), height=image.height(), borderwidth=1,highlightthickness=0, relief='groove') for image in self.zoomed_images] <NEW_LINE> for i,image,canvas in zip(range(len(self.zoomed_images)), self.zoomed_images,self.canvases): <NEW_LINE> <INDENT> canvas.grid(row=i//self.plotgroup.proj_plotting_shape[1], column=i%self.plotgroup.proj_plotting_shape[1], padx=UNIT_PADDING,pady=UNIT_PADDING) <NEW_LINE> canvas.create_image(1,1,anchor='nw',image=image) <NEW_LINE> <DEDENT> for c in old_canvases: <NEW_LINE> <INDENT> c.grid_forget() <NEW_LINE> <DEDENT> self._add_canvas_bindings() | CFProjectionPanel requires a 2D grid of plots. | 625941c2e1aae11d1e749c4c |
def raise_on_exception(e): <NEW_LINE> <INDENT> raise e | Raises exception e | 625941c2a934411ee375162a |
def largestOverlap(self, A, B): <NEW_LINE> <INDENT> ans = 0 <NEW_LINE> d = collections.defaultdict(int) <NEW_LINE> a = [] <NEW_LINE> b = [] <NEW_LINE> for i in range(len(A)): <NEW_LINE> <INDENT> for j in range(len(A[0])): <NEW_LINE> <INDENT> if A[i][j] == 1: <NEW_LINE> <INDENT> a.append((i, j)) <NEW_LINE> <DEDENT> if B[i][j] == 1: <NEW_LINE> <INDENT> b.append((i, j)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for aa in a: <NEW_LINE> <INDENT> for bb in b: <NEW_LINE> <INDENT> dd = (bb[0] - aa[0], bb[1] - aa[1]) <NEW_LINE> d[dd] += 1 <NEW_LINE> ans = max(ans, d[dd]) <NEW_LINE> <DEDENT> <DEDENT> return ans | :type A: List[List[int]]
:type B: List[List[int]]
:rtype: int | 625941c2cdde0d52a9e52fc8 |
def resnext50_32x4d(pretrained=False, progress=True, **kwargs): <NEW_LINE> <INDENT> kwargs['groups'] = 32 <NEW_LINE> kwargs['width_per_group'] = 4 <NEW_LINE> return _resnet('resnext50_32x4d', Bottleneck, [3, 4, 6, 3], pretrained, progress, **kwargs) | Constructs a ResNeXt-50 32x4d model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
progress (bool): If True, displays a progress bar of the download to stderr | 625941c292d797404e304120 |
def convert_waveforms(self, waveforms, n_chans, transitions): <NEW_LINE> <INDENT> dt = max(self.get_minimum_period(n_chans).values()) * 5e-9 <NEW_LINE> transitions_map = dict() <NEW_LINE> for ch, wfm in waveforms.items(): <NEW_LINE> <INDENT> ch = int(ch) <NEW_LINE> for wfe_path, (encoding, wfe) in wfm.items(): <NEW_LINE> <INDENT> if encoding == 'step': <NEW_LINE> <INDENT> for timestamp, value in wfe: <NEW_LINE> <INDENT> transitions_map.setdefault(timestamp, {})[ch] = ('set_frequency', value) <NEW_LINE> <DEDENT> <DEDENT> elif encoding == 'linear': <NEW_LINE> <INDENT> t0 = wfe[0][0] <NEW_LINE> t1 = wfe[1][0] <NEW_LINE> freq0 = wfe[0][1] <NEW_LINE> freq1 = wfe[1][1] <NEW_LINE> freq_last = wfe[-1][1] <NEW_LINE> transitions_map.setdefault(t0, {})[ch] = ('set_frequency_ramp', freq0, freq1, freq_last, (t1-t0)*dt) <NEW_LINE> for (t0,f0), (t1,f1) in zip(wfe[1:-1], wfe[2:]): <NEW_LINE> <INDENT> transitions_map.setdefault(t0, {})[ch] = ('update_frequency_ramp', f0, f1, (t1-t0)*dt) <NEW_LINE> <DEDENT> transitions_map.setdefault(wfe[-1][0], {})[ch] = ('set_frequency', freq_last) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise RuntimeError( 'bbb.dds: Unsupported waveform encoding [{}]'.format(encoding) ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if set(transitions_map) != set(transitions): <NEW_LINE> <INDENT> raise RuntimeError( 'DDS cannot currently share clock source with other devices') <NEW_LINE> <DEDENT> waveform = list(transitions_map.items()) <NEW_LINE> waveform.sort(key = lambda timestep_data: timestep_data[0]) <NEW_LINE> return [wi[1] for wi in waveform] | Set the output waveforms for the AFRL/BeagleBone Black device.
:param waveforms:
a dict('channel': {<wfe-path>: (<encoding>, [(t1, val1), (t2, val2)])})
(see gui/plotter/digital.py for format)
where wfe-path means waveform element path, referring to
the unique identifier of the specific waveform element a
particular set of values refers to.
:param transitions: a dict('channel': { 'dt': dt,
'transitions': iterable})
(see processor/engine/compute.py for format)
:param t_max: the maximum duration of any channel in units of time.
:param continuous: bool of continuous or single-shot mode
:param n_chans: The number of channels configured for output. This is used
to define the base time unit using get_minimum_period(...). | 625941c215fb5d323cde0aa3 |
def compare_filters(unfiltered_df, body_part, fs = 30): <NEW_LINE> <INDENT> body_part_x = unfiltered_df[body_part + '_x'] <NEW_LINE> body_part_y = unfiltered_df[body_part + '_y'] <NEW_LINE> frame = np.arange(len(body_part_x)) <NEW_LINE> time = frame / fs <NEW_LINE> body_part_x_l_filterd = load_data.smooth_data(body_part_x, n = 20, method = 'lfilt') <NEW_LINE> body_part_x_savgol_filtered = load_data.smooth_data(body_part_x, method = 'savgol', savgol_window=51, savgol_degree=3) <NEW_LINE> plt.figure() <NEW_LINE> ax1 = plt.subplot(1, 1, 1) <NEW_LINE> ax1.plot(time, body_part_x) <NEW_LINE> ax1.plot(time, body_part_x_l_filterd) <NEW_LINE> ax1.plot(time, body_part_x_savgol_filtered) <NEW_LINE> ax1.legend(['No filter', 'Linear filter', 'Savgol filter'], frameon = False) <NEW_LINE> plt.title(['Male nose x position']) <NEW_LINE> sns.despine(top = True, right = True) <NEW_LINE> plt.show() <NEW_LINE> plt.figure() <NEW_LINE> ax2 = plt.subplot(1, 1, 1) <NEW_LINE> ax2.plot(fftpack.fft(body_part_x)) <NEW_LINE> ax2.plot(fftpack.fft(body_part_x_l_filterd)) <NEW_LINE> ax2.plot(fftpack.fft(body_part_x_savgol_filtered)) <NEW_LINE> ax2.legend(['No filter', 'Linear filter', 'Savgol filter'], frameon = False) <NEW_LINE> plt.title(['Male nose x position']) <NEW_LINE> ax2.set_xlabel('Frequency (Hz)') <NEW_LINE> ax2.set_ylabel('Power') <NEW_LINE> sns.despine(top = True, right = True) <NEW_LINE> plt.show() <NEW_LINE> plt.figure() <NEW_LINE> ax2 = plt.subplot(1, 1, 1) <NEW_LINE> freq, power = plot_freq_spectrum(body_part_x) <NEW_LINE> ax2.plot(freq, power) <NEW_LINE> freq, power = plot_freq_spectrum(body_part_x_l_filterd) <NEW_LINE> ax2.plot(freq, power) <NEW_LINE> freq, power = plot_freq_spectrum(body_part_x_savgol_filtered) <NEW_LINE> ax2.plot(freq, power) <NEW_LINE> ax2.legend(['No filter', 'Linear filter', 'Savgol filter'], frameon = False) <NEW_LINE> plt.title(['Male nose x position']) <NEW_LINE> ax2.set_xlabel('Frequency (Hz)') <NEW_LINE> ax2.set_ylabel('Power') <NEW_LINE> 
sns.despine(top = True, right = True) <NEW_LINE> plt.show() | Plots signal before and after filtering, and also compares between filters
:param unfiltered_df:
:param body_part:
:return: | 625941c21f5feb6acb0c4aea |
def downgrade(): <NEW_LINE> <INDENT> db_dialect = op.get_context().dialect <NEW_LINE> if 'postgresql' in db_dialect.name: <NEW_LINE> <INDENT> _postgresql_downgrade_part1_ddl() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Functions were not downgraded. " "'%s' is not a supported database dialect." % db_dialect.name) <NEW_LINE> return <NEW_LINE> <DEDENT> op.drop_column('node_feature', 'time_stamp') <NEW_LINE> if 'postgresql' in db_dialect.name: <NEW_LINE> <INDENT> _postgresql_downgrade_part2_ddl() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Functions were not downgraded. " "'%s' is not a supported database dialect." % db_dialect.name) <NEW_LINE> return | Remove the 'time_stamp' column from 'node_feature' table and restore
all dependencies affected by this change.
1) Restore all dependent stored procedures
2) Drop the 'time_stamp' column
3) Restore the view | 625941c28a349b6b435e810a |
def test_clear_input_checked_returns_false(self): <NEW_LINE> <INDENT> value = self.widget.value_from_datadict( data={'myfile-clear': True}, files={}, name='myfile', ) <NEW_LINE> self.assertIs(value, False) | ClearableFileInput.value_from_datadict returns False if the clear
checkbox is checked, if not required. | 625941c201c39578d7e74dd2 |
def apply_palette(image, palette, t_index): <NEW_LINE> <INDENT> image = image.convert('RGBA') <NEW_LINE> pixels = image.tobytes() <NEW_LINE> t_value = (t_index in range(256)) and pack('!B', t_index) or None <NEW_LINE> mapping = {} <NEW_LINE> indexes = [] <NEW_LINE> for offset in range(0, len(pixels), 4): <NEW_LINE> <INDENT> r, g, b, a = unpack('!BBBB', pixels[offset:offset+4]) <NEW_LINE> if a < 0x80 and t_value is not None: <NEW_LINE> <INDENT> indexes.append(t_value) <NEW_LINE> continue <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> indexes.append(mapping[(r, g, b)]) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> mapping[(r, g, b)] = pack('!B', palette_color(r, g, b, palette, t_index)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> indexes.append(mapping[(r, g, b)]) <NEW_LINE> <DEDENT> if hasattr(Image, 'frombytes'): <NEW_LINE> <INDENT> output = Image.frombytes('P', image.size, ''.join(indexes)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> output = Image.fromstring('P', image.size, ''.join(indexes)) <NEW_LINE> <DEDENT> bits = int(ceil(log(len(palette)) / log(2))) <NEW_LINE> palette += [(0, 0, 0)] * (256 - len(palette)) <NEW_LINE> palette = reduce(add, palette) <NEW_LINE> output.putpalette(palette) <NEW_LINE> return output | Apply a palette array to an image, return a new image.
| 625941c2656771135c3eb803 |
def count_example(vdb): <NEW_LINE> <INDENT> print("Counting records with ALT=A,T...") <NEW_LINE> keys = [(vcfdb.ALT, "A,T")] <NEW_LINE> print("\tcount value = ", vdb.get_num_records(keys)) <NEW_LINE> c, s = time_and_count(vdb.get_records(keys)) <NEW_LINE> print("\tfound = ", c, " in ", s, " seconds") <NEW_LINE> print("Counting records with FILTER=HARD_TO_VALIDATE;InDel...:") <NEW_LINE> keys = [(vcfdb.FILTER, "HARD_TO_VALIDATE;InDel")] <NEW_LINE> print("\tcount value = ", vdb.get_num_records(keys)) <NEW_LINE> c, s = time_and_count(vdb.get_records(keys)) <NEW_LINE> print("\tfound = ", c, " in ", s, " seconds") | Shows an example of counting records from the various indexes. | 625941c2f9cc0f698b140594 |
def clear_descendant_family_id_terms(self): <NEW_LINE> <INDENT> self._clear_terms('descendantFamilyId') | Clears the descendant family ``Id`` terms.
*compliance: mandatory -- This method must be implemented.* | 625941c2507cdc57c6306c6d |
def _parse_object(_object): <NEW_LINE> <INDENT> if "resource_uri" in _object: <NEW_LINE> <INDENT> del _object["resource_uri"] <NEW_LINE> <DEDENT> keys_to_parse = ["collection", "item", "version", "app", "tag"] <NEW_LINE> for key in keys_to_parse: <NEW_LINE> <INDENT> if key in _object: <NEW_LINE> <INDENT> _object[key] = _object[key].split("/")[-2] <NEW_LINE> <DEDENT> <DEDENT> keys_to_parse = ["collections", "tags"] <NEW_LINE> for key in keys_to_parse: <NEW_LINE> <INDENT> if key in _object: <NEW_LINE> <INDENT> _object[key] = map(lambda uri: uri.split("/")[-2], _object[key]) <NEW_LINE> <DEDENT> <DEDENT> return _object | Return a parsed object. Ugly API urls are replaced by UUIDs | 625941c276e4537e8c351608 |
def test_comp_fsl_board_services_different_cancel_discount(self): <NEW_LINE> <INDENT> expected_board_service_sale_lines = 2 <NEW_LINE> r_test = self.env["pms.reservation"].create( { "pms_property_id": self.pms_property1.id, "checkin": datetime.datetime.now(), "checkout": datetime.datetime.now() + datetime.timedelta(days=3), "adults": 2, "room_type_id": self.room_type_double.id, "partner_id": self.env.ref("base.res_partner_12").id, "board_service_room_id": self.board_service_room_type.id, } ) <NEW_LINE> r_test.service_ids[0].service_line_ids[0].cancel_discount = 1.0 <NEW_LINE> self.assertEqual( expected_board_service_sale_lines, len( r_test.folio_id.sale_line_ids.filtered( lambda x: not x.display_type and x.is_board_service ) ), "Folio should contain {} board service sale lines".format( expected_board_service_sale_lines ), ) | Check that the board services of reservation with different cancel
discounts should generate several sale lines.
----------------
Create a reservation of 2 nights, for a double room with a board service
room per night. Then change the cancel discount of the first board service line
to 1.0 and it is verified that the length of the sale lines of the board services
in the reservation is equal to 2 because there are 2 different board service
cancel discounts in the reservation. | 625941c2fb3f5b602dac3628 |
def __init__(self, x=None, y=None): <NEW_LINE> <INDENT> self.x = x <NEW_LINE> self.y = y <NEW_LINE> self.isBottom = False <NEW_LINE> self.isTop = False <NEW_LINE> self.isLeft = False | docstring | 625941c22ae34c7f2600d0c8 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.