code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def test204_login(self): <NEW_LINE> <INDENT> print('login') <NEW_LINE> try: <NEW_LINE> <INDENT> response = Service.Service().login(Common.LoginPwd, 66666, Common.teleNum) <NEW_LINE> Common.search_str(str(response), [self.SUCCESS_RESULT]) <NEW_LINE> authorization = response['data']['authorization'] <NEW_LINE> Utils.Utils.headers['Authorization'] = authorization <NEW_LINE> <DEDENT> except Exception as ex: <NEW_LINE> <INDENT> raise ex
用户登录
625941bf91af0d3eaac9b955
def __contains__(self, x): <NEW_LINE> <INDENT> if isinstance(x, str): <NEW_LINE> <INDENT> x = FeatureGroup(x) <NEW_LINE> <DEDENT> return x in self.__s
Return True if the feature set contains either given feature or given feature group This function relies on the fact that FeatureGroup and Feature compare as equal if their names match. This is to allow, for example, NUMBER.plural == NUMBER return True. >>> NUMBER = FeatureGroup('NUMBER', 'singular', 'plural') >>> fs = FeatureSet([NUMBER.plural]) >>> NUMBER in fs True >>> NUMBER.singular in fs False >>> NUMBER.plural in fs True
625941bfa934411ee37515d2
def is_valid_url(variable): <NEW_LINE> <INDENT> if re.match(r"https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{2,256}\.[a-z]{2,6}\b([-a-zA-Z0-9@:%_\+.~#?&//=]*)", variable): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False
Check if email is a valid mail.
625941bf2ae34c7f2600d070
def destroy(vm_, call=None): <NEW_LINE> <INDENT> destroy_opt = __opts__.get('destroy', False) <NEW_LINE> profiles = __opts__.get('profiles', {}) <NEW_LINE> profile = __opts__.get('profile', __opts__.get('internal_lxc_profile', [])) <NEW_LINE> path = None <NEW_LINE> if profile and profile in profiles: <NEW_LINE> <INDENT> path = profiles[profile].get('path', None) <NEW_LINE> <DEDENT> action = __opts__.get('action', '') <NEW_LINE> if action != 'destroy' and not destroy_opt: <NEW_LINE> <INDENT> raise SaltCloudSystemExit( 'The destroy action must be called with -d, --destroy, ' '-a or --action.' ) <NEW_LINE> <DEDENT> if not get_configured_provider(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> ret = {'comment': '{0} was not found'.format(vm_), 'result': False} <NEW_LINE> if _salt('lxc.info', vm_, path=path): <NEW_LINE> <INDENT> __utils__['cloud.fire_event']( 'event', 'destroying instance', 'salt/cloud/{0}/destroying'.format(vm_), {'name': vm_, 'instance_id': vm_}, transport=__opts__['transport'] ) <NEW_LINE> cret = _salt('lxc.destroy', vm_, stop=True, path=path) <NEW_LINE> ret['result'] = cret['result'] <NEW_LINE> if ret['result']: <NEW_LINE> <INDENT> ret['comment'] = '{0} was destroyed'.format(vm_) <NEW_LINE> __utils__['cloud.fire_event']( 'event', 'destroyed instance', 'salt/cloud/{0}/destroyed'.format(vm_), {'name': vm_, 'instance_id': vm_}, transport=__opts__['transport'] ) <NEW_LINE> if __opts__.get('update_cachedir', False) is True: <NEW_LINE> <INDENT> __utils__['cloud.delete_minion_cachedir'](vm_, __active_provider_name__.split(':')[0], __opts__) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return ret
Destroy a lxc container
625941bf0a50d4780f666dcf
def get_cbm_vbm(self, tol=0.001, abs_tol=False, spin=None): <NEW_LINE> <INDENT> tdos = self.get_densities(spin) <NEW_LINE> if not abs_tol: <NEW_LINE> <INDENT> tol = tol * tdos.sum() / tdos.shape[0] <NEW_LINE> <DEDENT> i_fermi = 0 <NEW_LINE> while self.energies[i_fermi] <= self.efermi: <NEW_LINE> <INDENT> i_fermi += 1 <NEW_LINE> <DEDENT> i_gap_start = i_fermi <NEW_LINE> while i_gap_start - 1 >= 0 and tdos[i_gap_start - 1] <= tol: <NEW_LINE> <INDENT> i_gap_start -= 1 <NEW_LINE> <DEDENT> i_gap_end = i_gap_start <NEW_LINE> while i_gap_end < tdos.shape[0] and tdos[i_gap_end] <= tol: <NEW_LINE> <INDENT> i_gap_end += 1 <NEW_LINE> <DEDENT> i_gap_end -= 1 <NEW_LINE> return self.energies[i_gap_end], self.energies[i_gap_start]
Expects a DOS object and finds the cbm and vbm. Args: tol: tolerance in occupations for determining the gap abs_tol: an absolute tolerance (True) and a relative one (False) spin: Possible values are: None - finds the gap in the summed densities Up - finds the gap in the up spin channel Down - finds the gap in teh down spin channel Returns: (cbm, vbm): float in eV corresponding to the gap
625941bf8a43f66fc4b53fa6
def add_match_to_profile(self, user_id, matched_interest, force=False): <NEW_LINE> <INDENT> if 'user_id' in g: <NEW_LINE> <INDENT> user = self.user_store.query.get(g.user_id) <NEW_LINE> g.user = user <NEW_LINE> <DEDENT> user = self.db.session.query(Profile).filter_by(user_id=g.user_id).first() <NEW_LINE> matched_user = self.db.session.query(Profile).filter_by(user_id=user_id).first() <NEW_LINE> user_match_history = user.match_history <NEW_LINE> matched_user_match_history = matched_user.match_history <NEW_LINE> if not force: <NEW_LINE> <INDENT> if str(user_id) in user.match_history.keys() or str(g.user_id) in matched_user.match_history.keys(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> user_dict = user._asdict() <NEW_LINE> matched_user_dict = matched_user._asdict() <NEW_LINE> user_feature_unlock = int(1.0 / (len(user_dict['interests']) + 3) * 100) <NEW_LINE> matched_user_feature_unlock = int(1.0 / (len(matched_user_dict['interests']) + 3) * 100) <NEW_LINE> user_match_history[str(user_id)] = { 'active': True, 'percent_unlocked': matched_user_feature_unlock, 'profile': { 'animal': matched_user_dict['animal'], 'color': matched_user_dict['color'], 'gender': matched_user_dict['gender'], 'preferred_gender': matched_user_dict['preferred_gender'], 'first_name': '', 'last_name': '', 'profile_picture': '', 'interests': matched_interest } } <NEW_LINE> matched_user_match_history[str(g.user_id)] = { 'active': True, 'percent_unlocked': user_feature_unlock, 'profile': { 'animal': user_dict['animal'], 'color': user_dict['color'], 'gender': user_dict['gender'], 'preferred_gender': user_dict['preferred_gender'], 'first_name': '', 'last_name': '', 'profile_picture': '', 'interests': matched_interest } } <NEW_LINE> setattr(user, 'match_history', user_match_history) <NEW_LINE> setattr(matched_user, 'match_history', matched_user_match_history) <NEW_LINE> self.db.session.commit() <NEW_LINE> return True
Adds a match for the user in the current context with a specified user. If a match has not previously been made then return True, else False. :param user_id: user id of the match to the user in the current context :type user_id: int :param matched_interest: one interest that the matched user and user in the current context have in common :type matched_interest: key value of interest in common :param force: optional argument to override previous match with user :type force: boolean :return: Whether a new match was successfully made :rtype: bool
625941bf656771135c3eb7ab
def unmarshall_player(player_sparse): <NEW_LINE> <INDENT> template = get_player_template() <NEW_LINE> delta = {} <NEW_LINE> player = player_sparse.copy() <NEW_LINE> for k in player_sparse.keys(): <NEW_LINE> <INDENT> if player_sparse[k] == None or player_sparse[k] == []: <NEW_LINE> <INDENT> player.pop(k) <NEW_LINE> <DEDENT> <DEDENT> for k in template.keys(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> delta[k] = player[k] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> template.update(delta) <NEW_LINE> template["msfID"] = template.pop("id") <NEW_LINE> template["shoots"] = template.pop("handedness")["shoots"] <NEW_LINE> nbaID = template.pop("externalMappings")[0]["id"] <NEW_LINE> if nbaID == None: <NEW_LINE> <INDENT> template["nbaID"] = -1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> template["nbaID"] = nbaID <NEW_LINE> <DEDENT> template["height"] = height_to_cm(template["height"]) <NEW_LINE> template["weight"] = pound_to_kg(template["weight"]) <NEW_LINE> template["birthDate"] = datetime.strptime(template["birthDate"], "%Y-%m-%d") <NEW_LINE> template["draftedYear"] = template["drafted"]["year"] <NEW_LINE> template["draftedRound"] = template["drafted"]["round"] <NEW_LINE> template["draftedRoundPick"] = template["drafted"]["roundPick"] <NEW_LINE> template["draftedOverallPick"] = template["drafted"]["overallPick"] <NEW_LINE> template.pop("drafted") <NEW_LINE> return template
Unmarshall a MSF player JSON to a proper flat dict Paramters --------- player_sparse: dict, fetched player from MSF API. Returns ------- template: dict, flat proper player dict.
625941bf1f037a2d8b94613d
def warning(*args, **kwargs): <NEW_LINE> <INDENT> pass
The warning command is provided so that the user can issue warning messages from his/her scripts. The string argument is displayed in the command window (or stdout if running in batch mode) after being prefixed with a warning message heading and surrounded by the appropriate language separators (# for Python, // for Mel). Flags: - showLineNumber : sl (bool) [create] Obsolete. Will be deleted in the next version of Maya. Use the checkbox in the script editor that enables line number display instead.Flag can appear in Create mode of commandFlag can have multiple arguments, passed either as a tuple or a list. Derived from mel command `maya.cmds.warning`
625941bf8e7ae83300e4af0b
def __init__(self, id=None, slug=None, username=None, password=None, email=None, admin=None, active=None, created_at=None, updated_at=None): <NEW_LINE> <INDENT> self._id = None <NEW_LINE> self._slug = None <NEW_LINE> self._username = None <NEW_LINE> self._password = None <NEW_LINE> self._email = None <NEW_LINE> self._admin = None <NEW_LINE> self._active = None <NEW_LINE> self._created_at = None <NEW_LINE> self._updated_at = None <NEW_LINE> self.discriminator = None <NEW_LINE> if id is not None: <NEW_LINE> <INDENT> self.id = id <NEW_LINE> <DEDENT> if slug is not None: <NEW_LINE> <INDENT> self.slug = slug <NEW_LINE> <DEDENT> self.username = username <NEW_LINE> if password is not None: <NEW_LINE> <INDENT> self.password = password <NEW_LINE> <DEDENT> self.email = email <NEW_LINE> if admin is not None: <NEW_LINE> <INDENT> self.admin = admin <NEW_LINE> <DEDENT> if active is not None: <NEW_LINE> <INDENT> self.active = active <NEW_LINE> <DEDENT> if created_at is not None: <NEW_LINE> <INDENT> self.created_at = created_at <NEW_LINE> <DEDENT> if updated_at is not None: <NEW_LINE> <INDENT> self.updated_at = updated_at
User - a model defined in OpenAPI
625941bfe76e3b2f99f3a74f
def remove(download): <NEW_LINE> <INDENT> pass
remove a Download object from the queue
625941bf4f6381625f11497c
def get_changed_files_in_commit(self, commit_hash): <NEW_LINE> <INDENT> output = self._execute_command(get_changed_files_in_commit.format(commit_id=commit_hash)) <NEW_LINE> return re.match(r"(?P<content>.*)\ncommit {}".format(commit_hash), output, re.DOTALL).group('content').splitlines()
Gets all changed files in specified by parameter commit :param commit_hash: Commit hash :type commit_hash: str
625941bf7d847024c06be1f8
def p_VariableStatementError(p): <NEW_LINE> <INDENT> p[0] = "VariableStatement" <NEW_LINE> p[0] = list(p)
VariableStatement : VAR error ';' | VAR error | VAR Identifier EQUAL error ';' | VAR Identifier EQUAL error
625941bf187af65679ca505d
def risposta3(): <NEW_LINE> <INDENT> tavolo() <NEW_LINE> glPushMatrix() <NEW_LINE> t(yArm/2,0,yArm/2) <NEW_LINE> t(lunghezzaTavolo/2, larghezzaTavolo*2.4, altezzaTavolo/2) <NEW_LINE> r(-alphaTavolo, 1, 0, 0) <NEW_LINE> t(-lunghezzaTavolo/2, 0, -altezzaTavolo/2) <NEW_LINE> t(0,zArm,0) <NEW_LINE> drawTecnigrafo(xArm, yArm, zArm, 0 , 20, 3) <NEW_LINE> glPopMatrix()
Visualizza il tavolo ed il modello dei bracci con la squadretta alla fine
625941bf7cff6e4e811178c5
def testWordLength(): <NEW_LINE> <INDENT> if avgWordLength() != [5.728459348746789, 5.881334691000235, 5.985706580366775, 5.686571428571429, 5.3567737870195336]: <NEW_LINE> <INDENT> return False
tests function avgWordLength, returns false if not working
625941bf2ae34c7f2600d071
def subtract_cells(self, other): <NEW_LINE> <INDENT> column_names = self.column_names_list() <NEW_LINE> result = named_column_sheet(self.config, column_names) <NEW_LINE> result.rows = {date: qsutils.qsutils.subtracted_row(row, other.rows.get(date, {}), column_names) for date, row in self.rows.items()} <NEW_LINE> return result
Return the cell-by-cell subtraction of another sheet from this one.
625941bf16aa5153ce3623b8
def SetDefault(self): <NEW_LINE> <INDENT> tlw = wx.GetTopLevelParent(self) <NEW_LINE> if hasattr(tlw, 'SetDefaultItem'): <NEW_LINE> <INDENT> tlw.SetDefaultItem(self)
This sets the :class:`AquaButton` to be the default item for the panel or dialog box. :note: Under Windows, only dialog box buttons respond to this function. As normal under Windows and Motif, pressing return causes the default button to be depressed when the return key is pressed. See also :meth:`Window.SetFocus` which sets the keyboard focus for windows and text panel items, and :meth:`TopLevelWindow.SetDefaultItem`. :note: Note that under Motif, calling this function immediately after creation of a button and before the creation of other buttons will cause misalignment of the row of buttons, since default buttons are larger. To get around this, call :meth:`~aquabutton.AquaButton.SetDefault` after you have created a row of buttons: wxPython will then set the size of all buttons currently on the panel to the same size.
625941bf92d797404e3040c8
def pass_args(): <NEW_LINE> <INDENT> parser = argparse.ArgumentParser(description='Generate a sine wave stimulus module using a Lookup Table.') <NEW_LINE> parser.add_argument('-e', '--etol', help='Error tolerance.', type=float) <NEW_LINE> parser.add_argument('-f', '--freq', help='Frequency.', type=float) <NEW_LINE> parser.add_argument('-p', '--phase', help='Initial phase in degree.', type=float) <NEW_LINE> parser.add_argument('-o', '--offset', help='DC offset.', type=float) <NEW_LINE> parser.add_argument('-a', '--amp', help='Amplitude.', type=float) <NEW_LINE> parser.add_argument('-m', '--module', help='Module name.', type=str) <NEW_LINE> return parser.parse_args()
args
625941bf0a366e3fb873e757
def scanstring(string, end, strict=True): <NEW_LINE> <INDENT> pass
scanstring(string, end, strict=True) -> (string, end) Scan the string s for a JSON string. End is the index of the character in s after the quote that started the JSON string. Unescapes all valid JSON string escape sequences and raises ValueError on attempt to decode an invalid string. If strict is False then literal control characters are allowed in the string. Returns a tuple of the decoded string and the index of the character in s after the end quote.
625941bfac7a0e7691ed4010
def write_open_ended(self, background_column_indices=None, column_indices=None): <NEW_LINE> <INDENT> if not column_indices: <NEW_LINE> <INDENT> start, length = self.metadata['columns'] <NEW_LINE> column_indices = range(start, start + length) <NEW_LINE> if self.metadata['other'] == 'Y': <NEW_LINE> <INDENT> column_indices = [ i for i in column_indices if 'other' in self.subset.columns[i] or 'comment' in self.subset.columns[i] ] <NEW_LINE> <DEDENT> <DEDENT> if (background_column_indices and not isinstance(background_column_indices, list)): <NEW_LINE> <INDENT> background_column_indices = [background_column_indices] <NEW_LINE> <DEDENT> text = f'{self.question.upper()}\n' <NEW_LINE> help_txt = self.metadata['help'] <NEW_LINE> if pd.notnull(help_txt): <NEW_LINE> <INDENT> text += f'{help_txt}\n' <NEW_LINE> <DEDENT> text += '\n' <NEW_LINE> for respondent in self.subset.index: <NEW_LINE> <INDENT> respondent_txt = f'R{respondent}\n' <NEW_LINE> include_respondent = False <NEW_LINE> for col_index in column_indices: <NEW_LINE> <INDENT> colname = self.subset.columns[col_index] <NEW_LINE> value = self.subset.loc[respondent, colname] <NEW_LINE> if pd.notnull(value): <NEW_LINE> <INDENT> respondent_txt += f'{value}\n' <NEW_LINE> include_respondent = True <NEW_LINE> <DEDENT> <DEDENT> if include_respondent and background_column_indices: <NEW_LINE> <INDENT> for col_index in background_column_indices: <NEW_LINE> <INDENT> colname = self.subset.columns[col_index] <NEW_LINE> value = self.subset.loc[respondent, colname] <NEW_LINE> for bg_qid, row in self.survey.question_list.iterrows(): <NEW_LINE> <INDENT> if (row.start + row.nr_columns) > col_index: <NEW_LINE> <INDENT> bg_question = Question(self.survey, bg_qid) <NEW_LINE> bg_metadata = bg_question.metadata <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if 'answers' in bg_metadata: <NEW_LINE> <INDENT> for scale in bg_metadata['answers']: <NEW_LINE> <INDENT> for answer in bg_metadata['answers'][scale]: <NEW_LINE> <INDENT> if answer['code'] == 
value: <NEW_LINE> <INDENT> value = answer['answer'] <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> respondent_txt += f"- {value}\n" <NEW_LINE> <DEDENT> <DEDENT> if include_respondent: <NEW_LINE> <INDENT> text += respondent_txt + '\n\n' <NEW_LINE> <DEDENT> <DEDENT> return text
List the answers to an open-ended question :param background_column_indices: indices of columns containing background information to be included (Default value = None) :param column_indices: indices of columns to be used (Default value = None)
625941bffff4ab517eb2f379
def get_vmin_vmax(self, frame, vmin=None, vmax=None, soft_vmin=False, soft_vmax=False): <NEW_LINE> <INDENT> has_vmin = vmin is not None <NEW_LINE> has_vmax = vmax is not None <NEW_LINE> if has_vmin and has_vmax and (not soft_vmin) and (not soft_vmax): return vmin, vmax <NEW_LINE> return get_vmin_vmax(frame.data, interval=self.config.interval, zmin=vmin, zmax=vmax, soft_zmin=soft_vmin, soft_zmax=soft_vmax)
This function ... :param frame: :param vmin: :param vmax: :param soft_vmin: :param soft_vmax: :return:
625941bf21bff66bcd684894
def intersection(self, other): <NEW_LINE> <INDENT> VERIFICATION.verify_type(other, Rect, "intersection target must be Rect") <NEW_LINE> funcs = (max, min, max, min) <NEW_LINE> intersection_tuple = self._apply_binary_funcs(other, funcs) <NEW_LINE> (inter_row_start, inter_row_end, inter_col_start, inter_col_end) = intersection_tuple <NEW_LINE> if inter_row_start >= inter_row_end or inter_col_start >= inter_col_end: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return Rect(*intersection_tuple)
Returns the intersection between the current Rect and the input Rect Computes and returns a Rect whose ROI covers the exact overlapping region between the current object's ROI and the passed in Rect's ROI if such region consists of at least one pixel or None if such region is empty. Arguments: other -- The Rect to compute the intersection against Returns: A Rect representing the intersection or None if the intersection is empty
625941bf6fb2d068a760efda
def create_user(self, **extra_fields): <NEW_LINE> <INDENT> user = self.model(**extra_fields) <NEW_LINE> user.save() <NEW_LINE> return user
Create and save a User with the given email and password.
625941bf31939e2706e4cdac
def make_connection(config=None, default_model=None): <NEW_LINE> <INDENT> return datastore_rpc.Connection( adapter=ModelAdapter(default_model), config=config)
Create a new Connection object with the right adapter. Optionally you can pass in a datastore_rpc.Configuration object.
625941bfec188e330fd5a6e3
def isoformat(self): <NEW_LINE> <INDENT> return self.strftime(self.ISO8601Format)
Convert object to an ISO 8601 timestamp accepted by MediaWiki. datetime.datetime.isoformat does not postfix the ISO formatted date with a 'Z' unless a timezone is included, which causes MediaWiki ~1.19 and earlier to fail.
625941bfd18da76e23532412
def stop_sequence(self): <NEW_LINE> <INDENT> return sorted( self.stop_times(), key=lambda x:int(x.get('stop_sequence')) )
Return the sorted StopTimes for this trip.
625941bfa05bb46b383ec763
def text_image(text_path, font_path=None): <NEW_LINE> <INDENT> grayscale = 'L' <NEW_LINE> with open(text_path) as text_file: <NEW_LINE> <INDENT> lines = tuple(l.rstrip() for l in text_file.readlines()) <NEW_LINE> <DEDENT> large_font = 20 <NEW_LINE> font_path = font_path or 'C:\WINDOWS\Fonts\\arial.ttf' <NEW_LINE> try: <NEW_LINE> <INDENT> font = PIL.ImageFont.truetype( size=large_font) <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> font = PIL.ImageFont.load_default() <NEW_LINE> print('Could not use chosen font. Using default.') <NEW_LINE> <DEDENT> pt2px = lambda pt: int(round(pt * 96.0 / 72)) <NEW_LINE> max_width_line=11 <NEW_LINE> test_string = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' <NEW_LINE> max_height = pt2px(font.getsize(test_string)[1]) <NEW_LINE> max_width = pt2px(font.getsize(max_width_line)[0]) <NEW_LINE> max_height=2 <NEW_LINE> height = max_height * len(lines) <NEW_LINE> width = int(round(max_width + 40)) <NEW_LINE> image = PIL.Image.new(grayscale, (width, height), color=PIXEL_OFF) <NEW_LINE> draw = PIL.ImageDraw.Draw(image) <NEW_LINE> vertical_position = 5 <NEW_LINE> horizontal_position = 5 <NEW_LINE> line_spacing = int(round(max_height * 0.8)) <NEW_LINE> for line in lines: <NEW_LINE> <INDENT> draw.text((horizontal_position, vertical_position), line, fill=PIXEL_ON, font=font) <NEW_LINE> vertical_position += line_spacing <NEW_LINE> <DEDENT> c_box = PIL.ImageOps.invert(image).getbbox() <NEW_LINE> image = image.crop(c_box) <NEW_LINE> return image
Convert text file to a grayscale image with black characters on a white background. arguments: text_path - the content of this file will be converted to an image font_path - path to a font file (for example impact.ttf)
625941bfd99f1b3c44c674d4
def dedup_list(alist): <NEW_LINE> <INDENT> seen = set() <NEW_LINE> ret = [] <NEW_LINE> for item in alist: <NEW_LINE> <INDENT> if item not in seen: <NEW_LINE> <INDENT> ret.append(item) <NEW_LINE> <DEDENT> seen.add(item) <NEW_LINE> <DEDENT> return ret
Deduplicates the given list by removing duplicates while preserving the order
625941bf097d151d1a222d9a
def generate(self, src_text, src_text_rev, sentence_limit, word2id, id2word): <NEW_LINE> <INDENT> self.initialize(batch_size=1) <NEW_LINE> self.one_encode(src_text, src_text_rev, train=False) <NEW_LINE> sentence = "" <NEW_LINE> word_id = word2id["<start>"] <NEW_LINE> for _ in range(sentence_limit): <NEW_LINE> <INDENT> predict_vec = self.one_decode(predict_id=word_id, teacher_id=None, train=False) <NEW_LINE> word = id2word[xp.argmax(predict_vec.data)] <NEW_LINE> word_id = word2id[word] <NEW_LINE> if word == "<eos>": <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> sentence = sentence + word + " " <NEW_LINE> <DEDENT> return sentence
:param src_text: input text embed id ex.) [ 1, 0 ,14 ,5 ] :param src_text_rev: :param sentence_limit: :param word2id: :param id2word: :return:
625941bf26238365f5f0edaa
def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(DocumentHtmlDisplayAnchor, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result
Returns the model properties as a dict
625941bf66673b3332b91fd0
def main(): <NEW_LINE> <INDENT> parser = get_argparser() <NEW_LINE> args = parser.parse_args() <NEW_LINE> conf = ConfigParser() <NEW_LINE> conf.read_file(args.config) <NEW_LINE> conf["alembic"]['sqlalchemy.url'] = SQLALCHEMY_URL <NEW_LINE> conf["alembic"]['sqlalchemy.echo'] = SQLALCHEMY_ECHO <NEW_LINE> if SQLALCHEMY_POOL_SIZE: <NEW_LINE> <INDENT> conf["alembic"]['sqlalchemy.pool_size'] = SQLALCHEMY_POOL_SIZE <NEW_LINE> <DEDENT> if not conf.has_section('alembic'): <NEW_LINE> <INDENT> raise NoSectionError('alembic') <NEW_LINE> <DEDENT> init_dbsession(conf['alembic']) <NEW_LINE> bot = Bot() <NEW_LINE> bot.run()
設定ファイルをparseして、slackbotを起動します 1. configparserで設定ファイルを読み込む 2. 設定ファイルに `alembic` セクションが設定されているかチェック 3. 設定ファイルの情報でDB周りの設定を初期化 4. slackbotの処理を開始
625941bf96565a6dacc8f60c
def main_menu(): <NEW_LINE> <INDENT> img = libtcod.image_load('menu_background.png') <NEW_LINE> title = 'Rumble in the Underdeep' <NEW_LINE> author = 'By @cr8ivecodesmith' <NEW_LINE> while not libtcod.console_is_window_closed(): <NEW_LINE> <INDENT> libtcod.image_blit_2x(img, 0, 0, 0) <NEW_LINE> libtcod.console_set_default_foreground(0, libtcod.gold) <NEW_LINE> libtcod.console_print_ex(0, SCREEN_WIDTH / 2, SCREEN_HEIGHT / 2 - 4, libtcod.BKGND_NONE, libtcod.CENTER, title) <NEW_LINE> libtcod.console_print_ex(0, SCREEN_WIDTH / 2, SCREEN_HEIGHT - 2, libtcod.BKGND_NONE, libtcod.CENTER, author) <NEW_LINE> choice = menu('', ['New game', 'Continue', 'Quit'], 24) <NEW_LINE> if choice == 0: <NEW_LINE> <INDENT> new_game() <NEW_LINE> play_game() <NEW_LINE> <DEDENT> elif choice == 1: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> load_game() <NEW_LINE> play_game() <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> msgbox('\nNo saved game to load.\n', 24) <NEW_LINE> continue <NEW_LINE> <DEDENT> <DEDENT> elif choice == 2: <NEW_LINE> <INDENT> break
Game main menu
625941bf3346ee7daa2b2ca9
def is_square_mod_p(n, p): <NEW_LINE> <INDENT> if n == 0: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return pow(n, int((p-1)/2), p) == 1
Checks if n is a square (mod p) using Euler's criterion >>> is_square_mod_p(0,5) True >>> is_square_mod_p(1,5) True >>> is_square_mod_p(2,5) False
625941bfd53ae8145f87a1b3
def fen(self): <NEW_LINE> <INDENT> hash = [] <NEW_LINE> for i, coord in enumerate(all_coords): <NEW_LINE> <INDENT> p_type = self.get_coord((coord[1], SIZE - coord[0] - 1)) <NEW_LINE> if i % 8 == 0 and i > 0: <NEW_LINE> <INDENT> hash.append("/") <NEW_LINE> <DEDENT> if p_type == EMPTY: <NEW_LINE> <INDENT> if len(hash) > 0 and hash[-1] in ("1", "2", "3", "4", "5", "6", "7", "8"): <NEW_LINE> <INDENT> hash[-1] = str(int(hash[-1]) + 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> hash.append("1") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> hash.append(pieces[p_type]) <NEW_LINE> <DEDENT> <DEDENT> hash.append(" w " if self.current_player == WHITE else " b ") <NEW_LINE> hash_len = len(hash) <NEW_LINE> if self.kings_moved[0] == -1: <NEW_LINE> <INDENT> if self.rooks_moved[0] == -1: <NEW_LINE> <INDENT> hash.append("K") <NEW_LINE> <DEDENT> if self.rooks_moved[1] == -1: <NEW_LINE> <INDENT> hash.append("Q") <NEW_LINE> <DEDENT> <DEDENT> if self.kings_moved[1] == -1: <NEW_LINE> <INDENT> if self.rooks_moved[2] == -1: <NEW_LINE> <INDENT> hash.append("k") <NEW_LINE> <DEDENT> if self.rooks_moved[3] == -1: <NEW_LINE> <INDENT> hash.append("q") <NEW_LINE> <DEDENT> <DEDENT> if len(hash) == hash_len: <NEW_LINE> <INDENT> hash.append("-") <NEW_LINE> <DEDENT> if len(self.move_list) > 0: <NEW_LINE> <INDENT> last_move = self.move_list[-1] <NEW_LINE> if self.is_type(last_move[1], "P") and abs(last_move[1][1] - last_move[0][1]) == 2: <NEW_LINE> <INDENT> behind_rank = last_move[1][1] - (-1 if self.current_player == WHITE else 1) <NEW_LINE> hash.append(" " + coord_to_notation((last_move[1][0], behind_rank)) + " ") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> hash.append(" - ") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> hash.append(" - ") <NEW_LINE> <DEDENT> hash.append(str(self.__get_half_move_clock())) <NEW_LINE> full_moves = self.__get_full_move_clock() <NEW_LINE> hash.append(" " + str(full_moves)) <NEW_LINE> return "".join(hash)
Return the FEN hash string for the current state of the game.
625941bfa17c0f6771cbdf92
def vm_snapshot_revert(vm_name, kwargs=None, call=None): <NEW_LINE> <INDENT> if call != "action": <NEW_LINE> <INDENT> raise SaltCloudSystemExit( "The vm_snapshot_revert action must be called with -a or --action." ) <NEW_LINE> <DEDENT> if kwargs is None: <NEW_LINE> <INDENT> kwargs = {} <NEW_LINE> <DEDENT> snapshot_id = kwargs.get("snapshot_id", None) <NEW_LINE> if snapshot_id is None: <NEW_LINE> <INDENT> raise SaltCloudSystemExit( "The vm_snapshot_revert function requires a 'snapshot_id' to be provided." ) <NEW_LINE> <DEDENT> server, user, password = _get_xml_rpc() <NEW_LINE> auth = ":".join([user, password]) <NEW_LINE> vm_id = int(get_vm_id(kwargs={"name": vm_name})) <NEW_LINE> response = server.one.vm.snapshotrevert(auth, vm_id, int(snapshot_id)) <NEW_LINE> data = { "action": "vm.snapshotrevert", "snapshot_reverted": response[0], "vm_id": response[1], "error_code": response[2], } <NEW_LINE> return data
Reverts a virtual machine to a snapshot .. versionadded:: 2016.3.0 vm_name The name of the VM to revert. snapshot_id The snapshot ID. CLI Example: .. code-block:: bash salt-cloud -a vm_snapshot_revert my-vm snapshot_id=42
625941bfbaa26c4b54cb1061
def _server_connect(dsn, user='', password='', host=''): <NEW_LINE> <INDENT> if dsn is None: <NEW_LINE> <INDENT> raise InterfaceError("dsn value should not be None") <NEW_LINE> <DEDENT> if (not isinstance(dsn, str)) | (not isinstance(user, str)) | (not isinstance(password, str)) | (not isinstance(host, str)): <NEW_LINE> <INDENT> raise InterfaceError("Arguments should be of type string or unicode") <NEW_LINE> <DEDENT> if dsn.find('=') != -1: <NEW_LINE> <INDENT> if dsn[len(dsn) - 1] != ';': <NEW_LINE> <INDENT> dsn = dsn + ";" <NEW_LINE> <DEDENT> if host != '' and dsn.find('HOSTNAME=') == -1: <NEW_LINE> <INDENT> dsn = dsn + "HOSTNAME=" + host + ";" <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> dsn = "DSN=" + dsn + ";" <NEW_LINE> <DEDENT> if user != '' and dsn.find('UID=') == -1: <NEW_LINE> <INDENT> dsn = dsn + "UID=" + user + ";" <NEW_LINE> <DEDENT> if password != '' and dsn.find('PWD=') == -1: <NEW_LINE> <INDENT> dsn = dsn + "PWD=" + password + ";" <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> conn = ibm_db.connect(dsn, '', '') <NEW_LINE> <DEDENT> except Exception as inst: <NEW_LINE> <INDENT> raise _get_exception(inst) <NEW_LINE> <DEDENT> return conn
This method create connection with server
625941bfaad79263cf39097d
def format_cmd(alias, cmd, params): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return cmd.format(*params) <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> helpers.exit(constants.WARN_FMT_NUM_PARAMS.format( alias, len(params))) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> helpers.exit(constants.WARN_FMT_PLACEHOLDER_SYNTAX) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> helpers.exit(constants.WARN_FMT_PLACEHOLDER_SYNTAX2) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> helpers.exit(errors.INVALID_FORMAT_USE_CMD.format(cmd))
Formats a command with user provided parameters, similar to the Python `format()` method. Arguments: cmd (str): The command. params (tuple): Parameters to be formatted into the command. Returns: A `string` representing the newly formatted command.
625941bf67a9b606de4a7dfa
def edit_item(self, item): <NEW_LINE> <INDENT> conn = sqlite3.connect('plugins/rs_ac_singidunum_magacin/db/warehouse.db') <NEW_LINE> c = conn.cursor() <NEW_LINE> c.execute('UPDATE items SET item_count = ? WHERE name = ? and expiration_date = ? and temperature = ? ', (item.item_count, item.name,item.expiration_date, item.temperature)) <NEW_LINE> conn.commit() <NEW_LINE> conn.close()
Menja broj proizvoda u sqlite bazi.
625941bf167d2b6e31218ad5
def get_positions(self):
    """Fetch open positions from BitFlyer.

    Refer: [建玉の一覧を取得](https://lightning.bitflyer.jp/docs?lang=ja#建玉の一覧を取得)

    :return: list of positions, or None when the API call fails
    """
    self.logger.debug('Fetching positions of BitFlyer')
    try:
        result = self.call_json_api('GET', API_PATH_POSITIONS)
    except BitFlyerAPIError as err:
        # Soft failure: log a warning and signal "no data" to the caller.
        warning = ('Failed to fetch positions of BitFlyer, '
                   'API error={e}.').format(e=err)
        self.logger.warning(warning)
        return None
    self.logger.debug(('Completed to fetch positions of BitFlyer.'
                       '#positions={positions}').format(positions=len(result)))
    return result
Get positions. Refer: [建玉の一覧を取得](https://lightning.bitflyer.jp/docs?lang=ja#建玉の一覧を取得)
625941bf23849d37ff7b2fd0
def find_vampire_in_4digit(num):
    """Check whether a 4-digit number is a vampire number.

    A vampire number can be written as the product of two 2-digit
    "fangs" whose digits, taken together, are exactly the digits of the
    number.  Prints each matching factorization (as the original did)
    and additionally returns the distinct fang pairs so the result is
    usable programmatically.

    :param num: 4-digit integer to test
    :return: list of ``(small_fang, big_fang)`` tuples; ``[]`` when
        *num* is not a vampire number or does not have exactly 4 digits
    """
    digit = 4
    digits = list(str(num))
    if len(digits) != digit:
        print('only works for 4-digit number!')
        return []
    fang_pairs = []
    for combo in permutations(digits, digit):
        n1 = int(combo[0] + combo[1])
        n2 = int(combo[2] + combo[3])
        # Normalise the pair so (21, 60) and (60, 21) compare equal.
        if n1 > n2:
            n1, n2 = n2, n1
        # Permutations yield each fang split many times; record each
        # distinct pair once (the original deduplicated on `num`, which
        # hid any additional factorizations).
        if n1 * n2 == num and (n1, n2) not in fang_pairs:
            print(str(num) + " is a vampire number! " + str(n1) + " x " + str(n2))
            fang_pairs.append((n1, n2))
    return fang_pairs
Given a number, check whether it is a vampire number
625941bf9f2886367277a7cf
def test_client_intermediate(self):
    """Check that certificate chains work well with client certs.

    We generate an intermediate CA from the root CA, and issue a client
    certificate from that intermediate CA.  Since the server only knows
    about the root CA, we need to send it the certificate *and* the
    intermediate CA, so that it can check the whole chain.
    """
    # CLIENT_INTERMEDIATE_PEM is presumably the full chain (client cert
    # plus intermediate CA) -- TODO confirm against the fixture.
    with HTTPSConnectionPool(
        self.host,
        self.port,
        key_file=os.path.join(self.certs_dir, CLIENT_INTERMEDIATE_KEY),
        cert_file=os.path.join(self.certs_dir, CLIENT_INTERMEDIATE_PEM),
        ca_certs=DEFAULT_CA,
    ) as https_pool:
        r = https_pool.request("GET", "/certificate")
        # The test endpoint echoes back the client certificate subject.
        subject = json.loads(r.data.decode("utf-8"))
        assert subject["organizationalUnitName"].startswith("Testing cert")
Check that certificate chains work well with client certs We generate an intermediate CA from the root CA, and issue a client certificate from that intermediate CA. Since the server only knows about the root CA, we need to send it the certificate *and* the intermediate CA, so that it can check the whole chain.
625941bf5510c4643540f32a
def get_ros_package_path(config):
    """Return the simplified ROS_PACKAGE_PATH.

    Walks the config elements from last to first and keeps the path of
    every element that is not a plain file.

    :param config: workspace config exposing ``get_config_elements()``
    :return: list of tree paths, in reverse config order
    """
    trees = []
    for element in reversed(config.get_config_elements()):
        path = element.get_path()
        # File entries are not code trees; skip them.
        if os.path.isfile(path):
            continue
        trees.append(path)
    return trees
Return the simplifed ROS_PACKAGE_PATH
625941bfe64d504609d7477f
def logSearch(self, mode='all', searchTerm='', startLine='', logNum=1):
    """Search the ONOS karaf log(s) for a term.

    Arguments:
        searchTerm: the string to grep from the ONOS log.
        startLine: line that marks where the search begins (used with
            any mode; everything before the first match of startLine is
            discarded).
        logNum: number of karaf logs to include; rotated logs
            karaf.log.1 .. karaf.log.(logNum-1) are prepended.
        mode:  all   -> return every matching line
               last  -> return only the last matching line
               first -> return only the first matching line
               num   -> return how many times searchTerm appears
               total -> return the total line count of karaf.log
    """
    try:
        assert isinstance(searchTerm, str)
        logPath = '/opt/onos/log/karaf.log.'
        logPaths = '/opt/onos/log/karaf.log'
        # Prepend rotated logs so older entries come first in the cat.
        for i in range(1, logNum):
            logPaths = logPath + str(i) + " " + logPaths
        cmd = "cat " + logPaths
        if startLine:
            # grep -A with a huge count keeps everything after the first
            # occurrence of startLine.
            cmd = cmd + " | grep -A 100000000 \'" + startLine + "\'"
        if mode == 'all':
            cmd = cmd + " | grep \'" + searchTerm + "\'"
        elif mode == 'last':
            cmd = cmd + " | grep \'" + searchTerm + "\'" + " | tail -n 1"
        elif mode == 'first':
            cmd = cmd + " | grep \'" + searchTerm + "\'" + " | head -n 1"
        elif mode == 'num':
            cmd = cmd + " | grep \'" + searchTerm + "\' | wc -l"
            num = self.lineCount(cmd)
            return num
        elif mode == 'total':
            # NOTE(review): 'total' ignores logNum and only counts the
            # current karaf.log -- confirm this is intended.
            totalLines = self.lineCount("cat /opt/onos/log/karaf.log | wc -l")
            return int(totalLines)
        else:
            main.log.error(self.name + " unsupported mode")
            return main.ERROR
        before = self.sendline(cmd)
        before = before.splitlines()
        # The shell output may include the echoed command; keep only the
        # lines that really contain the term.
        returnLines = [line for line in before if searchTerm in line]
        return returnLines
    except AssertionError:
        main.log.error(self.name + " searchTerm is not string type")
        return None
    except pexpect.EOF:
        main.log.error(self.name + ": EOF exception found")
        main.log.error(self.name + ": " + self.handle.before)
        main.cleanAndExit()
    except pexpect.TIMEOUT:
        main.log.error(self.name + ": TIMEOUT exception found")
        main.log.error(self.name + ": " + self.handle.before)
        main.cleanAndExit()
    except Exception:
        main.log.exception(self.name + ": Uncaught exception!")
        main.cleanAndExit()
Searches the latest ONOS log file for the given search term and returns a list that contains all the lines that have the search term. Arguments: searchTerm: The string to grep from the ONOS log. startLine: The term that decides which line is the start to search the searchTerm in the karaf log. For now, startTerm only works in 'first' mode. logNum: In some extreme cases, one karaf log is not big enough to contain all the information. Because of this, searching multiple logs is necessary to capture the right result. logNum is the number of karaf logs that we need to search for the searchTerm. mode: all: return all the strings that contain the search term last: return the last string that contains the search term first: return the first string that contains the search term num: return the number of times that the searchTerm appears in the log total: return how many lines are in the karaf log
625941bf9b70327d1c4e0d13
def tower_encoder(frames: tf.Tensor, poses: tf.Tensor, scope="TowerEncoder"):
    """Feed-forward convolutional "tower" architecture.

    Encodes context frames together with their poses: two strided conv
    stages downsample the frames, the pose is broadcast over the spatial
    grid and concatenated channel-wise, and 1x1 skip projections form
    residual connections around each 3x3 conv.

    :param frames: image batch tensor
    :param poses: per-image pose tensor (broadcast to the feature map)
    :param scope: variable scope name
    :return: (feature map tensor, endpoints dict)
    """
    with tf.variable_scope(scope):
        endpoints = {}
        # Stage 1: 2x2 stride-2 conv halves the spatial resolution.
        net = tf.layers.conv2d(frames, filters=256, kernel_size=2,
                               strides=2, padding="VALID",
                               activation=tf.nn.relu)
        # Residual branch: 1x1 projection to 128 channels, no activation.
        skip1 = tf.layers.conv2d(net, filters=128, kernel_size=1,
                                 strides=1, padding="SAME", activation=None)
        net = tf.layers.conv2d(net, filters=128, kernel_size=3, strides=1,
                               padding="SAME", activation=tf.nn.relu)
        net = net + skip1
        # Stage 2: second stride-2 downsampling conv.
        net = tf.layers.conv2d(net, filters=256, kernel_size=2, strides=2,
                               padding="VALID", activation=tf.nn.relu)
        # Tile the pose over the (height, width) grid and append it as
        # extra channels.
        height, width = tf.shape(net)[1], tf.shape(net)[2]
        poses = broadcast_pose(poses, height, width)
        net = tf.concat([net, poses], axis=3)
        # Second residual block, now pose-conditioned.
        skip2 = tf.layers.conv2d(net, filters=128, kernel_size=1,
                                 strides=1, padding="SAME", activation=None)
        net = tf.layers.conv2d(net, filters=128, kernel_size=3, strides=1,
                               padding="SAME", activation=tf.nn.relu)
        net = net + skip2
        net = tf.layers.conv2d(net, filters=256, kernel_size=3, strides=1,
                               padding="SAME", activation=tf.nn.relu)
        net = tf.layers.conv2d(net, filters=256, kernel_size=1, strides=1,
                               padding="SAME", activation=tf.nn.relu)
        # endpoints is returned empty here; presumably kept for interface
        # parity with other encoders -- TODO confirm.
        return net, endpoints
Feed-forward convolutional architecture.
625941bf15baa723493c3eb3
def _precess_from_J2000_Capitaine(epoch):
    """Compute the precession matrix from J2000 to the given Julian epoch.

    Expression from Capitaine et al. 2003 as expressed in the USNO
    Circular 179.  This should match the IAU 2006 standard from SOFA.

    Parameters
    ----------
    epoch : scalar
        The epoch as a Julian year number (e.g. J2000 is 2000.0).
    """
    from .angles import rotation_matrix
    # Julian centuries since J2000.
    T = (epoch - 2000.0) / 100.0
    # Polynomial coefficients (highest power first, in arcseconds) for
    # the equatorial precession angles zeta, z and theta.
    pzeta = (-0.0000003173, -0.000005971, 0.01801828, 0.2988499, 2306.083227, 2.650545)
    pz = (-0.0000002904, -0.000028596, 0.01826837, 1.0927348, 2306.077181, -2.650545)
    ptheta = (-0.0000001274, -0.000007089, -0.04182264, -0.4294934, 2004.191903, 0)
    # Evaluate the polynomials and convert arcseconds -> degrees.
    zeta = np.polyval(pzeta, T) / 3600.0
    z = np.polyval(pz, T) / 3600.0
    theta = np.polyval(ptheta, T) / 3600.0
    # Classic z-y-z rotation composition for equatorial precession.
    return rotation_matrix(-z, 'z') * rotation_matrix(theta, 'y') * rotation_matrix(-zeta, 'z')
Computes the precession matrix from J2000 to the given Julian Epoch. Expression from from Capitaine et al. 2003 as expressed in the USNO Circular 179. This should match the IAU 2006 standard from SOFA. Parameters ---------- epoch : scalar The epoch as a julian year number (e.g. J2000 is 2000.0)
625941bf1f5feb6acb0c4a93
def containsNearbyDuplicate(self, nums, k):
    """Return True if nums holds two equal values at most k indices apart.

    :type nums: List[int]
    :type k: int
    :rtype: bool
    """
    last_seen = {}
    for idx, value in enumerate(nums):
        previous = last_seen.get(value)
        # idx is always > previous, so the plain difference suffices.
        if previous is not None and idx - previous <= k:
            return True
        last_seen[value] = idx
    return False
:type nums: List[int] :type k: int :rtype: bool
625941bfff9c53063f47c134
def bernstein_pt_aprox(X):
    """Return the 'x' and 'y' coordinate functions of a Bezier curve.

    Builds the degree ``n-1`` Bezier curve for the ``n`` control points
    in *X* as a pair of numpy ``poly1d`` objects.

    :param X: (n, 2) array of 2-dimensional control points
    :return: (xpoly, ypoly) coordinate polynomials of the curve parameter
    """
    n = X.shape[0]
    xpoly = np.poly1d([0])
    ypoly = np.poly1d([0])
    # Py3 fix: the original used the Python 2-only xrange().
    for i in range(n):
        # i-th Bernstein basis polynomial of degree n-1.
        npoly = bernstein(i, n - 1)
        xpoly += X[i, 0] * npoly
        ypoly += X[i, 1] * npoly
    return xpoly, ypoly
Returns the 'x' and 'y' coordinate functions for a 2-dimensional Bezier curve with control points 'X'.
625941bfb5575c28eb68df3e
def baroclinic_modes(self, nmodes, ztop=10, N2key="N2", depthkey="z"):
    """Calculate baroclinic normal modes from the vertical stratification.

    Based on linear quasigeostrophy; returns the first `nmodes::int`
    deformation radii and their associated eigenfunctions.

    Additional arguments
    --------------------
    ztop        depth at which to cut off the profile (avoids surface
                effects)
    N2key       data key to use for N^2
    depthkey    data key to use for depth
    """
    if N2key not in self.fields or depthkey not in self.fields:
        raise FieldError("baroclinic_modes requires buoyancy frequency and depth")
    # Drop samples where either field is NaN.
    igood = ~self.nanmask((N2key, depthkey))
    N2 = self[N2key][igood]
    dep = self[depthkey][igood]
    # Trim everything shallower than ztop.
    itop = np.argwhere(dep > ztop)[0]
    N2 = N2[itop:]
    dep = dep[itop:]
    h = np.diff(dep)
    # The finite-difference matrices below assume uniform spacing.
    assert all(h == h_ for h_ in h[1:])
    # NOTE(review): the Coriolis parameter is normally f = 2*Omega*sin(lat);
    # the factor 4 here (and whether coords[1] is in radians) should be
    # confirmed.
    f = 4*OMEGA * math.sin(self.coords[1])
    F = f**2/N2
    # Boundary conditions at top and bottom.
    F[0] = 0.0
    F[-1] = 0.0
    # NOTE(review): both `sprs` and `sparse` aliases are used below --
    # presumably both name scipy.sparse; confirm the module imports.
    F = sprs.diags(F, 0)
    D1 = util.sparse_diffmat(len(self), 1, h)
    D2 = util.sparse_diffmat(len(self), 2, h)
    T = sparse.diags(D1 * F.diagonal(), 0)
    # Sturm-Liouville operator: M phi = d/dz (F dphi/dz), expanded via the
    # product rule.
    M = T*D1 + F*D2
    # Shift-invert around ~0 to pick out the smallest-magnitude eigenvalues.
    lamda, V = sprs.linalg.eigs(M.tocsc(), k=nmodes+1, sigma=1e-8)
    # Deformation radii; the first (barotropic) mode is discarded.
    Ld = 1.0 / np.sqrt(np.abs(np.real(lamda[1:])))
    return Ld, V[:,1:]
Calculate the baroclinic normal modes based on linear quasigeostrophy and the vertical stratification. Return the first `nmodes::int` deformation radii and their associated eigenfunctions. Additional arguments -------------------- ztop the depth at which to cut off the profile, to avoid surface effects N2key::string Data key to use for N^2 depthkey::string Data key to use for depth
625941bf67a9b606de4a7dfb
def findIndex(self, R, Z, tol=1e-10, show=False):
    """Find the (x, z) grid index corresponding to (R, Z) coordinates.

    Starts from the nearest stored grid point (KD-tree query) and
    refines the fractional index with Newton iteration on the
    index -> coordinate mapping.

    Parameters
    ----------
    R, Z : array_like
        Locations.  Can be scalar or array, must be the same shape.
    tol : float, optional
        Maximum tolerance on the square distance.
    show : bool, optional
        Plot the grid, targets and iterates when plotting is available.

    Returns
    -------
    x, z : (ndarray, ndarray)
        Index as a float, same shape as R, Z.  x is set to -1 for points
        that left the inner boundary and nx for the outer boundary.
    """
    R = np.asfarray(R)
    Z = np.asfarray(Z)
    assert R.shape == Z.shape
    input_shape = R.shape
    # Flatten to 1-D for the vectorised solve; reshape on return.
    n = R.size
    position = np.concatenate(
        (R.reshape((n,1)), Z.reshape((n,1))), axis=1)
    R = R.reshape((n,))
    Z = Z.reshape((n,))
    # Nearest grid node provides the Newton starting guess.
    dists, ind = self.tree.query(position)
    nx,nz = self.R.shape
    # Convert the flat tree index back to 2-D (x, z) indices.
    xind = np.floor_divide(ind, nz)
    zind = ind - xind*nz
    xind = np.asfarray(xind)
    zind = np.asfarray(zind)
    # mask freezes points that are outside (or leave) the radial domain.
    mask = np.ones(xind.shape)
    mask[ np.logical_or((xind < 0.5), (xind > (nx-1.5))) ] = 0.0
    if show and plotting_available:
        plt.plot(self.R, self.Z, '.')
        plt.plot(R, Z, 'x')
    while True:
        # Residual between current iterate and the target coordinates.
        Rpos,Zpos = self.getCoordinate(xind, zind)
        if show and plotting_available:
            plt.plot(Rpos, Zpos, 'o')
        dR = Rpos - R
        dZ = Zpos - Z
        # Converged when every unmasked point is within tolerance.
        if np.amax(mask*(dR**2 + dZ**2)) < tol:
            break
        # Jacobian of the (x, z) -> (R, Z) mapping.
        dRdx, dZdx = self.getCoordinate(xind, zind, dx=1)
        dRdz, dZdz = self.getCoordinate(xind, zind, dz=1)
        determinant = dRdx*dZdz - dRdz*dZdx
        # Newton step; masked points do not move.
        xind -= mask * ((dZdz*dR - dRdz*dZ) / determinant)
        zind -= mask * ((dRdx*dZ - dZdx*dR) / determinant)
        # Clamp and freeze points that stepped outside the radial range.
        in_boundary = xind < 0.5
        mask[ in_boundary ] = 0.0
        xind[ in_boundary ] = 0.0
        out_boundary = xind > (nx-1.5)
        mask[ out_boundary ] = 0.0
        xind[ out_boundary ] = nx-1
    if show and plotting_available:
        plt.show()
    # Flag out-of-domain points with sentinel indices -1 / nx.
    in_boundary = xind < 0.5
    xind[ in_boundary ] = -1
    out_boundary = xind > (nx-1.5)
    xind[ out_boundary ] = nx
    return xind.reshape(input_shape), zind.reshape(input_shape)
Finds the (x, z) index corresponding to the given (R, Z) coordinate Parameters ---------- R, Z : array_like Locations. Can be scalar or array, must be the same shape tol : float, optional Maximum tolerance on the square distance Returns ------- x, z : (ndarray, ndarray) Index as a float, same shape as R, Z
625941bf92d797404e3040c9
def _get_index_prefix(self, index=None): <NEW_LINE> <INDENT> if index and self.app.config['ELASTICSEARCH_INDEX_PREFIX']: <NEW_LINE> <INDENT> prefixed_index = self.app.config['ELASTICSEARCH_INDEX_PREFIX'] + index <NEW_LINE> return prefixed_index <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return index
Get prefix that should be added to each index. This allows to save indexes with a seperate prefix in elasticsearch, however entry point and request stay the same. So the name of index is different in elasticsearch itself and the layer handles the querying for the right index.
625941bfbaa26c4b54cb1062
def hash_file(path, saltenv='base'):
    """Return the hash of a file.

    To get the hash of a file on the salt master file server, prepend
    the path with ``salt://<file on server>``; otherwise prepend the
    file with ``/`` for a local file.

    CLI Example:

    .. code-block:: bash

        salt '*' cp.hash_file salt://path/to/file
    """
    # A saltenv embedded in the URL (e.g. "...?saltenv=dev") overrides
    # the argument.
    path, embedded_env = salt.utils.url.split_env(path)
    if embedded_env:
        saltenv = embedded_env
    return _client().hash_file(path, saltenv)
Return the hash of a file, to get the hash of a file on the salt master file server prepend the path with salt://<file on server> otherwise, prepend the file with / for a local file. CLI Example: .. code-block:: bash salt '*' cp.hash_file salt://path/to/file
625941bffb3f5b602dac35d0
@coroutine
def dvilike_machine(transitions, actions, start = 'start', **state_vars):
    """Coroutine-based finite state machine driver.

    transitions: Dictionary mapping commands (strings) to dictionaries
        mapping states (strings) to a state (a string).
    actions: Dictionary mapping commands (strings) to dictionaries
        mapping states to an action (a function).
    start: The start state (a string).
    state_vars: initial keyword state, threaded through the actions.
    """
    state = start
    while True:
        # Each send() delivers the next command to process.
        command = (yield)
        try:
            # Look up the new state first ...
            state = transitions[command][state]
        except KeyError as e:
            raise IllegalTransitionError(e)
        try:
            # ... then run the action registered for (command, NEW state);
            # the action returns the updated state variables.
            state_vars = actions[command][state](**state_vars)
        except KeyError as e:
            raise UndefinedActionError(e)
No. transitions: Dictionary mapping commands (strings) to dictionaries mapping states (strings) to a state (a string). actions: Dictionary mapping commands (strings) to dictionaries mapping states to an action (a function). start: The start state (a string).
625941bf50485f2cf553ccd8
def test_initialisation_attitude(self):
    """[Attitude] Test if generated source comply with the copied
    (from satellite) attitude.

    After actualising the splines, the solver's error function should be
    numerically zero.
    """
    self.Solver.actualise_splines()
    error = self.Solver.error_function()
    # Very tight tolerance: the fitted splines should reproduce the
    # copied attitude essentially exactly.
    self.assertAlmostEqual(error, 0, delta=1e-25)
[Attitude] Test if generated source comply with the copied (from satellite) attitude
625941bf796e427e537b0503
def decrease(self, proxy):
    """Penalise a proxy that failed; evict it once its score is too low.

    Subtracts one point from *proxy* in the redis sorted set.  A proxy
    whose score is missing/zero or at/below ``self.min_score`` is
    removed from the set instead.

    :param proxy: the proxy address
    :return: the redis result of zincrby (decrement) or zrem (removal)
    """
    score = self.db.zscore(self.redis_key, proxy)
    # Inverted guard: handle the eviction case first.
    if not (score and (score > self.min_score)):
        print('代理',proxy,'当前分数为',score,',太低,移除!!!')
        return self.db.zrem(self.redis_key,proxy)
    print('代理',proxy,'当前分数',score,'减1')
    return self.db.zincrby(self.redis_key, -1, proxy)
为不能用的代理降分,把分数过低的代理清除 :param proxy: 随即代理 :return: 降分或删除
625941bf0c0af96317bb8128
def GetMax(dOuter, dOrder):
    """Move the highest-traffic IP not yet in dOrder from dOuter to dOrder.

    Each call finds the entry of *dOuter* with the largest ``'traffic'``
    value whose IP is not already recorded in *dOrder*, adds it to
    *dOrder* (mutating it in place), and returns *dOrder*.

    @param dOuter: {ip: {'traffic': int, ...}} source mapping
    @param dOrder: {ip: data} already-selected entries, updated in place
    @return: dOrder
    """
    dBig = {}
    iBig = 0
    # Py3 fix: dict.iteritems() no longer exists; items() is equivalent.
    for sIP, dData in dOuter.items():
        iLittle = dData['traffic']
        if iLittle >= iBig and sIP not in dOrder:
            iBig = iLittle
            dBig = {'ip': sIP, 'data': dData}
    # Raises KeyError when every IP is already in dOrder (unchanged from
    # the original behaviour).
    dOrder[dBig['ip']] = dBig['data']
    return dOrder
每次得到一个不在dOrder中的dOuter的最大流量值,加入dOrder @param dOuter: @param dOrder: @return:
625941bfbe8e80087fb20b86
def test_volume_present_new(self):
    """Test if volume is present (non existing volume)."""
    # Expected state return for a freshly created volume.
    ret = {
        "name": "myzpool/volume",
        "result": True,
        "comment": "volume myzpool/volume was created",
        "changes": {"myzpool/volume": "created"},
    }
    # zfs.exists -> False forces the state down the "create" path.
    mock_exists = MagicMock(return_value=False)
    mock_create = MagicMock(return_value=OrderedDict([("created", True)]))
    with patch.dict(zfs.__salt__, {"zfs.exists": mock_exists}), patch.dict(
        zfs.__salt__, {"zfs.create": mock_create}
    ), patch.dict(zfs.__utils__, self.utils_patch):
        self.assertEqual(
            ret, zfs.volume_present("myzpool/volume", volume_size="1G")
        )
Test if volume is present (non existing volume)
625941bf44b2445a33931fd7
def _drop_index(engine, table_name, index_name):
    """Drop an index from a specified table, ignoring failures.

    There is no universal way to do something like `DROP INDEX IF
    EXISTS`, so the engine-specific syntax variants are tried in turn and
    SQLAlchemy errors are swallowed; a warning is logged when every
    variant fails.

    WARNING: Due to some engines (MySQL at least) being unable to use
    bind parameters in a DROP INDEX statement (at least via SQLAlchemy),
    the query string here is generated from the method parameters
    without sanitizing.  DO NOT USE THIS FUNCTION IN ANY OPERATION THAT
    TAKES USER INPUT.
    """
    _LOGGER.debug("Dropping index %s from table %s", index_name, table_name)
    # Syntax variants, in the same order the original tried them:
    # generic, qualified (SQLite/Postgres), and MySQL "ON table".
    statements = (
        f"DROP INDEX {index_name}",
        f"DROP INDEX {table_name}.{index_name}",
        f"DROP INDEX {index_name} ON {table_name}",
    )
    success = False
    for statement in statements:
        try:
            engine.execute(text(statement))
        except SQLAlchemyError:
            continue
        success = True
        break
    if success:
        _LOGGER.debug(
            "Finished dropping index %s from table %s", index_name, table_name
        )
    else:
        _LOGGER.warning(
            "Failed to drop index %s from table %s. Schema "
            "Migration will continue; this is not a "
            "critical operation.",
            index_name,
            table_name,
        )
Drop an index from a specified table. There is no universal way to do something like `DROP INDEX IF EXISTS` so we will simply execute the DROP command and ignore any exceptions WARNING: Due to some engines (MySQL at least) being unable to use bind parameters in a DROP INDEX statement (at least via SQLAlchemy), the query string here is generated from the method parameters without sanitizing. DO NOT USE THIS FUNCTION IN ANY OPERATION THAT TAKES USER INPUT.
625941bfd164cc6175782c8d
def random_Marsaglia(k, seed):
    """Generate k Gaussian random numbers via the Marsaglia polar method.

    Plots a histogram of the samples against the standard normal density
    for several bin counts and saves each figure as an EPS file.

    :param k: number of random numbers (generated in pairs, so the list
        may hold k+1 values when k is odd)
    :param seed: random seed
    :return: null
    """
    subdivisions = [10, 20, 50, 100]
    numbers = []
    random.seed(seed)
    count = 0
    while count < k:
        # Draw a point uniformly in the square [-1, 1]^2 ...
        x1 = 2.0 * random.random() - 1.0
        x2 = 2.0 * random.random() - 1.0
        w = x1 ** 2 + x2 ** 2
        # ... and accept it only if it falls inside the unit circle.
        if w < 1.0:
            # Polar transform yields two independent standard normals.
            w = sqrt((-2.0 * log(w)) / w)
            y1 = x1 * w
            y2 = x2 * w
            numbers.append(y1)
            numbers.append(y2)
            count += 2
    for i in range(len(subdivisions)):
        # Reference standard normal pdf for comparison.
        x = np.linspace(-5, 5, 10000)
        y = 1.0 / (sqrt(2.0 * pi)) * np.exp(- x ** 2 / 2.0)
        plt.plot(x, y, color='r')
        # NOTE(review): the `normed` kwarg was removed in matplotlib >= 3.1;
        # newer versions need `density=True` instead.
        n, bins, patches = plt.hist(numbers, subdivisions[i], facecolor='b',
                                    alpha=0.3, normed=True)
        plt.xlabel('x')
        plt.ylabel('P(x)')
        plt.savefig('b_%d_%d.eps' % (k, subdivisions[i]))
        plt.close()
Generate k Gaussian distributed random numbers. :param k: number of random numbers. :param seed: random seed. :return: null.
625941bf293b9510aa2c31d8
def _read_bitand_expr(self):
    """Parse a bit-and expression: equality ('&' equality)*."""
    expr = self._read_equlity_expr()
    # Left-associative fold over successive '&' operands.
    while self._next_t('&'):
        rhs = self._read_equlity_expr()
        expr = self._binop(NodeKind.OP_BITAND, self._conv(expr), self._conv(rhs))
    return expr
bit and expression & operation
625941bffbf16365ca6f60fe
def error(code=404):
    """Return the raw HTTP error response for *code*.

    :param code: status code to look up (default 404)
    :return: bytes of the full response, or b'' for unknown codes
    """
    responses = {
        404: b'HTTP/1.1 404 NOT FOUND\r\n\r\n<h1>NOT FOUND</h1>',
    }
    return responses.get(code, b'')
根据code返回不同的错误响应 :param code: :return:
625941bf167d2b6e31218ad6
def search(self, query: str) -> CommentQuery:
    """Restrict the comments to ones that match a search query
    (generative)."""
    # Postgres full-text match: tsvector @@ tsquery.
    ts_query = func.websearch_to_tsquery(query)
    return self.filter(Comment.search_tsv.op("@@")(ts_query))
Restrict the comments to ones that match a search query (generative).
625941bf50812a4eaa59c264
def retreive_artifacts_link_from_last_run(self, workflow_id):
    """Retrieve the artifacts link from the last run of a workflow.

    Queries the GitHub Actions runs of *workflow_id*, picks the run with
    the highest run_number, and aborts the program unless that run
    completed successfully.

    :param workflow_id: workflow identifier
    :return: (artifacts_url, run id) of the last run
    """
    resp = self.call(self.workflows_url + "/" + str(workflow_id) + "/runs")
    # "Last" means highest run_number, not API list order.
    last_workflow_run = max(resp["workflow_runs"], key=lambda w: w["run_number"])
    self.logger.info("last run workflow_id: {}".format(last_workflow_run["workflow_id"]))
    # Hard-exit on anything other than a completed, successful run.
    if last_workflow_run["conclusion"] != "success":
        self.logger.error("the last job, id {}, is not success".format(last_workflow_run["id"]))
        sys.exit("the last job is not success")
    if last_workflow_run["status"] != "completed":
        self.logger.error("the last job, id {}, is not completed".format(last_workflow_run["id"]))
        sys.exit("the last job is not completed")
    self.logger.info(
        "run id={} in success with artifacts url {}".format(last_workflow_run["id"], last_workflow_run["artifacts_url"]))
    return last_workflow_run["artifacts_url"], last_workflow_run["id"]
Retrieve the artifacts link from the last run
625941bf0fa83653e4656efc
def setEnabled(self, enable):
    """void Plasma.QueryMatch.setEnabled(bool enable)

    Stub binding: no Python-side behavior; presumably implemented by the
    native Plasma layer -- TODO confirm.
    """
    pass
void Plasma.QueryMatch.setEnabled(bool enable)
625941bf8c0ade5d55d3e8f8
def to_be_ended(self):
    """Return objects whose existence condition no longer holds.

    Returns a query set of all the objects in self.model.objects for
    which exists_when is false, but there is an open condition for it.

    :raises NoExistsWhen: when the model defines no ``exists_when``
    """
    try:
        # exists_when is a model-level filter expression; excluding it
        # leaves the rows where the condition is now false.
        qs = super(ConditionClassManager, self) \
            .get_query_set() \
            .exclude(self.model.exists_when)
    except AttributeError:
        raise NoExistsWhen
    # Keep only rows that still have an open condition attached.
    return qs.filter(pk__in=self._get_ids_with_conditions())
Returns a query set of all the objects in self.model.objects for which exists_when is false, but there is an open condition for it.
625941bfd268445f265b4dae
def index_information(self, collection_name):
    """Return a dict mapping index names to whether each is unique.

    The implicit ``_id_`` index is always reported as unique.

    :param collection_name: name of the collection to inspect
    :return: {index_name: bool} mapping
    """
    info = self._db[collection_name].index_information()
    result = {}
    for name, specs in info.items():
        result[name] = name == '_id_' or specs.get('unique', False)
    return result
Return dict of names and sorting order of indexes
625941bf3cc13d1c6d3c72bb
def get_wordlist(self, url, filename="shiritori_wordlist.txt"):
    """Retrieve (downloading if needed) the wordlist used by the game.

    Downloads *url* to *filename* on first use, then loads, lowercases
    and filters the words (drops possessives and overlong entries).

    :param url: location of the raw wordlist
    :param filename: local cache file name
    :return: list of filtered lowercase words
    """
    if not path.exists(filename):
        self.logger.info("Wordlist not found; downloading new list. This can take a while.")
        r = requests.get(url, stream=True)
        # Reading .content consumes the stream; iter_content() below then
        # re-chunks the cached body (only used to drive the progress bar).
        filesize = len(r.content)
        with open(filename, 'wb') as filehandle:
            try:
                # Optional progress bar when tqdm is installed.
                from tqdm import tqdm
                content_progress = tqdm(r.iter_content(), total=filesize)
                for chunk in content_progress:
                    content_progress.set_description("Downloading wordlist")
                    filehandle.write(chunk)
            except ImportError:
                self.logger.info("'tdqm' failed to import; not showing progressbar.")
                self.logger.info("Downloading wordlist...")
                for chunk in r.iter_content():
                    filehandle.write(chunk)
        self.logger.info("New wordlist saved to 'shiritori/%s'" % filename)
    with open(filename, 'r') as filehandle:
        wordlist = [word.strip().lower() for word in filehandle]
    # Drop possessives ("dog's") and implausibly long entries.
    wordlist = list(filter(lambda x: "'s" not in x, wordlist))
    wordlist = list(filter(lambda x: len(x) < 30, wordlist))
    return wordlist
Retrieves and downloads the wordlist on initializaton for use in the game.
625941bf66656f66f7cbc0ea
def strStr(self, haystack, needle):
    """Return the first index of needle in haystack, or -1.

    :type haystack: str
    :type needle: str
    :rtype: int
    """
    if not needle:
        # An empty needle matches at position 0 by convention.
        return 0
    if not haystack or len(haystack) < len(needle):
        return -1
    window = len(needle)
    start = 0
    # Slide a needle-sized window across the haystack.
    while start < len(haystack):
        if haystack[start:start + window] == needle:
            return start
        start += 1
    return -1
:type haystack: str :type needle: str :rtype: int
625941bfd53ae8145f87a1b4
def print_queue(self):
    """Imprime a fila (prints every client in the queue, one per line)."""
    # Iterate directly instead of indexing with range(len(...)).
    for client in self.clients:
        print(client)
Imprime a fila.
625941bf60cbc95b062c6482
def get_context(fingerprint, verify_cb=None, remote_jid=None):
    """Construct and return a pyOpenSSL context object.

    :param fingerprint: 'server' or 'client'; selects the verify mode
    :param verify_cb: optional verification callback (default_callback
        when None)
    :param remote_jid: when given, the peer's stored certificate is
        loaded into the context's trust store
    """
    ctx = SSL.Context(SSL.SSLv23_METHOD)
    # Disable legacy SSL protocols, session tickets, and DH key reuse.
    flags = (SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3 | SSL.OP_SINGLE_DH_USE
             | SSL.OP_NO_TICKET)
    ctx.set_options(flags)
    ctx.set_cipher_list('HIGH:!aNULL:!3DES')
    if fingerprint == 'server':
        ctx.set_verify(SSL.VERIFY_NONE|SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
                       verify_cb or default_callback)
    elif fingerprint == 'client':
        ctx.set_verify(SSL.VERIFY_PEER, verify_cb or default_callback)
    # Load our own self-signed key pair.
    cert_name = os.path.join(configpaths.get('MY_CERT'),
                             SELF_SIGNED_CERTIFICATE)
    ctx.use_privatekey_file((cert_name + '.pkey').encode('utf-8'))
    ctx.use_certificate_file((cert_name + '.cert').encode('utf-8'))
    # Prefer user-provided DH parameters; fall back to the bundled
    # defaults (the open() calls only probe that the file exists and is
    # readable before handing the path to OpenSSL).
    dh_params_name = os.path.join(configpaths.get('MY_CERT'), DH_PARAMS)
    try:
        with open(dh_params_name, "r") as dh_params_file:
            ctx.load_tmp_dh(dh_params_name.encode('utf-8'))
    except FileNotFoundError as err:
        default_dh_params_name = os.path.join(configpaths.get('DATA'),
                                              'other', DEFAULT_DH_PARAMS)
        try:
            with open(default_dh_params_name, "r") as default_dh_params_file:
                ctx.load_tmp_dh(default_dh_params_name.encode('utf-8'))
        except FileNotFoundError as err:
            log.error('Unable to load default DH parameter file: %s, %s',
                      default_dh_params_name, err)
            raise
    if remote_jid:
        # Trust the certificate previously stored for this peer, if any.
        store = ctx.get_cert_store()
        path = os.path.join(os.path.expanduser(configpaths.get('MY_PEER_CERTS')),
                            remote_jid) + '.cert'
        if os.path.exists(path):
            load_cert_file(path, cert_store=store)
            log.debug('certificate file %s loaded fingerprint %s',
                      path, fingerprint)
    return ctx
constructs and returns the context objects
625941bf3346ee7daa2b2caa
def verify_user_cfg_change(self, field, name, multivalued=False):
    """Verify a user-config change is reflected on a newly created user.

    Helper function: creates a fresh user, opens its detail page,
    asserts that *field* carries the expected value *name*, then deletes
    the user again.

    :param field: widget/field name to check
    :param name: expected field value
    :param multivalued: True when the field renders as a multivalued
        widget (the first value input is suffixed with ``-0``)
    """
    self.add_record(user_data.ENTITY, user_data.DATA2)
    self.navigate_to_record(user_data.DATA2['pkey'])
    # CSS selector for the field's input element.
    if multivalued:
        s = "div[name={0}] input[name={0}-0]".format(field)
    else:
        s = "div[name={0}] input[name={0}]".format(field)
    assert self.get_value(s) == name
    # Clean up the temporary user.
    self.delete(user_data.ENTITY, [user_data.DATA2])
Helper function to verify that user config changes were reflected on newly created user
625941bfc432627299f04b84
@pytest.fixture(name="mock_get_countries_by_region_with_filter")
def fixture_mock_get_countries_by_region_with_filter(requests_mock):
    """Mock requests for getting countries by region with a name filter.

    Europe returns an empty list; Africa returns a fixed set of
    countries.
    """
    europe_endpoint = BASE_URI + "/region/europe?fields=name"
    africa_endpoint = BASE_URI + "/region/africa?fields=name"
    requests_mock.get(europe_endpoint, json=[])
    requests_mock.get(africa_endpoint, json=[RSA, NGR, EGY, KEN])
Mock requests for getting countries by region and filters the response.
625941bfe5267d203edcdbdf
def prediction_imputation(self, df, cfg):
    """Impute values for empty CTUs at prediction time.

    Adds rows for CTUs without events, removes churned customers, fills
    the imputed rows, recombines them with the observed data and
    rebuilds the target column.

    Parameters:
        df (dataframe): input dataframe
        cfg (dict): configuration dictionary

    Returns:
        df: dataframe with imputed CTU rows flagged via ``imputed_ctu``
    """
    # Build the imputed CTU rows and the ids they belong to.
    df_ctu, ids = self.add_rows_no_event_ctu_predict(df, cfg)
    df_ctu = self.remove_ctu_churn_customers(df_ctu, cfg)
    df_ctu = self.fill_no_event_ctu(df_ctu, cfg)
    # Keep only the ids that actually needed imputation.
    df_ctu = df_ctu[df_ctu.index.isin(ids)]
    # Align on (customer id, CTU) before concatenating.
    df.set_index([cfg['ID_COL'],cfg['CTU_COL']],inplace=True)
    # Flag observed rows (0) vs imputed rows (1).
    df['imputed_ctu'] = 0
    df_ctu['imputed_ctu'] = 1
    df = pd.concat([df,df_ctu], axis=0)
    df = self.create_target(df.reset_index(), cfg)
    print ("\nNumber of imputed rows after combining:", df['imputed_ctu'].sum())
    print ("Te positives in imputed data:", sum(df[df['imputed_ctu']==1][cfg['TE_TARGET_COL']]))
    print ("Te positives in available data:", sum(df[df['imputed_ctu']==0][cfg['TE_TARGET_COL']]))
    # Release the intermediate frame eagerly (can be large).
    del df_ctu
    gc.collect()
    return df
Imputes values for empty CTUs Parameters: df (dataframe): dataframe cfg (dict): configuration dictionary Returns: df: dataframe
625941bff9cc0f698b14053d
def _get_bounds(self) -> constants.TYPING_LIST: <NEW_LINE> <INDENT> list_bounds = [] <NEW_LINE> for elem in self.range_X: <NEW_LINE> <INDENT> list_bounds.append(tuple(elem)) <NEW_LINE> <DEDENT> return list_bounds
It returns list of range tuples, obtained from `self.range_X`. :returns: list of range tuples. :rtype: list
625941bf090684286d50ec23
def _get_max_prefixes(self):
    """Getter method for max_prefixes, mapped from YANG variable
    /bgp/neighbors/neighbor/afi_safis/afi_safi/l3vpn_ipv6_unicast/
    prefix_limit/state/max_prefixes (uint32).

    YANG Description: Maximum number of prefixes that will be accepted
    from the neighbour.
    """
    # Name-mangled attribute set by the generated binding class.
    return self.__max_prefixes
Getter method for max_prefixes, mapped from YANG variable /bgp/neighbors/neighbor/afi_safis/afi_safi/l3vpn_ipv6_unicast/prefix_limit/state/max_prefixes (uint32) YANG Description: Maximum number of prefixes that will be accepted from the neighbour
625941bf627d3e7fe0d68d8e
@register_decorator(priority=5)
def namelambda(name):
    """Rename a function (parametric decorator).

    ``namelambda("foo")(lambda ...: ...)`` returns a copy of the lambda
    whose ``__name__``, ``__qualname__`` and code object all carry the
    name "foo"; non-function inputs are returned unchanged.  See the
    module documentation for caveats about nested lambdas' qualnames.
    """
    def rename(f):
        if not isinstance(f, (LambdaType, FunctionType)):
            return f
        # Work on a shallow copy so the original function is untouched.
        f = copy(f)
        f.__name__ = name
        # Replace only the last dotted component of the qualified name.
        idx = f.__qualname__.rfind('.')
        f.__qualname__ = f"{f.__qualname__[:idx]}.{name}" if idx != -1 else name
        co = f.__code__
        if version_info >= (3, 8, 0):
            # CodeType.replace() exists from Python 3.8 on.
            f.__code__ = f.__code__.replace(co_name=name)
        else:
            # Pre-3.8: rebuild the code object field by field, changing
            # only co_name.
            f.__code__ = CodeType(co.co_argcount, co.co_kwonlyargcount,
                                  co.co_nlocals, co.co_stacksize, co.co_flags,
                                  co.co_code, co.co_consts, co.co_names,
                                  co.co_varnames, co.co_filename, name,
                                  co.co_firstlineno, co.co_lnotab,
                                  co.co_freevars, co.co_cellvars)
        return f
    return rename
Rename a function. Decorator. This can be used to give a lambda a meaningful name, which is especially useful for debugging in cases where a lambda is returned as a closure, and the actual call into it occurs much later (so that if the call crashes, the stack trace will report a meaningful name, not just ``"<lambda>"``). To support reordering by ``unpythonic.syntax.util.sort_lambda_decorators``, this is a standard parametric decorator, called like:: foo = namelambda("foo")(lambda ...: ...) The first call returns a *foo-renamer*, and supplying the lambda to that actually returns a lambda that has the name *foo*. This is used internally by some macros (``namedlambda``, ``let``, ``do``), but also provided as part of unpythonic's public API in case it's useful elsewhere. **CAUTION**: When a function definition is executed, the names the parent scopes had at that time are baked into the function's ``__qualname__``. Hence renaming a function after it is defined will not affect the dotted names of any closures defined *inside* that function. This is mainly an issue for nested lambdas:: from unpythonic import namelambda, withself nested = namelambda("outer")(lambda: namelambda("inner")(withself(lambda self: self))) print(nested.__qualname__) # "outer" print(nested().__qualname__) # "<lambda>.<locals>.inner" Note the inner lambda does not see the outer's new name.
625941bf71ff763f4b5495c7
def test_regulation_retrocession(self): <NEW_LINE> <INDENT> mandate_id = self.ref('%s.extm_jacques_membre_ag' % self._module_ns) <NEW_LINE> data = {'ext_mandate_id': mandate_id, 'month': '05', 'year': 2014 } <NEW_LINE> self.assertRaises( orm.except_orm, self.registry('retrocession').create, self.cr, self.uid, data) <NEW_LINE> data = {'ext_mandate_id': mandate_id, 'month': '12', 'year': 2014 } <NEW_LINE> self.registry('retrocession').create(self.cr, self.uid, data) <NEW_LINE> self.assertRaises( orm.except_orm, self.registry('retrocession').create, self.cr, self.uid, data) <NEW_LINE> data['is_regulation'] = True <NEW_LINE> retro_id = self.registry('retrocession').create( self.cr, self.uid, data) <NEW_LINE> self.assertNotEqual(retro_id, False)
Test regulation retrocession
625941bf50485f2cf553ccd9
def shift_y(self, y_move): <NEW_LINE> <INDENT> [p.shift_y(y_move) for p in self.arr]
Move all items down by the given amount.
625941bf8da39b475bd64eb1
def read_excel_data(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> xl = pd.ExcelFile(self.excel_file) <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> raise FileNotFoundError(f"file {self.excel_file} not found, or no data") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.xl_dict = {sheet_name: xl.parse(sheet_name) for sheet_name in xl.sheet_names}
read data from excel to pandas table excel file has few sheets, Each sheet is converted into dict to store values with key = sheet_name
625941bf442bda511e8be35c
def CNN_simple(input_size=(71,71,3), output_size=5): <NEW_LINE> <INDENT> model = Sequential() <NEW_LINE> model.add(Lambda(lambda x : x, input_shape=input_size)) <NEW_LINE> model.add(Conv2D(32, kernel_size=(3, 3), padding='same', kernel_initializer='random_uniform')) <NEW_LINE> model.add(Activation('relu')) <NEW_LINE> model.add(MaxPooling2D(pool_size=(2,2), strides=(2, 2))) <NEW_LINE> create_conv_pool(model, 32) <NEW_LINE> create_conv_pool(model, 64) <NEW_LINE> model.add(Flatten()) <NEW_LINE> create_fully_connected(model, 64, activation='relu', dropout_bool=True) <NEW_LINE> create_fully_connected(model, output_size, activation='softmax') <NEW_LINE> return model
Implements a convolutional neural network
625941bf566aa707497f44ad
def test_bootstrap_length(): <NEW_LINE> <INDENT> out = algo.bootstrap(a_norm) <NEW_LINE> assert_equal(len(out), 10000) <NEW_LINE> n_boot = 100 <NEW_LINE> out = algo.bootstrap(a_norm, n_boot=n_boot) <NEW_LINE> assert_equal(len(out), n_boot)
Test that we get a bootstrap array of the right shape.
625941bfa934411ee37515d3
def OnChatIncomingPM(self, userData, data): <NEW_LINE> <INDENT> raise NotImplementedError
Callback for incoming chat in a Private Message (PM). Return value should be a tuple of two bools: first denoting if the call was successful and the second if further processing shall be blocked for other plugins.
625941bf07f4c71912b113c1
def enable_ports(self): <NEW_LINE> <INDENT> pass
Enable ports if Ports table is empty.
625941bfa17c0f6771cbdf93
def is_null(self) -> bool: <NEW_LINE> <INDENT> return self.ref == self.alt
Return if this variant has an alternate equivalent to its reference.
625941bf498bea3a759b99f0
def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> data = request.POST.copy() <NEW_LINE> form = self.form_class(data) <NEW_LINE> context = {} <NEW_LINE> context['messages'] = [] <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> username = form.cleaned_data['email'] <NEW_LINE> password = form.cleaned_data['password'] <NEW_LINE> try: <NEW_LINE> <INDENT> user = authenticate(username=username, password=password) <NEW_LINE> if user is not None: <NEW_LINE> <INDENT> login(request, user) <NEW_LINE> next = request.GET.get('next', None) <NEW_LINE> if next: <NEW_LINE> <INDENT> return HttpResponseRedirect(next) <NEW_LINE> <DEDENT> if user.userprofile.user_type == CUSTOMER: <NEW_LINE> <INDENT> return HttpResponseRedirect(reverse('home')) <NEW_LINE> <DEDENT> if user.userprofile.user_type == TECH_ADMIN: <NEW_LINE> <INDENT> return HttpResponseRedirect(reverse('home')) <NEW_LINE> <DEDENT> if user.userprofile.user_type == ACCOUNT_ADMIN: <NEW_LINE> <INDENT> return HttpResponseRedirect(reverse('home')) <NEW_LINE> <DEDENT> if user.userprofile.user_type == SUPER_ADMIN: <NEW_LINE> <INDENT> return HttpResponseRedirect(reverse('home')) <NEW_LINE> <DEDENT> return HttpResponseRedirect("/user/dashboard") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> messages.error(request, 'Invalid Username or Password', extra_tags='alert-error') <NEW_LINE> context['form'] = form <NEW_LINE> return self.render_to_response(context) <NEW_LINE> <DEDENT> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> messages.error(request, 'Invalid Username or Password', extra_tags='alert-error') <NEW_LINE> context['form'] = form <NEW_LINE> return self.render_to_response(context) <NEW_LINE> <DEDENT> <DEDENT> messages.error(request, 'Invalid Username or Password', extra_tags='alert-error') <NEW_LINE> context['form'] = form <NEW_LINE> return self.render_to_response(context)
Validates the username and password if credentials are valid the user will be authenticated and will be returned to Dashboard else appropriate message will be displayed to User on the same login page :param request: :param args: :param kwargs: :returns: HttpResponse,HttpResponseRedirect
625941bfad47b63b2c509ec0
def mostCommon(self, text): <NEW_LINE> <INDENT> if isinstance(text,list): <NEW_LINE> <INDENT> text = ''.join(text) <NEW_LINE> <DEDENT> lst = Counter(text) <NEW_LINE> d = {} <NEW_LINE> for key, value in lst.items(): <NEW_LINE> <INDENT> d[key] = value <NEW_LINE> <DEDENT> sort_val = sorted(d.items(), key=lambda kv: kv[1], reverse = True) <NEW_LINE> return sort_val
Znajdywanie najczęściej występujących znaków
625941bf66673b3332b91fd1
def __init__(self): <NEW_LINE> <INDENT> self.PkgId = None <NEW_LINE> self.Bucket = None <NEW_LINE> self.Region = None <NEW_LINE> self.Path = None <NEW_LINE> self.Credentials = None
:param PkgId: 程序包ID 注意:此字段可能返回 null,表示取不到有效值。 :type PkgId: str :param Bucket: 桶 注意:此字段可能返回 null,表示取不到有效值。 :type Bucket: str :param Region: 目标地域 注意:此字段可能返回 null,表示取不到有效值。 :type Region: str :param Path: 存储路径 注意:此字段可能返回 null,表示取不到有效值。 :type Path: str :param Credentials: 鉴权信息 :type Credentials: :class:`tencentcloud.tsf.v20180326.models.CosCredentials`
625941bfa219f33f346288ad
def signUserInorOut(user_data, server_conf, clientSocket, count): <NEW_LINE> <INDENT> user_name, course = user_data.split('#') <NEW_LINE> db_name = server_conf["database_name"] <NEW_LINE> profile = getProfileWithName(user_name, db_name) <NEW_LINE> if profile != None: <NEW_LINE> <INDENT> if profile[8] == 0: <NEW_LINE> <INDENT> clientSocket.send('SIN'.encode()) <NEW_LINE> signUserIn(profile[0], course, profile[3], db_name) <NEW_LINE> <DEDENT> elif profile[8] == 1: <NEW_LINE> <INDENT> clientSocket.send('SNO'.encode()) <NEW_LINE> signOutUser(profile, db_name, count) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> clientSocket.send('SIN'.encode()) <NEW_LINE> id = generateID(db_name) <NEW_LINE> insertOrUpdate(id, user_name, db_name) <NEW_LINE> signUserIn(id, course, user_name, db_name) <NEW_LINE> <DEDENT> return
Signs in user after client has filled out data :param user_data: user data from client :param server_conf: server config file
625941bfd10714528d5ffc21
def queue_window(self, window: Window, event_type: WMEventType): <NEW_LINE> <INDENT> self.queue.put(WMEvent(window=window, event_type=event_type))
Add a window to the queue.
625941bf1f5feb6acb0c4a94
def __call__(cls, parameters, **kwargs): <NEW_LINE> <INDENT> obj = type.__call__(cls) <NEW_LINE> obj.init(parameters, **kwargs) <NEW_LINE> return obj
Called when you call Class()
625941bf187af65679ca505e
def p_init_next_state_1(p): <NEW_LINE> <INDENT> initValues[p[1]]=True
init_next_state : NAME NEXT EQUALS TRUE
625941bf63f4b57ef0001060
def numOfWays(self, nums: List[int]) -> int: <NEW_LINE> <INDENT> def way(nums): <NEW_LINE> <INDENT> if len(nums) <= 2: return 1 <NEW_LINE> l = [x for x in nums if x < nums[0]] <NEW_LINE> r = [x for x in nums if x > nums[0]] <NEW_LINE> return comb(len(l)+len(r), len(r)) * way(l) * way(r) <NEW_LINE> <DEDENT> return (way(nums)-1) % (10**9+7)
First, we cannot change root, otherwise different tree Second, nodes in the left < root, nodes in the right > root l = [x for x in nums if x < nums[0]] r = [x for x in nums if x > nums[0]] Interleaving l and r will lead to the same tree There are C(|l|+|r|, |l|) ways Do it recursively for l and r Total ways = C(|l|+|r|, |l|) * way(l) * way(r) Return = Total ways - 1 Time O(nlogn) best, O(n^2) worst Space O(nlogn) best, O(n^2) worst
625941bffbf16365ca6f60ff
def __init__(self, *args, **kwds): <NEW_LINE> <INDENT> if args or kwds: <NEW_LINE> <INDENT> super(CfgPRT, self).__init__(*args, **kwds) <NEW_LINE> if self.portID is None: <NEW_LINE> <INDENT> self.portID = 0 <NEW_LINE> <DEDENT> if self.reserved0 is None: <NEW_LINE> <INDENT> self.reserved0 = 0 <NEW_LINE> <DEDENT> if self.txReady is None: <NEW_LINE> <INDENT> self.txReady = 0 <NEW_LINE> <DEDENT> if self.mode is None: <NEW_LINE> <INDENT> self.mode = 0 <NEW_LINE> <DEDENT> if self.baudRate is None: <NEW_LINE> <INDENT> self.baudRate = 0 <NEW_LINE> <DEDENT> if self.inProtoMask is None: <NEW_LINE> <INDENT> self.inProtoMask = 0 <NEW_LINE> <DEDENT> if self.outProtoMask is None: <NEW_LINE> <INDENT> self.outProtoMask = 0 <NEW_LINE> <DEDENT> if self.flags is None: <NEW_LINE> <INDENT> self.flags = 0 <NEW_LINE> <DEDENT> if self.reserved1 is None: <NEW_LINE> <INDENT> self.reserved1 = 0 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.portID = 0 <NEW_LINE> self.reserved0 = 0 <NEW_LINE> self.txReady = 0 <NEW_LINE> self.mode = 0 <NEW_LINE> self.baudRate = 0 <NEW_LINE> self.inProtoMask = 0 <NEW_LINE> self.outProtoMask = 0 <NEW_LINE> self.flags = 0 <NEW_LINE> self.reserved1 = 0
Constructor. Any message fields that are implicitly/explicitly set to None will be assigned a default value. The recommend use is keyword arguments as this is more robust to future message changes. You cannot mix in-order arguments and keyword arguments. The available fields are: portID,reserved0,txReady,mode,baudRate,inProtoMask,outProtoMask,flags,reserved1 :param args: complete set of field values, in .msg order :param kwds: use keyword arguments corresponding to message field names to set specific fields.
625941bf99fddb7c1c9de2d3
def load_photo(self, last_chat_file_id): <NEW_LINE> <INDENT> method = 'getFile' <NEW_LINE> image_name = 'initial.jpg' <NEW_LINE> data = {"file_id": last_chat_file_id} <NEW_LINE> ret = requests.post(self.api_url + method, data=data) <NEW_LINE> data = {'file_path': ret.json()['result']['file_path']} <NEW_LINE> ret = requests.get('https://api.telegram.org/file/bot{}/{}'.format(self.token, data['file_path'])) <NEW_LINE> if ret.status_code == 200: <NEW_LINE> <INDENT> with open(image_name, 'wb') as f: <NEW_LINE> <INDENT> f.write(ret.content)
Получение/загрузка фотографии, присланной пользователем бота :param last_chat_file_id: id чата присланной фотографии :return: -
625941bf796e427e537b0504
def find_all(self, *criterion, raw=False, limit=None, offset=None): <NEW_LINE> <INDENT> self.session.expire_all() <NEW_LINE> if raw: <NEW_LINE> <INDENT> return session.query(self.__class__).filter(*criterion).limit(limit).offset(offset) <NEW_LINE> <DEDENT> res = session.query(self.__class__).filter(*criterion).limit(limit).offset(offset).all() <NEW_LINE> return res
Searches the DB (Parameters: limit, offset)
625941bf7047854f462a134d
def _secret_data_too_large(req, resp): <NEW_LINE> <INDENT> api.abort(falcon.HTTP_413, _("Could not add secret data as it was too large"), req, resp)
Throw exception indicating plain-text was too big.
625941bf45492302aab5e201
def get_representation(output): <NEW_LINE> <INDENT> hidden_states = output[1] <NEW_LINE> token_embeddings = torch.stack(hidden_states, dim=0) <NEW_LINE> token_embeddings = torch.squeeze(token_embeddings, dim=1) <NEW_LINE> token_embeddings = token_embeddings.permute(1, 0, 2) <NEW_LINE> hidden_states = [token[-1] for token in token_embeddings] <NEW_LINE> return hidden_states
Get the hidden representations from bert layers.
625941bfdd821e528d63b0eb
def test_type_objects_and_constructors() -> None: <NEW_LINE> <INDENT> assert isfunction(dbapi.Date) <NEW_LINE> assert isfunction(dbapi.Time) <NEW_LINE> assert isfunction(dbapi.Timestamp) <NEW_LINE> assert isfunction(dbapi.DateFromTicks) <NEW_LINE> assert isfunction(dbapi.TimeFromTicks) <NEW_LINE> assert isfunction(dbapi.TimestampFromTicks) <NEW_LINE> assert isfunction(dbapi.Binary) <NEW_LINE> assert dbapi.STRING <NEW_LINE> assert dbapi.BINARY <NEW_LINE> assert dbapi.NUMBER <NEW_LINE> assert dbapi.DATETIME <NEW_LINE> assert dbapi.ROWID
Test type objects and constructors.
625941bf711fe17d825422b1
def noise(C0=0, Cx=0, x=0, Cu=0, u=0, NSim=1): <NEW_LINE> <INDENT> if is_scalar(C0): <NEW_LINE> <INDENT> independent_noise = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert len(C0.shape) == 2, "C0 must have 2 dimensions" <NEW_LINE> independent_noise = C0.dot(randn(C0.shape[1],NSim)) <NEW_LINE> <DEDENT> if is_scalar(Cx): <NEW_LINE> <INDENT> state_dep_noise = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert len(Cx.shape) == 3, "Cx must have 3 dimensions" <NEW_LINE> nCx = Cx.shape[2] <NEW_LINE> state_dep_noise = sum([Cx[:,:,i].dot(x)*randn(NSim) for i in range(nCx)]) <NEW_LINE> <DEDENT> if is_scalar(Cu): <NEW_LINE> <INDENT> control_dep_noise = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert len(Cu.shape) == 3, "Cu must have 3 dimensions" <NEW_LINE> nCu = Cu.shape[2] <NEW_LINE> control_dep_noise = sum([Cu[:,:,i].dot(u)*randn(NSim) for i in range(nCu)]) <NEW_LINE> <DEDENT> noise = independent_noise + state_dep_noise + control_dep_noise <NEW_LINE> return noise
Produce state and control dependent noise.
625941bfbde94217f3682d34
def get_temp(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> dev = sorted([x for x in os.listdir( "/sys/class/thermal/") if "thermal_zone" in x])[-1] <NEW_LINE> with open("/sys/class/thermal/"+dev+"/temp") as f: <NEW_LINE> <INDENT> temp = f.read() <NEW_LINE> <DEDENT> return str(int(int(temp)/1000)) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print(e.args) <NEW_LINE> return "NaN"
Return temp in Celsius from /sys/class/thermal
625941bf71ff763f4b5495c8
def _in_iterating_context(node): <NEW_LINE> <INDENT> parent = node.parent <NEW_LINE> if isinstance(parent, astroid.For): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if isinstance(parent, astroid.Comprehension): <NEW_LINE> <INDENT> if parent.iter == node: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(parent, astroid.Call): <NEW_LINE> <INDENT> if isinstance(parent.func, astroid.Name): <NEW_LINE> <INDENT> if parent.func.name in _ACCEPTS_ITERATOR: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(parent.func, astroid.Attribute): <NEW_LINE> <INDENT> if parent.func.attrname in ATTRIBUTES_ACCEPTS_ITERATOR: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> inferred = utils.safe_infer(parent.func) <NEW_LINE> if inferred: <NEW_LINE> <INDENT> if inferred.qname() in _BUILTIN_METHOD_ACCEPTS_ITERATOR: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> root = inferred.root() <NEW_LINE> if root and root.name == "itertools": <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> elif isinstance(parent, astroid.Assign) and isinstance( parent.targets[0], (astroid.List, astroid.Tuple) ): <NEW_LINE> <INDENT> if len(parent.targets[0].elts) > 1: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> elif ( isinstance(parent, astroid.Compare) and len(parent.ops) == 1 and parent.ops[0][0] in ["in", "not in"] ): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif isinstance(parent, astroid.YieldFrom): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if isinstance(parent, astroid.Starred): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False
Check if the node is being used as an iterator. Definition is taken from lib2to3.fixer_util.in_special_context().
625941bf5e10d32532c5ee68
def docstring_to_rest(repo, docstring, current_type=None, current_func=None): <NEW_LINE> <INDENT> if current_type is not None: <NEW_LINE> <INDENT> assert current_type.count(".") == 1 <NEW_LINE> <DEDENT> if current_func is not None: <NEW_LINE> <INDENT> assert current_func.count(".") in (1, 2) <NEW_LINE> <DEDENT> def esc_xml(text): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> etree.tostring(etree.fromstring( "<dummy>%s</dummy>" % text.replace( "&nbsp;", "&#160;"))) <NEW_LINE> <DEDENT> except etree.XMLSyntaxError: <NEW_LINE> <INDENT> text = escape(text) <NEW_LINE> <DEDENT> return text <NEW_LINE> <DEDENT> reg = re.compile(r"(\|\[.*?\]\|)", flags=re.MULTILINE | re.DOTALL) <NEW_LINE> docstring = "".join([ p if reg.match(p) else esc_xml(p) for p in reg.split(docstring)]) <NEW_LINE> docbook = _docstring_to_docbook(docstring) <NEW_LINE> rst = _docbook_to_rest(repo, docbook, current_type, current_func) <NEW_LINE> if not docstring.endswith("\n"): <NEW_LINE> <INDENT> rst = rst.rstrip("\n") <NEW_LINE> <DEDENT> while rst.endswith("\n\n"): <NEW_LINE> <INDENT> rst = rst[:-1] <NEW_LINE> <DEDENT> return rst
Converts `docstring` to reST. Args: repo (Repository): the repo that produced the docstring docstring (str): the docstring current_type (str or None): the Python identifier for the docstring. In case the docstring comes from Gtk.Widget.some_func, the parser can use "Gtk.Widget" in case a signal without a class name is referenced. current_func (str or None): The Python identifier for the docstring. In case the docstring comes from Gtk.Widget.some_func, the parser can use "Gtk.Widget.some_func" to rewrite instance parameters. Returns: str: the docstring converted to reST
625941bfbe383301e01b53cc