code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def get_subdomains(self, site): <NEW_LINE> <INDENT> if DOMAIN_REGEX.search(site): <NEW_LINE> <INDENT> response = self.session.get('https://api.threatminer.org/v2/domain.php?q={}&rt=5'.format(site)).json() <NEW_LINE> return response <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise InvalidTypeException('You must submit a Domain.')
Get all subdomains of a given domain. :param site: Domain name :return: JSON of server response
625941c07b180e01f3dc475c
def pop_macro(self, index: int = -1) -> Macro: <NEW_LINE> <INDENT> m = self.macros.pop(index) <NEW_LINE> self._decrement_after(m.pos, len(m)) <NEW_LINE> return m
Remove the macro from the word at the given index.
625941c0091ae35668666ebc
def get_goodX(VI, approx = 0, min_ = -10, max_ = 10): <NEW_LINE> <INDENT> x = uniform(min_, max_) <NEW_LINE> if approx == 0: <NEW_LINE> <INDENT> x = int(x) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> x = round(x,approx) <NEW_LINE> <DEDENT> while x in VI: <NEW_LINE> <INDENT> x = uniform(min_, max_) <NEW_LINE> if approx == 0: <NEW_LINE> <INDENT> x = int(x) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> x = round(x,approx) <NEW_LINE> <DEDENT> <DEDENT> return x
@todo: Docstring for get_goodX :param VI: @todo :returns: @todo
625941c010dbd63aa1bd2aff
def enableDocument(self, docIndex, enable): <NEW_LINE> <INDENT> if docIndex == None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> docData = self.documents[docIndex] <NEW_LINE> docItem = self.documentTree.getItemByData(docData) <NEW_LINE> self.documentTree.enableItemTree(docItem, enable)
Enable/disable selected document.
625941c06fece00bbac2d696
def _log_ui_throbber_progress(self, progress_from_test=False): <NEW_LINE> <INDENT> if progress_from_test: <NEW_LINE> <INDENT> self._log_ui_healthy(self.throbber.render(), True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._log_ui_partial(self.throbber.render(), True)
Give an interactive indicator of the test progress :param progress_from_test: if indication of progress came explicitly from the test. If false, it means the test process is running, but not communicating test specific progress. :type progress_from_test: bool :rtype: None
625941c04c3428357757c283
def minimal_logger(name, debug=False): <NEW_LINE> <INDENT> log = logging.getLogger(name) <NEW_LINE> formatter = logging.Formatter( "%(asctime)s (%(levelname)s) %(name)s : %(message)s") <NEW_LINE> console = logging.StreamHandler() <NEW_LINE> console.setFormatter(formatter) <NEW_LINE> console.setLevel(logging.INFO) <NEW_LINE> log.setLevel(logging.INFO) <NEW_LINE> if '--debug' in sys.argv or debug: <NEW_LINE> <INDENT> console.setLevel(logging.DEBUG) <NEW_LINE> log.setLevel(logging.DEBUG) <NEW_LINE> <DEDENT> log.addHandler(console) <NEW_LINE> return log
Setup just enough for cement to be able to do debug logging. This is the logger used by the Cement framework, which is setup and accessed before the application is functional (and more importantly before the applications log handler is usable). Required Arguments: name The logging namespace. This is generally '__name__' or anything you want. Optional Arguments: debug Toggle debug output. Default: False Usage: .. code-block:: python from cement.core import backend Log = backend.minimal_logger('cement') Log.debug('This is a debug message')
625941c0566aa707497f44c6
def get_weibo_info(gsid): <NEW_LINE> <INDENT> cookies = {'SUB': gsid} <NEW_LINE> uid = get_uid(gsid) <NEW_LINE> url = f'https://m.weibo.cn/profile/info?uid={uid}' <NEW_LINE> r = requests.get(url, cookies=cookies) <NEW_LINE> try: <NEW_LINE> <INDENT> logging.info(str(r.status_code) + ':' + str(r.json())) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> logging.warning(str(r.status_code) + ':' + r.text) <NEW_LINE> <DEDENT> info = [] <NEW_LINE> for i, j in enumerate(r.json()['data']['statuses']): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> t = j['created_at'] <NEW_LINE> t = time.mktime(time.strptime(' '.join(t.split()[:4] + t.split()[-1:]), '%c')) <NEW_LINE> mid = r.json()['data']['statuses'][i]['mid'] <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> title = r.json()['data']['statuses'][i]['raw_text'][:-2] <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> title = r.json()['data']['statuses'][i]['text'] <NEW_LINE> <DEDENT> info.append({'t': t, 'mid': mid, 'title': title}) <NEW_LINE> <DEDENT> info.sort(key=lambda keys: keys['t'], reverse=True) <NEW_LINE> return info
获取已发微博的信息 :param gsid: :return:
625941c05510c4643540f344
def rel_db_type(self, connection): <NEW_LINE> <INDENT> if connection.features.related_fields_match_type: <NEW_LINE> <INDENT> return self.db_type(connection) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return IntegerField().db_type(connection=connection)
Return the data type that a related field pointing to this field should use. In most cases, a foreign key pointing to a positive integer primary key will have an integer column data type but some databases (e.g. MySQL) have an unsigned integer type. In that case (related_fields_match_type=True), the primary key should return its db_type.
625941c0d268445f265b4dc8
def error_handler(code, msg): <NEW_LINE> <INDENT> class HTTPHandler(http.server.BaseHTTPRequestHandler): <NEW_LINE> <INDENT> def do_GET(self): <NEW_LINE> <INDENT> self.send_error(code, msg) <NEW_LINE> <DEDENT> def log_message(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return HTTPHandler
Return an HTTP handler that always returns the given error code.
625941c0d164cc6175782ca7
def getNrOfProgramsAfterExpansion(agent, suffixListSize): <NEW_LINE> <INDENT> check_for_any_wild = [x.endswith("_W_ALL") for x in agent.colony.A] <NEW_LINE> any_wild_objects = [] <NEW_LINE> for i, val in enumerate(check_for_any_wild): <NEW_LINE> <INDENT> if (val): <NEW_LINE> <INDENT> any_wild_objects.append(agent.colony.A[i]) <NEW_LINE> <DEDENT> <DEDENT> counter = 0 <NEW_LINE> logging.info("wild_ANY objects = %s" % any_wild_objects) <NEW_LINE> for program in agent.programs: <NEW_LINE> <INDENT> wild_exists_in_program = False <NEW_LINE> for rule in program: <NEW_LINE> <INDENT> for obj in any_wild_objects: <NEW_LINE> <INDENT> if (obj == rule.lhs or obj == rule.rhs or obj == rule.alt_lhs or obj == rule.alt_rhs): <NEW_LINE> <INDENT> wild_exists_in_program = True <NEW_LINE> logging.warning("wild_ANY object %s exists in program %s rule %s" % (obj, program.print(), rule.print(toString=True))) <NEW_LINE> break; <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if (wild_exists_in_program): <NEW_LINE> <INDENT> counter += suffixListSize <NEW_LINE> <DEDENT> <DEDENT> return counter + len(agent.programs)
Returns the final number of programs that will result after all programs (within this agent) with * wildcard objects have been expanded :agent: The agent whose programs will checked :suffixListSize: The number of programs that result after expanding a program such as < X_* -> e, e->X_* > if suffixListSize = 2 then we obtain 2 new programs, < X_0 - > e ... > and < X_1 -> e ...> that replace the original one :returns: The final number of programs that will result after expansion
625941c0a8370b77170527fa
def set_legend_on_top(self, legend_on_top): <NEW_LINE> <INDENT> self._legend_on_top = legend_on_top
rief Setter for property legend_on_top \param legend_on_top - bool
625941c0f7d966606f6a9f5b
def is_proc_group_parent(proc) -> bool: <NEW_LINE> <INDENT> if os.uname().sysname == 'Darwin': <NEW_LINE> <INDENT> fproc_names = filter(lambda x: len(x.cmdline()) > 0, proc.children()) <NEW_LINE> procs_names = [ p.cmdline()[0] for p in fproc_names ] <NEW_LINE> procs_names.append(proc.cmdline()[0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> procs_names = [ p.name() for p in proc.children() ] <NEW_LINE> procs_names.append(proc.name()) <NEW_LINE> <DEDENT> if len(procs_names) <= 1: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if fuzzy_sequence_match(procs_names): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False
Checks if process is a group parent
625941c010dbd63aa1bd2b00
def cosh(self): <NEW_LINE> <INDENT> returned = (self.exp() + (-self).exp()) / ComplexDecimal("2") <NEW_LINE> returned.imaginary = -returned.imaginary <NEW_LINE> return returned
Hyperbolic cosine of self
625941c0d486a94d0b98e09f
def _summary_text(self): <NEW_LINE> <INDENT> msg = "" <NEW_LINE> kickstart_timezone = self._timezone_module.Timezone <NEW_LINE> timezone_msg = _("not set") <NEW_LINE> if kickstart_timezone: <NEW_LINE> <INDENT> timezone_msg = kickstart_timezone <NEW_LINE> <DEDENT> msg += _("Timezone: %s\n") % timezone_msg <NEW_LINE> msg += "\n" <NEW_LINE> msg += _("NTP servers:") <NEW_LINE> if self._ntp_servers: <NEW_LINE> <INDENT> for status in format_ntp_status_list(self._ntp_servers): <NEW_LINE> <INDENT> msg += "\n%s" % status <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> msg += _("not configured") <NEW_LINE> <DEDENT> return msg
Return summary of current timezone & NTP configuration. :returns: current status :rtype: str
625941c015baa723493c3ecd
def perform(req, summary): <NEW_LINE> <INDENT> pass
Perform the action. This must return an etree object
625941c007f4c71912b113da
@protocol.commands.add('sendmessage') <NEW_LINE> def sendmessage(context, channel, text): <NEW_LINE> <INDENT> raise exceptions.MpdNotImplemented
*musicpd.org, client to client section:* ``sendmessage {CHANNEL} {TEXT}`` Send a message to the specified channel.
625941c0fbf16365ca6f6119
def is_in_redcap(rc_df, scans_df): <NEW_LINE> <INDENT> scans_df['in_redcap'] = False <NEW_LINE> scans_df['visit_ignore___yes'] = '' <NEW_LINE> scans_df['visit_ignore_why'] = '' <NEW_LINE> scans_df['visit_ignore_why_other'] = '' <NEW_LINE> scans_df['visit_notes'] = '' <NEW_LINE> scans_df['mri_missing'] = '' <NEW_LINE> scans_df['mri_missing_why'] = '' <NEW_LINE> scans_df['mri_missing_why_other'] = '' <NEW_LINE> scans_df['mri_notes'] = '' <NEW_LINE> rc_cases = rc_df[rc_df.mri_xnat_sid.isin(scans_df.case)] <NEW_LINE> for idx, row in rc_cases.iterrows(): <NEW_LINE> <INDENT> scan_cases = scans_df[scans_df.case == row.mri_xnat_sid] <NEW_LINE> scans_df.in_redcap.loc[scan_cases.index] = True <NEW_LINE> scans_df.visit_ignore___yes.loc[scan_cases.index] = row.visit_ignore___yes <NEW_LINE> scans_df.visit_ignore_why.loc[scan_cases.index] = row.visit_ignore_why <NEW_LINE> scans_df.visit_ignore_why_other.loc[scan_cases.index] = row.visit_ignore_why_other <NEW_LINE> scans_df.visit_notes.loc[scan_cases.index] = row.visit_notes <NEW_LINE> scans_df.mri_missing.loc[scan_cases.index] = row.mri_missing <NEW_LINE> scans_df.mri_missing_why.loc[scan_cases.index] = row.mri_missing_why <NEW_LINE> scans_df.mri_missing_why_other.loc[scan_cases.index] = row.mri_missing_why_other <NEW_LINE> scans_df.mri_notes.loc[scan_cases.index] = row.mri_notes <NEW_LINE> <DEDENT> return scans_df
Checks if the scans missing in the pipeline are listed in REDCap and adds a column indicating as such.
625941c0435de62698dfdba6
def unique_id(self): <NEW_LINE> <INDENT> return _gsm.gsm_run_bb_sptr_unique_id(self)
unique_id(self) -> long
625941c0236d856c2ad44731
@app.route("/api/post/<int:id>", methods=["DELETE"]) <NEW_LINE> @decorators.accept("application/json") <NEW_LINE> def post_delete(id): <NEW_LINE> <INDENT> post = session.query(models.Post).get(id) <NEW_LINE> if not post: <NEW_LINE> <INDENT> message = "Could not find post with id {}".format(id) <NEW_LINE> data = json.dumps({"message": message}) <NEW_LINE> return Response(data, 404, mimetype="application/json") <NEW_LINE> <DEDENT> session.delete(post) <NEW_LINE> session.commit() <NEW_LINE> message = "Deleted post with id {} from the database".format(id) <NEW_LINE> data = json.dumps({"message": message}) <NEW_LINE> return Response(data, 200, mimetype="application/json")
Delete single post endpoint
625941c08c3a873295158312
def norm(vec) -> float: <NEW_LINE> <INDENT> sum_of_squares = 0.0 <NEW_LINE> for x in vec: <NEW_LINE> <INDENT> sum_of_squares += vec[x] * vec[x] <NEW_LINE> <DEDENT> return math.sqrt(sum_of_squares)
Return the norm of a vector stored as a dictionary, as described in the handout for Project 3. Norm is defined as
625941c030bbd722463cbd1e
def smartquotes(text): <NEW_LINE> <INDENT> text = unicode(text) <NEW_LINE> output = smartypants.smartypants(text) <NEW_LINE> return output
Applies smarty pants to curl quotes. >>> smartquotes('The "Green" man') u'The &#8220;Green&#8221; man'
625941c03c8af77a43ae36f8
def LoadFromFolder(self, path, pattern='*.yml', variables={}, verbose=False): <NEW_LINE> <INDENT> for root, _, files in os.walk(path): <NEW_LINE> <INDENT> for filename in files: <NEW_LINE> <INDENT> absfilename = os.path.join(root, filename) <NEW_LINE> if fnmatch.fnmatch(absfilename, pattern): <NEW_LINE> <INDENT> self.Load(absfilename, merge=True, variables=variables, verbose=verbose)
For conf.d/ stuff.
625941c08e71fb1e9831d704
def strfdate_ics(date): <NEW_LINE> <INDENT> return date.strftime('%Y%m%d')
Converter data ics pra string
625941c045492302aab5e21b
@qgl2decl <NEW_LINE> def CRtomo_seq(controlQ: qreg, targetQ: qreg, lengths, ph, amp=0.8, riseFall=20e-9): <NEW_LINE> <INDENT> cNt = QRegister(controlQ, targetQ) <NEW_LINE> tomo_pulses = [Y90m, X90, Id] <NEW_LINE> for l, tomo_pulse in product(lengths, tomo_pulses): <NEW_LINE> <INDENT> init(cNt) <NEW_LINE> Id(controlQ) <NEW_LINE> flat_top_gaussian_edge(controlQ, targetQ, riseFall=riseFall, length=l, amp=amp, phase=ph, label="CR") <NEW_LINE> Barrier(cNt) <NEW_LINE> Id(controlQ) <NEW_LINE> tomo_pulse(targetQ) <NEW_LINE> MEAS(targetQ) <NEW_LINE> <DEDENT> for l, tomo_pulse in product(lengths, tomo_pulses): <NEW_LINE> <INDENT> init(cNt) <NEW_LINE> X(controlQ) <NEW_LINE> flat_top_gaussian_edge(controlQ, targetQ, riseFall=riseFall, length=l, amp=amp, phase=ph, label="CR") <NEW_LINE> Barrier(cNt) <NEW_LINE> X(controlQ) <NEW_LINE> tomo_pulse(targetQ) <NEW_LINE> MEAS(targetQ) <NEW_LINE> <DEDENT> create_cal_seqs(targetQ, 2)
Variable length CX experiment, for Hamiltonian tomography. Parameters ---------- controlQ : logical channel for the control qubit (LogicalChannel) targetQ: logical channel for the target qubit (LogicalChannel) lengths : pulse lengths of the CR pulse to sweep over (iterable) riseFall : rise/fall time of the CR pulse (s) ph : phase of the CR pulse (rad)
625941c02c8b7c6e89b3571c
def testFrancMultiplication(): <NEW_LINE> <INDENT> assert Franc(10) == FiveTimes(2) <NEW_LINE> assert Franc(15) == FiveTimes(3)
no explanation needed
625941c00a50d4780f666dea
def __init__(self, X, parents, cpt): <NEW_LINE> <INDENT> if isinstance(parents, str): <NEW_LINE> <INDENT> parents = parents.split() <NEW_LINE> <DEDENT> if isinstance(cpt, (float, int)): <NEW_LINE> <INDENT> cpt = {(): cpt} <NEW_LINE> <DEDENT> elif isinstance(cpt, dict): <NEW_LINE> <INDENT> if cpt and isinstance(list(cpt.keys())[0], bool): <NEW_LINE> <INDENT> cpt = dict(((v,), p) for v, p in list(cpt.items())) <NEW_LINE> <DEDENT> <DEDENT> assert isinstance(cpt, dict) <NEW_LINE> for vs, p in list(cpt.items()): <NEW_LINE> <INDENT> assert isinstance(vs, tuple) and len(vs) == len(parents) <NEW_LINE> assert every(lambda v: isinstance(v, bool), vs) <NEW_LINE> assert 0 <= p <= 1 <NEW_LINE> <DEDENT> self.variable = X <NEW_LINE> self.parents = parents <NEW_LINE> self.cpt = cpt <NEW_LINE> self.children = []
X is a variable name, and parents a sequence of variable names or a space-separated string. cpt, the conditional probability table, takes one of these forms: * A number, the unconditional probability P(X=true). You can use this form when there are no parents. * A dict {v: p, ...}, the conditional probability distribution P(X=true | parent=v) = p. When there's just one parent. * A dict {(v1, v2, ...): p, ...}, the distribution P(X=true | parent1=v1, parent2=v2, ...) = p. Each key must have as many values as there are parents. You can use this form always; the first two are just conveniences. In all cases the probability of X being false is left implicit, since it follows from P(X=true). >>> X = BayesNode('X', '', 0.2) >>> Y = BayesNode('Y', 'P', {T: 0.2, F: 0.7}) >>> Z = BayesNode('Z', 'P Q', ... {(T, T): 0.2, (T, F): 0.3, (F, T): 0.5, (F, F): 0.7})
625941c03d592f4c4ed1cfcd
def findSubstring(self, s, words): <NEW_LINE> <INDENT> if not s or not any(words) or len(s) < len(words[0]) * len(words): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> words_count = collections.Counter(map(tuple, words)) <NEW_LINE> def check(i, l, word_len): <NEW_LINE> <INDENT> cur = collections.Counter() <NEW_LINE> iters = [iter(s[i:i + l])] * word_len <NEW_LINE> for word in zip(*iters): <NEW_LINE> <INDENT> cur[word] += 1 <NEW_LINE> if cur[word] > words_count[word]: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> word_len, k = len(words[0]), len(words) <NEW_LINE> l = word_len * k <NEW_LINE> return [i for i in range(len(s) + 1 - l) if check(i, l, word_len)]
:type s: str :type words: List[str] :rtype: List[int]
625941c0462c4b4f79d1d62a
def setBarHeight(self, barHeight): <NEW_LINE> <INDENT> self.barHeight = barHeight
Sets the bar height (float).
625941c06aa9bd52df036cfd
def test_no_notifications_not_monitoring(self): <NEW_LINE> <INDENT> with responses.RequestsMock() as rsps: <NEW_LINE> <INDENT> self.login(rsps) <NEW_LINE> rsps.add( rsps.GET, api_url('/events/pages/') + f'?rule=MONP&rule=MONS&offset=0&limit={SECURITY_FORMS_DEFAULT_PAGE_SIZE}', json={'count': 0, 'newest': None, 'oldest': None}, match_querystring=True ) <NEW_LINE> rsps.add( rsps.GET, api_url('/monitored/'), json={'count': 0}, ) <NEW_LINE> rsps.add( rsps.GET, api_url('/events/'), json={'count': 0, 'results': []}, ) <NEW_LINE> response = self.client.get(reverse('security:notification_list')) <NEW_LINE> <DEDENT> self.assertEqual(response.status_code, 200) <NEW_LINE> response_content = response.content.decode(response.charset) <NEW_LINE> self.assertIn('You’re not monitoring anything at the moment', response_content) <NEW_LINE> self.assertIn('0 results', response_content)
Expect to see a message if you're not monitoring anything and have no notifications
625941c021bff66bcd6848af
def append_series(self, avgs, avgs_bran, series_config): <NEW_LINE> <INDENT> for avg in avgs: <NEW_LINE> <INDENT> for avg_bran in avgs_bran: <NEW_LINE> <INDENT> if avg[0] == avg_bran[0] and avg[1] != 0 and avg_bran[1] != 0: <NEW_LINE> <INDENT> series_config["data"].append( [avg[0], avg[1] / avg_bran[1]])
append series with ratio values by doing division Args: avgs (array): is being passed from load_data and is array where averaged values are avgs_bran (array): is being passed from load_data and is array where averaged bran values are series_config (dict) we pass our config dictionary
625941c04428ac0f6e5ba74b
def resample(self): <NEW_LINE> <INDENT> underx = self.x[self.y == self.minc] <NEW_LINE> undery = self.y[self.y == self.minc] <NEW_LINE> if self.indices_support: <NEW_LINE> <INDENT> idx_under = np.nonzero(self.y == self.minc)[0] <NEW_LINE> <DEDENT> for key in self.ucd.keys(): <NEW_LINE> <INDENT> if key == self.minc: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if self.ratio * self.ucd[self.minc] > self.ucd[key]: <NEW_LINE> <INDENT> num_samples = self.ucd[key] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> num_samples = int(self.ratio * self.ucd[self.minc]) <NEW_LINE> <DEDENT> seed(self.rs) <NEW_LINE> if self.replacement: <NEW_LINE> <INDENT> indx = randint(low=0, high=self.ucd[key], size=num_samples) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> indx = sample(range((self.y == key).sum()), num_samples) <NEW_LINE> <DEDENT> if self.indices_support: <NEW_LINE> <INDENT> idx_tmp = np.nonzero(self.y == key)[0][indx] <NEW_LINE> idx_under = np.concatenate((idx_under, idx_tmp), axis=0) <NEW_LINE> <DEDENT> underx = concatenate((underx, self.x[self.y == key][indx]), axis=0) <NEW_LINE> undery = concatenate((undery, self.y[self.y == key][indx]), axis=0) <NEW_LINE> <DEDENT> if self.verbose: <NEW_LINE> <INDENT> print("Under-sampling performed: " + str(Counter(undery))) <NEW_LINE> <DEDENT> if self.indices_support: <NEW_LINE> <INDENT> return underx, undery, idx_under <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return underx, undery
...
625941c0187af65679ca5078
def rank_uids(uids): <NEW_LINE> <INDENT> uid_rank = [[(uids[uid][0][i], uid) for uid in uids] for i in range(8)] <NEW_LINE> for i in range(8): <NEW_LINE> <INDENT> uid_rank[i].sort(key=lambda tup: tup[0], reverse=True) <NEW_LINE> <DEDENT> return uid_rank
Sort uids based on eight different io stats. Returns: uid_rank is a 2d list of tuples: The first dimension represent the 8 different io stats. The second dimension is a sorted list of tuples by tup[0], each tuple is a uid's perticular stat at the first dimension and the uid.
625941c0566aa707497f44c7
def create_plot(): <NEW_LINE> <INDENT> self.UI_figure = Figure() <NEW_LINE> self.UI_canvas = FigureCanvasQTAgg(self.UI_figure) <NEW_LINE> toolbar = NavigationToolbar2QT(self.UI_canvas, self) <NEW_LINE> plotL = QtGui.QVBoxLayout(spacing=0) <NEW_LINE> plotL.setContentsMargins(0, 0, 0, 0) <NEW_LINE> plotL.addWidget(self.UI_canvas, 1) <NEW_LINE> plotL.addWidget(toolbar) <NEW_LINE> plotW = QtGui.QWidget() <NEW_LINE> plotW.setLayout(plotL) <NEW_LINE> plotD = QtGui.QDockWidget( "Plot", objectName="Plot", features=(QtGui.QDockWidget.DockWidgetFloatable | QtGui.QDockWidget.DockWidgetMovable) ) <NEW_LINE> plotD.setWidget(plotW) <NEW_LINE> return plotD
Create the plot.
625941c015baa723493c3ece
def on_showhide_console(self, event=None): <NEW_LINE> <INDENT> show = not self.frame_console.IsShown() <NEW_LINE> if show and not self.frame_console_shown: <NEW_LINE> <INDENT> self.frame_console_shown = True <NEW_LINE> size = wx.Size(self.Size.width, max(200, self.Size.height / 3)) <NEW_LINE> self.frame_console.Size = size <NEW_LINE> display = wx.GetDisplaySize() <NEW_LINE> y = 0 <NEW_LINE> min_bottom_space = 130 <NEW_LINE> if size.height > display.height - self.Size.height - self.Position.y - min_bottom_space: <NEW_LINE> <INDENT> y = display.height - self.Size.height - self.Position.y - size.height - min_bottom_space <NEW_LINE> <DEDENT> self.frame_console.Position = ( self.Position.x, self.Position.y + self.Size.height + y ) <NEW_LINE> <DEDENT> if show: self.console.ScrollToLine(self.console.LineCount + 3 - ( self.console.Size.height / self.console.GetTextExtent(" ")[1] )) <NEW_LINE> self.frame_console.Show(show) <NEW_LINE> self.frame_console.Iconize(False) <NEW_LINE> if hasattr(self, "menu_console"): self.menu_console.Check(show)
Toggles the console shown/hidden.
625941c09f2886367277a7e9
def list_jobs( self, resource_group_name, resource_name, custom_headers=None, raw=False, **operation_config): <NEW_LINE> <INDENT> def internal_paging(next_link=None, raw=False): <NEW_LINE> <INDENT> if not next_link: <NEW_LINE> <INDENT> url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/jobs' <NEW_LINE> path_format_arguments = { 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'resourceName': self._serialize.url("resource_name", resource_name, 'str') } <NEW_LINE> url = self._client.format_url(url, **path_format_arguments) <NEW_LINE> query_parameters = {} <NEW_LINE> query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> url = next_link <NEW_LINE> query_parameters = {} <NEW_LINE> <DEDENT> header_parameters = {} <NEW_LINE> header_parameters['Content-Type'] = 'application/json; charset=utf-8' <NEW_LINE> if self.config.generate_client_request_id: <NEW_LINE> <INDENT> header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) <NEW_LINE> <DEDENT> if custom_headers: <NEW_LINE> <INDENT> header_parameters.update(custom_headers) <NEW_LINE> <DEDENT> if self.config.accept_language is not None: <NEW_LINE> <INDENT> header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') <NEW_LINE> <DEDENT> request = self._client.get(url, query_parameters) <NEW_LINE> response = self._client.send( request, header_parameters, **operation_config) <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> raise models.ErrorDetailsException(self._deserialize, response) <NEW_LINE> <DEDENT> return response <NEW_LINE> <DEDENT> deserialized = models.JobResponsePaged(internal_paging, 
self._deserialize.dependencies) <NEW_LINE> if raw: <NEW_LINE> <INDENT> header_dict = {} <NEW_LINE> client_raw_response = models.JobResponsePaged(internal_paging, self._deserialize.dependencies, header_dict) <NEW_LINE> return client_raw_response <NEW_LINE> <DEDENT> return deserialized
Get a list of all the jobs in an IoT hub. For more information, see: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry. Get a list of all the jobs in an IoT hub. For more information, see: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry. :param resource_group_name: The name of the resource group that contains the IoT hub. :type resource_group_name: str :param resource_name: The name of the IoT hub. :type resource_name: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. :return: An iterator like instance of :class:`JobResponse <azure.mgmt.iothub.models.JobResponse>` :rtype: :class:`JobResponsePaged <azure.mgmt.iothub.models.JobResponsePaged>` :raises: :class:`ErrorDetailsException<azure.mgmt.iothub.models.ErrorDetailsException>`
625941c0e1aae11d1e749c0f
@app.route("/student-add", methods=['POST']) <NEW_LINE> def student_add(): <NEW_LINE> <INDENT> firstname = request.form.get('firstname') <NEW_LINE> lastname = request.form.get('lastname') <NEW_LINE> github = request.form.get('github') <NEW_LINE> hackbright.make_new_student(firstname, lastname, github) <NEW_LINE> return "The student information has been successfully added!"
Add a student
625941c0442bda511e8be376
def __lt__(self, other): <NEW_LINE> <INDENT> tree_opts = self._mptt_meta <NEW_LINE> self_tree_id, self_tree_left = getattr(self, tree_opts.tree_id_attr), getattr(self, tree_opts.left_attr) <NEW_LINE> other_tree_id, other_tree_left = getattr(other, tree_opts.tree_id_attr), getattr(other, tree_opts.left_attr) <NEW_LINE> if self_tree_id == other_tree_id: <NEW_LINE> <INDENT> return self_tree_left < other_tree_left <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self_tree_id < other_tree_id
RUS: Сравнивает узлы витрины данных для организации сортировки согласно структуре дерева.
625941c0ad47b63b2c509eda
def on_start(self, event): <NEW_LINE> <INDENT> if not self.state.configured: <NEW_LINE> <INDENT> logging.warning("Start called before configuration complete, deferring event: {}".format(event.handle)) <NEW_LINE> self._defer_once(event) <NEW_LINE> return <NEW_LINE> <DEDENT> self.unit.status = MaintenanceStatus("Starting charm software") <NEW_LINE> self.unit.status = ActiveStatus("Unit is ready") <NEW_LINE> self.state.started = True <NEW_LINE> logging.info("Started")
Handle start state.
625941c0377c676e91272104
def pulsePin(pin, msWidth, isPositive): <NEW_LINE> <INDENT> pass
Apply pulse to Output pin
625941c085dfad0860c3adb4
def close(self): <NEW_LINE> <INDENT> self.io.close()
Close opened io file object.
625941c0460517430c3940e5
def get_client_instance(opts={}, api_version=None): <NEW_LINE> <INDENT> return get_client_class(api_version)(**opts)
Get Freezerclient Instance. We will the provided auth dict to instantiate a client instance Returns freezerclient.v{x}.client.Client Object :return: Object
625941c0ec188e330fd5a6fe
def __init__(self, length_multi, input_dim=0, min=False): <NEW_LINE> <INDENT> super(ToMultipleLength, self).__init__() <NEW_LINE> self.min = min <NEW_LINE> self.length_multi = length_multi <NEW_LINE> self.input_size = input_dim
Constructor :param length: Length multiple
625941c0c4546d3d9de7298c
def forward(self, predictions, targets): <NEW_LINE> <INDENT> loc_data, conf_data, priors = predictions <NEW_LINE> num = loc_data.size(0) <NEW_LINE> priors = priors[:loc_data.size(1), :] <NEW_LINE> num_priors = (priors.size(0)) <NEW_LINE> num_classes = self.num_classes <NEW_LINE> loc_t = torch.Tensor(num, num_priors, 4) <NEW_LINE> conf_t = torch.LongTensor(num, num_priors) <NEW_LINE> for idx in range(num): <NEW_LINE> <INDENT> truths = targets[idx][:, :-1].data <NEW_LINE> labels = targets[idx][:, -1].data <NEW_LINE> defaults = priors.data <NEW_LINE> match(self.threshold, truths, defaults, self.variance, labels, loc_t, conf_t, idx) <NEW_LINE> <DEDENT> if self.use_gpu: <NEW_LINE> <INDENT> loc_t = loc_t.cuda() <NEW_LINE> conf_t = conf_t.cuda() <NEW_LINE> <DEDENT> loc_t = Variable(loc_t, requires_grad=False) <NEW_LINE> conf_t = Variable(conf_t, requires_grad=False) <NEW_LINE> pos = conf_t > 0 <NEW_LINE> num_pos = pos.sum(dim=1, keepdim=True) <NEW_LINE> pos_idx = pos.unsqueeze(pos.dim()).expand_as(loc_data) <NEW_LINE> loc_p = loc_data[pos_idx].view(-1, 4) <NEW_LINE> loc_t = loc_t[pos_idx].view(-1, 4) <NEW_LINE> loss_l = F.smooth_l1_loss(loc_p, loc_t, size_average=False) <NEW_LINE> batch_conf = conf_data.view(-1, self.num_classes) <NEW_LINE> loss_c = log_sum_exp(batch_conf) - batch_conf.gather(1, conf_t.view(-1, 1)) <NEW_LINE> loss_c[pos] = 0 <NEW_LINE> loss_c = loss_c.view(num, -1) <NEW_LINE> _, loss_idx = loss_c.sort(1, descending=True) <NEW_LINE> _, idx_rank = loss_idx.sort(1) <NEW_LINE> num_pos = pos.long().sum(1, keepdim=True) <NEW_LINE> num_neg = torch.clamp(self.negpos_ratio*num_pos, max=pos.size(1)-1) <NEW_LINE> neg = idx_rank < num_neg.expand_as(idx_rank) <NEW_LINE> pos_idx = pos.unsqueeze(2).expand_as(conf_data) <NEW_LINE> neg_idx = neg.unsqueeze(2).expand_as(conf_data) <NEW_LINE> conf_p = conf_data[(pos_idx+neg_idx).gt(0)].view(-1, self.num_classes) <NEW_LINE> targets_weighted = conf_t[(pos+neg).gt(0)] <NEW_LINE> loss_c = F.cross_entropy(conf_p, 
targets_weighted, size_average=False, weight=torch.Tensor([1.5, 1])) <NEW_LINE> N = num_pos.data.sum() <NEW_LINE> loss_l /= N <NEW_LINE> loss_c /= N <NEW_LINE> return loss_l, loss_c
Multibox Loss Args: predictions (tuple): A tuple containing loc preds, conf preds, and prior boxes from SSD net. conf shape: torch.size(batch_size,num_priors,num_classes) loc shape: torch.size(batch_size,num_priors,4) priors shape: torch.size(num_priors,4) ground_truth (tensor): Ground truth boxes and labels for a batch, shape: [batch_size,num_objs,5] (last idx is the label).
625941c0a4f1c619b28aff99
def upload_data(self, name, data): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> res = self.bucket.put_object(name, data) <NEW_LINE> if res.status == 200: <NEW_LINE> <INDENT> img_url = self.make_url(name) <NEW_LINE> return img_url <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.error(str(e))
上传数据
625941c07047854f462a1367
def _TestCase3(iTolerance): <NEW_LINE> <INDENT> gen = rxcs.sig.randMult() <NEW_LINE> gen.tS = 1 <NEW_LINE> gen.fR = 1e6 <NEW_LINE> gen.fMax = 20e3 <NEW_LINE> gen.fRes = 1 <NEW_LINE> gen.iSNR = 5 <NEW_LINE> gen.vFrqs = np.array([1e3, np.nan, np.nan]) <NEW_LINE> gen.vAmps = np.array([0.5, 1, 10]) <NEW_LINE> gen.vPhs = np.array([78, np.nan, np.nan]) <NEW_LINE> gen.nTones = 20 <NEW_LINE> gen.iMinAmp = 5.0 <NEW_LINE> gen.iGraAmp = 0.1 <NEW_LINE> gen.iMaxAmp = 10.0 <NEW_LINE> gen.iMinPhs = 1 <NEW_LINE> gen.iGraPhs = 1 <NEW_LINE> gen.iMaxPhs = 20 <NEW_LINE> gen.nSigs = int(1e2) <NEW_LINE> gen.bMute = 1 <NEW_LINE> _checkSNR(gen, iTolerance) <NEW_LINE> return
This is test case function #3. |br| The function sets up the configuration dictionary for the Random Multitone Signal Generator and runs the engine of the test. Args: iTolerance: maximum tolerance of a difference between an expected value and a real value Returns: Nothing
625941c08e05c05ec3eea2cd
def GetDriveSpace(self, path): <NEW_LINE> <INDENT> if sys.platform[:5] == "linux": <NEW_LINE> <INDENT> st = os.statvfs(path) <NEW_LINE> return '%.1f' % ((st.f_bavail * st.f_frsize) / 1024/1024/1024) + " GB" <NEW_LINE> <DEDENT> elif sys.platform[:5] == "win32": <NEW_LINE> <INDENT> drive = os.getenv(path) <NEW_LINE> freeuser = ctypes.c_int64() <NEW_LINE> total = ctypes.c_int64() <NEW_LINE> free = ctypes.c_int64() <NEW_LINE> ctypes.windll.kernel32.GetDiskFreeSpaceExW(drive, ctypes.byref(freeuser), ctypes.byref(total), ctypes.byref(free)) <NEW_LINE> return str('%.1f' % (free.value/1024/1024/1024)) + " GB" <NEW_LINE> <DEDENT> return 0
Returns the amount of free space, in gigabytes, on the drive containing the provided path.
625941c01b99ca400220aa0b
@when_any('kubernetes-master.components.started', 'ceph-storage.configured') <NEW_LINE> @when('leadership.is_leader') <NEW_LINE> def configure_cdk_addons(): <NEW_LINE> <INDENT> remove_state('cdk-addons.configured') <NEW_LINE> load_gpu_plugin = hookenv.config('enable-nvidia-plugin').lower() <NEW_LINE> gpuEnable = (get_version('kube-apiserver') >= (1, 9) and load_gpu_plugin == "auto" and is_state('kubernetes-master.gpu.enabled')) <NEW_LINE> registry = hookenv.config('addons-registry') <NEW_LINE> dbEnabled = str(hookenv.config('enable-dashboard-addons')).lower() <NEW_LINE> dnsEnabled = str(hookenv.config('enable-kube-dns')).lower() <NEW_LINE> metricsEnabled = str(hookenv.config('enable-metrics')).lower() <NEW_LINE> if (is_state('ceph-storage.configured') and get_version('kube-apiserver') >= (1, 10)): <NEW_LINE> <INDENT> cephEnabled = "true" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cephEnabled = "false" <NEW_LINE> <DEDENT> ceph_ep = endpoint_from_flag('ceph-storage.available') <NEW_LINE> ceph = {} <NEW_LINE> default_storage = '' <NEW_LINE> if ceph_ep: <NEW_LINE> <INDENT> b64_ceph_key = base64.b64encode(ceph_ep.key().encode('utf-8')) <NEW_LINE> ceph['admin_key'] = b64_ceph_key.decode('ascii') <NEW_LINE> ceph['kubernetes_key'] = b64_ceph_key.decode('ascii') <NEW_LINE> ceph['mon_hosts'] = ceph_ep.mon_hosts() <NEW_LINE> default_storage = hookenv.config('default-storage') <NEW_LINE> <DEDENT> args = [ 'arch=' + arch(), 'dns-ip=' + get_deprecated_dns_ip(), 'dns-domain=' + hookenv.config('dns_domain'), 'registry=' + registry, 'enable-dashboard=' + dbEnabled, 'enable-kube-dns=' + dnsEnabled, 'enable-metrics=' + metricsEnabled, 'enable-gpu=' + str(gpuEnable).lower(), 'enable-ceph=' + cephEnabled, 'ceph-admin-key=' + (ceph.get('admin_key', '')), 'ceph-kubernetes-key=' + (ceph.get('admin_key', '')), 'ceph-mon-hosts="' + (ceph.get('mon_hosts', '')) + '"', 'default-storage=' + default_storage, ] <NEW_LINE> check_call(['snap', 'set', 'cdk-addons'] + args) <NEW_LINE> if not 
addons_ready(): <NEW_LINE> <INDENT> remove_state('cdk-addons.configured') <NEW_LINE> return <NEW_LINE> <DEDENT> set_state('cdk-addons.configured')
Configure CDK addons
625941c026238365f5f0edc6
def requestedPresenceSubscriptionTo(self): <NEW_LINE> <INDENT> return QUrl()
static QUrl Nepomuk.Vocabulary.NCO.requestedPresenceSubscriptionTo()
625941c07d847024c06be214
def relink_sharedobjects(pkg_path, build_prefix): <NEW_LINE> <INDENT> bin_files = rec_glob(pkg_path, ['.so']) <NEW_LINE> for b_file in bin_files: <NEW_LINE> <INDENT> if sys.platform == 'darwin': <NEW_LINE> <INDENT> mk_relative_osx(b_file, build_prefix) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Nothing to do on Linux or Windows.")
invokes functions in post module to relink to libraries in conda env :param pkg_path: look for shared objects to relink in pkg_path :param build_prefix: path to conda environment which contains lib/. to find runtime libraries. .. note:: develop mode builds the extensions in place and makes a link to package in site-packages/. The build_prefix points to conda environment since runtime libraries should be loaded from environment's lib/. first
625941c06e29344779a6256f
def distance_matrix(self, D): <NEW_LINE> <INDENT> R = torch.sum(torch.mul(D, D), 3) <NEW_LINE> R = torch.sqrt(R) <NEW_LINE> return R
Calcuates the distance matrix from the distance tensor B = batch_size, N = max_num_atoms, M = max_num_neighbors, d = num_features Parameters ---------- D: torch.Tensor of shape (B, N, M, d) Distance tensor. Returns ------- R: torch.Tensor of shape (B, N, M) Distance matrix.
625941c04a966d76dd550f68
def update_settings(self, clean_voltage, clean_time, electroplate_voltage, plating_time, end_voltage, sweep_rate, sweep_type=0, pulse_height=50, pulse_inc=10, pulse_width=100): <NEW_LINE> <INDENT> self.clean_volt = clean_voltage <NEW_LINE> self.clean_time = clean_time <NEW_LINE> self.plate_volt = electroplate_voltage <NEW_LINE> self.plate_time = plating_time <NEW_LINE> self.end_voltage = end_voltage <NEW_LINE> self.low_voltage = self.plate_volt <NEW_LINE> self.high_voltage = self.end_voltage <NEW_LINE> self.sweep_rate = sweep_rate <NEW_LINE> self.delay_time = 500 + abs(self.high_voltage - self.low_voltage) / self.sweep_rate <NEW_LINE> self.pulse_height = pulse_height <NEW_LINE> self.pulse_inc = pulse_inc <NEW_LINE> self.pulse_width = pulse_width <NEW_LINE> if sweep_type == 1: <NEW_LINE> <INDENT> self.sweep_type = "DPV" <NEW_LINE> self.delay_time = (500 + self.pulse_width * abs(self.high_voltage - self.low_voltage) / self.pulse_inc) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.sweep_type = "LS"
Update the CV settings :param clean_voltage: int - mV, voltage to hold the working electrode at to remove plated ions :param clean_time: int - seconds, time to use the cleaning voltage :param electroplate_voltage: int - mV, voltage to hold the working electrode at to have the metal ions plate onto the electrode :param plating_time: int - seconds, time to hold the electrode at the plating potential :param end_voltage: int - mV, voltage the user wants to end the cyclic voltammetry at :param sweep_rate: float - V/s, rate of change of the cyclic voltammetry
625941c0d99f1b3c44c674ef
def test_build(self): <NEW_LINE> <INDENT> model = 'Mymodel' <NEW_LINE> with unittest.mock.patch( 'gandy.models.models.UncertaintyModel._build', return_value=model ) as mocked__build: <NEW_LINE> <INDENT> subject = mds.UncertaintyModel((1,), (1,), keyword=5) <NEW_LINE> mocked__build.assert_called_once_with(keyword=5) <NEW_LINE> self.assertTrue(subject.model is model) <NEW_LINE> <DEDENT> return
Test the parent build method, to make sure it executes protected method
625941c0004d5f362079a290
def stop_init(self, drive, adapter): <NEW_LINE> <INDENT> cmd = [] <NEW_LINE> cmd.append("-Stop") <NEW_LINE> cmd.append("-L{0}".format(drive)) <NEW_LINE> if isinstance(adapter, int): <NEW_LINE> <INDENT> cmd.append("-a{0}".format(adapter)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Logical drive's adapter ID must be type int") <NEW_LINE> <DEDENT> return self.execute("-LDInit {0}".format(' '.join(cmd)))
Stops initialization on a logical drive :param drive: specifies the logical drive to stop initializion :type drive: string :param adapter: specifies the drive's controller :type adapter: int :param full: specifies whether to do a full initialize :type full: bool :return: MegaCLI command output :rtype: string
625941c0eab8aa0e5d26dab2
def __init__(self,setting,model=None): <NEW_LINE> <INDENT> self.model = model <NEW_LINE> self.embedding_type = setting.EMBEDDING_TYPE <NEW_LINE> self.path=setting.TEST_DATA_PATH <NEW_LINE> self.raw_text = setting.TEST_ON_RAW_TEXT
Given the needed parameters for a database connection to a load network on mongodb :return:
625941c0287bf620b61d39c0
def __init__(self, incoming, num_filters, filter_size, stride=1, pad="VALID", untie_biases=False, W=XavierUniformInitializer(), b=tf.zeros_initializer, nonlinearity=tf.nn.relu, n=None, **kwargs): <NEW_LINE> <INDENT> super(BaseConvLayer, self).__init__(incoming, **kwargs) <NEW_LINE> if nonlinearity is None: <NEW_LINE> <INDENT> self.nonlinearity = tf.identity <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.nonlinearity = nonlinearity <NEW_LINE> <DEDENT> if n is None: <NEW_LINE> <INDENT> n = len(self.input_shape) - 2 <NEW_LINE> <DEDENT> elif n != len(self.input_shape) - 2: <NEW_LINE> <INDENT> raise ValueError("Tried to create a %dD convolution layer with " "input shape %r. Expected %d input dimensions " "(batchsize, channels, %d spatial dimensions)." % (n, self.input_shape, n + 2, n)) <NEW_LINE> <DEDENT> self.n = n <NEW_LINE> self.num_filters = num_filters <NEW_LINE> self.filter_size = as_tuple(filter_size, n, int) <NEW_LINE> self.stride = as_tuple(stride, n, int) <NEW_LINE> self.untie_biases = untie_biases <NEW_LINE> self.pad = pad <NEW_LINE> if pad == 'SAME': <NEW_LINE> <INDENT> if any(s % 2 == 0 for s in self.filter_size): <NEW_LINE> <INDENT> raise NotImplementedError( '`same` padding requires odd filter size.') <NEW_LINE> <DEDENT> <DEDENT> self.W = self.add_param(W, self.get_W_shape(), name="W") <NEW_LINE> if b is None: <NEW_LINE> <INDENT> self.b = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.untie_biases: <NEW_LINE> <INDENT> biases_shape = self.output_shape[1:3] + (num_filters,) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> biases_shape = (num_filters,) <NEW_LINE> <DEDENT> self.b = self.add_param(b, biases_shape, name="b", regularizable=False)
Input is assumed to be of shape batch*height*width*channels
625941c0b5575c28eb68df5a
def _get_lint_commands(rule_details): <NEW_LINE> <INDENT> lint_commands = [] <NEW_LINE> src_files = [su.get_relative_path(rule_details[su.POSSIBLE_PREFIXES_KEY], f) for f in rule_details[su.SRCS_KEY]] <NEW_LINE> for file_path in src_files: <NEW_LINE> <INDENT> if INIT_FILE_NAME == os.path.basename(file_path): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> lint_commands.append( [su.PYTHON_PYLINT_CHECK, '--rcfile=' + su.PYLINT_RC_FILE, file_path]) <NEW_LINE> pep8_command = su.PEP8_COMMAND_LINE[:] <NEW_LINE> pep8_command.append(file_path) <NEW_LINE> lint_commands.append(pep8_command) <NEW_LINE> <DEDENT> return lint_commands
Get lint and other static code check commands from sources.
625941c0be7bc26dc91cd55f
def getData(self): <NEW_LINE> <INDENT> return self._data
Return a reference to the data inside the Grid. :returns: A reference to a 2D numpy array.
625941c055399d3f0558860e
def betterEvaluationFunction(currentGameState): <NEW_LINE> <INDENT> currentPosition = currentGameState.getPacmanPosition() <NEW_LINE> foods = currentGameState.getFood().asList() <NEW_LINE> foodDistances = [manhattanDistance(f, currentPosition) for f in foods] <NEW_LINE> minFood = min(foodDistances) if foodDistances else 0 <NEW_LINE> avgDistance = sum(foodDistances) / len(foodDistances) if foodDistances else 0 <NEW_LINE> ghosts = currentGameState.getGhostPositions() <NEW_LINE> ghostDistances = [manhattanDistance(g, currentPosition) for g in ghosts] <NEW_LINE> minGhost = min(ghostDistances) if ghostDistances else 0 <NEW_LINE> capsules = currentGameState.getCapsules() <NEW_LINE> capsulesDistances = [manhattanDistance(c, currentPosition) for c in capsules] <NEW_LINE> minCapsules = min(capsulesDistances) if capsulesDistances else 0 <NEW_LINE> score = - (1000 * len(foods) + 800 * len(capsules) + 100 * minFood + 10 * avgDistance ) + 10 * minGhost + 100 * currentGameState.getScore() <NEW_LINE> if len(foods) == 1: <NEW_LINE> <INDENT> score = - (100 * manhattanDistance(foods[0], currentPosition) + 10 * avgDistance) + 10 * minGhost + 100 * currentGameState.getScore() <NEW_LINE> <DEDENT> if currentGameState.isWin(): <NEW_LINE> <INDENT> score = 99999999 <NEW_LINE> <DEDENT> if currentGameState.isLose(): <NEW_LINE> <INDENT> score = - 99999999 <NEW_LINE> <DEDENT> return score + random.randint(1, 80)
Your extreme ghost-hunting, pellet-nabbing, food-gobbling, unstoppable evaluation function (question 5). DESCRIPTION: <write something here so we know what you did>
625941c0dc8b845886cb548f
def process_chain(process, name): <NEW_LINE> <INDENT> flattened = flatten_list(process) <NEW_LINE> logging.warning(f"Processing {name} started.") <NEW_LINE> start_time = datetime.now() <NEW_LINE> for i, step in enumerate(flattened): <NEW_LINE> <INDENT> logging.warning(f"Processing step {i + 1} of {len(flattened)}") <NEW_LINE> step_start_time = datetime.now() <NEW_LINE> try: <NEW_LINE> <INDENT> step() <NEW_LINE> <DEDENT> except ProcessError as e: <NEW_LINE> <INDENT> step_stop_time = datetime.now() <NEW_LINE> logging.error( f"Processing failed on step {i + 1} of {len(flattened)} after {step_stop_time - step_start_time} {e}" ) <NEW_LINE> raise ProcessError(f"Could not process chain {name}", e) <NEW_LINE> <DEDENT> step_stop_time = datetime.now() <NEW_LINE> logging.warning(f"completed step {i + 1} in {step_stop_time - step_start_time}") <NEW_LINE> <DEDENT> logging.warning(f"Processing {name} ended. Duration: {datetime.now() - start_time}")
Execute a series of processing steps. :param process: The list of things to process. if any return False processing will stop. :param name: The name of this process. Used for making the logging clearer. :return: Nothing.
625941c0460517430c3940e6
def _results_save_fbrowser_success(self, instance): <NEW_LINE> <INDENT> foldername = instance.filename <NEW_LINE> folder_path = os.path.join(instance.path, foldername) <NEW_LINE> self.save_results_dir_path = os.path.normpath(folder_path)
On results folder path selection, store and close FileBrowser.
625941c0aad79263cf390999
def check_groups(self, groups): <NEW_LINE> <INDENT> self.force_minichat_refresh() <NEW_LINE> displayed_groups = self.selenium.find_elements_by_css_selector('.minichat-group') <NEW_LINE> self.assertEqual(len(groups), len(displayed_groups)) <NEW_LINE> for i, group in enumerate(groups[::-1]): <NEW_LINE> <INDENT> displayed_messages = displayed_groups[i].find_elements_by_css_selector('.minichat-text-content') <NEW_LINE> self.assertEqual(len(group), len(displayed_messages)) <NEW_LINE> for j, message in enumerate(group): <NEW_LINE> <INDENT> self.assertEqual(message, displayed_messages[j].text)
Check that messages are split correctly into groups. *groups* is a list of list of message text. Messages should be given in chronological order!
625941c07cff6e4e811178e1
def test_build_east(self): <NEW_LINE> <INDENT> builder = BoundaryBuilder(self.north, self.east, self.south, self.west) <NEW_LINE> points = [ Point(x=1, z=6), Point(x=4, z=3), Point(x=5, z=6), Point(x=7, z=4), Point(x=9, z=6) ] <NEW_LINE> nodes = get_nodes(points) <NEW_LINE> expected = points + [ Point(x=self.north, z=6), Point(x=self.north, z=self.east), Point(x=self.south, z=self.east), Point(x=self.south, z=6) ] <NEW_LINE> result = builder.influence_east(nodes) <NEW_LINE> self.assertSequenceEqual(result, expected)
Создаётся корректный многоугольник восточной InfluenceArea
625941c04f88993c3716bfc5
def addMaps(self, configMapDict=None, dataMapDict=None): <NEW_LINE> <INDENT> if configMapDict is not None: <NEW_LINE> <INDENT> self.configMap.update(configMapDict) <NEW_LINE> <DEDENT> if dataMapDict is not None: <NEW_LINE> <INDENT> self.dataMap.update(dataMapDict)
Add additional maps, Args ---- configMapDict : dict() dataMapDict : dict() Two dictionaries to update the internal mapping dictionaries with. NOTE: this *overwrites* any values in the existing maps.
625941c099cbb53fe6792b42
def canonical(self): <NEW_LINE> <INDENT> apply_inst = self.apply_actions.per_output_actions(pass_through=True) <NEW_LINE> write_inst = self.write_actions.per_output_actions(pass_through=True) <NEW_LINE> return (self.clear_actions, self.goto_table, self.meter, self.write_metadata, apply_inst, write_inst)
Return a canonical representation of this instruction The returned value will equal another in terms of overall forwarding if, in all cases, this set of instructions is equivalent to the other, including when merged with any other instruction. This flattens groups and redundancies out of actions. But, considers differences in apply and write actions, and the next table, meter etc. return: A hashable tuple, do not assume anything about the contents
625941c0ac7a0e7691ed402c
def set_scale(self, value): <NEW_LINE> <INDENT> logging.warning( 'CanvasIcon: the scale parameter is currently unsupported') <NEW_LINE> if self._buffer.scale != value: <NEW_LINE> <INDENT> self._buffer.scale = value <NEW_LINE> self.emit_request_changed()
Parameters ---------- value: Returns ------- None
625941c0b7558d58953c4e74
@exit_on_keyboard_interrupt <NEW_LINE> def pick_commit_reflog(*args): <NEW_LINE> <INDENT> commits = subprocess.check_output(("git", "reflog", "--date=short") + args) <NEW_LINE> p = subprocess.Popen(["fzf"], stdin=PIPE, stdout=PIPE) <NEW_LINE> commit = p.communicate(input=add_new_line(commits))[0] <NEW_LINE> if not commit: <NEW_LINE> <INDENT> raise KeyboardInterrupt <NEW_LINE> <DEDENT> return commit.split()[0].decode("utf-8")
Pick a commit hash from the reflog.
625941c038b623060ff0ad4a
def ask_generator(self, query): <NEW_LINE> <INDENT> if not pl_fc_entails(self.clauses, query): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> yield {}
Yield the empty substitution if KB implies query; else False
625941c0d18da76e2353242f
def calcGrowthRate(a, r_min=19.3, K_t=6.1*10**-2, Lambda_0=1.35): <NEW_LINE> <INDENT> result = (a - r_min) * K_t / Lambda_0 <NEW_LINE> return result
growth_rateを計算する関数
625941c0a4f1c619b28aff9a
def load(self, filename): <NEW_LINE> <INDENT> if not isinstance(filename, str): <NEW_LINE> <INDENT> raise TypeError("Filename must be the name of a file on an absolute or relative path") <NEW_LINE> <DEDENT> elif len(filename) == 0: <NEW_LINE> <INDENT> raise ValueError("Filename must not be empty") <NEW_LINE> <DEDENT> receive_conn, send_conn = mp.Pipe(duplex=False) <NEW_LINE> parser = mp.Process(target=_xml_parser, args=(self.logqueue, send_conn, filename)) <NEW_LINE> parser.start() <NEW_LINE> self.logger.info("Starting multiprocess main loop") <NEW_LINE> running = True <NEW_LINE> while running: <NEW_LINE> <INDENT> self._handle_log_queue() <NEW_LINE> handled = 0 <NEW_LINE> while receive_conn.poll() and handled <= self.max_elements_handled: <NEW_LINE> <INDENT> handled += 1 <NEW_LINE> elem = receive_conn.recv() <NEW_LINE> if elem is None: <NEW_LINE> <INDENT> running = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if elem.tag == "bounds": <NEW_LINE> <INDENT> self.min_lat = float(elem.attrib['minlat']) <NEW_LINE> self.max_lat = float(elem.attrib['maxlat']) <NEW_LINE> self.min_lon = float(elem.attrib['minlon']) <NEW_LINE> self.max_lon = float(elem.attrib['maxlon']) <NEW_LINE> self.logger.info("Area of map is defined by (%.4f, %.4f), (%.4f, %.4f)", self.min_lat, self.min_lon, self.max_lat, self.max_lon) <NEW_LINE> <DEDENT> elif elem.tag == "node": <NEW_LINE> <INDENT> n = self._parse_node(elem) <NEW_LINE> self.nodes[n.id] = n <NEW_LINE> <DEDENT> elif elem.tag == "way": <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> w = self._parse_way(elem) <NEW_LINE> <DEDENT> except MultiReader.UnusedWayException: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.ways[w.id] = w <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> parser.join() <NEW_LINE> self.logger.info("Finished multiprocess main loop") <NEW_LINE> self.logger.info("Found %d nodes", len(self.nodes)) <NEW_LINE> self.logger.info("Found %d ways", len(self.ways)) <NEW_LINE> 
self._handle_log_queue(True) <NEW_LINE> self._filter_noncar_ways() <NEW_LINE> self.logger.info("Adding back-references from nodes to ways") <NEW_LINE> for way in self.ways: <NEW_LINE> <INDENT> for node in self.ways[way].nodes: <NEW_LINE> <INDENT> self.nodes[node].ways.append(way)
Parses data from an OSM file and stores it in memory. Starts a subprocess that reads the XML and parses. It is then passed back through a queue and handled. OSM Data gets stored in class members. Params: filename - The file which holds the OSM XML
625941c067a9b606de4a7e17
def sep(self): <NEW_LINE> <INDENT> if self.outformat == "tex": self.text("\hline\n") <NEW_LINE> else: self.line(self.separator)
Draws a separating line
625941c03617ad0b5ed67e54
def dip_gsw(xgsw,ygsw,zgsw): <NEW_LINE> <INDENT> xgsm,ygsm,zgsm = gswgsm(xgsw,ygsw,zgsw, 1) <NEW_LINE> bxgsm,bygsm,bzgsm = dip(xgsm,ygsm,zgsm) <NEW_LINE> return gswgsm(bxgsm,bygsm,bzgsm, -1)
Calculates gsm components of a geodipole field with the dipole moment corresponding to the epoch, specified by calling subroutine recalc (should be invoked before the first use of this one and in case the date/time was changed). :param xgsw,ygsw,zgsw: GSW coordinates in Re (1 Re = 6371.2 km) :return: bxgsm,bygsm,gzgsm. Field components in gsm system, in nanotesla. Author: Sheng Tian
625941c07b180e01f3dc475d
def lmove(self, first_list, second_list, src="LEFT", dest="RIGHT"): <NEW_LINE> <INDENT> params = [first_list, second_list, src, dest] <NEW_LINE> return self.execute_command("LMOVE", *params)
Atomically returns and removes the first/last element of a list, pushing it as the first/last element on the destination list. Returns the element being popped and pushed. For more information check https://redis.io/commands/lmove
625941c085dfad0860c3adb5
def test_delete_vote(api): <NEW_LINE> <INDENT> pass
Testing delete vote functionality. :param api: DogApi object instance. :type api: catdog.dog.DogApi
625941c03317a56b86939bb9
def submit(self, silent=False, parameters=None, instanceId=None, timeout=0): <NEW_LINE> <INDENT> if parameters == None: <NEW_LINE> <INDENT> parameters = {} <NEW_LINE> <DEDENT> return "ajaxSubmit('%s', %s, '%s', %d);" % (self.jsId(instanceId), interpretAsString(silent), self.__urlStringFromDict__(parameters), timeout)
Returns the javascript that will submit this control
625941c07b180e01f3dc475e
def cleanup_teams(): <NEW_LINE> <INDENT> spark = setup_spark() <NEW_LINE> users = read_csv() <NEW_LINE> team_names = [department_to_team_room(department) for department in set((user['Department'] for user in users))] <NEW_LINE> teams = spark.list_teams() <NEW_LINE> teams = [team for team in teams if team['name'] in team_names] <NEW_LINE> today = spark_api.time_to_str(datetime.datetime.utcnow())[:11] <NEW_LINE> teams = [team for team in teams if team['created'].startswith(today)] <NEW_LINE> for team in teams: <NEW_LINE> <INDENT> log.info('Deleting memberships for team {}'.format(team['name'])) <NEW_LINE> memberships = spark.list_team_memberships(p_teamId = team['id']) <NEW_LINE> for membership in memberships: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> spark.delete_team_membership(membership_id = membership['id']) <NEW_LINE> <DEDENT> except spark_api.APIError: pass <NEW_LINE> <DEDENT> log.info('Deleting team {}'.format(team['name'])) <NEW_LINE> try: <NEW_LINE> <INDENT> spark.delete_team(team_id = team['id']) <NEW_LINE> <DEDENT> except spark_api.APIError: pass <NEW_LINE> <DEDENT> return
delete all teams created by this script
625941c05510c4643540f345
def __interact_read(fd): <NEW_LINE> <INDENT> return os.read(fd, 1000)
This is used by the interact() method.
625941c02eb69b55b151c808
def id(self): <NEW_LINE> <INDENT> return self._mangler.parent_name()
Returns internal identifier that torch.package uses to distinguish PackageImporter instances. Looks like: <torch_package_0>
625941c0091ae35668666ebe
def __init__(self, name=None, value=None): <NEW_LINE> <INDENT> self._name = None <NEW_LINE> self._value = None <NEW_LINE> self.discriminator = None <NEW_LINE> if name is not None: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> if value is not None: <NEW_LINE> <INDENT> self.value = value
LookmlModelExploreAlwaysFilter - a model defined in Swagger
625941c0eab8aa0e5d26dab3
def release_zoom(self, event): <NEW_LINE> <INDENT> if self._zoom_info is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.canvas.mpl_disconnect(self._zoom_info.cid) <NEW_LINE> self.remove_rubberband() <NEW_LINE> start_x, start_y = self._zoom_info.start_xy <NEW_LINE> if ((abs(event.x - start_x) < 5 and event.key != "y") or (abs(event.y - start_y) < 5 and event.key != "x")): <NEW_LINE> <INDENT> self.canvas.draw_idle() <NEW_LINE> self._zoom_info = None <NEW_LINE> return <NEW_LINE> <DEDENT> for i, ax in enumerate(self._zoom_info.axes): <NEW_LINE> <INDENT> twinx = any(ax.get_shared_x_axes().joined(ax, prev) for prev in self._zoom_info.axes[:i]) <NEW_LINE> twiny = any(ax.get_shared_y_axes().joined(ax, prev) for prev in self._zoom_info.axes[:i]) <NEW_LINE> ax._set_view_from_bbox( (start_x, start_y, event.x, event.y), self._zoom_info.direction, event.key, twinx, twiny) <NEW_LINE> <DEDENT> self.canvas.draw_idle() <NEW_LINE> self._zoom_info = None <NEW_LINE> self.push_current()
Callback for mouse button release in zoom to rect mode.
625941c06fece00bbac2d698
def check_str_split(string: str, sep: str, maxsplit: int) -> ResultComparison: <NEW_LINE> <INDENT> return compare_results(lambda s, *a: s.split(*a), string, sep, maxsplit)
post: _
625941c0de87d2750b85fcec
def _arm_thumb_filter_jump_successors(self, addr, successors, get_ins_addr, get_exit_stmt_idx): <NEW_LINE> <INDENT> if not successors: <NEW_LINE> <INDENT> return [ ] <NEW_LINE> <DEDENT> it_counter = 0 <NEW_LINE> conc_temps = {} <NEW_LINE> can_produce_exits = set() <NEW_LINE> bb = self._lift(addr, thumb=True, opt_level=0) <NEW_LINE> for stmt in bb.vex.statements: <NEW_LINE> <INDENT> if stmt.tag == 'Ist_IMark': <NEW_LINE> <INDENT> if it_counter > 0: <NEW_LINE> <INDENT> it_counter -= 1 <NEW_LINE> can_produce_exits.add(stmt.addr + stmt.delta) <NEW_LINE> <DEDENT> <DEDENT> elif stmt.tag == 'Ist_WrTmp': <NEW_LINE> <INDENT> val = stmt.data <NEW_LINE> if val.tag == 'Iex_Const': <NEW_LINE> <INDENT> conc_temps[stmt.tmp] = val.con.value <NEW_LINE> <DEDENT> <DEDENT> elif stmt.tag == 'Ist_Put': <NEW_LINE> <INDENT> if stmt.offset == self.project.arch.registers['itstate'][0]: <NEW_LINE> <INDENT> val = stmt.data <NEW_LINE> if val.tag == 'Iex_RdTmp': <NEW_LINE> <INDENT> if val.tmp in conc_temps: <NEW_LINE> <INDENT> it_counter = 0 <NEW_LINE> itstate = conc_temps[val.tmp] <NEW_LINE> while itstate != 0: <NEW_LINE> <INDENT> it_counter += 1 <NEW_LINE> itstate >>= 8 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> if it_counter != 0: <NEW_LINE> <INDENT> l.debug('Basic block ends before calculated IT block (%#x)', addr) <NEW_LINE> <DEDENT> THUMB_BRANCH_INSTRUCTIONS = ('beq', 'bne', 'bcs', 'bhs', 'bcc', 'blo', 'bmi', 'bpl', 'bvs', 'bvc', 'bhi', 'bls', 'bge', 'blt', 'bgt', 'ble', 'cbz', 'cbnz') <NEW_LINE> for cs_insn in bb.capstone.insns: <NEW_LINE> <INDENT> if cs_insn.mnemonic.split('.')[0] in THUMB_BRANCH_INSTRUCTIONS: <NEW_LINE> <INDENT> can_produce_exits.add(cs_insn.address) <NEW_LINE> <DEDENT> <DEDENT> successors_filtered = [suc for suc in successors if get_ins_addr(suc) in can_produce_exits or get_exit_stmt_idx(suc) == 'default'] <NEW_LINE> return successors_filtered
Filter successors for THUMB mode basic blocks, and remove those successors that won't be taken normally. :param int addr: Address of the basic block / SimIRSB. :param list successors: A list of successors. :param func get_ins_addr: A callable that returns the source instruction address for a successor. :param func get_exit_stmt_idx: A callable that returns the source statement ID for a successor. :return: A new list of successors after filtering. :rtype: list
625941c06fece00bbac2d699
@mock.patch('longbow.configuration.saveini') <NEW_LINE> @mock.patch('longbow.staging.stage_downstream') <NEW_LINE> @mock.patch('longbow.scheduling._checkwaitingjobs') <NEW_LINE> @mock.patch('longbow.scheduling._polljobs') <NEW_LINE> @mock.patch('longbow.scheduling._monitorinitialise') <NEW_LINE> def test_monitor_except(mock_init, mock_poll, mock_wait, mock_down, mock_save): <NEW_LINE> <INDENT> jobs = { "lbowconf": { "recoveryfile": "recovery-YYMMDD-HHMMSS", "hpc1-queue-slots": 1, "hpc1-queue-max": 2 }, "jobone": { "resource": "hpc1", "laststatus": "Finished" }, "jobtwo": { "resource": "hpc1", "laststatus": "Complete" }, "jobthree": { "resource": "hpc1", "laststatus": "Submit Error" } } <NEW_LINE> mock_init.return_value = 0, 1 <NEW_LINE> mock_poll.return_value = False <NEW_LINE> mock_down.return_value = None <NEW_LINE> mock_save.side_effect = IOError <NEW_LINE> mock_wait.return_value = False <NEW_LINE> monitor(jobs) <NEW_LINE> assert jobs["jobone"]["laststatus"] == "Complete" <NEW_LINE> assert mock_save.call_count == 1
Check that if an exception is thrown on the save recovery file, that it does not bring the whole application down.
625941c0167d2b6e31218af2
def has_same_status(self,status): <NEW_LINE> <INDENT> for index, item in enumerate(status.get_status_list()): <NEW_LINE> <INDENT> same_index = self.has_same_lr_item(item,full=True) <NEW_LINE> if same_index == -1: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True
判断两个状态是否完全相同 Args: status: 用于比较的状态
625941c00c0af96317bb8144
def getEndPoint1(self): <NEW_LINE> <INDENT> return _AriaPy.ArLineSegment_getEndPoint1(self)
getEndPoint1(self) -> ArPose
625941c0a79ad161976cc0a1
def find_resource_n_h(itvs, hy, rqts, top, h, h_bottom): <NEW_LINE> <INDENT> avail_bks = keep_no_empty_scat_bks(itvs, top) <NEW_LINE> l_avail_bks = len(avail_bks) <NEW_LINE> if l_avail_bks < rqts[h]: <NEW_LINE> <INDENT> return ProcSet() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if h == h_bottom - 2: <NEW_LINE> <INDENT> itvs_acc = ProcSet() <NEW_LINE> i = 0 <NEW_LINE> nb_r = 0 <NEW_LINE> while (i < l_avail_bks) and (nb_r != rqts[h]): <NEW_LINE> <INDENT> avail_sub_bks = [ (avail_bks[i] & x) for x in hy[h + 1] if len(avail_bks[i] & x) != 0 ] <NEW_LINE> r = extract_n_scattered_block_itv(itvs, avail_sub_bks, rqts[h + 1]) <NEW_LINE> if len(r) != 0: <NEW_LINE> <INDENT> itvs_acc = itvs_acc | r <NEW_LINE> nb_r += 1 <NEW_LINE> <DEDENT> i += 1 <NEW_LINE> <DEDENT> if nb_r == rqts[h]: <NEW_LINE> <INDENT> return itvs_acc <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return ProcSet() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> itvs_acc = ProcSet() <NEW_LINE> i = 0 <NEW_LINE> nb_r = 0 <NEW_LINE> while (i < l_avail_bks) and (nb_r != rqts[h]): <NEW_LINE> <INDENT> r = find_resource_n_h(itvs, hy, rqts, [avail_bks[i]], h + 1, h_bottom) <NEW_LINE> if len(r) != 0: <NEW_LINE> <INDENT> itvs_acc = itvs_acc | r <NEW_LINE> nb_r += 1 <NEW_LINE> <DEDENT> i += 1 <NEW_LINE> <DEDENT> if nb_r == rqts[h]: <NEW_LINE> <INDENT> return itvs_acc <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return ProcSet()
Recursive function collecting resources from each hierarchy level. :param itvs: A :class:`ProcSet` of available resources :param [ProcSet] hy: The specified hierarchy levels :param [Integer] rqts: Array containing the number of resources needed by level of hierarchy :param top: Current level of hierarchy to consider :param h: Current level of hierarchy to consider :param h_bottom: Last level of hierarchy (used to stop recursive call) :return: A :class:`ProcSet` containing resources compatible with the request, or empty if the request could not be satisfied.
625941c0f8510a7c17cf9657
def wait_for_ringing(log, ad): <NEW_LINE> <INDENT> log.info("waiting for ringing {}".format(ad.serial)) <NEW_LINE> ad.droid.telecomStartListeningForCallAdded() <NEW_LINE> if ad.droid.telecomIsInCall(): <NEW_LINE> <INDENT> log.info("Device already in call {}".format(ad.serial)) <NEW_LINE> ad.droid.telecomStopListeningForCallAdded() <NEW_LINE> return True <NEW_LINE> <DEDENT> call_id = None <NEW_LINE> calls_in_state = ad.droid.telecomCallGetCallIds() <NEW_LINE> if len(calls_in_state) == 0: <NEW_LINE> <INDENT> event = None <NEW_LINE> try: <NEW_LINE> <INDENT> event = ad.ed.pop_event( tel_defines.EventTelecomCallAdded, tel_defines.MAX_WAIT_TIME_CALLEE_RINGING) <NEW_LINE> <DEDENT> except queue.Empty: <NEW_LINE> <INDENT> log.info("Did not get {} droid {}".format( tel_defines.EventTelecomCallAdded, ad.serial)) <NEW_LINE> return False <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> ad.droid.telecomStopListeningForCallAdded() <NEW_LINE> <DEDENT> call_id = event['data']['CallId'] <NEW_LINE> log.info("wait_for_ringing call found {} dev {}".format( call_id, ad.serial)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> call_id = calls_in_state[0] <NEW_LINE> <DEDENT> if wait_for_call_state( log, ad, call_id, tel_defines.CALL_STATE_RINGING) != tel_defines.CALL_STATE_RINGING: <NEW_LINE> <INDENT> log.info("No ringing call id {} droid {}".format( call_id, ad.serial)) <NEW_LINE> return False <NEW_LINE> <DEDENT> return True
Wait for the droid to be in ringing state. Args: log: log object ad: android device object Returns: True if success, False if fail.
625941c0a8370b77170527fc
def getXthWorstIndividualOfPopulation(self, popInd, X): <NEW_LINE> <INDENT> makespanList = [item.makespan for item in self.model[popInd].pop] <NEW_LINE> sortedMakespanList = sorted(makespanList, reverse=True) <NEW_LINE> indexs = [makespanList.index(sortedMakespanList[X])] <NEW_LINE> return indexs
功能: 返回某个种群第X差的个体在种群中的序号 输入: popInd 种群序号 X 第X差,从0开始数 输出: indexs 一个list,在种群中的序号
625941c03346ee7daa2b2cc6
def fetch_baja_bathymetry(): <NEW_LINE> <INDENT> _datasets_deprecation_warning() <NEW_LINE> data_file = REGISTRY.fetch("baja-bathymetry.csv.xz") <NEW_LINE> data = pd.read_csv(data_file, compression="xz") <NEW_LINE> return data
Fetch sample bathymetry data from Baja California. .. warning:: All sample datasets in Verde are deprecated and will be **removed in Verde v2.0.0**. The tutorials/examples will transition to using `Ensaio <https://www.fatiando.org/ensaio/>`__ instead. This is the ``@tut_ship.xyz`` sample data from the `GMT <http://gmt.soest.hawaii.edu/>`__ tutorial. If the file isn't already in your data directory, it will be downloaded automatically. Returns ------- data : :class:`pandas.DataFrame` The bathymetry data. Columns are longitude, latitude, and bathymetry (in meters) for each data point. See also -------- setup_baja_bathymetry_map: Utility function to help setup a Cartopy map.
625941c05fcc89381b1e1619
def get_neuro_df(uids_lst, neuro_exp_only=None, neuro_dict=None, admin=False, flatten_df=False): <NEW_LINE> <INDENT> query = {'uID': {'$in': uids_lst}} <NEW_LINE> proj = default_neuro_proj.copy() <NEW_LINE> remap_cols_dict = remap_neuro_variables(neuropsych_variables) <NEW_LINE> arg_compatability(uids_lst=uids_lst, neuro_exp_only=neuro_exp_only, neuro_dict=neuro_dict) <NEW_LINE> if neuro_exp_only: <NEW_LINE> <INDENT> if neuro_exp_only.upper() == 'VST': <NEW_LINE> <INDENT> add_proj = {k:1 for k,v in remap_cols_dict.items() if 'VST' in v} <NEW_LINE> <DEDENT> if neuro_exp_only.upper() == 'TOLT': <NEW_LINE> <INDENT> add_proj = {k:1 for k,v in remap_cols_dict.items() if 'TOLT' in v} <NEW_LINE> <DEDENT> <DEDENT> if neuro_dict: <NEW_LINE> <INDENT> need_to_format_proj = neuro_dict_proj(neuro_dict=neuro_dict) <NEW_LINE> add_proj = {k:1 for k,v in remap_cols_dict.items() if v in need_to_format_proj.keys()} <NEW_LINE> <DEDENT> if admin: <NEW_LINE> <INDENT> admin_proj = default_neuro_admin.copy() <NEW_LINE> proj.update(admin_proj) <NEW_LINE> docs = D.Mdb['neuropsych'].find(query, admin_proj) <NEW_LINE> df = C.buildframe_fromdocs(docs, inds=['ID', 'np_session']) <NEW_LINE> return df <NEW_LINE> <DEDENT> proj.update(add_proj) <NEW_LINE> docs = D.Mdb['neuropsych'].find(query, proj) <NEW_LINE> df = C.buildframe_fromdocs(docs, inds=['ID', 'np_session']) <NEW_LINE> df1 = rename_neuropsych_cols(df, remap_cols_dict) <NEW_LINE> return df1
Can query by: 1) uids_lst + neuro_exp_only to get ALL conds/measures for an experiment 2) uids_lst + neuro_dict to get specific conds/measures for experiments
625941c0435de62698dfdba8
def findCenter(imArray): <NEW_LINE> <INDENT> imShape = imArray.shape <NEW_LINE> imCenter = np.array([imShape[0]/2,imShape[1]/2]) <NEW_LINE> return imCenter
Returns the center of a 2-D array as coordinates. WARN: the coordinates are not int in general (true division is used).
625941c063d6d428bbe4444b
def backward(self): <NEW_LINE> <INDENT> raise NotImplemented
Backward propagation. Compute the gradient of the current node with respect to the input nodes. The gradient of the loss with respect to the current node should already be computed in the `gradients` attribute of the output nodes.
625941c0adb09d7d5db6c6ed
def delete_app(self, app_name): <NEW_LINE> <INDENT> url = '/apps/{name}'.format(name=app_name) <NEW_LINE> return self.__rest_helper(url, method='DELETE')
Delete an application.
625941c06e29344779a62570
def Available(self, owner=None, **owner_kwargs): <NEW_LINE> <INDENT> if owner: <NEW_LINE> <INDENT> self._owner = owner <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._owner = owner_kwargs <NEW_LINE> <DEDENT> return self
Make process start action available for the User accepts user lookup kwargs or callable predicate :: User -> bool:: .Available(username='employee') .Available(lambda user: user.is_super_user)
625941c091af0d3eaac9b972
def test_merge_pdfs(self): <NEW_LINE> <INDENT> pattern = "%s/card*.pdf" % self.TESTDATA_FOLDER <NEW_LINE> result = os.path.join(self.test_dir, "result.pdf") <NEW_LINE> merge_pdfs(pattern, result) <NEW_LINE> self.assert_pdf(result, 2)
Test merging pdfs.
625941c0cc40096d615958ad
def read_distance(self, read_ser = readSensorsLine(), filtrage = True): <NEW_LINE> <INDENT> percentage_var_max = 0.15 <NEW_LINE> offset_lecture = 18 <NEW_LINE> if read_ser[0] == 'S': <NEW_LINE> <INDENT> last_index_range = self.get_last_index_range(read_ser, 12) <NEW_LINE> distance = int(read_ser[12:last_index_range]) + offset_lecture <NEW_LINE> if filtrage == True: <NEW_LINE> <INDENT> if distance - offset_lecture != 765: <NEW_LINE> <INDENT> print(time.time() - self.startTime) <NEW_LINE> if time.time() - self.startTime > 1: <NEW_LINE> <INDENT> if abs(distance-self.range) < percentage_var_max*self.range: <NEW_LINE> <INDENT> self.set_distance(distance) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.set_distance(int(self.range + percentage_var_max*(distance-self.range))) <NEW_LINE> <DEDENT> <DEDENT> else : <NEW_LINE> <INDENT> self.set_distance(distance) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.set_distance(self.range) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.set_distance(distance - offset_lecture) <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> return False
Gets the range given by the sensor connected to the Arduino. Returns False if no data is read and True otherwise. Takes as input the line containing the data we want to read from the Arduino. If no line is given as input, we read a new line.
625941c07c178a314d6ef3b8
def _return_retry_timer(self): <NEW_LINE> <INDENT> msg = 'Minion return retry timer set to {0} seconds' <NEW_LINE> if self.opts.get('return_retry_timer_max'): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> random_retry = randint(self.opts['return_retry_timer'], self.opts['return_retry_timer_max']) <NEW_LINE> log.debug(msg.format(random_retry) + ' (randomized)') <NEW_LINE> return random_retry <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> log.error( 'Invalid value (return_retry_timer: {0} or return_retry_timer_max: {1})' 'both must be a positive integers'.format( self.opts['return_retry_timer'], self.opts['return_retry_timer_max'], ) ) <NEW_LINE> log.debug(msg.format(DEFAULT_MINION_OPTS['return_retry_timer'])) <NEW_LINE> return DEFAULT_MINION_OPTS['return_retry_timer'] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> log.debug(msg.format(self.opts.get('return_retry_timer'))) <NEW_LINE> return self.opts.get('return_retry_timer')
Based on the minion configuration, either return a randomized timer or just return the value of the return_retry_timer.
625941c0e8904600ed9f1e86
def test_articulation_points_resnet(): <NEW_LINE> <INDENT> tf.reset_default_graph() <NEW_LINE> nodes = util.make_resnet(3) <NEW_LINE> all_ops = ge.get_forward_walk_ops(seed_ops=nodes[0].op) <NEW_LINE> graph = nx.Graph(util.tf_ops_to_graph(all_ops)) <NEW_LINE> assert util.set_equal(util.format_ops(nx.articulation_points(graph)), ['a01_add']) <NEW_LINE> tf.reset_default_graph() <NEW_LINE> nodes = util.make_resnet(4) <NEW_LINE> all_ops = ge.get_forward_walk_ops(seed_ops=nodes[0].op) <NEW_LINE> graph = nx.Graph(util.tf_ops_to_graph(all_ops)) <NEW_LINE> assert util.set_equal(util.format_ops(nx.articulation_points(graph)), ['a01_add', 'a02_add'])
Make sure articulation points are found correctly in resnet.
625941c09c8ee82313fbb6d1
def spacify_number(number): <NEW_LINE> <INDENT> nb_rev = str(number)[::-1] <NEW_LINE> new_chain = '' <NEW_LINE> for val, letter in enumerate(nb_rev): <NEW_LINE> <INDENT> if val%3==0: <NEW_LINE> <INDENT> new_chain += ' ' <NEW_LINE> <DEDENT> new_chain += letter <NEW_LINE> <DEDENT> final_chain = new_chain[::-1] <NEW_LINE> return final_chain
Takes a number and returns a string with a space inserted every 3 digits (thousands grouping); note the result carries a trailing space.
625941c0a8ecb033257d302a
def __init__( self, method: Callable[ ..., Awaitable[product_search_service.ListProductsInProductSetResponse] ], request: product_search_service.ListProductsInProductSetRequest, response: product_search_service.ListProductsInProductSetResponse, *, metadata: Sequence[Tuple[str, str]] = () ): <NEW_LINE> <INDENT> self._method = method <NEW_LINE> self._request = product_search_service.ListProductsInProductSetRequest(request) <NEW_LINE> self._response = response <NEW_LINE> self._metadata = metadata
Instantiates the pager. Args: method (Callable): The method that was originally called, and which instantiated this pager. request (google.cloud.vision_v1.types.ListProductsInProductSetRequest): The initial request object. response (google.cloud.vision_v1.types.ListProductsInProductSetResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata.
625941c0442bda511e8be377
def _undeploy_link(self, progress_bar: progressbar.ProgressBar, network: docker.models.networks.Network) -> None: <NEW_LINE> <INDENT> self._delete_link(network) <NEW_LINE> if progress_bar is not None: <NEW_LINE> <INDENT> progress_bar += 1
Undeploy a Docker network. Args: progress_bar (Optional[progressbar.ProgressBar]): A progress bar object to display if used from cli. network (docker.models.networks.Network): The Docker network to undeploy. Returns: None
625941c06aa9bd52df036cff