function
stringlengths
11
56k
repo_name
stringlengths
5
60
features
list
def init_random(n_neighbors, inds, indptr, data, heap, dist, rng_state):
    # Fill any unfilled neighbor slots of the current-graph heap with
    # randomly sampled points, for sparse (CSR: inds/indptr/data) input.
    # heap layout: heap[0] = priorities (distances, < 0 means empty slot),
    # heap[1] = indices, heap[2] = flags.
    n_samples = indptr.shape[0] - 1
    for i in range(n_samples):
        if heap[0][i, 0] < 0.0:
            # Sample one candidate for every still-empty slot of row i.
            for j in range(n_neighbors - np.sum(heap[0][i] >= 0.0)):
                idx = np.abs(tau_rand_int(rng_state)) % n_samples
                # Slice out the CSR rows of the candidate and of point i.
                from_inds = inds[indptr[idx] : indptr[idx + 1]]
                from_data = data[indptr[idx] : indptr[idx + 1]]
                to_inds = inds[indptr[i] : indptr[i + 1]]
                to_data = data[indptr[i] : indptr[i + 1]]
                d = dist(from_inds, from_data, to_inds, to_data)
                # Push with flag=1 ("new"); duplicate indices are rejected.
                checked_flagged_heap_push(
                    heap[1][i], heap[0][i], heap[2][i], d, idx, np.uint8(1)
                )
    return
lmcinnes/pynndescent
[ 731, 93, 731, 58, 1518045834 ]
def generate_graph_updates( new_candidate_block, old_candidate_block, dist_thresholds, inds, indptr, data, dist
lmcinnes/pynndescent
[ 731, 93, 731, 58, 1518045834 ]
def nn_descent_internal_low_memory_parallel( current_graph, inds, indptr, data, n_neighbors, rng_state, max_candidates=50, dist=sparse_euclidean, n_iters=10, delta=0.001, verbose=False,
lmcinnes/pynndescent
[ 731, 93, 731, 58, 1518045834 ]
def nn_descent_internal_high_memory_parallel( current_graph, inds, indptr, data, n_neighbors, rng_state, max_candidates=50, dist=sparse_euclidean, n_iters=10, delta=0.001, verbose=False,
lmcinnes/pynndescent
[ 731, 93, 731, 58, 1518045834 ]
def nn_descent( inds, indptr, data, n_neighbors, rng_state, max_candidates=50, dist=sparse_euclidean, n_iters=10, delta=0.001, init_graph=EMPTY_GRAPH, rp_tree_init=True, leaf_array=None, low_memory=False, verbose=False,
lmcinnes/pynndescent
[ 731, 93, 731, 58, 1518045834 ]
def asList(cls, ipaddress, rangeCheck=False): """For ipaddress="10.123.45.67" return mutable [10, 123, 45, 67].
srguiwiz/nrvr-commander
[ 16, 5, 16, 9, 1371660119 ]
def asTuple(cls, ipaddress):
    """For ipaddress="10.123.45.67" return immutable (10, 123, 45, 67)."""
    # Guard-clause style: handle the already-structured inputs first.
    if isinstance(ipaddress, tuple):
        return ipaddress
    if isinstance(ipaddress, list):
        return tuple(ipaddress)
    # Anything else (string, integer) goes through the list normalizer.
    return tuple(cls.asList(ipaddress))
srguiwiz/nrvr-commander
[ 16, 5, 16, 9, 1371660119 ]
def asString(cls, ipaddress):
    """For ipaddress=[10, 123, 45, 67] return "10.123.45.67"."""
    # Already textual (Python 2 str/unicode): nothing to do.
    if isinstance(ipaddress, basestring):
        return ipaddress
    # Packed integer form: expand to octets first.
    if isinstance(ipaddress, (int, long)):
        ipaddress = cls.asList(ipaddress)
    return ".".join(str(octet) for octet in ipaddress)
srguiwiz/nrvr-commander
[ 16, 5, 16, 9, 1371660119 ]
def asInteger(cls, ipaddress): """For ipaddress=[10, 123, 45, 67] return 175844675.
srguiwiz/nrvr-commander
[ 16, 5, 16, 9, 1371660119 ]
def bitAnd(cls, one, other):
    """Octet-wise bitwise AND of two IP addresses (any accepted form)."""
    # Normalize non-sequence inputs (strings, packed ints) to octet lists.
    if not isinstance(one, (list, tuple)):
        one = cls.asList(one)
    if not isinstance(other, (list, tuple)):
        other = cls.asList(other)
    return [a & b for a, b in zip(one, other)]
srguiwiz/nrvr-commander
[ 16, 5, 16, 9, 1371660119 ]
def bitOr(cls, one, other):
    """Octet-wise bitwise OR of two IP addresses (any accepted form)."""
    # Normalize non-sequence inputs (strings, packed ints) to octet lists.
    if not isinstance(one, (list, tuple)):
        one = cls.asList(one)
    if not isinstance(other, (list, tuple)):
        other = cls.asList(other)
    return [a | b for a, b in zip(one, other)]
srguiwiz/nrvr-commander
[ 16, 5, 16, 9, 1371660119 ]
def bitNot(cls, one):
    """Octet-wise complement (~x & 0xff) of an IP address (any form)."""
    if not isinstance(one, (list, tuple)):
        one = cls.asList(one)
    # Mask to 8 bits so Python's signed ~ yields a valid octet.
    return [~octet & 255 for octet in one]
srguiwiz/nrvr-commander
[ 16, 5, 16, 9, 1371660119 ]
def nameWithNumber(cls, stem, ipaddress, octets=1, separator="-"): """For stem="example" and ipaddress="10.123.45.67" return "example-067".
srguiwiz/nrvr-commander
[ 16, 5, 16, 9, 1371660119 ]
def numberWithinSubnet(cls, oneInSubnet, otherNumber, netmask="255.255.255.0"): """For oneInSubnet="10.123.45.67" and otherNumber="89" return [10, 123, 45, 89].
srguiwiz/nrvr-commander
[ 16, 5, 16, 9, 1371660119 ]
def __init__(self, *args, **kwargs):
    """Initialize the service and the per-request folder list."""
    super().__init__(*args, **kwargs)
    self.folders = []  # A hack to communicate parsing args to _elems_to_objs()
ecederstrand/exchangelib
[ 1027, 232, 1027, 14, 1457378916 ]
def _elems_to_objs(self, elems):
    """Pair each response element with its requested folder and yield the
    parsed folder objects. Exceptions returned by the service are yielded
    through unchanged.
    """
    for folder, elem in zip(self.folders, elems):
        if isinstance(elem, Exception):
            yield elem
        else:
            yield parse_folder_elem(elem=elem, folder=folder, account=self.account)
ecederstrand/exchangelib
[ 1027, 232, 1027, 14, 1457378916 ]
def read(fname):
    """Return the text content of *fname*, resolved relative to this file."""
    path = os.path.join(os.path.dirname(__file__), fname)
    with open(path) as handle:
        return handle.read()
fusionbox/django-darkknight
[ 8, 3, 8, 3, 1392764387 ]
def ROCData_from_results(results, clf_index, target):
    """
    Compute ROC Curve(s) from evaluation results.

    :param Orange.evaluation.Results results:
        Evaluation results.
    :param int clf_index:
        Learner index in the `results`.
    :param int target:
        Target class index (i.e. positive class).
    :rval ROCData:
        A instance holding the computed curves.
    """
    # Merged curve: one ROC over predictions pooled from all folds.
    # NOTE(review): slice(0, -1) drops the final row -- presumably the
    # convention roc_curve_for_fold uses for "all rows"; confirm.
    merged = roc_curve_for_fold(results, slice(0, -1), clf_index, target)
    merged_curve = ROCCurve(ROCPoints(*merged),
                            ROCPoints(*roc_curve_convex_hull(merged)))

    # Per-fold curves, falling back to one pseudo-fold covering everything.
    folds = results.folds if results.folds is not None else [slice(0, -1)]
    fold_curves = []
    for fold in folds:
        # TODO: Check for no FP or no TP
        points = roc_curve_for_fold(results, fold, clf_index, target)
        hull = roc_curve_convex_hull(points)
        c = ROCCurve(ROCPoints(*points), ROCPoints(*hull))
        fold_curves.append(c)

    # Vertical average: mean/std TPR over the valid folds at fixed FPRs.
    curves = [fold.points for fold in fold_curves if fold.is_valid]
    fpr, tpr, std = roc_curve_vertical_average(curves)

    # Thresholds are undefined for the vertically averaged curve.
    thresh = numpy.zeros_like(fpr) * numpy.nan
    hull = roc_curve_convex_hull((fpr, tpr, thresh))
    v_avg = ROCAveragedVert(
        ROCPoints(fpr, tpr, thresh),
        ROCPoints(*hull),
        std
    )

    # Threshold average: sample roughly 10 thresholds from all folds,
    # descending, clipped slightly outside [0, 1] to keep endpoints.
    all_thresh = numpy.hstack([t for _, _, t in curves])
    all_thresh = numpy.clip(all_thresh, 0.0 - 1e-10, 1.0 + 1e-10)
    all_thresh = numpy.unique(all_thresh)[::-1]
    thresh = all_thresh[::max(all_thresh.size // 10, 1)]

    (fpr, fpr_std), (tpr, tpr_std) = \
        roc_curve_threshold_average(curves, thresh)

    hull = roc_curve_convex_hull((fpr, tpr, thresh))
    t_avg = ROCAveragedThresh(
        ROCPoints(fpr, tpr, thresh),
        ROCPoints(*hull),
        tpr_std,
        fpr_std
    )
    return ROCData(merged_curve, fold_curves, v_avg, t_avg)
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def plot_curve(curve, pen=None, shadow_pen=None, symbol="+",
               symbol_size=3, name=None):
    """
    Construct a `PlotCurve` for the given `ROCCurve`.

    :param ROCCurve curve:
        Source curve.

    The other parameters are passed to pg.PlotDataItem

    :rtype: PlotCurve
    """
    def extend_to_origin(points):
        "Extend ROCPoints to include coordinate origin if not already present"
        if points.tpr.size and (points.tpr[0] > 0 or points.fpr[0] > 0):
            points = ROCPoints(
                numpy.r_[0, points.fpr], numpy.r_[0, points.tpr],
                # Synthesize a threshold above the first real one for (0, 0).
                numpy.r_[points.thresholds[0] + 1, points.thresholds]
            )
        return points

    points = extend_to_origin(curve.points)
    item = pg.PlotCurveItem(
        points.fpr, points.tpr, pen=pen, shadowPen=shadow_pen,
        name=name, antialias=True
    )
    # Scatter markers only at the measured points (not the synthetic origin).
    sp = pg.ScatterPlotItem(
        curve.points.fpr, curve.points.tpr, symbol=symbol,
        size=symbol_size, pen=shadow_pen, name=name
    )
    sp.setParentItem(item)
    hull = extend_to_origin(curve.hull)
    hull_item = pg.PlotDataItem(
        hull.fpr, hull.tpr, pen=pen, antialias=True
    )
    return PlotCurve(curve, item, hull_item)
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def plot_avg_curve(curve, pen=None, shadow_pen=None, symbol="+",
                   symbol_size=4, name=None):
    """
    Construct a `PlotAvgCurve` for the given `curve`.

    :param curve: Source curve.
    :type curve: ROCAveragedVert or ROCAveragedThresh

    The other parameters are passed to pg.PlotDataItem

    :rtype: PlotAvgCurve
    """
    # The base curve/hull/scatter items are built exactly like plain curves.
    pc = plot_curve(curve, pen=pen, shadow_pen=shadow_pen, symbol=symbol,
                    symbol_size=symbol_size, name=name)

    points = curve.points
    if isinstance(curve, ROCAveragedVert):
        # Vertical averaging: only TPR has spread, so bars have height only.
        # Endpoints ([0] and [-1]) are excluded from the error bars.
        tpr_std = curve.tpr_std
        error_item = pg.ErrorBarItem(
            x=points.fpr[1:-1], y=points.tpr[1:-1],
            height=2 * tpr_std[1:-1],
            pen=pen, beam=0.025,
            antialias=True,
        )
    elif isinstance(curve, ROCAveragedThresh):
        # Threshold averaging: both TPR and FPR have spread -> 2D bars.
        tpr_std, fpr_std = curve.tpr_std, curve.fpr_std
        error_item = pg.ErrorBarItem(
            x=points.fpr[1:-1], y=points.tpr[1:-1],
            height=2 * tpr_std[1:-1], width=2 * fpr_std[1:-1],
            pen=pen, beam=0.025,
            antialias=True,
        )
    return PlotAvgCurve(curve, pc.curve_item, pc.hull_item, error_item)
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def once(f):
    """
    Return a zero-argument callable that invokes *f* at most once and
    caches its result (including a ``None`` result) for later calls.
    """
    # Sentinel distinguishes "not yet called" from a cached None result,
    # removing the previous dependency on an external `Some` wrapper.
    # (Also fixes the misspelled internal name `wraped`.)
    _missing = object()
    cached = _missing

    @wraps(f)
    def wrapper():
        nonlocal cached
        if cached is _missing:
            cached = f()
        return cached

    return wrapper
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def __init__(self, pos=None, angle=90, pen=None, movable=False, bounds=None,
             antialias=False):
    """InfiniteLine subclass that adds an `antialias` rendering flag.

    All other parameters are forwarded unchanged to the base class.
    """
    super().__init__(pos, angle, pen, movable, bounds)
    # Stored for use when the line is painted; the base class has no such flag.
    self.antialias = antialias
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def __init__(self, parent=None):
    """Build the ROC Analysis widget: control panel plus the pyqtgraph plot."""
    super().__init__(parent)

    # Evaluation results and per-(target, classifier) caches.
    self.results = None
    self.classifier_names = []
    self.perf_line = None  # NOTE(review): shadowed by self._perf_line below -- confirm which is used
    self.colors = []
    self._curve_data = {}
    self._plot_curves = {}
    self._rocch = None
    self._perf_line = None

    # --- Control area: plot options -----------------------------------
    box = gui.widgetBox(self.controlArea, "Plot")
    tbox = gui.widgetBox(box, "Target Class")
    tbox.setFlat(True)

    self.target_cb = gui.comboBox(
        tbox, self, "target_index", callback=self._on_target_changed)

    cbox = gui.widgetBox(box, "Classifiers")
    cbox.setFlat(True)
    self.classifiers_list_box = gui.listBox(
        cbox, self, "selected_classifiers", "classifier_names",
        selectionMode=QtGui.QListView.MultiSelection,
        callback=self._on_classifiers_changed)

    abox = gui.widgetBox(box, "Combine ROC Curves From Folds")
    abox.setFlat(True)
    gui.comboBox(abox, self, "roc_averaging",
                 items=["Merge predictions from folds", "Mean TP rate",
                        "Mean TP and FP at threshold",
                        "Show individual curves"],
                 callback=self._replot)

    hbox = gui.widgetBox(box, "ROC Convex Hull")
    hbox.setFlat(True)
    gui.checkBox(hbox, self, "display_convex_curve",
                 "Show convex ROC curves", callback=self._replot)
    gui.checkBox(hbox, self, "display_convex_hull",
                 "Show ROC convex hull", callback=self._replot)

    # --- Control area: analysis options -------------------------------
    box = gui.widgetBox(self.controlArea, "Analysis")
    gui.checkBox(box, self, "display_def_threshold",
                 "Default threshold (0.5) point",
                 callback=self._on_display_def_threshold_changed)
    gui.checkBox(box, self, "display_perf_line", "Show performance line",
                 callback=self._on_display_perf_line_changed)

    grid = QtGui.QGridLayout()
    ibox = gui.indentedBox(box, orientation=grid)

    sp = gui.spin(box, self, "fp_cost", 1, 1000, 10,
                  callback=self._on_display_perf_line_changed)
    grid.addWidget(QtGui.QLabel("FP Cost"), 0, 0)
    grid.addWidget(sp, 0, 1)

    sp = gui.spin(box, self, "fn_cost", 1, 1000, 10,
                  callback=self._on_display_perf_line_changed)
    # NOTE(review): unlike "FP Cost" above, no (row, column) is given here --
    # probably grid.addWidget(..., 1, 0) was intended; confirm layout.
    grid.addWidget(QtGui.QLabel("FN Cost"))
    grid.addWidget(sp, 1, 1)

    sp = gui.spin(box, self, "target_prior", 1, 99,
                  callback=self._on_display_perf_line_changed)
    sp.setSuffix("%")
    sp.addAction(QtGui.QAction("Auto", sp))
    # NOTE(review): same as above -- likely (2, 0) was intended.
    grid.addWidget(QtGui.QLabel("Prior target class probability"))
    grid.addWidget(sp, 2, 1)

    # --- Main area: the ROC plot ---------------------------------------
    self.plotview = pg.GraphicsView(background="w")
    self.plotview.setFrameStyle(QtGui.QFrame.StyledPanel)

    self.plot = pg.PlotItem()
    self.plot.getViewBox().setMenuEnabled(False)
    self.plot.getViewBox().setMouseEnabled(False, False)

    pen = QPen(self.palette().color(QtGui.QPalette.Text))

    # Tick labels at 2/3 of the widget font, but never below 11 px.
    tickfont = QtGui.QFont(self.font())
    tickfont.setPixelSize(max(int(tickfont.pixelSize() * 2 // 3), 11))

    axis = self.plot.getAxis("bottom")
    axis.setTickFont(tickfont)
    axis.setPen(pen)
    axis.setLabel("FP Rate (1-Specificity)")

    axis = self.plot.getAxis("left")
    axis.setTickFont(tickfont)
    axis.setPen(pen)
    axis.setLabel("TP Rate (Sensitivity)")

    self.plot.showGrid(True, True, alpha=0.1)
    # ROC space is the unit square.
    self.plot.setRange(xRange=(0.0, 1.0), yRange=(0.0, 1.0))

    self.plotview.setCentralItem(self.plot)
    self.mainArea.layout().addWidget(self.plotview)
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def clear(self):
    """Clear the widget state."""
    # Results and plot contents.
    self.results = None
    self.plot.clear()
    # Classifier / target selection UI state.
    self.classifier_names = []
    self.selected_classifiers = []
    self.target_cb.clear()
    self.target_index = 0
    self.colors = []
    # Cached curve data and overlay items.
    self._curve_data = {}
    self._plot_curves = {}
    self._rocch = None
    self._perf_line = None
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def curve_data(self, target, clf_idx):
    """Return `ROCData' for the given target and classifier."""
    key = (target, clf_idx)
    try:
        # Fast path: previously computed curves.
        return self._curve_data[key]
    except KeyError:
        data = ROCData.from_results(self.results, clf_idx, target)
        self._curve_data[key] = data
        return data
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def generate_pens(basecolor):
    """Build a (pen, shadow_pen) pair: a thin cosmetic line plus a wider,
    lighter halo underneath it.
    """
    base = QPen(basecolor, 1)
    base.setCosmetic(True)

    halo = QPen(base.color().lighter(160), 2.5)
    halo.setCosmetic(True)

    return base, halo
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def merged():
    # Lazily build plot items for the fold-merged curve
    # (closure over `data`, `pen`, `shadow_pen`, `name` from the outer scope).
    return plot_curve(
        data.merged, pen=pen, shadow_pen=shadow_pen, name=name)
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def folds():
    # One PlotCurve per cross-validation fold; folds get no legend name.
    return [plot_curve(fold, pen=pen, shadow_pen=shadow_pen)
            for fold in data.folds]
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def avg_vert():
    # Vertically averaged curve, drawn with TPR error bars.
    return plot_avg_curve(data.avg_vertical, pen=pen,
                          shadow_pen=shadow_pen, name=name)
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def avg_thres():
    # Threshold-averaged curve, drawn with TPR and FPR error bars.
    return plot_avg_curve(data.avg_threshold, pen=pen,
                          shadow_pen=shadow_pen, name=name)
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def _setup_plot(self):
    """Populate the plot with curves for the selected target/classifiers,
    honoring the chosen fold-averaging mode and display options."""
    target = self.target_index
    selected = self.selected_classifiers
    curves = [self.plot_curves(target, i) for i in selected]
    selected = [self.curve_data(target, i) for i in selected]

    if self.roc_averaging == OWROCAnalysis.Merge:
        for curve in curves:
            # NOTE(review): `curve.merge()` vs. the closure named `merged`
            # elsewhere -- presumably exposed under a different attribute
            # name on the plot-curves namespace; confirm.
            graphics = curve.merge()
            curve = graphics.curve
            self.plot.addItem(graphics.curve_item)

            if self.display_convex_curve:
                self.plot.addItem(graphics.hull_item)

            if self.display_def_threshold:
                # Annotate the point whose threshold is closest to 0.5.
                points = curve.points
                ind = numpy.argmin(numpy.abs(points.thresholds - 0.5))
                item = pg.TextItem(
                    text="{:.3f}".format(points.thresholds[ind]),
                )
                item.setPos(points.fpr[ind], points.tpr[ind])
                self.plot.addItem(item)

        hull_curves = [curve.merged.hull for curve in selected]
        if hull_curves:
            self._rocch = convex_hull(hull_curves)
            iso_pen = QPen(QColor(Qt.black), 1)
            iso_pen.setCosmetic(True)
            self._perf_line = InfiniteLine(pen=iso_pen, antialias=True)
            self.plot.addItem(self._perf_line)

    elif self.roc_averaging == OWROCAnalysis.Vertical:
        for curve in curves:
            graphics = curve.avg_vertical()
            self.plot.addItem(graphics.curve_item)
            self.plot.addItem(graphics.confint_item)
        hull_curves = [curve.avg_vertical.hull for curve in selected]

    elif self.roc_averaging == OWROCAnalysis.Threshold:
        for curve in curves:
            graphics = curve.avg_threshold()
            self.plot.addItem(graphics.curve_item)
            self.plot.addItem(graphics.confint_item)
        hull_curves = [curve.avg_threshold.hull for curve in selected]

    elif self.roc_averaging == OWROCAnalysis.NoAveraging:
        for curve in curves:
            graphics = curve.folds()
            for fold in graphics:
                self.plot.addItem(fold.curve_item)
                if self.display_convex_curve:
                    self.plot.addItem(fold.hull_item)
        hull_curves = [fold.hull for curve in selected for fold in curve.folds]

    if self.display_convex_hull and hull_curves:
        # Shaded hull over all displayed curves, pushed behind everything.
        hull = convex_hull(hull_curves)
        hull_pen = QPen(QColor(200, 200, 200, 100), 2)
        hull_pen.setCosmetic(True)
        item = self.plot.plot(
            hull.fpr, hull.tpr, pen=hull_pen,
            brush=QBrush(QColor(200, 200, 200, 50)), fillLevel=0)
        item.setZValue(-10000)

    # Diagonal chance line.
    pen = QPen(QColor(100, 100, 100, 100), 1, Qt.DashLine)
    pen.setCosmetic(True)
    self.plot.plot([0, 1], [0, 1], pen=pen, antialias=True)

    if self.roc_averaging == OWROCAnalysis.Merge:
        self._update_perf_line()
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def _on_classifiers_changed(self):
    """Rebuild the plot after the classifier selection changes."""
    self.plot.clear()
    if self.results is None:
        return
    self._setup_plot()
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def _on_display_def_threshold_changed(self):
    # Full replot so the 0.5-threshold marker appears/disappears.
    self._replot()
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def _update_perf_line(self):
    """Show/hide and reposition the iso-performance line on the merged plot."""
    if self._perf_line is None:
        return

    show = self.display_perf_line
    self._perf_line.setVisible(show)
    if not show:
        return

    # Slope determined by misclassification costs and the class prior.
    m = roc_iso_performance_slope(
        self.fp_cost, self.fn_cost, self.target_prior / 100.0)
    hull = self._rocch
    touch = roc_iso_performance_line(m, hull)

    angle = numpy.arctan2(m, 1)  # in radians
    self._perf_line.setAngle(angle * 180 / numpy.pi)
    # Anchor the line at the first hull vertex the iso line touches.
    self._perf_line.setPos((hull.fpr[touch[0]], hull.tpr[touch[0]]))
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def interp(x, xp, fp, left=None, right=None):
    """
    Like numpy.interp except for handling of running sequences of same
    values in `xp` (the right-most matching interval is always used).
    """
    x = numpy.asanyarray(x)
    xp = numpy.asanyarray(xp)
    fp = numpy.asanyarray(fp)

    if xp.shape != fp.shape:
        raise ValueError("xp and fp must have the same shape")

    right_idx = numpy.searchsorted(xp, x, side="right")
    out = numpy.zeros(len(x))

    below = right_idx == 0
    above = right_idx == len(xp)
    inside = ~(below | above)

    # Out-of-range points get the boundary values (or explicit left/right).
    out[below] = fp[0] if left is None else left
    out[above] = fp[-1] if right is None else right
    if right is not None:
        # Points exactly on the right boundary would otherwise get `right`.
        out[x == xp[-1]] = fp[-1]

    # Linear interpolation within each interval [xp[seg-1], xp[seg]].
    seg = right_idx[inside]
    gradient = (fp[seg] - fp[seg - 1]) / (xp[seg] - xp[seg - 1])
    out[inside] = gradient * (x[inside] - xp[seg]) + fp[seg]

    return out
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def roc_curve_vertical_average(curves, samples=10):
    """Vertically average ROC curves: mean and std of TPR sampled at
    `samples` evenly spaced FPR positions in [0, 1].
    """
    fpr_grid = numpy.linspace(0.0, 1.0, samples)
    tprs = numpy.array(
        [interp(fpr_grid, fpr, tpr, left=0, right=1)
         for fpr, tpr, _ in curves]
    )
    return fpr_grid, tprs.mean(axis=0), tprs.std(axis=0)
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def roc_curve_threshold_average_interp(curves, thresh_samples):
    """Average ROC curves at fixed threshold values, by interpolation.

    :param curves: Iterable of (fpr, tpr, thresholds) triples, with
        thresholds in descending order (standard ROC convention).
    :param thresh_samples: Threshold values at which to sample each curve.
    :return: ((mean_fpr, std_fpr), (mean_tpr, std_tpr)) arrays evaluated
        at `thresh_samples`.
    """
    fpr_samples, tpr_samples = [], []
    for fpr, tpr, thresh in curves:
        # Reverse so thresholds ascend, as `interp` requires sorted xp.
        thresh = thresh[::-1]
        fpr_samples.append(
            interp(thresh_samples, thresh, fpr[::-1], left=1.0, right=0.0))
        tpr_samples.append(
            interp(thresh_samples, thresh, tpr[::-1], left=1.0, right=0.0))

    fpr_samples = numpy.array(fpr_samples)
    tpr_samples = numpy.array(tpr_samples)

    return ((fpr_samples.mean(axis=0), fpr_samples.std(axis=0)),
            # BUG FIX: the TPR std was previously computed from the FPR
            # samples (copy-paste error).
            (tpr_samples.mean(axis=0), tpr_samples.std(axis=0)))
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def roc_curve_convex_hull(curve):
    """Return the convex hull (upper envelope) of a ROC curve.

    `curve` is a (fpr, tpr, thresholds) triple of arrays; the result is the
    same triple restricted to the hull vertices.
    """
    def slope(p1, p2):
        # Slope between two roc_point entries; vertical segments -> +inf.
        x1, y1, _ = p1
        x2, y2, _ = p2
        if x1 != x2:
            return (y2 - y1) / (x2 - x1)
        else:
            return numpy.inf

    fpr, _, _ = curve

    if len(fpr) <= 2:
        # Too few points for a proper hull; the curve is its own hull.
        return curve
    points = map(roc_point._make, zip(*curve))

    hull = deque([next(points)])

    for point in points:
        # Pop trailing hull vertices that would make the envelope
        # non-concave before appending the new point.
        while True:
            if len(hull) < 2:
                hull.append(point)
                break
            else:
                last = hull[-1]
                if point.fpr != last.fpr and \
                        slope(hull[-2], last) > slope(last, point):
                    hull.append(point)
                    break
                else:
                    hull.pop()

    fpr = numpy.array([p.fpr for p in hull])
    tpr = numpy.array([p.tpr for p in hull])
    thres = numpy.array([p.threshold for p in hull])
    return (fpr, tpr, thres)
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def slope(p1, p2):
    """Slope of the segment p1 -> p2 (first two coordinates of each point);
    +inf for a vertical segment."""
    x1, y1, *_ = p1
    x2, y2, *_ = p2
    if x1 == x2:
        return numpy.inf
    return (y2 - y1) / (x2 - x1)
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def roc_iso_performance_line(slope, hull, tol=1e-5):
    """
    Return the indices where a line with `slope` touches the ROC convex hull.
    """
    fpr, tpr, *_ = hull
    # Distance of each hull vertex to the reference iso line through (0, 1):
    #   y = slope * x + 1  <=>  slope * x - y + 1 = 0
    dist = distance_to_line(slope, -1, 1, fpr, tpr)
    # The closest vertex (or vertices, within tolerance) are the touch points.
    return numpy.flatnonzero((dist - numpy.min(dist)) <= tol)
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def roc_iso_performance_slope(fp_cost, fn_cost, p):
    """Slope of an iso-performance line for the given error costs and
    positive-class prior `p` (in [0, 1]); +inf when the denominator is 0."""
    assert 0 <= p <= 1
    denom = fn_cost * p
    return numpy.inf if denom == 0 else (fp_cost * (1.0 - p)) / denom
qusp/orange3
[ 1, 1, 1, 2, 1430084518 ]
def read(fname):
    """Return the text content of *fname*, resolved relative to this file.

    Uses a context manager so the file handle is closed deterministically
    instead of leaking until garbage collection (the previous bare
    ``open(...).read()`` never closed the file).
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as f:
        return f.read()
jradavenport/cubehelix
[ 82, 7, 82, 2, 1397592865 ]
def __init__(self, **kwargs):
    """After normal resource initialization, mark every field listed in
    ``Meta.readonly_fields`` as read-only."""
    super(ReadOnlyFieldNamespacedModelResource, self).__init__(**kwargs)
    # Missing Meta.readonly_fields is treated as "no read-only fields".
    for fld in getattr(self.Meta, 'readonly_fields', []):
        self.fields[fld].readonly = True
darkpixel/statuspage
[ 111, 24, 111, 8, 1420925296 ]
def hydrate(self, bundle):
    # Attach the User named by the request's ?username= query parameter
    # to the object being created/updated.
    # NOTE(review): raises KeyError when 'username' is absent and
    # User.DoesNotExist when it does not match -- confirm that surfacing
    # these to the API client is intended.
    u = User.objects.get(username=bundle.request.GET['username'])
    bundle.obj.user = u
    return bundle
darkpixel/statuspage
[ 111, 24, 111, 8, 1420925296 ]
def n_samples_from_flags(add_flags=True, flags_obj=None):
    """Collects sample-related options into a list of samples.

    Per-sample flags are ';'-separated strings aligned with --reads.
    Returns (samples_in_order, sample_role_to_train).
    """
    n_reads = flags_obj.reads.split(';')
    num_samples = len(n_reads)

    # Split every per-sample flag on ';' and verify it aligns with --reads;
    # unset flags ('DEFAULT') become empty strings, one per sample.
    flags_organized = {}
    for flag_name in [
        'reads', 'sample_names', 'downsample_fractions', 'pileup_image_heights'
    ]:
        if flags_obj[flag_name].value != 'DEFAULT':
            flags_organized[flag_name] = flags_obj[flag_name].value.split(';')
            if len(flags_organized[flag_name]) != num_samples:
                raise ValueError(f'--{flag_name} has {len(flags_organized[flag_name])} '
                                 f'samples, but it should be matching the number of '
                                 f'samples in --reads, which was {num_samples}.')
        else:
            flags_organized[flag_name] = [''] * num_samples

    # Build one SampleOptions proto per sample; role is the sample's index.
    n_sample_options = []
    for i in range(num_samples):
        sample_name = make_examples_core.assign_sample_name(
            sample_name_flag=flags_organized['sample_names'][i],
            reads_filenames=flags_organized['reads'][i])
        n_sample_options.append(
            deepvariant_pb2.SampleOptions(
                role=str(i),
                name=sample_name,
                variant_caller_options=make_examples_core.make_vc_options(
                    sample_name=sample_name, flags_obj=flags_obj),
                # NOTE(review): every sample gets order=range(num_samples)
                # and a fixed pileup_height of 100 here; the height may be
                # overridden below. Confirm these defaults are intended.
                order=range(num_samples),
                pileup_height=100))

    if add_flags:
        # Apply the per-sample overrides parsed above.
        for i in range(num_samples):
            n_sample_options[i].reads_filenames.extend(
                flags_organized['reads'][i].split(','))
            if flags_organized['downsample_fractions'][i]:
                n_sample_options[i].downsample_fraction = float(
                    flags_organized['downsample_fractions'][i])
            if flags_organized['pileup_image_heights'][i]:
                n_sample_options[i].pileup_height = int(
                    flags_organized['pileup_image_heights'][i])

    # Ordering here determines the default order of samples, and when a sample
    # above has a custom .order, then this is the list those indices refer to.
    samples_in_order = n_sample_options
    sample_role_to_train = '0'
    return samples_in_order, sample_role_to_train
google/deepvariant
[ 2741, 682, 2741, 7, 1511402182 ]
def check_options_are_valid(options):
    """Checks that all the options chosen make sense together."""
    # Shared validation (common to DeepVariant and DeepTrio).
    make_examples_options.check_options_are_valid(
        options, main_sample_index=MAIN_SAMPLE_INDEX)

    # Sample names must be pairwise distinct.
    names = [sample.name for sample in options.sample_options]
    if len(set(names)) != len(names):
        raise ValueError('--sample_names cannot contain duplicate names.')
google/deepvariant
[ 2741, 682, 2741, 7, 1511402182 ]
def main():
    """Try to read given cache file."""
    args = parse_args()
    logger = logging.getLogger('read-migrated-cache')
    cache = rss2irc.read_cache(logger, args.cache)
    # Sanity checks: a migrated cache must deserialize to CachedData and
    # contain at least one item.
    # NOTE(review): these `assert`s vanish under `python -O`.
    assert isinstance(cache, rss2irc.CachedData)
    assert len(cache.items)
    sys.exit(0)
zstyblik/rss2irc
[ 1, 1, 1, 1, 1436094510 ]
def enable(self):
    """Initialize NIC tracking state and parse the configured ignore list."""
    self.nics = {}
    # 'ignores' is a space-separated interface list; drop empty tokens.
    raw = self.pkmeter.config.get(self.namespace, 'ignores', '')
    self.ignores = [token for token in raw.split(' ') if token]
    super(Plugin, self).enable()
mjs7231/pkmeter
[ 21, 7, 21, 5, 1419820748 ]
def update(self):
    """Refresh per-interface network counters and compute deltas."""
    for iface, newio in psutil.net_io_counters(True).items():
        if not iface.startswith('lo'):  # skip loopback devices
            netinfo = netifaces.ifaddresses(iface)
            # Track only interfaces with an IPv4 address that are not in
            # the user-configured ignore list.
            if netinfo.get(netifaces.AF_INET) and not self._is_ignored(iface):
                newio = self._net_io_counters(newio)
                newio['iface'] = iface
                # Merge in the first IPv4 address entry (addr/netmask/...).
                newio.update(netinfo[netifaces.AF_INET][0])
                self._deltas(self.nics.get(iface,{}), newio)
                self.nics[iface] = newio
            elif iface in self.nics:
                # Interface disappeared or lost its address: stop tracking.
                del self.nics[iface]
    self.data['nics'] = sorted(self.nics.values(), key=lambda n:n['iface'])
    # System-wide totals, delta'd against the previous totals.
    self.data['total'] = self._deltas(self.data.get('total',{}), self._net_io_counters())
    super(Plugin, self).update()
mjs7231/pkmeter
[ 21, 7, 21, 5, 1419820748 ]
def _net_io_counters(self, io=None):
    """Convert a psutil net-io counters tuple into a plain dict, fetching
    the system-wide totals when no counters object is supplied."""
    io = io or psutil.net_io_counters()
    fields = ('bytes_sent', 'bytes_recv', 'packets_sent', 'packets_recv',
              'errin', 'errout', 'dropin', 'dropout')
    return {name: getattr(io, name) for name in fields}
mjs7231/pkmeter
[ 21, 7, 21, 5, 1419820748 ]
def format_chars(chars_sent_ls):
    """Zero-pad each character-id list to the maximum length in the batch.

    Padding is split evenly between both ends; when the deficit is odd the
    extra zero goes on the left. The entries of `chars_sent_ls` are replaced
    in place, and the (mutated) list is also returned.
    """
    max_len = max(len(chars) for chars in chars_sent_ls)
    for i, chars in enumerate(chars_sent_ls):
        deficit = max_len - len(chars)
        # BUG FIX: use floor division. The original `deficit / 2` yields a
        # float on Python 3, making `[0] * (deficit / 2)` raise TypeError;
        # `//` preserves the Python 2 integer behavior on both versions.
        left = (deficit + 1) // 2
        right = deficit // 2
        chars_sent_ls[i] = [0] * left + chars + [0] * right
    return chars_sent_ls
cosmozhang/NCRF-AE
[ 26, 5, 26, 1, 1500695653 ]
def add_unknown_words(word_vecs, vocab, min_df=1, k=200):
    """
    For words that occur in at least min_df documents, create a separate word vector.
    0.25 is chosen so the unknown vectors have (approximately) same variance as pre-trained ones
    """
    # NOTE(review): membership is tested with the word itself, but the new
    # vector is stored under vocab[word] (the index). Unless word_vecs is
    # keyed by index elsewhere, one of the two is wrong -- confirm intent.
    # NOTE(review): min_df is accepted but never used in this body.
    for word in vocab:
        if word not in word_vecs:
            idx = vocab[word]
            word_vecs[idx] = np.random.uniform(-0.25,0.25,k)
cosmozhang/NCRF-AE
[ 26, 5, 26, 1, 1500695653 ]
def is_user(s):
    """Return True when *s* looks like a Twitter-style @mention
    (an '@' followed by at least one character)."""
    return len(s) > 1 and s[0] == "@"
cosmozhang/NCRF-AE
[ 26, 5, 26, 1, 1500695653 ]
def digits(n):
    """Return the token 'DIGIT' repeated *n* times (numeral normalization:
    a number is replaced by one DIGIT token per digit)."""
    return 'DIGIT' * n
cosmozhang/NCRF-AE
[ 26, 5, 26, 1, 1500695653 ]
def sepdata(fname, gname, hname, pos_dictionary):
    """Read three CoNLL-style POS files (train/valid/test) and convert each
    into parallel lists of word-id, char-id, and tag-id arrays.

    NOTE(review): Python 2 code -- files opened in 'rb' are then treated as
    str, and `map(...)` below is assumed to return a list. File handles are
    never closed explicitly.
    """
    vocab_dict = pos_dictionary['words2idx']
    tag_dict = pos_dictionary['labels2idx']
    char_dict = pos_dictionary['chars2idx']
    # of all sets
    dataset_words = []
    dataset_labels = []
    dataset_chars = []
    for f in [fname, gname, hname]:
        data = open(f, "rb").readlines()
        # of a whole set
        words_set = []
        tag_labels_set = []
        chars_set = []
        # of a whole sentence
        example_words = []
        example_tag_labels = []
        example_char = []
        count = 0
        for line in data:
            # Strip line endings, then split into tab-separated columns.
            line = line.replace('\n', '').replace('\r', '')
            line = line.split("\t")
            if (not line[0].isdigit()) and (line != ['']):
                continue  # this is the heading line
            # A blank line means the current example (sentence) finishes.
            if (line == ['', ''] or line == ['']) and (len(example_words) > 0):
                words_set.append(np.array(example_words, dtype = "int32"))
                tag_labels_set.append(np.array(example_tag_labels, dtype = "int32"))
                chars_set.append(np.array(example_char, dtype = "int32"))
                # restart a new example after one finishes
                example_words = []
                example_tag_labels = []
                example_char = []
                count += 1
            else:
                # part of an example: column 1 is the token, column 3 its tag
                vocab = line[1]
                tag = line[3]
                if is_number(vocab):
                    # check if the term is a number
                    vocab = digits(len(vocab))
                if is_url(vocab):
                    vocab = "URL"
                if is_user(vocab):
                    vocab = "USR"
                example_words.append(vocab_dict[vocab])
                example_tag_labels.append(tag_dict[tag])
                # Per-character ids, re-padded to equal length every token.
                char_word_list = map(lambda u: char_dict[u], list(vocab))
                example_char.append(char_word_list)
                example_char = format_chars(example_char)
                # for each example do a padding
        dataset_words.append(words_set)
        dataset_labels.append(tag_labels_set)
        dataset_chars.append(chars_set)
    # Index 0/1/2 correspond to the fname/gname/hname files respectively.
    train_pos = [dataset_words[0], dataset_chars[0], dataset_labels[0]]
    valid_pos = [dataset_words[1], dataset_chars[1], dataset_labels[1]]
    test_pos = [dataset_words[2], dataset_chars[2], dataset_labels[2]]
    assert len(dataset_words[0]+dataset_words[1]+dataset_words[2]) == len(train_pos[0]) + len(valid_pos[0]) + len(test_pos[0])
    return train_pos, valid_pos, test_pos
cosmozhang/NCRF-AE
[ 26, 5, 26, 1, 1500695653 ]
def configuration(parent_package='', top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration(PACKAGE_NAME, parent_package, top_path) config.add_subpackage('__check_build')
RJT1990/pyflux
[ 2015, 236, 2015, 92, 1455653522 ]
def test_get(self):
    """system.get() must expose exactly the expected top-level keys."""
    expected = ['hostname', 'iprouting', 'banner_motd', 'banner_login']
    for dut in self.duts:
        dut.config('default hostname')
        resp = dut.api('system').get()
        self.assertEqual(sorted(expected), sorted(resp.keys()))
arista-eosplus/pyeapi
[ 125, 59, 125, 17, 1416361962 ]
def test_get_check_hostname(self):
    """A configured hostname must be reflected by system.get()."""
    for device in self.duts:
        device.config('hostname teststring')
        self.assertEqual(device.api('system').get()['hostname'], 'teststring')
arista-eosplus/pyeapi
[ 125, 59, 125, 17, 1416361962 ]
def test_get_banner_with_EOF(self):
    """A multi-line motd banner must round-trip (minus trailing newline)."""
    banner = '!!!newlinebaner\nSecondLIneEOF!!!newlinebanner\n'
    for dut in self.duts:
        dut.config([dict(cmd="banner motd", input=banner)])
        resp = dut.api('system').get()
        self.assertEqual(resp['banner_motd'], banner.rstrip())
arista-eosplus/pyeapi
[ 125, 59, 125, 17, 1416361962 ]
def test_set_hostname_with_no_value(self):
    """set_hostname(disable=True) should negate the hostname."""
    for device in self.duts:
        device.config('hostname test')
        outcome = device.api('system').set_hostname(disable=True)
        self.assertTrue(outcome, 'dut=%s' % device)
        self.assertIn('no hostname', device.running_config)
arista-eosplus/pyeapi
[ 125, 59, 125, 17, 1416361962 ]
def test_set_hostname_default_over_value(self):
    """default=True must win over an explicitly supplied hostname value."""
    for device in self.duts:
        device.config('hostname test')
        outcome = device.api('system').set_hostname(value='foo', default=True)
        self.assertTrue(outcome, 'dut=%s' % device)
        self.assertIn('no hostname', device.running_config)
arista-eosplus/pyeapi
[ 125, 59, 125, 17, 1416361962 ]
def test_set_iprouting_to_false(self):
    """set_iprouting(False) should disable IP routing."""
    for device in self.duts:
        device.config('ip routing')
        outcome = device.api('system').set_iprouting(False)
        self.assertTrue(outcome, 'dut=%s' % device)
        self.assertIn('no ip routing', device.running_config)
arista-eosplus/pyeapi
[ 125, 59, 125, 17, 1416361962 ]
def test_set_iprouting_to_default(self):
    """set_iprouting(default=True) should also disable IP routing."""
    for device in self.duts:
        device.config('ip routing')
        outcome = device.api('system').set_iprouting(default=True)
        self.assertTrue(outcome, 'dut=%s' % device)
        self.assertIn('no ip routing', device.running_config)
arista-eosplus/pyeapi
[ 125, 59, 125, 17, 1416361962 ]
def test_set_banner_motd(self):
    """set_banner('motd') should replace a directly-configured banner."""
    for dut in self.duts:
        # Seed a banner through raw config first and confirm it stuck.
        seeded = random_string()
        dut.config([dict(cmd="banner motd", input=seeded)])
        self.assertIn(seeded, dut.running_config)
        # Replace it via the API and verify the new text is present.
        via_api = random_string()
        outcome = dut.api('system').set_banner("motd", via_api)
        self.assertTrue(outcome, 'dut=%s' % dut)
        self.assertIn(via_api, dut.running_config)
arista-eosplus/pyeapi
[ 125, 59, 125, 17, 1416361962 ]
def test_set_banner_motd_default(self):
    """set_banner('motd', None, True) should negate the motd banner."""
    marker = "!!!!REMOVE BANNER TEST!!!!"
    for dut in self.duts:
        dut.config([dict(cmd="banner motd", input=marker)])
        dut.api('system').set_banner('motd', None, True)
        self.assertIn('no banner motd', dut.running_config)
arista-eosplus/pyeapi
[ 125, 59, 125, 17, 1416361962 ]
def test_set_banner_login_default(self):
    """set_banner('login', None, True) should negate the login banner."""
    marker = "!!!!REMOVE LOGIN BANNER TEST!!!!"
    for dut in self.duts:
        dut.config([dict(cmd="banner login", input=marker)])
        dut.api('system').set_banner('login', None, True)
        self.assertIn('no banner login', dut.running_config)
arista-eosplus/pyeapi
[ 125, 59, 125, 17, 1416361962 ]
def __init__(self, topic):
    """Constructor

    :param topic: Name of the ROS topic on which Twist messages will be
        published by this state.
    """
    super(PublishTwistState, self).__init__(outcomes=['done'],
                                            input_keys=['twist'])
    self._topic = topic
    # ProxyPublisher registers the topic once and shares it across states.
    self._pub = ProxyPublisher({self._topic: Twist})
FlexBE/generic_flexbe_states
[ 12, 21, 12, 5, 1448030032 ]
def extractTranslasiSanusiMe(item):
    '''
    Parser for 'translasi.sanusi.me'
    '''
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])

    # No chapter/volume info, or an explicit preview: not a release.
    if not (chp or vol) or "preview" in item['title'].lower():
        return None

    # (feed tag, release name, translation type)
    for tag, series, tl_type in (('PRC', 'PRC', 'translated'),
                                 ('Loiterous', 'Loiterous', 'oel')):
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp,
                                               frag=frag, postfix=postfix,
                                               tl_type=tl_type)
    return False
fake-name/ReadableWebProxy
[ 191, 16, 191, 3, 1437712243 ]
def attribute_checker(operator, attribute, value=''):
    """
    Takes an operator, attribute and optional value; returns a function
    that will return True for elements that match that combination.
    """
    def attr(el):
        # The attribute's value, defaulting to the empty string.
        return el.get(attribute, '')

    checkers = {
        # exact match (missing attribute -> None, never equal to a string)
        '=': lambda el: el.get(attribute) == value,
        # value is one of a set of space separated tokens
        '~': lambda el: value in attr(el).split(),
        # attribute starts with value
        '^': lambda el: attr(el).startswith(value),
        # attribute ends with value
        '$': lambda el: attr(el).endswith(value),
        # attribute contains value
        '*': lambda el: value in attr(el),
        # exactly value, or starts with "value-" (language-subcode style)
        '|': lambda el: attr(el) == value or attr(el).startswith('%s-' % value),
    }
    # Unknown operator: fall back to a simple attribute-presence test.
    return checkers.get(operator, lambda el: attribute in el)
devilry/devilry-django
[ 48, 23, 48, 82, 1264339874 ]
def monkeypatch(BeautifulSoupClass=None):
    """
    If you don't explicitly state the class to patch, defaults to the most
    common import location for BeautifulSoup.
    """
    if not BeautifulSoupClass:
        # Deferred import: BeautifulSoup is only required when patching
        # the default class.
        from BeautifulSoup import BeautifulSoup as BeautifulSoupClass
    # Expose CSS-selector querying as a method on parsed soup objects.
    BeautifulSoupClass.findSelect = select
devilry/devilry-django
[ 48, 23, 48, 82, 1264339874 ]
def cssFind(html, selector):
    """
    Parse *html* with :class:`BeautifulSoup.BeautifulSoup` and run
    :func:`select` with *selector* on the resulting tree.

    Added by Espen A. Kristiansen to make it even easier to use for
    testing.
    """
    return select(BeautifulSoup(html), selector)
devilry/devilry-django
[ 48, 23, 48, 82, 1264339874 ]
def cssExists(html, selector):
    """
    Return ``True`` if *selector* matches at least one element in
    *html* (see :func:`cssFind`).
    """
    # Idiom fix: rely on container truthiness instead of bool(len(...)).
    return bool(cssFind(html, selector))
devilry/devilry-django
[ 48, 23, 48, 82, 1264339874 ]
def __init__(self, *, innerproduct=None, gradient=None):
    r"""Initialize a new :py:class:`ObservablesMixedHAWP` instance for
    observable computation of Hagedorn wavepackets.

    :param innerproduct: Optional inner product to use (see
                         ``set_innerproduct``), default ``None``.
    :param gradient: Optional gradient operator used for the kinetic
                     energy computation (see ``set_gradient``),
                     default ``None``.
    """
    # BUG FIX: the keyword arguments were accepted but silently
    # discarded (both attributes were unconditionally set to None).
    self._innerproduct = innerproduct
    self._gradient = gradient
WaveBlocks/WaveBlocksND
[ 6, 8, 6, 34, 1332703340 ]
def set_gradient(self, gradient):
    r"""Set the gradient operator.

    The gradient is only used for the computation of the kinetic
    energy :math:`\langle \Psi | T | \Psi^{\prime} \rangle`.

    :param gradient: A :py:class:`Gradient` subclass instance.
    """
    self._gradient = gradient
WaveBlocks/WaveBlocksND
[ 6, 8, 6, 34, 1332703340 ]
def norm(self, wavepacket, *, component=None, summed=False):
    r"""Calculate the :math:`L^2` norm
    :math:`\langle \Psi | \Psi \rangle` of the wavepacket :math:`\Psi`.

    This is a pure delegation to
    :py:meth:`HagedornWavepacketBase.norm`.

    :param wavepacket: The wavepacket :math:`\Psi` whose norm we compute.
    :param component: Index :math:`i` of the component :math:`\Phi_i`
                      to use, or ``None`` for all components.
    :param summed: Whether to sum the per-component norms.
    :return: The norm(s), as returned by the wavepacket itself.
    """
    return wavepacket.norm(component=component, summed=summed)
WaveBlocks/WaveBlocksND
[ 6, 8, 6, 34, 1332703340 ]
def kinetic_energy(self, wavepacket, *, component=None, summed=False):
    r"""Compute the kinetic energy
    :math:`E_{\text{kin}} := \langle \Psi | T | \Psi \rangle` of the
    components :math:`\Phi_i` of the wavepacket :math:`\Psi`.

    Expands to ``kinetic_overlap_energy(wavepacket, wavepacket)``.

    :param wavepacket: The wavepacket :math:`\Psi`.
    :param component: Index :math:`i` of the component to use, or
                      ``None`` for all :math:`N` components.
    :param summed: Whether to sum up the per-component energies.
    :return: Per-component kinetic energies or the summed total.
    """
    return self.kinetic_overlap_energy(wavepacket, wavepacket,
                                       component=component, summed=summed)
WaveBlocks/WaveBlocksND
[ 6, 8, 6, 34, 1332703340 ]
def create(kernel):
    """Build the template object for the large style-02 Tatooine house."""
    result = Building()
    result.template = "object/building/tatooine/shared_housing_tatt_style02_large.iff"
    result.attribute_template_id = -1
    result.stfName("building_name", "housing_tatt_style01_large")
    # NOTE(review): no value is returned from the visible body; sibling
    # factories conventionally end with "return result" — confirm the
    # snippet is not truncated.
anhstudios/swganh
[ 62, 37, 62, 37, 1297996365 ]
def resolve(code):
    """
    Transform the given (2- or 3-letter) language code to a human
    readable language name.

    The return value is a 2-tuple containing the given language code
    and the language name. If the language code cannot be resolved,
    name will be 'Unknown (<code>)'.
    """
    if not code:
        return None, None
    if not isinstance(code, string_types):
        raise ValueError('Invalid language code specified by parser')

    # Normalize: lowercase, keep only the first run of letters, max 3.
    normalized = re.split(r'[^a-z]', code.lower())[0][:3]

    # Each spec is (codes..., name); match against everything but the name.
    name = next((spec[-1] for spec in codes if normalized in spec[:-1]), None)
    if name is not None:
        return normalized, name
    return normalized, u'Unknown (%r)' % normalized
SickGear/SickGear
[ 574, 83, 574, 2, 1415773777 ]
def main():
    """Collect person records interactively, then print and save them.

    Following the plan in the original (Italian) comment: repeatedly ask
    the user for name, city and salary, append each record (a dict with
    keys 'name', 'city', 'salary') to the module-level PEOPLE list until
    the user stops, then print the list and write it to a file.
    """
    # BUG FIX: the original docstring was never closed, so the rest of
    # the function body was swallowed into the string literal and the
    # source did not parse past this point.
    while insert_person():
        pass
    stampa_lista()
    scrivi_file()
feroda/lessons-python4beginners
[ 2, 12, 2, 3, 1472811971 ]
def insert_person():
    # NOTE(review): the visible body only initializes a flag and then
    # falls off the end (implicitly returning None); the interactive
    # input logic described in main()'s plan appears truncated or not
    # yet implemented — confirm against the full source.
    ret_val = False
feroda/lessons-python4beginners
[ 2, 12, 2, 3, 1472811971 ]
def stampa_lista():
    """Print one formatted line for every record in PEOPLE."""
    print("Stampo la mia lista... ")
    for person in PEOPLE:
        # Each record is a dict with 'name', 'city' and 'salary' keys.
        print("Sig: {name} di {city} guadagna {salary}".format(**person))
feroda/lessons-python4beginners
[ 2, 12, 2, 3, 1472811971 ]
def scrivi_file():
    """Announce that the people list is being written to a file."""
    print("Scrivo file... ")
feroda/lessons-python4beginners
[ 2, 12, 2, 3, 1472811971 ]
def __init__(self, source_type, driver_format, is_block_dev=False):
    """Image initialization.

    :source_type: block or file
    :driver_format: raw or qcow2
    :is_block_dev: whether the backing store is a block device
    """
    # Ephemeral-storage encryption is only implemented for LVM images;
    # refuse the configuration up front for any other backend.
    if (CONF.ephemeral_storage_encryption.enabled and
            not self._supports_encryption()):
        raise exception.NovaException(_('Incompatible settings: '
                                        'ephemeral storage encryption is supported '
                                        'only for LVM images.'))

    self.source_type = source_type
    self.driver_format = driver_format
    self.driver_io = None
    self.discard_mode = CONF.libvirt.hw_disk_discard
    self.is_block_dev = is_block_dev
    self.preallocate = False

    # NOTE(dripton): We store lines of json (path, disk_format) in this
    # file, for some image types, to prevent attacks based on changing the
    # disk_format.
    self.disk_info_path = None

    # NOTE(mikal): We need a lock directory which is shared along with
    # instance files, to cover the scenario where multiple compute nodes
    # are trying to create a base file at the same time
    self.lock_path = os.path.join(CONF.instances_path, 'locks')
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def create_image(self, prepare_template, base, size, *args, **kwargs):
    """Create an image from a template; a no-op in the base class.

    Subclasses provide the image-type-specific behaviour.

    :prepare_template: callable that creates the template; must accept
                       a `target` argument
    :base: template name
    :size: size of the created image in bytes
    """
    pass
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def resize_image(self, size):
    """Resize the image to *size* bytes; a no-op in the base class.

    :size: desired size of the image in bytes
    """
    pass
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def disk_qos(self, info, extra_specs):
    """Copy per-disk QoS tuning values from flavor extra specs to *info*.

    Recognizes keys of the form ``quota:<item>`` where ``<item>`` is one
    of the known disk throttling knobs, and sets the matching attribute
    on *info*; all other keys are ignored.

    :param info: object receiving the tuning attributes
    :param extra_specs: mapping of flavor extra specs
    """
    # Modernization: six.iteritems (py2 compat shim) replaced with the
    # portable dict.items(); behaviour is identical for this usage.
    tune_items = ('disk_read_bytes_sec', 'disk_read_iops_sec',
                  'disk_write_bytes_sec', 'disk_write_iops_sec',
                  'disk_total_bytes_sec', 'disk_total_iops_sec')
    for key, value in extra_specs.items():
        scope = key.split(':')
        # Only honour namespaced keys such as "quota:disk_read_bytes_sec".
        if len(scope) > 1 and scope[0] == 'quota' and scope[1] in tune_items:
            setattr(info, scope[1], value)
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def check_image_exists(self):
    """Return True if the backing file at ``self.path`` exists."""
    return os.path.exists(self.path)
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def fetch_func_sync(target, *args, **kwargs):
    # The image may already have been fetched by a call that held the
    # lock before us; in that case skip the download entirely.
    if os.path.exists(target):
        return
    fetch_func(target=target, *args, **kwargs)
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def _can_fallocate(self):
    """Check once per class, whether fallocate(1) is available, and that
    the instances directory supports fallocate(2).
    """
    # The probe result is cached on the class so it runs at most once
    # per backend type.
    can_fallocate = getattr(self.__class__, 'can_fallocate', None)
    if can_fallocate is None:
        # Probe by fallocating a 1-byte scratch file next to the image;
        # any stderr output from fallocate(1) means it is unusable here.
        test_path = self.path + '.fallocate_test'
        _out, err = utils.trycmd('fallocate', '-l', '1', test_path)
        fileutils.delete_if_exists(test_path)
        can_fallocate = not err
        self.__class__.can_fallocate = can_fallocate
    if not can_fallocate:
        LOG.warning(_LW('Unable to preallocate image at path: '
                        '%(path)s'), {'path': self.path})
    return can_fallocate
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def get_disk_size(self, name):
    """Return the size of disk *name*, delegating to the disk module."""
    return disk.get_disk_size(name)
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def _get_driver_format(self):
    """Return the configured driver format (e.g. 'raw' or 'qcow2')."""
    return self.driver_format
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def _dict_from_line(line):
    # Deserialize one json line of the disk-info file into a dict.
    # An empty/None line yields an empty dict; malformed json is
    # surfaced as InvalidDiskInfo rather than a bare parse error.
    if not line:
        return {}
    try:
        return jsonutils.loads(line)
    except (TypeError, ValueError) as e:
        msg = (_("Could not load line %(line)s, got error "
                 "%(error)s") % {'line': line, 'error': e})
        raise exception.InvalidDiskInfo(reason=msg)
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def write_to_disk_info_file():
    # Closure: reads self.disk_info_path / self.path / driver_format
    # from the enclosing method's scope.
    # Use os.open to create it without group or world write permission.
    fd = os.open(self.disk_info_path, os.O_RDONLY | os.O_CREAT, 0o644)
    with os.fdopen(fd, "r") as disk_info_file:
        line = disk_info_file.read().rstrip()
        dct = _dict_from_line(line)

    # Refuse to change an already-recorded format for this path.
    if self.path in dct:
        msg = _("Attempted overwrite of an existing value.")
        raise exception.InvalidDiskInfo(reason=msg)
    dct.update({self.path: driver_format})

    # Write to a temp file and rename it into place so readers never
    # observe a partially written disk-info file.
    tmp_path = self.disk_info_path + ".tmp"
    fd = os.open(tmp_path, os.O_WRONLY | os.O_CREAT, 0o644)
    with os.fdopen(fd, "w") as tmp_file:
        tmp_file.write('%s\n' % jsonutils.dumps(dct))
    os.rename(tmp_path, self.disk_info_path)
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def is_shared_block_storage():
    """Report whether the backend puts images on shared block storage
    (it does not, for this backend)."""
    return False
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def is_file_in_instance_path():
    """Report whether the backend stores images as files under the
    instance path (it does not, for this backend)."""
    return False
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def direct_snapshot(self, context, snapshot_name, image_format,
                    image_id, base_image_id):
    """Prepare a snapshot for direct reference from glance.

    :raises: exception.ImageUnacceptable if it cannot be referenced
             directly in the specified image format
    :returns: URL to be given to glance
    """
    raise NotImplementedError(_('direct_snapshot() is not implemented'))
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def _get_lock_name(self, base):
    """Return the lock name of a base file: its final path component."""
    # os.path.basename(p) is defined as os.path.split(p)[1], so this is
    # exactly equivalent to the split()[-1] form.
    return os.path.basename(base)
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def import_file(self, instance, local_file, remote_name):
    """Import an image from local storage into this backend.

    For stores using local disk this is a no-op: the local file is
    already considered "in the store".  Remote stores (e.g. RBD)
    override this; an existing image with the same name is replaced
    by the new file.

    :param local_file: path to the file to import
    :param remote_name: the name for the file in the store
    """
    # NOTE(mikal): this is a noop for now for all stores except RBD, but
    # we should talk about if we want this functionality for everything.
    pass
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def remove_snap(self, name, ignore_errors=False):
    """Remove the snapshot *name* from the image.

    A no-op on backends that don't support snapshots.

    :param name: name of the snapshot
    :param ignore_errors: don't log errors if the snapshot does not exist
    """
    pass
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def __init__(self, instance=None, disk_name=None, path=None):
    """Raw file-backed image.

    :param instance: instance the disk belongs to; used to derive the
                     image path when *path* is not given
    :param disk_name: file name of the disk within the instance dir
    :param path: explicit path to the image file, overriding the
                 instance-derived location
    """
    self.disk_name = disk_name
    super(Raw, self).__init__("file", "raw", is_block_dev=False)

    self.path = (path or
                 os.path.join(libvirt_utils.get_instance_path(instance),
                              disk_name))
    # Preallocation is enabled only when the configured mode slugifies
    # to 'space'; native driver I/O is then selected to go with it.
    self.preallocate = (
        strutils.to_slug(CONF.preallocate_images) == 'space')
    if self.preallocate:
        self.driver_io = "native"
    # disk.info lives next to the image file (see the base class note
    # about guarding the recorded disk_format).
    self.disk_info_path = os.path.join(os.path.dirname(self.path),
                                       'disk.info')
    self.correct_format()
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]