code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def __isparent(self, ignore_errors=False): <NEW_LINE> <INDENT> return len(self.__list_children( ignore_errors=ignore_errors)) > 0
Indicates whether the current image is a parent image. 'ignore_plugin_errors' ignore plugin runtime errors when trying to determine if we're a parent image.
625941c2e64d504609d747d7
def __init__(self, highlim=75, lowlim=25): <NEW_LINE> <INDENT> self.highlim = highlim <NEW_LINE> self.lowlim = lowlim <NEW_LINE> super().__init__()
A Cox model which outputs the class of the sample: high, mid, or low hazard. Limits are defined as percentiles.
625941c294891a1f4081ba3f
def __init__(self, data_array, rows=3000, columns=4000, datatype='uint16',channel_groups=(1,1)): <NEW_LINE> <INDENT> self.Images = [] <NEW_LINE> self.channel_groups = channel_groups <NEW_LINE> self.num_channel_groups = self.channel_groups[0]*self.channel_groups[1] <NEW_LINE> if len(data_array)>0 and type(data_array[0])==str: <NEW_LINE> <INDENT> self.load_RAWfiles(data_array,rows,columns,datatype,channel_groups=self.channel_groups) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.load_Images(data_array)
data_array: list of RAW files or list of Image objects
625941c2f7d966606f6a9f99
def withdraw(self, amount): <NEW_LINE> <INDENT> self.wthdrw = amount <NEW_LINE> print("This is your withdrawal:",self.wthdrw) <NEW_LINE> if self.initial_balance < self.wthdrw: <NEW_LINE> <INDENT> self.count_fees += 5 <NEW_LINE> self.initial_balance -= self.wthdrw <NEW_LINE> print("This is new bal after withdrawal:",self.initial_balance) <NEW_LINE> fee = 5 <NEW_LINE> self.initial_balance = self.initial_balance - fee <NEW_LINE> print("New bal after withdrawal:",self.initial_balance, "including overdraft fee of",fee) <NEW_LINE> print("Total fee:",fee) <NEW_LINE> return self.initial_balance <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.initial_balance -= self.wthdrw <NEW_LINE> return self.initial_balance
Withdraws the amount from the account. Each withdrawal resulting in a negative balance also deducts a penalty fee of 5 dollars from the balance.
625941c2a219f33f34628903
def get_structure_dataframes(self, abspath=False, filter_abifile=None, **kwargs): <NEW_LINE> <INDENT> from abipy.core.structure import dataframes_from_structures <NEW_LINE> if "index" not in kwargs: <NEW_LINE> <INDENT> index = list(self._abifiles.keys()) <NEW_LINE> if not abspath: index = self._to_relpaths(index) <NEW_LINE> kwargs["index"] = index <NEW_LINE> <DEDENT> abifiles = self.abifiles if filter_abifile is not None else list(filter(filter_abifile, self.abifiles)) <NEW_LINE> return dataframes_from_structures(struct_objects=abifiles, **kwargs)
Wrap dataframes_from_structures function. Args: abspath: True if paths in index should be absolute. Default: Relative to getcwd(). filter_abifile: Function that receives an ``abifile`` object and returns True if the file should be added to the plotter.
625941c2091ae35668666ef9
def _lpad(self, length, pad=' '): <NEW_LINE> <INDENT> return _ops.LPad(self, length, pad).to_expr()
Returns string of given length by truncating (on right) or padding (on left) original string Parameters ---------- length : int pad : string, default is ' ' Examples -------- table.strings.lpad(5, '-') 'a' becomes '----a' 'abcdefg' becomes 'abcde' Returns ------- padded : string
625941c216aa5153ce362410
def read_bitcoin_config(dbdir): <NEW_LINE> <INDENT> from ConfigParser import SafeConfigParser <NEW_LINE> class FakeSecHead(object): <NEW_LINE> <INDENT> def __init__(self, fp): <NEW_LINE> <INDENT> self.fp = fp <NEW_LINE> self.sechead = '[all]\n' <NEW_LINE> <DEDENT> def readline(self): <NEW_LINE> <INDENT> if self.sechead: <NEW_LINE> <INDENT> try: return self.sechead <NEW_LINE> finally: self.sechead = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> s = self.fp.readline() <NEW_LINE> if s.find('#') != -1: <NEW_LINE> <INDENT> s = s[0:s.find('#')].strip() +"\n" <NEW_LINE> <DEDENT> return s <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> config_parser = SafeConfigParser() <NEW_LINE> config_parser.readfp(FakeSecHead(open(os.path.join(dbdir, "alta.conf")))) <NEW_LINE> return dict(config_parser.items("all"))
Read the alta.conf file from dbdir, returns dictionary of settings
625941c2442bda511e8be3b2
@jsonErrorHandler <NEW_LINE> def createUsers(data): <NEW_LINE> <INDENT> users_list = list(data['users'].value_counts().keys()) <NEW_LINE> for user in users_list: <NEW_LINE> <INDENT> if len(users.distinct('user_id')) == 0: <NEW_LINE> <INDENT> users_id = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> users_id = len(users.distinct('user_id')) <NEW_LINE> <DEDENT> users.insert_one({ 'user_id': users_id, 'user_name': user }) <NEW_LINE> <DEDENT> print('Users names have been created in users collection') <NEW_LINE> return 'Users names have been created in users collection'
Creates users in our database in MongoDB from the dataset cleaned
625941c250485f2cf553cd30
def polyFeatures(X, p): <NEW_LINE> <INDENT> X_poly = np.zeros((X.shape[0], p)) <NEW_LINE> for j in range(1, p + 1): <NEW_LINE> <INDENT> X_poly[:, j - 1] = X[:, 0] ** j <NEW_LINE> <DEDENT> return X_poly
Maps X (1D vector) into the p-th power [X_poly] = POLYFEATURES(X, p) takes a data matrix X (size m x 1) and maps each example into its polynomial features where X_poly(i, :) = [X(i) X(i).^2 X(i).^3 ... X(i).^p];
625941c21f037a2d8b946195
def extract_maas_parameters(config): <NEW_LINE> <INDENT> params = {} <NEW_LINE> for line in config.splitlines(): <NEW_LINE> <INDENT> cloud, key, type, value = line.split()[:4] <NEW_LINE> if key == "cloud-init/maas-metadata-url": <NEW_LINE> <INDENT> params['url'] = value <NEW_LINE> <DEDENT> elif key == "cloud-init/maas-metadata-credentials": <NEW_LINE> <INDENT> values = value.split("&") <NEW_LINE> for oauth in values: <NEW_LINE> <INDENT> key, value = oauth.split('=') <NEW_LINE> if key == 'oauth_token_key': <NEW_LINE> <INDENT> params['token_key'] = value <NEW_LINE> <DEDENT> elif key == 'oauth_token_secret': <NEW_LINE> <INDENT> params['token_secret'] = value <NEW_LINE> <DEDENT> elif key == 'oauth_consumer_key': <NEW_LINE> <INDENT> params['consumer_key'] = value <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return params
Extracts the needed values from the debconf entry.
625941c250812a4eaa59c2bb
def delete(self): <NEW_LINE> <INDENT> self.rdbFile.deleteRouter(self.name) <NEW_LINE> self.crcFile.deleteRouterDetails(self.name) <NEW_LINE> self.djangoDevice.delete()
Remove from routerdb and cloginrc and django database
625941c2d10714528d5ffc78
def addConditionTask(self, parentTask, parentOutputModuleName): <NEW_LINE> <INDENT> mySplitArgs = {} <NEW_LINE> mySplitArgs['algo_package'] = "T0.JobSplitting" <NEW_LINE> mySplitArgs['runNumber'] = self.runNumber <NEW_LINE> mySplitArgs['streamName'] = self.streamName <NEW_LINE> parentTaskCmssw = parentTask.getStep("cmsRun1") <NEW_LINE> parentOutputModule = parentTaskCmssw.getOutputModule(parentOutputModuleName) <NEW_LINE> conditionTask = parentTask.addTask("%sCondition%s" % (parentTask.name(), parentOutputModuleName)) <NEW_LINE> conditionTaskBogus = conditionTask.makeStep("bogus") <NEW_LINE> conditionTaskBogus.setStepType("DQMUpload") <NEW_LINE> conditionTask.setInputReference(parentTaskCmssw, outputModule = parentOutputModuleName) <NEW_LINE> conditionTask.applyTemplates() <NEW_LINE> conditionTask.setTaskType("Harvesting") <NEW_LINE> conditionTask.setSplittingAlgorithm("Condition", **mySplitArgs) <NEW_LINE> return
_addConditionTask_ Does not actually produce any jobs The job splitter is custom and just forwards information into T0AST specific data structures, the actual upload of the conditions to the DropBox is handled in a separate Tier0 component.
625941c207f4c71912b11418
def stop(self): <NEW_LINE> <INDENT> _log.info('stopping services: %s', self.service_names) <NEW_LINE> SpawningProxy(self.containers).stop() <NEW_LINE> _log.info('services stopped: %s', self.service_names)
Stop all running containers concurrently. The method blocks until all containers have stopped.
625941c267a9b606de4a7e52
def needs_commit(self): <NEW_LINE> <INDENT> return self.count_pending_units > 0
Check whether there are some not committed changes.
625941c29f2886367277a826
def set_high_low_lines(self, color='black', width=0.75, dash_type='solid', transparency=50): <NEW_LINE> <INDENT> self.chart.set_high_low_lines({ 'line': { 'color': color, 'width': width, 'dash_type': dash_type, 'transparency': transparency } })
Set properties for the chart high-low lines. :param color: string, default 'black' :param width: float or int, default 0.75 :param dash_type: string, default 'solid' :param transparency: int, default 50 :return:
625941c2d99f1b3c44c6752b
def _flow_check_handler_internal(self): <NEW_LINE> <INDENT> integ_flow = self.integ_br_obj.dump_flows_for( in_port=self.int_peer_port_num) <NEW_LINE> ext_flow = self.ext_br_obj.dump_flows_for( in_port=self.phy_peer_port_num) <NEW_LINE> for net_uuid, lvm in six.iteritems(self.local_vlan_map): <NEW_LINE> <INDENT> vdp_vlan = lvm.any_consistent_vlan() <NEW_LINE> flow_required = False <NEW_LINE> if not (vdp_vlan and ovs_lib.is_valid_vlan_tag(vdp_vlan)): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if not self._check_bridge_flow(integ_flow, vdp_vlan, lvm.lvid): <NEW_LINE> <INDENT> LOG.error("Flow for VDP Vlan %(vdp_vlan)s, Local vlan " "%(lvid)s not present on Integ bridge", {'vdp_vlan': vdp_vlan, 'lvid': lvm.lvid}) <NEW_LINE> flow_required = True <NEW_LINE> <DEDENT> if not self._check_bridge_flow(ext_flow, lvm.lvid, vdp_vlan): <NEW_LINE> <INDENT> LOG.error("Flow for VDP Vlan %(vdp_vlan)s, Local vlan " "%(lvid)s not present on External bridge", {'vdp_vlan': vdp_vlan, 'lvid': lvm.lvid}) <NEW_LINE> flow_required = True <NEW_LINE> <DEDENT> if flow_required: <NEW_LINE> <INDENT> LOG.info("Programming flows for lvid %(lvid)s vdp vlan" " %(vdp)s", {'lvid': lvm.lvid, 'vdp': vdp_vlan}) <NEW_LINE> self.program_vm_ovs_flows(lvm.lvid, 0, vdp_vlan)
Periodic handler to check if installed flows are present. This handler runs periodically to check if installed flows are present. This function cannot detect and delete the stale flows, if present. It requires more complexity to delete stale flows. Generally, stale flows are not present. So, that logic is not put here.
625941c2435de62698dfdbe3
@click.group() <NEW_LINE> def main(): <NEW_LINE> <INDENT> pass
Simple CLI for ecasb2share
625941c28e05c05ec3eea30a
def gratingAnglesFF(phEnergy, lineDensity, cff, nOrderDiff=1): <NEW_LINE> <INDENT> hc = 1.239842e-6 <NEW_LINE> alpha = np.arcsin(-nOrderDiff*lineDensity*1e3*hc/phEnergy/(cff**2-1) + np.sqrt(1+(-nOrderDiff*lineDensity*1e3*hc/phEnergy*cff/(cff**2-1))**2)) <NEW_LINE> return [alpha, gratingAngle(alpha, phEnergy, lineDensity, nOrderDiff)]
Grating angles at Fixed Focus Condition returns alpha and beta in rad phEnergy in eV lineDensity in line/mm
625941c2ac7a0e7691ed4067
def get_W(self, word_vecs, k=300): <NEW_LINE> <INDENT> vocab_size = len(word_vecs) <NEW_LINE> word_idx_map = dict() <NEW_LINE> W = np.zeros(shape=(vocab_size+1, k), dtype="float32") <NEW_LINE> W[0] = np.zeros(k) <NEW_LINE> i = 1 <NEW_LINE> for word in word_vecs: <NEW_LINE> <INDENT> W[i] = word_vecs[word] <NEW_LINE> word_idx_map[word] = i <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> return W, word_idx_map
Get word matrix. W[i] is the vector for word indexed by i
625941c2090684286d50ec7b
def set_api_env(url=None, headers=None): <NEW_LINE> <INDENT> global api_base_url <NEW_LINE> global http_headers <NEW_LINE> api_base_url = url <NEW_LINE> http_headers = headers
Set base URL and http headers Parameters ---------- url : str Base URL string headers : dict Http headers
625941c21f037a2d8b946196
def test_dataset_is_public_and_has_data(self): <NEW_LINE> <INDENT> department = Department.create(name="IM Police Department", short_name="IMPD", load_defaults=True) <NEW_LINE> assert department.dataset_is_public_and_has_data("complaints") == False <NEW_LINE> assert department.dataset_is_public_and_has_data("uof") == False <NEW_LINE> assert department.dataset_is_public_and_has_data("ois") == False <NEW_LINE> assert department.dataset_is_public_and_has_data("assaults") == False <NEW_LINE> assert department.displayable_dataset_count() == 0 <NEW_LINE> CitizenComplaintIMPD.create(department_id=department.id, opaque_id="12345abcde") <NEW_LINE> assert department.dataset_is_public_and_has_data("complaints") == True <NEW_LINE> assert department.displayable_dataset_count() == 1 <NEW_LINE> UseOfForceIncidentIMPD.create(department_id=department.id, opaque_id="23456bcdef") <NEW_LINE> assert department.dataset_is_public_and_has_data("uof") == True <NEW_LINE> assert department.displayable_dataset_count() == 2 <NEW_LINE> OfficerInvolvedShootingIMPD.create(department_id=department.id, opaque_id="34567cdefg") <NEW_LINE> assert department.dataset_is_public_and_has_data("ois") == True <NEW_LINE> assert department.displayable_dataset_count() == 3 <NEW_LINE> AssaultOnOfficerIMPD.create(department_id=department.id, opaque_id="45678defgh") <NEW_LINE> assert department.dataset_is_public_and_has_data("assaults") == True <NEW_LINE> assert department.displayable_dataset_count() == 4 <NEW_LINE> department.is_public_citizen_complaints = False <NEW_LINE> assert department.dataset_is_public_and_has_data("complaints") == False <NEW_LINE> department.is_public_use_of_force_incidents = False <NEW_LINE> assert department.dataset_is_public_and_has_data("uof") == False <NEW_LINE> department.is_public_officer_involved_shootings = False <NEW_LINE> assert department.dataset_is_public_and_has_data("ois") == False <NEW_LINE> department.is_public_assaults_on_officers = False <NEW_LINE> assert 
department.dataset_is_public_and_has_data("assaults") == False <NEW_LINE> assert department.displayable_dataset_count() == 0
We can accurately tell if a dataset is public and has data.
625941c257b8e32f52483431
def phaseSelection(k, v, fmt, meta): <NEW_LINE> <INDENT> global inselection <NEW_LINE> if k == 'Para': <NEW_LINE> <INDENT> if isinstance(v[0], dict): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> if v[0] == '%%kaishi%%': <NEW_LINE> <INDENT> if inselection: <NEW_LINE> <INDENT> inselection = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> inselection = True <NEW_LINE> <DEDENT> return [] <NEW_LINE> <DEDENT> <DEDENT> if not inselection: <NEW_LINE> <INDENT> return []
Second Step: Foce on selection
625941c2b7558d58953c4eaf
def test_metrics_clear(n_samples=2000, knn=50, uniform_class=0): <NEW_LINE> <INDENT> X, y = generate_sample(n_samples=n_samples, n_features=10) <NEW_LINE> sample_weight = numpy.random.exponential(size=n_samples) <NEW_LINE> predictions = numpy.random.random(size=[n_samples, 2]) <NEW_LINE> predictions /= predictions.sum(axis=1, keepdims=True) <NEW_LINE> features = X.columns[:1] <NEW_LINE> mask = (y == uniform_class) <NEW_LINE> X_clear = X.ix[mask, :] <NEW_LINE> y_clear = y[mask] <NEW_LINE> sample_weight_clear = sample_weight[mask] <NEW_LINE> predictions_clear = predictions[mask] <NEW_LINE> for function in [sde, theil_flatness, cvm_flatness]: <NEW_LINE> <INDENT> flatness_val = function(y, predictions, X, uniform_features=features, sample_weight=sample_weight, label=0, knn=knn) <NEW_LINE> flatness_val_clear = function(y_clear, predictions_clear, X_clear, uniform_features=features, sample_weight=sample_weight_clear, label=0, knn=knn) <NEW_LINE> assert flatness_val == flatness_val_clear, 'after deleting other class, the metrics changed' <NEW_LINE> <DEDENT> for class_ in [KnnBasedSDE, KnnBasedTheil, KnnBasedCvM]: <NEW_LINE> <INDENT> metric1 = class_(n_neighbours=knn, uniform_features=features, uniform_label=0, ) <NEW_LINE> metric1.fit(X, y, sample_weight=sample_weight) <NEW_LINE> flatness_val1 = metric1(y, predictions, sample_weight) <NEW_LINE> metric2 = class_(n_neighbours=knn, uniform_features=features, uniform_label=0, ) <NEW_LINE> metric2.fit(X_clear, y_clear, sample_weight=sample_weight_clear) <NEW_LINE> flatness_val2 = metric2(y_clear, predictions_clear, sample_weight_clear) <NEW_LINE> assert flatness_val1 == flatness_val2, 'after deleting other class, the metrics changed'
Testing that after deleting all inappropriate events (events of other class), metrics stays the same
625941c271ff763f4b54961f
def created_as_json( request: Request, start_response: StartResponse, url_part: str, json: str | bytes | Any, *, extra_headers: Sequence[Header] = [], ) -> Iterable[bytes]: <NEW_LINE> <INDENT> all_headers = [_location_header(request, url_part)] + list(extra_headers) <NEW_LINE> return respond_with_json( start_response, json, status=HTTPStatus.CREATED, extra_headers=all_headers, )
Prepare a 201 Created WSGI response with a Location header and JSON body.
625941c2167d2b6e31218b2d
def _collapseMsg(self, msg): <NEW_LINE> <INDENT> retval = {} <NEW_LINE> for log in msg: <NEW_LINE> <INDENT> data = "".join(msg[log]) <NEW_LINE> if isinstance(log, tuple) and log[0] == 'log': <NEW_LINE> <INDENT> retval['log'] = (log[1], data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> retval[log] = data <NEW_LINE> <DEDENT> <DEDENT> return retval
Take msg, which is a dictionary of lists of output chunks, and concatenate all the chunks into a single string
625941c2ec188e330fd5a73a
@api_view(['GET']) <NEW_LINE> def asset_classfilter(request, class_name): <NEW_LINE> <INDENT> if request.method == 'GET': <NEW_LINE> <INDENT> assets = Asset.objects.filter(asset_class=class_name) <NEW_LINE> if not assets: <NEW_LINE> <INDENT> return Response(status=status.HTTP_404_NOT_FOUND) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> serializer = AssetSerializer(assets, many=True) <NEW_LINE> return Response(serializer.data)
List assets filtered by the provided class string :param request: Django HTTP request :param class_name: The name of the class to perform filtering :return: Response with appropriate assets
625941c2656771135c3eb804
def new_column_sugar(needs: tuple, name=None): <NEW_LINE> <INDENT> def real_decorator(func): <NEW_LINE> <INDENT> @wraps(func) <NEW_LINE> def wrapper(self, *args, **kwargs): <NEW_LINE> <INDENT> for need in needs: <NEW_LINE> <INDENT> if need not in self: <NEW_LINE> <INDENT> raise exceptions.RequiredColumnError(need) <NEW_LINE> <DEDENT> <DEDENT> out = func(self, *args, **kwargs) <NEW_LINE> return Series(out, index=self.index, name=name) <NEW_LINE> <DEDENT> return wrapper <NEW_LINE> <DEDENT> return real_decorator
Decorator for certain methods of ActivityData that create new columns using the special column types. Parameters ---------- needs : tuple A tuple of column names. name : str, optional The name for the returned Series object. Returns ------- Series Suitable for joining to the existing data. Raises ------ RequiredColumnError If a column specified in `needs` is not present.
625941c2435de62698dfdbe4
def arrayPairSum(self, nums): <NEW_LINE> <INDENT> nums.sort() <NEW_LINE> sum = 0 <NEW_LINE> for n in nums[0::2]: <NEW_LINE> <INDENT> sum+= n <NEW_LINE> <DEDENT> return sum
:type nums: List[int] :rtype: int
625941c2b57a9660fec3381a
def list_favorite_kittens(color: KittenColor) -> List[Kitten]: <NEW_LINE> <INDENT> kittens = [ Kitten({'name': 'fluffums', 'color': 'white', 'cuteness': 9.8}), Kitten({'name': 'tabitha', 'color': 'tabby', 'cuteness': 8.7}), Kitten({'name': 'meowster', 'color': 'white', 'cuteness': 7.8}), Kitten({'name': 'fuzzball', 'color': 'brown', 'cuteness': 8.0}), ] <NEW_LINE> return [ kitten for kitten in kittens if kitten['color'] == color ]
List your favorite kittens, optionally filtered by color.
625941c292d797404e304121
def main(): <NEW_LINE> <INDENT> url = 'https://www.triip.me/search/' <NEW_LINE> options = webdriver.ChromeOptions() <NEW_LINE> options.binary_location = '/Applications/Google Chrome Canary.app/Contents/MacOS/Google Chrome Canary' <NEW_LINE> options.add_argument('window-size=800x841') <NEW_LINE> options.add_argument('headless') <NEW_LINE> driver = webdriver.Chrome(chrome_options=options) <NEW_LINE> with open('triipusers.txt', 'r') as f: <NEW_LINE> <INDENT> user_url = f.readlines <NEW_LINE> <DEDENT> market(driver, user_url)
crawls over Triip.com for all listings and unique users. commented out code is no longer needed once the initial scrape is completed. :return: None
625941c2507cdc57c6306c6e
def cublasDger(m, n, alpha, x, incx, y, incy, A, lda): <NEW_LINE> <INDENT> _libcublas.cublasDger(m, n, alpha, int(x), incx, int(y), incy, int(A), lda) <NEW_LINE> status = cublasGetError() <NEW_LINE> cublasCheckStatus(status)
Rank-1 operation on real general matrix.
625941c28e7ae83300e4af64
def rect_circle(rect, circle): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> rect_x, rect_y, rect_width, rect_height = rect <NEW_LINE> circle_x, circle_y, radius = circle <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> rect_x, rect_y, rect_width, rect_height = circle <NEW_LINE> circle_x, circle_y, radius = rect <NEW_LINE> <DEDENT> rect_x = rect_x + rect_width / 2 <NEW_LINE> rect_y = rect_y + rect_height / 2 <NEW_LINE> circle_x += radius <NEW_LINE> circle_y += radius <NEW_LINE> dist_x = abs(circle_x - rect_x) <NEW_LINE> dist_y = abs(circle_y - rect_y) <NEW_LINE> half_width = rect_width / 2 <NEW_LINE> half_height = rect_height / 2 <NEW_LINE> if dist_x > (half_width + radius) or dist_y > (half_height + radius): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if dist_x <= half_width or dist_y <= half_height: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> corner_distance = (dist_x - half_width)**2 + (dist_y - half_height)**2 <NEW_LINE> return corner_distance <= (radius**2)
Check if rectangle and circle are overlapping Args: rect (peachy.geo.Rect, tuple[x, y, w, h]): The rectangle to use in this collision detection procedure. circle (peachy.geo.Circle, tuple[x, y, r]): The circle to use in this collision detection procdure. Represented using a Circle or tuple. Note: This procedure will still run if arguments are reversed.
625941c2e1aae11d1e749c4d
@image_comparison(['legend_auto1'], remove_text=True) <NEW_LINE> def test_legend_auto1(): <NEW_LINE> <INDENT> fig = plt.figure() <NEW_LINE> ax = fig.add_subplot(111) <NEW_LINE> x = np.arange(100) <NEW_LINE> ax.plot(x, 50 - x, 'o', label='y=1') <NEW_LINE> ax.plot(x, x - 50, 'o', label='y=-1') <NEW_LINE> ax.legend(loc='best')
Test automatic legend placement
625941c24e696a04525c93e4
def get_context_menu(self, pos): <NEW_LINE> <INDENT> menu = Menu() <NEW_LINE> splitter = None <NEW_LINE> splitter = None <NEW_LINE> for tabwidget in self.tabwidgets(): <NEW_LINE> <INDENT> global_rect = QtCore.QRect(tabwidget.mapToGlobal(QtCore.QPoint(0, 0)), tabwidget.size()) <NEW_LINE> if global_rect.contains(pos): <NEW_LINE> <INDENT> splitter = tabwidget.parent() <NEW_LINE> <DEDENT> <DEDENT> if not splitter: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if not splitter.is_empty(): <NEW_LINE> <INDENT> actions = [Action(id='split_hor', name='Create new pane to the right', on_perform=lambda : splitter.split(orientation= QtCore.Qt.Horizontal)), Action(id='split_ver', name='Create new pane to the bottom', on_perform=lambda : splitter.split(orientation= QtCore.Qt.Vertical))] <NEW_LINE> splitgroup = Group(*actions, id='split') <NEW_LINE> menu.append(splitgroup) <NEW_LINE> <DEDENT> if splitter.is_collapsible(): <NEW_LINE> <INDENT> if splitter is splitter.parent().leftchild: <NEW_LINE> <INDENT> if splitter.parent().orientation() is QtCore.Qt.Horizontal: <NEW_LINE> <INDENT> text = 'Merge with right pane' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> text = 'Merge with bottom pane' <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if splitter.parent().orientation() is QtCore.Qt.Horizontal: <NEW_LINE> <INDENT> text = 'Merge with left pane' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> text = 'Merge with top pane' <NEW_LINE> <DEDENT> <DEDENT> actions = [Action(id='merge', name=text, on_perform=lambda : splitter.collapse())] <NEW_LINE> collapsegroup = Group(*actions, id='collapse') <NEW_LINE> menu.append(collapsegroup) <NEW_LINE> <DEDENT> return menu
Returns a context menu containing split/collapse actions pos : position (in global coordinates) where the context menu was requested
625941c230c21e258bdfa434
def get_ipv4_pattern(): <NEW_LINE> <INDENT> byte = '([1-9]?\d|1\d\d|2[0-4]\d|25[0-5])' <NEW_LINE> regex = '^{0}(\.{0}){{3}}$'.format(byte) <NEW_LINE> pattern = re.compile(regex) <NEW_LINE> return pattern
Compiles a regex pattern that matches IPv4 addresses.
625941c2c432627299f04bdc
def _backend(self, settings_override=None): <NEW_LINE> <INDENT> settings = { "auth.ldap.url": "ldap://localhost/", "auth.ldap.service_dn": "cn=service,ou=users,o=test", "auth.ldap.service_password": "snerp", "auth.ldap.base_dn": "ou=users,o=test", "auth.ldap.user_search_filter": "(cn={username})", "auth.ldap.admin_field": "roles", "auth.ldap.admin_value": ["admin"], } <NEW_LINE> settings.update(settings_override or {}) <NEW_LINE> settings = dict(((k, v) for (k, v) in settings.items() if v is not None)) <NEW_LINE> kwargs = LDAPAccessBackend.configure(settings) <NEW_LINE> request = DummyRequest() <NEW_LINE> request.userid = None <NEW_LINE> return LDAPAccessBackend(request, **kwargs)
Wrapper to instantiate a LDAPAccessBackend
625941c2ec188e330fd5a73b
def render(self, **kwargs): <NEW_LINE> <INDENT> if self.points.shape[1] == 2: <NEW_LINE> <INDENT> return PointCloudViewer2d(self.figure_id, self.new_figure, self.points).render(**kwargs) <NEW_LINE> <DEDENT> elif self.points.shape[1] == 3: <NEW_LINE> <INDENT> return PointCloudViewer3d(self.figure_id, self.new_figure, self.points).render(**kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Only 2D and 3D pointclouds are " "currently supported")
Select the correct type of pointcloud viewer for the given pointcloud dimensionality. Parameters ---------- kwargs : dict Passed through to pointcloud viewer. Returns ------- viewer : :class:`Renderer` The rendering object. Raises ------ DimensionalityError Only 2D and 3D viewers are supported.
625941c256ac1b37e626416b
def test_serviceassignment_getservice(self): <NEW_LINE> <INDENT> pass
Test case for serviceassignment_getservice View a list of locations where a service is available # noqa: E501
625941c25e10d32532c5eebf
def haveSomeData(self): <NEW_LINE> <INDENT> if not self.__allSqQueries: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True
Determine if this binnary flush contain any data :return:
625941c263d6d428bbe44487
def _check_is_not_deleted(self): <NEW_LINE> <INDENT> if self._is_deleted: <NEW_LINE> <INDENT> raise _error.EntityDeleted(self.ref)
Raise an exception if the entity has 'deleted' state
625941c2adb09d7d5db6c729
def test_widget_save_copy_1(widget_test): <NEW_LINE> <INDENT> copy_names = ( expand_path('favorite_copy.json'), '', expand_path('read-only_copy.json'), ) <NEW_LINE> widget_test.file_dialog.getSaveFileName.side_effect = [ [name] for name in copy_names ] <NEW_LINE> steno_dict_copies = ( mock.create_autospec(StenoDictionary), mock.create_autospec(StenoDictionary), ) <NEW_LINE> widget_test.create_dictionary.side_effect = steno_dict_copies <NEW_LINE> widget_test.select(range(5)) <NEW_LINE> widget_test.widget.action_CopyDictionaries.trigger() <NEW_LINE> assert widget_test.file_dialog.mock_calls == [ mock.call.getSaveFileName( parent=widget_test.widget, caption='Save a copy of %s as...' % name, directory=expand_path('%s - Copy.json' % Path(name).stem), filter=FILE_PICKER_SAVE_FILTER, ) for name in ['favorite.json', 'normal.json', 'read-only.ro'] ] <NEW_LINE> assert widget_test.create_dictionary.mock_calls == [ mock.call(name, threaded_save=False) for name in copy_names if name ] <NEW_LINE> assert steno_dict_copies[0].mock_calls == [ mock.call.update(widget_test.dictionaries.dicts[0]), mock.call.save(), ] <NEW_LINE> assert steno_dict_copies[1].mock_calls == [ mock.call.update(widget_test.dictionaries.dicts[2]), mock.call.save(), ]
☑ ★ favorite.json ☑ 🗘 loading.json ☑ ⎕ normal.json ☑ 🛇 read-only.ro ☑ ! invalid.bad
625941c2baa26c4b54cb10b9
def _get_svol_uri(self, part_obj, boot_params): <NEW_LINE> <INDENT> if boot_params['boot_method'] == 'scsi': <NEW_LINE> <INDENT> sg_type = 'fcp' <NEW_LINE> prop_key = 'uuid' <NEW_LINE> prop_value = boot_params['uuid'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sg_type = 'fc' <NEW_LINE> prop_key = 'device-number' <NEW_LINE> prop_value = boot_params['devicenr'] <NEW_LINE> <DEDENT> self._logger.debug("Looking for storage volume object with %s='%s'", prop_key, prop_value) <NEW_LINE> for sg_uri in part_obj.get_property('storage-group-uris'): <NEW_LINE> <INDENT> sg_obj = (part_obj.manager.cpc.manager.console.storage_groups .resource_object(sg_uri)) <NEW_LINE> if sg_obj.get_property('type').lower() != sg_type: <NEW_LINE> <INDENT> self._logger.debug( "Skipping storage group %s, type '%s' (actual) != '%s' " "(expected)", sg_obj.get_property('name'), sg_obj.get_property('type').lower(), sg_type) <NEW_LINE> continue <NEW_LINE> <DEDENT> for sg_vol in sg_obj.storage_volumes.list(): <NEW_LINE> <INDENT> sg_vol.pull_full_properties() <NEW_LINE> try: <NEW_LINE> <INDENT> sg_vol_value = sg_vol.properties[prop_key] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if sg_vol_value.lower() != prop_value.lower(): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if sg_vol.properties['usage'] != 'boot': <NEW_LINE> <INDENT> sg_vol.update_properties({'usage': 'boot'}) <NEW_LINE> <DEDENT> return sg_vol.get_property('element-uri') <NEW_LINE> <DEDENT> <DEDENT> raise ValueError( 'Storage volume <{}:{}> not found or not attached to partition' .format(prop_key, prop_value))
Find the uri of a storage volume
625941c2377c676e91272141
def __init__(self, is_direction_incoming): <NEW_LINE> <INDENT> self.__direction_incoming = is_direction_incoming <NEW_LINE> self.__total_bytes = 0 <NEW_LINE> self.__ticks = deque() <NEW_LINE> self.__ticks_lock = Lock() <NEW_LINE> self.__start_time = None <NEW_LINE> self.__timer_service = TimerService( TRAFFIC_NOTIFICATION_PERIOD.total_seconds(), self.__on_timer) <NEW_LINE> self.__started = False
@param is_direction_incoming: whether the measured direction is incoming (otherwise outgoing). @type is_direction_incoming: bool
625941c2c4546d3d9de729ca
def test_get_flowcell_product_code(self): <NEW_LINE> <INDENT> run_dir = 'data/nanopore_data/run4/done_demuxing/20200104_1412_MN19414_AAU644_68125dc2' <NEW_LINE> run = MinIONqc(run_dir, None, None) <NEW_LINE> got_id = run._get_flowcell_product_code() <NEW_LINE> expected_id = 'FLO-FLG001' <NEW_LINE> self.assertEqual(got_id, expected_id)
Get flowcell product code from report.md.
625941c2d7e4931a7ee9deb5
def prepend(self, *others): <NEW_LINE> <INDENT> p = Path(*others) <NEW_LINE> p = p.append(self) <NEW_LINE> return p
Join another path or relationship to the start of this path to form a new path. :arg others: Entities to join to the start of this path :rtype: :class:`.Path`
625941c285dfad0860c3adf2
@login_required <NEW_LINE> def remove_folios(request): <NEW_LINE> <INDENT> page = int(request.GET.get('page')) <NEW_LINE> folio_pgs = get_curr_folio_pgs(request) <NEW_LINE> folio_pgs.clear_page(page) <NEW_LINE> set_curr_folio_pgs(request, folio_pgs) <NEW_LINE> return HttpResponse(dumps(str(None)), mimetype="text/json")
Given a manuscript's id and a page number, removes all folio numbers for the specified page in the specified manuscript.
625941c2a4f1c619b28affd6
def testConnectionsEstablishedAfterTrust(self): <NEW_LINE> <INDENT> self.servers = ['test_server_1','test_server_2'] <NEW_LINE> self.clientSecurePorts,self.serverSecurePorts = createServers( self.servers) <NEW_LINE> serverAndPort = "%s %s"%("localhost",self.clientSecurePorts[1]) <NEW_LINE> run_client_command("connect-server %s"%serverAndPort, name=self.servers[0],useCnx=True) <NEW_LINE> self.server1LogChecker = ServerLogChecker(name=self.servers[0]) <NEW_LINE> self.server1LogChecker.startThread() <NEW_LINE> self.server2LogChecker = ServerLogChecker(name=self.servers[1]) <NEW_LINE> self.server2LogChecker.startThread() <NEW_LINE> run_client_command("trust -all",name=self.servers[1], expectstdout="localhost(\s)*14807\s*.*",useCnx=True) <NEW_LINE> self.server2Nodes = getConnectedNodesFromConf(self.servers[1]).nodes <NEW_LINE> self.server1Node = self.server2Nodes.values()[0] <NEW_LINE> self.server1Nodes = getConnectedNodesFromConf(self.servers[0]).nodes <NEW_LINE> self.server2Node = self.server1Nodes.values()[0] <NEW_LINE> self.server2LogChecker.waitForOutput("Established inbound " "connections to server " "%s"%self.server1Node.toString()) <NEW_LINE> self.server2LogChecker.shutdownGracefully() <NEW_LINE> self.server2LogChecker.waitForOutput("Established outgoing " "connections to server " "%s"%self.server1Node.toString())
Connections are established after servers has trusted each other
625941c296565a6dacc8f664
def get_current_date(): <NEW_LINE> <INDENT> return date.today().strftime("%d/%m/%Y")
:return: the current date in format %d/%m/%Y
625941c238b623060ff0ad86
def set_confirm_widget(self, widget_name): <NEW_LINE> <INDENT> self.main_dialog.set_confirm_widget(widget_name)
Make a widget confirmable, eg activating that widget would close the dialog. :param widget_name: name of the widget to be confirmable
625941c224f1403a92600b00
def cumulative_cases(self): <NEW_LINE> <INDENT> return self.state["cumulative_cases"]
Stock: cumulative_cases = self.zika_cases() Initial Value: self.init_cumulative_cases() Do not overwrite this function
625941c2956e5f7376d70e06
def __getNextState(self, s, a): <NEW_LINE> <INDENT> stateCurRow = s // self.numCols <NEW_LINE> stateCurCol = s % self.numCols <NEW_LINE> actionHorizontal = 0 <NEW_LINE> actionVertical = 0 <NEW_LINE> if a == 0: <NEW_LINE> <INDENT> actionHorizontal = 1 <NEW_LINE> actionVertical = 0 <NEW_LINE> <DEDENT> elif a == 1: <NEW_LINE> <INDENT> actionHorizontal = 0 <NEW_LINE> actionVertical = -1 <NEW_LINE> <DEDENT> elif a == 2: <NEW_LINE> <INDENT> actionHorizontal = -1 <NEW_LINE> actionVertical = 0 <NEW_LINE> <DEDENT> elif a == 3: <NEW_LINE> <INDENT> actionHorizontal = 0 <NEW_LINE> actionVertical = 1 <NEW_LINE> <DEDENT> stateNextRow = stateCurRow + actionVertical <NEW_LINE> stateNextCol = stateCurCol + actionHorizontal <NEW_LINE> if stateNextRow < 0 or stateNextRow >= self.numRows: <NEW_LINE> <INDENT> stateNextRow = stateCurRow <NEW_LINE> <DEDENT> if stateNextCol < 0 or stateNextCol >= self.numCols: <NEW_LINE> <INDENT> stateNextCol = stateCurCol <NEW_LINE> <DEDENT> sprime = stateNextRow * self.numCols + stateNextCol <NEW_LINE> if np.isin(sprime, self.wallStates): <NEW_LINE> <INDENT> sprime = s <NEW_LINE> <DEDENT> return sprime
This function calculates the next state from the given state and action.
625941c230dc7b7665901901
def case_generator(op_type, Xshape, diagonal, expected): <NEW_LINE> <INDENT> cls_name = "{0}_{1}_shape_{2}_diag_{3}".format(expected, op_type, Xshape, diagonal) <NEW_LINE> errmsg = { "diagonal: TypeError": "diagonal in {} must be a python Int".format(op_type), "input: ValueError": "x shape in {} must be at least 2-D".format(op_type), } <NEW_LINE> class FailureCase(unittest.TestCase): <NEW_LINE> <INDENT> def test_failure(self): <NEW_LINE> <INDENT> paddle.enable_static() <NEW_LINE> data = fluid.data(shape=Xshape, dtype='float64', name=cls_name) <NEW_LINE> with self.assertRaisesRegexp( eval(expected.split(':')[-1]), errmsg[expected]): <NEW_LINE> <INDENT> getattr(tensor, op_type)(x=data, diagonal=diagonal) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> class SuccessCase(TrilTriuOpDefaultTest): <NEW_LINE> <INDENT> def initTestCase(self): <NEW_LINE> <INDENT> paddle.enable_static() <NEW_LINE> self.real_op_type = op_type <NEW_LINE> self.diagonal = diagonal <NEW_LINE> self.X = np.random.random(Xshape).astype("float64") <NEW_LINE> <DEDENT> <DEDENT> CLASS = locals()['SuccessCase' if expected == "success" else 'FailureCase'] <NEW_LINE> CLASS.__name__ = cls_name <NEW_LINE> globals()[cls_name] = CLASS
Generate testcases with the params shape of X, diagonal and op_type. If arg`expercted` is 'success', it will register an Optest case and expect to pass. Otherwise, it will register an API case and check the expect failure.
625941c23317a56b86939bf5
def construct_tree(edges): <NEW_LINE> <INDENT> parentDB = {} <NEW_LINE> childrenDB = {} <NEW_LINE> for edg in edges: <NEW_LINE> <INDENT> c, p = edg[0], edg[1] <NEW_LINE> parentDB[c] = p <NEW_LINE> if p in childrenDB: <NEW_LINE> <INDENT> childrenDB[p][c] = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> childrenDB[p] = {c:None} <NEW_LINE> <DEDENT> <DEDENT> root = None <NEW_LINE> for v in childrenDB.viewkeys(): <NEW_LINE> <INDENT> if v not in parentDB: <NEW_LINE> <INDENT> root = v <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> leaf_set = (set(parentDB.keys()) - set(childrenDB.keys())) <NEW_LINE> tree = {} <NEW_LINE> for node in leaf_set: <NEW_LINE> <INDENT> tree[node] = {} <NEW_LINE> <DEDENT> while len(parentDB) != 0: <NEW_LINE> <INDENT> for head, tail in tree.items(): <NEW_LINE> <INDENT> if head != root: <NEW_LINE> <INDENT> if head not in childrenDB: <NEW_LINE> <INDENT> if parentDB[head] in tree: <NEW_LINE> <INDENT> tree[parentDB[head]][head] = tail <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tree[parentDB[head]] = {head: tail} <NEW_LINE> <DEDENT> del tree[head] <NEW_LINE> del childrenDB[parentDB[head]][head] <NEW_LINE> if len(childrenDB[parentDB[head]]) == 0: <NEW_LINE> <INDENT> del childrenDB[parentDB[head]] <NEW_LINE> <DEDENT> del parentDB[head] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return tree
Given a list of edges [child, parent], return a tree.
625941c2187af65679ca50b6
def get(self, attr): <NEW_LINE> <INDENT> idx = pd.Index(self.fid, name='fid') <NEW_LINE> return pd.Series(getattr(self, attr), index=idx, name=attr)
Returns ``pandas.Series`` of object attribute. Example: >>> g.get('ead')
625941c23539df3088e2e2e4
def show_translation(self, dx, dy): <NEW_LINE> <INDENT> ody = dx + self.data.shape[0]/2 <NEW_LINE> odx = self.data.shape[1]/2 - dy <NEW_LINE> plt.scatter(odx, ody, s=40, alpha = .5) <NEW_LINE> return odx, ody
prints on the image where the peak is usage: corr = Corr() best = corr.find_peak() dx, dy = corr.find_translation(best) corr.show_image() corr.show_translation(dx, dy) plt.show()
625941c221a7993f00bc7c85
def dipole_potential(x, y): <NEW_LINE> <INDENT> r_sq = x**2 + y**2 <NEW_LINE> theta = np.arctan2(y, x) <NEW_LINE> z = np.cos(theta)/r_sq <NEW_LINE> return (np.max(z)-z) / (np.max(z)-np.min(z))
An electric dipole potential V.
625941c22eb69b55b151c845
def clean(self): <NEW_LINE> <INDENT> if self.email_subject: <NEW_LINE> <INDENT> self.email_subject = self.email_subject. replace('\r\n', ' ').replace('\r', ' ').replace('\n', ' ') <NEW_LINE> <DEDENT> super().clean()
override :meth:`django.db.models.Model.clean` to clean the instance data before saving it to the database * remove line breaks from email subject :raises: :exc:`django.core.exceptions.ValidationError`
625941c2090684286d50ec7c
def tmod_Haul(self,index): <NEW_LINE> <INDENT> if index == 0: <NEW_LINE> <INDENT> return "size" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0
Hauling is dependent on the direction of the haul. So a vertical haul can drive a different parameter to a horizontal haul. The direction of the haul is represented by an index, with 0 representing horizontal and 1 representing vertical. The function simply returns the name of the attribute to drive, given it's index. As we only have one attribute, we'll set horizontal hauling to control it and vertical hauling to do nothing.
625941c2cdde0d52a9e52fca
def check(self, raw_metadata): <NEW_LINE> <INDENT> return raw_metadata.get('url', '').startswith( 'ftp://ftp.gportal.jaxa.jp/standard/GCOM-W/GCOM-W.AMSR2/L3.SST')
Checks that the URL starts with the right prefix
625941c215fb5d323cde0aa5
def handle_popup_done(self): <NEW_LINE> <INDENT> pass
popup interaction is done The popup_done event is sent out when a popup grab is broken, that is, when the user clicks a surface that doesn't belong to the client owning the popup surface.
625941c2566aa707497f4505
def get_low_velocity(self, arg0): <NEW_LINE> <INDENT> return self.sampler.execute(self.get_low_velocity_cmd, (arg0, ))
Get Zone Low Velocity Returns: BYTE
625941c28a43f66fc4b54000
def train_based_on_states(self, data, max_epochs=1000, learning_rate=0.1): <NEW_LINE> <INDENT> num_examples = data.shape[0] <NEW_LINE> data = np.insert(data, 0, 1, axis=1) <NEW_LINE> errors = [] <NEW_LINE> for epoch in range(max_epochs): <NEW_LINE> <INDENT> pos_hidden_activations = np.dot(data, self.weights) <NEW_LINE> pos_hidden_probs = self._logistic(pos_hidden_activations) <NEW_LINE> pos_hidden_probs[:, 0] = 1 <NEW_LINE> pos_hidden_states = pos_hidden_probs > np.random.rand(num_examples, self.num_hidden + 1) <NEW_LINE> pos_associations = np.dot(data.T, pos_hidden_states) <NEW_LINE> neg_visible_activations = np.dot(pos_hidden_states, self.weights.T) <NEW_LINE> neg_visible_probs = self._logistic(neg_visible_activations) <NEW_LINE> neg_visible_probs[:, 0] = 1 <NEW_LINE> neg_hidden_activations = np.dot(neg_visible_probs, self.weights) <NEW_LINE> neg_hidden_probs = self._logistic(neg_hidden_activations) <NEW_LINE> neg_hidden_states = neg_hidden_probs > np.random.rand(num_examples, self.num_hidden + 1) <NEW_LINE> neg_associations = np.dot(neg_visible_probs.T, neg_hidden_states) <NEW_LINE> self.weights += learning_rate * ((pos_associations - neg_associations) / num_examples) <NEW_LINE> error = np.sum((data - neg_visible_probs) ** 2) <NEW_LINE> if self.debug_print: <NEW_LINE> <INDENT> print("Epoch %s: error is %s" % (epoch, error)) <NEW_LINE> errors.append(error) <NEW_LINE> <DEDENT> <DEDENT> return errors
Train the machine. Parameters ---------- data: A matrix where each row is a training example consisting of the states of visible units.
625941c28c0ade5d55d3e952
def coord_num(phi): <NEW_LINE> <INDENT> n = 20.0 - 34.0*phi + 14.0*phi**2.0 <NEW_LINE> return n
Calculates coordination number value as a function of porosity using the relationship: n = 20.0 - 34.0*phi + 14.0*phi**2.0 The above expression was copied from Avseth's QSI book, equation 2.7 on page 55. Usage: n = coord_num(phi) Inputs: phi = porosity (v/v) Output: n = coordination number (number of grain-to-grain contacts)
625941c250812a4eaa59c2bc
def __init__(self, serial_number=None, pixelFormat=None): <NEW_LINE> <INDENT> self.cam = xiapi.Camera() <NEW_LINE> if serial_number is None: <NEW_LINE> <INDENT> self.cam.open_device() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.cam.open_device_by_SN(serial_number) <NEW_LINE> <DEDENT> if pixelFormat is None: <NEW_LINE> <INDENT> if self.cam.is_iscolor(): <NEW_LINE> <INDENT> pixelFormat = "XI_RGB24" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pixelFormat = "XI_MONO8" <NEW_LINE> <DEDENT> <DEDENT> elif pixelFormat not in XI_IMG_FORMAT: <NEW_LINE> <INDENT> raise ValueError( f"Wrong pixelFormat. Possible values are {set(XI_IMG_FORMAT.keys()):}" ) <NEW_LINE> <DEDENT> self.cam.set_imgdataformat(pixelFormat) <NEW_LINE> print(f"Camera {self.name:} opened ({pixelFormat:})")
Constructor method
625941c26e29344779a625ac
def set_range_direction(self, char): <NEW_LINE> <INDENT> self.direction = char.direction
sets the direction of the spell to be the same as the player
625941c2ab23a570cc250119
def put(self, id): <NEW_LINE> <INDENT> data = request.get_json() <NEW_LINE> if 'email' not in data: <NEW_LINE> <INDENT> email = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> email = data["email"] <NEW_LINE> <DEDENT> if 'first_name' not in data: <NEW_LINE> <INDENT> first_name = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> first_name = data["first_name"] <NEW_LINE> <DEDENT> if 'last_name' not in data: <NEW_LINE> <INDENT> last_name = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> last_name = data["last_name"] <NEW_LINE> <DEDENT> if 'availability' not in data: <NEW_LINE> <INDENT> availability = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> availability = data["availability"] <NEW_LINE> <DEDENT> interviewee = interviewee_ctrl.update_interviewee(id, email, first_name, last_name, availability) <NEW_LINE> if not interviewee: <NEW_LINE> <INDENT> response = "Interviewee with id %s does not exist or email %s is already taken by another interviewee" % ( id, email), 400 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> response = interviewee, 200 <NEW_LINE> <DEDENT> return response
PUT interface to update an interviewee by it's unique database id. All data for which is contained in the json object is updated. :param id: :return:
625941c2656771135c3eb805
def update_category( category_id: CategoryID, slug: str, title: str, description: str ) -> Category: <NEW_LINE> <INDENT> category = _get_category(category_id) <NEW_LINE> category.slug = slug.strip().lower() <NEW_LINE> category.title = title.strip() <NEW_LINE> category.description = description.strip() <NEW_LINE> db.session.commit() <NEW_LINE> return _db_entity_to_category(category)
Update the category.
625941c201c39578d7e74dd4
def configure(self, url): <NEW_LINE> <INDENT> self._ftdi.open_mpsse_from_url( url, direction=self.direction, frequency=self._frequency) <NEW_LINE> cmd = array('B', (Ftdi.SET_BITS_LOW, 0x0, self.direction)) <NEW_LINE> self._ftdi.write_data(cmd)
Configure the FTDI interface as a JTAG controller
625941c260cbc95b062c64db
def hasNext(self): <NEW_LINE> <INDENT> if self.idx < len(self.queue): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False
:rtype: bool
625941c21d351010ab855ab5
def setUp(self): <NEW_LINE> <INDENT> self.driver = webdriver.Chrome( executable_path='/home/traiana/Desktop/HW_2/QA/testing/chromedriver') <NEW_LINE> self.driver.implicitly_wait(30) <NEW_LINE> self.base_url = "https://www.katalon.com/" <NEW_LINE> self.verification_errors = [] <NEW_LINE> self.accept_next_alert = True
set up class
625941c22ae34c7f2600d0ca
def update_Gp(self): <NEW_LINE> <INDENT> data = self.Jaccoo.data <NEW_LINE> row = self.Jaccoo.row <NEW_LINE> col = self.Jaccoo.col <NEW_LINE> data[self._Cbase:self._mbaseLin] = self.im.a0 * self._Cdata <NEW_LINE> Gcoo = sp.coo_matrix((data[:self._mbaseLin], (row[:self._mbaseLin], col[:self._mbaseLin])), (self.ckt.nD_dimension, self.ckt.nD_dimension), dtype = float) <NEW_LINE> self.Gp = Gcoo.tocsr() <NEW_LINE> return self.Gp
Recalculate Gp from im information
625941c2e64d504609d747d9
def _choose_from_bounds(self, *bounds): <NEW_LINE> <INDENT> if isinstance(bounds[0], str): <NEW_LINE> <INDENT> lower, upper = self.bounds(*bounds) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> lower, upper = bounds <NEW_LINE> <DEDENT> return randint(lower, upper)
Returns a value that is within the given bounds. Each value has an equal chance of being chosen.
625941c2f7d966606f6a9f9b
def __init__(self, cur, currentUser, threads): <NEW_LINE> <INDENT> self.cur = cur <NEW_LINE> self.currentUser = currentUser <NEW_LINE> self.threads = threads <NEW_LINE> self.pfam_hmm_dir = ConfigMetadata.PFAM_HMM_DIR <NEW_LINE> self.protein_file_suffix = ConfigMetadata.PROTEIN_FILE_SUFFIX <NEW_LINE> self.pfam_suffix = ConfigMetadata.PFAM_SUFFIX <NEW_LINE> self.pfam_top_hit_suffix = ConfigMetadata.PFAM_TOP_HIT_SUFFIX <NEW_LINE> self.checksum_suffix = ConfigMetadata.CHECKSUM_SUFFIX
Initialization.
625941c255399d3f0558864c
def modularity(self, membership, weights=None): <NEW_LINE> <INDENT> if isinstance(membership, VertexClustering): <NEW_LINE> <INDENT> if membership.graph != self: <NEW_LINE> <INDENT> raise ValueError("clustering object belongs to another graph") <NEW_LINE> <DEDENT> return GraphBase.modularity(self, membership.membership, weights) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return GraphBase.modularity(self, membership, weights)
Calculates the modularity score of the graph with respect to a given clustering. The modularity of a graph w.r.t. some division measures how good the division is, or how separated are the different vertex types from each other. It's defined as M{Q=1/(2m)*sum(Aij-ki*kj/(2m)delta(ci,cj),i,j)}. M{m} is the number of edges, M{Aij} is the element of the M{A} adjacency matrix in row M{i} and column M{j}, M{ki} is the degree of node M{i}, M{kj} is the degree of node M{j}, and M{Ci} and C{cj} are the types of the two vertices (M{i} and M{j}). M{delta(x,y)} is one iff M{x=y}, 0 otherwise. If edge weights are given, the definition of modularity is modified as follows: M{Aij} becomes the weight of the corresponding edge, M{ki} is the total weight of edges adjacent to vertex M{i}, M{kj} is the total weight of edges adjacent to vertex M{j} and M{m} is the total edge weight in the graph. @param membership: a membership list or a L{VertexClustering} object @param weights: optional edge weights or C{None} if all edges are weighed equally. Attribute names are also allowed. @return: the modularity score @newfield ref: Reference @ref: MEJ Newman and M Girvan: Finding and evaluating community structure in networks. Phys Rev E 69 026113, 2004.
625941c2b545ff76a8913daf
def post_process_document(file_name, config): <NEW_LINE> <INDENT> substitute_headers_and_footers(file_name, config)
Apply final touchup to the output document after it has been saved. The document object can be assumed to have been destroyed at this point. The only access to it is through `file_name`. The current implementation filters the headers and footers of the document and does a keyword repacement on them.
625941c216aa5153ce362411
def service_document(self): <NEW_LINE> <INDENT> return ServiceDocumentDepositClient(url=self.base_url, auth=self.auth).execute()
Retrieve service document endpoint's information.
625941c2f8510a7c17cf9694
def _fix_package_tree(root_dir): <NEW_LINE> <INDENT> usr_dir = os.path.join(root_dir, 'usr') <NEW_LINE> if not os.path.exists(usr_dir): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for item in os.listdir(usr_dir): <NEW_LINE> <INDENT> src = os.path.join(root_dir, 'usr', item) <NEW_LINE> dst = os.path.join(root_dir, item) <NEW_LINE> if os.path.exists(dst): <NEW_LINE> <INDENT> mess = "Destination already exists" <NEW_LINE> raise Exception(mess) <NEW_LINE> <DEDENT> qibuild.sh.mv(src, dst) <NEW_LINE> <DEDENT> qibuild.sh.rm(os.path.join(root_dir, 'usr')) <NEW_LINE> return
Make the package tree comply with qiBuild.
625941c2d58c6744b4257bf9
def network_nsg_show(name, params=None, options='', **kargs): <NEW_LINE> <INDENT> cmd = "azure network nsg show %s %s" % (name, options) <NEW_LINE> if params: <NEW_LINE> <INDENT> cmd += add_option("--subscription", params.get("subscription", None)) <NEW_LINE> <DEDENT> return command(cmd, azure_json=True, **kargs)
Show Network Security Group properties :param name: NSG name :param params: Command properties :param options: extra options :param kargs: Additional args for running the command :return: CmdResult object
625941c260cbc95b062c64dc
def get_progress_color(base_color): <NEW_LINE> <INDENT> if min(base_color) == max(base_color): <NEW_LINE> <INDENT> return _adjust_gray_luminence(base_color) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return _adjust_color_luminence(base_color)
Return a color that is slightly brighter then the base_color. For shades of gray the constant GRAY_FACTOR decides the brightness. Higher factor gives more brightness. For other colors the LUMINENCE_FACTOR decides the brightness. Smaller factor gives more brightness.
625941c2097d151d1a222df4
def make_secure_val(val): <NEW_LINE> <INDENT> return '%(value)s|%(hashed)s' % {'value': val, 'hashed': hash_str(val)}
Makes the hashed cookie for a given val. The hashed cookie uses a pipe | as the separator between the value and it's hash. val: String Returns: String
625941c276d4e153a657eac9
def exact_match(y_true, y_pred): <NEW_LINE> <INDENT> m_list = [] <NEW_LINE> for i in range(y_true.shape[0]): <NEW_LINE> <INDENT> set_true = set( y_true[i] ) <NEW_LINE> set_pred = set( y_pred[i] ) <NEW_LINE> if set_true == set_pred: <NEW_LINE> <INDENT> m_list.append(1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> m_list.append(0) <NEW_LINE> <DEDENT> <DEDENT> return np.mean(m_list)
Compute the exact match measure for the multi-label case
625941c2fff4ab517eb2f3d4
def load(path: Union[str, Path]) -> Any: <NEW_LINE> <INDENT> correct_file = _convert_path(path, _COMPRESS) <NEW_LINE> incorrect_file = _convert_path(path, not _COMPRESS) <NEW_LINE> if incorrect_file.exists(): <NEW_LINE> <INDENT> if ( not correct_file.exists() or incorrect_file.stat().st_mtime > correct_file.stat().st_mtime ): <NEW_LINE> <INDENT> _update_single_file_compression(correct_file, _COMPRESS) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _delete_single_file(incorrect_file) <NEW_LINE> <DEDENT> <DEDENT> open_func = gzip.open if _COMPRESS else open <NEW_LINE> with open_func(correct_file, "rt", encoding="utf8") as file: <NEW_LINE> <INDENT> return json.load(file)
Loads json data (which may be compressed) from the given file.
625941c282261d6c526ab435
def __or__(self, f): <NEW_LINE> <INDENT> return Nepomuk.TagWidget.ModeFlags()
Nepomuk.TagWidget.ModeFlags Nepomuk.TagWidget.ModeFlags.__or__(Nepomuk.TagWidget.ModeFlags f)
625941c24a966d76dd550fa7
def print_tree(account): <NEW_LINE> <INDENT> raise NotImplementedError()
In the example output below, "GE" is the root account, "Jet Engines" and "Appliances" are first-degree ChildAccounts, and "DoD Contracts" and "Washing Machines" are second-degree ChildAccounts. > print_tree(general_electric) GE (Manufacturing, R&D): Daniel Testperson Jet Engines (Manufacturing, R&D, Aerospace): Daniel Testperson DoD Contracts (Defense, R&D, Aerospace): William Testperson Appliances (Manufacturing, Consumer Goods): Janet Testperson Washing Machines (Consumer Goods): Janet Testperson
625941c2ff9c53063f47c18d
def x_update_command_display(self) -> None: <NEW_LINE> <INDENT> for command_parameter in self.parameters_list: <NEW_LINE> <INDENT> ui_obj = self.input_edit_objects[command_parameter] <NEW_LINE> value = self.get_current_value(ui_obj) <NEW_LINE> self.command_parameter_current_values[command_parameter] = value <NEW_LINE> <DEDENT> if list(self.command_parameter_current_values.values()).count("") == len(self.command_parameter_current_values): <NEW_LINE> <INDENT> display = "{}()".format(self.command_name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> parameter_string_text = "" <NEW_LINE> for command_parameter in self.parameters_list: <NEW_LINE> <INDENT> value = self.command_parameter_current_values[command_parameter] <NEW_LINE> if value != "": <NEW_LINE> <INDENT> text = '{}="{}", '.format(command_parameter, value) <NEW_LINE> parameter_string_text += text <NEW_LINE> <DEDENT> <DEDENT> updated_parameter_string_text = parameter_string_text.rsplit(", ", 1)[0] <NEW_LINE> display = "{}({})".format(self.command_name, updated_parameter_string_text) <NEW_LINE> <DEDENT> self.CommandDisplay_View_TextBrowser.setText(display)
Each command dialog box has a command display that shows the string representation of the command with the user-specified input parameters. It is updated dynamically as the user enters/selects values for the different command parameter fields (this function is called when any text is changed in the input field Qt widgets). The function is responsible for reading the inputs, creating the updated string representation of the command and updating the CommandDisplay widget. Returns: None
625941c23c8af77a43ae3737
def apply ( self, info ): <NEW_LINE> <INDENT> return
Handles the **Apply** button being clicked.
625941c2091ae35668666efb
@login_required <NEW_LINE> def delete_unit(request): <NEW_LINE> <INDENT> if request.method != 'PUT': <NEW_LINE> <INDENT> return JsonResponse({"error": "PUT method required."}, status=400) <NEW_LINE> <DEDENT> d = json.loads(request.body) <NEW_LINE> data = d["childunit"] <NEW_LINE> print(data) <NEW_LINE> unit = Unit.objects.get(id=data["id"]) <NEW_LINE> if request.user == unit.owner: <NEW_LINE> <INDENT> unit.delete() <NEW_LINE> return JsonResponse({"message": "Unit deleted successfully."}, status=201) <NEW_LINE> <DEDENT> return JsonResponse({"error": "User is not the owner of this unit."}, status=400)
Given a unit ID via 'PUT', delete the corresponding unit from the DB only if the requestor is the owner of that unit.
625941c2d6c5a10208143fe2
def zero(self): <NEW_LINE> <INDENT> self.counter.set_value(0)
Zero the counter attribute
625941c245492302aab5e25a
def cleanup(self): <NEW_LINE> <INDENT> item = self.model(self.data["id"]) <NEW_LINE> item.delete() <NEW_LINE> del item
Override this to set a custom cleanup process. By default this takes the key that was generated in `action()` and calls the models `.delete()` function.
625941c27b25080760e393f3
def edit_repo( name, description=None, homepage=None, private=None, has_issues=None, has_wiki=None, has_downloads=None, profile="github", ): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> allow_private_change = _get_config_value(profile, "allow_repo_privacy_changes") <NEW_LINE> <DEDENT> except CommandExecutionError: <NEW_LINE> <INDENT> allow_private_change = False <NEW_LINE> <DEDENT> if private is not None and not allow_private_change: <NEW_LINE> <INDENT> raise CommandExecutionError( "The private field is set to be changed for " "repo {} but allow_repo_privacy_changes " "disallows this.".format(name) ) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> client = _get_client(profile) <NEW_LINE> organization = client.get_organization(_get_config_value(profile, "org_name")) <NEW_LINE> repo = organization.get_repo(name) <NEW_LINE> given_params = { "description": description, "homepage": homepage, "private": private, "has_issues": has_issues, "has_wiki": has_wiki, "has_downloads": has_downloads, } <NEW_LINE> parameters = {"name": name} <NEW_LINE> for param_name, param_value in given_params.items(): <NEW_LINE> <INDENT> if param_value is not None: <NEW_LINE> <INDENT> parameters[param_name] = param_value <NEW_LINE> <DEDENT> <DEDENT> organization._requester.requestJsonAndCheck("PATCH", repo.url, input=parameters) <NEW_LINE> get_repo_info(name, profile=profile, ignore_cache=True) <NEW_LINE> return True <NEW_LINE> <DEDENT> except github.GithubException: <NEW_LINE> <INDENT> log.exception("Error editing a repo") <NEW_LINE> return False
Updates an existing Github repository. name The name of the team to be created. description The description of the repository. homepage The URL with more information about the repository. private The visiblity of the repository. Note that private repositories require a paid GitHub account. has_issues Whether to enable issues for this repository. has_wiki Whether to enable the wiki for this repository. has_downloads Whether to enable downloads for this repository. profile The name of the profile configuration to use. Defaults to ``github``. CLI Example: .. code-block:: bash salt myminion github.add_repo 'repo_name' .. versionadded:: 2016.11.0
625941c2596a897236089a5c
def _make_reversed_substrate(self): <NEW_LINE> <INDENT> reversed_substrate = datamodel.Substrate("{}_reversed".format(self._original_substrate.name)) <NEW_LINE> for node in self._original_substrate.nodes: <NEW_LINE> <INDENT> reversed_substrate.add_node(node, self._original_substrate.node[node]["supported_types"], self._original_substrate.node[node]["capacity"], self._original_substrate.node[node]["cost"]) <NEW_LINE> reversed_substrate.node[node] = self._original_substrate.node[node] <NEW_LINE> <DEDENT> for tail, head in self._original_substrate.edges: <NEW_LINE> <INDENT> original_edge_properties = self._original_substrate.edge[(tail, head)] <NEW_LINE> reversed_substrate.add_edge(head, tail, capacity=original_edge_properties["capacity"], cost=original_edge_properties["cost"], bidirected=False) <NEW_LINE> for key, value in self._original_substrate.edge[(tail, head)].iteritems(): <NEW_LINE> <INDENT> if key not in reversed_substrate.edge[(head, tail)]: <NEW_LINE> <INDENT> reversed_substrate.edge[(head, tail)][key] = value <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return reversed_substrate
Generate a copy of the substrate with all edges reversed. All default substrate node & edge properties are preserved :return:
625941c297e22403b379cf32
def _lock(self): <NEW_LINE> <INDENT> if self.locked: <NEW_LINE> <INDENT> LOGGER.debug("Config and commit already locked - skipping") <NEW_LINE> return <NEW_LINE> <DEDENT> for lock_type in LOCK_TYPES: <NEW_LINE> <INDENT> cmd = TAKE_LOCK_API_CMD.format(lock_type) <NEW_LINE> try: <NEW_LINE> <INDENT> self.device.op(cmd=cmd) <NEW_LINE> <DEDENT> except pan.xapi.PanXapiError as exc: <NEW_LINE> <INDENT> raise LockError(f"Failed to aquire {lock_type}-lock: {str(exc)}") <NEW_LINE> <DEDENT> LOGGER.debug("%s-lock acquired", lock_type) <NEW_LINE> <DEDENT> self.locked = True
Take lock for config editing and committing new configurations.
625941c267a9b606de4a7e54
def get_success_url(self) -> str: <NEW_LINE> <INDENT> return self.object.get_absolute_url()
On success, return to the list view.
625941c28c3a873295158351
def run(self):
    """Add a show that already exists on disk to SiCKRAGE.

    Validates the indexer id and on-disk location, looks the show up on
    the configured indexer, derives the initial/archive quality mask, and
    queues the show for (asynchronous) addition.

    :return: an API response dict built by ``_responds``
    """
    # Refuse to add a show whose indexer id is already in the database.
    showObj = findCertainShow(sickrage.srCore.SHOWLIST, int(self.indexerid))
    if showObj:
        return _responds(RESULT_FAILURE, msg="An existing indexerid already exists in the database")
    if not os.path.isdir(self.location):
        return _responds(RESULT_FAILURE, msg='Not a valid location')
    indexerName = None
    # Query the indexer for metadata about this show id.
    indexerResult = CMD_SiCKRAGESearchIndexers([], {indexer_ids[self.indexer]: self.indexerid}).run()
    if indexerResult['result'] == result_type_map[RESULT_SUCCESS]:
        if not indexerResult['data']['results']:
            return _responds(RESULT_FAILURE, msg="Empty results returned, check indexerid and try again")
        # Only accept an unambiguous single result that carries a name.
        if len(indexerResult['data']['results']) == 1 and 'name' in indexerResult['data']['results'][0]:
            indexerName = indexerResult['data']['results'][0]['name']
    if not indexerName:
        return _responds(RESULT_FAILURE, msg="Unable to retrieve information from indexer")
    indexer = indexerResult['data']['results'][0]['indexer']
    # Map the API's quality keywords to internal Quality constants.
    quality_map = {'sdtv': Quality.SDTV, 'sddvd': Quality.SDDVD, 'hdtv': Quality.HDTV, 'rawhdtv': Quality.RAWHDTV, 'fullhdtv': Quality.FULLHDTV, 'hdwebdl': Quality.HDWEBDL, 'fullhdwebdl': Quality.FULLHDWEBDL, 'hdbluray': Quality.HDBLURAY, 'fullhdbluray': Quality.FULLHDBLURAY, 'unknown': Quality.UNKNOWN}
    # Start from the configured default; overridden below if the caller
    # supplied explicit initial/archive quality lists.
    newQuality = int(sickrage.srCore.srConfig.QUALITY_DEFAULT)
    iqualityID = []
    aqualityID = []
    # NOTE(review): collections.Iterable is the Python-2-era location; on
    # Python 3.3+ it lives in collections.abc -- confirm target interpreter.
    if isinstance(self.initial, collections.Iterable):
        for quality in self.initial:
            iqualityID.append(quality_map[quality])
    if isinstance(self.archive, collections.Iterable):
        for quality in self.archive:
            aqualityID.append(quality_map[quality])
    if iqualityID or aqualityID:
        newQuality = Quality.combineQualities(iqualityID, aqualityID)
    # Hand the show to the queue; the actual addition happens asynchronously.
    sickrage.srCore.SHOWQUEUE.addShow(
        int(indexer), int(self.indexerid), self.location,
        default_status=sickrage.srCore.srConfig.STATUS_DEFAULT,
        quality=newQuality,
        flatten_folders=int(self.flatten_folders),
        subtitles=self.subtitles,
        default_status_after=sickrage.srCore.srConfig.STATUS_DEFAULT_AFTER,
        archive=self.archive_firstmatch
    )
    return _responds(RESULT_SUCCESS, {"name": indexerName}, indexerName + " has been queued to be added")
Add an existing show to SiCKRAGE
625941c2be7bc26dc91cd59d
def weightsInit(N, M):
    """Create an M-by-N weight matrix with entries drawn uniformly from [0, 1).

    N -- number of columns of the input patterns
    M -- number of outputs (rows of the result)
    """
    return np.random.uniform(size=(M, N))
params: N: number of columns of the input patterns; M: number of outputs (number of rows of the output)
625941c2711fe17d82542309
def copy_and_sum_families(family_source, family_target):
    """Merge *family_source* into *family_target* in place.

    Keys missing from the target are copied over; keys present in both
    mappings have their values summed into the target.
    """
    for key, value in family_source.items():
        if key in family_target:
            family_target[key] += value
        else:
            family_target[key] = value
Iterates through the source family and copies its entries to the target family; when a key already exists in both families, the values are added together.
625941c263f4b57ef00010b7
def test_nothing_returned_fails():
    """Verify that verification raises TestFailError when the client received
    no message, and that no messages end up recorded on the verifier."""
    client_stub = Mock(spec=MQTTClient, message_received=Mock(return_value=None))
    expected_message = {"topic": "/a/b/c", "payload": "hello"}
    checker = MQTTResponse(
        client_stub,
        "Test stage",
        expected_message,
        {"strict": StrictLevel.all_on()},
    )
    with pytest.raises(exceptions.TestFailError):
        checker.verify(expected_message)
    assert not checker.received_messages
Raises an error if no message was received
625941c266673b3332b9202a
def get_tol_2d():
    """Return the global tolerance for 2D float operations.

    The value may not be meaningful if the tolerance has not been set yet.
    """
    tolerance = geometry.gmGetXyTol()
    return tolerance
Get the global tolerance for 2d float operations. Value may not be meaningful if it has not been set yet.
625941c2d99f1b3c44c6752d
def AddMutexEnvVarsFlags(parser):
    """Add flags for creating, updating, and deleting env vars.

    Delegates to ``env_vars_util.AddUpdateEnvVarsFlags``; the flags are
    registered directly on *parser*.
    """
    env_vars_util.AddUpdateEnvVarsFlags(parser)
Add flags for creating updating and deleting env vars.
625941c2498bea3a759b9a49
def case_aresidueinvertresidue(self, node):
    """Score an inverted-residue rule node against the mutation list.

    Builds a mutation from the node's integer position and mutated amino
    acid, inverts it, and pushes ``TRUE_VALUE`` onto the stack if any
    mutation in ``self.mutation_list`` matches the inverted residue
    (recording the match in both scored-mutation sets); otherwise pushes
    ``FALSE_VALUE``.
    """
    super().case_aresidueinvertresidue(node)
    value = self.FALSE_VALUE
    # Construct the residue described by the grammar node, then invert it.
    residue = self.comparator.create_mutation(node.get_integer(), node.get_mutatedaminoacid())
    residue = self.comparator.invert_mutation(residue)
    for mutation in self.mutation_list:
        # A compare() result of 1 is treated as a match -- the meaning of
        # other return values is defined by the comparator; confirm there.
        if self.comparator.compare(mutation, residue) == 1:
            value = self.TRUE_VALUE
            # Record only the first matching mutation, then stop scanning.
            self.all_scored_mutations.add(mutation)
            self.scored_item_mutations.add(mutation)
            break
    self.stack.push(value)
Score an inverted-residue rule node: push TRUE_VALUE onto the stack if the inverted mutation matches any mutation in the mutation list, otherwise push FALSE_VALUE.
625941c2b57a9660fec3381b