code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def cmd_images(args):
    """Build the project's docker images for ``args.os``.

    Flow: optionally log in to the registry (--push), optionally pull base
    images (--pull), build every supported "support" image, then build the
    build/kvm/ci images, pushing each one if --push was given.
    ``args.only`` restricts building to tags with that prefix.
    """
    if args.push:
        # Authenticate up-front so the final pushes don't fail late.
        docker_call(["login", docker_registry_name()])
    # All Dockerfiles live under the per-OS docker directory.
    with in_directory(utils.get_internal_fn(os.path.join("docker", args.os))):
        images = (
            (make_image_name("build", args.os), "support.Dockerfile"),
            (make_image_name("kvm", args.os), "kvm.Dockerfile"),
            (make_image_name("ci", args.os), "ci.Dockerfile"),
        )
        if args.pull:
            # Collect the base images referenced by each Dockerfile and pull
            # each one exactly once.
            to_pull = set()
            for image, dockerfn in images:
                to_pull.update(do_pull(dockerfn))
            for I in to_pull:
                docker_call(["pull", I])
        support = load_supported(args)
        for I in support:
            I.fetch()
        cmd = ["build"] + get_proxy_arg()
        for I in support:
            if args.only is not None and not I.docker_tag.startswith(args.only):
                continue
            print("------- Building support image %s -------" % (I.docker_tag))
            I.build_image(args.os, cmd)
        for image, dockerfn in images:
            if image is None:
                continue
            if args.only is not None and not image.startswith(args.only):
                continue
            docker_call(cmd + ["-t", image, "-f", dockerfn, "."])
            if args.push:
                docker_call(["push", image])
Build docker images for different architectures and OS.
625941c160cbc95b062c64c0
def isGameOver(self):
    """Return True when the game has ended (checkmate or draw), else False."""
    # _game_result is falsy while the game is still in progress.
    return bool(self._game_result)
Returns True if the game is over by either checkmate or draw.
625941c1ff9c53063f47c172
def skip_lines(self, inputfile, sequence):
    """Read trivial line types and check they are what they are supposed to be.

    Reads ``len(sequence)`` lines from *inputfile*. Each element of *sequence*
    selects a check for the corresponding line:
      'blank'  / 'b' - the line should be blank
      'dashes' / 'd' - only dashes (or spaces)
      'equals' / 'e' - only equal signs (or spaces)
      'stars'  / 's' - only stars (or spaces)
    Mismatches are logged as warnings (never raised), attributed to the
    calling parser via frame inspection. Returns the list of lines read.
    """
    expected_characters = {
        '-': ['dashes', 'd'],
        '=': ['equals', 'e'],
        '*': ['stars', 's'],
    }
    lines = []
    for expected in sequence:
        line = next(inputfile)
        if expected in ["blank", "b"]:
            try:
                assert line.strip() == ""
            except AssertionError:
                # Frame [1] is the direct caller; do not refactor this into a
                # helper — the frame index would then point at the wrong caller.
                frame, fname, lno, funcname, funcline, index = inspect.getouterframes(inspect.currentframe())[1]
                parser = fname.split('/')[-1]
                msg = "In %s, line %i, line not blank as expected: %s" % (parser, lno, line.strip())
                self.logger.warning(msg)
        for character, keys in expected_characters.items():
            if expected in keys:
                try:
                    assert utils.str_contains_only(line.strip(), [character, ' '])
                except AssertionError:
                    frame, fname, lno, funcname, funcline, index = inspect.getouterframes(inspect.currentframe())[1]
                    parser = fname.split('/')[-1]
                    msg = "In %s, line %i, line not all %s as expected: %s" % (parser, lno, keys[0], line.strip())
                    self.logger.warning(msg)
                    continue
        lines.append(line)
    return lines
Read trivial line types and check they are what they are supposed to be. This function will read len(sequence) lines and do certain checks on them, when the elements of sequence have the appropriate values. Currently the following elements trigger checks: 'blank' or 'b' - the line should be blank 'dashes' or 'd' - the line should contain only dashes (or spaces) 'equals' or 'e' - the line should contain only equal signs (or spaces) 'stars' or 's' - the line should contain only stars (or spaces)
625941c11d351010ab855a9a
def test_dpp_qr_code_auth_mutual_not_used(dev, apdev):
    """DPP QR Code and authentication exchange (mutual not used)"""
    check_dpp_capab(dev[0])
    check_dpp_capab(dev[1])
    logger.info("dev0 displays QR Code")
    id0 = dev[0].dpp_bootstrap_gen(chan="81/1", mac=True)
    uri0 = dev[0].request("DPP_BOOTSTRAP_GET_URI %d" % id0)
    logger.info("dev1 scans QR Code")
    id1 = dev[1].dpp_qr_code(uri0)
    logger.info("dev1 displays QR Code")
    id1b = dev[1].dpp_bootstrap_gen(chan="81/1", mac=True)
    # uri1b is deliberately never scanned by dev0 — that is the scenario under
    # test: the initiator offers mutual auth but the responder cannot use it.
    uri1b = dev[1].request("DPP_BOOTSTRAP_GET_URI %d" % id1b)
    logger.info("dev0 does not scan QR Code")
    logger.info("dev1 initiates DPP Authentication")
    if "OK" not in dev[0].request("DPP_LISTEN 2412"):
        raise Exception("Failed to start listen operation")
    if "OK" not in dev[1].request("DPP_AUTH_INIT peer=%d own=%d" % (id1, id1b)):
        raise Exception("Failed to initiate DPP Authentication")
    ev = dev[1].wait_event(["DPP-AUTH-DIRECTION"], timeout=5)
    if ev is None:
        raise Exception("DPP authentication direction not indicated (Initiator)")
    # mutual=0 confirms the exchange fell back to one-way authentication.
    if "mutual=0" not in ev:
        raise Exception("Mutual authentication not used")
    ev = dev[0].wait_event(["DPP-AUTH-SUCCESS"], timeout=5)
    if ev is None:
        raise Exception("DPP authentication did not succeed (Responder)")
    ev = dev[1].wait_event(["DPP-AUTH-SUCCESS"], timeout=5)
    if ev is None:
        raise Exception("DPP authentication did not succeed (Initiator)")
    dev[0].request("DPP_STOP_LISTEN")
DPP QR Code and authentication exchange (mutual not used)
625941c156b00c62f0f145d5
def test_node_import_new(self):
    """Importing a fresh node record creates exactly one Node and one Package."""
    process_node_info(TestServerBase.DATA1)
    node_count = Node.query.count()
    package_count = Package.query.count()
    assert node_count == 1
    assert package_count == 1
Test importing a new node
625941c1435de62698dfdbc9
def get_h_samples(gt_path):
    """Return the ``h_samples`` (the fixed y coordinates) from the first entry
    of a ground-truth JSON file, so fitted x coordinates can be predicted for
    them later.

    :param gt_path: path to the ground-truth JSON file read via ``load_json``
    :return: the ``h_samples`` list of the first record, or ``[]`` when the
        file contains no records (the original raised NameError in that case).
    """
    for line in load_json(gt_path):
        # Every record shares the same h_samples, so the first one suffices.
        return line["h_samples"]
    return []
从gt_json文件中拿到y坐标，以便后面对预测拟合后的x坐标使用 (Get the y coordinates from the gt JSON file, so they can be used later with the predicted/fitted x coordinates.)
625941c1d53ae8145f87a1f1
def get(self, *args, **kwargs):
    """Handle GET requests by delegating to :meth:`post` with the same arguments."""
    return self.post(*args, **kwargs)
get request
625941c144b2445a33932014
def test_process_log_message(self):
    """``process_log_message`` returns an encrypted ``bytes`` message that the
    cipher can decrypt."""
    dummy_log = MagicMock()
    sample_line = ("2018-11-01T09:30:56.693495-04:00 IN=ens160 OUT=ens1 "
                   "SRC=192.168.1.2 DST=10.1.1.1 WINDOW=3472 RES=0x00 ")
    result = log_sender.process_log_message(sample_line, self.cipher, dummy_log)
    # Round-trip through the cipher must not raise.
    self.cipher.decrypt(result)
    self.assertTrue(isinstance(result, bytes))
``process_log_message`` returns an encrypted message
625941c163b5f9789fde7063
def horizontalOffset(self, *args, **kwargs):
    """Dummy reimplementation: always report a horizontal scroll offset of 0.

    NOTE(review): presumably overrides a Qt item-view method where 0 means
    "no horizontal scrolling" — confirm against the base class.
    """
    return 0
dummy reimplementation
625941c1fbf16365ca6f613d
def __contains__(self, key):
    """Return True if *key* names a known parameter, False otherwise.

    Enables ``key in obj`` membership tests against the parameter table.
    """
    return key in self._parameters
Return True if 'key' is a parameter, False otherwise. Keyword Arguments: key -- name of the parameter
625941c18c3a873295158335
def admin_endpoint(handler):
    """Declare an Admin Endpoint.

    Admin Endpoints in Litecord are endpoints that can only be accessed by
    users who have administrator powers. To make a user an admin, set the
    `admin` field in the raw user object to boolean `true`.

    The wrapper resolves the requesting user, short-circuits when resolution
    already produced an HTTP response, rejects non-admins with error 40001,
    and otherwise dispatches to the wrapped handler.
    """
    async def inner_handler(endpoint, request):
        server = endpoint.server
        user = await user_from_request(server, request)
        # user_from_request returns a ready error response on auth failure.
        if isinstance(user, web.Response):
            return user
        if not user.admin:
            log.warning(f"{user!s} tried to use an admin endpoint")
            return _err(errno=40001)
        return await do(handler, endpoint, request, user)
    # Preserve the wrapped handler's metadata; the original copied only
    # __doc__, which made wrapped handlers all report the name "inner_handler"
    # in logs and introspection.
    inner_handler.__name__ = handler.__name__
    inner_handler.__doc__ = handler.__doc__
    return inner_handler
Declare an Admin Endpoint. Admin Endpoints in Litecord are endpoints that can only be accessed by users who have administrator powers. To make a user an admin, set the `admin` field in the raw user object to boolean `true`.
625941c1d268445f265b4dec
def searchMatrix(self, matrix, target):
    """Search a matrix whose rows and columns are sorted ascending.

    Staircase search from the bottom-left corner: each comparison discards
    either a row or a column, giving O(rows + cols) time.

    :type matrix: List[List[int]]
    :type target: int
    :rtype: bool
    """
    if not matrix:
        return False
    rows, cols = len(matrix), len(matrix[0])
    r, c = rows - 1, 0
    while r >= 0 and c < cols:
        current = matrix[r][c]
        if current == target:
            return True
        if current < target:
            c += 1      # everything above in this column is smaller too
        else:
            r -= 1      # everything right in this row is larger too
    return False
:type matrix: List[List[int]] :type target: int :rtype: bool
625941c107f4c71912b113fe
def __init__(self, corpus):
    """Initialize the count tables and train the bigram model on *corpus*.

    vocab is the number of distinct bigrams observed during training.
    """
    self.biGramCount = {}
    self.uniGramCount = {}
    self.train(corpus)
    # len(dict) counts keys directly; no need to materialize .keys().
    self.vocab = len(self.biGramCount)
Initialize your data structures in the constructor.
625941c110dbd63aa1bd2b22
def cpdb_dataset(tfrecords):
    """Open a tfrecords file in the cpdb format, parse each record, and
    return a ``tf.data.Dataset``.

    :param tfrecords: path(s) accepted by ``tf.data.TFRecordDataset``
    :return: dataset of records parsed by ``cpdb_parser``
    """
    dataset = tf.data.TFRecordDataset(tfrecords)
    # Pass the parser directly; the original's ``lambda x: cpdb_parser(x)``
    # wrapper added nothing.
    return dataset.map(cpdb_parser)
Open a tfrecords file in the cpdb format, parse, and return a tf.data.Dataset object
625941c16fece00bbac2d6bb
def maxArea(self, height):
    """Container-with-most-water: largest rectangle bounded by two lines.

    Two-pointer scan, O(n) time / O(1) space: always advance the shorter
    side, since the area is limited by it.

    :type height: List[int]
    :rtype: int — 0 when fewer than two lines exist (the original raised
        IndexError on an empty list).
    """
    if not height:
        return 0
    best = 0
    left, right = 0, len(height) - 1
    while left < right:
        width = right - left
        if height[left] < height[right]:
            area = height[left] * width
            left += 1
        else:
            area = height[right] * width
            right -= 1
        if area > best:
            best = area
    return best
:type height: List[int] :rtype: int
625941c197e22403b379cf17
def cell(self):
    """Return the UFL cell.

    Deprecated since 1.7.0 (removed in 2.0.0); delegates to ``ufl_cell()``.
    """
    cpp.deprecation("'FunctionSpace.cell()'", "1.7.0", "2.0.0",
                    "Use 'FunctionSpace.ufl_cell()' instead.")
    return self.ufl_cell()
Return the UFL cell.
625941c156ac1b37e6264151
def test_update_edit():
    """Editing an update POSTs the new text and returns the edited Update."""
    api = MagicMock()
    api.post.return_value = {'update': {'id': 1, 'text': 'hey!'}}
    original = Update(api, raw_response={'id': 1, 'text': 'ola!'})
    result = original.edit(text='hey!')
    expected = Update(api, raw_response={'id': 1, 'text': 'hey!'})
    api.post.assert_called_once_with(url='updates/1/update.json',
                                     data='text=hey!&')
    eq_(result, expected)
Test basic update editing
625941c15166f23b2e1a50d7
def get_edge_mask(poly, mask):
    """Draw the polygon outline (value 1) onto *mask* and return it.

    *poly* holds normalized (x, y) vertices in [0, 1]; they are scaled to the
    mask's pixel grid before rasterization. The mask is modified in place.
    """
    height, width = mask.shape[0], mask.shape[1]
    pixel_poly = np.zeros((poly.shape[0], poly.shape[1]), np.int32)
    pixel_poly[:, 0] = np.floor(poly[:, 0] * width)
    pixel_poly[:, 1] = np.floor(poly[:, 1] * height)
    cv2.polylines(mask, np.int32([pixel_poly]), True, [1])
    return mask
Generate edge mask
625941c14d74a7450ccd4141
def Initialize(self):
    """Executed when FreeCAD starts: register the reinforcement toolbar,
    preference page, icon path and translations."""
    import RebarTools
    from BillOfMaterial.BOMPreferences import BOMPreferences
    from pathlib import Path

    # All resources live relative to the RebarTools package directory.
    base_dir = Path(RebarTools.__file__).parent.absolute()
    self.rebar_commands = RebarTools.ReinforcementCommands
    self.appendToolbar("RebarCommands", self.rebar_commands)
    # Instantiating BOMPreferences registers the BOM preference defaults.
    BOMPreferences()
    FreeCADGui.addPreferencePage(
        str(base_dir / "BillOfMaterial" / "BOMPreferences.ui"),
        "Reinforcement",
    )
    FreeCADGui.addIconPath(str(base_dir / "icons" / "preferences"))
    FreeCADGui.addLanguagePath(str(base_dir / "translations"))
This function is executed when FreeCAD starts
625941c14f6381625f1149ba
def __init__(self, failed_tasks):
    """Build the error from the failed tasks.

    Args:
        failed_tasks (List[Task]) - failed tasks
    """
    rendered = edera.helpers.render(failed_tasks)
    Error.__init__(self, "some of the tasks failed: %s" % rendered)
    self.failed_tasks = failed_tasks
Args: failed_tasks (List[Task]) - failed tasks
625941c131939e2706e4cdea
def selectCandidatesToEliminate(self):
    """Eliminate every continuing candidate except the top two."""
    (topTwo, desc) = self.chooseNfromM(2, self.count[0], self.continuing,
                                       "top two candidates")
    # Everyone not in the top two becomes a loser this round.
    losers = [c for c in self.continuing if c not in topTwo]
    self.newLosers(losers)
    return (losers, desc)
Eliminate all candidates except for the top two.
625941c1a4f1c619b28affbc
def rotateLeft(self, node: Node):
    """Perform a left rotation of the subtree rooted at *node*.

    The node's right child (the pivot) takes the node's place; the node
    becomes the pivot's left child and inherits the pivot's old left subtree
    as its right subtree. Updates ``self.root`` when rotating at the root.

    Parameters:
    -----------
    node : Node
        Starting point of subtree rotation.
    """
    pivot = node.right
    # The pivot's left subtree becomes the node's right subtree.
    node.right = pivot.left
    if node.right is not None:
        node.right.parent = node
    # Splice the pivot into the node's former position.
    pivot.parent = node.parent
    if node.parent is None:
        self.root = pivot
    elif node == node.parent.left:
        node.parent.left = pivot
    else:
        node.parent.right = pivot
    pivot.left = node
    node.parent = pivot
Performs a subtree left rotation starting at the given node. Parameters: ----------- node : Node Starting point of subtree rotation.
625941c14e696a04525c93ca
def blend_single_feature(f1, f2, feature_index):
    """Return a deep copy of *f2* whose entry at *feature_index* is replaced
    by the corresponding entry of *f1*. Neither input is modified."""
    blended = copy.deepcopy(f2)
    blended[feature_index] = f1[feature_index]
    return blended
puts feature with index feature_index from f1 array and puts it in f2
625941c15510c4643540f367
def contactTracingRates(self, infectedList, removedSet, t, seed=21):
    """Compute the contact-tracing detection rate of a list of infected
    individuals.

    An infected person gets rate ``ctRatePerPerson`` when they have a detected
    neighbour whose detection time falls within the contact-tracing window
    ``[t - ctEndTime, t - ctStartTime]``. Two traversal orders are used,
    whichever iterates over the smaller collection.

    Fix: the deprecated ``numpy.int`` / ``numpy.bool`` aliases (removed in
    NumPy 1.24) are replaced by the builtin ``int`` / ``bool``.
    """
    assert set(infectedList).intersection(removedSet) == set([])
    ctRates = numpy.zeros(len(infectedList))
    ctStartDate = t - self.ctStartTime
    cdEndDate = t - self.ctEndTime
    # Keep a sorted copy so searchsorted can map vertex -> position below.
    infectedArray = numpy.array(infectedList)
    infectedArrInds = numpy.argsort(infectedArray)
    infectedArray = infectedArray[infectedArrInds]
    removeIndices = numpy.array(list(removedSet), int)
    # underCT[v] is True when vertex v was detected inside the CT window.
    underCT = numpy.zeros(self.graph.size, bool)
    underCT[removeIndices] = numpy.logical_and(
        self.graph.vlist.V[removeIndices, HIVVertices.detectionTimeIndex] >= cdEndDate,
        self.graph.vlist.V[removeIndices, HIVVertices.detectionTimeIndex] <= ctStartDate)
    if len(infectedList) < len(removedSet):
        # Fewer infected: scan each infected person's detected neighbours.
        for i in range(len(infectedList)):
            vertexInd = infectedList[i]
            detectedNeighbours = self.detectedNeighboursList[vertexInd]
            for ind in detectedNeighbours:
                if underCT[ind]:
                    ctRates[i] = self.ctRatePerPerson
    else:
        # Fewer removed: scan each traced (removed) person's neighbours and
        # flag those that are currently infected.
        for vertexInd in removedSet:
            if underCT[vertexInd]:
                neighbours = self.neighboursList[vertexInd]
                for ind in neighbours:
                    if self.graph.vlist.V[ind, HIVVertices.stateIndex] == HIVVertices.infected:
                        i = numpy.searchsorted(infectedArray, ind)
                        ctRates[infectedArrInds[i]] = self.ctRatePerPerson
    assert (ctRates >= numpy.zeros(len(infectedList))).all()
    return ctRates
Compute the contact tracing detection rate of a list of infected individuals.
625941c1baa26c4b54cb109f
def get_final_score(self):
    """Derive the final set/game score from the last recorded score string.

    May not support tie-break sets.

    NOTE(review): reads the module-level ``points_data`` rather than an
    attribute on ``self`` — confirm this is intentional and not meant to be
    ``self.points_data``.

    Returns a dict with 'Sets' ("W:L" set counts) and 'Games' (the per-set
    game string, comma-separated).
    """
    score_string = points_data['score'].iloc[-1]
    # First comma-separated field holds the game scores; normalize separators.
    games_string = score_string.split(',')[0].replace(' ', ', ')
    winner_sets = 0
    loser_sets = 0
    for set_ in games_string.split(', '):
        # Compare the digit before and after ':' to decide who won the set.
        # NOTE(review): single-character indexing assumes game counts < 10.
        if int(set_[0]) > int(set_[set_.find(':') + 1]):
            winner_sets += 1
        elif int(set_[set_.find(':') + 1]) > int(set_[0]):
            loser_sets += 1
    sets_string = '{}:{}'.format(winner_sets, loser_sets)
    return {'Sets': sets_string, 'Games': games_string}
May not support tie-break sets.
625941c15f7d997b87174a13
def _get_scoop_env_name(name):
    """Generate the SCOOP environment variable name for *name* (upper-cased,
    joined to the prefix with the standard separator)."""
    return SCOOP_ENVIRONMENT_PREFIX + SCOOP_ENVIRONMENT_SEPARATOR + name.upper()
Generate the SCOOP environment name
625941c1498bea3a759b9a2e
def test_cart_iv(self):
    """Test CaRT IV

    :avocado: tags=all,cart,pr,iv,two_node

    Launches the IV test servers in the background, runs a fetch/update/
    invalidate action sequence through the client, then shuts down every
    server rank (highest first, rank 0 last) and reports failure if any
    step failed.
    """
    srvcmd = self.utils.build_cmd(self, self.env, "test_servers")
    try:
        srv_rtn = self.utils.launch_cmd_bg(self, srvcmd)
    except Exception as e:
        self.utils.print("Exception in launching server : {}".format(e))
        self.fail("Test failed.\n")
    if not self.utils.check_process(srv_rtn):
        procrtn = self.utils.stop_process(srv_rtn)
        self.fail("Server did not launch, return code %s" % procrtn)
    # Scripted IV sequence: key (0, 42) must be absent, then present after an
    # update, then absent again after invalidation.
    actions = [
        {"operation": "fetch", "rank": 0, "key": (0, 42), "return_code": -1, "expected_value": ""},
        {"operation": "update", "rank": 0, "key": (0, 42), "value": "potato"},
        {"operation": "fetch", "rank": 0, "key": (0, 42), "return_code": 0, "expected_value": "potato"},
        {"operation": "invalidate", "rank": 0, "key": (0, 42)},
        {"operation": "fetch", "rank": 0, "key": (0, 42), "return_code": -1, "expected_value": ""},
    ]
    # Give the servers a moment to come up before driving the client.
    time.sleep(2)
    failed = False
    clicmd = self.utils.build_cmd(self, self.env, "test_clients")
    try:
        self._iv_test_actions(clicmd, actions)
    except ValueError as exception:
        failed = True
        self.utils.print("TEST FAILED: %s" % str(exception))
    # Shut down every non-zero rank, highest rank first.
    num_servers = self.utils.get_srv_cnt(self, "test_servers")
    srv_ppn = self.params.get("test_servers_ppn", '/run/tests/*/')
    for rank in reversed(range(1, int(srv_ppn) * num_servers)):
        # NOTE(review): clicmd accumulates "-o shutdown -r N" arguments across
        # iterations rather than being rebuilt each time — confirm intended.
        clicmd += " -o shutdown -r " + str(rank)
        self.utils.print("\nClient cmd : %s\n" % clicmd)
        try:
            subprocess.call(shlex.split(clicmd))
        except Exception as e:
            failed = True
            self.utils.print("Exception in launching client : {}".format(e))
    time.sleep(1)
    # Rank 0 is shut down last so the others can exit cleanly through it.
    clicmd += " -o shutdown -r 0"
    self.utils.print("\nClient cmd : %s\n" % clicmd)
    try:
        subprocess.call(shlex.split(clicmd))
    except Exception as e:
        failed = True
        self.utils.print("Exception in launching client : {}".format(e))
    time.sleep(2)
    if self.utils.check_process(srv_rtn):
        self.utils.stop_process(srv_rtn)
    if failed:
        self.fail("Test failed.\n")
Test CaRT IV :avocado: tags=all,cart,pr,iv,two_node
625941c18a43f66fc4b53fe5
def lock_inside_screen(self, width, height, delta):
    """Apply the character's movement while keeping it on the visible screen.

    x is clamped to [0, width - 50] and y to [0, height - 70] (the 50/70
    margins presumably match the sprite size — confirm against the art).

    Note: the original duplicated the x-axis logic under ``if
    self.facing_right`` and ``if not self.facing_right`` with byte-identical
    bodies, so exactly one copy always ran; the redundant branch is removed
    with no behavior change.
    """
    if self.movex <= -1 and self.x > 0:
        self.x += self.movex * delta
    if self.movex >= 1 and self.x < width - 50:
        self.x += self.movex * delta
    if self.movey >= 1 and self.y < height - 70:
        self.y += self.movey * delta
    if self.movey <= -1 and self.y > 0:
        self.y += self.movey * delta
Movement of the pc, locking it on the visible screen
625941c163d6d428bbe4446d
def _connect(self):
    """Connect to vCenter and return the API content.

    On any recognized failure the error is logged and the process exits
    with status 1 (this helper never returns on failure).
    """
    try:
        return vmware.connect_to_api(self.module)
    except socket.gaierror as err:
        logging.critical('connection error\n%s', err)
    except vim.fault.InvalidLogin as err:
        logging.critical('authentication error\n%s', err.msg)
    except AttributeError:
        logging.error('error connecting to vcenter')
    sys.exit(1)
Connect to vcenter and return content.
625941c130dc7b76659018e6
def get_io(action_manager, io_name, param):
    """Fetch the IO state: a single address when one is specified in *param*,
    otherwise the state of all IOs.

    Returns (result, state) where state is taken from the operation's data.
    """
    if scenario_key.PARAM_KEY_IO_ADDRESS in param:
        operation = scenario_key.IO_MOTION_GET_IO_STATE
    else:
        operation = scenario_key.IO_MOTION_GET_IO_STATE_ALL
    result, data = action_manager.io_operation(io_name, operation, param)
    return result, data[scenario_key.PARAM_KEY_IO_STATE]
IOの状態を取得
625941c17c178a314d6ef3da
def quatre_fonctions():
    """Return the constant greeting "HELLO".

    (Original French docstring: "La dernière et ultime... Bafouille..." —
    the last and final one.)
    """
    return "HELLO"
La dernière et ultime... Bafouille...
625941c1cb5e8a47e48b7a2b
def get_cluster_id_by_name(self, emr_cluster_name, cluster_states):
    """Fetch the id of the EMR cluster with the given name and states.

    :param emr_cluster_name: Name of a cluster to find
    :param cluster_states: State(s) of cluster to find
    :return: the cluster id when exactly one match exists, None when there is
        no match
    :raises AirflowException: when more than one cluster matches the name
    """
    response = self.get_conn().list_clusters(ClusterStates=cluster_states)
    matches = [cluster for cluster in response['Clusters']
               if cluster['Name'] == emr_cluster_name]
    if not matches:
        self.log.info('No cluster found for name %s', emr_cluster_name)
        return None
    if len(matches) > 1:
        raise AirflowException(f'More than one cluster found for name {emr_cluster_name}')
    cluster_id = matches[0]['Id']
    self.log.info('Found cluster name = %s id = %s', emr_cluster_name, cluster_id)
    return cluster_id
Fetch id of EMR cluster with given name and (optional) states. Will return only if single id is found. :param emr_cluster_name: Name of a cluster to find :type emr_cluster_name: str :param cluster_states: State(s) of cluster to find :type cluster_states: list :return: id of the EMR cluster
625941c1099cdd3c635f0bda
def index(self):
    """Return the rendered CSW admin page."""
    return render('csw/admin.html')
Return the admin page for CSW
625941c130c21e258bdfa41a
def list(
    self,
    resource_group_name,
    virtual_router_name,
    **kwargs
):
    """Lists all Virtual Router Peerings in a Virtual Router resource.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param virtual_router_name: The name of the Virtual Router.
    :type virtual_router_name: str
    :keyword callable cls: A custom type or function that will be passed the
     direct response
    :return: An iterator like instance of either VirtualRouterPeeringListResult
     or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-11-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # First page: build the templated ARM URL; later pages: the service
        # supplies a fully-formed next_link, so only headers are added.
        header_parameters = {}
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            url = self.list.metadata['url']
            path_format_arguments = {
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'virtualRouterName': self._serialize.url("virtual_router_name", virtual_router_name, 'str'),
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Deserialize one page; returns (next page link or None, item iterator).
        deserialized = self._deserialize('VirtualRouterPeeringListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        # Issue the request for one page and map non-200 responses to errors.
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.Error, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        return pipeline_response

    return ItemPaged(
        get_next, extract_data
    )
Lists all Virtual Router Peerings in a Virtual Router resource. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param virtual_router_name: The name of the Virtual Router. :type virtual_router_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either VirtualRouterPeeringListResult or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_11_01.models.VirtualRouterPeeringListResult] :raises: ~azure.core.exceptions.HttpResponseError
625941c10a366e3fb873e796
def __new__(cls, *args, **kwargs):
    """Create the event instance and inject the shared client context."""
    instance = super(Event, cls).__new__(cls)
    instance.context = Context.get_instance()
    return instance
Injects the client instance in the event object.
625941c1507cdc57c6306c54
def close_writers():
    """Ensure all open jsonl writers cached in ``__context__`` are closed.

    Takes ``tlock`` so no writer is closed while another thread is using it.

    Fix: the original iterated with the Python-2-only ``dict.iteritems()``,
    which raises AttributeError on Python 3; ``items()`` is used instead.
    """
    with tlock:
        for key, writer in __context__.get("cloud_jsonl_returner.writers", {}).items():
            if not writer.closed:
                log.info("Closing writer for file '{:}' found in context".format(writer.name))
                writer.close()
Ensure all open writers are closed.
625941c15e10d32532c5eea5
def __init__(self):
    """Initialize the instance with an empty domain dictionary."""
    self.domains = {}
Initialize the class with an empty dictionary
625941c173bcbd0ca4b2bff4
def output_traceback(ex):
    """Log the active traceback line by line and return a summary.

    Must be called from inside an ``except`` block: it reads the traceback
    being handled via ``traceback.format_exc()``.

    :param ex: the caught exception
    :return: tuple of (pretty-printed one-line message, the exception text
        with newlines collapsed to spaces)
    """
    # format_exc() already returns a single string; the original wrapped it
    # in a no-op "".join().
    for line in traceback.format_exc().strip().split('\n'):
        logging.error(line)
    # Last traceback frame tells us where the exception was raised.
    last_frame = traceback.extract_tb(ex.__traceback__)[-1]
    filename = basename(last_frame[0])
    lineno = last_frame[1]
    name = type(ex).__name__
    output = str(ex).replace('\n', ' ')
    msg = f"{name} in {filename} on line {lineno}: {output}"
    return (msg, output)
Returns a tuple of a prettyprinted error message and string representation of the error.
625941c199cbb53fe6792b65
def aggregate(df, by, fun, select=None, subset=None, **kwargs):
    """Aggregate *df* grouped by the *by* columns, applying *fun*.

    df: pandas DataFrame or an existing DataFrameGroupBy
    by: columns to aggregate by (cast to 'category' so absent groups appear)
    fun: aggregation function(s) accepted by ``GroupBy.agg`` (dict/list/str)
    select: columns to keep before grouping (others dropped)
    subset: boolean row mask to filter before grouping

    Bug fixes vs. the original:
    - it called its own ``subset`` *parameter* as if it were a function
      (``df = subset(df, select, subset)``), raising TypeError for any
      non-callable value; filtering is now done with plain indexing.
    - when *df* was already a GroupBy, ``gp`` was never assigned before
      ``gp.agg(fun)``, raising NameError; the GroupBy is now used directly.
    Groups whose aggregated values are all NaN (empty categories) are dropped.
    """
    if 'DataFrameGroupBy' not in str(type(df)):
        for col in by:
            df[col] = df[col].astype('category')
        if select is not None:
            df = df[select]
        if subset is not None:
            df = df[subset]
        gp = df.groupby(by)
    else:
        gp = df
    gp = gp.agg(fun)
    # Drop rows where every aggregated column is NaN (unobserved categories).
    gp = gp[~np.all(np.isnan(gp.values), axis=1)]
    return gp
This should enhance the stupidly designed groupby functions. Will have to evoke this until it is fixed. df: pandas data frame by: columns to aggregate by fun: function(s) to apply. For examples: - fun = {'mean': np.mean, 'variance', np.var} will create 2 columns in the aggregated dataframe, 'mean' and 'variance', which stores the results of each aggregation - fun = ['sum', 'count', custom_function], apply the pandas built-in sum and count, as well as a custom_function defined by the user. The column aggregated by custom_function will be named 'custom_function' select: select columns to aggregate on, exclude other columns subset: select rows to aggregate on.
625941c126238365f5f0edea
def prepare(self, job_params):
    """Hook executed only once, when the job runs for the first time.

    Called before the generation of the tasks and the total; subclasses
    override it to run one-time setup. The default does nothing.
    """
    pass
Allow to execute code only once when the job is run for the first time. This method is executed before the generation of the tasks and the total.
625941c1442bda511e8be399
def main():
    """Pull the job info from context and generate the appropriate greylist
    products for the given job.

    Exits early (without producing anything) when: a greylist product already
    exists for this SLC pair hash, the job has not yet reached the required
    retry count, or the master/slave SLC metadata is missing.
    """
    print('Loading variables from context...')
    ctx = load_context()
    required_retry_count = int(ctx.get('required_retry_count', 0))
    current_retry_count = ctx.get('current_retry_count', 0)
    # Context values sometimes arrive wrapped in a list; unwrap the first.
    if isinstance(current_retry_count, list):
        current_retry_count = current_retry_count[0]
    master_slcs = ctx.get('master_slcs', False)
    slave_slcs = ctx.get('slave_slcs', False)
    hsh = gen_direct_hash(master_slcs, slave_slcs)
    # A matching hash means a greylist product already exists — nothing to do.
    if check_ifg_status_by_hash(hsh):
        err = "S1-GUNW-GREYLIST Found with full_hash_id : %s" % hsh
        print(err)
        sys.exit(0)
    if current_retry_count < required_retry_count:
        print('current job retry_count of {} less than the required of {}. Exiting.'.format(current_retry_count, required_retry_count))
        return
    if master_slcs is False or slave_slcs is False:
        print('master/slave metadata fields are not included in job met. Exiting.')
        return
    print('querying for appropriate ifg-cfg...')
    ifg_cfg = get_ifg_cfg(master_slcs, slave_slcs)
    print('ifg found: {}'.format(ifg_cfg))
    print('building greylist product')
    build_greylist_product.build(ifg_cfg)
Pulls the job info from context, and generates appropriate greylist products for the given job.
625941c16e29344779a62592
def start_proxy(options):
    """Start the nginx reverse proxy using the freshly generated config."""
    _create_proxy_config(options)
    config_file = "%s/nginx_kimchi.conf" % paths.conf_dir
    subprocess.call(['nginx', '-c', config_file])
Start nginx reverse proxy.
625941c1b830903b967e988b
def segmentizeLine( line:str, segmentEndPunctuation:str='.?!;:' ) -> List[List[str]]: <NEW_LINE> <INDENT> vPrint( 'Verbose', debuggingThisModule, "segmentizeLine( {!r} )".format( line ) ) <NEW_LINE> if segmentEndPunctuation: <NEW_LINE> <INDENT> for segmentEndChar in segmentEndPunctuation: <NEW_LINE> <INDENT> line = line.replace( segmentEndChar, 'SsSsSsS' ) <NEW_LINE> <DEDENT> <DEDENT> line = line.replace('—',' ').replace('–',' ') <NEW_LINE> lineList:List[List[str]] = [] <NEW_LINE> for segment in line.split( 'SsSsSsS' ): <NEW_LINE> <INDENT> segmentList = [] <NEW_LINE> for rawWord in segment.split(): <NEW_LINE> <INDENT> word = rawWord <NEW_LINE> for internalMarker in BibleOrgSysGlobals.internal_SFMs_to_remove: word = word.replace( internalMarker, '' ) <NEW_LINE> word = BibleOrgSysGlobals.stripWordEndsPunctuation( word ) <NEW_LINE> if word and not word[0].isalnum(): <NEW_LINE> <INDENT> if len(word) > 1: <NEW_LINE> <INDENT> if BibleOrgSysGlobals.debugFlag and debuggingThisModule: <NEW_LINE> <INDENT> vPrint( 'Quiet', debuggingThisModule, "segmentizeLine: {} {}:{} ".format( self.BBB, C, V ) + _("Have unexpected character starting word {!r}").format( word ) ) <NEW_LINE> <DEDENT> word = word[1:] <NEW_LINE> <DEDENT> <DEDENT> if word: <NEW_LINE> <INDENT> if 1 or BibleOrgSysGlobals.verbosityLevel > 3: <NEW_LINE> <INDENT> for k,char in enumerate(word): <NEW_LINE> <INDENT> if not char.isalnum() and (k==0 or k==len(word)-1 or char not in BibleOrgSysGlobals.MEDIAL_WORD_PUNCT_CHARS): <NEW_LINE> <INDENT> if BibleOrgSysGlobals.debugFlag and debuggingThisModule: <NEW_LINE> <INDENT> vPrint( 'Quiet', debuggingThisModule, "segmentizeLine: {} {}:{} ".format( self.BBB, C, V ) + _("Have unexpected {!r} in word {!r}").format( char, word ) ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> lcWord = word.lower() <NEW_LINE> isAReferenceOrNumber = True <NEW_LINE> for char in word: <NEW_LINE> <INDENT> if not char.isdigit() and char not in ':-,.': isAReferenceOrNumber = False; break <NEW_LINE> 
<DEDENT> if not isAReferenceOrNumber: <NEW_LINE> <INDENT> segmentList.append( word ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> lineList.append( segmentList ) <NEW_LINE> <DEDENT> return lineList
Break the line into segments (like sentences that should match across the translations) and then break each segment into words. If you want case folding, convert line to lowerCase before calling. Set segmentEndPunctuation to None if you don't want the lines further divided. Returns a list of lists of words.
625941c1462c4b4f79d1d64f
def covered_by_grid(self, n, e): <NEW_LINE> <INDENT> n_sg_dist = (self.sg_size / 2 - 1) * self.n_step <NEW_LINE> e_sg_dist = (self.sg_size / 2 - 1) * self.e_step <NEW_LINE> return (n - n_sg_dist > self.n_min and n + n_sg_dist < self.n_max and e - e_sg_dist > self.e_min and e + e_sg_dist < self.e_max)
Returns true if the interpolation subgrid (4x4 nodes centered in the input location) is covered by the grid, false otherwise ne ± d must be inside the grid mins and maxs values to return True --------- | d | | | | |d-n,e-d| | | | | d | --------- :param n: coordinate on north direction :type n: float :param e: coordinate on east direction :type e: float :return: interpolation subgrid is covered by the grid area :rtype: bool
625941c129b78933be1e562e
def check_client_version(obj, what, version='sol005'): <NEW_LINE> <INDENT> fullclassname = obj.__module__ + "." + obj.__class__.__name__ <NEW_LINE> message = 'The following commands or options are only supported with the option "--sol005": {}'.format(what) <NEW_LINE> if version == 'v1': <NEW_LINE> <INDENT> message = 'The following commands or options are not supported when using option "--sol005": {}'.format(what) <NEW_LINE> <DEDENT> if fullclassname != 'osmclient.{}.client.Client'.format(version): <NEW_LINE> <INDENT> raise ClientException(message) <NEW_LINE> <DEDENT> return
Checks the version of the client object and raises error if it not the expected. :param obj: the client object :what: the function or command under evaluation (used when an error is raised) :return: - :raises ClientError: if the specified version does not match the client version
625941c1796e427e537b0542
def move_available_trunks_to_include(self, trunk_list): <NEW_LINE> <INDENT> is_moved = None <NEW_LINE> try: <NEW_LINE> <INDENT> self.logger.info('Start: move available trunks to include') <NEW_LINE> self._route_page.move_available_trunks_to_include(trunk_list) <NEW_LINE> is_moved = True <NEW_LINE> <DEDENT> except WebDriverException as exp: <NEW_LINE> <INDENT> is_moved = False <NEW_LINE> self.logger.error(exp.msg) <NEW_LINE> raise <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.logger.info('End: move available trunks to include') <NEW_LINE> return is_moved
Returning move available trunks to include Implementing logging for move available trunks to include functionality :param trunk_list: :return: True/False
625941c19f2886367277a80d
def sessions_from_csessid(self, csessid): <NEW_LINE> <INDENT> return [session for session in self.values() if session.csessid and session.csessid == csessid]
Given a cliend identification hash (for session types that offer them) return all sessions with a matching hash. Args csessid (str): The session hash
625941c11f5feb6acb0c4ad2
def test_was_published_recently_with_old_question(self): <NEW_LINE> <INDENT> time = timezone.now() - datetime.timedelta(days=30) <NEW_LINE> old_question = Question(pub_date=time) <NEW_LINE> self.assertIs(old_question.was_published_recently(), False)
pub_date가 지난 24시간보다 이전이라면 was_published_recently()는 False를 반환해야 마땅합니다
625941c191f36d47f21ac46f
def new_intervals(self): <NEW_LINE> <INDENT> return self._new_intervals
Returns an interval set of all the created intervals.
625941c11b99ca400220aa2f
def wait_to_exist(self, log_level: int = logging.INFO, ms_between_polls: int = 50) -> None: <NEW_LINE> <INDENT> return
Returns after the file exists--useful for eventually consistent stores (e.g., S3)
625941c12eb69b55b151c82b
def get_test_check_response(self, **kwargs): <NEW_LINE> <INDENT> entity_id = kwargs['entity_id'] <NEW_LINE> check_id = kwargs.get('check_id', '__test_check') <NEW_LINE> monitoring_zones = kwargs.get('monitoring_zones') or ['__AGENT__'] <NEW_LINE> ench_key = (entity_id, check_id) <NEW_LINE> timestamp = int(1000 * self._clock.seconds()) <NEW_LINE> return (self.test_check_response_code.get(ench_key, 200), [{'timestamp': timestamp, 'monitoring_zone_id': monitoring_zone, 'available': self.test_check_available.get(ench_key, True), 'status': self.test_check_status.get( ench_key, 'code=200,rt=0.4s,bytes=99'), 'metrics': {m.name: m.get_value_for_test_check(entity_id=entity_id, check_id=check_id, monitoring_zone=monitoring_zone, timestamp=timestamp) for m in self.metrics}} for monitoring_zone in monitoring_zones])
Gets the response as would have been returned by the test-check API.
625941c1f548e778e58cd4fb
def pick_href(element: bs4.element.Tag) -> URLString: <NEW_LINE> <INDENT> return URLString(element.attrs['href'])
HTML 요소에서 `href` 속성 추출 @param element: HTML 요소 @return: 추출한 `href` 속성 값
625941c14e696a04525c93cb
def rotate(self, matrix: List[List[int]]) -> None: <NEW_LINE> <INDENT> for i in range(len(matrix)): <NEW_LINE> <INDENT> for j in range(i, len(matrix)): <NEW_LINE> <INDENT> matrix[i][j], matrix[j][i] = matrix[j][i], matrix[i][j] <NEW_LINE> <DEDENT> <DEDENT> for list1 in matrix: <NEW_LINE> <INDENT> list1.reverse()
Do not return anything, modify matrix in-place instead.
625941c1711fe17d825422ee
def check_limit_values(self): <NEW_LINE> <INDENT> args = { "start_zoom": None, "min_zoom": None, "max_zoom": None, "lat1": None, "lon1": None, "lat2": None, "lon2": None, } <NEW_LINE> start_zoom = self.opt["start_zoom"] <NEW_LINE> min_zoom = self.opt["min_zoom"] <NEW_LINE> max_zoom = self.opt["max_zoom"] <NEW_LINE> zoom_error = False <NEW_LINE> if min_zoom > max_zoom: <NEW_LINE> <INDENT> args["min_zoom"] = max_zoom <NEW_LINE> zoom_error = True <NEW_LINE> <DEDENT> if max_zoom < min_zoom: <NEW_LINE> <INDENT> args["max_zoom"] = min_zoom <NEW_LINE> zoom_error = True <NEW_LINE> <DEDENT> if start_zoom < min_zoom: <NEW_LINE> <INDENT> args["start_zoom"] = min_zoom <NEW_LINE> zoom_error = True <NEW_LINE> <DEDENT> if start_zoom > max_zoom: <NEW_LINE> <INDENT> args["start_zoom"] = max_zoom <NEW_LINE> zoom_error = True <NEW_LINE> <DEDENT> if zoom_error: <NEW_LINE> <INDENT> txt = _( "Your zoom settings were inconclusive and therefore " "changed for this report generation.") <NEW_LINE> ErrorDialog(_("INFO"), txt, parent=self.user.uistate.window) <NEW_LINE> <DEDENT> if not args["min_zoom"]: <NEW_LINE> <INDENT> args["min_zoom"] = min_zoom <NEW_LINE> <DEDENT> if not args["max_zoom"]: <NEW_LINE> <INDENT> args["max_zoom"] = max_zoom <NEW_LINE> <DEDENT> if not args["start_zoom"]: <NEW_LINE> <INDENT> args["start_zoom"] = start_zoom <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> lat1 = float(self.opt["lat1"]) <NEW_LINE> lon1 = float(self.opt["lon1"]) <NEW_LINE> lat2 = float(self.opt["lat2"]) <NEW_LINE> lon2 = float(self.opt["lon2"]) <NEW_LINE> if lat1 < -90 or lat1 > 90: <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> if lat2 < -90 or lat2 > 90: <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> if lon1 < -180 or lon1 > 180: <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> if lon2 < -180 or lon2 > 180: <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> args["lat1"] = lat1 <NEW_LINE> args["lon1"] = lon1 <NEW_LINE> args["lat2"] = lat2 <NEW_LINE> args["lon2"] = 
lon2 <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> txt = _( "Report generation failed.\n" "Please check the values for limits latitude and longitude." "\nLatitude: -90 to 90\nLongitude: -180 to 180") <NEW_LINE> ErrorDialog(_("INFO"), txt, parent=self.user.uistate.window) <NEW_LINE> return False, None <NEW_LINE> <DEDENT> return True, args
Check if limit values are valid for report generation.
625941c1a934411ee3751612
@contextlib.contextmanager <NEW_LINE> def stdout_redirected(to=os.devnull, stdout=None): <NEW_LINE> <INDENT> if stdout is None: <NEW_LINE> <INDENT> stdout = sys.stdout <NEW_LINE> <DEDENT> stdout_fd = fileno(stdout) <NEW_LINE> with os.fdopen(os.dup(stdout_fd), 'wb') as copied: <NEW_LINE> <INDENT> stdout.flush() <NEW_LINE> try: <NEW_LINE> <INDENT> os.dup2(fileno(to), stdout_fd) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> with open(to, 'wb') as to_file: <NEW_LINE> <INDENT> os.dup2(to_file.fileno(), stdout_fd) <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> yield stdout <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> stdout.flush() <NEW_LINE> os.dup2(copied.fileno(), stdout_fd)
https://stackoverflow.com/a/22434262/190597 (J.F. Sebastian)
625941c1ad47b63b2c509eff
def get(self, type1, id1, type2, by_type): <NEW_LINE> <INDENT> threshold = request.args.get('threshold', default=0.4) <NEW_LINE> max_results = parse_args_max_results(request.args) <NEW_LINE> with KnowledgeGraph() as database: <NEW_LINE> <INDENT> sim_results = database.similarity_search(type1, id1, type2, by_type, threshold, max_results) <NEW_LINE> <DEDENT> return sim_results, 200
Similarity search in the local knowledge graph --- tags: [simple] parameters: - in: path name: type1 description: "type of query node" schema: type: string required: true default: "disease" - in: path name: id1 description: "curie of query node" schema: type: string required: true default: "MONDO:0005737" - in: path name: type2 description: "type of return nodes" schema: type: string required: true default: "disease" - in: path name: by_type description: "type used to evaluate similarity" schema: type: string required: true default: "phenotypic_feature" - in: query name: threshold description: "Number between 0 and 1 indicating the minimum similarity to return" schema: type: number default: 0.4 - in: query name: max_results description: "The maximum number of results to return. Set to 0 to return all results." schema: type: integer default: 100 responses: 200: description: result content: application/json: schema: $ref: "#/definitions/SimilarityResult"
625941c124f1403a92600ae7
def model(request): <NEW_LINE> <INDENT> assert isinstance(request, HttpRequest) <NEW_LINE> return render(request,'app/model.html',context_instance = RequestContext(request, {'title':'Template for new pages - change in app/views.py', 'message':'Your application description page - change in app/views.py', 'year':datetime.now().year,}))
Renders the example/template page.
625941c192d797404e304108
def test_inactive(self): <NEW_LINE> <INDENT> active_orders = OrderFactory.create_batch( 3, start_date=self.past_date, end_date=self.future_date, ) <NEW_LINE> inactive_orders = [ OrderFactory( start_date=self.future_date, end_date=self.extra_future_date, ), OrderFactory( start_date=self.extra_past_date, end_date=self.past_date, ), ] <NEW_LINE> result = Order.objects.inactive() <NEW_LINE> for order in inactive_orders: <NEW_LINE> <INDENT> self.assertIn(order, result) <NEW_LINE> <DEDENT> for order in active_orders: <NEW_LINE> <INDENT> self.assertNotIn(order, result)
Order.objects.inactive()
625941c18e7ae83300e4af4b
def add_to_inventory(inventory, added_items): <NEW_LINE> <INDENT> for item in added_items: <NEW_LINE> <INDENT> if item in inventory.keys(): <NEW_LINE> <INDENT> inventory[item] += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> inventory[item] = 1 <NEW_LINE> <DEDENT> <DEDENT> return inventory
Updates inventory by added_items.
625941c1bf627c535bc1314d
def check_id(self, emp_id): <NEW_LINE> <INDENT> return self.check_attr_match("id_rule","id",emp_id)
>>> v = Validator() >>> v.check_id('M000') True >>> v.check_id('F999') True >>> v.check_id('m000') False >>> v.check_id('F9999') False >>> v.check_id('MMMM') False >>> v.check_id('0000') False >>> v.check_id('000') False >>> v.check_id('M00') False >>> v.check_id(None) False >>> v.check_id(1) False >>> v.check_id(True) False >>> v.check_id({'M00'}) False
625941c1046cf37aa974ccc8
@csrf_exempt <NEW_LINE> def disgenet(disgenet): <NEW_LINE> <INDENT> disgenet_uri = {} <NEW_LINE> sparql_query = ( "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>" + "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>" + "PREFIX owl: <http://www.w3.org/2002/07/owl#>" + "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>" + "PREFIX dcterms: <http://purl.org/dc/terms/>" + "PREFIX foaf: <http://xmlns.com/foaf/0.1/>" + "PREFIX skos: <http://www.w3.org/2004/02/skos/core#>" + "PREFIX void: <http://rdfs.org/ns/void#>" + "PREFIX sio: <http://semanticscience.org/resource/>" + "PREFIX ncit: <http://ncicb.nci.nih.gov/xml/owl/EVS/Thesaurus.owl#>" + "PREFIX up: <http://purl.uniprot.org/core/>" + "PREFIX dcat: <http://www.w3.org/ns/dcat#>" + "PREFIX dctypes: <http://purl.org/dc/dcmitype/>" + "PREFIX wi: <http://http://purl.org/ontology/wi/core#>" + "PREFIX eco: <http://http://purl.obolibrary.org/obo/eco.owl#>" + "PREFIX prov: <http://http://http://www.w3.org/ns/prov#>" + "PREFIX pav: <http://http://http://purl.org/pav/>" + "PREFIX obo: <http://purl.obolibrary.org/obo/>" + "SELECT * " + "WHERE { SERVICE <http://rdf.disgenet.org/sparql/> { " + "?uri dcterms:title ?disease . " + "?disease bif:contains \'\"" + disgenet + "\"\' ." + "} " + "} LIMIT 30" ) <NEW_LINE> g = rdflib.ConjunctiveGraph('SPARQLStore') <NEW_LINE> g.open("http://127.0.0.1:8890/sparql/") <NEW_LINE> for row in g.query(sparql_query): <NEW_LINE> <INDENT> disgenet_uri[row[0].strip("rdflib.term.URIRef")] = row[1] <NEW_LINE> <DEDENT> return disgenet_uri
Finds the DisGeNET URI based on the searched disease entered when uploading data to the SEEK server. Arguments: disgenet: Disease entered in the SEEK upload form. Returns: DisGeNET URIs that are connected to the disease entered in the upload form.
625941c1956e5f7376d70ded
def abrm(rf, x, balanced=False): <NEW_LINE> <INDENT> device = backend.get_device(rf) <NEW_LINE> xp = device.xp <NEW_LINE> with device: <NEW_LINE> <INDENT> eps = 1e-16 <NEW_LINE> g = xp.ones(xp.size(rf)) * 2 * xp.pi / xp.size(rf) <NEW_LINE> a = xp.ones(xp.size(x), dtype=complex) <NEW_LINE> b = xp.zeros(xp.size(x), dtype=complex) <NEW_LINE> for mm in range(xp.size(rf)): <NEW_LINE> <INDENT> om = x * g[mm] <NEW_LINE> phi = xp.sqrt(xp.abs(rf[mm]) ** 2 + om ** 2) + eps <NEW_LINE> n = xp.column_stack((xp.real(rf[mm]) / phi, xp.imag(rf[mm]) / phi, om / phi)) <NEW_LINE> av = xp.cos(phi / 2) - 1j * n[:, 2] * xp.sin(phi / 2) <NEW_LINE> bv = -1j * (n[:, 0] + 1j * n[:, 1]) * xp.sin(phi / 2) <NEW_LINE> at = av * a - xp.conj(bv) * b <NEW_LINE> bt = bv * a + xp.conj(av) * b <NEW_LINE> a = at <NEW_LINE> b = bt <NEW_LINE> <DEDENT> if balanced: <NEW_LINE> <INDENT> g = -2 * xp.pi / 2 <NEW_LINE> om = x * g <NEW_LINE> phi = xp.abs(om) + eps <NEW_LINE> nz = om / phi <NEW_LINE> av = xp.cos(phi / 2) - 1j * nz * xp.sin(phi / 2) <NEW_LINE> a = av * a <NEW_LINE> b = xp.conj(av) * b <NEW_LINE> <DEDENT> return a, b
1D RF pulse simulation, with simultaneous RF + gradient rotations. Args: rf (array): rf waveform input. x (array): spatial locations. balanced (bool): toggles application of rewinder. Returns: 2-element tuple containing - **a** (*array*): SLR alpha parameter. - **b** (*array*): SLR beta parameter. References: Pauly, J., Le Roux, Patrick., Nishimura, D., and Macovski, A.(1991). 'Parameter Relations for the Shinnar-LeRoux Selective Excitation Pulse Design Algorithm'. IEEE Transactions on Medical Imaging, Vol 10, No 1, 53-65.
625941c1aad79263cf3909bd
def getMicInputGain(self, channel, unitCode=0): <NEW_LINE> <INDENT> resp = self.XAPCommand('MLINE', channel, unitCode=unitCode) <NEW_LINE> return int(resp)
Request the microphone input gain for the target channel. unitCode - the unit code of the target XAP800 channel - the target channel (1-8, or * for all)
625941c163f4b57ef000109d
@app.route('/view_revu/<review_id>') <NEW_LINE> def view_revu(review_id): <NEW_LINE> <INDENT> the_revu = mongo.db.reviews.find_one({"_id": ObjectId(review_id)}) <NEW_LINE> if the_revu == None: <NEW_LINE> <INDENT> flash("Unable to find that particular REVU") <NEW_LINE> return redirect('home') <NEW_LINE> <DEDENT> return render_template('revu.html', review=the_revu)
Find review in database based on review_id parameter If review doesn't exist, flash error message and redirect to home If review exists, render REVU page
625941c14d74a7450ccd4142
def create_volume(self, volume): <NEW_LINE> <INDENT> vdisk_name = volume['name'] <NEW_LINE> vdisk_params = self._get_vdisk_params(volume['volume_type_id']) <NEW_LINE> vdisk_size = six.text_type(volume['size']) <NEW_LINE> return self._create_vdisk(vdisk_name, vdisk_size, 'gb', vdisk_params)
Create volume.
625941c116aa5153ce3623f7
def prev(self): <NEW_LINE> <INDENT> repeat_mode = self.repeat_mode <NEW_LINE> shuffle_mode = self.shuffle_mode <NEW_LINE> if repeat_mode == 'track': <NEW_LINE> <INDENT> return self.current <NEW_LINE> <DEDENT> if shuffle_mode != 'disabled': <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> prev_index, prev = max(self.get_shuffle_history()) <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> return self.get_current() <NEW_LINE> <DEDENT> self.__tracks.del_meta_key(prev_index, 'playlist_shuffle_history') <NEW_LINE> self.current_position = prev_index <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> position = self.current_position - 1 <NEW_LINE> if position < 0: <NEW_LINE> <INDENT> if repeat_mode == 'all': <NEW_LINE> <INDENT> position = len(self) - 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> position = 0 if len(self) else -1 <NEW_LINE> <DEDENT> <DEDENT> self.current_position = position <NEW_LINE> <DEDENT> return self.get_current()
Progresses to the previous track within the playlist and takes shuffle and repeat modes into account :returns: the new current track :rtype: :class:`xl.trax.Track` or None
625941c18e71fb1e9831d729
def test_custom_action_response_descriptor_octopus_server_web_api_actions_azure_web_sites_slot_list_action(self): <NEW_LINE> <INDENT> pass
Test case for custom_action_response_descriptor_octopus_server_web_api_actions_azure_web_sites_slot_list_action
625941c12eb69b55b151c82c
def get_full_filename(filename: str) -> Any: <NEW_LINE> <INDENT> config = APP.config_slider["DEFAULT"] <NEW_LINE> return os.path.join(config['root_dir'], filename)
Return full file name.
625941c1dc8b845886cb54b3
def _key_sub_key(key, sub_key, config_dict, die=True): <NEW_LINE> <INDENT> result = None <NEW_LINE> if key in config_dict: <NEW_LINE> <INDENT> if sub_key in config_dict[key]: <NEW_LINE> <INDENT> result = config_dict[key][sub_key] <NEW_LINE> <DEDENT> <DEDENT> if result is None and die is True: <NEW_LINE> <INDENT> log_message = ( '%s:%s not defined in configuration') % (key, sub_key) <NEW_LINE> log.log2die(1016, log_message) <NEW_LINE> <DEDENT> return result
Get config parameter from YAML. Args: key: Primary key sub_key: Secondary key config_dict: Dictionary to explore die: Die if true and the result encountered is None Returns: result: result
625941c15166f23b2e1a50d8
def make_rc_record(record): <NEW_LINE> <INDENT> rc_rec = SeqRecord(seq=record.seq.reverse_complement(), id="rc_" + record.id, name="rc_" + record.name, description="reverse complement") <NEW_LINE> return rc_rec
Return a new SeqRecord with the reverse complement sequence.
625941c1be7bc26dc91cd583
def date_print(date): <NEW_LINE> <INDENT> date = strptime(date, "%Y/%m/%d") <NEW_LINE> return strftime('%d %B %Y', date)
Converts ISO 8601 calendar date (YYYY-MM-DD) in a fancy format Args: date (str): YYYY-MM-DD Returns: The date converted (i.e. 26 August 2018)
625941c1be8e80087fb20bc5
def test_pack_fail(self): <NEW_LINE> <INDENT> f1 = BufferFormat.from_string("(3f)[foo]") <NEW_LINE> f2 = BufferFormat.from_string("(3f)[vertex](4B)[color]") <NEW_LINE> with self.assertRaises(ValueError, msg="Assign succeed") as cm1: <NEW_LINE> <INDENT> data = ( (None,None,None), (None,None,None) ) <NEW_LINE> f1.pack(data) <NEW_LINE> <DEDENT> with self.assertRaises(IndexError, msg="Assign succeed") as cm2: <NEW_LINE> <INDENT> data = ( (4.0, 5.0, 6.0, 12.0), ) <NEW_LINE> f1.pack(data) <NEW_LINE> <DEDENT> with self.assertRaises(ValueError, msg="Assign succeed") as cm3: <NEW_LINE> <INDENT> f1.pack(()) <NEW_LINE> <DEDENT> with self.assertRaises(ValueError, msg="Assign succeed") as cm4: <NEW_LINE> <INDENT> data = ( ((10.0, 20.0, 30.0), (10, 11, 12, 13)), (20.0, 30.0, 40.0) ) <NEW_LINE> f2.pack(data) <NEW_LINE> <DEDENT> self.assertEqual('Expected Sequence with format "3f", found "(None, None, None)"', str(cm1.exception), 'Exceptions do not match') <NEW_LINE> self.assertEqual('invalid index', str(cm2.exception), 'Exceptions do not match') <NEW_LINE> self.assertEqual('No data to pack', str(cm3.exception), 'Exceptions do not match') <NEW_LINE> self.assertEqual('Expected Sequence with format "3f", found "20.0"', str(cm4.exception), 'Exceptions do not match')
Test packing data with invalid format
625941c1d18da76e23532453
def generate_comments(self, solution_data): <NEW_LINE> <INDENT> comment = "HI,\n" <NEW_LINE> link = "" <NEW_LINE> for ind, sol in enumerate(solution_data): <NEW_LINE> <INDENT> if 'solution' in sol.get('result'): <NEW_LINE> <INDENT> comment += f"Error: {sol.get('description')} \n and {sol.get('result').get('err')}\n" <NEW_LINE> comment += f"Solution: {sol.get('result').get('solution')}\n" <NEW_LINE> comment += f"---------------------------------------------------------------------\n" <NEW_LINE> link += sol.get('kb') + '\n' <NEW_LINE> <DEDENT> elif sol.get('kb'): <NEW_LINE> <INDENT> comment += f"Error: {sol.get('description')}\n and {sol.get('result').get('err')}\n" <NEW_LINE> comment += f"Solution: {sol.get('kb')}\n" <NEW_LINE> comment += f"---------------------------------------------------------------------\n" <NEW_LINE> link += sol.get('kb') + '\n' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> comment += f"\nError: {sol.get('description')}\n and {sol.get('result').get('err')}\n" <NEW_LINE> comment += f"---------------------------------------------------------------------\n" <NEW_LINE> <DEDENT> if ind == 4: <NEW_LINE> <INDENT> _LOGGER.info('Top 5 solutions are found for the failed plugins') <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> return comment, link
Generate the top 5 critical comments for solving the failure.
625941c1507cdc57c6306c55
def _get_data(self, command): <NEW_LINE> <INDENT> data = bytearray(3) <NEW_LINE> self.i2c.writeto(self.address, command) <NEW_LINE> sleep(self.I2C_WAIT_TIME) <NEW_LINE> self.i2c.readfrom_into(self.address, data) <NEW_LINE> value = self._convert_to_integer(data[:2]) <NEW_LINE> verified = self._verify_checksum(data) <NEW_LINE> if not verified: <NEW_LINE> <INDENT> raise CRCError('Data read off i2c bus failed CRC check.', data[:2], data[-1]) <NEW_LINE> <DEDENT> return value
Retrieve data from the sensor and verify it with a CRC check.
625941c1fb3f5b602dac3610
def test_driver_capabilities(self): <NEW_LINE> <INDENT> self.assertEqual(set(ComputeDriver.capabilities), set(self.drv.capabilities)) <NEW_LINE> self.assertFalse(self.drv.capabilities['has_imagecache']) <NEW_LINE> self.assertFalse(self.drv.capabilities['supports_evacuate']) <NEW_LINE> self.assertFalse( self.drv.capabilities['supports_migrate_to_same_host']) <NEW_LINE> self.assertTrue(self.drv.capabilities['supports_attach_interface']) <NEW_LINE> self.assertFalse(self.drv.capabilities['supports_device_tagging']) <NEW_LINE> self.assertFalse( self.drv.capabilities['supports_tagged_attach_interface']) <NEW_LINE> self.assertFalse( self.drv.capabilities['supports_tagged_attach_volume']) <NEW_LINE> self.assertTrue(self.drv.capabilities['supports_extend_volume']) <NEW_LINE> self.assertFalse(self.drv.capabilities['supports_multiattach'])
Test the driver capabilities.
625941c1bd1bec0571d905ae
def add_uptodate_dependency(self, name, uptodate_dependency, add='both'): <NEW_LINE> <INDENT> if add not in {'fragment', 'page', 'both'}: <NEW_LINE> <INDENT> raise Exception("Add parameter is '{0}', but must be either 'fragment', 'page', or 'both'.".format(add)) <NEW_LINE> <DEDENT> if add == 'fragment' or add == 'both': <NEW_LINE> <INDENT> self.__uptodate_deps_fragment.append({'name': name, 'deps': uptodate_dependency}) <NEW_LINE> <DEDENT> if add == 'page' or add == 'both': <NEW_LINE> <INDENT> self.__uptodate_deps_page.append({'name': name, 'deps': uptodate_dependency})
Add doit uptodate dependency to post. Similar to Post.add_uptodate_dependency.
625941c1b7558d58953c4e97
def __init__(self, dtid=None, offset=None, count=None, total=None, data=None): <NEW_LINE> <INDENT> self.swagger_types = { 'dtid': 'str', 'offset': 'int', 'count': 'int', 'total': 'int', 'data': 'list[Whitelist]' } <NEW_LINE> self.attribute_map = { 'dtid': 'dtid', 'offset': 'offset', 'count': 'count', 'total': 'total', 'data': 'data' } <NEW_LINE> self._dtid = dtid <NEW_LINE> self._offset = offset <NEW_LINE> self._count = count <NEW_LINE> self._total = total <NEW_LINE> self._data = data
WhitelistResultEnvelope - a model defined in Swagger :param dict swaggerTypes: The key is attribute name and the value is attribute type. :param dict attributeMap: The key is attribute name and the value is json key in definition.
625941c197e22403b379cf18
def exec_command(args, commands): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> command_name = args.command if args.command is not None else "help" <NEW_LINE> command = commands[command_name] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> print('Not implemented yet', file=sys.stderr) <NEW_LINE> return <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> command[0](*command[1:]) <NEW_LINE> <DEDENT> except SystemExit: <NEW_LINE> <INDENT> pass
Call the associated command handler
625941c17cff6e4e81117905
def Read(self,reader): <NEW_LINE> <INDENT> pass
Read(self: GH_Gradient,reader: GH_IReader) -> bool
625941c1a05bb46b383ec7a3
def restoredim(y,oldshape,reorderdim): <NEW_LINE> <INDENT> newshape = [oldshape[i] for i in reorderdim] <NEW_LINE> y = y.reshape(newshape) <NEW_LINE> restore_order = [newshape.index(dim) for dim in oldshape] <NEW_LINE> return y.transpose(restore_order)
Revert y back to its oldshape, given its current reordered state (from dim2front) Parameters ---------- y : ARRAY : Array to restore oldshape : LIST of INT : Old dimension sizes, in order reorderdim : LIST of INT : Reordering of dimensions performed by dim2front Returns ------- y : ARRAY : Array restored to the old shape
625941c14a966d76dd550f8d
def batting_average(info, batting_stats): <NEW_LINE> <INDENT> hits = float(batting_stats[info["hits"]]) <NEW_LINE> at_bats = float(batting_stats[info["atbats"]]) <NEW_LINE> if at_bats >= MINIMUM_AB: <NEW_LINE> <INDENT> return hits / at_bats <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0
Inputs: batting_stats - dictionary of batting statistics (values are strings) Output: Returns the batting average as a float
625941c17d847024c06be239
def isPalindrome(text:str,metod:int) -> bool: <NEW_LINE> <INDENT> if(metod == 0): <NEW_LINE> <INDENT> return palindrome_recursive(text,metod) <NEW_LINE> <DEDENT> elif(metod == 1): <NEW_LINE> <INDENT> return palindrome_iterative(text) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Ikinci parametre olarak 0 veya 1 değerini girmeniz gerekiyor {} için".format(text)) <NEW_LINE> return False
isPalindrome metodu verilen string'i istenen metoda göre palindrome olup olmadığını sorgular Parametre: String ve 0 değeri girdiğinizde recursive çalışmakta String ve 1 değeri girdiğinizde iterative çalışmakta
625941c1a8ecb033257d304d
def searchirc(request): <NEW_LINE> <INDENT> context = { 'date_sliding_value': request.session.get('irc_date_sliding_value', 2), 'date_sliding_type': request.session.get('irc_date_sliding_type', 'y'), 'sort_field': request.session.get('irc_sort_field', '_score'), 'sort_dir': request.session.get('irc_sort_dir', '-'), 'filter_channel': request.session.get('irc_filter_channel', ''), 'day_mode': request.session.get('irc_day_mode', True), } <NEW_LINE> return render(request, 'wally/searchirc.html', context)
Get search-form for irc search
625941c1ff9c53063f47c174
def GetBranchingAttachmentInfo(self, attachment): <NEW_LINE> <INDENT> physicalAttachment = self.LogicalToPhysicalAttachment(attachment) <NEW_LINE> lineCount = self.GetAttachmentLineCount(attachment) <NEW_LINE> if not lineCount: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> totalBranchLength = self._branchSpacing * (lineCount - 1) <NEW_LINE> root = self.GetBranchingAttachmentRoot(attachment) <NEW_LINE> neck = wx.RealPoint() <NEW_LINE> shoulder1 = wx.RealPoint() <NEW_LINE> shoulder2 = wx.RealPoint() <NEW_LINE> if physicalAttachment == 0: <NEW_LINE> <INDENT> neck[0] = self.GetX() <NEW_LINE> neck[1] = root[1] - self._branchNeckLength <NEW_LINE> shoulder1[0] = root[0] - totalBranchLength / 2.0 <NEW_LINE> shoulder2[0] = root[0] + totalBranchLength / 2.0 <NEW_LINE> shoulder1[1] = neck[1] <NEW_LINE> shoulder2[1] = neck[1] <NEW_LINE> <DEDENT> elif physicalAttachment == 1: <NEW_LINE> <INDENT> neck[0] = root[0] + self._branchNeckLength <NEW_LINE> neck[1] = root[1] <NEW_LINE> shoulder1[0] = neck[0] <NEW_LINE> shoulder2[0] = neck[0] <NEW_LINE> shoulder1[1] = neck[1] - totalBranchLength / 2.0 <NEW_LINE> shoulder1[1] = neck[1] + totalBranchLength / 2.0 <NEW_LINE> <DEDENT> elif physicalAttachment == 2: <NEW_LINE> <INDENT> neck[0] = self.GetX() <NEW_LINE> neck[1] = root[1] + self._branchNeckLength <NEW_LINE> shoulder1[0] = root[0] - totalBranchLength / 2.0 <NEW_LINE> shoulder2[0] = root[0] + totalBranchLength / 2.0 <NEW_LINE> shoulder1[1] = neck[1] <NEW_LINE> shoulder2[1] = neck[1] <NEW_LINE> <DEDENT> elif physicalAttachment == 3: <NEW_LINE> <INDENT> neck[0] = root[0] - self._branchNeckLength <NEW_LINE> neck[1] = root[1] <NEW_LINE> shoulder1[0] = neck[0] <NEW_LINE> shoulder2[0] = neck[0] <NEW_LINE> shoulder1[1] = neck[1] - totalBranchLength / 2.0 <NEW_LINE> shoulder2[1] = neck[1] + totalBranchLength / 2.0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Unrecognised attachment point in GetBranchingAttachmentInfo") <NEW_LINE> <DEDENT> return root, neck, 
shoulder1, shoulder2
Get information about where branching connections go. :param `attachment`: ??? :returns: `False` if there are no lines at this attachment.
625941c11d351010ab855a9c
def _handle_conn(self, tcp_conn): <NEW_LINE> <INDENT> session = self.spdy_session_class(self, tcp_conn) <NEW_LINE> self.emit('session', session)
Process a new client connection, tcp_conn.
625941c1e76e3b2f99f3a78f
def disableTable(self, tableName): <NEW_LINE> <INDENT> pass
Disable a table Parameters: - tableName: the tablename to disable
625941c182261d6c526ab41c
@ltk.command(short_help="Cleans up the associations between local documents and documents in Lingotek") <NEW_LINE> @click.option('-a', '--all', 'dis_all', flag_value=True, help='Removes all associations between local and remote') <NEW_LINE> @click.argument('file_paths', required=False, nargs=-1) <NEW_LINE> @click.option('-f', '--force', flag_value=True, help='Deletes local documents that no longer exists in Lingotek') <NEW_LINE> def clean(force, dis_all, file_paths): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> action = actions.Action(os.getcwd()) <NEW_LINE> init_logger(action.path) <NEW_LINE> action.clean_action(force, dis_all, file_paths) <NEW_LINE> <DEDENT> except (UninitializedError, RequestFailedError) as e: <NEW_LINE> <INDENT> print_log(e) <NEW_LINE> logger.error(e) <NEW_LINE> return
Cleans up the associations between local documents and documents in Lingotek. By default, checks that local documents and remote documents line up. Use different options for different use cases. Enter file or directory names to remove local associations of specific files or directories.
625941c163b5f9789fde7065
def sanitize_input(input_object, allowed_type): <NEW_LINE> <INDENT> input_object = np.atleast_1d(input_object) <NEW_LINE> for obj in np.nditer(input_object, flags=["refs_ok", "zerosize_ok"]): <NEW_LINE> <INDENT> if not isinstance(obj.item(), allowed_type): <NEW_LINE> <INDENT> raise TypeError("Only objects of type: {0} accepted.".format(allowed_type)) <NEW_LINE> <DEDENT> <DEDENT> return input_object
Sanitizes input data by testing if *input_object* is an array of type *allowed_type*. Args: input_object: Object which is to be checked. allowed_type: desired type Return: input_object
625941c121a7993f00bc7c6c
def write_config(self, name='main', key_path=None, cert_path=None): <NEW_LINE> <INDENT> self.CONFIG['auth_service']['cluster_name'] = name <NEW_LINE> self.CONFIG['auth_service']['tokens'] = ['trusted_cluster:%s' % j.data.hash.sha256_string(name)] <NEW_LINE> if key_path and cert_path: <NEW_LINE> <INDENT> self.CONFIG['proxy_service']['https_key_file'] = key_path <NEW_LINE> self.CONFIG['proxy_service']['https_cert_file'] = cert_path <NEW_LINE> <DEDENT> self.prefab.core.file_write('/etc/teleport.yaml', j.data.serializer.yaml.dumps(self.CONFIG))
write configuration file @param name ,, str cluster name to be used has to be the same if going to connect to existing cluster @param trusted_cluster_paths,, list(dict(cluster)) has to be in the format:
625941c1293b9510aa2c3218
def delete(self, val): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> to_delete = self.find_node(val) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> if to_delete == self.root and self.size() == 1: <NEW_LINE> <INDENT> self.root = None <NEW_LINE> <DEDENT> elif to_delete == self.root and self.size() > 1: <NEW_LINE> <INDENT> self.root._reset_by_value(val) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if to_delete._left is None and to_delete._right is None: <NEW_LINE> <INDENT> if to_delete._parent._left == to_delete: <NEW_LINE> <INDENT> to_delete._parent._left = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> to_delete._parent._right = None <NEW_LINE> <DEDENT> <DEDENT> elif to_delete._left is not None and to_delete._right is None: <NEW_LINE> <INDENT> to_delete._shift_up_left_del() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> to_delete._shift_up_right_del()
Deletes a node by value in a tree.
625941c1bf627c535bc1314e
def iterate(self): <NEW_LINE> <INDENT> iterations = 0 <NEW_LINE> while self.previous_assignments != self.assignments: <NEW_LINE> <INDENT> iterations += 1 <NEW_LINE> self.set_centroids() <NEW_LINE> self.set_clusters() <NEW_LINE> <DEDENT> return self.centroids
Performs iterations over the cluster by re-calculating the mean of the cluster and then re-clustering.
625941c197e22403b379cf19
def test_high_bits_encryption_regression(self): <NEW_LINE> <INDENT> value = "\xFF\xFF\xFF\xFF\xFF" <NEW_LINE> with mock.patch("random.randint") as random_mock: <NEW_LINE> <INDENT> random_mock.side_effect=random.Random(34).randint <NEW_LINE> for nbits in (10, 16, 20): <NEW_LINE> <INDENT> K = rsa.RSAPrivateKey(10) <NEW_LINE> k = K.GetPublicKey() <NEW_LINE> msg = rsa.Message.Encode(value, K.N) <NEW_LINE> self.assertEquals(K.Decrypt(k.Encrypt(msg)).Decode(), value)
Must be able to crypt messages larger than n in any base.
625941c192d797404e304109
def set(self, labels: LabelsType, value: NumericValueType) -> None: <NEW_LINE> <INDENT> self.set_value(labels, value)
Set the counter to an arbitrary value.
625941c156ac1b37e6264153
def state(self) -> np.ndarray: <NEW_LINE> <INDENT> return self._stepper.current_state
Return the state (wave function) at this point in the computation. The state is returned in the computational basis with these basis states defined by the qubit_map. In particular the value in the qubit_map is the index of the qubit, and these are translated into binary vectors where the last qubit is the 1s bit of the index, the second-to-last is the 2s bit of the index, and so forth (i.e. big endian ordering). Example: qubit_map: {QubitA: 0, QubitB: 1, QubitC: 2} Then the returned vector will have indices mapped to qubit basis states like the following table | | QubitA | QubitB | QubitC | +---+--------+--------+--------+ | 0 | 0 | 0 | 0 | | 1 | 0 | 0 | 1 | | 2 | 0 | 1 | 0 | | 3 | 0 | 1 | 1 | | 4 | 1 | 0 | 0 | | 5 | 1 | 0 | 1 | | 6 | 1 | 1 | 0 | | 7 | 1 | 1 | 1 | +---+--------+--------+--------+
625941c14f6381625f1149bb
def unexpected_error(func): <NEW_LINE> <INDENT> @wraps(func) <NEW_LINE> def wrapper(*args, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return func(*args, **kwargs) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> slack_error_logger.exception(e) <NEW_LINE> <DEDENT> <DEDENT> return wrapper
Decorator to except rest Exceptions of function
625941c1ec188e330fd5a723
def zero_to_right(line): <NEW_LINE> <INDENT> length = len(line) <NEW_LINE> result = [0] * length <NEW_LINE> idx = 0 <NEW_LINE> for num in line: <NEW_LINE> <INDENT> if num != 0: <NEW_LINE> <INDENT> result[idx] = num <NEW_LINE> idx += 1 <NEW_LINE> <DEDENT> <DEDENT> return result
Helper function for merge() that put all non-zero term to the left with no space. i.e. zero's to the right
625941c1e5267d203edcdc1f
def test_malformedEmailAddress(self): <NEW_LINE> <INDENT> return self.specifyBogusEmail('hello, world!')
If a malformed email address is provided, no notification should be sent, but the user should receive the same feedback as if it worked, to discourage cracking attempts.
625941c16aa9bd52df036d22
def add_user(self, username, vec): <NEW_LINE> <INDENT> self.__add_row_to_data(username, vec) <NEW_LINE> self.__save_current_user_data() <NEW_LINE> self.build_annoy_index()
Adds a new user to the data and rebuilds the AnnoyIndex Args: username: Username used for indexing vec: Vector of ratings for the games
625941c123849d37ff7b3010
def train(self, input_tensor, target_tensor, decoder_optimizer, criterion, teacher_forcing_ratio=0.5): <NEW_LINE> <INDENT> decoder_optimizer.zero_grad() <NEW_LINE> target_length = target_tensor.size(0) <NEW_LINE> loss = 0 <NEW_LINE> decoder_input = torch.tensor([[self.SOS_token]], device=device) <NEW_LINE> decoder_hidden = self._create_init_hidden(torch.reshape(input_tensor, (1, 1, -1))) <NEW_LINE> if torch.cuda.is_available(): target_tensor = target_tensor.cuda() <NEW_LINE> use_teacher_forcing = True if random.random() < teacher_forcing_ratio else False <NEW_LINE> if use_teacher_forcing: <NEW_LINE> <INDENT> for i in range(target_length): <NEW_LINE> <INDENT> decoder_output, decoder_hidden = self.decoder( decoder_input, decoder_hidden) <NEW_LINE> loss += criterion(decoder_output, target_tensor[i]) <NEW_LINE> decoder_input = target_tensor[i] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for i in range(target_length): <NEW_LINE> <INDENT> decoder_output, decoder_hidden = self.decoder( decoder_input, decoder_hidden) <NEW_LINE> top_value, top_index = decoder_output.topk(1) <NEW_LINE> decoder_input = top_index.squeeze().detach() <NEW_LINE> loss += criterion(decoder_output, target_tensor[i]) <NEW_LINE> if decoder_input.item() == self.EOS_token: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> loss.backward() <NEW_LINE> decoder_optimizer.step() <NEW_LINE> return loss.item() / target_length
Train on a single sentence.
625941c1baa26c4b54cb10a1
def _revert(self): <NEW_LINE> <INDENT> self.kwargs['collect'].reset_form_data()
delete created targets and empty data-fields on Collect
625941c17b180e01f3dc4781