code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
def test_quoted_reply_author_full_name_is_displayed(self): <NEW_LINE> <INDENT> self.env.insert_known_users([('author', 'The Author', 'author@example.net')]) <NEW_LINE> tkt = self._create_ticket_with_change({}, {'comment': 'the comment'}, 'author') <NEW_LINE> req = self._create_request(method='GET', args={'id': tkt.id, 'replyto': '1'}) <NEW_LINE> data = self.ticket_module.process_request(req)[1] <NEW_LINE> comment = u"Replying to [comment:1 The Author]:\n> " u"the comment\n" <NEW_LINE> self.assertEqual(comment, data['comment']) <NEW_LINE> self.assertEqual(comment, data['change_preview']['comment']) | Full name of reply-to author is used in quoted reply. | 625941c0d53ae8145f87a1d0 |
def register(self, operator_name, operator, *args, **kwargs): <NEW_LINE> <INDENT> def operator_wapper(*wrapper_args): <NEW_LINE> <INDENT> return operator(*(wrapper_args + args), **kwargs) <NEW_LINE> <DEDENT> setattr(self, operator_name, types.MethodType(operator_wapper, self)) <NEW_LINE> return self | regeister udf to the class
:param operator_name: string
:param operator: a function, operator itself
:param args: arg of operator
:param kwargs: kwargs of operator
:return: | 625941c04428ac0f6e5ba74d |
def get_user(self, user_id, **kwargs): <NEW_LINE> <INDENT> kwargs['_return_http_data_only'] = True <NEW_LINE> return self.get_user_with_http_info(user_id, **kwargs) | Get a single User # noqa: E501
Retrieves the data (including an invitation code) for a user. Non-admin users can only get themselves. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user(user_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int user_id: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: User
If the method is called asynchronously,
returns the request thread. | 625941c015baa723493c3ed0 |
def setAppId(self, appId): <NEW_LINE> <INDENT> self.appId = appId | :param appId: (Optional) appId | 625941c01f5feb6acb0c4ab0 |
def clearStatusCache(self): <NEW_LINE> <INDENT> pass | Public method to clear the status cache. | 625941c0cad5886f8bd26f36 |
def getTypeById(self, ids): <NEW_LINE> <INDENT> result=self.getInstances() <NEW_LINE> typeIds=[] <NEW_LINE> idMap={} <NEW_LINE> for instance in result['instances']: <NEW_LINE> <INDENT> if 'timeSeriesId' in instance: <NEW_LINE> <INDENT> idMap[instance['timeSeriesId'][0]] = instance <NEW_LINE> <DEDENT> <DEDENT> for ID in ids: <NEW_LINE> <INDENT> if ID in idMap: <NEW_LINE> <INDENT> typeIds.append(idMap[ID]['typeId']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> typeIds.append(None) <NEW_LINE> <DEDENT> <DEDENT> return typeIds | Returns the type ids that correspond to the given timeseries ids.
Args:
ids (list): The timeseries ids for which to get type ids.
Returns:
list: The type ids, None if timeseries ids does not exist in the TSI environment. | 625941c0e1aae11d1e749c11 |
def _request_push_update(self): <NEW_LINE> <INDENT> cookies = self._auth_cookies <NEW_LINE> tags_updated = [] <NEW_LINE> try: <NEW_LINE> <INDENT> payload = CONST.SOAP_CLOUD_PUSH_PAYLOAD <NEW_LINE> headers = CONST.SOAP_CLOUD_PUSH_HEADERS <NEW_LINE> response = requests.post( CONST.REQUEST_CLOUD_PUSH_UPDATE_URL, headers=headers, cookies=cookies, data=payload) <NEW_LINE> if response.status_code >= 400: <NEW_LINE> <INDENT> _LOGGER.debug("Push update response: %s - %s", response.status_code, response.text) <NEW_LINE> raise WirelessTagsConnectionError <NEW_LINE> <DEDENT> root = ElementTree.fromstring(response.content) <NEW_LINE> raw_tags = root.find(CONST.CLOUD_PUSH_XPATH) <NEW_LINE> tags_updated = json.loads(raw_tags.text) <NEW_LINE> <DEDENT> except Exception as error: <NEW_LINE> <INDENT> _LOGGER.error("Failed to fetch push update: %s", error) <NEW_LINE> raise <NEW_LINE> <DEDENT> return tags_updated | Request push update for tags states. | 625941c04e4d5625662d4337 |
def open_image(fn): <NEW_LINE> <INDENT> flags = cv2.IMREAD_UNCHANGED + cv2.IMREAD_ANYDEPTH + cv2.IMREAD_ANYCOLOR <NEW_LINE> if not os.path.exists(fn): <NEW_LINE> <INDENT> raise OSError('No such file or directory: {}'.format(fn)) <NEW_LINE> <DEDENT> elif os.path.isdir(fn): <NEW_LINE> <INDENT> raise OSError('Is a directory: {}'.format(fn)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if str(fn).lower().endswith(('.tif', '.tiff', '.tifff')): <NEW_LINE> <INDENT> im = tifffile.imread(str(fn)).astype(np.float32) / 65535 <NEW_LINE> im = np.moveaxis(im, 0, -1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> im = cv2.imread(str(fn), flags).astype(np.float32) / 255 <NEW_LINE> im = cv2.cvtColor(im, cv2.COLOR_BGR2RGB) <NEW_LINE> <DEDENT> if im is None: raise OSError(f'File not recognized by io.imread: {fn}') <NEW_LINE> return im <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise OSError('Error handling image at: {}'.format(fn)) from e | Opens an image using OpenCV given the file path.
Arguments:
fn: the file path of the image
Returns:
The image in RGB format as numpy array of floats normalized to range between 0.0 - 1.0 | 625941c066673b3332b91fed |
def dumps(self) -> Dict[str, Any]: <NEW_LINE> <INDENT> return self._dumps() | Dumps all the information of the subcatalog into a dict.
Returns:
A dict containing all the information of the subcatalog. | 625941c07cff6e4e811178e2 |
def test_calculate_degree_matrix_2(self): <NEW_LINE> <INDENT> data = [0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0] <NEW_LINE> W = CondensedMatrix(data) <NEW_LINE> self.assertListEqual([ 3., 1., 2., 1., 1.], SpectralTools.calculate_degree_matrix(W)) | Test provided by Nancy-Sarah Yacovzada. | 625941c07d847024c06be216 |
def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, VehicleStatsDecorationsObdOdometerMeters): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict() | Returns true if both objects are equal | 625941c073bcbd0ca4b2bfd3 |
def __load_description(self) -> "FilePreloader": <NEW_LINE> <INDENT> def set_new_dataset(*, append: bool = False, new_index: int = 0) -> None: <NEW_LINE> <INDENT> new_dataset = copy.deepcopy(self.protocol) <NEW_LINE> new_dataset["previous_hash"] = None <NEW_LINE> new_dataset["hash"] = None <NEW_LINE> new_dataset["line_number"] = 1 <NEW_LINE> if append: <NEW_LINE> <INDENT> self.__description.append(new_dataset) <NEW_LINE> <DEDENT> elif self.__description: <NEW_LINE> <INDENT> self.__description[new_index] = new_dataset <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__description = [new_dataset] <NEW_LINE> <DEDENT> <DEDENT> if self.does_preloader_description_file_exists(): <NEW_LINE> <INDENT> dataset = DictHelper().from_json_file(self.__description_file) <NEW_LINE> if not isinstance(dataset, list): <NEW_LINE> <INDENT> dataset = [dataset] <NEW_LINE> <DEDENT> found = False <NEW_LINE> for index, descr in enumerate(dataset): <NEW_LINE> <INDENT> if all(x in descr and descr[x] == y for x, y in self.protocol.items()): <NEW_LINE> <INDENT> self.__matching_index = index <NEW_LINE> found = True <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> self.__description = dataset <NEW_LINE> if not found: <NEW_LINE> <INDENT> set_new_dataset(append=True) <NEW_LINE> self.__matching_index = len(self.__description) - 1 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> set_new_dataset() <NEW_LINE> <DEDENT> return self | Loads the descriptoin into the interface. | 625941c094891a1f4081ba05 |
def getPage(self, pagenum): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.children[pagenum] <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> return None | Get page widget. | 625941c007d97122c41787e3 |
def pack(self): <NEW_LINE> <INDENT> return {'message': six.text_type(self) if six.PY3 else self.__unicode__(), 'args': self.args} | Serialise exception for transfer.
:return: dictionary in format {"message": payload, "args": arguments} | 625941c0b830903b967e986a |
def print_network(self): <NEW_LINE> <INDENT> logging.info("") <NEW_LINE> logging.info(self.network) <NEW_LINE> logging.info("Network accuracy: %.2f%%" % (self.accuracy * 100)) <NEW_LINE> logging.info("Network loss: %.2f%%" % (self.loss)) | Print out a network. | 625941c09b70327d1c4e0d31 |
def __init__(self): <NEW_LINE> <INDENT> self.chat_id = None <NEW_LINE> self.text = None <NEW_LINE> self.first_name = None <NEW_LINE> self.last_name = None <NEW_LINE> self.token_bot = None <NEW_LINE> self.incoming_message_text = None <NEW_LINE> self.outgoing_message_text = None | "
Initializes an instance of the TelegramBot class.
Attributes:
chat_id:str : Chat ID of Telegram chat, used to identify which conversation
outgoing messages should be send to.
text:str : Text of Telegram chat
first_name :str: First name of the user who sent the message
last_name :str: Last name of the user who sent the message
token_bot: str: Bot Token that comes from bot father
incoming_message_text: str: text that comes from user or non-user interacting with bot
outgoing_message: str: text that's going to be answer for replying at certaing command or NLP | 625941c02ae34c7f2600d08e |
def files_ls(self, path, **kwargs): <NEW_LINE> <INDENT> args = (path,) <NEW_LINE> return self._client.request('/files/ls', args, decoder='json', **kwargs) | Lists contents of a directory in the MFS.
.. code-block:: python
>>> c.files_ls("/")
{'Entries': [
{'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0}
]}
Parameters
----------
path : str
Filepath within the MFS
Returns
-------
dict : Directory entries | 625941c066656f66f7cbc107 |
def pos_tag_feature(self, data): <NEW_LINE> <INDENT> result = [] <NEW_LINE> for i, line in enumerate(data): <NEW_LINE> <INDENT> result.append(numpy.zeros(shape=self.NUMBER_OF_POS_TAGS + 1)) <NEW_LINE> if line['phrase']: <NEW_LINE> <INDENT> result[-1][self.TAG_TO_NUM['PHRASE']] = 1 <NEW_LINE> continue <NEW_LINE> <DEDENT> result[-1][int(self.results['POS_ALL'][i][len(line['sent'].split(' '))])] = 1 <NEW_LINE> <DEDENT> return result | Find out the POS tag of the target word
:param data: See the entry for calculate_features
:return: | 625941c01d351010ab855a79 |
def get_rgb_image_from_segmentation_and_label_descriptor(im_segm, ldm, invert_black_white=False, dtype_output=np.int32): <NEW_LINE> <INDENT> labels_in_image = list(np.sort(list(set(im_segm.get_data().flatten())))) <NEW_LINE> if not len(im_segm.shape) == 3: <NEW_LINE> <INDENT> raise IOError('input segmentation must be 3D.') <NEW_LINE> <DEDENT> rgb_image_arr = np.ones(list(im_segm.shape) + [3]) <NEW_LINE> for l in ldm.dict_label_descriptor.keys(): <NEW_LINE> <INDENT> if l not in labels_in_image: <NEW_LINE> <INDENT> msg = 'get_corresponding_rgb_image: Label {} present in the label descriptor and not in ' 'selected image'.format(l) <NEW_LINE> print(msg) <NEW_LINE> <DEDENT> pl = im_segm.get_data() == l <NEW_LINE> rgb_image_arr[pl, :] = ldm.dict_label_descriptor[l][0] <NEW_LINE> <DEDENT> if invert_black_white: <NEW_LINE> <INDENT> pl = im_segm.get_data() == 0 <NEW_LINE> rgb_image_arr[pl, :] = np.array([255, 255, 255]) <NEW_LINE> <DEDENT> return set_new_data(im_segm, rgb_image_arr, new_dtype=dtype_output) | From the labels descriptor and a nibabel segmentation image.
:param im_segm: nibabel segmentation whose labels corresponds to the input labels descriptor.
:param ldm: instance of class label descriptor manager.
:param dtype_output: data type of the output image.
:param invert_black_white: to swap black with white (improving background visualisation).
:return: a 4d image, where at each voxel there is the [r, g, b] vector in the fourth dimension. | 625941c08a349b6b435e80d1 |
def resolve_peer_creds(self): <NEW_LINE> <INDENT> if not IS_UID_GID_RESOLVABLE: <NEW_LINE> <INDENT> raise NotImplementedError( 'UID/GID lookup is unavailable under current platform. ' 'It can only be done under UNIX-like OS ' 'but not under the Google App Engine', ) <NEW_LINE> <DEDENT> elif not self.peercreds_resolve_enabled: <NEW_LINE> <INDENT> raise RuntimeError( 'UID/GID lookup is disabled within this server', ) <NEW_LINE> <DEDENT> user = pwd.getpwuid(self.peer_uid).pw_name <NEW_LINE> group = grp.getgrgid(self.peer_gid).gr_name <NEW_LINE> return user, group | Return the username and group tuple of the peercreds if available.
Raises:
NotImplementedError: in case of unsupported OS
RuntimeError: in case of UID/GID lookup unsupported or disabled | 625941c023849d37ff7b2fed |
def subtract(self, obj): <NEW_LINE> <INDENT> pass | (DiagonalMatrix, Matrix) -> DiagonalMatrix
This is a method that allows the user to subtract matrices.
It will take the diagonal and make it a diagonal matrix.
REQ: both matrices's element must be numeric
REQ: if matrix1 is n by n, then matrix 2 must also be n by n | 625941c099cbb53fe6792b44 |
def filter_machines(self, machines, data): <NEW_LINE> <INDENT> machines, data = self.filter(machines, data) <NEW_LINE> if not machines and not data: <NEW_LINE> <INDENT> raise Http404 <NEW_LINE> <DEDENT> return machines, data | Filter machines using plugin's `filter` method.
Args:
machines (Queryset of machines): Machines to filter.
data (str): Some value that `filter` will use to restrict
queryset by.
Returns:
Tuple of filtered machines, data
Raises:
Http404 if plugin's `filter` method responds with None, None | 625941c038b623060ff0ad4c |
def put_bag(self, stuff): <NEW_LINE> <INDENT> self.bag.append(stuff) | 인사 메서드 | 625941c050812a4eaa59c281 |
def exec_inside_vm(self, name, commands, guest_user=None, guest_pass=None, wait_for_tools=False): <NEW_LINE> <INDENT> if not commands: <NEW_LINE> <INDENT> raise VmCLIException('No command provided for execution!') <NEW_LINE> <DEDENT> vm = self.get_vm_obj(name, fail_missing=True) <NEW_LINE> self.logger.info("Checking if guest's OS has vmtools installed ...") <NEW_LINE> if wait_for_tools: <NEW_LINE> <INDENT> self.wait_for_guest_vmtools(vm) <NEW_LINE> <DEDENT> if vm.guest.toolsStatus in ['toolsNotInstalled', 'toolsNotRunning']: <NEW_LINE> <INDENT> raise VmCLIException("Guest's VMware tools are not installed or not running. Aborting...") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> credentials = vim.vm.guest.NamePasswordAuthentication(username=guest_user, password=guest_pass) <NEW_LINE> for cmd in commands: <NEW_LINE> <INDENT> executable = cmd.split()[0].lstrip() <NEW_LINE> arguments = ' '.join(cmd.split()[1:]) <NEW_LINE> try: <NEW_LINE> <INDENT> self.logger.info('Running command "{} {}" inside guest'.format(executable, arguments)) <NEW_LINE> progspec = vim.vm.guest.ProcessManager.ProgramSpec(programPath=executable, arguments=arguments) <NEW_LINE> self.content.guestOperationsManager.processManager.StartProgramInGuest(vm, credentials, progspec) <NEW_LINE> <DEDENT> except vim.fault.FileNotFound as e: <NEW_LINE> <INDENT> raise VmCLIException(e.msg + '. Try providing absolute path to the binary.') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except vim.fault.InvalidGuestLogin as e: <NEW_LINE> <INDENT> raise VmCLIException(e.msg) | Runs provided command inside guest's operating system. | 625941c0a05bb46b383ec781 |
def load_lab_med(path): <NEW_LINE> <INDENT> dat = np.genfromtxt(path,skip_header=1,delimiter=',') <NEW_LINE> IDs = dat[:,0]; IDs = np.array([int(ID) for ID in IDs]) <NEW_LINE> dat_dict = {} <NEW_LINE> if np.shape(dat)[1]>2: <NEW_LINE> <INDENT> dat[:,2] = np.log(np.maximum(dat[:,2],.01)) <NEW_LINE> mean = np.nanmean(dat[:,2]) <NEW_LINE> std = np.nanstd(dat[:,2]) <NEW_LINE> dat[:,2] = (dat[:,2]-mean)/std <NEW_LINE> dat_dict['mean'] = mean <NEW_LINE> dat_dict['std'] = std <NEW_LINE> <DEDENT> start = 0 <NEW_LINE> cur_id = IDs[start] <NEW_LINE> for i in xrange(len(IDs)): <NEW_LINE> <INDENT> if IDs[i] != cur_id: <NEW_LINE> <INDENT> dat_dict[cur_id] = dat[start:i,1:] <NEW_LINE> cur_id = IDs[i] <NEW_LINE> start = i <NEW_LINE> <DEDENT> <DEDENT> dat_dict[cur_id] = dat[start:,1:] <NEW_LINE> return dat_dict | helper; load a single lab/med (generic, assuming same format)
NOTE: this implicitly assumes that the data files are already
sorted by encounter so that a single quick pass over the
file suffices. we don't assume for a given encounter that
labs are chronological; will sort when Subjects are created. | 625941c0435de62698dfdba9 |
def check_balancing_schedule(balancing_schedule): <NEW_LINE> <INDENT> if callable(balancing_schedule): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return_value = balancing_schedule({}, {}, 0, 0) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> e_args = list(e.args) <NEW_LINE> e_args[0] += BALANCING_SCHEDULE_INFO <NEW_LINE> e.args = tuple(e_args) <NEW_LINE> raise e <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not isinstance(return_value, dict): <NEW_LINE> <INDENT> raise TypeError( f" The self-defined `balancing_schedule` must return a `dict`," + f" got {type(return_value)}" + BALANCING_SCHEDULE_INFO ) <NEW_LINE> <DEDENT> <DEDENT> return balancing_schedule <NEW_LINE> <DEDENT> if balancing_schedule in BALANCING_KIND: <NEW_LINE> <INDENT> return BALANCING_KIND_MAPPING[balancing_schedule] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> balancing_schedule_info = balancing_schedule if isinstance(balancing_schedule, str) else type(balancing_schedule) <NEW_LINE> raise TypeError( f"'balancing_schedule' should be one of {BALANCING_KIND} or `callable`," f" got {balancing_schedule_info}." ) | Check the `balancing_schedule` parameter. | 625941c0097d151d1a222db9 |
def show_interfaces(resolve_mac=True): <NEW_LINE> <INDENT> return ifaces.show(resolve_mac) | Print list of available network interfaces | 625941c0dd821e528d63b108 |
def test_email_or_phone_in_form(self): <NEW_LINE> <INDENT> form = self.make_validated_form(email='', phone='') <NEW_LINE> self.assertListEqual(['__all__'], list(form.errors)) | Phone or E-mail should be informed | 625941c0c4546d3d9de7298f |
@instance_replacer(type(None)) <NEW_LINE> def none_filter(key, value): <NEW_LINE> <INDENT> return {} | Removes None values from a dictionary.
e.g. {"number": 1, "none": None} becomes {"number": 1} | 625941c067a9b606de4a7e18 |
def on_touch_up(self, touch): <NEW_LINE> <INDENT> if self.disabled: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for child in self.children[:]: <NEW_LINE> <INDENT> if child.dispatch('on_touch_up', touch): <NEW_LINE> <INDENT> return True | Receive a touch up event. The touch is in parent coordinates.
See :meth:`on_touch_down` for more information. | 625941c029b78933be1e560d |
def __init__(self, id, Fx=0, Fz=0, Ty=0, ux=0, uz=0, phi_y=0, vertex=None): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.Fx = Fx <NEW_LINE> self.Fz = Fz <NEW_LINE> self.Ty = Ty <NEW_LINE> self.ux = ux <NEW_LINE> self.uz = uz <NEW_LINE> self.phi_y = phi_y <NEW_LINE> self.vertex = vertex <NEW_LINE> self.hinge = False <NEW_LINE> self.elements = {} | :param id: ID of the node, integer
:param Fx: Value of Fx
:param Fz: Value of Fz
:param Ty: Value of Ty
:param ux: Value of ux
:param uz: Value of uz
:param phi_y: Value of phi
:param vertex: Point object | 625941c03cc13d1c6d3c72d8 |
def parse(self, fo: Iterable[str]) -> None: <NEW_LINE> <INDENT> for i, line in enumerate(fo): <NEW_LINE> <INDENT> stripped = line.lstrip() <NEW_LINE> record: Union[HBARecord, HBAComment] <NEW_LINE> if not stripped or stripped.startswith("#"): <NEW_LINE> <INDENT> record = HBAComment(line.replace(os.linesep, "")) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> record = HBARecord.parse(line) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise ParseError(1 + i, line, str(e)) <NEW_LINE> <DEDENT> <DEDENT> self.lines.append(record) | Parse records and comments from file object
:param fo: An iterable returning lines | 625941c076e4537e8c3515ce |
def nodes_depth_first(self,root): <NEW_LINE> <INDENT> assert root in self.edgelists, "Must designate a root node in the graph" <NEW_LINE> yield from self._nodes_dfs(root, set()) | Iterate nodes in depth-first order starting at root | 625941c0de87d2750b85fced |
def __release_info(): <NEW_LINE> <INDENT> ret = [None, None, None] <NEW_LINE> try: <NEW_LINE> <INDENT> with open(os.path.join(current.request.folder, "RELEASE_INFO"), "rt") as file: <NEW_LINE> <INDENT> for i, v in enumerate(file.readlines()): <NEW_LINE> <INDENT> if i == 2: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> ret[i] = datetime.fromisoformat(v.strip()) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> ret[i] = v.strip() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> except FileNotFoundError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return ret | Return the release number and name, as stored in the RELEASE_INFO file, which is
created automatically when generating the website | 625941c0956e5f7376d70dcc |
@batch.route("users/mark-for-deletion", methods=["DELETE"]) <NEW_LINE> def mark_for_deletion_accounts(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with transactional_session() as session: <NEW_LINE> <INDENT> logger.info("Scheduler processing Accounts not accessed in the last 36 months ") <NEW_LINE> _since_36_months = datetime.utcnow() - timedelta(days=1095) <NEW_LINE> _last_login_before_36_months = session.query(User).filter( and_(User.last_login_date != None, User.last_login_date < _since_36_months) ) <NEW_LINE> _last_login_before_36_months.update({"mark_for_deletion": True}) <NEW_LINE> _account_created_before_36_months = session.query(User).filter( and_(User.last_login_date == None, User.account_creation_date < _since_36_months) ) <NEW_LINE> _account_created_before_36_months.update({"mark_for_deletion": True}) <NEW_LINE> logger.info("Scheduler finished processing Accounts not accessed in last 36 months") <NEW_LINE> logger.info("Scheduler processing Account not activated for more than 80 hrs") <NEW_LINE> _since_80_hrs = datetime.utcnow() - timedelta(hours=80) <NEW_LINE> _account_not_activated_80_hrs = session.query(User).filter( and_(User.account_verified == False, User.account_creation_date < _since_80_hrs) ) <NEW_LINE> _account_not_activated_80_hrs.update({"mark_for_deletion": True}) <NEW_LINE> logger.info("Scheduler finished* processing Account not activated for more than 80 hrs") <NEW_LINE> <DEDENT> <DEDENT> except SQLAlchemyError: <NEW_LINE> <INDENT> logger.exception("Unable to perform scheduler mark for delete operation") <NEW_LINE> return make_response( jsonify( { "title": "Scheduler operation for mark for delete users error", "detail": "Unable to perform delete operation for accounts not accessed in last " "36 months", } ), 500, ) <NEW_LINE> <DEDENT> return "", 204 | Marks user accounts for deletion which has not been accessed in the last 36 months.
To be called from scheduler | 625941c056ac1b37e6264131 |
def sample(self, randseed=None): <NEW_LINE> <INDENT> self.ysample = odict() <NEW_LINE> for key in self.keys(): <NEW_LINE> <INDENT> self.ysample[key] = self.prior[key].sample(randseed=randseed)[0] <NEW_LINE> <DEDENT> return None | Recalculate ysample | 625941c0796e427e537b0521 |
def vehicle_group_within(self, point, distance) -> List[Group]: <NEW_LINE> <INDENT> return [x for x in self.vehicle_group if x.position.distance_to_point(point) < distance] | Return all vehicle groups within the radius of a given point.
Args:
point(mapping.Point): Center of circle
distance: Distance to the point
Returns:
Sequence of vehicle groups within range. | 625941c07b180e01f3dc475f |
def elemento(self, pos): <NEW_LINE> <INDENT> if not self._datos or pos < 0 or pos > self._tamanio: <NEW_LINE> <INDENT> raise IndexError <NEW_LINE> <DEDENT> return self._datos[pos] | Retorna el elemento de la posición indicada.
Args:
pos (int): posición en la lista.
Returns:
Object: elemento del nodo. | 625941c0ec188e330fd5a701 |
def _construct_host_file() -> str: <NEW_LINE> <INDENT> hostfile=os.path.join(os.environ['SM_INPUT_CONFIG_DIR'],'hostfile') <NEW_LINE> if os.environ.get('SM_HOSTS', None) is not None: <NEW_LINE> <INDENT> hosts = json.loads(os.environ['SM_HOSTS']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> hosts = ["localhost"] <NEW_LINE> <DEDENT> print(f"list of hosts:{hosts} used to construct hostfile config") <NEW_LINE> if len(hosts)==1: <NEW_LINE> <INDENT> logger.debug("Single node training, skipping hostfile creation.") <NEW_LINE> return "" <NEW_LINE> <DEDENT> with open(hostfile,'a') as f: <NEW_LINE> <INDENT> for h in hosts: <NEW_LINE> <INDENT> f.write(f"{h} slots={os.environ['SM_NUM_GPUS']}\n") <NEW_LINE> <DEDENT> <DEDENT> logger.debug(f"Hostfile {hostfile} has been saved.") <NEW_LINE> return hostfile | constructs MPI-compatible hostfile with all nodes
and number of GPU devices | 625941c030c21e258bdfa3f9 |
def getItemsInSphere(self, sphere, t = DEFAULT_TOLERANCE): <NEW_LINE> <INDENT> surely = set() <NEW_LINE> maybe = set() <NEW_LINE> r = self.checkSphere(sphere, t) <NEW_LINE> if (r == 1): <NEW_LINE> <INDENT> surely.add(self) <NEW_LINE> return [surely, maybe] <NEW_LINE> <DEDENT> elif (r == -1): <NEW_LINE> <INDENT> return [surely, maybe] <NEW_LINE> <DEDENT> if (len(self.items) == 0): <NEW_LINE> <INDENT> raise BoundingBoxError("Bounding box used as a leaf.") <NEW_LINE> <DEDENT> for it in self.items: <NEW_LINE> <INDENT> r = it.checkSphere(sphere, t) <NEW_LINE> if (r == 1): <NEW_LINE> <INDENT> surely.add(it) <NEW_LINE> <DEDENT> elif (r == 0): <NEW_LINE> <INDENT> if (isinstance(it, BoundingBox)): <NEW_LINE> <INDENT> ts = it.getItemsInSphere(sphere, t) <NEW_LINE> surely.update(ts[0]) <NEW_LINE> maybe.update(ts[1]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> maybe.add(it) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return [surely, maybe] | Get items in sphere.
Get the items which are inside the specified sphere. Returns two sets
of items, the 'surely' and 'maybe' sets. The 'surely' set contains
items which are surely inside the specified sphere. These can be
leaves or bounding boxes containing other items which are inside the
sphere. The 'maybe' set contains only leaf items with bounding boxes
that intersect the sphere, so the caller has to determine if the
items themselves are inside the sphere. | 625941c0656771135c3eb7ca |
def hits(G, max_iter=100, tol=1.0e-8, nstart=None, normalized=True): <NEW_LINE> <INDENT> import numpy as np <NEW_LINE> import scipy as sp <NEW_LINE> import scipy.sparse.linalg <NEW_LINE> if len(G) == 0: <NEW_LINE> <INDENT> return {}, {} <NEW_LINE> <DEDENT> A = nx.adjacency_matrix(G, nodelist=list(G), dtype=float) <NEW_LINE> if nstart is None: <NEW_LINE> <INDENT> _, _, vt = sp.sparse.linalg.svds(A, k=1, maxiter=max_iter, tol=tol) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> nstart = np.array(list(nstart.values())) <NEW_LINE> _, _, vt = sp.sparse.linalg.svds(A, k=1, v0=nstart, maxiter=max_iter, tol=tol) <NEW_LINE> <DEDENT> a = vt.flatten().real <NEW_LINE> h = A @ a <NEW_LINE> if normalized: <NEW_LINE> <INDENT> h /= h.sum() <NEW_LINE> a /= a.sum() <NEW_LINE> <DEDENT> hubs = dict(zip(G, map(float, h))) <NEW_LINE> authorities = dict(zip(G, map(float, a))) <NEW_LINE> return hubs, authorities | Returns HITS hubs and authorities values for nodes.
The HITS algorithm computes two numbers for a node.
Authorities estimates the node value based on the incoming links.
Hubs estimates the node value based on outgoing links.
Parameters
----------
G : graph
A NetworkX graph
max_iter : integer, optional
Maximum number of iterations in power method.
tol : float, optional
Error tolerance used to check convergence in power method iteration.
nstart : dictionary, optional
Starting value of each node for power method iteration.
normalized : bool (default=True)
Normalize results by the sum of all of the values.
Returns
-------
(hubs,authorities) : two-tuple of dictionaries
Two dictionaries keyed by node containing the hub and authority
values.
Raises
------
PowerIterationFailedConvergence
If the algorithm fails to converge to the specified tolerance
within the specified number of iterations of the power iteration
method.
Examples
--------
>>> G = nx.path_graph(4)
>>> h, a = nx.hits(G)
Notes
-----
The eigenvector calculation is done by the power iteration method
and has no guarantee of convergence. The iteration will stop
after max_iter iterations or an error tolerance of
number_of_nodes(G)*tol has been reached.
The HITS algorithm was designed for directed graphs but this
algorithm does not check if the input graph is directed and will
execute on undirected graphs.
References
----------
.. [1] A. Langville and C. Meyer,
"A survey of eigenvector methods of web information retrieval."
http://citeseer.ist.psu.edu/713792.html
.. [2] Jon Kleinberg,
Authoritative sources in a hyperlinked environment
Journal of the ACM 46 (5): 604-32, 1999.
doi:10.1145/324133.324140.
http://www.cs.cornell.edu/home/kleinber/auth.pdf. | 625941c07b180e01f3dc4760 |
def contains(self, string): <NEW_LINE> <INDENT> node = self.root <NEW_LINE> for i in string: <NEW_LINE> <INDENT> if node.has_child(i): <NEW_LINE> <INDENT> node = node.get_child(i) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return node.is_terminal() <NEW_LINE> <DEDENT> <DEDENT> return node.is_terminal() | Return True if this prefix tree contains the given string. | 625941c050485f2cf553ccf6 |
def doSubscribe(self, request): <NEW_LINE> <INDENT> if isinstance(request.topics, str): <NEW_LINE> <INDENT> request.topics = [(request.topics, request.qos)] <NEW_LINE> <DEDENT> elif isinstance(request.topics, tuple): <NEW_LINE> <INDENT> request.topics = [(request.topics[0], request.topics[1])] <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self._checkSubscribe(request) <NEW_LINE> request.msgId = self.factory.makeId() <NEW_LINE> request.encode() <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> return defer.fail(e) <NEW_LINE> <DEDENT> request.interval = Interval(initial=self._initialT) <NEW_LINE> request.deferred = defer.Deferred() <NEW_LINE> request.deferred.msgId = request.msgId <NEW_LINE> self.factory.windowSubscribe[self.addr][request.msgId] = request <NEW_LINE> self._retrySubscribe(request, False) <NEW_LINE> return request.deferred | Send a SUBSCRIBE control packet. | 625941c04a966d76dd550f6b |
@main.command <NEW_LINE> def plot_kernel (*scales): <NEW_LINE> <INDENT> from matplotlib import pyplot <NEW_LINE> T = arange(-2,2,0.01) <NEW_LINE> K = 0 * T <NEW_LINE> for scale in scales: <NEW_LINE> <INDENT> scale = float(scale) <NEW_LINE> K += quad_kernel(T / scale) / scale <NEW_LINE> <DEDENT> pyplot.plot(T,K) <NEW_LINE> pyplot.show() | Plots sum of quadratic kernels at specified scales | 625941c0167d2b6e31218af4 |
def validate_pair(resi1, resi2): <NEW_LINE> <INDENT> if resi1.short_abbrev =='.' or resi2.short_abbrev =='.' or not is_ribose_complete(resi1) or not is_ribose_complete(resi2): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> nn_dist = resi1['N*'] - resi2['N*'] <NEW_LINE> if nn_dist > NN_CUTOFF: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True | Checks if the two pairs are possibly paired. | 625941c0f548e778e58cd4da |
def SetMarkerImage(self, *args): <NEW_LINE> <INDENT> return _itkReconstructionByDilationImageFilterPython.itkReconstructionByDilationImageFilterIUC2IUC2_Superclass_SetMarkerImage(self, *args) | SetMarkerImage(self, itkImageUC2 arg0) | 625941c0e5267d203edcdbfd |
def _predict_spec(tower_specs, aggregation_device): <NEW_LINE> <INDENT> estimator_spec = tower_specs[0]._asdict() <NEW_LINE> estimator_spec['mode'] = model_fn_lib.ModeKeys.PREDICT <NEW_LINE> with ops_lib.device(aggregation_device): <NEW_LINE> <INDENT> estimator_spec['predictions'] = _concat_tensor_dicts( *[tower_spec.predictions for tower_spec in tower_specs]) <NEW_LINE> export_outputs_dict = _dict_concat( *[tower_spec.export_outputs for tower_spec in tower_specs]) <NEW_LINE> export_outputs = {} <NEW_LINE> for name, export_output_list in six.iteritems(export_outputs_dict): <NEW_LINE> <INDENT> if isinstance(export_output_list[0], export_output_lib.PredictOutput): <NEW_LINE> <INDENT> export_outputs[name] = export_output_lib.PredictOutput( outputs=_concat_tensor_dicts(*[ export_output.outputs for export_output in export_output_list ])) <NEW_LINE> <DEDENT> elif isinstance(export_output_list[0], export_output_lib.RegressionOutput): <NEW_LINE> <INDENT> export_outputs[name] = export_output_lib.RegressionOutput( value=array_ops.concat( [export_output.value for export_output in export_output_list], axis=0)) <NEW_LINE> <DEDENT> elif isinstance(export_output_list[0], export_output_lib.ClassificationOutput): <NEW_LINE> <INDENT> scores = None <NEW_LINE> if export_output_list[0].scores is not None: <NEW_LINE> <INDENT> scores = array_ops.concat( [export_output.scores for export_output in export_output_list], axis=0) <NEW_LINE> <DEDENT> classes = None <NEW_LINE> if export_output_list[0].classes is not None: <NEW_LINE> <INDENT> classes = array_ops.stack( [export_output.classes for export_output in export_output_list], axis=0) <NEW_LINE> <DEDENT> export_outputs[name] = export_output_lib.ClassificationOutput( scores=scores, classes=classes) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> estimator_spec['export_outputs'] = export_outputs <NEW_LINE> return model_fn_lib.EstimatorSpec(**estimator_spec) | Populate replicated EstimatorSpec for `GraphKeys.PREDICT`. | 625941c0b830903b967e986b |
def make_instance(self, include_optional): <NEW_LINE> <INDENT> if include_optional : <NEW_LINE> <INDENT> return AccountAdditionalCost( id = 56, created = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), account_id = 56, name = '0', title = '0', description = '0', subscribed_applications_ids = [ 56 ] ) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> return AccountAdditionalCost( id = 56, created = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), account_id = 56, name = '0', title = '0', description = '0', ) | Test AccountAdditionalCost
include_option is a boolean, when False only required
params are included, when True both required and
optional params are included | 625941c0f8510a7c17cf9659 |
def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(AnnouncementUpdateView, self).get_context_data(**kwargs) <NEW_LINE> context['active_page'] = 'announcement-update' <NEW_LINE> return context | add active-page string to context
:param kwargs:
:return:context | 625941c015baa723493c3ed1 |
def test_windage_mast_with_sail_stbd_upwind(): <NEW_LINE> <INDENT> force = windage_mast_with_sail(tws=10., twa=45., boatspeed=2., heel_angle=10., trim_angle=0., mast_x=0.5, mast_z_bottom=0.07, mast_z_top=1.7, mast_front_area=0.017, mast_side_area=0.017) <NEW_LINE> assert force.fx < 0 <NEW_LINE> assert force.fy > 0 <NEW_LINE> assert force.py > 0 <NEW_LINE> force_neg_heel = windage_mast_with_sail(tws=10., twa=45., boatspeed=2., heel_angle=-10., trim_angle=0., mast_x=0.5, mast_z_bottom=0.07, mast_z_top=1.7, mast_front_area=0.017, mast_side_area=0.017) <NEW_LINE> assert force_neg_heel.py < 0 | Upwind on starboard tack | 625941c0925a0f43d2549dd3 |
def p_function_arglist_closed_common__EXPECT_SCM__function_arglist_closed_optional__Chr45__UNSIGNED(self, p): <NEW_LINE> <INDENT> p[0] = p[2] + [-1 * p[4]] | function_arglist_closed_common : EXPECT_SCM function_arglist_closed_optional '-' UNSIGNED | 625941c056b00c62f0f145b6 |
@attr('codegen-independent') <NEW_LINE> def test_construction(): <NEW_LINE> <INDENT> q = 500 * ms <NEW_LINE> assert_quantity(q, 0.5, second) <NEW_LINE> q = np.float64(500) * ms <NEW_LINE> assert_quantity(q, 0.5, second) <NEW_LINE> q = np.array(500) * ms <NEW_LINE> assert_quantity(q, 0.5, second) <NEW_LINE> q = np.array([500, 1000]) * ms <NEW_LINE> assert_quantity(q, np.array([0.5, 1]), second) <NEW_LINE> q = Quantity(500) <NEW_LINE> assert_quantity(q, 500, 1) <NEW_LINE> q = Quantity(500, dim=second.dim) <NEW_LINE> assert_quantity(q, 500, second) <NEW_LINE> q = Quantity([0.5, 1], dim=second.dim) <NEW_LINE> assert_quantity(q, np.array([0.5, 1]), second) <NEW_LINE> q = Quantity(np.array([0.5, 1]), dim=second.dim) <NEW_LINE> assert_quantity(q, np.array([0.5, 1]), second) <NEW_LINE> q = Quantity([500 * ms, 1 * second]) <NEW_LINE> assert_quantity(q, np.array([0.5, 1]), second) <NEW_LINE> q = Quantity.with_dimensions(np.array([0.5, 1]), second=1) <NEW_LINE> assert_quantity(q, np.array([0.5, 1]), second) <NEW_LINE> q = [0.5, 1] * second <NEW_LINE> assert_quantity(q, np.array([0.5, 1]), second) <NEW_LINE> q = Quantity([1, 2, 3]) <NEW_LINE> assert_quantity(q, np.array([1, 2, 3]), Unit(1)) <NEW_LINE> q = Quantity(np.array([1, 2, 3])) <NEW_LINE> assert_quantity(q, np.array([1, 2, 3]), Unit(1)) <NEW_LINE> q = Quantity([]) <NEW_LINE> assert_quantity(q, np.array([]), Unit(1)) <NEW_LINE> q1 = Quantity.with_dimensions(np.array([0.5, 1]), second=1) <NEW_LINE> q2 = Quantity(q1) <NEW_LINE> assert_quantity(q2, np.asarray(q1), q1) <NEW_LINE> q2[0] = 3 * second <NEW_LINE> assert_equal(q1[0], 3*second) <NEW_LINE> q1 = Quantity.with_dimensions(np.array([0.5, 1]), second=1) <NEW_LINE> q2 = Quantity(q1, copy=True) <NEW_LINE> assert_quantity(q2, np.asarray(q1), q1) <NEW_LINE> q2[0] = 3 * second <NEW_LINE> assert_equal(q1[0], 0.5*second) <NEW_LINE> assert_raises(TypeError, lambda: Quantity([500 * ms, 1])) <NEW_LINE> assert_raises(TypeError, lambda: Quantity(['some', 'nonsense'])) <NEW_LINE> 
assert_raises(DimensionMismatchError, lambda: Quantity([500 * ms, 1 * volt])) | Test the construction of quantity objects | 625941c05166f23b2e1a50b7 |
def _get_url_for_timerange(self, timerange, **kwargs): <NEW_LINE> <INDENT> url = rhessi.get_obssum_filename(timerange) <NEW_LINE> return [url] | Returns a URL to the RHESSI data for the specified date range.
Parameters
----------
args : TimeRange, datetimes, date strings
Date range should be specified using a TimeRange, or start
and end dates at datetime instances or date strings. | 625941c0fbf16365ca6f611d |
def connect(self, timeout=60): <NEW_LINE> <INDENT> self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) <NEW_LINE> timeout_start = time.time() <NEW_LINE> while time.time() < timeout_start + timeout: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.client.connect( hostname=self.hostname, username=self.username, password=self.password, ) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return False | create connection | 625941c007f4c71912b113de |
def _MolsToGridImage(mols, molsPerRow=3, subImgSize=(200, 200), legends=None, highlightAtomLists=None, highlightBondLists=None, drawOptions=None, returnPNG=False, **kwargs): <NEW_LINE> <INDENT> if legends is None: <NEW_LINE> <INDENT> legends = [''] * len(mols) <NEW_LINE> <DEDENT> nRows = len(mols) // molsPerRow <NEW_LINE> if len(mols) % molsPerRow: <NEW_LINE> <INDENT> nRows += 1 <NEW_LINE> <DEDENT> if not hasattr(rdMolDraw2D, 'MolDraw2DCairo'): <NEW_LINE> <INDENT> from PIL import Image <NEW_LINE> res = Image.new("RGBA", (molsPerRow * subImgSize[0], nRows * subImgSize[1]), (255, 255, 255, 0)) <NEW_LINE> for i, mol in enumerate(mols): <NEW_LINE> <INDENT> row = i // molsPerRow <NEW_LINE> col = i % molsPerRow <NEW_LINE> highlights = None <NEW_LINE> if highlightAtomLists and highlightAtomLists[i]: <NEW_LINE> <INDENT> highlights = highlightAtomLists[i] <NEW_LINE> <DEDENT> if mol is not None: <NEW_LINE> <INDENT> img = _moltoimg(mol, subImgSize, highlights, legends[i], **kwargs) <NEW_LINE> res.paste(img, (col * subImgSize[0], row * subImgSize[1])) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> fullSize = (molsPerRow * subImgSize[0], nRows * subImgSize[1]) <NEW_LINE> d2d = rdMolDraw2D.MolDraw2DCairo(fullSize[0], fullSize[1], subImgSize[0], subImgSize[1]) <NEW_LINE> if drawOptions is not None: <NEW_LINE> <INDENT> d2d.SetDrawOptions(drawOptions) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dops = d2d.drawOptions() <NEW_LINE> for k, v in list(kwargs.items()): <NEW_LINE> <INDENT> if hasattr(dops, k): <NEW_LINE> <INDENT> setattr(dops, k, v) <NEW_LINE> del kwargs[k] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> d2d.DrawMolecules(list(mols), legends=legends or None, highlightAtoms=highlightAtomLists, highlightBonds=highlightBondLists, **kwargs) <NEW_LINE> d2d.FinishDrawing() <NEW_LINE> if not returnPNG: <NEW_LINE> <INDENT> res = _drawerToImage(d2d) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> res = d2d.GetDrawingText() <NEW_LINE> <DEDENT> <DEDENT> return res | returns a 
PIL Image of the grid
| 625941c063b5f9789fde7043 |
def train(self, data): <NEW_LINE> <INDENT> results = [] <NEW_LINE> weights = [] <NEW_LINE> for i in xrange(1, self.iter_n): <NEW_LINE> <INDENT> hll = BaseHyperLogLog(self.calcHLLConstant(i), i, self.hashFunc) <NEW_LINE> for d in data: <NEW_LINE> <INDENT> hll.update(d) <NEW_LINE> <DEDENT> k = hll.calc_cardinality() <NEW_LINE> w = self.getWeight(k) <NEW_LINE> if i < (self.iter_n / 2): <NEW_LINE> <INDENT> results.append(w * k) <NEW_LINE> weights.append(w) <NEW_LINE> <DEDENT> <DEDENT> self.k = sum(results) / sum(weights) | Serial training on each HLL estimation
:param data:
:return: | 625941c026238365f5f0edc9 |
def flattenList(l): <NEW_LINE> <INDENT> return functools.reduce(operator.concat, l) | flatten a list by one layer | 625941c0fb3f5b602dac35ef |
def show_process(self, i, epoch_images, loss_, acc_): <NEW_LINE> <INDENT> if i is not None: <NEW_LINE> <INDENT> self.i = i <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.i += 1 <NEW_LINE> <DEDENT> num_arrow = int(self.i * self.max_arrow / self.max_steps) <NEW_LINE> num_line = self.max_arrow - num_arrow <NEW_LINE> percent = self.i * 100.0 / self.max_steps <NEW_LINE> process_bar_ = '\r' + '{0}/{1} '.format(i, epoch_images) + '[' + '>' * num_arrow + ' ' * num_line + ']' + ' - loss:{:.2f}\t'.format(float(loss_)) + ' - acc:{:.2f}'.format(float(acc_)) <NEW_LINE> sys.stdout.write(process_bar_) <NEW_LINE> sys.stdout.flush() <NEW_LINE> if self.i >= self.max_steps: <NEW_LINE> <INDENT> self.close() | core method, show the process bar.
Note:
1. calculate how many >
2. calculate how may -
3. calculate the percentage: "
" must be at left ,means that begin form left
4. sys.stdout.write(process_bar) and sys.stdout.flush() is to print to terminal
:param
i: inter, the step right now, init set to 0, total step== num of images for each epoch
epoch_i: inter, show the current processed epoch
epoch_images: inter, the num of total images for each epoch
loss_train, acc_train: loss and acc for current calculation batch
loss_vel, acc_vel: test at validate dataset, test at given iter, eg: each 1000 batch iter
:return: no return | 625941c0f9cc0f698b14055b |
def register_certificate_alg(algorithm, key_handler, cert_handler): <NEW_LINE> <INDENT> _certificate_alg_map[algorithm] = (key_handler, cert_handler) <NEW_LINE> _certificate_algs.append(algorithm) | Register a new certificate algorithm | 625941c0236d856c2ad44735 |
def glInitTextureFloatATI(): <NEW_LINE> <INDENT> return extensions.hasGLExtension( EXTENSION_NAME ) | Return boolean indicating whether this extension is available | 625941c031939e2706e4cdcb |
def setUp(self): <NEW_LINE> <INDENT> @checks <NEW_LINE> def foo(data): <NEW_LINE> <INDENT> return data <NEW_LINE> <DEDENT> self.foo = foo | self.data_c: Complete dataset/No missing values
self.data_m: Incommplete dataset/Has missing values | 625941c007f4c71912b113df |
def _languages_to_pull(self, languages, files, lang_map, stats, force): <NEW_LINE> <INDENT> if not languages: <NEW_LINE> <INDENT> pull_languages = set([]) <NEW_LINE> pull_languages |= set(files.keys()) <NEW_LINE> mapped_files = [] <NEW_LINE> for lang in pull_languages: <NEW_LINE> <INDENT> if lang in lang_map.flip: <NEW_LINE> <INDENT> mapped_files.append(lang_map.flip[lang]) <NEW_LINE> <DEDENT> <DEDENT> pull_languages -= set(lang_map.flip.keys()) <NEW_LINE> pull_languages |= set(mapped_files) <NEW_LINE> return (pull_languages, set([])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pull_languages = [] <NEW_LINE> new_translations = [] <NEW_LINE> f_langs = list(files.keys()) <NEW_LINE> for l in languages: <NEW_LINE> <INDENT> if l not in f_langs and not ( l in lang_map and lang_map[l] in f_langs): <NEW_LINE> <INDENT> if self._should_add_translation(l, stats, force): <NEW_LINE> <INDENT> new_translations.append(l) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> pull_languages.append(lang_map[l] if l in list(lang_map.keys()) else l) <NEW_LINE> <DEDENT> <DEDENT> return (set(pull_languages), set(new_translations)) | Get a set of languages to pull.
Args:
languages: A list of languages the user selected in command.
files: A dictionary of current local translation files.
Returns:
A tuple of a set of existing languages and new translations. | 625941c00fa83653e4656f1a |
def _pay(self, payable_views): <NEW_LINE> <INDENT> assert self._can_pay(payable_views) <NEW_LINE> purchase_order = payable_views[0].purchase <NEW_LINE> if (purchase_order and api.sysparam(self.store).BLOCK_INCOMPLETE_PURCHASE_PAYMENTS and not purchase_order.status == PurchaseOrder.ORDER_CLOSED): <NEW_LINE> <INDENT> return warning(_("Can't confirm the payment if the purchase " "is not completely received yet.")) <NEW_LINE> <DEDENT> with api.trans() as store: <NEW_LINE> <INDENT> payments = [store.fetch(view.payment) for view in payable_views] <NEW_LINE> run_dialog(PurchasePaymentConfirmSlave, self, store, payments=payments) <NEW_LINE> <DEDENT> if store.committed: <NEW_LINE> <INDENT> self.refresh() <NEW_LINE> <DEDENT> self._update_widgets() | Pay a list of items from a payable_views, note that
the list of payable_views must reference the same order
@param payables_views: a list of payable_views | 625941c00a366e3fb873e776 |
def _simulate(self, node): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> if node.is_terminal(): <NEW_LINE> <INDENT> rewards = node.rewards() <NEW_LINE> return rewards <NEW_LINE> <DEDENT> node = node.find_random_child() | Returns the reward for a random simulation (to completion) of `node` | 625941c030bbd722463cbd22 |
def serialize(self, buff): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> _x = self <NEW_LINE> buff.write(_struct_2q.pack(_x.A, _x.B)) <NEW_LINE> <DEDENT> except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x)))) <NEW_LINE> except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x)))) | serialize message into buffer
:param buff: buffer, ``StringIO`` | 625941c091f36d47f21ac44e |
def _post_connect(self): <NEW_LINE> <INDENT> pass | Mixins should implement this method to do post-connection config. | 625941c0be8e80087fb20ba4 |
def get_topic_by_url_fragment(url_fragment): <NEW_LINE> <INDENT> topic_model = ( topic_models.TopicModel.get_by_url_fragment(url_fragment)) <NEW_LINE> if topic_model is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> topic = get_topic_from_model(topic_model) <NEW_LINE> return topic | Returns a domain object representing a topic.
Args:
url_fragment: str. The url fragment of the topic.
Returns:
Topic or None. The domain object representing a topic with the
given id, or None if it does not exist. | 625941c0cc40096d615958af |
def _parse_player_data(self, player_data): <NEW_LINE> <INDENT> for field in self.__dict__: <NEW_LINE> <INDENT> short_field = str(field)[1:] <NEW_LINE> if short_field == 'player_id' or short_field == 'index' or short_field == 'most_recent_season' or short_field == 'season' or short_field == 'name' or short_field == 'weight' or short_field == 'height' or short_field == 'nationality' or short_field == 'birth_date' or short_field == 'contract': <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> field_stats = [] <NEW_LINE> if type(player_data) == dict: <NEW_LINE> <INDENT> for year, data in player_data.items(): <NEW_LINE> <INDENT> stats = pq(data['data']) <NEW_LINE> value = self._parse_value(stats, short_field) <NEW_LINE> field_stats.append(value) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> stats = pq(player_data) <NEW_LINE> value = self._parse_value(stats, short_field) <NEW_LINE> field_stats.append(value) <NEW_LINE> <DEDENT> setattr(self, field, field_stats) | Parse all player information and set attributes.
Iterate through each class attribute to parse the data from the HTML
page and set the attribute value with the result.
Parameters
----------
player_data : dictionary or string
If this class is inherited from the ``Player`` class, player_data
will be a dictionary where each key is a string representing the
season and each value contains the HTML data as a string. If this
class is inherited from the ``BoxscorePlayer`` class, player_data
will be a string representing the player's game statistics in HTML
format. | 625941c04f88993c3716bfc8 |
def receive_sensor(): <NEW_LINE> <INDENT> global sensor_distance <NEW_LINE> global sensor_distances <NEW_LINE> global is_object_close <NEW_LINE> with serial.Serial('/dev/ttyUSB5') as ser: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> sensor_distance = serial_distance(ser) <NEW_LINE> sensor_distances.append(1.0 if sensor_distance < 100.0 else 0.0) <NEW_LINE> sensor_distances = sensor_distances[-10:] <NEW_LINE> if np.mean(sensor_distances) >= 0.6: <NEW_LINE> <INDENT> is_object_close = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> is_object_close = False <NEW_LINE> <DEDENT> if sthread.check_terminate_thread(): <NEW_LINE> <INDENT> return | Receives a single reading from the external ultrasound sensor. | 625941c07047854f462a136a |
def get_queryset(self): <NEW_LINE> <INDENT> queryset = StudentCourse.objects.all() <NEW_LINE> pk = float(self.request.query_params.get('student_id', None)) <NEW_LINE> if pk is not None: <NEW_LINE> <INDENT> queryset = StudentCourse.objects.filter(student=pk) <NEW_LINE> <DEDENT> return queryset | Optionally restricts the returned purchases to a given user,
by filtering against a `username` query parameter in the URL. | 625941c030c21e258bdfa3fa |
def port_profile_vlan_profile_switchport_basic_basic(self, **kwargs): <NEW_LINE> <INDENT> config = ET.Element("config") <NEW_LINE> port_profile = ET.SubElement(config, "port-profile", xmlns="urn:brocade.com:mgmt:brocade-port-profile") <NEW_LINE> name_key = ET.SubElement(port_profile, "name") <NEW_LINE> name_key.text = kwargs.pop('name') <NEW_LINE> vlan_profile = ET.SubElement(port_profile, "vlan-profile") <NEW_LINE> switchport_basic = ET.SubElement(vlan_profile, "switchport-basic") <NEW_LINE> basic = ET.SubElement(switchport_basic, "basic") <NEW_LINE> callback = kwargs.pop('callback', self._callback) <NEW_LINE> return callback(config) | Auto Generated Code
| 625941c0187af65679ca507c |
def scale(self, multiplier): <NEW_LINE> <INDENT> for key in list(self.keys()): <NEW_LINE> <INDENT> self[key] *= multiplier | Multiplies all counts by multiplier. | 625941c029b78933be1e560e |
def rvm(alpha, beta, prof): <NEW_LINE> <INDENT> zeta = alpha + beta <NEW_LINE> points = range(len(prof)) <NEW_LINE> pa = [] <NEW_LINE> phi0 = 0. <NEW_LINE> for point in points: <NEW_LINE> <INDENT> phi = np.deg2rad(prof[point]) <NEW_LINE> numer = np.sin(np.deg2rad(alpha)) * np.sin(np.deg2rad(phi - phi0)) <NEW_LINE> denom = np.sin(np.deg2rad(zeta)) * np.cos(np.deg2rad(alpha)) - np.cos(np.rad2deg(zeta)) * np.sin(np.rad2deg(alpha)) * np.cos(np.deg2rad(phi - phi0)) <NEW_LINE> psi = np.arctan(numer/denom) <NEW_LINE> if psi < -np.pi/2.: <NEW_LINE> <INDENT> psi = psi + np.pi <NEW_LINE> <DEDENT> if psi > np.pi/2.: <NEW_LINE> <INDENT> psi = psi - np.pi <NEW_LINE> <DEDENT> pa.append(np.rad2deg(psi)) <NEW_LINE> <DEDENT> return pa | Function to determine polarization swing.
Args:
-----
alpha : inclination angle (degrees)
beta : impact parameter (degrees)
prof : one dimentional profile
Return:
-------
pa : polarization position angle (degrees). | 625941c0711fe17d825422ce |
def execute_go(direction): <NEW_LINE> <INDENT> global current_room <NEW_LINE> if current_room["exits"][direction]: <NEW_LINE> <INDENT> current_room = rooms[current_room["exits"][direction]] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("You cannot go there.") | This function, given the direction (e.g. "south") updates the current room
to reflect the movement of the player if the direction is a valid exit
(and prints the name of the room into which the player is
moving). Otherwise, it prints "You cannot go there." | 625941c03d592f4c4ed1cfd2 |
def swapPairs(self, head): <NEW_LINE> <INDENT> if head == None or head.next == None: <NEW_LINE> <INDENT> return head <NEW_LINE> <DEDENT> t = head.next <NEW_LINE> head.next = t.next <NEW_LINE> t.next = head <NEW_LINE> r = t.next.next <NEW_LINE> p = t.next <NEW_LINE> while r != None and r.next != None: <NEW_LINE> <INDENT> tmp = r.next <NEW_LINE> p.next = tmp <NEW_LINE> r.next = tmp.next <NEW_LINE> tmp.next = r <NEW_LINE> p = r <NEW_LINE> r = r.next <NEW_LINE> <DEDENT> return t | :type head: ListNode
:rtype: ListNode | 625941c08e7ae83300e4af2a |
def get_owner_value(self, owner=None): <NEW_LINE> <INDENT> answer = DEFAULT_VALUE <NEW_LINE> if is_safe_value(owner): <NEW_LINE> <INDENT> answer = safe_string(owner) <NEW_LINE> <DEDENT> return self.get_unoverflowable_paragraph(answer) | Returns a flowable paragraph to add to the pdf given the misp_event owner
:param owner: MISP_EVENT owner to be formatted
:return: a Paragraph to add in the pdf, regarding the values of "owner" | 625941c0ad47b63b2c509ede |
def format_address(self, value): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> ret = int(value) <NEW_LINE> <DEDENT> except gdb.error: <NEW_LINE> <INDENT> ret = int(str(value).split(' ')[0], 16) <NEW_LINE> <DEDENT> return ret | Helper for printing gdb.Value on both python 2 and 3
| 625941c0c432627299f04ba3 |
def __init__(self, kernel=None): <NEW_LINE> <INDENT> if kernel is not None: <NEW_LINE> <INDENT> if not isinstance(kernel, Kernel): <NEW_LINE> <INDENT> raise ValueError("kernel must be None or a mlpy.Kernel object") <NEW_LINE> <DEDENT> <DEDENT> self._coeff = None <NEW_LINE> self._evals = None <NEW_LINE> self._K = None <NEW_LINE> self._kernel = kernel <NEW_LINE> self._x = None | Initialization.
:Parameters:
kernel : None or mlpy.Kernel object.
if kernel is None, K and Kt in .learn()
and in .transform() methods must be precomputed kernel
matricies, else K and Kt must be training (resp.
test) data in input space. | 625941c0442bda511e8be37a |
def set_servo_position( self, identifier: int, position: ServoPosition, ) -> None: <NEW_LINE> <INDENT> if position is None: <NEW_LINE> <INDENT> level = 0 <NEW_LINE> <DEDENT> elif -1 <= position <= 1: <NEW_LINE> <INDENT> status_unit = (position + 1) / 2 <NEW_LINE> level = 150 + int((550 - 150) * status_unit) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Position of servo should be between 1 and -1.") <NEW_LINE> <DEDENT> self._command('S', str(identifier), str(level)) <NEW_LINE> self._servo_states[identifier] = position | Set the position of a servo.
:param identifier: Port of servo to set position.
:param position: Position to set the servo to.
:raises ValueError: Position was not valid. | 625941c0d268445f265b4dcd |
def process_injection(self, parameters, data=None, content_type=None, headers=None): <NEW_LINE> <INDENT> if self._context.use_ssl(): <NEW_LINE> <INDENT> request = httplib.HTTPSConnection(self._server) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> request = httplib.HTTPConnection(self._server) <NEW_LINE> <DEDENT> request.putrequest(self._method, self.build_uri(parameters)) <NEW_LINE> if data is not None: <NEW_LINE> <INDENT> request.putheader('Content-Length', str(len(data))) <NEW_LINE> <DEDENT> if content_type is not None: <NEW_LINE> <INDENT> request.putheader('Content-Type', content_type) <NEW_LINE> <DEDENT> _headers = {} <NEW_LINE> if self._context.has_headers(): <NEW_LINE> <INDENT> for header, value in self._context.get_headers().items: <NEW_LINE> <INDENT> _headers[header] = value <NEW_LINE> <DEDENT> <DEDENT> if headers: <NEW_LINE> <INDENT> for header, value in headers: <NEW_LINE> <INDENT> _headers[header,] = value <NEW_LINE> <DEDENT> <DEDENT> for header, value in _headers: <NEW_LINE> <INDENT> request.putheader(header, value) <NEW_LINE> <DEDENT> if self._context.get_cookie() is not None: <NEW_LINE> <INDENT> request.putheader('Cookie', self._context.get_cookie()) <NEW_LINE> <DEDENT> request.endheaders() <NEW_LINE> if data is not None: <NEW_LINE> <INDENT> request.send(data) <NEW_LINE> <DEDENT> response = request.getresponse() <NEW_LINE> return self.process_response(HttpResponse(response)) | Do the HTTP request, and execute trigger.execute
In this default impl., parameters is not used. Override this
to change behavior. | 625941c0dd821e528d63b109 |
def t200222_x19(): <NEW_LINE> <INDENT> call = t200222_x17(z1=1102, mode7=0, mode8=0, mode9=0, mode10=0) <NEW_LINE> if call.Get() == 1: <NEW_LINE> <INDENT> Quit() <NEW_LINE> <DEDENT> elif call.Done(): <NEW_LINE> <INDENT> return 0 | State 0,2 | 625941c0e5267d203edcdbfe |
def has_windows(self) -> bool: <NEW_LINE> <INDENT> if self.wins.log_win: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False | Check if conversation has already created its windows | 625941c066673b3332b91fef |
def getAction(self, gameState): <NEW_LINE> <INDENT> self.game_agents = gameState.getNumAgents() <NEW_LINE> path = self.max_value(gameState, 0, 0) <NEW_LINE> return path[1] | Returns the expectimax action using self.depth and self.evaluationFunction
All ghosts should be modeled as choosing uniformly at random from their
legal moves. | 625941c0711fe17d825422cf |
def fit(self,X): <NEW_LINE> <INDENT> X = self._check_X_train(X) <NEW_LINE> score_old = [np.NINF] <NEW_LINE> alpha_, c_ , d_ = 0,0,0 <NEW_LINE> for j in range(self.n_init): <NEW_LINE> <INDENT> if self.verbose: <NEW_LINE> <INDENT> print("New Initialisation, restart number {0} \n".format(j)) <NEW_LINE> <DEDENT> alphaK, c, d, score = self._fit(X) <NEW_LINE> if score[-1] > score_old[-1]: <NEW_LINE> <INDENT> alpha_ , c_ , d_ = alphaK, c, d <NEW_LINE> score_old = score <NEW_LINE> <DEDENT> <DEDENT> self.alpha_ = alpha_ <NEW_LINE> self.means_ = c_ / (c_ + d_) <NEW_LINE> self.c_, self.d_ = c_,d_ <NEW_LINE> self.weights_ = alpha_ / np.sum(alpha_) <NEW_LINE> self.scores_ = score_old <NEW_LINE> return self | Fits Variational Bayesian Bernoulli Mixture Model
Parameters
----------
X: array-like or sparse csr_matrix of size [n_samples, n_features]
Data Matrix
Returns
-------
self: object
self
Practical Advice
----------------
Significant speedup can be achieved by using sparse matrices
(see scipy.sparse.csr_matrix) | 625941c0be8e80087fb20ba5 |
def __init__(self, p, prec, print_mode, names): <NEW_LINE> <INDENT> pAdicRingBaseGeneric.__init__(self, p, prec, print_mode, names, pAdicCappedRelativeElement) | Initialization.
INPUT:
- ``p`` -- prime
- ``prec`` -- precision cap
- ``print_mode`` -- dictionary with print options.
- ``names`` -- how to print the prime.
EXAMPLES::
sage: R = ZpCR(next_prime(10^60)) #indirect doctest
sage: type(R)
<class 'sage.rings.padics.padic_base_leaves.pAdicRingCappedRelative_with_category'>
TESTS::
sage: R = ZpCR(2)
sage: TestSuite(R).run()
sage: TestSuite(R).run(elements = [R.random_element() for i in range(2^10)], max_runs = 2^12, skip='_test_metric_function') # long time
sage: R._test_metric_function(elements = [R.random_element() for i in range(2^3)]) # long time
sage: R = ZpCR(3, 1)
sage: TestSuite(R).run(elements = [R.random_element() for i in range(3^3)])
sage: R = ZpCR(3, 2)
sage: TestSuite(R).run(elements = [R.random_element() for i in range(3^6)], skip='_test_metric_function') # long time
sage: R._test_metric_function(elements = [R.random_element() for i in range(2^3)]) # long time
sage: R = ZpCR(next_prime(10^60))
sage: TestSuite(R).run(elements = [R.random_element() for i in range(2^3)], max_runs = 2^5, skip='_test_log') # long time
sage: R._test_log(max_runs=2, elements=[R.random_element() for i in range(4)]) # long time | 625941c0aad79263cf39099c |
def add_table(self, table): <NEW_LINE> <INDENT> Driver.add_table(self, table) <NEW_LINE> name = table.name <NEW_LINE> self.collections[name] = self.datas[name] <NEW_LINE> self.inc_collections[name] = self.increments[name] <NEW_LINE> self.line_ids[name] = {} | Add the new table. | 625941c0009cb60464c63312 |
def ModifySpaceProperty(self, request): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> params = request._serialize() <NEW_LINE> body = self.call("ModifySpaceProperty", params) <NEW_LINE> response = json.loads(body) <NEW_LINE> if "Error" not in response["Response"]: <NEW_LINE> <INDENT> model = models.ModifySpacePropertyResponse() <NEW_LINE> model._deserialize(response["Response"]) <NEW_LINE> return model <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> code = response["Response"]["Error"]["Code"] <NEW_LINE> message = response["Response"]["Error"]["Message"] <NEW_LINE> reqid = response["Response"]["RequestId"] <NEW_LINE> raise TencentCloudSDKException(code, message, reqid) <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> if isinstance(e, TencentCloudSDKException): <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TencentCloudSDKException(e.message, e.message) | 更新位置空间产品属性
:param request: Request instance for ModifySpaceProperty.
:type request: :class:`tencentcloud.iotexplorer.v20190423.models.ModifySpacePropertyRequest`
:rtype: :class:`tencentcloud.iotexplorer.v20190423.models.ModifySpacePropertyResponse` | 625941c03346ee7daa2b2cc9 |
def test_horovod_process_set_included_op(self): <NEW_LINE> <INDENT> hvd.init() <NEW_LINE> if hvd.size() == 1: <NEW_LINE> <INDENT> self.skipTest("Only one worker available") <NEW_LINE> <DEDENT> single_set = hvd.add_process_set([0]) <NEW_LINE> included = self.evaluate(hvd.process_set_included_op(process_set_id=single_set.process_set_id)) <NEW_LINE> if hvd.rank() == 0: <NEW_LINE> <INDENT> self.assertEqual(included, 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.assertEqual(included, 0) <NEW_LINE> <DEDENT> hvd.remove_process_set(single_set) | Test that the result of hvd.process_set_included_op(process_set_id) is correct. | 625941c001c39578d7e74d9a |
def entry(arguments): <NEW_LINE> <INDENT> logger.setLevel(arguments.verbosity.upper()) <NEW_LINE> if not arguments.alphabay_username: <NEW_LINE> <INDENT> logger.error("This sink requires a username to be specified through CLI or enviornment variable.") <NEW_LINE> raise SystemExit() <NEW_LINE> <DEDENT> if not arguments.alphabay_password: <NEW_LINE> <INDENT> logger.error("This sink requires a password to be specified through CLI or environment variable.") <NEW_LINE> raise SystemExit() <NEW_LINE> <DEDENT> if not arguments.dbc_access_key: <NEW_LINE> <INDENT> logger.error("This sink requires a deathbycaptcha access key to be specified through CLI or environment variable.") <NEW_LINE> raise SystemExit() <NEW_LINE> <DEDENT> if not arguments.dbc_secret_key: <NEW_LINE> <INDENT> logger.error("This sink requires a deathbycaptcha secret key to be specified through CLI or environment variable.") <NEW_LINE> raise SystemExit() <NEW_LINE> <DEDENT> sink = AlphabaySink( arguments.alphabay_username, arguments.alphabay_password, arguments.dbc_access_key, arguments.dbc_secret_key, url_file=arguments.url_file, save_to_directory=arguments.save_to_directory, onion_url=arguments.onion_url, request_interval=arguments.request_interval, request_retries=arguments.request_retries, request_timeout=arguments.request_timeout, category=arguments.category ) <NEW_LINE> sink.logger = logger <NEW_LINE> if arguments.ingest: <NEW_LINE> <INDENT> if arguments.datastore == "stdout": <NEW_LINE> <INDENT> store = STDOutInterface() <NEW_LINE> parser = AlphabayParser(datastore=store) <NEW_LINE> parser.parse(scrape_results=sink.scrape()) <NEW_LINE> <DEDENT> elif arguments.datastore == "elasticsearch": <NEW_LINE> <INDENT> store = ElasticsearchInterface( host=arguments.datastore_host, port=arguments.datastore_port ) <NEW_LINE> parser = AlphabayParser(datastore=store) <NEW_LINE> parser.parse( scrape_results=sink.scrape( daemon=arguments.daemonize ) ) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> 
list(sink.scrape()) | The entry point for the alphabay sink CLI interface. This defines the logic
around the usage of command line arguments and the alphabay sink in order
to perform scraping, ingestion, and storage related functions. | 625941c05f7d997b871749f4 |
def _is_random_variable(x: T) -> bool: <NEW_LINE> <INDENT> return len(x.shape) == 1 or x.shape[1] == 1 | Check if the matrix x correspond to a random variable.
The matrix is considered a random variable if it is a vector
or a matrix corresponding to a column vector. Otherwise,
the matrix correspond to a random vector. | 625941c066673b3332b91ff0 |
def ngrams_pad(self, string): <NEW_LINE> <INDENT> warnings.warn('Method ngrams_pad deprecated, use method split instead.', DeprecationWarning) <NEW_LINE> return self.split(string) | Alias for 3.1 compatibility, please use split instead. | 625941c024f1403a92600ac7 |
def test_for_nonexisting_offer(self): <NEW_LINE> <INDENT> response = self.client.get('/o/offers/some-slug/42/join') <NEW_LINE> self.assertEqual(response.status_code, 404) | Test if error 404 will be raised when offer dosn't exist. | 625941c03539df3088e2e2aa |
def update(self, in_vals, y):
    """Update the weight vector ``w`` with the perceptron rule.

    Applies ``w_k <- w_k + y * x_k`` for every component ``k``.

    :param in_vals: list/array of input values, one per weight.
    :param y: output label from the training example.
    """
    # enumerate instead of range(len(...)): same indices, clearer intent.
    for k, x_k in enumerate(in_vals):
        self.w[k] += y * x_k
def getManifestGraph(self): <NEW_LINE> <INDENT> return self._loadManifest() | Returns the manifest graph | 625941c0656771135c3eb7cb |
def station_stats(df): <NEW_LINE> <INDENT> print('\nCalculating The Most Popular Stations and Trip...\n') <NEW_LINE> start_time = time.time() <NEW_LINE> popular_start_station = df['Start Station'].mode()[0] <NEW_LINE> print("The most popular start station is: "+ popular_start_station) <NEW_LINE> popular_end_station = df['End Station'].mode()[0] <NEW_LINE> print("The most popular end station is: "+ popular_end_station) <NEW_LINE> trips = df.groupby(['Start Station','End Station']).count().sort_values('Unnamed: 0',ascending=False).index[0] <NEW_LINE> number_of_trips = df.groupby(['Start Station','End Station']).count().sort_values('Unnamed: 0',ascending=False).iloc[0,2] <NEW_LINE> print("The most frequent trip is: "+str(trips)+" with "+str(number_of_trips)+" trips.") <NEW_LINE> print("\nThis took %s seconds." % (time.time() - start_time)) <NEW_LINE> print('-'*40) | Displays statistics on the most popular stations and trip. | 625941c0d8ef3951e324349c |
def find_winner(self):
    """Determine which player wins according to the winning condition.

    A winning condition of ``">"`` means the player with more disks
    wins; any other value means the player with fewer disks wins.

    :returns: "BLACK", "WHITE", or "NONE" for a tie.
    """
    black = self.get_num_b_disk()
    white = self.get_num_w_disk()
    # A tie is a tie under either condition.
    if black == white:
        return "NONE"
    # The two original branches were copy-pastes differing only in the
    # comparison operator; collapse them.
    if self.winning_cond == ">":
        return "BLACK" if black > white else "WHITE"
    return "BLACK" if black < white else "WHITE"
def equalizeHist(self):
    """Equalize the histogram of the stored image in place.

    Thin wrapper over OpenCV's ``equalizeHist``, which stretches a
    grayscale image's intensity range to 0-255. A color image is
    converted to grayscale first, since ``cv2.equalizeHist`` takes a
    single-channel input.
    """
    if self._isColor():
        # Convert to a single channel before equalizing.
        self.image = cv2.cvtColor(self.image, cv2.COLOR_BGR2GRAY)
    self.image = cv2.equalizeHist(self.image)
def get_binary_source_url(self, module_name, revision): <NEW_LINE> <INDENT> os = self.ini_mgr.get_operating_system() <NEW_LINE> arch = self.ini_mgr.get_architecture() <NEW_LINE> binaries = self.m.list_revision_binaries(module_name, DEP, revision) <NEW_LINE> display_name = self.m.get_display_name(module_name, DEP) <NEW_LINE> os_display_name = self.m.get_operating_system_display_name(os) <NEW_LINE> arch_display_name = self.m.get_architecture_display_name(arch) <NEW_LINE> source_url = None <NEW_LINE> for binary in binaries: <NEW_LINE> <INDENT> if (os == binary[1] and arch == binary[2] or binary[1] == "pi" and binary[2] == "pi"): <NEW_LINE> <INDENT> source_url = binary[0] <NEW_LINE> return source_url <NEW_LINE> <DEDENT> <DEDENT> raise BinaryNotFound({"display_name": display_name, "name": module_name, "revision": revision, "os": os_display_name, "arch": arch_display_name}) | Get the source url from a dependency revision | 625941c063d6d428bbe4444e |
def io_iterator_(frame, fnc, output_strategy='absolute', mix_names=False):
    """Create an IOIterator using introspection.

    Parameters
    ----------
    frame : frameobject
        Contains the info about the current local variables values.
    fnc : function
        The function to inspect.
    output_strategy : string
        Controls the behavior of the IOIterator for output paths.
    mix_names : bool
        Whether or not to append a mix of input names at the beginning.

    Returns
    -------
    Properly instantiated IOIterator object.
    """
    arg_names, _, _, local_vals = inspect.getargvalues(frame)
    arg_names.remove('self')
    del local_vals['self']

    # Arguments without a default are the inputs; the rest are outputs.
    fnc_args, fnc_defaults = get_args_default(fnc)
    n_positional = len(fnc_args) - len(fnc_defaults)
    inputs = [local_vals[name] for name in arg_names[:n_positional]]

    out_dir = ''
    out_keys = []
    outputs = []
    for name in arg_names[n_positional:]:
        if name == 'out_dir':
            out_dir = local_vals[name]
        elif 'out_' in name:
            out_keys.append(name)
            outputs.append(local_vals[name])

    return io_iterator(inputs, out_dir, outputs, output_strategy,
                       mix_names, out_keys=out_keys)
def _get_gradients_adagrad(self, J):
    """Get the AdaGrad gradients and squared-gradient updates.

    The returned gradients still need to be multiplied with the general
    learning rate.

    Parameters
    ----------
    J : theano variable
        cost

    Returns
    -------
    theano variable
        gradients that are adapted by the AdaGrad algorithm
    theano variable
        updated sum of squares for all previous steps
    """
    # Symbolic gradients of the cost w.r.t. every updatable parameter.
    grads = T.grad(J, [self.__dict__[self.updatable_parameters[i]] for i in xrange(len(self.updatable_parameters))])
    for i, _ in enumerate(grads):
        grads[i] = debug_print(grads[i], 'grads_' + self.updatable_parameters[i])
    updated_squares = dict()
    for i, p in enumerate(self.updatable_parameters):
        if isinstance(grads[i], sparse.SparseVariable):
            # Sparse path: structured ops touch only the stored entries.
            power = debug_print(sparse.structured_pow(grads[i], 2.), 'pow_' + p)
            power = sparse.remove0(power)
            # Accumulate squared gradients into the running per-parameter
            # AdaGrad matrix (stored as 'adagrad_matrix_<name>').
            updated_squares[p] = self.__dict__['adagrad_matrix_' + p] + power
            # Mask the accumulated squares down to the sparsity pattern of
            # the current gradient, then build 1/sqrt(.) on that subset.
            sqrt_matrix = sparse.sp_ones_like(power)
            sqrt_matrix = debug_print(updated_squares[p] * sqrt_matrix, 'adagrad_squares_subset_' + p)
            sqrt_matrix = debug_print(sparse.sqrt(sqrt_matrix), 'adagrad_sqrt_' + p)
            sqrt_matrix = debug_print(sparse.structured_pow(sqrt_matrix, -1.), 'adagrad_pow-1_' + p)
            grads[i] = sparse.mul(grads[i], sqrt_matrix)
        else:
            # Dense path.
            power = debug_print(T.pow(grads[i], 2.), 'pow_' + p)
            updated_squares[p] = self.__dict__['adagrad_matrix_' + p] + power
            # Guard against division by zero: positions with no accumulated
            # squares divide by one instead of sqrt(0).
            denominator = T.switch(T.neq(updated_squares[p], 0.0), T.sqrt(updated_squares[p]), T.ones_like(updated_squares[p], dtype=floatX))
            grads[i] = T.mul(grads[i], 1. / denominator)
        updated_squares[p] = debug_print(updated_squares[p], 'upd_squares_' + p)
    for i, _ in enumerate(grads):
        grads[i] = debug_print(grads[i], 'grads_updated_' + self.updatable_parameters[i])
    return grads, updated_squares
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.