text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def geo_max_distance(left, right):
    """Return the 2-dimensional maximum distance between two geometries
    in projected units.

    If ``left`` and ``right`` are the same geometry, the result is the
    distance between the two vertices farthest apart in that geometry.

    Parameters
    ----------
    left : geometry
    right : geometry

    Returns
    -------
    MaxDistance : double scalar
    """
    return ops.GeoMaxDistance(left, right).to_expr()
[ "def", "geo_max_distance", "(", "left", ",", "right", ")", ":", "op", "=", "ops", ".", "GeoMaxDistance", "(", "left", ",", "right", ")", "return", "op", ".", "to_expr", "(", ")" ]
27.294118
20.470588
def watch(key, recurse=False, profile=None, timeout=0, index=None, **kwargs):
    '''
    .. versionadded:: 2016.3.0

    Makes a best effort to watch for a key or tree change in etcd.
    Returns a dict containing the new key value ( or None if the key was
    deleted ), the modifiedIndex of the key, whether the key changed or
    not, the path to the key that changed and whether it is a directory or not.

    If something catastrophic happens, returns {}

    CLI Example:

    .. code-block:: bash

        salt myminion etcd.watch /path/to/key
        salt myminion etcd.watch /path/to/key timeout=10
        salt myminion etcd.watch /patch/to/key profile=my_etcd_config index=10
        salt myminion etcd.watch /patch/to/key host=127.0.0.1 port=2379
    '''
    # Connection options (host, port, ...) come through **kwargs.
    conn = __utils__['etcd_util.get_conn'](__opts__, profile, **kwargs)
    return conn.watch(key, recurse=recurse, timeout=timeout, index=index)
[ "def", "watch", "(", "key", ",", "recurse", "=", "False", ",", "profile", "=", "None", ",", "timeout", "=", "0", ",", "index", "=", "None", ",", "*", "*", "kwargs", ")", ":", "client", "=", "__utils__", "[", "'etcd_util.get_conn'", "]", "(", "__opts_...
38.956522
29.913043
def to_array(self):
    """
    Serializes this KeyboardButton to a dictionary.

    :return: dictionary representation of this object.
    :rtype: dict
    """
    array = super(KeyboardButton, self).to_array()
    # py2: type unicode, py3: type str
    array['text'] = u(self.text)
    # The optional request flags are serialized only when explicitly set.
    if self.request_contact is not None:
        array['request_contact'] = bool(self.request_contact)  # type bool
    if self.request_location is not None:
        array['request_location'] = bool(self.request_location)  # type bool
    return array
[ "def", "to_array", "(", "self", ")", ":", "array", "=", "super", "(", "KeyboardButton", ",", "self", ")", ".", "to_array", "(", ")", "array", "[", "'text'", "]", "=", "u", "(", "self", ".", "text", ")", "# py2: type unicode, py3: type str", "if", "self",...
37.8
20.466667
def __get_button_events(self, state, timeval=None):
    """Get the button events from xinput."""
    # Detect which buttons changed, then translate the changes into events.
    changed = self.__detect_button_events(state)
    return self.__emulate_buttons(changed, timeval)
[ "def", "__get_button_events", "(", "self", ",", "state", ",", "timeval", "=", "None", ")", ":", "changed_buttons", "=", "self", ".", "__detect_button_events", "(", "state", ")", "events", "=", "self", ".", "__emulate_buttons", "(", "changed_buttons", ",", "tim...
49
15
def merge_lvm_data(primary, secondary, name_key):
    """
    Merge two lists of dicts keyed by ``name_key``.

    Values in ``primary`` always win when present; values in ``secondary``
    are used only when missing from ``primary`` or when the primary value
    is ``None``.

    Sample input Data::

        primary = [
            {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'name_key': 'xyz'},
            {'a': None, 'b': 12, 'c': 13, 'd': 14, 'name_key': 'qrs'},
            {'a': None, 'b': 12, 'c': 13, 'd': 14, 'name_key': 'def'},
        ]
        secondary = [
            {'a': 31, 'e': 33, 'name_key': 'xyz'},
            {'a': 11, 'e': 23, 'name_key': 'qrs'},
            {'a': 1, 'e': 3, 'name_key': 'ghi'},
        ]

    Returns:
        dict: Dictionary of key value pairs from obj1 and obj2::

            {
                'xyz': {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 33, 'name_key': 'xyz'},
                'qrs': {'a': 11, 'b': 12, 'c': 13, d: 14, e: 23, 'name_key': 'qrs'},
                'def': {'a': None, 'b': 12, 'c': 13, 'd': 14, 'name_key': 'def'},
                'ghi': {'a': 1, 'e': 3, 'name_key': 'ghi'}
            }
    """
    pri_data = to_name_key_dict(primary, name_key)
    # Seed the result with secondary data; primary values override below.
    combined_data = to_name_key_dict(secondary, name_key)
    for name, pri_values in pri_data.items():
        if name in combined_data:
            merged = combined_data[name]
            # Primary wins unless its value is None and secondary has one.
            merged.update({
                k: v for k, v in pri_values.items()
                if v is not None or k not in merged
            })
        else:
            # Entry exists only in primary.
            combined_data[name] = pri_values
    return set_defaults(combined_data)
[ "def", "merge_lvm_data", "(", "primary", ",", "secondary", ",", "name_key", ")", ":", "pri_data", "=", "to_name_key_dict", "(", "primary", ",", "name_key", ")", "# Prime results with secondary data, to be updated with primary data", "combined_data", "=", "to_name_key_dict",...
39.217391
23.913043
def _GetRealImagArray(Array):
    """
    Returns the real and imaginary components of each element in an array
    and returns them in 2 resulting arrays.

    Parameters
    ----------
    Array : ndarray
        Input array

    Returns
    -------
    RealArray : ndarray
        The real components of the input array
    ImagArray : ndarray
        The imaginary components of the input array
    """
    # Vectorized: .real/.imag extract the components in one C-level pass
    # instead of the previous per-element Python loop.
    arr = _np.asarray(Array)
    # .copy() preserves the original contract of returning independent
    # arrays rather than views into the input.
    return arr.real.copy(), arr.imag.copy()
[ "def", "_GetRealImagArray", "(", "Array", ")", ":", "ImagArray", "=", "_np", ".", "array", "(", "[", "num", ".", "imag", "for", "num", "in", "Array", "]", ")", "RealArray", "=", "_np", ".", "array", "(", "[", "num", ".", "real", "for", "num", "in",...
27.684211
21.263158
def convert_string_to_type(string_value):
    """Converts a string into a type or class

    :param string_value: the string to be converted, e.g. "int"
    :return: The type derived from string_value, e.g. int
    :raises ValueError: if no type or class corresponds to string_value
    """
    # Special-case the textual representations of NoneType
    if string_value in ['None', type(None).__name__]:
        return type(None)
    # If the parameter is already a type or class, return it unchanged
    if isinstance(string_value, type) or isclass(string_value):
        return string_value
    # Get object associated with string
    # First check whether we are having a built in type (int, str, etc)
    if sys.version_info >= (3,):
        import builtins as builtins23
    else:
        import __builtin__ as builtins23
    if hasattr(builtins23, string_value):
        obj = getattr(builtins23, string_value)
        if type(obj) is type:
            return obj
    # If not, try to locate the module
    try:
        obj = locate(string_value)
    except ErrorDuringImport as e:
        raise ValueError("Unknown type '{0}'".format(e))
    # Check whether object is a type
    if type(obj) is type:
        # Fixed: return the already-located object instead of calling
        # locate() a second time (redundant import machinery work).
        return obj
    # Check whether object is a class
    if isclass(obj):
        return obj
    # Raise error if none is the case
    raise ValueError("Unknown type '{0}'".format(string_value))
[ "def", "convert_string_to_type", "(", "string_value", ")", ":", "# If the parameter is already a type, return it", "if", "string_value", "in", "[", "'None'", ",", "type", "(", "None", ")", ".", "__name__", "]", ":", "return", "type", "(", "None", ")", "if", "isi...
32.631579
15.368421
def _create_pileup(bam_file, data, out_base, background):
    """Create pileup calls in the regions of interest for hg19 -> GRCh37 chromosome mapping.
    """
    out_file = "%s-mpileup.txt" % out_base
    if not utils.file_exists(out_file):
        with file_transaction(data, out_file) as tx_out_file:
            # Locate the background variant BED shipped alongside the
            # verifybamid2 executable under its "resource" directory.
            background_bed = os.path.normpath(os.path.join(
                os.path.dirname(os.path.realpath(utils.which("verifybamid2"))),
                "resource", "%s.%s.%s.vcf.gz.dat.bed" % (background["dataset"],
                                                         background["nvars"],
                                                         background["build"])))
            local_bed = os.path.join(os.path.dirname(out_base),
                                     "%s.%s-hg19.bed" % (background["dataset"],
                                                         background["nvars"]))
            if not utils.file_exists(local_bed):
                # Convert GRCh37-style chromosome names to hg19 by
                # prefixing each BED line with "chr".
                with file_transaction(data, local_bed) as tx_local_bed:
                    with open(background_bed) as in_handle:
                        with open(tx_local_bed, "w") as out_handle:
                            for line in in_handle:
                                out_handle.write("chr%s" % line)
            mpileup_cl = samtools.prep_mpileup([bam_file], dd.get_ref_file(data),
                                               data["config"], want_bcf=False,
                                               target_regions=local_bed)
            # Strip the "chr" prefix again so the final pileup uses
            # GRCh37-style chromosome names.
            cl = ("{mpileup_cl} | sed 's/^chr//' > {tx_out_file}")
            do.run(cl.format(**locals()), "Create pileup from BAM input")
    return out_file
[ "def", "_create_pileup", "(", "bam_file", ",", "data", ",", "out_base", ",", "background", ")", ":", "out_file", "=", "\"%s-mpileup.txt\"", "%", "out_base", "if", "not", "utils", ".", "file_exists", "(", "out_file", ")", ":", "with", "file_transaction", "(", ...
64.434783
25.521739
def gauss_jordan(A, x, b):
    """Solve the linear equation system Ax=b by Gauss-Jordan elimination.

    :param A: n by m matrix
    :param x: table of size n
    :param b: table of size m
    :modifies: x will contain solution if any
    :returns int: 0 if no solution, 1 if solution unique, 2 otherwise
    :complexity: :math:`O(n^2m)`
    """
    n, m = len(x), len(b)
    assert len(A) == m and len(A[0]) == n
    # Build the augmented matrix S = [A | b] in a single structure,
    # plus one extra row holding the column indices into x.
    S = [A[i][:] + [b[i]] for i in range(m)]
    S.append(list(range(n)))
    k = diagonalize(S, n, m)
    if k < m:
        # Any non-zero right-hand side below the pivot rows means the
        # system is infeasible.
        for i in range(k, m):
            if not is_zero(S[i][n]):
                return GJ_ZERO_SOLUTIONS
    # Read the pivot variables back into x.
    for j in range(k):
        x[S[m][j]] = S[j][n]
    if k < n:
        # Free variables: set them to 0; infinitely many solutions exist.
        for j in range(k, n):
            x[S[m][j]] = 0
        return GJ_SEVERAL_SOLUTIONS
    return GJ_SINGLE_SOLUTION
[ "def", "gauss_jordan", "(", "A", ",", "x", ",", "b", ")", ":", "n", "=", "len", "(", "x", ")", "m", "=", "len", "(", "b", ")", "assert", "len", "(", "A", ")", "==", "m", "and", "len", "(", "A", "[", "0", "]", ")", "==", "n", "S", "=", ...
28.625
13.65625
def dependencies_order_of_build(target_contract, dependencies_map):
    """Return an ordered list of contracts that is sufficient to
    successfully deploy the target contract.

    Note: This function assumes that the `dependencies_map` is an
    acyclic graph.
    """
    if not dependencies_map:
        return [target_contract]
    if target_contract not in dependencies_map:
        raise ValueError('no dependencies defined for {}'.format(target_contract))
    order = [target_contract]
    pending = list(dependencies_map[target_contract])
    while pending:
        contract = pending.pop(0)
        # Insert the contract just before its dependencies that are
        # already placed in the list; queue the unseen ones for later.
        position = len(order)
        for dependency in dependencies_map[contract]:
            if dependency in order:
                position = order.index(dependency)
            else:
                pending.append(dependency)
        order.insert(position, contract)
    # Dependencies were inserted after their dependents, so reverse to
    # obtain deploy order (dependencies first).
    order.reverse()
    return order
[ "def", "dependencies_order_of_build", "(", "target_contract", ",", "dependencies_map", ")", ":", "if", "not", "dependencies_map", ":", "return", "[", "target_contract", "]", "if", "target_contract", "not", "in", "dependencies_map", ":", "raise", "ValueError", "(", "...
31.741935
21.387097
def __remove_obsolete_metadata(self):
    """
    Removes obsolete entries from the metadata of all stored routines.
    """
    # Keep only metadata entries that still have a corresponding source file.
    self._pystratum_metadata = {key: self._pystratum_metadata[key]
                                for key in self._source_file_names
                                if key in self._pystratum_metadata}
[ "def", "__remove_obsolete_metadata", "(", "self", ")", ":", "clean", "=", "{", "}", "for", "key", ",", "_", "in", "self", ".", "_source_file_names", ".", "items", "(", ")", ":", "if", "key", "in", "self", ".", "_pystratum_metadata", ":", "clean", "[", ...
35
13.8
def config_present(name):
    '''
    Ensure a specific configuration line exists in the running config

    name
        config line to set

    Examples:

    .. code-block:: yaml

        add snmp group:
          onyx.config_present:
            - names:
              - snmp-server community randoSNMPstringHERE group network-operator
              - snmp-server community AnotherRandomSNMPSTring group network-admin

        add snmp acl:
          onyx.config_present:
            - names:
              - snmp-server community randoSNMPstringHERE use-acl snmp-acl-ro
              - snmp-server community AnotherRandomSNMPSTring use-acl snmp-acl-rw
    '''
    ret = {'name': name, 'result': False, 'changes': {}, 'comment': ''}
    # Already present: nothing to do.
    if __salt__['onyx.cmd']('find', name):
        ret['result'] = True
        ret['comment'] = 'Config is already set'
        return ret
    # Test mode: report what would change without touching the device.
    if __opts__['test'] is True:
        ret['result'] = None
        ret['comment'] = 'Config will be added'
        ret['changes']['new'] = name
        return ret
    # Apply the config line, then verify it actually took effect.
    __salt__['onyx.cmd']('add_config', name)
    if __salt__['onyx.cmd']('find', name):
        ret['result'] = True
        ret['comment'] = 'Successfully added config'
        ret['changes']['new'] = name
    else:
        ret['result'] = False
        ret['comment'] = 'Failed to add config'
    return ret
[ "def", "config_present", "(", "name", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'result'", ":", "False", ",", "'changes'", ":", "{", "}", ",", "'comment'", ":", "''", "}", "matches", "=", "__salt__", "[", "'onyx.cmd'", "]", "(", "'find'...
27.333333
22.784314
def update_todo_menu(self):
    """Update todo list menu"""
    editorstack = self.get_current_editorstack()
    results = editorstack.get_todo_results()
    self.todo_menu.clear()
    filename = self.get_current_filename()
    for text, line0 in results:
        # Bind line0 as a default argument so every action jumps to its
        # own line rather than sharing the loop's final value.
        slot = lambda _checked, _l=line0: self.load(filename, goto=_l)
        self.todo_menu.addAction(create_action(self, text=text,
                                               icon=ima.icon('todo'),
                                               triggered=slot))
    self.update_todo_actions()
[ "def", "update_todo_menu", "(", "self", ")", ":", "editorstack", "=", "self", ".", "get_current_editorstack", "(", ")", "results", "=", "editorstack", ".", "get_todo_results", "(", ")", "self", ".", "todo_menu", ".", "clear", "(", ")", "filename", "=", "self...
45.75
11.916667
def __pop_params(self, tid):
    """
    Forgets the arguments tuple for the last call to the hooked function
    from this thread.

    @type  tid: int
    @param tid: Thread global ID.
    """
    frames = self.__paramStack[tid]
    frames.pop()
    # Drop the per-thread entry entirely once its stack is empty.
    if not frames:
        del self.__paramStack[tid]
[ "def", "__pop_params", "(", "self", ",", "tid", ")", ":", "stack", "=", "self", ".", "__paramStack", "[", "tid", "]", "stack", ".", "pop", "(", ")", "if", "not", "stack", ":", "del", "self", ".", "__paramStack", "[", "tid", "]" ]
27.25
13.916667
def api_path_map(self):
    """Cached dict of api_path: func."""
    if self._api_path_cache is None:
        # Build the mapping lazily on first access.
        self._api_path_cache = dict(api_endpoints(self))
    return self._api_path_cache
[ "def", "api_path_map", "(", "self", ")", ":", "if", "self", ".", "_api_path_cache", "is", "None", ":", "self", ".", "_api_path_cache", "=", "{", "api_path", ":", "func", "for", "api_path", ",", "func", "in", "api_endpoints", "(", "self", ")", "}", "retur...
32.555556
7.888889
def get_apex(self, lat, height=None):
    """Calculate apex height

    Parameters
    -----------
    lat : (float)
        Latitude in degrees
    height : (float or NoneType)
        Height above the surface of the earth in km or NoneType
        to use reference height (default=None)

    Returns
    ----------
    apex_height : (float)
        Height of the field line apex in km
    """
    lat = helpers.checklat(lat, name='alat')
    if height is None:
        # Fall back to the instance's reference height.
        height = self.refh
    # Geocentric radius of the apex, converted back to an altitude by
    # subtracting the Earth radius.
    return (self.RE + height) / np.cos(np.radians(lat)) ** 2 - self.RE
[ "def", "get_apex", "(", "self", ",", "lat", ",", "height", "=", "None", ")", ":", "lat", "=", "helpers", ".", "checklat", "(", "lat", ",", "name", "=", "'alat'", ")", "if", "height", "is", "None", ":", "height", "=", "self", ".", "refh", "cos_lat_s...
28.416667
17.75
def load_xml_db(self):
    """Load the Lutron database from the server."""
    import urllib.request
    # Fetch the full XML database over plain HTTP from the controller.
    xmlfile = urllib.request.urlopen('http://' + self._host + '/DbXmlInfo.xml')
    xml_db = xmlfile.read()
    xmlfile.close()
    _LOGGER.info("Loaded xml db")
    parser = LutronXmlDbParser(lutron=self, xml_db_str=xml_db)
    # NOTE(review): assert is stripped under `python -O`, so a parse
    # failure would then pass silently -- consider an explicit raise.
    assert(parser.parse())     # throw our own exception
    self._areas = parser.areas
    self._name = parser.project_name
    _LOGGER.info('Found Lutron project: %s, %d areas' % (
        self._name, len(self.areas)))
    return True
[ "def", "load_xml_db", "(", "self", ")", ":", "import", "urllib", ".", "request", "xmlfile", "=", "urllib", ".", "request", ".", "urlopen", "(", "'http://'", "+", "self", ".", "_host", "+", "'/DbXmlInfo.xml'", ")", "xml_db", "=", "xmlfile", ".", "read", "...
30.5
20.555556
def start_roles(self, service_name, deployment_name, role_names):
    '''
    Starts the specified virtual machines.

    service_name:
        The name of the service.
    deployment_name:
        The name of the deployment.
    role_names:
        The names of the roles, as an enumerable of strings.
    '''
    # All three arguments are mandatory.
    for arg_name, arg in (('service_name', service_name),
                          ('deployment_name', deployment_name),
                          ('role_names', role_names)):
        _validate_not_none(arg_name, arg)
    path = self._get_roles_operations_path(service_name, deployment_name)
    request_body = _XmlSerializer.start_roles_operation_to_xml(role_names)
    # The operation is asynchronous on the Azure side.
    return self._perform_post(path, request_body, as_async=True)
[ "def", "start_roles", "(", "self", ",", "service_name", ",", "deployment_name", ",", "role_names", ")", ":", "_validate_not_none", "(", "'service_name'", ",", "service_name", ")", "_validate_not_none", "(", "'deployment_name'", ",", "deployment_name", ")", "_validate_...
39.388889
19.277778
def check_node_position(
    cls, parent_id, position, on_same_branch, db_session=None, *args, **kwargs
):
    """
    Checks if node position for given parent is valid, raises exception
    if this is not the case

    :param parent_id:
    :param position:
    :param on_same_branch: indicates that we are checking same branch
    :param db_session:
    :return:
    """
    db_session = get_db_session(db_session)
    # Positions are 1-based; reject missing/zero/negative values.
    if not position or position < 1:
        raise ZigguratResourceOutOfBoundaryException(
            "Position is lower than {}", value=1
        )
    children_count = cls.count_children(parent_id, db_session=db_session)
    # Reordering within the same branch keeps the count constant;
    # inserting from another branch adds one extra valid slot at the end.
    upper_bound = children_count if on_same_branch else children_count + 1
    if position > upper_bound:
        raise ZigguratResourceOutOfBoundaryException(
            "Maximum resource ordering is {}", value=upper_bound
        )
[ "def", "check_node_position", "(", "cls", ",", "parent_id", ",", "position", ",", "on_same_branch", ",", "db_session", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "db_session", "=", "get_db_session", "(", "db_session", ")", "if", "not...
38.416667
20.25
def _proposal_params(self, state):
    """
    Proposal parameters

    Calculate parameters needed for the proposal.

    Inputs :
        state :
            x : the present sample, the place to linearize around
            f : f(x), function value at x
            J : f'(x), the jacobian of the function evaluated at x

    Outputs :
        state :
            mu : the mean vector
            L : the lower triangular cholesky factor of P
            log_p : log(p(x)) log of the posterior density
    """
    x = state['x']
    f = state['f']
    J = state['J']
    # J'J: curvature term used to build the precision matrix P below.
    JJ = np.dot(J.T,J)
    if self._prior:
        m = self._m  # NOTE(review): `m` is never used below -- confirm it can be dropped
        H = self._H
        Hm = self._Hm
        # LL' = P = H+J'J
        L = la.cholesky(H+JJ)
        # mu = (P^-1)(Hm-J'f+J'Jx)
        # Solved via two triangular solves against the Cholesky factor
        # instead of forming P^-1 explicitly.
        mu = la.solve(L.T,la.solve(L,Hm-np.dot(J.T,f)+np.dot(JJ,x)))
    else:
        # P = J'J
        L = la.cholesky(JJ)
        # mu = x-(P^-1)J'f
        mu = x-la.solve(L.T,la.solve(L,np.dot(J.T,f)))
    state['L'] = L
    state['mu'] = mu
    state['log_p'] = self._log_post(x,f)
    return state
[ "def", "_proposal_params", "(", "self", ",", "state", ")", ":", "x", "=", "state", "[", "'x'", "]", "f", "=", "state", "[", "'f'", "]", "J", "=", "state", "[", "'J'", "]", "JJ", "=", "np", ".", "dot", "(", "J", ".", "T", ",", "J", ")", "if"...
28.659091
16.159091
def list_dir(self, context):
    """Return a listing of all of the functions in this context including
    builtins.

    Args:
        context (object): The context to print a directory for.

    Returns:
        str
    """
    doc = inspect.getdoc(context)
    listing = ""
    listing += "\n"
    listing += annotate.context_name(context) + "\n"
    if doc is not None:
        doc = inspect.cleandoc(doc)
        listing += doc + "\n"
    listing += "\nDefined Functions:\n"
    # Dict contexts enumerate their keys; other contexts are introspected.
    is_dict = False
    if isinstance(context, dict):
        funs = context.keys()
        is_dict = True
    else:
        funs = utils.find_all(context)
    for fun in sorted(funs):
        override_name = None
        if is_dict:
            # Display under the dict key, not the function's own name.
            override_name = fun
        fun = self.find_function(context, fun)
        if isinstance(fun, dict):
            # Sub-contexts are listed by name only, without a signature.
            if is_dict:
                listing += " - " + override_name + '\n'
            else:
                listing += " - " + fun.metadata.name + '\n'
        else:
            listing += " - " + fun.metadata.signature(name=override_name) + '\n'
            if annotate.short_description(fun) != "":
                listing += " " + annotate.short_description(fun) + '\n'
    listing += "\nBuiltin Functions\n"
    for bif in sorted(self.builtins.keys()):
        listing += ' - ' + bif + '\n'
    listing += '\n'
    return listing
[ "def", "list_dir", "(", "self", ",", "context", ")", ":", "doc", "=", "inspect", ".", "getdoc", "(", "context", ")", "listing", "=", "\"\"", "listing", "+=", "\"\\n\"", "listing", "+=", "annotate", ".", "context_name", "(", "context", ")", "+", "\"\\n\""...
27.240741
20.537037
def encode(self, b64=False):
    """Encode the payload for transmission."""
    parts = []
    for pkt in self.packets:
        encoded_packet = pkt.encode(b64=b64)
        packet_len = len(encoded_packet)
        if b64:
            # Text framing: "<decimal length>:<packet>".
            parts.append(str(packet_len).encode('utf-8') + b':' +
                         encoded_packet)
        else:
            # Binary framing: one leading type byte (\0 text, \1 binary),
            # then the length as raw decimal digit bytes (values 0-9,
            # most significant first), then \xff, then the packet.
            digits = b''
            while packet_len != 0:
                digits = six.int2byte(packet_len % 10) + digits
                packet_len = int(packet_len / 10)
            marker = b'\1' if pkt.binary else b'\0'
            parts.append(marker + digits + b'\xff' + encoded_packet)
    return b''.join(parts)
[ "def", "encode", "(", "self", ",", "b64", "=", "False", ")", ":", "encoded_payload", "=", "b''", "for", "pkt", "in", "self", ".", "packets", ":", "encoded_packet", "=", "pkt", ".", "encode", "(", "b64", "=", "b64", ")", "packet_len", "=", "len", "(",...
41.05
13.15
def get_assessment_parts_by_ids(self, assessment_part_ids): """Gets an ``AssessmentPartList`` corresponding to the given ``IdList``. arg: assessment_part_ids (osid.id.IdList): the list of ``Ids`` to retrieve return: (osid.assessment.authoring.AssessmentPartList) - the returned ``AssessmentPart`` list raise: NotFound - an ``Id was`` not found raise: NullArgument - ``assessment_part_ids`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for # osid.resource.ResourceLookupSession.get_resources_by_ids # NOTE: This implementation currently ignores plenary view collection = JSONClientValidated('assessment_authoring', collection='AssessmentPart', runtime=self._runtime) object_id_list = [] for i in assessment_part_ids: object_id_list.append(ObjectId(self._get_id(i, 'assessment_authoring').get_identifier())) result = collection.find( dict({'_id': {'$in': object_id_list}}, **self._view_filter())) result = list(result) sorted_result = [] for object_id in object_id_list: for object_map in result: if object_map['_id'] == object_id: sorted_result.append(object_map) break return objects.AssessmentPartList(sorted_result, runtime=self._runtime, proxy=self._proxy)
[ "def", "get_assessment_parts_by_ids", "(", "self", ",", "assessment_part_ids", ")", ":", "# Implemented from template for", "# osid.resource.ResourceLookupSession.get_resources_by_ids", "# NOTE: This implementation currently ignores plenary view", "collection", "=", "JSONClientValidated", ...
49.323529
17.941176
def GetMACBRepresentation(self, event):
    """Retrieves the MACB representation.

    The result is a four-character string with one column per category
    (Modification, Access, metadata Change, Birth/creation); '.' marks
    categories that do not apply to the event's timestamp description.

    Args:
      event (EventObject): event.

    Returns:
      str: MACB representation.
    """
    data_type = getattr(event, 'data_type', None)
    if not data_type:
        return '....'
    # The filestat parser is somewhat limited.
    if data_type == 'fs:stat':
        # A single filestat event can carry several ';'-separated
        # descriptions, so multiple MACB columns may be set at once.
        descriptions = event.timestamp_desc.split(';')
        return_characters = ['.', '.', '.', '.']
        for description in descriptions:
            if description in (
                    'mtime', definitions.TIME_DESCRIPTION_MODIFICATION):
                return_characters[0] = 'M'
            elif description in (
                    'atime', definitions.TIME_DESCRIPTION_LAST_ACCESS):
                return_characters[1] = 'A'
            elif description in (
                    'ctime', definitions.TIME_DESCRIPTION_CHANGE):
                return_characters[2] = 'C'
            elif description in (
                    'crtime', definitions.TIME_DESCRIPTION_CREATION):
                return_characters[3] = 'B'
        return ''.join(return_characters)
    # Access time.
    if event.timestamp_desc in [
            definitions.TIME_DESCRIPTION_LAST_ACCESS,
            definitions.TIME_DESCRIPTION_ACCOUNT_CREATED,
            definitions.TIME_DESCRIPTION_LAST_VISITED,
            definitions.TIME_DESCRIPTION_START,
            definitions.TIME_DESCRIPTION_LAST_SHUTDOWN,
            definitions.TIME_DESCRIPTION_LAST_LOGIN,
            definitions.TIME_DESCRIPTION_LAST_PASSWORD_RESET,
            definitions.TIME_DESCRIPTION_LAST_CONNECTED,
            definitions.TIME_DESCRIPTION_LAST_RUN,
            definitions.TIME_DESCRIPTION_LAST_PRINTED]:
        return '.A..'
    # Content modification.
    if event.timestamp_desc in [
            definitions.TIME_DESCRIPTION_MODIFICATION,
            definitions.TIME_DESCRIPTION_WRITTEN,
            definitions.TIME_DESCRIPTION_DELETED]:
        return 'M...'
    # Content creation time.
    if event.timestamp_desc in [
            definitions.TIME_DESCRIPTION_CREATION,
            definitions.TIME_DESCRIPTION_ADDED,
            definitions.TIME_DESCRIPTION_FILE_DOWNLOADED,
            definitions.TIME_DESCRIPTION_FIRST_CONNECTED]:
        return '...B'
    # Metadata modification.
    if event.timestamp_desc in [
            definitions.TIME_DESCRIPTION_CHANGE,
            definitions.TIME_DESCRIPTION_ENTRY_MODIFICATION]:
        return '..C.'
    return '....'
[ "def", "GetMACBRepresentation", "(", "self", ",", "event", ")", ":", "data_type", "=", "getattr", "(", "event", ",", "'data_type'", ",", "None", ")", "if", "not", "data_type", ":", "return", "'....'", "# The filestat parser is somewhat limited.", "if", "data_type"...
32.514286
15.514286
def fire_event(self, event_name, service_name, default=None):
    """
    Fire a data_ready, data_lost, start, or stop event on a given service.
    """
    service = self.get_service(service_name)
    handlers = service.get(event_name, default)
    if not handlers:
        return
    # A single callable is accepted as shorthand for a one-element list.
    if not isinstance(handlers, Iterable):
        handlers = [handlers]
    for handler in handlers:
        if isinstance(handler, ManagerCallback):
            # Manager-aware callbacks receive the full event context.
            handler(self, service_name, event_name)
        else:
            handler(service_name)
[ "def", "fire_event", "(", "self", ",", "event_name", ",", "service_name", ",", "default", "=", "None", ")", ":", "service", "=", "self", ".", "get_service", "(", "service_name", ")", "callbacks", "=", "service", ".", "get", "(", "event_name", ",", "default...
38.933333
12.533333
def help_center_section_articles(self, id, locale=None, **kwargs):
    "https://developer.zendesk.com/rest_api/docs/help_center/articles#list-articles"
    # With a locale, the path gains a "/{locale}" segment after help_center.
    if locale:
        api_path = "/api/v2/help_center/{locale}/sections/{id}/articles.json".format(
            id=id, locale=locale)
    else:
        api_path = "/api/v2/help_center/sections/{id}/articles.json".format(id=id)
    return self.call(api_path, **kwargs)
[ "def", "help_center_section_articles", "(", "self", ",", "id", ",", "locale", "=", "None", ",", "*", "*", "kwargs", ")", ":", "api_path", "=", "\"/api/v2/help_center/sections/{id}/articles.json\"", "api_path", "=", "api_path", ".", "format", "(", "id", "=", "id"...
59.25
24.75
def register(self, func):
    """
    Register function to templates.
    """
    # Silently ignore non-callables; always hand the argument back so
    # this can be used as a decorator.
    if not callable(func):
        return func
    self.functions[func.__name__] = func
    return func
[ "def", "register", "(", "self", ",", "func", ")", ":", "if", "callable", "(", "func", ")", ":", "self", ".", "functions", "[", "func", ".", "__name__", "]", "=", "func", "return", "func" ]
33
11.6
def delete(key, service=None, profile=None):  # pylint: disable=W0613
    '''
    Delete a key from the etcd service

    Returns True on success and False on any failure.
    '''
    client = _get_conn(profile)
    try:
        client.delete(key)
        return True
    # Best-effort helper: report failure as False instead of propagating.
    except Exception:
        return False
[ "def", "delete", "(", "key", ",", "service", "=", "None", ",", "profile", "=", "None", ")", ":", "# pylint: disable=W0613", "client", "=", "_get_conn", "(", "profile", ")", "try", ":", "client", ".", "delete", "(", "key", ")", "return", "True", "except",...
24.5
21.3
def _try_pydatetime(x):
    """Try to convert to pandas objects to datetimes.

    Plotly doesn't know how to handle them.
    """
    # Try both pandas flavours in turn: a DatetimeIndex exposes
    # to_pydatetime() directly, a datetime Series through the .dt accessor.
    for extract in (lambda v: v.to_pydatetime(),
                    lambda v: v.dt.to_pydatetime()):
        try:
            x = [y.isoformat() for y in extract(x)]
        except AttributeError:
            # Not this kind of object; leave x unchanged.
            pass
    return x
[ "def", "_try_pydatetime", "(", "x", ")", ":", "try", ":", "# for datetimeindex", "x", "=", "[", "y", ".", "isoformat", "(", ")", "for", "y", "in", "x", ".", "to_pydatetime", "(", ")", "]", "except", "AttributeError", ":", "pass", "try", ":", "# for gen...
24.75
18.25
def get_item_metadata(self, handle):
    """Return dictionary containing all metadata associated with handle.

    In other words all the metadata added using the ``add_item_metadata``
    method.

    :param handle: handle for accessing an item before the dataset is frozen
    :returns: dictionary containing item metadata
    """
    if not self._metadata_dir_exists():
        return {}
    prefix = self._handle_to_fragment_absprefixpath(handle)
    metadata = {}
    for fpath in self._ls_abspaths_with_cache(self._metadata_fragments_abspath):
        if not fpath.startswith(prefix):
            continue
        # Fragment files are named "<identifier>.<key>.json".
        key = fpath.split('.')[-2]
        metadata[key] = _get_obj(fpath)
    return metadata
[ "def", "get_item_metadata", "(", "self", ",", "handle", ")", ":", "if", "not", "self", ".", "_metadata_dir_exists", "(", ")", ":", "return", "{", "}", "prefix", "=", "self", ".", "_handle_to_fragment_absprefixpath", "(", "handle", ")", "files", "=", "[", "...
32.423077
20.461538
def register_handler(self, callable_obj, entrypoint, methods=('GET',)):
    """Register a handler callable to a specific route.

    Args:
        entrypoint (str): The uri relative path.
        methods (tuple): A tuple of valid method strings.
        callable_obj (callable): The callable object.

    Returns:
        The Router instance (for chaining purposes).

    Raises:
        RouteError, for missing routing params or invalid callable
        object type.
    """
    route = Route.wrap_callable(
        uri=entrypoint,
        methods=methods,
        callable_obj=callable_obj,
    )
    # Invalid routes fail fast instead of being silently dropped.
    if not route.is_valid:
        raise RouteError(  # pragma: no cover
            "Missing params: methods: {} - entrypoint: {}".format(
                methods, entrypoint
            )
        )
    self._routes.add(route)
    return self
[ "def", "register_handler", "(", "self", ",", "callable_obj", ",", "entrypoint", ",", "methods", "=", "(", "'GET'", ",", ")", ")", ":", "router_obj", "=", "Route", ".", "wrap_callable", "(", "uri", "=", "entrypoint", ",", "methods", "=", "methods", ",", "...
29.580645
20.354839
def license(self, license_id: str, token: dict = None, prot: str = "https") -> dict:
    """Get details about a specific license.

    :param str token: API auth token
    :param str license_id: license UUID
    :param str prot: https [DEFAULT] or http
        (use it only for dev and tracking needs).
    """
    # handling request parameters
    payload = {"lid": license_id}
    # build the request URL and send it through the instance session
    url = "{}://v1.{}.isogeo.com/licenses/{}".format(prot, self.api_url, license_id)
    response = self.get(
        url,
        headers=self.header,
        params=payload,
        proxies=self.proxies,
        verify=self.ssl,
    )
    # raise on API-level errors before decoding the body
    checker.check_api_response(response)
    return response.json()
[ "def", "license", "(", "self", ",", "license_id", ":", "str", ",", "token", ":", "dict", "=", "None", ",", "prot", ":", "str", "=", "\"https\"", ")", "->", "dict", ":", "# handling request parameters", "payload", "=", "{", "\"lid\"", ":", "license_id", "...
30.428571
15.892857
def build_pos_grid(start, end, nstep, mesh=False):
    """
    Return a grid of positions starting at X,Y given by 'start', and
    ending at X,Y given by 'end'.  The grid will be completely filled
    in X and Y by every 'step' interval.
    """
    # Normalize so we always walk in the +X direction.
    if end[0] - start[0] < 0:
        start, end = end, start
    stepx = (end[0] - start[0]) / nstep
    # Sample X every stepx (endpoint inclusive via the half-step slack),
    # then linearly interpolate Y along the start->end line.
    xarr = np.arange(start[0], end[0] + stepx / 2.0, stepx)
    yarr = np.interp(xarr, [start[0], end[0]], [start[1], end[1]])
    if mesh:
        # Expand to the full 2-D grid of positions, flattened.
        xmesh, ymesh = np.meshgrid(xarr, yarr)
        return xmesh.ravel(), ymesh.ravel()
    return xarr, yarr
[ "def", "build_pos_grid", "(", "start", ",", "end", ",", "nstep", ",", "mesh", "=", "False", ")", ":", "# Build X and Y arrays", "dx", "=", "end", "[", "0", "]", "-", "start", "[", "0", "]", "if", "dx", "<", "0", ":", "nstart", "=", "end", "end", ...
30
19.84
def _check_all_devices_in_sync(self):
    '''Wait until all devices have failover status of 'In Sync'.

    :raises: UnexpectedDeviceGroupState
    '''
    # NOTE(review): the docstring previously named UnexpectedClusterState,
    # but the exception actually raised below is UnexpectedDeviceGroupState.
    if len(self._get_devices_by_failover_status('In Sync')) != \
            len(self.devices):
        msg = "Expected all devices in group to have 'In Sync' status."
        raise UnexpectedDeviceGroupState(msg)
[ "def", "_check_all_devices_in_sync", "(", "self", ")", ":", "if", "len", "(", "self", ".", "_get_devices_by_failover_status", "(", "'In Sync'", ")", ")", "!=", "len", "(", "self", ".", "devices", ")", ":", "msg", "=", "\"Expected all devices in group to have 'In S...
38.1
21.9
def create_user(self, login=None, password=None, user_name=None, envs=[], query='/users/'): """ `login` - Login or username for user `password` - Plain text password for user `user_name` - Full name of user Create user in specified environments """ login = login.lower() data = {'login': login, 'password': password[0], 'name': user_name} juicer.utils.Log.log_debug("Create User: %s ('%s')", login, user_name) for env in envs: if envs.index(env) != 0 and juicer.utils.env_same_host(env, envs[envs.index(env) - 1]): juicer.utils.Log.log_info("environment `%s` shares a host with environment `%s`... skipping!", (env, envs[envs.index(env) - 1])) continue elif juicer.utils.user_exists_p(login, self.connectors[env]): juicer.utils.Log.log_info("user `%s` already exists in %s... skipping!", (login, env)) continue else: _r = self.connectors[env].post(query, data) if _r.status_code == Constants.PULP_POST_CREATED: juicer.utils.Log.log_info("created user `%s` with login `%s` in %s", (user_name, login, env)) else: _r.raise_for_status() return True
[ "def", "create_user", "(", "self", ",", "login", "=", "None", ",", "password", "=", "None", ",", "user_name", "=", "None", ",", "envs", "=", "[", "]", ",", "query", "=", "'/users/'", ")", ":", "login", "=", "login", ".", "lower", "(", ")", "data", ...
44.060606
23.878788
def process_docstring(app, what, name, obj, options, lines): """Enable markdown syntax in docstrings""" markdown = "\n".join(lines) # ast = cm_parser.parse(markdown) # html = cm_renderer.render(ast) rest = m2r(markdown) rest.replace("\r\n", "\n") del lines[:] lines.extend(rest.split("\n"))
[ "def", "process_docstring", "(", "app", ",", "what", ",", "name", ",", "obj", ",", "options", ",", "lines", ")", ":", "markdown", "=", "\"\\n\"", ".", "join", "(", "lines", ")", "# ast = cm_parser.parse(markdown)", "# html = cm_renderer.render(ast)", "rest", "="...
26.5
17.333333
def read(self, domain, type_name, search_command, body=None): """Read entry in ThreatConnect Data Store Args: domain (string): One of 'local', 'organization', or 'system'. type_name (string): This is a free form index type name. The ThreatConnect API will use this resource verbatim. search_command (string): Search command to pass to ES. body (str): JSON body """ return self._request(domain, type_name, search_command, 'GET', body)
[ "def", "read", "(", "self", ",", "domain", ",", "type_name", ",", "search_command", ",", "body", "=", "None", ")", ":", "return", "self", ".", "_request", "(", "domain", ",", "type_name", ",", "search_command", ",", "'GET'", ",", "body", ")" ]
47.272727
22.727273
def __parts_and_divisions(self): """ The parts and divisions directly part of this element. """ from .division import Division from .part import Part from .placeholder_part import PlaceholderPart text = self.node.text if text: stripped_text = text.replace('\n', '') if stripped_text.strip(): yield PlaceholderPart(stripped_text) for item in self.node: if item.tag == 'part': yield Part(item) elif item.tag == 'div': yield Division(item) if item.tail: stripped_tail = item.tail.replace('\n', '') if stripped_tail.strip(): yield PlaceholderPart(stripped_tail)
[ "def", "__parts_and_divisions", "(", "self", ")", ":", "from", ".", "division", "import", "Division", "from", ".", "part", "import", "Part", "from", ".", "placeholder_part", "import", "PlaceholderPart", "text", "=", "self", ".", "node", ".", "text", "if", "t...
31.24
13.88
def get_filehandle(self): """Get HDF4 filehandle.""" if os.path.exists(self.filename): self.filehandle = SD(self.filename, SDC.READ) logger.debug("Loading dataset {}".format(self.filename)) else: raise IOError("Path {} does not exist.".format(self.filename))
[ "def", "get_filehandle", "(", "self", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "self", ".", "filename", ")", ":", "self", ".", "filehandle", "=", "SD", "(", "self", ".", "filename", ",", "SDC", ".", "READ", ")", "logger", ".", "debug"...
44.571429
17.428571
def get_user(self, user_name, raw=False): """ Get a dictionary or object with info about the given user from the Hacker News API. Will raise an requests.HTTPError if we got a non-200 response back. Response parameters: "id' -> The user's unique username. Case-sensitive. Required. "delay" -> Delay in minutes between a comment's creation and its visibility to other users. "created" -> Creation date of the user, in Unix Time. "karma" -> The user's karma. "about" -> The user's optional self-description. HTML. "submitted" -> List of the user's stories, polls and comments. :param user_name: the relevant user's name :param raw: (optional): If true, return the raw decoded JSON dict, if False, return a nice object with keywords as attributes. Default if False. :return: A dictionary with relevant info about the user, if successful. """ suburl = "v0/user/{}.json".format(user_name) try: user_data = self._make_request(suburl) except requests.HTTPError as e: hn_logger.exception('Faulted on item request for user {}, with status {}'.format(user_name, e.errno)) raise e if not user_data: raise ValueError('User name {} not found, or no data!'.format(user_name)) return user_data if raw else HackerNewsUpdates(**user_data)
[ "def", "get_user", "(", "self", ",", "user_name", ",", "raw", "=", "False", ")", ":", "suburl", "=", "\"v0/user/{}.json\"", ".", "format", "(", "user_name", ")", "try", ":", "user_data", "=", "self", ".", "_make_request", "(", "suburl", ")", "except", "r...
54.185185
27.962963
def table(self, data2=None, dense=True): """ Compute the counts of values appearing in a column, or co-occurence counts between two columns. :param H2OFrame data2: An optional single column to aggregate counts by. :param bool dense: If True (default) then use dense representation, which lists only non-zero counts, 1 combination per row. Set to False to expand counts across all combinations. :returns: H2OFrame of the counts at each combination of factor levels """ return H2OFrame._expr(expr=ExprNode("table", self, data2, dense)) if data2 is not None else H2OFrame._expr( expr=ExprNode("table", self, dense))
[ "def", "table", "(", "self", ",", "data2", "=", "None", ",", "dense", "=", "True", ")", ":", "return", "H2OFrame", ".", "_expr", "(", "expr", "=", "ExprNode", "(", "\"table\"", ",", "self", ",", "data2", ",", "dense", ")", ")", "if", "data2", "is",...
56.916667
35.083333
def build_current_graph(): """ Read current state of SQL items from the current project state. Returns: (SQLStateGraph) Current project state graph. """ graph = SQLStateGraph() for app_name, config in apps.app_configs.items(): try: module = import_module( '.'.join((config.module.__name__, SQL_CONFIG_MODULE))) sql_items = module.sql_items except (ImportError, AttributeError): continue for sql_item in sql_items: graph.add_node((app_name, sql_item.name), sql_item) for dep in sql_item.dependencies: graph.add_lazy_dependency((app_name, sql_item.name), dep) graph.build_graph() return graph
[ "def", "build_current_graph", "(", ")", ":", "graph", "=", "SQLStateGraph", "(", ")", "for", "app_name", ",", "config", "in", "apps", ".", "app_configs", ".", "items", "(", ")", ":", "try", ":", "module", "=", "import_module", "(", "'.'", ".", "join", ...
31.608696
18.391304
def save(self, name, content, max_length=None): """ Saves the given content with the given name using the local storage. If the :attr:`~queued_storage.backends.QueuedStorage.delayed` attribute is ``True`` this will automatically call the :meth:`~queued_storage.backends.QueuedStorage.transfer` method queuing the transfer from local to remote storage. :param name: file name :type name: str :param content: content of the file specified by name :type content: :class:`~django:django.core.files.File` :rtype: str """ cache_key = self.get_cache_key(name) cache.set(cache_key, False) # Use a name that is available on both the local and remote storage # systems and save locally. name = self.get_available_name(name) try: name = self.local.save(name, content, max_length=max_length) except TypeError: # Django < 1.10 name = self.local.save(name, content) # Pass on the cache key to prevent duplicate cache key creation, # we save the result in the storage to be able to test for it if not self.delayed: self.result = self.transfer(name, cache_key=cache_key) return name
[ "def", "save", "(", "self", ",", "name", ",", "content", ",", "max_length", "=", "None", ")", ":", "cache_key", "=", "self", ".", "get_cache_key", "(", "name", ")", "cache", ".", "set", "(", "cache_key", ",", "False", ")", "# Use a name that is available o...
41
20.096774
def export_data(self): """ Get the results with the modified_data """ result = {} data = self.__original_data__.copy() data.update(self.__modified_data__) for key, value in data.items(): if key in self.__deleted_fields__: continue try: result[key] = value.export_data() except AttributeError: result[key] = value return result
[ "def", "export_data", "(", "self", ")", ":", "result", "=", "{", "}", "data", "=", "self", ".", "__original_data__", ".", "copy", "(", ")", "data", ".", "update", "(", "self", ".", "__modified_data__", ")", "for", "key", ",", "value", "in", "data", "...
27.058824
12.823529
def get_ddG_results(self): """Parse the results from BuildModel and get the delta delta G's. A positive ddG means that the mutation(s) is destabilzing, negative means stabilizing. - highly stabilising (ΔΔG < −1.84 kcal/mol); - stabilising (−1.84 kcal/mol ≤ ΔΔG < −0.92 kcal/mol); - slightly stabilising (−0.92 kcal/mol ≤ ΔΔG < −0.46 kcal/mol); - neutral (−0.46 kcal/mol < ΔΔG ≤ +0.46 kcal/mol); - slightly destabilising (+0.46 kcal/mol < ΔΔG ≤ +0.92 kcal/mol); - destabilising (+0.92 kcal/mol < ΔΔG ≤ +1.84 kcal/mol); - highly destabilising (ΔΔG > +1.84 kcal/mol). Returns: dict: Dictionary of mutation group to predicted ddG. """ foldx_avg_df = self.df_mutation_ddG_avg foldx_avg_ddG = {} results = foldx_avg_df[['Pdb', 'total energy', 'SD']].T.to_dict().values() for r in results: ident = r['Pdb'].split('_')[-1] ddG = r['total energy'] ddG_sd = r['SD'] foldx_avg_ddG[self.mutation_index_to_group[int(ident)]] = (ddG, ddG_sd) return foldx_avg_ddG
[ "def", "get_ddG_results", "(", "self", ")", ":", "foldx_avg_df", "=", "self", ".", "df_mutation_ddG_avg", "foldx_avg_ddG", "=", "{", "}", "results", "=", "foldx_avg_df", "[", "[", "'Pdb'", ",", "'total energy'", ",", "'SD'", "]", "]", ".", "T", ".", "to_di...
39.413793
25.206897
def shortcut( name, target, arguments=None, working_dir=None, description=None, icon_location=None, force=False, backupname=None, makedirs=False, user=None, **kwargs): ''' Create a Windows shortcut If the file already exists and is a shortcut pointing to any location other than the specified target, the shortcut will be replaced. If it is a regular file or directory then the state will return False. If the regular file or directory is desired to be replaced with a shortcut pass force: True, if it is to be renamed, pass a backupname. name The location of the shortcut to create. Must end with either ".lnk" or ".url" target The location that the shortcut points to arguments Any arguments to pass in the shortcut working_dir Working directory in which to execute target description Description to set on shortcut icon_location Location of shortcut's icon force If the name of the shortcut exists and is not a file and force is set to False, the state will fail. If force is set to True, the link or directory in the way of the shortcut file will be deleted to make room for the shortcut, unless backupname is set, when it will be renamed backupname If the name of the shortcut exists and is not a file, it will be renamed to the backupname. If the backupname already exists and force is False, the state will fail. Otherwise, the backupname will be removed first. makedirs If the location of the shortcut does not already have a parent directory then the state will fail, setting makedirs to True will allow Salt to create the parent directory. Setting this to True will also create the parent for backupname if necessary. user The user to own the file, this defaults to the user salt is running as on the minion The default mode for new files and directories corresponds umask of salt process. For existing files and directories it's not enforced. 
''' user = _test_owner(kwargs, user=user) ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''} if not salt.utils.platform.is_windows(): return _error(ret, 'Shortcuts are only supported on Windows') if not name: return _error(ret, 'Must provide name to file.shortcut') if not name.endswith('.lnk') and not name.endswith('.url'): return _error(ret, 'Name must end with either ".lnk" or ".url"') # Normalize paths; do this after error checks to avoid invalid input # getting expanded, e.g. '' turning into '.' name = os.path.realpath(os.path.expanduser(name)) if name.endswith('.lnk'): target = os.path.realpath(os.path.expanduser(target)) if working_dir: working_dir = os.path.realpath(os.path.expanduser(working_dir)) if icon_location: icon_location = os.path.realpath(os.path.expanduser(icon_location)) if user is None: user = __opts__['user'] # Make sure the user exists in Windows # Salt default is 'root' if not __salt__['user.info'](user): # User not found, use the account salt is running under # If username not found, use System user = __salt__['user.current']() if not user: user = 'SYSTEM' preflight_errors = [] uid = __salt__['file.user_to_uid'](user) if uid == '': preflight_errors.append('User {0} does not exist'.format(user)) if not os.path.isabs(name): preflight_errors.append( 'Specified file {0} is not an absolute path'.format(name) ) if preflight_errors: msg = '. '.join(preflight_errors) if len(preflight_errors) > 1: msg += '.' 
return _error(ret, msg) presult, pcomment, pchanges = _shortcut_check(name, target, arguments, working_dir, description, icon_location, force, user) if __opts__['test']: ret['result'] = presult ret['comment'] = pcomment ret['changes'] = pchanges return ret if not os.path.isdir(os.path.dirname(name)): if makedirs: try: _makedirs(name=name, user=user) except CommandExecutionError as exc: return _error(ret, 'Drive {0} is not mapped'.format(exc.message)) else: return _error( ret, 'Directory "{0}" for shortcut is not present'.format( os.path.dirname(name) ) ) if os.path.isdir(name) or os.path.islink(name): # It is not a shortcut, but a dir or symlink if backupname is not None: # Make a backup first if os.path.lexists(backupname): if not force: return _error(ret, (( 'File exists where the backup target {0} should go' ).format(backupname))) else: __salt__['file.remove'](backupname) time.sleep(1) # wait for asynchronous deletion if not os.path.isdir(os.path.dirname(backupname)): if makedirs: try: _makedirs(name=backupname) except CommandExecutionError as exc: return _error(ret, 'Drive {0} is not mapped'.format(exc.message)) else: return _error(ret, ( 'Directory does not exist for' ' backup at "{0}"' ).format(os.path.dirname(backupname))) os.rename(name, backupname) time.sleep(1) # wait for asynchronous rename elif force: # Remove whatever is in the way __salt__['file.remove'](name) ret['changes']['forced'] = 'Shortcut was forcibly replaced' time.sleep(1) # wait for asynchronous deletion else: # Otherwise throw an error return _error(ret, (( 'Directory or symlink exists where the' ' shortcut "{0}" should be' ).format(name))) # This will just load the shortcut if it already exists # It won't create the file until calling scut.Save() with salt.utils.winapi.Com(): shell = win32com.client.Dispatch("WScript.Shell") scut = shell.CreateShortcut(name) # The shortcut target will automatically be created with its # canonical capitalization; no way to override it, so ignore case 
state_checks = [scut.TargetPath.lower() == target.lower()] if arguments is not None: state_checks.append(scut.Arguments == arguments) if working_dir is not None: state_checks.append( scut.WorkingDirectory.lower() == working_dir.lower() ) if description is not None: state_checks.append(scut.Description == description) if icon_location is not None: state_checks.append(scut.IconLocation.lower() == icon_location.lower()) if __salt__['file.file_exists'](name): # The shortcut exists, verify that it matches the desired state if not all(state_checks): # The target is wrong, delete it os.remove(name) else: if _check_shortcut_ownership(name, user): # The shortcut looks good! ret['comment'] = ('Shortcut {0} is present and owned by ' '{1}'.format(name, user)) else: if _set_shortcut_ownership(name, user): ret['comment'] = ('Set ownership of shortcut {0} to ' '{1}'.format(name, user)) ret['changes']['ownership'] = '{0}'.format(user) else: ret['result'] = False ret['comment'] += ( 'Failed to set ownership of shortcut {0} to ' '{1}'.format(name, user) ) return ret if not os.path.exists(name): # The shortcut is not present, make it try: scut.TargetPath = target if arguments is not None: scut.Arguments = arguments if working_dir is not None: scut.WorkingDirectory = working_dir if description is not None: scut.Description = description if icon_location is not None: scut.IconLocation = icon_location scut.Save() except (AttributeError, pywintypes.com_error) as exc: ret['result'] = False ret['comment'] = ('Unable to create new shortcut {0} -> ' '{1}: {2}'.format(name, target, exc)) return ret else: ret['comment'] = ('Created new shortcut {0} -> ' '{1}'.format(name, target)) ret['changes']['new'] = name if not _check_shortcut_ownership(name, user): if not _set_shortcut_ownership(name, user): ret['result'] = False ret['comment'] += (', but was unable to set ownership to ' '{0}'.format(user)) return ret
[ "def", "shortcut", "(", "name", ",", "target", ",", "arguments", "=", "None", ",", "working_dir", "=", "None", ",", "description", "=", "None", ",", "icon_location", "=", "None", ",", "force", "=", "False", ",", "backupname", "=", "None", ",", "makedirs"...
38.46063
20.34252
def wcs_to_axes(w, npix): """Generate a sequence of bin edge vectors corresponding to the axes of a WCS object.""" npix = npix[::-1] x = np.linspace(-(npix[0]) / 2., (npix[0]) / 2., npix[0] + 1) * np.abs(w.wcs.cdelt[0]) y = np.linspace(-(npix[1]) / 2., (npix[1]) / 2., npix[1] + 1) * np.abs(w.wcs.cdelt[1]) if w.wcs.naxis == 2: return x, y cdelt2 = np.log10((w.wcs.cdelt[2] + w.wcs.crval[2]) / w.wcs.crval[2]) z = (np.linspace(0, npix[2], npix[2] + 1)) * cdelt2 z += np.log10(w.wcs.crval[2]) return x, y, z
[ "def", "wcs_to_axes", "(", "w", ",", "npix", ")", ":", "npix", "=", "npix", "[", ":", ":", "-", "1", "]", "x", "=", "np", ".", "linspace", "(", "-", "(", "npix", "[", "0", "]", ")", "/", "2.", ",", "(", "npix", "[", "0", "]", ")", "/", ...
29.05
22.3
def p_casecontent_condition_single(self, p): 'casecontent_condition : casecontent_condition COMMA expression' p[0] = p[1] + (p[3],) p.set_lineno(0, p.lineno(1))
[ "def", "p_casecontent_condition_single", "(", "self", ",", "p", ")", ":", "p", "[", "0", "]", "=", "p", "[", "1", "]", "+", "(", "p", "[", "3", "]", ",", ")", "p", ".", "set_lineno", "(", "0", ",", "p", ".", "lineno", "(", "1", ")", ")" ]
45.25
12.75
def debye_E_single(x): """ calculate Debye energy using old fortran routine :params x: Debye x value :return: Debye energy """ # make the function handles both scalar and array if ((x > 0.0) & (x <= 0.1)): result = 1. - 0.375 * x + x * x * \ (0.05 - (5.952380953e-4) * x * x) # for 0.1 < x <= 7.25 if ((x > 0.1) & (x <= 7.25)): result = ((((.0946173 * x - 4.432582) * x + 85.07724) * x - 800.6087) * x + 3953.632) / ((((x + 15.121491) * x + 143.155337) * x + 682.0012) * x + 3953.632) # for x > 7.25 # it appears there might be error for this part, but never been exposed # because of rarity of such high x value. if (x > 7.25): exx = np.exp(-x) nn = np.round(25. / x) n = nn.astype(np.int64) temp = 0. if (n > 0): temp2 = 1. end = n + 1 for i in range(1, end): temps = i * 1. temp2 = temp2 * exx x3 = temps * x temp = temp + temp2 * \ (6. + x3 * (6. + x3 * (3. + x3))) / \ (temps * temps * temps * temps) result = 3.0 * (6.493939402 - temp) / (x * x * x) return result
[ "def", "debye_E_single", "(", "x", ")", ":", "# make the function handles both scalar and array", "if", "(", "(", "x", ">", "0.0", ")", "&", "(", "x", "<=", "0.1", ")", ")", ":", "result", "=", "1.", "-", "0.375", "*", "x", "+", "x", "*", "x", "*", ...
34.394737
13.342105
def GetConsoleTitle() -> str: """ GetConsoleTitle from Win32. Return str. """ arrayType = ctypes.c_wchar * MAX_PATH values = arrayType() ctypes.windll.kernel32.GetConsoleTitleW(values, MAX_PATH) return values.value
[ "def", "GetConsoleTitle", "(", ")", "->", "str", ":", "arrayType", "=", "ctypes", ".", "c_wchar", "*", "MAX_PATH", "values", "=", "arrayType", "(", ")", "ctypes", ".", "windll", ".", "kernel32", ".", "GetConsoleTitleW", "(", "values", ",", "MAX_PATH", ")",...
26.444444
11.111111
def init_dict(data, index, columns, dtype=None): """ Segregate Series based on type and coerce into matrices. Needs to handle a lot of exceptional cases. """ if columns is not None: from pandas.core.series import Series arrays = Series(data, index=columns, dtype=object) data_names = arrays.index missing = arrays.isnull() if index is None: # GH10856 # raise ValueError if only scalars in dict index = extract_index(arrays[~missing]) else: index = ensure_index(index) # no obvious "empty" int column if missing.any() and not is_integer_dtype(dtype): if dtype is None or np.issubdtype(dtype, np.flexible): # GH#1783 nan_dtype = object else: nan_dtype = dtype val = construct_1d_arraylike_from_scalar(np.nan, len(index), nan_dtype) arrays.loc[missing] = [val] * missing.sum() else: keys = com.dict_keys_to_ordered_list(data) columns = data_names = Index(keys) # GH#24096 need copy to be deep for datetime64tz case # TODO: See if we can avoid these copies arrays = [data[k] if not is_datetime64tz_dtype(data[k]) else data[k].copy(deep=True) for k in keys] return arrays_to_mgr(arrays, data_names, index, columns, dtype=dtype)
[ "def", "init_dict", "(", "data", ",", "index", ",", "columns", ",", "dtype", "=", "None", ")", ":", "if", "columns", "is", "not", "None", ":", "from", "pandas", ".", "core", ".", "series", "import", "Series", "arrays", "=", "Series", "(", "data", ","...
38.810811
16.378378
def sid(self, spec): """Convert the given search specifier into a search-id (sid).""" if spec.startswith('@'): index = int(spec[1:]) jobs = self.service.jobs.list() if index < len(jobs): return jobs[index].sid return spec
[ "def", "sid", "(", "self", ",", "spec", ")", ":", "if", "spec", ".", "startswith", "(", "'@'", ")", ":", "index", "=", "int", "(", "spec", "[", "1", ":", "]", ")", "jobs", "=", "self", ".", "service", ".", "jobs", ".", "list", "(", ")", "if",...
36.25
8.5
def start(self): """ Starts the Service. :Exceptions: - WebDriverException : Raised either when it can't start the service or when it can't connect to the service """ try: cmd = [self.path] cmd.extend(self.command_line_args()) self.process = subprocess.Popen(cmd, env=self.env, close_fds=platform.system() != 'Windows', stdout=self.log_file, stderr=self.log_file, stdin=PIPE) except TypeError: raise except OSError as err: if err.errno == errno.ENOENT: raise WebDriverException( "'%s' executable needs to be in PATH. %s" % ( os.path.basename(self.path), self.start_error_message) ) elif err.errno == errno.EACCES: raise WebDriverException( "'%s' executable may have wrong permissions. %s" % ( os.path.basename(self.path), self.start_error_message) ) else: raise except Exception as e: raise WebDriverException( "The executable %s needs to be available in the path. %s\n%s" % (os.path.basename(self.path), self.start_error_message, str(e))) count = 0 while True: self.assert_process_still_running() if self.is_connectable(): break count += 1 time.sleep(1) if count == 30: raise WebDriverException("Can not connect to the Service %s" % self.path)
[ "def", "start", "(", "self", ")", ":", "try", ":", "cmd", "=", "[", "self", ".", "path", "]", "cmd", ".", "extend", "(", "self", ".", "command_line_args", "(", ")", ")", "self", ".", "process", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "e...
40.136364
19.5
def _update_redundancy_routers(self, context, updated_router, update_specification, requested_ha_settings, updated_router_db, gateway_changed): """To be called in update_router() AFTER router has been updated in DB. """ router_requested = update_specification['router'] ha_settings_db = updated_router_db.ha_settings ha_enabled_requested = requested_ha_settings.get(ha.ENABLED, False) if not (updated_router[ha.ENABLED] or ha_enabled_requested): # No HA currently enabled and no HA requested so we're done return # The redundancy routers need interfaces on the same networks as the # user visible router. ports = self._get_router_interfaces(updated_router_db) e_context = context.elevated() if not updated_router[ha.ENABLED] and ha_enabled_requested: # No HA currently enabled but HA requested router_requested.update(requested_ha_settings) router_requested[EXTERNAL_GW_INFO] = ( updated_router[EXTERNAL_GW_INFO]) requested_ha_settings = self._ensure_create_ha_compliant( router_requested, updated_router_db.hosting_info.router_type) self._create_redundancy_routers( e_context, updated_router, requested_ha_settings, updated_router_db, ports, expire_db=True) return rr_ids = self._get_redundancy_router_ids(context, updated_router['id']) ha_details_update_spec = requested_ha_settings.get(ha.DETAILS) if (updated_router[ha.ENABLED] and not requested_ha_settings.get( ha.ENABLED, updated_router[ha.ENABLED])): # HA currently enabled but HA disable requested # delete ha settings and extra port for gateway (VIP) port self._delete_ha_group(e_context, updated_router_db.gw_port_id) self._remove_redundancy_routers(e_context, rr_ids, ports, True) with context.session.begin(subtransactions=True): context.session.delete(ha_settings_db) elif ha_details_update_spec: # HA currently enabled and HA setting update (other than # disable HA) requested old_redundancy_level = ha_settings_db.redundancy_level ha_settings_db.update(ha_details_update_spec) diff = (ha_details_update_spec.get(ha.REDUNDANCY_LEVEL, 
old_redundancy_level) - old_redundancy_level) with context.session.begin(subtransactions=True): context.session.add(ha_settings_db) if diff < 0: # Remove -diff redundancy routers #TODO(bobmel): Ensure currently active router is excluded to_remove = rr_ids[len(rr_ids) + diff:] rr_ids = rr_ids[:len(rr_ids) + diff] self._remove_redundancy_routers(e_context, to_remove, ports) elif diff > 0: # Add diff redundancy routers start = old_redundancy_level + 1 stop = start + diff self._add_redundancy_routers(e_context, start, stop, updated_router, ports, ha_settings_db, False) if gateway_changed is True: self._change_ha_for_gateway(e_context, updated_router, updated_router_db, ha_settings_db, router_requested, expire=True) else: # Notify redundancy routers about changes self.notify_routers_updated(e_context, rr_ids) elif gateway_changed is True: # HA currently enabled (and to remain so) nor any HA setting update # and gateway has changed self._change_ha_for_gateway(e_context, updated_router, updated_router_db, ha_settings_db, router_requested) # pick up updates to other attributes where it makes sense # and push - right now it is only admin_state_up. other_updates_spec = {'router': {}} if 'admin_state_up' in update_specification['router']: other_updates_spec['router']['admin_state_up'] = ( update_specification['router']['admin_state_up']) if 'name' in update_specification['router']: other_updates_spec['router']['name'] = ( update_specification['router']['name']) if (other_updates_spec['router'] or 'routes' in update_specification['router']): self._process_other_router_updates(e_context, updated_router_db, other_updates_spec) # Ensure we get latest state from DB context.session.expire(updated_router_db) self._extend_router_dict_ha(updated_router, updated_router_db)
[ "def", "_update_redundancy_routers", "(", "self", ",", "context", ",", "updated_router", ",", "update_specification", ",", "requested_ha_settings", ",", "updated_router_db", ",", "gateway_changed", ")", ":", "router_requested", "=", "update_specification", "[", "'router'"...
55.988889
20.222222
def _partition_tasks(worker): """ Takes a worker and sorts out tasks based on their status. Still_pending_not_ext is only used to get upstream_failure, upstream_missing_dependency and run_by_other_worker """ task_history = worker._add_task_history pending_tasks = {task for(task, status, ext) in task_history if status == 'PENDING'} set_tasks = {} set_tasks["completed"] = {task for (task, status, ext) in task_history if status == 'DONE' and task in pending_tasks} set_tasks["already_done"] = {task for (task, status, ext) in task_history if status == 'DONE' and task not in pending_tasks and task not in set_tasks["completed"]} set_tasks["ever_failed"] = {task for (task, status, ext) in task_history if status == 'FAILED'} set_tasks["failed"] = set_tasks["ever_failed"] - set_tasks["completed"] set_tasks["scheduling_error"] = {task for(task, status, ext) in task_history if status == 'UNKNOWN'} set_tasks["still_pending_ext"] = {task for (task, status, ext) in task_history if status == 'PENDING' and task not in set_tasks["ever_failed"] and task not in set_tasks["completed"] and not ext} set_tasks["still_pending_not_ext"] = {task for (task, status, ext) in task_history if status == 'PENDING' and task not in set_tasks["ever_failed"] and task not in set_tasks["completed"] and ext} set_tasks["run_by_other_worker"] = set() set_tasks["upstream_failure"] = set() set_tasks["upstream_missing_dependency"] = set() set_tasks["upstream_run_by_other_worker"] = set() set_tasks["upstream_scheduling_error"] = set() set_tasks["not_run"] = set() return set_tasks
[ "def", "_partition_tasks", "(", "worker", ")", ":", "task_history", "=", "worker", ".", "_add_task_history", "pending_tasks", "=", "{", "task", "for", "(", "task", ",", "status", ",", "ext", ")", "in", "task_history", "if", "status", "==", "'PENDING'", "}", ...
69.28
36.8
def get_port_profile_for_intf_input_rbridge_id(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") get_port_profile_for_intf = ET.Element("get_port_profile_for_intf") config = get_port_profile_for_intf input = ET.SubElement(get_port_profile_for_intf, "input") rbridge_id = ET.SubElement(input, "rbridge-id") rbridge_id.text = kwargs.pop('rbridge_id') callback = kwargs.pop('callback', self._callback) return callback(config)
[ "def", "get_port_profile_for_intf_input_rbridge_id", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "get_port_profile_for_intf", "=", "ET", ".", "Element", "(", "\"get_port_profile_for_intf\"", ")", "co...
43
14.916667
def _schedule_sending_init_updates(self): """Setup timer for sending best-paths for all other address-families that qualify. Setup timer for sending initial updates to peer. """ def _enqueue_non_rtc_init_updates(): LOG.debug('Scheduled queuing of initial Non-RTC UPDATEs') tm = self._core_service.table_manager self.comm_all_best_paths(tm.global_tables) self._sent_init_non_rtc_update = True # Stop the timer as we have handled RTC EOR self._rtc_eor_timer.stop() self._rtc_eor_timer = None self._sent_init_non_rtc_update = False self._rtc_eor_timer = self._create_timer( Peer.RTC_EOR_TIMER_NAME, _enqueue_non_rtc_init_updates ) # Start timer for sending initial updates self._rtc_eor_timer.start(const.RTC_EOR_DEFAULT_TIME, now=False) LOG.debug('Scheduled sending of initial Non-RTC UPDATEs after:' ' %s sec', const.RTC_EOR_DEFAULT_TIME)
[ "def", "_schedule_sending_init_updates", "(", "self", ")", ":", "def", "_enqueue_non_rtc_init_updates", "(", ")", ":", "LOG", ".", "debug", "(", "'Scheduled queuing of initial Non-RTC UPDATEs'", ")", "tm", "=", "self", ".", "_core_service", ".", "table_manager", "self...
41.2
15.16
def split(self, string, maxsplit=0): """Split string by the occurrences of pattern.""" splitlist = [] state = _State(string, 0, sys.maxint, self.flags) n = 0 last = state.start while not maxsplit or n < maxsplit: state.reset() state.string_position = state.start if not state.search(self._code): break if state.start == state.string_position: # zero-width match if last == state.end: # or end of string break state.start += 1 continue splitlist.append(string[last:state.start]) # add groups (if any) if self.groups: match = SRE_Match(self, state) # TODO: Use .extend once it is implemented. # splitlist.extend(list(match.groups(None))) splitlist += (list(match.groups(None))) n += 1 last = state.start = state.string_position splitlist.append(string[last:state.end]) return splitlist
[ "def", "split", "(", "self", ",", "string", ",", "maxsplit", "=", "0", ")", ":", "splitlist", "=", "[", "]", "state", "=", "_State", "(", "string", ",", "0", ",", "sys", ".", "maxint", ",", "self", ".", "flags", ")", "n", "=", "0", "last", "=",...
40.444444
14.185185
def _get_format(self, token): """ Returns a QTextCharFormat for token or None. """ if token in self._formats: return self._formats[token] result = self._get_format_from_style(token, self._style) self._formats[token] = result return result
[ "def", "_get_format", "(", "self", ",", "token", ")", ":", "if", "token", "in", "self", ".", "_formats", ":", "return", "self", ".", "_formats", "[", "token", "]", "result", "=", "self", ".", "_get_format_from_style", "(", "token", ",", "self", ".", "_...
29.1
14.4
def connect(self, opt): """This sets up the tokens we expect to see in a way that hvac also expects.""" if not self._kwargs['verify']: LOG.warning('Skipping SSL Validation!') self.version = self.server_version() self.token = self.init_token() my_token = self.lookup_token() if not my_token or 'data' not in my_token: raise aomi.exceptions.AomiCredentials('initial token') display_name = my_token['data']['display_name'] vsn_string = "" if self.version: vsn_string = ", v%s" % self.version else: LOG.warning("Unable to deterine Vault version. Not all " "functionality is supported") LOG.info("Connected to %s as %s%s", self._url, display_name, vsn_string) if opt.reuse_token: LOG.debug("Not creating operational token") self.initial_token = self.token self.operational_token = self.token else: self.initial_token = self.token self.operational_token = self.op_token(display_name, opt) if not self.is_authenticated(): raise aomi.exceptions.AomiCredentials('operational token') self.token = self.operational_token return self
[ "def", "connect", "(", "self", ",", "opt", ")", ":", "if", "not", "self", ".", "_kwargs", "[", "'verify'", "]", ":", "LOG", ".", "warning", "(", "'Skipping SSL Validation!'", ")", "self", ".", "version", "=", "self", ".", "server_version", "(", ")", "s...
35.078947
16.763158
def clique(graph, id): """ Returns the largest possible clique for the node with given id. """ clique = [id] for n in graph.nodes: friend = True for id in clique: if n.id == id or graph.edge(n.id, id) == None: friend = False break if friend: clique.append(n.id) return clique
[ "def", "clique", "(", "graph", ",", "id", ")", ":", "clique", "=", "[", "id", "]", "for", "n", "in", "graph", ".", "nodes", ":", "friend", "=", "True", "for", "id", "in", "clique", ":", "if", "n", ".", "id", "==", "id", "or", "graph", ".", "e...
23.4375
18.6875
def num_to_var_int(x): """ (bitcoin-specific): convert an integer into a variable-length integer """ x = int(x) if x < 253: return from_int_to_byte(x) elif x < 65536: return from_int_to_byte(253) + encode(x, 256, 2)[::-1] elif x < 4294967296: return from_int_to_byte(254) + encode(x, 256, 4)[::-1] else: return from_int_to_byte(255) + encode(x, 256, 8)[::-1]
[ "def", "num_to_var_int", "(", "x", ")", ":", "x", "=", "int", "(", "x", ")", "if", "x", "<", "253", ":", "return", "from_int_to_byte", "(", "x", ")", "elif", "x", "<", "65536", ":", "return", "from_int_to_byte", "(", "253", ")", "+", "encode", "(",...
25.625
22.625
def delete_saved_sandbox(self, context, delete_saved_apps, cancellation_context): """ Delete a saved sandbox, along with any vms associated with it :param ResourceCommandContext context: :param list[DeleteSavedApp] delete_saved_apps: :param CancellationContext cancellation_context: :return: list[SaveAppResult] save_app_results """ connection = self.command_wrapper.execute_command_with_connection(context, self.delete_saved_sandbox_command.delete_sandbox, delete_saved_apps, cancellation_context) delete_saved_apps_results = connection return delete_saved_apps_results
[ "def", "delete_saved_sandbox", "(", "self", ",", "context", ",", "delete_saved_apps", ",", "cancellation_context", ")", ":", "connection", "=", "self", ".", "command_wrapper", ".", "execute_command_with_connection", "(", "context", ",", "self", ".", "delete_saved_sand...
60.461538
24.923077
def datasets(self): """A mapping from dataset numbers to datasets in this list""" return {key: val['ds'] for key, val in six.iteritems( self._get_ds_descriptions(self.array_info(ds_description=['ds'])))}
[ "def", "datasets", "(", "self", ")", ":", "return", "{", "key", ":", "val", "[", "'ds'", "]", "for", "key", ",", "val", "in", "six", ".", "iteritems", "(", "self", ".", "_get_ds_descriptions", "(", "self", ".", "array_info", "(", "ds_description", "=",...
57
20.25
def make_var_string(string): """ Make a var-string (a var-int with the length, concatenated with the data) Return the hex-encoded string """ s = None if isinstance(string, str) and re.match('^[0-9a-fA-F]*$', string): # convert from hex to bin, safely s = binascii.unhexlify(string) else: s = string[:] buf = encoding.num_to_var_int(len(s)) + s return buf.encode('hex')
[ "def", "make_var_string", "(", "string", ")", ":", "s", "=", "None", "if", "isinstance", "(", "string", ",", "str", ")", "and", "re", ".", "match", "(", "'^[0-9a-fA-F]*$'", ",", "string", ")", ":", "# convert from hex to bin, safely", "s", "=", "binascii", ...
29.714286
16
def mid_point(self): ''' Returns the midpoint of the arc as a 1x2 numpy array. ''' midpoint_angle = self.from_angle + self.sign*self.length_degrees() / 2 return self.angle_as_point(midpoint_angle)
[ "def", "mid_point", "(", "self", ")", ":", "midpoint_angle", "=", "self", ".", "from_angle", "+", "self", ".", "sign", "*", "self", ".", "length_degrees", "(", ")", "/", "2", "return", "self", ".", "angle_as_point", "(", "midpoint_angle", ")" ]
38.5
24.5
def _http_get(self, url, query): """ Performs the HTTP GET Request. """ if not self.authorization_as_header: query.update({'access_token': self.access_token}) response = None self._normalize_query(query) kwargs = { 'params': query, 'headers': self._request_headers() } if self._has_proxy(): kwargs['proxies'] = self._proxy_parameters() response = requests.get( self._url(url), **kwargs ) if response.status_code == 429: raise RateLimitExceededError(response) return response
[ "def", "_http_get", "(", "self", ",", "url", ",", "query", ")", ":", "if", "not", "self", ".", "authorization_as_header", ":", "query", ".", "update", "(", "{", "'access_token'", ":", "self", ".", "access_token", "}", ")", "response", "=", "None", "self"...
22.137931
19.793103
def DeriveReportKey(cls, root_key, report_id, sent_timestamp): """Derive a standard one time use report signing key. The standard method is HMAC-SHA256(root_key, MAGIC_NUMBER || report_id || sent_timestamp) where MAGIC_NUMBER is 0x00000002 and all integers are in little endian. """ signed_data = struct.pack("<LLL", AuthProvider.ReportKeyMagic, report_id, sent_timestamp) hmac_calc = hmac.new(root_key, signed_data, hashlib.sha256) return bytearray(hmac_calc.digest())
[ "def", "DeriveReportKey", "(", "cls", ",", "root_key", ",", "report_id", ",", "sent_timestamp", ")", ":", "signed_data", "=", "struct", ".", "pack", "(", "\"<LLL\"", ",", "AuthProvider", ".", "ReportKeyMagic", ",", "report_id", ",", "sent_timestamp", ")", "hma...
47.090909
29.636364
def args(self): """Create args from function parameters.""" params = self.parameters args = OrderedDict() # This will be overridden if the command explicitly defines an # arg named help. args['help'] = HelpArg(command=self) normalize_name = self.normalize_name get_arg_config = self.get_arg_config get_short_option = self.get_short_option_for_arg get_long_option = self.get_long_option_for_arg get_inverse_option = self.get_inverse_option_for_arg names = {normalize_name(name) for name in params} used_short_options = set() for param in params.values(): annotation = get_arg_config(param) short_option = annotation.short_option if short_option: used_short_options.add(short_option) for name, param in params.items(): name = normalize_name(name) skip = ( name.startswith('_') or param.kind is param.VAR_KEYWORD or param.kind is param.KEYWORD_ONLY) if skip: continue annotation = get_arg_config(param) container = annotation.container type = annotation.type choices = annotation.choices help = annotation.help inverse_help = annotation.inverse_help short_option = annotation.short_option long_option = annotation.long_option inverse_option = annotation.inverse_option action = annotation.action nargs = annotation.nargs default = param.default if default is not param.empty: if not short_option: short_option = get_short_option(name, names, used_short_options) used_short_options.add(short_option) if not long_option: long_option = get_long_option(name) if not inverse_option: # NOTE: The DISABLE marker evaluates as True inverse_option = get_inverse_option(long_option) args[name] = Arg( command=self, parameter=param, name=name, container=container, type=type, default=default, choices=choices, help=help, inverse_help=inverse_help, short_option=short_option, long_option=long_option, inverse_option=inverse_option, action=action, nargs=nargs, ) option_map = OrderedDict() for arg in args.values(): for option in arg.options: option_map.setdefault(option, []) option_map[option].append(arg) for 
option, option_args in option_map.items(): if len(option_args) > 1: names = ', '.join(a.parameter.name for a in option_args) message = ( 'Option {option} of command {self.name} maps to multiple parameters: {names}') message = message.format_map(locals()) raise CommandError(message) return args
[ "def", "args", "(", "self", ")", ":", "params", "=", "self", ".", "parameters", "args", "=", "OrderedDict", "(", ")", "# This will be overridden if the command explicitly defines an", "# arg named help.", "args", "[", "'help'", "]", "=", "HelpArg", "(", "command", ...
35.655556
15.244444
def _parse_sequences(ilines, expect_qlen): """Parse the sequences in the current block. Sequence looks like: $3=227(209): >gi|15606894|ref|NP_214275.1| {|2(244)|<Aquificae(B)>}DNA polymerase III gamma subunit [Aquifex aeolicus VF5] >gi|2984127|gb|AAC07663.1| DNA polymerase III gamma subunit [Aquifex aeolicus VF5] >gi|75 {()YVPFARKYRPKFFREVIGQEAPVRILKNAIKNDRVAHaYLFAGPRGVGKTTIARILAKALNcknpskgepcgecencreiDRGVFPDLIEMDAASNRGIDDVRA-LKEAVNYKPIKG-KYKVYIIDEAHMLTKEAFNALLKTLEEPPPRTVFVLCTTEYDKILPTILSRCQRIIFSKVRKEKVIEYLKKICEKEGIECEEGALEVLAHASEGCMRDAASLLDQASVYGE()}* """ while True: first = next(ilines) if first.startswith('_') and first.endswith('].'): # End of sequences & end of block break # ENH: handle wrapped lines? try: index, this_len, query_len = _parse_seq_preheader(first) except ValueError: logging.warn('Unparseable line (SKIPPING):\n%s', first) continue (rec_id, dbxrefs, headlen, taillen, phylum, taxchar, description ) = _parse_seq_header(next(ilines)) try: headseq, molseq, tailseq = _parse_seq_body(next(ilines)) except ValueError: logging.warn('Unparseable sequence: %s -- SKIPPING', rec_id) continue # Validation if expect_qlen != query_len: logging.warn("Query length in %s given as %d; expected %d", rec_id, query_len, expect_qlen) if not headseq and not headlen: headlen = 0 if not tailseq and not taillen: taillen = 0 if headseq: if headlen is None: headlen = len(headseq) elif headlen != len(headseq): logging.warn("Conflicting head flank lengths in %s: %d, %d", rec_id, headlen, len(headseq)) if tailseq: if taillen is None: taillen = len(tailseq) elif taillen != len(tailseq): logging.warn("Conflicting tail flank lengths in %s: %d, %d", rec_id, taillen, len(tailseq)) yield {'index': index, 'id': rec_id, 'description': description, 'dbxrefs': dbxrefs, 'phylum': phylum, 'taxchar': taxchar, 'head_len': headlen, 'tail_len': taillen, 'head_seq': headseq, 'tail_seq': tailseq, 'length': this_len, 'seq': molseq, }
[ "def", "_parse_sequences", "(", "ilines", ",", "expect_qlen", ")", ":", "while", "True", ":", "first", "=", "next", "(", "ilines", ")", "if", "first", ".", "startswith", "(", "'_'", ")", "and", "first", ".", "endswith", "(", "'].'", ")", ":", "# End of...
39.8125
21.921875
def extract_fields(d, fields, delimiter='|'): """ get values out of an object ``d`` for saving to a csv """ rd = {} for f in fields: v = d.get(f, None) if isinstance(v, (str, unicode)): v = v.encode('utf8') elif isinstance(v, list): v = delimiter.join(v) rd[f] = v return rd
[ "def", "extract_fields", "(", "d", ",", "fields", ",", "delimiter", "=", "'|'", ")", ":", "rd", "=", "{", "}", "for", "f", "in", "fields", ":", "v", "=", "d", ".", "get", "(", "f", ",", "None", ")", "if", "isinstance", "(", "v", ",", "(", "st...
30.545455
12.818182
def predict(self, h=5, oos_data=None, intervals=False, **kwargs): """ Makes forecast with the estimated model Parameters ---------- h : int (default : 5) How many steps ahead would you like to forecast? oos_data : pd.DataFrame Data for the variables to be used out of sample (ys can be NaNs) intervals : boolean (default: False) Whether to return prediction intervals Returns ---------- - pd.DataFrame with predicted values """ if self.latent_variables.estimated is False: raise Exception("No latent variables estimated!") else: # Sort/manipulate the out-of-sample data _, X_oos = dmatrices(self.formula, oos_data) X_oos = np.array([X_oos])[0] X_pred = X_oos[:h] date_index = self.shift_dates(h) if self.latent_variables.estimation_method in ['M-H']: sim_vector = np.zeros([15000,h]) for n in range(0, 15000): t_z = self.draw_latent_variables(nsims=1).T[0] _, Y, _, coefficients = self._model(t_z) coefficients_star = coefficients.T[-1] theta_pred = np.dot(np.array([coefficients_star]), X_pred.T)[0] t_z = np.array([self.latent_variables.z_list[k].prior.transform(t_z[k]) for k in range(t_z.shape[0])]) model_scale, model_shape, model_skewness = self._get_scale_and_shape(t_z) sim_vector[n,:] = self.family.draw_variable(self.link(theta_pred), model_scale, model_shape, model_skewness, theta_pred.shape[0]) sim_vector = sim_vector.T forecasted_values = np.array([np.mean(i) for i in sim_vector]) prediction_01 = np.array([np.percentile(i, 1) for i in sim_vector]) prediction_05 = np.array([np.percentile(i, 5) for i in sim_vector]) prediction_95 = np.array([np.percentile(i, 95) for i in sim_vector]) prediction_99 = np.array([np.percentile(i, 99) for i in sim_vector]) else: # Retrieve data, dates and (transformed) latent variables _, Y, _, coefficients = self._model(self.latent_variables.get_z_values()) coefficients_star = coefficients.T[-1] theta_pred = np.dot(np.array([coefficients_star]), X_pred.T)[0] t_z = self.transform_z() mean_values = np.append(Y, self.link(theta_pred)) model_scale, 
model_shape, model_skewness = self._get_scale_and_shape(t_z) if self.model_name2 == "Skewt": m1 = (np.sqrt(model_shape)*sp.gamma((model_shape-1.0)/2.0))/(np.sqrt(np.pi)*sp.gamma(model_shape/2.0)) forecasted_values = mean_values[-h:] + (model_skewness - (1.0/model_skewness))*model_scale*m1 else: forecasted_values = mean_values[-h:] if intervals is False: result = pd.DataFrame(forecasted_values) result.rename(columns={0:self.data_name}, inplace=True) else: # Get mean prediction and simulations (for errors) if self.latent_variables.estimation_method not in ['M-H']: sim_values = np.zeros([15000,h]) if intervals is True: for n in range(0,15000): sim_values[n,:] = self.family.draw_variable(self.link(theta_pred),model_scale,model_shape,model_skewness,theta_pred.shape[0]) sim_values = sim_values.T prediction_01 = np.array([np.percentile(i, 1) for i in sim_values]) prediction_05 = np.array([np.percentile(i, 5) for i in sim_values]) prediction_95 = np.array([np.percentile(i, 95) for i in sim_values]) prediction_99 = np.array([np.percentile(i, 99) for i in sim_values]) result = pd.DataFrame([forecasted_values, prediction_01, prediction_05, prediction_95, prediction_99]).T result.rename(columns={0:self.data_name, 1: "1% Prediction Interval", 2: "5% Prediction Interval", 3: "95% Prediction Interval", 4: "99% Prediction Interval"}, inplace=True) result.index = date_index[-h:] return result
[ "def", "predict", "(", "self", ",", "h", "=", "5", ",", "oos_data", "=", "None", ",", "intervals", "=", "False", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "latent_variables", ".", "estimated", "is", "False", ":", "raise", "Exception", "("...
47.88172
30.043011
def get(path): """Read an object from file""" try: import cPickle as pickle except: import pickle with open(path, 'rb') as file: return pickle.load(file)
[ "def", "get", "(", "path", ")", ":", "try", ":", "import", "cPickle", "as", "pickle", "except", ":", "import", "pickle", "with", "open", "(", "path", ",", "'rb'", ")", "as", "file", ":", "return", "pickle", ".", "load", "(", "file", ")" ]
20.666667
18.666667
def variable_length_to_fixed_length_categorical( self, left_edge=4, right_edge=4, max_length=15): """ Encode variable-length sequences using a fixed-length encoding designed for preserving the anchor positions of class I peptides. The sequences must be of length at least left_edge + right_edge, and at most max_length. Parameters ---------- left_edge : int, size of fixed-position left side right_edge : int, size of the fixed-position right side max_length : sequence length of the resulting encoding Returns ------- numpy.array of integers with shape (num sequences, max_length) """ cache_key = ( "fixed_length_categorical", left_edge, right_edge, max_length) if cache_key not in self.encoding_cache: fixed_length_sequences = ( self.sequences_to_fixed_length_index_encoded_array( self.sequences, left_edge=left_edge, right_edge=right_edge, max_length=max_length)) self.encoding_cache[cache_key] = fixed_length_sequences return self.encoding_cache[cache_key]
[ "def", "variable_length_to_fixed_length_categorical", "(", "self", ",", "left_edge", "=", "4", ",", "right_edge", "=", "4", ",", "max_length", "=", "15", ")", ":", "cache_key", "=", "(", "\"fixed_length_categorical\"", ",", "left_edge", ",", "right_edge", ",", "...
36.085714
19.057143
def to_list_of_dicts(self, **kwargs): """ Convert the :class:`ParameterSet` to a list of the dictionary representation of each :class:`Parameter` :return: list of dicts """ if kwargs: return self.filter(**kwargs).to_list_of_dicts() return [param.to_dict() for param in self._params]
[ "def", "to_list_of_dicts", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "kwargs", ":", "return", "self", ".", "filter", "(", "*", "*", "kwargs", ")", ".", "to_list_of_dicts", "(", ")", "return", "[", "param", ".", "to_dict", "(", ")", "for",...
34.2
16.2
def _get_keys_defdict(self): '''Get the keys and the default dictionary of the given function's arguments ''' # inspect argspecs argspec = inspect.getargspec(self.func) keys, defvals = argspec.args, argspec.defaults # convert to (list_of_argkeys, dict_of_default_keys) if defvals is None: return keys, None else: defvals = list(defvals) keys.reverse() defvals.reverse() defdict = dict(zip(keys, defvals)) keys.reverse() return keys, defdict
[ "def", "_get_keys_defdict", "(", "self", ")", ":", "# inspect argspecs", "argspec", "=", "inspect", ".", "getargspec", "(", "self", ".", "func", ")", "keys", ",", "defvals", "=", "argspec", ".", "args", ",", "argspec", ".", "defaults", "# convert to (list_of_a...
32.222222
16.777778
def is_xml(text): """ Helper function. Lightweight test if response is an XML doc """ # BOM_UTF8 is an UTF-8 byte order mark which may precede the XML from an Exchange server bom_len = len(BOM_UTF8) if text[:bom_len] == BOM_UTF8: return text[bom_len:bom_len + 5] == b'<?xml' return text[:5] == b'<?xml'
[ "def", "is_xml", "(", "text", ")", ":", "# BOM_UTF8 is an UTF-8 byte order mark which may precede the XML from an Exchange server", "bom_len", "=", "len", "(", "BOM_UTF8", ")", "if", "text", "[", ":", "bom_len", "]", "==", "BOM_UTF8", ":", "return", "text", "[", "bo...
36.666667
15.333333
def get_samples(self, sample_count): """ Fetch a number of samples from self.wave_cache Args: sample_count (int): Number of samples to fetch Returns: ndarray """ if self.amplitude.value <= 0: return None # Build samples by rolling the period cache through the buffer rolled_array = numpy.roll(self.wave_cache, -1 * self.last_played_sample) # Append remaining partial period full_count, remainder = divmod(sample_count, self.cache_length) final_subarray = rolled_array[:int(remainder)] return_array = numpy.concatenate((numpy.tile(rolled_array, full_count), final_subarray)) # Keep track of where we left off to prevent popping between chunks self.last_played_sample = int(((self.last_played_sample + remainder) % self.cache_length)) # Multiply output by amplitude return return_array * (self.amplitude.value * self.amplitude_multiplier)
[ "def", "get_samples", "(", "self", ",", "sample_count", ")", ":", "if", "self", ".", "amplitude", ".", "value", "<=", "0", ":", "return", "None", "# Build samples by rolling the period cache through the buffer", "rolled_array", "=", "numpy", ".", "roll", "(", "sel...
44.48
18.72
def server(self, parsed_args): """Server.""" server_args = vars(self) server_args['bind_addr'] = parsed_args['bind_addr'] if parsed_args.max is not None: server_args['maxthreads'] = parsed_args.max if parsed_args.numthreads is not None: server_args['minthreads'] = parsed_args.numthreads return server.HTTPServer(**server_args)
[ "def", "server", "(", "self", ",", "parsed_args", ")", ":", "server_args", "=", "vars", "(", "self", ")", "server_args", "[", "'bind_addr'", "]", "=", "parsed_args", "[", "'bind_addr'", "]", "if", "parsed_args", ".", "max", "is", "not", "None", ":", "ser...
43.444444
9.777778
def get_work_units(self, work_spec_name, work_unit_keys=None, state=None, limit=None, start=None): '''Get (key, value) pairs for work units. If `state` is not :const:`None`, then it should be one of the string state constants, and this function will return a list of pairs of work unit key and value for work units in that state. If `start` is not :const:`None`, then this many work units are skipped; if `limit` is not :const:`None` then at most this many work units will be returned. If `state` is :const:`None` then all work units in all states will be returned. :param str work_spec_name: name of work spec to query :param str state: string state constant, or :const:`None` for all work units in all states :param int limit: maximum number of items to return :param int start: skip this many items before returning any :return: list of pairs of (work unit key, work unit data) ''' if work_unit_keys is not None: raise NotImplementedError("get_work_units(by work_unit_keys)") if start is None: start = 0 if state is not None: if state == AVAILABLE: return self.list_available_work_units( work_spec_name, start=start, limit=limit).items() if state == PENDING: return self.list_pending_work_units( work_spec_name, start=start, limit=limit).items() if state == BLOCKED: return self.list_blocked_work_units( work_spec_name, start=start, limit=limit).items() if state == FINISHED: return self.list_finished_work_units( work_spec_name, start=start, limit=limit).items() if state == FAILED: return self.list_failed_work_units( work_spec_name, start=start, limit=limit).items() raise ProgrammerError("unknown state {0!r}".format(state)) # TODO: correctly handle start/limit for the case where # we're trying to list everything (unqualified)...this is # actually kind of limited utility work_units = {} work_units.update(self.list_work_units(work_spec_name)) work_units.update(self.list_blocked_work_units(work_spec_name)) work_units.update(self.list_finished_work_units(work_spec_name)) 
work_units.update(self.list_failed_work_units(work_spec_name)) return work_units.items()
[ "def", "get_work_units", "(", "self", ",", "work_spec_name", ",", "work_unit_keys", "=", "None", ",", "state", "=", "None", ",", "limit", "=", "None", ",", "start", "=", "None", ")", ":", "if", "work_unit_keys", "is", "not", "None", ":", "raise", "NotImp...
49.019231
20.788462
def first_consumed_mesh(self): """The first consumed mesh. :return: the first consumed mesh :rtype: knittingpattern.Mesh.Mesh :raises IndexError: if no mesh is consumed .. seealso:: :attr:`number_of_consumed_meshes` """ for instruction in self.instructions: if instruction.consumes_meshes(): return instruction.first_consumed_mesh raise IndexError("{} consumes no meshes".format(self))
[ "def", "first_consumed_mesh", "(", "self", ")", ":", "for", "instruction", "in", "self", ".", "instructions", ":", "if", "instruction", ".", "consumes_meshes", "(", ")", ":", "return", "instruction", ".", "first_consumed_mesh", "raise", "IndexError", "(", "\"{} ...
35.923077
12.384615
def _makeTags(tagStr, xml): """Internal helper to construct opening and closing tag expressions, given a tag name""" if isinstance(tagStr,basestring): resname = tagStr tagStr = Keyword(tagStr, caseless=not xml) else: resname = tagStr.name tagAttrName = Word(alphas,alphanums+"_-:") if (xml): tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes ) openTag = Suppress("<") + tagStr("tag") + \ Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \ Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") else: printablesLessRAbrack = "".join(c for c in printables if c not in ">") tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack) openTag = Suppress("<") + tagStr("tag") + \ Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \ Optional( Suppress("=") + tagAttrValue ) ))) + \ Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") closeTag = Combine(_L("</") + tagStr + ">") openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % tagStr) closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % tagStr) openTag.tag = resname closeTag.tag = resname return openTag, closeTag
[ "def", "_makeTags", "(", "tagStr", ",", "xml", ")", ":", "if", "isinstance", "(", "tagStr", ",", "basestring", ")", ":", "resname", "=", "tagStr", "tagStr", "=", "Keyword", "(", "tagStr", ",", "caseless", "=", "not", "xml", ")", "else", ":", "resname",...
56.25
31.642857
def downgrade(): """alexm: i believe this method is never called""" with op.batch_alter_table(t2_name) as batch_op: batch_op.drop_column('do_not_use') with op.batch_alter_table(t1_name) as batch_op: batch_op.drop_column('enabled')
[ "def", "downgrade", "(", ")", ":", "with", "op", ".", "batch_alter_table", "(", "t2_name", ")", "as", "batch_op", ":", "batch_op", ".", "drop_column", "(", "'do_not_use'", ")", "with", "op", ".", "batch_alter_table", "(", "t1_name", ")", "as", "batch_op", ...
36.142857
12.714286
def extract_energy(rate, sig): """ Extracts the energy of frames. """ mfcc = python_speech_features.mfcc(sig, rate, appendEnergy=True) energy_row_vec = mfcc[:, 0] energy_col_vec = energy_row_vec[:, np.newaxis] return energy_col_vec
[ "def", "extract_energy", "(", "rate", ",", "sig", ")", ":", "mfcc", "=", "python_speech_features", ".", "mfcc", "(", "sig", ",", "rate", ",", "appendEnergy", "=", "True", ")", "energy_row_vec", "=", "mfcc", "[", ":", ",", "0", "]", "energy_col_vec", "=",...
35.142857
16
def list_all_shipping_methods(cls, **kwargs): """List ShippingMethods Return a list of ShippingMethods This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.list_all_shipping_methods(async=True) >>> result = thread.get() :param async bool :param int page: page number :param int size: page size :param str sort: page order :return: page[ShippingMethod] If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async'): return cls._list_all_shipping_methods_with_http_info(**kwargs) else: (data) = cls._list_all_shipping_methods_with_http_info(**kwargs) return data
[ "def", "list_all_shipping_methods", "(", "cls", ",", "*", "*", "kwargs", ")", ":", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'async'", ")", ":", "return", "cls", ".", "_list_all_shipping_methods_with_http_info...
38.478261
15.173913
def target(self): """ Target the current space for any forthcoming Cloud Foundry operations. """ # MAINT: I don't like this, but will deal later os.environ['PREDIX_SPACE_GUID'] = self.guid os.environ['PREDIX_SPACE_NAME'] = self.name os.environ['PREDIX_ORGANIZATION_GUID'] = self.org.guid os.environ['PREDIX_ORGANIZATION_NAME'] = self.org.name
[ "def", "target", "(", "self", ")", ":", "# MAINT: I don't like this, but will deal later", "os", ".", "environ", "[", "'PREDIX_SPACE_GUID'", "]", "=", "self", ".", "guid", "os", ".", "environ", "[", "'PREDIX_SPACE_NAME'", "]", "=", "self", ".", "name", "os", "...
40.5
15.1
def save(self): """ Creates / updates a row. This is a blind insert call. All validation and cleaning needs to happen prior to calling this. """ if self.instance is None: raise CQLEngineException("DML Query intance attribute is None") assert type(self.instance) == self.model nulled_fields = set() if self.instance._has_counter or self.instance._can_update(): if self.instance._has_counter: warn("'create' and 'save' actions on Counters are deprecated. It will be disallowed in 4.0. " "Use the 'update' mechanism instead.", DeprecationWarning) return self.update() else: insert = InsertStatement(self.column_family_name, ttl=self._ttl, timestamp=self._timestamp, if_not_exists=self._if_not_exists) static_save_only = False if len(self.instance._clustering_keys) == 0 else True for name, col in self.instance._clustering_keys.items(): static_save_only = static_save_only and col._val_is_null(getattr(self.instance, name, None)) for name, col in self.instance._columns.items(): if static_save_only and not col.static and not col.partition_key: continue val = getattr(self.instance, name, None) if col._val_is_null(val): if self.instance._values[name].changed: nulled_fields.add(col.db_field_name) continue if col.has_default and not self.instance._values[name].changed: # Ensure default columns included in a save() are marked as explicit, to get them *persisted* properly self.instance._values[name].explicit = True insert.add_assignment(col, getattr(self.instance, name, None)) # skip query execution if it's empty # caused by pointless update queries if not insert.is_empty: self._execute(insert) # delete any nulled columns if not static_save_only: self._delete_null_columns()
[ "def", "save", "(", "self", ")", ":", "if", "self", ".", "instance", "is", "None", ":", "raise", "CQLEngineException", "(", "\"DML Query intance attribute is None\"", ")", "assert", "type", "(", "self", ".", "instance", ")", "==", "self", ".", "model", "null...
50.738095
23.261905
def press(self, coordinate, success=None): """Success must be given as a tuple of a (coordinate, timeout). Use (coordinate,) if you want to use the default timeout.""" if isinstance(coordinate, WebElement): coordinate.click() else: self.get_element(coordinate).click() if success is not None: assert self.is_available(*success)
[ "def", "press", "(", "self", ",", "coordinate", ",", "success", "=", "None", ")", ":", "if", "isinstance", "(", "coordinate", ",", "WebElement", ")", ":", "coordinate", ".", "click", "(", ")", "else", ":", "self", ".", "get_element", "(", "coordinate", ...
43.888889
7.555556
def srem(self, key, member, *members): """Remove one or more members from a set.""" return self.execute(b'SREM', key, member, *members)
[ "def", "srem", "(", "self", ",", "key", ",", "member", ",", "*", "members", ")", ":", "return", "self", ".", "execute", "(", "b'SREM'", ",", "key", ",", "member", ",", "*", "members", ")" ]
49.666667
7
def get_data_or_404(model, instance_id, kind=''): """Wrap `get_data`, when missing data, raise BadRequest. """ data = get_data(model, instance_id, kind) if not data: return abort(404) return data
[ "def", "get_data_or_404", "(", "model", ",", "instance_id", ",", "kind", "=", "''", ")", ":", "data", "=", "get_data", "(", "model", ",", "instance_id", ",", "kind", ")", "if", "not", "data", ":", "return", "abort", "(", "404", ")", "return", "data" ]
24.111111
17.555556
def cfg_(self,cfg=None): """ Getter/Setter of configuration data. This can be used to update and modify the configuration file on the system by new applications. """ if cfg is None: cfg = self._cfg else: self._cfg = cfg self.overlay_load() return cfg
[ "def", "cfg_", "(", "self", ",", "cfg", "=", "None", ")", ":", "if", "cfg", "is", "None", ":", "cfg", "=", "self", ".", "_cfg", "else", ":", "self", ".", "_cfg", "=", "cfg", "self", ".", "overlay_load", "(", ")", "return", "cfg" ]
27.916667
14.916667
def Extra(self): """ Returns any `V`, `P`, `DOI` or `misc` values as a string. These are all the values not returned by [ID()](#metaknowledge.citation.Citation.ID), they are separated by `' ,'`. # Returns `str` > A string containing the data not in the ID of the `Citation`. """ extraTags = ['V', 'P', 'DOI', 'misc'] retVal = "" for tag in extraTags: if getattr(self, tag): retVal += getattr(self, tag) + ', ' if len(retVal) > 2: return retVal[:-2] else: return retVal
[ "def", "Extra", "(", "self", ")", ":", "extraTags", "=", "[", "'V'", ",", "'P'", ",", "'DOI'", ",", "'misc'", "]", "retVal", "=", "\"\"", "for", "tag", "in", "extraTags", ":", "if", "getattr", "(", "self", ",", "tag", ")", ":", "retVal", "+=", "g...
31.210526
25.526316
def update(self, **kwargs): """ Overrides update to concatenate streamed data up to defined length. """ data = kwargs.get('data') if data is not None: if (util.pd and isinstance(data, util.pd.DataFrame) and list(data.columns) != list(self.data.columns) and self._index): data = data.reset_index() self.verify(data) kwargs['data'] = self._concat(data) self._count += 1 super(Buffer, self).update(**kwargs)
[ "def", "update", "(", "self", ",", "*", "*", "kwargs", ")", ":", "data", "=", "kwargs", ".", "get", "(", "'data'", ")", "if", "data", "is", "not", "None", ":", "if", "(", "util", ".", "pd", "and", "isinstance", "(", "data", ",", "util", ".", "p...
40
12.923077
def exit(self): """Overwrite the exit method to close threads.""" if self._thread is not None: self._thread.stop() # Call the father class super(Plugin, self).exit()
[ "def", "exit", "(", "self", ")", ":", "if", "self", ".", "_thread", "is", "not", "None", ":", "self", ".", "_thread", ".", "stop", "(", ")", "# Call the father class", "super", "(", "Plugin", ",", "self", ")", ".", "exit", "(", ")" ]
34
8.833333
def is_title(p): """ Certain p tags are denoted as ``Title`` tags. This function will return True if the passed in p tag is considered a title. """ w_namespace = get_namespace(p, 'w') styles = p.xpath('.//w:pStyle', namespaces=p.nsmap) if len(styles) == 0: return False style = styles[0] return style.get('%sval' % w_namespace) == 'Title'
[ "def", "is_title", "(", "p", ")", ":", "w_namespace", "=", "get_namespace", "(", "p", ",", "'w'", ")", "styles", "=", "p", ".", "xpath", "(", "'.//w:pStyle'", ",", "namespaces", "=", "p", ".", "nsmap", ")", "if", "len", "(", "styles", ")", "==", "0...
33.818182
14.363636
def _fsic_queuing_calc(fsic1, fsic2): """ We set the lower counter between two same instance ids. If an instance_id exists in one fsic but not the other we want to give that counter a value of 0. :param fsic1: dictionary containing (instance_id, counter) pairs :param fsic2: dictionary containing (instance_id, counter) pairs :return ``dict`` of fsics to be used in queueing the correct records to the buffer """ return {instance: fsic2.get(instance, 0) for instance, counter in six.iteritems(fsic1) if fsic2.get(instance, 0) < counter}
[ "def", "_fsic_queuing_calc", "(", "fsic1", ",", "fsic2", ")", ":", "return", "{", "instance", ":", "fsic2", ".", "get", "(", "instance", ",", "0", ")", "for", "instance", ",", "counter", "in", "six", ".", "iteritems", "(", "fsic1", ")", "if", "fsic2", ...
56
31.2
def getDomainFromUrl(self, url): """ Extracting the domain from the URL. :return: domain as a string. """ try: domain = re.findall( self.domainRegexp, url )[0] except Exception, e: errMsg = "ERROR. Something happened when trying to find the domain from <" + url + ">. Are you sure that the following regular expression matches a domain in the url provided?\n\t" + self.domainRegexp raise Exception( errMsg + "\n" + str(e) ) return domain
[ "def", "getDomainFromUrl", "(", "self", ",", "url", ")", ":", "try", ":", "domain", "=", "re", ".", "findall", "(", "self", ".", "domainRegexp", ",", "url", ")", "[", "0", "]", "except", "Exception", ",", "e", ":", "errMsg", "=", "\"ERROR. Something ha...
43
25.153846
def _convert_for_reindex(self, key, axis=None): """ Transform a list of keys into a new array ready to be used as axis of the object we return (e.g. including NaNs). Parameters ---------- key : list-like Target labels axis: int Where the indexing is being made Returns ------- list-like of labels """ if axis is None: axis = self.axis or 0 labels = self.obj._get_axis(axis) if com.is_bool_indexer(key): key = check_bool_indexer(labels, key) return labels[key] if isinstance(key, Index): keyarr = labels._convert_index_indexer(key) else: # asarray can be unsafe, NumPy strings are weird keyarr = com.asarray_tuplesafe(key) if is_integer_dtype(keyarr): # Cast the indexer to uint64 if possible so # that the values returned from indexing are # also uint64. keyarr = labels._convert_arr_indexer(keyarr) if not labels.is_integer(): keyarr = ensure_platform_int(keyarr) return labels.take(keyarr) return keyarr
[ "def", "_convert_for_reindex", "(", "self", ",", "key", ",", "axis", "=", "None", ")", ":", "if", "axis", "is", "None", ":", "axis", "=", "self", ".", "axis", "or", "0", "labels", "=", "self", ".", "obj", ".", "_get_axis", "(", "axis", ")", "if", ...
28.666667
18.142857
def _ep_active(self): """Both ends of the Endpoint have become active.""" LOG.debug("Connection is up") if self._handler: with self._callback_lock: self._handler.connection_active(self)
[ "def", "_ep_active", "(", "self", ")", ":", "LOG", ".", "debug", "(", "\"Connection is up\"", ")", "if", "self", ".", "_handler", ":", "with", "self", ".", "_callback_lock", ":", "self", ".", "_handler", ".", "connection_active", "(", "self", ")" ]
38.666667
8.833333
def FileHashIndexQuery(self, subject, target_prefix, limit=100): """Search the index for matches starting with target_prefix. Args: subject: The index to use. Should be a urn that points to the sha256 namespace. target_prefix: The prefix to match against the index. limit: Either a tuple of (start, limit) or a maximum number of results to return. Yields: URNs of files which have the same data as this file - as read from the index. """ if isinstance(limit, (tuple, list)): start, length = limit # pylint: disable=unpacking-non-sequence else: start = 0 length = limit prefix = (DataStore.FILE_HASH_TEMPLATE % target_prefix).lower() results = self.ResolvePrefix(subject, prefix, limit=limit) for i, (_, hit, _) in enumerate(results): if i < start: continue if i >= start + length: break yield rdfvalue.RDFURN(hit)
[ "def", "FileHashIndexQuery", "(", "self", ",", "subject", ",", "target_prefix", ",", "limit", "=", "100", ")", ":", "if", "isinstance", "(", "limit", ",", "(", "tuple", ",", "list", ")", ")", ":", "start", ",", "length", "=", "limit", "# pylint: disable=...
32.034483
24.068966