text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def insert(self, item, low_value): """ Create a new node and insert it into a sorted list. Calls the item duplicator, if any, on the item. If low_value is true, starts searching from the start of the list, otherwise searches from the end. Use the item comparator, if any, to find where to place the new node. Returns a handle to the new node, or NULL if memory was exhausted. Resets the cursor to the list head. """ return c_void_p(lib.zlistx_insert(self._as_parameter_, item, low_value))
[ "def", "insert", "(", "self", ",", "item", ",", "low_value", ")", ":", "return", "c_void_p", "(", "lib", ".", "zlistx_insert", "(", "self", ".", "_as_parameter_", ",", "item", ",", "low_value", ")", ")" ]
51.1
24.1
def createissue(self, project_id, title, **kwargs): """ Create a new issue :param project_id: project id :param title: title of the issue :return: dict with the issue created """ data = {'id': id, 'title': title} if kwargs: data.update(kwargs) request = requests.post( '{0}/{1}/issues'.format(self.projects_url, project_id), headers=self.headers, data=data, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout) if request.status_code == 201: return request.json() else: return False
[ "def", "createissue", "(", "self", ",", "project_id", ",", "title", ",", "*", "*", "kwargs", ")", ":", "data", "=", "{", "'id'", ":", "id", ",", "'title'", ":", "title", "}", "if", "kwargs", ":", "data", ".", "update", "(", "kwargs", ")", "request"...
32.789474
15.631579
def init_config(app): """Initialize configuration. .. note:: If CairoSVG is installed then the configuration ``FORMATTER_BADGES_ENABLE`` is ``True``. :param app: The Flask application. """ try: get_distribution('CairoSVG') has_cairo = True except DistributionNotFound: has_cairo = False app.config.setdefault('FORMATTER_BADGES_ENABLE', has_cairo) for attr in dir(config): if attr.startswith('FORMATTER_'): app.config.setdefault(attr, getattr(config, attr))
[ "def", "init_config", "(", "app", ")", ":", "try", ":", "get_distribution", "(", "'CairoSVG'", ")", "has_cairo", "=", "True", "except", "DistributionNotFound", ":", "has_cairo", "=", "False", "app", ".", "config", ".", "setdefault", "(", "'FORMATTER_BADGES_ENABL...
30.631579
17.842105
def resize_image(image, height, width, channels=None, resize_mode=None ): """ Resizes an image and returns it as a np.array Arguments: image -- a PIL.Image or numpy.ndarray height -- height of new image width -- width of new image Keyword Arguments: channels -- channels of new image (stays unchanged if not specified) resize_mode -- can be crop, squash, fill or half_crop """ if resize_mode is None: resize_mode = 'squash' if resize_mode not in ['crop', 'squash', 'fill', 'half_crop']: raise ValueError('resize_mode "%s" not supported' % resize_mode) if channels not in [None, 1, 3]: raise ValueError('unsupported number of channels: %s' % channels) if isinstance(image, PIL.Image.Image): # Convert image mode (channels) if channels is None: image_mode = image.mode if image_mode == 'L': channels = 1 elif image_mode == 'RGB': channels = 3 else: raise ValueError('unknown image mode "%s"' % image_mode) elif channels == 1: # 8-bit pixels, black and white image_mode = 'L' elif channels == 3: # 3x8-bit pixels, true color image_mode = 'RGB' if image.mode != image_mode: image = image.convert(image_mode) image = np.array(image) elif isinstance(image, np.ndarray): if image.dtype != np.uint8: image = image.astype(np.uint8) if image.ndim == 3 and image.shape[2] == 1: image = image.reshape(image.shape[:2]) if channels is None: if image.ndim == 2: channels = 1 elif image.ndim == 3 and image.shape[2] == 3: channels = 3 else: raise ValueError('invalid image shape: %s' % (image.shape,)) elif channels == 1: if image.ndim != 2: if image.ndim == 3 and image.shape[2] == 3: # color to grayscale image = np.dot(image, [0.299, 0.587, 0.114]).astype(np.uint8) else: raise ValueError('invalid image shape: %s' % (image.shape,)) elif channels == 3: if image.ndim == 2: # grayscale to color image = np.repeat(image, 3).reshape(image.shape + (3,)) elif image.shape[2] != 3: raise ValueError('invalid image shape: %s' % (image.shape,)) else: raise ValueError('resize_image() expected a PIL.Image.Image or a 
numpy.ndarray') # No need to resize if image.shape[0] == height and image.shape[1] == width: return image # Resize interp = 'bilinear' width_ratio = float(image.shape[1]) / width height_ratio = float(image.shape[0]) / height if resize_mode == 'squash' or width_ratio == height_ratio: return imresize(image, (height, width), interp=interp) elif resize_mode == 'crop': # resize to smallest of ratios (relatively larger image), keeping aspect ratio if width_ratio > height_ratio: resize_height = height resize_width = int(round(image.shape[1] / height_ratio)) else: resize_width = width resize_height = int(round(image.shape[0] / width_ratio)) image = imresize(image, (resize_height, resize_width), interp=interp) # chop off ends of dimension that is still too long if width_ratio > height_ratio: start = int(round((resize_width - width) / 2.0)) return image[:, start:start + width] else: start = int(round((resize_height - height) / 2.0)) return image[start:start + height, :] else: if resize_mode == 'fill': # resize to biggest of ratios (relatively smaller image), keeping aspect ratio if width_ratio > height_ratio: resize_width = width resize_height = int(round(image.shape[0] / width_ratio)) if (height - resize_height) % 2 == 1: resize_height += 1 else: resize_height = height resize_width = int(round(image.shape[1] / height_ratio)) if (width - resize_width) % 2 == 1: resize_width += 1 image = imresize(image, (resize_height, resize_width), interp=interp) elif resize_mode == 'half_crop': # resize to average ratio keeping aspect ratio new_ratio = (width_ratio + height_ratio) / 2.0 resize_width = int(round(image.shape[1] / new_ratio)) resize_height = int(round(image.shape[0] / new_ratio)) if width_ratio > height_ratio and (height - resize_height) % 2 == 1: resize_height += 1 elif width_ratio < height_ratio and (width - resize_width) % 2 == 1: resize_width += 1 image = imresize(image, (resize_height, resize_width), interp=interp) # chop off ends of dimension that is still too long if 
width_ratio > height_ratio: start = int(round((resize_width - width) / 2.0)) image = image[:, start:start + width] else: start = int(round((resize_height - height) / 2.0)) image = image[start:start + height, :] else: raise Exception('unrecognized resize_mode "%s"' % resize_mode) # fill ends of dimension that is too short with random noise if width_ratio > height_ratio: padding = (height - resize_height) / 2 noise_size = (padding, width) if channels > 1: noise_size += (channels,) noise = np.random.randint(0, 255, noise_size).astype('uint8') image = np.concatenate((noise, image, noise), axis=0) else: padding = (width - resize_width) / 2 noise_size = (height, padding) if channels > 1: noise_size += (channels,) noise = np.random.randint(0, 255, noise_size).astype('uint8') image = np.concatenate((noise, image, noise), axis=1) return image
[ "def", "resize_image", "(", "image", ",", "height", ",", "width", ",", "channels", "=", "None", ",", "resize_mode", "=", "None", ")", ":", "if", "resize_mode", "is", "None", ":", "resize_mode", "=", "'squash'", "if", "resize_mode", "not", "in", "[", "'cr...
41.878378
17.567568
def on_treeview_delete_selection(self, event=None): """Removes selected items from treeview""" tv = self.treeview selection = tv.selection() # Need to remove filter self.filter_remove(remember=True) toplevel_items = tv.get_children() parents_to_redraw = set() for item in selection: try: parent = '' if item not in toplevel_items: parent = self.get_toplevel_parent(item) else: self.previewer.delete(item) del self.treedata[item] tv.delete(item) self.app.set_changed() if parent: self._update_max_grid_rc(parent) parents_to_redraw.add(parent) self.widget_editor.hide_all() except tk.TclError: # Selection of parent and child items ?? # TODO: notify something here pass # redraw widgets for item in parents_to_redraw: self.draw_widget(item) # restore filter self.filter_restore()
[ "def", "on_treeview_delete_selection", "(", "self", ",", "event", "=", "None", ")", ":", "tv", "=", "self", ".", "treeview", "selection", "=", "tv", ".", "selection", "(", ")", "# Need to remove filter", "self", ".", "filter_remove", "(", "remember", "=", "T...
33.382353
12.382353
def list_resources(self, session, query='?*::INSTR'): """Returns a tuple of all connected devices matching query. :param query: regular expression used to match devices. """ # For each session type, ask for the list of connected resources and # merge them into a single list. resources = sum([st.list_resources() for key, st in sessions.Session.iter_valid_session_classes()], []) resources = rname.filter(resources, query) return resources
[ "def", "list_resources", "(", "self", ",", "session", ",", "query", "=", "'?*::INSTR'", ")", ":", "# For each session type, ask for the list of connected resources and", "# merge them into a single list.", "resources", "=", "sum", "(", "[", "st", ".", "list_resources", "(...
34.6
23.533333
def qtemporal(dt, **meta): '''Converts a `numpy.datetime64` or `numpy.timedelta64` to :class:`.QTemporal` and enriches object instance with given meta data. Examples: >>> qtemporal(numpy.datetime64('2001-01-01', 'D'), qtype=QDATE) 2001-01-01 [metadata(qtype=-14)] >>> qtemporal(numpy.timedelta64(43499123, 'ms'), qtype=QTIME) 43499123 milliseconds [metadata(qtype=-19)] >>> qtemporal(qnull(QDATETIME), qtype=QDATETIME) nan [metadata(qtype=-15)] :Parameters: - `dt` (`numpy.datetime64` or `numpy.timedelta64`) - datetime to be wrapped :Kwargs: - `qtype` (`integer`) - qtype indicator :returns: `QTemporal` - wrapped datetime ''' result = QTemporal(dt) result._meta_init(**meta) return result
[ "def", "qtemporal", "(", "dt", ",", "*", "*", "meta", ")", ":", "result", "=", "QTemporal", "(", "dt", ")", "result", ".", "_meta_init", "(", "*", "*", "meta", ")", "return", "result" ]
34
22.434783
def config(self, handle, attributes=None, **kwattrs): """Sets or modifies one or more object attributes or relations. Arguments can be supplied either as a dictionary or as keyword arguments. Examples: stc.config('port1', location='//10.1.2.3/1/1') stc.config('port2', {'location': '//10.1.2.3/1/2'}) Arguments: handle -- Handle of object to modify. attributes -- Dictionary of attributes (name-value pairs). kwattrs -- Optional keyword attributes (name=value pairs). """ self._check_session() if kwattrs: if attributes: attributes.update(kwattrs) else: attributes = kwattrs self._rest.put_request('objects', str(handle), attributes)
[ "def", "config", "(", "self", ",", "handle", ",", "attributes", "=", "None", ",", "*", "*", "kwattrs", ")", ":", "self", ".", "_check_session", "(", ")", "if", "kwattrs", ":", "if", "attributes", ":", "attributes", ".", "update", "(", "kwattrs", ")", ...
37.714286
19.142857
def execute_code(self, lines, current_client=True, clear_variables=False): """Execute code instructions.""" sw = self.get_current_shellwidget() if sw is not None: if sw._reading: pass else: if not current_client: # Clear console and reset namespace for # dedicated clients # See issue 5748 try: sw.sig_prompt_ready.disconnect() except TypeError: pass sw.reset_namespace(warning=False) elif current_client and clear_variables: sw.reset_namespace(warning=False) # Needed to handle an error when kernel_client is none # See issue 6308 try: sw.execute(to_text_string(lines)) except AttributeError: pass self.activateWindow() self.get_current_client().get_control().setFocus()
[ "def", "execute_code", "(", "self", ",", "lines", ",", "current_client", "=", "True", ",", "clear_variables", "=", "False", ")", ":", "sw", "=", "self", ".", "get_current_shellwidget", "(", ")", "if", "sw", "is", "not", "None", ":", "if", "sw", ".", "_...
41.653846
12.846154
def get_objective_admin_session(self, proxy, *args, **kwargs): """Gets the ``OsidSession`` associated with the objective administration service. :param proxy: a proxy :type proxy: ``osid.proxy.Proxy`` :return: an ``ObjectiveAdminSession`` :rtype: ``osid.learning.ObjectiveAdminSession`` :raise: ``NullArgument`` -- ``proxy`` is ``null`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``Unimplemented`` -- ``supports_objective_admin()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_objective_admin()`` is ``true``.* """ if not self.supports_objective_admin(): raise Unimplemented() try: from . import sessions except ImportError: raise OperationFailed() proxy = self._convert_proxy(proxy) try: session = sessions.ObjectiveAdminSession(proxy=proxy, runtime=self._runtime) except AttributeError: raise OperationFailed() return session
[ "def", "get_objective_admin_session", "(", "self", ",", "proxy", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "self", ".", "supports_objective_admin", "(", ")", ":", "raise", "Unimplemented", "(", ")", "try", ":", "from", ".", "impor...
40.884615
19.423077
def row_to_dict(self, row): ''' translate a row of the current table to dictionary :param row: a row of the current table (selected with \\*) :return: dictionary of all fields ''' res = {} for i in range(len(self._fields)): res[self._fields[i][0]] = row[i] return res
[ "def", "row_to_dict", "(", "self", ",", "row", ")", ":", "res", "=", "{", "}", "for", "i", "in", "range", "(", "len", "(", "self", ".", "_fields", ")", ")", ":", "res", "[", "self", ".", "_fields", "[", "i", "]", "[", "0", "]", "]", "=", "r...
30.363636
18.909091
def sn(self): """Read the Serial Number string. This method is only available on OPC-N2 firmware versions 18+. :rtype: string :Example: >>> alpha.sn() 'OPC-N2 123456789' """ string = [] # Send the command byte and sleep for 9 ms self.cnxn.xfer([0x10]) sleep(9e-3) # Read the info string by sending 60 empty bytes for i in range(60): resp = self.cnxn.xfer([0x00])[0] string.append(chr(resp)) sleep(0.1) return ''.join(string)
[ "def", "sn", "(", "self", ")", ":", "string", "=", "[", "]", "# Send the command byte and sleep for 9 ms", "self", ".", "cnxn", ".", "xfer", "(", "[", "0x10", "]", ")", "sleep", "(", "9e-3", ")", "# Read the info string by sending 60 empty bytes", "for", "i", ...
22.04
20.84
def move(self, from_path, to_path, **kwargs): """移动单个文件或目录. :param from_path: 源文件/目录在网盘中的路径(包括文件名)。 .. warning:: * 路径长度限制为1000; * 径中不能包含以下字符:``\\\\ ? | " > < : *``; * 文件名或路径名开头结尾不能是 ``.`` 或空白字符,空白字符包括: ``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。 :param to_path: 目标文件/目录在网盘中的路径(包括文件名)。 .. warning:: * 路径长度限制为1000; * 径中不能包含以下字符:``\\\\ ? | " > < : *``; * 文件名或路径名开头结尾不能是 ``.`` 或空白字符,空白字符包括: ``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。 :return: Response 对象 """ data = { 'from': from_path, 'to': to_path, } return self._request('file', 'move', data=data, **kwargs)
[ "def", "move", "(", "self", ",", "from_path", ",", "to_path", ",", "*", "*", "kwargs", ")", ":", "data", "=", "{", "'from'", ":", "from_path", ",", "'to'", ":", "to_path", ",", "}", "return", "self", ".", "_request", "(", "'file'", ",", "'move'", "...
35.407407
16.222222
def get_error(exc): """ Return the appropriate HTTP status code according to the Exception/Error. """ if isinstance(exc, HTTPError): # Returning the HTTP Error code coming from requests module return exc.response.status_code, text(exc.response.content) if isinstance(exc, Timeout): # A timeout is a 408, and it's not a HTTPError (why? dunno). return 408, exc if isinstance(exc, Http404): # 404 is 404 return 404, exc if isinstance(exc, PermissionDenied): # Permission denied is 403 return 403, exc if isinstance(exc, SuspiciousOperation): # Shouldn't happen, but you never know return 400, exc # The default error code is 500 return 500, exc
[ "def", "get_error", "(", "exc", ")", ":", "if", "isinstance", "(", "exc", ",", "HTTPError", ")", ":", "# Returning the HTTP Error code coming from requests module", "return", "exc", ".", "response", ".", "status_code", ",", "text", "(", "exc", ".", "response", "...
27.444444
19.740741
def add_namespace_statistics(self, namespace, offset, data_points, byte_count): """Update namespace statistics for the period identified by offset""" query = 'UPDATE gauged_statistics ' \ 'SET data_points = data_points + %s,' \ 'byte_count = byte_count + %s WHERE namespace = %s ' \ 'AND "offset" = %s; INSERT INTO gauged_statistics ' \ 'SELECT %s, %s, %s, %s WHERE NOT EXISTS (' \ 'SELECT 1 FROM gauged_statistics WHERE namespace = %s' \ 'AND "offset" = %s)' self.cursor.execute(query, (data_points, byte_count, namespace, offset, namespace, offset, data_points, byte_count, namespace, offset))
[ "def", "add_namespace_statistics", "(", "self", ",", "namespace", ",", "offset", ",", "data_points", ",", "byte_count", ")", ":", "query", "=", "'UPDATE gauged_statistics '", "'SET data_points = data_points + %s,'", "'byte_count = byte_count + %s WHERE namespace = %s '", "'AND ...
56.5
17.357143
def _validate_edata(self, edata): """Validate edata argument of raise_exception_if method.""" # pylint: disable=R0916 if edata is None: return True if not (isinstance(edata, dict) or _isiterable(edata)): return False edata = [edata] if isinstance(edata, dict) else edata for edict in edata: if (not isinstance(edict, dict)) or ( isinstance(edict, dict) and ( ("field" not in edict) or ("field" in edict and (not isinstance(edict["field"], str))) or ("value" not in edict) ) ): return False return True
[ "def", "_validate_edata", "(", "self", ",", "edata", ")", ":", "# pylint: disable=R0916", "if", "edata", "is", "None", ":", "return", "True", "if", "not", "(", "isinstance", "(", "edata", ",", "dict", ")", "or", "_isiterable", "(", "edata", ")", ")", ":"...
37.421053
14.842105
def list_topic_rules(topic=None, ruleDisabled=None, region=None, key=None, keyid=None, profile=None): ''' List all rules (for a given topic, if specified) Returns list of rules CLI Example: .. code-block:: bash salt myminion boto_iot.list_topic_rules Example Return: .. code-block:: yaml rules: - {...} - {...} ''' try: conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) kwargs = {} if topic is not None: kwargs['topic'] = topic if ruleDisabled is not None: kwargs['ruleDisabled'] = ruleDisabled rules = [] for ret in __utils__['boto3.paged_call'](conn.list_topic_rules, marker_flag='nextToken', marker_arg='nextToken', **kwargs): rules.extend(ret['rules']) if not bool(rules): log.warning('No rules found') return {'rules': rules} except ClientError as e: return {'error': __utils__['boto3.get_error'](e)}
[ "def", "list_topic_rules", "(", "topic", "=", "None", ",", "ruleDisabled", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "conn", "=", "_get_conn", "(", ...
27.55
21.6
def n_frames_total(self, stride=1, skip=0): r"""Returns total number of frames. Parameters ---------- stride : int return value is the number of frames in trajectories when running through them with a step size of `stride`. skip : int, default=0 skip the first initial n frames per trajectory. Returns ------- n_frames_total : int total number of frames. """ if not IteratorState.is_uniform_stride(stride): return len(stride) return sum(self.trajectory_lengths(stride=stride, skip=skip))
[ "def", "n_frames_total", "(", "self", ",", "stride", "=", "1", ",", "skip", "=", "0", ")", ":", "if", "not", "IteratorState", ".", "is_uniform_stride", "(", "stride", ")", ":", "return", "len", "(", "stride", ")", "return", "sum", "(", "self", ".", "...
32.578947
18.368421
def add_partitioning_indexes(portal): """Adds the indexes for partitioning """ logger.info("Adding partitioning indexes") add_index(portal, catalog_id=CATALOG_ANALYSIS_LISTING, index_name="getAncestorsUIDs", index_attribute="getAncestorsUIDs", index_metatype="KeywordIndex") add_index(portal, catalog_id=CATALOG_ANALYSIS_REQUEST_LISTING, index_name="isRootAncestor", index_attribute="isRootAncestor", index_metatype="BooleanIndex")
[ "def", "add_partitioning_indexes", "(", "portal", ")", ":", "logger", ".", "info", "(", "\"Adding partitioning indexes\"", ")", "add_index", "(", "portal", ",", "catalog_id", "=", "CATALOG_ANALYSIS_LISTING", ",", "index_name", "=", "\"getAncestorsUIDs\"", ",", "index_...
37.428571
11.642857
def strip_cdata(text): """Removes all CDATA blocks from `text` if it contains them. Note: If the function contains escaped XML characters outside of a CDATA block, they will be unescaped. Args: A string containing one or more CDATA blocks. Returns: An XML unescaped string with CDATA block qualifiers removed. """ if not is_cdata(text): return text xml = "<e>{0}</e>".format(text) node = etree.fromstring(xml) return node.text
[ "def", "strip_cdata", "(", "text", ")", ":", "if", "not", "is_cdata", "(", "text", ")", ":", "return", "text", "xml", "=", "\"<e>{0}</e>\"", ".", "format", "(", "text", ")", "node", "=", "etree", ".", "fromstring", "(", "xml", ")", "return", "node", ...
24.4
22.45
def SCAS(cpu, dest, src): """ Scans String. Compares the byte, word, or double word specified with the memory operand with the value in the AL, AX, EAX, or RAX register, and sets the status flags according to the results. The memory operand address is read from either the ES:RDI, ES:EDI or the ES:DI registers (depending on the address-size attribute of the instruction, 32 or 16, respectively):: IF (byte comparison) THEN temp = AL - SRC; SetStatusFlags(temp); THEN IF DF = 0 THEN (E)DI = (E)DI + 1; ELSE (E)DI = (E)DI - 1; FI; ELSE IF (word comparison) THEN temp = AX - SRC; SetStatusFlags(temp) THEN IF DF = 0 THEN (E)DI = (E)DI + 2; ELSE (E)DI = (E)DI - 2; FI; ELSE (* doubleword comparison *) temp = EAX - SRC; SetStatusFlags(temp) THEN IF DF = 0 THEN (E)DI = (E)DI + 4; ELSE (E)DI = (E)DI - 4; FI; FI; FI; :param cpu: current CPU. :param dest: destination operand. :param src: source operand. """ dest_reg = dest.reg mem_reg = src.mem.base # , src.type, src.read() size = dest.size arg0 = dest.read() arg1 = src.read() res = arg0 - arg1 cpu._calculate_CMP_flags(size, res, arg0, arg1) increment = Operators.ITEBV(cpu.address_bit_size, cpu.DF, -size // 8, size // 8) cpu.write_register(mem_reg, cpu.read_register(mem_reg) + increment)
[ "def", "SCAS", "(", "cpu", ",", "dest", ",", "src", ")", ":", "dest_reg", "=", "dest", ".", "reg", "mem_reg", "=", "src", ".", "mem", ".", "base", "# , src.type, src.read()", "size", "=", "dest", ".", "size", "arg0", "=", "dest", ".", "read", "(", ...
40.076923
15.769231
def Nu_plate_Kumar(Re, Pr, chevron_angle, mu=None, mu_wall=None): r'''Calculates Nusselt number for single-phase flow in a **well-designed** Chevron-style plate heat exchanger according to [1]_. The data is believed to have been developed by APV International Limited, since acquired by SPX Corporation. This uses a curve fit of that data published in [2]_. .. math:: Nu = C_1 Re^m Pr^{0.33}\left(\frac{\mu}{\mu_{wall}}\right)^{0.17} `C1` and `m` are coefficients looked up in a table, with varying ranges of Re validity and chevron angle validity. See the source for their exact values. The wall fluid property correction is included only if the viscosity values are provided. Parameters ---------- Re : float Reynolds number with respect to the hydraulic diameter of the channels, [-] Pr : float Prandtl number calculated with bulk fluid properties, [-] chevron_angle : float Angle of the plate corrugations with respect to the vertical axis (the direction of flow if the plates were straight), between 0 and 90. Many plate exchangers use two alternating patterns; use their average angle for that situation [degrees] mu : float, optional Viscosity of the fluid at the bulk (inlet and outlet average) temperature, [Pa*s] mu_wall : float, optional Viscosity of fluid at wall temperature, [Pa*s] Returns ------- Nu : float Nusselt number with respect to `Dh`, [-] Notes ----- Data on graph from Re=0.1 to Re=10000, with chevron angles 30 to 65 degrees. See `PlateExchanger` for further clarification on the definitions. It is believed the constants used in this correlation were curve-fit to the actual graph in [1]_ by the author of [2]_ as there is no As the coefficients change, there are numerous small discontinuities, although the data on the graphs is continuous with sharp transitions of the slope. The author of [1]_ states clearly this correlation is "applicable only to well designed Chevron PHEs". 
Examples -------- >>> Nu_plate_Kumar(Re=2000, Pr=0.7, chevron_angle=30) 47.757818892853955 With the wall-correction factor included: >>> Nu_plate_Kumar(Re=2000, Pr=0.7, chevron_angle=30, mu=1E-3, mu_wall=8E-4) 49.604284135097544 References ---------- .. [1] Kumar, H. "The plate heat exchanger: construction and design." In First U.K. National Conference on Heat Transfer: Held at the University of Leeds, 3-5 July 1984, Institute of Chemical Engineering Symposium Series, vol. 86, pp. 1275-1288. 1984. .. [2] Ayub, Zahid H. "Plate Heat Exchanger Literature Survey and New Heat Transfer and Pressure Drop Correlations for Refrigerant Evaporators." Heat Transfer Engineering 24, no. 5 (September 1, 2003): 3-16. doi:10.1080/01457630304056. ''' # Uses the standard diameter as characteristic diameter beta_list_len = len(Kumar_beta_list) for i in range(beta_list_len): if chevron_angle <= Kumar_beta_list[i]: C1_options, m_options, Re_ranges = Kumar_C1s[i], Kumar_ms[i], Kumar_Nu_Res[i] break elif i == beta_list_len-1: C1_options, m_options, Re_ranges = Kumar_C1s[-1], Kumar_ms[-1], Kumar_Nu_Res[-1] Re_len = len(Re_ranges) for j in range(Re_len): if Re <= Re_ranges[j]: C1, m = C1_options[j], m_options[j] break elif j == Re_len-1: C1, m = C1_options[-1], m_options[-1] Nu = C1*Re**m*Pr**0.33 if mu_wall is not None and mu is not None: Nu *= (mu/mu_wall)**0.17 return Nu
[ "def", "Nu_plate_Kumar", "(", "Re", ",", "Pr", ",", "chevron_angle", ",", "mu", "=", "None", ",", "mu_wall", "=", "None", ")", ":", "# Uses the standard diameter as characteristic diameter", "beta_list_len", "=", "len", "(", "Kumar_beta_list", ")", "for", "i", "...
38.505155
25.783505
def new_histogram(name, reservoir=None): """ Build a new histogram metric with a given reservoir object If the reservoir is not provided, a uniform reservoir with the default size is used """ if reservoir is None: reservoir = histogram.UniformReservoir(histogram.DEFAULT_UNIFORM_RESERVOIR_SIZE) return new_metric(name, histogram.Histogram, reservoir)
[ "def", "new_histogram", "(", "name", ",", "reservoir", "=", "None", ")", ":", "if", "reservoir", "is", "None", ":", "reservoir", "=", "histogram", ".", "UniformReservoir", "(", "histogram", ".", "DEFAULT_UNIFORM_RESERVOIR_SIZE", ")", "return", "new_metric", "(",...
37.5
23.1
def mark_meas_good(self, g_index): """ Marks the g_index'th measuremnt of current specimen good Parameters ---------- g_index : int that gives the index of the measurement to mark good, indexed from 0 """ meas_index, ind_data = 0, [] for i, meas_data in enumerate(self.mag_meas_data): if meas_data['er_specimen_name'] == self.s: ind_data.append(i) meas_index = ind_data[g_index] self.Data[self.s]['measurement_flag'][g_index] = 'g' if len(self.Data[self.s]['zijdblock'][g_index]) < 6: self.Data[self.s]['zijdblock'][g_index].append('g') self.Data[self.s]['zijdblock'][g_index][5] = 'g' if 'zijdblock_geo' in self.Data[self.s] and g_index < len(self.Data[self.s]['zijdblock_geo']): if len(self.Data[self.s]['zijdblock_geo'][g_index]) < 6: self.Data[self.s]['zijdblock_geo'][g_index].append('g') self.Data[self.s]['zijdblock_geo'][g_index][5] = 'g' if 'zijdblock_tilt' in self.Data[self.s] and g_index < len(self.Data[self.s]['zijdblock_tilt']): if len(self.Data[self.s]['zijdblock_tilt'][g_index]) < 6: self.Data[self.s]['zijdblock_tilt'][g_index].append('g') self.Data[self.s]['zijdblock_tilt'][g_index][5] = 'g' self.mag_meas_data[meas_index]['measurement_flag'] = 'g' if self.data_model == 3.0: meas_name = str(self.Data[self.s]['measurement_names'][g_index]) mdf = self.con.tables['measurements'].df # check for multiple measurements with the same name if not isinstance(mdf.loc[meas_name], pd.Series): res = self.user_warning("Your measurements table has non-unique measurement names.\nYou may end up marking more than one measurement as good.\nRight click this measurement again to undo.") # mark measurement as good mdf.loc[meas_name, 'quality'] = 'g'
[ "def", "mark_meas_good", "(", "self", ",", "g_index", ")", ":", "meas_index", ",", "ind_data", "=", "0", ",", "[", "]", "for", "i", ",", "meas_data", "in", "enumerate", "(", "self", ".", "mag_meas_data", ")", ":", "if", "meas_data", "[", "'er_specimen_na...
53.297297
25.891892
def materials(self): """ Property for accessing :class:`MaterialManager` instance, which is used to manage materials. :rtype: yagocd.resources.material.MaterialManager """ if self._material_manager is None: self._material_manager = MaterialManager(session=self._session) return self._material_manager
[ "def", "materials", "(", "self", ")", ":", "if", "self", ".", "_material_manager", "is", "None", ":", "self", ".", "_material_manager", "=", "MaterialManager", "(", "session", "=", "self", ".", "_session", ")", "return", "self", ".", "_material_manager" ]
39.222222
19.666667
def get_scopes_for(self, user_provided_scopes): """ Returns a list of scopes needed for each of the scope_helpers provided, by adding the prefix to them if required :param user_provided_scopes: a list of scopes or scope helpers :type user_provided_scopes: list or tuple or str :return: scopes with url prefix added :rtype: list :raises ValueError: if unexpected datatype of scopes are passed """ if user_provided_scopes is None: # return all available scopes user_provided_scopes = [app_part for app_part in self._oauth_scopes] elif isinstance(user_provided_scopes, str): user_provided_scopes = [user_provided_scopes] if not isinstance(user_provided_scopes, (list, tuple)): raise ValueError( "'user_provided_scopes' must be a list or a tuple of strings") scopes = set() for app_part in user_provided_scopes: for scope in self._oauth_scopes.get(app_part, [(app_part,)]): scopes.add(self._prefix_scope(scope)) return list(scopes)
[ "def", "get_scopes_for", "(", "self", ",", "user_provided_scopes", ")", ":", "if", "user_provided_scopes", "is", "None", ":", "# return all available scopes", "user_provided_scopes", "=", "[", "app_part", "for", "app_part", "in", "self", ".", "_oauth_scopes", "]", "...
42.692308
20.153846
def remove_rich_rule(zone, rule, permanent=True): ''' Add a rich rule to a zone .. versionadded:: 2016.11.0 CLI Example: .. code-block:: bash salt '*' firewalld.remove_rich_rule zone 'rule' ''' cmd = "--zone={0} --remove-rich-rule='{1}'".format(zone, rule) if permanent: cmd += ' --permanent' return __firewall_cmd(cmd)
[ "def", "remove_rich_rule", "(", "zone", ",", "rule", ",", "permanent", "=", "True", ")", ":", "cmd", "=", "\"--zone={0} --remove-rich-rule='{1}'\"", ".", "format", "(", "zone", ",", "rule", ")", "if", "permanent", ":", "cmd", "+=", "' --permanent'", "return", ...
20
25.555556
def counter(path, delta, create_parents=False, **kwargs):
    """
    Increment or decrement a counter in a document.

    :param path: Path to the counter
    :param delta: Amount by which to modify the value. The delta can be
        negative but not 0. It must be an integer (not a float) as well.
    :param create_parents: Create the counter (and apply the modification)
        if it does not exist

    .. note::

        Unlike :meth:`couchbase.bucket.Bucket.counter`, there is no
        `initial` argument. If the counter does not exist within the
        document (but its parent does, or `create_parents` is true),
        it will be initialized with the value of the `delta`.

    This operation is only valid in :cb_bmeth:`mutate_in`.

    .. seealso:: :func:`upsert`, :cb_bmeth:`counter` (in `Bucket`)
    """
    if delta:
        return _gen_4spec(LCB_SDCMD_COUNTER, path, delta,
                          create_path=create_parents, **kwargs)
    # A zero (or otherwise falsy) delta is a no-op and therefore rejected.
    raise ValueError("Delta must be positive or negative!")
[ "def", "counter", "(", "path", ",", "delta", ",", "create_parents", "=", "False", ",", "*", "*", "kwargs", ")", ":", "if", "not", "delta", ":", "raise", "ValueError", "(", "\"Delta must be positive or negative!\"", ")", "return", "_gen_4spec", "(", "LCB_SDCMD_...
38.576923
23.884615
def _copy_vm(template=None, name=None, session=None, sr=None):
    '''
    Create a VM by copying an existing template VM.

    Copying is slower than cloning and should be used when source and
    target are NOT in the same storage repository.

    template = object reference
    name = string name of new VM
    session = object reference
    sr = object reference
    '''
    session = session if session is not None else _get_session()
    log.debug('Creating VM %s by copying %s', name, template)
    source_ref = _get_vm(template, session)
    # VM.copy is asynchronous; block until the copy task completes.
    async_task = session.xenapi.Async.VM.copy(source_ref, name, sr)
    _run_async_task(async_task, session)
[ "def", "_copy_vm", "(", "template", "=", "None", ",", "name", "=", "None", ",", "session", "=", "None", ",", "sr", "=", "None", ")", ":", "if", "session", "is", "None", ":", "session", "=", "_get_session", "(", ")", "log", ".", "debug", "(", "'Crea...
31.166667
17.944444
def arglist(self, args, call):
    """arglist: (argument ',')* (argument [','] | '*' test (',' argument)* [',' '**' test] | '**' test)"""
    for arg in args:
        if isinstance(arg, ast.keyword):
            call.keywords.append(arg)
        elif not call.keywords:
            call.args.append(arg)
        else:
            # A positional argument after a keyword argument is a fatal
            # syntax error; report both offending locations.
            error = diagnostic.Diagnostic(
                "fatal", "non-keyword arg after keyword arg", {},
                arg.loc, [call.keywords[-1].loc])
            self.diagnostic_engine.process(error)
    return call
[ "def", "arglist", "(", "self", ",", "args", ",", "call", ")", ":", "for", "arg", "in", "args", ":", "if", "isinstance", "(", "arg", ",", "ast", ".", "keyword", ")", ":", "call", ".", "keywords", ".", "append", "(", "arg", ")", "elif", "len", "(",...
43.666667
11.866667
def _updateVariantAnnotationSets(self, variantFile, dataUrl): """ Updates the variant annotation set associated with this variant using information in the specified pysam variantFile. """ # TODO check the consistency of this between VCF files. if not self.isAnnotated(): annotationType = None for record in variantFile.header.records: if record.type == "GENERIC": if record.key == "SnpEffVersion": annotationType = ANNOTATIONS_SNPEFF elif record.key == "VEP": version = record.value.split()[0] # TODO we need _much_ more sophisticated processing # of VEP versions here. When do they become # incompatible? if version == "v82": annotationType = ANNOTATIONS_VEP_V82 elif version == "v77": annotationType = ANNOTATIONS_VEP_V77 else: # TODO raise a proper typed exception there with # the file name as an argument. raise ValueError( "Unsupported VEP version {} in '{}'".format( version, dataUrl)) if annotationType is None: infoKeys = variantFile.header.info.keys() if 'CSQ' in infoKeys or 'ANN' in infoKeys: # TODO likewise, we want a properly typed exception that # we can throw back to the repo manager UI and display # as an import error. raise ValueError( "Unsupported annotations in '{}'".format(dataUrl)) if annotationType is not None: vas = HtslibVariantAnnotationSet(self, self.getLocalId()) vas.populateFromFile(variantFile, annotationType) self.addVariantAnnotationSet(vas)
[ "def", "_updateVariantAnnotationSets", "(", "self", ",", "variantFile", ",", "dataUrl", ")", ":", "# TODO check the consistency of this between VCF files.", "if", "not", "self", ".", "isAnnotated", "(", ")", ":", "annotationType", "=", "None", "for", "record", "in", ...
53.282051
16.307692
def run(self, creds, override_etype=None):
    """
    Requests TGT tickets for all users specified in the targets list.

    creds: list : the users to request the TGT tickets for
    override_etype: list : list of supported encryption types
        (defaults to [23], i.e. RC4-HMAC)

    Returns a list of hashcat-formatted TGT ticket hashes.
    """
    # Fix: the original used a mutable default argument ([23]); use None
    # as the sentinel and build the default per call instead.
    if override_etype is None:
        override_etype = [23]
    tgts = []
    for cred in creds:
        try:
            kcomm = KerbrosComm(cred, self.ksoc)
            kcomm.get_TGT(override_etype=override_etype, decrypt_tgt=False)
            tgts.append(kcomm.kerberos_TGT)
        except Exception as e:
            # Best-effort: log the failure and keep roasting the rest.
            logger.debug('Error while roasting client %s/%s Reason: %s' % (cred.domain, cred.username, str(e)))
            continue
    return [TGTTicket2hashcat(tgt) for tgt in tgts]
[ "def", "run", "(", "self", ",", "creds", ",", "override_etype", "=", "[", "23", "]", ")", ":", "tgts", "=", "[", "]", "for", "cred", "in", "creds", ":", "try", ":", "kcomm", "=", "KerbrosComm", "(", "cred", ",", "self", ".", "ksoc", ")", "kcomm",...
29.590909
21.909091
def p_queue(p):
    """
    queue : QUEUE COLON LIFO
          | QUEUE COLON FIFO
    """
    # NOTE: the docstring above is a PLY grammar rule and must not change.
    if p[3] == "LIFO":
        p[0] = {"queue": LIFO()}
    elif p[3] == "FIFO":
        p[0] = {"queue": FIFO()}
    else:
        # Defensive branch: the grammar only admits LIFO/FIFO.  Fix: report
        # the offending discipline token p[3], not the 'queue' keyword p[1].
        raise RuntimeError("Queue discipline '%s' is not supported!" % p[3])
[ "def", "p_queue", "(", "p", ")", ":", "if", "p", "[", "3", "]", "==", "\"LIFO\"", ":", "p", "[", "0", "]", "=", "{", "\"queue\"", ":", "LIFO", "(", ")", "}", "elif", "p", "[", "3", "]", "==", "\"FIFO\"", ":", "p", "[", "0", "]", "=", "{",...
21.538462
18.923077
async def make_response(self, request, response, **response_kwargs):
    """Convert a handler result to a web response.

    Awaits nested coroutines, passes StreamResponse objects through
    unchanged, and JSON-encodes anything else.
    """
    result = response
    # Handlers may return (possibly nested) coroutines; resolve them all.
    while iscoroutine(result):
        result = await result

    if isinstance(result, StreamResponse):
        # Already a full response object; nothing to wrap.
        return result

    if 'content_type' not in response_kwargs:
        response_kwargs['content_type'] = 'application/json'
    return Response(text=dumps(result), **response_kwargs)
[ "async", "def", "make_response", "(", "self", ",", "request", ",", "response", ",", "*", "*", "response_kwargs", ")", ":", "while", "iscoroutine", "(", "response", ")", ":", "response", "=", "await", "response", "if", "isinstance", "(", "response", ",", "S...
36.818182
20.909091
def _calc_damping_min(self):
    """minimum damping [decimal]"""
    # Empirical fit built from three multiplicative terms.
    plasticity_term = 0.8005 + 0.0129 * self._plas_index * self._ocr ** -0.1069
    stress_term = (self._stress_mean * KPA_TO_ATM) ** -0.2889
    freq_term = 1 + 0.2919 * np.log(self._freq)
    # Divide by 100: the fit appears to produce percent, while the
    # documented return value is a decimal fraction.
    return plasticity_term * stress_term * freq_term / 100
[ "def", "_calc_damping_min", "(", "self", ")", ":", "return", "(", "(", "0.8005", "+", "0.0129", "*", "self", ".", "_plas_index", "*", "self", ".", "_ocr", "**", "-", "0.1069", ")", "*", "(", "self", ".", "_stress_mean", "*", "KPA_TO_ATM", ")", "**", ...
52.2
17.2
def splitArgs(self, args):
    """Split *args* into a token list with shlex.split().

    Raises UsageError if the input cannot be tokenized
    (e.g. an unterminated quote).
    """
    try:
        tokens = shlex.split(args)
    except ValueError as err:
        raise UsageError(err)
    return tokens
[ "def", "splitArgs", "(", "self", ",", "args", ")", ":", "try", ":", "return", "shlex", ".", "split", "(", "args", ")", "except", "ValueError", "as", "e", ":", "raise", "UsageError", "(", "e", ")" ]
33.714286
9.142857
def compute_num_true_positives(ref_freqs, est_freqs, window=0.5, chroma=False):
    """Compute the number of true positives in an estimate given a reference.

    A frequency is correct if it is within a quartertone of the correct
    frequency.

    Parameters
    ----------
    ref_freqs : list of np.ndarray
        reference frequencies (MIDI)
    est_freqs : list of np.ndarray
        estimated frequencies (MIDI)
    window : float
        Window size, in semitones
    chroma : bool
        If True, computes distances modulo n.
        If True, ``ref_freqs`` and ``est_freqs``
        should be wrapped modulo n.

    Returns
    -------
    true_positives : np.ndarray
        Array the same length as ref_freqs containing the number of true
        positives.
    """
    true_positives = np.zeros(len(ref_freqs))
    for frame_idx, (ref_frame, est_frame) in enumerate(
            zip(ref_freqs, est_freqs)):
        if chroma:
            # Distances are computed modulo n for chroma-wrapped events.
            matched = util.match_events(
                ref_frame, est_frame, window,
                distance=util._outer_distance_mod_n)
        else:
            # Match frequency events within the tolerance window (semitones).
            matched = util.match_events(ref_frame, est_frame, window)
        true_positives[frame_idx] = len(matched)
    return true_positives
[ "def", "compute_num_true_positives", "(", "ref_freqs", ",", "est_freqs", ",", "window", "=", "0.5", ",", "chroma", "=", "False", ")", ":", "n_frames", "=", "len", "(", "ref_freqs", ")", "true_positives", "=", "np", ".", "zeros", "(", "(", "n_frames", ",", ...
33.075
20.275
def setParams(self, params):
    """Unpack a flat parameter vector into the effect matrices self.B.

    Consecutive slices of ``params`` are reshaped (Fortran order) into
    the shape of each ``self.B[i]``.
    """
    offset = 0
    for term in range(self.n_terms):
        count = self.B[term].size
        chunk = params[offset:offset + count]
        self.B[term] = np.reshape(chunk, self.B[term].shape, order='F')
        offset += count
[ "def", "setParams", "(", "self", ",", "params", ")", ":", "start", "=", "0", "for", "i", "in", "range", "(", "self", ".", "n_terms", ")", ":", "n_effects", "=", "self", ".", "B", "[", "i", "]", ".", "size", "self", ".", "B", "[", "i", "]", "=...
38.142857
14.714286
def present(self, value):
    """Return a user-friendly representation of *value*.

    Reverse-look up the value in ``self.special``; if no special entry
    matches, fall back to ``self.to_literal()``.
    """
    _missing = object()
    match = next((name for name, special in self.special.items()
                  if special == value), _missing)
    if match is not _missing:
        return match
    return self.to_literal(value, *self.args, **self.kw)
[ "def", "present", "(", "self", ",", "value", ")", ":", "for", "k", ",", "v", "in", "self", ".", "special", ".", "items", "(", ")", ":", "if", "v", "==", "value", ":", "return", "k", "return", "self", ".", "to_literal", "(", "value", ",", "*", "...
36.222222
14.333333
def hex(x):
    '''
    x-->bytes | bytearray

    Returns-->bytes: hex-encoded
    '''
    # bytearray input is normalized to immutable bytes before encoding.
    data = bytes(x) if isinstance(x, bytearray) else x
    return encode(data, 'hex')
[ "def", "hex", "(", "x", ")", ":", "if", "isinstance", "(", "x", ",", "bytearray", ")", ":", "x", "=", "bytes", "(", "x", ")", "return", "encode", "(", "x", ",", "'hex'", ")" ]
20.125
19.875
def _load_data(self, resource, default=DEFAULT_VALUE_SAFEGUARD, **kwargs):
    """
    Load data from the Course Catalog API client.

    Arguments:
        resource(string): type of resource to load
        default(any): value to return if the API query returned an empty
            result. Sensible values: [], {}, None etc.

    Returns:
        dict: Deserialized response from Course Catalog API
    """
    if default != self.DEFAULT_VALUE_SAFEGUARD:
        fallback = default
    else:
        # No explicit default supplied; fall back to an empty dict.
        fallback = {}
    try:
        data = get_edx_api_data(
            api_config=CatalogIntegration.current(),
            resource=resource,
            api=self.client,
            **kwargs
        )
        # An empty/falsy payload is replaced by the fallback value.
        return data or fallback
    except (SlumberBaseException, ConnectionError, Timeout) as exc:
        LOGGER.exception(
            'Failed to load data from resource [%s] with kwargs [%s] due to: [%s]',
            resource, kwargs, str(exc)
        )
        return fallback
[ "def", "_load_data", "(", "self", ",", "resource", ",", "default", "=", "DEFAULT_VALUE_SAFEGUARD", ",", "*", "*", "kwargs", ")", ":", "default_val", "=", "default", "if", "default", "!=", "self", ".", "DEFAULT_VALUE_SAFEGUARD", "else", "{", "}", "try", ":", ...
37.192308
22.038462
def clear_dtreat(self, force=False):
    """ Clear all treatment parameters in self.dtreat

    Subsequently also clear the working copy of data
    The working copy of data is thus reset to the reference data
    """
    # True for every treatment parameter (except 'order') that is set.
    lC = [self._dtreat[k] is not None for k in self._dtreat.keys()
          if k != 'order']
    if any(lC) and not force:
        msg = """BEWARE : You are about to delete the data treatment
            i.e.: to clear self.dtreat (and also self.ddata)
            Are you sure ?
            If yes, use self.clear_dtreat(force=True)"""
        raise Exception(msg)
    # Rebuild an all-None treatment dict, validate it, then reset the
    # working data copy back to the reference data.
    dtreat = dict.fromkeys(self._get_keys_dtreat())
    self._dtreat = self._checkformat_inputs_dtreat(dtreat)
    self.clear_ddata()
[ "def", "clear_dtreat", "(", "self", ",", "force", "=", "False", ")", ":", "lC", "=", "[", "self", ".", "_dtreat", "[", "k", "]", "is", "not", "None", "for", "k", "in", "self", ".", "_dtreat", ".", "keys", "(", ")", "if", "k", "!=", "'order'", "...
47.294118
13.882353
def inline_link( self, text, url): """*generate a MMD sytle link* **Key Arguments:** - ``text`` -- the text to link from - ``url`` -- the url to link to **Return:** - ``text`` -- the linked text **Usage:** To convert a text and url to MMD link: .. code-block:: python text = md.inline_link( " google search engine ", " http://www.google.com ") print text # OUTPUT: # [google search engine](http://www.google.com) """ m = self.reWS.match(text) prefix = m.group(1) text = m.group(2) suffix = m.group(3) url = url.strip() return "%(prefix)s[%(text)s](%(url)s)%(suffix)s" % locals()
[ "def", "inline_link", "(", "self", ",", "text", ",", "url", ")", ":", "m", "=", "self", ".", "reWS", ".", "match", "(", "text", ")", "prefix", "=", "m", ".", "group", "(", "1", ")", "text", "=", "m", ".", "group", "(", "2", ")", "suffix", "="...
24.323529
21.117647
def _listen_for_dweets_from_response(response): """Yields dweets as received from dweet.io's streaming API """ streambuffer = '' for byte in response.iter_content(): if byte: streambuffer += byte.decode('ascii') try: dweet = json.loads(streambuffer.splitlines()[1]) except (IndexError, ValueError): continue if isstr(dweet): yield json.loads(dweet) streambuffer = ''
[ "def", "_listen_for_dweets_from_response", "(", "response", ")", ":", "streambuffer", "=", "''", "for", "byte", "in", "response", ".", "iter_content", "(", ")", ":", "if", "byte", ":", "streambuffer", "+=", "byte", ".", "decode", "(", "'ascii'", ")", "try", ...
34.642857
10.714286
def normalize(data):
    """
    Normalize data to zero mean and unit standard deviation (z-transform).

    Parameters
    ----------
    data : numpy.ndarray

    Returns
    -------
    numpy.ndarray
        z-transform of input array
    """
    values = data.astype(float)
    centered = values - values.mean()
    # Centering does not change the standard deviation.
    return centered / centered.std()
[ "def", "normalize", "(", "data", ")", ":", "data", "=", "data", ".", "astype", "(", "float", ")", "data", "-=", "data", ".", "mean", "(", ")", "return", "data", "/", "data", ".", "std", "(", ")" ]
17.428571
22.666667
def extract_response(raw_response):
    """Extract the payload of a requests response object.

    Only responses with status_code in [200, 300) are extracted; other
    statuses raise a typed error carrying the payload.

    :param raw_response: a requests.Response object.
    :return: content of response.
    """
    payload = urlread(raw_response)
    if is_success_response(raw_response):
        return payload
    if is_failure_response(raw_response):
        raise RemoteExecuteError(payload)
    if is_invalid_response(raw_response):
        raise InvalidResponseError(payload)
    raise UnknownStatusError(payload)
[ "def", "extract_response", "(", "raw_response", ")", ":", "data", "=", "urlread", "(", "raw_response", ")", "if", "is_success_response", "(", "raw_response", ")", ":", "return", "data", "elif", "is_failure_response", "(", "raw_response", ")", ":", "raise", "Remo...
28.833333
12.444444
def toProtocolElement(self): """ Returns the GA4GH protocol representation of this ReadGroup. """ # TODO this is very incomplete, but we don't have the # implementation to fill out the rest of the fields currently readGroup = protocol.ReadGroup() readGroup.id = self.getId() readGroup.created = self._creationTime readGroup.updated = self._updateTime dataset = self.getParentContainer().getParentContainer() readGroup.dataset_id = dataset.getId() readGroup.name = self.getLocalId() readGroup.predicted_insert_size = pb.int(self.getPredictedInsertSize()) referenceSet = self._parentContainer.getReferenceSet() readGroup.sample_name = pb.string(self.getSampleName()) readGroup.biosample_id = pb.string(self.getBiosampleId()) if referenceSet is not None: readGroup.reference_set_id = referenceSet.getId() readGroup.stats.CopyFrom(self.getStats()) readGroup.programs.extend(self.getPrograms()) readGroup.description = pb.string(self.getDescription()) readGroup.experiment.CopyFrom(self.getExperiment()) self.serializeAttributes(readGroup) return readGroup
[ "def", "toProtocolElement", "(", "self", ")", ":", "# TODO this is very incomplete, but we don't have the", "# implementation to fill out the rest of the fields currently", "readGroup", "=", "protocol", ".", "ReadGroup", "(", ")", "readGroup", ".", "id", "=", "self", ".", "...
48.92
14.2
def parse_proposal_data(self, proposal_data, dossier_pk):
    """Get or Create a proposal model from raw data.

    Creates/updates the Proposal row and its Vote rows, saving only when
    a field actually changed.  Returns the Proposal, or None when the
    data is skipped.
    """
    proposal_display = '{} ({})'.format(proposal_data['title'].encode(
        'utf-8'), proposal_data.get('report', '').encode('utf-8'))

    # Bail out early: proposals without an issue_type are not importable.
    if 'issue_type' not in proposal_data.keys():
        logger.debug('This proposal data without issue_type: %s',
                     proposal_data['epref'])
        return

    changed = False
    try:
        proposal = Proposal.objects.get(title=proposal_data['title'])
    except Proposal.DoesNotExist:
        proposal = Proposal(title=proposal_data['title'])
        changed = True

    data_map = dict(
        title=proposal_data['title'],
        datetime=_parse_date(proposal_data['ts']),
        dossier_id=dossier_pk,
        reference=proposal_data.get('report'),
        kind=proposal_data.get('issue_type')
    )

    # Vote totals sometimes arrive as digit strings; coerce them to int.
    for position in ('For', 'Abstain', 'Against'):
        position_data = proposal_data.get(position, {})
        position_total = position_data.get('total', 0)

        if isinstance(position_total, str) and position_total.isdigit():
            position_total = int(position_total)

        data_map['total_%s' % position.lower()] = position_total

    # Only hit the database when at least one field actually changed.
    for key, value in data_map.items():
        if value != getattr(proposal, key, None):
            setattr(proposal, key, value)
            changed = True

    if changed:
        proposal.save()

    # Any pre-import signal receiver returning False vetoes this proposal.
    responses = vote_pre_import.send(sender=self, vote_data=proposal_data)

    for receiver, response in responses:
        if response is False:
            logger.debug(
                'Skipping dossier %s', proposal_data.get(
                    'epref', proposal_data['title']))
            return

    positions = ['For', 'Abstain', 'Against']
    logger.info(
        'Looking for votes in proposal {}'.format(proposal_display))

    for position in positions:
        for group_vote_data in proposal_data.get(
                position,
                {}).get(
                'groups',
                {}):
            for vote_data in group_vote_data['votes']:
                if not isinstance(vote_data, dict):
                    logger.error('Skipping vote data %s for proposal %s',
                                 vote_data, proposal_data['_id'])
                    continue

                representative_pk = self.get_representative(vote_data)

                if representative_pk is None:
                    logger.error('Could not find mep for %s', vote_data)
                    continue

                representative_name = vote_data.get('orig', '')

                # Get-or-create the Vote, tracking whether a save is needed.
                changed = False
                try:
                    vote = Vote.objects.get(
                        representative_id=representative_pk,
                        proposal_id=proposal.pk)
                except Vote.DoesNotExist:
                    vote = Vote(proposal_id=proposal.pk,
                                representative_id=representative_pk)
                    changed = True

                if vote.position != position.lower():
                    changed = True
                    vote.position = position.lower()

                if vote.representative_name != representative_name:
                    changed = True
                    vote.representative_name = representative_name

                if changed:
                    vote.save()
                    logger.debug('Save vote %s for MEP %s on %s #%s to %s',
                                 vote.pk, representative_pk,
                                 proposal_data['title'], proposal.pk,
                                 position)

    return proposal
[ "def", "parse_proposal_data", "(", "self", ",", "proposal_data", ",", "dossier_pk", ")", ":", "proposal_display", "=", "'{} ({})'", ".", "format", "(", "proposal_data", "[", "'title'", "]", ".", "encode", "(", "'utf-8'", ")", ",", "proposal_data", ".", "get", ...
38.686869
21.272727
def verifyEmails(emails=None, regExpPattern="^.+$"):
    """
    Method to perform the mail verification process.

    Arguments
    ---------
    emails: List of emails to verify.
    regExpPattern: Pattern that should match.

    Returns
    -------
    list: A list containing the deduplicated results that match.
    """
    # Fix: avoid the shared mutable default argument (emails=[]).
    if emails is None:
        emails = []
    emailsMatched = set()
    for email in emails:
        # re.match anchors at the start; the default pattern still requires
        # a non-empty string.
        if re.match(regExpPattern, email):
            emailsMatched.add(email)
    # Fix: dropped the stray debug print(regExpPattern) that ran once per
    # matching email, and the unused enumerate index.
    return list(emailsMatched)
[ "def", "verifyEmails", "(", "emails", "=", "[", "]", ",", "regExpPattern", "=", "\"^.+$\"", ")", ":", "emailsMatched", "=", "set", "(", ")", "for", "i", ",", "e", "in", "enumerate", "(", "emails", ")", ":", "if", "re", ".", "match", "(", "regExpPatte...
22.363636
18.909091
def plotPointing(self, maptype=None, colour='b', mod3='r', showOuts=True, **kwargs):
    """Plot the FOV

    maptype: projection/plot object (defaults to self.defaultMap)
    colour: line colour for healthy channels
    mod3: line colour for channels listed in self.brokenChannels
    showOuts: also mark the first corner point of each channel
    kwargs: forwarded to maptype.plot()
    """
    if maptype is None:
        maptype=self.defaultMap

    radec = self.currentRaDec
    # Channel ids are in column 2; the ::4 stride visits each channel once
    # (apparently 4 corner rows per channel -- TODO confirm).
    for ch in radec[:,2][::4]:
        idx = np.where(radec[:,2].astype(np.int) == ch)[0]
        idx = np.append(idx, idx[0])  #% points to draw a box

        c = colour
        if ch in self.brokenChannels:
            c = mod3
        # Columns 3/4 hold the plotted coordinates (RA/Dec, presumably).
        maptype.plot(radec[idx, 3], radec[idx, 4], '-', color=c, **kwargs)

        #Show the origin of the col and row coords for this ch
        if showOuts:
            maptype.plot(radec[idx[0], 3], radec[idx[0],4], 'o', color=c)
[ "def", "plotPointing", "(", "self", ",", "maptype", "=", "None", ",", "colour", "=", "'b'", ",", "mod3", "=", "'r'", ",", "showOuts", "=", "True", ",", "*", "*", "kwargs", ")", ":", "if", "maptype", "is", "None", ":", "maptype", "=", "self", ".", ...
35.3
21.7
def create_archive( source: Path, target: Path, interpreter: str, main: str, compressed: bool = True ) -> None: """Create an application archive from SOURCE. A slightly modified version of stdlib's `zipapp.create_archive <https://docs.python.org/3/library/zipapp.html#zipapp.create_archive>`_ """ # Check that main has the right format. mod, sep, fn = main.partition(":") mod_ok = all(part.isidentifier() for part in mod.split(".")) fn_ok = all(part.isidentifier() for part in fn.split(".")) if not (sep == ":" and mod_ok and fn_ok): raise zipapp.ZipAppError("Invalid entry point: " + main) main_py = MAIN_TEMPLATE.format(module=mod, fn=fn) with maybe_open(target, "wb") as fd: # write shebang write_file_prefix(fd, interpreter) # determine compression compression = zipfile.ZIP_DEFLATED if compressed else zipfile.ZIP_STORED # create zipapp with zipfile.ZipFile(fd, "w", compression=compression) as z: for child in source.rglob("*"): # skip compiled files if child.suffix == '.pyc': continue arcname = child.relative_to(source) z.write(str(child), str(arcname)) # write main z.writestr("__main__.py", main_py.encode("utf-8")) # make executable # NOTE on windows this is no-op target.chmod(target.stat().st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
[ "def", "create_archive", "(", "source", ":", "Path", ",", "target", ":", "Path", ",", "interpreter", ":", "str", ",", "main", ":", "str", ",", "compressed", ":", "bool", "=", "True", ")", "->", "None", ":", "# Check that main has the right format.", "mod", ...
32.043478
21.043478
def save(self, *args, **kwargs):
    """
    Override save() to reject rows where both standard_name and
    systematic_name are missing, empty, or consist only of whitespace
    characters (space, tab, new line, etc).
    """
    def _blank(name):
        # None/empty strings and all-whitespace strings count as blank.
        return not name or name.isspace()

    if _blank(self.standard_name) and _blank(self.systematic_name):
        raise ValueError(
            "Both standard_name and systematic_name are empty")

    super(Gene, self).save(*args, **kwargs)
[ "def", "save", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "empty_std_name", "=", "False", "if", "not", "self", ".", "standard_name", "or", "self", ".", "standard_name", ".", "isspace", "(", ")", ":", "empty_std_name", "=", "True"...
36.684211
17.947368
def generateLowerBoundList(confidence, numUniqueFeatures, numLocationsPerObject, maxNumObjects): """ Metric: How unique is each object's most unique feature? Calculate the probabilistic lower bound for the number of occurrences of an object's most unique feature. For example, if confidence is 0.8, the tick "3" will be placed at the point where 80% of objects are completely composed of features with 3 or more total occurrences, and 20% of objects have at least one feature that has 2 or fewer total occurrences. """ # We're choosing a location, checking its feature, and checking how many # *other* occurrences there are of this feature. So we check n - 1 locations. maxNumOtherLocations = maxNumObjects*10 - 1 results = zip(itertools.count(1), findBinomialNsWithLowerBoundSampleMinimum( confidence, itertools.count(1), 1./numUniqueFeatures, numLocationsPerObject, maxNumOtherLocations)) finalResults = [(numOtherLocations, interpolatedN / numLocationsPerObject) for numOtherLocations, (interpolatedN, _, _) in results] return finalResults
[ "def", "generateLowerBoundList", "(", "confidence", ",", "numUniqueFeatures", ",", "numLocationsPerObject", ",", "maxNumObjects", ")", ":", "# We're choosing a location, checking its feature, and checking how many", "# *other* occurrences there are of this feature. So we check n - 1 locati...
48.541667
24.791667
def export(app, local):
    """Export the data.

    Builds a data/<app_id>/ package (code snapshot, experiment id, table
    CSV dumps and, unless ``local``, the Heroku logs and database dump),
    zips it to data/<app_id>-data.zip and removes the staging directory.
    """
    print_header()

    log("Preparing to export the data...")

    # Renamed from `id`, which shadowed the builtin.
    app_id = str(app)

    subdata_path = os.path.join("data", app_id, "data")

    # Create the data package
    os.makedirs(subdata_path)

    # Copy the experiment code into a code/ subdirectory
    try:
        shutil.copyfile(
            os.path.join("snapshots", app_id + "-code.zip"),
            os.path.join("data", app_id, app_id + "-code.zip")
        )
    except (IOError, OSError):
        # Best-effort: the code snapshot may not exist; previously a bare
        # `except` swallowed *every* exception here.
        pass

    # Copy in the DATA readme.
    # open(os.path.join(app_id, "README.txt"), "a").close()

    # Save the experiment id.
    with open(os.path.join("data", app_id, "experiment_id.md"), "a+") as fh:
        fh.write(app_id)

    if not local:
        # Export the logs
        subprocess.call(
            "heroku logs " +
            "-n 10000 > " + os.path.join("data", app_id, "server_logs.md") +
            " --app " + app_id,
            shell=True)

        dump_path = dump_database(app_id)

        subprocess.call(
            "pg_restore --verbose --clean -d wallace " +
            os.path.join("data", app_id) + "/data.dump",
            shell=True)

    all_tables = [
        "node",
        "network",
        "vector",
        "info",
        "transformation",
        "transmission",
        "participant",
        "notification",
        "question"
    ]

    # Dump every table to CSV via psql \copy.
    for table in all_tables:
        subprocess.call(
            "psql -d wallace --command=\"\\copy " + table + " to \'" +
            os.path.join(subdata_path, table) + ".csv\' csv header\"",
            shell=True)

    if not local:
        os.remove(dump_path)

    log("Zipping up the package...")

    shutil.make_archive(
        os.path.join("data", app_id + "-data"),
        "zip",
        os.path.join("data", app_id)
    )

    shutil.rmtree(os.path.join("data", app_id))

    log("Done. Data available in " + str(app_id) + ".zip")
[ "def", "export", "(", "app", ",", "local", ")", ":", "print_header", "(", ")", "log", "(", "\"Preparing to export the data...\"", ")", "id", "=", "str", "(", "app", ")", "subdata_path", "=", "os", ".", "path", ".", "join", "(", "\"data\"", ",", "id", "...
23.592105
22.75
def create(self, data, fields=[], models={}): ''' Create model attributes ''' if not fields: fields = self.fields if not models and hasattr(self, 'models'): models = self.models for field in fields: setattr(self,field,None) if not data: return None for k, v in data.iteritems(): if type(v) in (str, unicode): v = v.strip() if models and k in models: if type(v) == dict: lists = [] for k2, v2 in v.iteritems(): if type(v2) == list: for d in v2: model = models[k]() lists.append(model.create(d)) if not lists: model = models[k]() v = model.create(v) else: v = lists else: model = models[k]() v = model.create(v) setattr(self,k,v) return self
[ "def", "create", "(", "self", ",", "data", ",", "fields", "=", "[", "]", ",", "models", "=", "{", "}", ")", ":", "if", "not", "fields", ":", "fields", "=", "self", ".", "fields", "if", "not", "models", "and", "hasattr", "(", "self", ",", "'models...
36.2
9.8
def atc(jobid):
    '''
    Print the at(1) script that will run for the passed job id.
    This is mostly for debugging so the output will just be text.

    CLI Example:

    .. code-block:: bash

        salt '*' at.atc <jobid>
    '''
    # Shim to produce output similar to what __virtual__() should do
    # but __salt__ isn't available in __virtual__()
    output = _cmd('at', '-c', six.text_type(jobid))

    if output is None:
        # The at binary is not available at all.
        return '\'at.atc\' is not available.'
    if output == '':
        # at(1) printed nothing: the job id does not exist.
        return {'error': 'invalid job id \'{0}\''.format(jobid)}
    return output
[ "def", "atc", "(", "jobid", ")", ":", "# Shim to produce output similar to what __virtual__() should do", "# but __salt__ isn't available in __virtual__()", "output", "=", "_cmd", "(", "'at'", ",", "'-c'", ",", "six", ".", "text_type", "(", "jobid", ")", ")", "if", "o...
25.954545
24.318182
def Bezier(points, at):
    """Build Bézier curve from control points.

    Deprecated. CatmulClark builds nicer splines
    """
    params = np.asarray(at)
    params_flat = params.ravel()
    degree = len(points) - 1
    curve = np.zeros((params_flat.shape[0], 2))
    # Weighted sum of control points with Bernstein basis polynomials.
    for idx, point in enumerate(points):
        curve += np.outer(Bernstein(degree, idx)(params_flat), point)
    return curve.reshape(params.shape + (2,))
[ "def", "Bezier", "(", "points", ",", "at", ")", ":", "at", "=", "np", ".", "asarray", "(", "at", ")", "at_flat", "=", "at", ".", "ravel", "(", ")", "N", "=", "len", "(", "points", ")", "curve", "=", "np", ".", "zeros", "(", "(", "at_flat", "....
32.454545
11.636364
def repack_archive(archive, archive_new, verbosity=0, interactive=True):
    """Repack an existing archive into a different file and/or format."""
    util.check_existing_filename(archive)
    util.check_new_filename(archive_new)
    verbose = verbosity >= 0
    if verbose:
        util.log_info("Repacking %s to %s ..." % (archive, archive_new))
    result = _repack_archive(archive, archive_new, verbosity=verbosity,
                             interactive=interactive)
    if verbose:
        util.log_info("... repacking successful.")
    return result
[ "def", "repack_archive", "(", "archive", ",", "archive_new", ",", "verbosity", "=", "0", ",", "interactive", "=", "True", ")", ":", "util", ".", "check_existing_filename", "(", "archive", ")", "util", ".", "check_new_filename", "(", "archive_new", ")", "if", ...
48.4
19.1
def sort(self):
    """Consolidate adjacent lines, if same commit ID.

    Will modify line number to be a range, when two or more
    lines with the same commit ID.
    """
    self.sorted_commits = []
    if not self.commits:
        return self.sorted_commits
    # Seed the current run with the first commit.
    prev_commit = self.commits.pop(0)
    prev_line = prev_commit.line_number
    prev_uuid = prev_commit.uuid
    for commit in self.commits:
        # A run breaks when the commit id changes or the line numbers stop
        # being consecutive; flush the finished run.
        # NOTE(review): prev_line/prev_uuid must advance on *every*
        # iteration (not only at run breaks) or runs longer than one line
        # collapse to a single-line range -- confirm indentation here.
        if (commit.uuid != prev_uuid or
                commit.line_number != (prev_line + 1)):
            prev_commit.lines = self.line_range(prev_commit.line_number,
                                                prev_line)
            self.sorted_commits.append(prev_commit)
            prev_commit = commit
        prev_line = commit.line_number
        prev_uuid = commit.uuid
    # Take care of last commit
    prev_commit.lines = self.line_range(prev_commit.line_number, prev_line)
    self.sorted_commits.append(prev_commit)
    return self.sorted_commits
[ "def", "sort", "(", "self", ")", ":", "self", ".", "sorted_commits", "=", "[", "]", "if", "not", "self", ".", "commits", ":", "return", "self", ".", "sorted_commits", "prev_commit", "=", "self", ".", "commits", ".", "pop", "(", "0", ")", "prev_line", ...
41.56
12.76
def _travis(self):
    """
    Logic behind autosave under Travis CI.

    Commits and pushes intermediate or final results when the configured
    autosave interval has elapsed, the run is finishing (self.last), or a
    bypass is requested (self.bypass).  Exits the process after pushing.
    """

    if PyFunceble.CONFIGURATION["travis"]:
        try:
            # Only act when actually running under Travis CI.
            _ = PyFunceble.environ["TRAVIS_BUILD_DIR"]

            time_autorisation = False

            try:
                # Autosave once the configured number of minutes elapsed.
                time_autorisation = int(PyFunceble.time()) >= int(
                    PyFunceble.INTERN["start"]
                ) + (int(PyFunceble.CONFIGURATION["travis_autosave_minutes"]) * 60)
            except KeyError:
                if self.last and not self.bypass:
                    raise Exception(
                        "Please review the way `ExecutionTime()` is called."
                    )

            if self.last or time_autorisation or self.bypass:
                Percentage().log()
                self.travis_permissions()

                command = 'git add --all && git commit -a -m "%s"'

                if self.last or self.bypass:
                    # Final commit: optionally run the user hook first.
                    if PyFunceble.CONFIGURATION["command_before_end"]:
                        for line in Command(
                            PyFunceble.CONFIGURATION["command_before_end"]
                        ).run():
                            sys_stdout.write("{}\n".format(line))

                        self.travis_permissions()

                    message = (
                        PyFunceble.CONFIGURATION["travis_autosave_final_commit"]
                        + " [ci skip]"
                    )

                    Command(command % message).execute()
                else:
                    # Intermediate autosave commit path.
                    if PyFunceble.CONFIGURATION["command"]:
                        for line in Command(
                            PyFunceble.CONFIGURATION["command"]
                        ).run():
                            sys_stdout.write("{}\n".format(line))

                        self.travis_permissions()

                    Command(
                        command % PyFunceble.CONFIGURATION["travis_autosave_commit"]
                    ).execute()

                print(
                    Command(
                        "git push origin %s" % PyFunceble.CONFIGURATION["travis_branch"]
                    ).execute()
                )
                # Stop the process so Travis restarts from the saved state.
                exit(0)
        except KeyError:
            # Not running under Travis (TRAVIS_BUILD_DIR missing): no-op.
            pass
[ "def", "_travis", "(", "self", ")", ":", "if", "PyFunceble", ".", "CONFIGURATION", "[", "\"travis\"", "]", ":", "try", ":", "_", "=", "PyFunceble", ".", "environ", "[", "\"TRAVIS_BUILD_DIR\"", "]", "time_autorisation", "=", "False", "try", ":", "time_autoris...
38.063492
20.507937
def format_dirname(series_name, season_number):
    """Build a season directory name using the configured format string.

    :param str series_name: name of TV series
    :param int season_number: the numeric season of series
    :returns: titlecased directory name built from the configured format
    :rtype: str
    """
    cleaned_name = _replace_series_name(
        series_name, cfg.CONF.output_series_replacements)
    fields = {'seriesname': cleaned_name, 'seasonnumber': season_number}
    raw_name = cfg.CONF.directory_name_format % fields
    return tc.titlecase(raw_name)
[ "def", "format_dirname", "(", "series_name", ",", "season_number", ")", ":", "data", "=", "{", "'seriesname'", ":", "_replace_series_name", "(", "series_name", ",", "cfg", ".", "CONF", ".", "output_series_replacements", ")", ",", "'seasonnumber'", ":", "season_num...
34.3125
20.375
def arxiv_eprints2marc(self, key, values):
    """Populate the ``037`` MARC field.

    Also populates the ``035`` and the ``65017`` MARC fields through side
    effects.
    """
    # Accumulate onto any values already present in the record.
    result_037 = self.get('037', [])
    result_035 = self.get('035', [])
    result_65017 = self.get('65017', [])

    for value in values:
        arxiv_id = value.get('value')
        # Post-2007 arXiv identifiers carry an explicit "arXiv:" prefix.
        arxiv_id = 'arXiv:' + arxiv_id if is_arxiv_post_2007(arxiv_id) else arxiv_id
        result_037.append({
            '9': 'arXiv',
            'a': arxiv_id,
            'c': force_single_element(value.get('categories')),
        })
        result_035.append({
            '9': 'arXiv',
            'a': 'oai:arXiv.org:' + value.get('value'),
        })

        # One 65017 entry per arXiv category.
        categories = force_list(value.get('categories'))
        for category in categories:
            result_65017.append({
                '2': 'arXiv',
                'a': category,
            })

    # 65017/035 are written directly (side effects); 037 is returned for
    # the caller to assign under `key`.
    self['65017'] = result_65017
    self['035'] = result_035

    return result_037
[ "def", "arxiv_eprints2marc", "(", "self", ",", "key", ",", "values", ")", ":", "result_037", "=", "self", ".", "get", "(", "'037'", ",", "[", "]", ")", "result_035", "=", "self", ".", "get", "(", "'035'", ",", "[", "]", ")", "result_65017", "=", "s...
29.484848
18.090909
def find_class_files(self):
    """Recursively locate compiled class files under the root path.

    :return: list of absolute file paths
    """
    located = self._find_files()
    message = "found '{}' compiled class files in '{}'".format(
        len(located), self.root)
    self.announce(message)
    return located
[ "def", "find_class_files", "(", "self", ")", ":", "files", "=", "self", ".", "_find_files", "(", ")", "self", ".", "announce", "(", "\"found '{}' compiled class files in '{}'\"", ".", "format", "(", "len", "(", "files", ")", ",", "self", ".", "root", ")", ...
28.583333
15.25
def render_page(page, page_args):
    """Render the template associated with *page* to a string.

    Resolves the template name from ``page.template`` (falling back to
    ``page.name``), looks up the context-building function on the
    ``pages`` module, and renders ``signage/pages/<template>.html``.

    :param page: page object exposing ``template``, ``name``, ``function``
    :param page_args: two-tuple ``(sign, request)`` passed to the context
        builder
    :returns: rendered HTML string
    """
    # Fix: removed leftover debug `print(page_args)` that polluted stdout.
    template_name = page.template if page.template else page.name
    template = "signage/pages/{}.html".format(template_name)
    # A page may declare an explicit context function; otherwise the
    # function is looked up by the page's own name.
    if page.function:
        context_method = getattr(pages, page.function)
    else:
        context_method = getattr(pages, page.name)
    sign, request = page_args
    context = context_method(page, sign, request)
    return render_to_string(template, context)
[ "def", "render_page", "(", "page", ",", "page_args", ")", ":", "print", "(", "page_args", ")", "template_name", "=", "page", ".", "template", "if", "page", ".", "template", "else", "page", ".", "name", "template", "=", "\"signage/pages/{}.html\"", ".", "form...
37.538462
13.230769
def dump(cls, data, encoding="ascii"):
    """
    Convert str to appropriate format for "UserComment".

    :param data: Like u"foobar"
    :param str encoding: "ascii", "jis", or "unicode"
    :return: the encoding prefix followed by the encoded payload
    :rtype: bytes
    :raises: ValueError if the encoding is unsupported.
    """
    if encoding not in cls.ENCODINGS:
        raise ValueError(
            'encoding {!r} must be one of {!r}'.format(encoding, cls.ENCODINGS))
    # Map the public encoding name to its on-disk prefix and to the codec
    # Python should use for the payload (ascii encodes as itself).
    prefixes = {
        cls.ASCII: cls._ASCII_PREFIX,
        cls.JIS: cls._JIS_PREFIX,
        cls.UNICODE: cls._UNICODE_PREFIX,
    }
    codecs = {cls.UNICODE: cls._UNICODE, cls.JIS: cls._JIS}
    codec = codecs.get(encoding, encoding)
    return prefixes[encoding] + data.encode(codec, errors='replace')
[ "def", "dump", "(", "cls", ",", "data", ",", "encoding", "=", "\"ascii\"", ")", ":", "if", "encoding", "not", "in", "cls", ".", "ENCODINGS", ":", "raise", "ValueError", "(", "'encoding {!r} must be one of {!r}'", ".", "format", "(", "encoding", ",", "cls", ...
50.666667
23.333333
def get_thellier_gui_meas_mapping(input_df, output=2):
    """
    Get the appropriate mapping for translating measurements in Thellier GUI.
    This requires special handling for treat_step_num/measurement/measurement_number.

    Parameters
    ----------
    input_df : pandas DataFrame
        MagIC records
    output : int
        output to this MagIC data model (2 or 3)

    Output
    --------
    mapping : dict (used in convert_meas_df_thellier_gui)
    """
    if int(output) == 2:
        # 3 --> 2: start from the generic map and special-case step numbers.
        mapping = meas_magic3_2_magic2_map.copy()
        if 'treat_step_num' in input_df.columns:
            mapping.update({'treat_step_num': 'measurement_number'})
            mapping.pop('measurement')
        return mapping
    # 2 --> 3
    mapping = meas_magic2_2_magic3_map.copy()
    if 'measurement' in input_df.columns:
        mapping.pop('measurement_number')
    try:
        first_num = int(input_df.iloc[0]['measurement_number'])
        # Small values indicate a step counter rather than a measurement id.
        if first_num < 100:
            mapping['measurement_number'] = 'treat_step_num'
    except ValueError:
        # Non-numeric measurement_number: keep the default mapping.
        pass
    return mapping
[ "def", "get_thellier_gui_meas_mapping", "(", "input_df", ",", "output", "=", "2", ")", ":", "if", "int", "(", "output", ")", "==", "2", ":", "thellier_gui_meas3_2_meas2_map", "=", "meas_magic3_2_magic2_map", ".", "copy", "(", ")", "if", "'treat_step_num'", "in",...
37.714286
21.028571
def start_session(self):
    """Perform the initial "hello" handshake and record server metadata.

    Parses the whitespace-separated reply at fixed word positions and
    stores the values into ``self.server_info``.

    :returns: the raw handshake response string
    """
    reply = self.request("hello")
    parts = reply.split(" ")
    # Fixed word positions in the hello reply (see protocol handshake).
    self.server_info.update({
        "server_version": parts[2],
        "protocol_version": parts[4],
        "screen_width": int(parts[7]),
        "screen_height": int(parts[9]),
        "cell_width": int(parts[11]),
        "cell_height": int(parts[13]),
    })
    return reply
[ "def", "start_session", "(", "self", ")", ":", "response", "=", "self", ".", "request", "(", "\"hello\"", ")", "bits", "=", "response", ".", "split", "(", "\" \"", ")", "self", ".", "server_info", ".", "update", "(", "{", "\"server_version\"", ":", "bits...
31
8.642857
def handle_data(self, data):
    '''
    Method called for each event by zipline. In intuition this is the
    place to factorize algorithms and then call event()
    '''
    # NOTE: Python 2 code (`except Exception, error` below).
    self.days += 1
    signals = {}
    self.orderbook = {}

    # Everytime but the first tick
    if self.initialized and self.manager:
        # Keep the portfolio aware of the situation
        self.manager.update(
            self.portfolio,
            self.datetime,
            self.perf_tracker.cumulative_risk_metrics.to_dict())
    else:
        # Perf_tracker needs at least a turn to have an index
        self.sids = data.keys()
        self.warm(data)
        self.initialized = True
        return

    try:
        # Delegate the actual trading decision to the subclass.
        signals = self.event(data)
    except Exception, error:
        # NOTE Temporary debug. Will probably notify the error and go on
        # with signals={}
        raise AlgorithmEventFailed(
            reason=error, date=self.datetime, data=data)

    # One can process orders within the alogrithm and don't return anything
    if signals and self.manager:
        if (signals.get('buy') or signals.get('sell')):
            self.orderbook = self.manager.trade_signals_handler(signals)
            if self.auto and self._is_interactive():
                self.process_orders(self.orderbook)

    # Some middlewares send stuff over the wires. This little security
    # prevent us from performing a DDOS
    if self._is_interactive():
        self._call_middlewares()
[ "def", "handle_data", "(", "self", ",", "data", ")", ":", "self", ".", "days", "+=", "1", "signals", "=", "{", "}", "self", ".", "orderbook", "=", "{", "}", "# Everytime but the first tick", "if", "self", ".", "initialized", "and", "self", ".", "manager"...
39.075
18.475
def create(self, group_type, config_file,
           group_name=None, region=None, profile_name=None):
    """
    Create a Greengrass group in the given region.

    :param group_type: the type of group to create. Must match a `key` in
        the `group_types` dict
    :param config_file: config file of the group to create
    :param group_name: the name of the group. If no name is given, then
        group_type will be used.
    :param region: the region in which to create the new group.
        [default: us-west-2]
    :param profile_name: the name of the `awscli` profile to use.
        [default: None]
    """
    logging.info("[begin] create command using group_types:{0}".format(
        self.group_types))

    config = GroupConfigFile(config_file=config_file)
    # Refuse to clobber a config file that already tracks a group.
    if config.is_fresh() is False:
        raise ValueError(
            "Config file already tracking previously created group"
        )
    if group_type not in self.group_types.keys():
        raise ValueError("Can only create {0} groups.".format(
            self.group_types)
        )
    if region is None:
        region = self._region

    # create an instance of the requested group type that uses the given
    # config file and region
    gt = self.group_types[group_type](config=config, region=region)

    # get and store the account's IoT endpoint for future use
    ep = _get_iot_session(region=region).describe_endpoint()
    misc = config['misc']
    misc['iot_endpoint'] = ep['endpointAddress']
    config['misc'] = misc

    # Create a Group
    logging.info("[begin] Creating a Greengrass Group")
    if group_name is None:
        group_name = group_type
    gg_client = _get_gg_session(region=region, profile_name=profile_name)

    group_info = gg_client.create_group(Name="{0}".format(group_name))
    config['group'] = {"id": group_info['Id']}

    # setup the policies and roles
    gt.create_and_attach_thing_policy()
    gt.create_and_attach_iam_role()

    # Build each definition; helpers return the definition version ARN
    # (falsy when the group type has none of that kind).
    cl_arn = self._create_core_definition(
        gg_client=gg_client, group_type=gt,
        config=config, group_name=group_name
    )
    dl_arn = self._create_device_definition(
        gg_client=gg_client, group_type=gt,
        config=config, group_name=group_name
    )
    lv_arn = self._create_function_definition(
        gg_client=gg_client, group_type=gt,
        config=config
    )
    log_arn = self._create_logger_definition(
        gg_client=gg_client, group_type=gt,
        config=config
    )
    sub_arn = self._create_subscription_definition(
        gg_client=gg_client, group_type=gt,
        config=config
    )
    logging.info(
        'Group details, core_def:{0} device_def:{1} func_def:{2} '
        'logger_def:{3} subs_def:{4}'.format(
            cl_arn, dl_arn, lv_arn, log_arn, sub_arn)
    )

    # Add all the constituent parts to the Greengrass Group
    group_args = {'GroupId': group_info['Id']}
    if cl_arn:
        group_args['CoreDefinitionVersionArn'] = cl_arn
    if dl_arn:
        group_args['DeviceDefinitionVersionArn'] = dl_arn
    if lv_arn:
        group_args['FunctionDefinitionVersionArn'] = lv_arn
    if log_arn:
        group_args['LoggerDefinitionVersionArn'] = log_arn
    if sub_arn:
        group_args['SubscriptionDefinitionVersionArn'] = sub_arn
    grp = gg_client.create_group_version(
        **group_args
    )

    # store info about the provisioned artifacts into the local config file
    config['group'] = {
        "id": group_info['Id'],
        "version_arn": grp['Arn'],
        "version": grp['Version'],
        "name": group_name
    }
    logging.info(
        "[end] Created Greengrass Group {0}".format(group_info['Id']))
[ "def", "create", "(", "self", ",", "group_type", ",", "config_file", ",", "group_name", "=", "None", ",", "region", "=", "None", ",", "profile_name", "=", "None", ")", ":", "logging", ".", "info", "(", "\"[begin] create command using group_types:{0}\"", ".", "...
36.444444
20.074074
def pad_to_same(self):
    """Zero-pad the shorter pianorolls along the time axis so that every
    track matches the maximum pianoroll length among all tracks."""
    target_length = self.get_max_length()
    for trk in self.tracks:
        current_length = trk.pianoroll.shape[0]
        if current_length < target_length:
            trk.pad(target_length - current_length)
[ "def", "pad_to_same", "(", "self", ")", ":", "max_length", "=", "self", ".", "get_max_length", "(", ")", "for", "track", "in", "self", ".", "tracks", ":", "if", "track", ".", "pianoroll", ".", "shape", "[", "0", "]", "<", "max_length", ":", "track", ...
51.125
12.75
def rosmsg(self):
    """:obj:`sensor_msgs.Image` : ROS Image

    Converts the stored image data to a ROS message via cv_bridge;
    on conversion failure the error is logged and None is returned.
    """
    from cv_bridge import CvBridge, CvBridgeError
    bridge = CvBridge()
    try:
        return bridge.cv2_to_imgmsg(self._data, encoding=self._encoding)
    except CvBridgeError as bridge_error:
        logging.error('%s' % (str(bridge_error)))
[ "def", "rosmsg", "(", "self", ")", ":", "from", "cv_bridge", "import", "CvBridge", ",", "CvBridgeError", "cv_bridge", "=", "CvBridge", "(", ")", "try", ":", "return", "cv_bridge", ".", "cv2_to_imgmsg", "(", "self", ".", "_data", ",", "encoding", "=", "self...
40.111111
16.111111
def get_lib2to3_fixers():
    '''returns a list of all fixers found in the lib2to3 library'''
    fixer_dirname = fixer_dir.__path__[0]
    # Fixer modules are files named fix_*.py; strip the .py suffix and
    # qualify with the lib2to3.fixes package.
    return [
        "lib2to3.fixes." + entry[:-3]
        for entry in sorted(os.listdir(fixer_dirname))
        if entry.startswith("fix_") and entry.endswith(".py")
    ]
[ "def", "get_lib2to3_fixers", "(", ")", ":", "fixers", "=", "[", "]", "fixer_dirname", "=", "fixer_dir", ".", "__path__", "[", "0", "]", "for", "name", "in", "sorted", "(", "os", ".", "listdir", "(", "fixer_dirname", ")", ")", ":", "if", "name", ".", ...
41.25
17
async def Actions(self, entities):
    '''
    entities : typing.Sequence[~Entity]
    Returns -> typing.Sequence[~ActionResult]
    '''
    # Build the RPC request payload for the Action facade (version 2).
    params = {'entities': entities}
    msg = dict(type='Action',
               request='Actions',
               version=2,
               params=params)
    return await self.rpc(msg)
[ "async", "def", "Actions", "(", "self", ",", "entities", ")", ":", "# map input types to rpc msg", "_params", "=", "dict", "(", ")", "msg", "=", "dict", "(", "type", "=", "'Action'", ",", "request", "=", "'Actions'", ",", "version", "=", "2", ",", "param...
31
10.714286
def del_Unnamed(df):
    """
    Deletes all the unnamed columns

    :param df: pandas dataframe
    :return: a new dataframe without columns whose name contains 'Unnamed'
    """
    unnamed_cols = [col for col in df.columns if 'Unnamed' in col]
    return df.drop(unnamed_cols, axis=1)
[ "def", "del_Unnamed", "(", "df", ")", ":", "cols_del", "=", "[", "c", "for", "c", "in", "df", ".", "columns", "if", "'Unnamed'", "in", "c", "]", "return", "df", ".", "drop", "(", "cols_del", ",", "axis", "=", "1", ")" ]
23.625
11.625
def register_on_extra_data_changed(self, callback):
    """Register *callback* for extra-data-changed events.

    The callback receives an IExtraDataChangedEvent object.
    Returns the callback_id
    """
    return self.event_source.register_callback(
        callback, library.VBoxEventType.on_extra_data_changed)
[ "def", "register_on_extra_data_changed", "(", "self", ",", "callback", ")", ":", "event_type", "=", "library", ".", "VBoxEventType", ".", "on_extra_data_changed", "return", "self", ".", "event_source", ".", "register_callback", "(", "callback", ",", "event_type", ")...
37.1
19.9
def public_ip_prefixes(self):
    """Instance depends on the API version:

       * 2018-07-01: :class:`PublicIPPrefixesOperations<azure.mgmt.network.v2018_07_01.operations.PublicIPPrefixesOperations>`
       * 2018-08-01: :class:`PublicIPPrefixesOperations<azure.mgmt.network.v2018_08_01.operations.PublicIPPrefixesOperations>`
    """
    api_version = self._get_api_version('public_ip_prefixes')
    if api_version == '2018-07-01':
        from .v2018_07_01.operations import PublicIPPrefixesOperations as OperationClass
    elif api_version == '2018-08-01':
        from .v2018_08_01.operations import PublicIPPrefixesOperations as OperationClass
    else:
        raise NotImplementedError("APIVersion {} is not available".format(api_version))
    # Serializer/Deserializer are built from the models of the chosen version.
    serializer = Serializer(self._models_dict(api_version))
    deserializer = Deserializer(self._models_dict(api_version))
    return OperationClass(self._client, self.config, serializer, deserializer)
[ "def", "public_ip_prefixes", "(", "self", ")", ":", "api_version", "=", "self", ".", "_get_api_version", "(", "'public_ip_prefixes'", ")", "if", "api_version", "==", "'2018-07-01'", ":", "from", ".", "v2018_07_01", ".", "operations", "import", "PublicIPPrefixesOpera...
66.142857
39
def create_fourier_design_matrix(t, nmodes, freq=False, Tspan=None,
                                 logf=False, fmin=None, fmax=None):
    """
    Construct fourier design matrix from eq 11 of Lentati et al, 2013

    :param t: vector of time series in seconds
    :param nmodes: number of fourier coefficients to use
    :param freq: option to output frequencies
    :param Tspan: option to some other Tspan
    :param logf: use log frequency spacing
    :param fmin: lower sampling frequency
    :param fmax: upper sampling frequency

    :return: F: fourier design matrix
    :return: f: Sampling frequencies (if freq=True)
    """
    npts = len(t)
    F = np.zeros((npts, 2 * nmodes))

    T = Tspan if Tspan is not None else t.max() - t.min()

    # define sampling frequencies; explicit bounds take precedence, and
    # logf overrides both with log spacing over [1/T, nmodes/T].
    if fmin is not None and fmax is not None:
        f = np.linspace(fmin, fmax, nmodes)
    else:
        f = np.linspace(1 / T, nmodes / T, nmodes)
    if logf:
        f = np.logspace(np.log10(1 / T), np.log10(nmodes / T), nmodes)

    # Interleave each frequency twice: [f0, f0, f1, f1, ...].
    Ffreqs = np.repeat(f, 2)

    phase = 2 * np.pi * t[:, None] * f[None, :]
    F[:, ::2] = np.sin(phase)
    F[:, 1::2] = np.cos(phase)

    if freq:
        return F, Ffreqs
    return F
[ "def", "create_fourier_design_matrix", "(", "t", ",", "nmodes", ",", "freq", "=", "False", ",", "Tspan", "=", "None", ",", "logf", "=", "False", ",", "fmin", "=", "None", ",", "fmax", "=", "None", ")", ":", "N", "=", "len", "(", "t", ")", "F", "=...
28.272727
19.227273
def wrap_search(cls, response):
    """Wrap the response from a stream search into instances and return them

    :param response: The response from searching a stream
    :type response: :class:`requests.Response`
    :returns: the new stream instances
    :rtype: :class:`list` of :class:`stream`
    :raises: None
    """
    payload = response.json()
    return [cls.wrap_json(item) for item in payload['streams']]
[ "def", "wrap_search", "(", "cls", ",", "response", ")", ":", "streams", "=", "[", "]", "json", "=", "response", ".", "json", "(", ")", "streamjsons", "=", "json", "[", "'streams'", "]", "for", "j", "in", "streamjsons", ":", "s", "=", "cls", ".", "w...
32.352941
12.176471
def deploy_lambda(collector):
    """Deploy a lambda function"""
    configuration = collector.configuration
    amazon = configuration['amazon']
    aws_syncr = configuration['aws_syncr']
    find_lambda_function(aws_syncr, configuration).deploy(aws_syncr, amazon)
[ "def", "deploy_lambda", "(", "collector", ")", ":", "amazon", "=", "collector", ".", "configuration", "[", "'amazon'", "]", "aws_syncr", "=", "collector", ".", "configuration", "[", "'aws_syncr'", "]", "find_lambda_function", "(", "aws_syncr", ",", "collector", ...
49.4
15
def run(self, **kwargs):
    """
    Drive servo to the position set in the `position_sp` attribute.

    Any keyword arguments are applied as attributes before the run
    command is issued.
    """
    for name, value in kwargs.items():
        setattr(self, name, value)
    self.command = self.COMMAND_RUN
[ "def", "run", "(", "self", ",", "*", "*", "kwargs", ")", ":", "for", "key", "in", "kwargs", ":", "setattr", "(", "self", ",", "key", ",", "kwargs", "[", "key", "]", ")", "self", ".", "command", "=", "self", ".", "COMMAND_RUN" ]
32.142857
9.285714
def load_tool_info(tool_name):
    """
    Load the tool-info class.

    @param tool_name: The name of the tool-info module.
        Either a full Python package name or a name within the
        benchexec.tools package.
    @return: A tuple of the full name of the used tool-info module and an
        instance of the tool-info class.
    """
    # Bare names are resolved inside the benchexec.tools package.
    if '.' in tool_name:
        tool_module = tool_name
    else:
        tool_module = "benchexec.tools." + tool_name
    try:
        tool = __import__(tool_module, fromlist=['Tool']).Tool()
    except ImportError as ie:
        sys.exit('Unsupported tool "{0}" specified. ImportError: {1}'.format(tool_name, ie))
    except AttributeError:
        sys.exit('The module "{0}" does not define the necessary class "Tool", '
                 'it cannot be used as tool info for BenchExec.'.format(tool_module))
    return (tool_module, tool)
[ "def", "load_tool_info", "(", "tool_name", ")", ":", "tool_module", "=", "tool_name", "if", "'.'", "in", "tool_name", "else", "(", "\"benchexec.tools.\"", "+", "tool_name", ")", "try", ":", "tool", "=", "__import__", "(", "tool_module", ",", "fromlist", "=", ...
51
26.125
def toggle_view(self, checked):
    """Show (and raise) or hide the dock widget, if one exists."""
    widget = self.dockwidget
    if not widget:
        return
    if checked:
        widget.show()
        widget.raise_()
    else:
        widget.hide()
[ "def", "toggle_view", "(", "self", ",", "checked", ")", ":", "if", "not", "self", ".", "dockwidget", ":", "return", "if", "checked", ":", "self", ".", "dockwidget", ".", "show", "(", ")", "self", ".", "dockwidget", ".", "raise_", "(", ")", "else", ":...
26.777778
11.555556
def from_points(cls, iterable_of_points):
    """
    Creates a MultiPoint from an iterable collection of
    `pyowm.utils.geo.Point` instances

    :param iterable_of_points: iterable whose items are
        `pyowm.utils.geo.Point` instances
    :type iterable_of_points: iterable
    :return: a *MultiPoint* instance
    """
    coords = [(point.lon, point.lat) for point in iterable_of_points]
    return MultiPoint(coords)
[ "def", "from_points", "(", "cls", ",", "iterable_of_points", ")", ":", "return", "MultiPoint", "(", "[", "(", "p", ".", "lon", ",", "p", ".", "lat", ")", "for", "p", "in", "iterable_of_points", "]", ")" ]
50.25
17.5
def open(self, session_file=None, session_url=None, verbose=False):
    """
    Opens a session from a local file or URL.

    :param session_file: The path to the session file (.cys) to be loaded.
    :param session_url: A URL that provides a session file.
    :param verbose: print more
    """
    params = set_param(["file", "url"], [session_file, session_url])
    return api(url=self.__url + "/open", PARAMS=params, verbose=verbose)
[ "def", "open", "(", "self", ",", "session_file", "=", "None", ",", "session_url", "=", "None", ",", "verbose", "=", "False", ")", ":", "PARAMS", "=", "set_param", "(", "[", "\"file\"", ",", "\"url\"", "]", ",", "[", "session_file", ",", "session_url", ...
40
22
def _gt_from_ge(self, other): """Return a > b. Computed by @total_ordering from (a >= b) and (a != b).""" op_result = self.__ge__(other) if op_result is NotImplemented: return NotImplemented return op_result and self != other
[ "def", "_gt_from_ge", "(", "self", ",", "other", ")", ":", "op_result", "=", "self", ".", "__ge__", "(", "other", ")", "if", "op_result", "is", "NotImplemented", ":", "return", "NotImplemented", "return", "op_result", "and", "self", "!=", "other" ]
40.833333
5.833333
def _find_substitutions(cls, item):
    """Recursively collect substitution objects from a parsed config tree.

    :param item: a ConfigValues, ConfigTree, list, or plain leaf value
    :return: list of all substitutions found anywhere under ``item``
    """
    # ConfigValues nodes hold the substitutions directly.
    if isinstance(item, ConfigValues):
        return item.get_substitutions()

    # Containers are walked recursively; leaves contribute nothing.
    if isinstance(item, ConfigTree):
        children = item.values()
    elif isinstance(item, list):
        children = item
    else:
        children = []

    found = []
    for child in children:
        found += cls._find_substitutions(child)
    return found
[ "def", "_find_substitutions", "(", "cls", ",", "item", ")", ":", "if", "isinstance", "(", "item", ",", "ConfigValues", ")", ":", "return", "item", ".", "get_substitutions", "(", ")", "substitutions", "=", "[", "]", "elements", "=", "[", "]", "if", "isins...
29.368421
12.421053
def delete(self, *keys):
    """Removes the specified keys. A key is ignored if it does not exist.

    Returns :data:`True` if all keys are removed.

    .. note::

       **Time complexity**: ``O(N)`` where ``N`` is the number of keys that
       will be removed. When a key to remove holds a value other than a
       string, the individual complexity for this key is ``O(M)`` where
       ``M`` is the number of elements in the list, set, sorted set or
       hash. Removing a single key that holds a string value is ``O(1)``.

    :param keys: One or more keys to remove
    :type keys: :class:`str`, :class:`bytes`
    :rtype: bool
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    key_list = list(keys)
    return self._execute([b'DEL'] + key_list, len(key_list))
[ "def", "delete", "(", "self", ",", "*", "keys", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'DEL'", "]", "+", "list", "(", "keys", ")", ",", "len", "(", "keys", ")", ")" ]
41.684211
24.315789
def displayEmptyInputWarningBox(display=True, parent=None):
    """ Displays a warning box for the 'input' parameter.
    """
    # Pick the Tk messagebox API for the running Python major version.
    if sys.version_info[0] >= 3:
        from tkinter.messagebox import showwarning
    else:
        from tkMessageBox import showwarning

    if display:
        msg = 'No valid input files found! '+\
        'Please check the value for the "input" parameter.'
        showwarning(parent=parent, message=msg, title="No valid inputs!")
    return "yes"
[ "def", "displayEmptyInputWarningBox", "(", "display", "=", "True", ",", "parent", "=", "None", ")", ":", "if", "sys", ".", "version_info", "[", "0", "]", ">=", "3", ":", "from", "tkinter", ".", "messagebox", "import", "showwarning", "else", ":", "from", ...
35.846154
16.769231
def login_checking_email(pending_id, ticket, response,
                         detail_url='https://pswdless.appspot.com/rest/detail'):
    """
    Log user in using Passwordless service

    :param pending_id: PendingExternalToMainUser's id
    :param ticket: ticket returned from Passwordless
    :param response: Response object from webapp2
    :param detail_url: url to check ticket and user data
    :return: a Command that log user in when executed
    """
    return LoginCheckingEmail(
        pending_id, ticket, response, USER_COOKIE_NAME, detail_url)
[ "def", "login_checking_email", "(", "pending_id", ",", "ticket", ",", "response", ",", "detail_url", "=", "'https://pswdless.appspot.com/rest/detail'", ")", ":", "return", "LoginCheckingEmail", "(", "pending_id", ",", "ticket", ",", "response", ",", "USER_COOKIE_NAME", ...
51.8
18.4
def translate(self, tx, ty):
    """Translate the user-space origin by ``(tx, ty)``.

    The offset is interpreted as a user-space coordinate according to the
    current transformation matrix (CTM) in place before this call, i.e.
    the translation is applied after any existing transformation.

    :param tx: Amount to translate in the X direction
    :param ty: Amount to translate in the Y direction
    :type tx: float
    :type ty: float
    """
    cairo.cairo_translate(self._pointer, tx, ty)
    self._check_status()
[ "def", "translate", "(", "self", ",", "tx", ",", "ty", ")", ":", "cairo", ".", "cairo_translate", "(", "self", ".", "_pointer", ",", "tx", ",", "ty", ")", "self", ".", "_check_status", "(", ")" ]
41.0625
18.9375
def _pseudoinverse(self, A, tol=1.0e-10): """Compute the Moore-Penrose pseudoinverse, wraps np.linalg.pinv REQUIRED ARGUMENTS A (np KxK matrix) - the square matrix whose pseudoinverse is to be computed RETURN VALUES Ainv (np KxK matrix) - the pseudoinverse OPTIONAL VALUES tol - the tolerance (relative to largest magnitude singlular value) below which singular values are to not be include in forming pseudoinverse (default: 1.0e-10) NOTES In previous versions of pymbar / Numpy, we wrote our own pseudoinverse because of a bug in Numpy. """ return np.linalg.pinv(A, rcond=tol)
[ "def", "_pseudoinverse", "(", "self", ",", "A", ",", "tol", "=", "1.0e-10", ")", ":", "return", "np", ".", "linalg", ".", "pinv", "(", "A", ",", "rcond", "=", "tol", ")" ]
35.368421
29
def get_composite_field_value(self, name):
    """Return the nested form or formset instance registered under *name*.

    A field exposing ``get_form`` maps to ``self.forms``; one exposing
    ``get_formset`` maps to ``self.formsets``.
    """
    composite = self.composite_fields[name]
    if hasattr(composite, 'get_form'):
        return self.forms[name]
    if hasattr(composite, 'get_formset'):
        return self.formsets[name]
    return None
[ "def", "get_composite_field_value", "(", "self", ",", "name", ")", ":", "field", "=", "self", ".", "composite_fields", "[", "name", "]", "if", "hasattr", "(", "field", ",", "'get_form'", ")", ":", "return", "self", ".", "forms", "[", "name", "]", "if", ...
36.111111
4.555556
def Random(self):
    """Chooses a random element from this PMF.

    Samples by inverse transform: draws a uniform target and walks the
    cumulative probability until the target is reached.  Assumes the
    probabilities sum to at least 1 — TODO confirm the PMF is normalized
    before calling, otherwise the final assert can trip.

    Returns:
        float value from the Pmf
    """
    if len(self.d) == 0:
        raise ValueError('Pmf contains no values.')

    target = random.random()
    total = 0.0
    # NOTE: Python 2 API (`iteritems`).  Iteration order is arbitrary,
    # but any fixed order yields a correct sample.
    for x, p in self.d.iteritems():
        total += p
        if total >= target:
            return x

    # we shouldn't get here
    assert False
[ "def", "Random", "(", "self", ")", ":", "if", "len", "(", "self", ".", "d", ")", "==", "0", ":", "raise", "ValueError", "(", "'Pmf contains no values.'", ")", "target", "=", "random", ".", "random", "(", ")", "total", "=", "0.0", "for", "x", ",", "...
23.944444
16.666667
def return_env(self, exists=True):
    '''
    Return environment dict.

    Parameters
    ----------
    exists: bool
        It True, only return existing paths.
    '''
    # Each entry joins the relevant MSVC/SDK directory lists into one
    # PATH-style string; `exists` filters out directories that are missing
    # on this machine.
    env = dict(
        include=self._build_paths('include',
                                  [self.VCIncludes,
                                   self.OSIncludes,
                                   self.UCRTIncludes,
                                   self.NetFxSDKIncludes],
                                  exists),
        lib=self._build_paths('lib',
                              [self.VCLibraries,
                               self.OSLibraries,
                               self.FxTools,
                               self.UCRTLibraries,
                               self.NetFxSDKLibraries],
                              exists),
        libpath=self._build_paths('libpath',
                                  [self.VCLibraries,
                                   self.FxTools,
                                   self.VCStoreRefs,
                                   self.OSLibpath],
                                  exists),
        path=self._build_paths('path',
                               [self.VCTools,
                                self.VSTools,
                                self.VsTDb,
                                self.SdkTools,
                                self.SdkSetup,
                                self.FxTools,
                                self.MSBuild,
                                self.HTMLHelpWorkshop,
                                self.FSharp],
                               exists),
    )
    # VS2015+ (vc_ver >= 14) may additionally need the vcruntime
    # redistributable DLL; only expose it when it actually exists.
    if self.vc_ver >= 14 and os.path.isfile(self.VCRuntimeRedist):
        env['py_vcruntime_redist'] = self.VCRuntimeRedist
    return env
[ "def", "return_env", "(", "self", ",", "exists", "=", "True", ")", ":", "env", "=", "dict", "(", "include", "=", "self", ".", "_build_paths", "(", "'include'", ",", "[", "self", ".", "VCIncludes", ",", "self", ".", "OSIncludes", ",", "self", ".", "UC...
42.886364
12.931818
def _set_preferences(self, node):
    '''
    Build and attach the <preferences> element for the image description.

    :param node: parent XML element to attach preferences to
    :return: the created <preferences> element
    '''
    prefs = etree.SubElement(node, 'preferences')

    pkg_manager = etree.SubElement(prefs, 'packagemanager')
    pkg_manager.text = self._get_package_manager()

    version_el = etree.SubElement(prefs, 'version')
    version_el.text = '0.0.1'

    type_el = etree.SubElement(prefs, 'type')
    type_el.set('image', 'vmx')

    # Use the filesystem type of the first /dev-backed disk, if any;
    # fall back to ext3 when the disk has no recorded type.
    for disk_id, disk_data in self._data.system.get('disks', {}).items():
        if disk_id.startswith('/dev'):
            type_el.set('filesystem', disk_data.get('type') or 'ext3')
            break

    type_el.set('installiso', 'true')
    type_el.set('boot', "vmxboot/suse-leap42.1")
    type_el.set('format', self.format)
    type_el.set('bootloader', 'grub2')
    type_el.set('timezone', __salt__['timezone.get_zone']())
    type_el.set('hwclock', __salt__['timezone.get_hwclock']())

    return prefs
[ "def", "_set_preferences", "(", "self", ",", "node", ")", ":", "pref", "=", "etree", ".", "SubElement", "(", "node", ",", "'preferences'", ")", "pacman", "=", "etree", ".", "SubElement", "(", "pref", ",", "'packagemanager'", ")", "pacman", ".", "text", "...
35.259259
18.888889
def _findSwiplFromExec(): """ This function tries to use an executable on the path to find SWI-Prolog SO/DLL and the resource file. :returns: A tuple of (path to the swipl DLL, path to the resource file) :returns type: ({str, None}, {str, None}) """ platform = sys.platform[:3] fullName = None swiHome = None try: # try to get library path from swipl executable. # We may have pl or swipl as the executable try: cmd = Popen(['swipl', '--dump-runtime-variables'], stdout=PIPE) except OSError: cmd = Popen(['pl', '--dump-runtime-variables'], stdout=PIPE) ret = cmd.communicate() # Parse the output into a dictionary ret = ret[0].decode().replace(';', '').splitlines() ret = [line.split('=', 1) for line in ret] rtvars = dict((name, value[1:-1]) for name, value in ret) # [1:-1] gets # rid of the # quotes if rtvars['PLSHARED'] == 'no': raise ImportError('SWI-Prolog is not installed as a shared ' 'library.') else: # PLSHARED == 'yes' swiHome = rtvars['PLBASE'] # The environment is in PLBASE if not os.path.exists(swiHome): swiHome = None # determine platform specific path if platform == "win": dllName = rtvars['PLLIB'][:-4] + '.' + rtvars['PLSOEXT'] path = os.path.join(rtvars['PLBASE'], 'bin') fullName = os.path.join(path, dllName) if not os.path.exists(fullName): fullName = None elif platform == "cyg": # e.g. /usr/lib/pl-5.6.36/bin/i686-cygwin/cygpl.dll dllName = 'cygpl.dll' path = os.path.join(rtvars['PLBASE'], 'bin', rtvars['PLARCH']) fullName = os.path.join(path, dllName) if not os.path.exists(fullName): fullName = None elif platform == "dar": dllName = 'lib' + rtvars['PLLIB'][2:] + '.' 
+ rtvars['PLSOEXT'] path = os.path.join(rtvars['PLBASE'], 'lib', rtvars['PLARCH']) baseName = os.path.join(path, dllName) if os.path.exists(baseName): fullName = baseName else: # We will search for versions fullName = None else: # assume UNIX-like # The SO name in some linuxes is of the form libswipl.so.5.10.2, # so we have to use glob to find the correct one dllName = 'lib' + rtvars['PLLIB'][2:] + '.' + rtvars['PLSOEXT'] path = os.path.join(rtvars['PLBASE'], 'lib', rtvars['PLARCH']) baseName = os.path.join(path, dllName) if os.path.exists(baseName): fullName = baseName else: # We will search for versions pattern = baseName + '.*' files = glob.glob(pattern) if len(files) == 0: fullName = None elif len(files) == 1: fullName = files[0] else: # Will this ever happen? fullName = None except (OSError, KeyError): # KeyError from accessing rtvars pass return (fullName, swiHome)
[ "def", "_findSwiplFromExec", "(", ")", ":", "platform", "=", "sys", ".", "platform", "[", ":", "3", "]", "fullName", "=", "None", "swiHome", "=", "None", "try", ":", "# try to get library path from swipl executable.", "# We may have pl or swipl as the executable", "tr...
37.365591
20.526882
def split_before(iterable, pred): """Yield lists of items from *iterable*, where each list starts with an item where callable *pred* returns ``True``: >>> list(split_before('OneTwo', lambda s: s.isupper())) [['O', 'n', 'e'], ['T', 'w', 'o']] >>> list(split_before(range(10), lambda n: n % 3 == 0)) [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]] """ buf = [] for item in iterable: if pred(item) and buf: yield buf buf = [] buf.append(item) yield buf
[ "def", "split_before", "(", "iterable", ",", "pred", ")", ":", "buf", "=", "[", "]", "for", "item", "in", "iterable", ":", "if", "pred", "(", "item", ")", "and", "buf", ":", "yield", "buf", "buf", "=", "[", "]", "buf", ".", "append", "(", "item",...
29
18
def main(): """Command line entry point.""" def help_exit(): raise SystemExit("usage: ddate [day] [month] [year]") if "--help" in sys.argv or "-h" in sys.argv: help_exit() if len(sys.argv) == 2: # allow for 23-2-2014 style, be lazy/sloppy with it for split_char in ".-/`,:;": # who knows what the human will use... if split_char in sys.argv[1]: parts = sys.argv[1].split(split_char) del sys.argv[1] sys.argv.extend(parts) break date = _get_date(*sys.argv[1:]) if date: print(DDate(date)) else: help_exit()
[ "def", "main", "(", ")", ":", "def", "help_exit", "(", ")", ":", "raise", "SystemExit", "(", "\"usage: ddate [day] [month] [year]\"", ")", "if", "\"--help\"", "in", "sys", ".", "argv", "or", "\"-h\"", "in", "sys", ".", "argv", ":", "help_exit", "(", ")", ...
27.565217
22.478261
def remove_vg(self, vg): """ Removes a volume group:: from lvm2py import * lvm = LVM() vg = lvm.get_vg("myvg", "w") lvm.remove_vg(vg) *Args:* * vg (obj): A VolumeGroup instance. *Raises:* * HandleError, CommitError .. note:: The VolumeGroup instance must be in write mode, otherwise CommitError is raised. """ vg.open() rm = lvm_vg_remove(vg.handle) if rm != 0: vg.close() raise CommitError("Failed to remove VG.") com = lvm_vg_write(vg.handle) if com != 0: vg.close() raise CommitError("Failed to commit changes to disk.") vg.close()
[ "def", "remove_vg", "(", "self", ",", "vg", ")", ":", "vg", ".", "open", "(", ")", "rm", "=", "lvm_vg_remove", "(", "vg", ".", "handle", ")", "if", "rm", "!=", "0", ":", "vg", ".", "close", "(", ")", "raise", "CommitError", "(", "\"Failed to remove...
23.060606
21
def header_string_from_file(filename='feff.inp'): """ Reads Header string from either a HEADER file or feff.inp file Will also read a header from a non-pymatgen generated feff.inp file Args: filename: File name containing the Header data. Returns: Reads header string. """ with zopen(filename, "r") as fobject: f = fobject.readlines() feff_header_str = [] ln = 0 # Checks to see if generated by pymatgen try: feffpmg = f[0].find("pymatgen") except IndexError: feffpmg = False # Reads pymatgen generated header or feff.inp file if feffpmg: nsites = int(f[8].split()[2]) for line in f: ln += 1 if ln <= nsites + 9: feff_header_str.append(line) else: # Reads header from header from feff.inp file from unknown # source end = 0 for line in f: if (line[0] == "*" or line[0] == "T") and end == 0: feff_header_str.append(line.replace("\r", "")) else: end = 1 return ''.join(feff_header_str)
[ "def", "header_string_from_file", "(", "filename", "=", "'feff.inp'", ")", ":", "with", "zopen", "(", "filename", ",", "\"r\"", ")", "as", "fobject", ":", "f", "=", "fobject", ".", "readlines", "(", ")", "feff_header_str", "=", "[", "]", "ln", "=", "0", ...
33.125
17.975