text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def template(self): """ Get the template from: YAML or class """ # First try props if self.props.template: return self.props.template else: # Return the wtype of the widget, and we'll presume that, # like resources, there's a .html file in that directory return self.wtype
[ "def", "template", "(", "self", ")", ":", "# First try props", "if", "self", ".", "props", ".", "template", ":", "return", "self", ".", "props", ".", "template", "else", ":", "# Return the wtype of the widget, and we'll presume that,", "# like resources, there's a .html file in that directory", "return", "self", ".", "wtype" ]
34.4
18.2
def ap_state(value, failure_string=None): """ Converts a state's name, postal abbreviation or FIPS to A.P. style. Example usage: >> ap_state("California") 'Calif.' """ try: return statestyle.get(value).ap except: if failure_string: return failure_string else: return value
[ "def", "ap_state", "(", "value", ",", "failure_string", "=", "None", ")", ":", "try", ":", "return", "statestyle", ".", "get", "(", "value", ")", ".", "ap", "except", ":", "if", "failure_string", ":", "return", "failure_string", "else", ":", "return", "value" ]
21.117647
18.764706
def isDiurnal(self): """ Returns true if this chart is diurnal. """ sun = self.getObject(const.SUN) mc = self.getAngle(const.MC) # Get ecliptical positions and check if the # sun is above the horizon. lat = self.pos.lat sunRA, sunDecl = utils.eqCoords(sun.lon, sun.lat) mcRA, mcDecl = utils.eqCoords(mc.lon, 0) return utils.isAboveHorizon(sunRA, sunDecl, mcRA, lat)
[ "def", "isDiurnal", "(", "self", ")", ":", "sun", "=", "self", ".", "getObject", "(", "const", ".", "SUN", ")", "mc", "=", "self", ".", "getAngle", "(", "const", ".", "MC", ")", "# Get ecliptical positions and check if the", "# sun is above the horizon.", "lat", "=", "self", ".", "pos", ".", "lat", "sunRA", ",", "sunDecl", "=", "utils", ".", "eqCoords", "(", "sun", ".", "lon", ",", "sun", ".", "lat", ")", "mcRA", ",", "mcDecl", "=", "utils", ".", "eqCoords", "(", "mc", ".", "lon", ",", "0", ")", "return", "utils", ".", "isAboveHorizon", "(", "sunRA", ",", "sunDecl", ",", "mcRA", ",", "lat", ")" ]
39.636364
12.181818
def start(self): """Run FIO job in thread""" self.__thread = Threads(target=self.run, args=(True, True, False)) self.__thread.setDaemon(True) self.__thread.start()
[ "def", "start", "(", "self", ")", ":", "self", ".", "__thread", "=", "Threads", "(", "target", "=", "self", ".", "run", ",", "args", "=", "(", "True", ",", "True", ",", "False", ")", ")", "self", ".", "__thread", ".", "setDaemon", "(", "True", ")", "self", ".", "__thread", ".", "start", "(", ")" ]
31.833333
18.666667
def clone(self, newname, config_path=None, flags=0, bdevtype=None, bdevdata=None, newsize=0, hookargs=()): """ Clone the current container. """ args = {} args['newname'] = newname args['flags'] = flags args['newsize'] = newsize args['hookargs'] = hookargs if config_path: args['config_path'] = config_path if bdevtype: args['bdevtype'] = bdevtype if bdevdata: args['bdevdata'] = bdevdata if _lxc.Container.clone(self, **args): return Container(newname, config_path=config_path) else: return False
[ "def", "clone", "(", "self", ",", "newname", ",", "config_path", "=", "None", ",", "flags", "=", "0", ",", "bdevtype", "=", "None", ",", "bdevdata", "=", "None", ",", "newsize", "=", "0", ",", "hookargs", "=", "(", ")", ")", ":", "args", "=", "{", "}", "args", "[", "'newname'", "]", "=", "newname", "args", "[", "'flags'", "]", "=", "flags", "args", "[", "'newsize'", "]", "=", "newsize", "args", "[", "'hookargs'", "]", "=", "hookargs", "if", "config_path", ":", "args", "[", "'config_path'", "]", "=", "config_path", "if", "bdevtype", ":", "args", "[", "'bdevtype'", "]", "=", "bdevtype", "if", "bdevdata", ":", "args", "[", "'bdevdata'", "]", "=", "bdevdata", "if", "_lxc", ".", "Container", ".", "clone", "(", "self", ",", "*", "*", "args", ")", ":", "return", "Container", "(", "newname", ",", "config_path", "=", "config_path", ")", "else", ":", "return", "False" ]
29.954545
13.954545
def msetnx(self, key, value, *pairs): """Set multiple keys to multiple values, only if none of the keys exist. :raises TypeError: if len of pairs is not event number """ if len(pairs) % 2 != 0: raise TypeError("length of pairs must be even number") return self.execute(b'MSETNX', key, value, *pairs)
[ "def", "msetnx", "(", "self", ",", "key", ",", "value", ",", "*", "pairs", ")", ":", "if", "len", "(", "pairs", ")", "%", "2", "!=", "0", ":", "raise", "TypeError", "(", "\"length of pairs must be even number\"", ")", "return", "self", ".", "execute", "(", "b'MSETNX'", ",", "key", ",", "value", ",", "*", "pairs", ")" ]
39.111111
13.222222
def cmd2list(cmd): ''' Executes a command through the operating system and returns the output as a list, or on error a string with the standard error. EXAMPLE: >>> from subprocess import Popen, PIPE >>> CMDout2array('ls -l') ''' p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True) stdout, stderr = p.communicate() if p.returncode != 0 and stderr != '': return "ERROR: %s\n"%(stderr) else: return stdout.split('\n')
[ "def", "cmd2list", "(", "cmd", ")", ":", "p", "=", "Popen", "(", "cmd", ",", "stdout", "=", "PIPE", ",", "stderr", "=", "PIPE", ",", "shell", "=", "True", ")", "stdout", ",", "stderr", "=", "p", ".", "communicate", "(", ")", "if", "p", ".", "returncode", "!=", "0", "and", "stderr", "!=", "''", ":", "return", "\"ERROR: %s\\n\"", "%", "(", "stderr", ")", "else", ":", "return", "stdout", ".", "split", "(", "'\\n'", ")" ]
34.692308
17
def event(): """获取好友的动态,包括分享视频、音乐、动态等 """ r = NCloudBot() r.method = 'EVENT' r.data = {"csrf_token": ""} r.send() return r.response
[ "def", "event", "(", ")", ":", "r", "=", "NCloudBot", "(", ")", "r", ".", "method", "=", "'EVENT'", "r", ".", "data", "=", "{", "\"csrf_token\"", ":", "\"\"", "}", "r", ".", "send", "(", ")", "return", "r", ".", "response" ]
15.2
20.3
def _write_commits_to_release_notes(self): """ writes commits to the releasenotes file by appending to the end """ with open(self.release_file, 'a') as out: out.write("==========\n{}\n".format(self.tag)) for commit in self.commits: try: msg = commit[1] if msg != "cosmetic": out.write("-" + msg + "\n") except: pass
[ "def", "_write_commits_to_release_notes", "(", "self", ")", ":", "with", "open", "(", "self", ".", "release_file", ",", "'a'", ")", "as", "out", ":", "out", ".", "write", "(", "\"==========\\n{}\\n\"", ".", "format", "(", "self", ".", "tag", ")", ")", "for", "commit", "in", "self", ".", "commits", ":", "try", ":", "msg", "=", "commit", "[", "1", "]", "if", "msg", "!=", "\"cosmetic\"", ":", "out", ".", "write", "(", "\"-\"", "+", "msg", "+", "\"\\n\"", ")", "except", ":", "pass" ]
36.538462
10.076923
def upload_media(self, filename, progress=None): """Upload a file to be hosted on the target BMC This will upload the specified data to the BMC so that it will make it available to the system as an emulated USB device. :param filename: The filename to use, the basename of the parameter will be given to the bmc. :param filename: Optional callback for progress updates """ self.oem_init() return self._oem.upload_media(filename, progress)
[ "def", "upload_media", "(", "self", ",", "filename", ",", "progress", "=", "None", ")", ":", "self", ".", "oem_init", "(", ")", "return", "self", ".", "_oem", ".", "upload_media", "(", "filename", ",", "progress", ")" ]
40.384615
19.615385
def get_color_hash(string, _min=MIN_COLOR_BRIGHT, _max=MAX_COLOR_BRIGHT): """ Hashes a string and returns a number between ``min`` and ``max``. """ hash_num = int(hashlib.sha1(string.encode('utf-8')).hexdigest()[:6], 16) _range = _max - _min num_in_range = hash_num % _range return color(_min + num_in_range)
[ "def", "get_color_hash", "(", "string", ",", "_min", "=", "MIN_COLOR_BRIGHT", ",", "_max", "=", "MAX_COLOR_BRIGHT", ")", ":", "hash_num", "=", "int", "(", "hashlib", ".", "sha1", "(", "string", ".", "encode", "(", "'utf-8'", ")", ")", ".", "hexdigest", "(", ")", "[", ":", "6", "]", ",", "16", ")", "_range", "=", "_max", "-", "_min", "num_in_range", "=", "hash_num", "%", "_range", "return", "color", "(", "_min", "+", "num_in_range", ")" ]
41.125
15.125
def run(): """ Entry point to the law cli. Sets up all parsers, parses all arguments, and executes the requested subprogram. """ # setup the main parser and sub parsers parser = ArgumentParser(prog="law", description="The law command line tool.") sub_parsers = parser.add_subparsers(help="subcommands", dest="command") # add main arguments parser.add_argument("--version", "-V", action="version", version=law.__version__) # setup all progs mods = {} for prog in progs: mods[prog] = import_module("law.cli." + prog) mods[prog].setup_parser(sub_parsers) # parse args and dispatch execution if len(sys.argv) >= 2 and sys.argv[1] in forward_progs: args = parser.parse_args(sys.argv[1:3]) else: args = parser.parse_args() if args.command: mods[args.command].execute(args) else: parser.print_help()
[ "def", "run", "(", ")", ":", "# setup the main parser and sub parsers", "parser", "=", "ArgumentParser", "(", "prog", "=", "\"law\"", ",", "description", "=", "\"The law command line tool.\"", ")", "sub_parsers", "=", "parser", ".", "add_subparsers", "(", "help", "=", "\"subcommands\"", ",", "dest", "=", "\"command\"", ")", "# add main arguments", "parser", ".", "add_argument", "(", "\"--version\"", ",", "\"-V\"", ",", "action", "=", "\"version\"", ",", "version", "=", "law", ".", "__version__", ")", "# setup all progs", "mods", "=", "{", "}", "for", "prog", "in", "progs", ":", "mods", "[", "prog", "]", "=", "import_module", "(", "\"law.cli.\"", "+", "prog", ")", "mods", "[", "prog", "]", ".", "setup_parser", "(", "sub_parsers", ")", "# parse args and dispatch execution", "if", "len", "(", "sys", ".", "argv", ")", ">=", "2", "and", "sys", ".", "argv", "[", "1", "]", "in", "forward_progs", ":", "args", "=", "parser", ".", "parse_args", "(", "sys", ".", "argv", "[", "1", ":", "3", "]", ")", "else", ":", "args", "=", "parser", ".", "parse_args", "(", ")", "if", "args", ".", "command", ":", "mods", "[", "args", ".", "command", "]", ".", "execute", "(", "args", ")", "else", ":", "parser", ".", "print_help", "(", ")" ]
31.607143
21.607143
def cache_control(self): """The Cache-Control general-header field is used to specify directives that MUST be obeyed by all caching mechanisms along the request/response chain. """ def on_update(cache_control): if not cache_control and 'cache-control' in self.headers: del self.headers['cache-control'] elif cache_control: self.headers['Cache-Control'] = cache_control.to_header() return parse_cache_control_header(self.headers.get('cache-control'), on_update, ResponseCacheControl)
[ "def", "cache_control", "(", "self", ")", ":", "def", "on_update", "(", "cache_control", ")", ":", "if", "not", "cache_control", "and", "'cache-control'", "in", "self", ".", "headers", ":", "del", "self", ".", "headers", "[", "'cache-control'", "]", "elif", "cache_control", ":", "self", ".", "headers", "[", "'Cache-Control'", "]", "=", "cache_control", ".", "to_header", "(", ")", "return", "parse_cache_control_header", "(", "self", ".", "headers", ".", "get", "(", "'cache-control'", ")", ",", "on_update", ",", "ResponseCacheControl", ")" ]
50.615385
16.384615
def pysal_Join_Counts(self, **kwargs): """ Compute join count statistics for GeoRaster Usage: geo.pysal_Join_Counts(permutations = 1000, rook=True) arguments passed to raster_weights() and pysal.Join_Counts See help(gr.raster_weights), help(pysal.Join_Counts) for options """ if self.weights is None: self.raster_weights(**kwargs) rasterf = self.raster.flatten() rasterf = rasterf[rasterf.mask==False] self.Join_Counts = pysal.Join_Counts(rasterf, self.weights, **kwargs)
[ "def", "pysal_Join_Counts", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "weights", "is", "None", ":", "self", ".", "raster_weights", "(", "*", "*", "kwargs", ")", "rasterf", "=", "self", ".", "raster", ".", "flatten", "(", ")", "rasterf", "=", "rasterf", "[", "rasterf", ".", "mask", "==", "False", "]", "self", ".", "Join_Counts", "=", "pysal", ".", "Join_Counts", "(", "rasterf", ",", "self", ".", "weights", ",", "*", "*", "kwargs", ")" ]
37.266667
16.733333
def assert_not_equal(first, second, msg_fmt="{msg}"): """Fail if first equals second, as determined by the '==' operator. >>> assert_not_equal(5, 8) >>> assert_not_equal(-7, -7.0) Traceback (most recent call last): ... AssertionError: -7 == -7.0 The following msg_fmt arguments are supported: * msg - the default error message * first - the first argument * second - the second argument """ if first == second: msg = "{!r} == {!r}".format(first, second) fail(msg_fmt.format(msg=msg, first=first, second=second))
[ "def", "assert_not_equal", "(", "first", ",", "second", ",", "msg_fmt", "=", "\"{msg}\"", ")", ":", "if", "first", "==", "second", ":", "msg", "=", "\"{!r} == {!r}\"", ".", "format", "(", "first", ",", "second", ")", "fail", "(", "msg_fmt", ".", "format", "(", "msg", "=", "msg", ",", "first", "=", "first", ",", "second", "=", "second", ")", ")" ]
31.388889
14.944444
def reg_on_resume(self, callable_object, *args, **kwargs): """ Register a function/method to be called if the system needs to resume a previously halted or paused execution, including status requests.""" persistent = kwargs.pop('persistent', False) event = self._create_event(callable_object, 'resume', persistent, *args, **kwargs) self.resume_callbacks.append(event) return event
[ "def", "reg_on_resume", "(", "self", ",", "callable_object", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "persistent", "=", "kwargs", ".", "pop", "(", "'persistent'", ",", "False", ")", "event", "=", "self", ".", "_create_event", "(", "callable_object", ",", "'resume'", ",", "persistent", ",", "*", "args", ",", "*", "*", "kwargs", ")", "self", ".", "resume_callbacks", ".", "append", "(", "event", ")", "return", "event" ]
53.625
16.875
def un_priority(op,val): "unary expression order-of-operations helper" if isinstance(val,BinX) and val.op < op: return bin_priority(val.op,UnX(op,val.left),val.right) else: return UnX(op,val)
[ "def", "un_priority", "(", "op", ",", "val", ")", ":", "if", "isinstance", "(", "val", ",", "BinX", ")", "and", "val", ".", "op", "<", "op", ":", "return", "bin_priority", "(", "val", ".", "op", ",", "UnX", "(", "op", ",", "val", ".", "left", ")", ",", "val", ".", "right", ")", "else", ":", "return", "UnX", "(", "op", ",", "val", ")" ]
48.5
23.5
def isExpired(certificate): """ Check if certificate is expired """ if isinstance(certificate, six.string_types): certificate = json.loads(certificate) expiry = certificate.get('expiry', 0) return expiry < int(time.time() * 1000) + 20 * 60
[ "def", "isExpired", "(", "certificate", ")", ":", "if", "isinstance", "(", "certificate", ",", "six", ".", "string_types", ")", ":", "certificate", "=", "json", ".", "loads", "(", "certificate", ")", "expiry", "=", "certificate", ".", "get", "(", "'expiry'", ",", "0", ")", "return", "expiry", "<", "int", "(", "time", ".", "time", "(", ")", "*", "1000", ")", "+", "20", "*", "60" ]
43
6.833333
def new_text_cell(cell_type, source=None, rendered=None, metadata=None): """Create a new text cell.""" cell = NotebookNode() # VERSIONHACK: plaintext -> raw # handle never-released plaintext name for raw cells if cell_type == 'plaintext': cell_type = 'raw' if source is not None: cell.source = unicode(source) if rendered is not None: cell.rendered = unicode(rendered) cell.metadata = NotebookNode(metadata or {}) cell.cell_type = cell_type return cell
[ "def", "new_text_cell", "(", "cell_type", ",", "source", "=", "None", ",", "rendered", "=", "None", ",", "metadata", "=", "None", ")", ":", "cell", "=", "NotebookNode", "(", ")", "# VERSIONHACK: plaintext -> raw", "# handle never-released plaintext name for raw cells", "if", "cell_type", "==", "'plaintext'", ":", "cell_type", "=", "'raw'", "if", "source", "is", "not", "None", ":", "cell", ".", "source", "=", "unicode", "(", "source", ")", "if", "rendered", "is", "not", "None", ":", "cell", ".", "rendered", "=", "unicode", "(", "rendered", ")", "cell", ".", "metadata", "=", "NotebookNode", "(", "metadata", "or", "{", "}", ")", "cell", ".", "cell_type", "=", "cell_type", "return", "cell" ]
35.928571
11.714286
def urban_adj_factor(self): """ Return urban adjustment factor (UAF) used to adjust QMED and growth curves. Methodology source: eqn. 8, Kjeldsen 2010 :return: urban adjustment factor :rtype: float """ urbext = self.catchment.descriptors.urbext(self.year) result = self._pruaf() ** 2.16 * (1 + urbext) ** 0.37 self.results_log['urban_extent'] = urbext self.results_log['urban_adj_factor'] = result return result
[ "def", "urban_adj_factor", "(", "self", ")", ":", "urbext", "=", "self", ".", "catchment", ".", "descriptors", ".", "urbext", "(", "self", ".", "year", ")", "result", "=", "self", ".", "_pruaf", "(", ")", "**", "2.16", "*", "(", "1", "+", "urbext", ")", "**", "0.37", "self", ".", "results_log", "[", "'urban_extent'", "]", "=", "urbext", "self", ".", "results_log", "[", "'urban_adj_factor'", "]", "=", "result", "return", "result" ]
34.785714
17.642857
def docs(): """ Create documentation. """ from epydoc import cli path('build').exists() or path('build').makedirs() # get storage path docs_dir = options.docs.get('docs_dir', 'docs/apidocs') # clean up previous docs (path(docs_dir) / "epydoc.css").exists() and path(docs_dir).rmtree() # set up excludes try: exclude_names = options.docs.excludes except AttributeError: exclude_names = [] else: exclude_names = exclude_names.replace(',', ' ').split() excludes = [] for pkg in exclude_names: excludes.append("--exclude") excludes.append('^' + re.escape(pkg)) # call epydoc in-process sys_argv = sys.argv try: sys.argv = [ sys.argv[0] + "::epydoc", "-v", "--inheritance", "listed", "--output", docs_dir, "--name", "%s %s" % (options.setup.name, options.setup.version), "--url", options.setup.url, "--graph", "umlclasstree", ] + excludes + toplevel_packages() sys.stderr.write("Running '%s'\n" % ("' '".join(sys.argv))) cli.cli() finally: sys.argv = sys_argv
[ "def", "docs", "(", ")", ":", "from", "epydoc", "import", "cli", "path", "(", "'build'", ")", ".", "exists", "(", ")", "or", "path", "(", "'build'", ")", ".", "makedirs", "(", ")", "# get storage path", "docs_dir", "=", "options", ".", "docs", ".", "get", "(", "'docs_dir'", ",", "'docs/apidocs'", ")", "# clean up previous docs", "(", "path", "(", "docs_dir", ")", "/", "\"epydoc.css\"", ")", ".", "exists", "(", ")", "and", "path", "(", "docs_dir", ")", ".", "rmtree", "(", ")", "# set up excludes", "try", ":", "exclude_names", "=", "options", ".", "docs", ".", "excludes", "except", "AttributeError", ":", "exclude_names", "=", "[", "]", "else", ":", "exclude_names", "=", "exclude_names", ".", "replace", "(", "','", ",", "' '", ")", ".", "split", "(", ")", "excludes", "=", "[", "]", "for", "pkg", "in", "exclude_names", ":", "excludes", ".", "append", "(", "\"--exclude\"", ")", "excludes", ".", "append", "(", "'^'", "+", "re", ".", "escape", "(", "pkg", ")", ")", "# call epydoc in-process", "sys_argv", "=", "sys", ".", "argv", "try", ":", "sys", ".", "argv", "=", "[", "sys", ".", "argv", "[", "0", "]", "+", "\"::epydoc\"", ",", "\"-v\"", ",", "\"--inheritance\"", ",", "\"listed\"", ",", "\"--output\"", ",", "docs_dir", ",", "\"--name\"", ",", "\"%s %s\"", "%", "(", "options", ".", "setup", ".", "name", ",", "options", ".", "setup", ".", "version", ")", ",", "\"--url\"", ",", "options", ".", "setup", ".", "url", ",", "\"--graph\"", ",", "\"umlclasstree\"", ",", "]", "+", "excludes", "+", "toplevel_packages", "(", ")", "sys", ".", "stderr", ".", "write", "(", "\"Running '%s'\\n\"", "%", "(", "\"' '\"", ".", "join", "(", "sys", ".", "argv", ")", ")", ")", "cli", ".", "cli", "(", ")", "finally", ":", "sys", ".", "argv", "=", "sys_argv" ]
27.5
19.214286
def dirty_ops(self, instance): ''' Returns a dict of the operations needed to update this object. See :func:`Document.get_dirty_ops` for more details.''' obj_value = instance._values[self._name] if not obj_value.set: return {} if not obj_value.dirty and self.__type.config_extra_fields != 'ignore': return {} ops = obj_value.value.get_dirty_ops() ret = {} for op, values in ops.items(): ret[op] = {} for key, value in values.items(): name = '%s.%s' % (self._name, key) ret[op][name] = value return ret
[ "def", "dirty_ops", "(", "self", ",", "instance", ")", ":", "obj_value", "=", "instance", ".", "_values", "[", "self", ".", "_name", "]", "if", "not", "obj_value", ".", "set", ":", "return", "{", "}", "if", "not", "obj_value", ".", "dirty", "and", "self", ".", "__type", ".", "config_extra_fields", "!=", "'ignore'", ":", "return", "{", "}", "ops", "=", "obj_value", ".", "value", ".", "get_dirty_ops", "(", ")", "ret", "=", "{", "}", "for", "op", ",", "values", "in", "ops", ".", "items", "(", ")", ":", "ret", "[", "op", "]", "=", "{", "}", "for", "key", ",", "value", "in", "values", ".", "items", "(", ")", ":", "name", "=", "'%s.%s'", "%", "(", "self", ".", "_name", ",", "key", ")", "ret", "[", "op", "]", "[", "name", "]", "=", "value", "return", "ret" ]
33.789474
19.684211
def write_http_response( self, status: http.HTTPStatus, headers: Headers, body: Optional[bytes] = None ) -> None: """ Write status line and headers to the HTTP response. This coroutine is also able to write a response body. """ self.response_headers = headers logger.debug("%s > HTTP/1.1 %d %s", self.side, status.value, status.phrase) logger.debug("%s > %r", self.side, headers) # Since the status line and headers only contain ASCII characters, # we can keep this simple. response = f"HTTP/1.1 {status.value} {status.phrase}\r\n" response += str(headers) self.writer.write(response.encode()) if body is not None: logger.debug("%s > Body (%d bytes)", self.side, len(body)) self.writer.write(body)
[ "def", "write_http_response", "(", "self", ",", "status", ":", "http", ".", "HTTPStatus", ",", "headers", ":", "Headers", ",", "body", ":", "Optional", "[", "bytes", "]", "=", "None", ")", "->", "None", ":", "self", ".", "response_headers", "=", "headers", "logger", ".", "debug", "(", "\"%s > HTTP/1.1 %d %s\"", ",", "self", ".", "side", ",", "status", ".", "value", ",", "status", ".", "phrase", ")", "logger", ".", "debug", "(", "\"%s > %r\"", ",", "self", ".", "side", ",", "headers", ")", "# Since the status line and headers only contain ASCII characters,", "# we can keep this simple.", "response", "=", "f\"HTTP/1.1 {status.value} {status.phrase}\\r\\n\"", "response", "+=", "str", "(", "headers", ")", "self", ".", "writer", ".", "write", "(", "response", ".", "encode", "(", ")", ")", "if", "body", "is", "not", "None", ":", "logger", ".", "debug", "(", "\"%s > Body (%d bytes)\"", ",", "self", ".", "side", ",", "len", "(", "body", ")", ")", "self", ".", "writer", ".", "write", "(", "body", ")" ]
34.166667
22.75
def estimated_bytes_processed(self): """Return the estimated number of bytes processed by the query. See: https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.estimatedBytesProcessed :rtype: int or None :returns: number of DML rows affected by the job, or None if job is not yet complete. """ result = self._job_statistics().get("estimatedBytesProcessed") if result is not None: result = int(result) return result
[ "def", "estimated_bytes_processed", "(", "self", ")", ":", "result", "=", "self", ".", "_job_statistics", "(", ")", ".", "get", "(", "\"estimatedBytesProcessed\"", ")", "if", "result", "is", "not", "None", ":", "result", "=", "int", "(", "result", ")", "return", "result" ]
37.857143
22.142857
def raw_to_bv(self): """ A counterpart to FP.raw_to_bv - does nothing and returns itself. """ if self.symbolic: return BVS(next(iter(self.variables)).replace(self.STRING_TYPE_IDENTIFIER, self.GENERATED_BVS_IDENTIFIER), self.length) else: return BVV(ord(self.args[0]), self.length)
[ "def", "raw_to_bv", "(", "self", ")", ":", "if", "self", ".", "symbolic", ":", "return", "BVS", "(", "next", "(", "iter", "(", "self", ".", "variables", ")", ")", ".", "replace", "(", "self", ".", "STRING_TYPE_IDENTIFIER", ",", "self", ".", "GENERATED_BVS_IDENTIFIER", ")", ",", "self", ".", "length", ")", "else", ":", "return", "BVV", "(", "ord", "(", "self", ".", "args", "[", "0", "]", ")", ",", "self", ".", "length", ")" ]
42.125
24.875
def edit_mdp(mdp, new_mdp=None, extend_parameters=None, **substitutions): """Change values in a Gromacs mdp file. Parameters and values are supplied as substitutions, eg ``nsteps=1000``. By default the template mdp file is **overwritten in place**. If a parameter does not exist in the template then it cannot be substituted and the parameter/value pair is returned. The user has to check the returned list in order to make sure that everything worked as expected. At the moment it is not possible to automatically append the new values to the mdp file because of ambiguities when having to replace dashes in parameter names with underscores (see the notes below on dashes/underscores). If a parameter is set to the value ``None`` then it will be ignored. :Arguments: *mdp* : filename filename of input (and output filename of ``new_mdp=None``) *new_mdp* : filename filename of alternative output mdp file [None] *extend_parameters* : string or list of strings single parameter or list of parameters for which the new values should be appended to the existing value in the mdp file. This makes mostly sense for a single parameter, namely 'include', which is set as the default. Set to ``[]`` to disable. ['include'] *substitutions* parameter=value pairs, where parameter is defined by the Gromacs mdp file; dashes in parameter names have to be replaced by underscores. If a value is a list-like object then the items are written as a sequence, joined with spaces, e.g. :: ref_t=[310,310,310] ---> ref_t = 310 310 310 :Returns: Dict of parameters that have *not* been substituted. **Example** :: edit_mdp('md.mdp', new_mdp='long_md.mdp', nsteps=100000, nstxtcout=1000, lincs_iter=2) .. Note:: * Dashes in Gromacs mdp parameters have to be replaced by an underscore when supplied as python keyword arguments (a limitation of python). For example the MDP syntax is ``lincs-iter = 4`` but the corresponding keyword would be ``lincs_iter = 4``. 
* If the keyword is set as a dict key, eg ``mdp_params['lincs-iter']=4`` then one does not have to substitute. * Parameters *aa_bb* and *aa-bb* are considered the same (although this should not be a problem in practice because there are no mdp parameters that only differ by a underscore). * This code is more compact in ``Perl`` as one can use ``s///`` operators: ``s/^(\s*${key}\s*=\s*).*/$1${val}/`` .. SeeAlso:: One can also load the mdp file with :class:`gromacs.formats.MDP`, edit the object (a dict), and save it again. """ if new_mdp is None: new_mdp = mdp if extend_parameters is None: extend_parameters = ['include'] else: extend_parameters = list(asiterable(extend_parameters)) # None parameters should be ignored (simple way to keep the template defaults) substitutions = {k: v for k,v in substitutions.items() if v is not None} params = list(substitutions.keys()) # list will be reduced for each match def demangled(p): """Return a RE string that matches the parameter.""" return p.replace('_', '[-_]') # must catch either - or _ patterns = {parameter: re.compile("""\ (?P<assignment>\s*{0!s}\s*=\s*) # parameter == everything before the value (?P<value>[^;]*) # value (stop before comment=;) (?P<comment>\s*;.*)? 
# optional comment """.format(demangled(parameter)), re.VERBOSE) for parameter in substitutions} with tempfile.TemporaryFile() as target: with open(mdp, 'rb') as src: logger.info("editing mdp = {0!r}: {1!r}".format(mdp, substitutions.keys())) for line in src: line = line.decode('utf-8') new_line = line.strip() # \n must be stripped to ensure that new line is built without break for p in params[:]: m = patterns[p].match(new_line) if m: # I am too stupid to replace a specific region in the string so I rebuild it # (matching a line and then replacing value requires TWO re calls) #print 'line:' + new_line #print m.groupdict() if m.group('comment') is None: comment = '' else: comment = " "+m.group('comment') assignment = m.group('assignment') if not assignment.endswith(' '): assignment += ' ' # build new line piece-wise: new_line = assignment if p in extend_parameters: # keep original value and add new stuff at end new_line += str(m.group('value')) + ' ' # automatically transform lists into space-separated string values value = " ".join(map(str, asiterable(substitutions[p]))) new_line += value + comment params.remove(p) break target.write((new_line+'\n').encode('utf-8')) target.seek(0) # XXX: Is there a danger of corrupting the original mdp if something went wrong? with open(new_mdp, 'wb') as final: shutil.copyfileobj(target, final) # return all parameters that have NOT been substituted if len(params) > 0: logger.warn("Not substituted in {new_mdp!r}: {params!r}".format(**vars())) return {p: substitutions[p] for p in params}
[ "def", "edit_mdp", "(", "mdp", ",", "new_mdp", "=", "None", ",", "extend_parameters", "=", "None", ",", "*", "*", "substitutions", ")", ":", "if", "new_mdp", "is", "None", ":", "new_mdp", "=", "mdp", "if", "extend_parameters", "is", "None", ":", "extend_parameters", "=", "[", "'include'", "]", "else", ":", "extend_parameters", "=", "list", "(", "asiterable", "(", "extend_parameters", ")", ")", "# None parameters should be ignored (simple way to keep the template defaults)", "substitutions", "=", "{", "k", ":", "v", "for", "k", ",", "v", "in", "substitutions", ".", "items", "(", ")", "if", "v", "is", "not", "None", "}", "params", "=", "list", "(", "substitutions", ".", "keys", "(", ")", ")", "# list will be reduced for each match", "def", "demangled", "(", "p", ")", ":", "\"\"\"Return a RE string that matches the parameter.\"\"\"", "return", "p", ".", "replace", "(", "'_'", ",", "'[-_]'", ")", "# must catch either - or _", "patterns", "=", "{", "parameter", ":", "re", ".", "compile", "(", "\"\"\"\\\n (?P<assignment>\\s*{0!s}\\s*=\\s*) # parameter == everything before the value\n (?P<value>[^;]*) # value (stop before comment=;)\n (?P<comment>\\s*;.*)? 
# optional comment\n \"\"\"", ".", "format", "(", "demangled", "(", "parameter", ")", ")", ",", "re", ".", "VERBOSE", ")", "for", "parameter", "in", "substitutions", "}", "with", "tempfile", ".", "TemporaryFile", "(", ")", "as", "target", ":", "with", "open", "(", "mdp", ",", "'rb'", ")", "as", "src", ":", "logger", ".", "info", "(", "\"editing mdp = {0!r}: {1!r}\"", ".", "format", "(", "mdp", ",", "substitutions", ".", "keys", "(", ")", ")", ")", "for", "line", "in", "src", ":", "line", "=", "line", ".", "decode", "(", "'utf-8'", ")", "new_line", "=", "line", ".", "strip", "(", ")", "# \\n must be stripped to ensure that new line is built without break", "for", "p", "in", "params", "[", ":", "]", ":", "m", "=", "patterns", "[", "p", "]", ".", "match", "(", "new_line", ")", "if", "m", ":", "# I am too stupid to replace a specific region in the string so I rebuild it", "# (matching a line and then replacing value requires TWO re calls)", "#print 'line:' + new_line", "#print m.groupdict()", "if", "m", ".", "group", "(", "'comment'", ")", "is", "None", ":", "comment", "=", "''", "else", ":", "comment", "=", "\" \"", "+", "m", ".", "group", "(", "'comment'", ")", "assignment", "=", "m", ".", "group", "(", "'assignment'", ")", "if", "not", "assignment", ".", "endswith", "(", "' '", ")", ":", "assignment", "+=", "' '", "# build new line piece-wise:", "new_line", "=", "assignment", "if", "p", "in", "extend_parameters", ":", "# keep original value and add new stuff at end", "new_line", "+=", "str", "(", "m", ".", "group", "(", "'value'", ")", ")", "+", "' '", "# automatically transform lists into space-separated string values", "value", "=", "\" \"", ".", "join", "(", "map", "(", "str", ",", "asiterable", "(", "substitutions", "[", "p", "]", ")", ")", ")", "new_line", "+=", "value", "+", "comment", "params", ".", "remove", "(", "p", ")", "break", "target", ".", "write", "(", "(", "new_line", "+", "'\\n'", ")", ".", "encode", "(", "'utf-8'", ")", ")", "target", ".", "seek", 
"(", "0", ")", "# XXX: Is there a danger of corrupting the original mdp if something went wrong?", "with", "open", "(", "new_mdp", ",", "'wb'", ")", "as", "final", ":", "shutil", ".", "copyfileobj", "(", "target", ",", "final", ")", "# return all parameters that have NOT been substituted", "if", "len", "(", "params", ")", ">", "0", ":", "logger", ".", "warn", "(", "\"Not substituted in {new_mdp!r}: {params!r}\"", ".", "format", "(", "*", "*", "vars", "(", ")", ")", ")", "return", "{", "p", ":", "substitutions", "[", "p", "]", "for", "p", "in", "params", "}" ]
48.516393
26.245902
def poll(self): """check if the jobs are running and return a list of pids for finished jobs """ finished_procs = [p for p in self.running_procs if p.poll() is not None] self.running_procs = collections.deque([p for p in self.running_procs if p not in finished_procs]) for proc in finished_procs: stdout, stderr = proc.communicate() ## proc.communicate() returns (stdout, stderr) when ## self.pipe = True. Otherwise they are (None, None) finished_pids = [p.pid for p in finished_procs] self.finished_pids.extend(finished_pids) logger = logging.getLogger(__name__) messages = 'Running: {}, Finished: {}'.format(len(self.running_procs), len(self.finished_pids)) logger.info(messages) return finished_pids
[ "def", "poll", "(", "self", ")", ":", "finished_procs", "=", "[", "p", "for", "p", "in", "self", ".", "running_procs", "if", "p", ".", "poll", "(", ")", "is", "not", "None", "]", "self", ".", "running_procs", "=", "collections", ".", "deque", "(", "[", "p", "for", "p", "in", "self", ".", "running_procs", "if", "p", "not", "in", "finished_procs", "]", ")", "for", "proc", "in", "finished_procs", ":", "stdout", ",", "stderr", "=", "proc", ".", "communicate", "(", ")", "## proc.communicate() returns (stdout, stderr) when", "## self.pipe = True. Otherwise they are (None, None)", "finished_pids", "=", "[", "p", ".", "pid", "for", "p", "in", "finished_procs", "]", "self", ".", "finished_pids", ".", "extend", "(", "finished_pids", ")", "logger", "=", "logging", ".", "getLogger", "(", "__name__", ")", "messages", "=", "'Running: {}, Finished: {}'", ".", "format", "(", "len", "(", "self", ".", "running_procs", ")", ",", "len", "(", "self", ".", "finished_pids", ")", ")", "logger", ".", "info", "(", "messages", ")", "return", "finished_pids" ]
39
24.857143
def html_singleAll(self,template="basic"): """generate a data view for every ABF in the project folder.""" for fname in smartSort(self.cells): if template=="fixed": self.html_single_fixed(fname) else: self.html_single_basic(fname)
[ "def", "html_singleAll", "(", "self", ",", "template", "=", "\"basic\"", ")", ":", "for", "fname", "in", "smartSort", "(", "self", ".", "cells", ")", ":", "if", "template", "==", "\"fixed\"", ":", "self", ".", "html_single_fixed", "(", "fname", ")", "else", ":", "self", ".", "html_single_basic", "(", "fname", ")" ]
42.285714
6.428571
async def filter_by(cls, db, offset=None, limit=None, **kwargs): """Query by attributes iteratively. Ordering is not supported Example: User.get_by(db, age=[32, 54]) User.get_by(db, age=23, name="guido") """ if limit and type(limit) is not int: raise InvalidQuery('If limit is supplied it must be an int') if offset and type(offset) is not int: raise InvalidQuery('If offset is supplied it must be an int') ids_to_iterate = await cls._get_ids_filter_by(db, **kwargs) if offset: # Using offset without order_by is pretty strange, but allowed if limit: ids_to_iterate = ids_to_iterate[offset:offset+limit] else: ids_to_iterate = ids_to_iterate[offset:] elif limit: ids_to_iterate = ids_to_iterate[:limit] for key in ids_to_iterate: yield await cls.load(db, key)
[ "async", "def", "filter_by", "(", "cls", ",", "db", ",", "offset", "=", "None", ",", "limit", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "limit", "and", "type", "(", "limit", ")", "is", "not", "int", ":", "raise", "InvalidQuery", "(", "'If limit is supplied it must be an int'", ")", "if", "offset", "and", "type", "(", "offset", ")", "is", "not", "int", ":", "raise", "InvalidQuery", "(", "'If offset is supplied it must be an int'", ")", "ids_to_iterate", "=", "await", "cls", ".", "_get_ids_filter_by", "(", "db", ",", "*", "*", "kwargs", ")", "if", "offset", ":", "# Using offset without order_by is pretty strange, but allowed", "if", "limit", ":", "ids_to_iterate", "=", "ids_to_iterate", "[", "offset", ":", "offset", "+", "limit", "]", "else", ":", "ids_to_iterate", "=", "ids_to_iterate", "[", "offset", ":", "]", "elif", "limit", ":", "ids_to_iterate", "=", "ids_to_iterate", "[", ":", "limit", "]", "for", "key", "in", "ids_to_iterate", ":", "yield", "await", "cls", ".", "load", "(", "db", ",", "key", ")" ]
39.625
19.208333
def describe_lcc_csv(lcdict, returndesc=False): ''' This describes the LCC CSV format light curve file. Parameters ---------- lcdict : dict The input lcdict to parse for column and metadata info. returndesc : bool If True, returns the description string as an str instead of just printing it to stdout. Returns ------- str or None If returndesc is True, returns the description lines as a str, otherwise returns nothing. ''' metadata_lines = [] coldef_lines = [] if 'lcformat' in lcdict and 'lcc-csv' in lcdict['lcformat'].lower(): metadata = lcdict['metadata'] metakeys = lcdict['objectinfo'].keys() coldefs = lcdict['coldefs'] for mk in metakeys: metadata_lines.append( '%20s | %s' % ( mk, metadata[mk]['desc'] ) ) for ck in lcdict['columns']: coldef_lines.append('column %02d | %8s | numpy dtype: %3s | %s' % (coldefs[ck]['colnum'], ck, coldefs[ck]['dtype'], coldefs[ck]['desc'])) desc = LCC_CSVLC_DESCTEMPLATE.format( objectid=lcdict['objectid'], metadata_desc='\n'.join(metadata_lines), metadata=pformat(lcdict['objectinfo']), columndefs='\n'.join(coldef_lines) ) print(desc) if returndesc: return desc else: LOGERROR("this lcdict is not from an LCC CSV, can't figure it out...") return None
[ "def", "describe_lcc_csv", "(", "lcdict", ",", "returndesc", "=", "False", ")", ":", "metadata_lines", "=", "[", "]", "coldef_lines", "=", "[", "]", "if", "'lcformat'", "in", "lcdict", "and", "'lcc-csv'", "in", "lcdict", "[", "'lcformat'", "]", ".", "lower", "(", ")", ":", "metadata", "=", "lcdict", "[", "'metadata'", "]", "metakeys", "=", "lcdict", "[", "'objectinfo'", "]", ".", "keys", "(", ")", "coldefs", "=", "lcdict", "[", "'coldefs'", "]", "for", "mk", "in", "metakeys", ":", "metadata_lines", ".", "append", "(", "'%20s | %s'", "%", "(", "mk", ",", "metadata", "[", "mk", "]", "[", "'desc'", "]", ")", ")", "for", "ck", "in", "lcdict", "[", "'columns'", "]", ":", "coldef_lines", ".", "append", "(", "'column %02d | %8s | numpy dtype: %3s | %s'", "%", "(", "coldefs", "[", "ck", "]", "[", "'colnum'", "]", ",", "ck", ",", "coldefs", "[", "ck", "]", "[", "'dtype'", "]", ",", "coldefs", "[", "ck", "]", "[", "'desc'", "]", ")", ")", "desc", "=", "LCC_CSVLC_DESCTEMPLATE", ".", "format", "(", "objectid", "=", "lcdict", "[", "'objectid'", "]", ",", "metadata_desc", "=", "'\\n'", ".", "join", "(", "metadata_lines", ")", ",", "metadata", "=", "pformat", "(", "lcdict", "[", "'objectinfo'", "]", ")", ",", "columndefs", "=", "'\\n'", ".", "join", "(", "coldef_lines", ")", ")", "print", "(", "desc", ")", "if", "returndesc", ":", "return", "desc", "else", ":", "LOGERROR", "(", "\"this lcdict is not from an LCC CSV, can't figure it out...\"", ")", "return", "None" ]
24.462687
24.850746
def get_signature_candidate(lines): """Return lines that could hold signature The lines should: * be among last SIGNATURE_MAX_LINES non-empty lines. * not include first line * be shorter than TOO_LONG_SIGNATURE_LINE * not include more than one line that starts with dashes """ # non empty lines indexes non_empty = [i for i, line in enumerate(lines) if line.strip()] # if message is empty or just one line then there is no signature if len(non_empty) <= 1: return [] # we don't expect signature to start at the 1st line candidate = non_empty[1:] # signature shouldn't be longer then SIGNATURE_MAX_LINES candidate = candidate[-SIGNATURE_MAX_LINES:] markers = _mark_candidate_indexes(lines, candidate) candidate = _process_marked_candidate_indexes(candidate, markers) # get actual lines for the candidate instead of indexes if candidate: candidate = lines[candidate[0]:] return candidate return []
[ "def", "get_signature_candidate", "(", "lines", ")", ":", "# non empty lines indexes", "non_empty", "=", "[", "i", "for", "i", ",", "line", "in", "enumerate", "(", "lines", ")", "if", "line", ".", "strip", "(", ")", "]", "# if message is empty or just one line then there is no signature", "if", "len", "(", "non_empty", ")", "<=", "1", ":", "return", "[", "]", "# we don't expect signature to start at the 1st line", "candidate", "=", "non_empty", "[", "1", ":", "]", "# signature shouldn't be longer then SIGNATURE_MAX_LINES", "candidate", "=", "candidate", "[", "-", "SIGNATURE_MAX_LINES", ":", "]", "markers", "=", "_mark_candidate_indexes", "(", "lines", ",", "candidate", ")", "candidate", "=", "_process_marked_candidate_indexes", "(", "candidate", ",", "markers", ")", "# get actual lines for the candidate instead of indexes", "if", "candidate", ":", "candidate", "=", "lines", "[", "candidate", "[", "0", "]", ":", "]", "return", "candidate", "return", "[", "]" ]
31.483871
20.774194
def _rgetattr(obj, key): """Recursive getattr for handling dots in keys.""" for k in key.split("."): obj = getattr(obj, k) return obj
[ "def", "_rgetattr", "(", "obj", ",", "key", ")", ":", "for", "k", "in", "key", ".", "split", "(", "\".\"", ")", ":", "obj", "=", "getattr", "(", "obj", ",", "k", ")", "return", "obj" ]
29.8
13
def get_port_channel_detail_output_lacp_partner_oper_priority(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") get_port_channel_detail = ET.Element("get_port_channel_detail") config = get_port_channel_detail output = ET.SubElement(get_port_channel_detail, "output") lacp = ET.SubElement(output, "lacp") partner_oper_priority = ET.SubElement(lacp, "partner-oper-priority") partner_oper_priority.text = kwargs.pop('partner_oper_priority') callback = kwargs.pop('callback', self._callback) return callback(config)
[ "def", "get_port_channel_detail_output_lacp_partner_oper_priority", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "get_port_channel_detail", "=", "ET", ".", "Element", "(", "\"get_port_channel_detail\"", ")", "config", "=", "get_port_channel_detail", "output", "=", "ET", ".", "SubElement", "(", "get_port_channel_detail", ",", "\"output\"", ")", "lacp", "=", "ET", ".", "SubElement", "(", "output", ",", "\"lacp\"", ")", "partner_oper_priority", "=", "ET", ".", "SubElement", "(", "lacp", ",", "\"partner-oper-priority\"", ")", "partner_oper_priority", ".", "text", "=", "kwargs", ".", "pop", "(", "'partner_oper_priority'", ")", "callback", "=", "kwargs", ".", "pop", "(", "'callback'", ",", "self", ".", "_callback", ")", "return", "callback", "(", "config", ")" ]
47.076923
18.076923
def user_get_session_token(self, app_id=None, email=None, password=None, ekey=None, fb_access_token=None, tw_oauth_token=None, tw_oauth_token_secret=None, api_key=None): """user/get_session_token http://www.mediafire.com/developers/core_api/1.3/user/#get_session_token """ if app_id is None: raise ValueError("app_id must be defined") params = QueryParams({ 'application_id': str(app_id), 'token_version': 2, 'response_format': 'json' }) if fb_access_token: params['fb_access_token'] = fb_access_token signature_keys = ['fb_access_token'] elif tw_oauth_token and tw_oauth_token_secret: params['tw_oauth_token'] = tw_oauth_token params['tw_oauth_token_secret'] = tw_oauth_token_secret signature_keys = ['tw_oauth_token', 'tw_oauth_token_secret'] elif (email or ekey) and password: signature_keys = [] if email: signature_keys.append('email') params['email'] = email if ekey: signature_keys.append('ekey') params['ekey'] = ekey params['password'] = password signature_keys.append('password') else: raise ValueError("Credentials not provided") signature_keys.append('application_id') signature = hashlib.sha1() for key in signature_keys: signature.update(str(params[key]).encode('ascii')) # Note: If the app uses a callback URL to provide its API key, # or if it does not have the "Require Secret Key" option checked, # then the API key may be omitted from the signature if api_key: signature.update(api_key.encode('ascii')) query = urlencode(params) query += '&signature=' + signature.hexdigest() return self.request('user/get_session_token', params=query)
[ "def", "user_get_session_token", "(", "self", ",", "app_id", "=", "None", ",", "email", "=", "None", ",", "password", "=", "None", ",", "ekey", "=", "None", ",", "fb_access_token", "=", "None", ",", "tw_oauth_token", "=", "None", ",", "tw_oauth_token_secret", "=", "None", ",", "api_key", "=", "None", ")", ":", "if", "app_id", "is", "None", ":", "raise", "ValueError", "(", "\"app_id must be defined\"", ")", "params", "=", "QueryParams", "(", "{", "'application_id'", ":", "str", "(", "app_id", ")", ",", "'token_version'", ":", "2", ",", "'response_format'", ":", "'json'", "}", ")", "if", "fb_access_token", ":", "params", "[", "'fb_access_token'", "]", "=", "fb_access_token", "signature_keys", "=", "[", "'fb_access_token'", "]", "elif", "tw_oauth_token", "and", "tw_oauth_token_secret", ":", "params", "[", "'tw_oauth_token'", "]", "=", "tw_oauth_token", "params", "[", "'tw_oauth_token_secret'", "]", "=", "tw_oauth_token_secret", "signature_keys", "=", "[", "'tw_oauth_token'", ",", "'tw_oauth_token_secret'", "]", "elif", "(", "email", "or", "ekey", ")", "and", "password", ":", "signature_keys", "=", "[", "]", "if", "email", ":", "signature_keys", ".", "append", "(", "'email'", ")", "params", "[", "'email'", "]", "=", "email", "if", "ekey", ":", "signature_keys", ".", "append", "(", "'ekey'", ")", "params", "[", "'ekey'", "]", "=", "ekey", "params", "[", "'password'", "]", "=", "password", "signature_keys", ".", "append", "(", "'password'", ")", "else", ":", "raise", "ValueError", "(", "\"Credentials not provided\"", ")", "signature_keys", ".", "append", "(", "'application_id'", ")", "signature", "=", "hashlib", ".", "sha1", "(", ")", "for", "key", "in", "signature_keys", ":", "signature", ".", "update", "(", "str", "(", "params", "[", "key", "]", ")", ".", "encode", "(", "'ascii'", ")", ")", "# Note: If the app uses a callback URL to provide its API key,", "# or if it does not have the \"Require Secret Key\" option checked,", "# then the API key may be omitted from 
the signature", "if", "api_key", ":", "signature", ".", "update", "(", "api_key", ".", "encode", "(", "'ascii'", ")", ")", "query", "=", "urlencode", "(", "params", ")", "query", "+=", "'&signature='", "+", "signature", ".", "hexdigest", "(", ")", "return", "self", ".", "request", "(", "'user/get_session_token'", ",", "params", "=", "query", ")" ]
36.140351
19.157895
def xSectionLink(lines): """ Parse Cross Section Links Method """ # Constants KEYWORDS = ('LINK', 'DX', 'TRAPEZOID', 'TRAPEZOID_ERODE', 'TRAPEZOID_SUBSURFACE', 'ERODE_TRAPEZOID', 'ERODE_SUBSURFACE', 'SUBSURFACE_TRAPEZOID', 'SUBSURFACE_ERODE', 'TRAPEZOID_ERODE_SUBSURFACE', 'TRAPEZOID_SUBSURFACE_ERODE', 'ERODE_TRAPEZOID_SUBSURFACE', 'ERODE_SUBSURFACE_TRAPEZOID', 'SUBSURFACE_TRAPEZOID_ERODE', 'SUBSURFACE_ERODE_TRAPEZOID', 'BREAKPOINT', 'BREAKPOINT_ERODE', 'BREAKPOINT_SUBSURFACE', 'ERODE_BREAKPOINT', 'ERODE_SUBSURFACE', 'SUBSURFACE_BREAKPOINT', 'SUBSURFACE_ERODE', 'BREAKPOINT_ERODE_SUBSURFACE', 'BREAKPOINT_SUBSURFACE_ERODE', 'ERODE_BREAKPOINT_SUBSURFACE', 'ERODE_SUBSURFACE_BREAKPOINT', 'SUBSURFACE_BREAKPOINT_ERODE', 'SUBSURFACE_ERODE_BREAKPOINT', 'TRAP', 'TRAP_ERODE', 'TRAP_SUBSURFACE', 'ERODE_TRAP', 'ERODE_SUBSURFACE', 'SUBSURFACE_TRAP', 'SUBSURFACE_ERODE', 'TRAP_ERODE_SUBSURFACE', 'TRAP_SUBSURFACE_ERODE', 'ERODE_TRAP_SUBSURFACE', 'ERODE_SUBSURFACE_TRAP', 'SUBSURFACE_TRAP_ERODE', 'SUBSURFACE_ERODE_TRAP', 'NODES', 'NODE', 'XSEC') ERODE = ('TRAPEZOID_ERODE', 'TRAP_ERODE', 'TRAP_SUBSURFACE_ERODE', 'TRAP_ERODE_SUBSURFACE', 'BREAKPOINT_ERODE', 'TRAPEZOID_SUBSURFACE_ERODE', 'TRAPEZOID_ERODE_SUBSURFACE', 'BREAKPOINT_SUBSURFACE_ERODE', 'BREAKPOINT_ERODE_SUBSURFACE') SUBSURFACE = ('TRAPEZOID_SUBSURFACE', 'TRAP_SUBSURFACE', 'TRAP_SUBSURFACE_ERODE', 'TRAP_ERODE_SUBSURFACE', 'BREAKPOINT_SUBSURFACE', 'TRAPEZOID_SUBSURFACE_ERODE', 'TRAPEZOID_ERODE_SUBSURFACE', 'BREAKPOINT_SUBSURFACE_ERODE', 'BREAKPOINT_ERODE_SUBSURFACE') result = {'type': 'XSEC', 'header': {'link': None, 'dx': None, 'xSecType': None, 'nodes': None, 'erode': False, 'subsurface': False}, 'xSection': None, 'nodes': []} chunks = pt.chunk(KEYWORDS, lines) # Parse chunks associated with each key for key, chunkList in iteritems(chunks): # Parse each chunk in the chunk list for chunk in chunkList: # Cases if key == 'NODE': # Extract node x and y result['nodes'].append(nodeChunk(chunk)) elif key == 'XSEC': # Extract cross 
section information result['xSection'] = xSectionChunk(chunk) elif ('TRAPEZOID' in key) or ('BREAKPOINT' in key) or ('TRAP' in key): # Cross section type handler result['header']['xSecType'] = key elif key in ERODE: # Erode handler result['header']['erode'] = True elif key in SUBSURFACE: # Subsurface handler result['header']['subsurface'] = True else: # Extract all other variables into header result['header'][key.lower()] = chunk[0].strip().split()[1] return result
[ "def", "xSectionLink", "(", "lines", ")", ":", "# Constants", "KEYWORDS", "=", "(", "'LINK'", ",", "'DX'", ",", "'TRAPEZOID'", ",", "'TRAPEZOID_ERODE'", ",", "'TRAPEZOID_SUBSURFACE'", ",", "'ERODE_TRAPEZOID'", ",", "'ERODE_SUBSURFACE'", ",", "'SUBSURFACE_TRAPEZOID'", ",", "'SUBSURFACE_ERODE'", ",", "'TRAPEZOID_ERODE_SUBSURFACE'", ",", "'TRAPEZOID_SUBSURFACE_ERODE'", ",", "'ERODE_TRAPEZOID_SUBSURFACE'", ",", "'ERODE_SUBSURFACE_TRAPEZOID'", ",", "'SUBSURFACE_TRAPEZOID_ERODE'", ",", "'SUBSURFACE_ERODE_TRAPEZOID'", ",", "'BREAKPOINT'", ",", "'BREAKPOINT_ERODE'", ",", "'BREAKPOINT_SUBSURFACE'", ",", "'ERODE_BREAKPOINT'", ",", "'ERODE_SUBSURFACE'", ",", "'SUBSURFACE_BREAKPOINT'", ",", "'SUBSURFACE_ERODE'", ",", "'BREAKPOINT_ERODE_SUBSURFACE'", ",", "'BREAKPOINT_SUBSURFACE_ERODE'", ",", "'ERODE_BREAKPOINT_SUBSURFACE'", ",", "'ERODE_SUBSURFACE_BREAKPOINT'", ",", "'SUBSURFACE_BREAKPOINT_ERODE'", ",", "'SUBSURFACE_ERODE_BREAKPOINT'", ",", "'TRAP'", ",", "'TRAP_ERODE'", ",", "'TRAP_SUBSURFACE'", ",", "'ERODE_TRAP'", ",", "'ERODE_SUBSURFACE'", ",", "'SUBSURFACE_TRAP'", ",", "'SUBSURFACE_ERODE'", ",", "'TRAP_ERODE_SUBSURFACE'", ",", "'TRAP_SUBSURFACE_ERODE'", ",", "'ERODE_TRAP_SUBSURFACE'", ",", "'ERODE_SUBSURFACE_TRAP'", ",", "'SUBSURFACE_TRAP_ERODE'", ",", "'SUBSURFACE_ERODE_TRAP'", ",", "'NODES'", ",", "'NODE'", ",", "'XSEC'", ")", "ERODE", "=", "(", "'TRAPEZOID_ERODE'", ",", "'TRAP_ERODE'", ",", "'TRAP_SUBSURFACE_ERODE'", ",", "'TRAP_ERODE_SUBSURFACE'", ",", "'BREAKPOINT_ERODE'", ",", "'TRAPEZOID_SUBSURFACE_ERODE'", ",", "'TRAPEZOID_ERODE_SUBSURFACE'", ",", "'BREAKPOINT_SUBSURFACE_ERODE'", ",", "'BREAKPOINT_ERODE_SUBSURFACE'", ")", "SUBSURFACE", "=", "(", "'TRAPEZOID_SUBSURFACE'", ",", "'TRAP_SUBSURFACE'", ",", "'TRAP_SUBSURFACE_ERODE'", ",", "'TRAP_ERODE_SUBSURFACE'", ",", "'BREAKPOINT_SUBSURFACE'", ",", "'TRAPEZOID_SUBSURFACE_ERODE'", ",", "'TRAPEZOID_ERODE_SUBSURFACE'", ",", "'BREAKPOINT_SUBSURFACE_ERODE'", ",", "'BREAKPOINT_ERODE_SUBSURFACE'", ")", "result", "=", "{", 
"'type'", ":", "'XSEC'", ",", "'header'", ":", "{", "'link'", ":", "None", ",", "'dx'", ":", "None", ",", "'xSecType'", ":", "None", ",", "'nodes'", ":", "None", ",", "'erode'", ":", "False", ",", "'subsurface'", ":", "False", "}", ",", "'xSection'", ":", "None", ",", "'nodes'", ":", "[", "]", "}", "chunks", "=", "pt", ".", "chunk", "(", "KEYWORDS", ",", "lines", ")", "# Parse chunks associated with each key", "for", "key", ",", "chunkList", "in", "iteritems", "(", "chunks", ")", ":", "# Parse each chunk in the chunk list", "for", "chunk", "in", "chunkList", ":", "# Cases", "if", "key", "==", "'NODE'", ":", "# Extract node x and y", "result", "[", "'nodes'", "]", ".", "append", "(", "nodeChunk", "(", "chunk", ")", ")", "elif", "key", "==", "'XSEC'", ":", "# Extract cross section information", "result", "[", "'xSection'", "]", "=", "xSectionChunk", "(", "chunk", ")", "elif", "(", "'TRAPEZOID'", "in", "key", ")", "or", "(", "'BREAKPOINT'", "in", "key", ")", "or", "(", "'TRAP'", "in", "key", ")", ":", "# Cross section type handler", "result", "[", "'header'", "]", "[", "'xSecType'", "]", "=", "key", "elif", "key", "in", "ERODE", ":", "# Erode handler", "result", "[", "'header'", "]", "[", "'erode'", "]", "=", "True", "elif", "key", "in", "SUBSURFACE", ":", "# Subsurface handler", "result", "[", "'header'", "]", "[", "'subsurface'", "]", "=", "True", "else", ":", "# Extract all other variables into header", "result", "[", "'header'", "]", "[", "key", ".", "lower", "(", ")", "]", "=", "chunk", "[", "0", "]", ".", "strip", "(", ")", ".", "split", "(", ")", "[", "1", "]", "return", "result" ]
33.578947
11.315789
def subs2seqs(self) -> Dict[str, List[str]]: """A |collections.defaultdict| containing the node-specific information provided by XML `sequences` element. >>> from hydpy.auxs.xmltools import XMLInterface >>> from hydpy import data >>> interface = XMLInterface('single_run.xml', data.get_path('LahnH')) >>> series_io = interface.series_io >>> subs2seqs = series_io.writers[2].subs2seqs >>> for subs, seq in sorted(subs2seqs.items()): ... print(subs, seq) node ['sim', 'obs'] """ subs2seqs = collections.defaultdict(list) nodes = find(self.find('sequences'), 'node') if nodes is not None: for seq in nodes: subs2seqs['node'].append(strip(seq.tag)) return subs2seqs
[ "def", "subs2seqs", "(", "self", ")", "->", "Dict", "[", "str", ",", "List", "[", "str", "]", "]", ":", "subs2seqs", "=", "collections", ".", "defaultdict", "(", "list", ")", "nodes", "=", "find", "(", "self", ".", "find", "(", "'sequences'", ")", ",", "'node'", ")", "if", "nodes", "is", "not", "None", ":", "for", "seq", "in", "nodes", ":", "subs2seqs", "[", "'node'", "]", ".", "append", "(", "strip", "(", "seq", ".", "tag", ")", ")", "return", "subs2seqs" ]
41.894737
13.052632
async def get_password_tty(device, options): """Get the password to unlock a device from terminal.""" # TODO: make this a TRUE async text = _('Enter password for {0.device_presentation}: ', device) try: return getpass.getpass(text) except EOFError: print("") return None
[ "async", "def", "get_password_tty", "(", "device", ",", "options", ")", ":", "# TODO: make this a TRUE async", "text", "=", "_", "(", "'Enter password for {0.device_presentation}: '", ",", "device", ")", "try", ":", "return", "getpass", ".", "getpass", "(", "text", ")", "except", "EOFError", ":", "print", "(", "\"\"", ")", "return", "None" ]
34
15.333333
def _postprocess_hover(self, renderer, source): """ Limit hover tool to annular wedges only. """ if isinstance(renderer.glyph, AnnularWedge): super(RadialHeatMapPlot, self)._postprocess_hover(renderer, source)
[ "def", "_postprocess_hover", "(", "self", ",", "renderer", ",", "source", ")", ":", "if", "isinstance", "(", "renderer", ".", "glyph", ",", "AnnularWedge", ")", ":", "super", "(", "RadialHeatMapPlot", ",", "self", ")", ".", "_postprocess_hover", "(", "renderer", ",", "source", ")" ]
35.428571
15.142857
def mean_abs_tree_shap(model, data): """ mean(|TreeExplainer|) color = red_blue_circle(0.25) linestyle = solid """ def f(X): v = TreeExplainer(model).shap_values(X) if isinstance(v, list): return [np.tile(np.abs(sv).mean(0), (X.shape[0], 1)) for sv in v] else: return np.tile(np.abs(v).mean(0), (X.shape[0], 1)) return f
[ "def", "mean_abs_tree_shap", "(", "model", ",", "data", ")", ":", "def", "f", "(", "X", ")", ":", "v", "=", "TreeExplainer", "(", "model", ")", ".", "shap_values", "(", "X", ")", "if", "isinstance", "(", "v", ",", "list", ")", ":", "return", "[", "np", ".", "tile", "(", "np", ".", "abs", "(", "sv", ")", ".", "mean", "(", "0", ")", ",", "(", "X", ".", "shape", "[", "0", "]", ",", "1", ")", ")", "for", "sv", "in", "v", "]", "else", ":", "return", "np", ".", "tile", "(", "np", ".", "abs", "(", "v", ")", ".", "mean", "(", "0", ")", ",", "(", "X", ".", "shape", "[", "0", "]", ",", "1", ")", ")", "return", "f" ]
31.75
15.583333
def merge(self, commit_message=github.GithubObject.NotSet, commit_title=github.GithubObject.NotSet, merge_method=github.GithubObject.NotSet, sha=github.GithubObject.NotSet): """ :calls: `PUT /repos/:owner/:repo/pulls/:number/merge <http://developer.github.com/v3/pulls>`_ :param commit_message: string :rtype: :class:`github.PullRequestMergeStatus.PullRequestMergeStatus` """ assert commit_message is github.GithubObject.NotSet or isinstance(commit_message, (str, unicode)), commit_message assert commit_title is github.GithubObject.NotSet or isinstance(commit_title, (str, unicode)), commit_title assert merge_method is github.GithubObject.NotSet or isinstance(merge_method, (str, unicode)), merge_method assert sha is github.GithubObject.NotSet or isinstance(sha, (str, unicode)), sha post_parameters = dict() if commit_message is not github.GithubObject.NotSet: post_parameters["commit_message"] = commit_message if commit_title is not github.GithubObject.NotSet: post_parameters["commit_title"] = commit_title if merge_method is not github.GithubObject.NotSet: post_parameters["merge_method"] = merge_method if sha is not github.GithubObject.NotSet: post_parameters["sha"] = sha headers, data = self._requester.requestJsonAndCheck( "PUT", self.url + "/merge", input=post_parameters ) return github.PullRequestMergeStatus.PullRequestMergeStatus(self._requester, headers, data, completed=True)
[ "def", "merge", "(", "self", ",", "commit_message", "=", "github", ".", "GithubObject", ".", "NotSet", ",", "commit_title", "=", "github", ".", "GithubObject", ".", "NotSet", ",", "merge_method", "=", "github", ".", "GithubObject", ".", "NotSet", ",", "sha", "=", "github", ".", "GithubObject", ".", "NotSet", ")", ":", "assert", "commit_message", "is", "github", ".", "GithubObject", ".", "NotSet", "or", "isinstance", "(", "commit_message", ",", "(", "str", ",", "unicode", ")", ")", ",", "commit_message", "assert", "commit_title", "is", "github", ".", "GithubObject", ".", "NotSet", "or", "isinstance", "(", "commit_title", ",", "(", "str", ",", "unicode", ")", ")", ",", "commit_title", "assert", "merge_method", "is", "github", ".", "GithubObject", ".", "NotSet", "or", "isinstance", "(", "merge_method", ",", "(", "str", ",", "unicode", ")", ")", ",", "merge_method", "assert", "sha", "is", "github", ".", "GithubObject", ".", "NotSet", "or", "isinstance", "(", "sha", ",", "(", "str", ",", "unicode", ")", ")", ",", "sha", "post_parameters", "=", "dict", "(", ")", "if", "commit_message", "is", "not", "github", ".", "GithubObject", ".", "NotSet", ":", "post_parameters", "[", "\"commit_message\"", "]", "=", "commit_message", "if", "commit_title", "is", "not", "github", ".", "GithubObject", ".", "NotSet", ":", "post_parameters", "[", "\"commit_title\"", "]", "=", "commit_title", "if", "merge_method", "is", "not", "github", ".", "GithubObject", ".", "NotSet", ":", "post_parameters", "[", "\"merge_method\"", "]", "=", "merge_method", "if", "sha", "is", "not", "github", ".", "GithubObject", ".", "NotSet", ":", "post_parameters", "[", "\"sha\"", "]", "=", "sha", "headers", ",", "data", "=", "self", ".", "_requester", ".", "requestJsonAndCheck", "(", "\"PUT\"", ",", "self", ".", "url", "+", "\"/merge\"", ",", "input", "=", "post_parameters", ")", "return", "github", ".", "PullRequestMergeStatus", ".", "PullRequestMergeStatus", "(", "self", ".", "_requester", ",", "headers", ",", 
"data", ",", "completed", "=", "True", ")" ]
63.64
32.28
def tree2text(tree_obj, indent=4): # type: (TreeInfo, int) -> str """ Return text representation of a decision tree. """ parts = [] def _format_node(node, depth=0): # type: (NodeInfo, int) -> None def p(*args): # type: (*str) -> None parts.append(" " * depth * indent) parts.extend(args) if node.is_leaf: value_repr = _format_leaf_value(tree_obj, node) parts.append(" ---> {}".format(value_repr)) else: assert node.left is not None assert node.right is not None feat_name = node.feature_name if depth > 0: parts.append("\n") left_samples = node.left.sample_ratio p("{feat_name} <= {threshold:0.3f} ({left_samples:0.1%})".format( left_samples=left_samples, feat_name=feat_name, threshold=node.threshold, )) _format_node(node.left, depth=depth + 1) parts.append("\n") right_samples = node.right.sample_ratio p("{feat_name} > {threshold:0.3f} ({right_samples:0.1%})".format( right_samples=right_samples, feat_name=feat_name, threshold=node.threshold, )) _format_node(node.right, depth=depth + 1) _format_node(tree_obj.tree) return "".join(parts)
[ "def", "tree2text", "(", "tree_obj", ",", "indent", "=", "4", ")", ":", "# type: (TreeInfo, int) -> str", "parts", "=", "[", "]", "def", "_format_node", "(", "node", ",", "depth", "=", "0", ")", ":", "# type: (NodeInfo, int) -> None", "def", "p", "(", "*", "args", ")", ":", "# type: (*str) -> None", "parts", ".", "append", "(", "\" \"", "*", "depth", "*", "indent", ")", "parts", ".", "extend", "(", "args", ")", "if", "node", ".", "is_leaf", ":", "value_repr", "=", "_format_leaf_value", "(", "tree_obj", ",", "node", ")", "parts", ".", "append", "(", "\" ---> {}\"", ".", "format", "(", "value_repr", ")", ")", "else", ":", "assert", "node", ".", "left", "is", "not", "None", "assert", "node", ".", "right", "is", "not", "None", "feat_name", "=", "node", ".", "feature_name", "if", "depth", ">", "0", ":", "parts", ".", "append", "(", "\"\\n\"", ")", "left_samples", "=", "node", ".", "left", ".", "sample_ratio", "p", "(", "\"{feat_name} <= {threshold:0.3f} ({left_samples:0.1%})\"", ".", "format", "(", "left_samples", "=", "left_samples", ",", "feat_name", "=", "feat_name", ",", "threshold", "=", "node", ".", "threshold", ",", ")", ")", "_format_node", "(", "node", ".", "left", ",", "depth", "=", "depth", "+", "1", ")", "parts", ".", "append", "(", "\"\\n\"", ")", "right_samples", "=", "node", ".", "right", ".", "sample_ratio", "p", "(", "\"{feat_name} > {threshold:0.3f} ({right_samples:0.1%})\"", ".", "format", "(", "right_samples", "=", "right_samples", ",", "feat_name", "=", "feat_name", ",", "threshold", "=", "node", ".", "threshold", ",", ")", ")", "_format_node", "(", "node", ".", "right", ",", "depth", "=", "depth", "+", "1", ")", "_format_node", "(", "tree_obj", ".", "tree", ")", "return", "\"\"", ".", "join", "(", "parts", ")" ]
32.651163
14.27907
def store_node_label_meta(self, x, y, tx, ty, rot):
    """
    Record coordinate metadata for a single node label.

    Appends the supplied positions to ``self.node_label_coords``, derives
    the horizontal/vertical text alignment from the sign of ``x`` and
    ``y`` (with the "rotate" layout always vertically centered), and
    stores the rotation angle.  Internal bookkeeping — not meant to be
    called by the user.

    :param x: x location of node label or number
    :type x: np.float64
    :param y: y location of node label or number
    :type y: np.float64
    :param tx: text location x of node label (numbers)
    :type tx: np.float64
    :param ty: text location y of node label (numbers)
    :type ty: np.float64
    :param rot: rotation angle of the text (rotation)
    :type rot: float
    """
    # Store the computed coordinate values.
    coords = self.node_label_coords
    for key, value in (("x", x), ("y", y), ("tx", tx), ("ty", ty)):
        coords[key].append(value)

    # Horizontal alignment follows the sign of x.
    if x == 0:
        halign = "center"
    elif x > 0:
        halign = "left"
    else:
        halign = "right"
    self.node_label_aligns["has"].append(halign)

    # Vertical alignment: rotated layouts are always centered on the anchor.
    if self.node_label_layout == "rotate" or y == 0:
        valign = "center"
    elif y > 0:
        valign = "bottom"
    else:
        valign = "top"
    self.node_label_aligns["vas"].append(valign)

    self.node_label_rotation.append(rot)
[ "def", "store_node_label_meta", "(", "self", ",", "x", ",", "y", ",", "tx", ",", "ty", ",", "rot", ")", ":", "# Store computed values", "self", ".", "node_label_coords", "[", "\"x\"", "]", ".", "append", "(", "x", ")", "self", ".", "node_label_coords", "[", "\"y\"", "]", ".", "append", "(", "y", ")", "self", ".", "node_label_coords", "[", "\"tx\"", "]", ".", "append", "(", "tx", ")", "self", ".", "node_label_coords", "[", "\"ty\"", "]", ".", "append", "(", "ty", ")", "# Computes the text alignment for x", "if", "x", "==", "0", ":", "self", ".", "node_label_aligns", "[", "\"has\"", "]", ".", "append", "(", "\"center\"", ")", "elif", "x", ">", "0", ":", "self", ".", "node_label_aligns", "[", "\"has\"", "]", ".", "append", "(", "\"left\"", ")", "else", ":", "self", ".", "node_label_aligns", "[", "\"has\"", "]", ".", "append", "(", "\"right\"", ")", "# Computes the text alignment for y", "if", "self", ".", "node_label_layout", "==", "\"rotate\"", "or", "y", "==", "0", ":", "self", ".", "node_label_aligns", "[", "\"vas\"", "]", ".", "append", "(", "\"center\"", ")", "elif", "y", ">", "0", ":", "self", ".", "node_label_aligns", "[", "\"vas\"", "]", ".", "append", "(", "\"bottom\"", ")", "else", ":", "self", ".", "node_label_aligns", "[", "\"vas\"", "]", ".", "append", "(", "\"top\"", ")", "self", ".", "node_label_rotation", ".", "append", "(", "rot", ")" ]
32.522727
18.977273
def bingham_pdf(fit):
    """
    Build a probability density function for a Bingham distribution.

    From the *Encyclopedia of Paleomagnetism* and Onstott, 1980: the
    vector resultant R is analogous to the eigenvectors of T, and the
    eigenvalues are analogous to |R|/N.

    Returns a function ``pdf((lon, lat))`` evaluating the density at a
    point given in spherical coordinates (radians).
    """
    # Uses eigenvectors of the covariance matrix.
    # NOTE: the original code also computed ``kappa`` and a first
    # normalization constant via ``confluent_hypergeometric_function``,
    # but both were immediately overwritten and never used — that dead
    # code (and the unused ``I``/``D`` locals inside ``pdf``) is removed.
    e = fit.hyperbolic_axes  # singular values
    e = e[2]**2/e

    M = fit.axes   # orientation matrix (eigenvectors)
    Z = 1/e        # concentration parameters
    # Normalization constant via the confluent hypergeometric function.
    F = 1/hyp1f1(*1/Z)

    def pdf(coords):
        # Bingham is given in spherical coordinates of inclination
        # and declination in radians
        # From USGS bingham statistics reference
        lon, lat = coords
        xhat = N.array(sph2cart(lon, lat)).T
        return 1/(F*N.exp(dot(xhat.T, M, N.diag(Z), M.T, xhat)))
    return pdf
[ "def", "bingham_pdf", "(", "fit", ")", ":", "# Uses eigenvectors of the covariance matrix", "e", "=", "fit", ".", "hyperbolic_axes", "#singular_values", "#e = sampling_covariance(fit) # not sure", "e", "=", "e", "[", "2", "]", "**", "2", "/", "e", "kappa", "=", "(", "e", "-", "e", "[", "2", "]", ")", "[", ":", "-", "1", "]", "kappa", "/=", "kappa", "[", "-", "1", "]", "F", "=", "N", ".", "sqrt", "(", "N", ".", "pi", ")", "*", "confluent_hypergeometric_function", "(", "*", "kappa", ")", "ax", "=", "fit", ".", "axes", "Z", "=", "1", "/", "e", "M", "=", "ax", "F", "=", "1", "/", "hyp1f1", "(", "*", "1", "/", "Z", ")", "def", "pdf", "(", "coords", ")", ":", "lon", ",", "lat", "=", "coords", "I", "=", "lat", "D", "=", "lon", "# + N.pi/2", "#D,I = _rotate(N.degrees(D),N.degrees(I),90)", "# Bingham is given in spherical coordinates of inclination", "# and declination in radians", "# From USGS bingham statistics reference", "xhat", "=", "N", ".", "array", "(", "sph2cart", "(", "lon", ",", "lat", ")", ")", ".", "T", "#return F*expm(dot(xhat.T, M, N.diag(Z), M.T, xhat))", "return", "1", "/", "(", "F", "*", "N", ".", "exp", "(", "dot", "(", "xhat", ".", "T", ",", "M", ",", "N", ".", "diag", "(", "Z", ")", ",", "M", ".", "T", ",", "xhat", ")", ")", ")", "return", "pdf" ]
22.545455
22.681818
def getUnionTemporalPoolerInput(self):
    """
    Assemble the Union Temporal Pooler input from the Temporal Memory.

    Returns a 3-tuple of dense float vectors:
    (active cells, predicted-active cells, bursting columns), each with a
    1 at every active index and 0 elsewhere.
    """
    # Both cell vectors share the same length, so query it once.
    numCells = self.tm.numberOfCells()

    activeCells = numpy.zeros(numCells).astype(realDType)
    activeCells[list(self.tm.activeCellsIndices())] = 1

    predictedActiveCells = numpy.zeros(numCells).astype(realDType)
    predictedActiveCells[list(self.tm.predictedActiveCellsIndices())] = 1

    burstingColumns = numpy.zeros(self.tm.numberOfColumns()).astype(realDType)
    burstingColumns[list(self.tm.unpredictedActiveColumns)] = 1

    return activeCells, predictedActiveCells, burstingColumns
[ "def", "getUnionTemporalPoolerInput", "(", "self", ")", ":", "activeCells", "=", "numpy", ".", "zeros", "(", "self", ".", "tm", ".", "numberOfCells", "(", ")", ")", ".", "astype", "(", "realDType", ")", "activeCells", "[", "list", "(", "self", ".", "tm", ".", "activeCellsIndices", "(", ")", ")", "]", "=", "1", "predictedActiveCells", "=", "numpy", ".", "zeros", "(", "self", ".", "tm", ".", "numberOfCells", "(", ")", ")", ".", "astype", "(", "realDType", ")", "predictedActiveCells", "[", "list", "(", "self", ".", "tm", ".", "predictedActiveCellsIndices", "(", ")", ")", "]", "=", "1", "burstingColumns", "=", "numpy", ".", "zeros", "(", "self", ".", "tm", ".", "numberOfColumns", "(", ")", ")", ".", "astype", "(", "realDType", ")", "burstingColumns", "[", "list", "(", "self", ".", "tm", ".", "unpredictedActiveColumns", ")", "]", "=", "1", "return", "activeCells", ",", "predictedActiveCells", ",", "burstingColumns" ]
40.4
24.266667
def init(scope):
    """
    Publish every plain function from *scope* onto a fresh SinonGlobals
    container stored in the module-level ``CPSCOPE``.

    Args:
        scope (eg. locals() or globals())

    Return:
        SinonGlobals instance
    """
    class SinonGlobals(object):  #pylint: disable=too-few-public-methods
        """
        A fully empty class
        External can push the whole `scope` into this class through global function init()
        """
        pass

    global CPSCOPE  #pylint: disable=global-statement
    CPSCOPE = SinonGlobals()
    # Attach each function under its own __name__ (non-functions are skipped).
    for obj in scope.values():
        if isinstance(obj, FunctionType):
            setattr(CPSCOPE, obj.__name__, obj)
    return CPSCOPE
[ "def", "init", "(", "scope", ")", ":", "class", "SinonGlobals", "(", "object", ")", ":", "#pylint: disable=too-few-public-methods", "\"\"\"\n A fully empty class\n External can push the whole `scope` into this class through global function init()\n \"\"\"", "pass", "global", "CPSCOPE", "#pylint: disable=global-statement", "CPSCOPE", "=", "SinonGlobals", "(", ")", "funcs", "=", "[", "obj", "for", "obj", "in", "scope", ".", "values", "(", ")", "if", "isinstance", "(", "obj", ",", "FunctionType", ")", "]", "for", "func", "in", "funcs", ":", "setattr", "(", "CPSCOPE", ",", "func", ".", "__name__", ",", "func", ")", "return", "CPSCOPE" ]
30.428571
18.047619
def get_zone(self, zone_name):
    """
    Look up a zone by name.

    Scans the zones returned by ``get_zones()`` and returns the first one
    whose ``name`` equals *zone_name*; raises ``RuntimeError`` when no
    zone matches.
    """
    matches = (zone for zone in self.get_zones() if zone['name'] == zone_name)
    found = next(matches, None)
    if found is None:
        raise RuntimeError("Unknown zone")
    return found
[ "def", "get_zone", "(", "self", ",", "zone_name", ")", ":", "for", "zone", "in", "self", ".", "get_zones", "(", ")", ":", "if", "zone_name", "==", "zone", "[", "'name'", "]", ":", "return", "zone", "raise", "RuntimeError", "(", "\"Unknown zone\"", ")" ]
27.777778
8.888889
def append_text_to_shell(self, text, error, prompt):
        """
        Append text to Python shell.

        In a way, this method overrides the method 'insert_text' when text
        is inserted at the end of the text widget for a Python shell.

        Handles error messages and shows blue underlined traceback links,
        handles ANSI color sequences, and handles the ANSI FF (form feed,
        chr(12)) sequence by clearing the shell.
        """
        cursor = self.textCursor()
        cursor.movePosition(QTextCursor.End)
        if '\r' in text:    # replace \r\n with \n
            text = text.replace('\r\n', '\n')
            text = text.replace('\r', '\n')
        # A form feed (chr(12)) clears the shell; only the text after the
        # last form feed is kept and rendered.
        while True:
            index = text.find(chr(12))
            if index == -1:
                break
            text = text[index+1:]
            self.clear()
        if error:
            is_traceback = False
            for text in text.splitlines(True):
                # Traceback frame lines start with two spaces + 'File'
                # (but not the interactive '<...>' pseudo-files).
                if (text.startswith('  File')
                        and not text.startswith('  File "<')):
                    is_traceback = True
                    # Show error links in blue underlined text
                    cursor.insertText('  ', self.default_style.format)
                    cursor.insertText(text[2:],
                                      self.traceback_link_style.format)
                else:
                    # Show error/warning messages in red
                    cursor.insertText(text, self.error_style.format)
            # NOTE(review): emits only the *last* line of the error text —
            # confirm receivers expect that rather than the full message.
            self.exception_occurred.emit(text, is_traceback)
        elif prompt:
            # Show prompt in green
            insert_text_to(cursor, text, self.prompt_style.format)
        else:
            # Show other outputs in black, interpreting ANSI color escapes:
            # text between escape sequences is written with the current
            # format, then the escape codes update that format.
            last_end = 0
            for match in self.COLOR_PATTERN.finditer(text):
                insert_text_to(cursor, text[last_end:match.start()],
                               self.default_style.format)
                last_end = match.end()
                try:
                    for code in [int(_c) for _c in match.group(1).split(';')]:
                        self.ansi_handler.set_code(code)
                except ValueError:
                    # Malformed escape parameters are ignored.
                    pass
                self.default_style.format = self.ansi_handler.get_format()
            insert_text_to(cursor, text[last_end:],
                           self.default_style.format)
            # # Slower alternative:
            # segments = self.COLOR_PATTERN.split(text)
            # cursor.insertText(segments.pop(0), self.default_style.format)
            # if segments:
            #     for ansi_tags, text in zip(segments[::2], segments[1::2]):
            #         for ansi_tag in ansi_tags.split(';'):
            #             self.ansi_handler.set_code(int(ansi_tag))
            #         self.default_style.format = self.ansi_handler.get_format()
            #         cursor.insertText(text, self.default_style.format)
        self.set_cursor_position('eof')
        self.setCurrentCharFormat(self.default_style.format)
[ "def", "append_text_to_shell", "(", "self", ",", "text", ",", "error", ",", "prompt", ")", ":", "cursor", "=", "self", ".", "textCursor", "(", ")", "cursor", ".", "movePosition", "(", "QTextCursor", ".", "End", ")", "if", "'\\r'", "in", "text", ":", "# replace \\r\\n with \\n", "text", "=", "text", ".", "replace", "(", "'\\r\\n'", ",", "'\\n'", ")", "text", "=", "text", ".", "replace", "(", "'\\r'", ",", "'\\n'", ")", "while", "True", ":", "index", "=", "text", ".", "find", "(", "chr", "(", "12", ")", ")", "if", "index", "==", "-", "1", ":", "break", "text", "=", "text", "[", "index", "+", "1", ":", "]", "self", ".", "clear", "(", ")", "if", "error", ":", "is_traceback", "=", "False", "for", "text", "in", "text", ".", "splitlines", "(", "True", ")", ":", "if", "(", "text", ".", "startswith", "(", "' File'", ")", "and", "not", "text", ".", "startswith", "(", "' File \"<'", ")", ")", ":", "is_traceback", "=", "True", "# Show error links in blue underlined text", "cursor", ".", "insertText", "(", "' '", ",", "self", ".", "default_style", ".", "format", ")", "cursor", ".", "insertText", "(", "text", "[", "2", ":", "]", ",", "self", ".", "traceback_link_style", ".", "format", ")", "else", ":", "# Show error/warning messages in red", "cursor", ".", "insertText", "(", "text", ",", "self", ".", "error_style", ".", "format", ")", "self", ".", "exception_occurred", ".", "emit", "(", "text", ",", "is_traceback", ")", "elif", "prompt", ":", "# Show prompt in green", "insert_text_to", "(", "cursor", ",", "text", ",", "self", ".", "prompt_style", ".", "format", ")", "else", ":", "# Show other outputs in black", "last_end", "=", "0", "for", "match", "in", "self", ".", "COLOR_PATTERN", ".", "finditer", "(", "text", ")", ":", "insert_text_to", "(", "cursor", ",", "text", "[", "last_end", ":", "match", ".", "start", "(", ")", "]", ",", "self", ".", "default_style", ".", "format", ")", "last_end", "=", "match", ".", "end", "(", ")", "try", ":", "for", "code", "in", "[", "int", "(", 
"_c", ")", "for", "_c", "in", "match", ".", "group", "(", "1", ")", ".", "split", "(", "';'", ")", "]", ":", "self", ".", "ansi_handler", ".", "set_code", "(", "code", ")", "except", "ValueError", ":", "pass", "self", ".", "default_style", ".", "format", "=", "self", ".", "ansi_handler", ".", "get_format", "(", ")", "insert_text_to", "(", "cursor", ",", "text", "[", "last_end", ":", "]", ",", "self", ".", "default_style", ".", "format", ")", "# # Slower alternative:", "# segments = self.COLOR_PATTERN.split(text)", "# cursor.insertText(segments.pop(0), self.default_style.format)", "# if segments:", "# for ansi_tags, text in zip(segments[::2], segments[1::2]):", "# for ansi_tag in ansi_tags.split(';'):", "# self.ansi_handler.set_code(int(ansi_tag))", "# self.default_style.format = self.ansi_handler.get_format()", "# cursor.insertText(text, self.default_style.format)", "self", ".", "set_cursor_position", "(", "'eof'", ")", "self", ".", "setCurrentCharFormat", "(", "self", ".", "default_style", ".", "format", ")" ]
45.603175
16.904762
def split_func(string):
    """
    Take a string like 'requiredIf("arg_name")' and return the function
    name and the argument: (requiredIf, arg_name).

    Raises ``ValueError`` when the string contains no '('.
    """
    open_paren = string.index("(")
    func_name = string[:open_paren]
    # Drop the trailing ')' and any surrounding double quotes.
    argument = string[open_paren + 1:-1].strip('"')
    return func_name, argument
[ "def", "split_func", "(", "string", ")", ":", "ind", "=", "string", ".", "index", "(", "\"(\"", ")", "return", "string", "[", ":", "ind", "]", ",", "string", "[", "ind", "+", "1", ":", "-", "1", "]", ".", "strip", "(", "'\"'", ")" ]
29.375
8.625
def policy_attached(name, policyName, principal, region=None, key=None, keyid=None, profile=None):
    '''
    Ensure policy is attached to the given principal.

    name
        The name of the state definition

    policyName
        Name of the policy.

    principal
        The principal which can be a certificate ARN or a Cognito ID.

    region
        Region to connect to.

    key
        Secret key to be used.

    keyid
        Access key to be used.

    profile
        A dict with region, key and keyid, or a pillar key (string) that
        contains a dict with region, key and keyid.
    '''
    # Standard salt state return structure.  Note the reported name is the
    # policy name, not the state definition name.
    ret = {'name': policyName,
           'result': True,
           'comment': '',
           'changes': {}
           }

    # List the policies currently attached to the principal.
    r = __salt__['boto_iot.list_principal_policies'](principal=principal,
                                                     region=region, key=key,
                                                     keyid=keyid, profile=profile)
    if 'error' in r:
        ret['result'] = False
        ret['comment'] = 'Failed to attach policy: {0}.'.format(r['error']['message'])
        return ret
    # Check whether the requested policy is already among them.
    attached = False
    for policy in r.get('policies', []):
        if policy.get('policyName') == policyName:
            attached = True
            break
    if not attached:
        if __opts__['test']:
            # Test mode: report what would change without calling AWS.
            ret['comment'] = 'Policy {0} is set to be attached to {1}.'.format(policyName, principal)
            ret['result'] = None
            return ret
        r = __salt__['boto_iot.attach_principal_policy'](policyName=policyName,
                                                         principal=principal,
                                                         region=region, key=key,
                                                         keyid=keyid, profile=profile)
        if not r.get('attached'):
            ret['result'] = False
            ret['comment'] = 'Failed to attach policy: {0}.'.format(r['error']['message'])
            return ret
        ret['changes']['old'] = {'attached': False}
        ret['changes']['new'] = {'attached': True}
        ret['comment'] = 'Policy {0} attached to {1}.'.format(policyName, principal)
        return ret
    # Already attached: nothing to do, report success with empty changes.
    ret['comment'] = os.linesep.join([ret['comment'], 'Policy {0} is attached.'.format(policyName)])
    ret['changes'] = {}
    return ret
[ "def", "policy_attached", "(", "name", ",", "policyName", ",", "principal", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "ret", "=", "{", "'name'", ":", "policyName", ",", "'result'", ":", "True", ",", "'comment'", ":", "''", ",", "'changes'", ":", "{", "}", "}", "r", "=", "__salt__", "[", "'boto_iot.list_principal_policies'", "]", "(", "principal", "=", "principal", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "'error'", "in", "r", ":", "ret", "[", "'result'", "]", "=", "False", "ret", "[", "'comment'", "]", "=", "'Failed to attach policy: {0}.'", ".", "format", "(", "r", "[", "'error'", "]", "[", "'message'", "]", ")", "return", "ret", "attached", "=", "False", "for", "policy", "in", "r", ".", "get", "(", "'policies'", ",", "[", "]", ")", ":", "if", "policy", ".", "get", "(", "'policyName'", ")", "==", "policyName", ":", "attached", "=", "True", "break", "if", "not", "attached", ":", "if", "__opts__", "[", "'test'", "]", ":", "ret", "[", "'comment'", "]", "=", "'Policy {0} is set to be attached to {1}.'", ".", "format", "(", "policyName", ",", "principal", ")", "ret", "[", "'result'", "]", "=", "None", "return", "ret", "r", "=", "__salt__", "[", "'boto_iot.attach_principal_policy'", "]", "(", "policyName", "=", "policyName", ",", "principal", "=", "principal", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "not", "r", ".", "get", "(", "'attached'", ")", ":", "ret", "[", "'result'", "]", "=", "False", "ret", "[", "'comment'", "]", "=", "'Failed to attach policy: {0}.'", ".", "format", "(", "r", "[", "'error'", "]", "[", "'message'", "]", ")", "return", "ret", "ret", "[", "'changes'", "]", "[", "'old'", "]", "=", "{", "'attached'", ":", "False", "}", "ret", "[", "'changes'", "]", "[", "'new'", "]", "=", "{", "'attached'", ":", "True", 
"}", "ret", "[", "'comment'", "]", "=", "'Policy {0} attached to {1}.'", ".", "format", "(", "policyName", ",", "principal", ")", "return", "ret", "ret", "[", "'comment'", "]", "=", "os", ".", "linesep", ".", "join", "(", "[", "ret", "[", "'comment'", "]", ",", "'Policy {0} is attached.'", ".", "format", "(", "policyName", ")", "]", ")", "ret", "[", "'changes'", "]", "=", "{", "}", "return", "ret" ]
32.191176
25.867647
def make_rsa_keys(bits=2048, e=65537, k=64):
    """
    Create an RSA key pair.

    Returns ``(n, e, d)`` where ``(n, e)`` is the public key and
    ``(n, e, d)`` is the private key.

    ``k`` is documented as the number of Miller-Rabin rounds, but note it
    is currently never forwarded to ``get_prime`` — TODO confirm whether
    ``get_prime`` should receive it.
    """
    half = bits // 2
    p = q = None
    # Draw primes until two distinct ones come up.
    while p == q:
        p, q = get_prime(half), get_prime(half)
    n = p * q
    phi_n = phi(n, p, q)
    # Private exponent: modular inverse of e w.r.t. phi(n).
    d = mult_inv(e, phi_n)
    return n, e, d
[ "def", "make_rsa_keys", "(", "bits", "=", "2048", ",", "e", "=", "65537", ",", "k", "=", "64", ")", ":", "p", ",", "q", "=", "None", ",", "None", "while", "p", "==", "q", ":", "p", ",", "q", "=", "get_prime", "(", "bits", "//", "2", ")", ",", "get_prime", "(", "bits", "//", "2", ")", "n", "=", "p", "*", "q", "phi_n", "=", "phi", "(", "n", ",", "p", ",", "q", ")", "d", "=", "mult_inv", "(", "e", ",", "phi_n", ")", "return", "n", ",", "e", ",", "d" ]
24.941176
16.705882
def extend_env(extra_env):
    """
    Return a copy of the current process environment updated with the
    key/value pairs from *extra_env*.  ``os.environ`` itself is untouched.
    """
    merged = dict(os.environ)
    merged.update(extra_env)
    return merged
[ "def", "extend_env", "(", "extra_env", ")", ":", "env", "=", "os", ".", "environ", ".", "copy", "(", ")", "env", ".", "update", "(", "extra_env", ")", "return", "env" ]
24.375
15.625
def visitNumericFacet(self, ctx: ShExDocParser.NumericFacetContext):
    """ numericFacet: numericRange numericLiteral | numericLength INTEGER
        numericRange: KW_MINEXCLUSIVE | KW_MININCLUSIVE | KW_MAXEXCLUSIVE | KW_MAXINCLUSIVE
        numericLength: KW_TOTALDIGITS | KW_FRACTIONDIGITS

        Copies the parsed numeric facet (a range bound or a digit-count
        constraint) onto ``self.nodeconstraint``.
    """
    num_range = ctx.numericRange()
    if num_range:
        # Range facet: the bound value comes from the numeric literal.
        bound = self.context.numeric_literal_to_type(ctx.numericLiteral())
        if num_range.KW_MINEXCLUSIVE():
            self.nodeconstraint.minexclusive = bound
        elif num_range.KW_MAXEXCLUSIVE():
            self.nodeconstraint.maxexclusive = bound
        elif num_range.KW_MININCLUSIVE():
            self.nodeconstraint.mininclusive = bound
        elif num_range.KW_MAXINCLUSIVE():
            self.nodeconstraint.maxinclusive = bound
    else:
        # Length facet: a plain integer digit count.
        digits = jsg.Integer(ctx.INTEGER().getText())
        num_length = ctx.numericLength()
        if num_length.KW_TOTALDIGITS():
            self.nodeconstraint.totaldigits = digits
        elif num_length.KW_FRACTIONDIGITS():
            self.nodeconstraint.fractiondigits = digits
[ "def", "visitNumericFacet", "(", "self", ",", "ctx", ":", "ShExDocParser", ".", "NumericFacetContext", ")", ":", "if", "ctx", ".", "numericRange", "(", ")", ":", "numlit", "=", "self", ".", "context", ".", "numeric_literal_to_type", "(", "ctx", ".", "numericLiteral", "(", ")", ")", "if", "ctx", ".", "numericRange", "(", ")", ".", "KW_MINEXCLUSIVE", "(", ")", ":", "self", ".", "nodeconstraint", ".", "minexclusive", "=", "numlit", "elif", "ctx", ".", "numericRange", "(", ")", ".", "KW_MAXEXCLUSIVE", "(", ")", ":", "self", ".", "nodeconstraint", ".", "maxexclusive", "=", "numlit", "elif", "ctx", ".", "numericRange", "(", ")", ".", "KW_MININCLUSIVE", "(", ")", ":", "self", ".", "nodeconstraint", ".", "mininclusive", "=", "numlit", "elif", "ctx", ".", "numericRange", "(", ")", ".", "KW_MAXINCLUSIVE", "(", ")", ":", "self", ".", "nodeconstraint", ".", "maxinclusive", "=", "numlit", "else", ":", "nlen", "=", "jsg", ".", "Integer", "(", "ctx", ".", "INTEGER", "(", ")", ".", "getText", "(", ")", ")", "if", "ctx", ".", "numericLength", "(", ")", ".", "KW_TOTALDIGITS", "(", ")", ":", "self", ".", "nodeconstraint", ".", "totaldigits", "=", "nlen", "elif", "ctx", ".", "numericLength", "(", ")", ".", "KW_FRACTIONDIGITS", "(", ")", ":", "self", ".", "nodeconstraint", ".", "fractiondigits", "=", "nlen" ]
57.25
17.85
def draw_image(data, obj):
    """Returns the PGFPlots code for an image environment.

    The pixel data of *obj* (a matplotlib image artist) is written out to
    a PNG file, and an ``\\addplot graphics`` command referencing that
    file (with the image's axis extent) is returned along with *data*.
    """
    content = []

    filename, rel_filepath = files.new_filename(data, "img", ".png")

    # store the image as in a file
    img_array = obj.get_array()

    dims = img_array.shape
    if len(dims) == 2:
        # the values are given as one real number: look at cmap
        clims = obj.get_clim()
        mpl.pyplot.imsave(
            fname=filename,
            arr=img_array,
            cmap=obj.get_cmap(),
            vmin=clims[0],
            vmax=clims[1],
            origin=obj.origin,
        )
    else:
        # RGB (+alpha) information at each point
        assert len(dims) == 3 and dims[2] in [3, 4]
        # convert to PIL image
        if obj.origin == "lower":
            img_array = numpy.flipud(img_array)

        # Convert mpl image to PIL
        # NOTE(review): assumes float channel values in [0, 1] — values
        # are scaled by 255 before the uint8 cast; confirm for int inputs.
        image = PIL.Image.fromarray(numpy.uint8(img_array * 255))

        # If the input image is PIL:
        # image = PIL.Image.fromarray(img_array)

        # NOTE(review): PIL's save() does not define an ``origin``
        # parameter — presumably it is silently ignored here; verify.
        image.save(filename, origin=obj.origin)

    # write the corresponding information to the TikZ file
    extent = obj.get_extent()

    # the format specification will only accept tuples
    if not isinstance(extent, tuple):
        extent = tuple(extent)

    # Explicitly use \pgfimage as includegrapics command, as the default
    # \includegraphics fails unexpectedly in some cases
    ff = data["float format"]
    content.append(
        (
            "\\addplot graphics [includegraphics cmd=\\pgfimage,"
            "xmin=" + ff + ", xmax=" + ff + ", "
            "ymin=" + ff + ", ymax=" + ff + "] {{{}}};\n"
        ).format(*(extent + (rel_filepath,)))
    )
    return data, content
[ "def", "draw_image", "(", "data", ",", "obj", ")", ":", "content", "=", "[", "]", "filename", ",", "rel_filepath", "=", "files", ".", "new_filename", "(", "data", ",", "\"img\"", ",", "\".png\"", ")", "# store the image as in a file", "img_array", "=", "obj", ".", "get_array", "(", ")", "dims", "=", "img_array", ".", "shape", "if", "len", "(", "dims", ")", "==", "2", ":", "# the values are given as one real number: look at cmap", "clims", "=", "obj", ".", "get_clim", "(", ")", "mpl", ".", "pyplot", ".", "imsave", "(", "fname", "=", "filename", ",", "arr", "=", "img_array", ",", "cmap", "=", "obj", ".", "get_cmap", "(", ")", ",", "vmin", "=", "clims", "[", "0", "]", ",", "vmax", "=", "clims", "[", "1", "]", ",", "origin", "=", "obj", ".", "origin", ",", ")", "else", ":", "# RGB (+alpha) information at each point", "assert", "len", "(", "dims", ")", "==", "3", "and", "dims", "[", "2", "]", "in", "[", "3", ",", "4", "]", "# convert to PIL image", "if", "obj", ".", "origin", "==", "\"lower\"", ":", "img_array", "=", "numpy", ".", "flipud", "(", "img_array", ")", "# Convert mpl image to PIL", "image", "=", "PIL", ".", "Image", ".", "fromarray", "(", "numpy", ".", "uint8", "(", "img_array", "*", "255", ")", ")", "# If the input image is PIL:", "# image = PIL.Image.fromarray(img_array)", "image", ".", "save", "(", "filename", ",", "origin", "=", "obj", ".", "origin", ")", "# write the corresponding information to the TikZ file", "extent", "=", "obj", ".", "get_extent", "(", ")", "# the format specification will only accept tuples", "if", "not", "isinstance", "(", "extent", ",", "tuple", ")", ":", "extent", "=", "tuple", "(", "extent", ")", "# Explicitly use \\pgfimage as includegrapics command, as the default", "# \\includegraphics fails unexpectedly in some cases", "ff", "=", "data", "[", "\"float format\"", "]", "content", ".", "append", "(", "(", "\"\\\\addplot graphics [includegraphics cmd=\\\\pgfimage,\"", "\"xmin=\"", "+", "ff", "+", "\", xmax=\"", "+", "ff", "+", 
"\", \"", "\"ymin=\"", "+", "ff", "+", "\", ymax=\"", "+", "ff", "+", "\"] {{{}}};\\n\"", ")", ".", "format", "(", "*", "(", "extent", "+", "(", "rel_filepath", ",", ")", ")", ")", ")", "return", "data", ",", "content" ]
30.2
19.236364
def get_objective_sequencing_session(self, proxy):
    """Gets the session for sequencing objectives.

    :param proxy: a proxy
    :type proxy: ``osid.proxy.Proxy``
    :return: an ``ObjectiveSequencingSession``
    :rtype: ``osid.learning.ObjectiveSequencingSession``
    :raise: ``NullArgument`` -- ``proxy`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``Unimplemented`` -- ``supports_objective_sequencing()`` is ``false``

    *compliance: optional -- This method must be implemented if
    ``supports_objective_sequencing()`` is ``true``.*

    """
    # Unsupported capability is an immediate Unimplemented error.
    if not self.supports_objective_sequencing():
        raise Unimplemented()
    # The sessions module is imported lazily; a missing module is
    # surfaced as OperationFailed rather than ImportError.
    try:
        from . import sessions
    except ImportError:
        raise OperationFailed()
    proxy = self._convert_proxy(proxy)
    try:
        return sessions.ObjectiveSequencingSession(proxy=proxy,
                                                   runtime=self._runtime)
    except AttributeError:
        raise OperationFailed()
[ "def", "get_objective_sequencing_session", "(", "self", ",", "proxy", ")", ":", "if", "not", "self", ".", "supports_objective_sequencing", "(", ")", ":", "raise", "Unimplemented", "(", ")", "try", ":", "from", ".", "import", "sessions", "except", "ImportError", ":", "raise", "OperationFailed", "(", ")", "proxy", "=", "self", ".", "_convert_proxy", "(", "proxy", ")", "try", ":", "session", "=", "sessions", ".", "ObjectiveSequencingSession", "(", "proxy", "=", "proxy", ",", "runtime", "=", "self", ".", "_runtime", ")", "except", "AttributeError", ":", "raise", "OperationFailed", "(", ")", "return", "session" ]
40.230769
20.115385
def decline_event(self, comment=None, *, send_response=True):
    """ Decline the event

    :param str comment: comment to add
    :param bool send_response: whether or not to send response back
    :return: Success / Failure
    :rtype: bool
    """
    if not self.object_id:
        raise RuntimeError("Can't accept event that doesn't exist")

    # Build .../events/{id}/decline from the endpoint template.
    endpoint = self._endpoints.get('event').format(id=self.object_id)
    url = self.build_url(endpoint) + '/decline'

    payload = {}
    if comment and isinstance(comment, str):
        payload[self._cc('comment')] = comment
    if send_response is False:
        payload[self._cc('sendResponse')] = send_response

    # An empty payload is sent as None (no request body).
    response = self.con.post(url, data=payload or None)
    return bool(response)
[ "def", "decline_event", "(", "self", ",", "comment", "=", "None", ",", "*", ",", "send_response", "=", "True", ")", ":", "if", "not", "self", ".", "object_id", ":", "raise", "RuntimeError", "(", "\"Can't accept event that doesn't exist\"", ")", "url", "=", "self", ".", "build_url", "(", "self", ".", "_endpoints", ".", "get", "(", "'event'", ")", ".", "format", "(", "id", "=", "self", ".", "object_id", ")", ")", "url", "=", "url", "+", "'/decline'", "data", "=", "{", "}", "if", "comment", "and", "isinstance", "(", "comment", ",", "str", ")", ":", "data", "[", "self", ".", "_cc", "(", "'comment'", ")", "]", "=", "comment", "if", "send_response", "is", "False", ":", "data", "[", "self", ".", "_cc", "(", "'sendResponse'", ")", "]", "=", "send_response", "response", "=", "self", ".", "con", ".", "post", "(", "url", ",", "data", "=", "data", "or", "None", ")", "return", "bool", "(", "response", ")" ]
32.666667
19.083333
def flatten(*args):
    '''Generator that recursively flattens embedded lists, tuples, etc.

    Strings and bytes are treated as atoms, not iterables.
    '''
    for arg in args:
        # ``collections.Iterable`` was removed in Python 3.10; the ABC
        # lives in ``collections.abc``.
        if isinstance(arg, collections.abc.Iterable) and not isinstance(arg, (str, bytes)):
            yield from flatten(*arg)
        else:
            yield arg
[ "def", "flatten", "(", "*", "args", ")", ":", "for", "arg", "in", "args", ":", "if", "isinstance", "(", "arg", ",", "collections", ".", "Iterable", ")", "and", "not", "isinstance", "(", "arg", ",", "(", "str", ",", "bytes", ")", ")", ":", "yield", "from", "flatten", "(", "*", "arg", ")", "else", ":", "yield", "arg" ]
38.571429
24.571429
def to_julian_date(self): """ Convert Datetime Array to float64 ndarray of Julian Dates. 0 Julian date is noon January 1, 4713 BC. http://en.wikipedia.org/wiki/Julian_day """ # http://mysite.verizon.net/aesir_research/date/jdalg2.htm year = np.asarray(self.year) month = np.asarray(self.month) day = np.asarray(self.day) testarr = month < 3 year[testarr] -= 1 month[testarr] += 12 return (day + np.fix((153 * month - 457) / 5) + 365 * year + np.floor(year / 4) - np.floor(year / 100) + np.floor(year / 400) + 1721118.5 + (self.hour + self.minute / 60.0 + self.second / 3600.0 + self.microsecond / 3600.0 / 1e+6 + self.nanosecond / 3600.0 / 1e+9 ) / 24.0)
[ "def", "to_julian_date", "(", "self", ")", ":", "# http://mysite.verizon.net/aesir_research/date/jdalg2.htm", "year", "=", "np", ".", "asarray", "(", "self", ".", "year", ")", "month", "=", "np", ".", "asarray", "(", "self", ".", "month", ")", "day", "=", "np", ".", "asarray", "(", "self", ".", "day", ")", "testarr", "=", "month", "<", "3", "year", "[", "testarr", "]", "-=", "1", "month", "[", "testarr", "]", "+=", "12", "return", "(", "day", "+", "np", ".", "fix", "(", "(", "153", "*", "month", "-", "457", ")", "/", "5", ")", "+", "365", "*", "year", "+", "np", ".", "floor", "(", "year", "/", "4", ")", "-", "np", ".", "floor", "(", "year", "/", "100", ")", "+", "np", ".", "floor", "(", "year", "/", "400", ")", "+", "1721118.5", "+", "(", "self", ".", "hour", "+", "self", ".", "minute", "/", "60.0", "+", "self", ".", "second", "/", "3600.0", "+", "self", ".", "microsecond", "/", "3600.0", "/", "1e+6", "+", "self", ".", "nanosecond", "/", "3600.0", "/", "1e+9", ")", "/", "24.0", ")" ]
34.444444
10.518519
def queryset(self, request, queryset):
    """Filter based on whether an update (of any sort) is available."""
    choice = self.value()
    if choice == '-1':
        # Never checked: no known latest version.
        return queryset.filter(latest_version__isnull=True)
    if choice == '0':
        # Up to date: both versions known and equal.
        return queryset.filter(
            current_version__isnull=False,
            latest_version__isnull=False,
            latest_version=F('current_version'),
        )
    if choice == '1':
        # Update available: both versions known but different.
        return queryset.filter(
            current_version__isnull=False,
            latest_version__isnull=False,
        ).exclude(
            latest_version=F('current_version'),
        )
    # No filter selected: pass the queryset through unchanged.
    return queryset
[ "def", "queryset", "(", "self", ",", "request", ",", "queryset", ")", ":", "if", "self", ".", "value", "(", ")", "==", "'-1'", ":", "return", "queryset", ".", "filter", "(", "latest_version__isnull", "=", "True", ")", "elif", "self", ".", "value", "(", ")", "==", "'0'", ":", "return", "(", "queryset", ".", "filter", "(", "current_version__isnull", "=", "False", ",", "latest_version__isnull", "=", "False", ",", "latest_version", "=", "F", "(", "'current_version'", ")", ")", ")", "elif", "self", ".", "value", "(", ")", "==", "'1'", ":", "return", "(", "queryset", ".", "filter", "(", "current_version__isnull", "=", "False", ",", "latest_version__isnull", "=", "False", ")", ".", "exclude", "(", "latest_version", "=", "F", "(", "'current_version'", ")", ")", ")", "else", ":", "return", "queryset" ]
33.72
14.88
def webui_url(args):
    '''show the url of web ui'''
    config = Config(get_config_filename(args))
    urls = ' '.join(config.get_config('webuiUrl'))
    print_normal('{0} {1}'.format('Web UI url:', urls))
[ "def", "webui_url", "(", "args", ")", ":", "nni_config", "=", "Config", "(", "get_config_filename", "(", "args", ")", ")", "print_normal", "(", "'{0} {1}'", ".", "format", "(", "'Web UI url:'", ",", "' '", ".", "join", "(", "nni_config", ".", "get_config", "(", "'webuiUrl'", ")", ")", ")", ")" ]
49
23
def wrap(item, args=None, krgs=None, **kwargs): """Wraps the given item content between horizontal lines. Item can be a string or a function. **Examples**: :: qprompt.wrap("Hi, this will be wrapped.") # String item. qprompt.wrap(myfunc, [arg1, arg2], {'krgk': krgv}) # Func item. """ with Wrap(**kwargs): if callable(item): args = args or [] krgs = krgs or {} item(*args, **krgs) else: echo(item)
[ "def", "wrap", "(", "item", ",", "args", "=", "None", ",", "krgs", "=", "None", ",", "*", "*", "kwargs", ")", ":", "with", "Wrap", "(", "*", "*", "kwargs", ")", ":", "if", "callable", "(", "item", ")", ":", "args", "=", "args", "or", "[", "]", "krgs", "=", "krgs", "or", "{", "}", "item", "(", "*", "args", ",", "*", "*", "krgs", ")", "else", ":", "echo", "(", "item", ")" ]
30.5
17.625
def parse(cls, data: bytes) -> 'MessageContent': """Parse the bytestring into message content. Args: data: The bytestring to parse. """ lines = cls._find_lines(data) view = memoryview(data) return cls._parse(data, view, lines)
[ "def", "parse", "(", "cls", ",", "data", ":", "bytes", ")", "->", "'MessageContent'", ":", "lines", "=", "cls", ".", "_find_lines", "(", "data", ")", "view", "=", "memoryview", "(", "data", ")", "return", "cls", ".", "_parse", "(", "data", ",", "view", ",", "lines", ")" ]
27.9
13.3
def import_eit_fzj(self, filename, configfile, correction_file=None, timestep=None, **kwargs): """EIT data import for FZJ Medusa systems""" # we get not electrode positions (dummy1) and no topography data # (dummy2) df_emd, dummy1, dummy2 = eit_fzj.read_3p_data( filename, configfile, **kwargs ) if correction_file is not None: eit_fzj_utils.apply_correction_factors(df_emd, correction_file) if timestep is not None: df_emd['timestep'] = timestep self._add_to_container(df_emd) print('Summary:') self._describe_data(df_emd)
[ "def", "import_eit_fzj", "(", "self", ",", "filename", ",", "configfile", ",", "correction_file", "=", "None", ",", "timestep", "=", "None", ",", "*", "*", "kwargs", ")", ":", "# we get not electrode positions (dummy1) and no topography data", "# (dummy2)", "df_emd", ",", "dummy1", ",", "dummy2", "=", "eit_fzj", ".", "read_3p_data", "(", "filename", ",", "configfile", ",", "*", "*", "kwargs", ")", "if", "correction_file", "is", "not", "None", ":", "eit_fzj_utils", ".", "apply_correction_factors", "(", "df_emd", ",", "correction_file", ")", "if", "timestep", "is", "not", "None", ":", "df_emd", "[", "'timestep'", "]", "=", "timestep", "self", ".", "_add_to_container", "(", "df_emd", ")", "print", "(", "'Summary:'", ")", "self", ".", "_describe_data", "(", "df_emd", ")" ]
33.5
18.9
def load_stylesheet_pyqt5(): """ Loads the stylesheet for use in a pyqt5 application. :param pyside: True to load the pyside rc file, False to load the PyQt rc file :return the stylesheet string """ # Smart import of the rc file import qdarkstyle.pyqt5_style_rc # Load the stylesheet content from resources from PyQt5.QtCore import QFile, QTextStream f = QFile(":qdarkstyle/style.qss") if not f.exists(): _logger().error("Unable to load stylesheet, file not found in " "resources") return "" else: f.open(QFile.ReadOnly | QFile.Text) ts = QTextStream(f) stylesheet = ts.readAll() if platform.system().lower() == 'darwin': # see issue #12 on github mac_fix = ''' QDockWidget::title { background-color: #31363b; text-align: center; height: 12px; } ''' stylesheet += mac_fix return stylesheet
[ "def", "load_stylesheet_pyqt5", "(", ")", ":", "# Smart import of the rc file", "import", "qdarkstyle", ".", "pyqt5_style_rc", "# Load the stylesheet content from resources", "from", "PyQt5", ".", "QtCore", "import", "QFile", ",", "QTextStream", "f", "=", "QFile", "(", "\":qdarkstyle/style.qss\"", ")", "if", "not", "f", ".", "exists", "(", ")", ":", "_logger", "(", ")", ".", "error", "(", "\"Unable to load stylesheet, file not found in \"", "\"resources\"", ")", "return", "\"\"", "else", ":", "f", ".", "open", "(", "QFile", ".", "ReadOnly", "|", "QFile", ".", "Text", ")", "ts", "=", "QTextStream", "(", "f", ")", "stylesheet", "=", "ts", ".", "readAll", "(", ")", "if", "platform", ".", "system", "(", ")", ".", "lower", "(", ")", "==", "'darwin'", ":", "# see issue #12 on github", "mac_fix", "=", "'''\n QDockWidget::title\n {\n background-color: #31363b;\n text-align: center;\n height: 12px;\n }\n '''", "stylesheet", "+=", "mac_fix", "return", "stylesheet" ]
29.676471
16.911765
def union(rasters): """ Union of rasters Usage: union(rasters) where: rasters is a list of GeoRaster objects """ if sum([rasters[0].x_cell_size == i.x_cell_size for i in rasters]) == len(rasters) \ and sum([rasters[0].y_cell_size == i.y_cell_size for i in rasters]) == len(rasters)\ and sum([rasters[0].projection.ExportToProj4() == i.projection.ExportToProj4() for i in rasters]) == len(rasters): if sum([rasters[0].nodata_value == i.nodata_value for i in rasters]) == len(rasters): ndv = rasters[0].nodata_value else: ndv = np.nan if ndv == None: ndv = np.nan if sum([rasters[0].datatype == i.datatype for i in rasters]) == len(rasters): datatype = rasters[0].datatype else: datatype = None projection = rasters[0].projection lonmin = min([i.xmin for i in rasters]) lonmax = max([i.xmax for i in rasters]) latmin = min([i.ymin for i in rasters]) latmax = max([i.ymax for i in rasters]) shape = (np.abs(np.floor((latmax-latmin)/rasters[0].y_cell_size)).astype(int), np.floor((lonmax-lonmin)/rasters[0].x_cell_size).astype(int)) out = ndv*np.ones(shape) outmask = np.ones(shape).astype(bool) for i in rasters: (row, col) = map_pixel(i.xmin, i.ymax, rasters[0].x_cell_size, rasters[0].y_cell_size, lonmin, latmax) out[row:row+i.shape[0], col:col+i.shape[1]] = np.where(i.raster.data != i.nodata_value, i.raster.data,\ out[row:row+i.shape[0], col:col+i.shape[1]]) outmask[row:row+i.shape[0], col:col+i.shape[1]] = np.where(i.raster.mask == False, False,\ outmask[row:row+i.shape[0], col:col+i.shape[1]]) out = np.ma.masked_array(out, mask=outmask, fill_value=ndv) return GeoRaster(out, (lonmin, rasters[0].x_cell_size, 0.0, latmax, 0.0, rasters[0].y_cell_size), nodata_value=ndv, projection=projection, datatype=datatype) else: raise RasterGeoError('Rasters need to have same pixel sizes. Use the aggregate or dissolve functions to generate correct GeoRasters')
[ "def", "union", "(", "rasters", ")", ":", "if", "sum", "(", "[", "rasters", "[", "0", "]", ".", "x_cell_size", "==", "i", ".", "x_cell_size", "for", "i", "in", "rasters", "]", ")", "==", "len", "(", "rasters", ")", "and", "sum", "(", "[", "rasters", "[", "0", "]", ".", "y_cell_size", "==", "i", ".", "y_cell_size", "for", "i", "in", "rasters", "]", ")", "==", "len", "(", "rasters", ")", "and", "sum", "(", "[", "rasters", "[", "0", "]", ".", "projection", ".", "ExportToProj4", "(", ")", "==", "i", ".", "projection", ".", "ExportToProj4", "(", ")", "for", "i", "in", "rasters", "]", ")", "==", "len", "(", "rasters", ")", ":", "if", "sum", "(", "[", "rasters", "[", "0", "]", ".", "nodata_value", "==", "i", ".", "nodata_value", "for", "i", "in", "rasters", "]", ")", "==", "len", "(", "rasters", ")", ":", "ndv", "=", "rasters", "[", "0", "]", ".", "nodata_value", "else", ":", "ndv", "=", "np", ".", "nan", "if", "ndv", "==", "None", ":", "ndv", "=", "np", ".", "nan", "if", "sum", "(", "[", "rasters", "[", "0", "]", ".", "datatype", "==", "i", ".", "datatype", "for", "i", "in", "rasters", "]", ")", "==", "len", "(", "rasters", ")", ":", "datatype", "=", "rasters", "[", "0", "]", ".", "datatype", "else", ":", "datatype", "=", "None", "projection", "=", "rasters", "[", "0", "]", ".", "projection", "lonmin", "=", "min", "(", "[", "i", ".", "xmin", "for", "i", "in", "rasters", "]", ")", "lonmax", "=", "max", "(", "[", "i", ".", "xmax", "for", "i", "in", "rasters", "]", ")", "latmin", "=", "min", "(", "[", "i", ".", "ymin", "for", "i", "in", "rasters", "]", ")", "latmax", "=", "max", "(", "[", "i", ".", "ymax", "for", "i", "in", "rasters", "]", ")", "shape", "=", "(", "np", ".", "abs", "(", "np", ".", "floor", "(", "(", "latmax", "-", "latmin", ")", "/", "rasters", "[", "0", "]", ".", "y_cell_size", ")", ")", ".", "astype", "(", "int", ")", ",", "np", ".", "floor", "(", "(", "lonmax", "-", "lonmin", ")", "/", "rasters", "[", "0", "]", ".", "x_cell_size", ")", ".", "astype", 
"(", "int", ")", ")", "out", "=", "ndv", "*", "np", ".", "ones", "(", "shape", ")", "outmask", "=", "np", ".", "ones", "(", "shape", ")", ".", "astype", "(", "bool", ")", "for", "i", "in", "rasters", ":", "(", "row", ",", "col", ")", "=", "map_pixel", "(", "i", ".", "xmin", ",", "i", ".", "ymax", ",", "rasters", "[", "0", "]", ".", "x_cell_size", ",", "rasters", "[", "0", "]", ".", "y_cell_size", ",", "lonmin", ",", "latmax", ")", "out", "[", "row", ":", "row", "+", "i", ".", "shape", "[", "0", "]", ",", "col", ":", "col", "+", "i", ".", "shape", "[", "1", "]", "]", "=", "np", ".", "where", "(", "i", ".", "raster", ".", "data", "!=", "i", ".", "nodata_value", ",", "i", ".", "raster", ".", "data", ",", "out", "[", "row", ":", "row", "+", "i", ".", "shape", "[", "0", "]", ",", "col", ":", "col", "+", "i", ".", "shape", "[", "1", "]", "]", ")", "outmask", "[", "row", ":", "row", "+", "i", ".", "shape", "[", "0", "]", ",", "col", ":", "col", "+", "i", ".", "shape", "[", "1", "]", "]", "=", "np", ".", "where", "(", "i", ".", "raster", ".", "mask", "==", "False", ",", "False", ",", "outmask", "[", "row", ":", "row", "+", "i", ".", "shape", "[", "0", "]", ",", "col", ":", "col", "+", "i", ".", "shape", "[", "1", "]", "]", ")", "out", "=", "np", ".", "ma", ".", "masked_array", "(", "out", ",", "mask", "=", "outmask", ",", "fill_value", "=", "ndv", ")", "return", "GeoRaster", "(", "out", ",", "(", "lonmin", ",", "rasters", "[", "0", "]", ".", "x_cell_size", ",", "0.0", ",", "latmax", ",", "0.0", ",", "rasters", "[", "0", "]", ".", "y_cell_size", ")", ",", "nodata_value", "=", "ndv", ",", "projection", "=", "projection", ",", "datatype", "=", "datatype", ")", "else", ":", "raise", "RasterGeoError", "(", "'Rasters need to have same pixel sizes. Use the aggregate or dissolve functions to generate correct GeoRasters'", ")" ]
55.625
33.725
def utc2et(utcstr): """ Convert an input time from Calendar or Julian Date format, UTC, to ephemeris seconds past J2000. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/utc2et_c.html :param utcstr: Input time string, UTC. :type utcstr: str :return: Output epoch, ephemeris seconds past J2000. :rtype: float """ utcstr = stypes.stringToCharP(utcstr) et = ctypes.c_double() libspice.utc2et_c(utcstr, ctypes.byref(et)) return et.value
[ "def", "utc2et", "(", "utcstr", ")", ":", "utcstr", "=", "stypes", ".", "stringToCharP", "(", "utcstr", ")", "et", "=", "ctypes", ".", "c_double", "(", ")", "libspice", ".", "utc2et_c", "(", "utcstr", ",", "ctypes", ".", "byref", "(", "et", ")", ")", "return", "et", ".", "value" ]
29.9375
16.8125
def as_bulk_queries(queries, bulk_size): """Group a iterable of (stmt, args) by stmt into (stmt, bulk_args). bulk_args will be a list of the args grouped by stmt. len(bulk_args) will be <= bulk_size """ stmt_dict = defaultdict(list) for stmt, args in queries: bulk_args = stmt_dict[stmt] bulk_args.append(args) if len(bulk_args) == bulk_size: yield stmt, bulk_args del stmt_dict[stmt] for stmt, bulk_args in stmt_dict.items(): yield stmt, bulk_args
[ "def", "as_bulk_queries", "(", "queries", ",", "bulk_size", ")", ":", "stmt_dict", "=", "defaultdict", "(", "list", ")", "for", "stmt", ",", "args", "in", "queries", ":", "bulk_args", "=", "stmt_dict", "[", "stmt", "]", "bulk_args", ".", "append", "(", "args", ")", "if", "len", "(", "bulk_args", ")", "==", "bulk_size", ":", "yield", "stmt", ",", "bulk_args", "del", "stmt_dict", "[", "stmt", "]", "for", "stmt", ",", "bulk_args", "in", "stmt_dict", ".", "items", "(", ")", ":", "yield", "stmt", ",", "bulk_args" ]
32.4375
10.1875
def has_preview_permission(self, request, obj=None): """ Return `True` if the user has permissions to preview a publishable item. NOTE: this method does not actually change who can or cannot preview any particular item, just whether to show the preview link. The real dcision is made by a combination of: - `PublishingMiddleware` which chooses who can view draft content - the view code for a particular item, which may or may not render draft content for a specific user. :param request: Django request object. :param obj: The object the user would preview, if permitted. :return: Boolean. """ # User who can publish always has preview permission. if self.has_publish_permission(request, obj=obj): return True user_obj = request.user if not user_obj.is_active: return False if user_obj.is_staff: return True return False
[ "def", "has_preview_permission", "(", "self", ",", "request", ",", "obj", "=", "None", ")", ":", "# User who can publish always has preview permission.", "if", "self", ".", "has_publish_permission", "(", "request", ",", "obj", "=", "obj", ")", ":", "return", "True", "user_obj", "=", "request", ".", "user", "if", "not", "user_obj", ".", "is_active", ":", "return", "False", "if", "user_obj", ".", "is_staff", ":", "return", "True", "return", "False" ]
38.038462
20.115385
def get_actions(actions): """Get actions.""" new_actions = [] if actions: for action in actions: action_obj = get_action(action) if action_obj: new_actions.append(action_obj) return new_actions
[ "def", "get_actions", "(", "actions", ")", ":", "new_actions", "=", "[", "]", "if", "actions", ":", "for", "action", "in", "actions", ":", "action_obj", "=", "get_action", "(", "action", ")", "if", "action_obj", ":", "new_actions", ".", "append", "(", "action_obj", ")", "return", "new_actions" ]
24.9
15.1
def Network_getCertificate(self, origin): """ Function path: Network.getCertificate Domain: Network Method name: getCertificate WARNING: This function is marked 'Experimental'! Parameters: Required arguments: 'origin' (type: string) -> Origin to get certificate for. Returns: 'tableNames' (type: array) -> No description Description: Returns the DER-encoded certificate. """ assert isinstance(origin, (str,) ), "Argument 'origin' must be of type '['str']'. Received type: '%s'" % type( origin) subdom_funcs = self.synchronous_command('Network.getCertificate', origin= origin) return subdom_funcs
[ "def", "Network_getCertificate", "(", "self", ",", "origin", ")", ":", "assert", "isinstance", "(", "origin", ",", "(", "str", ",", ")", ")", ",", "\"Argument 'origin' must be of type '['str']'. Received type: '%s'\"", "%", "type", "(", "origin", ")", "subdom_funcs", "=", "self", ".", "synchronous_command", "(", "'Network.getCertificate'", ",", "origin", "=", "origin", ")", "return", "subdom_funcs" ]
29.272727
19.545455
def __set_date(self, value): ''' Sets the invoice date. @param value:datetime ''' value = date_to_datetime(value) if value > datetime.now() + timedelta(hours=14, minutes=1): #More or less 14 hours from now in case the submitted date was local raise ValueError("Date cannot be in the future.") if self.__due_date and value.date() > self.__due_date: raise ValueError("Date cannot be posterior to the due date.") self.__date = value
[ "def", "__set_date", "(", "self", ",", "value", ")", ":", "value", "=", "date_to_datetime", "(", "value", ")", "if", "value", ">", "datetime", ".", "now", "(", ")", "+", "timedelta", "(", "hours", "=", "14", ",", "minutes", "=", "1", ")", ":", "#More or less 14 hours from now in case the submitted date was local", "raise", "ValueError", "(", "\"Date cannot be in the future.\"", ")", "if", "self", ".", "__due_date", "and", "value", ".", "date", "(", ")", ">", "self", ".", "__due_date", ":", "raise", "ValueError", "(", "\"Date cannot be posterior to the due date.\"", ")", "self", ".", "__date", "=", "value" ]
39
27.461538
def set_servo(self, gpio, pulse_width_us): """ Sets a pulse-width on a gpio to repeat every subcycle (by default every 20ms). """ # Make sure we can set the exact pulse_width_us _pulse_incr_us = _PWM.get_pulse_incr_us() if pulse_width_us % _pulse_incr_us: # No clean division possible raise AttributeError(("Pulse width increment granularity %sus " "cannot divide a pulse-time of %sus") % (_pulse_incr_us, pulse_width_us)) # Initialize channel if not already done, else check subcycle time if _PWM.is_channel_initialized(self._dma_channel): _subcycle_us = _PWM.get_channel_subcycle_time_us(self._dma_channel) if _subcycle_us != self._subcycle_time_us: raise AttributeError(("Error: DMA channel %s is setup with a " "subcycle_time of %sus (instead of %sus)") % \ (self._dma_channel, _subcycle_us, self._subcycle_time_us)) else: init_channel(self._dma_channel, self._subcycle_time_us) # Add pulse for this GPIO add_channel_pulse(self._dma_channel, gpio, 0, \ int(pulse_width_us / _pulse_incr_us))
[ "def", "set_servo", "(", "self", ",", "gpio", ",", "pulse_width_us", ")", ":", "# Make sure we can set the exact pulse_width_us", "_pulse_incr_us", "=", "_PWM", ".", "get_pulse_incr_us", "(", ")", "if", "pulse_width_us", "%", "_pulse_incr_us", ":", "# No clean division possible", "raise", "AttributeError", "(", "(", "\"Pulse width increment granularity %sus \"", "\"cannot divide a pulse-time of %sus\"", ")", "%", "(", "_pulse_incr_us", ",", "pulse_width_us", ")", ")", "# Initialize channel if not already done, else check subcycle time", "if", "_PWM", ".", "is_channel_initialized", "(", "self", ".", "_dma_channel", ")", ":", "_subcycle_us", "=", "_PWM", ".", "get_channel_subcycle_time_us", "(", "self", ".", "_dma_channel", ")", "if", "_subcycle_us", "!=", "self", ".", "_subcycle_time_us", ":", "raise", "AttributeError", "(", "(", "\"Error: DMA channel %s is setup with a \"", "\"subcycle_time of %sus (instead of %sus)\"", ")", "%", "(", "self", ".", "_dma_channel", ",", "_subcycle_us", ",", "self", ".", "_subcycle_time_us", ")", ")", "else", ":", "init_channel", "(", "self", ".", "_dma_channel", ",", "self", ".", "_subcycle_time_us", ")", "# Add pulse for this GPIO", "add_channel_pulse", "(", "self", ".", "_dma_channel", ",", "gpio", ",", "0", ",", "int", "(", "pulse_width_us", "/", "_pulse_incr_us", ")", ")" ]
47.222222
18.703704
def _box_col_values(self, values, items): """ Provide boxed values for a column. """ klass = self._constructor_sliced return klass(values, index=self.index, name=items, fastpath=True)
[ "def", "_box_col_values", "(", "self", ",", "values", ",", "items", ")", ":", "klass", "=", "self", ".", "_constructor_sliced", "return", "klass", "(", "values", ",", "index", "=", "self", ".", "index", ",", "name", "=", "items", ",", "fastpath", "=", "True", ")" ]
36.333333
6
def libvlc_media_list_player_set_media_list(p_mlp, p_mlist): '''Set the media list associated with the player. @param p_mlp: media list player instance. @param p_mlist: list of media. ''' f = _Cfunctions.get('libvlc_media_list_player_set_media_list', None) or \ _Cfunction('libvlc_media_list_player_set_media_list', ((1,), (1,),), None, None, MediaListPlayer, MediaList) return f(p_mlp, p_mlist)
[ "def", "libvlc_media_list_player_set_media_list", "(", "p_mlp", ",", "p_mlist", ")", ":", "f", "=", "_Cfunctions", ".", "get", "(", "'libvlc_media_list_player_set_media_list'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_list_player_set_media_list'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ",", ")", ",", "None", ",", "None", ",", "MediaListPlayer", ",", "MediaList", ")", "return", "f", "(", "p_mlp", ",", "p_mlist", ")" ]
48.777778
20.111111
def search(**criteria): """ Search registered *component* classes matching the given criteria. :param criteria: search criteria of the form: ``a='1', b='x'`` :return: parts registered with the given criteria :rtype: :class:`set` Will return an empty :class:`set` if nothing is found. :: from cqparts.search import search import cqparts_motors # example of a 3rd party lib # Get all DC motor classes dc_motors = search(type='motor', current_class='dc') # For more complex queries: air_cooled = search(cooling='air') non_aircooled_dcmotors = dc_motors - air_cooled # will be all DC motors that aren't air-cooled """ # Find all parts that match the given criteria results = copy(class_list) # start with full list for (category, value) in criteria.items(): results &= index[category][value] return results
[ "def", "search", "(", "*", "*", "criteria", ")", ":", "# Find all parts that match the given criteria", "results", "=", "copy", "(", "class_list", ")", "# start with full list", "for", "(", "category", ",", "value", ")", "in", "criteria", ".", "items", "(", ")", ":", "results", "&=", "index", "[", "category", "]", "[", "value", "]", "return", "results" ]
31.137931
19.62069
def iter_prefix(reader, key): """ Creates an iterator which iterates over lines that start with prefix 'key' in a sorted text file. """ return itertools.takewhile( lambda line: line.startswith(key), search(reader, key))
[ "def", "iter_prefix", "(", "reader", ",", "key", ")", ":", "return", "itertools", ".", "takewhile", "(", "lambda", "line", ":", "line", ".", "startswith", "(", "key", ")", ",", "search", "(", "reader", ",", "key", ")", ")" ]
27.555556
12.666667
def _evaluate(self,*args,**kwargs): """ NAME: __call__ (_evaluate) PURPOSE: evaluate the actions (jr,lz,jz) INPUT: Either: a) R,vR,vT,z,vz[,phi]: 1) floats: phase-space value for single object (phi is optional) (each can be a Quantity) 2) numpy.ndarray: [N] phase-space values for N objects (each can be a Quantity) b) Orbit instance: initial condition used if that's it, orbit(t) if there is a time given as well as the second argument scipy.integrate.quadrature keywords (used when directly evaluating a point off the grid) OUTPUT: (jr,lz,jz) HISTORY: 2012-07-27 - Written - Bovy (IAS@MPIA) NOTE: For a Miyamoto-Nagai potential, this seems accurate to 0.1% and takes ~0.13 ms For a MWPotential, this takes ~ 0.17 ms """ if len(args) == 5: #R,vR.vT, z, vz R,vR,vT, z, vz= args elif len(args) == 6: #R,vR.vT, z, vz, phi R,vR,vT, z, vz, phi= args else: self._parse_eval_args(*args) R= self._eval_R vR= self._eval_vR vT= self._eval_vT z= self._eval_z vz= self._eval_vz #First work on the vertical action Phi= _evaluatePotentials(self._pot,R,z) try: Phio= _evaluatePotentials(self._pot,R,numpy.zeros(len(R))) except TypeError: Phio= _evaluatePotentials(self._pot,R,0.) Ez= Phi-Phio+vz**2./2. #Bigger than Ezzmax? 
thisEzZmax= numpy.exp(self._EzZmaxsInterp(R)) if isinstance(R,numpy.ndarray): indx= (R > self._Rmax) indx+= (R < self._Rmin) indx+= (Ez != 0.)*(numpy.log(Ez) > thisEzZmax) indxc= True^indx jz= numpy.empty(R.shape) if numpy.sum(indxc) > 0: jz[indxc]= (self._jzInterp.ev(R[indxc],Ez[indxc]/thisEzZmax[indxc])\ *(numpy.exp(self._jzEzmaxInterp(R[indxc]))-10.**-5.)) if numpy.sum(indx) > 0: jz[indx]= self._aA(R[indx], numpy.zeros(numpy.sum(indx)), numpy.ones(numpy.sum(indx)),#these two r dummies numpy.zeros(numpy.sum(indx)), numpy.sqrt(2.*Ez[indx]), _justjz=True, **kwargs)[2] else: if R > self._Rmax or R < self._Rmin or (Ez != 0 and numpy.log(Ez) > thisEzZmax): #Outside of the grid if _PRINTOUTSIDEGRID: #pragma: no cover print("Outside of grid in Ez", R > self._Rmax , R < self._Rmin , (Ez != 0 and numpy.log(Ez) > thisEzZmax)) jz= self._aA(R,0.,1.,#these two r dummies 0.,math.sqrt(2.*Ez), _justjz=True, **kwargs)[2] else: jz= (self._jzInterp(R,Ez/thisEzZmax)\ *(numpy.exp(self._jzEzmaxInterp(R))-10.**-5.))[0][0] #Radial action ERLz= numpy.fabs(R*vT)+self._gamma*jz ER= Phio+vR**2./2.+ERLz**2./2./R**2. thisRL= self._RLInterp(ERLz) thisERRL= -numpy.exp(self._ERRLInterp(ERLz))+self._ERRLmax thisERRa= -numpy.exp(self._ERRaInterp(ERLz))+self._ERRamax if isinstance(R,numpy.ndarray): indx= ((ER-thisERRa)/(thisERRL-thisERRa) > 1.)\ *(((ER-thisERRa)/(thisERRL-thisERRa)-1.) < 10.**-2.) ER[indx]= thisERRL[indx] indx= ((ER-thisERRa)/(thisERRL-thisERRa) < 0.)\ *((ER-thisERRa)/(thisERRL-thisERRa) > -10.**-2.) ER[indx]= thisERRa[indx] indx= (ERLz < self._Lzmin) indx+= (ERLz > self._Lzmax) indx+= ((ER-thisERRa)/(thisERRL-thisERRa) > 1.) indx+= ((ER-thisERRa)/(thisERRL-thisERRa) < 0.) 
indxc= True^indx jr= numpy.empty(R.shape) if numpy.sum(indxc) > 0: jr[indxc]= (self._jrInterp.ev(ERLz[indxc], (ER[indxc]-thisERRa[indxc])/(thisERRL[indxc]-thisERRa[indxc]))\ *(numpy.exp(self._jrERRaInterp(ERLz[indxc]))-10.**-5.)) if numpy.sum(indx) > 0: jr[indx]= self._aA(thisRL[indx], numpy.sqrt(2.*(ER[indx]-_evaluatePotentials(self._pot,thisRL[indx],0.))-ERLz[indx]**2./thisRL[indx]**2.), ERLz[indx]/thisRL[indx], numpy.zeros(len(thisRL)), numpy.zeros(len(thisRL)), _justjr=True, **kwargs)[0] else: if (ER-thisERRa)/(thisERRL-thisERRa) > 1. \ and ((ER-thisERRa)/(thisERRL-thisERRa)-1.) < 10.**-2.: ER= thisERRL elif (ER-thisERRa)/(thisERRL-thisERRa) < 0. \ and (ER-thisERRa)/(thisERRL-thisERRa) > -10.**-2.: ER= thisERRa #Outside of grid? if ERLz < self._Lzmin or ERLz > self._Lzmax \ or (ER-thisERRa)/(thisERRL-thisERRa) > 1. \ or (ER-thisERRa)/(thisERRL-thisERRa) < 0.: if _PRINTOUTSIDEGRID: #pragma: no cover print("Outside of grid in ER/Lz", ERLz < self._Lzmin , ERLz > self._Lzmax \ , (ER-thisERRa)/(thisERRL-thisERRa) > 1. \ , (ER-thisERRa)/(thisERRL-thisERRa) < 0., ER, thisERRL, thisERRa, (ER-thisERRa)/(thisERRL-thisERRa)) jr= self._aA(thisRL[0], numpy.sqrt(2.*(ER-_evaluatePotentials(self._pot,thisRL,0.))-ERLz**2./thisRL**2.)[0], (ERLz/thisRL)[0], 0.,0., _justjr=True, **kwargs)[0] else: jr= (self._jrInterp(ERLz, (ER-thisERRa)/(thisERRL-thisERRa))\ *(numpy.exp(self._jrERRaInterp(ERLz))-10.**-5.))[0][0] return (jr,R*vT,jz)
[ "def", "_evaluate", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "len", "(", "args", ")", "==", "5", ":", "#R,vR.vT, z, vz", "R", ",", "vR", ",", "vT", ",", "z", ",", "vz", "=", "args", "elif", "len", "(", "args", ")", "==", "6", ":", "#R,vR.vT, z, vz, phi", "R", ",", "vR", ",", "vT", ",", "z", ",", "vz", ",", "phi", "=", "args", "else", ":", "self", ".", "_parse_eval_args", "(", "*", "args", ")", "R", "=", "self", ".", "_eval_R", "vR", "=", "self", ".", "_eval_vR", "vT", "=", "self", ".", "_eval_vT", "z", "=", "self", ".", "_eval_z", "vz", "=", "self", ".", "_eval_vz", "#First work on the vertical action", "Phi", "=", "_evaluatePotentials", "(", "self", ".", "_pot", ",", "R", ",", "z", ")", "try", ":", "Phio", "=", "_evaluatePotentials", "(", "self", ".", "_pot", ",", "R", ",", "numpy", ".", "zeros", "(", "len", "(", "R", ")", ")", ")", "except", "TypeError", ":", "Phio", "=", "_evaluatePotentials", "(", "self", ".", "_pot", ",", "R", ",", "0.", ")", "Ez", "=", "Phi", "-", "Phio", "+", "vz", "**", "2.", "/", "2.", "#Bigger than Ezzmax?", "thisEzZmax", "=", "numpy", ".", "exp", "(", "self", ".", "_EzZmaxsInterp", "(", "R", ")", ")", "if", "isinstance", "(", "R", ",", "numpy", ".", "ndarray", ")", ":", "indx", "=", "(", "R", ">", "self", ".", "_Rmax", ")", "indx", "+=", "(", "R", "<", "self", ".", "_Rmin", ")", "indx", "+=", "(", "Ez", "!=", "0.", ")", "*", "(", "numpy", ".", "log", "(", "Ez", ")", ">", "thisEzZmax", ")", "indxc", "=", "True", "^", "indx", "jz", "=", "numpy", ".", "empty", "(", "R", ".", "shape", ")", "if", "numpy", ".", "sum", "(", "indxc", ")", ">", "0", ":", "jz", "[", "indxc", "]", "=", "(", "self", ".", "_jzInterp", ".", "ev", "(", "R", "[", "indxc", "]", ",", "Ez", "[", "indxc", "]", "/", "thisEzZmax", "[", "indxc", "]", ")", "*", "(", "numpy", ".", "exp", "(", "self", ".", "_jzEzmaxInterp", "(", "R", "[", "indxc", "]", ")", ")", "-", "10.", "**", "-", "5.", ")", ")", "if", "numpy", ".", "sum", "(", "indx", ")", 
">", "0", ":", "jz", "[", "indx", "]", "=", "self", ".", "_aA", "(", "R", "[", "indx", "]", ",", "numpy", ".", "zeros", "(", "numpy", ".", "sum", "(", "indx", ")", ")", ",", "numpy", ".", "ones", "(", "numpy", ".", "sum", "(", "indx", ")", ")", ",", "#these two r dummies", "numpy", ".", "zeros", "(", "numpy", ".", "sum", "(", "indx", ")", ")", ",", "numpy", ".", "sqrt", "(", "2.", "*", "Ez", "[", "indx", "]", ")", ",", "_justjz", "=", "True", ",", "*", "*", "kwargs", ")", "[", "2", "]", "else", ":", "if", "R", ">", "self", ".", "_Rmax", "or", "R", "<", "self", ".", "_Rmin", "or", "(", "Ez", "!=", "0", "and", "numpy", ".", "log", "(", "Ez", ")", ">", "thisEzZmax", ")", ":", "#Outside of the grid", "if", "_PRINTOUTSIDEGRID", ":", "#pragma: no cover", "print", "(", "\"Outside of grid in Ez\"", ",", "R", ">", "self", ".", "_Rmax", ",", "R", "<", "self", ".", "_Rmin", ",", "(", "Ez", "!=", "0", "and", "numpy", ".", "log", "(", "Ez", ")", ">", "thisEzZmax", ")", ")", "jz", "=", "self", ".", "_aA", "(", "R", ",", "0.", ",", "1.", ",", "#these two r dummies", "0.", ",", "math", ".", "sqrt", "(", "2.", "*", "Ez", ")", ",", "_justjz", "=", "True", ",", "*", "*", "kwargs", ")", "[", "2", "]", "else", ":", "jz", "=", "(", "self", ".", "_jzInterp", "(", "R", ",", "Ez", "/", "thisEzZmax", ")", "*", "(", "numpy", ".", "exp", "(", "self", ".", "_jzEzmaxInterp", "(", "R", ")", ")", "-", "10.", "**", "-", "5.", ")", ")", "[", "0", "]", "[", "0", "]", "#Radial action", "ERLz", "=", "numpy", ".", "fabs", "(", "R", "*", "vT", ")", "+", "self", ".", "_gamma", "*", "jz", "ER", "=", "Phio", "+", "vR", "**", "2.", "/", "2.", "+", "ERLz", "**", "2.", "/", "2.", "/", "R", "**", "2.", "thisRL", "=", "self", ".", "_RLInterp", "(", "ERLz", ")", "thisERRL", "=", "-", "numpy", ".", "exp", "(", "self", ".", "_ERRLInterp", "(", "ERLz", ")", ")", "+", "self", ".", "_ERRLmax", "thisERRa", "=", "-", "numpy", ".", "exp", "(", "self", ".", "_ERRaInterp", "(", "ERLz", ")", ")", "+", "self", ".", 
"_ERRamax", "if", "isinstance", "(", "R", ",", "numpy", ".", "ndarray", ")", ":", "indx", "=", "(", "(", "ER", "-", "thisERRa", ")", "/", "(", "thisERRL", "-", "thisERRa", ")", ">", "1.", ")", "*", "(", "(", "(", "ER", "-", "thisERRa", ")", "/", "(", "thisERRL", "-", "thisERRa", ")", "-", "1.", ")", "<", "10.", "**", "-", "2.", ")", "ER", "[", "indx", "]", "=", "thisERRL", "[", "indx", "]", "indx", "=", "(", "(", "ER", "-", "thisERRa", ")", "/", "(", "thisERRL", "-", "thisERRa", ")", "<", "0.", ")", "*", "(", "(", "ER", "-", "thisERRa", ")", "/", "(", "thisERRL", "-", "thisERRa", ")", ">", "-", "10.", "**", "-", "2.", ")", "ER", "[", "indx", "]", "=", "thisERRa", "[", "indx", "]", "indx", "=", "(", "ERLz", "<", "self", ".", "_Lzmin", ")", "indx", "+=", "(", "ERLz", ">", "self", ".", "_Lzmax", ")", "indx", "+=", "(", "(", "ER", "-", "thisERRa", ")", "/", "(", "thisERRL", "-", "thisERRa", ")", ">", "1.", ")", "indx", "+=", "(", "(", "ER", "-", "thisERRa", ")", "/", "(", "thisERRL", "-", "thisERRa", ")", "<", "0.", ")", "indxc", "=", "True", "^", "indx", "jr", "=", "numpy", ".", "empty", "(", "R", ".", "shape", ")", "if", "numpy", ".", "sum", "(", "indxc", ")", ">", "0", ":", "jr", "[", "indxc", "]", "=", "(", "self", ".", "_jrInterp", ".", "ev", "(", "ERLz", "[", "indxc", "]", ",", "(", "ER", "[", "indxc", "]", "-", "thisERRa", "[", "indxc", "]", ")", "/", "(", "thisERRL", "[", "indxc", "]", "-", "thisERRa", "[", "indxc", "]", ")", ")", "*", "(", "numpy", ".", "exp", "(", "self", ".", "_jrERRaInterp", "(", "ERLz", "[", "indxc", "]", ")", ")", "-", "10.", "**", "-", "5.", ")", ")", "if", "numpy", ".", "sum", "(", "indx", ")", ">", "0", ":", "jr", "[", "indx", "]", "=", "self", ".", "_aA", "(", "thisRL", "[", "indx", "]", ",", "numpy", ".", "sqrt", "(", "2.", "*", "(", "ER", "[", "indx", "]", "-", "_evaluatePotentials", "(", "self", ".", "_pot", ",", "thisRL", "[", "indx", "]", ",", "0.", ")", ")", "-", "ERLz", "[", "indx", "]", "**", "2.", "/", "thisRL", "[", "indx", 
"]", "**", "2.", ")", ",", "ERLz", "[", "indx", "]", "/", "thisRL", "[", "indx", "]", ",", "numpy", ".", "zeros", "(", "len", "(", "thisRL", ")", ")", ",", "numpy", ".", "zeros", "(", "len", "(", "thisRL", ")", ")", ",", "_justjr", "=", "True", ",", "*", "*", "kwargs", ")", "[", "0", "]", "else", ":", "if", "(", "ER", "-", "thisERRa", ")", "/", "(", "thisERRL", "-", "thisERRa", ")", ">", "1.", "and", "(", "(", "ER", "-", "thisERRa", ")", "/", "(", "thisERRL", "-", "thisERRa", ")", "-", "1.", ")", "<", "10.", "**", "-", "2.", ":", "ER", "=", "thisERRL", "elif", "(", "ER", "-", "thisERRa", ")", "/", "(", "thisERRL", "-", "thisERRa", ")", "<", "0.", "and", "(", "ER", "-", "thisERRa", ")", "/", "(", "thisERRL", "-", "thisERRa", ")", ">", "-", "10.", "**", "-", "2.", ":", "ER", "=", "thisERRa", "#Outside of grid?", "if", "ERLz", "<", "self", ".", "_Lzmin", "or", "ERLz", ">", "self", ".", "_Lzmax", "or", "(", "ER", "-", "thisERRa", ")", "/", "(", "thisERRL", "-", "thisERRa", ")", ">", "1.", "or", "(", "ER", "-", "thisERRa", ")", "/", "(", "thisERRL", "-", "thisERRa", ")", "<", "0.", ":", "if", "_PRINTOUTSIDEGRID", ":", "#pragma: no cover", "print", "(", "\"Outside of grid in ER/Lz\"", ",", "ERLz", "<", "self", ".", "_Lzmin", ",", "ERLz", ">", "self", ".", "_Lzmax", ",", "(", "ER", "-", "thisERRa", ")", "/", "(", "thisERRL", "-", "thisERRa", ")", ">", "1.", ",", "(", "ER", "-", "thisERRa", ")", "/", "(", "thisERRL", "-", "thisERRa", ")", "<", "0.", ",", "ER", ",", "thisERRL", ",", "thisERRa", ",", "(", "ER", "-", "thisERRa", ")", "/", "(", "thisERRL", "-", "thisERRa", ")", ")", "jr", "=", "self", ".", "_aA", "(", "thisRL", "[", "0", "]", ",", "numpy", ".", "sqrt", "(", "2.", "*", "(", "ER", "-", "_evaluatePotentials", "(", "self", ".", "_pot", ",", "thisRL", ",", "0.", ")", ")", "-", "ERLz", "**", "2.", "/", "thisRL", "**", "2.", ")", "[", "0", "]", ",", "(", "ERLz", "/", "thisRL", ")", "[", "0", "]", ",", "0.", ",", "0.", ",", "_justjr", "=", "True", ",", "*", "*", 
"kwargs", ")", "[", "0", "]", "else", ":", "jr", "=", "(", "self", ".", "_jrInterp", "(", "ERLz", ",", "(", "ER", "-", "thisERRa", ")", "/", "(", "thisERRL", "-", "thisERRa", ")", ")", "*", "(", "numpy", ".", "exp", "(", "self", ".", "_jrERRaInterp", "(", "ERLz", ")", ")", "-", "10.", "**", "-", "5.", ")", ")", "[", "0", "]", "[", "0", "]", "return", "(", "jr", ",", "R", "*", "vT", ",", "jz", ")" ]
49.833333
20.198413
def has_logged_in(self): """Check whether the API has logged in""" r = self.http.get(CHECKPOINT_URL) if r.state is False: return True # If logged out, flush cache self._reset_cache() return False
[ "def", "has_logged_in", "(", "self", ")", ":", "r", "=", "self", ".", "http", ".", "get", "(", "CHECKPOINT_URL", ")", "if", "r", ".", "state", "is", "False", ":", "return", "True", "# If logged out, flush cache", "self", ".", "_reset_cache", "(", ")", "return", "False" ]
31
10.375
def rename_session(self, new_name): """ Rename session and return new :class:`Session` object. Parameters ---------- new_name : str new session name Returns ------- :class:`Session` Raises ------ :exc:`exc.BadSessionName` """ session_check_name(new_name) proc = self.cmd('rename-session', new_name) if proc.stderr: if has_version('2.7') and 'no current client' in proc.stderr: """tmux 2.7 raises "no current client" warning on BSD systems. Should be fixed next release: - https://www.mail-archive.com/tech@openbsd.org/msg45186.html - https://marc.info/?l=openbsd-cvs&m=152183263526828&w=2 """ pass else: raise exc.LibTmuxException(proc.stderr) return self
[ "def", "rename_session", "(", "self", ",", "new_name", ")", ":", "session_check_name", "(", "new_name", ")", "proc", "=", "self", ".", "cmd", "(", "'rename-session'", ",", "new_name", ")", "if", "proc", ".", "stderr", ":", "if", "has_version", "(", "'2.7'", ")", "and", "'no current client'", "in", "proc", ".", "stderr", ":", "\"\"\"tmux 2.7 raises \"no current client\" warning on BSD systems.\n\n Should be fixed next release:\n\n - https://www.mail-archive.com/tech@openbsd.org/msg45186.html\n - https://marc.info/?l=openbsd-cvs&m=152183263526828&w=2\n \"\"\"", "pass", "else", ":", "raise", "exc", ".", "LibTmuxException", "(", "proc", ".", "stderr", ")", "return", "self" ]
25.857143
21.828571
def translate_line_footnotes(line, tag=None, default_title='<NOT_FOUND>'): r""" Find all bare-url footnotes, like "footnote:[moz.org]" and add a title like "footnote:[Moz (moz.org)]" >>> translate_line_footnotes('*Morphemes*:: Parts of tokens or words that contain meaning in and of themselves.'\ ... 'footnote:[https://spacy.io/usage/linguistic-features#rule-based-morphology]') '*Morphemes*:: Parts of tokens or words that contain meaning in and of themselves.footnote:[See the web page titled "Linguistic Features : spaCy Usage Documentation" (https://spacy.io/usage/linguistic-features#rule-based-morphology).]' """ line_urls = get_line_bad_footnotes(line, tag=tag) urls = line_urls[1:] if line_urls else [] for url in urls: footnote = 'footnote:[{url}]'.format(url=url) new_footnote = footnote # TODO: use these to extract name from hyperlinks title = get_url_title(url) title = title or infer_url_title(url) title = (title or '').strip(' \t\n\r\f-_:|="\'/\\') title = title if ' ' in (title or 'X') else None if title: brief_title = title.split('\n')[0].strip().split('|')[0].strip().split('Â')[0].strip().split('·')[0].strip() logging.info('URL: {}'.format(url)) logging.info('TITLE: {}'.format(title)) title = brief_title if len(brief_title) > 3 and len(title) > 55 else title title = title.replace('Â', '').replace('·', ':').replace('|', ':').replace('\n', '--') logging.info('FINAL: {}'.format(title)) title = title or default_title if title: new_footnote = 'footnote:[See the web page titled "{title}" ({url}).]'.format(title=(title or default_title), url=url) elif title is None: logging.error('Unable to find a title for url: {}'.format(url)) else: new_footnote = 'footnote:[See the web page ({url}).]'.format(url=url) line = line.replace( footnote, new_footnote) return line
[ "def", "translate_line_footnotes", "(", "line", ",", "tag", "=", "None", ",", "default_title", "=", "'<NOT_FOUND>'", ")", ":", "line_urls", "=", "get_line_bad_footnotes", "(", "line", ",", "tag", "=", "tag", ")", "urls", "=", "line_urls", "[", "1", ":", "]", "if", "line_urls", "else", "[", "]", "for", "url", "in", "urls", ":", "footnote", "=", "'footnote:[{url}]'", ".", "format", "(", "url", "=", "url", ")", "new_footnote", "=", "footnote", "# TODO: use these to extract name from hyperlinks", "title", "=", "get_url_title", "(", "url", ")", "title", "=", "title", "or", "infer_url_title", "(", "url", ")", "title", "=", "(", "title", "or", "''", ")", ".", "strip", "(", "' \\t\\n\\r\\f-_:|=\"\\'/\\\\'", ")", "title", "=", "title", "if", "' '", "in", "(", "title", "or", "'X'", ")", "else", "None", "if", "title", ":", "brief_title", "=", "title", ".", "split", "(", "'\\n'", ")", "[", "0", "]", ".", "strip", "(", ")", ".", "split", "(", "'|'", ")", "[", "0", "]", ".", "strip", "(", ")", ".", "split", "(", "'Â')", "[", "0", "]", ".", "s", "trip(", ")", ".", "s", "plit(", "'", "·')[", "0", "]", ".", "s", "t", "rip()", "", "", "logging", ".", "info", "(", "'URL: {}'", ".", "format", "(", "url", ")", ")", "logging", ".", "info", "(", "'TITLE: {}'", ".", "format", "(", "title", ")", ")", "title", "=", "brief_title", "if", "len", "(", "brief_title", ")", ">", "3", "and", "len", "(", "title", ")", ">", "55", "else", "title", "title", "=", "title", ".", "replace", "(", "'Â',", " ", "')", ".", "r", "eplace(", "'", "·', ", "'", "').", "r", "e", "place('", "|", "', ", "'", "').", "r", "e", "place('", "\\", "n', ", "'", "-')", "", "logging", ".", "info", "(", "'FINAL: {}'", ".", "format", "(", "title", ")", ")", "title", "=", "title", "or", "default_title", "if", "title", ":", "new_footnote", "=", "'footnote:[See the web page titled \"{title}\" ({url}).]'", ".", "format", "(", "title", "=", "(", "title", "or", "default_title", ")", ",", "url", "=", "url", ")", "elif", "title", 
"is", "None", ":", "logging", ".", "error", "(", "'Unable to find a title for url: {}'", ".", "format", "(", "url", ")", ")", "else", ":", "new_footnote", "=", "'footnote:[See the web page ({url}).]'", ".", "format", "(", "url", "=", "url", ")", "line", "=", "line", ".", "replace", "(", "footnote", ",", "new_footnote", ")", "return", "line" ]
52.307692
27.410256
def geojson_to_dict_list(data): """Parse GeoJSON-formatted information in <data> to list of Python dicts""" # return data formatted as list or dict if type(data) in (list, dict): return data # read from data defined as local file address try: with open(data, 'r') as f: features = json.load(f)['features'] # if data is defined as a URL, load JSON object from address except IOError: features = requests.get(data).json()['features'] except: raise SourceDataError('MapViz data must be valid GeoJSON or JSON. Please check your <data> parameter.') return [feature['properties'] for feature in features]
[ "def", "geojson_to_dict_list", "(", "data", ")", ":", "# return data formatted as list or dict", "if", "type", "(", "data", ")", "in", "(", "list", ",", "dict", ")", ":", "return", "data", "# read from data defined as local file address", "try", ":", "with", "open", "(", "data", ",", "'r'", ")", "as", "f", ":", "features", "=", "json", ".", "load", "(", "f", ")", "[", "'features'", "]", "# if data is defined as a URL, load JSON object from address", "except", "IOError", ":", "features", "=", "requests", ".", "get", "(", "data", ")", ".", "json", "(", ")", "[", "'features'", "]", "except", ":", "raise", "SourceDataError", "(", "'MapViz data must be valid GeoJSON or JSON. Please check your <data> parameter.'", ")", "return", "[", "feature", "[", "'properties'", "]", "for", "feature", "in", "features", "]" ]
33.45
23.5
def _download_predicate_data(self, class_, controller): """Get raw predicate information for given request class, and cache for subsequent calls. """ self.authenticate() url = ('{0}{1}/modeldef/class/{2}' .format(self.base_url, controller, class_)) logger.debug(requests.utils.requote_uri(url)) resp = self._ratelimited_get(url) _raise_for_status(resp) return resp.json()['data']
[ "def", "_download_predicate_data", "(", "self", ",", "class_", ",", "controller", ")", ":", "self", ".", "authenticate", "(", ")", "url", "=", "(", "'{0}{1}/modeldef/class/{2}'", ".", "format", "(", "self", ".", "base_url", ",", "controller", ",", "class_", ")", ")", "logger", ".", "debug", "(", "requests", ".", "utils", ".", "requote_uri", "(", "url", ")", ")", "resp", "=", "self", ".", "_ratelimited_get", "(", "url", ")", "_raise_for_status", "(", "resp", ")", "return", "resp", ".", "json", "(", ")", "[", "'data'", "]" ]
28.5
18.25
def connect(self, forceReconnect=False): """ Check current conditions and initiate connection if possible. This is called to check preconditions for starting a new connection, and initating the connection itself. If the service is not running, this will do nothing. @param forceReconnect: Drop an existing connection to reconnnect. @type forceReconnect: C{False} @raises L{ConnectError}: When a connection (attempt) is already in progress, unless C{forceReconnect} is set. @raises L{NoConsumerError}: When there is no consumer for incoming tweets. No further connection attempts will be made, unless L{connect} is called again. """ if self._state == 'stopped': raise Error("This service is not running. Not connecting.") if self._state == 'connected': if forceReconnect: self._toState('disconnecting') return True else: raise ConnectError("Already connected.") elif self._state == 'aborting': raise ConnectError("Aborting connection in progress.") elif self._state == 'disconnecting': raise ConnectError("Disconnect in progress.") elif self._state == 'connecting': if forceReconnect: self._toState('aborting') return True else: raise ConnectError("Connect in progress.") if self.delegate is None: if self._state != 'idle': self._toState('idle') raise NoConsumerError() if self._state == 'waiting': if self._reconnectDelayedCall.called: self._reconnectDelayedCall = None pass else: self._reconnectDelayedCall.reset(0) return True self._toState('connecting') return True
[ "def", "connect", "(", "self", ",", "forceReconnect", "=", "False", ")", ":", "if", "self", ".", "_state", "==", "'stopped'", ":", "raise", "Error", "(", "\"This service is not running. Not connecting.\"", ")", "if", "self", ".", "_state", "==", "'connected'", ":", "if", "forceReconnect", ":", "self", ".", "_toState", "(", "'disconnecting'", ")", "return", "True", "else", ":", "raise", "ConnectError", "(", "\"Already connected.\"", ")", "elif", "self", ".", "_state", "==", "'aborting'", ":", "raise", "ConnectError", "(", "\"Aborting connection in progress.\"", ")", "elif", "self", ".", "_state", "==", "'disconnecting'", ":", "raise", "ConnectError", "(", "\"Disconnect in progress.\"", ")", "elif", "self", ".", "_state", "==", "'connecting'", ":", "if", "forceReconnect", ":", "self", ".", "_toState", "(", "'aborting'", ")", "return", "True", "else", ":", "raise", "ConnectError", "(", "\"Connect in progress.\"", ")", "if", "self", ".", "delegate", "is", "None", ":", "if", "self", ".", "_state", "!=", "'idle'", ":", "self", ".", "_toState", "(", "'idle'", ")", "raise", "NoConsumerError", "(", ")", "if", "self", ".", "_state", "==", "'waiting'", ":", "if", "self", ".", "_reconnectDelayedCall", ".", "called", ":", "self", ".", "_reconnectDelayedCall", "=", "None", "pass", "else", ":", "self", ".", "_reconnectDelayedCall", ".", "reset", "(", "0", ")", "return", "True", "self", ".", "_toState", "(", "'connecting'", ")", "return", "True" ]
36.075472
17.584906
def gait_regularity_symmetry(self, x, average_step_duration='autodetect', average_stride_duration='autodetect', unbias=1, normalize=2): """ Compute step and stride regularity and symmetry from accelerometer data with the help of steps and strides. :param x: The time series to assess freeze of gait on. This could be x, y, z or mag_sum_acc. :type x: pandas.Series :param average_step_duration: Average duration of each step using the same time unit as the time series. If this is set to 'autodetect' it will infer this from the time series. :type average_step_duration: float :param average_stride_duration: Average duration of each stride using the same time unit as the time series. If this is set to 'autodetect' it will infer this from the time series. :type average_stride_duration: float :param unbias: Unbiased autocorrelation: divide by range (unbias=1) or by weighted range (unbias=2). :type unbias: int :param int normalize: Normalize: divide by 1st coefficient (normalize=1) or by maximum abs. value (normalize=2). :type normalize: int :return step_regularity: Step regularity measure along axis. :rtype step_regularity: float :return stride_regularity: Stride regularity measure along axis. :rtype stride_regularity: float :return symmetry: Symmetry measure along axis. 
:rtype symmetry: float """ if (average_step_duration=='autodetect') or (average_stride_duration=='autodetect'): strikes, _ = self.heel_strikes(x) step_durations = [] for i in range(1, np.size(strikes)): step_durations.append(strikes[i] - strikes[i-1]) average_step_duration = np.mean(step_durations) number_of_steps = np.size(strikes) strides1 = strikes[0::2] strides2 = strikes[1::2] stride_durations1 = [] for i in range(1, np.size(strides1)): stride_durations1.append(strides1[i] - strides1[i-1]) stride_durations2 = [] for i in range(1, np.size(strides2)): stride_durations2.append(strides2[i] - strides2[i-1]) strides = [strides1, strides2] stride_durations = [stride_durations1, stride_durations2] average_stride_duration = np.mean((np.mean(stride_durations1), np.mean(stride_durations2))) return self.gait_regularity_symmetry(x, average_step_duration, average_stride_duration) else: coefficients, _ = autocorrelate(x, unbias=1, normalize=2) step_period = np.int(np.round(1 / average_step_duration)) stride_period = np.int(np.round(1 / average_stride_duration)) step_regularity = coefficients[step_period] stride_regularity = coefficients[stride_period] symmetry = np.abs(stride_regularity - step_regularity) return step_regularity, stride_regularity, symmetry
[ "def", "gait_regularity_symmetry", "(", "self", ",", "x", ",", "average_step_duration", "=", "'autodetect'", ",", "average_stride_duration", "=", "'autodetect'", ",", "unbias", "=", "1", ",", "normalize", "=", "2", ")", ":", "if", "(", "average_step_duration", "==", "'autodetect'", ")", "or", "(", "average_stride_duration", "==", "'autodetect'", ")", ":", "strikes", ",", "_", "=", "self", ".", "heel_strikes", "(", "x", ")", "step_durations", "=", "[", "]", "for", "i", "in", "range", "(", "1", ",", "np", ".", "size", "(", "strikes", ")", ")", ":", "step_durations", ".", "append", "(", "strikes", "[", "i", "]", "-", "strikes", "[", "i", "-", "1", "]", ")", "average_step_duration", "=", "np", ".", "mean", "(", "step_durations", ")", "number_of_steps", "=", "np", ".", "size", "(", "strikes", ")", "strides1", "=", "strikes", "[", "0", ":", ":", "2", "]", "strides2", "=", "strikes", "[", "1", ":", ":", "2", "]", "stride_durations1", "=", "[", "]", "for", "i", "in", "range", "(", "1", ",", "np", ".", "size", "(", "strides1", ")", ")", ":", "stride_durations1", ".", "append", "(", "strides1", "[", "i", "]", "-", "strides1", "[", "i", "-", "1", "]", ")", "stride_durations2", "=", "[", "]", "for", "i", "in", "range", "(", "1", ",", "np", ".", "size", "(", "strides2", ")", ")", ":", "stride_durations2", ".", "append", "(", "strides2", "[", "i", "]", "-", "strides2", "[", "i", "-", "1", "]", ")", "strides", "=", "[", "strides1", ",", "strides2", "]", "stride_durations", "=", "[", "stride_durations1", ",", "stride_durations2", "]", "average_stride_duration", "=", "np", ".", "mean", "(", "(", "np", ".", "mean", "(", "stride_durations1", ")", ",", "np", ".", "mean", "(", "stride_durations2", ")", ")", ")", "return", "self", ".", "gait_regularity_symmetry", "(", "x", ",", "average_step_duration", ",", "average_stride_duration", ")", "else", ":", "coefficients", ",", "_", "=", "autocorrelate", "(", "x", ",", "unbias", "=", "1", ",", "normalize", "=", "2", ")", "step_period", "=", 
"np", ".", "int", "(", "np", ".", "round", "(", "1", "/", "average_step_duration", ")", ")", "stride_period", "=", "np", ".", "int", "(", "np", ".", "round", "(", "1", "/", "average_stride_duration", ")", ")", "step_regularity", "=", "coefficients", "[", "step_period", "]", "stride_regularity", "=", "coefficients", "[", "stride_period", "]", "symmetry", "=", "np", ".", "abs", "(", "stride_regularity", "-", "step_regularity", ")", "return", "step_regularity", ",", "stride_regularity", ",", "symmetry" ]
49.174603
31.634921
def int_to_key(value, base=BASE62): """ Convert the specified integer to a key using the given base. @param value: a positive integer. @param base: a sequence of characters that is used to encode the integer value. @return: a key expressed in the specified base. """ def key_sequence_generator(value, base): """ Generator for producing sequence of characters of a key providing an integer value and a base of characters for encoding, such as Base62 for instance. @param value: a positive integer. @param base: a sequence of characters that is used to encode the integer value. @return: the next character of the object's key encoded with the specified base. """ base_length = len(base) while True: yield base[value % base_length] if value < base_length: break value /= base_length return ''.join([ c for c in key_sequence_generator(value, base) ])
[ "def", "int_to_key", "(", "value", ",", "base", "=", "BASE62", ")", ":", "def", "key_sequence_generator", "(", "value", ",", "base", ")", ":", "\"\"\"\n Generator for producing sequence of characters of a key providing an\n integer value and a base of characters for encoding, such as Base62\n for instance.\n\n @param value: a positive integer.\n @param base: a sequence of characters that is used to encode the\n integer value.\n\n @return: the next character of the object's key encoded with the\n specified base.\n \"\"\"", "base_length", "=", "len", "(", "base", ")", "while", "True", ":", "yield", "base", "[", "value", "%", "base_length", "]", "if", "value", "<", "base_length", ":", "break", "value", "/=", "base_length", "return", "''", ".", "join", "(", "[", "c", "for", "c", "in", "key_sequence_generator", "(", "value", ",", "base", ")", "]", ")" ]
32.28125
18.71875
def _publish(self, obj): ''' Publish the OC object. ''' bin_obj = umsgpack.packb(obj) self.pub.send(bin_obj)
[ "def", "_publish", "(", "self", ",", "obj", ")", ":", "bin_obj", "=", "umsgpack", ".", "packb", "(", "obj", ")", "self", ".", "pub", ".", "send", "(", "bin_obj", ")" ]
23.833333
16.166667
def set_col_width(self, col, tab, width): """Sets column width""" try: old_width = self.col_widths.pop((col, tab)) except KeyError: old_width = None if width is not None: self.col_widths[(col, tab)] = float(width)
[ "def", "set_col_width", "(", "self", ",", "col", ",", "tab", ",", "width", ")", ":", "try", ":", "old_width", "=", "self", ".", "col_widths", ".", "pop", "(", "(", "col", ",", "tab", ")", ")", "except", "KeyError", ":", "old_width", "=", "None", "if", "width", "is", "not", "None", ":", "self", ".", "col_widths", "[", "(", "col", ",", "tab", ")", "]", "=", "float", "(", "width", ")" ]
24.909091
19.727273
def refresh_token(token_service, refresh_token, client_id, client_secret): """Refreshes a token.""" data = { 'client_id': client_id, 'client_secret': client_secret, 'refresh_token': refresh_token, 'grant_type': 'refresh_token', } resp = requests.post(token_service, data) print resp, 'refreshing', resp.json() return resp.json()
[ "def", "refresh_token", "(", "token_service", ",", "refresh_token", ",", "client_id", ",", "client_secret", ")", ":", "data", "=", "{", "'client_id'", ":", "client_id", ",", "'client_secret'", ":", "client_secret", ",", "'refresh_token'", ":", "refresh_token", ",", "'grant_type'", ":", "'refresh_token'", ",", "}", "resp", "=", "requests", ".", "post", "(", "token_service", ",", "data", ")", "print", "resp", ",", "'refreshing'", ",", "resp", ".", "json", "(", ")", "return", "resp", ".", "json", "(", ")" ]
31.454545
14
def load_to(self, last_level_load): """Set level where to load from.""" assert isinstance(last_level_load, Cache), \ "last_level needs to be a Cache object." assert last_level_load.load_from is None, \ "last_level_load must be a last level cache (.load_from is None)." self.last_level_load = last_level_load
[ "def", "load_to", "(", "self", ",", "last_level_load", ")", ":", "assert", "isinstance", "(", "last_level_load", ",", "Cache", ")", ",", "\"last_level needs to be a Cache object.\"", "assert", "last_level_load", ".", "load_from", "is", "None", ",", "\"last_level_load must be a last level cache (.load_from is None).\"", "self", ".", "last_level_load", "=", "last_level_load" ]
51
12
def my_solid_angle(center, coords): """ Helper method to calculate the solid angle of a set of coords from the center. Args: center: Center to measure solid angle from. coords: List of coords to determine solid angle. Returns: The solid angle. """ o = np.array(center) r = [np.array(c) - o for c in coords] r.append(r[0]) n = [np.cross(r[i + 1], r[i]) for i in range(len(r) - 1)] n.append(np.cross(r[1], r[0])) phi = 0.0 for i in range(len(n) - 1): try: value = math.acos(-np.dot(n[i], n[i + 1]) / (np.linalg.norm(n[i]) * np.linalg.norm(n[i + 1]))) except ValueError: mycos = -np.dot(n[i], n[i + 1]) / (np.linalg.norm(n[i]) * np.linalg.norm(n[i + 1])) if 0.999999999999 < mycos < 1.000000000001: value = math.acos(1.0) elif -0.999999999999 > mycos > -1.000000000001: value = math.acos(-1.0) else: raise SolidAngleError(mycos) phi += value return phi + (3 - len(r)) * math.pi
[ "def", "my_solid_angle", "(", "center", ",", "coords", ")", ":", "o", "=", "np", ".", "array", "(", "center", ")", "r", "=", "[", "np", ".", "array", "(", "c", ")", "-", "o", "for", "c", "in", "coords", "]", "r", ".", "append", "(", "r", "[", "0", "]", ")", "n", "=", "[", "np", ".", "cross", "(", "r", "[", "i", "+", "1", "]", ",", "r", "[", "i", "]", ")", "for", "i", "in", "range", "(", "len", "(", "r", ")", "-", "1", ")", "]", "n", ".", "append", "(", "np", ".", "cross", "(", "r", "[", "1", "]", ",", "r", "[", "0", "]", ")", ")", "phi", "=", "0.0", "for", "i", "in", "range", "(", "len", "(", "n", ")", "-", "1", ")", ":", "try", ":", "value", "=", "math", ".", "acos", "(", "-", "np", ".", "dot", "(", "n", "[", "i", "]", ",", "n", "[", "i", "+", "1", "]", ")", "/", "(", "np", ".", "linalg", ".", "norm", "(", "n", "[", "i", "]", ")", "*", "np", ".", "linalg", ".", "norm", "(", "n", "[", "i", "+", "1", "]", ")", ")", ")", "except", "ValueError", ":", "mycos", "=", "-", "np", ".", "dot", "(", "n", "[", "i", "]", ",", "n", "[", "i", "+", "1", "]", ")", "/", "(", "np", ".", "linalg", ".", "norm", "(", "n", "[", "i", "]", ")", "*", "np", ".", "linalg", ".", "norm", "(", "n", "[", "i", "+", "1", "]", ")", ")", "if", "0.999999999999", "<", "mycos", "<", "1.000000000001", ":", "value", "=", "math", ".", "acos", "(", "1.0", ")", "elif", "-", "0.999999999999", ">", "mycos", ">", "-", "1.000000000001", ":", "value", "=", "math", ".", "acos", "(", "-", "1.0", ")", "else", ":", "raise", "SolidAngleError", "(", "mycos", ")", "phi", "+=", "value", "return", "phi", "+", "(", "3", "-", "len", "(", "r", ")", ")", "*", "math", ".", "pi" ]
32.727273
19.454545
def getrawheader(self, name): """A higher-level interface to getfirstmatchingheader(). Return a string containing the literal text of the header but with the keyword stripped. All leading, trailing and embedded whitespace is kept in the string, however. Return None if the header does not occur. """ lst = self.getfirstmatchingheader(name) if not lst: return None lst[0] = lst[0][len(name) + 1:] return ''.join(lst)
[ "def", "getrawheader", "(", "self", ",", "name", ")", ":", "lst", "=", "self", ".", "getfirstmatchingheader", "(", "name", ")", "if", "not", "lst", ":", "return", "None", "lst", "[", "0", "]", "=", "lst", "[", "0", "]", "[", "len", "(", "name", ")", "+", "1", ":", "]", "return", "''", ".", "join", "(", "lst", ")" ]
35.571429
20.071429
def accel_move_tab_left(self, *args): # TODO KEYBINDINGS ONLY """ Callback to move a tab to the left """ pos = self.get_notebook().get_current_page() if pos != 0: self.move_tab(pos, pos - 1) return True
[ "def", "accel_move_tab_left", "(", "self", ",", "*", "args", ")", ":", "# TODO KEYBINDINGS ONLY", "pos", "=", "self", ".", "get_notebook", "(", ")", ".", "get_current_page", "(", ")", "if", "pos", "!=", "0", ":", "self", ".", "move_tab", "(", "pos", ",", "pos", "-", "1", ")", "return", "True" ]
35.428571
9.428571
def transformToRef(self,ref_wcs,force=False): """ Transform sky coords from ALL chips into X,Y coords in reference WCS. """ if not isinstance(ref_wcs, pywcs.WCS): print(textutil.textbox('Reference WCS not a valid HSTWCS object'), file=sys.stderr) raise ValueError # Need to concatenate catalogs from each input if self.outxy is None or force: outxy = ref_wcs.wcs_world2pix(self.all_radec[0],self.all_radec[1],self.origin) # convert outxy list to a Nx2 array self.outxy = np.column_stack([outxy[0][:,np.newaxis],outxy[1][:,np.newaxis]]) if self.pars['writecat']: catname = self.rootname+"_refxy_catalog.coo" self.write_outxy(catname) self.catalog_names['ref_xy'] = catname
[ "def", "transformToRef", "(", "self", ",", "ref_wcs", ",", "force", "=", "False", ")", ":", "if", "not", "isinstance", "(", "ref_wcs", ",", "pywcs", ".", "WCS", ")", ":", "print", "(", "textutil", ".", "textbox", "(", "'Reference WCS not a valid HSTWCS object'", ")", ",", "file", "=", "sys", ".", "stderr", ")", "raise", "ValueError", "# Need to concatenate catalogs from each input", "if", "self", ".", "outxy", "is", "None", "or", "force", ":", "outxy", "=", "ref_wcs", ".", "wcs_world2pix", "(", "self", ".", "all_radec", "[", "0", "]", ",", "self", ".", "all_radec", "[", "1", "]", ",", "self", ".", "origin", ")", "# convert outxy list to a Nx2 array", "self", ".", "outxy", "=", "np", ".", "column_stack", "(", "[", "outxy", "[", "0", "]", "[", ":", ",", "np", ".", "newaxis", "]", ",", "outxy", "[", "1", "]", "[", ":", ",", "np", ".", "newaxis", "]", "]", ")", "if", "self", ".", "pars", "[", "'writecat'", "]", ":", "catname", "=", "self", ".", "rootname", "+", "\"_refxy_catalog.coo\"", "self", ".", "write_outxy", "(", "catname", ")", "self", ".", "catalog_names", "[", "'ref_xy'", "]", "=", "catname" ]
52.125
14.125
def split_locale(loc): ''' Split a locale specifier. The general format is language[_territory][.codeset][@modifier] [charmap] For example: ca_ES.UTF-8@valencia UTF-8 ''' def split(st, char): ''' Split a string `st` once by `char`; always return a two-element list even if the second element is empty. ''' split_st = st.split(char, 1) if len(split_st) == 1: split_st.append('') return split_st comps = {} work_st, comps['charmap'] = split(loc, ' ') work_st, comps['modifier'] = split(work_st, '@') work_st, comps['codeset'] = split(work_st, '.') comps['language'], comps['territory'] = split(work_st, '_') return comps
[ "def", "split_locale", "(", "loc", ")", ":", "def", "split", "(", "st", ",", "char", ")", ":", "'''\n Split a string `st` once by `char`; always return a two-element list\n even if the second element is empty.\n '''", "split_st", "=", "st", ".", "split", "(", "char", ",", "1", ")", "if", "len", "(", "split_st", ")", "==", "1", ":", "split_st", ".", "append", "(", "''", ")", "return", "split_st", "comps", "=", "{", "}", "work_st", ",", "comps", "[", "'charmap'", "]", "=", "split", "(", "loc", ",", "' '", ")", "work_st", ",", "comps", "[", "'modifier'", "]", "=", "split", "(", "work_st", ",", "'@'", ")", "work_st", ",", "comps", "[", "'codeset'", "]", "=", "split", "(", "work_st", ",", "'.'", ")", "comps", "[", "'language'", "]", ",", "comps", "[", "'territory'", "]", "=", "split", "(", "work_st", ",", "'_'", ")", "return", "comps" ]
27.615385
21.615385
def calculate_transmission(thickness_cm: np.float, atoms_per_cm3: np.float, sigma_b: np.array): """calculate the transmission signal using the formula transmission = exp( - thickness_cm * atoms_per_cm3 * 1e-24 * sigma_b) Parameters: =========== thickness: float (in cm) atoms_per_cm3: float (number of atoms per cm3 of element/isotope) sigma_b: np.array of sigma retrieved from database Returns: ======== transmission array """ miu_per_cm = calculate_linear_attenuation_coefficient(atoms_per_cm3=atoms_per_cm3, sigma_b=sigma_b) transmission = calculate_trans(thickness_cm=thickness_cm, miu_per_cm=miu_per_cm) return miu_per_cm, transmission
[ "def", "calculate_transmission", "(", "thickness_cm", ":", "np", ".", "float", ",", "atoms_per_cm3", ":", "np", ".", "float", ",", "sigma_b", ":", "np", ".", "array", ")", ":", "miu_per_cm", "=", "calculate_linear_attenuation_coefficient", "(", "atoms_per_cm3", "=", "atoms_per_cm3", ",", "sigma_b", "=", "sigma_b", ")", "transmission", "=", "calculate_trans", "(", "thickness_cm", "=", "thickness_cm", ",", "miu_per_cm", "=", "miu_per_cm", ")", "return", "miu_per_cm", ",", "transmission" ]
38.555556
27.055556
def __authenticate(self, ad, username, password): ''' Active Directory auth function :param ad: LDAP connection string ('ldap://server') :param username: username with domain ('user@domain.name') :param password: auth password :return: ldap connection or None if error ''' result = None conn = ldap.initialize(ad) conn.protocol_version = 3 conn.set_option(ldap.OPT_REFERRALS, 0) user = self.__prepare_user_with_domain(username) self.logger.debug("Trying to auth with user '{}' to {}".format(user, ad)) try: conn.simple_bind_s(user, password) result = conn self.username, self.password = username, password self.logger.debug("Successfull login as {}".format(username)) except ldap.INVALID_CREDENTIALS: result = False self.logger.debug(traceback.format_exc()) self.logger.debug("Invalid ldap-creds.") except Exception as ex: # nocv self.logger.debug(traceback.format_exc()) self.logger.debug("Unknown error: {}".format(str(ex))) return result
[ "def", "__authenticate", "(", "self", ",", "ad", ",", "username", ",", "password", ")", ":", "result", "=", "None", "conn", "=", "ldap", ".", "initialize", "(", "ad", ")", "conn", ".", "protocol_version", "=", "3", "conn", ".", "set_option", "(", "ldap", ".", "OPT_REFERRALS", ",", "0", ")", "user", "=", "self", ".", "__prepare_user_with_domain", "(", "username", ")", "self", ".", "logger", ".", "debug", "(", "\"Trying to auth with user '{}' to {}\"", ".", "format", "(", "user", ",", "ad", ")", ")", "try", ":", "conn", ".", "simple_bind_s", "(", "user", ",", "password", ")", "result", "=", "conn", "self", ".", "username", ",", "self", ".", "password", "=", "username", ",", "password", "self", ".", "logger", ".", "debug", "(", "\"Successfull login as {}\"", ".", "format", "(", "username", ")", ")", "except", "ldap", ".", "INVALID_CREDENTIALS", ":", "result", "=", "False", "self", ".", "logger", ".", "debug", "(", "traceback", ".", "format_exc", "(", ")", ")", "self", ".", "logger", ".", "debug", "(", "\"Invalid ldap-creds.\"", ")", "except", "Exception", "as", "ex", ":", "# nocv", "self", ".", "logger", ".", "debug", "(", "traceback", ".", "format_exc", "(", ")", ")", "self", ".", "logger", ".", "debug", "(", "\"Unknown error: {}\"", ".", "format", "(", "str", "(", "ex", ")", ")", ")", "return", "result" ]
39.965517
17.275862
def tf_step(self, time, variables, **kwargs): """ Keyword Args: global_variables: List of global variables to apply the proposed optimization step to. Returns: List of delta tensors corresponding to the updates for each optimized variable. """ global_variables = kwargs["global_variables"] assert all( util.shape(global_variable) == util.shape(local_variable) for global_variable, local_variable in zip(global_variables, variables) ) local_deltas = self.optimizer.step(time=time, variables=variables, **kwargs) with tf.control_dependencies(control_inputs=local_deltas): applied = self.optimizer.apply_step(variables=global_variables, deltas=local_deltas) with tf.control_dependencies(control_inputs=(applied,)): update_deltas = list() for global_variable, local_variable in zip(global_variables, variables): delta = global_variable - local_variable update_deltas.append(delta) applied = self.apply_step(variables=variables, deltas=update_deltas) # TODO: Update time, episode, etc (like in Synchronization)? with tf.control_dependencies(control_inputs=(applied,)): return [local_delta + update_delta for local_delta, update_delta in zip(local_deltas, update_deltas)]
[ "def", "tf_step", "(", "self", ",", "time", ",", "variables", ",", "*", "*", "kwargs", ")", ":", "global_variables", "=", "kwargs", "[", "\"global_variables\"", "]", "assert", "all", "(", "util", ".", "shape", "(", "global_variable", ")", "==", "util", ".", "shape", "(", "local_variable", ")", "for", "global_variable", ",", "local_variable", "in", "zip", "(", "global_variables", ",", "variables", ")", ")", "local_deltas", "=", "self", ".", "optimizer", ".", "step", "(", "time", "=", "time", ",", "variables", "=", "variables", ",", "*", "*", "kwargs", ")", "with", "tf", ".", "control_dependencies", "(", "control_inputs", "=", "local_deltas", ")", ":", "applied", "=", "self", ".", "optimizer", ".", "apply_step", "(", "variables", "=", "global_variables", ",", "deltas", "=", "local_deltas", ")", "with", "tf", ".", "control_dependencies", "(", "control_inputs", "=", "(", "applied", ",", ")", ")", ":", "update_deltas", "=", "list", "(", ")", "for", "global_variable", ",", "local_variable", "in", "zip", "(", "global_variables", ",", "variables", ")", ":", "delta", "=", "global_variable", "-", "local_variable", "update_deltas", ".", "append", "(", "delta", ")", "applied", "=", "self", ".", "apply_step", "(", "variables", "=", "variables", ",", "deltas", "=", "update_deltas", ")", "# TODO: Update time, episode, etc (like in Synchronization)?", "with", "tf", ".", "control_dependencies", "(", "control_inputs", "=", "(", "applied", ",", ")", ")", ":", "return", "[", "local_delta", "+", "update_delta", "for", "local_delta", ",", "update_delta", "in", "zip", "(", "local_deltas", ",", "update_deltas", ")", "]" ]
41.878788
31.575758
def create_config_(self, index=0, update=False): """Create config file. Create config file in :attr:`config_files_[index]`. Parameters: index(int): index of config file. update (bool): if set to True and :attr:`config_files_` already exists, its content is read and all the options it sets are kept in the produced config file. """ if not self.config_files_[index:]: return path = self.config_files_[index] if not path.parent.exists(): path.parent.mkdir(parents=True) conf_dict = {} for section in self.sections_(): conf_opts = [o for o, m in self[section].defaults_() if m.conf_arg] if not conf_opts: continue conf_dict[section] = {} for opt in conf_opts: conf_dict[section][opt] = (self[section][opt] if update else self[section].def_[opt].default) with path.open('w') as cfile: toml.dump(conf_dict, cfile)
[ "def", "create_config_", "(", "self", ",", "index", "=", "0", ",", "update", "=", "False", ")", ":", "if", "not", "self", ".", "config_files_", "[", "index", ":", "]", ":", "return", "path", "=", "self", ".", "config_files_", "[", "index", "]", "if", "not", "path", ".", "parent", ".", "exists", "(", ")", ":", "path", ".", "parent", ".", "mkdir", "(", "parents", "=", "True", ")", "conf_dict", "=", "{", "}", "for", "section", "in", "self", ".", "sections_", "(", ")", ":", "conf_opts", "=", "[", "o", "for", "o", ",", "m", "in", "self", "[", "section", "]", ".", "defaults_", "(", ")", "if", "m", ".", "conf_arg", "]", "if", "not", "conf_opts", ":", "continue", "conf_dict", "[", "section", "]", "=", "{", "}", "for", "opt", "in", "conf_opts", ":", "conf_dict", "[", "section", "]", "[", "opt", "]", "=", "(", "self", "[", "section", "]", "[", "opt", "]", "if", "update", "else", "self", "[", "section", "]", ".", "def_", "[", "opt", "]", ".", "default", ")", "with", "path", ".", "open", "(", "'w'", ")", "as", "cfile", ":", "toml", ".", "dump", "(", "conf_dict", ",", "cfile", ")" ]
39.962963
15.333333
def cli(*args, **kwargs): """ CSVtoTable commandline utility. """ # Convert CSV file content = convert.convert(kwargs["input_file"], **kwargs) # Serve the temporary file in browser. if kwargs["serve"]: convert.serve(content) # Write to output file elif kwargs["output_file"]: # Check if file can be overwrite if (not kwargs["overwrite"] and not prompt_overwrite(kwargs["output_file"])): raise click.Abort() convert.save(kwargs["output_file"], content) click.secho("File converted successfully: {}".format( kwargs["output_file"]), fg="green") else: # If its not server and output file is missing then raise error raise click.BadOptionUsage("Missing argument \"output_file\".")
[ "def", "cli", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# Convert CSV file", "content", "=", "convert", ".", "convert", "(", "kwargs", "[", "\"input_file\"", "]", ",", "*", "*", "kwargs", ")", "# Serve the temporary file in browser.", "if", "kwargs", "[", "\"serve\"", "]", ":", "convert", ".", "serve", "(", "content", ")", "# Write to output file", "elif", "kwargs", "[", "\"output_file\"", "]", ":", "# Check if file can be overwrite", "if", "(", "not", "kwargs", "[", "\"overwrite\"", "]", "and", "not", "prompt_overwrite", "(", "kwargs", "[", "\"output_file\"", "]", ")", ")", ":", "raise", "click", ".", "Abort", "(", ")", "convert", ".", "save", "(", "kwargs", "[", "\"output_file\"", "]", ",", "content", ")", "click", ".", "secho", "(", "\"File converted successfully: {}\"", ".", "format", "(", "kwargs", "[", "\"output_file\"", "]", ")", ",", "fg", "=", "\"green\"", ")", "else", ":", "# If its not server and output file is missing then raise error", "raise", "click", ".", "BadOptionUsage", "(", "\"Missing argument \\\"output_file\\\".\"", ")" ]
34.391304
15.434783
def process_log_group(config): """CLI - Replay / Index """ from c7n.credentials import SessionFactory factory = SessionFactory( config.region, config.profile, assume_role=config.role) session = factory() client = session.client('logs') params = dict(logGroupName=config.log_group, filterPattern='Traceback', interleaved=True) if config.log_streams: params['logStreamNames'] = config.log_streams if config.start: params['startTime'] = int(time.mktime( parse_date(config.start).replace( hour=0, minute=0, second=0, microsecond=0).timetuple()) * 1000) if config.end: params['endTime'] = int(time.mktime( parse_date(config.end).replace( hour=0, minute=0, second=0, microsecond=0).timetuple()) * 1000) settings = dict(account_id=config.account_id, account_name=config.account_name) paginator = client.get_paginator('filter_log_events') event_count = 0 log.debug("Querying log events with %s", params) for p in paginator.paginate(**params): # log.debug("Searched streams\n %s", ", ".join( # [s['logStreamName'] for s in p['searchedLogStreams']])) for e in p['events']: event_count += 1 msg = get_sentry_message( settings, {'logEvents': [e], 'logStream': e['logStreamName'], 'logGroup': config.log_group}, client) if msg is None: continue send_sentry_message(config.sentry_dsn, msg) if event_count > 0: log.info("Processed %s %d error events", config.account_name, event_count)
[ "def", "process_log_group", "(", "config", ")", ":", "from", "c7n", ".", "credentials", "import", "SessionFactory", "factory", "=", "SessionFactory", "(", "config", ".", "region", ",", "config", ".", "profile", ",", "assume_role", "=", "config", ".", "role", ")", "session", "=", "factory", "(", ")", "client", "=", "session", ".", "client", "(", "'logs'", ")", "params", "=", "dict", "(", "logGroupName", "=", "config", ".", "log_group", ",", "filterPattern", "=", "'Traceback'", ",", "interleaved", "=", "True", ")", "if", "config", ".", "log_streams", ":", "params", "[", "'logStreamNames'", "]", "=", "config", ".", "log_streams", "if", "config", ".", "start", ":", "params", "[", "'startTime'", "]", "=", "int", "(", "time", ".", "mktime", "(", "parse_date", "(", "config", ".", "start", ")", ".", "replace", "(", "hour", "=", "0", ",", "minute", "=", "0", ",", "second", "=", "0", ",", "microsecond", "=", "0", ")", ".", "timetuple", "(", ")", ")", "*", "1000", ")", "if", "config", ".", "end", ":", "params", "[", "'endTime'", "]", "=", "int", "(", "time", ".", "mktime", "(", "parse_date", "(", "config", ".", "end", ")", ".", "replace", "(", "hour", "=", "0", ",", "minute", "=", "0", ",", "second", "=", "0", ",", "microsecond", "=", "0", ")", ".", "timetuple", "(", ")", ")", "*", "1000", ")", "settings", "=", "dict", "(", "account_id", "=", "config", ".", "account_id", ",", "account_name", "=", "config", ".", "account_name", ")", "paginator", "=", "client", ".", "get_paginator", "(", "'filter_log_events'", ")", "event_count", "=", "0", "log", ".", "debug", "(", "\"Querying log events with %s\"", ",", "params", ")", "for", "p", "in", "paginator", ".", "paginate", "(", "*", "*", "params", ")", ":", "# log.debug(\"Searched streams\\n %s\", \", \".join(", "# [s['logStreamName'] for s in p['searchedLogStreams']]))", "for", "e", "in", "p", "[", "'events'", "]", ":", "event_count", "+=", "1", "msg", "=", "get_sentry_message", "(", "settings", ",", "{", "'logEvents'", ":", "[", 
"e", "]", ",", "'logStream'", ":", "e", "[", "'logStreamName'", "]", ",", "'logGroup'", ":", "config", ".", "log_group", "}", ",", "client", ")", "if", "msg", "is", "None", ":", "continue", "send_sentry_message", "(", "config", ".", "sentry_dsn", ",", "msg", ")", "if", "event_count", ">", "0", ":", "log", ".", "info", "(", "\"Processed %s %d error events\"", ",", "config", ".", "account_name", ",", "event_count", ")" ]
37.577778
17.8
def confab_conformers(self, forcefield="mmff94", freeze_atoms=None, rmsd_cutoff=0.5, energy_cutoff=50.0, conf_cutoff=100000, verbose=False): """ Conformer generation based on Confab to generate all diverse low-energy conformers for molecules. This is different from rotor_conformer or gen3d_conformer as it aims to not simply to find a low energy conformation but to generate several different conformations. Args: forcefield (str): Default is mmff94. Options are 'gaff', 'ghemical', 'mmff94', 'mmff94s', and 'uff'. freeze_atoms ([int]): index of atoms to be freezed when performing conformer search, default is None. rmsd_cutoff (float): rmsd_cufoff, default is 0.5 Angstrom. energy_cutoff (float): energy_cutoff, default is 50.0 kcal/mol. conf_cutoff (float): max number of conformers to test, default is 1 million. verbose (bool): whether to display information on torsions found, default is False. Returns: (list): list of pymatgen Molecule objects for generated conformers. """ if self._obmol.GetDimension() != 3: self.make3d() else: self.add_hydrogen() ff = ob.OBForceField_FindType(forcefield) if ff == 0: print("Could not find forcefield {} in openbabel, the forcefield " "will be reset as default 'mmff94'".format(forcefield)) ff = ob.OBForceField_FindType("mmff94") if freeze_atoms: print('{} atoms will be freezed'.format(len(freeze_atoms))) constraints = ob.OBFFConstraints() for atom in ob.OBMolAtomIter(self._obmol): atom_id = atom.GetIndex() + 1 if id in freeze_atoms: constraints.AddAtomConstraint(atom_id) ff.SetConstraints(constraints) # Confab conformer generation ff.DiverseConfGen(rmsd_cutoff, conf_cutoff, energy_cutoff, verbose) ff.GetConformers(self._obmol) # Number of conformers generated by Confab conformer generation conformer_num = self._obmol.NumConformers() conformers = [] for i in range(conformer_num): self._obmol.SetConformer(i) conformer = copy.deepcopy(BabelMolAdaptor(self._obmol).pymatgen_mol) conformers.append(conformer) self._obmol.SetConformer(0) return 
conformers
[ "def", "confab_conformers", "(", "self", ",", "forcefield", "=", "\"mmff94\"", ",", "freeze_atoms", "=", "None", ",", "rmsd_cutoff", "=", "0.5", ",", "energy_cutoff", "=", "50.0", ",", "conf_cutoff", "=", "100000", ",", "verbose", "=", "False", ")", ":", "if", "self", ".", "_obmol", ".", "GetDimension", "(", ")", "!=", "3", ":", "self", ".", "make3d", "(", ")", "else", ":", "self", ".", "add_hydrogen", "(", ")", "ff", "=", "ob", ".", "OBForceField_FindType", "(", "forcefield", ")", "if", "ff", "==", "0", ":", "print", "(", "\"Could not find forcefield {} in openbabel, the forcefield \"", "\"will be reset as default 'mmff94'\"", ".", "format", "(", "forcefield", ")", ")", "ff", "=", "ob", ".", "OBForceField_FindType", "(", "\"mmff94\"", ")", "if", "freeze_atoms", ":", "print", "(", "'{} atoms will be freezed'", ".", "format", "(", "len", "(", "freeze_atoms", ")", ")", ")", "constraints", "=", "ob", ".", "OBFFConstraints", "(", ")", "for", "atom", "in", "ob", ".", "OBMolAtomIter", "(", "self", ".", "_obmol", ")", ":", "atom_id", "=", "atom", ".", "GetIndex", "(", ")", "+", "1", "if", "id", "in", "freeze_atoms", ":", "constraints", ".", "AddAtomConstraint", "(", "atom_id", ")", "ff", ".", "SetConstraints", "(", "constraints", ")", "# Confab conformer generation", "ff", ".", "DiverseConfGen", "(", "rmsd_cutoff", ",", "conf_cutoff", ",", "energy_cutoff", ",", "verbose", ")", "ff", ".", "GetConformers", "(", "self", ".", "_obmol", ")", "# Number of conformers generated by Confab conformer generation", "conformer_num", "=", "self", ".", "_obmol", ".", "NumConformers", "(", ")", "conformers", "=", "[", "]", "for", "i", "in", "range", "(", "conformer_num", ")", ":", "self", ".", "_obmol", ".", "SetConformer", "(", "i", ")", "conformer", "=", "copy", ".", "deepcopy", "(", "BabelMolAdaptor", "(", "self", ".", "_obmol", ")", ".", "pymatgen_mol", ")", "conformers", ".", "append", "(", "conformer", ")", "self", ".", "_obmol", ".", "SetConformer", "(", "0", ")", "return", 
"conformers" ]
42.7
21.1