text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def create_or_update(sender, **kwargs): """ Create or update an Activity Monitor item from some instance. """ now = datetime.datetime.now() # I can't explain why this import fails unless it's here. from activity_monitor.models import Activity instance = kwargs['instance'] # Find this object's content type and model class. instance_content_type = ContentType.objects.get_for_model(sender) instance_model = sender content_object = instance_model.objects.get(id=instance.id) # check to see if the activity already exists. Will need later. try: activity = Activity.objects.get(content_type=instance_content_type, object_id=content_object.id) except: activity = None # We now know the content type, the model (sender), content type and content object. # We need to loop through ACTIVITY_MONITOR_MODELS in settings for other fields for activity_setting in settings.ACTIVITY_MONITOR_MODELS: this_app_label = activity_setting['model'].split('.')[0] this_model_label = activity_setting['model'].split('.')[1] this_content_type = ContentType.objects.get(app_label=this_app_label, model=this_model_label) if this_content_type == instance_content_type: # first, check to see if we even WANT to register this activity. # use the boolean 'check' field. Also, delete if needed. if 'check' in activity_setting: if getattr(instance, activity_setting['check']) is False: if activity: activity.delete() return # does it use the default manager (objects) or a custom manager? try: manager = activity_setting['manager'] except: manager = 'objects' # what field denotes the activity time? 
created is default try: timestamp = getattr(instance, activity_setting['date_field']) except: timestamp = getattr(instance, 'created') # if the given time stamp is a daterather than datetime type, # normalize it out to a datetime if type(timestamp) == type(now): clean_timestamp = timestamp else: clean_timestamp = datetime.datetime.combine(timestamp, datetime.time()) # Find a valid user object if 'user_field' in activity_setting: # pull the user object from instance using user_field user = getattr(instance, activity_setting['user_field']) elif this_model_label == 'user' or this_model_label == 'profile': # this IS auth.user or a Django 1.5 custom user user = instance else: # we didn't specify a user, so it must be instance.user user = instance.user # BAIL-OUT CHECKS # Determine all the reasons we would want to bail out. # Make sure it's not a future item, like a future-published blog entry. if clean_timestamp > now: return # or some really old content that was just re-saved for some reason if clean_timestamp < (now - datetime.timedelta(days=3)): return # or there's not a user object if not user: return # or the user is god or staff, and we're filtering out, don't add to monitor if user.is_superuser and 'filter_superuser' in activity_setting: return if user.is_staff and 'filter_staff' in activity_setting: return # build a default string representation # note that each activity can get back to the object via get_absolute_url() verb = activity_setting.get('verb', None) override_string = activity_setting.get('override_string', None) # MANAGER CHECK # Make sure the item "should" be registered, based on the manager argument. # If InstanceModel.manager.all() includes this item, then register. Otherwise, return. # Also, check to see if it should be deleted. 
try: getattr(instance_model, manager).get(pk=instance.pk) except instance_model.DoesNotExist: try: activity.delete() return except Activity.DoesNotExist: return if user and clean_timestamp and instance: if not activity: # If the activity didn't already exist, create it. activity = Activity( actor = user, content_type = instance_content_type, object_id = content_object.id, content_object = content_object, timestamp = clean_timestamp, verb = verb, override_string = override_string, ) activity.save() return activity
[ "def", "create_or_update", "(", "sender", ",", "*", "*", "kwargs", ")", ":", "now", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "# I can't explain why this import fails unless it's here.", "from", "activity_monitor", ".", "models", "import", "Activity", "instance", "=", "kwargs", "[", "'instance'", "]", "# Find this object's content type and model class.", "instance_content_type", "=", "ContentType", ".", "objects", ".", "get_for_model", "(", "sender", ")", "instance_model", "=", "sender", "content_object", "=", "instance_model", ".", "objects", ".", "get", "(", "id", "=", "instance", ".", "id", ")", "# check to see if the activity already exists. Will need later.", "try", ":", "activity", "=", "Activity", ".", "objects", ".", "get", "(", "content_type", "=", "instance_content_type", ",", "object_id", "=", "content_object", ".", "id", ")", "except", ":", "activity", "=", "None", "# We now know the content type, the model (sender), content type and content object.", "# We need to loop through ACTIVITY_MONITOR_MODELS in settings for other fields", "for", "activity_setting", "in", "settings", ".", "ACTIVITY_MONITOR_MODELS", ":", "this_app_label", "=", "activity_setting", "[", "'model'", "]", ".", "split", "(", "'.'", ")", "[", "0", "]", "this_model_label", "=", "activity_setting", "[", "'model'", "]", ".", "split", "(", "'.'", ")", "[", "1", "]", "this_content_type", "=", "ContentType", ".", "objects", ".", "get", "(", "app_label", "=", "this_app_label", ",", "model", "=", "this_model_label", ")", "if", "this_content_type", "==", "instance_content_type", ":", "# first, check to see if we even WANT to register this activity.", "# use the boolean 'check' field. 
Also, delete if needed.", "if", "'check'", "in", "activity_setting", ":", "if", "getattr", "(", "instance", ",", "activity_setting", "[", "'check'", "]", ")", "is", "False", ":", "if", "activity", ":", "activity", ".", "delete", "(", ")", "return", "# does it use the default manager (objects) or a custom manager?", "try", ":", "manager", "=", "activity_setting", "[", "'manager'", "]", "except", ":", "manager", "=", "'objects'", "# what field denotes the activity time? created is default", "try", ":", "timestamp", "=", "getattr", "(", "instance", ",", "activity_setting", "[", "'date_field'", "]", ")", "except", ":", "timestamp", "=", "getattr", "(", "instance", ",", "'created'", ")", "# if the given time stamp is a daterather than datetime type,", "# normalize it out to a datetime", "if", "type", "(", "timestamp", ")", "==", "type", "(", "now", ")", ":", "clean_timestamp", "=", "timestamp", "else", ":", "clean_timestamp", "=", "datetime", ".", "datetime", ".", "combine", "(", "timestamp", ",", "datetime", ".", "time", "(", ")", ")", "# Find a valid user object", "if", "'user_field'", "in", "activity_setting", ":", "# pull the user object from instance using user_field", "user", "=", "getattr", "(", "instance", ",", "activity_setting", "[", "'user_field'", "]", ")", "elif", "this_model_label", "==", "'user'", "or", "this_model_label", "==", "'profile'", ":", "# this IS auth.user or a Django 1.5 custom user", "user", "=", "instance", "else", ":", "# we didn't specify a user, so it must be instance.user", "user", "=", "instance", ".", "user", "# BAIL-OUT CHECKS", "# Determine all the reasons we would want to bail out.", "# Make sure it's not a future item, like a future-published blog entry.", "if", "clean_timestamp", ">", "now", ":", "return", "# or some really old content that was just re-saved for some reason", "if", "clean_timestamp", "<", "(", "now", "-", "datetime", ".", "timedelta", "(", "days", "=", "3", ")", ")", ":", "return", "# or there's not a user 
object", "if", "not", "user", ":", "return", "# or the user is god or staff, and we're filtering out, don't add to monitor", "if", "user", ".", "is_superuser", "and", "'filter_superuser'", "in", "activity_setting", ":", "return", "if", "user", ".", "is_staff", "and", "'filter_staff'", "in", "activity_setting", ":", "return", "# build a default string representation", "# note that each activity can get back to the object via get_absolute_url()", "verb", "=", "activity_setting", ".", "get", "(", "'verb'", ",", "None", ")", "override_string", "=", "activity_setting", ".", "get", "(", "'override_string'", ",", "None", ")", "# MANAGER CHECK", "# Make sure the item \"should\" be registered, based on the manager argument.", "# If InstanceModel.manager.all() includes this item, then register. Otherwise, return.", "# Also, check to see if it should be deleted.", "try", ":", "getattr", "(", "instance_model", ",", "manager", ")", ".", "get", "(", "pk", "=", "instance", ".", "pk", ")", "except", "instance_model", ".", "DoesNotExist", ":", "try", ":", "activity", ".", "delete", "(", ")", "return", "except", "Activity", ".", "DoesNotExist", ":", "return", "if", "user", "and", "clean_timestamp", "and", "instance", ":", "if", "not", "activity", ":", "# If the activity didn't already exist, create it.", "activity", "=", "Activity", "(", "actor", "=", "user", ",", "content_type", "=", "instance_content_type", ",", "object_id", "=", "content_object", ".", "id", ",", "content_object", "=", "content_object", ",", "timestamp", "=", "clean_timestamp", ",", "verb", "=", "verb", ",", "override_string", "=", "override_string", ",", ")", "activity", ".", "save", "(", ")", "return", "activity" ]
43.061947
24.053097
def add_select(self, *column): """ Add a new select column to query :param column: The column to add :type column: str :return: The current QueryBuilder instance :rtype: QueryBuilder """ if not column: column = [] self.columns += list(column) return self
[ "def", "add_select", "(", "self", ",", "*", "column", ")", ":", "if", "not", "column", ":", "column", "=", "[", "]", "self", ".", "columns", "+=", "list", "(", "column", ")", "return", "self" ]
20.9375
16.6875
def delete_list_item(self, item_id): """ Delete an existing list item :param item_id: Id of the item to be delted """ url = self.build_url(self._endpoints.get('get_item_by_id').format(item_id=item_id)) response = self.con.delete(url) return bool(response)
[ "def", "delete_list_item", "(", "self", ",", "item_id", ")", ":", "url", "=", "self", ".", "build_url", "(", "self", ".", "_endpoints", ".", "get", "(", "'get_item_by_id'", ")", ".", "format", "(", "item_id", "=", "item_id", ")", ")", "response", "=", "self", ".", "con", ".", "delete", "(", "url", ")", "return", "bool", "(", "response", ")" ]
27
21.636364
def __driver_completer(self, toks, text, state): """Driver level completer. Arguments: toks: A list of tokens, tokenized from the original input line. text: A string, the text to be replaced if a completion candidate is chosen. state: An integer, the index of the candidate out of the list of candidates. Returns: A string, the candidate. """ if state != 0: return self.__completion_candidates[state] # Update the cache when this method is first called, i.e., state == 0. # If the line is empty or the user is still inputing the first token, # complete with available commands. if not toks or (len(toks) == 1 and text == toks[0]): try: self.__completion_candidates = self.__complete_cmds(text) except: self.stderr.write('\n') self.stderr.write(traceback.format_exc()) self.__completion_candidates = [] return self.__completion_candidates[state] # Otherwise, try to complete with the registered completer method. cmd = toks[0] args = toks[1:] if len(toks) > 1 else None if text and args: del args[-1] if cmd in self._completer_map.keys(): completer_name = self._completer_map[cmd] completer_method = getattr(self, completer_name) try: self.__completion_candidates = completer_method(cmd, args, text) except: self.stderr.write('\n') self.stderr.write(traceback.format_exc()) self.__completion_candidates = [] else: self.__completion_candidates = [] return self.__completion_candidates[state]
[ "def", "__driver_completer", "(", "self", ",", "toks", ",", "text", ",", "state", ")", ":", "if", "state", "!=", "0", ":", "return", "self", ".", "__completion_candidates", "[", "state", "]", "# Update the cache when this method is first called, i.e., state == 0.", "# If the line is empty or the user is still inputing the first token,", "# complete with available commands.", "if", "not", "toks", "or", "(", "len", "(", "toks", ")", "==", "1", "and", "text", "==", "toks", "[", "0", "]", ")", ":", "try", ":", "self", ".", "__completion_candidates", "=", "self", ".", "__complete_cmds", "(", "text", ")", "except", ":", "self", ".", "stderr", ".", "write", "(", "'\\n'", ")", "self", ".", "stderr", ".", "write", "(", "traceback", ".", "format_exc", "(", ")", ")", "self", ".", "__completion_candidates", "=", "[", "]", "return", "self", ".", "__completion_candidates", "[", "state", "]", "# Otherwise, try to complete with the registered completer method.", "cmd", "=", "toks", "[", "0", "]", "args", "=", "toks", "[", "1", ":", "]", "if", "len", "(", "toks", ")", ">", "1", "else", "None", "if", "text", "and", "args", ":", "del", "args", "[", "-", "1", "]", "if", "cmd", "in", "self", ".", "_completer_map", ".", "keys", "(", ")", ":", "completer_name", "=", "self", ".", "_completer_map", "[", "cmd", "]", "completer_method", "=", "getattr", "(", "self", ",", "completer_name", ")", "try", ":", "self", ".", "__completion_candidates", "=", "completer_method", "(", "cmd", ",", "args", ",", "text", ")", "except", ":", "self", ".", "stderr", ".", "write", "(", "'\\n'", ")", "self", ".", "stderr", ".", "write", "(", "traceback", ".", "format_exc", "(", ")", ")", "self", ".", "__completion_candidates", "=", "[", "]", "else", ":", "self", ".", "__completion_candidates", "=", "[", "]", "return", "self", ".", "__completion_candidates", "[", "state", "]" ]
37.604167
21.125
def import_url(self, url=None, force=None): """ Read a list of host entries from a URL, convert them into instances of HostsEntry and then append to the list of entries in Hosts :param url: The URL of where to download a hosts file :return: Counts reflecting the attempted additions """ file_contents = self.get_hosts_by_url(url=url).decode('utf-8') file_contents = file_contents.rstrip().replace('^M', '\n') file_contents = file_contents.rstrip().replace('\r\n', '\n') lines = file_contents.split('\n') skipped = 0 import_entries = [] for line in lines: stripped_entry = line.strip() if (not stripped_entry) or (stripped_entry.startswith('#')): skipped += 1 else: line = line.partition('#')[0] line = line.rstrip() import_entry = HostsEntry.str_to_hostentry(line) if import_entry: import_entries.append(import_entry) add_result = self.add(entries=import_entries, force=force) write_result = self.write() return {'result': 'success', 'skipped': skipped, 'add_result': add_result, 'write_result': write_result}
[ "def", "import_url", "(", "self", ",", "url", "=", "None", ",", "force", "=", "None", ")", ":", "file_contents", "=", "self", ".", "get_hosts_by_url", "(", "url", "=", "url", ")", ".", "decode", "(", "'utf-8'", ")", "file_contents", "=", "file_contents", ".", "rstrip", "(", ")", ".", "replace", "(", "'^M'", ",", "'\\n'", ")", "file_contents", "=", "file_contents", ".", "rstrip", "(", ")", ".", "replace", "(", "'\\r\\n'", ",", "'\\n'", ")", "lines", "=", "file_contents", ".", "split", "(", "'\\n'", ")", "skipped", "=", "0", "import_entries", "=", "[", "]", "for", "line", "in", "lines", ":", "stripped_entry", "=", "line", ".", "strip", "(", ")", "if", "(", "not", "stripped_entry", ")", "or", "(", "stripped_entry", ".", "startswith", "(", "'#'", ")", ")", ":", "skipped", "+=", "1", "else", ":", "line", "=", "line", ".", "partition", "(", "'#'", ")", "[", "0", "]", "line", "=", "line", ".", "rstrip", "(", ")", "import_entry", "=", "HostsEntry", ".", "str_to_hostentry", "(", "line", ")", "if", "import_entry", ":", "import_entries", ".", "append", "(", "import_entry", ")", "add_result", "=", "self", ".", "add", "(", "entries", "=", "import_entries", ",", "force", "=", "force", ")", "write_result", "=", "self", ".", "write", "(", ")", "return", "{", "'result'", ":", "'success'", ",", "'skipped'", ":", "skipped", ",", "'add_result'", ":", "add_result", ",", "'write_result'", ":", "write_result", "}" ]
44.586207
14.103448
def solve_simple_captcha(self, pathfile=None, filedata=None, filename=None): """ Upload a image (from disk or a bytearray), and then block until the captcha has been solved. Return value is the captcha result. either pathfile OR filedata should be specified. Filename is ignored (and is only kept for compatibility with the 2captcha solver interface) Failure will result in a subclass of WebRequest.CaptchaSolverFailure being thrown. """ if pathfile and os.path.exists(pathfile): fp = open(pathfile, 'rb') elif filedata: fp = io.BytesIO(filedata) else: raise ValueError("You must pass either a valid file path, or a bytes array containing the captcha image!") try: task = python_anticaptcha.ImageToTextTask(fp) job = self.client.createTask(task) job.join(maximum_time = self.waittime) return job.get_captcha_text() except python_anticaptcha.AnticaptchaException as e: raise exc.CaptchaSolverFailure("Failure solving captcha: %s, %s, %s" % ( e.error_id, e.error_code, e.error_description, ))
[ "def", "solve_simple_captcha", "(", "self", ",", "pathfile", "=", "None", ",", "filedata", "=", "None", ",", "filename", "=", "None", ")", ":", "if", "pathfile", "and", "os", ".", "path", ".", "exists", "(", "pathfile", ")", ":", "fp", "=", "open", "(", "pathfile", ",", "'rb'", ")", "elif", "filedata", ":", "fp", "=", "io", ".", "BytesIO", "(", "filedata", ")", "else", ":", "raise", "ValueError", "(", "\"You must pass either a valid file path, or a bytes array containing the captcha image!\"", ")", "try", ":", "task", "=", "python_anticaptcha", ".", "ImageToTextTask", "(", "fp", ")", "job", "=", "self", ".", "client", ".", "createTask", "(", "task", ")", "job", ".", "join", "(", "maximum_time", "=", "self", ".", "waittime", ")", "return", "job", ".", "get_captcha_text", "(", ")", "except", "python_anticaptcha", ".", "AnticaptchaException", "as", "e", ":", "raise", "exc", ".", "CaptchaSolverFailure", "(", "\"Failure solving captcha: %s, %s, %s\"", "%", "(", "e", ".", "error_id", ",", "e", ".", "error_code", ",", "e", ".", "error_description", ",", ")", ")" ]
30.441176
23.970588
def init_app(self, app, url='/hooks'): """Register the URL route to the application. :param app: the optional :class:`~flask.Flask` instance to register the extension :param url: the url that events will be posted to """ app.config.setdefault('VALIDATE_IP', True) app.config.setdefault('VALIDATE_SIGNATURE', True) @app.route(url, methods=['POST']) def hook(): if app.config['VALIDATE_IP']: if not is_github_ip(request.remote_addr): raise Forbidden('Requests must originate from GitHub') if app.config['VALIDATE_SIGNATURE']: key = app.config.get('GITHUB_WEBHOOKS_KEY', app.secret_key) signature = request.headers.get('X-Hub-Signature') if hasattr(request, 'get_data'): # Werkzeug >= 0.9 payload = request.get_data() else: payload = request.data if not signature: raise BadRequest('Missing signature') if not check_signature(signature, key, payload): raise BadRequest('Wrong signature') event = request.headers.get('X-GitHub-Event') guid = request.headers.get('X-GitHub-Delivery') if not event: raise BadRequest('Missing header: X-GitHub-Event') elif not guid: raise BadRequest('Missing header: X-GitHub-Delivery') if hasattr(request, 'get_json'): # Flask >= 0.10 data = request.get_json() else: data = request.json if event in self._hooks: return self._hooks[event](data, guid) else: return 'Hook not used\n'
[ "def", "init_app", "(", "self", ",", "app", ",", "url", "=", "'/hooks'", ")", ":", "app", ".", "config", ".", "setdefault", "(", "'VALIDATE_IP'", ",", "True", ")", "app", ".", "config", ".", "setdefault", "(", "'VALIDATE_SIGNATURE'", ",", "True", ")", "@", "app", ".", "route", "(", "url", ",", "methods", "=", "[", "'POST'", "]", ")", "def", "hook", "(", ")", ":", "if", "app", ".", "config", "[", "'VALIDATE_IP'", "]", ":", "if", "not", "is_github_ip", "(", "request", ".", "remote_addr", ")", ":", "raise", "Forbidden", "(", "'Requests must originate from GitHub'", ")", "if", "app", ".", "config", "[", "'VALIDATE_SIGNATURE'", "]", ":", "key", "=", "app", ".", "config", ".", "get", "(", "'GITHUB_WEBHOOKS_KEY'", ",", "app", ".", "secret_key", ")", "signature", "=", "request", ".", "headers", ".", "get", "(", "'X-Hub-Signature'", ")", "if", "hasattr", "(", "request", ",", "'get_data'", ")", ":", "# Werkzeug >= 0.9", "payload", "=", "request", ".", "get_data", "(", ")", "else", ":", "payload", "=", "request", ".", "data", "if", "not", "signature", ":", "raise", "BadRequest", "(", "'Missing signature'", ")", "if", "not", "check_signature", "(", "signature", ",", "key", ",", "payload", ")", ":", "raise", "BadRequest", "(", "'Wrong signature'", ")", "event", "=", "request", ".", "headers", ".", "get", "(", "'X-GitHub-Event'", ")", "guid", "=", "request", ".", "headers", ".", "get", "(", "'X-GitHub-Delivery'", ")", "if", "not", "event", ":", "raise", "BadRequest", "(", "'Missing header: X-GitHub-Event'", ")", "elif", "not", "guid", ":", "raise", "BadRequest", "(", "'Missing header: X-GitHub-Delivery'", ")", "if", "hasattr", "(", "request", ",", "'get_json'", ")", ":", "# Flask >= 0.10", "data", "=", "request", ".", "get_json", "(", ")", "else", ":", "data", "=", "request", ".", "json", "if", "event", "in", "self", ".", "_hooks", ":", "return", "self", ".", "_hooks", "[", "event", "]", "(", "data", ",", "guid", ")", "else", ":", "return", "'Hook not used\\n'" ]
36.979592
18
def get_order_book(self, code): """ 获取实时摆盘数据 :param code: 股票代码 :return: (ret, data) ret == RET_OK 返回字典,数据格式如下 ret != RET_OK 返回错误字符串 {‘code’: 股票代码 ‘Ask’:[ (ask_price1, ask_volume1,order_num), (ask_price2, ask_volume2, order_num),…] ‘Bid’: [ (bid_price1, bid_volume1, order_num), (bid_price2, bid_volume2, order_num),…] } 'Ask':卖盘, 'Bid'买盘。每个元组的含义是(委托价格,委托数量,委托订单数) """ if code is None or is_str(code) is False: error_str = ERROR_STR_PREFIX + "the type of code param is wrong" return RET_ERROR, error_str query_processor = self._get_sync_query_processor( OrderBookQuery.pack_req, OrderBookQuery.unpack_rsp, ) kargs = { "code": code, "conn_id": self.get_sync_conn_id() } ret_code, msg, orderbook = query_processor(**kargs) if ret_code == RET_ERROR: return ret_code, msg return RET_OK, orderbook
[ "def", "get_order_book", "(", "self", ",", "code", ")", ":", "if", "code", "is", "None", "or", "is_str", "(", "code", ")", "is", "False", ":", "error_str", "=", "ERROR_STR_PREFIX", "+", "\"the type of code param is wrong\"", "return", "RET_ERROR", ",", "error_str", "query_processor", "=", "self", ".", "_get_sync_query_processor", "(", "OrderBookQuery", ".", "pack_req", ",", "OrderBookQuery", ".", "unpack_rsp", ",", ")", "kargs", "=", "{", "\"code\"", ":", "code", ",", "\"conn_id\"", ":", "self", ".", "get_sync_conn_id", "(", ")", "}", "ret_code", ",", "msg", ",", "orderbook", "=", "query_processor", "(", "*", "*", "kargs", ")", "if", "ret_code", "==", "RET_ERROR", ":", "return", "ret_code", ",", "msg", "return", "RET_OK", ",", "orderbook" ]
29.555556
21.555556
def get_fabric_tasks(self, project): """ Generate a list of fabric tasks that are available """ cache_key = 'project_{}_fabfile_tasks'.format(project.pk) cached_result = cache.get(cache_key) if cached_result: return cached_result try: fabfile_path, activate_loc = self.get_fabfile_path(project) if activate_loc: output = self.check_output( 'source {};fab --list --list-format=short --fabfile={}'.format(activate_loc, fabfile_path), shell=True ) else: output = self.check_output( 'fab --list --list-format=short --fabfile={}'.format(fabfile_path), shell=True ) lines = output.splitlines() tasks = [] for line in lines: name = line.strip() if activate_loc: o = self.check_output( 'source {};fab --display={} --fabfile={}'.format(activate_loc, name, fabfile_path), shell=True ) else: o = self.check_output( ['fab', '--display={}'.format(name), '--fabfile={}'.format(fabfile_path)] ) tasks.append(self.parse_task_details(name, o)) cache.set(cache_key, tasks, settings.FABRIC_TASK_CACHE_TIMEOUT) except Exception as e: tasks = [] return tasks
[ "def", "get_fabric_tasks", "(", "self", ",", "project", ")", ":", "cache_key", "=", "'project_{}_fabfile_tasks'", ".", "format", "(", "project", ".", "pk", ")", "cached_result", "=", "cache", ".", "get", "(", "cache_key", ")", "if", "cached_result", ":", "return", "cached_result", "try", ":", "fabfile_path", ",", "activate_loc", "=", "self", ".", "get_fabfile_path", "(", "project", ")", "if", "activate_loc", ":", "output", "=", "self", ".", "check_output", "(", "'source {};fab --list --list-format=short --fabfile={}'", ".", "format", "(", "activate_loc", ",", "fabfile_path", ")", ",", "shell", "=", "True", ")", "else", ":", "output", "=", "self", ".", "check_output", "(", "'fab --list --list-format=short --fabfile={}'", ".", "format", "(", "fabfile_path", ")", ",", "shell", "=", "True", ")", "lines", "=", "output", ".", "splitlines", "(", ")", "tasks", "=", "[", "]", "for", "line", "in", "lines", ":", "name", "=", "line", ".", "strip", "(", ")", "if", "activate_loc", ":", "o", "=", "self", ".", "check_output", "(", "'source {};fab --display={} --fabfile={}'", ".", "format", "(", "activate_loc", ",", "name", ",", "fabfile_path", ")", ",", "shell", "=", "True", ")", "else", ":", "o", "=", "self", ".", "check_output", "(", "[", "'fab'", ",", "'--display={}'", ".", "format", "(", "name", ")", ",", "'--fabfile={}'", ".", "format", "(", "fabfile_path", ")", "]", ")", "tasks", ".", "append", "(", "self", ".", "parse_task_details", "(", "name", ",", "o", ")", ")", "cache", ".", "set", "(", "cache_key", ",", "tasks", ",", "settings", ".", "FABRIC_TASK_CACHE_TIMEOUT", ")", "except", "Exception", "as", "e", ":", "tasks", "=", "[", "]", "return", "tasks" ]
33.478261
22.086957
def listener(self): """Helper for mt_interact() -- this executes in the other thread.""" while 1: try: data = self.read_eager() except EOFError: print('*** Connection closed by remote host ***') return if data: self.stdout.write(data) else: self.stdout.flush()
[ "def", "listener", "(", "self", ")", ":", "while", "1", ":", "try", ":", "data", "=", "self", ".", "read_eager", "(", ")", "except", "EOFError", ":", "print", "(", "'*** Connection closed by remote host ***'", ")", "return", "if", "data", ":", "self", ".", "stdout", ".", "write", "(", "data", ")", "else", ":", "self", ".", "stdout", ".", "flush", "(", ")" ]
32.75
14.416667
def zncc(ts1,ts2): """Zero mean normalised cross-correlation (ZNCC) This function does ZNCC of two signals, ts1 and ts2 Normalisation by very small values is avoided by doing max(nmin,nvalue) Parameters -------------- ts1 : ndarray Input signal 1 to be aligned with ts2 : ndarray Input signal 2 Returns -------------- best_shift : float The best shift of *ts1* to align it with *ts2* ts_out : ndarray The correlation result """ # Output is the same size as ts1 Ns1 = np.size(ts1) Ns2 = np.size(ts2) ts_out = np.zeros((Ns1,1), dtype='float64') ishift = int(np.floor(Ns2/2)) # origin of ts2 t1m = np.mean(ts1) t2m = np.mean(ts2) for k in range(0,Ns1): lstart = np.int(ishift-k) if lstart<0 : lstart=0 lend = np.int(ishift-k+Ns2) imax = np.int(np.min([Ns2,Ns1-k+ishift])) if lend>imax : lend=imax csum = 0 ts1sum = 0 ts1sum2 = 0 ts2sum = 0 ts2sum2 = 0 Nterms = lend-lstart for l in range(lstart,lend): csum += ts1[k+l-ishift]*ts2[l] ts1sum += ts1[k+l-ishift] ts1sum2 += ts1[k+l-ishift]*ts1[k+l-ishift] ts2sum += ts2[l] ts2sum2 += ts2[l]*ts2[l] ts1sum2 = np.max([t1m*t1m*100,ts1sum2])-ts1sum*ts1sum/Nterms ts2sum2 = np.max([t2m*t2m*100,ts2sum2])-ts2sum*ts2sum/Nterms #ts_out[k]=csum/np.sqrt(ts1sum2*ts2sum2) ts_out[k]=(csum-2.0*ts1sum*ts2sum/Nterms+ts1sum*ts2sum/Nterms/Nterms)/np.sqrt(ts1sum2*ts2sum2) best_shift = np.argmax(ts_out)-ishift return best_shift, ts_out
[ "def", "zncc", "(", "ts1", ",", "ts2", ")", ":", "# Output is the same size as ts1", "Ns1", "=", "np", ".", "size", "(", "ts1", ")", "Ns2", "=", "np", ".", "size", "(", "ts2", ")", "ts_out", "=", "np", ".", "zeros", "(", "(", "Ns1", ",", "1", ")", ",", "dtype", "=", "'float64'", ")", "ishift", "=", "int", "(", "np", ".", "floor", "(", "Ns2", "/", "2", ")", ")", "# origin of ts2", "t1m", "=", "np", ".", "mean", "(", "ts1", ")", "t2m", "=", "np", ".", "mean", "(", "ts2", ")", "for", "k", "in", "range", "(", "0", ",", "Ns1", ")", ":", "lstart", "=", "np", ".", "int", "(", "ishift", "-", "k", ")", "if", "lstart", "<", "0", ":", "lstart", "=", "0", "lend", "=", "np", ".", "int", "(", "ishift", "-", "k", "+", "Ns2", ")", "imax", "=", "np", ".", "int", "(", "np", ".", "min", "(", "[", "Ns2", ",", "Ns1", "-", "k", "+", "ishift", "]", ")", ")", "if", "lend", ">", "imax", ":", "lend", "=", "imax", "csum", "=", "0", "ts1sum", "=", "0", "ts1sum2", "=", "0", "ts2sum", "=", "0", "ts2sum2", "=", "0", "Nterms", "=", "lend", "-", "lstart", "for", "l", "in", "range", "(", "lstart", ",", "lend", ")", ":", "csum", "+=", "ts1", "[", "k", "+", "l", "-", "ishift", "]", "*", "ts2", "[", "l", "]", "ts1sum", "+=", "ts1", "[", "k", "+", "l", "-", "ishift", "]", "ts1sum2", "+=", "ts1", "[", "k", "+", "l", "-", "ishift", "]", "*", "ts1", "[", "k", "+", "l", "-", "ishift", "]", "ts2sum", "+=", "ts2", "[", "l", "]", "ts2sum2", "+=", "ts2", "[", "l", "]", "*", "ts2", "[", "l", "]", "ts1sum2", "=", "np", ".", "max", "(", "[", "t1m", "*", "t1m", "*", "100", ",", "ts1sum2", "]", ")", "-", "ts1sum", "*", "ts1sum", "/", "Nterms", "ts2sum2", "=", "np", ".", "max", "(", "[", "t2m", "*", "t2m", "*", "100", ",", "ts2sum2", "]", ")", "-", "ts2sum", "*", "ts2sum", "/", "Nterms", "#ts_out[k]=csum/np.sqrt(ts1sum2*ts2sum2)", "ts_out", "[", "k", "]", "=", "(", "csum", "-", "2.0", "*", "ts1sum", "*", "ts2sum", "/", "Nterms", "+", "ts1sum", "*", "ts2sum", "/", "Nterms", "/", "Nterms", ")", "/", "np", ".", 
"sqrt", "(", "ts1sum2", "*", "ts2sum2", ")", "best_shift", "=", "np", ".", "argmax", "(", "ts_out", ")", "-", "ishift", "return", "best_shift", ",", "ts_out" ]
29.258621
18.206897
def fast_clone(self, VM, clone_name, mem=None): """ Create a 'fast' clone of a VM. This means we make a snapshot of the disk and copy some of the settings and then create a new VM based on the snapshot and settings The VM is transient so when it is shutdown it deletes itself :param VM: The VM to base this clone on :type VM: sham.machine.VirtualMachine :param clone_name: The name for this clone :type clone_name: str """ disks = VM.get_disks() ints = VM.get_interfaces() count = 0 new_disks = [] for disk in disks: pool = disk.pool new_disk_name = '{0}-disk{1}'.format(clone_name, count) count += 1 new_disk = pool.create_backed_vol(new_disk_name, disk) new_disks.append(new_disk) for inter in ints: inter.mac = None # if the mac is set to None we don't include it in the xml # and libvirt will autogen one for us return self.create_vm( VM.domain_type, clone_name, VM.num_cpus, mem or VM.current_memory, mem or VM.max_memory, new_disks, ints)
[ "def", "fast_clone", "(", "self", ",", "VM", ",", "clone_name", ",", "mem", "=", "None", ")", ":", "disks", "=", "VM", ".", "get_disks", "(", ")", "ints", "=", "VM", ".", "get_interfaces", "(", ")", "count", "=", "0", "new_disks", "=", "[", "]", "for", "disk", "in", "disks", ":", "pool", "=", "disk", ".", "pool", "new_disk_name", "=", "'{0}-disk{1}'", ".", "format", "(", "clone_name", ",", "count", ")", "count", "+=", "1", "new_disk", "=", "pool", ".", "create_backed_vol", "(", "new_disk_name", ",", "disk", ")", "new_disks", ".", "append", "(", "new_disk", ")", "for", "inter", "in", "ints", ":", "inter", ".", "mac", "=", "None", "# if the mac is set to None we don't include it in the xml", "# and libvirt will autogen one for us", "return", "self", ".", "create_vm", "(", "VM", ".", "domain_type", ",", "clone_name", ",", "VM", ".", "num_cpus", ",", "mem", "or", "VM", ".", "current_memory", ",", "mem", "or", "VM", ".", "max_memory", ",", "new_disks", ",", "ints", ")" ]
35.085714
15.428571
def export(self, class_name, method_name, export_data=False,
           export_dir='.', export_filename='data.json',
           export_append_checksum=False, **kwargs):
    """
    Port a trained estimator to the syntax of a chosen programming language.

    Parameters
    ----------
    :param class_name : string
        The name of the class in the returned result.
    :param method_name : string
        The name of the method in the returned result.
    :param export_data : bool, default: False
        Whether the model data should be saved or not.
    :param export_dir : string, default: '.' (current directory)
        The directory where the model data should be saved.
    :param export_filename : string, default: 'data.json'
        The filename of the exported model data.
    :param export_append_checksum : bool, default: False
        Whether to append the checksum to the filename or not.

    Returns
    -------
    :return : string
        The transpiled algorithm with the defined placeholders.

    Raises
    ------
    :raise ValueError
        If the estimator's kernel type is not one of
        linear/rbf/poly/sigmoid.
    """
    # Arguments:
    self.class_name = class_name
    self.method_name = method_name

    # Templates of primitive data types:
    temp_type = self.temp('type')
    temp_arr = self.temp('arr')
    temp_arr_ = self.temp('arr[]')
    temp_arr__ = self.temp('arr[][]')

    # Estimator:
    est = self.estimator
    self.params = est.get_params()

    # Check kernel type (only these four can be transpiled):
    supported_kernels = ['linear', 'rbf', 'poly', 'sigmoid']
    if self.params['kernel'] not in supported_kernels:
        msg = 'The kernel type is not supported.'
        raise ValueError(msg)

    # Per-class support-vector counts become the 'weights' array.
    self.n_features = len(est.support_vectors_[0])
    self.svs_rows = est.n_support_
    self.n_svs_rows = len(est.n_support_)
    self.weights = self.temp('arr[]', skipping=True).format(
        type='int', name='weights',
        values=', '.join([str(e) for e in self.svs_rows]),
        n=len(self.svs_rows))
    self.n_weights = len(self.svs_rows)
    self.n_classes = len(est.classes_)
    self.is_binary = self.n_classes == 2
    self.prefix = 'binary' if self.is_binary else 'multi'

    # Support vectors (rendered as a 2D array literal):
    vectors = []
    for vector in est.support_vectors_:
        _vectors = [temp_type.format(self.repr(v)) for v in vector]
        _vectors = temp_arr.format(', '.join(_vectors))
        vectors.append(_vectors)
    vectors = ', '.join(vectors)
    vectors = self.temp('arr[][]', skipping=True).format(
        type='double', name='vectors', values=vectors,
        n=len(est.support_vectors_), m=len(est.support_vectors_[0]))
    self.vectors = vectors
    self.n_vectors = len(est.support_vectors_)

    # Coefficients (dual coefficients, one row per class pair):
    coeffs = []
    for coeff in est.dual_coef_:
        _coeffs = [temp_type.format(self.repr(c)) for c in coeff]
        _coeffs = temp_arr.format(', '.join(_coeffs))
        coeffs.append(_coeffs)
    coeffs = ', '.join(coeffs)
    coeffs = temp_arr__.format(type='double', name='coefficients',
                               values=coeffs, n=len(est.dual_coef_),
                               m=len(est.dual_coef_[0]))
    self.coefficients = coeffs
    self.n_coefficients = len(est.dual_coef_)

    # Interceptions:
    inters = [temp_type.format(self.repr(i)) for i in est._intercept_]
    inters = ', '.join(inters)
    inters = temp_arr_.format(type='double', name='intercepts',
                              values=inters, n=len(est._intercept_))
    self.intercepts = inters
    self.n_intercepts = len(est._intercept_)

    # Kernel (C targets use only the first character as a tag):
    self.kernel = str(self.params['kernel'])
    if self.target_language == 'c':
        self.kernel = self.kernel[0]

    # Gamma ('auto' in scikit-learn means 1 / n_features):
    self.gamma = self.params['gamma']
    if self.gamma == 'auto':
        self.gamma = 1. / self.n_features
    self.gamma = self.repr(self.gamma)

    # Coefficient and degree:
    self.coef0 = self.repr(self.params['coef0'])
    self.degree = self.repr(self.params['degree'])

    if self.target_method == 'predict':
        # Exported: model data written to disk, template loads it.
        if export_data and os.path.isdir(export_dir):
            self.export_data(export_dir, export_filename,
                             export_append_checksum)
            return self.predict('exported')
        # Separated: model data embedded into the source directly.
        return self.predict('separated')
[ "def", "export", "(", "self", ",", "class_name", ",", "method_name", ",", "export_data", "=", "False", ",", "export_dir", "=", "'.'", ",", "export_filename", "=", "'data.json'", ",", "export_append_checksum", "=", "False", ",", "*", "*", "kwargs", ")", ":", "# Arguments:", "self", ".", "class_name", "=", "class_name", "self", ".", "method_name", "=", "method_name", "# Templates of primitive data types:", "temp_type", "=", "self", ".", "temp", "(", "'type'", ")", "temp_arr", "=", "self", ".", "temp", "(", "'arr'", ")", "temp_arr_", "=", "self", ".", "temp", "(", "'arr[]'", ")", "temp_arr__", "=", "self", ".", "temp", "(", "'arr[][]'", ")", "# Estimator:", "est", "=", "self", ".", "estimator", "self", ".", "params", "=", "est", ".", "get_params", "(", ")", "# Check kernel type:", "supported_kernels", "=", "[", "'linear'", ",", "'rbf'", ",", "'poly'", ",", "'sigmoid'", "]", "if", "self", ".", "params", "[", "'kernel'", "]", "not", "in", "supported_kernels", ":", "msg", "=", "'The kernel type is not supported.'", "raise", "ValueError", "(", "msg", ")", "self", ".", "n_features", "=", "len", "(", "est", ".", "support_vectors_", "[", "0", "]", ")", "self", ".", "svs_rows", "=", "est", ".", "n_support_", "self", ".", "n_svs_rows", "=", "len", "(", "est", ".", "n_support_", ")", "self", ".", "weights", "=", "self", ".", "temp", "(", "'arr[]'", ",", "skipping", "=", "True", ")", ".", "format", "(", "type", "=", "'int'", ",", "name", "=", "'weights'", ",", "values", "=", "', '", ".", "join", "(", "[", "str", "(", "e", ")", "for", "e", "in", "self", ".", "svs_rows", "]", ")", ",", "n", "=", "len", "(", "self", ".", "svs_rows", ")", ")", "self", ".", "n_weights", "=", "len", "(", "self", ".", "svs_rows", ")", "self", ".", "n_classes", "=", "len", "(", "est", ".", "classes_", ")", "self", ".", "is_binary", "=", "self", ".", "n_classes", "==", "2", "self", ".", "prefix", "=", "'binary'", "if", "self", ".", "is_binary", "else", "'multi'", "# Support vectors:", "vectors", "=", 
"[", "]", "for", "vector", "in", "est", ".", "support_vectors_", ":", "_vectors", "=", "[", "temp_type", ".", "format", "(", "self", ".", "repr", "(", "v", ")", ")", "for", "v", "in", "vector", "]", "_vectors", "=", "temp_arr", ".", "format", "(", "', '", ".", "join", "(", "_vectors", ")", ")", "vectors", ".", "append", "(", "_vectors", ")", "vectors", "=", "', '", ".", "join", "(", "vectors", ")", "vectors", "=", "self", ".", "temp", "(", "'arr[][]'", ",", "skipping", "=", "True", ")", ".", "format", "(", "type", "=", "'double'", ",", "name", "=", "'vectors'", ",", "values", "=", "vectors", ",", "n", "=", "len", "(", "est", ".", "support_vectors_", ")", ",", "m", "=", "len", "(", "est", ".", "support_vectors_", "[", "0", "]", ")", ")", "self", ".", "vectors", "=", "vectors", "self", ".", "n_vectors", "=", "len", "(", "est", ".", "support_vectors_", ")", "# Coefficients:", "coeffs", "=", "[", "]", "for", "coeff", "in", "est", ".", "dual_coef_", ":", "_coeffs", "=", "[", "temp_type", ".", "format", "(", "self", ".", "repr", "(", "c", ")", ")", "for", "c", "in", "coeff", "]", "_coeffs", "=", "temp_arr", ".", "format", "(", "', '", ".", "join", "(", "_coeffs", ")", ")", "coeffs", ".", "append", "(", "_coeffs", ")", "coeffs", "=", "', '", ".", "join", "(", "coeffs", ")", "coeffs", "=", "temp_arr__", ".", "format", "(", "type", "=", "'double'", ",", "name", "=", "'coefficients'", ",", "values", "=", "coeffs", ",", "n", "=", "len", "(", "est", ".", "dual_coef_", ")", ",", "m", "=", "len", "(", "est", ".", "dual_coef_", "[", "0", "]", ")", ")", "self", ".", "coefficients", "=", "coeffs", "self", ".", "n_coefficients", "=", "len", "(", "est", ".", "dual_coef_", ")", "# Interceptions:", "inters", "=", "[", "temp_type", ".", "format", "(", "self", ".", "repr", "(", "i", ")", ")", "for", "i", "in", "est", ".", "_intercept_", "]", "inters", "=", "', '", ".", "join", "(", "inters", ")", "inters", "=", "temp_arr_", ".", "format", "(", "type", "=", "'double'", ",", "name", "=", 
"'intercepts'", ",", "values", "=", "inters", ",", "n", "=", "len", "(", "est", ".", "_intercept_", ")", ")", "self", ".", "intercepts", "=", "inters", "self", ".", "n_intercepts", "=", "len", "(", "est", ".", "_intercept_", ")", "# Kernel:", "self", ".", "kernel", "=", "str", "(", "self", ".", "params", "[", "'kernel'", "]", ")", "if", "self", ".", "target_language", "==", "'c'", ":", "self", ".", "kernel", "=", "self", ".", "kernel", "[", "0", "]", "# Gamma:", "self", ".", "gamma", "=", "self", ".", "params", "[", "'gamma'", "]", "if", "self", ".", "gamma", "==", "'auto'", ":", "self", ".", "gamma", "=", "1.", "/", "self", ".", "n_features", "self", ".", "gamma", "=", "self", ".", "repr", "(", "self", ".", "gamma", ")", "# Coefficient and degree:", "self", ".", "coef0", "=", "self", ".", "repr", "(", "self", ".", "params", "[", "'coef0'", "]", ")", "self", ".", "degree", "=", "self", ".", "repr", "(", "self", ".", "params", "[", "'degree'", "]", ")", "if", "self", ".", "target_method", "==", "'predict'", ":", "# Exported:", "if", "export_data", "and", "os", ".", "path", ".", "isdir", "(", "export_dir", ")", ":", "self", ".", "export_data", "(", "export_dir", ",", "export_filename", ",", "export_append_checksum", ")", "return", "self", ".", "predict", "(", "'exported'", ")", "# Separated:", "return", "self", ".", "predict", "(", "'separated'", ")" ]
38.974359
17.538462
def _get_option(self, settings, find_key):
    """ Return index for provided key, or None when absent. """
    # The IAR template stores everything as an array of option dicts in
    # arbitrary order, so we scan for the matching name.  enumerate()
    # yields the position directly; the original settings.index(option)
    # rescanned the list (O(n) per hit) and would return the index of the
    # first equal dict, which is wrong when duplicates exist.
    for index, option in enumerate(settings):
        if option['name'] == find_key:
            return index
[ "def", "_get_option", "(", "self", ",", "settings", ",", "find_key", ")", ":", "# This is used as in IAR template, everything ", "# is as an array with random positions. We look for key with an index", "for", "option", "in", "settings", ":", "if", "option", "[", "'name'", "]", "==", "find_key", ":", "return", "settings", ".", "index", "(", "option", ")" ]
48
9.857143
def _is_axis_allowed(self, axis):
    """Check if axis are allowed.

    A calculation requested over the CA items dimension is not valid;
    every other direction is.
    """
    if axis is None:
        # Table direction: only the trailing two dimensions take part in
        # the calculation (hence [-2:]); a 0th items dimension is fine
        # since nothing is computed over it.
        return DT.CA_SUBVAR not in self.dim_types[-2:]

    if isinstance(axis, int):
        if self.ndim == 1 and axis == 1:
            # Special allowed case: "row" direction of a 1D cube.
            return True
        axis = [axis]

    # axis is now an iterable of dimension indices; the request is valid
    # only if none of them points at the CA items dimension.
    return all(self.dim_types[dim_idx] != DT.CA_SUBVAR
               for dim_idx in axis)
[ "def", "_is_axis_allowed", "(", "self", ",", "axis", ")", ":", "if", "axis", "is", "None", ":", "# If table direction was requested, we must ensure that each slice", "# doesn't have the CA items dimension (thus the [-2:] part). It's", "# OK for the 0th dimension to be items, since no calculation is", "# performed over it.", "if", "DT", ".", "CA_SUBVAR", "in", "self", ".", "dim_types", "[", "-", "2", ":", "]", ":", "return", "False", "return", "True", "if", "isinstance", "(", "axis", ",", "int", ")", ":", "if", "self", ".", "ndim", "==", "1", "and", "axis", "==", "1", ":", "# Special allowed case of a 1D cube, where \"row\"", "# directions is requested.", "return", "True", "axis", "=", "[", "axis", "]", "# ---axis is a tuple---", "for", "dim_idx", "in", "axis", ":", "if", "self", ".", "dim_types", "[", "dim_idx", "]", "==", "DT", ".", "CA_SUBVAR", ":", "# If any of the directions explicitly asked for directly", "# corresponds to the CA items dimension, the requested", "# calculation is not valid.", "return", "False", "return", "True" ]
37.870968
18.83871
def __filename_to_modname(self, pathname):
    """
    @type  pathname: str
    @param pathname: Pathname to a module.

    @rtype:  str
    @return: Module name.
    """
    filename = PathOperations.pathname_to_filename(pathname)
    if not filename:
        # No filename component could be extracted: fall back to the
        # raw pathname.
        return pathname
    filename = filename.lower()
    base, ext = PathOperations.split_extension(filename)
    # Drop the extension only when both base and extension are non-empty;
    # otherwise keep the full (lowercased) filename.
    return base if (base and ext) else filename
[ "def", "__filename_to_modname", "(", "self", ",", "pathname", ")", ":", "filename", "=", "PathOperations", ".", "pathname_to_filename", "(", "pathname", ")", "if", "filename", ":", "filename", "=", "filename", ".", "lower", "(", ")", "filepart", ",", "extpart", "=", "PathOperations", ".", "split_extension", "(", "filename", ")", "if", "filepart", "and", "extpart", ":", "modName", "=", "filepart", "else", ":", "modName", "=", "filename", "else", ":", "modName", "=", "pathname", "return", "modName" ]
29.894737
13.789474
def ekdelr(handle, segno, recno):
    """
    Delete a specified record from a specified E-kernel segment.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekdelr_c.html

    :param handle: File handle.
    :type handle: int
    :param segno: Segment number.
    :type segno: int
    :param recno: Record number.
    :type recno: int
    """
    # Wrap each argument as a C int before handing it to CSPICE.
    libspice.ekdelr_c(ctypes.c_int(handle),
                      ctypes.c_int(segno),
                      ctypes.c_int(recno))
[ "def", "ekdelr", "(", "handle", ",", "segno", ",", "recno", ")", ":", "handle", "=", "ctypes", ".", "c_int", "(", "handle", ")", "segno", "=", "ctypes", ".", "c_int", "(", "segno", ")", "recno", "=", "ctypes", ".", "c_int", "(", "recno", ")", "libspice", ".", "ekdelr_c", "(", "handle", ",", "segno", ",", "recno", ")" ]
28.176471
15
def remove_users_from_account_group(self, account_id, group_id, **kwargs):  # noqa: E501
    """Remove users from a group.  # noqa: E501

    An endpoint for removing users from groups.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass asynchronous=True

    >>> thread = api.remove_users_from_account_group(account_id, group_id, asynchronous=True)
    >>> result = thread.get()

    :param asynchronous bool
    :param str account_id: Account ID. (required)
    :param str group_id: (required)
    :param SubjectList body:
    :return: UpdatedResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant returns the request thread when
    # asynchronous=True is in kwargs and the resolved data otherwise, so
    # both code paths reduce to a single delegated call.
    return self.remove_users_from_account_group_with_http_info(
        account_id, group_id, **kwargs)  # noqa: E501
[ "def", "remove_users_from_account_group", "(", "self", ",", "account_id", ",", "group_id", ",", "*", "*", "kwargs", ")", ":", "# noqa: E501", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'asynchronous'", ")", ":", "return", "self", ".", "remove_users_from_account_group_with_http_info", "(", "account_id", ",", "group_id", ",", "*", "*", "kwargs", ")", "# noqa: E501", "else", ":", "(", "data", ")", "=", "self", ".", "remove_users_from_account_group_with_http_info", "(", "account_id", ",", "group_id", ",", "*", "*", "kwargs", ")", "# noqa: E501", "return", "data" ]
60.826087
35.347826
def load_model(self, tid, custom_objects=None):
    """Load saved keras model of the trial.

    If tid = None, get the best model

    Not applicable for trials ran in cross validion (i.e. not applicable
    for `CompileFN.cv_n_folds is None`
    """
    # No trial id given: fall back to the best-scoring trial.
    trial_id = self.best_trial_tid() if tid is None else tid
    path = self.get_trial(trial_id)["result"]["path"]["model"]
    return load_model(path, custom_objects=custom_objects)
[ "def", "load_model", "(", "self", ",", "tid", ",", "custom_objects", "=", "None", ")", ":", "if", "tid", "is", "None", ":", "tid", "=", "self", ".", "best_trial_tid", "(", ")", "model_path", "=", "self", ".", "get_trial", "(", "tid", ")", "[", "\"result\"", "]", "[", "\"path\"", "]", "[", "\"model\"", "]", "return", "load_model", "(", "model_path", ",", "custom_objects", "=", "custom_objects", ")" ]
35.461538
18.384615
def __replace(config, wildcards, config_file):
    """For each kvp in config, do wildcard substitution on the values"""
    for key in config:
        value = config[key]
        original = value
        if isinstance(value, str):
            # Substitute every token that actually has a replacement set.
            for token, replacement in wildcards.items():
                if replacement:
                    value = value.replace(token, replacement)
            # Any ${UPPER_CASE} token still present is unknown or unset.
            leftovers = re.findall(r'\${[A-Z_]+}', value)
            if leftovers:
                raise ValueError("%s=%s in file %s contains unsupported or unset wildcard tokens: %s"
                                 % (key, original, config_file,
                                    ", ".join(leftovers)))
            config[key] = value
    return config
[ "def", "__replace", "(", "config", ",", "wildcards", ",", "config_file", ")", ":", "for", "config_key", "in", "config", ":", "config_value", "=", "config", "[", "config_key", "]", "original_value", "=", "config_value", "if", "isinstance", "(", "config_value", ",", "str", ")", ":", "for", "token", "in", "wildcards", ":", "if", "wildcards", "[", "token", "]", ":", "config_value", "=", "config_value", ".", "replace", "(", "token", ",", "wildcards", "[", "token", "]", ")", "found", "=", "re", ".", "findall", "(", "r'\\${[A-Z_]+}'", ",", "config_value", ")", "if", "found", ":", "raise", "ValueError", "(", "\"%s=%s in file %s contains unsupported or unset wildcard tokens: %s\"", "%", "(", "config_key", ",", "original_value", ",", "config_file", ",", "\", \"", ".", "join", "(", "found", ")", ")", ")", "config", "[", "config_key", "]", "=", "config_value", "return", "config" ]
45.333333
16.666667
def update(self, E=None, **F):
    '''flatten nested dictionaries to update pathwise

    >>> Config({'foo': {'bar': 'glork'}}).update({'foo': {'blub': 'bla'}})
    {'foo': {'bar': 'glork', 'blub': 'bla'}

    In contrast to:

    >>> {'foo': {'bar': 'glork'}}.update({'foo': {'blub': 'bla'}})
    {'foo: {'blub': 'bla'}'}
    '''
    def _merge(mapping):
        for key, value in mapping.items():
            # Recurse only when the key already exists here AND holds a
            # nested ConfigDict; everything else is (re)assigned after
            # being assimilated into this config's types.
            if (super(ConfigDict, self).__contains__(key)
                    and isinstance(self[key], ConfigDict)):
                self[key].update(value)
            else:
                self[key] = self.assimilate(value)

    if E is not None:
        if not hasattr(E, 'keys'):
            # A sequence of pairs: convert to a mapping first.
            E = self.assimilate(dict(E))
        _merge(E)
    _merge(F)
    return self
[ "def", "update", "(", "self", ",", "E", "=", "None", ",", "*", "*", "F", ")", ":", "def", "_update", "(", "D", ")", ":", "for", "k", ",", "v", "in", "D", ".", "items", "(", ")", ":", "if", "super", "(", "ConfigDict", ",", "self", ")", ".", "__contains__", "(", "k", ")", ":", "if", "isinstance", "(", "self", "[", "k", "]", ",", "ConfigDict", ")", ":", "self", "[", "k", "]", ".", "update", "(", "v", ")", "else", ":", "self", "[", "k", "]", "=", "self", ".", "assimilate", "(", "v", ")", "else", ":", "self", "[", "k", "]", "=", "self", ".", "assimilate", "(", "v", ")", "if", "E", "is", "not", "None", ":", "if", "not", "hasattr", "(", "E", ",", "'keys'", ")", ":", "E", "=", "self", ".", "assimilate", "(", "dict", "(", "E", ")", ")", "_update", "(", "E", ")", "_update", "(", "F", ")", "return", "self" ]
29.033333
21.033333
def query_raw(self, query, order_by=None, limit=None, offset=0):
    """ Do a full-text query on the OpenSearch API using the format specified
    in https://scihub.copernicus.eu/twiki/do/view/SciHubUserGuide/3FullTextSearch

    DEPRECATED: use :meth:`query(raw=...) <.query>` instead.
    This method will be removed in the next major release.

    Parameters
    ----------
    query : str
        The query string.
    order_by: str, optional
        A comma-separated list of fields to order by (on server side).
        Prefix the field name by '+' or '-' to sort in ascending or descending
        order, respectively. Ascending order is used if the prefix is omitted.
    limit: int, optional
        Maximum number of products returned. Defaults to no limit.
    offset: int, optional
        The number of results to skip. Defaults to 0.

    Returns
    -------
    dict[string, dict]
        Products returned by the query as a dictionary with the product ID as
        the key and the product's attributes (a dictionary) as the value.
    """
    # Kept only as a deprecated alias for query(raw=...).
    warnings.warn(
        "query_raw() has been merged with query(). use query(raw=...) instead.",
        PendingDeprecationWarning
    )
    return self.query(
        raw=query, order_by=order_by, limit=limit, offset=offset)
[ "def", "query_raw", "(", "self", ",", "query", ",", "order_by", "=", "None", ",", "limit", "=", "None", ",", "offset", "=", "0", ")", ":", "warnings", ".", "warn", "(", "\"query_raw() has been merged with query(). use query(raw=...) instead.\"", ",", "PendingDeprecationWarning", ")", "return", "self", ".", "query", "(", "raw", "=", "query", ",", "order_by", "=", "order_by", ",", "limit", "=", "limit", ",", "offset", "=", "offset", ")" ]
44.0625
27.375
def wait_for_resource_value_changes(self, wait: int = 10):
    """ Long polling for changes and return a dictionary with
    resource id -> new value for each change, or False when the SOAP
    call fails.

    :param wait: server-side timeout (seconds) for the long poll
    """
    changes = {}
    payload = """<waitForResourceValueChanges1
                 xmlns=\"utcs\">{timeout}</waitForResourceValueChanges1>
                 """.format(timeout=wait)
    # NOTE(review): the action name 'getResourceValue' looks inconsistent
    # with the waitForResourceValueChanges payload — confirm against the
    # IHC SOAP service definition before changing it.
    xdoc = self.connection.soap_action('/ws/ResourceInteractionService',
                                       'getResourceValue', payload)
    if not xdoc:
        return False
    result = xdoc.findall(
        './SOAP-ENV:Body/ns1:waitForResourceValueChanges2/ns1:arrayItem',
        IHCSoapClient.ihcns)
    for item in result:
        ihcid = item.find('ns1:resourceID', IHCSoapClient.ihcns)
        if ihcid is None:
            continue
        # Boolean value.
        bvalue = item.find('./ns1:value/ns2:value', IHCSoapClient.ihcns)
        if bvalue is not None:
            changes[int(ihcid.text)] = bvalue.text == 'true'
            continue
        # Integer value.
        ivalue = item.find('./ns1:value/ns3:integer', IHCSoapClient.ihcns)
        if ivalue is not None:
            changes[int(ihcid.text)] = int(ivalue.text)
            # BUG FIX: the original fell through to the float lookup after
            # handling an integer; skip to the next item like the other
            # branches do.
            continue
        # Floating point value.
        fvalue = item.find('./ns1:value/ns2:floatingPointValue',
                           IHCSoapClient.ihcns)
        if fvalue is not None:
            changes[int(ihcid.text)] = float(fvalue.text)
            continue
        # Enum value.
        enumName = item.find('./ns1:value/ns2:enumName',
                             IHCSoapClient.ihcns)
        if enumName is not None:
            changes[int(ihcid.text)] = enumName.text
    return changes
[ "def", "wait_for_resource_value_changes", "(", "self", ",", "wait", ":", "int", "=", "10", ")", ":", "changes", "=", "{", "}", "payload", "=", "\"\"\"<waitForResourceValueChanges1\n xmlns=\\\"utcs\\\">{timeout}</waitForResourceValueChanges1>\n \"\"\"", ".", "format", "(", "timeout", "=", "wait", ")", "xdoc", "=", "self", ".", "connection", ".", "soap_action", "(", "'/ws/ResourceInteractionService'", ",", "'getResourceValue'", ",", "payload", ")", "if", "not", "xdoc", ":", "return", "False", "result", "=", "xdoc", ".", "findall", "(", "'./SOAP-ENV:Body/ns1:waitForResourceValueChanges2/ns1:arrayItem'", ",", "IHCSoapClient", ".", "ihcns", ")", "for", "item", "in", "result", ":", "ihcid", "=", "item", ".", "find", "(", "'ns1:resourceID'", ",", "IHCSoapClient", ".", "ihcns", ")", "if", "ihcid", "is", "None", ":", "continue", "bvalue", "=", "item", ".", "find", "(", "'./ns1:value/ns2:value'", ",", "IHCSoapClient", ".", "ihcns", ")", "if", "bvalue", "is", "not", "None", ":", "changes", "[", "int", "(", "ihcid", ".", "text", ")", "]", "=", "bvalue", ".", "text", "==", "'true'", "continue", "ivalue", "=", "item", ".", "find", "(", "'./ns1:value/ns3:integer'", ",", "IHCSoapClient", ".", "ihcns", ")", "if", "ivalue", "is", "not", "None", ":", "changes", "[", "int", "(", "ihcid", ".", "text", ")", "]", "=", "int", "(", "ivalue", ".", "text", ")", "fvalue", "=", "item", ".", "find", "(", "'./ns1:value/ns2:floatingPointValue'", ",", "IHCSoapClient", ".", "ihcns", ")", "if", "fvalue", "is", "not", "None", ":", "changes", "[", "int", "(", "ihcid", ".", "text", ")", "]", "=", "float", "(", "fvalue", ".", "text", ")", "continue", "enumName", "=", "item", ".", "find", "(", "'./ns1:value/ns2:enumName'", ",", "IHCSoapClient", ".", "ihcns", ")", "if", "enumName", "is", "not", "None", ":", "changes", "[", "int", "(", "ihcid", ".", "text", ")", "]", "=", "enumName", ".", "text", "return", "changes" ]
44.289474
17.868421
def days_to_hmsm(days):
    """
    Convert fractional days to hours, minutes, seconds, and microseconds.
    Precision beyond microseconds is rounded to the nearest microsecond.

    Parameters
    ----------
    days : float
        A fractional number of days. Must be less than 1.

    Returns
    -------
    hour : int
        Hour number.

    min : int
        Minute number.

    sec : int
        Second number.

    micro : int
        Microsecond number.

    Raises
    ------
    ValueError
        If `days` is >= 1.

    Examples
    --------
    >>> days_to_hmsm(0.1)
    (2, 24, 0, 0)

    """
    # BUG FIX: the docstring always promised a ValueError for days >= 1,
    # but the check was never implemented; enforce the contract.
    if days >= 1.:
        raise ValueError('days must be < 1: {0}'.format(days))

    hours = days * 24.
    hours, hour = math.modf(hours)

    mins = hours * 60.
    mins, min = math.modf(mins)

    secs = mins * 60.
    secs, sec = math.modf(secs)

    micro = round(secs * 1.e6)

    return int(hour), int(min), int(sec), int(micro)
[ "def", "days_to_hmsm", "(", "days", ")", ":", "hours", "=", "days", "*", "24.", "hours", ",", "hour", "=", "math", ".", "modf", "(", "hours", ")", "mins", "=", "hours", "*", "60.", "mins", ",", "min", "=", "math", ".", "modf", "(", "mins", ")", "secs", "=", "mins", "*", "60.", "secs", ",", "sec", "=", "math", ".", "modf", "(", "secs", ")", "micro", "=", "round", "(", "secs", "*", "1.e6", ")", "return", "int", "(", "hour", ")", ",", "int", "(", "min", ")", ",", "int", "(", "sec", ")", ",", "int", "(", "micro", ")" ]
17.446809
25.148936
def get_events(self, from_=None, to=None):
    """Query a slice of the events.

    Events are always returned in the order the were added.

    Parameters:
    from_ -- if not None, return only events added after the event with
             id `from_`. If None, return from the start of history.
    to    -- if not None, return only events added before, and
             including, the event with event id `to`. If None, return up
             to, and including, the last added event.

    returns -- an iterable of (event id, eventdata) tuples.

    Raises EventKeyDoesNotExistError for an unknown `from_`/`to` key and
    EventOrderError when `to` precedes `from_`.
    """
    assert from_ is None or isinstance(from_, str)
    assert to is None or isinstance(to, str)
    if from_ and not self.key_exists(from_):
        msg = 'from_={0}'.format(from_)
        raise EventStore.EventKeyDoesNotExistError(msg)
    if to and not self.key_exists(to):
        msg = 'to={0}'.format(to)
        raise EventStore.EventKeyDoesNotExistError(msg)

    # +1 below because we have already seen the event
    fromindex = self._get_eventid(from_) + 1 if from_ else 0
    toindex = self._get_eventid(to) if to else None

    if from_ and to and fromindex > toindex:
        raise EventOrderError("'to' happened cronologically before"
                              " 'from_'.")

    # BUG FIX: the original tested `if toindex:`, which treats event
    # id 0 as "no upper bound" and silently returns the whole history;
    # compare against None explicitly instead.
    if toindex is not None:
        sql = ('SELECT uuid, event FROM events '
               'WHERE eventid BETWEEN ? AND ?')
        params = (fromindex, toindex)
    else:
        sql = 'SELECT uuid, event FROM events WHERE eventid >= ?'
        params = (fromindex,)
    sql = sql + " ORDER BY eventid"
    return [(row[0], row[1].encode('utf-8'))
            for row in self.conn.execute(sql, params)]
[ "def", "get_events", "(", "self", ",", "from_", "=", "None", ",", "to", "=", "None", ")", ":", "assert", "from_", "is", "None", "or", "isinstance", "(", "from_", ",", "str", ")", "assert", "to", "is", "None", "or", "isinstance", "(", "to", ",", "str", ")", "if", "from_", "and", "not", "self", ".", "key_exists", "(", "from_", ")", ":", "msg", "=", "'from_={0}'", ".", "format", "(", "from_", ")", "raise", "EventStore", ".", "EventKeyDoesNotExistError", "(", "msg", ")", "if", "to", "and", "not", "self", ".", "key_exists", "(", "to", ")", ":", "msg", "=", "'to={0}'", ".", "format", "(", "to", ")", "raise", "EventStore", ".", "EventKeyDoesNotExistError", "(", "msg", ")", "# +1 below because we have already seen the event", "fromindex", "=", "self", ".", "_get_eventid", "(", "from_", ")", "+", "1", "if", "from_", "else", "0", "toindex", "=", "self", ".", "_get_eventid", "(", "to", ")", "if", "to", "else", "None", "if", "from_", "and", "to", "and", "fromindex", ">", "toindex", ":", "raise", "EventOrderError", "(", "\"'to' happened cronologically before\"", "\" 'from_'.\"", ")", "if", "toindex", ":", "sql", "=", "(", "'SELECT uuid, event FROM events '", "'WHERE eventid BETWEEN ? AND ?'", ")", "params", "=", "(", "fromindex", ",", "toindex", ")", "else", ":", "sql", "=", "'SELECT uuid, event FROM events WHERE eventid >= ?'", "params", "=", "(", "fromindex", ",", ")", "sql", "=", "sql", "+", "\" ORDER BY eventid\"", "return", "[", "(", "row", "[", "0", "]", ",", "row", "[", "1", "]", ".", "encode", "(", "'utf-8'", ")", ")", "for", "row", "in", "self", ".", "conn", ".", "execute", "(", "sql", ",", "params", ")", "]" ]
42.585366
18.926829
def add_group_email_grant(self, permission, email_address, headers=None):
    """
    Convenience method that provides a quick way to add an email group
    grant to a key. It performs a read-modify-write of the key's ACL:
    the current ACL is fetched, a grant built from the given parameters
    is appended, and the updated ACL is PUT back to GS.

    :type permission: string
    :param permission: The permission being granted. Should be one of:
        READ|FULL_CONTROL
        See http://code.google.com/apis/storage/docs/developer-guide.html#authorization
        for more details on permissions.

    :type email_address: string
    :param email_address: The email address associated with the Google
        Group to which you are granting the permission.
    """
    current_acl = self.get_acl(headers=headers)
    current_acl.add_group_email_grant(permission, email_address)
    self.set_acl(current_acl, headers=headers)
[ "def", "add_group_email_grant", "(", "self", ",", "permission", ",", "email_address", ",", "headers", "=", "None", ")", ":", "acl", "=", "self", ".", "get_acl", "(", "headers", "=", "headers", ")", "acl", ".", "add_group_email_grant", "(", "permission", ",", "email_address", ")", "self", ".", "set_acl", "(", "acl", ",", "headers", "=", "headers", ")" ]
47.45
20.75
def get_all_tweets(screen_name, api, since_id):
    """
    Get all tweets for the givens screen_name.
    Returns list of text/created_at pairs.
    """
    # Twitter only allows access to a users most recent 3240 tweets with this method
    collected = []

    # initial request for most recent tweets (200 is the maximum allowed count)
    batch = api.user_timeline(screen_name=screen_name, count=200,
                              since_id=since_id)
    collected.extend(batch)
    if not collected:
        raise Exception("tweets up to date for screen_name: %s" % (screen_name))

    # Page backwards through the timeline until a request comes back empty.
    oldest = collected[-1].id - 1
    while batch:
        print("getting tweets before %s" % (oldest))
        batch = api.user_timeline(screen_name=screen_name, count=200,
                                  max_id=oldest, since_id=since_id)
        collected.extend(batch)
        oldest = collected[-1].id - 1
        print("...%s tweets downloaded so far" % (len(collected)))

    return [t.text.encode("utf-8") for t in collected], collected[0].id
[ "def", "get_all_tweets", "(", "screen_name", ",", "api", ",", "since_id", ")", ":", "# Twitter only allows access to a users most recent 3240 tweets with this method", "all_tweets", "=", "[", "]", "# initial request for most recent tweets (200 is the maximum allowed count)", "new_tweets", "=", "api", ".", "user_timeline", "(", "screen_name", "=", "screen_name", ",", "count", "=", "200", ",", "since_id", "=", "since_id", ")", "all_tweets", ".", "extend", "(", "new_tweets", ")", "if", "len", "(", "all_tweets", ")", "==", "0", ":", "raise", "Exception", "(", "\"tweets up to date for screen_name: %s\"", "%", "(", "screen_name", ")", ")", "oldest", "=", "all_tweets", "[", "-", "1", "]", ".", "id", "-", "1", "while", "len", "(", "new_tweets", ")", ">", "0", ":", "print", "(", "\"getting tweets before %s\"", "%", "(", "oldest", ")", ")", "new_tweets", "=", "api", ".", "user_timeline", "(", "screen_name", "=", "screen_name", ",", "count", "=", "200", ",", "max_id", "=", "oldest", ",", "since_id", "=", "since_id", ")", "all_tweets", ".", "extend", "(", "new_tweets", ")", "oldest", "=", "all_tweets", "[", "-", "1", "]", ".", "id", "-", "1", "print", "(", "\"...%s tweets downloaded so far\"", "%", "(", "len", "(", "all_tweets", ")", ")", ")", "return", "[", "tweet", ".", "text", ".", "encode", "(", "\"utf-8\"", ")", "for", "tweet", "in", "all_tweets", "]", ",", "all_tweets", "[", "0", "]", ".", "id" ]
58.764706
22.823529
def new_text_and_position(self):
    """
    Compute the document text and cursor position that result from
    applying the currently selected completion.

    Returns a (new_text, new_cursor_position) tuple.  When no completion
    is selected (complete_index is None) the original document's text and
    cursor position are returned unchanged.
    """
    if self.complete_index is None:
        doc = self.original_document
        return doc.text, doc.cursor_position

    completion = self.current_completions[self.complete_index]
    before_cursor = self.original_document.text_before_cursor
    after_cursor = self.original_document.text_after_cursor

    # A non-zero start_position is a (negative) offset from the cursor:
    # it trims already-typed characters that the completion replaces.
    if completion.start_position == 0:
        prefix = before_cursor
    else:
        prefix = before_cursor[:completion.start_position]

    new_text = prefix + completion.text + after_cursor
    return new_text, len(prefix) + len(completion.text)
[ "def", "new_text_and_position", "(", "self", ")", ":", "if", "self", ".", "complete_index", "is", "None", ":", "return", "self", ".", "original_document", ".", "text", ",", "self", ".", "original_document", ".", "cursor_position", "else", ":", "original_text_before_cursor", "=", "self", ".", "original_document", ".", "text_before_cursor", "original_text_after_cursor", "=", "self", ".", "original_document", ".", "text_after_cursor", "c", "=", "self", ".", "current_completions", "[", "self", ".", "complete_index", "]", "if", "c", ".", "start_position", "==", "0", ":", "before", "=", "original_text_before_cursor", "else", ":", "before", "=", "original_text_before_cursor", "[", ":", "c", ".", "start_position", "]", "new_text", "=", "before", "+", "c", ".", "text", "+", "original_text_after_cursor", "new_cursor_position", "=", "len", "(", "before", ")", "+", "len", "(", "c", ".", "text", ")", "return", "new_text", ",", "new_cursor_position" ]
43.947368
21.947368
def gap_to_sorl(time_gap):
    """
    Translate an ISO-8601 duration (e.g. 'P1D') into Solr's gap
    syntax (e.g. '+1DAY').

    :param time_gap: ISO-8601 duration string.
    :return: solr's format duration.
    """
    quantity, unit = parse_ISO8601(time_gap)
    unit_name = unit[0]
    # Solr has no WEEKS unit, so express weeks as a day count.
    if unit_name == "WEEKS":
        return "+{0}DAYS".format(quantity * 7)
    return "+{0}{1}".format(quantity, unit_name)
[ "def", "gap_to_sorl", "(", "time_gap", ")", ":", "quantity", ",", "unit", "=", "parse_ISO8601", "(", "time_gap", ")", "if", "unit", "[", "0", "]", "==", "\"WEEKS\"", ":", "return", "\"+{0}DAYS\"", ".", "format", "(", "quantity", "*", "7", ")", "else", ":", "return", "\"+{0}{1}\"", ".", "format", "(", "quantity", ",", "unit", "[", "0", "]", ")" ]
26.090909
11.545455
def method2pos(method):
    '''
    Return a list of valid POS-tags for a given method.

    :param method: name of the inflection method, e.g. 'plural'.
    :return: list containing a single Penn-Treebank tag ('NN', 'VB'
        or 'JJ'), or ['*'] as a wildcard for unknown methods.
    '''
    if method in ('articles', 'plural', 'miniaturize', 'gender'):
        pos = ['NN']
    elif method in ('conjugate',):
        pos = ['VB']
    # BUG FIX: the original tested membership in the single string
    # 'comparative, superlative', which is a substring check (so e.g.
    # 'comp' or even 'a' incorrectly mapped to 'JJ').  Use a real tuple.
    elif method in ('comparative', 'superlative'):
        pos = ['JJ']
    else:
        pos = ['*']
    return pos
[ "def", "method2pos", "(", "method", ")", ":", "if", "method", "in", "(", "'articles'", ",", "'plural'", ",", "'miniaturize'", ",", "'gender'", ")", ":", "pos", "=", "[", "'NN'", "]", "elif", "method", "in", "(", "'conjugate'", ",", ")", ":", "pos", "=", "[", "'VB'", "]", "elif", "method", "in", "(", "'comparative, superlative'", ")", ":", "pos", "=", "[", "'JJ'", "]", "else", ":", "pos", "=", "[", "'*'", "]", "return", "pos" ]
24.5
23.5
def __assert_field_mapping(self, mapping):
    """Validate that *mapping* supplies exactly the fields in FIELDS.

    The caller must pass neither extra nor missing field names; on a
    mismatch a ValueError is raised naming both the received and the
    expected field sets (sorted for readability).
    """
    received = set(mapping.keys())
    expected = set(self.FIELDS)
    if received != expected:
        message = '\n'.join([
            "{0} got different fields from expected".format(self.__class__),
            " got : {0}".format(list(sorted(received))),
            " expected: {0}".format(list(sorted(expected)))])
        raise ValueError(message)
[ "def", "__assert_field_mapping", "(", "self", ",", "mapping", ")", ":", "passed_keys", "=", "set", "(", "mapping", ".", "keys", "(", ")", ")", "class_fields", "=", "set", "(", "self", ".", "FIELDS", ")", "if", "passed_keys", "!=", "class_fields", ":", "raise", "ValueError", "(", "'\\n'", ".", "join", "(", "[", "\"{0} got different fields from expected\"", ".", "format", "(", "self", ".", "__class__", ")", ",", "\" got : {0}\"", ".", "format", "(", "list", "(", "sorted", "(", "passed_keys", ")", ")", ")", ",", "\" expected: {0}\"", ".", "format", "(", "list", "(", "sorted", "(", "class_fields", ")", ")", ")", "]", ")", ")" ]
40.928571
14.714286
def create_tfs_project_analysis_client(url, token=None):
    """
    Create a ProjectAnalysisClient for a Team Foundation Server
    Enterprise connection instance.

    This is helpful for understanding project languages, but currently
    blank for all our test conditions.

    :param url: base URL of the TFS Enterprise server.
    :param token: API token; when omitted, falls back to the
        TFS_API_TOKEN environment variable if present.
    :return: the connected ProjectAnalysisClient.
    :raises RuntimeError: when the client cannot be obtained.
    """
    effective_token = token if token is not None else os.environ.get('TFS_API_TOKEN', None)
    connection = create_tfs_connection(url, effective_token)
    client = connection.get_client(
        'vsts.project_analysis.v4_1.project_analysis_client.ProjectAnalysisClient')
    if client is None:
        msg = 'Unable to connect to TFS Enterprise (%s) with provided token.'
        raise RuntimeError(msg, url)
    return client
[ "def", "create_tfs_project_analysis_client", "(", "url", ",", "token", "=", "None", ")", ":", "if", "token", "is", "None", ":", "token", "=", "os", ".", "environ", ".", "get", "(", "'TFS_API_TOKEN'", ",", "None", ")", "tfs_connection", "=", "create_tfs_connection", "(", "url", ",", "token", ")", "project_analysis_client", "=", "tfs_connection", ".", "get_client", "(", "'vsts.project_analysis.v4_1.project_analysis_client.ProjectAnalysisClient'", ")", "if", "project_analysis_client", "is", "None", ":", "msg", "=", "'Unable to connect to TFS Enterprise (%s) with provided token.'", "raise", "RuntimeError", "(", "msg", ",", "url", ")", "return", "project_analysis_client" ]
43.684211
27.578947
def should_update(stack):
    """Tests whether a stack should be submitted for updates to CF.

    Args:
        stack (:class:`stacker.stack.Stack`): The stack object to check.

    Returns:
        bool: If the stack should be updated, return True.
    """
    # Unlocked stacks are always eligible for update.
    if not stack.locked:
        return True
    # Locked stacks update only when explicitly forced.
    if stack.force:
        logger.debug("Stack %s locked, but is in --force "
                     "list.", stack.name)
        return True
    logger.debug("Stack %s locked and not in --force list. "
                 "Refusing to update.", stack.name)
    return False
[ "def", "should_update", "(", "stack", ")", ":", "if", "stack", ".", "locked", ":", "if", "not", "stack", ".", "force", ":", "logger", ".", "debug", "(", "\"Stack %s locked and not in --force list. \"", "\"Refusing to update.\"", ",", "stack", ".", "name", ")", "return", "False", "else", ":", "logger", ".", "debug", "(", "\"Stack %s locked, but is in --force \"", "\"list.\"", ",", "stack", ".", "name", ")", "return", "True" ]
30.684211
22.052632
def unpickle_dict(items):
    '''Returns a dict pickled with pickle_dict.

    The '_pickled' entry (a comma-separated key list) is popped off
    *items*; each listed key's value is unpickled, every other value is
    copied through unchanged.

    NOTE(review): pickle.loads is unsafe on untrusted data -- only feed
    this dicts produced by pickle_dict.
    '''
    pickled_keys = items.pop('_pickled', '').split(',')
    return {
        key: pickle.loads(val) if key in pickled_keys else val
        for key, val in items.items()
    }
[ "def", "unpickle_dict", "(", "items", ")", ":", "pickled_keys", "=", "items", ".", "pop", "(", "'_pickled'", ",", "''", ")", ".", "split", "(", "','", ")", "ret", "=", "{", "}", "for", "key", ",", "val", "in", "items", ".", "items", "(", ")", ":", "if", "key", "in", "pickled_keys", ":", "ret", "[", "key", "]", "=", "pickle", ".", "loads", "(", "val", ")", "else", ":", "ret", "[", "key", "]", "=", "val", "return", "ret" ]
29.9
14.9
def mavlink_packet(self, msg):
    '''handle an incoming mavlink packet

    Dispatches on the packet type and updates the matching status
    fields on the wx console (GPS, altitude/AGL, flight time, sensor
    health, power, radio link, heartbeat/links, waypoint ETA and
    navigation errors).  No return value.
    '''
    # Status fields can only be rendered on the wx GUI console.
    if not isinstance(self.console, wxconsole.MessageConsole):
        return
    # GUI console died: fall back to a plain text console and give up.
    if not self.console.is_alive():
        self.mpstate.console = textconsole.SimpleConsole()
        return
    type = msg.get_type()  # NOTE(review): shadows the builtin 'type'
    master = self.master
    # add some status fields
    if type in [ 'GPS_RAW', 'GPS_RAW_INT' ]:
        # Satellite count for the primary GPS...
        if type == "GPS_RAW":
            num_sats1 = master.field('GPS_STATUS', 'satellites_visible', 0)
        else:
            num_sats1 = msg.satellites_visible
        # ...and an optional second GPS (-1 default means "not present").
        num_sats2 = master.field('GPS2_RAW', 'satellites_visible', -1)
        if num_sats2 == -1:
            sats_string = "%u" % num_sats1
        else:
            sats_string = "%u/%u" % (num_sats1, num_sats2)
        # A good fix is fix_type 3 on MAVLink 1.0, fix_type 2 on the older protocol.
        if ((msg.fix_type == 3 and master.mavlink10()) or
            (msg.fix_type == 2 and not master.mavlink10())):
            self.console.set_status('GPS', 'GPS: OK (%s)' % sats_string, fg='green')
        else:
            self.console.set_status('GPS', 'GPS: %u (%s)' % (msg.fix_type, sats_string), fg='red')
        # Show both the VFR_HUD heading and GPS course (cog scaled by 0.01 here).
        if master.mavlink10():
            gps_heading = int(self.mpstate.status.msgs['GPS_RAW_INT'].cog * 0.01)
        else:
            gps_heading = self.mpstate.status.msgs['GPS_RAW'].hdg
        self.console.set_status('Heading', 'Hdg %s/%u' % (master.field('VFR_HUD', 'heading', '-'), gps_heading))
    elif type == 'VFR_HUD':
        # GPS altitude (divided by 1e3 on MAVLink 1.0).
        # NOTE(review): 'alt' is computed but never used below -- dead code?
        if master.mavlink10():
            alt = master.field('GPS_RAW_INT', 'alt', 0) / 1.0e3
        else:
            alt = master.field('GPS_RAW', 'alt', 0)
        # Home position: waypoint 0 when a mission is loaded, else the HOME message.
        if self.module('wp').wploader.count() > 0:
            wp = self.module('wp').wploader.wp(0)
            home_lat = wp.x
            home_lng = wp.y
        else:
            home_lat = master.field('HOME', 'lat') * 1.0e-7
            home_lng = master.field('HOME', 'lon') * 1.0e-7
        # Current position (1e-7 scaling) and relative altitude (1e-3 scaling).
        lat = master.field('GLOBAL_POSITION_INT', 'lat', 0) * 1.0e-7
        lng = master.field('GLOBAL_POSITION_INT', 'lon', 0) * 1.0e-7
        rel_alt = master.field('GLOBAL_POSITION_INT', 'relative_alt', 0) * 1.0e-3
        # Estimate height above ground level from the elevation map.
        agl_alt = None
        if self.settings.basealt != 0:
            # A fixed base altitude is configured: AGL = basealt - terrain elevation.
            agl_alt = self.console.ElevationMap.GetElevation(lat, lng)
            if agl_alt is not None:
                agl_alt = self.settings.basealt - agl_alt
        else:
            # Otherwise reference the home position's elevation.
            try:
                agl_alt_home = self.console.ElevationMap.GetElevation(home_lat, home_lng)
            except Exception as ex:
                print(ex)
                agl_alt_home = None
            if agl_alt_home is not None:
                agl_alt = self.console.ElevationMap.GetElevation(lat, lng)
            if agl_alt is not None:
                agl_alt = agl_alt_home - agl_alt
        if agl_alt is not None:
            agl_alt += rel_alt
            # Show our AGL estimate next to the vehicle's own terrain report.
            vehicle_agl = master.field('TERRAIN_REPORT', 'current_height', None)
            if vehicle_agl is None:
                vehicle_agl = '---'
            else:
                vehicle_agl = int(vehicle_agl)
            self.console.set_status('AGL', 'AGL %u/%s' % (agl_alt, vehicle_agl))
        self.console.set_status('Alt', 'Alt %u' % rel_alt)
        self.console.set_status('AirSpeed', 'AirSpeed %u' % msg.airspeed)
        self.console.set_status('GPSSpeed', 'GPSSpeed %u' % msg.groundspeed)
        self.console.set_status('Thr', 'Thr %u' % msg.throttle)
        # Accumulated flight time: copters count "flying" as motors armed,
        # other vehicle types as groundspeed above 3.
        t = time.localtime(msg._timestamp)
        flying = False
        if self.mpstate.vehicle_type == 'copter':
            flying = self.master.motors_armed()
        else:
            flying = msg.groundspeed > 3
        if flying and not self.in_air:
            # Just took off: start the timer.
            self.in_air = True
            self.start_time = time.mktime(t)
        elif flying and self.in_air:
            # Still flying: refresh the displayed flight time.
            self.total_time = time.mktime(t) - self.start_time
            self.console.set_status('FlightTime', 'FlightTime %u:%02u' % (int(self.total_time)/60, int(self.total_time)%60))
        elif not flying and self.in_air:
            # Just landed: freeze the flight time.
            self.in_air = False
            self.total_time = time.mktime(t) - self.start_time
            self.console.set_status('FlightTime', 'FlightTime %u:%02u' % (int(self.total_time)/60, int(self.total_time)%60))
    elif type == 'ATTITUDE':
        # Attitude arrives in radians; display in degrees.
        self.console.set_status('Roll', 'Roll %u' % math.degrees(msg.roll))
        self.console.set_status('Pitch', 'Pitch %u' % math.degrees(msg.pitch))
    elif type in ['SYS_STATUS']:
        # Map each console field name to the sensor bitmask it reflects.
        sensors = { 'AS'   : mavutil.mavlink.MAV_SYS_STATUS_SENSOR_DIFFERENTIAL_PRESSURE,
                    'MAG'  : mavutil.mavlink.MAV_SYS_STATUS_SENSOR_3D_MAG,
                    'INS'  : mavutil.mavlink.MAV_SYS_STATUS_SENSOR_3D_ACCEL | mavutil.mavlink.MAV_SYS_STATUS_SENSOR_3D_GYRO,
                    'AHRS' : mavutil.mavlink.MAV_SYS_STATUS_AHRS,
                    'RC'   : mavutil.mavlink.MAV_SYS_STATUS_SENSOR_RC_RECEIVER,
                    'TERR' : mavutil.mavlink.MAV_SYS_STATUS_TERRAIN,
                    'RNG'  : mavutil.mavlink.MAV_SYS_STATUS_SENSOR_LASER_POSITION}
        # Sensors whose newly-failed state is also announced aloud via say().
        announce = [ 'RC' ]
        for s in sensors.keys():
            bits = sensors[s]
            present = ((msg.onboard_control_sensors_enabled & bits) == bits)
            healthy = ((msg.onboard_control_sensors_health & bits) == bits)
            if not present:
                fg = 'grey'
            elif not healthy:
                fg = 'red'
            else:
                fg = 'green'
            # for terrain show yellow if still loading
            if s == 'TERR' and fg == 'green' and master.field('TERRAIN_REPORT', 'pending', 0) != 0:
                fg = 'yellow'
            self.console.set_status(s, s, fg=fg)
        # Announce a healthy -> unhealthy transition for the listed sensors.
        for s in announce:
            bits = sensors[s]
            present = ((msg.onboard_control_sensors_enabled & bits) == bits)
            healthy = ((msg.onboard_control_sensors_health & bits) == bits)
            was_healthy = ((self.last_sys_status_health & bits) == bits)
            if present and not healthy and was_healthy:
                self.say("%s fail" % s)
        self.last_sys_status_health = msg.onboard_control_sensors_health
    elif type == 'WIND':
        self.console.set_status('Wind', 'Wind %u/%.2f' % (msg.direction, msg.speed))
    elif type == 'EKF_STATUS_REPORT':
        # Colour the EKF field by the worst (highest) reported variance.
        highest = 0.0
        vars = ['velocity_variance', 'pos_horiz_variance', 'pos_vert_variance', 'compass_variance', 'terrain_alt_variance']
        for var in vars:
            v = getattr(msg, var, 0)
            highest = max(v, highest)
        if highest >= 1.0:
            fg = 'red'
        elif highest >= 0.5:
            fg = 'orange'
        else:
            fg = 'green'
        self.console.set_status('EKF', 'EKF', fg=fg)
    elif type == 'HWSTATUS':
        # Board supply voltage: 4600-5300 (scaled by 0.001 for display) is healthy.
        if msg.Vcc >= 4600 and msg.Vcc <= 5300:
            fg = 'green'
        else:
            fg = 'red'
        self.console.set_status('Vcc', 'Vcc %.2f' % (msg.Vcc * 0.001), fg=fg)
    elif type == 'POWER_STATUS':
        # Red when the power status changed since the last report.
        if msg.flags & mavutil.mavlink.MAV_POWER_STATUS_CHANGED:
            fg = 'red'
        else:
            fg = 'green'
        # Compact flag summary: U=USB connected, B=brick valid, S=servo
        # rail valid, O1/O2 = peripheral (hi-power) overcurrent.
        status = 'PWR:'
        if msg.flags & mavutil.mavlink.MAV_POWER_STATUS_USB_CONNECTED:
            status += 'U'
        if msg.flags & mavutil.mavlink.MAV_POWER_STATUS_BRICK_VALID:
            status += 'B'
        if msg.flags & mavutil.mavlink.MAV_POWER_STATUS_SERVO_VALID:
            status += 'S'
        if msg.flags & mavutil.mavlink.MAV_POWER_STATUS_PERIPH_OVERCURRENT:
            status += 'O1'
        if msg.flags & mavutil.mavlink.MAV_POWER_STATUS_PERIPH_HIPOWER_OVERCURRENT:
            status += 'O2'
        self.console.set_status('PWR', status, fg=fg)
        self.console.set_status('Srv', 'Srv %.2f' % (msg.Vservo*0.001), fg='green')
    elif type in ['RADIO', 'RADIO_STATUS']:
        # Flag the link red when either end's RSSI is within 10 of its noise floor.
        if msg.rssi < msg.noise+10 or msg.remrssi < msg.remnoise+10:
            fg = 'red'
        else:
            fg = 'black'
        self.console.set_status('Radio', 'Radio %u/%u %u/%u' % (msg.rssi, msg.noise, msg.remrssi, msg.remnoise), fg=fg)
    elif type == 'HEARTBEAT':
        self.console.set_status('Mode', '%s' % master.flightmode, fg='blue')
        if self.master.motors_armed():
            arm_colour = 'green'
        else:
            arm_colour = 'red'
        self.console.set_status('ARM', 'ARM', fg=arm_colour)
        # Link count changed: clear the old per-link status fields first.
        if self.max_link_num != len(self.mpstate.mav_master):
            for i in range(self.max_link_num):
                self.console.set_status('Link%u'%(i+1), '', row=1)
            self.max_link_num = len(self.mpstate.mav_master)
        for m in self.mpstate.mav_master:
            linkdelay = (self.mpstate.status.highest_msec - m.highest_msec)*1.0e-3
            linkline = "Link %u " % (m.linknum+1)
            if m.linkerror:
                linkline += "down"
                fg = 'red'
            else:
                packets_rcvd_percentage = 100
                if (m.mav_loss != 0): #avoid divide-by-zero
                    packets_rcvd_percentage = (1.0 - (float(m.mav_loss) / float(m.mav_count))) * 100.0
                linkline += "OK (%u pkts, %.2fs delay, %u lost) %u%%" % (m.mav_count, linkdelay, m.mav_loss, packets_rcvd_percentage)
                if linkdelay > 1:
                    fg = 'orange'
                else:
                    fg = 'dark green'
            # NOTE(review): clearing above keys on 'Link%u' % (i+1) but this
            # write keys on m.linknum without +1 -- looks off-by-one; confirm.
            self.console.set_status('Link%u'%m.linknum, linkline, row=1, fg=fg)
    elif type in ['WAYPOINT_CURRENT', 'MISSION_CURRENT']:
        self.console.set_status('WP', 'WP %u' % msg.seq)
        lat = master.field('GLOBAL_POSITION_INT', 'lat', 0) * 1.0e-7
        lng = master.field('GLOBAL_POSITION_INT', 'lon', 0) * 1.0e-7
        if lat != 0 and lng != 0:
            # Low-pass filter the airspeed unless it jumped by more than 5.
            airspeed = master.field('VFR_HUD', 'airspeed', 30)
            if abs(airspeed - self.speed) > 5:
                self.speed = airspeed
            else:
                self.speed = 0.98*self.speed + 0.02*airspeed
            self.speed = max(1, self.speed)
            # Estimated time remaining to complete the mission at this speed.
            time_remaining = int(self.estimated_time_remaining(lat, lng, msg.seq, self.speed))
            self.console.set_status('ETR', 'ETR %u:%02u' % (time_remaining/60, time_remaining%60))
    elif type == 'NAV_CONTROLLER_OUTPUT':
        self.console.set_status('WPDist', 'Distance %u' % msg.wp_dist)
        self.console.set_status('WPBearing', 'Bearing %u' % msg.target_bearing)
        # Suffix marks the error direction: "L" when error > 0, else "H".
        if msg.alt_error > 0:
            alt_error_sign = "L"
        else:
            alt_error_sign = "H"
        if msg.aspd_error > 0:
            aspd_error_sign = "L"
        else:
            aspd_error_sign = "H"
        self.console.set_status('AltError', 'AltError %d%s' % (msg.alt_error, alt_error_sign))
        self.console.set_status('AspdError', 'AspdError %.1f%s' % (msg.aspd_error*0.01, aspd_error_sign))
[ "def", "mavlink_packet", "(", "self", ",", "msg", ")", ":", "if", "not", "isinstance", "(", "self", ".", "console", ",", "wxconsole", ".", "MessageConsole", ")", ":", "return", "if", "not", "self", ".", "console", ".", "is_alive", "(", ")", ":", "self", ".", "mpstate", ".", "console", "=", "textconsole", ".", "SimpleConsole", "(", ")", "return", "type", "=", "msg", ".", "get_type", "(", ")", "master", "=", "self", ".", "master", "# add some status fields", "if", "type", "in", "[", "'GPS_RAW'", ",", "'GPS_RAW_INT'", "]", ":", "if", "type", "==", "\"GPS_RAW\"", ":", "num_sats1", "=", "master", ".", "field", "(", "'GPS_STATUS'", ",", "'satellites_visible'", ",", "0", ")", "else", ":", "num_sats1", "=", "msg", ".", "satellites_visible", "num_sats2", "=", "master", ".", "field", "(", "'GPS2_RAW'", ",", "'satellites_visible'", ",", "-", "1", ")", "if", "num_sats2", "==", "-", "1", ":", "sats_string", "=", "\"%u\"", "%", "num_sats1", "else", ":", "sats_string", "=", "\"%u/%u\"", "%", "(", "num_sats1", ",", "num_sats2", ")", "if", "(", "(", "msg", ".", "fix_type", "==", "3", "and", "master", ".", "mavlink10", "(", ")", ")", "or", "(", "msg", ".", "fix_type", "==", "2", "and", "not", "master", ".", "mavlink10", "(", ")", ")", ")", ":", "self", ".", "console", ".", "set_status", "(", "'GPS'", ",", "'GPS: OK (%s)'", "%", "sats_string", ",", "fg", "=", "'green'", ")", "else", ":", "self", ".", "console", ".", "set_status", "(", "'GPS'", ",", "'GPS: %u (%s)'", "%", "(", "msg", ".", "fix_type", ",", "sats_string", ")", ",", "fg", "=", "'red'", ")", "if", "master", ".", "mavlink10", "(", ")", ":", "gps_heading", "=", "int", "(", "self", ".", "mpstate", ".", "status", ".", "msgs", "[", "'GPS_RAW_INT'", "]", ".", "cog", "*", "0.01", ")", "else", ":", "gps_heading", "=", "self", ".", "mpstate", ".", "status", ".", "msgs", "[", "'GPS_RAW'", "]", ".", "hdg", "self", ".", "console", ".", "set_status", "(", "'Heading'", ",", "'Hdg %s/%u'", "%", "(", "master", ".", "field", "(", 
"'VFR_HUD'", ",", "'heading'", ",", "'-'", ")", ",", "gps_heading", ")", ")", "elif", "type", "==", "'VFR_HUD'", ":", "if", "master", ".", "mavlink10", "(", ")", ":", "alt", "=", "master", ".", "field", "(", "'GPS_RAW_INT'", ",", "'alt'", ",", "0", ")", "/", "1.0e3", "else", ":", "alt", "=", "master", ".", "field", "(", "'GPS_RAW'", ",", "'alt'", ",", "0", ")", "if", "self", ".", "module", "(", "'wp'", ")", ".", "wploader", ".", "count", "(", ")", ">", "0", ":", "wp", "=", "self", ".", "module", "(", "'wp'", ")", ".", "wploader", ".", "wp", "(", "0", ")", "home_lat", "=", "wp", ".", "x", "home_lng", "=", "wp", ".", "y", "else", ":", "home_lat", "=", "master", ".", "field", "(", "'HOME'", ",", "'lat'", ")", "*", "1.0e-7", "home_lng", "=", "master", ".", "field", "(", "'HOME'", ",", "'lon'", ")", "*", "1.0e-7", "lat", "=", "master", ".", "field", "(", "'GLOBAL_POSITION_INT'", ",", "'lat'", ",", "0", ")", "*", "1.0e-7", "lng", "=", "master", ".", "field", "(", "'GLOBAL_POSITION_INT'", ",", "'lon'", ",", "0", ")", "*", "1.0e-7", "rel_alt", "=", "master", ".", "field", "(", "'GLOBAL_POSITION_INT'", ",", "'relative_alt'", ",", "0", ")", "*", "1.0e-3", "agl_alt", "=", "None", "if", "self", ".", "settings", ".", "basealt", "!=", "0", ":", "agl_alt", "=", "self", ".", "console", ".", "ElevationMap", ".", "GetElevation", "(", "lat", ",", "lng", ")", "if", "agl_alt", "is", "not", "None", ":", "agl_alt", "=", "self", ".", "settings", ".", "basealt", "-", "agl_alt", "else", ":", "try", ":", "agl_alt_home", "=", "self", ".", "console", ".", "ElevationMap", ".", "GetElevation", "(", "home_lat", ",", "home_lng", ")", "except", "Exception", "as", "ex", ":", "print", "(", "ex", ")", "agl_alt_home", "=", "None", "if", "agl_alt_home", "is", "not", "None", ":", "agl_alt", "=", "self", ".", "console", ".", "ElevationMap", ".", "GetElevation", "(", "lat", ",", "lng", ")", "if", "agl_alt", "is", "not", "None", ":", "agl_alt", "=", "agl_alt_home", "-", "agl_alt", "if", "agl_alt", "is", 
"not", "None", ":", "agl_alt", "+=", "rel_alt", "vehicle_agl", "=", "master", ".", "field", "(", "'TERRAIN_REPORT'", ",", "'current_height'", ",", "None", ")", "if", "vehicle_agl", "is", "None", ":", "vehicle_agl", "=", "'---'", "else", ":", "vehicle_agl", "=", "int", "(", "vehicle_agl", ")", "self", ".", "console", ".", "set_status", "(", "'AGL'", ",", "'AGL %u/%s'", "%", "(", "agl_alt", ",", "vehicle_agl", ")", ")", "self", ".", "console", ".", "set_status", "(", "'Alt'", ",", "'Alt %u'", "%", "rel_alt", ")", "self", ".", "console", ".", "set_status", "(", "'AirSpeed'", ",", "'AirSpeed %u'", "%", "msg", ".", "airspeed", ")", "self", ".", "console", ".", "set_status", "(", "'GPSSpeed'", ",", "'GPSSpeed %u'", "%", "msg", ".", "groundspeed", ")", "self", ".", "console", ".", "set_status", "(", "'Thr'", ",", "'Thr %u'", "%", "msg", ".", "throttle", ")", "t", "=", "time", ".", "localtime", "(", "msg", ".", "_timestamp", ")", "flying", "=", "False", "if", "self", ".", "mpstate", ".", "vehicle_type", "==", "'copter'", ":", "flying", "=", "self", ".", "master", ".", "motors_armed", "(", ")", "else", ":", "flying", "=", "msg", ".", "groundspeed", ">", "3", "if", "flying", "and", "not", "self", ".", "in_air", ":", "self", ".", "in_air", "=", "True", "self", ".", "start_time", "=", "time", ".", "mktime", "(", "t", ")", "elif", "flying", "and", "self", ".", "in_air", ":", "self", ".", "total_time", "=", "time", ".", "mktime", "(", "t", ")", "-", "self", ".", "start_time", "self", ".", "console", ".", "set_status", "(", "'FlightTime'", ",", "'FlightTime %u:%02u'", "%", "(", "int", "(", "self", ".", "total_time", ")", "/", "60", ",", "int", "(", "self", ".", "total_time", ")", "%", "60", ")", ")", "elif", "not", "flying", "and", "self", ".", "in_air", ":", "self", ".", "in_air", "=", "False", "self", ".", "total_time", "=", "time", ".", "mktime", "(", "t", ")", "-", "self", ".", "start_time", "self", ".", "console", ".", "set_status", "(", "'FlightTime'", ",", "'FlightTime 
%u:%02u'", "%", "(", "int", "(", "self", ".", "total_time", ")", "/", "60", ",", "int", "(", "self", ".", "total_time", ")", "%", "60", ")", ")", "elif", "type", "==", "'ATTITUDE'", ":", "self", ".", "console", ".", "set_status", "(", "'Roll'", ",", "'Roll %u'", "%", "math", ".", "degrees", "(", "msg", ".", "roll", ")", ")", "self", ".", "console", ".", "set_status", "(", "'Pitch'", ",", "'Pitch %u'", "%", "math", ".", "degrees", "(", "msg", ".", "pitch", ")", ")", "elif", "type", "in", "[", "'SYS_STATUS'", "]", ":", "sensors", "=", "{", "'AS'", ":", "mavutil", ".", "mavlink", ".", "MAV_SYS_STATUS_SENSOR_DIFFERENTIAL_PRESSURE", ",", "'MAG'", ":", "mavutil", ".", "mavlink", ".", "MAV_SYS_STATUS_SENSOR_3D_MAG", ",", "'INS'", ":", "mavutil", ".", "mavlink", ".", "MAV_SYS_STATUS_SENSOR_3D_ACCEL", "|", "mavutil", ".", "mavlink", ".", "MAV_SYS_STATUS_SENSOR_3D_GYRO", ",", "'AHRS'", ":", "mavutil", ".", "mavlink", ".", "MAV_SYS_STATUS_AHRS", ",", "'RC'", ":", "mavutil", ".", "mavlink", ".", "MAV_SYS_STATUS_SENSOR_RC_RECEIVER", ",", "'TERR'", ":", "mavutil", ".", "mavlink", ".", "MAV_SYS_STATUS_TERRAIN", ",", "'RNG'", ":", "mavutil", ".", "mavlink", ".", "MAV_SYS_STATUS_SENSOR_LASER_POSITION", "}", "announce", "=", "[", "'RC'", "]", "for", "s", "in", "sensors", ".", "keys", "(", ")", ":", "bits", "=", "sensors", "[", "s", "]", "present", "=", "(", "(", "msg", ".", "onboard_control_sensors_enabled", "&", "bits", ")", "==", "bits", ")", "healthy", "=", "(", "(", "msg", ".", "onboard_control_sensors_health", "&", "bits", ")", "==", "bits", ")", "if", "not", "present", ":", "fg", "=", "'grey'", "elif", "not", "healthy", ":", "fg", "=", "'red'", "else", ":", "fg", "=", "'green'", "# for terrain show yellow if still loading", "if", "s", "==", "'TERR'", "and", "fg", "==", "'green'", "and", "master", ".", "field", "(", "'TERRAIN_REPORT'", ",", "'pending'", ",", "0", ")", "!=", "0", ":", "fg", "=", "'yellow'", "self", ".", "console", ".", "set_status", "(", "s", ",", "s", ",", 
"fg", "=", "fg", ")", "for", "s", "in", "announce", ":", "bits", "=", "sensors", "[", "s", "]", "present", "=", "(", "(", "msg", ".", "onboard_control_sensors_enabled", "&", "bits", ")", "==", "bits", ")", "healthy", "=", "(", "(", "msg", ".", "onboard_control_sensors_health", "&", "bits", ")", "==", "bits", ")", "was_healthy", "=", "(", "(", "self", ".", "last_sys_status_health", "&", "bits", ")", "==", "bits", ")", "if", "present", "and", "not", "healthy", "and", "was_healthy", ":", "self", ".", "say", "(", "\"%s fail\"", "%", "s", ")", "self", ".", "last_sys_status_health", "=", "msg", ".", "onboard_control_sensors_health", "elif", "type", "==", "'WIND'", ":", "self", ".", "console", ".", "set_status", "(", "'Wind'", ",", "'Wind %u/%.2f'", "%", "(", "msg", ".", "direction", ",", "msg", ".", "speed", ")", ")", "elif", "type", "==", "'EKF_STATUS_REPORT'", ":", "highest", "=", "0.0", "vars", "=", "[", "'velocity_variance'", ",", "'pos_horiz_variance'", ",", "'pos_vert_variance'", ",", "'compass_variance'", ",", "'terrain_alt_variance'", "]", "for", "var", "in", "vars", ":", "v", "=", "getattr", "(", "msg", ",", "var", ",", "0", ")", "highest", "=", "max", "(", "v", ",", "highest", ")", "if", "highest", ">=", "1.0", ":", "fg", "=", "'red'", "elif", "highest", ">=", "0.5", ":", "fg", "=", "'orange'", "else", ":", "fg", "=", "'green'", "self", ".", "console", ".", "set_status", "(", "'EKF'", ",", "'EKF'", ",", "fg", "=", "fg", ")", "elif", "type", "==", "'HWSTATUS'", ":", "if", "msg", ".", "Vcc", ">=", "4600", "and", "msg", ".", "Vcc", "<=", "5300", ":", "fg", "=", "'green'", "else", ":", "fg", "=", "'red'", "self", ".", "console", ".", "set_status", "(", "'Vcc'", ",", "'Vcc %.2f'", "%", "(", "msg", ".", "Vcc", "*", "0.001", ")", ",", "fg", "=", "fg", ")", "elif", "type", "==", "'POWER_STATUS'", ":", "if", "msg", ".", "flags", "&", "mavutil", ".", "mavlink", ".", "MAV_POWER_STATUS_CHANGED", ":", "fg", "=", "'red'", "else", ":", "fg", "=", "'green'", "status", "=", 
"'PWR:'", "if", "msg", ".", "flags", "&", "mavutil", ".", "mavlink", ".", "MAV_POWER_STATUS_USB_CONNECTED", ":", "status", "+=", "'U'", "if", "msg", ".", "flags", "&", "mavutil", ".", "mavlink", ".", "MAV_POWER_STATUS_BRICK_VALID", ":", "status", "+=", "'B'", "if", "msg", ".", "flags", "&", "mavutil", ".", "mavlink", ".", "MAV_POWER_STATUS_SERVO_VALID", ":", "status", "+=", "'S'", "if", "msg", ".", "flags", "&", "mavutil", ".", "mavlink", ".", "MAV_POWER_STATUS_PERIPH_OVERCURRENT", ":", "status", "+=", "'O1'", "if", "msg", ".", "flags", "&", "mavutil", ".", "mavlink", ".", "MAV_POWER_STATUS_PERIPH_HIPOWER_OVERCURRENT", ":", "status", "+=", "'O2'", "self", ".", "console", ".", "set_status", "(", "'PWR'", ",", "status", ",", "fg", "=", "fg", ")", "self", ".", "console", ".", "set_status", "(", "'Srv'", ",", "'Srv %.2f'", "%", "(", "msg", ".", "Vservo", "*", "0.001", ")", ",", "fg", "=", "'green'", ")", "elif", "type", "in", "[", "'RADIO'", ",", "'RADIO_STATUS'", "]", ":", "if", "msg", ".", "rssi", "<", "msg", ".", "noise", "+", "10", "or", "msg", ".", "remrssi", "<", "msg", ".", "remnoise", "+", "10", ":", "fg", "=", "'red'", "else", ":", "fg", "=", "'black'", "self", ".", "console", ".", "set_status", "(", "'Radio'", ",", "'Radio %u/%u %u/%u'", "%", "(", "msg", ".", "rssi", ",", "msg", ".", "noise", ",", "msg", ".", "remrssi", ",", "msg", ".", "remnoise", ")", ",", "fg", "=", "fg", ")", "elif", "type", "==", "'HEARTBEAT'", ":", "self", ".", "console", ".", "set_status", "(", "'Mode'", ",", "'%s'", "%", "master", ".", "flightmode", ",", "fg", "=", "'blue'", ")", "if", "self", ".", "master", ".", "motors_armed", "(", ")", ":", "arm_colour", "=", "'green'", "else", ":", "arm_colour", "=", "'red'", "self", ".", "console", ".", "set_status", "(", "'ARM'", ",", "'ARM'", ",", "fg", "=", "arm_colour", ")", "if", "self", ".", "max_link_num", "!=", "len", "(", "self", ".", "mpstate", ".", "mav_master", ")", ":", "for", "i", "in", "range", "(", "self", ".", "max_link_num", 
")", ":", "self", ".", "console", ".", "set_status", "(", "'Link%u'", "%", "(", "i", "+", "1", ")", ",", "''", ",", "row", "=", "1", ")", "self", ".", "max_link_num", "=", "len", "(", "self", ".", "mpstate", ".", "mav_master", ")", "for", "m", "in", "self", ".", "mpstate", ".", "mav_master", ":", "linkdelay", "=", "(", "self", ".", "mpstate", ".", "status", ".", "highest_msec", "-", "m", ".", "highest_msec", ")", "*", "1.0e-3", "linkline", "=", "\"Link %u \"", "%", "(", "m", ".", "linknum", "+", "1", ")", "if", "m", ".", "linkerror", ":", "linkline", "+=", "\"down\"", "fg", "=", "'red'", "else", ":", "packets_rcvd_percentage", "=", "100", "if", "(", "m", ".", "mav_loss", "!=", "0", ")", ":", "#avoid divide-by-zero", "packets_rcvd_percentage", "=", "(", "1.0", "-", "(", "float", "(", "m", ".", "mav_loss", ")", "/", "float", "(", "m", ".", "mav_count", ")", ")", ")", "*", "100.0", "linkline", "+=", "\"OK (%u pkts, %.2fs delay, %u lost) %u%%\"", "%", "(", "m", ".", "mav_count", ",", "linkdelay", ",", "m", ".", "mav_loss", ",", "packets_rcvd_percentage", ")", "if", "linkdelay", ">", "1", ":", "fg", "=", "'orange'", "else", ":", "fg", "=", "'dark green'", "self", ".", "console", ".", "set_status", "(", "'Link%u'", "%", "m", ".", "linknum", ",", "linkline", ",", "row", "=", "1", ",", "fg", "=", "fg", ")", "elif", "type", "in", "[", "'WAYPOINT_CURRENT'", ",", "'MISSION_CURRENT'", "]", ":", "self", ".", "console", ".", "set_status", "(", "'WP'", ",", "'WP %u'", "%", "msg", ".", "seq", ")", "lat", "=", "master", ".", "field", "(", "'GLOBAL_POSITION_INT'", ",", "'lat'", ",", "0", ")", "*", "1.0e-7", "lng", "=", "master", ".", "field", "(", "'GLOBAL_POSITION_INT'", ",", "'lon'", ",", "0", ")", "*", "1.0e-7", "if", "lat", "!=", "0", "and", "lng", "!=", "0", ":", "airspeed", "=", "master", ".", "field", "(", "'VFR_HUD'", ",", "'airspeed'", ",", "30", ")", "if", "abs", "(", "airspeed", "-", "self", ".", "speed", ")", ">", "5", ":", "self", ".", "speed", "=", "airspeed", "else", 
":", "self", ".", "speed", "=", "0.98", "*", "self", ".", "speed", "+", "0.02", "*", "airspeed", "self", ".", "speed", "=", "max", "(", "1", ",", "self", ".", "speed", ")", "time_remaining", "=", "int", "(", "self", ".", "estimated_time_remaining", "(", "lat", ",", "lng", ",", "msg", ".", "seq", ",", "self", ".", "speed", ")", ")", "self", ".", "console", ".", "set_status", "(", "'ETR'", ",", "'ETR %u:%02u'", "%", "(", "time_remaining", "/", "60", ",", "time_remaining", "%", "60", ")", ")", "elif", "type", "==", "'NAV_CONTROLLER_OUTPUT'", ":", "self", ".", "console", ".", "set_status", "(", "'WPDist'", ",", "'Distance %u'", "%", "msg", ".", "wp_dist", ")", "self", ".", "console", ".", "set_status", "(", "'WPBearing'", ",", "'Bearing %u'", "%", "msg", ".", "target_bearing", ")", "if", "msg", ".", "alt_error", ">", "0", ":", "alt_error_sign", "=", "\"L\"", "else", ":", "alt_error_sign", "=", "\"H\"", "if", "msg", ".", "aspd_error", ">", "0", ":", "aspd_error_sign", "=", "\"L\"", "else", ":", "aspd_error_sign", "=", "\"H\"", "self", ".", "console", ".", "set_status", "(", "'AltError'", ",", "'AltError %d%s'", "%", "(", "msg", ".", "alt_error", ",", "alt_error_sign", ")", ")", "self", ".", "console", ".", "set_status", "(", "'AspdError'", ",", "'AspdError %.1f%s'", "%", "(", "msg", ".", "aspd_error", "*", "0.01", ",", "aspd_error_sign", ")", ")" ]
49.886957
21.434783
def get_response(self): """Get the original response of requests""" request = getattr(requests, self.request_method, None) if request is None and self._request_method is None: raise ValueError("A effective http request method must be set") if self.request_url is None: raise ValueError( "Fatal error occurred, the class property \"request_url\" is" "set to None, reset it with an effective url of dingtalk api." ) response = request(self.request_url, **self.kwargs) self.response = response return response
[ "def", "get_response", "(", "self", ")", ":", "request", "=", "getattr", "(", "requests", ",", "self", ".", "request_method", ",", "None", ")", "if", "request", "is", "None", "and", "self", ".", "_request_method", "is", "None", ":", "raise", "ValueError", "(", "\"A effective http request method must be set\"", ")", "if", "self", ".", "request_url", "is", "None", ":", "raise", "ValueError", "(", "\"Fatal error occurred, the class property \\\"request_url\\\" is\"", "\"set to None, reset it with an effective url of dingtalk api.\"", ")", "response", "=", "request", "(", "self", ".", "request_url", ",", "*", "*", "self", ".", "kwargs", ")", "self", ".", "response", "=", "response", "return", "response" ]
48.461538
19.692308
def already_downloaded(filename): """ Verify that the file has not already been downloaded. """ cur_file = os.path.join(c.bview_dir, filename) old_file = os.path.join(c.bview_dir, 'old', filename) if not os.path.exists(cur_file) and not os.path.exists(old_file): return False return True
[ "def", "already_downloaded", "(", "filename", ")", ":", "cur_file", "=", "os", ".", "path", ".", "join", "(", "c", ".", "bview_dir", ",", "filename", ")", "old_file", "=", "os", ".", "path", ".", "join", "(", "c", ".", "bview_dir", ",", "'old'", ",", "filename", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "cur_file", ")", "and", "not", "os", ".", "path", ".", "exists", "(", "old_file", ")", ":", "return", "False", "return", "True" ]
35.444444
14.333333
def create_feature_map(features, feature_indices, output_dir): """Returns feature_map about the transformed features. feature_map includes information such as: 1, cat1=0 2, cat1=1 3, numeric1 ... Returns: List in the from [(index, feature_description)] """ feature_map = [] for name, info in feature_indices: transform_name = features[name]['transform'] source_column = features[name]['source_column'] if transform_name in [IDENTITY_TRANSFORM, SCALE_TRANSFORM]: feature_map.append((info['index_start'], name)) elif transform_name in [ONE_HOT_TRANSFORM, MULTI_HOT_TRANSFORM]: vocab, _ = read_vocab_file( os.path.join(output_dir, VOCAB_ANALYSIS_FILE % source_column)) for i, word in enumerate(vocab): if transform_name == ONE_HOT_TRANSFORM: feature_map.append((info['index_start'] + i, '%s=%s' % (source_column, word))) elif transform_name == MULTI_HOT_TRANSFORM: feature_map.append((info['index_start'] + i, '%s has "%s"' % (source_column, word))) elif transform_name == IMAGE_TRANSFORM: for i in range(info['size']): feature_map.append((info['index_start'] + i, '%s image feature %d' % (source_column, i))) return feature_map
[ "def", "create_feature_map", "(", "features", ",", "feature_indices", ",", "output_dir", ")", ":", "feature_map", "=", "[", "]", "for", "name", ",", "info", "in", "feature_indices", ":", "transform_name", "=", "features", "[", "name", "]", "[", "'transform'", "]", "source_column", "=", "features", "[", "name", "]", "[", "'source_column'", "]", "if", "transform_name", "in", "[", "IDENTITY_TRANSFORM", ",", "SCALE_TRANSFORM", "]", ":", "feature_map", ".", "append", "(", "(", "info", "[", "'index_start'", "]", ",", "name", ")", ")", "elif", "transform_name", "in", "[", "ONE_HOT_TRANSFORM", ",", "MULTI_HOT_TRANSFORM", "]", ":", "vocab", ",", "_", "=", "read_vocab_file", "(", "os", ".", "path", ".", "join", "(", "output_dir", ",", "VOCAB_ANALYSIS_FILE", "%", "source_column", ")", ")", "for", "i", ",", "word", "in", "enumerate", "(", "vocab", ")", ":", "if", "transform_name", "==", "ONE_HOT_TRANSFORM", ":", "feature_map", ".", "append", "(", "(", "info", "[", "'index_start'", "]", "+", "i", ",", "'%s=%s'", "%", "(", "source_column", ",", "word", ")", ")", ")", "elif", "transform_name", "==", "MULTI_HOT_TRANSFORM", ":", "feature_map", ".", "append", "(", "(", "info", "[", "'index_start'", "]", "+", "i", ",", "'%s has \"%s\"'", "%", "(", "source_column", ",", "word", ")", ")", ")", "elif", "transform_name", "==", "IMAGE_TRANSFORM", ":", "for", "i", "in", "range", "(", "info", "[", "'size'", "]", ")", ":", "feature_map", ".", "append", "(", "(", "info", "[", "'index_start'", "]", "+", "i", ",", "'%s image feature %d'", "%", "(", "source_column", ",", "i", ")", ")", ")", "return", "feature_map" ]
39.774194
20.258065
def read_single_xso(src, type_): """ Read a single :class:`~.XSO` of the given `type_` from the binary file-like input `src` and return the instance. """ result = None def cb(instance): nonlocal result result = instance read_xso(src, {type_: cb}) return result
[ "def", "read_single_xso", "(", "src", ",", "type_", ")", ":", "result", "=", "None", "def", "cb", "(", "instance", ")", ":", "nonlocal", "result", "result", "=", "instance", "read_xso", "(", "src", ",", "{", "type_", ":", "cb", "}", ")", "return", "result" ]
19.866667
20.933333
def getWorkerQte(hosts): """Return the number of workers to launch depending on the environment""" if "SLURM_NTASKS" in os.environ: return int(os.environ["SLURM_NTASKS"]) elif "PBS_NP" in os.environ: return int(os.environ["PBS_NP"]) elif "NSLOTS" in os.environ: return int(os.environ["NSLOTS"]) else: return sum(host[1] for host in hosts)
[ "def", "getWorkerQte", "(", "hosts", ")", ":", "if", "\"SLURM_NTASKS\"", "in", "os", ".", "environ", ":", "return", "int", "(", "os", ".", "environ", "[", "\"SLURM_NTASKS\"", "]", ")", "elif", "\"PBS_NP\"", "in", "os", ".", "environ", ":", "return", "int", "(", "os", ".", "environ", "[", "\"PBS_NP\"", "]", ")", "elif", "\"NSLOTS\"", "in", "os", ".", "environ", ":", "return", "int", "(", "os", ".", "environ", "[", "\"NSLOTS\"", "]", ")", "else", ":", "return", "sum", "(", "host", "[", "1", "]", "for", "host", "in", "hosts", ")" ]
38.1
7.8
def patch(destination, name=None, settings=None): """Decorator to create a patch. The object being decorated becomes the :attr:`~Patch.obj` attribute of the patch. Parameters ---------- destination : object Patch destination. name : str Name of the attribute at the destination. settings : gorilla.Settings Settings. Returns ------- object The decorated object. See Also -------- :class:`Patch`. """ def decorator(wrapped): base = _get_base(wrapped) name_ = base.__name__ if name is None else name settings_ = copy.deepcopy(settings) patch = Patch(destination, name_, wrapped, settings=settings_) data = get_decorator_data(base, set_default=True) data.patches.append(patch) return wrapped return decorator
[ "def", "patch", "(", "destination", ",", "name", "=", "None", ",", "settings", "=", "None", ")", ":", "def", "decorator", "(", "wrapped", ")", ":", "base", "=", "_get_base", "(", "wrapped", ")", "name_", "=", "base", ".", "__name__", "if", "name", "is", "None", "else", "name", "settings_", "=", "copy", ".", "deepcopy", "(", "settings", ")", "patch", "=", "Patch", "(", "destination", ",", "name_", ",", "wrapped", ",", "settings", "=", "settings_", ")", "data", "=", "get_decorator_data", "(", "base", ",", "set_default", "=", "True", ")", "data", ".", "patches", ".", "append", "(", "patch", ")", "return", "wrapped", "return", "decorator" ]
24.5
21.5
def delete(username): """Delete a user. Example: \b ```bash $ polyaxon user delete david ``` """ try: PolyaxonClient().user.delete_user(username) except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e: Printer.print_error('Could not delete user `{}`.'.format(username)) Printer.print_error('Error message `{}`.'.format(e)) sys.exit(1) Printer.print_success("User `{}` was deleted successfully.".format(username))
[ "def", "delete", "(", "username", ")", ":", "try", ":", "PolyaxonClient", "(", ")", ".", "user", ".", "delete_user", "(", "username", ")", "except", "(", "PolyaxonHTTPError", ",", "PolyaxonShouldExitError", ",", "PolyaxonClientException", ")", "as", "e", ":", "Printer", ".", "print_error", "(", "'Could not delete user `{}`.'", ".", "format", "(", "username", ")", ")", "Printer", ".", "print_error", "(", "'Error message `{}`.'", ".", "format", "(", "e", ")", ")", "sys", ".", "exit", "(", "1", ")", "Printer", ".", "print_success", "(", "\"User `{}` was deleted successfully.\"", ".", "format", "(", "username", ")", ")" ]
27.611111
26.5
def fullfill_descendants_info(desc_matrix): ''' flat_offset ''' pathloc_mapping = {} locpath_mapping = {} #def leaf_handler(desc,pdesc,offset): def leaf_handler(desc,pdesc): #desc['flat_offset'] = (offset,offset+1) desc['non_leaf_son_paths'] = [] desc['leaf_son_paths'] = [] desc['non_leaf_descendant_paths'] = [] desc['leaf_descendant_paths'] = [] desc['flat_len'] = 1 if(pdesc['flat_len']): pdesc['flat_len'] = pdesc['flat_len'] + 1 else: pdesc['flat_len'] = 1 #def non_leaf_handler(desc,pdesc,offset): def non_leaf_handler(desc,pdesc): #desc['flat_offset'] = (offset,offset+desc['flat_len']) pdesc['non_leaf_descendant_paths'].extend(copy.deepcopy(desc['non_leaf_descendant_paths'])) pdesc['leaf_descendant_paths'].extend(copy.deepcopy(desc['leaf_descendant_paths'])) if(pdesc['flat_len']): pdesc['flat_len'] = pdesc['flat_len'] + desc['flat_len'] else: pdesc['flat_len'] = desc['flat_len'] def fill_path_mapping(desc): pmk = tuple(desc['path']) pmv = tuple(DescMatrix.loc(desc)) pathloc_mapping[pmk] = pmv locpath_mapping[pmv] = pmk dm = DescMatrix(desc_matrix) depth = desc_matrix.__len__() desc_level = desc_matrix[depth - 1] length = desc_level.__len__() #the last level #offset = 0 for j in range(length - 1,-1,-1): desc = desc_level[j] fill_path_mapping(desc) pdesc = dm.pdesc(desc) leaf_handler(desc,pdesc) #leaf_handler(desc,pdesc,offset) #offset = offset + 1 for i in range(depth-2,0,-1): #offset = 0 desc_level = desc_matrix[i] length = desc_level.__len__() for j in range(length-1,-1,-1): desc = desc_level[j] fill_path_mapping(desc) pdesc = dm.pdesc(desc) if(desc['leaf']): leaf_handler(desc,pdesc) #leaf_handler(desc,pdesc,offset) #offset = offset + 1 else: non_leaf_handler(desc,pdesc) #non_leaf_handler(desc,pdesc,offset) #offset = offset + desc['flat_len'] desc_matrix[0][0]['flat_offset'] = (0,desc_matrix[0][0]['flat_len']) for i in range(0,depth-1): pdesc_level = desc_matrix[i] length = pdesc_level.__len__() for j in range(0,length): pdesc = pdesc_level[j] si = 
pdesc['flat_offset'][0] for i in range(0,pdesc['sons_count']): spl = append(pdesc['path'],i,mode='new') pk = tuple(spl) locx,locy = pathloc_mapping[pk] son = desc_matrix[locx][locy] ei = si + son['flat_len'] son['flat_offset'] = (si,ei) si = ei return(desc_matrix,pathloc_mapping,locpath_mapping)
[ "def", "fullfill_descendants_info", "(", "desc_matrix", ")", ":", "pathloc_mapping", "=", "{", "}", "locpath_mapping", "=", "{", "}", "#def leaf_handler(desc,pdesc,offset):", "def", "leaf_handler", "(", "desc", ",", "pdesc", ")", ":", "#desc['flat_offset'] = (offset,offset+1)", "desc", "[", "'non_leaf_son_paths'", "]", "=", "[", "]", "desc", "[", "'leaf_son_paths'", "]", "=", "[", "]", "desc", "[", "'non_leaf_descendant_paths'", "]", "=", "[", "]", "desc", "[", "'leaf_descendant_paths'", "]", "=", "[", "]", "desc", "[", "'flat_len'", "]", "=", "1", "if", "(", "pdesc", "[", "'flat_len'", "]", ")", ":", "pdesc", "[", "'flat_len'", "]", "=", "pdesc", "[", "'flat_len'", "]", "+", "1", "else", ":", "pdesc", "[", "'flat_len'", "]", "=", "1", "#def non_leaf_handler(desc,pdesc,offset):", "def", "non_leaf_handler", "(", "desc", ",", "pdesc", ")", ":", "#desc['flat_offset'] = (offset,offset+desc['flat_len'])", "pdesc", "[", "'non_leaf_descendant_paths'", "]", ".", "extend", "(", "copy", ".", "deepcopy", "(", "desc", "[", "'non_leaf_descendant_paths'", "]", ")", ")", "pdesc", "[", "'leaf_descendant_paths'", "]", ".", "extend", "(", "copy", ".", "deepcopy", "(", "desc", "[", "'leaf_descendant_paths'", "]", ")", ")", "if", "(", "pdesc", "[", "'flat_len'", "]", ")", ":", "pdesc", "[", "'flat_len'", "]", "=", "pdesc", "[", "'flat_len'", "]", "+", "desc", "[", "'flat_len'", "]", "else", ":", "pdesc", "[", "'flat_len'", "]", "=", "desc", "[", "'flat_len'", "]", "def", "fill_path_mapping", "(", "desc", ")", ":", "pmk", "=", "tuple", "(", "desc", "[", "'path'", "]", ")", "pmv", "=", "tuple", "(", "DescMatrix", ".", "loc", "(", "desc", ")", ")", "pathloc_mapping", "[", "pmk", "]", "=", "pmv", "locpath_mapping", "[", "pmv", "]", "=", "pmk", "dm", "=", "DescMatrix", "(", "desc_matrix", ")", "depth", "=", "desc_matrix", ".", "__len__", "(", ")", "desc_level", "=", "desc_matrix", "[", "depth", "-", "1", "]", "length", "=", "desc_level", ".", "__len__", "(", ")", "#the last level", 
"#offset = 0", "for", "j", "in", "range", "(", "length", "-", "1", ",", "-", "1", ",", "-", "1", ")", ":", "desc", "=", "desc_level", "[", "j", "]", "fill_path_mapping", "(", "desc", ")", "pdesc", "=", "dm", ".", "pdesc", "(", "desc", ")", "leaf_handler", "(", "desc", ",", "pdesc", ")", "#leaf_handler(desc,pdesc,offset)", "#offset = offset + 1", "for", "i", "in", "range", "(", "depth", "-", "2", ",", "0", ",", "-", "1", ")", ":", "#offset = 0", "desc_level", "=", "desc_matrix", "[", "i", "]", "length", "=", "desc_level", ".", "__len__", "(", ")", "for", "j", "in", "range", "(", "length", "-", "1", ",", "-", "1", ",", "-", "1", ")", ":", "desc", "=", "desc_level", "[", "j", "]", "fill_path_mapping", "(", "desc", ")", "pdesc", "=", "dm", ".", "pdesc", "(", "desc", ")", "if", "(", "desc", "[", "'leaf'", "]", ")", ":", "leaf_handler", "(", "desc", ",", "pdesc", ")", "#leaf_handler(desc,pdesc,offset)", "#offset = offset + 1", "else", ":", "non_leaf_handler", "(", "desc", ",", "pdesc", ")", "#non_leaf_handler(desc,pdesc,offset)", "#offset = offset + desc['flat_len']", "desc_matrix", "[", "0", "]", "[", "0", "]", "[", "'flat_offset'", "]", "=", "(", "0", ",", "desc_matrix", "[", "0", "]", "[", "0", "]", "[", "'flat_len'", "]", ")", "for", "i", "in", "range", "(", "0", ",", "depth", "-", "1", ")", ":", "pdesc_level", "=", "desc_matrix", "[", "i", "]", "length", "=", "pdesc_level", ".", "__len__", "(", ")", "for", "j", "in", "range", "(", "0", ",", "length", ")", ":", "pdesc", "=", "pdesc_level", "[", "j", "]", "si", "=", "pdesc", "[", "'flat_offset'", "]", "[", "0", "]", "for", "i", "in", "range", "(", "0", ",", "pdesc", "[", "'sons_count'", "]", ")", ":", "spl", "=", "append", "(", "pdesc", "[", "'path'", "]", ",", "i", ",", "mode", "=", "'new'", ")", "pk", "=", "tuple", "(", "spl", ")", "locx", ",", "locy", "=", "pathloc_mapping", "[", "pk", "]", "son", "=", "desc_matrix", "[", "locx", "]", "[", "locy", "]", "ei", "=", "si", "+", "son", "[", "'flat_len'", "]", "son", 
"[", "'flat_offset'", "]", "=", "(", "si", ",", "ei", ")", "si", "=", "ei", "return", "(", "desc_matrix", ",", "pathloc_mapping", ",", "locpath_mapping", ")" ]
37.467532
11.181818
def read_sif(cls, path): """ Creates a graph from a `simple interaction format (SIF)`_ file Parameters ---------- path : str Absolute path to a SIF file Returns ------- caspo.core.graph.Graph Created object instance .. _simple interaction format (SIF): http://wiki.cytoscape.org/Cytoscape_User_Manual/Network_Formats """ df = pd.read_csv(path, delim_whitespace=True, names=['source', 'sign', 'target']).drop_duplicates() edges = [(source, target, {'sign': sign}) for _, source, sign, target in df.itertuples()] return cls(data=edges)
[ "def", "read_sif", "(", "cls", ",", "path", ")", ":", "df", "=", "pd", ".", "read_csv", "(", "path", ",", "delim_whitespace", "=", "True", ",", "names", "=", "[", "'source'", ",", "'sign'", ",", "'target'", "]", ")", ".", "drop_duplicates", "(", ")", "edges", "=", "[", "(", "source", ",", "target", ",", "{", "'sign'", ":", "sign", "}", ")", "for", "_", ",", "source", ",", "sign", ",", "target", "in", "df", ".", "itertuples", "(", ")", "]", "return", "cls", "(", "data", "=", "edges", ")" ]
32.3
27
def send(tag, data=None): ''' Send an event with the given tag and data. This is useful for sending events directly to the master from the shell with salt-run. It is also quite useful for sending events in orchestration states where the ``fire_event`` requisite isn't sufficient because it does not support sending custom data with the event. Note that event tags will *not* be namespaced like events sent with the ``fire_event`` requisite! Whereas events produced from ``fire_event`` are prefixed with ``salt/state_result/<jid>/<minion_id>/<name>``, events sent using this runner module will have no such prefix. Make sure your reactors don't expect a prefix! :param tag: the tag to send with the event :param data: an optional dictionary of data to send with the event CLI Example: .. code-block:: bash salt-run event.send my/custom/event '{"foo": "bar"}' Orchestration Example: .. code-block:: yaml # orch/command.sls run_a_command: salt.function: - name: cmd.run - tgt: my_minion - arg: - exit {{ pillar['exit_code'] }} send_success_event: salt.runner: - name: event.send - tag: my_event/success - data: foo: bar - require: - salt: run_a_command send_failure_event: salt.runner: - name: event.send - tag: my_event/failure - data: baz: qux - onfail: - salt: run_a_command .. code-block:: bash salt-run state.orchestrate orch.command pillar='{"exit_code": 0}' salt-run state.orchestrate orch.command pillar='{"exit_code": 1}' ''' data = data or {} event = salt.utils.event.get_master_event(__opts__, __opts__['sock_dir'], listen=False) return event.fire_event(data, tag)
[ "def", "send", "(", "tag", ",", "data", "=", "None", ")", ":", "data", "=", "data", "or", "{", "}", "event", "=", "salt", ".", "utils", ".", "event", ".", "get_master_event", "(", "__opts__", ",", "__opts__", "[", "'sock_dir'", "]", ",", "listen", "=", "False", ")", "return", "event", ".", "fire_event", "(", "data", ",", "tag", ")" ]
30.453125
23.890625
def surface_area(self): r"""Calculate all atomic surface area. :rtype: [float] """ return [self.atomic_sa(i) for i in range(len(self.rads))]
[ "def", "surface_area", "(", "self", ")", ":", "return", "[", "self", ".", "atomic_sa", "(", "i", ")", "for", "i", "in", "range", "(", "len", "(", "self", ".", "rads", ")", ")", "]" ]
28
16.5
def get_serializer(serializer_format): """ Get the serializer for a specific format """ if serializer_format == Format.JSON: return _serialize_json if serializer_format == Format.PICKLE: return _serialize_pickle
[ "def", "get_serializer", "(", "serializer_format", ")", ":", "if", "serializer_format", "==", "Format", ".", "JSON", ":", "return", "_serialize_json", "if", "serializer_format", "==", "Format", ".", "PICKLE", ":", "return", "_serialize_pickle" ]
39
3.666667
def pypsa_id(self): #TODO: docstring """ Description """ return '_'.join(['MV', str( self.grid.grid_district.lv_load_area.mv_grid_district.mv_grid.\ id_db), 'tru', str(self.id_db)])
[ "def", "pypsa_id", "(", "self", ")", ":", "#TODO: docstring", "return", "'_'", ".", "join", "(", "[", "'MV'", ",", "str", "(", "self", ".", "grid", ".", "grid_district", ".", "lv_load_area", ".", "mv_grid_district", ".", "mv_grid", ".", "id_db", ")", ",", "'tru'", ",", "str", "(", "self", ".", "id_db", ")", "]", ")" ]
34.142857
12.142857
def install(client, force): """Install Git hooks.""" import pkg_resources from git.index.fun import hook_path as get_hook_path for hook in HOOKS: hook_path = Path(get_hook_path(hook, client.repo.git_dir)) if hook_path.exists(): if not force: click.echo( 'Hook already exists. Skipping {0}'.format(str(hook_path)), err=True ) continue else: hook_path.unlink() # Make sure the hooks directory exists. hook_path.parent.mkdir(parents=True, exist_ok=True) Path(hook_path).write_bytes( pkg_resources.resource_string( 'renku.data', '{hook}.sh'.format(hook=hook) ) ) hook_path.chmod(hook_path.stat().st_mode | stat.S_IEXEC)
[ "def", "install", "(", "client", ",", "force", ")", ":", "import", "pkg_resources", "from", "git", ".", "index", ".", "fun", "import", "hook_path", "as", "get_hook_path", "for", "hook", "in", "HOOKS", ":", "hook_path", "=", "Path", "(", "get_hook_path", "(", "hook", ",", "client", ".", "repo", ".", "git_dir", ")", ")", "if", "hook_path", ".", "exists", "(", ")", ":", "if", "not", "force", ":", "click", ".", "echo", "(", "'Hook already exists. Skipping {0}'", ".", "format", "(", "str", "(", "hook_path", ")", ")", ",", "err", "=", "True", ")", "continue", "else", ":", "hook_path", ".", "unlink", "(", ")", "# Make sure the hooks directory exists.", "hook_path", ".", "parent", ".", "mkdir", "(", "parents", "=", "True", ",", "exist_ok", "=", "True", ")", "Path", "(", "hook_path", ")", ".", "write_bytes", "(", "pkg_resources", ".", "resource_string", "(", "'renku.data'", ",", "'{hook}.sh'", ".", "format", "(", "hook", "=", "hook", ")", ")", ")", "hook_path", ".", "chmod", "(", "hook_path", ".", "stat", "(", ")", ".", "st_mode", "|", "stat", ".", "S_IEXEC", ")" ]
32.038462
19.192308
def _adapt_WSDateTime(dt): """Return unix timestamp of the datetime like input. If conversion overflows high, return sint64_max , if underflows, return 0 """ try: ts = int( (dt.replace(tzinfo=pytz.utc) - datetime(1970,1,1,tzinfo=pytz.utc) ).total_seconds() ) except (OverflowError,OSError): if dt < datetime.now(): ts = 0 else: ts = 2**63-1 return ts
[ "def", "_adapt_WSDateTime", "(", "dt", ")", ":", "try", ":", "ts", "=", "int", "(", "(", "dt", ".", "replace", "(", "tzinfo", "=", "pytz", ".", "utc", ")", "-", "datetime", "(", "1970", ",", "1", ",", "1", ",", "tzinfo", "=", "pytz", ".", "utc", ")", ")", ".", "total_seconds", "(", ")", ")", "except", "(", "OverflowError", ",", "OSError", ")", ":", "if", "dt", "<", "datetime", ".", "now", "(", ")", ":", "ts", "=", "0", "else", ":", "ts", "=", "2", "**", "63", "-", "1", "return", "ts" ]
26.705882
14.764706
def is_in(self, point_x, point_y): """ Test if the point is within this ellipse """ x = self.x_origin y = self.y_origin a = self.e_width#/2 # FIXME: Why divide by two b = self.e_height#/2 return ((point_x-x)**2/(a**2)) + ((point_y-y)**2/(b**2)) < 1.0
[ "def", "is_in", "(", "self", ",", "point_x", ",", "point_y", ")", ":", "x", "=", "self", ".", "x_origin", "y", "=", "self", ".", "y_origin", "a", "=", "self", ".", "e_width", "#/2 # FIXME: Why divide by two", "b", "=", "self", ".", "e_height", "#/2", "return", "(", "(", "point_x", "-", "x", ")", "**", "2", "/", "(", "a", "**", "2", ")", ")", "+", "(", "(", "point_y", "-", "y", ")", "**", "2", "/", "(", "b", "**", "2", ")", ")", "<", "1.0" ]
32.444444
19.111111
def get_value_index(self, indices): """Converts a list of dimensions’ indices into a numeric value index. Args: indices(list): list of dimension's indices. Returns: num(int): numeric value index. """ size = self['size'] if self.get('size') else self['dimension']['size'] ndims = len(size) mult = 1 num = 0 for idx, dim in enumerate(size): mult *= size[ndims - idx] if (idx > 0) else 1 num += mult * indices[ndims - idx - 1] return num
[ "def", "get_value_index", "(", "self", ",", "indices", ")", ":", "size", "=", "self", "[", "'size'", "]", "if", "self", ".", "get", "(", "'size'", ")", "else", "self", "[", "'dimension'", "]", "[", "'size'", "]", "ndims", "=", "len", "(", "size", ")", "mult", "=", "1", "num", "=", "0", "for", "idx", ",", "dim", "in", "enumerate", "(", "size", ")", ":", "mult", "*=", "size", "[", "ndims", "-", "idx", "]", "if", "(", "idx", ">", "0", ")", "else", "1", "num", "+=", "mult", "*", "indices", "[", "ndims", "-", "idx", "-", "1", "]", "return", "num" ]
30.388889
19.055556
def load_cookie(cls, request, key='session', secret_key=None): """Loads a :class:`SecureCookie` from a cookie in request. If the cookie is not set, a new :class:`SecureCookie` instanced is returned. :param request: a request object that has a `cookies` attribute which is a dict of all cookie values. :param key: the name of the cookie. :param secret_key: the secret key used to unquote the cookie. Always provide the value even though it has no default! """ data = request.cookies.get(key) if not data: return cls(secret_key=secret_key) return cls.unserialize(data, secret_key)
[ "def", "load_cookie", "(", "cls", ",", "request", ",", "key", "=", "'session'", ",", "secret_key", "=", "None", ")", ":", "data", "=", "request", ".", "cookies", ".", "get", "(", "key", ")", "if", "not", "data", ":", "return", "cls", "(", "secret_key", "=", "secret_key", ")", "return", "cls", ".", "unserialize", "(", "data", ",", "secret_key", ")" ]
45.9375
16.375
def validate_log_format(self, log): ''' >>> lc = LogCollector('file=/path/to/file.log:formatter=logagg.formatters.basescript', 30) >>> incomplete_log = {'data' : {'x' : 1, 'y' : 2}, ... 'raw' : 'Not all keys present'} >>> lc.validate_log_format(incomplete_log) 'failed' >>> redundant_log = {'one_invalid_key' : 'Extra information', ... 'data': {'x' : 1, 'y' : 2}, ... 'error': False, ... 'error_tb': '', ... 'event': 'event', ... 'file': '/path/to/file.log', ... 'formatter': 'logagg.formatters.mongodb', ... 'host': 'deepcompute-ThinkPad-E470', ... 'id': '0112358', ... 'level': 'debug', ... 'raw': 'some log line here', ... 'timestamp': '2018-04-07T14:06:17.404818', ... 'type': 'log'} >>> lc.validate_log_format(redundant_log) 'failed' >>> correct_log = {'data': {'x' : 1, 'y' : 2}, ... 'error': False, ... 'error_tb': '', ... 'event': 'event', ... 'file': '/path/to/file.log', ... 'formatter': 'logagg.formatters.mongodb', ... 'host': 'deepcompute-ThinkPad-E470', ... 'id': '0112358', ... 'level': 'debug', ... 'raw': 'some log line here', ... 'timestamp': '2018-04-07T14:06:17.404818', ... 'type': 'log'} >>> lc.validate_log_format(correct_log) 'passed' ''' keys_in_log = set(log) keys_in_log_structure = set(self.LOG_STRUCTURE) try: assert (keys_in_log == keys_in_log_structure) except AssertionError as e: self.log.warning('formatted_log_structure_rejected' , key_not_found = list(keys_in_log_structure-keys_in_log), extra_keys_found = list(keys_in_log-keys_in_log_structure), num_logs=1, type='metric') return 'failed' for key in log: try: assert isinstance(log[key], self.LOG_STRUCTURE[key]) except AssertionError as e: self.log.warning('formatted_log_structure_rejected' , key_datatype_not_matched = key, datatype_expected = type(self.LOG_STRUCTURE[key]), datatype_got = type(log[key]), num_logs=1, type='metric') return 'failed' return 'passed'
[ "def", "validate_log_format", "(", "self", ",", "log", ")", ":", "keys_in_log", "=", "set", "(", "log", ")", "keys_in_log_structure", "=", "set", "(", "self", ".", "LOG_STRUCTURE", ")", "try", ":", "assert", "(", "keys_in_log", "==", "keys_in_log_structure", ")", "except", "AssertionError", "as", "e", ":", "self", ".", "log", ".", "warning", "(", "'formatted_log_structure_rejected'", ",", "key_not_found", "=", "list", "(", "keys_in_log_structure", "-", "keys_in_log", ")", ",", "extra_keys_found", "=", "list", "(", "keys_in_log", "-", "keys_in_log_structure", ")", ",", "num_logs", "=", "1", ",", "type", "=", "'metric'", ")", "return", "'failed'", "for", "key", "in", "log", ":", "try", ":", "assert", "isinstance", "(", "log", "[", "key", "]", ",", "self", ".", "LOG_STRUCTURE", "[", "key", "]", ")", "except", "AssertionError", "as", "e", ":", "self", ".", "log", ".", "warning", "(", "'formatted_log_structure_rejected'", ",", "key_datatype_not_matched", "=", "key", ",", "datatype_expected", "=", "type", "(", "self", ".", "LOG_STRUCTURE", "[", "key", "]", ")", ",", "datatype_got", "=", "type", "(", "log", "[", "key", "]", ")", ",", "num_logs", "=", "1", ",", "type", "=", "'metric'", ")", "return", "'failed'", "return", "'passed'" ]
39.257576
18.5
def create_model(config: dict, output_dir: Optional[str], dataset: AbstractDataset, restore_from: Optional[str]=None) -> AbstractModel:
    """
    Create a model object either from scratch of from the checkpoint in ``resume_dir``.

    Cxflow allows the following scenarios

    1. Create model: leave ``restore_from=None`` and specify ``class``;
    2. Restore model: specify ``restore_from`` which is a backend-specific path to (a directory with) the saved model.

    :param config: config dict with model config
    :param output_dir: path to the training output dir
    :param dataset: dataset object implementing the :py:class:`cxflow.datasets.AbstractDataset` concept
    :param restore_from: from whence the model should be restored (backend-specific information)
    :return: model object
    """
    logging.info('Creating a model')

    # Copy the mapping; works around a ruamel.yaml expansion bug (see #222).
    model_config = dict(config['model'].items())
    assert 'class' in model_config, '`model.class` not present in the config'
    model_module, model_class = parse_fully_qualified_name(model_config['class'])

    # Assemble the constructor kwargs, dropping the meta keys `class`/`name`.
    model_kwargs = {'dataset': dataset, 'log_dir': output_dir,
                    'restore_from': restore_from, **model_config}
    model_kwargs.pop('class')
    model_kwargs.pop('name', None)

    try:
        new_model = create_object(model_module, model_class, kwargs=model_kwargs)
    except (ImportError, AttributeError) as ex:
        if restore_from is None:
            # Training: there is nothing to fall back to.
            raise ImportError('Cannot create model from the specified model module `{}` and class `{}`.'.format(
                model_module, model_class)) from ex
        # Restore cases (resume, predict): retry with the configured fallback.
        logging.warning('Cannot create model from the specified model class `%s`.', model_config['class'])
        assert 'restore_fallback' in model_config, '`model.restore_fallback` not present in the config'
        logging.info('Trying to restore with fallback `%s` instead.', model_config['restore_fallback'])
        try:
            fallback_module, fallback_class = parse_fully_qualified_name(model_config['restore_fallback'])
            new_model = create_object(fallback_module, fallback_class, kwargs=model_kwargs)
        except (ImportError, AttributeError) as ex:
            # The fallback module/class was specified but it fails as well.
            raise ImportError('Cannot create model from the specified restore_fallback `{}`.'.format(
                model_config['restore_fallback'],)) from ex

    logging.info('\t%s created', type(new_model).__name__)
    return new_model
[ "def", "create_model", "(", "config", ":", "dict", ",", "output_dir", ":", "Optional", "[", "str", "]", ",", "dataset", ":", "AbstractDataset", ",", "restore_from", ":", "Optional", "[", "str", "]", "=", "None", ")", "->", "AbstractModel", ":", "logging", ".", "info", "(", "'Creating a model'", ")", "model_config", "=", "config", "[", "'model'", "]", "# workaround for ruamel.yaml expansion bug; see #222", "model_config", "=", "dict", "(", "model_config", ".", "items", "(", ")", ")", "assert", "'class'", "in", "model_config", ",", "'`model.class` not present in the config'", "model_module", ",", "model_class", "=", "parse_fully_qualified_name", "(", "model_config", "[", "'class'", "]", ")", "# create model kwargs (without `class` and `name`)", "model_kwargs", "=", "{", "'dataset'", ":", "dataset", ",", "'log_dir'", ":", "output_dir", ",", "'restore_from'", ":", "restore_from", ",", "*", "*", "model_config", "}", "del", "model_kwargs", "[", "'class'", "]", "if", "'name'", "in", "model_kwargs", ":", "del", "model_kwargs", "[", "'name'", "]", "try", ":", "model", "=", "create_object", "(", "model_module", ",", "model_class", ",", "kwargs", "=", "model_kwargs", ")", "except", "(", "ImportError", ",", "AttributeError", ")", "as", "ex", ":", "if", "restore_from", "is", "None", ":", "# training case", "raise", "ImportError", "(", "'Cannot create model from the specified model module `{}` and class `{}`.'", ".", "format", "(", "model_module", ",", "model_class", ")", ")", "from", "ex", "else", ":", "# restore cases (resume, predict)", "logging", ".", "warning", "(", "'Cannot create model from the specified model class `%s`.'", ",", "model_config", "[", "'class'", "]", ")", "assert", "'restore_fallback'", "in", "model_config", ",", "'`model.restore_fallback` not present in the config'", "logging", ".", "info", "(", "'Trying to restore with fallback `%s` instead.'", ",", "model_config", "[", "'restore_fallback'", "]", ")", "try", ":", "# try fallback class", 
"fallback_module", ",", "fallback_class", "=", "parse_fully_qualified_name", "(", "model_config", "[", "'restore_fallback'", "]", ")", "model", "=", "create_object", "(", "fallback_module", ",", "fallback_class", ",", "kwargs", "=", "model_kwargs", ")", "except", "(", "ImportError", ",", "AttributeError", ")", "as", "ex", ":", "# if fallback module/class specified but it fails", "raise", "ImportError", "(", "'Cannot create model from the specified restore_fallback `{}`.'", ".", "format", "(", "model_config", "[", "'restore_fallback'", "]", ",", ")", ")", "from", "ex", "logging", ".", "info", "(", "'\\t%s created'", ",", "type", "(", "model", ")", ".", "__name__", ")", "return", "model" ]
49.796296
32.944444
def _static(self, target, value): """PHP's "static" """ return 'static ' + self.__p(ast.Assign(targets=[target],value=value))
[ "def", "_static", "(", "self", ",", "target", ",", "value", ")", ":", "return", "'static '", "+", "self", ".", "__p", "(", "ast", ".", "Assign", "(", "targets", "=", "[", "target", "]", ",", "value", "=", "value", ")", ")" ]
29.2
16.8
def substitute_minor_for_major(progression, substitute_index, ignore_suffix=False):
    """Substitute minor chords for its major equivalent.

    'm' and 'm7' suffixes recognized, and ['II', 'III', 'VI'] if there
    is no suffix.

    Examples:
    >>> substitute_minor_for_major(['VI'], 0)
    ['I']
    >>> substitute_minor_for_major(['Vm'], 0)
    ['bVIIM']
    >>> substitute_minor_for_major(['VIm7'], 0)
    ['IM7']
    """
    # Split the chord token into (roman numeral, accidental count, suffix).
    (roman, acc, suff) = parse_string(progression[substitute_index])
    res = []

    # Minor to major substitution
    # NOTE: `and` binds tighter than `or`, so the empty-suffix case only
    # applies to II/III/VI, while the 'm'/'m7' cases apply to any numeral.
    if suff == 'm' or suff == 'm7' or suff == '' and roman in ['II', 'III', 'VI'
            ] or ignore_suffix:
        # Major equivalent: two scale steps up, accidental adjusted so the
        # interval from the original root is a third.
        n = skip(roman, 2)
        a = interval_diff(roman, n, 3) + acc
        if suff == 'm' or ignore_suffix:
            res.append(tuple_to_string((n, a, 'M')))
        elif suff == 'm7' or ignore_suffix:
            # NOTE(review): when ignore_suffix=True, only the first branch of
            # this elif chain can ever run; the `or ignore_suffix` here and
            # below looks unreachable -- confirm intended behavior.
            res.append(tuple_to_string((n, a, 'M7')))
        elif suff == '' or ignore_suffix:
            res.append(tuple_to_string((n, a, '')))
    return res
[ "def", "substitute_minor_for_major", "(", "progression", ",", "substitute_index", ",", "ignore_suffix", "=", "False", ")", ":", "(", "roman", ",", "acc", ",", "suff", ")", "=", "parse_string", "(", "progression", "[", "substitute_index", "]", ")", "res", "=", "[", "]", "# Minor to major substitution", "if", "suff", "==", "'m'", "or", "suff", "==", "'m7'", "or", "suff", "==", "''", "and", "roman", "in", "[", "'II'", ",", "'III'", ",", "'VI'", "]", "or", "ignore_suffix", ":", "n", "=", "skip", "(", "roman", ",", "2", ")", "a", "=", "interval_diff", "(", "roman", ",", "n", ",", "3", ")", "+", "acc", "if", "suff", "==", "'m'", "or", "ignore_suffix", ":", "res", ".", "append", "(", "tuple_to_string", "(", "(", "n", ",", "a", ",", "'M'", ")", ")", ")", "elif", "suff", "==", "'m7'", "or", "ignore_suffix", ":", "res", ".", "append", "(", "tuple_to_string", "(", "(", "n", ",", "a", ",", "'M7'", ")", ")", ")", "elif", "suff", "==", "''", "or", "ignore_suffix", ":", "res", ".", "append", "(", "tuple_to_string", "(", "(", "n", ",", "a", ",", "''", ")", ")", ")", "return", "res" ]
33.7
18.133333
def from_json(cls, json_info):
    """Build a Trial instance from a json string."""
    if json_info is None:
        return None
    # Map TrialRecord keyword -> key in the incoming json dict.
    field_map = (
        ('trial_id', 'trial_id'),
        ('job_id', 'job_id'),
        ('trial_status', 'status'),
        ('start_time', 'start_time'),
        ('params', 'params'),
    )
    return TrialRecord(**{kw: json_info[src] for kw, src in field_map})
[ "def", "from_json", "(", "cls", ",", "json_info", ")", ":", "if", "json_info", "is", "None", ":", "return", "None", "return", "TrialRecord", "(", "trial_id", "=", "json_info", "[", "\"trial_id\"", "]", ",", "job_id", "=", "json_info", "[", "\"job_id\"", "]", ",", "trial_status", "=", "json_info", "[", "\"status\"", "]", ",", "start_time", "=", "json_info", "[", "\"start_time\"", "]", ",", "params", "=", "json_info", "[", "\"params\"", "]", ")" ]
37.8
6.8
def _read_erd(erd_file, begsam, endsam):
    """Read the raw data and return a matrix, converted to microvolts.

    Parameters
    ----------
    erd_file : str
        one of the .erd files to read
    begsam : int
        index of the first sample to read
    endsam : int
        index of the last sample (excluded, per python convention)

    Returns
    -------
    numpy.ndarray
        2d matrix with the data, as read from the file

    Error
    -----
    It checks whether the event byte (the first byte) is x00 as expected. It
    can also be x01, meaning that an event was generated by an external
    trigger. According to the manual, "a photic stimulator is the only
    supported device which generates an external trigger." If the eventbyte is
    something else, it throws an error.

    Notes
    -----
    Each sample point consists of these parts:

    - Event Byte
    - Frequency byte (only if file_schema >= 8 and one chan has != freq)
    - Delta mask (only if file_schema >= 8)
    - Delta Information
    - Absolute Channel Values

    Event Byte:
    Bit 0 of the event byte indicates the presence of the external trigger
    during the sample period. It's very rare.

    Delta Mask:
    Bit-mask of a size int( number_of_channels / 8 + 0.5). Each 1 in the mask
    indicates that corresponding channel has 2*n bit delta, 0 means that
    corresponding channel has n bit delta. The rest of the byte of the delta
    mask is filled with "1". If file_schema <= 7, it generates a "fake" delta,
    where everything is 0.

    Some channels are shorted (i.e. not recorded), however they are stored in
    a non-intuitive way: deltamask takes them into account, but for the rest
    they are never used/recorded. So, we need to keep track both of all the
    channels (including the non-shorted) and of the actual channels only.
    When we save the data as memory-mapped, we only save the real channels.
    However, the data in the output have both shorted and non-shorted
    channels. Shorted channels have NaN's only.

    About the actual implementation, we always follow the python convention
    that the first sample is included and the last sample is not.
    """
    hdr = _read_hdr_file(erd_file)
    n_allchan = hdr['num_channels']
    shorted = hdr['shorted']  # does this exist for Schema 7 at all?
    n_shorted = sum(shorted)
    if n_shorted > 0:
        raise NotImplementedError('shorted channels not tested yet')

    # Absolute-value marker in the delta stream depends on the file schema.
    # NOTE(review): abs_delta stays unbound for schemas other than 7/8/9 and
    # would raise NameError below -- presumably such files never occur here.
    if hdr['file_schema'] in (7,):
        abs_delta = b'\x80'  # one byte: 10000000
        raise NotImplementedError('schema 7 not tested yet')
    if hdr['file_schema'] in (8, 9):
        abs_delta = b'\xff\xff'

    # Pre-fill the output with NaN so any unread span stays NaN.
    n_smp = endsam - begsam
    data = empty((n_allchan, n_smp))
    data.fill(NaN)

    # Record boundaries from the .etc sidecar; all_beg/all_end are inclusive
    # on both ends (it includes the sample in both cases).
    etc = _read_etc(erd_file.with_suffix('.etc'))
    all_beg = etc['samplestamp']
    all_end = etc['samplestamp'] + etc['sample_span'] - 1
    try:
        # First record ending at/after begsam, last record starting before endsam.
        begrec = where((all_end >= begsam))[0][0]
        endrec = where((all_beg < endsam))[0][-1]
    except IndexError:
        # Requested span lies entirely outside the recording: all-NaN result.
        return data

    with erd_file.open('rb') as f:
        for rec in range(begrec, endrec + 1):
            # Positions within this record, clipped to the record: [begpos_rec, endpos_rec]
            begpos_rec = begsam - all_beg[rec]
            endpos_rec = endsam - all_beg[rec]
            begpos_rec = max(begpos_rec, 0)
            endpos_rec = min(endpos_rec, all_end[rec] - all_beg[rec] + 1)
            # Destination span in the output, half-open: [d1, d2)
            d1 = begpos_rec + all_beg[rec] - begsam
            d2 = endpos_rec + all_beg[rec] - begsam
            dat = _read_packet(f, etc['offset'][rec], endpos_rec, n_allchan,
                               abs_delta)
            data[:, d1:d2] = dat[:, begpos_rec:endpos_rec]

    # fill up the output data, put NaN for shorted channels
    if n_shorted > 0:
        full_channels = where(asarray([x == 0 for x in shorted]))[0]
        output = empty((n_allchan, n_smp))
        output.fill(NaN)
        output[full_channels, :] = data
    else:
        output = data

    # Per-channel conversion factor turns raw counts into microvolts.
    factor = _calculate_conversion(hdr)
    return expand_dims(factor, 1) * output
[ "def", "_read_erd", "(", "erd_file", ",", "begsam", ",", "endsam", ")", ":", "hdr", "=", "_read_hdr_file", "(", "erd_file", ")", "n_allchan", "=", "hdr", "[", "'num_channels'", "]", "shorted", "=", "hdr", "[", "'shorted'", "]", "# does this exist for Schema 7 at all?", "n_shorted", "=", "sum", "(", "shorted", ")", "if", "n_shorted", ">", "0", ":", "raise", "NotImplementedError", "(", "'shorted channels not tested yet'", ")", "if", "hdr", "[", "'file_schema'", "]", "in", "(", "7", ",", ")", ":", "abs_delta", "=", "b'\\x80'", "# one byte: 10000000", "raise", "NotImplementedError", "(", "'schema 7 not tested yet'", ")", "if", "hdr", "[", "'file_schema'", "]", "in", "(", "8", ",", "9", ")", ":", "abs_delta", "=", "b'\\xff\\xff'", "n_smp", "=", "endsam", "-", "begsam", "data", "=", "empty", "(", "(", "n_allchan", ",", "n_smp", ")", ")", "data", ".", "fill", "(", "NaN", ")", "# it includes the sample in both cases", "etc", "=", "_read_etc", "(", "erd_file", ".", "with_suffix", "(", "'.etc'", ")", ")", "all_beg", "=", "etc", "[", "'samplestamp'", "]", "all_end", "=", "etc", "[", "'samplestamp'", "]", "+", "etc", "[", "'sample_span'", "]", "-", "1", "try", ":", "begrec", "=", "where", "(", "(", "all_end", ">=", "begsam", ")", ")", "[", "0", "]", "[", "0", "]", "endrec", "=", "where", "(", "(", "all_beg", "<", "endsam", ")", ")", "[", "0", "]", "[", "-", "1", "]", "except", "IndexError", ":", "return", "data", "with", "erd_file", ".", "open", "(", "'rb'", ")", "as", "f", ":", "for", "rec", "in", "range", "(", "begrec", ",", "endrec", "+", "1", ")", ":", "# [begpos_rec, endpos_rec]", "begpos_rec", "=", "begsam", "-", "all_beg", "[", "rec", "]", "endpos_rec", "=", "endsam", "-", "all_beg", "[", "rec", "]", "begpos_rec", "=", "max", "(", "begpos_rec", ",", "0", ")", "endpos_rec", "=", "min", "(", "endpos_rec", ",", "all_end", "[", "rec", "]", "-", "all_beg", "[", "rec", "]", "+", "1", ")", "# [d1, d2)", "d1", "=", "begpos_rec", "+", "all_beg", "[", "rec", "]", "-", "begsam", 
"d2", "=", "endpos_rec", "+", "all_beg", "[", "rec", "]", "-", "begsam", "dat", "=", "_read_packet", "(", "f", ",", "etc", "[", "'offset'", "]", "[", "rec", "]", ",", "endpos_rec", ",", "n_allchan", ",", "abs_delta", ")", "data", "[", ":", ",", "d1", ":", "d2", "]", "=", "dat", "[", ":", ",", "begpos_rec", ":", "endpos_rec", "]", "# fill up the output data, put NaN for shorted channels", "if", "n_shorted", ">", "0", ":", "full_channels", "=", "where", "(", "asarray", "(", "[", "x", "==", "0", "for", "x", "in", "shorted", "]", ")", ")", "[", "0", "]", "output", "=", "empty", "(", "(", "n_allchan", ",", "n_smp", ")", ")", "output", ".", "fill", "(", "NaN", ")", "output", "[", "full_channels", ",", ":", "]", "=", "data", "else", ":", "output", "=", "data", "factor", "=", "_calculate_conversion", "(", "hdr", ")", "return", "expand_dims", "(", "factor", ",", "1", ")", "*", "output" ]
34.698276
22.327586
def no_selenium_errors(func):
    """
    Decorator to create an `EmptyPromise` check function that is satisfied
    only when `func` executes without a Selenium error.

    Swallowing `WebDriverException` (returning ``False`` so the promise
    retries) protects against common timing failures -- e.g. a
    `StaleElementException` when JavaScript modifies an element between
    lookup and use -- which makes tests more robust.

    Args:
        func (callable): The function to execute, with retries if an error occurs.

    Returns:
        Decorated function
    """
    def _inner(*args, **kwargs):  # pylint: disable=missing-docstring
        try:
            return func(*args, **kwargs)
        except WebDriverException:
            LOGGER.warning(u'Exception ignored during retry loop:', exc_info=True)
            return False

    return _inner
[ "def", "no_selenium_errors", "(", "func", ")", ":", "def", "_inner", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# pylint: disable=missing-docstring", "try", ":", "return_val", "=", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "except", "WebDriverException", ":", "LOGGER", ".", "warning", "(", "u'Exception ignored during retry loop:'", ",", "exc_info", "=", "True", ")", "return", "False", "else", ":", "return", "return_val", "return", "_inner" ]
35.074074
25.592593
def export_data(filename_or_fobj, data, mode="w"):
    """Return the object ready to be exported or only data if filename_or_fobj
    is not passed.

    :param filename_or_fobj: filename, file-like object, or ``None``
    :param data: already-serialized payload to write out
    :param mode: mode used to open the target (default ``"w"``)
    :return: ``data`` unchanged when no target was given, otherwise the
        flushed file object the data was written to
    """

    if filename_or_fobj is None:
        # No destination: hand the payload back to the caller untouched.
        return data

    # NOTE(review): `fobj` is never used -- Source.from_file() opens its own
    # handle below. Confirm whether this call is kept only for its
    # validation/open side effect; otherwise the target may be opened twice.
    _, fobj = get_filename_and_fobj(filename_or_fobj, mode=mode)
    source = Source.from_file(filename_or_fobj, mode=mode, plugin_name=None)
    source.fobj.write(data)
    source.fobj.flush()
    return source.fobj
[ "def", "export_data", "(", "filename_or_fobj", ",", "data", ",", "mode", "=", "\"w\"", ")", ":", "if", "filename_or_fobj", "is", "None", ":", "return", "data", "_", ",", "fobj", "=", "get_filename_and_fobj", "(", "filename_or_fobj", ",", "mode", "=", "mode", ")", "source", "=", "Source", ".", "from_file", "(", "filename_or_fobj", ",", "mode", "=", "mode", ",", "plugin_name", "=", "None", ")", "source", ".", "fobj", ".", "write", "(", "data", ")", "source", ".", "fobj", ".", "flush", "(", ")", "return", "source", ".", "fobj" ]
37.181818
20.636364
def print_difftext(text, other=None):
    """Print *text* as a colorized diff, falling back to ASCII on encode errors.

    Args:
        text (str): diff text to colorize and print; when *other* is given,
            the diff between *text* and *other* is printed instead.
        other (str, optional): second text to diff against *text*.

    CommandLine:
        #python -m utool.util_print --test-print_difftext
        #autopep8 ingest_data.py --diff | python -m utool.util_print --test-print_difftext
    """
    if other is not None:
        # hack
        text = util_str.difftext(text, other)
    colored = util_str.color_diff_text(text)
    try:
        print(colored)
    except UnicodeEncodeError as ex:  # NOQA
        # Terminal cannot encode the text: strip it down to plain ASCII.
        import unicodedata
        ascii_safe = unicodedata.normalize('NFKD', colored).encode('ascii', 'ignore')
        print(ascii_safe)
[ "def", "print_difftext", "(", "text", ",", "other", "=", "None", ")", ":", "if", "other", "is", "not", "None", ":", "# hack", "text", "=", "util_str", ".", "difftext", "(", "text", ",", "other", ")", "colortext", "=", "util_str", ".", "color_diff_text", "(", "text", ")", "try", ":", "print", "(", "colortext", ")", "except", "UnicodeEncodeError", "as", "ex", ":", "# NOQA", "import", "unicodedata", "colortext", "=", "unicodedata", ".", "normalize", "(", "'NFKD'", ",", "colortext", ")", ".", "encode", "(", "'ascii'", ",", "'ignore'", ")", "print", "(", "colortext", ")" ]
29.2
20.3
def running(self, offset=0, count=25):
    '''Return all the currently-running jobs'''
    # Delegate to the qless client with the paging window applied.
    request = ('jobs', 'running', self.name, offset, count)
    return self.client(*request)
[ "def", "running", "(", "self", ",", "offset", "=", "0", ",", "count", "=", "25", ")", ":", "return", "self", ".", "client", "(", "'jobs'", ",", "'running'", ",", "self", ".", "name", ",", "offset", ",", "count", ")" ]
53.333333
14.666667
def BVirial_Pitzer_Curl(T, Tc, Pc, omega, order=0):
    r'''Calculates the second virial coefficient using the model in [1]_.
    Designed for simple calculations.

    .. math::
        B_r=B^{(0)}+\omega B^{(1)}

        B^{(0)}=0.1445-0.33/T_r-0.1385/T_r^2-0.0121/T_r^3

        B^{(1)} = 0.073+0.46/T_r-0.5/T_r^2 -0.097/T_r^3 - 0.0073/T_r^8

    Parameters
    ----------
    T : float
        Temperature of fluid [K]
    Tc : float
        Critical temperature of fluid [K]
    Pc : float
        Critical pressure of the fluid [Pa]
    omega : float
        Acentric factor for fluid, [-]
    order : int, optional
        Order of the calculation. 0 for the calculation of B itself; for 1/2/3,
        the first/second/third derivative of B with respect to temperature; and
        for -1/-2, the first/second indefinite integral of B with respect to
        temperature. No other integrals or derivatives are implemented, and an
        exception will be raised if any other order is given.

    Returns
    -------
    B : float
        Second virial coefficient in density form or its integral/derivative
        if specified, [m^3/mol or m^3/mol/K^order]

    Notes
    -----
    Analytical models for derivatives and integrals are available for orders
    -2, -1, 1, 2, and 3, all obtained with SymPy.

    For first temperature derivative of B:

    .. math::
        \frac{d B^{(0)}}{dT} = \frac{33 Tc}{100 T^{2}} + \frac{277 Tc^{2}}
        {1000 T^{3}} + \frac{363 Tc^{3}}{10000 T^{4}}

        \frac{d B^{(1)}}{dT} = - \frac{23 Tc}{50 T^{2}} + \frac{Tc^{2}}{T^{3}}
        + \frac{291 Tc^{3}}{1000 T^{4}} + \frac{73 Tc^{8}}{1250 T^{9}}

    For the second temperature derivative of B:

    .. math::
        \frac{d^2 B^{(0)}}{dT^2} = - \frac{3 Tc}{5000 T^{3}} \left(1100
        + \frac{1385 Tc}{T} + \frac{242 Tc^{2}}{T^{2}}\right)

        \frac{d^2 B^{(1)}}{dT^2} = \frac{Tc}{T^{3}} \left(\frac{23}{25}
        - \frac{3 Tc}{T} - \frac{291 Tc^{2}}{250 T^{2}}
        - \frac{657 Tc^{7}}{1250 T^{7}}\right)

    For the third temperature derivative of B:

    .. math::
        \frac{d^3 B^{(0)}}{dT^3} = \frac{3 Tc}{500 T^{4}} \left(330
        + \frac{554 Tc}{T} + \frac{121 Tc^{2}}{T^{2}}\right)

        \frac{d^3 B^{(1)}}{dT^3} = \frac{3 Tc}{T^{4}} \left(- \frac{23}{25}
        + \frac{4 Tc}{T} + \frac{97 Tc^{2}}{50 T^{2}}
        + \frac{219 Tc^{7}}{125 T^{7}}\right)

    For the first indefinite integral of B:

    .. math::
        \int{B^{(0)}} dT = \frac{289 T}{2000} - \frac{33 Tc}{100}
        \log{\left (T \right )} + \frac{1}{20000 T^{2}}
        \left(2770 T Tc^{2} + 121 Tc^{3}\right)

        \int{B^{(1)}} dT = \frac{73 T}{1000} + \frac{23 Tc}{50}
        \log{\left (T \right )} + \frac{1}{70000 T^{7}}
        \left(35000 T^{6} Tc^{2} + 3395 T^{5} Tc^{3} + 73 Tc^{8}\right)

    For the second indefinite integral of B:

    .. math::
        \int\int B^{(0)} dT dT = \frac{289 T^{2}}{4000} - \frac{33 T}{100}
        Tc \log{\left (T \right )} + \frac{33 T}{100} Tc
        + \frac{277 Tc^{2}}{2000} \log{\left (T \right )}
        - \frac{121 Tc^{3}}{20000 T}

        \int\int B^{(1)} dT dT = \frac{73 T^{2}}{2000} + \frac{23 T}{50}
        Tc \log{\left (T \right )} - \frac{23 T}{50} Tc
        + \frac{Tc^{2}}{2} \log{\left (T \right )}
        - \frac{1}{420000 T^{6}} \left(20370 T^{5} Tc^{3} + 73 Tc^{8}\right)

    Examples
    --------
    Example matching that in BVirial_Abbott, for isobutane.

    >>> BVirial_Pitzer_Curl(510., 425.2, 38E5, 0.193)
    -0.0002084535541385102

    References
    ----------
    .. [1] Pitzer, Kenneth S., and R. F. Curl. "The Volumetric and
       Thermodynamic Properties of Fluids. III. Empirical Equation for the
       Second Virial Coefficient1." Journal of the American Chemical Society
       79, no. 10 (May 1, 1957): 2369-70. doi:10.1021/ja01567a007.
    '''
    Tr = T/Tc
    if order == 0:
        # B itself, in reduced form (Tr-based correlation from [1]_).
        B0 = 0.1445 - 0.33/Tr - 0.1385/Tr**2 - 0.0121/Tr**3
        B1 = 0.073 + 0.46/Tr - 0.5/Tr**2 - 0.097/Tr**3 - 0.0073/Tr**8
    elif order == 1:
        # First temperature derivative (SymPy-derived, see Notes).
        B0 = Tc*(3300*T**2 + 2770*T*Tc + 363*Tc**2)/(10000*T**4)
        B1 = Tc*(-2300*T**7 + 5000*T**6*Tc + 1455*T**5*Tc**2 + 292*Tc**7)/(5000*T**9)
    elif order == 2:
        # Second temperature derivative.
        B0 = -3*Tc*(1100*T**2 + 1385*T*Tc + 242*Tc**2)/(5000*T**5)
        B1 = Tc*(1150*T**7 - 3750*T**6*Tc - 1455*T**5*Tc**2 - 657*Tc**7)/(1250*T**10)
    elif order == 3:
        # Third temperature derivative.
        B0 = 3*Tc*(330*T**2 + 554*T*Tc + 121*Tc**2)/(500*T**6)
        B1 = 3*Tc*(-230*T**7 + 1000*T**6*Tc + 485*T**5*Tc**2 + 438*Tc**7)/(250*T**11)
    elif order == -1:
        # First indefinite integral over T.
        # `log` is resolved at module level -- presumably math.log; confirm.
        B0 = 289*T/2000 - 33*Tc*log(T)/100 + (2770*T*Tc**2 + 121*Tc**3)/(20000*T**2)
        B1 = 73*T/1000 + 23*Tc*log(T)/50 + (35000*T**6*Tc**2 + 3395*T**5*Tc**3 + 73*Tc**8)/(70000*T**7)
    elif order == -2:
        # Second indefinite integral over T.
        B0 = 289*T**2/4000 - 33*T*Tc*log(T)/100 + 33*T*Tc/100 + 277*Tc**2*log(T)/2000 - 121*Tc**3/(20000*T)
        B1 = 73*T**2/2000 + 23*T*Tc*log(T)/50 - 23*T*Tc/50 + Tc**2*log(T)/2 - (20370*T**5*Tc**3 + 73*Tc**8)/(420000*T**6)
    else:
        raise Exception('Only orders -2, -1, 0, 1, 2 and 3 are supported.')
    # Combine the simple-fluid and acentric contributions, then convert from
    # reduced form using the gas constant R (module-level constant).
    Br = B0 + omega*B1
    return Br*R*Tc/Pc
[ "def", "BVirial_Pitzer_Curl", "(", "T", ",", "Tc", ",", "Pc", ",", "omega", ",", "order", "=", "0", ")", ":", "Tr", "=", "T", "/", "Tc", "if", "order", "==", "0", ":", "B0", "=", "0.1445", "-", "0.33", "/", "Tr", "-", "0.1385", "/", "Tr", "**", "2", "-", "0.0121", "/", "Tr", "**", "3", "B1", "=", "0.073", "+", "0.46", "/", "Tr", "-", "0.5", "/", "Tr", "**", "2", "-", "0.097", "/", "Tr", "**", "3", "-", "0.0073", "/", "Tr", "**", "8", "elif", "order", "==", "1", ":", "B0", "=", "Tc", "*", "(", "3300", "*", "T", "**", "2", "+", "2770", "*", "T", "*", "Tc", "+", "363", "*", "Tc", "**", "2", ")", "/", "(", "10000", "*", "T", "**", "4", ")", "B1", "=", "Tc", "*", "(", "-", "2300", "*", "T", "**", "7", "+", "5000", "*", "T", "**", "6", "*", "Tc", "+", "1455", "*", "T", "**", "5", "*", "Tc", "**", "2", "+", "292", "*", "Tc", "**", "7", ")", "/", "(", "5000", "*", "T", "**", "9", ")", "elif", "order", "==", "2", ":", "B0", "=", "-", "3", "*", "Tc", "*", "(", "1100", "*", "T", "**", "2", "+", "1385", "*", "T", "*", "Tc", "+", "242", "*", "Tc", "**", "2", ")", "/", "(", "5000", "*", "T", "**", "5", ")", "B1", "=", "Tc", "*", "(", "1150", "*", "T", "**", "7", "-", "3750", "*", "T", "**", "6", "*", "Tc", "-", "1455", "*", "T", "**", "5", "*", "Tc", "**", "2", "-", "657", "*", "Tc", "**", "7", ")", "/", "(", "1250", "*", "T", "**", "10", ")", "elif", "order", "==", "3", ":", "B0", "=", "3", "*", "Tc", "*", "(", "330", "*", "T", "**", "2", "+", "554", "*", "T", "*", "Tc", "+", "121", "*", "Tc", "**", "2", ")", "/", "(", "500", "*", "T", "**", "6", ")", "B1", "=", "3", "*", "Tc", "*", "(", "-", "230", "*", "T", "**", "7", "+", "1000", "*", "T", "**", "6", "*", "Tc", "+", "485", "*", "T", "**", "5", "*", "Tc", "**", "2", "+", "438", "*", "Tc", "**", "7", ")", "/", "(", "250", "*", "T", "**", "11", ")", "elif", "order", "==", "-", "1", ":", "B0", "=", "289", "*", "T", "/", "2000", "-", "33", "*", "Tc", "*", "log", "(", "T", ")", "/", "100", "+", "(", "2770", "*", "T", "*", "Tc", 
"**", "2", "+", "121", "*", "Tc", "**", "3", ")", "/", "(", "20000", "*", "T", "**", "2", ")", "B1", "=", "73", "*", "T", "/", "1000", "+", "23", "*", "Tc", "*", "log", "(", "T", ")", "/", "50", "+", "(", "35000", "*", "T", "**", "6", "*", "Tc", "**", "2", "+", "3395", "*", "T", "**", "5", "*", "Tc", "**", "3", "+", "73", "*", "Tc", "**", "8", ")", "/", "(", "70000", "*", "T", "**", "7", ")", "elif", "order", "==", "-", "2", ":", "B0", "=", "289", "*", "T", "**", "2", "/", "4000", "-", "33", "*", "T", "*", "Tc", "*", "log", "(", "T", ")", "/", "100", "+", "33", "*", "T", "*", "Tc", "/", "100", "+", "277", "*", "Tc", "**", "2", "*", "log", "(", "T", ")", "/", "2000", "-", "121", "*", "Tc", "**", "3", "/", "(", "20000", "*", "T", ")", "B1", "=", "73", "*", "T", "**", "2", "/", "2000", "+", "23", "*", "T", "*", "Tc", "*", "log", "(", "T", ")", "/", "50", "-", "23", "*", "T", "*", "Tc", "/", "50", "+", "Tc", "**", "2", "*", "log", "(", "T", ")", "/", "2", "-", "(", "20370", "*", "T", "**", "5", "*", "Tc", "**", "3", "+", "73", "*", "Tc", "**", "8", ")", "/", "(", "420000", "*", "T", "**", "6", ")", "else", ":", "raise", "Exception", "(", "'Only orders -2, -1, 0, 1, 2 and 3 are supported.'", ")", "Br", "=", "B0", "+", "omega", "*", "B1", "return", "Br", "*", "R", "*", "Tc", "/", "Pc" ]
44.315315
36.567568
def get_resources(cls):
    """Returns Ext Resources."""
    core_plugin = directory.get_plugin()
    controller = MacAddressRangesController(core_plugin)
    alias = Mac_address_ranges.get_alias()
    return [extensions.ResourceExtension(alias, controller)]
[ "def", "get_resources", "(", "cls", ")", ":", "plugin", "=", "directory", ".", "get_plugin", "(", ")", "controller", "=", "MacAddressRangesController", "(", "plugin", ")", "return", "[", "extensions", ".", "ResourceExtension", "(", "Mac_address_ranges", ".", "get_alias", "(", ")", ",", "controller", ")", "]" ]
47.666667
14.333333
def getAsGrassAsciiRaster(self, tableName, rasterId=1, rasterIdFieldName='id', rasterFieldName='raster', newSRID=None):
    """
    Returns a string representation of the raster in GRASS ASCII raster format.
    """
    # Export via GDAL in ArcInfo Grid (AAIGrid) format, then translate its
    # header. An AAIGrid header looks like:
    #   ncols        67
    #   nrows        55
    #   xllcorner    425802.32143212341
    #   yllcorner    44091450.41551345213
    #   cellsize     90.0000000
    gridLines = self.getAsGdalRaster(rasterFieldName, tableName, rasterIdFieldName, rasterId, 'AAIGrid', newSRID).splitlines()

    # Parse the five fixed header values.
    nCols = int(gridLines[0].split()[1])
    nRows = int(gridLines[1].split()[1])
    xLLCorner = float(gridLines[2].split()[1])
    yLLCorner = float(gridLines[3].split()[1])
    cellSize = float(gridLines[4].split()[1])

    # Body = everything after the header; an optional NODATA_value row may
    # follow the header and is dropped as well.
    body = gridLines[5:]
    if 'NODATA_value' in body[0]:
        body = body[1:]

    # GRASS headers use bounding-box edges instead of the lower-left corner:
    #   north/south/east/west + rows/cols.
    north = yLLCorner + (cellSize * nRows)
    east = xLLCorner + (cellSize * nCols)

    grassHeader = ['north: %s' % north,
                   'south: %s' % yLLCorner,
                   'east: %s' % east,
                   'west: %s' % xLLCorner,
                   'rows: %s' % nRows,
                   'cols: %s' % nCols]

    return '\n'.join(grassHeader + body)
[ "def", "getAsGrassAsciiRaster", "(", "self", ",", "tableName", ",", "rasterId", "=", "1", ",", "rasterIdFieldName", "=", "'id'", ",", "rasterFieldName", "=", "'raster'", ",", "newSRID", "=", "None", ")", ":", "# Get raster in ArcInfo Grid format", "arcInfoGrid", "=", "self", ".", "getAsGdalRaster", "(", "rasterFieldName", ",", "tableName", ",", "rasterIdFieldName", ",", "rasterId", ",", "'AAIGrid'", ",", "newSRID", ")", ".", "splitlines", "(", ")", "## Convert arcInfoGrid to GRASS ASCII format ##", "# Get values from header which look something this:", "# ncols 67", "# nrows 55", "# xllcorner 425802.32143212341", "# yllcorner 44091450.41551345213", "# cellsize 90.0000000", "# ...", "nCols", "=", "int", "(", "arcInfoGrid", "[", "0", "]", ".", "split", "(", ")", "[", "1", "]", ")", "nRows", "=", "int", "(", "arcInfoGrid", "[", "1", "]", ".", "split", "(", ")", "[", "1", "]", ")", "xLLCorner", "=", "float", "(", "arcInfoGrid", "[", "2", "]", ".", "split", "(", ")", "[", "1", "]", ")", "yLLCorner", "=", "float", "(", "arcInfoGrid", "[", "3", "]", ".", "split", "(", ")", "[", "1", "]", ")", "cellSize", "=", "float", "(", "arcInfoGrid", "[", "4", "]", ".", "split", "(", ")", "[", "1", "]", ")", "# Remove old headers", "for", "i", "in", "range", "(", "0", ",", "5", ")", ":", "arcInfoGrid", ".", "pop", "(", "0", ")", "# Check for NODATA_value row and remove if it is there", "if", "'NODATA_value'", "in", "arcInfoGrid", "[", "0", "]", ":", "arcInfoGrid", ".", "pop", "(", "0", ")", "## Calculate values for GRASS ASCII headers ##", "# These should look like this:", "# north: 4501028.972140", "# south: 4494548.972140", "# east: 460348.288604", "# west: 454318.288604", "# rows: 72", "# cols: 67", "# ...", "# xLLCorner and yLLCorner represent the coordinates for the Lower Left corner of the raster", "north", "=", "yLLCorner", "+", "(", "cellSize", "*", "nRows", ")", "south", "=", "yLLCorner", "east", "=", "xLLCorner", "+", "(", "cellSize", "*", "nCols", ")", "west", "=", 
"xLLCorner", "# Create header Lines (the first shall be last and the last shall be first)", "grassHeader", "=", "[", "'cols: %s'", "%", "nCols", ",", "'rows: %s'", "%", "nRows", ",", "'west: %s'", "%", "west", ",", "'east: %s'", "%", "east", ",", "'south: %s'", "%", "south", ",", "'north: %s'", "%", "north", "]", "# Insert grass headers into the grid", "for", "header", "in", "grassHeader", ":", "arcInfoGrid", ".", "insert", "(", "0", ",", "header", ")", "# Create string", "arcInfoGridString", "=", "'\\n'", ".", "join", "(", "arcInfoGrid", ")", "return", "arcInfoGridString" ]
37.033333
18.066667
def fwd(self, astr_startPath, **kwargs):
    """
    Record the files-in-working-directory in treeRecurse compatible format.

    Changes into ``astr_startPath``, and on success appends each file found
    there (as ``<cwd>/<file>``) to ``self.l_fwd``.

    :return: dict with the 'status' of the directory change and the
             current working directory ('cwd').
    """
    status = self.cd(astr_startPath)['status']
    if status:
        names = self.lsf()
        if len(names):
            # Prefix every entry with the current working directory.
            for name in names:
                self.l_fwd.append(self.cwd() + '/' + name)
    return {'status': status, 'cwd': self.cwd()}
[ "def", "fwd", "(", "self", ",", "astr_startPath", ",", "*", "*", "kwargs", ")", ":", "status", "=", "self", ".", "cd", "(", "astr_startPath", ")", "[", "'status'", "]", "if", "status", ":", "l", "=", "self", ".", "lsf", "(", ")", "if", "len", "(", "l", ")", ":", "lf", "=", "[", "self", ".", "cwd", "(", ")", "+", "'/'", "+", "f", "for", "f", "in", "l", "]", "for", "entry", "in", "lf", ":", "self", ".", "l_fwd", ".", "append", "(", "entry", ")", "return", "{", "'status'", ":", "status", ",", "'cwd'", ":", "self", ".", "cwd", "(", ")", "}" ]
37.533333
13.666667
def increment(self, kwargs):
    """
    Increment the counter selected by *kwargs*.

    The counter index is computed from *kwargs*, an optional dict of vars
    captured by the :class:`filter.Filter` that match the log entry.  An
    immutable version of *kwargs* is used as an index so several counters
    can be kept for the same :class:`rule.Rule`.  It can be `None`.

    Returns the index of the updated counter.
    """
    # A sorted tuple of items stays human-readable in output,
    # unlike a plain hash of the dict contents.
    index = tuple(sorted(kwargs.items())) if kwargs else None
    self[index] += 1
    return index
[ "def", "increment", "(", "self", ",", "kwargs", ")", ":", "index", "=", "None", "if", "kwargs", ":", "# index = hash(tuple(sorted(kwargs.items())))", "# Better keep something readable so we can output it.", "index", "=", "tuple", "(", "sorted", "(", "kwargs", ".", "items", "(", ")", ")", ")", "self", "[", "index", "]", "+=", "1", "return", "index" ]
31.565217
22.782609
def group_associations_types(self, group_type, api_entity=None, api_branch=None, params=None):
    """
    Yield the group associations of a given type from an Indicator/Group/Victim.

    Args:
        group_type: The type of associated group to retrieve.
        api_entity: The api entity name (optional).
        api_branch: The api branch name (optional).
        params: Extra query parameters for the request (optional).

    Yields:
        The matching group associations.
    """
    if params is None:
        params = {}
    if not self.can_update():
        # 910: object cannot be updated/queried in its current state
        self._tcex.handle_error(910, [self.type])

    target = self._tcex.ti.group(group_type)
    associations = self.tc_requests.group_associations_types(
        self.api_type,
        self.api_sub_type,
        self.unique_id,
        target,
        api_entity=api_entity,
        api_branch=api_branch,
        owner=self.owner,
        params=params,
    )
    for association in associations:
        yield association
[ "def", "group_associations_types", "(", "self", ",", "group_type", ",", "api_entity", "=", "None", ",", "api_branch", "=", "None", ",", "params", "=", "None", ")", ":", "if", "params", "is", "None", ":", "params", "=", "{", "}", "if", "not", "self", ".", "can_update", "(", ")", ":", "self", ".", "_tcex", ".", "handle_error", "(", "910", ",", "[", "self", ".", "type", "]", ")", "target", "=", "self", ".", "_tcex", ".", "ti", ".", "group", "(", "group_type", ")", "for", "gat", "in", "self", ".", "tc_requests", ".", "group_associations_types", "(", "self", ".", "api_type", ",", "self", ".", "api_sub_type", ",", "self", ".", "unique_id", ",", "target", ",", "api_entity", "=", "api_entity", ",", "api_branch", "=", "api_branch", ",", "owner", "=", "self", ".", "owner", ",", "params", "=", "params", ",", ")", ":", "yield", "gat" ]
25.709677
20.16129
def RSA(im: array, radius: int, volume_fraction: int = 1, mode: str = 'extended'): r""" Generates a sphere or disk packing using Random Sequential Addition This which ensures that spheres do not overlap but does not guarantee they are tightly packed. Parameters ---------- im : ND-array The image into which the spheres should be inserted. By accepting an image rather than a shape, it allows users to insert spheres into an already existing image. To begin the process, start with an array of zero such as ``im = np.zeros([200, 200], dtype=bool)``. radius : int The radius of the disk or sphere to insert. volume_fraction : scalar The fraction of the image that should be filled with spheres. The spheres are addeds 1's, so each sphere addition increases the ``volume_fraction`` until the specified limit is reach. mode : string Controls how the edges of the image are handled. Options are: 'extended' - Spheres are allowed to extend beyond the edge of the image 'contained' - Spheres are all completely within the image 'periodic' - The portion of a sphere that extends beyond the image is inserted into the opposite edge of the image (Not Implemented Yet!) Returns ------- image : ND-array A copy of ``im`` with spheres of specified radius *added* to the background. Notes ----- Each sphere is filled with 1's, but the center is marked with a 2. This allows easy boolean masking to extract only the centers, which can be converted to coordinates using ``scipy.where`` and used for other purposes. The obtain only the spheres, use``im = im == 1``. This function adds spheres to the background of the received ``im``, which allows iteratively adding spheres of different radii to the unfilled space. References ---------- [1] Random Heterogeneous Materials, S. 
Torquato (2001) """ # Note: The 2D vs 3D splitting of this just me being lazy...I can't be # bothered to figure it out programmatically right now # TODO: Ideally the spheres should be added periodically print(78*'―') print('RSA: Adding spheres of size ' + str(radius)) d2 = len(im.shape) == 2 mrad = 2*radius if d2: im_strel = ps_disk(radius) mask_strel = ps_disk(mrad) else: im_strel = ps_ball(radius) mask_strel = ps_ball(mrad) if sp.any(im > 0): # Dilate existing objects by im_strel to remove pixels near them # from consideration for sphere placement mask = ps.tools.fftmorphology(im > 0, im_strel > 0, mode='dilate') mask = mask.astype(int) else: mask = sp.zeros_like(im) if mode == 'contained': mask = _remove_edge(mask, radius) elif mode == 'extended': pass elif mode == 'periodic': raise Exception('Periodic edges are not implemented yet') else: raise Exception('Unrecognized mode: ' + mode) vf = im.sum()/im.size free_spots = sp.argwhere(mask == 0) i = 0 while vf <= volume_fraction and len(free_spots) > 0: choice = sp.random.randint(0, len(free_spots), size=1) if d2: [x, y] = free_spots[choice].flatten() im = _fit_strel_to_im_2d(im, im_strel, radius, x, y) mask = _fit_strel_to_im_2d(mask, mask_strel, mrad, x, y) im[x, y] = 2 else: [x, y, z] = free_spots[choice].flatten() im = _fit_strel_to_im_3d(im, im_strel, radius, x, y, z) mask = _fit_strel_to_im_3d(mask, mask_strel, mrad, x, y, z) im[x, y, z] = 2 free_spots = sp.argwhere(mask == 0) vf = im.sum()/im.size i += 1 if vf > volume_fraction: print('Volume Fraction', volume_fraction, 'reached') if len(free_spots) == 0: print('No more free spots', 'Volume Fraction', vf) return im
[ "def", "RSA", "(", "im", ":", "array", ",", "radius", ":", "int", ",", "volume_fraction", ":", "int", "=", "1", ",", "mode", ":", "str", "=", "'extended'", ")", ":", "# Note: The 2D vs 3D splitting of this just me being lazy...I can't be", "# bothered to figure it out programmatically right now", "# TODO: Ideally the spheres should be added periodically", "print", "(", "78", "*", "'―')", "", "print", "(", "'RSA: Adding spheres of size '", "+", "str", "(", "radius", ")", ")", "d2", "=", "len", "(", "im", ".", "shape", ")", "==", "2", "mrad", "=", "2", "*", "radius", "if", "d2", ":", "im_strel", "=", "ps_disk", "(", "radius", ")", "mask_strel", "=", "ps_disk", "(", "mrad", ")", "else", ":", "im_strel", "=", "ps_ball", "(", "radius", ")", "mask_strel", "=", "ps_ball", "(", "mrad", ")", "if", "sp", ".", "any", "(", "im", ">", "0", ")", ":", "# Dilate existing objects by im_strel to remove pixels near them", "# from consideration for sphere placement", "mask", "=", "ps", ".", "tools", ".", "fftmorphology", "(", "im", ">", "0", ",", "im_strel", ">", "0", ",", "mode", "=", "'dilate'", ")", "mask", "=", "mask", ".", "astype", "(", "int", ")", "else", ":", "mask", "=", "sp", ".", "zeros_like", "(", "im", ")", "if", "mode", "==", "'contained'", ":", "mask", "=", "_remove_edge", "(", "mask", ",", "radius", ")", "elif", "mode", "==", "'extended'", ":", "pass", "elif", "mode", "==", "'periodic'", ":", "raise", "Exception", "(", "'Periodic edges are not implemented yet'", ")", "else", ":", "raise", "Exception", "(", "'Unrecognized mode: '", "+", "mode", ")", "vf", "=", "im", ".", "sum", "(", ")", "/", "im", ".", "size", "free_spots", "=", "sp", ".", "argwhere", "(", "mask", "==", "0", ")", "i", "=", "0", "while", "vf", "<=", "volume_fraction", "and", "len", "(", "free_spots", ")", ">", "0", ":", "choice", "=", "sp", ".", "random", ".", "randint", "(", "0", ",", "len", "(", "free_spots", ")", ",", "size", "=", "1", ")", "if", "d2", ":", "[", "x", ",", "y", "]", "=", 
"free_spots", "[", "choice", "]", ".", "flatten", "(", ")", "im", "=", "_fit_strel_to_im_2d", "(", "im", ",", "im_strel", ",", "radius", ",", "x", ",", "y", ")", "mask", "=", "_fit_strel_to_im_2d", "(", "mask", ",", "mask_strel", ",", "mrad", ",", "x", ",", "y", ")", "im", "[", "x", ",", "y", "]", "=", "2", "else", ":", "[", "x", ",", "y", ",", "z", "]", "=", "free_spots", "[", "choice", "]", ".", "flatten", "(", ")", "im", "=", "_fit_strel_to_im_3d", "(", "im", ",", "im_strel", ",", "radius", ",", "x", ",", "y", ",", "z", ")", "mask", "=", "_fit_strel_to_im_3d", "(", "mask", ",", "mask_strel", ",", "mrad", ",", "x", ",", "y", ",", "z", ")", "im", "[", "x", ",", "y", ",", "z", "]", "=", "2", "free_spots", "=", "sp", ".", "argwhere", "(", "mask", "==", "0", ")", "vf", "=", "im", ".", "sum", "(", ")", "/", "im", ".", "size", "i", "+=", "1", "if", "vf", ">", "volume_fraction", ":", "print", "(", "'Volume Fraction'", ",", "volume_fraction", ",", "'reached'", ")", "if", "len", "(", "free_spots", ")", "==", "0", ":", "print", "(", "'No more free spots'", ",", "'Volume Fraction'", ",", "vf", ")", "return", "im" ]
37.893204
23.631068
def find_all_globals(node, globs):
    """Walk the Syntax Tree rooted at *node* and collect into *globs* every
    variable name that is used as a global; returns the updated set."""
    for child in node:
        if isinstance(child, SyntaxTree):
            # Recurse into subtrees, threading the accumulator through.
            globs = find_all_globals(child, globs)
        elif child.kind in read_write_global_ops:
            globs.add(child.pattr)
    return globs
[ "def", "find_all_globals", "(", "node", ",", "globs", ")", ":", "for", "n", "in", "node", ":", "if", "isinstance", "(", "n", ",", "SyntaxTree", ")", ":", "globs", "=", "find_all_globals", "(", "n", ",", "globs", ")", "elif", "n", ".", "kind", "in", "read_write_global_ops", ":", "globs", ".", "add", "(", "n", ".", "pattr", ")", "return", "globs" ]
37.375
9.5
def NewFile(self, filename, encoding, options):
    """Parse an XML file from the filesystem or the network.

    The parsing flags ``options`` are a combination of xmlParserOption.
    This reuses the existing xmlTextReader wrapped by this object.
    """
    # Delegate directly to the libxml2 binding, reusing our reader handle.
    return libxml2mod.xmlReaderNewFile(self._o, filename, encoding, options)
[ "def", "NewFile", "(", "self", ",", "filename", ",", "encoding", ",", "options", ")", ":", "ret", "=", "libxml2mod", ".", "xmlReaderNewFile", "(", "self", ".", "_o", ",", "filename", ",", "encoding", ",", "options", ")", "return", "ret" ]
50.428571
14.285714
def interpolate(self, factor, minGlyph, maxGlyph, round=True, suppressError=True):
    """
    Interpolate the contents of this glyph at location ``factor`` in a
    linear interpolation between ``minGlyph`` and ``maxGlyph``.

        >>> glyph.interpolate(0.5, otherGlyph1, otherGlyph2)

    ``factor`` may be a :ref:`type-int-float` or a tuple of two
    :ref:`type-int-float` values representing x and y factors.  ``minGlyph``
    and ``maxGlyph`` must be :class:`BaseGlyph` instances located at 0.0 and
    1.0 in the interpolation range respectively.

    If ``round`` is ``True``, the contents of the glyph are rounded to
    integers after the interpolation.  ``minGlyph`` and ``maxGlyph`` are
    assumed to be fully compatible for interpolation; incompatibilities
    raise a :class:`FontPartsError` unless ``suppressError`` is ``True``,
    in which case errors are silently ignored.
    """
    factor = normalizers.normalizeInterpolationFactor(factor)
    # Both endpoints must be glyphs; validate minGlyph first, then maxGlyph.
    for candidate in (minGlyph, maxGlyph):
        if not isinstance(candidate, BaseGlyph):
            raise TypeError(("Interpolation to an instance of %r can not be "
                             "performed from an instance of %r.")
                            % (self.__class__.__name__,
                               candidate.__class__.__name__))
    round = normalizers.normalizeBoolean(round)
    suppressError = normalizers.normalizeBoolean(suppressError)
    self._interpolate(factor, minGlyph, maxGlyph,
                      round=round, suppressError=suppressError)
[ "def", "interpolate", "(", "self", ",", "factor", ",", "minGlyph", ",", "maxGlyph", ",", "round", "=", "True", ",", "suppressError", "=", "True", ")", ":", "factor", "=", "normalizers", ".", "normalizeInterpolationFactor", "(", "factor", ")", "if", "not", "isinstance", "(", "minGlyph", ",", "BaseGlyph", ")", ":", "raise", "TypeError", "(", "(", "\"Interpolation to an instance of %r can not be \"", "\"performed from an instance of %r.\"", ")", "%", "(", "self", ".", "__class__", ".", "__name__", ",", "minGlyph", ".", "__class__", ".", "__name__", ")", ")", "if", "not", "isinstance", "(", "maxGlyph", ",", "BaseGlyph", ")", ":", "raise", "TypeError", "(", "(", "\"Interpolation to an instance of %r can not be \"", "\"performed from an instance of %r.\"", ")", "%", "(", "self", ".", "__class__", ".", "__name__", ",", "maxGlyph", ".", "__class__", ".", "__name__", ")", ")", "round", "=", "normalizers", ".", "normalizeBoolean", "(", "round", ")", "suppressError", "=", "normalizers", ".", "normalizeBoolean", "(", "suppressError", ")", "self", ".", "_interpolate", "(", "factor", ",", "minGlyph", ",", "maxGlyph", ",", "round", "=", "round", ",", "suppressError", "=", "suppressError", ")" ]
52.214286
26.166667
def FanOut(self, obj, parent=None): """Expand values from various attribute types. Strings are returned as is. Dictionaries are returned with a key string, and an expanded set of values. Other iterables are expanded until they flatten out. Other items are returned in string format. Args: obj: The object to expand out. parent: The parent object: Used to short-circuit infinite recursion. Returns: a list of expanded values as strings. """ # Catch cases where RDFs are iterable but return themselves. if parent and obj == parent: results = [utils.SmartUnicode(obj).strip()] elif isinstance(obj, (string_types, rdf_structs.EnumNamedValue)): results = [utils.SmartUnicode(obj).strip()] elif isinstance(obj, rdf_protodict.DataBlob): results = self.FanOut(obj.GetValue()) elif isinstance(obj, (collections.Mapping, rdf_protodict.Dict)): results = [] # rdf_protodict.Dict only has items, not iteritems. for k, v in iteritems(obj): expanded_v = [utils.SmartUnicode(r) for r in self.FanOut(v)] results.append("%s:%s" % (utils.SmartUnicode(k), ",".join(expanded_v))) elif isinstance(obj, (collections.Iterable, rdf_structs.RepeatedFieldHelper)): results = [] for rslt in [self.FanOut(o, obj) for o in obj]: results.extend(rslt) else: results = [utils.SmartUnicode(obj).strip()] return results
[ "def", "FanOut", "(", "self", ",", "obj", ",", "parent", "=", "None", ")", ":", "# Catch cases where RDFs are iterable but return themselves.", "if", "parent", "and", "obj", "==", "parent", ":", "results", "=", "[", "utils", ".", "SmartUnicode", "(", "obj", ")", ".", "strip", "(", ")", "]", "elif", "isinstance", "(", "obj", ",", "(", "string_types", ",", "rdf_structs", ".", "EnumNamedValue", ")", ")", ":", "results", "=", "[", "utils", ".", "SmartUnicode", "(", "obj", ")", ".", "strip", "(", ")", "]", "elif", "isinstance", "(", "obj", ",", "rdf_protodict", ".", "DataBlob", ")", ":", "results", "=", "self", ".", "FanOut", "(", "obj", ".", "GetValue", "(", ")", ")", "elif", "isinstance", "(", "obj", ",", "(", "collections", ".", "Mapping", ",", "rdf_protodict", ".", "Dict", ")", ")", ":", "results", "=", "[", "]", "# rdf_protodict.Dict only has items, not iteritems.", "for", "k", ",", "v", "in", "iteritems", "(", "obj", ")", ":", "expanded_v", "=", "[", "utils", ".", "SmartUnicode", "(", "r", ")", "for", "r", "in", "self", ".", "FanOut", "(", "v", ")", "]", "results", ".", "append", "(", "\"%s:%s\"", "%", "(", "utils", ".", "SmartUnicode", "(", "k", ")", ",", "\",\"", ".", "join", "(", "expanded_v", ")", ")", ")", "elif", "isinstance", "(", "obj", ",", "(", "collections", ".", "Iterable", ",", "rdf_structs", ".", "RepeatedFieldHelper", ")", ")", ":", "results", "=", "[", "]", "for", "rslt", "in", "[", "self", ".", "FanOut", "(", "o", ",", "obj", ")", "for", "o", "in", "obj", "]", ":", "results", ".", "extend", "(", "rslt", ")", "else", ":", "results", "=", "[", "utils", ".", "SmartUnicode", "(", "obj", ")", ".", "strip", "(", ")", "]", "return", "results" ]
39.944444
18.416667
def set_data(self, data):
    """
    Populate the form's fields from *data*.

    Args:
        data (dict): Values to assign to the form fields; fields missing
            from *data* are set to ``None``.

    Returns:
        Self. Form object (allows chaining).
    """
    for field_name in self._fields:
        setattr(self, field_name, data.get(field_name))
    return self
[ "def", "set_data", "(", "self", ",", "data", ")", ":", "for", "name", "in", "self", ".", "_fields", ":", "setattr", "(", "self", ",", "name", ",", "data", ".", "get", "(", "name", ")", ")", "return", "self" ]
20.357143
18.214286
def _psed(text, before, after, limit, flags): ''' Does the actual work for file.psed, so that single lines can be passed in ''' atext = text if limit: limit = re.compile(limit) comps = text.split(limit) atext = ''.join(comps[1:]) count = 1 if 'g' in flags: count = 0 flags = flags.replace('g', '') aflags = 0 for flag in flags: aflags |= RE_FLAG_TABLE[flag] before = re.compile(before, flags=aflags) text = re.sub(before, after, atext, count=count) return text
[ "def", "_psed", "(", "text", ",", "before", ",", "after", ",", "limit", ",", "flags", ")", ":", "atext", "=", "text", "if", "limit", ":", "limit", "=", "re", ".", "compile", "(", "limit", ")", "comps", "=", "text", ".", "split", "(", "limit", ")", "atext", "=", "''", ".", "join", "(", "comps", "[", "1", ":", "]", ")", "count", "=", "1", "if", "'g'", "in", "flags", ":", "count", "=", "0", "flags", "=", "flags", ".", "replace", "(", "'g'", ",", "''", ")", "aflags", "=", "0", "for", "flag", "in", "flags", ":", "aflags", "|=", "RE_FLAG_TABLE", "[", "flag", "]", "before", "=", "re", ".", "compile", "(", "before", ",", "flags", "=", "aflags", ")", "text", "=", "re", ".", "sub", "(", "before", ",", "after", ",", "atext", ",", "count", "=", "count", ")", "return", "text" ]
21.259259
22.740741
def add_version_pattern(self, m):
    """For QR codes of version 7 or higher, embed the special pattern
    specifying the code's version. For further information see:
    http://www.thonky.com/qr-code-tutorial/format-version-information/#example-of-version-7-information-string
    """
    # Versions below 7 carry no version pattern at all
    if self.version < 7:
        return

    # The stored bit string needs its least significant digit in the
    # zero-th position, so iterate the version bits in reverse.
    bits = iter(tables.version_pattern[self.version][::-1])

    # The pattern starts 11 modules in from the edge of the matrix
    offset = len(m) - 11

    # The pattern occupies a 6x3 area, mirrored across the diagonal
    # (the version pattern is pretty odd looking)
    for row in range(6):
        for col in range(offset, offset + 3):
            module = int(next(bits))
            # Bottom Left copy
            m[row][col] = module
            # Upper right copy
            m[col][row] = module
[ "def", "add_version_pattern", "(", "self", ",", "m", ")", ":", "if", "self", ".", "version", "<", "7", ":", "return", "#Get the bit fields for this code's version", "#We will iterate across the string, the bit string", "#needs the least significant digit in the zero-th position", "field", "=", "iter", "(", "tables", ".", "version_pattern", "[", "self", ".", "version", "]", "[", ":", ":", "-", "1", "]", ")", "#Where to start placing the pattern", "start", "=", "len", "(", "m", ")", "-", "11", "#The version pattern is pretty odd looking", "for", "i", "in", "range", "(", "6", ")", ":", "#The pattern is three modules wide", "for", "j", "in", "range", "(", "start", ",", "start", "+", "3", ")", ":", "bit", "=", "int", "(", "next", "(", "field", ")", ")", "#Bottom Left", "m", "[", "i", "]", "[", "j", "]", "=", "bit", "#Upper right", "m", "[", "j", "]", "[", "i", "]", "=", "bit" ]
33.896552
18.862069
def date_decimal_hook(dct):
    '''The default JSON decoder hook, the inverse of
    :class:`stdnet.utils.jsontools.JSONDateDecimalEncoder`.

    Dicts tagged with a ``__datetime__``, ``__date__`` or ``__decimal__``
    key are decoded into the corresponding Python object; any other dict
    is returned unchanged.'''
    if '__datetime__' in dct:
        return todatetime(dct['__datetime__'])
    if '__date__' in dct:
        return todatetime(dct['__date__']).date()
    if '__decimal__' in dct:
        return Decimal(dct['__decimal__'])
    return dct
[ "def", "date_decimal_hook", "(", "dct", ")", ":", "if", "'__datetime__'", "in", "dct", ":", "return", "todatetime", "(", "dct", "[", "'__datetime__'", "]", ")", "elif", "'__date__'", "in", "dct", ":", "return", "todatetime", "(", "dct", "[", "'__date__'", "]", ")", ".", "date", "(", ")", "elif", "'__decimal__'", "in", "dct", ":", "return", "Decimal", "(", "dct", "[", "'__decimal__'", "]", ")", "else", ":", "return", "dct" ]
35.727273
13.909091
def _apply_cn_keys_patch(): """ apply this patch due to an issue in http.client.parse_headers when there're multi-bytes in headers. it will truncate some headers. https://github.com/aliyun/aliyun-log-python-sdk/issues/79 """ import sys if sys.version_info[:2] == (3, 5): import http.client as hc old_parse = hc.parse_headers def parse_header(*args, **kwargs): fp = args[0] old_readline = fp.readline def new_readline(*args, **kwargs): ret = old_readline(*args, **kwargs) if ret.lower().startswith(b'x-log-query-info'): return b'x-log-query-info: \r\n' return ret fp.readline = new_readline ret = old_parse(*args, **kwargs) return ret hc.parse_headers = parse_header
[ "def", "_apply_cn_keys_patch", "(", ")", ":", "import", "sys", "if", "sys", ".", "version_info", "[", ":", "2", "]", "==", "(", "3", ",", "5", ")", ":", "import", "http", ".", "client", "as", "hc", "old_parse", "=", "hc", ".", "parse_headers", "def", "parse_header", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "fp", "=", "args", "[", "0", "]", "old_readline", "=", "fp", ".", "readline", "def", "new_readline", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "ret", "=", "old_readline", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "ret", ".", "lower", "(", ")", ".", "startswith", "(", "b'x-log-query-info'", ")", ":", "return", "b'x-log-query-info: \\r\\n'", "return", "ret", "fp", ".", "readline", "=", "new_readline", "ret", "=", "old_parse", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "ret", "hc", ".", "parse_headers", "=", "parse_header" ]
32.222222
16.148148
def sum_of_squares(simulated_trajectories, observed_trajectories_lookup):
    """
    Return the sum-of-squares distance between the simulated and observed
    trajectories.

    :param simulated_trajectories: Simulated trajectories
    :type simulated_trajectories: list[:class:`means.simulation.Trajectory`]
    :param observed_trajectories_lookup: A dictionary of
        (trajectory.description: trajectory) of observed trajectories
    :type observed_trajectories_lookup: dict
    :return: the distance between simulated and observed trajectories
    :rtype: float
    """
    total = 0
    for simulated in simulated_trajectories:
        try:
            observed = observed_trajectories_lookup[simulated.description]
        except KeyError:
            # No matching observation — this trajectory does not contribute.
            continue

        residuals = observed.values - simulated.values
        # Missing datapoints appear as NaN; drop them from the distance.
        residuals = residuals[~np.isnan(residuals)]
        total += np.sum(np.square(residuals))

    return total
[ "def", "sum_of_squares", "(", "simulated_trajectories", ",", "observed_trajectories_lookup", ")", ":", "dist", "=", "0", "for", "simulated_trajectory", "in", "simulated_trajectories", ":", "observed_trajectory", "=", "None", "try", ":", "observed_trajectory", "=", "observed_trajectories_lookup", "[", "simulated_trajectory", ".", "description", "]", "except", "KeyError", ":", "continue", "deviations", "=", "observed_trajectory", ".", "values", "-", "simulated_trajectory", ".", "values", "# Drop NaNs arising from missing datapoints", "deviations", "=", "deviations", "[", "~", "np", ".", "isnan", "(", "deviations", ")", "]", "dist", "+=", "np", ".", "sum", "(", "np", ".", "square", "(", "deviations", ")", ")", "return", "dist" ]
40.884615
26.961538
def delete_webhook(self, scaling_group, policy, webhook):
    """
    Delete the specified webhook from the policy, delegating to the
    underlying manager.
    """
    manager = self._manager
    return manager.delete_webhook(scaling_group, policy, webhook)
[ "def", "delete_webhook", "(", "self", ",", "scaling_group", ",", "policy", ",", "webhook", ")", ":", "return", "self", ".", "_manager", ".", "delete_webhook", "(", "scaling_group", ",", "policy", ",", "webhook", ")" ]
41.6
13.2
def _listen(self, uuid=None, session=None):
    """ Listen for push events at ``<url>/events``, optionally resuming
        from a previous event ``uuid``, and dispatch the response to
        ``self._did_receive_event``.

        NOTE(review): the ``async=True`` keyword argument below is a
        SyntaxError on Python >= 3.7, where ``async`` became a reserved
        keyword — confirm the supported Python versions / bambou API.
    """
    if self.url is None:
        raise Exception("NURESTPushCenter needs to have a valid URL. please use setURL: before starting it.")

    events_url = "%s/events" % self.url
    if uuid:
        # Resume the event stream from the given event uuid
        events_url = "%s?uuid=%s" % (events_url, uuid)

    request = NURESTRequest(method='GET', url=events_url)

    # Force async to False so the push center will have only 1 thread running
    connection = NURESTConnection(request=request, async=True, callback=self._did_receive_event, root_object=self._root_object)

    if self._timeout:
        # Stop listening once the configured timeout has elapsed overall;
        # otherwise pass the remaining budget down to the connection.
        if int(time()) - self._start_time >= self._timeout:
            pushcenter_logger.debug("[NURESTPushCenter] Timeout (timeout=%ss)." % self._timeout)
            return
        else:
            connection.timeout = self._timeout

    pushcenter_logger.info('Bambou Sending >>>>>>\n%s %s' % (request.method, request.url))
    # connection.ignore_request_idle = True
    connection.start()
[ "def", "_listen", "(", "self", ",", "uuid", "=", "None", ",", "session", "=", "None", ")", ":", "if", "self", ".", "url", "is", "None", ":", "raise", "Exception", "(", "\"NURESTPushCenter needs to have a valid URL. please use setURL: before starting it.\"", ")", "events_url", "=", "\"%s/events\"", "%", "self", ".", "url", "if", "uuid", ":", "events_url", "=", "\"%s?uuid=%s\"", "%", "(", "events_url", ",", "uuid", ")", "request", "=", "NURESTRequest", "(", "method", "=", "'GET'", ",", "url", "=", "events_url", ")", "# Force async to False so the push center will have only 1 thread running", "connection", "=", "NURESTConnection", "(", "request", "=", "request", ",", "async", "=", "True", ",", "callback", "=", "self", ".", "_did_receive_event", ",", "root_object", "=", "self", ".", "_root_object", ")", "if", "self", ".", "_timeout", ":", "if", "int", "(", "time", "(", ")", ")", "-", "self", ".", "_start_time", ">=", "self", ".", "_timeout", ":", "pushcenter_logger", ".", "debug", "(", "\"[NURESTPushCenter] Timeout (timeout=%ss).\"", "%", "self", ".", "_timeout", ")", "return", "else", ":", "connection", ".", "timeout", "=", "self", ".", "_timeout", "pushcenter_logger", ".", "info", "(", "'Bambou Sending >>>>>>\\n%s %s'", "%", "(", "request", ".", "method", ",", "request", ".", "url", ")", ")", "# connection.ignore_request_idle = True", "connection", ".", "start", "(", ")" ]
39.185185
30.740741
def store(self):
    '''
    Append the current value of each variable X named in track_vars to
    the history attribute named X_hist.

    Parameters
    ----------
    none

    Returns
    -------
    none
    '''
    for name in self.track_vars:
        history = getattr(self, name + '_hist')
        history.append(getattr(self, name))
[ "def", "store", "(", "self", ")", ":", "for", "var_name", "in", "self", ".", "track_vars", ":", "value_now", "=", "getattr", "(", "self", ",", "var_name", ")", "getattr", "(", "self", ",", "var_name", "+", "'_hist'", ")", ".", "append", "(", "value_now", ")" ]
24
24.125
async def process_message(self, message, wait=True):
    """Process a message to see if it wakes any waiters.

    This will check waiters registered to see if they match the given
    message.  If so, they are awoken and passed the message.  All matching
    waiters will be woken.

    This method returns False if the message matched no waiters so it was
    ignored.

    Normally you want to use wait=True (the default behavior) to guarantee
    that all callbacks have finished before this method returns.  However,
    sometimes that can cause a deadlock if those callbacks would themselves
    invoke behavior that requires whatever is waiting for this method to be
    alive.  In that case you can pass wait=False to ensure that the caller
    of this method does not block.

    Args:
        message (dict or object): The message that we should process
        wait (bool): Whether to block until all callbacks have finished or
            to return once the callbacks have been launched.

    Returns:
        bool: True if at least one waiter matched, otherwise False.
    """
    # Breadth-first walk of the waiter trie: each context maps message-key
    # values to deeper contexts, with registered waiters stored under _LEAF.
    to_check = deque([self._waiters])
    ignored = True

    while len(to_check) > 0:
        context = to_check.popleft()

        waiters = context.get(OperationManager._LEAF, [])
        for waiter in waiters:
            if isinstance(waiter, asyncio.Future):
                # One-shot waiter: resolve it with the message.
                waiter.set_result(message)
            else:
                try:
                    await _wait_or_launch(self._loop, waiter, message, wait)
                except:  #pylint:disable=bare-except;We can't let a user callback break this routine
                    self._logger.warning("Error calling every_match callback, callback=%s, message=%s",
                                         waiter, message, exc_info=True)

            # At least one waiter matched this message.
            ignored = False

        for key in context:
            if key is OperationManager._LEAF:
                continue

            # Descend only into branches whose key is present in the message
            # and whose value has a registered subtree.
            message_val = _get_key(message, key)
            if message_val is _MISSING:
                continue

            next_level = context[key]
            if message_val in next_level:
                to_check.append(next_level[message_val])

    return not ignored
[ "async", "def", "process_message", "(", "self", ",", "message", ",", "wait", "=", "True", ")", ":", "to_check", "=", "deque", "(", "[", "self", ".", "_waiters", "]", ")", "ignored", "=", "True", "while", "len", "(", "to_check", ")", ">", "0", ":", "context", "=", "to_check", ".", "popleft", "(", ")", "waiters", "=", "context", ".", "get", "(", "OperationManager", ".", "_LEAF", ",", "[", "]", ")", "for", "waiter", "in", "waiters", ":", "if", "isinstance", "(", "waiter", ",", "asyncio", ".", "Future", ")", ":", "waiter", ".", "set_result", "(", "message", ")", "else", ":", "try", ":", "await", "_wait_or_launch", "(", "self", ".", "_loop", ",", "waiter", ",", "message", ",", "wait", ")", "except", ":", "#pylint:disable=bare-except;We can't let a user callback break this routine", "self", ".", "_logger", ".", "warning", "(", "\"Error calling every_match callback, callback=%s, message=%s\"", ",", "waiter", ",", "message", ",", "exc_info", "=", "True", ")", "ignored", "=", "False", "for", "key", "in", "context", ":", "if", "key", "is", "OperationManager", ".", "_LEAF", ":", "continue", "message_val", "=", "_get_key", "(", "message", ",", "key", ")", "if", "message_val", "is", "_MISSING", ":", "continue", "next_level", "=", "context", "[", "key", "]", "if", "message_val", "in", "next_level", ":", "to_check", ".", "append", "(", "next_level", "[", "message_val", "]", ")", "return", "not", "ignored" ]
39.948276
25.206897
def get_family_lookup_session(self): """Gets the ``OsidSession`` associated with the family lookup service. return: (osid.relationship.FamilyLookupSession) - a ``FamilyLookupSession`` raise: OperationFailed - unable to complete request raise: Unimplemented - ``supports_family_lookup()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_family_lookup()`` is ``true``.* """ if not self.supports_family_lookup(): raise Unimplemented() try: from . import sessions except ImportError: raise OperationFailed() try: session = sessions.FamilyLookupSession(proxy=self._proxy, runtime=self._runtime) except AttributeError: raise OperationFailed() return session
[ "def", "get_family_lookup_session", "(", "self", ")", ":", "if", "not", "self", ".", "supports_family_lookup", "(", ")", ":", "raise", "Unimplemented", "(", ")", "try", ":", "from", ".", "import", "sessions", "except", "ImportError", ":", "raise", "OperationFailed", "(", ")", "try", ":", "session", "=", "sessions", ".", "FamilyLookupSession", "(", "proxy", "=", "self", ".", "_proxy", ",", "runtime", "=", "self", ".", "_runtime", ")", "except", "AttributeError", ":", "raise", "OperationFailed", "(", ")", "return", "session" ]
39.478261
18.217391
def get_lyrics_threaded(song, l_sources=None): """ Launches a pool of threads to search for the lyrics of a single song. The optional parameter 'sources' specifies an alternative list of sources. If not present, the main list will be used. """ if l_sources is None: l_sources = sources if song.lyrics and not CONFIG['overwrite']: logger.debug('%s already has embedded lyrics', song) return None runtimes = {} queue = Queue() pool = [LyrThread(source, song, queue) for source in l_sources] for thread in pool: thread.start() for _ in range(len(pool)): result = queue.get() runtimes[result['source']] = result['runtime'] if result['lyrics']: break if result['lyrics']: song.lyrics = result['lyrics'] source = result['source'] else: source = None return Result(song, source, runtimes)
[ "def", "get_lyrics_threaded", "(", "song", ",", "l_sources", "=", "None", ")", ":", "if", "l_sources", "is", "None", ":", "l_sources", "=", "sources", "if", "song", ".", "lyrics", "and", "not", "CONFIG", "[", "'overwrite'", "]", ":", "logger", ".", "debug", "(", "'%s already has embedded lyrics'", ",", "song", ")", "return", "None", "runtimes", "=", "{", "}", "queue", "=", "Queue", "(", ")", "pool", "=", "[", "LyrThread", "(", "source", ",", "song", ",", "queue", ")", "for", "source", "in", "l_sources", "]", "for", "thread", "in", "pool", ":", "thread", ".", "start", "(", ")", "for", "_", "in", "range", "(", "len", "(", "pool", ")", ")", ":", "result", "=", "queue", ".", "get", "(", ")", "runtimes", "[", "result", "[", "'source'", "]", "]", "=", "result", "[", "'runtime'", "]", "if", "result", "[", "'lyrics'", "]", ":", "break", "if", "result", "[", "'lyrics'", "]", ":", "song", ".", "lyrics", "=", "result", "[", "'lyrics'", "]", "source", "=", "result", "[", "'source'", "]", "else", ":", "source", "=", "None", "return", "Result", "(", "song", ",", "source", ",", "runtimes", ")" ]
27.484848
19.787879
def get(self, node=None): """Run basic healthchecks against the current node, or against a given node. Example response: > {"status":"ok"} > {"status":"failed","reason":"string"} :param node: Node name :raises ApiError: Raises if the remote server encountered an error. :raises ApiConnectionError: Raises if there was a connectivity issue. :rtype: dict """ if not node: return self.http_client.get(HEALTHCHECKS) return self.http_client.get(HEALTHCHECKS_NODE % node)
[ "def", "get", "(", "self", ",", "node", "=", "None", ")", ":", "if", "not", "node", ":", "return", "self", ".", "http_client", ".", "get", "(", "HEALTHCHECKS", ")", "return", "self", ".", "http_client", ".", "get", "(", "HEALTHCHECKS_NODE", "%", "node", ")" ]
32.222222
21.722222
def envelop(self, begin, end=None): """ Returns the set of all intervals fully contained in the range [begin, end). Completes in O(m + k*log n) time, where: * n = size of the tree * m = number of matches * k = size of the search range :rtype: set of Interval """ root = self.top_node if not root: return set() if end is None: iv = begin return self.envelop(iv.begin, iv.end) elif begin >= end: return set() result = root.search_point(begin, set()) # bound_begin might be greater boundary_table = self.boundary_table bound_begin = boundary_table.bisect_left(begin) bound_end = boundary_table.bisect_left(end) # up to, but not including end result.update(root.search_overlap( # slice notation is slightly slower boundary_table.keys()[index] for index in xrange(bound_begin, bound_end) )) # TODO: improve envelop() to use node info instead of less-efficient filtering result = set( iv for iv in result if iv.begin >= begin and iv.end <= end ) return result
[ "def", "envelop", "(", "self", ",", "begin", ",", "end", "=", "None", ")", ":", "root", "=", "self", ".", "top_node", "if", "not", "root", ":", "return", "set", "(", ")", "if", "end", "is", "None", ":", "iv", "=", "begin", "return", "self", ".", "envelop", "(", "iv", ".", "begin", ",", "iv", ".", "end", ")", "elif", "begin", ">=", "end", ":", "return", "set", "(", ")", "result", "=", "root", ".", "search_point", "(", "begin", ",", "set", "(", ")", ")", "# bound_begin might be greater", "boundary_table", "=", "self", ".", "boundary_table", "bound_begin", "=", "boundary_table", ".", "bisect_left", "(", "begin", ")", "bound_end", "=", "boundary_table", ".", "bisect_left", "(", "end", ")", "# up to, but not including end", "result", ".", "update", "(", "root", ".", "search_overlap", "(", "# slice notation is slightly slower", "boundary_table", ".", "keys", "(", ")", "[", "index", "]", "for", "index", "in", "xrange", "(", "bound_begin", ",", "bound_end", ")", ")", ")", "# TODO: improve envelop() to use node info instead of less-efficient filtering", "result", "=", "set", "(", "iv", "for", "iv", "in", "result", "if", "iv", ".", "begin", ">=", "begin", "and", "iv", ".", "end", "<=", "end", ")", "return", "result" ]
35.558824
17.794118
def sync_skills_data(self): """ Update internal skill_data_structure from disk. """ self.skills_data = self.load_skills_data() if 'upgraded' in self.skills_data: self.skills_data.pop('upgraded') else: self.skills_data_hash = skills_data_hash(self.skills_data)
[ "def", "sync_skills_data", "(", "self", ")", ":", "self", ".", "skills_data", "=", "self", ".", "load_skills_data", "(", ")", "if", "'upgraded'", "in", "self", ".", "skills_data", ":", "self", ".", "skills_data", ".", "pop", "(", "'upgraded'", ")", "else", ":", "self", ".", "skills_data_hash", "=", "skills_data_hash", "(", "self", ".", "skills_data", ")" ]
44.142857
12.285714
def standardize_cell(cell, to_primitive=False, no_idealize=False, symprec=1e-5, angle_tolerance=-1.0): """Return standardized cell. Args: cell, symprec, angle_tolerance: See the docstring of get_symmetry. to_primitive: bool: If True, the standardized primitive cell is created. no_idealize: bool: If True, it is disabled to idealize lengths and angles of basis vectors and positions of atoms according to crystal symmetry. Return: The standardized unit cell or primitive cell is returned by a tuple of (lattice, positions, numbers). If it fails, None is returned. """ _set_no_error() lattice, _positions, _numbers, _ = _expand_cell(cell) if lattice is None: return None # Atomic positions have to be specified by scaled positions for spglib. num_atom = len(_positions) positions = np.zeros((num_atom * 4, 3), dtype='double', order='C') positions[:num_atom] = _positions numbers = np.zeros(num_atom * 4, dtype='intc') numbers[:num_atom] = _numbers num_atom_std = spg.standardize_cell(lattice, positions, numbers, num_atom, to_primitive * 1, no_idealize * 1, symprec, angle_tolerance) _set_error_message() if num_atom_std > 0: return (np.array(lattice.T, dtype='double', order='C'), np.array(positions[:num_atom_std], dtype='double', order='C'), np.array(numbers[:num_atom_std], dtype='intc')) else: return None
[ "def", "standardize_cell", "(", "cell", ",", "to_primitive", "=", "False", ",", "no_idealize", "=", "False", ",", "symprec", "=", "1e-5", ",", "angle_tolerance", "=", "-", "1.0", ")", ":", "_set_no_error", "(", ")", "lattice", ",", "_positions", ",", "_numbers", ",", "_", "=", "_expand_cell", "(", "cell", ")", "if", "lattice", "is", "None", ":", "return", "None", "# Atomic positions have to be specified by scaled positions for spglib.", "num_atom", "=", "len", "(", "_positions", ")", "positions", "=", "np", ".", "zeros", "(", "(", "num_atom", "*", "4", ",", "3", ")", ",", "dtype", "=", "'double'", ",", "order", "=", "'C'", ")", "positions", "[", ":", "num_atom", "]", "=", "_positions", "numbers", "=", "np", ".", "zeros", "(", "num_atom", "*", "4", ",", "dtype", "=", "'intc'", ")", "numbers", "[", ":", "num_atom", "]", "=", "_numbers", "num_atom_std", "=", "spg", ".", "standardize_cell", "(", "lattice", ",", "positions", ",", "numbers", ",", "num_atom", ",", "to_primitive", "*", "1", ",", "no_idealize", "*", "1", ",", "symprec", ",", "angle_tolerance", ")", "_set_error_message", "(", ")", "if", "num_atom_std", ">", "0", ":", "return", "(", "np", ".", "array", "(", "lattice", ".", "T", ",", "dtype", "=", "'double'", ",", "order", "=", "'C'", ")", ",", "np", ".", "array", "(", "positions", "[", ":", "num_atom_std", "]", ",", "dtype", "=", "'double'", ",", "order", "=", "'C'", ")", ",", "np", ".", "array", "(", "numbers", "[", ":", "num_atom_std", "]", ",", "dtype", "=", "'intc'", ")", ")", "else", ":", "return", "None" ]
38.244898
17.897959
def field_metadata(self, well_row=0, well_column=0, field_row=0, field_column=0): """Get OME-XML metadata of given field. Parameters ---------- well_row : int Y well coordinate. Same as --V in files. well_column : int X well coordinate. Same as --U in files. field_row : int Y field coordinate. Same as --Y in files. field_column : int X field coordinate. Same as --X in files. Returns ------- lxml.objectify.ObjectifiedElement lxml object of OME-XML found in slide/chamber/field/metadata. """ def condition(path): attrs = attributes(path) return (attrs.u == well_column and attrs.v == well_row and attrs.x == field_column and attrs.y == field_row) field = [f for f in self.fields if condition(f)] if field: field = field[0] filename = _pattern(field, 'metadata', _image, extension='*.ome.xml') filename = glob(filename)[0] # resolve, assume found return objectify.parse(filename).getroot()
[ "def", "field_metadata", "(", "self", ",", "well_row", "=", "0", ",", "well_column", "=", "0", ",", "field_row", "=", "0", ",", "field_column", "=", "0", ")", ":", "def", "condition", "(", "path", ")", ":", "attrs", "=", "attributes", "(", "path", ")", "return", "(", "attrs", ".", "u", "==", "well_column", "and", "attrs", ".", "v", "==", "well_row", "and", "attrs", ".", "x", "==", "field_column", "and", "attrs", ".", "y", "==", "field_row", ")", "field", "=", "[", "f", "for", "f", "in", "self", ".", "fields", "if", "condition", "(", "f", ")", "]", "if", "field", ":", "field", "=", "field", "[", "0", "]", "filename", "=", "_pattern", "(", "field", ",", "'metadata'", ",", "_image", ",", "extension", "=", "'*.ome.xml'", ")", "filename", "=", "glob", "(", "filename", ")", "[", "0", "]", "# resolve, assume found", "return", "objectify", ".", "parse", "(", "filename", ")", ".", "getroot", "(", ")" ]
35.909091
18.939394
def _split_horizontal(self, section, width, height): """For an horizontal split the rectangle is placed in the lower left corner of the section (section's xy coordinates), the top most side of the rectangle and its horizontal continuation, marks the line of division for the split. +-----------------+ | | | | | | | | +-------+---------+ |#######| | |#######| | |#######| | +-------+---------+ If the rectangle width is equal to the the section width, only one section is created over the rectangle. If the rectangle height is equal to the section height, only one section to the right of the rectangle is created. If both width and height are equal, no sections are created. """ # First remove the section we are splitting so it doesn't # interfere when later we try to merge the resulting split # rectangles, with the rest of free sections. #self._sections.remove(section) # Creates two new empty sections, and returns the new rectangle. if height < section.height: self._add_section(Rectangle(section.x, section.y+height, section.width, section.height-height)) if width < section.width: self._add_section(Rectangle(section.x+width, section.y, section.width-width, height))
[ "def", "_split_horizontal", "(", "self", ",", "section", ",", "width", ",", "height", ")", ":", "# First remove the section we are splitting so it doesn't ", "# interfere when later we try to merge the resulting split", "# rectangles, with the rest of free sections.", "#self._sections.remove(section)", "# Creates two new empty sections, and returns the new rectangle.", "if", "height", "<", "section", ".", "height", ":", "self", ".", "_add_section", "(", "Rectangle", "(", "section", ".", "x", ",", "section", ".", "y", "+", "height", ",", "section", ".", "width", ",", "section", ".", "height", "-", "height", ")", ")", "if", "width", "<", "section", ".", "width", ":", "self", ".", "_add_section", "(", "Rectangle", "(", "section", ".", "x", "+", "width", ",", "section", ".", "y", ",", "section", ".", "width", "-", "width", ",", "height", ")", ")" ]
44.264706
18.5
def _format_date_param(params, key, format="%Y-%m-%d %H:%M:%S"): """ Utility function to convert datetime values to strings. If the value is already a str, or is not in the dict, no change is made. :param params: A `dict` of params that may contain a `datetime` value. :param key: The datetime value to be converted to a `str` :param format: The `strftime` format to be used to format the date. The default value is '%Y-%m-%d %H:%M:%S' """ if key in params: param = params[key] if hasattr(param, "strftime"): params[key] = param.strftime(format)
[ "def", "_format_date_param", "(", "params", ",", "key", ",", "format", "=", "\"%Y-%m-%d %H:%M:%S\"", ")", ":", "if", "key", "in", "params", ":", "param", "=", "params", "[", "key", "]", "if", "hasattr", "(", "param", ",", "\"strftime\"", ")", ":", "params", "[", "key", "]", "=", "param", ".", "strftime", "(", "format", ")" ]
42.428571
23.428571
def transition_matrix_reversible_pisym(C, return_statdist=False, **kwargs): r""" Estimates reversible transition matrix as follows: ..:math: p_{ij} = c_{ij} / c_i where c_i = sum_j c_{ij} \pi_j = \sum_j \pi_i p_{ij} x_{ij} = \pi_i p_{ij} + \pi_j p_{ji} p^{rev}_{ij} = x_{ij} / x_i where x_i = sum_j x_{ij} In words: takes the nonreversible transition matrix estimate, uses its stationary distribution to compute an equilibrium correlation matrix, symmetrizes that correlation matrix and then normalizes to the reversible transition matrix estimate. Parameters ---------- C: ndarray, shape (n,n) count matrix Returns ------- T: Estimated transition matrix """ # nonreversible estimate T_nonrev = transition_matrix_non_reversible(C) from msmtools.analysis import stationary_distribution pi = stationary_distribution(T_nonrev) # correlation matrix X = pi[:, None] * T_nonrev X = X.T + X # result T_rev = X / X.sum(axis=1)[:, None] if return_statdist: #np.testing.assert_allclose(pi, stationary_distribution(T_rev)) #np.testing.assert_allclose(T_rev.T.dot(pi), pi) return T_rev, pi return T_rev
[ "def", "transition_matrix_reversible_pisym", "(", "C", ",", "return_statdist", "=", "False", ",", "*", "*", "kwargs", ")", ":", "# nonreversible estimate", "T_nonrev", "=", "transition_matrix_non_reversible", "(", "C", ")", "from", "msmtools", ".", "analysis", "import", "stationary_distribution", "pi", "=", "stationary_distribution", "(", "T_nonrev", ")", "# correlation matrix", "X", "=", "pi", "[", ":", ",", "None", "]", "*", "T_nonrev", "X", "=", "X", ".", "T", "+", "X", "# result", "T_rev", "=", "X", "/", "X", ".", "sum", "(", "axis", "=", "1", ")", "[", ":", ",", "None", "]", "if", "return_statdist", ":", "#np.testing.assert_allclose(pi, stationary_distribution(T_rev))", "#np.testing.assert_allclose(T_rev.T.dot(pi), pi)", "return", "T_rev", ",", "pi", "return", "T_rev" ]
31.307692
20.717949
def _convert_choices(self, choices): """Auto create db values then call super method""" final_choices = [] for choice in choices: if isinstance(choice, ChoiceEntry): final_choices.append(choice) continue original_choice = choice if isinstance(choice, six.string_types): if choice == _NO_SUBSET_NAME_: continue choice = [choice, ] else: choice = list(choice) length = len(choice) assert 1 <= length <= 4, 'Invalid number of entries in %s' % (original_choice,) final_choice = [] # do we have attributes? if length > 1 and isinstance(choice[-1], Mapping): final_choice.append(choice.pop()) elif length == 4: attributes = choice.pop() assert attributes is None or isinstance(attributes, Mapping), 'Last argument must be a dict-like object in %s' % (original_choice,) if attributes: final_choice.append(attributes) # the constant final_choice.insert(0, choice.pop(0)) if len(choice): # we were given a db value final_choice.insert(1, choice.pop(0)) if len(choice): # we were given a display value final_choice.insert(2, choice.pop(0)) else: # set None to compute it later final_choice.insert(1, None) if final_choice[1] is None: # no db value, we compute it from the constant final_choice[1] = self.value_transform(final_choice[0]) final_choices.append(final_choice) return super(AutoChoices, self)._convert_choices(final_choices)
[ "def", "_convert_choices", "(", "self", ",", "choices", ")", ":", "final_choices", "=", "[", "]", "for", "choice", "in", "choices", ":", "if", "isinstance", "(", "choice", ",", "ChoiceEntry", ")", ":", "final_choices", ".", "append", "(", "choice", ")", "continue", "original_choice", "=", "choice", "if", "isinstance", "(", "choice", ",", "six", ".", "string_types", ")", ":", "if", "choice", "==", "_NO_SUBSET_NAME_", ":", "continue", "choice", "=", "[", "choice", ",", "]", "else", ":", "choice", "=", "list", "(", "choice", ")", "length", "=", "len", "(", "choice", ")", "assert", "1", "<=", "length", "<=", "4", ",", "'Invalid number of entries in %s'", "%", "(", "original_choice", ",", ")", "final_choice", "=", "[", "]", "# do we have attributes?", "if", "length", ">", "1", "and", "isinstance", "(", "choice", "[", "-", "1", "]", ",", "Mapping", ")", ":", "final_choice", ".", "append", "(", "choice", ".", "pop", "(", ")", ")", "elif", "length", "==", "4", ":", "attributes", "=", "choice", ".", "pop", "(", ")", "assert", "attributes", "is", "None", "or", "isinstance", "(", "attributes", ",", "Mapping", ")", ",", "'Last argument must be a dict-like object in %s'", "%", "(", "original_choice", ",", ")", "if", "attributes", ":", "final_choice", ".", "append", "(", "attributes", ")", "# the constant", "final_choice", ".", "insert", "(", "0", ",", "choice", ".", "pop", "(", "0", ")", ")", "if", "len", "(", "choice", ")", ":", "# we were given a db value", "final_choice", ".", "insert", "(", "1", ",", "choice", ".", "pop", "(", "0", ")", ")", "if", "len", "(", "choice", ")", ":", "# we were given a display value", "final_choice", ".", "insert", "(", "2", ",", "choice", ".", "pop", "(", "0", ")", ")", "else", ":", "# set None to compute it later", "final_choice", ".", "insert", "(", "1", ",", "None", ")", "if", "final_choice", "[", "1", "]", "is", "None", ":", "# no db value, we compute it from the constant", "final_choice", "[", "1", "]", "=", "self", ".", 
"value_transform", "(", "final_choice", "[", "0", "]", ")", "final_choices", ".", "append", "(", "final_choice", ")", "return", "super", "(", "AutoChoices", ",", "self", ")", ".", "_convert_choices", "(", "final_choices", ")" ]
34.811321
19.943396
def _forceInt(x,y,z,a2,b2,c2,n,i): """Integral involved in the force at (x,y,z) integrates 1/A B^n (x_i/(tau+a_i)) where A = sqrt((tau+a)(tau+b)(tau+c)) and B = (1-x^2/(tau+a)-y^2/(tau+b)-z^2/(tau+c)) from lambda to infty with respect to tau. The lower limit lambda is given by lowerlim function. """ def integrand(tau): return (x*(i==0) + y*(i==1) + z*(i==2))/(a2*(i==0) + b2*(i==1) + c2*(i==2) + tau) * \ _FracInt(x, y, z, a2, b2, c2, tau, n) return integrate.quad(integrand, lowerlim(x**2, y**2, z**2, a2, b2, c2), np.inf, epsabs=1e-12)[0]
[ "def", "_forceInt", "(", "x", ",", "y", ",", "z", ",", "a2", ",", "b2", ",", "c2", ",", "n", ",", "i", ")", ":", "def", "integrand", "(", "tau", ")", ":", "return", "(", "x", "*", "(", "i", "==", "0", ")", "+", "y", "*", "(", "i", "==", "1", ")", "+", "z", "*", "(", "i", "==", "2", ")", ")", "/", "(", "a2", "*", "(", "i", "==", "0", ")", "+", "b2", "*", "(", "i", "==", "1", ")", "+", "c2", "*", "(", "i", "==", "2", ")", "+", "tau", ")", "*", "_FracInt", "(", "x", ",", "y", ",", "z", ",", "a2", ",", "b2", ",", "c2", ",", "tau", ",", "n", ")", "return", "integrate", ".", "quad", "(", "integrand", ",", "lowerlim", "(", "x", "**", "2", ",", "y", "**", "2", ",", "z", "**", "2", ",", "a2", ",", "b2", ",", "c2", ")", ",", "np", ".", "inf", ",", "epsabs", "=", "1e-12", ")", "[", "0", "]" ]
53.090909
19.545455
def _read_string(self, cpu, buf): """ Reads a null terminated concrete buffer form memory :todo: FIX. move to cpu or memory """ filename = "" for i in range(0, 1024): c = Operators.CHR(cpu.read_int(buf + i, 8)) if c == '\x00': break filename += c return filename
[ "def", "_read_string", "(", "self", ",", "cpu", ",", "buf", ")", ":", "filename", "=", "\"\"", "for", "i", "in", "range", "(", "0", ",", "1024", ")", ":", "c", "=", "Operators", ".", "CHR", "(", "cpu", ".", "read_int", "(", "buf", "+", "i", ",", "8", ")", ")", "if", "c", "==", "'\\x00'", ":", "break", "filename", "+=", "c", "return", "filename" ]
29.916667
11.083333
def validate_commit_index(func): """Apply to State Machine everything up to commit index""" @functools.wraps(func) def wrapped(self, *args, **kwargs): for not_applied in range(self.log.last_applied + 1, self.log.commit_index + 1): self.state_machine.apply(self.log[not_applied]['command']) self.log.last_applied += 1 try: self.apply_future.set_result(not_applied) except (asyncio.futures.InvalidStateError, AttributeError): pass return func(self, *args, **kwargs) return wrapped
[ "def", "validate_commit_index", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapped", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "for", "not_applied", "in", "range", "(", "self", ".", "log", ".", "last_applied", "+", "1", ",", "self", ".", "log", ".", "commit_index", "+", "1", ")", ":", "self", ".", "state_machine", ".", "apply", "(", "self", ".", "log", "[", "not_applied", "]", "[", "'command'", "]", ")", "self", ".", "log", ".", "last_applied", "+=", "1", "try", ":", "self", ".", "apply_future", ".", "set_result", "(", "not_applied", ")", "except", "(", "asyncio", ".", "futures", ".", "InvalidStateError", ",", "AttributeError", ")", ":", "pass", "return", "func", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapped" ]
36.125
21.125
def delete(self, **kwargs): """Delete a resource by issuing a DELETE http request against it.""" self.method = 'delete' if len(kwargs) > 0: self.load(self.client.delete(self.url, params=kwargs)) else: self.load(self.client.delete(self.url)) self.parent.remove(self) return
[ "def", "delete", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "method", "=", "'delete'", "if", "len", "(", "kwargs", ")", ">", "0", ":", "self", ".", "load", "(", "self", ".", "client", ".", "delete", "(", "self", ".", "url", ",", "params", "=", "kwargs", ")", ")", "else", ":", "self", ".", "load", "(", "self", ".", "client", ".", "delete", "(", "self", ".", "url", ")", ")", "self", ".", "parent", ".", "remove", "(", "self", ")", "return" ]
37.333333
14.888889
def header(self, item0, *items): """print string item0 to the current position and next strings to defined positions example: .header("Name", 75, "Quantity", 100, "Unit") """ self.txt(item0) at_x = None for item in items: if at_x is None: at_x = item else: self.txt(item, at_x=at_x) at_x = None
[ "def", "header", "(", "self", ",", "item0", ",", "*", "items", ")", ":", "self", ".", "txt", "(", "item0", ")", "at_x", "=", "None", "for", "item", "in", "items", ":", "if", "at_x", "is", "None", ":", "at_x", "=", "item", "else", ":", "self", ".", "txt", "(", "item", ",", "at_x", "=", "at_x", ")", "at_x", "=", "None" ]
33.583333
11.916667