Columns (name, type, observed range):
  text          string, lengths 89 to 104k
  code_tokens   list
  avg_line_len  float64, 7.91 to 980
  score         float64, 0 to 630
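Each row that follows carries these four fields in the order above: text holds the source of one Python function flattened onto a single line, code_tokens holds the same function as a list of string tokens, and avg_line_len and score are per-row numbers. Nothing in this dump states how the derived fields were produced, so the sketch below is only an assumed reconstruction: it treats avg_line_len as the mean character count per line of the unflattened source, and it approximates code_tokens with Python's standard tokenize module, which will not match the stored lists exactly (for instance, it keeps docstrings and emits ** as one token, while the lists below drop docstrings and split **). All function names in the sketch are illustrative, not taken from the dataset.

# Minimal sketch under the assumptions stated above: recompute the two
# derived columns for a row whose "text" field has its newlines restored.
import io
import tokenize

def avg_line_len(source: str) -> float:
    # Assumed definition: mean number of characters per source line.
    lines = source.splitlines()
    return sum(len(line) for line in lines) / len(lines) if lines else 0.0

def code_tokens(source: str) -> list:
    # Approximation of the code_tokens column with the stdlib tokenizer.
    # Structural tokens (newlines, indentation, end marker) are dropped;
    # comments and string literals stay as single tokens.
    skip = {tokenize.NL, tokenize.NEWLINE, tokenize.INDENT,
            tokenize.DEDENT, tokenize.ENDMARKER}
    return [tok.string
            for tok in tokenize.generate_tokens(io.StringIO(source).readline)
            if tok.type not in skip]

example = 'def add(a, b):\n    return a + b\n'
print(avg_line_len(example))  # 15.0
print(code_tokens(example))   # ['def', 'add', '(', 'a', ',', 'b', ')', ':', 'return', 'a', '+', 'b']

If the assumptions hold and the per-row numbers really follow the header order, avg_line_len applied to the restored source of the first row below should land near its stored value of 46.9; a noticeable gap would point to a different convention, such as ignoring blank lines.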
def _prepare_smoove_bams(full_bams, sr_bams, disc_bams, items, tx_work_dir): """Prepare BAMs for smoove, linking in pre-existing split/disc BAMs if present. Smoove can use pre-existing discordant and split BAMs prepared by samblaster if present as $sample.split.bam and $sample.disc.bam. """ input_dir = utils.safe_makedir(tx_work_dir) out = [] for full, sr, disc, data in zip(full_bams, sr_bams, disc_bams, items): if sr and disc: new_full = os.path.join(input_dir, "%s.bam" % dd.get_sample_name(data)) new_sr = os.path.join(input_dir, "%s.split.bam" % dd.get_sample_name(data)) new_disc = os.path.join(input_dir, "%s.disc.bam" % dd.get_sample_name(data)) utils.symlink_plus(full, new_full) utils.symlink_plus(sr, new_sr) utils.symlink_plus(disc, new_disc) out.append(new_full) else: out.append(full) return out
[ "def", "_prepare_smoove_bams", "(", "full_bams", ",", "sr_bams", ",", "disc_bams", ",", "items", ",", "tx_work_dir", ")", ":", "input_dir", "=", "utils", ".", "safe_makedir", "(", "tx_work_dir", ")", "out", "=", "[", "]", "for", "full", ",", "sr", ",", "disc", ",", "data", "in", "zip", "(", "full_bams", ",", "sr_bams", ",", "disc_bams", ",", "items", ")", ":", "if", "sr", "and", "disc", ":", "new_full", "=", "os", ".", "path", ".", "join", "(", "input_dir", ",", "\"%s.bam\"", "%", "dd", ".", "get_sample_name", "(", "data", ")", ")", "new_sr", "=", "os", ".", "path", ".", "join", "(", "input_dir", ",", "\"%s.split.bam\"", "%", "dd", ".", "get_sample_name", "(", "data", ")", ")", "new_disc", "=", "os", ".", "path", ".", "join", "(", "input_dir", ",", "\"%s.disc.bam\"", "%", "dd", ".", "get_sample_name", "(", "data", ")", ")", "utils", ".", "symlink_plus", "(", "full", ",", "new_full", ")", "utils", ".", "symlink_plus", "(", "sr", ",", "new_sr", ")", "utils", ".", "symlink_plus", "(", "disc", ",", "new_disc", ")", "out", ".", "append", "(", "new_full", ")", "else", ":", "out", ".", "append", "(", "full", ")", "return", "out" ]
46.9
22.2
def _uncythonized_model(self, beta): """ Creates the structure of the model Parameters ---------- beta : np.array Contains untransformed starting values for latent variables Returns ---------- theta : np.array Contains the predicted values for the time series Y : np.array Contains the length-adjusted time series (accounting for lags) scores : np.array Contains the scores for the time series """ parm = np.array([self.latent_variables.z_list[k].prior.transform(beta[k]) for k in range(beta.shape[0])]) model_scale, model_shape, model_skewness = self._get_scale_and_shape(parm) theta = np.matmul(self.X[self.integ+self.max_lag:],parm[self.sc+self.ar:(self.sc+self.ar+len(self.X_names))]) # Loop over time series theta, self.model_scores = gasx_recursion(parm, theta, self.model_scores, self.model_Y, self.ar, self.sc, self.model_Y.shape[0], self.family.score_function, self.link, model_scale, model_shape, model_skewness, self.max_lag) return np.array(theta), self.model_Y, self.model_scores
[ "def", "_uncythonized_model", "(", "self", ",", "beta", ")", ":", "parm", "=", "np", ".", "array", "(", "[", "self", ".", "latent_variables", ".", "z_list", "[", "k", "]", ".", "prior", ".", "transform", "(", "beta", "[", "k", "]", ")", "for", "k", "in", "range", "(", "beta", ".", "shape", "[", "0", "]", ")", "]", ")", "model_scale", ",", "model_shape", ",", "model_skewness", "=", "self", ".", "_get_scale_and_shape", "(", "parm", ")", "theta", "=", "np", ".", "matmul", "(", "self", ".", "X", "[", "self", ".", "integ", "+", "self", ".", "max_lag", ":", "]", ",", "parm", "[", "self", ".", "sc", "+", "self", ".", "ar", ":", "(", "self", ".", "sc", "+", "self", ".", "ar", "+", "len", "(", "self", ".", "X_names", ")", ")", "]", ")", "# Loop over time series", "theta", ",", "self", ".", "model_scores", "=", "gasx_recursion", "(", "parm", ",", "theta", ",", "self", ".", "model_scores", ",", "self", ".", "model_Y", ",", "self", ".", "ar", ",", "self", ".", "sc", ",", "self", ".", "model_Y", ".", "shape", "[", "0", "]", ",", "self", ".", "family", ".", "score_function", ",", "self", ".", "link", ",", "model_scale", ",", "model_shape", ",", "model_skewness", ",", "self", ".", "max_lag", ")", "return", "np", ".", "array", "(", "theta", ")", ",", "self", ".", "model_Y", ",", "self", ".", "model_scores" ]
40
31.965517
def entry(argv): ''' Command entry ''' command_dic = { 'migrate': run_migrate, 'init': run_init, 'send_nologin': run_send_nologin, 'send_all': run_send_all, 'review': run_review, 'sitemap': run_sitemap, 'editmap': run_editmap, 'check_kind': run_check_kind, 'init_tables': run_init_tables, 'drop_tables': run_drop_tables, 'gen_category': run_gen_category, 'auto': run_auto, 'whoosh': run_whoosh, 'html': run_checkit, 'update_cat': run_update_cat, 'check200': run_check200, } try: # 这里的 h 就表示该选项无参数,i:表示 i 选项后需要有参数 opts, args = getopt.getopt(argv, "hi:") except getopt.GetoptError: print('Error: helper.py -i cmd') sys.exit(2) for opt, arg in opts: if opt == "-h": print('helper.py -i cmd') print('cmd list ----------------------') print(' init: ') print(' migrate: ') print(' review: ') print(' -------------') print(' send_all: ') print(' send_nologin: ') print(' sitemap: ') print(' editmap: ') print(' check_kind: ') print(' check200: ') sys.exit() elif opt == "-i": if arg in command_dic: command_dic[arg](args) print('QED!') else: print('Wrong Command.')
[ "def", "entry", "(", "argv", ")", ":", "command_dic", "=", "{", "'migrate'", ":", "run_migrate", ",", "'init'", ":", "run_init", ",", "'send_nologin'", ":", "run_send_nologin", ",", "'send_all'", ":", "run_send_all", ",", "'review'", ":", "run_review", ",", "'sitemap'", ":", "run_sitemap", ",", "'editmap'", ":", "run_editmap", ",", "'check_kind'", ":", "run_check_kind", ",", "'init_tables'", ":", "run_init_tables", ",", "'drop_tables'", ":", "run_drop_tables", ",", "'gen_category'", ":", "run_gen_category", ",", "'auto'", ":", "run_auto", ",", "'whoosh'", ":", "run_whoosh", ",", "'html'", ":", "run_checkit", ",", "'update_cat'", ":", "run_update_cat", ",", "'check200'", ":", "run_check200", ",", "}", "try", ":", "# 这里的 h 就表示该选项无参数,i:表示 i 选项后需要有参数", "opts", ",", "args", "=", "getopt", ".", "getopt", "(", "argv", ",", "\"hi:\"", ")", "except", "getopt", ".", "GetoptError", ":", "print", "(", "'Error: helper.py -i cmd'", ")", "sys", ".", "exit", "(", "2", ")", "for", "opt", ",", "arg", "in", "opts", ":", "if", "opt", "==", "\"-h\"", ":", "print", "(", "'helper.py -i cmd'", ")", "print", "(", "'cmd list ----------------------'", ")", "print", "(", "' init: '", ")", "print", "(", "' migrate: '", ")", "print", "(", "' review: '", ")", "print", "(", "' -------------'", ")", "print", "(", "' send_all: '", ")", "print", "(", "' send_nologin: '", ")", "print", "(", "' sitemap: '", ")", "print", "(", "' editmap: '", ")", "print", "(", "' check_kind: '", ")", "print", "(", "' check200: '", ")", "sys", ".", "exit", "(", ")", "elif", "opt", "==", "\"-i\"", ":", "if", "arg", "in", "command_dic", ":", "command_dic", "[", "arg", "]", "(", "args", ")", "print", "(", "'QED!'", ")", "else", ":", "print", "(", "'Wrong Command.'", ")" ]
29.45098
11.529412
def cart_get(self, CartId=None, HMAC=None, **kwargs): """CartGet fetches existing cart :param CartId: see CartCreate :param HMAC: see CartCreate :return: An :class:`~.AmazonCart`. """ if not CartId or not HMAC: raise CartException('CartId required for CartGet call') response = self.api.CartGet(CartId=CartId, HMAC=HMAC, **kwargs) root = objectify.fromstring(response) cart = AmazonCart(root) self._check_for_cart_error(cart) return cart
[ "def", "cart_get", "(", "self", ",", "CartId", "=", "None", ",", "HMAC", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "not", "CartId", "or", "not", "HMAC", ":", "raise", "CartException", "(", "'CartId required for CartGet call'", ")", "response", "=", "self", ".", "api", ".", "CartGet", "(", "CartId", "=", "CartId", ",", "HMAC", "=", "HMAC", ",", "*", "*", "kwargs", ")", "root", "=", "objectify", ".", "fromstring", "(", "response", ")", "cart", "=", "AmazonCart", "(", "root", ")", "self", ".", "_check_for_cart_error", "(", "cart", ")", "return", "cart" ]
35
13.466667
def update(self, data): "returns True if unknown" if data not in self.filter: self.filter.append(data) if len(self.filter) > self.max_items: self.filter.pop(0) return True else: self.filter.append(self.filter.pop(0)) return False
[ "def", "update", "(", "self", ",", "data", ")", ":", "if", "data", "not", "in", "self", ".", "filter", ":", "self", ".", "filter", ".", "append", "(", "data", ")", "if", "len", "(", "self", ".", "filter", ")", ">", "self", ".", "max_items", ":", "self", ".", "filter", ".", "pop", "(", "0", ")", "return", "True", "else", ":", "self", ".", "filter", ".", "append", "(", "self", ".", "filter", ".", "pop", "(", "0", ")", ")", "return", "False" ]
32
11.8
def clear(self): """Restart with a clean config""" self._config = configparser.RawConfigParser() # Override config from command line even if we modify the config file and live reload it. self._override_config = {} self.read_config()
[ "def", "clear", "(", "self", ")", ":", "self", ".", "_config", "=", "configparser", ".", "RawConfigParser", "(", ")", "# Override config from command line even if we modify the config file and live reload it.", "self", ".", "_override_config", "=", "{", "}", "self", ".", "read_config", "(", ")" ]
38.142857
22
def add_header_check(self, code=HEADER_CHECK_FAILED, message=MESSAGES[HEADER_CHECK_FAILED]): """ Add a header check, i.e., check whether the header record is consistent with the expected field names. Arguments --------- `code` - problem code to report if the header record is not valid, defaults to `HEADER_CHECK_FAILED` `message` - problem message to report if a value is not valid """ t = code, message self._header_checks.append(t)
[ "def", "add_header_check", "(", "self", ",", "code", "=", "HEADER_CHECK_FAILED", ",", "message", "=", "MESSAGES", "[", "HEADER_CHECK_FAILED", "]", ")", ":", "t", "=", "code", ",", "message", "self", ".", "_header_checks", ".", "append", "(", "t", ")" ]
29.421053
21.947368
def close(self): """Close all connections in the pool.""" self._lock.acquire() try: while self._idle_cache: # close all idle connections con = self._idle_cache.pop(0) try: con.close() except Exception: pass if self._maxshared: # close all shared connections while self._shared_cache: con = self._shared_cache.pop(0).con try: con.close() except Exception: pass self._connections -= 1 self._lock.notifyAll() finally: self._lock.release()
[ "def", "close", "(", "self", ")", ":", "self", ".", "_lock", ".", "acquire", "(", ")", "try", ":", "while", "self", ".", "_idle_cache", ":", "# close all idle connections", "con", "=", "self", ".", "_idle_cache", ".", "pop", "(", "0", ")", "try", ":", "con", ".", "close", "(", ")", "except", "Exception", ":", "pass", "if", "self", ".", "_maxshared", ":", "# close all shared connections", "while", "self", ".", "_shared_cache", ":", "con", "=", "self", ".", "_shared_cache", ".", "pop", "(", "0", ")", ".", "con", "try", ":", "con", ".", "close", "(", ")", "except", "Exception", ":", "pass", "self", ".", "_connections", "-=", "1", "self", ".", "_lock", ".", "notifyAll", "(", ")", "finally", ":", "self", ".", "_lock", ".", "release", "(", ")" ]
34.714286
12.428571
def unregister_iq_request_handler(self, type_, payload_cls): """ Unregister a coroutine previously registered with :meth:`register_iq_request_handler`. :param type_: IQ type to react to (must be a request type). :type type_: :class:`~structs.IQType` :param payload_cls: Payload class to react to (subclass of :class:`~xso.XSO`) :type payload_cls: :class:`~.XMLStreamClass` :raises KeyError: if no coroutine has been registered for the given ``(type_, payload_cls)`` pair :raises ValueError: if `type_` is not a valid :class:`~.IQType` (and cannot be cast to a :class:`~.IQType`) The match is solely made using the `type_` and `payload_cls` arguments, which have the same meaning as in :meth:`register_iq_request_coro`. .. versionchanged:: 0.10 Renamed from :meth:`unregister_iq_request_coro`. .. versionchanged:: 0.7 The `type_` argument is now supposed to be a :class:`~.IQType` member. .. deprecated:: 0.7 Passing a :class:`str` as `type_` argument is deprecated and will raise a :class:`TypeError` as of the 1.0 release. See the Changelog for :ref:`api-changelog-0.7` for further details on how to upgrade your code efficiently. """ type_ = self._coerce_enum(type_, structs.IQType) del self._iq_request_map[type_, payload_cls] self._logger.debug( "iq request coroutine unregistered: type=%r, payload=%r", type_, payload_cls)
[ "def", "unregister_iq_request_handler", "(", "self", ",", "type_", ",", "payload_cls", ")", ":", "type_", "=", "self", ".", "_coerce_enum", "(", "type_", ",", "structs", ".", "IQType", ")", "del", "self", ".", "_iq_request_map", "[", "type_", ",", "payload_cls", "]", "self", ".", "_logger", ".", "debug", "(", "\"iq request coroutine unregistered: type=%r, payload=%r\"", ",", "type_", ",", "payload_cls", ")" ]
40.9
22.9
def rec_sqrt(x, context=None): """ Return the reciprocal square root of x. Return +Inf if x is ±0, +0 if x is +Inf, and NaN if x is negative. """ return _apply_function_in_current_context( BigFloat, mpfr.mpfr_rec_sqrt, (BigFloat._implicit_convert(x),), context, )
[ "def", "rec_sqrt", "(", "x", ",", "context", "=", "None", ")", ":", "return", "_apply_function_in_current_context", "(", "BigFloat", ",", "mpfr", ".", "mpfr_rec_sqrt", ",", "(", "BigFloat", ".", "_implicit_convert", "(", "x", ")", ",", ")", ",", "context", ",", ")" ]
23.769231
17.307692
def get_glitter_app(self, glitter_app_name): """ Retrieve the Glitter App config for a specific Glitter App. """ if not self.discovered: self.discover_glitter_apps() try: glitter_app = self.glitter_apps[glitter_app_name] return glitter_app except KeyError: return None
[ "def", "get_glitter_app", "(", "self", ",", "glitter_app_name", ")", ":", "if", "not", "self", ".", "discovered", ":", "self", ".", "discover_glitter_apps", "(", ")", "try", ":", "glitter_app", "=", "self", ".", "glitter_apps", "[", "glitter_app_name", "]", "return", "glitter_app", "except", "KeyError", ":", "return", "None" ]
29.5
14.333333
def assert_page_source_contains(self, expected_value, failure_message='Expected page source to contain: "{}"'): """ Asserts that the page source contains the string passed in expected_value """ assertion = lambda: expected_value in self.driver_wrapper.page_source() self.webdriver_assert(assertion, unicode(failure_message).format(expected_value))
[ "def", "assert_page_source_contains", "(", "self", ",", "expected_value", ",", "failure_message", "=", "'Expected page source to contain: \"{}\"'", ")", ":", "assertion", "=", "lambda", ":", "expected_value", "in", "self", ".", "driver_wrapper", ".", "page_source", "(", ")", "self", ".", "webdriver_assert", "(", "assertion", ",", "unicode", "(", "failure_message", ")", ".", "format", "(", "expected_value", ")", ")" ]
63.666667
33.333333
def read_samples_from_file(self): """ Load the audio samples from file into memory. If ``self.file_format`` is ``None`` or it is not ``("pcm_s16le", 1, self.rconf.sample_rate)``, the file will be first converted to a temporary PCM16 mono WAVE file. Audio data will be read from this temporary file, which will be then deleted from disk immediately. Otherwise, the audio data will be read directly from the given file, which will not be deleted from disk. :raises: :class:`~aeneas.audiofile.AudioFileConverterError`: if the path to the ``ffmpeg`` executable cannot be called :raises: :class:`~aeneas.audiofile.AudioFileUnsupportedFormatError`: if the audio file has a format not supported :raises: OSError: if the audio file cannot be read """ self.log(u"Loading audio data...") # check the file can be read if not gf.file_can_be_read(self.file_path): self.log_exc(u"File '%s' cannot be read" % (self.file_path), None, True, OSError) # determine if we need to convert the audio file convert_audio_file = ( (self.file_format is None) or ( (self.rconf.safety_checks) and (self.file_format != ("pcm_s16le", 1, self.rconf.sample_rate)) ) ) # convert the audio file if needed if convert_audio_file: # convert file to PCM16 mono WAVE with correct sample rate self.log(u"self.file_format is None or not good => converting self.file_path") tmp_handler, tmp_file_path = gf.tmp_file(suffix=u".wav", root=self.rconf[RuntimeConfiguration.TMP_PATH]) self.log([u"Temporary PCM16 mono WAVE file: '%s'", tmp_file_path]) try: self.log(u"Converting audio file to mono...") converter = FFMPEGWrapper(rconf=self.rconf, logger=self.logger) converter.convert(self.file_path, tmp_file_path) self.file_format = ("pcm_s16le", 1, self.rconf.sample_rate) self.log(u"Converting audio file to mono... done") except FFMPEGPathError: gf.delete_file(tmp_handler, tmp_file_path) self.log_exc(u"Unable to call ffmpeg executable", None, True, AudioFileConverterError) except OSError: gf.delete_file(tmp_handler, tmp_file_path) self.log_exc(u"Audio file format not supported by ffmpeg", None, True, AudioFileUnsupportedFormatError) else: # read the file directly if self.rconf.safety_checks: self.log(u"self.file_format is good => reading self.file_path directly") else: self.log_warn(u"Safety checks disabled => reading self.file_path directly") tmp_handler = None tmp_file_path = self.file_path # TODO allow calling C extension cwave to read samples faster try: self.audio_format = "pcm16" self.audio_channels = 1 self.audio_sample_rate, self.__samples = scipywavread(tmp_file_path) # scipy reads a sample as an int16_t, that is, a number in [-32768, 32767] # so we convert it to a float64 in [-1, 1] self.__samples = self.__samples.astype("float64") / 32768 self.__samples_capacity = len(self.__samples) self.__samples_length = self.__samples_capacity self._update_length() except ValueError: self.log_exc(u"Audio format not supported by scipywavread", None, True, AudioFileUnsupportedFormatError) # if we converted the audio file, delete the temporary converted audio file if convert_audio_file: gf.delete_file(tmp_handler, tmp_file_path) self.log([u"Deleted temporary audio file: '%s'", tmp_file_path]) self._update_length() self.log([u"Sample length: %.3f", self.audio_length]) self.log([u"Sample rate: %d", self.audio_sample_rate]) self.log([u"Audio format: %s", self.audio_format]) self.log([u"Audio channels: %d", self.audio_channels]) self.log(u"Loading audio data... done")
[ "def", "read_samples_from_file", "(", "self", ")", ":", "self", ".", "log", "(", "u\"Loading audio data...\"", ")", "# check the file can be read", "if", "not", "gf", ".", "file_can_be_read", "(", "self", ".", "file_path", ")", ":", "self", ".", "log_exc", "(", "u\"File '%s' cannot be read\"", "%", "(", "self", ".", "file_path", ")", ",", "None", ",", "True", ",", "OSError", ")", "# determine if we need to convert the audio file", "convert_audio_file", "=", "(", "(", "self", ".", "file_format", "is", "None", ")", "or", "(", "(", "self", ".", "rconf", ".", "safety_checks", ")", "and", "(", "self", ".", "file_format", "!=", "(", "\"pcm_s16le\"", ",", "1", ",", "self", ".", "rconf", ".", "sample_rate", ")", ")", ")", ")", "# convert the audio file if needed", "if", "convert_audio_file", ":", "# convert file to PCM16 mono WAVE with correct sample rate", "self", ".", "log", "(", "u\"self.file_format is None or not good => converting self.file_path\"", ")", "tmp_handler", ",", "tmp_file_path", "=", "gf", ".", "tmp_file", "(", "suffix", "=", "u\".wav\"", ",", "root", "=", "self", ".", "rconf", "[", "RuntimeConfiguration", ".", "TMP_PATH", "]", ")", "self", ".", "log", "(", "[", "u\"Temporary PCM16 mono WAVE file: '%s'\"", ",", "tmp_file_path", "]", ")", "try", ":", "self", ".", "log", "(", "u\"Converting audio file to mono...\"", ")", "converter", "=", "FFMPEGWrapper", "(", "rconf", "=", "self", ".", "rconf", ",", "logger", "=", "self", ".", "logger", ")", "converter", ".", "convert", "(", "self", ".", "file_path", ",", "tmp_file_path", ")", "self", ".", "file_format", "=", "(", "\"pcm_s16le\"", ",", "1", ",", "self", ".", "rconf", ".", "sample_rate", ")", "self", ".", "log", "(", "u\"Converting audio file to mono... done\"", ")", "except", "FFMPEGPathError", ":", "gf", ".", "delete_file", "(", "tmp_handler", ",", "tmp_file_path", ")", "self", ".", "log_exc", "(", "u\"Unable to call ffmpeg executable\"", ",", "None", ",", "True", ",", "AudioFileConverterError", ")", "except", "OSError", ":", "gf", ".", "delete_file", "(", "tmp_handler", ",", "tmp_file_path", ")", "self", ".", "log_exc", "(", "u\"Audio file format not supported by ffmpeg\"", ",", "None", ",", "True", ",", "AudioFileUnsupportedFormatError", ")", "else", ":", "# read the file directly", "if", "self", ".", "rconf", ".", "safety_checks", ":", "self", ".", "log", "(", "u\"self.file_format is good => reading self.file_path directly\"", ")", "else", ":", "self", ".", "log_warn", "(", "u\"Safety checks disabled => reading self.file_path directly\"", ")", "tmp_handler", "=", "None", "tmp_file_path", "=", "self", ".", "file_path", "# TODO allow calling C extension cwave to read samples faster", "try", ":", "self", ".", "audio_format", "=", "\"pcm16\"", "self", ".", "audio_channels", "=", "1", "self", ".", "audio_sample_rate", ",", "self", ".", "__samples", "=", "scipywavread", "(", "tmp_file_path", ")", "# scipy reads a sample as an int16_t, that is, a number in [-32768, 32767]", "# so we convert it to a float64 in [-1, 1]", "self", ".", "__samples", "=", "self", ".", "__samples", ".", "astype", "(", "\"float64\"", ")", "/", "32768", "self", ".", "__samples_capacity", "=", "len", "(", "self", ".", "__samples", ")", "self", ".", "__samples_length", "=", "self", ".", "__samples_capacity", "self", ".", "_update_length", "(", ")", "except", "ValueError", ":", "self", ".", "log_exc", "(", "u\"Audio format not supported by scipywavread\"", ",", "None", ",", "True", ",", "AudioFileUnsupportedFormatError", ")", "# if we converted the audio file, 
delete the temporary converted audio file", "if", "convert_audio_file", ":", "gf", ".", "delete_file", "(", "tmp_handler", ",", "tmp_file_path", ")", "self", ".", "log", "(", "[", "u\"Deleted temporary audio file: '%s'\"", ",", "tmp_file_path", "]", ")", "self", ".", "_update_length", "(", ")", "self", ".", "log", "(", "[", "u\"Sample length: %.3f\"", ",", "self", ".", "audio_length", "]", ")", "self", ".", "log", "(", "[", "u\"Sample rate: %d\"", ",", "self", ".", "audio_sample_rate", "]", ")", "self", ".", "log", "(", "[", "u\"Audio format: %s\"", ",", "self", ".", "audio_format", "]", ")", "self", ".", "log", "(", "[", "u\"Audio channels: %d\"", ",", "self", ".", "audio_channels", "]", ")", "self", ".", "log", "(", "u\"Loading audio data... done\"", ")" ]
48.367816
24.942529
def pipe(): """Return the optimum pipe implementation for the capabilities of the active system.""" try: from os import pipe return pipe() except: pipe = Pipe() return pipe.reader_fd, pipe.writer_fd
[ "def", "pipe", "(", ")", ":", "try", ":", "from", "os", "import", "pipe", "return", "pipe", "(", ")", "except", ":", "pipe", "=", "Pipe", "(", ")", "return", "pipe", ".", "reader_fd", ",", "pipe", ".", "writer_fd" ]
24.3
21.8
def adjoint(self): """Adjoint of the linear operator. Note that this implementation uses an approximation that is only valid for small displacements. """ # TODO allow users to select what method to use here. div_op = Divergence(domain=self.displacement.space, method='forward', pad_mode='symmetric') jacobian_det = self.domain.element( np.exp(-div_op(self.displacement))) return jacobian_det * self.inverse
[ "def", "adjoint", "(", "self", ")", ":", "# TODO allow users to select what method to use here.", "div_op", "=", "Divergence", "(", "domain", "=", "self", ".", "displacement", ".", "space", ",", "method", "=", "'forward'", ",", "pad_mode", "=", "'symmetric'", ")", "jacobian_det", "=", "self", ".", "domain", ".", "element", "(", "np", ".", "exp", "(", "-", "div_op", "(", "self", ".", "displacement", ")", ")", ")", "return", "jacobian_det", "*", "self", ".", "inverse" ]
38.461538
16.538462
def _set_family_view(self, session): """Sets the underlying family view to match current view""" if self._family_view == COMPARATIVE: try: session.use_comparative_family_view() except AttributeError: pass else: try: session.use_plenary_family_view() except AttributeError: pass
[ "def", "_set_family_view", "(", "self", ",", "session", ")", ":", "if", "self", ".", "_family_view", "==", "COMPARATIVE", ":", "try", ":", "session", ".", "use_comparative_family_view", "(", ")", "except", "AttributeError", ":", "pass", "else", ":", "try", ":", "session", ".", "use_plenary_family_view", "(", ")", "except", "AttributeError", ":", "pass" ]
33.5
13.083333
def execute(func, handler, args, kwargs): """ Wrap the handler ``_execute`` method to trace incoming requests, extracting the context from the headers, if available. """ tracing = handler.settings.get('opentracing_tracing') with tracer_stack_context(): if tracing._trace_all: attrs = handler.settings.get('opentracing_traced_attributes', []) tracing._apply_tracing(handler, attrs) return func(*args, **kwargs)
[ "def", "execute", "(", "func", ",", "handler", ",", "args", ",", "kwargs", ")", ":", "tracing", "=", "handler", ".", "settings", ".", "get", "(", "'opentracing_tracing'", ")", "with", "tracer_stack_context", "(", ")", ":", "if", "tracing", ".", "_trace_all", ":", "attrs", "=", "handler", ".", "settings", ".", "get", "(", "'opentracing_traced_attributes'", ",", "[", "]", ")", "tracing", ".", "_apply_tracing", "(", "handler", ",", "attrs", ")", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
35.615385
16.384615
def resume(self, campaign_id): """ Resume an RSS-Driven campaign. :param campaign_id: The unique id for the campaign. :type campaign_id: :py:class:`str` """ self.campaign_id = campaign_id return self._mc_client._post(url=self._build_path(campaign_id, 'actions/resume'))
[ "def", "resume", "(", "self", ",", "campaign_id", ")", ":", "self", ".", "campaign_id", "=", "campaign_id", "return", "self", ".", "_mc_client", ".", "_post", "(", "url", "=", "self", ".", "_build_path", "(", "campaign_id", ",", "'actions/resume'", ")", ")" ]
35.333333
13.777778
def transform(self, maps): """ This function transforms from spherical to cartesian spins. Parameters ---------- maps : a mapping object Examples -------- Convert a dict of numpy.array: >>> import numpy >>> from pycbc import transforms >>> t = transforms.SphericalSpin1ToCartesianSpin1() >>> t.transform({'spin1_a': numpy.array([0.1]), 'spin1_azimuthal': numpy.array([0.1]), 'spin1_polar': numpy.array([0.1])}) {'spin1_a': array([ 0.1]), 'spin1_azimuthal': array([ 0.1]), 'spin1_polar': array([ 0.1]), 'spin2x': array([ 0.00993347]), 'spin2y': array([ 0.00099667]), 'spin2z': array([ 0.09950042])} Returns ------- out : dict A dict with key as parameter name and value as numpy.array or float of transformed values. """ a, az, po = self._inputs data = coordinates.spherical_to_cartesian(maps[a], maps[az], maps[po]) out = {param : val for param, val in zip(self._outputs, data)} return self.format_output(maps, out)
[ "def", "transform", "(", "self", ",", "maps", ")", ":", "a", ",", "az", ",", "po", "=", "self", ".", "_inputs", "data", "=", "coordinates", ".", "spherical_to_cartesian", "(", "maps", "[", "a", "]", ",", "maps", "[", "az", "]", ",", "maps", "[", "po", "]", ")", "out", "=", "{", "param", ":", "val", "for", "param", ",", "val", "in", "zip", "(", "self", ".", "_outputs", ",", "data", ")", "}", "return", "self", ".", "format_output", "(", "maps", ",", "out", ")" ]
39.035714
26.035714
def add_tileset(self, tileset): """ Add a tileset to the map :param tileset: TiledTileset """ assert (isinstance(tileset, TiledTileset)) self.tilesets.append(tileset)
[ "def", "add_tileset", "(", "self", ",", "tileset", ")", ":", "assert", "(", "isinstance", "(", "tileset", ",", "TiledTileset", ")", ")", "self", ".", "tilesets", ".", "append", "(", "tileset", ")" ]
28.714286
9.428571
def to_glyphs_master_user_data(self, ufo, master): """Set the GSFontMaster userData from the UFO master-specific lib data.""" target_user_data = master.userData for key, value in ufo.lib.items(): if _user_data_has_no_special_meaning(key): target_user_data[key] = value # Save UFO data files if ufo.data.fileNames: from glyphsLib.types import BinaryData ufo_data = {} for os_filename in ufo.data.fileNames: filename = posixpath.join(*os_filename.split(os.path.sep)) ufo_data[filename] = BinaryData(ufo.data[os_filename]) master.userData[UFO_DATA_KEY] = ufo_data
[ "def", "to_glyphs_master_user_data", "(", "self", ",", "ufo", ",", "master", ")", ":", "target_user_data", "=", "master", ".", "userData", "for", "key", ",", "value", "in", "ufo", ".", "lib", ".", "items", "(", ")", ":", "if", "_user_data_has_no_special_meaning", "(", "key", ")", ":", "target_user_data", "[", "key", "]", "=", "value", "# Save UFO data files", "if", "ufo", ".", "data", ".", "fileNames", ":", "from", "glyphsLib", ".", "types", "import", "BinaryData", "ufo_data", "=", "{", "}", "for", "os_filename", "in", "ufo", ".", "data", ".", "fileNames", ":", "filename", "=", "posixpath", ".", "join", "(", "*", "os_filename", ".", "split", "(", "os", ".", "path", ".", "sep", ")", ")", "ufo_data", "[", "filename", "]", "=", "BinaryData", "(", "ufo", ".", "data", "[", "os_filename", "]", ")", "master", ".", "userData", "[", "UFO_DATA_KEY", "]", "=", "ufo_data" ]
40.1875
14.3125
def reset_logformat(logger: logging.Logger, fmt: str, datefmt: str = '%Y-%m-%d %H:%M:%S') -> None: """ Create a new formatter and apply it to the logger. :func:`logging.basicConfig` won't reset the formatter if another module has called it, so always set the formatter like this. Args: logger: logger to modify fmt: passed to the ``fmt=`` argument of :class:`logging.Formatter` datefmt: passed to the ``datefmt=`` argument of :class:`logging.Formatter` """ handler = logging.StreamHandler() formatter = logging.Formatter(fmt=fmt, datefmt=datefmt) handler.setFormatter(formatter) remove_all_logger_handlers(logger) logger.addHandler(handler) logger.propagate = False
[ "def", "reset_logformat", "(", "logger", ":", "logging", ".", "Logger", ",", "fmt", ":", "str", ",", "datefmt", ":", "str", "=", "'%Y-%m-%d %H:%M:%S'", ")", "->", "None", ":", "handler", "=", "logging", ".", "StreamHandler", "(", ")", "formatter", "=", "logging", ".", "Formatter", "(", "fmt", "=", "fmt", ",", "datefmt", "=", "datefmt", ")", "handler", ".", "setFormatter", "(", "formatter", ")", "remove_all_logger_handlers", "(", "logger", ")", "logger", ".", "addHandler", "(", "handler", ")", "logger", ".", "propagate", "=", "False" ]
36.714286
15.47619
def findTargetNS(self, node): """Return the defined target namespace uri for the given node.""" attrget = self.getAttr attrkey = (self.NS_XMLNS, 'xmlns') DOCUMENT_NODE = node.DOCUMENT_NODE ELEMENT_NODE = node.ELEMENT_NODE while 1: if node.nodeType != ELEMENT_NODE: node = node.parentNode continue result = attrget(node, 'targetNamespace', default=None) if result is not None: return result node = node.parentNode if node.nodeType == DOCUMENT_NODE: raise DOMException('Cannot determine target namespace.')
[ "def", "findTargetNS", "(", "self", ",", "node", ")", ":", "attrget", "=", "self", ".", "getAttr", "attrkey", "=", "(", "self", ".", "NS_XMLNS", ",", "'xmlns'", ")", "DOCUMENT_NODE", "=", "node", ".", "DOCUMENT_NODE", "ELEMENT_NODE", "=", "node", ".", "ELEMENT_NODE", "while", "1", ":", "if", "node", ".", "nodeType", "!=", "ELEMENT_NODE", ":", "node", "=", "node", ".", "parentNode", "continue", "result", "=", "attrget", "(", "node", ",", "'targetNamespace'", ",", "default", "=", "None", ")", "if", "result", "is", "not", "None", ":", "return", "result", "node", "=", "node", ".", "parentNode", "if", "node", ".", "nodeType", "==", "DOCUMENT_NODE", ":", "raise", "DOMException", "(", "'Cannot determine target namespace.'", ")" ]
41.3125
10
def useKeyID(self, keyID): """ Use the given API key config specified by `keyID` during subsequent API calls :param str keyID: an index into the 'keys' maintained in this config """ if keyID not in self._data['keys']: raise ConfigException('keyID does not exist: %s' % keyID) self._keyID = keyID
[ "def", "useKeyID", "(", "self", ",", "keyID", ")", ":", "if", "keyID", "not", "in", "self", ".", "_data", "[", "'keys'", "]", ":", "raise", "ConfigException", "(", "'keyID does not exist: %s'", "%", "keyID", ")", "self", ".", "_keyID", "=", "keyID" ]
35.5
19.3
def get_reliabledictionary_schema(client, application_name, service_name, dictionary_name, output_file=None): """Query Schema information for existing reliable dictionaries. Query Schema information existing reliable dictionaries for given application and service. :param application_name: Name of the application. :type application_name: str :param service_name: Name of the service. :type service_name: str :param dictionary: Name of the reliable dictionary. :type dictionary: str :param output_file: Optional file to save the schema. """ cluster = Cluster.from_sfclient(client) dictionary = cluster.get_application(application_name).get_service(service_name).get_dictionary(dictionary_name) result = json.dumps(dictionary.get_information(), indent=4) if (output_file == None): output_file = "{}-{}-{}-schema-output.json".format(application_name, service_name, dictionary_name) with open(output_file, "w") as output: output.write(result) print('Printed schema information to: ' + output_file) print(result)
[ "def", "get_reliabledictionary_schema", "(", "client", ",", "application_name", ",", "service_name", ",", "dictionary_name", ",", "output_file", "=", "None", ")", ":", "cluster", "=", "Cluster", ".", "from_sfclient", "(", "client", ")", "dictionary", "=", "cluster", ".", "get_application", "(", "application_name", ")", ".", "get_service", "(", "service_name", ")", ".", "get_dictionary", "(", "dictionary_name", ")", "result", "=", "json", ".", "dumps", "(", "dictionary", ".", "get_information", "(", ")", ",", "indent", "=", "4", ")", "if", "(", "output_file", "==", "None", ")", ":", "output_file", "=", "\"{}-{}-{}-schema-output.json\"", ".", "format", "(", "application_name", ",", "service_name", ",", "dictionary_name", ")", "with", "open", "(", "output_file", ",", "\"w\"", ")", "as", "output", ":", "output", ".", "write", "(", "result", ")", "print", "(", "'Printed schema information to: '", "+", "output_file", ")", "print", "(", "result", ")" ]
43.4
25.32
def tf_combined_loss(self, states, internals, actions, terminal, reward, next_states, next_internals, update, reference=None): """ Combines Q-loss and demo loss. """ q_model_loss = self.fn_loss( states=states, internals=internals, actions=actions, terminal=terminal, reward=reward, next_states=next_states, next_internals=next_internals, update=update, reference=reference ) demo_loss = self.fn_demo_loss( states=states, internals=internals, actions=actions, terminal=terminal, reward=reward, update=update, reference=reference ) return q_model_loss + self.supervised_weight * demo_loss
[ "def", "tf_combined_loss", "(", "self", ",", "states", ",", "internals", ",", "actions", ",", "terminal", ",", "reward", ",", "next_states", ",", "next_internals", ",", "update", ",", "reference", "=", "None", ")", ":", "q_model_loss", "=", "self", ".", "fn_loss", "(", "states", "=", "states", ",", "internals", "=", "internals", ",", "actions", "=", "actions", ",", "terminal", "=", "terminal", ",", "reward", "=", "reward", ",", "next_states", "=", "next_states", ",", "next_internals", "=", "next_internals", ",", "update", "=", "update", ",", "reference", "=", "reference", ")", "demo_loss", "=", "self", ".", "fn_demo_loss", "(", "states", "=", "states", ",", "internals", "=", "internals", ",", "actions", "=", "actions", ",", "terminal", "=", "terminal", ",", "reward", "=", "reward", ",", "update", "=", "update", ",", "reference", "=", "reference", ")", "return", "q_model_loss", "+", "self", ".", "supervised_weight", "*", "demo_loss" ]
30.296296
15.851852
def rect( self, x: int, y: int, width: int, height: int, clear: bool, bg_blend: int = tcod.constants.BKGND_DEFAULT, ) -> None: """Draw a the background color on a rect optionally clearing the text. If `clear` is True the affected tiles are changed to space character. Args: x (int): The x coordinate from the left. y (int): The y coordinate from the top. width (int): Maximum width to render the text. height (int): Maximum lines to render the text. clear (bool): If True all text in the affected area will be removed. bg_blend (int): Background blending flag. .. deprecated:: 8.5 Console methods which depend on console defaults have been deprecated. Use :any:`Console.draw_rect` instead, calling this function will print a warning detailing which default values need to be made explicit. """ self.__deprecate_defaults("draw_rect", bg_blend, clear=bool(clear)) lib.TCOD_console_rect( self.console_c, x, y, width, height, clear, bg_blend )
[ "def", "rect", "(", "self", ",", "x", ":", "int", ",", "y", ":", "int", ",", "width", ":", "int", ",", "height", ":", "int", ",", "clear", ":", "bool", ",", "bg_blend", ":", "int", "=", "tcod", ".", "constants", ".", "BKGND_DEFAULT", ",", ")", "->", "None", ":", "self", ".", "__deprecate_defaults", "(", "\"draw_rect\"", ",", "bg_blend", ",", "clear", "=", "bool", "(", "clear", ")", ")", "lib", ".", "TCOD_console_rect", "(", "self", ".", "console_c", ",", "x", ",", "y", ",", "width", ",", "height", ",", "clear", ",", "bg_blend", ")" ]
36.484848
22.757576
def ctype_for_encoding(self, encoding): """Return ctypes type for an encoded Objective-C type.""" if encoding in self.typecodes: return self.typecodes[encoding] elif encoding[0:1] == b'^' and encoding[1:] in self.typecodes: return POINTER(self.typecodes[encoding[1:]]) elif encoding[0:1] == b'^' and encoding[1:] in [CGImageEncoding, NSZoneEncoding]: return c_void_p elif encoding[0:1] == b'r' and encoding[1:] in self.typecodes: return self.typecodes[encoding[1:]] elif encoding[0:2] == b'r^' and encoding[2:] in self.typecodes: return POINTER(self.typecodes[encoding[2:]]) else: raise Exception('unknown encoding for %s: %s' % (self.name, encoding))
[ "def", "ctype_for_encoding", "(", "self", ",", "encoding", ")", ":", "if", "encoding", "in", "self", ".", "typecodes", ":", "return", "self", ".", "typecodes", "[", "encoding", "]", "elif", "encoding", "[", "0", ":", "1", "]", "==", "b'^'", "and", "encoding", "[", "1", ":", "]", "in", "self", ".", "typecodes", ":", "return", "POINTER", "(", "self", ".", "typecodes", "[", "encoding", "[", "1", ":", "]", "]", ")", "elif", "encoding", "[", "0", ":", "1", "]", "==", "b'^'", "and", "encoding", "[", "1", ":", "]", "in", "[", "CGImageEncoding", ",", "NSZoneEncoding", "]", ":", "return", "c_void_p", "elif", "encoding", "[", "0", ":", "1", "]", "==", "b'r'", "and", "encoding", "[", "1", ":", "]", "in", "self", ".", "typecodes", ":", "return", "self", ".", "typecodes", "[", "encoding", "[", "1", ":", "]", "]", "elif", "encoding", "[", "0", ":", "2", "]", "==", "b'r^'", "and", "encoding", "[", "2", ":", "]", "in", "self", ".", "typecodes", ":", "return", "POINTER", "(", "self", ".", "typecodes", "[", "encoding", "[", "2", ":", "]", "]", ")", "else", ":", "raise", "Exception", "(", "'unknown encoding for %s: %s'", "%", "(", "self", ".", "name", ",", "encoding", ")", ")" ]
53
16.8125
def find_node_api_version(self, node_pyxb): """Find the highest API major version supported by node.""" max_major = 0 for s in node_pyxb.services.service: max_major = max(max_major, int(s.version[1:])) return max_major
[ "def", "find_node_api_version", "(", "self", ",", "node_pyxb", ")", ":", "max_major", "=", "0", "for", "s", "in", "node_pyxb", ".", "services", ".", "service", ":", "max_major", "=", "max", "(", "max_major", ",", "int", "(", "s", ".", "version", "[", "1", ":", "]", ")", ")", "return", "max_major" ]
42.833333
10
def copy_security(source, target, obj_type='file', copy_owner=True, copy_group=True, copy_dacl=True, copy_sacl=True): r''' Copy the security descriptor of the Source to the Target. You can specify a specific portion of the security descriptor to copy using one of the `copy_*` parameters. .. note:: At least one `copy_*` parameter must be ``True`` .. note:: The user account running this command must have the following privileges: - SeTakeOwnershipPrivilege - SeRestorePrivilege - SeSecurityPrivilege Args: source (str): The full path to the source. This is where the security info will be copied from target (str): The full path to the target. This is where the security info will be applied obj_type (str): file The type of object to query. This value changes the format of the ``obj_name`` parameter as follows: - file: indicates a file or directory - a relative path, such as ``FileName.txt`` or ``..\FileName`` - an absolute path, such as ``C:\DirName\FileName.txt`` - A UNC name, such as ``\\ServerName\ShareName\FileName.txt`` - service: indicates the name of a Windows service - printer: indicates the name of a printer - registry: indicates a registry key - Uses the following literal strings to denote the hive: - HKEY_LOCAL_MACHINE - MACHINE - HKLM - HKEY_USERS - USERS - HKU - HKEY_CURRENT_USER - CURRENT_USER - HKCU - HKEY_CLASSES_ROOT - CLASSES_ROOT - HKCR - Should be in the format of ``HIVE\Path\To\Key``. For example, ``HKLM\SOFTWARE\Windows`` - registry32: indicates a registry key under WOW64. Formatting is the same as it is for ``registry`` - share: indicates a network share copy_owner (bool): True ``True`` copies owner information. Default is ``True`` copy_group (bool): True ``True`` copies group information. Default is ``True`` copy_dacl (bool): True ``True`` copies the DACL. Default is ``True`` copy_sacl (bool): True ``True`` copies the SACL. Default is ``True`` Returns: bool: ``True`` if successful Raises: SaltInvocationError: When parameters are invalid CommandExecutionError: On failure to set security Usage: .. code-block:: python salt.utils.win_dacl.copy_security( source='C:\\temp\\source_file.txt', target='C:\\temp\\target_file.txt', obj_type='file') salt.utils.win_dacl.copy_security( source='HKLM\\SOFTWARE\\salt\\test_source', target='HKLM\\SOFTWARE\\salt\\test_target', obj_type='registry', copy_owner=False) ''' obj_dacl = dacl(obj_type=obj_type) if 'registry' in obj_type.lower(): source = obj_dacl.get_reg_name(source) log.info('Source converted to: %s', source) target = obj_dacl.get_reg_name(target) log.info('Target converted to: %s', target) # Set flags try: obj_type_flag = flags().obj_type[obj_type.lower()] except KeyError: raise SaltInvocationError( 'Invalid "obj_type" passed: {0}'.format(obj_type)) security_flags = 0 if copy_owner: security_flags |= win32security.OWNER_SECURITY_INFORMATION if copy_group: security_flags |= win32security.GROUP_SECURITY_INFORMATION if copy_dacl: security_flags |= win32security.DACL_SECURITY_INFORMATION if copy_sacl: security_flags |= win32security.SACL_SECURITY_INFORMATION if not security_flags: raise SaltInvocationError( 'One of copy_owner, copy_group, copy_dacl, or copy_sacl must be ' 'True') # To set the owner to something other than the logged in user requires # SE_TAKE_OWNERSHIP_NAME and SE_RESTORE_NAME privileges # Enable them for the logged in user # Setup the privilege set new_privs = set() luid = win32security.LookupPrivilegeValue('', 'SeTakeOwnershipPrivilege') new_privs.add((luid, win32con.SE_PRIVILEGE_ENABLED)) luid = win32security.LookupPrivilegeValue('', 'SeRestorePrivilege') new_privs.add((luid, win32con.SE_PRIVILEGE_ENABLED)) luid = win32security.LookupPrivilegeValue('', 'SeSecurityPrivilege') new_privs.add((luid, win32con.SE_PRIVILEGE_ENABLED)) # Get the current token p_handle = win32api.GetCurrentProcess() t_handle = win32security.OpenProcessToken( p_handle, win32security.TOKEN_ALL_ACCESS | win32con.TOKEN_ADJUST_PRIVILEGES) # Enable the privileges win32security.AdjustTokenPrivileges(t_handle, 0, new_privs) # Load object Security Info from the Source sec = win32security.GetNamedSecurityInfo( source, obj_type_flag, security_flags) # The following return None if the corresponding flag is not set sd_sid = sec.GetSecurityDescriptorOwner() sd_gid = sec.GetSecurityDescriptorGroup() sd_dacl = sec.GetSecurityDescriptorDacl() sd_sacl = sec.GetSecurityDescriptorSacl() # Set Security info on the target try: win32security.SetNamedSecurityInfo( target, obj_type_flag, security_flags, sd_sid, sd_gid, sd_dacl, sd_sacl) except pywintypes.error as exc: raise CommandExecutionError( 'Failed to set security info: {0}'.format(exc.strerror)) return True
[ "def", "copy_security", "(", "source", ",", "target", ",", "obj_type", "=", "'file'", ",", "copy_owner", "=", "True", ",", "copy_group", "=", "True", ",", "copy_dacl", "=", "True", ",", "copy_sacl", "=", "True", ")", ":", "obj_dacl", "=", "dacl", "(", "obj_type", "=", "obj_type", ")", "if", "'registry'", "in", "obj_type", ".", "lower", "(", ")", ":", "source", "=", "obj_dacl", ".", "get_reg_name", "(", "source", ")", "log", ".", "info", "(", "'Source converted to: %s'", ",", "source", ")", "target", "=", "obj_dacl", ".", "get_reg_name", "(", "target", ")", "log", ".", "info", "(", "'Target converted to: %s'", ",", "target", ")", "# Set flags", "try", ":", "obj_type_flag", "=", "flags", "(", ")", ".", "obj_type", "[", "obj_type", ".", "lower", "(", ")", "]", "except", "KeyError", ":", "raise", "SaltInvocationError", "(", "'Invalid \"obj_type\" passed: {0}'", ".", "format", "(", "obj_type", ")", ")", "security_flags", "=", "0", "if", "copy_owner", ":", "security_flags", "|=", "win32security", ".", "OWNER_SECURITY_INFORMATION", "if", "copy_group", ":", "security_flags", "|=", "win32security", ".", "GROUP_SECURITY_INFORMATION", "if", "copy_dacl", ":", "security_flags", "|=", "win32security", ".", "DACL_SECURITY_INFORMATION", "if", "copy_sacl", ":", "security_flags", "|=", "win32security", ".", "SACL_SECURITY_INFORMATION", "if", "not", "security_flags", ":", "raise", "SaltInvocationError", "(", "'One of copy_owner, copy_group, copy_dacl, or copy_sacl must be '", "'True'", ")", "# To set the owner to something other than the logged in user requires", "# SE_TAKE_OWNERSHIP_NAME and SE_RESTORE_NAME privileges", "# Enable them for the logged in user", "# Setup the privilege set", "new_privs", "=", "set", "(", ")", "luid", "=", "win32security", ".", "LookupPrivilegeValue", "(", "''", ",", "'SeTakeOwnershipPrivilege'", ")", "new_privs", ".", "add", "(", "(", "luid", ",", "win32con", ".", "SE_PRIVILEGE_ENABLED", ")", ")", "luid", "=", "win32security", ".", "LookupPrivilegeValue", "(", "''", ",", "'SeRestorePrivilege'", ")", "new_privs", ".", "add", "(", "(", "luid", ",", "win32con", ".", "SE_PRIVILEGE_ENABLED", ")", ")", "luid", "=", "win32security", ".", "LookupPrivilegeValue", "(", "''", ",", "'SeSecurityPrivilege'", ")", "new_privs", ".", "add", "(", "(", "luid", ",", "win32con", ".", "SE_PRIVILEGE_ENABLED", ")", ")", "# Get the current token", "p_handle", "=", "win32api", ".", "GetCurrentProcess", "(", ")", "t_handle", "=", "win32security", ".", "OpenProcessToken", "(", "p_handle", ",", "win32security", ".", "TOKEN_ALL_ACCESS", "|", "win32con", ".", "TOKEN_ADJUST_PRIVILEGES", ")", "# Enable the privileges", "win32security", ".", "AdjustTokenPrivileges", "(", "t_handle", ",", "0", ",", "new_privs", ")", "# Load object Security Info from the Source", "sec", "=", "win32security", ".", "GetNamedSecurityInfo", "(", "source", ",", "obj_type_flag", ",", "security_flags", ")", "# The following return None if the corresponding flag is not set", "sd_sid", "=", "sec", ".", "GetSecurityDescriptorOwner", "(", ")", "sd_gid", "=", "sec", ".", "GetSecurityDescriptorGroup", "(", ")", "sd_dacl", "=", "sec", ".", "GetSecurityDescriptorDacl", "(", ")", "sd_sacl", "=", "sec", ".", "GetSecurityDescriptorSacl", "(", ")", "# Set Security info on the target", "try", ":", "win32security", ".", "SetNamedSecurityInfo", "(", "target", ",", "obj_type_flag", ",", "security_flags", ",", "sd_sid", ",", "sd_gid", ",", "sd_dacl", ",", "sd_sacl", ")", "except", "pywintypes", ".", "error", "as", "exc", ":", "raise", 
"CommandExecutionError", "(", "'Failed to set security info: {0}'", ".", "format", "(", "exc", ".", "strerror", ")", ")", "return", "True" ]
35.060241
20.674699
def dup_idx(arr): """Return the indices of all duplicated array elements. Parameters ---------- arr : array-like object An array-like object Returns ------- idx : NumPy array An array containing the indices of the duplicated elements Examples -------- >>> from root_numpy import dup_idx >>> dup_idx([1, 2, 3, 4, 5]) array([], dtype=int64) >>> dup_idx([1, 2, 3, 4, 5, 5]) array([4, 5]) >>> dup_idx([1, 2, 3, 4, 5, 5, 1]) array([0, 4, 5, 6]) """ _, b = np.unique(arr, return_inverse=True) return np.nonzero(np.logical_or.reduce( b[:, np.newaxis] == np.nonzero(np.bincount(b) > 1), axis=1))[0]
[ "def", "dup_idx", "(", "arr", ")", ":", "_", ",", "b", "=", "np", ".", "unique", "(", "arr", ",", "return_inverse", "=", "True", ")", "return", "np", ".", "nonzero", "(", "np", ".", "logical_or", ".", "reduce", "(", "b", "[", ":", ",", "np", ".", "newaxis", "]", "==", "np", ".", "nonzero", "(", "np", ".", "bincount", "(", "b", ")", ">", "1", ")", ",", "axis", "=", "1", ")", ")", "[", "0", "]" ]
24.107143
19.25
def tokenize(self, data, *args, **kwargs): """Invoke the lexer on an input string an return the list of tokens. This is relatively inefficient and should only be used for testing/debugging as it slurps up all tokens into one list. Args: data: The input to be tokenized. Returns: A list of LexTokens """ self.lexer.input(data) tokens = list() while True: token = self.lexer.token() if not token: break tokens.append(token) return tokens
[ "def", "tokenize", "(", "self", ",", "data", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "lexer", ".", "input", "(", "data", ")", "tokens", "=", "list", "(", ")", "while", "True", ":", "token", "=", "self", ".", "lexer", ".", "token", "(", ")", "if", "not", "token", ":", "break", "tokens", ".", "append", "(", "token", ")", "return", "tokens" ]
33.588235
13.529412
def surfacemass(self,R,log=False): """ NAME: surfacemass PURPOSE: return the surface density profile at this R INPUT: R - Galactocentric radius (/ro) log - if True, return the log (default: False) OUTPUT: Sigma(R) HISTORY: 2010-03-26 - Written - Bovy (NYU) """ if log: return -R/self._params[0] else: return sc.exp(-R/self._params[0])
[ "def", "surfacemass", "(", "self", ",", "R", ",", "log", "=", "False", ")", ":", "if", "log", ":", "return", "-", "R", "/", "self", ".", "_params", "[", "0", "]", "else", ":", "return", "sc", ".", "exp", "(", "-", "R", "/", "self", ".", "_params", "[", "0", "]", ")" ]
26.611111
14.944444
def _setStyle(node, styleMap): u"""Sets the style attribute of a node to the dictionary ``styleMap``.""" fixedStyle = ';'.join([prop + ':' + styleMap[prop] for prop in styleMap]) if fixedStyle != '': node.setAttribute('style', fixedStyle) elif node.getAttribute('style'): node.removeAttribute('style') return node
[ "def", "_setStyle", "(", "node", ",", "styleMap", ")", ":", "fixedStyle", "=", "';'", ".", "join", "(", "[", "prop", "+", "':'", "+", "styleMap", "[", "prop", "]", "for", "prop", "in", "styleMap", "]", ")", "if", "fixedStyle", "!=", "''", ":", "node", ".", "setAttribute", "(", "'style'", ",", "fixedStyle", ")", "elif", "node", ".", "getAttribute", "(", "'style'", ")", ":", "node", ".", "removeAttribute", "(", "'style'", ")", "return", "node" ]
42.75
12.625
def _read_deref(ctx: ReaderContext) -> LispForm: """Read a derefed form from the input stream.""" start = ctx.reader.advance() assert start == "@" next_form = _read_next_consuming_comment(ctx) return llist.l(_DEREF, next_form)
[ "def", "_read_deref", "(", "ctx", ":", "ReaderContext", ")", "->", "LispForm", ":", "start", "=", "ctx", ".", "reader", ".", "advance", "(", ")", "assert", "start", "==", "\"@\"", "next_form", "=", "_read_next_consuming_comment", "(", "ctx", ")", "return", "llist", ".", "l", "(", "_DEREF", ",", "next_form", ")" ]
40.166667
7.5
def _save(self, stateName, path): """save into 'stateName' to pyz-path""" print('saving...') state = {'session': dict(self.opts), 'dialogs': self.dialogs.saveState()} self.sigSave.emit(state) self.saveThread.prepare(stateName, path, self.tmp_dir_session, state) self.saveThread.start() self.current_session = stateName r = self.opts['recent sessions'] try: # is this session already exists: remove it r.pop(r.index(path)) except ValueError: pass # add this session at the beginning r.insert(0, path)
[ "def", "_save", "(", "self", ",", "stateName", ",", "path", ")", ":", "print", "(", "'saving...'", ")", "state", "=", "{", "'session'", ":", "dict", "(", "self", ".", "opts", ")", ",", "'dialogs'", ":", "self", ".", "dialogs", ".", "saveState", "(", ")", "}", "self", ".", "sigSave", ".", "emit", "(", "state", ")", "self", ".", "saveThread", ".", "prepare", "(", "stateName", ",", "path", ",", "self", ".", "tmp_dir_session", ",", "state", ")", "self", ".", "saveThread", ".", "start", "(", ")", "self", ".", "current_session", "=", "stateName", "r", "=", "self", ".", "opts", "[", "'recent sessions'", "]", "try", ":", "# is this session already exists: remove it\r", "r", ".", "pop", "(", "r", ".", "index", "(", "path", ")", ")", "except", "ValueError", ":", "pass", "# add this session at the beginning\r", "r", ".", "insert", "(", "0", ",", "path", ")" ]
31.047619
16.857143
def is_attacked_by(self, color: Color, square: Square) -> bool: """ Checks if the given side attacks the given square. Pinned pieces still count as attackers. Pawns that can be captured en passant are **not** considered attacked. """ return bool(self.attackers_mask(color, square))
[ "def", "is_attacked_by", "(", "self", ",", "color", ":", "Color", ",", "square", ":", "Square", ")", "->", "bool", ":", "return", "bool", "(", "self", ".", "attackers_mask", "(", "color", ",", "square", ")", ")" ]
40.375
17.625
def main(): """ Print lines of input along with output. """ source_lines = (line.rstrip() for line in sys.stdin) console = InteractiveInterpreter() console.runsource('import turicreate') source = '' try: while True: source = source_lines.next() more = console.runsource(source) while more: next_line = source_lines.next() print '...', next_line source += '\n' + next_line more = console.runsource(source) except StopIteration: if more: print '... ' more = console.runsource(source + '\n')
[ "def", "main", "(", ")", ":", "source_lines", "=", "(", "line", ".", "rstrip", "(", ")", "for", "line", "in", "sys", ".", "stdin", ")", "console", "=", "InteractiveInterpreter", "(", ")", "console", ".", "runsource", "(", "'import turicreate'", ")", "source", "=", "''", "try", ":", "while", "True", ":", "source", "=", "source_lines", ".", "next", "(", ")", "more", "=", "console", ".", "runsource", "(", "source", ")", "while", "more", ":", "next_line", "=", "source_lines", ".", "next", "(", ")", "print", "'...'", ",", "next_line", "source", "+=", "'\\n'", "+", "next_line", "more", "=", "console", ".", "runsource", "(", "source", ")", "except", "StopIteration", ":", "if", "more", ":", "print", "'... '", "more", "=", "console", ".", "runsource", "(", "source", "+", "'\\n'", ")" ]
30.666667
11.238095
def get_xpath(stmt, qualified=False, prefix_to_module=False): """Gets the XPath of the statement. Unless qualified=True, does not include prefixes unless the prefix changes mid-XPath. qualified will add a prefix to each node. prefix_to_module will resolve prefixes to module names instead. For RFC 8040, set prefix_to_module=True: /prefix:root/node/prefix:node/... qualified=True: /prefix:root/prefix:node/prefix:node/... qualified=True, prefix_to_module=True: /module:root/module:node/module:node/... prefix_to_module=True: /module:root/node/module:node/... """ return mk_path_str(stmt, with_prefixes=qualified, prefix_onchange=True, prefix_to_module=prefix_to_module)
[ "def", "get_xpath", "(", "stmt", ",", "qualified", "=", "False", ",", "prefix_to_module", "=", "False", ")", ":", "return", "mk_path_str", "(", "stmt", ",", "with_prefixes", "=", "qualified", ",", "prefix_onchange", "=", "True", ",", "prefix_to_module", "=", "prefix_to_module", ")" ]
35.285714
19.571429
def reduce(self, others): """ Reduces others into this one by concatenating all the others onto this one and returning the result. Does not modify self, instead, makes a copy and returns that. :param others: The other AudioSegment objects to append to this one. :returns: The concatenated result. """ ret = AudioSegment(self.seg, self.name) selfdata = [self.seg._data] otherdata = [o.seg._data for o in others] ret.seg._data = b''.join(selfdata + otherdata) return ret
[ "def", "reduce", "(", "self", ",", "others", ")", ":", "ret", "=", "AudioSegment", "(", "self", ".", "seg", ",", "self", ".", "name", ")", "selfdata", "=", "[", "self", ".", "seg", ".", "_data", "]", "otherdata", "=", "[", "o", ".", "seg", ".", "_data", "for", "o", "in", "others", "]", "ret", ".", "seg", ".", "_data", "=", "b''", ".", "join", "(", "selfdata", "+", "otherdata", ")", "return", "ret" ]
38.928571
20.5
def _include_module(self, module, mn): """ See if a module should be included or excluded based upon included_packages and excluded_packages. As some packages have the following format: scipy.special.specfun scipy.linalg Where the top-level package name is just a prefix to a longer package name, we don't want to do a direct comparison. Instead, we want to exclude packages which are either exactly "<package_name>", or start with "<package_name>". """ if mn in self.topology.include_packages: _debug.debug("_include_module:explicit using __include_packages: module=%s", mn) return True if '.' in mn: for include_package in self.topology.include_packages: if mn.startswith(include_package + '.'): _debug.debug("_include_module:explicit pattern using __include_packages: module=%s pattern=%s", mn, \ include_package + '.') return True if mn in self.topology.exclude_packages: _debug.debug("_include_module:explicit using __exclude_packages: module=%s", mn) return False if '.' in mn: for exclude_package in self.topology.exclude_packages: if mn.startswith(exclude_package + '.'): _debug.debug("_include_module:explicit pattern using __exclude_packages: module=%s pattern=%s", mn, \ exclude_package + '.') return False _debug.debug("_include_module:including: module=%s", mn) return True
[ "def", "_include_module", "(", "self", ",", "module", ",", "mn", ")", ":", "if", "mn", "in", "self", ".", "topology", ".", "include_packages", ":", "_debug", ".", "debug", "(", "\"_include_module:explicit using __include_packages: module=%s\"", ",", "mn", ")", "return", "True", "if", "'.'", "in", "mn", ":", "for", "include_package", "in", "self", ".", "topology", ".", "include_packages", ":", "if", "mn", ".", "startswith", "(", "include_package", "+", "'.'", ")", ":", "_debug", ".", "debug", "(", "\"_include_module:explicit pattern using __include_packages: module=%s pattern=%s\"", ",", "mn", ",", "include_package", "+", "'.'", ")", "return", "True", "if", "mn", "in", "self", ".", "topology", ".", "exclude_packages", ":", "_debug", ".", "debug", "(", "\"_include_module:explicit using __exclude_packages: module=%s\"", ",", "mn", ")", "return", "False", "if", "'.'", "in", "mn", ":", "for", "exclude_package", "in", "self", ".", "topology", ".", "exclude_packages", ":", "if", "mn", ".", "startswith", "(", "exclude_package", "+", "'.'", ")", ":", "_debug", ".", "debug", "(", "\"_include_module:explicit pattern using __exclude_packages: module=%s pattern=%s\"", ",", "mn", ",", "exclude_package", "+", "'.'", ")", "return", "False", "_debug", ".", "debug", "(", "\"_include_module:including: module=%s\"", ",", "mn", ")", "return", "True" ]
45.055556
26.277778
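A standalone sketch of the matching rule the docstring describes (exact name, or a dotted "<package_name>." prefix); the package names are made up and the real method also emits _debug logging.

def matches(module_name, packages):
    # exact match, or a dotted-prefix match such as "scipy." matching "scipy.linalg"
    return any(module_name == p or module_name.startswith(p + '.') for p in packages)

print(matches("scipy.linalg", ["scipy"]))    # True
print(matches("scipy", ["scipy"]))           # True
print(matches("scipy2.linalg", ["scipy"]))   # False: a bare string prefix is not enough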
def _height_is_big_enough(image, height): """Check that the image height is at least `height`""" if height > image.size[1]: raise ImageSizeError(image.size[1], height)
[ "def", "_height_is_big_enough", "(", "image", ",", "height", ")", ":", "if", "height", ">", "image", ".", "size", "[", "1", "]", ":", "raise", "ImageSizeError", "(", "image", ".", "size", "[", "1", "]", ",", "height", ")" ]
45.75
5.5
def average_cq(seq, efficiency=1.0): """Given a set of Cq values, return the Cq value that represents the average expression level of the input. The intent is to average the expression levels of the samples, since the average of Cq values is not biologically meaningful. :param iterable seq: A sequence (e.g. list, array, or Series) of Cq values. :param float efficiency: The fractional efficiency of the PCR reaction; i.e. 1.0 is 100% efficiency, producing 2 copies per amplicon per cycle. :return: Cq value representing average expression level :rtype: float """ denominator = sum( [pow(2.0*efficiency, -Ci) for Ci in seq] ) return log(len(seq)/denominator)/log(2.0*efficiency)
[ "def", "average_cq", "(", "seq", ",", "efficiency", "=", "1.0", ")", ":", "denominator", "=", "sum", "(", "[", "pow", "(", "2.0", "*", "efficiency", ",", "-", "Ci", ")", "for", "Ci", "in", "seq", "]", ")", "return", "log", "(", "len", "(", "seq", ")", "/", "denominator", ")", "/", "log", "(", "2.0", "*", "efficiency", ")" ]
47.933333
22.266667
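A small worked example of the formula above, restated as a standalone function with the same math (no external dependencies); the Cq values are illustrative.

from math import log

def average_cq(seq, efficiency=1.0):
    denominator = sum(pow(2.0 * efficiency, -Ci) for Ci in seq)
    return log(len(seq) / denominator) / log(2.0 * efficiency)

print(average_cq([20.0, 20.0]))  # 20.0: identical Cq values average to themselves
print(average_cq([20.0, 24.0]))  # ~20.91: pulled toward the lower Cq, not the arithmetic mean 22.0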
def calc_neg_log_likelihood_and_neg_gradient(self, params): """ Calculates and returns the negative of the log-likelihood and the negative of the gradient. This function is used as the objective function in scipy.optimize.minimize. """ neg_log_likelihood = -1 * self.convenience_calc_log_likelihood(params) neg_gradient = -1 * self.convenience_calc_gradient(params) if self.constrained_pos is not None: neg_gradient[self.constrained_pos] = 0 return neg_log_likelihood, neg_gradient
[ "def", "calc_neg_log_likelihood_and_neg_gradient", "(", "self", ",", "params", ")", ":", "neg_log_likelihood", "=", "-", "1", "*", "self", ".", "convenience_calc_log_likelihood", "(", "params", ")", "neg_gradient", "=", "-", "1", "*", "self", ".", "convenience_calc_gradient", "(", "params", ")", "if", "self", ".", "constrained_pos", "is", "not", "None", ":", "neg_gradient", "[", "self", ".", "constrained_pos", "]", "=", "0", "return", "neg_log_likelihood", ",", "neg_gradient" ]
42.692308
19.461538
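Since the docstring says this value/gradient pair feeds scipy.optimize.minimize, here is a hedged sketch of how such a combined objective is consumed (jac=True tells scipy the callable returns both); the quadratic stand-in below is made up.

import numpy as np
from scipy.optimize import minimize

def neg_loglike_and_grad(params):
    # stand-in for the real negative log-likelihood and its gradient
    f = np.sum((params - 3.0) ** 2)
    grad = 2.0 * (params - 3.0)
    return f, grad

res = minimize(neg_loglike_and_grad, x0=np.zeros(2), jac=True, method="BFGS")
print(res.x)  # ~[3., 3.]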
def add_track(self, *args, **kwargs): """ Add a track to a position. Parameters ---------- track_type: string The type of track to add (e.g. "heatmap", "line") position: string One of 'top', 'bottom', 'center', 'left', 'right' tileset: hgflask.tilesets.Tileset The tileset to be plotted in this track server: string The server serving this track height: int The height of the track, if it is a top, bottom or a center track width: int The width of the track, if it is a left, right or a center track """ new_track = Track(*args, **kwargs) self.tracks = self.tracks + [new_track]
[ "def", "add_track", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "new_track", "=", "Track", "(", "*", "args", ",", "*", "*", "kwargs", ")", "self", ".", "tracks", "=", "self", ".", "tracks", "+", "[", "new_track", "]" ]
35
15.285714
def render_category(slug): """Template tag to render a category with all its entries.""" try: category = EntryCategory.objects.get(slug=slug) except EntryCategory.DoesNotExist: pass else: return {'category': category} return {}
[ "def", "render_category", "(", "slug", ")", ":", "try", ":", "category", "=", "EntryCategory", ".", "objects", ".", "get", "(", "slug", "=", "slug", ")", "except", "EntryCategory", ".", "DoesNotExist", ":", "pass", "else", ":", "return", "{", "'category'", ":", "category", "}", "return", "{", "}" ]
29.333333
16.888889
def get_key_params_from_user(gpg_key_param_list): """Displays parameter entry dialog and returns parameter dict Parameters ---------- gpg_key_param_list: List of 2-tuples \tContains GPG key generation parameters but not name_real """ params = [[_('Real name'), 'name_real']] vals = [""] * len(params) while "" in vals: dlg = GPGParamsDialog(None, -1, "Enter GPG key parameters", params) dlg.CenterOnScreen() for val, textctrl in zip(vals, dlg.textctrls): textctrl.SetValue(val) if dlg.ShowModal() != wx.ID_OK: dlg.Destroy() return vals = [textctrl.Value for textctrl in dlg.textctrls] dlg.Destroy() if "" in vals: msg = _("Please enter a value in each field.") dlg = GMD.GenericMessageDialog(None, msg, _("Missing value"), wx.OK | wx.ICON_ERROR) dlg.ShowModal() dlg.Destroy() for (__, key), val in zip(params, vals): gpg_key_param_list.insert(-2, (key, val)) return dict(gpg_key_param_list)
[ "def", "get_key_params_from_user", "(", "gpg_key_param_list", ")", ":", "params", "=", "[", "[", "_", "(", "'Real name'", ")", ",", "'name_real'", "]", "]", "vals", "=", "[", "\"\"", "]", "*", "len", "(", "params", ")", "while", "\"\"", "in", "vals", ":", "dlg", "=", "GPGParamsDialog", "(", "None", ",", "-", "1", ",", "\"Enter GPG key parameters\"", ",", "params", ")", "dlg", ".", "CenterOnScreen", "(", ")", "for", "val", ",", "textctrl", "in", "zip", "(", "vals", ",", "dlg", ".", "textctrls", ")", ":", "textctrl", ".", "SetValue", "(", "val", ")", "if", "dlg", ".", "ShowModal", "(", ")", "!=", "wx", ".", "ID_OK", ":", "dlg", ".", "Destroy", "(", ")", "return", "vals", "=", "[", "textctrl", ".", "Value", "for", "textctrl", "in", "dlg", ".", "textctrls", "]", "dlg", ".", "Destroy", "(", ")", "if", "\"\"", "in", "vals", ":", "msg", "=", "_", "(", "\"Please enter a value in each field.\"", ")", "dlg", "=", "GMD", ".", "GenericMessageDialog", "(", "None", ",", "msg", ",", "_", "(", "\"Missing value\"", ")", ",", "wx", ".", "OK", "|", "wx", ".", "ICON_ERROR", ")", "dlg", ".", "ShowModal", "(", ")", "dlg", ".", "Destroy", "(", ")", "for", "(", "__", ",", "key", ")", ",", "val", "in", "zip", "(", "params", ",", "vals", ")", ":", "gpg_key_param_list", ".", "insert", "(", "-", "2", ",", "(", "key", ",", "val", ")", ")", "return", "dict", "(", "gpg_key_param_list", ")" ]
26.142857
22.904762
def get_monomers(self, ligands=True): """Retrieves all the `Monomers` from the AMPAL object. Parameters ---------- ligands : bool, optional If true, will include ligand `Monomers`. """ if ligands and self.ligands: monomers = self._monomers + self.ligands._monomers else: monomers = self._monomers return iter(monomers)
[ "def", "get_monomers", "(", "self", ",", "ligands", "=", "True", ")", ":", "if", "ligands", "and", "self", ".", "ligands", ":", "monomers", "=", "self", ".", "_monomers", "+", "self", ".", "ligands", ".", "_monomers", "else", ":", "monomers", "=", "self", ".", "_monomers", "return", "iter", "(", "monomers", ")" ]
31.307692
13.384615
def remove_tmp_prefix_from_file_path(file_path): """ Remove tmp prefix from file path or url. """ path, filename = os.path.split(file_path) return os.path.join(path, remove_tmp_prefix_from_filename(filename)).replace('\\', '/')
[ "def", "remove_tmp_prefix_from_file_path", "(", "file_path", ")", ":", "path", ",", "filename", "=", "os", ".", "path", ".", "split", "(", "file_path", ")", "return", "os", ".", "path", ".", "join", "(", "path", ",", "remove_tmp_prefix_from_filename", "(", "filename", ")", ")", ".", "replace", "(", "'\\\\'", ",", "'/'", ")" ]
40.333333
11.333333
def get_paths(self, key): "Like `gets`, but include the paths, like `get_path` for all matches." result_list = [] if key in self.keys(): result_list.append(((key,), self[key])) for sub_key, v in self.items(): if isinstance(v, self.__class__): sub_res_list = v.get_paths(key) for key_path, res in sub_res_list: result_list.append(((sub_key,) + key_path, res)) elif isinstance(v, dict): if key in v.keys(): result_list.append(((sub_key, key), v[key])) return result_list
[ "def", "get_paths", "(", "self", ",", "key", ")", ":", "result_list", "=", "[", "]", "if", "key", "in", "self", ".", "keys", "(", ")", ":", "result_list", ".", "append", "(", "(", "(", "key", ",", ")", ",", "self", "[", "key", "]", ")", ")", "for", "sub_key", ",", "v", "in", "self", ".", "items", "(", ")", ":", "if", "isinstance", "(", "v", ",", "self", ".", "__class__", ")", ":", "sub_res_list", "=", "v", ".", "get_paths", "(", "key", ")", "for", "key_path", ",", "res", "in", "sub_res_list", ":", "result_list", ".", "append", "(", "(", "(", "sub_key", ",", ")", "+", "key_path", ",", "res", ")", ")", "elif", "isinstance", "(", "v", ",", "dict", ")", ":", "if", "key", "in", "v", ".", "keys", "(", ")", ":", "result_list", ".", "append", "(", "(", "(", "sub_key", ",", "key", ")", ",", "v", "[", "key", "]", ")", ")", "return", "result_list" ]
44.214286
13.357143
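The same traversal idea, sketched for plain nested dicts (the original walks its own dict subclass and checks self.__class__, but the shape of the result is identical); the data is made up.

def get_paths(d, key, prefix=()):
    results = []
    if key in d:
        results.append((prefix + (key,), d[key]))
    for sub_key, v in d.items():
        if isinstance(v, dict) and sub_key != key:
            results.extend(get_paths(v, key, prefix + (sub_key,)))
    return results

cfg = {"timeout": 5, "db": {"timeout": 30}, "cache": {"redis": {"timeout": 1}}}
print(get_paths(cfg, "timeout"))
# [(('timeout',), 5), (('db', 'timeout'), 30), (('cache', 'redis', 'timeout'), 1)]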
async def get_pairwise(self, pairwise_filt: str = None) -> dict: """ Return dict mapping each pairwise DID of interest in wallet to its pairwise info, or, for no filter specified, mapping them all. If wallet has no such item, return empty dict. :param pairwise_filt: remote DID of interest, or WQL json (default all) :return: dict mapping remote DIDs to PairwiseInfo """ LOGGER.debug('Wallet.get_pairwise >>> pairwise_filt: %s', pairwise_filt) if not self.handle: LOGGER.debug('Wallet.get_pairwise <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) storecs = await self.get_non_secret( TYPE_PAIRWISE, pairwise_filt if ok_did(pairwise_filt) or not pairwise_filt else json.loads(pairwise_filt), canon_pairwise_wql) rv = {k: storage_record2pairwise_info(storecs[k]) for k in storecs} # touch up tags, mute leading ~ LOGGER.debug('Wallet.get_pairwise <<< %s', rv) return rv
[ "async", "def", "get_pairwise", "(", "self", ",", "pairwise_filt", ":", "str", "=", "None", ")", "->", "dict", ":", "LOGGER", ".", "debug", "(", "'Wallet.get_pairwise >>> pairwise_filt: %s'", ",", "pairwise_filt", ")", "if", "not", "self", ".", "handle", ":", "LOGGER", ".", "debug", "(", "'Wallet.get_pairwise <!< Wallet %s is closed'", ",", "self", ".", "name", ")", "raise", "WalletState", "(", "'Wallet {} is closed'", ".", "format", "(", "self", ".", "name", ")", ")", "storecs", "=", "await", "self", ".", "get_non_secret", "(", "TYPE_PAIRWISE", ",", "pairwise_filt", "if", "ok_did", "(", "pairwise_filt", ")", "or", "not", "pairwise_filt", "else", "json", ".", "loads", "(", "pairwise_filt", ")", ",", "canon_pairwise_wql", ")", "rv", "=", "{", "k", ":", "storage_record2pairwise_info", "(", "storecs", "[", "k", "]", ")", "for", "k", "in", "storecs", "}", "# touch up tags, mute leading ~", "LOGGER", ".", "debug", "(", "'Wallet.get_pairwise <<< %s'", ",", "rv", ")", "return", "rv" ]
45.826087
30.869565
def _from_center_cartesian( self, x: float, y: float, z: float) -> Point: """ Specifies an arbitrary point relative to the center of the well based on percentages of the radius in each axis. For example, to specify the back-right corner of a well at 1/4 of the well depth from the bottom, the call would be `_from_center_cartesian(1, 1, -0.5)`. No checks are performed to ensure that the resulting position will be inside of the well. :param x: a float in the range [-1.0, 1.0] for a percentage of half of the radius/length in the X axis :param y: a float in the range [-1.0, 1.0] for a percentage of half of the radius/width in the Y axis :param z: a float in the range [-1.0, 1.0] for a percentage of half of the height above/below the center :return: a Point representing the specified location in absolute deck coordinates """ center = self.center() if self._shape is WellShape.RECTANGULAR: x_size = self._length y_size = self._width else: x_size = self._diameter y_size = self._diameter z_size = self._depth return Point( x=center.point.x + (x * (x_size / 2.0)), y=center.point.y + (y * (y_size / 2.0)), z=center.point.z + (z * (z_size / 2.0)))
[ "def", "_from_center_cartesian", "(", "self", ",", "x", ":", "float", ",", "y", ":", "float", ",", "z", ":", "float", ")", "->", "Point", ":", "center", "=", "self", ".", "center", "(", ")", "if", "self", ".", "_shape", "is", "WellShape", ".", "RECTANGULAR", ":", "x_size", "=", "self", ".", "_length", "y_size", "=", "self", ".", "_width", "else", ":", "x_size", "=", "self", ".", "_diameter", "y_size", "=", "self", ".", "_diameter", "z_size", "=", "self", ".", "_depth", "return", "Point", "(", "x", "=", "center", ".", "point", ".", "x", "+", "(", "x", "*", "(", "x_size", "/", "2.0", ")", ")", ",", "y", "=", "center", ".", "point", ".", "y", "+", "(", "y", "*", "(", "y_size", "/", "2.0", ")", ")", ",", "z", "=", "center", ".", "point", ".", "z", "+", "(", "z", "*", "(", "z_size", "/", "2.0", ")", ")", ")" ]
41.058824
20.411765
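A numeric illustration of the fractional-offset arithmetic described above, with made-up well geometry (the real method reads center, diameter and depth from the labware definition).

center = (100.0, 50.0, 20.0)   # hypothetical well center in deck coordinates, mm
diameter, depth = 6.0, 10.0    # hypothetical circular well geometry, mm

def from_center_cartesian(x, y, z):
    return (center[0] + x * (diameter / 2.0),
            center[1] + y * (diameter / 2.0),
            center[2] + z * (depth / 2.0))

# back-right edge at 1/4 of the well depth from the bottom, as in the docstring
print(from_center_cartesian(1, 1, -0.5))  # (103.0, 53.0, 17.5)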
def get_stats(self, obj, stat_name): """ Send CLI command that returns list of integer counters. :param obj: requested object. :param stat_name: statistics command name. :return: list of counters. :rtype: list(int) """ return [int(v) for v in self.send_command_return(obj, stat_name, '?').split()]
[ "def", "get_stats", "(", "self", ",", "obj", ",", "stat_name", ")", ":", "return", "[", "int", "(", "v", ")", "for", "v", "in", "self", ".", "send_command_return", "(", "obj", ",", "stat_name", ",", "'?'", ")", ".", "split", "(", ")", "]" ]
38.444444
13.777778
def check_object( state, index, missing_msg=None, expand_msg=None, typestr="variable" ): """Check object existence (and equality) Check whether an object is defined in the student's process, and zoom in on its value in both student and solution process to inspect quality (with has_equal_value(). In ``pythonbackend``, both the student's submission as well as the solution code are executed, in separate processes. ``check_object()`` looks at these processes and checks if the referenced object is available in the student process. Next, you can use ``has_equal_value()`` to check whether the objects in the student and solution process correspond. Args: index (str): the name of the object which value has to be checked. missing_msg (str): feedback message when the object is not defined in the student process. expand_msg (str): If specified, this overrides any messages that are prepended by previous SCT chains. :Example: Suppose you want the student to create a variable ``x``, equal to 15: :: x = 15 The following SCT will verify this: :: Ex().check_object("x").has_equal_value() - ``check_object()`` will check if the variable ``x`` is defined in the student process. - ``has_equal_value()`` will check whether the value of ``x`` in the solution process is the same as in the student process. Note that ``has_equal_value()`` only looks at **end result** of a variable in the student process. In the example, how the object ``x`` came about in the student's submission, does not matter. This means that all of the following submission will also pass the above SCT: :: x = 15 x = 12 + 3 x = 3; x += 12 :Example: As the previous example mentioned, ``has_equal_value()`` only looks at the **end result**. If your exercise is first initializing and object and further down the script is updating the object, you can only look at the final value! Suppose you want the student to initialize and populate a list `my_list` as follows: :: my_list = [] for i in range(20): if i % 3 == 0: my_list.append(i) There is no robust way to verify whether `my_list = [0]` was coded correctly in a separate way. The best SCT would look something like this: :: msg = "Have you correctly initialized `my_list`?" Ex().check_correct( check_object('my_list').has_equal_value(), multi( # check initialization: [] or list() check_or( has_equal_ast(code = "[]", incorrect_msg = msg), check_function('list') ), check_for_loop().multi( check_iter().has_equal_value(), check_body().check_if_else().multi( check_test().multi( set_context(2).has_equal_value(), set_context(3).has_equal_value() ), check_body().set_context(3).\\ set_env(my_list = [0]).\\ has_equal_value(name = 'my_list') ) ) ) ) - ``check_correct()`` is used to robustly check whether ``my_list`` was built correctly. - If ``my_list`` is not correct, **both** the initialization and the population code are checked. :Example: Because checking object correctness incorrectly is such a common misconception, we're adding another example: :: import pandas as pd df = pd.DataFrame({'a': [1, 2, 3], 'b': [4, 5, 6]}) df['c'] = [7, 8, 9] The following SCT would be **wrong**, as it does not factor in the possibility that the 'add column ``c``' step could've been wrong: :: Ex().check_correct( check_object('df').has_equal_value(), check_function('pandas.DataFrame').check_args(0).has_equal_value() ) The following SCT would be better, as it is specific to the steps: :: # verify the df = pd.DataFrame(...) 
step Ex().check_correct( check_df('df').multi( check_keys('a').has_equal_value(), check_keys('b').has_equal_value() ), check_function('pandas.DataFrame').check_args(0).has_equal_value() ) # verify the df['c'] = [...] step Ex().check_df('df').check_keys('c').has_equal_value() :Example: pythonwhat compares the objects in the student and solution process with the ``==`` operator. For basic objects, this ``==`` is operator is properly implemented, so that the objects can be effectively compared. For more complex objects that are produced by third-party packages, however, it's possible that this equality operator is not implemented in a way you'd expect. Often, for these object types the ``==`` will compare the actual object instances: :: # pre exercise code class Number(): def __init__(self, n): self.n = n # solution x = Number(1) # sct that won't work Ex().check_object().has_equal_value() # sct Ex().check_object().has_equal_value(expr_code = 'x.n') # submissions that will pass this sct x = Number(1) x = Number(2 - 1) The basic SCT like in the previous example will notwork here. Notice how we used the ``expr_code`` argument to _override_ which value `has_equal_value()` is checking. Instead of checking whether `x` corresponds between student and solution process, it's now executing the expression ``x.n`` and seeing if the result of running this expression in both student and solution process match. """ # Only do the assertion if PYTHONWHAT_V2_ONLY is set to '1' if v2_only(): extra_msg = "If you want to check the value of an object in e.g. a for loop, use `has_equal_value(name = 'my_obj')` instead." state.assert_root("check_object", extra_msg=extra_msg) if missing_msg is None: missing_msg = "Did you define the {{typestr}} `{{index}}` without errors?" if expand_msg is None: expand_msg = "Did you correctly define the {{typestr}} `{{index}}`? " if ( not isDefinedInProcess(index, state.solution_process) and state.has_different_processes() ): raise InstructorError( "`check_object()` couldn't find object `%s` in the solution process." % index ) append_message = {"msg": expand_msg, "kwargs": {"index": index, "typestr": typestr}} # create child state, using either parser output, or create part from name fallback = lambda: ObjectAssignmentParser.get_part(index) stu_part = state.ast_dispatcher("object_assignments", state.student_ast).get(index, fallback()) sol_part = state.ast_dispatcher("object_assignments", state.solution_ast).get(index, fallback()) # test object exists _msg = state.build_message(missing_msg, append_message["kwargs"]) state.do_test(DefinedProcessTest(index, state.student_process, Feedback(_msg))) child = part_to_child( stu_part, sol_part, append_message, state, node_name="object_assignments" ) return child
[ "def", "check_object", "(", "state", ",", "index", ",", "missing_msg", "=", "None", ",", "expand_msg", "=", "None", ",", "typestr", "=", "\"variable\"", ")", ":", "# Only do the assertion if PYTHONWHAT_V2_ONLY is set to '1'", "if", "v2_only", "(", ")", ":", "extra_msg", "=", "\"If you want to check the value of an object in e.g. a for loop, use `has_equal_value(name = 'my_obj')` instead.\"", "state", ".", "assert_root", "(", "\"check_object\"", ",", "extra_msg", "=", "extra_msg", ")", "if", "missing_msg", "is", "None", ":", "missing_msg", "=", "\"Did you define the {{typestr}} `{{index}}` without errors?\"", "if", "expand_msg", "is", "None", ":", "expand_msg", "=", "\"Did you correctly define the {{typestr}} `{{index}}`? \"", "if", "(", "not", "isDefinedInProcess", "(", "index", ",", "state", ".", "solution_process", ")", "and", "state", ".", "has_different_processes", "(", ")", ")", ":", "raise", "InstructorError", "(", "\"`check_object()` couldn't find object `%s` in the solution process.\"", "%", "index", ")", "append_message", "=", "{", "\"msg\"", ":", "expand_msg", ",", "\"kwargs\"", ":", "{", "\"index\"", ":", "index", ",", "\"typestr\"", ":", "typestr", "}", "}", "# create child state, using either parser output, or create part from name", "fallback", "=", "lambda", ":", "ObjectAssignmentParser", ".", "get_part", "(", "index", ")", "stu_part", "=", "state", ".", "ast_dispatcher", "(", "\"object_assignments\"", ",", "state", ".", "student_ast", ")", ".", "get", "(", "index", ",", "fallback", "(", ")", ")", "sol_part", "=", "state", ".", "ast_dispatcher", "(", "\"object_assignments\"", ",", "state", ".", "solution_ast", ")", ".", "get", "(", "index", ",", "fallback", "(", ")", ")", "# test object exists", "_msg", "=", "state", ".", "build_message", "(", "missing_msg", ",", "append_message", "[", "\"kwargs\"", "]", ")", "state", ".", "do_test", "(", "DefinedProcessTest", "(", "index", ",", "state", ".", "student_process", ",", "Feedback", "(", "_msg", ")", ")", ")", "child", "=", "part_to_child", "(", "stu_part", ",", "sol_part", ",", "append_message", ",", "state", ",", "node_name", "=", "\"object_assignments\"", ")", "return", "child" ]
42.903955
33.451977
def _get_info(self, item, check_not_on_or_after=True): """ Get session information about a subject gotten from a specified IdP/AA. :param item: Information stored :return: The session information as a dictionary """ timestamp = item["timestamp"] if check_not_on_or_after and not time_util.not_on_or_after(timestamp): raise ToOld() try: return item["info"] except KeyError: return None
[ "def", "_get_info", "(", "self", ",", "item", ",", "check_not_on_or_after", "=", "True", ")", ":", "timestamp", "=", "item", "[", "\"timestamp\"", "]", "if", "check_not_on_or_after", "and", "not", "time_util", ".", "not_on_or_after", "(", "timestamp", ")", ":", "raise", "ToOld", "(", ")", "try", ":", "return", "item", "[", "\"info\"", "]", "except", "KeyError", ":", "return", "None" ]
30
18.25
def create(vm_): ''' Create a single VM from a data dict ''' try: # Check for required profile parameters before sending any API calls. if vm_['profile'] and config.is_profile_configured(__opts__, __active_provider_name__ or 'nova', vm_['profile'], vm_=vm_) is False: return False except AttributeError: pass deploy = config.get_cloud_config_value('deploy', vm_, __opts__) key_filename = config.get_cloud_config_value( 'ssh_key_file', vm_, __opts__, search_global=False, default=None ) if key_filename is not None and not os.path.isfile(key_filename): raise SaltCloudConfigError( 'The defined ssh_key_file \'{0}\' does not exist'.format( key_filename ) ) vm_['key_filename'] = key_filename __utils__['cloud.fire_event']( 'event', 'starting create', 'salt/cloud/{0}/creating'.format(vm_['name']), args=__utils__['cloud.filter_event']('creating', vm_, ['name', 'profile', 'provider', 'driver']), sock_dir=__opts__['sock_dir'], transport=__opts__['transport'] ) conn = get_conn() if 'instance_id' in vm_: # This was probably created via another process, and doesn't have # things like salt keys created yet, so let's create them now. if 'pub_key' not in vm_ and 'priv_key' not in vm_: log.debug('Generating minion keys for \'%s\'', vm_['name']) vm_['priv_key'], vm_['pub_key'] = salt.utils.cloud.gen_keys( salt.config.get_cloud_config_value( 'keysize', vm_, __opts__ ) ) data = conn.server_show_libcloud(vm_['instance_id']) if vm_['key_filename'] is None and 'change_password' in __opts__ and __opts__['change_password'] is True: vm_['password'] = salt.utils.pycrypto.secure_password() conn.root_password(vm_['instance_id'], vm_['password']) else: # Put together all of the information required to request the instance, # and then fire off the request for it data, vm_ = request_instance(vm_) # Pull the instance ID, valid for both spot and normal instances vm_['instance_id'] = data.id try: data = salt.utils.cloud.wait_for_ip( _query_node_data, update_args=(vm_, data, conn), timeout=config.get_cloud_config_value( 'wait_for_ip_timeout', vm_, __opts__, default=10 * 60), interval=config.get_cloud_config_value( 'wait_for_ip_interval', vm_, __opts__, default=10), ) except (SaltCloudExecutionTimeout, SaltCloudExecutionFailure) as exc: try: # It might be already up, let's destroy it! 
destroy(vm_['name']) except SaltCloudSystemExit: pass finally: raise SaltCloudSystemExit(six.text_type(exc)) log.debug('VM is now running') if ssh_interface(vm_) == 'private_ips': ip_address = preferred_ip(vm_, data.private_ips) elif ssh_interface(vm_) == 'fixed_ips': ip_address = preferred_ip(vm_, data.fixed_ips) elif ssh_interface(vm_) == 'floating_ips': ip_address = preferred_ip(vm_, data.floating_ips) else: ip_address = preferred_ip(vm_, data.public_ips) log.debug('Using IP address %s', ip_address) if salt.utils.cloud.get_salt_interface(vm_, __opts__) == 'private_ips': salt_ip_address = preferred_ip(vm_, data.private_ips) log.info('Salt interface set to: %s', salt_ip_address) elif salt.utils.cloud.get_salt_interface(vm_, __opts__) == 'fixed_ips': salt_ip_address = preferred_ip(vm_, data.fixed_ips) log.info('Salt interface set to: %s', salt_ip_address) elif salt.utils.cloud.get_salt_interface(vm_, __opts__) == 'floating_ips': salt_ip_address = preferred_ip(vm_, data.floating_ips) log.info('Salt interface set to: %s', salt_ip_address) else: salt_ip_address = preferred_ip(vm_, data.public_ips) log.debug('Salt interface set to: %s', salt_ip_address) if not ip_address: raise SaltCloudSystemExit('A valid IP address was not found') vm_['ssh_host'] = ip_address vm_['salt_host'] = salt_ip_address ret = __utils__['cloud.bootstrap'](vm_, __opts__) ret.update(data.__dict__) if 'password' in ret['extra']: del ret['extra']['password'] log.info('Created Cloud VM \'%s\'', vm_['name']) log.debug( '\'%s\' VM creation details:\n%s', vm_['name'], pprint.pformat(data.__dict__) ) event_data = { 'name': vm_['name'], 'profile': vm_['profile'], 'provider': vm_['driver'], 'instance_id': vm_['instance_id'], 'floating_ips': data.floating_ips, 'fixed_ips': data.fixed_ips, 'private_ips': data.private_ips, 'public_ips': data.public_ips } __utils__['cloud.fire_event']( 'event', 'created instance', 'salt/cloud/{0}/created'.format(vm_['name']), args=__utils__['cloud.filter_event']('created', event_data, list(event_data)), sock_dir=__opts__['sock_dir'], transport=__opts__['transport'] ) __utils__['cloud.cachedir_index_add'](vm_['name'], vm_['profile'], 'nova', vm_['driver']) return ret
[ "def", "create", "(", "vm_", ")", ":", "try", ":", "# Check for required profile parameters before sending any API calls.", "if", "vm_", "[", "'profile'", "]", "and", "config", ".", "is_profile_configured", "(", "__opts__", ",", "__active_provider_name__", "or", "'nova'", ",", "vm_", "[", "'profile'", "]", ",", "vm_", "=", "vm_", ")", "is", "False", ":", "return", "False", "except", "AttributeError", ":", "pass", "deploy", "=", "config", ".", "get_cloud_config_value", "(", "'deploy'", ",", "vm_", ",", "__opts__", ")", "key_filename", "=", "config", ".", "get_cloud_config_value", "(", "'ssh_key_file'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ",", "default", "=", "None", ")", "if", "key_filename", "is", "not", "None", "and", "not", "os", ".", "path", ".", "isfile", "(", "key_filename", ")", ":", "raise", "SaltCloudConfigError", "(", "'The defined ssh_key_file \\'{0}\\' does not exist'", ".", "format", "(", "key_filename", ")", ")", "vm_", "[", "'key_filename'", "]", "=", "key_filename", "__utils__", "[", "'cloud.fire_event'", "]", "(", "'event'", ",", "'starting create'", ",", "'salt/cloud/{0}/creating'", ".", "format", "(", "vm_", "[", "'name'", "]", ")", ",", "args", "=", "__utils__", "[", "'cloud.filter_event'", "]", "(", "'creating'", ",", "vm_", ",", "[", "'name'", ",", "'profile'", ",", "'provider'", ",", "'driver'", "]", ")", ",", "sock_dir", "=", "__opts__", "[", "'sock_dir'", "]", ",", "transport", "=", "__opts__", "[", "'transport'", "]", ")", "conn", "=", "get_conn", "(", ")", "if", "'instance_id'", "in", "vm_", ":", "# This was probably created via another process, and doesn't have", "# things like salt keys created yet, so let's create them now.", "if", "'pub_key'", "not", "in", "vm_", "and", "'priv_key'", "not", "in", "vm_", ":", "log", ".", "debug", "(", "'Generating minion keys for \\'%s\\''", ",", "vm_", "[", "'name'", "]", ")", "vm_", "[", "'priv_key'", "]", ",", "vm_", "[", "'pub_key'", "]", "=", "salt", ".", "utils", ".", "cloud", ".", "gen_keys", "(", "salt", ".", "config", ".", "get_cloud_config_value", "(", "'keysize'", ",", "vm_", ",", "__opts__", ")", ")", "data", "=", "conn", ".", "server_show_libcloud", "(", "vm_", "[", "'instance_id'", "]", ")", "if", "vm_", "[", "'key_filename'", "]", "is", "None", "and", "'change_password'", "in", "__opts__", "and", "__opts__", "[", "'change_password'", "]", "is", "True", ":", "vm_", "[", "'password'", "]", "=", "salt", ".", "utils", ".", "pycrypto", ".", "secure_password", "(", ")", "conn", ".", "root_password", "(", "vm_", "[", "'instance_id'", "]", ",", "vm_", "[", "'password'", "]", ")", "else", ":", "# Put together all of the information required to request the instance,", "# and then fire off the request for it", "data", ",", "vm_", "=", "request_instance", "(", "vm_", ")", "# Pull the instance ID, valid for both spot and normal instances", "vm_", "[", "'instance_id'", "]", "=", "data", ".", "id", "try", ":", "data", "=", "salt", ".", "utils", ".", "cloud", ".", "wait_for_ip", "(", "_query_node_data", ",", "update_args", "=", "(", "vm_", ",", "data", ",", "conn", ")", ",", "timeout", "=", "config", ".", "get_cloud_config_value", "(", "'wait_for_ip_timeout'", ",", "vm_", ",", "__opts__", ",", "default", "=", "10", "*", "60", ")", ",", "interval", "=", "config", ".", "get_cloud_config_value", "(", "'wait_for_ip_interval'", ",", "vm_", ",", "__opts__", ",", "default", "=", "10", ")", ",", ")", "except", "(", "SaltCloudExecutionTimeout", ",", "SaltCloudExecutionFailure", ")", "as", "exc", ":", "try", 
":", "# It might be already up, let's destroy it!", "destroy", "(", "vm_", "[", "'name'", "]", ")", "except", "SaltCloudSystemExit", ":", "pass", "finally", ":", "raise", "SaltCloudSystemExit", "(", "six", ".", "text_type", "(", "exc", ")", ")", "log", ".", "debug", "(", "'VM is now running'", ")", "if", "ssh_interface", "(", "vm_", ")", "==", "'private_ips'", ":", "ip_address", "=", "preferred_ip", "(", "vm_", ",", "data", ".", "private_ips", ")", "elif", "ssh_interface", "(", "vm_", ")", "==", "'fixed_ips'", ":", "ip_address", "=", "preferred_ip", "(", "vm_", ",", "data", ".", "fixed_ips", ")", "elif", "ssh_interface", "(", "vm_", ")", "==", "'floating_ips'", ":", "ip_address", "=", "preferred_ip", "(", "vm_", ",", "data", ".", "floating_ips", ")", "else", ":", "ip_address", "=", "preferred_ip", "(", "vm_", ",", "data", ".", "public_ips", ")", "log", ".", "debug", "(", "'Using IP address %s'", ",", "ip_address", ")", "if", "salt", ".", "utils", ".", "cloud", ".", "get_salt_interface", "(", "vm_", ",", "__opts__", ")", "==", "'private_ips'", ":", "salt_ip_address", "=", "preferred_ip", "(", "vm_", ",", "data", ".", "private_ips", ")", "log", ".", "info", "(", "'Salt interface set to: %s'", ",", "salt_ip_address", ")", "elif", "salt", ".", "utils", ".", "cloud", ".", "get_salt_interface", "(", "vm_", ",", "__opts__", ")", "==", "'fixed_ips'", ":", "salt_ip_address", "=", "preferred_ip", "(", "vm_", ",", "data", ".", "fixed_ips", ")", "log", ".", "info", "(", "'Salt interface set to: %s'", ",", "salt_ip_address", ")", "elif", "salt", ".", "utils", ".", "cloud", ".", "get_salt_interface", "(", "vm_", ",", "__opts__", ")", "==", "'floating_ips'", ":", "salt_ip_address", "=", "preferred_ip", "(", "vm_", ",", "data", ".", "floating_ips", ")", "log", ".", "info", "(", "'Salt interface set to: %s'", ",", "salt_ip_address", ")", "else", ":", "salt_ip_address", "=", "preferred_ip", "(", "vm_", ",", "data", ".", "public_ips", ")", "log", ".", "debug", "(", "'Salt interface set to: %s'", ",", "salt_ip_address", ")", "if", "not", "ip_address", ":", "raise", "SaltCloudSystemExit", "(", "'A valid IP address was not found'", ")", "vm_", "[", "'ssh_host'", "]", "=", "ip_address", "vm_", "[", "'salt_host'", "]", "=", "salt_ip_address", "ret", "=", "__utils__", "[", "'cloud.bootstrap'", "]", "(", "vm_", ",", "__opts__", ")", "ret", ".", "update", "(", "data", ".", "__dict__", ")", "if", "'password'", "in", "ret", "[", "'extra'", "]", ":", "del", "ret", "[", "'extra'", "]", "[", "'password'", "]", "log", ".", "info", "(", "'Created Cloud VM \\'%s\\''", ",", "vm_", "[", "'name'", "]", ")", "log", ".", "debug", "(", "'\\'%s\\' VM creation details:\\n%s'", ",", "vm_", "[", "'name'", "]", ",", "pprint", ".", "pformat", "(", "data", ".", "__dict__", ")", ")", "event_data", "=", "{", "'name'", ":", "vm_", "[", "'name'", "]", ",", "'profile'", ":", "vm_", "[", "'profile'", "]", ",", "'provider'", ":", "vm_", "[", "'driver'", "]", ",", "'instance_id'", ":", "vm_", "[", "'instance_id'", "]", ",", "'floating_ips'", ":", "data", ".", "floating_ips", ",", "'fixed_ips'", ":", "data", ".", "fixed_ips", ",", "'private_ips'", ":", "data", ".", "private_ips", ",", "'public_ips'", ":", "data", ".", "public_ips", "}", "__utils__", "[", "'cloud.fire_event'", "]", "(", "'event'", ",", "'created instance'", ",", "'salt/cloud/{0}/created'", ".", "format", "(", "vm_", "[", "'name'", "]", ")", ",", "args", "=", "__utils__", "[", "'cloud.filter_event'", "]", "(", "'created'", ",", "event_data", ",", "list", "(", "event_data", ")", 
")", ",", "sock_dir", "=", "__opts__", "[", "'sock_dir'", "]", ",", "transport", "=", "__opts__", "[", "'transport'", "]", ")", "__utils__", "[", "'cloud.cachedir_index_add'", "]", "(", "vm_", "[", "'name'", "]", ",", "vm_", "[", "'profile'", "]", ",", "'nova'", ",", "vm_", "[", "'driver'", "]", ")", "return", "ret" ]
38.076389
22.3125
def add_adjustment(self, adjustment): """ Create a new Gift Card Adjustment """ resource = self.post("adjustments", adjustment.encode()) return GiftCardAdjustment(GiftCard.format.decode(resource.body))
[ "def", "add_adjustment", "(", "self", ",", "adjustment", ")", ":", "resource", "=", "self", ".", "post", "(", "\"adjustments\"", ",", "adjustment", ".", "encode", "(", ")", ")", "return", "GiftCardAdjustment", "(", "GiftCard", ".", "format", ".", "decode", "(", "resource", ".", "body", ")", ")" ]
39.333333
10
def print_projects(projects=None): """ Print a list of projects registered for that experiment. Args: exp: The experiment to print all projects for. """ grouped_by = {} if not projects: print( "Your selection didn't include any projects for this experiment.") return for name in projects: prj = projects[name] if prj.GROUP not in grouped_by: grouped_by[prj.GROUP] = [] grouped_by[prj.GROUP].append("{name}/{group}".format( name=prj.NAME, group=prj.GROUP)) for name in grouped_by: print("group: {0}".format(name)) group_projects = sorted(grouped_by[name]) for prj in group_projects: prj_cls = projects[prj] version_str = None if hasattr(prj_cls, 'versions'): version_str = ", ".join(prj_cls.versions()) project_id = "{0}/{1}".format(prj_cls.NAME, prj_cls.GROUP) project_str = \ " name: {id:<32} version: {version:<24} source: {src}".format( id=str(project_id), version=str(prj_cls.VERSION), src=str(prj_cls.SRC_FILE)) print(project_str) if prj_cls.__doc__: docstr = prj_cls.__doc__.strip("\n ") print(" description: {desc}".format(desc=docstr)) if version_str: print(" versions: {versions}".format(versions=version_str)) print()
[ "def", "print_projects", "(", "projects", "=", "None", ")", ":", "grouped_by", "=", "{", "}", "if", "not", "projects", ":", "print", "(", "\"Your selection didn't include any projects for this experiment.\"", ")", "return", "for", "name", "in", "projects", ":", "prj", "=", "projects", "[", "name", "]", "if", "prj", ".", "GROUP", "not", "in", "grouped_by", ":", "grouped_by", "[", "prj", ".", "GROUP", "]", "=", "[", "]", "grouped_by", "[", "prj", ".", "GROUP", "]", ".", "append", "(", "\"{name}/{group}\"", ".", "format", "(", "name", "=", "prj", ".", "NAME", ",", "group", "=", "prj", ".", "GROUP", ")", ")", "for", "name", "in", "grouped_by", ":", "print", "(", "\"group: {0}\"", ".", "format", "(", "name", ")", ")", "group_projects", "=", "sorted", "(", "grouped_by", "[", "name", "]", ")", "for", "prj", "in", "group_projects", ":", "prj_cls", "=", "projects", "[", "prj", "]", "version_str", "=", "None", "if", "hasattr", "(", "prj_cls", ",", "'versions'", ")", ":", "version_str", "=", "\", \"", ".", "join", "(", "prj_cls", ".", "versions", "(", ")", ")", "project_id", "=", "\"{0}/{1}\"", ".", "format", "(", "prj_cls", ".", "NAME", ",", "prj_cls", ".", "GROUP", ")", "project_str", "=", "\" name: {id:<32} version: {version:<24} source: {src}\"", ".", "format", "(", "id", "=", "str", "(", "project_id", ")", ",", "version", "=", "str", "(", "prj_cls", ".", "VERSION", ")", ",", "src", "=", "str", "(", "prj_cls", ".", "SRC_FILE", ")", ")", "print", "(", "project_str", ")", "if", "prj_cls", ".", "__doc__", ":", "docstr", "=", "prj_cls", ".", "__doc__", ".", "strip", "(", "\"\\n \"", ")", "print", "(", "\" description: {desc}\"", ".", "format", "(", "desc", "=", "docstr", ")", ")", "if", "version_str", ":", "print", "(", "\" versions: {versions}\"", ".", "format", "(", "versions", "=", "version_str", ")", ")", "print", "(", ")" ]
31.510638
19.510638
def getquery(query): 'Performs a query and get the results.' try: conn = connection.cursor() conn.execute(query) data = conn.fetchall() conn.close() except: data = list() return data
[ "def", "getquery", "(", "query", ")", ":", "try", ":", "conn", "=", "connection", ".", "cursor", "(", ")", "conn", ".", "execute", "(", "query", ")", "data", "=", "conn", ".", "fetchall", "(", ")", "conn", ".", "close", "(", ")", "except", ":", "data", "=", "list", "(", ")", "return", "data" ]
20.666667
19.333333
def evaluateRforces(Pot,R,z,phi=None,t=0.,v=None): """ NAME: evaluateRforces PURPOSE: convenience function to evaluate a possible sum of potentials INPUT: Pot - a potential or list of potentials R - cylindrical Galactocentric distance (can be Quantity) z - distance above the plane (can be Quantity) phi - azimuth (optional; can be Quantity) t - time (optional; can be Quantity) v - current velocity in cylindrical coordinates (optional, but required when including dissipative forces; can be a Quantity) OUTPUT: F_R(R,z,phi,t) HISTORY: 2010-04-16 - Written - Bovy (NYU) 2018-03-16 - Added velocity input for dissipative forces - Bovy (UofT) """ return _evaluateRforces(Pot,R,z,phi=phi,t=t,v=v)
[ "def", "evaluateRforces", "(", "Pot", ",", "R", ",", "z", ",", "phi", "=", "None", ",", "t", "=", "0.", ",", "v", "=", "None", ")", ":", "return", "_evaluateRforces", "(", "Pot", ",", "R", ",", "z", ",", "phi", "=", "phi", ",", "t", "=", "t", ",", "v", "=", "v", ")" ]
21.777778
29.388889
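A hedged usage sketch, assuming galpy is installed and exposes MWPotential2014 alongside this helper in galpy.potential.

from galpy.potential import MWPotential2014, evaluateRforces

# Radial force of a Milky-Way-like potential at R=1, z=0 in natural units;
# by the normalization of the potential this comes out close to -1.
print(evaluateRforces(MWPotential2014, 1.0, 0.0))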
def load_data(self, *args, **kwargs): """ Collects positional and keyword arguments into `data` and applies units. :return: data """ # get positional argument names from parameters and apply them to args # update data with additional kwargs argpos = { v['extras']['argpos']: k for k, v in self.parameters.iteritems() if 'argpos' in v['extras'] } data = dict( {argpos[n]: a for n, a in enumerate(args)}, **kwargs ) return self.apply_units_to_cache(data)
[ "def", "load_data", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# get positional argument names from parameters and apply them to args", "# update data with additional kwargs", "argpos", "=", "{", "v", "[", "'extras'", "]", "[", "'argpos'", "]", ":", "k", "for", "k", ",", "v", "in", "self", ".", "parameters", ".", "iteritems", "(", ")", "if", "'argpos'", "in", "v", "[", "'extras'", "]", "}", "data", "=", "dict", "(", "{", "argpos", "[", "n", "]", ":", "a", "for", "n", ",", "a", "in", "enumerate", "(", "args", ")", "}", ",", "*", "*", "kwargs", ")", "return", "self", ".", "apply_units_to_cache", "(", "data", ")" ]
35.125
19.75
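A standalone sketch of the argpos mapping this method performs (the original iterates self.parameters with Python 2's iteritems() and then applies units); the parameter names below are made up.

parameters = {
    "latitude":  {"extras": {"argpos": 0}},
    "longitude": {"extras": {"argpos": 1}},
    "elevation": {"extras": {}},              # no argpos: keyword-only
}

def load_data(*args, **kwargs):
    argpos = {v["extras"]["argpos"]: k
              for k, v in parameters.items() if "argpos" in v["extras"]}
    return dict({argpos[n]: a for n, a in enumerate(args)}, **kwargs)

print(load_data(39.7, -105.2, elevation=1655))
# {'latitude': 39.7, 'longitude': -105.2, 'elevation': 1655}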
def set_window_geometry(geometry): """Set window geometry. Parameters ========== geometry : tuple (4 integers) or None x, y, dx, dy values employed to set the Qt backend geometry. """ if geometry is not None: x_geom, y_geom, dx_geom, dy_geom = geometry mngr = plt.get_current_fig_manager() if 'window' in dir(mngr): try: mngr.window.setGeometry(x_geom, y_geom, dx_geom, dy_geom) except AttributeError: pass else: pass
[ "def", "set_window_geometry", "(", "geometry", ")", ":", "if", "geometry", "is", "not", "None", ":", "x_geom", ",", "y_geom", ",", "dx_geom", ",", "dy_geom", "=", "geometry", "mngr", "=", "plt", ".", "get_current_fig_manager", "(", ")", "if", "'window'", "in", "dir", "(", "mngr", ")", ":", "try", ":", "mngr", ".", "window", ".", "setGeometry", "(", "x_geom", ",", "y_geom", ",", "dx_geom", ",", "dy_geom", ")", "except", "AttributeError", ":", "pass", "else", ":", "pass" ]
27.05
18.35
def _load_config(self): """Load the workflow stage config from the database.""" pb_key = SchedulingObject.get_key(PB_KEY, self._pb_id) stages = DB.get_hash_value(pb_key, 'workflow_stages') stages = ast.literal_eval(stages) return stages[self._index]
[ "def", "_load_config", "(", "self", ")", ":", "pb_key", "=", "SchedulingObject", ".", "get_key", "(", "PB_KEY", ",", "self", ".", "_pb_id", ")", "stages", "=", "DB", ".", "get_hash_value", "(", "pb_key", ",", "'workflow_stages'", ")", "stages", "=", "ast", ".", "literal_eval", "(", "stages", ")", "return", "stages", "[", "self", ".", "_index", "]" ]
47.333333
11.166667
def wr_xlsx(fout_xlsx, data_xlsx, **kws): """Write a spreadsheet into a xlsx file.""" from goatools.wr_tbl_class import WrXlsx # optional keyword args: fld2col_widths hdrs prt_if sort_by fld2fmt prt_flds items_str = kws.get("items", "items") if "items" not in kws else kws["items"] if data_xlsx: # Open xlsx file xlsxobj = WrXlsx(fout_xlsx, data_xlsx[0]._fields, **kws) worksheet = xlsxobj.add_worksheet() # Write title (optional) and headers. row_idx = xlsxobj.wr_title(worksheet) row_idx = xlsxobj.wr_hdrs(worksheet, row_idx) row_idx_data0 = row_idx # Write data row_idx = xlsxobj.wr_data(data_xlsx, row_idx, worksheet) # Close xlsx file xlsxobj.workbook.close() sys.stdout.write(" {N:>5} {ITEMS} WROTE: {FOUT}\n".format( N=row_idx-row_idx_data0, ITEMS=items_str, FOUT=fout_xlsx)) else: sys.stdout.write(" 0 {ITEMS}. NOT WRITING {FOUT}\n".format( ITEMS=items_str, FOUT=fout_xlsx))
[ "def", "wr_xlsx", "(", "fout_xlsx", ",", "data_xlsx", ",", "*", "*", "kws", ")", ":", "from", "goatools", ".", "wr_tbl_class", "import", "WrXlsx", "# optional keyword args: fld2col_widths hdrs prt_if sort_by fld2fmt prt_flds", "items_str", "=", "kws", ".", "get", "(", "\"items\"", ",", "\"items\"", ")", "if", "\"items\"", "not", "in", "kws", "else", "kws", "[", "\"items\"", "]", "if", "data_xlsx", ":", "# Open xlsx file", "xlsxobj", "=", "WrXlsx", "(", "fout_xlsx", ",", "data_xlsx", "[", "0", "]", ".", "_fields", ",", "*", "*", "kws", ")", "worksheet", "=", "xlsxobj", ".", "add_worksheet", "(", ")", "# Write title (optional) and headers.", "row_idx", "=", "xlsxobj", ".", "wr_title", "(", "worksheet", ")", "row_idx", "=", "xlsxobj", ".", "wr_hdrs", "(", "worksheet", ",", "row_idx", ")", "row_idx_data0", "=", "row_idx", "# Write data", "row_idx", "=", "xlsxobj", ".", "wr_data", "(", "data_xlsx", ",", "row_idx", ",", "worksheet", ")", "# Close xlsx file", "xlsxobj", ".", "workbook", ".", "close", "(", ")", "sys", ".", "stdout", ".", "write", "(", "\" {N:>5} {ITEMS} WROTE: {FOUT}\\n\"", ".", "format", "(", "N", "=", "row_idx", "-", "row_idx_data0", ",", "ITEMS", "=", "items_str", ",", "FOUT", "=", "fout_xlsx", ")", ")", "else", ":", "sys", ".", "stdout", ".", "write", "(", "\" 0 {ITEMS}. NOT WRITING {FOUT}\\n\"", ".", "format", "(", "ITEMS", "=", "items_str", ",", "FOUT", "=", "fout_xlsx", ")", ")" ]
46.318182
17.090909
def mouserightclick(self, window_name, object_name): """ Mouse right click on an object. @param window_name: Window name to look for, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @param object_name: Object name to look for, either full name, LDTP's name convention, or a Unix glob. Or menu hierarchy @type object_name: string @return: 1 on success. @rtype: integer """ object_handle = self._get_object_handle(window_name, object_name) if not object_handle.AXEnabled: raise LdtpServerException(u"Object %s state disabled" % object_name) self._grabfocus(object_handle) x, y, width, height = self._getobjectsize(object_handle) # Mouse right click on the object object_handle.clickMouseButtonRight((x + width / 2, y + height / 2)) return 1
[ "def", "mouserightclick", "(", "self", ",", "window_name", ",", "object_name", ")", ":", "object_handle", "=", "self", ".", "_get_object_handle", "(", "window_name", ",", "object_name", ")", "if", "not", "object_handle", ".", "AXEnabled", ":", "raise", "LdtpServerException", "(", "u\"Object %s state disabled\"", "%", "object_name", ")", "self", ".", "_grabfocus", "(", "object_handle", ")", "x", ",", "y", ",", "width", ",", "height", "=", "self", ".", "_getobjectsize", "(", "object_handle", ")", "# Mouse right click on the object", "object_handle", ".", "clickMouseButtonRight", "(", "(", "x", "+", "width", "/", "2", ",", "y", "+", "height", "/", "2", ")", ")", "return", "1" ]
41.772727
17.227273
def topDownCompute(self, encoded): """ See the function description in base.py """ # Get/generate the topDown mapping table topDownMappingM = self._getTopDownMapping() # See which "category" we match the closest. category = topDownMappingM.rightVecProd(encoded).argmax() # Return that bucket info return self.getBucketInfo([category])
[ "def", "topDownCompute", "(", "self", ",", "encoded", ")", ":", "# Get/generate the topDown mapping table", "topDownMappingM", "=", "self", ".", "_getTopDownMapping", "(", ")", "# See which \"category\" we match the closest.", "category", "=", "topDownMappingM", ".", "rightVecProd", "(", "encoded", ")", ".", "argmax", "(", ")", "# Return that bucket info", "return", "self", ".", "getBucketInfo", "(", "[", "category", "]", ")" ]
29.833333
14.833333
def collapse_initials(name): """Remove the space between initials, eg T. A. --> T.A.""" if len(name.split(".")) > 1: name = re.sub(r'([A-Z]\.)[\s\-]+(?=[A-Z]\.)', r'\1', name) return name
[ "def", "collapse_initials", "(", "name", ")", ":", "if", "len", "(", "name", ".", "split", "(", "\".\"", ")", ")", ">", "1", ":", "name", "=", "re", ".", "sub", "(", "r'([A-Z]\\.)[\\s\\-]+(?=[A-Z]\\.)'", ",", "r'\\1'", ",", "name", ")", "return", "name" ]
40.6
14.2
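A couple of illustrative calls; the function is copied verbatim above the calls so the snippet runs on its own, and the names are sample inputs only.

import re

def collapse_initials(name):
    """Remove the space between initials, eg T. A. --> T.A."""
    if len(name.split(".")) > 1:
        name = re.sub(r'([A-Z]\.)[\s\-]+(?=[A-Z]\.)', r'\1', name)
    return name

print(collapse_initials("T. A. Smith"))   # T.A. Smith
print(collapse_initials("J.-P. Sartre"))  # J.P. Sartre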
def _parse_normalizations(self, normalizations): """Returns a list of parsed normalizations. Iterates over a list of normalizations, removing those not correctly defined. It also transforms complex items into a common format (list of tuples and strings). Args: normalizations: List of normalizations to parse. Returns: A list of normalizations after being parsed and curated. """ parsed_normalizations = [] if isinstance(normalizations, list): for item in normalizations: normalization = self._parse_normalization(item) if normalization: parsed_normalizations.append(normalization) else: raise ConfigError('List expected. Found %s' % type(normalizations)) return parsed_normalizations
[ "def", "_parse_normalizations", "(", "self", ",", "normalizations", ")", ":", "parsed_normalizations", "=", "[", "]", "if", "isinstance", "(", "normalizations", ",", "list", ")", ":", "for", "item", "in", "normalizations", ":", "normalization", "=", "self", ".", "_parse_normalization", "(", "item", ")", "if", "normalization", ":", "parsed_normalizations", ".", "append", "(", "normalization", ")", "else", ":", "raise", "ConfigError", "(", "'List expected. Found %s'", "%", "type", "(", "normalizations", ")", ")", "return", "parsed_normalizations" ]
35.666667
21.083333
def bernstein(n, t): """Returns a list of the Bernstein basis polynomials b_{i, n} evaluated at t, for i = 0...n""" t1 = 1-t return [n_choose_k(n, k) * t1**(n-k) * t**k for k in range(n+1)]
[ "def", "bernstein", "(", "n", ",", "t", ")", ":", "t1", "=", "1", "-", "t", "return", "[", "n_choose_k", "(", "n", ",", "k", ")", "*", "t1", "**", "(", "n", "-", "k", ")", "*", "t", "**", "k", "for", "k", "in", "range", "(", "n", "+", "1", ")", "]" ]
40
15.2
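A quick check of the basis, restated with math.comb standing in for the n_choose_k helper the original relies on (values are illustrative).

from math import comb  # Python 3.8+

def bernstein(n, t):
    t1 = 1 - t
    return [comb(n, k) * t1**(n - k) * t**k for k in range(n + 1)]

print(bernstein(2, 0.5))        # [0.25, 0.5, 0.25]
print(sum(bernstein(3, 0.3)))   # 1.0 up to rounding: the basis is a partition of unity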
def apply(): """Monkey patching rope See [1], [2], [3], [4] and [5] in module docstring.""" from spyder.utils.programs import is_module_installed if is_module_installed('rope', '<0.9.4'): import rope raise ImportError("rope %s can't be patched" % rope.VERSION) # [1] Patching project.Project for compatibility with py2exe/cx_Freeze # distributions from spyder.config.base import is_py2exe_or_cx_Freeze if is_py2exe_or_cx_Freeze(): from rope.base import project class PatchedProject(project.Project): def _default_config(self): # py2exe/cx_Freeze distribution from spyder.config.base import get_module_source_path fname = get_module_source_path('spyder', 'default_config.py') return open(fname, 'rb').read() project.Project = PatchedProject # Patching pycore.PyCore... from rope.base import pycore class PatchedPyCore(pycore.PyCore): # [2] ...so that forced builtin modules (i.e. modules that were # declared as 'extension_modules' in rope preferences) will be indeed # recognized as builtins by rope, as expected # # This patch is included in rope 0.9.4+ but applying it anyway is ok def get_module(self, name, folder=None): """Returns a `PyObject` if the module was found.""" # check if this is a builtin module pymod = self._builtin_module(name) if pymod is not None: return pymod module = self.find_module(name, folder) if module is None: raise pycore.ModuleNotFoundError( 'Module %s not found' % name) return self.resource_to_pyobject(module) # [3] ...to avoid considering folders without __init__.py as Python # packages def _find_module_in_folder(self, folder, modname): module = folder packages = modname.split('.') for pkg in packages[:-1]: if module.is_folder() and module.has_child(pkg): module = module.get_child(pkg) else: return None if module.is_folder(): if module.has_child(packages[-1]) and \ module.get_child(packages[-1]).is_folder() and \ module.get_child(packages[-1]).has_child('__init__.py'): return module.get_child(packages[-1]) elif module.has_child(packages[-1] + '.py') and \ not module.get_child(packages[-1] + '.py').is_folder(): return module.get_child(packages[-1] + '.py') pycore.PyCore = PatchedPyCore # [2] Patching BuiltinName for the go to definition feature to simply work # with forced builtins from rope.base import builtins, libutils, pyobjects import inspect import os.path as osp class PatchedBuiltinName(builtins.BuiltinName): def _pycore(self): p = self.pyobject while p.parent is not None: p = p.parent if isinstance(p, builtins.BuiltinModule) and p.pycore is not None: return p.pycore def get_definition_location(self): if not inspect.isbuiltin(self.pyobject): _lines, lineno = inspect.getsourcelines(self.pyobject.builtin) path = inspect.getfile(self.pyobject.builtin) if path.endswith('pyc') and osp.isfile(path[:-1]): path = path[:-1] pycore = self._pycore() if pycore and pycore.project: resource = libutils.path_to_resource(pycore.project, path) module = pyobjects.PyModule(pycore, None, resource) return (module, lineno) return (None, None) builtins.BuiltinName = PatchedBuiltinName # [4] Patching several PyDocExtractor methods: # 1. get_doc: # To force rope to return the docstring of any object which has one, even # if it's not an instance of AbstractFunction, AbstractClass, or # AbstractModule. # Also, to use utils.dochelpers.getdoc to get docs from forced builtins. # # 2. _get_class_docstring and _get_single_function_docstring: # To not let rope add a 2 spaces indentation to every docstring, which was # breaking our rich text mode. 
The only value that we are modifying is the # 'indents' keyword of those methods, from 2 to 0. # # 3. get_calltip # To easily get calltips of forced builtins from rope.contrib import codeassist from spyder_kernels.utils.dochelpers import getdoc from rope.base import exceptions class PatchedPyDocExtractor(codeassist.PyDocExtractor): def get_builtin_doc(self, pyobject): buitin = pyobject.builtin return getdoc(buitin) def get_doc(self, pyobject): if hasattr(pyobject, 'builtin'): doc = self.get_builtin_doc(pyobject) return doc elif isinstance(pyobject, builtins.BuiltinModule): docstring = pyobject.get_doc() if docstring is not None: docstring = self._trim_docstring(docstring) else: docstring = '' # TODO: Add a module_name key, so that the name could appear # on the OI text filed but not be used by sphinx to render # the page doc = {'name': '', 'argspec': '', 'note': '', 'docstring': docstring } return doc elif isinstance(pyobject, pyobjects.AbstractFunction): return self._get_function_docstring(pyobject) elif isinstance(pyobject, pyobjects.AbstractClass): return self._get_class_docstring(pyobject) elif isinstance(pyobject, pyobjects.AbstractModule): return self._trim_docstring(pyobject.get_doc()) elif pyobject.get_doc() is not None: # Spyder patch return self._trim_docstring(pyobject.get_doc()) return None def get_calltip(self, pyobject, ignore_unknown=False, remove_self=False): if hasattr(pyobject, 'builtin'): doc = self.get_builtin_doc(pyobject) return doc['name'] + doc['argspec'] try: if isinstance(pyobject, pyobjects.AbstractClass): pyobject = pyobject['__init__'].get_object() if not isinstance(pyobject, pyobjects.AbstractFunction): pyobject = pyobject['__call__'].get_object() except exceptions.AttributeNotFoundError: return None if ignore_unknown and not isinstance(pyobject, pyobjects.PyFunction): return if isinstance(pyobject, pyobjects.AbstractFunction): result = self._get_function_signature(pyobject, add_module=True) if remove_self and self._is_method(pyobject): return result.replace('(self)', '()').replace('(self, ', '(') return result def _get_class_docstring(self, pyclass): contents = self._trim_docstring(pyclass.get_doc(), indents=0) supers = [super.get_name() for super in pyclass.get_superclasses()] doc = 'class %s(%s):\n\n' % (pyclass.get_name(), ', '.join(supers)) + contents if '__init__' in pyclass: init = pyclass['__init__'].get_object() if isinstance(init, pyobjects.AbstractFunction): doc += '\n\n' + self._get_single_function_docstring(init) return doc def _get_single_function_docstring(self, pyfunction): docs = pyfunction.get_doc() docs = self._trim_docstring(docs, indents=0) return docs codeassist.PyDocExtractor = PatchedPyDocExtractor # [5] Get the right matplotlib docstrings for Help try: import matplotlib as mpl mpl.rcParams['docstring.hardcopy'] = True except: pass
[ "def", "apply", "(", ")", ":", "from", "spyder", ".", "utils", ".", "programs", "import", "is_module_installed", "if", "is_module_installed", "(", "'rope'", ",", "'<0.9.4'", ")", ":", "import", "rope", "raise", "ImportError", "(", "\"rope %s can't be patched\"", "%", "rope", ".", "VERSION", ")", "# [1] Patching project.Project for compatibility with py2exe/cx_Freeze\r", "# distributions\r", "from", "spyder", ".", "config", ".", "base", "import", "is_py2exe_or_cx_Freeze", "if", "is_py2exe_or_cx_Freeze", "(", ")", ":", "from", "rope", ".", "base", "import", "project", "class", "PatchedProject", "(", "project", ".", "Project", ")", ":", "def", "_default_config", "(", "self", ")", ":", "# py2exe/cx_Freeze distribution\r", "from", "spyder", ".", "config", ".", "base", "import", "get_module_source_path", "fname", "=", "get_module_source_path", "(", "'spyder'", ",", "'default_config.py'", ")", "return", "open", "(", "fname", ",", "'rb'", ")", ".", "read", "(", ")", "project", ".", "Project", "=", "PatchedProject", "# Patching pycore.PyCore...\r", "from", "rope", ".", "base", "import", "pycore", "class", "PatchedPyCore", "(", "pycore", ".", "PyCore", ")", ":", "# [2] ...so that forced builtin modules (i.e. modules that were \r", "# declared as 'extension_modules' in rope preferences) will be indeed\r", "# recognized as builtins by rope, as expected\r", "# \r", "# This patch is included in rope 0.9.4+ but applying it anyway is ok\r", "def", "get_module", "(", "self", ",", "name", ",", "folder", "=", "None", ")", ":", "\"\"\"Returns a `PyObject` if the module was found.\"\"\"", "# check if this is a builtin module\r", "pymod", "=", "self", ".", "_builtin_module", "(", "name", ")", "if", "pymod", "is", "not", "None", ":", "return", "pymod", "module", "=", "self", ".", "find_module", "(", "name", ",", "folder", ")", "if", "module", "is", "None", ":", "raise", "pycore", ".", "ModuleNotFoundError", "(", "'Module %s not found'", "%", "name", ")", "return", "self", ".", "resource_to_pyobject", "(", "module", ")", "# [3] ...to avoid considering folders without __init__.py as Python\r", "# packages\r", "def", "_find_module_in_folder", "(", "self", ",", "folder", ",", "modname", ")", ":", "module", "=", "folder", "packages", "=", "modname", ".", "split", "(", "'.'", ")", "for", "pkg", "in", "packages", "[", ":", "-", "1", "]", ":", "if", "module", ".", "is_folder", "(", ")", "and", "module", ".", "has_child", "(", "pkg", ")", ":", "module", "=", "module", ".", "get_child", "(", "pkg", ")", "else", ":", "return", "None", "if", "module", ".", "is_folder", "(", ")", ":", "if", "module", ".", "has_child", "(", "packages", "[", "-", "1", "]", ")", "and", "module", ".", "get_child", "(", "packages", "[", "-", "1", "]", ")", ".", "is_folder", "(", ")", "and", "module", ".", "get_child", "(", "packages", "[", "-", "1", "]", ")", ".", "has_child", "(", "'__init__.py'", ")", ":", "return", "module", ".", "get_child", "(", "packages", "[", "-", "1", "]", ")", "elif", "module", ".", "has_child", "(", "packages", "[", "-", "1", "]", "+", "'.py'", ")", "and", "not", "module", ".", "get_child", "(", "packages", "[", "-", "1", "]", "+", "'.py'", ")", ".", "is_folder", "(", ")", ":", "return", "module", ".", "get_child", "(", "packages", "[", "-", "1", "]", "+", "'.py'", ")", "pycore", ".", "PyCore", "=", "PatchedPyCore", "# [2] Patching BuiltinName for the go to definition feature to simply work \r", "# with forced builtins\r", "from", "rope", ".", "base", "import", "builtins", ",", "libutils", ",", "pyobjects", "import", 
"inspect", "import", "os", ".", "path", "as", "osp", "class", "PatchedBuiltinName", "(", "builtins", ".", "BuiltinName", ")", ":", "def", "_pycore", "(", "self", ")", ":", "p", "=", "self", ".", "pyobject", "while", "p", ".", "parent", "is", "not", "None", ":", "p", "=", "p", ".", "parent", "if", "isinstance", "(", "p", ",", "builtins", ".", "BuiltinModule", ")", "and", "p", ".", "pycore", "is", "not", "None", ":", "return", "p", ".", "pycore", "def", "get_definition_location", "(", "self", ")", ":", "if", "not", "inspect", ".", "isbuiltin", "(", "self", ".", "pyobject", ")", ":", "_lines", ",", "lineno", "=", "inspect", ".", "getsourcelines", "(", "self", ".", "pyobject", ".", "builtin", ")", "path", "=", "inspect", ".", "getfile", "(", "self", ".", "pyobject", ".", "builtin", ")", "if", "path", ".", "endswith", "(", "'pyc'", ")", "and", "osp", ".", "isfile", "(", "path", "[", ":", "-", "1", "]", ")", ":", "path", "=", "path", "[", ":", "-", "1", "]", "pycore", "=", "self", ".", "_pycore", "(", ")", "if", "pycore", "and", "pycore", ".", "project", ":", "resource", "=", "libutils", ".", "path_to_resource", "(", "pycore", ".", "project", ",", "path", ")", "module", "=", "pyobjects", ".", "PyModule", "(", "pycore", ",", "None", ",", "resource", ")", "return", "(", "module", ",", "lineno", ")", "return", "(", "None", ",", "None", ")", "builtins", ".", "BuiltinName", "=", "PatchedBuiltinName", "# [4] Patching several PyDocExtractor methods:\r", "# 1. get_doc:\r", "# To force rope to return the docstring of any object which has one, even\r", "# if it's not an instance of AbstractFunction, AbstractClass, or\r", "# AbstractModule.\r", "# Also, to use utils.dochelpers.getdoc to get docs from forced builtins.\r", "#\r", "# 2. _get_class_docstring and _get_single_function_docstring:\r", "# To not let rope add a 2 spaces indentation to every docstring, which was\r", "# breaking our rich text mode. The only value that we are modifying is the\r", "# 'indents' keyword of those methods, from 2 to 0.\r", "#\r", "# 3. 
get_calltip\r", "# To easily get calltips of forced builtins\r", "from", "rope", ".", "contrib", "import", "codeassist", "from", "spyder_kernels", ".", "utils", ".", "dochelpers", "import", "getdoc", "from", "rope", ".", "base", "import", "exceptions", "class", "PatchedPyDocExtractor", "(", "codeassist", ".", "PyDocExtractor", ")", ":", "def", "get_builtin_doc", "(", "self", ",", "pyobject", ")", ":", "buitin", "=", "pyobject", ".", "builtin", "return", "getdoc", "(", "buitin", ")", "def", "get_doc", "(", "self", ",", "pyobject", ")", ":", "if", "hasattr", "(", "pyobject", ",", "'builtin'", ")", ":", "doc", "=", "self", ".", "get_builtin_doc", "(", "pyobject", ")", "return", "doc", "elif", "isinstance", "(", "pyobject", ",", "builtins", ".", "BuiltinModule", ")", ":", "docstring", "=", "pyobject", ".", "get_doc", "(", ")", "if", "docstring", "is", "not", "None", ":", "docstring", "=", "self", ".", "_trim_docstring", "(", "docstring", ")", "else", ":", "docstring", "=", "''", "# TODO: Add a module_name key, so that the name could appear\r", "# on the OI text filed but not be used by sphinx to render\r", "# the page\r", "doc", "=", "{", "'name'", ":", "''", ",", "'argspec'", ":", "''", ",", "'note'", ":", "''", ",", "'docstring'", ":", "docstring", "}", "return", "doc", "elif", "isinstance", "(", "pyobject", ",", "pyobjects", ".", "AbstractFunction", ")", ":", "return", "self", ".", "_get_function_docstring", "(", "pyobject", ")", "elif", "isinstance", "(", "pyobject", ",", "pyobjects", ".", "AbstractClass", ")", ":", "return", "self", ".", "_get_class_docstring", "(", "pyobject", ")", "elif", "isinstance", "(", "pyobject", ",", "pyobjects", ".", "AbstractModule", ")", ":", "return", "self", ".", "_trim_docstring", "(", "pyobject", ".", "get_doc", "(", ")", ")", "elif", "pyobject", ".", "get_doc", "(", ")", "is", "not", "None", ":", "# Spyder patch\r", "return", "self", ".", "_trim_docstring", "(", "pyobject", ".", "get_doc", "(", ")", ")", "return", "None", "def", "get_calltip", "(", "self", ",", "pyobject", ",", "ignore_unknown", "=", "False", ",", "remove_self", "=", "False", ")", ":", "if", "hasattr", "(", "pyobject", ",", "'builtin'", ")", ":", "doc", "=", "self", ".", "get_builtin_doc", "(", "pyobject", ")", "return", "doc", "[", "'name'", "]", "+", "doc", "[", "'argspec'", "]", "try", ":", "if", "isinstance", "(", "pyobject", ",", "pyobjects", ".", "AbstractClass", ")", ":", "pyobject", "=", "pyobject", "[", "'__init__'", "]", ".", "get_object", "(", ")", "if", "not", "isinstance", "(", "pyobject", ",", "pyobjects", ".", "AbstractFunction", ")", ":", "pyobject", "=", "pyobject", "[", "'__call__'", "]", ".", "get_object", "(", ")", "except", "exceptions", ".", "AttributeNotFoundError", ":", "return", "None", "if", "ignore_unknown", "and", "not", "isinstance", "(", "pyobject", ",", "pyobjects", ".", "PyFunction", ")", ":", "return", "if", "isinstance", "(", "pyobject", ",", "pyobjects", ".", "AbstractFunction", ")", ":", "result", "=", "self", ".", "_get_function_signature", "(", "pyobject", ",", "add_module", "=", "True", ")", "if", "remove_self", "and", "self", ".", "_is_method", "(", "pyobject", ")", ":", "return", "result", ".", "replace", "(", "'(self)'", ",", "'()'", ")", ".", "replace", "(", "'(self, '", ",", "'('", ")", "return", "result", "def", "_get_class_docstring", "(", "self", ",", "pyclass", ")", ":", "contents", "=", "self", ".", "_trim_docstring", "(", "pyclass", ".", "get_doc", "(", ")", ",", "indents", "=", "0", ")", "supers", "=", "[", "super", ".", "get_name", "(", 
")", "for", "super", "in", "pyclass", ".", "get_superclasses", "(", ")", "]", "doc", "=", "'class %s(%s):\\n\\n'", "%", "(", "pyclass", ".", "get_name", "(", ")", ",", "', '", ".", "join", "(", "supers", ")", ")", "+", "contents", "if", "'__init__'", "in", "pyclass", ":", "init", "=", "pyclass", "[", "'__init__'", "]", ".", "get_object", "(", ")", "if", "isinstance", "(", "init", ",", "pyobjects", ".", "AbstractFunction", ")", ":", "doc", "+=", "'\\n\\n'", "+", "self", ".", "_get_single_function_docstring", "(", "init", ")", "return", "doc", "def", "_get_single_function_docstring", "(", "self", ",", "pyfunction", ")", ":", "docs", "=", "pyfunction", ".", "get_doc", "(", ")", "docs", "=", "self", ".", "_trim_docstring", "(", "docs", ",", "indents", "=", "0", ")", "return", "docs", "codeassist", ".", "PyDocExtractor", "=", "PatchedPyDocExtractor", "# [5] Get the right matplotlib docstrings for Help\r", "try", ":", "import", "matplotlib", "as", "mpl", "mpl", ".", "rcParams", "[", "'docstring.hardcopy'", "]", "=", "True", "except", ":", "pass" ]
46.214286
19.021978
def create_box_comments(self, box_key, message, **kwargs): '''Creates a comment in a box with the provided attributes. Args: box_key key for box message message string kwargs {} see StreakComment object for more information return (status code, comment dict) ''' uri = '/'.join([ self.api_uri, self.boxes_suffix, box_key, self.comments_suffix ]) if not (box_key and message): return requests.codes.bad_request, None kwargs.update({'message':message}) new_cmt = StreakComment(**kwargs) #print(new_pl.attributes) #print(new_pl.to_dict()) #raw_input() code, r_data = self._req('put', uri, new_cmt.to_dict()) return code, r_data
[ "def", "create_box_comments", "(", "self", ",", "box_key", ",", "message", ",", "*", "*", "kwargs", ")", ":", "uri", "=", "'/'", ".", "join", "(", "[", "self", ".", "api_uri", ",", "self", ".", "boxes_suffix", ",", "box_key", ",", "self", ".", "comments_suffix", "]", ")", "if", "not", "(", "box_key", "and", "message", ")", ":", "return", "requests", ".", "codes", ".", "bad_request", ",", "None", "kwargs", ".", "update", "(", "{", "'message'", ":", "message", "}", ")", "new_cmt", "=", "StreakComment", "(", "*", "*", "kwargs", ")", "#print(new_pl.attributes)", "#print(new_pl.to_dict())", "#raw_input()", "code", ",", "r_data", "=", "self", ".", "_req", "(", "'put'", ",", "uri", ",", "new_cmt", ".", "to_dict", "(", ")", ")", "return", "code", ",", "r_data" ]
25.259259
20.592593
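A minimal usage sketch for the comment-creation call above. The `client` object and box key are hypothetical placeholders (any wrapper instance exposing create_box_comments as defined above), and the 200-on-success convention is an assumption for illustration.

def post_note(client, box_key):
    # create_box_comments returns an (HTTP status, comment dict) pair
    code, comment = client.create_box_comments(box_key, "Pinged the customer today.")
    if code == 200:          # treat any non-200 status as a failure
        return comment
    raise RuntimeError("comment creation failed with HTTP %s" % code)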
def to(self, new_unit): """ Conversion to a new_unit. Right now, only supports 1 to 1 mapping of units of each type. Args: new_unit: New unit type. Returns: A FloatWithUnit object in the new units. Example usage: >>> e = Energy(1.1, "eV") >>> e = Energy(1.1, "Ha") >>> e.to("eV") 29.932522246 eV """ return FloatWithUnit( self * self.unit.get_conversion_factor(new_unit), unit_type=self._unit_type, unit=new_unit)
[ "def", "to", "(", "self", ",", "new_unit", ")", ":", "return", "FloatWithUnit", "(", "self", "*", "self", ".", "unit", ".", "get_conversion_factor", "(", "new_unit", ")", ",", "unit_type", "=", "self", ".", "_unit_type", ",", "unit", "=", "new_unit", ")" ]
26.285714
17.52381
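A small sketch of the arithmetic behind to(): the value is multiplied by a unit conversion factor and rewrapped in the new unit. The factor table below is an assumption standing in for Unit.get_conversion_factor(); the Hartree-to-eV constant is the usual physical value.

# Illustrative only: a toy conversion-factor lookup, not the library's own tables.
HARTREE_IN_EV = 27.211386245988
FACTORS = {("Ha", "eV"): HARTREE_IN_EV, ("eV", "Ha"): 1.0 / HARTREE_IN_EV}

def convert(value, unit, new_unit):
    """Return value expressed in new_unit, mirroring FloatWithUnit.to()."""
    return value * FACTORS[(unit, new_unit)]

print(convert(1.1, "Ha", "eV"))   # ~29.9325, matching the docstring example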
def _warn_silly_options(cls, args): '''Print warnings about any options that may be silly.''' if 'page-requisites' in args.span_hosts_allow \ and not args.page_requisites: _logger.warning( _('Spanning hosts is allowed for page requisites, ' 'but the page requisites option is not on.') ) if 'linked-pages' in args.span_hosts_allow \ and not args.recursive: _logger.warning( _('Spanning hosts is allowed for linked pages, ' 'but the recursive option is not on.') ) if args.warc_file and \ (args.http_proxy or args.https_proxy): _logger.warning(_('WARC specifications do not handle proxies.')) if (args.password or args.ftp_password or args.http_password or args.proxy_password) and \ args.warc_file: _logger.warning( _('Your password is recorded in the WARC file.'))
[ "def", "_warn_silly_options", "(", "cls", ",", "args", ")", ":", "if", "'page-requisites'", "in", "args", ".", "span_hosts_allow", "and", "not", "args", ".", "page_requisites", ":", "_logger", ".", "warning", "(", "_", "(", "'Spanning hosts is allowed for page requisites, '", "'but the page requisites option is not on.'", ")", ")", "if", "'linked-pages'", "in", "args", ".", "span_hosts_allow", "and", "not", "args", ".", "recursive", ":", "_logger", ".", "warning", "(", "_", "(", "'Spanning hosts is allowed for linked pages, '", "'but the recursive option is not on.'", ")", ")", "if", "args", ".", "warc_file", "and", "(", "args", ".", "http_proxy", "or", "args", ".", "https_proxy", ")", ":", "_logger", ".", "warning", "(", "_", "(", "'WARC specifications do not handle proxies.'", ")", ")", "if", "(", "args", ".", "password", "or", "args", ".", "ftp_password", "or", "args", ".", "http_password", "or", "args", ".", "proxy_password", ")", "and", "args", ".", "warc_file", ":", "_logger", ".", "warning", "(", "_", "(", "'Your password is recorded in the WARC file.'", ")", ")" ]
40.8
19.52
def validate_grant_type(self, client_id, grant_type, client, request, *args, **kwargs): """Ensure the client is authorized to use the grant type requested. It will allow any of the four grant types (`authorization_code`, `password`, `client_credentials`, `refresh_token`) by default. Implemented `allowed_grant_types` for client object to authorize the request. It is suggested that `allowed_grant_types` should contain at least `authorization_code` and `refresh_token`. """ if self._usergetter is None and grant_type == 'password': log.debug('Password credential authorization is disabled.') return False default_grant_types = ( 'authorization_code', 'password', 'client_credentials', 'refresh_token', ) # Grant type is allowed if it is part of the 'allowed_grant_types' # of the selected client or if it is one of the default grant types if hasattr(client, 'allowed_grant_types'): if grant_type not in client.allowed_grant_types: return False else: if grant_type not in default_grant_types: return False if grant_type == 'client_credentials': if not hasattr(client, 'user'): log.debug('Client should have a user property') return False request.user = client.user return True
[ "def", "validate_grant_type", "(", "self", ",", "client_id", ",", "grant_type", ",", "client", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "_usergetter", "is", "None", "and", "grant_type", "==", "'password'", ":", "log", ".", "debug", "(", "'Password credential authorization is disabled.'", ")", "return", "False", "default_grant_types", "=", "(", "'authorization_code'", ",", "'password'", ",", "'client_credentials'", ",", "'refresh_token'", ",", ")", "# Grant type is allowed if it is part of the 'allowed_grant_types'", "# of the selected client or if it is one of the default grant types", "if", "hasattr", "(", "client", ",", "'allowed_grant_types'", ")", ":", "if", "grant_type", "not", "in", "client", ".", "allowed_grant_types", ":", "return", "False", "else", ":", "if", "grant_type", "not", "in", "default_grant_types", ":", "return", "False", "if", "grant_type", "==", "'client_credentials'", ":", "if", "not", "hasattr", "(", "client", ",", "'user'", ")", ":", "log", ".", "debug", "(", "'Client should have a user property'", ")", "return", "False", "request", ".", "user", "=", "client", ".", "user", "return", "True" ]
39.72973
21.297297
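A sketch of the client-side data this check expects: a client object that either declares allowed_grant_types or falls back to the four defaults, and that carries a user when it uses client_credentials. The class below is an assumption for illustration, not the provider's actual model.

# Hypothetical client model; only the attributes the validator inspects are shown.
class Client(object):
    def __init__(self, allowed_grant_types=None, user=None):
        if allowed_grant_types is not None:
            # Restricts the default grant types; absence means "allow the defaults".
            self.allowed_grant_types = allowed_grant_types
        self.user = user

# A web client restricted to the code flow: 'password' requests would be rejected.
web_client = Client(allowed_grant_types=['authorization_code', 'refresh_token'])

# A machine-to-machine client must carry a user for 'client_credentials'.
service_client = Client(allowed_grant_types=['client_credentials'], user='service-account')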
def add_files(self, repo, files): """ Add files to the repo """ rootdir = repo.rootdir for f in files: relativepath = f['relativepath'] sourcepath = f['localfullpath'] if sourcepath is None: # This can happen if the relative path is a URL continue # # Prepare the target path targetpath = os.path.join(rootdir, relativepath) try: os.makedirs(os.path.dirname(targetpath)) except: pass # print(sourcepath," => ", targetpath) print("Updating: {}".format(relativepath)) shutil.copyfile(sourcepath, targetpath) with cd(repo.rootdir): self._run(['add', relativepath])
[ "def", "add_files", "(", "self", ",", "repo", ",", "files", ")", ":", "rootdir", "=", "repo", ".", "rootdir", "for", "f", "in", "files", ":", "relativepath", "=", "f", "[", "'relativepath'", "]", "sourcepath", "=", "f", "[", "'localfullpath'", "]", "if", "sourcepath", "is", "None", ":", "# This can happen if the relative path is a URL", "continue", "#", "# Prepare the target path", "targetpath", "=", "os", ".", "path", ".", "join", "(", "rootdir", ",", "relativepath", ")", "try", ":", "os", ".", "makedirs", "(", "os", ".", "path", ".", "dirname", "(", "targetpath", ")", ")", "except", ":", "pass", "# print(sourcepath,\" => \", targetpath)", "print", "(", "\"Updating: {}\"", ".", "format", "(", "relativepath", ")", ")", "shutil", ".", "copyfile", "(", "sourcepath", ",", "targetpath", ")", "with", "cd", "(", "repo", ".", "rootdir", ")", ":", "self", ".", "_run", "(", "[", "'add'", ",", "relativepath", "]", ")" ]
36
11.272727
def tai(self, year=None, month=1, day=1, hour=0, minute=0, second=0.0, jd=None): """Build a `Time` from a TAI calendar date. Supply the International Atomic Time (TAI) as a proleptic Gregorian calendar date: >>> t = ts.tai(2014, 1, 18, 1, 35, 37.5) >>> t.tai 2456675.56640625 >>> t.tai_calendar() (2014, 1, 18, 1, 35, 37.5) """ if jd is not None: tai = jd else: tai = julian_date( _to_array(year), _to_array(month), _to_array(day), _to_array(hour), _to_array(minute), _to_array(second), ) return self.tai_jd(tai)
[ "def", "tai", "(", "self", ",", "year", "=", "None", ",", "month", "=", "1", ",", "day", "=", "1", ",", "hour", "=", "0", ",", "minute", "=", "0", ",", "second", "=", "0.0", ",", "jd", "=", "None", ")", ":", "if", "jd", "is", "not", "None", ":", "tai", "=", "jd", "else", ":", "tai", "=", "julian_date", "(", "_to_array", "(", "year", ")", ",", "_to_array", "(", "month", ")", ",", "_to_array", "(", "day", ")", ",", "_to_array", "(", "hour", ")", ",", "_to_array", "(", "minute", ")", ",", "_to_array", "(", "second", ")", ",", ")", "return", "self", ".", "tai_jd", "(", "tai", ")" ]
30.454545
19.545455
def median_interval(data, alpha=_alpha): """ Median including Bayesian credible interval. """ q = [100*alpha/2., 50, 100*(1-alpha/2.)] lo,med,hi = np.percentile(data,q) return interval(med,lo,hi)
[ "def", "median_interval", "(", "data", ",", "alpha", "=", "_alpha", ")", ":", "q", "=", "[", "100", "*", "alpha", "/", "2.", ",", "50", ",", "100", "*", "(", "1", "-", "alpha", "/", "2.", ")", "]", "lo", ",", "med", ",", "hi", "=", "np", ".", "percentile", "(", "data", ",", "q", ")", "return", "interval", "(", "med", ",", "lo", ",", "hi", ")" ]
30.428571
3.571429
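A quick numeric check of the percentile logic above, assuming interval() simply bundles (median, lo, hi); with alpha = 0.1 the 5th and 95th percentiles bound a 90% credible interval.

import numpy as np

def median_interval(data, alpha=0.1):
    # Same percentile construction as above, returning a plain tuple
    # in place of the module's interval() helper.
    q = [100 * alpha / 2., 50, 100 * (1 - alpha / 2.)]
    lo, med, hi = np.percentile(data, q)
    return med, lo, hi

data = np.random.normal(loc=5.0, scale=2.0, size=10000)
med, lo, hi = median_interval(data, alpha=0.1)
print("median=%.2f  90%% interval=(%.2f, %.2f)" % (med, lo, hi))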
def _compute(self, left_on, right_on): """Compare the data on the left and right. :meth:`BaseCompareFeature._compute` and :meth:`BaseCompareFeature.compute` differ on the accepted arguments. `_compute` accepts indexed data while `compute` accepts the record pairs and the DataFrames. Parameters ---------- left_on : (tuple of) pandas.Series Data to compare with `right_on` right_on : (tuple of) pandas.Series Data to compare with `left_on` Returns ------- pandas.Series, pandas.DataFrame, numpy.ndarray The result of comparing record pairs (the features). Can be a tuple with multiple pandas.Series, pandas.DataFrame, numpy.ndarray objects. """ result = self._compute_vectorized(*tuple(left_on + right_on)) return result
[ "def", "_compute", "(", "self", ",", "left_on", ",", "right_on", ")", ":", "result", "=", "self", ".", "_compute_vectorized", "(", "*", "tuple", "(", "left_on", "+", "right_on", ")", ")", "return", "result" ]
35.24
18.48
def _updateTargetFromNode(self): """ Applies the configuration to its target axis """ if self.axisNumber == X_AXIS: xMode, yMode = self.configValue, None else: xMode, yMode = None, self.configValue self.plotItem.setLogMode(x=xMode, y=yMode)
[ "def", "_updateTargetFromNode", "(", "self", ")", ":", "if", "self", ".", "axisNumber", "==", "X_AXIS", ":", "xMode", ",", "yMode", "=", "self", ".", "configValue", ",", "None", "else", ":", "xMode", ",", "yMode", "=", "None", ",", "self", ".", "configValue", "self", ".", "plotItem", ".", "setLogMode", "(", "x", "=", "xMode", ",", "y", "=", "yMode", ")" ]
33
11.777778
def draw(self, X, y, **kwargs): """Called from the fit method, this method creates a scatter plot that draws each instance as a class or target colored point, whose location is determined by the feature data set. If y is not None, then it draws a scatter plot where each class is in a different color. """ nan_locs = self.get_nan_locs() if y is None: x_, y_ = list(zip(*nan_locs)) self.ax.scatter(x_, y_, alpha=self.alpha, marker=self.marker, label=None) else: self.draw_multi_dispersion_chart(nan_locs)
[ "def", "draw", "(", "self", ",", "X", ",", "y", ",", "*", "*", "kwargs", ")", ":", "nan_locs", "=", "self", ".", "get_nan_locs", "(", ")", "if", "y", "is", "None", ":", "x_", ",", "y_", "=", "list", "(", "zip", "(", "*", "nan_locs", ")", ")", "self", ".", "ax", ".", "scatter", "(", "x_", ",", "y_", ",", "alpha", "=", "self", ".", "alpha", ",", "marker", "=", "self", ".", "marker", ",", "label", "=", "None", ")", "else", ":", "self", ".", "draw_multi_dispersion_chart", "(", "nan_locs", ")" ]
42.785714
18.285714
def get_state_map(meta_graph, state_ops, unsupported_state_ops, get_tensor_by_name): """Returns a map from tensor names to tensors that hold the state.""" state_map = {} for node in meta_graph.graph_def.node: if node.op in state_ops: tensor_name = node.name + ":0" tensor = get_tensor_by_name(tensor_name) num_outputs = len(tensor.op.outputs) if num_outputs != 1: raise ValueError("Stateful op %s has %d outputs, expected 1" % (node.op, num_outputs)) state_map[tensor_name] = tensor if node.op in unsupported_state_ops: raise ValueError("Unsupported stateful op: %s" % node.op) return state_map
[ "def", "get_state_map", "(", "meta_graph", ",", "state_ops", ",", "unsupported_state_ops", ",", "get_tensor_by_name", ")", ":", "state_map", "=", "{", "}", "for", "node", "in", "meta_graph", ".", "graph_def", ".", "node", ":", "if", "node", ".", "op", "in", "state_ops", ":", "tensor_name", "=", "node", ".", "name", "+", "\":0\"", "tensor", "=", "get_tensor_by_name", "(", "tensor_name", ")", "num_outputs", "=", "len", "(", "tensor", ".", "op", ".", "outputs", ")", "if", "num_outputs", "!=", "1", ":", "raise", "ValueError", "(", "\"Stateful op %s has %d outputs, expected 1\"", "%", "(", "node", ".", "op", ",", "num_outputs", ")", ")", "state_map", "[", "tensor_name", "]", "=", "tensor", "if", "node", ".", "op", "in", "unsupported_state_ops", ":", "raise", "ValueError", "(", "\"Unsupported stateful op: %s\"", "%", "node", ".", "op", ")", "return", "state_map" ]
42.625
10.8125
def getPrice(self): """The function obtains the analysis' price without VAT and without member discount :return: the price (without VAT or Member Discount) in decimal format """ analysis_request = self.aq_parent client = analysis_request.aq_parent if client.getBulkDiscount(): price = self.getBulkPrice() else: price = self.getField('Price').get(self) return price
[ "def", "getPrice", "(", "self", ")", ":", "analysis_request", "=", "self", ".", "aq_parent", "client", "=", "analysis_request", ".", "aq_parent", "if", "client", ".", "getBulkDiscount", "(", ")", ":", "price", "=", "self", ".", "getBulkPrice", "(", ")", "else", ":", "price", "=", "self", ".", "getField", "(", "'Price'", ")", ".", "get", "(", "self", ")", "return", "price" ]
37.416667
11.916667
def _long_from_raw(thehash): """Fold a digest supplied as a string into a single long integer.""" hashnum = 0 for h in thehash: hashnum <<= 8 hashnum |= ord(bytes([h])) return hashnum
[ "def", "_long_from_raw", "(", "thehash", ")", ":", "hashnum", "=", "0", "for", "h", "in", "thehash", ":", "hashnum", "<<=", "8", "hashnum", "|=", "ord", "(", "bytes", "(", "[", "h", "]", ")", ")", "return", "hashnum" ]
27.571429
14.714286
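A worked example of the byte folding above: each byte shifts the accumulator left by 8 bits and is OR-ed in, so b'\x01\x02' folds to 0x0102 = 258. The sketch below is a Python 3 equivalent; int.from_bytes performs the same big-endian fold.

def long_from_raw(thehash):
    # Fold a byte string into a single integer, highest byte first.
    hashnum = 0
    for h in thehash:
        hashnum <<= 8
        hashnum |= h        # iterating over bytes on Python 3 already yields ints
    return hashnum

digest = b"\x01\x02"
print(long_from_raw(digest))            # 258
print(int.from_bytes(digest, "big"))    # 258, the built-in equivalent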
def sys_info(fname=None, overwrite=False): """Get relevant system and debugging information Parameters ---------- fname : str | None Filename to dump info to. Use None to simply print. overwrite : bool If True, overwrite file (if it exists). Returns ------- out : str The system information as a string. """ if fname is not None and op.isfile(fname) and not overwrite: raise IOError('file exists, use overwrite=True to overwrite') out = '' try: # Nest all imports here to avoid any circular imports from ..app import use_app, Canvas from ..app.backends import BACKEND_NAMES from ..gloo import gl from ..testing import has_backend # get default app with use_log_level('warning'): app = use_app(call_reuse=False) # suppress messages out += 'Platform: %s\n' % platform.platform() out += 'Python: %s\n' % str(sys.version).replace('\n', ' ') out += 'Backend: %s\n' % app.backend_name for backend in BACKEND_NAMES: if backend.startswith('ipynb_'): continue with use_log_level('warning', print_msg=False): which = has_backend(backend, out=['which'])[1] out += '{0:<9} {1}\n'.format(backend + ':', which) out += '\n' # We need an OpenGL context to get GL info canvas = Canvas('Test', (10, 10), show=False, app=app) canvas._backend._vispy_set_current() out += 'GL version: %r\n' % (gl.glGetParameter(gl.GL_VERSION),) x_ = gl.GL_MAX_TEXTURE_SIZE out += 'MAX_TEXTURE_SIZE: %r\n' % (gl.glGetParameter(x_),) out += 'Extensions: %r\n' % (gl.glGetParameter(gl.GL_EXTENSIONS),) canvas.close() except Exception: # don't stop printing info out += '\nInfo-gathering error:\n%s' % traceback.format_exc() pass if fname is not None: with open(fname, 'w') as fid: fid.write(out) return out
[ "def", "sys_info", "(", "fname", "=", "None", ",", "overwrite", "=", "False", ")", ":", "if", "fname", "is", "not", "None", "and", "op", ".", "isfile", "(", "fname", ")", "and", "not", "overwrite", ":", "raise", "IOError", "(", "'file exists, use overwrite=True to overwrite'", ")", "out", "=", "''", "try", ":", "# Nest all imports here to avoid any circular imports", "from", ".", ".", "app", "import", "use_app", ",", "Canvas", "from", ".", ".", "app", ".", "backends", "import", "BACKEND_NAMES", "from", ".", ".", "gloo", "import", "gl", "from", ".", ".", "testing", "import", "has_backend", "# get default app", "with", "use_log_level", "(", "'warning'", ")", ":", "app", "=", "use_app", "(", "call_reuse", "=", "False", ")", "# suppress messages", "out", "+=", "'Platform: %s\\n'", "%", "platform", ".", "platform", "(", ")", "out", "+=", "'Python: %s\\n'", "%", "str", "(", "sys", ".", "version", ")", ".", "replace", "(", "'\\n'", ",", "' '", ")", "out", "+=", "'Backend: %s\\n'", "%", "app", ".", "backend_name", "for", "backend", "in", "BACKEND_NAMES", ":", "if", "backend", ".", "startswith", "(", "'ipynb_'", ")", ":", "continue", "with", "use_log_level", "(", "'warning'", ",", "print_msg", "=", "False", ")", ":", "which", "=", "has_backend", "(", "backend", ",", "out", "=", "[", "'which'", "]", ")", "[", "1", "]", "out", "+=", "'{0:<9} {1}\\n'", ".", "format", "(", "backend", "+", "':'", ",", "which", ")", "out", "+=", "'\\n'", "# We need an OpenGL context to get GL info", "canvas", "=", "Canvas", "(", "'Test'", ",", "(", "10", ",", "10", ")", ",", "show", "=", "False", ",", "app", "=", "app", ")", "canvas", ".", "_backend", ".", "_vispy_set_current", "(", ")", "out", "+=", "'GL version: %r\\n'", "%", "(", "gl", ".", "glGetParameter", "(", "gl", ".", "GL_VERSION", ")", ",", ")", "x_", "=", "gl", ".", "GL_MAX_TEXTURE_SIZE", "out", "+=", "'MAX_TEXTURE_SIZE: %r\\n'", "%", "(", "gl", ".", "glGetParameter", "(", "x_", ")", ",", ")", "out", "+=", "'Extensions: %r\\n'", "%", "(", "gl", ".", "glGetParameter", "(", "gl", ".", "GL_EXTENSIONS", ")", ",", ")", "canvas", ".", "close", "(", ")", "except", "Exception", ":", "# don't stop printing info", "out", "+=", "'\\nInfo-gathering error:\\n%s'", "%", "traceback", ".", "format_exc", "(", ")", "pass", "if", "fname", "is", "not", "None", ":", "with", "open", "(", "fname", ",", "'w'", ")", "as", "fid", ":", "fid", ".", "write", "(", "out", ")", "return", "out" ]
37.566038
18.037736
def BBI(Series, N1, N2, N3, N4): 'Bull and Bear Index (BBI)' bbi = (MA(Series, N1) + MA(Series, N2) + MA(Series, N3) + MA(Series, N4)) / 4 DICT = {'BBI': bbi} VAR = pd.DataFrame(DICT) return VAR
[ "def", "BBI", "(", "Series", ",", "N1", ",", "N2", ",", "N3", ",", "N4", ")", ":", "bbi", "=", "(", "MA", "(", "Series", ",", "N1", ")", "+", "MA", "(", "Series", ",", "N2", ")", "+", "MA", "(", "Series", ",", "N3", ")", "+", "MA", "(", "Series", ",", "N4", ")", ")", "/", "4", "DICT", "=", "{", "'BBI'", ":", "bbi", "}", "VAR", "=", "pd", ".", "DataFrame", "(", "DICT", ")", "return", "VAR" ]
24.75
18
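A standalone sketch of the BBI calculation on a pandas Series, assuming MA() is a simple moving average (a pandas rolling mean). The common parameter set (3, 6, 12, 24) is an assumption for the demo, not something the function itself mandates.

import pandas as pd

def bbi(close, n1=3, n2=6, n3=12, n4=24):
    # Average of four simple moving averages, as in BBI() above.
    mas = [close.rolling(n).mean() for n in (n1, n2, n3, n4)]
    return pd.DataFrame({'BBI': sum(mas) / 4})

close = pd.Series(range(1, 31), dtype=float)   # toy price series
print(bbi(close).tail())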
def update_media_assetfile(access_token, parent_asset_id, asset_id, content_length, name): '''Update Media Service Asset File. Args: access_token (str): A valid Azure authentication token. parent_asset_id (str): A Media Service Asset Parent Asset ID. asset_id (str): A Media Service Asset Asset ID. content_length (str): A Media Service Asset Content Length. name (str): A Media Service Asset name. Returns: HTTP response. JSON body. ''' path = '/Files' full_path = ''.join([path, "('", asset_id, "')"]) full_path_encoded = urllib.parse.quote(full_path, safe='') endpoint = ''.join([ams_rest_endpoint, full_path_encoded]) body = '{ \ "ContentFileSize": "' + str(content_length) + '", \ "Id": "' + asset_id + '", \ "MimeType": "video/mp4", \ "Name": "' + name + '", \ "ParentAssetId": "' + parent_asset_id + '" \ }' return do_ams_patch(endpoint, full_path_encoded, body, access_token)
[ "def", "update_media_assetfile", "(", "access_token", ",", "parent_asset_id", ",", "asset_id", ",", "content_length", ",", "name", ")", ":", "path", "=", "'/Files'", "full_path", "=", "''", ".", "join", "(", "[", "path", ",", "\"('\"", ",", "asset_id", ",", "\"')\"", "]", ")", "full_path_encoded", "=", "urllib", ".", "parse", ".", "quote", "(", "full_path", ",", "safe", "=", "''", ")", "endpoint", "=", "''", ".", "join", "(", "[", "ams_rest_endpoint", ",", "full_path_encoded", "]", ")", "body", "=", "'{ \\\n\t\t\"ContentFileSize\": \"'", "+", "str", "(", "content_length", ")", "+", "'\", \\\n\t\t\"Id\": \"'", "+", "asset_id", "+", "'\", \\\n\t\t\"MimeType\": \"video/mp4\", \\\n\t\t\"Name\": \"'", "+", "name", "+", "'\", \\\n\t\t\"ParentAssetId\": \"'", "+", "parent_asset_id", "+", "'\" \\\n\t}'", "return", "do_ams_patch", "(", "endpoint", ",", "full_path_encoded", ",", "body", ",", "access_token", ")" ]
38.4
22.32
def _str2datetime(self, datetime_str): """ Parse a datetime from a string. If no template matches your string, please go to https://github.com/MacHu-GWU/rolex-project/issues and submit your datetime string; I'll update the templates ASAP. This method is faster than :meth:`dateutil.parser.parse`. :param datetime_str: a string representing a datetime :type datetime_str: str :return: a datetime object The default template is tried first; if it fails, every template is tried. Once a template succeeds, it is saved as the new default, so when many strings share a single pattern only the first attempt is slow and the rest are very fast. As a safeguard against the template library failing, the parser permanently switches to :meth:`dateutil.parser.parse` after a failure; call :meth:`Parser.reset()` to restore the default behavior. """ # try default datetime template try: a_datetime = datetime.strptime( datetime_str, self._default_datetime_template) return a_datetime except: pass # try every datetime templates for template in datetime_template_list: try: a_datetime = datetime.strptime(datetime_str, template) self._default_datetime_template = template return a_datetime except: pass # raise error a_datetime = parse(datetime_str) self.str2datetime = parse return a_datetime
[ "def", "_str2datetime", "(", "self", ",", "datetime_str", ")", ":", "# try default datetime template", "try", ":", "a_datetime", "=", "datetime", ".", "strptime", "(", "datetime_str", ",", "self", ".", "_default_datetime_template", ")", "return", "a_datetime", "except", ":", "pass", "# try every datetime templates", "for", "template", "in", "datetime_template_list", ":", "try", ":", "a_datetime", "=", "datetime", ".", "strptime", "(", "datetime_str", ",", "template", ")", "self", ".", "_default_datetime_template", "=", "template", "return", "a_datetime", "except", ":", "pass", "# raise error", "a_datetime", "=", "parse", "(", "datetime_str", ")", "self", ".", "str2datetime", "=", "parse", "return", "a_datetime" ]
30.425532
18.765957
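A sketch of the template-caching idea: remember whichever strptime format worked last, so a homogeneous batch of strings pays the search cost only once. The two formats listed are assumptions; the real template list is much longer.

from datetime import datetime

TEMPLATES = ["%Y-%m-%d %H:%M:%S", "%m/%d/%Y"]   # assumed subset of the real list
_default = TEMPLATES[0]

def str2datetime(s):
    global _default
    try:                                    # fast path: last successful template
        return datetime.strptime(s, _default)
    except ValueError:
        pass
    for template in TEMPLATES:              # slow path: scan and re-cache
        try:
            value = datetime.strptime(s, template)
            _default = template
            return value
        except ValueError:
            pass
    raise ValueError("no template matches %r" % s)

print(str2datetime("2014-01-18 01:35:37"))
print(str2datetime("01/18/2014"))           # switches the cached default
print(str2datetime("01/19/2014"))           # now hits the fast path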
def getBaseUrl(self): '''Return a file: URL that probably points to the basedir. This is used as a halfway sane default when the base URL is not provided; not perfect, but should work in most cases.''' components = util.splitpath(os.path.abspath(self.basepath)) url = '/'.join([url_quote(component, '') for component in components]) return 'file:///' + url + '/'
[ "def", "getBaseUrl", "(", "self", ")", ":", "components", "=", "util", ".", "splitpath", "(", "os", ".", "path", ".", "abspath", "(", "self", ".", "basepath", ")", ")", "url", "=", "'/'", ".", "join", "(", "[", "url_quote", "(", "component", ",", "''", ")", "for", "component", "in", "components", "]", ")", "return", "'file:///'", "+", "url", "+", "'/'" ]
50.5
26
def get_alert(self, alert): """ Receives an alert number as an argument and returns the prediction for that alert if it is available. If not, the function will return None. """ if alert > self.alerts_count() or self.alerts_count() is None: return None else: return self.get()[alert-1]
[ "def", "get_alert", "(", "self", ",", "alert", ")", ":", "if", "alert", ">", "self", ".", "alerts_count", "(", ")", "or", "self", ".", "alerts_count", "(", ")", "is", "None", ":", "return", "None", "else", ":", "return", "self", ".", "get", "(", ")", "[", "alert", "-", "1", "]" ]
36.777778
16.333333
def constant_compare(a, b): """ Compares two byte strings in constant time to see if they are equal :param a: The first byte string :param b: The second byte string :return: A boolean if the two byte strings are equal """ if not isinstance(a, byte_cls): raise TypeError(pretty_message( ''' a must be a byte string, not %s ''', type_name(a) )) if not isinstance(b, byte_cls): raise TypeError(pretty_message( ''' b must be a byte string, not %s ''', type_name(b) )) if len(a) != len(b): return False if sys.version_info < (3,): a = [ord(char) for char in a] b = [ord(char) for char in b] result = 0 for x, y in zip(a, b): result |= x ^ y return result == 0
[ "def", "constant_compare", "(", "a", ",", "b", ")", ":", "if", "not", "isinstance", "(", "a", ",", "byte_cls", ")", ":", "raise", "TypeError", "(", "pretty_message", "(", "'''\n a must be a byte string, not %s\n '''", ",", "type_name", "(", "a", ")", ")", ")", "if", "not", "isinstance", "(", "b", ",", "byte_cls", ")", ":", "raise", "TypeError", "(", "pretty_message", "(", "'''\n b must be a byte string, not %s\n '''", ",", "type_name", "(", "b", ")", ")", ")", "if", "len", "(", "a", ")", "!=", "len", "(", "b", ")", ":", "return", "False", "if", "sys", ".", "version_info", "<", "(", "3", ",", ")", ":", "a", "=", "[", "ord", "(", "char", ")", "for", "char", "in", "a", "]", "b", "=", "[", "ord", "(", "char", ")", "for", "char", "in", "b", "]", "result", "=", "0", "for", "x", ",", "y", "in", "zip", "(", "a", ",", "b", ")", ":", "result", "|=", "x", "^", "y", "return", "result", "==", "0" ]
20.926829
19.804878
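A short usage note: the XOR-accumulate loop above touches every byte regardless of where the first mismatch is, so its timing leaks far less about the secret than an early-exit ==. The standard library's hmac.compare_digest gives the same guarantee.

import hmac

secret = b"expected-token"

# Early-exit comparison: time taken depends on how many leading bytes match.
print(secret == b"expectew-token")

# Constant-time comparisons: the same amount of work either way.
print(hmac.compare_digest(secret, b"expectew-token"))   # False
print(hmac.compare_digest(secret, b"expected-token"))   # True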
def update(self): """ Update the content of the `iana-domains-db` file. """ if not PyFunceble.CONFIGURATION["quiet"]: # * The quiet mode is not activated. # We print on screen what we are doing. print("Update of iana-domains-db", end=" ") # We loop through the line of the iana website. for extension, referer in self._extensions(): if extension not in self.iana_db or self.iana_db[extension] != referer: # We add the extension to the database. self.iana_db[extension] = referer # We save the content of the constructed database. Dict(self.iana_db).to_json(self.destination) if not PyFunceble.CONFIGURATION["quiet"]: # The quiet mode is not activated. # We indicate that the work is done without any issue. print(PyFunceble.INTERN["done"])
[ "def", "update", "(", "self", ")", ":", "if", "not", "PyFunceble", ".", "CONFIGURATION", "[", "\"quiet\"", "]", ":", "# * The quiet mode is not activated.", "# We print on screen what we are doing.", "print", "(", "\"Update of iana-domains-db\"", ",", "end", "=", "\" \"", ")", "# We loop through the line of the iana website.", "for", "extension", ",", "referer", "in", "self", ".", "_extensions", "(", ")", ":", "if", "extension", "not", "in", "self", ".", "iana_db", "or", "self", ".", "iana_db", "[", "extension", "]", "!=", "referer", ":", "# We add the extension to the databae.", "self", ".", "iana_db", "[", "extension", "]", "=", "referer", "# We save the content of the constructed database.", "Dict", "(", "self", ".", "iana_db", ")", ".", "to_json", "(", "self", ".", "destination", ")", "if", "not", "PyFunceble", ".", "CONFIGURATION", "[", "\"quiet\"", "]", ":", "# The quiet mode is not activated.", "# We indicate that the work is done without any issue.", "print", "(", "PyFunceble", ".", "INTERN", "[", "\"done\"", "]", ")" ]
35.538462
21.076923
def remote_image_request(self, image_url, params=None): """ Send an image for classification. The image will be retrieved from the URL specified. The params parameter is optional. On success this method will immediately return job information. Its status will initially be :py:data:`cloudsight.STATUS_NOT_COMPLETED` as it usually takes 6-12 seconds for the server to process an image. In order to retrieve the annotation data, you need to keep updating the job status using the :py:meth:`cloudsight.API.image_response` method until the status changes. You may also use the :py:meth:`cloudsight.API.wait` method which does this automatically. :param image_url: Image URL. :param params: Additional parameters for CloudSight API. """ data = self._init_data(params) data['image_request[remote_image_url]'] = image_url response = requests.post(REQUESTS_URL, headers={ 'Authorization': self.auth.authorize('POST', REQUESTS_URL, data), 'User-Agent': USER_AGENT, }, data=data) return self._unwrap_error(response)
[ "def", "remote_image_request", "(", "self", ",", "image_url", ",", "params", "=", "None", ")", ":", "data", "=", "self", ".", "_init_data", "(", "params", ")", "data", "[", "'image_request[remote_image_url]'", "]", "=", "image_url", "response", "=", "requests", ".", "post", "(", "REQUESTS_URL", ",", "headers", "=", "{", "'Authorization'", ":", "self", ".", "auth", ".", "authorize", "(", "'POST'", ",", "REQUESTS_URL", ",", "data", ")", ",", "'User-Agent'", ":", "USER_AGENT", ",", "}", ",", "data", "=", "data", ")", "return", "self", ".", "_unwrap_error", "(", "response", ")" ]
50.521739
21.73913
def prepare_working_directory(job, submission_path, validator_path): ''' Based on two downloaded files in the working directory, the student submission and the validation package, the working directory is prepared. We unpack student submission first, so that teacher files overwrite them in case. When the student submission is a single directory, we change the working directory and go directly into it, before dealing with the validator stuff. If unrecoverable errors happen, such as an empty student archive, a JobException is raised. ''' # Safeguard for fail-fast in disk full scenarios on the executor dusage = shutil.disk_usage(job.working_dir) if dusage.free < 1024 * 1024 * 50: # 50 MB info_student = "Internal error with the validator. Please contact your course responsible." info_tutor = "Error: Execution cancelled, less then 50MB of disk space free on the executor." logger.error(info_tutor) raise JobException(info_student=info_student, info_tutor=info_tutor) submission_fname = os.path.basename(submission_path) validator_fname = os.path.basename(validator_path) # Un-archive student submission single_dir, did_unpack = unpack_if_needed(job.working_dir, submission_path) job.student_files = os.listdir(job.working_dir) if did_unpack: job.student_files.remove(submission_fname) # Fail automatically on empty student submissions if len(job.student_files) is 0: info_student = "Your compressed upload is empty - no files in there." info_tutor = "Submission archive file has no content." logger.error(info_tutor) raise JobException(info_student=info_student, info_tutor=info_tutor) # Handle student archives containing a single directory with all data if single_dir: logger.warning( "The submission archive contains only one directory. Changing working directory.") # Set new working directory job.working_dir = job.working_dir + single_dir + os.sep # Move validator package there shutil.move(validator_path, job.working_dir) validator_path = job.working_dir + validator_fname # Re-scan for list of student files job.student_files = os.listdir(job.working_dir) # The working directory now only contains the student data and the downloaded # validator package. # Update the file list accordingly. job.student_files.remove(validator_fname) logger.debug("Student files: {0}".format(job.student_files)) # Unpack validator package single_dir, did_unpack = unpack_if_needed(job.working_dir, validator_path) if single_dir: info_student = "Internal error with the validator. Please contact your course responsible." info_tutor = "Error: Directories are not allowed in the validator archive." logger.error(info_tutor) raise JobException(info_student=info_student, info_tutor=info_tutor) if not os.path.exists(job.validator_script_name): if did_unpack: # The download was an archive, but the validator was not inside. # This is a failure of the tutor. info_student = "Internal error with the validator. Please contact your course responsible." info_tutor = "Error: Missing validator.py in the validator archive." logger.error(info_tutor) raise JobException(info_student=info_student, info_tutor=info_tutor) else: # The download is already the script, but has the wrong name logger.warning("Renaming {0} to {1}.".format( validator_path, job.validator_script_name)) shutil.move(validator_path, job.validator_script_name)
[ "def", "prepare_working_directory", "(", "job", ",", "submission_path", ",", "validator_path", ")", ":", "# Safeguard for fail-fast in disk full scenarios on the executor", "dusage", "=", "shutil", ".", "disk_usage", "(", "job", ".", "working_dir", ")", "if", "dusage", ".", "free", "<", "1024", "*", "1024", "*", "50", ":", "# 50 MB", "info_student", "=", "\"Internal error with the validator. Please contact your course responsible.\"", "info_tutor", "=", "\"Error: Execution cancelled, less then 50MB of disk space free on the executor.\"", "logger", ".", "error", "(", "info_tutor", ")", "raise", "JobException", "(", "info_student", "=", "info_student", ",", "info_tutor", "=", "info_tutor", ")", "submission_fname", "=", "os", ".", "path", ".", "basename", "(", "submission_path", ")", "validator_fname", "=", "os", ".", "path", ".", "basename", "(", "validator_path", ")", "# Un-archive student submission", "single_dir", ",", "did_unpack", "=", "unpack_if_needed", "(", "job", ".", "working_dir", ",", "submission_path", ")", "job", ".", "student_files", "=", "os", ".", "listdir", "(", "job", ".", "working_dir", ")", "if", "did_unpack", ":", "job", ".", "student_files", ".", "remove", "(", "submission_fname", ")", "# Fail automatically on empty student submissions", "if", "len", "(", "job", ".", "student_files", ")", "is", "0", ":", "info_student", "=", "\"Your compressed upload is empty - no files in there.\"", "info_tutor", "=", "\"Submission archive file has no content.\"", "logger", ".", "error", "(", "info_tutor", ")", "raise", "JobException", "(", "info_student", "=", "info_student", ",", "info_tutor", "=", "info_tutor", ")", "# Handle student archives containing a single directory with all data", "if", "single_dir", ":", "logger", ".", "warning", "(", "\"The submission archive contains only one directory. Changing working directory.\"", ")", "# Set new working directory", "job", ".", "working_dir", "=", "job", ".", "working_dir", "+", "single_dir", "+", "os", ".", "sep", "# Move validator package there", "shutil", ".", "move", "(", "validator_path", ",", "job", ".", "working_dir", ")", "validator_path", "=", "job", ".", "working_dir", "+", "validator_fname", "# Re-scan for list of student files", "job", ".", "student_files", "=", "os", ".", "listdir", "(", "job", ".", "working_dir", ")", "# The working directory now only contains the student data and the downloaded", "# validator package.", "# Update the file list accordingly.", "job", ".", "student_files", ".", "remove", "(", "validator_fname", ")", "logger", ".", "debug", "(", "\"Student files: {0}\"", ".", "format", "(", "job", ".", "student_files", ")", ")", "# Unpack validator package", "single_dir", ",", "did_unpack", "=", "unpack_if_needed", "(", "job", ".", "working_dir", ",", "validator_path", ")", "if", "single_dir", ":", "info_student", "=", "\"Internal error with the validator. Please contact your course responsible.\"", "info_tutor", "=", "\"Error: Directories are not allowed in the validator archive.\"", "logger", ".", "error", "(", "info_tutor", ")", "raise", "JobException", "(", "info_student", "=", "info_student", ",", "info_tutor", "=", "info_tutor", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "job", ".", "validator_script_name", ")", ":", "if", "did_unpack", ":", "# The download was an archive, but the validator was not inside.", "# This is a failure of the tutor.", "info_student", "=", "\"Internal error with the validator. 
Please contact your course responsible.\"", "info_tutor", "=", "\"Error: Missing validator.py in the validator archive.\"", "logger", ".", "error", "(", "info_tutor", ")", "raise", "JobException", "(", "info_student", "=", "info_student", ",", "info_tutor", "=", "info_tutor", ")", "else", ":", "# The download is already the script, but has the wrong name", "logger", ".", "warning", "(", "\"Renaming {0} to {1}.\"", ".", "format", "(", "validator_path", ",", "job", ".", "validator_script_name", ")", ")", "shutil", ".", "move", "(", "validator_path", ",", "job", ".", "validator_script_name", ")" ]
46.160494
24.975309
def get_repository_form_for_update(self, repository_id=None): """Gets the repository form for updating an existing repository. A new repository form should be requested for each update transaction. arg: repository_id (osid.id.Id): the ``Id`` of the ``Repository`` return: (osid.repository.RepositoryForm) - the repository form raise: NotFound - ``repository_id`` is not found raise: NullArgument - ``repository_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.* """ # Implemented from awsosid template for - # osid.resource.BinAdminSession.get_bin_form_for_update_template if not self._can('update'): raise PermissionDenied() else: return self._provider_session.get_repository_form_for_update(repository_id)
[ "def", "get_repository_form_for_update", "(", "self", ",", "repository_id", "=", "None", ")", ":", "# Implemented from awsosid template for -", "# osid.resource.BinAdminSession.get_bin_form_for_update_template", "if", "not", "self", ".", "_can", "(", "'update'", ")", ":", "raise", "PermissionDenied", "(", ")", "else", ":", "return", "self", ".", "_provider_session", ".", "get_repository_form_for_update", "(", "repository_id", ")" ]
44.636364
21.409091
def _getSyntaxByFirstLine(self, firstLine): """Get syntax by first line of the file """ for pattern, xmlFileName in self._firstLineToXmlFileName.items(): if fnmatch.fnmatch(firstLine, pattern): return self._getSyntaxByXmlFileName(xmlFileName) else: raise KeyError("No syntax for " + firstLine)
[ "def", "_getSyntaxByFirstLine", "(", "self", ",", "firstLine", ")", ":", "for", "pattern", ",", "xmlFileName", "in", "self", ".", "_firstLineToXmlFileName", ".", "items", "(", ")", ":", "if", "fnmatch", ".", "fnmatch", "(", "firstLine", ",", "pattern", ")", ":", "return", "self", ".", "_getSyntaxByXmlFileName", "(", "xmlFileName", ")", "else", ":", "raise", "KeyError", "(", "\"No syntax for \"", "+", "firstLine", ")" ]
44.75
14.25
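An illustration of the first-line matching above: the pattern table maps shell-style globs (via fnmatch) over a file's first line to a syntax-definition file. The two patterns shown are assumptions for the demo, not the editor's actual table.

import fnmatch

# Hypothetical first-line patterns -> syntax definition file.
FIRST_LINE_TO_XML = {
    "#!*/bin/bash*": "bash.xml",
    "<?xml*": "xml.xml",
}

def syntax_for_first_line(first_line):
    for pattern, xml_file in FIRST_LINE_TO_XML.items():
        if fnmatch.fnmatch(first_line, pattern):
            return xml_file
    raise KeyError("No syntax for " + first_line)

print(syntax_for_first_line("#!/usr/bin/bash"))   # bash.xml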
def build_block(self): """ Assembles the candidate block into its finalized form for broadcast. """ header_bytes = self.block_header.SerializeToString() block = Block(header=header_bytes, header_signature=self._header_signature) block.batches.extend(self.batches) return block
[ "def", "build_block", "(", "self", ")", ":", "header_bytes", "=", "self", ".", "block_header", ".", "SerializeToString", "(", ")", "block", "=", "Block", "(", "header", "=", "header_bytes", ",", "header_signature", "=", "self", ".", "_header_signature", ")", "block", ".", "batches", ".", "extend", "(", "self", ".", "batches", ")", "return", "block" ]
38.555556
13.444444
def UpdateFlow(self, client_id, flow_id, flow_obj=db.Database.unchanged, flow_state=db.Database.unchanged, client_crash_info=db.Database.unchanged, pending_termination=db.Database.unchanged, processing_on=db.Database.unchanged, processing_since=db.Database.unchanged, processing_deadline=db.Database.unchanged): """Updates flow objects in the database.""" try: flow = self.flows[(client_id, flow_id)] except KeyError: raise db.UnknownFlowError(client_id, flow_id) if flow_obj != db.Database.unchanged: self.flows[(client_id, flow_id)] = flow_obj flow = flow_obj if flow_state != db.Database.unchanged: flow.flow_state = flow_state if client_crash_info != db.Database.unchanged: flow.client_crash_info = client_crash_info if pending_termination != db.Database.unchanged: flow.pending_termination = pending_termination if processing_on != db.Database.unchanged: flow.processing_on = processing_on if processing_since != db.Database.unchanged: flow.processing_since = processing_since if processing_deadline != db.Database.unchanged: flow.processing_deadline = processing_deadline flow.last_update_time = rdfvalue.RDFDatetime.Now()
[ "def", "UpdateFlow", "(", "self", ",", "client_id", ",", "flow_id", ",", "flow_obj", "=", "db", ".", "Database", ".", "unchanged", ",", "flow_state", "=", "db", ".", "Database", ".", "unchanged", ",", "client_crash_info", "=", "db", ".", "Database", ".", "unchanged", ",", "pending_termination", "=", "db", ".", "Database", ".", "unchanged", ",", "processing_on", "=", "db", ".", "Database", ".", "unchanged", ",", "processing_since", "=", "db", ".", "Database", ".", "unchanged", ",", "processing_deadline", "=", "db", ".", "Database", ".", "unchanged", ")", ":", "try", ":", "flow", "=", "self", ".", "flows", "[", "(", "client_id", ",", "flow_id", ")", "]", "except", "KeyError", ":", "raise", "db", ".", "UnknownFlowError", "(", "client_id", ",", "flow_id", ")", "if", "flow_obj", "!=", "db", ".", "Database", ".", "unchanged", ":", "self", ".", "flows", "[", "(", "client_id", ",", "flow_id", ")", "]", "=", "flow_obj", "flow", "=", "flow_obj", "if", "flow_state", "!=", "db", ".", "Database", ".", "unchanged", ":", "flow", ".", "flow_state", "=", "flow_state", "if", "client_crash_info", "!=", "db", ".", "Database", ".", "unchanged", ":", "flow", ".", "client_crash_info", "=", "client_crash_info", "if", "pending_termination", "!=", "db", ".", "Database", ".", "unchanged", ":", "flow", ".", "pending_termination", "=", "pending_termination", "if", "processing_on", "!=", "db", ".", "Database", ".", "unchanged", ":", "flow", ".", "processing_on", "=", "processing_on", "if", "processing_since", "!=", "db", ".", "Database", ".", "unchanged", ":", "flow", ".", "processing_since", "=", "processing_since", "if", "processing_deadline", "!=", "db", ".", "Database", ".", "unchanged", ":", "flow", ".", "processing_deadline", "=", "processing_deadline", "flow", ".", "last_update_time", "=", "rdfvalue", ".", "RDFDatetime", ".", "Now", "(", ")" ]
39.852941
14.058824
def _run_pass(self): """Read lines from a file and performs a callback against them""" while True: try: data = self._file.read(4096) except IOError, e: if e.errno == errno.ESTALE: self.active = False return False lines = self._buffer_extract(data) if not lines: # Before returning, check if an event (maybe partial) is waiting for too long. if self._current_event and time.time() - self._last_activity > 1: event = '\n'.join(self._current_event) self._current_event.clear() self._callback_wrapper([event]) break self._last_activity = time.time() if self._multiline_regex_after or self._multiline_regex_before: # Multiline is enabled for this file. events = multiline_merge( lines, self._current_event, self._multiline_regex_after, self._multiline_regex_before) else: events = lines if events: self._callback_wrapper(events) if self._sincedb_path: current_line_count = len(lines) self._sincedb_update_position(lines=current_line_count) self._sincedb_update_position()
[ "def", "_run_pass", "(", "self", ")", ":", "while", "True", ":", "try", ":", "data", "=", "self", ".", "_file", ".", "read", "(", "4096", ")", "except", "IOError", ",", "e", ":", "if", "e", ".", "errno", "==", "errno", ".", "ESTALE", ":", "self", ".", "active", "=", "False", "return", "False", "lines", "=", "self", ".", "_buffer_extract", "(", "data", ")", "if", "not", "lines", ":", "# Before returning, check if an event (maybe partial) is waiting for too long.", "if", "self", ".", "_current_event", "and", "time", ".", "time", "(", ")", "-", "self", ".", "_last_activity", ">", "1", ":", "event", "=", "'\\n'", ".", "join", "(", "self", ".", "_current_event", ")", "self", ".", "_current_event", ".", "clear", "(", ")", "self", ".", "_callback_wrapper", "(", "[", "event", "]", ")", "break", "self", ".", "_last_activity", "=", "time", ".", "time", "(", ")", "if", "self", ".", "_multiline_regex_after", "or", "self", ".", "_multiline_regex_before", ":", "# Multiline is enabled for this file.", "events", "=", "multiline_merge", "(", "lines", ",", "self", ".", "_current_event", ",", "self", ".", "_multiline_regex_after", ",", "self", ".", "_multiline_regex_before", ")", "else", ":", "events", "=", "lines", "if", "events", ":", "self", ".", "_callback_wrapper", "(", "events", ")", "if", "self", ".", "_sincedb_path", ":", "current_line_count", "=", "len", "(", "lines", ")", "self", ".", "_sincedb_update_position", "(", "lines", "=", "current_line_count", ")", "self", ".", "_sincedb_update_position", "(", ")" ]
35.95
18.425
def getOutputElementCount(self, name): """ Returns the size of the output array """ if name in ["activeCells", "learnableCells", "sensoryAssociatedCells"]: return self.cellCount * self.moduleCount else: raise Exception("Invalid output name specified: " + name)
[ "def", "getOutputElementCount", "(", "self", ",", "name", ")", ":", "if", "name", "in", "[", "\"activeCells\"", ",", "\"learnableCells\"", ",", "\"sensoryAssociatedCells\"", "]", ":", "return", "self", ".", "cellCount", "*", "self", ".", "moduleCount", "else", ":", "raise", "Exception", "(", "\"Invalid output name specified: \"", "+", "name", ")" ]
35.625
12.125
def get_default_config_help(self): """ Return help text for collector configuration. """ config_help = super(MemoryLxcCollector, self).get_default_config_help() config_help.update({ "sys_path": "Defaults to '/sys/fs/cgroup/lxc'", }) return config_help
[ "def", "get_default_config_help", "(", "self", ")", ":", "config_help", "=", "super", "(", "MemoryLxcCollector", ",", "self", ")", ".", "get_default_config_help", "(", ")", "config_help", ".", "update", "(", "{", "\"sys_path\"", ":", "\"Defaults to '/sys/fs/cgroup/lxc'\"", ",", "}", ")", "return", "config_help" ]
34.555556
14.777778