_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q260300
get
validation
def get(name, default=None, allow_default=True):
    """ Shortcut method for getting a setting value.

    :param str name: Setting key name.
    :param default: Default value of setting if it's not explicitly
      set. Defaults to `None`
    :param bool allow_default: If true, use the parameter default as
      default if the key is not set, else raise
      :exc:`KeyError`. Defaults to `True`
    :raises: :exc:`KeyError` if allow_default is false and the setting is
      not set.
    """
    # Delegates to the (singleton) Config instance's get()
    return Config().get(name, default, allow_default=allow_default)
python
{ "resource": "" }
q260301
env
validation
def env(key, default):
    """
    Resolve a setting: environment first, then pyconfig, then `default`.

    `key` is looked up verbatim in ``os.environ``; the pyconfig lookup uses
    the dotted lower-case form of the same name.
    """
    raw = os.environ.get(key, None)
    dotted = key.lower().replace('_', '.')
    if raw is not None:
        log.info(' %s = %r', dotted, raw)
        return raw
    configured = get(dotted)
    if configured is not None:
        return configured
    return default
python
{ "resource": "" }
q260302
env_key
validation
def env_key(key, default):
    """
    Look up `key` in the environment, falling back to `default`.

    The environment name is `key` upper-cased with dots replaced by
    underscores:  my.database.host => MY_DATABASE_HOST
    """
    lookup = key.upper().replace('.', '_')
    return os.environ.get(lookup, default)
python
{ "resource": "" }
q260303
Config.set
validation
def set(self, name, value):
    """
    Change a setting value, holding the mutation lock while writing.

    :param str name: Setting key name.
    :param value: Setting value.
    """
    case_sensitive = self.settings.get('pyconfig.case_sensitive', False)
    if not case_sensitive:
        name = name.lower()
    log.info(" %s = %s", name, repr(value))
    # Acquire our lock to change the config
    with self.mut_lock:
        self.settings[name] = value
python
{ "resource": "" }
q260304
Config._update
validation
def _update(self, conf_dict, base_name=None):
    """
    Updates the current configuration with the values in `conf_dict`.

    :param dict conf_dict: Dictionary of key value settings.
    :param str base_name: Base namespace for setting keys.
    """
    for name in conf_dict:
        # Skip private names
        if name.startswith('_'):
            continue
        value = conf_dict[name]
        # Skip Namespace if it's imported
        if value is Namespace:
            continue
        # Use a base namespace
        if base_name:
            name = base_name + '.' + name
        if isinstance(value, Namespace):
            # NOTE(review): `name` and `value` are deliberately rebound by
            # this inner loop; Namespace.iteritems(name) appears to yield
            # fully-qualified key/value pairs — confirm against Namespace.
            for name, value in value.iteritems(name):
                self.set(name, value)
        # Automatically call any functions in the settings module, and if
        # they return a value other than None, that value becomes a setting
        elif callable(value):
            value = value()
            if value is not None:
                self.set(name, value)
        else:
            self.set(name, value)
python
{ "resource": "" }
q260305
Config.load
validation
def load(self, clear=False):
    """
    Loads all the config plugin modules to build a working configuration.

    If there is a ``localconfig`` module on the python path, it will be
    loaded last, overriding other settings.

    :param bool clear: Clear out the previous settings before loading
    """
    if clear:
        self.settings = {}
    defer = []
    # Load all config plugins registered under the 'pyconfig' entry point
    for conf in pkg_resources.iter_entry_points('pyconfig'):
        if conf.attrs:
            raise RuntimeError("config must be a module")
        mod_name = conf.module_name
        # Entry point named 'any' means "no namespace prefix"
        base_name = conf.name if conf.name != 'any' else None
        log.info("Loading module '%s'", mod_name)
        mod_dict = runpy.run_module(mod_name)
        # If this module wants to be deferred, save it for later
        if mod_dict.get('deferred', None) is deferred:
            log.info("Deferring module '%s'", mod_name)
            mod_dict.pop('deferred')
            defer.append((mod_name, base_name, mod_dict))
            continue
        self._update(mod_dict, base_name)
    # Load deferred modules (after all non-deferred plugins)
    for mod_name, base_name, mod_dict in defer:
        log.info("Loading deferred module '%s'", mod_name)
        self._update(mod_dict, base_name)
    if etcd().configured:
        # Load etcd stuff
        mod_dict = etcd().load()
        if mod_dict:
            self._update(mod_dict)
    # Allow localconfig overrides
    mod_dict = None
    try:
        mod_dict = runpy.run_module('localconfig')
    except ImportError:
        pass
    except ValueError as err:
        # NOTE(review): getattr(err, 'message') with no default raises
        # AttributeError on Python 3, where ValueError has no .message —
        # this branch looks Python 2 specific; confirm.
        if getattr(err, 'message') != '__package__ set to non-string':
            raise
        # This is a bad work-around to make this work transparently...
        # shouldn't really access core stuff like this, but Fuck It[tm]
        mod_name = 'localconfig'
        if sys.version_info < (2, 7):
            loader, code, fname = runpy._get_module_details(mod_name)
        else:
            _, loader, code, fname = runpy._get_module_details(mod_name)
        mod_dict = runpy._run_code(code, {}, {}, mod_name, fname, loader,
                                   pkg_name=None)
    if mod_dict:
        log.info("Loading module 'localconfig'")
        self._update(mod_dict)
    self.call_reload_hooks()
python
{ "resource": "" }
q260306
Config.get
validation
def get(self, name, default, allow_default=True):
    """
    Return a setting value.

    :param str name: Setting key name.
    :param default: Default value of setting if it's not explicitly set.
    :param bool allow_default: If true, use the parameter default as
      default if the key is not set, else raise :exc:`LookupError`
    :raises: :exc:`LookupError` if allow_default is false and the setting
      is not set.
    """
    if not self.settings.get('pyconfig.case_sensitive', False):
        name = name.lower()
    try:
        return self.settings[name]
    except KeyError:
        if not allow_default:
            raise LookupError('No setting "{name}"'.format(name=name))
        # First miss caches the default so later gets see the same value
        self.settings[name] = default
        return self.settings[name]
python
{ "resource": "" }
q260307
etcd.init
validation
def init(self, hosts=None, cacert=None, client_cert=None, client_key=None): """ Handle creating the new etcd client instance and other business. :param hosts: Host string or list of hosts (default: `'127.0.0.1:2379'`) :param cacert: CA cert filename (optional) :param client_cert: Client cert filename (optional) :param client_key: Client key filename (optional) :type ca: str :type cert: str :type key: str """ # Try to get the etcd module try: import etcd self.module = etcd except ImportError: pass if not self.module: return self._parse_jetconfig() # Check env for overriding configuration or pyconfig setting hosts = env('PYCONFIG_ETCD_HOSTS', hosts) protocol = env('PYCONFIG_ETCD_PROTOCOL', None) cacert = env('PYCONFIG_ETCD_CACERT', cacert) client_cert = env('PYCONFIG_ETCD_CERT', client_cert) client_key = env('PYCONFIG_ETCD_KEY', client_key) # Parse auth string if there is one username = None password = None auth = env('PYCONFIG_ETCD_AUTH', None) if auth: auth = auth.split(':') auth.append('') username = auth[0] password = auth[1] # Create new etcd instance hosts = self._parse_hosts(hosts) if hosts is None: return kw = {} # Need this when passing a list of hosts to python-etcd, which we # always do, even if it's a list of one kw['allow_reconnect'] = True # Grab optional protocol argument if protocol: kw['protocol'] = protocol # Add auth to constructor if we got it if username: kw['username'] = username if password: kw['password'] = password # Assign the SSL args if we have 'em if cacert: kw['ca_cert'] = os.path.abspath(cacert) if client_cert and client_key: kw['cert'] = ((os.path.abspath(client_cert), os.path.abspath(client_key))) elif client_cert: kw['cert'] = os.path.abspath(client_cert) if cacert or client_cert or client_key: kw['protocol'] = 'https' self.client = self.module.Client(hosts, **kw)
python
{ "resource": "" }
q260308
etcd.load
validation
def load(self, prefix=None, depth=None):
    """
    Return a dictionary of settings loaded from etcd.

    :param prefix: Key prefix to read (default: ``self.prefix``)
    :param depth: Remaining inheritance levels to follow
        (default: ``self.inherit_depth``)
    """
    prefix = prefix or self.prefix
    prefix = '/' + prefix.strip('/') + '/'
    if depth is None:
        depth = self.inherit_depth
    if not self.configured:
        log.debug("etcd not available")
        return
    if self.watching:
        log.info("Starting watcher for %r", prefix)
        self.start_watching()
    log.info("Loading from etcd %r", prefix)
    try:
        result = self.client.get(prefix)
    except self.module.EtcdKeyNotFound:
        result = None
    if not result:
        log.info("No configuration found")
        return {}
    # Iterate over the returned keys from etcd
    update = {}
    for item in result.children:
        key = item.key
        value = item.value
        # Try to parse them as JSON strings, just in case it works
        try:
            value = pytool.json.from_json(value)
        except Exception:
            # Fixed: was a bare `except:`, which would also swallow
            # KeyboardInterrupt/SystemExit. Non-JSON values stay strings.
            pass
        # Make the key lower-case if we're not case-sensitive
        if not self.case_sensitive:
            key = key.lower()
        # Strip off the prefix that we're using
        if key.startswith(prefix):
            key = key[len(prefix):]
        # Store the key/value to update the config
        update[key] = value
    # Access cached settings directly to avoid recursion
    inherited = Config().settings.get(self.inherit_key,
                                      update.get(self.inherit_key, None))
    if depth > 0 and inherited:
        log.info(" ... inheriting ...")
        inherited = self.load(inherited, depth - 1) or {}
        inherited.update(update)
        update = inherited
    return update
python
{ "resource": "" }
q260309
etcd.get_watcher
validation
def get_watcher(self):
    """
    Return a etcd watching generator which yields events as they happen.
    """
    if not self.watching:
        # NOTE(review): raising StopIteration from a regular (non-generator)
        # function propagates to the caller as a normal exception; confirm
        # callers expect that rather than an empty iterator.
        raise StopIteration()
    return self.client.eternal_watch(self.prefix, recursive=True)
python
{ "resource": "" }
q260310
etcd.start_watching
validation
def start_watching(self):
    """ Begins watching etcd for changes. """
    # A watcher thread that is still alive is reused; otherwise replace it
    current = self.watcher
    if current and current.is_alive():
        return
    self.watcher = Watcher()
    self.watcher.start()
python
{ "resource": "" }
q260311
etcd._parse_hosts
validation
def _parse_hosts(self, hosts): """ Return hosts parsed into a tuple of tuples. :param hosts: String or list of hosts """ # Default host if hosts is None: return # If it's a string, we allow comma separated strings if isinstance(hosts, six.string_types): # Split comma-separated list hosts = [host.strip() for host in hosts.split(',')] # Split host and port hosts = [host.split(':') for host in hosts] # Coerce ports to int hosts = [(host[0], int(host[1])) for host in hosts] # The python-etcd client explicitly checks for a tuple type return tuple(hosts)
python
{ "resource": "" }
q260312
main
validation
def main():
    """
    Main script for `pyconfig` command.
    """
    parser = argparse.ArgumentParser(description="Helper for working with "
            "pyconfigs")
    # -f and -m are mutually exclusive parse targets
    target_group = parser.add_mutually_exclusive_group()
    target_group.add_argument('-f', '--filename',
            help="parse an individual file or directory",
            metavar='F')
    target_group.add_argument('-m', '--module',
            help="parse a package or module, recursively looking inside it",
            metavar='M')
    parser.add_argument('-v', '--view-call',
            help="show the actual pyconfig call made (default: show namespace)",
            action='store_true')
    parser.add_argument('-l', '--load-configs',
            help="query the currently set value for each key found",
            action='store_true')
    # -a and -k are mutually exclusive display modes
    key_group = parser.add_mutually_exclusive_group()
    key_group.add_argument('-a', '--all',
            help="show keys which don't have defaults set",
            action='store_true')
    key_group.add_argument('-k', '--only-keys',
            help="show a list of discovered keys without values",
            action='store_true')
    parser.add_argument('-n', '--natural-sort',
            help="sort by filename and line (default: alphabetical by key)",
            action='store_true')
    parser.add_argument('-s', '--source',
            help="show source annotations (implies --natural-sort)",
            action='store_true')
    # -c toggles away from the default, which is "color iff pygments exists"
    parser.add_argument('-c', '--color',
            help="toggle output colors (default: %s)" % bool(pygments),
            action='store_const', default=bool(pygments),
            const=(not bool(pygments)))
    args = parser.parse_args()
    if args.color and not pygments:
        _error("Pygments is required for color output.\n"
               " pip install pygments")
    if args.module:
        _handle_module(args)
    if args.filename:
        _handle_file(args)
python
{ "resource": "" }
q260313
_handle_module
validation
def _handle_module(args):
    """
    Handles the -m argument.
    """
    target = _get_module_filename(args.module)
    # _error() exits the process, so these act as guard clauses
    if not target:
        _error("Could not load module or package: %r", args.module)
    elif isinstance(target, Unparseable):
        _error("Could not determine module source: %r", args.module)
    _parse_and_output(target, args)
python
{ "resource": "" }
q260314
_error
validation
def _error(msg, *args): """ Print an error message and exit. :param msg: A message to print :type msg: str """ print(msg % args, file=sys.stderr) sys.exit(1)
python
{ "resource": "" }
q260315
_get_module_filename
validation
def _get_module_filename(module): """ Return the filename of `module` if it can be imported. If `module` is a package, its directory will be returned. If it cannot be imported ``None`` is returned. If the ``__file__`` attribute is missing, or the module or package is a compiled egg, then an :class:`Unparseable` instance is returned, since the source can't be retrieved. :param module: A module name, such as ``'test.test_config'`` :type module: str """ # Split up the module and its containing package, if it has one module = module.split('.') package = '.'.join(module[:-1]) module = module[-1] try: if not package: # We aren't accessing a module within a package, but rather a top # level package, so it's a straight up import module = __import__(module) else: # Import the package containing our desired module package = __import__(package, fromlist=[module]) # Get the module from that package module = getattr(package, module, None) filename = getattr(module, '__file__', None) if not filename: # No filename? Nothing to do here return Unparseable() # If we get a .pyc, strip the c to get .py so we can parse the source if filename.endswith('.pyc'): filename = filename[:-1] if not os.path.exists(filename) and os.path.isfile(filename): # If there's only a .pyc and no .py it's a compile package or # egg and we can't get at the source for parsing return Unparseable() # If we have a package, we want the directory not the init file if filename.endswith('__init__.py'): filename = filename[:-11] # Yey, we found it return filename except ImportError: # Definitely not a valid module or package return
python
{ "resource": "" }
q260316
_parse_and_output
validation
def _parse_and_output(filename, args):
    """
    Parse `filename` appropriately and then output calls according to the
    `args` specified.

    :param filename: A file or directory
    :param args: Command arguments
    :type filename: str
    """
    relpath = os.path.dirname(filename)
    if os.path.isfile(filename):
        calls = _parse_file(filename, relpath)
    elif os.path.isdir(filename):
        calls = _parse_dir(filename, relpath)
    else:
        # XXX(shakefu): This is an error of some sort, maybe symlinks?
        # Probably need some thorough testing
        _error("Could not determine file type: %r", filename)
    if not calls:
        # XXX(shakefu): Probably want to change this to not be an error and
        # just be a normal fail (e.g. command runs, no output).
        _error("No pyconfig calls.")
    if args.load_configs:
        # We want to iterate over the configs and add any keys which haven't
        # already been found
        keys = set()
        for call in calls:
            keys.add(call.key)
        # Iterate the loaded keys and make _PyconfigCall instances
        conf = pyconfig.Config()
        for key, value in conf.settings.items():
            if key in keys:
                continue
            # Synthetic call: no real source location, hence [None]*4
            calls.append(_PyconfigCall('set', key, value, [None]*4))
    _output(calls, args)
python
{ "resource": "" }
q260317
_output
validation
def _output(calls, args): """ Outputs `calls`. :param calls: List of :class:`_PyconfigCall` instances :param args: :class:`~argparse.ArgumentParser` instance :type calls: list :type args: argparse.ArgumentParser """ # Sort the keys appropriately if args.natural_sort or args.source: calls = sorted(calls, key=lambda c: (c.filename, c.lineno)) else: calls = sorted(calls, key=lambda c: c.key) out = [] # Handle displaying only the list of keys if args.only_keys: keys = set() for call in calls: if call.key in keys: continue out.append(_format_call(call, args)) keys.add(call.key) out = '\n'.join(out) if args.color: out = _colorize(out) print(out, end=' ') # We're done here return # Build a list of keys which have default values available, so that we can # toggle between displaying only those keys with defaults and all keys keys = set() for call in calls: if call.default: keys.add(call.key) for call in calls: if not args.all and not call.default and call.key in keys: continue out.append(_format_call(call, args)) out = '\n'.join(out) if args.color: out = _colorize(out) print(out, end=' ')
python
{ "resource": "" }
q260318
_format_call
validation
def _format_call(call, args): """ Return `call` formatted appropriately for `args`. :param call: A pyconfig call object :param args: Arguments from the command :type call: :class:`_PyconfigCall` """ out = '' if args.source: out += call.annotation() + '\n' if args.only_keys: out += call.get_key() return out if args.view_call: out += call.as_call() elif args.load_configs: out += call.as_live() else: out += call.as_namespace() return out
python
{ "resource": "" }
q260319
_colorize
validation
def _colorize(output):
    """
    Return `output` colorized with Pygments, if available.
    """
    if not pygments:
        return output
    # Available styles
    # ['monokai', 'manni', 'rrt', 'perldoc', 'borland', 'colorful', 'default',
    #  'murphy', 'vs', 'trac', 'tango', 'fruity', 'autumn', 'bw', 'emacs',
    #  'vim', 'pastie', 'friendly', 'native']
    lexer = pygments.lexers.PythonLexer()
    formatter = pygments.formatters.Terminal256Formatter(style='monokai')
    return pygments.highlight(output, lexer, formatter)
python
{ "resource": "" }
q260320
_map_arg
validation
def _map_arg(arg): """ Return `arg` appropriately parsed or mapped to a usable value. """ # Grab the easy to parse values if isinstance(arg, _ast.Str): return repr(arg.s) elif isinstance(arg, _ast.Num): return arg.n elif isinstance(arg, _ast.Name): name = arg.id if name == 'True': return True elif name == 'False': return False elif name == 'None': return None return name else: # Everything else we don't bother with return Unparseable()
python
{ "resource": "" }
q260321
_PyconfigCall.as_namespace
validation
def as_namespace(self, namespace=None):
    """
    Return this call as if it were being assigned in a pyconfig namespace.

    If `namespace` is specified and matches the top level of this call's
    :attr:`key`, then that section of the key will be removed.
    """
    key = self.get_key()
    if namespace and key.startswith(namespace):
        key = key[len(namespace) + 1:]
    # Fixed: the namespace-trimmed key was computed but then discarded in
    # favor of self.get_key(), so the `namespace` argument had no effect.
    return "%s = %s" % (key, self._default() or NotSet())
python
{ "resource": "" }
q260322
_PyconfigCall.as_live
validation
def as_live(self):
    """
    Return this call as if it were being assigned in a pyconfig namespace,
    but load the actual value currently available in pyconfig.
    """
    key = self.get_key()
    live_value = pyconfig.get(key)
    if live_value:
        rendered = repr(live_value)
    else:
        # Fall back to the parsed default, or NotSet when there is none
        rendered = self._default() or NotSet()
    return "%s = %s" % (key, rendered)
python
{ "resource": "" }
q260323
_PyconfigCall.as_call
validation
def as_call(self):
    """
    Return this call as it is called in its source.
    """
    default = self._default()
    suffix = ', ' + default if default else ''
    return "pyconfig.%s(%r%s)" % (self.method, self.get_key(), suffix)
python
{ "resource": "" }
q260324
_PyconfigCall.get_key
validation
def get_key(self):
    """
    Return the call key, even if it has to be parsed from the source.
    """
    if not isinstance(self.key, Unparseable):
        return self.key
    # Fall back to scraping the key out of the call's source text
    snippet = self.source[self.col_offset:]
    regex = re.compile('''pyconfig\.[eginst]+\(([^,]+).*?\)''')
    found = regex.match(snippet)
    if found is None:
        return Unparseable()
    return "<%s>" % found.group(1)
python
{ "resource": "" }
q260325
_PyconfigCall._default_value_only
validation
def _default_value_only(self): """ Return only the default value, if there is one. """ line = self.source[self.col_offset:] regex = re.compile('''pyconfig\.[eginst]+\(['"][^)]+?['"], ?(.*?)\)''') match = regex.match(line) if not match: return '' return match.group(1)
python
{ "resource": "" }
q260326
_PyconfigCall._default
validation
def _default(self): """ Return the default argument, formatted nicely. """ try: # Check if it's iterable iter(self.default) except TypeError: return repr(self.default) # This is to look for unparsable values, and if we find one, we try to # directly parse the string for v in self.default: if isinstance(v, Unparseable): default = self._default_value_only() if default: return default # Otherwise just make it a string and go return ', '.join(str(v) for v in self.default)
python
{ "resource": "" }
q260327
Pylearn2Estimator._get_param_names
validation
def _get_param_names(self): """ Get mappable parameters from YAML. """ template = Template(self.yaml_string) names = ['yaml_string'] # always include the template for match in re.finditer(template.pattern, template.template): name = match.group('named') or match.group('braced') assert name is not None names.append(name) return names
python
{ "resource": "" }
q260328
Pylearn2Estimator._get_dataset
validation
def _get_dataset(self, X, y=None):
    """
    Construct a pylearn2 dataset.

    Parameters
    ----------
    X : array_like
        Training examples.
    y : array_like, optional
        Labels.
    """
    from pylearn2.datasets import DenseDesignMatrix

    X = np.asarray(X)
    assert X.ndim > 1
    if y is not None:
        y = self._get_labels(y)
    # 2-D input is a plain design matrix; higher rank is a topological view
    kwargs = {'y': y}
    if X.ndim == 2:
        kwargs['X'] = X
    else:
        kwargs['topo_view'] = X
    return DenseDesignMatrix(**kwargs)
python
{ "resource": "" }
q260329
Pylearn2Estimator.fit
validation
def fit(self, X, y=None):
    """
    Build a trainer and run main_loop.

    Parameters
    ----------
    X : array_like
        Training examples.
    y : array_like, optional
        Labels.
    """
    from pylearn2.config import yaml_parse
    from pylearn2.train import Train
    # build trainer
    params = self.get_params()
    yaml_string = Template(self.yaml_string).substitute(params)
    self.trainer = yaml_parse.load(yaml_string)
    assert isinstance(self.trainer, Train)
    # The dataset is injected here, so the YAML must not supply one
    if self.trainer.dataset is not None:
        raise ValueError('Train YAML database must evaluate to None.')
    self.trainer.dataset = self._get_dataset(X, y)
    # update monitoring dataset(s)
    if (hasattr(self.trainer.algorithm, 'monitoring_dataset') and
            self.trainer.algorithm.monitoring_dataset is not None):
        monitoring_dataset = self.trainer.algorithm.monitoring_dataset
        # A single unnamed monitoring set is replaced outright; otherwise
        # the training data is registered under the 'train' key
        if len(monitoring_dataset) == 1 and '' in monitoring_dataset:
            monitoring_dataset[''] = self.trainer.dataset
        else:
            monitoring_dataset['train'] = self.trainer.dataset
        self.trainer.algorithm._set_monitoring_dataset(monitoring_dataset)
    else:
        self.trainer.algorithm._set_monitoring_dataset(
            self.trainer.dataset)
    # run main loop
    self.trainer.main_loop()
python
{ "resource": "" }
q260330
Pylearn2Estimator._predict
validation
def _predict(self, X, method='fprop'):
    """
    Get model predictions.

    See pylearn2.scripts.mlp.predict_csv and
    http://fastml.com/how-to-get-predictions-from-pylearn2/.

    Parameters
    ----------
    X : array_like
        Test dataset.
    method : str
        Model method to call for prediction.
    """
    import theano

    model = self.trainer.model
    inputs = model.get_input_space().make_theano_batch()
    outputs = getattr(model, method)(inputs)
    predict = theano.function([inputs], outputs, allow_input_downcast=True)
    return predict(X)
python
{ "resource": "" }
q260331
Pylearn2DatasetLoader.load
validation
def load(self):
    """
    Load the dataset using pylearn2.config.yaml_parse.

    Returns
    -------
    (X, y) : tuple
        Data and labels from the first sequential batch; `y` is ``None``
        when the iterator yields no label component.
    """
    from pylearn2.config import yaml_parse
    from pylearn2.datasets import Dataset

    dataset = yaml_parse.load(self.yaml_string)
    assert isinstance(dataset, Dataset)
    iterator = dataset.iterator(mode='sequential', num_batches=1,
                                data_specs=dataset.data_specs,
                                return_tuple=True)
    # Fixed: use the next() builtin instead of the Python 2-only
    # iterator.next() method so this also runs under Python 3.
    data = next(iterator)
    if len(data) == 2:
        X, y = data
        y = np.squeeze(y)
        if self.one_hot:
            y = np.argmax(y, axis=1)
    else:
        X = data
        y = None
    return X, y
python
{ "resource": "" }
q260332
GaussianProcessKernel._create_kernel
validation
def _create_kernel(self): """ creates an additive kernel """ # Check kernels kernels = self.kernel_params if not isinstance(kernels, list): raise RuntimeError('Must provide enumeration of kernels') for kernel in kernels: if sorted(list(kernel.keys())) != ['name', 'options', 'params']: raise RuntimeError( 'strategy/params/kernels must contain keys: "name", "options", "params"') # Turn into entry points. # TODO use eval to allow user to specify internal variables for kernels (e.g. V) in config file. kernels = [] for kern in self.kernel_params: params = kern['params'] options = kern['options'] name = kern['name'] kernel_ep = load_entry_point(name, 'strategy/params/kernels') if issubclass(kernel_ep, KERNEL_BASE_CLASS): if options['independent']: # TODO Catch errors here? Estimator entry points don't catch instantiation errors kernel = np.sum([kernel_ep(1, active_dims=[i], **params) for i in range(self.n_dims)]) else: kernel = kernel_ep(self.n_dims, **params) if not isinstance(kernel, KERNEL_BASE_CLASS): raise RuntimeError('strategy/params/kernel must load a' 'GPy derived Kernel') kernels.append(kernel) self.kernel = np.sum(kernels)
python
{ "resource": "" }
q260333
fit_and_score_estimator
validation
def fit_and_score_estimator(estimator, parameters, cv, X, y=None, scoring=None,
                            iid=True, n_jobs=1, verbose=1,
                            pre_dispatch='2*n_jobs'):
    """Fit and score an estimator with cross-validation

    This function is basically a copy of sklearn's
    model_selection._BaseSearchCV._fit(), which is the core of the
    GridSearchCV fit() method. Unfortunately, that class does _not_ return
    the training set scores, which we want to save in the database, and
    because of the way it's written, you can't change it by subclassing or
    monkeypatching.

    This function uses some undocumented internal sklearn APIs (non-public).
    It was written against sklearn version 0.16.1. Prior Versions are likely
    to fail due to changes in the design of cross_validation module.

    Returns
    -------
    out : dict, with keys 'mean_test_score' 'test_scores', 'train_scores'
        The scores on the training and test sets, as well as the mean test
        set score.
    """
    scorer = check_scoring(estimator, scoring=scoring)
    n_samples = num_samples(X)
    X, y = check_arrays(X, y, allow_lists=True, sparse_format='csr',
                        allow_nans=True)
    if y is not None:
        if len(y) != n_samples:
            raise ValueError('Target variable (y) has a different number '
                             'of samples (%i) than data (X: %i samples)'
                             % (len(y), n_samples))
    cv = check_cv(cv=cv, y=y, classifier=is_classifier(estimator))

    # Fan the folds out over n_jobs workers
    out = Parallel(
        n_jobs=n_jobs, verbose=verbose, pre_dispatch=pre_dispatch
    )(
        delayed(_fit_and_score)(clone(estimator), X, y, scorer, train, test,
                                verbose, parameters, fit_params=None)
        for train, test in cv.split(X, y))

    assert len(out) == cv.n_splits
    train_scores, test_scores = [], []
    n_train_samples, n_test_samples = [], []
    for test_score, n_test, train_score, n_train, _ in out:
        train_scores.append(train_score)
        test_scores.append(test_score)
        n_test_samples.append(n_test)
        n_train_samples.append(n_train)

    train_scores, test_scores = map(list, check_arrays(train_scores,
                                                       test_scores,
                                                       warn_nans=True,
                                                       replace_nans=True))

    if iid:
        if verbose > 0 and is_msmbuilder_estimator(estimator):
            # Fixed: this literal was split by a raw newline in the source
            # (a syntax error); reconstructed as a single message.
            print('[CV] Using MSMBuilder API n_samples averaging')
            print('[CV] n_train_samples: %s' % str(n_train_samples))
            print('[CV] n_test_samples: %s' % str(n_test_samples))
        # Weight fold scores by their sample counts
        mean_test_score = np.average(test_scores, weights=n_test_samples)
        mean_train_score = np.average(train_scores, weights=n_train_samples)
    else:
        mean_test_score = np.average(test_scores)
        mean_train_score = np.average(train_scores)

    grid_scores = {
        'mean_test_score': mean_test_score, 'test_scores': test_scores,
        'mean_train_score': mean_train_score, 'train_scores': train_scores,
        'n_test_samples': n_test_samples,
        'n_train_samples': n_train_samples}
    return grid_scores
python
{ "resource": "" }
q260334
dict_merge
validation
def dict_merge(base, top):
    """Recursively merge two dictionaries, with the elements from `top`
    taking precedence over elements from `base`.

    Returns
    -------
    out : dict
        A new dict, containing the merged records.
    """
    # Fixed docstring: it previously said "precedence over elements from
    # `top`", which contradicted both itself and the implementation.
    out = dict(top)
    for key in base:
        if key in top:
            # Key on both sides: recurse only when both values are dicts;
            # otherwise top's value (already in `out`) wins.
            if isinstance(base[key], dict) and isinstance(top[key], dict):
                out[key] = dict_merge(base[key], top[key])
        else:
            out[key] = base[key]
    return out
python
{ "resource": "" }
q260335
format_timedelta
validation
def format_timedelta(td_object):
    """Format a timedelta object for display to users

    Returns
    -------
    str
        e.g. ``"1 hour, 2 minutes, 3 seconds"``; empty string for
        durations under one second.
    """
    def get_total_seconds(td):
        # timedelta.total_seconds not in py2.6
        return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 1e6) / 1e6

    seconds = int(get_total_seconds(td_object))
    periods = [('year', 60*60*24*365),
               ('month', 60*60*24*30),
               ('day', 60*60*24),
               ('hour', 60*60),
               ('minute', 60),
               ('second', 1)]

    strings = []
    for period_name, period_seconds in periods:
        # Fixed: use >= so exact multiples are reported in the larger unit
        # (60s -> "1 minute", 1s -> "1 second" instead of "" with `>`).
        if seconds >= period_seconds:
            period_value, seconds = divmod(seconds, period_seconds)
            if period_value == 1:
                strings.append("%s %s" % (period_value, period_name))
            else:
                strings.append("%s %ss" % (period_value, period_name))

    return ", ".join(strings)
python
{ "resource": "" }
q260336
_assert_all_finite
validation
def _assert_all_finite(X): """Like assert_all_finite, but only for ndarray.""" X = np.asanyarray(X) # First try an O(n) time, O(1) space solution for the common case that # everything is finite; fall back to O(n) space np.isfinite to prevent # false positives from overflow in sum method if (X.dtype.char in np.typecodes['AllFloat'] and not np.isfinite(X.sum()) and not np.isfinite(X).all()): raise ValueError("Input contains NaN, infinity" " or a value too large for %r." % X.dtype)
python
{ "resource": "" }
q260337
_warn_if_not_finite
validation
def _warn_if_not_finite(X): """UserWarning if array contains non-finite elements""" X = np.asanyarray(X) # First try an O(n) time, O(1) space solution for the common case that # everything is finite; fall back to O(n) space np.isfinite to prevent # false positives from overflow in sum method if (X.dtype.char in np.typecodes['AllFloat'] and not np.isfinite(X.sum()) and not np.isfinite(X).all()): warnings.warn("Result contains NaN, infinity" " or a value too large for %r." % X.dtype, category=UserWarning)
python
{ "resource": "" }
q260338
Config.fromdict
validation
def fromdict(cls, config, check_fields=True):
    """Create a Config object from config dict directly."""
    # Bypass __init__ (which would read a file) and fill in minimal state
    instance = super(Config, cls).__new__(cls)
    instance.path = '.'
    instance.verbose = False
    instance.config = instance._merge_defaults(config)
    if check_fields:
        instance._check_fields()
    return instance
python
{ "resource": "" }
q260339
Config.sha1
validation
def sha1(self):
    """SHA1 hash of the config file itself."""
    with open(self.path, 'rb') as fh:
        digest = hashlib.sha1(fh.read())
    return digest.hexdigest()
python
{ "resource": "" }
q260340
plot_3
validation
def plot_3(data, ss, *args):
    """t-SNE embedding of the parameters, colored by score
    """
    if len(data) <= 1:
        warnings.warn("Only one datapoint. Could not compute t-SNE embedding.")
        return None
    scores = np.array([d['mean_test_score'] for d in data])
    # maps each parameters to a vector of floats
    warped = np.array([ss.point_to_unit(d['parameters']) for d in data])
    # Embed into 2 dimensions with t-SNE
    X = TSNE(n_components=2).fit_transform(warped)
    # Exponentiate then min-max normalize scores into [0, 1] for coloring.
    # NOTE(review): if all scores are equal, maxe == mine divides by zero.
    e_scores = np.exp(scores)
    mine, maxe = np.min(e_scores), np.max(e_scores)
    color = (e_scores - mine) / (maxe - mine)
    mapped_colors = list(map(rgb2hex, cm.get_cmap('RdBu_r')(color)))
    p = bk.figure(title='t-SNE (unsupervised)', tools=TOOLS)
    df_params = nonconstant_parameters(data)
    df_params['score'] = scores
    df_params['x'] = X[:, 0]
    df_params['y'] = X[:, 1]
    df_params['color'] = mapped_colors
    df_params['radius'] = 1
    p.circle(
        x='x', y='y', color='color', radius='radius',
        source=ColumnDataSource(data=df_params), fill_alpha=0.6,
        line_color=None)
    cp = p
    # Hover tooltip shows every (nonconstant) parameter column
    hover = cp.select(dict(type=HoverTool))
    format_tt = [(s, '@%s' % s) for s in df_params.columns]
    hover.tooltips = OrderedDict([("index", "$index")] + format_tt)
    xax, yax = p.axis
    xax.axis_label = 't-SNE coord 1'
    yax.axis_label = 't-SNE coord 2'
    return p
python
{ "resource": "" }
q260341
plot_4
validation
def plot_4(data, *args):
    """Scatter plot of score vs each param
    """
    params = nonconstant_parameters(data)
    scores = np.array([d['mean_test_score'] for d in data])
    order = np.argsort(scores)
    # Booleans plot poorly; coerce them to 0/1 ints
    for key in params.keys():
        if params[key].dtype == np.dtype('bool'):
            params[key] = params[key].astype(np.int)
    p_list = []
    for key in params.keys():
        x = params[key][order]
        y = scores[order]
        # NOTE(review): `params` is re-indexed by `order` on every loop
        # pass, so after the first pass the ordering is applied repeatedly —
        # confirm whether a one-time sort outside the loop was intended.
        params = params.loc[order]
        try:
            radius = (np.max(x) - np.min(x)) / 100.0
        except:
            print("error making plot4 for '%s'" % key)
            continue
        p_list.append(build_scatter_tooltip(
            x=x, y=y, radius=radius, add_line=False, tt=params,
            xlabel=key, title='Score vs %s' % key))
    return p_list
python
{ "resource": "" }
q260342
SearchSpace.add_int
validation
def add_int(self, name, min, max, warp=None):
    """An integer-valued dimension bounded between `min` <= x <= `max`.

    Note that the right endpoint of the interval includes `max`.

    When `warp` is None, the base measure associated with this dimension
    is a categorical distribution with each weight on each of the integers
    in [min, max]. With `warp == 'log'`, the base measure is a uniform
    distribution on the log of the variable, with bounds at `log(min)` and
    `log(max)`. This is appropriate for variables that are "naturally" in
    log-space. Other `warp` functions are not supported (yet), but may be
    at a later time. Please note that this functionality is not supported
    for `hyperopt_tpe`.

    Raises
    ------
    ValueError
        If ``max < min``, if ``warp`` is not None or 'log', or if
        log-warping is requested with ``min <= 0``.
    """
    min, max = map(int, (min, max))
    if max < min:
        raise ValueError('variable %s: max < min error' % name)
    if warp not in (None, 'log'):
        raise ValueError('variable %s: warp=%s is not supported. use '
                         'None or "log",' % (name, warp))
    if min <= 0 and warp == 'log':
        # Bug fix: the '%s' placeholder was never filled in, so the error
        # message showed a literal '%s' instead of the variable name.
        raise ValueError('variable %s: log-warping requires min > 0' % name)
    self.variables[name] = IntVariable(name, min, max, warp)
python
{ "resource": "" }
q260343
SearchSpace.add_float
validation
def add_float(self, name, min, max, warp=None):
    """A floating point-valued dimension bounded `min` <= x < `max`.

    When `warp` is None, the base measure associated with this dimension
    is a uniform distribution on [min, max). With `warp == 'log'`, the
    base measure is a uniform distribution on the log of the variable,
    with bounds at `log(min)` and `log(max)`. This is appropriate for
    variables that are "naturally" in log-space. Other `warp` functions
    are not supported (yet), but may be at a later time.

    Raises
    ------
    ValueError
        If ``min >= max``, if ``warp`` is not None or 'log', or if
        log-warping is requested with ``min <= 0``.
    """
    min, max = map(float, (min, max))
    if not min < max:
        raise ValueError('variable %s: min >= max error' % name)
    if warp not in (None, 'log'):
        raise ValueError('variable %s: warp=%s is not supported. use '
                         'None or "log",' % (name, warp))
    if min <= 0 and warp == 'log':
        # Bug fix: the '%s' placeholder was never filled in, so the error
        # message showed a literal '%s' instead of the variable name.
        raise ValueError('variable %s: log-warping requires min > 0' % name)
    self.variables[name] = FloatVariable(name, min, max, warp)
python
{ "resource": "" }
q260344
SearchSpace.add_enum
validation
def add_enum(self, name, choices):
    """Add an enumeration-valued dimension.

    The base measure associated with this dimension is a categorical
    distribution with equal weight on each element in `choices`.

    Raises ValueError when `choices` is not iterable.
    """
    choices_are_iterable = isinstance(choices, Iterable)
    if not choices_are_iterable:
        raise ValueError('variable %s: choices must be iterable' % name)
    self.variables[name] = EnumVariable(name, choices)
python
{ "resource": "" }
q260345
log_callback
validation
def log_callback(wrapped_function):
    """Decorator that produces DEBUG level log messages before and after
    calling a parser method.

    If a callback raises an IgnoredMatchException the log will show
    'IGNORED' instead to indicate that the parser will not create any
    objects from the matched string.

    Example:
    DEBUG:poyo.parser:parse_simple <- 123: 456.789
    DEBUG:poyo.parser:parse_int <- 123
    DEBUG:poyo.parser:parse_int -> 123
    DEBUG:poyo.parser:parse_float <- 456.789
    DEBUG:poyo.parser:parse_float -> 456.789
    DEBUG:poyo.parser:parse_simple -> <Simple name: 123, value: 456.789>
    """
    def debug_log(message):
        """Helper to log an escaped version of the given message to DEBUG"""
        # unicode_escape keeps newlines/tabs visible as \n, \t in the log.
        logger.debug(message.encode('unicode_escape').decode())

    @functools.wraps(wrapped_function)
    def _wrapper(parser, match, **kwargs):
        func_name = wrapped_function.__name__

        # Log the raw matched string before dispatching to the parser method.
        debug_log(u'{func_name} <- {matched_string}'.format(
            func_name=func_name, matched_string=match.group(),
        ))

        try:
            result = wrapped_function(parser, match, **kwargs)
        except IgnoredMatchException:
            # The parser deliberately ignores this match; log and re-raise
            # so the caller can react.
            debug_log(u'{func_name} -> IGNORED'.format(func_name=func_name))
            raise

        debug_log(u'{func_name} -> {result}'.format(
            func_name=func_name, result=result,
        ))

        return result

    return _wrapper
python
{ "resource": "" }
q260346
_Parser.find_match
validation
def find_match(self):
    """Try to find a pattern that matches the source and call a parser
    method to create Python objects.

    A callback that raises an IgnoredMatchException indicates that the
    given string data is ignored by the parser and no objects are created.

    If none of the patterns match, a NoMatchException is raised.
    """
    for pattern, callback in self.rules:
        match = pattern.match(self.source, pos=self.pos)

        if not match:
            continue

        try:
            node = callback(match)
        except IgnoredMatchException:
            # Matched but deliberately ignored: record nothing, yet still
            # return the match below so the caller can advance past it.
            pass
        else:
            self.seen.append(node)

        # Note: returned for both the parsed and the ignored case; only a
        # non-matching pattern falls through to the next rule.
        return match

    raise NoMatchException(
        'None of the known patterns match for {}'
        ''.format(self.source[self.pos:])
    )
python
{ "resource": "" }
q260347
ContainerMixin.add_child
validation
def add_child(self, child):
    """Attach `child` to this container and register self as its parent.

    Raises TypeError when the given object is not a ChildMixin instance.
    """
    if isinstance(child, ChildMixin):
        child.parent = self
        self._children.append(child)
    else:
        raise TypeError(
            'Requires instance of TreeElement. '
            'Got {}'.format(type(child))
        )
python
{ "resource": "" }
q260348
get_ip_packet
validation
def get_ip_packet(data, client_port, server_port, is_loopback=False):
    """Unpack a raw link-layer frame and return its IP layer, validating
    that the enclosed TCP segment flows between the given client and
    server ports.

    if client_port is 0 any client_port is good

    :param data: raw captured bytes for one frame
    :param client_port: expected client port, or 0 to accept any
    :param server_port: expected server port (must match one endpoint)
    :param is_loopback: parse with the loopback header layout instead of
        ethernet (the module-level ``_loopback``/``_ethernet`` unpackers)
    :raises ValueError: malformed header, non-TCP payload, or a packet
        that does not involve the expected client/server ports
    """
    header = _loopback if is_loopback else _ethernet

    try:
        header.unpack(data)
    except Exception as ex:
        raise ValueError('Bad header: %s' % ex)

    tcp_p = getattr(header.data, 'data', None)
    # Exact type check (not isinstance): only plain dpkt TCP segments pass.
    if type(tcp_p) != dpkt.tcp.TCP:
        raise ValueError('Not a TCP packet')

    if tcp_p.dport == server_port:
        # client -> server direction
        if client_port != 0 and tcp_p.sport != client_port:
            raise ValueError('Request from different client')
    elif tcp_p.sport == server_port:
        # server -> client direction
        if client_port != 0 and tcp_p.dport != client_port:
            raise ValueError('Reply for different client')
    else:
        raise ValueError('Packet not for/from client/server')

    return header.data
python
{ "resource": "" }
q260349
LatencyPrinter.report
validation
def report(self):
    """Compute latency statistics per method and write a table, sorted by
    average latency (descending), to the configured output.

    Reads ``self._latencies_by_method`` (method name -> list of latency
    samples) and writes via ``self._output``.
    """
    # \r rewinds the line so the table overwrites any progress indicator.
    self._output.write('\r')

    sort_by = 'avg'
    results = {}
    for key, latencies in self._latencies_by_method.items():
        result = {}
        result['count'] = len(latencies)
        result['avg'] = sum(latencies) / len(latencies)
        result['min'] = min(latencies)
        result['max'] = max(latencies)
        # percentile() expects a sorted sample.
        latencies = sorted(latencies)
        result['p90'] = percentile(latencies, 0.90)
        result['p95'] = percentile(latencies, 0.95)
        result['p99'] = percentile(latencies, 0.99)
        result['p999'] = percentile(latencies, 0.999)
        results[key] = result

    headers = ['method', 'count', 'avg', 'min', 'max',
               'p90', 'p95', 'p99', 'p999']
    data = []
    # Slowest methods (by average) first.
    results = sorted(results.items(), key=lambda it: it[1][sort_by], reverse=True)

    def row(key, res):
        # One table row: method name followed by its stats in header order.
        data = [key] + [res[header] for header in headers[1:]]
        return tuple(data)

    data = [row(key, result) for key, result in results]

    self._output.write('%s\n' % tabulate(data, headers=headers))
    self._output.flush()
python
{ "resource": "" }
q260350
ThriftDiff.of_structs
validation
def of_structs(cls, a, b):
    """Diff two thrift structs and return the result as a ThriftDiff
    instance.
    """
    diff = ThriftDiff(a, b)
    diff._do_diff()
    return diff
python
{ "resource": "" }
q260351
ThriftDiff.of_messages
validation
def of_messages(cls, msg_a, msg_b):
    """Diff two thrift messages by comparing their args.

    Raises ValueError when the messages are not diff-able (see
    ``can_diff``). Only args of type 'struct' are compared.

    Returns a list of ThriftDiff results - one for each struct arg.
    """
    diffable, reason = cls.can_diff(msg_a, msg_b)
    if not diffable:
        raise ValueError(reason)
    results = []
    for arg_a, arg_b in zip(msg_a.args, msg_b.args):
        if arg_a.field_type == 'struct':
            results.append(cls.of_structs(arg_a.value, arg_b.value))
    return results
python
{ "resource": "" }
q260352
ThriftDiff.can_diff
validation
def can_diff(msg_a, msg_b):
    """Check if two thrift messages are diff ready.

    Returns a tuple of (boolean, reason_string), i.e.
    (False, reason_string) if the messages can not be diffed along with
    the reason and (True, None) for the opposite case.
    """
    if msg_a.method != msg_b.method:
        return False, 'method name of messages do not match'
    same_length = len(msg_a.args) == len(msg_b.args)
    if not (same_length and msg_a.args.is_isomorphic_to(msg_b.args)):
        return False, 'argument signature of methods do not match'
    return True, None
python
{ "resource": "" }
q260353
ThriftStruct.is_isomorphic_to
validation
def is_isomorphic_to(self, other):
    """Return True when `other` is a struct of the same class whose fields
    are pairwise isomorphic to this struct's fields.
    """
    if not isinstance(other, self.__class__):
        return False
    if len(self.fields) != len(other.fields):
        return False
    pairs = zip(self.fields, other.fields)
    return all(mine.is_isomorphic_to(theirs) for mine, theirs in pairs)
python
{ "resource": "" }
q260354
ThriftMessage.read
validation
def read(cls, data,
         protocol=None,
         fallback_protocol=TBinaryProtocol,
         finagle_thrift=False,
         max_fields=MAX_FIELDS,
         max_list_size=MAX_LIST_SIZE,
         max_map_size=MAX_MAP_SIZE,
         max_set_size=MAX_SET_SIZE,
         read_values=False):
    """Try to deserialize a message; might fail if data is missing.

    :param data: raw bytes of a (possibly partial) thrift message
    :param protocol: thrift protocol class, or None to auto-detect
    :param fallback_protocol: protocol used when detection is inconclusive
    :param finagle_thrift: if True, first try to strip the finagle
        RequestHeader that finagle-thrift prepends
    :param max_fields/max_list_size/max_map_size/max_set_size: parsing
        limits forwarded to ThriftStruct.read
    :param read_values: whether to materialize field values
    :returns: tuple of (ThriftMessage, consumed byte count)
    :raises ValueError: not enough data, or the decoded method name is
        missing/too long/contains non-printable characters
    """
    # do we have enough data?
    if len(data) < cls.MIN_MESSAGE_SIZE:
        raise ValueError('not enough data')

    if protocol is None:
        protocol = cls.detect_protocol(data, fallback_protocol)
    trans = TTransport.TMemoryBuffer(data)
    proto = protocol(trans)

    # finagle-thrift prepends a RequestHeader
    #
    # See: http://git.io/vsziG
    header = None
    if finagle_thrift:
        try:
            header = ThriftStruct.read(
                proto, max_fields, max_list_size, max_map_size,
                max_set_size, read_values)
        except Exception:
            # reset stream, maybe it's not finagle-thrift
            # (narrowed from a bare `except:`)
            trans = TTransport.TMemoryBuffer(data)
            proto = protocol(trans)

    # unpack the message
    method, mtype, seqid = proto.readMessageBegin()
    mtype = cls.message_type_to_str(mtype)

    if len(method) == 0 or method.isspace() or method.startswith(' '):
        raise ValueError('no method name')

    if len(method) > cls.MAX_METHOD_LENGTH:
        raise ValueError('method name too long')

    # we might have made it until this point by mere chance, so filter out
    # suspicious method names (only printable ASCII, no space)
    valid = range(33, 127)
    if any(ord(char) not in valid for char in method):
        # Bug fix: the original used 'invalid method name' % method, a
        # format string with no placeholder, which raised TypeError
        # instead of the intended ValueError.
        raise ValueError('invalid method name: %s' % method)

    args = ThriftStruct.read(
        proto, max_fields, max_list_size, max_map_size,
        max_set_size, read_values)

    proto.readMessageEnd()

    # Note: this is a bit fragile, the right thing would be to count bytes
    # as we read them (i.e.: when calling readI32, etc).
    msglen = trans._buffer.tell()

    return cls(method, mtype, seqid, args, header, msglen), msglen
python
{ "resource": "" }
q260355
Stream.pop
validation
def pop(self, nbytes):
    """Pop packets with _at_least_ `nbytes` of accumulated payload.

    Fewer bytes may be returned when the queue runs out of packets.
    Updates the remaining-byte counter under the packet lock.
    """
    popped = []
    consumed = 0
    with self._lock_packets:
        while consumed < nbytes and self._packets:
            packet = self._packets.pop(0)
            payload_len = len(packet.data.data)
            consumed += payload_len
            self._remaining -= payload_len
            popped.append(packet)
    return popped
python
{ "resource": "" }
q260356
Stream.pop_data
validation
def pop_data(self, nbytes):
    """Similar to pop, but returns (joined payload, last timestamp).

    The timestamp is 0 when no packets were available.
    """
    packets = self.pop(nbytes)
    last_timestamp = packets[-1].timestamp if packets else 0
    payload = ''.join(p.data.data for p in packets)
    return payload, last_timestamp
python
{ "resource": "" }
q260357
Stream.push
validation
def push(self, ip_packet):
    """Push the packet into the queue if it is the next expected TCP
    segment; return True when accepted, False when skipped.

    Tracks TCP sequence numbers so retransmitted/out-of-order segments
    (and empty ACKs) are dropped rather than duplicated in the stream.
    """
    data_len = len(ip_packet.data.data)
    seq_id = ip_packet.data.seq

    if data_len == 0:
        # Empty segment (e.g. pure ACK): nothing to enqueue, but remember
        # its sequence number as the next expected one.
        self._next_seq_id = seq_id
        return False

    # have we seen this packet?
    # (-1 means "no expectation yet"; otherwise only the exact next
    # segment is accepted)
    if self._next_seq_id != -1 and seq_id != self._next_seq_id:
        return False

    self._next_seq_id = seq_id + data_len

    with self._lock_packets:
        # Note: we only account for payload (i.e.: tcp data)
        self._length += len(ip_packet.data.data)
        self._remaining += len(ip_packet.data.data)
        self._packets.append(ip_packet)

    return True
python
{ "resource": "" }
q260358
Dispatcher.run
validation
def run(self, *args, **kwargs):
    """Deal with the incoming packets.

    Endless consumer loop: pops (timestamp, ip_packet) tuples from the
    shared queue, groups them into per-connection Stream objects keyed by
    'src:port<->dst:port', and notifies registered handlers whenever a
    stream accepts a new segment.
    """
    while True:
        try:
            timestamp, ip_p = self._queue.popleft()
            src_ip = get_ip(ip_p, ip_p.src)
            dst_ip = get_ip(ip_p, ip_p.dst)

            # intern() the endpoint/key strings: many packets share the
            # same connection, so this saves memory and speeds up the
            # dict lookups below.
            src = intern('%s:%s' % (src_ip, ip_p.data.sport))
            dst = intern('%s:%s' % (dst_ip, ip_p.data.dport))
            key = intern('%s<->%s' % (src, dst))

            stream = self._streams.get(key)
            if stream is None:
                stream = Stream(src, dst)
                self._streams[key] = stream

            # HACK: save the timestamp
            setattr(ip_p, 'timestamp', timestamp)
            pushed = stream.push(ip_p)

            if not pushed:
                continue

            # let listeners know about the updated stream
            for handler in self._handlers:
                try:
                    handler(stream)
                except Exception as ex:
                    # A broken handler must not kill the dispatch loop.
                    print('handler exception: %s' % ex)
        except Exception:
            # Deliberate best-effort loop: an empty queue raises
            # IndexError from popleft(); sleep briefly and retry.
            time.sleep(0.00001)
python
{ "resource": "" }
q260359
get_disk_image_by_name
validation
def get_disk_image_by_name(pbclient, location, image_name):
    """Return all HDD disk images within `location` whose name equals
    `image_name` exactly. The resulting list may be empty.
    """
    def _matches(image):
        props = image['properties']
        return (props['name'] == image_name
                and props['imageType'] == "HDD"
                and props['location'] == location)

    return [img for img in pbclient.list_images()['items'] if _matches(img)]
python
{ "resource": "" }
q260360
ProfitBricksService._read_config
validation
def _read_config(self, filename=None):
    """Read the user configuration into ``self._config``.

    When `filename` is given it becomes the config path; otherwise the
    platform-specific default location is derived via the optional
    `appdirs` dependency. The parse is skipped when a config is already
    loaded.
    """
    if filename:
        self._config_filename = filename
    else:
        try:
            import appdirs
        except ImportError:
            raise Exception("Missing dependency for determining config path. Please install "
                            "the 'appdirs' Python module.")
        self._config_filename = appdirs.user_config_dir(_LIBRARY_NAME, "ProfitBricks") + ".ini"

    if not self._config:
        self._config = configparser.ConfigParser()
        # Preserve option key case (default optionxform lowercases keys).
        self._config.optionxform = str
        # read() silently ignores a missing file; that is fine here.
        self._config.read(self._config_filename)
python
{ "resource": "" }
q260361
ProfitBricksService._save_config
validation
def _save_config(self, filename=None): """ Save the given user configuration. """ if filename is None: filename = self._config_filename parent_path = os.path.dirname(filename) if not os.path.isdir(parent_path): os.makedirs(parent_path) with open(filename, "w") as configfile: self._config.write(configfile)
python
{ "resource": "" }
q260362
ProfitBricksService._get_username
validation
def _get_username(self, username=None, use_config=True, config_filename=None):
    """Determine the username.

    If a username is given, this name is used. Otherwise the configuration
    file will be consulted if `use_config` is set to True. The user is
    asked for the username if the username is not available. Then the
    username is stored in the configuration file.

    :param username: Username (used directly if given)
    :type username: ``str``

    :param use_config: Whether to read username from configuration file
    :type use_config: ``bool``

    :param config_filename: Path to the configuration file
    :type config_filename: ``str``
    """
    if not username and use_config:
        if self._config is None:
            self._read_config(config_filename)
        username = self._config.get("credentials", "username", fallback=None)

    if not username:
        username = input("Please enter your username: ").strip()
        while not username:
            username = input("No username specified. Please enter your username: ").strip()
        # Bug fix: this check was misspelled as 'credendials', so on a
        # fresh config the 'credentials' section was never created and the
        # following set() raised NoSectionError.
        if 'credentials' not in self._config:
            self._config.add_section('credentials')
        self._config.set("credentials", "username", username)
        self._save_config()

    return username
python
{ "resource": "" }
q260363
ProfitBricksService._get_password
validation
def _get_password(self, password, use_config=True, config_filename=None,
                  use_keyring=HAS_KEYRING):
    """Determine the user password.

    If the password is given, this password is used. Otherwise this
    function will try to get the password from the user's keyring if
    `use_keyring` is set to True; failing that, it prompts the user and
    (after asking) may persist the password in plain text in the config.

    :param password: Password (used directly if given)
    :type password: ``str``

    :param use_config: Whether to read the password from the configuration file
    :type use_config: ``bool``

    :param config_filename: Path to the configuration file
    :type config_filename: ``str``

    :param use_keyring: Whether to try the system keyring
    :type use_keyring: ``bool``
    """
    if not password and use_config:
        if self._config is None:
            self._read_config(config_filename)
        password = self._config.get("credentials", "password", fallback=None)

    if not password and use_keyring:
        logger = logging.getLogger(__name__)
        question = ("Please enter your password for {} on {}: "
                    .format(self.username, self.host_base))
        if HAS_KEYRING:
            password = keyring.get_password(self.keyring_identificator, self.username)
            if password is None:
                password = getpass.getpass(question)
                try:
                    keyring.set_password(self.keyring_identificator, self.username, password)
                except keyring.errors.PasswordSetError as error:
                    logger.warning("Storing password in keyring '%s' failed: %s",
                                   self.keyring_identificator, error)
        else:
            logger.warning("Install the 'keyring' Python module to store your password "
                           "securely in your keyring!")
            password = self._config.get("credentials", "password", fallback=None)
            if password is None:
                password = getpass.getpass(question)
                store_plaintext_passwords = self._config.get(
                    "preferences", "store-plaintext-passwords", fallback=None)
                if store_plaintext_passwords != "no":
                    # Bug fix: self._config_filename is a string attribute,
                    # not a method; calling it raised TypeError.
                    question = ("Do you want to store your password in plain text in " +
                                self._config_filename)
                    answer = ask(question, ["yes", "no", "never"], "no")
                    if answer == "yes":
                        self._config.set("credentials", "password", password)
                        self._save_config()
                    elif answer == "never":
                        if "preferences" not in self._config:
                            self._config.add_section("preferences")
                        self._config.set("preferences",
                                        "store-plaintext-passwords", "no")
                        self._save_config()

    return password
python
{ "resource": "" }
q260364
ProfitBricksService.get_datacenter
validation
def get_datacenter(self, datacenter_id, depth=1):
    """Retrieve a data center by its ID.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param depth: The depth of the response data.
    :type depth: ``int``
    """
    endpoint = '/datacenters/{}?depth={}'.format(datacenter_id, str(depth))
    return self._perform_request(endpoint)
python
{ "resource": "" }
q260365
ProfitBricksService.get_datacenter_by_name
validation
def get_datacenter_by_name(self, name, depth=1):
    """Retrieve a single data center by name.

    Delegates the relaxed matching (exact, case-insensitive, prefix and
    substring match) to ``find_item_by_name`` and raises if the result is
    not exactly one data center.

    :param name: The name of the data center.
    :type name: ``str``

    :param depth: The depth of the response data.
    :type depth: ``int``

    :raises NameError: when zero or more than one data center matches.
    """
    candidates = self.list_datacenters(depth=depth)['items']
    matches = find_item_by_name(candidates,
                                lambda i: i['properties']['name'], name)
    if not matches:
        raise NameError("No data center found with name "
                        "containing '{name}'.".format(name=name))
    if len(matches) > 1:
        raise NameError("Found {n} data centers with the name '{name}': {names}".format(
            n=len(matches),
            name=name,
            names=", ".join(d['properties']['name'] for d in matches)
        ))
    return matches[0]
python
{ "resource": "" }
q260366
ProfitBricksService.delete_datacenter
validation
def delete_datacenter(self, datacenter_id):
    """Remove the data center and all its components such as servers,
    NICs, load balancers, volumes.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``
    """
    endpoint = '/datacenters/{}'.format(datacenter_id)
    return self._perform_request(url=endpoint, method='DELETE')
python
{ "resource": "" }
q260367
ProfitBricksService.get_firewall_rule
validation
def get_firewall_rule(self, datacenter_id, server_id, nic_id, firewall_rule_id):
    """Retrieve a single firewall rule by ID.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param server_id: The unique ID of the server.
    :type server_id: ``str``

    :param nic_id: The unique ID of the NIC.
    :type nic_id: ``str``

    :param firewall_rule_id: The unique ID of the firewall rule.
    :type firewall_rule_id: ``str``
    """
    endpoint = '/datacenters/{}/servers/{}/nics/{}/firewallrules/{}'.format(
        datacenter_id, server_id, nic_id, firewall_rule_id)
    return self._perform_request(endpoint)
python
{ "resource": "" }
q260368
ProfitBricksService.delete_firewall_rule
validation
def delete_firewall_rule(self, datacenter_id, server_id, nic_id, firewall_rule_id):
    """Remove a firewall rule from the NIC.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param server_id: The unique ID of the server.
    :type server_id: ``str``

    :param nic_id: The unique ID of the NIC.
    :type nic_id: ``str``

    :param firewall_rule_id: The unique ID of the firewall rule.
    :type firewall_rule_id: ``str``
    """
    endpoint = '/datacenters/{}/servers/{}/nics/{}/firewallrules/{}'.format(
        datacenter_id, server_id, nic_id, firewall_rule_id)
    return self._perform_request(url=endpoint, method='DELETE')
python
{ "resource": "" }
q260369
ProfitBricksService.create_firewall_rule
validation
def create_firewall_rule(self, datacenter_id, server_id, nic_id, firewall_rule):
    """Create a firewall rule on the specified NIC and server.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param server_id: The unique ID of the server.
    :type server_id: ``str``

    :param nic_id: The unique ID of the NIC.
    :type nic_id: ``str``

    :param firewall_rule: A firewall rule object with name/protocol and
        optional source/target/port/icmp attributes.
    """
    properties = {"name": firewall_rule.name}
    if firewall_rule.protocol:
        properties['protocol'] = firewall_rule.protocol

    # Optional properties are sent only when they are set (truthy).
    optional = (
        ('sourceMac', firewall_rule.source_mac),
        ('sourceIp', firewall_rule.source_ip),
        ('targetIp', firewall_rule.target_ip),
        ('portRangeStart', firewall_rule.port_range_start),
        ('portRangeEnd', firewall_rule.port_range_end),
        ('icmpType', firewall_rule.icmp_type),
        ('icmpCode', firewall_rule.icmp_code),
    )
    for key, value in optional:
        if value:
            properties[key] = value

    endpoint = '/datacenters/{}/servers/{}/nics/{}/firewallrules'.format(
        datacenter_id, server_id, nic_id)
    return self._perform_request(
        url=endpoint,
        method='POST',
        data=json.dumps({"properties": properties}))
python
{ "resource": "" }
q260370
ProfitBricksService.update_firewall_rule
validation
def update_firewall_rule(self, datacenter_id, server_id, nic_id,
                         firewall_rule_id, **kwargs):
    """Update a firewall rule.

    Keyword arguments are converted from snake_case to camelCase and sent
    as the PATCH body.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param server_id: The unique ID of the server.
    :type server_id: ``str``

    :param nic_id: The unique ID of the NIC.
    :type nic_id: ``str``

    :param firewall_rule_id: The unique ID of the firewall rule.
    :type firewall_rule_id: ``str``
    """
    # Defect fixed: the original stored the camelCased key for every kwarg
    # and then repeated the same assignments in a redundant if/elif chain
    # (whose else-branch duplicated the first line verbatim).
    # _underscore_to_camelcase already yields 'sourceMac', 'portRangeStart',
    # etc. — the same conversion the other update_* methods rely on — so a
    # single conversion per kwarg suffices.
    data = {}
    for attr, value in kwargs.items():
        data[self._underscore_to_camelcase(attr)] = value

    response = self._perform_request(
        url='/datacenters/%s/servers/%s/nics/%s/firewallrules/%s' % (
            datacenter_id,
            server_id,
            nic_id,
            firewall_rule_id),
        method='PATCH',
        data=json.dumps(data))

    return response
python
{ "resource": "" }
q260371
ProfitBricksService.delete_image
validation
def delete_image(self, image_id):
    """Remove a (user-created) image.

    :param image_id: The unique ID of the image.
    :type image_id: ``str``
    """
    endpoint = '/images/' + image_id
    return self._perform_request(url=endpoint, method='DELETE')
python
{ "resource": "" }
q260372
ProfitBricksService.update_image
validation
def update_image(self, image_id, **kwargs):
    """Replace all properties of an image.

    Keyword arguments are converted from snake_case to camelCase and sent
    as the PATCH body.

    :param image_id: The unique ID of the image.
    :type image_id: ``str``
    """
    payload = {self._underscore_to_camelcase(attr): value
               for attr, value in kwargs.items()}
    return self._perform_request(url='/images/' + image_id,
                                 method='PATCH',
                                 data=json.dumps(payload))
python
{ "resource": "" }
q260373
ProfitBricksService.delete_ipblock
validation
def delete_ipblock(self, ipblock_id):
    """Remove a single IP block from your account.

    :param ipblock_id: The unique ID of the IP block.
    :type ipblock_id: ``str``
    """
    endpoint = '/ipblocks/' + ipblock_id
    return self._perform_request(url=endpoint, method='DELETE')
python
{ "resource": "" }
q260374
ProfitBricksService.reserve_ipblock
validation
def reserve_ipblock(self, ipblock):
    """Reserve an IP block within your account.

    :param ipblock: An object carrying ``name`` and optional ``location``
        and ``size`` attributes.
    """
    properties = {"name": ipblock.name}
    if ipblock.location:
        properties['location'] = ipblock.location
    if ipblock.size:
        properties['size'] = str(ipblock.size)

    body = json.dumps({"properties": properties})
    return self._perform_request(url='/ipblocks', method='POST', data=body)
python
{ "resource": "" }
q260375
ProfitBricksService.get_lan
validation
def get_lan(self, datacenter_id, lan_id, depth=1):
    """Retrieve a single LAN by ID.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param lan_id: The unique ID of the LAN.
    :type lan_id: ``str``

    :param depth: The depth of the response data.
    :type depth: ``int``
    """
    endpoint = '/datacenters/{}/lans/{}?depth={}'.format(
        datacenter_id, lan_id, str(depth))
    return self._perform_request(endpoint)
python
{ "resource": "" }
q260376
ProfitBricksService.list_lans
validation
def list_lans(self, datacenter_id, depth=1):
    """Retrieve the list of LANs in the data center.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param depth: The depth of the response data.
    :type depth: ``int``
    """
    endpoint = '/datacenters/{}/lans?depth={}'.format(datacenter_id, str(depth))
    return self._perform_request(endpoint)
python
{ "resource": "" }
q260377
ProfitBricksService.delete_lan
validation
def delete_lan(self, datacenter_id, lan_id):
    """Remove a LAN from the data center.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param lan_id: The unique ID of the LAN.
    :type lan_id: ``str``
    """
    endpoint = '/datacenters/{}/lans/{}'.format(datacenter_id, lan_id)
    return self._perform_request(url=endpoint, method='DELETE')
python
{ "resource": "" }
q260378
ProfitBricksService.create_lan
validation
def create_lan(self, datacenter_id, lan):
    """Create a LAN in the data center.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param lan: The LAN object to be created.
    """
    body = json.dumps(self._create_lan_dict(lan))
    return self._perform_request(
        url='/datacenters/{}/lans'.format(datacenter_id),
        method='POST',
        data=body)
python
{ "resource": "" }
q260379
ProfitBricksService.update_lan
validation
def update_lan(self, datacenter_id, lan_id, name=None,
               public=None, ip_failover=None):
    """Update a LAN.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param lan_id: The unique ID of the LAN.
    :type lan_id: ``str``

    :param name: The new name of the LAN.
    :type name: ``str``

    :param public: Indicates if the LAN is public.
    :type public: ``bool``

    :param ip_failover: A list of IP fail-over dicts.
    :type ip_failover: ``list``
    """
    payload = {}
    if name:
        payload['name'] = name
    # `public` is a tri-state flag: only None means "leave unchanged".
    if public is not None:
        payload['public'] = public
    if ip_failover:
        payload['ipFailover'] = ip_failover

    return self._perform_request(
        url='/datacenters/{}/lans/{}'.format(datacenter_id, lan_id),
        method='PATCH',
        data=json.dumps(payload))
python
{ "resource": "" }
q260380
ProfitBricksService.get_lan_members
validation
def get_lan_members(self, datacenter_id, lan_id, depth=1):
    """Retrieve the list of NICs that are part of the LAN.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param lan_id: The unique ID of the LAN.
    :type lan_id: ``str``

    :param depth: The depth of the response data.
    :type depth: ``int``
    """
    endpoint = '/datacenters/{}/lans/{}/nics?depth={}'.format(
        datacenter_id, lan_id, str(depth))
    return self._perform_request(endpoint)
python
{ "resource": "" }
q260381
ProfitBricksService.get_loadbalancer
validation
def get_loadbalancer(self, datacenter_id, loadbalancer_id):
    """Retrieve a single load balancer by ID.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param loadbalancer_id: The unique ID of the load balancer.
    :type loadbalancer_id: ``str``
    """
    endpoint = '/datacenters/{}/loadbalancers/{}'.format(
        datacenter_id, loadbalancer_id)
    return self._perform_request(endpoint)
python
{ "resource": "" }
q260382
ProfitBricksService.list_loadbalancers
validation
def list_loadbalancers(self, datacenter_id, depth=1):
    """Retrieve the list of load balancers in the data center.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param depth: The depth of the response data.
    :type depth: ``int``
    """
    endpoint = '/datacenters/{}/loadbalancers?depth={}'.format(
        datacenter_id, str(depth))
    return self._perform_request(endpoint)
python
{ "resource": "" }
q260383
ProfitBricksService.delete_loadbalancer
validation
def delete_loadbalancer(self, datacenter_id, loadbalancer_id):
    """Remove the load balancer from the data center.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param loadbalancer_id: The unique ID of the load balancer.
    :type loadbalancer_id: ``str``
    """
    endpoint = '/datacenters/{}/loadbalancers/{}'.format(
        datacenter_id, loadbalancer_id)
    return self._perform_request(url=endpoint, method='DELETE')
python
{ "resource": "" }
q260384
ProfitBricksService.create_loadbalancer
validation
def create_loadbalancer(self, datacenter_id, loadbalancer):
    """Create a load balancer within the specified data center.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param loadbalancer: The load balancer object to be created.
    """
    body = json.dumps(self._create_loadbalancer_dict(loadbalancer))
    return self._perform_request(
        url='/datacenters/{}/loadbalancers'.format(datacenter_id),
        method='POST',
        data=body)
python
{ "resource": "" }
q260385
ProfitBricksService.update_loadbalancer
validation
def update_loadbalancer(self, datacenter_id, loadbalancer_id, **kwargs):
    """Update a load balancer.

    Keyword arguments are converted from snake_case to camelCase and sent
    as the PATCH body.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param loadbalancer_id: The unique ID of the load balancer.
    :type loadbalancer_id: ``str``
    """
    payload = {self._underscore_to_camelcase(attr): value
               for attr, value in kwargs.items()}
    return self._perform_request(
        url='/datacenters/{}/loadbalancers/{}'.format(
            datacenter_id, loadbalancer_id),
        method='PATCH',
        data=json.dumps(payload))
python
{ "resource": "" }
q260386
ProfitBricksService.get_loadbalancer_members
validation
def get_loadbalancer_members(self, datacenter_id, loadbalancer_id, depth=1):
    """Retrieve the list of NICs associated with a load balancer.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param loadbalancer_id: The unique ID of the load balancer.
    :type loadbalancer_id: ``str``

    :param depth: The depth of the response data.
    :type depth: ``int``
    """
    endpoint = '/datacenters/{}/loadbalancers/{}/balancednics?depth={}'.format(
        datacenter_id, loadbalancer_id, str(depth))
    return self._perform_request(endpoint)
python
{ "resource": "" }
q260387
ProfitBricksService.add_loadbalanced_nics
validation
def add_loadbalanced_nics(self, datacenter_id, loadbalancer_id, nic_id):
    """Associate a NIC with the given load balancer.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param loadbalancer_id: The unique ID of the load balancer.
    :type loadbalancer_id: ``str``

    :param nic_id: The ID of the NIC.
    :type nic_id: ``str``
    """
    # The API expects a minimal JSON body holding just the NIC ID; keep
    # the exact formatting of the original hand-built payload.
    body = '{ "id": "' + nic_id + '" }'
    endpoint = '/datacenters/{}/loadbalancers/{}/balancednics'.format(
        datacenter_id, loadbalancer_id)
    return self._perform_request(url=endpoint, method='POST', data=body)
python
{ "resource": "" }
q260388
ProfitBricksService.get_loadbalanced_nic
validation
def get_loadbalanced_nic(self, datacenter_id, loadbalancer_id, nic_id, depth=1):
    """
    Gets the properties of a load balanced NIC.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param loadbalancer_id: The unique ID of the load balancer.
    :type loadbalancer_id: ``str``

    :param nic_id: The unique ID of the NIC.
    :type nic_id: ``str``

    :param depth: The depth of the response data.
    :type depth: ``int``
    """
    url = '/datacenters/%s/loadbalancers/%s/balancednics/%s?depth=%s' % (
        datacenter_id, loadbalancer_id, nic_id, str(depth))
    return self._perform_request(url)
python
{ "resource": "" }
q260389
ProfitBricksService.remove_loadbalanced_nic
validation
def remove_loadbalanced_nic(self, datacenter_id, loadbalancer_id, nic_id):
    """
    Removes a NIC from the load balancer.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param loadbalancer_id: The unique ID of the load balancer.
    :type loadbalancer_id: ``str``

    :param nic_id: The unique ID of the NIC.
    :type nic_id: ``str``
    """
    url = '/datacenters/%s/loadbalancers/%s/balancednics/%s' % (
        datacenter_id, loadbalancer_id, nic_id)
    return self._perform_request(url=url, method='DELETE')
python
{ "resource": "" }
q260390
ProfitBricksService.get_location
validation
def get_location(self, location_id, depth=0):
    """
    Retrieves a single location by ID.

    :param location_id: The unique ID of the location.
    :type location_id: ``str``

    :param depth: The depth of the response data.
    :type depth: ``int``
    """
    url = '/locations/%s?depth=%s' % (location_id, depth)
    return self._perform_request(url)
python
{ "resource": "" }
q260391
ProfitBricksService.get_nic
validation
def get_nic(self, datacenter_id, server_id, nic_id, depth=1):
    """
    Retrieves a NIC by its ID.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param server_id: The unique ID of the server.
    :type server_id: ``str``

    :param nic_id: The unique ID of the NIC.
    :type nic_id: ``str``

    :param depth: The depth of the response data.
    :type depth: ``int``
    """
    url = '/datacenters/%s/servers/%s/nics/%s?depth=%s' % (
        datacenter_id, server_id, nic_id, str(depth))
    return self._perform_request(url)
python
{ "resource": "" }
q260392
ProfitBricksService.list_nics
validation
def list_nics(self, datacenter_id, server_id, depth=1):
    """
    Retrieves a list of all NICs bound to the specified server.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param server_id: The unique ID of the server.
    :type server_id: ``str``

    :param depth: The depth of the response data.
    :type depth: ``int``
    """
    url = '/datacenters/%s/servers/%s/nics?depth=%s' % (
        datacenter_id, server_id, str(depth))
    return self._perform_request(url)
python
{ "resource": "" }
q260393
ProfitBricksService.delete_nic
validation
def delete_nic(self, datacenter_id, server_id, nic_id):
    """
    Removes a NIC from the server.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param server_id: The unique ID of the server.
    :type server_id: ``str``

    :param nic_id: The unique ID of the NIC.
    :type nic_id: ``str``
    """
    url = '/datacenters/%s/servers/%s/nics/%s' % (
        datacenter_id, server_id, nic_id)
    return self._perform_request(url=url, method='DELETE')
python
{ "resource": "" }
q260394
ProfitBricksService.create_nic
validation
def create_nic(self, datacenter_id, server_id, nic):
    """
    Creates a NIC on the specified server.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param server_id: The unique ID of the server.
    :type server_id: ``str``

    :param nic: A NIC dict.
    :type nic: ``dict``
    """
    # Normalize the NIC dict into the API payload shape before POSTing.
    payload = self._create_nic_dict(nic)
    url = '/datacenters/%s/servers/%s/nics' % (datacenter_id, server_id)
    return self._perform_request(
        url=url, method='POST', data=json.dumps(payload))
python
{ "resource": "" }
q260395
ProfitBricksService.update_nic
validation
def update_nic(self, datacenter_id, server_id, nic_id, **kwargs):
    """
    Updates a NIC with the parameters provided.

    Each keyword argument is converted from snake_case to camelCase
    and sent as a property in the PATCH payload.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param server_id: The unique ID of the server.
    :type server_id: ``str``

    :param nic_id: The unique ID of the NIC.
    :type nic_id: ``str``
    """
    # Translate Python-style keyword names to the API's camelCase keys.
    data = {
        self._underscore_to_camelcase(key): val
        for key, val in kwargs.items()
    }

    url = '/datacenters/%s/servers/%s/nics/%s' % (
        datacenter_id, server_id, nic_id)
    return self._perform_request(url=url, method='PATCH', data=json.dumps(data))
python
{ "resource": "" }
q260396
ProfitBricksService.get_request
validation
def get_request(self, request_id, status=False):
    """
    Retrieves a single request by ID.

    :param request_id: The unique ID of the request.
    :type request_id: ``str``

    :param status: Retrieve the full status of the request.
    :type status: ``bool``
    """
    # Choose the status sub-resource when the caller asks for it,
    # otherwise fetch the request itself; one exit point either way.
    if status:
        url = '/requests/' + request_id + '/status'
    else:
        url = '/requests/%s' % request_id
    return self._perform_request(url)
python
{ "resource": "" }
q260397
ProfitBricksService.get_server
validation
def get_server(self, datacenter_id, server_id, depth=1):
    """
    Retrieves a server by its ID.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param server_id: The unique ID of the server.
    :type server_id: ``str``

    :param depth: The depth of the response data.
    :type depth: ``int``
    """
    url = '/datacenters/%s/servers/%s?depth=%s' % (
        datacenter_id, server_id, str(depth))
    return self._perform_request(url)
python
{ "resource": "" }
q260398
ProfitBricksService.list_servers
validation
def list_servers(self, datacenter_id, depth=1):
    """
    Retrieves a list of all servers bound to the specified data center.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param depth: The depth of the response data.
    :type depth: ``int``
    """
    url = '/datacenters/%s/servers?depth=%s' % (datacenter_id, str(depth))
    return self._perform_request(url)
python
{ "resource": "" }
q260399
ProfitBricksService.delete_server
validation
def delete_server(self, datacenter_id, server_id):
    """
    Removes the server from your data center.

    :param datacenter_id: The unique ID of the data center.
    :type datacenter_id: ``str``

    :param server_id: The unique ID of the server.
    :type server_id: ``str``
    """
    url = '/datacenters/%s/servers/%s' % (datacenter_id, server_id)
    return self._perform_request(url=url, method='DELETE')
python
{ "resource": "" }