code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def get_proxies_from_environ():
    """Collect HTTP/HTTPS proxy settings from os.environ.

    The lowercase variable is preferred, with the uppercase variant as
    fallback.

    :return: mapping with optional ``'http'`` and ``'https'`` keys
    :rtype: dict
    """
    proxies = {}
    for scheme in ('http', 'https'):
        value = os.getenv(scheme + '_proxy') or os.getenv(scheme.upper() + '_PROXY')
        if value:
            proxies[scheme] = value
    return proxies
Get proxies from os.environ.
def build_output_table(cls, name='inputTableName', output_name='output'):
    """Build an output table parameter.

    :param cls: parameter class to instantiate
    :param name: parameter name
    :type name: str
    :param output_name: bound output port name
    :type output_name: str
    :return: configured output description
    :rtype: ParamDef
    """
    param = cls(name)
    param.exporter = 'get_output_table_name'
    param.output_name = output_name
    return param
Build an output table parameter :param name: parameter name :type name: str :param output_name: bind input port name :type output_name: str :return: output description :rtype: ParamDef
def lookup(self, label):
    """Resolve *label* within this picklist.

    For a child-bearing picklist, return (creating on first use) the
    child picklist for *label*; otherwise map the label to its value.
    """
    if not self.is_child:
        return get_label_value(label, self._picklist)
    try:
        return self._children[label]
    except KeyError:
        child = ChildFieldPicklist(self.parent, label, self.field_name)
        self._children[label] = child
        return child
take a field_name_label and return the id
def queue_ramp_dicts(ramp_dict_list, server_ip_and_port):
    """Queue a list of ramp dictionaries on a BEC server, then start it.

    :param ramp_dict_list: iterable of ramp dictionaries to enqueue
    :param server_ip_and_port: address of the server to connect to
    """
    bec_client = server.ClientForServer(server.BECServer, server_ip_and_port)
    for ramp in ramp_dict_list:
        bec_client.queue_ramp(ramp)
    bec_client.start({})
Simple utility function to queue up a list of dictionaries.
def setup(app):
    """Setup function for the sphinx_to_github Sphinx extension.

    Registers the extension's config values and hooks the post-build
    processing callback.
    """
    for option in ("sphinx_to_github", "sphinx_to_github_verbose"):
        app.add_config_value(option, True, '')
    app.connect("build-finished", sphinx_extension)
Setup function for Sphinx Extension
def makeCubiccFunc(self, mNrm, cNrm):
    """Makes a cubic spline interpolation of the unconstrained consumption
    function for this period.

    Parameters
    ----------
    mNrm : np.array
        Corresponding market resource points for interpolation.
    cNrm : np.array
        Consumption points for interpolation.

    Returns
    -------
    cFuncNowUnc : CubicInterp
        The unconstrained consumption function for this period.
    """
    # End-of-period second derivative of value wrt assets, discounted and
    # adjusted for the return factor and permanent income growth.
    EndOfPrdvPP = self.DiscFacEff*self.Rfree*self.Rfree*self.PermGroFac**(-self.CRRA-1.0)* \
        np.sum(self.PermShkVals_temp**(-self.CRRA-1.0)*
               self.vPPfuncNext(self.mNrmNext)*self.ShkPrbs_temp, axis=0)
    # Marginal propensity to consume implied by the Euler equation at each
    # gridpoint (skipping the first consumption point).
    dcda = EndOfPrdvPP/self.uPP(np.array(cNrm[1:]))
    MPC = dcda/(dcda+1.)
    # Prepend the known upper bound of the MPC at the lower gridpoint.
    MPC = np.insert(MPC, 0, self.MPCmaxNow)
    # The limiting linear consumption function guides extrapolation.
    cFuncNowUnc = CubicInterp(mNrm, cNrm, MPC,
                              self.MPCminNow*self.hNrmNow, self.MPCminNow)
    return cFuncNowUnc
Makes a cubic spline interpolation of the unconstrained consumption function for this period. Parameters ---------- mNrm : np.array Corresponding market resource points for interpolation. cNrm : np.array Consumption points for interpolation. Returns ------- cFuncUnc : CubicInterp The unconstrained consumption function for this period.
def add_object(self, obj):
    """Add *obj* to its layer in the display state.

    Creates the layer mapping on first use, stores the object under its
    key, and flags the state for redraw.

    :param obj: object exposing ``layer`` and ``key`` attributes
    """
    state = self.state
    # setdefault replaces the original `if not obj.layer in ...` guard
    # (non-idiomatic negation) and avoids a double lookup.
    state.layers.setdefault(obj.layer, {})[obj.key] = obj
    state.need_redraw = True
Add an object to a layer.
def update_assessment(self, assessment_form):
    """Updates an existing assessment.

    arg:    assessment_form (osid.assessment.AssessmentForm): the form
            containing the elements to be updated
    raise:  IllegalState - ``assessment_form`` already used in an
            update transaction
    raise:  InvalidArgument - the form contains an invalid value
    raise:  NullArgument - ``assessment_form`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure occurred
    raise:  Unsupported - ``assessment_form`` did not originate from
            ``get_assessment_form_for_update()``

    *compliance: mandatory -- This method must be implemented.*
    """
    collection = JSONClientValidated('assessment',
                                     collection='Assessment',
                                     runtime=self._runtime)
    if not isinstance(assessment_form, ABCAssessmentForm):
        raise errors.InvalidArgument('argument type is not an AssessmentForm')
    if not assessment_form.is_for_update():
        raise errors.InvalidArgument('the AssessmentForm is for update only, not create')
    try:
        # Each form may only be consumed once; a missing entry means the
        # form was not issued by this session.
        if self._forms[assessment_form.get_id().get_identifier()] == UPDATED:
            raise errors.IllegalState('assessment_form already used in an update transaction')
    except KeyError:
        raise errors.Unsupported('assessment_form did not originate from this session')
    if not assessment_form.is_valid():
        raise errors.InvalidArgument('one or more of the form elements is invalid')
    collection.save(assessment_form._my_map)
    # Mark the form as consumed so it cannot be reused.
    self._forms[assessment_form.get_id().get_identifier()] = UPDATED
    return objects.Assessment(
        osid_object_map=assessment_form._my_map,
        runtime=self._runtime,
        proxy=self._proxy)
Updates an existing assessment. arg: assessment_form (osid.assessment.AssessmentForm): the form containing the elements to be updated raise: IllegalState - ``assessment_form`` already used in an update transaction raise: InvalidArgument - the form contains an invalid value raise: NullArgument - ``assessment_form`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure occurred raise: Unsupported - ``assessment_form did not originate from get_assessment_form_for_update()`` *compliance: mandatory -- This method must be implemented.*
def reset(self):
    """Attempts to reset the dongle to a known state.

    When called, this method resets the internal state of the object
    and disconnects any active connections.
    """
    logger.debug('resetting dongle state')
    self._clear()
    if self.api is not None:
        self._set_state(Dongle._STATE_RESET)
        # Make the dongle neither discoverable nor connectable.
        self.api.ble_cmd_gap_set_mode(
            gap_discoverable_mode['gap_non_discoverable'],
            gap_connectable_mode['gap_non_connectable'])
        self._wait_for_state(self._STATE_RESET)
        # Tear down every supported connection slot, waiting for each
        # disconnect to be acknowledged before moving on.
        for i in range(self.supported_connections):
            self._set_conn_state(i, self._STATE_DISCONNECTING)
            self.api.ble_cmd_connection_disconnect(i)
            self._wait_for_conn_state(i, self._STATE_DISCONNECTING)
    logger.debug('reset completed')
Attempts to reset the dongle to a known state. When called, this method will reset the internal state of the object, and disconnect any active connections.
def get_user(uid, channel=14, **kwargs):
    """Get a user's name and channel access by uid.

    :param uid: user number [1:16]
    :param channel: channel number [1:7]
    :param kwargs: connection parameters (api_host, api_user, api_pass,
        api_port, api_kg) forwarded to the lookup helpers
    :return: dict with name, uid, channel and access information
    """
    user_name = get_user_name(uid, **kwargs)
    user_access = get_user_access(uid, channel, **kwargs)
    return {
        'name': user_name,
        'uid': uid,
        'channel': channel,
        'access': user_access['access'],
    }
Get user from uid and access on channel :param uid: user number [1:16] :param channel: number [1:7] :param kwargs: - api_host=127.0.0.1 - api_user=admin - api_pass=example - api_port=623 - api_kg=None Return Data .. code-block:: none name: (str) uid: (int) channel: (int) access: - callback (bool) - link_auth (bool) - ipmi_msg (bool) - privilege_level: (str)[callback, user, operatorm administrator, proprietary, no_access] CLI Examples: .. code-block:: bash salt-call ipmi.get_user uid=2
def _nominal_kernel(x, y, out): for i in range(x.shape[0]): for j in range(y.shape[0]): out[i, j] += (x[i, :] == y[j, :]).sum() return out
Number of features that match exactly
def policy(self):
    """Return the VNXIOPolicy that contains this ioclass, or None."""
    for candidate in VNXIOPolicy.get(cli=self._cli):
        if self._get_name() in candidate.ioclasses.name:
            return VNXIOPolicy.get(name=candidate.name, cli=self._cli)
    return None
Returns policy which contains this ioclass.
def clients(self):
    """Generate a list of all Clients from the remote API."""
    response = self.get_request('clients/')
    return [Client(self, entry['client']) for entry in response]
Generates a list of all Clients.
def ls_dir(base_dir):
    """List files under *base_dir* recursively.

    The first occurrence of *base_dir* is stripped from each directory
    path before joining with the file name.
    """
    listing = []
    for dirpath, _dirnames, files in os.walk(base_dir):
        trimmed = dirpath.replace(base_dir, '', 1)
        listing.extend(os.path.join(trimmed, fname) for fname in files)
    return listing
List files recursively.
def get_strategy(name_or_cls):
    """Return an instance of the strategy identified by its name.

    If ``name_or_cls`` is a class, it is instantiated directly.
    """
    if not isinstance(name_or_cls, six.string_types):
        return name_or_cls()
    if name_or_cls not in STRATS:
        raise MutationError("strat is not defined")
    return STRATS[name_or_cls]()
Return the strategy identified by its name. If ``name_or_class`` is a class, it will be simply returned.
def _get_dependencies_from_cache(ireq):
    """Retrieve cached dependencies for the requirement.

    Returns a ``(dependencies, requires_python)`` tuple on a cache hit,
    or ``None`` when the cache should be bypassed: the local-cache
    override is set, the requirement is editable, the entry is missing,
    or the entry is broken (in which case it is also evicted).
    """
    if os.environ.get("PASSA_IGNORE_LOCAL_CACHE"):
        return
    if ireq.editable:
        return
    try:
        deps = DEPENDENCY_CACHE[ireq]
        pyrq = REQUIRES_PYTHON_CACHE[ireq]
    except KeyError:
        return
    try:
        # Validate the cached requires-python specifier and every cached
        # dependency line; any parse failure marks the entry broken.
        packaging.specifiers.SpecifierSet(pyrq)
        ireq_name = packaging.utils.canonicalize_name(ireq.name)
        if any(_is_cache_broken(line, ireq_name) for line in deps):
            broken = True
        else:
            broken = False
    except Exception:
        broken = True
    if broken:
        print("dropping broken cache for {0}".format(ireq.name))
        del DEPENDENCY_CACHE[ireq]
        del REQUIRES_PYTHON_CACHE[ireq]
        return
    return deps, pyrq
Retrieves dependencies for the requirement from the dependency cache.
def missing_property_names(self):
    """Returns a list of properties which are required and missing.

    Properties are excluded from this list if they are allowed to be
    null.

    :return: list of missing properties.
    """
    propname = lambda x: self.__prop_names__[x]
    missing = []
    for x in self.__required__:
        propinfo = self.propinfo(propname(x))
        null_type = False
        # A property is nullable if its declared type is 'null' or a
        # list/tuple of types that contains 'null'.
        if 'type' in propinfo:
            type_info = propinfo['type']
            null_type = (type_info == 'null' or
                         isinstance(type_info, (list, tuple)) and
                         'null' in type_info)
        elif 'oneOf' in propinfo:
            for o in propinfo['oneOf']:
                type_info = o.get('type')
                if type_info and type_info == 'null' \
                        or isinstance(type_info, (list, tuple)) \
                        and 'null' in type_info:
                    null_type = True
                    break
        # NOTE(review): if a required property is absent from
        # self._properties AND is not nullable, the second clause below
        # raises KeyError -- confirm that required non-null properties
        # are always pre-populated in self._properties.
        if (propname(x) not in self._properties and null_type) or \
                (self._properties[propname(x)] is None and not null_type):
            missing.append(x)
    return missing
Returns a list of properties which are required and missing. Properties are excluded from this list if they are allowed to be null. :return: list of missing properties.
def to_struct_file(self, f): if isinstance(f, str): f = open(f,'w') f.write("STRUCTURE {0}\n".format(self.name)) f.write(" NUGGET {0}\n".format(self.nugget)) f.write(" NUMVARIOGRAM {0}\n".format(len(self.variograms))) for v in self.variograms: f.write(" VARIOGRAM {0} {1}\n".format(v.name,v.contribution)) f.write(" TRANSFORM {0}\n".format(self.transform)) f.write("END STRUCTURE\n\n") for v in self.variograms: v.to_struct_file(f)
write a PEST-style structure file Parameters ---------- f : (str or file handle) file to write the GeoStruct information to
def get_proj(geom, proj_list=None):
    """Determine the best projection for the input geometry.

    Falls back to a UTM SRS when no predefined projection box
    intersects the geometry.
    """
    if proj_list is None:
        proj_list = gen_proj_list()
    for candidate in proj_list:
        if candidate.geom.Intersects(geom):
            return candidate.srs
    return getUTMsrs(geom)
Determine best projection for input geometry
def size(self, units="MiB"):
    """Return the volume group size converted to *units*.

    *Args:*

    *       units (str): Unit label ('MiB', 'GiB', etc...). Default is MiB.
    """
    self.open()
    raw_size = lvm_vg_get_size(self.handle)
    self.close()
    return size_convert(raw_size, units)
Returns the volume group size in the given units. Default units are MiB. *Args:* * units (str): Unit label ('MiB', 'GiB', etc...). Default is MiB.
def guest_resize_mem(self, userid, size):
    """Resize the memory of a guest.

    :param userid: (str) the userid of the guest to be resized
    :param size: (str) the memory size the guest should have defined in
        the user directory after resize; an integer (1-4 digits)
        suffixed with M (Megabytes) or G (Gigabytes)
    """
    action = "resize guest '%s' to have '%s' memory" % (userid, size)
    LOG.info("Begin to %s" % action)
    # Any SDK base error is logged and re-raised with the action context.
    with zvmutils.log_and_reraise_sdkbase_error(action):
        self._vmops.resize_memory(userid, size)
    LOG.info("%s successfully." % action)
Resize memory of guests. :param userid: (str) the userid of the guest to be resized :param size: (str) The memory size that the guest should have defined in user directory after resize. The value should be specified by 1-4 bits of number suffixed by either M (Megabytes) or G (Gigabytes). And the number should be an integer.
def _load(self):
    """Collect reference data and connect it to the instance as
    attributes.

    Internal function; called automatically when an attribute is
    requested, so it does not usually need to be called by the user.

    :return None
    """
    data = get_data(self.endpoint, self.id_, force_lookup=self.__force_lookup)
    for key, val in data.items():
        # 'location_area_encounters' on a pokemon resource holds a URL to
        # a subresource; fetch it and objectify the results instead of
        # keeping the raw URL string.
        if key == 'location_area_encounters' \
                and self.endpoint == 'pokemon':
            params = val.split('/')[-3:]
            ep, id_, subr = params
            encounters = get_data(ep, int(id_), subr)
            data[key] = [_make_obj(enc) for enc in encounters]
            continue
        # Convert nested dicts and lists into attribute-style objects.
        if isinstance(val, dict):
            data[key] = _make_obj(val)
        elif isinstance(val, list):
            data[key] = [_make_obj(i) for i in val]
    self.__dict__.update(data)
    return None
Function to collect reference data and connect it to the instance as attributes. Internal function, does not usually need to be called by the user, as it is called automatically when an attribute is requested. :return None
def external_metadata(self, datasource_type=None, datasource_id=None):
    """Gets column info from the source system.

    For 'druid' the registered datasource is looked up by id; for
    'table' a transient Table connector is built from the request args
    (db_id, table_name, schema).
    """
    if datasource_type == 'druid':
        datasource = ConnectorRegistry.get_datasource(
            datasource_type, datasource_id, db.session)
    elif datasource_type == 'table':
        database = (
            db.session
            .query(Database)
            .filter_by(id=request.args.get('db_id'))
            .one()
        )
        Table = ConnectorRegistry.sources['table']
        datasource = Table(
            database=database,
            table_name=request.args.get('table_name'),
            schema=request.args.get('schema') or None,
        )
    # NOTE(review): any other datasource_type leaves `datasource`
    # unbound and raises NameError below -- confirm callers only ever
    # pass 'druid' or 'table'.
    external_metadata = datasource.external_metadata()
    return self.json_response(external_metadata)
Gets column info from the source system
def element(cls, name, parent=None, interleave=None, occur=0):
    """Create an element schema node named *name*."""
    node = cls("element", parent, interleave=interleave)
    node.occur = occur
    node.attr["name"] = name
    return node
Create an element node.
def _do_register(cls, code, name, hash_name=None, hash_new=None): cls._func_from_name[name.replace('-', '_')] = code cls._func_from_name[name.replace('_', '-')] = code if hash_name: cls._func_from_hash[hash_name] = code cls._func_hash[code] = cls._hash(hash_name, hash_new)
Add hash function data to the registry without checks.
def add_fabfile():
    """Copy the base fabfile.py into the current working directory as
    fabfile_deployer.py, refusing to overwrite an existing one.
    """
    fabfile_src = os.path.join(PACKAGE_ROOT, 'fabfile.py')
    fabfile_dest = os.path.join(os.getcwd(), 'fabfile_deployer.py')
    if os.path.exists(fabfile_dest):
        # NOTE(review): the message says `fabfile.py` but the path being
        # checked is fabfile_deployer.py -- confirm the intended wording.
        # Python 2 print statement: this module targets Python 2.
        print "`fabfile.py` exists in the current directory. " \
              "Please remove or rename it and try again."
        return
    shutil.copyfile(fabfile_src, fabfile_dest)
Copy the base fabfile.py to the current working directory.
def text_concat(*args, **kwargs):
    """Concatenate several values as a text string.

    An optional ``separator`` keyword argument is placed between the
    parts; values that convert to a falsy text are skipped.
    """
    sep = text_value(kwargs.get("separator", ""))
    pieces = filter(None, [text_value(item) for item in args])
    return sep.join(pieces)
Concatenate several values as a text string with an optional separator
def parse_xml(data, handle_units):
    """Parse XML data returned by NCSS into a squished dataset."""
    tree_root = ET.fromstring(data)
    dataset = parse_xml_dataset(tree_root, handle_units)
    return squish(dataset)
Parse XML data returned by NCSS.
def release(self):
    """Release the lock and clean up the instance's ``lock`` slot."""
    lock = vars(self).pop('lock', missing)
    if lock is not missing:
        self._release(lock)
Release the lock and cleanup
def _configure_injector(self, modules):
    """Create the injector and install the modules.

    There is a necessary order of calls: first `Config` and `Zsl` must
    be bound, then the app is registered into the global stack, and
    only then can all other modules be installed, since they may use
    `Zsl` and `Config` injection.

    :param modules: list of injection modules
    :type modules: list
    """
    self._register()
    self._create_injector()
    self._bind_core()
    self._bind_modules(modules)
    self.logger.debug("Injector configuration with modules {0}.".format(modules))
    # Mark dependency wiring as complete only after all modules installed.
    self._dependencies_initialized = True
Create the injector and install the modules. There is a necessary order of calls. First we have to bind `Config` and `Zsl`, then we need to register the app into the global stack and then we can install all other modules, which can use `Zsl` and `Config` injection. :param modules: list of injection modules :type modules: list
def cudaMallocPitch(pitch, rows, cols, elesize):
    """Allocate pitched device memory.

    Allocate pitched memory on the device associated with the current
    active context.

    Parameters
    ----------
    pitch : int
        Initial pitch value (the driver overwrites this).
    rows : int
        Requested pitched allocation height.
    cols : int
        Requested pitched allocation width, in elements.
    elesize : int
        Size of a memory element, in bytes.

    Returns
    -------
    ptr : ctypes pointer
        Pointer to allocated device memory.
    pitch : int
        Actual pitch chosen by the runtime (may exceed cols*elesize).
    """
    ptr = ctypes.c_void_p()
    # Fix: cudaMallocPitch's second argument is a size_t* OUT parameter.
    # The original passed c_size_t(pitch) by value, so the driver-chosen
    # pitch was never written back to the caller.
    pitch_out = ctypes.c_size_t(pitch)
    status = _libcudart.cudaMallocPitch(ctypes.byref(ptr),
                                        ctypes.byref(pitch_out),
                                        cols*elesize, rows)
    cudaCheckStatus(status)
    return ptr, pitch_out.value
Allocate pitched device memory. Allocate pitched memory on the device associated with the current active context. Parameters ---------- pitch : int Pitch for allocation. rows : int Requested pitched allocation height. cols : int Requested pitched allocation width. elesize : int Size of memory element. Returns ------- ptr : ctypes pointer Pointer to allocated device memory.
def array(data, tcoords=None, chcoords=None, scalarcoords=None,
          datacoords=None, attrs=None, name=None):
    """Create an array as an instance of xarray.DataArray with the
    Decode accessor.

    Args:
        data (numpy.ndarray): 2D (time x channel) array.
        tcoords (dict, optional): Arrays labeling the time axis.
        chcoords (dict, optional): Arrays labeling the channel axis.
        scalarcoords (dict, optional): Point-like values labeling no axis.
        datacoords (dict, optional): Arrays labeling both axes.
        attrs (dict, optional): Attributes for the instance.
        name (str, optional): Name of the instance.

    Returns:
        decode.array: The created array.
    """
    out = xr.DataArray(data, dims=('t', 'ch'), attrs=attrs, name=name)
    out.dca._initcoords()
    if tcoords is not None:
        out.coords.update({key: ('t', tcoords[key]) for key in tcoords})
    if chcoords is not None:
        out.coords.update({key: ('ch', chcoords[key]) for key in chcoords})
    if scalarcoords is not None:
        out.coords.update(scalarcoords)
    if datacoords is not None:
        out.coords.update(
            {key: (('t', 'ch'), datacoords[key]) for key in datacoords})
    return out
Create an array as an instance of xarray.DataArray with Decode accessor. Args: data (numpy.ndarray): 2D (time x channel) array. tcoords (dict, optional): Dictionary of arrays that label time axis. chcoords (dict, optional): Dictionary of arrays that label channel axis. scalarcoords (dict, optional): Dictionary of values that don't label any axes (point-like). datacoords (dict, optional): Dictionary of arrays that label time and channel axes. attrs (dict, optional): Dictionary of attributes to add to the instance. name (str, optional): String that names the instance. Returns: array (decode.array): Deode array.
def get_repository_form(self, *args, **kwargs):
    """Pass through to the provider RepositoryAdminSession.

    Dispatches to ``get_repository_form_for_create`` when the last
    positional argument is a list or ``repository_record_types`` is
    supplied; otherwise to ``get_repository_form_for_update``.
    """
    wants_create = (isinstance(args[-1], list) or
                    'repository_record_types' in kwargs)
    if wants_create:
        return self.get_repository_form_for_create(*args, **kwargs)
    return self.get_repository_form_for_update(*args, **kwargs)
Pass through to provider RepositoryAdminSession.get_repository_form_for_update
def infer_type(data):
    """Infer the type of objects returned by indicators.

    Returns 'scalar' for a number or None, 'summarystats' for a
    SummaryStats object, 'distribution_scalar' for a list of scalars,
    and 'distribution_summarystats' for a list of SummaryStats.
    """
    if isinstance(data, (type(None), numbers.Number)):
        return 'scalar'
    if isinstance(data, SummaryStats):
        return 'summarystats'
    if not hasattr(data, "__len__"):
        raise TypeError(
            "{} is not a valid input. It should be a number, a SummaryStats "
            "object, or a list".format(data))
    data = [x for x in data if x is not None]
    if not data or isinstance(data[0], numbers.Number):
        return 'distribution_scalar'
    if isinstance(data[0], SummaryStats):
        return 'distribution_summarystats'
    raise TypeError(
        "{} is not a valid input. It should be a number, a SummaryStats "
        "object, or None".format(data[0]))
Infer the type of objects returned by indicators. infer_type returns: - 'scalar' for a number or None, - 'summarystats' for a SummaryStats object, - 'distribution_scalar' for a list of scalars, - 'distribution_summarystats' for a list of SummaryStats objects
def cf_tokenize(s):
    """Split UserData into literal strings and instantiated
    CloudFormation helper objects (see HELPERS).

    Useful with Join() and Base64() to produce user data, e.g.
    ``Base64(Join('', cf_tokenize(userdata_string)))``.
    """
    tokens = []
    for chunk in split_re.split(s):
        match = replace_re.search(chunk)
        if not match:
            tokens.append(chunk)
            continue
        helper_args = [a.strip("'\" ") for a in match.group("args").split(",")]
        tokens.append(HELPERS[match.group("helper")](*helper_args).data)
    return tokens
Parses UserData for Cloudformation helper functions. http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/user-data.html http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/intrinsic-function-reference.html http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/quickref-cloudformation.html#scenario-userdata-base64 It breaks apart the given string at each recognized function (see HELPERS) and instantiates the helper function objects in place of those. Returns a list of parts as a result. Useful when used with Join() and Base64() CloudFormation functions to produce user data. ie: Base64(Join('', cf_tokenize(userdata_string)))
def convert_softmax(node, **kwargs):
    """Map MXNet's softmax operator attributes to ONNX's Softmax
    operator and return the created node."""
    name, input_nodes, attrs = get_inputs(node, kwargs)
    softmax_axis = int(attrs.get("axis", -1))
    onnx_node = onnx.helper.make_node(
        "Softmax",
        input_nodes,
        [name],
        axis=softmax_axis,
        name=name,
    )
    return [onnx_node]
Map MXNet's softmax operator attributes to onnx's Softmax operator and return the created node.
def notify(self, instance, old, new):
    """Call every callback registered for *instance*, unless disabled.

    One-argument callbacks receive ``new``; two-argument callbacks
    (registered with ``echo_old=True``) receive ``(old, new)``.
    """
    if self._disabled.get(instance, False):
        return
    for callback in self._callbacks.get(instance, []):
        callback(new)
    for callback in self._2arg_callbacks.get(instance, []):
        callback(old, new)
Call all callback functions with the current value Each callback will either be called using callback(new) or callback(old, new) depending on whether ``echo_old`` was set to `True` when calling :func:`~echo.add_callback` Parameters ---------- instance The instance to consider old The old value of the property new The new value of the property
def remove_editor(self, username, *args, **kwargs):
    """Remove an editor from this wiki page.

    :param username: The name or Redditor object of the user to remove.

    Delegates to :meth:`add_editor` with ``_delete=True``; any extra
    parameters are forwarded unchanged.
    """
    return self.add_editor(*args, username=username, _delete=True, **kwargs)
Remove an editor from this wiki page. :param username: The name or Redditor object of the user to remove. This method points to :meth:`add_editor` with _delete=True. Additional parameters are passed to :meth:`add_editor` and subsequently into :meth:`~praw.__init__.BaseReddit.request_json`.
def set_batch(self, data):
    """Store multiple documents.

    Args
        data <dict> documents to store, keyed by document id.

    Returns
        revs <dict> dictionary of new revisions indexed by document id.

    NOTE(review): uses dict.has_key(), so this code targets Python 2.
    """
    # Fetch the current revision of each document so updates don't
    # conflict with what is already stored.
    rows = self.bucket.view("_all_docs", keys=data.keys(), include_docs=True)
    existing = {}  # (unused)
    for row in rows:
        key = row.id
        if key and not data[key].has_key("_rev"):
            data[key]["_rev"] = row.doc["_rev"]
    # Stamp each document with its id before the bulk update.
    for id, item in data.items():
        data[id]["_id"] = id
    revs = {}
    for success, docid, rev_or_exc in self.bucket.update(data.values()):
        if not success and self.logger:
            # Best effort: conflicts are logged, not raised.
            self.logger.error("Document update conflict (batch) '%s', %s" % (docid, rev_or_exc))
        elif success:
            revs[docid] = rev_or_exc
    return revs
Store multiple documents Args data <dict> data to store, use document ids as keys Returns revs <dict> dictionary of new revisions indexed by document ids
def compute_cheby_coeff(f, m=30, N=None, *args, **kwargs):
    r"""Compute Chebyshev coefficients for a Filterbank element.

    Parameters
    ----------
    f : Filter
        Filterbank with at least 1 filter.
    m : int
        Maximum order of Chebyshev coeff to compute (default = 30).
    N : int
        Grid order used to compute quadrature (default = m + 1).
    i : int
        Index of the Filterbank element to compute (default = 0).

    Returns
    -------
    c : ndarray
        Matrix of Chebyshev coefficients.
    """
    G = f.G
    i = kwargs.pop('i', 0)
    if not N:
        N = m + 1
    # Affine map from the Chebyshev domain [-1, 1] onto [0, lmax].
    a1 = (G.lmax - 0) / 2
    a2 = (G.lmax + 0) / 2
    grid = np.arange(N)
    nodes = np.cos(np.pi * (grid + 0.5) / N)
    c = np.zeros(m + 1)
    for order in range(m + 1):
        c[order] = 2. / N * np.dot(f._kernels[i](a1 * nodes + a2),
                                   np.cos(np.pi * order * (grid + 0.5) / N))
    return c
r""" Compute Chebyshev coefficients for a Filterbank. Parameters ---------- f : Filter Filterbank with at least 1 filter m : int Maximum order of Chebyshev coeff to compute (default = 30) N : int Grid order used to compute quadrature (default = m + 1) i : int Index of the Filterbank element to compute (default = 0) Returns ------- c : ndarray Matrix of Chebyshev coefficients
def _write_report(report, file_path):
    """Write *report* to the given file path, encoding text as UTF-8."""
    with open(file_path, mode='wb') as out:
        if not isinstance(report, binary_type):
            report = report.encode('utf-8')
        out.write(report)
Write report to the given file path.
def stop_process(self, process, timeout=None):
    """Initiate a graceful stop of one process via SIGINT.

    Sets the process's ``terminate`` flag and, when *timeout* is given,
    records a wall-clock deadline in ``terminate_at``.
    """
    deadline = None if timeout is None else time.time() + timeout
    process["terminate"] = True
    if deadline is not None:
        process["terminate_at"] = deadline
    process["subprocess"].send_signal(signal.SIGINT)
Initiates a graceful stop of one process
def insert_arguments_into_sql_query(compilation_result, arguments):
    """Insert the arguments into the compiled SQL query to form a
    complete, executable query.

    Args:
        compilation_result: CompilationResult from the GraphQL compiler.
        arguments: Dict[str, Any], parameter name -> value.

    Returns:
        SQLAlchemy Selectable with parameters bound.
    """
    if compilation_result.language != SQL_LANGUAGE:
        raise AssertionError(u'Unexpected query output language: {}'.format(compilation_result))
    return compilation_result.query.params(**arguments)
Insert the arguments into the compiled SQL query to form a complete query. Args: compilation_result: CompilationResult, compilation result from the GraphQL compiler. arguments: Dict[str, Any], parameter name -> value, for every parameter the query expects. Returns: SQLAlchemy Selectable, a executable SQL query with parameters bound.
def _get_default_tempdir():
    """Calculate the default directory to use for temporary files.

    This routine should be called exactly once.

    A candidate temp dir is deemed usable if a randomly named file can
    be created and written there; the probe file is deleted afterwards.
    The name is randomized to prevent denial of service.
    """
    namer = _RandomNameSequence()
    dirlist = _candidate_tempdir_list()
    for dir in dirlist:
        if dir != _os.curdir:
            dir = _os.path.abspath(dir)
        # Try up to 100 random names to dodge name collisions.
        for seq in range(100):
            name = next(namer)
            filename = _os.path.join(dir, name)
            try:
                fd = _os.open(filename, _bin_openflags, 0o600)
                try:
                    try:
                        with _io.open(fd, 'wb', closefd=False) as fp:
                            fp.write(b'blat')
                    finally:
                        _os.close(fd)
                finally:
                    _os.unlink(filename)
                return dir
            except FileExistsError:
                # Collision: try another random name in the same dir.
                pass
            except OSError:
                # Directory unusable (permissions, read-only fs, ...);
                # move on to the next candidate.
                break
    raise FileNotFoundError(_errno.ENOENT,
                            "No usable temporary directory found in %s" %
                            dirlist)
Calculate the default directory to use for temporary files. This routine should be called exactly once. We determine whether or not a candidate temp dir is usable by trying to create and write to a file in that directory. If this is successful, the test file is deleted. To prevent denial of service, the name of the test file must be randomized.
def toggle(self, event=None):
    """Toggle the value between Yes and No, then report the edit."""
    target = self.rbno if self.choice.get() == "yes" else self.rbyes
    target.select()
    self.widgetEdited()
Toggle value between Yes and No
def stop(self) -> None:
    """Stop monitoring the base unit.

    Marks the object as shut down, closes the protocol, and cancels
    any outstanding tasks.
    """
    self._shutdown = True
    self._protocol.close()
    self.cancel_pending_tasks()
Stop monitoring the base unit.
def clone_rtcpath_update_rt_as(path, new_rt_as):
    """Clone the given RT NLRI `path`, updating it with a new RT_NLRI AS.

    Parameters:
        - `path`: (Path) RT_NLRI path to clone
        - `new_rt_as`: AS value for the cloned path's RT_NLRI
    """
    assert path and new_rt_as
    if not path or path.route_family != RF_RTC_UC:
        raise ValueError('Expected RT_NLRI path')
    new_rt_nlri = RouteTargetMembershipNLRI(new_rt_as,
                                            path.nlri.route_target)
    return RtcPath(path.source, new_rt_nlri, path.source_version_num,
                   pattrs=path.pathattr_map, nexthop=path.nexthop,
                   is_withdraw=path.is_withdraw)
Clones given RT NLRI `path`, and updates it with new RT_NLRI AS. Parameters: - `path`: (Path) RT_NLRI path - `new_rt_as`: AS value of cloned paths' RT_NLRI
def load_tag(corpus, path):
    """Iterate over all speakers in a tag folder and load them.

    Collects all utterance-idx and creates a subview of the corpus
    containing just them, imported under the tag's name.
    """
    tag_idx = os.path.basename(path)
    data_path = os.path.join(path, 'by_book')
    tag_utt_ids = []
    for gender_path in MailabsReader.get_folders(data_path):
        # The 'mix' folder has no single speaker attached; load with
        # speaker set to None.
        if os.path.basename(gender_path) == 'mix':
            utt_ids = MailabsReader.load_books_of_speaker(corpus, gender_path, None)
            tag_utt_ids.extend(utt_ids)
        else:
            for speaker_path in MailabsReader.get_folders(gender_path):
                speaker = MailabsReader.load_speaker(corpus, speaker_path)
                utt_ids = MailabsReader.load_books_of_speaker(corpus, speaker_path, speaker)
                tag_utt_ids.extend(utt_ids)
    # NOTE: shadows the builtin `filter` (kept byte-identical).
    filter = subset.MatchingUtteranceIdxFilter(
        utterance_idxs=set(tag_utt_ids)
    )
    subview = subset.Subview(corpus, filter_criteria=[filter])
    corpus.import_subview(tag_idx, subview)
Iterate over all speakers and load them. Collect all utterance-idxs and create a subset of them.
def validatePage(self):
    """Validates the page against the scaffold information, setting the
    values along the way.

    :return: True when every property validates; otherwise shows a
             warning dialog and returns False.
    """
    widgets = self.propertyWidgetMap()
    failed = ''
    for prop, widget in widgets.items():
        val, success = projexui.widgetValue(widget)
        if success:
            # Empty value: fall back to the default, or fail if required.
            # (bool False is a legitimate value, not "empty".)
            if not val and not (prop.type == 'bool' and val is False):
                if prop.default:
                    val = prop.default
                elif prop.required:
                    msg = '{0} is a required value'.format(prop.label)
                    failed = msg
                    break
            # Non-empty values must match the property regex, if any.
            elif prop.regex and not re.match(prop.regex, nativestring(val)):
                msg = '{0} needs to be in the format {1}'.format(prop.label,
                                                                 prop.regex)
                failed = msg
                break
            prop.value = val
        else:
            msg = 'Failed to get a proper value for {0}'.format(prop.label)
            failed = msg
            break
    if failed:
        QtGui.QMessageBox.warning(None, 'Properties Failed', failed)
        return False
    return True
Validates the page against the scaffold information, setting the values along the way.
def check_instance_folder(self, create=False):
    """Verify the instance folder exists, is a directory, and has the
    necessary permissions.

    :param:create: if `True`, creates the directory hierarchy
    :raises: OSError with a relevant errno if something is wrong.
    """
    path = Path(self.instance_path)
    error, code = None, 0
    if not path.exists():
        if not create:
            error, code = "Instance folder does not exists", errno.ENOENT
        else:
            logger.info("Create instance folder: %s", path)
            path.mkdir(0o775, parents=True)
    elif not path.is_dir():
        error, code = "Instance folder is not a directory", errno.ENOTDIR
    elif not os.access(str(path), os.R_OK | os.W_OK | os.X_OK):
        error, code = ('Require "rwx" access rights, please verify permissions',
                       errno.EPERM)
    if error:
        raise OSError(code, error, str(path))
Verify instance folder exists, is a directory, and has necessary permissions. :param:create: if `True`, creates directory hierarchy :raises: OSError with relevant errno if something is wrong.
def _get_active_contract_at_offset(self, root_symbol, dt, offset):
    """For the given root symbol, find the contract that is considered
    active on a specific date, then step *offset* contracts along the
    ordered chain.
    """
    oc = self.asset_finder.get_ordered_contracts(root_symbol)
    session = self.trading_calendar.minute_to_session_label(dt)
    front = oc.contract_before_auto_close(session.value)
    back = oc.contract_at_offset(front, 1, dt.value)
    # Without a back contract there is nothing to roll into, so the
    # front contract is necessarily the active one.
    if back is None:
        return front
    primary = self._active_contract(oc, front, back, session)
    return oc.contract_at_offset(primary, offset, session.value)
For the given root symbol, find the contract that is considered active on a specific date at a specific offset.
def version_to_evr(verstring):
    """Split the package version string into epoch, version and release.

    Return this as a tuple. The epoch is always non-empty; version and
    release can be empty strings when absent.

    "2:1.0-1.2" => ('2', '1.0', '1.2')
    "1.0"       => ('0', '1.0', '')
    ""          => ('0', '', '')
    """
    if verstring in [None, '']:
        return '0', '', ''
    idx_e = verstring.find(':')
    epoch = '0'
    if idx_e != -1:
        try:
            epoch = six.text_type(int(verstring[:idx_e]))
        except ValueError:
            # Non-numeric epoch prefix falls back to '0'.
            epoch = '0'
    version, _sep, release = verstring[idx_e + 1:].partition('-')
    return epoch, version, release
Split the package version string into epoch, version and release. Return this as tuple. The epoch is always not empty. The version and the release can be an empty string if such a component could not be found in the version string. "2:1.0-1.2" => ('2', '1.0', '1.2) "1.0" => ('0', '1.0', '') "" => ('0', '', '')
def run(self, stash='active', n=None, until=None, **kwargs):
    """Run until the SimulationManager has reached a completed state.

    :param stash: Operate on this stash.
    :param n: Step at most this many times.
    :param until: Optional predicate taking the manager; stepping
                  terminates once it returns True.
    :return: The simulation manager, for chaining.
    :rtype: SimulationManager
    """
    iterations = itertools.count() if n is None else range(0, n)
    for _ in iterations:
        if not self.complete() and self._stashes[stash]:
            self.step(stash=stash, **kwargs)
        if until and until(self):
            break
    return self
Run until the SimulationManager has reached a completed state, according to the current exploration techniques. If no exploration techniques that define a completion state are being used, run until there is nothing left to run. :param stash: Operate on this stash :param n: Step at most this many times :param until: If provided, should be a function that takes a SimulationManager and returns True or False. Stepping will terminate when it is True. :return: The simulation manager, for chaining. :rtype: SimulationManager
def bs_progress_bar(*args, **kwargs):
    """A Standard Bootstrap Progress Bar.

    http://getbootstrap.com/components/#progress

    Each positional argument is a bar percentage (0-100). A single
    ``context`` kwarg applies to every bar; otherwise ``contexts`` is
    cycled through for stacked bars.
    """
    palette = kwargs.get(
        'contexts', ['', 'success', 'info', 'warning', 'danger']
    )
    bars = [
        dict(percent=percent,
             context=kwargs.get('context', palette[idx % len(palette)]))
        for idx, percent in enumerate(args)
    ]
    return {
        'bars': bars,
        'text': kwargs.pop('text', False),
        'striped': kwargs.pop('striped', False),
        'animated': kwargs.pop('animated', False),
        'min_val': kwargs.pop('min_val', 0),
        'max_val': kwargs.pop('max_val', 100),
    }
A Standard Bootstrap Progress Bar. http://getbootstrap.com/components/#progress param args (Array of Numbers: 0-100): Percent of Progress Bars param context (String): Adds 'progress-bar-{context} to the class attribute param contexts (Array of Strings): Cycles through contexts for stacked bars param text (String): True: shows value within the bar, False: uses sr span param striped (Boolean): Adds 'progress-bar-striped' to the class attribute param animated (Boolean): Adds 'active' to the class attribute if striped param min_val (0): Used for the aria-min value param max_val (0): Used for the aria-max value
def _get_tunnel_context_mask(address_translations=False, internal_subnets=False, remote_subnets=False, static_subnets=False, service_subnets=False): entries = ['id', 'accountId', 'advancedConfigurationFlag', 'createDate', 'customerPeerIpAddress', 'modifyDate', 'name', 'friendlyName', 'internalPeerIpAddress', 'phaseOneAuthentication', 'phaseOneDiffieHellmanGroup', 'phaseOneEncryption', 'phaseOneKeylife', 'phaseTwoAuthentication', 'phaseTwoDiffieHellmanGroup', 'phaseTwoEncryption', 'phaseTwoKeylife', 'phaseTwoPerfectForwardSecrecy', 'presharedKey'] if address_translations: entries.append('addressTranslations[internalIpAddressRecord[ipAddress],' 'customerIpAddressRecord[ipAddress]]') if internal_subnets: entries.append('internalSubnets') if remote_subnets: entries.append('customerSubnets') if static_subnets: entries.append('staticRouteSubnets') if service_subnets: entries.append('serviceSubnets') return '[mask[{}]]'.format(','.join(entries))
Yields a mask object for a tunnel context. All exposed properties on the tunnel context service are included in the constructed mask. Additional joins may be requested. :param bool address_translations: Whether to join the context's address translation entries. :param bool internal_subnets: Whether to join the context's internal subnet associations. :param bool remote_subnets: Whether to join the context's remote subnet associations. :param bool static_subnets: Whether to join the context's statically routed subnet associations. :param bool service_subnets: Whether to join the SoftLayer service network subnets. :return string: Encoding for the requested mask object.
def start(controller_class):
    """Start the Helper controller either in the foreground or as a
    daemon process.

    :param controller_class: The controller class handle to create and run
    :type controller_class: callable
    """
    args = parser.parse()
    controller = controller_class(args, platform.operating_system())
    if args.foreground:
        try:
            controller.start()
        except KeyboardInterrupt:
            controller.stop()
        return
    try:
        with platform.Daemon(controller) as daemon:
            daemon.start()
    except (OSError, ValueError) as error:
        sys.stderr.write('\nError starting %s: %s\n\n' %
                         (sys.argv[0], error))
        sys.exit(1)
Start the Helper controller either in the foreground or as a daemon process. :param controller_class: The controller class handle to create and run :type controller_class: callable
def database(connection_string, db_class=SimplDB):
    """Return the shared database instance for *connection_string*.

    Instances are memoized in a dict stored as an attribute of this
    function, so the same connection string always yields the same
    database object.
    """
    registry = getattr(database, "singletons", None)
    if registry is None:
        registry = {}
        database.singletons = registry
    if connection_string not in registry:
        registry[connection_string] = db_class(connection_string)
    return registry[connection_string]
Return database singleton instance. This function will always return the same database instance for the same connection_string. It stores instances in a dict saved as an attribute of this function.
def read_batch_from_datastore(self, class_batch_id):
    """Read and return a single classification batch from the datastore.

    :raises KeyError: when the batch id is not present.
    """
    client = self._datastore_client
    key = client.key(KIND_CLASSIFICATION_BATCH, class_batch_id)
    result = client.get(key)
    if result is None:
        raise KeyError(
            'Key {0} not found in the datastore'.format(key.flat_path))
    return dict(result)
Reads and returns single batch from the datastore.
def run(self):
    """Thread main loop: repeatedly (re)open the socket and wait for
    messages until stopped.

    Overrides threading.Thread.run.
    """
    while True:
        self.open_lock.acquire()
        # NOTE(review): on the stop path we return while still holding
        # open_lock -- confirm that is intentional (e.g. to block any
        # further opens after shutdown).
        if self.stopped():
            return
        self.__open()
        self.open_lock.release()
This override threading.Thread to open socket and wait for messages.
def from_uncharted_json_file(cls, file):
    """Construct an AnalysisGraph from a file of INDRA statements
    exported by Uncharted's CauseMos webapp."""
    with open(file, "r") as handle:
        payload = json.load(handle)
    return cls.from_uncharted_json_serialized_dict(payload)
Construct an AnalysisGraph object from a file containing INDRA statements serialized exported by Uncharted's CauseMos webapp.
def prepare_authorization_request(self, authorization_url, state=None, redirect_url=None, scope=None, **kwargs):
    """Prepare the authorization request (first step of many OAuth flows).

    :param authorization_url: Provider authorization endpoint URL (HTTPS).
    :param state: CSRF protection string; auto-generated if not given and
        kept on ``self.state`` for later verification.
    :param redirect_url: URL the user is returned to after authorization.
    :param scope: Requested scope(s).
    :param kwargs: Additional parameters to include in the request URI.
    :returns: Tuple of (url, headers, body); the body is empty.
    :raises InsecureTransportError: if authorization_url is not HTTPS.
    """
    if not is_secure_transport(authorization_url):
        raise InsecureTransportError()

    # Persist the effective values so the later token request and response
    # validation can reuse them.
    self.state = state or self.state_generator()
    self.redirect_url = redirect_url or self.redirect_url
    self.scope = scope or self.scope
    auth_url = self.prepare_request_uri(
        authorization_url, redirect_uri=self.redirect_url,
        scope=self.scope, state=self.state, **kwargs)
    return auth_url, FORM_ENC_HEADERS, ''
Prepare the authorization request. This is the first step in many OAuth flows in which the user is redirected to a certain authorization URL. This method adds required parameters to the authorization URL. :param authorization_url: Provider authorization endpoint URL. :param state: CSRF protection string. Will be automatically created if not provided. The generated state is available via the ``state`` attribute. Clients should verify that the state is unchanged and present in the authorization response. This verification is done automatically if using the ``authorization_response`` parameter with ``prepare_token_request``. :param redirect_url: Redirect URL to which the user will be returned after authorization. Must be provided unless previously setup with the provider. If provided then it must also be provided in the token request. :param scope: :param kwargs: Additional parameters to included in the request. :returns: The prepared request tuple with (url, headers, body).
def meantsubpool(d, data_read):
    """Wrapper for mean visibility subtraction in time, spread over
    per-thread baseline ranges with a multiprocessing pool.

    :param d: pipeline state dict (uses 'nbl' and 'nthread').
    :param data_read: visibility data; rebound below to a shared-memory view.
    """
    logger.info('Subtracting mean visibility in time...')
    data_read = numpyview(data_read_mem, 'complex64', datashape(d))
    tsubpart = partial(rtlib.meantsub, data_read)

    # One (start, stop) baseline range per worker thread.
    blranges = [(d['nbl'] * t/d['nthread'], d['nbl']*(t+1)/d['nthread'])
                for t in range(d['nthread'])]

    with closing(mp.Pool(1, initializer=initreadonly, initargs=(data_read_mem,))) as tsubpool:
        # Bug fix: previously mapped over the undefined name 'blr', which
        # raised NameError; the computed 'blranges' is the intended iterable.
        tsubpool.map(tsubpart, blranges)
Wrapper for mean visibility subtraction in time. Doesn't work when called from pipeline using multiprocessing pool.
def _build_index(maf_strm, ref_spec):
    """Build an index for a MAF genome alignment file and return a StringIO
    of the index contents, rewound to position 0.

    :param maf_strm: stream with the MAF alignment data.
    :param ref_spec: reference species passed to the alignment iterator.
    """
    idx_strm = StringIO.StringIO()
    bound_iter = functools.partial(genome_alignment_iterator, reference_species=ref_spec)
    hash_func = JustInTimeGenomeAlignmentBlock.build_hash
    idx = IndexedFile(maf_strm, bound_iter, hash_func)
    idx.write_index(idx_strm)
    idx_strm.seek(0)  # rewind so callers can read from the start
    return idx_strm
Build an index for a MAF genome alig file and return StringIO of it.
def _outer_error_is_decreasing(self):
    """True if the outer iteration error is decreasing."""
    decreasing, updated_error = self._error_is_decreasing(self._last_outer_error)
    self._last_outer_error = updated_error
    return decreasing
True if outer iteration error is decreasing.
def _make_wildcard_attr_map():
    """Build a dict mapping snake_case attribute names (as used on
    OpenflowMatch) to the non-'All'/'Mask' wildcard bits of the
    OpenflowWildcard enumeration."""
    attr_map = {}
    for wildcard in OpenflowWildcard:
        name = wildcard.name
        if name.endswith('All') or name.endswith('Mask'):
            continue
        # CamelCase -> _camel_case (each uppercase gains a '_' prefix)
        snake = ''.join('_' + ch.lower() if ch.isupper() else ch
                        for ch in name)
        attr_map[snake] = wildcard
    return attr_map
Create a dictionary that maps an attribute name in OpenflowMatch with a non-prefix-related wildcard bit from the above OpenflowWildcard enumeration.
def do_exit(self, arg_list: List[str]) -> bool:
    """Exit the application with an optional exit code.

    Usage:  exit [exit_code]
        Where:
        * exit_code - integer exit code to return to the shell
    """
    if arg_list:
        code_text = arg_list[0]
        try:
            self.exit_code = int(code_text)
        except ValueError:
            self.perror("{} isn't a valid integer exit code".format(code_text))
            self.exit_code = -1

    self._should_quit = True
    return self._STOP_AND_EXIT
Exit the application with an optional exit code. Usage: exit [exit_code] Where: * exit_code - integer exit code to return to the shell
def create_schema_from_xsd_directory(directory, version):
    """Create and fill a Schema from a directory containing xsd files.

    Calls fill_schema_from_xsd_file for each xsd file found.

    :param directory: directory scanned for xsd files.
    :param version: version passed to the Schema constructor.
    :return: the populated Schema.
    """
    schema = Schema(version)
    for f in _get_xsd_from_directory(directory):
        logger.info("Loading schema %s" % f)
        fill_schema_from_xsd_file(f, schema)
    return schema
Create and fill the schema from a directory which contains xsd files. It calls fill_schema_from_xsd_file for each xsd file found.
def read(address, length):
    """Prepare an i2c read transaction.

    :param address: Slave address (int).
    :param length: Number of bytes to read (int).
    :return: New i2c_msg instance flagged for reading (I2C_M_RD).
    """
    arr = create_string_buffer(length)  # buffer the driver fills on read
    return i2c_msg(
        addr=address, flags=I2C_M_RD,
        len=length, buf=arr)
Prepares an i2c read transaction. :param address: Slave address. :type: address: int :param length: Number of bytes to read. :type: length: int :return: New :py:class:`i2c_msg` instance for read operation. :rtype: :py:class:`i2c_msg`
def on_log(request, page_name):
    """Show the list of recent changes for a page.

    Renders action_log.html, or the page-missing view when no page with
    the given name exists.
    """
    page = Page.query.filter_by(name=page_name).first()
    if page is None:
        return page_missing(request, page_name, False)
    return Response(generate_template("action_log.html", page=page))
Show the list of recent changes.
def thresholdcoloring(coloring, names):
    """Threshold a coloring dictionary for a given list of column names.

    Keys whose value lists share no entries with `names` are removed; the
    surviving value lists are filtered to `names` and uniqified.  Modifies
    `coloring` in place and returns it.

    :param coloring: hierarchical column-name structure (dict of lists).
    :param names: list of column-name strings to keep.
    :return: the thresholded coloring dictionary.
    """
    # Iterate a snapshot of the keys: popping entries while iterating
    # coloring.keys() directly raises RuntimeError on Python 3.
    for key in list(coloring.keys()):
        # Compute the filtered list once instead of twice.
        kept = [k for k in coloring[key] if k in names]
        if not kept:
            coloring.pop(key)
        else:
            coloring[key] = utils.uniqify(kept)
    return coloring
Threshold a coloring dictionary for a given list of column names. Threshold `coloring` based on `names`, a list of strings in:: coloring.values() **Parameters** **coloring** : dictionary Hierarchical structure on the columns given in the header of the file; an attribute of tabarrays. See :func:`tabular.tab.tabarray.__new__` for more information about coloring. **names** : list of strings List of strings giving column names. **Returns** **newcoloring** : dictionary The thresholded coloring dictionary.
def index_exists(index, hosts=None, profile=None):
    """Return a boolean indicating whether given index exists.

    index
        Index name

    CLI example::

        salt myminion elasticsearch.index_exists testindex
    """
    es = _get_instance(hosts, profile)
    try:
        return es.indices.exists(index=index)
    except elasticsearch.exceptions.NotFoundError:
        # Missing index is a normal negative answer, not an error
        return False
    except elasticsearch.TransportError as e:
        raise CommandExecutionError("Cannot retrieve index {0}, server returned code {1} with message {2}".format(index, e.status_code, e.error))
Return a boolean indicating whether given index exists index Index name CLI example:: salt myminion elasticsearch.index_exists testindex
def update_warning(self):
    """Update the warning icon and tooltip based on array-content validity."""
    widget = self._button_warning
    if not self.is_valid():
        tip = _('Array dimensions not valid')
        widget.setIcon(ima.icon('MessageBoxWarning'))
        widget.setToolTip(tip)
        # Pop the tooltip immediately so the user sees why input is invalid
        QToolTip.showText(self._widget.mapToGlobal(QPoint(0, 5)), tip)
    else:
        self._button_warning.setToolTip('')
Updates the icon and tip based on the validity of the array content.
def init_atom_feed(self, feed):
    """Initialize a `feedgen.feed.FeedGenerator` for the given feed object.

    :param feed: a feed object
    :return: an atom feed `feedgen.feed.FeedGenerator`
    """
    atom_feed = FeedGenerator()
    atom_feed.id(id=self.request.route_url(self.get_atom_feed_url, id=feed.id))
    atom_feed.link(href=self.request.route_url(self.get_atom_feed_url, id=feed.id), rel='self')
    atom_feed.language('nl-BE')
    # Pagination links to the neighbouring feeds, when they exist
    self.link_to_sibling(feed, 'previous', atom_feed)
    self.link_to_sibling(feed, 'next', atom_feed)
    return atom_feed
Initializing an atom feed `feedgen.feed.FeedGenerator` given a feed object :param feed: a feed object :return: an atom feed `feedgen.feed.FeedGenerator`
def add(self, properties):
    """Add a faked HBA resource.

    Auto-allocates 'device-number' and 'wwpn' from the parent partition
    when not specified, and appends the new HBA's URI to the partition's
    'hba-uris' property.

    :param properties: Resource properties (dict).
    :return: The faked HBA resource.
    """
    new_hba = super(FakedHbaManager, self).add(properties)

    partition = self.parent

    # Reflect the new HBA in the parent partition's URI list
    assert 'hba-uris' in partition.properties
    partition.properties['hba-uris'].append(new_hba.uri)

    if 'device-number' not in new_hba.properties:
        devno = partition.devno_alloc()
        new_hba.properties['device-number'] = devno
    if 'wwpn' not in new_hba.properties:
        wwpn = partition.wwpn_alloc()
        new_hba.properties['wwpn'] = wwpn
    return new_hba
Add a faked HBA resource. Parameters: properties (dict): Resource properties. Special handling and requirements for certain properties: * 'element-id' will be auto-generated with a unique value across all instances of this resource type, if not specified. * 'element-uri' will be auto-generated based upon the element ID, if not specified. * 'class' will be auto-generated to 'hba', if not specified. * 'adapter-port-uri' identifies the backing FCP port for this HBA and is required to be specified. * 'device-number' will be auto-generated with a unique value within the partition in the range 0x8000 to 0xFFFF, if not specified. This method also updates the 'hba-uris' property in the parent faked Partition resource, by adding the URI for the faked HBA resource. Returns: :class:`~zhmcclient_mock.FakedHba`: The faked HBA resource. Raises: :exc:`zhmcclient_mock.InputError`: Some issue with the input properties.
def get_mr_filters(data_shape, opt='', coarse=False):
    """Obtain wavelet filters by running mr_transform on a Dirac impulse.

    :param data_shape: 2D data shape (tuple).
    :param opt: additional mr_transform options.
    :param coarse: keep the coarse scale (last filter) when True.
    :return: 3D np.ndarray of wavelet filters.
    """
    # Force every dimension to be odd so the impulse sits on a true centre.
    shape = np.array(data_shape)
    shape += shape % 2 - 1

    impulse = np.zeros(shape)
    impulse[tuple(zip(shape // 2))] = 1

    filters = call_mr_transform(impulse, opt=opt)

    # Drop the coarse scale unless explicitly requested.
    return filters if coarse else filters[:-1]
Get mr_transform filters

This method obtains wavelet filters by calling mr_transform

Parameters
----------
data_shape : tuple
    2D data shape
opt : list, optional
    List of additional mr_transform options
coarse : bool, optional
    Option to keep coarse scale (default is 'False')

Returns
-------
np.ndarray 3D array of wavelet filters
def _get_magnitude_scaling_term(self, C, mag): if mag < 6.75: return C["a1_lo"] + C["a2_lo"] * mag + C["a3"] *\ ((8.5 - mag) ** 2.0) else: return C["a1_hi"] + C["a2_hi"] * mag + C["a3"] *\ ((8.5 - mag) ** 2.0)
Returns the magnitude scaling term defined in equation 3
def quantize(image, nlevels):
    """Quantize an image into integers 0, 1, ..., nlevels - 1.

    image -- a numpy array of type float, range [0, 1]
    nlevels -- an integer
    """
    step = 1.0 / nlevels
    levels = np.array(image // step, dtype='i1')
    return levels.clip(0, nlevels - 1)
Quantize an image into integers 0, 1, ..., nlevels - 1. image -- a numpy array of type float, range [0, 1] nlevels -- an integer
def apply_upstring(upstring, component_list):
    """Set the "up" key on each component dict from the U/_ status string.

    Each character of `upstring` must be 'U' (up) or '_' (down) and maps
    positionally to a dict in `component_list`, which is modified in
    place.  AssertionError if lengths differ or a character is invalid.
    """
    assert len(upstring) == len(component_list)
    for indicator, comp_dict in zip(upstring, component_list):
        assert indicator in ('U', '_')
        comp_dict['up'] = (indicator == 'U')
Update the dictionaries resulting from ``parse_array_start`` with the "up" key based on the upstring returned from ``parse_upstring``. The function assumes that the upstring and component_list parameters passed in are from the same device array stanza of a ``/proc/mdstat`` file. The function modifies component_list in place, adding or updating the value of the "up" key to True if there is a corresponding ``U`` in the upstring string, or to False if there is a corresponding ``_``. If there the number of rows in component_list does not match the number of characters in upstring, an ``AssertionError`` is raised. Parameters ---------- upstring : str String sequence of ``U``s and ``_``s as determined by the ``parse_upstring`` method component_list : list List of dictionaries output from the ``parse_array_start`` method.
def table_schema(self):
    """Return the table schema as {column: {'default': ..., 'type': ...}},
    computed once and cached on self._table_schema.

    :returns: dict, or None when the table reports no columns.
    """
    if self.__dict__.get('_table_schema') is None:
        self._table_schema = None
        table_schema = {}
        for row in self.query_schema():
            name, default, dtype = self.db().lexicon.column_info(row)

            if isinstance(default, str):
                # Postgres reports jsonb defaults as "'...'::jsonb"; decode
                # the embedded JSON literal back into a Python value.
                json_matches = re.findall(r"^\'(.*)\'::jsonb$", default)
                if len(json_matches) > 0:
                    default = json.loads(json_matches[0])

            # Primary keys never carry a usable default here
            if name == self.primary_key:
                default = None

            table_schema[name] = {'default': default, 'type': dtype}
        if len(table_schema):
            self._table_schema = table_schema
    return self._table_schema
Returns the table schema. :returns: dict
def _write_config(self, cfg, slot):
    """Write configuration to the YubiKey and verify that the programming
    sequence counter advanced.

    :raises YubiKeyUSBHIDError: if the sequence number did not increase
        (and the key still holds valid configs).
    """
    old_pgm_seq = self._status.pgm_seq
    frame = cfg.to_frame(slot=slot)
    self._debug("Writing %s frame :\n%s\n" % \
                    (yubikey_config.command2str(frame.command), cfg))
    self._write(frame)
    self._waitfor_clear(yubikey_defs.SLOT_WRITE_FLAG)
    # Refresh status so pgm_seq reflects the write we just performed
    self.status()
    self._debug("Programmed slot %i, sequence %i -> %i\n" % (slot, old_pgm_seq, self._status.pgm_seq))
    cfgs = self._status.valid_configs()
    if not cfgs and self._status.pgm_seq == 0:
        # Wiping the last config resets the sequence to zero; not an error
        return
    if self._status.pgm_seq == old_pgm_seq + 1:
        return
    raise YubiKeyUSBHIDError('YubiKey programming failed (seq %i not increased (%i))' % \
                                 (old_pgm_seq, self._status.pgm_seq))
Write configuration to YubiKey.
def show_yticklabels_for_all(self, row_column_list=None):
    """Show the y-axis tick labels for the specified subplots.

    :param row_column_list: list of (row, column) tuples selecting
        subplots, or None to affect *all* subplots.
    """
    if row_column_list is None:
        for subplot in self.subplots:
            subplot.show_yticklabels()
        return
    for row, column in row_column_list:
        self.show_yticklabels(row, column)
Show the y-axis tick labels for all specified subplots. :param row_column_list: a list containing (row, column) tuples to specify the subplots, or None to indicate *all* subplots. :type row_column_list: list or None
def delete_directories(paths):
    """Delete the given directory or directories, including their files.

    :type paths: list of str, or str
    :param paths: the location(s) of the directories to remove.
    """
    # The docstring advertises "string or array of strings"; a bare string
    # previously iterated character by character, deleting wrong paths.
    if isinstance(paths, str):
        paths = [paths]
    for path in paths:
        if os.path.exists(path):
            shutil.rmtree(path)
Delete directories. If a directory exists, it will be deleted along with the files it contains.

:type paths: Array of string or string
:param paths: the location(s) of the directories
def get(self, key):
    """Return context data for a given app.

    Lookup is first by exact ID, then by case-insensitive "name" field;
    None when nothing matches.
    """
    keystr = str(key)
    if keystr in self.ctx:
        return self.ctx[keystr]
    wanted = keystr.lower()
    for entry in self.ctx.values():
        if "name" in entry and entry["name"].lower() == wanted:
            return entry
    return None
Returns context data for a given app, can be an ID or a case insensitive name
def sendhello(self):
    """Exchange NETCONF <hello> capability messages with the server.

    Sends the client hello advertising base:1.0, stores both the client
    and server hello messages in the connection parameters, and exits the
    process on failure (preserving the original behaviour).
    """
    try:
        cli_hello_msg = "<hello>\n" +\
                        " <capabilities>\n" +\
                        " <capability>urn:ietf:params:netconf:base:1.0</capability>\n" +\
                        " </capabilities>\n" +\
                        "</hello>\n"
        self._cParams.set('cli_hello', cli_hello_msg)
        self._hConn.sendmsg(cli_hello_msg)
        ser_hello_msg = self._hConn.recvmsg()
        self._cParams.set('ser_hello', ser_hello_msg)
    except Exception:
        # Fixed: the Python 2 'print' statements here were a syntax error
        # under Python 3 (the rest of this codebase uses f-strings and
        # annotations); also narrowed the bare 'except' to Exception so
        # SystemExit/KeyboardInterrupt are not swallowed.
        print('BNClient: Call sendhello fail')
        sys.exit()
Exchange NETCONF <hello> capability messages with the server: send the client hello advertising base:1.0, receive the server hello, and store both in the connection parameters.
def get_user(self, user_id, expand=False):
    """Return a Hacker News `User` object fetched from
    https://hacker-news.firebaseio.com/v0/user/<user_id>.json.

    :param user_id: unique user id of a Hacker News user.
    :param expand: when True, also fetch the user's submitted items and
        attach them as .stories/.comments/.jobs/.polls/.pollopts lists.
    :raises InvalidUserID: if no such user exists.
    """
    url = urljoin(self.user_url, F"{user_id}.json")
    response = self._get_sync(url)

    if not response:
        raise InvalidUserID

    user = User(response)

    if expand and user.submitted:
        items = self.get_items_by_ids(user.submitted)
        # Bucket the submitted items by type onto the user object
        user_opt = {
            'stories': 'story',
            'comments': 'comment',
            'jobs': 'job',
            'polls': 'poll',
            'pollopts': 'pollopt'
        }
        for key, value in user_opt.items():
            setattr(
                user,
                key,
                [i for i in items if i.item_type == value]
            )

    return user
Returns Hacker News `User` object. Fetches data from the url: https://hacker-news.firebaseio.com/v0/user/<user_id>.json e.g. https://hacker-news.firebaseio.com/v0/user/pg.json Args: user_id (string): unique user id of a Hacker News user. expand (bool): Flag to indicate whether to transform all IDs into objects. Returns: `User` object representing a user on Hacker News. Raises: InvalidUserID: If no such user exists on Hacker News.
def get_compression_type(self, file_name):
    """Determine the compression type from a file's extension and store it
    on self.ctype ('gzip', 'bzip2', 'lzma', or None).

    :param file_name: a given file name
    :type file_name: str
    """
    extension = os.path.splitext(file_name)[1]
    mapping = {
        '.gz': 'gzip',
        '.bz2': 'bzip2',
        '.xz': 'lzma',
        '.lzma': 'lzma',
    }
    self.ctype = mapping.get(extension)
Determine compression type for a given file using its extension. :param file_name: a given file name :type file_name: str
def get_all_spot_instance_requests(self, request_ids=None, filters=None):
    """Retrieve all the spot instance requests associated with your account.

    :param request_ids: list of spot instance request ID strings.
    :param filters: optional dict of EC2 filters limiting the results.
    :return: list of boto.ec2.spotinstancerequest.SpotInstanceRequest.
    """
    params = {}
    if request_ids:
        self.build_list_params(params, request_ids, 'SpotInstanceRequestId')
    if filters:
        if 'launch.group-id' in filters:
            lgid = filters.get('launch.group-id')
            # Group names are no longer accepted here; warn on anything
            # that does not look like a security group id (sg-xxxxxxxx)
            if not lgid.startswith('sg-') or len(lgid) != 11:
                warnings.warn(
                    "The 'launch.group-id' filter now requires a security "
                    "group id (sg-*) and no longer supports filtering by "
                    "group name. Please update your filters accordingly.",
                    UserWarning)
        self.build_filter_params(params, filters)
    return self.get_list('DescribeSpotInstanceRequests', params,
                         [('item', SpotInstanceRequest)], verb='POST')
Retrieve all the spot instances requests associated with your account. :type request_ids: list :param request_ids: A list of strings of spot instance request IDs :type filters: dict :param filters: Optional filters that can be used to limit the results returned. Filters are provided in the form of a dictionary consisting of filter names as the key and filter values as the value. The set of allowable filter names/values is dependent on the request being performed. Check the EC2 API guide for details. :rtype: list :return: A list of :class:`boto.ec2.spotinstancerequest.SpotInstanceRequest`
def execute(self, input_data):
    """Execute the Unzip worker: store each member of the zipped sample
    via workbench and return the resulting md5s.

    :param input_data: dict whose ['sample']['raw_bytes'] holds the zip
        archive contents.
    :return: {'payload_md5s': [md5, ...]}.
    """
    raw_bytes = input_data['sample']['raw_bytes']
    zipfile_output = zipfile.ZipFile(StringIO(raw_bytes))
    payload_md5s = []
    for name in zipfile_output.namelist():
        # Removed the dead local 'filename' (os.path.basename(name)),
        # which was computed but never used.
        payload_md5s.append(self.workbench.store_sample(zipfile_output.read(name), name, 'unknown'))
    return {'payload_md5s': payload_md5s}
Execute the Unzip worker
def get(cont=None, path=None, local_file=None, return_bin=False, profile=None):
    """List the contents of a container, or return an object from a container.

    With no arguments, lists the account's containers; with only `cont`,
    lists that container's contents.  Set return_bin=True to retrieve an
    object wholesale, or pass local_file to save the object to disk.
    Returns False when `path` is given without return_bin or local_file.
    """
    swift_conn = _auth(profile)
    if cont is None:
        return swift_conn.get_account()
    if path is None:
        return swift_conn.get_container(cont)
    if return_bin is True:
        return swift_conn.get_object(cont, path, return_bin)
    if local_file is not None:
        return swift_conn.get_object(cont, path, local_file)
    return False
List the contents of a container, or return an object from a container. Set return_bin to True in order to retrieve an object wholesale. Otherwise, Salt will attempt to parse an XML response. CLI Example to list containers: .. code-block:: bash salt myminion swift.get CLI Example to list the contents of a container: .. code-block:: bash salt myminion swift.get mycontainer CLI Example to return the binary contents of an object: .. code-block:: bash salt myminion swift.get mycontainer myfile.png return_bin=True CLI Example to save the binary contents of an object to a local file: .. code-block:: bash salt myminion swift.get mycontainer myfile.png local_file=/tmp/myfile.png
def sample(self, wavelength):
    """Sample at the given wavelength, assumed to be in user units.

    Converts to angstrom before evaluating self.
    """
    wave = self.waveunits.Convert(wavelength, 'angstrom')
    return self(wave)
Input wavelengths assumed to be in user unit.
def add_sip_to_fc(fc, tfidf, limit=40):
    """Add a "bowNP_sip" StringCounter to `fc` using the `tfidf` model.

    No-op when `fc` has no 'bowNP' counter or `tfidf` is None.
    """
    if 'bowNP' not in fc or tfidf is None:
        return
    phrases = features.sip_noun_phrases(tfidf, fc['bowNP'].keys(), limit=limit)
    fc[u'bowNP_sip'] = StringCounter(phrases)
add "bowNP_sip" to `fc` using `tfidf` data
def register(self, peer):
    """Register a peer according to its description bean.

    :param peer: A Peer description bean
    :raise KeyError: if the peer is already known
    """
    assert isinstance(peer, beans.Peer)
    with self.__lock:
        uid = peer.peer_id
        if uid in self.peers:
            raise KeyError("Already known peer: {0}".format(peer))

        self.peers[uid] = peer
        for group_name in peer.groups:
            members = self.groups.setdefault(group_name, set())
            members.add(uid)
Registers a peer according to its description :param peer: A Peer description bean :raise KeyError:
def CreateFlowArgs(self, flow_name=None):
    """Create a flow-arguments object for a flow with a given name.

    Flow descriptors are fetched from the server once and cached.

    :raises UnknownFlowName: if no descriptor exists for flow_name.
    """
    if not self._flow_descriptors:
        self._flow_descriptors = {}
        result = self._context.SendRequest("ListFlowDescriptors", None)
        for item in result.items:
            self._flow_descriptors[item.name] = item

    try:
        flow_descriptor = self._flow_descriptors[flow_name]
    except KeyError:
        raise UnknownFlowName(flow_name)

    # Copy so callers can mutate the args without touching the cache
    return utils.CopyProto(utils.UnpackAny(flow_descriptor.default_args))
Creates flow arguments object for a flow with a given name.
def get_events(maximum=10):
    """Get all events since the last time you asked for them.

    :param maximum: maximum number of events to drain (default 10).
    :return: list of events, or False when pymlgame is not initialized
        (the module-level CONTROLLER does not exist yet).
    """
    events = []
    for ev in range(0, maximum):
        try:
            if CONTROLLER.queue.empty():
                break
            else:
                events.append(CONTROLLER.queue.get_nowait())
        except NameError:
            # CONTROLLER is created by pymlgame.init(); the NameError on
            # first access is how "not initialized" is detected here.
            print('PyMLGame is not initialized correctly. Use pymlgame.init() first.')
            events = False
            break
    return events
Get all events since the last time you asked for them. You can define a maximum which is 10 by default. :param maximum: Maximum number of events :type maximum: int :return: List of events :rtype: list
def GET_parameteritemvalues(self) -> None:
    """Get the values of all ChangeItem objects handling Parameter objects,
    copying each item's value into the outputs mapping by name."""
    for item in state.parameteritems:
        self._outputs[item.name] = item.value
Get the values of all |ChangeItem| objects handling |Parameter| objects.
def _login(self, password):
    """Connect and authenticate to the SMTP server using `password`.

    Only needs to be re-run manually after the user closed the
    connection.  Honors self.starttls (True, or a dict of kwargs for
    smtplib's starttls) and self.smtp_skip_login.
    """
    self.smtp = self.connection(self.host, self.port, **self.kwargs)
    self.smtp.set_debuglevel(self.debuglevel)
    if self.starttls:
        self.smtp.ehlo()
        if self.starttls is True:
            self.smtp.starttls()
        else:
            self.smtp.starttls(**self.starttls)
        # Re-identify over the now-encrypted channel
        self.smtp.ehlo()
    self.is_closed = False
    if not self.smtp_skip_login:
        password = self.handle_password(self.user, password)
        self.smtp.login(self.user, password)
    self.log.info("Connected to SMTP @ %s:%s as %s", self.host, self.port, self.user)
Login to the SMTP server using password. `login` only needs to be manually run when the connection to the SMTP server was closed by the user.
def add_controller(self, key, controller):
    """Add a child controller and register its shortcut actions.

    :param key: Name of the controller (unique within self), to later
        access it again
    :param ExtendedController controller: Controller to be added as child
    """
    assert isinstance(controller, ExtendedController)
    controller.parent = self
    self.__child_controllers[key] = controller
    # Register shortcut callbacks exactly once per controller
    if self.__shortcut_manager is not None and controller not in self.__action_registered_controllers:
        controller.register_actions(self.__shortcut_manager)
        self.__action_registered_controllers.append(controller)
Add child controller The passed controller is registered as child of self. The register_actions method of the child controller is called, allowing the child controller to register shortcut callbacks. :param key: Name of the controller (unique within self), to later access it again :param ExtendedController controller: Controller to be added as child
def get(self, filename):
    """Download a distribution archive from the configured Amazon S3 bucket.

    :param filename: The filename of the distribution archive (a string).
    :returns: Local pathname of the archive, or None when not in the bucket.
    :raises CacheBackendError: when any underlying method fails.
    """
    timer = Timer()
    self.check_prerequisites()
    with PatchedBotoConfig():
        raw_key = self.get_cache_key(filename)
        logger.info("Checking if distribution archive is available in S3 bucket: %s", raw_key)
        key = self.s3_bucket.get_key(raw_key)
        if key is None:
            logger.debug("Distribution archive is not available in S3 bucket.")
        else:
            logger.info("Downloading distribution archive from S3 bucket ..")
            file_in_cache = os.path.join(self.config.binary_cache, filename)
            makedirs(os.path.dirname(file_in_cache))
            # Atomic replace avoids readers seeing a half-downloaded file
            with AtomicReplace(file_in_cache) as temporary_file:
                key.get_contents_to_filename(temporary_file)
            logger.debug("Finished downloading distribution archive from S3 bucket in %s.", timer)
            return file_in_cache
Download a distribution archive from the configured Amazon S3 bucket. :param filename: The filename of the distribution archive (a string). :returns: The pathname of a distribution archive on the local file system or :data:`None`. :raises: :exc:`.CacheBackendError` when any underlying method fails.
def get_config(self, name, default=_MISSING):
    """Get a configuration setting from this DeviceAdapter.

    Raises ArgumentError when the setting is absent and no default was
    supplied.  See AbstractDeviceAdapter.get_config.
    """
    value = self._config.get(name, default)
    if value is not _MISSING:
        return value
    raise ArgumentError("DeviceAdapter config {} did not exist and no default".format(name))
Get a configuration setting from this DeviceAdapter. See :meth:`AbstractDeviceAdapter.get_config`.
def version():
    """Return the system version for this minion.

    Dispatches on __grains__['kernel']: Linux reads /proc/version,
    FreeBSD/OpenBSD use sysctl, AIX uses oslevel; any other kernel yields
    an 'unsupported' message string.

    CLI Example::

        salt '*' status.version
    """
    def linux_version():
        # Best effort: a missing /proc/version yields an empty dict
        try:
            with salt.utils.files.fopen('/proc/version', 'r') as fp_:
                return salt.utils.stringutils.to_unicode(fp_.read()).strip()
        except IOError:
            return {}

    def bsd_version():
        return __salt__['cmd.run']('sysctl -n kern.version')

    # Map OS kernel name to the appropriate version lookup
    get_version = {
        'Linux': linux_version,
        'FreeBSD': bsd_version,
        'OpenBSD': bsd_version,
        'AIX': lambda: __salt__['cmd.run']('oslevel -s'),
    }

    errmsg = 'This method is unsupported on the current operating system!'
    return get_version.get(__grains__['kernel'], lambda: errmsg)()
Return the system version for this minion .. versionchanged:: 2016.11.4 Added support for AIX .. versionchanged:: 2018.3.0 Added support for OpenBSD CLI Example: .. code-block:: bash salt '*' status.version