id
int32
0
252k
repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
51
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
22,500
sdss/tree
setup.py
remove_args
def remove_args(parser):
    ''' Remove custom arguments from the parser '''
    # Collect every option string except --help, then strip any of them
    # that the user actually passed on the command line.
    custom_flags = [
        flag
        for action in parser._get_optional_actions()
        if '--help' not in action.option_strings
        for flag in action.option_strings
    ]
    for flag in custom_flags:
        if flag in sys.argv:
            sys.argv.remove(flag)
python
def remove_args(parser): ''' Remove custom arguments from the parser ''' arguments = [] for action in list(parser._get_optional_actions()): if '--help' not in action.option_strings: arguments += action.option_strings for arg in arguments: if arg in sys.argv: sys.argv.remove(arg)
[ "def", "remove_args", "(", "parser", ")", ":", "arguments", "=", "[", "]", "for", "action", "in", "list", "(", "parser", ".", "_get_optional_actions", "(", ")", ")", ":", "if", "'--help'", "not", "in", "action", ".", "option_strings", ":", "arguments", "...
Remove custom arguments from the parser
[ "Remove", "custom", "arguments", "from", "the", "parser" ]
f61fe0876c138ccb61874912d4b8590dadfa835c
https://github.com/sdss/tree/blob/f61fe0876c138ccb61874912d4b8590dadfa835c/setup.py#L70-L80
22,501
sarugaku/shellingham
tasks/__init__.py
_render_log
def _render_log():
    """Totally tap into Towncrier internals to get an in-memory result.
    """
    config = load_config(ROOT)
    types = config['types']
    fragments, _filenames = find_fragments(
        pathlib.Path(config['directory']).absolute(),
        config['sections'],
        None,
        types,
    )
    template_text = pathlib.Path(config['template']).read_text(encoding='utf-8')
    return render_fragments(
        template_text,
        config['issue_format'],
        split_fragments(fragments, types),
        types,
        config['underlines'][1:],
    )
python
def _render_log(): config = load_config(ROOT) definitions = config['types'] fragments, fragment_filenames = find_fragments( pathlib.Path(config['directory']).absolute(), config['sections'], None, definitions, ) rendered = render_fragments( pathlib.Path(config['template']).read_text(encoding='utf-8'), config['issue_format'], split_fragments(fragments, definitions), definitions, config['underlines'][1:], ) return rendered
[ "def", "_render_log", "(", ")", ":", "config", "=", "load_config", "(", "ROOT", ")", "definitions", "=", "config", "[", "'types'", "]", "fragments", ",", "fragment_filenames", "=", "find_fragments", "(", "pathlib", ".", "Path", "(", "config", "[", "'director...
Totally tap into Towncrier internals to get an in-memory result.
[ "Totally", "tap", "into", "Towncrier", "internals", "to", "get", "an", "in", "-", "memory", "result", "." ]
295fc3094ef05437597ea0baa02f5cd7a3335d28
https://github.com/sarugaku/shellingham/blob/295fc3094ef05437597ea0baa02f5cd7a3335d28/tasks/__init__.py#L49-L67
22,502
sods/paramz
paramz/core/nameable.py
adjust_name_for_printing
def adjust_name_for_printing(name):
    """
    Make sure a name can be printed, alongside used as a variable name.
    """
    if name is None:
        return ''
    original = name
    # Substitution order matters: '**' must be handled before '*'.
    for old, new in (
        (" ", "_"), (".", "_"), ("-", "_m_"),
        ("+", "_p_"), ("!", "_I_"),
        ("**", "_xx_"), ("*", "_x_"),
        ("/", "_l_"), ("@", "_at_"),
        ("(", "_of_"), (")", ""),
    ):
        name = name.replace(old, new)
    if re.match(r'^[a-zA-Z_][a-zA-Z0-9-_]*$', name) is None:
        raise NameError("name {} converted to {} cannot be further converted to valid python variable name!".format(original, name))
    return name
python
def adjust_name_for_printing(name): if name is not None: name2 = name name = name.replace(" ", "_").replace(".", "_").replace("-", "_m_") name = name.replace("+", "_p_").replace("!", "_I_") name = name.replace("**", "_xx_").replace("*", "_x_") name = name.replace("/", "_l_").replace("@", '_at_') name = name.replace("(", "_of_").replace(")", "") if re.match(r'^[a-zA-Z_][a-zA-Z0-9-_]*$', name) is None: raise NameError("name {} converted to {} cannot be further converted to valid python variable name!".format(name2, name)) return name return ''
[ "def", "adjust_name_for_printing", "(", "name", ")", ":", "if", "name", "is", "not", "None", ":", "name2", "=", "name", "name", "=", "name", ".", "replace", "(", "\" \"", ",", "\"_\"", ")", ".", "replace", "(", "\".\"", ",", "\"_\"", ")", ".", "repla...
Make sure a name can be printed, alongside used as a variable name.
[ "Make", "sure", "a", "name", "can", "be", "printed", "alongside", "used", "as", "a", "variable", "name", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/nameable.py#L33-L47
22,503
sods/paramz
paramz/core/nameable.py
Nameable.name
def name(self, name):
    """
    Set the name of this object.
    Tell the parent if the name has changed.
    """
    previous = self.name
    assert isinstance(name, str)
    self._name = name
    if self.has_parent():
        self._parent_._name_changed(self, previous)
python
def name(self, name): from_name = self.name assert isinstance(name, str) self._name = name if self.has_parent(): self._parent_._name_changed(self, from_name)
[ "def", "name", "(", "self", ",", "name", ")", ":", "from_name", "=", "self", ".", "name", "assert", "isinstance", "(", "name", ",", "str", ")", "self", ".", "_name", "=", "name", "if", "self", ".", "has_parent", "(", ")", ":", "self", ".", "_parent...
Set the name of this object. Tell the parent if the name has changed.
[ "Set", "the", "name", "of", "this", "object", ".", "Tell", "the", "parent", "if", "the", "name", "has", "changed", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/nameable.py#L65-L74
22,504
sods/paramz
paramz/core/nameable.py
Nameable.hierarchy_name
def hierarchy_name(self, adjust_for_printing=True):
    """
    return the name for this object with the parents names attached by dots.

    :param bool adjust_for_printing: whether to call
        :func:`~adjust_for_printing()` on the names, recursively
    """
    adjust = adjust_name_for_printing if adjust_for_printing else (lambda x: x)
    if not self.has_parent():
        return adjust(self.name)
    return self._parent_.hierarchy_name() + "." + adjust(self.name)
python
def hierarchy_name(self, adjust_for_printing=True): if adjust_for_printing: adjust = lambda x: adjust_name_for_printing(x) else: adjust = lambda x: x if self.has_parent(): return self._parent_.hierarchy_name() + "." + adjust(self.name) return adjust(self.name)
[ "def", "hierarchy_name", "(", "self", ",", "adjust_for_printing", "=", "True", ")", ":", "if", "adjust_for_printing", ":", "adjust", "=", "lambda", "x", ":", "adjust_name_for_printing", "(", "x", ")", "else", ":", "adjust", "=", "lambda", "x", ":", "x", "i...
return the name for this object with the parents names attached by dots. :param bool adjust_for_printing: whether to call :func:`~adjust_for_printing()` on the names, recursively
[ "return", "the", "name", "for", "this", "object", "with", "the", "parents", "names", "attached", "by", "dots", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/nameable.py#L76-L88
22,505
sods/paramz
paramz/parameterized.py
Parameterized.grep_param_names
def grep_param_names(self, regexp):
    """
    create a list of parameters, matching regular expression regexp
    """
    if not isinstance(regexp, _pattern_type):
        regexp = compile(regexp)
    matches = []
    def _collect(node, pattern):
        # Skip the root itself; match against the hierarchy name with the
        # root's own prefix stripped off (everything after the first dot).
        if node is not self and pattern.match(node.hierarchy_name().partition('.')[2]):
            matches.append(node)
    self.traverse(_collect, regexp)
    return matches
python
def grep_param_names(self, regexp): if not isinstance(regexp, _pattern_type): regexp = compile(regexp) found_params = [] def visit(innerself, regexp): if (innerself is not self) and regexp.match(innerself.hierarchy_name().partition('.')[2]): found_params.append(innerself) self.traverse(visit, regexp) return found_params
[ "def", "grep_param_names", "(", "self", ",", "regexp", ")", ":", "if", "not", "isinstance", "(", "regexp", ",", "_pattern_type", ")", ":", "regexp", "=", "compile", "(", "regexp", ")", "found_params", "=", "[", "]", "def", "visit", "(", "innerself", ",",...
create a list of parameters, matching regular expression regexp
[ "create", "a", "list", "of", "parameters", "matching", "regular", "expression", "regexp" ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/parameterized.py#L282-L292
22,506
sods/paramz
paramz/param.py
Param._setup_observers
def _setup_observers(self): """ Setup the default observers 1: pass through to parent, if present """ if self.has_parent(): self.add_observer(self._parent_, self._parent_._pass_through_notify_observers, -np.inf)
python
def _setup_observers(self): if self.has_parent(): self.add_observer(self._parent_, self._parent_._pass_through_notify_observers, -np.inf)
[ "def", "_setup_observers", "(", "self", ")", ":", "if", "self", ".", "has_parent", "(", ")", ":", "self", ".", "add_observer", "(", "self", ".", "_parent_", ",", "self", ".", "_parent_", ".", "_pass_through_notify_observers", ",", "-", "np", ".", "inf", ...
Setup the default observers 1: pass through to parent, if present
[ "Setup", "the", "default", "observers" ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/param.py#L211-L218
22,507
sods/paramz
paramz/param.py
Param._repr_html_
def _repr_html_(self, indices=None, iops=None, lx=None, li=None, lls=None): """Representation of the parameter in html for notebook display.""" filter_ = self._current_slice_ vals = self.flat if indices is None: indices = self._indices(filter_) if iops is None: ravi = self._raveled_index(filter_) iops = OrderedDict([name, iop.properties_for(ravi)] for name, iop in self._index_operations.items()) if lls is None: lls = [self._max_len_names(iop, name) for name, iop in iops.items()] header_format = """ <tr> <th><b>{i}</b></th> <th><b>{x}</b></th> <th><b>{iops}</b></th> </tr>""" header = header_format.format(x=self.hierarchy_name(), i=__index_name__, iops="</b></th><th><b>".join(list(iops.keys()))) # nice header for printing to_print = ["""<style type="text/css"> .tg {padding:2px 3px;word-break:normal;border-collapse:collapse;border-spacing:0;border-color:#DCDCDC;margin:0px auto;width:100%;} .tg td{font-family:"Courier New", Courier, monospace !important;font-weight:bold;color:#444;background-color:#F7FDFA;border-style:solid;border-width:1px;overflow:hidden;word-break:normal;border-color:#DCDCDC;} .tg th{font-family:"Courier New", Courier, monospace !important;font-weight:normal;color:#fff;background-color:#26ADE4;border-style:solid;border-width:1px;overflow:hidden;word-break:normal;border-color:#DCDCDC;} .tg .tg-left{font-family:"Courier New", Courier, monospace !important;font-weight:normal;text-align:left;} .tg .tg-right{font-family:"Courier New", Courier, monospace !important;font-weight:normal;text-align:right;} </style>"""] to_print.append('<table class="tg">') to_print.append(header) format_spec = self._format_spec(indices, iops, lx, li, lls, False) format_spec[:2] = ["<tr><td class=tg-left>{i}</td>".format(i=format_spec[0]), "<td class=tg-right>{i}</td>".format(i=format_spec[1])] for i in range(2, len(format_spec)): format_spec[i] = '<td class=tg-left>{c}</td>'.format(c=format_spec[i]) format_spec = "".join(format_spec) + '</tr>' for i in range(self.size): 
to_print.append(format_spec.format(index=indices[i], value="{1:.{0}f}".format(__precision__, vals[i]), **dict((name, ' '.join(map(str, iops[name][i]))) for name in iops))) return '\n'.join(to_print)
python
def _repr_html_(self, indices=None, iops=None, lx=None, li=None, lls=None): filter_ = self._current_slice_ vals = self.flat if indices is None: indices = self._indices(filter_) if iops is None: ravi = self._raveled_index(filter_) iops = OrderedDict([name, iop.properties_for(ravi)] for name, iop in self._index_operations.items()) if lls is None: lls = [self._max_len_names(iop, name) for name, iop in iops.items()] header_format = """ <tr> <th><b>{i}</b></th> <th><b>{x}</b></th> <th><b>{iops}</b></th> </tr>""" header = header_format.format(x=self.hierarchy_name(), i=__index_name__, iops="</b></th><th><b>".join(list(iops.keys()))) # nice header for printing to_print = ["""<style type="text/css"> .tg {padding:2px 3px;word-break:normal;border-collapse:collapse;border-spacing:0;border-color:#DCDCDC;margin:0px auto;width:100%;} .tg td{font-family:"Courier New", Courier, monospace !important;font-weight:bold;color:#444;background-color:#F7FDFA;border-style:solid;border-width:1px;overflow:hidden;word-break:normal;border-color:#DCDCDC;} .tg th{font-family:"Courier New", Courier, monospace !important;font-weight:normal;color:#fff;background-color:#26ADE4;border-style:solid;border-width:1px;overflow:hidden;word-break:normal;border-color:#DCDCDC;} .tg .tg-left{font-family:"Courier New", Courier, monospace !important;font-weight:normal;text-align:left;} .tg .tg-right{font-family:"Courier New", Courier, monospace !important;font-weight:normal;text-align:right;} </style>"""] to_print.append('<table class="tg">') to_print.append(header) format_spec = self._format_spec(indices, iops, lx, li, lls, False) format_spec[:2] = ["<tr><td class=tg-left>{i}</td>".format(i=format_spec[0]), "<td class=tg-right>{i}</td>".format(i=format_spec[1])] for i in range(2, len(format_spec)): format_spec[i] = '<td class=tg-left>{c}</td>'.format(c=format_spec[i]) format_spec = "".join(format_spec) + '</tr>' for i in range(self.size): to_print.append(format_spec.format(index=indices[i], 
value="{1:.{0}f}".format(__precision__, vals[i]), **dict((name, ' '.join(map(str, iops[name][i]))) for name in iops))) return '\n'.join(to_print)
[ "def", "_repr_html_", "(", "self", ",", "indices", "=", "None", ",", "iops", "=", "None", ",", "lx", "=", "None", ",", "li", "=", "None", ",", "lls", "=", "None", ")", ":", "filter_", "=", "self", ".", "_current_slice_", "vals", "=", "self", ".", ...
Representation of the parameter in html for notebook display.
[ "Representation", "of", "the", "parameter", "in", "html", "for", "notebook", "display", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/param.py#L275-L311
22,508
sods/paramz
paramz/core/observable.py
Observable.add_observer
def add_observer(self, observer, callble, priority=0):
    """
    Add an observer `observer` with the callback `callble`
    and priority `priority` to this observers list.
    """
    observer_list = self.observers
    observer_list.add(priority, observer, callble)
python
def add_observer(self, observer, callble, priority=0): self.observers.add(priority, observer, callble)
[ "def", "add_observer", "(", "self", ",", "observer", ",", "callble", ",", "priority", "=", "0", ")", ":", "self", ".", "observers", ".", "add", "(", "priority", ",", "observer", ",", "callble", ")" ]
Add an observer `observer` with the callback `callble` and priority `priority` to this observers list.
[ "Add", "an", "observer", "observer", "with", "the", "callback", "callble", "and", "priority", "priority", "to", "this", "observers", "list", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/observable.py#L49-L54
22,509
sods/paramz
paramz/core/observable.py
Observable.notify_observers
def notify_observers(self, which=None, min_priority=None):
    """
    Notifies all observers. Which is the element, which kicked off this
    notification loop. The first argument will be self, the second `which`.

    .. note::
       notifies only observers with priority p > min_priority!

    :param min_priority: only notify observers with priority > min_priority
                         if min_priority is None, notify all observers in order
    """
    if not self._update_on:
        return
    if which is None:
        which = self
    if min_priority is None:
        # Notify everyone. (The original used a list comprehension purely
        # for its side effects; a plain loop says what is meant.)
        for _, _, callble in self.observers:
            callble(self, which=which)
    else:
        for p, _, callble in self.observers:
            if p <= min_priority:
                # The break assumes observers iterate in descending
                # priority order, as the original code did.
                break
            callble(self, which=which)
python
def notify_observers(self, which=None, min_priority=None): if self._update_on: if which is None: which = self if min_priority is None: [callble(self, which=which) for _, _, callble in self.observers] else: for p, _, callble in self.observers: if p <= min_priority: break callble(self, which=which)
[ "def", "notify_observers", "(", "self", ",", "which", "=", "None", ",", "min_priority", "=", "None", ")", ":", "if", "self", ".", "_update_on", ":", "if", "which", "is", "None", ":", "which", "=", "self", "if", "min_priority", "is", "None", ":", "[", ...
Notifies all observers. Which is the element, which kicked off this notification loop. The first argument will be self, the second `which`. .. note:: notifies only observers with priority p > min_priority! :param min_priority: only notify observers with priority > min_priority if min_priority is None, notify all observers in order
[ "Notifies", "all", "observers", ".", "Which", "is", "the", "element", "which", "kicked", "off", "this", "notification", "loop", ".", "The", "first", "argument", "will", "be", "self", "the", "second", "which", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/observable.py#L75-L96
22,510
sods/paramz
paramz/core/constrainable.py
Constrainable.constrain_fixed
def constrain_fixed(self, value=None, warning=True, trigger_parent=True):
    """
    Constrain this parameter to be fixed to the current value it carries.

    This does not override the previous constraints, so unfixing will
    restore the constraint set before fixing.

    :param warning: print a warning for overwriting constraints.
    """
    if value is not None:
        self[:] = value
    #index = self.unconstrain()
    index = self._add_to_index_operations(self.constraints, np.empty(0), __fixed__, warning)
    self._highest_parent_._set_fixed(self, index)
    # -inf suppresses the pass-through observer to the parent.
    min_priority = None if trigger_parent else -np.inf
    self.notify_observers(self, min_priority)
    return index
python
def constrain_fixed(self, value=None, warning=True, trigger_parent=True): if value is not None: self[:] = value #index = self.unconstrain() index = self._add_to_index_operations(self.constraints, np.empty(0), __fixed__, warning) self._highest_parent_._set_fixed(self, index) self.notify_observers(self, None if trigger_parent else -np.inf) return index
[ "def", "constrain_fixed", "(", "self", ",", "value", "=", "None", ",", "warning", "=", "True", ",", "trigger_parent", "=", "True", ")", ":", "if", "value", "is", "not", "None", ":", "self", "[", ":", "]", "=", "value", "#index = self.unconstrain()", "ind...
Constrain this parameter to be fixed to the current value it carries. This does not override the previous constraints, so unfixing will restore the constraint set before fixing. :param warning: print a warning for overwriting constraints.
[ "Constrain", "this", "parameter", "to", "be", "fixed", "to", "the", "current", "value", "it", "carries", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/constrainable.py#L52-L68
22,511
sods/paramz
paramz/core/constrainable.py
Constrainable.unconstrain_fixed
def unconstrain_fixed(self):
    """
    This parameter will no longer be fixed.

    If there was a constraint on this parameter when fixing it,
    it will be constraint with that previous constraint.
    """
    released = self.unconstrain(__fixed__)
    self._highest_parent_._set_unfixed(self, released)
    #if self._default_constraint_ is not None:
    #    return self.constrain(self._default_constraint_)
    return released
python
def unconstrain_fixed(self): unconstrained = self.unconstrain(__fixed__) self._highest_parent_._set_unfixed(self, unconstrained) #if self._default_constraint_ is not None: # return self.constrain(self._default_constraint_) return unconstrained
[ "def", "unconstrain_fixed", "(", "self", ")", ":", "unconstrained", "=", "self", ".", "unconstrain", "(", "__fixed__", ")", "self", ".", "_highest_parent_", ".", "_set_unfixed", "(", "self", ",", "unconstrained", ")", "#if self._default_constraint_ is not None:", "#...
This parameter will no longer be fixed. If there was a constraint on this parameter when fixing it, it will be constraint with that previous constraint.
[ "This", "parameter", "will", "no", "longer", "be", "fixed", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/constrainable.py#L71-L82
22,512
sods/paramz
paramz/core/gradcheckable.py
Gradcheckable.checkgrad
def checkgrad(self, verbose=0, step=1e-6, tolerance=1e-3, df_tolerance=1e-12):
    """
    Check the gradient of this parameter with respect to the highest
    parent's objective function, using a three point estimate with
    stepsize `step`. The check passes if either the ratio or the
    difference between numerical and analytical gradient is smaller
    than `tolerance`.

    :param bool verbose: whether each parameter shall be checked individually.
    :param float step: the stepsize for the numerical three point gradient estimate.
    :param float tolerance: the tolerance for the gradient ratio or difference.
    :param float df_tolerance: the tolerance for df_tolerance

    .. note::
       The *dF_ratio* indicates the limit of accuracy of numerical
       gradients. If it is too small, e.g., smaller than 1e-12, the
       numerical gradients are usually not accurate enough for the
       tests (shown with blue).
    """
    # The highest parent holds the fixes, so the gradcheck must always be
    # delegated upwards when a parent exists.
    target = self._highest_parent_ if self.has_parent() else self
    return target._checkgrad(self, verbose=verbose, step=step,
                             tolerance=tolerance, df_tolerance=df_tolerance)
python
def checkgrad(self, verbose=0, step=1e-6, tolerance=1e-3, df_tolerance=1e-12): # Make sure we always call the gradcheck on the highest parent # This ensures the assumption of the highest parent to hold the fixes # In the checkgrad function we take advantage of that, so it needs # to be set in place here. if self.has_parent(): return self._highest_parent_._checkgrad(self, verbose=verbose, step=step, tolerance=tolerance, df_tolerance=df_tolerance) return self._checkgrad(self, verbose=verbose, step=step, tolerance=tolerance, df_tolerance=df_tolerance)
[ "def", "checkgrad", "(", "self", ",", "verbose", "=", "0", ",", "step", "=", "1e-6", ",", "tolerance", "=", "1e-3", ",", "df_tolerance", "=", "1e-12", ")", ":", "# Make sure we always call the gradcheck on the highest parent", "# This ensures the assumption of the highe...
Check the gradient of this parameter with respect to the highest parent's objective function. This is a three point estimate of the gradient, wiggling at the parameters with a stepsize step. The check passes if either the ratio or the difference between numerical and analytical gradient is smaller then tolerance. :param bool verbose: whether each parameter shall be checked individually. :param float step: the stepsize for the numerical three point gradient estimate. :param float tolerance: the tolerance for the gradient ratio or difference. :param float df_tolerance: the tolerance for df_tolerance .. note:: The *dF_ratio* indicates the limit of accuracy of numerical gradients. If it is too small, e.g., smaller than 1e-12, the numerical gradients are usually not accurate enough for the tests (shown with blue).
[ "Check", "the", "gradient", "of", "this", "parameter", "with", "respect", "to", "the", "highest", "parent", "s", "objective", "function", ".", "This", "is", "a", "three", "point", "estimate", "of", "the", "gradient", "wiggling", "at", "the", "parameters", "w...
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/gradcheckable.py#L44-L69
22,513
sods/paramz
paramz/optimization/optimization.py
opt_tnc.opt
def opt(self, x_init, f_fp=None, f=None, fp=None):
    """
    Run the TNC optimizer

    :param x_init: initial parameter vector.
    :param f_fp: callable returning (objective, gradient); required by TNC.
    :param f: unused here (kept for the common optimizer interface).
    :param fp: unused here (kept for the common optimizer interface).
    """
    tnc_rcstrings = ['Local minimum', 'Converged', 'XConverged',
                     'Maximum number of f evaluations reached',
                     'Line search failed', 'Function is constant']

    # Fixed: compare against None with `is not`, never `!=` (PEP 8).
    assert f_fp is not None, "TNC requires f_fp"

    opt_dict = {}
    if self.xtol is not None:
        opt_dict['xtol'] = self.xtol
    if self.ftol is not None:
        opt_dict['ftol'] = self.ftol
    if self.gtol is not None:
        opt_dict['pgtol'] = self.gtol

    opt_result = optimize.fmin_tnc(f_fp, x_init, messages=self.messages,
                                   maxfun=self.max_f_eval, **opt_dict)
    self.x_opt = opt_result[0]
    self.f_opt = f_fp(self.x_opt)[0]
    self.funct_eval = opt_result[1]
    self.status = tnc_rcstrings[opt_result[2]]
python
def opt(self, x_init, f_fp=None, f=None, fp=None): tnc_rcstrings = ['Local minimum', 'Converged', 'XConverged', 'Maximum number of f evaluations reached', 'Line search failed', 'Function is constant'] assert f_fp != None, "TNC requires f_fp" opt_dict = {} if self.xtol is not None: opt_dict['xtol'] = self.xtol if self.ftol is not None: opt_dict['ftol'] = self.ftol if self.gtol is not None: opt_dict['pgtol'] = self.gtol opt_result = optimize.fmin_tnc(f_fp, x_init, messages=self.messages, maxfun=self.max_f_eval, **opt_dict) self.x_opt = opt_result[0] self.f_opt = f_fp(self.x_opt)[0] self.funct_eval = opt_result[1] self.status = tnc_rcstrings[opt_result[2]]
[ "def", "opt", "(", "self", ",", "x_init", ",", "f_fp", "=", "None", ",", "f", "=", "None", ",", "fp", "=", "None", ")", ":", "tnc_rcstrings", "=", "[", "'Local minimum'", ",", "'Converged'", ",", "'XConverged'", ",", "'Maximum number of f evaluations reached...
Run the TNC optimizer
[ "Run", "the", "TNC", "optimizer" ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/optimization/optimization.py#L75-L98
22,514
sods/paramz
paramz/optimization/optimization.py
opt_simplex.opt
def opt(self, x_init, f_fp=None, f=None, fp=None):
    """
    The simplex optimizer does not require gradients.
    """
    statuses = ['Converged', 'Maximum number of function evaluations made',
                'Maximum number of iterations reached']

    opt_dict = {}
    if self.xtol is not None:
        opt_dict['xtol'] = self.xtol
    if self.ftol is not None:
        opt_dict['ftol'] = self.ftol
    if self.gtol is not None:
        # Simplex ignores gradient tolerances; warn instead of failing.
        print("WARNING: simplex doesn't have an gtol arg, so I'm going to ignore it")

    opt_result = optimize.fmin(f, x_init, (), disp=self.messages,
                               maxfun=self.max_f_eval, full_output=True,
                               **opt_dict)

    self.x_opt = opt_result[0]
    self.f_opt = opt_result[1]
    self.funct_eval = opt_result[3]
    self.status = statuses[opt_result[4]]
    self.trace = None
python
def opt(self, x_init, f_fp=None, f=None, fp=None): statuses = ['Converged', 'Maximum number of function evaluations made', 'Maximum number of iterations reached'] opt_dict = {} if self.xtol is not None: opt_dict['xtol'] = self.xtol if self.ftol is not None: opt_dict['ftol'] = self.ftol if self.gtol is not None: print("WARNING: simplex doesn't have an gtol arg, so I'm going to ignore it") opt_result = optimize.fmin(f, x_init, (), disp=self.messages, maxfun=self.max_f_eval, full_output=True, **opt_dict) self.x_opt = opt_result[0] self.f_opt = opt_result[1] self.funct_eval = opt_result[3] self.status = statuses[opt_result[4]] self.trace = None
[ "def", "opt", "(", "self", ",", "x_init", ",", "f_fp", "=", "None", ",", "f", "=", "None", ",", "fp", "=", "None", ")", ":", "statuses", "=", "[", "'Converged'", ",", "'Maximum number of function evaluations made'", ",", "'Maximum number of iterations reached'",...
The simplex optimizer does not require gradients.
[ "The", "simplex", "optimizer", "does", "not", "require", "gradients", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/optimization/optimization.py#L166-L188
22,515
sods/paramz
paramz/caching.py
Cacher.combine_inputs
def combine_inputs(self, args, kw, ignore_args):
    "Combines the args and kw in a unique way, such that ordering of kwargs does not lead to recompute"
    # Sort keyword values by key name so call-order of kwargs is irrelevant.
    sorted_kw_values = tuple(value for _, value in sorted(kw.items(), key=lambda item: item[0]))
    combined = args + sorted_kw_values
    # REMOVE the ignored arguments from input and PREVENT it from being checked!!!
    return [arg for pos, arg in enumerate(combined) if pos not in ignore_args]
python
def combine_inputs(self, args, kw, ignore_args): "Combines the args and kw in a unique way, such that ordering of kwargs does not lead to recompute" inputs= args + tuple(c[1] for c in sorted(kw.items(), key=lambda x: x[0])) # REMOVE the ignored arguments from input and PREVENT it from being checked!!! return [a for i,a in enumerate(inputs) if i not in ignore_args]
[ "def", "combine_inputs", "(", "self", ",", "args", ",", "kw", ",", "ignore_args", ")", ":", "inputs", "=", "args", "+", "tuple", "(", "c", "[", "1", "]", "for", "c", "in", "sorted", "(", "kw", ".", "items", "(", ")", ",", "key", "=", "lambda", ...
Combines the args and kw in a unique way, such that ordering of kwargs does not lead to recompute
[ "Combines", "the", "args", "and", "kw", "in", "a", "unique", "way", "such", "that", "ordering", "of", "kwargs", "does", "not", "lead", "to", "recompute" ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/caching.py#L90-L94
22,516
sods/paramz
paramz/caching.py
Cacher.ensure_cache_length
def ensure_cache_length(self):
    "Ensures the cache is within its limits and has one place free"
    if len(self.order) != self.limit:
        return
    # We have reached the limit: evict the oldest entry.
    cache_id = self.order.popleft()
    combined_args_kw = self.cached_inputs[cache_id]
    for ind in combined_args_kw:
        ind_id = self.id(ind)
        tmp = self.cached_input_ids.get(ind_id, None)
        if tmp is None:
            continue
        ref, cache_ids = tmp
        if len(cache_ids) == 1 and ref() is not None:
            # Last cache entry watching this input: stop observing it.
            ref().remove_observer(self, self.on_cache_changed)
            del self.cached_input_ids[ind_id]
        else:
            cache_ids.remove(cache_id)
            self.cached_input_ids[ind_id] = [ref, cache_ids]
    for store in (self.cached_outputs, self.inputs_changed, self.cached_inputs):
        try:
            del store[cache_id]
        except KeyError:
            # Was not cached before, possibly a keyboard interrupt
            pass
python
def ensure_cache_length(self): "Ensures the cache is within its limits and has one place free" if len(self.order) == self.limit: # we have reached the limit, so lets release one element cache_id = self.order.popleft() combined_args_kw = self.cached_inputs[cache_id] for ind in combined_args_kw: ind_id = self.id(ind) tmp = self.cached_input_ids.get(ind_id, None) if tmp is not None: ref, cache_ids = tmp if len(cache_ids) == 1 and ref() is not None: ref().remove_observer(self, self.on_cache_changed) del self.cached_input_ids[ind_id] else: cache_ids.remove(cache_id) self.cached_input_ids[ind_id] = [ref, cache_ids] try: del self.cached_outputs[cache_id] except KeyError: # Was not cached before, possibly a keyboard interrupt pass try: del self.inputs_changed[cache_id] except KeyError: # Was not cached before, possibly a keyboard interrupt pass try: del self.cached_inputs[cache_id] except KeyError: # Was not cached before, possibly a keyboard interrupt pass
[ "def", "ensure_cache_length", "(", "self", ")", ":", "if", "len", "(", "self", ".", "order", ")", "==", "self", ".", "limit", ":", "# we have reached the limit, so lets release one element", "cache_id", "=", "self", ".", "order", ".", "popleft", "(", ")", "com...
Ensures the cache is within its limits and has one place free
[ "Ensures", "the", "cache", "is", "within", "its", "limits", "and", "has", "one", "place", "free" ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/caching.py#L101-L132
22,517
sods/paramz
paramz/caching.py
Cacher.add_to_cache
def add_to_cache(self, cache_id, inputs, output): """This adds cache_id to the cache, with inputs and output""" self.inputs_changed[cache_id] = False self.cached_outputs[cache_id] = output self.order.append(cache_id) self.cached_inputs[cache_id] = inputs for a in inputs: if a is not None and not isinstance(a, Number) and not isinstance(a, str): ind_id = self.id(a) v = self.cached_input_ids.get(ind_id, [weakref.ref(a), []]) v[1].append(cache_id) if len(v[1]) == 1: a.add_observer(self, self.on_cache_changed) self.cached_input_ids[ind_id] = v
python
def add_to_cache(self, cache_id, inputs, output): self.inputs_changed[cache_id] = False self.cached_outputs[cache_id] = output self.order.append(cache_id) self.cached_inputs[cache_id] = inputs for a in inputs: if a is not None and not isinstance(a, Number) and not isinstance(a, str): ind_id = self.id(a) v = self.cached_input_ids.get(ind_id, [weakref.ref(a), []]) v[1].append(cache_id) if len(v[1]) == 1: a.add_observer(self, self.on_cache_changed) self.cached_input_ids[ind_id] = v
[ "def", "add_to_cache", "(", "self", ",", "cache_id", ",", "inputs", ",", "output", ")", ":", "self", ".", "inputs_changed", "[", "cache_id", "]", "=", "False", "self", ".", "cached_outputs", "[", "cache_id", "]", "=", "output", "self", ".", "order", ".",...
This adds cache_id to the cache, with inputs and output
[ "This", "adds", "cache_id", "to", "the", "cache", "with", "inputs", "and", "output" ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/caching.py#L134-L147
22,518
sods/paramz
paramz/caching.py
Cacher.on_cache_changed
def on_cache_changed(self, direct, which=None): """ A callback funtion, which sets local flags when the elements of some cached inputs change this function gets 'hooked up' to the inputs when we cache them, and upon their elements being changed we update here. """ for what in [direct, which]: ind_id = self.id(what) _, cache_ids = self.cached_input_ids.get(ind_id, [None, []]) for cache_id in cache_ids: self.inputs_changed[cache_id] = True
python
def on_cache_changed(self, direct, which=None): for what in [direct, which]: ind_id = self.id(what) _, cache_ids = self.cached_input_ids.get(ind_id, [None, []]) for cache_id in cache_ids: self.inputs_changed[cache_id] = True
[ "def", "on_cache_changed", "(", "self", ",", "direct", ",", "which", "=", "None", ")", ":", "for", "what", "in", "[", "direct", ",", "which", "]", ":", "ind_id", "=", "self", ".", "id", "(", "what", ")", "_", ",", "cache_ids", "=", "self", ".", "...
A callback funtion, which sets local flags when the elements of some cached inputs change this function gets 'hooked up' to the inputs when we cache them, and upon their elements being changed we update here.
[ "A", "callback", "funtion", "which", "sets", "local", "flags", "when", "the", "elements", "of", "some", "cached", "inputs", "change" ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/caching.py#L194-L204
22,519
sods/paramz
paramz/caching.py
Cacher.reset
def reset(self): """ Totally reset the cache """ [a().remove_observer(self, self.on_cache_changed) if (a() is not None) else None for [a, _] in self.cached_input_ids.values()] self.order = collections.deque() self.cached_inputs = {} # point from cache_ids to a list of [ind_ids], which where used in cache cache_id #======================================================================= # point from each ind_id to [ref(obj), cache_ids] # 0: a weak reference to the object itself # 1: the cache_ids in which this ind_id is used (len will be how many times we have seen this ind_id) self.cached_input_ids = {} #======================================================================= self.cached_outputs = {} # point from cache_ids to outputs self.inputs_changed = {}
python
def reset(self): [a().remove_observer(self, self.on_cache_changed) if (a() is not None) else None for [a, _] in self.cached_input_ids.values()] self.order = collections.deque() self.cached_inputs = {} # point from cache_ids to a list of [ind_ids], which where used in cache cache_id #======================================================================= # point from each ind_id to [ref(obj), cache_ids] # 0: a weak reference to the object itself # 1: the cache_ids in which this ind_id is used (len will be how many times we have seen this ind_id) self.cached_input_ids = {} #======================================================================= self.cached_outputs = {} # point from cache_ids to outputs self.inputs_changed = {}
[ "def", "reset", "(", "self", ")", ":", "[", "a", "(", ")", ".", "remove_observer", "(", "self", ",", "self", ".", "on_cache_changed", ")", "if", "(", "a", "(", ")", "is", "not", "None", ")", "else", "None", "for", "[", "a", ",", "_", "]", "in",...
Totally reset the cache
[ "Totally", "reset", "the", "cache" ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/caching.py#L206-L223
22,520
sods/paramz
paramz/caching.py
FunctionCache.disable_caching
def disable_caching(self): "Disable the cache of this object. This also removes previously cached results" self.caching_enabled = False for c in self.values(): c.disable_cacher()
python
def disable_caching(self): "Disable the cache of this object. This also removes previously cached results" self.caching_enabled = False for c in self.values(): c.disable_cacher()
[ "def", "disable_caching", "(", "self", ")", ":", "self", ".", "caching_enabled", "=", "False", "for", "c", "in", "self", ".", "values", "(", ")", ":", "c", ".", "disable_cacher", "(", ")" ]
Disable the cache of this object. This also removes previously cached results
[ "Disable", "the", "cache", "of", "this", "object", ".", "This", "also", "removes", "previously", "cached", "results" ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/caching.py#L246-L250
22,521
sods/paramz
paramz/caching.py
FunctionCache.enable_caching
def enable_caching(self): "Enable the cache of this object." self.caching_enabled = True for c in self.values(): c.enable_cacher()
python
def enable_caching(self): "Enable the cache of this object." self.caching_enabled = True for c in self.values(): c.enable_cacher()
[ "def", "enable_caching", "(", "self", ")", ":", "self", ".", "caching_enabled", "=", "True", "for", "c", "in", "self", ".", "values", "(", ")", ":", "c", ".", "enable_cacher", "(", ")" ]
Enable the cache of this object.
[ "Enable", "the", "cache", "of", "this", "object", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/caching.py#L252-L256
22,522
sods/paramz
paramz/core/lists_and_dicts.py
ObserverList.remove
def remove(self, priority, observer, callble): """ Remove one observer, which had priority and callble. """ self.flush() for i in range(len(self) - 1, -1, -1): p,o,c = self[i] if priority==p and observer==o and callble==c: del self._poc[i]
python
def remove(self, priority, observer, callble): self.flush() for i in range(len(self) - 1, -1, -1): p,o,c = self[i] if priority==p and observer==o and callble==c: del self._poc[i]
[ "def", "remove", "(", "self", ",", "priority", ",", "observer", ",", "callble", ")", ":", "self", ".", "flush", "(", ")", "for", "i", "in", "range", "(", "len", "(", "self", ")", "-", "1", ",", "-", "1", ",", "-", "1", ")", ":", "p", ",", "...
Remove one observer, which had priority and callble.
[ "Remove", "one", "observer", "which", "had", "priority", "and", "callble", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/lists_and_dicts.py#L78-L86
22,523
sods/paramz
paramz/core/lists_and_dicts.py
ObserverList.add
def add(self, priority, observer, callble): """ Add an observer with priority and callble """ #if observer is not None: ins = 0 for pr, _, _ in self: if priority > pr: break ins += 1 self._poc.insert(ins, (priority, weakref.ref(observer), callble))
python
def add(self, priority, observer, callble): #if observer is not None: ins = 0 for pr, _, _ in self: if priority > pr: break ins += 1 self._poc.insert(ins, (priority, weakref.ref(observer), callble))
[ "def", "add", "(", "self", ",", "priority", ",", "observer", ",", "callble", ")", ":", "#if observer is not None:", "ins", "=", "0", "for", "pr", ",", "_", ",", "_", "in", "self", ":", "if", "priority", ">", "pr", ":", "break", "ins", "+=", "1", "s...
Add an observer with priority and callble
[ "Add", "an", "observer", "with", "priority", "and", "callble" ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/lists_and_dicts.py#L91-L101
22,524
sods/paramz
paramz/core/index_operations.py
ParameterIndexOperations.properties_for
def properties_for(self, index): """ Returns a list of properties, such that each entry in the list corresponds to the element of the index given. Example: let properties: 'one':[1,2,3,4], 'two':[3,5,6] >>> properties_for([2,3,5]) [['one'], ['one', 'two'], ['two']] """ return vectorize(lambda i: [prop for prop in self.properties() if i in self[prop]], otypes=[list])(index)
python
def properties_for(self, index): return vectorize(lambda i: [prop for prop in self.properties() if i in self[prop]], otypes=[list])(index)
[ "def", "properties_for", "(", "self", ",", "index", ")", ":", "return", "vectorize", "(", "lambda", "i", ":", "[", "prop", "for", "prop", "in", "self", ".", "properties", "(", ")", "if", "i", "in", "self", "[", "prop", "]", "]", ",", "otypes", "=",...
Returns a list of properties, such that each entry in the list corresponds to the element of the index given. Example: let properties: 'one':[1,2,3,4], 'two':[3,5,6] >>> properties_for([2,3,5]) [['one'], ['one', 'two'], ['two']]
[ "Returns", "a", "list", "of", "properties", "such", "that", "each", "entry", "in", "the", "list", "corresponds", "to", "the", "element", "of", "the", "index", "given", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/index_operations.py#L132-L143
22,525
sods/paramz
paramz/core/index_operations.py
ParameterIndexOperations.properties_dict_for
def properties_dict_for(self, index): """ Return a dictionary, containing properties as keys and indices as index Thus, the indices for each constraint, which is contained will be collected as one dictionary Example: let properties: 'one':[1,2,3,4], 'two':[3,5,6] >>> properties_dict_for([2,3,5]) {'one':[2,3], 'two':[3,5]} """ props = self.properties_for(index) prop_index = extract_properties_to_index(index, props) return prop_index
python
def properties_dict_for(self, index): props = self.properties_for(index) prop_index = extract_properties_to_index(index, props) return prop_index
[ "def", "properties_dict_for", "(", "self", ",", "index", ")", ":", "props", "=", "self", ".", "properties_for", "(", "index", ")", "prop_index", "=", "extract_properties_to_index", "(", "index", ",", "props", ")", "return", "prop_index" ]
Return a dictionary, containing properties as keys and indices as index Thus, the indices for each constraint, which is contained will be collected as one dictionary Example: let properties: 'one':[1,2,3,4], 'two':[3,5,6] >>> properties_dict_for([2,3,5]) {'one':[2,3], 'two':[3,5]}
[ "Return", "a", "dictionary", "containing", "properties", "as", "keys", "and", "indices", "as", "index", "Thus", "the", "indices", "for", "each", "constraint", "which", "is", "contained", "will", "be", "collected", "as", "one", "dictionary" ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/index_operations.py#L145-L159
22,526
sods/paramz
paramz/model.py
Model.optimize
def optimize(self, optimizer=None, start=None, messages=False, max_iters=1000, ipython_notebook=True, clear_after_finish=False, **kwargs): """ Optimize the model using self.log_likelihood and self.log_likelihood_gradient, as well as self.priors. kwargs are passed to the optimizer. They can be: :param max_iters: maximum number of function evaluations :type max_iters: int :messages: True: Display messages during optimisation, "ipython_notebook": :type messages: bool"string :param optimizer: which optimizer to use (defaults to self.preferred optimizer) :type optimizer: string Valid optimizers are: - 'scg': scaled conjugate gradient method, recommended for stability. See also GPy.inference.optimization.scg - 'fmin_tnc': truncated Newton method (see scipy.optimize.fmin_tnc) - 'simplex': the Nelder-Mead simplex method (see scipy.optimize.fmin), - 'lbfgsb': the l-bfgs-b method (see scipy.optimize.fmin_l_bfgs_b), - 'lbfgs': the bfgs method (see scipy.optimize.fmin_bfgs), - 'sgd': stochastic gradient decsent (see scipy.optimize.sgd). For experts only! """ if self.is_fixed or self.size == 0: print('nothing to optimize') return if not self.update_model(): print("updates were off, setting updates on again") self.update_model(True) if start is None: start = self.optimizer_array if optimizer is None: optimizer = self.preferred_optimizer if isinstance(optimizer, optimization.Optimizer): opt = optimizer opt.model = self else: optimizer = optimization.get_optimizer(optimizer) opt = optimizer(max_iters=max_iters, **kwargs) with VerboseOptimization(self, opt, maxiters=max_iters, verbose=messages, ipython_notebook=ipython_notebook, clear_after_finish=clear_after_finish) as vo: opt.run(start, f_fp=self._objective_grads, f=self._objective, fp=self._grads) self.optimizer_array = opt.x_opt self.optimization_runs.append(opt) return opt
python
def optimize(self, optimizer=None, start=None, messages=False, max_iters=1000, ipython_notebook=True, clear_after_finish=False, **kwargs): if self.is_fixed or self.size == 0: print('nothing to optimize') return if not self.update_model(): print("updates were off, setting updates on again") self.update_model(True) if start is None: start = self.optimizer_array if optimizer is None: optimizer = self.preferred_optimizer if isinstance(optimizer, optimization.Optimizer): opt = optimizer opt.model = self else: optimizer = optimization.get_optimizer(optimizer) opt = optimizer(max_iters=max_iters, **kwargs) with VerboseOptimization(self, opt, maxiters=max_iters, verbose=messages, ipython_notebook=ipython_notebook, clear_after_finish=clear_after_finish) as vo: opt.run(start, f_fp=self._objective_grads, f=self._objective, fp=self._grads) self.optimizer_array = opt.x_opt self.optimization_runs.append(opt) return opt
[ "def", "optimize", "(", "self", ",", "optimizer", "=", "None", ",", "start", "=", "None", ",", "messages", "=", "False", ",", "max_iters", "=", "1000", ",", "ipython_notebook", "=", "True", ",", "clear_after_finish", "=", "False", ",", "*", "*", "kwargs"...
Optimize the model using self.log_likelihood and self.log_likelihood_gradient, as well as self.priors. kwargs are passed to the optimizer. They can be: :param max_iters: maximum number of function evaluations :type max_iters: int :messages: True: Display messages during optimisation, "ipython_notebook": :type messages: bool"string :param optimizer: which optimizer to use (defaults to self.preferred optimizer) :type optimizer: string Valid optimizers are: - 'scg': scaled conjugate gradient method, recommended for stability. See also GPy.inference.optimization.scg - 'fmin_tnc': truncated Newton method (see scipy.optimize.fmin_tnc) - 'simplex': the Nelder-Mead simplex method (see scipy.optimize.fmin), - 'lbfgsb': the l-bfgs-b method (see scipy.optimize.fmin_l_bfgs_b), - 'lbfgs': the bfgs method (see scipy.optimize.fmin_bfgs), - 'sgd': stochastic gradient decsent (see scipy.optimize.sgd). For experts only!
[ "Optimize", "the", "model", "using", "self", ".", "log_likelihood", "and", "self", ".", "log_likelihood_gradient", "as", "well", "as", "self", ".", "priors", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/model.py#L65-L116
22,527
sods/paramz
paramz/model.py
Model.optimize_restarts
def optimize_restarts(self, num_restarts=10, robust=False, verbose=True, parallel=False, num_processes=None, **kwargs): """ Perform random restarts of the model, and set the model to the best seen solution. If the robust flag is set, exceptions raised during optimizations will be handled silently. If _all_ runs fail, the model is reset to the existing parameter values. \*\*kwargs are passed to the optimizer. :param num_restarts: number of restarts to use (default 10) :type num_restarts: int :param robust: whether to handle exceptions silently or not (default False) :type robust: bool :param parallel: whether to run each restart as a separate process. It relies on the multiprocessing module. :type parallel: bool :param num_processes: number of workers in the multiprocessing pool :type numprocesses: int :param max_f_eval: maximum number of function evaluations :type max_f_eval: int :param max_iters: maximum number of iterations :type max_iters: int :param messages: whether to display during optimisation :type messages: bool .. note:: If num_processes is None, the number of workes in the multiprocessing pool is automatically set to the number of processors on the current machine. 
""" initial_length = len(self.optimization_runs) initial_parameters = self.optimizer_array.copy() if parallel: #pragma: no cover try: pool = mp.Pool(processes=num_processes) obs = [self.copy() for i in range(num_restarts)] [obs[i].randomize() for i in range(num_restarts-1)] jobs = pool.map(opt_wrapper, [(o,kwargs) for o in obs]) pool.close() pool.join() except KeyboardInterrupt: print("Ctrl+c received, terminating and joining pool.") pool.terminate() pool.join() for i in range(num_restarts): try: if not parallel: if i > 0: self.randomize() self.optimize(**kwargs) else:#pragma: no cover self.optimization_runs.append(jobs[i]) if verbose: print(("Optimization restart {0}/{1}, f = {2}".format(i + 1, num_restarts, self.optimization_runs[-1].f_opt))) except Exception as e: if robust: print(("Warning - optimization restart {0}/{1} failed".format(i + 1, num_restarts))) else: raise e if len(self.optimization_runs) > initial_length: # This works, since failed jobs don't get added to the optimization_runs. i = np.argmin([o.f_opt for o in self.optimization_runs[initial_length:]]) self.optimizer_array = self.optimization_runs[initial_length + i].x_opt else: self.optimizer_array = initial_parameters return self.optimization_runs
python
def optimize_restarts(self, num_restarts=10, robust=False, verbose=True, parallel=False, num_processes=None, **kwargs): initial_length = len(self.optimization_runs) initial_parameters = self.optimizer_array.copy() if parallel: #pragma: no cover try: pool = mp.Pool(processes=num_processes) obs = [self.copy() for i in range(num_restarts)] [obs[i].randomize() for i in range(num_restarts-1)] jobs = pool.map(opt_wrapper, [(o,kwargs) for o in obs]) pool.close() pool.join() except KeyboardInterrupt: print("Ctrl+c received, terminating and joining pool.") pool.terminate() pool.join() for i in range(num_restarts): try: if not parallel: if i > 0: self.randomize() self.optimize(**kwargs) else:#pragma: no cover self.optimization_runs.append(jobs[i]) if verbose: print(("Optimization restart {0}/{1}, f = {2}".format(i + 1, num_restarts, self.optimization_runs[-1].f_opt))) except Exception as e: if robust: print(("Warning - optimization restart {0}/{1} failed".format(i + 1, num_restarts))) else: raise e if len(self.optimization_runs) > initial_length: # This works, since failed jobs don't get added to the optimization_runs. i = np.argmin([o.f_opt for o in self.optimization_runs[initial_length:]]) self.optimizer_array = self.optimization_runs[initial_length + i].x_opt else: self.optimizer_array = initial_parameters return self.optimization_runs
[ "def", "optimize_restarts", "(", "self", ",", "num_restarts", "=", "10", ",", "robust", "=", "False", ",", "verbose", "=", "True", ",", "parallel", "=", "False", ",", "num_processes", "=", "None", ",", "*", "*", "kwargs", ")", ":", "initial_length", "=",...
Perform random restarts of the model, and set the model to the best seen solution. If the robust flag is set, exceptions raised during optimizations will be handled silently. If _all_ runs fail, the model is reset to the existing parameter values. \*\*kwargs are passed to the optimizer. :param num_restarts: number of restarts to use (default 10) :type num_restarts: int :param robust: whether to handle exceptions silently or not (default False) :type robust: bool :param parallel: whether to run each restart as a separate process. It relies on the multiprocessing module. :type parallel: bool :param num_processes: number of workers in the multiprocessing pool :type numprocesses: int :param max_f_eval: maximum number of function evaluations :type max_f_eval: int :param max_iters: maximum number of iterations :type max_iters: int :param messages: whether to display during optimisation :type messages: bool .. note:: If num_processes is None, the number of workes in the multiprocessing pool is automatically set to the number of processors on the current machine.
[ "Perform", "random", "restarts", "of", "the", "model", "and", "set", "the", "model", "to", "the", "best", "seen", "solution", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/model.py#L118-L190
22,528
sods/paramz
paramz/model.py
Model._grads
def _grads(self, x): """ Gets the gradients from the likelihood and the priors. Failures are handled robustly. The algorithm will try several times to return the gradients, and will raise the original exception if the objective cannot be computed. :param x: the parameters of the model. :type x: np.array """ try: # self._set_params_transformed(x) self.optimizer_array = x self.obj_grads = self._transform_gradients(self.objective_function_gradients()) self._fail_count = 0 except (LinAlgError, ZeroDivisionError, ValueError): #pragma: no cover if self._fail_count >= self._allowed_failures: raise self._fail_count += 1 self.obj_grads = np.clip(self._transform_gradients(self.objective_function_gradients()), -1e100, 1e100) return self.obj_grads
python
def _grads(self, x): try: # self._set_params_transformed(x) self.optimizer_array = x self.obj_grads = self._transform_gradients(self.objective_function_gradients()) self._fail_count = 0 except (LinAlgError, ZeroDivisionError, ValueError): #pragma: no cover if self._fail_count >= self._allowed_failures: raise self._fail_count += 1 self.obj_grads = np.clip(self._transform_gradients(self.objective_function_gradients()), -1e100, 1e100) return self.obj_grads
[ "def", "_grads", "(", "self", ",", "x", ")", ":", "try", ":", "# self._set_params_transformed(x)", "self", ".", "optimizer_array", "=", "x", "self", ".", "obj_grads", "=", "self", ".", "_transform_gradients", "(", "self", ".", "objective_function_gradients", "("...
Gets the gradients from the likelihood and the priors. Failures are handled robustly. The algorithm will try several times to return the gradients, and will raise the original exception if the objective cannot be computed. :param x: the parameters of the model. :type x: np.array
[ "Gets", "the", "gradients", "from", "the", "likelihood", "and", "the", "priors", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/model.py#L225-L246
22,529
sods/paramz
paramz/model.py
Model._objective
def _objective(self, x): """ The objective function passed to the optimizer. It combines the likelihood and the priors. Failures are handled robustly. The algorithm will try several times to return the objective, and will raise the original exception if the objective cannot be computed. :param x: the parameters of the model. :parameter type: np.array """ try: self.optimizer_array = x obj = self.objective_function() self._fail_count = 0 except (LinAlgError, ZeroDivisionError, ValueError):#pragma: no cover if self._fail_count >= self._allowed_failures: raise self._fail_count += 1 return np.inf return obj
python
def _objective(self, x): try: self.optimizer_array = x obj = self.objective_function() self._fail_count = 0 except (LinAlgError, ZeroDivisionError, ValueError):#pragma: no cover if self._fail_count >= self._allowed_failures: raise self._fail_count += 1 return np.inf return obj
[ "def", "_objective", "(", "self", ",", "x", ")", ":", "try", ":", "self", ".", "optimizer_array", "=", "x", "obj", "=", "self", ".", "objective_function", "(", ")", "self", ".", "_fail_count", "=", "0", "except", "(", "LinAlgError", ",", "ZeroDivisionErr...
The objective function passed to the optimizer. It combines the likelihood and the priors. Failures are handled robustly. The algorithm will try several times to return the objective, and will raise the original exception if the objective cannot be computed. :param x: the parameters of the model. :parameter type: np.array
[ "The", "objective", "function", "passed", "to", "the", "optimizer", ".", "It", "combines", "the", "likelihood", "and", "the", "priors", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/model.py#L248-L269
22,530
sods/paramz
paramz/model.py
Model._repr_html_
def _repr_html_(self): """Representation of the model in html for notebook display.""" model_details = [['<b>Model</b>', self.name + '<br>'], ['<b>Objective</b>', '{}<br>'.format(float(self.objective_function()))], ["<b>Number of Parameters</b>", '{}<br>'.format(self.size)], ["<b>Number of Optimization Parameters</b>", '{}<br>'.format(self._size_transformed())], ["<b>Updates</b>", '{}<br>'.format(self._update_on)], ] from operator import itemgetter to_print = ["""<style type="text/css"> .pd{ font-family: "Courier New", Courier, monospace !important; width: 100%; padding: 3px; } </style>\n"""] + ["<p class=pd>"] + ["{}: {}".format(name, detail) for name, detail in model_details] + ["</p>"] to_print.append(super(Model, self)._repr_html_()) return "\n".join(to_print)
python
def _repr_html_(self): model_details = [['<b>Model</b>', self.name + '<br>'], ['<b>Objective</b>', '{}<br>'.format(float(self.objective_function()))], ["<b>Number of Parameters</b>", '{}<br>'.format(self.size)], ["<b>Number of Optimization Parameters</b>", '{}<br>'.format(self._size_transformed())], ["<b>Updates</b>", '{}<br>'.format(self._update_on)], ] from operator import itemgetter to_print = ["""<style type="text/css"> .pd{ font-family: "Courier New", Courier, monospace !important; width: 100%; padding: 3px; } </style>\n"""] + ["<p class=pd>"] + ["{}: {}".format(name, detail) for name, detail in model_details] + ["</p>"] to_print.append(super(Model, self)._repr_html_()) return "\n".join(to_print)
[ "def", "_repr_html_", "(", "self", ")", ":", "model_details", "=", "[", "[", "'<b>Model</b>'", ",", "self", ".", "name", "+", "'<br>'", "]", ",", "[", "'<b>Objective</b>'", ",", "'{}<br>'", ".", "format", "(", "float", "(", "self", ".", "objective_function...
Representation of the model in html for notebook display.
[ "Representation", "of", "the", "model", "in", "html", "for", "notebook", "display", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/model.py#L410-L427
22,531
sods/paramz
paramz/core/indexable.py
Indexable.add_index_operation
def add_index_operation(self, name, operations): """ Add index operation with name to the operations given. raises: attribute error if operations exist. """ if name not in self._index_operations: self._add_io(name, operations) else: raise AttributeError("An index operation with the name {} was already taken".format(name))
python
def add_index_operation(self, name, operations): if name not in self._index_operations: self._add_io(name, operations) else: raise AttributeError("An index operation with the name {} was already taken".format(name))
[ "def", "add_index_operation", "(", "self", ",", "name", ",", "operations", ")", ":", "if", "name", "not", "in", "self", ".", "_index_operations", ":", "self", ".", "_add_io", "(", "name", ",", "operations", ")", "else", ":", "raise", "AttributeError", "(",...
Add index operation with name to the operations given. raises: attribute error if operations exist.
[ "Add", "index", "operation", "with", "name", "to", "the", "operations", "given", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/indexable.py#L81-L90
22,532
sods/paramz
paramz/core/indexable.py
Indexable._offset_for
def _offset_for(self, param): """ Return the offset of the param inside this parameterized object. This does not need to account for shaped parameters, as it basically just sums up the parameter sizes which come before param. """ if param.has_parent(): p = param._parent_._get_original(param) if p in self.parameters: return reduce(lambda a,b: a + b.size, self.parameters[:p._parent_index_], 0) return self._offset_for(param._parent_) + param._parent_._offset_for(param) return 0
python
def _offset_for(self, param): if param.has_parent(): p = param._parent_._get_original(param) if p in self.parameters: return reduce(lambda a,b: a + b.size, self.parameters[:p._parent_index_], 0) return self._offset_for(param._parent_) + param._parent_._offset_for(param) return 0
[ "def", "_offset_for", "(", "self", ",", "param", ")", ":", "if", "param", ".", "has_parent", "(", ")", ":", "p", "=", "param", ".", "_parent_", ".", "_get_original", "(", "param", ")", "if", "p", "in", "self", ".", "parameters", ":", "return", "reduc...
Return the offset of the param inside this parameterized object. This does not need to account for shaped parameters, as it basically just sums up the parameter sizes which come before param.
[ "Return", "the", "offset", "of", "the", "param", "inside", "this", "parameterized", "object", ".", "This", "does", "not", "need", "to", "account", "for", "shaped", "parameters", "as", "it", "basically", "just", "sums", "up", "the", "parameter", "sizes", "whi...
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/indexable.py#L130-L141
22,533
sods/paramz
paramz/core/indexable.py
Indexable._raveled_index_for
def _raveled_index_for(self, param): """ get the raveled index for a param that is an int array, containing the indexes for the flattened param inside this parameterized logic. !Warning! be sure to call this method on the highest parent of a hierarchy, as it uses the fixes to do its work """ from ..param import ParamConcatenation if isinstance(param, ParamConcatenation): return np.hstack((self._raveled_index_for(p) for p in param.params)) return param._raveled_index() + self._offset_for(param)
python
def _raveled_index_for(self, param): from ..param import ParamConcatenation if isinstance(param, ParamConcatenation): return np.hstack((self._raveled_index_for(p) for p in param.params)) return param._raveled_index() + self._offset_for(param)
[ "def", "_raveled_index_for", "(", "self", ",", "param", ")", ":", "from", ".", ".", "param", "import", "ParamConcatenation", "if", "isinstance", "(", "param", ",", "ParamConcatenation", ")", ":", "return", "np", ".", "hstack", "(", "(", "self", ".", "_rave...
get the raveled index for a param that is an int array, containing the indexes for the flattened param inside this parameterized logic. !Warning! be sure to call this method on the highest parent of a hierarchy, as it uses the fixes to do its work
[ "get", "the", "raveled", "index", "for", "a", "param", "that", "is", "an", "int", "array", "containing", "the", "indexes", "for", "the", "flattened", "param", "inside", "this", "parameterized", "logic", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/indexable.py#L150-L162
22,534
sods/paramz
paramz/core/observable_array.py
ObsAr.copy
def copy(self): """ Make a copy. This means, we delete all observers and return a copy of this array. It will still be an ObsAr! """ from .lists_and_dicts import ObserverList memo = {} memo[id(self)] = self memo[id(self.observers)] = ObserverList() return self.__deepcopy__(memo)
python
def copy(self): from .lists_and_dicts import ObserverList memo = {} memo[id(self)] = self memo[id(self.observers)] = ObserverList() return self.__deepcopy__(memo)
[ "def", "copy", "(", "self", ")", ":", "from", ".", "lists_and_dicts", "import", "ObserverList", "memo", "=", "{", "}", "memo", "[", "id", "(", "self", ")", "]", "=", "self", "memo", "[", "id", "(", "self", ".", "observers", ")", "]", "=", "Observer...
Make a copy. This means, we delete all observers and return a copy of this array. It will still be an ObsAr!
[ "Make", "a", "copy", ".", "This", "means", "we", "delete", "all", "observers", "and", "return", "a", "copy", "of", "this", "array", ".", "It", "will", "still", "be", "an", "ObsAr!" ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/observable_array.py#L91-L100
22,535
sods/paramz
paramz/core/updateable.py
Updateable.update_model
def update_model(self, updates=None): """ Get or set, whether automatic updates are performed. When updates are off, the model might be in a non-working state. To make the model work turn updates on again. :param bool|None updates: bool: whether to do updates None: get the current update state """ if updates is None: return self._update_on assert isinstance(updates, bool), "updates are either on (True) or off (False)" p = getattr(self, '_highest_parent_', None) def turn_updates(s): s._update_on = updates p.traverse(turn_updates) self.trigger_update()
python
def update_model(self, updates=None): if updates is None: return self._update_on assert isinstance(updates, bool), "updates are either on (True) or off (False)" p = getattr(self, '_highest_parent_', None) def turn_updates(s): s._update_on = updates p.traverse(turn_updates) self.trigger_update()
[ "def", "update_model", "(", "self", ",", "updates", "=", "None", ")", ":", "if", "updates", "is", "None", ":", "return", "self", ".", "_update_on", "assert", "isinstance", "(", "updates", ",", "bool", ")", ",", "\"updates are either on (True) or off (False)\"", ...
Get or set, whether automatic updates are performed. When updates are off, the model might be in a non-working state. To make the model work turn updates on again. :param bool|None updates: bool: whether to do updates None: get the current update state
[ "Get", "or", "set", "whether", "automatic", "updates", "are", "performed", ".", "When", "updates", "are", "off", "the", "model", "might", "be", "in", "a", "non", "-", "working", "state", ".", "To", "make", "the", "model", "work", "turn", "updates", "on",...
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/updateable.py#L42-L60
22,536
sods/paramz
paramz/core/updateable.py
Updateable.trigger_update
def trigger_update(self, trigger_parent=True): """ Update the model from the current state. Make sure that updates are on, otherwise this method will do nothing :param bool trigger_parent: Whether to trigger the parent, after self has updated """ if not self.update_model() or (hasattr(self, "_in_init_") and self._in_init_): #print "Warning: updates are off, updating the model will do nothing" return self._trigger_params_changed(trigger_parent)
python
def trigger_update(self, trigger_parent=True): if not self.update_model() or (hasattr(self, "_in_init_") and self._in_init_): #print "Warning: updates are off, updating the model will do nothing" return self._trigger_params_changed(trigger_parent)
[ "def", "trigger_update", "(", "self", ",", "trigger_parent", "=", "True", ")", ":", "if", "not", "self", ".", "update_model", "(", ")", "or", "(", "hasattr", "(", "self", ",", "\"_in_init_\"", ")", "and", "self", ".", "_in_init_", ")", ":", "#print \"War...
Update the model from the current state. Make sure that updates are on, otherwise this method will do nothing :param bool trigger_parent: Whether to trigger the parent, after self has updated
[ "Update", "the", "model", "from", "the", "current", "state", ".", "Make", "sure", "that", "updates", "are", "on", "otherwise", "this", "method", "will", "do", "nothing" ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/updateable.py#L68-L79
22,537
sods/paramz
paramz/core/parameter_core.py
OptimizationHandlable.optimizer_array
def optimizer_array(self): """ Array for the optimizer to work on. This array always lives in the space for the optimizer. Thus, it is untransformed, going from Transformations. Setting this array, will make sure the transformed parameters for this model will be set accordingly. It has to be set with an array, retrieved from this method, as e.g. fixing will resize the array. The optimizer should only interfere with this array, such that transformations are secured. """ if self.__dict__.get('_optimizer_copy_', None) is None or self.size != self._optimizer_copy_.size: self._optimizer_copy_ = np.empty(self.size) if not self._optimizer_copy_transformed: self._optimizer_copy_.flat = self.param_array.flat #py3 fix #[np.put(self._optimizer_copy_, ind, c.finv(self.param_array[ind])) for c, ind in self.constraints.iteritems() if c != __fixed__] [np.put(self._optimizer_copy_, ind, c.finv(self.param_array[ind])) for c, ind in self.constraints.items() if c != __fixed__] self._optimizer_copy_transformed = True if self._has_fixes():# or self._has_ties()): self._ensure_fixes() return self._optimizer_copy_[self._fixes_] return self._optimizer_copy_
python
def optimizer_array(self): if self.__dict__.get('_optimizer_copy_', None) is None or self.size != self._optimizer_copy_.size: self._optimizer_copy_ = np.empty(self.size) if not self._optimizer_copy_transformed: self._optimizer_copy_.flat = self.param_array.flat #py3 fix #[np.put(self._optimizer_copy_, ind, c.finv(self.param_array[ind])) for c, ind in self.constraints.iteritems() if c != __fixed__] [np.put(self._optimizer_copy_, ind, c.finv(self.param_array[ind])) for c, ind in self.constraints.items() if c != __fixed__] self._optimizer_copy_transformed = True if self._has_fixes():# or self._has_ties()): self._ensure_fixes() return self._optimizer_copy_[self._fixes_] return self._optimizer_copy_
[ "def", "optimizer_array", "(", "self", ")", ":", "if", "self", ".", "__dict__", ".", "get", "(", "'_optimizer_copy_'", ",", "None", ")", "is", "None", "or", "self", ".", "size", "!=", "self", ".", "_optimizer_copy_", ".", "size", ":", "self", ".", "_op...
Array for the optimizer to work on. This array always lives in the space for the optimizer. Thus, it is untransformed, going from Transformations. Setting this array, will make sure the transformed parameters for this model will be set accordingly. It has to be set with an array, retrieved from this method, as e.g. fixing will resize the array. The optimizer should only interfere with this array, such that transformations are secured.
[ "Array", "for", "the", "optimizer", "to", "work", "on", ".", "This", "array", "always", "lives", "in", "the", "space", "for", "the", "optimizer", ".", "Thus", "it", "is", "untransformed", "going", "from", "Transformations", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/parameter_core.py#L67-L93
22,538
sods/paramz
paramz/core/parameter_core.py
OptimizationHandlable._trigger_params_changed
def _trigger_params_changed(self, trigger_parent=True): """ First tell all children to update, then update yourself. If trigger_parent is True, we will tell the parent, otherwise not. """ [p._trigger_params_changed(trigger_parent=False) for p in self.parameters if not p.is_fixed] self.notify_observers(None, None if trigger_parent else -np.inf)
python
def _trigger_params_changed(self, trigger_parent=True): [p._trigger_params_changed(trigger_parent=False) for p in self.parameters if not p.is_fixed] self.notify_observers(None, None if trigger_parent else -np.inf)
[ "def", "_trigger_params_changed", "(", "self", ",", "trigger_parent", "=", "True", ")", ":", "[", "p", ".", "_trigger_params_changed", "(", "trigger_parent", "=", "False", ")", "for", "p", "in", "self", ".", "parameters", "if", "not", "p", ".", "is_fixed", ...
First tell all children to update, then update yourself. If trigger_parent is True, we will tell the parent, otherwise not.
[ "First", "tell", "all", "children", "to", "update", "then", "update", "yourself", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/parameter_core.py#L126-L134
22,539
sods/paramz
paramz/core/parameter_core.py
OptimizationHandlable._transform_gradients
def _transform_gradients(self, g): """ Transform the gradients by multiplying the gradient factor for each constraint to it. """ #py3 fix #[np.put(g, i, c.gradfactor(self.param_array[i], g[i])) for c, i in self.constraints.iteritems() if c != __fixed__] [np.put(g, i, c.gradfactor(self.param_array[i], g[i])) for c, i in self.constraints.items() if c != __fixed__] if self._has_fixes(): return g[self._fixes_] return g
python
def _transform_gradients(self, g): #py3 fix #[np.put(g, i, c.gradfactor(self.param_array[i], g[i])) for c, i in self.constraints.iteritems() if c != __fixed__] [np.put(g, i, c.gradfactor(self.param_array[i], g[i])) for c, i in self.constraints.items() if c != __fixed__] if self._has_fixes(): return g[self._fixes_] return g
[ "def", "_transform_gradients", "(", "self", ",", "g", ")", ":", "#py3 fix", "#[np.put(g, i, c.gradfactor(self.param_array[i], g[i])) for c, i in self.constraints.iteritems() if c != __fixed__]", "[", "np", ".", "put", "(", "g", ",", "i", ",", "c", ".", "gradfactor", "(", ...
Transform the gradients by multiplying the gradient factor for each constraint to it.
[ "Transform", "the", "gradients", "by", "multiplying", "the", "gradient", "factor", "for", "each", "constraint", "to", "it", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/parameter_core.py#L143-L152
22,540
sods/paramz
paramz/core/parameter_core.py
OptimizationHandlable.parameter_names
def parameter_names(self, add_self=False, adjust_for_printing=False, recursive=True, intermediate=False): """ Get the names of all parameters of this model or parameter. It starts from the parameterized object you are calling this method on. Note: This does not unravel multidimensional parameters, use parameter_names_flat to unravel parameters! :param bool add_self: whether to add the own name in front of names :param bool adjust_for_printing: whether to call `adjust_name_for_printing` on names :param bool recursive: whether to traverse through hierarchy and append leaf node names :param bool intermediate: whether to add intermediate names, that is parameterized objects """ if adjust_for_printing: adjust = adjust_name_for_printing else: adjust = lambda x: x names = [] if intermediate or (not recursive): names.extend([adjust(x.name) for x in self.parameters]) if intermediate or recursive: names.extend([ xi for x in self.parameters for xi in x.parameter_names(add_self=True, adjust_for_printing=adjust_for_printing, recursive=True, intermediate=False)]) if add_self: names = map(lambda x: adjust(self.name) + "." + x, names) return names
python
def parameter_names(self, add_self=False, adjust_for_printing=False, recursive=True, intermediate=False): if adjust_for_printing: adjust = adjust_name_for_printing else: adjust = lambda x: x names = [] if intermediate or (not recursive): names.extend([adjust(x.name) for x in self.parameters]) if intermediate or recursive: names.extend([ xi for x in self.parameters for xi in x.parameter_names(add_self=True, adjust_for_printing=adjust_for_printing, recursive=True, intermediate=False)]) if add_self: names = map(lambda x: adjust(self.name) + "." + x, names) return names
[ "def", "parameter_names", "(", "self", ",", "add_self", "=", "False", ",", "adjust_for_printing", "=", "False", ",", "recursive", "=", "True", ",", "intermediate", "=", "False", ")", ":", "if", "adjust_for_printing", ":", "adjust", "=", "adjust_name_for_printing...
Get the names of all parameters of this model or parameter. It starts from the parameterized object you are calling this method on. Note: This does not unravel multidimensional parameters, use parameter_names_flat to unravel parameters! :param bool add_self: whether to add the own name in front of names :param bool adjust_for_printing: whether to call `adjust_name_for_printing` on names :param bool recursive: whether to traverse through hierarchy and append leaf node names :param bool intermediate: whether to add intermediate names, that is parameterized objects
[ "Get", "the", "names", "of", "all", "parameters", "of", "this", "model", "or", "parameter", ".", "It", "starts", "from", "the", "parameterized", "object", "you", "are", "calling", "this", "method", "on", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/parameter_core.py#L174-L199
22,541
sods/paramz
paramz/core/parameter_core.py
OptimizationHandlable.parameter_names_flat
def parameter_names_flat(self, include_fixed=False): """ Return the flattened parameter names for all subsequent parameters of this parameter. We do not include the name for self here! If you want the names for fixed parameters as well in this list, set include_fixed to True. if not hasattr(obj, 'cache'): obj.cache = FunctionCacher() :param bool include_fixed: whether to include fixed names here. """ name_list = [] for p in self.flattened_parameters: name = p.hierarchy_name() if p.size > 1: name_list.extend(["{}[{!s}]".format(name, i) for i in p._indices()]) else: name_list.append(name) name_list = np.array(name_list) if not include_fixed and self._has_fixes(): return name_list[self._fixes_] return name_list
python
def parameter_names_flat(self, include_fixed=False): name_list = [] for p in self.flattened_parameters: name = p.hierarchy_name() if p.size > 1: name_list.extend(["{}[{!s}]".format(name, i) for i in p._indices()]) else: name_list.append(name) name_list = np.array(name_list) if not include_fixed and self._has_fixes(): return name_list[self._fixes_] return name_list
[ "def", "parameter_names_flat", "(", "self", ",", "include_fixed", "=", "False", ")", ":", "name_list", "=", "[", "]", "for", "p", "in", "self", ".", "flattened_parameters", ":", "name", "=", "p", ".", "hierarchy_name", "(", ")", "if", "p", ".", "size", ...
Return the flattened parameter names for all subsequent parameters of this parameter. We do not include the name for self here! If you want the names for fixed parameters as well in this list, set include_fixed to True. if not hasattr(obj, 'cache'): obj.cache = FunctionCacher() :param bool include_fixed: whether to include fixed names here.
[ "Return", "the", "flattened", "parameter", "names", "for", "all", "subsequent", "parameters", "of", "this", "parameter", ".", "We", "do", "not", "include", "the", "name", "for", "self", "here!" ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/parameter_core.py#L201-L223
22,542
sods/paramz
paramz/core/parameter_core.py
OptimizationHandlable._propagate_param_grad
def _propagate_param_grad(self, parray, garray): """ For propagating the param_array and gradient_array. This ensures the in memory view of each subsequent array. 1.) connect param_array of children to self.param_array 2.) tell all children to propagate further """ #if self.param_array.size != self.size: # self._param_array_ = np.empty(self.size, dtype=np.float64) #if self.gradient.size != self.size: # self._gradient_array_ = np.empty(self.size, dtype=np.float64) pi_old_size = 0 for pi in self.parameters: pislice = slice(pi_old_size, pi_old_size + pi.size) self.param_array[pislice] = pi.param_array.flat # , requirements=['C', 'W']).flat self.gradient_full[pislice] = pi.gradient_full.flat # , requirements=['C', 'W']).flat pi.param_array.data = parray[pislice].data pi.gradient_full.data = garray[pislice].data pi._propagate_param_grad(parray[pislice], garray[pislice]) pi_old_size += pi.size self._model_initialized_ = True
python
def _propagate_param_grad(self, parray, garray): #if self.param_array.size != self.size: # self._param_array_ = np.empty(self.size, dtype=np.float64) #if self.gradient.size != self.size: # self._gradient_array_ = np.empty(self.size, dtype=np.float64) pi_old_size = 0 for pi in self.parameters: pislice = slice(pi_old_size, pi_old_size + pi.size) self.param_array[pislice] = pi.param_array.flat # , requirements=['C', 'W']).flat self.gradient_full[pislice] = pi.gradient_full.flat # , requirements=['C', 'W']).flat pi.param_array.data = parray[pislice].data pi.gradient_full.data = garray[pislice].data pi._propagate_param_grad(parray[pislice], garray[pislice]) pi_old_size += pi.size self._model_initialized_ = True
[ "def", "_propagate_param_grad", "(", "self", ",", "parray", ",", "garray", ")", ":", "#if self.param_array.size != self.size:", "# self._param_array_ = np.empty(self.size, dtype=np.float64)", "#if self.gradient.size != self.size:", "# self._gradient_array_ = np.empty(self.size, dtype...
For propagating the param_array and gradient_array. This ensures the in memory view of each subsequent array. 1.) connect param_array of children to self.param_array 2.) tell all children to propagate further
[ "For", "propagating", "the", "param_array", "and", "gradient_array", ".", "This", "ensures", "the", "in", "memory", "view", "of", "each", "subsequent", "array", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/parameter_core.py#L270-L296
22,543
sods/paramz
paramz/core/parameter_core.py
Parameterizable.initialize_parameter
def initialize_parameter(self): """ Call this function to initialize the model, if you built it without initialization. This HAS to be called manually before optmizing or it will be causing unexpected behaviour, if not errors! """ #logger.debug("connecting parameters") self._highest_parent_._notify_parent_change() self._highest_parent_._connect_parameters() #logger.debug("calling parameters changed") self._highest_parent_._connect_fixes() self.trigger_update()
python
def initialize_parameter(self): #logger.debug("connecting parameters") self._highest_parent_._notify_parent_change() self._highest_parent_._connect_parameters() #logger.debug("calling parameters changed") self._highest_parent_._connect_fixes() self.trigger_update()
[ "def", "initialize_parameter", "(", "self", ")", ":", "#logger.debug(\"connecting parameters\")", "self", ".", "_highest_parent_", ".", "_notify_parent_change", "(", ")", "self", ".", "_highest_parent_", ".", "_connect_parameters", "(", ")", "#logger.debug(\"calling paramet...
Call this function to initialize the model, if you built it without initialization. This HAS to be called manually before optmizing or it will be causing unexpected behaviour, if not errors!
[ "Call", "this", "function", "to", "initialize", "the", "model", "if", "you", "built", "it", "without", "initialization", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/parameter_core.py#L326-L337
22,544
sods/paramz
paramz/core/parameter_core.py
Parameterizable.traverse_parents
def traverse_parents(self, visit, *args, **kwargs): """ Traverse the hierarchy upwards, visiting all parents and their children except self. See "visitor pattern" in literature. This is implemented in pre-order fashion. Example: parents = [] self.traverse_parents(parents.append) print parents """ if self.has_parent(): self.__visited = True self._parent_.traverse_parents(visit, *args, **kwargs) self._parent_.traverse(visit, *args, **kwargs) self.__visited = False
python
def traverse_parents(self, visit, *args, **kwargs): if self.has_parent(): self.__visited = True self._parent_.traverse_parents(visit, *args, **kwargs) self._parent_.traverse(visit, *args, **kwargs) self.__visited = False
[ "def", "traverse_parents", "(", "self", ",", "visit", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "has_parent", "(", ")", ":", "self", ".", "__visited", "=", "True", "self", ".", "_parent_", ".", "traverse_parents", "(", "...
Traverse the hierarchy upwards, visiting all parents and their children except self. See "visitor pattern" in literature. This is implemented in pre-order fashion. Example: parents = [] self.traverse_parents(parents.append) print parents
[ "Traverse", "the", "hierarchy", "upwards", "visiting", "all", "parents", "and", "their", "children", "except", "self", ".", "See", "visitor", "pattern", "in", "literature", ".", "This", "is", "implemented", "in", "pre", "-", "order", "fashion", "." ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/core/parameter_core.py#L395-L410
22,545
sods/paramz
paramz/examples/ridge_regression.py
RidgeRegression.phi
def phi(self, Xpred, degrees=None): """ Compute the design matrix for this model using the degrees given by the index array in degrees :param array-like Xpred: inputs to compute the design matrix for :param array-like degrees: array of degrees to use [default=range(self.degree+1)] :returns array-like phi: The design matrix [degree x #samples x #dimensions] """ assert Xpred.shape[1] == self.X.shape[1], "Need to predict with same shape as training data." if degrees is None: degrees = range(self.basis.degree+1) tmp_phi = np.empty((len(degrees), Xpred.shape[0], Xpred.shape[1])) for i, w in enumerate(degrees): # Objective function tmpX = self._phi(Xpred, w) tmp_phi[i] = tmpX * self.weights[[w], :] return tmp_phi
python
def phi(self, Xpred, degrees=None): assert Xpred.shape[1] == self.X.shape[1], "Need to predict with same shape as training data." if degrees is None: degrees = range(self.basis.degree+1) tmp_phi = np.empty((len(degrees), Xpred.shape[0], Xpred.shape[1])) for i, w in enumerate(degrees): # Objective function tmpX = self._phi(Xpred, w) tmp_phi[i] = tmpX * self.weights[[w], :] return tmp_phi
[ "def", "phi", "(", "self", ",", "Xpred", ",", "degrees", "=", "None", ")", ":", "assert", "Xpred", ".", "shape", "[", "1", "]", "==", "self", ".", "X", ".", "shape", "[", "1", "]", ",", "\"Need to predict with same shape as training data.\"", "if", "degr...
Compute the design matrix for this model using the degrees given by the index array in degrees :param array-like Xpred: inputs to compute the design matrix for :param array-like degrees: array of degrees to use [default=range(self.degree+1)] :returns array-like phi: The design matrix [degree x #samples x #dimensions]
[ "Compute", "the", "design", "matrix", "for", "this", "model", "using", "the", "degrees", "given", "by", "the", "index", "array", "in", "degrees" ]
ae6fc6274b70fb723d91e48fc5026a9bc5a06508
https://github.com/sods/paramz/blob/ae6fc6274b70fb723d91e48fc5026a9bc5a06508/paramz/examples/ridge_regression.py#L57-L75
22,546
PyAr/fades
fades/main.py
consolidate_dependencies
def consolidate_dependencies(needs_ipython, child_program, requirement_files, manual_dependencies): """Parse files, get deps and merge them. Deps read later overwrite those read earlier.""" # We get the logger here because it's not defined at module level logger = logging.getLogger('fades') if needs_ipython: logger.debug("Adding ipython dependency because --ipython was detected") ipython_dep = parsing.parse_manual(['ipython']) else: ipython_dep = {} if child_program: srcfile_deps = parsing.parse_srcfile(child_program) logger.debug("Dependencies from source file: %s", srcfile_deps) docstring_deps = parsing.parse_docstring(child_program) logger.debug("Dependencies from docstrings: %s", docstring_deps) else: srcfile_deps = {} docstring_deps = {} all_dependencies = [ipython_dep, srcfile_deps, docstring_deps] if requirement_files is not None: for rf_path in requirement_files: rf_deps = parsing.parse_reqfile(rf_path) logger.debug('Dependencies from requirements file %r: %s', rf_path, rf_deps) all_dependencies.append(rf_deps) manual_deps = parsing.parse_manual(manual_dependencies) logger.debug("Dependencies from parameters: %s", manual_deps) all_dependencies.append(manual_deps) # Merge dependencies indicated_deps = {} for dep in all_dependencies: for repo, info in dep.items(): indicated_deps.setdefault(repo, set()).update(info) return indicated_deps
python
def consolidate_dependencies(needs_ipython, child_program, requirement_files, manual_dependencies): # We get the logger here because it's not defined at module level logger = logging.getLogger('fades') if needs_ipython: logger.debug("Adding ipython dependency because --ipython was detected") ipython_dep = parsing.parse_manual(['ipython']) else: ipython_dep = {} if child_program: srcfile_deps = parsing.parse_srcfile(child_program) logger.debug("Dependencies from source file: %s", srcfile_deps) docstring_deps = parsing.parse_docstring(child_program) logger.debug("Dependencies from docstrings: %s", docstring_deps) else: srcfile_deps = {} docstring_deps = {} all_dependencies = [ipython_dep, srcfile_deps, docstring_deps] if requirement_files is not None: for rf_path in requirement_files: rf_deps = parsing.parse_reqfile(rf_path) logger.debug('Dependencies from requirements file %r: %s', rf_path, rf_deps) all_dependencies.append(rf_deps) manual_deps = parsing.parse_manual(manual_dependencies) logger.debug("Dependencies from parameters: %s", manual_deps) all_dependencies.append(manual_deps) # Merge dependencies indicated_deps = {} for dep in all_dependencies: for repo, info in dep.items(): indicated_deps.setdefault(repo, set()).update(info) return indicated_deps
[ "def", "consolidate_dependencies", "(", "needs_ipython", ",", "child_program", ",", "requirement_files", ",", "manual_dependencies", ")", ":", "# We get the logger here because it's not defined at module level", "logger", "=", "logging", ".", "getLogger", "(", "'fades'", ")",...
Parse files, get deps and merge them. Deps read later overwrite those read earlier.
[ "Parse", "files", "get", "deps", "and", "merge", "them", ".", "Deps", "read", "later", "overwrite", "those", "read", "earlier", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/main.py#L56-L95
22,547
PyAr/fades
fades/main.py
detect_inside_virtualenv
def detect_inside_virtualenv(prefix, real_prefix, base_prefix): """Tell if fades is running inside a virtualenv. The params 'real_prefix' and 'base_prefix' may be None. This is copied from pip code (slightly modified), see https://github.com/pypa/pip/blob/281eb61b09d87765d7c2b92f6982b3fe76ccb0af/ pip/locations.py#L39 """ if real_prefix is not None: return True if base_prefix is None: return False # if prefix is different than base_prefix, it's a venv return prefix != base_prefix
python
def detect_inside_virtualenv(prefix, real_prefix, base_prefix): if real_prefix is not None: return True if base_prefix is None: return False # if prefix is different than base_prefix, it's a venv return prefix != base_prefix
[ "def", "detect_inside_virtualenv", "(", "prefix", ",", "real_prefix", ",", "base_prefix", ")", ":", "if", "real_prefix", "is", "not", "None", ":", "return", "True", "if", "base_prefix", "is", "None", ":", "return", "False", "# if prefix is different than base_prefix...
Tell if fades is running inside a virtualenv. The params 'real_prefix' and 'base_prefix' may be None. This is copied from pip code (slightly modified), see https://github.com/pypa/pip/blob/281eb61b09d87765d7c2b92f6982b3fe76ccb0af/ pip/locations.py#L39
[ "Tell", "if", "fades", "is", "running", "inside", "a", "virtualenv", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/main.py#L137-L154
22,548
PyAr/fades
fades/main.py
_get_normalized_args
def _get_normalized_args(parser): """Return the parsed command line arguments. Support the case when executed from a shebang, where all the parameters come in sys.argv[1] in a single string separated by spaces (in this case, the third parameter is what is being executed) """ env = os.environ if '_' in env and env['_'] != sys.argv[0] and len(sys.argv) >= 1 and " " in sys.argv[1]: return parser.parse_args(shlex.split(sys.argv[1]) + sys.argv[2:]) else: return parser.parse_args()
python
def _get_normalized_args(parser): env = os.environ if '_' in env and env['_'] != sys.argv[0] and len(sys.argv) >= 1 and " " in sys.argv[1]: return parser.parse_args(shlex.split(sys.argv[1]) + sys.argv[2:]) else: return parser.parse_args()
[ "def", "_get_normalized_args", "(", "parser", ")", ":", "env", "=", "os", ".", "environ", "if", "'_'", "in", "env", "and", "env", "[", "'_'", "]", "!=", "sys", ".", "argv", "[", "0", "]", "and", "len", "(", "sys", ".", "argv", ")", ">=", "1", "...
Return the parsed command line arguments. Support the case when executed from a shebang, where all the parameters come in sys.argv[1] in a single string separated by spaces (in this case, the third parameter is what is being executed)
[ "Return", "the", "parsed", "command", "line", "arguments", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/main.py#L157-L169
22,549
PyAr/fades
fades/parsing.py
parse_fade_requirement
def parse_fade_requirement(text): """Return a requirement and repo from the given text, already parsed and converted.""" text = text.strip() if "::" in text: repo_raw, requirement = text.split("::", 1) try: repo = {'pypi': REPO_PYPI, 'vcs': REPO_VCS}[repo_raw] except KeyError: logger.warning("Not understood fades repository: %r", repo_raw) return else: if ":" in text and "/" in text: repo = REPO_VCS else: repo = REPO_PYPI requirement = text if repo == REPO_VCS: dependency = VCSDependency(requirement) else: dependency = list(parse_requirements(requirement))[0] return repo, dependency
python
def parse_fade_requirement(text): text = text.strip() if "::" in text: repo_raw, requirement = text.split("::", 1) try: repo = {'pypi': REPO_PYPI, 'vcs': REPO_VCS}[repo_raw] except KeyError: logger.warning("Not understood fades repository: %r", repo_raw) return else: if ":" in text and "/" in text: repo = REPO_VCS else: repo = REPO_PYPI requirement = text if repo == REPO_VCS: dependency = VCSDependency(requirement) else: dependency = list(parse_requirements(requirement))[0] return repo, dependency
[ "def", "parse_fade_requirement", "(", "text", ")", ":", "text", "=", "text", ".", "strip", "(", ")", "if", "\"::\"", "in", "text", ":", "repo_raw", ",", "requirement", "=", "text", ".", "split", "(", "\"::\"", ",", "1", ")", "try", ":", "repo", "=", ...
Return a requirement and repo from the given text, already parsed and converted.
[ "Return", "a", "requirement", "and", "repo", "from", "the", "given", "text", "already", "parsed", "and", "converted", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/parsing.py#L67-L89
22,550
PyAr/fades
fades/parsing.py
_parse_content
def _parse_content(fh): """Parse the content of a script to find marked dependencies.""" content = iter(fh) deps = {} for line in content: # quickly discard most of the lines if 'fades' not in line: continue # discard other string with 'fades' that isn't a comment if '#' not in line: continue # assure that it's a well commented line and no other stuff line = line.strip() index_of_last_fades = line.rfind('fades') index_of_first_hash = line.index('#') # discard when fades does not appear after # if index_of_first_hash > index_of_last_fades: continue import_part, fades_part = line.rsplit("#", 1) # discard other comments in the same line that aren't for fades if "fades" not in fades_part: import_part, fades_part = import_part.rsplit("#", 1) fades_part = fades_part.strip() if not fades_part.startswith("fades"): continue if not import_part: # the fades comment was done at the beginning of the line, # which means that the import info is in the next one import_part = next(content).strip() if import_part.startswith('#'): continue # get module import_tokens = import_part.split() if import_tokens[0] == 'import': module_path = import_tokens[1] elif import_tokens[0] == 'from' and import_tokens[2] == 'import': module_path = import_tokens[1] else: logger.debug("Not understood import info: %s", import_tokens) continue module = module_path.split(".")[0] # If fades know the real name of the pkg. Replace it! 
if module in PKG_NAMES_DB: module = PKG_NAMES_DB[module] # To match the "safe" name that pkg_resources creates: module = module.replace('_', '-') # get the fades info after 'fades' mark, if any if len(fades_part) == 5 or fades_part[5:].strip()[0] in "<>=!": # just the 'fades' mark, and maybe a version specification, the requirement is what # was imported (maybe with that version comparison) requirement = module + fades_part[5:] elif fades_part[5] != " ": # starts with fades but it's part of a longer weird word logger.warning("Not understood fades info: %r", fades_part) continue else: # more complex stuff, to be parsed as a normal requirement requirement = fades_part[5:] # parse and convert the requirement parsed_req = parse_fade_requirement(requirement) if parsed_req is None: continue repo, dependency = parsed_req deps.setdefault(repo, []).append(dependency) return deps
python
def _parse_content(fh): content = iter(fh) deps = {} for line in content: # quickly discard most of the lines if 'fades' not in line: continue # discard other string with 'fades' that isn't a comment if '#' not in line: continue # assure that it's a well commented line and no other stuff line = line.strip() index_of_last_fades = line.rfind('fades') index_of_first_hash = line.index('#') # discard when fades does not appear after # if index_of_first_hash > index_of_last_fades: continue import_part, fades_part = line.rsplit("#", 1) # discard other comments in the same line that aren't for fades if "fades" not in fades_part: import_part, fades_part = import_part.rsplit("#", 1) fades_part = fades_part.strip() if not fades_part.startswith("fades"): continue if not import_part: # the fades comment was done at the beginning of the line, # which means that the import info is in the next one import_part = next(content).strip() if import_part.startswith('#'): continue # get module import_tokens = import_part.split() if import_tokens[0] == 'import': module_path = import_tokens[1] elif import_tokens[0] == 'from' and import_tokens[2] == 'import': module_path = import_tokens[1] else: logger.debug("Not understood import info: %s", import_tokens) continue module = module_path.split(".")[0] # If fades know the real name of the pkg. Replace it! 
if module in PKG_NAMES_DB: module = PKG_NAMES_DB[module] # To match the "safe" name that pkg_resources creates: module = module.replace('_', '-') # get the fades info after 'fades' mark, if any if len(fades_part) == 5 or fades_part[5:].strip()[0] in "<>=!": # just the 'fades' mark, and maybe a version specification, the requirement is what # was imported (maybe with that version comparison) requirement = module + fades_part[5:] elif fades_part[5] != " ": # starts with fades but it's part of a longer weird word logger.warning("Not understood fades info: %r", fades_part) continue else: # more complex stuff, to be parsed as a normal requirement requirement = fades_part[5:] # parse and convert the requirement parsed_req = parse_fade_requirement(requirement) if parsed_req is None: continue repo, dependency = parsed_req deps.setdefault(repo, []).append(dependency) return deps
[ "def", "_parse_content", "(", "fh", ")", ":", "content", "=", "iter", "(", "fh", ")", "deps", "=", "{", "}", "for", "line", "in", "content", ":", "# quickly discard most of the lines", "if", "'fades'", "not", "in", "line", ":", "continue", "# discard other s...
Parse the content of a script to find marked dependencies.
[ "Parse", "the", "content", "of", "a", "script", "to", "find", "marked", "dependencies", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/parsing.py#L92-L169
22,551
PyAr/fades
fades/parsing.py
_parse_docstring
def _parse_docstring(fh): """Parse the docstrings of a script to find marked dependencies.""" find_fades = re.compile(r'\b(fades)\b:').search for line in fh: if line.startswith("'"): quote = "'" break if line.startswith('"'): quote = '"' break else: return {} if line[1] == quote: # comment start with triple quotes endquote = quote * 3 else: endquote = quote if endquote in line[len(endquote):]: docstring_lines = [line[:line.index(endquote)]] else: docstring_lines = [line] for line in fh: if endquote in line: docstring_lines.append(line[:line.index(endquote)]) break docstring_lines.append(line) docstring_lines = iter(docstring_lines) for doc_line in docstring_lines: if find_fades(doc_line): break else: return {} return _parse_requirement(list(docstring_lines))
python
def _parse_docstring(fh): find_fades = re.compile(r'\b(fades)\b:').search for line in fh: if line.startswith("'"): quote = "'" break if line.startswith('"'): quote = '"' break else: return {} if line[1] == quote: # comment start with triple quotes endquote = quote * 3 else: endquote = quote if endquote in line[len(endquote):]: docstring_lines = [line[:line.index(endquote)]] else: docstring_lines = [line] for line in fh: if endquote in line: docstring_lines.append(line[:line.index(endquote)]) break docstring_lines.append(line) docstring_lines = iter(docstring_lines) for doc_line in docstring_lines: if find_fades(doc_line): break else: return {} return _parse_requirement(list(docstring_lines))
[ "def", "_parse_docstring", "(", "fh", ")", ":", "find_fades", "=", "re", ".", "compile", "(", "r'\\b(fades)\\b:'", ")", ".", "search", "for", "line", "in", "fh", ":", "if", "line", ".", "startswith", "(", "\"'\"", ")", ":", "quote", "=", "\"'\"", "brea...
Parse the docstrings of a script to find marked dependencies.
[ "Parse", "the", "docstrings", "of", "a", "script", "to", "find", "marked", "dependencies", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/parsing.py#L172-L209
22,552
PyAr/fades
fades/parsing.py
_parse_requirement
def _parse_requirement(iterable): """Actually parse the requirements, from file or manually specified.""" deps = {} for line in iterable: line = line.strip() if not line or line[0] == '#': continue parsed_req = parse_fade_requirement(line) if parsed_req is None: continue repo, dependency = parsed_req deps.setdefault(repo, []).append(dependency) return deps
python
def _parse_requirement(iterable): deps = {} for line in iterable: line = line.strip() if not line or line[0] == '#': continue parsed_req = parse_fade_requirement(line) if parsed_req is None: continue repo, dependency = parsed_req deps.setdefault(repo, []).append(dependency) return deps
[ "def", "_parse_requirement", "(", "iterable", ")", ":", "deps", "=", "{", "}", "for", "line", "in", "iterable", ":", "line", "=", "line", ".", "strip", "(", ")", "if", "not", "line", "or", "line", "[", "0", "]", "==", "'#'", ":", "continue", "parse...
Actually parse the requirements, from file or manually specified.
[ "Actually", "parse", "the", "requirements", "from", "file", "or", "manually", "specified", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/parsing.py#L212-L226
22,553
PyAr/fades
fades/parsing.py
_read_lines
def _read_lines(filepath): """Read a req file to a list to support nested requirement files.""" with open(filepath, 'rt', encoding='utf8') as fh: for line in fh: line = line.strip() if line.startswith("-r"): logger.debug("Reading deps from nested requirement file: %s", line) try: nested_filename = line.split()[1] except IndexError: logger.warning( "Invalid format to indicate a nested requirements file: '%r'", line) else: nested_filepath = os.path.join( os.path.dirname(filepath), nested_filename) yield from _read_lines(nested_filepath) else: yield line
python
def _read_lines(filepath): with open(filepath, 'rt', encoding='utf8') as fh: for line in fh: line = line.strip() if line.startswith("-r"): logger.debug("Reading deps from nested requirement file: %s", line) try: nested_filename = line.split()[1] except IndexError: logger.warning( "Invalid format to indicate a nested requirements file: '%r'", line) else: nested_filepath = os.path.join( os.path.dirname(filepath), nested_filename) yield from _read_lines(nested_filepath) else: yield line
[ "def", "_read_lines", "(", "filepath", ")", ":", "with", "open", "(", "filepath", ",", "'rt'", ",", "encoding", "=", "'utf8'", ")", "as", "fh", ":", "for", "line", "in", "fh", ":", "line", "=", "line", ".", "strip", "(", ")", "if", "line", ".", "...
Read a req file to a list to support nested requirement files.
[ "Read", "a", "req", "file", "to", "a", "list", "to", "support", "nested", "requirement", "files", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/parsing.py#L236-L253
22,554
PyAr/fades
fades/envbuilder.py
create_venv
def create_venv(requested_deps, interpreter, is_current, options, pip_options): """Create a new virtualvenv with the requirements of this script.""" # create virtualenv env = _FadesEnvBuilder() env_path, env_bin_path, pip_installed = env.create_env(interpreter, is_current, options) venv_data = {} venv_data['env_path'] = env_path venv_data['env_bin_path'] = env_bin_path venv_data['pip_installed'] = pip_installed # install deps installed = {} for repo in requested_deps.keys(): if repo in (REPO_PYPI, REPO_VCS): mgr = PipManager(env_bin_path, pip_installed=pip_installed, options=pip_options) else: logger.warning("Install from %r not implemented", repo) continue installed[repo] = {} repo_requested = requested_deps[repo] logger.debug("Installing dependencies for repo %r: requested=%s", repo, repo_requested) for dependency in repo_requested: try: mgr.install(dependency) except Exception: logger.debug("Installation Step failed, removing virtualenv") destroy_venv(env_path) raise FadesError('Dependency installation failed') if repo == REPO_VCS: # no need to request the installed version, as we'll always compare # to the url itself project = dependency.url version = None else: # always store the installed dependency, as in the future we'll select the venv # based on what is installed, not what used requested (remember that user may # request >, >=, etc!) project = dependency.project_name version = mgr.get_version(project) installed[repo][project] = version logger.debug("Installed dependencies: %s", installed) return venv_data, installed
python
def create_venv(requested_deps, interpreter, is_current, options, pip_options): # create virtualenv env = _FadesEnvBuilder() env_path, env_bin_path, pip_installed = env.create_env(interpreter, is_current, options) venv_data = {} venv_data['env_path'] = env_path venv_data['env_bin_path'] = env_bin_path venv_data['pip_installed'] = pip_installed # install deps installed = {} for repo in requested_deps.keys(): if repo in (REPO_PYPI, REPO_VCS): mgr = PipManager(env_bin_path, pip_installed=pip_installed, options=pip_options) else: logger.warning("Install from %r not implemented", repo) continue installed[repo] = {} repo_requested = requested_deps[repo] logger.debug("Installing dependencies for repo %r: requested=%s", repo, repo_requested) for dependency in repo_requested: try: mgr.install(dependency) except Exception: logger.debug("Installation Step failed, removing virtualenv") destroy_venv(env_path) raise FadesError('Dependency installation failed') if repo == REPO_VCS: # no need to request the installed version, as we'll always compare # to the url itself project = dependency.url version = None else: # always store the installed dependency, as in the future we'll select the venv # based on what is installed, not what used requested (remember that user may # request >, >=, etc!) project = dependency.project_name version = mgr.get_version(project) installed[repo][project] = version logger.debug("Installed dependencies: %s", installed) return venv_data, installed
[ "def", "create_venv", "(", "requested_deps", ",", "interpreter", ",", "is_current", ",", "options", ",", "pip_options", ")", ":", "# create virtualenv", "env", "=", "_FadesEnvBuilder", "(", ")", "env_path", ",", "env_bin_path", ",", "pip_installed", "=", "env", ...
Create a new virtualvenv with the requirements of this script.
[ "Create", "a", "new", "virtualvenv", "with", "the", "requirements", "of", "this", "script", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/envbuilder.py#L124-L168
22,555
PyAr/fades
fades/envbuilder.py
destroy_venv
def destroy_venv(env_path, venvscache=None): """Destroy a venv.""" # remove the venv itself in disk logger.debug("Destroying virtualenv at: %s", env_path) shutil.rmtree(env_path, ignore_errors=True) # remove venv from cache if venvscache is not None: venvscache.remove(env_path)
python
def destroy_venv(env_path, venvscache=None): # remove the venv itself in disk logger.debug("Destroying virtualenv at: %s", env_path) shutil.rmtree(env_path, ignore_errors=True) # remove venv from cache if venvscache is not None: venvscache.remove(env_path)
[ "def", "destroy_venv", "(", "env_path", ",", "venvscache", "=", "None", ")", ":", "# remove the venv itself in disk", "logger", ".", "debug", "(", "\"Destroying virtualenv at: %s\"", ",", "env_path", ")", "shutil", ".", "rmtree", "(", "env_path", ",", "ignore_errors...
Destroy a venv.
[ "Destroy", "a", "venv", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/envbuilder.py#L171-L179
22,556
PyAr/fades
fades/envbuilder.py
_FadesEnvBuilder.create_with_virtualenv
def create_with_virtualenv(self, interpreter, virtualenv_options): """Create a virtualenv using the virtualenv lib.""" args = ['virtualenv', '--python', interpreter, self.env_path] args.extend(virtualenv_options) if not self.pip_installed: args.insert(3, '--no-pip') try: helpers.logged_exec(args) self.env_bin_path = os.path.join(self.env_path, 'bin') except FileNotFoundError as error: logger.error('Virtualenv is not installed. It is needed to create a virtualenv with ' 'a different python version than fades (got {})'.format(error)) raise FadesError('virtualenv not found') except helpers.ExecutionError as error: error.dump_to_log(logger) raise FadesError('virtualenv could not be run') except Exception as error: logger.exception("Error creating virtualenv: %s", error) raise FadesError('General error while running virtualenv')
python
def create_with_virtualenv(self, interpreter, virtualenv_options): args = ['virtualenv', '--python', interpreter, self.env_path] args.extend(virtualenv_options) if not self.pip_installed: args.insert(3, '--no-pip') try: helpers.logged_exec(args) self.env_bin_path = os.path.join(self.env_path, 'bin') except FileNotFoundError as error: logger.error('Virtualenv is not installed. It is needed to create a virtualenv with ' 'a different python version than fades (got {})'.format(error)) raise FadesError('virtualenv not found') except helpers.ExecutionError as error: error.dump_to_log(logger) raise FadesError('virtualenv could not be run') except Exception as error: logger.exception("Error creating virtualenv: %s", error) raise FadesError('General error while running virtualenv')
[ "def", "create_with_virtualenv", "(", "self", ",", "interpreter", ",", "virtualenv_options", ")", ":", "args", "=", "[", "'virtualenv'", ",", "'--python'", ",", "interpreter", ",", "self", ".", "env_path", "]", "args", ".", "extend", "(", "virtualenv_options", ...
Create a virtualenv using the virtualenv lib.
[ "Create", "a", "virtualenv", "using", "the", "virtualenv", "lib", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/envbuilder.py#L75-L93
22,557
PyAr/fades
fades/envbuilder.py
_FadesEnvBuilder.create_env
def create_env(self, interpreter, is_current, options): """Create the virtualenv and return its info.""" if is_current: # apply pyvenv options pyvenv_options = options['pyvenv_options'] if "--system-site-packages" in pyvenv_options: self.system_site_packages = True logger.debug("Creating virtualenv with pyvenv. options=%s", pyvenv_options) self.create(self.env_path) else: virtualenv_options = options['virtualenv_options'] logger.debug("Creating virtualenv with virtualenv") self.create_with_virtualenv(interpreter, virtualenv_options) logger.debug("env_bin_path: %s", self.env_bin_path) # Re check if pip was installed (supporting both binary and .exe for Windows) pip_bin = os.path.join(self.env_bin_path, "pip") pip_exe = os.path.join(self.env_bin_path, "pip.exe") if not (os.path.exists(pip_bin) or os.path.exists(pip_exe)): logger.debug("pip isn't installed in the venv, setting pip_installed=False") self.pip_installed = False return self.env_path, self.env_bin_path, self.pip_installed
python
def create_env(self, interpreter, is_current, options): if is_current: # apply pyvenv options pyvenv_options = options['pyvenv_options'] if "--system-site-packages" in pyvenv_options: self.system_site_packages = True logger.debug("Creating virtualenv with pyvenv. options=%s", pyvenv_options) self.create(self.env_path) else: virtualenv_options = options['virtualenv_options'] logger.debug("Creating virtualenv with virtualenv") self.create_with_virtualenv(interpreter, virtualenv_options) logger.debug("env_bin_path: %s", self.env_bin_path) # Re check if pip was installed (supporting both binary and .exe for Windows) pip_bin = os.path.join(self.env_bin_path, "pip") pip_exe = os.path.join(self.env_bin_path, "pip.exe") if not (os.path.exists(pip_bin) or os.path.exists(pip_exe)): logger.debug("pip isn't installed in the venv, setting pip_installed=False") self.pip_installed = False return self.env_path, self.env_bin_path, self.pip_installed
[ "def", "create_env", "(", "self", ",", "interpreter", ",", "is_current", ",", "options", ")", ":", "if", "is_current", ":", "# apply pyvenv options", "pyvenv_options", "=", "options", "[", "'pyvenv_options'", "]", "if", "\"--system-site-packages\"", "in", "pyvenv_op...
Create the virtualenv and return its info.
[ "Create", "the", "virtualenv", "and", "return", "its", "info", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/envbuilder.py#L95-L117
22,558
PyAr/fades
fades/envbuilder.py
UsageManager.store_usage_stat
def store_usage_stat(self, venv_data, cache): """Log an usage record for venv_data.""" with open(self.stat_file_path, 'at') as f: self._write_venv_usage(f, venv_data)
python
def store_usage_stat(self, venv_data, cache): with open(self.stat_file_path, 'at') as f: self._write_venv_usage(f, venv_data)
[ "def", "store_usage_stat", "(", "self", ",", "venv_data", ",", "cache", ")", ":", "with", "open", "(", "self", ".", "stat_file_path", ",", "'at'", ")", "as", "f", ":", "self", ".", "_write_venv_usage", "(", "f", ",", "venv_data", ")" ]
Log an usage record for venv_data.
[ "Log", "an", "usage", "record", "for", "venv_data", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/envbuilder.py#L192-L195
22,559
PyAr/fades
fades/envbuilder.py
UsageManager.clean_unused_venvs
def clean_unused_venvs(self, max_days_to_keep): """Compact usage stats and remove venvs. This method loads the complete file usage in memory, for every venv compact all records in one (the lastest), updates this info for every env deleted and, finally, write the entire file to disk. If something failed during this steps, usage file remains unchanged and can contain some data about some deleted env. This is not a problem, the next time this function it's called, this records will be deleted. """ with filelock(self.stat_file_lock): now = datetime.utcnow() venvs_dict = self._get_compacted_dict_usage_from_file() for venv_uuid, usage_date in venvs_dict.copy().items(): usage_date = self._str_to_datetime(usage_date) if (now - usage_date).days > max_days_to_keep: # remove venv from usage dict del venvs_dict[venv_uuid] venv_meta = self.venvscache.get_venv(uuid=venv_uuid) if venv_meta is None: # if meta isn't found means that something had failed previously and # usage_file wasn't updated. continue env_path = venv_meta['env_path'] logger.info("Destroying virtualenv at: %s", env_path) # #256 destroy_venv(env_path, self.venvscache) self._write_compacted_dict_usage_to_file(venvs_dict)
python
def clean_unused_venvs(self, max_days_to_keep): with filelock(self.stat_file_lock): now = datetime.utcnow() venvs_dict = self._get_compacted_dict_usage_from_file() for venv_uuid, usage_date in venvs_dict.copy().items(): usage_date = self._str_to_datetime(usage_date) if (now - usage_date).days > max_days_to_keep: # remove venv from usage dict del venvs_dict[venv_uuid] venv_meta = self.venvscache.get_venv(uuid=venv_uuid) if venv_meta is None: # if meta isn't found means that something had failed previously and # usage_file wasn't updated. continue env_path = venv_meta['env_path'] logger.info("Destroying virtualenv at: %s", env_path) # #256 destroy_venv(env_path, self.venvscache) self._write_compacted_dict_usage_to_file(venvs_dict)
[ "def", "clean_unused_venvs", "(", "self", ",", "max_days_to_keep", ")", ":", "with", "filelock", "(", "self", ".", "stat_file_lock", ")", ":", "now", "=", "datetime", ".", "utcnow", "(", ")", "venvs_dict", "=", "self", ".", "_get_compacted_dict_usage_from_file",...
Compact usage stats and remove venvs. This method loads the complete file usage in memory, for every venv compact all records in one (the lastest), updates this info for every env deleted and, finally, write the entire file to disk. If something failed during this steps, usage file remains unchanged and can contain some data about some deleted env. This is not a problem, the next time this function it's called, this records will be deleted.
[ "Compact", "usage", "stats", "and", "remove", "venvs", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/envbuilder.py#L214-L242
22,560
PyAr/fades
fades/helpers.py
logged_exec
def logged_exec(cmd): """Execute a command, redirecting the output to the log.""" logger = logging.getLogger('fades.exec') logger.debug("Executing external command: %r", cmd) p = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True) stdout = [] for line in p.stdout: line = line[:-1] stdout.append(line) logger.debug(STDOUT_LOG_PREFIX + line) retcode = p.wait() if retcode: raise ExecutionError(retcode, cmd, stdout) return stdout
python
def logged_exec(cmd): logger = logging.getLogger('fades.exec') logger.debug("Executing external command: %r", cmd) p = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True) stdout = [] for line in p.stdout: line = line[:-1] stdout.append(line) logger.debug(STDOUT_LOG_PREFIX + line) retcode = p.wait() if retcode: raise ExecutionError(retcode, cmd, stdout) return stdout
[ "def", "logged_exec", "(", "cmd", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", "'fades.exec'", ")", "logger", ".", "debug", "(", "\"Executing external command: %r\"", ",", "cmd", ")", "p", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "st...
Execute a command, redirecting the output to the log.
[ "Execute", "a", "command", "redirecting", "the", "output", "to", "the", "log", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/helpers.py#L72-L86
22,561
PyAr/fades
fades/helpers.py
_get_specific_dir
def _get_specific_dir(dir_type): """Get a specific directory, using some XDG base, with sensible default.""" if SNAP_BASEDIR_NAME in os.environ: logger.debug("Getting base dir information from SNAP_BASEDIR_NAME env var.") direct = os.path.join(os.environ[SNAP_BASEDIR_NAME], dir_type) else: try: basedirectory = _get_basedirectory() except ImportError: logger.debug("Using last resort base dir: ~/.fades") from os.path import expanduser direct = os.path.join(expanduser("~"), ".fades") else: xdg_attrib = 'xdg_{}_home'.format(dir_type) base = getattr(basedirectory, xdg_attrib) direct = os.path.join(base, 'fades') if not os.path.exists(direct): os.makedirs(direct) return direct
python
def _get_specific_dir(dir_type): if SNAP_BASEDIR_NAME in os.environ: logger.debug("Getting base dir information from SNAP_BASEDIR_NAME env var.") direct = os.path.join(os.environ[SNAP_BASEDIR_NAME], dir_type) else: try: basedirectory = _get_basedirectory() except ImportError: logger.debug("Using last resort base dir: ~/.fades") from os.path import expanduser direct = os.path.join(expanduser("~"), ".fades") else: xdg_attrib = 'xdg_{}_home'.format(dir_type) base = getattr(basedirectory, xdg_attrib) direct = os.path.join(base, 'fades') if not os.path.exists(direct): os.makedirs(direct) return direct
[ "def", "_get_specific_dir", "(", "dir_type", ")", ":", "if", "SNAP_BASEDIR_NAME", "in", "os", ".", "environ", ":", "logger", ".", "debug", "(", "\"Getting base dir information from SNAP_BASEDIR_NAME env var.\"", ")", "direct", "=", "os", ".", "path", ".", "join", ...
Get a specific directory, using some XDG base, with sensible default.
[ "Get", "a", "specific", "directory", "using", "some", "XDG", "base", "with", "sensible", "default", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/helpers.py#L94-L113
22,562
PyAr/fades
fades/helpers.py
_get_interpreter_info
def _get_interpreter_info(interpreter=None): """Return the interpreter's full path using pythonX.Y format.""" if interpreter is None: # If interpreter is None by default returns the current interpreter data. major, minor = sys.version_info[:2] executable = sys.executable else: args = [interpreter, '-c', SHOW_VERSION_CMD] try: requested_interpreter_info = logged_exec(args) except Exception as error: logger.error("Error getting requested interpreter version: %s", error) raise FadesError("Could not get interpreter version") requested_interpreter_info = json.loads(requested_interpreter_info[0]) executable = requested_interpreter_info['path'] major = requested_interpreter_info['major'] minor = requested_interpreter_info['minor'] if executable[-1].isdigit(): executable = executable.split(".")[0][:-1] interpreter = "{}{}.{}".format(executable, major, minor) return interpreter
python
def _get_interpreter_info(interpreter=None): if interpreter is None: # If interpreter is None by default returns the current interpreter data. major, minor = sys.version_info[:2] executable = sys.executable else: args = [interpreter, '-c', SHOW_VERSION_CMD] try: requested_interpreter_info = logged_exec(args) except Exception as error: logger.error("Error getting requested interpreter version: %s", error) raise FadesError("Could not get interpreter version") requested_interpreter_info = json.loads(requested_interpreter_info[0]) executable = requested_interpreter_info['path'] major = requested_interpreter_info['major'] minor = requested_interpreter_info['minor'] if executable[-1].isdigit(): executable = executable.split(".")[0][:-1] interpreter = "{}{}.{}".format(executable, major, minor) return interpreter
[ "def", "_get_interpreter_info", "(", "interpreter", "=", "None", ")", ":", "if", "interpreter", "is", "None", ":", "# If interpreter is None by default returns the current interpreter data.", "major", ",", "minor", "=", "sys", ".", "version_info", "[", ":", "2", "]", ...
Return the interpreter's full path using pythonX.Y format.
[ "Return", "the", "interpreter", "s", "full", "path", "using", "pythonX", ".", "Y", "format", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/helpers.py#L126-L146
22,563
PyAr/fades
fades/helpers.py
get_interpreter_version
def get_interpreter_version(requested_interpreter): """Return a 'sanitized' interpreter and indicates if it is the current one.""" logger.debug('Getting interpreter version for: %s', requested_interpreter) current_interpreter = _get_interpreter_info() logger.debug('Current interpreter is %s', current_interpreter) if requested_interpreter is None: return(current_interpreter, True) else: requested_interpreter = _get_interpreter_info(requested_interpreter) is_current = requested_interpreter == current_interpreter logger.debug('Interpreter=%s. It is the same as fades?=%s', requested_interpreter, is_current) return (requested_interpreter, is_current)
python
def get_interpreter_version(requested_interpreter): logger.debug('Getting interpreter version for: %s', requested_interpreter) current_interpreter = _get_interpreter_info() logger.debug('Current interpreter is %s', current_interpreter) if requested_interpreter is None: return(current_interpreter, True) else: requested_interpreter = _get_interpreter_info(requested_interpreter) is_current = requested_interpreter == current_interpreter logger.debug('Interpreter=%s. It is the same as fades?=%s', requested_interpreter, is_current) return (requested_interpreter, is_current)
[ "def", "get_interpreter_version", "(", "requested_interpreter", ")", ":", "logger", ".", "debug", "(", "'Getting interpreter version for: %s'", ",", "requested_interpreter", ")", "current_interpreter", "=", "_get_interpreter_info", "(", ")", "logger", ".", "debug", "(", ...
Return a 'sanitized' interpreter and indicates if it is the current one.
[ "Return", "a", "sanitized", "interpreter", "and", "indicates", "if", "it", "is", "the", "current", "one", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/helpers.py#L149-L161
22,564
PyAr/fades
fades/helpers.py
check_pypi_updates
def check_pypi_updates(dependencies): """Return a list of dependencies to upgrade.""" dependencies_up_to_date = [] for dependency in dependencies.get('pypi', []): # get latest version from PyPI api try: latest_version = get_latest_version_number(dependency.project_name) except Exception as error: logger.warning("--check-updates command will be aborted. Error: %s", error) return dependencies # get required version required_version = None if dependency.specs: _, required_version = dependency.specs[0] if required_version: dependencies_up_to_date.append(dependency) if latest_version > required_version: logger.info("There is a new version of %s: %s", dependency.project_name, latest_version) elif latest_version < required_version: logger.warning("The requested version for %s is greater " "than latest found in PyPI: %s", dependency.project_name, latest_version) else: logger.info("The requested version for %s is the latest one in PyPI: %s", dependency.project_name, latest_version) else: project_name_plus = "{}=={}".format(dependency.project_name, latest_version) dependencies_up_to_date.append(pkg_resources.Requirement.parse(project_name_plus)) logger.info("The latest version of %r is %s and will use it.", dependency.project_name, latest_version) dependencies["pypi"] = dependencies_up_to_date return dependencies
python
def check_pypi_updates(dependencies): dependencies_up_to_date = [] for dependency in dependencies.get('pypi', []): # get latest version from PyPI api try: latest_version = get_latest_version_number(dependency.project_name) except Exception as error: logger.warning("--check-updates command will be aborted. Error: %s", error) return dependencies # get required version required_version = None if dependency.specs: _, required_version = dependency.specs[0] if required_version: dependencies_up_to_date.append(dependency) if latest_version > required_version: logger.info("There is a new version of %s: %s", dependency.project_name, latest_version) elif latest_version < required_version: logger.warning("The requested version for %s is greater " "than latest found in PyPI: %s", dependency.project_name, latest_version) else: logger.info("The requested version for %s is the latest one in PyPI: %s", dependency.project_name, latest_version) else: project_name_plus = "{}=={}".format(dependency.project_name, latest_version) dependencies_up_to_date.append(pkg_resources.Requirement.parse(project_name_plus)) logger.info("The latest version of %r is %s and will use it.", dependency.project_name, latest_version) dependencies["pypi"] = dependencies_up_to_date return dependencies
[ "def", "check_pypi_updates", "(", "dependencies", ")", ":", "dependencies_up_to_date", "=", "[", "]", "for", "dependency", "in", "dependencies", ".", "get", "(", "'pypi'", ",", "[", "]", ")", ":", "# get latest version from PyPI api", "try", ":", "latest_version",...
Return a list of dependencies to upgrade.
[ "Return", "a", "list", "of", "dependencies", "to", "upgrade", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/helpers.py#L180-L214
22,565
PyAr/fades
fades/helpers.py
_pypi_head_package
def _pypi_head_package(dependency): """Hit pypi with a http HEAD to check if pkg_name exists.""" if dependency.specs: _, version = dependency.specs[0] url = BASE_PYPI_URL_WITH_VERSION.format(name=dependency.project_name, version=version) else: url = BASE_PYPI_URL.format(name=dependency.project_name) logger.debug("Doing HEAD requests against %s", url) req = request.Request(url, method='HEAD') try: response = request.urlopen(req) except HTTPError as http_error: if http_error.code == HTTP_STATUS_NOT_FOUND: return False else: raise if response.status == HTTP_STATUS_OK: logger.debug("%r exists in PyPI.", dependency) return True else: # Maybe we are getting somethink like a redirect. In this case we are only # warning to the user and trying to install the dependency. # In the worst scenery fades will fail to install it. logger.warning("Got a (unexpected) HTTP_STATUS=%r and reason=%r checking if %r exists", response.status, response.reason, dependency) return True
python
def _pypi_head_package(dependency): if dependency.specs: _, version = dependency.specs[0] url = BASE_PYPI_URL_WITH_VERSION.format(name=dependency.project_name, version=version) else: url = BASE_PYPI_URL.format(name=dependency.project_name) logger.debug("Doing HEAD requests against %s", url) req = request.Request(url, method='HEAD') try: response = request.urlopen(req) except HTTPError as http_error: if http_error.code == HTTP_STATUS_NOT_FOUND: return False else: raise if response.status == HTTP_STATUS_OK: logger.debug("%r exists in PyPI.", dependency) return True else: # Maybe we are getting somethink like a redirect. In this case we are only # warning to the user and trying to install the dependency. # In the worst scenery fades will fail to install it. logger.warning("Got a (unexpected) HTTP_STATUS=%r and reason=%r checking if %r exists", response.status, response.reason, dependency) return True
[ "def", "_pypi_head_package", "(", "dependency", ")", ":", "if", "dependency", ".", "specs", ":", "_", ",", "version", "=", "dependency", ".", "specs", "[", "0", "]", "url", "=", "BASE_PYPI_URL_WITH_VERSION", ".", "format", "(", "name", "=", "dependency", "...
Hit pypi with a http HEAD to check if pkg_name exists.
[ "Hit", "pypi", "with", "a", "http", "HEAD", "to", "check", "if", "pkg_name", "exists", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/helpers.py#L217-L242
22,566
PyAr/fades
fades/helpers.py
check_pypi_exists
def check_pypi_exists(dependencies): """Check if the indicated dependencies actually exists in pypi.""" for dependency in dependencies.get('pypi', []): logger.debug("Checking if %r exists in PyPI", dependency) try: exists = _pypi_head_package(dependency) except Exception as error: logger.error("Error checking %s in PyPI: %r", dependency, error) raise FadesError("Could not check if dependency exists in PyPI") else: if not exists: logger.error("%s doesn't exists in PyPI.", dependency) return False return True
python
def check_pypi_exists(dependencies): for dependency in dependencies.get('pypi', []): logger.debug("Checking if %r exists in PyPI", dependency) try: exists = _pypi_head_package(dependency) except Exception as error: logger.error("Error checking %s in PyPI: %r", dependency, error) raise FadesError("Could not check if dependency exists in PyPI") else: if not exists: logger.error("%s doesn't exists in PyPI.", dependency) return False return True
[ "def", "check_pypi_exists", "(", "dependencies", ")", ":", "for", "dependency", "in", "dependencies", ".", "get", "(", "'pypi'", ",", "[", "]", ")", ":", "logger", ".", "debug", "(", "\"Checking if %r exists in PyPI\"", ",", "dependency", ")", "try", ":", "e...
Check if the indicated dependencies actually exists in pypi.
[ "Check", "if", "the", "indicated", "dependencies", "actually", "exists", "in", "pypi", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/helpers.py#L245-L258
22,567
PyAr/fades
fades/helpers.py
download_remote_script
def download_remote_script(url): """Download the content of a remote script to a local temp file.""" temp_fh = tempfile.NamedTemporaryFile('wt', encoding='utf8', suffix=".py", delete=False) downloader = _ScriptDownloader(url) logger.info( "Downloading remote script from %r using (%r downloader) to %r", url, downloader.name, temp_fh.name) content = downloader.get() temp_fh.write(content) temp_fh.close() return temp_fh.name
python
def download_remote_script(url): temp_fh = tempfile.NamedTemporaryFile('wt', encoding='utf8', suffix=".py", delete=False) downloader = _ScriptDownloader(url) logger.info( "Downloading remote script from %r using (%r downloader) to %r", url, downloader.name, temp_fh.name) content = downloader.get() temp_fh.write(content) temp_fh.close() return temp_fh.name
[ "def", "download_remote_script", "(", "url", ")", ":", "temp_fh", "=", "tempfile", ".", "NamedTemporaryFile", "(", "'wt'", ",", "encoding", "=", "'utf8'", ",", "suffix", "=", "\".py\"", ",", "delete", "=", "False", ")", "downloader", "=", "_ScriptDownloader", ...
Download the content of a remote script to a local temp file.
[ "Download", "the", "content", "of", "a", "remote", "script", "to", "a", "local", "temp", "file", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/helpers.py#L334-L345
22,568
PyAr/fades
fades/helpers.py
ExecutionError.dump_to_log
def dump_to_log(self, logger): """Send the cmd info and collected stdout to logger.""" logger.error("Execution ended in %s for cmd %s", self._retcode, self._cmd) for line in self._collected_stdout: logger.error(STDOUT_LOG_PREFIX + line)
python
def dump_to_log(self, logger): logger.error("Execution ended in %s for cmd %s", self._retcode, self._cmd) for line in self._collected_stdout: logger.error(STDOUT_LOG_PREFIX + line)
[ "def", "dump_to_log", "(", "self", ",", "logger", ")", ":", "logger", ".", "error", "(", "\"Execution ended in %s for cmd %s\"", ",", "self", ".", "_retcode", ",", "self", ".", "_cmd", ")", "for", "line", "in", "self", ".", "_collected_stdout", ":", "logger"...
Send the cmd info and collected stdout to logger.
[ "Send", "the", "cmd", "info", "and", "collected", "stdout", "to", "logger", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/helpers.py#L65-L69
22,569
PyAr/fades
fades/helpers.py
_ScriptDownloader._decide
def _decide(self): """Find out which method should be applied to download that URL.""" netloc = parse.urlparse(self.url).netloc name = self.NETLOCS.get(netloc, 'raw') return name
python
def _decide(self): netloc = parse.urlparse(self.url).netloc name = self.NETLOCS.get(netloc, 'raw') return name
[ "def", "_decide", "(", "self", ")", ":", "netloc", "=", "parse", ".", "urlparse", "(", "self", ".", "url", ")", ".", "netloc", "name", "=", "self", ".", "NETLOCS", ".", "get", "(", "netloc", ",", "'raw'", ")", "return", "name" ]
Find out which method should be applied to download that URL.
[ "Find", "out", "which", "method", "should", "be", "applied", "to", "download", "that", "URL", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/helpers.py#L287-L291
22,570
PyAr/fades
fades/helpers.py
_ScriptDownloader.get
def get(self): """Get the script content from the URL using the decided downloader.""" method_name = "_download_" + self.name method = getattr(self, method_name) return method()
python
def get(self): method_name = "_download_" + self.name method = getattr(self, method_name) return method()
[ "def", "get", "(", "self", ")", ":", "method_name", "=", "\"_download_\"", "+", "self", ".", "name", "method", "=", "getattr", "(", "self", ",", "method_name", ")", "return", "method", "(", ")" ]
Get the script content from the URL using the decided downloader.
[ "Get", "the", "script", "content", "from", "the", "URL", "using", "the", "decided", "downloader", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/helpers.py#L293-L297
22,571
PyAr/fades
fades/helpers.py
_ScriptDownloader._download_raw
def _download_raw(self, url=None): """Download content from URL directly.""" if url is None: url = self.url req = request.Request(url, headers=self.HEADERS_PLAIN) return request.urlopen(req).read().decode("utf8")
python
def _download_raw(self, url=None): if url is None: url = self.url req = request.Request(url, headers=self.HEADERS_PLAIN) return request.urlopen(req).read().decode("utf8")
[ "def", "_download_raw", "(", "self", ",", "url", "=", "None", ")", ":", "if", "url", "is", "None", ":", "url", "=", "self", ".", "url", "req", "=", "request", ".", "Request", "(", "url", ",", "headers", "=", "self", ".", "HEADERS_PLAIN", ")", "retu...
Download content from URL directly.
[ "Download", "content", "from", "URL", "directly", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/helpers.py#L299-L304
22,572
PyAr/fades
fades/helpers.py
_ScriptDownloader._download_linkode
def _download_linkode(self): """Download content from Linkode pastebin.""" # build the API url linkode_id = self.url.split("/")[-1] if linkode_id.startswith("#"): linkode_id = linkode_id[1:] url = "https://linkode.org/api/1/linkodes/" + linkode_id req = request.Request(url, headers=self.HEADERS_JSON) resp = request.urlopen(req) raw = resp.read() data = json.loads(raw.decode("utf8")) content = data['content'] return content
python
def _download_linkode(self): # build the API url linkode_id = self.url.split("/")[-1] if linkode_id.startswith("#"): linkode_id = linkode_id[1:] url = "https://linkode.org/api/1/linkodes/" + linkode_id req = request.Request(url, headers=self.HEADERS_JSON) resp = request.urlopen(req) raw = resp.read() data = json.loads(raw.decode("utf8")) content = data['content'] return content
[ "def", "_download_linkode", "(", "self", ")", ":", "# build the API url", "linkode_id", "=", "self", ".", "url", ".", "split", "(", "\"/\"", ")", "[", "-", "1", "]", "if", "linkode_id", ".", "startswith", "(", "\"#\"", ")", ":", "linkode_id", "=", "linko...
Download content from Linkode pastebin.
[ "Download", "content", "from", "Linkode", "pastebin", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/helpers.py#L306-L319
22,573
PyAr/fades
fades/helpers.py
_ScriptDownloader._download_pastebin
def _download_pastebin(self): """Download content from Pastebin itself.""" paste_id = self.url.split("/")[-1] url = "https://pastebin.com/raw/" + paste_id return self._download_raw(url)
python
def _download_pastebin(self): paste_id = self.url.split("/")[-1] url = "https://pastebin.com/raw/" + paste_id return self._download_raw(url)
[ "def", "_download_pastebin", "(", "self", ")", ":", "paste_id", "=", "self", ".", "url", ".", "split", "(", "\"/\"", ")", "[", "-", "1", "]", "url", "=", "\"https://pastebin.com/raw/\"", "+", "paste_id", "return", "self", ".", "_download_raw", "(", "url", ...
Download content from Pastebin itself.
[ "Download", "content", "from", "Pastebin", "itself", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/helpers.py#L321-L325
22,574
PyAr/fades
fades/helpers.py
_ScriptDownloader._download_gist
def _download_gist(self): """Download content from github's pastebin.""" parts = parse.urlparse(self.url) url = "https://gist.github.com" + parts.path + "/raw" return self._download_raw(url)
python
def _download_gist(self): parts = parse.urlparse(self.url) url = "https://gist.github.com" + parts.path + "/raw" return self._download_raw(url)
[ "def", "_download_gist", "(", "self", ")", ":", "parts", "=", "parse", ".", "urlparse", "(", "self", ".", "url", ")", "url", "=", "\"https://gist.github.com\"", "+", "parts", ".", "path", "+", "\"/raw\"", "return", "self", ".", "_download_raw", "(", "url",...
Download content from github's pastebin.
[ "Download", "content", "from", "github", "s", "pastebin", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/helpers.py#L327-L331
22,575
PyAr/fades
setup.py
get_version
def get_version(): """Retrieves package version from the file.""" with open('fades/_version.py') as fh: m = re.search("\(([^']*)\)", fh.read()) if m is None: raise ValueError("Unrecognized version in 'fades/_version.py'") return m.groups()[0].replace(', ', '.')
python
def get_version(): with open('fades/_version.py') as fh: m = re.search("\(([^']*)\)", fh.read()) if m is None: raise ValueError("Unrecognized version in 'fades/_version.py'") return m.groups()[0].replace(', ', '.')
[ "def", "get_version", "(", ")", ":", "with", "open", "(", "'fades/_version.py'", ")", "as", "fh", ":", "m", "=", "re", ".", "search", "(", "\"\\(([^']*)\\)\"", ",", "fh", ".", "read", "(", ")", ")", "if", "m", "is", "None", ":", "raise", "ValueError"...
Retrieves package version from the file.
[ "Retrieves", "package", "version", "from", "the", "file", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/setup.py#L53-L59
22,576
PyAr/fades
setup.py
CustomInstall.initialize_options
def initialize_options(self): """Run parent initialization and then fix the scripts var.""" install.initialize_options(self) # leave the proper script according to the platform script = SCRIPT_WIN if sys.platform == "win32" else SCRIPT_REST self.distribution.scripts = [script]
python
def initialize_options(self): install.initialize_options(self) # leave the proper script according to the platform script = SCRIPT_WIN if sys.platform == "win32" else SCRIPT_REST self.distribution.scripts = [script]
[ "def", "initialize_options", "(", "self", ")", ":", "install", ".", "initialize_options", "(", "self", ")", "# leave the proper script according to the platform", "script", "=", "SCRIPT_WIN", "if", "sys", ".", "platform", "==", "\"win32\"", "else", "SCRIPT_REST", "sel...
Run parent initialization and then fix the scripts var.
[ "Run", "parent", "initialization", "and", "then", "fix", "the", "scripts", "var", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/setup.py#L70-L76
22,577
PyAr/fades
setup.py
CustomInstall.run
def run(self): """Run parent install, and then save the man file.""" install.run(self) # man directory if self._custom_man_dir is not None: if not os.path.exists(self._custom_man_dir): os.makedirs(self._custom_man_dir) shutil.copy("man/fades.1", self._custom_man_dir)
python
def run(self): install.run(self) # man directory if self._custom_man_dir is not None: if not os.path.exists(self._custom_man_dir): os.makedirs(self._custom_man_dir) shutil.copy("man/fades.1", self._custom_man_dir)
[ "def", "run", "(", "self", ")", ":", "install", ".", "run", "(", "self", ")", "# man directory", "if", "self", ".", "_custom_man_dir", "is", "not", "None", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "_custom_man_dir", ")", ...
Run parent install, and then save the man file.
[ "Run", "parent", "install", "and", "then", "save", "the", "man", "file", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/setup.py#L78-L86
22,578
PyAr/fades
setup.py
CustomInstall.finalize_options
def finalize_options(self): """Alter the installation path.""" install.finalize_options(self) if self.prefix is None: # no place for man page (like in a 'snap') man_dir = None else: man_dir = os.path.join(self.prefix, "share", "man", "man1") # if we have 'root', put the building path also under it (used normally # by pbuilder) if self.root is not None: man_dir = os.path.join(self.root, man_dir[1:]) self._custom_man_dir = man_dir
python
def finalize_options(self): install.finalize_options(self) if self.prefix is None: # no place for man page (like in a 'snap') man_dir = None else: man_dir = os.path.join(self.prefix, "share", "man", "man1") # if we have 'root', put the building path also under it (used normally # by pbuilder) if self.root is not None: man_dir = os.path.join(self.root, man_dir[1:]) self._custom_man_dir = man_dir
[ "def", "finalize_options", "(", "self", ")", ":", "install", ".", "finalize_options", "(", "self", ")", "if", "self", ".", "prefix", "is", "None", ":", "# no place for man page (like in a 'snap')", "man_dir", "=", "None", "else", ":", "man_dir", "=", "os", "."...
Alter the installation path.
[ "Alter", "the", "installation", "path", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/setup.py#L88-L101
22,579
PyAr/fades
fades/file_options.py
options_from_file
def options_from_file(args): """Get a argparse.Namespace and return it updated with options from config files. Config files will be parsed with priority equal to his order in CONFIG_FILES. """ logger.debug("updating options from config files") updated_from_file = [] for config_file in CONFIG_FILES: logger.debug("updating from: %s", config_file) parser = ConfigParser() parser.read(config_file) try: items = parser.items('fades') except NoSectionError: continue for config_key, config_value in items: if config_value in ['true', 'false']: config_value = config_value == 'true' if config_key in MERGEABLE_CONFIGS: current_value = getattr(args, config_key, []) if current_value is None: current_value = [] current_value.append(config_value) setattr(args, config_key, current_value) if not getattr(args, config_key, False) or config_key in updated_from_file: # By default all 'store-true' arguments are False. So we only # override them if they are False. If they are True means that the # user is setting those on the CLI. setattr(args, config_key, config_value) updated_from_file.append(config_key) logger.debug("updating %s to %s from file settings", config_key, config_value) return args
python
def options_from_file(args): logger.debug("updating options from config files") updated_from_file = [] for config_file in CONFIG_FILES: logger.debug("updating from: %s", config_file) parser = ConfigParser() parser.read(config_file) try: items = parser.items('fades') except NoSectionError: continue for config_key, config_value in items: if config_value in ['true', 'false']: config_value = config_value == 'true' if config_key in MERGEABLE_CONFIGS: current_value = getattr(args, config_key, []) if current_value is None: current_value = [] current_value.append(config_value) setattr(args, config_key, current_value) if not getattr(args, config_key, False) or config_key in updated_from_file: # By default all 'store-true' arguments are False. So we only # override them if they are False. If they are True means that the # user is setting those on the CLI. setattr(args, config_key, config_value) updated_from_file.append(config_key) logger.debug("updating %s to %s from file settings", config_key, config_value) return args
[ "def", "options_from_file", "(", "args", ")", ":", "logger", ".", "debug", "(", "\"updating options from config files\"", ")", "updated_from_file", "=", "[", "]", "for", "config_file", "in", "CONFIG_FILES", ":", "logger", ".", "debug", "(", "\"updating from: %s\"", ...
Get a argparse.Namespace and return it updated with options from config files. Config files will be parsed with priority equal to his order in CONFIG_FILES.
[ "Get", "a", "argparse", ".", "Namespace", "and", "return", "it", "updated", "with", "options", "from", "config", "files", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/file_options.py#L33-L66
22,580
PyAr/fades
fades/cache.py
VEnvsCache._venv_match
def _venv_match(self, installed, requirements): """Return True if what is installed satisfies the requirements. This method has multiple exit-points, but only for False (because if *anything* is not satisified, the venv is no good). Only after all was checked, and it didn't exit, the venv is ok so return True. """ if not requirements: # special case for no requirements, where we can't actually # check anything: the venv is useful if nothing installed too return None if installed else [] satisfying_deps = [] for repo, req_deps in requirements.items(): useful_inst = set() if repo not in installed: # the venv doesn't even have the repo return None if repo == REPO_VCS: inst_deps = {VCSDependency(url) for url in installed[repo].keys()} else: inst_deps = {Distribution(project_name=dep, version=ver) for (dep, ver) in installed[repo].items()} for req in req_deps: for inst in inst_deps: if inst in req: useful_inst.add(inst) break else: # nothing installed satisfied that requirement return None # assure *all* that is installed is useful for the requirements if useful_inst == inst_deps: satisfying_deps.extend(inst_deps) else: return None # it did it through! return satisfying_deps
python
def _venv_match(self, installed, requirements): if not requirements: # special case for no requirements, where we can't actually # check anything: the venv is useful if nothing installed too return None if installed else [] satisfying_deps = [] for repo, req_deps in requirements.items(): useful_inst = set() if repo not in installed: # the venv doesn't even have the repo return None if repo == REPO_VCS: inst_deps = {VCSDependency(url) for url in installed[repo].keys()} else: inst_deps = {Distribution(project_name=dep, version=ver) for (dep, ver) in installed[repo].items()} for req in req_deps: for inst in inst_deps: if inst in req: useful_inst.add(inst) break else: # nothing installed satisfied that requirement return None # assure *all* that is installed is useful for the requirements if useful_inst == inst_deps: satisfying_deps.extend(inst_deps) else: return None # it did it through! return satisfying_deps
[ "def", "_venv_match", "(", "self", ",", "installed", ",", "requirements", ")", ":", "if", "not", "requirements", ":", "# special case for no requirements, where we can't actually", "# check anything: the venv is useful if nothing installed too", "return", "None", "if", "install...
Return True if what is installed satisfies the requirements. This method has multiple exit-points, but only for False (because if *anything* is not satisified, the venv is no good). Only after all was checked, and it didn't exit, the venv is ok so return True.
[ "Return", "True", "if", "what", "is", "installed", "satisfies", "the", "requirements", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/cache.py#L44-L84
22,581
PyAr/fades
fades/cache.py
VEnvsCache._match_by_uuid
def _match_by_uuid(self, current_venvs, uuid): """Select a venv matching exactly by uuid.""" for venv_str in current_venvs: venv = json.loads(venv_str) env_path = venv.get('metadata', {}).get('env_path') _, env_uuid = os.path.split(env_path) if env_uuid == uuid: return venv
python
def _match_by_uuid(self, current_venvs, uuid): for venv_str in current_venvs: venv = json.loads(venv_str) env_path = venv.get('metadata', {}).get('env_path') _, env_uuid = os.path.split(env_path) if env_uuid == uuid: return venv
[ "def", "_match_by_uuid", "(", "self", ",", "current_venvs", ",", "uuid", ")", ":", "for", "venv_str", "in", "current_venvs", ":", "venv", "=", "json", ".", "loads", "(", "venv_str", ")", "env_path", "=", "venv", ".", "get", "(", "'metadata'", ",", "{", ...
Select a venv matching exactly by uuid.
[ "Select", "a", "venv", "matching", "exactly", "by", "uuid", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/cache.py#L86-L93
22,582
PyAr/fades
fades/cache.py
VEnvsCache._select_better_fit
def _select_better_fit(self, matching_venvs): """Receive a list of matching venvs, and decide which one is the best fit.""" # keep the venvs in a separate array, to pick up the winner, and the (sorted, to compare # each dependency with its equivalent) in other structure to later compare venvs = [] to_compare = [] for matching, venv in matching_venvs: to_compare.append(sorted(matching, key=lambda req: getattr(req, 'key', ''))) venvs.append(venv) # compare each n-tuple of dependencies to see which one is bigger, and add score to the # position of the winner scores = [0] * len(venvs) for dependencies in zip(*to_compare): if not isinstance(dependencies[0], Distribution): # only distribution URLs can be compared continue winner = dependencies.index(max(dependencies)) scores[winner] = scores[winner] + 1 # get the rightmost winner (in case of ties, to select the latest venv) winner_pos = None winner_score = -1 for i, score in enumerate(scores): if score >= winner_score: winner_score = score winner_pos = i return venvs[winner_pos]
python
def _select_better_fit(self, matching_venvs): # keep the venvs in a separate array, to pick up the winner, and the (sorted, to compare # each dependency with its equivalent) in other structure to later compare venvs = [] to_compare = [] for matching, venv in matching_venvs: to_compare.append(sorted(matching, key=lambda req: getattr(req, 'key', ''))) venvs.append(venv) # compare each n-tuple of dependencies to see which one is bigger, and add score to the # position of the winner scores = [0] * len(venvs) for dependencies in zip(*to_compare): if not isinstance(dependencies[0], Distribution): # only distribution URLs can be compared continue winner = dependencies.index(max(dependencies)) scores[winner] = scores[winner] + 1 # get the rightmost winner (in case of ties, to select the latest venv) winner_pos = None winner_score = -1 for i, score in enumerate(scores): if score >= winner_score: winner_score = score winner_pos = i return venvs[winner_pos]
[ "def", "_select_better_fit", "(", "self", ",", "matching_venvs", ")", ":", "# keep the venvs in a separate array, to pick up the winner, and the (sorted, to compare", "# each dependency with its equivalent) in other structure to later compare", "venvs", "=", "[", "]", "to_compare", "="...
Receive a list of matching venvs, and decide which one is the best fit.
[ "Receive", "a", "list", "of", "matching", "venvs", "and", "decide", "which", "one", "is", "the", "best", "fit", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/cache.py#L95-L123
22,583
PyAr/fades
fades/cache.py
VEnvsCache._match_by_requirements
def _match_by_requirements(self, current_venvs, requirements, interpreter, options): """Select a venv matching interpreter and options, complying with requirements. Several venvs can be found in this case, will return the better fit. """ matching_venvs = [] for venv_str in current_venvs: venv = json.loads(venv_str) # simple filter, need to have exactly same options and interpreter if venv.get('options') != options or venv.get('interpreter') != interpreter: continue # requirements complying: result can be None (no comply) or a score to later sort matching = self._venv_match(venv['installed'], requirements) if matching is not None: matching_venvs.append((matching, venv)) if not matching_venvs: return return self._select_better_fit(matching_venvs)
python
def _match_by_requirements(self, current_venvs, requirements, interpreter, options): matching_venvs = [] for venv_str in current_venvs: venv = json.loads(venv_str) # simple filter, need to have exactly same options and interpreter if venv.get('options') != options or venv.get('interpreter') != interpreter: continue # requirements complying: result can be None (no comply) or a score to later sort matching = self._venv_match(venv['installed'], requirements) if matching is not None: matching_venvs.append((matching, venv)) if not matching_venvs: return return self._select_better_fit(matching_venvs)
[ "def", "_match_by_requirements", "(", "self", ",", "current_venvs", ",", "requirements", ",", "interpreter", ",", "options", ")", ":", "matching_venvs", "=", "[", "]", "for", "venv_str", "in", "current_venvs", ":", "venv", "=", "json", ".", "loads", "(", "ve...
Select a venv matching interpreter and options, complying with requirements. Several venvs can be found in this case, will return the better fit.
[ "Select", "a", "venv", "matching", "interpreter", "and", "options", "complying", "with", "requirements", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/cache.py#L125-L146
22,584
PyAr/fades
fades/cache.py
VEnvsCache._select
def _select(self, current_venvs, requirements=None, interpreter='', uuid='', options=None): """Select which venv satisfy the received requirements.""" if uuid: logger.debug("Searching a venv by uuid: %s", uuid) venv = self._match_by_uuid(current_venvs, uuid) else: logger.debug("Searching a venv for: reqs=%s interpreter=%s options=%s", requirements, interpreter, options) venv = self._match_by_requirements(current_venvs, requirements, interpreter, options) if venv is None: logger.debug("No matching venv found :(") return logger.debug("Found a matching venv! %s", venv) return venv['metadata']
python
def _select(self, current_venvs, requirements=None, interpreter='', uuid='', options=None): if uuid: logger.debug("Searching a venv by uuid: %s", uuid) venv = self._match_by_uuid(current_venvs, uuid) else: logger.debug("Searching a venv for: reqs=%s interpreter=%s options=%s", requirements, interpreter, options) venv = self._match_by_requirements(current_venvs, requirements, interpreter, options) if venv is None: logger.debug("No matching venv found :(") return logger.debug("Found a matching venv! %s", venv) return venv['metadata']
[ "def", "_select", "(", "self", ",", "current_venvs", ",", "requirements", "=", "None", ",", "interpreter", "=", "''", ",", "uuid", "=", "''", ",", "options", "=", "None", ")", ":", "if", "uuid", ":", "logger", ".", "debug", "(", "\"Searching a venv by uu...
Select which venv satisfy the received requirements.
[ "Select", "which", "venv", "satisfy", "the", "received", "requirements", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/cache.py#L148-L163
22,585
PyAr/fades
fades/cache.py
VEnvsCache.get_venv
def get_venv(self, requirements=None, interpreter='', uuid='', options=None): """Find a venv that serves these requirements, if any.""" lines = self._read_cache() return self._select(lines, requirements, interpreter, uuid=uuid, options=options)
python
def get_venv(self, requirements=None, interpreter='', uuid='', options=None): lines = self._read_cache() return self._select(lines, requirements, interpreter, uuid=uuid, options=options)
[ "def", "get_venv", "(", "self", ",", "requirements", "=", "None", ",", "interpreter", "=", "''", ",", "uuid", "=", "''", ",", "options", "=", "None", ")", ":", "lines", "=", "self", ".", "_read_cache", "(", ")", "return", "self", ".", "_select", "(",...
Find a venv that serves these requirements, if any.
[ "Find", "a", "venv", "that", "serves", "these", "requirements", "if", "any", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/cache.py#L165-L168
22,586
PyAr/fades
fades/cache.py
VEnvsCache.store
def store(self, installed_stuff, metadata, interpreter, options): """Store the virtualenv metadata for the indicated installed_stuff.""" new_content = { 'timestamp': int(time.mktime(time.localtime())), 'installed': installed_stuff, 'metadata': metadata, 'interpreter': interpreter, 'options': options } logger.debug("Storing installed=%s metadata=%s interpreter=%s options=%s", installed_stuff, metadata, interpreter, options) with filelock(self.lockpath): self._write_cache([json.dumps(new_content)], append=True)
python
def store(self, installed_stuff, metadata, interpreter, options): new_content = { 'timestamp': int(time.mktime(time.localtime())), 'installed': installed_stuff, 'metadata': metadata, 'interpreter': interpreter, 'options': options } logger.debug("Storing installed=%s metadata=%s interpreter=%s options=%s", installed_stuff, metadata, interpreter, options) with filelock(self.lockpath): self._write_cache([json.dumps(new_content)], append=True)
[ "def", "store", "(", "self", ",", "installed_stuff", ",", "metadata", ",", "interpreter", ",", "options", ")", ":", "new_content", "=", "{", "'timestamp'", ":", "int", "(", "time", ".", "mktime", "(", "time", ".", "localtime", "(", ")", ")", ")", ",", ...
Store the virtualenv metadata for the indicated installed_stuff.
[ "Store", "the", "virtualenv", "metadata", "for", "the", "indicated", "installed_stuff", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/cache.py#L175-L187
22,587
PyAr/fades
fades/cache.py
VEnvsCache.remove
def remove(self, env_path): """Remove metadata for a given virtualenv from cache.""" with filelock(self.lockpath): cache = self._read_cache() logger.debug("Removing virtualenv from cache: %s" % env_path) lines = [ line for line in cache if json.loads(line).get('metadata', {}).get('env_path') != env_path ] self._write_cache(lines)
python
def remove(self, env_path): with filelock(self.lockpath): cache = self._read_cache() logger.debug("Removing virtualenv from cache: %s" % env_path) lines = [ line for line in cache if json.loads(line).get('metadata', {}).get('env_path') != env_path ] self._write_cache(lines)
[ "def", "remove", "(", "self", ",", "env_path", ")", ":", "with", "filelock", "(", "self", ".", "lockpath", ")", ":", "cache", "=", "self", ".", "_read_cache", "(", ")", "logger", ".", "debug", "(", "\"Removing virtualenv from cache: %s\"", "%", "env_path", ...
Remove metadata for a given virtualenv from cache.
[ "Remove", "metadata", "for", "a", "given", "virtualenv", "from", "cache", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/cache.py#L189-L198
22,588
PyAr/fades
fades/cache.py
VEnvsCache._read_cache
def _read_cache(self): """Read virtualenv metadata from cache.""" if os.path.exists(self.filepath): with open(self.filepath, 'rt', encoding='utf8') as fh: lines = [x.strip() for x in fh] else: logger.debug("Index not found, starting empty") lines = [] return lines
python
def _read_cache(self): if os.path.exists(self.filepath): with open(self.filepath, 'rt', encoding='utf8') as fh: lines = [x.strip() for x in fh] else: logger.debug("Index not found, starting empty") lines = [] return lines
[ "def", "_read_cache", "(", "self", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "self", ".", "filepath", ")", ":", "with", "open", "(", "self", ".", "filepath", ",", "'rt'", ",", "encoding", "=", "'utf8'", ")", "as", "fh", ":", "lines", ...
Read virtualenv metadata from cache.
[ "Read", "virtualenv", "metadata", "from", "cache", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/cache.py#L200-L208
22,589
PyAr/fades
fades/cache.py
VEnvsCache._write_cache
def _write_cache(self, lines, append=False): """Write virtualenv metadata to cache.""" mode = 'at' if append else 'wt' with open(self.filepath, mode, encoding='utf8') as fh: fh.writelines(line + '\n' for line in lines)
python
def _write_cache(self, lines, append=False): mode = 'at' if append else 'wt' with open(self.filepath, mode, encoding='utf8') as fh: fh.writelines(line + '\n' for line in lines)
[ "def", "_write_cache", "(", "self", ",", "lines", ",", "append", "=", "False", ")", ":", "mode", "=", "'at'", "if", "append", "else", "'wt'", "with", "open", "(", "self", ".", "filepath", ",", "mode", ",", "encoding", "=", "'utf8'", ")", "as", "fh", ...
Write virtualenv metadata to cache.
[ "Write", "virtualenv", "metadata", "to", "cache", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/cache.py#L210-L214
22,590
PyAr/fades
fades/pipmanager.py
PipManager.install
def install(self, dependency): """Install a new dependency.""" if not self.pip_installed: logger.info("Need to install a dependency with pip, but no builtin, " "doing it manually (just wait a little, all should go well)") self._brute_force_install_pip() # split to pass several tokens on multiword dependency (this is very specific for '-e' on # external requirements, but implemented generically; note that this does not apply for # normal reqs, because even if it originally is 'foo > 1.2', after parsing it loses the # internal spaces) str_dep = str(dependency) args = [self.pip_exe, "install"] + str_dep.split() if self.options: for option in self.options: args.extend(option.split()) logger.info("Installing dependency: %r", str_dep) try: helpers.logged_exec(args) except helpers.ExecutionError as error: error.dump_to_log(logger) raise error except Exception as error: logger.exception("Error installing %s: %s", str_dep, error) raise error
python
def install(self, dependency): if not self.pip_installed: logger.info("Need to install a dependency with pip, but no builtin, " "doing it manually (just wait a little, all should go well)") self._brute_force_install_pip() # split to pass several tokens on multiword dependency (this is very specific for '-e' on # external requirements, but implemented generically; note that this does not apply for # normal reqs, because even if it originally is 'foo > 1.2', after parsing it loses the # internal spaces) str_dep = str(dependency) args = [self.pip_exe, "install"] + str_dep.split() if self.options: for option in self.options: args.extend(option.split()) logger.info("Installing dependency: %r", str_dep) try: helpers.logged_exec(args) except helpers.ExecutionError as error: error.dump_to_log(logger) raise error except Exception as error: logger.exception("Error installing %s: %s", str_dep, error) raise error
[ "def", "install", "(", "self", ",", "dependency", ")", ":", "if", "not", "self", ".", "pip_installed", ":", "logger", ".", "info", "(", "\"Need to install a dependency with pip, but no builtin, \"", "\"doing it manually (just wait a little, all should go well)\"", ")", "sel...
Install a new dependency.
[ "Install", "a", "new", "dependency", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/pipmanager.py#L50-L75
22,591
PyAr/fades
fades/pipmanager.py
PipManager.get_version
def get_version(self, dependency): """Return the installed version parsing the output of 'pip show'.""" logger.debug("getting installed version for %s", dependency) stdout = helpers.logged_exec([self.pip_exe, "show", str(dependency)]) version = [line for line in stdout if line.startswith('Version:')] if len(version) == 1: version = version[0].strip().split()[1] logger.debug("Installed version of %s is: %s", dependency, version) return version else: logger.error('Fades is having problems getting the installed version. ' 'Run with -v or check the logs for details') return ''
python
def get_version(self, dependency): logger.debug("getting installed version for %s", dependency) stdout = helpers.logged_exec([self.pip_exe, "show", str(dependency)]) version = [line for line in stdout if line.startswith('Version:')] if len(version) == 1: version = version[0].strip().split()[1] logger.debug("Installed version of %s is: %s", dependency, version) return version else: logger.error('Fades is having problems getting the installed version. ' 'Run with -v or check the logs for details') return ''
[ "def", "get_version", "(", "self", ",", "dependency", ")", ":", "logger", ".", "debug", "(", "\"getting installed version for %s\"", ",", "dependency", ")", "stdout", "=", "helpers", ".", "logged_exec", "(", "[", "self", ".", "pip_exe", ",", "\"show\"", ",", ...
Return the installed version parsing the output of 'pip show'.
[ "Return", "the", "installed", "version", "parsing", "the", "output", "of", "pip", "show", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/pipmanager.py#L77-L89
22,592
PyAr/fades
fades/pipmanager.py
PipManager._brute_force_install_pip
def _brute_force_install_pip(self): """A brute force install of pip itself.""" if os.path.exists(self.pip_installer_fname): logger.debug("Using pip installer from %r", self.pip_installer_fname) else: logger.debug( "Installer for pip not found in %r, downloading it", self.pip_installer_fname) self._download_pip_installer() logger.debug("Installing PIP manually in the virtualenv") python_exe = os.path.join(self.env_bin_path, "python") helpers.logged_exec([python_exe, self.pip_installer_fname, '-I']) self.pip_installed = True
python
def _brute_force_install_pip(self): if os.path.exists(self.pip_installer_fname): logger.debug("Using pip installer from %r", self.pip_installer_fname) else: logger.debug( "Installer for pip not found in %r, downloading it", self.pip_installer_fname) self._download_pip_installer() logger.debug("Installing PIP manually in the virtualenv") python_exe = os.path.join(self.env_bin_path, "python") helpers.logged_exec([python_exe, self.pip_installer_fname, '-I']) self.pip_installed = True
[ "def", "_brute_force_install_pip", "(", "self", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "self", ".", "pip_installer_fname", ")", ":", "logger", ".", "debug", "(", "\"Using pip installer from %r\"", ",", "self", ".", "pip_installer_fname", ")", "...
A brute force install of pip itself.
[ "A", "brute", "force", "install", "of", "pip", "itself", "." ]
e5ea457b09b105f321d4f81772f25e8695159604
https://github.com/PyAr/fades/blob/e5ea457b09b105f321d4f81772f25e8695159604/fades/pipmanager.py#L98-L110
22,593
albertyw/csv-ical
csv_ical/convert.py
Convert._generate_configs_from_default
def _generate_configs_from_default(self, overrides=None): # type: (Dict[str, int]) -> Dict[str, int] """ Generate configs by inheriting from defaults """ config = DEFAULT_CONFIG.copy() if not overrides: overrides = {} for k, v in overrides.items(): config[k] = v return config
python
def _generate_configs_from_default(self, overrides=None): # type: (Dict[str, int]) -> Dict[str, int] config = DEFAULT_CONFIG.copy() if not overrides: overrides = {} for k, v in overrides.items(): config[k] = v return config
[ "def", "_generate_configs_from_default", "(", "self", ",", "overrides", "=", "None", ")", ":", "# type: (Dict[str, int]) -> Dict[str, int]", "config", "=", "DEFAULT_CONFIG", ".", "copy", "(", ")", "if", "not", "overrides", ":", "overrides", "=", "{", "}", "for", ...
Generate configs by inheriting from defaults
[ "Generate", "configs", "by", "inheriting", "from", "defaults" ]
cdb55a226cd0cb6cc214d896a6cea41a5b92c9ed
https://github.com/albertyw/csv-ical/blob/cdb55a226cd0cb6cc214d896a6cea41a5b92c9ed/csv_ical/convert.py#L29-L37
22,594
albertyw/csv-ical
csv_ical/convert.py
Convert.read_ical
def read_ical(self, ical_file_location): # type: (str) -> Calendar """ Read the ical file """ with open(ical_file_location, 'r') as ical_file: data = ical_file.read() self.cal = Calendar.from_ical(data) return self.cal
python
def read_ical(self, ical_file_location): # type: (str) -> Calendar with open(ical_file_location, 'r') as ical_file: data = ical_file.read() self.cal = Calendar.from_ical(data) return self.cal
[ "def", "read_ical", "(", "self", ",", "ical_file_location", ")", ":", "# type: (str) -> Calendar", "with", "open", "(", "ical_file_location", ",", "'r'", ")", "as", "ical_file", ":", "data", "=", "ical_file", ".", "read", "(", ")", "self", ".", "cal", "=", ...
Read the ical file
[ "Read", "the", "ical", "file" ]
cdb55a226cd0cb6cc214d896a6cea41a5b92c9ed
https://github.com/albertyw/csv-ical/blob/cdb55a226cd0cb6cc214d896a6cea41a5b92c9ed/csv_ical/convert.py#L39-L44
22,595
albertyw/csv-ical
csv_ical/convert.py
Convert.read_csv
def read_csv(self, csv_location, csv_configs=None): # type: (str, Dict[str, int]) -> List[List[str]] """ Read the csv file """ csv_configs = self._generate_configs_from_default(csv_configs) with open(csv_location, 'r') as csv_file: csv_reader = csv.reader(csv_file) self.csv_data = list(csv_reader) self.csv_data = self.csv_data[csv_configs['HEADER_COLUMNS_TO_SKIP']:] return self.csv_data
python
def read_csv(self, csv_location, csv_configs=None): # type: (str, Dict[str, int]) -> List[List[str]] csv_configs = self._generate_configs_from_default(csv_configs) with open(csv_location, 'r') as csv_file: csv_reader = csv.reader(csv_file) self.csv_data = list(csv_reader) self.csv_data = self.csv_data[csv_configs['HEADER_COLUMNS_TO_SKIP']:] return self.csv_data
[ "def", "read_csv", "(", "self", ",", "csv_location", ",", "csv_configs", "=", "None", ")", ":", "# type: (str, Dict[str, int]) -> List[List[str]]", "csv_configs", "=", "self", ".", "_generate_configs_from_default", "(", "csv_configs", ")", "with", "open", "(", "csv_lo...
Read the csv file
[ "Read", "the", "csv", "file" ]
cdb55a226cd0cb6cc214d896a6cea41a5b92c9ed
https://github.com/albertyw/csv-ical/blob/cdb55a226cd0cb6cc214d896a6cea41a5b92c9ed/csv_ical/convert.py#L46-L54
22,596
albertyw/csv-ical
csv_ical/convert.py
Convert.make_ical
def make_ical(self, csv_configs=None): # type: (Dict[str, int]) -> Calendar """ Make iCal entries """ csv_configs = self._generate_configs_from_default(csv_configs) self.cal = Calendar() for row in self.csv_data: event = Event() event.add('summary', row[csv_configs['CSV_NAME']]) event.add('dtstart', row[csv_configs['CSV_START_DATE']]) event.add('dtend', row[csv_configs['CSV_END_DATE']]) event.add('description', row[csv_configs['CSV_DESCRIPTION']]) event.add('location', row[csv_configs['CSV_LOCATION']]) self.cal.add_component(event) return self.cal
python
def make_ical(self, csv_configs=None): # type: (Dict[str, int]) -> Calendar csv_configs = self._generate_configs_from_default(csv_configs) self.cal = Calendar() for row in self.csv_data: event = Event() event.add('summary', row[csv_configs['CSV_NAME']]) event.add('dtstart', row[csv_configs['CSV_START_DATE']]) event.add('dtend', row[csv_configs['CSV_END_DATE']]) event.add('description', row[csv_configs['CSV_DESCRIPTION']]) event.add('location', row[csv_configs['CSV_LOCATION']]) self.cal.add_component(event) return self.cal
[ "def", "make_ical", "(", "self", ",", "csv_configs", "=", "None", ")", ":", "# type: (Dict[str, int]) -> Calendar", "csv_configs", "=", "self", ".", "_generate_configs_from_default", "(", "csv_configs", ")", "self", ".", "cal", "=", "Calendar", "(", ")", "for", ...
Make iCal entries
[ "Make", "iCal", "entries" ]
cdb55a226cd0cb6cc214d896a6cea41a5b92c9ed
https://github.com/albertyw/csv-ical/blob/cdb55a226cd0cb6cc214d896a6cea41a5b92c9ed/csv_ical/convert.py#L56-L69
22,597
albertyw/csv-ical
csv_ical/convert.py
Convert.save_ical
def save_ical(self, ical_location): # type: (str) -> None """ Save the calendar instance to a file """ data = self.cal.to_ical() with open(ical_location, 'w') as ical_file: ical_file.write(data.decode('utf-8'))
python
def save_ical(self, ical_location): # type: (str) -> None data = self.cal.to_ical() with open(ical_location, 'w') as ical_file: ical_file.write(data.decode('utf-8'))
[ "def", "save_ical", "(", "self", ",", "ical_location", ")", ":", "# type: (str) -> None", "data", "=", "self", ".", "cal", ".", "to_ical", "(", ")", "with", "open", "(", "ical_location", ",", "'w'", ")", "as", "ical_file", ":", "ical_file", ".", "write", ...
Save the calendar instance to a file
[ "Save", "the", "calendar", "instance", "to", "a", "file" ]
cdb55a226cd0cb6cc214d896a6cea41a5b92c9ed
https://github.com/albertyw/csv-ical/blob/cdb55a226cd0cb6cc214d896a6cea41a5b92c9ed/csv_ical/convert.py#L86-L90
22,598
albertyw/csv-ical
csv_ical/convert.py
Convert.save_csv
def save_csv(self, csv_location): # type: (str) -> None """ Save the csv to a file """ with open(csv_location, 'w') as csv_handle: writer = csv.writer(csv_handle) for row in self.csv_data: writer.writerow(row)
python
def save_csv(self, csv_location): # type: (str) -> None with open(csv_location, 'w') as csv_handle: writer = csv.writer(csv_handle) for row in self.csv_data: writer.writerow(row)
[ "def", "save_csv", "(", "self", ",", "csv_location", ")", ":", "# type: (str) -> None", "with", "open", "(", "csv_location", ",", "'w'", ")", "as", "csv_handle", ":", "writer", "=", "csv", ".", "writer", "(", "csv_handle", ")", "for", "row", "in", "self", ...
Save the csv to a file
[ "Save", "the", "csv", "to", "a", "file" ]
cdb55a226cd0cb6cc214d896a6cea41a5b92c9ed
https://github.com/albertyw/csv-ical/blob/cdb55a226cd0cb6cc214d896a6cea41a5b92c9ed/csv_ical/convert.py#L92-L97
22,599
planetarypy/planetaryimage
planetaryimage/image.py
PlanetaryImage.open
def open(cls, filename): """ Read an image file from disk Parameters ---------- filename : string Name of file to read as an image file. This file may be gzip (``.gz``) or bzip2 (``.bz2``) compressed. """ if filename.endswith('.gz'): fp = gzip.open(filename, 'rb') try: return cls(fp, filename, compression='gz') finally: fp.close() elif filename.endswith('.bz2'): fp = bz2.BZ2File(filename, 'rb') try: return cls(fp, filename, compression='bz2') finally: fp.close() else: with open(filename, 'rb') as fp: return cls(fp, filename)
python
def open(cls, filename): if filename.endswith('.gz'): fp = gzip.open(filename, 'rb') try: return cls(fp, filename, compression='gz') finally: fp.close() elif filename.endswith('.bz2'): fp = bz2.BZ2File(filename, 'rb') try: return cls(fp, filename, compression='bz2') finally: fp.close() else: with open(filename, 'rb') as fp: return cls(fp, filename)
[ "def", "open", "(", "cls", ",", "filename", ")", ":", "if", "filename", ".", "endswith", "(", "'.gz'", ")", ":", "fp", "=", "gzip", ".", "open", "(", "filename", ",", "'rb'", ")", "try", ":", "return", "cls", "(", "fp", ",", "filename", ",", "com...
Read an image file from disk Parameters ---------- filename : string Name of file to read as an image file. This file may be gzip (``.gz``) or bzip2 (``.bz2``) compressed.
[ "Read", "an", "image", "file", "from", "disk" ]
ee9aef4746ff7a003b1457565acb13f5f1db0375
https://github.com/planetarypy/planetaryimage/blob/ee9aef4746ff7a003b1457565acb13f5f1db0375/planetaryimage/image.py#L69-L92