docstring
stringlengths
52
499
function
stringlengths
67
35.2k
__index_level_0__
int64
52.6k
1.16M
Ask a user for a boolean input args: message (str): Prompt for user returns: bool_in (boolean): Input boolean
def bool_input(message): while True: suffix = ' (true or false): ' inp = input(message + suffix) if inp.lower() == 'true': return True elif inp.lower() == 'false': return False else: print(colored('Must be either true or false, try ag...
746,539
Select a project from configuration to run transfer on args: user_provided_project (str): Project name that should match a project in the config returns: project (dict): Configuration settings for a user selected project
def select_project(user_provided_project): home = os.path.expanduser('~') if os.path.isfile(os.path.join(home, '.transfer', 'config.yaml')): with open(os.path.join(home, '.transfer', 'config.yaml'), 'r') as fp: projects = yaml.load(fp.read()) if len(projects) == 1: ...
746,552
Store configuration args: config (list[dict]): configurations for each project
def store_config(config, suffix = None): home = os.path.expanduser('~') if suffix is not None: config_path = os.path.join(home, '.transfer', suffix) else: config_path = os.path.join(home, '.transfer') os.makedirs(config_path, exist_ok = True) with open(os.path.join(config_path,...
746,556
Update project in configuration args: updated_project (dict): Updated project configuration values
def update_config(updated_project): home = os.path.expanduser('~') if os.path.isfile(os.path.join(home, '.transfer', 'config.yaml')): with open(os.path.join(home, '.transfer', 'config.yaml'), 'r') as fp: projects = yaml.load(fp.read()) replace_index = -1 for i, project ...
746,557
Construct a graph that repeats this graph a number of times Arguments: | ``repeat`` -- The number of repetitions.
def __mul__(self, repeat): if not isinstance(repeat, int): raise TypeError("Can only multiply a graph with an integer") # copy edges new_edges = [] for i in range(repeat): for vertex1, vertex2 in self.edges: new_edges.append(frozenset([ver...
746,575
Return True only if the number of neighbors is correct Arguments: | ``index`` -- the index of the vertex/edge on which the criterion is applied | ``graph`` -- the graph on which the criterion is tested
def __call__(self, index, graph): return len(graph.neighbors[index]) == self.count
746,581
Return True only if each neighbor can be linked with an atom number Arguments: | ``index`` -- the index of the vertex/edge on which the criterion is applied | ``graph`` -- the graph on which the criterion is tested
def __call__(self, index, graph): neighbors = graph.neighbors[index] if not len(neighbors) == len(self.numbers): return False neighbor_numbers = sorted([graph.numbers[neighbor] for neighbor in neighbors]) return neighbor_numbers == self.numbers
746,583
Return True only if each neighbor can be linked with a positive criterion Arguments: | ``index`` -- the index of the vertex/edge on which the criterion is applied | ``graph`` -- the graph on which the criterion is tested
def __call__(self, index, graph): def all_permutations(l): if len(l) == 1: yield l return for i in range(len(l)): for sub in all_permutations(l[:i]+l[i+1:]): yield [l[i]] + sub neighbors =...
746,585
Initialize a ArrayAttr object Arguments: ``owner`` -- the instance to read the attribute from ``name`` -- the name of the attribute
def __init__(self, owner, name): StateAttr.__init__(self, owner, name) array = self.get() if array.dtype.fields is not None: raise ValueError("Record arrays are not supported yet.")
746,592
Register a new attribute to take care of with dump and load Arguments: | ``name`` -- the name to be used in the dump file | ``AttrCls`` -- an attr class describing the attribute
def _register(self, name, AttrCls): if not issubclass(AttrCls, StateAttr): raise TypeError("The second argument must a StateAttr instance.") if len(name) > 40: raise ValueError("Name can count at most 40 characters.") self._fields[name] = AttrCls(self._owner, nam...
746,597
Return the length of a bond between n1 and n2 of type bond_type Arguments: | ``n1`` -- the atom number of the first atom in the bond | ``n2`` -- the atom number of the second atom the bond Optional argument: | ``bond_type`` -- the type of bond [default=B...
def get_length(self, n1, n2, bond_type=BOND_SINGLE): dataset = self.lengths.get(bond_type) if dataset == None: return None return dataset.get(frozenset([n1, n2]))
746,606
Initialize a pair potential object Arguments: scaling -- symmetric NxN array with pairwise scaling factors. When an element is set to zero, it will be excluded. Optional argument: coordinates -- the initial Cartesian coordinates of the syst...
def __init__(self, scaling, coordinates=None): if coordinates is not None: self.update_coordinates(coordinates) self.scaling = scaling self.scaling.ravel()[::len(self.scaling)+1] = 0
746,625
Dump a single molecule to a CML file Arguments: | ``f`` -- a file-like object | ``molecule`` -- a Molecule instance
def _dump_cml_molecule(f, molecule): extra = getattr(molecule, "extra", {}) attr_str = " ".join("%s='%s'" % (key, value) for key, value in extra.items()) f.write(" <molecule id='%s' %s>\n" % (molecule.title, attr_str)) f.write(" <atomArray>\n") atoms_extra = getattr(molecule, "atoms_extra", {}...
746,648
Write a list of molecules to a CML file Arguments: | ``f`` -- a filename of a CML file or a file-like object | ``molecules`` -- a list of molecule objects.
def dump_cml(f, molecules): if isinstance(f, str): f = open(f, "w") close = True else: close = False f.write("<?xml version='1.0'?>\n") f.write("<list xmlns='http://www.xml-cml.org/schema'>\n") for molecule in molecules: _dump_cml_molecule(f, molecule) f.writ...
746,649
Read all the requested fields Arguments: | ``filename`` -- the filename of the FCHK file | ``field_labels`` -- when given, only these fields are read
def _read(self, filename, field_labels=None): # if fields is None, all fields are read def read_field(f): datatype = None while datatype is None: # find a sane header line line = f.readline() if line == "": ...
746,654
Add the contributions of this energy term to the Hessian Arguments: | ``coordinates`` -- A numpy array with 3N Cartesian coordinates. | ``hessian`` -- A matrix for the full Hessian to which this energy term has to add its contribution.
def add_to_hessian(self, coordinates, hessian): # Compute the derivatives of the bond stretch towards the two cartesian # coordinates. The bond length is computed too, but not used. q, g = self.icfn(coordinates[list(self.indexes)], 1) # Add the contribution to the Hessian (an ou...
746,679
Compute the rotational symmetry number Arguments: | ``molecule`` -- The molecule | ``graph`` -- The corresponding bond graph Optional argument: | ``threshold`` -- only when a rotation results in an rmsd below the given threshold, the rotation is...
def compute_rotsym(molecule, graph, threshold=1e-3*angstrom): result = 0 for match in graph.symmetries: permutation = list(j for i,j in sorted(match.forward.items())) new_coordinates = molecule.coordinates[permutation] rmsd = fit_rmsd(molecule.coordinates, new_coordinates)[2] ...
746,684
Return a vector orthogonal to the given triangle Arguments: a, b, c -- three 3D numpy vectors
def triangle_normal(a, b, c): normal = np.cross(a - c, b - c) norm = np.linalg.norm(normal) return normal/norm
746,717
Compute the dot product Arguments: | ``r1``, ``r2`` -- two :class:`Vector3` objects (Returns a Scalar)
def dot(r1, r2): if r1.size != r2.size: raise ValueError("Both arguments must have the same input size.") if r1.deriv != r2.deriv: raise ValueError("Both arguments must have the same deriv.") return r1.x*r2.x + r1.y*r2.y + r1.z*r2.z
746,718
Compute the cross product Arguments: | ``r1``, ``r2`` -- two :class:`Vector3` objects (Returns a Vector3)
def cross(r1, r2): if r1.size != r2.size: raise ValueError("Both arguments must have the same input size.") if r1.deriv != r2.deriv: raise ValueError("Both arguments must have the same deriv.") result = Vector3(r1.size, r1.deriv) result.x = r1.y*r2.z - r1.z*r2.y result.y = r1.z*...
746,719
Construct a Jacobian for the given internal and Cartesian coordinates Arguments: | ``ics`` -- A list of internal coordinate objects. | ``coordinates`` -- A numpy array with Cartesian coordinates, shape=(N,3) The return value will be a numpy array with the Jac...
def compute_jacobian(ics, coordinates): N3 = coordinates.size jacobian = numpy.zeros((N3, len(ics)), float) for j, ic in enumerate(ics): # Let the ic object fill in each column of the Jacobian. ic.fill_jacobian_column(jacobian[:,j], coordinates) return jacobian
746,755
Fill in a column of the Jacobian. Arguments: | ``jaccol`` -- The column of Jacobian to which the result must be added. | ``coordinates`` -- A numpy array with Cartesian coordinates, shape=(N,3)
def fill_jacobian_column(self, jaccol, coordinates): q, g = self.icfn(coordinates[list(self.indexes)], 1) for i, j in enumerate(self.indexes): jaccol[3*j:3*j+3] += g[i] return jaccol
746,757
Initialize a similarity descriptor Arguments: distance_matrix -- a matrix with interatomic distances, this can also be distances in a graph labels -- a list with integer labels used to identify atoms of the same type
def __init__(self, distance_matrix, labels): self.table_distances = similarity_table_distances(distance_matrix.astype(float)) self.table_labels = similarity_table_labels(labels.astype(int)) print(len(labels), len(distance_matrix)) order = np.lexsort([self.table_labels[:, 1], sel...
746,762
Initialize a similarity descriptor Arguments: molecule -- a Molecules object labels -- a list with integer labels used to identify atoms of the same type. When not given, the atom numbers from the molecule are used.
def from_molecule(cls, molecule, labels=None): if labels is None: labels = molecule.numbers return cls(molecule.distance_matrix, labels)
746,763
Initialize a similarity descriptor Arguments: molecular_graphs -- A MolecularGraphs object labels -- a list with integer labels used to identify atoms of the same type. When not given, the atom numbers from the molecular graph a...
def from_molecular_graph(cls, molecular_graph, labels=None): if labels is None: labels = molecular_graph.numbers.astype(int) return cls(molecular_graph.distances, labels)
746,764
Initialize a similarity descriptor Arguments: coordinates -- a Nx3 numpy array labels -- a list with integer labels used to identify atoms of the same type
def from_coordinates(cls, coordinates, labels): from molmod.ext import molecules_distance_matrix distance_matrix = molecules_distance_matrix(coordinates) return cls(distance_matrix, labels)
746,765
Initialize a new MolecularDistortion object Arguments: affected_atoms -- a list of atoms that undergo the transformation transformation -- a transformation object
def __init__(self, affected_atoms, transformation): self.affected_atoms = affected_atoms self.transformation = Complete.cast(transformation)
746,790
Initialize a RandomManipulation object Arguments: affected_atoms -- a list of atoms that undergo the transformation max_amplitude -- the maximum displacement (unit depends on actual implementation) hinge_atoms -- atoms that are inv...
def __init__(self, affected_atoms, max_amplitude, hinge_atoms): if len(hinge_atoms) != self.num_hinge_atoms: raise ValueError("The number of hinge atoms must be %i, got %i." % ( self.num_hinge_atoms, len(hinge_atoms) )) self.affected_atoms...
746,793
Create transformation that represents a rotation about an axis Arguments: | ``center`` -- Point on the axis | ``angle`` -- Rotation angle | ``axis`` -- Rotation axis | ``invert`` -- When True, an inversion rotation is constructed ...
def about_axis(cls, center, angle, axis, invert=False): return Translation(center) * \ Rotation.from_properties(angle, axis, invert) * \ Translation(-center)
746,886
Dump a frame to the trajectory file Arguments: | ``title`` -- the title of the frame | ``coordinates`` -- a numpy array with coordinates in atomic units
def dump(self, title, coordinates): print("% 8i" % len(self.symbols), file=self._f) print(str(title), file=self._f) for symbol, coordinate in zip(self.symbols, coordinates): print("% 2s % 12.9f % 12.9f % 12.9f" % ((symbol, ) + tuple(coordinate/self.file_unit)), file=self._f)
746,910
Efficiently test if counter is in ``xrange(*sub)`` Arguments: | ``sub`` -- a slice object | ``counter`` -- an integer The function returns True if the counter is in ``xrange(sub.start, sub.stop, sub.step)``.
def slice_match(sub, counter): if sub.start is not None and counter < sub.start: return False if sub.stop is not None and counter >= sub.stop: raise StopIteration if sub.step is not None: if sub.start is None: if counter % sub.step != 0: return False...
746,914
The actual wrapper around the function call. Arguments: | ``x_prec`` -- the unknowns in preconditioned coordinates | ``do_gradient`` -- if True, the gradient is also computed and transformed to preconditioned coordinates Note that th...
def __call__(self, x_prec, do_gradient=False): if do_gradient: f, g = self.fun(self.undo(x_prec), do_gradient=True) return f, self.undo(g) else: return self.fun(self.undo(x_prec))
746,935
Configure the 1D function for a line search Arguments: x0 -- the reference point (q=0) axis -- a unit vector in the direction of the line search
def configure(self, x0, axis): self.x0 = x0 self.axis = axis
746,948
Compute the values and the normals (gradients) of active constraints. Arguments: | ``x`` -- The unknowns.
def _compute_equations(self, x, verbose=False): # compute the error and the normals. normals = [] values = [] signs = [] error = 0.0 if verbose: print() print(' '.join('% 10.3e' % val for val in x), end=' ') active_str = '' ...
746,953
Take a robust, but not very efficient step towards the constraints. Arguments: | ``x`` -- The unknowns. | ``normals`` -- A numpy array with the gradients of the active constraints. Each row is one gradient. | ``values`` -- A numpy array with t...
def _rough_shake(self, x, normals, values, error): counter = 0 while error > self.threshold and counter < self.max_iter: dxs = [] for i in range(len(normals)): dx = -normals[i]*values[i]/np.dot(normals[i], normals[i]) dxs.append(dx) ...
746,954
Take an efficient (not always robust) step towards the constraints. Arguments: | ``x`` -- The unknowns. | ``normals`` -- A numpy array with the gradients of the active constraints. Each row is one gradient. | ``values`` -- A numpy array with t...
def _fast_shake(self, x, normals, values, error): # filter out the degrees of freedom that do not feel the constraints. mask = (normals!=0).any(axis=0) > 0 normals = normals[:,mask] # Take a step to lower the constraint cost function. If the step is too # large, it is re...
746,955
Brings unknowns to the constraints. Arguments: | ``x`` -- The unknowns.
def free_shake(self, x): self.lock[:] = False normals, values, error = self._compute_equations(x)[:-1] counter = 0 while True: if error <= self.threshold: break # try a well-behaved move to the constrains result = self._fast_sh...
746,956
Project a vector (gradient or direction) on the active constraints. Arguments: | ``x`` -- The unknowns. | ``vector`` -- A numpy array with a direction or a gradient. The return value is a gradient or direction, where the components that point away from the cons...
def project(self, x, vector): scale = np.linalg.norm(vector) if scale == 0.0: return vector self.lock[:] = False normals, signs = self._compute_equations(x)[::3] if len(normals) == 0: return vector vector = vector/scale mask = sig...
746,958
Construct a graph that repeats this graph a number of times Arguments: | ``repeat`` -- The number of repetitions.
def __mul__(self, repeat): if not isinstance(repeat, int): raise TypeError("Can only multiply a graph with an integer") new_edges = [] for i in range(repeat): for vertex1, vertex2 in self.edges: new_edges.append(frozenset([ ver...
746,968
Evaluates all the criteria and applies an OR opartion Arguments: | ``index`` -- the index of the vertex/edge on which the criterion is applied | ``graph`` -- the graph on which the criterion is tested
def __call__(self, index, graph): for c in self.criteria: if c(index, graph): return True return False
747,001
Evaluates all the criteria and applies a generalized XOR opartion Arguments: | ``index`` -- the index of the vertex/edge on which the criterion is applied | ``graph`` -- the graph on which the criterion is tested when the XOR operation is ...
def __call__(self, index, graph): count = 0 for c in self.criteria: if c(index, graph): count += 1 return (count % 2) == 1
747,002
Iterator over all matches of self.pattern in the given graph. Arguments: | subject_graph -- The subject_graph in which the matches according to self.pattern have to be found. | one_match -- If True, only one match will be returned. This ...
def __call__(self, subject_graph, one_match=False): # Matches are grown iteratively. for vertex0, vertex1 in self.pattern.iter_initial_relations(subject_graph): init_match = self.pattern.MatchClass.from_first_relation(vertex0, vertex1) # init_match cotains only one sourc...
747,018
Parse incoming request and return an email instance. Args: request: an HttpRequest object, containing the forwarded email, as per the SendGrid specification for inbound emails. Returns: an EmailMultiAlternatives instance, containing the parsed contents ...
def parse(self, request): assert isinstance(request, HttpRequest), "Invalid request type: %s" % type(request) try: # from_email should never be a list (unless we change our API) from_email = self._get_addresses([_decode_POST_value(request, 'from')])[0] # .....
747,995
Parse incoming request and return an email instance. Args: request: an HttpRequest object, containing a list of forwarded emails, as per Mandrill specification for inbound emails. Returns: a list of EmailMultiAlternatives instances
def parse(self, request): assert isinstance(request, HttpRequest), "Invalid request type: %s" % type(request) if settings.INBOUND_MANDRILL_AUTHENTICATION_KEY: _check_mandrill_signature( request=request, key=settings.INBOUND_MANDRILL_AUTHENTICATION_KE...
748,001
Initialize Paystack Request object for browsing resource. Args: api_url: str headers: dict
def __init__(self, api_url='https://api.paystack.co/', headers=None): self.API_BASE_URL = '{api_url}'.format(**locals()) self.headers = headers
748,347
Perform a method on a resource. Args: method: requests.`method` resource_uri: resource endpoint Raises: HTTPError Returns: JSON Response
def _request(self, method, resource_uri, **kwargs): data = kwargs.get('data') response = method(self.API_BASE_URL + resource_uri, json=data, headers=self.headers) response.raise_for_status() return response.json()
748,348
Get a resource. Args: endpoint: resource endpoint.
def get(self, endpoint, **kwargs): return self._request(requests.get, endpoint, **kwargs)
748,349
Create a resource. Args: endpoint: resource endpoint.
def post(self, endpoint, **kwargs): return self._request(requests.post, endpoint, **kwargs)
748,350
Update a resource. Args: endpoint: resource endpoint.
def put(self, endpoint, **kwargs): return self._request(requests.put, endpoint, **kwargs)
748,351
Static method defined to update paystack customer data by id. Args: customer_id: paystack customer id. first_name: customer's first name(optional). last_name: customer's last name(optional). email: customer's email address(optional). phone:customer's ...
def update(cls, customer_id, **kwargs): return cls().requests.put('customer/{customer_id}'.format(**locals()), data=kwargs)
748,352
Reentrenar parcialmente un clasificador SVM. Args: name (str): Nombre para el clasidicador. ids (list): Se espera una lista de N ids de textos ya almacenados en el TextClassifier. labels (list): Se espera una lista de N etiquetas. Una por cada id ...
def retrain(self, name, ids, labels): if not all(np.in1d(ids, self.ids)): raise ValueError("Hay ids de textos que no se encuentran \ almacenados.") try: classifier = getattr(self, name) except AttributeError: raise Attrib...
748,505
Calcula los vectores de terminos de textos y los almacena. A diferencia de :func:`~TextClassifier.TextClassifier.store_text` esta funcion borra cualquier informacion almacenada y comienza el conteo desde cero. Se usa para redefinir el vocabulario sobre el que se construyen los vectores....
def reload_texts(self, texts, ids, vocabulary=None): self._check_id_length(ids) self.ids = np.array(sorted(ids)) if vocabulary: self.vectorizer.vocabulary = vocabulary sorted_texts = [x for (y, x) in sorted(zip(ids, texts))] self.term_mat = self.vectorizer.fi...
748,509
Create a new release branch. Args: component (str): Version component to bump when creating the release. Can be *major*, *minor* or *patch*. exact (str): The exact version to set for the release. Overrides the component argument. This allows to re-rel...
def start(component, exact): # type: (str, str) -> None version_file = conf.get_path('version_file', 'VERSION') develop = conf.get('git.devel_branch', 'develop') common.assert_on_branch(develop) with conf.within_proj_dir(): out = shell.run('git status --porcelain', capture=True).stdou...
749,066
Lint python files. Args: exclude (list[str]): A list of glob string patterns to test against. If the file/path matches any of those patters, it will be filtered out. skip_untracked (bool): If set to **True** it will skip all files not tracked by git. comm...
def lint(exclude, skip_untracked, commit_only): # type: (List[str], bool, bool) -> None exclude = list(exclude) + conf.get('lint.exclude', []) runner = LintRunner(exclude, skip_untracked, commit_only) if not runner.run(): exit(1)
749,077
Decorator for defining lint tools. Args: name (str): The name of the tool. This name will be used to identify the tool in `pelconf.yaml`.
def tool(name): # type: (str) -> FunctionType global g_tools def decorator(fn): # pylint: disable=missing-docstring # type: (FunctionType) -> FunctionType g_tools[name] = fn return fn return decorator
749,078
Run code checks using pylint. Args: files (list[str]): A list of files to check Returns: bool: **True** if all files passed the checks, **False** otherwise.
def pylint_check(files): # type: (List[str]) -> int files = fs.wrap_paths(files) cfg_path = conf.get_path('lint.pylint_cfg', 'ops/tools/pylint.ini') pylint_cmd = 'pylint --rcfile {} {}'.format(cfg_path, files) return shell.run(pylint_cmd, exit_on_error=False).return_code
749,080
Add a new property to the app (with setattr) Args: name (str): the name of the new property value (any): the value of the new property
def enrich_app(self, name, value): #Method shouldn't be added: https://stackoverflow.com/a/28060251/3042398 if type(value) == type(self.enrich_app): raise ValueError("enrich_app can't add method") setattr(self.app, name, value)
749,094
Bump the given version component. Args: component (str): What part of the version should be bumped. Can be one of: - major - minor - patch exact (str): The exact version that should be set instead of bumping the current one. ...
def bump(component='patch', exact=None): # type: (str, str) -> Tuple[str, str] old_ver = current() if exact is None: new_ver = _bump_version(old_ver, component) else: new_ver = exact write(new_ver) return old_ver, new_ver
749,245
Bump the given version component. Args: version (str): The current version. The format is: MAJOR.MINOR[.PATCH]. component (str): What part of the version should be bumped. Can be one of: - major - minor - patch Returns: str: ...
def _bump_version(version, component='patch'): # type: (str, str) -> str if component not in ('major', 'minor', 'patch'): raise ValueError("Invalid version component: {}".format(component)) m = RE_VERSION.match(version) if m is None: raise ValueError("Version must be in MAJOR.MINOR...
749,246
Return the author of the given commit. Args: sha1 (str): The sha1 of the commit to query. If not given, it will return the sha1 for the current commit. Returns: Author: A named tuple ``(name, email)`` with the commit author details.
def commit_author(sha1=''): # type: (str) -> Author with conf.within_proj_dir(): cmd = 'git show -s --format="%an||%ae" {}'.format(sha1) result = shell.run( cmd, capture=True, never_pretend=True ).stdout name, email = result.split('||') ...
749,460
Tag the current commit. Args: name (str): The tag name. message (str): The tag message. Same as ``-m`` parameter in ``git tag``. author (Author): The commit author. Will default to the author of the commit. pretend (bool): If set to **...
def tag(name, message, author=None): # type: (str, str, Author, bool) -> None cmd = ( 'git -c "user.name={author.name}" -c "user.email={author.email}" ' 'tag -a "{name}" -m "{message}"' ).format( author=author or latest_commit().author, name=name, message=message...
749,464
Verify if the given branch exists. Args: branch_name (str): The name of the branch to check. Returns: bool: **True** if a branch with name *branch_name* exits, **False** otherwise.
def verify_branch(branch_name): # type: (str) -> bool try: shell.run( 'git rev-parse --verify {}'.format(branch_name), never_pretend=True ) return True except IOError: return False
749,467
Return details about a given commit. Args: sha1 (str): The sha1 of the commit to query. If not given, it will return the details for the latest commit. Returns: CommitDetails: Commit details. You can use the instance of the class to q...
def get(cls, sha1=''): # type: (str) -> CommitDetails with conf.within_proj_dir(): cmd = 'git show -s --format="%H||%an||%ae||%s||%b||%P" {}'.format( sha1 ) result = shell.run(cmd, capture=True, never_pretend=True).stdout sha1, name, ...
749,473
Load a YAML configuration. This will not update the configuration but replace it entirely. Args: conf_file (str): Path to the YAML config. This function will not check the file name or extension and will just crash if the given file does not exist or is not a valid ...
def load_yaml_config(conf_file): # type: (str) -> None global g_config with open(conf_file) as fp: # Initialize config g_config = util.yaml_load(fp) # Add src_dir to sys.paths if it's set. This is only done with YAML # configs, py configs have to do this manually. ...
749,507
Load template from file. The templates are part of the package and must be included as ``package_data`` in project ``setup.py``. Args: filename (str): The template path. Relative to `peltak` package directory. Returns: str: The content of the chosen template.
def load_template(filename): # type: (str) -> str template_file = os.path.join(PKG_DIR, 'templates', filename) with open(template_file) as fp: return fp.read()
749,509
Return absolute path to the repo dir (root project directory). Args: path (str): The path relative to the project root (pelconf.yaml). Returns: str: The given path converted to an absolute path.
def proj_path(*path_parts): # type: (str) -> str path_parts = path_parts or ['.'] # If path represented by path_parts is absolute, do not modify it. if not os.path.isabs(path_parts[0]): proj_path = _find_proj_root() if proj_path is not None: path_parts = [proj_path] + ...
749,510
Print error and exit if the current branch is not of a given type. Args: branch_type (str): The branch type. This assumes the branch is in the '<type>/<title>` format.
def assert_branch_type(branch_type): # type: (str) -> None branch = git.current_branch(refresh=True) if branch.type != branch_type: if context.get('pretend', False): log.info("Would assert that you're on a <33>{}/*<32> branch", branch_type) else: ...
749,544
Print error and exit if *branch_name* is not the current branch. Args: branch_name (str): The supposed name of the current branch.
def assert_on_branch(branch_name): # type: (str) -> None branch = git.current_branch(refresh=True) if branch.name != branch_name: if context.get('pretend', False): log.info("Would assert that you're on a <33>{}<32> branch", branch_name) else: ...
749,545
Delete the given branch. Args: branch_name (str): Name of the branch to delete.
def git_branch_delete(branch_name): # type: (str) -> None if branch_name not in git.protected_branches(): log.info("Deleting branch <33>{}", branch_name) shell.run('git branch -d {}'.format(branch_name))
749,546
Rename the current branch Args: new_name (str): New name for the current branch.
def git_branch_rename(new_name): # type: (str) -> None curr_name = git.current_branch(refresh=True).name if curr_name not in git.protected_branches(): log.info("Renaming branch from <33>{}<32> to <33>{}".format( curr_name, new_name )) shell.run('git branch -m {}'.fo...
749,547
Checkout or create a given branch Args: branch_name (str): The name of the branch to checkout or create. create (bool): If set to **True** it will create the branch instead of checking it out.
def git_checkout(branch_name, create=False): # type: (str, bool) -> None log.info("Checking out <33>{}".format(branch_name)) shell.run('git checkout {} {}'.format('-b' if create else '', branch_name))
749,548
Merge *head* into *base*. Args: base (str): The base branch. *head* will be merged into this branch. head (str): The branch that will be merged into *base*. no_ff (bool): If set to **True** it will force git to create merge commit. If set to *...
def git_merge(base, head, no_ff=False): # type: (str, str, bool) -> None pretend = context.get('pretend', False) branch = git.current_branch(refresh=True) if branch.name != base and not pretend: git_checkout(base) args = [] if no_ff: args.append('--no-ff') log.info("...
749,549
Show the user a menu to pick a branch from the existing ones. Args: exclude (list[str]): List of branch names to exclude from the menu. By default it will exclude master and develop branches. To show all branches pass an empty array here. Returns: str: The n...
def choose_branch(exclude=None): # type: (List[str]) -> str if exclude is None: master = conf.get('git.master_branch', 'master') develop = conf.get('git.devel_branch', 'develop') exclude = {master, develop} branches = list(set(git.branches()) - exclude) # Print the menu ...
749,551
Generates identicon image based on passed data. Arguments: data - Data which should be used for generating an identicon. This data will be used in order to create a digest which is used for generating the identicon. If the data passed is a hex digest already, the digest will be used as-is....
def image(request, data): # Get image width, height, padding, and format from GET parameters, or # fall-back to default values from settings. try: width = int(request.GET.get("w", PYDENTICON_WIDTH)) except ValueError: raise SuspiciousOperation("Identicon width must be a positive in...
749,793
Set context value. Args: name (str): The name of the context value to change. value (Any): The new value for the selected context value
def set(self, name, value): curr = self.values parts = name.split('.') for i, part in enumerate(parts[:-1]): try: curr = curr.setdefault(part, {}) except AttributeError: raise InvalidPath('.'.join(parts[:i + 1])) try: ...
749,877
List all tags for the given image stored in the registry. Args: image_name (str): The name of the image to query. The image must be present on the registry for this call to return any values. Returns: list[str]: List of tags for that image.
def list_tags(self, image_name): # type: (str) -> Iterator[str] tags_url = self.registry_url + '/v2/{}/tags/list' r = self.get(tags_url.format(image_name), auth=self.auth) data = r.json() if 'tags' in data: return reversed(sorted(data['tags'])) ret...
749,989
Patches current record and udpates the current instance's 'attrs' attribute to reflect the new changes. Args: payload - hash. This will be JSON-formatted prior to sending the request. Returns: `dict`. The JSON formatted response. Raises: `requests.e...
def patch(self, payload, append_to_arrays=True): if not isinstance(payload, dict): raise ValueError("The 'payload' parameter must be provided a dictionary object.") payload = self.__class__.set_id_in_fkeys(payload) if append_to_arrays: for key in payload: ...
750,065
Posts the data to the specified record. Args: payload: `dict`. This will be JSON-formatted prior to sending the request. Returns: `dict`. The JSON formatted response. Raises: `Requests.exceptions.HTTPError`: The status code is not ok. `RecordNot...
def post(cls, payload): if not isinstance(payload, dict): raise ValueError("The 'payload' parameter must be provided a dictionary object.") payload = cls.set_id_in_fkeys(payload) payload = cls.check_boolean_fields(payload) payload = cls.add_model_name_to_payload(payl...
750,067
Logs the provided error message to both the error logger and the debug logger logging instances. Args: msg: `str`. The error message to log.
def log_error(cls, msg):
    """Log *msg* to both the error logger and the debug logger.

    Args:
        msg: `str`. The error message to log.
    """
    # Emit on the error logger first, then mirror on the debug logger,
    # preserving the original call order.
    for emit in (cls.error_logger.error, cls.debug_logger.debug):
        emit(msg)
750,068
An aid in troubleshooting internal application errors, i.e. <Response [500]>, to be mainly beneficial when developing the server-side API. This method will write the response HTML for viewing the error details in the browser. Args: response: `requests.models.Response` instance. ...
def write_response_html_to_file(response, filename):
    """Write an HTTP response body to a file for inspection in a browser.

    An aid in troubleshooting internal application errors, i.e.
    <Response [500]>, mainly useful when developing the server-side API.

    Args:
        response: `requests.models.Response` instance.
        filename: `str`. Path of the file to (over)write with the response
            body.
    """
    # Use a context manager so the file handle is closed even if the
    # debug-log call or the write raises (the original open()/close()
    # pair leaked the handle on any exception in between).
    with open(filename, 'w') as fout:
        # Any non-2xx status is treated as an error and mirrored to the
        # debug log before the body is written out.
        if not str(response.status_code).startswith("2"):
            Model.debug_logger.debug(response.text)
        fout.write(response.text)
750,069
Calls the SequencingRequest's get_library_barcode_sequence_hash server-side endpoint to create a hash of the form {LibraryID -> barcode_sequence} for all Libraries on the SequencingRequest. Args: inverse: `bool`. True means to inverse the key and value pairs such that the barcode ...
def get_library_barcode_sequence_hash(self, inverse=False): action = os.path.join(self.record_url, "get_library_barcode_sequence_hash") res = requests.get(url=action, headers=HEADERS, verify=False) res.raise_for_status() res_json = res.json() # Convert library ID from st...
750,079
Unarchives the user with the specified user ID. Args: user_id: `int`. The ID of the user to unarchive. Returns: `NoneType`: None.
def unarchive_user(self, user_id):
    """Unarchive the user with the specified user ID.

    Args:
        user_id: `int`. The ID of the user to unarchive.

    Returns:
        `NoneType`: None.
    """
    endpoint = self.record_url + "/unarchive"
    response = requests.patch(url=endpoint, json={"user_id": user_id},
                              headers=HEADERS, verify=False)
    # NOTE(review): "bob.html" looks like a leftover debugging artifact —
    # confirm whether this response dump is still wanted.
    self.write_response_html_to_file(response, "bob.html")
    response.raise_for_status()
750,082
Deploy the app to AppEngine. Args: app_id (str): AppEngine App ID. Overrides config value app_id if given. version (str): AppEngine project version. Overrides config values if given. promote (bool): If set to **True** promote the current remote app versio...
def deploy(app_id, version, promote, quiet): # type: (str, str, bool, bool) -> None gae_app = GaeApp.for_branch(git.current_branch().name) if gae_app is None and None in (app_id, version): msg = ( "Can't find an AppEngine app setup for branch <35>{}<32> and" "--project...
750,179
Run devserver. Args: port (int): Port on which the app will be served. admin_port (int): Port on which the admin interface is served. clear (bool): If set to **True**, clear the datastore on startup.
def devserver(port, admin_port, clear): # type: (int, int, bool) -> None admin_port = admin_port or (port + 1) args = [ '--port={}'.format(port), '--admin_port={}'.format(admin_port) ] if clear: args += ['--clear_datastore=yes'] with conf.within_proj_dir(): ...
750,180
Mark function as experimental. Args: fn (FunctionType): The command function to decorate.
def mark_experimental(fn): # type: (FunctionType) -> FunctionType @wraps(fn) def wrapper(*args, **kw): # pylint: disable=missing-docstring from peltak.core import shell if shell.is_tty: warnings.warn("This command is has experimental status. The " ...
750,185
Mark command as deprecated. Args: replaced_by (str): The command that deprecated this command and should be used instead.
def mark_deprecated(replaced_by): # type: (Text) -> FunctionType def decorator(fn): # pylint: disable=missing-docstring @wraps(fn) def wrapper(*args, **kw): # pylint: disable=missing-docstring from peltak.core import shell if shell.is_tty: warnin...
750,186
Dump data to a YAML string/file. Args: data (YamlData): The data to serialize as YAML. stream (TextIO): The file-like object to save to. If given, this function will write the resulting YAML to that stream. Returns: str: The YAML string.
def yaml_dump(data, stream=None):
    # type: (YamlData, Optional[TextIO]) -> Text
    """Serialize *data* to a YAML string/file.

    Args:
        data (YamlData): The data to serialize as YAML.
        stream (TextIO): The file-like object to save to. If given, the
            resulting YAML is written to that stream.

    Returns:
        str: The YAML string.
    """
    dump_opts = dict(stream=stream, Dumper=Dumper, default_flow_style=False)
    return yaml.dump(data, **dump_opts)
750,188
Apply the decorator to the given function. Args: fn (FunctionType): The function to decorate. :return Function: The function wrapped in caching logic.
def __call__(self, fn): # type: (FunctionType) -> FunctionType @wraps(fn) def wrapper(refresh=False): # pylint: disable=missing-docstring if refresh or not hasattr(wrapper, self.CACHE_VAR): result = fn() setattr(wrapper, self.CACHE_VAR, resu...
750,190
Clear result cache on the given function. If the function has no cached result, this call will do nothing. Args: fn (FunctionType): The function whose cache should be cleared.
def clear(cls, fn):
    # type: (FunctionType) -> None
    """Remove the cached result stored on the given function, if any.

    If the function has no cached result, this call will do nothing.

    Args:
        fn (FunctionType): The function whose cache should be cleared.
    """
    cache_attr = cls.CACHE_VAR
    if hasattr(fn, cache_attr):
        delattr(fn, cache_attr)
750,191
Start working on a new feature by branching off develop. This will create a new branch off develop called feature/<name>. Args: name (str): The name of the new feature.
def start(name): # type: (str) -> None branch = git.current_branch(refresh=True) task_branch = 'task/' + common.to_branch_name(name) if branch.type not in ('feature', 'hotfix'): log.err("Task branches can only branch off <33>feature<32> or " "<33>hotfix<32> branches") ...
750,192
Start working on a new hotfix. This will create a new branch off master called hotfix/<name>. Args: name (str): The name of the new feature.
def start(name):
    # type: (str) -> None
    """Start working on a new hotfix.

    Creates a new branch off master called hotfix/<name> and checks it out.

    Args:
        name (str): The name of the new hotfix.
    """
    target_branch = 'hotfix/' + common.to_branch_name(name)
    # Hotfixes must branch off the configured master branch.
    common.assert_on_branch(conf.get('git.master_branch', 'master'))
    common.git_checkout(target_branch, create=True)
750,388
Send text to stdin. Can only be used on non blocking commands Args: value (str): the text to write on stdin Raises: TypeError: If command is blocking Returns: ShellCommand: return this ShellCommand instance for chaining
def send(self, value):
    """Send text to stdin. Can only be used on non blocking commands.

    Args:
        value (str): the text to write on stdin

    Raises:
        TypeError: If command is blocking

    Returns:
        ShellCommand: this ShellCommand instance, for chaining
    """
    # Guard clause: writing to stdin only makes sense for a non-blocking
    # command that actually has a stdin pipe.
    if self.block or self._stdin is None:
        raise TypeError(NON_BLOCKING_ERROR_MESSAGE)
    self.writer.write("{}\n".format(value))
    return self
750,396
Block until a pattern has been found in stdout and stderr Args: pattern(:class:`~re.Pattern`): The pattern to search timeout(int): Maximum number of seconds to wait. If None, wait infinitely Raises: TimeoutError: When timeout is reached
def wait_for(self, pattern, timeout=None): should_continue = True if self.block: raise TypeError(NON_BLOCKING_ERROR_MESSAGE) def stop(signum, frame): # pylint: disable=W0613 nonlocal should_continue if should_continue: raise Timeout...
750,400
Create an instance of :class:`~ShellCommand` and run it Args: command (str): :class:`~ShellCommand` block (bool): See :class:`~ShellCommand` cwd (str): Override the runner cwd. Used by the :class:`~ShellCommand` instance
def run(self, command, block=True, cwd=None, stdin=subprocess.PIPE,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE):
    """Create an instance of :class:`~ShellCommand` and run it.

    Args:
        command (str): See :class:`~ShellCommand`
        block (bool): See :class:`~ShellCommand`
        cwd (str): Override the runner cwd. Used by the
            :class:`~ShellCommand` instance
    """
    # Fall back to the runner's own working directory when none is given.
    effective_cwd = self.cwd if cwd is None else cwd
    cmd = ShellCommand(
        command=command,
        logger=self.logger,
        block=block,
        cwd=effective_cwd,
        stdin=stdin,
        stdout=stdout,
        stderr=stderr,
    )
    return cmd.run()
750,404
Test whether the given *path* matches any patterns in *patterns* Args: path (str): A file path to test for matches. patterns (list[str]): A list of glob string patterns to test against. If *path* matches any of those patterns, it will return True. Returns: ...
def match_globs(path, patterns): # type: (str, List[str]) -> bool for pattern in (p for p in patterns if p): if pattern.startswith('/'): regex = fnmatch.translate(pattern[1:]) temp_path = path[1:] if path.startswith('/') else path m = re.search(regex, temp_path...
750,416
Test whether the given *path* contains any patterns in *patterns* Args: path (str): A file path to test for matches. patterns (list[str]): A list of glob string patterns to test against. If *path* matches any of those patterns, it will return True. Returns: ...
def search_globs(path, patterns): # type: (str, List[str]) -> bool for pattern in (p for p in patterns if p): if pattern.startswith('/'): # If pattern starts with root it means it match from root only regex = fnmatch.translate(pattern[1:]) regex = regex.replace('...
750,417
--pretend aware file writing. You can always write files manually but you should always handle the --pretend case. Args: path (str): content (str): mode (str):
def write_file(path, content, mode='w'): # type: (Text, Union[Text,bytes], Text) -> None from peltak.core import context from peltak.core import log if context.get('pretend', False): log.info("Would overwrite <34>{path}<32> with:\n<90>{content}", path=path, ...
750,418
Upload the release to a pypi server. TODO: Make sure the git directory is clean before allowing a release. Args: target (str): pypi target as defined in ~/.pypirc
def upload(target):
    # type: (str) -> None
    """Upload the release to a pypi server.

    TODO: Make sure the git directory is clean before allowing a release.

    Args:
        target (str): pypi target as defined in ~/.pypirc
    """
    log.info("Uploading to pypi server <33>{}".format(target))
    # Register first, then upload — both against the same pypirc target.
    commands = (
        'python setup.py sdist register -r "{}"'.format(target),
        'python setup.py sdist upload -r "{}"'.format(target),
    )
    with conf.within_proj_dir():
        for cmd in commands:
            shell.run(cmd)
750,906
Generate ~/.pypirc with the given credentials. Useful for CI builds. Can also get credentials through env variables ``PYPI_USER`` and ``PYPI_PASS``. Args: username (str): pypi username. If not given it will try to take it from the `` PYPI_USER`` env variable. passwo...
def gen_pypirc(username=None, password=None): # type: (str, str) -> None path = join(conf.getenv('HOME'), '.pypirc') username = username or conf.getenv('PYPI_USER', None) password = password or conf.getenv('PYPI_PASS', None) if username is None or password is None: log.err("You must pr...
750,907
For each plugin, check if a "step" method exists on it, and call it Args: step (str): The method to search and call on each plugin
def call_plugins(self, step): for plugin in self.plugins: try: getattr(plugin, step)() except AttributeError: self.logger.debug("{} doesn't exist on plugin {}".format(step, plugin)) except TypeError: self.logger.debug("...
751,020