docstring
stringlengths
52
499
function
stringlengths
67
35.2k
__index_level_0__
int64
52.6k
1.16M
Convert the color from CIE XYZ coordinates to sRGB. .. note:: Compensation for sRGB gamma correction is applied before converting. Parameters: :x: The X component value [0...1] :y: The Y component value [0...1] :z: The Z component value [0...1] Returns: The color as an (...
def xyz_to_rgb(x, y=None, z=None): if type(x) in [list,tuple]: x, y, z = x r = (x * 3.2406255) - (y * 1.5372080) - (z * 0.4986286) g = -(x * 0.9689307) + (y * 1.8757561) + (z * 0.0415175) b = (x * 0.0557101) - (y * 0.2040211) + (z * 1.0569959) return tuple((((v <= _srgbGammaCorrInv) and [v * 12.92] o...
762,681
Convert the color from CMYK coordinates to CMY. Parameters: :c: The Cyan component value [0...1] :m: The Magenta component value [0...1] :y: The Yellow component value [0...1] :k: The Black component value [0...1] Returns: The color as an (c, m, y) tuple in the range: ...
def cmyk_to_cmy(c, m=None, y=None, k=None): if type(c) in [list,tuple]: c, m, y, k = c mk = 1-k return ((c*mk + k), (m*mk + k), (y*mk + k))
762,684
Convert the color from CMY coordinates to CMYK. Parameters: :c: The Cyan component value [0...1] :m: The Magenta component value [0...1] :y: The Yellow component value [0...1] Returns: The color as an (c, m, y, k) tuple in the range: c[0...1], m[0...1], y[0...1], ...
def cmy_to_cmyk(c, m=None, y=None): if type(c) in [list,tuple]: c, m, y = c k = min(c, m, y) if k==1.0: return (0.0, 0.0, 0.0, 1.0) mk = 1.0-k return ((c-k) / mk, (m-k) / mk, (y-k) / mk, k)
762,685
Convert the color from RGB coordinates to CMY. Parameters: :r: The Red component value [0...1] :g: The Green component value [0...1] :b: The Blue component value [0...1] Returns: The color as an (c, m, y) tuple in the range: c[0...1], m[0...1], y[0...1] >>> rgb_to_...
def rgb_to_cmy(r, g=None, b=None): if type(r) in [list,tuple]: r, g, b = r return (1-r, 1-g, 1-b)
762,686
Convert the color from CMY coordinates to RGB. Parameters: :c: The Cyan component value [0...1] :m: The Magenta component value [0...1] :y: The Yellow component value [0...1] Returns: The color as an (r, g, b) tuple in the range: r[0...1], g[0...1], b[0...1] >>> cm...
def cmy_to_rgb(c, m=None, y=None): if type(c) in [list,tuple]: c, m, y = c return (1-c, 1-m, 1-y)
762,687
Convert the color in the standard [0...1] range to ints in the [0..255] range. Parameters: :r: The Red component value [0...1] :g: The Green component value [0...1] :b: The Blue component value [0...1] Returns: The color as an (r, g, b) tuple in the range: r[0...255], g[0...
def rgb_to_ints(r, g=None, b=None): if type(r) in [list,tuple]: r, g, b = r return tuple(int(round(v*255)) for v in (r, g, b))
762,688
Convert ints in the [0...255] range to the standard [0...1] range. Parameters: :r: The Red component value [0...255] :g: The Green component value [0...255] :b: The Blue component value [0...255] Returns: The color as an (r, g, b) tuple in the range: r[0...1], g[0...1], ...
def ints_to_rgb(r, g=None, b=None): if type(r) in [list,tuple]: r, g, b = r return tuple(float(v) / 255.0 for v in [r, g, b])
762,689
Convert the color from (r, g, b) to #RRGGBB. Parameters: :r: The Red component value [0...1] :g: The Green component value [0...1] :b: The Blue component value [0...1] Returns: A CSS string representation of this color (#RRGGBB). >>> rgb_to_html(1, 0.5, 0) '#ff8000'
def rgb_to_html(r, g=None, b=None): if type(r) in [list,tuple]: r, g, b = r return '#%02x%02x%02x' % tuple((min(round(v*255), 255) for v in (r, g, b)))
762,690
Convert the color from RGB to a PIL-compatible integer. Parameters: :r: The Red component value [0...1] :g: The Green component value [0...1] :b: The Blue component value [0...1] Returns: A PIL compatible integer (0xBBGGRR). >>> '0x%06x' % rgb_to_pil(1, 0.5, 0) '0x0080ff'
def rgb_to_pil(r, g=None, b=None): if type(r) in [list,tuple]: r, g, b = r r, g, b = [min(int(round(v*255)), 255) for v in (r, g, b)] return (b << 16) + (g << 8) + r
762,692
Convert the color from a PIL-compatible integer to RGB. Parameters: pil: a PIL compatible color representation (0xBBGGRR) Returns: The color as an (r, g, b) tuple in the range: the range: r: [0...1] g: [0...1] b: [0...1] >>> '(%g, %g, %g)' % pil_to_rgb(0x0080ff) '(1, 0.501961, 0)'
def pil_to_rgb(pil): r = 0xff & pil g = 0xff & (pil >> 8) b = 0xff & (pil >> 16) return tuple((v / 255.0 for v in (r, g, b)))
762,693
Convert a color component to its web safe equivalent. Parameters: :c: The component value [0...1] :alt: If True, return the alternative value instead of the nearest one. Returns: The web safe equivalent of the component value.
def _websafe_component(c, alt=False): # This sucks, but floating point between 0 and 1 is quite fuzzy... # So we just change the scale a while to make the equality tests # work, otherwise it gets wrong at some decimal far to the right. sc = c * 100.0 # If the color is already safe, return it straight away...
762,694
Convert the color from RGB to 'web safe' RGB Parameters: :r: The Red component value [0...1] :g: The Green component value [0...1] :b: The Blue component value [0...1] :alt: If True, use the alternative color instead of the nearest one. Can be used for dithering. Retu...
def rgb_to_websafe(r, g=None, b=None, alt=False): if type(r) in [list,tuple]: r, g, b = r websafeComponent = _websafe_component return tuple((websafeComponent(v, alt) for v in (r, g, b)))
762,695
Convert the color from RGB to its greyscale equivalent Parameters: :r: The Red component value [0...1] :g: The Green component value [0...1] :b: The Blue component value [0...1] Returns: The color as an (r, g, b) tuple in the range: the range: r[0...1], g[0...1], ...
def rgb_to_greyscale(r, g=None, b=None): if type(r) in [list,tuple]: r, g, b = r v = (r + g + b) / 3.0 return (v, v, v)
762,696
Maps a hue on the RGB color wheel to Itten's RYB wheel. Parameters: :hue: The hue on the RGB color wheel [0...360] Returns: An approximation of the corresponding hue on Itten's RYB wheel. >>> rgb_to_ryb(15) 26.0
def rgb_to_ryb(hue): d = hue % 15 i = int(hue / 15) x0 = _RybWheel[i] x1 = _RybWheel[i+1] return x0 + (x1-x0) * d / 15
762,697
Maps a hue on Itten's RYB color wheel to the standard RGB wheel. Parameters: :hue: The hue on Itten's RYB color wheel [0...360] Returns: An approximation of the corresponding hue on the standard RGB wheel. >>> ryb_to_rgb(15) 8.0
def ryb_to_rgb(hue): d = hue % 15 i = int(hue / 15) x0 = _RgbWheel[i] x1 = _RgbWheel[i+1] return x0 + (x1-x0) * d / 15
762,698
Create a new instance based on the specifed PIL color. Parameters: :pil: A PIL compatible color representation (0xBBGGRR) :alpha: The color transparency [0...1], default is opaque :wref: The whitepoint reference, default is 2° D65. Returns: A grapefruit.Color in...
def from_pil(pil, alpha=1.0, wref=_DEFAULT_WREF): return Color(pil_to_rgb(pil), 'rgb', alpha, wref)
762,709
Instantiate a new grapefruit.Color object. Parameters: :values: The values of this color, in the specified representation. :mode: The representation mode used for values. :alpha: the alpha value (transparency) of this color. :wref: The whitepoint reference, d...
def __init__(self, values, mode='rgb', alpha=1.0, wref=_DEFAULT_WREF): if not(isinstance(values, tuple)): raise TypeError("values must be a tuple") if mode=='rgb': self.__rgb = tuple([float(v) for v in values]) self.__hsl = rgb_to_hsl(*self.__rgb) elif mode=='hsl': self.__hsl =...
762,710
Create a new instance based on this one but less saturated. Parameters: :level: The amount by which the color should be desaturated to produce the new one [0...1]. Returns: A grapefruit.Color instance. >>> Color.from_hsl(30, 0.5, 0.5).desaturate(0.25) Color(0.625, 0.5, 0.3...
def desaturate(self, level): h, s, l = self.__hsl return Color((h, max(s - level, 0), l), 'hsl', self.__a, self.__wref)
762,731
Create a new instance which is the complementary color of this one. Parameters: :mode: Select which color wheel to use for the generation (ryb/rgb). Returns: A grapefruit.Color instance. >>> Color.from_hsl(30, 1, 0.5).complementary_color(mode='rgb') Color(0.0, 0.5, 1.0, 1.0) ...
def complementary_color(self, mode='ryb'): h, s, l = self.__hsl if mode == 'ryb': h = rgb_to_ryb(h) h = (h+180)%360 if mode == 'ryb': h = ryb_to_rgb(h) return Color((h, s, l), 'hsl', self.__a, self.__wref)
762,733
Alpha-blend this color on the other one. Args: :other: The grapefruit.Color to alpha-blend with this one. Returns: A grapefruit.Color instance which is the result of alpha-blending this color on the other one. >>> c1 = Color.from_rgb(1, 0.5, 0, 0.2) >>> c2 = Color.from_rgb(1...
def alpha_blend(self, other): # get final alpha channel fa = self.__a + other.__a - (self.__a * other.__a) # get percentage of source alpha compared to final alpha if fa==0: sa = 0 else: sa = min(1.0, self.__a/other.__a) # destination percentage is just the additive inverse da = 1.0 -...
762,735
blend this color with the other one. Args: :other: the grapefruit.Color to blend with this one. Returns: A grapefruit.Color instance which is the result of blending this color on the other one. >>> c1 = Color.from_rgb(1, 0.5, 0, 0.2) >>> c2 = Color.from_rgb(1, 1, 1, 0.6) ...
def blend(self, other, percent=0.5): dest = 1.0 - percent rgb = tuple(((u * percent) + (v * dest) for u, v in zip(self.__rgb, other.__rgb))) a = (self.__a * percent) + (other.__a * dest) return Color(rgb, 'rgb', a, self.__wref)
762,736
Converts python data structure to pybel.Molecule. This will infer bond data if not specified. Args: data: The loaded json data of a molecule, as a Python object infer_bonds (Optional): If no bonds specified in input, infer them Returns: An instance of `pybel.Molecule`
def json_to_pybel(data, infer_bonds=False): obmol = ob.OBMol() obmol.BeginModify() for atom in data['atoms']: obatom = obmol.NewAtom() obatom.SetAtomicNum(table.GetAtomicNum(str(atom['element']))) obatom.SetVector(*atom['location']) if 'label' in atom: pd = o...
762,809
Converts a pybel molecule to json. Args: molecule: An instance of `pybel.Molecule` name: (Optional) If specified, will save a "name" property Returns: A Python dictionary containing atom and bond data
def pybel_to_json(molecule, name=None): # Save atom element type and 3D location. atoms = [{'element': table.GetSymbol(atom.atomicnum), 'location': list(atom.coords)} for atom in molecule.atoms] # Recover auxiliary data, if exists for json_atom, pybel_atom in zip(atoms, m...
762,810
Converts input chemical formats to json and optimizes structure. Args: data: A string or file representing a chemical format: The format of the `data` variable (default is 'auto') The `format` can be any value specified by Open Babel (http://openbabel.org/docs/2.3.1/FileFormats/Overview.ht...
def generate(data, format="auto"): # Support both files and strings and attempt to infer file type try: with open(data) as in_file: if format == 'auto': format = data.split('.')[-1] data = in_file.read() except: if format == 'auto': fo...
762,813
Checks if jasper can connect a network server. Arguments: server -- (optional) the server to connect with (Default: "www.google.com") Returns: True or False
def check_network_connection(server, port): logger = logging.getLogger(__name__) logger.debug("Checking network connection to server '%s'...", server) try: # see if we can resolve the host name -- tells us if there is # a DNS listening host = socket.gethostbyname(server) ...
763,320
Checks if a python package or module is importable. Arguments: package_or_module -- the package or module name to check Returns: True or False
def check_python_import(package_or_module): logger = logging.getLogger(__name__) logger.debug("Checking python import '%s'...", package_or_module) loader = pkgutil.get_loader(package_or_module) found = loader is not None if found: logger.debug("Python %s '%s' found", ...
763,321
Returns the variable name assigned to the given dependency or None if the dependency has not yet been registered. Args: dependency (str): Thet dependency that needs to be imported. Returns: str or None
def _get_depencency_var_name(self, dependency): for dep_path, var_name in self.dependencies: if dep_path == dependency: return var_name
763,855
Adds the given dependency and returns the variable name to use to access it. If `var_name` is not given then a random one will be created. Args: dependency (str): var_name (str, optional): Returns: str
def _add_dependency(self, dependency, var_name=None): if var_name is None: var_name = next(self.temp_var_names) # Don't add duplicate dependencies if (dependency, var_name) not in self.dependencies: self.dependencies.append((dependency, var_name)) return ...
763,856
Builds and registers a :class:`Selector` object with the given name and configuration. Args: name (str): The name of the selector. Yields: SelectorFactory: The factory that will build the :class:`Selector`.
def add_selector(name): factory = SelectorFactory(name) yield factory selectors[name] = factory.build_selector()
764,244
Returns a decorator function for adding an expression filter. Args: name (str): The name of the filter. **kwargs: Variable keyword arguments for the filter. Returns: Callable[[Callable[[AbstractExpression, Any], AbstractExpression]]]: A decorator fun...
def expression_filter(self, name, **kwargs): def decorator(func): self.filters[name] = ExpressionFilter(name, func, **kwargs) return decorator
764,250
Returns a decorator function for adding a node filter. Args: name (str): The name of the filter. **kwargs: Variable keyword arguments for the filter. Returns: Callable[[Callable[[Element, Any], bool]]]: A decorator function for adding a node filter.
def node_filter(self, name, **kwargs): def decorator(func): self.filters[name] = NodeFilter(name, func, **kwargs) return decorator
764,251
Adds filters from a particular global :class:`FilterSet`. Args: name (str): The name of the set whose filters should be added.
def filter_set(self, name): filter_set = filter_sets[name] for name, filter in iter(filter_set.filters.items()): self.filters[name] = filter self.descriptions += filter_set.descriptions
764,252
Checks whether a document with the specified workflow id already exists. Args: workflow_id (str): The workflow id that should be checked. Raises: DataStoreNotConnected: If the data store is not connected to the server. Returns: bool: ``True`` if a document ...
def exists(self, workflow_id): try: db = self._client[self.database] col = db[WORKFLOW_DATA_COLLECTION_NAME] return col.find_one({"_id": ObjectId(workflow_id)}) is not None except ConnectionFailure: raise DataStoreNotConnected()
764,257
Adds a new document to the data store and returns its id. Args: payload (dict): Dictionary of initial data that should be stored in the new document in the meta section. Raises: DataStoreNotConnected: If the data store is not connected to the server. Re...
def add(self, payload=None): try: db = self._client[self.database] col = db[WORKFLOW_DATA_COLLECTION_NAME] return str(col.insert_one({ DataStoreDocumentSection.Meta: payload if isinstance(payload, dict) else {}, Dat...
764,258
Removes a document specified by its id from the data store. All associated GridFs documents are deleted as well. Args: workflow_id (str): The id of the document that represents a workflow run. Raises: DataStoreNotConnected: If the data store is not connected to the ser...
def remove(self, workflow_id): try: db = self._client[self.database] fs = GridFSProxy(GridFS(db.unproxied_object)) for grid_doc in fs.find({"workflow_id": workflow_id}, no_cursor_timeout=True): fs.delete(grid_doc._...
764,259
Returns the document for the given workflow id. Args: workflow_id (str): The id of the document that represents a workflow run. Raises: DataStoreNotConnected: If the data store is not connected to the server. Returns: DataStoreDocument: The document for the...
def get(self, workflow_id): try: db = self._client[self.database] fs = GridFSProxy(GridFS(db.unproxied_object)) return DataStoreDocument(db[WORKFLOW_DATA_COLLECTION_NAME], fs, workflow_id) except ConnectionFailure: raise DataStoreNotConnected()
764,260
Returns the MongoDB data from a key using dot notation. Args: key (str): The key to the field in the workflow document. Supports MongoDB's dot notation for embedded fields. default (object): The default value that is returned if the key does not exist. ...
def _data_from_dotnotation(self, key, default=None): if key is None: raise KeyError('NoneType is not a valid key!') doc = self._collection.find_one({"_id": ObjectId(self._workflow_id)}) if doc is None: return default for k in key.split('.'): ...
764,266
Encodes the value such that it can be stored into MongoDB. Any primitive types are stored directly into MongoDB, while non-primitive types are pickled and stored as GridFS objects. The id pointing to a GridFS object replaces the original value. Args: value (object): The obj...
def _encode_value(self, value): if isinstance(value, (int, float, str, bool, datetime)): return value elif isinstance(value, list): return [self._encode_value(item) for item in value] elif isinstance(value, dict): result = {} for key, item...
764,267
Delete all GridFS data that is linked by fields in the specified data. Args: data: The data that is parsed for MongoDB ObjectIDs. The linked GridFs object for any ObjectID is deleted.
def _delete_gridfs_data(self, data): if isinstance(data, ObjectId): if self._gridfs.exists({"_id": data}): self._gridfs.delete(data) else: raise DataStoreGridfsIdInvalid() elif isinstance(data, list): for item in data: ...
764,269
Resolves this query relative to the given node. Args: node (node.Base): The node to be evaluated. Returns: int: The number of matches found.
def resolves_for(self, node): self.node = node self.actual_styles = node.style(*self.expected_styles.keys()) return all( toregex(value).search(self.actual_styles[style]) for style, value in iter(self.expected_styles.items()))
764,282
try to identify the callsign's identity by analyzing it in the following order: Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Raises: KeyError: Callsign could not be identified
def _dismantle_callsign(self, callsign, timestamp=timestamp_now): entire_callsign = callsign.upper() if re.search('[/A-Z0-9\-]{3,15}', entire_callsign): # make sure the call has at least 3 characters if re.search('\-\d{1,3}$', entire_callsign): # cut off any -10 / -02 appendixes...
764,287
Returns CQ Zone of a callsign Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Returns: int: containing the callsign's CQ Zone Raises: KeyError: no CQ Zone found for callsign
def get_cqz(self, callsign, timestamp=timestamp_now): return self.get_all(callsign, timestamp)[const.CQZ]
764,292
Returns ITU Zone of a callsign Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Returns: int: containing the callsign's CQ Zone Raises: KeyError: No ITU Zone found for callsign ...
def get_ituz(self, callsign, timestamp=timestamp_now): return self.get_all(callsign, timestamp)[const.ITUZ]
764,293
Returns ADIF id of a callsign's country Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Returns: int: containing the country ADIF id Raises: KeyError: No Country found for callsign
def get_adif_id(self, callsign, timestamp=timestamp_now): return self.get_all(callsign, timestamp)[const.ADIF]
764,295
Asserts that the page has the given path. By default this will compare against the path+query portion of the full URL. Args: path (str | RegexObject): The string or regex that the current "path" should match. **kwargs: Arbitrary keyword arguments for :class:`CurrentPathQuery`. ...
def assert_current_path(self, path, **kwargs): query = CurrentPathQuery(path, **kwargs) @self.document.synchronize def assert_current_path(): if not query.resolves_for(self): raise ExpectationNotMet(query.failure_message) assert_current_path() ...
764,297
Asserts that the page doesn't have the given path. Args: path (str | RegexObject): The string or regex that the current "path" should match. **kwargs: Arbitrary keyword arguments for :class:`CurrentPathQuery`. Returns: True Raises: ExpectationNo...
def assert_no_current_path(self, path, **kwargs): query = CurrentPathQuery(path, **kwargs) @self.document.synchronize def assert_no_current_path(): if query.resolves_for(self): raise ExpectationNotMet(query.negative_failure_message) assert_no_curre...
764,298
Checks if the page has the given path. Args: path (str | RegexObject): The string or regex that the current "path" should match. **kwargs: Arbitrary keyword arguments for :class:`CurrentPathQuery`. Returns: bool: Whether it matches.
def has_current_path(self, path, **kwargs): try: return self.assert_current_path(path, **kwargs) except ExpectationNotMet: return False
764,299
Checks if the page doesn't have the given path. Args: path (str | RegexObject): The string or regex that the current "path" should match. **kwargs: Arbitrary keyword arguments for :class:`CurrentPathQuery`. Returns: bool: Whether it doesn't match.
def has_no_current_path(self, path, **kwargs): try: return self.assert_no_current_path(path, **kwargs) except ExpectationNotMet: return False
764,300
Returns the indices for all occurrences of 'element' in 'lst'. Args: lst (list): List to search. element: Element to find. Returns: list: List of indices or values
def find_indices(lst, element): result = [] offset = -1 while True: try: offset = lst.index(element, offset+1) except ValueError: return result result.append(offset)
764,301
Add a new dag to the queue. If the stop workflow flag is set, no new dag can be queued. Args: name (str): The name of the dag that should be queued. data (MultiTaskData): The data that should be passed on to the new dag. Raises: DagNameUnknown: If the speci...
def _queue_dag(self, name, *, data=None): if self._stop_workflow: return None if name not in self._dags_blueprint: raise DagNameUnknown() new_dag = copy.deepcopy(self._dags_blueprint[name]) new_dag.workflow_name = self.name self._dags_running[ne...
764,320
Handle an incoming request by forwarding it to the appropriate method. Args: request (Request): Reference to a request object containing the incoming request. Raises: RequestActionUnknown: If the action specified in the request is not known. ...
def _handle_request(self, request): if request is None: return Response(success=False, uid=request.uid) action_map = { 'start_dag': self._handle_start_dag, 'stop_workflow': self._handle_stop_workflow, 'join_dags': self._handle_join_dags, ...
764,321
This function is called when the worker received a request to terminate. Upon the termination of the worker, the workflows for all running jobs are stopped gracefully. Args: consumer (Consumer): Reference to the consumer object that handles messages ...
def stop(self, consumer): stopped_workflows = [] for request in [r for r in consumer.controller.state.active_requests]: job = AsyncResult(request.id) workflow_id = job.result['workflow_id'] if workflow_id not in stopped_workflows: client = Cl...
764,330
Returns the given expression filtered by the given value. Args: expr (xpath.expression.AbstractExpression): The expression to filter. value (object): The desired value with which the expression should be filtered. Returns: xpath.expression.AbstractExpression: The fi...
def apply_filter(self, expr, value): if self.skip(value): return expr if not self._valid_value(value): msg = "Invalid value {value} passed to filter {name} - ".format( value=repr(value), name=self.name) if self.default is no...
764,331
Returns an instance of the given browser with the given capabilities. Args: browser_name (str): The name of the desired browser. capabilities (Dict[str, str | bool], optional): The desired capabilities of the browser. Defaults to None. options: Arbitrary keyword arguments for th...
def get_browser(browser_name, capabilities=None, **options): if browser_name == "chrome": return webdriver.Chrome(desired_capabilities=capabilities, **options) if browser_name == "edge": return webdriver.Edge(capabilities=capabilities, **options) if browser_name in ["ff", "firefox"]: ...
764,332
Initialise the task signal convenience class. Args: client (Client): A reference to a signal client object. dag_name (str): The name of the dag the task belongs to.
def __init__(self, client, dag_name): self._client = client self._dag_name = dag_name
764,333
Schedule the execution of a dag by sending a signal to the workflow. Args: dag (Dag, str): The dag object or the name of the dag that should be started. data (MultiTaskData): The data that should be passed on to the new dag. Returns: str: The name of the successfull...
def start_dag(self, dag, *, data=None): return self._client.send( Request( action='start_dag', payload={'name': dag.name if isinstance(dag, Dag) else dag, 'data': data if isinstance(data, MultiTaskData) else None} ) ...
764,334
Wait for the specified dags to terminate. This function blocks until the specified dags terminate. If no dags are specified wait for all dags of the workflow, except the dag of the task calling this signal, to terminate. Args: names (list): The names of the dags that have t...
def join_dags(self, names=None): return self._client.send( Request( action='join_dags', payload={'names': names} ) ).success
764,335
Send a stop signal to the specified dag or the dag that hosts this task. Args: name str: The name of the dag that should be stopped. If no name is given the dag that hosts this task is stopped. Upon receiving the stop signal, the dag will not queue any new tasks and w...
def stop_dag(self, name=None): return self._client.send( Request( action='stop_dag', payload={'name': name if name is not None else self._dag_name} ) ).success
764,336
Generator function that returns celery events. This function turns the callback based celery event handling into a generator. Args: app: Reference to a celery application object. filter_by_prefix (str): If not None, only allow events that have a type that start...
def event_stream(app, *, filter_by_prefix=None): q = Queue() def handle_event(event): if filter_by_prefix is None or\ (filter_by_prefix is not None and event['type'].startswith(filter_by_prefix)): q.put(event) def receive_events(): with app...
764,339
Factory function that turns a celery event into an event object. Args: event (dict): A dictionary that represents a celery event. Returns: object: An event object representing the received event. Raises: JobEventTypeUnsupported: If an unsupported celery job event was received. ...
def create_event_model(event): if event['type'].startswith('task'): factory = { JobEventName.Started: JobStartedEvent, JobEventName.Succeeded: JobSucceededEvent, JobEventName.Stopped: JobStoppedEvent, JobEventName.Aborted: JobAbortedEvent } ...
764,340
Helper function to enable/disable styled output text. Args: enable (bool): Turn on or off styling. text (string): The string that should be styled. kwargs (dict): Parameters that are passed through to click.style Returns: string: The input with either the styling applied (enabl...
def _style(enabled, text, **kwargs): if enabled: return click.style(text, **kwargs) else: return text
764,356
Returns the XPath query for this selector. Args: exact (bool, optional): Whether to exactly match text. Returns: str: The XPath query for this selector.
def xpath(self, exact=None): exact = exact if exact is not None else self.exact if isinstance(self.expression, AbstractExpression): expression = self._apply_expression_filters(self.expression) return to_xpath(expression, exact=exact) else: return s...
764,361
Resolves this query relative to the given node. Args: node (node.Base): The node relative to which this query should be resolved. exact (bool, optional): Whether to exactly match text. Returns: list[Element]: A list of elements matched by this query.
def resolve_for(self, node, exact=None): from capybara.driver.node import Node from capybara.node.element import Element from capybara.node.simple import Simple @node.synchronize def resolve(): if self.selector.format == "css": children = no...
764,362
Returns whether the given node matches all filters. Args: node (Element): The node to evaluate. Returns: bool: Whether the given node matches.
def matches_filters(self, node): visible = self.visible if self.options["text"]: if isregex(self.options["text"]): regex = self.options["text"] elif self.exact_text is True: regex = re.compile(r"\A{}\Z".format(re.escape(self.options["tex...
764,363
Create a fully configured Celery application object. Args: config (Config): A reference to a lightflow configuration object. Returns: Celery: A fully configured Celery application object.
def create_app(config): # configure the celery logging system with the lightflow settings setup_logging.connect(partial(_initialize_logging, config), weak=False) task_postrun.connect(partial(_cleanup_workflow, config), weak=False) # patch Celery to use cloudpickle instead of pickle for serialisat...
764,366
Cleanup the results of a workflow when it finished. Connects to the postrun signal of Celery. If the signal was sent by a workflow, remove the result from the result backend. Args: task_id (str): The id of the task. args (tuple): The arguments the task was started with. **kwargs: K...
def _cleanup_workflow(config, task_id, args, **kwargs): from lightflow.models import Workflow if isinstance(args[0], Workflow): if config.celery['result_expires'] == 0: AsyncResult(task_id).forget()
764,367
Celery task that runs a single task on a worker. Args: self (Task): Reference to itself, the celery task object. task (BaseTask): Reference to the task object that performs the work in its run() method. workflow_id (string): The unique ID of the workflow run that st...
def execute_task(self, task, workflow_id, data=None): start_time = datetime.utcnow() store_doc = DataStore(**self.app.user_options['config'].data_store, auto_connect=True).get(workflow_id) store_loc = 'log.{}.tasks.{}'.format(task.dag_name, task.name) def handle_callback...
764,370
Execute the wrapped code within the given iframe using the given frame or frame name/id. May not be supported by all drivers. Args: locator (str | Element, optional): The name/id of the frame or the frame's element. Defaults to the only frame in the document.
def frame(self, locator=None, *args, **kwargs): self.switch_to_frame(self._find_frame(locator, *args, **kwargs)) try: yield finally: self.switch_to_frame("parent")
764,383
Switch to the given frame. If you use this method you are responsible for making sure you switch back to the parent frame when done in the frame changed to. :meth:`frame` is preferred over this method and should be used when possible. May not be supported by all drivers. Args: ...
def switch_to_frame(self, frame): if isinstance(frame, Element): self.driver.switch_to_frame(frame) self._scopes.append("frame") elif frame == "parent": if self._scopes[-1] != "frame": raise ScopeError("`switch_to_frame(\"parent\")` cannot be...
764,385
Execute the given script, not returning a result. This is useful for scripts that return complex objects, such as jQuery statements. ``execute_script`` should be used over :meth:`evaluate_script` whenever possible. Args: script (str): A string of JavaScript to execute. *...
def execute_script(self, script, *args): args = [arg.base if isinstance(arg, Base) else arg for arg in args] self.driver.execute_script(script, *args)
764,389
Evaluate the given JavaScript and return the result. Be careful when using this with scripts that return complex objects, such as jQuery statements. :meth:`execute_script` might be a better alternative. Args: script (str): A string of JavaScript to evaluate. *args: Varia...
def evaluate_script(self, script, *args):
    """Evaluate the given JavaScript and return the result.

    Be careful when using this with scripts that return complex objects,
    such as jQuery statements. :meth:`execute_script` might be a better
    alternative.

    Args:
        script (str): A string of JavaScript to evaluate.
        *args: Variable arguments that will be passed to the script.

    Returns:
        object: The result of the evaluated JavaScript, with any returned
            driver nodes wrapped back into elements.
    """
    unwrapped = []
    for arg in args:
        # The driver expects raw nodes, so unwrap any wrapped elements first.
        unwrapped.append(arg.base if isinstance(arg, Base) else arg)
    raw_result = self.driver.evaluate_script(script, *unwrapped)
    return self._wrap_element_script_result(raw_result)
764,390
Execute the wrapped code, accepting an alert. Args: text (str | RegexObject, optional): Text to match against the text in the modal. wait (int | float, optional): Maximum time to wait for the modal to appear after executing the wrapped code. Raises: ...
def accept_alert(self, text=None, wait=None):
    """Execute the wrapped code, accepting an alert.

    Args:
        text (str | RegexObject, optional): Text to match against the text
            in the modal.
        wait (int | float, optional): Maximum time to wait for the modal to
            appear after executing the wrapped code.
    """
    # Falsy waits (None, 0) fall back to the global default wait time.
    effective_wait = wait or capybara.default_max_wait_time
    modal = self.driver.accept_modal("alert", text=text, wait=effective_wait)
    with modal:
        yield
764,391
Execute the wrapped code, accepting a confirm. Args: text (str | RegexObject, optional): Text to match against the text in the modal. wait (int | float, optional): Maximum time to wait for the modal to appear after executing the wrapped code. Raises: ...
def accept_confirm(self, text=None, wait=None):
    """Execute the wrapped code, accepting a confirm.

    Args:
        text (str | RegexObject, optional): Text to match against the text
            in the modal.
        wait (int | float, optional): Maximum time to wait for the modal to
            appear after executing the wrapped code.
    """
    modal = self.driver.accept_modal("confirm", text=text, wait=wait)
    with modal:
        yield
764,392
Execute the wrapped code, dismissing a confirm. Args: text (str | RegexObject, optional): Text to match against the text in the modal. wait (int | float, optional): Maximum time to wait for the modal to appear after executing the wrapped code. Raises: ...
def dismiss_confirm(self, text=None, wait=None):
    """Execute the wrapped code, dismissing a confirm.

    Args:
        text (str | RegexObject, optional): Text to match against the text
            in the modal.
        wait (int | float, optional): Maximum time to wait for the modal to
            appear after executing the wrapped code.
    """
    modal = self.driver.dismiss_modal("confirm", text=text, wait=wait)
    with modal:
        yield
764,393
Execute the wrapped code, accepting a prompt, optionally responding to the prompt. Args: text (str | RegexObject, optional): Text to match against the text in the modal. response (str, optional): Response to provide to the prompt. wait (int | float, optional): Maximum time t...
def accept_prompt(self, text=None, response=None, wait=None):
    """Execute the wrapped code, accepting a prompt.

    Optionally responds to the prompt.

    Args:
        text (str | RegexObject, optional): Text to match against the text
            in the modal.
        response (str, optional): Response to provide to the prompt.
        wait (int | float, optional): Maximum time to wait for the modal to
            appear after executing the wrapped code.
    """
    modal = self.driver.accept_modal(
        "prompt", text=text, response=response, wait=wait)
    with modal:
        yield
764,394
Execute the wrapped code, dismissing a prompt. Args: text (str | RegexObject, optional): Text to match against the text in the modal. wait (int | float, optional): Maximum time to wait for the modal to appear after executing the wrapped code. Raises: ...
def dismiss_prompt(self, text=None, wait=None):
    """Execute the wrapped code, dismissing a prompt.

    Args:
        text (str | RegexObject, optional): Text to match against the text
            in the modal.
        wait (int | float, optional): Maximum time to wait for the modal to
            appear after executing the wrapped code.
    """
    modal = self.driver.dismiss_modal("prompt", text=text, wait=wait)
    with modal:
        yield
764,395
Save a snapshot of the page. If invoked without arguments, it will save a file to :data:`capybara.save_path` and the file will be given a randomly generated filename. If invoked with a relative path, the path will be relative to :data:`capybara.save_path`. Args: path (str, ...
def save_page(self, path=None):
    """Save a snapshot of the current page's HTML.

    If invoked without arguments, the file gets a randomly generated name
    and is written to :data:`capybara.save_path`; a relative path is also
    resolved against :data:`capybara.save_path` (both handled by
    ``_prepare_path``).

    Args:
        path (str, optional): The path where the snapshot should be saved.

    Returns:
        str: The full path where the snapshot was written.
    """
    target = _prepare_path(path, "html")
    content = encode_string(self.body)
    # Write as bytes since the body has already been encoded.
    with open(target, "wb") as f:
        f.write(content)
    return target
764,396
Returns whether the given node matches the filter rule with the given value. Args: node (Element): The node to filter. value (object): The desired value with which the node should be evaluated. Returns: bool: Whether the given node matches.
def matches(self, node, value): if self.skip(value): return True if not self._valid_value(value): msg = "Invalid value {value} passed to filter {name} - ".format( value=repr(value), name=self.name) if self.default is not Non...
764,400
Create a BrokerStats object from the dictionary returned by celery. Args: broker_dict (dict): The dictionary as returned by celery. Returns: BrokerStats: A fully initialized BrokerStats object.
def from_celery(cls, broker_dict):
    """Create a BrokerStats object from the dictionary returned by celery.

    Args:
        broker_dict (dict): The dictionary as returned by celery.

    Returns:
        BrokerStats: A fully initialized BrokerStats object.
    """
    fields = ('hostname', 'port', 'transport', 'virtual_host')
    return BrokerStats(**{name: broker_dict[name] for name in fields})
764,402
Create a WorkerStats object from the dictionary returned by celery. Args: name (str): The name of the worker. worker_dict (dict): The dictionary as returned by celery. queues (list): A list of QueueStats objects that represent the queues this worker is listen...
def from_celery(cls, name, worker_dict, queues): return WorkerStats( name=name, broker=BrokerStats.from_celery(worker_dict['broker']), pid=worker_dict['pid'], process_pids=worker_dict['pool']['processes'], concurrency=worker_dict['pool']['max-...
764,406
Create a JobStats object from the dictionary returned by celery. Args: worker_name (str): The name of the worker this jobs runs on. job_dict (dict): The dictionary as returned by celery. celery_app: Reference to a celery application object. Returns: JobS...
def from_celery(cls, worker_name, job_dict, celery_app): if not isinstance(job_dict, dict) or 'id' not in job_dict: raise JobStatInvalid('The job description is missing important fields.') async_result = AsyncResult(id=job_dict['id'], app=celery_app) a_info = async_result.i...
764,409
Create a JobEvent object from the event dictionary returned by celery. Args: event (dict): The dictionary as returned by celery. Returns: JobEvent: A fully initialized JobEvent object.
def from_event(cls, event): return cls( uuid=event['uuid'], job_type=event['job_type'], event_type=event['type'], queue=event['queue'], hostname=event['hostname'], pid=event['pid'], name=event['name'], workf...
764,412
List all available workflows. Returns a list of all workflows that are available from the paths specified in the config. A workflow is defined as a Python file with at least one DAG. Args: config (Config): Reference to the configuration object from which the settings are retrieved. ...
def list_workflows(config): workflows = [] for path in config.workflows: filenames = glob.glob(os.path.join(os.path.abspath(path), '*.py')) for filename in filenames: module_name = os.path.splitext(os.path.basename(filename))[0] workflow = Workflow() try...
764,417
Return a generator that yields workflow events. For every workflow event that is sent from celery this generator yields an event object. Args: config (Config): Reference to the configuration object from which the settings are retrieved. Returns: generator: A generator that...
def events(config):
    """Return a generator that yields workflow events.

    For every workflow event that is sent from celery this generator yields
    an event object; events of unsupported types are silently skipped.

    Args:
        config (Config): Reference to the configuration object from which
            the settings are retrieved.

    Returns:
        generator: A generator that yields event objects.
    """
    app = create_app(config)
    for raw_event in event_stream(app, filter_by_prefix='task'):
        try:
            yield create_event_model(raw_event)
        except JobEventTypeUnsupported:
            # Unknown event types are deliberately ignored.
            pass
764,419
Read the output of the process, execute the callback and save the output. Args: stream: A file object pointing to the output stream that should be read. callback(callable, None): A callback function that is called for each new line of output. output_file: A ...
def _read_output(self, stream, callback, output_file): if (callback is None and output_file is None) or stream.closed: return False line = stream.readline() if line: if callback is not None: callback(line.decode(), self._...
764,440
Resolves this query relative to the given node. Args: node (node.Document): The node to be evaluated. Returns: bool: Whether the given node matches this query.
def resolves_for(self, node):
    """Resolve this title query against the given document.

    Args:
        node (node.Document): The node to be evaluated.

    Returns:
        bool: Whether the document's title matches this query.
    """
    title = normalize_text(node.title)
    # Remember the actual title so failure messages can report it.
    self.actual_title = title
    match = self.search_regexp.search(title)
    return match is not None
764,445
Initialise the workflow option. Args: name (str): The name of the option under which the value will be stored. default: The default value that should be used when no value is specified. Set to None to make this a non-optional option. help (str): A short ...
def __init__(self, name, default=None, help=None, type=str):
    """Initialise the workflow option.

    Args:
        name (str): The name of the option under which the value is stored.
        default: The default value used when no value is specified; set to
            None to make this a non-optional option.
        help (str): A short help string describing the option.
        type: The Python type the option value is converted to.
    """
    # NOTE: `help` and `type` shadow builtins, but renaming them would
    # break callers that pass these arguments by keyword.
    self._name, self._default = name, default
    self._help, self._type = help, type
764,447
Convert the specified value to the type of the option. Args: value: The value that should be converted. Returns: The value with the type given by the option.
def convert(self, value): if self._type is str: return str(value) elif self._type is int: try: return int(value) except (UnicodeError, ValueError): raise WorkflowArgumentError('Cannot convert {} to int'.format(value)) e...
764,448
Returns the names of all options that are required but were not specified. All options that don't have a default value are required in order to run the workflow. Args: args (dict): A dictionary of the provided arguments that is checked for missing options. ...
def check_missing(self, args):
    """Return the names of all required options absent from the arguments.

    Every option without a default value is required in order to run the
    workflow.

    Args:
        args (dict): A dictionary of the provided arguments that is checked
            for missing options.

    Returns:
        list: The names of the required options that were not provided.
    """
    missing = []
    for option in self:
        required = option.default is None
        if required and option.name not in args:
            missing.append(option.name)
    return missing
764,449
Consolidate the provided arguments. If the provided arguments have matching options, this performs a type conversion. For any option that has a default value and is not present in the provided arguments, the default value is added. Args: args (dict): A dictionary of the pro...
def consolidate(self, args): result = dict(args) for opt in self: if opt.name in result: result[opt.name] = opt.convert(result[opt.name]) else: if opt.default is not None: result[opt.name] = opt.convert(opt.default) ...
764,450
Initialise the Action object. Args: data (MultiTaskData): The processed data from the task that should be passed on to successor tasks. limit (list): A list of names of all immediate successor tasks that should be executed.
def __init__(self, data, limit=None):
    """Initialise the Action object.

    Args:
        data (MultiTaskData): The processed data from the task that should
            be passed on to successor tasks.
        limit (list): Names of the immediate successor tasks that should be
            executed; None means all successors run.
    """
    self._data, self._limit = data, limit
764,451
Validate the graph by checking whether it is a directed acyclic graph. Args: graph (DiGraph): Reference to a DiGraph object from NetworkX. Raises: DirectedAcyclicGraphInvalid: If the graph is not a valid dag.
def validate(self, graph):
    """Validate that the graph is a directed acyclic graph.

    Args:
        graph (DiGraph): Reference to a DiGraph object from NetworkX.

    Raises:
        DirectedAcyclicGraphInvalid: If the graph is not a valid dag.
    """
    if nx.is_directed_acyclic_graph(graph):
        return
    raise DirectedAcyclicGraphInvalid(graph_name=self._name)
764,458
Create a copy of the dag object. This method keeps track of the number of copies that have been made. The number is appended to the name of the copy. Args: memo (dict): a dictionary that keeps track of the objects that have already been copied. Ret...
def __deepcopy__(self, memo):
    """Create a copy of the dag object.

    This method keeps track of the number of copies that have been made;
    the count is appended to the name of the copy.

    Args:
        memo (dict): A dictionary that keeps track of the objects that
            have already been copied.

    Returns:
        Dag: The copied dag object.
    """
    # NOTE: the counter mutates the *source* dag so each copy gets a
    # unique ":<n>" suffix.
    self._copy_counter += 1
    copy_name = '{}:{}'.format(self._name, self._copy_counter)
    dag_copy = Dag(copy_name,
                   autostart=self._autostart,
                   queue=self._queue)
    dag_copy._schema = deepcopy(self._schema, memo)
    return dag_copy
764,460
Merge the specified dataset on top of the existing data. This replaces all values in the existing dataset with the values from the given dataset. Args: dataset (TaskData): A reference to the TaskData object that should be merged on top of the existing object.
def merge(self, dataset): def merge_data(source, dest): for key, value in source.items(): if isinstance(value, dict): merge_data(value, dest.setdefault(key, {})) else: dest[key] = value return dest ...
764,462
Add a new dataset to the MultiTaskData. Args: task_name (str): The name of the task from which the dataset was received. dataset (TaskData): The dataset that should be added. aliases (list): A list of aliases that should be registered with the dataset.
def add_dataset(self, task_name, dataset=None, *, aliases=None): self._datasets.append(dataset if dataset is not None else TaskData()) last_index = len(self._datasets) - 1 self._aliases[task_name] = last_index if aliases is not None: for alias in aliases: ...
764,465
Add an alias pointing to the specified index. Args: alias (str): The alias that should point to the given index. index (int): The index of the dataset for which an alias should be added. Raises: DataInvalidIndex: If the index does not represent a valid dataset.
def add_alias(self, alias, index):
    """Add an alias pointing to the specified index.

    Args:
        alias (str): The alias that should point to the given index.
        index (int): The index of the dataset for which an alias should
            be added.

    Raises:
        DataInvalidIndex: If the index does not represent a valid dataset.
    """
    # Reject negative indices too: an alias must point at an actual
    # dataset position, not rely on Python's negative indexing. The
    # original `index >= len(...)` check let e.g. -1 slip through.
    if not 0 <= index < len(self._datasets):
        raise DataInvalidIndex('A dataset with index {} does not exist'.format(index))
    self._aliases[alias] = index
764,466
Set the default dataset by its alias. After changing the default dataset, all calls without explicitly specifying the dataset by index or alias will be redirected to this dataset. Args: alias (str): The alias of the dataset that should be made the default. Raises: ...
def set_default_by_alias(self, alias):
    """Set the default dataset by its alias.

    After changing the default dataset, all calls without explicitly
    specifying the dataset by index or alias are redirected to this dataset.

    Args:
        alias (str): The alias of the dataset that should be made the
            default.

    Raises:
        DataInvalidAlias: If the alias does not represent a valid dataset.
    """
    try:
        index = self._aliases[alias]
    except KeyError:
        raise DataInvalidAlias('A dataset with alias {} does not exist'.format(alias))
    self._default_index = index
764,468
Set the default dataset by its index. After changing the default dataset, all calls without explicitly specifying the dataset by index or alias will be redirected to this dataset. Args: index (int): The index of the dataset that should be made the default. Raises: ...
def set_default_by_index(self, index):
    """Set the default dataset by its index.

    After changing the default dataset, all calls without explicitly
    specifying the dataset by index or alias are redirected to this dataset.

    Args:
        index (int): The index of the dataset that should be made the
            default.

    Raises:
        DataInvalidIndex: If the index does not represent a valid dataset.
    """
    # Reject negative indices too: the original `index >= len(...)` check
    # let e.g. -1 through, making the default silently point at the last
    # dataset via Python's negative indexing.
    if not 0 <= index < len(self._datasets):
        raise DataInvalidIndex('A dataset with index {} does not exist'.format(index))
    self._default_index = index
764,469
Return a dataset by its alias. Args: alias (str): The alias of the dataset that should be returned. Raises: DataInvalidAlias: If the alias does not represent a valid dataset.
def get_by_alias(self, alias):
    """Return a dataset by its alias.

    Args:
        alias (str): The alias of the dataset that should be returned.

    Raises:
        DataInvalidAlias: If the alias does not represent a valid dataset.
    """
    if alias in self._aliases:
        return self.get_by_index(self._aliases[alias])
    raise DataInvalidAlias('A dataset with alias {} does not exist'.format(alias))
764,470
Return a dataset by its index. Args: index (int): The index of the dataset that should be returned. Raises: DataInvalidIndex: If the index does not represent a valid dataset.
def get_by_index(self, index):
    """Return a dataset by its index.

    Args:
        index (int): The index of the dataset that should be returned.

    Raises:
        DataInvalidIndex: If the index does not represent a valid dataset.
    """
    # Reject negative indices too: the original `index >= len(...)` check
    # let e.g. -1 through and silently returned the last dataset via
    # Python's negative indexing instead of raising as documented.
    if not 0 <= index < len(self._datasets):
        raise DataInvalidIndex('A dataset with index {} does not exist'.format(index))
    return self._datasets[index]
764,471