docstring
stringlengths
52
499
function
stringlengths
67
35.2k
__index_level_0__
int64
52.6k
1.16M
Find a program in the resolved environment. Args: cmd: String name of the program to find. parent_environ: Environment to interpret the context within, defaults to os.environ if None. fallback: If True, and the program is not found in the context, ...
def which(self, cmd, parent_environ=None, fallback=False): env = self.get_environ(parent_environ=parent_environ) path = which(cmd, env=env) if fallback and path is None: path = which(cmd) return path
233,423
Convert context to dict containing only builtin types. Args: fields (list of str): If present, only write these fields into the dict. This can be used to avoid constructing expensive fields (such as 'graph') for some cases. Returns: dict: Dictifi...
def to_dict(self, fields=None): data = {} def _add(field): return (fields is None or field in fields) if _add("resolved_packages"): resolved_packages = [] for pkg in (self._resolved_packages or []): resolved_packages.append(pkg.handl...
233,427
Load a `ResolvedContext` from a dict. Args: d (dict): Dict containing context data. identifier_str (str): String identifying the context, this is only used to display in an error string if a serialization version mismatch is detected. Returns: ...
def from_dict(cls, d, identifier_str=None): # check serialization version def _print_version(value): return '.'.join(str(x) for x in value) toks = str(d["serialize_version"]).split('.') load_ver = tuple(int(x) for x in toks) curr_ver = ResolvedContext.serial...
233,428
Create a config. Args: filepaths (list of str): List of config files to load. overrides (dict): A dict containing settings that override all others. Nested settings are overridden with nested dicts. locked: If True, settings overrides in environment variables...
def __init__(self, filepaths, overrides=None, locked=False): self.filepaths = filepaths self._sourced_filepaths = None self.overrides = overrides or {} self.locked = locked
233,595
Runs a python subproc to calculate a package attribute. Args: attr (str): Name of package attribute being created. src (list of str): Python code to execute, will be converted into semicolon-delimited single line of code. Returns: str: Output of python process.
def exec_python(attr, src, executable="python"): import subprocess if isinstance(src, basestring): src = [src] p = popen([executable, "-c", "; ".join(src)], stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = p.communicate() if p.returncode: from rez.exce...
233,635
Escape the <, >, ^, and & special characters reserved by Windows. Args: value (str/EscapedString): String or already escaped string. Returns: str: The value escaped for Windows.
def escape_string(self, value): if isinstance(value, EscapedString): return value.formatted(self._escaper) return self._escaper(value)
233,675
Invoke a diff editor to show the difference between the source of two packages. Args: pkg1 (`Package`): Package to diff. pkg2 (`Package`): Package to diff against. If None, the next most recent package version is used.
def diff_packages(pkg1, pkg2=None): if pkg2 is None: it = iter_packages(pkg1.name) pkgs = [x for x in it if x.version < pkg1.version] if not pkgs: raise RezError("No package to diff with - %s is the earliest " "package version" % pkg1.qualified_nam...
233,734
Test the validity of a package name string. Args: name (str): Name to test. raise_error (bool): If True, raise an exception on failure Returns: bool.
def is_valid_package_name(name, raise_error=False): is_valid = PACKAGE_NAME_REGEX.match(name) if raise_error and not is_valid: raise PackageRequestError("Not a valid package name: %r" % name) return is_valid
233,778
Expand abbreviations in a format string. If an abbreviation does not match a field, or matches multiple fields, it is left unchanged. Example: >>> fields = ("hey", "there", "dude") >>> expand_abbreviations("hello {d}", fields) 'hello dude' Args: txt (str): Format stri...
def expand_abbreviations(txt, fields): def _expand(matchobj): s = matchobj.group("var") if s not in fields: matches = [x for x in fields if x.startswith(s)] if len(matches) == 1: s = matches[0] return "{%s}" % s return re.sub(FORMAT_VAR_REGEX,...
233,779
Expand shell variables of form $var and ${var}. Unknown variables are left unchanged. Args: text (str): String to expand. environ (dict): Environ dict to use for expansions, defaults to os.environ. Returns: The expanded string.
def expandvars(text, environ=None): if '$' not in text: return text i = 0 if environ is None: environ = os.environ while True: m = ENV_VAR_REGEX.search(text, i) if not m: break i, j = m.span(0) name = m.group(1) if name.startswit...
233,780
Like `columnise`, but with colored rows. Args: printer (`colorize.Printer`): Printer object. Note: The last entry in each row is the row color, or None for no coloring.
def print_colored_columns(printer, rows, padding=2): rows_ = [x[:-1] for x in rows] colors = [x[-1] for x in rows] for col, line in zip(colors, columnise(rows_, padding=padding)): printer(line, col)
233,783
Create a formatter. Args: instance: The object to format with. pretty: If True, references to non-string attributes such as lists are converted to basic form, with characters such as brackets and parentheses removed. expand: `StringFormatType`...
def __init__(self, instance, pretty=False, expand=StringFormatType.error): self.instance = instance self.pretty = pretty self.expand = expand
233,790
Create a Version object. Args: ver_str: Version string. make_token: Callable that creates a VersionToken subclass from a string.
def __init__(self, ver_str='', make_token=AlphanumericVersionToken): self.tokens = [] self.seps = [] self._str = None self._hash = None if ver_str: toks = re_token.findall(ver_str) if not toks: raise VersionError(ver_str) ...
233,808
Return a copy of the version, possibly with less tokens. Args: len_ (int): New version length. If >= current length, an unchanged copy of the version is returned.
def trim(self, len_): other = Version(None) other.tokens = self.tokens[:len_] other.seps = self.seps[:len_ - 1] return other
233,810
Create a VersionRange object. Args: range_str: Range string, such as "3", "3+<4.5", "2|6+". The range will be optimised, so the string representation of this instance may not match range_str. For example, "3+<6|4+<8" == "3+<8". make_token: Version token c...
def __init__(self, range_str='', make_token=AlphanumericVersionToken, invalid_bound_error=True): self._str = None self.bounds = [] # note: kept in ascending order if range_str is None: return try: parser = _VersionRangeParser(range_str,...
233,837
OR together version ranges. Calculates the union of this range with one or more other ranges. Args: other: VersionRange object (or list of) to OR with. Returns: New VersionRange object representing the union.
def union(self, other): if not hasattr(other, "__iter__"): other = [other] bounds = self.bounds[:] for range in other: bounds += range.bounds bounds = self._union(bounds) range = VersionRange(None) range.bounds = bounds return ran...
233,838
AND together version ranges. Calculates the intersection of this range with one or more other ranges. Args: other: VersionRange object (or list of) to AND with. Returns: New VersionRange object representing the intersection, or None if no ranges intersect.
def intersection(self, other): if not hasattr(other, "__iter__"): other = [other] bounds = self.bounds for range in other: bounds = self._intersection(bounds, range.bounds) if not bounds: return None range = VersionRange(None...
233,839
Create a range from lower_version..upper_version. Args: lower_version: Version object representing lower bound of the range. upper_version: Version object representing upper bound of the range. Returns: `VersionRange` object.
def as_span(cls, lower_version=None, upper_version=None, lower_inclusive=True, upper_inclusive=True): lower = (None if lower_version is None else _LowerBound(lower_version, lower_inclusive)) upper = (None if upper_version is None else _UpperBoun...
233,842
Create a range from a version. Args: version: Version object. This is used as the upper/lower bound of the range. op: Operation as a string. One of 'gt'/'>', 'gte'/'>=', lt'/'<', 'lte'/'<=', 'eq'/'=='. If None, a bounded range will be created ...
def from_version(cls, version, op=None): lower = None upper = None if op is None: lower = _LowerBound(version, True) upper = _UpperBound(version.next(), False) elif op in ("eq", "=="): lower = _LowerBound(version, True) upper = _U...
233,843
Create a range from a list of versions. This method creates a range that contains only the given versions and no other. Typically the range looks like (for eg) "==3|==4|==5.1". Args: versions: List of Version objects. Returns: `VersionRange` object.
def from_versions(cls, versions): range = cls(None) range.bounds = [] for version in dedup(sorted(versions)): lower = _LowerBound(version, True) upper = _UpperBound(version, True) bound = _Bound(lower, upper) range.bounds.append(bound) ...
233,844
Internal method to streamline the getting of data from the json Args: json_inp (json): json input from our caller ndx (int): index where the data is located in the api Returns: If pandas is present: DataFrame (pandas.DataFrame): data set from ndx within the API'...
def _api_scrape(json_inp, ndx): try: headers = json_inp['resultSets'][ndx]['headers'] values = json_inp['resultSets'][ndx]['rowSet'] except KeyError: # This is so ugly but this is what you get when your data comes out # in not a standard format try: head...
234,340
Internal method to streamline our requests / json getting Args: endpoint (str): endpoint to be called from the API params (dict): parameters to be passed to the API Raises: HTTPError: if requests hits a status code != 200 Returns: json (json): json object for selected API ...
def _get_json(endpoint, params, referer='scores'): h = dict(HEADERS) h['referer'] = 'http://stats.nba.com/{ref}/'.format(ref=referer) _get = get(BASE_URL.format(endpoint=endpoint), params=params, headers=h) # print _get.url _get.raise_for_status() return _get.json()
234,341
Return dict of name -> object pairs from a list of objects with unique names. Args: object_list: list of objects, each X of which has a unique name accessible as X.name.value Returns: dict, { X.name.value: X for x in object_list } If the list is empty or None, returns an empty dict.
def get_uniquely_named_objects_by_name(object_list): if not object_list: return dict() result = dict() for obj in object_list: name = obj.name.value if name in result: raise GraphQLCompilationError(u'Found duplicate object key: ' ...
235,096
Return a list of vertex fields, and a list of property fields, for the given AST node. Also verifies that all property fields for the AST node appear before all vertex fields, raising GraphQLCompilationError if that is not the case. Args: ast: GraphQL AST node, obtained from the graphql library ...
def _get_fields(ast): if not ast.selection_set: # There are no child fields. return [], [] property_fields = [] vertex_fields = [] seen_field_names = set() switched_to_vertices = False # Ensures that all property fields are before all vertex fields. for field_ast in ast.se...
235,132
Perform type checks on the enclosing type and the recursed type for a recurse directive. Args: current_schema_type: GraphQLType, the schema type at the current location field_schema_type: GraphQLType, the schema type at the inner scope context: dict, various per-compilation data (e.g. decla...
def _validate_recurse_directive_types(current_schema_type, field_schema_type, context): # Get the set of all allowed types in the current scope. type_hints = context['type_equivalence_hints'].get(field_schema_type) type_hints_inverse = context['type_equivalence_hints_inverse'].get(field_schema_type) ...
235,137
Construct the final ConstructResult basic block that defines the output format of the query. Args: outputs: dict, output name (string) -> output data dict, specifying the location from where to get the data, and whether the data is optional (and therefore may be missing); ...
def _compile_output_step(outputs): if not outputs: raise GraphQLCompilationError(u'No fields were selected for output! Please mark at least ' u'one field with the @output directive.') output_fields = {} for output_name, output_context in six.iteritems(outp...
235,144
Construct a new Variable object for the given variable name. Args: variable_name: string, should start with '$' and then obey variable naming rules (see validate_safe_string()) inferred_type: GraphQL type object, specifying the inferred type of the variable ...
def __init__(self, variable_name, inferred_type): variable_name = ensure_unicode_string(variable_name) super(Variable, self).__init__(variable_name, inferred_type) self.variable_name = variable_name self.inferred_type = inferred_type self.validate()
235,161
Construct a new GlobalContextField object that references a field at a given location. Args: location: Location, specifying where the field was declared. Returns: new GlobalContextField object
def __init__(self, location, field_type): super(GlobalContextField, self).__init__(location, field_type) self.location = location self.field_type = field_type self.validate()
235,168
Construct a new OutputContextField object for the field at the given location. Args: location: Location, specifying where the field was declared. The Location must point to a property, and that property's value is output as the result. field_type: GraphQL type obje...
def __init__(self, location, field_type): super(OutputContextField, self).__init__(location, field_type) self.location = location self.field_type = field_type self.validate()
235,174
Construct a new FoldedContextField object for this folded field. Args: fold_scope_location: FoldScopeLocation specifying the location of the context field being output. field_type: GraphQL type object, specifying the type of the field being output. ...
def __init__(self, fold_scope_location, field_type): super(FoldedContextField, self).__init__(fold_scope_location, field_type) self.fold_scope_location = fold_scope_location self.field_type = field_type self.validate()
235,179
Construct a new FoldCountContextField object for this fold. Args: fold_scope_location: FoldScopeLocation specifying the fold whose size is being output. Returns: new FoldCountContextField object
def __init__(self, fold_scope_location): super(FoldCountContextField, self).__init__(fold_scope_location) self.fold_scope_location = fold_scope_location self.validate()
235,183
Construct a new ContextFieldExistence object for a vertex field from the global context. Args: location: Location, specifying where the field was declared. Must point to a vertex. Returns: new ContextFieldExistence expression which evaluates to True iff the vertex exists
def __init__(self, location): super(ContextFieldExistence, self).__init__(location) self.location = location self.validate()
235,186
Construct an expression that connects two expressions with an operator. Args: operator: unicode, specifying where the field was declared left: Expression on the left side of the binary operator right: Expression on the right side of the binary operator Returns: ...
def __init__(self, operator, left, right): super(BinaryComposition, self).__init__(operator, left, right) self.operator = operator self.left = left self.right = right self.validate()
235,193
Construct an expression that evaluates a predicate and returns one of two results. Args: predicate: Expression to evaluate, and based on which to choose the returned value if_true: Expression to return if the predicate was true if_false: Expression to return if the predicate...
def __init__(self, predicate, if_true, if_false): super(TernaryConditional, self).__init__(predicate, if_true, if_false) self.predicate = predicate self.if_true = if_true self.if_false = if_false self.validate()
235,198
Assert that IR blocks originating from the frontend do not have nonsensical structure. Args: ir_blocks: list of BasicBlocks representing the IR to sanity-check Raises: AssertionError, if the IR has unexpected structure. If the IR produced by the front-end cannot be successfully and cor...
def sanity_check_ir_blocks_from_frontend(ir_blocks, query_metadata_table): if not ir_blocks: raise AssertionError(u'Received no ir_blocks: {}'.format(ir_blocks)) _sanity_check_fold_scope_locations_are_unique(ir_blocks) _sanity_check_no_nested_folds(ir_blocks) _sanity_check_query_root_block...
235,203
Attempt to get a column by name from the selectable. Args: column_name: str, name of the column to retrieve. node: SqlNode, the node the column is being retrieved for. context: CompilationContext, compilation specific metadata. Returns: Optional[column], the SQLAlchemy column i...
def try_get_column(column_name, node, context): selectable = get_node_selectable(node, context) if not hasattr(selectable, 'c'): raise AssertionError( u'Selectable "{}" does not have a column collection. Context is {}.'.format( selectable, context)) return selectable...
235,242
Get a column by name from the selectable. Args: column_name: str, name of the column to retrieve. node: SqlNode, the node the column is being retrieved for. context: CompilationContext, compilation specific metadata. Returns: column, the SQLAlchemy column if found. Raises an As...
def get_column(column_name, node, context): column = try_get_column(column_name, node, context) if column is None: selectable = get_node_selectable(node, context) raise AssertionError( u'Column "{}" not found in selectable "{}". Columns present are {}. ' u'Context is...
235,243
Return a dict of directive name to directive object for the given AST node. Any directives that are allowed to exist more than once on any AST node are ignored. For any directives that can only exist up to once, we verify that they are not duplicated raising GraphQLCompilationError in case we find them mor...
def get_unique_directives(ast): if not ast.directives: return dict() result = dict() for directive_obj in ast.directives: directive_name = directive_obj.name.value if directive_name in ALLOWED_DUPLICATED_DIRECTIVES: pass # We don't return these. elif direct...
235,244
Insert the arguments into the compiled MATCH query to form a complete query. Args: compilation_result: a CompilationResult object derived from the GraphQL compiler arguments: dict, mapping argument name to its value, for every parameter the query expects. Returns: string, a MATCH query...
def insert_arguments_into_match_query(compilation_result, arguments): if compilation_result.language != MATCH_LANGUAGE: raise AssertionError(u'Unexpected query output language: {}'.format(compilation_result)) base_query = compilation_result.query argument_types = compilation_result.input_metad...
235,255
Split a list of IR blocks into per-location MATCH steps. Args: pruned_ir_blocks: list of IR basic block objects that have gone through a lowering step. Returns: list of MatchStep namedtuples, each of which contains all basic blocks that correspond to a single MATCH step.
def _split_ir_into_match_steps(pruned_ir_blocks): output = [] current_tuple = None for block in pruned_ir_blocks: if isinstance(block, OutputSource): # OutputSource blocks do not require any MATCH code, and only serve to help # optimizations and debugging. Simply omit th...
235,259
Insert the arguments into the compiled SQL query to form a complete query. Args: compilation_result: CompilationResult, compilation result from the GraphQL compiler. arguments: Dict[str, Any], parameter name -> value, for every parameter the query expects. Returns: SQLAlchemy Selectabl...
def insert_arguments_into_sql_query(compilation_result, arguments): if compilation_result.language != SQL_LANGUAGE: raise AssertionError(u'Unexpected query output language: {}'.format(compilation_result)) base_query = compilation_result.query return base_query.params(**arguments)
235,263
Construct an expression that is true when the field value is within the given bounds. Args: field: LocalField Expression, denoting the field in consideration lower_bound: lower bound constraint for given field upper_bound: upper bound constraint for given field Retu...
def __init__(self, field, lower_bound, upper_bound): super(BetweenClause, self).__init__(field, lower_bound, upper_bound) self.field = field self.lower_bound = lower_bound self.upper_bound = upper_bound self.validate()
235,271
Insert a path of optional Locations into the tree. Each OptionalTraversalTree object contains child Location objects as keys mapping to other OptionalTraversalTree objects. Args: optional_root_locations_path: list of optional root Locations all except the last ...
def insert(self, optional_root_locations_path): encountered_simple_optional = False parent_location = self._root_location for optional_root_location in optional_root_locations_path: if encountered_simple_optional: raise AssertionError(u'Encountered simple opt...
235,276
Return a SQLAlchemy Query from a passed SqlQueryTree. Args: sql_query_tree: SqlQueryTree, tree representation of the query to emit. compiler_metadata: SqlMetadata, SQLAlchemy specific metadata. Returns: SQLAlchemy Query
def emit_code_from_ir(sql_query_tree, compiler_metadata): context = CompilationContext( query_path_to_selectable=dict(), query_path_to_location_info=sql_query_tree.query_path_to_location_info, query_path_to_output_fields=sql_query_tree.query_path_to_output_fields, query_path_to_...
235,278
Create an aliased table for a SqlNode. Updates the relevant Selectable global context. Args: node: SqlNode, the current node. context: CompilationContext, global compilation state and metadata. Returns: Table, the newly aliased SQLAlchemy table.
def _create_table_and_update_context(node, context): schema_type_name = sql_context_helpers.get_schema_type_name(node, context) table = context.compiler_metadata.get_table(schema_type_name).alias() context.query_path_to_selectable[node.query_path] = table return table
235,279
Create a query from a SqlNode. Args: node: SqlNode, the current node. context: CompilationContext, global compilation state and metadata. Returns: Selectable, selectable of the generated query.
def _create_query(node, context): visited_nodes = [node] output_columns = _get_output_columns(visited_nodes, context) filters = _get_filters(visited_nodes, context) selectable = sql_context_helpers.get_node_selectable(node, context) query = select(output_columns).select_from(selectable).where(a...
235,280
Get the output columns for a list of SqlNodes. Args: nodes: List[SqlNode], the nodes to get output columns from. context: CompilationContext, global compilation state and metadata. Returns: List[Column], list of SqlAlchemy Columns to output for this query.
def _get_output_columns(nodes, context): columns = [] for node in nodes: for sql_output in sql_context_helpers.get_outputs(node, context): field_name = sql_output.field_name column = sql_context_helpers.get_column(field_name, node, context) column = column.label(...
235,281
Get filters to apply to a list of SqlNodes. Args: nodes: List[SqlNode], the SqlNodes to get filters for. context: CompilationContext, global compilation state and metadata. Returns: List[Expression], list of SQLAlchemy expressions.
def _get_filters(nodes, context): filters = [] for node in nodes: for filter_block in sql_context_helpers.get_filters(node, context): filter_sql_expression = _transform_filter_to_sql(filter_block, node, context) filters.append(filter_sql_expression) return filters
235,282
Transform a Filter block to its corresponding SQLAlchemy expression. Args: filter_block: Filter, the Filter block to transform. node: SqlNode, the node Filter block applies to. context: CompilationContext, global compilation state and metadata. Returns: Expression, SQLAlchemy e...
def _transform_filter_to_sql(filter_block, node, context): expression = filter_block.predicate return _expression_to_sql(expression, node, context)
235,283
Recursively transform a Filter block predicate to its SQLAlchemy expression representation. Args: expression: expression, the compiler expression to transform. node: SqlNode, the SqlNode the expression applies to. context: CompilationContext, global compilation state and metadata. Retu...
def _expression_to_sql(expression, node, context): _expression_transformers = { expressions.LocalField: _transform_local_field_to_expression, expressions.Variable: _transform_variable_to_expression, expressions.Literal: _transform_literal_to_expression, expressions.BinaryComposi...
235,284
Transform a BinaryComposition compiler expression into a SQLAlchemy expression. Recursively calls _expression_to_sql to convert its left and right sub-expressions. Args: expression: expression, BinaryComposition compiler expression. node: SqlNode, the SqlNode the expression applies to. ...
def _transform_binary_composition_to_expression(expression, node, context): if expression.operator not in constants.SUPPORTED_OPERATORS: raise NotImplementedError( u'Filter operation "{}" is not supported by the SQL backend.'.format( expression.operator)) sql_operator = ...
235,285
Transform a Variable compiler expression into its SQLAlchemy expression representation. Args: expression: expression, Variable compiler expression. node: SqlNode, the SqlNode the expression applies to. context: CompilationContext, global compilation state and metadata. Returns: ...
def _transform_variable_to_expression(expression, node, context): variable_name = expression.variable_name if not variable_name.startswith(u'$'): raise AssertionError(u'Unexpectedly received variable name {} that is not ' u'prefixed with "$"'.format(variable_name)) ...
235,287
Transform a LocalField compiler expression into its SQLAlchemy expression representation. Args: expression: expression, LocalField compiler expression. node: SqlNode, the SqlNode the expression applies to. context: CompilationContext, global compilation state and metadata. Returns: ...
def _transform_local_field_to_expression(expression, node, context): column_name = expression.field_name column = sql_context_helpers.get_column(column_name, node, context) return column
235,288
Optimize comparisons of a boolean binary comparison expression against a boolean literal. Rewriting example: BinaryComposition( '=', BinaryComposition('!=', something, NullLiteral) False) The above is rewritten into: BinaryComposition('=', something, NullLit...
def optimize_boolean_expression_comparisons(ir_blocks): operator_inverses = { u'=': u'!=', u'!=': u'=', } def visitor_fn(expression): if not isinstance(expression, BinaryComposition): return expression left_is_binary_composition = isinstance(expres...
235,291
Extract all @fold data from the IR blocks, and cut the folded IR blocks out of the IR. Args: ir_blocks: list of IR blocks to extract fold data from Returns: tuple (folds, remaining_ir_blocks): - folds: dict of FoldScopeLocation -> list of IR blocks corresponding to that @fold scope. ...
def extract_folds_from_ir_blocks(ir_blocks): folds = dict() remaining_ir_blocks = [] current_folded_blocks = [] in_fold_location = None for block in ir_blocks: if isinstance(block, Fold): if in_fold_location is not None: raise AssertionError(u'in_fold_locati...
235,292
If location A translates to B, and B to C, then make A translate directly to C. Args: location_translations: dict of Location -> Location, where the key translates to the value. Mutated in place for efficiency and simplicity of implementation.
def _flatten_location_translations(location_translations): sources_to_process = set(six.iterkeys(location_translations)) def _update_translation(source): destination = location_translations[source] if destination not in location_translations: # "destination" cannot be ...
235,302
Return a dict mapping location -> list of filters applied at that location. Args: match_query: MatchQuery object from which to extract location -> filters dict Returns: dict mapping each location in match_query to a list of Filter objects applied at that location
def _construct_location_to_filter_list(match_query): # For each location, all filters for that location should be applied at the first instance. # This function collects a list of all filters corresponding to each location # present in the given MatchQuery. location_to_filters = {} for match_tr...
235,311
Lower BinaryCompositions involving non-existent ContextFields to True. Args: present_locations: set of all locations in the current MatchQuery that have not been pruned expression: BinaryComposition with at least one ContextField operand Returns: TrueLiteral iff either ContextField ope...
def _update_context_field_binary_composition(present_locations, expression): if not any((isinstance(expression.left, ContextField), isinstance(expression.right, ContextField))): raise AssertionError(u'Received a BinaryComposition {} without any ContextField ' ...
235,315
Return a simplified BinaryComposition if either operand is a TrueLiteral. Args: expression: BinaryComposition without any ContextField operand(s) Returns: simplified expression if the given expression is a disjunction/conjunction and one of it's operands is a TrueLiteral, and t...
def _simplify_non_context_field_binary_composition(expression): if any((isinstance(expression.left, ContextField), isinstance(expression.right, ContextField))): raise AssertionError(u'Received a BinaryComposition {} with a ContextField ' u'operand. This should n...
235,316
Insert the arguments into the compiled GraphQL query to form a complete query. Args: compilation_result: a CompilationResult object derived from the GraphQL compiler arguments: dict, mapping argument name to its value, for every parameter the query expects. Returns: string, a query in ...
def insert_arguments_into_query(compilation_result, arguments): _ensure_arguments_are_provided(compilation_result.input_metadata, arguments) if compilation_result.language == MATCH_LANGUAGE: return insert_arguments_into_match_query(compilation_result, arguments) elif compilation_result.languag...
235,356
Construct a ConstructResult object that maps the given field names to their expressions. Args: fields: dict, variable name string -> Expression see rules for variable names in validate_safe_string(). Returns: new ConstructResult object
def __init__(self, fields): self.fields = { ensure_unicode_string(key): value for key, value in six.iteritems(fields) } # All key values are normalized to unicode before being passed to the parent constructor, # which saves them to enable human-readable ...
235,362
Create a new MarkLocation at the specified Location. Args: location: Location object, must not be at a property field in the query Returns: new MarkLocation object
def __init__(self, location): super(MarkLocation, self).__init__(location) self.location = location self.validate()
235,369
Create a new Traverse block in the given direction and across the given edge. Args: direction: string, 'in' or 'out' edge_name: string obeying variable name rules (see validate_safe_string). optional: optional bool, specifying whether the traversal to the given location ...
def __init__(self, direction, edge_name, optional=False, within_optional_scope=False): super(Traverse, self).__init__( direction, edge_name, optional=optional, within_optional_scope=within_optional_scope) self.direction = direction self.edge_name = edge_name self.opt...
235,371
Create a new Recurse block which traverses the given edge up to "depth" times. Args: direction: string, 'in' or 'out'. edge_name: string obeying variable name rules (see validate_safe_string). depth: int, always greater than or equal to 1. Returns: new R...
def __init__(self, direction, edge_name, depth, within_optional_scope=False): super(Recurse, self).__init__( direction, edge_name, depth, within_optional_scope=within_optional_scope) self.direction = direction self.edge_name = edge_name self.depth = depth # D...
235,374
Create a new Backtrack block, returning to the given location in the query. Args: location: Location object, specifying where to backtrack to optional: optional bool, specifying whether the steps between the current location and the location to which Backtrack is r...
def __init__(self, location, optional=False):
    """Create a new Backtrack block returning to the given query location.

    Args:
        location: Location object specifying where to backtrack to.
        optional: bool, whether the steps between the current location and
            the backtrack target are optional in the query.
    """
    super(Backtrack, self).__init__(location, optional=optional)
    self.location = location
    self.optional = optional
    # Validation method defined on the class; not visible in this view.
    self.validate()
235,377
Create a map from each query path to a LocationInfo at that path. Args: query_metadata_table: QueryMetadataTable, object containing all metadata collected during query processing, including location metadata (e.g. which locations are folded or opt...
def _map_query_path_to_location_info(query_metadata_table): query_path_to_location_info = {} for location, location_info in query_metadata_table.registered_locations: if not isinstance(location, Location): continue if location.query_path in query_path_to_location_info: ...
235,385
Return True if LocationInfo objects are equivalent for the SQL backend, False otherwise. LocationInfo objects are considered equal for the SQL backend iff the optional scopes depth, recursive scopes depth, types and parent query paths are equal. Args: left: LocationInfo, left location info object ...
def _location_infos_equal(left, right): if not isinstance(left, LocationInfo) or not isinstance(right, LocationInfo): raise AssertionError( u'Unsupported LocationInfo comparison between types {} and {} ' u'with values {}, {}'.format(type(left), type(right), left, right)) opt...
235,386
Approximate the 95% confidence interval for Student's T distribution. Given the degrees of freedom, returns an approximation to the 95% confidence interval for the Student's T distribution. Args: df: An integer, the number of degrees of freedom. Returns: A float.
def tdist95conf_level(df): df = int(round(df)) highest_table_df = len(_T_DIST_95_CONF_LEVELS) if df >= 200: return 1.960 if df >= 100: return 1.984 if df >= 80: return 1.990 if df >= 60: return 2.000 if df >= 50: return 2.009 if df >= 40: ...
235,446
Find the pooled sample variance for two samples. Args: sample1: one sample. sample2: the other sample. Returns: Pooled sample variance, as a float.
def pooled_sample_variance(sample1, sample2): deg_freedom = len(sample1) + len(sample2) - 2 mean1 = statistics.mean(sample1) squares1 = ((x - mean1) ** 2 for x in sample1) mean2 = statistics.mean(sample2) squares2 = ((x - mean2) ** 2 for x in sample2) return (math.fsum(squares1) + math.fsu...
235,447
Calculate a t-test score for the difference between two samples. Args: sample1: one sample. sample2: the other sample. Returns: The t-test score, as a float.
def tscore(sample1, sample2):
    """Calculate Student's t-score for the difference between two samples.

    Args:
        sample1: one sample (sequence of numbers).
        sample2: the other sample; must have the same length as sample1.

    Returns:
        The t-test score, as a float.

    Raises:
        ValueError: if the two samples differ in length.
    """
    if len(sample1) != len(sample2):
        raise ValueError("different number of values")
    mean_delta = statistics.mean(sample1) - statistics.mean(sample2)
    # Standard error of the mean difference, from the pooled variance.
    standard_error = pooled_sample_variance(sample1, sample2) / len(sample1)
    return mean_delta / math.sqrt(standard_error * 2)
235,448
Determine whether two samples differ significantly. This uses a Student's two-sample, two-tailed t-test with alpha=0.95. Args: sample1: one sample. sample2: the other sample. Returns: (significant, t_score) where significant is a bool indicating whether the two samples dif...
def is_significant(sample1, sample2):
    """Determine whether two samples differ significantly.

    Uses a Student's two-sample, two-tailed t-test with alpha=0.95.

    Args:
        sample1: one sample.
        sample2: the other sample.

    Returns:
        (significant, t_score) tuple: a bool indicating whether the samples
        differ significantly, and the computed t-score.
    """
    degrees_of_freedom = len(sample1) + len(sample2) - 2
    threshold = tdist95conf_level(degrees_of_freedom)
    score = tscore(sample1, sample2)
    significant = abs(score) >= threshold
    return (significant, score)
235,449
N-Queens solver. Args: queen_count: the number of queens to solve for. This is also the board size. Yields: Solutions to the problem. Each yielded value is looks like (3, 8, 2, 1, 4, ..., 6) where each number is the column position for the queen, and the index into ...
def n_queens(queen_count):
    """Brute-force N-Queens solver.

    Args:
        queen_count: the number of queens to solve for; also the board size.

    Yields:
        Solutions as tuples like (3, 8, 2, 1, 4, ..., 6): each value is a
        queen's column, its index in the tuple is the queen's row.
    """
    rows = range(queen_count)
    for candidate in permutations(rows):
        # A permutation already guarantees distinct columns; it is a valid
        # placement iff the two diagonal projections are also all distinct.
        rising_diagonals = set(candidate[row] + row for row in rows)
        falling_diagonals = set(candidate[row] - row for row in rows)
        if len(rising_diagonals) == queen_count and len(falling_diagonals) == queen_count:
            yield candidate
235,509
Filters out benchmarks not supported by both Pythons. Args: benchmarks: a set() of benchmark names bench_funcs: dict mapping benchmark names to functions python: the interpreter commands (as lists) Returns: The filtered set of benchmark names
def filter_benchmarks(benchmarks, bench_funcs, base_ver): for bm in list(benchmarks): func = bench_funcs[bm] if getattr(func, '_python2_only', False) and (3, 0) <= base_ver: benchmarks.discard(bm) logging.info("Skipping Python2-only benchmark %s; " ...
235,548
Recursively expand benchmark names. Args: bm_name: string naming a benchmark or benchmark group. Yields: Names of actual benchmarks, with all group names fully expanded.
def expand_benchmark_name(bm_name, bench_groups):
    """Recursively expand a benchmark or benchmark-group name.

    Args:
        bm_name: string naming a benchmark or benchmark group.
        bench_groups: dict mapping group names to lists of member names
            (members may themselves be group names).

    Yields:
        Names of actual benchmarks, with all group names fully expanded.
    """
    expansion = bench_groups.get(bm_name)
    if expansion:
        # Fix: the original reused ``name`` for both the outer and inner loop
        # variables, shadowing the member being expanded; use distinct names.
        for member in expansion:
            for expanded in expand_benchmark_name(member, bench_groups):
                yield expanded
    else:
        # Not a (non-empty) group: the name is an actual benchmark.
        yield bm_name
235,549
Checks if the connection to provided ``host`` and ``port`` is possible or not. Args: host (str): Hostname for the host to check connection. port (int): Port name of the host to check connection on.
def _can_connect(host, port=22): # type: (str, int) -> bool try: logger.debug('Testing connection to host %s', host) client = paramiko.SSHClient() client.load_system_host_keys() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) client.connect(host, ...
237,149
Add a signal-based timeout to any block of code. If multiple time units are specified, they will be added together to determine time limit. Usage: with timeout(seconds=5): my_slow_function(...) Args: - seconds: The time limit, in seconds. - minutes: The time limit, in minutes. ...
def timeout(seconds=0, minutes=0, hours=0): limit = seconds + 60 * minutes + 3600 * hours def handler(signum, frame): # pylint: disable=W0613 raise TimeoutError('timed out after {} seconds'.format(limit)) try: signal.signal(signal.SIGALRM, handler) signal.setitimer(signal.IT...
237,156
Prepare a Python script (or module) to be imported as a module. If the script does not contain a setup.py file, it creates a minimal setup. Args: path (str): path to directory with the script or module. name (str): name of the script or module.
def prepare(path, name): # type: (str, str) -> None setup_path = os.path.join(path, 'setup.py') if not os.path.exists(setup_path): data = textwrap.dedent( % name) logger.info('Module %s does not provide a setup.py. \nGenerating setup.py' % name) _files.write_file(setup_path, data...
237,157
Install a Python module in the executing Python environment. Args: path (str): Real path location of the Python module. capture_error (bool): Default false. If True, the running process captures the stderr, and appends it to the returned Exception message in case of errors.
def install(path, capture_error=False): # type: (str, bool) -> None cmd = '%s -m pip install -U . ' % _process.python_executable() if has_requirements(path): cmd += '-r requirements.txt' logger.info('Installing module with the following command:\n%s', cmd) _process.check_error(shlex.spl...
237,158
Install the user provided entry point to be executed as follow: - add the path to sys path - if the user entry point is a command, gives exec permissions to the script Args: name (str): name of the script or module. dst (str): path to directory with the script or module. cap...
def install(name, dst, capture_error=False): if dst not in sys.path: sys.path.insert(0, dst) entrypoint_type = _entry_point_type.get(dst, name) if entrypoint_type is _entry_point_type.PYTHON_PACKAGE: _modules.install(dst, capture_error) if entrypoint_type is _entry_point_type.COMMA...
237,170
Set logger configuration. Args: level (int): Logger level format (str): Logger format
def configure_logger(level, format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s'): # type: (int, str) -> None logging.basicConfig(format=format, level=level) if level >= logging.INFO: logging.getLogger('boto3').setLevel(logging.INFO) logging.getLogger('s3transfer').setLevel(log...
237,171
Transform a dictionary in a dictionary of env vars. Example: >>>env_vars = mapping.to_env_vars({'model_dir': '/opt/ml/model', 'batch_size': 25}) >>> >>>print(args) ['MODEL_DIR', '/opt/ml/model', 'BATCH_SIZE', 25] Args: mapping (dict[str, object]): A Python mapping....
def to_env_vars(mapping): # type: (dict) -> dict def format_key(key): if key: decoded_name = 'SM_%s' % str(key).upper() return decoded_name else: return '' def format_value(_mapping): if six.PY3 and isinstance(_mapping, six.binary_type...
237,182
Transform a dictionary in a list of cmd arguments. Example: >>>args = mapping.to_cmd_args({'model_dir': '/opt/ml/model', 'batch_size': 25}) >>> >>>print(args) ['--model_dir', '/opt/ml/model', '--batch_size', 25] Args: mapping (dict[str, object]): A Python mapping. Ret...
def to_cmd_args(mapping): # type: (dict) -> list sorted_keys = sorted(mapping.keys()) def arg_name(obj): string = _decode(obj) if string: return u'--%s' % string if len(string) > 1 else u'-%s' % string else: return u'' arg_names = [arg_name(argument) ...
237,183
Decode an object to unicode. Args: obj (bytes or str or unicode or anything serializable): object to be decoded Returns: object decoded in unicode.
def _decode(obj): # type: (bytes or str or unicode or object) -> unicode # noqa ignore=F821 if obj is None: return u'' if six.PY3 and isinstance(obj, six.binary_type): # transforms a byte string (b'') in unicode return obj.decode('latin1') elif six.PY3: # PY3 strings ar...
237,184
Function responsible to serialize the prediction for the response. Args: prediction (obj): prediction returned by predict_fn . accept (str): accept content-type expected by the client. Returns: (worker.Response): a Flask response object with the following args: * Args: ...
def default_output_fn(prediction, accept):
    """Serialize the prediction into a response for the client.

    Args:
        prediction (obj): prediction returned by predict_fn.
        accept (str): accept content-type expected by the client.

    Returns:
        _worker.Response: response carrying the encoded prediction with the
        requested mimetype.
    """
    payload = _encoders.encode(prediction, accept)
    return _worker.Response(response=payload, mimetype=accept)
237,189
Write the dictionary env_vars to the system as environment variables. Args: env_vars (dict): mapping of environment variable names to values. Returns: None
def write_env_vars(env_vars=None):  # type: (dict) -> None
    """Write the dictionary ``env_vars`` to the system as environment variables.

    PYTHONPATH is always set (or overwritten) to the current ``sys.path``,
    joined with ':'.

    Args:
        env_vars (dict): mapping of environment variable names to string
            values. Defaults to an empty mapping.
    """
    # Fix: copy before adding PYTHONPATH so the caller's dict is not mutated.
    env_vars = dict(env_vars or {})
    env_vars['PYTHONPATH'] = ':'.join(sys.path)
    for name, value in env_vars.items():
        os.environ[name] = value
237,205
Convert an array like object to the NPY format. To understand better what an array like object is see: https://docs.scipy.org/doc/numpy/user/basics.creation.html#converting-python-array-like-objects-to-numpy-arrays Args: array_like (np.array or Iterable or int or float): array like object to be co...
def array_to_npy(array_like):  # type: (np.array or Iterable or int or float) -> object
    """Serialize an array-like object to bytes in the NPY format.

    Args:
        array_like (np.array or Iterable or int or float): array-like object
            to be converted.

    Returns:
        bytes: the NPY-encoded payload.
    """
    out = BytesIO()
    np.save(out, array_like)
    out.seek(0)
    return out.read()
237,210
Convert an NPY array into numpy. Args: npy_array (npy array): to be converted to numpy array Returns: (np.array): converted numpy array.
def npy_to_numpy(npy_array):  # type: (object) -> np.array
    """Deserialize NPY-formatted bytes into a numpy array.

    Args:
        npy_array (bytes): NPY payload to be converted.

    Returns:
        np.array: the decoded numpy array.
    """
    buffer = BytesIO(npy_array)
    # allow_pickle preserves the original behavior: object arrays load too.
    return np.load(buffer, allow_pickle=True)
237,211
Convert an array like object to JSON. To understand better what an array like object is see: https://docs.scipy.org/doc/numpy/user/basics.creation.html#converting-python-array-like-objects-to-numpy-arrays Args: array_like (np.array or Iterable or int or float): array like object to be converted to...
def array_to_json(array_like):  # type: (np.array or Iterable or int or float) -> str
    """Serialize an array-like object to a JSON string.

    Objects exposing ``tolist()`` (e.g. numpy arrays) are converted to plain
    lists first; anything else non-serializable raises TypeError via the
    stock encoder.

    Args:
        array_like (np.array or Iterable or int or float): object to convert.

    Returns:
        str: JSON representation.
    """
    def _fallback(obj):
        if hasattr(obj, 'tolist'):
            return obj.tolist()
        # Delegates to the stock encoder, which raises TypeError.
        return json.JSONEncoder().default(obj)

    return json.dumps(array_like, default=_fallback)
237,212
Convert a JSON object to a numpy array. Args: string_like (str): JSON string. dtype (dtype, optional): Data type of the resulting array. If None, the dtypes will be determined by the contents of each column, individually. This argument can only b...
def json_to_numpy(string_like, dtype=None):  # type: (str) -> np.array
    """Convert a JSON string to a numpy array.

    Args:
        string_like (str): JSON string.
        dtype (dtype, optional): data type of the resulting array; if None,
            numpy infers it from the parsed contents.

    Returns:
        np.array: the converted array.
    """
    parsed = json.loads(string_like)
    return np.array(parsed, dtype=dtype)
237,213
Convert a CSV object to a numpy array. Args: string_like (str): CSV string. dtype (dtype, optional): Data type of the resulting array. If None, the dtypes will be determined by the contents of each column, individually. This argument can only be used to ...
def csv_to_numpy(string_like, dtype=None):  # type: (str) -> np.array
    """Convert a CSV string to a numpy array.

    Args:
        string_like (str): CSV text.
        dtype (dtype, optional): data type of the resulting array; if None,
            numpy determines it per column.

    Returns:
        np.array: the parsed array.
    """
    buf = StringIO(string_like)
    return np.genfromtxt(buf, dtype=dtype, delimiter=',')
237,214
Convert an array like object to CSV. To understand better what an array like object is see: https://docs.scipy.org/doc/numpy/user/basics.creation.html#converting-python-array-like-objects-to-numpy-arrays Args: array_like (np.array or Iterable or int or float): array like object to be converted to ...
def array_to_csv(array_like):  # type: (np.array or Iterable or int or float) -> str
    """Convert an array-like object to CSV text.

    Args:
        array_like (np.array or Iterable or int or float): object to convert.

    Returns:
        str: CSV representation, one row per line, '%s'-formatted values.
    """
    out = StringIO()
    np.savetxt(out, array_like, delimiter=',', fmt='%s')
    return out.getvalue()
237,215
Decode an object in one of the default content types to a numpy array. Args: obj (object): to be decoded. content_type (str): content type to be used. Returns: np.array: decoded object.
def decode(obj, content_type):
    # type: (np.array or Iterable or int or float, str) -> np.array
    """Decode an object in one of the default content types to a numpy array.

    Args:
        obj (object): serialized payload to be decoded.
        content_type (str): content type describing how ``obj`` is encoded.

    Returns:
        np.array: decoded object.

    Raises:
        _errors.UnsupportedFormatError: if no decoder is registered for
            ``content_type``.
    """
    # Fix: only the map lookup means "unsupported format". The original
    # wrapped the decoder call too, so a KeyError raised *inside* a decoder
    # was misreported as an unsupported content type.
    try:
        decoder = _decoders_map[content_type]
    except KeyError:
        raise _errors.UnsupportedFormatError(content_type)
    return decoder(obj)
237,216
Encode an array like object in a specific content_type to a numpy array. To understand better what an array like object is see: https://docs.scipy.org/doc/numpy/user/basics.creation.html#converting-python-array-like-objects-to-numpy-arrays Args: array_like (np.array or Iterable or int or float): t...
def encode(array_like, content_type):
    # type: (np.array or Iterable or int or float, str) -> np.array
    """Encode an array-like object into the requested content type.

    Args:
        array_like (np.array or Iterable or int or float): object to encode.
        content_type (str): target content type.

    Returns:
        object: the encoded payload.

    Raises:
        _errors.UnsupportedFormatError: if no encoder is registered for
            ``content_type``.
    """
    # Fix: only the map lookup means "unsupported format". The original
    # wrapped the encoder call too, so a KeyError raised *inside* an encoder
    # was misreported as an unsupported content type.
    try:
        encoder = _encoders_map[content_type]
    except KeyError:
        raise _errors.UnsupportedFormatError(content_type)
    return encoder(array_like)
237,217
Create a file 'failure' if training fails after all algorithm output (for example, logging) completes, the failure description should be written to this file. In a DescribeTrainingJob response, Amazon SageMaker returns the first 1024 characters from this file as FailureReason. See: https://docs.aws.amazon.c...
def write_failure_file(failure_msg):  # type: (str) -> None
    """Write the training failure description to the 'failure' file.

    SageMaker reads this file after a failed training job; the first 1024
    characters are returned as FailureReason in DescribeTrainingJob.

    Args:
        failure_msg (str): description of why training failed.
    """
    failure_path = os.path.join(_env.output_dir, 'failure')
    write_file(failure_path, failure_msg)
237,223
Write data to a file. Args: path (str): path to the file. data (str): data to be written to the file. mode (str): mode which the file will be open.
def write_file(path, data, mode='w'):  # type: (str, str, str) -> None
    """Write data to a file.

    Args:
        path (str): path to the file.
        data (str): data to be written.
        mode (str): mode in which the file is opened (default 'w').
    """
    with open(path, mode) as stream:
        stream.write(data)
237,225
Download, prepare and install a compressed tar file from S3 or local directory as an entry point. SageMaker Python SDK saves the user provided entry points as compressed tar files in S3 Args: name (str): name of the entry point. uri (str): the location of the entry point. path (bool): ...
def download_and_extract(uri, name, path): # type: (str, str, str) -> None if not os.path.exists(path): os.makedirs(path) if not os.listdir(path): with tmpdir() as tmp: if uri.startswith('s3://'): dst = os.path.join(tmp, 'tar_file') s3_download(u...
237,226
Download a file from S3. Args: url (str): the s3 url of the file. dst (str): the destination where the file will be saved.
def s3_download(url, dst): # type: (str, str) -> None url = parse.urlparse(url) if url.scheme != 's3': raise ValueError("Expecting 's3' scheme, got: %s in %s" % (url.scheme, url)) bucket, key = url.netloc, url.path.lstrip('/') region = os.environ.get('AWS_REGION', os.environ.get(_params...
237,227
Wraps function fn in a try catch block that re-raises error_class. Args: fn (function): function to wrapped error_class (Exception): Error class to be re-raised Returns: (object): fn wrapped in a try catch.
def error_wrapper(fn, error_class):  # type: (Callable or None, Exception) -> ...
    """Wrap ``fn`` so that any exception it raises is re-raised as ``error_class``.

    Args:
        fn (function): function to be wrapped.
        error_class (Exception): exception class to re-raise as.

    Returns:
        object: ``fn`` wrapped in a try/except that converts failures,
        preserving the original traceback via six.reraise.
    """
    def wrapper(*args, **kwargs):
        try:
            return fn(*args, **kwargs)
        except Exception as exc:
            six.reraise(error_class, error_class(exc), sys.exc_info()[2])

    return wrapper
237,230
Represents a Tuya device. Args: dev_id (str): The device id. address (str): The network address. local_key (str, optional): The encryption key. Defaults to None. dev_type (str, optional): The device type. It will be used as key for lookups...
def __init__(self, dev_id, address, local_key=None, dev_type=None, connection_timeout=10): self.id = dev_id self.address = address self.local_key = local_key self.local_key = local_key.encode('latin1') self.dev_type = dev_type self.connection_timeout = connection...
237,677
Send single buffer `payload` and receive a single buffer. Args: payload(bytes): Data to send.
def _send_receive(self, payload): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) s.settimeout(self.connection_timeout) s.connect((self.address, self.port)) s.send(payload) data = s.recv(1024) ...
237,678