_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q274600
MultiFunction.__remove_method
test
def __remove_method(m: lmap.Map, key: T) -> lmap.Map:
    """Return a copy of the methods map without the entry for `key`.

    Used as the swap function for the methods atom."""
    return m.dissoc(key)
python
{ "resource": "" }
q274601
MultiFunction.remove_method
test
def remove_method(self, key: T) -> Optional[Method]:
    """Remove the method registered for `key` and return it.

    Returns None (and leaves the atom untouched) if no method is
    registered for `key`."""
    existing = self.methods.entry(key, None)
    if existing:
        # Swap the atom via the pure helper so the update is atomic.
        self._methods.swap(MultiFunction.__remove_method, key)
    return existing
python
{ "resource": "" }
q274602
_is_macro
test
def _is_macro(v: Var) -> bool:
    """Return True if the Var's metadata marks it as a macro."""
    return (
        Maybe(v.meta)
        .map(lambda meta: meta.entry(SYM_MACRO_META_KEY, None))  # type: ignore
        .or_else_get(False)
    )
python
{ "resource": "" }
q274603
_loc
test
def _loc(form: Union[LispForm, ISeq]) -> Optional[Tuple[int, int]]:
    """Return the (line, col) location recorded in the form's reader
    metadata, or None if the form carries no metadata."""
    try:
        meta = form.meta  # type: ignore
        line = meta.get(reader.READER_LINE_KW)  # type: ignore
        col = meta.get(reader.READER_COL_KW)  # type: ignore
    except AttributeError:
        return None
    else:
        assert isinstance(line, int) and isinstance(col, int)
        return line, col
python
{ "resource": "" }
q274604
_with_loc
test
def _with_loc(f: ParseFunction):
    """Decorate a parse function so that any location information available
    on the input form is propagated into the returned node's environment."""

    @wraps(f)
    def _parse_form(ctx: ParserContext, form: Union[LispForm, ISeq]) -> Node:
        loc = _loc(form)
        if loc is None:
            return f(ctx, form)
        return f(ctx, form).fix_missing_locations(loc)

    return _parse_form
python
{ "resource": "" }
q274605
_assert_no_recur
test
def _assert_no_recur(node: Node) -> None:
    """Raise a ParserException if a `recur` form appears in this node or in
    any of its children. Recursion stops at `fn` and `loop` nodes, which
    establish new recur points."""
    if node.op == NodeOp.RECUR:
        raise ParserException(
            "recur must appear in tail position", form=node.form, lisp_ast=node
        )
    if node.op not in {NodeOp.FN, NodeOp.LOOP}:
        node.visit(_assert_no_recur)
python
{ "resource": "" }
q274606
_assert_recur_is_tail
test
def _assert_recur_is_tail(node: Node) -> None:  # pylint: disable=too-many-branches
    """Assert that `recur` forms only appear in the tail position of this or
    child AST nodes.

    `recur` forms may only appear in `do` nodes (both literal and synthetic
    `do` nodes) and in either the :then or :else expression of an `if` node.

    Raises ParserException (via _assert_no_recur) when a `recur` is found in
    a non-tail position."""
    if node.op == NodeOp.DO:
        assert isinstance(node, Do)
        # Only the final expression of a do is in tail position.
        for child in node.statements:
            _assert_no_recur(child)
        _assert_recur_is_tail(node.ret)
    elif node.op in {NodeOp.FN, NodeOp.FN_METHOD, NodeOp.METHOD}:
        assert isinstance(node, (Fn, FnMethod, Method))
        node.visit(_assert_recur_is_tail)
    elif node.op == NodeOp.IF:
        assert isinstance(node, If)
        _assert_no_recur(node.test)
        # Both branches of an if are tail positions.
        _assert_recur_is_tail(node.then)
        _assert_recur_is_tail(node.else_)
    elif node.op in {NodeOp.LET, NodeOp.LETFN}:
        assert isinstance(node, (Let, LetFn))
        for binding in node.bindings:
            assert binding.init is not None
            _assert_no_recur(binding.init)
        _assert_recur_is_tail(node.body)
    elif node.op == NodeOp.LOOP:
        assert isinstance(node, Loop)
        for binding in node.bindings:
            assert binding.init is not None
            _assert_no_recur(binding.init)
        # Bug fix: the loop body must also be validated — it is the tail
        # position that `recur` targets. Previously the body was skipped
        # entirely, so non-tail `recur` forms inside a loop went unchecked.
        _assert_recur_is_tail(node.body)
    elif node.op == NodeOp.RECUR:
        # A bare recur reached through tail-position recursion is legal.
        pass
    elif node.op == NodeOp.TRY:
        assert isinstance(node, Try)
        _assert_recur_is_tail(node.body)
        for catch in node.catches:
            _assert_recur_is_tail(catch)
        if node.finally_:
            _assert_no_recur(node.finally_)
    else:
        node.visit(_assert_no_recur)
python
{ "resource": "" }
q274607
__resolve_bare_symbol
test
def __resolve_bare_symbol(
    ctx: ParserContext, form: sym.Symbol
) -> Union[MaybeClass, VarRef]:
    """Resolve a non-namespaced symbol to a local Basilisp Var or, failing
    that, to a Python builtin name.

    Raises ParserException when the symbol contains '.' or cannot be
    resolved at all."""
    assert form.ns is None

    # A Var mapped in the current namespace takes precedence.
    found_var = ctx.current_ns.find(form)
    if found_var is not None:
        return VarRef(form=form, var=found_var, env=ctx.get_node_env())

    if "." in form.name:
        raise ParserException(
            "symbol names may not contain the '.' operator", form=form
        )

    munged = munge(form.name, allow_builtins=True)
    if munged in vars(builtins):
        return MaybeClass(
            form=form,
            class_=munged,
            target=vars(builtins)[munged],
            env=ctx.get_node_env(),
        )

    assert munged not in vars(ctx.current_ns.module)
    raise ParserException(
        f"unable to resolve symbol '{form}' in this context", form=form
    )
python
{ "resource": "" }
q274608
_resolve_sym
test
def _resolve_sym(
    ctx: ParserContext, form: sym.Symbol
) -> Union[MaybeClass, MaybeHostForm, VarRef]:
    """Resolve a Basilisp symbol as a Var or Python name.

    Supports the trailing-dot class constructor syntax:
      (Classname. *args)
      (aliased.Classname. *args)
      (fully.qualified.Classname. *args)"""
    if form.ns is None and form.name.endswith("."):
        stripped = form.name[:-1]
        try:
            ns_part, name_part = stripped.rsplit(".", maxsplit=1)
            form = sym.symbol(name_part, ns=ns_part)
        except ValueError:
            # No '.' remained after stripping the constructor dot.
            form = sym.symbol(stripped)

    if form.ns is not None:
        return __resolve_namespaced_symbol(ctx, form)
    return __resolve_bare_symbol(ctx, form)
python
{ "resource": "" }
q274609
parse_ast
test
def parse_ast(ctx: ParserContext, form: ReaderForm) -> Node:
    """Parse a Lisp form into a Basilisp syntax-tree node conforming to the
    clojure.tools.analyzer AST spec, marked as a top-level node."""
    node = _parse_ast(ctx, form)
    return node.assoc(top_level=True)
python
{ "resource": "" }
q274610
ParserContext.warn_on_shadowed_var
test
def warn_on_shadowed_var(self) -> bool:
    """If True, warn when a def'ed Var name is shadowed in an inner scope.

    warn_on_shadowed_name implies (and supersedes) this flag."""
    if self.warn_on_shadowed_name:
        return True
    return self._opts.entry(WARN_ON_SHADOWED_VAR, False)
python
{ "resource": "" }
q274611
ParserContext.put_new_symbol
test
def put_new_symbol(  # pylint: disable=too-many-arguments
    self,
    s: sym.Symbol,
    binding: Binding,
    warn_on_shadowed_name: bool = True,
    warn_on_shadowed_var: bool = True,
    warn_if_unused: bool = True,
):
    """Add a new symbol to the symbol table.

    The keyword arguments allow callers to temporarily suppress individual
    warnings for a single call — useful when a case has already been checked
    and re-warning would be noise.

    If the WARN_ON_SHADOWED_NAME compiler option is active and
    warn_on_shadowed_name is True, warn when a local name shadows another
    local name. WARN_ON_SHADOWED_NAME implies WARN_ON_SHADOWED_VAR.

    If the WARN_ON_SHADOWED_VAR compiler option is active and
    warn_on_shadowed_var is True, warn when a local name shadows a def'ed
    Var."""
    st = self.symbol_table

    if warn_on_shadowed_name and self.warn_on_shadowed_name:
        if st.find_symbol(s) is not None:
            logger.warning(f"name '{s}' shadows name from outer scope")

    if (
        warn_on_shadowed_name or warn_on_shadowed_var
    ) and self.warn_on_shadowed_var:
        if self.current_ns.find(s) is not None:
            logger.warning(f"name '{s}' shadows def'ed Var from outer scope")

    # Symbols explicitly tagged as intentionally unused never warn.
    if s.meta is not None and s.meta.entry(SYM_NO_WARN_WHEN_UNUSED_META_KEY, None):
        warn_if_unused = False

    st.new_symbol(s, binding, warn_if_unused=warn_if_unused)
python
{ "resource": "" }
q274612
map_lrepr
test
def map_lrepr(
    entries: Callable[[], Iterable[Tuple[Any, Any]]],
    start: str,
    end: str,
    meta=None,
    **kwargs,
) -> str:
    """Produce a Lisp representation of an associative collection, bookended
    with the start and end strings supplied.

    `entries` must be a callable producing key/value tuples. The keyword
    arguments are passed along to lrepr for the individual entries."""
    print_level = kwargs["print_level"]
    if isinstance(print_level, int) and print_level < 1:
        return SURPASSED_PRINT_LEVEL

    kwargs = _process_kwargs(**kwargs)

    def kv_reprs():
        for k, v in entries():
            yield "{k} {v}".format(k=lrepr(k, **kwargs), v=lrepr(v, **kwargs))

    trailer = []
    print_dup = kwargs["print_dup"]
    print_length = kwargs["print_length"]
    if not print_dup and isinstance(print_length, int):
        # Take one extra entry so we can tell whether the limit was exceeded.
        items = seq(kv_reprs()).take(print_length + 1).to_list()
        if len(items) > print_length:
            items.pop()
            trailer.append(SURPASSED_PRINT_LENGTH)
    else:
        items = list(kv_reprs())

    body = PRINT_SEPARATOR.join(items + trailer)

    print_meta = kwargs["print_meta"]
    if print_meta and meta:
        return f"^{lrepr(meta, **kwargs)} {start}{body}{end}"

    return f"{start}{body}{end}"
python
{ "resource": "" }
q274613
seq_lrepr
test
def seq_lrepr(
    iterable: Iterable[Any], start: str, end: str, meta=None, **kwargs
) -> str:
    """Produce a Lisp representation of a sequential collection, bookended
    with the start and end strings supplied. The keyword arguments are
    passed along to lrepr for the sequence elements."""
    print_level = kwargs["print_level"]
    if isinstance(print_level, int) and print_level < 1:
        return SURPASSED_PRINT_LEVEL

    kwargs = _process_kwargs(**kwargs)

    trailer = []
    print_dup = kwargs["print_dup"]
    print_length = kwargs["print_length"]
    if not print_dup and isinstance(print_length, int):
        # Take one extra element so we can tell whether the limit was exceeded.
        items = seq(iterable).take(print_length + 1).to_list()
        if len(items) > print_length:
            items.pop()
            trailer.append(SURPASSED_PRINT_LENGTH)
    else:
        items = iterable

    items = list(map(lambda o: lrepr(o, **kwargs), items))
    body = PRINT_SEPARATOR.join(items + trailer)

    print_meta = kwargs["print_meta"]
    if print_meta and meta:
        return f"^{lrepr(meta, **kwargs)} {start}{body}{end}"

    return f"{start}{body}{end}"
python
{ "resource": "" }
q274614
lrepr
test
def lrepr(  # pylint: disable=too-many-arguments
    o: Any,
    human_readable: bool = False,
    print_dup: bool = PRINT_DUP,
    print_length: PrintCountSetting = PRINT_LENGTH,
    print_level: PrintCountSetting = PRINT_LEVEL,
    print_meta: bool = PRINT_META,
    print_readably: bool = PRINT_READABLY,
) -> str:
    """Return a string representation of a Lisp object.

    Permissible keyword arguments are:
    - human_readable: if logical True, print strings without quotations or
      escape sequences (default: false)
    - print_dup: if logical true, print objects in a way that preserves their
      types (default: false)
    - print_length: the number of items in a collection which will be
      printed, or no limit if bound to a logical falsey value (default: 50)
    - print_level: the depth of the object graph to print, starting with 0,
      or no limit if bound to a logical falsey value (default: nil)
    - print_meta: if logical true, print objects meta in a way that can be
      read back by the reader (default: false)
    - print_readably: if logical false, print strings and characters with
      non-alphanumeric characters converted to escape sequences
      (default: true)

    Note that this function cannot capture the runtime values of the
    basilisp.core dynamic variables corresponding to these keyword
    arguments; use basilisp.lang.runtime.lrepr for that."""
    opts = dict(
        human_readable=human_readable,
        print_dup=print_dup,
        print_length=print_length,
        print_level=print_level,
        print_meta=print_meta,
        print_readably=print_readably,
    )
    if isinstance(o, LispObject):
        return o._lrepr(**opts)
    # Fallback for subclasses of standard Python types.
    return _lrepr_fallback(o, **opts)  # pragma: no cover
python
{ "resource": "" }
q274615
_lrepr_fallback
test
def _lrepr_fallback(  # pylint: disable=too-many-arguments
    o: Any,
    human_readable: bool = False,
    print_dup: bool = PRINT_DUP,
    print_length: PrintCountSetting = PRINT_LENGTH,
    print_level: PrintCountSetting = PRINT_LEVEL,
    print_meta: bool = PRINT_META,
    print_readably: bool = PRINT_READABLY,
) -> str:  # pragma: no cover
    """Fallback function for lrepr for subclasses of standard types.

    The singledispatch used for standard lrepr dispatches on an exact type
    match of the first argument, so this function only handles subclasses
    of common Python types like strings or lists."""
    kwargs = {
        "human_readable": human_readable,
        "print_dup": print_dup,
        "print_length": print_length,
        "print_level": print_level,
        "print_meta": print_meta,
        "print_readably": print_readably,
    }
    # NOTE: bool must be tested before other types since bool subclasses int,
    # and str before generic containers; the original dispatch order is kept.
    if isinstance(o, bool):
        return _lrepr_bool(o)
    elif o is None:
        return _lrepr_nil(o)
    elif isinstance(o, str):
        return _lrepr_str(
            o, human_readable=human_readable, print_readably=print_readably
        )
    elif isinstance(o, dict):
        return _lrepr_py_dict(o, **kwargs)
    elif isinstance(o, list):
        return _lrepr_py_list(o, **kwargs)
    elif isinstance(o, set):
        return _lrepr_py_set(o, **kwargs)
    elif isinstance(o, tuple):
        return _lrepr_py_tuple(o, **kwargs)
    elif isinstance(o, complex):
        return _lrepr_complex(o)
    elif isinstance(o, datetime.datetime):
        return _lrepr_datetime(o)
    elif isinstance(o, Decimal):
        return _lrepr_decimal(o, print_dup=print_dup)
    elif isinstance(o, Fraction):
        return _lrepr_fraction(o)
    elif isinstance(o, Pattern):
        return _lrepr_pattern(o)
    elif isinstance(o, uuid.UUID):
        return _lrepr_uuid(o)
    else:
        return repr(o)
python
{ "resource": "" }
q274616
Node.fix_missing_locations
test
def fix_missing_locations(
    self, start_loc: Optional[Tuple[int, int]] = None
) -> "Node":
    """Return a transformed copy of this node whose environment location is
    updated to `start_loc` when the node has none, keeping its own location
    otherwise. All child nodes are recursively transformed the same way, so
    children missing a location inherit their parent's."""
    if self.env.line is not None and self.env.col is not None:
        loc: Optional[Tuple[int, int]] = (self.env.line, self.env.col)
    else:
        loc = start_loc

    assert loc is not None and all(
        [e is not None for e in loc]
    ), "Must specify location information"

    new_attrs: MutableMapping[str, Union[NodeEnv, Node, Iterable[Node]]] = {
        "env": attr.evolve(self.env, line=loc[0], col=loc[1])
    }
    for child_kw in self.children:
        child_attr = munge(child_kw.name)
        assert child_attr != "env", "Node environment already set"

        if child_attr.endswith("s"):
            # Plural attribute name => collection of child nodes.
            iter_child: Iterable[Node] = getattr(self, child_attr)
            assert iter_child is not None, "Listed child must not be none"
            fixed_children = []
            for item in iter_child:
                fixed_children.append(item.fix_missing_locations(start_loc))
            new_attrs[child_attr] = vec.vector(fixed_children)
        else:
            child: Node = getattr(self, child_attr)
            assert child is not None, "Listed child must not be none"
            new_attrs[child_attr] = child.fix_missing_locations(start_loc)

    return self.assoc(**new_attrs)
python
{ "resource": "" }
q274617
compile_and_exec_form
test
def compile_and_exec_form(  # pylint: disable= too-many-arguments
    form: ReaderForm,
    ctx: CompilerContext,
    module: types.ModuleType,
    wrapped_fn_name: str = _DEFAULT_FN,
    collect_bytecode: Optional[BytecodeCollector] = None,
) -> Any:
    """Compile and execute the given form, returning the result of the
    executed expression. Primarily useful for the REPL and testing.

    Callers may override the wrapped function name, which the REPL uses to
    evaluate the result of an expression and print it back out."""
    if form is None:
        return None

    if not module.__basilisp_bootstrapped__:  # type: ignore
        _bootstrap_module(ctx.generator_context, ctx.py_ast_optimizer, module)

    final_wrapped_name = genname(wrapped_fn_name)

    lisp_ast = parse_ast(ctx.parser_context, form)
    py_ast = gen_py_ast(ctx.generator_context, lisp_ast)

    # Wrap the final expression in a named function so its value can be
    # retrieved after exec.
    form_ast = [
        _statementize(node)
        for node in itertools.chain(
            py_ast.dependencies,
            [_expressionize(GeneratedPyAST(node=py_ast.node), final_wrapped_name)],
        )
    ]

    ast_module = ast.Module(body=form_ast)
    ast_module = ctx.py_ast_optimizer.visit(ast_module)
    ast.fix_missing_locations(ast_module)

    _emit_ast_string(ast_module)

    bytecode = compile(ast_module, ctx.filename, "exec")
    if collect_bytecode:
        collect_bytecode(bytecode)
    exec(bytecode, module.__dict__)
    return getattr(module, final_wrapped_name)()
python
{ "resource": "" }
q274618
_incremental_compile_module
test
def _incremental_compile_module(
    optimizer: PythonASTOptimizer,
    py_ast: GeneratedPyAST,
    mod: types.ModuleType,
    source_filename: str,
    collect_bytecode: Optional[BytecodeCollector] = None,
) -> None:
    """Incrementally compile a stream of AST nodes into module `mod`.

    `source_filename` is passed to Python's native compile. Incremental
    compilation is integral to generating a Python module during the same
    process as macro-expansion."""
    module_body = [
        _statementize(node)
        for node in itertools.chain(py_ast.dependencies, [py_ast.node])
    ]

    module = ast.Module(body=module_body)
    module = optimizer.visit(module)
    ast.fix_missing_locations(module)

    _emit_ast_string(module)

    bytecode = compile(module, source_filename, "exec")
    if collect_bytecode:
        collect_bytecode(bytecode)
    exec(bytecode, mod.__dict__)
python
{ "resource": "" }
q274619
compile_module
test
def compile_module(
    forms: Iterable[ReaderForm],
    ctx: CompilerContext,
    module: types.ModuleType,
    collect_bytecode: Optional[BytecodeCollector] = None,
) -> None:
    """Compile an entire Basilisp module into Python bytecode which can be
    executed as a Python module.

    The generated bytecode supports the Basilisp import machinery, allowing
    callers to import Basilisp modules from Python code."""
    _bootstrap_module(ctx.generator_context, ctx.py_ast_optimizer, module)

    # Compile each form as it is read so macros defined earlier in the
    # module are available to later forms.
    for form in forms:
        nodes = gen_py_ast(ctx.generator_context, parse_ast(ctx.parser_context, form))
        _incremental_compile_module(
            ctx.py_ast_optimizer,
            nodes,
            module,
            source_filename=ctx.filename,
            collect_bytecode=collect_bytecode,
        )
python
{ "resource": "" }
q274620
compile_bytecode
test
def compile_bytecode(
    code: List[types.CodeType],
    gctx: GeneratorContext,
    optimizer: PythonASTOptimizer,
    module: types.ModuleType,
) -> None:
    """Execute cached bytecode objects in the given (bootstrapped) module.

    The Basilisp import hook caches bytecode while compiling Basilisp
    namespaces; when that bytecode is reloaded from disk it must run inside
    a bootstrapped module, so this function bootstraps first and then execs
    each code object in order."""
    _bootstrap_module(gctx, optimizer, module)
    for bytecode in code:
        exec(bytecode, module.__dict__)
python
{ "resource": "" }
q274621
sequence
test
def sequence(s: Iterable) -> ISeq[Any]:
    """Create a Sequence from the Iterable `s`, or EMPTY when `s` yields
    nothing."""
    it = iter(s)
    try:
        # The first element is realized eagerly; the rest stay lazy.
        first = next(it)
    except StopIteration:
        return EMPTY
    return _Sequence(it, first)
python
{ "resource": "" }
q274622
munge
test
def munge(s: str, allow_builtins: bool = False) -> str:
    """Replace characters which are not valid in Python symbols with valid
    replacement strings. A trailing underscore is appended when the result
    collides with a Python keyword or (unless allowed) a builtin name."""
    munged = "".join(_MUNGE_REPLACEMENTS.get(c, c) for c in s)

    if keyword.iskeyword(munged):
        return f"{munged}_"
    if not allow_builtins and munged in builtins.__dict__:
        return f"{munged}_"
    return munged
python
{ "resource": "" }
q274623
demunge
test
def demunge(s: str) -> str:
    """Replace munged string components with their original representation,
    converting remaining underscores back to hyphens."""

    def _replace(match: Match) -> str:
        token = match.group(0)
        replacement = _DEMUNGE_REPLACEMENTS.get(token, None)
        return replacement if replacement else token

    return re.sub(_DEMUNGE_PATTERN, _replace, s).replace("_", "-")
python
{ "resource": "" }
q274624
fraction
test
def fraction(numerator: int, denominator: int) -> Fraction:
    """Create a Fraction from a numerator and denominator."""
    return Fraction(numerator, denominator)
python
{ "resource": "" }
q274625
get_handler
test
def get_handler(level: str, fmt: str) -> logging.Handler:
    """Get the default logging handler for Basilisp.

    Returns a configured StreamHandler when the BASILISP_USE_DEV_LOGGER
    environment variable is "true"; otherwise a NullHandler."""
    if os.getenv("BASILISP_USE_DEV_LOGGER") == "true":
        dev_handler = logging.StreamHandler()
        dev_handler.setFormatter(logging.Formatter(fmt))
        dev_handler.setLevel(level)
        return dev_handler
    return logging.NullHandler()
python
{ "resource": "" }
q274626
map
test
def map(kvs: Mapping[K, V], meta=None) -> Map[K, V]:  # pylint:disable=redefined-builtin
    """Create a new persistent map from the mapping `kvs`, optionally
    attaching `meta`."""
    return Map(pmap(initial=kvs), meta=meta)
python
{ "resource": "" }
q274627
partition
test
def partition(coll, n: int):
    """Yield successive tuples of size `n` drawn from `coll`; the final
    tuple may be shorter when len(coll) is not a multiple of `n`."""
    assert n > 0
    start, stop = 0, n
    while stop <= len(coll):
        yield tuple(coll[start:stop])
        start, stop = stop, stop + n
    # Emit any leftover tail shorter than n.
    if start < len(coll) < stop:
        yield tuple(coll[start:len(coll)])
python
{ "resource": "" }
q274628
_with_loc
test
def _with_loc(f: W) -> W:
    """Wrap a reader function so the line and column of the reader at call
    time are attached as metadata to the form it produces (when the form
    supports metadata)."""

    @functools.wraps(f)
    def with_lineno_and_col(ctx):
        loc_meta = lmap.map(
            {READER_LINE_KW: ctx.reader.line, READER_COL_KW: ctx.reader.col}
        )
        result = f(ctx)
        try:
            return result.with_meta(loc_meta)  # type: ignore
        except AttributeError:
            # Scalars (ints, strings, ...) cannot carry metadata.
            return result

    return cast(W, with_lineno_and_col)
python
{ "resource": "" }
q274629
_read_namespaced
test
def _read_namespaced(
    ctx: ReaderContext, allowed_suffix: Optional[str] = None
) -> Tuple[Optional[str], str]:
    """Read a (possibly namespaced) token from the input stream, returning
    a (namespace, name) tuple where the namespace may be None."""
    ns: List[str] = []
    name: List[str] = []
    reader = ctx.reader
    has_ns = False
    while True:
        token = reader.peek()
        if token == "/":
            reader.next_token()
            if has_ns:
                raise SyntaxError("Found '/'; expected word character")
            elif len(name) == 0:
                # A leading '/' is the name itself (the division symbol).
                name.append("/")
            else:
                if "/" in name:
                    raise SyntaxError("Found '/' after '/'")
                # Everything read so far was the namespace portion.
                has_ns = True
                ns = name
                name = []
        elif ns_name_chars.match(token):
            reader.next_token()
            name.append(token)
        elif allowed_suffix is not None and token == allowed_suffix:
            reader.next_token()
            name.append(token)
        else:
            break

    ns_str = None if not has_ns else "".join(ns)
    name_str = "".join(name)

    # A small exception for the symbol '/ used for division
    if ns_str is None:
        if "/" in name_str and name_str != "/":
            raise SyntaxError("'/' character disallowed in names")

    assert ns_str is None or len(ns_str) > 0
    return ns_str, name_str
python
{ "resource": "" }
q274630
_read_coll
test
def _read_coll(
    ctx: ReaderContext,
    f: Callable[[Collection[Any]], Union[llist.List, lset.Set, vector.Vector]],
    end_token: str,
    coll_name: str,
):
    """Read elements from the input stream until `end_token` and construct
    the resulting collection with `f`. Whitespace and comments between
    elements are skipped."""
    elems: List = []
    reader = ctx.reader
    while True:
        token = reader.peek()
        if token == "":
            raise SyntaxError(f"Unexpected EOF in {coll_name}")
        if whitespace_chars.match(token):
            reader.advance()
            continue
        if token == end_token:
            reader.next_token()
            return f(elems)
        elem = _read_next(ctx)
        if elem is COMMENT:
            continue
        elems.append(elem)
python
{ "resource": "" }
q274631
_read_list
test
def _read_list(ctx: ReaderContext) -> llist.List:
    """Read a list element from the input stream."""
    opening = ctx.reader.advance()
    assert opening == "("
    return _read_coll(ctx, llist.list, ")", "list")
python
{ "resource": "" }
q274632
_read_vector
test
def _read_vector(ctx: ReaderContext) -> vector.Vector:
    """Read a vector element from the input stream."""
    opening = ctx.reader.advance()
    assert opening == "["
    return _read_coll(ctx, vector.vector, "]", "vector")
python
{ "resource": "" }
q274633
_read_set
test
def _read_set(ctx: ReaderContext) -> lset.Set:
    """Read a set literal from the input stream, rejecting duplicates."""
    opening = ctx.reader.advance()
    assert opening == "{"

    def set_if_valid(s: Collection) -> lset.Set:
        # A set literal with repeated elements is a syntax error.
        if len(s) != len(set(s)):
            raise SyntaxError("Duplicated values in set")
        return lset.set(s)

    return _read_coll(ctx, set_if_valid, "}", "set")
python
{ "resource": "" }
q274634
_read_map
test
def _read_map(ctx: ReaderContext) -> lmap.Map:
    """Read a map literal from the input stream, rejecting duplicate keys
    and dangling keys without values."""
    reader = ctx.reader
    opening = reader.advance()
    assert opening == "{"
    d: MutableMapping[Any, Any] = {}
    while True:
        if reader.peek() == "}":
            reader.next_token()
            break
        k = _read_next(ctx)
        if k is COMMENT:
            continue
        # Read the value for k, skipping interleaved comments.
        while True:
            if reader.peek() == "}":
                raise SyntaxError("Unexpected token '}'; expected map value")
            v = _read_next(ctx)
            if v is COMMENT:
                continue
            if k in d:
                raise SyntaxError(f"Duplicate key '{k}' in map literal")
            break
        d[k] = v

    return lmap.map(d)
python
{ "resource": "" }
q274635
_read_str
test
def _read_str(ctx: ReaderContext, allow_arbitrary_escapes: bool = False) -> str:
    """Return a string from the input stream.

    If allow_arbitrary_escapes is True, do not throw a SyntaxError if an
    unknown escape sequence is encountered; the backslash is retained
    verbatim instead.

    Raises SyntaxError on EOF before the closing quote or (unless allowed)
    on an unknown escape sequence."""
    s: List[str] = []
    reader = ctx.reader
    while True:
        token = reader.next_token()
        if token == "":
            raise SyntaxError("Unexpected EOF in string")
        if token == "\\":
            token = reader.next_token()
            escape_char = _STR_ESCAPE_CHARS.get(token, None)
            if escape_char:
                s.append(escape_char)
                continue
            if allow_arbitrary_escapes:
                s.append("\\")
            else:
                # Bug fix: this message was a plain string, so the literal
                # text "{token}" was printed instead of the offending
                # escape character; it must be an f-string.
                raise SyntaxError(f"Unknown escape sequence: \\{token}")
        if token == '"':
            reader.next_token()
            return "".join(s)
        s.append(token)
python
{ "resource": "" }
q274636
_read_sym
test
def _read_sym(ctx: ReaderContext) -> MaybeSymbol:
    """Return a symbol from the input stream.

    If a symbol appears inside a syntax quoted form, the reader attempts to
    resolve it using the resolver in the ReaderContext `ctx`, which looks in
    the current namespace for an alias or namespace matching the symbol's
    namespace. The literals nil, true, and false are returned as their
    Python values."""
    ns, name = _read_namespaced(ctx, allowed_suffix="#")
    if not ctx.is_syntax_quoted and name.endswith("#"):
        raise SyntaxError("Gensym may not appear outside syntax quote")
    if ns is not None:
        if any(map(lambda seg: len(seg) == 0, ns.split("."))):
            raise SyntaxError(
                "All '.' separated segments of a namespace "
                "must contain at least one character."
            )
    if name.startswith(".") and ns is not None:
        raise SyntaxError("Symbols starting with '.' may not have a namespace")
    if ns is None:
        if name == "nil":
            return None
        elif name == "true":
            return True
        elif name == "false":
            return False
    if ctx.is_syntax_quoted and not name.endswith("#"):
        return ctx.resolve(symbol.symbol(name, ns))
    return symbol.symbol(name, ns=ns)
python
{ "resource": "" }
q274637
_read_kw
test
def _read_kw(ctx: ReaderContext) -> keyword.Keyword:
    """Read a keyword literal from the input stream."""
    leading = ctx.reader.advance()
    assert leading == ":"
    ns, name = _read_namespaced(ctx)
    if "." in name:
        raise SyntaxError("Found '.' in keyword name")
    return keyword.keyword(name, ns=ns)
python
{ "resource": "" }
q274638
_read_meta
test
def _read_meta(ctx: ReaderContext) -> IMeta:
    """Read metadata and apply it to the next object in the input stream.

    A symbol becomes {:tag sym}, a keyword becomes {kw true}, and a map is
    used as-is. Raises SyntaxError for any other metadata form or when the
    target object cannot carry metadata."""
    start = ctx.reader.advance()
    assert start == "^"
    meta = _read_next_consuming_comment(ctx)

    meta_map: Optional[lmap.Map[LispForm, LispForm]] = None
    if isinstance(meta, symbol.Symbol):
        meta_map = lmap.map({keyword.keyword("tag"): meta})
    elif isinstance(meta, keyword.Keyword):
        meta_map = lmap.map({meta: True})
    elif isinstance(meta, lmap.Map):
        meta_map = meta
    else:
        raise SyntaxError(
            f"Expected symbol, keyword, or map for metadata, not {type(meta)}"
        )

    obj_with_meta = _read_next_consuming_comment(ctx)
    try:
        return obj_with_meta.with_meta(meta_map)  # type: ignore
    except AttributeError as e:
        # Chain explicitly (B904) so the underlying AttributeError is
        # reported as the direct cause of the SyntaxError.
        raise SyntaxError(
            f"Can not attach metadata to object of type {type(obj_with_meta)}"
        ) from e
python
{ "resource": "" }
q274639
_read_function
test
def _read_function(ctx: ReaderContext) -> llist.List:
    """Read a #() anonymous-function reader macro from the input stream and
    expand it into an (fn [args...] body) form. Nested #() forms are not
    permitted."""
    if ctx.is_in_anon_fn:
        raise SyntaxError("Nested #() definitions not allowed")

    with ctx.in_anon_fn():
        form = _read_list(ctx)
    arg_set = set()

    def arg_suffix(arg_num):
        if arg_num is None:
            return "1"
        elif arg_num == "&":
            return "rest"
        else:
            return arg_num

    def sym_replacement(arg_num):
        return symbol.symbol(f"arg-{arg_suffix(arg_num)}")

    def identify_and_replace(f):
        # Replace %-style argument symbols with generated arg-N symbols,
        # recording which arguments appear.
        if isinstance(f, symbol.Symbol):
            if f.ns is None:
                match = fn_macro_args.match(f.name)
                if match is not None:
                    arg_num = match.group(2)
                    arg_set.add(arg_suffix(arg_num))
                    return sym_replacement(arg_num)
        return f

    body = walk.postwalk(identify_and_replace, form) if len(form) > 0 else None

    arg_list: List[symbol.Symbol] = []
    numbered_args = sorted(map(int, filter(lambda k: k != "rest", arg_set)))
    if len(numbered_args) > 0:
        # Include every positional slot up to the highest one referenced.
        max_arg = max(numbered_args)
        arg_list = [sym_replacement(str(i)) for i in range(1, max_arg + 1)]
    if "rest" in arg_set:
        arg_list.append(_AMPERSAND)
        arg_list.append(sym_replacement("rest"))

    return llist.l(_FN, vector.vector(arg_list), body)
python
{ "resource": "" }
q274640
_read_quoted
test
def _read_quoted(ctx: ReaderContext) -> llist.List:
    """Read a quoted form from the input stream, producing (quote form)."""
    leading = ctx.reader.advance()
    assert leading == "'"
    quoted = _read_next_consuming_comment(ctx)
    return llist.l(_QUOTE, quoted)
python
{ "resource": "" }
q274641
_expand_syntax_quote
test
def _expand_syntax_quote(
    ctx: ReaderContext, form: IterableLispForm
) -> Iterable[LispForm]:
    """Expand syntax quoted forms to handle unquoting and unquote-splicing.

    The unquoted form (unquote x) becomes:
        (list x)

    The unquote-spliced form (unquote-splicing x) becomes:
        x

    All other forms are recursively processed as by
    _process_syntax_quoted_form and are returned as:
        (list form)"""
    expanded = []
    for elem in form:
        if _is_unquote(elem):
            expanded.append(llist.l(_LIST, elem[1]))
        elif _is_unquote_splicing(elem):
            # Spliced forms are inserted as-is so concat flattens them.
            expanded.append(elem[1])
        else:
            expanded.append(llist.l(_LIST, _process_syntax_quoted_form(ctx, elem)))
    return expanded
python
{ "resource": "" }
q274642
_process_syntax_quoted_form
test
def _process_syntax_quoted_form(ctx: ReaderContext, form: ReaderForm) -> ReaderForm:
    """Post-process syntax quoted forms to generate forms that can be
    assembled into the correct types at runtime.

    Lists become:
        (basilisp.core/seq (basilisp.core/concat [& rest]))
    Vectors become:
        (basilisp.core/apply basilisp.core/vector (basilisp.core/concat [& rest]))
    Sets become:
        (basilisp.core/apply basilisp.core/hash-set (basilisp.core/concat [& rest]))
    Maps become:
        (basilisp.core/apply basilisp.core/hash-map (basilisp.core/concat [& rest]))

    The child forms (rest above) are processed by _expand_syntax_quote.
    All other forms pass through unmodified (symbols are quoted, with
    gensym suffixes resolved through the context's gensym environment)."""

    def lconcat(v):
        return llist.list(v).cons(_CONCAT)

    if _is_unquote(form):
        return form[1]  # type: ignore
    elif _is_unquote_splicing(form):
        raise SyntaxError("Cannot splice outside collection")
    elif isinstance(form, llist.List):
        return llist.l(_SEQ, lconcat(_expand_syntax_quote(ctx, form)))
    elif isinstance(form, vector.Vector):
        return llist.l(_APPLY, _VECTOR, lconcat(_expand_syntax_quote(ctx, form)))
    elif isinstance(form, lset.Set):
        return llist.l(_APPLY, _HASH_SET, lconcat(_expand_syntax_quote(ctx, form)))
    elif isinstance(form, lmap.Map):
        flat_kvs = seq(form.items()).flatten().to_list()
        return llist.l(_APPLY, _HASH_MAP, lconcat(_expand_syntax_quote(ctx, flat_kvs)))
    elif isinstance(form, symbol.Symbol):
        if form.ns is None and form.name.endswith("#"):
            # Gensym: reuse the generated symbol for repeated occurrences
            # within the same syntax quote.
            try:
                return llist.l(_QUOTE, ctx.gensym_env[form.name])
            except KeyError:
                genned = symbol.symbol(langutil.genname(form.name[:-1])).with_meta(
                    form.meta
                )
                ctx.gensym_env[form.name] = genned
                return llist.l(_QUOTE, genned)
        return llist.l(_QUOTE, form)
    else:
        return form
python
{ "resource": "" }
q274643
_read_syntax_quoted
test
def _read_syntax_quoted(ctx: ReaderContext) -> ReaderForm:
    """Read a syntax-quoted form, setting the syntax-quoting state in the
    reader for the duration of the read."""
    leading = ctx.reader.advance()
    assert leading == "`"

    with ctx.syntax_quoted():
        return _process_syntax_quoted_form(ctx, _read_next_consuming_comment(ctx))
python
{ "resource": "" }
q274644
_read_unquote
test
def _read_unquote(ctx: ReaderContext) -> LispForm:
    """Read an unquoted form and handle any special logic of unquoting.

    Unquoted forms can take two shapes:

    `~form` is read as `(unquote form)` and any nested forms are read
    literally and passed along to the compiler untouched.

    `~@form` is read as `(unquote-splicing form)` which tells the compiler
    to splice in the contents of a sequential form such as a list or vector
    into the final compiled form. This helps macro writers create longer
    forms such as function calls, function bodies, or data structures with
    the contents of another collection they have."""
    tilde = ctx.reader.advance()
    assert tilde == "~"

    with ctx.unquoted():
        # A following '@' selects the splicing variant; consume it before
        # reading the nested form.
        if ctx.reader.peek() == "@":
            ctx.reader.advance()
            marker = _UNQUOTE_SPLICING
        else:
            marker = _UNQUOTE
        return llist.l(marker, _read_next_consuming_comment(ctx))
python
{ "resource": "" }
q274645
_read_deref
test
def _read_deref(ctx: ReaderContext) -> LispForm:
    """Read an `@form` from the input stream as `(basilisp.core/deref form)`."""
    at_sign = ctx.reader.advance()
    assert at_sign == "@"
    return llist.l(_DEREF, _read_next_consuming_comment(ctx))
python
{ "resource": "" }
q274646
_read_character
test
def _read_character(ctx: ReaderContext) -> str:
    """Read a character literal from the input stream.

    Character literals may appear as:
      - \\a \\b \\c etc will yield 'a', 'b', and 'c' respectively

      - \\newline, \\space, \\tab, \\formfeed, \\backspace, \\return yield
        the named characters

      - \\uXXXX yield the unicode digit corresponding to the code point named
        by the hex digits XXXX"""
    backslash = ctx.reader.advance()
    assert backslash == "\\"

    reader = ctx.reader
    chars: List[str] = []
    token = reader.peek()
    # Collect the (possibly multi-character) literal name, stopping at EOF,
    # whitespace, or any non-alphanumeric character.
    while (
        token != ""
        and not whitespace_chars.match(token)
        and alphanumeric_chars.match(token)
    ):
        chars.append(token)
        token = reader.next_token()

    char = "".join(chars)
    # Named characters such as \newline take precedence.
    special = _SPECIAL_CHARS.get(char, None)
    if special is not None:
        return special

    # \uXXXX escapes name a code point in hex.
    match = unicode_char.match(char)
    if match is not None:
        try:
            return chr(int(f"0x{match.group(1)}", 16))
        except (ValueError, OverflowError):
            raise SyntaxError(f"Unsupported character \\u{char}") from None

    if len(char) > 1:
        raise SyntaxError(f"Unsupported character \\{char}")
    return char
python
{ "resource": "" }
q274647
_read_regex
test
def _read_regex(ctx: ReaderContext) -> Pattern:
    """Read a regex reader macro (#"pattern") from the input stream."""
    # Regex strings keep their escapes verbatim; the regex engine interprets
    # them, not the string reader.
    pattern = _read_str(ctx, allow_arbitrary_escapes=True)
    try:
        return langutil.regex_from_str(pattern)
    except re.error:
        raise SyntaxError(f"Unrecognized regex pattern syntax: {pattern}")
python
{ "resource": "" }
q274648
_read_reader_macro
test
def _read_reader_macro(ctx: ReaderContext) -> LispReaderForm:
    """Return a data structure evaluated as a reader macro from the
    input stream.

    Dispatches on the character following '#':
      #{...}   set literal
      #(...)   anonymous function literal
      #'sym    var-quote, read as (var sym)
      #"..."   regex literal
      #_form   discard the next form entirely
      #tag f   custom data reader (tag must be a namespaced symbol)
    """
    start = ctx.reader.advance()
    assert start == "#"
    token = ctx.reader.peek()
    if token == "{":
        return _read_set(ctx)
    elif token == "(":
        return _read_function(ctx)
    elif token == "'":
        ctx.reader.advance()
        s = _read_sym(ctx)
        return llist.l(_VAR, s)
    elif token == '"':
        return _read_regex(ctx)
    elif token == "_":
        ctx.reader.advance()
        _read_next(ctx)  # Ignore the entire next form
        return COMMENT
    elif ns_name_chars.match(token):
        # Data reader tag: read the tag symbol and the next form, then pass
        # the form through the reader function registered for that tag.
        s = _read_sym(ctx)
        assert isinstance(s, symbol.Symbol)
        v = _read_next_consuming_comment(ctx)
        if s in ctx.data_readers:
            f = ctx.data_readers[s]
            return f(v)
        else:
            raise SyntaxError(f"No data reader found for tag #{s}")

    raise SyntaxError(f"Unexpected token '{token}' in reader macro")
python
{ "resource": "" }
q274649
_read_next_consuming_comment
test
def _read_next_consuming_comment(ctx: ReaderContext) -> ReaderForm:
    """Read the next full form from the input stream, consuming any reader
    comments completely."""
    while True:
        form = _read_next(ctx)
        if form is ctx.eof:
            return ctx.eof
        # Skip reader comments (both ; comments and #_ discards).
        if form is not COMMENT and not isinstance(form, Comment):
            return form
python
{ "resource": "" }
q274650
_read_next
test
def _read_next(ctx: ReaderContext) -> LispReaderForm:  # noqa: C901
    """Read the next full form from the input stream.

    Skips leading whitespace iteratively (rather than via self-recursion, so
    long whitespace runs cannot grow the Python stack), then dispatches to
    the specialized reader function for the next token. Returns ctx.eof when
    the stream is exhausted."""
    reader = ctx.reader
    token = reader.peek()
    # Skip any whitespace preceding the next form.
    while token != "" and whitespace_chars.match(token):
        token = reader.next_token()
    if token == "(":
        return _read_list(ctx)
    elif token == "[":
        return _read_vector(ctx)
    elif token == "{":
        return _read_map(ctx)
    elif begin_num_chars.match(token):
        return _read_num(ctx)
    elif token == ":":
        return _read_kw(ctx)
    elif token == '"':
        return _read_str(ctx)
    elif token == "'":
        return _read_quoted(ctx)
    elif token == "\\":
        return _read_character(ctx)
    elif ns_name_chars.match(token):
        return _read_sym(ctx)
    elif token == "#":
        return _read_reader_macro(ctx)
    elif token == "^":
        return _read_meta(ctx)  # type: ignore
    elif token == ";":
        return _read_comment(ctx)
    elif token == "`":
        return _read_syntax_quoted(ctx)
    elif token == "~":
        return _read_unquote(ctx)
    elif token == "@":
        return _read_deref(ctx)
    elif token == "":
        return ctx.eof
    else:
        # f-string for consistency with every other message in this module.
        raise SyntaxError(f"Unexpected token '{token}'")
python
{ "resource": "" }
q274651
read
test
def read(
    stream,
    resolver: Resolver = None,
    data_readers: DataReaders = None,
    eof: Any = EOF,
    is_eof_error: bool = False,
) -> Iterable[ReaderForm]:
    """Read the contents of a stream as a Lisp expression.

    Callers may optionally specify a namespace resolver, which will be used
    to adjudicate the fully-qualified name of symbols appearing inside of
    a syntax quote.

    Callers may optionally specify a map of custom data readers that will
    be used to resolve values in reader macros. Data reader tags specified
    by callers must be namespaced symbols; non-namespaced symbols are
    reserved by the reader. Data reader functions must be functions taking
    one argument and returning a value.

    The caller is responsible for closing the input stream."""
    reader = StreamReader(stream)
    ctx = ReaderContext(reader, resolver=resolver, data_readers=data_readers, eof=eof)
    while True:
        expr = _read_next(ctx)
        if expr is ctx.eof:
            # At end of stream either raise (when requested) or end the generator.
            if is_eof_error:
                raise EOFError
            return
        if expr is COMMENT or isinstance(expr, Comment):
            # Reader comments (; and #_) never yield a form to the caller.
            continue
        yield expr
python
{ "resource": "" }
q274652
read_str
test
def read_str(
    s: str,
    resolver: Resolver = None,
    data_readers: DataReaders = None,
    eof: Any = None,
    is_eof_error: bool = False,
) -> Iterable[ReaderForm]:
    """Read the contents of a string as a Lisp expression.

    Keyword arguments to this function have the same meanings as those of
    basilisp.lang.reader.read."""
    buf = io.StringIO(s)
    try:
        yield from read(
            buf,
            resolver=resolver,
            data_readers=data_readers,
            eof=eof,
            is_eof_error=is_eof_error,
        )
    finally:
        # Ensure the in-memory buffer is released even if the consumer
        # abandons the generator early.
        buf.close()
python
{ "resource": "" }
q274653
read_file
test
def read_file(
    filename: str,
    resolver: Resolver = None,
    data_readers: DataReaders = None,
    eof: Any = None,
    is_eof_error: bool = False,
) -> Iterable[ReaderForm]:
    """Read the contents of a file as a Lisp expression.

    Keyword arguments to this function have the same meanings as those of
    basilisp.lang.reader.read."""
    read_kwargs = dict(
        resolver=resolver,
        data_readers=data_readers,
        eof=eof,
        is_eof_error=is_eof_error,
    )
    with open(filename) as f:
        yield from read(f, **read_kwargs)
python
{ "resource": "" }
q274654
StreamReader._update_loc
test
def _update_loc(self, c):
    """Update the internal line and column buffers after a new character
    is added.

    The column number is set to 0, so the first character on the next line
    is column number 1."""
    if newline_chars.match(c):
        # Start of a new line: reset the column and bump the line number.
        col, line = 0, self._line[-1] + 1
    else:
        col, line = self._col[-1] + 1, self._line[-1]
    self._col.append(col)
    self._line.append(line)
python
{ "resource": "" }
q274655
StreamReader.pushback
test
def pushback(self) -> None:
    """Push one character back onto the stream, allowing it to be
    read again."""
    new_idx = self._idx - 1
    # Refuse to rewind past the configured pushback depth.
    if abs(new_idx) > self._pushback_depth:
        raise IndexError("Exceeded pushback depth")
    self._idx = new_idx
python
{ "resource": "" }
q274656
StreamReader.next_token
test
def next_token(self) -> str:
    """Advance the stream forward by one character and return the next
    token in the stream."""
    if self._idx >= StreamReader.DEFAULT_INDEX:
        # At the buffer head: pull a fresh character from the stream and
        # record its location.
        char = self._stream.read(1)
        self._update_loc(char)
        self._buffer.append(char)
    else:
        # Still replaying pushed-back characters; just move forward.
        self._idx += 1
    return self.peek()
python
{ "resource": "" }
q274657
_basilisp_bytecode
test
def _basilisp_bytecode(
    mtime: int, source_size: int, code: List[types.CodeType]
) -> bytes:
    """Return the bytes for a Basilisp bytecode cache file.

    Layout: magic number, source mtime, source size, marshalled code."""
    payload = bytearray(MAGIC_NUMBER)
    for word in (_w_long(mtime), _w_long(source_size)):
        payload.extend(word)
    payload.extend(marshal.dumps(code))  # type: ignore
    return payload
python
{ "resource": "" }
q274658
_get_basilisp_bytecode
test
def _get_basilisp_bytecode(
    fullname: str, mtime: int, source_size: int, cache_data: bytes
) -> List[types.CodeType]:
    """Unmarshal the bytes from a Basilisp bytecode cache file, validating the
    file header prior to returning. If the file header does not match, throw
    an exception.

    The header layout is: 4 bytes magic number, 4 bytes source mtime,
    4 bytes source size; the remainder is the marshalled code objects."""
    exc_details = {"name": fullname}
    magic = cache_data[:4]
    raw_timestamp = cache_data[4:8]
    raw_size = cache_data[8:12]
    if magic != MAGIC_NUMBER:
        message = (
            f"Incorrect magic number ({magic}) in {fullname}; expected {MAGIC_NUMBER}"
        )
        logger.debug(message)
        raise ImportError(message, **exc_details)  # type: ignore
    if len(raw_timestamp) != 4:
        message = f"Reached EOF while reading timestamp in {fullname}"
        logger.debug(message)
        raise EOFError(message)
    # Decode each header word once instead of re-decoding it for the message.
    timestamp = _r_long(raw_timestamp)
    if timestamp != mtime:
        message = f"Non-matching timestamp ({timestamp}) in {fullname} bytecode cache; expected {mtime}"
        logger.debug(message)
        raise ImportError(message, **exc_details)  # type: ignore
    if len(raw_size) != 4:
        message = f"Reached EOF while reading size of source in {fullname}"
        logger.debug(message)
        raise EOFError(message)
    size = _r_long(raw_size)
    if size != source_size:
        message = f"Non-matching filesize ({size}) in {fullname} bytecode cache; expected {source_size}"
        logger.debug(message)
        raise ImportError(message, **exc_details)  # type: ignore

    return marshal.loads(cache_data[12:])
python
{ "resource": "" }
q274659
_cache_from_source
test
def _cache_from_source(path: str) -> str: """Return the path to the cached file for the given path. The original path does not have to exist.""" cache_path, cache_file = os.path.split(importlib.util.cache_from_source(path)) filename, _ = os.path.splitext(cache_file) return os.path.join(cache_path, filename + ".lpyc")
python
{ "resource": "" }
q274660
hook_imports
test
def hook_imports():
    """Hook into Python's import machinery with a custom Basilisp code
    importer.

    Once this is called, Basilisp code may be called from within Python code
    using standard `import module.submodule` syntax.

    This function is idempotent: if a BasilispImporter is already installed
    on sys.meta_path, it is not installed again."""
    # Lazy generator (C419): no need to materialize a list just to call any().
    if any(isinstance(o, BasilispImporter) for o in sys.meta_path):
        return
    sys.meta_path.insert(
        0, BasilispImporter()  # pylint:disable=abstract-class-instantiated
    )
python
{ "resource": "" }
q274661
BasilispImporter.find_spec
test
def find_spec(
    self,
    fullname: str,
    path,  # Optional[List[str]] # MyPy complains this is incompatible with supertype
    target: types.ModuleType = None,
) -> Optional[importlib.machinery.ModuleSpec]:
    """Find the ModuleSpec for the specified Basilisp module.

    Returns None if the module is not a Basilisp module to allow import
    processing to continue.

    For each entry on the search path, checks for a package file
    (`.../__init__.lpy`) first and then a plain module file (`....lpy`)."""
    package_components = fullname.split(".")
    if path is None:
        # Top-level import: search sys.path using the full dotted name.
        path = sys.path
        module_name = package_components
    else:
        # Submodule import: the parent package supplies the path entries.
        module_name = [package_components[-1]]

    for entry in path:
        filenames = [
            f"{os.path.join(entry, *module_name, '__init__')}.lpy",
            f"{os.path.join(entry, *module_name)}.lpy",
        ]
        for filename in filenames:
            if os.path.exists(filename):
                state = {
                    "fullname": fullname,
                    "filename": filename,
                    "path": entry,
                    "target": target,
                    "cache_filename": _cache_from_source(filename),
                }
                # Report the actual matched file rather than a placeholder.
                logger.debug(
                    f"Found potential Basilisp module '{fullname}' in file '{filename}'"
                )
                return importlib.machinery.ModuleSpec(
                    fullname, self, origin=filename, loader_state=state
                )
    return None
python
{ "resource": "" }
q274662
BasilispImporter._exec_cached_module
test
def _exec_cached_module(
    self,
    fullname: str,
    loader_state: Mapping[str, str],
    path_stats: Mapping[str, int],
    module: types.ModuleType,
):
    """Load and execute a cached Basilisp module.

    Reads the bytecode cache file named in loader_state, validates its header
    against the source file's mtime and size, and executes the cached code
    objects in the target module. Propagates the validation errors raised by
    _get_basilisp_bytecode when the cache is missing or stale."""
    filename = loader_state["filename"]
    cache_filename = loader_state["cache_filename"]

    with timed(
        lambda duration: logger.debug(
            f"Loaded cached Basilisp module '{fullname}' in {duration / 1000000}ms"
        )
    ):
        # Fixed a stray doubled apostrophe in this log message.
        logger.debug(f"Checking for cached Basilisp module '{fullname}'")
        cache_data = self.get_data(cache_filename)
        cached_code = _get_basilisp_bytecode(
            fullname, path_stats["mtime"], path_stats["size"], cache_data
        )
        compiler.compile_bytecode(
            cached_code,
            compiler.GeneratorContext(filename=filename),
            compiler.PythonASTOptimizer(),
            module,
        )
python
{ "resource": "" }
q274663
BasilispImporter._exec_module
test
def _exec_module(
    self,
    fullname: str,
    loader_state: Mapping[str, str],
    path_stats: Mapping[str, int],
    module: types.ModuleType,
):
    """Load and execute a non-cached Basilisp module.

    Reads and compiles the .lpy source file named in loader_state, executing
    each top-level form in the target module, then writes the bytecode
    collected during compilation out to the cache file so subsequent imports
    can skip compilation."""
    filename = loader_state["filename"]
    cache_filename = loader_state["cache_filename"]

    with timed(
        lambda duration: logger.debug(
            f"Loaded Basilisp module '{fullname}' in {duration / 1000000}ms"
        )
    ):
        # During compilation, bytecode objects are added to the list via the closure
        # add_bytecode below, which is passed to the compiler. The collected bytecodes
        # will be used to generate an .lpyc file for caching the compiled file.
        all_bytecode = []

        def add_bytecode(bytecode: types.CodeType):
            all_bytecode.append(bytecode)

        logger.debug(f"Reading and compiling Basilisp module '{fullname}'")
        forms = reader.read_file(filename, resolver=runtime.resolve_alias)
        compiler.compile_module(  # pylint: disable=unexpected-keyword-arg
            forms,
            compiler.CompilerContext(filename=filename),
            module,
            collect_bytecode=add_bytecode,
        )

    # Cache the bytecode that was collected through the compilation run.
    cache_file_bytes = _basilisp_bytecode(
        path_stats["mtime"], path_stats["size"], all_bytecode
    )
    self._cache_bytecode(filename, cache_filename, cache_file_bytes)
python
{ "resource": "" }
q274664
BasilispImporter.exec_module
test
def exec_module(self, module):
    """Compile the Basilisp module into Python code.

    Basilisp is fundamentally a form-at-a-time compilation, meaning that
    each form in a module may require code compiled from an earlier form, so
    we incrementally compile a Python module by evaluating a single top-level
    form at a time and inserting the resulting AST nodes into the Python
    module."""
    fullname = module.__name__
    cached = self._cache[fullname]
    cached["module"] = module
    spec = cached["spec"]
    filename = spec.loader_state["filename"]
    path_stats = self.path_stats(filename)

    # During the bootstrapping process, the 'basilisp.core namespace is created with
    # a blank module. If we do not replace the module here with the module we are
    # generating, then we will not be able to use advanced compilation features such
    # as direct Python variable access to functions and other def'ed values.
    ns_name = demunge(fullname)
    ns: runtime.Namespace = runtime.set_current_ns(ns_name).value
    ns.module = module

    # Check if a valid, cached version of this Basilisp namespace exists and, if so,
    # load it and bypass the expensive compilation process below.
    if os.getenv(_NO_CACHE_ENVVAR, None) == "true":
        # Caching disabled via the environment: always compile from source.
        self._exec_module(fullname, spec.loader_state, path_stats, module)
    else:
        try:
            self._exec_cached_module(
                fullname, spec.loader_state, path_stats, module
            )
        except (EOFError, ImportError, IOError, OSError) as e:
            # Stale, missing, or corrupt cache: fall back to compiling source.
            logger.debug(f"Failed to load cached Basilisp module: {e}")
            self._exec_module(fullname, spec.loader_state, path_stats, module)

    # Because we want to (by default) add 'basilisp.core into every namespace by default,
    # we want to make sure we don't try to add 'basilisp.core into itself, causing a
    # circular import error.
    #
    # Later on, we can probably remove this and just use the 'ns macro to auto-refer
    # all 'basilisp.core values into the current namespace.
    runtime.Namespace.add_default_import(ns_name)
python
{ "resource": "" }
q274665
symbol
test
def symbol(name: str, ns: Optional[str] = None, meta=None) -> Symbol:
    """Create a new symbol with the given name, optional namespace, and
    optional metadata map."""
    return Symbol(name, ns=ns, meta=meta)
python
{ "resource": "" }
q274666
complete
test
def complete(
    text: str, kw_cache: atom.Atom["PMap[int, Keyword]"] = __INTERN
) -> Iterable[str]:
    """Return an iterable of possible completions for the given text.

    `text` must start with ':'. When the query contains a '/', the part
    before the slash is matched exactly against keyword namespaces and the
    part after it as a prefix of keyword names; otherwise the query is
    matched as a prefix of either the name or the namespace."""
    assert text.startswith(":")
    interned = kw_cache.deref().itervalues()
    query = text[1:]

    if "/" in query:
        ns_part, name_part = query.split("/", maxsplit=1)
        matches = (
            kw
            for kw in interned
            if (kw.ns is not None and kw.ns == ns_part)
            and kw.name.startswith(name_part)
        )
    else:
        matches = (
            kw
            for kw in interned
            if kw.name.startswith(query)
            or (kw.ns is not None and kw.ns.startswith(query))
        )

    return map(str, matches)
python
{ "resource": "" }
q274667
__get_or_create
test
def __get_or_create(
    kw_cache: "PMap[int, Keyword]", h: int, name: str, ns: Optional[str]
) -> PMap:
    """Swap function for the keyword intern cache.

    Returns the cache unchanged when a keyword is already interned under
    hash `h`; otherwise interns a new Keyword under `h`."""
    if h not in kw_cache:
        return kw_cache.set(h, Keyword(name, ns=ns))
    return kw_cache
python
{ "resource": "" }
q274668
keyword
test
def keyword(
    name: str,
    ns: Optional[str] = None,
    kw_cache: atom.Atom["PMap[int, Keyword]"] = __INTERN,
) -> Keyword:
    """Create a new keyword, interning it in (or fetching it from) the
    keyword cache keyed by the hash of (name, ns)."""
    cache_key = hash((name, ns))
    updated_cache = kw_cache.swap(__get_or_create, cache_key, name, ns)
    return updated_cache[cache_key]
python
{ "resource": "" }
q274669
_chain_py_ast
test
def _chain_py_ast(*genned: GeneratedPyAST,) -> Tuple[PyASTStream, PyASTStream]:
    """Chain a sequence of generated Python ASTs into a tuple of
    (dependency nodes, value nodes). Both streams are lazy."""
    deps = chain.from_iterable(g.dependencies for g in genned)
    nodes = (g.node for g in genned)
    return deps, nodes
python
{ "resource": "" }
q274670
_load_attr
test
def _load_attr(name: str, ctx: ast.AST = ast.Load()) -> ast.Attribute: """Generate recursive Python Attribute AST nodes for resolving nested names.""" attrs = name.split(".") def attr_node(node, idx): if idx >= len(attrs): node.ctx = ctx return node return attr_node( ast.Attribute(value=node, attr=attrs[idx], ctx=ast.Load()), idx + 1 ) return attr_node(ast.Name(id=attrs[0], ctx=ast.Load()), 1)
python
{ "resource": "" }
q274671
_simple_ast_generator
test
def _simple_ast_generator(gen_ast):
    """Wrap a simple AST generator so its bare node is returned as a
    GeneratedPyAST with no dependencies."""

    @wraps(gen_ast)
    def wrapped_ast_generator(ctx: GeneratorContext, form: LispForm) -> GeneratedPyAST:
        generated_node = gen_ast(ctx, form)
        return GeneratedPyAST(node=generated_node)

    return wrapped_ast_generator
python
{ "resource": "" }
q274672
_collection_ast
test
def _collection_ast(
    ctx: GeneratorContext, form: Iterable[Node]
) -> Tuple[PyASTStream, PyASTStream]:
    """Turn a collection of Lisp AST nodes into a tuple of Python AST
    streams: (dependency nodes, value nodes)."""
    return _chain_py_ast(*(gen_py_ast(ctx, f) for f in form))
python
{ "resource": "" }
q274673
_ast_with_loc
test
def _ast_with_loc(
    py_ast: GeneratedPyAST, env: NodeEnv, include_dependencies: bool = False
) -> GeneratedPyAST:
    """Hydrate generated Python AST nodes with line numbers and column
    offsets if they exist in the node environment.

    When include_dependencies is True, dependency nodes are hydrated too."""
    for attr, value in (("lineno", env.line), ("col_offset", env.col)):
        if value is None:
            continue
        setattr(py_ast.node, attr, value)
        if include_dependencies:
            for dep in py_ast.dependencies:
                setattr(dep, attr, value)
    return py_ast
python
{ "resource": "" }
q274674
_with_ast_loc
test
def _with_ast_loc(f):
    """Decorator supplying line and column information to the Python AST
    node returned from the wrapped generator function.

    Dependency nodes are not hydrated; generators whose returned
    dependencies also need location info should use `_with_ast_loc_deps`."""

    @wraps(f)
    def with_lineno_and_col(
        ctx: GeneratorContext, node: Node, *args, **kwargs
    ) -> GeneratedPyAST:
        generated = f(ctx, node, *args, **kwargs)
        return _ast_with_loc(generated, node.env)

    return with_lineno_and_col
python
{ "resource": "" }
q274675
_with_ast_loc_deps
test
def _with_ast_loc_deps(f):
    """Decorator supplying line and column information to the Python AST
    node AND its dependency nodes returned from the wrapped generator.

    Dependency nodes should likely only be included if they are new nodes
    created in the wrapped function; dependencies returned from e.g.
    `gen_py_ast` should be assumed to already carry location info."""

    @wraps(f)
    def with_lineno_and_col(
        ctx: GeneratorContext, node: Node, *args, **kwargs
    ) -> GeneratedPyAST:
        generated = f(ctx, node, *args, **kwargs)
        return _ast_with_loc(generated, node.env, include_dependencies=True)

    return with_lineno_and_col
python
{ "resource": "" }
q274676
_is_dynamic
test
def _is_dynamic(v: Var) -> bool:
    """Return True if the Var holds a value which should be compiled to a
    dynamic Var access.

    Reads the ^:dynamic metadata key from the Var's meta map; a Var with no
    meta map falls back to False via the Maybe chain. NOTE(review): when the
    key is present its raw value is returned, which is presumably truthy —
    confirm against the Maybe implementation."""
    return (
        Maybe(v.meta)
        .map(lambda m: m.get(SYM_DYNAMIC_META_KEY, None))  # type: ignore
        .or_else_get(False)
    )
python
{ "resource": "" }
q274677
_is_redefable
test
def _is_redefable(v: Var) -> bool:
    """Return True if the Var can be redefined.

    Reads the ^:redef metadata key from the Var's meta map; a Var with no
    meta map falls back to False via the Maybe chain."""
    return (
        Maybe(v.meta)
        .map(lambda m: m.get(SYM_REDEF_META_KEY, None))  # type: ignore
        .or_else_get(False)
    )
python
{ "resource": "" }
q274678
statementize
test
def statementize(e: ast.AST) -> ast.AST:
    """Transform non-statements into ast.Expr nodes so they can
    stand alone as statements."""
    # Node types that are already valid Python statements.
    # noinspection PyPep8
    statement_types = (
        ast.Assign,
        ast.AnnAssign,
        ast.AugAssign,
        ast.Expr,
        ast.Raise,
        ast.Assert,
        ast.Pass,
        ast.Import,
        ast.ImportFrom,
        ast.If,
        ast.For,
        ast.While,
        ast.Continue,
        ast.Break,
        ast.Try,
        ast.ExceptHandler,
        ast.With,
        ast.FunctionDef,
        ast.Return,
        ast.Yield,
        ast.YieldFrom,
        ast.Global,
        ast.ClassDef,
        ast.AsyncFunctionDef,
        ast.AsyncFor,
        ast.AsyncWith,
    )
    if isinstance(e, statement_types):
        return e
    return ast.Expr(value=e)
python
{ "resource": "" }
q274679
expressionize
test
def expressionize(
    body: GeneratedPyAST,
    fn_name: str,
    args: Optional[Iterable[ast.arg]] = None,
    vargs: Optional[ast.arg] = None,
) -> ast.FunctionDef:
    """Given a series of expression AST nodes, create a function AST node
    with the given name that can be called and will return the result of
    the final expression in the input body nodes.

    This helps to fix the impedance mismatch of Python, which includes
    statements and expressions, and Lisps, which have only expressions.
    """
    # Dependencies become statements; the final node becomes the return value.
    fn_body: List[ast.AST] = [statementize(dep) for dep in body.dependencies]
    fn_body.append(ast.Return(value=body.node))
    return ast.FunctionDef(
        name=fn_name,
        args=ast.arguments(
            args=[] if args is None else args,
            kwarg=None,
            vararg=vargs,
            kwonlyargs=[],
            defaults=[],
            kw_defaults=[],
        ),
        body=fn_body,
        decorator_list=[],
        returns=None,
    )
python
{ "resource": "" }
q274680
__should_warn_on_redef
test
def __should_warn_on_redef(
    ctx: GeneratorContext, defsym: sym.Symbol, safe_name: str, def_meta: lmap.Map
) -> bool:
    """Return True if the compiler should emit a warning about this name
    being redefined.

    No warning is emitted when the def carries ^:no-warn-on-redef metadata.
    Otherwise, warn when the munged name already exists in the namespace's
    Python module, or when the symbol is already interned as a bound Var
    that was not marked ^:redef."""
    no_warn_on_redef = def_meta.entry(SYM_NO_WARN_ON_REDEF_META_KEY, False)
    if no_warn_on_redef:
        return False
    elif safe_name in ctx.current_ns.module.__dict__:
        return True
    elif defsym in ctx.current_ns.interns:
        var = ctx.current_ns.find(defsym)
        assert var is not None, f"Var {defsym} cannot be none here"

        # NOTE(review): entry() is called here with a single argument, unlike
        # the two-argument entry(key, default) calls elsewhere — confirm the
        # single-argument form is intended.
        if var.meta is not None and var.meta.entry(SYM_REDEF_META_KEY):
            return False
        elif var.is_bound:
            return True
        else:
            return False
    else:
        return False
python
{ "resource": "" }
q274681
_do_to_py_ast
test
def _do_to_py_ast(ctx: GeneratorContext, node: Do) -> GeneratedPyAST:
    """Return a Python AST Node for a `do` expression.

    All statements and the final (return) expression are generated, the
    value of the final expression is assigned to a generated name, and a
    Name node loading that result is returned as the expression value."""
    assert node.op == NodeOp.DO
    assert not node.is_body

    body_ast = GeneratedPyAST.reduce(
        *map(partial(gen_py_ast, ctx), chain(node.statements, [node.ret]))
    )

    fn_body_ast: List[ast.AST] = []
    do_result_name = genname(_DO_PREFIX)
    # Each dependency must stand alone as a statement before the final assign.
    fn_body_ast.extend(map(statementize, body_ast.dependencies))
    fn_body_ast.append(
        ast.Assign(
            targets=[ast.Name(id=do_result_name, ctx=ast.Store())], value=body_ast.node
        )
    )

    return GeneratedPyAST(
        node=ast.Name(id=do_result_name, ctx=ast.Load()), dependencies=fn_body_ast
    )
python
{ "resource": "" }
q274682
__fn_name
test
def __fn_name(s: Optional[str]) -> str:
    """Generate a safe Python function name from a function name symbol.

    If no symbol is provided, generate a name with a default prefix."""
    base = _FN_PREFIX if s is None else s
    return genname("__" + munge(base))
python
{ "resource": "" }
q274683
__fn_args_to_py_ast
test
def __fn_args_to_py_ast(
    ctx: GeneratorContext, params: Iterable[Binding], body: Do
) -> Tuple[List[ast.arg], Optional[ast.arg], List[ast.AST]]:
    """Generate a list of Python AST nodes from function method parameters.

    Returns a tuple of (fixed args, variadic arg or None, function body
    statements). A variadic binding becomes the Python *args parameter, and
    its contents are rebound via the runtime collect-args helper before the
    body executes."""
    fn_args, varg = [], None
    fn_body_ast: List[ast.AST] = []
    for binding in params:
        # Fixed typo in the assertion message ("bindint" -> "binding").
        assert binding.init is None, ":fn nodes cannot have binding :inits"
        assert varg is None, "Must have at most one variadic arg"
        arg_name = genname(munge(binding.name))

        if not binding.is_variadic:
            fn_args.append(ast.arg(arg=arg_name, annotation=None))
            ctx.symbol_table.new_symbol(
                sym.symbol(binding.name), arg_name, LocalType.ARG
            )
        else:
            varg = ast.arg(arg=arg_name, annotation=None)
            safe_local = genname(munge(binding.name))
            # Rebind the raw *args tuple as a Basilisp seq under a fresh name.
            fn_body_ast.append(
                ast.Assign(
                    targets=[ast.Name(id=safe_local, ctx=ast.Store())],
                    value=ast.Call(
                        func=_COLLECT_ARGS_FN_NAME,
                        args=[ast.Name(id=arg_name, ctx=ast.Load())],
                        keywords=[],
                    ),
                )
            )
            ctx.symbol_table.new_symbol(
                sym.symbol(binding.name), safe_local, LocalType.ARG
            )

    body_ast = _synthetic_do_to_py_ast(ctx, body)
    fn_body_ast.extend(map(statementize, body_ast.dependencies))
    fn_body_ast.append(ast.Return(value=body_ast.node))

    return fn_args, varg, fn_body_ast
python
{ "resource": "" }
q274684
__single_arity_fn_to_py_ast
test
def __single_arity_fn_to_py_ast(
    ctx: GeneratorContext,
    node: Fn,
    method: FnMethod,
    def_name: Optional[str] = None,
    meta_node: Optional[MetaNode] = None,
) -> GeneratedPyAST:
    """Return a Python AST node for a function with a single arity.

    The function is emitted as a (possibly async) Python def whose name is
    derived from def_name or the fn's own local name. Metadata decorators,
    the Basilisp fn marker decorator, and (when the body contains recur) a
    trampoline decorator are attached."""
    assert node.op == NodeOp.FN
    assert method.op == NodeOp.FN_METHOD

    lisp_fn_name = node.local.name if node.local is not None else None
    py_fn_name = __fn_name(lisp_fn_name) if def_name is None else munge(def_name)
    py_fn_node = ast.AsyncFunctionDef if node.is_async else ast.FunctionDef
    with ctx.new_symbol_table(py_fn_name), ctx.new_recur_point(
        method.loop_id, RecurType.FN, is_variadic=node.is_variadic
    ):
        # Allow named anonymous functions to recursively call themselves
        if lisp_fn_name is not None:
            ctx.symbol_table.new_symbol(
                sym.symbol(lisp_fn_name), py_fn_name, LocalType.FN
            )

        fn_args, varg, fn_body_ast = __fn_args_to_py_ast(
            ctx, method.params, method.body
        )
        meta_deps, meta_decorators = __fn_meta(ctx, meta_node)
        return GeneratedPyAST(
            node=ast.Name(id=py_fn_name, ctx=ast.Load()),
            dependencies=list(
                chain(
                    meta_deps,
                    [
                        py_fn_node(
                            name=py_fn_name,
                            args=ast.arguments(
                                args=fn_args,
                                kwarg=None,
                                vararg=varg,
                                kwonlyargs=[],
                                defaults=[],
                                kw_defaults=[],
                            ),
                            body=fn_body_ast,
                            decorator_list=list(
                                chain(
                                    meta_decorators,
                                    [_BASILISP_FN_FN_NAME],
                                    # Trampoline recur-containing functions so
                                    # tail calls do not grow the Python stack.
                                    [_TRAMPOLINE_FN_NAME]
                                    if ctx.recur_point.has_recur
                                    else [],
                                )
                            ),
                            returns=None,
                        )
                    ],
                )
            ),
        )
python
{ "resource": "" }
q274685
__multi_arity_fn_to_py_ast
test
def __multi_arity_fn_to_py_ast(  # pylint: disable=too-many-locals
    ctx: GeneratorContext,
    node: Fn,
    methods: Collection[FnMethod],
    def_name: Optional[str] = None,
    meta_node: Optional[MetaNode] = None,
) -> GeneratedPyAST:
    """Return a Python AST node for a function with multiple arities.

    Each arity is emitted as its own Python def (suffixed with its fixed
    arity or `_rest` for the variadic arity), and a dispatch function is
    generated which selects the correct arity function by argument count."""
    assert node.op == NodeOp.FN
    assert all([method.op == NodeOp.FN_METHOD for method in methods])

    lisp_fn_name = node.local.name if node.local is not None else None
    py_fn_name = __fn_name(lisp_fn_name) if def_name is None else munge(def_name)
    py_fn_node = ast.AsyncFunctionDef if node.is_async else ast.FunctionDef

    arity_to_name = {}
    rest_arity_name: Optional[str] = None
    fn_defs = []
    for method in methods:
        # Name each arity function after the dispatch name plus its arity.
        arity_name = f"{py_fn_name}__arity{'_rest' if method.is_variadic else method.fixed_arity}"
        if method.is_variadic:
            rest_arity_name = arity_name
        else:
            arity_to_name[method.fixed_arity] = arity_name

        with ctx.new_symbol_table(arity_name), ctx.new_recur_point(
            method.loop_id, RecurType.FN, is_variadic=node.is_variadic
        ):
            # Allow named anonymous functions to recursively call themselves
            if lisp_fn_name is not None:
                ctx.symbol_table.new_symbol(
                    sym.symbol(lisp_fn_name), py_fn_name, LocalType.FN
                )

            fn_args, varg, fn_body_ast = __fn_args_to_py_ast(
                ctx, method.params, method.body
            )
            fn_defs.append(
                py_fn_node(
                    name=arity_name,
                    args=ast.arguments(
                        args=fn_args,
                        kwarg=None,
                        vararg=varg,
                        kwonlyargs=[],
                        defaults=[],
                        kw_defaults=[],
                    ),
                    body=fn_body_ast,
                    # Trampoline recur-containing arities so tail calls do not
                    # grow the Python stack.
                    decorator_list=[_TRAMPOLINE_FN_NAME]
                    if ctx.recur_point.has_recur
                    else [],
                    returns=None,
                )
            )

    dispatch_fn_ast = __multi_arity_dispatch_fn(
        ctx,
        py_fn_name,
        arity_to_name,
        default_name=rest_arity_name,
        max_fixed_arity=node.max_fixed_arity,
        meta_node=meta_node,
        is_async=node.is_async,
    )

    return GeneratedPyAST(
        node=dispatch_fn_ast.node,
        dependencies=list(chain(fn_defs, dispatch_fn_ast.dependencies)),
    )
python
{ "resource": "" }
q274686
_fn_to_py_ast
test
def _fn_to_py_ast(
    ctx: GeneratorContext,
    node: Fn,
    def_name: Optional[str] = None,
    meta_node: Optional[MetaNode] = None,
) -> GeneratedPyAST:
    """Return a Python AST Node for a `fn` expression, dispatching on the
    number of arities defined for the fn."""
    assert node.op == NodeOp.FN
    if len(node.methods) == 1:
        only_method = next(iter(node.methods))
        return __single_arity_fn_to_py_ast(
            ctx, node, only_method, def_name=def_name, meta_node=meta_node
        )
    return __multi_arity_fn_to_py_ast(
        ctx, node, node.methods, def_name=def_name, meta_node=meta_node
    )
python
{ "resource": "" }
q274687
__if_body_to_py_ast
test
def __if_body_to_py_ast(
    ctx: GeneratorContext, node: Node, result_name: str
) -> GeneratedPyAST:
    """Generate custom `if` nodes to handle `recur` bodies.

    Recur nodes can appear in the then and else expressions of `if` forms.
    Recur nodes generate Python `continue` statements, which we would
    otherwise attempt to insert directly into an expression. Python will
    complain if it finds a statement in an expression AST slot, so we
    special case the recur handling here."""
    if node.op == NodeOp.RECUR and ctx.recur_point.type == RecurType.LOOP:
        # Loop recur: emit the recur statements directly (no value assign).
        assert isinstance(node, Recur)
        return _recur_to_py_ast(ctx, node)
    elif node.op == NodeOp.DO:
        # Inline do bodies as statements, assigning only the final value.
        assert isinstance(node, Do)
        if_body = _synthetic_do_to_py_ast(ctx, node.assoc(is_body=True))
        return GeneratedPyAST(
            node=ast.Assign(
                targets=[ast.Name(id=result_name, ctx=ast.Store())], value=if_body.node
            ),
            dependencies=list(map(statementize, if_body.dependencies)),
        )
    else:
        py_ast = gen_py_ast(ctx, node)
        return GeneratedPyAST(
            node=ast.Assign(
                targets=[ast.Name(id=result_name, ctx=ast.Store())], value=py_ast.node
            ),
            dependencies=py_ast.dependencies,
        )
python
{ "resource": "" }
q274688
_if_to_py_ast
test
def _if_to_py_ast(ctx: GeneratorContext, node: If) -> GeneratedPyAST:
    """Generate an intermediate if statement which assigns to a temporary
    variable, which is returned as the expression value at the end of
    evaluation.

    Every expression in Basilisp is true if it is not the literal values nil
    or false. This function compiles direct checks for the test value against
    the Python values None and False to accommodate this behavior.

    Note that the if and else bodies are switched in compilation so that we
    can perform a short-circuit or comparison, rather than exhaustively
    checking for both false and nil each time."""
    assert node.op == NodeOp.IF

    test_ast = gen_py_ast(ctx, node.test)
    result_name = genname(_IF_RESULT_PREFIX)

    then_ast = __if_body_to_py_ast(ctx, node.then, result_name)
    else_ast = __if_body_to_py_ast(ctx, node.else_, result_name)

    # Evaluate the test exactly once into a temporary
    test_name = genname(_IF_TEST_PREFIX)
    test_assign = ast.Assign(
        targets=[ast.Name(id=test_name, ctx=ast.Store())], value=test_ast.node
    )

    # Fix: removed the stray `values=[]` keyword previously passed to ast.If.
    # `values` is not a field of `If` (only of BoolOp); unknown keyword
    # arguments to AST node constructors are deprecated as of CPython 3.13
    # and slated to become an error.
    ifstmt = ast.If(
        # `test is None or test is False` => the Lisp test is falsey
        test=ast.BoolOp(
            op=ast.Or(),
            values=[
                ast.Compare(
                    left=ast.NameConstant(None),
                    ops=[ast.Is()],
                    comparators=[ast.Name(id=test_name, ctx=ast.Load())],
                ),
                ast.Compare(
                    left=ast.NameConstant(False),
                    ops=[ast.Is()],
                    comparators=[ast.Name(id=test_name, ctx=ast.Load())],
                ),
            ],
        ),
        # Branches are intentionally swapped: the Python `if` body runs when
        # the Lisp test is falsey, so it carries the else expression
        body=list(map(statementize, chain(else_ast.dependencies, [else_ast.node]))),
        orelse=list(map(statementize, chain(then_ast.dependencies, [then_ast.node]))),
    )

    return GeneratedPyAST(
        node=ast.Name(id=result_name, ctx=ast.Load()),
        dependencies=list(chain(test_ast.dependencies, [test_assign, ifstmt])),
    )
python
{ "resource": "" }
q274689
_invoke_to_py_ast
test
def _invoke_to_py_ast(ctx: GeneratorContext, node: Invoke) -> GeneratedPyAST:
    """Return a Python AST Node for a Basilisp function invocation.

    The function position and arguments are generated first; their
    dependency statements are hoisted ahead of the final call expression."""
    assert node.op == NodeOp.INVOKE

    fn_expr = gen_py_ast(ctx, node.fn)
    arg_deps, arg_exprs = _collection_ast(ctx, node.args)

    call_expr = ast.Call(func=fn_expr.node, args=list(arg_exprs), keywords=[])
    deps: List[ast.AST] = list(fn_expr.dependencies)
    deps.extend(arg_deps)
    return GeneratedPyAST(node=call_expr, dependencies=deps)
python
{ "resource": "" }
q274690
_quote_to_py_ast
test
def _quote_to_py_ast(ctx: GeneratorContext, node: Quote) -> GeneratedPyAST:
    """Return a Python AST Node for a `quote` expression.

    Quoted forms are never evaluated, so the quoted expression compiles
    as a constant."""
    assert node.op == NodeOp.QUOTE
    quoted_form = node.expr
    return _const_node_to_py_ast(ctx, quoted_form)
python
{ "resource": "" }
q274691
__loop_recur_to_py_ast
test
def __loop_recur_to_py_ast(ctx: GeneratorContext, node: Recur) -> GeneratedPyAST:
    """Return a Python AST node for `recur` occurring inside a `loop`.

    Rebinds the loop's binding names to the freshly generated recur values
    (a plain assignment for one binding, a tuple assignment otherwise) and
    then emits `continue` to restart the enclosing loop."""
    assert node.op == NodeOp.RECUR

    deps: List[ast.AST] = []
    bind_targets: List[ast.Name] = []
    bind_values: List[ast.AST] = []
    for binding, expr in zip(ctx.recur_point.binding_names, node.exprs):
        generated = gen_py_ast(ctx, expr)
        deps.extend(generated.dependencies)
        bind_targets.append(ast.Name(id=binding, ctx=ast.Store()))
        bind_values.append(generated.node)

    if len(bind_targets) == 1:
        assert len(bind_values) == 1
        rebind = ast.Assign(targets=bind_targets, value=bind_values[0])
    else:
        # Tuple assignment evaluates the whole right-hand side before any
        # target is rebound, so later values may safely read earlier bindings
        rebind = ast.Assign(
            targets=[ast.Tuple(elts=bind_targets, ctx=ast.Store())],
            value=ast.Tuple(elts=bind_values, ctx=ast.Load()),
        )

    deps.append(rebind)
    deps.append(ast.Continue())
    # `recur` has no expression value of its own
    return GeneratedPyAST(node=ast.NameConstant(None), dependencies=deps)
python
{ "resource": "" }
q274692
_recur_to_py_ast
test
def _recur_to_py_ast(ctx: GeneratorContext, node: Recur) -> GeneratedPyAST:
    """Return a Python AST Node for a `recur` expression.

    Note that `recur` nodes can only legally appear in two AST locations:
      (1) in :then or :else expressions in :if nodes, and
      (2) in :ret expressions in :do nodes

    As such, both of these handlers special case the recur construct, as it
    is the only case in which the code generator emits a statement rather
    than an expression."""
    assert node.op == NodeOp.RECUR
    assert ctx.recur_point is not None, "Must have set a recur point to recur"

    # Dispatch on the kind of recur point (fn vs. loop) currently in scope
    handler = _RECUR_TYPE_HANDLER.get(ctx.recur_point.type)
    assert (
        handler is not None
    ), f"No recur point handler defined for {ctx.recur_point.type}"

    # Record that this recur point was actually used so the enclosing fn
    # generator knows to apply trampolining
    ctx.recur_point.has_recur = True
    return handler(ctx, node)
python
{ "resource": "" }
q274693
_set_bang_to_py_ast
test
def _set_bang_to_py_ast(ctx: GeneratorContext, node: SetBang) -> GeneratedPyAST:
    """Return a Python AST Node for a `set!` expression.

    The value expression is generated once and stored in a temporary; the
    temporary is then assigned to the target and also serves as the overall
    expression value of the `set!` form."""
    assert node.op == NodeOp.SET_BANG

    val_temp_name = genname("set_bang_val")
    val_ast = gen_py_ast(ctx, node.val)

    target = node.target
    assert isinstance(
        target, (HostField, Local, VarRef)
    ), f"invalid set! target type {type(target)}"

    if isinstance(target, HostField):
        target_ast = _interop_prop_to_py_ast(ctx, target, is_assigning=True)
    elif isinstance(target, VarRef):
        target_ast = _var_sym_to_py_ast(ctx, target, is_assigning=True)
    elif isinstance(target, Local):
        target_ast = _local_sym_to_py_ast(ctx, target, is_assigning=True)
    else:  # pragma: no cover
        raise GeneratorException(
            f"invalid set! target type {type(target)}", lisp_ast=target
        )

    return GeneratedPyAST(
        node=ast.Name(id=val_temp_name, ctx=ast.Load()),
        dependencies=list(
            chain(
                val_ast.dependencies,
                [
                    ast.Assign(
                        targets=[ast.Name(id=val_temp_name, ctx=ast.Store())],
                        value=val_ast.node,
                    )
                ],
                target_ast.dependencies,
                [
                    # Fix: assign the target from the temporary rather than
                    # re-embedding val_ast.node, which would evaluate the
                    # value expression (and any side effects) a second time.
                    ast.Assign(
                        targets=[target_ast.node],
                        value=ast.Name(id=val_temp_name, ctx=ast.Load()),
                    )
                ],
            )
        ),
    )
python
{ "resource": "" }
q274694
_throw_to_py_ast
test
def _throw_to_py_ast(ctx: GeneratorContext, node: Throw) -> GeneratedPyAST:
    """Return a Python AST Node for a `throw` expression.

    Python's `raise` is a statement, so the raise is wrapped in a generated
    zero-argument function; the expression value of the `throw` form is a
    call to that function."""
    assert node.op == NodeOp.THROW

    throw_fn = genname(_THROW_PREFIX)
    exc_ast = gen_py_ast(ctx, node.exception)

    # Build the wrapper body: exception dependencies, then the raise itself
    wrapper_body: List[ast.AST] = list(exc_ast.dependencies)
    wrapper_body.append(ast.Raise(exc=exc_ast.node, cause=None))

    wrapper_def = ast.FunctionDef(
        name=throw_fn,
        args=ast.arguments(
            args=[],
            kwarg=None,
            vararg=None,
            kwonlyargs=[],
            defaults=[],
            kw_defaults=[],
        ),
        body=wrapper_body,
        decorator_list=[],
        returns=None,
    )

    call_expr = ast.Call(
        func=ast.Name(id=throw_fn, ctx=ast.Load()), args=[], keywords=[]
    )
    return GeneratedPyAST(node=call_expr, dependencies=[wrapper_def])
python
{ "resource": "" }
q274695
_try_to_py_ast
test
def _try_to_py_ast(ctx: GeneratorContext, node: Try) -> GeneratedPyAST:
    """Return a Python AST Node for a `try` expression.

    The try body's result is captured into a generated temporary which
    becomes the expression value of the whole form. Catch clauses become
    `except` handlers, and any `finally` body is emitted into the Python
    `finally` block."""
    assert node.op == NodeOp.TRY

    try_expr_name = genname("try_expr")

    body_ast = _synthetic_do_to_py_ast(ctx, node.body)
    try_body: List[ast.AST] = list(body_ast.dependencies)
    try_body.append(
        ast.Assign(
            targets=[ast.Name(id=try_expr_name, ctx=ast.Store())],
            value=body_ast.node,
        )
    )

    # Each catch handler also assigns its result into the same temporary
    catch_handlers = [
        __catch_to_py_ast(ctx, catch, try_expr_name=try_expr_name)
        for catch in node.catches
    ]

    finallys: List[ast.AST] = []
    if node.finally_ is not None:
        finally_ast = _synthetic_do_to_py_ast(ctx, node.finally_)
        finallys.extend(statementize(dep) for dep in finally_ast.dependencies)
        finallys.append(statementize(finally_ast.node))

    return GeneratedPyAST(
        node=ast.Name(id=try_expr_name, ctx=ast.Load()),
        dependencies=[
            ast.Try(
                body=try_body,
                handlers=catch_handlers,
                orelse=[],
                finalbody=finallys,
            )
        ],
    )
python
{ "resource": "" }
q274696
_local_sym_to_py_ast
test
def _local_sym_to_py_ast(
    ctx: GeneratorContext, node: Local, is_assigning: bool = False
) -> GeneratedPyAST:
    """Generate a Python AST node for accessing a locally defined Python variable.

    Field-type locals compile to attribute access on the munged `this`
    binding; every other local kind becomes a bare Python name."""
    assert node.op == NodeOp.LOCAL

    py_ctx = ast.Store() if is_assigning else ast.Load()

    entry = ctx.symbol_table.find_symbol(sym.symbol(node.name))
    assert entry is not None

    if node.local != LocalType.FIELD:
        return GeneratedPyAST(node=ast.Name(id=entry.munged, ctx=py_ctx))

    this_entry = ctx.symbol_table.find_symbol(ctx.current_this)
    assert this_entry is not None, "Field type local must have this"
    return GeneratedPyAST(
        node=_load_attr(f"{this_entry.munged}.{entry.munged}", ctx=py_ctx)
    )
python
{ "resource": "" }
q274697
__var_find_to_py_ast
test
def __var_find_to_py_ast(
    var_name: str, ns_name: str, py_var_ctx: ast.AST
) -> GeneratedPyAST:
    """Generate Var.find calls for the named symbol.

    Produces the runtime expression `Var.find(symbol(var_name, ns=ns_name)).value`,
    resolving the Var indirectly rather than direct-linking a Python name."""
    sym_call = ast.Call(
        func=_NEW_SYM_FN_NAME,
        args=[ast.Str(var_name)],
        keywords=[ast.keyword(arg="ns", value=ast.Str(ns_name))],
    )
    find_call = ast.Call(func=_FIND_VAR_FN_NAME, args=[sym_call], keywords=[])
    return GeneratedPyAST(
        node=ast.Attribute(value=find_call, attr="value", ctx=py_var_ctx)
    )
python
{ "resource": "" }
q274698
_var_sym_to_py_ast
test
def _var_sym_to_py_ast(
    ctx: GeneratorContext, node: VarRef, is_assigning: bool = False
) -> GeneratedPyAST:
    """Generate a Python AST node for accessing a Var.

    If the Var is marked as :dynamic or :redef or the compiler option
    USE_VAR_INDIRECTION is active, do not compile to a direct access.
    If the corresponding function name is not defined in a Python module,
    no direct variable access is possible and Var.find indirection must be
    used.

    :param ctx: the code generation context
    :param node: the :var AST node being compiled
    :param is_assigning: if True, generate the node in a Store context so
        it may appear as an assignment target (used by `set!`)
    """
    assert node.op == NodeOp.VAR
    var = node.var
    ns = var.ns
    ns_name = ns.name
    ns_module = ns.module
    safe_ns = munge(ns_name)
    var_name = var.name.name
    py_var_ctx = ast.Store() if is_assigning else ast.Load()

    # Return the actual var, rather than its value if requested
    if node.return_var:
        # Inline Var.find(symbol(var_name, ns=ns_name)) without the trailing
        # `.value` attribute access used below
        return GeneratedPyAST(
            node=ast.Call(
                func=_FIND_VAR_FN_NAME,
                args=[
                    ast.Call(
                        func=_NEW_SYM_FN_NAME,
                        args=[ast.Str(var_name)],
                        keywords=[ast.keyword(arg="ns", value=ast.Str(ns_name))],
                    )
                ],
                keywords=[],
            )
        )

    # Check if we should use Var indirection
    if ctx.use_var_indirection or _is_dynamic(var) or _is_redefable(var):
        return __var_find_to_py_ast(var_name, ns_name, py_var_ctx)

    # Otherwise, try to direct-link it like a Python variable
    # Try without allowing builtins first
    safe_name = munge(var_name)
    if safe_name not in ns_module.__dict__:
        # Try allowing builtins
        safe_name = munge(var_name, allow_builtins=True)

    if safe_name in ns_module.__dict__:
        # Same-namespace Vars can be referenced by bare name; others go
        # through the (munged) namespace module attribute
        if ns is ctx.current_ns:
            return GeneratedPyAST(node=ast.Name(id=safe_name, ctx=py_var_ctx))

        return GeneratedPyAST(node=_load_attr(f"{safe_ns}.{safe_name}", ctx=py_var_ctx))

    # No direct link is possible; fall back to Var.find indirection,
    # optionally warning since indirection is slower than a direct link
    if ctx.warn_on_var_indirection:
        logger.warning(f"could not resolve a direct link to Var '{var_name}'")

    return __var_find_to_py_ast(var_name, ns_name, py_var_ctx)
python
{ "resource": "" }
q274699
_interop_prop_to_py_ast
test
def _interop_prop_to_py_ast(
    ctx: GeneratorContext, node: HostField, is_assigning: bool = False
) -> GeneratedPyAST:
    """Generate a Python AST node for Python interop property access.

    Compiles host field access into a Python attribute expression on the
    generated target, munging the field name."""
    assert node.op == NodeOp.HOST_FIELD

    target_ast = gen_py_ast(ctx, node.target)
    attr_ctx = ast.Store() if is_assigning else ast.Load()
    attr_expr = ast.Attribute(
        value=target_ast.node, attr=munge(node.field), ctx=attr_ctx
    )
    return GeneratedPyAST(node=attr_expr, dependencies=target_ast.dependencies)
python
{ "resource": "" }