after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def _get_ast(
self,
schema: s_schema.Schema,
context: sd.CommandContext,
*,
parent_node: Optional[qlast.DDLOperation] = None,
) -> Optional[qlast.DDLOperation]:
set_field = super()._get_ast(schema, context, parent_node=parent_node)
if set_field is None or self.is_attribute_computed("target"):
return None
else:
assert isinstance(set_field, qlast.SetField)
return qlast.SetPointerType(
value=set_field.value,
cast_expr=(self.cast_expr.qlast if self.cast_expr is not None else None),
)
|
def _get_ast(
self,
schema: s_schema.Schema,
context: sd.CommandContext,
*,
parent_node: Optional[qlast.DDLOperation] = None,
) -> Optional[qlast.DDLOperation]:
set_field = super()._get_ast(schema, context, parent_node=parent_node)
if set_field is None:
return None
else:
assert isinstance(set_field, qlast.SetField)
return qlast.SetPointerType(
value=set_field.value,
cast_expr=(self.cast_expr.qlast if self.cast_expr is not None else None),
)
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def get_referrers_ex(
self,
scls: so.Object,
*,
scls_type: Optional[Type[so.Object_T]] = None,
) -> Dict[
Tuple[Type[so.Object_T], str],
FrozenSet[so.Object_T],
]:
raise NotImplementedError
|
def get_referrers_ex(
self,
scls: so.Object,
) -> Dict[
Tuple[Type[so.Object], str],
FrozenSet[so.Object],
]:
raise NotImplementedError
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def _get_referrers(
self,
scls: so.Object,
*,
scls_type: Optional[Type[so.Object_T]] = None,
field_name: Optional[str] = None,
) -> FrozenSet[so.Object_T]:
try:
refs = self._refs_to[scls.id]
except KeyError:
return frozenset()
else:
referrers: Set[so.Object] = set()
if scls_type is not None:
if field_name is not None:
for (st, fn), ids in refs.items():
if issubclass(st, scls_type) and fn == field_name:
referrers.update(self.get_by_id(objid) for objid in ids)
else:
for (st, _), ids in refs.items():
if issubclass(st, scls_type):
referrers.update(self.get_by_id(objid) for objid in ids)
elif field_name is not None:
for (_, fn), ids in refs.items():
if fn == field_name:
referrers.update(self.get_by_id(objid) for objid in ids)
else:
refids = itertools.chain.from_iterable(refs.values())
referrers.update(self.get_by_id(objid) for objid in refids)
return frozenset(referrers) # type: ignore
|
def _get_referrers(
self,
scls: so.Object,
*,
scls_type: Optional[Type[so.Object_T]] = None,
field_name: Optional[str] = None,
) -> FrozenSet[so.Object_T]:
try:
refs = self._refs_to[scls.id]
except KeyError:
return frozenset()
else:
referrers: Set[so.Object] = set()
if scls_type is not None:
if field_name is not None:
for (st, fn), ids in refs.items():
if st is scls_type and fn == field_name:
referrers.update(self.get_by_id(objid) for objid in ids)
else:
for (st, _), ids in refs.items():
if st is scls_type:
referrers.update(self.get_by_id(objid) for objid in ids)
elif field_name is not None:
raise ValueError(
"get_referrers: field_name cannot be used without scls_type"
)
else:
refids = itertools.chain.from_iterable(refs.values())
referrers.update(self.get_by_id(objid) for objid in refids)
return frozenset(referrers) # type: ignore
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def get_referrers_ex(
self,
scls: so.Object,
*,
scls_type: Optional[Type[so.Object_T]] = None,
) -> Dict[
Tuple[Type[so.Object_T], str],
FrozenSet[so.Object_T],
]:
try:
refs = self._refs_to[scls.id]
except KeyError:
return {}
else:
result = {}
if scls_type is not None:
for (st, fn), ids in refs.items():
if issubclass(st, scls_type):
result[st, fn] = frozenset(self.get_by_id(objid) for objid in ids)
else:
for (st, fn), ids in refs.items():
result[st, fn] = frozenset( # type: ignore
self.get_by_id(objid) for objid in ids
)
return result # type: ignore
|
def get_referrers_ex(
self,
scls: so.Object,
) -> Dict[
Tuple[Type[so.Object], str],
FrozenSet[so.Object],
]:
try:
refs = self._refs_to[scls.id]
except KeyError:
return {}
else:
result = {}
for (st, fn), ids in refs.items():
result[st, fn] = frozenset(self.get_by_id(objid) for objid in ids)
return result
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def get_referrers_ex(
self,
scls: so.Object,
*,
scls_type: Optional[Type[so.Object_T]] = None,
) -> Dict[
Tuple[Type[so.Object_T], str],
FrozenSet[so.Object_T],
]:
base = self._base_schema.get_referrers_ex(scls, scls_type=scls_type)
top = self._top_schema.get_referrers_ex(scls, scls_type=scls_type)
return {
k: base.get(k, frozenset()) | top.get(k, frozenset())
for k in itertools.chain(base, top)
}
|
def get_referrers_ex(
self,
scls: so.Object,
) -> Dict[
Tuple[Type[so.Object], str],
FrozenSet[so.Object],
]:
base = self._base_schema.get_referrers_ex(scls)
top = self._top_schema.get_referrers_ex(scls)
return {
k: base.get(k, frozenset()) | top.get(k, frozenset())
for k in itertools.chain(base, top)
}
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def resolve(self, schema: s_schema.Schema) -> Type:
return schema.get(
self.get_name(schema),
type=self.schemaclass,
sourcectx=self.sourcectx,
)
|
def resolve(self, schema: s_schema.Schema) -> Type:
return schema.get(
self.name,
type=self.schemaclass,
sourcectx=self.sourcectx,
)
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def __init__(
self,
*,
components: Iterable[TypeShell],
module: str,
schemaclass: typing.Type[Type] = Type,
sourcectx: Optional[parsing.ParserContext] = None,
) -> None:
super().__init__(
name=s_name.UnqualName("__unresolved__"),
schemaclass=schemaclass,
sourcectx=sourcectx,
)
self.components = tuple(components)
self.module = module
|
def __init__(
self,
components: Iterable[TypeShell],
module: str,
) -> None:
self.components = tuple(components)
self.module = module
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def __init__(
self,
*,
components: Iterable[TypeShell],
module: str,
opaque: bool = False,
schemaclass: typing.Type[Type] = Type,
sourcectx: Optional[parsing.ParserContext] = None,
) -> None:
super().__init__(
components=components,
module=module,
schemaclass=schemaclass,
sourcectx=sourcectx,
)
self.opaque = opaque
|
def __init__(
self,
components: Iterable[TypeShell],
module: str,
opaque: bool = False,
) -> None:
self.components = tuple(components)
self.module = module
self.opaque = opaque
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def get_name(
self,
schema: s_schema.Schema,
) -> s_name.Name:
return get_union_type_name(
(c.get_name(schema) for c in self.components),
opaque=self.opaque,
module=self.module,
)
|
def get_name(
    self,
    schema: s_schema.Schema,
) -> s_name.Name:
    """Return the schema name for this union type.

    Delegates to get_union_type_id(), which yields both the stable
    type id and the name; only the name part is used here.
    """
    id_and_name = get_union_type_id(
        schema,
        self.components,
        opaque=self.opaque,
        module=self.module,
    )
    return id_and_name[1]
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def as_create_delta(
    self,
    schema: s_schema.Schema,
    *,
    view_name: Optional[s_name.QualName] = None,
    attrs: Optional[Dict[str, Any]] = None,
) -> sd.Command:
    """Build a CreateUnionType command that materializes this union.

    The classname is derived from the component type names; view_name
    and attrs are accepted for signature parity but not consulted here.
    """
    name = get_union_type_name(
        (c.get_name(schema) for c in self.components),
        opaque=self.opaque,
        module=self.module,
    )
    cmd = CreateUnionType(classname=name)
    for field, value in (
        ("name", name),
        ("components", tuple(self.components)),
        ("is_opaque_union", self.opaque),
    ):
        cmd.set_attribute_value(field, value)
    return cmd
|
def as_create_delta(
    self,
    schema: s_schema.Schema,
    *,
    view_name: Optional[s_name.QualName] = None,
    attrs: Optional[Dict[str, Any]] = None,
) -> sd.Command:
    """Build a CreateUnionType command that materializes this union.

    Both the stable type id and the derived classname come from
    get_union_type_id(); view_name and attrs are not consulted here.
    """
    type_id, name = get_union_type_id(
        schema,
        self.components,
        opaque=self.opaque,
        module=self.module,
    )
    cmd = CreateUnionType(classname=name)
    for field, value in (
        ("id", type_id),
        ("name", name),
        ("components", tuple(self.components)),
        ("is_opaque_union", self.opaque),
    ):
        cmd.set_attribute_value(field, value)
    return cmd
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def get_name(
    self,
    schema: s_schema.Schema,
) -> s_name.Name:
    """Return the synthesized schema name for this intersection type."""
    component_names = [c.get_name(schema) for c in self.components]
    return get_intersection_type_name(component_names, module=self.module)
|
def get_name(
    self,
    schema: s_schema.Schema,
) -> s_name.Name:
    """Return the schema name for this intersection type.

    get_intersection_type_id() computes both the id and the name; only
    the name part is needed here.
    """
    id_and_name = get_intersection_type_id(
        schema,
        self.components,
        module=self.module,
    )
    return id_and_name[1]
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def get_displayname_static(cls, name: s_name.Name) -> str:
    """Return a human-readable rendering of *name*.

    Fully-qualified names are shown verbatim; unqualified (mangled
    global) names are unmangled first.
    """
    if not isinstance(name, s_name.QualName):
        return s_name.unmangle_name(str(name))
    return str(name)
|
def get_displayname_static(cls, name: s_name.Name) -> str:
    """Return a human-readable rendering of *name*."""
    displayname = type_displayname_from_name(name)
    return displayname
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def create(
    cls: typing.Type[Array_T],
    schema: s_schema.Schema,
    *,
    name: Optional[s_name.Name] = None,
    id: Optional[uuid.UUID] = None,
    dimensions: Sequence[int] = (),
    element_type: Any,
    **kwargs: Any,
) -> typing.Tuple[s_schema.Schema, Array_T]:
    """Return ``(schema, array_type)``, creating the type if it is absent.

    An existing type with the derived (or supplied) name is reused;
    otherwise a new one is inserted into the schema.
    """
    # Default to a single unbounded dimension; anything else is rejected.
    if not dimensions:
        dimensions = [-1]
    if dimensions != [-1]:
        raise errors.UnsupportedFeatureError(
            f"multi-dimensional arrays are not supported"
        )
    # Derive a name from the element type's name when none was given.
    if name is None:
        name = cls.generate_name(element_type.get_name(schema))
    # Qualified names resolve via the module namespace; unqualified ones
    # via the global namespace.
    if isinstance(name, s_name.QualName):
        result = schema.get(name, type=cls, default=None)
    else:
        result = schema.get_global(cls, name, default=None)
    if result is None:
        schema, result = super().create_in_schema(
            schema,
            id=id,
            name=name,
            element_type=element_type,
            dimensions=dimensions,
            **kwargs,
        )
    return schema, result
|
def create(
    cls: typing.Type[Array_T],
    schema: s_schema.Schema,
    *,
    name: Optional[s_name.Name] = None,
    id: Union[uuid.UUID, so.NoDefaultT] = so.NoDefault,
    dimensions: Sequence[int] = (),
    element_type: Any,
    **kwargs: Any,
) -> typing.Tuple[s_schema.Schema, Array_T]:
    """Return ``(schema, array_type)``, creating the type if it is absent.

    When no explicit id is supplied, a deterministic id is generated
    from the element type, the dimensions and (if given) the name; the
    name itself defaults to one derived from that id plus a display name.
    """
    # Default to a single unbounded dimension; anything else is rejected.
    if not dimensions:
        dimensions = [-1]
    if dimensions != [-1]:
        raise errors.UnsupportedFeatureError(
            f"multi-dimensional arrays are not supported"
        )
    if id is so.NoDefault:
        # Fold the requested name (if any) into the id derivation so
        # differently-named arrays over the same element get distinct ids.
        quals = []
        if name is not None:
            quals.append(str(name))
        id = generate_array_type_id(schema, element_type, dimensions, *quals)
    if name is None:
        dn = f"array<{element_type.get_displayname(schema)}>"
        name = type_name_from_id_and_displayname(id, dn)
    # Reuse an existing type with the same id, if one is already in schema.
    result = typing.cast(Array_T, schema.get_by_id(id, default=None))
    if result is None:
        schema, result = super().create_in_schema(
            schema,
            id=id,
            name=name,
            element_type=element_type,
            dimensions=dimensions,
            **kwargs,
        )
    return schema, result
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def derive_subtype(
    self,
    schema: s_schema.Schema,
    *,
    name: s_name.QualName,
    attrs: Optional[Mapping[str, Any]] = None,
    **kwargs: Any,
) -> typing.Tuple[s_schema.Schema, ArrayExprAlias]:
    """Derive a named ArrayExprAlias sharing this array's element type
    and type modifiers."""
    assert not kwargs
    extra_attrs = dict(attrs) if attrs else {}
    element = self.get_element_type(schema)
    return ArrayExprAlias.from_subtypes(
        schema,
        [element],
        self.get_typemods(schema),
        name=name,
        **extra_attrs,
    )
|
def derive_subtype(
    self,
    schema: s_schema.Schema,
    *,
    name: s_name.QualName,
    attrs: Optional[Mapping[str, Any]] = None,
    **kwargs: Any,
) -> typing.Tuple[s_schema.Schema, Array]:
    """Derive a named Array sharing this array's element type and
    type modifiers."""
    assert not kwargs
    extra_attrs = dict(attrs) if attrs else {}
    element = self.get_element_type(schema)
    return Array.from_subtypes(
        schema,
        [element],
        self.get_typemods(schema),
        name=name,
        **extra_attrs,
    )
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def from_subtypes(
    cls: typing.Type[Array_T],
    schema: s_schema.Schema,
    subtypes: Sequence[Type],
    typemods: Any = None,
    *,
    name: Optional[s_name.QualName] = None,
    **kwargs: Any,
) -> typing.Tuple[s_schema.Schema, Array_T]:
    """Create an array type from exactly one element subtype.

    Raises SchemaError for a wrong subtype count and
    UnsupportedFeatureError for nested arrays.
    """
    if len(subtypes) != 1:
        raise errors.SchemaError(
            f"unexpected number of subtypes, expecting 1: {subtypes!r}"
        )
    stype = subtypes[0]
    if isinstance(stype, Array):
        raise errors.UnsupportedFeatureError(f"nested arrays are not supported")
    # One-dimensional unbounded array.
    return cls.create(
        schema,
        element_type=stype,
        dimensions=[-1],
        name=name,
        **kwargs,
    )
|
def from_subtypes(
    cls: typing.Type[Array_T],
    schema: s_schema.Schema,
    subtypes: Sequence[Type],
    typemods: Any = None,
    *,
    name: Optional[s_name.QualName] = None,
    id: Union[uuid.UUID, so.NoDefaultT] = so.NoDefault,
    **kwargs: Any,
) -> typing.Tuple[s_schema.Schema, Array_T]:
    """Create an array type from exactly one element subtype.

    Raises SchemaError for a wrong subtype count and
    UnsupportedFeatureError for nested arrays.
    """
    if len(subtypes) != 1:
        raise errors.SchemaError(
            f"unexpected number of subtypes, expecting 1: {subtypes!r}"
        )
    stype = subtypes[0]
    if isinstance(stype, Array):
        raise errors.UnsupportedFeatureError(f"nested arrays are not supported")
    # One-dimensional unbounded array.
    return cls.create(
        schema,
        element_type=stype,
        dimensions=[-1],
        name=name,
        id=id,
        **kwargs,
    )
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def get_name(self, schema: s_schema.Schema) -> s_name.Name:
    """Return the type name, resolving and caching it on first use."""
    if str(self.name) != "__unresolved__":
        return self.name
    self.name = self.schemaclass.generate_name(
        self.subtype.get_name(schema),
    )
    return self.name
|
def get_name(self, schema: s_schema.Schema) -> s_name.Name:
    """Return the type name, deriving it from the subtype on first use
    and caching the result on the instance."""
    if str(self.name) == "__unresolved__":
        dimensions = self.typemods[0]
        tid = generate_array_type_id(schema, self.subtype, dimensions)
        displayname = f"array<{self.subtype.get_displayname(schema)}>"
        self.name = type_name_from_id_and_displayname(tid, displayname)
    return self.name
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def get_id(self, schema: s_schema.Schema) -> uuid.UUID:
    """Return the deterministic id derived from the subtype and the
    first type modifier (the dimensions)."""
    dimensions = self.typemods[0]
    return generate_array_type_id(schema, self.subtype, dimensions)
|
def get_id(self, schema: s_schema.Schema) -> uuid.UUID:
    """Return the UUID for this array type shell.

    Prefers a stable id encoded in the shell's name; otherwise derives
    one from the element type, dimensions and disambiguating qualifiers.
    """
    name = self.get_name(schema)
    stable = type_id_from_name(name)
    if stable is not None:
        # The name itself encodes a stable type id -- reuse it verbatim.
        return stable
    # Qualifiers distinguishing otherwise-identical array types.
    quals: typing.List[str] = [str(name)]
    if self.expr is not None:
        quals.append(self.expr)
    return generate_array_type_id(
        schema, self.subtype, self.typemods[0], *quals)
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def resolve(self, schema: s_schema.Schema) -> Array:
    """Materialize this shell into the concrete Array type in *schema*."""
    if not isinstance(self.name, s_name.QualName):
        # Anonymous collection: look it up by its deterministic id.
        return schema.get_by_id(self.get_id(schema), type=Array)
    # Qualified names are registered directly in the schema.
    return schema.get(self.name, type=Array)
|
def resolve(self, schema: s_schema.Schema) -> Array:
    """Materialize this shell into the concrete Array type in *schema*."""
    type_id = self.get_id(schema)
    return schema.get_by_id(type_id, type=Array)
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def as_create_delta(
    self,
    schema: s_schema.Schema,
    *,
    view_name: Optional[s_name.QualName] = None,
    attrs: Optional[Dict[str, Any]] = None,
) -> sd.CommandGroup:
    """Return a command group that creates this array type in the schema.

    If *view_name* is given, the array is created as an expression alias
    under that name; otherwise a plain array type is created (with
    ``IF NOT EXISTS`` semantics) under its canonical name.
    """
    ca: Union[CreateArray, CreateArrayExprAlias]
    cmd = sd.CommandGroup()
    if view_name is None:
        ca = CreateArray(
            classname=self.get_name(schema),
            if_not_exists=True,
        )
        # Only plain (non-alias) arrays get an explicitly pinned id;
        # alias creation derives its own identity.
        ca.set_attribute_value("id", self.get_id(schema))
    else:
        ca = CreateArrayExprAlias(
            classname=view_name,
        )
    el = self.subtype
    if (
        isinstance(el, CollectionTypeShell)
        and schema.get_by_id(el.get_id(schema), None) is None
    ):
        # The element type is itself an unmaterialized collection:
        # schedule its creation first so the array can reference it.
        cmd.add(el.as_create_delta(schema))
    ca.set_attribute_value("name", ca.classname)
    ca.set_attribute_value("element_type", el)
    ca.set_attribute_value("is_persistent", True)
    ca.set_attribute_value("dimensions", self.typemods[0])
    if attrs:
        # Caller-supplied extra attributes override nothing set above
        # unless they use the same keys.
        for k, v in attrs.items():
            ca.set_attribute_value(k, v)
    cmd.add(ca)
    return cmd
|
def as_create_delta(
    self,
    schema: s_schema.Schema,
    *,
    view_name: Optional[s_name.QualName] = None,
    attrs: Optional[Dict[str, Any]] = None,
) -> sd.CommandGroup:
    """Return a command group that creates this array type in the schema.

    If *view_name* is given, the array is created as an expression alias
    under that name; otherwise a plain array type is created (with
    ``IF NOT EXISTS`` semantics) under its canonical name.
    """
    ca: Union[CreateArray, CreateArrayExprAlias]
    cmd = sd.CommandGroup()
    # NOTE: the id is computed up front and set unconditionally for both
    # the plain-array and the alias branches below.
    type_id = self.get_id(schema)
    if view_name is None:
        ca = CreateArray(
            classname=self.get_name(schema),
            if_not_exists=True,
        )
    else:
        ca = CreateArrayExprAlias(
            classname=view_name,
        )
    el = self.subtype
    if (
        isinstance(el, CollectionTypeShell)
        and schema.get_by_id(el.get_id(schema), None) is None
    ):
        # The element type is itself an unmaterialized collection:
        # schedule its creation first so the array can reference it.
        cmd.add(el.as_create_delta(schema))
    ca.set_attribute_value("id", type_id)
    ca.set_attribute_value("name", ca.classname)
    ca.set_attribute_value("element_type", el)
    ca.set_attribute_value("is_persistent", True)
    ca.set_attribute_value("dimensions", self.typemods[0])
    if attrs:
        for k, v in attrs.items():
            ca.set_attribute_value(k, v)
    cmd.add(ca)
    return cmd
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def create(
    cls: typing.Type[Tuple_T],
    schema: s_schema.Schema,
    *,
    name: Optional[s_name.Name] = None,
    id: Optional[uuid.UUID] = None,
    element_types: Mapping[str, Type],
    named: bool = False,
    **kwargs: Any,
) -> typing.Tuple[s_schema.Schema, Tuple_T]:
    """Create (or fetch) a tuple type with the given element types.

    If a tuple with the computed (or supplied) name already exists in
    *schema*, it is returned unchanged; otherwise a new one is created.
    Returns the (possibly updated) schema and the tuple type.
    """
    el_types = so.ObjectDict[str, Type].create(schema, element_types)
    if name is None:
        # Derive a canonical name from the element names/types; *named*
        # distinguishes named tuples from positional ones.
        name = cls.generate_name(
            {n: el.get_name(schema) for n, el in element_types.items()},
            named,
        )
    # Qualified names live in a module; unqualified ones are globals.
    if isinstance(name, s_name.QualName):
        result = schema.get(name, type=cls, default=None)
    else:
        result = schema.get_global(cls, name, default=None)
    if result is None:
        schema, result = super().create_in_schema(
            schema,
            id=id,
            name=name,
            named=named,
            element_types=el_types,
            **kwargs,
        )
    return schema, result
|
def create(
    cls: typing.Type[Tuple_T],
    schema: s_schema.Schema,
    *,
    name: Optional[s_name.Name] = None,
    id: Union[uuid.UUID, so.NoDefaultT] = so.NoDefault,
    element_types: Mapping[str, Type],
    named: bool = False,
    **kwargs: Any,
) -> typing.Tuple[s_schema.Schema, Tuple_T]:
    """Create (or fetch by id) a tuple type with the given element types.

    When *id* is not supplied it is derived deterministically from the
    element types (and the name, if any).  An existing tuple with that
    id is returned unchanged; otherwise a new one is created.
    """
    # Freeze the mapping so later id generation and creation see the
    # same, unmodifiable element set.
    element_types = types.MappingProxyType(element_types)
    if id is so.NoDefault:
        quals = []
        if name is not None:
            # The name participates in id derivation as a qualifier.
            quals.append(str(name))
        id = generate_tuple_type_id(schema, element_types, named, *quals)
    if name is None:
        # Synthesize a display name like "tuple<int64, str>".
        st_names = ", ".join(
            st.get_displayname(schema) for st in element_types.values()
        )
        name = type_name_from_id_and_displayname(id, f"tuple<{st_names}>")
    result = typing.cast(Tuple_T, schema.get_by_id(id, default=None))
    if result is None:
        schema, result = super().create_in_schema(
            schema,
            id=id,
            name=name,
            named=named,
            element_types=element_types,
            **kwargs,
        )
    return schema, result
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def get_subtypes(self, schema: s_schema.Schema) -> typing.Tuple[Type, ...]:
    """Return this tuple's element types in declaration order."""
    return self.get_element_types(schema).values(schema)
|
def get_subtypes(self, schema: s_schema.Schema) -> typing.Tuple[Type, ...]:
    """Return this tuple's element types in declaration order."""
    # The original body contained an unreachable fallback on
    # self.element_types after this return; that dead code is removed.
    return self.get_element_types(schema).values(schema)
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def derive_subtype(
    self,
    schema: s_schema.Schema,
    *,
    name: s_name.QualName,
    attrs: Optional[Mapping[str, Any]] = None,
    **kwargs: Any,
) -> typing.Tuple[s_schema.Schema, TupleExprAlias]:
    """Derive a named expression alias carrying this tuple's subtypes."""
    assert not kwargs
    subtypes = dict(self.iter_subtypes(schema))
    typemods = self.get_typemods(schema)
    extra = attrs or {}
    return TupleExprAlias.from_subtypes(
        schema,
        subtypes,
        typemods,
        name=name,
        **extra,
    )
|
def derive_subtype(
    self,
    schema: s_schema.Schema,
    *,
    name: s_name.QualName,
    attrs: Optional[Mapping[str, Any]] = None,
    **kwargs: Any,
) -> typing.Tuple[s_schema.Schema, Tuple]:
    """Derive a new Tuple under *name* with this tuple's subtypes."""
    assert not kwargs
    return Tuple.from_subtypes(
        schema,
        dict(self.iter_subtypes(schema)),
        self.get_typemods(schema),
        name=name,
        **(attrs or {}),
    )
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def from_subtypes(
    cls: typing.Type[Tuple_T],
    schema: s_schema.Schema,
    subtypes: Union[Iterable[Type], Mapping[str, Type]],
    typemods: Any = None,
    *,
    name: Optional[s_name.QualName] = None,
    **kwargs: Any,
) -> typing.Tuple[s_schema.Schema, Tuple_T]:
    """Construct a tuple type from its element types.

    *subtypes* is either a mapping of element names to types (named
    tuple) or a plain iterable, in which case elements are keyed by
    their ordinal position.
    """
    # A named tuple is requested via typemods={'named': True}.
    named = typemods.get("named", False) if typemods is not None else False
    types: Mapping[str, Type]
    if not isinstance(subtypes, collections.abc.Mapping):
        # Positional elements: key each subtype by its index.
        types = {str(idx): st for idx, st in enumerate(subtypes)}
    else:
        types = subtypes
    return cls.create(
        schema, element_types=types, named=named, name=name, **kwargs)
|
def from_subtypes(
    cls: typing.Type[Tuple_T],
    schema: s_schema.Schema,
    subtypes: Union[Iterable[Type], Mapping[str, Type]],
    typemods: Any = None,
    *,
    name: Optional[s_name.QualName] = None,
    id: Union[uuid.UUID, so.NoDefaultT] = so.NoDefault,
    **kwargs: Any,
) -> typing.Tuple[s_schema.Schema, Tuple_T]:
    """Construct a tuple type from its element types.

    *subtypes* is either a mapping of element names to types (named
    tuple) or a plain iterable, keyed by ordinal position.  The
    optional *id* is forwarded to ``create`` to pin the type's UUID.
    """
    named = False
    if typemods is not None:
        # A named tuple is requested via typemods={'named': True}.
        named = typemods.get("named", False)
    types: Mapping[str, Type]
    if isinstance(subtypes, collections.abc.Mapping):
        types = subtypes
    else:
        # Positional elements: key each subtype by its index.
        types = {str(i): type for i, type in enumerate(subtypes)}
    return cls.create(
        schema, element_types=types, named=named, name=name, id=id, **kwargs
    )
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def create_shell(
    cls,
    schema: s_schema.Schema,
    *,
    subtypes: Mapping[str, TypeShell],
    typemods: Any = None,
    name: Optional[s_name.Name] = None,
) -> TupleTypeShell:
    """Return an unresolved shell describing a tuple of *subtypes*."""
    # Shells created without a name carry a placeholder until resolved.
    shell_name = (
        name if name is not None
        else s_name.UnqualName(name="__unresolved__")
    )
    return TupleTypeShell(
        subtypes=subtypes,
        typemods=typemods,
        name=shell_name,
        schemaclass=cls,
    )
|
def create_shell(
    cls,
    schema: s_schema.Schema,
    *,
    subtypes: Mapping[str, TypeShell],
    typemods: Any = None,
    name: Optional[s_name.Name] = None,
) -> TupleTypeShell:
    """Return an unresolved shell describing a tuple of *subtypes*."""
    if name is None:
        # Placeholder name until the shell is resolved.
        name = s_name.UnqualName(name="__unresolved__")
    # NOTE(review): unlike the sibling implementation, no schemaclass is
    # passed here -- confirm TupleTypeShell's default is intended.
    return TupleTypeShell(
        subtypes=subtypes,
        typemods=typemods,
        name=name,
    )
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def material_type(
    self,
    schema: s_schema.Schema,
) -> typing.Tuple[s_schema.Schema, Tuple]:
    """Return the material (non-alias) form of this tuple type.

    Each subtype is materialized recursively.  If any subtype changed,
    or this type is itself an expression alias, a fresh Tuple is derived
    from the materialized subtypes; otherwise the type is already
    material and is returned as-is (with the possibly updated schema).
    """
    materialized = {}
    any_changed = False
    for field_name, subtype in self.iter_subtypes(schema):
        schema, material_sub = subtype.material_type(schema)
        any_changed = any_changed or material_sub != subtype
        materialized[field_name] = material_sub
    if not any_changed and not isinstance(self, TupleExprAlias):
        # Already fully material -- nothing new to derive.
        return schema, self
    return Tuple.from_subtypes(
        schema, materialized, typemods=self.get_typemods(schema)
    )
|
def material_type(
    self: Tuple_T,
    schema: s_schema.Schema,
) -> typing.Tuple[s_schema.Schema, Tuple_T]:
    """Return the material (non-alias) form of this tuple type.

    Subtypes are materialized recursively.  A new tuple of the same
    class is derived when any subtype changed, or when this type's name
    does not match its id (i.e. it is an alias); otherwise the type is
    already material and is returned unchanged.
    """
    changed = False
    resolved = {}
    for field_name, subtype in self.iter_subtypes(schema):
        schema, material_sub = subtype.material_type(schema)
        changed = changed or material_sub != subtype
        resolved[field_name] = material_sub
    # NOTE: the name/id comparison is only evaluated when no subtype
    # changed, preserving the original short-circuit behavior.
    if changed or str(self.get_name(schema)) != str(self.id):
        return self.__class__.from_subtypes(
            schema, resolved, typemods=self.get_typemods(schema)
        )
    return schema, self
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def __init__(
    self,
    *,
    name: s_name.Name,
    subtypes: Mapping[str, TypeShell],
    typemods: Any = None,
    schemaclass: typing.Type[Tuple] = Tuple,
) -> None:
    """Construct a tuple type shell.

    The base shell records the name and target schema class; the
    element shells and type modifiers are kept for later resolution.
    """
    super().__init__(name=name, schemaclass=schemaclass)
    self.typemods = typemods
    self.subtypes = subtypes
|
def __init__(
    self,
    *,
    name: s_name.Name,
    expr: Optional[str] = None,
    subtypes: Mapping[str, TypeShell],
    typemods: Any = None,
) -> None:
    """Construct a tuple type shell.

    The base shell records the name, the Tuple schema class, and the
    optional alias expression; the element shells and type modifiers
    are kept for later resolution.
    """
    super().__init__(name=name, schemaclass=Tuple, expr=expr)
    self.typemods = typemods
    self.subtypes = subtypes
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def get_name(self, schema: s_schema.Schema) -> s_name.Name:
    """Return the shell's type name, generating it lazily.

    A placeholder name of "__unresolved__" means the name has not been
    computed yet; in that case it is generated by the target schema
    class from the element names and cached on the shell.
    """
    if str(self.name) != "__unresolved__":
        return self.name
    mods = self.typemods
    is_named = mods is not None and mods.get("named", False)
    element_names = {
        field: shell.get_name(schema)
        for field, shell in self.subtypes.items()
    }
    # Cache the generated name so subsequent calls are cheap.
    self.name = self.schemaclass.generate_name(element_names, is_named)
    return self.name
|
def get_name(self, schema: s_schema.Schema) -> s_name.Name:
    """Return the shell's type name, generating it lazily.

    A placeholder name of "__unresolved__" means the name has not been
    computed yet; in that case it is derived from the generated type id
    and a human-readable "tuple<...>" display name, then cached.
    """
    if str(self.name) != "__unresolved__":
        return self.name
    mods = self.typemods
    is_named = mods is not None and mods.get("named", False)
    elements = self.subtypes
    tid = generate_tuple_type_id(schema, elements, is_named)
    display = ", ".join(
        el.get_displayname(schema) for el in elements.values()
    )
    # Cache the computed name so subsequent calls are cheap.
    self.name = type_name_from_id_and_displayname(tid, f"tuple<{display}>")
    return self.name
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def get_id(self, schema: s_schema.Schema) -> uuid.UUID:
    """Derive this tuple shell's type id from its elements."""
    is_named = self.is_named()
    return generate_tuple_type_id(schema, self.subtypes, is_named)
|
def get_id(self, schema: s_schema.Schema) -> uuid.UUID:
    """Return the type id for this tuple shell.

    Names that encode a type id directly yield that stable id;
    otherwise the id is derived from the shell's elements, qualified
    by the name (and the alias expression, if any) for uniqueness.
    """
    name = self.get_name(schema)
    from_name = type_id_from_name(name)
    if from_name is not None:
        return from_name
    is_named = self.is_named()
    qualifiers: typing.List[str] = [str(name)]
    if self.expr is not None:
        qualifiers.append(self.expr)
    return generate_tuple_type_id(
        schema, self.subtypes, is_named, *qualifiers
    )
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def resolve(self, schema: s_schema.Schema) -> Tuple:
    """Look up the concrete Tuple this shell refers to.

    Fully-qualified names are resolved by name; anonymous tuples are
    addressed by their derived type id.
    """
    if not isinstance(self.name, s_name.QualName):
        return schema.get_by_id(self.get_id(schema), type=Tuple)
    return schema.get(self.name, type=Tuple)
|
def resolve(self, schema: s_schema.Schema) -> Tuple:
    """Look up the concrete Tuple this shell refers to by type id."""
    type_id = self.get_id(schema)
    return schema.get_by_id(type_id, type=Tuple)
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def as_create_delta(
    self,
    schema: s_schema.Schema,
    *,
    view_name: Optional[s_name.QualName] = None,
    attrs: Optional[Dict[str, Any]] = None,
) -> sd.CommandGroup:
    """Return a command group that creates this tuple type.

    If *view_name* is given, the tuple is created as an expression
    alias under that name; otherwise a plain tuple create (with
    ``if_not_exists``) is emitted under the shell's own name and id.
    Collection subtypes that do not yet exist in the schema get their
    own create commands added to the group first.
    """
    ct: Union[CreateTuple, CreateTupleExprAlias]
    cmd = sd.CommandGroup()
    if view_name is None:
        ct = CreateTuple(
            classname=self.get_name(schema),
            if_not_exists=True,
        )
        # NOTE: the explicit id is only set for the non-alias case.
        ct.set_attribute_value("id", self.get_id(schema))
    else:
        ct = CreateTupleExprAlias(
            classname=view_name,
        )
    # Create any missing collection subtypes before the tuple itself.
    for el in self.subtypes.values():
        if (
            isinstance(el, CollectionTypeShell)
            and schema.get_by_id(el.get_id(schema), None) is None
        ):
            cmd.add(el.as_create_delta(schema))
    named = self.is_named()
    ct.set_attribute_value("name", ct.classname)
    ct.set_attribute_value("named", named)
    ct.set_attribute_value("is_persistent", True)
    ct.set_attribute_value("element_types", self.subtypes)
    # Caller-supplied attributes override/extend the defaults above.
    if attrs:
        for k, v in attrs.items():
            ct.set_attribute_value(k, v)
    cmd.add(ct)
    return cmd
|
def as_create_delta(
    self,
    schema: s_schema.Schema,
    *,
    view_name: Optional[s_name.QualName] = None,
    attrs: Optional[Dict[str, Any]] = None,
) -> sd.CommandGroup:
    """Return a command group that creates this tuple type.

    If *view_name* is given, the tuple is created as an expression
    alias under that name; otherwise a plain tuple create (with
    ``if_not_exists``) is emitted under the shell's own name.  The
    derived type id is set in both cases.  Collection subtypes that do
    not yet exist in the schema get their own create commands added to
    the group first.
    """
    ct: Union[CreateTuple, CreateTupleExprAlias]
    cmd = sd.CommandGroup()
    # Compute the id up front; it is set on the command in both branches.
    type_id = self.get_id(schema)
    if view_name is None:
        ct = CreateTuple(
            classname=self.get_name(schema),
            if_not_exists=True,
        )
    else:
        ct = CreateTupleExprAlias(
            classname=view_name,
        )
    # Create any missing collection subtypes before the tuple itself.
    for el in self.subtypes.values():
        if (
            isinstance(el, CollectionTypeShell)
            and schema.get_by_id(el.get_id(schema), None) is None
        ):
            cmd.add(el.as_create_delta(schema))
    named = self.is_named()
    ct.set_attribute_value("id", type_id)
    ct.set_attribute_value("name", ct.classname)
    ct.set_attribute_value("named", named)
    ct.set_attribute_value("is_persistent", True)
    ct.set_attribute_value("element_types", self.subtypes)
    # Caller-supplied attributes override/extend the defaults above.
    if attrs:
        for k, v in attrs.items():
            ct.set_attribute_value(k, v)
    cmd.add(ct)
    return cmd
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def ensure_schema_type_expr_type(
    schema: s_schema.Schema,
    type_shell: TypeExprShell,
    parent_cmd: sd.Command,
    *,
    src_context: typing.Optional[parsing.ParserContext] = None,
    context: sd.CommandContext,
) -> Optional[sd.Command]:
    """Ensure the type expression described by *type_shell* exists.

    If a type with the shell's name is already present in the schema,
    nothing is done and None is returned.  Otherwise a create command
    is produced and registered as a prerequisite of *parent_cmd*, and
    that command (which may itself be None) is returned.
    """
    name = type_shell.get_name(schema)
    if schema.get(name, default=None, type=Type) is not None:
        # The type expression already exists -- nothing to create.
        return None
    cmd = type_shell.as_create_delta(schema)
    if cmd is not None:
        parent_cmd.add_prerequisite(cmd)
    return cmd
|
def ensure_schema_type_expr_type(
    schema: s_schema.Schema,
    type_shell: TypeExprShell,
    parent_cmd: sd.Command,
    *,
    src_context: typing.Optional[parsing.ParserContext] = None,
    context: sd.CommandContext,
) -> Optional[sd.Command]:
    """Ensure the union/intersection type for *type_shell* exists.

    The type id is computed from the shell's components (the scheme
    differs for union vs. intersection shells).  If no type with that
    id exists in the schema yet, a create command is produced and
    registered as a prerequisite of *parent_cmd*.  Returns the create
    command, or None when the type already exists.
    """
    module = type_shell.module
    components = type_shell.components
    # Union and intersection shells use different id-generation schemes.
    if isinstance(type_shell, UnionTypeShell):
        type_id, type_name = get_union_type_id(
            schema,
            components,
            opaque=type_shell.opaque,
            module=module,
        )
    elif isinstance(type_shell, IntersectionTypeShell):
        type_id, type_name = get_intersection_type_id(
            schema,
            components,
            module=module,
        )
    else:
        raise AssertionError(f"unexpected type shell: {type_shell!r}")
    texpr_type = schema.get_by_id(type_id, None, type=Type)
    cmd = None
    if texpr_type is None:
        cmd = type_shell.as_create_delta(schema)
    if cmd is not None:
        parent_cmd.add_prerequisite(cmd)
    return cmd
|
https://github.com/edgedb/edgedb/issues/2002
|
ERROR: InternalServerError: 'UnionType' object has no attribute 'get_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1808, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1442, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1365, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 794, in _compile_ql_migration
target_schema = s_ddl.apply_sdl(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/schema/ddl.py", line 371, in apply_sdl
ddl_stmts = s_decl.sdl_to_ddl(current_schema, documents)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 230, in sdl_to_ddl
trace_dependencies(decl_ast, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 499, in trace_default
_register_item(node, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 623, in _register_item
trace_dependencies(cmd, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 443, in trace_ConcretePointer
_register_item(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/declarative.py", line 638, in _register_item
tdeps = qltracer.trace_refs(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 162, in trace_refs
trace(qltree, ctx=ctx)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha6/lib/python3.8/site-packages/edb/edgeql/tracer.py", line 653, in trace_Select
ctx.path_prefix = tip.get_name(ctx.schema)
AttributeError: 'UnionType' object has no attribute 'get_name'
|
AttributeError
|
def _describe_object(
    self,
    schema: s_schema.Schema,
    source: s_obj.Object,
) -> List[DumpBlockDescriptor]:
    """Build dump block descriptors for *source*.

    Produces one descriptor for the table backing *source* (a binary
    COPY of the selected columns) plus, for object types, one
    descriptor per pointer that is stored in a separate link table
    (collected recursively into ``ptrdesc``).
    """
    cols = []
    shape = []
    ptrdesc: List[DumpBlockDescriptor] = []
    if isinstance(source, s_props.Property):
        # A property with its own table: rows are
        # (source, target, ptr_item_id) named tuples.
        schema, prop_tuple = s_types.Tuple.from_subtypes(
            schema,
            {
                "source": schema.get("std::uuid"),
                "target": source.get_target(schema),
                "ptr_item_id": schema.get("std::uuid"),
            },
            {"named": True},
        )
        type_data, type_id = sertypes.TypeSerializer.describe(
            schema,
            prop_tuple,
            view_shapes={},
            view_shapes_metadata={},
            follow_links=False,
        )
        cols.extend(
            [
                "source",
                "target",
                "ptr_item_id",
            ]
        )
    elif isinstance(source, s_links.Link):
        # A link table: the base endpoint columns plus one column per
        # dumpable link property.
        props = {
            "source": schema.get("std::uuid"),
            "target": schema.get("std::uuid"),
            "ptr_item_id": schema.get("std::uuid"),
        }
        cols.extend(
            [
                "source",
                "target",
                "ptr_item_id",
            ]
        )
        for ptr in source.get_pointers(schema).objects(schema):
            # Pointers that are not dumpable are skipped entirely.
            if not ptr.is_dumpable(schema):
                continue
            stor_info = pg_types.get_pointer_storage_info(
                ptr,
                schema=schema,
                source=source,
                link_bias=True,
            )
            cols.append(stor_info.column_name)
            props[ptr.get_shortname(schema).name] = ptr.get_target(schema)
        schema, link_tuple = s_types.Tuple.from_subtypes(
            schema,
            props,
            {"named": True},
        )
        type_data, type_id = sertypes.TypeSerializer.describe(
            schema,
            link_tuple,
            view_shapes={},
            view_shapes_metadata={},
            follow_links=False,
        )
    else:
        # An object type: dump the pointer columns stored directly in
        # the object's own table; pointers that also have a separate
        # link table are described recursively.
        for ptr in source.get_pointers(schema).objects(schema):
            if not ptr.is_dumpable(schema):
                continue
            stor_info = pg_types.get_pointer_storage_info(
                ptr,
                schema=schema,
                source=source,
            )
            if stor_info.table_type == "ObjectType":
                cols.append(stor_info.column_name)
                shape.append(ptr)
            link_stor_info = pg_types.get_pointer_storage_info(
                ptr,
                schema=schema,
                source=source,
                link_bias=True,
            )
            if link_stor_info is not None:
                # Pointer has its own table -- describe it separately.
                ptrdesc.extend(self._describe_object(schema, ptr))
        type_data, type_id = sertypes.TypeSerializer.describe(
            schema,
            source,
            view_shapes={source: shape},
            view_shapes_metadata={},
            follow_links=False,
        )
    # Binary COPY of exactly the columns collected above.
    table_name = pg_common.get_backend_name(schema, source, catenate=True)
    stmt = (
        f"COPY {table_name} "
        f"({', '.join(pg_common.quote_ident(c) for c in cols)}) "
        f"TO STDOUT WITH BINARY"
    ).encode()
    # The main descriptor depends on the pointer-table descriptors.
    return [
        DumpBlockDescriptor(
            schema_object_id=source.id,
            schema_object_class=type(source).get_ql_class(),
            schema_deps=tuple(p.schema_object_id for p in ptrdesc),
            type_desc_id=type_id,
            type_desc=type_data,
            sql_copy_stmt=stmt,
        )
    ] + ptrdesc
|
def _describe_object(
    self,
    schema: s_schema.Schema,
    source: s_obj.Object,
) -> List[DumpBlockDescriptor]:
    """Build dump block descriptors for *source*.

    Produces one descriptor for the table backing *source* (a binary
    COPY of the selected columns) plus, for object types, one
    descriptor per pointer that is stored in a separate link table
    (collected recursively into ``ptrdesc``).
    """
    cols = []
    shape = []
    ptrdesc: List[DumpBlockDescriptor] = []
    if isinstance(source, s_props.Property):
        # A property with its own table: rows are
        # (source, target, ptr_item_id) named tuples.
        schema, prop_tuple = s_types.Tuple.from_subtypes(
            schema,
            {
                "source": schema.get("std::uuid"),
                "target": source.get_target(schema),
                "ptr_item_id": schema.get("std::uuid"),
            },
            {"named": True},
        )
        type_data, type_id = sertypes.TypeSerializer.describe(
            schema,
            prop_tuple,
            view_shapes={},
            view_shapes_metadata={},
            follow_links=False,
        )
        cols.extend(
            [
                "source",
                "target",
                "ptr_item_id",
            ]
        )
    elif isinstance(source, s_links.Link):
        # A link table: the base endpoint columns plus one column per
        # link property (the implicit endpoint pointers are skipped).
        props = {
            "source": schema.get("std::uuid"),
            "target": schema.get("std::uuid"),
            "ptr_item_id": schema.get("std::uuid"),
        }
        cols.extend(
            [
                "source",
                "target",
                "ptr_item_id",
            ]
        )
        for ptr in source.get_pointers(schema).objects(schema):
            # Skip the implicit source/target endpoint pointers.
            if ptr.is_endpoint_pointer(schema):
                continue
            stor_info = pg_types.get_pointer_storage_info(
                ptr,
                schema=schema,
                source=source,
                link_bias=True,
            )
            cols.append(stor_info.column_name)
            props[ptr.get_shortname(schema).name] = ptr.get_target(schema)
        schema, link_tuple = s_types.Tuple.from_subtypes(
            schema,
            props,
            {"named": True},
        )
        type_data, type_id = sertypes.TypeSerializer.describe(
            schema,
            link_tuple,
            view_shapes={},
            view_shapes_metadata={},
            follow_links=False,
        )
    else:
        # An object type: dump the pointer columns stored directly in
        # the object's own table; pointers that also have a separate
        # link table are described recursively.
        for ptr in source.get_pointers(schema).objects(schema):
            if ptr.is_endpoint_pointer(schema):
                continue
            stor_info = pg_types.get_pointer_storage_info(
                ptr,
                schema=schema,
                source=source,
            )
            if stor_info.table_type == "ObjectType":
                cols.append(stor_info.column_name)
                shape.append(ptr)
            link_stor_info = pg_types.get_pointer_storage_info(
                ptr,
                schema=schema,
                source=source,
                link_bias=True,
            )
            if link_stor_info is not None:
                # Pointer has its own table -- describe it separately.
                ptrdesc.extend(self._describe_object(schema, ptr))
        type_data, type_id = sertypes.TypeSerializer.describe(
            schema,
            source,
            view_shapes={source: shape},
            view_shapes_metadata={},
            follow_links=False,
        )
    # Binary COPY of exactly the columns collected above.
    table_name = pg_common.get_backend_name(schema, source, catenate=True)
    stmt = (
        f"COPY {table_name} "
        f"({', '.join(pg_common.quote_ident(c) for c in cols)}) "
        f"TO STDOUT WITH BINARY"
    ).encode()
    # The main descriptor depends on the pointer-table descriptors.
    return [
        DumpBlockDescriptor(
            schema_object_id=source.id,
            schema_object_class=type(source).get_ql_class(),
            schema_deps=tuple(p.schema_object_id for p in ptrdesc),
            type_desc_id=type_id,
            type_desc=type_data,
            sql_copy_stmt=stmt,
        )
    ] + ptrdesc
|
https://github.com/edgedb/edgedb/issues/2057
|
edgedb error: WARNING: unsolicited message ErrorResponse(ErrorResponse { severity: Error, code: 16777216, message: "unhandled errors in a TaskGroup; 1 sub errors: (BackendError)\n + BackendError: relation \"edgedbpub.b33c7457-3ee5-11eb-ada4-4716c5d2f104\" does not exist\n | File \"edb/server/pgcon/pgcon.pyx\", line 867, in dump\n | await self._dump(block, output_queue, fragment_suggested_size)\n | File \"edb/server/pgcon/pgcon.pyx\", line 855, in _dump\n | raise pgerror.BackendError(fields=er)\n\n", attributes: {1: b"This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md", 257: b"Traceback (most recent call last):\n File \"edb/server/mng_port/edgecon.pyx\", line 1635, in edb.server.mng_port.edgecon.EdgeConnection.main\n await self.dump()\n File \"edb/server/mng_port/edgecon.pyx\", line 2090, in dump\n async with taskgroup.TaskGroup() as g:\n File \"/work/edb/common/taskgroup.py\", line 169, in __aexit__\n raise me from None\nedb.common.taskgroup.TaskGroupError: unhandled errors in a TaskGroup; 1 sub errors: (BackendError)\n + BackendError: relation \"edgedbpub.b33c7457-3ee5-11eb-ada4-4716c5d2f104\" does not exist\n | File \"edb/server/pgcon/pgcon.pyx\", line 867, in dump\n | await self._dump(block, output_queue, fragment_suggested_size)\n | File \"edb/server/pgcon/pgcon.pyx\", line 855, in _dump\n | raise pgerror.BackendError(fields=er)\n\n\n"} })
|
BackendError
|
async def describe_database_restore(
self,
tx_snapshot_id: str,
dump_server_ver_str: Optional[str],
schema_ddl: bytes,
schema_ids: List[Tuple[str, str, bytes]],
blocks: List[Tuple[bytes, bytes]], # type_id, typespec
) -> RestoreDescriptor:
schema_object_ids = {
(name, qltype if qltype else None): uuidgen.from_bytes(objid)
for name, qltype, objid in schema_ids
}
if dump_server_ver_str is not None:
dump_server_ver = verutils.parse_version(dump_server_ver_str)
else:
dump_server_ver = None
schema = await self._introspect_schema_in_snapshot(tx_snapshot_id)
ctx = await self._ctx_new_con_state(
dbver=b"",
io_format=enums.IoFormat.BINARY,
expect_one=False,
modaliases=DEFAULT_MODULE_ALIASES_MAP,
session_config=EMPTY_MAP,
stmt_mode=enums.CompileStatementMode.ALL,
capability=enums.Capability.ALL,
json_parameters=False,
schema=schema,
schema_object_ids=schema_object_ids,
compat_ver=dump_server_ver,
)
ctx.state.start_tx()
units = self._compile(
ctx=ctx,
tokens=tokenizer.tokenize(schema_ddl),
)
schema = ctx.state.current_tx().get_schema()
restore_blocks = []
tables = []
for schema_object_id, typedesc in blocks:
schema_object_id = uuidgen.from_bytes(schema_object_id)
obj = schema._id_to_type.get(schema_object_id)
desc = sertypes.TypeSerializer.parse(typedesc)
if isinstance(obj, s_props.Property):
assert isinstance(desc, sertypes.NamedTupleDesc)
desc_ptrs = list(desc.fields.keys())
if set(desc_ptrs) != {"source", "target", "ptr_item_id"}:
raise RuntimeError("Property table dump data has extra fields")
cols = {
"source": "source",
"target": "target",
"ptr_item_id": "ptr_item_id",
}
elif isinstance(obj, s_links.Link):
assert isinstance(desc, sertypes.NamedTupleDesc)
desc_ptrs = list(desc.fields.keys())
cols = {
"source": "source",
"target": "target",
"ptr_item_id": "ptr_item_id",
}
for ptr in obj.get_pointers(schema).objects(schema):
if not ptr.is_dumpable(schema):
continue
stor_info = pg_types.get_pointer_storage_info(
ptr,
schema=schema,
source=obj,
link_bias=True,
)
ptr_name = ptr.get_shortname(schema).name
cols[ptr_name] = stor_info.column_name
if set(desc_ptrs) != set(cols):
raise RuntimeError("Link table dump data has extra fields")
elif isinstance(obj, s_objtypes.ObjectType):
assert isinstance(desc, sertypes.ShapeDesc)
desc_ptrs = list(desc.fields.keys())
cols = {}
for ptr in obj.get_pointers(schema).objects(schema):
if not ptr.is_dumpable(schema):
continue
stor_info = pg_types.get_pointer_storage_info(
ptr,
schema=schema,
source=obj,
)
if stor_info.table_type == "ObjectType":
ptr_name = ptr.get_shortname(schema).name
cols[ptr_name] = stor_info.column_name
if set(desc_ptrs) != set(cols):
raise RuntimeError("Object table dump data has extra fields")
else:
raise AssertionError(
f"unexpected object type in restore type descriptor: {obj!r}"
)
table_name = pg_common.get_backend_name(schema, obj, catenate=True)
col_list = (pg_common.quote_ident(cols[pn]) for pn in desc_ptrs)
stmt = (
f"COPY {table_name} ({', '.join(col_list)})FROM STDIN WITH BINARY"
).encode()
restore_blocks.append(
RestoreBlockDescriptor(
schema_object_id=schema_object_id,
sql_copy_stmt=stmt,
)
)
tables.append(table_name)
return RestoreDescriptor(
units=units,
blocks=restore_blocks,
tables=tables,
)
|
async def describe_database_restore(
self,
tx_snapshot_id: str,
dump_server_ver_str: Optional[str],
schema_ddl: bytes,
schema_ids: List[Tuple[str, str, bytes]],
blocks: List[Tuple[bytes, bytes]], # type_id, typespec
) -> RestoreDescriptor:
schema_object_ids = {
(name, qltype if qltype else None): uuidgen.from_bytes(objid)
for name, qltype, objid in schema_ids
}
if dump_server_ver_str is not None:
dump_server_ver = verutils.parse_version(dump_server_ver_str)
else:
dump_server_ver = None
schema = await self._introspect_schema_in_snapshot(tx_snapshot_id)
ctx = await self._ctx_new_con_state(
dbver=b"",
io_format=enums.IoFormat.BINARY,
expect_one=False,
modaliases=DEFAULT_MODULE_ALIASES_MAP,
session_config=EMPTY_MAP,
stmt_mode=enums.CompileStatementMode.ALL,
capability=enums.Capability.ALL,
json_parameters=False,
schema=schema,
schema_object_ids=schema_object_ids,
compat_ver=dump_server_ver,
)
ctx.state.start_tx()
units = self._compile(
ctx=ctx,
tokens=tokenizer.tokenize(schema_ddl),
)
schema = ctx.state.current_tx().get_schema()
restore_blocks = []
tables = []
for schema_object_id, typedesc in blocks:
schema_object_id = uuidgen.from_bytes(schema_object_id)
obj = schema._id_to_type.get(schema_object_id)
desc = sertypes.TypeSerializer.parse(typedesc)
if isinstance(obj, s_props.Property):
assert isinstance(desc, sertypes.NamedTupleDesc)
desc_ptrs = list(desc.fields.keys())
if set(desc_ptrs) != {"source", "target", "ptr_item_id"}:
raise RuntimeError("Property table dump data has extra fields")
cols = {
"source": "source",
"target": "target",
"ptr_item_id": "ptr_item_id",
}
elif isinstance(obj, s_links.Link):
assert isinstance(desc, sertypes.NamedTupleDesc)
desc_ptrs = list(desc.fields.keys())
cols = {
"source": "source",
"target": "target",
"ptr_item_id": "ptr_item_id",
}
for ptr in obj.get_pointers(schema).objects(schema):
if ptr.is_endpoint_pointer(schema):
continue
stor_info = pg_types.get_pointer_storage_info(
ptr,
schema=schema,
source=obj,
link_bias=True,
)
ptr_name = ptr.get_shortname(schema).name
cols[ptr_name] = stor_info.column_name
if set(desc_ptrs) != set(cols):
raise RuntimeError("Link table dump data has extra fields")
elif isinstance(obj, s_objtypes.ObjectType):
assert isinstance(desc, sertypes.ShapeDesc)
desc_ptrs = list(desc.fields.keys())
cols = {}
for ptr in obj.get_pointers(schema).objects(schema):
if ptr.is_endpoint_pointer(schema):
continue
stor_info = pg_types.get_pointer_storage_info(
ptr,
schema=schema,
source=obj,
)
if stor_info.table_type == "ObjectType":
ptr_name = ptr.get_shortname(schema).name
cols[ptr_name] = stor_info.column_name
if set(desc_ptrs) != set(cols):
raise RuntimeError("Object table dump data has extra fields")
else:
raise AssertionError(
f"unexpected object type in restore type descriptor: {obj!r}"
)
table_name = pg_common.get_backend_name(schema, obj, catenate=True)
col_list = (pg_common.quote_ident(cols[pn]) for pn in desc_ptrs)
stmt = (
f"COPY {table_name} ({', '.join(col_list)})FROM STDIN WITH BINARY"
).encode()
restore_blocks.append(
RestoreBlockDescriptor(
schema_object_id=schema_object_id,
sql_copy_stmt=stmt,
)
)
tables.append(table_name)
return RestoreDescriptor(
units=units,
blocks=restore_blocks,
tables=tables,
)
|
https://github.com/edgedb/edgedb/issues/2057
|
edgedb error: WARNING: unsolicited message ErrorResponse(ErrorResponse { severity: Error, code: 16777216, message: "unhandled errors in a TaskGroup; 1 sub errors: (BackendError)\n + BackendError: relation \"edgedbpub.b33c7457-3ee5-11eb-ada4-4716c5d2f104\" does not exist\n | File \"edb/server/pgcon/pgcon.pyx\", line 867, in dump\n | await self._dump(block, output_queue, fragment_suggested_size)\n | File \"edb/server/pgcon/pgcon.pyx\", line 855, in _dump\n | raise pgerror.BackendError(fields=er)\n\n", attributes: {1: b"This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md", 257: b"Traceback (most recent call last):\n File \"edb/server/mng_port/edgecon.pyx\", line 1635, in edb.server.mng_port.edgecon.EdgeConnection.main\n await self.dump()\n File \"edb/server/mng_port/edgecon.pyx\", line 2090, in dump\n async with taskgroup.TaskGroup() as g:\n File \"/work/edb/common/taskgroup.py\", line 169, in __aexit__\n raise me from None\nedb.common.taskgroup.TaskGroupError: unhandled errors in a TaskGroup; 1 sub errors: (BackendError)\n + BackendError: relation \"edgedbpub.b33c7457-3ee5-11eb-ada4-4716c5d2f104\" does not exist\n | File \"edb/server/pgcon/pgcon.pyx\", line 867, in dump\n | await self._dump(block, output_queue, fragment_suggested_size)\n | File \"edb/server/pgcon/pgcon.pyx\", line 855, in _dump\n | raise pgerror.BackendError(fields=er)\n\n\n"} })
|
BackendError
|
async def describe_database_restore(
self,
tx_snapshot_id: str,
dump_server_ver_str: Optional[str],
schema_ddl: bytes,
schema_ids: List[Tuple[str, str, bytes]],
blocks: List[Tuple[bytes, bytes]], # type_id, typespec
) -> RestoreDescriptor:
schema_object_ids = {
(s_name.name_from_string(name), qltype if qltype else None): uuidgen.from_bytes(
objid
)
for name, qltype, objid in schema_ids
}
if dump_server_ver_str is not None:
dump_server_ver = verutils.parse_version(dump_server_ver_str)
else:
dump_server_ver = None
schema = await self._introspect_schema_in_snapshot(tx_snapshot_id)
ctx = await self._ctx_new_con_state(
dbver=b"",
io_format=enums.IoFormat.BINARY,
expect_one=False,
modaliases=DEFAULT_MODULE_ALIASES_MAP,
session_config=EMPTY_MAP,
stmt_mode=enums.CompileStatementMode.ALL,
json_parameters=False,
schema=schema,
schema_object_ids=schema_object_ids,
compat_ver=dump_server_ver,
)
ctx.state.start_tx()
ddl_source = edgeql.Source.from_string(schema_ddl.decode("utf-8"))
units = self._compile(ctx=ctx, source=ddl_source)
schema = ctx.state.current_tx().get_schema()
restore_blocks = []
tables = []
for schema_object_id, typedesc in blocks:
schema_object_id = uuidgen.from_bytes(schema_object_id)
obj = schema.get_by_id(schema_object_id)
desc = sertypes.TypeSerializer.parse(typedesc)
if isinstance(obj, s_props.Property):
assert isinstance(desc, sertypes.NamedTupleDesc)
desc_ptrs = list(desc.fields.keys())
if set(desc_ptrs) != {"source", "target", "ptr_item_id"}:
raise RuntimeError("Property table dump data has extra fields")
cols = {
"source": "source",
"target": "target",
"ptr_item_id": "ptr_item_id",
}
elif isinstance(obj, s_links.Link):
assert isinstance(desc, sertypes.NamedTupleDesc)
desc_ptrs = list(desc.fields.keys())
cols = {
"source": "source",
"target": "target",
"ptr_item_id": "ptr_item_id",
}
for ptr in obj.get_pointers(schema).objects(schema):
if not ptr.is_dumpable(schema):
continue
stor_info = pg_types.get_pointer_storage_info(
ptr,
schema=schema,
source=obj,
link_bias=True,
)
ptr_name = ptr.get_shortname(schema).name
cols[ptr_name] = stor_info.column_name
if set(desc_ptrs) != set(cols):
raise RuntimeError("Link table dump data has extra fields")
elif isinstance(obj, s_objtypes.ObjectType):
assert isinstance(desc, sertypes.ShapeDesc)
desc_ptrs = list(desc.fields.keys())
cols = {}
for ptr in obj.get_pointers(schema).objects(schema):
if not ptr.is_dumpable(schema):
continue
stor_info = pg_types.get_pointer_storage_info(
ptr,
schema=schema,
source=obj,
)
if stor_info.table_type == "ObjectType":
ptr_name = ptr.get_shortname(schema).name
cols[ptr_name] = stor_info.column_name
if set(desc_ptrs) != set(cols):
raise RuntimeError("Object table dump data has extra fields")
else:
raise AssertionError(
f"unexpected object type in restore type descriptor: {obj!r}"
)
table_name = pg_common.get_backend_name(schema, obj, catenate=True)
col_list = (pg_common.quote_ident(cols[pn]) for pn in desc_ptrs)
stmt = (
f"COPY {table_name} ({', '.join(col_list)})FROM STDIN WITH BINARY"
).encode()
restore_blocks.append(
RestoreBlockDescriptor(
schema_object_id=schema_object_id,
sql_copy_stmt=stmt,
)
)
tables.append(table_name)
return RestoreDescriptor(
units=units,
blocks=restore_blocks,
tables=tables,
)
|
async def describe_database_restore(
self,
tx_snapshot_id: str,
dump_server_ver_str: Optional[str],
schema_ddl: bytes,
schema_ids: List[Tuple[str, str, bytes]],
blocks: List[Tuple[bytes, bytes]], # type_id, typespec
) -> RestoreDescriptor:
schema_object_ids = {
(s_name.name_from_string(name), qltype if qltype else None): uuidgen.from_bytes(
objid
)
for name, qltype, objid in schema_ids
}
if dump_server_ver_str is not None:
dump_server_ver = verutils.parse_version(dump_server_ver_str)
else:
dump_server_ver = None
schema = await self._introspect_schema_in_snapshot(tx_snapshot_id)
ctx = await self._ctx_new_con_state(
dbver=b"",
io_format=enums.IoFormat.BINARY,
expect_one=False,
modaliases=DEFAULT_MODULE_ALIASES_MAP,
session_config=EMPTY_MAP,
stmt_mode=enums.CompileStatementMode.ALL,
json_parameters=False,
schema=schema,
schema_object_ids=schema_object_ids,
compat_ver=dump_server_ver,
)
ctx.state.start_tx()
ddl_source = edgeql.Source.from_string(schema_ddl.decode("utf-8"))
units = self._compile(ctx=ctx, source=ddl_source)
schema = ctx.state.current_tx().get_schema()
restore_blocks = []
tables = []
for schema_object_id, typedesc in blocks:
schema_object_id = uuidgen.from_bytes(schema_object_id)
obj = schema.get_by_id(schema_object_id)
desc = sertypes.TypeSerializer.parse(typedesc)
if isinstance(obj, s_props.Property):
assert isinstance(desc, sertypes.NamedTupleDesc)
desc_ptrs = list(desc.fields.keys())
if set(desc_ptrs) != {"source", "target", "ptr_item_id"}:
raise RuntimeError("Property table dump data has extra fields")
cols = {
"source": "source",
"target": "target",
"ptr_item_id": "ptr_item_id",
}
elif isinstance(obj, s_links.Link):
assert isinstance(desc, sertypes.NamedTupleDesc)
desc_ptrs = list(desc.fields.keys())
cols = {
"source": "source",
"target": "target",
"ptr_item_id": "ptr_item_id",
}
for ptr in obj.get_pointers(schema).objects(schema):
if ptr.is_endpoint_pointer(schema):
continue
stor_info = pg_types.get_pointer_storage_info(
ptr,
schema=schema,
source=obj,
link_bias=True,
)
ptr_name = ptr.get_shortname(schema).name
cols[ptr_name] = stor_info.column_name
if set(desc_ptrs) != set(cols):
raise RuntimeError("Link table dump data has extra fields")
elif isinstance(obj, s_objtypes.ObjectType):
assert isinstance(desc, sertypes.ShapeDesc)
desc_ptrs = list(desc.fields.keys())
cols = {}
for ptr in obj.get_pointers(schema).objects(schema):
if ptr.is_endpoint_pointer(schema):
continue
stor_info = pg_types.get_pointer_storage_info(
ptr,
schema=schema,
source=obj,
)
if stor_info.table_type == "ObjectType":
ptr_name = ptr.get_shortname(schema).name
cols[ptr_name] = stor_info.column_name
if set(desc_ptrs) != set(cols):
raise RuntimeError("Object table dump data has extra fields")
else:
raise AssertionError(
f"unexpected object type in restore type descriptor: {obj!r}"
)
table_name = pg_common.get_backend_name(schema, obj, catenate=True)
col_list = (pg_common.quote_ident(cols[pn]) for pn in desc_ptrs)
stmt = (
f"COPY {table_name} ({', '.join(col_list)})FROM STDIN WITH BINARY"
).encode()
restore_blocks.append(
RestoreBlockDescriptor(
schema_object_id=schema_object_id,
sql_copy_stmt=stmt,
)
)
tables.append(table_name)
return RestoreDescriptor(
units=units,
blocks=restore_blocks,
tables=tables,
)
|
https://github.com/edgedb/edgedb/issues/2057
|
edgedb error: WARNING: unsolicited message ErrorResponse(ErrorResponse { severity: Error, code: 16777216, message: "unhandled errors in a TaskGroup; 1 sub errors: (BackendError)\n + BackendError: relation \"edgedbpub.b33c7457-3ee5-11eb-ada4-4716c5d2f104\" does not exist\n | File \"edb/server/pgcon/pgcon.pyx\", line 867, in dump\n | await self._dump(block, output_queue, fragment_suggested_size)\n | File \"edb/server/pgcon/pgcon.pyx\", line 855, in _dump\n | raise pgerror.BackendError(fields=er)\n\n", attributes: {1: b"This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md", 257: b"Traceback (most recent call last):\n File \"edb/server/mng_port/edgecon.pyx\", line 1635, in edb.server.mng_port.edgecon.EdgeConnection.main\n await self.dump()\n File \"edb/server/mng_port/edgecon.pyx\", line 2090, in dump\n async with taskgroup.TaskGroup() as g:\n File \"/work/edb/common/taskgroup.py\", line 169, in __aexit__\n raise me from None\nedb.common.taskgroup.TaskGroupError: unhandled errors in a TaskGroup; 1 sub errors: (BackendError)\n + BackendError: relation \"edgedbpub.b33c7457-3ee5-11eb-ada4-4716c5d2f104\" does not exist\n | File \"edb/server/pgcon/pgcon.pyx\", line 867, in dump\n | await self._dump(block, output_queue, fragment_suggested_size)\n | File \"edb/server/pgcon/pgcon.pyx\", line 855, in _dump\n | raise pgerror.BackendError(fields=er)\n\n\n"} })
|
BackendError
|
def _describe_type(
self, t, view_shapes, view_shapes_metadata, follow_links: bool = True
):
# The encoding format is documented in edb/api/types.txt.
buf = self.buffer
if isinstance(t, s_types.Tuple):
subtypes = [
self._describe_type(st, view_shapes, view_shapes_metadata)
for st in t.get_subtypes(self.schema)
]
if t.is_named(self.schema):
element_names = list(t.get_element_names(self.schema))
assert len(element_names) == len(subtypes)
type_id = self._get_collection_type_id(
t.schema_name, subtypes, element_names
)
if type_id in self.uuid_to_pos:
return type_id
buf.append(CTYPE_NAMEDTUPLE)
buf.append(type_id.bytes)
buf.append(_uint16_packer(len(subtypes)))
for el_name, el_type in zip(element_names, subtypes):
el_name_bytes = el_name.encode("utf-8")
buf.append(_uint32_packer(len(el_name_bytes)))
buf.append(el_name_bytes)
buf.append(_uint16_packer(self.uuid_to_pos[el_type]))
else:
type_id = self._get_collection_type_id(t.schema_name, subtypes)
if type_id in self.uuid_to_pos:
return type_id
buf.append(CTYPE_TUPLE)
buf.append(type_id.bytes)
buf.append(_uint16_packer(len(subtypes)))
for el_type in subtypes:
buf.append(_uint16_packer(self.uuid_to_pos[el_type]))
self._register_type_id(type_id)
return type_id
elif isinstance(t, s_types.Array):
subtypes = [
self._describe_type(st, view_shapes, view_shapes_metadata)
for st in t.get_subtypes(self.schema)
]
assert len(subtypes) == 1
type_id = self._get_collection_type_id(t.schema_name, subtypes)
if type_id in self.uuid_to_pos:
return type_id
buf.append(CTYPE_ARRAY)
buf.append(type_id.bytes)
buf.append(_uint16_packer(self.uuid_to_pos[subtypes[0]]))
# Number of dimensions (currently always 1)
buf.append(_uint16_packer(1))
# Dimension cardinality (currently always unbound)
buf.append(_int32_packer(-1))
self._register_type_id(type_id)
return type_id
elif isinstance(t, s_types.Collection):
raise errors.SchemaError(f"unsupported collection type {t!r}")
elif view_shapes.get(t):
# This is a view
self.schema, mt = t.material_type(self.schema)
base_type_id = mt.id
subtypes = []
element_names = []
link_props = []
links = []
metadata = view_shapes_metadata.get(t)
implicit_id = metadata is not None and metadata.has_implicit_id
for ptr in view_shapes[t]:
if ptr.singular(self.schema):
if isinstance(ptr, s_links.Link) and not follow_links:
subtype_id = self._describe_type(
self.schema.get("std::uuid"),
view_shapes,
view_shapes_metadata,
)
else:
subtype_id = self._describe_type(
ptr.get_target(self.schema), view_shapes, view_shapes_metadata
)
else:
if isinstance(ptr, s_links.Link) and not follow_links:
raise errors.InternalServerError(
"cannot describe multi links when follow_links=False"
)
else:
subtype_id = self._describe_set(
ptr.get_target(self.schema), view_shapes, view_shapes_metadata
)
subtypes.append(subtype_id)
element_names.append(ptr.get_shortname(self.schema).name)
link_props.append(False)
links.append(not ptr.is_property(self.schema))
t_rptr = t.get_rptr(self.schema)
if t_rptr is not None and (rptr_ptrs := view_shapes.get(t_rptr)):
# There are link properties in the mix
for ptr in rptr_ptrs:
if ptr.singular(self.schema):
subtype_id = self._describe_type(
ptr.get_target(self.schema), view_shapes, view_shapes_metadata
)
else:
subtype_id = self._describe_set(
ptr.get_target(self.schema), view_shapes, view_shapes_metadata
)
subtypes.append(subtype_id)
element_names.append(ptr.get_shortname(self.schema).name)
link_props.append(True)
links.append(False)
type_id = self._get_object_type_id(
base_type_id,
subtypes,
element_names,
links_props=link_props,
links=links,
has_implicit_fields=implicit_id,
)
if type_id in self.uuid_to_pos:
return type_id
buf.append(CTYPE_SHAPE)
buf.append(type_id.bytes)
assert len(subtypes) == len(element_names)
buf.append(_uint16_packer(len(subtypes)))
for el_name, el_type, el_lp, el_l in zip(
element_names, subtypes, link_props, links
):
flags = 0
if el_lp:
flags |= self.EDGE_POINTER_IS_LINKPROP
if (implicit_id and el_name == "id") or el_name == "__tid__":
if el_type != UUID_TYPE_ID:
raise errors.InternalServerError(
f"{el_name!r} is expected to be a 'std::uuid' singleton"
)
flags |= self.EDGE_POINTER_IS_IMPLICIT
if el_l:
flags |= self.EDGE_POINTER_IS_LINK
buf.append(_uint8_packer(flags))
el_name_bytes = el_name.encode("utf-8")
buf.append(_uint32_packer(len(el_name_bytes)))
buf.append(el_name_bytes)
buf.append(_uint16_packer(self.uuid_to_pos[el_type]))
self._register_type_id(type_id)
return type_id
elif isinstance(t, s_scalars.ScalarType):
# This is a scalar type
self.schema, mt = t.material_type(self.schema)
type_id = mt.id
if type_id in self.uuid_to_pos:
# already described
return type_id
base_type = mt.get_topmost_concrete_base(self.schema)
enum_values = mt.get_enum_values(self.schema)
if enum_values:
buf.append(CTYPE_ENUM)
buf.append(type_id.bytes)
buf.append(_uint16_packer(len(enum_values)))
for enum_val in enum_values:
enum_val_bytes = enum_val.encode("utf-8")
buf.append(_uint32_packer(len(enum_val_bytes)))
buf.append(enum_val_bytes)
elif mt is base_type:
buf.append(CTYPE_BASE_SCALAR)
buf.append(type_id.bytes)
else:
bt_id = self._describe_type(base_type, view_shapes, view_shapes_metadata)
buf.append(CTYPE_SCALAR)
buf.append(type_id.bytes)
buf.append(_uint16_packer(self.uuid_to_pos[bt_id]))
self._register_type_id(type_id)
return type_id
else:
raise errors.InternalServerError(
f"cannot describe type {t.get_name(self.schema)}"
)
|
def _describe_type(
self, t, view_shapes, view_shapes_metadata, follow_links: bool = True
):
# The encoding format is documented in edb/api/types.txt.
buf = self.buffer
if isinstance(t, s_types.Tuple):
subtypes = [
self._describe_type(st, view_shapes, view_shapes_metadata)
for st in t.get_subtypes(self.schema)
]
if t.is_named(self.schema):
element_names = list(t.get_element_names(self.schema))
assert len(element_names) == len(subtypes)
type_id = self._get_collection_type_id(
t.schema_name, subtypes, element_names
)
if type_id in self.uuid_to_pos:
return type_id
buf.append(CTYPE_NAMEDTUPLE)
buf.append(type_id.bytes)
buf.append(_uint16_packer(len(subtypes)))
for el_name, el_type in zip(element_names, subtypes):
el_name_bytes = el_name.encode("utf-8")
buf.append(_uint32_packer(len(el_name_bytes)))
buf.append(el_name_bytes)
buf.append(_uint16_packer(self.uuid_to_pos[el_type]))
else:
type_id = self._get_collection_type_id(t.schema_name, subtypes)
if type_id in self.uuid_to_pos:
return type_id
buf.append(CTYPE_TUPLE)
buf.append(type_id.bytes)
buf.append(_uint16_packer(len(subtypes)))
for el_type in subtypes:
buf.append(_uint16_packer(self.uuid_to_pos[el_type]))
self._register_type_id(type_id)
return type_id
elif isinstance(t, s_types.Array):
subtypes = [
self._describe_type(st, view_shapes, view_shapes_metadata)
for st in t.get_subtypes(self.schema)
]
assert len(subtypes) == 1
type_id = self._get_collection_type_id(t.schema_name, subtypes)
if type_id in self.uuid_to_pos:
return type_id
buf.append(CTYPE_ARRAY)
buf.append(type_id.bytes)
buf.append(_uint16_packer(self.uuid_to_pos[subtypes[0]]))
# Number of dimensions (currently always 1)
buf.append(_uint16_packer(1))
# Dimension cardinality (currently always unbound)
buf.append(_int32_packer(-1))
self._register_type_id(type_id)
return type_id
elif isinstance(t, s_types.Collection):
raise errors.SchemaError(f"unsupported collection type {t!r}")
elif view_shapes.get(t):
# This is a view
self.schema, mt = t.material_type(self.schema)
base_type_id = mt.id
subtypes = []
element_names = []
link_props = []
links = []
metadata = view_shapes_metadata.get(t)
implicit_id = metadata is not None and metadata.has_implicit_id
for ptr in view_shapes[t]:
if ptr.singular(self.schema):
if isinstance(ptr, s_links.Link) and not follow_links:
subtype_id = self._describe_type(
self.schema.get("std::uuid"),
view_shapes,
view_shapes_metadata,
)
else:
subtype_id = self._describe_type(
ptr.get_target(self.schema), view_shapes, view_shapes_metadata
)
else:
if isinstance(ptr, s_links.Link) and not follow_links:
raise errors.InternalServerError(
"cannot describe multi links when follow_links=False"
)
else:
subtype_id = self._describe_set(
ptr.get_target(self.schema), view_shapes, view_shapes_metadata
)
subtypes.append(subtype_id)
element_names.append(ptr.get_shortname(self.schema).name)
link_props.append(False)
links.append(not ptr.is_property(self.schema))
t_rptr = t.get_rptr(self.schema)
if t_rptr is not None:
# There are link properties in the mix
for ptr in view_shapes[t_rptr]:
if ptr.singular(self.schema):
subtype_id = self._describe_type(
ptr.get_target(self.schema), view_shapes, view_shapes_metadata
)
else:
subtype_id = self._describe_set(
ptr.get_target(self.schema), view_shapes, view_shapes_metadata
)
subtypes.append(subtype_id)
element_names.append(ptr.get_shortname(self.schema).name)
link_props.append(True)
links.append(False)
type_id = self._get_object_type_id(
base_type_id,
subtypes,
element_names,
links_props=link_props,
links=links,
has_implicit_fields=implicit_id,
)
if type_id in self.uuid_to_pos:
return type_id
buf.append(CTYPE_SHAPE)
buf.append(type_id.bytes)
assert len(subtypes) == len(element_names)
buf.append(_uint16_packer(len(subtypes)))
for el_name, el_type, el_lp, el_l in zip(
element_names, subtypes, link_props, links
):
flags = 0
if el_lp:
flags |= self.EDGE_POINTER_IS_LINKPROP
if (implicit_id and el_name == "id") or el_name == "__tid__":
if el_type != UUID_TYPE_ID:
raise errors.InternalServerError(
f"{el_name!r} is expected to be a 'std::uuid' singleton"
)
flags |= self.EDGE_POINTER_IS_IMPLICIT
if el_l:
flags |= self.EDGE_POINTER_IS_LINK
buf.append(_uint8_packer(flags))
el_name_bytes = el_name.encode("utf-8")
buf.append(_uint32_packer(len(el_name_bytes)))
buf.append(el_name_bytes)
buf.append(_uint16_packer(self.uuid_to_pos[el_type]))
self._register_type_id(type_id)
return type_id
elif isinstance(t, s_scalars.ScalarType):
# This is a scalar type
self.schema, mt = t.material_type(self.schema)
type_id = mt.id
if type_id in self.uuid_to_pos:
# already described
return type_id
base_type = mt.get_topmost_concrete_base(self.schema)
enum_values = mt.get_enum_values(self.schema)
if enum_values:
buf.append(CTYPE_ENUM)
buf.append(type_id.bytes)
buf.append(_uint16_packer(len(enum_values)))
for enum_val in enum_values:
enum_val_bytes = enum_val.encode("utf-8")
buf.append(_uint32_packer(len(enum_val_bytes)))
buf.append(enum_val_bytes)
elif mt is base_type:
buf.append(CTYPE_BASE_SCALAR)
buf.append(type_id.bytes)
else:
bt_id = self._describe_type(base_type, view_shapes, view_shapes_metadata)
buf.append(CTYPE_SCALAR)
buf.append(type_id.bytes)
buf.append(_uint16_packer(self.uuid_to_pos[bt_id]))
self._register_type_id(type_id)
return type_id
else:
raise errors.InternalServerError(
f"cannot describe type {t.get_name(self.schema)}"
)
|
https://github.com/edgedb/edgedb/issues/1812
|
asttests> SELECT array_agg((SELECT ast::Dict {keys} FILTER count(.keys) = 2 LIMIT 1).keys);
ERROR: InternalServerError: <Link 48b8eb51-fe77-11ea-bb24-0de48e5974e3 at 0x0x7ff9d977dc10>
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1795, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1429, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1392, in _compile_dispatch_ql
return self._compile_ql_query(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 628, in _compile_ql_query
out_type_data, out_type_id = sertypes.TypeSerializer.describe(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/sertypes.py", line 335, in describe
type_id = builder._describe_type(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/sertypes.py", line 172, in _describe_type
subtypes = [self._describe_type(st, view_shapes,
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/sertypes.py", line 172, in <listcomp>
subtypes = [self._describe_type(st, view_shapes,
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/sertypes.py", line 238, in _describe_type
for ptr in view_shapes[t_rptr]:
KeyError: <Link 48b8eb51-fe77-11ea-bb24-0de48e5974e3 at 0x0x7f0c72013970>
asttests> SELECT ast::Dict {keys} FILTER count(.keys) = 2 LIMIT 1;
{Object {keys: {Object {id: 4b71ae19-fda8-11ea-8708-23c2efedd44c}, Object {id: 4b71ae1a-fda8-11ea-8708-efc8c882bd87}}}}
asttests>
|
KeyError
|
def compile_Shape(shape: qlast.Shape, *, ctx: context.ContextLevel) -> irast.Set:
expr = setgen.ensure_set(dispatch.compile(shape.expr, ctx=ctx), ctx=ctx)
expr_stype = setgen.get_set_type(expr, ctx=ctx)
if not isinstance(expr_stype, s_objtypes.ObjectType):
raise errors.QueryError(
f"shapes cannot be applied to {expr_stype.get_verbosename(ctx.env.schema)}",
context=shape.context,
)
view_type = viewgen.process_view(
stype=expr_stype,
path_id=expr.path_id,
elements=shape.elements,
parser_context=shape.context,
ctx=ctx,
)
return setgen.ensure_set(expr, type_override=view_type, ctx=ctx)
|
def compile_Shape(shape: qlast.Shape, *, ctx: context.ContextLevel) -> irast.Set:
expr = setgen.ensure_set(dispatch.compile(shape.expr, ctx=ctx), ctx=ctx)
expr_stype = setgen.get_set_type(expr, ctx=ctx)
if not isinstance(expr_stype, s_objtypes.ObjectType):
raise errors.QueryError(
f"shapes cannot be applied to {expr_stype.get_verbosename(ctx.env.schema)}"
)
view_type = viewgen.process_view(
stype=expr_stype,
path_id=expr.path_id,
elements=shape.elements,
parser_context=shape.context,
ctx=ctx,
)
return setgen.ensure_set(expr, type_override=view_type, ctx=ctx)
|
https://github.com/edgedb/edgedb/issues/1788
|
ERROR: InternalServerError:
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/Users/yury/dev/edge/edgedb/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/Users/yury/dev/edge/edgedb/edb/server/compiler/compiler.py", line 1795, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/Users/yury/dev/edge/edgedb/edb/server/compiler/compiler.py", line 1429, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/Users/yury/dev/edge/edgedb/edb/server/compiler/compiler.py", line 1392, in _compile_dispatch_ql
return self._compile_ql_query(ctx, ql)
File "/Users/yury/dev/edge/edgedb/edb/server/compiler/compiler.py", line 592, in _compile_ql_query
ir = qlcompiler.compile_ast_to_ir(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/__init__.py", line 175, in wrapper
return func(*args, **kwargs)
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/__init__.py", line 223, in compile_ast_to_ir
ir_set = dispatch_mod.compile(tree, ctx=ctx)
File "/opt/local/Library/Frameworks/Python.framework/Versions/3.8/lib/python3.8/functools.py", line 874, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/stmt.py", line 84, in compile_SelectQuery
stmt.result = compile_result_clause(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/stmt.py", line 1017, in compile_result_clause
ir_result = compile_query_subject(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/stmt.py", line 1097, in compile_query_subject
view_scls = viewgen.process_view(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/viewgen.py", line 86, in process_view
view_scls = _process_view(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/viewgen.py", line 173, in _process_view
pointer = _normalize_view_ptr_expr(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/viewgen.py", line 549, in _normalize_view_ptr_expr
ptr_target = _process_view(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/viewgen.py", line 160, in _process_view
assert isinstance(view_scls, s_objtypes.ObjectType)
AssertionError
|
InternalServerError
|
def compile_result_clause(
    result: qlast.Expr,
    *,
    view_scls: Optional[s_types.Type] = None,
    view_rptr: Optional[context.ViewRPtr] = None,
    view_name: Optional[s_name.SchemaName] = None,
    result_alias: Optional[str] = None,
    forward_rptr: bool = False,
    ctx: context.ContextLevel,
) -> irast.Set:
    """Compile the result clause of a SELECT-like statement into an IR set.

    Unwraps a trailing shape (``Expr { ... }``), rewrites
    ``SELECT alias := expr`` into the equivalent ``WITH``-aliased form,
    compiles the bare result expression, and then delegates shape/view
    handling to :func:`compile_query_subject`.

    Returns the compiled :class:`irast.Set` for the result; also records
    it as ``ctx.partial_path_prefix`` for partial-path resolution.
    """
    with ctx.new() as sctx:
        # Only the top-level result is "exposed"; implicit view machinery
        # (e.g. __tid__ injection) applies to exposed expressions only.
        if sctx.stmt is ctx.toplevel_stmt:
            sctx.expr_exposed = True
        if forward_rptr:
            sctx.view_rptr = view_rptr
            # sctx.view_scls = view_scls
        result_expr: qlast.Expr
        shape: Optional[Sequence[qlast.ShapeElement]]
        if isinstance(result, qlast.Shape):
            # Separate the shape elements from the shaped expression;
            # the shape is applied later by compile_query_subject.
            result_expr = result.expr
            shape = result.elements
        else:
            result_expr = result
            shape = None
        if result_alias:
            # `SELECT foo := expr` is equivalent to
            # `WITH foo := expr SELECT foo`
            stmtctx.declare_view(result_expr, alias=result_alias, ctx=sctx)
            result_expr = qlast.Path(steps=[qlast.ObjectRef(name=result_alias)])
        # NOTE(review): the trailing `and False` makes this entire branch
        # unreachable; the empty-set type inference for INSERT/UPDATE
        # pointers below is effectively disabled.  Presumably intentional
        # (debugging/bisection) -- confirm before removing.
        if (
            view_rptr is not None
            and (view_rptr.is_insert or view_rptr.is_update)
            and view_rptr.ptrcls is not None
        ) and False:
            # If we have an empty set assigned to a pointer in an INSERT
            # or UPDATE, there's no need to explicitly specify the
            # empty set type and it can be assumed to match the pointer
            # target type.
            target_t = view_rptr.ptrcls.get_target(ctx.env.schema)
            if astutils.is_ql_empty_set(result_expr):
                expr = setgen.new_empty_set(
                    stype=target_t,
                    alias=ctx.aliases.get("e"),
                    ctx=sctx,
                    srcctx=result_expr.context,
                )
            else:
                with sctx.new() as exprctx:
                    exprctx.empty_result_type_hint = target_t
                    expr = setgen.ensure_set(
                        dispatch.compile(result_expr, ctx=exprctx), ctx=exprctx
                    )
        else:
            if astutils.is_ql_empty_set(result_expr):
                # Empty set literal: type it from the surrounding hint
                # (may be None, producing an untyped empty set).
                expr = setgen.new_empty_set(
                    stype=sctx.empty_result_type_hint,
                    alias=ctx.aliases.get("e"),
                    ctx=sctx,
                    srcctx=result_expr.context,
                )
            else:
                expr = setgen.ensure_set(
                    dispatch.compile(result_expr, ctx=sctx), ctx=sctx
                )
        # Make the bare result available as the partial-path prefix while
        # the shape is compiled, then replace it with the shaped result.
        ctx.partial_path_prefix = expr
        ir_result = compile_query_subject(
            expr,
            shape=shape,
            view_rptr=view_rptr,
            view_name=view_name,
            result_alias=result_alias,
            view_scls=view_scls,
            compile_views=ctx.stmt is ctx.toplevel_stmt,
            ctx=sctx,
            parser_context=result.context,
        )
        ctx.partial_path_prefix = ir_result
        return ir_result
|
def compile_result_clause(
    result: qlast.Expr,
    *,
    view_scls: Optional[s_types.Type] = None,
    view_rptr: Optional[context.ViewRPtr] = None,
    view_name: Optional[s_name.SchemaName] = None,
    result_alias: Optional[str] = None,
    forward_rptr: bool = False,
    ctx: context.ContextLevel,
) -> irast.Set:
    """Compile the result clause of a SELECT-like statement into an IR set.

    Unwraps a trailing shape (``Expr { ... }``), rewrites
    ``SELECT alias := expr`` into the equivalent ``WITH``-aliased form,
    compiles the bare result expression, and then delegates shape/view
    handling to :func:`compile_query_subject`.

    Returns the compiled :class:`irast.Set` for the result; also records
    it as ``ctx.partial_path_prefix`` for partial-path resolution.
    """
    with ctx.new() as sctx:
        # Only the top-level result is "exposed"; implicit view machinery
        # (e.g. __tid__ injection) applies to exposed expressions only.
        if sctx.stmt is ctx.toplevel_stmt:
            sctx.expr_exposed = True
        if forward_rptr:
            sctx.view_rptr = view_rptr
            # sctx.view_scls = view_scls
        result_expr: qlast.Expr
        shape: Optional[Sequence[qlast.ShapeElement]]
        if isinstance(result, qlast.Shape):
            # Separate the shape elements from the shaped expression;
            # the shape is applied later by compile_query_subject.
            result_expr = result.expr
            shape = result.elements
        else:
            result_expr = result
            shape = None
        if result_alias:
            # `SELECT foo := expr` is equivalent to
            # `WITH foo := expr SELECT foo`
            stmtctx.declare_view(result_expr, alias=result_alias, ctx=sctx)
            result_expr = qlast.Path(steps=[qlast.ObjectRef(name=result_alias)])
        # NOTE(review): the trailing `and False` makes this entire branch
        # unreachable; the empty-set type inference for INSERT/UPDATE
        # pointers below is effectively disabled.  Presumably intentional
        # (debugging/bisection) -- confirm before removing.
        if (
            view_rptr is not None
            and (view_rptr.is_insert or view_rptr.is_update)
            and view_rptr.ptrcls is not None
        ) and False:
            # If we have an empty set assigned to a pointer in an INSERT
            # or UPDATE, there's no need to explicitly specify the
            # empty set type and it can be assumed to match the pointer
            # target type.
            target_t = view_rptr.ptrcls.get_target(ctx.env.schema)
            if astutils.is_ql_empty_set(result_expr):
                expr = setgen.new_empty_set(
                    stype=target_t,
                    alias=ctx.aliases.get("e"),
                    ctx=sctx,
                    srcctx=result_expr.context,
                )
            else:
                with sctx.new() as exprctx:
                    exprctx.empty_result_type_hint = target_t
                    expr = setgen.ensure_set(
                        dispatch.compile(result_expr, ctx=exprctx), ctx=exprctx
                    )
        else:
            if astutils.is_ql_empty_set(result_expr):
                # Empty set literal: type it from the surrounding hint
                # (may be None, producing an untyped empty set).
                expr = setgen.new_empty_set(
                    stype=sctx.empty_result_type_hint,
                    alias=ctx.aliases.get("e"),
                    ctx=sctx,
                    srcctx=result_expr.context,
                )
            else:
                expr = setgen.ensure_set(
                    dispatch.compile(result_expr, ctx=sctx), ctx=sctx
                )
        # Make the bare result available as the partial-path prefix while
        # the shape is compiled, then replace it with the shaped result.
        ctx.partial_path_prefix = expr
        ir_result = compile_query_subject(
            expr,
            shape=shape,
            view_rptr=view_rptr,
            view_name=view_name,
            result_alias=result_alias,
            view_scls=view_scls,
            compile_views=ctx.stmt is ctx.toplevel_stmt,
            ctx=sctx,
        )
        ctx.partial_path_prefix = ir_result
        return ir_result
|
https://github.com/edgedb/edgedb/issues/1788
|
ERROR: InternalServerError:
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/Users/yury/dev/edge/edgedb/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/Users/yury/dev/edge/edgedb/edb/server/compiler/compiler.py", line 1795, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/Users/yury/dev/edge/edgedb/edb/server/compiler/compiler.py", line 1429, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/Users/yury/dev/edge/edgedb/edb/server/compiler/compiler.py", line 1392, in _compile_dispatch_ql
return self._compile_ql_query(ctx, ql)
File "/Users/yury/dev/edge/edgedb/edb/server/compiler/compiler.py", line 592, in _compile_ql_query
ir = qlcompiler.compile_ast_to_ir(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/__init__.py", line 175, in wrapper
return func(*args, **kwargs)
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/__init__.py", line 223, in compile_ast_to_ir
ir_set = dispatch_mod.compile(tree, ctx=ctx)
File "/opt/local/Library/Frameworks/Python.framework/Versions/3.8/lib/python3.8/functools.py", line 874, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/stmt.py", line 84, in compile_SelectQuery
stmt.result = compile_result_clause(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/stmt.py", line 1017, in compile_result_clause
ir_result = compile_query_subject(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/stmt.py", line 1097, in compile_query_subject
view_scls = viewgen.process_view(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/viewgen.py", line 86, in process_view
view_scls = _process_view(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/viewgen.py", line 173, in _process_view
pointer = _normalize_view_ptr_expr(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/viewgen.py", line 549, in _normalize_view_ptr_expr
ptr_target = _process_view(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/viewgen.py", line 160, in _process_view
assert isinstance(view_scls, s_objtypes.ObjectType)
AssertionError
|
InternalServerError
|
def compile_query_subject(
    expr: irast.Set,
    *,
    shape: Optional[List[qlast.ShapeElement]] = None,
    view_rptr: Optional[context.ViewRPtr] = None,
    view_name: Optional[s_name.SchemaName] = None,
    result_alias: Optional[str] = None,
    view_scls: Optional[s_types.Type] = None,
    compile_views: bool = True,
    is_insert: bool = False,
    is_update: bool = False,
    is_delete: bool = False,
    parser_context: Optional[pctx.ParserContext] = None,
    ctx: context.ContextLevel,
) -> irast.Set:
    """Apply shape/view processing to a compiled statement subject.

    Detects link-alias pointers, decides whether an implicit view is
    needed (e.g. for __tid__ injection), materializes the view type via
    ``viewgen.process_view`` when a shape is present, and optionally
    compiles the resulting view shapes.

    Raises QueryError (attributed to *parser_context*) if a shape is
    applied to a non-object type.  Returns *expr*, possibly re-typed to
    the derived view class.
    """
    expr_stype = setgen.get_set_type(expr, ctx=ctx)
    expr_rptr = expr.rptr
    # Look through type-intersection steps ([IS Foo]) to the real rptr.
    while isinstance(expr_rptr, irast.TypeIntersectionPointer):
        expr_rptr = expr_rptr.source.rptr
    is_ptr_alias = (
        view_rptr is not None
        and view_rptr.ptrcls is None
        and view_rptr.ptrcls_name is not None
        and expr_rptr is not None
        and expr_rptr.direction is s_pointers.PointerDirection.Outbound
        and (view_rptr.ptrcls_is_linkprop == (expr_rptr.ptrref.source_ptr is not None))
    )
    if is_ptr_alias:
        assert view_rptr is not None
        # We are inside an expression that defines a link alias in
        # the parent shape, ie. Spam { alias := Spam.bar }, so
        # `Spam.alias` should be a subclass of `Spam.bar` inheriting
        # its properties.
        base_ptrcls = typegen.ptrcls_from_ptrref(expr_rptr.ptrref, ctx=ctx)
        if isinstance(base_ptrcls, s_pointers.Pointer):
            view_rptr.base_ptrcls = base_ptrcls
            view_rptr.ptrcls_is_alias = True
    if (
        ctx.expr_exposed
        and viewgen.has_implicit_tid(
            expr_stype,
            is_mutation=is_insert or is_update or is_delete,
            ctx=ctx,
        )
        and shape is None
        and expr_stype not in ctx.env.view_shapes
    ):
        # Force the subject to be compiled as a view if a __tid__
        # insertion is anticipated (the actual decision is taken
        # by the compile_view_shapes() flow).
        shape = []
    if shape is not None and view_scls is None:
        if view_name is None and isinstance(result_alias, s_name.SchemaName):
            view_name = result_alias
        if not isinstance(expr_stype, s_objtypes.ObjectType):
            # Shapes are only meaningful on object types; report with
            # source context so the user sees where the shape was applied.
            raise errors.QueryError(
                f"shapes cannot be applied to "
                f"{expr_stype.get_verbosename(ctx.env.schema)}",
                context=parser_context,
            )
        view_scls = viewgen.process_view(
            stype=expr_stype,
            path_id=expr.path_id,
            elements=shape,
            view_rptr=view_rptr,
            view_name=view_name,
            is_insert=is_insert,
            is_update=is_update,
            is_delete=is_delete,
            parser_context=expr.context,
            ctx=ctx,
        )
    if view_scls is not None:
        # Re-type the set to the derived view class.
        expr = setgen.ensure_set(expr, type_override=view_scls, ctx=ctx)
        expr_stype = view_scls
    if compile_views:
        rptr = view_rptr.rptr if view_rptr is not None else None
        viewgen.compile_view_shapes(expr, rptr=rptr, ctx=ctx)
    if (shape is not None or view_scls is not None) and len(expr.path_id) == 1:
        # Record the view override for bare (length-1) paths so later
        # references to this type pick up the view.
        ctx.class_view_overrides[expr.path_id.target.id] = expr_stype
    return expr
|
def compile_query_subject(
    expr: irast.Set,
    *,
    shape: Optional[List[qlast.ShapeElement]] = None,
    view_rptr: Optional[context.ViewRPtr] = None,
    view_name: Optional[s_name.SchemaName] = None,
    result_alias: Optional[str] = None,
    view_scls: Optional[s_types.Type] = None,
    compile_views: bool = True,
    is_insert: bool = False,
    is_update: bool = False,
    is_delete: bool = False,
    ctx: context.ContextLevel,
) -> irast.Set:
    """Apply shape/view processing to a compiled statement subject.

    Detects link-alias pointers, decides whether an implicit view is
    needed (e.g. for __tid__ injection), materializes the view type via
    ``viewgen.process_view`` when a shape is present, and optionally
    compiles the resulting view shapes.

    Raises QueryError if a shape is applied to a non-object type
    (without source context in this variant).  Returns *expr*, possibly
    re-typed to the derived view class.
    """
    expr_stype = setgen.get_set_type(expr, ctx=ctx)
    expr_rptr = expr.rptr
    # Look through type-intersection steps ([IS Foo]) to the real rptr.
    while isinstance(expr_rptr, irast.TypeIntersectionPointer):
        expr_rptr = expr_rptr.source.rptr
    is_ptr_alias = (
        view_rptr is not None
        and view_rptr.ptrcls is None
        and view_rptr.ptrcls_name is not None
        and expr_rptr is not None
        and expr_rptr.direction is s_pointers.PointerDirection.Outbound
        and (view_rptr.ptrcls_is_linkprop == (expr_rptr.ptrref.source_ptr is not None))
    )
    if is_ptr_alias:
        assert view_rptr is not None
        # We are inside an expression that defines a link alias in
        # the parent shape, ie. Spam { alias := Spam.bar }, so
        # `Spam.alias` should be a subclass of `Spam.bar` inheriting
        # its properties.
        base_ptrcls = typegen.ptrcls_from_ptrref(expr_rptr.ptrref, ctx=ctx)
        if isinstance(base_ptrcls, s_pointers.Pointer):
            view_rptr.base_ptrcls = base_ptrcls
            view_rptr.ptrcls_is_alias = True
    if (
        ctx.expr_exposed
        and viewgen.has_implicit_tid(
            expr_stype,
            is_mutation=is_insert or is_update or is_delete,
            ctx=ctx,
        )
        and shape is None
        and expr_stype not in ctx.env.view_shapes
    ):
        # Force the subject to be compiled as a view if a __tid__
        # insertion is anticipated (the actual decision is taken
        # by the compile_view_shapes() flow).
        shape = []
    if shape is not None and view_scls is None:
        if view_name is None and isinstance(result_alias, s_name.SchemaName):
            view_name = result_alias
        if not isinstance(expr_stype, s_objtypes.ObjectType):
            # Shapes are only meaningful on object types.
            # NOTE(review): no `context=` here, so the error is not
            # attributed to a source location -- confirm intended.
            raise errors.QueryError(
                f"shapes cannot be applied to "
                f"{expr_stype.get_verbosename(ctx.env.schema)}"
            )
        view_scls = viewgen.process_view(
            stype=expr_stype,
            path_id=expr.path_id,
            elements=shape,
            view_rptr=view_rptr,
            view_name=view_name,
            is_insert=is_insert,
            is_update=is_update,
            is_delete=is_delete,
            parser_context=expr.context,
            ctx=ctx,
        )
    if view_scls is not None:
        # Re-type the set to the derived view class.
        expr = setgen.ensure_set(expr, type_override=view_scls, ctx=ctx)
        expr_stype = view_scls
    if compile_views:
        rptr = view_rptr.rptr if view_rptr is not None else None
        viewgen.compile_view_shapes(expr, rptr=rptr, ctx=ctx)
    if (shape is not None or view_scls is not None) and len(expr.path_id) == 1:
        # Record the view override for bare (length-1) paths so later
        # references to this type pick up the view.
        ctx.class_view_overrides[expr.path_id.target.id] = expr_stype
    return expr
|
https://github.com/edgedb/edgedb/issues/1788
|
ERROR: InternalServerError:
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/Users/yury/dev/edge/edgedb/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/Users/yury/dev/edge/edgedb/edb/server/compiler/compiler.py", line 1795, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/Users/yury/dev/edge/edgedb/edb/server/compiler/compiler.py", line 1429, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/Users/yury/dev/edge/edgedb/edb/server/compiler/compiler.py", line 1392, in _compile_dispatch_ql
return self._compile_ql_query(ctx, ql)
File "/Users/yury/dev/edge/edgedb/edb/server/compiler/compiler.py", line 592, in _compile_ql_query
ir = qlcompiler.compile_ast_to_ir(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/__init__.py", line 175, in wrapper
return func(*args, **kwargs)
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/__init__.py", line 223, in compile_ast_to_ir
ir_set = dispatch_mod.compile(tree, ctx=ctx)
File "/opt/local/Library/Frameworks/Python.framework/Versions/3.8/lib/python3.8/functools.py", line 874, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/stmt.py", line 84, in compile_SelectQuery
stmt.result = compile_result_clause(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/stmt.py", line 1017, in compile_result_clause
ir_result = compile_query_subject(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/stmt.py", line 1097, in compile_query_subject
view_scls = viewgen.process_view(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/viewgen.py", line 86, in process_view
view_scls = _process_view(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/viewgen.py", line 173, in _process_view
pointer = _normalize_view_ptr_expr(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/viewgen.py", line 549, in _normalize_view_ptr_expr
ptr_target = _process_view(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/viewgen.py", line 160, in _process_view
assert isinstance(view_scls, s_objtypes.ObjectType)
AssertionError
|
InternalServerError
|
def _process_view(
    *,
    stype: s_objtypes.ObjectType,
    path_id: irast.PathId,
    path_id_namespace: Optional[irast.WeakNamespace] = None,
    elements: List[qlast.ShapeElement],
    view_rptr: Optional[context.ViewRPtr] = None,
    view_name: Optional[sn.SchemaName] = None,
    is_insert: bool = False,
    is_update: bool = False,
    is_delete: bool = False,
    parser_context: pctx.ParserContext,
    ctx: context.ContextLevel,
) -> s_objtypes.ObjectType:
    """Derive a view type for *stype* and populate it from shape *elements*.

    Handles: readable view naming in schema-view mode, normalization of
    each shape element into a pointer, injection of pointer defaults for
    INSERT, schema-introspection default rewrites, and registration of
    the resulting pointers in ``ctx.env.view_shapes``.

    Raises QueryError on duplicate shape pointers and
    MissingRequiredError for required INSERT pointers without a value.
    Returns the derived ObjectType.
    """
    if view_name is None and ctx.env.options.schema_view_mode and view_rptr is not None:
        # Make sure persistent schema expression aliases have properly formed
        # names as opposed to the usual mangled form of the ephemeral
        # aliases. This is needed for introspection readability, as well
        # as helps in maintaining proper type names for schema
        # representations that require alphanumeric names, such as
        # GraphQL.
        #
        # We use the name of the source together with the name
        # of the inbound link to form the name, so in e.g.
        # CREATE ALIAS V := (SELECT Foo { bar: { baz: { ... } })
        # The name of the innermost alias would be "__V__bar__baz".
        source_name = view_rptr.source.get_name(ctx.env.schema).name
        if not source_name.startswith("__"):
            source_name = f"__{source_name}"
        if view_rptr.ptrcls_name is not None:
            ptr_name = view_rptr.ptrcls_name.name
        elif view_rptr.ptrcls is not None:
            ptr_name = view_rptr.ptrcls.get_shortname(ctx.env.schema).name
        else:
            # NOTE(review): message typo -- "not ptrcls" should read
            # "nor ptrcls" (runtime string, not changed here).
            raise errors.InternalServerError(
                "_process_view in schema mode received view_rptr with "
                "neither ptrcls_name, not ptrcls"
            )
        name = f"{source_name}__{ptr_name}"
        view_name = sn.Name(
            module=ctx.derived_target_module or "__derived__",
            name=name,
        )
    view_scls = schemactx.derive_view(
        stype,
        is_insert=is_insert,
        is_update=is_update,
        is_delete=is_delete,
        derived_name=view_name,
        ctx=ctx,
    )
    # derive_view is typed to return a generic type; views of object
    # types must themselves be object types.
    assert isinstance(view_scls, s_objtypes.ObjectType), view_scls
    is_mutation = is_insert or is_update
    is_defining_shape = ctx.expr_exposed or is_mutation
    if view_rptr is not None and view_rptr.ptrcls is None:
        derive_ptrcls(view_rptr, target_scls=view_scls, transparent=True, ctx=ctx)
    pointers = []
    # Normalize each explicit shape element into a pointer on the view.
    for shape_el in elements:
        with ctx.newscope(fenced=True) as scopectx:
            pointer = _normalize_view_ptr_expr(
                shape_el,
                view_scls,
                path_id=path_id,
                path_id_namespace=path_id_namespace,
                is_insert=is_insert,
                is_update=is_update,
                view_rptr=view_rptr,
                ctx=scopectx,
            )
            if pointer in pointers:
                schema = ctx.env.schema
                vnp = pointer.get_verbosename(schema, with_parent=True)
                raise errors.QueryError(
                    f"duplicate definition of {vnp}", context=shape_el.context
                )
            pointers.append(pointer)
    if is_insert:
        # For INSERT, fill in defaults for pointers not set explicitly,
        # and error out on required pointers with no default.
        explicit_ptrs = {
            ptrcls.get_shortname(ctx.env.schema).name for ptrcls in pointers
        }
        scls_pointers = stype.get_pointers(ctx.env.schema)
        for pn, ptrcls in scls_pointers.items(ctx.env.schema):
            if pn in explicit_ptrs or ptrcls.is_pure_computable(ctx.env.schema):
                continue
            default_expr = ptrcls.get_default(ctx.env.schema)
            if not default_expr:
                if ptrcls.get_required(ctx.env.schema):
                    if ptrcls.is_property(ctx.env.schema):
                        # If the target is a sequence, there's no need
                        # for an explicit value.
                        ptrcls_target = ptrcls.get_target(ctx.env.schema)
                        assert ptrcls_target is not None
                        if ptrcls_target.issubclass(
                            ctx.env.schema, ctx.env.schema.get("std::sequence")
                        ):
                            continue
                        what = "property"
                    else:
                        what = "link"
                    raise errors.MissingRequiredError(
                        f"missing value for required {what} "
                        f"{stype.get_displayname(ctx.env.schema)}."
                        f"{ptrcls.get_displayname(ctx.env.schema)}"
                    )
                else:
                    continue
            # Synthesize `ptr := DETACHED <default>` and compile it like
            # an explicit shape element.
            ptrcls_sn = ptrcls.get_shortname(ctx.env.schema)
            default_ql = qlast.ShapeElement(
                expr=qlast.Path(
                    steps=[
                        qlast.Ptr(
                            ptr=qlast.ObjectRef(
                                name=ptrcls_sn.name,
                                module=ptrcls_sn.module,
                            ),
                        ),
                    ],
                ),
                compexpr=qlast.DetachedExpr(
                    expr=default_expr.qlast,
                ),
            )
            with ctx.newscope(fenced=True) as scopectx:
                pointers.append(
                    _normalize_view_ptr_expr(
                        default_ql,
                        view_scls,
                        path_id=path_id,
                        path_id_namespace=path_id_namespace,
                        is_insert=is_insert,
                        is_update=is_update,
                        from_default=True,
                        view_rptr=view_rptr,
                        ctx=scopectx,
                    ),
                )
    elif (
        stype.get_name(ctx.env.schema).module == "schema"
        and ctx.env.options.introspection_schema_rewrites
    ):
        # Introspection of schema:: types: rewrite each pointer with a
        # declared reflection default as `ptr := .ptr ?? <default>`.
        explicit_ptrs = {
            ptrcls.get_shortname(ctx.env.schema).name for ptrcls in pointers
        }
        scls_pointers = stype.get_pointers(ctx.env.schema)
        for pn, ptrcls in scls_pointers.items(ctx.env.schema):
            if pn in explicit_ptrs or ptrcls.is_pure_computable(ctx.env.schema):
                continue
            schema_deflt = ptrcls.get_schema_reflection_default(ctx.env.schema)
            if schema_deflt is None:
                continue
            with ctx.newscope(fenced=True) as scopectx:
                implicit_ql = qlast.ShapeElement(
                    expr=qlast.Path(
                        steps=[
                            qlast.Ptr(
                                ptr=qlast.ObjectRef(
                                    name=pn,
                                ),
                            ),
                        ],
                    ),
                    compexpr=qlast.BinOp(
                        left=qlast.Path(
                            partial=True,
                            steps=[
                                qlast.Ptr(
                                    ptr=qlast.ObjectRef(name=pn),
                                    direction=(s_pointers.PointerDirection.Outbound),
                                )
                            ],
                        ),
                        right=qlparser.parse_fragment(schema_deflt),
                        op="??",
                    ),
                )
                # Note: we only need to record the schema default
                # as a computable, but not include it in the type
                # shape, so we ignore the return value.
                _normalize_view_ptr_expr(
                    implicit_ql,
                    view_scls,
                    path_id=path_id,
                    path_id_namespace=path_id_namespace,
                    is_insert=is_insert,
                    is_update=is_update,
                    view_rptr=view_rptr,
                    ctx=scopectx,
                )
    # Register each pointer under its shape source: link properties hang
    # off the enclosing link, regular pointers off the view type.
    for ptrcls in pointers:
        source: Union[s_types.Type, s_pointers.PointerLike]
        if ptrcls.is_link_property(ctx.env.schema):
            assert view_rptr is not None and view_rptr.ptrcls is not None
            source = view_rptr.ptrcls
        else:
            source = view_scls
        if is_defining_shape:
            cinfo = ctx.source_map.get(ptrcls)
            if cinfo is not None:
                shape_op = cinfo.shape_op
            else:
                shape_op = qlast.ShapeOp.ASSIGN
            ctx.env.view_shapes[source].append((ptrcls, shape_op))
    if (
        view_rptr is not None
        and view_rptr.ptrcls is not None
        and view_scls is not stype
    ):
        # Record the inbound pointer on the freshly derived view type.
        ctx.env.schema = view_scls.set_field_value(
            ctx.env.schema, "rptr", view_rptr.ptrcls
        )
    return view_scls
|
def _process_view(
    *,
    stype: s_objtypes.ObjectType,
    path_id: irast.PathId,
    path_id_namespace: Optional[irast.WeakNamespace] = None,
    elements: List[qlast.ShapeElement],
    view_rptr: Optional[context.ViewRPtr] = None,
    view_name: Optional[sn.SchemaName] = None,
    is_insert: bool = False,
    is_update: bool = False,
    is_delete: bool = False,
    parser_context: pctx.ParserContext,
    ctx: context.ContextLevel,
) -> s_objtypes.ObjectType:
    """Derive a view type for *stype* and populate it from shape *elements*.

    Handles: readable view naming in schema-view mode, normalization of
    each shape element into a pointer, injection of pointer defaults for
    INSERT, schema-introspection default rewrites, and registration of
    the resulting pointers in ``ctx.env.view_shapes``.

    Raises QueryError on duplicate shape pointers and
    MissingRequiredError for required INSERT pointers without a value.
    Returns the derived ObjectType.
    """
    if view_name is None and ctx.env.options.schema_view_mode and view_rptr is not None:
        # Make sure persistent schema expression aliases have properly formed
        # names as opposed to the usual mangled form of the ephemeral
        # aliases. This is needed for introspection readability, as well
        # as helps in maintaining proper type names for schema
        # representations that require alphanumeric names, such as
        # GraphQL.
        #
        # We use the name of the source together with the name
        # of the inbound link to form the name, so in e.g.
        # CREATE ALIAS V := (SELECT Foo { bar: { baz: { ... } })
        # The name of the innermost alias would be "__V__bar__baz".
        source_name = view_rptr.source.get_name(ctx.env.schema).name
        if not source_name.startswith("__"):
            source_name = f"__{source_name}"
        if view_rptr.ptrcls_name is not None:
            ptr_name = view_rptr.ptrcls_name.name
        elif view_rptr.ptrcls is not None:
            ptr_name = view_rptr.ptrcls.get_shortname(ctx.env.schema).name
        else:
            # NOTE(review): message typo -- "not ptrcls" should read
            # "nor ptrcls" (runtime string, not changed here).
            raise errors.InternalServerError(
                "_process_view in schema mode received view_rptr with "
                "neither ptrcls_name, not ptrcls"
            )
        name = f"{source_name}__{ptr_name}"
        view_name = sn.Name(
            module=ctx.derived_target_module or "__derived__",
            name=name,
        )
    view_scls = schemactx.derive_view(
        stype,
        is_insert=is_insert,
        is_update=is_update,
        is_delete=is_delete,
        derived_name=view_name,
        ctx=ctx,
    )
    # derive_view is typed to return a generic type; views of object
    # types must themselves be object types.
    assert isinstance(view_scls, s_objtypes.ObjectType)
    is_mutation = is_insert or is_update
    is_defining_shape = ctx.expr_exposed or is_mutation
    if view_rptr is not None and view_rptr.ptrcls is None:
        derive_ptrcls(view_rptr, target_scls=view_scls, transparent=True, ctx=ctx)
    pointers = []
    # Normalize each explicit shape element into a pointer on the view.
    for shape_el in elements:
        with ctx.newscope(fenced=True) as scopectx:
            pointer = _normalize_view_ptr_expr(
                shape_el,
                view_scls,
                path_id=path_id,
                path_id_namespace=path_id_namespace,
                is_insert=is_insert,
                is_update=is_update,
                view_rptr=view_rptr,
                ctx=scopectx,
            )
            if pointer in pointers:
                schema = ctx.env.schema
                vnp = pointer.get_verbosename(schema, with_parent=True)
                raise errors.QueryError(
                    f"duplicate definition of {vnp}", context=shape_el.context
                )
            pointers.append(pointer)
    if is_insert:
        # For INSERT, fill in defaults for pointers not set explicitly,
        # and error out on required pointers with no default.
        explicit_ptrs = {
            ptrcls.get_shortname(ctx.env.schema).name for ptrcls in pointers
        }
        scls_pointers = stype.get_pointers(ctx.env.schema)
        for pn, ptrcls in scls_pointers.items(ctx.env.schema):
            if pn in explicit_ptrs or ptrcls.is_pure_computable(ctx.env.schema):
                continue
            default_expr = ptrcls.get_default(ctx.env.schema)
            if not default_expr:
                if ptrcls.get_required(ctx.env.schema):
                    if ptrcls.is_property(ctx.env.schema):
                        # If the target is a sequence, there's no need
                        # for an explicit value.
                        ptrcls_target = ptrcls.get_target(ctx.env.schema)
                        assert ptrcls_target is not None
                        if ptrcls_target.issubclass(
                            ctx.env.schema, ctx.env.schema.get("std::sequence")
                        ):
                            continue
                        what = "property"
                    else:
                        what = "link"
                    raise errors.MissingRequiredError(
                        f"missing value for required {what} "
                        f"{stype.get_displayname(ctx.env.schema)}."
                        f"{ptrcls.get_displayname(ctx.env.schema)}"
                    )
                else:
                    continue
            # Synthesize `ptr := DETACHED <default>` and compile it like
            # an explicit shape element.
            ptrcls_sn = ptrcls.get_shortname(ctx.env.schema)
            default_ql = qlast.ShapeElement(
                expr=qlast.Path(
                    steps=[
                        qlast.Ptr(
                            ptr=qlast.ObjectRef(
                                name=ptrcls_sn.name,
                                module=ptrcls_sn.module,
                            ),
                        ),
                    ],
                ),
                compexpr=qlast.DetachedExpr(
                    expr=default_expr.qlast,
                ),
            )
            with ctx.newscope(fenced=True) as scopectx:
                pointers.append(
                    _normalize_view_ptr_expr(
                        default_ql,
                        view_scls,
                        path_id=path_id,
                        path_id_namespace=path_id_namespace,
                        is_insert=is_insert,
                        is_update=is_update,
                        from_default=True,
                        view_rptr=view_rptr,
                        ctx=scopectx,
                    ),
                )
    elif (
        stype.get_name(ctx.env.schema).module == "schema"
        and ctx.env.options.introspection_schema_rewrites
    ):
        # Introspection of schema:: types: rewrite each pointer with a
        # declared reflection default as `ptr := .ptr ?? <default>`.
        explicit_ptrs = {
            ptrcls.get_shortname(ctx.env.schema).name for ptrcls in pointers
        }
        scls_pointers = stype.get_pointers(ctx.env.schema)
        for pn, ptrcls in scls_pointers.items(ctx.env.schema):
            if pn in explicit_ptrs or ptrcls.is_pure_computable(ctx.env.schema):
                continue
            schema_deflt = ptrcls.get_schema_reflection_default(ctx.env.schema)
            if schema_deflt is None:
                continue
            with ctx.newscope(fenced=True) as scopectx:
                implicit_ql = qlast.ShapeElement(
                    expr=qlast.Path(
                        steps=[
                            qlast.Ptr(
                                ptr=qlast.ObjectRef(
                                    name=pn,
                                ),
                            ),
                        ],
                    ),
                    compexpr=qlast.BinOp(
                        left=qlast.Path(
                            partial=True,
                            steps=[
                                qlast.Ptr(
                                    ptr=qlast.ObjectRef(name=pn),
                                    direction=(s_pointers.PointerDirection.Outbound),
                                )
                            ],
                        ),
                        right=qlparser.parse_fragment(schema_deflt),
                        op="??",
                    ),
                )
                # Note: we only need to record the schema default
                # as a computable, but not include it in the type
                # shape, so we ignore the return value.
                _normalize_view_ptr_expr(
                    implicit_ql,
                    view_scls,
                    path_id=path_id,
                    path_id_namespace=path_id_namespace,
                    is_insert=is_insert,
                    is_update=is_update,
                    view_rptr=view_rptr,
                    ctx=scopectx,
                )
    # Register each pointer under its shape source: link properties hang
    # off the enclosing link, regular pointers off the view type.
    for ptrcls in pointers:
        source: Union[s_types.Type, s_pointers.PointerLike]
        if ptrcls.is_link_property(ctx.env.schema):
            assert view_rptr is not None and view_rptr.ptrcls is not None
            source = view_rptr.ptrcls
        else:
            source = view_scls
        if is_defining_shape:
            cinfo = ctx.source_map.get(ptrcls)
            if cinfo is not None:
                shape_op = cinfo.shape_op
            else:
                shape_op = qlast.ShapeOp.ASSIGN
            ctx.env.view_shapes[source].append((ptrcls, shape_op))
    if (
        view_rptr is not None
        and view_rptr.ptrcls is not None
        and view_scls is not stype
    ):
        # Record the inbound pointer on the freshly derived view type.
        ctx.env.schema = view_scls.set_field_value(
            ctx.env.schema, "rptr", view_rptr.ptrcls
        )
    return view_scls
|
https://github.com/edgedb/edgedb/issues/1788
|
ERROR: InternalServerError:
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/Users/yury/dev/edge/edgedb/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/Users/yury/dev/edge/edgedb/edb/server/compiler/compiler.py", line 1795, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/Users/yury/dev/edge/edgedb/edb/server/compiler/compiler.py", line 1429, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/Users/yury/dev/edge/edgedb/edb/server/compiler/compiler.py", line 1392, in _compile_dispatch_ql
return self._compile_ql_query(ctx, ql)
File "/Users/yury/dev/edge/edgedb/edb/server/compiler/compiler.py", line 592, in _compile_ql_query
ir = qlcompiler.compile_ast_to_ir(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/__init__.py", line 175, in wrapper
return func(*args, **kwargs)
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/__init__.py", line 223, in compile_ast_to_ir
ir_set = dispatch_mod.compile(tree, ctx=ctx)
File "/opt/local/Library/Frameworks/Python.framework/Versions/3.8/lib/python3.8/functools.py", line 874, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/stmt.py", line 84, in compile_SelectQuery
stmt.result = compile_result_clause(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/stmt.py", line 1017, in compile_result_clause
ir_result = compile_query_subject(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/stmt.py", line 1097, in compile_query_subject
view_scls = viewgen.process_view(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/viewgen.py", line 86, in process_view
view_scls = _process_view(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/viewgen.py", line 173, in _process_view
pointer = _normalize_view_ptr_expr(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/viewgen.py", line 549, in _normalize_view_ptr_expr
ptr_target = _process_view(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/viewgen.py", line 160, in _process_view
assert isinstance(view_scls, s_objtypes.ObjectType)
AssertionError
|
InternalServerError
|
def _normalize_view_ptr_expr(
    shape_el: qlast.ShapeElement,
    view_scls: s_objtypes.ObjectType,
    *,
    path_id: irast.PathId,
    path_id_namespace: Optional[irast.WeakNamespace] = None,
    is_insert: bool = False,
    is_update: bool = False,
    from_default: bool = False,
    view_rptr: Optional[context.ViewRPtr] = None,
    ctx: context.ContextLevel,
) -> s_pointers.Pointer:
    """Normalize a single shape element into a (possibly derived) Pointer.

    If the element carries a computable expression (``shape_el.compexpr``),
    the expression is compiled and a new pointer is derived for it.
    Otherwise an existing pointer on *view_scls* (or on the link class for
    link-property elements) is resolved, and a view-specific derivative is
    created when filters/ordering/limits, polymorphic access, a target
    type intersection, or a nested sub-shape require one.

    Returns the resulting pointer; as a side effect it may update
    ``ctx.env.schema``, ``ctx.source_map`` and the cardinality-inference
    bookkeeping in ``ctx``.
    """
    steps = shape_el.expr.steps
    is_linkprop = False
    is_polymorphic = False
    is_mutation = is_insert or is_update
    # Pointers may be qualified by the explicit source
    # class, which is equivalent to Expr[IS Type].
    plen = len(steps)
    ptrsource: s_sources.Source = view_scls
    qlexpr: Optional[qlast.Expr] = None
    target_typexpr = None
    source: qlast.Base
    base_ptrcls_is_alias = False

    if plen >= 2 and isinstance(steps[-1], qlast.TypeIntersection):
        # Target type intersection: foo: Type
        target_typexpr = steps[-1].type
        plen -= 1
        steps = steps[:-1]

    if plen == 1:
        # regular shape
        lexpr = steps[0]
        assert isinstance(lexpr, qlast.Ptr)
        is_linkprop = lexpr.type == "property"
        if is_linkprop:
            if view_rptr is None or view_rptr.ptrcls is None:
                raise errors.QueryError(
                    "invalid reference to link property in top level shape",
                    context=lexpr.context,
                )
            assert isinstance(view_rptr.ptrcls, s_links.Link)
            # Link properties are resolved against the link itself,
            # not against the object type.
            ptrsource = view_rptr.ptrcls
        source = qlast.Source()
    elif plen == 2 and isinstance(steps[0], qlast.TypeIntersection):
        # Source type intersection: [IS Type].foo
        source = qlast.Path(
            steps=[
                qlast.Source(),
                steps[0],
            ]
        )
        lexpr = steps[1]
        ptype = steps[0].type
        if not isinstance(ptype, qlast.TypeName):
            raise errors.QueryError(
                "complex type expressions are not supported here",
                context=ptype.context,
            )
        source_spec = schemactx.get_schema_type(ptype.maintype, ctx=ctx)
        if not isinstance(source_spec, s_objtypes.ObjectType):
            raise errors.QueryError(
                f"expected object type, got "
                f"{source_spec.get_verbosename(ctx.env.schema)}",
                context=ptype.context,
            )
        ptrsource = source_spec
        is_polymorphic = True
    else:  # pragma: no cover
        raise RuntimeError(f"unexpected path length in view shape: {len(steps)}")

    assert isinstance(lexpr, qlast.Ptr)
    ptrname = lexpr.ptr.name

    compexpr: Optional[qlast.Expr] = shape_el.compexpr
    if compexpr is None and is_insert and shape_el.elements:
        # Short shape form in INSERT, e.g
        #     INSERT Foo { bar: Spam { name := 'name' }}
        # is prohibited.
        raise errors.EdgeQLSyntaxError("unexpected ':'", context=steps[-1].context)

    ptrcls: Optional[s_pointers.Pointer]

    if compexpr is None:
        # Plain (non-computable) shape element: resolve the existing
        # pointer and, where necessary, derive a view-specific copy.
        ptrcls = setgen.resolve_ptr(ptrsource, ptrname, ctx=ctx)
        if is_polymorphic:
            ptrcls = schemactx.derive_ptr(
                ptrcls, view_scls, is_insert=is_insert, is_update=is_update, ctx=ctx
            )

        base_ptrcls = ptrcls.get_bases(ctx.env.schema).first(ctx.env.schema)
        base_ptr_is_computable = base_ptrcls in ctx.source_map
        ptr_name = sn.Name(
            module="__",
            name=ptrcls.get_shortname(ctx.env.schema).name,
        )

        base_cardinality = base_ptrcls.get_cardinality(ctx.env.schema)
        base_is_singleton = base_cardinality is qltypes.SchemaCardinality.ONE
        # Any of these features turns the element into an implicit
        # computable (a wrapped SELECT over the pointer path).
        if (
            shape_el.where
            or shape_el.orderby
            or shape_el.offset
            or shape_el.limit
            or base_ptr_is_computable
            or is_polymorphic
            or target_typexpr is not None
            or (ctx.implicit_limit and not base_is_singleton)
        ):
            if target_typexpr is None:
                qlexpr = qlast.Path(steps=[source, lexpr])
            else:
                qlexpr = qlast.Path(
                    steps=[
                        source,
                        lexpr,
                        qlast.TypeIntersection(type=target_typexpr),
                    ]
                )

            qlexpr = astutils.ensure_qlstmt(qlexpr)
            assert isinstance(qlexpr, qlast.SelectQuery)
            qlexpr.where = shape_el.where
            qlexpr.orderby = shape_el.orderby

            if shape_el.offset or shape_el.limit:
                # OFFSET/LIMIT must go on an outer SELECT so they apply
                # after WHERE/ORDER BY.
                qlexpr = qlast.SelectQuery(result=qlexpr, implicit=True)
                qlexpr.offset = shape_el.offset
                qlexpr.limit = shape_el.limit

            if (
                (ctx.expr_exposed or ctx.stmt is ctx.toplevel_stmt)
                and not qlexpr.limit
                and ctx.implicit_limit
                and not base_is_singleton
            ):
                qlexpr.limit = qlast.IntegerConstant(
                    value=str(ctx.implicit_limit),
                )

        if target_typexpr is not None:
            assert isinstance(target_typexpr, qlast.TypeName)
            intersector_type = schemactx.get_schema_type(
                target_typexpr.maintype, ctx=ctx
            )
            int_result = schemactx.apply_intersection(
                ptrcls.get_target(ctx.env.schema),  # type: ignore
                intersector_type,
                ctx=ctx,
            )
            ptr_target = int_result.stype
        else:
            _ptr_target = ptrcls.get_target(ctx.env.schema)
            assert _ptr_target
            ptr_target = _ptr_target

        if base_ptrcls in ctx.pending_cardinality:
            # We do not know the parent's pointer cardinality yet.
            ptr_cardinality = None
            ctx.pointer_derivation_map[base_ptrcls].append(ptrcls)
            stmtctx.pend_pointer_cardinality_inference(
                ptrcls=ptrcls,
                specified_required=shape_el.required,
                specified_card=shape_el.cardinality,
                source_ctx=shape_el.context,
                ctx=ctx,
            )
        else:
            ptr_cardinality = base_ptrcls.get_cardinality(ctx.env.schema)

        implicit_tid = has_implicit_tid(
            ptr_target,
            is_mutation=is_mutation,
            ctx=ctx,
        )

        if shape_el.elements or implicit_tid:
            # Nested sub-shape (or an implicitly injected __tid__):
            # recurse into _process_view for the target.
            sub_view_rptr = context.ViewRPtr(
                ptrsource if is_linkprop else view_scls,
                ptrcls=ptrcls,
                is_insert=is_insert,
                is_update=is_update,
            )

            sub_path_id = pathctx.extend_path_id(
                path_id, ptrcls=base_ptrcls, ns=ctx.path_id_namespace, ctx=ctx
            )

            ctx.path_scope.attach_path(sub_path_id, context=shape_el.context)

            # Shapes are only valid on object types; report a proper
            # error rather than tripping asserts in _process_view.
            if not isinstance(ptr_target, s_objtypes.ObjectType):
                raise errors.QueryError(
                    f"shapes cannot be applied to "
                    f"{ptr_target.get_verbosename(ctx.env.schema)}",
                    context=shape_el.context,
                )

            if is_update:
                for subel in shape_el.elements or []:
                    is_prop = (
                        isinstance(subel.expr.steps[0], qlast.Ptr)
                        and subel.expr.steps[0].type == "property"
                    )
                    if not is_prop:
                        raise errors.QueryError(
                            "only references to link properties are allowed "
                            "in nested UPDATE shapes",
                            context=subel.context,
                        )

                ptr_target = _process_view(
                    stype=ptr_target,
                    path_id=sub_path_id,
                    path_id_namespace=path_id_namespace,
                    view_rptr=sub_view_rptr,
                    elements=shape_el.elements,
                    is_update=True,
                    parser_context=shape_el.context,
                    ctx=ctx,
                )
            else:
                ptr_target = _process_view(
                    stype=ptr_target,
                    path_id=sub_path_id,
                    path_id_namespace=path_id_namespace,
                    view_rptr=sub_view_rptr,
                    elements=shape_el.elements,
                    parser_context=shape_el.context,
                    ctx=ctx,
                )
    else:
        # Explicit computable: `ptr := <expr>` (or `+=` / `-=` in UPDATE).
        base_ptrcls = ptrcls = None

        if is_mutation and ptrname not in ctx.special_computables_in_mutation_shape:
            # If this is a mutation, the pointer must exist.
            ptrcls = setgen.resolve_ptr(ptrsource, ptrname, ctx=ctx)
            base_ptrcls = ptrcls.get_bases(ctx.env.schema).first(ctx.env.schema)
            ptr_name = sn.Name(
                module="__",
                name=ptrcls.get_shortname(ctx.env.schema).name,
            )
        else:
            ptr_name = sn.Name(
                module="__",
                name=ptrname,
            )

            try:
                ptrcls = setgen.resolve_ptr(
                    ptrsource,
                    ptrname,
                    track_ref=False,
                    ctx=ctx,
                )
                base_ptrcls = ptrcls.get_bases(ctx.env.schema).first(ctx.env.schema)
            except errors.InvalidReferenceError:
                # This is a NEW computable pointer, it's fine.
                pass

        qlexpr = astutils.ensure_qlstmt(compexpr)

        if (
            (ctx.expr_exposed or ctx.stmt is ctx.toplevel_stmt)
            and ctx.implicit_limit
            and isinstance(qlexpr, qlast.OffsetLimitMixin)
            and not qlexpr.limit
        ):
            qlexpr.limit = qlast.IntegerConstant(value=str(ctx.implicit_limit))

        with ctx.newscope(fenced=True) as shape_expr_ctx:
            # Put current pointer class in context, so
            # that references to link properties in sub-SELECT
            # can be resolved. This is necessary for proper
            # evaluation of link properties on computable links,
            # most importantly, in INSERT/UPDATE context.
            shape_expr_ctx.view_rptr = context.ViewRPtr(
                ptrsource if is_linkprop else view_scls,
                ptrcls=ptrcls,
                ptrcls_name=ptr_name,
                ptrcls_is_linkprop=is_linkprop,
                is_insert=is_insert,
                is_update=is_update,
            )

            shape_expr_ctx.defining_view = view_scls
            shape_expr_ctx.path_scope.unnest_fence = True
            shape_expr_ctx.partial_path_prefix = setgen.class_set(
                view_scls.get_bases(ctx.env.schema).first(ctx.env.schema),
                path_id=path_id,
                ctx=shape_expr_ctx,
            )
            prefix_rptrref = path_id.rptr()
            if prefix_rptrref is not None:
                # Source path seems to contain multiple steps,
                # so set up a rptr for abbreviated link property
                # paths.
                src_path_id = path_id.src_path()
                assert src_path_id is not None
                ctx.env.schema, src_t = irtyputils.ir_typeref_to_type(
                    shape_expr_ctx.env.schema,
                    src_path_id.target,
                )
                prefix_rptr = irast.Pointer(
                    source=setgen.class_set(
                        src_t,
                        path_id=src_path_id,
                        ctx=shape_expr_ctx,
                    ),
                    target=shape_expr_ctx.partial_path_prefix,
                    ptrref=prefix_rptrref,
                    direction=s_pointers.PointerDirection.Outbound,
                )
                shape_expr_ctx.partial_path_prefix.rptr = prefix_rptr

            if is_mutation and ptrcls is not None:
                shape_expr_ctx.expr_exposed = True
                shape_expr_ctx.empty_result_type_hint = ptrcls.get_target(
                    ctx.env.schema
                )

            shape_expr_ctx.stmt_metadata[qlexpr] = context.StatementMetadata(
                iterator_target=True,
            )
            irexpr = dispatch.compile(qlexpr, ctx=shape_expr_ctx)

        # `+=` / `-=` are only meaningful inside UPDATE shapes.
        if (
            shape_el.operation.op is qlast.ShapeOp.APPEND
            or shape_el.operation.op is qlast.ShapeOp.SUBTRACT
        ):
            if not is_update:
                op = "+=" if shape_el.operation.op is qlast.ShapeOp.APPEND else "-="
                raise errors.EdgeQLSyntaxError(
                    f"unexpected '{op}'",
                    context=shape_el.operation.context,
                )

        irexpr.context = compexpr.context

        if base_ptrcls is None:
            base_ptrcls = shape_expr_ctx.view_rptr.base_ptrcls
            base_ptrcls_is_alias = shape_expr_ctx.view_rptr.ptrcls_is_alias

        if ptrcls is not None:
            ctx.env.schema = ptrcls.set_field_value(
                ctx.env.schema, "is_owned", True
            )

        ptr_cardinality = None
        ptr_target = inference.infer_type(irexpr, ctx.env)

        if isinstance(
            ptr_target, s_types.Collection
        ) and not ctx.env.orig_schema.get_by_id(ptr_target.id, default=None):
            # Record references to implicitly defined collection types,
            # so that the alias delta machinery can pick them up.
            ctx.env.created_schema_objects.add(ptr_target)

        anytype = ptr_target.find_any(ctx.env.schema)
        if anytype is not None:
            raise errors.QueryError(
                "expression returns value of indeterminate type",
                context=ctx.env.type_origins.get(anytype),
            )

        # Validate that the insert/update expression is
        # of the correct class.
        if is_mutation and ptrcls is not None:
            base_target = ptrcls.get_target(ctx.env.schema)
            assert base_target is not None
            if ptr_target.assignment_castable_to(base_target, schema=ctx.env.schema):
                # Force assignment casts if the target type is not a
                # subclass of the base type and the cast is not to an
                # object type.
                if not (
                    base_target.is_object_type()
                    or schemactx.is_type_compatible(base_target, ptr_target, ctx=ctx)
                ):
                    qlexpr = astutils.ensure_qlstmt(
                        qlast.TypeCast(
                            type=typegen.type_to_ql_typeref(base_target, ctx=ctx),
                            expr=compexpr,
                        )
                    )
                    ptr_target = base_target
            else:
                expected = [repr(str(base_target.get_displayname(ctx.env.schema)))]

                ercls: Type[errors.EdgeDBError]
                if ptrcls.is_property(ctx.env.schema):
                    ercls = errors.InvalidPropertyTargetError
                else:
                    ercls = errors.InvalidLinkTargetError

                ptr_vn = ptrcls.get_verbosename(ctx.env.schema, with_parent=True)

                raise ercls(
                    f"invalid target for {ptr_vn}: "
                    f"{str(ptr_target.get_displayname(ctx.env.schema))!r} "
                    f"(expecting {' or '.join(expected)})"
                )

    # A computable (explicit or implicit) needs a freshly derived pointer;
    # otherwise only the target may need adjusting.
    if qlexpr is not None or ptrcls is None:
        src_scls: s_sources.Source

        if is_linkprop:
            # Proper checking was done when is_linkprop is defined.
            assert view_rptr is not None
            assert isinstance(view_rptr.ptrcls, s_links.Link)
            src_scls = view_rptr.ptrcls
        else:
            src_scls = view_scls

        if ptr_target.is_object_type():
            base = ctx.env.get_track_schema_object("std::link")
        else:
            base = ctx.env.get_track_schema_object("std::property")

        if base_ptrcls is not None:
            derive_from = base_ptrcls
        else:
            derive_from = base

        derived_name = schemactx.derive_view_name(
            base_ptrcls,
            derived_name_base=ptr_name,
            derived_name_quals=[src_scls.get_name(ctx.env.schema)],
            ctx=ctx,
        )

        existing: Optional[s_objects.Object] = ctx.env.schema.get(derived_name, None)
        if existing is not None:
            assert isinstance(existing, s_pointers.Pointer)
            existing_target = existing.get_target(ctx.env.schema)
            assert existing_target is not None
            if ptr_target == existing_target:
                # Same target: reuse the previously derived pointer.
                ptrcls = existing
            elif ptr_target.implicitly_castable_to(existing_target, ctx.env.schema):
                ctx.env.schema = existing.set_target(ctx.env.schema, ptr_target)
                ptrcls = existing
            else:
                # Incompatible target: drop the stale derivative and
                # re-derive, preserving the target's rptr around the
                # delete so it doesn't dangle.
                target_rptr_set = ptr_target.get_rptr(ctx.env.schema) is not None

                if target_rptr_set:
                    ctx.env.schema = ptr_target.set_field_value(
                        ctx.env.schema,
                        "rptr",
                        None,
                    )

                ctx.env.schema = existing.delete(ctx.env.schema)
                try:
                    ptrcls = schemactx.derive_ptr(
                        derive_from,
                        src_scls,
                        ptr_target,
                        is_insert=is_insert,
                        is_update=is_update,
                        derived_name=derived_name,
                        inheritance_merge=True,
                        ctx=ctx,
                    )
                except errors.SchemaError as e:
                    if compexpr is not None:
                        e.set_source_context(compexpr.context)
                    else:
                        e.set_source_context(shape_el.expr.steps[-1].context)
                    raise

                if target_rptr_set:
                    ctx.env.schema = ptr_target.set_field_value(
                        ctx.env.schema,
                        "rptr",
                        ptrcls,
                    )
        else:
            ptrcls = schemactx.derive_ptr(
                derive_from,
                src_scls,
                ptr_target,
                is_insert=is_insert,
                is_update=is_update,
                derived_name=derived_name,
                ctx=ctx,
            )

    elif ptrcls.get_target(ctx.env.schema) != ptr_target:
        ctx.env.schema = ptrcls.set_target(ctx.env.schema, ptr_target)

    assert ptrcls is not None

    if qlexpr is None:
        # This is not a computable, just a pointer
        # to a nested shape. Have it reuse the original
        # pointer name so that in `Foo.ptr.name` and
        # `Foo { ptr: {name}}` are the same path.
        path_id_name = base_ptrcls.get_name(ctx.env.schema)
        ctx.env.schema = ptrcls.set_field_value(
            ctx.env.schema, "path_id_name", path_id_name
        )

    if qlexpr is not None:
        ctx.source_map[ptrcls] = context.ComputableInfo(
            qlexpr=qlexpr,
            context=ctx,
            path_id=path_id,
            path_id_ns=path_id_namespace,
            shape_op=shape_el.operation.op,
        )

    if compexpr is not None or is_polymorphic:
        ctx.env.schema = ptrcls.set_field_value(
            ctx.env.schema,
            "computable",
            True,
        )
        ctx.env.schema = ptrcls.set_field_value(
            ctx.env.schema,
            "is_owned",
            True,
        )

    if ptr_cardinality is not None:
        ctx.env.schema = ptrcls.set_field_value(
            ctx.env.schema, "cardinality", ptr_cardinality
        )
    else:
        # Cardinality is unknown yet: record what was declared and defer
        # the decision to the cardinality-inference pass.
        if qlexpr is None and ptrcls is not base_ptrcls:
            ctx.pointer_derivation_map[base_ptrcls].append(ptrcls)

        base_cardinality = None
        base_required = False
        if base_ptrcls is not None and not base_ptrcls_is_alias:
            base_cardinality = base_ptrcls.get_cardinality(ctx.env.schema)
            base_required = base_ptrcls.get_required(ctx.env.schema)

        if base_cardinality is None:
            specified_cardinality = shape_el.cardinality
            specified_required = shape_el.required
        else:
            specified_cardinality = base_cardinality
            specified_required = base_required

            if (
                shape_el.cardinality is not None
                and base_ptrcls is not None
                and shape_el.cardinality != base_cardinality
            ):
                base_src = base_ptrcls.get_source(ctx.env.schema)
                assert base_src is not None
                base_src_name = base_src.get_verbosename(ctx.env.schema)
                raise errors.SchemaError(
                    f"cannot redefine the cardinality of "
                    f"{ptrcls.get_verbosename(ctx.env.schema)}: "
                    f"it is defined as {base_cardinality.as_ptr_qual()!r} "
                    f"in the base {base_src_name}",
                    context=compexpr and compexpr.context,
                )

            # The required flag may be inherited from the base
            specified_required = shape_el.required or base_required

        stmtctx.pend_pointer_cardinality_inference(
            ptrcls=ptrcls,
            specified_required=specified_required,
            specified_card=specified_cardinality,
            is_mut_assignment=is_mutation,
            shape_op=shape_el.operation.op,
            source_ctx=shape_el.context,
            ctx=ctx,
        )

        ctx.env.schema = ptrcls.set_field_value(ctx.env.schema, "cardinality", None)

    if (
        ptrcls.is_protected_pointer(ctx.env.schema)
        and qlexpr is not None
        and not from_default
        and not ctx.env.options.allow_writing_protected_pointers
    ):
        ptrcls_sn = ptrcls.get_shortname(ctx.env.schema)
        if is_polymorphic:
            msg = f"cannot access {ptrcls_sn.name} on a polymorphic shape element"
        else:
            msg = f"cannot assign to {ptrcls_sn.name}"
        raise errors.QueryError(msg, context=shape_el.context)

    if is_update and ptrcls.get_readonly(ctx.env.schema):
        raise errors.QueryError(
            f"cannot update {ptrcls.get_verbosename(ctx.env.schema)}: "
            f"it is declared as read-only",
            context=compexpr and compexpr.context,
        )

    return ptrcls
|
def _normalize_view_ptr_expr(
    shape_el: qlast.ShapeElement,
    view_scls: s_objtypes.ObjectType,
    *,
    path_id: irast.PathId,
    path_id_namespace: Optional[irast.WeakNamespace] = None,
    is_insert: bool = False,
    is_update: bool = False,
    from_default: bool = False,
    view_rptr: Optional[context.ViewRPtr] = None,
    ctx: context.ContextLevel,
) -> s_pointers.Pointer:
    """Normalize a single shape element into a (possibly derived) Pointer.

    If the element carries a computable expression (``shape_el.compexpr``),
    the expression is compiled and a new pointer is derived for it.
    Otherwise an existing pointer on *view_scls* (or on the link class for
    link-property elements) is resolved, and a view-specific derivative is
    created when filters/ordering/limits, polymorphic access, a target
    type intersection, or a nested sub-shape require one.

    Returns the resulting pointer; as a side effect it may update
    ``ctx.env.schema``, ``ctx.source_map`` and the cardinality-inference
    bookkeeping in ``ctx``.
    """
    steps = shape_el.expr.steps
    is_linkprop = False
    is_polymorphic = False
    is_mutation = is_insert or is_update
    # Pointers may be qualified by the explicit source
    # class, which is equivalent to Expr[IS Type].
    plen = len(steps)
    ptrsource: s_sources.Source = view_scls
    qlexpr = None
    target_typexpr = None
    source: qlast.Base
    base_ptrcls_is_alias = False

    if plen >= 2 and isinstance(steps[-1], qlast.TypeIntersection):
        # Target type intersection: foo: Type
        target_typexpr = steps[-1].type
        plen -= 1
        steps = steps[:-1]

    if plen == 1:
        # regular shape
        lexpr = steps[0]
        assert isinstance(lexpr, qlast.Ptr)
        is_linkprop = lexpr.type == "property"
        if is_linkprop:
            if view_rptr is None or view_rptr.ptrcls is None:
                raise errors.QueryError(
                    "invalid reference to link property in top level shape",
                    context=lexpr.context,
                )
            assert isinstance(view_rptr.ptrcls, s_links.Link)
            ptrsource = view_rptr.ptrcls
        source = qlast.Source()
    elif plen == 2 and isinstance(steps[0], qlast.TypeIntersection):
        # Source type intersection: [IS Type].foo
        source = qlast.Path(
            steps=[
                qlast.Source(),
                steps[0],
            ]
        )
        lexpr = steps[1]
        ptype = steps[0].type
        if not isinstance(ptype, qlast.TypeName):
            raise errors.QueryError(
                "complex type expressions are not supported here",
                context=ptype.context,
            )
        source_spec = schemactx.get_schema_type(ptype.maintype, ctx=ctx)
        if not isinstance(source_spec, s_objtypes.ObjectType):
            raise errors.QueryError(
                f"expected object type, got "
                f"{source_spec.get_verbosename(ctx.env.schema)}",
                context=ptype.context,
            )
        ptrsource = source_spec
        is_polymorphic = True
    else:  # pragma: no cover
        raise RuntimeError(f"unexpected path length in view shape: {len(steps)}")

    assert isinstance(lexpr, qlast.Ptr)
    ptrname = lexpr.ptr.name

    compexpr = shape_el.compexpr
    if compexpr is None and is_insert and shape_el.elements:
        # Short shape form in INSERT, e.g
        #     INSERT Foo { bar: Spam { name := 'name' }}
        # is prohibited.
        raise errors.EdgeQLSyntaxError("unexpected ':'", context=steps[-1].context)

    if compexpr is None:
        # Plain (non-computable) shape element.
        ptrcls = setgen.resolve_ptr(ptrsource, ptrname, ctx=ctx)
        if is_polymorphic:
            ptrcls = schemactx.derive_ptr(
                ptrcls, view_scls, is_insert=is_insert, is_update=is_update, ctx=ctx
            )

        base_ptrcls = ptrcls.get_bases(ctx.env.schema).first(ctx.env.schema)
        base_ptr_is_computable = base_ptrcls in ctx.source_map
        ptr_name = sn.Name(
            module="__",
            name=ptrcls.get_shortname(ctx.env.schema).name,
        )

        base_cardinality = base_ptrcls.get_cardinality(ctx.env.schema)
        base_is_singleton = base_cardinality is qltypes.SchemaCardinality.ONE
        if (
            shape_el.where
            or shape_el.orderby
            or shape_el.offset
            or shape_el.limit
            or base_ptr_is_computable
            or is_polymorphic
            or target_typexpr is not None
            or (ctx.implicit_limit and not base_is_singleton)
        ):
            if target_typexpr is None:
                qlexpr = qlast.Path(steps=[source, lexpr])
            else:
                qlexpr = qlast.Path(
                    steps=[
                        source,
                        lexpr,
                        qlast.TypeIntersection(type=target_typexpr),
                    ]
                )

            qlexpr = astutils.ensure_qlstmt(qlexpr)
            qlexpr.where = shape_el.where
            qlexpr.orderby = shape_el.orderby

            if shape_el.offset or shape_el.limit:
                qlexpr = qlast.SelectQuery(result=qlexpr, implicit=True)
                qlexpr.offset = shape_el.offset
                qlexpr.limit = shape_el.limit

            if (
                (ctx.expr_exposed or ctx.stmt is ctx.toplevel_stmt)
                and not qlexpr.limit
                and ctx.implicit_limit
                and not base_is_singleton
            ):
                qlexpr.limit = qlast.IntegerConstant(
                    value=str(ctx.implicit_limit),
                )

        if target_typexpr is not None:
            intersector_type = schemactx.get_schema_type(
                target_typexpr.maintype, ctx=ctx
            )
            int_result = schemactx.apply_intersection(
                ptrcls.get_target(ctx.env.schema),
                intersector_type,
                ctx=ctx,
            )
            ptr_target = int_result.stype
        else:
            ptr_target = ptrcls.get_target(ctx.env.schema)

        if base_ptrcls in ctx.pending_cardinality:
            # We do not know the parent's pointer cardinality yet.
            ptr_cardinality = None
            ctx.pointer_derivation_map[base_ptrcls].append(ptrcls)
            stmtctx.pend_pointer_cardinality_inference(
                ptrcls=ptrcls,
                specified_required=shape_el.required,
                specified_card=shape_el.cardinality,
                source_ctx=shape_el.context,
                ctx=ctx,
            )
        else:
            ptr_cardinality = base_ptrcls.get_cardinality(ctx.env.schema)

        implicit_tid = has_implicit_tid(
            ptr_target,
            is_mutation=is_mutation,
            ctx=ctx,
        )

        if shape_el.elements or implicit_tid:
            sub_view_rptr = context.ViewRPtr(
                ptrsource if is_linkprop else view_scls,
                ptrcls=ptrcls,
                is_insert=is_insert,
                is_update=is_update,
            )

            sub_path_id = pathctx.extend_path_id(
                path_id, ptrcls=base_ptrcls, ns=ctx.path_id_namespace, ctx=ctx
            )

            ctx.path_scope.attach_path(sub_path_id, context=shape_el.context)

            # FIX: shapes are only valid on object types.  Without this
            # check a sub-shape on a scalar/union target crashed with a
            # bare AssertionError inside _process_view; raise a proper
            # QueryError instead.
            if not isinstance(ptr_target, s_objtypes.ObjectType):
                raise errors.QueryError(
                    f"shapes cannot be applied to "
                    f"{ptr_target.get_verbosename(ctx.env.schema)}",
                    context=shape_el.context,
                )

            if is_update:
                for subel in shape_el.elements or []:
                    is_prop = (
                        isinstance(subel.expr.steps[0], qlast.Ptr)
                        and subel.expr.steps[0].type == "property"
                    )
                    if not is_prop:
                        raise errors.QueryError(
                            "only references to link properties are allowed "
                            "in nested UPDATE shapes",
                            context=subel.context,
                        )

                ptr_target = _process_view(
                    stype=ptr_target,
                    path_id=sub_path_id,
                    path_id_namespace=path_id_namespace,
                    view_rptr=sub_view_rptr,
                    elements=shape_el.elements,
                    is_update=True,
                    parser_context=shape_el.context,
                    ctx=ctx,
                )
            else:
                ptr_target = _process_view(
                    stype=ptr_target,
                    path_id=sub_path_id,
                    path_id_namespace=path_id_namespace,
                    view_rptr=sub_view_rptr,
                    elements=shape_el.elements,
                    parser_context=shape_el.context,
                    ctx=ctx,
                )
    else:
        # Explicit computable: `ptr := <expr>` (or `+=` / `-=` in UPDATE).
        base_ptrcls = ptrcls = None

        if is_mutation and ptrname not in ctx.special_computables_in_mutation_shape:
            # If this is a mutation, the pointer must exist.
            ptrcls = setgen.resolve_ptr(ptrsource, ptrname, ctx=ctx)
            base_ptrcls = ptrcls.get_bases(ctx.env.schema).first(ctx.env.schema)
            ptr_name = sn.Name(
                module="__",
                name=ptrcls.get_shortname(ctx.env.schema).name,
            )
        else:
            ptr_name = sn.Name(
                module="__",
                name=ptrname,
            )

            try:
                ptrcls = setgen.resolve_ptr(
                    ptrsource,
                    ptrname,
                    track_ref=False,
                    ctx=ctx,
                )
                base_ptrcls = ptrcls.get_bases(ctx.env.schema).first(ctx.env.schema)
            except errors.InvalidReferenceError:
                # This is a NEW computable pointer, it's fine.
                pass

        qlexpr = astutils.ensure_qlstmt(compexpr)

        if (
            (ctx.expr_exposed or ctx.stmt is ctx.toplevel_stmt)
            and ctx.implicit_limit
            and isinstance(qlexpr, qlast.OffsetLimitMixin)
            and not qlexpr.limit
        ):
            qlexpr.limit = qlast.IntegerConstant(value=str(ctx.implicit_limit))

        with ctx.newscope(fenced=True) as shape_expr_ctx:
            # Put current pointer class in context, so
            # that references to link properties in sub-SELECT
            # can be resolved. This is necessary for proper
            # evaluation of link properties on computable links,
            # most importantly, in INSERT/UPDATE context.
            shape_expr_ctx.view_rptr = context.ViewRPtr(
                ptrsource if is_linkprop else view_scls,
                ptrcls=ptrcls,
                ptrcls_name=ptr_name,
                ptrcls_is_linkprop=is_linkprop,
                is_insert=is_insert,
                is_update=is_update,
            )

            shape_expr_ctx.defining_view = view_scls
            shape_expr_ctx.path_scope.unnest_fence = True
            shape_expr_ctx.partial_path_prefix = setgen.class_set(
                view_scls.get_bases(ctx.env.schema).first(ctx.env.schema),
                path_id=path_id,
                ctx=shape_expr_ctx,
            )
            prefix_rptrref = path_id.rptr()
            if prefix_rptrref is not None:
                # Source path seems to contain multiple steps,
                # so set up a rptr for abbreviated link property
                # paths.
                src_path_id = path_id.src_path()
                assert src_path_id is not None
                ctx.env.schema, src_t = irtyputils.ir_typeref_to_type(
                    shape_expr_ctx.env.schema,
                    src_path_id.target,
                )
                prefix_rptr = irast.Pointer(
                    source=setgen.class_set(
                        src_t,
                        path_id=src_path_id,
                        ctx=shape_expr_ctx,
                    ),
                    target=shape_expr_ctx.partial_path_prefix,
                    ptrref=prefix_rptrref,
                    direction=s_pointers.PointerDirection.Outbound,
                )
                shape_expr_ctx.partial_path_prefix.rptr = prefix_rptr

            if is_mutation and ptrcls is not None:
                shape_expr_ctx.expr_exposed = True
                shape_expr_ctx.empty_result_type_hint = ptrcls.get_target(
                    ctx.env.schema
                )

            shape_expr_ctx.stmt_metadata[qlexpr] = context.StatementMetadata(
                iterator_target=True,
            )
            irexpr = dispatch.compile(qlexpr, ctx=shape_expr_ctx)

        if (
            shape_el.operation.op is qlast.ShapeOp.APPEND
            or shape_el.operation.op is qlast.ShapeOp.SUBTRACT
        ):
            if not is_update:
                op = "+=" if shape_el.operation.op is qlast.ShapeOp.APPEND else "-="
                raise errors.EdgeQLSyntaxError(
                    f"unexpected '{op}'",
                    context=shape_el.operation.context,
                )

        irexpr.context = compexpr.context

        if base_ptrcls is None:
            base_ptrcls = shape_expr_ctx.view_rptr.base_ptrcls
            base_ptrcls_is_alias = shape_expr_ctx.view_rptr.ptrcls_is_alias

        if ptrcls is not None:
            ctx.env.schema = ptrcls.set_field_value(
                ctx.env.schema, "is_owned", True
            )

        ptr_cardinality = None
        ptr_target = inference.infer_type(irexpr, ctx.env)

        if isinstance(
            ptr_target, s_types.Collection
        ) and not ctx.env.orig_schema.get_by_id(ptr_target.id, default=None):
            # Record references to implicitly defined collection types,
            # so that the alias delta machinery can pick them up.
            ctx.env.created_schema_objects.add(ptr_target)

        anytype = ptr_target.find_any(ctx.env.schema)
        if anytype is not None:
            raise errors.QueryError(
                "expression returns value of indeterminate type",
                context=ctx.env.type_origins.get(anytype),
            )

        # Validate that the insert/update expression is
        # of the correct class.
        if is_mutation and ptrcls is not None:
            base_target = ptrcls.get_target(ctx.env.schema)
            assert base_target is not None
            if ptr_target.assignment_castable_to(base_target, schema=ctx.env.schema):
                # Force assignment casts if the target type is not a
                # subclass of the base type and the cast is not to an
                # object type.
                if not (
                    base_target.is_object_type()
                    or schemactx.is_type_compatible(base_target, ptr_target, ctx=ctx)
                ):
                    qlexpr = astutils.ensure_qlstmt(
                        qlast.TypeCast(
                            type=typegen.type_to_ql_typeref(base_target, ctx=ctx),
                            expr=compexpr,
                        )
                    )
                    ptr_target = base_target
            else:
                expected = [repr(str(base_target.get_displayname(ctx.env.schema)))]

                ercls: Type[errors.EdgeDBError]
                if ptrcls.is_property(ctx.env.schema):
                    ercls = errors.InvalidPropertyTargetError
                else:
                    ercls = errors.InvalidLinkTargetError

                ptr_vn = ptrcls.get_verbosename(ctx.env.schema, with_parent=True)

                raise ercls(
                    f"invalid target for {ptr_vn}: "
                    f"{str(ptr_target.get_displayname(ctx.env.schema))!r} "
                    f"(expecting {' or '.join(expected)})"
                )

    if qlexpr is not None or ptrcls is None:
        src_scls: s_sources.Source

        if is_linkprop:
            # Proper checking was done when is_linkprop is defined.
            assert view_rptr is not None
            assert isinstance(view_rptr.ptrcls, s_links.Link)
            src_scls = view_rptr.ptrcls
        else:
            src_scls = view_scls

        if ptr_target.is_object_type():
            base = ctx.env.get_track_schema_object("std::link")
        else:
            base = ctx.env.get_track_schema_object("std::property")

        if base_ptrcls is not None:
            derive_from = base_ptrcls
        else:
            derive_from = base

        derived_name = schemactx.derive_view_name(
            base_ptrcls,
            derived_name_base=ptr_name,
            derived_name_quals=[src_scls.get_name(ctx.env.schema)],
            ctx=ctx,
        )

        existing = ctx.env.schema.get(derived_name, None)
        if existing is not None:
            assert isinstance(existing, s_pointers.Pointer)
            existing_target = existing.get_target(ctx.env.schema)
            assert existing_target is not None
            if ptr_target == existing_target:
                ptrcls = existing
            elif ptr_target.implicitly_castable_to(existing_target, ctx.env.schema):
                ctx.env.schema = existing.set_target(ctx.env.schema, ptr_target)
                ptrcls = existing
            else:
                # Incompatible target: drop the stale derivative and
                # re-derive, preserving the target's rptr around the
                # delete so it doesn't dangle.
                target_rptr_set = ptr_target.get_rptr(ctx.env.schema) is not None

                if target_rptr_set:
                    ctx.env.schema = ptr_target.set_field_value(
                        ctx.env.schema,
                        "rptr",
                        None,
                    )

                ctx.env.schema = existing.delete(ctx.env.schema)
                try:
                    ptrcls = schemactx.derive_ptr(
                        derive_from,
                        src_scls,
                        ptr_target,
                        is_insert=is_insert,
                        is_update=is_update,
                        derived_name=derived_name,
                        inheritance_merge=True,
                        ctx=ctx,
                    )
                except errors.SchemaError as e:
                    if compexpr is not None:
                        e.set_source_context(compexpr.context)
                    else:
                        e.set_source_context(shape_el.expr.steps[-1].context)
                    raise

                if target_rptr_set:
                    ctx.env.schema = ptr_target.set_field_value(
                        ctx.env.schema,
                        "rptr",
                        ptrcls,
                    )
        else:
            ptrcls = schemactx.derive_ptr(
                derive_from,
                src_scls,
                ptr_target,
                is_insert=is_insert,
                is_update=is_update,
                derived_name=derived_name,
                ctx=ctx,
            )

    elif ptrcls.get_target(ctx.env.schema) != ptr_target:
        ctx.env.schema = ptrcls.set_target(ctx.env.schema, ptr_target)

    assert ptrcls is not None

    if qlexpr is None:
        # This is not a computable, just a pointer
        # to a nested shape. Have it reuse the original
        # pointer name so that in `Foo.ptr.name` and
        # `Foo { ptr: {name}}` are the same path.
        path_id_name = base_ptrcls.get_name(ctx.env.schema)
        ctx.env.schema = ptrcls.set_field_value(
            ctx.env.schema, "path_id_name", path_id_name
        )

    if qlexpr is not None:
        ctx.source_map[ptrcls] = context.ComputableInfo(
            qlexpr=qlexpr,
            context=ctx,
            path_id=path_id,
            path_id_ns=path_id_namespace,
            shape_op=shape_el.operation.op,
        )

    if compexpr is not None or is_polymorphic:
        ctx.env.schema = ptrcls.set_field_value(
            ctx.env.schema,
            "computable",
            True,
        )
        ctx.env.schema = ptrcls.set_field_value(
            ctx.env.schema,
            "is_owned",
            True,
        )

    if ptr_cardinality is not None:
        ctx.env.schema = ptrcls.set_field_value(
            ctx.env.schema, "cardinality", ptr_cardinality
        )
    else:
        if qlexpr is None and ptrcls is not base_ptrcls:
            ctx.pointer_derivation_map[base_ptrcls].append(ptrcls)

        base_cardinality = None
        base_required = False
        if base_ptrcls is not None and not base_ptrcls_is_alias:
            base_cardinality = base_ptrcls.get_cardinality(ctx.env.schema)
            base_required = base_ptrcls.get_required(ctx.env.schema)

        if base_cardinality is None:
            specified_cardinality = shape_el.cardinality
            specified_required = shape_el.required
        else:
            specified_cardinality = base_cardinality
            specified_required = base_required

            if (
                shape_el.cardinality is not None
                and base_ptrcls is not None
                and shape_el.cardinality != base_cardinality
            ):
                base_src = base_ptrcls.get_source(ctx.env.schema)
                assert base_src is not None
                base_src_name = base_src.get_verbosename(ctx.env.schema)
                raise errors.SchemaError(
                    f"cannot redefine the cardinality of "
                    f"{ptrcls.get_verbosename(ctx.env.schema)}: "
                    f"it is defined as {base_cardinality.as_ptr_qual()!r} "
                    f"in the base {base_src_name}",
                    # FIX: compexpr is None for non-computable elements;
                    # `and` short-circuits to None instead of raising
                    # AttributeError on `.context`.
                    context=compexpr and compexpr.context,
                )

            # The required flag may be inherited from the base
            specified_required = shape_el.required or base_required

        stmtctx.pend_pointer_cardinality_inference(
            ptrcls=ptrcls,
            specified_required=specified_required,
            specified_card=specified_cardinality,
            is_mut_assignment=is_mutation,
            shape_op=shape_el.operation.op,
            source_ctx=shape_el.context,
            ctx=ctx,
        )

        ctx.env.schema = ptrcls.set_field_value(ctx.env.schema, "cardinality", None)

    if (
        ptrcls.is_protected_pointer(ctx.env.schema)
        and qlexpr is not None
        and not from_default
        and not ctx.env.options.allow_writing_protected_pointers
    ):
        ptrcls_sn = ptrcls.get_shortname(ctx.env.schema)
        if is_polymorphic:
            msg = f"cannot access {ptrcls_sn.name} on a polymorphic shape element"
        else:
            msg = f"cannot assign to {ptrcls_sn.name}"
        raise errors.QueryError(msg, context=shape_el.context)

    if is_update and ptrcls.get_readonly(ctx.env.schema):
        raise errors.QueryError(
            f"cannot update {ptrcls.get_verbosename(ctx.env.schema)}: "
            f"it is declared as read-only",
            # FIX: guard against compexpr being None (see above).
            context=compexpr and compexpr.context,
        )

    return ptrcls
|
https://github.com/edgedb/edgedb/issues/1788
|
ERROR: InternalServerError:
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/Users/yury/dev/edge/edgedb/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/Users/yury/dev/edge/edgedb/edb/server/compiler/compiler.py", line 1795, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/Users/yury/dev/edge/edgedb/edb/server/compiler/compiler.py", line 1429, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/Users/yury/dev/edge/edgedb/edb/server/compiler/compiler.py", line 1392, in _compile_dispatch_ql
return self._compile_ql_query(ctx, ql)
File "/Users/yury/dev/edge/edgedb/edb/server/compiler/compiler.py", line 592, in _compile_ql_query
ir = qlcompiler.compile_ast_to_ir(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/__init__.py", line 175, in wrapper
return func(*args, **kwargs)
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/__init__.py", line 223, in compile_ast_to_ir
ir_set = dispatch_mod.compile(tree, ctx=ctx)
File "/opt/local/Library/Frameworks/Python.framework/Versions/3.8/lib/python3.8/functools.py", line 874, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/stmt.py", line 84, in compile_SelectQuery
stmt.result = compile_result_clause(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/stmt.py", line 1017, in compile_result_clause
ir_result = compile_query_subject(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/stmt.py", line 1097, in compile_query_subject
view_scls = viewgen.process_view(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/viewgen.py", line 86, in process_view
view_scls = _process_view(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/viewgen.py", line 173, in _process_view
pointer = _normalize_view_ptr_expr(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/viewgen.py", line 549, in _normalize_view_ptr_expr
ptr_target = _process_view(
File "/Users/yury/dev/edge/edgedb/edb/edgeql/compiler/viewgen.py", line 160, in _process_view
assert isinstance(view_scls, s_objtypes.ObjectType)
AssertionError
|
InternalServerError
|
def compile_Set(
    ir_set: irast.Set, *, ctx: context.CompilerContextLevel
) -> pgast.BaseExpr:
    """Compile an IR Set into a SQL expression node."""
    if ctx.singleton_mode:
        return _compile_set_in_singleton_mode(ir_set, ctx=ctx)

    # Capture this *before* compiling: _compile_set_impl may allocate the
    # toplevel statement as a side effect.
    at_top = ctx.toplevel_stmt is context.NO_STMT

    _compile_set_impl(ir_set, ctx=ctx)

    if not at_top:
        # Nested set: refer to its value variable in the current rel.
        val = pathctx.get_path_value_var(ctx.rel, ir_set.path_id, env=ctx.env)
        return output.output_as_value(val, env=ctx.env)

    if isinstance(ir_set.expr, irast.ConfigCommand):
        return config.top_output_as_config_op(ir_set, ctx.rel, env=ctx.env)

    # Ensure the serialized output column exists before rendering the
    # final top-level output.
    pathctx.get_path_serialized_output(ctx.rel, ir_set.path_id, env=ctx.env)
    return output.top_output_as_value(ctx.rel, ir_set, env=ctx.env)
|
def compile_Set(
    ir_set: irast.Set, *, ctx: context.CompilerContextLevel
) -> pgast.BaseExpr:
    """Compile an IR Set into a SQL expression node.

    In singleton mode the set is compiled as a simple scalar expression;
    otherwise it is compiled into ``ctx.rel`` and the result is either the
    final top-level output (for the outermost set) or a reference to the
    set's value variable (for nested sets).
    """
    if ctx.singleton_mode:
        return _compile_set_in_singleton_mode(ir_set, ctx=ctx)

    # Capture this *before* compiling: _compile_set_impl may allocate the
    # toplevel statement as a side effect.
    is_toplevel = ctx.toplevel_stmt is context.NO_STMT

    _compile_set_impl(ir_set, ctx=ctx)

    if is_toplevel:
        if isinstance(ir_set.expr, irast.ConfigCommand):
            return config.top_output_as_config_op(ir_set, ctx.rel, env=ctx.env)
        else:
            # Fix: register the serialized output column for the top-level
            # path before rendering the final output; without this the
            # output renderer may find no target column and fail with
            # "list index out of range" (edgedb/edgedb#1715).
            pathctx.get_path_serialized_output(
                ctx.rel, ir_set.path_id, env=ctx.env)
            return output.top_output_as_value(ctx.rel, ir_set, env=ctx.env)
    else:
        value = pathctx.get_path_value_var(ctx.rel, ir_set.path_id, env=ctx.env)
        return output.output_as_value(value, env=ctx.env)
|
https://github.com/edgedb/edgedb/issues/1715
|
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1816, in compile_eql_tokens_in_tx
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1429, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1352, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1047, in _compile_ql_migration
ddl_query = self._compile_and_apply_ddl_stmt(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 752, in _compile_and_apply_ddl_stmt
block, new_types = self._process_delta(ctx, delta)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 412, in _process_delta
schema = delta.apply(schema, context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/pgsql/delta.py", line 4027, in apply
schema = sd.DeltaRoot.apply(self, schema, context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/schema/delta.py", line 1094, in apply
schema = objop.apply(schema, context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/pgsql/delta.py", line 194, in apply
schema = self.__class__.get_adaptee().apply(self, schema, context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/schema/delta.py", line 2037, in apply
schema = self._create_innards(schema, context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/schema/delta.py", line 2005, in _create_innards
schema = op.apply(schema, context=context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/pgsql/delta.py", line 595, in apply
schema, op = self.make_op(self.scls, schema, context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/pgsql/delta.py", line 575, in make_op
dbf = self.compile_edgeql_function(func, schema, context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/pgsql/delta.py", line 546, in compile_edgeql_function
sql_text, _ = compiler.compile_ir_to_sql(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/pgsql/compiler/__init__.py", line 105, in compile_ir_to_sql
qtree = compile_ir_to_sql_tree(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/pgsql/compiler/__init__.py", line 90, in compile_ir_to_sql_tree
raise errors.InternalServerError(*args) from e
edb.errors.InternalServerError: list index out of range
|
edb.errors.InternalServerError
|
def get_path_output(
    rel: pgast.BaseRelation,
    path_id: irast.PathId,
    *,
    aspect: str,
    allow_nullable: bool = True,
    ptr_info: Optional[pg_types.PointerStorageInfo] = None,
    env: context.Environment,
) -> pgast.OutputVar:
    """Return the output var for *path_id* in *rel* for the given aspect.

    For Query relations the path id is first translated through the
    relation's view path-id map.
    """
    mapped_id = (
        map_path_id(path_id, rel.view_path_id_map)
        if isinstance(rel, pgast.Query)
        else path_id
    )
    return _get_path_output(
        rel,
        path_id=mapped_id,
        aspect=aspect,
        ptr_info=ptr_info,
        allow_nullable=allow_nullable,
        env=env,
    )
|
def get_path_output(
    rel: pgast.BaseRelation,
    path_id: irast.PathId,
    *,
    aspect: str,
    allow_nullable: bool = True,
    ptr_info: Optional[pg_types.PointerStorageInfo] = None,
    env: context.Environment,
) -> pgast.OutputVar:
    """Return the output var for *path_id* in *rel* for the given aspect.

    For Query relations the path id is first translated through the
    relation's view path-id map.
    """
    # Fix: test for pgast.Query explicitly rather than duck-typing with
    # getattr() + truthiness.  The old form skipped the translation when
    # the map was empty and hid the attribute access from the type
    # checker; every Query carries view_path_id_map, and map_path_id is
    # a no-op for ids absent from the map.
    if isinstance(rel, pgast.Query):
        path_id = map_path_id(path_id, rel.view_path_id_map)
    return _get_path_output(
        rel,
        path_id=path_id,
        aspect=aspect,
        ptr_info=ptr_info,
        allow_nullable=allow_nullable,
        env=env,
    )
|
https://github.com/edgedb/edgedb/issues/1715
|
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1816, in compile_eql_tokens_in_tx
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1429, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1352, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1047, in _compile_ql_migration
ddl_query = self._compile_and_apply_ddl_stmt(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 752, in _compile_and_apply_ddl_stmt
block, new_types = self._process_delta(ctx, delta)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 412, in _process_delta
schema = delta.apply(schema, context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/pgsql/delta.py", line 4027, in apply
schema = sd.DeltaRoot.apply(self, schema, context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/schema/delta.py", line 1094, in apply
schema = objop.apply(schema, context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/pgsql/delta.py", line 194, in apply
schema = self.__class__.get_adaptee().apply(self, schema, context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/schema/delta.py", line 2037, in apply
schema = self._create_innards(schema, context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/schema/delta.py", line 2005, in _create_innards
schema = op.apply(schema, context=context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/pgsql/delta.py", line 595, in apply
schema, op = self.make_op(self.scls, schema, context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/pgsql/delta.py", line 575, in make_op
dbf = self.compile_edgeql_function(func, schema, context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/pgsql/delta.py", line 546, in compile_edgeql_function
sql_text, _ = compiler.compile_ir_to_sql(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/pgsql/compiler/__init__.py", line 105, in compile_ir_to_sql
qtree = compile_ir_to_sql_tree(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/pgsql/compiler/__init__.py", line 90, in compile_ir_to_sql_tree
raise errors.InternalServerError(*args) from e
edb.errors.InternalServerError: list index out of range
|
edb.errors.InternalServerError
|
def get_path_serialized_output(
    rel: pgast.Query, path_id: irast.PathId, *, env: context.Environment
) -> pgast.OutputVar:
    """Return (creating it if needed) the serialized output for *path_id*."""
    # Serialized output is a special case: we don't want this behaviour
    # to be recursive, so it is kept outside of the get_path_output()
    # generic.
    aspect = "serialized"
    path_id = map_path_id(path_id, rel.view_path_id_map)

    cached = rel.path_outputs.get((path_id, aspect))
    if cached is not None:
        return cached

    var = get_path_serialized_or_value_var(rel, path_id, env=env)
    ser_expr = output.serialize_expr(var, path_id=path_id, env=env)
    alias = get_path_output_alias(path_id, aspect, env=env)
    rel.target_list.append(
        pgast.ResTarget(name=alias, val=ser_expr, ser_safe=True))

    colref = pgast.ColumnRef(
        name=[alias], nullable=ser_expr.nullable, ser_safe=True)
    _put_path_output_var(rel, path_id, aspect, colref, env=env)
    return colref
|
def get_path_serialized_output(
    rel: pgast.Query, path_id: irast.PathId, *, env: context.Environment
) -> pgast.OutputVar:
    """Return (creating it if needed) the serialized output for *path_id*."""
    # Serialized output is a special case, we don't
    # want this behaviour to be recursive, so it
    # must be kept outside of get_path_output() generic.
    aspect = "serialized"
    # Fix: translate the path id through the relation's view map first,
    # mirroring get_path_output(); without this, lookups against rels
    # with a view_path_id_map miss their cached/serialized columns and
    # downstream code fails with "list index out of range"
    # (edgedb/edgedb#1715).
    path_id = map_path_id(path_id, rel.view_path_id_map)
    result = rel.path_outputs.get((path_id, aspect))
    if result is not None:
        return result
    ref = get_path_serialized_or_value_var(rel, path_id, env=env)
    refexpr = output.serialize_expr(ref, path_id=path_id, env=env)
    alias = get_path_output_alias(path_id, aspect, env=env)
    restarget = pgast.ResTarget(name=alias, val=refexpr, ser_safe=True)
    rel.target_list.append(restarget)
    result = pgast.ColumnRef(name=[alias], nullable=refexpr.nullable, ser_safe=True)
    _put_path_output_var(rel, path_id, aspect, result, env=env)
    return result
|
https://github.com/edgedb/edgedb/issues/1715
|
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1816, in compile_eql_tokens_in_tx
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1429, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1352, in _compile_dispatch_ql
return self._compile_ql_migration(ctx, ql)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 1047, in _compile_ql_migration
ddl_query = self._compile_and_apply_ddl_stmt(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 752, in _compile_and_apply_ddl_stmt
block, new_types = self._process_delta(ctx, delta)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/server/compiler/compiler.py", line 412, in _process_delta
schema = delta.apply(schema, context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/pgsql/delta.py", line 4027, in apply
schema = sd.DeltaRoot.apply(self, schema, context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/schema/delta.py", line 1094, in apply
schema = objop.apply(schema, context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/pgsql/delta.py", line 194, in apply
schema = self.__class__.get_adaptee().apply(self, schema, context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/schema/delta.py", line 2037, in apply
schema = self._create_innards(schema, context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/schema/delta.py", line 2005, in _create_innards
schema = op.apply(schema, context=context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/pgsql/delta.py", line 595, in apply
schema, op = self.make_op(self.scls, schema, context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/pgsql/delta.py", line 575, in make_op
dbf = self.compile_edgeql_function(func, schema, context)
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/pgsql/delta.py", line 546, in compile_edgeql_function
sql_text, _ = compiler.compile_ir_to_sql(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/pgsql/compiler/__init__.py", line 105, in compile_ir_to_sql
qtree = compile_ir_to_sql_tree(
File "/usr/lib/x86_64-linux-gnu/edgedb-server-1-alpha5/lib/python3.8/site-packages/edb/pgsql/compiler/__init__.py", line 90, in compile_ir_to_sql_tree
raise errors.InternalServerError(*args) from e
edb.errors.InternalServerError: list index out of range
|
edb.errors.InternalServerError
|
def ptrref_from_ptrcls(
    *,
    schema: s_schema.Schema,
    ptrcls: s_pointers.PointerLike,
    direction: s_pointers.PointerDirection = (s_pointers.PointerDirection.Outbound),
    cache: Optional[Dict[PtrRefCacheKey, irast.BasePointerRef]] = None,
    typeref_cache: Optional[Dict[TypeRefCacheKey, irast.TypeRef]] = None,
    include_descendants: bool = False,
) -> irast.BasePointerRef:
    """Return an IR pointer descriptor for a given schema pointer.

    An IR PointerRef is an object that fully describes a schema pointer for
    the purposes of query compilation.

    Args:
        schema:
            A schema instance, in which the type *t* is defined.
        ptrcls:
            A :class:`schema.pointers.Pointer` instance for which to
            return the PointerRef.
        direction:
            The direction of the pointer in the path expression.

    Returns:
        An instance of a subclass of :class:`ir.ast.BasePointerRef`
        corresponding to the given schema pointer.
    """
    # Serve from the memoization cache when possible.  The key includes
    # include_descendants, since it changes the shape of the result.
    if cache is not None:
        cached = cache.get((ptrcls, direction, include_descendants))
        if cached is not None:
            return cached
    kwargs: Dict[str, Any] = {}
    ircls: Type[irast.BasePointerRef]
    source_ref: Optional[irast.TypeRef]
    target_ref: Optional[irast.TypeRef]
    out_source: Optional[irast.TypeRef]
    # Pick the BasePointerRef subclass matching the kind of pointer, and
    # collect any subclass-specific constructor arguments.
    if isinstance(ptrcls, irast.TupleIndirectionLink):
        ircls = irast.TupleIndirectionPointerRef
    elif isinstance(ptrcls, irast.TypeIntersectionLink):
        ircls = irast.TypeIntersectionPointerRef
        kwargs["optional"] = ptrcls.is_optional()
        kwargs["is_empty"] = ptrcls.is_empty()
        kwargs["is_subtype"] = ptrcls.is_subtype()
        kwargs["rptr_specialization"] = ptrcls.get_rptr_specialization()
    elif isinstance(ptrcls, s_pointers.Pointer):
        ircls = irast.PointerRef
        kwargs["id"] = ptrcls.id
        name = ptrcls.get_name(schema)
        kwargs["module_id"] = schema.get_global(s_mod.Module, name.module).id
    else:
        raise AssertionError(f"unexpected pointer class: {ptrcls}")
    # Resolve the far endpoint into a TypeRef (it may already be one).
    target = ptrcls.get_far_endpoint(schema, direction)
    if target is not None and not isinstance(target, irast.TypeRef):
        assert isinstance(target, s_types.Type)
        target_ref = type_to_typeref(schema, target, cache=typeref_cache)
    else:
        target_ref = target
    source = ptrcls.get_near_endpoint(schema, direction)
    source_ptr: Optional[irast.BasePointerRef]
    if isinstance(ptrcls, s_props.Property) and isinstance(source, s_links.Link):
        # Link property: its source is the link itself, represented as a
        # pointer ref rather than a type ref.
        source_ptr = ptrref_from_ptrcls(
            ptrcls=source,
            direction=direction,
            schema=schema,
            cache=cache,
            typeref_cache=typeref_cache,
        )
        source_ref = None
    else:
        if source is not None and not isinstance(source, irast.TypeRef):
            assert isinstance(source, s_types.Type)
            source_ref = type_to_typeref(schema, source, cache=typeref_cache)
        else:
            source_ref = source
        source_ptr = None
    # For inbound traversal the endpoints are swapped.
    if direction is s_pointers.PointerDirection.Inbound:
        out_source = target_ref
        out_target = source_ref
    else:
        out_source = source_ref
        out_target = target_ref
    out_cardinality, dir_cardinality = cardinality_from_ptrcls(
        schema, ptrcls, direction=direction
    )
    # Reference to the "material" pointer, if this pointer is distinct
    # from its material type.
    material_ptrcls = ptrcls.material_type(schema)
    material_ptr: Optional[irast.BasePointerRef]
    if material_ptrcls is not None and material_ptrcls is not ptrcls:
        material_ptr = ptrref_from_ptrcls(
            ptrcls=material_ptrcls,
            direction=direction,
            schema=schema,
            cache=cache,
            typeref_cache=typeref_cache,
            include_descendants=include_descendants,
        )
    else:
        material_ptr = None
    # For union pointers, compute refs for the non-overlapping set of
    # material component pointers.
    union_components: Set[irast.BasePointerRef] = set()
    union_of = ptrcls.get_union_of(schema)
    union_is_concrete = False
    if union_of:
        union_ptrs = set()
        for component in union_of.objects(schema):
            assert isinstance(component, s_pointers.Pointer)
            material_comp = component.material_type(schema)
            union_ptrs.add(material_comp)
        non_overlapping, union_is_concrete = s_utils.get_non_overlapping_union(
            schema,
            union_ptrs,
        )
        union_components = {
            ptrref_from_ptrcls(
                ptrcls=p,
                direction=direction,
                schema=schema,
                cache=cache,
                typeref_cache=typeref_cache,
            )
            for p in non_overlapping
        }
    # Record the nearest generic std:: ancestor, if any.
    std_parent_name = None
    for ancestor in ptrcls.get_ancestors(schema).objects(schema):
        ancestor_name = ancestor.get_name(schema)
        if ancestor_name.module == "std" and ancestor.generic(schema):
            std_parent_name = ancestor_name
            break
    is_derived = ptrcls.get_is_derived(schema)
    base_ptr: Optional[irast.BasePointerRef]
    if is_derived:
        base_ptrcls = ptrcls.get_bases(schema).first(schema)
        top_ptr_name = type(base_ptrcls).get_default_base_name()
        # Only record a base pointer ref when the base is not the default
        # top pointer of its class.
        if base_ptrcls.get_name(schema) != top_ptr_name:
            base_ptr = ptrref_from_ptrcls(
                ptrcls=base_ptrcls,
                direction=direction,
                schema=schema,
                cache=cache,
                typeref_cache=typeref_cache,
            )
        else:
            base_ptr = None
    else:
        base_ptr = None
    # Descendant refs are only computed for material pointers when
    # explicitly requested; derived children are skipped.
    if (
        material_ptr is None
        and include_descendants
        and isinstance(ptrcls, s_pointers.Pointer)
    ):
        descendants = frozenset(
            ptrref_from_ptrcls(
                ptrcls=child,
                direction=direction,
                schema=schema,
                cache=cache,
                typeref_cache=typeref_cache,
            )
            for child in ptrcls.children(schema)
            if not child.get_is_derived(schema)
        )
    else:
        descendants = frozenset()
    kwargs.update(
        dict(
            out_source=out_source,
            out_target=out_target,
            name=ptrcls.get_name(schema),
            shortname=ptrcls.get_shortname(schema),
            path_id_name=ptrcls.get_path_id_name(schema),
            std_parent_name=std_parent_name,
            direction=direction,
            source_ptr=source_ptr,
            base_ptr=base_ptr,
            material_ptr=material_ptr,
            descendants=descendants,
            is_derived=ptrcls.get_is_derived(schema),
            is_computable=ptrcls.get_computable(schema),
            union_components=union_components,
            union_is_concrete=union_is_concrete,
            has_properties=ptrcls.has_user_defined_properties(schema),
            dir_cardinality=dir_cardinality,
            out_cardinality=out_cardinality,
        )
    )
    ptrref = ircls(**kwargs)
    if cache is not None:
        cache[ptrcls, direction, include_descendants] = ptrref
    return ptrref
|
def ptrref_from_ptrcls(
    *,
    schema: s_schema.Schema,
    ptrcls: s_pointers.PointerLike,
    direction: s_pointers.PointerDirection = (s_pointers.PointerDirection.Outbound),
    cache: Optional[Dict[PtrRefCacheKey, irast.BasePointerRef]] = None,
    typeref_cache: Optional[Dict[TypeRefCacheKey, irast.TypeRef]] = None,
    include_descendants: bool = False,
) -> irast.BasePointerRef:
    """Return an IR pointer descriptor for a given schema pointer.

    An IR PointerRef is an object that fully describes a schema pointer for
    the purposes of query compilation.

    Args:
        schema:
            A schema instance, in which the type *t* is defined.
        ptrcls:
            A :class:`schema.pointers.Pointer` instance for which to
            return the PointerRef.
        direction:
            The direction of the pointer in the path expression.

    Returns:
        An instance of a subclass of :class:`ir.ast.BasePointerRef`
        corresponding to the given schema pointer.
    """
    # Serve from the memoization cache when possible.  The key includes
    # include_descendants, since it changes the shape of the result.
    if cache is not None:
        cached = cache.get((ptrcls, direction, include_descendants))
        if cached is not None:
            return cached
    kwargs: Dict[str, Any] = {}
    ircls: Type[irast.BasePointerRef]
    source_ref: Optional[irast.TypeRef]
    target_ref: Optional[irast.TypeRef]
    out_source: Optional[irast.TypeRef]
    # Pick the BasePointerRef subclass matching the kind of pointer, and
    # collect any subclass-specific constructor arguments.
    if isinstance(ptrcls, irast.TupleIndirectionLink):
        ircls = irast.TupleIndirectionPointerRef
    elif isinstance(ptrcls, irast.TypeIntersectionLink):
        ircls = irast.TypeIntersectionPointerRef
        kwargs["optional"] = ptrcls.is_optional()
        kwargs["is_empty"] = ptrcls.is_empty()
        kwargs["is_subtype"] = ptrcls.is_subtype()
        kwargs["rptr_specialization"] = ptrcls.get_rptr_specialization()
    elif isinstance(ptrcls, s_pointers.Pointer):
        ircls = irast.PointerRef
        kwargs["id"] = ptrcls.id
        name = ptrcls.get_name(schema)
        kwargs["module_id"] = schema.get_global(s_mod.Module, name.module).id
    else:
        raise AssertionError(f"unexpected pointer class: {ptrcls}")
    # Resolve the far endpoint into a TypeRef (it may already be one).
    target = ptrcls.get_far_endpoint(schema, direction)
    if target is not None and not isinstance(target, irast.TypeRef):
        assert isinstance(target, s_types.Type)
        target_ref = type_to_typeref(schema, target, cache=typeref_cache)
    else:
        target_ref = target
    source = ptrcls.get_near_endpoint(schema, direction)
    source_ptr: Optional[irast.BasePointerRef]
    if isinstance(ptrcls, s_props.Property) and isinstance(source, s_links.Link):
        # Link property: its source is the link itself, represented as a
        # pointer ref rather than a type ref.
        source_ptr = ptrref_from_ptrcls(
            ptrcls=source,
            direction=direction,
            schema=schema,
            cache=cache,
            typeref_cache=typeref_cache,
        )
        source_ref = None
    else:
        if source is not None and not isinstance(source, irast.TypeRef):
            assert isinstance(source, s_types.Type)
            source_ref = type_to_typeref(schema, source, cache=typeref_cache)
        else:
            source_ref = source
        source_ptr = None
    # For inbound traversal the endpoints are swapped.
    if direction is s_pointers.PointerDirection.Inbound:
        out_source = target_ref
        out_target = source_ref
    else:
        out_source = source_ref
        out_target = target_ref
    out_cardinality, dir_cardinality = cardinality_from_ptrcls(
        schema, ptrcls, direction=direction
    )
    # Reference to the "material" pointer, if this pointer is distinct
    # from its material type.
    material_ptrcls = ptrcls.material_type(schema)
    material_ptr: Optional[irast.BasePointerRef]
    if material_ptrcls is not None and material_ptrcls is not ptrcls:
        material_ptr = ptrref_from_ptrcls(
            ptrcls=material_ptrcls,
            direction=direction,
            schema=schema,
            cache=cache,
            typeref_cache=typeref_cache,
            include_descendants=include_descendants,
        )
    else:
        material_ptr = None
    # For union pointers, compute refs for the non-overlapping set of
    # material component pointers.
    union_components: Set[irast.BasePointerRef] = set()
    union_of = ptrcls.get_union_of(schema)
    union_is_concrete = False
    if union_of:
        union_ptrs = set()
        for component in union_of.objects(schema):
            assert isinstance(component, s_pointers.Pointer)
            material_comp = component.material_type(schema)
            union_ptrs.add(material_comp)
        non_overlapping, union_is_concrete = s_utils.get_non_overlapping_union(
            schema,
            union_ptrs,
        )
        union_components = {
            ptrref_from_ptrcls(
                ptrcls=p,
                direction=direction,
                schema=schema,
                cache=cache,
                typeref_cache=typeref_cache,
            )
            for p in non_overlapping
        }
    # Record the nearest generic std:: ancestor, if any.
    std_parent_name = None
    for ancestor in ptrcls.get_ancestors(schema).objects(schema):
        ancestor_name = ancestor.get_name(schema)
        if ancestor_name.module == "std" and ancestor.generic(schema):
            std_parent_name = ancestor_name
            break
    is_derived = ptrcls.get_is_derived(schema)
    base_ptr: Optional[irast.BasePointerRef]
    if is_derived:
        base_ptrcls = ptrcls.get_bases(schema).first(schema)
        # Fix: do not emit a base pointer ref when the base is the default
        # top pointer of its class.  Unconditionally recursing here produced
        # a ref to the generic top pointer, which downstream SQL compilation
        # cannot resolve ("list index out of range", edgedb/edgedb#1614).
        top_ptr_name = type(base_ptrcls).get_default_base_name()
        if base_ptrcls.get_name(schema) != top_ptr_name:
            base_ptr = ptrref_from_ptrcls(
                ptrcls=base_ptrcls,
                direction=direction,
                schema=schema,
                cache=cache,
                typeref_cache=typeref_cache,
            )
        else:
            base_ptr = None
    else:
        base_ptr = None
    # Descendant refs are only computed for material pointers when
    # explicitly requested; derived children are skipped.
    if (
        material_ptr is None
        and include_descendants
        and isinstance(ptrcls, s_pointers.Pointer)
    ):
        descendants = frozenset(
            ptrref_from_ptrcls(
                ptrcls=child,
                direction=direction,
                schema=schema,
                cache=cache,
                typeref_cache=typeref_cache,
            )
            for child in ptrcls.children(schema)
            if not child.get_is_derived(schema)
        )
    else:
        descendants = frozenset()
    kwargs.update(
        dict(
            out_source=out_source,
            out_target=out_target,
            name=ptrcls.get_name(schema),
            shortname=ptrcls.get_shortname(schema),
            path_id_name=ptrcls.get_path_id_name(schema),
            std_parent_name=std_parent_name,
            direction=direction,
            source_ptr=source_ptr,
            base_ptr=base_ptr,
            material_ptr=material_ptr,
            descendants=descendants,
            is_derived=ptrcls.get_is_derived(schema),
            is_computable=ptrcls.get_computable(schema),
            union_components=union_components,
            union_is_concrete=union_is_concrete,
            has_properties=ptrcls.has_user_defined_properties(schema),
            dir_cardinality=dir_cardinality,
            out_cardinality=out_cardinality,
        )
    )
    ptrref = ircls(**kwargs)
    if cache is not None:
        cache[ptrcls, direction, include_descendants] = ptrref
    return ptrref
|
https://github.com/edgedb/edgedb/issues/1614
|
ERROR: InternalServerError: 'NoneType' object has no attribute 'is_single'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/home/victor/dev/magicstack/edgedb/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/home/victor/dev/magicstack/edgedb/edb/server/compiler/compiler.py", line 1683, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/home/victor/dev/magicstack/edgedb/edb/server/compiler/compiler.py", line 1319, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/home/victor/dev/magicstack/edgedb/edb/server/compiler/compiler.py", line 1282, in _compile_dispatch_ql
return self._compile_ql_query(ctx, ql)
File "/home/victor/dev/magicstack/edgedb/edb/server/compiler/compiler.py", line 589, in _compile_ql_query
ir = qlcompiler.compile_ast_to_ir(
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/compiler/__init__.py", line 175, in wrapper
return func(*args, **kwargs)
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/compiler/__init__.py", line 224, in compile_ast_to_ir
ir_expr = stmtctx_mod.fini_expression(ir_set, ctx=ctx)
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/compiler/stmtctx.py", line 116, in fini_expression
cb(ctx=ctx)
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/compiler/stmtctx.py", line 512, in _infer_pointer_cardinality
inferred_card = infer_expr_cardinality(irexpr=irexpr, ctx=ctx)
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/compiler/stmtctx.py", line 489, in infer_expr_cardinality
return inference.infer_cardinality(irexpr, scope_tree=scope, env=ctx.env)
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/compiler/inference/cardinality.py", line 929, in infer_cardinality
result = _infer_cardinality(
File "/home/victor/dev/venvs/edgedb/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/compiler/inference/cardinality.py", line 371, in __infer_set
return infer_cardinality(
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/compiler/inference/cardinality.py", line 929, in infer_cardinality
result = _infer_cardinality(
File "/home/victor/dev/venvs/edgedb/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/compiler/inference/cardinality.py", line 719, in __infer_select_stmt
stmt_card = _infer_stmt_cardinality(
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/compiler/inference/cardinality.py", line 689, in _infer_stmt_cardinality
result_card = infer_cardinality(
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/compiler/inference/cardinality.py", line 929, in infer_cardinality
result = _infer_cardinality(
File "/home/victor/dev/venvs/edgedb/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/compiler/inference/cardinality.py", line 371, in __infer_set
return infer_cardinality(
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/compiler/inference/cardinality.py", line 929, in infer_cardinality
result = _infer_cardinality(
File "/home/victor/dev/venvs/edgedb/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/compiler/inference/cardinality.py", line 403, in __infer_func_call
return _common_cardinality(
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/compiler/inference/cardinality.py", line 185, in _common_cardinality
return cartesian_cardinality(
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/compiler/inference/cardinality.py", line 125, in cartesian_cardinality
card = list(zip(*(_card_to_bounds(a) for a in args)))
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/compiler/inference/cardinality.py", line 125, in <genexpr>
card = list(zip(*(_card_to_bounds(a) for a in args)))
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/compiler/inference/cardinality.py", line 186, in <genexpr>
infer_cardinality(
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/compiler/inference/cardinality.py", line 929, in infer_cardinality
result = _infer_cardinality(
File "/home/victor/dev/venvs/edgedb/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/compiler/inference/cardinality.py", line 360, in __infer_set
source_card = infer_cardinality(
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/compiler/inference/cardinality.py", line 929, in infer_cardinality
result = _infer_cardinality(
File "/home/victor/dev/venvs/edgedb/lib/python3.8/functools.py", line 875, in wrapper
return dispatch(args[0].__class__)(*args, **kw)
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/compiler/inference/cardinality.py", line 358, in __infer_set
if rptrref_card.is_single():
AttributeError: 'NoneType' object has no attribute 'is_single'
|
AttributeError
|
def reduce_CreateConstraint(self, *kids):
    r"""%reduce ABSTRACT CONSTRAINT NodeName OptOnExpr \
                OptExtendingSimple"""
    # Production symbols line up positionally with *kids*.
    _, _, name_tok, on_expr, extending = kids
    self.val = qlast.CreateConstraint(
        name=name_tok.val,
        subject=on_expr.val,
        bases=extending.val,
    )
|
def reduce_CreateConstraint(self, *kids):
    r"""%reduce ABSTRACT CONSTRAINT NodeName OptOnExpr \
                OptExtendingSimple"""
    # kids: ABSTRACT, CONSTRAINT, NodeName, OptOnExpr, OptExtendingSimple
    self.val = qlast.CreateConstraint(
        name=kids[2].val,
        subject=kids[3].val,
        # Fix: the AST field carrying the EXTENDING clause is ``bases``,
        # not ``extends``.  Passing ``extends`` silently dropped the
        # constraint's base, which later crashed with "'NoneType' object
        # has no attribute 'get_parameter_name'" (edgedb/edgedb#1441).
        bases=kids[4].val,
    )
|
https://github.com/edgedb/edgedb/issues/1441
|
db> CREATE ABSTRACT CONSTRAINT `🚀🍿` EXTENDING max_len_value;
OK: CREATE
db> CREATE SCALAR TYPE foo EXTENDING str {
... CREATE CONSTRAINT `🚀🍿`(10);
... };
ERROR: InternalServerError: 'NoneType' object has no attribute 'get_parameter_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/home/victor/dev/magicstack/edgedb/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/home/victor/dev/magicstack/edgedb/edb/server/compiler/compiler.py", line 1480, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/home/victor/dev/magicstack/edgedb/edb/server/compiler/compiler.py", line 1141, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/home/victor/dev/magicstack/edgedb/edb/server/compiler/compiler.py", line 1076, in _compile_dispatch_ql
return self._compile_ql_ddl(ctx, ql)
File "/home/victor/dev/magicstack/edgedb/edb/server/compiler/compiler.py", line 763, in _compile_ql_ddl
cmd = s_ddl.delta_from_ddl(
File "/home/victor/dev/magicstack/edgedb/edb/schema/ddl.py", line 466, in delta_from_ddl
_, cmd = _delta_from_ddl(ddl_stmt, schema=schema, modaliases=modaliases,
File "/home/victor/dev/magicstack/edgedb/edb/schema/ddl.py", line 496, in _delta_from_ddl
schema = cmd.apply(schema, context)
File "/home/victor/dev/magicstack/edgedb/edb/schema/delta.py", line 1531, in apply
schema = self._create_innards(schema, context)
File "/home/victor/dev/magicstack/edgedb/edb/schema/inheriting.py", line 622, in _create_innards
return super()._create_innards(schema, context)
File "/home/victor/dev/magicstack/edgedb/edb/schema/delta.py", line 1498, in _create_innards
schema = op.apply(schema, context=context)
File "/home/victor/dev/magicstack/edgedb/edb/schema/delta.py", line 1527, in apply
schema = self._create_begin(schema, context)
File "/home/victor/dev/magicstack/edgedb/edb/schema/constraints.py", line 641, in _create_begin
constr_base, attrs, inh = Constraint.get_concrete_constraint_attrs(
File "/home/victor/dev/magicstack/edgedb/edb/schema/constraints.py", line 226, in get_concrete_constraint_attrs
args_map = qlutils.index_parameters(
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/utils.py", line 92, in index_parameters
result[p.get_parameter_name(schema)] = e
AttributeError: 'NoneType' object has no attribute 'get_parameter_name'
|
AttributeError
|
def reduce_CreateConstraint_CreateFunctionArgs(self, *kids):
    # Parser production for ABSTRACT CONSTRAINT with a parameter list.
    # NOTE: the r-string below is the grammar specification consumed by
    # the parser generator — it is functional, not documentation.
    r"""%reduce ABSTRACT CONSTRAINT NodeName CreateFunctionArgs \
    OptOnExpr OptExtendingSimple"""
    self.val = qlast.CreateConstraint(
        name=kids[2].val,      # NodeName
        params=kids[3].val,    # CreateFunctionArgs
        subject=kids[4].val,   # OptOnExpr
        bases=kids[5].val,     # OptExtendingSimple
    )
|
def reduce_CreateConstraint_CreateFunctionArgs(self, *kids):
    # Parser production for ABSTRACT CONSTRAINT with a parameter list.
    # This variant populates the (older) ``extends`` AST field instead of
    # ``bases``.
    # NOTE: the r-string below is the grammar specification consumed by
    # the parser generator — it is functional, not documentation.
    r"""%reduce ABSTRACT CONSTRAINT NodeName CreateFunctionArgs \
    OptOnExpr OptExtendingSimple"""
    self.val = qlast.CreateConstraint(
        name=kids[2].val,      # NodeName
        params=kids[3].val,    # CreateFunctionArgs
        subject=kids[4].val,   # OptOnExpr
        extends=kids[5].val,   # OptExtendingSimple
    )
|
https://github.com/edgedb/edgedb/issues/1441
|
db> CREATE ABSTRACT CONSTRAINT `🚀🍿` EXTENDING max_len_value;
OK: CREATE
db> CREATE SCALAR TYPE foo EXTENDING str {
... CREATE CONSTRAINT `🚀🍿`(10);
... };
ERROR: InternalServerError: 'NoneType' object has no attribute 'get_parameter_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/home/victor/dev/magicstack/edgedb/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/home/victor/dev/magicstack/edgedb/edb/server/compiler/compiler.py", line 1480, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/home/victor/dev/magicstack/edgedb/edb/server/compiler/compiler.py", line 1141, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/home/victor/dev/magicstack/edgedb/edb/server/compiler/compiler.py", line 1076, in _compile_dispatch_ql
return self._compile_ql_ddl(ctx, ql)
File "/home/victor/dev/magicstack/edgedb/edb/server/compiler/compiler.py", line 763, in _compile_ql_ddl
cmd = s_ddl.delta_from_ddl(
File "/home/victor/dev/magicstack/edgedb/edb/schema/ddl.py", line 466, in delta_from_ddl
_, cmd = _delta_from_ddl(ddl_stmt, schema=schema, modaliases=modaliases,
File "/home/victor/dev/magicstack/edgedb/edb/schema/ddl.py", line 496, in _delta_from_ddl
schema = cmd.apply(schema, context)
File "/home/victor/dev/magicstack/edgedb/edb/schema/delta.py", line 1531, in apply
schema = self._create_innards(schema, context)
File "/home/victor/dev/magicstack/edgedb/edb/schema/inheriting.py", line 622, in _create_innards
return super()._create_innards(schema, context)
File "/home/victor/dev/magicstack/edgedb/edb/schema/delta.py", line 1498, in _create_innards
schema = op.apply(schema, context=context)
File "/home/victor/dev/magicstack/edgedb/edb/schema/delta.py", line 1527, in apply
schema = self._create_begin(schema, context)
File "/home/victor/dev/magicstack/edgedb/edb/schema/constraints.py", line 641, in _create_begin
constr_base, attrs, inh = Constraint.get_concrete_constraint_attrs(
File "/home/victor/dev/magicstack/edgedb/edb/schema/constraints.py", line 226, in get_concrete_constraint_attrs
args_map = qlutils.index_parameters(
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/utils.py", line 92, in index_parameters
result[p.get_parameter_name(schema)] = e
AttributeError: 'NoneType' object has no attribute 'get_parameter_name'
|
AttributeError
|
def _create_begin(
    self,
    schema: s_schema.Schema,
    context: sd.CommandContext,
) -> s_schema.Schema:
    """Begin creation of a constraint.

    Abstract constraints (no referrer context) are created directly and
    their parameter declarations validated; concrete constraints first
    resolve the attribute values inherited from their abstract base and
    record them on this command before delegating to the generic
    creation machinery.
    """
    referrer_ctx = self.get_referrer_context(context)
    if referrer_ctx is None:
        # Abstract constraint definition: create it, then validate the
        # declared parameters (only on the non-canonical pass).
        schema = super()._create_begin(schema, context)
        if not context.canonical:
            self._validate_params(schema, context)
        return schema
    subject = referrer_ctx.scls
    assert isinstance(subject, ConsistencySubject)
    if not subject.can_accept_constraints(schema):
        raise errors.UnsupportedFeatureError(
            f"constraints cannot be defined on {subject.get_verbosename(schema)}",
            context=self.source_context,
        )
    if not context.canonical:
        # Resolve the concrete attribute values inherited from the
        # abstract constraint base; ``name`` and ``subject`` are
        # handled separately and must not be passed through.
        props = self.get_resolved_attributes(schema, context)
        props.pop("name")
        props.pop("subject", None)
        fullname = self.classname
        shortname = sn.shortname_from_fullname(fullname)
        constr_base, attrs, inh = Constraint.get_concrete_constraint_attrs(
            schema, subject, name=shortname, sourcectx=self.source_context, **props
        )
        for k, v in attrs.items():
            inherited = inh.get(k)
            self.set_attribute_value(k, v, inherited=bool(inherited))
        self.set_attribute_value("subject", subject)
    return super()._create_begin(schema, context)
|
def _create_begin(
    self,
    schema: s_schema.Schema,
    context: sd.CommandContext,
) -> s_schema.Schema:
    """Begin creation of a constraint.

    Abstract constraints (no referrer context) go straight to the
    generic creation path; concrete constraints first resolve the
    attribute values inherited from their abstract base and record
    them on this command.
    """
    referrer_ctx = self.get_referrer_context(context)
    if referrer_ctx is None:
        # Abstract constraint definition: no subject-specific work.
        return super()._create_begin(schema, context)
    subject = referrer_ctx.scls
    assert isinstance(subject, ConsistencySubject)
    if not subject.can_accept_constraints(schema):
        raise errors.UnsupportedFeatureError(
            f"constraints cannot be defined on {subject.get_verbosename(schema)}",
            context=self.source_context,
        )
    if not context.canonical:
        # Resolve the concrete attribute values inherited from the
        # abstract constraint base; ``name`` and ``subject`` are
        # handled separately and must not be passed through.
        props = self.get_resolved_attributes(schema, context)
        props.pop("name")
        props.pop("subject", None)
        fullname = self.classname
        shortname = sn.shortname_from_fullname(fullname)
        constr_base, attrs, inh = Constraint.get_concrete_constraint_attrs(
            schema, subject, name=shortname, sourcectx=self.source_context, **props
        )
        for k, v in attrs.items():
            inherited = inh.get(k)
            self.set_attribute_value(k, v, inherited=bool(inherited))
        self.set_attribute_value("subject", subject)
    return super()._create_begin(schema, context)
|
https://github.com/edgedb/edgedb/issues/1441
|
db> CREATE ABSTRACT CONSTRAINT `🚀🍿` EXTENDING max_len_value;
OK: CREATE
db> CREATE SCALAR TYPE foo EXTENDING str {
... CREATE CONSTRAINT `🚀🍿`(10);
... };
ERROR: InternalServerError: 'NoneType' object has no attribute 'get_parameter_name'
Hint: This is most likely a bug in EdgeDB. Please consider opening an issue ticket at https://github.com/edgedb/edgedb/issues/new?template=bug_report.md
Server traceback:
Traceback (most recent call last):
File "/home/victor/dev/magicstack/edgedb/edb/server/procpool/worker.py", line 75, in worker
res = await meth(*args)
File "/home/victor/dev/magicstack/edgedb/edb/server/compiler/compiler.py", line 1480, in compile_eql_tokens
return self._compile(ctx=ctx, tokens=eql_tokens)
File "/home/victor/dev/magicstack/edgedb/edb/server/compiler/compiler.py", line 1141, in _compile
comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)
File "/home/victor/dev/magicstack/edgedb/edb/server/compiler/compiler.py", line 1076, in _compile_dispatch_ql
return self._compile_ql_ddl(ctx, ql)
File "/home/victor/dev/magicstack/edgedb/edb/server/compiler/compiler.py", line 763, in _compile_ql_ddl
cmd = s_ddl.delta_from_ddl(
File "/home/victor/dev/magicstack/edgedb/edb/schema/ddl.py", line 466, in delta_from_ddl
_, cmd = _delta_from_ddl(ddl_stmt, schema=schema, modaliases=modaliases,
File "/home/victor/dev/magicstack/edgedb/edb/schema/ddl.py", line 496, in _delta_from_ddl
schema = cmd.apply(schema, context)
File "/home/victor/dev/magicstack/edgedb/edb/schema/delta.py", line 1531, in apply
schema = self._create_innards(schema, context)
File "/home/victor/dev/magicstack/edgedb/edb/schema/inheriting.py", line 622, in _create_innards
return super()._create_innards(schema, context)
File "/home/victor/dev/magicstack/edgedb/edb/schema/delta.py", line 1498, in _create_innards
schema = op.apply(schema, context=context)
File "/home/victor/dev/magicstack/edgedb/edb/schema/delta.py", line 1527, in apply
schema = self._create_begin(schema, context)
File "/home/victor/dev/magicstack/edgedb/edb/schema/constraints.py", line 641, in _create_begin
constr_base, attrs, inh = Constraint.get_concrete_constraint_attrs(
File "/home/victor/dev/magicstack/edgedb/edb/schema/constraints.py", line 226, in get_concrete_constraint_attrs
args_map = qlutils.index_parameters(
File "/home/victor/dev/magicstack/edgedb/edb/edgeql/utils.py", line 92, in index_parameters
result[p.get_parameter_name(schema)] = e
AttributeError: 'NoneType' object has no attribute 'get_parameter_name'
|
AttributeError
|
def apply(self, schema, context):
    """Apply this annotation-value command.

    Creates the annotation on the subject when no annotation with this
    short name exists yet; otherwise alters the existing one in place.
    Returns the (schema, annotation) pair produced by the delegated
    command.
    """
    subject_ctx = context.get(AnnotationSubjectCommandContext)
    assert subject_ctx, (
        "Annotation commands must be run in AnnotationSubject context"
    )
    short_name = sn.shortname_from_fullname(self.classname)
    existing = subject_ctx.scls.get_annotations(schema).get(
        schema, short_name, None
    )
    if existing is not None:
        # An annotation under this name is already present — alter it.
        return sd.AlterObject.apply(self, schema, context)
    # No annotation yet — fall through to the default creation path.
    return super().apply(schema, context)
|
def apply(self, schema, context):
    """Apply this annotation-value command inside its own context.

    Creates the annotation on the subject when no annotation with this
    short name exists yet (and attaches it), otherwise alters the
    existing one in place.
    """
    attrsubj = context.get(AnnotationSubjectCommandContext)
    assert attrsubj, "Annotation commands must be run in " + "AnnotationSubject context"
    with context(AnnotationValueCommandContext(schema, self, None)):
        name = sn.shortname_from_fullname(self.classname)
        attrs = attrsubj.scls.get_annotations(schema)
        annotation = attrs.get(schema, name, None)
        if annotation is None:
            # No annotation under this name yet: create it and attach
            # it to the subject.
            schema, annotation = super().apply(schema, context)
            schema = self.add_annotation(schema, annotation, attrsubj.scls)
        else:
            # Annotation already present — alter it in place.
            schema, annotation = sd.AlterObject.apply(self, schema, context)
    return schema, annotation
|
https://github.com/edgedb/edgedb/issues/755
|
Traceback (most recent call last):
File "/home/victor/dev/magicstack/edgedb/tests/test_schema.py", line 2084, in test_migrations_equivalence_annotation_01
"""])
File "/home/victor/dev/magicstack/edgedb/tests/test_schema.py", line 603, in _assert_migration_equivalence
self._assert_migration_consistency(migrations[-1])
File "/home/victor/dev/magicstack/edgedb/tests/test_schema.py", line 588, in _assert_migration_consistency
diff = s_ddl.delta_schemas(baseline_schema, test_schema)
File "/home/victor/dev/magicstack/edgedb/edb/schema/ddl.py", line 87, in delta_schemas
old, new, old_schema=schema2, new_schema=schema1))
File "/home/victor/dev/magicstack/edgedb/edb/schema/objects.py", line 1143, in delta_sets
their_schema=old_schema)
File "/home/victor/dev/magicstack/edgedb/edb/schema/objects.py", line 778, in compare
context=context)
File "/home/victor/dev/magicstack/edgedb/edb/schema/objects.py", line 796, in compare_field_value
compcoef=field.compcoef or 0.5)
File "/home/victor/dev/magicstack/edgedb/edb/schema/objects.py", line 1466, in compare_values
their_schema=their_schema, context=context))
File "/home/victor/dev/magicstack/edgedb/edb/schema/objects.py", line 770, in compare
their_value = other.get_field_value(their_schema, field_name)
File "/home/victor/dev/magicstack/edgedb/edb/schema/objects.py", line 642, in get_field_value
schema, field_name, allow_default=allow_default)
File "/home/victor/dev/magicstack/edgedb/edb/schema/objects.py", line 635, in _get_schema_field_value
f'{self!r} object has no value for field {field_name!r}')
edb.schema.objects.FieldValueNotFoundError: <AnnotationValue: at 0x7f27da85bf60> object has no value for field 'annotation'
|
edb.schema.objects.FieldValueNotFoundError
|
def _create_innards(self, schema, context):
    """Create the inner parts of a referenced object.

    After the generic creation, registers the new object with its
    referrer (back-reference plus refdict entry) and, unless the object
    is final, propagates the new reference to the referrer's
    descendants (recursively on the non-canonical pass).
    """
    schema = super()._create_innards(schema, context)
    referrer_ctx = self.get_referrer_context(context)
    if referrer_ctx is None:
        # Not a referenced object — nothing further to do.
        return schema
    referrer = referrer_ctx.scls
    referrer_cls = type(referrer)
    mcls = type(self.scls)
    refdict = referrer_cls.get_refdict_for_class(mcls)
    if refdict.backref_attr:
        # Set the back-reference on referenced object
        # to the referrer.
        schema = self.scls.set_field_value(schema, refdict.backref_attr, referrer)
    schema = referrer.add_classref(schema, refdict.attr, self.scls)
    # Final objects cannot be inherited, so no propagation is needed.
    if not self.scls.get_is_final(schema) and isinstance(
        referrer, inheriting.InheritingObject
    ):
        if not context.canonical:
            # Propagate the creation of a new ref to descendants.
            alter_cmd = sd.ObjectCommandMeta.get_command_class_or_die(
                sd.AlterObject, referrer_cls
            )
            ref_field_type = referrer_cls.get_field(refdict.attr).type
            refname = ref_field_type.get_key_for(schema, self.scls)
            if context.enable_recursion:
                for child in referrer.children(schema):
                    alter = alter_cmd(classname=child.get_name(schema))
                    with alter.new_context(schema, context, child):
                        schema, cmd = self._propagate_ref_creation(
                            schema, context, refdict, refname, child
                        )
                        alter.add(cmd)
                    self.add(alter)
            else:
                # Recursion disabled: apply already-recorded subcommands.
                for op in self.get_subcommands(metaclass=referrer_cls):
                    schema, _ = op.apply(schema, context=context)
    return schema
|
def _create_innards(self, schema, context):
    """Create the inner parts of a referenced object.

    After the generic creation, registers the new object with its
    referrer (back-reference plus refdict entry) and propagates the new
    reference to the referrer's descendants (recursively on the
    non-canonical pass).
    """
    schema = super()._create_innards(schema, context)
    referrer_ctx = self.get_referrer_context(context)
    if referrer_ctx is None:
        # Not a referenced object — nothing further to do.
        return schema
    referrer = referrer_ctx.scls
    referrer_cls = type(referrer)
    mcls = type(self.scls)
    refdict = referrer_cls.get_refdict_for_class(mcls)
    if refdict.backref_attr:
        # Set the back-reference on referenced object
        # to the referrer.
        schema = self.scls.set_field_value(schema, refdict.backref_attr, referrer)
    schema = referrer.add_classref(schema, refdict.attr, self.scls)
    if isinstance(referrer, inheriting.InheritingObject):
        if not context.canonical:
            # Propagate the creation of a new ref to descendants.
            alter_cmd = sd.ObjectCommandMeta.get_command_class_or_die(
                sd.AlterObject, referrer_cls
            )
            ref_field_type = referrer_cls.get_field(refdict.attr).type
            refname = ref_field_type.get_key_for(schema, self.scls)
            if context.enable_recursion:
                for child in referrer.children(schema):
                    alter = alter_cmd(classname=child.get_name(schema))
                    with alter.new_context(schema, context, child):
                        schema, cmd = self._propagate_ref_creation(
                            schema, context, refdict, refname, child
                        )
                        alter.add(cmd)
                    self.add(alter)
            else:
                # Recursion disabled: apply already-recorded subcommands.
                for op in self.get_subcommands(metaclass=referrer_cls):
                    schema, _ = op.apply(schema, context=context)
    return schema
|
https://github.com/edgedb/edgedb/issues/755
|
Traceback (most recent call last):
File "/home/victor/dev/magicstack/edgedb/tests/test_schema.py", line 2084, in test_migrations_equivalence_annotation_01
"""])
File "/home/victor/dev/magicstack/edgedb/tests/test_schema.py", line 603, in _assert_migration_equivalence
self._assert_migration_consistency(migrations[-1])
File "/home/victor/dev/magicstack/edgedb/tests/test_schema.py", line 588, in _assert_migration_consistency
diff = s_ddl.delta_schemas(baseline_schema, test_schema)
File "/home/victor/dev/magicstack/edgedb/edb/schema/ddl.py", line 87, in delta_schemas
old, new, old_schema=schema2, new_schema=schema1))
File "/home/victor/dev/magicstack/edgedb/edb/schema/objects.py", line 1143, in delta_sets
their_schema=old_schema)
File "/home/victor/dev/magicstack/edgedb/edb/schema/objects.py", line 778, in compare
context=context)
File "/home/victor/dev/magicstack/edgedb/edb/schema/objects.py", line 796, in compare_field_value
compcoef=field.compcoef or 0.5)
File "/home/victor/dev/magicstack/edgedb/edb/schema/objects.py", line 1466, in compare_values
their_schema=their_schema, context=context))
File "/home/victor/dev/magicstack/edgedb/edb/schema/objects.py", line 770, in compare
their_value = other.get_field_value(their_schema, field_name)
File "/home/victor/dev/magicstack/edgedb/edb/schema/objects.py", line 642, in get_field_value
schema, field_name, allow_default=allow_default)
File "/home/victor/dev/magicstack/edgedb/edb/schema/objects.py", line 635, in _get_schema_field_value
f'{self!r} object has no value for field {field_name!r}')
edb.schema.objects.FieldValueNotFoundError: <AnnotationValue: at 0x7f27da85bf60> object has no value for field 'annotation'
|
edb.schema.objects.FieldValueNotFoundError
|
def update_job_status(job_id, role, party_id, job_info, create=False):
    """Persist job info for one (role, party_id) pair.

    When ``create`` is true, additionally saves the job conf and derives
    the partner / visible-role / dataset views for the job dashboard.
    """
    tracker = Tracking(job_id=job_id, role=role, party_id=party_id)
    job_info["f_run_ip"] = RuntimeConfig.JOB_SERVER_HOST
    if create:
        dsl = json_loads(job_info["f_dsl"])
        runtime_conf = json_loads(job_info["f_runtime_conf"])
        train_runtime_conf = json_loads(job_info["f_train_runtime_conf"])
        save_job_conf(job_id=job_id, job_dsl=dsl, job_runtime_conf=runtime_conf)
        roles = json_loads(job_info["f_roles"])
        is_initiator = job_info.get("f_is_initiator", 0)
        partner = {}
        show_role = {}
        for role_name, role_parties in roles.items():
            # The initiator sees every role/party; others only their own.
            if is_initiator or role_name == role:
                visible = show_role.setdefault(role_name, [])
                for pid in role_parties:
                    if is_initiator or pid == party_id:
                        visible.append(pid)
            if role_name != role:
                # Every party of a different role is a partner.
                partner.setdefault(role_name, []).extend(role_parties)
            else:
                # Same role: all parties except ourselves are partners.
                for pid in role_parties:
                    if pid != party_id:
                        partner.setdefault(role_name, []).append(pid)
        dag = get_job_dsl_parser(
            dsl=dsl, runtime_conf=runtime_conf, train_runtime_conf=train_runtime_conf
        )
        job_args = dag.get_args_input()
        # role -> party_id -> data key -> "namespace.name" table reference.
        dataset = {}
        for role_name, parties_args in job_args.items():
            if not (is_initiator or role_name == role):
                continue
            for idx, one_party_args in enumerate(parties_args):
                pid = roles[role_name][idx]
                if not (is_initiator or pid == party_id):
                    continue
                tables = dataset.setdefault(role_name, {}).setdefault(pid, {})
                for data_type, data_location in one_party_args["args"][
                    "data"
                ].items():
                    tables[data_type] = "{}.{}".format(
                        data_location["namespace"], data_location["name"]
                    )
        tracker.log_job_view(
            {"partner": partner, "dataset": dataset, "roles": show_role}
        )
    tracker.save_job_info(
        role=role, party_id=party_id, job_info=job_info, create=create
    )
|
def update_job_status(job_id, role, party_id, job_info, create=False):
    """Persist job info for one (role, party_id) pair.

    When ``create`` is true, additionally saves the job conf and derives
    the partner / visible-role / dataset views for the job dashboard.
    """
    job_tracker = Tracking(job_id=job_id, role=role, party_id=party_id)
    # Record which host is running this job.
    job_info["f_run_ip"] = get_lan_ip()
    if create:
        dsl = json_loads(job_info["f_dsl"])
        runtime_conf = json_loads(job_info["f_runtime_conf"])
        train_runtime_conf = json_loads(job_info["f_train_runtime_conf"])
        save_job_conf(job_id=job_id, job_dsl=dsl, job_runtime_conf=runtime_conf)
        roles = json_loads(job_info["f_roles"])
        partner = {}
        show_role = {}
        is_initiator = job_info.get("f_is_initiator", 0)
        for _role, _role_party in roles.items():
            # The initiator sees every role/party; others only their own.
            if is_initiator or _role == role:
                show_role[_role] = show_role.get(_role, [])
                for _party_id in _role_party:
                    if is_initiator or _party_id == party_id:
                        show_role[_role].append(_party_id)
            if _role != role:
                # Every party of a different role is a partner.
                partner[_role] = partner.get(_role, [])
                partner[_role].extend(_role_party)
            else:
                # Same role: all parties except ourselves are partners.
                for _party_id in _role_party:
                    if _party_id != party_id:
                        partner[_role] = partner.get(_role, [])
                        partner[_role].append(_party_id)
        dag = get_job_dsl_parser(
            dsl=dsl, runtime_conf=runtime_conf, train_runtime_conf=train_runtime_conf
        )
        job_args = dag.get_args_input()
        # role -> party_id -> data key -> "namespace.name" table reference.
        dataset = {}
        for _role, _role_party_args in job_args.items():
            if is_initiator or _role == role:
                for _party_index in range(len(_role_party_args)):
                    _party_id = roles[_role][_party_index]
                    if is_initiator or _party_id == party_id:
                        dataset[_role] = dataset.get(_role, {})
                        dataset[_role][_party_id] = dataset[_role].get(_party_id, {})
                        for _data_type, _data_location in _role_party_args[
                            _party_index
                        ]["args"]["data"].items():
                            dataset[_role][_party_id][_data_type] = "{}.{}".format(
                                _data_location["namespace"], _data_location["name"]
                            )
        job_tracker.log_job_view(
            {"partner": partner, "dataset": dataset, "roles": show_role}
        )
    job_tracker.save_job_info(
        role=role, party_id=party_id, job_info=job_info, create=create
    )
|
https://github.com/FederatedAI/FATE/issues/518
|
stdout:Traceback (most recent call last):
File "/home/wy/FATE/examples/federatedml-1.0-examples/../../fate_flow/fate_flow_client.py", line 17, in <module>
import argparse
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/argparse.py", line 86, in <module>
import collections as _collections
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/collections/__init__.py", line 26, in <module>
from operator import itemgetter as _itemgetter, eq as _eq
ImportError: cannot import name 'itemgetter'
|
ImportError
|
def run_do(self):
    """Detect running tasks whose worker process has died and stop
    the corresponding jobs via the local REST API.

    Errors on individual tasks or jobs are logged and skipped so one
    bad record cannot abort the detection sweep.
    """
    try:
        running_tasks = job_utils.query_task(status="running", run_ip=get_lan_ip())
        stop_job_ids = set()
        detect_logger.info("start to detect running job..")
        for task in running_tasks:
            try:
                process_exist = job_utils.check_job_process(int(task.f_run_pid))
                if not process_exist:
                    detect_logger.info(
                        "job {} component {} on {} {} task {} {} process does not exist".format(
                            task.f_job_id,
                            task.f_component_name,
                            task.f_role,
                            task.f_party_id,
                            task.f_task_id,
                            task.f_run_pid,
                        )
                    )
                    stop_job_ids.add(task.f_job_id)
            except Exception as e:
                # One bad task record must not abort the detection loop.
                detect_logger.exception(e)
        if stop_job_ids:
            schedule_logger.info("start to stop jobs: {}".format(stop_job_ids))
        for job_id in stop_job_ids:
            jobs = job_utils.query_job(job_id=job_id)
            if jobs:
                # The stop request goes through the local REST API, which
                # needs no party/work-mode routing information, so the
                # initiator/work-mode locals previously computed here were
                # dead code and have been removed.
                api_utils.local_api(
                    method="POST",
                    endpoint="/{}/job/stop".format(API_VERSION),
                    json_body={"job_id": job_id},
                )
                schedule_logger.info("send stop job {} command".format(job_id))
    except Exception as e:
        detect_logger.exception(e)
    finally:
        detect_logger.info("finish detect running job")
|
def run_do(self):
    """Detect running tasks whose worker process has died and ask the
    job initiator to stop the corresponding jobs (federated call).

    Errors on individual tasks or jobs are logged and skipped so one
    bad record cannot abort the detection sweep.
    """
    try:
        running_tasks = job_utils.query_task(status="running", run_ip=get_lan_ip())
        stop_job_ids = set()
        detect_logger.info("start to detect running job..")
        for task in running_tasks:
            try:
                process_exist = job_utils.check_job_process(int(task.f_run_pid))
                if not process_exist:
                    detect_logger.info(
                        "job {} component {} on {} {} task {} {} process does not exist".format(
                            task.f_job_id,
                            task.f_component_name,
                            task.f_role,
                            task.f_party_id,
                            task.f_task_id,
                            task.f_run_pid,
                        )
                    )
                    stop_job_ids.add(task.f_job_id)
            except Exception as e:
                # One bad task record must not abort the detection loop.
                detect_logger.exception(e)
        if stop_job_ids:
            schedule_logger.info("start to stop jobs: {}".format(stop_job_ids))
        for job_id in stop_job_ids:
            jobs = job_utils.query_job(job_id=job_id)
            if jobs:
                initiator_party_id = jobs[0].f_initiator_party_id
                job_work_mode = jobs[0].f_work_mode
                if len(jobs) > 1:
                    # i am initiator
                    my_party_id = initiator_party_id
                else:
                    my_party_id = jobs[0].f_party_id
                    initiator_party_id = jobs[0].f_initiator_party_id
                # Route the stop request to the job initiator.
                api_utils.federated_api(
                    job_id=job_id,
                    method="POST",
                    endpoint="/{}/job/stop".format(API_VERSION),
                    src_party_id=my_party_id,
                    dest_party_id=initiator_party_id,
                    json_body={"job_id": job_id},
                    work_mode=job_work_mode,
                )
                schedule_logger.info("send stop job {} command".format(job_id))
    except Exception as e:
        detect_logger.exception(e)
    finally:
        detect_logger.info("finish detect running job")
|
https://github.com/FederatedAI/FATE/issues/518
|
stdout:Traceback (most recent call last):
File "/home/wy/FATE/examples/federatedml-1.0-examples/../../fate_flow/fate_flow_client.py", line 17, in <module>
import argparse
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/argparse.py", line 86, in <module>
import collections as _collections
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/collections/__init__.py", line 26, in <module>
from operator import itemgetter as _itemgetter, eq as _eq
ImportError: cannot import name 'itemgetter'
|
ImportError
|
def run_task():
    """Entry point of a task worker process.

    Parses command-line arguments, initializes the runtime environment
    (storage, federation, logging) for the task, dynamically loads and
    runs the component, saves its data/model outputs, and finally syncs
    the task status back to this party and the job initiator.
    """
    task = Task()
    task.f_create_time = current_timestamp()
    try:
        parser = argparse.ArgumentParser()
        parser.add_argument("-j", "--job_id", required=True, type=str, help="job id")
        parser.add_argument(
            "-n", "--component_name", required=True, type=str, help="component name"
        )
        parser.add_argument("-t", "--task_id", required=True, type=str, help="task id")
        parser.add_argument("-r", "--role", required=True, type=str, help="role")
        parser.add_argument(
            "-p", "--party_id", required=True, type=str, help="party id"
        )
        parser.add_argument(
            "-c", "--config", required=True, type=str, help="task config"
        )
        parser.add_argument("--job_server", help="job server", type=str)
        args = parser.parse_args()
        schedule_logger.info("enter task process")
        schedule_logger.info(args)
        # init function args
        if args.job_server:
            # Point this worker at the given job server's HTTP port.
            RuntimeConfig.init_config(HTTP_PORT=args.job_server.split(":")[1])
        job_id = args.job_id
        component_name = args.component_name
        task_id = args.task_id
        role = args.role
        party_id = int(args.party_id)
        task_config = file_utils.load_json_conf(args.config)
        job_parameters = task_config["job_parameters"]
        job_initiator = task_config["job_initiator"]
        job_args = task_config["job_args"]
        task_input_dsl = task_config["input"]
        task_output_dsl = task_config["output"]
        parameters = task_config["parameters"]
        module_name = task_config["module_name"]
    except Exception as e:
        # Argument/config parsing failed: mark the task failed and bail out.
        schedule_logger.exception(e)
        task.f_status = TaskStatus.FAILED
        return
    try:
        # init environment, process is shared globally
        RuntimeConfig.init_config(WORK_MODE=job_parameters["work_mode"])
        storage.init_storage(job_id=task_id, work_mode=RuntimeConfig.WORK_MODE)
        federation.init(job_id=task_id, runtime_conf=parameters)
        job_log_dir = os.path.join(
            job_utils.get_job_log_directory(job_id=job_id), role, str(party_id)
        )
        task_log_dir = os.path.join(job_log_dir, component_name)
        log_utils.LoggerFactory.set_directory(
            directory=task_log_dir,
            parent_log_dir=job_log_dir,
            append_to_parent_log=True,
            force=True,
        )
        task.f_job_id = job_id
        task.f_component_name = component_name
        task.f_task_id = task_id
        task.f_role = role
        task.f_party_id = party_id
        task.f_operator = "python_operator"
        tracker = Tracking(
            job_id=job_id,
            role=role,
            party_id=party_id,
            component_name=component_name,
            task_id=task_id,
            model_id=job_parameters["model_id"],
            model_version=job_parameters["model_version"],
            module_name=module_name,
        )
        task.f_start_time = current_timestamp()
        task.f_run_ip = get_lan_ip()
        task.f_run_pid = os.getpid()
        # Dynamically import the component class from its configured
        # "CodePath" (a slash-separated module path plus class name).
        run_class_paths = parameters.get("CodePath").split("/")
        run_class_package = (
            ".".join(run_class_paths[:-2]) + "." + run_class_paths[-2].rstrip(".py")
        )
        run_class_name = run_class_paths[-1]
        task_run_args = TaskExecutor.get_task_run_args(
            job_id=job_id,
            role=role,
            party_id=party_id,
            job_parameters=job_parameters,
            job_args=job_args,
            input_dsl=task_input_dsl,
        )
        run_object = getattr(
            importlib.import_module(run_class_package), run_class_name
        )()
        run_object.set_tracker(tracker=tracker)
        run_object.set_taskid(taskid=task_id)
        task.f_status = TaskStatus.RUNNING
        # Announce RUNNING status before the actual component execution.
        TaskExecutor.sync_task_status(
            job_id=job_id,
            component_name=component_name,
            task_id=task_id,
            role=role,
            party_id=party_id,
            initiator_party_id=job_initiator.get("party_id", None),
            task_info=task.to_json(),
        )
        schedule_logger.info(
            "run {} {} {} {} {} task".format(
                job_id, component_name, task_id, role, party_id
            )
        )
        schedule_logger.info(parameters)
        schedule_logger.info(task_input_dsl)
        run_object.run(parameters, task_run_args)
        if task_output_dsl:
            if task_output_dsl.get("data", []):
                output_data = run_object.save_data()
                tracker.save_output_data_table(
                    output_data, task_output_dsl.get("data")[0]
                )
            if task_output_dsl.get("model", []):
                output_model = run_object.export_model()
                # There is only one model output at the current dsl version.
                tracker.save_output_model(output_model, task_output_dsl["model"][0])
        task.f_status = TaskStatus.SUCCESS
    except Exception as e:
        schedule_logger.exception(e)
        task.f_status = TaskStatus.FAILED
    finally:
        # Always report the final status, even if execution failed.
        try:
            task.f_end_time = current_timestamp()
            task.f_elapsed = task.f_end_time - task.f_start_time
            task.f_update_time = current_timestamp()
            TaskExecutor.sync_task_status(
                job_id=job_id,
                component_name=component_name,
                task_id=task_id,
                role=role,
                party_id=party_id,
                initiator_party_id=job_initiator.get("party_id", None),
                task_info=task.to_json(),
            )
        except Exception as e:
            schedule_logger.exception(e)
        schedule_logger.info(
            "finish {} {} {} {} {} {} task".format(
                job_id, component_name, task_id, role, party_id, task.f_status
            )
        )
        print(
            "finish {} {} {} {} {} {} task".format(
                job_id, component_name, task_id, role, party_id, task.f_status
            )
        )
|
def run_task():
    """Entry point of a task worker process.

    Parses command-line arguments, initializes the runtime environment
    (storage, federation, logging) for the task, dynamically loads and
    runs the component, saves its data/model outputs, and finally syncs
    the task status back to this party and the job initiator.
    """
    task = Task()
    task.f_create_time = current_timestamp()
    try:
        parser = argparse.ArgumentParser()
        parser.add_argument(
            "-j",
            "--job_id",
            required=True,
            type=str,
            help="Specify a config json file path",
        )
        parser.add_argument(
            "-n",
            "--component_name",
            required=True,
            type=str,
            help="Specify a config json file path",
        )
        parser.add_argument(
            "-t",
            "--task_id",
            required=True,
            type=str,
            help="Specify a config json file path",
        )
        parser.add_argument(
            "-r",
            "--role",
            required=True,
            type=str,
            help="Specify a config json file path",
        )
        parser.add_argument(
            "-p",
            "--party_id",
            required=True,
            type=str,
            help="Specify a config json file path",
        )
        parser.add_argument(
            "-c",
            "--config",
            required=True,
            type=str,
            help="Specify a config json file path",
        )
        args = parser.parse_args()
        schedule_logger.info("enter task process")
        schedule_logger.info(args)
        # init function args
        job_id = args.job_id
        component_name = args.component_name
        task_id = args.task_id
        role = args.role
        party_id = int(args.party_id)
        task_config = file_utils.load_json_conf(args.config)
        job_parameters = task_config.get("job_parameters", None)
        job_initiator = task_config.get("job_initiator", None)
        job_args = task_config.get("job_args", {})
        task_input_dsl = task_config.get("input", {})
        task_output_dsl = task_config.get("output", {})
        parameters = task_config.get("parameters", {})
        module_name = task_config.get("module_name", "")
    except Exception as e:
        # Argument/config parsing failed: mark the task failed and bail out.
        schedule_logger.exception(e)
        task.f_status = TaskStatus.FAILED
        return
    try:
        # init environment
        RuntimeConfig.init_config(WORK_MODE=job_parameters["work_mode"])
        storage.init_storage(job_id=task_id, work_mode=RuntimeConfig.WORK_MODE)
        federation.init(job_id=task_id, runtime_conf=parameters)
        job_log_dir = os.path.join(
            job_utils.get_job_log_directory(job_id=job_id), role, str(party_id)
        )
        task_log_dir = os.path.join(job_log_dir, component_name)
        log_utils.LoggerFactory.set_directory(
            directory=task_log_dir,
            parent_log_dir=job_log_dir,
            append_to_parent_log=True,
            force=True,
        )
        task.f_job_id = job_id
        task.f_component_name = component_name
        task.f_task_id = task_id
        task.f_role = role
        task.f_party_id = party_id
        task.f_operator = "python_operator"
        tracker = Tracking(
            job_id=job_id,
            role=role,
            party_id=party_id,
            component_name=component_name,
            task_id=task_id,
            model_id=job_parameters["model_id"],
            model_version=job_parameters["model_version"],
            module_name=module_name,
        )
        task.f_start_time = current_timestamp()
        task.f_run_ip = get_lan_ip()
        task.f_run_pid = os.getpid()
        # Dynamically import the component class from its configured
        # "CodePath" (a slash-separated module path plus class name).
        run_class_paths = parameters.get("CodePath").split("/")
        run_class_package = (
            ".".join(run_class_paths[:-2]) + "." + run_class_paths[-2].rstrip(".py")
        )
        run_class_name = run_class_paths[-1]
        task_run_args = TaskExecutor.get_task_run_args(
            job_id=job_id,
            role=role,
            party_id=party_id,
            job_parameters=job_parameters,
            job_args=job_args,
            input_dsl=task_input_dsl,
        )
        run_object = getattr(
            importlib.import_module(run_class_package), run_class_name
        )()
        run_object.set_tracker(tracker=tracker)
        run_object.set_taskid(taskid=task_id)
        task.f_status = TaskStatus.RUNNING
        # Announce RUNNING status before the actual component execution.
        TaskExecutor.sync_task_status(
            job_id=job_id,
            component_name=component_name,
            task_id=task_id,
            role=role,
            party_id=party_id,
            initiator_party_id=job_initiator.get("party_id", None),
            task_info=task.to_json(),
        )
        schedule_logger.info(
            "run {} {} {} {} {} task".format(
                job_id, component_name, task_id, role, party_id
            )
        )
        schedule_logger.info(parameters)
        schedule_logger.info(task_input_dsl)
        run_object.run(parameters, task_run_args)
        if task_output_dsl:
            if task_output_dsl.get("data", []):
                output_data = run_object.save_data()
                tracker.save_output_data_table(
                    output_data, task_output_dsl.get("data")[0]
                )
            if task_output_dsl.get("model", []):
                output_model = run_object.export_model()
                # There is only one model output at the current dsl version.
                tracker.save_output_model(output_model, task_output_dsl["model"][0])
        task.f_status = TaskStatus.SUCCESS
    except Exception as e:
        schedule_logger.exception(e)
        task.f_status = TaskStatus.FAILED
    finally:
        # Always report the final status, even if execution failed.
        try:
            task.f_end_time = current_timestamp()
            task.f_elapsed = task.f_end_time - task.f_start_time
            task.f_update_time = current_timestamp()
            TaskExecutor.sync_task_status(
                job_id=job_id,
                component_name=component_name,
                task_id=task_id,
                role=role,
                party_id=party_id,
                initiator_party_id=job_initiator.get("party_id", None),
                task_info=task.to_json(),
            )
        except Exception as e:
            schedule_logger.exception(e)
        schedule_logger.info(
            "finish {} {} {} {} {} {} task".format(
                job_id, component_name, task_id, role, party_id, task.f_status
            )
        )
        print(
            "finish {} {} {} {} {} {} task".format(
                job_id, component_name, task_id, role, party_id, task.f_status
            )
        )
|
https://github.com/FederatedAI/FATE/issues/518
|
stdout:Traceback (most recent call last):
File "/home/wy/FATE/examples/federatedml-1.0-examples/../../fate_flow/fate_flow_client.py", line 17, in <module>
import argparse
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/argparse.py", line 86, in <module>
import collections as _collections
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/collections/__init__.py", line 26, in <module>
from operator import itemgetter as _itemgetter, eq as _eq
ImportError: cannot import name 'itemgetter'
|
ImportError
|
def sync_task_status(
    job_id, component_name, task_id, role, party_id, initiator_party_id, task_info
):
    """Push this task's status to the local party and to the job initiator.

    The destination set is deduplicated, so when this party *is* the
    initiator only one request is sent. When notifying a remote initiator,
    the run ip is blanked so the process location is not leaked.
    """
    destinations = {party_id, initiator_party_id}
    for dest_party_id in destinations:
        notifying_remote_initiator = (
            party_id != initiator_party_id and dest_party_id == initiator_party_id
        )
        if notifying_remote_initiator:
            # do not pass the process id to the initiator
            task_info["f_run_ip"] = ""
        status_endpoint = "/{}/schedule/{}/{}/{}/{}/{}/status".format(
            API_VERSION, job_id, component_name, task_id, role, party_id
        )
        federated_api(
            job_id=job_id,
            method="POST",
            endpoint=status_endpoint,
            src_party_id=party_id,
            dest_party_id=dest_party_id,
            json_body=task_info,
            work_mode=RuntimeConfig.WORK_MODE,
        )
|
def sync_task_status(
    job_id, component_name, task_id, role, party_id, initiator_party_id, task_info
):
    """Push this task's status to the local party and to the job initiator.

    The destination set is deduplicated; when notifying a remote initiator,
    the run ip is blanked so the process location is not leaked.
    """
    destinations = {party_id, initiator_party_id}
    for dest_party_id in destinations:
        notifying_remote_initiator = (
            party_id != initiator_party_id and dest_party_id == initiator_party_id
        )
        if notifying_remote_initiator:
            # do not pass the process id to the initiator
            task_info["f_run_ip"] = ""
        status_endpoint = "/{}/job/{}/{}/{}/{}/{}/status".format(
            API_VERSION, job_id, component_name, task_id, role, party_id
        )
        federated_api(
            job_id=job_id,
            method="POST",
            endpoint=status_endpoint,
            src_party_id=party_id,
            dest_party_id=dest_party_id,
            json_body=task_info,
            work_mode=RuntimeConfig.WORK_MODE,
        )
|
https://github.com/FederatedAI/FATE/issues/518
|
stdout:Traceback (most recent call last):
File "/home/wy/FATE/examples/federatedml-1.0-examples/../../fate_flow/fate_flow_client.py", line 17, in <module>
import argparse
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/argparse.py", line 86, in <module>
import collections as _collections
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/collections/__init__.py", line 26, in <module>
from operator import itemgetter as _itemgetter, eq as _eq
ImportError: cannot import name 'itemgetter'
|
ImportError
|
def distribute_job(job, roles, job_initiator):
    """Create the job record on every participating party.

    Mutates ``job``'s role/party/is_initiator fields per destination and
    sends the serialized job to each party's create endpoint.
    """
    initiator_role = job_initiator["role"]
    initiator_party = job_initiator["party_id"]
    for role, partys in roles.items():
        job.f_role = role
        for party_id in partys:
            job.f_party_id = party_id
            # flag exactly one (role, party) pair as the initiator
            is_initiator = role == initiator_role and party_id == initiator_party
            job.f_is_initiator = 1 if is_initiator else 0
            create_endpoint = "/{}/schedule/{}/{}/{}/create".format(
                API_VERSION, job.f_job_id, role, party_id
            )
            federated_api(
                job_id=job.f_job_id,
                method="POST",
                endpoint=create_endpoint,
                src_party_id=initiator_party,
                dest_party_id=party_id,
                json_body=job.to_json(),
                work_mode=job.f_work_mode,
            )
|
def distribute_job(job, roles, job_initiator):
    """Create the job record on every participating party.

    Mutates ``job``'s role/party/is_initiator fields per destination and
    sends the serialized job to each party's create endpoint.
    """
    initiator_role = job_initiator["role"]
    initiator_party = job_initiator["party_id"]
    for role, partys in roles.items():
        job.f_role = role
        for party_id in partys:
            job.f_party_id = party_id
            # flag exactly one (role, party) pair as the initiator
            is_initiator = role == initiator_role and party_id == initiator_party
            job.f_is_initiator = 1 if is_initiator else 0
            create_endpoint = "/{}/job/{}/{}/{}/create".format(
                API_VERSION, job.f_job_id, role, party_id
            )
            federated_api(
                job_id=job.f_job_id,
                method="POST",
                endpoint=create_endpoint,
                src_party_id=initiator_party,
                dest_party_id=party_id,
                json_body=job.to_json(),
                work_mode=job.f_work_mode,
            )
|
https://github.com/FederatedAI/FATE/issues/518
|
stdout:Traceback (most recent call last):
File "/home/wy/FATE/examples/federatedml-1.0-examples/../../fate_flow/fate_flow_client.py", line 17, in <module>
import argparse
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/argparse.py", line 86, in <module>
import collections as _collections
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/collections/__init__.py", line 26, in <module>
from operator import itemgetter as _itemgetter, eq as _eq
ImportError: cannot import name 'itemgetter'
|
ImportError
|
def run_component(
    job_id, job_runtime_conf, job_parameters, job_initiator, job_args, dag, component
):
    """Run one DAG component on all parties, then recurse into its successors.

    Fans out a /run request to every (role, party) that has parameters for
    this component, waits on the aggregate task status, syncs job progress,
    and — on success — recursively runs each downstream component whose
    dependencies are satisfied.

    Returns True only if this component and every reachable successor ran
    successfully; False otherwise.
    """
    parameters = component.get_role_parameters()
    component_name = component.get_name()
    module_name = component.get_module()
    task_id = job_utils.generate_task_id(job_id=job_id, component_name=component_name)
    schedule_logger.info("job {} run component {}".format(job_id, component_name))
    for role, partys_parameters in parameters.items():
        for party_index in range(len(partys_parameters)):
            party_parameters = partys_parameters[party_index]
            # job_args are positional per party within the role; absent roles
            # get an empty args dict
            if role in job_args:
                party_job_args = job_args[role][party_index]["args"]
            else:
                party_job_args = {}
            dest_party_id = party_parameters.get("local", {}).get("party_id")
            federated_api(
                job_id=job_id,
                method="POST",
                endpoint="/{}/schedule/{}/{}/{}/{}/{}/run".format(
                    API_VERSION, job_id, component_name, task_id, role, dest_party_id
                ),
                src_party_id=job_initiator["party_id"],
                dest_party_id=dest_party_id,
                json_body={
                    "job_parameters": job_parameters,
                    "job_initiator": job_initiator,
                    "job_args": party_job_args,
                    "parameters": party_parameters,
                    "module_name": module_name,
                    "input": component.get_input(),
                    "output": component.get_output(),
                    # tell the executor where to report back (this server)
                    "job_server": {
                        "ip": get_lan_ip(),
                        "http_port": RuntimeConfig.HTTP_PORT,
                    },
                },
                work_mode=job_parameters["work_mode"],
            )
    # blocks until the task status across parties is resolved
    component_task_status = TaskScheduler.check_task_status(
        job_id=job_id, component=component
    )
    if component_task_status:
        task_success = True
    else:
        task_success = False
    schedule_logger.info(
        "job {} component {} run {}".format(
            job_id, component_name, "success" if task_success else "failed"
        )
    )
    # update progress
    TaskScheduler.sync_job_status(
        job_id=job_id,
        roles=job_runtime_conf["role"],
        work_mode=job_parameters["work_mode"],
        initiator_party_id=job_initiator["party_id"],
        job_info=job_utils.update_job_progress(
            job_id=job_id, dag=dag, current_task_id=task_id
        ).to_json(),
    )
    if task_success:
        next_components = dag.get_next_components(component_name)
        schedule_logger.info(
            "job {} component {} next components is {}".format(
                job_id,
                component_name,
                [next_component.get_name() for next_component in next_components],
            )
        )
        for next_component in next_components:
            try:
                schedule_logger.info(
                    "job {} check component {} dependencies status".format(
                        job_id, next_component.get_name()
                    )
                )
                # a successor may have other upstream components still pending;
                # only run it once all its dependencies are complete
                dependencies_status = TaskScheduler.check_dependencies(
                    job_id=job_id, dag=dag, component=next_component
                )
                schedule_logger.info(
                    "job {} component {} dependencies status is {}".format(
                        job_id, next_component.get_name(), dependencies_status
                    )
                )
                if dependencies_status:
                    run_status = TaskScheduler.run_component(
                        job_id,
                        job_runtime_conf,
                        job_parameters,
                        job_initiator,
                        job_args,
                        dag,
                        next_component,
                    )
                else:
                    run_status = False
            except Exception as e:
                schedule_logger.info(e)
                run_status = False
            # first failed successor aborts the whole branch
            if not run_status:
                return False
        return True
    else:
        return False
|
def run_component(
    job_id, job_runtime_conf, job_parameters, job_initiator, job_args, dag, component
):
    """Run one DAG component on all parties, then recurse into its successors.

    Fans out a /run request to every (role, party) that has parameters for
    this component, waits on the aggregate task status, syncs job progress,
    and — on success — recursively runs each downstream component whose
    dependencies are satisfied.

    Returns True only if this component and every reachable successor ran
    successfully; False otherwise.
    """
    parameters = component.get_role_parameters()
    component_name = component.get_name()
    module_name = component.get_module()
    task_id = job_utils.generate_task_id(job_id=job_id, component_name=component_name)
    schedule_logger.info("job {} run component {}".format(job_id, component_name))
    for role, partys_parameters in parameters.items():
        for party_index in range(len(partys_parameters)):
            party_parameters = partys_parameters[party_index]
            # job_args are positional per party within the role; absent roles
            # get an empty args dict
            if role in job_args:
                party_job_args = job_args[role][party_index]["args"]
            else:
                party_job_args = {}
            dest_party_id = party_parameters.get("local", {}).get("party_id")
            federated_api(
                job_id=job_id,
                method="POST",
                endpoint="/{}/job/{}/{}/{}/{}/{}/run".format(
                    API_VERSION, job_id, component_name, task_id, role, dest_party_id
                ),
                src_party_id=job_initiator["party_id"],
                dest_party_id=dest_party_id,
                json_body={
                    "job_parameters": job_parameters,
                    "job_initiator": job_initiator,
                    "job_args": party_job_args,
                    "parameters": party_parameters,
                    "module_name": module_name,
                    "input": component.get_input(),
                    "output": component.get_output(),
                },
                work_mode=job_parameters["work_mode"],
            )
    # blocks until the task status across parties is resolved
    component_task_status = TaskScheduler.check_task_status(
        job_id=job_id, component=component
    )
    if component_task_status:
        task_success = True
    else:
        task_success = False
    schedule_logger.info(
        "job {} component {} run {}".format(
            job_id, component_name, "success" if task_success else "failed"
        )
    )
    # update progress
    TaskScheduler.sync_job_status(
        job_id=job_id,
        roles=job_runtime_conf["role"],
        work_mode=job_parameters["work_mode"],
        initiator_party_id=job_initiator["party_id"],
        job_info=job_utils.update_job_progress(
            job_id=job_id, dag=dag, current_task_id=task_id
        ).to_json(),
    )
    if task_success:
        next_components = dag.get_next_components(component_name)
        schedule_logger.info(
            "job {} component {} next components is {}".format(
                job_id,
                component_name,
                [next_component.get_name() for next_component in next_components],
            )
        )
        for next_component in next_components:
            try:
                schedule_logger.info(
                    "job {} check component {} dependencies status".format(
                        job_id, next_component.get_name()
                    )
                )
                # a successor may have other upstream components still pending;
                # only run it once all its dependencies are complete
                dependencies_status = TaskScheduler.check_dependencies(
                    job_id=job_id, dag=dag, component=next_component
                )
                schedule_logger.info(
                    "job {} component {} dependencies status is {}".format(
                        job_id, next_component.get_name(), dependencies_status
                    )
                )
                if dependencies_status:
                    run_status = TaskScheduler.run_component(
                        job_id,
                        job_runtime_conf,
                        job_parameters,
                        job_initiator,
                        job_args,
                        dag,
                        next_component,
                    )
                else:
                    run_status = False
            except Exception as e:
                schedule_logger.info(e)
                run_status = False
            # first failed successor aborts the whole branch
            if not run_status:
                return False
        return True
    else:
        return False
|
https://github.com/FederatedAI/FATE/issues/518
|
stdout:Traceback (most recent call last):
File "/home/wy/FATE/examples/federatedml-1.0-examples/../../fate_flow/fate_flow_client.py", line 17, in <module>
import argparse
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/argparse.py", line 86, in <module>
import collections as _collections
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/collections/__init__.py", line 26, in <module>
from operator import itemgetter as _itemgetter, eq as _eq
ImportError: cannot import name 'itemgetter'
|
ImportError
|
def start_task(job_id, component_name, task_id, role, party_id, task_config):
    """Launch one task as a python subprocess.

    Writes ``task_config`` to ``<job dir>/<role>/<party>/<component>/
    task_config.json`` and spawns the TaskExecutor module with the task
    coordinates and the job server address on its command line. Outcome is
    only reported via the schedule logger; nothing is returned.
    """
    # BUGFIX: the original message had four {} placeholders but five
    # .format() arguments, so task_config was silently dropped from the log
    # (str.format ignores surplus positional arguments).
    schedule_logger.info(
        "job {} {} {} {} task subprocess is ready, config: {}".format(
            job_id, component_name, role, party_id, task_config
        )
    )
    task_process_start_status = False
    try:
        task_dir = os.path.join(
            job_utils.get_job_directory(job_id=job_id), role, party_id, component_name
        )
        os.makedirs(task_dir, exist_ok=True)
        task_config_path = os.path.join(task_dir, "task_config.json")
        # persist the config so the subprocess can load it by path
        with open(task_config_path, "w") as fw:
            json.dump(task_config, fw)
        process_cmd = [
            "python3",
            sys.modules[TaskExecutor.__module__].__file__,
            "-j",
            job_id,
            "-n",
            component_name,
            "-t",
            task_id,
            "-r",
            role,
            "-p",
            party_id,
            "-c",
            task_config_path,
            "--job_server",
            "{}:{}".format(
                task_config["job_server"]["ip"], task_config["job_server"]["http_port"]
            ),
        ]
        task_log_dir = os.path.join(
            job_utils.get_job_log_directory(job_id=job_id),
            role,
            party_id,
            component_name,
        )
        # same placeholder-count fix as the "is ready" message above
        schedule_logger.info(
            "job {} {} {} {} task subprocess start, config: {}".format(
                job_id, component_name, role, party_id, task_config
            )
        )
        p = job_utils.run_subprocess(
            config_dir=task_dir, process_cmd=process_cmd, log_dir=task_log_dir
        )
        if p:
            task_process_start_status = True
    except Exception as e:
        schedule_logger.exception(e)
    finally:
        schedule_logger.info(
            "job {} component {} on {} {} start task subprocess {}".format(
                job_id,
                component_name,
                role,
                party_id,
                "success" if task_process_start_status else "failed",
            )
        )
|
def start_task(job_id, component_name, task_id, role, party_id, task_config):
    """Launch one task as a python subprocess.

    Writes ``task_config`` to ``<job dir>/<role>/<party>/<component>/
    task_config.json`` and spawns the TaskExecutor module with the task
    coordinates on its command line. Outcome is only reported via the
    schedule logger; nothing is returned.
    """
    # BUGFIX: the original message had four {} placeholders but five
    # .format() arguments, so task_config was silently dropped from the log
    # (str.format ignores surplus positional arguments).
    schedule_logger.info(
        "job {} {} {} {} task subprocess is ready, config: {}".format(
            job_id, component_name, role, party_id, task_config
        )
    )
    task_process_start_status = False
    try:
        task_dir = os.path.join(
            job_utils.get_job_directory(job_id=job_id), role, party_id, component_name
        )
        os.makedirs(task_dir, exist_ok=True)
        task_config_path = os.path.join(task_dir, "task_config.json")
        # persist the config so the subprocess can load it by path
        with open(task_config_path, "w") as fw:
            json.dump(task_config, fw)
        process_cmd = [
            "python3",
            sys.modules[TaskExecutor.__module__].__file__,
            "-j",
            job_id,
            "-n",
            component_name,
            "-t",
            task_id,
            "-r",
            role,
            "-p",
            party_id,
            "-c",
            task_config_path,
        ]
        task_log_dir = os.path.join(
            job_utils.get_job_log_directory(job_id=job_id),
            role,
            party_id,
            component_name,
        )
        # same placeholder-count fix as the "is ready" message above
        schedule_logger.info(
            "job {} {} {} {} task subprocess start, config: {}".format(
                job_id, component_name, role, party_id, task_config
            )
        )
        p = job_utils.run_subprocess(
            config_dir=task_dir, process_cmd=process_cmd, log_dir=task_log_dir
        )
        if p:
            task_process_start_status = True
    except Exception as e:
        schedule_logger.exception(e)
    finally:
        schedule_logger.info(
            "job {} component {} on {} {} start task subprocess {}".format(
                job_id,
                component_name,
                role,
                party_id,
                "success" if task_process_start_status else "failed",
            )
        )
|
https://github.com/FederatedAI/FATE/issues/518
|
stdout:Traceback (most recent call last):
File "/home/wy/FATE/examples/federatedml-1.0-examples/../../fate_flow/fate_flow_client.py", line 17, in <module>
import argparse
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/argparse.py", line 86, in <module>
import collections as _collections
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/collections/__init__.py", line 26, in <module>
from operator import itemgetter as _itemgetter, eq as _eq
ImportError: cannot import name 'itemgetter'
|
ImportError
|
def sync_job_status(job_id, roles, work_mode, initiator_party_id, job_info):
    """Broadcast ``job_info`` to every party of every role.

    Mutates job_info's f_role/f_party_id per destination before each send.
    """
    for role, partys in roles.items():
        job_info["f_role"] = role
        for party_id in partys:
            job_info["f_party_id"] = party_id
            status_endpoint = "/{}/schedule/{}/{}/{}/status".format(
                API_VERSION, job_id, role, party_id
            )
            federated_api(
                job_id=job_id,
                method="POST",
                endpoint=status_endpoint,
                src_party_id=initiator_party_id,
                dest_party_id=party_id,
                json_body=job_info,
                work_mode=work_mode,
            )
|
def sync_job_status(job_id, roles, work_mode, initiator_party_id, job_info):
    """Broadcast ``job_info`` to every party of every role.

    Mutates job_info's f_role/f_party_id per destination before each send.
    """
    for role, partys in roles.items():
        job_info["f_role"] = role
        for party_id in partys:
            job_info["f_party_id"] = party_id
            status_endpoint = "/{}/job/{}/{}/{}/status".format(
                API_VERSION, job_id, role, party_id
            )
            federated_api(
                job_id=job_id,
                method="POST",
                endpoint=status_endpoint,
                src_party_id=initiator_party_id,
                dest_party_id=party_id,
                json_body=job_info,
                work_mode=work_mode,
            )
|
https://github.com/FederatedAI/FATE/issues/518
|
stdout:Traceback (most recent call last):
File "/home/wy/FATE/examples/federatedml-1.0-examples/../../fate_flow/fate_flow_client.py", line 17, in <module>
import argparse
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/argparse.py", line 86, in <module>
import collections as _collections
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/collections/__init__.py", line 26, in <module>
from operator import itemgetter as _itemgetter, eq as _eq
ImportError: cannot import name 'itemgetter'
|
ImportError
|
def finish_job(job_id, job_runtime_conf):
    """Finalize a job on every party: persist the pipeline model, then clean up."""
    job_parameters = job_runtime_conf["job_parameters"]
    job_initiator = job_runtime_conf["initiator"]
    model_id_base64 = base64_encode(job_parameters["model_id"])
    model_version_base64 = base64_encode(job_parameters["model_version"])
    src_party_id = job_initiator["party_id"]
    work_mode = job_parameters["work_mode"]
    for role, partys in job_runtime_conf["role"].items():
        for party_id in partys:
            # save pipeline
            save_endpoint = "/{}/schedule/{}/{}/{}/{}/{}/save/pipeline".format(
                API_VERSION,
                job_id,
                role,
                party_id,
                model_id_base64,
                model_version_base64,
            )
            federated_api(
                job_id=job_id,
                method="POST",
                endpoint=save_endpoint,
                src_party_id=src_party_id,
                dest_party_id=party_id,
                json_body={},
                work_mode=work_mode,
            )
            # clean
            clean_endpoint = "/{}/schedule/{}/{}/{}/clean".format(
                API_VERSION, job_id, role, party_id
            )
            federated_api(
                job_id=job_id,
                method="POST",
                endpoint=clean_endpoint,
                src_party_id=src_party_id,
                dest_party_id=party_id,
                json_body={},
                work_mode=work_mode,
            )
|
def finish_job(job_id, job_runtime_conf):
    """Finalize a job on every party: persist the pipeline model, then clean up."""
    job_parameters = job_runtime_conf["job_parameters"]
    job_initiator = job_runtime_conf["initiator"]
    model_id_base64 = base64_encode(job_parameters["model_id"])
    model_version_base64 = base64_encode(job_parameters["model_version"])
    src_party_id = job_initiator["party_id"]
    work_mode = job_parameters["work_mode"]
    for role, partys in job_runtime_conf["role"].items():
        for party_id in partys:
            # save pipeline
            save_endpoint = "/{}/job/{}/{}/{}/{}/{}/save/pipeline".format(
                API_VERSION,
                job_id,
                role,
                party_id,
                model_id_base64,
                model_version_base64,
            )
            federated_api(
                job_id=job_id,
                method="POST",
                endpoint=save_endpoint,
                src_party_id=src_party_id,
                dest_party_id=party_id,
                json_body={},
                work_mode=work_mode,
            )
            # clean
            clean_endpoint = "/{}/job/{}/{}/{}/clean".format(
                API_VERSION, job_id, role, party_id
            )
            federated_api(
                job_id=job_id,
                method="POST",
                endpoint=clean_endpoint,
                src_party_id=src_party_id,
                dest_party_id=party_id,
                json_body={},
                work_mode=work_mode,
            )
|
https://github.com/FederatedAI/FATE/issues/518
|
stdout:Traceback (most recent call last):
File "/home/wy/FATE/examples/federatedml-1.0-examples/../../fate_flow/fate_flow_client.py", line 17, in <module>
import argparse
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/argparse.py", line 86, in <module>
import collections as _collections
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/collections/__init__.py", line 26, in <module>
from operator import itemgetter as _itemgetter, eq as _eq
ImportError: cannot import name 'itemgetter'
|
ImportError
|
def stop_job(job_id):
    """Stop a running job: mark it FAILED everywhere, then kill all parties.

    Only the initiator's job record is consulted (is_initiator=1); the FAILED
    status is synced to every party *before* the kill requests, then each
    party's kill endpoint is called in turn.

    Raises:
        Exception: if no initiator record exists for ``job_id``.
    """
    schedule_logger.info("get stop job {} command".format(job_id))
    jobs = job_utils.query_job(job_id=job_id, is_initiator=1)
    if jobs:
        initiator_job = jobs[0]
        job_info = {"f_job_id": job_id, "f_status": JobStatus.FAILED}
        roles = json_loads(initiator_job.f_roles)
        job_work_mode = initiator_job.f_work_mode
        initiator_party_id = initiator_job.f_party_id
        # set status first
        TaskScheduler.sync_job_status(
            job_id=job_id,
            roles=roles,
            initiator_party_id=initiator_party_id,
            work_mode=job_work_mode,
            job_info=job_info,
        )
        for role, partys in roles.items():
            for party_id in partys:
                # kill requests identify the initiator so the remote side can
                # validate the command's origin
                response = federated_api(
                    job_id=job_id,
                    method="POST",
                    endpoint="/{}/schedule/{}/{}/{}/kill".format(
                        API_VERSION, job_id, role, party_id
                    ),
                    src_party_id=initiator_party_id,
                    dest_party_id=party_id,
                    json_body={
                        "job_initiator": {
                            "party_id": initiator_job.f_party_id,
                            "role": initiator_job.f_role,
                        }
                    },
                    work_mode=job_work_mode,
                )
                # best-effort: a failed kill is logged but does not abort the
                # remaining parties
                if response["retcode"] == 0:
                    schedule_logger.info(
                        "send {} {} kill job {} command successfully".format(
                            role, party_id, job_id
                        )
                    )
                else:
                    schedule_logger.info(
                        "send {} {} kill job {} command failed: {}".format(
                            role, party_id, job_id, response["retmsg"]
                        )
                    )
    else:
        schedule_logger.info("send stop job {} command failed".format(job_id))
        raise Exception("can not found job: {}".format(job_id))
|
def stop_job(job_id):
    """Stop a running job: mark it FAILED everywhere, then kill all parties.

    Only the initiator's job record is consulted (is_initiator=1); the FAILED
    status is synced to every party *before* the kill requests, then each
    party's kill endpoint is called in turn.

    Raises:
        Exception: if no initiator record exists for ``job_id``.
    """
    schedule_logger.info("get stop job {} command".format(job_id))
    jobs = job_utils.query_job(job_id=job_id, is_initiator=1)
    if jobs:
        initiator_job = jobs[0]
        job_info = {"f_job_id": job_id, "f_status": JobStatus.FAILED}
        roles = json_loads(initiator_job.f_roles)
        job_work_mode = initiator_job.f_work_mode
        initiator_party_id = initiator_job.f_party_id
        # set status first
        TaskScheduler.sync_job_status(
            job_id=job_id,
            roles=roles,
            initiator_party_id=initiator_party_id,
            work_mode=job_work_mode,
            job_info=job_info,
        )
        for role, partys in roles.items():
            for party_id in partys:
                # kill requests identify the initiator so the remote side can
                # validate the command's origin
                response = federated_api(
                    job_id=job_id,
                    method="POST",
                    endpoint="/{}/job/{}/{}/{}/kill".format(
                        API_VERSION, job_id, role, party_id
                    ),
                    src_party_id=initiator_party_id,
                    dest_party_id=party_id,
                    json_body={
                        "job_initiator": {
                            "party_id": initiator_job.f_party_id,
                            "role": initiator_job.f_role,
                        }
                    },
                    work_mode=job_work_mode,
                )
                # best-effort: a failed kill is logged but does not abort the
                # remaining parties
                if response["retcode"] == 0:
                    schedule_logger.info(
                        "send {} {} kill job {} command successfully".format(
                            role, party_id, job_id
                        )
                    )
                else:
                    schedule_logger.info(
                        "send {} {} kill job {} command failed: {}".format(
                            role, party_id, job_id, response["retmsg"]
                        )
                    )
    else:
        schedule_logger.info("send stop job {} command failed".format(job_id))
        raise Exception("can not found job: {}".format(job_id))
|
https://github.com/FederatedAI/FATE/issues/518
|
stdout:Traceback (most recent call last):
File "/home/wy/FATE/examples/federatedml-1.0-examples/../../fate_flow/fate_flow_client.py", line 17, in <module>
import argparse
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/argparse.py", line 86, in <module>
import collections as _collections
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/collections/__init__.py", line 26, in <module>
from operator import itemgetter as _itemgetter, eq as _eq
ImportError: cannot import name 'itemgetter'
|
ImportError
|
def init_config(**kwargs):
    """Copy recognized settings onto RuntimeConfig; unknown keys are ignored.

    Setting HTTP_PORT also refreshes the derived JOB_SERVER_HOST attribute.
    """
    for key, value in kwargs.items():
        if not hasattr(RuntimeConfig, key):
            continue
        setattr(RuntimeConfig, key, value)
        if key == "HTTP_PORT":
            # keep the derived host:port string in sync with the new port
            setattr(
                RuntimeConfig,
                "JOB_SERVER_HOST",
                "{}:{}".format(get_lan_ip(), RuntimeConfig.HTTP_PORT),
            )
|
def init_config(**kwargs):
    """Copy recognized settings onto RuntimeConfig; unknown keys are ignored."""
    for key, value in kwargs.items():
        if hasattr(RuntimeConfig, key):
            setattr(RuntimeConfig, key, value)
|
https://github.com/FederatedAI/FATE/issues/518
|
stdout:Traceback (most recent call last):
File "/home/wy/FATE/examples/federatedml-1.0-examples/../../fate_flow/fate_flow_client.py", line 17, in <module>
import argparse
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/argparse.py", line 86, in <module>
import collections as _collections
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/collections/__init__.py", line 26, in <module>
from operator import itemgetter as _itemgetter, eq as _eq
ImportError: cannot import name 'itemgetter'
|
ImportError
|
def call_fun(func, config_data, dsl_path, config_path):
    """Dispatch a CLI command to the fate_flow HTTP server and return its result.

    Routes ``func`` to the matching endpoint family (job operate / job info /
    task operate / tracking / data / table / model). Download-style commands
    (job_log, component_output_data) stream a tar.gz and extract it under
    config_data["output_path"]; those return a synthesized dict instead of a
    raw HTTP response.

    Returns:
        dict: the decoded JSON body (or the synthesized result dict).

    Raises:
        Exception: when required config fields or the dsl/conf paths are
            missing for the chosen command.
    """
    ip = server_conf.get(SERVERS).get(ROLE).get("host")
    # a loopback address in the config is replaced by the LAN ip so the URL
    # works from other hosts too
    if ip in ["localhost", "127.0.0.1"]:
        ip = get_lan_ip()
    http_port = server_conf.get(SERVERS).get(ROLE).get("http.port")
    server_url = "http://{}:{}/{}".format(ip, http_port, API_VERSION)
    if func in JOB_OPERATE_FUNC:
        if func == "submit_job":
            if not config_path:
                raise Exception(
                    "the following arguments are required: {}".format(
                        "runtime conf path"
                    )
                )
            dsl_data = {}
            # predict jobs may omit the dsl; any other submit needs a dsl path
            if (
                dsl_path
                or config_data.get("job_parameters", {}).get("job_type", "")
                == "predict"
            ):
                if dsl_path:
                    dsl_path = os.path.abspath(dsl_path)
                    with open(dsl_path, "r") as f:
                        dsl_data = json.load(f)
            else:
                raise Exception(
                    "the following arguments are required: {}".format("dsl path")
                )
            post_data = {"job_dsl": dsl_data, "job_runtime_conf": config_data}
            response = requests.post(
                "/".join([server_url, "job", func.rstrip("_job")]), json=post_data
            )
            # retcode 999: standalone server not running — start it and retry
            if response.json()["retcode"] == 999:
                print("use service.sh to start standalone node server....")
                os.system("sh service.sh start --standalone_node")
                time.sleep(5)
                response = requests.post(
                    "/".join([server_url, "job", func.rstrip("_job")]), json=post_data
                )
        else:
            if func != "query_job":
                detect_utils.check_config(
                    config=config_data, required_arguments=["job_id"]
                )
            post_data = config_data
            response = requests.post(
                "/".join([server_url, "job", func.rstrip("_job")]), json=post_data
            )
            if func == "query_job":
                response = response.json()
                # strip the bulky conf/dsl blobs from the listing output
                if response["retcode"] == 0:
                    for i in range(len(response["data"])):
                        del response["data"][i]["f_runtime_conf"]
                        del response["data"][i]["f_dsl"]
    elif func in JOB_FUNC:
        if func == "job_config":
            detect_utils.check_config(
                config=config_data,
                required_arguments=["job_id", "role", "party_id", "output_path"],
            )
            response = requests.post(
                "/".join([server_url, func.replace("_", "/")]), json=config_data
            )
            response_data = response.json()
            if response_data["retcode"] == 0:
                job_id = response_data["data"]["job_id"]
                download_directory = os.path.join(
                    config_data["output_path"], "job_{}_config".format(job_id)
                )
                os.makedirs(download_directory, exist_ok=True)
                # write each config section as its own json file
                for k, v in response_data["data"].items():
                    if k == "job_id":
                        continue
                    with open("{}/{}.json".format(download_directory, k), "w") as fw:
                        json.dump(v, fw, indent=4)
                del response_data["data"]["dsl"]
                del response_data["data"]["runtime_conf"]
                response_data["directory"] = download_directory
                response_data["retmsg"] = (
                    "download successfully, please check {} directory".format(
                        download_directory
                    )
                )
                response = response_data
        elif func == "job_log":
            detect_utils.check_config(
                config=config_data, required_arguments=["job_id", "output_path"]
            )
            job_id = config_data["job_id"]
            tar_file_name = "job_{}_log.tar.gz".format(job_id)
            extract_dir = os.path.join(
                config_data["output_path"], "job_{}_log".format(job_id)
            )
            # stream the archive; closing() guarantees the connection is
            # released even on early exit
            with closing(
                requests.get(
                    "/".join([server_url, func.replace("_", "/")]),
                    json=config_data,
                    stream=True,
                )
            ) as response:
                if response.status_code == 200:
                    download_from_request(
                        http_response=response,
                        tar_file_name=tar_file_name,
                        extract_dir=extract_dir,
                    )
                    response = {
                        "retcode": 0,
                        "directory": extract_dir,
                        "retmsg": "download successfully, please check {} directory".format(
                            extract_dir
                        ),
                    }
                else:
                    response = response.json()
    elif func in TASK_OPERATE_FUNC:
        response = requests.post(
            "/".join([server_url, "job", "task", func.rstrip("_task")]),
            json=config_data,
        )
    elif func in TRACKING_FUNC:
        detect_utils.check_config(
            config=config_data,
            required_arguments=["job_id", "component_name", "role", "party_id"],
        )
        if func == "component_output_data":
            detect_utils.check_config(
                config=config_data, required_arguments=["output_path"]
            )
            tar_file_name = "job_{}_{}_{}_{}_output_data.tar.gz".format(
                config_data["job_id"],
                config_data["component_name"],
                config_data["role"],
                config_data["party_id"],
            )
            extract_dir = os.path.join(
                config_data["output_path"], tar_file_name.replace(".tar.gz", "")
            )
            with closing(
                requests.get(
                    "/".join(
                        [server_url, "tracking", func.replace("_", "/"), "download"]
                    ),
                    json=config_data,
                    stream=True,
                )
            ) as response:
                if response.status_code == 200:
                    download_from_request(
                        http_response=response,
                        tar_file_name=tar_file_name,
                        extract_dir=extract_dir,
                    )
                    response = {
                        "retcode": 0,
                        "directory": extract_dir,
                        "retmsg": "download successfully, please check {} directory".format(
                            extract_dir
                        ),
                    }
                else:
                    response = response.json()
        else:
            response = requests.post(
                "/".join([server_url, "tracking", func.replace("_", "/")]),
                json=config_data,
            )
    elif func in DATA_FUNC:
        response = requests.post("/".join([server_url, "data", func]), json=config_data)
    elif func in TABLE_FUNC:
        detect_utils.check_config(
            config=config_data, required_arguments=["namespace", "table_name"]
        )
        response = requests.post(
            "/".join([server_url, "table", func]), json=config_data
        )
    elif func in MODEL_FUNC:
        if func == "version":
            detect_utils.check_config(
                config=config_data, required_arguments=["namespace"]
            )
        response = requests.post(
            "/".join([server_url, "model", func]), json=config_data
        )
    # normalize: some branches already replaced `response` with a plain dict
    return (
        response.json() if isinstance(response, requests.models.Response) else response
    )
|
def call_fun(func, config_data, dsl_path, config_path):
    """Dispatch a CLI command to the fate_flow HTTP server and return its result.

    Routes ``func`` to the matching endpoint family (job operate / job info /
    task operate / tracking / data / table / model). Download-style commands
    (job_log, component_output_data) stream a tar.gz, extract it under
    config_data["output_path"], and return a synthesized dict instead of a
    raw HTTP response.

    Returns:
        dict: the decoded JSON body (or the synthesized result dict).

    Raises:
        Exception: when required config fields or the dsl/conf paths are
            missing for the chosen command.
    """
    ip = server_conf.get(SERVERS).get(ROLE).get("host")
    http_port = server_conf.get(SERVERS).get(ROLE).get("http.port")
    local_url = "http://{}:{}/{}".format(ip, http_port, API_VERSION)
    if func in JOB_OPERATE_FUNC:
        if func == "submit_job":
            if not config_path:
                raise Exception(
                    "the following arguments are required: {}".format(
                        "runtime conf path"
                    )
                )
            dsl_data = {}
            # predict jobs may omit the dsl; any other submit needs a dsl path
            if (
                dsl_path
                or config_data.get("job_parameters", {}).get("job_type", "")
                == "predict"
            ):
                if dsl_path:
                    dsl_path = os.path.abspath(dsl_path)
                    with open(dsl_path, "r") as f:
                        dsl_data = json.load(f)
            else:
                raise Exception(
                    "the following arguments are required: {}".format("dsl path")
                )
            post_data = {"job_dsl": dsl_data, "job_runtime_conf": config_data}
        else:
            if func != "query_job":
                detect_utils.check_config(
                    config=config_data, required_arguments=["job_id"]
                )
            post_data = config_data
        response = requests.post(
            "/".join([local_url, "job", func.rstrip("_job")]), json=post_data
        )
        if func == "query_job":
            response = response.json()
            # strip the bulky conf/dsl blobs from the listing output
            if response["retcode"] == 0:
                for i in range(len(response["data"])):
                    del response["data"][i]["f_runtime_conf"]
                    del response["data"][i]["f_dsl"]
    elif func in JOB_FUNC:
        if func == "job_config":
            detect_utils.check_config(
                config=config_data,
                required_arguments=["job_id", "role", "party_id", "output_path"],
            )
            response = requests.post(
                "/".join([local_url, func.replace("_", "/")]), json=config_data
            )
            response_data = response.json()
            if response_data["retcode"] == 0:
                job_id = response_data["data"]["job_id"]
                download_directory = os.path.join(
                    config_data["output_path"], "job_{}_config".format(job_id)
                )
                os.makedirs(download_directory, exist_ok=True)
                # write each config section as its own json file
                for k, v in response_data["data"].items():
                    if k == "job_id":
                        continue
                    with open("{}/{}.json".format(download_directory, k), "w") as fw:
                        json.dump(v, fw, indent=4)
                del response_data["data"]["dsl"]
                del response_data["data"]["runtime_conf"]
                response_data["directory"] = download_directory
                response_data["retmsg"] = (
                    "download successfully, please check {} directory".format(
                        download_directory
                    )
                )
                response = response_data
        elif func == "job_log":
            detect_utils.check_config(
                config=config_data, required_arguments=["job_id", "output_path"]
            )
            # stream the archive; closing() guarantees the connection is
            # released even on early exit
            with closing(
                requests.get(
                    "/".join([local_url, func.replace("_", "/")]),
                    json=config_data,
                    stream=True,
                )
            ) as response:
                job_id = config_data["job_id"]
                tar_file_name = "job_{}_log.tar.gz".format(job_id)
                with open(tar_file_name, "wb") as fw:
                    for chunk in response.iter_content(1024):
                        if chunk:
                            fw.write(chunk)
                extract_dir = os.path.join(
                    config_data["output_path"], "job_{}_log".format(job_id)
                )
                # unpack the downloaded archive, then delete the temp tarball
                tar = tarfile.open(tar_file_name, "r:gz")
                file_names = tar.getnames()
                for file_name in file_names:
                    tar.extract(file_name, extract_dir)
                tar.close()
                os.remove(tar_file_name)
            response = {
                "retcode": 0,
                "directory": extract_dir,
                "retmsg": "download successfully, please check {} directory".format(
                    extract_dir
                ),
            }
    elif func in TASK_OPERATE_FUNC:
        response = requests.post(
            "/".join([local_url, "job", "task", func.rstrip("_task")]), json=config_data
        )
    elif func in TRACKING_FUNC:
        detect_utils.check_config(
            config=config_data,
            required_arguments=["job_id", "component_name", "role", "party_id"],
        )
        if func == "component_output_data":
            detect_utils.check_config(
                config=config_data, required_arguments=["output_path"]
            )
            tar_file_name = "job_{}_{}_{}_{}_output_data.tar.gz".format(
                config_data["job_id"],
                config_data["component_name"],
                config_data["role"],
                config_data["party_id"],
            )
            extract_dir = os.path.join(
                config_data["output_path"], tar_file_name.replace(".tar.gz", "")
            )
            with closing(
                requests.get(
                    "/".join(
                        [local_url, "tracking", func.replace("_", "/"), "download"]
                    ),
                    json=config_data,
                    stream=True,
                )
            ) as res:
                if res.status_code == 200:
                    with open(tar_file_name, "wb") as fw:
                        for chunk in res.iter_content(1024):
                            if chunk:
                                fw.write(chunk)
                    # unpack the downloaded archive, then delete the temp tarball
                    tar = tarfile.open(tar_file_name, "r:gz")
                    file_names = tar.getnames()
                    for file_name in file_names:
                        tar.extract(file_name, extract_dir)
                    tar.close()
                    os.remove(tar_file_name)
                    response = {
                        "retcode": 0,
                        "directory": extract_dir,
                        "retmsg": "download successfully, please check {} directory".format(
                            extract_dir
                        ),
                    }
                else:
                    response = res.json()
        else:
            response = requests.post(
                "/".join([local_url, "tracking", func.replace("_", "/")]),
                json=config_data,
            )
    elif func in DATA_FUNC:
        response = requests.post("/".join([local_url, "data", func]), json=config_data)
    elif func in TABLE_FUNC:
        detect_utils.check_config(
            config=config_data, required_arguments=["namespace", "table_name"]
        )
        response = requests.post("/".join([local_url, "table", func]), json=config_data)
    elif func in MODEL_FUNC:
        if func == "version":
            detect_utils.check_config(
                config=config_data, required_arguments=["namespace"]
            )
        response = requests.post("/".join([local_url, "model", func]), json=config_data)
    # normalize: some branches already replaced `response` with a plain dict
    return (
        response.json() if isinstance(response, requests.models.Response) else response
    )
|
https://github.com/FederatedAI/FATE/issues/518
|
stdout:Traceback (most recent call last):
File "/home/wy/FATE/examples/federatedml-1.0-examples/../../fate_flow/fate_flow_client.py", line 17, in <module>
import argparse
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/argparse.py", line 86, in <module>
import collections as _collections
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/collections/__init__.py", line 26, in <module>
from operator import itemgetter as _itemgetter, eq as _eq
ImportError: cannot import name 'itemgetter'
|
ImportError
|
def init_job_queue():
    """Create the job-queue backend matching RuntimeConfig.WORK_MODE.

    STANDALONE uses an in-process queue; CLUSTER uses a Redis-backed queue.
    Any other mode raises.
    """
    mode = RuntimeConfig.WORK_MODE
    if mode == WorkMode.STANDALONE:
        queue = InProcessQueue()
    elif mode == WorkMode.CLUSTER:
        queue = RedisQueue(
            queue_name="fate_flow_job_queue",
            host=REDIS["host"],
            port=REDIS["port"],
            password=REDIS["password"],
            max_connections=REDIS["max_connections"],
        )
    else:
        raise Exception("init queue failed.")
    RuntimeConfig.init_config(JOB_QUEUE=queue)
|
def init_job_queue():
if WORK_MODE == WorkMode.STANDALONE:
job_queue = InProcessQueue()
RuntimeConfig.init_config(JOB_QUEUE=job_queue)
elif WORK_MODE == WorkMode.CLUSTER:
job_queue = RedisQueue(
queue_name="fate_flow_job_queue",
host=REDIS["host"],
port=REDIS["port"],
password=REDIS["password"],
max_connections=REDIS["max_connections"],
)
RuntimeConfig.init_config(JOB_QUEUE=job_queue)
else:
raise Exception("init queue failed.")
|
https://github.com/FederatedAI/FATE/issues/518
|
stdout:Traceback (most recent call last):
File "/home/wy/FATE/examples/federatedml-1.0-examples/../../fate_flow/fate_flow_client.py", line 17, in <module>
import argparse
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/argparse.py", line 86, in <module>
import collections as _collections
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/collections/__init__.py", line 26, in <module>
from operator import itemgetter as _itemgetter, eq as _eq
ImportError: cannot import name 'itemgetter'
|
ImportError
|
def local_api(method, endpoint, json_body):
try:
url = "http://{}{}".format(RuntimeConfig.JOB_SERVER_HOST, endpoint)
stat_logger.info("local api request: {}".format(url))
action = getattr(requests, method.lower(), None)
response = action(url=url, json=json_body, headers=HEADERS)
stat_logger.info(response.text)
response_json_body = response.json()
stat_logger.info(
"local api response: {} {}".format(endpoint, response_json_body)
)
return response_json_body
except Exception as e:
raise Exception("local request error: {}".format(e))
|
def local_api(method, endpoint, json_body):
try:
stat_logger.info("local api request: {}".format(endpoint))
url = "{}{}".format(SERVER_HOST_URL, endpoint)
action = getattr(requests, method.lower(), None)
response = action(url=url, json=json_body, headers=HEADERS)
stat_logger.info(response.text)
response_json_body = response.json()
stat_logger.info(
"local api response: {} {}".format(endpoint, response_json_body)
)
return response_json_body
except Exception as e:
raise Exception("local request error: {}".format(e))
|
https://github.com/FederatedAI/FATE/issues/518
|
stdout:Traceback (most recent call last):
File "/home/wy/FATE/examples/federatedml-1.0-examples/../../fate_flow/fate_flow_client.py", line 17, in <module>
import argparse
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/argparse.py", line 86, in <module>
import collections as _collections
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/collections/__init__.py", line 26, in <module>
from operator import itemgetter as _itemgetter, eq as _eq
ImportError: cannot import name 'itemgetter'
|
ImportError
|
def check_config(config: typing.Dict, required_arguments: typing.List):
no_arguments = []
error_arguments = []
for require_argument in required_arguments:
if isinstance(require_argument, tuple):
config_value = config.get(require_argument[0], None)
if isinstance(require_argument[1], (tuple, list)):
if config_value not in require_argument[1]:
error_arguments.append(require_argument)
elif config_value != require_argument[1]:
error_arguments.append(require_argument)
elif require_argument not in config:
no_arguments.append(require_argument)
if no_arguments or error_arguments:
raise Exception(
"the following arguments are required: {} {}".format(
",".join(no_arguments),
",".join(["{}={}".format(a[0], a[1]) for a in error_arguments]),
)
)
|
def check_config(config: typing.Dict, required_arguments: typing.List):
no_arguments = []
error_arguments = []
for argument in required_arguments:
if isinstance(argument, tuple):
if config.get(argument[0], None) != argument[1]:
error_arguments.append(argument)
elif argument not in config:
no_arguments.append(argument)
if no_arguments or error_arguments:
raise Exception(
"the following arguments are required: {} {}".format(
",".join(no_arguments),
",".join(["{}={}".format(a[0], a[1]) for a in error_arguments]),
)
)
|
https://github.com/FederatedAI/FATE/issues/518
|
stdout:Traceback (most recent call last):
File "/home/wy/FATE/examples/federatedml-1.0-examples/../../fate_flow/fate_flow_client.py", line 17, in <module>
import argparse
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/argparse.py", line 86, in <module>
import collections as _collections
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/collections/__init__.py", line 26, in <module>
from operator import itemgetter as _itemgetter, eq as _eq
ImportError: cannot import name 'itemgetter'
|
ImportError
|
def get_url(_suffix):
return "http://{}/{}".format(
RuntimeConfig.JOB_SERVER_HOST.rstrip("/"), _suffix.lstrip("/")
)
|
def get_url(_suffix):
return "{}/{}".format(SERVER_HOST_URL.rstrip("/"), _suffix.lstrip("/"))
|
https://github.com/FederatedAI/FATE/issues/518
|
stdout:Traceback (most recent call last):
File "/home/wy/FATE/examples/federatedml-1.0-examples/../../fate_flow/fate_flow_client.py", line 17, in <module>
import argparse
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/argparse.py", line 86, in <module>
import collections as _collections
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/collections/__init__.py", line 26, in <module>
from operator import itemgetter as _itemgetter, eq as _eq
ImportError: cannot import name 'itemgetter'
|
ImportError
|
def check_pipeline_job_runtime_conf(runtime_conf: typing.Dict):
detect_utils.check_config(runtime_conf, ["initiator", "job_parameters", "role"])
detect_utils.check_config(runtime_conf["initiator"], ["role", "party_id"])
detect_utils.check_config(
runtime_conf["job_parameters"], [("work_mode", RuntimeConfig.WORK_MODE)]
)
# deal party id
runtime_conf["initiator"]["party_id"] = int(runtime_conf["initiator"]["party_id"])
for r in runtime_conf["role"].keys():
for i in range(len(runtime_conf["role"][r])):
runtime_conf["role"][r][i] = int(runtime_conf["role"][r][i])
|
def check_pipeline_job_runtime_conf(runtime_conf: typing.Dict):
detect_utils.check_config(runtime_conf, ["initiator", "job_parameters", "role"])
detect_utils.check_config(runtime_conf["initiator"], ["role", "party_id"])
detect_utils.check_config(
runtime_conf["job_parameters"], [("work_mode", WORK_MODE)]
)
# deal party id
runtime_conf["initiator"]["party_id"] = int(runtime_conf["initiator"]["party_id"])
for r in runtime_conf["role"].keys():
for i in range(len(runtime_conf["role"][r])):
runtime_conf["role"][r][i] = int(runtime_conf["role"][r][i])
|
https://github.com/FederatedAI/FATE/issues/518
|
stdout:Traceback (most recent call last):
File "/home/wy/FATE/examples/federatedml-1.0-examples/../../fate_flow/fate_flow_client.py", line 17, in <module>
import argparse
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/argparse.py", line 86, in <module>
import collections as _collections
File "/home/wy/.pyenv/versions/3.6.0/lib/python3.6/collections/__init__.py", line 26, in <module>
from operator import itemgetter as _itemgetter, eq as _eq
ImportError: cannot import name 'itemgetter'
|
ImportError
|
def __init__(self, storage_locator, partitions=1):
# self.__client = _EggRoll.get_instance()
self._namespace = storage_locator.namespace
self._name = storage_locator.name
self._type = storage_basic_pb2.StorageType.Name(storage_locator.type)
self._partitions = partitions
self.schema = {}
|
def __init__(self, storage_locator, partitions=1):
self.__client = _EggRoll.get_instance()
self._namespace = storage_locator.namespace
self._name = storage_locator.name
self._type = storage_basic_pb2.StorageType.Name(storage_locator.type)
self._partitions = partitions
self.schema = {}
|
https://github.com/FederatedAI/FATE/issues/73
|
Traceback (most recent call last):
File "test.py", line 19, in <module>
test.run()
File "test.py", line 15, in run
table = self.data.mapValues(lambda x: self.fun(x))
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 108, in mapValues
return self.__client.map_values(self, func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 290, in map_values
func_id, func_bytes = self.serialize_and_hash_func(func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 195, in serialize_and_hash_func
pickled_function = cloudpickle.dumps(func)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 892, in dumps
cp.dump(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 271, in dump
return Pickler.dump(self, obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 409, in dump
self.save(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 412, in save_function
self.save_function_tuple(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 559, in save_function_tuple
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 781, in save_list
self._batch_appends(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 808, in _batch_appends
save(tmp[0])
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 852, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 496, in save
rv = reduce(self.proto)
File "stringsource", line 2, in grpc._cython.cygrpc.Channel.__reduce_cython__
TypeError: no default __reduce__ due to non-trivial __cinit__
|
TypeError
|
def save_as(self, name, namespace, partition=None, use_serialize=True):
if partition is None:
partition = self._partitions
dup = _EggRoll.get_instance().table(name, namespace, partition=partition)
dup.put_all(self.collect(use_serialize=use_serialize), use_serialize=use_serialize)
return dup
|
def save_as(self, name, namespace, partition=None, use_serialize=True):
if partition is None:
partition = self._partitions
dup = self.__client.table(name, namespace, partition=partition)
dup.put_all(self.collect(use_serialize=use_serialize), use_serialize=use_serialize)
return dup
|
https://github.com/FederatedAI/FATE/issues/73
|
Traceback (most recent call last):
File "test.py", line 19, in <module>
test.run()
File "test.py", line 15, in run
table = self.data.mapValues(lambda x: self.fun(x))
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 108, in mapValues
return self.__client.map_values(self, func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 290, in map_values
func_id, func_bytes = self.serialize_and_hash_func(func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 195, in serialize_and_hash_func
pickled_function = cloudpickle.dumps(func)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 892, in dumps
cp.dump(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 271, in dump
return Pickler.dump(self, obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 409, in dump
self.save(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 412, in save_function
self.save_function_tuple(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 559, in save_function_tuple
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 781, in save_list
self._batch_appends(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 808, in _batch_appends
save(tmp[0])
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 852, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 496, in save
rv = reduce(self.proto)
File "stringsource", line 2, in grpc._cython.cygrpc.Channel.__reduce_cython__
TypeError: no default __reduce__ due to non-trivial __cinit__
|
TypeError
|
def put(self, k, v, use_serialize=True):
_EggRoll.get_instance().put(self, k, v, use_serialize=use_serialize)
|
def put(self, k, v, use_serialize=True):
self.__client.put(self, k, v, use_serialize=use_serialize)
|
https://github.com/FederatedAI/FATE/issues/73
|
Traceback (most recent call last):
File "test.py", line 19, in <module>
test.run()
File "test.py", line 15, in run
table = self.data.mapValues(lambda x: self.fun(x))
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 108, in mapValues
return self.__client.map_values(self, func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 290, in map_values
func_id, func_bytes = self.serialize_and_hash_func(func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 195, in serialize_and_hash_func
pickled_function = cloudpickle.dumps(func)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 892, in dumps
cp.dump(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 271, in dump
return Pickler.dump(self, obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 409, in dump
self.save(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 412, in save_function
self.save_function_tuple(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 559, in save_function_tuple
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 781, in save_list
self._batch_appends(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 808, in _batch_appends
save(tmp[0])
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 852, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 496, in save
rv = reduce(self.proto)
File "stringsource", line 2, in grpc._cython.cygrpc.Channel.__reduce_cython__
TypeError: no default __reduce__ due to non-trivial __cinit__
|
TypeError
|
def put_all(self, kv_list: Iterable, use_serialize=True):
return _EggRoll.get_instance().put_all(self, kv_list, use_serialize=use_serialize)
|
def put_all(self, kv_list: Iterable, use_serialize=True):
return self.__client.put_all(self, kv_list, use_serialize=use_serialize)
|
https://github.com/FederatedAI/FATE/issues/73
|
Traceback (most recent call last):
File "test.py", line 19, in <module>
test.run()
File "test.py", line 15, in run
table = self.data.mapValues(lambda x: self.fun(x))
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 108, in mapValues
return self.__client.map_values(self, func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 290, in map_values
func_id, func_bytes = self.serialize_and_hash_func(func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 195, in serialize_and_hash_func
pickled_function = cloudpickle.dumps(func)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 892, in dumps
cp.dump(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 271, in dump
return Pickler.dump(self, obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 409, in dump
self.save(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 412, in save_function
self.save_function_tuple(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 559, in save_function_tuple
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 781, in save_list
self._batch_appends(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 808, in _batch_appends
save(tmp[0])
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 852, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 496, in save
rv = reduce(self.proto)
File "stringsource", line 2, in grpc._cython.cygrpc.Channel.__reduce_cython__
TypeError: no default __reduce__ due to non-trivial __cinit__
|
TypeError
|
def get(self, k, use_serialize=True):
return _EggRoll.get_instance().get(self, k, use_serialize=use_serialize)
|
def get(self, k, use_serialize=True):
return self.__client.get(self, k, use_serialize=use_serialize)
|
https://github.com/FederatedAI/FATE/issues/73
|
Traceback (most recent call last):
File "test.py", line 19, in <module>
test.run()
File "test.py", line 15, in run
table = self.data.mapValues(lambda x: self.fun(x))
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 108, in mapValues
return self.__client.map_values(self, func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 290, in map_values
func_id, func_bytes = self.serialize_and_hash_func(func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 195, in serialize_and_hash_func
pickled_function = cloudpickle.dumps(func)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 892, in dumps
cp.dump(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 271, in dump
return Pickler.dump(self, obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 409, in dump
self.save(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 412, in save_function
self.save_function_tuple(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 559, in save_function_tuple
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 781, in save_list
self._batch_appends(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 808, in _batch_appends
save(tmp[0])
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 852, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 496, in save
rv = reduce(self.proto)
File "stringsource", line 2, in grpc._cython.cygrpc.Channel.__reduce_cython__
TypeError: no default __reduce__ due to non-trivial __cinit__
|
TypeError
|
def delete(self, k, use_serialize=True):
return _EggRoll.get_instance().delete(self, k, use_serialize=use_serialize)
|
def delete(self, k, use_serialize=True):
return self.__client.delete(self, k, use_serialize=use_serialize)
|
https://github.com/FederatedAI/FATE/issues/73
|
Traceback (most recent call last):
File "test.py", line 19, in <module>
test.run()
File "test.py", line 15, in run
table = self.data.mapValues(lambda x: self.fun(x))
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 108, in mapValues
return self.__client.map_values(self, func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 290, in map_values
func_id, func_bytes = self.serialize_and_hash_func(func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 195, in serialize_and_hash_func
pickled_function = cloudpickle.dumps(func)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 892, in dumps
cp.dump(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 271, in dump
return Pickler.dump(self, obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 409, in dump
self.save(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 412, in save_function
self.save_function_tuple(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 559, in save_function_tuple
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 781, in save_list
self._batch_appends(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 808, in _batch_appends
save(tmp[0])
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 852, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 496, in save
rv = reduce(self.proto)
File "stringsource", line 2, in grpc._cython.cygrpc.Channel.__reduce_cython__
TypeError: no default __reduce__ due to non-trivial __cinit__
|
TypeError
|
def destroy(self):
_EggRoll.get_instance().destroy(self)
|
def destroy(self):
self.__client.destroy(self)
|
https://github.com/FederatedAI/FATE/issues/73
|
Traceback (most recent call last):
File "test.py", line 19, in <module>
test.run()
File "test.py", line 15, in run
table = self.data.mapValues(lambda x: self.fun(x))
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 108, in mapValues
return self.__client.map_values(self, func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 290, in map_values
func_id, func_bytes = self.serialize_and_hash_func(func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 195, in serialize_and_hash_func
pickled_function = cloudpickle.dumps(func)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 892, in dumps
cp.dump(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 271, in dump
return Pickler.dump(self, obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 409, in dump
self.save(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 412, in save_function
self.save_function_tuple(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 559, in save_function_tuple
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 781, in save_list
self._batch_appends(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 808, in _batch_appends
save(tmp[0])
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 852, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 496, in save
rv = reduce(self.proto)
File "stringsource", line 2, in grpc._cython.cygrpc.Channel.__reduce_cython__
TypeError: no default __reduce__ due to non-trivial __cinit__
|
TypeError
|
def count(self):
return _EggRoll.get_instance().count(self)
|
def count(self):
return self.__client.count(self)
|
https://github.com/FederatedAI/FATE/issues/73
|
Traceback (most recent call last):
File "test.py", line 19, in <module>
test.run()
File "test.py", line 15, in run
table = self.data.mapValues(lambda x: self.fun(x))
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 108, in mapValues
return self.__client.map_values(self, func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 290, in map_values
func_id, func_bytes = self.serialize_and_hash_func(func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 195, in serialize_and_hash_func
pickled_function = cloudpickle.dumps(func)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 892, in dumps
cp.dump(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 271, in dump
return Pickler.dump(self, obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 409, in dump
self.save(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 412, in save_function
self.save_function_tuple(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 559, in save_function_tuple
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 781, in save_list
self._batch_appends(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 808, in _batch_appends
save(tmp[0])
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 852, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 496, in save
rv = reduce(self.proto)
File "stringsource", line 2, in grpc._cython.cygrpc.Channel.__reduce_cython__
TypeError: no default __reduce__ due to non-trivial __cinit__
|
TypeError
|
def put_if_absent(self, k, v, use_serialize=True):
return _EggRoll.get_instance().put_if_absent(
self, k, v, use_serialize=use_serialize
)
|
def put_if_absent(self, k, v, use_serialize=True):
return self.__client.put_if_absent(self, k, v, use_serialize=use_serialize)
|
https://github.com/FederatedAI/FATE/issues/73
|
Traceback (most recent call last):
File "test.py", line 19, in <module>
test.run()
File "test.py", line 15, in run
table = self.data.mapValues(lambda x: self.fun(x))
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 108, in mapValues
return self.__client.map_values(self, func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 290, in map_values
func_id, func_bytes = self.serialize_and_hash_func(func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 195, in serialize_and_hash_func
pickled_function = cloudpickle.dumps(func)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 892, in dumps
cp.dump(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 271, in dump
return Pickler.dump(self, obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 409, in dump
self.save(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 412, in save_function
self.save_function_tuple(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 559, in save_function_tuple
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 781, in save_list
self._batch_appends(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 808, in _batch_appends
save(tmp[0])
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 852, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 496, in save
rv = reduce(self.proto)
File "stringsource", line 2, in grpc._cython.cygrpc.Channel.__reduce_cython__
TypeError: no default __reduce__ due to non-trivial __cinit__
|
TypeError
|
def map(self, func):
_intermediate_result = _EggRoll.get_instance().map(self, func)
return _intermediate_result.save_as(
str(uuid.uuid1()),
_intermediate_result._namespace,
partition=_intermediate_result._partitions,
)
|
def map(self, func):
_intermediate_result = self.__client.map(self, func)
return _intermediate_result.save_as(
str(uuid.uuid1()),
_intermediate_result._namespace,
partition=_intermediate_result._partitions,
)
|
https://github.com/FederatedAI/FATE/issues/73
|
Traceback (most recent call last):
File "test.py", line 19, in <module>
test.run()
File "test.py", line 15, in run
table = self.data.mapValues(lambda x: self.fun(x))
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 108, in mapValues
return self.__client.map_values(self, func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 290, in map_values
func_id, func_bytes = self.serialize_and_hash_func(func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 195, in serialize_and_hash_func
pickled_function = cloudpickle.dumps(func)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 892, in dumps
cp.dump(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 271, in dump
return Pickler.dump(self, obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 409, in dump
self.save(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 412, in save_function
self.save_function_tuple(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 559, in save_function_tuple
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 781, in save_list
self._batch_appends(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 808, in _batch_appends
save(tmp[0])
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 852, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 496, in save
rv = reduce(self.proto)
File "stringsource", line 2, in grpc._cython.cygrpc.Channel.__reduce_cython__
TypeError: no default __reduce__ due to non-trivial __cinit__
|
TypeError
|
def mapValues(self, func):
return _EggRoll.get_instance().map_values(self, func)
|
def mapValues(self, func):
return self.__client.map_values(self, func)
|
https://github.com/FederatedAI/FATE/issues/73
|
Traceback (most recent call last):
File "test.py", line 19, in <module>
test.run()
File "test.py", line 15, in run
table = self.data.mapValues(lambda x: self.fun(x))
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 108, in mapValues
return self.__client.map_values(self, func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 290, in map_values
func_id, func_bytes = self.serialize_and_hash_func(func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 195, in serialize_and_hash_func
pickled_function = cloudpickle.dumps(func)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 892, in dumps
cp.dump(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 271, in dump
return Pickler.dump(self, obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 409, in dump
self.save(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 412, in save_function
self.save_function_tuple(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 559, in save_function_tuple
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 781, in save_list
self._batch_appends(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 808, in _batch_appends
save(tmp[0])
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 852, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 496, in save
rv = reduce(self.proto)
File "stringsource", line 2, in grpc._cython.cygrpc.Channel.__reduce_cython__
TypeError: no default __reduce__ due to non-trivial __cinit__
|
TypeError
|
def mapPartitions(self, func):
return _EggRoll.get_instance().map_partitions(self, func)
|
def mapPartitions(self, func):
return self.__client.map_partitions(self, func)
|
https://github.com/FederatedAI/FATE/issues/73
|
Traceback (most recent call last):
File "test.py", line 19, in <module>
test.run()
File "test.py", line 15, in run
table = self.data.mapValues(lambda x: self.fun(x))
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 108, in mapValues
return self.__client.map_values(self, func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 290, in map_values
func_id, func_bytes = self.serialize_and_hash_func(func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 195, in serialize_and_hash_func
pickled_function = cloudpickle.dumps(func)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 892, in dumps
cp.dump(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 271, in dump
return Pickler.dump(self, obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 409, in dump
self.save(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 412, in save_function
self.save_function_tuple(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 559, in save_function_tuple
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 781, in save_list
self._batch_appends(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 808, in _batch_appends
save(tmp[0])
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 852, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 496, in save
rv = reduce(self.proto)
File "stringsource", line 2, in grpc._cython.cygrpc.Channel.__reduce_cython__
TypeError: no default __reduce__ due to non-trivial __cinit__
|
TypeError
|
def reduce(self, func):
return _EggRoll.get_instance().reduce(self, func)
|
def reduce(self, func):
return self.__client.reduce(self, func)
|
https://github.com/FederatedAI/FATE/issues/73
|
Traceback (most recent call last):
File "test.py", line 19, in <module>
test.run()
File "test.py", line 15, in run
table = self.data.mapValues(lambda x: self.fun(x))
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 108, in mapValues
return self.__client.map_values(self, func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 290, in map_values
func_id, func_bytes = self.serialize_and_hash_func(func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 195, in serialize_and_hash_func
pickled_function = cloudpickle.dumps(func)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 892, in dumps
cp.dump(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 271, in dump
return Pickler.dump(self, obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 409, in dump
self.save(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 412, in save_function
self.save_function_tuple(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 559, in save_function_tuple
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 781, in save_list
self._batch_appends(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 808, in _batch_appends
save(tmp[0])
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 852, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 496, in save
rv = reduce(self.proto)
File "stringsource", line 2, in grpc._cython.cygrpc.Channel.__reduce_cython__
TypeError: no default __reduce__ due to non-trivial __cinit__
|
TypeError
|
def join(self, other, func):
if other._partitions != self._partitions:
if other.count() > self.count():
return self.save_as(
str(uuid.uuid1()),
_EggRoll.get_instance().job_id,
partition=other._partitions,
).join(other, func)
else:
return self.join(
other.save_as(
str(uuid.uuid1()),
_EggRoll.get_instance().job_id,
partition=self._partitions,
),
func,
)
return _EggRoll.get_instance().join(self, other, func)
|
def join(self, other, func):
if other._partitions != self._partitions:
if other.count() > self.count():
return self.save_as(
str(uuid.uuid1()), self.__client.job_id, partition=other._partitions
).join(other, func)
else:
return self.join(
other.save_as(
str(uuid.uuid1()), self.__client.job_id, partition=self._partitions
),
func,
)
return self.__client.join(self, other, func)
|
https://github.com/FederatedAI/FATE/issues/73
|
Traceback (most recent call last):
File "test.py", line 19, in <module>
test.run()
File "test.py", line 15, in run
table = self.data.mapValues(lambda x: self.fun(x))
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 108, in mapValues
return self.__client.map_values(self, func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 290, in map_values
func_id, func_bytes = self.serialize_and_hash_func(func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 195, in serialize_and_hash_func
pickled_function = cloudpickle.dumps(func)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 892, in dumps
cp.dump(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 271, in dump
return Pickler.dump(self, obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 409, in dump
self.save(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 412, in save_function
self.save_function_tuple(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 559, in save_function_tuple
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 781, in save_list
self._batch_appends(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 808, in _batch_appends
save(tmp[0])
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 852, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 496, in save
rv = reduce(self.proto)
File "stringsource", line 2, in grpc._cython.cygrpc.Channel.__reduce_cython__
TypeError: no default __reduce__ due to non-trivial __cinit__
|
TypeError
|
def glom(self):
return _EggRoll.get_instance().glom(self)
|
def glom(self):
return self.__client.glom(self)
|
https://github.com/FederatedAI/FATE/issues/73
|
Traceback (most recent call last):
File "test.py", line 19, in <module>
test.run()
File "test.py", line 15, in run
table = self.data.mapValues(lambda x: self.fun(x))
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 108, in mapValues
return self.__client.map_values(self, func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 290, in map_values
func_id, func_bytes = self.serialize_and_hash_func(func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 195, in serialize_and_hash_func
pickled_function = cloudpickle.dumps(func)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 892, in dumps
cp.dump(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 271, in dump
return Pickler.dump(self, obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 409, in dump
self.save(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 412, in save_function
self.save_function_tuple(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 559, in save_function_tuple
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 781, in save_list
self._batch_appends(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 808, in _batch_appends
save(tmp[0])
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 852, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 496, in save
rv = reduce(self.proto)
File "stringsource", line 2, in grpc._cython.cygrpc.Channel.__reduce_cython__
TypeError: no default __reduce__ due to non-trivial __cinit__
|
TypeError
|
def sample(self, fraction, seed=None):
return _EggRoll.get_instance().sample(self, fraction, seed)
|
def sample(self, fraction, seed=None):
return self.__client.sample(self, fraction, seed)
|
https://github.com/FederatedAI/FATE/issues/73
|
Traceback (most recent call last):
File "test.py", line 19, in <module>
test.run()
File "test.py", line 15, in run
table = self.data.mapValues(lambda x: self.fun(x))
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 108, in mapValues
return self.__client.map_values(self, func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 290, in map_values
func_id, func_bytes = self.serialize_and_hash_func(func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 195, in serialize_and_hash_func
pickled_function = cloudpickle.dumps(func)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 892, in dumps
cp.dump(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 271, in dump
return Pickler.dump(self, obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 409, in dump
self.save(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 412, in save_function
self.save_function_tuple(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 559, in save_function_tuple
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 781, in save_list
self._batch_appends(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 808, in _batch_appends
save(tmp[0])
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 852, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 496, in save
rv = reduce(self.proto)
File "stringsource", line 2, in grpc._cython.cygrpc.Channel.__reduce_cython__
TypeError: no default __reduce__ due to non-trivial __cinit__
|
TypeError
|
def cleanup(self, name, namespace, persistent):
if namespace is None or name is None:
raise ValueError("neither name nor namespace can be None")
_type = storage_basic_pb2.LMDB if persistent else storage_basic_pb2.IN_MEMORY
storage_locator = storage_basic_pb2.StorageLocator(
type=_type, namespace=namespace, name=name
)
_table = _DTable(storage_locator=storage_locator)
self.destroy_all(_table)
LOGGER.debug("cleaned up: %s", _table)
|
def cleanup(self, name, namespace, persistent):
if namespace is None or name is None:
raise ValueError("neither name nor namespace can be None")
type = storage_basic_pb2.LMDB if persistent else storage_basic_pb2.IN_MEMORY
storage_locator = storage_basic_pb2.StorageLocator(
type=type, namespace=namespace, name=name
)
_table = _DTable(storage_locator=storage_locator)
self.destroy_all(_table)
LOGGER.debug("cleaned up: %s", _table)
|
https://github.com/FederatedAI/FATE/issues/73
|
Traceback (most recent call last):
File "test.py", line 19, in <module>
test.run()
File "test.py", line 15, in run
table = self.data.mapValues(lambda x: self.fun(x))
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 108, in mapValues
return self.__client.map_values(self, func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 290, in map_values
func_id, func_bytes = self.serialize_and_hash_func(func)
File "/data/projects/fate/python/arch/api/cluster/eggroll.py", line 195, in serialize_and_hash_func
pickled_function = cloudpickle.dumps(func)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 892, in dumps
cp.dump(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 271, in dump
return Pickler.dump(self, obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 409, in dump
self.save(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 412, in save_function
self.save_function_tuple(obj)
File "/data/projects/fate/python/arch/api/utils/cloudpickle.py", line 559, in save_function_tuple
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 781, in save_list
self._batch_appends(obj)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 808, in _batch_appends
save(tmp[0])
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 852, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 634, in save_reduce
save(state)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 821, in save_dict
self._batch_setitems(obj.items())
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 847, in _batch_setitems
save(v)
File "/data/projects/common/miniconda3/lib/python3.6/pickle.py", line 496, in save
rv = reduce(self.proto)
File "stringsource", line 2, in grpc._cython.cygrpc.Channel.__reduce_cython__
TypeError: no default __reduce__ due to non-trivial __cinit__
|
TypeError
|
def save_episodes_as_file(self, episodes):
PRIVATE_FOLDER_ATTRIBUTE = "_save_episodes_as_file_folder"
folder = getattr(self, PRIVATE_FOLDER_ATTRIBUTE, None)
(notCancelled, folder) = self.show_folder_select_dialog(initial_directory=folder)
setattr(self, PRIVATE_FOLDER_ATTRIBUTE, folder)
if notCancelled:
for episode in episodes:
if episode.was_downloaded(and_exists=True):
copy_from = episode.local_filename(create=False)
assert copy_from is not None
base, extension = os.path.splitext(copy_from)
filename = self.build_filename(episode.sync_filename(), extension)
copy_to = os.path.join(folder, filename)
try:
shutil.copyfile(copy_from, copy_to)
except (OSError, IOError) as e:
# Remove characters not supported by VFAT (#282)
new_filename = re.sub(r"[\"*/:<>?\\|]", "_", filename)
destination = os.path.join(folder, new_filename)
if copy_to != destination:
shutil.copyfile(copy_from, destination)
else:
raise
|
def save_episodes_as_file(self, episodes):
PRIVATE_FOLDER_ATTRIBUTE = "_save_episodes_as_file_folder"
folder = getattr(self, PRIVATE_FOLDER_ATTRIBUTE, None)
(notCancelled, folder) = self.show_folder_select_dialog(initial_directory=folder)
setattr(self, PRIVATE_FOLDER_ATTRIBUTE, folder)
if notCancelled:
for episode in episodes:
if episode.was_downloaded(and_exists=True):
copy_from = episode.local_filename(create=False)
assert copy_from is not None
base, extension = os.path.splitext(copy_from)
filename = self.build_filename(episode.sync_filename(), extension)
copy_to = os.path.join(folder, filename)
shutil.copyfile(copy_from, copy_to)
|
https://github.com/gpodder/gpodder/issues/282
|
1492734407.836906 [gpodder.log] ERROR: Uncaught exception: Traceback (most recent call last):
File "/home/user/dev/gpodder/src/gpodder/gtkui/main.py", line 1750, in save_episodes_as_file
shutil.copyfile(copy_from, copy_to)
File "/usr/lib/python3.5/shutil.py", line 115, in copyfile
with open(dst, 'wb') as fdst:
OSError: [Errno 22] Invalid argument: '/media/user/0123-4567/Podcasts/Episode 84: Ur Fave is Problematic.mp3'
Traceback (most recent call last):
File "/home/user/dev/gpodder/src/gpodder/gtkui/main.py", line 1750, in save_episodes_as_file
shutil.copyfile(copy_from, copy_to)
File "/usr/lib/python3.5/shutil.py", line 115, in copyfile
with open(dst, 'wb') as fdst:
OSError: [Errno 22] Invalid argument: '/media/user/0123-4567/Podcasts/Episode 84: Ur Fave is Problematic.mp3'
|
OSError
|
def _save(self, data: tf.keras.Model) -> None:
save_path = get_filepath_str(self._get_save_path(), self._protocol)
# Make sure all intermediate directories are created.
save_dir = Path(save_path).parent
save_dir.mkdir(parents=True, exist_ok=True)
with tempfile.TemporaryDirectory(prefix=self._tmp_prefix) as path:
if self._is_h5:
path = str(PurePath(path) / TEMPORARY_H5_FILE)
tf.keras.models.save_model(data, path, **self._save_args)
# Use fsspec to take from local tempfile directory/file and
# put in ArbitraryFileSystem
if self._is_h5:
self._fs.copy(path, save_path)
else:
self._fs.put(path, save_path, recursive=True)
|
def _save(self, data: tf.keras.Model) -> None:
save_path = get_filepath_str(self._get_save_path(), self._protocol)
with tempfile.TemporaryDirectory(prefix=self._tmp_prefix) as path:
if self._is_h5:
path = str(PurePath(path) / TEMPORARY_H5_FILE)
tf.keras.models.save_model(data, path, **self._save_args)
# Use fsspec to take from local tempfile directory/file and
# put in ArbitraryFileSystem
if self._is_h5:
self._fs.copy(path, save_path)
else:
self._fs.put(path, save_path, recursive=True)
|
https://github.com/quantumblacklabs/kedro/issues/518
|
Traceback (most recent call last):
File "/home/daniel/git/cotton_counter/.venv/lib/python3.7/site-packages/kedro/io/core.py", line 240, in save
self._save(data)
File "/home/daniel/git/cotton_counter/.venv/lib/python3.7/site-packages/kedro/extras/datasets/tensorflow/tensorflow_model_dataset.py", line 167, in _save
self._fs.copy(path, save_path)
File "/home/daniel/git/cotton_counter/.venv/lib/python3.7/site-packages/fsspec/implementations/local.py", line 90, in copy
shutil.copyfile(path1, path2)
File "/home/daniel/.pyenv/versions/3.7.7/lib/python3.7/shutil.py", line 121, in copyfile
with open(dst, 'wb') as fdst:
FileNotFoundError: [Errno 2] No such file or directory: '/home/daniel/git/cotton_counter/data/06_models/fully_trained.hd5/2020-09-19T16.20.54.312Z/fully_trained.hd5'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/home/daniel/.pyenv/versions/3.7.7/lib/python3.7/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/home/daniel/.pyenv/versions/3.7.7/lib/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/home/daniel/git/cotton_counter/.venv/lib/python3.7/site-packages/kedro/__main__.py", line 38, in <module>
main()
File "/home/daniel/git/cotton_counter/.venv/lib/python3.7/site-packages/kedro/framework/cli/cli.py", line 724, in main
cli_collection()
File "/home/daniel/git/cotton_counter/.venv/lib/python3.7/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/home/daniel/git/cotton_counter/.venv/lib/python3.7/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/home/daniel/git/cotton_counter/.venv/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/daniel/git/cotton_counter/.venv/lib/python3.7/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/daniel/git/cotton_counter/.venv/lib/python3.7/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/home/daniel/git/cotton_counter/kedro_cli.py", line 263, in run
pipeline_name=pipeline,
File "/home/daniel/git/cotton_counter/.venv/lib/python3.7/site-packages/kedro/framework/context/context.py", line 767, in run
raise exc
File "/home/daniel/git/cotton_counter/.venv/lib/python3.7/site-packages/kedro/framework/context/context.py", line 759, in run
run_result = runner.run(filtered_pipeline, catalog, run_id)
File "/home/daniel/git/cotton_counter/.venv/lib/python3.7/site-packages/kedro/runner/runner.py", line 101, in run
self._run(pipeline, catalog, run_id)
File "/home/daniel/git/cotton_counter/.venv/lib/python3.7/site-packages/kedro/runner/sequential_runner.py", line 90, in _run
run_node(node, catalog, self._is_async, run_id)
File "/home/daniel/git/cotton_counter/.venv/lib/python3.7/site-packages/kedro/runner/runner.py", line 213, in run_node
node = _run_node_sequential(node, catalog, run_id)
File "/home/daniel/git/cotton_counter/.venv/lib/python3.7/site-packages/kedro/runner/runner.py", line 249, in _run_node_sequential
catalog.save(name, data)
File "/home/daniel/git/cotton_counter/.venv/lib/python3.7/site-packages/kedro/io/data_catalog.py", line 439, in save
func(data)
File "/home/daniel/git/cotton_counter/.venv/lib/python3.7/site-packages/kedro/io/core.py", line 625, in save
super().save(data)
File "/home/daniel/git/cotton_counter/.venv/lib/python3.7/site-packages/kedro/io/core.py", line 247, in save
raise DataSetError(message) from exc
kedro.io.core.DataSetError: Failed while saving data to data set TensorFlowModelDataset(filepath=/home/daniel/git/cotton_counter/data/06_models/fully_trained.hd5, load_args={'compile': False}, protocol=file, save_args={'save_format': h5}, version=Version(load=None, save='2020-09-19T16.20.54.312Z')).
[Errno 2] No such file or directory: '/home/daniel/git/cotton_counter/data/06_models/fully_trained.hd5/2020-09-19T16.20.54.312Z/fully_trained.hd5'
|
FileNotFoundError
|
def _get_credentials(
credentials_name: str, credentials: Dict[str, Any]
) -> Dict[str, Any]:
"""Return a set of credentials from the provided credentials dict.
Args:
credentials_name: Credentials name.
credentials: A dictionary with all credentials.
Returns:
The set of requested credentials.
Raises:
KeyError: When a data set with the given name has not yet been
registered.
"""
try:
return credentials[credentials_name]
except KeyError:
raise KeyError(
"Unable to find credentials '{}': check your data "
"catalog and credentials configuration. See "
"https://kedro.readthedocs.io/en/latest/kedro.io.DataCatalog.html "
"for an example.".format(credentials_name)
)
|
def _get_credentials(credentials_name: str, credentials: Dict) -> Dict:
"""Return a set of credentials from the provided credentials dict.
Args:
credentials_name: Credentials name.
credentials: A dictionary with all credentials.
Returns:
The set of requested credentials.
Raises:
KeyError: When a data set with the given name has not yet been
registered.
"""
try:
return credentials[credentials_name]
except KeyError:
raise KeyError(
"Unable to find credentials '{}': check your data "
"catalog and credentials configuration. See "
"https://kedro.readthedocs.io/en/latest/kedro.io.DataCatalog.html "
"for an example.".format(credentials_name)
)
|
https://github.com/quantumblacklabs/kedro/issues/380
|
~/.pyenv/versions/3.6.6/envs/kedro-mlflow/lib/python3.6/site-packages/kedro/context/__init__.py:46: DeprecationWarning: All the modules in `kedro.context` have been moved to `kedro.framework.context`, and `kedro.context` will be removed in Kedro 0.17.0. Please update import paths from `kedro.context` to `kedro.framework.context` in your Kedro project.
DeprecationWarning,
~/.pyenv/versions/3.6.6/envs/kedro-mlflow/lib/python3.6/site-packages/kedro/cli/__init__.py:44: DeprecationWarning: All the modules in `kedro.cli` have been moved to `kedro.framework.cli`, and `kedro.cli` will be removed in Kedro 0.17.0. Please update import paths from `kedro.cli` to `kedro.framework.cli` in your Kedro project.
DeprecationWarning,
Traceback (most recent call last):
File "~/.pyenv/versions/3.6.6/envs/kedro-mlflow/lib/python3.6/site-packages/kedro/framework/cli/cli.py", line 619, in main
kedro_cli = importlib.import_module("kedro_cli")
File "~/.pyenv/versions/3.6.6/lib/python3.6/importlib/__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 994, in _gcd_import
File "<frozen importlib._bootstrap>", line 971, in _find_and_load
File "<frozen importlib._bootstrap>", line 955, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 665, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 678, in exec_module
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "/myProject/kedro_cli.py", line 54, in <module>
from kedro.context import KEDRO_ENV_VAR, load_context
ImportError: cannot import name 'KEDRO_ENV_VAR'
Traceback (most recent call last):
File "~/.pyenv/versions/3.6.6/envs/kedro-mlflow/lib/python3.6/site-packages/kedro/framework/cli/cli.py", line 619, in main
kedro_cli = importlib.import_module("kedro_cli")
File "~/.pyenv/versions/3.6.6/lib/python3.6/importlib/__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 994, in _gcd_import
File "<frozen importlib._bootstrap>", line 971, in _find_and_load
File "<frozen importlib._bootstrap>", line 955, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 665, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 678, in exec_module
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "/myProject/kedro_cli.py", line 54, in <module>
from kedro.context import KEDRO_ENV_VAR, load_context
ImportError: cannot import name 'KEDRO_ENV_VAR'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "~/.pyenv/versions/3.6.6/envs/kedro-mlflow/bin/kedro", line 8, in <module>
sys.exit(main())
File "~/.pyenv/versions/3.6.6/envs/kedro-mlflow/lib/python3.6/site-packages/kedro/framework/cli/cli.py", line 623, in main
_handle_exception(f"Cannot load commands from {kedro_cli_path}")
File "~/.pyenv/versions/3.6.6/envs/kedro-mlflow/lib/python3.6/site-packages/kedro/framework/cli/cli.py", line 643, in _handle_exception
raise KedroCliError(msg)
kedro.framework.cli.utils.KedroCliError: Cannot load commands from /myProject/kedro_cli.py
|
ImportError
|
def from_config(
    cls: Type,
    catalog: Optional[Dict[str, Dict[str, Any]]],
    credentials: Dict[str, Dict[str, Any]] = None,
    load_versions: Dict[str, str] = None,
    save_version: str = None,
    journal: Journal = None,
) -> "DataCatalog":
    """Create a ``DataCatalog`` instance from configuration. This is a
    factory method used to provide developers with a way to instantiate
    ``DataCatalog`` with configuration parsed from configuration files.

    Args:
        catalog: A dictionary whose keys are the data set names and
            the values are dictionaries with the constructor arguments
            for classes implementing ``AbstractDataSet``. The data set
            class to be loaded is specified with the key ``type`` and their
            fully qualified class name. All ``kedro.io`` data set can be
            specified by their class name only, i.e. their module name
            can be omitted.
        credentials: A dictionary containing credentials for different
            data sets. Use the ``credentials`` key in a ``AbstractDataSet``
            to refer to the appropriate credentials as shown in the example
            below.
        load_versions: A mapping between dataset names and versions
            to load. Has no effect on data sets without enabled versioning.
        save_version: Version string to be used for ``save`` operations
            by all data sets with enabled versioning. It must: a) be a
            case-insensitive string that conforms with operating system
            filename limitations, b) always return the latest version when
            sorted in lexicographical order.
        journal: Instance of Journal.

    Returns:
        An instantiated ``DataCatalog`` containing all specified
        data sets, created and ready to use.

    Raises:
        DataSetError: When the method fails to create any of the data
            sets from their config.

    Example:
    ::

        >>> config = {
        >>>     "cars": {
        >>>         "type": "CSVLocalDataSet",
        >>>         "filepath": "cars.csv",
        >>>         "save_args": {
        >>>             "index": False
        >>>         }
        >>>     },
        >>>     "boats": {
        >>>         "type": "CSVS3DataSet",
        >>>         "filepath": "boats.csv",
        >>>         "bucket_name": "mck-147789798-bucket",
        >>>         "credentials": "boats_credentials"
        >>>         "save_args": {
        >>>             "index": False
        >>>         }
        >>>     }
        >>> }
        >>>
        >>> credentials = {
        >>>     "boats_credentials": {
        >>>         "aws_access_key_id": "<your key id>",
        >>>         "aws_secret_access_key": "<your secret>"
        >>>     }
        >>> }
        >>>
        >>> catalog = DataCatalog.from_config(config, credentials)
        >>>
        >>> df = catalog.load("cars")
        >>> catalog.save("boats", df)
    """
    data_sets = {}
    # Deep-copy every caller-supplied mapping so this factory never mutates
    # its arguments (entries are popped/rewritten further down).
    catalog = copy.deepcopy(catalog) or {}
    credentials = copy.deepcopy(credentials) or {}
    # Version precedence: explicit save_version, then the journal run id,
    # then a freshly generated timestamp.
    run_id = journal.run_id if journal else None
    save_version = save_version or run_id or generate_timestamp()
    load_versions = copy.deepcopy(load_versions) or {}
    # Warn (but do not fail) about load_versions entries that reference
    # data sets missing from the catalog config.
    missing_keys = load_versions.keys() - catalog.keys()
    if missing_keys:
        warn(
            "`load_versions` keys [{}] are not found in the catalog.".format(
                ", ".join(sorted(missing_keys))
            )
        )
    # Resolve each data set's `credentials` reference against the credentials
    # mapping, then instantiate the data set from its resolved config.
    for ds_name, ds_config in catalog.items():
        ds_config = _resolve_credentials(ds_config, credentials)
        data_sets[ds_name] = AbstractDataSet.from_config(
            ds_name, ds_config, load_versions.get(ds_name), save_version
        )
    return cls(data_sets=data_sets, journal=journal)
|
def from_config(
    cls: Type,
    catalog: Optional[Dict[str, Dict[str, Any]]],
    credentials: Dict[str, Dict[str, Any]] = None,
    load_versions: Dict[str, str] = None,
    save_version: str = None,
    journal: Journal = None,
) -> "DataCatalog":
    """Create a ``DataCatalog`` instance from configuration. This is a
    factory method used to provide developers with a way to instantiate
    ``DataCatalog`` with configuration parsed from configuration files.

    Args:
        catalog: A dictionary whose keys are the data set names and
            the values are dictionaries with the constructor arguments
            for classes implementing ``AbstractDataSet``. The data set
            class to be loaded is specified with the key ``type`` and their
            fully qualified class name. All ``kedro.io`` data set can be
            specified by their class name only, i.e. their module name
            can be omitted.
        credentials: A dictionary containing credentials for different
            data sets. Use the ``credentials`` key in a ``AbstractDataSet``
            to refer to the appropriate credentials as shown in the example
            below.
        load_versions: A mapping between dataset names and versions
            to load. Has no effect on data sets without enabled versioning.
        save_version: Version string to be used for ``save`` operations
            by all data sets with enabled versioning. It must: a) be a
            case-insensitive string that conforms with operating system
            filename limitations, b) always return the latest version when
            sorted in lexicographical order.
        journal: Instance of Journal.

    Returns:
        An instantiated ``DataCatalog`` containing all specified
        data sets, created and ready to use.

    Raises:
        DataSetError: When the method fails to create any of the data
            sets from their config.

    Example:
    ::

        >>> config = {
        >>>     "cars": {
        >>>         "type": "CSVLocalDataSet",
        >>>         "filepath": "cars.csv",
        >>>         "save_args": {
        >>>             "index": False
        >>>         }
        >>>     },
        >>>     "boats": {
        >>>         "type": "CSVS3DataSet",
        >>>         "filepath": "boats.csv",
        >>>         "bucket_name": "mck-147789798-bucket",
        >>>         "credentials": "boats_credentials"
        >>>         "save_args": {
        >>>             "index": False
        >>>         }
        >>>     }
        >>> }
        >>>
        >>> credentials = {
        >>>     "boats_credentials": {
        >>>         "aws_access_key_id": "<your key id>",
        >>>         "aws_secret_access_key": "<your secret>"
        >>>     }
        >>> }
        >>>
        >>> catalog = DataCatalog.from_config(config, credentials)
        >>>
        >>> df = catalog.load("cars")
        >>> catalog.save("boats", df)
    """
    data_sets = {}
    # Deep-copy every caller-supplied mapping so this factory never mutates
    # its arguments (credentials keys are popped in place further down).
    catalog = copy.deepcopy(catalog) or {}
    credentials = copy.deepcopy(credentials) or {}
    # Version precedence: explicit save_version, then the journal run id,
    # then a freshly generated timestamp.
    run_id = journal.run_id if journal else None
    save_version = save_version or run_id or generate_timestamp()
    load_versions = copy.deepcopy(load_versions) or {}
    # Warn (but do not fail) about load_versions entries that reference
    # data sets missing from the catalog config.
    missing_keys = load_versions.keys() - catalog.keys()
    if missing_keys:
        warn(
            "`load_versions` keys [{}] are not found in the catalog.".format(
                ", ".join(sorted(missing_keys))
            )
        )
    for ds_name, ds_config in catalog.items():
        # Replace the credentials *name* in the dataset config with the
        # actual credentials dict before instantiating the data set.
        if CREDENTIALS_KEY in ds_config:
            ds_config[CREDENTIALS_KEY] = _get_credentials(
                ds_config.pop(CREDENTIALS_KEY),  # credentials name
                credentials,
            )
        data_sets[ds_name] = AbstractDataSet.from_config(
            ds_name, ds_config, load_versions.get(ds_name), save_version
        )
    return cls(data_sets=data_sets, journal=journal)
|
https://github.com/quantumblacklabs/kedro/issues/380
|
~/.pyenv/versions/3.6.6/envs/kedro-mlflow/lib/python3.6/site-packages/kedro/context/__init__.py:46: DeprecationWarning: All the modules in `kedro.context` have been moved to `kedro.framework.context`, and `kedro.context` will be removed in Kedro 0.17.0. Please update import paths from `kedro.context` to `kedro.framework.context` in your Kedro project.
DeprecationWarning,
~/.pyenv/versions/3.6.6/envs/kedro-mlflow/lib/python3.6/site-packages/kedro/cli/__init__.py:44: DeprecationWarning: All the modules in `kedro.cli` have been moved to `kedro.framework.cli`, and `kedro.cli` will be removed in Kedro 0.17.0. Please update import paths from `kedro.cli` to `kedro.framework.cli` in your Kedro project.
DeprecationWarning,
Traceback (most recent call last):
File "~/.pyenv/versions/3.6.6/envs/kedro-mlflow/lib/python3.6/site-packages/kedro/framework/cli/cli.py", line 619, in main
kedro_cli = importlib.import_module("kedro_cli")
File "~/.pyenv/versions/3.6.6/lib/python3.6/importlib/__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 994, in _gcd_import
File "<frozen importlib._bootstrap>", line 971, in _find_and_load
File "<frozen importlib._bootstrap>", line 955, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 665, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 678, in exec_module
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "/myProject/kedro_cli.py", line 54, in <module>
from kedro.context import KEDRO_ENV_VAR, load_context
ImportError: cannot import name 'KEDRO_ENV_VAR'
Traceback (most recent call last):
File "~/.pyenv/versions/3.6.6/envs/kedro-mlflow/lib/python3.6/site-packages/kedro/framework/cli/cli.py", line 619, in main
kedro_cli = importlib.import_module("kedro_cli")
File "~/.pyenv/versions/3.6.6/lib/python3.6/importlib/__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 994, in _gcd_import
File "<frozen importlib._bootstrap>", line 971, in _find_and_load
File "<frozen importlib._bootstrap>", line 955, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 665, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 678, in exec_module
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "/myProject/kedro_cli.py", line 54, in <module>
from kedro.context import KEDRO_ENV_VAR, load_context
ImportError: cannot import name 'KEDRO_ENV_VAR'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "~/.pyenv/versions/3.6.6/envs/kedro-mlflow/bin/kedro", line 8, in <module>
sys.exit(main())
File "~/.pyenv/versions/3.6.6/envs/kedro-mlflow/lib/python3.6/site-packages/kedro/framework/cli/cli.py", line 623, in main
_handle_exception(f"Cannot load commands from {kedro_cli_path}")
File "~/.pyenv/versions/3.6.6/envs/kedro-mlflow/lib/python3.6/site-packages/kedro/framework/cli/cli.py", line 643, in _handle_exception
raise KedroCliError(msg)
kedro.framework.cli.utils.KedroCliError: Cannot load commands from /myProject/kedro_cli.py
|
ImportError
|
def parse_dataset_definition(
    config: Dict[str, Any], load_version: str = None, save_version: str = None
) -> Tuple[Type[AbstractDataSet], Dict[str, Any]]:
    """Parse and instantiate a dataset class using the configuration provided.

    Args:
        config: Data set config dictionary. It *must* contain the `type` key
            with fully qualified class name.
        load_version: Version string to be used for ``load`` operation if
            the data set is versioned. Has no effect on the data set
            if versioning was not enabled.
        save_version: Version string to be used for ``save`` operation if
            the data set is versioned. Has no effect on the data set
            if versioning was not enabled.

    Raises:
        DataSetError: If the function fails to parse the configuration provided.

    Returns:
        2-tuple: (Dataset class object, configuration dictionary)
    """
    save_version = save_version or generate_timestamp()
    # Work on a copy: keys are popped below and the caller's dict must not
    # be mutated.
    config = copy.deepcopy(config)

    if "type" not in config:
        raise DataSetError("`type` is missing from DataSet catalog configuration")

    dataset_type = config.pop("type")
    if isinstance(dataset_type, str):
        # Resolve a dotted class path; bare class names are looked up in the
        # default `kedro.io` namespace.
        try:
            dataset_type = load_obj(dataset_type, "kedro.io")
        except ImportError:
            raise DataSetError(
                "Cannot import module when trying to load type `{}`.".format(
                    dataset_type
                )
            )
        except AttributeError:
            raise DataSetError("Class `{}` not found.".format(dataset_type))

    if not issubclass(dataset_type, AbstractDataSet):
        raise DataSetError(
            "DataSet type `{}.{}` is invalid: all data set types must extend "
            "`AbstractDataSet`.".format(
                dataset_type.__module__, dataset_type.__qualname__
            )
        )

    if VERSION_KEY in config:
        # "version" is reserved; drop it so it is never forwarded to the
        # constructor of an unversioned data set.
        logging.getLogger(__name__).warning(
            "`%s` attribute removed from data set configuration since it is a "
            "reserved word and cannot be directly specified",
            VERSION_KEY,
        )
        del config[VERSION_KEY]

    # A truthy `versioned` flag turns into a concrete Version object that
    # the versioned data set constructor understands.
    if config.pop(VERSIONED_FLAG_KEY, False):
        config[VERSION_KEY] = Version(load_version, save_version)

    return dataset_type, config
|
def parse_dataset_definition(
    config: Dict[str, Any], load_version: str = None, save_version: str = None
) -> Tuple[Type[AbstractDataSet], Dict]:
    """Parse and instantiate a dataset class using the configuration provided.

    Args:
        config: Data set config dictionary. It *must* contain the `type` key
            with fully qualified class name.
        load_version: Version string to be used for ``load`` operation if
            the data set is versioned. Has no effect on the data set
            if versioning was not enabled.
        save_version: Version string to be used for ``save`` operation if
            the data set is versioned. Has no effect on the data set
            if versioning was not enabled.

    Raises:
        DataSetError: If the function fails to parse the configuration provided.

    Returns:
        2-tuple: (Dataset class object, configuration dictionary)
    """
    # Default the save version to a fresh timestamp so versioned saves always
    # get a sortable identifier.
    save_version = save_version or generate_timestamp()
    # Work on a copy: keys are popped below and the caller's dict must not
    # be mutated.
    config = copy.deepcopy(config)
    if "type" not in config:
        raise DataSetError("`type` is missing from DataSet catalog configuration")
    class_obj = config.pop("type")
    if isinstance(class_obj, str):
        # Resolve a dotted class path; bare class names are looked up in the
        # default `kedro.io` namespace.
        try:
            class_obj = load_obj(class_obj, "kedro.io")
        except ImportError:
            raise DataSetError(
                "Cannot import module when trying to load type `{}`.".format(class_obj)
            )
        except AttributeError:
            raise DataSetError("Class `{}` not found.".format(class_obj))
    if not issubclass(class_obj, AbstractDataSet):
        raise DataSetError(
            "DataSet type `{}.{}` is invalid: all data set types must extend "
            "`AbstractDataSet`.".format(class_obj.__module__, class_obj.__qualname__)
        )
    if VERSION_KEY in config:
        # remove "version" key so that it's not passed
        # to the "unversioned" data set constructor
        message = (
            "`%s` attribute removed from data set configuration since it is a "
            "reserved word and cannot be directly specified"
        )
        logging.getLogger(__name__).warning(message, VERSION_KEY)
        del config[VERSION_KEY]
    if config.pop(VERSIONED_FLAG_KEY, False):  # data set is versioned
        # A truthy `versioned` flag becomes a concrete Version object.
        config[VERSION_KEY] = Version(load_version, save_version)
    return class_obj, config
|
https://github.com/quantumblacklabs/kedro/issues/374
|
/home/markf94/.../kedro-project/.venv/bin/python -m piptools compile /home/markf94/.../kedro-project/src/requirements.in
Traceback (most recent call last):
File "/usr/lib64/python3.7/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/lib64/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/__main__.py", line 17, in <module>
cli()
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/markf94.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/decorators.py", line 21, in new_func
return f(get_current_context(), *args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/scripts/compile.py", line 304, in cli
for ireq in filter(is_pinned_requirement, ireqs):
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/utils.py", line 122, in is_pinned_requirement
if ireq.editable:
AttributeError: 'ParsedRequirement' object has no attribute 'editable'
|
AttributeError
|
def __init__(  # pylint: disable=too-many-arguments
    self,
    path: str,
    dataset: Union[str, Type[AbstractDataSet], Dict[str, Any]],
    filepath_arg: str = "filepath",
    filename_suffix: str = "",
    credentials: Dict[str, Any] = None,
    load_args: Dict[str, Any] = None,
):
    """Creates a new instance of ``PartitionedDataSet``.

    Args:
        path: Path to the folder containing partitioned data.
            If path starts with the protocol (e.g., ``s3://``) then the
            corresponding ``fsspec`` concrete filesystem implementation will
            be used. If protocol is not specified,
            ``fsspec.implementations.local.LocalFileSystem`` will be used.
            **Note:** Some concrete implementations are bundled with ``fsspec``,
            while others (like ``s3`` or ``gcs``) must be installed separately
            prior to usage of the ``PartitionedDataSet``.
        dataset: Underlying dataset definition. This is used to instantiate
            the dataset for each file located inside the ``path``.
            Accepted formats are:
            a) object of a class that inherits from ``AbstractDataSet``
            b) a string representing a fully qualified class name to such class
            c) a dictionary with ``type`` key pointing to a string from b),
            other keys are passed to the Dataset initializer.
            Credentials for the dataset can be explicitly specified in
            this configuration.
        filepath_arg: Underlying dataset initializer argument that will
            contain a path to each corresponding partition file.
            If unspecified, defaults to "filepath".
        filename_suffix: If specified, only partitions that end with this
            string will be processed.
        credentials: Protocol-specific options that will be passed to
            ``fsspec.filesystem``
            https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.filesystem
            and the dataset initializer. If the dataset config contains
            explicit credentials spec, then such spec will take precedence.
            **Note:** ``dataset_credentials`` key has now been deprecated
            and should not be specified.
            All possible credentials management scenarios are documented here:
            https://kedro.readthedocs.io/en/latest/04_user_guide/08_advanced_io.html#partitioned-dataset-credentials
        load_args: Keyword arguments to be passed into ``find()`` method of
            the filesystem implementation.

    Raises:
        DataSetError: If versioning is enabled for the underlying dataset.
    """
    super().__init__()
    self._path = path
    self._filename_suffix = filename_suffix
    # Derive the fsspec protocol (e.g. "s3", "file") from the path prefix.
    self._protocol = infer_storage_options(self._path)["protocol"]
    # Normalise string/class shorthands to dict form so every definition goes
    # through the same parsing path.
    dataset = dataset if isinstance(dataset, dict) else {"type": dataset}
    self._dataset_type, self._dataset_config = parse_dataset_definition(dataset)
    if VERSION_KEY in self._dataset_config:
        raise DataSetError(
            "`{}` does not support versioning of the underlying dataset. "
            "Please remove `{}` flag from the dataset definition.".format(
                self.__class__.__name__, VERSIONED_FLAG_KEY
            )
        )
    # Split top-level credentials into filesystem credentials and (possibly
    # deprecated `dataset_credentials`-sourced) dataset credentials.
    self._credentials, dataset_credentials = _split_credentials(credentials)
    if dataset_credentials:
        if CREDENTIALS_KEY in self._dataset_config:
            # Explicit credentials in the dataset config win over top-level
            # credentials; only warn about the ignored top-level ones.
            self._logger.warning(
                "Top-level credentials will not propagate into the "
                "underlying dataset since credentials were explicitly "
                "defined in the dataset config."
            )
        else:
            self._dataset_config[CREDENTIALS_KEY] = dataset_credentials
    self._filepath_arg = filepath_arg
    if self._filepath_arg in self._dataset_config:
        warn(
            "`{}` key must not be specified in the dataset definition as it "
            "will be overwritten by partition path".format(self._filepath_arg)
        )
    self._load_args = deepcopy(load_args) or {}
    self._sep = self._filesystem.sep
    # since some filesystem implementations may implement a global cache
    self.invalidate_cache()
|
def __init__(  # pylint: disable=too-many-arguments
    self,
    path: str,
    dataset: Union[str, Type[AbstractDataSet], Dict[str, Any]],
    filepath_arg: str = "filepath",
    filename_suffix: str = "",
    credentials: Dict[str, Any] = None,
    load_args: Dict[str, Any] = None,
):
    """Creates a new instance of ``PartitionedDataSet``.

    Args:
        path: Path to the folder containing partitioned data.
            If path starts with the protocol (e.g., ``s3://``) then the
            corresponding ``fsspec`` concrete filesystem implementation will
            be used. If protocol is not specified,
            ``fsspec.implementations.local.LocalFileSystem`` will be used.
            **Note:** Some concrete implementations are bundled with ``fsspec``,
            while others (like ``s3`` or ``gcs``) must be installed separately
            prior to usage of the ``PartitionedDataSet``.
        dataset: Underlying dataset definition. This is used to instantiate
            the dataset for each file located inside the ``path``.
            Accepted formats are:
            a) object of a class that inherits from ``AbstractDataSet``
            b) a string representing a fully qualified class name to such class
            c) a dictionary with ``type`` key pointing to a string from b),
            other keys are passed to the Dataset initializer.
            **Note:** Credentials resolution is *not* currently supported
            for the underlying dataset definition.
        filepath_arg: Underlying dataset initializer argument that will
            contain a path to each corresponding partition file.
            If unspecified, defaults to "filepath".
        filename_suffix: If specified, only partitions that end with this
            string will be processed.
        credentials: Protocol-specific options that will be passed to
            ``fsspec.filesystem`` call:
            https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.filesystem
            _and_ also to the underlying dataset initializer. If
            ``dataset_credentials`` key is present in this dictionary, then
            only its value will be passed to the dataset initializer ``credentials``
            argument instead of the copy of the entire dictionary.
            Example 1: If ``credentials = {"k1": "secret1"}``, then filesystem
            is called as ``filesystem(..., k1="secret1")``, the dataset is
            instantiated as
            ``dataset_class(..., credentials={"k1": "secret1"})``.
            Example 2: If
            ``credentials = {"k1": "secret1", "dataset_credentials": {"k2": "secret2"}}``,
            then filesystem is called as ``filesystem(..., k1="secret1")``,
            the dataset is instantiated as
            ``dataset_class(..., credentials={"k2": "secret2"})``.
            Example 3: If
            ``credentials = {"dataset_credentials": {"k2": "secret2"}}``,
            then credentials are not passed to the filesystem call, the dataset
            is instantiated as
            ``dataset_class(..., credentials={"k2": "secret2"})``.
            Example 4: If
            ``credentials = {"k1": "secret1", "dataset_credentials": None}``,
            then filesystem is called as ``filesystem(..., k1="secret1")``,
            credentials are not passed to the dataset initializer.
        load_args: Keyword arguments to be passed into ``find()`` method of
            the filesystem implementation.

    Raises:
        DataSetError: If versioning is enabled for the underlying dataset.
    """
    super().__init__()
    self._path = path
    self._filename_suffix = filename_suffix
    # Derive the fsspec protocol (e.g. "s3", "file") from the path prefix.
    self._protocol = infer_storage_options(self._path)["protocol"]
    # Normalise string/class shorthands to dict form so every definition goes
    # through the same parsing path.
    dataset = dataset if isinstance(dataset, dict) else {"type": dataset}
    self._dataset_type, self._dataset_config = parse_dataset_definition(dataset)
    if VERSION_KEY in self._dataset_config:
        raise DataSetError(
            "`{}` does not support versioning of the underlying dataset. "
            "Please remove `{}` flag from the dataset definition.".format(
                self.__class__.__name__, VERSIONED_FLAG_KEY
            )
        )
    # Dataset credentials must come through the `dataset_credentials` key of
    # the top-level `credentials` dict, never directly in the dataset config.
    if CREDENTIALS_KEY in self._dataset_config:
        raise DataSetError(
            "Credentials for the underlying dataset must not be specified "
            "explicitly in dataset configuration. Please put those under "
            "`dataset_credentials` key in a dictionary and pass as "
            "`credentials` argument to {} initializer.".format(self.__class__.__name__)
        )
    # Split top-level credentials into filesystem credentials and dataset
    # credentials (see the Examples in the docstring above).
    self._credentials, dataset_credentials = _split_credentials(credentials)
    if dataset_credentials:
        self._dataset_config[CREDENTIALS_KEY] = dataset_credentials
    self._filepath_arg = filepath_arg
    if self._filepath_arg in self._dataset_config:
        warn(
            "`{}` key must not be specified in the dataset definition as it "
            "will be overwritten by partition path".format(self._filepath_arg)
        )
    self._load_args = deepcopy(load_args) or {}
    self._sep = self._filesystem.sep
    # since some filesystem implementations may implement a global cache
    self.invalidate_cache()
|
https://github.com/quantumblacklabs/kedro/issues/374
|
/home/markf94/.../kedro-project/.venv/bin/python -m piptools compile /home/markf94/.../kedro-project/src/requirements.in
Traceback (most recent call last):
File "/usr/lib64/python3.7/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/lib64/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/__main__.py", line 17, in <module>
cli()
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/markf94.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/decorators.py", line 21, in new_func
return f(get_current_context(), *args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/scripts/compile.py", line 304, in cli
for ireq in filter(is_pinned_requirement, ireqs):
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/utils.py", line 122, in is_pinned_requirement
if ireq.editable:
AttributeError: 'ParsedRequirement' object has no attribute 'editable'
|
AttributeError
|
def _split_credentials(
    credentials: Union[Dict[str, Any], None],
) -> Tuple[Dict[str, Any], Any]:
    """Split credentials into filesystem credentials and dataset credentials.

    Returns a 2-tuple ``(filesystem_credentials, dataset_credentials)``.
    When the deprecated ``dataset_credentials`` key is present, its value is
    extracted as the dataset credentials and a ``DeprecationWarning`` is
    emitted; otherwise the dataset receives a copy of the full dict.
    """
    credentials = deepcopy(credentials) or {}
    if DATASET_CREDENTIALS_KEY not in credentials:
        # No explicit dataset credentials: the dataset gets its own copy of
        # the entire credentials dict.
        return credentials, deepcopy(credentials)
    warn(
        "Support for `{}` key in the credentials is now deprecated and will be "
        "removed in the next version. Please specify the dataset credentials "
        "explicitly inside the dataset config.".format(DATASET_CREDENTIALS_KEY),
        DeprecationWarning,
    )
    dataset_credentials = credentials.pop(DATASET_CREDENTIALS_KEY)
    return credentials, dataset_credentials
|
def _split_credentials(
    credentials: Union[Dict[str, Any], None],
) -> Tuple[Dict[str, Any], Any]:
    """Split credentials into filesystem credentials and dataset credentials.

    Returns a 2-tuple ``(filesystem_credentials, dataset_credentials)``.
    The value under the ``dataset_credentials`` key, if present, is removed
    from the filesystem credentials and returned as the dataset credentials;
    otherwise the dataset receives a copy of the full dict.
    """
    credentials = deepcopy(credentials) or {}
    if DATASET_CREDENTIALS_KEY in credentials:
        dataset_credentials = credentials.pop(DATASET_CREDENTIALS_KEY)
    else:
        # Only deep-copy when actually needed (the original evaluated the
        # pop() default eagerly; the returned values are identical).
        dataset_credentials = deepcopy(credentials)
    return credentials, dataset_credentials
|
https://github.com/quantumblacklabs/kedro/issues/374
|
/home/markf94/.../kedro-project/.venv/bin/python -m piptools compile /home/markf94/.../kedro-project/src/requirements.in
Traceback (most recent call last):
File "/usr/lib64/python3.7/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/lib64/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/__main__.py", line 17, in <module>
cli()
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/markf94.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/decorators.py", line 21, in new_func
return f(get_current_context(), *args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/scripts/compile.py", line 304, in cli
for ireq in filter(is_pinned_requirement, ireqs):
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/utils.py", line 122, in is_pinned_requirement
if ireq.editable:
AttributeError: 'ParsedRequirement' object has no attribute 'editable'
|
AttributeError
|
def _list_partitions(self) -> List[str]:
checkpoint = self._read_checkpoint()
checkpoint_path = self._filesystem._strip_protocol( # pylint: disable=protected-access
self._checkpoint_config[self._filepath_arg]
)
def _is_valid_partition(partition) -> bool:
if not partition.endswith(self._filename_suffix):
return False
if partition == checkpoint_path:
return False
if checkpoint is None:
# nothing was processed yet
return True
partition_id = self._path_to_partition(partition)
return self._comparison_func(partition_id, checkpoint)
return [
part
for part in sorted(self._filesystem.find(self._path, **self._load_args))
if _is_valid_partition(part)
]
|
def _list_partitions(self) -> List[str]:
return [
path
for path in self._filesystem.find(self._path, **self._load_args)
if path.endswith(self._filename_suffix)
]
|
https://github.com/quantumblacklabs/kedro/issues/374
|
/home/markf94/.../kedro-project/.venv/bin/python -m piptools compile /home/markf94/.../kedro-project/src/requirements.in
Traceback (most recent call last):
File "/usr/lib64/python3.7/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/lib64/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/__main__.py", line 17, in <module>
cli()
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/markf94.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/decorators.py", line 21, in new_func
return f(get_current_context(), *args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/scripts/compile.py", line 304, in cli
for ireq in filter(is_pinned_requirement, ireqs):
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/utils.py", line 122, in is_pinned_requirement
if ireq.editable:
AttributeError: 'ParsedRequirement' object has no attribute 'editable'
|
AttributeError
|
def _load(self) -> Dict[str, Callable[[], Any]]:
partitions = {}
for partition in self._list_partitions():
partition_id = self._path_to_partition(partition)
kwargs = deepcopy(self._dataset_config)
# join the protocol back since PySpark may rely on it
kwargs[self._filepath_arg] = self._join_protocol(partition)
partitions[partition_id] = self._dataset_type( # type: ignore
**kwargs
).load()
return partitions
|
def _load(self) -> Dict[str, Callable[[], Any]]:
partitions = {}
for partition in self._list_partitions():
kwargs = deepcopy(self._dataset_config)
# join the protocol back since PySpark may rely on it
kwargs[self._filepath_arg] = self._join_protocol(partition)
dataset = self._dataset_type(**kwargs) # type: ignore
partition_id = self._path_to_partition(partition)
partitions[partition_id] = dataset.load
if not partitions:
raise DataSetError("No partitions found in `{}`".format(self._path))
return partitions
|
https://github.com/quantumblacklabs/kedro/issues/374
|
/home/markf94/.../kedro-project/.venv/bin/python -m piptools compile /home/markf94/.../kedro-project/src/requirements.in
Traceback (most recent call last):
File "/usr/lib64/python3.7/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/lib64/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/__main__.py", line 17, in <module>
cli()
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/markf94.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/decorators.py", line 21, in new_func
return f(get_current_context(), *args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/scripts/compile.py", line 304, in cli
for ireq in filter(is_pinned_requirement, ireqs):
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/utils.py", line 122, in is_pinned_requirement
if ireq.editable:
AttributeError: 'ParsedRequirement' object has no attribute 'editable'
|
AttributeError
|
def __init__(
    self,
    func: Callable,
    inputs: Union[None, str, List[str], Dict[str, str]],
    outputs: Union[None, str, List[str], Dict[str, str]],
    *,
    name: str = None,
    tags: Union[str, Iterable[str]] = None,
    decorators: Iterable[Callable] = None,
    confirms: Union[str, List[str]] = None,
):
    """Create a node in the pipeline by providing a function to be called
    along with variable names for inputs and/or outputs.

    Args:
        func: A function that corresponds to the node logic.
            The function should have at least one input or output.
        inputs: The name or the list of the names of variables used as
            inputs to the function. The number of names should match
            the number of arguments in the definition of the provided
            function. When Dict[str, str] is provided, variable names
            will be mapped to function argument names.
        outputs: The name or the list of the names of variables used
            as outputs to the function. The number of names should match
            the number of outputs returned by the provided function.
            When Dict[str, str] is provided, variable names will be mapped
            to the named outputs the function returns.
        name: Optional node name to be used when displaying the node in
            logs or any other visualisations.
        tags: Optional set of tags to be applied to the node.
        decorators: Optional list of decorators to be applied to the node.
        confirms: Optional name or the list of the names of the datasets
            that should be confirmed. This will result in calling
            ``confirm()`` method of the corresponding data set instance.
            Specified dataset names do not necessarily need to be present
            in the node ``inputs`` or ``outputs``.

    Raises:
        ValueError: Raised in the following cases:
            a) When the provided arguments do not conform to
            the format suggested by the type hint of the argument.
            b) When the node produces multiple outputs with the same name.
            c) An input has the same name as an output.
    """
    # Validate eagerly so a malformed node fails at construction time
    # rather than when the pipeline is run.
    if not callable(func):
        raise ValueError(
            _node_error_message(
                "first argument must be a function, not `{}`.".format(
                    type(func).__name__
                )
            )
        )
    if inputs and not isinstance(inputs, (list, dict, str)):
        raise ValueError(
            _node_error_message(
                "`inputs` type must be one of [String, List, Dict, None], "
                "not `{}`.".format(type(inputs).__name__)
            )
        )
    if outputs and not isinstance(outputs, (list, dict, str)):
        raise ValueError(
            _node_error_message(
                "`outputs` type must be one of [String, List, Dict, None], "
                "not `{}`.".format(type(outputs).__name__)
            )
        )
    if not inputs and not outputs:
        raise ValueError(
            _node_error_message("it must have some `inputs` or `outputs`.")
        )
    # Presumably checks the declared inputs against func's signature;
    # see _validate_inputs for the exact rules.
    self._validate_inputs(func, inputs)
    self._func = func
    self._inputs = inputs
    self._outputs = outputs
    self._name = name
    # _to_list normalises None / str / iterable before the set() conversion.
    self._tags = set(_to_list(tags))
    self._decorators = list(decorators or [])
    # These checks read self._inputs/self._outputs, so they must run
    # after the assignments above.
    self._validate_unique_outputs()
    self._validate_inputs_dif_than_outputs()
    self._confirms = confirms
|
def __init__(
    self,
    func: Callable,
    inputs: Union[None, str, List[str], Dict[str, str]],
    outputs: Union[None, str, List[str], Dict[str, str]],
    *,
    name: str = None,
    tags: Union[str, Iterable[str]] = None,
    decorators: Iterable[Callable] = None,
):
    """Create a node in the pipeline by providing a function to be called
    along with variable names for inputs and/or outputs.

    Args:
        func: A function that corresponds to the node logic.
            The function should have at least one input or output.
        inputs: The name or the list of the names of variables used as
            inputs to the function. The number of names should match
            the number of arguments in the definition of the provided
            function. When Dict[str, str] is provided, variable names
            will be mapped to function argument names.
        outputs: The name or the list of the names of variables used
            as outputs to the function. The number of names should match
            the number of outputs returned by the provided function.
            When Dict[str, str] is provided, variable names will be mapped
            to the named outputs the function returns.
        name: Optional node name to be used when displaying the node in
            logs or any other visualisations.
        tags: Optional set of tags to be applied to the node.
        decorators: Optional list of decorators to be applied to the node.

    Raises:
        ValueError: Raised in the following cases:
            a) When the provided arguments do not conform to
            the format suggested by the type hint of the argument.
            b) When the node produces multiple outputs with the same name.
            c) An input has the same name as an output.
    """
    # Validate eagerly so a malformed node fails at construction time
    # rather than when the pipeline is run.
    if not callable(func):
        raise ValueError(
            _node_error_message(
                "first argument must be a function, not `{}`.".format(
                    type(func).__name__
                )
            )
        )
    if inputs and not isinstance(inputs, (list, dict, str)):
        raise ValueError(
            _node_error_message(
                "`inputs` type must be one of [String, List, Dict, None], "
                "not `{}`.".format(type(inputs).__name__)
            )
        )
    if outputs and not isinstance(outputs, (list, dict, str)):
        raise ValueError(
            _node_error_message(
                "`outputs` type must be one of [String, List, Dict, None], "
                "not `{}`.".format(type(outputs).__name__)
            )
        )
    if not inputs and not outputs:
        raise ValueError(
            _node_error_message("it must have some `inputs` or `outputs`.")
        )
    # Presumably checks the declared inputs against func's signature;
    # see _validate_inputs for the exact rules.
    self._validate_inputs(func, inputs)
    self._func = func
    self._inputs = inputs
    self._outputs = outputs
    self._name = name
    # _to_list normalises None / str / iterable before the set() conversion.
    self._tags = set(_to_list(tags))
    self._decorators = list(decorators or [])
    # These checks read self._inputs/self._outputs, so they must run
    # after the assignments above.
    self._validate_unique_outputs()
    self._validate_inputs_dif_than_outputs()
|
https://github.com/quantumblacklabs/kedro/issues/374
|
/home/markf94/.../kedro-project/.venv/bin/python -m piptools compile /home/markf94/.../kedro-project/src/requirements.in
Traceback (most recent call last):
File "/usr/lib64/python3.7/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/lib64/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/__main__.py", line 17, in <module>
cli()
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/markf94.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/decorators.py", line 21, in new_func
return f(get_current_context(), *args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/scripts/compile.py", line 304, in cli
for ireq in filter(is_pinned_requirement, ireqs):
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/utils.py", line 122, in is_pinned_requirement
if ireq.editable:
AttributeError: 'ParsedRequirement' object has no attribute 'editable'
|
AttributeError
|
def _copy(self, **overwrite_params):
    """Clone this node, replacing any constructor arguments given in
    ``overwrite_params`` and keeping the rest from the current instance.
    """
    defaults = dict(
        func=self._func,
        inputs=self._inputs,
        outputs=self._outputs,
        name=self._name,
        tags=self._tags,
        decorators=self._decorators,
        confirms=self._confirms,
    )
    return Node(**{**defaults, **overwrite_params})
|
def _copy(self, **overwrite_params):
    """Clone this node, replacing any constructor arguments given in
    ``overwrite_params`` and keeping the rest from the current instance.
    """
    defaults = dict(
        func=self._func,
        inputs=self._inputs,
        outputs=self._outputs,
        name=self._name,
        tags=self._tags,
        decorators=self._decorators,
    )
    return Node(**{**defaults, **overwrite_params})
|
https://github.com/quantumblacklabs/kedro/issues/374
|
/home/markf94/.../kedro-project/.venv/bin/python -m piptools compile /home/markf94/.../kedro-project/src/requirements.in
Traceback (most recent call last):
File "/usr/lib64/python3.7/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/lib64/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/__main__.py", line 17, in <module>
cli()
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/markf94.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/decorators.py", line 21, in new_func
return f(get_current_context(), *args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/scripts/compile.py", line 304, in cli
for ireq in filter(is_pinned_requirement, ireqs):
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/utils.py", line 122, in is_pinned_requirement
if ireq.editable:
AttributeError: 'ParsedRequirement' object has no attribute 'editable'
|
AttributeError
|
def node(  # pylint: disable=missing-type-doc
    func: Callable,
    inputs: Union[None, str, List[str], Dict[str, str]],
    outputs: Union[None, str, List[str], Dict[str, str]],
    *,
    name: str = None,
    tags: Iterable[str] = None,
    confirms: Union[str, List[str]] = None,
) -> Node:
    """Factory wrapper around the ``Node`` constructor: build a pipeline
    node from a function plus variable names for its inputs and outputs.

    Args:
        func: A function that corresponds to the node logic. The function
            should have at least one input or output.
        inputs: The name or the list of the names of variables used as inputs
            to the function. The number of names should match the number of
            arguments in the definition of the provided function. When
            Dict[str, str] is provided, variable names will be mapped to
            function argument names.
        outputs: The name or the list of the names of variables used as outputs
            to the function. The number of names should match the number of
            outputs returned by the provided function. When Dict[str, str]
            is provided, variable names will be mapped to the named outputs the
            function returns.
        name: Optional node name to be used when displaying the node in logs or
            any other visualisations.
        tags: Optional set of tags to be applied to the node.
        confirms: Optional name or the list of the names of the datasets
            that should be confirmed. This will result in calling ``confirm()``
            method of the corresponding data set instance. Specified dataset
            names do not necessarily need to be present in the node ``inputs``
            or ``outputs``.

    Returns:
        A Node object with mapped inputs, outputs and function.

    Example:
    ::

        >>> import pandas as pd
        >>> import numpy as np
        >>>
        >>> def clean_data(cars: pd.DataFrame,
        >>>                boats: pd.DataFrame) -> Dict[str, pd.DataFrame]:
        >>>     return dict(cars_df=cars.dropna(), boats_df=boats.dropna())
        >>>
        >>> def halve_dataframe(data: pd.DataFrame) -> List[pd.DataFrame]:
        >>>     return np.array_split(data, 2)
        >>>
        >>> nodes = [
        >>>     node(clean_data,
        >>>          inputs=['cars2017', 'boats2017'],
        >>>          outputs=dict(cars_df='clean_cars2017',
        >>>                       boats_df='clean_boats2017')),
        >>>     node(halve_dataframe,
        >>>          'clean_cars2017',
        >>>          ['train_cars2017', 'test_cars2017']),
        >>>     node(halve_dataframe,
        >>>          dict(data='clean_boats2017'),
        >>>          ['train_boats2017', 'test_boats2017'])
        >>> ]
    """
    keyword_args = dict(name=name, tags=tags, confirms=confirms)
    return Node(func, inputs, outputs, **keyword_args)
|
def node(  # pylint: disable=missing-type-doc
    func: Callable,
    inputs: Union[None, str, List[str], Dict[str, str]],
    outputs: Union[None, str, List[str], Dict[str, str]],
    *,
    name: str = None,
    tags: Iterable[str] = None,
) -> Node:
    """Factory wrapper around the ``Node`` constructor: build a pipeline
    node from a function plus variable names for its inputs and outputs.

    Args:
        func: A function that corresponds to the node logic. The function
            should have at least one input or output.
        inputs: The name or the list of the names of variables used as inputs
            to the function. The number of names should match the number of
            arguments in the definition of the provided function. When
            Dict[str, str] is provided, variable names will be mapped to
            function argument names.
        outputs: The name or the list of the names of variables used as outputs
            to the function. The number of names should match the number of
            outputs returned by the provided function. When Dict[str, str]
            is provided, variable names will be mapped to the named outputs the
            function returns.
        name: Optional node name to be used when displaying the node in logs or
            any other visualisations.
        tags: Optional set of tags to be applied to the node.

    Returns:
        A Node object with mapped inputs, outputs and function.

    Example:
    ::

        >>> import pandas as pd
        >>> import numpy as np
        >>>
        >>> def clean_data(cars: pd.DataFrame,
        >>>                boats: pd.DataFrame) -> Dict[str, pd.DataFrame]:
        >>>     return dict(cars_df=cars.dropna(), boats_df=boats.dropna())
        >>>
        >>> def halve_dataframe(data: pd.DataFrame) -> List[pd.DataFrame]:
        >>>     return np.array_split(data, 2)
        >>>
        >>> nodes = [
        >>>     node(clean_data,
        >>>          inputs=['cars2017', 'boats2017'],
        >>>          outputs=dict(cars_df='clean_cars2017',
        >>>                       boats_df='clean_boats2017')),
        >>>     node(halve_dataframe,
        >>>          'clean_cars2017',
        >>>          ['train_cars2017', 'test_cars2017']),
        >>>     node(halve_dataframe,
        >>>          dict(data='clean_boats2017'),
        >>>          ['train_boats2017', 'test_boats2017'])
        >>> ]
    """
    keyword_args = dict(name=name, tags=tags)
    return Node(func, inputs, outputs, **keyword_args)
|
https://github.com/quantumblacklabs/kedro/issues/374
|
/home/markf94/.../kedro-project/.venv/bin/python -m piptools compile /home/markf94/.../kedro-project/src/requirements.in
Traceback (most recent call last):
File "/usr/lib64/python3.7/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/lib64/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/__main__.py", line 17, in <module>
cli()
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/markf94.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/decorators.py", line 21, in new_func
return f(get_current_context(), *args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/scripts/compile.py", line 304, in cli
for ireq in filter(is_pinned_requirement, ireqs):
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/utils.py", line 122, in is_pinned_requirement
if ireq.editable:
AttributeError: 'ParsedRequirement' object has no attribute 'editable'
|
AttributeError
|
def __init__(
    self,
    nodes: Iterable[Union[Node, "Pipeline"]],
    *,
    name: str = None,
    tags: Union[str, Iterable[str]] = None,
):  # pylint: disable=missing-type-doc
    """Initialise ``Pipeline`` with a list of ``Node`` instances.

    Args:
        nodes: The list of nodes the ``Pipeline`` will be made of. If you
            provide pipelines among the list of nodes, those pipelines will
            be expanded and all their nodes will become part of this
            new pipeline.
        name: (DEPRECATED, use `tags` method instead) The name of the pipeline.
            If specified, this name will be used to tag all of the nodes
            in the pipeline.
        tags: Optional set of tags to be applied to all the pipeline nodes.

    Raises:
        ValueError:
            When an empty list of nodes is provided, or when not all
            nodes have unique names.
        CircularDependencyError:
            When visiting all the nodes is not
            possible due to the existence of a circular dependency.
        OutputNotUniqueError:
            When multiple ``Node`` instances produce the same output.
        ConfirmNotUniqueError:
            When multiple ``Node`` instances attempt to confirm the same
            dataset.

    Example:
    ::

        >>> from kedro.pipeline import Pipeline
        >>> from kedro.pipeline import node
        >>>
        >>> # In the following scenario first_ds and second_ds
        >>> # are data sets provided by io. Pipeline will pass these
        >>> # data sets to first_node function and provides the result
        >>> # to the second_node as input.
        >>>
        >>> def first_node(first_ds, second_ds):
        >>>     return dict(third_ds=first_ds+second_ds)
        >>>
        >>> def second_node(third_ds):
        >>>     return third_ds
        >>>
        >>> pipeline = Pipeline([
        >>>     node(first_node, ['first_ds', 'second_ds'], ['third_ds']),
        >>>     node(second_node, dict(third_ds='third_ds'), 'fourth_ds')])
        >>>
        >>> pipeline.describe()
        >>>
    """
    _validate_no_node_list(nodes)
    # Flatten: any Pipeline passed in contributes all of its nodes.
    nodes = list(
        chain.from_iterable([[n] if isinstance(n, Node) else n.nodes for n in nodes])
    )
    _validate_duplicate_nodes(nodes)
    _validate_transcoded_inputs_outputs(nodes)
    _tags = set(_to_list(tags))
    if name:
        warnings.warn(
            "`name` parameter is deprecated for the `Pipeline`"
            " constructor, use `Pipeline.tag` method instead",
            DeprecationWarning,
        )
        # Legacy behaviour: the pipeline name doubles as a tag on every node.
        _tags.add(name)
    nodes = [n.tag(_tags) for n in nodes]
    self._name = name
    self._nodes_by_name = {node.name: node for node in nodes}
    _validate_unique_outputs(nodes)
    _validate_unique_confirms(nodes)
    # input -> nodes with input; keys are transcode-normalised names.
    self._nodes_by_input = defaultdict(set)  # type: Dict[str, Set[Node]]
    for node in nodes:
        for input_ in node.inputs:
            self._nodes_by_input[_get_transcode_compatible_name(input_)].add(node)
    # output -> node with output; unique by the validation above.
    self._nodes_by_output = {}  # type: Dict[str, Node]
    for node in nodes:
        for output in node.outputs:
            self._nodes_by_output[_get_transcode_compatible_name(output)] = node
    self._nodes = nodes
    # Cache the topological order; per the Raises section this is where a
    # CircularDependencyError surfaces.
    self._topo_sorted_nodes = _topologically_sorted(self.node_dependencies)
|
def __init__(
    self,
    nodes: Iterable[Union[Node, "Pipeline"]],
    *,
    name: str = None,
    tags: Union[str, Iterable[str]] = None,
):  # pylint: disable=missing-type-doc
    """Initialise ``Pipeline`` with a list of ``Node`` instances.

    Args:
        nodes: The list of nodes the ``Pipeline`` will be made of. If you
            provide pipelines among the list of nodes, those pipelines will
            be expanded and all their nodes will become part of this
            new pipeline.
        name: (DEPRECATED, use `tags` method instead) The name of the pipeline.
            If specified, this name will be used to tag all of the nodes
            in the pipeline.
        tags: Optional set of tags to be applied to all the pipeline nodes.

    Raises:
        ValueError:
            When an empty list of nodes is provided, or when not all
            nodes have unique names.
        CircularDependencyError:
            When visiting all the nodes is not
            possible due to the existence of a circular dependency.
        OutputNotUniqueError:
            When multiple ``Node`` instances produce the same output.

    Example:
    ::

        >>> from kedro.pipeline import Pipeline
        >>> from kedro.pipeline import node
        >>>
        >>> # In the following scenario first_ds and second_ds
        >>> # are data sets provided by io. Pipeline will pass these
        >>> # data sets to first_node function and provides the result
        >>> # to the second_node as input.
        >>>
        >>> def first_node(first_ds, second_ds):
        >>>     return dict(third_ds=first_ds+second_ds)
        >>>
        >>> def second_node(third_ds):
        >>>     return third_ds
        >>>
        >>> pipeline = Pipeline([
        >>>     node(first_node, ['first_ds', 'second_ds'], ['third_ds']),
        >>>     node(second_node, dict(third_ds='third_ds'), 'fourth_ds')])
        >>>
        >>> pipeline.describe()
        >>>
    """
    _validate_no_node_list(nodes)
    # Flatten: any Pipeline passed in contributes all of its nodes.
    nodes = list(
        chain.from_iterable([[n] if isinstance(n, Node) else n.nodes for n in nodes])
    )
    _validate_duplicate_nodes(nodes)
    _validate_transcoded_inputs_outputs(nodes)
    _tags = set(_to_list(tags))
    if name:
        warnings.warn(
            "`name` parameter is deprecated for the `Pipeline`"
            " constructor, use `Pipeline.tag` method instead",
            DeprecationWarning,
        )
        # Legacy behaviour: the pipeline name doubles as a tag on every node.
        _tags.add(name)
    nodes = [n.tag(_tags) for n in nodes]
    self._name = name
    self._nodes_by_name = {node.name: node for node in nodes}
    _validate_unique_outputs(nodes)
    # input -> nodes with input; keys are transcode-normalised names.
    self._nodes_by_input = defaultdict(set)  # type: Dict[str, Set[Node]]
    for node in nodes:
        for input_ in node.inputs:
            self._nodes_by_input[_get_transcode_compatible_name(input_)].add(node)
    # output -> node with output; unique by the validation above.
    self._nodes_by_output = {}  # type: Dict[str, Node]
    for node in nodes:
        for output in node.outputs:
            self._nodes_by_output[_get_transcode_compatible_name(output)] = node
    self._nodes = nodes
    # Cache the topological order; per the Raises section this is where a
    # CircularDependencyError surfaces.
    self._topo_sorted_nodes = _topologically_sorted(self.node_dependencies)
|
https://github.com/quantumblacklabs/kedro/issues/374
|
/home/markf94/.../kedro-project/.venv/bin/python -m piptools compile /home/markf94/.../kedro-project/src/requirements.in
Traceback (most recent call last):
File "/usr/lib64/python3.7/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/lib64/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/__main__.py", line 17, in <module>
cli()
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/markf94.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/decorators.py", line 21, in new_func
return f(get_current_context(), *args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/scripts/compile.py", line 304, in cli
for ireq in filter(is_pinned_requirement, ireqs):
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/utils.py", line 122, in is_pinned_requirement
if ireq.editable:
AttributeError: 'ParsedRequirement' object has no attribute 'editable'
|
AttributeError
|
def _validate_unique_outputs(nodes: List[Node]) -> None:
    """Raise ``OutputNotUniqueError`` if two nodes declare the same output
    (after transcode normalisation)."""
    counts = Counter(
        _get_transcode_compatible_name(out)
        for nd in nodes
        for out in nd.outputs
    )
    repeated = [label for label, seen in counts.items() if seen > 1]
    if repeated:
        raise OutputNotUniqueError(
            "Output(s) {} are returned by more than one nodes. Node "
            "outputs must be unique.".format(sorted(repeated))
        )
|
def _validate_unique_outputs(nodes: List[Node]) -> None:
    """Ensure no two nodes produce the same (transcode-normalised) output.

    Args:
        nodes: The nodes whose declared outputs are checked.

    Raises:
        OutputNotUniqueError: If any output name is produced by more than
            one node.
    """
    outputs = chain.from_iterable(node.outputs for node in nodes)
    outputs = map(_get_transcode_compatible_name, outputs)
    # Counter yields per-name frequencies directly; names seen more than
    # once are duplicates. This streams the outputs instead of building
    # two intermediate lists and two Counters as before.
    duplicates = [key for key, count in Counter(outputs).items() if count > 1]
    if duplicates:
        raise OutputNotUniqueError(
            "Output(s) {} are returned by "
            "more than one nodes. Node "
            "outputs must be unique.".format(sorted(duplicates))
        )
|
https://github.com/quantumblacklabs/kedro/issues/374
|
/home/markf94/.../kedro-project/.venv/bin/python -m piptools compile /home/markf94/.../kedro-project/src/requirements.in
Traceback (most recent call last):
File "/usr/lib64/python3.7/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/lib64/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/__main__.py", line 17, in <module>
cli()
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/markf94.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/decorators.py", line 21, in new_func
return f(get_current_context(), *args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/scripts/compile.py", line 304, in cli
for ireq in filter(is_pinned_requirement, ireqs):
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/utils.py", line 122, in is_pinned_requirement
if ireq.editable:
AttributeError: 'ParsedRequirement' object has no attribute 'editable'
|
AttributeError
|
def run_node(node: Node, catalog: DataCatalog) -> Node:
    """Run a single `Node` with inputs from and outputs to the `catalog`.

    Loads every declared input, executes the node, saves each output, and
    confirms any datasets the node declares via ``confirms``.

    Args:
        node: The ``Node`` to run.
        catalog: A ``DataCatalog`` containing the node's inputs and outputs.

    Returns:
        The node argument.
    """
    loaded = {}
    for input_name in node.inputs:
        loaded[input_name] = catalog.load(input_name)
    results = node.run(loaded)
    for output_name, value in results.items():
        catalog.save(output_name, value)
    for dataset_name in node.confirms:
        catalog.confirm(dataset_name)
    return node
|
def run_node(node: Node, catalog: DataCatalog) -> Node:
    """Run a single `Node` with inputs from and outputs to the `catalog`.

    Args:
        node: The ``Node`` to run.
        catalog: A ``DataCatalog`` containing the node's inputs and outputs.

    Returns:
        The node argument.
    """
    # Materialise the node's inputs from the catalog, execute, then persist.
    loaded = {input_name: catalog.load(input_name) for input_name in node.inputs}
    results = node.run(loaded)
    for output_name, output_data in results.items():
        catalog.save(output_name, output_data)
    return node
|
https://github.com/quantumblacklabs/kedro/issues/374
|
/home/markf94/.../kedro-project/.venv/bin/python -m piptools compile /home/markf94/.../kedro-project/src/requirements.in
Traceback (most recent call last):
File "/usr/lib64/python3.7/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/lib64/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/__main__.py", line 17, in <module>
cli()
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/markf94.../kedro-project/.venv/lib/python3.7/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/click/decorators.py", line 21, in new_func
return f(get_current_context(), *args, **kwargs)
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/scripts/compile.py", line 304, in cli
for ireq in filter(is_pinned_requirement, ireqs):
File "/home/markf94/.../kedro-project/.venv/lib/python3.7/site-packages/piptools/utils.py", line 122, in is_pinned_requirement
if ireq.editable:
AttributeError: 'ParsedRequirement' object has no attribute 'editable'
|
AttributeError
|
def get_extensions():
    """Assemble the list of C++/CUDA/HIP extension modules for torchvision.

    Builds (in order): the core ``torchvision._C`` extension (CPU, plus CUDA
    or ROCm/HIP sources when a GPU toolchain is detected or FORCE_CUDA=1),
    optional C++ model tests, the ``torchvision.image`` extension when
    libPNG >= 1.6.0 and/or libjpeg are found, and the
    ``torchvision.video_reader`` extension when an ffmpeg installation with
    the required headers is found.

    Environment variables consulted: WITH_CPP_MODELS_TEST, FORCE_CUDA,
    NVCC_FLAGS, DEBUG, TORCHVISION_INCLUDE, TORCHVISION_LIBRARY.

    Returns:
        A list of ``setuptools`` extension objects to pass to ``setup()``.
    """
    this_dir = os.path.dirname(os.path.abspath(__file__))
    extensions_dir = os.path.join(this_dir, "torchvision", "csrc")
    # Core sources: top-level csrc plus the ops dispatchers.
    main_file = glob.glob(os.path.join(extensions_dir, "*.cpp")) + glob.glob(
        os.path.join(extensions_dir, "ops", "*.cpp")
    )
    source_cpu = glob.glob(
        os.path.join(extensions_dir, "ops", "autograd", "*.cpp")
    ) + glob.glob(os.path.join(extensions_dir, "ops", "cpu", "*.cpp"))
    # ROCm detection: ROCM_HOME only exists in torch >= 1.5.
    is_rocm_pytorch = False
    if torch.__version__ >= "1.5":
        from torch.utils.cpp_extension import ROCM_HOME
        is_rocm_pytorch = (
            True
            if ((torch.version.hip is not None) and (ROCM_HOME is not None))
            else False
        )
    if is_rocm_pytorch:
        # Translate the CUDA sources to HIP in-place, then pick up the results.
        hipify_python.hipify(
            project_directory=this_dir,
            output_directory=this_dir,
            includes="torchvision/csrc/ops/cuda/*",
            show_detailed=True,
            is_pytorch_extension=True,
        )
        source_cuda = glob.glob(os.path.join(extensions_dir, "ops", "hip", "*.hip"))
        # Copy over additional files (headers are not hipified automatically).
        for file in glob.glob(r"torchvision/csrc/ops/cuda/*.h"):
            shutil.copy(file, "torchvision/csrc/ops/hip")
    else:
        source_cuda = glob.glob(os.path.join(extensions_dir, "ops", "cuda", "*.cu"))
    # Autocast wrappers are compiled for both the CUDA and the HIP builds.
    source_cuda += glob.glob(os.path.join(extensions_dir, "ops", "autocast", "*.cpp"))
    sources = main_file + source_cpu
    extension = CppExtension
    compile_cpp_tests = os.getenv("WITH_CPP_MODELS_TEST", "0") == "1"
    if compile_cpp_tests:
        test_dir = os.path.join(this_dir, "test")
        models_dir = os.path.join(this_dir, "torchvision", "csrc", "models")
        test_file = glob.glob(os.path.join(test_dir, "*.cpp"))
        source_models = glob.glob(os.path.join(models_dir, "*.cpp"))
        test_file = [os.path.join(test_dir, s) for s in test_file]
        source_models = [os.path.join(models_dir, s) for s in source_models]
        tests = test_file + source_models
        tests_include_dirs = [test_dir, models_dir]
    define_macros = []
    # "cxx" is always present so later .append() calls (win32 /MP, DEBUG -g/-O0)
    # work even on a CPU-only build.
    extra_compile_args = {"cxx": []}
    if (
        torch.cuda.is_available() and ((CUDA_HOME is not None) or is_rocm_pytorch)
    ) or os.getenv("FORCE_CUDA", "0") == "1":
        extension = CUDAExtension
        sources += source_cuda
        if not is_rocm_pytorch:
            define_macros += [("WITH_CUDA", None)]
            nvcc_flags = os.getenv("NVCC_FLAGS", "")
            if nvcc_flags == "":
                nvcc_flags = []
            else:
                nvcc_flags = nvcc_flags.split(" ")
        else:
            define_macros += [("WITH_HIP", None)]
            nvcc_flags = []
        extra_compile_args["nvcc"] = nvcc_flags
    if sys.platform == "win32":
        define_macros += [("torchvision_EXPORTS", None)]
        extra_compile_args["cxx"].append("/MP")
    debug_mode = os.getenv("DEBUG", "0") == "1"
    if debug_mode:
        print("Compile in debug mode")
        extra_compile_args["cxx"].append("-g")
        extra_compile_args["cxx"].append("-O0")
        if "nvcc" in extra_compile_args:
            # we have to remove "-OX" and "-g" flag if exists and append
            nvcc_flags = extra_compile_args["nvcc"]
            extra_compile_args["nvcc"] = [
                f for f in nvcc_flags if not ("-O" in f or "-g" in f)
            ]
            extra_compile_args["nvcc"].append("-O0")
            extra_compile_args["nvcc"].append("-g")
    sources = [os.path.join(extensions_dir, s) for s in sources]
    include_dirs = [extensions_dir]
    ext_modules = [
        extension(
            "torchvision._C",
            sorted(sources),
            include_dirs=include_dirs,
            define_macros=define_macros,
            extra_compile_args=extra_compile_args,
        )
    ]
    if compile_cpp_tests:
        ext_modules.append(
            extension(
                "torchvision._C_tests",
                tests,
                include_dirs=tests_include_dirs,
                define_macros=define_macros,
                extra_compile_args=extra_compile_args,
            )
        )
    # ------------------- Torchvision extra extensions ------------------------
    vision_include = os.environ.get("TORCHVISION_INCLUDE", None)
    vision_library = os.environ.get("TORCHVISION_LIBRARY", None)
    vision_include = (
        vision_include.split(os.pathsep) if vision_include is not None else []
    )
    vision_library = (
        vision_library.split(os.pathsep) if vision_library is not None else []
    )
    include_dirs += vision_include
    library_dirs = vision_library
    # Image reading extension
    image_macros = []
    image_include = [extensions_dir]
    image_library = []
    image_link_flags = []
    # Locating libPNG
    libpng = distutils.spawn.find_executable("libpng-config")
    pngfix = distutils.spawn.find_executable("pngfix")
    png_found = libpng is not None or pngfix is not None
    print("PNG found: {0}".format(png_found))
    if png_found:
        if libpng is not None:
            # Linux / Mac
            png_version = subprocess.run([libpng, "--version"], stdout=subprocess.PIPE)
            png_version = png_version.stdout.strip().decode("utf-8")
            print("libpng version: {0}".format(png_version))
            png_version = parse_version(png_version)
            if png_version >= parse_version("1.6.0"):
                print("Building torchvision with PNG image support")
                png_lib = subprocess.run([libpng, "--libdir"], stdout=subprocess.PIPE)
                png_lib = png_lib.stdout.strip().decode("utf-8")
                if "disabled" not in png_lib:
                    image_library += [png_lib]
                png_include = subprocess.run(
                    [libpng, "--I_opts"], stdout=subprocess.PIPE
                )
                png_include = png_include.stdout.strip().decode("utf-8")
                _, png_include = png_include.split("-I")
                print("libpng include path: {0}".format(png_include))
                image_include += [png_include]
                image_link_flags.append("png")
            else:
                print(
                    "libpng installed version is less than 1.6.0, disabling PNG support"
                )
                png_found = False
        else:
            # Windows
            png_lib = os.path.join(os.path.dirname(os.path.dirname(pngfix)), "lib")
            png_include = os.path.join(
                os.path.dirname(os.path.dirname(pngfix)), "include", "libpng16"
            )
            image_library += [png_lib]
            image_include += [png_include]
            image_link_flags.append("libpng")
    # Locating libjpeg
    (jpeg_found, jpeg_conda, jpeg_include, jpeg_lib) = find_library(
        "jpeglib", vision_include
    )
    print("JPEG found: {0}".format(jpeg_found))
    image_macros += [("PNG_FOUND", str(int(png_found)))]
    image_macros += [("JPEG_FOUND", str(int(jpeg_found)))]
    if jpeg_found:
        print("Building torchvision with JPEG image support")
        image_link_flags.append("jpeg")
        if jpeg_conda:
            image_library += [jpeg_lib]
            image_include += [jpeg_include]
    image_path = os.path.join(extensions_dir, "io", "image")
    image_src = glob.glob(os.path.join(image_path, "*.cpp")) + glob.glob(
        os.path.join(image_path, "cpu", "*.cpp")
    )
    if png_found or jpeg_found:
        ext_modules.append(
            extension(
                "torchvision.image",
                image_src,
                include_dirs=image_include + include_dirs + [image_path],
                library_dirs=image_library + library_dirs,
                define_macros=image_macros,
                libraries=image_link_flags,
                extra_compile_args=extra_compile_args,
            )
        )
    ffmpeg_exe = distutils.spawn.find_executable("ffmpeg")
    has_ffmpeg = ffmpeg_exe is not None
    print("FFmpeg found: {}".format(has_ffmpeg))
    if has_ffmpeg:
        ffmpeg_libraries = {
            "libavcodec",
            "libavformat",
            "libavutil",
            "libswresample",
            "libswscale",
        }
        # Derive the ffmpeg install root from the executable's location.
        ffmpeg_bin = os.path.dirname(ffmpeg_exe)
        ffmpeg_root = os.path.dirname(ffmpeg_bin)
        ffmpeg_include_dir = os.path.join(ffmpeg_root, "include")
        ffmpeg_library_dir = os.path.join(ffmpeg_root, "lib")
        gcc = distutils.spawn.find_executable("gcc")
        platform_tag = subprocess.run([gcc, "-print-multiarch"], stdout=subprocess.PIPE)
        platform_tag = platform_tag.stdout.strip().decode("utf-8")
        if platform_tag:
            # Most probably a Debian-based distribution
            ffmpeg_include_dir = [
                ffmpeg_include_dir,
                os.path.join(ffmpeg_include_dir, platform_tag),
            ]
            ffmpeg_library_dir = [
                ffmpeg_library_dir,
                os.path.join(ffmpeg_library_dir, platform_tag),
            ]
        else:
            ffmpeg_include_dir = [ffmpeg_include_dir]
            ffmpeg_library_dir = [ffmpeg_library_dir]
        has_ffmpeg = True
        # Require the headers of every ffmpeg component before enabling support.
        for library in ffmpeg_libraries:
            library_found = False
            for search_path in ffmpeg_include_dir + include_dirs:
                full_path = os.path.join(search_path, library, "*.h")
                library_found |= len(glob.glob(full_path)) > 0
            if not library_found:
                # NOTE(review): the {0} placeholder is never filled in — this
                # looks like a missing .format(library); verify upstream.
                print("{0} header files were not found, disabling ffmpeg support")
                has_ffmpeg = False
    if has_ffmpeg:
        print("ffmpeg include path: {}".format(ffmpeg_include_dir))
        print("ffmpeg library_dir: {}".format(ffmpeg_library_dir))
        # TorchVision base decoder + video reader
        video_reader_src_dir = os.path.join(
            this_dir, "torchvision", "csrc", "io", "video_reader"
        )
        video_reader_src = glob.glob(os.path.join(video_reader_src_dir, "*.cpp"))
        base_decoder_src_dir = os.path.join(
            this_dir, "torchvision", "csrc", "io", "decoder"
        )
        base_decoder_src = glob.glob(os.path.join(base_decoder_src_dir, "*.cpp"))
        # Torchvision video API
        videoapi_src_dir = os.path.join(this_dir, "torchvision", "csrc", "io", "video")
        videoapi_src = glob.glob(os.path.join(videoapi_src_dir, "*.cpp"))
        # exclude tests
        base_decoder_src = [x for x in base_decoder_src if "_test.cpp" not in x]
        combined_src = video_reader_src + base_decoder_src + videoapi_src
        ext_modules.append(
            CppExtension(
                "torchvision.video_reader",
                combined_src,
                include_dirs=[
                    base_decoder_src_dir,
                    video_reader_src_dir,
                    videoapi_src_dir,
                    extensions_dir,
                    *ffmpeg_include_dir,
                    *include_dirs,
                ],
                library_dirs=ffmpeg_library_dir + library_dirs,
                libraries=[
                    "avcodec",
                    "avformat",
                    "avutil",
                    "swresample",
                    "swscale",
                ],
                extra_compile_args=["-std=c++14"]
                if os.name != "nt"
                else ["/std:c++14", "/MP"],
                extra_link_args=["-std=c++14" if os.name != "nt" else "/std:c++14"],
            )
        )
    return ext_modules
|
def get_extensions():
    """Assemble the list of C++/CUDA/HIP extension modules for torchvision.

    Builds (in order): the core ``torchvision._C`` extension (CPU, plus CUDA
    or ROCm/HIP sources when a GPU toolchain is detected or FORCE_CUDA=1),
    optional C++ model tests, the ``torchvision.image`` extension when
    libPNG >= 1.6.0 and/or libjpeg are found, and the
    ``torchvision.video_reader`` extension when an ffmpeg installation with
    the required headers is found.

    Environment variables consulted: WITH_CPP_MODELS_TEST, FORCE_CUDA,
    NVCC_FLAGS, DEBUG, TORCHVISION_INCLUDE, TORCHVISION_LIBRARY.

    Returns:
        A list of ``setuptools`` extension objects to pass to ``setup()``.
    """
    this_dir = os.path.dirname(os.path.abspath(__file__))
    extensions_dir = os.path.join(this_dir, "torchvision", "csrc")
    # Core sources: top-level csrc plus the ops dispatchers.
    main_file = glob.glob(os.path.join(extensions_dir, "*.cpp")) + glob.glob(
        os.path.join(extensions_dir, "ops", "*.cpp")
    )
    source_cpu = glob.glob(
        os.path.join(extensions_dir, "ops", "autograd", "*.cpp")
    ) + glob.glob(os.path.join(extensions_dir, "ops", "cpu", "*.cpp"))
    # ROCm detection: ROCM_HOME only exists in torch >= 1.5.
    is_rocm_pytorch = False
    if torch.__version__ >= "1.5":
        from torch.utils.cpp_extension import ROCM_HOME
        is_rocm_pytorch = (
            True
            if ((torch.version.hip is not None) and (ROCM_HOME is not None))
            else False
        )
    if is_rocm_pytorch:
        # Translate the CUDA sources to HIP in-place, then pick up the results.
        hipify_python.hipify(
            project_directory=this_dir,
            output_directory=this_dir,
            includes="torchvision/csrc/ops/cuda/*",
            show_detailed=True,
            is_pytorch_extension=True,
        )
        source_cuda = glob.glob(os.path.join(extensions_dir, "ops", "hip", "*.hip"))
        # Copy over additional files (headers are not hipified automatically).
        for file in glob.glob(r"torchvision/csrc/ops/cuda/*.h"):
            shutil.copy(file, "torchvision/csrc/ops/hip")
    else:
        source_cuda = glob.glob(os.path.join(extensions_dir, "ops", "cuda", "*.cu"))
    # Autocast wrappers are compiled for both the CUDA and the HIP builds.
    source_cuda += glob.glob(os.path.join(extensions_dir, "ops", "autocast", "*.cpp"))
    sources = main_file + source_cpu
    extension = CppExtension
    compile_cpp_tests = os.getenv("WITH_CPP_MODELS_TEST", "0") == "1"
    if compile_cpp_tests:
        test_dir = os.path.join(this_dir, "test")
        models_dir = os.path.join(this_dir, "torchvision", "csrc", "models")
        test_file = glob.glob(os.path.join(test_dir, "*.cpp"))
        source_models = glob.glob(os.path.join(models_dir, "*.cpp"))
        test_file = [os.path.join(test_dir, s) for s in test_file]
        source_models = [os.path.join(models_dir, s) for s in source_models]
        tests = test_file + source_models
        tests_include_dirs = [test_dir, models_dir]
    define_macros = []
    # BUG FIX: always seed the "cxx" key. Previously this was a bare {} that
    # only gained "cxx" inside the CUDA branch (or via setdefault on win32),
    # so a CPU-only Linux/Mac build with DEBUG=1 crashed with KeyError: 'cxx'
    # on extra_compile_args["cxx"].append("-g").
    extra_compile_args = {"cxx": []}
    if (
        torch.cuda.is_available() and ((CUDA_HOME is not None) or is_rocm_pytorch)
    ) or os.getenv("FORCE_CUDA", "0") == "1":
        extension = CUDAExtension
        sources += source_cuda
        if not is_rocm_pytorch:
            define_macros += [("WITH_CUDA", None)]
            nvcc_flags = os.getenv("NVCC_FLAGS", "")
            if nvcc_flags == "":
                nvcc_flags = []
            else:
                nvcc_flags = nvcc_flags.split(" ")
        else:
            define_macros += [("WITH_HIP", None)]
            nvcc_flags = []
        # Add the nvcc flags to the existing dict instead of replacing it,
        # so the "cxx" entry (and anything appended to it) survives.
        extra_compile_args["nvcc"] = nvcc_flags
    if sys.platform == "win32":
        define_macros += [("torchvision_EXPORTS", None)]
        extra_compile_args["cxx"].append("/MP")
    debug_mode = os.getenv("DEBUG", "0") == "1"
    if debug_mode:
        print("Compile in debug mode")
        extra_compile_args["cxx"].append("-g")
        extra_compile_args["cxx"].append("-O0")
        if "nvcc" in extra_compile_args:
            # we have to remove "-OX" and "-g" flag if exists and append
            nvcc_flags = extra_compile_args["nvcc"]
            extra_compile_args["nvcc"] = [
                f for f in nvcc_flags if not ("-O" in f or "-g" in f)
            ]
            extra_compile_args["nvcc"].append("-O0")
            extra_compile_args["nvcc"].append("-g")
    sources = [os.path.join(extensions_dir, s) for s in sources]
    include_dirs = [extensions_dir]
    ext_modules = [
        extension(
            "torchvision._C",
            sorted(sources),
            include_dirs=include_dirs,
            define_macros=define_macros,
            extra_compile_args=extra_compile_args,
        )
    ]
    if compile_cpp_tests:
        ext_modules.append(
            extension(
                "torchvision._C_tests",
                tests,
                include_dirs=tests_include_dirs,
                define_macros=define_macros,
                extra_compile_args=extra_compile_args,
            )
        )
    # ------------------- Torchvision extra extensions ------------------------
    vision_include = os.environ.get("TORCHVISION_INCLUDE", None)
    vision_library = os.environ.get("TORCHVISION_LIBRARY", None)
    vision_include = (
        vision_include.split(os.pathsep) if vision_include is not None else []
    )
    vision_library = (
        vision_library.split(os.pathsep) if vision_library is not None else []
    )
    include_dirs += vision_include
    library_dirs = vision_library
    # Image reading extension
    image_macros = []
    image_include = [extensions_dir]
    image_library = []
    image_link_flags = []
    # Locating libPNG
    libpng = distutils.spawn.find_executable("libpng-config")
    pngfix = distutils.spawn.find_executable("pngfix")
    png_found = libpng is not None or pngfix is not None
    print("PNG found: {0}".format(png_found))
    if png_found:
        if libpng is not None:
            # Linux / Mac
            png_version = subprocess.run([libpng, "--version"], stdout=subprocess.PIPE)
            png_version = png_version.stdout.strip().decode("utf-8")
            print("libpng version: {0}".format(png_version))
            png_version = parse_version(png_version)
            if png_version >= parse_version("1.6.0"):
                print("Building torchvision with PNG image support")
                png_lib = subprocess.run([libpng, "--libdir"], stdout=subprocess.PIPE)
                png_lib = png_lib.stdout.strip().decode("utf-8")
                if "disabled" not in png_lib:
                    image_library += [png_lib]
                png_include = subprocess.run(
                    [libpng, "--I_opts"], stdout=subprocess.PIPE
                )
                png_include = png_include.stdout.strip().decode("utf-8")
                _, png_include = png_include.split("-I")
                print("libpng include path: {0}".format(png_include))
                image_include += [png_include]
                image_link_flags.append("png")
            else:
                print(
                    "libpng installed version is less than 1.6.0, disabling PNG support"
                )
                png_found = False
        else:
            # Windows
            png_lib = os.path.join(os.path.dirname(os.path.dirname(pngfix)), "lib")
            png_include = os.path.join(
                os.path.dirname(os.path.dirname(pngfix)), "include", "libpng16"
            )
            image_library += [png_lib]
            image_include += [png_include]
            image_link_flags.append("libpng")
    # Locating libjpeg
    (jpeg_found, jpeg_conda, jpeg_include, jpeg_lib) = find_library(
        "jpeglib", vision_include
    )
    print("JPEG found: {0}".format(jpeg_found))
    image_macros += [("PNG_FOUND", str(int(png_found)))]
    image_macros += [("JPEG_FOUND", str(int(jpeg_found)))]
    if jpeg_found:
        print("Building torchvision with JPEG image support")
        image_link_flags.append("jpeg")
        if jpeg_conda:
            image_library += [jpeg_lib]
            image_include += [jpeg_include]
    image_path = os.path.join(extensions_dir, "io", "image")
    image_src = glob.glob(os.path.join(image_path, "*.cpp")) + glob.glob(
        os.path.join(image_path, "cpu", "*.cpp")
    )
    if png_found or jpeg_found:
        ext_modules.append(
            extension(
                "torchvision.image",
                image_src,
                include_dirs=image_include + include_dirs + [image_path],
                library_dirs=image_library + library_dirs,
                define_macros=image_macros,
                libraries=image_link_flags,
                extra_compile_args=extra_compile_args,
            )
        )
    ffmpeg_exe = distutils.spawn.find_executable("ffmpeg")
    has_ffmpeg = ffmpeg_exe is not None
    print("FFmpeg found: {}".format(has_ffmpeg))
    if has_ffmpeg:
        ffmpeg_libraries = {
            "libavcodec",
            "libavformat",
            "libavutil",
            "libswresample",
            "libswscale",
        }
        # Derive the ffmpeg install root from the executable's location.
        ffmpeg_bin = os.path.dirname(ffmpeg_exe)
        ffmpeg_root = os.path.dirname(ffmpeg_bin)
        ffmpeg_include_dir = os.path.join(ffmpeg_root, "include")
        ffmpeg_library_dir = os.path.join(ffmpeg_root, "lib")
        gcc = distutils.spawn.find_executable("gcc")
        platform_tag = subprocess.run([gcc, "-print-multiarch"], stdout=subprocess.PIPE)
        platform_tag = platform_tag.stdout.strip().decode("utf-8")
        if platform_tag:
            # Most probably a Debian-based distribution
            ffmpeg_include_dir = [
                ffmpeg_include_dir,
                os.path.join(ffmpeg_include_dir, platform_tag),
            ]
            ffmpeg_library_dir = [
                ffmpeg_library_dir,
                os.path.join(ffmpeg_library_dir, platform_tag),
            ]
        else:
            ffmpeg_include_dir = [ffmpeg_include_dir]
            ffmpeg_library_dir = [ffmpeg_library_dir]
        has_ffmpeg = True
        # Require the headers of every ffmpeg component before enabling support.
        for library in ffmpeg_libraries:
            library_found = False
            for search_path in ffmpeg_include_dir + include_dirs:
                full_path = os.path.join(search_path, library, "*.h")
                library_found |= len(glob.glob(full_path)) > 0
            if not library_found:
                # Fixed: the {0} placeholder previously had no .format() call,
                # so the literal "{0}" was printed instead of the library name.
                print(
                    "{0} header files were not found, disabling ffmpeg support".format(
                        library
                    )
                )
                has_ffmpeg = False
    if has_ffmpeg:
        print("ffmpeg include path: {}".format(ffmpeg_include_dir))
        print("ffmpeg library_dir: {}".format(ffmpeg_library_dir))
        # TorchVision base decoder + video reader
        video_reader_src_dir = os.path.join(
            this_dir, "torchvision", "csrc", "io", "video_reader"
        )
        video_reader_src = glob.glob(os.path.join(video_reader_src_dir, "*.cpp"))
        base_decoder_src_dir = os.path.join(
            this_dir, "torchvision", "csrc", "io", "decoder"
        )
        base_decoder_src = glob.glob(os.path.join(base_decoder_src_dir, "*.cpp"))
        # Torchvision video API
        videoapi_src_dir = os.path.join(this_dir, "torchvision", "csrc", "io", "video")
        videoapi_src = glob.glob(os.path.join(videoapi_src_dir, "*.cpp"))
        # exclude tests
        base_decoder_src = [x for x in base_decoder_src if "_test.cpp" not in x]
        combined_src = video_reader_src + base_decoder_src + videoapi_src
        ext_modules.append(
            CppExtension(
                "torchvision.video_reader",
                combined_src,
                include_dirs=[
                    base_decoder_src_dir,
                    video_reader_src_dir,
                    videoapi_src_dir,
                    extensions_dir,
                    *ffmpeg_include_dir,
                    *include_dirs,
                ],
                library_dirs=ffmpeg_library_dir + library_dirs,
                libraries=[
                    "avcodec",
                    "avformat",
                    "avutil",
                    "swresample",
                    "swscale",
                ],
                extra_compile_args=["-std=c++14"]
                if os.name != "nt"
                else ["/std:c++14", "/MP"],
                extra_link_args=["-std=c++14" if os.name != "nt" else "/std:c++14"],
            )
        )
    return ext_modules
|
https://github.com/pytorch/vision/issues/3473
|
No CUDA runtime is found, using CUDA_HOME='/opt/cuda'
Building wheel torchvision-0.9.0a0+b266c2f
Compile in debug mode
Traceback (most recent call last):
File "setup.py", line 472, in <module>
ext_modules=get_extensions(),
File "setup.py", line 213, in get_extensions
extra_compile_args['cxx'].append("-g")
KeyError: 'cxx'
|
KeyError
|
def forward(self, input: Tensor) -> Tensor:  # noqa: F811
    """Compute the dense-layer output features for ``input``.

    ``input`` may be a single Tensor or a list of feature Tensors; a single
    Tensor is wrapped in a one-element list before the bottleneck runs.

    Raises:
        Exception: when the memory-efficient (checkpointed) path is taken
            under TorchScript, which does not support it.
    """
    if isinstance(input, Tensor):
        prev_features = [input]
    else:
        prev_features = input
    # Checkpointed bottleneck trades compute for memory; only usable in eager.
    if self.memory_efficient and self.any_requires_grad(prev_features):
        if torch.jit.is_scripting():
            raise Exception("Memory Efficient not supported in JIT")
        bottleneck = self.call_checkpoint_bottleneck(prev_features)
    else:
        bottleneck = self.bn_function(prev_features)
    # norm2 -> relu2 -> conv2, spelled out for readability.
    normed = self.norm2(bottleneck)
    activated = self.relu2(normed)
    new_features = self.conv2(activated)
    if self.drop_rate > 0:
        new_features = F.dropout(
            new_features, p=self.drop_rate, training=self.training
        )
    return new_features
|
def forward(self, input: Tensor) -> Tensor:  # noqa: F811
    """Compute the dense-layer output features for ``input``.

    BUG FIX: the trailing ``# type: ignore[no-redef]`` comment was removed.
    ``torch.jit.script`` parses trailing ``# type:`` comments as function
    type annotations, and choked on it with "expected type comment but
    found 'def'", making the model unscriptable. Only ``# noqa: F811``
    (which flake8 needs for the @overload redefinition) is kept.

    ``input`` may be a single Tensor or a list of feature Tensors.

    Raises:
        Exception: when the memory-efficient (checkpointed) path is taken
            under TorchScript, which does not support it.
    """
    if isinstance(input, Tensor):
        prev_features = [input]
    else:
        prev_features = input
    # Checkpointed bottleneck trades compute for memory; only usable in eager.
    if self.memory_efficient and self.any_requires_grad(prev_features):
        if torch.jit.is_scripting():
            raise Exception("Memory Efficient not supported in JIT")
        bottleneck_output = self.call_checkpoint_bottleneck(prev_features)
    else:
        bottleneck_output = self.bn_function(prev_features)
    new_features = self.conv2(self.relu2(self.norm2(bottleneck_output)))
    if self.drop_rate > 0:
        new_features = F.dropout(new_features, p=self.drop_rate, training=self.training)
    return new_features
|
https://github.com/pytorch/vision/issues/3027
|
Traceback (most recent call last):
File "repro.py", line 7, in <module>
torch.jit.script(model).save('densenet161.pt')
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/torch/jit/_script.py", line 911, in script
return torch.jit._recursive.create_script_module(
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/torch/jit/_recursive.py", line 370, in create_script_module
return create_script_module_impl(nn_module, concrete_type, stubs_fn)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/torch/jit/_recursive.py", line 426, in create_script_module_impl
script_module = torch.jit.RecursiveScriptModule._construct(cpp_module, init_fn)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/torch/jit/_script.py", line 388, in _construct
init_fn(script_module)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/torch/jit/_recursive.py", line 406, in init_fn
scripted = create_script_module_impl(orig_value, sub_concrete_type, stubs_fn)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/torch/jit/_recursive.py", line 426, in create_script_module_impl
script_module = torch.jit.RecursiveScriptModule._construct(cpp_module, init_fn)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/torch/jit/_script.py", line 388, in _construct
init_fn(script_module)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/torch/jit/_recursive.py", line 406, in init_fn
scripted = create_script_module_impl(orig_value, sub_concrete_type, stubs_fn)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/torch/jit/_recursive.py", line 382, in create_script_module_impl
method_stubs = stubs_fn(nn_module)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/torch/jit/_recursive.py", line 618, in infer_methods_to_compile
stubs.append(make_stub_from_method(nn_module, method))
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/torch/jit/_recursive.py", line 52, in make_stub_from_method
return make_stub(func, method_name)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/torch/jit/_recursive.py", line 37, in make_stub
ast = get_jit_def(func, name, self_name="RecursiveScriptModule")
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/torch/jit/frontend.py", line 259, in get_jit_def
return build_def(ctx, fn_def, type_line, def_name, self_name=self_name)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/torch/jit/frontend.py", line 288, in build_def
type_comment_decl = torch._C.parse_type_comment(type_line)
RuntimeError: expected type comment but found 'def' here:
def forward(self, init_features: Tensor) -> Tensor: # type: ignore[override]
~~~ <--- HERE
|
RuntimeError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.