after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def _fix_py3_plus(contents_text): # type: (str) -> str
try:
ast_obj = ast_parse(contents_text)
except SyntaxError:
return contents_text
visitor = FindPy3Plus()
visitor.visit(ast_obj)
if not any(
(
visitor.bases_to_remove,
visitor.encode_calls,
visitor.if_py2_blocks_else,
visitor.if_py3_blocks,
visitor.if_py3_blocks_else,
visitor.native_literals,
visitor.io_open_calls,
visitor.os_error_alias_calls,
visitor.os_error_alias_simple,
visitor.os_error_alias_excepts,
visitor.six_add_metaclass,
visitor.six_b,
visitor.six_calls,
visitor.six_iter,
visitor.six_raise_from,
visitor.six_reraise,
visitor.six_remove_decorators,
visitor.six_simple,
visitor.six_type_ctx,
visitor.six_with_metaclass,
visitor.super_calls,
visitor.yield_from_fors,
)
):
return contents_text
try:
tokens = src_to_tokens(contents_text)
except tokenize.TokenError: # pragma: no cover (bpo-2180)
return contents_text
_fixup_dedent_tokens(tokens)
def _replace(i, mapping, node):
# type: (int, Dict[str, str], NameOrAttr) -> None
new_token = Token("CODE", _get_tmpl(mapping, node))
if isinstance(node, ast.Name):
tokens[i] = new_token
else:
j = i
while tokens[j].src != node.attr:
# timid: if we see a parenthesis here, skip it
if tokens[j].src == ")":
return
j += 1
tokens[i : j + 1] = [new_token]
for i, token in reversed_enumerate(tokens):
if not token.src:
continue
elif token.offset in visitor.bases_to_remove:
_remove_base_class(tokens, i)
elif token.offset in visitor.if_py3_blocks:
if tokens[i].src == "if":
if_block = Block.find(tokens, i)
if_block.dedent(tokens)
del tokens[if_block.start : if_block.block]
else:
if_block = Block.find(tokens, _find_elif(tokens, i))
if_block.replace_condition(tokens, [Token("NAME", "else")])
elif token.offset in visitor.if_py2_blocks_else:
if tokens[i].src == "if":
if_block, else_block = _find_if_else_block(tokens, i)
else_block.dedent(tokens)
del tokens[if_block.start : else_block.block]
else:
j = _find_elif(tokens, i)
if_block, else_block = _find_if_else_block(tokens, j)
del tokens[if_block.start : else_block.start]
elif token.offset in visitor.if_py3_blocks_else:
if tokens[i].src == "if":
if_block, else_block = _find_if_else_block(tokens, i)
if_block.dedent(tokens)
del tokens[if_block.end : else_block.end]
del tokens[if_block.start : if_block.block]
else:
j = _find_elif(tokens, i)
if_block, else_block = _find_if_else_block(tokens, j)
del tokens[if_block.end : else_block.end]
if_block.replace_condition(tokens, [Token("NAME", "else")])
elif token.offset in visitor.six_type_ctx:
_replace(i, SIX_TYPE_CTX_ATTRS, visitor.six_type_ctx[token.offset])
elif token.offset in visitor.six_simple:
_replace(i, SIX_SIMPLE_ATTRS, visitor.six_simple[token.offset])
elif token.offset in visitor.six_remove_decorators:
_remove_decorator(tokens, i)
elif token.offset in visitor.six_b:
j = _find_open_paren(tokens, i)
if (
tokens[j + 1].name == "STRING"
and _is_ascii(tokens[j + 1].src)
and tokens[j + 2].src == ")"
):
func_args, end = _parse_call_args(tokens, j)
_replace_call(tokens, i, end, func_args, SIX_B_TMPL)
elif token.offset in visitor.six_iter:
j = _find_open_paren(tokens, i)
func_args, end = _parse_call_args(tokens, j)
call = visitor.six_iter[token.offset]
assert isinstance(call.func, (ast.Name, ast.Attribute))
template = "iter({})".format(_get_tmpl(SIX_CALLS, call.func))
_replace_call(tokens, i, end, func_args, template)
elif token.offset in visitor.six_calls:
j = _find_open_paren(tokens, i)
func_args, end = _parse_call_args(tokens, j)
call = visitor.six_calls[token.offset]
assert isinstance(call.func, (ast.Name, ast.Attribute))
template = _get_tmpl(SIX_CALLS, call.func)
_replace_call(tokens, i, end, func_args, template)
elif token.offset in visitor.six_raise_from:
j = _find_open_paren(tokens, i)
func_args, end = _parse_call_args(tokens, j)
_replace_call(tokens, i, end, func_args, RAISE_FROM_TMPL)
elif token.offset in visitor.six_reraise:
j = _find_open_paren(tokens, i)
func_args, end = _parse_call_args(tokens, j)
if len(func_args) == 2:
_replace_call(tokens, i, end, func_args, RERAISE_2_TMPL)
else:
_replace_call(tokens, i, end, func_args, RERAISE_3_TMPL)
elif token.offset in visitor.six_add_metaclass:
j = _find_open_paren(tokens, i)
func_args, end = _parse_call_args(tokens, j)
metaclass = "metaclass={}".format(_arg_str(tokens, *func_args[0]))
# insert `metaclass={args[0]}` into `class:`
# search forward for the `class` token
j = i + 1
while tokens[j].src != "class":
j += 1
class_token = j
# then search forward for a `:` token, not inside a brace
j = _find_block_start(tokens, j)
last_paren = -1
for k in range(class_token, j):
if tokens[k].src == ")":
last_paren = k
if last_paren == -1:
tokens.insert(j, Token("CODE", "({})".format(metaclass)))
else:
insert = last_paren - 1
while tokens[insert].name in NON_CODING_TOKENS:
insert -= 1
if tokens[insert].src == "(": # no bases
src = metaclass
elif tokens[insert].src != ",":
src = ", {}".format(metaclass)
else:
src = " {},".format(metaclass)
tokens.insert(insert + 1, Token("CODE", src))
_remove_decorator(tokens, i)
elif token.offset in visitor.six_with_metaclass:
j = _find_open_paren(tokens, i)
func_args, end = _parse_call_args(tokens, j)
if len(func_args) == 1:
tmpl = WITH_METACLASS_NO_BASES_TMPL
else:
tmpl = WITH_METACLASS_BASES_TMPL
_replace_call(tokens, i, end, func_args, tmpl)
elif token.offset in visitor.super_calls:
i = _find_open_paren(tokens, i)
call = visitor.super_calls[token.offset]
victims = _victims(tokens, i, call, gen=False)
del tokens[victims.starts[0] + 1 : victims.ends[-1]]
elif token.offset in visitor.encode_calls:
i = _find_open_paren(tokens, i)
call = visitor.encode_calls[token.offset]
victims = _victims(tokens, i, call, gen=False)
del tokens[victims.starts[0] + 1 : victims.ends[-1]]
elif token.offset in visitor.native_literals:
j = _find_open_paren(tokens, i)
func_args, end = _parse_call_args(tokens, j)
if any(tok.name == "NL" for tok in tokens[i:end]):
continue
if func_args:
_replace_call(tokens, i, end, func_args, "{args[0]}")
else:
tokens[i:end] = [token._replace(name="STRING", src="''")]
elif token.offset in visitor.io_open_calls:
j = _find_open_paren(tokens, i)
tokens[i:j] = [token._replace(name="NAME", src="open")]
elif token.offset in visitor.os_error_alias_calls:
j = _find_open_paren(tokens, i)
tokens[i:j] = [token._replace(name="NAME", src="OSError")]
elif token.offset in visitor.os_error_alias_simple:
node = visitor.os_error_alias_simple[token.offset]
_replace(i, collections.defaultdict(lambda: "OSError"), node)
elif token.offset in visitor.os_error_alias_excepts:
line, utf8_byte_offset = token.line, token.utf8_byte_offset
# find all the arg strs in the tuple
except_index = i
while tokens[except_index].src != "except":
except_index -= 1
start = _find_open_paren(tokens, except_index)
func_args, end = _parse_call_args(tokens, start)
# save the exceptions and remove the block
arg_strs = [_arg_str(tokens, *arg) for arg in func_args]
del tokens[start:end]
# rewrite the block without dupes
args = []
for arg in arg_strs:
left, part, right = arg.partition(".")
if (
left in visitor.OS_ERROR_ALIAS_MODULES
and part == "."
and right == "error"
):
args.append("OSError")
elif left in visitor.OS_ERROR_ALIASES and part == right == "":
args.append("OSError")
elif (
left == "error"
and part == right == ""
and (
"error" in visitor.from_imported_names["mmap"]
or "error" in visitor.from_imported_names["select"]
or "error" in visitor.from_imported_names["socket"]
)
):
args.append("OSError")
else:
args.append(arg)
unique_args = tuple(collections.OrderedDict.fromkeys(args))
if len(unique_args) > 1:
joined = "({})".format(", ".join(unique_args))
elif tokens[start - 1].name != "UNIMPORTANT_WS":
joined = " {}".format(unique_args[0])
else:
joined = unique_args[0]
new = Token("CODE", joined, line, utf8_byte_offset)
tokens.insert(start, new)
visitor.os_error_alias_excepts.pop(token.offset)
elif token.offset in visitor.yield_from_fors:
_replace_yield(tokens, i)
return tokens_to_src(tokens)
|
def _fix_py3_plus(contents_text): # type: (str) -> str
try:
ast_obj = ast_parse(contents_text)
except SyntaxError:
return contents_text
visitor = FindPy3Plus()
visitor.visit(ast_obj)
if not any(
(
visitor.bases_to_remove,
visitor.encode_calls,
visitor.if_py2_blocks_else,
visitor.if_py3_blocks,
visitor.if_py3_blocks_else,
visitor.native_literals,
visitor.io_open_calls,
visitor.os_error_alias_calls,
visitor.os_error_alias_simple,
visitor.os_error_alias_excepts,
visitor.six_add_metaclass,
visitor.six_b,
visitor.six_calls,
visitor.six_iter,
visitor.six_raises,
visitor.six_remove_decorators,
visitor.six_simple,
visitor.six_type_ctx,
visitor.six_with_metaclass,
visitor.super_calls,
visitor.yield_from_fors,
)
):
return contents_text
try:
tokens = src_to_tokens(contents_text)
except tokenize.TokenError: # pragma: no cover (bpo-2180)
return contents_text
_fixup_dedent_tokens(tokens)
def _replace(i, mapping, node):
# type: (int, Dict[str, str], NameOrAttr) -> None
new_token = Token("CODE", _get_tmpl(mapping, node))
if isinstance(node, ast.Name):
tokens[i] = new_token
else:
j = i
while tokens[j].src != node.attr:
# timid: if we see a parenthesis here, skip it
if tokens[j].src == ")":
return
j += 1
tokens[i : j + 1] = [new_token]
for i, token in reversed_enumerate(tokens):
if not token.src:
continue
elif token.offset in visitor.bases_to_remove:
_remove_base_class(tokens, i)
elif token.offset in visitor.if_py3_blocks:
if tokens[i].src == "if":
if_block = Block.find(tokens, i)
if_block.dedent(tokens)
del tokens[if_block.start : if_block.block]
else:
if_block = Block.find(tokens, _find_elif(tokens, i))
if_block.replace_condition(tokens, [Token("NAME", "else")])
elif token.offset in visitor.if_py2_blocks_else:
if tokens[i].src == "if":
if_block, else_block = _find_if_else_block(tokens, i)
else_block.dedent(tokens)
del tokens[if_block.start : else_block.block]
else:
j = _find_elif(tokens, i)
if_block, else_block = _find_if_else_block(tokens, j)
del tokens[if_block.start : else_block.start]
elif token.offset in visitor.if_py3_blocks_else:
if tokens[i].src == "if":
if_block, else_block = _find_if_else_block(tokens, i)
if_block.dedent(tokens)
del tokens[if_block.end : else_block.end]
del tokens[if_block.start : if_block.block]
else:
j = _find_elif(tokens, i)
if_block, else_block = _find_if_else_block(tokens, j)
del tokens[if_block.end : else_block.end]
if_block.replace_condition(tokens, [Token("NAME", "else")])
elif token.offset in visitor.six_type_ctx:
_replace(i, SIX_TYPE_CTX_ATTRS, visitor.six_type_ctx[token.offset])
elif token.offset in visitor.six_simple:
_replace(i, SIX_SIMPLE_ATTRS, visitor.six_simple[token.offset])
elif token.offset in visitor.six_remove_decorators:
_remove_decorator(tokens, i)
elif token.offset in visitor.six_b:
j = _find_open_paren(tokens, i)
if (
tokens[j + 1].name == "STRING"
and _is_ascii(tokens[j + 1].src)
and tokens[j + 2].src == ")"
):
func_args, end = _parse_call_args(tokens, j)
_replace_call(tokens, i, end, func_args, SIX_B_TMPL)
elif token.offset in visitor.six_iter:
j = _find_open_paren(tokens, i)
func_args, end = _parse_call_args(tokens, j)
call = visitor.six_iter[token.offset]
assert isinstance(call.func, (ast.Name, ast.Attribute))
template = "iter({})".format(_get_tmpl(SIX_CALLS, call.func))
_replace_call(tokens, i, end, func_args, template)
elif token.offset in visitor.six_calls:
j = _find_open_paren(tokens, i)
func_args, end = _parse_call_args(tokens, j)
call = visitor.six_calls[token.offset]
assert isinstance(call.func, (ast.Name, ast.Attribute))
template = _get_tmpl(SIX_CALLS, call.func)
_replace_call(tokens, i, end, func_args, template)
elif token.offset in visitor.six_raises:
j = _find_open_paren(tokens, i)
func_args, end = _parse_call_args(tokens, j)
call = visitor.six_raises[token.offset]
assert isinstance(call.func, (ast.Name, ast.Attribute))
template = _get_tmpl(SIX_RAISES, call.func)
_replace_call(tokens, i, end, func_args, template)
elif token.offset in visitor.six_add_metaclass:
j = _find_open_paren(tokens, i)
func_args, end = _parse_call_args(tokens, j)
metaclass = "metaclass={}".format(_arg_str(tokens, *func_args[0]))
# insert `metaclass={args[0]}` into `class:`
# search forward for the `class` token
j = i + 1
while tokens[j].src != "class":
j += 1
class_token = j
# then search forward for a `:` token, not inside a brace
j = _find_block_start(tokens, j)
last_paren = -1
for k in range(class_token, j):
if tokens[k].src == ")":
last_paren = k
if last_paren == -1:
tokens.insert(j, Token("CODE", "({})".format(metaclass)))
else:
insert = last_paren - 1
while tokens[insert].name in NON_CODING_TOKENS:
insert -= 1
if tokens[insert].src == "(": # no bases
src = metaclass
elif tokens[insert].src != ",":
src = ", {}".format(metaclass)
else:
src = " {},".format(metaclass)
tokens.insert(insert + 1, Token("CODE", src))
_remove_decorator(tokens, i)
elif token.offset in visitor.six_with_metaclass:
j = _find_open_paren(tokens, i)
func_args, end = _parse_call_args(tokens, j)
if len(func_args) == 1:
tmpl = WITH_METACLASS_NO_BASES_TMPL
else:
tmpl = WITH_METACLASS_BASES_TMPL
_replace_call(tokens, i, end, func_args, tmpl)
elif token.offset in visitor.super_calls:
i = _find_open_paren(tokens, i)
call = visitor.super_calls[token.offset]
victims = _victims(tokens, i, call, gen=False)
del tokens[victims.starts[0] + 1 : victims.ends[-1]]
elif token.offset in visitor.encode_calls:
i = _find_open_paren(tokens, i)
call = visitor.encode_calls[token.offset]
victims = _victims(tokens, i, call, gen=False)
del tokens[victims.starts[0] + 1 : victims.ends[-1]]
elif token.offset in visitor.native_literals:
j = _find_open_paren(tokens, i)
func_args, end = _parse_call_args(tokens, j)
if any(tok.name == "NL" for tok in tokens[i:end]):
continue
if func_args:
_replace_call(tokens, i, end, func_args, "{args[0]}")
else:
tokens[i:end] = [token._replace(name="STRING", src="''")]
elif token.offset in visitor.io_open_calls:
j = _find_open_paren(tokens, i)
tokens[i:j] = [token._replace(name="NAME", src="open")]
elif token.offset in visitor.os_error_alias_calls:
j = _find_open_paren(tokens, i)
tokens[i:j] = [token._replace(name="NAME", src="OSError")]
elif token.offset in visitor.os_error_alias_simple:
node = visitor.os_error_alias_simple[token.offset]
_replace(i, collections.defaultdict(lambda: "OSError"), node)
elif token.offset in visitor.os_error_alias_excepts:
line, utf8_byte_offset = token.line, token.utf8_byte_offset
# find all the arg strs in the tuple
except_index = i
while tokens[except_index].src != "except":
except_index -= 1
start = _find_open_paren(tokens, except_index)
func_args, end = _parse_call_args(tokens, start)
# save the exceptions and remove the block
arg_strs = [_arg_str(tokens, *arg) for arg in func_args]
del tokens[start:end]
# rewrite the block without dupes
args = []
for arg in arg_strs:
left, part, right = arg.partition(".")
if (
left in visitor.OS_ERROR_ALIAS_MODULES
and part == "."
and right == "error"
):
args.append("OSError")
elif left in visitor.OS_ERROR_ALIASES and part == right == "":
args.append("OSError")
elif (
left == "error"
and part == right == ""
and (
"error" in visitor.from_imported_names["mmap"]
or "error" in visitor.from_imported_names["select"]
or "error" in visitor.from_imported_names["socket"]
)
):
args.append("OSError")
else:
args.append(arg)
unique_args = tuple(collections.OrderedDict.fromkeys(args))
if len(unique_args) > 1:
joined = "({})".format(", ".join(unique_args))
elif tokens[start - 1].name != "UNIMPORTANT_WS":
joined = " {}".format(unique_args[0])
else:
joined = unique_args[0]
new = Token("CODE", joined, line, utf8_byte_offset)
tokens.insert(start, new)
visitor.os_error_alias_excepts.pop(token.offset)
elif token.offset in visitor.yield_from_fors:
_replace_yield(tokens, i)
return tokens_to_src(tokens)
|
https://github.com/asottile/pyupgrade/issues/246
|
Traceback (most recent call last):
File ".../venv/bin/pyupgrade", line 10, in <module>
sys.exit(main())
File ".../venv/lib64/python3.8/site-packages/pyupgrade.py", line 2318, in main
ret |= _fix_file(filename, args)
File ".../venv/lib64/python3.8/site-packages/pyupgrade.py", line 2280, in _fix_file
contents_text = _fix_py3_plus(contents_text)
File ".../venv/lib64/python3.8/site-packages/pyupgrade.py", line 1984, in _fix_py3_plus
_replace_call(tokens, i, end, func_args, template)
File ".../venv/lib64/python3.8/site-packages/pyupgrade.py", line 1849, in _replace_call
src = tmpl.format(args=arg_strs, rest=rest)
IndexError: list index out of range
|
IndexError
|
def visit_Call(self, node): # type: (ast.Call) -> None
if (
isinstance(node.func, ast.Name)
and node.func.id in {"isinstance", "issubclass"}
and len(node.args) == 2
and self._is_six(node.args[1], SIX_TYPE_CTX_ATTRS)
):
arg = node.args[1]
# _is_six() enforces this
assert isinstance(arg, (ast.Name, ast.Attribute))
self.six_type_ctx[_ast_to_offset(node.args[1])] = arg
elif self._is_six(node.func, ("b",)):
self.six_b.add(_ast_to_offset(node))
elif self._is_six(node.func, SIX_CALLS) and not _starargs(node):
self.six_calls[_ast_to_offset(node)] = node
elif (
isinstance(node.func, ast.Name)
and node.func.id == "next"
and not _starargs(node)
and len(node.args) == 1
and isinstance(node.args[0], ast.Call)
and self._is_six(
node.args[0].func,
("iteritems", "iterkeys", "itervalues"),
)
and not _starargs(node.args[0])
):
self.six_iter[_ast_to_offset(node.args[0])] = node.args[0]
elif (
isinstance(self._previous_node, ast.Expr)
and self._is_six(node.func, SIX_RAISES)
and not _starargs(node)
):
self.six_raises[_ast_to_offset(node)] = node
elif (
not self._in_comp
and self._class_info_stack
and self._class_info_stack[-1].def_depth == 1
and isinstance(node.func, ast.Name)
and node.func.id == "super"
and len(node.args) == 2
and isinstance(node.args[0], ast.Name)
and isinstance(node.args[1], ast.Name)
and node.args[0].id == self._class_info_stack[-1].name
and node.args[1].id == self._class_info_stack[-1].first_arg_name
):
self.super_calls[_ast_to_offset(node)] = node
elif (
isinstance(node.func, ast.Name)
and node.func.id == "str"
and not node.keywords
and not _starargs(node)
and (
len(node.args) == 0
or (len(node.args) == 1 and isinstance(node.args[0], ast.Str))
)
):
self.native_literals.add(_ast_to_offset(node))
elif (
isinstance(node.func, ast.Attribute)
and isinstance(node.func.value, ast.Str)
and node.func.attr == "encode"
and not _starargs(node)
and len(node.args) == 1
and isinstance(node.args[0], ast.Str)
and _is_codec(node.args[0].s, "utf-8")
):
self.encode_calls[_ast_to_offset(node)] = node
elif self._is_io_open(node.func):
self.io_open_calls[_ast_to_offset(node)] = node
self.generic_visit(node)
|
def visit_Call(self, node): # type: (ast.Call) -> None
if (
isinstance(node.func, ast.Name)
and node.func.id in {"isinstance", "issubclass"}
and len(node.args) == 2
and self._is_six(node.args[1], SIX_TYPE_CTX_ATTRS)
):
arg = node.args[1]
# _is_six() enforces this
assert isinstance(arg, (ast.Name, ast.Attribute))
self.six_type_ctx[_ast_to_offset(node.args[1])] = arg
elif self._is_six(node.func, ("b",)):
self.six_b.add(_ast_to_offset(node))
elif self._is_six(node.func, SIX_CALLS) and not _starargs(node):
self.six_calls[_ast_to_offset(node)] = node
elif (
isinstance(node.func, ast.Name)
and node.func.id == "next"
and not _starargs(node)
and isinstance(node.args[0], ast.Call)
and self._is_six(
node.args[0].func,
("iteritems", "iterkeys", "itervalues"),
)
and not _starargs(node.args[0])
):
self.six_iter[_ast_to_offset(node.args[0])] = node.args[0]
elif (
isinstance(self._previous_node, ast.Expr)
and self._is_six(node.func, SIX_RAISES)
and not _starargs(node)
):
self.six_raises[_ast_to_offset(node)] = node
elif (
not self._in_comp
and self._class_info_stack
and self._class_info_stack[-1].def_depth == 1
and isinstance(node.func, ast.Name)
and node.func.id == "super"
and len(node.args) == 2
and isinstance(node.args[0], ast.Name)
and isinstance(node.args[1], ast.Name)
and node.args[0].id == self._class_info_stack[-1].name
and node.args[1].id == self._class_info_stack[-1].first_arg_name
):
self.super_calls[_ast_to_offset(node)] = node
elif (
isinstance(node.func, ast.Name)
and node.func.id == "str"
and not node.keywords
and not _starargs(node)
and (
len(node.args) == 0
or (len(node.args) == 1 and isinstance(node.args[0], ast.Str))
)
):
self.native_literals.add(_ast_to_offset(node))
elif (
isinstance(node.func, ast.Attribute)
and isinstance(node.func.value, ast.Str)
and node.func.attr == "encode"
and not _starargs(node)
and len(node.args) == 1
and isinstance(node.args[0], ast.Str)
and _is_codec(node.args[0].s, "utf-8")
):
self.encode_calls[_ast_to_offset(node)] = node
elif self._is_io_open(node.func):
self.io_open_calls[_ast_to_offset(node)] = node
self.generic_visit(node)
|
https://github.com/asottile/pyupgrade/issues/215
|
$ pyupgrade --py3-plus more_itertools/recipes.py
Traceback (most recent call last):
File "/home/jdufresne/.venv/more-itertools/bin/pyupgrade", line 10, in <module>
sys.exit(main())
File "/home/jdufresne/.venv/more-itertools/lib64/python3.7/site-packages/pyupgrade.py", line 2237, in main
ret |= _fix_file(filename, args)
File "/home/jdufresne/.venv/more-itertools/lib64/python3.7/site-packages/pyupgrade.py", line 2199, in _fix_file
contents_text = _fix_py3_plus(contents_text)
File "/home/jdufresne/.venv/more-itertools/lib64/python3.7/site-packages/pyupgrade.py", line 1801, in _fix_py3_plus
visitor.visit(ast_obj)
File "/usr/lib64/python3.7/ast.py", line 262, in visit
return visitor(node)
File "/home/jdufresne/.venv/more-itertools/lib64/python3.7/site-packages/pyupgrade.py", line 1536, in generic_visit
super(FindPy3Plus, self).generic_visit(node)
File "/usr/lib64/python3.7/ast.py", line 270, in generic_visit
self.visit(item)
File "/usr/lib64/python3.7/ast.py", line 262, in visit
return visitor(node)
File "/home/jdufresne/.venv/more-itertools/lib64/python3.7/site-packages/pyupgrade.py", line 1312, in _visit_sync_func
self._visit_func(node)
File "/home/jdufresne/.venv/more-itertools/lib64/python3.7/site-packages/pyupgrade.py", line 1307, in _visit_func
self.generic_visit(node)
File "/home/jdufresne/.venv/more-itertools/lib64/python3.7/site-packages/pyupgrade.py", line 1536, in generic_visit
super(FindPy3Plus, self).generic_visit(node)
File "/usr/lib64/python3.7/ast.py", line 270, in generic_visit
self.visit(item)
File "/usr/lib64/python3.7/ast.py", line 262, in visit
return visitor(node)
File "/home/jdufresne/.venv/more-itertools/lib64/python3.7/site-packages/pyupgrade.py", line 1536, in generic_visit
super(FindPy3Plus, self).generic_visit(node)
File "/usr/lib64/python3.7/ast.py", line 270, in generic_visit
self.visit(item)
File "/usr/lib64/python3.7/ast.py", line 262, in visit
return visitor(node)
File "/home/jdufresne/.venv/more-itertools/lib64/python3.7/site-packages/pyupgrade.py", line 1353, in visit_Try
self.generic_visit(node)
File "/home/jdufresne/.venv/more-itertools/lib64/python3.7/site-packages/pyupgrade.py", line 1536, in generic_visit
super(FindPy3Plus, self).generic_visit(node)
File "/usr/lib64/python3.7/ast.py", line 270, in generic_visit
self.visit(item)
File "/usr/lib64/python3.7/ast.py", line 262, in visit
return visitor(node)
File "/home/jdufresne/.venv/more-itertools/lib64/python3.7/site-packages/pyupgrade.py", line 1532, in visit_For
self.generic_visit(node)
File "/home/jdufresne/.venv/more-itertools/lib64/python3.7/site-packages/pyupgrade.py", line 1536, in generic_visit
super(FindPy3Plus, self).generic_visit(node)
File "/usr/lib64/python3.7/ast.py", line 270, in generic_visit
self.visit(item)
File "/usr/lib64/python3.7/ast.py", line 262, in visit
return visitor(node)
File "/home/jdufresne/.venv/more-itertools/lib64/python3.7/site-packages/pyupgrade.py", line 1536, in generic_visit
super(FindPy3Plus, self).generic_visit(node)
File "/usr/lib64/python3.7/ast.py", line 272, in generic_visit
self.visit(value)
File "/usr/lib64/python3.7/ast.py", line 262, in visit
return visitor(node)
File "/home/jdufresne/.venv/more-itertools/lib64/python3.7/site-packages/pyupgrade.py", line 1536, in generic_visit
super(FindPy3Plus, self).generic_visit(node)
File "/usr/lib64/python3.7/ast.py", line 272, in generic_visit
self.visit(value)
File "/usr/lib64/python3.7/ast.py", line 262, in visit
return visitor(node)
File "/home/jdufresne/.venv/more-itertools/lib64/python3.7/site-packages/pyupgrade.py", line 1393, in visit_Call
isinstance(node.args[0], ast.Call) and
IndexError: list index out of range
|
IndexError
|
def visit_ClassDef(self, node): # type: (ast.ClassDef) -> None
for decorator in node.decorator_list:
if self._is_six(decorator, ("python_2_unicode_compatible",)):
self.six_remove_decorators.add(_ast_to_offset(decorator))
for base in node.bases:
if isinstance(base, ast.Name) and base.id == "object":
self.bases_to_remove.add(_ast_to_offset(base))
elif self._is_six(base, ("Iterator",)):
self.bases_to_remove.add(_ast_to_offset(base))
if (
len(node.bases) == 1
and isinstance(node.bases[0], ast.Call)
and self._is_six(node.bases[0].func, ("with_metaclass",))
and not _starargs(node.bases[0])
):
self.six_with_metaclass.add(_ast_to_offset(node.bases[0]))
self._class_info_stack.append(FindPy3Plus.ClassInfo(node.name))
self.generic_visit(node)
self._class_info_stack.pop()
|
def visit_ClassDef(self, node): # type: (ast.ClassDef) -> None
for decorator in node.decorator_list:
if self._is_six(decorator, ("python_2_unicode_compatible",)):
self.six_remove_decorators.add(_ast_to_offset(decorator))
for base in node.bases:
if isinstance(base, ast.Name) and base.id == "object":
self.bases_to_remove.add(_ast_to_offset(base))
elif self._is_six(base, ("Iterator",)):
self.bases_to_remove.add(_ast_to_offset(base))
if (
len(node.bases) == 1
and isinstance(node.bases[0], ast.Call)
and self._is_six(node.bases[0].func, ("with_metaclass",))
):
self.six_with_metaclass.add(_ast_to_offset(node.bases[0]))
self._class_info_stack.append(FindPy3Plus.ClassInfo(node.name))
self.generic_visit(node)
self._class_info_stack.pop()
|
https://github.com/asottile/pyupgrade/issues/144
|
% pyupgrade --py3-only --keep-percent-format src/_pytest/fixtures.py
Traceback (most recent call last):
File "…/Vcs/pytest/.venv/bin/pyupgrade", line 11, in <module>
sys.exit(main())
File "…/Vcs/pytest/.venv/lib/python3.7/site-packages/pyupgrade.py", line 1428, in main
ret |= fix_file(filename, args)
File "…/Vcs/pytest/.venv/lib/python3.7/site-packages/pyupgrade.py", line 1402, in fix_file
contents_text = _fix_py3_plus(contents_text)
File "…/Vcs/pytest/.venv/lib/python3.7/site-packages/pyupgrade.py", line 1244, in _fix_py3_plus
_replace_call(tokens, i, end, func_args, template)
File "…/Vcs/pytest/.venv/lib/python3.7/site-packages/pyupgrade.py", line 1171, in _replace_call
src = tmpl.format(args=arg_strs, rest=rest)
IndexError: list index out of range
|
IndexError
|
def visit_Call(self, node): # type: (ast.Call) -> None
if (
isinstance(node.func, ast.Name)
and node.func.id in {"isinstance", "issubclass"}
and len(node.args) == 2
and self._is_six(node.args[1], SIX_TYPE_CTX_ATTRS)
):
self.six_type_ctx[_ast_to_offset(node.args[1])] = node.args[1]
elif self._is_six(node.func, ("b",)):
self.six_b.add(_ast_to_offset(node))
elif self._is_six(node.func, SIX_CALLS) and not _starargs(node):
self.six_calls[_ast_to_offset(node)] = node
elif (
isinstance(self._previous_node, ast.Expr)
and self._is_six(node.func, SIX_RAISES)
and not _starargs(node)
):
self.six_raises[_ast_to_offset(node)] = node
elif (
not self._in_comp
and self._class_info_stack
and self._class_info_stack[-1].def_depth == 1
and isinstance(node.func, ast.Name)
and node.func.id == "super"
and len(node.args) == 2
and all(isinstance(arg, ast.Name) for arg in node.args)
and node.args[0].id == self._class_info_stack[-1].name
and node.args[1].id == self._class_info_stack[-1].first_arg_name
):
self.super_calls[_ast_to_offset(node)] = node
elif (
isinstance(node.func, ast.Name)
and node.func.id == "str"
and len(node.args) == 1
and isinstance(node.args[0], ast.Str)
and not node.keywords
and not _starargs(node)
):
self.native_literals.add(_ast_to_offset(node))
self.generic_visit(node)
|
def visit_Call(self, node): # type: (ast.Call) -> None
if (
isinstance(node.func, ast.Name)
and node.func.id in {"isinstance", "issubclass"}
and len(node.args) == 2
and self._is_six(node.args[1], SIX_TYPE_CTX_ATTRS)
):
self.six_type_ctx[_ast_to_offset(node.args[1])] = node.args[1]
elif self._is_six(node.func, ("b",)):
self.six_b.add(_ast_to_offset(node))
elif self._is_six(node.func, SIX_CALLS):
self.six_calls[_ast_to_offset(node)] = node
elif isinstance(self._previous_node, ast.Expr) and self._is_six(
node.func, SIX_RAISES
):
self.six_raises[_ast_to_offset(node)] = node
elif (
not self._in_comp
and self._class_info_stack
and self._class_info_stack[-1].def_depth == 1
and isinstance(node.func, ast.Name)
and node.func.id == "super"
and len(node.args) == 2
and all(isinstance(arg, ast.Name) for arg in node.args)
and node.args[0].id == self._class_info_stack[-1].name
and node.args[1].id == self._class_info_stack[-1].first_arg_name
):
self.super_calls[_ast_to_offset(node)] = node
elif (
isinstance(node.func, ast.Name)
and node.func.id == "str"
and len(node.args) == 1
and isinstance(node.args[0], ast.Str)
and not node.keywords
and not _starargs(node)
):
self.native_literals.add(_ast_to_offset(node))
self.generic_visit(node)
|
https://github.com/asottile/pyupgrade/issues/144
|
% pyupgrade --py3-only --keep-percent-format src/_pytest/fixtures.py
Traceback (most recent call last):
File "…/Vcs/pytest/.venv/bin/pyupgrade", line 11, in <module>
sys.exit(main())
File "…/Vcs/pytest/.venv/lib/python3.7/site-packages/pyupgrade.py", line 1428, in main
ret |= fix_file(filename, args)
File "…/Vcs/pytest/.venv/lib/python3.7/site-packages/pyupgrade.py", line 1402, in fix_file
contents_text = _fix_py3_plus(contents_text)
File "…/Vcs/pytest/.venv/lib/python3.7/site-packages/pyupgrade.py", line 1244, in _fix_py3_plus
_replace_call(tokens, i, end, func_args, template)
File "…/Vcs/pytest/.venv/lib/python3.7/site-packages/pyupgrade.py", line 1171, in _replace_call
src = tmpl.format(args=arg_strs, rest=rest)
IndexError: list index out of range
|
IndexError
|
def _fix_format_literals(contents_text):
try:
tokens = src_to_tokens(contents_text)
except tokenize.TokenError:
return contents_text
to_replace = []
string_start = None
string_end = None
seen_dot = False
for i, token in enumerate(tokens):
if string_start is None and token.name == "STRING":
string_start = i
string_end = i + 1
elif string_start is not None and token.name == "STRING":
string_end = i + 1
elif string_start is not None and token.src == ".":
seen_dot = True
elif seen_dot and token.src == "format":
to_replace.append((string_start, string_end))
string_start, string_end, seen_dot = None, None, False
elif token.name not in NON_CODING_TOKENS:
string_start, string_end, seen_dot = None, None, False
for start, end in reversed(to_replace):
src = tokens_to_src(tokens[start:end])
new_src = _rewrite_string_literal(src)
tokens[start:end] = [Token("STRING", new_src)]
return tokens_to_src(tokens)
|
def _fix_format_literals(contents_text):
tokens = src_to_tokens(contents_text)
to_replace = []
string_start = None
string_end = None
seen_dot = False
for i, token in enumerate(tokens):
if string_start is None and token.name == "STRING":
string_start = i
string_end = i + 1
elif string_start is not None and token.name == "STRING":
string_end = i + 1
elif string_start is not None and token.src == ".":
seen_dot = True
elif seen_dot and token.src == "format":
to_replace.append((string_start, string_end))
string_start, string_end, seen_dot = None, None, False
elif token.name not in NON_CODING_TOKENS:
string_start, string_end, seen_dot = None, None, False
for start, end in reversed(to_replace):
src = tokens_to_src(tokens[start:end])
new_src = _rewrite_string_literal(src)
tokens[start:end] = [Token("STRING", new_src)]
return tokens_to_src(tokens)
|
https://github.com/asottile/pyupgrade/issues/145
|
filename1.py
filename2.py
Traceback (most recent call last):
File "/home/thomas/miniconda/envs/py37/bin/pyupgrade", line 10, in <module>
sys.exit(main())
File "/home/thomas/miniconda/envs/py37/lib/python3.7/site-packages/pyupgrade.py", line 1428, in main
ret |= fix_file(filename, args)
File "/home/thomas/miniconda/envs/py37/lib/python3.7/site-packages/pyupgrade.py", line 1397, in fix_file
contents_text = _fix_format_literals(contents_text)
File "/home/thomas/miniconda/envs/py37/lib/python3.7/site-packages/pyupgrade.py", line 107, in _fix_format_literals
tokens = src_to_tokens(contents_text)
File "/home/thomas/miniconda/envs/py37/lib/python3.7/site-packages/tokenize_rt.py", line 44, in src_to_tokens
) in tokenize.generate_tokens(tokenize_target.readline):
File "/home/thomas/miniconda/envs/py37/lib/python3.7/tokenize.py", line 579, in _tokenize
raise TokenError("EOF in multi-line statement", (lnum, 0))
tokenize.TokenError: ('EOF in multi-line statement', (350, 0))
|
tokenize.TokenError
|
def _fix_py2_compatible(contents_text):
try:
ast_obj = ast_parse(contents_text)
except SyntaxError:
return contents_text
visitor = Py2CompatibleVisitor()
visitor.visit(ast_obj)
if not any(
(
visitor.dicts,
visitor.sets,
visitor.set_empty_literals,
visitor.is_literal,
)
):
return contents_text
try:
tokens = src_to_tokens(contents_text)
except tokenize.TokenError: # pragma: no cover (bpo-2180)
return contents_text
for i, token in reversed_enumerate(tokens):
if token.offset in visitor.dicts:
_process_dict_comp(tokens, i, visitor.dicts[token.offset])
elif token.offset in visitor.set_empty_literals:
_process_set_empty_literal(tokens, i)
elif token.offset in visitor.sets:
_process_set_literal(tokens, i, visitor.sets[token.offset])
elif token.offset in visitor.is_literal:
_process_is_literal(tokens, i, visitor.is_literal[token.offset])
return tokens_to_src(tokens)
|
def _fix_py2_compatible(contents_text):
try:
ast_obj = ast_parse(contents_text)
except SyntaxError:
return contents_text
visitor = Py2CompatibleVisitor()
visitor.visit(ast_obj)
if not any(
(
visitor.dicts,
visitor.sets,
visitor.set_empty_literals,
visitor.is_literal,
)
):
return contents_text
tokens = src_to_tokens(contents_text)
for i, token in reversed_enumerate(tokens):
if token.offset in visitor.dicts:
_process_dict_comp(tokens, i, visitor.dicts[token.offset])
elif token.offset in visitor.set_empty_literals:
_process_set_empty_literal(tokens, i)
elif token.offset in visitor.sets:
_process_set_literal(tokens, i, visitor.sets[token.offset])
elif token.offset in visitor.is_literal:
_process_is_literal(tokens, i, visitor.is_literal[token.offset])
return tokens_to_src(tokens)
|
https://github.com/asottile/pyupgrade/issues/145
|
filename1.py
filename2.py
Traceback (most recent call last):
File "/home/thomas/miniconda/envs/py37/bin/pyupgrade", line 10, in <module>
sys.exit(main())
File "/home/thomas/miniconda/envs/py37/lib/python3.7/site-packages/pyupgrade.py", line 1428, in main
ret |= fix_file(filename, args)
File "/home/thomas/miniconda/envs/py37/lib/python3.7/site-packages/pyupgrade.py", line 1397, in fix_file
contents_text = _fix_format_literals(contents_text)
File "/home/thomas/miniconda/envs/py37/lib/python3.7/site-packages/pyupgrade.py", line 107, in _fix_format_literals
tokens = src_to_tokens(contents_text)
File "/home/thomas/miniconda/envs/py37/lib/python3.7/site-packages/tokenize_rt.py", line 44, in src_to_tokens
) in tokenize.generate_tokens(tokenize_target.readline):
File "/home/thomas/miniconda/envs/py37/lib/python3.7/tokenize.py", line 579, in _tokenize
raise TokenError("EOF in multi-line statement", (lnum, 0))
tokenize.TokenError: ('EOF in multi-line statement', (350, 0))
|
tokenize.TokenError
|
def _fix_percent_format(contents_text):
try:
ast_obj = ast_parse(contents_text)
except SyntaxError:
return contents_text
visitor = FindPercentFormats()
visitor.visit(ast_obj)
if not visitor.found:
return contents_text
try:
tokens = src_to_tokens(contents_text)
except tokenize.TokenError: # pragma: no cover (bpo-2180)
return contents_text
for i, token in reversed_enumerate(tokens):
node = visitor.found.get(token.offset)
if node is None:
continue
# no .format() equivalent for bytestrings in py3
# note that this code is only necessary when running in python2
if _is_bytestring(tokens[i].src): # pragma: no cover (py2-only)
continue
if isinstance(node.right, ast.Tuple):
_fix_percent_format_tuple(tokens, i, node)
elif isinstance(node.right, ast.Dict):
_fix_percent_format_dict(tokens, i, node)
return tokens_to_src(tokens)
|
def _fix_percent_format(contents_text):
try:
ast_obj = ast_parse(contents_text)
except SyntaxError:
return contents_text
visitor = FindPercentFormats()
visitor.visit(ast_obj)
if not visitor.found:
return contents_text
tokens = src_to_tokens(contents_text)
for i, token in reversed_enumerate(tokens):
node = visitor.found.get(token.offset)
if node is None:
continue
# no .format() equivalent for bytestrings in py3
# note that this code is only necessary when running in python2
if _is_bytestring(tokens[i].src): # pragma: no cover (py2-only)
continue
if isinstance(node.right, ast.Tuple):
_fix_percent_format_tuple(tokens, i, node)
elif isinstance(node.right, ast.Dict):
_fix_percent_format_dict(tokens, i, node)
return tokens_to_src(tokens)
|
https://github.com/asottile/pyupgrade/issues/145
|
filename1.py
filename2.py
Traceback (most recent call last):
File "/home/thomas/miniconda/envs/py37/bin/pyupgrade", line 10, in <module>
sys.exit(main())
File "/home/thomas/miniconda/envs/py37/lib/python3.7/site-packages/pyupgrade.py", line 1428, in main
ret |= fix_file(filename, args)
File "/home/thomas/miniconda/envs/py37/lib/python3.7/site-packages/pyupgrade.py", line 1397, in fix_file
contents_text = _fix_format_literals(contents_text)
File "/home/thomas/miniconda/envs/py37/lib/python3.7/site-packages/pyupgrade.py", line 107, in _fix_format_literals
tokens = src_to_tokens(contents_text)
File "/home/thomas/miniconda/envs/py37/lib/python3.7/site-packages/tokenize_rt.py", line 44, in src_to_tokens
) in tokenize.generate_tokens(tokenize_target.readline):
File "/home/thomas/miniconda/envs/py37/lib/python3.7/tokenize.py", line 579, in _tokenize
raise TokenError("EOF in multi-line statement", (lnum, 0))
tokenize.TokenError: ('EOF in multi-line statement', (350, 0))
|
tokenize.TokenError
|
def _fix_py3_plus(contents_text):
try:
ast_obj = ast_parse(contents_text)
except SyntaxError:
return contents_text
visitor = FindPy3Plus()
visitor.visit(ast_obj)
if not any(
(
visitor.bases_to_remove,
visitor.native_literals,
visitor.six_b,
visitor.six_calls,
visitor.six_raises,
visitor.six_remove_decorators,
visitor.six_simple,
visitor.six_type_ctx,
visitor.six_with_metaclass,
visitor.super_calls,
)
):
return contents_text
try:
tokens = src_to_tokens(contents_text)
except tokenize.TokenError: # pragma: no cover (bpo-2180)
return contents_text
def _replace(i, mapping, node):
new_token = Token("CODE", _get_tmpl(mapping, node))
if isinstance(node, ast.Name):
tokens[i] = new_token
else:
j = i
while tokens[j].src != node.attr:
j += 1
tokens[i : j + 1] = [new_token]
for i, token in reversed_enumerate(tokens):
if not token.src:
continue
elif token.offset in visitor.bases_to_remove:
_remove_base_class(tokens, i)
elif token.offset in visitor.six_type_ctx:
_replace(i, SIX_TYPE_CTX_ATTRS, visitor.six_type_ctx[token.offset])
elif token.offset in visitor.six_simple:
_replace(i, SIX_SIMPLE_ATTRS, visitor.six_simple[token.offset])
elif token.offset in visitor.six_remove_decorators:
if tokens[i - 1].src == "@":
end = i + 1
while tokens[end].name != "NEWLINE":
end += 1
del tokens[i - 1 : end + 1]
elif token.offset in visitor.six_b:
j = _find_open_paren(tokens, i)
if (
tokens[j + 1].name == "STRING"
and _is_ascii(tokens[j + 1].src)
and tokens[j + 2].src == ")"
):
func_args, end = _parse_call_args(tokens, j)
_replace_call(tokens, i, end, func_args, SIX_B_TMPL)
elif token.offset in visitor.six_calls:
j = _find_open_paren(tokens, i)
func_args, end = _parse_call_args(tokens, j)
node = visitor.six_calls[token.offset]
template = _get_tmpl(SIX_CALLS, node.func)
_replace_call(tokens, i, end, func_args, template)
elif token.offset in visitor.six_raises:
j = _find_open_paren(tokens, i)
func_args, end = _parse_call_args(tokens, j)
node = visitor.six_raises[token.offset]
template = _get_tmpl(SIX_RAISES, node.func)
_replace_call(tokens, i, end, func_args, template)
elif token.offset in visitor.six_with_metaclass:
j = _find_open_paren(tokens, i)
func_args, end = _parse_call_args(tokens, j)
if len(func_args) == 1:
tmpl = WITH_METACLASS_NO_BASES_TMPL
else:
tmpl = WITH_METACLASS_BASES_TMPL
_replace_call(tokens, i, end, func_args, tmpl)
elif token.offset in visitor.super_calls:
i = _find_open_paren(tokens, i)
call = visitor.super_calls[token.offset]
victims = _victims(tokens, i, call, gen=False)
del tokens[victims.starts[0] + 1 : victims.ends[-1]]
elif token.offset in visitor.native_literals:
j = _find_open_paren(tokens, i)
func_args, end = _parse_call_args(tokens, j)
if any(tok.name == "NL" for tok in tokens[i:end]):
continue
_replace_call(tokens, i, end, func_args, "{args[0]}")
return tokens_to_src(tokens)
|
def _fix_py3_plus(contents_text):
try:
ast_obj = ast_parse(contents_text)
except SyntaxError:
return contents_text
visitor = FindPy3Plus()
visitor.visit(ast_obj)
if not any(
(
visitor.bases_to_remove,
visitor.native_literals,
visitor.six_b,
visitor.six_calls,
visitor.six_raises,
visitor.six_remove_decorators,
visitor.six_simple,
visitor.six_type_ctx,
visitor.six_with_metaclass,
visitor.super_calls,
)
):
return contents_text
def _replace(i, mapping, node):
new_token = Token("CODE", _get_tmpl(mapping, node))
if isinstance(node, ast.Name):
tokens[i] = new_token
else:
j = i
while tokens[j].src != node.attr:
j += 1
tokens[i : j + 1] = [new_token]
tokens = src_to_tokens(contents_text)
for i, token in reversed_enumerate(tokens):
if not token.src:
continue
elif token.offset in visitor.bases_to_remove:
_remove_base_class(tokens, i)
elif token.offset in visitor.six_type_ctx:
_replace(i, SIX_TYPE_CTX_ATTRS, visitor.six_type_ctx[token.offset])
elif token.offset in visitor.six_simple:
_replace(i, SIX_SIMPLE_ATTRS, visitor.six_simple[token.offset])
elif token.offset in visitor.six_remove_decorators:
if tokens[i - 1].src == "@":
end = i + 1
while tokens[end].name != "NEWLINE":
end += 1
del tokens[i - 1 : end + 1]
elif token.offset in visitor.six_b:
j = _find_open_paren(tokens, i)
if (
tokens[j + 1].name == "STRING"
and _is_ascii(tokens[j + 1].src)
and tokens[j + 2].src == ")"
):
func_args, end = _parse_call_args(tokens, j)
_replace_call(tokens, i, end, func_args, SIX_B_TMPL)
elif token.offset in visitor.six_calls:
j = _find_open_paren(tokens, i)
func_args, end = _parse_call_args(tokens, j)
node = visitor.six_calls[token.offset]
template = _get_tmpl(SIX_CALLS, node.func)
_replace_call(tokens, i, end, func_args, template)
elif token.offset in visitor.six_raises:
j = _find_open_paren(tokens, i)
func_args, end = _parse_call_args(tokens, j)
node = visitor.six_raises[token.offset]
template = _get_tmpl(SIX_RAISES, node.func)
_replace_call(tokens, i, end, func_args, template)
elif token.offset in visitor.six_with_metaclass:
j = _find_open_paren(tokens, i)
func_args, end = _parse_call_args(tokens, j)
if len(func_args) == 1:
tmpl = WITH_METACLASS_NO_BASES_TMPL
else:
tmpl = WITH_METACLASS_BASES_TMPL
_replace_call(tokens, i, end, func_args, tmpl)
elif token.offset in visitor.super_calls:
i = _find_open_paren(tokens, i)
call = visitor.super_calls[token.offset]
victims = _victims(tokens, i, call, gen=False)
del tokens[victims.starts[0] + 1 : victims.ends[-1]]
elif token.offset in visitor.native_literals:
j = _find_open_paren(tokens, i)
func_args, end = _parse_call_args(tokens, j)
if any(tok.name == "NL" for tok in tokens[i:end]):
continue
_replace_call(tokens, i, end, func_args, "{args[0]}")
return tokens_to_src(tokens)
|
https://github.com/asottile/pyupgrade/issues/145
|
filename1.py
filename2.py
Traceback (most recent call last):
File "/home/thomas/miniconda/envs/py37/bin/pyupgrade", line 10, in <module>
sys.exit(main())
File "/home/thomas/miniconda/envs/py37/lib/python3.7/site-packages/pyupgrade.py", line 1428, in main
ret |= fix_file(filename, args)
File "/home/thomas/miniconda/envs/py37/lib/python3.7/site-packages/pyupgrade.py", line 1397, in fix_file
contents_text = _fix_format_literals(contents_text)
File "/home/thomas/miniconda/envs/py37/lib/python3.7/site-packages/pyupgrade.py", line 107, in _fix_format_literals
tokens = src_to_tokens(contents_text)
File "/home/thomas/miniconda/envs/py37/lib/python3.7/site-packages/tokenize_rt.py", line 44, in src_to_tokens
) in tokenize.generate_tokens(tokenize_target.readline):
File "/home/thomas/miniconda/envs/py37/lib/python3.7/tokenize.py", line 579, in _tokenize
raise TokenError("EOF in multi-line statement", (lnum, 0))
tokenize.TokenError: ('EOF in multi-line statement', (350, 0))
|
tokenize.TokenError
|
def _fix_fstrings(contents_text):
try:
ast_obj = ast_parse(contents_text)
except SyntaxError:
return contents_text
visitor = FindSimpleFormats()
visitor.visit(ast_obj)
if not visitor.found:
return contents_text
try:
tokens = src_to_tokens(contents_text)
except tokenize.TokenError: # pragma: no cover (bpo-2180)
return contents_text
for i, token in reversed_enumerate(tokens):
node = visitor.found.get(token.offset)
if node is None:
continue
if _is_bytestring(token.src): # pragma: no cover (py2-only)
continue
paren = i + 3
if tokens_to_src(tokens[i + 1 : paren + 1]) != ".format(":
continue
# we don't actually care about arg position, so we pass `node`
victims = _victims(tokens, paren, node, gen=False)
end = victims.ends[-1]
# if it spans more than one line, bail
if tokens[end].line != token.line:
continue
tokens[i] = token._replace(src=_to_fstring(token.src, node))
del tokens[i + 1 : end + 1]
return tokens_to_src(tokens)
|
def _fix_fstrings(contents_text):
try:
ast_obj = ast_parse(contents_text)
except SyntaxError:
return contents_text
visitor = FindSimpleFormats()
visitor.visit(ast_obj)
if not visitor.found:
return contents_text
tokens = src_to_tokens(contents_text)
for i, token in reversed_enumerate(tokens):
node = visitor.found.get(token.offset)
if node is None:
continue
if _is_bytestring(token.src): # pragma: no cover (py2-only)
continue
paren = i + 3
if tokens_to_src(tokens[i + 1 : paren + 1]) != ".format(":
continue
# we don't actually care about arg position, so we pass `node`
victims = _victims(tokens, paren, node, gen=False)
end = victims.ends[-1]
# if it spans more than one line, bail
if tokens[end].line != token.line:
continue
tokens[i] = token._replace(src=_to_fstring(token.src, node))
del tokens[i + 1 : end + 1]
return tokens_to_src(tokens)
|
https://github.com/asottile/pyupgrade/issues/145
|
filename1.py
filename2.py
Traceback (most recent call last):
File "/home/thomas/miniconda/envs/py37/bin/pyupgrade", line 10, in <module>
sys.exit(main())
File "/home/thomas/miniconda/envs/py37/lib/python3.7/site-packages/pyupgrade.py", line 1428, in main
ret |= fix_file(filename, args)
File "/home/thomas/miniconda/envs/py37/lib/python3.7/site-packages/pyupgrade.py", line 1397, in fix_file
contents_text = _fix_format_literals(contents_text)
File "/home/thomas/miniconda/envs/py37/lib/python3.7/site-packages/pyupgrade.py", line 107, in _fix_format_literals
tokens = src_to_tokens(contents_text)
File "/home/thomas/miniconda/envs/py37/lib/python3.7/site-packages/tokenize_rt.py", line 44, in src_to_tokens
) in tokenize.generate_tokens(tokenize_target.readline):
File "/home/thomas/miniconda/envs/py37/lib/python3.7/tokenize.py", line 579, in _tokenize
raise TokenError("EOF in multi-line statement", (lnum, 0))
tokenize.TokenError: ('EOF in multi-line statement', (350, 0))
|
tokenize.TokenError
|
def visit_Call(self, node): # type: (ast.Call) -> None
if (
isinstance(node.func, ast.Attribute)
and isinstance(node.func.value, ast.Str)
and node.func.attr == "format"
and all(_simple_arg(arg) for arg in node.args)
and all(_simple_arg(k.value) for k in node.keywords)
and not _starargs(node)
):
seen = set()
for _, name, spec, _ in parse_format(node.func.value.s):
# timid: difficult to rewrite correctly
if spec is not None and "{" in spec:
break
if name is not None:
candidate, _, _ = name.partition(".")
# timid: could make the f-string longer
if candidate and candidate in seen:
break
# timid: bracketed
elif "[" in candidate:
break
seen.add(candidate)
else:
self.found[_ast_to_offset(node)] = node
self.generic_visit(node)
|
def visit_Call(self, node): # type: (ast.Call) -> None
if (
isinstance(node.func, ast.Attribute)
and isinstance(node.func.value, ast.Str)
and node.func.attr == "format"
and all(_simple_arg(arg) for arg in node.args)
and all(_simple_arg(k.value) for k in node.keywords)
and not _starargs(node)
):
seen = set()
for _, name, spec, _ in parse_format(node.func.value.s):
# timid: difficult to rewrite correctly
if spec is not None and "{" in spec:
break
if name is not None:
candidate, _, _ = name.partition(".")
# timid: could make the f-string longer
if candidate and candidate in seen:
break
seen.add(candidate)
else:
self.found[_ast_to_offset(node)] = node
self.generic_visit(node)
|
https://github.com/asottile/pyupgrade/issues/134
|
Traceback (most recent call last):
File "/home/ethan/venv/bin/pyupgrade", line 10, in <module>
sys.exit(main())
File "/home/ethan/venv/lib/python3.7/site-packages/pyupgrade.py", line 1396, in main
ret |= fix_file(filename, args)
File "/home/ethan/venv/lib/python3.7/site-packages/pyupgrade.py", line 1372, in fix_file
contents_text = _fix_fstrings(contents_text)
File "/home/ethan/venv/lib/python3.7/site-packages/pyupgrade.py", line 1348, in _fix_fstrings
tokens[i] = token._replace(src=_to_fstring(token.src, node))
File "/home/ethan/venv/lib/python3.7/site-packages/pyupgrade.py", line 1310, in _to_fstring
name = ''.join((params[k or str(i)], dot, rest))
KeyError: '0[a]'
|
KeyError
|
def visit_Call(self, node): # type: (ast.Call) -> None
if (
isinstance(node.func, ast.Attribute)
and isinstance(node.func.value, ast.Str)
and node.func.attr == "format"
and all(_simple_arg(arg) for arg in node.args)
and all(_simple_arg(k.value) for k in node.keywords)
and not _starargs(node)
):
seen = set()
for _, name, spec, _ in parse_format(node.func.value.s):
# timid: difficult to rewrite correctly
if spec is not None and "{" in spec:
break
if name is not None:
candidate, _, _ = name.partition(".")
# timid: could make the f-string longer
if candidate and candidate in seen:
break
# timid: bracketed
elif "[" in name:
break
seen.add(candidate)
else:
self.found[_ast_to_offset(node)] = node
self.generic_visit(node)
|
def visit_Call(self, node): # type: (ast.Call) -> None
if (
isinstance(node.func, ast.Attribute)
and isinstance(node.func.value, ast.Str)
and node.func.attr == "format"
and all(_simple_arg(arg) for arg in node.args)
and all(_simple_arg(k.value) for k in node.keywords)
and not _starargs(node)
):
seen = set()
for _, name, spec, _ in parse_format(node.func.value.s):
# timid: difficult to rewrite correctly
if spec is not None and "{" in spec:
break
if name is not None:
candidate, _, _ = name.partition(".")
# timid: could make the f-string longer
if candidate and candidate in seen:
break
# timid: bracketed
elif "[" in candidate:
break
seen.add(candidate)
else:
self.found[_ast_to_offset(node)] = node
self.generic_visit(node)
|
https://github.com/asottile/pyupgrade/issues/134
|
Traceback (most recent call last):
File "/home/ethan/venv/bin/pyupgrade", line 10, in <module>
sys.exit(main())
File "/home/ethan/venv/lib/python3.7/site-packages/pyupgrade.py", line 1396, in main
ret |= fix_file(filename, args)
File "/home/ethan/venv/lib/python3.7/site-packages/pyupgrade.py", line 1372, in fix_file
contents_text = _fix_fstrings(contents_text)
File "/home/ethan/venv/lib/python3.7/site-packages/pyupgrade.py", line 1348, in _fix_fstrings
tokens[i] = token._replace(src=_to_fstring(token.src, node))
File "/home/ethan/venv/lib/python3.7/site-packages/pyupgrade.py", line 1310, in _to_fstring
name = ''.join((params[k or str(i)], dot, rest))
KeyError: '0[a]'
|
KeyError
|
def _to_fstring(src, call):
params = {}
for i, arg in enumerate(call.args):
params[str(i)] = _unparse(arg)
for kwd in call.keywords:
params[kwd.arg] = _unparse(kwd.value)
parts = []
i = 0
for s, name, spec, conv in parse_format("f" + src):
if name is not None:
k, dot, rest = name.partition(".")
name = "".join((params[k or str(i)], dot, rest))
i += 1
parts.append((s, name, spec, conv))
return unparse_parsed_string(parts)
|
def _to_fstring(src, call):
params = {}
for i, arg in enumerate(call.args):
params[str(i)] = _unparse(arg)
for kwd in call.keywords:
params[kwd.arg] = _unparse(kwd.value)
parts = []
for i, (s, name, spec, conv) in enumerate(parse_format("f" + src)):
if name is not None:
k, dot, rest = name.partition(".")
name = "".join((params[k or str(i)], dot, rest))
parts.append((s, name, spec, conv))
return unparse_parsed_string(parts)
|
https://github.com/asottile/pyupgrade/issues/52
|
Traceback (most recent call last):
File "/usr/bin/pyupgrade", line 11, in <module>
sys.exit(main())
File "/usr/lib/python3.7/site-packages/pyupgrade.py", line 907, in main
ret |= fix_file(filename, args)
File "/usr/lib/python3.7/site-packages/pyupgrade.py", line 884, in fix_file
contents_text = _fix_fstrings(contents_text)
File "/usr/lib/python3.7/site-packages/pyupgrade.py", line 858, in _fix_fstrings
tokens[i] = token._replace(src=_to_fstring(token.src, node))
File "/usr/lib/python3.7/site-packages/pyupgrade.py", line 824, in _to_fstring
name = ''.join((params[k or str(i)], dot, rest))
KeyError: '3'
|
KeyError
|
async def setup(self, *, creator=None, category=None, initial_message=None):
"""Create the thread channel and other io related initialisation tasks"""
self.bot.dispatch("thread_initiate", self)
recipient = self.recipient
# in case it creates a channel outside of category
overwrites = {
self.bot.modmail_guild.default_role: discord.PermissionOverwrite(
read_messages=False
)
}
category = category or self.bot.main_category
if category is not None:
overwrites = None
try:
channel = await self.bot.modmail_guild.create_text_channel(
name=format_channel_name(recipient, self.bot.modmail_guild),
category=category,
overwrites=overwrites,
reason="Creating a thread channel.",
)
except discord.HTTPException as e:
# try again but null-discrim (name could be banned)
try:
channel = await self.bot.modmail_guild.create_text_channel(
name=format_channel_name(
recipient, self.bot.modmail_guild, force_null=True
),
category=category,
overwrites=overwrites,
reason="Creating a thread channel.",
)
except discord.HTTPException as e: # Failed to create due to missing perms.
logger.critical("An error occurred while creating a thread.", exc_info=True)
self.manager.cache.pop(self.id)
embed = discord.Embed(color=self.bot.error_color)
embed.title = "Error while trying to create a thread."
embed.description = str(e)
embed.add_field(name="Recipient", value=recipient.mention)
if self.bot.log_channel is not None:
await self.bot.log_channel.send(embed=embed)
return
self._channel = channel
try:
log_url, log_data = await asyncio.gather(
self.bot.api.create_log_entry(recipient, channel, creator or recipient),
self.bot.api.get_user_logs(recipient.id),
)
log_count = sum(1 for log in log_data if not log["open"])
except Exception:
logger.error(
"An error occurred while posting logs to the database.", exc_info=True
)
log_url = log_count = None
# ensure core functionality still works
await channel.edit(topic=f"User ID: {recipient.id}")
self.ready = True
if creator is not None and creator != recipient:
mention = None
else:
mention = self.bot.config["mention"]
async def send_genesis_message():
info_embed = self._format_info_embed(
recipient, log_url, log_count, self.bot.main_color
)
try:
msg = await channel.send(mention, embed=info_embed)
self.bot.loop.create_task(msg.pin())
self.genesis_message = msg
except Exception:
logger.error("Failed unexpectedly:", exc_info=True)
async def send_recipient_genesis_message():
# Once thread is ready, tell the recipient.
thread_creation_response = self.bot.config["thread_creation_response"]
embed = discord.Embed(
color=self.bot.mod_color,
description=thread_creation_response,
timestamp=channel.created_at,
)
recipient_thread_close = self.bot.config.get("recipient_thread_close")
if recipient_thread_close:
footer = self.bot.config["thread_self_closable_creation_footer"]
else:
footer = self.bot.config["thread_creation_footer"]
embed.set_footer(text=footer, icon_url=self.bot.guild.icon_url)
embed.title = self.bot.config["thread_creation_title"]
if creator is None or creator == recipient:
msg = await recipient.send(embed=embed)
if recipient_thread_close:
close_emoji = self.bot.config["close_emoji"]
close_emoji = await self.bot.convert_emoji(close_emoji)
await self.bot.add_reaction(msg, close_emoji)
async def send_persistent_notes():
notes = await self.bot.api.find_notes(self.recipient)
ids = {}
class State:
def store_user(self, user):
return user
for note in notes:
author = note["author"]
class Author:
name = author["name"]
id = author["id"]
discriminator = author["discriminator"]
avatar_url = author["avatar_url"]
data = {
"id": round(time.time() * 1000 - discord.utils.DISCORD_EPOCH) << 22,
"attachments": {},
"embeds": {},
"edited_timestamp": None,
"type": None,
"pinned": None,
"mention_everyone": None,
"tts": None,
"content": note["message"],
"author": Author(),
}
message = discord.Message(state=State(), channel=None, data=data)
ids[note["_id"]] = str(
(await self.note(message, persistent=True, thread_creation=True)).id
)
await self.bot.api.update_note_ids(ids)
async def activate_auto_triggers():
message = DummyMessage(copy.copy(initial_message))
if message:
try:
return await self.bot.trigger_auto_triggers(message, channel)
except RuntimeError:
pass
await asyncio.gather(
send_genesis_message(),
send_recipient_genesis_message(),
activate_auto_triggers(),
send_persistent_notes(),
)
self.bot.dispatch("thread_ready", self)
|
async def setup(self, *, creator=None, category=None, initial_message=None):
"""Create the thread channel and other io related initialisation tasks"""
self.bot.dispatch("thread_initiate", self)
recipient = self.recipient
# in case it creates a channel outside of category
overwrites = {
self.bot.modmail_guild.default_role: discord.PermissionOverwrite(
read_messages=False
)
}
category = category or self.bot.main_category
if category is not None:
overwrites = None
try:
channel = await self.bot.modmail_guild.create_text_channel(
name=format_channel_name(recipient, self.bot.modmail_guild),
category=category,
overwrites=overwrites,
reason="Creating a thread channel.",
)
except discord.HTTPException as e: # Failed to create due to missing perms.
logger.critical("An error occurred while creating a thread.", exc_info=True)
self.manager.cache.pop(self.id)
embed = discord.Embed(color=self.bot.error_color)
embed.title = "Error while trying to create a thread."
embed.description = str(e)
embed.add_field(name="Recipient", value=recipient.mention)
if self.bot.log_channel is not None:
await self.bot.log_channel.send(embed=embed)
return
self._channel = channel
try:
log_url, log_data = await asyncio.gather(
self.bot.api.create_log_entry(recipient, channel, creator or recipient),
self.bot.api.get_user_logs(recipient.id),
)
log_count = sum(1 for log in log_data if not log["open"])
except Exception:
logger.error(
"An error occurred while posting logs to the database.", exc_info=True
)
log_url = log_count = None
# ensure core functionality still works
await channel.edit(topic=f"User ID: {recipient.id}")
self.ready = True
if creator is not None and creator != recipient:
mention = None
else:
mention = self.bot.config["mention"]
async def send_genesis_message():
info_embed = self._format_info_embed(
recipient, log_url, log_count, self.bot.main_color
)
try:
msg = await channel.send(mention, embed=info_embed)
self.bot.loop.create_task(msg.pin())
self.genesis_message = msg
except Exception:
logger.error("Failed unexpectedly:", exc_info=True)
async def send_recipient_genesis_message():
# Once thread is ready, tell the recipient.
thread_creation_response = self.bot.config["thread_creation_response"]
embed = discord.Embed(
color=self.bot.mod_color,
description=thread_creation_response,
timestamp=channel.created_at,
)
recipient_thread_close = self.bot.config.get("recipient_thread_close")
if recipient_thread_close:
footer = self.bot.config["thread_self_closable_creation_footer"]
else:
footer = self.bot.config["thread_creation_footer"]
embed.set_footer(text=footer, icon_url=self.bot.guild.icon_url)
embed.title = self.bot.config["thread_creation_title"]
if creator is None or creator == recipient:
msg = await recipient.send(embed=embed)
if recipient_thread_close:
close_emoji = self.bot.config["close_emoji"]
close_emoji = await self.bot.convert_emoji(close_emoji)
await self.bot.add_reaction(msg, close_emoji)
async def send_persistent_notes():
notes = await self.bot.api.find_notes(self.recipient)
ids = {}
class State:
def store_user(self, user):
return user
for note in notes:
author = note["author"]
class Author:
name = author["name"]
id = author["id"]
discriminator = author["discriminator"]
avatar_url = author["avatar_url"]
data = {
"id": round(time.time() * 1000 - discord.utils.DISCORD_EPOCH) << 22,
"attachments": {},
"embeds": {},
"edited_timestamp": None,
"type": None,
"pinned": None,
"mention_everyone": None,
"tts": None,
"content": note["message"],
"author": Author(),
}
message = discord.Message(state=State(), channel=None, data=data)
ids[note["_id"]] = str(
(await self.note(message, persistent=True, thread_creation=True)).id
)
await self.bot.api.update_note_ids(ids)
async def activate_auto_triggers():
message = DummyMessage(copy.copy(initial_message))
if message:
try:
return await self.bot.trigger_auto_triggers(message, channel)
except RuntimeError:
pass
await asyncio.gather(
send_genesis_message(),
send_recipient_genesis_message(),
activate_auto_triggers(),
send_persistent_notes(),
)
self.bot.dispatch("thread_ready", self)
|
https://github.com/kyb3r/modmail/issues/2934
|
discord.errors.HTTPException: 400 Bad Request (error code: 50035): Invalid Form Body
In name: Contains words not allowed for servers in Server Discovery.
2021-01-11 08:45:24 __main__[762] - ERROR: Failed to send message:
Traceback (most recent call last):
File "/app/bot.py", line 760, in process_dm_modmail
await thread.send(message)
File "/app/core/thread.py", line 713, in send
self.bot.loop.create_task(self.bot.api.append_log(message, channel_id=self.channel.id))
AttributeError: 'NoneType' object has no attribute 'id'
2021-01-11 08:45:30 core.thread[102] - CRITICAL: An error occurred while creating a thread.
Traceback (most recent call last):
File "/app/core/thread.py", line 95, in setup
channel = await self.bot.modmail_guild.create_text_channel(
File "/app/.heroku/python/lib/python3.9/site-packages/discord/guild.py", line 905, in create_text_channel
data = await self._create_channel(name, overwrites, ChannelType.text, category, reason=reason, **options)
File "/app/.heroku/python/lib/python3.9/site-packages/discord/http.py", line 245, in request
raise HTTPException(r, data)
|
AttributeError
|
def format_channel_name(author, guild, exclude_channel=None, force_null=False):
"""Sanitises a username for use with text channel names"""
name = author.name.lower()
if force_null:
name = "null"
name = new_name = (
"".join(l for l in name if l not in string.punctuation and l.isprintable())
or "null"
) + f"-{author.discriminator}"
counter = 1
existed = set(c.name for c in guild.text_channels if c != exclude_channel)
while new_name in existed:
new_name = f"{name}_{counter}" # multiple channels with same name
counter += 1
return new_name
|
def format_channel_name(author, guild, exclude_channel=None):
"""Sanitises a username for use with text channel names"""
name = author.name.lower()
name = new_name = (
"".join(l for l in name if l not in string.punctuation and l.isprintable())
or "null"
) + f"-{author.discriminator}"
counter = 1
existed = set(c.name for c in guild.text_channels if c != exclude_channel)
while new_name in existed:
new_name = f"{name}_{counter}" # multiple channels with same name
counter += 1
return new_name
|
https://github.com/kyb3r/modmail/issues/2934
|
discord.errors.HTTPException: 400 Bad Request (error code: 50035): Invalid Form Body
In name: Contains words not allowed for servers in Server Discovery.
2021-01-11 08:45:24 __main__[762] - ERROR: Failed to send message:
Traceback (most recent call last):
File "/app/bot.py", line 760, in process_dm_modmail
await thread.send(message)
File "/app/core/thread.py", line 713, in send
self.bot.loop.create_task(self.bot.api.append_log(message, channel_id=self.channel.id))
AttributeError: 'NoneType' object has no attribute 'id'
2021-01-11 08:45:30 core.thread[102] - CRITICAL: An error occurred while creating a thread.
Traceback (most recent call last):
File "/app/core/thread.py", line 95, in setup
channel = await self.bot.modmail_guild.create_text_channel(
File "/app/.heroku/python/lib/python3.9/site-packages/discord/guild.py", line 905, in create_text_channel
data = await self._create_channel(name, overwrites, ChannelType.text, category, reason=reason, **options)
File "/app/.heroku/python/lib/python3.9/site-packages/discord/http.py", line 245, in request
raise HTTPException(r, data)
|
AttributeError
|
async def find_linked_message_from_dm(self, message, either_direction=False):
if either_direction and message.embeds and message.embeds[0].author.url:
compare_url = message.embeds[0].author.url
compare_id = compare_url.split("#")[-1]
else:
compare_url = None
compare_id = None
if self.channel is not None:
async for linked_message in self.channel.history():
if not linked_message.embeds:
continue
url = linked_message.embeds[0].author.url
if not url:
continue
if url == compare_url:
return linked_message
msg_id = url.split("#")[-1]
if not msg_id.isdigit():
continue
msg_id = int(msg_id)
if int(msg_id) == message.id:
return linked_message
if compare_id is not None and compare_id.isdigit():
if int(msg_id) == int(compare_id):
return linked_message
raise ValueError("Thread channel message not found.")
|
async def find_linked_message_from_dm(self, message, either_direction=False):
if either_direction and message.embeds:
compare_url = message.embeds[0].author.url
compare_id = compare_url.split("#")[-1]
else:
compare_url = None
compare_id = None
if self.channel is not None:
async for linked_message in self.channel.history():
if not linked_message.embeds:
continue
url = linked_message.embeds[0].author.url
if not url:
continue
if url == compare_url:
return linked_message
msg_id = url.split("#")[-1]
if not msg_id.isdigit():
continue
msg_id = int(msg_id)
if int(msg_id) == message.id:
return linked_message
if compare_id is not None and compare_id.isdigit():
if int(msg_id) == int(compare_id):
return linked_message
raise ValueError("Thread channel message not found.")
|
https://github.com/kyb3r/modmail/issues/2931
|
2021-01-09T15:38:32.286633+00:00 app[worker.1]: 01/09/21 15:38:32 __main__[1402] - ERROR: Ignoring exception in on_raw_reaction_add.
2021-01-09T15:38:32.288758+00:00 app[worker.1]: 01/09/21 15:38:32 __main__[1403] - ERROR: Unexpected exception:
2021-01-09T15:38:32.288760+00:00 app[worker.1]: Traceback (most recent call last):
2021-01-09T15:38:32.288761+00:00 app[worker.1]: File "/app/.heroku/python/lib/python3.9/site-packages/discord/client.py", line 343, in _run_event
2021-01-09T15:38:32.288762+00:00 app[worker.1]: await coro(*args, **kwargs)
2021-01-09T15:38:32.288762+00:00 app[worker.1]: File "/app/bot.py", line 1223, in on_raw_reaction_add
2021-01-09T15:38:32.288763+00:00 app[worker.1]: await self.handle_reaction_events(payload)
2021-01-09T15:38:32.288763+00:00 app[worker.1]: File "/app/bot.py", line 1193, in handle_reaction_events
2021-01-09T15:38:32.288764+00:00 app[worker.1]: linked_message = await thread.find_linked_message_from_dm(
2021-01-09T15:38:32.288764+00:00 app[worker.1]: File "/app/core/thread.py", line 663, in find_linked_message_from_dm
2021-01-09T15:38:32.288765+00:00 app[worker.1]: compare_id = compare_url.split("#")[-1]
2021-01-09T15:38:32.288765+00:00 app[worker.1]: AttributeError: '_EmptyEmbed' object has no attribute 'split'
|
AttributeError
|
async def on_raw_reaction_add(self, payload):
if self.config["transfer_reactions"]:
await self.handle_reaction_events(payload)
|
async def on_raw_reaction_add(self, payload):
await self.handle_reaction_events(payload)
|
https://github.com/kyb3r/modmail/issues/2783
|
Traceback (most recent call last):
File "/app/.heroku/python/lib/python3.7/site-packages/discord/client.py", line 270, in _run_event
await coro(*args, **kwargs)
File "bot.py", line 868, in on_message
await self.process_commands(message)
File "bot.py", line 875, in process_commands
return await self.process_dm_modmail(message)
File "bot.py", line 744, in process_dm_modmail
thread = await self.threads.create(message.author)
File "/app/core/thread.py", line 988, in create
if category is None and len(cat.channels) == 50:
AttributeError: 'NoneType' object has no attribute 'channels'
2020-04-16 03:23:27 core.thread[918] - WARNING: Found existing thread for 301194255731392513 but the channel is invalid.
2020-04-16 03:23:30 core.thread[977] - WARNING: Found an existing thread for ElijahPepe#4897, closing previous thread.
2020-04-16 03:23:30 __main__[1126] - ERROR: Ignoring exception in on_message.
2020-04-16 03:23:30 __main__[1127] - ERROR: Unexpected exception:
Traceback (most recent call last):
File "/app/.heroku/python/lib/python3.7/site-packages/discord/client.py", line 270, in _run_event
await coro(*args, **kwargs)
File "bot.py", line 868, in on_message
await self.process_commands(message)
File "bot.py", line 875, in process_commands
return await self.process_dm_modmail(message)
File "bot.py", line 744, in process_dm_modmail
thread = await self.threads.create(message.author)
File "/app/core/thread.py", line 988, in create
if category is None and len(cat.channels) == 50:
AttributeError: 'NoneType' object has no attribute 'channels'
|
AttributeError
|
async def on_raw_reaction_remove(self, payload):
if self.config["transfer_reactions"]:
await self.handle_reaction_events(payload)
|
async def on_raw_reaction_remove(self, payload):
await self.handle_reaction_events(payload)
|
https://github.com/kyb3r/modmail/issues/2783
|
Traceback (most recent call last):
File "/app/.heroku/python/lib/python3.7/site-packages/discord/client.py", line 270, in _run_event
await coro(*args, **kwargs)
File "bot.py", line 868, in on_message
await self.process_commands(message)
File "bot.py", line 875, in process_commands
return await self.process_dm_modmail(message)
File "bot.py", line 744, in process_dm_modmail
thread = await self.threads.create(message.author)
File "/app/core/thread.py", line 988, in create
if category is None and len(cat.channels) == 50:
AttributeError: 'NoneType' object has no attribute 'channels'
2020-04-16 03:23:27 core.thread[918] - WARNING: Found existing thread for 301194255731392513 but the channel is invalid.
2020-04-16 03:23:30 core.thread[977] - WARNING: Found an existing thread for ElijahPepe#4897, closing previous thread.
2020-04-16 03:23:30 __main__[1126] - ERROR: Ignoring exception in on_message.
2020-04-16 03:23:30 __main__[1127] - ERROR: Unexpected exception:
Traceback (most recent call last):
File "/app/.heroku/python/lib/python3.7/site-packages/discord/client.py", line 270, in _run_event
await coro(*args, **kwargs)
File "bot.py", line 868, in on_message
await self.process_commands(message)
File "bot.py", line 875, in process_commands
return await self.process_dm_modmail(message)
File "bot.py", line 744, in process_dm_modmail
thread = await self.threads.create(message.author)
File "/app/core/thread.py", line 988, in create
if category is None and len(cat.channels) == 50:
AttributeError: 'NoneType' object has no attribute 'channels'
|
AttributeError
|
def _extract_sorting(self, limit):
# Permissions entries are not stored with timestamp, so do not
# force it.
result = super()._extract_sorting(limit)
without_last_modified = [s for s in result if s.field != self.model.modified_field]
# For pagination, there must be at least one sort criteria.
# We use ``uri`` because its values are unique.
if "uri" not in [s.field for s in without_last_modified]:
without_last_modified.append(Sort("uri", -1))
return without_last_modified
|
def _extract_sorting(self, limit):
# Permissions entries are not stored with timestamp, so do not
# force it.
result = super()._extract_sorting(limit)
without_last_modified = [s for s in result if s.field != self.model.modified_field]
return without_last_modified
|
https://github.com/Kinto/kinto/issues/1157
|
ERROR:root:list index out of range
Traceback (most recent call last):
File "/home/niko/work/kinto-http.js/.venv/lib/python3.5/site-packages/pyramid/tweens.py", line 22, in excview_tween
response = handler(request)
File "/home/niko/work/kinto-http.js/.venv/lib/python3.5/site-packages/pyramid_tm/__init__.py", line 119, in tm_tween
reraise(*exc_info)
File "/home/niko/work/kinto-http.js/.venv/lib/python3.5/site-packages/pyramid_tm/compat.py", line 15, in reraise
raise value
File "/home/niko/work/kinto-http.js/.venv/lib/python3.5/site-packages/pyramid_tm/__init__.py", line 98, in tm_tween
response = handler(request)
File "/home/niko/work/kinto-http.js/.venv/lib/python3.5/site-packages/pyramid/router.py", line 155, in handle_request
view_name
File "/home/niko/work/kinto-http.js/.venv/lib/python3.5/site-packages/pyramid/view.py", line 612, in _call_view
response = view_callable(context, request)
File "/home/niko/work/kinto-http.js/.venv/lib/python3.5/site-packages/pyramid/config/views.py", line 181, in __call__
return view(context, request)
File "/home/niko/work/kinto-http.js/.venv/lib/python3.5/site-packages/pyramid/viewderivers.py", line 389, in attr_view
return view(context, request)
File "/home/niko/work/kinto-http.js/.venv/lib/python3.5/site-packages/pyramid/viewderivers.py", line 367, in predicate_wrapper
return view(context, request)
File "/home/niko/work/kinto-http.js/.venv/lib/python3.5/site-packages/pyramid/viewderivers.py", line 300, in secured_view
return view(context, request)
File "/home/niko/work/kinto-http.js/.venv/lib/python3.5/site-packages/pyramid/viewderivers.py", line 438, in rendered_view
result = view(context, request)
File "/home/niko/work/kinto-http.js/.venv/lib/python3.5/site-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/home/niko/work/kinto-http.js/.venv/lib/python3.5/site-packages/cornice/service.py", line 491, in wrapper
response = view_()
File "/home/niko/work/kinto-http.js/.venv/lib/python3.5/site-packages/kinto/core/resource/__init__.py", line 279, in collection_get
limit, sorting)
File "/home/niko/work/kinto-http.js/.venv/lib/python3.5/site-packages/kinto/core/resource/__init__.py", line 1055, in _extract_pagination_rules_from_token
filters = self._build_pagination_rules(sorting, last_record)
File "/home/niko/work/kinto-http.js/.venv/lib/python3.5/site-packages/kinto/core/resource/__init__.py", line 1021, in _build_pagination_rules
field, direction = sorting[-1]
IndexError: list index out of range
|
IndexError
|
def includeme(config):
config.add_api_capability(
"accounts",
description="Manage user accounts.",
url="https://kinto.readthedocs.io/en/latest/api/1.x/accounts.html",
)
config.scan("kinto.plugins.accounts.views")
PERMISSIONS_INHERITANCE_TREE[""].update({"account:create": {}})
PERMISSIONS_INHERITANCE_TREE["account"] = {
"write": {"account": ["write"]},
"read": {"account": ["write", "read"]},
}
# Add some safety to avoid weird behaviour with basicauth default policy.
settings = config.get_settings()
auth_policies = settings["multiauth.policies"]
if "basicauth" in auth_policies and "account" in auth_policies:
if auth_policies.index("basicauth") < auth_policies.index("account"):
error_msg = (
"'basicauth' should not be mentioned before 'account' "
"in 'multiauth.policies' setting."
)
raise ConfigurationError(error_msg)
|
def includeme(config):
config.add_api_capability(
"accounts",
description="Manage user accounts.",
url="https://kinto.readthedocs.io/en/latest/api/1.x/accounts.html",
)
config.scan("kinto.plugins.accounts.views")
PERMISSIONS_INHERITANCE_TREE[""].update({"account:create": {}})
PERMISSIONS_INHERITANCE_TREE["account"] = {
"write": {"account": ["write"]},
"read": {"account": ["write", "read"]},
}
|
https://github.com/Kinto/kinto/issues/1177
|
Traceback (most recent call last):
File "/Users/gsurita/kinto/kinto/.venv/lib/python3.6/site-packages/pyramid/tweens.py", line 22, in excview_tween
response = handler(request)
File "/Users/gsurita/kinto/kinto/.venv/lib/python3.6/site-packages/pyramid_tm/__init__.py", line 119, in tm_tween
reraise(*exc_info)
File "/Users/gsurita/kinto/kinto/.venv/lib/python3.6/site-packages/pyramid_tm/compat.py", line 15, in reraise
raise value
File "/Users/gsurita/kinto/kinto/.venv/lib/python3.6/site-packages/pyramid_tm/__init__.py", line 98, in tm_tween
response = handler(request)
File "/Users/gsurita/kinto/kinto/.venv/lib/python3.6/site-packages/pyramid/router.py", line 155, in handle_request
view_name
File "/Users/gsurita/kinto/kinto/.venv/lib/python3.6/site-packages/pyramid/view.py", line 612, in _call_view
response = view_callable(context, request)
File "/Users/gsurita/kinto/kinto/.venv/lib/python3.6/site-packages/pyramid/config/views.py", line 181, in __call__
return view(context, request)
File "/Users/gsurita/kinto/kinto/.venv/lib/python3.6/site-packages/pyramid/viewderivers.py", line 389, in attr_view
return view(context, request)
File "/Users/gsurita/kinto/kinto/.venv/lib/python3.6/site-packages/pyramid/viewderivers.py", line 367, in predicate_wrapper
return view(context, request)
File "/Users/gsurita/kinto/kinto/.venv/lib/python3.6/site-packages/pyramid/viewderivers.py", line 300, in secured_view
return view(context, request)
File "/Users/gsurita/kinto/kinto/.venv/lib/python3.6/site-packages/pyramid/viewderivers.py", line 438, in rendered_view
result = view(context, request)
File "/Users/gsurita/kinto/kinto/.venv/lib/python3.6/site-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/Users/gsurita/kinto/kinto/.venv/lib/python3.6/site-packages/cornice/service.py", line 491, in wrapper
response = view_()
File "/Users/gsurita/kinto/kinto/kinto/plugins/accounts/views.py", line 81, in collection_post
result = super(Account, self).collection_post()
File "/Users/gsurita/kinto/kinto/kinto/core/resource/__init__.py", line 341, in collection_post
new_record = self.process_record(new_record)
File "/Users/gsurita/kinto/kinto/kinto/plugins/accounts/views.py", line 102, in process_record
if new[self.model.id_field] != self.request.selected_userid:
KeyError: 'id'
|
KeyError
|
def account_check(username, password, request):
parent_id = username
try:
existing = request.registry.storage.get(
parent_id=parent_id, collection_id="account", object_id=username
)
except storage_exceptions.RecordNotFoundError:
return None
hashed = existing["password"].encode(encoding="utf-8")
pwd_str = password.encode(encoding="utf-8")
if hashed == bcrypt.hashpw(pwd_str, hashed):
return True # Match! Return anything but None.
|
def account_check(username, password, request):
parent_id = username
try:
existing = request.registry.storage.get(
parent_id=parent_id, collection_id="account", object_id=username
)
except storage_exceptions.RecordNotFoundError:
return None
hashed = existing["password"]
pwd_str = password.encode(encoding="utf-8")
if hashed == bcrypt.hashpw(pwd_str, hashed):
return True # Match! Return anything but None.
|
https://github.com/Kinto/kinto/issues/1224
|
Unicode-objects must be encoded before hashing
Traceback (most recent call last):
File "/usr/local/lib/python3.5/dist-packages/pyramid/tweens.py", line 22, in excview_tween
response = handler(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid_tm/__init__.py", line 119, in tm_tween
reraise(*exc_info)
File "/usr/local/lib/python3.5/dist-packages/pyramid_tm/compat.py", line 15, in reraise
raise value
File "/usr/local/lib/python3.5/dist-packages/pyramid_tm/__init__.py", line 98, in tm_tween
response = handler(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 155, in handle_request
view_name
File "/usr/local/lib/python3.5/dist-packages/pyramid/view.py", line 612, in _call_view
response = view_callable(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/config/views.py", line 181, in __call__
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 389, in attr_view
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 367, in predicate_wrapper
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 438, in rendered_view
result = view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/usr/local/lib/python3.5/dist-packages/cornice/service.py", line 493, in wrapper
response = view_(request)
File "/code/kinto/core/views/hello.py", line 45, in get_hello
if Authenticated in request.effective_principals:
File "/usr/local/lib/python3.5/dist-packages/pyramid/security.py", line 375, in effective_principals
return policy.effective_principals(self)
File "/usr/local/lib/python3.5/dist-packages/pyramid_multiauth/__init__.py", line 119, in effective_principals
userid = policy.authenticated_userid(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/authentication.py", line 92, in authenticated_userid
callback_ok = self.callback(userid, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/authentication.py", line 1123, in callback
return self.check(username, password, request)
File "/code/kinto/plugins/accounts/authentication.py", line 18, in account_check
if hashed == bcrypt.hashpw(pwd_str, hashed):
File "/usr/local/lib/python3.5/dist-packages/bcrypt/__init__.py", line 62, in hashpw
raise TypeError("Unicode-objects must be encoded before hashing")
|
TypeError
|
def process_record(self, new, old=None):
new = super(Account, self).process_record(new, old)
# Store password safely in database as str
# (bcrypt.hashpw returns base64 bytes).
pwd_str = new["password"].encode(encoding="utf-8")
hashed = bcrypt.hashpw(pwd_str, bcrypt.gensalt())
new["password"] = hashed.decode(encoding="utf-8")
# Administrators can reach other accounts and anonymous have no
# selected_userid. So do not try to enforce.
if self.context.is_administrator or self.context.is_anonymous:
return new
# Otherwise, we force the id to match the authenticated username.
if new[self.model.id_field] != self.request.selected_userid:
error_details = {
"name": "data.id",
"description": "Username and account ID do not match.",
}
raise_invalid(self.request, **error_details)
return new
|
def process_record(self, new, old=None):
new = super(Account, self).process_record(new, old)
# Store password safely in database.
pwd_str = new["password"].encode(encoding="utf-8")
new["password"] = bcrypt.hashpw(pwd_str, bcrypt.gensalt())
# Administrators can reach other accounts and anonymous have no
# selected_userid. So do not try to enforce.
if self.context.is_administrator or self.context.is_anonymous:
return new
# Otherwise, we force the id to match the authenticated username.
if new[self.model.id_field] != self.request.selected_userid:
error_details = {
"name": "data.id",
"description": "Username and account ID do not match.",
}
raise_invalid(self.request, **error_details)
return new
|
https://github.com/Kinto/kinto/issues/1224
|
Unicode-objects must be encoded before hashing
Traceback (most recent call last):
File "/usr/local/lib/python3.5/dist-packages/pyramid/tweens.py", line 22, in excview_tween
response = handler(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid_tm/__init__.py", line 119, in tm_tween
reraise(*exc_info)
File "/usr/local/lib/python3.5/dist-packages/pyramid_tm/compat.py", line 15, in reraise
raise value
File "/usr/local/lib/python3.5/dist-packages/pyramid_tm/__init__.py", line 98, in tm_tween
response = handler(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 155, in handle_request
view_name
File "/usr/local/lib/python3.5/dist-packages/pyramid/view.py", line 612, in _call_view
response = view_callable(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/config/views.py", line 181, in __call__
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 389, in attr_view
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 367, in predicate_wrapper
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 438, in rendered_view
result = view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/usr/local/lib/python3.5/dist-packages/cornice/service.py", line 493, in wrapper
response = view_(request)
File "/code/kinto/core/views/hello.py", line 45, in get_hello
if Authenticated in request.effective_principals:
File "/usr/local/lib/python3.5/dist-packages/pyramid/security.py", line 375, in effective_principals
return policy.effective_principals(self)
File "/usr/local/lib/python3.5/dist-packages/pyramid_multiauth/__init__.py", line 119, in effective_principals
userid = policy.authenticated_userid(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/authentication.py", line 92, in authenticated_userid
callback_ok = self.callback(userid, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/authentication.py", line 1123, in callback
return self.check(username, password, request)
File "/code/kinto/plugins/accounts/authentication.py", line 18, in account_check
if hashed == bcrypt.hashpw(pwd_str, hashed):
File "/usr/local/lib/python3.5/dist-packages/bcrypt/__init__.py", line 62, in hashpw
raise TypeError("Unicode-objects must be encoded before hashing")
|
TypeError
|
def deserialize(self, cstruct=colander.null):
"""Preprocess received data to carefully merge defaults."""
if cstruct is not colander.null:
defaults = cstruct.get("defaults")
requests = cstruct.get("requests")
if isinstance(defaults, dict) and isinstance(requests, list):
for request in requests:
if isinstance(request, dict):
merge_dicts(request, defaults)
return super(BatchPayloadSchema, self).deserialize(cstruct)
|
def deserialize(self, cstruct=colander.null):
"""Preprocess received data to carefully merge defaults."""
defaults = cstruct.get("defaults")
requests = cstruct.get("requests")
if isinstance(defaults, dict) and isinstance(requests, list):
for request in requests:
if isinstance(request, dict):
merge_dicts(request, defaults)
return super(BatchPayloadSchema, self).deserialize(cstruct)
|
https://github.com/Kinto/kinto/issues/1024
|
2017-01-18 16:45:58,968 ERROR [kinto.core.views.errors][waitress] "POST /v1/batch" ? (? ms) '_null' object has no attribute 'get' agent=HTTPie/0.9.2 authn_type=None errno=None exception=Traceback (most recent call last):
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/pyramid/tweens.py", line 22, in excview_tween
response = handler(request)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/pyramid_tm/__init__.py", line 119, in tm_tween
reraise(*exc_info)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/pyramid_tm/__init__.py", line 98, in tm_tween
response = handler(request)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/pyramid/router.py", line 158, in handle_request
view_name
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/pyramid/view.py", line 547, in _call_view
response = view_callable(context, request)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/pyramid/config/views.py", line 182,
, in __call__
return view(context, request)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/pyramid/viewderivers.py", line 393, in attr_view
return view(context, request)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/pyramid/viewderivers.py", line 371, in predicate_wrapper
return view(context, request)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/pyramid/viewderivers.py", line 442, in rendered_view
result = view(context, request)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/cornice/service.py", line 484, in wrapper
validator(request, **args)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/cornice/validators/_colander.py", line 73, in validator
deserialized = schema.deserialize(cstruct)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/colander/__init__.py", line 2058, in deserialize
appstruct = self.typ.deserialize(self, cstruct)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/colander/__init__.py", line 719, in deserialize
return self._impl(node, cstruct, callback)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/colander/__init__.py", line 678, in _impl
sub_result = callback(subnode, subval)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/colander/__init__.py", line 717, in callback
return subnode.deserialize(subcstruct)
File "/home/mathieu/Code/Mozilla/kinto/kinto/core/views/batch.py", line 57, in deserialize
defaults = cstruct.get('defaults')
AttributeError: '_null' object has no attribute 'get' lang=None uid=None
2017-01-18 16:45:58,969 INFO [kinto.core.initialization][waitress] "POST /v1/batch" 500 (5 ms) request.summary agent=HTTPie/0.9.2 authn_type=None errno=999 lang=None time=2017-01-18T16:45:58 uid=None
|
AttributeError
|
def _raise_304_if_not_modified(self, record=None):
"""Raise 304 if current timestamp is inferior to the one specified
in headers.
:raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPNotModified`
"""
if_none_match = self.request.headers.get("If-None-Match")
if not if_none_match:
return
error_details = {
"location": "header",
"description": "Invalid value for If-None-Match",
}
try:
if_none_match = decode_header(if_none_match)
except UnicodeDecodeError:
raise_invalid(self.request, **error_details)
try:
if not (if_none_match[0] == if_none_match[-1] == '"'):
raise ValueError()
modified_since = int(if_none_match[1:-1])
except (IndexError, ValueError):
if if_none_match == "*":
return
raise_invalid(self.request, **error_details)
if record:
current_timestamp = record[self.model.modified_field]
else:
current_timestamp = self.model.timestamp()
if current_timestamp <= modified_since:
response = HTTPNotModified()
self._add_timestamp_header(response, timestamp=current_timestamp)
raise response
|
def _raise_304_if_not_modified(self, record=None):
"""Raise 304 if current timestamp is inferior to the one specified
in headers.
:raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPNotModified`
"""
if_none_match = self.request.headers.get("If-None-Match")
if not if_none_match:
return
if_none_match = decode_header(if_none_match)
try:
if not (if_none_match[0] == if_none_match[-1] == '"'):
raise ValueError()
modified_since = int(if_none_match[1:-1])
except (IndexError, ValueError):
if if_none_match == "*":
return
error_details = {
"location": "header",
"description": "Invalid value for If-None-Match",
}
raise_invalid(self.request, **error_details)
if record:
current_timestamp = record[self.model.modified_field]
else:
current_timestamp = self.model.timestamp()
if current_timestamp <= modified_since:
response = HTTPNotModified()
self._add_timestamp_header(response, timestamp=current_timestamp)
raise response
|
https://github.com/Kinto/kinto/issues/983
|
Traceback (most recent call last):
File \"/data/kinto-dist/lib/python2.7/site-packages/pyramid/tweens.py\", line 22, in excview_tween
response = handler(request)
File \"/data/kinto-dist/lib/python2.7/site-packages/pyramid_tm/__init__.py\", line 119, in tm_tween
reraise(*exc_info)
File \"/data/kinto-dist/lib/python2.7/site-packages/pyramid_tm/__init__.py\", line 98, in tm_tween
response = handler(request)
File \"/data/kinto-dist/lib/python2.7/site-packages/pyramid/router.py\", line 158, in handle_request
view_name
File \"/data/kinto-dist/lib/python2.7/site-packages/pyramid/view.py\", line 547, in _call_view
response = view_callable(context, request)
File \"/data/kinto-dist/lib/python2.7/site-packages/pyramid/config/views.py\", line 182, in __call__
return view(context, request)
File \"/data/kinto-dist/lib/python2.7/site-packages/pyramid/viewderivers.py\", line 393, in attr_view
return view(context, request)
File \"/data/kinto-dist/lib/python2.7/site-packages/pyramid/viewderivers.py\", line 371, in predicate_wrapper
return view(context, request)
File \"/data/kinto-dist/lib/python2.7/site-packages/pyramid/viewderivers.py\", line 302, in _secured_view
return view(context, request)
File \"/data/kinto-dist/lib/python2.7/site-packages/pyramid/viewderivers.py\", line 442, in rendered_view
result = view(context, request)
File \"/data/kinto-dist/lib/python2.7/site-packages/pyramid/viewderivers.py\", line 147, in _requestonly_view
response = view(request)
File \"/data/kinto-dist/lib/python2.7/site-packages/cornice/service.py\", line 571, in wrapper
response = view_()
File \"/data/kinto-dist/lib/python2.7/site-packages/kinto/views/records.py\", line 78, in collection_get
result = super(Record, self).collection_get()
File \"/data/kinto-dist/lib/python2.7/site-packages/kinto/core/resource/__init__.py\", line 249, in collection_get
self._raise_304_if_not_modified()
File \"/data/kinto-dist/lib/python2.7/site-packages/kinto/core/resource/__init__.py\", line 744, in _raise_304_if_not_modified
if_none_match = decode_header(if_none_match)
File \"/data/kinto-dist/lib/python2.7/site-packages/kinto/core/utils.py\", line 394, in decode_header
value = value.decode(encoding)
File \"/data/kinto-dist/lib64/python2.7/encodings/utf_8.py\", line 16, in decode
return codecs.utf_8_decode(input, errors, True)
UnicodeDecodeError: \'utf8\' codec can\'t decode byte 0xea in position 15: invalid continuation
|
UnicodeDecodeError
|
def _raise_412_if_modified(self, record=None):
"""Raise 412 if current timestamp is superior to the one
specified in headers.
:raises:
:exc:`~pyramid:pyramid.httpexceptions.HTTPPreconditionFailed`
"""
if_match = self.request.headers.get("If-Match")
if_none_match = self.request.headers.get("If-None-Match")
if not if_match and not if_none_match:
return
error_details = {
"location": "header",
"description": (
"Invalid value for If-Match. The value should "
"be integer between double quotes."
),
}
try:
if_match = decode_header(if_match) if if_match else None
if_none_match = decode_header(if_none_match) if if_none_match else None
except UnicodeDecodeError:
raise_invalid(self.request, **error_details)
if record and if_none_match == "*":
if record.get(self.model.deleted_field, False):
# Tombstones should not prevent creation.
return
modified_since = -1 # Always raise.
elif if_match:
try:
if not (if_match[0] == if_match[-1] == '"'):
raise ValueError()
modified_since = int(if_match[1:-1])
except (IndexError, ValueError):
raise_invalid(self.request, **error_details)
else:
# In case _raise_304_if_not_modified() did not raise.
return
if record:
current_timestamp = record[self.model.modified_field]
else:
current_timestamp = self.model.timestamp()
if current_timestamp > modified_since:
error_msg = "Resource was modified meanwhile"
details = {"existing": record} if record else {}
response = http_error(
HTTPPreconditionFailed(),
errno=ERRORS.MODIFIED_MEANWHILE,
message=error_msg,
details=details,
)
self._add_timestamp_header(response, timestamp=current_timestamp)
raise response
|
def _raise_412_if_modified(self, record=None):
"""Raise 412 if current timestamp is superior to the one
specified in headers.
:raises:
:exc:`~pyramid:pyramid.httpexceptions.HTTPPreconditionFailed`
"""
if_match = self.request.headers.get("If-Match")
if_none_match = self.request.headers.get("If-None-Match")
if not if_match and not if_none_match:
return
if_match = decode_header(if_match) if if_match else None
if record and if_none_match and decode_header(if_none_match) == "*":
if record.get(self.model.deleted_field, False):
# Tombstones should not prevent creation.
return
modified_since = -1 # Always raise.
elif if_match:
try:
if not (if_match[0] == if_match[-1] == '"'):
raise ValueError()
modified_since = int(if_match[1:-1])
except (IndexError, ValueError):
message = (
"Invalid value for If-Match. The value should "
"be integer between double quotes."
)
error_details = {"location": "header", "description": message}
raise_invalid(self.request, **error_details)
else:
# In case _raise_304_if_not_modified() did not raise.
return
if record:
current_timestamp = record[self.model.modified_field]
else:
current_timestamp = self.model.timestamp()
if current_timestamp > modified_since:
error_msg = "Resource was modified meanwhile"
details = {"existing": record} if record else {}
response = http_error(
HTTPPreconditionFailed(),
errno=ERRORS.MODIFIED_MEANWHILE,
message=error_msg,
details=details,
)
self._add_timestamp_header(response, timestamp=current_timestamp)
raise response
|
https://github.com/Kinto/kinto/issues/983
|
Traceback (most recent call last):
File \"/data/kinto-dist/lib/python2.7/site-packages/pyramid/tweens.py\", line 22, in excview_tween
response = handler(request)
File \"/data/kinto-dist/lib/python2.7/site-packages/pyramid_tm/__init__.py\", line 119, in tm_tween
reraise(*exc_info)
File \"/data/kinto-dist/lib/python2.7/site-packages/pyramid_tm/__init__.py\", line 98, in tm_tween
response = handler(request)
File \"/data/kinto-dist/lib/python2.7/site-packages/pyramid/router.py\", line 158, in handle_request
view_name
File \"/data/kinto-dist/lib/python2.7/site-packages/pyramid/view.py\", line 547, in _call_view
response = view_callable(context, request)
File \"/data/kinto-dist/lib/python2.7/site-packages/pyramid/config/views.py\", line 182, in __call__
return view(context, request)
File \"/data/kinto-dist/lib/python2.7/site-packages/pyramid/viewderivers.py\", line 393, in attr_view
return view(context, request)
File \"/data/kinto-dist/lib/python2.7/site-packages/pyramid/viewderivers.py\", line 371, in predicate_wrapper
return view(context, request)
File \"/data/kinto-dist/lib/python2.7/site-packages/pyramid/viewderivers.py\", line 302, in _secured_view
return view(context, request)
File \"/data/kinto-dist/lib/python2.7/site-packages/pyramid/viewderivers.py\", line 442, in rendered_view
result = view(context, request)
File \"/data/kinto-dist/lib/python2.7/site-packages/pyramid/viewderivers.py\", line 147, in _requestonly_view
response = view(request)
File \"/data/kinto-dist/lib/python2.7/site-packages/cornice/service.py\", line 571, in wrapper
response = view_()
File \"/data/kinto-dist/lib/python2.7/site-packages/kinto/views/records.py\", line 78, in collection_get
result = super(Record, self).collection_get()
File \"/data/kinto-dist/lib/python2.7/site-packages/kinto/core/resource/__init__.py\", line 249, in collection_get
self._raise_304_if_not_modified()
File \"/data/kinto-dist/lib/python2.7/site-packages/kinto/core/resource/__init__.py\", line 744, in _raise_304_if_not_modified
if_none_match = decode_header(if_none_match)
File \"/data/kinto-dist/lib/python2.7/site-packages/kinto/core/utils.py\", line 394, in decode_header
value = value.decode(encoding)
File \"/data/kinto-dist/lib64/python2.7/encodings/utf_8.py\", line 16, in decode
return codecs.utf_8_decode(input, errors, True)
UnicodeDecodeError: \'utf8\' codec can\'t decode byte 0xea in position 15: invalid continuation
|
UnicodeDecodeError
|
def _find_required_permission(self, request, service):
"""Find out what is the permission object id and the required
permission.
.. note::
This method saves an attribute ``self.current_record`` used
in :class:`kinto.core.resource.UserResource`.
"""
# By default, it's a URI a and permission associated to the method.
permission_object_id = self.get_permission_object_id(request)
method = request.method.lower()
required_permission = self.method_permissions.get(method)
# For create permission, the object id is the plural endpoint.
collection_path = six.text_type(service.collection_path)
collection_path = collection_path.format(**request.matchdict)
# In the case of a "PUT", check if the targetted record already
# exists, return "write" if it does, "create" otherwise.
if request.method.lower() == "put":
resource = service.resource(request=request, context=self)
try:
record = resource.model.get_record(resource.record_id)
# Save a reference, to avoid refetching from storage in
# resource.
self.current_record = record
except storage_exceptions.RecordNotFoundError:
# The record does not exist, the permission to create on
# the related collection is required.
permission_object_id = collection_path
required_permission = "create"
else:
# For safe creations, the user needs a create permission.
# See Kinto/kinto#792
if request.headers.get("If-None-Match") == "*":
permission_object_id = collection_path
required_permission = "create"
else:
required_permission = "write"
return (permission_object_id, required_permission)
|
def _find_required_permission(self, request, service):
"""Find out what is the permission object id and the required
permission.
.. note::
This method saves an attribute ``self.current_record`` used
in :class:`kinto.core.resource.UserResource`.
"""
# By default, it's a URI a and permission associated to the method.
permission_object_id = self.get_permission_object_id(request)
method = request.method.lower()
required_permission = self.method_permissions.get(method)
# For create permission, the object id is the plural endpoint.
collection_path = service.collection_path.format(**request.matchdict)
# In the case of a "PUT", check if the targetted record already
# exists, return "write" if it does, "create" otherwise.
if request.method.lower() == "put":
resource = service.resource(request=request, context=self)
try:
record = resource.model.get_record(resource.record_id)
# Save a reference, to avoid refetching from storage in
# resource.
self.current_record = record
except storage_exceptions.RecordNotFoundError:
# The record does not exist, the permission to create on
# the related collection is required.
permission_object_id = collection_path
required_permission = "create"
else:
# For safe creations, the user needs a create permission.
# See Kinto/kinto#792
if request.headers.get("If-None-Match") == "*":
permission_object_id = collection_path
required_permission = "create"
else:
required_permission = "write"
return (permission_object_id, required_permission)
|
https://github.com/Kinto/kinto/issues/931
|
{"Pid":3109,"EnvVersion":"2.0","Hostname":"ip-172-31-2-114","Timestamp":1479738668906000000,"Fields":{"lang":null,"exception":"Traceback (most recent call last):\n File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid\/tweens.py\", line 22, in excview_tween\n response = handler(request)\n File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid_tm\/__init__.py\", line 109, in tm_tween\n reraise(*exc_info)\n File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid_tm\/__init__.py\", line 88, in tm_tween\n response = handler(request)\n File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid\/router.py\", line 127, in handle_request\n root = root_factory(request)\n File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/kinto\/core\/authorization.py\", line 144, in __init__\n self._find_required_permission(request, service))\n File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/kinto\/core\/authorization.py\", line 237, in _find_required_permission\n collection_path = service.collection_path.format(**request.matchdict)\nUnicodeEncodeError: 'ascii' codec can't encode characters in position 5-6: ordinal not in range(128)","uid":null,"errno":null,"querystring":"{}","agent":"Amazon CloudFront","method":"GET","path":"\/v1\/buckets\/block%C2%93%C2%96sts\/collections\/certificates","authn_type":null},"Logger":"kinto","Type":["ascii","block\u0093\u0096sts",5,7,"ordinal not in range(128)"],"Severity":2}
|
nUnicodeEncodeError
|
def get_object_permissions(self, object_id, permissions=None):
return self.get_objects_permissions([object_id], permissions)[0]
|
def get_object_permissions(self, object_id, permissions=None):
perms = self.get_objects_permissions([object_id], permissions)
return perms[0] if perms else {}
|
https://github.com/Kinto/kinto/issues/842
|
http --check-status --form POST http://localhost:8888/v1/buckets/source/collections/source/records/80ec9929-6896-4022-8443-3da4f5353f47/attachment attachment@kinto-logo.png --auth user:pass
"POST /v1/buckets/source/collections/source/records/80ec9929-6896-4022-8443-3da4f5353f47/attachment" ? (? ms) u'/buckets/source/collections/source' agent=HTTPie/0.9.6 authn_type=basicauth collection_id=record collection_timestamp=1475232331014 errno=110 exception=Traceback (most recent call last):
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/pyramid/tweens.py", line 22, in excview_tween
response = handler(request)
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/pyramid_tm/__init__.py", line 109, in tm_tween
reraise(*exc_info)
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/pyramid_tm/__init__.py", line 95, in tm_tween
manager.commit()
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/transaction/_manager.py", line 123, in commit
return self.get().commit()
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/transaction/_transaction.py", line 265, in commit
self._callBeforeCommitHooks()
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/transaction/_transaction.py", line 336, in _callBeforeCommitHooks
hook(*args, **kws)
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/kinto/core/events.py", line 76, in _notify_resource_events_before
request.registry.notify(event)
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/pyramid/registry.py", line 91, in notify
[ _ for _ in self.subscribers(events, None) ]
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/zope/interface/registry.py", line 442, in subscribers
return self.adapters.subscribers(objects, provided)
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/zope/interface/adapter.py", line 596, in subscribers
subscription(*objects)
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/pyramid/config/adapters.py", line 130, in subscriber_wrapper
return derived_subscriber(*arg)
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/pyramid/config/adapters.py", line 103, in derived_subscriber
return subscriber(arg[0])
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/kinto/plugins/history/listener.py", line 65, in on_resource_changed
collection_perms = perms_by_object_id[collection_uri]
KeyError: u'/buckets/source/collections/source' lang=None uid=cbd3731f18c97ebe1d31d9846b5f1b95cf8eeeae586e201277263434041e99d1
"POST /v1/buckets/source/collections/source/records/80ec9929-6896-4022-8443-3da4f5353f47/attachment" 500 (23 ms) request.summary agent=HTTPie/0.9.6 authn_type=basicauth collection_id=record collection_timestamp=1475232331014 errno=999 lang=None time=2016-09-30T10:45:31 uid=cbd3731f18c97ebe1d31d9846b5f1b95cf8eeeae586e201277263434041e99d1
HTTP/1.1 500 Internal Server Error
Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff
Content-Length: 183
Content-Type: application/json; charset=UTF-8
Date: Fri, 30 Sep 2016 10:45:31 GMT
Server: waitress
{
"code": 500,
"errno": 999,
"error": "Internal Server Error",
"info": "https://github.com/Kinto/kinto/issues/",
"message": "A programmatic error occured, developers have been informed."
}
|
KeyError
|
def get_objects_permissions(self, objects_ids, permissions=None):
query = """
WITH required_object_ids AS (
VALUES %(objects_ids)s
)
SELECT object_id, permission, principal
FROM required_object_ids JOIN access_control_entries
ON (object_id = column2)
%(permissions_condition)s
ORDER BY column1 ASC;
"""
obj_ids_values = ",".join(["(%s, '%s')" % t for t in enumerate(objects_ids)])
safeholders = {"objects_ids": obj_ids_values, "permissions_condition": ""}
placeholders = {}
if permissions is not None:
safeholders["permissions_condition"] = """
WHERE permission IN :permissions"""
placeholders["permissions"] = tuple(permissions)
with self.client.connect(readonly=True) as conn:
result = conn.execute(query % safeholders, placeholders)
rows = result.fetchall()
groupby_id = OrderedDict()
for object_id in objects_ids:
groupby_id[object_id] = {}
for row in rows:
object_id, permission, principal = (
row["object_id"],
row["permission"],
row["principal"],
)
groupby_id[object_id].setdefault(permission, set()).add(principal)
return list(groupby_id.values())
|
def get_objects_permissions(self, objects_ids, permissions=None):
query = """
WITH required_object_ids AS (
VALUES %(objects_ids)s
)
SELECT object_id, permission, principal
FROM required_object_ids JOIN access_control_entries
ON (object_id = column2)
%(permissions_condition)s
ORDER BY column1 ASC;
"""
obj_ids_values = ",".join(["(%s, '%s')" % t for t in enumerate(objects_ids)])
safeholders = {"objects_ids": obj_ids_values, "permissions_condition": ""}
placeholders = {}
if permissions is not None:
safeholders["permissions_condition"] = """
WHERE permission IN :permissions"""
placeholders["permissions"] = tuple(permissions)
with self.client.connect(readonly=True) as conn:
result = conn.execute(query % safeholders, placeholders)
rows = result.fetchall()
groupby_id = OrderedDict()
for row in rows:
object_id, permission, principal = (
row["object_id"],
row["permission"],
row["principal"],
)
permissions = groupby_id.setdefault(object_id, {})
permissions.setdefault(permission, set()).add(principal)
return list(groupby_id.values())
|
https://github.com/Kinto/kinto/issues/842
|
http --check-status --form POST http://localhost:8888/v1/buckets/source/collections/source/records/80ec9929-6896-4022-8443-3da4f5353f47/attachment attachment@kinto-logo.png --auth user:pass
"POST /v1/buckets/source/collections/source/records/80ec9929-6896-4022-8443-3da4f5353f47/attachment" ? (? ms) u'/buckets/source/collections/source' agent=HTTPie/0.9.6 authn_type=basicauth collection_id=record collection_timestamp=1475232331014 errno=110 exception=Traceback (most recent call last):
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/pyramid/tweens.py", line 22, in excview_tween
response = handler(request)
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/pyramid_tm/__init__.py", line 109, in tm_tween
reraise(*exc_info)
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/pyramid_tm/__init__.py", line 95, in tm_tween
manager.commit()
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/transaction/_manager.py", line 123, in commit
return self.get().commit()
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/transaction/_transaction.py", line 265, in commit
self._callBeforeCommitHooks()
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/transaction/_transaction.py", line 336, in _callBeforeCommitHooks
hook(*args, **kws)
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/kinto/core/events.py", line 76, in _notify_resource_events_before
request.registry.notify(event)
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/pyramid/registry.py", line 91, in notify
[ _ for _ in self.subscribers(events, None) ]
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/zope/interface/registry.py", line 442, in subscribers
return self.adapters.subscribers(objects, provided)
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/zope/interface/adapter.py", line 596, in subscribers
subscription(*objects)
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/pyramid/config/adapters.py", line 130, in subscriber_wrapper
return derived_subscriber(*arg)
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/pyramid/config/adapters.py", line 103, in derived_subscriber
return subscriber(arg[0])
File "/home/travis/gopath/src/github.com/mozilla-services/kinto-dist/.venv/local/lib/python2.7/site-packages/kinto/plugins/history/listener.py", line 65, in on_resource_changed
collection_perms = perms_by_object_id[collection_uri]
KeyError: u'/buckets/source/collections/source' lang=None uid=cbd3731f18c97ebe1d31d9846b5f1b95cf8eeeae586e201277263434041e99d1
"POST /v1/buckets/source/collections/source/records/80ec9929-6896-4022-8443-3da4f5353f47/attachment" 500 (23 ms) request.summary agent=HTTPie/0.9.6 authn_type=basicauth collection_id=record collection_timestamp=1475232331014 errno=999 lang=None time=2016-09-30T10:45:31 uid=cbd3731f18c97ebe1d31d9846b5f1b95cf8eeeae586e201277263434041e99d1
HTTP/1.1 500 Internal Server Error
Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff
Content-Length: 183
Content-Type: application/json; charset=UTF-8
Date: Fri, 30 Sep 2016 10:45:31 GMT
Server: waitress
{
"code": 500,
"errno": 999,
"error": "Internal Server Error",
"info": "https://github.com/Kinto/kinto/issues/",
"message": "A programmatic error occured, developers have been informed."
}
|
KeyError
|
def ttl(self, key):
ttl = self._ttl.get(self.prefix + key)
if ttl is not None:
return (ttl - msec_time()) / 1000.0
return -1
|
def ttl(self, key):
ttl = self._ttl.get(self.prefix + key)
if ttl is not None:
return (ttl - utils.msec_time()) / 1000.0
return -1
|
https://github.com/Kinto/kinto/issues/759
|
2016-08-16 14:57:02,332 ERROR [kinto.core.views.errors][waitress] "POST /v1/buckets/e3e5dfdd-b5de-e9dd-e2ad-637fc2c6481f/collections/tickets/records" ? (? ms) dictionary changed size during iteration agent=None authn_type=basicauth collection_id=record collection_timestamp=1471359417323 errno=None exception=Traceback (most recent call last):
File "/code/kinto/core/utils.py", line 322, in follow_subrequest
return request.invoke_subrequest(subrequest, **kwargs), subrequest
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 211, in invoke_subrequest
response = handle_request(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 158, in handle_request
view_name
File "/usr/local/lib/python3.5/dist-packages/pyramid/view.py", line 547, in _call_view
response = view_callable(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 413, in viewresult_to_response
result = view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/code/kinto/plugins/default_bucket/__init__.py", line 159, in default_bucket
response = request.invoke_subrequest(subrequest)
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 211, in invoke_subrequest
response = handle_request(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 158, in handle_request
view_name
File "/usr/local/lib/python3.5/dist-packages/pyramid/view.py", line 547, in _call_view
response = view_callable(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/config/views.py", line 182, in __call__
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 393, in attr_view
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 371, in predicate_wrapper
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 302, in _secured_view
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 442, in rendered_view
result = view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/usr/local/lib/python3.5/dist-packages/cornice/service.py", line 571, in wrapper
response = view_()
File "/code/kinto/core/resource/__init__.py", line 311, in collection_post
existing = self._get_record_or_404(_id)
File "/code/kinto/core/resource/__init__.py", line 681, in _get_record_or_404
return self.model.get_record(record_id)
File "/code/kinto/core/resource/model.py", line 276, in get_record
permissions = self.permission.get_object_permissions(perm_object_id)
File "/code/kinto/core/permission/__init__.py", line 136, in get_object_permissions
perms = self.get_objects_permissions([object_id], permissions)
File "/code/kinto/core/permission/memory.py", line 114, in get_objects_permissions
aces = [k for k in self._store.keys()
File "/code/kinto/core/permission/memory.py", line 114, in <listcomp>
aces = [k for k in self._store.keys()
RuntimeError: dictionary changed size during iteration lang=None uid=84ffd9ad5044b2b77b40e0f96fee3112f02894fcbe0eec6dfb8008394955ab7f
2016-08-16 14:57:02,351 INFO [kinto.core.views.batch][waitress] "POST /v1/buckets/e3e5dfdd-b5de-e9dd-e2ad-637fc2c6481f/collections/tickets/records" 200 (22 ms) subrequest.summary agent=None authn_type=basicauth collection_id=record collection_timestamp=1471359422318 errno=None lang=None time=2016-08-16T14:57:02.351000 uid=84ffd9ad5044b2b77b40e0f96fee3112f02894fcbe0eec6dfb8008394955ab7f
2016-08-16 14:57:02,376 INFO [kinto.core.views.batch][waitress] "POST /v1/buckets/e3e5dfdd-b5de-e9dd-e2ad-637fc2c6481f/collections/tickets/records" 201 (33 ms) subrequest.summary agent=None authn_type=basicauth collection_id=record collection_timestamp=1471359422318 errno=110 lang=None time=2016-08-16T14:57:02.376000 uid=84ffd9ad5044b2b77b40e0f96fee3112f02894fcbe0eec6dfb8008394955ab7f
2016-08-16 14:57:02,381 INFO [kinto.core.views.batch][waitress] "POST /v1/buckets/e3e5dfdd-b5de-e9dd-e2ad-637fc2c6481f/collections/tickets/records" 200 (30 ms) subrequest.summary agent=None authn_type=basicauth collection_id=record collection_timestamp=1471359422318 errno=None lang=None time=2016-08-16T14:57:02.381000 uid=84ffd9ad5044b2b77b40e0f96fee3112f02894fcbe0eec6dfb8008394955ab7f
2016-08-16 14:57:02,381 ERROR [kinto.core.views.errors][waitress] "POST /v1/buckets/e3e5dfdd-b5de-e9dd-e2ad-637fc2c6481f/collections/tickets/records" ? (? ms) dictionary changed size during iteration agent=None authn_type=basicauth collection_id=record collection_timestamp=1471359417323 errno=999 exception=Traceback (most recent call last):
File "/usr/local/lib/python3.5/dist-packages/pyramid/tweens.py", line 22, in excview_tween
response = handler(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid_tm/__init__.py", line 101, in tm_tween
reraise(*exc_info)
File "/usr/local/lib/python3.5/dist-packages/pyramid_tm/compat.py", line 15, in reraise
raise value
File "/usr/local/lib/python3.5/dist-packages/pyramid_tm/__init__.py", line 83, in tm_tween
response = handler(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 158, in handle_request
view_name
File "/usr/local/lib/python3.5/dist-packages/pyramid/view.py", line 547, in _call_view
response = view_callable(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/config/views.py", line 182, in __call__
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 393, in attr_view
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 371, in predicate_wrapper
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 442, in rendered_view
result = view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/usr/local/lib/python3.5/dist-packages/cornice/service.py", line 573, in wrapper
response = view_(request)
File "/code/kinto/core/views/batch.py", line 100, in post_batch
use_tweens=False)
File "/code/kinto/core/utils.py", line 326, in follow_subrequest
raise e
File "/code/kinto/core/utils.py", line 322, in follow_subrequest
return request.invoke_subrequest(subrequest, **kwargs), subrequest
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 211, in invoke_subrequest
response = handle_request(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 158, in handle_request
view_name
File "/usr/local/lib/python3.5/dist-packages/pyramid/view.py", line 547, in _call_view
response = view_callable(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 413, in viewresult_to_response
result = view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/code/kinto/plugins/default_bucket/__init__.py", line 159, in default_bucket
response = request.invoke_subrequest(subrequest)
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 211, in invoke_subrequest
response = handle_request(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 158, in handle_request
view_name
File "/usr/local/lib/python3.5/dist-packages/pyramid/view.py", line 547, in _call_view
response = view_callable(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/config/views.py", line 182, in __call__
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 393, in attr_view
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 371, in predicate_wrapper
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 302, in _secured_view
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 442, in rendered_view
result = view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/usr/local/lib/python3.5/dist-packages/cornice/service.py", line 571, in wrapper
response = view_()
File "/code/kinto/core/resource/__init__.py", line 311, in collection_post
existing = self._get_record_or_404(_id)
File "/code/kinto/core/resource/__init__.py", line 681, in _get_record_or_404
return self.model.get_record(record_id)
File "/code/kinto/core/resource/model.py", line 276, in get_record
permissions = self.permission.get_object_permissions(perm_object_id)
File "/code/kinto/core/permission/__init__.py", line 136, in get_object_permissions
perms = self.get_objects_permissions([object_id], permissions)
File "/code/kinto/core/permission/memory.py", line 114, in get_objects_permissions
aces = [k for k in self._store.keys()
File "/code/kinto/core/permission/memory.py", line 114, in <listcomp>
aces = [k for k in self._store.keys()
RuntimeError: dictionary changed size during iteration lang=None uid=84ffd9ad5044b2b77b40e0f96fee3112f02894fcbe0eec6dfb8008394955ab7f
|
RuntimeError
|
def expire(self, key, ttl):
self._ttl[self.prefix + key] = msec_time() + int(ttl * 1000.0)
|
def expire(self, key, ttl):
self._ttl[self.prefix + key] = utils.msec_time() + int(ttl * 1000.0)
|
https://github.com/Kinto/kinto/issues/759
|
2016-08-16 14:57:02,332 ERROR [kinto.core.views.errors][waitress] "POST /v1/buckets/e3e5dfdd-b5de-e9dd-e2ad-637fc2c6481f/collections/tickets/records" ? (? ms) dictionary changed size during iteration agent=None authn_type=basicauth collection_id=record collection_timestamp=1471359417323 errno=None exception=Traceback (most recent call last):
File "/code/kinto/core/utils.py", line 322, in follow_subrequest
return request.invoke_subrequest(subrequest, **kwargs), subrequest
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 211, in invoke_subrequest
response = handle_request(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 158, in handle_request
view_name
File "/usr/local/lib/python3.5/dist-packages/pyramid/view.py", line 547, in _call_view
response = view_callable(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 413, in viewresult_to_response
result = view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/code/kinto/plugins/default_bucket/__init__.py", line 159, in default_bucket
response = request.invoke_subrequest(subrequest)
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 211, in invoke_subrequest
response = handle_request(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 158, in handle_request
view_name
File "/usr/local/lib/python3.5/dist-packages/pyramid/view.py", line 547, in _call_view
response = view_callable(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/config/views.py", line 182, in __call__
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 393, in attr_view
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 371, in predicate_wrapper
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 302, in _secured_view
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 442, in rendered_view
result = view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/usr/local/lib/python3.5/dist-packages/cornice/service.py", line 571, in wrapper
response = view_()
File "/code/kinto/core/resource/__init__.py", line 311, in collection_post
existing = self._get_record_or_404(_id)
File "/code/kinto/core/resource/__init__.py", line 681, in _get_record_or_404
return self.model.get_record(record_id)
File "/code/kinto/core/resource/model.py", line 276, in get_record
permissions = self.permission.get_object_permissions(perm_object_id)
File "/code/kinto/core/permission/__init__.py", line 136, in get_object_permissions
perms = self.get_objects_permissions([object_id], permissions)
File "/code/kinto/core/permission/memory.py", line 114, in get_objects_permissions
aces = [k for k in self._store.keys()
File "/code/kinto/core/permission/memory.py", line 114, in <listcomp>
aces = [k for k in self._store.keys()
RuntimeError: dictionary changed size during iteration lang=None uid=84ffd9ad5044b2b77b40e0f96fee3112f02894fcbe0eec6dfb8008394955ab7f
2016-08-16 14:57:02,351 INFO [kinto.core.views.batch][waitress] "POST /v1/buckets/e3e5dfdd-b5de-e9dd-e2ad-637fc2c6481f/collections/tickets/records" 200 (22 ms) subrequest.summary agent=None authn_type=basicauth collection_id=record collection_timestamp=1471359422318 errno=None lang=None time=2016-08-16T14:57:02.351000 uid=84ffd9ad5044b2b77b40e0f96fee3112f02894fcbe0eec6dfb8008394955ab7f
2016-08-16 14:57:02,376 INFO [kinto.core.views.batch][waitress] "POST /v1/buckets/e3e5dfdd-b5de-e9dd-e2ad-637fc2c6481f/collections/tickets/records" 201 (33 ms) subrequest.summary agent=None authn_type=basicauth collection_id=record collection_timestamp=1471359422318 errno=110 lang=None time=2016-08-16T14:57:02.376000 uid=84ffd9ad5044b2b77b40e0f96fee3112f02894fcbe0eec6dfb8008394955ab7f
2016-08-16 14:57:02,381 INFO [kinto.core.views.batch][waitress] "POST /v1/buckets/e3e5dfdd-b5de-e9dd-e2ad-637fc2c6481f/collections/tickets/records" 200 (30 ms) subrequest.summary agent=None authn_type=basicauth collection_id=record collection_timestamp=1471359422318 errno=None lang=None time=2016-08-16T14:57:02.381000 uid=84ffd9ad5044b2b77b40e0f96fee3112f02894fcbe0eec6dfb8008394955ab7f
2016-08-16 14:57:02,381 ERROR [kinto.core.views.errors][waitress] "POST /v1/buckets/e3e5dfdd-b5de-e9dd-e2ad-637fc2c6481f/collections/tickets/records" ? (? ms) dictionary changed size during iteration agent=None authn_type=basicauth collection_id=record collection_timestamp=1471359417323 errno=999 exception=Traceback (most recent call last):
File "/usr/local/lib/python3.5/dist-packages/pyramid/tweens.py", line 22, in excview_tween
response = handler(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid_tm/__init__.py", line 101, in tm_tween
reraise(*exc_info)
File "/usr/local/lib/python3.5/dist-packages/pyramid_tm/compat.py", line 15, in reraise
raise value
File "/usr/local/lib/python3.5/dist-packages/pyramid_tm/__init__.py", line 83, in tm_tween
response = handler(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 158, in handle_request
view_name
File "/usr/local/lib/python3.5/dist-packages/pyramid/view.py", line 547, in _call_view
response = view_callable(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/config/views.py", line 182, in __call__
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 393, in attr_view
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 371, in predicate_wrapper
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 442, in rendered_view
result = view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/usr/local/lib/python3.5/dist-packages/cornice/service.py", line 573, in wrapper
response = view_(request)
File "/code/kinto/core/views/batch.py", line 100, in post_batch
use_tweens=False)
File "/code/kinto/core/utils.py", line 326, in follow_subrequest
raise e
File "/code/kinto/core/utils.py", line 322, in follow_subrequest
return request.invoke_subrequest(subrequest, **kwargs), subrequest
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 211, in invoke_subrequest
response = handle_request(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 158, in handle_request
view_name
File "/usr/local/lib/python3.5/dist-packages/pyramid/view.py", line 547, in _call_view
response = view_callable(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 413, in viewresult_to_response
result = view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/code/kinto/plugins/default_bucket/__init__.py", line 159, in default_bucket
response = request.invoke_subrequest(subrequest)
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 211, in invoke_subrequest
response = handle_request(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 158, in handle_request
view_name
File "/usr/local/lib/python3.5/dist-packages/pyramid/view.py", line 547, in _call_view
response = view_callable(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/config/views.py", line 182, in __call__
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 393, in attr_view
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 371, in predicate_wrapper
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 302, in _secured_view
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 442, in rendered_view
result = view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/usr/local/lib/python3.5/dist-packages/cornice/service.py", line 571, in wrapper
response = view_()
File "/code/kinto/core/resource/__init__.py", line 311, in collection_post
existing = self._get_record_or_404(_id)
File "/code/kinto/core/resource/__init__.py", line 681, in _get_record_or_404
return self.model.get_record(record_id)
File "/code/kinto/core/resource/model.py", line 276, in get_record
permissions = self.permission.get_object_permissions(perm_object_id)
File "/code/kinto/core/permission/__init__.py", line 136, in get_object_permissions
perms = self.get_objects_permissions([object_id], permissions)
File "/code/kinto/core/permission/memory.py", line 114, in get_objects_permissions
aces = [k for k in self._store.keys()
File "/code/kinto/core/permission/memory.py", line 114, in <listcomp>
aces = [k for k in self._store.keys()
RuntimeError: dictionary changed size during iteration lang=None uid=84ffd9ad5044b2b77b40e0f96fee3112f02894fcbe0eec6dfb8008394955ab7f
|
RuntimeError
|
def get(self, key):
current = msec_time()
expired = [k for k, v in self._ttl.items() if current >= v]
for expired_item_key in expired:
self.delete(expired_item_key[len(self.prefix) :])
return self._store.get(self.prefix + key)
|
def get(self, key):
current = utils.msec_time()
expired = [k for k, v in self._ttl.items() if current >= v]
for expired_item_key in expired:
self.delete(expired_item_key[len(self.prefix) :])
return self._store.get(self.prefix + key)
|
https://github.com/Kinto/kinto/issues/759
|
2016-08-16 14:57:02,332 ERROR [kinto.core.views.errors][waitress] "POST /v1/buckets/e3e5dfdd-b5de-e9dd-e2ad-637fc2c6481f/collections/tickets/records" ? (? ms) dictionary changed size during iteration agent=None authn_type=basicauth collection_id=record collection_timestamp=1471359417323 errno=None exception=Traceback (most recent call last):
File "/code/kinto/core/utils.py", line 322, in follow_subrequest
return request.invoke_subrequest(subrequest, **kwargs), subrequest
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 211, in invoke_subrequest
response = handle_request(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 158, in handle_request
view_name
File "/usr/local/lib/python3.5/dist-packages/pyramid/view.py", line 547, in _call_view
response = view_callable(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 413, in viewresult_to_response
result = view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/code/kinto/plugins/default_bucket/__init__.py", line 159, in default_bucket
response = request.invoke_subrequest(subrequest)
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 211, in invoke_subrequest
response = handle_request(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 158, in handle_request
view_name
File "/usr/local/lib/python3.5/dist-packages/pyramid/view.py", line 547, in _call_view
response = view_callable(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/config/views.py", line 182, in __call__
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 393, in attr_view
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 371, in predicate_wrapper
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 302, in _secured_view
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 442, in rendered_view
result = view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/usr/local/lib/python3.5/dist-packages/cornice/service.py", line 571, in wrapper
response = view_()
File "/code/kinto/core/resource/__init__.py", line 311, in collection_post
existing = self._get_record_or_404(_id)
File "/code/kinto/core/resource/__init__.py", line 681, in _get_record_or_404
return self.model.get_record(record_id)
File "/code/kinto/core/resource/model.py", line 276, in get_record
permissions = self.permission.get_object_permissions(perm_object_id)
File "/code/kinto/core/permission/__init__.py", line 136, in get_object_permissions
perms = self.get_objects_permissions([object_id], permissions)
File "/code/kinto/core/permission/memory.py", line 114, in get_objects_permissions
aces = [k for k in self._store.keys()
File "/code/kinto/core/permission/memory.py", line 114, in <listcomp>
aces = [k for k in self._store.keys()
RuntimeError: dictionary changed size during iteration lang=None uid=84ffd9ad5044b2b77b40e0f96fee3112f02894fcbe0eec6dfb8008394955ab7f
2016-08-16 14:57:02,351 INFO [kinto.core.views.batch][waitress] "POST /v1/buckets/e3e5dfdd-b5de-e9dd-e2ad-637fc2c6481f/collections/tickets/records" 200 (22 ms) subrequest.summary agent=None authn_type=basicauth collection_id=record collection_timestamp=1471359422318 errno=None lang=None time=2016-08-16T14:57:02.351000 uid=84ffd9ad5044b2b77b40e0f96fee3112f02894fcbe0eec6dfb8008394955ab7f
2016-08-16 14:57:02,376 INFO [kinto.core.views.batch][waitress] "POST /v1/buckets/e3e5dfdd-b5de-e9dd-e2ad-637fc2c6481f/collections/tickets/records" 201 (33 ms) subrequest.summary agent=None authn_type=basicauth collection_id=record collection_timestamp=1471359422318 errno=110 lang=None time=2016-08-16T14:57:02.376000 uid=84ffd9ad5044b2b77b40e0f96fee3112f02894fcbe0eec6dfb8008394955ab7f
2016-08-16 14:57:02,381 INFO [kinto.core.views.batch][waitress] "POST /v1/buckets/e3e5dfdd-b5de-e9dd-e2ad-637fc2c6481f/collections/tickets/records" 200 (30 ms) subrequest.summary agent=None authn_type=basicauth collection_id=record collection_timestamp=1471359422318 errno=None lang=None time=2016-08-16T14:57:02.381000 uid=84ffd9ad5044b2b77b40e0f96fee3112f02894fcbe0eec6dfb8008394955ab7f
2016-08-16 14:57:02,381 ERROR [kinto.core.views.errors][waitress] "POST /v1/buckets/e3e5dfdd-b5de-e9dd-e2ad-637fc2c6481f/collections/tickets/records" ? (? ms) dictionary changed size during iteration agent=None authn_type=basicauth collection_id=record collection_timestamp=1471359417323 errno=999 exception=Traceback (most recent call last):
File "/usr/local/lib/python3.5/dist-packages/pyramid/tweens.py", line 22, in excview_tween
response = handler(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid_tm/__init__.py", line 101, in tm_tween
reraise(*exc_info)
File "/usr/local/lib/python3.5/dist-packages/pyramid_tm/compat.py", line 15, in reraise
raise value
File "/usr/local/lib/python3.5/dist-packages/pyramid_tm/__init__.py", line 83, in tm_tween
response = handler(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 158, in handle_request
view_name
File "/usr/local/lib/python3.5/dist-packages/pyramid/view.py", line 547, in _call_view
response = view_callable(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/config/views.py", line 182, in __call__
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 393, in attr_view
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 371, in predicate_wrapper
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 442, in rendered_view
result = view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/usr/local/lib/python3.5/dist-packages/cornice/service.py", line 573, in wrapper
response = view_(request)
File "/code/kinto/core/views/batch.py", line 100, in post_batch
use_tweens=False)
File "/code/kinto/core/utils.py", line 326, in follow_subrequest
raise e
File "/code/kinto/core/utils.py", line 322, in follow_subrequest
return request.invoke_subrequest(subrequest, **kwargs), subrequest
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 211, in invoke_subrequest
response = handle_request(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 158, in handle_request
view_name
File "/usr/local/lib/python3.5/dist-packages/pyramid/view.py", line 547, in _call_view
response = view_callable(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 413, in viewresult_to_response
result = view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/code/kinto/plugins/default_bucket/__init__.py", line 159, in default_bucket
response = request.invoke_subrequest(subrequest)
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 211, in invoke_subrequest
response = handle_request(request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/router.py", line 158, in handle_request
view_name
File "/usr/local/lib/python3.5/dist-packages/pyramid/view.py", line 547, in _call_view
response = view_callable(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/config/views.py", line 182, in __call__
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 393, in attr_view
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 371, in predicate_wrapper
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 302, in _secured_view
return view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 442, in rendered_view
result = view(context, request)
File "/usr/local/lib/python3.5/dist-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/usr/local/lib/python3.5/dist-packages/cornice/service.py", line 571, in wrapper
response = view_()
File "/code/kinto/core/resource/__init__.py", line 311, in collection_post
existing = self._get_record_or_404(_id)
File "/code/kinto/core/resource/__init__.py", line 681, in _get_record_or_404
return self.model.get_record(record_id)
File "/code/kinto/core/resource/model.py", line 276, in get_record
permissions = self.permission.get_object_permissions(perm_object_id)
File "/code/kinto/core/permission/__init__.py", line 136, in get_object_permissions
perms = self.get_objects_permissions([object_id], permissions)
File "/code/kinto/core/permission/memory.py", line 114, in get_objects_permissions
aces = [k for k in self._store.keys()
File "/code/kinto/core/permission/memory.py", line 114, in <listcomp>
aces = [k for k in self._store.keys()
RuntimeError: dictionary changed size during iteration lang=None uid=84ffd9ad5044b2b77b40e0f96fee3112f02894fcbe0eec6dfb8008394955ab7f
|
RuntimeError
|
def on_resource_changed(event):
"""
Everytime an object is created/changed/deleted, we create an entry in the
``history`` resource. The entries are served as read-only in the
:mod:`kinto.plugins.history.views` module.
"""
payload = event.payload
resource_name = payload["resource_name"]
event_uri = payload["uri"]
bucket_id = None
bucket_uri = None
collection_uri = None
storage = event.request.registry.storage
permission = event.request.registry.permission
targets = []
for impacted in event.impacted_records:
target = impacted["new"]
obj_id = target["id"]
try:
bucket_id = payload["bucket_id"]
except KeyError:
# e.g. DELETE /buckets
bucket_id = obj_id
bucket_uri = instance_uri(event.request, "bucket", id=bucket_id)
if "collection_id" in payload:
collection_id = payload["collection_id"]
collection_uri = instance_uri(
event.request, "collection", bucket_id=bucket_id, id=collection_id
)
# On POST .../records, the URI does not contain the newly created
# record id.
parts = event_uri.split("/")
if resource_name in parts[-1]:
parts.append(obj_id)
else:
# Make sure the id is correct on grouped events.
parts[-1] = obj_id
uri = "/".join(parts)
targets.append((uri, target))
# Prepare a list of object ids to be fetched from permission backend,
# and fetch them all at once. Use a mapping for later convenience.
all_perms_objects_ids = [oid for (oid, _) in targets]
all_perms_objects_ids.append(bucket_uri)
if collection_uri is not None:
all_perms_objects_ids.append(collection_uri)
all_perms_objects_ids = list(set(all_perms_objects_ids))
all_permissions = permission.get_objects_permissions(all_perms_objects_ids)
perms_by_object_id = dict(zip(all_perms_objects_ids, all_permissions))
bucket_perms = perms_by_object_id[bucket_uri]
collection_perms = {}
if collection_uri is not None:
collection_perms = perms_by_object_id[collection_uri]
# The principals allowed to read the bucket and collection.
# (Note: ``write`` means ``read``)
read_principals = set(bucket_perms.get("read", []))
read_principals.update(bucket_perms.get("write", []))
read_principals.update(collection_perms.get("read", []))
read_principals.update(collection_perms.get("write", []))
# Create a history entry for each impacted record.
for uri, target in targets:
obj_id = target["id"]
# Prepare the history entry attributes.
perms = {k: list(v) for k, v in perms_by_object_id[uri].items()}
eventattrs = dict(**payload)
eventattrs.pop("bucket_id", None)
eventattrs.setdefault("%s_id" % resource_name, obj_id)
eventattrs["uri"] = uri
attrs = dict(
date=datetime.now().isoformat(),
target={"data": target, "permissions": perms},
**eventattrs,
)
# Create a record for the 'history' resource, whose parent_id is
# the bucket URI (c.f. views.py).
# Note: this will be rolledback if the transaction is rolledback.
entry = storage.create(
parent_id=bucket_uri, collection_id="history", record=attrs
)
# The read permission on the newly created history entry is the union
# of the record permissions with the one from bucket and collection.
entry_principals = set(read_principals)
entry_principals.update(perms.get("read", []))
entry_principals.update(perms.get("write", []))
entry_perms = {"read": list(entry_principals)}
# /buckets/{id}/history is the URI for the list of history entries.
entry_perm_id = "/buckets/%s/history/%s" % (bucket_id, entry["id"])
permission.replace_object_permissions(entry_perm_id, entry_perms)
|
def on_resource_changed(event):
"""
Everytime an object is created/changed/deleted, we create an entry in the
``history`` resource. The entries are served as read-only in the
:mod:`kinto.plugins.history.views` module.
"""
payload = copy.deepcopy(event.payload)
resource_name = payload["resource_name"]
event_uri = payload["uri"]
bucket_id = payload.pop("bucket_id")
bucket_uri = instance_uri(event.request, "bucket", id=bucket_id)
collection_id = None
collection_uri = None
if "collection_id" in payload:
collection_id = payload["collection_id"]
collection_uri = instance_uri(
event.request, "collection", bucket_id=bucket_id, id=collection_id
)
storage = event.request.registry.storage
permission = event.request.registry.permission
targets = []
for impacted in event.impacted_records:
# On POST .../records, the URI does not contain the newly created
# record id.
target = impacted["new"]
obj_id = target["id"]
parts = event_uri.split("/")
if resource_name in parts[-1]:
parts.append(obj_id)
else:
# Make sure the id is correct on grouped events.
parts[-1] = obj_id
uri = "/".join(parts)
targets.append((uri, target))
# Prepare a list of object ids to be fetched from permission backend,
# and fetch them all at once. Use a mapping for later convenience.
all_perms_objects_ids = [oid for (oid, _) in targets]
all_perms_objects_ids.append(bucket_uri)
if collection_uri is not None:
all_perms_objects_ids.append(collection_uri)
all_perms_objects_ids = list(set(all_perms_objects_ids))
all_permissions = permission.get_objects_permissions(all_perms_objects_ids)
perms_by_object_id = dict(zip(all_perms_objects_ids, all_permissions))
bucket_perms = perms_by_object_id[bucket_uri]
collection_perms = {}
if collection_uri is not None:
collection_perms = perms_by_object_id[collection_uri]
# The principals allowed to read the bucket and collection.
# (Note: ``write`` means ``read``)
read_principals = set(bucket_perms.get("read", []))
read_principals.update(bucket_perms.get("write", []))
read_principals.update(collection_perms.get("read", []))
read_principals.update(collection_perms.get("write", []))
# Create a history entry for each impacted record.
for uri, target in targets:
obj_id = target["id"]
# Prepare the history entry attributes.
perms = {k: list(v) for k, v in perms_by_object_id[uri].items()}
eventattrs = dict(**payload)
eventattrs.setdefault("%s_id" % resource_name, obj_id)
eventattrs["uri"] = uri
attrs = dict(
date=datetime.now().isoformat(),
target={"data": target, "permissions": perms},
**eventattrs,
)
# Create a record for the 'history' resource, whose parent_id is
# the bucket URI (c.f. views.py).
# Note: this will be rolledback if the transaction is rolledback.
entry = storage.create(
parent_id=bucket_uri, collection_id="history", record=attrs
)
# The read permission on the newly created history entry is the union
# of the record permissions with the one from bucket and collection.
entry_principals = set(read_principals)
entry_principals.update(perms.get("read", []))
entry_principals.update(perms.get("write", []))
entry_perms = {"read": list(entry_principals)}
# /buckets/{id}/history is the URI for the list of history entries.
entry_perm_id = "/buckets/%s/history/%s" % (bucket_id, entry["id"])
permission.replace_object_permissions(entry_perm_id, entry_perms)
|
https://github.com/Kinto/kinto/issues/773
|
ERROR "DELETE /v1/buckets" ? (? ms) 'bucket_id' agent=node-fetch/1.0 (+https://github.com/bitinn/node-fetch) authn_type=basicauth collection_id=bucket collection_timestamp=1471536086687 errno=None exception=Traceback (most recent call last):
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid/tweens.py", line 22, in excview_tween
response = handler(request)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid_tm/__init__.py", line 101, in tm_tween
reraise(*exc_info)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid_tm/__init__.py", line 90, in tm_tween
manager.commit()
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/transaction/_manager.py", line 111, in commit
return self.get().commit()
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/transaction/_transaction.py", line 265, in commit
self._callBeforeCommitHooks()
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/transaction/_transaction.py", line 337, in _callBeforeCommitHooks
hook(*args, **kws)
File "/home/mathieu/Code/Mozilla/kinto/kinto/core/events.py", line 76, in _notify_resource_events_before
request.registry.notify(event)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid/registry.py", line 91, in notify
[ _ for _ in self.subscribers(events, None) ]
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/zope/interface/registry.py", line 328, in subscribers
return self.adapters.subscribers(objects, provided)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/zope/interface/adapter.py", line 596, in subscribers
subscription(*objects)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid/config/adapters.py", line 130, in subscriber_wrapper
return derived_subscriber(*arg)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid/config/adapters.py", line 103, in derived_subscriber
return subscriber(arg[0])
File "/home/mathieu/Code/Mozilla/kinto/kinto/plugins/history/listener.py", line 16, in on_resource_changed
bucket_id = payload.pop('bucket_id')
KeyError: 'bucket_id' lang=None uid=a9d81e1b56f2b77103e87601621a99b3d82a15abf7c72ce57a032d0964317c7a
|
KeyError
|
def view_lookup(request, uri):
"""
Look-up the specified `uri` and return the associated resource name
along the match dict.
:param request: the current request (used to obtain registry).
:param uri: a plural or object endpoint URI.
:rtype: tuple
:returns: the resource name and the associated matchdict.
"""
api_prefix = "/%s" % request.upath_info.split("/")[1]
# Path should be bytes in PY2, and unicode in PY3
path = _encoded(api_prefix + uri)
q = request.registry.queryUtility
routes_mapper = q(IRoutesMapper)
fakerequest = Request.blank(path=path)
info = routes_mapper(fakerequest)
matchdict, route = info["match"], info["route"]
if route is None:
raise ValueError("URI has no route")
resource_name = route.name.replace("-record", "").replace("-collection", "")
return resource_name, matchdict
|
def view_lookup(request, uri):
"""
Look-up the specified `uri` and return the associated resource name
along the match dict.
:param request: the current request (used to obtain registry).
:param uri: a plural or object endpoint URI.
:rtype: tuple
:returns: the resource name and the associated matchdict.
"""
api_prefix = "/%s" % request.upath_info.split("/")[1]
# Path should be bytes in PY2, and unicode in PY3
path = _encoded(api_prefix + uri)
q = request.registry.queryUtility
routes_mapper = q(IRoutesMapper)
fakerequest = Request.blank(path=path)
info = routes_mapper(fakerequest)
matchdict, route = info["match"], info["route"]
resource_name = route.name.replace("-record", "").replace("-collection", "")
return resource_name, matchdict
|
https://github.com/Kinto/kinto/issues/774
|
ERROR "GET /v1/permissions" ? (? ms) 'NoneType' object has no attribute 'name' agent=node-fetch/1.0 (+https://github.com/bitinn/node-fetch) authn_type=basicauth errno=None exception=Traceback (most recent call last):
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid/tweens.py", line 22, in excview_tween
response = handler(request)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid_tm/__init__.py", line 101, in tm_tween
reraise(*exc_info)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid_tm/__init__.py", line 83, in tm_tween
response = handler(request)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid/router.py", line 158, in handle_request
view_name
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid/view.py", line 547, in _call_view
response = view_callable(context, request)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid/config/views.py", line 182, in __call__
return view(context, request)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid/viewderivers.py", line 393, in attr_view
return view(context, request)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid/viewderivers.py", line 371, in predicate_wrapper
return view(context, request)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid/viewderivers.py", line 442, in rendered_view
result = view(context, request)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/cornice/service.py", line 573, in wrapper
response = view_(request)
File "/home/mathieu/Code/Mozilla/kinto/kinto/views/permissions.py", line 41, in permissions_get
resource_name, matchdict = core_utils.view_lookup(request, object_uri)
File "/home/mathieu/Code/Mozilla/kinto/kinto/core/utils.py", line 390, in view_lookup
resource_name = route.name.replace('-record', '')\
AttributeError: 'NoneType' object has no attribute 'name' lang=None uid=a9d81e1b56f2b77103e87601621a99b3d82a15abf7c72ce57a032d0964317c7a
|
AttributeError
|
def permissions_get(request):
# Invert the permissions inheritance tree.
perms_descending_tree = {}
for obtained, obtained_from in PERMISSIONS_INHERITANCE_TREE.items():
on_resource, obtained_perm = obtained.split(":", 1)
for from_resource, perms in obtained_from.items():
for perm in perms:
perms_descending_tree.setdefault(from_resource, {}).setdefault(
perm, {}
).setdefault(on_resource, set()).add(obtained_perm)
# Obtain current principals.
principals = request.effective_principals
if Authenticated in principals:
# Since this view does not require any permission (can be used to
# obtain public users permissions), we have to add the prefixed userid
# among the principals (see :mode:`kinto.core.authentication`)
userid = request.prefixed_userid
principals.append(userid)
# Query every possible permission of the current user from backend.
backend = request.registry.permission
perms_by_object_uri = backend.get_accessible_objects(principals)
entries = []
for object_uri, perms in perms_by_object_uri.items():
try:
# Obtain associated resource from object URI
resource_name, matchdict = core_utils.view_lookup(request, object_uri)
except ValueError:
# Skip permissions entries that are not linked to an object URI.
continue
# For consistency with events payloads, prefix id with resource name
matchdict[resource_name + "_id"] = matchdict.get("id")
# Expand implicit permissions using descending tree.
permissions = set(perms)
for perm in perms:
obtained = perms_descending_tree[resource_name][perm]
# Related to same resource only and not every sub-objects.
# (e.g "bucket:write" gives "bucket:read" but not "group:read")
permissions |= obtained[resource_name]
entry = dict(
uri=object_uri,
resource_name=resource_name,
permissions=list(permissions),
**matchdict,
)
entries.append(entry)
return {"data": entries}
|
def permissions_get(request):
# Invert the permissions inheritance tree.
perms_descending_tree = {}
for obtained, obtained_from in PERMISSIONS_INHERITANCE_TREE.items():
on_resource, obtained_perm = obtained.split(":", 1)
for from_resource, perms in obtained_from.items():
for perm in perms:
perms_descending_tree.setdefault(from_resource, {}).setdefault(
perm, {}
).setdefault(on_resource, set()).add(obtained_perm)
# Obtain current principals.
principals = request.effective_principals
if Authenticated in principals:
# Since this view does not require any permission (can be used to
# obtain public users permissions), we have to add the prefixed userid
# among the principals (see :mode:`kinto.core.authentication`)
userid = request.prefixed_userid
principals.append(userid)
# Query every possible permission of the current user from backend.
backend = request.registry.permission
perms_by_object_uri = backend.get_accessible_objects(principals)
entries = []
for object_uri, perms in perms_by_object_uri.items():
# Obtain associated resource from object URI
resource_name, matchdict = core_utils.view_lookup(request, object_uri)
# For consistency with events payloads, prefix id with resource name
matchdict[resource_name + "_id"] = matchdict.get("id")
# Expand implicit permissions using descending tree.
permissions = set(perms)
for perm in perms:
obtained = perms_descending_tree[resource_name][perm]
# Related to same resource only and not every sub-objects.
# (e.g "bucket:write" gives "bucket:read" but not "group:read")
permissions |= obtained[resource_name]
entry = dict(
uri=object_uri,
resource_name=resource_name,
permissions=list(permissions),
**matchdict,
)
entries.append(entry)
return {"data": entries}
|
https://github.com/Kinto/kinto/issues/774
|
ERROR "GET /v1/permissions" ? (? ms) 'NoneType' object has no attribute 'name' agent=node-fetch/1.0 (+https://github.com/bitinn/node-fetch) authn_type=basicauth errno=None exception=Traceback (most recent call last):
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid/tweens.py", line 22, in excview_tween
response = handler(request)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid_tm/__init__.py", line 101, in tm_tween
reraise(*exc_info)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid_tm/__init__.py", line 83, in tm_tween
response = handler(request)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid/router.py", line 158, in handle_request
view_name
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid/view.py", line 547, in _call_view
response = view_callable(context, request)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid/config/views.py", line 182, in __call__
return view(context, request)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid/viewderivers.py", line 393, in attr_view
return view(context, request)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid/viewderivers.py", line 371, in predicate_wrapper
return view(context, request)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid/viewderivers.py", line 442, in rendered_view
result = view(context, request)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/home/mathieu/Code/Mozilla/kinto-client/.venv/local/lib/python2.7/site-packages/cornice/service.py", line 573, in wrapper
response = view_(request)
File "/home/mathieu/Code/Mozilla/kinto/kinto/views/permissions.py", line 41, in permissions_get
resource_name, matchdict = core_utils.view_lookup(request, object_uri)
File "/home/mathieu/Code/Mozilla/kinto/kinto/core/utils.py", line 390, in view_lookup
resource_name = route.name.replace('-record', '')\
AttributeError: 'NoneType' object has no attribute 'name' lang=None uid=a9d81e1b56f2b77103e87601621a99b3d82a15abf7c72ce57a032d0964317c7a
|
AttributeError
|
def _raise_400_if_invalid_id(self, record_id):
"""Raise 400 if specified record id does not match the format excepted
by storage backends.
:raises: :class:`pyramid.httpexceptions.HTTPBadRequest`
"""
is_string = isinstance(record_id, six.string_types)
if not is_string or not self.model.id_generator.match(record_id):
error_details = {"location": "path", "description": "Invalid record id"}
raise_invalid(self.request, **error_details)
|
def _raise_400_if_invalid_id(self, record_id):
"""Raise 400 if specified record id does not match the format excepted
by storage backends.
:raises: :class:`pyramid.httpexceptions.HTTPBadRequest`
"""
if not self.model.id_generator.match(six.text_type(record_id)):
error_details = {"location": "path", "description": "Invalid record id"}
raise_invalid(self.request, **error_details)
|
https://github.com/Kinto/kinto/issues/688
|
2016-06-21 13:41:49,701 DEBUG [kinto.core.storage.postgresql.pool.QueuePoolWithMaxBacklog][waitress] Connection <connection object at 0x7f1f396ad6e0; dsn: 'dbname=postgres user=postgres password=xxxxxxxx host=localhost', closed: 0> checked out from pool
2016-06-21 13:41:49,711 ERROR [kinto.core.storage.postgresql.client][waitress] "POST /v1/buckets/3f2e55d2-25ad-5fa9-3dcb-c0f29235627f/collections/foo/records" ? (? ms) (psycopg2.ProgrammingError) operator does not exist: text = integer
LINE 4: WHERE id = 2
^
HINT: No operator matches the given name and argument type(s). You might need to add explicit type casts.
[SQL: '\n SELECT as_epoch(last_modified) AS last_modified, data\n FROM records\n WHERE id = %(object_id)s\n AND parent_id = %(parent_id)s\n AND collection_id = %(collection_id)s;\n '] [parameters: {'parent_id': u'/buckets/3f2e55d2-25ad-5fa9-3dcb-c0f29235627f/collections/foo', 'object_id': 2, 'collection_id': 'record'}] agent=HTTPie/0.9.2 authn_type=basicauth collection_id=record collection_timestamp=1466530900543 errno=None lang=None uid=44ac0e1157457b4ff75a88b490cd735f73a27d5c8ac5b3fb4672d449969d88ea
2016-06-21 13:41:49,712 DEBUG [kinto.core.storage.postgresql.pool.QueuePoolWithMaxBacklog][waitress] Connection <connection object at 0x7f1f396ad6e0; dsn: 'dbname=postgres user=postgres password=xxxxxxxx host=localhost', closed: 0> being returned to pool
2016-06-21 13:41:49,712 DEBUG [kinto.core.storage.postgresql.pool.QueuePoolWithMaxBacklog][waitress] Connection <connection object at 0x7f1f396ad6e0; dsn: 'dbname=postgres user=postgres password=xxxxxxxx host=localhost', closed: 0> rollback-on-return, via agent
2016-06-21 13:41:49,713 CRITI [kinto.core.views.errors][waitress] "POST /v1/buckets/3f2e55d2-25ad-5fa9-3dcb-c0f29235627f/collections/foo/records" ? (? ms) (psycopg2.ProgrammingError) operator does not exist: text = integer
LINE 4: WHERE id = 2
^
HINT: No operator matches the given name and argument type(s). You might need to add explicit type casts.
[SQL: '\n SELECT as_epoch(last_modified) AS last_modified, data\n FROM records\n WHERE id = %(object_id)s\n AND parent_id = %(parent_id)s\n AND collection_id = %(collection_id)s;\n '] [parameters: {'parent_id': u'/buckets/3f2e55d2-25ad-5fa9-3dcb-c0f29235627f/collections/foo', 'object_id': 2, 'collection_id': 'record'}] agent=HTTPie/0.9.2 authn_type=basicauth collection_id=record collection_timestamp=1466530900543 errno=None exception=Traceback (most recent call last):
File "/home/ethan/Jobs/Mozilla/kinto/.venv/lib/python2.7/site-packages/pyramid/tweens.py", line 22, in excview_tween
response = handler(request)
File "/home/ethan/Jobs/Mozilla/kinto/.venv/lib/python2.7/site-packages/pyramid_tm/__init__.py", line 101, in tm_tween
reraise(*exc_info)
File "/home/ethan/Jobs/Mozilla/kinto/.venv/lib/python2.7/site-packages/pyramid_tm/__init__.py", line 83, in tm_tween
response = handler(request)
File "/home/ethan/Jobs/Mozilla/kinto/.venv/lib/python2.7/site-packages/pyramid/router.py", line 158, in handle_request
view_name
File "/home/ethan/Jobs/Mozilla/kinto/.venv/lib/python2.7/site-packages/pyramid/view.py", line 547, in _call_view
response = view_callable(context, request)
File "/home/ethan/Jobs/Mozilla/kinto/.venv/lib/python2.7/site-packages/pyramid/viewderivers.py", line 413, in viewresult_to_response
result = view(context, request)
File "/home/ethan/Jobs/Mozilla/kinto/.venv/lib/python2.7/site-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/home/ethan/Jobs/Mozilla/kinto/kinto/plugins/default_bucket/__init__.py", line 150, in default_bucket
response = request.invoke_subrequest(subrequest)
File "/home/ethan/Jobs/Mozilla/kinto/.venv/lib/python2.7/site-packages/pyramid/router.py", line 211, in invoke_subrequest
response = handle_request(request)
File "/home/ethan/Jobs/Mozilla/kinto/.venv/lib/python2.7/site-packages/pyramid/router.py", line 158, in handle_request
view_name
File "/home/ethan/Jobs/Mozilla/kinto/.venv/lib/python2.7/site-packages/pyramid/view.py", line 547, in _call_view
response = view_callable(context, request)
File "/home/ethan/Jobs/Mozilla/kinto/.venv/lib/python2.7/site-packages/pyramid/config/views.py", line 182, in __call__
return view(context, request)
File "/home/ethan/Jobs/Mozilla/kinto/.venv/lib/python2.7/site-packages/pyramid/viewderivers.py", line 393, in attr_view
return view(context, request)
File "/home/ethan/Jobs/Mozilla/kinto/.venv/lib/python2.7/site-packages/pyramid/viewderivers.py", line 371, in predicate_wrapper
return view(context, request)
File "/home/ethan/Jobs/Mozilla/kinto/.venv/lib/python2.7/site-packages/pyramid/viewderivers.py", line 302, in _secured_view
return view(context, request)
File "/home/ethan/Jobs/Mozilla/kinto/.venv/lib/python2.7/site-packages/pyramid/viewderivers.py", line 442, in rendered_view
result = view(context, request)
File "/home/ethan/Jobs/Mozilla/kinto/.venv/lib/python2.7/site-packages/pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "/home/ethan/Jobs/Mozilla/kinto/.venv/lib/python2.7/site-packages/cornice/service.py", line 571, in wrapper
response = view_()
File "/home/ethan/Jobs/Mozilla/kinto/kinto/core/resource/__init__.py", line 303, in collection_post
existing = self._get_record_or_404(_id)
File "/home/ethan/Jobs/Mozilla/kinto/kinto/core/resource/__init__.py", line 670, in _get_record_or_404
return self.model.get_record(record_id)
File "/home/ethan/Jobs/Mozilla/kinto/kinto/core/resource/model.py", line 274, in get_record
record = super(ShareableModel, self).get_record(record_id, parent_id)
File "/home/ethan/Jobs/Mozilla/kinto/kinto/core/resource/model.py", line 142, in get_record
auth=self.auth)
File "/home/ethan/Jobs/Mozilla/kinto/kinto/core/storage/postgresql/__init__.py", line 261, in get
existing = result.fetchone()
File "/usr/lib64/python2.7/contextlib.py", line 35, in __exit__
self.gen.throw(type, value, traceback)
File "/home/ethan/Jobs/Mozilla/kinto/kinto/core/storage/postgresql/client.py", line 53, in connect
raise exceptions.BackendError(original=e)
BackendError: ProgrammingError: (psycopg2.ProgrammingError) operator does not exist: text = integer
LINE 4: WHERE id = 2
^
HINT: No operator matches the given name and argument type(s). You might need to add explicit type casts.
[SQL: '\n SELECT as_epoch(last_modified) AS last_modified, data\n FROM records\n WHERE id = %(object_id)s\n AND parent_id = %(parent_id)s\n AND collection_id = %(collection_id)s;\n '] [parameters: {'parent_id': u'/buckets/3f2e55d2-25ad-5fa9-3dcb-c0f29235627f/collections/foo', 'object_id': 2, 'collection_id': 'record'}] lang=None uid=44ac0e1157457b4ff75a88b490cd735f73a27d5c8ac5b3fb4672d449969d88ea
2016-06-21 13:41:49,713 INFO [kinto.core.initialization][waitress] "POST /v1/buckets/3f2e55d2-25ad-5fa9-3dcb-c0f29235627f/collections/foo/records" 503 (13 ms) request.summary agent=HTTPie/0.9.2 authn_type=basicauth collection_id=record collection_timestamp=1466530900543 errno=201 lang=None time=2016-06-21T13:41:49 uid=44ac0e1157457b4ff75a88b490cd735f73a27d5c8ac5b3fb4672d449969d88ea
|
BackendError
|
def includeme(config):
settings = config.get_settings()
# Heartbeat registry.
config.registry.heartbeats = {}
# Public settings registry.
config.registry.public_settings = {"batch_max_requests", "readonly"}
# Directive to declare arbitrary API capabilities.
def add_api_capability(config, identifier, description="", url="", **kw):
existing = config.registry.api_capabilities.get(identifier)
if existing:
error_msg = "The '%s' API capability was already registered (%s)."
raise ValueError(error_msg % (identifier, existing))
capability = dict(description=description, url=url, **kw)
config.registry.api_capabilities[identifier] = capability
config.add_directive("add_api_capability", add_api_capability)
config.registry.api_capabilities = {}
# Setup cornice.
config.include("cornice")
# Per-request transaction.
config.include("pyramid_tm")
# Add CORS settings to the base cliquet Service class.
Service.init_from_settings(settings)
# Setup components.
for step in aslist(settings["initialization_sequence"]):
step_func = config.maybe_dotted(step)
step_func(config)
# Custom helpers.
config.add_request_method(follow_subrequest)
config.add_request_method(
lambda request: {"id": request.prefixed_userid}, name="get_user_info"
)
config.commit()
# Include cliquet plugins after init, unlike pyramid includes.
includes = aslist(settings["includes"])
for app in includes:
config.include(app)
# # Show settings to output.
# for key, value in settings.items():
# logger.info('Using %s = %s' % (key, value))
# Scan views.
config.scan("cliquet.views")
# Give sign of life.
msg = "%(project_name)s %(project_version)s starting."
logger.info(msg % settings)
|
def includeme(config):
settings = config.get_settings()
# Heartbeat registry.
config.registry.heartbeats = {}
# Public settings registry.
config.registry.public_settings = {"batch_max_requests", "readonly"}
# Setup cornice.
config.include("cornice")
# Per-request transaction.
config.include("pyramid_tm")
# Add CORS settings to the base cliquet Service class.
Service.init_from_settings(settings)
# Setup components.
for step in aslist(settings["initialization_sequence"]):
step_func = config.maybe_dotted(step)
step_func(config)
# Custom helpers.
config.add_request_method(follow_subrequest)
config.add_request_method(
lambda request: {"id": request.prefixed_userid}, name="get_user_info"
)
config.commit()
# Include cliquet plugins after init, unlike pyramid includes.
includes = aslist(settings["includes"])
for app in includes:
config.include(app)
# # Show settings to output.
# for key, value in settings.items():
# logger.info('Using %s = %s' % (key, value))
# Scan views.
config.scan("cliquet.views")
# Give sign of life.
msg = "%(project_name)s %(project_version)s starting."
logger.info(msg % settings)
|
https://github.com/Kinto/kinto/issues/628
|
kinto start
event='Backend settings referring to cliquet are DEPRECATED. Please update your kinto.cache_backend setting to kinto.core.cache.memory (was: cliquet.cache.memory).'
event='Backend settings referring to cliquet are DEPRECATED. Please update your kinto.permission_backend setting to kinto.core.permission.memory (was: cliquet.permission.memory).'
event='Backend settings referring to cliquet are DEPRECATED. Please update your kinto.storage_backend setting to kinto.core.storage.memory (was: cliquet.storage.memory).'
Traceback (most recent call last):
File "/home/mathieu/Code/Mozilla/kinto/.venv/bin/kinto", line 9, in <module>
load_entry_point('kinto', 'console_scripts', 'kinto')()
File "/home/mathieu/Code/Mozilla/kinto/kinto/__main__.py", line 97, in main
pserve.main(pserve_argv)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/pyramid/scripts/pserve.py", line 60, in main
return command.run()
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/pyramid/scripts/pserve.py", line 371, in run
global_conf=vars)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/pyramid/scripts/pserve.py", line 406, in loadapp
return loadapp(app_spec, name=name, relative_to=relative_to, **kw)
File "/home/mathieu/Code/Mozilla/kinto/.venv/lib/python2.7/site-packages/PasteDeploy-1.5.2-py2.7.egg/paste/deploy/loadwsgi.py", line 247, in loadapp
return loadobj(APP, uri, name=name, **kw)
File "/home/mathieu/Code/Mozilla/kinto/.venv/lib/python2.7/site-packages/PasteDeploy-1.5.2-py2.7.egg/paste/deploy/loadwsgi.py", line 272, in loadobj
return context.create()
File "/home/mathieu/Code/Mozilla/kinto/.venv/lib/python2.7/site-packages/PasteDeploy-1.5.2-py2.7.egg/paste/deploy/loadwsgi.py", line 710, in create
return self.object_type.invoke(self)
File "/home/mathieu/Code/Mozilla/kinto/.venv/lib/python2.7/site-packages/PasteDeploy-1.5.2-py2.7.egg/paste/deploy/loadwsgi.py", line 146, in invoke
return fix_call(context.object, context.global_conf, **context.local_conf)
File "/home/mathieu/Code/Mozilla/kinto/.venv/lib/python2.7/site-packages/PasteDeploy-1.5.2-py2.7.egg/paste/deploy/util.py", line 55, in fix_call
val = callable(*args, **kw)
File "/home/mathieu/Code/Mozilla/kinto/kinto/__init__.py", line 44, in main
default_settings=DEFAULT_SETTINGS)
File "/home/mathieu/Code/Mozilla/kinto/kinto/core/initialization.py", line 553, in initialize
config.include("kinto.core", route_prefix=api_version)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/pyramid/config/__init__.py", line 800, in include
c(configurator)
File "/home/mathieu/Code/Mozilla/kinto/kinto/core/__init__.py", line 176, in includeme
logger.info(msg % settings)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/structlog/stdlib.py", line 67, in info
return self._proxy_to_logger('info', event, *args, **kw)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/structlog/stdlib.py", line 119, in _proxy_to_logger
**event_kw)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/structlog/_base.py", line 176, in _proxy_to_logger
args, kw = self._process_event(method_name, event, event_kw)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/structlog/_base.py", line 136, in _process_event
event_dict = proc(self._logger, method_name, event_dict)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/structlog/stdlib.py", line 327, in filter_by_level
if logger.isEnabledFor(_NAME_TO_LEVEL[name]):
AttributeError: 'PrintLogger' object has no attribute 'isEnabledFor'
|
AttributeError
|
def get_hello(request):
"""Return information regarding the current instance."""
settings = request.registry.settings
project_name = settings["project_name"]
project_version = settings["project_version"]
data = dict(
project_name=project_name,
project_version=project_version,
http_api_version=settings["http_api_version"],
project_docs=settings["project_docs"],
cliquet_protocol_version=PROTOCOL_VERSION,
url=request.route_url(hello.name),
)
eos = get_eos(request)
if eos:
data["eos"] = eos
data["settings"] = {}
public_settings = request.registry.public_settings
# Public settings will be prefixed with project name, unless explicitly
# specified with cliquet. (for retrocompability of clients for example).
for setting in list(public_settings):
if setting.startswith("cliquet."):
unprefixed = setting.replace("cliquet.", "", 1)
value = settings[unprefixed]
elif setting.startswith(project_name + "."):
unprefixed = setting.replace(project_name + ".", "")
value = settings[unprefixed]
else:
value = settings[setting]
data["settings"][setting] = value
prefixed_userid = getattr(request, "prefixed_userid", None)
if prefixed_userid:
data["user"] = request.get_user_info()
# Application can register and expose arbitrary capabilities.
data["capabilities"] = request.registry.api_capabilities
return data
|
def get_hello(request):
"""Return information regarding the current instance."""
settings = request.registry.settings
project_name = settings["project_name"]
project_version = settings["project_version"]
data = dict(
project_name=project_name,
project_version=project_version,
http_api_version=settings["http_api_version"],
project_docs=settings["project_docs"],
cliquet_protocol_version=PROTOCOL_VERSION,
url=request.route_url(hello.name),
)
eos = get_eos(request)
if eos:
data["eos"] = eos
data["settings"] = {}
public_settings = request.registry.public_settings
# Public settings will be prefixed with project name, unless explicitly
# specified with cliquet. (for retrocompability of clients for example).
for setting in list(public_settings):
if setting.startswith("cliquet."):
unprefixed = setting.replace("cliquet.", "", 1)
value = settings[unprefixed]
elif setting.startswith(project_name + "."):
unprefixed = setting.replace(project_name + ".", "")
value = settings[unprefixed]
else:
value = settings[setting]
data["settings"][setting] = value
prefixed_userid = getattr(request, "prefixed_userid", None)
if prefixed_userid:
data["user"] = request.get_user_info()
return data
|
https://github.com/Kinto/kinto/issues/628
|
kinto start
event='Backend settings referring to cliquet are DEPRECATED. Please update your kinto.cache_backend setting to kinto.core.cache.memory (was: cliquet.cache.memory).'
event='Backend settings referring to cliquet are DEPRECATED. Please update your kinto.permission_backend setting to kinto.core.permission.memory (was: cliquet.permission.memory).'
event='Backend settings referring to cliquet are DEPRECATED. Please update your kinto.storage_backend setting to kinto.core.storage.memory (was: cliquet.storage.memory).'
Traceback (most recent call last):
File "/home/mathieu/Code/Mozilla/kinto/.venv/bin/kinto", line 9, in <module>
load_entry_point('kinto', 'console_scripts', 'kinto')()
File "/home/mathieu/Code/Mozilla/kinto/kinto/__main__.py", line 97, in main
pserve.main(pserve_argv)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/pyramid/scripts/pserve.py", line 60, in main
return command.run()
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/pyramid/scripts/pserve.py", line 371, in run
global_conf=vars)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/pyramid/scripts/pserve.py", line 406, in loadapp
return loadapp(app_spec, name=name, relative_to=relative_to, **kw)
File "/home/mathieu/Code/Mozilla/kinto/.venv/lib/python2.7/site-packages/PasteDeploy-1.5.2-py2.7.egg/paste/deploy/loadwsgi.py", line 247, in loadapp
return loadobj(APP, uri, name=name, **kw)
File "/home/mathieu/Code/Mozilla/kinto/.venv/lib/python2.7/site-packages/PasteDeploy-1.5.2-py2.7.egg/paste/deploy/loadwsgi.py", line 272, in loadobj
return context.create()
File "/home/mathieu/Code/Mozilla/kinto/.venv/lib/python2.7/site-packages/PasteDeploy-1.5.2-py2.7.egg/paste/deploy/loadwsgi.py", line 710, in create
return self.object_type.invoke(self)
File "/home/mathieu/Code/Mozilla/kinto/.venv/lib/python2.7/site-packages/PasteDeploy-1.5.2-py2.7.egg/paste/deploy/loadwsgi.py", line 146, in invoke
return fix_call(context.object, context.global_conf, **context.local_conf)
File "/home/mathieu/Code/Mozilla/kinto/.venv/lib/python2.7/site-packages/PasteDeploy-1.5.2-py2.7.egg/paste/deploy/util.py", line 55, in fix_call
val = callable(*args, **kw)
File "/home/mathieu/Code/Mozilla/kinto/kinto/__init__.py", line 44, in main
default_settings=DEFAULT_SETTINGS)
File "/home/mathieu/Code/Mozilla/kinto/kinto/core/initialization.py", line 553, in initialize
config.include("kinto.core", route_prefix=api_version)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/pyramid/config/__init__.py", line 800, in include
c(configurator)
File "/home/mathieu/Code/Mozilla/kinto/kinto/core/__init__.py", line 176, in includeme
logger.info(msg % settings)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/structlog/stdlib.py", line 67, in info
return self._proxy_to_logger('info', event, *args, **kw)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/structlog/stdlib.py", line 119, in _proxy_to_logger
**event_kw)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/structlog/_base.py", line 176, in _proxy_to_logger
args, kw = self._process_event(method_name, event, event_kw)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/structlog/_base.py", line 136, in _process_event
event_dict = proc(self._logger, method_name, event_dict)
File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/structlog/stdlib.py", line 327, in filter_by_level
if logger.isEnabledFor(_NAME_TO_LEVEL[name]):
AttributeError: 'PrintLogger' object has no attribute 'isEnabledFor'
|
AttributeError
|
def setup_listeners(config):
# Register basic subscriber predicates, to filter events.
config.add_subscriber_predicate("for_actions", EventActionFilter)
config.add_subscriber_predicate("for_resources", EventResourceFilter)
write_actions = (ACTIONS.CREATE, ACTIONS.UPDATE, ACTIONS.DELETE)
settings = config.get_settings()
project_name = settings.get("project_name", "")
listeners = aslist(settings["event_listeners"])
for name in listeners:
logger.info("Setting up %r listener" % name)
prefix = "event_listeners.%s." % name
try:
listener_mod = config.maybe_dotted(name)
prefix = "event_listeners.%s." % name.split(".")[-1]
listener = listener_mod.load_from_config(config, prefix)
except (ImportError, AttributeError):
module_setting = prefix + "use"
# Read from ENV or settings.
module_value = utils.read_env(
project_name + "." + module_setting, settings.get(module_setting)
)
listener_mod = config.maybe_dotted(module_value)
listener = listener_mod.load_from_config(config, prefix)
# If StatsD is enabled, monitor execution time of listeners.
if getattr(config.registry, "statsd", None):
statsd_client = config.registry.statsd
key = "listeners.%s" % name
listener = statsd_client.timer(key)(listener.__call__)
# Optional filter by event action.
actions_setting = prefix + "actions"
# Read from ENV or settings.
actions_value = utils.read_env(
project_name + "." + actions_setting, settings.get(actions_setting, "")
)
actions = aslist(actions_value)
if len(actions) > 0:
actions = ACTIONS.from_string_list(actions)
else:
actions = write_actions
# Optional filter by event resource name.
resource_setting = prefix + "resources"
# Read from ENV or settings.
resource_value = utils.read_env(
project_name + "." + resource_setting, settings.get(resource_setting, "")
)
resource_names = aslist(resource_value)
# Pyramid event predicates.
options = dict(for_actions=actions, for_resources=resource_names)
if ACTIONS.READ in actions:
config.add_subscriber(listener, ResourceRead, **options)
if len(actions) == 1:
return
config.add_subscriber(listener, ResourceChanged, **options)
|
def setup_listeners(config):
# Register basic subscriber predicates, to filter events.
config.add_subscriber_predicate("for_actions", EventActionFilter)
config.add_subscriber_predicate("for_resources", EventResourceFilter)
write_actions = (ACTIONS.CREATE, ACTIONS.UPDATE, ACTIONS.DELETE)
settings = config.get_settings()
listeners = aslist(settings["event_listeners"])
for name in listeners:
logger.info("Setting up %r listener" % name)
prefix = "event_listeners.%s." % name
try:
listener_mod = config.maybe_dotted(name)
prefix = "event_listeners.%s." % name.split(".")[-1]
listener = listener_mod.load_from_config(config, prefix)
except (ImportError, AttributeError):
listener_mod = config.maybe_dotted(settings[prefix + "use"])
listener = listener_mod.load_from_config(config, prefix)
# If StatsD is enabled, monitor execution time of listeners.
if getattr(config.registry, "statsd", None):
statsd_client = config.registry.statsd
key = "listeners.%s" % name
listener = statsd_client.timer(key)(listener.__call__)
actions = aslist(settings.get(prefix + "actions", ""))
if len(actions) > 0:
actions = ACTIONS.from_string_list(actions)
else:
actions = write_actions
resource_names = aslist(settings.get(prefix + "resources", ""))
options = dict(for_actions=actions, for_resources=resource_names)
if ACTIONS.READ in actions:
config.add_subscriber(listener, ResourceRead, **options)
if len(actions) == 1:
return
config.add_subscriber(listener, ResourceChanged, **options)
|
https://github.com/Kinto/kinto/issues/515
|
web_1 | Traceback (most recent call last):
web_1 | File "/usr/local/lib/python3.5/dist-packages/cliquet/initialization.py", line 399, in setup_listeners
web_1 | listener = listener_mod.load_from_config(config, prefix)
web_1 | AttributeError: module 'redis' has no attribute 'load_from_config'
|
AttributeError
|
def create_from_config(config, prefix=""):
"""Create a PostgreSQLClient client using settings in the provided config."""
if sqlalchemy is None:
message = (
"PostgreSQL dependencies missing. "
"Refer to installation section in documentation."
)
raise ImportWarning(message)
settings = config.get_settings()
url = settings[prefix + "url"]
if url in _ENGINES:
msg = (
"Reuse existing PostgreSQL connection. "
"Parameters %s* will be ignored." % prefix
)
warnings.warn(msg)
return PostgreSQLClient(_ENGINES[url])
# Initialize SQLAlchemy engine.
poolclass_key = prefix + "poolclass"
settings.setdefault(poolclass_key, "sqlalchemy.pool.QueuePool")
settings[poolclass_key] = config.maybe_dotted(settings[poolclass_key])
settings.pop(prefix + "max_fetch_size", None)
settings.pop(prefix + "backend", None)
engine = sqlalchemy.engine_from_config(settings, prefix=prefix, url=url)
# Store one engine per URI.
_ENGINES[url] = engine
return PostgreSQLClient(engine)
|
def create_from_config(config, prefix=""):
"""Create a PostgreSQLClient client using settings in the provided config."""
if sqlalchemy is None:
message = (
"PostgreSQL dependencies missing. "
"Refer to installation section in documentation."
)
raise ImportWarning(message)
settings = config.get_settings()
url = settings[prefix + "url"]
if url in _ENGINES:
msg = (
"Reuse existing PostgreSQL connection. "
"Parameters %s* will be ignored." % prefix
)
warnings.warn(msg)
return PostgreSQLClient(_ENGINES[url])
# Initialize SQLAlchemy engine.
poolclass_key = prefix + "poolclass"
settings.setdefault(poolclass_key, "sqlalchemy.pool.QueuePool")
settings[poolclass_key] = config.maybe_dotted(settings[poolclass_key])
settings.pop(prefix + "max_fetch_size", None)
settings.pop(prefix + "backend", None)
# XXX: Disable pooling at least during tests to avoid stalled tests.
if os.getenv("TRAVIS", False): # pragma: no cover
warnings.warn("Disable pooling on TravisCI")
settings = dict([(poolclass_key, sqlalchemy.pool.StaticPool)])
engine = sqlalchemy.engine_from_config(settings, prefix=prefix, url=url)
# Store one engine per URI.
_ENGINES[url] = engine
return PostgreSQLClient(engine)
|
https://github.com/Kinto/kinto/issues/515
|
web_1 | Traceback (most recent call last):
web_1 | File "/usr/local/lib/python3.5/dist-packages/cliquet/initialization.py", line 399, in setup_listeners
web_1 | listener = listener_mod.load_from_config(config, prefix)
web_1 | AttributeError: module 'redis' has no attribute 'load_from_config'
|
AttributeError
|
def _format_conditions(self, filters, id_field, modified_field, prefix="filters"):
"""Format the filters list in SQL, with placeholders for safe escaping.
.. note::
All conditions are combined using AND.
.. note::
Field name and value are escaped as they come from HTTP API.
:returns: A SQL string with placeholders, and a dict mapping
placeholders to actual values.
:rtype: tuple
"""
operators = {
COMPARISON.EQ: "=",
COMPARISON.NOT: "<>",
COMPARISON.IN: "IN",
COMPARISON.EXCLUDE: "NOT IN",
}
conditions = []
holders = {}
for i, filtr in enumerate(filters):
value = filtr.value
if filtr.field == id_field:
sql_field = "id"
elif filtr.field == modified_field:
sql_field = "as_epoch(last_modified)"
else:
# Safely escape field name
field_holder = "%s_field_%s" % (prefix, i)
holders[field_holder] = filtr.field
# JSON operator ->> retrieves values as text.
# If field is missing, we default to ''.
sql_field = "coalesce(data->>:%s, '')" % field_holder
if isinstance(value, (int, float)) and value not in (True, False):
sql_field = "(data->>:%s)::numeric" % field_holder
if filtr.operator not in (COMPARISON.IN, COMPARISON.EXCLUDE):
# For the IN operator, let psycopg escape the values list.
# Otherwise JSON-ify the native value (e.g. True -> 'true')
if not isinstance(filtr.value, six.string_types):
value = json.dumps(filtr.value).strip('"')
else:
value = tuple(value)
if filtr.field == id_field:
value = tuple(
[v if isinstance(v, six.string_types) else None for v in value]
)
if filtr.field == modified_field:
value = tuple(
[v if isinstance(v, six.integer_types) else None for v in value]
)
# Safely escape value
value_holder = "%s_value_%s" % (prefix, i)
holders[value_holder] = value
sql_operator = operators.setdefault(filtr.operator, filtr.operator.value)
cond = "%s %s :%s" % (sql_field, sql_operator, value_holder)
conditions.append(cond)
safe_sql = " AND ".join(conditions)
return safe_sql, holders
|
def _format_conditions(self, filters, id_field, modified_field, prefix="filters"):
"""Format the filters list in SQL, with placeholders for safe escaping.
.. note::
All conditions are combined using AND.
.. note::
Field name and value are escaped as they come from HTTP API.
:returns: A SQL string with placeholders, and a dict mapping
placeholders to actual values.
:rtype: tuple
"""
operators = {
COMPARISON.EQ: "=",
COMPARISON.NOT: "<>",
COMPARISON.IN: "IN",
COMPARISON.EXCLUDE: "NOT IN",
}
conditions = []
holders = {}
for i, filtr in enumerate(filters):
value = filtr.value
if filtr.field == id_field:
sql_field = "id"
elif filtr.field == modified_field:
sql_field = "as_epoch(last_modified)"
else:
# Safely escape field name
field_holder = "%s_field_%s" % (prefix, i)
holders[field_holder] = filtr.field
# JSON operator ->> retrieves values as text.
# If field is missing, we default to ''.
sql_field = "coalesce(data->>:%s, '')" % field_holder
if isinstance(value, (int, float)) and value not in (True, False):
sql_field = "(data->>:%s)::numeric" % field_holder
if filtr.operator not in (COMPARISON.IN, COMPARISON.EXCLUDE):
# For the IN operator, let psycopg escape the values list.
# Otherwise JSON-ify the native value (e.g. True -> 'true')
if not isinstance(filtr.value, six.string_types):
value = json.dumps(filtr.value).strip('"')
else:
value = tuple(value)
# Safely escape value
value_holder = "%s_value_%s" % (prefix, i)
holders[value_holder] = value
sql_operator = operators.setdefault(filtr.operator, filtr.operator.value)
cond = "%s %s :%s" % (sql_field, sql_operator, value_holder)
conditions.append(cond)
safe_sql = " AND ".join(conditions)
return safe_sql, holders
|
https://github.com/Kinto/kinto/issues/587
|
{"Pid":4280,"EnvVersion":"2.0","Hostname":"ip-172-31-4-126","Timestamp":1462794460764000000,"Fields":{"lang":null,"exception":"Traceback (most recent call last):
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid\/tweens.py\", line 20, in excview_tween
response = handler(request)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid_tm\/__init__.py\", line 101, in tm_tween
reraise(*exc_info)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid_tm\/__init__.py\", line 83, in tm_tween
response = handler(request)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid\/router.py\", line 145, in handle_request
view_name
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid\/view.py\", line 541, in _call_view
response = view_callable(context, request)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid\/config\/views.py\", line 602, in __call__
return view(context, request)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid\/config\/views.py\", line 328, in attr_view
return view(context, request)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid\/config\/views.py\", line 304, in predicate_wrapper
return view(context, request)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid\/config\/views.py\", line 244, in _secured_view
return view(context, request)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid\/config\/views.py\", line 353, in rendered_view
result = view(context, request)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid\/config\/views.py\", line 507, in _requestonly_view
response = view(request)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/cornice\/service.py\", line 571, in wrapper
response = view_()
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/kinto\/views\/records.py\", line 76, in collection_get
result = super(Record, self).collection_get()
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/cliquet\/resource\/__init__.py\", line 240, in collection_get
include_deleted=include_deleted)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/cliquet\/resource\/model.py\", line 104, in get_records
auth=self.auth)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/cliquet\/storage\/postgresql\/__init__.py\", line 538, in get_all
retrieved = result.fetchmany(self._max_fetch_size)
File \"\/usr\/lib64\/python2.7\/contextlib.py\", line 35, in __exit__
self.gen.throw(type, value, traceback)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/cliquet\/storage\/postgresql\/client.py\", line 53, in connect
raise exceptions.BackendError(original=e)
BackendError: ProgrammingError: (psycopg2.ProgrammingError) operator does not exist: text <> integer
LINE 7: AND id NOT IN (1)
^
HINT: No operator matches the given name and argument type(s). You might need to add explicit type casts.
[SQL: '\
WITH total_filtered AS (\
SELECT COUNT(id) AS count\
FROM records\
WHERE parent_id = %(parent_id)s\
AND collection_id = %(collection_id)s\
AND id NOT IN %(filters_value_0)s\
),\
collection_filtered AS (\
SELECT id, last_modified, data\
FROM records\
WHERE parent_id = %(parent_id)s\
AND collection_id = %(collection_id)s\
AND id NOT IN %(filters_value_0)s\
LIMIT 10000\
),\
fake_deleted AS (\
SELECT (%(deleted_field)s)::JSONB AS data\
),\
filtered_deleted AS (\
SELECT id, last_modified, fake_deleted.data AS data\
FROM deleted, fake_deleted\
WHERE parent_id = %(parent_id)s\
AND collection_id = %(collection_id)s\
AND id NOT IN %(filters_value_0)s\
LIMIT 0\
),\
all_records AS (\
SELECT * FROM filtered_deleted\
UNION ALL\
SELECT * FROM collection_filtered\
),\
paginated_records AS (\
SELECT DISTINCT id\
FROM all_records\
\
)\
SELECT total_filtered.count AS count_total,\
a.id, as_epoch(a.last_modified) AS last_modified, a.data\
FROM paginated_records AS p JOIN all_records AS a ON (a.id = p.id),\
total_filtered\
ORDER BY last_modified DESC\
;\
'] [parameters: {'collection_id': 'record', 'deleted_field': '{\"deleted\":true}', 'parent_id': u'\/buckets\/blocklists\/collections\/certificates', 'filters_value_0': (1,)}]","uid":null,"errno":null,"querystring":"{\"exclude_id\":\"1\"}","agent":"Amazon CloudFront","method":"GET","collection_id":"record","path":"\/v1\/buckets\/blocklists\/collections\/certificates\/records","authn_type":null,"collection_timestamp":1461920076590},"Logger":"kinto","Type":["(psycopg2.ProgrammingError) operator does not exist: text <> integer
LINE 7: AND id NOT IN (1)
^
HINT: No operator matches the given name and argument type(s). You might need to add explicit type casts.
"],"Severity":0}
|
BackendError
|
def _extract_filters(self, queryparams=None):
"""Extracts filters from QueryString parameters."""
if not queryparams:
queryparams = self.request.GET
filters = []
for param, paramvalue in queryparams.items():
param = param.strip()
error_details = {
"name": param,
"location": "querystring",
"description": "Invalid value for %s" % param,
}
# Ignore specific fields
if param.startswith("_") and param not in ("_since", "_to", "_before"):
continue
# Handle the _since specific filter.
if param in ("_since", "_to", "_before"):
value = native_value(paramvalue.strip('"'))
if not isinstance(value, six.integer_types):
raise_invalid(self.request, **error_details)
if param == "_since":
operator = COMPARISON.GT
else:
if param == "_to":
message = "_to is now deprecated, you should use _before instead"
url = (
"http://cliquet.rtfd.org/en/2.4.0/api/resource"
".html#list-of-available-url-parameters"
)
send_alert(self.request, message, url)
operator = COMPARISON.LT
filters.append(Filter(self.model.modified_field, value, operator))
continue
m = re.match(r"^(min|max|not|lt|gt|in|exclude)_(\w+)$", param)
if m:
keyword, field = m.groups()
operator = getattr(COMPARISON, keyword.upper())
else:
operator, field = COMPARISON.EQ, param
if not self.is_known_field(field):
error_msg = "Unknown filter field '{0}'".format(param)
error_details["description"] = error_msg
raise_invalid(self.request, **error_details)
value = native_value(paramvalue)
if operator in (COMPARISON.IN, COMPARISON.EXCLUDE):
value = set([native_value(v) for v in paramvalue.split(",")])
all_integers = all([isinstance(v, six.integer_types) for v in value])
all_strings = all([isinstance(v, six.text_type) for v in value])
has_invalid_value = (field == self.model.id_field and not all_strings) or (
field == self.model.modified_field and not all_integers
)
if has_invalid_value:
raise_invalid(self.request, **error_details)
filters.append(Filter(field, value, operator))
return filters
|
def _extract_filters(self, queryparams=None):
"""Extracts filters from QueryString parameters."""
if not queryparams:
queryparams = self.request.GET
filters = []
for param, paramvalue in queryparams.items():
param = param.strip()
# Ignore specific fields
if param.startswith("_") and param not in ("_since", "_to", "_before"):
continue
# Handle the _since specific filter.
if param in ("_since", "_to", "_before"):
value = native_value(paramvalue.strip('"'))
if not isinstance(value, six.integer_types):
error_details = {
"name": param,
"location": "querystring",
"description": "Invalid value for %s" % param,
}
raise_invalid(self.request, **error_details)
if param == "_since":
operator = COMPARISON.GT
else:
if param == "_to":
message = "_to is now deprecated, you should use _before instead"
url = (
"http://cliquet.rtfd.org/en/2.4.0/api/resource"
".html#list-of-available-url-parameters"
)
send_alert(self.request, message, url)
operator = COMPARISON.LT
filters.append(Filter(self.model.modified_field, value, operator))
continue
m = re.match(r"^(min|max|not|lt|gt|in|exclude)_(\w+)$", param)
if m:
keyword, field = m.groups()
operator = getattr(COMPARISON, keyword.upper())
else:
operator, field = COMPARISON.EQ, param
if not self.is_known_field(field):
error_details = {
"location": "querystring",
"description": "Unknown filter field '{0}'".format(param),
}
raise_invalid(self.request, **error_details)
value = native_value(paramvalue)
if operator in (COMPARISON.IN, COMPARISON.EXCLUDE):
value = set([native_value(v) for v in paramvalue.split(",")])
filters.append(Filter(field, value, operator))
return filters
|
https://github.com/Kinto/kinto/issues/587
|
{"Pid":4280,"EnvVersion":"2.0","Hostname":"ip-172-31-4-126","Timestamp":1462794460764000000,"Fields":{"lang":null,"exception":"Traceback (most recent call last):
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid\/tweens.py\", line 20, in excview_tween
response = handler(request)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid_tm\/__init__.py\", line 101, in tm_tween
reraise(*exc_info)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid_tm\/__init__.py\", line 83, in tm_tween
response = handler(request)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid\/router.py\", line 145, in handle_request
view_name
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid\/view.py\", line 541, in _call_view
response = view_callable(context, request)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid\/config\/views.py\", line 602, in __call__
return view(context, request)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid\/config\/views.py\", line 328, in attr_view
return view(context, request)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid\/config\/views.py\", line 304, in predicate_wrapper
return view(context, request)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid\/config\/views.py\", line 244, in _secured_view
return view(context, request)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid\/config\/views.py\", line 353, in rendered_view
result = view(context, request)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/pyramid\/config\/views.py\", line 507, in _requestonly_view
response = view(request)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/cornice\/service.py\", line 571, in wrapper
response = view_()
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/kinto\/views\/records.py\", line 76, in collection_get
result = super(Record, self).collection_get()
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/cliquet\/resource\/__init__.py\", line 240, in collection_get
include_deleted=include_deleted)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/cliquet\/resource\/model.py\", line 104, in get_records
auth=self.auth)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/cliquet\/storage\/postgresql\/__init__.py\", line 538, in get_all
retrieved = result.fetchmany(self._max_fetch_size)
File \"\/usr\/lib64\/python2.7\/contextlib.py\", line 35, in __exit__
self.gen.throw(type, value, traceback)
File \"\/data\/kinto-dist\/lib\/python2.7\/site-packages\/cliquet\/storage\/postgresql\/client.py\", line 53, in connect
raise exceptions.BackendError(original=e)
BackendError: ProgrammingError: (psycopg2.ProgrammingError) operator does not exist: text <> integer
LINE 7: AND id NOT IN (1)
^
HINT: No operator matches the given name and argument type(s). You might need to add explicit type casts.
[SQL: '\
WITH total_filtered AS (\
SELECT COUNT(id) AS count\
FROM records\
WHERE parent_id = %(parent_id)s\
AND collection_id = %(collection_id)s\
AND id NOT IN %(filters_value_0)s\
),\
collection_filtered AS (\
SELECT id, last_modified, data\
FROM records\
WHERE parent_id = %(parent_id)s\
AND collection_id = %(collection_id)s\
AND id NOT IN %(filters_value_0)s\
LIMIT 10000\
),\
fake_deleted AS (\
SELECT (%(deleted_field)s)::JSONB AS data\
),\
filtered_deleted AS (\
SELECT id, last_modified, fake_deleted.data AS data\
FROM deleted, fake_deleted\
WHERE parent_id = %(parent_id)s\
AND collection_id = %(collection_id)s\
AND id NOT IN %(filters_value_0)s\
LIMIT 0\
),\
all_records AS (\
SELECT * FROM filtered_deleted\
UNION ALL\
SELECT * FROM collection_filtered\
),\
paginated_records AS (\
SELECT DISTINCT id\
FROM all_records\
\
)\
SELECT total_filtered.count AS count_total,\
a.id, as_epoch(a.last_modified) AS last_modified, a.data\
FROM paginated_records AS p JOIN all_records AS a ON (a.id = p.id),\
total_filtered\
ORDER BY last_modified DESC\
;\
'] [parameters: {'collection_id': 'record', 'deleted_field': '{\"deleted\":true}', 'parent_id': u'\/buckets\/blocklists\/collections\/certificates', 'filters_value_0': (1,)}]","uid":null,"errno":null,"querystring":"{\"exclude_id\":\"1\"}","agent":"Amazon CloudFront","method":"GET","collection_id":"record","path":"\/v1\/buckets\/blocklists\/collections\/certificates\/records","authn_type":null,"collection_timestamp":1461920076590},"Logger":"kinto","Type":["(psycopg2.ProgrammingError) operator does not exist: text <> integer
LINE 7: AND id NOT IN (1)
^
HINT: No operator matches the given name and argument type(s). You might need to add explicit type casts.
"],"Severity":0}
|
BackendError
|
def __init__(self, request):
# Make it available for the authorization policy.
self.prefixed_userid = getattr(request, "prefixed_userid", None)
self._check_permission = request.registry.permission.check_permission
# Partial collections of ProtectedResource:
self.shared_ids = []
# Store service, resource, record and required permission.
service = utils.current_service(request)
is_on_resource = (
service is not None
and hasattr(service, "viewset")
and hasattr(service, "resource")
)
if is_on_resource:
self.on_collection = getattr(service, "type", None) == "collection"
self.permission_object_id = self.get_permission_object_id(request)
# Decide what the required unbound permission is depending on the
# method that's being requested.
if request.method.lower() == "put":
# In the case of a "PUT", check if the targetted record already
# exists, return "write" if it does, "create" otherwise.
# If the view exists, use its collection to catch an
# eventual NotFound.
resource = service.resource(request=request, context=self)
try:
record = resource.collection.get_record(resource.record_id)
self.current_record = record
except storage_exceptions.RecordNotFoundError:
self.permission_object_id = service.collection_path.format(
**request.matchdict
)
self.required_permission = "create"
else:
self.required_permission = "write"
else:
method = request.method.lower()
self.required_permission = self.method_permissions.get(method)
self.resource_name = service.viewset.get_name(service.resource)
if self.on_collection:
object_id_match = self.get_permission_object_id(request, "*")
self.get_shared_ids = functools.partial(
request.registry.permission.principals_accessible_objects,
object_id_match=object_id_match,
)
settings = request.registry.settings
setting = "cliquet.%s_%s_principals" % (
self.resource_name,
self.required_permission,
)
self.allowed_principals = aslist(settings.get(setting, ""))
|
def __init__(self, request):
# Make it available for the authorization policy.
self.prefixed_userid = getattr(request, "prefixed_userid", None)
self._check_permission = request.registry.permission.check_permission
# Partial collections of ProtectedResource:
self.shared_ids = []
# Store service, resource, record and required permission.
service = utils.current_service(request)
is_on_resource = (
service is not None
and hasattr(service, "viewset")
and hasattr(service, "resource")
)
if is_on_resource:
self.on_collection = getattr(service, "type", None) == "collection"
self.permission_object_id = self.get_permission_object_id(request)
# Decide what the required unbound permission is depending on the
# method that's being requested.
if request.method.lower() == "put":
# In the case of a "PUT", check if the targetted record already
# exists, return "write" if it does, "create" otherwise.
# If the view exists, call it with the request and catch an
# eventual NotFound.
resource = service.resource(request=request, context=self)
try:
resource.collection.get_record(resource.record_id)
except storage_exceptions.RecordNotFoundError:
self.permission_object_id = service.collection_path.format(
**request.matchdict
)
self.required_permission = "create"
else:
self.required_permission = "write"
else:
method = request.method.lower()
self.required_permission = self.method_permissions.get(method)
self.resource_name = service.viewset.get_name(service.resource)
if self.on_collection:
object_id_match = self.get_permission_object_id(request, "*")
self.get_shared_ids = functools.partial(
request.registry.permission.principals_accessible_objects,
object_id_match=object_id_match,
)
settings = request.registry.settings
setting = "cliquet.%s_%s_principals" % (
self.resource_name,
self.required_permission,
)
self.allowed_principals = aslist(settings.get(setting, ""))
|
https://github.com/Kinto/kinto/issues/452
|
2016-02-22 15:21:36,730 ERROR [venusian][waitress] "POST /v1/buckets/default/collections/test-collection-2aa00fc8-b6e2-41d2-a6a5-28d23ca6fce5/records" ? (? ms) 'str' object does not support item assignment lang=None; exception=Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/pyramid/tweens.py", line 20, in excview_tween
response = handler(request)
File "/usr/local/lib/python2.7/dist-packages/pyramid_tm/__init__.py", line 101, in tm_tween
reraise(*exc_info)
File "/usr/local/lib/python2.7/dist-packages/pyramid_tm/__init__.py", line 83, in tm_tween
response = handler(request)
File "/usr/local/lib/python2.7/dist-packages/pyramid/router.py", line 145, in handle_request
view_name
File "/usr/local/lib/python2.7/dist-packages/pyramid/view.py", line 541, in _call_view
response = view_callable(context, request)
File "/usr/local/lib/python2.7/dist-packages/pyramid/config/views.py", line 385, in viewresult_to_response
result = view(context, request)
File "/usr/local/lib/python2.7/dist-packages/pyramid/config/views.py", line 507, in _requestonly_view
response = view(request)
File "/usr/local/lib/python2.7/dist-packages/kinto/plugins/default_bucket/__init__.py", line 108, in default_bucket
create_bucket(request, bucket_id)
File "/usr/local/lib/python2.7/dist-packages/kinto/plugins/default_bucket/__init__.py", line 35, in create_bucket
bucket = resource.model.create_record({'id': bucket_id})
File "/usr/local/lib/python2.7/dist-packages/cliquet/resource/model.py", line 294, in create_record
unique_fields)
File "/usr/local/lib/python2.7/dist-packages/cliquet/resource/model.py", line 177, in create_record
auth=self.auth)
File "/usr/local/lib/python2.7/dist-packages/cliquet/storage/postgresql/__init__.py", line 234, in create
for_creation=True)
File "/usr/local/lib/python2.7/dist-packages/cliquet/storage/postgresql/__init__.py", line 734, in _check_unicity
record = self.get(collection_id, parent_id, existing['id'])
File "/usr/local/lib/python2.7/dist-packages/cliquet/storage/postgresql/__init__.py", line 263, in get
record[id_field] = object_id
TypeError: 'str' object does not support item assignment; uid=d3f58ae9f309fa494504a2347a13f80c811b64e9d8815299b9c1467f85854288; errno=None; agent=node-superagent/1.7.2; authn_type=BasicAuth; collection_id=bucket; collection_timestamp=1456154496656
|
TypeError
|
def _get_record_or_404(self, record_id):
"""Retrieve record from storage and raise ``404 Not found`` if missing.
:raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPNotFound` if
the record is not found.
"""
if self.context and self.context.current_record:
# Set during authorization. Save a storage hit.
return self.context.current_record
try:
return self.collection.get_record(record_id)
except storage_exceptions.RecordNotFoundError:
response = http_error(HTTPNotFound(), errno=ERRORS.INVALID_RESOURCE_ID)
raise response
|
def _get_record_or_404(self, record_id):
"""Retrieve record from storage and raise ``404 Not found`` if missing.
:raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPNotFound` if
the record is not found.
"""
try:
return self.collection.get_record(record_id)
except storage_exceptions.RecordNotFoundError:
response = http_error(HTTPNotFound(), errno=ERRORS.INVALID_RESOURCE_ID)
raise response
|
https://github.com/Kinto/kinto/issues/452
|
2016-02-22 15:21:36,730 ERROR [venusian][waitress] "POST /v1/buckets/default/collections/test-collection-2aa00fc8-b6e2-41d2-a6a5-28d23ca6fce5/records" ? (? ms) 'str' object does not support item assignment lang=None; exception=Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/pyramid/tweens.py", line 20, in excview_tween
response = handler(request)
File "/usr/local/lib/python2.7/dist-packages/pyramid_tm/__init__.py", line 101, in tm_tween
reraise(*exc_info)
File "/usr/local/lib/python2.7/dist-packages/pyramid_tm/__init__.py", line 83, in tm_tween
response = handler(request)
File "/usr/local/lib/python2.7/dist-packages/pyramid/router.py", line 145, in handle_request
view_name
File "/usr/local/lib/python2.7/dist-packages/pyramid/view.py", line 541, in _call_view
response = view_callable(context, request)
File "/usr/local/lib/python2.7/dist-packages/pyramid/config/views.py", line 385, in viewresult_to_response
result = view(context, request)
File "/usr/local/lib/python2.7/dist-packages/pyramid/config/views.py", line 507, in _requestonly_view
response = view(request)
File "/usr/local/lib/python2.7/dist-packages/kinto/plugins/default_bucket/__init__.py", line 108, in default_bucket
create_bucket(request, bucket_id)
File "/usr/local/lib/python2.7/dist-packages/kinto/plugins/default_bucket/__init__.py", line 35, in create_bucket
bucket = resource.model.create_record({'id': bucket_id})
File "/usr/local/lib/python2.7/dist-packages/cliquet/resource/model.py", line 294, in create_record
unique_fields)
File "/usr/local/lib/python2.7/dist-packages/cliquet/resource/model.py", line 177, in create_record
auth=self.auth)
File "/usr/local/lib/python2.7/dist-packages/cliquet/storage/postgresql/__init__.py", line 234, in create
for_creation=True)
File "/usr/local/lib/python2.7/dist-packages/cliquet/storage/postgresql/__init__.py", line 734, in _check_unicity
record = self.get(collection_id, parent_id, existing['id'])
File "/usr/local/lib/python2.7/dist-packages/cliquet/storage/postgresql/__init__.py", line 263, in get
record[id_field] = object_id
TypeError: 'str' object does not support item assignment; uid=d3f58ae9f309fa494504a2347a13f80c811b64e9d8815299b9c1467f85854288; errno=None; agent=node-superagent/1.7.2; authn_type=BasicAuth; collection_id=bucket; collection_timestamp=1456154496656
|
TypeError
|
def render_template(template, destination, **kwargs):
template = os.path.join(HERE, template)
folder = os.path.dirname(destination)
os.makedirs(folder)
with codecs.open(template, "r", encoding="utf-8") as f:
raw_template = f.read()
rendered = raw_template.format(**kwargs)
with codecs.open(destination, "w+", encoding="utf-8") as output:
output.write(rendered)
|
def render_template(template, destination, **kwargs):
template = os.path.join(HERE, template)
with codecs.open(template, "r", encoding="utf-8") as f:
raw_template = f.read()
rendered = raw_template.format(**kwargs)
with codecs.open(destination, "w+", encoding="utf-8") as output:
output.write(rendered)
|
https://github.com/Kinto/kinto/issues/302
|
kinto init
Which backend to use? (1 - postgresql, 2 - redis, default - memory)
Traceback (most recent call last):
File "/var/www/kinto.leplat.re/venv/bin/kinto", line 9, in <module>
load_entry_point('kinto==1.9.0', 'console_scripts', 'kinto')()
File "/var/www/kinto.leplat.re/venv/local/lib/python2.7/site-packages/kinto/__main__.py", line 44, in main
init(config_file)
File "/var/www/kinto.leplat.re/venv/local/lib/python2.7/site-packages/kinto/config/__init__.py", line 61, in init
permission_url=values['permission_url'])
File "/var/www/kinto.leplat.re/venv/local/lib/python2.7/site-packages/kinto/config/__init__.py", line 15, in render_template
with codecs.open(destination, 'w+', encoding='utf-8') as output:
File "/var/www/kinto.leplat.re/venv/lib/python2.7/codecs.py", line 881, in open
file = __builtin__.open(filename, mode, buffering)
IOError: [Errno 2] No such file or directory: 'config/kinto.ini'
|
IOError
|
def __init__(self, file, fs=None):
"""Initialise the FSFile instance.
Args:
file (str, Pathlike, or OpenFile):
String, object implementing the `os.PathLike` protocol, or
an `fsspec.OpenFile` instance. If passed an instance of
`fsspec.OpenFile`, the following argument ``fs`` has no
effect.
fs (fsspec filesystem, optional)
Object implementing the fsspec filesystem protocol.
"""
try:
self._file = file.path
self._fs = file.fs
except AttributeError:
self._file = file
self._fs = fs
|
def __init__(self, file, fs=None):
"""Initialise the FSFile instance.
*file* can be string or an fsspec.OpenFile instance. In the latter case, the follow argument *fs* has no effect.
*fs* can be None or a fsspec filesystem instance.
"""
try:
self._file = file.path
self._fs = file.fs
except AttributeError:
self._file = file
self._fs = fs
|
https://github.com/pytroll/satpy/issues/1516
|
Traceback (most recent call last):
File "mwe/mwe002.py", line 5, in <module>
print(str(fsf))
TypeError: __str__ returned non-string (type PosixPath)
|
TypeError
|
def __str__(self):
"""Return the string version of the filename."""
return os.fspath(self._file)
|
def __str__(self):
"""Return the string version of the filename."""
return self._file
|
https://github.com/pytroll/satpy/issues/1516
|
Traceback (most recent call last):
File "mwe/mwe002.py", line 5, in <module>
print(str(fsf))
TypeError: __str__ returned non-string (type PosixPath)
|
TypeError
|
def __fspath__(self):
"""Comply with PathLike."""
return os.fspath(self._file)
|
def __fspath__(self):
"""Comply with PathLike."""
return self._file
|
https://github.com/pytroll/satpy/issues/1516
|
Traceback (most recent call last):
File "mwe/mwe002.py", line 5, in <module>
print(str(fsf))
TypeError: __str__ returned non-string (type PosixPath)
|
TypeError
|
def __init__(self, filename, filename_info, filetype_info):
"""Initialize object information by reading the input file."""
super(AVHRRAAPPL1BFile, self).__init__(filename, filename_info, filetype_info)
self.channels = {i: None for i in AVHRR_CHANNEL_NAMES}
self.units = {i: "counts" for i in AVHRR_CHANNEL_NAMES}
self._data = None
self._header = None
self._is3b = None
self._is3a = None
self._shape = None
self.area = None
self.sensor = "avhrr-3"
self.read()
self.active_channels = self._get_active_channels()
self.platform_name = PLATFORM_NAMES.get(self._header["satid"][0], None)
if self.platform_name is None:
raise ValueError("Unsupported platform ID: %d" % self.header["satid"])
|
def __init__(self, filename, filename_info, filetype_info):
"""Initialize object information by reading the input file."""
super(AVHRRAAPPL1BFile, self).__init__(filename, filename_info, filetype_info)
self.channels = {i: None for i in AVHRR_CHANNEL_NAMES}
self.units = {i: "counts" for i in AVHRR_CHANNEL_NAMES}
self._data = None
self._header = None
self._is3b = None
self._is3a = None
self._shape = None
self.lons = None
self.lats = None
self.area = None
self.sensor = "avhrr-3"
self.read()
self.platform_name = PLATFORM_NAMES.get(self._header["satid"][0], None)
if self.platform_name is None:
raise ValueError("Unsupported platform ID: %d" % self.header["satid"])
self.sunz, self.satz, self.azidiff = None, None, None
|
https://github.com/pytroll/satpy/issues/1330
|
Traceback (most recent call last):
File "/home/a001673/usr/src/satpy/satpy/readers/yaml_reader.py", line 785, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid, **kwargs)
File "/home/a001673/usr/src/satpy/satpy/readers/yaml_reader.py", line 668, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers, **kwargs)
File "/home/a001673/usr/src/satpy/satpy/readers/yaml_reader.py", line 644, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/home/a001673/usr/src/satpy/satpy/readers/aapp_l1b.py", line 112, in get_dataset
dataset = self.calibrate(key)
File "/home/a001673/usr/src/satpy/satpy/readers/aapp_l1b.py", line 234, in calibrate
raise ValueError("Empty dataset for channel 3A")
ValueError: Empty dataset for channel 3A
|
ValueError
|
def get_dataset(self, key, info):
"""Get a dataset from the file."""
if key["name"] in CHANNEL_NAMES:
if self.active_channels[key["name"]]:
dataset = self.calibrate(key)
else:
return None
elif key["name"] in ["longitude", "latitude"]:
dataset = self.navigate(key["name"])
dataset.attrs = info
elif key["name"] in ANGLES:
dataset = self.get_angles(key["name"])
else:
raise ValueError("Not a supported dataset: %s", key["name"])
self._update_dataset_attributes(dataset, key, info)
if not self._shape:
self._shape = dataset.shape
return dataset
|
def get_dataset(self, key, info):
"""Get a dataset from the file."""
if key["name"] in CHANNEL_NAMES:
dataset = self.calibrate(key)
elif key["name"] in ["longitude", "latitude"]:
if self.lons is None or self.lats is None:
self.navigate()
if key["name"] == "longitude":
dataset = create_xarray(self.lons)
else:
dataset = create_xarray(self.lats)
dataset.attrs = info
else: # Get sun-sat angles
if key["name"] in ANGLES:
if isinstance(getattr(self, ANGLES[key["name"]]), np.ndarray):
dataset = create_xarray(getattr(self, ANGLES[key["name"]]))
else:
dataset = self.get_angles(key["name"])
else:
raise ValueError(
"Not a supported sun-sensor viewing angle: %s", key["name"]
)
dataset.attrs.update({"platform_name": self.platform_name, "sensor": self.sensor})
dataset.attrs.update(key.to_dict())
for meta_key in ("standard_name", "units"):
if meta_key in info:
dataset.attrs.setdefault(meta_key, info[meta_key])
if not self._shape:
self._shape = dataset.shape
return dataset
|
https://github.com/pytroll/satpy/issues/1330
|
Traceback (most recent call last):
File "/home/a001673/usr/src/satpy/satpy/readers/yaml_reader.py", line 785, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid, **kwargs)
File "/home/a001673/usr/src/satpy/satpy/readers/yaml_reader.py", line 668, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers, **kwargs)
File "/home/a001673/usr/src/satpy/satpy/readers/yaml_reader.py", line 644, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/home/a001673/usr/src/satpy/satpy/readers/aapp_l1b.py", line 112, in get_dataset
dataset = self.calibrate(key)
File "/home/a001673/usr/src/satpy/satpy/readers/aapp_l1b.py", line 234, in calibrate
raise ValueError("Empty dataset for channel 3A")
ValueError: Empty dataset for channel 3A
|
ValueError
|
def get_angles(self, angle_id):
"""Get sun-satellite viewing angles."""
sunz, satz, azidiff = self._get_all_interpolated_angles()
name_to_variable = dict(zip(ANGLES, (satz, sunz, azidiff)))
return create_xarray(name_to_variable[angle_id])
|
def get_angles(self, angle_id):
"""Get sun-satellite viewing angles."""
sunz40km = self._data["ang"][:, :, 0] * 1e-2
satz40km = self._data["ang"][:, :, 1] * 1e-2
azidiff40km = self._data["ang"][:, :, 2] * 1e-2
try:
from geotiepoints.interpolator import Interpolator
except ImportError:
logger.warning(
"Could not interpolate sun-sat angles, python-geotiepoints missing."
)
self.sunz, self.satz, self.azidiff = sunz40km, satz40km, azidiff40km
else:
cols40km = np.arange(24, 2048, 40)
cols1km = np.arange(2048)
lines = sunz40km.shape[0]
rows40km = np.arange(lines)
rows1km = np.arange(lines)
along_track_order = 1
cross_track_order = 3
satint = Interpolator(
[sunz40km, satz40km, azidiff40km],
(rows40km, cols40km),
(rows1km, cols1km),
along_track_order,
cross_track_order,
)
self.sunz, self.satz, self.azidiff = delayed(satint.interpolate, nout=3)()
self.sunz = da.from_delayed(self.sunz, (lines, 2048), sunz40km.dtype)
self.satz = da.from_delayed(self.satz, (lines, 2048), satz40km.dtype)
self.azidiff = da.from_delayed(self.azidiff, (lines, 2048), azidiff40km.dtype)
return create_xarray(getattr(self, ANGLES[angle_id]))
|
https://github.com/pytroll/satpy/issues/1330
|
Traceback (most recent call last):
File "/home/a001673/usr/src/satpy/satpy/readers/yaml_reader.py", line 785, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid, **kwargs)
File "/home/a001673/usr/src/satpy/satpy/readers/yaml_reader.py", line 668, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers, **kwargs)
File "/home/a001673/usr/src/satpy/satpy/readers/yaml_reader.py", line 644, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/home/a001673/usr/src/satpy/satpy/readers/aapp_l1b.py", line 112, in get_dataset
dataset = self.calibrate(key)
File "/home/a001673/usr/src/satpy/satpy/readers/aapp_l1b.py", line 234, in calibrate
raise ValueError("Empty dataset for channel 3A")
ValueError: Empty dataset for channel 3A
|
ValueError
|
def navigate(self, coordinate_id):
"""Get the longitudes and latitudes of the scene."""
lons, lats = self._get_all_interpolated_coordinates()
if coordinate_id == "longitude":
return create_xarray(lons)
elif coordinate_id == "latitude":
return create_xarray(lats)
else:
raise KeyError("Coordinate {} unknown.".format(coordinate_id))
|
def navigate(self):
"""Get the longitudes and latitudes of the scene."""
lons40km = self._data["pos"][:, :, 1] * 1e-4
lats40km = self._data["pos"][:, :, 0] * 1e-4
try:
from geotiepoints import SatelliteInterpolator
except ImportError:
logger.warning("Could not interpolate lon/lats, python-geotiepoints missing.")
self.lons, self.lats = lons40km, lats40km
else:
cols40km = np.arange(24, 2048, 40)
cols1km = np.arange(2048)
lines = lons40km.shape[0]
rows40km = np.arange(lines)
rows1km = np.arange(lines)
along_track_order = 1
cross_track_order = 3
satint = SatelliteInterpolator(
(lons40km, lats40km),
(rows40km, cols40km),
(rows1km, cols1km),
along_track_order,
cross_track_order,
)
self.lons, self.lats = delayed(satint.interpolate, nout=2)()
self.lons = da.from_delayed(self.lons, (lines, 2048), lons40km.dtype)
self.lats = da.from_delayed(self.lats, (lines, 2048), lats40km.dtype)
|
https://github.com/pytroll/satpy/issues/1330
|
Traceback (most recent call last):
File "/home/a001673/usr/src/satpy/satpy/readers/yaml_reader.py", line 785, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid, **kwargs)
File "/home/a001673/usr/src/satpy/satpy/readers/yaml_reader.py", line 668, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers, **kwargs)
File "/home/a001673/usr/src/satpy/satpy/readers/yaml_reader.py", line 644, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/home/a001673/usr/src/satpy/satpy/readers/aapp_l1b.py", line 112, in get_dataset
dataset = self.calibrate(key)
File "/home/a001673/usr/src/satpy/satpy/readers/aapp_l1b.py", line 234, in calibrate
raise ValueError("Empty dataset for channel 3A")
ValueError: Empty dataset for channel 3A
|
ValueError
|
def calibrate(self, dataset_id, pre_launch_coeffs=False, calib_coeffs=None):
"""Calibrate the data."""
if calib_coeffs is None:
calib_coeffs = {}
units = {
"reflectance": "%",
"brightness_temperature": "K",
"counts": "",
"radiance": "W*m-2*sr-1*cm ?",
}
if dataset_id["name"] in ("3a", "3b") and self._is3b is None:
# Is it 3a or 3b:
self._is3a = (
da.bitwise_and(da.from_array(self._data["scnlinbit"], chunks=LINE_CHUNK), 3)
== 0
)
self._is3b = (
da.bitwise_and(da.from_array(self._data["scnlinbit"], chunks=LINE_CHUNK), 3)
== 1
)
try:
vis_idx = ["1", "2", "3a"].index(dataset_id["name"])
ir_idx = None
except ValueError:
vis_idx = None
ir_idx = ["3b", "4", "5"].index(dataset_id["name"])
mask = True
if vis_idx is not None:
coeffs = calib_coeffs.get("ch" + dataset_id["name"])
if dataset_id["name"] == "3a":
mask = self._is3a[:, None]
ds = create_xarray(
_vis_calibrate(
self._data,
vis_idx,
dataset_id["calibration"],
pre_launch_coeffs,
coeffs,
mask=mask,
)
)
else:
if dataset_id["name"] == "3b":
mask = self._is3b[:, None]
ds = create_xarray(
_ir_calibrate(
self._header, self._data, ir_idx, dataset_id["calibration"], mask=mask
)
)
ds.attrs["units"] = units[dataset_id["calibration"]]
ds.attrs.update(dataset_id._asdict())
return ds
|
def calibrate(self, dataset_id, pre_launch_coeffs=False, calib_coeffs=None):
"""Calibrate the data."""
if calib_coeffs is None:
calib_coeffs = {}
units = {
"reflectance": "%",
"brightness_temperature": "K",
"counts": "",
"radiance": "W*m-2*sr-1*cm ?",
}
if dataset_id["name"] in ("3a", "3b") and self._is3b is None:
# Is it 3a or 3b:
self._is3a = (
da.bitwise_and(da.from_array(self._data["scnlinbit"], chunks=LINE_CHUNK), 3)
== 0
)
self._is3b = (
da.bitwise_and(da.from_array(self._data["scnlinbit"], chunks=LINE_CHUNK), 3)
== 1
)
if dataset_id["name"] == "3a" and not np.any(self._is3a):
raise ValueError("Empty dataset for channel 3A")
if dataset_id["name"] == "3b" and not np.any(self._is3b):
raise ValueError("Empty dataset for channel 3B")
try:
vis_idx = ["1", "2", "3a"].index(dataset_id["name"])
ir_idx = None
except ValueError:
vis_idx = None
ir_idx = ["3b", "4", "5"].index(dataset_id["name"])
mask = True
if vis_idx is not None:
coeffs = calib_coeffs.get("ch" + dataset_id["name"])
if dataset_id["name"] == "3a":
mask = self._is3a[:, None]
ds = create_xarray(
_vis_calibrate(
self._data,
vis_idx,
dataset_id["calibration"],
pre_launch_coeffs,
coeffs,
mask=mask,
)
)
else:
if dataset_id["name"] == "3b":
mask = self._is3b[:, None]
ds = create_xarray(
_ir_calibrate(
self._header, self._data, ir_idx, dataset_id["calibration"], mask=mask
)
)
ds.attrs["units"] = units[dataset_id["calibration"]]
ds.attrs.update(dataset_id._asdict())
return ds
|
https://github.com/pytroll/satpy/issues/1330
|
Traceback (most recent call last):
File "/home/a001673/usr/src/satpy/satpy/readers/yaml_reader.py", line 785, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid, **kwargs)
File "/home/a001673/usr/src/satpy/satpy/readers/yaml_reader.py", line 668, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers, **kwargs)
File "/home/a001673/usr/src/satpy/satpy/readers/yaml_reader.py", line 644, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/home/a001673/usr/src/satpy/satpy/readers/aapp_l1b.py", line 112, in get_dataset
dataset = self.calibrate(key)
File "/home/a001673/usr/src/satpy/satpy/readers/aapp_l1b.py", line 234, in calibrate
raise ValueError("Empty dataset for channel 3A")
ValueError: Empty dataset for channel 3A
|
ValueError
|
def configs_for_reader(reader=None, ppp_config_dir=None):
"""Generate reader configuration files for one or more readers.
Args:
reader (Optional[str]): Yield configs only for this reader
ppp_config_dir (Optional[str]): Additional configuration directory
to search for reader configuration files.
Returns: Generator of lists of configuration files
"""
search_paths = (ppp_config_dir,) if ppp_config_dir else tuple()
if reader is not None:
if not isinstance(reader, (list, tuple)):
reader = [reader]
# check for old reader names
new_readers = []
for reader_name in reader:
if reader_name.endswith(".yaml") or reader_name not in OLD_READER_NAMES:
new_readers.append(reader_name)
continue
new_name = OLD_READER_NAMES[reader_name]
# Satpy 0.11 only displays a warning
# Satpy 0.13 will raise an exception
raise ValueError(
"Reader name '{}' has been deprecated, use '{}' instead.".format(
reader_name, new_name
)
)
# Satpy 0.15 or 1.0, remove exception and mapping
reader = new_readers
# given a config filename or reader name
config_files = [r if r.endswith(".yaml") else r + ".yaml" for r in reader]
else:
reader_configs = glob_config(os.path.join("readers", "*.yaml"), *search_paths)
config_files = set(reader_configs)
for config_file in config_files:
config_basename = os.path.basename(config_file)
reader_name = os.path.splitext(config_basename)[0]
reader_configs = config_search_paths(
os.path.join("readers", config_basename), *search_paths
)
if not reader_configs:
# either the reader they asked for does not exist
# or satpy is improperly configured and can't find its own readers
raise ValueError("No reader named: {}".format(reader_name))
yield reader_configs
|
def configs_for_reader(reader=None, ppp_config_dir=None):
"""Generate reader configuration files for one or more readers.
Args:
reader (Optional[str]): Yield configs only for this reader
ppp_config_dir (Optional[str]): Additional configuration directory
to search for reader configuration files.
Returns: Generator of lists of configuration files
"""
search_paths = (ppp_config_dir,) if ppp_config_dir else tuple()
if reader is not None:
if not isinstance(reader, (list, tuple)):
reader = [reader]
# check for old reader names
new_readers = []
for reader_name in reader:
if reader_name.endswith(".yaml") or reader_name not in OLD_READER_NAMES:
new_readers.append(reader_name)
continue
new_name = OLD_READER_NAMES[reader_name]
# Satpy 0.11 only displays a warning
# Satpy 0.13 will raise an exception
raise ValueError(
"Reader name '{}' has been deprecated, use '{}' instead.".format(
reader_name, new_name
)
)
# Satpy 0.15 or 1.0, remove exception and mapping
reader = new_readers
# given a config filename or reader name
config_files = [r if r.endswith(".yaml") else r + ".yaml" for r in reader]
else:
reader_configs = glob_config(os.path.join("readers", "*.yaml"), *search_paths)
config_files = set(reader_configs)
for config_file in config_files:
config_basename = os.path.basename(config_file)
reader_configs = config_search_paths(
os.path.join("readers", config_basename), *search_paths
)
if not reader_configs:
# either the reader they asked for does not exist
# or satpy is improperly configured and can't find its own readers
raise ValueError("No reader(s) named: {}".format(reader))
yield reader_configs
|
https://github.com/pytroll/satpy/issues/1201
|
Traceback (most recent call last):
File "mwe46.py", line 8, in <module>
sc = Scene(filenames={reader_sev: fn_sev, reader_nok: fn_nok})
File "/data/gholl/miniconda3/envs/py38/lib/python3.8/site-packages/satpy/scene.py", line 149, in __init__
self.readers = self.create_reader_instances(filenames=filenames,
File "/data/gholl/miniconda3/envs/py38/lib/python3.8/site-packages/satpy/scene.py", line 193, in create_reader_instances
return load_readers(filenames=filenames,
File "/data/gholl/miniconda3/envs/py38/lib/python3.8/site-packages/satpy/readers/__init__.py", line 706, in load_readers
for idx, reader_configs in enumerate(configs_for_reader(reader, ppp_config_dir)):
File "/data/gholl/miniconda3/envs/py38/lib/python3.8/site-packages/satpy/readers/__init__.py", line 552, in configs_for_reader
raise ValueError("No reader(s) named: {}".format(reader))
ValueError: No reader(s) named: ['fruit-reader']
|
ValueError
|
def _slice_area_from_bbox(self, src_area, dst_area, ll_bbox=None, xy_bbox=None):
"""Slice the provided area using the bounds provided."""
if ll_bbox is not None:
dst_area = AreaDefinition(
"crop_area",
"crop_area",
"crop_latlong",
{"proj": "latlong"},
100,
100,
ll_bbox,
)
elif xy_bbox is not None:
crs = src_area.crs if hasattr(src_area, "crs") else src_area.proj_dict
dst_area = AreaDefinition(
"crop_area",
"crop_area",
"crop_xy",
crs,
src_area.x_size,
src_area.y_size,
xy_bbox,
)
x_slice, y_slice = src_area.get_area_slices(dst_area)
return src_area[y_slice, x_slice], y_slice, x_slice
|
def _slice_area_from_bbox(self, src_area, dst_area, ll_bbox=None, xy_bbox=None):
"""Slice the provided area using the bounds provided."""
if ll_bbox is not None:
dst_area = AreaDefinition(
"crop_area",
"crop_area",
"crop_latlong",
{"proj": "latlong"},
100,
100,
ll_bbox,
)
elif xy_bbox is not None:
dst_area = AreaDefinition(
"crop_area",
"crop_area",
"crop_xy",
src_area.proj_dict,
src_area.x_size,
src_area.y_size,
xy_bbox,
)
x_slice, y_slice = src_area.get_area_slices(dst_area)
return src_area[y_slice, x_slice], y_slice, x_slice
|
https://github.com/pytroll/satpy/issues/1124
|
[DEBUG: 2020-03-29 12:19:40 : fiona.env] Entering env context: <fiona.env.Env object at 0x000002426A002708>
[DEBUG: 2020-03-29 12:19:40 : fiona.env] Starting outermost env
[DEBUG: 2020-03-29 12:19:40 : fiona.env] No GDAL environment exists
[DEBUG: 2020-03-29 12:19:40 : fiona.env] New GDAL environment <fiona._env.GDALEnv object at 0x0000024269FF6EC8> created
[DEBUG: 2020-03-29 12:19:40 : fiona._env] Logging error handler pushed.
[DEBUG: 2020-03-29 12:19:40 : fiona._env] All drivers registered.
[DEBUG: 2020-03-29 12:19:40 : fiona._env] GDAL_DATA found in environment: '...anaconda3\\envs\\Sentinel5\\Library\\share\\gdal'.
[DEBUG: 2020-03-29 12:19:40 : fiona._env] PROJ_LIB found in environment: '...anaconda3\\envs\\Sentinel5\\Library\\share\\proj'.
[DEBUG: 2020-03-29 12:19:40 : fiona._env] Started GDALEnv <fiona._env.GDALEnv object at 0x0000024269FF6EC8>.
[DEBUG: 2020-03-29 12:19:40 : fiona.env] Updated existing <fiona._env.GDALEnv object at 0x0000024269FF6EC8> with options {}
[DEBUG: 2020-03-29 12:19:40 : fiona.env] Entered env context: <fiona.env.Env object at 0x000002426A002708>
[DEBUG: 2020-03-29 12:19:40 : fiona.ogrext] Got coordinate system
[DEBUG: 2020-03-29 12:19:40 : fiona.ogrext] Got coordinate system
[DEBUG: 2020-03-29 12:19:40 : fiona.ogrext] Index: 0
[DEBUG: 2020-03-29 12:19:40 : fiona.collection] Flushed buffer
[DEBUG: 2020-03-29 12:19:40 : fiona.collection] Stopped session
[DEBUG: 2020-03-29 12:19:40 : fiona.env] Exiting env context: <fiona.env.Env object at 0x000002426A002708>
[DEBUG: 2020-03-29 12:19:40 : fiona.env] Cleared existing <fiona._env.GDALEnv object at 0x0000024269FF6EC8> options
[DEBUG: 2020-03-29 12:19:40 : fiona._env] Stopping GDALEnv <fiona._env.GDALEnv object at 0x0000024269FF6EC8>.
[DEBUG: 2020-03-29 12:19:40 : fiona._env] Error handler popped.
[DEBUG: 2020-03-29 12:19:40 : fiona._env] Stopped GDALEnv <fiona._env.GDALEnv object at 0x0000024269FF6EC8>.
[DEBUG: 2020-03-29 12:19:40 : fiona.env] Exiting outermost env
[DEBUG: 2020-03-29 12:19:40 : fiona.env] Exited env context: <fiona.env.Env object at 0x000002426A002708>
[DEBUG: 2020-03-29 12:19:40 : fiona.env] Entering env context: <fiona.env.Env object at 0x0000024269F94AC8>
[DEBUG: 2020-03-29 12:19:40 : fiona.env] Starting outermost env
[DEBUG: 2020-03-29 12:19:40 : fiona.env] No GDAL environment exists
[DEBUG: 2020-03-29 12:19:40 : fiona.env] New GDAL environment <fiona._env.GDALEnv object at 0x000002426A02CD48> created
[DEBUG: 2020-03-29 12:19:40 : fiona._env] Logging error handler pushed.
[DEBUG: 2020-03-29 12:19:40 : fiona._env] All drivers registered.
[DEBUG: 2020-03-29 12:19:40 : fiona._env] GDAL_DATA found in environment: '...anaconda3\\envs\\Sentinel5\\Library\\share\\gdal'.
[DEBUG: 2020-03-29 12:19:40 : fiona._env] PROJ_LIB found in environment: '...anaconda3\\envs\\Sentinel5\\Library\\share\\proj'.
[DEBUG: 2020-03-29 12:19:40 : fiona._env] Started GDALEnv <fiona._env.GDALEnv object at 0x000002426A02CD48>.
[DEBUG: 2020-03-29 12:19:40 : fiona.env] Updated existing <fiona._env.GDALEnv object at 0x000002426A02CD48> with options {}
[DEBUG: 2020-03-29 12:19:40 : fiona.env] Entered env context: <fiona.env.Env object at 0x0000024269F94AC8>
[DEBUG: 2020-03-29 12:19:40 : fiona.ogrext] Got coordinate system
[DEBUG: 2020-03-29 12:19:40 : fiona.ogrext] Got coordinate system
[DEBUG: 2020-03-29 12:19:40 : fiona.ogrext] Index: 0
[DEBUG: 2020-03-29 12:19:40 : fiona.collection] Flushed buffer
[DEBUG: 2020-03-29 12:19:40 : fiona.collection] Stopped session
[DEBUG: 2020-03-29 12:19:40 : fiona.env] Exiting env context: <fiona.env.Env object at 0x0000024269F94AC8>
[DEBUG: 2020-03-29 12:19:40 : fiona.env] Cleared existing <fiona._env.GDALEnv object at 0x000002426A02CD48> options
[DEBUG: 2020-03-29 12:19:40 : fiona._env] Stopping GDALEnv <fiona._env.GDALEnv object at 0x000002426A02CD48>.
[DEBUG: 2020-03-29 12:19:40 : fiona._env] Error handler popped.
[DEBUG: 2020-03-29 12:19:40 : fiona._env] Stopped GDALEnv <fiona._env.GDALEnv object at 0x000002426A02CD48>.
[DEBUG: 2020-03-29 12:19:40 : fiona.env] Exiting outermost env
[DEBUG: 2020-03-29 12:19:40 : fiona.env] Exited env context: <fiona.env.Env object at 0x0000024269F94AC8>
[DEBUG: 2020-03-29 12:24:31 : satpy.readers] Reading ['...anaconda3\\envs\\Sentinel5\\lib\\site-packages\\satpy\\etc\\readers\\msi_safe.yaml']
[DEBUG: 2020-03-29 12:24:31 : satpy.scene] Setting 'PPP_CONFIG_DIR' to '...anaconda3\envs\Sentinel5\lib\site-packages\satpy\etc'
[DEBUG: 2020-03-29 12:24:31 : satpy.readers] Reading ['...anaconda3\\envs\\Sentinel5\\lib\\site-packages\\satpy\\etc\\readers\\msi_safe.yaml']
[DEBUG: 2020-03-29 12:24:31 : satpy.readers.yaml_reader] Assigning to msi_safe: ['.\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438.SAFE\\GRANULE\\L1C_T32UQV_A024758_20200319T101336\\IMG_DATA\\T32UQV_20200319T101021_B8A.jp2', '.\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438.SAFE\\GRANULE\\L1C_T32UQV_A024758_20200319T101336\\IMG_DATA\\T32UQV_20200319T101021_B08.jp2', '.\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438.SAFE\\GRANULE\\L1C_T32UQV_A024758_20200319T101336\\IMG_DATA\\T32UQV_20200319T101021_B05.jp2', '.\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438.SAFE\\GRANULE\\L1C_T32UQV_A024758_20200319T101336\\IMG_DATA\\T32UQV_20200319T101021_B04.jp2', '.\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438.SAFE\\GRANULE\\L1C_T32UQV_A024758_20200319T101336\\IMG_DATA\\T32UQV_20200319T101021_B11.jp2', '.\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438.SAFE\\GRANULE\\L1C_T32UQV_A024758_20200319T101336\\IMG_DATA\\T32UQV_20200319T101021_B03.jp2', '.\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438.SAFE\\GRANULE\\L1C_T32UQV_A024758_20200319T101336\\IMG_DATA\\T32UQV_20200319T101021_B12.jp2', '.\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438.SAFE\\GRANULE\\L1C_T32UQV_A024758_20200319T101336\\IMG_DATA\\T32UQV_20200319T101021_B10.jp2', '.\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438.SAFE\\GRANULE\\L1C_T32UQV_A024758_20200319T101336\\IMG_DATA\\T32UQV_20200319T101021_TCI.jp2', '.\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438.SAFE\\GRANULE\\L1C_T32UQV_A024758_20200319T101336\\IMG_DATA\\T32UQV_20200319T101021_B09.jp2', '.\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438.SAFE\\GRANULE\\L1C_T32UQV_A024758_20200319T101336\\IMG_DATA\\T32UQV_20200319T101021_B06.jp2', '.\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438.SAFE\\GRANULE\\L1C_T32UQV_A024758_20200319T101336\\IMG_DATA\\T32UQV_20200319T101021_B07.jp2', '.\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438.SAFE\\GRANULE\\L1C_T32UQV_A024758_20200319T101336\\IMG_DATA\\T32UQV_20200319T101021_B01.jp2', '.\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438.SAFE\\GRANULE\\L1C_T32UQV_A024758_20200319T101336\\IMG_DATA\\T32UQV_20200319T101021_B02.jp2', '.\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438\\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438.SAFE\\GRANULE\\L1C_T32UQV_A024758_20200319T101336\\MTD_TL.xml']
[DEBUG: 2020-03-29 12:24:31 : satpy.composites] Looking for composites config file msi.yaml
[DEBUG: 2020-03-29 12:24:31 : satpy.composites] Looking for composites config file visir.yaml
...anaconda3\envs\Sentinel5\lib\site-packages\pyproj\crs\crs.py:53: FutureWarning: '+init=<authority>:<code>' syntax is deprecated. '<authority>:<code>' is the preferred initialization method. When making the change, be mindful of axis order changes: https://pyproj4.github.io/pyproj/stable/gotchas.html#axis-order-changes-in-proj-6
return _prepare_from_string(" ".join(pjargs))
...anaconda3\envs\Sentinel5\lib\site-packages\pyproj\crs\crs.py:543: UserWarning: You will likely lose important projection information when converting to a PROJ string from another format. See: https://proj.org/faq.html#what-is-the-best-format-for-describing-coordinate-reference-systems
proj_string = self.to_proj4()
...anaconda3\envs\Sentinel5\lib\site-packages\pyproj\crs\crs.py:53: FutureWarning: '+init=<authority>:<code>' syntax is deprecated. '<authority>:<code>' is the preferred initialization method. When making the change, be mindful of axis order changes: https://pyproj4.github.io/pyproj/stable/gotchas.html#axis-order-changes-in-proj-6
return _prepare_from_string(" ".join(pjargs))
...anaconda3\envs\Sentinel5\lib\site-packages\pyproj\crs\crs.py:543: UserWarning: You will likely lose important projection information when converting to a PROJ string from another format. See: https://proj.org/faq.html#what-is-the-best-format-for-describing-coordinate-reference-systems
proj_string = self.to_proj4()
...anaconda3\envs\Sentinel5\lib\site-packages\pyproj\crs\crs.py:53: FutureWarning: '+init=<authority>:<code>' syntax is deprecated. '<authority>:<code>' is the preferred initialization method. When making the change, be mindful of axis order changes: https://pyproj4.github.io/pyproj/stable/gotchas.html#axis-order-changes-in-proj-6
return _prepare_from_string(" ".join(pjargs))
...anaconda3\envs\Sentinel5\lib\site-packages\pyproj\crs\crs.py:543: UserWarning: You will likely lose important projection information when converting to a PROJ string from another format. See: https://proj.org/faq.html#what-is-the-best-format-for-describing-coordinate-reference-systems
proj_string = self.to_proj4()
...anaconda3\envs\Sentinel5\lib\site-packages\pyproj\crs\crs.py:53: FutureWarning: '+init=<authority>:<code>' syntax is deprecated. '<authority>:<code>' is the preferred initialization method. When making the change, be mindful of axis order changes: https://pyproj4.github.io/pyproj/stable/gotchas.html#axis-order-changes-in-proj-6
return _prepare_from_string(" ".join(pjargs))
...anaconda3\envs\Sentinel5\lib\site-packages\pyproj\crs\crs.py:543: UserWarning: You will likely lose important projection information when converting to a PROJ string from another format. See: https://proj.org/faq.html#what-is-the-best-format-for-describing-coordinate-reference-systems
proj_string = self.to_proj4()
[DEBUG: 2020-03-29 12:24:32 : satpy.readers.msi_safe] Reading B02.
...anaconda3\envs\Sentinel5\lib\site-packages\pyproj\crs\crs.py:53: FutureWarning: '+init=<authority>:<code>' syntax is deprecated. '<authority>:<code>' is the preferred initialization method. When making the change, be mindful of axis order changes: https://pyproj4.github.io/pyproj/stable/gotchas.html#axis-order-changes-in-proj-6
return _prepare_from_string(" ".join(pjargs))
...anaconda3\envs\Sentinel5\lib\site-packages\pyproj\crs\crs.py:543: UserWarning: You will likely lose important projection information when converting to a PROJ string from another format. See: https://proj.org/faq.html#what-is-the-best-format-for-describing-coordinate-reference-systems
proj_string = self.to_proj4()
[DEBUG: 2020-03-29 12:24:32 : satpy.readers.msi_safe] Reading B04.
...anaconda3\envs\Sentinel5\lib\site-packages\pyproj\crs\crs.py:53: FutureWarning: '+init=<authority>:<code>' syntax is deprecated. '<authority>:<code>' is the preferred initialization method. When making the change, be mindful of axis order changes: https://pyproj4.github.io/pyproj/stable/gotchas.html#axis-order-changes-in-proj-6
return _prepare_from_string(" ".join(pjargs))
...anaconda3\envs\Sentinel5\lib\site-packages\pyproj\crs\crs.py:543: UserWarning: You will likely lose important projection information when converting to a PROJ string from another format. See: https://proj.org/faq.html#what-is-the-best-format-for-describing-coordinate-reference-systems
proj_string = self.to_proj4()
...anaconda3\envs\Sentinel5\lib\site-packages\pyproj\crs\crs.py:53: FutureWarning: '+init=<authority>:<code>' syntax is deprecated. '<authority>:<code>' is the preferred initialization method. When making the change, be mindful of axis order changes: https://pyproj4.github.io/pyproj/stable/gotchas.html#axis-order-changes-in-proj-6
return _prepare_from_string(" ".join(pjargs))
...anaconda3\envs\Sentinel5\lib\site-packages\pyproj\crs\crs.py:543: UserWarning: You will likely lose important projection information when converting to a PROJ string from another format. See: https://proj.org/faq.html#what-is-the-best-format-for-describing-coordinate-reference-systems
proj_string = self.to_proj4()
[DEBUG: 2020-03-29 12:24:32 : satpy.readers.msi_safe] Reading B03.
[DEBUG: 2020-03-29 12:24:32 : satpy.composites] Applying sun zen correction
[DEBUG: 2020-03-29 12:24:32 : satpy.composites] Computing sun zenith angles.
[DEBUG: 2020-03-29 12:24:32 : satpy.composites] Apply the effective solar atmospheric path length correction method by Li and Shibata
[DEBUG: 2020-03-29 12:24:32 : satpy.composites] Sun-zenith correction applied. Computation time: 0.1 (sec)
[DEBUG: 2020-03-29 12:24:32 : satpy.composites] Applying sun zen correction
[DEBUG: 2020-03-29 12:24:32 : satpy.composites] Computing sun zenith angles.
[DEBUG: 2020-03-29 12:24:32 : satpy.composites] Apply the standard sun-zenith correction [1/cos(sunz)]
[DEBUG: 2020-03-29 12:24:32 : satpy.composites] Sun-zenith correction applied. Computation time: 0.1 (sec)
[INFO: 2020-03-29 12:24:32 : satpy.composites] Removing Rayleigh scattering with atmosphere 'us-standard' and aerosol type 'rayleigh_only' for 'B02'
[INFO: 2020-03-29 12:24:33 : pyspectral.rayleigh] Atmosphere chosen: us-standard
[DEBUG: 2020-03-29 12:24:33 : pyspectral.rayleigh] LUT filename: ...\AppData\Local\pytroll\pyspectral\rayleigh_only\rayleigh_lut_us-standard.h5
[DEBUG: 2020-03-29 12:24:33 : pyspectral.rsr_reader] Filename: ...\AppData\Local\pytroll\pyspectral\rsr_msi_Sentinel-2A.h5
[DEBUG: 2020-03-29 12:24:33 : pyspectral.rsr_reader] Filename: ...\AppData\Local\pytroll\pyspectral\rsr_msi_Sentinel-2A.h5
[DEBUG: 2020-03-29 12:24:33 : pyspectral.rsr_reader] No detectors found - assume only one...
[DEBUG: 2020-03-29 12:24:33 : pyspectral.rayleigh] Band name: B02 Effective wavelength: 0.489455
[DEBUG: 2020-03-29 12:24:33 : pyspectral.rayleigh] Time - Interpolation: 0.082282
[DEBUG: 2020-03-29 12:24:33 : satpy.composites] Applying sun zen correction
[DEBUG: 2020-03-29 12:24:33 : satpy.composites] Computing sun zenith angles.
[DEBUG: 2020-03-29 12:24:33 : satpy.composites] Apply the effective solar atmospheric path length correction method by Li and Shibata
[DEBUG: 2020-03-29 12:24:33 : satpy.composites] Sun-zenith correction applied. Computation time: 0.1 (sec)
[INFO: 2020-03-29 12:24:33 : satpy.composites] Removing Rayleigh scattering with atmosphere 'us-standard' and aerosol type 'rayleigh_only' for 'B04'
[INFO: 2020-03-29 12:24:33 : pyspectral.rayleigh] Atmosphere chosen: us-standard
[DEBUG: 2020-03-29 12:24:33 : pyspectral.rayleigh] LUT filename:...\AppData\Local\pytroll\pyspectral\rayleigh_only\rayleigh_lut_us-standard.h5
[DEBUG: 2020-03-29 12:24:33 : pyspectral.rsr_reader] Filename: ...\AppData\Local\pytroll\pyspectral\rsr_msi_Sentinel-2A.h5
[DEBUG: 2020-03-29 12:24:33 : pyspectral.rsr_reader] Filename: ...\AppData\Local\pytroll\pyspectral\rsr_msi_Sentinel-2A.h5
[DEBUG: 2020-03-29 12:24:33 : pyspectral.rsr_reader] No detectors found - assume only one...
[DEBUG: 2020-03-29 12:24:33 : pyspectral.rayleigh] Band name: B04 Effective wavelength: 0.664111
[DEBUG: 2020-03-29 12:24:33 : pyspectral.rayleigh] Time - Interpolation: 0.090826
[DEBUG: 2020-03-29 12:24:33 : satpy.composites] Applying sun zen correction
[DEBUG: 2020-03-29 12:24:33 : satpy.composites] Computing sun zenith angles.
[DEBUG: 2020-03-29 12:24:33 : satpy.composites] Apply the effective solar atmospheric path length correction method by Li and Shibata
[DEBUG: 2020-03-29 12:24:33 : satpy.composites] Sun-zenith correction applied. Computation time: 0.1 (sec)
[INFO: 2020-03-29 12:24:33 : satpy.composites] Removing Rayleigh scattering with atmosphere 'us-standard' and aerosol type 'rayleigh_only' for 'B03'
[INFO: 2020-03-29 12:24:33 : pyspectral.rayleigh] Atmosphere chosen: us-standard
[DEBUG: 2020-03-29 12:24:33 : pyspectral.rayleigh] LUT filename: ...\AppData\Local\pytroll\pyspectral\rayleigh_only\rayleigh_lut_us-standard.h5
[DEBUG: 2020-03-29 12:24:34 : pyspectral.rsr_reader] Filename: ...\AppData\Local\pytroll\pyspectral\rsr_msi_Sentinel-2A.h5
[DEBUG: 2020-03-29 12:24:34 : pyspectral.rsr_reader] Filename: ...\AppData\Local\pytroll\pyspectral\rsr_msi_Sentinel-2A.h5
[DEBUG: 2020-03-29 12:24:34 : pyspectral.rsr_reader] No detectors found - assume only one...
[DEBUG: 2020-03-29 12:24:34 : pyspectral.rayleigh] Band name: B03 Effective wavelength: 0.559090
[DEBUG: 2020-03-29 12:24:34 : pyspectral.rayleigh] Time - Interpolation: 0.088291
[DEBUG: 2020-03-29 12:24:34 : satpy.scene] Unloading dataset: DatasetID(name='satellite_zenith_angle', wavelength=None, resolution=10, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2020-03-29 12:24:34 : satpy.scene] Unloading dataset: DatasetID(name='solar_azimuth_angle', wavelength=None, resolution=10, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2020-03-29 12:24:34 : satpy.scene] Unloading dataset: DatasetID(name='solar_zenith_angle', wavelength=None, resolution=10, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2020-03-29 12:24:34 : satpy.scene] Unloading dataset: DatasetID(name='B02', wavelength=(0.44, 0.49, 0.54), resolution=10, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2020-03-29 12:24:34 : satpy.scene] Unloading dataset: DatasetID(name='B04', wavelength=(0.645, 0.665, 0.685), resolution=10, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2020-03-29 12:24:34 : satpy.scene] Unloading dataset: DatasetID(name='satellite_azimuth_angle', wavelength=None, resolution=10, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2020-03-29 12:24:34 : satpy.scene] Unloading dataset: DatasetID(name='B03', wavelength=(0.54, 0.56, 0.58), resolution=10, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2020-03-29 12:24:34 : satpy.scene] Unloading dataset: DatasetID(name='B02', wavelength=(0.44, 0.49, 0.54), resolution=10, polarization=None, calibration=None, level=None, modifiers=('effective_solar_pathlength_corrected',))
[DEBUG: 2020-03-29 12:24:34 : satpy.scene] Unloading dataset: DatasetID(name='B04', wavelength=(0.645, 0.665, 0.685), resolution=10, polarization=None, calibration=None, level=None, modifiers=('sunz_corrected',))
[DEBUG: 2020-03-29 12:24:34 : satpy.scene] Unloading dataset: DatasetID(name='B02', wavelength=(0.44, 0.49, 0.54), resolution=10, polarization=None, calibration=None, level=None, modifiers=('effective_solar_pathlength_corrected', 'rayleigh_corrected'))
[DEBUG: 2020-03-29 12:24:34 : satpy.scene] Unloading dataset: DatasetID(name='B04', wavelength=(0.645, 0.665, 0.685), resolution=10, polarization=None, calibration=None, level=None, modifiers=('effective_solar_pathlength_corrected',))
[DEBUG: 2020-03-29 12:24:34 : satpy.scene] Unloading dataset: DatasetID(name='B04', wavelength=(0.645, 0.665, 0.685), resolution=10, polarization=None, calibration=None, level=None, modifiers=('effective_solar_pathlength_corrected', 'rayleigh_corrected'))
[DEBUG: 2020-03-29 12:24:34 : satpy.scene] Unloading dataset: DatasetID(name='B03', wavelength=(0.54, 0.56, 0.58), resolution=10, polarization=None, calibration=None, level=None, modifiers=('effective_solar_pathlength_corrected',))
[DEBUG: 2020-03-29 12:24:34 : satpy.scene] Unloading dataset: DatasetID(name='B03', wavelength=(0.54, 0.56, 0.58), resolution=10, polarization=None, calibration=None, level=None, modifiers=('effective_solar_pathlength_corrected', 'rayleigh_corrected'))
[DEBUG: 2020-03-29 12:24:34 : satpy.writers] Reading ['...\\anaconda3\\envs\\Sentinel5\\lib\\site-packages\\satpy\\etc\\writers\\simple_image.yaml']
[DEBUG: 2020-03-29 12:24:35 : satpy.writers] Enhancement configuration options: [{'name': 'cira_stretch', 'method': <function cira_stretch at 0x000002426BDEA288>}]
[DEBUG: 2020-03-29 12:24:35 : satpy.enhancements] Applying the cira-stretch
[DEBUG: 2020-03-29 12:24:35 : satpy.writers.simple_image] Saving to image: .\S2A_MSIL1C_20200319T101021_N0209_R022_T32UQV_20200319T121438\RGB.png
...\anaconda3\envs\Sentinel5\lib\site-packages\dask\core.py:121: RuntimeWarning: invalid value encountered in log
return func(*(_execute_task(a, cache) for a in args))
...anaconda3\envs\Sentinel5\lib\site-packages\dask\core.py:121: RuntimeWarning: invalid value encountered in log
return func(*(_execute_task(a, cache) for a in args))
...anaconda3\envs\Sentinel5\lib\site-packages\dask\core.py:121: RuntimeWarning: invalid value encountered in log
return func(*(_execute_task(a, cache) for a in args))
...anaconda3\envs\Sentinel5\lib\site-packages\dask\core.py:121: RuntimeWarning: invalid value encountered in log
return func(*(_execute_task(a, cache) for a in args))
...anaconda3\envs\Sentinel5\lib\site-packages\dask\core.py:121: RuntimeWarning: invalid value encountered in log
return func(*(_execute_task(a, cache) for a in args))
...anaconda3\envs\Sentinel5\lib\site-packages\dask\core.py:121: RuntimeWarning: invalid value encountered in log
return func(*(_execute_task(a, cache) for a in args))
...anaconda3\envs\Sentinel5\lib\site-packages\dask\core.py:121: RuntimeWarning: invalid value encountered in log
return func(*(_execute_task(a, cache) for a in args))
...anaconda3\envs\Sentinel5\lib\site-packages\dask\core.py:121: RuntimeWarning: invalid value encountered in log
return func(*(_execute_task(a, cache) for a in args))
...anaconda3\envs\Sentinel5\lib\site-packages\dask\core.py:121: RuntimeWarning: invalid value encountered in log
return func(*(_execute_task(a, cache) for a in args))
...anaconda3\envs\Sentinel5\lib\site-packages\dask\core.py:121: RuntimeWarning: invalid value encountered in log
return func(*(_execute_task(a, cache) for a in args))
...anaconda3\envs\Sentinel5\lib\site-packages\dask\core.py:121: RuntimeWarning: invalid value encountered in log
return func(*(_execute_task(a, cache) for a in args))
...anaconda3\envs\Sentinel5\lib\site-packages\pyproj\crs\crs.py:53: FutureWarning: '+init=<authority>:<code>' syntax is deprecated. '<authority>:<code>' is the preferred initialization method. When making the change, be mindful of axis order changes: https://pyproj4.github.io/pyproj/stable/gotchas.html#axis-order-changes-in-proj-6
return _prepare_from_string(" ".join(pjargs))
...anaconda3\envs\Sentinel5\lib\site-packages\pyproj\crs\crs.py:294: FutureWarning: '+init=<authority>:<code>' syntax is deprecated. '<authority>:<code>' is the preferred initialization method. When making the change, be mindful of axis order changes: https://pyproj4.github.io/pyproj/stable/gotchas.html#axis-order-changes-in-proj-6
projstring = _prepare_from_string(" ".join((projstring, projkwargs)))
{'sensor': {'msi'}, 'start_time': datetime.datetime(2020, 3, 19, 10, 10, 21), 'end_time': datetime.datetime(2020, 3, 19, 10, 10, 21)}
...anaconda3\envs\Sentinel5\lib\site-packages\pyproj\crs\crs.py:53: FutureWarning: '+init=<authority>:<code>' syntax is deprecated. '<authority>:<code>' is the preferred initialization method. When making the change, be mindful of axis order changes: https://pyproj4.github.io/pyproj/stable/gotchas.html#axis-order-changes-in-proj-6
return _prepare_from_string(" ".join(pjargs))
...anaconda3\envs\Sentinel5\lib\site-packages\pyproj\crs\crs.py:294: FutureWarning: '+init=<authority>:<code>' syntax is deprecated. '<authority>:<code>' is the preferred initialization method. When making the change, be mindful of axis order changes: https://pyproj4.github.io/pyproj/stable/gotchas.html#axis-order-changes-in-proj-6
projstring = _prepare_from_string(" ".join((projstring, projkwargs)))
[DEBUG: 2020-03-29 12:33:23 : satpy.scene] Setting 'PPP_CONFIG_DIR' to '...anaconda3\envs\Sentinel5\lib\site-packages\satpy\etc'
Traceback (most recent call last):
File "...\AppData\Local\JetBrains\Toolbox\apps\PyCharm-P\ch-0\193.6911.25\plugins\python\helpers\pydev\pydevd.py", line 1434, in _exec
pydev_imports.execfile(file, globals, locals) # execute the script
File "...AppData\Local\JetBrains\Toolbox\apps\PyCharm-P\ch-0\193.6911.25\plugins\python\helpers\pydev\_pydev_imps\_pydev_execfile.py", line 18, in execfile
exec(compile(contents+"\n", file, 'exec'), glob, loc)
File "D:/Programmierung/Python/DataMining/Sentinel5/SentinelSatDownload.py", line 166, in <module>
crop_image_by_box(img, box_pos, folder)
File "D:/Programmierung/Python/DataMining/Sentinel5/SentinelSatDownload.py", line 51, in crop_image_by_box
scene_llbox = scn.crop(xy_bbox=bbox)
File "...anaconda3\envs\Sentinel5\lib\site-packages\satpy\scene.py", line 598, in crop
min_area, area, ll_bbox, xy_bbox)
File "...anaconda3\envs\Sentinel5\lib\site-packages\satpy\scene.py", line 481, in _slice_area_from_bbox
x_slice, y_slice = src_area.get_area_slices(dst_area)
File "...anaconda3\envs\Sentinel5\lib\site-packages\pyresample\geometry.py", line 1937, in get_area_slices
raise NotImplementedError("Source projection must be 'geos' if "
NotImplementedError: Source projection must be 'geos' if source/target projections are not equal.
Process finished with exit code 1
|
NotImplementedError
|
def __init__(self, filename, filename_info, filetype_info):
"""Initialize FileHandler."""
super(EPSAVHRRFile, self).__init__(filename, filename_info, filetype_info)
self.lons, self.lats = None, None
self.sun_azi, self.sun_zen, self.sat_azi, self.sat_zen = None, None, None, None
self.area = None
self.three_a_mask, self.three_b_mask = None, None
self._start_time = filename_info["start_time"]
self._end_time = filename_info["end_time"]
self.form = None
self.scanlines = None
self.pixels = None
self.sections = None
|
def __init__(self, filename, filename_info, filetype_info):
"""Initialize FileHandler."""
super(EPSAVHRRFile, self).__init__(filename, filename_info, filetype_info)
self.lons, self.lats = None, None
self.sun_azi, self.sun_zen, self.sat_azi, self.sat_zen = None, None, None, None
self.area = None
self.three_a_mask, self.three_b_mask = None, None
self._start_time = filename_info["start_time"]
self._end_time = filename_info["end_time"]
self.records = None
self.form = None
self.mdrs = None
self.scanlines = None
self.pixels = None
self.sections = None
|
https://github.com/pytroll/satpy/issues/924
|
[DEBUG: 2019-10-08 10:34:09 : satpy.readers.eps_l1b] Reading /data/pytroll/testdata/eps-l1b/avhrr_metop02_20191008055203_20191008062503.eps
[ERROR: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] Could not load dataset 'DatasetID(name='longitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())': Too many ('veadr', 1)
Traceback (most recent call last):
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 309, in get_dataset
self._read_all(self.filename)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 174, in _read_all
raise ValueError("Too many " + str((rec_class, sub_class)))
ValueError: Too many ('veadr', 1)
[DEBUG: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] No coordinates found for DatasetID(name='latitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())
Traceback (most recent call last):
File "/software/pytroll/scripts/test-veadr.py", line 10, in <module>
global_scene.load(['overview'])
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 967, in load
self.read(**kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 875, in read
return self._read_datasets(nodes, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 716, in _read_datasets
new_datasets = reader_instance.load(ds_ids, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 794, in load
ds = self._load_dataset_with_area(dsid, coords)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 312, in get_dataset
lons, lats = self.get_full_lonlats()
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 232, in get_full_lonlats
shape=(self.scanlines, self.pixels))
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 2406, in from_delayed
return Array(graph, name, chunks, dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 880, in __new__
self._chunks = normalize_chunks(chunks, shape, dtype=self.dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in normalize_chunks
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
TypeError: a float is required
|
ValueError
|
def _read_all(self):
logger.debug("Reading %s", self.filename)
self.sections, self.form = read_records(self.filename)
self.scanlines = self["TOTAL_MDR"]
if self.scanlines != len(self.sections[("mdr", 2)]):
logger.warning(
"Number of declared records doesn't match number of scanlines in the file."
)
self.pixels = self["EARTH_VIEWS_PER_SCANLINE"]
|
def _read_all(self, filename):
LOG.debug("Reading %s", filename)
self.records, self.form = read_raw(filename)
self.mdrs = [
record
for record in self.records
if record_class[record["record_class"]] == "mdr"
]
self.iprs = [
record
for record in self.records
if record_class[record["record_class"]] == "ipr"
]
self.scanlines = len(self.mdrs)
self.sections = {("mdr", 2): np.hstack(self.mdrs)}
self.sections[("ipr", 0)] = np.hstack(self.iprs)
for record in self.records:
rec_class = record_class[record["record_class"]]
sub_class = record["RECORD_SUBCLASS"]
if rec_class in ["mdr", "ipr"]:
continue
if (rec_class, sub_class) in self.sections:
raise ValueError("Too many " + str((rec_class, sub_class)))
else:
self.sections[(rec_class, sub_class)] = record
self.pixels = self["EARTH_VIEWS_PER_SCANLINE"]
|
https://github.com/pytroll/satpy/issues/924
|
[DEBUG: 2019-10-08 10:34:09 : satpy.readers.eps_l1b] Reading /data/pytroll/testdata/eps-l1b/avhrr_metop02_20191008055203_20191008062503.eps
[ERROR: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] Could not load dataset 'DatasetID(name='longitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())': Too many ('veadr', 1)
Traceback (most recent call last):
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 309, in get_dataset
self._read_all(self.filename)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 174, in _read_all
raise ValueError("Too many " + str((rec_class, sub_class)))
ValueError: Too many ('veadr', 1)
[DEBUG: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] No coordinates found for DatasetID(name='latitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())
Traceback (most recent call last):
File "/software/pytroll/scripts/test-veadr.py", line 10, in <module>
global_scene.load(['overview'])
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 967, in load
self.read(**kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 875, in read
return self._read_datasets(nodes, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 716, in _read_datasets
new_datasets = reader_instance.load(ds_ids, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 794, in load
ds = self._load_dataset_with_area(dsid, coords)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 312, in get_dataset
lons, lats = self.get_full_lonlats()
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 232, in get_full_lonlats
shape=(self.scanlines, self.pixels))
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 2406, in from_delayed
return Array(graph, name, chunks, dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 880, in __new__
self._chunks = normalize_chunks(chunks, shape, dtype=self.dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in normalize_chunks
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
TypeError: a float is required
|
ValueError
|
def __getitem__(self, key):
"""Get value for given key."""
for altkey in self.form.scales.keys():
try:
try:
return self.sections[altkey][key] * self.form.scales[altkey][key]
except TypeError:
val = self.sections[altkey][key].item().decode().split("=")[1]
try:
return float(val) * self.form.scales[altkey][key].item()
except ValueError:
return val.strip()
except (KeyError, ValueError):
continue
raise KeyError("No matching value for " + str(key))
|
def __getitem__(self, key):
"""Get value for given key."""
for altkey in self.form.scales.keys():
try:
try:
return (
da.from_array(self.sections[altkey][key], chunks=CHUNK_SIZE)
* self.form.scales[altkey][key]
)
except TypeError:
val = self.sections[altkey][key].decode().split("=")[1]
try:
return float(val) * self.form.scales[altkey][key]
except ValueError:
return val.strip()
except (KeyError, ValueError):
continue
raise KeyError("No matching value for " + str(key))
|
https://github.com/pytroll/satpy/issues/924
|
[DEBUG: 2019-10-08 10:34:09 : satpy.readers.eps_l1b] Reading /data/pytroll/testdata/eps-l1b/avhrr_metop02_20191008055203_20191008062503.eps
[ERROR: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] Could not load dataset 'DatasetID(name='longitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())': Too many ('veadr', 1)
Traceback (most recent call last):
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 309, in get_dataset
self._read_all(self.filename)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 174, in _read_all
raise ValueError("Too many " + str((rec_class, sub_class)))
ValueError: Too many ('veadr', 1)
[DEBUG: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] No coordinates found for DatasetID(name='latitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())
Traceback (most recent call last):
File "/software/pytroll/scripts/test-veadr.py", line 10, in <module>
global_scene.load(['overview'])
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 967, in load
self.read(**kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 875, in read
return self._read_datasets(nodes, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 716, in _read_datasets
new_datasets = reader_instance.load(ds_ids, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 794, in load
ds = self._load_dataset_with_area(dsid, coords)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 312, in get_dataset
lons, lats = self.get_full_lonlats()
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 232, in get_full_lonlats
shape=(self.scanlines, self.pixels))
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 2406, in from_delayed
return Array(graph, name, chunks, dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 880, in __new__
self._chunks = normalize_chunks(chunks, shape, dtype=self.dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in normalize_chunks
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
TypeError: a float is required
|
ValueError
|
def _get_full_lonlats(self, lons, lats):
nav_sample_rate = self["NAV_SAMPLE_RATE"]
if nav_sample_rate == 20 and self.pixels == 2048:
from geotiepoints import metop20kmto1km
return metop20kmto1km(lons, lats)
else:
raise NotImplementedError(
"Lon/lat expansion not implemented for "
+ "sample rate = "
+ str(nav_sample_rate)
+ " and earth views = "
+ str(self.pixels)
)
|
def _get_full_lonlats(self):
lats = np.hstack(
(
self["EARTH_LOCATION_FIRST"][:, [0]],
self["EARTH_LOCATIONS"][:, :, 0],
self["EARTH_LOCATION_LAST"][:, [0]],
)
)
lons = np.hstack(
(
self["EARTH_LOCATION_FIRST"][:, [1]],
self["EARTH_LOCATIONS"][:, :, 1],
self["EARTH_LOCATION_LAST"][:, [1]],
)
)
nav_sample_rate = self["NAV_SAMPLE_RATE"]
earth_views_per_scanline = self["EARTH_VIEWS_PER_SCANLINE"]
if nav_sample_rate == 20 and earth_views_per_scanline == 2048:
from geotiepoints import metop20kmto1km
return metop20kmto1km(lons, lats)
else:
raise NotImplementedError(
"Lon/lat expansion not implemented for "
+ "sample rate = "
+ str(nav_sample_rate)
+ " and earth views = "
+ str(earth_views_per_scanline)
)
|
https://github.com/pytroll/satpy/issues/924
|
[DEBUG: 2019-10-08 10:34:09 : satpy.readers.eps_l1b] Reading /data/pytroll/testdata/eps-l1b/avhrr_metop02_20191008055203_20191008062503.eps
[ERROR: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] Could not load dataset 'DatasetID(name='longitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())': Too many ('veadr', 1)
Traceback (most recent call last):
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 309, in get_dataset
self._read_all(self.filename)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 174, in _read_all
raise ValueError("Too many " + str((rec_class, sub_class)))
ValueError: Too many ('veadr', 1)
[DEBUG: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] No coordinates found for DatasetID(name='latitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())
Traceback (most recent call last):
File "/software/pytroll/scripts/test-veadr.py", line 10, in <module>
global_scene.load(['overview'])
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 967, in load
self.read(**kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 875, in read
return self._read_datasets(nodes, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 716, in _read_datasets
new_datasets = reader_instance.load(ds_ids, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 794, in load
ds = self._load_dataset_with_area(dsid, coords)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 312, in get_dataset
lons, lats = self.get_full_lonlats()
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 232, in get_full_lonlats
shape=(self.scanlines, self.pixels))
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 2406, in from_delayed
return Array(graph, name, chunks, dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 880, in __new__
self._chunks = normalize_chunks(chunks, shape, dtype=self.dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in normalize_chunks
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
TypeError: a float is required
|
ValueError
|
def get_full_lonlats(self):
"""Get the interpolated lons/lats."""
if self.lons is not None and self.lats is not None:
return self.lons, self.lats
raw_lats = np.hstack(
(
self["EARTH_LOCATION_FIRST"][:, [0]],
self["EARTH_LOCATIONS"][:, :, 0],
self["EARTH_LOCATION_LAST"][:, [0]],
)
)
raw_lons = np.hstack(
(
self["EARTH_LOCATION_FIRST"][:, [1]],
self["EARTH_LOCATIONS"][:, :, 1],
self["EARTH_LOCATION_LAST"][:, [1]],
)
)
self.lons, self.lats = self._get_full_lonlats(raw_lons, raw_lats)
self.lons = da.from_delayed(
self.lons,
dtype=self["EARTH_LOCATIONS"].dtype,
shape=(self.scanlines, self.pixels),
)
self.lats = da.from_delayed(
self.lats,
dtype=self["EARTH_LOCATIONS"].dtype,
shape=(self.scanlines, self.pixels),
)
return self.lons, self.lats
|
def get_full_lonlats(self):
"""Get the interpolated lons/lats."""
if self.lons is not None and self.lats is not None:
return self.lons, self.lats
self.lons, self.lats = self._get_full_lonlats()
self.lons = da.from_delayed(
self.lons,
dtype=self["EARTH_LOCATIONS"].dtype,
shape=(self.scanlines, self.pixels),
)
self.lats = da.from_delayed(
self.lats,
dtype=self["EARTH_LOCATIONS"].dtype,
shape=(self.scanlines, self.pixels),
)
return self.lons, self.lats
|
https://github.com/pytroll/satpy/issues/924
|
[DEBUG: 2019-10-08 10:34:09 : satpy.readers.eps_l1b] Reading /data/pytroll/testdata/eps-l1b/avhrr_metop02_20191008055203_20191008062503.eps
[ERROR: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] Could not load dataset 'DatasetID(name='longitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())': Too many ('veadr', 1)
Traceback (most recent call last):
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 309, in get_dataset
self._read_all(self.filename)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 174, in _read_all
raise ValueError("Too many " + str((rec_class, sub_class)))
ValueError: Too many ('veadr', 1)
[DEBUG: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] No coordinates found for DatasetID(name='latitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())
Traceback (most recent call last):
File "/software/pytroll/scripts/test-veadr.py", line 10, in <module>
global_scene.load(['overview'])
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 967, in load
self.read(**kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 875, in read
return self._read_datasets(nodes, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 716, in _read_datasets
new_datasets = reader_instance.load(ds_ids, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 794, in load
ds = self._load_dataset_with_area(dsid, coords)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 312, in get_dataset
lons, lats = self.get_full_lonlats()
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 232, in get_full_lonlats
shape=(self.scanlines, self.pixels))
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 2406, in from_delayed
return Array(graph, name, chunks, dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 880, in __new__
self._chunks = normalize_chunks(chunks, shape, dtype=self.dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in normalize_chunks
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
TypeError: a float is required
|
ValueError
|
def _get_full_angles(self, solar_zenith, sat_zenith, solar_azimuth, sat_azimuth):
nav_sample_rate = self["NAV_SAMPLE_RATE"]
if nav_sample_rate == 20 and self.pixels == 2048:
from geotiepoints import metop20kmto1km
# Note: interpolation asumes lat values values between -90 and 90
# Solar and satellite zenith is between 0 and 180.
solar_zenith -= 90
sun_azi, sun_zen = metop20kmto1km(solar_azimuth, solar_zenith)
sun_zen += 90
sat_zenith -= 90
sat_azi, sat_zen = metop20kmto1km(sat_azimuth, sat_zenith)
sat_zen += 90
return sun_azi, sun_zen, sat_azi, sat_zen
else:
raise NotImplementedError(
"Angles expansion not implemented for "
+ "sample rate = "
+ str(nav_sample_rate)
+ " and earth views = "
+ str(self.pixels)
)
|
def _get_full_angles(self):
solar_zenith = np.hstack(
(
self["ANGULAR_RELATIONS_FIRST"][:, [0]],
self["ANGULAR_RELATIONS"][:, :, 0],
self["ANGULAR_RELATIONS_LAST"][:, [0]],
)
)
sat_zenith = np.hstack(
(
self["ANGULAR_RELATIONS_FIRST"][:, [1]],
self["ANGULAR_RELATIONS"][:, :, 1],
self["ANGULAR_RELATIONS_LAST"][:, [1]],
)
)
solar_azimuth = np.hstack(
(
self["ANGULAR_RELATIONS_FIRST"][:, [2]],
self["ANGULAR_RELATIONS"][:, :, 2],
self["ANGULAR_RELATIONS_LAST"][:, [2]],
)
)
sat_azimuth = np.hstack(
(
self["ANGULAR_RELATIONS_FIRST"][:, [3]],
self["ANGULAR_RELATIONS"][:, :, 3],
self["ANGULAR_RELATIONS_LAST"][:, [3]],
)
)
nav_sample_rate = self["NAV_SAMPLE_RATE"]
earth_views_per_scanline = self["EARTH_VIEWS_PER_SCANLINE"]
if nav_sample_rate == 20 and earth_views_per_scanline == 2048:
from geotiepoints import metop20kmto1km
# Note: interpolation asumes lat values values between -90 and 90
# Solar and satellite zenith is between 0 and 180.
solar_zenith -= 90
sun_azi, sun_zen = metop20kmto1km(solar_azimuth, solar_zenith)
sun_zen += 90
sat_zenith -= 90
sat_azi, sat_zen = metop20kmto1km(sat_azimuth, sat_zenith)
sat_zen += 90
return sun_azi, sun_zen, sat_azi, sat_zen
else:
raise NotImplementedError(
"Angles expansion not implemented for "
+ "sample rate = "
+ str(nav_sample_rate)
+ " and earth views = "
+ str(earth_views_per_scanline)
)
|
https://github.com/pytroll/satpy/issues/924
|
[DEBUG: 2019-10-08 10:34:09 : satpy.readers.eps_l1b] Reading /data/pytroll/testdata/eps-l1b/avhrr_metop02_20191008055203_20191008062503.eps
[ERROR: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] Could not load dataset 'DatasetID(name='longitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())': Too many ('veadr', 1)
Traceback (most recent call last):
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 309, in get_dataset
self._read_all(self.filename)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 174, in _read_all
raise ValueError("Too many " + str((rec_class, sub_class)))
ValueError: Too many ('veadr', 1)
[DEBUG: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] No coordinates found for DatasetID(name='latitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())
Traceback (most recent call last):
File "/software/pytroll/scripts/test-veadr.py", line 10, in <module>
global_scene.load(['overview'])
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 967, in load
self.read(**kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 875, in read
return self._read_datasets(nodes, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 716, in _read_datasets
new_datasets = reader_instance.load(ds_ids, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 794, in load
ds = self._load_dataset_with_area(dsid, coords)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 312, in get_dataset
lons, lats = self.get_full_lonlats()
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 232, in get_full_lonlats
shape=(self.scanlines, self.pixels))
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 2406, in from_delayed
return Array(graph, name, chunks, dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 880, in __new__
self._chunks = normalize_chunks(chunks, shape, dtype=self.dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in normalize_chunks
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
TypeError: a float is required
|
ValueError
|
def get_full_angles(self):
"""Get the interpolated lons/lats."""
if (
self.sun_azi is not None
and self.sun_zen is not None
and self.sat_azi is not None
and self.sat_zen is not None
):
return self.sun_azi, self.sun_zen, self.sat_azi, self.sat_zen
solar_zenith = np.hstack(
(
self["ANGULAR_RELATIONS_FIRST"][:, [0]],
self["ANGULAR_RELATIONS"][:, :, 0],
self["ANGULAR_RELATIONS_LAST"][:, [0]],
)
)
sat_zenith = np.hstack(
(
self["ANGULAR_RELATIONS_FIRST"][:, [1]],
self["ANGULAR_RELATIONS"][:, :, 1],
self["ANGULAR_RELATIONS_LAST"][:, [1]],
)
)
solar_azimuth = np.hstack(
(
self["ANGULAR_RELATIONS_FIRST"][:, [2]],
self["ANGULAR_RELATIONS"][:, :, 2],
self["ANGULAR_RELATIONS_LAST"][:, [2]],
)
)
sat_azimuth = np.hstack(
(
self["ANGULAR_RELATIONS_FIRST"][:, [3]],
self["ANGULAR_RELATIONS"][:, :, 3],
self["ANGULAR_RELATIONS_LAST"][:, [3]],
)
)
self.sun_azi, self.sun_zen, self.sat_azi, self.sat_zen = self._get_full_angles(
solar_zenith, sat_zenith, solar_azimuth, sat_azimuth
)
self.sun_azi = da.from_delayed(
self.sun_azi,
dtype=self["ANGULAR_RELATIONS"].dtype,
shape=(self.scanlines, self.pixels),
)
self.sun_zen = da.from_delayed(
self.sun_zen,
dtype=self["ANGULAR_RELATIONS"].dtype,
shape=(self.scanlines, self.pixels),
)
self.sat_azi = da.from_delayed(
self.sat_azi,
dtype=self["ANGULAR_RELATIONS"].dtype,
shape=(self.scanlines, self.pixels),
)
self.sat_zen = da.from_delayed(
self.sat_zen,
dtype=self["ANGULAR_RELATIONS"].dtype,
shape=(self.scanlines, self.pixels),
)
return self.sun_azi, self.sun_zen, self.sat_azi, self.sat_zen
|
def get_full_angles(self):
"""Get the interpolated lons/lats."""
if (
self.sun_azi is not None
and self.sun_zen is not None
and self.sat_azi is not None
and self.sat_zen is not None
):
return self.sun_azi, self.sun_zen, self.sat_azi, self.sat_zen
self.sun_azi, self.sun_zen, self.sat_azi, self.sat_zen = self._get_full_angles()
self.sun_azi = da.from_delayed(
self.sun_azi,
dtype=self["ANGULAR_RELATIONS"].dtype,
shape=(self.scanlines, self.pixels),
)
self.sun_zen = da.from_delayed(
self.sun_zen,
dtype=self["ANGULAR_RELATIONS"].dtype,
shape=(self.scanlines, self.pixels),
)
self.sat_azi = da.from_delayed(
self.sat_azi,
dtype=self["ANGULAR_RELATIONS"].dtype,
shape=(self.scanlines, self.pixels),
)
self.sat_zen = da.from_delayed(
self.sat_zen,
dtype=self["ANGULAR_RELATIONS"].dtype,
shape=(self.scanlines, self.pixels),
)
return self.sun_azi, self.sun_zen, self.sat_azi, self.sat_zen
|
https://github.com/pytroll/satpy/issues/924
|
[DEBUG: 2019-10-08 10:34:09 : satpy.readers.eps_l1b] Reading /data/pytroll/testdata/eps-l1b/avhrr_metop02_20191008055203_20191008062503.eps
[ERROR: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] Could not load dataset 'DatasetID(name='longitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())': Too many ('veadr', 1)
Traceback (most recent call last):
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 309, in get_dataset
self._read_all(self.filename)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 174, in _read_all
raise ValueError("Too many " + str((rec_class, sub_class)))
ValueError: Too many ('veadr', 1)
[DEBUG: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] No coordinates found for DatasetID(name='latitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())
Traceback (most recent call last):
File "/software/pytroll/scripts/test-veadr.py", line 10, in <module>
global_scene.load(['overview'])
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 967, in load
self.read(**kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 875, in read
return self._read_datasets(nodes, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 716, in _read_datasets
new_datasets = reader_instance.load(ds_ids, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 794, in load
ds = self._load_dataset_with_area(dsid, coords)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 312, in get_dataset
lons, lats = self.get_full_lonlats()
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 232, in get_full_lonlats
shape=(self.scanlines, self.pixels))
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 2406, in from_delayed
return Array(graph, name, chunks, dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 880, in __new__
self._chunks = normalize_chunks(chunks, shape, dtype=self.dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in normalize_chunks
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
TypeError: a float is required
|
ValueError
|
def get_bounding_box(self):
"""Get bounding box."""
if self.sections is None:
self._read_all()
lats = np.hstack(
[
self["EARTH_LOCATION_FIRST"][0, [0]],
self["EARTH_LOCATION_LAST"][0, [0]],
self["EARTH_LOCATION_LAST"][-1, [0]],
self["EARTH_LOCATION_FIRST"][-1, [0]],
]
)
lons = np.hstack(
[
self["EARTH_LOCATION_FIRST"][0, [1]],
self["EARTH_LOCATION_LAST"][0, [1]],
self["EARTH_LOCATION_LAST"][-1, [1]],
self["EARTH_LOCATION_FIRST"][-1, [1]],
]
)
return lons.ravel(), lats.ravel()
|
def get_bounding_box(self):
"""Get bounding box."""
if self.mdrs is None:
self._read_all(self.filename)
lats = np.hstack(
[
self["EARTH_LOCATION_FIRST"][0, [0]],
self["EARTH_LOCATION_LAST"][0, [0]],
self["EARTH_LOCATION_LAST"][-1, [0]],
self["EARTH_LOCATION_FIRST"][-1, [0]],
]
)
lons = np.hstack(
[
self["EARTH_LOCATION_FIRST"][0, [1]],
self["EARTH_LOCATION_LAST"][0, [1]],
self["EARTH_LOCATION_LAST"][-1, [1]],
self["EARTH_LOCATION_FIRST"][-1, [1]],
]
)
return lons.ravel(), lats.ravel()
|
https://github.com/pytroll/satpy/issues/924
|
[DEBUG: 2019-10-08 10:34:09 : satpy.readers.eps_l1b] Reading /data/pytroll/testdata/eps-l1b/avhrr_metop02_20191008055203_20191008062503.eps
[ERROR: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] Could not load dataset 'DatasetID(name='longitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())': Too many ('veadr', 1)
Traceback (most recent call last):
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 309, in get_dataset
self._read_all(self.filename)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 174, in _read_all
raise ValueError("Too many " + str((rec_class, sub_class)))
ValueError: Too many ('veadr', 1)
[DEBUG: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] No coordinates found for DatasetID(name='latitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())
Traceback (most recent call last):
File "/software/pytroll/scripts/test-veadr.py", line 10, in <module>
global_scene.load(['overview'])
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 967, in load
self.read(**kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 875, in read
return self._read_datasets(nodes, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 716, in _read_datasets
new_datasets = reader_instance.load(ds_ids, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 794, in load
ds = self._load_dataset_with_area(dsid, coords)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 312, in get_dataset
lons, lats = self.get_full_lonlats()
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 232, in get_full_lonlats
shape=(self.scanlines, self.pixels))
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 2406, in from_delayed
return Array(graph, name, chunks, dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 880, in __new__
self._chunks = normalize_chunks(chunks, shape, dtype=self.dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in normalize_chunks
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
TypeError: a float is required
|
ValueError
|
def get_dataset(self, key, info):
"""Get calibrated channel data."""
if self.sections is None:
self._read_all()
if key.name in ["longitude", "latitude"]:
lons, lats = self.get_full_lonlats()
if key.name == "longitude":
dataset = create_xarray(lons)
else:
dataset = create_xarray(lats)
elif key.name in [
"solar_zenith_angle",
"solar_azimuth_angle",
"satellite_zenith_angle",
"satellite_azimuth_angle",
]:
sun_azi, sun_zen, sat_azi, sat_zen = self.get_full_angles()
if key.name == "solar_zenith_angle":
dataset = create_xarray(sun_zen)
elif key.name == "solar_azimuth_angle":
dataset = create_xarray(sun_azi)
if key.name == "satellite_zenith_angle":
dataset = create_xarray(sat_zen)
elif key.name == "satellite_azimuth_angle":
dataset = create_xarray(sat_azi)
else:
mask = None
if key.calibration == "counts":
raise ValueError(
"calibration=counts is not supported! "
+ "This reader cannot return counts"
)
elif key.calibration not in [
"reflectance",
"brightness_temperature",
"radiance",
]:
raise ValueError(
"calibration type " + str(key.calibration) + " is not supported!"
)
if key.name in ["3A", "3a"] and self.three_a_mask is None:
self.three_a_mask = (self["FRAME_INDICATOR"] & 2**16) != 2**16
if key.name in ["3B", "3b"] and self.three_b_mask is None:
self.three_b_mask = (self["FRAME_INDICATOR"] & 2**16) != 0
if key.name not in ["1", "2", "3a", "3A", "3b", "3B", "4", "5"]:
logger.info("Can't load channel in eps_l1b: " + str(key.name))
return
if key.name == "1":
if key.calibration == "reflectance":
array = radiance_to_refl(
self["SCENE_RADIANCES"][:, 0, :],
self["CH1_SOLAR_FILTERED_IRRADIANCE"],
)
else:
array = self["SCENE_RADIANCES"][:, 0, :]
if key.name == "2":
if key.calibration == "reflectance":
array = radiance_to_refl(
self["SCENE_RADIANCES"][:, 1, :],
self["CH2_SOLAR_FILTERED_IRRADIANCE"],
)
else:
array = self["SCENE_RADIANCES"][:, 1, :]
if key.name.lower() == "3a":
if key.calibration == "reflectance":
array = radiance_to_refl(
self["SCENE_RADIANCES"][:, 2, :],
self["CH3A_SOLAR_FILTERED_IRRADIANCE"],
)
else:
array = self["SCENE_RADIANCES"][:, 2, :]
mask = np.empty(array.shape, dtype=bool)
mask[:, :] = self.three_a_mask[:, np.newaxis]
if key.name.lower() == "3b":
if key.calibration == "brightness_temperature":
array = radiance_to_bt(
self["SCENE_RADIANCES"][:, 2, :],
self["CH3B_CENTRAL_WAVENUMBER"],
self["CH3B_CONSTANT1"],
self["CH3B_CONSTANT2_SLOPE"],
)
else:
array = self["SCENE_RADIANCES"][:, 2, :]
mask = np.empty(array.shape, dtype=bool)
mask[:, :] = self.three_b_mask[:, np.newaxis]
if key.name == "4":
if key.calibration == "brightness_temperature":
array = radiance_to_bt(
self["SCENE_RADIANCES"][:, 3, :],
self["CH4_CENTRAL_WAVENUMBER"],
self["CH4_CONSTANT1"],
self["CH4_CONSTANT2_SLOPE"],
)
else:
array = self["SCENE_RADIANCES"][:, 3, :]
if key.name == "5":
if key.calibration == "brightness_temperature":
array = radiance_to_bt(
self["SCENE_RADIANCES"][:, 4, :],
self["CH5_CENTRAL_WAVENUMBER"],
self["CH5_CONSTANT1"],
self["CH5_CONSTANT2_SLOPE"],
)
else:
array = self["SCENE_RADIANCES"][:, 4, :]
dataset = create_xarray(array)
if mask is not None:
dataset = dataset.where(~mask)
dataset.attrs["platform_name"] = self.platform_name
dataset.attrs["sensor"] = self.sensor_name
dataset.attrs.update(info)
dataset.attrs.update(key.to_dict())
return dataset
|
def get_dataset(self, key, info):
"""Get calibrated channel data."""
if self.mdrs is None:
self._read_all(self.filename)
if key.name in ["longitude", "latitude"]:
lons, lats = self.get_full_lonlats()
if key.name == "longitude":
dataset = create_xarray(lons)
else:
dataset = create_xarray(lats)
elif key.name in [
"solar_zenith_angle",
"solar_azimuth_angle",
"satellite_zenith_angle",
"satellite_azimuth_angle",
]:
sun_azi, sun_zen, sat_azi, sat_zen = self.get_full_angles()
if key.name == "solar_zenith_angle":
dataset = create_xarray(sun_zen)
elif key.name == "solar_azimuth_angle":
dataset = create_xarray(sun_azi)
if key.name == "satellite_zenith_angle":
dataset = create_xarray(sat_zen)
elif key.name == "satellite_azimuth_angle":
dataset = create_xarray(sat_azi)
else:
mask = None
if key.calibration == "counts":
raise ValueError(
"calibration=counts is not supported! "
+ "This reader cannot return counts"
)
elif key.calibration not in [
"reflectance",
"brightness_temperature",
"radiance",
]:
raise ValueError(
"calibration type " + str(key.calibration) + " is not supported!"
)
if key.name in ["3A", "3a"] and self.three_a_mask is None:
self.three_a_mask = (self["FRAME_INDICATOR"] & 2**16) != 2**16
if key.name in ["3B", "3b"] and self.three_b_mask is None:
self.three_b_mask = (self["FRAME_INDICATOR"] & 2**16) != 0
if key.name not in ["1", "2", "3a", "3A", "3b", "3B", "4", "5"]:
LOG.info("Can't load channel in eps_l1b: " + str(key.name))
return
if key.name == "1":
if key.calibration == "reflectance":
array = radiance_to_refl(
self["SCENE_RADIANCES"][:, 0, :],
self["CH1_SOLAR_FILTERED_IRRADIANCE"],
)
else:
array = self["SCENE_RADIANCES"][:, 0, :]
if key.name == "2":
if key.calibration == "reflectance":
array = radiance_to_refl(
self["SCENE_RADIANCES"][:, 1, :],
self["CH2_SOLAR_FILTERED_IRRADIANCE"],
)
else:
array = self["SCENE_RADIANCES"][:, 1, :]
if key.name.lower() == "3a":
if key.calibration == "reflectance":
array = radiance_to_refl(
self["SCENE_RADIANCES"][:, 2, :],
self["CH3A_SOLAR_FILTERED_IRRADIANCE"],
)
else:
array = self["SCENE_RADIANCES"][:, 2, :]
mask = np.empty(array.shape, dtype=bool)
mask[:, :] = self.three_a_mask[:, np.newaxis]
if key.name.lower() == "3b":
if key.calibration == "brightness_temperature":
array = radiance_to_bt(
self["SCENE_RADIANCES"][:, 2, :],
self["CH3B_CENTRAL_WAVENUMBER"],
self["CH3B_CONSTANT1"],
self["CH3B_CONSTANT2_SLOPE"],
)
else:
array = self["SCENE_RADIANCES"][:, 2, :]
mask = np.empty(array.shape, dtype=bool)
mask[:, :] = self.three_b_mask[:, np.newaxis]
if key.name == "4":
if key.calibration == "brightness_temperature":
array = radiance_to_bt(
self["SCENE_RADIANCES"][:, 3, :],
self["CH4_CENTRAL_WAVENUMBER"],
self["CH4_CONSTANT1"],
self["CH4_CONSTANT2_SLOPE"],
)
else:
array = self["SCENE_RADIANCES"][:, 3, :]
if key.name == "5":
if key.calibration == "brightness_temperature":
array = radiance_to_bt(
self["SCENE_RADIANCES"][:, 4, :],
self["CH5_CENTRAL_WAVENUMBER"],
self["CH5_CONSTANT1"],
self["CH5_CONSTANT2_SLOPE"],
)
else:
array = self["SCENE_RADIANCES"][:, 4, :]
dataset = create_xarray(array)
if mask is not None:
dataset = dataset.where(~mask)
dataset.attrs["platform_name"] = self.platform_name
dataset.attrs["sensor"] = self.sensor_name
dataset.attrs.update(info)
dataset.attrs.update(key.to_dict())
return dataset
|
https://github.com/pytroll/satpy/issues/924
|
[DEBUG: 2019-10-08 10:34:09 : satpy.readers.eps_l1b] Reading /data/pytroll/testdata/eps-l1b/avhrr_metop02_20191008055203_20191008062503.eps
[ERROR: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] Could not load dataset 'DatasetID(name='longitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())': Too many ('veadr', 1)
Traceback (most recent call last):
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 309, in get_dataset
self._read_all(self.filename)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 174, in _read_all
raise ValueError("Too many " + str((rec_class, sub_class)))
ValueError: Too many ('veadr', 1)
[DEBUG: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] No coordinates found for DatasetID(name='latitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())
Traceback (most recent call last):
File "/software/pytroll/scripts/test-veadr.py", line 10, in <module>
global_scene.load(['overview'])
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 967, in load
self.read(**kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 875, in read
return self._read_datasets(nodes, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 716, in _read_datasets
new_datasets = reader_instance.load(ds_ids, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 794, in load
ds = self._load_dataset_with_area(dsid, coords)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 312, in get_dataset
lons, lats = self.get_full_lonlats()
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 232, in get_full_lonlats
shape=(self.scanlines, self.pixels))
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 2406, in from_delayed
return Array(graph, name, chunks, dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 880, in __new__
self._chunks = normalize_chunks(chunks, shape, dtype=self.dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in normalize_chunks
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
TypeError: a float is required
|
ValueError
|
def process_array(elt, ascii=False):
"""Process an 'array' tag."""
del ascii
chld = list(elt)
if len(chld) > 1:
raise ValueError()
chld = chld[0]
try:
name, current_type, scale = CASES[chld.tag](chld)
size = None
except ValueError:
name, current_type, size, scale = CASES[chld.tag](chld)
del name
myname = elt.get("name") or elt.get("label")
if elt.get("length").startswith("$"):
length = int(VARIABLES[elt.get("length")[1:]])
else:
length = int(elt.get("length"))
if size is not None:
return (myname, current_type, (length,) + size, scale)
else:
return (myname, current_type, (length,), scale)
|
def process_array(elt, ascii=False):
"""Process an 'array' tag."""
del ascii
chld = elt.getchildren()
if len(chld) > 1:
raise ValueError()
chld = chld[0]
try:
name, current_type, scale = CASES[chld.tag](chld)
size = None
except ValueError:
name, current_type, size, scale = CASES[chld.tag](chld)
del name
myname = elt.get("name") or elt.get("label")
if elt.get("length").startswith("$"):
length = int(VARIABLES[elt.get("length")[1:]])
else:
length = int(elt.get("length"))
if size is not None:
return (myname, current_type, (length,) + size, scale)
else:
return (myname, current_type, (length,), scale)
|
https://github.com/pytroll/satpy/issues/924
|
[DEBUG: 2019-10-08 10:34:09 : satpy.readers.eps_l1b] Reading /data/pytroll/testdata/eps-l1b/avhrr_metop02_20191008055203_20191008062503.eps
[ERROR: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] Could not load dataset 'DatasetID(name='longitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())': Too many ('veadr', 1)
Traceback (most recent call last):
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 309, in get_dataset
self._read_all(self.filename)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 174, in _read_all
raise ValueError("Too many " + str((rec_class, sub_class)))
ValueError: Too many ('veadr', 1)
[DEBUG: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] No coordinates found for DatasetID(name='latitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())
Traceback (most recent call last):
File "/software/pytroll/scripts/test-veadr.py", line 10, in <module>
global_scene.load(['overview'])
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 967, in load
self.read(**kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 875, in read
return self._read_datasets(nodes, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 716, in _read_datasets
new_datasets = reader_instance.load(ds_ids, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 794, in load
ds = self._load_dataset_with_area(dsid, coords)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 312, in get_dataset
lons, lats = self.get_full_lonlats()
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 232, in get_full_lonlats
shape=(self.scanlines, self.pixels))
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 2406, in from_delayed
return Array(graph, name, chunks, dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 880, in __new__
self._chunks = normalize_chunks(chunks, shape, dtype=self.dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in normalize_chunks
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
TypeError: a float is required
|
ValueError
|
def parse_format(xml_file):
"""Parse the xml file to create types, scaling factor types, and scales."""
tree = ElementTree()
tree.parse(xml_file)
for param in tree.find("parameters"):
VARIABLES[param.get("name")] = param.get("value")
types_scales = {}
for prod in tree.find("product"):
ascii = prod.tag in ["mphr", "sphr"]
res = []
for i in prod:
lres = CASES[i.tag](i, ascii)
if lres is not None:
res.append(lres)
types_scales[(prod.tag, int(prod.get("subclass")))] = res
types = {}
stypes = {}
scales = {}
for key, val in types_scales.items():
types[key] = to_dtype(val)
stypes[key] = to_scaled_dtype(val)
scales[key] = to_scales(val)
return types, stypes, scales
|
def parse_format(xml_file):
"""Parse the xml file to create types, scaling factor types, and scales."""
tree = ElementTree()
tree.parse(xml_file)
for param in tree.find("parameters").getchildren():
VARIABLES[param.get("name")] = param.get("value")
types_scales = {}
for prod in tree.find("product"):
ascii = prod.tag in ["mphr", "sphr"]
res = []
for i in prod:
lres = CASES[i.tag](i, ascii)
if lres is not None:
res.append(lres)
types_scales[(prod.tag, int(prod.get("subclass")))] = res
types = {}
stypes = {}
scales = {}
for key, val in types_scales.items():
types[key] = to_dtype(val)
stypes[key] = to_scaled_dtype(val)
scales[key] = to_scales(val)
return types, stypes, scales
|
https://github.com/pytroll/satpy/issues/924
|
[DEBUG: 2019-10-08 10:34:09 : satpy.readers.eps_l1b] Reading /data/pytroll/testdata/eps-l1b/avhrr_metop02_20191008055203_20191008062503.eps
[ERROR: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] Could not load dataset 'DatasetID(name='longitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())': Too many ('veadr', 1)
Traceback (most recent call last):
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 309, in get_dataset
self._read_all(self.filename)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 174, in _read_all
raise ValueError("Too many " + str((rec_class, sub_class)))
ValueError: Too many ('veadr', 1)
[DEBUG: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] No coordinates found for DatasetID(name='latitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())
Traceback (most recent call last):
File "/software/pytroll/scripts/test-veadr.py", line 10, in <module>
global_scene.load(['overview'])
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 967, in load
self.read(**kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 875, in read
return self._read_datasets(nodes, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 716, in _read_datasets
new_datasets = reader_instance.load(ds_ids, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 794, in load
ds = self._load_dataset_with_area(dsid, coords)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 312, in get_dataset
lons, lats = self.get_full_lonlats()
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 232, in get_full_lonlats
shape=(self.scanlines, self.pixels))
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 2406, in from_delayed
return Array(graph, name, chunks, dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 880, in __new__
self._chunks = normalize_chunks(chunks, shape, dtype=self.dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in normalize_chunks
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
TypeError: a float is required
|
ValueError
|
def __init__(self, filename):
"""Init the format reader."""
self.types, self.stypes, self.scales = parse_format(filename)
self.translator = {}
for key, val in self.types.items():
self.translator[val] = (self.scales[key], self.stypes[key])
|
def __init__(self, filename):
self.types, self.stypes, self.scales = parse_format(filename)
self.translator = {}
for key, val in self.types.items():
self.translator[val] = (self.scales[key], self.stypes[key])
|
https://github.com/pytroll/satpy/issues/924
|
[DEBUG: 2019-10-08 10:34:09 : satpy.readers.eps_l1b] Reading /data/pytroll/testdata/eps-l1b/avhrr_metop02_20191008055203_20191008062503.eps
[ERROR: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] Could not load dataset 'DatasetID(name='longitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())': Too many ('veadr', 1)
Traceback (most recent call last):
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 309, in get_dataset
self._read_all(self.filename)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 174, in _read_all
raise ValueError("Too many " + str((rec_class, sub_class)))
ValueError: Too many ('veadr', 1)
[DEBUG: 2019-10-08 10:34:13 : satpy.readers.yaml_reader] No coordinates found for DatasetID(name='latitude', wavelength=None, resolution=1050, polarization=None, calibration=None, level=None, modifiers=())
Traceback (most recent call last):
File "/software/pytroll/scripts/test-veadr.py", line 10, in <module>
global_scene.load(['overview'])
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 967, in load
self.read(**kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 875, in read
return self._read_datasets(nodes, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/scene.py", line 716, in _read_datasets
new_datasets = reader_instance.load(ds_ids, **kwargs)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 794, in load
ds = self._load_dataset_with_area(dsid, coords)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 722, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 602, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/yaml_reader.py", line 578, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 312, in get_dataset
lons, lats = self.get_full_lonlats()
File "/software/pytroll/lib/python3.5/site-packages/satpy/readers/eps_l1b.py", line 232, in get_full_lonlats
shape=(self.scanlines, self.pixels))
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 2406, in from_delayed
return Array(graph, name, chunks, dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 880, in __new__
self._chunks = normalize_chunks(chunks, shape, dtype=self.dtype)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in normalize_chunks
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 1995, in <genexpr>
return tuple(tuple(int(x) if not math.isnan(x) else x for x in c) for c in chunks)
TypeError: a float is required
|
ValueError
|
def __init__(self, filename, filename_info, filetype_info, engine=None):
"""Init the olci reader base."""
super(NCOLCIBase, self).__init__(filename, filename_info, filetype_info)
self.nc = xr.open_dataset(
self.filename,
decode_cf=True,
mask_and_scale=True,
engine=engine,
chunks={"columns": CHUNK_SIZE, "rows": CHUNK_SIZE},
)
self.nc = self.nc.rename({"columns": "x", "rows": "y"})
# TODO: get metadata from the manifest file (xfdumanifest.xml)
self.platform_name = PLATFORM_NAMES[filename_info["mission_id"]]
self.sensor = "olci"
|
def __init__(self, filename, filename_info, filetype_info):
"""Init the olci reader base."""
super(NCOLCIBase, self).__init__(filename, filename_info, filetype_info)
self.nc = xr.open_dataset(
self.filename,
decode_cf=True,
mask_and_scale=True,
engine="h5netcdf",
chunks={"columns": CHUNK_SIZE, "rows": CHUNK_SIZE},
)
self.nc = self.nc.rename({"columns": "x", "rows": "y"})
# TODO: get metadata from the manifest file (xfdumanifest.xml)
self.platform_name = PLATFORM_NAMES[filename_info["mission_id"]]
self.sensor = "olci"
|
https://github.com/pytroll/satpy/issues/944
|
Traceback (most recent call last):
File "/network/aopp/apres/users/proud/satpyconda/lib/python3.7/site-packages/xarray/backends/file_manager.py", line 243, in __del__
File "/network/aopp/apres/users/proud/satpyconda/lib/python3.7/site-packages/xarray/backends/file_manager.py", line 221, in close
File "/network/aopp/apres/users/proud/satpyconda/lib/python3.7/site-packages/h5netcdf/core.py", line 701, in close
File "/network/aopp/apres/users/proud/satpyconda/lib/python3.7/site-packages/h5py/_hl/files.py", line 431, in close
File "h5py/_objects.pyx", line 54, in h5py._objects.with_phil.wrapper
File "h5py/_objects.pyx", line 55, in h5py._objects.with_phil.wrapper
File "h5py/h5f.pyx", line 267, in h5py.h5f.get_obj_ids
File "h5py/h5i.pyx", line 43, in h5py.h5i.wrap_identifier
ImportError: sys.meta_path is None, Python is likely shutting down
|
ImportError
|
def __init__(self, filename, filename_info, filetype_info, engine=None):
"""Init the file handler."""
super(NCOLCIChannelBase, self).__init__(filename, filename_info, filetype_info)
self.channel = filename_info.get("dataset_name")
|
def __init__(self, filename, filename_info, filetype_info):
"""Init the file handler."""
super(NCOLCIChannelBase, self).__init__(filename, filename_info, filetype_info)
self.channel = filename_info.get("dataset_name")
|
https://github.com/pytroll/satpy/issues/944
|
Traceback (most recent call last):
File "/network/aopp/apres/users/proud/satpyconda/lib/python3.7/site-packages/xarray/backends/file_manager.py", line 243, in __del__
File "/network/aopp/apres/users/proud/satpyconda/lib/python3.7/site-packages/xarray/backends/file_manager.py", line 221, in close
File "/network/aopp/apres/users/proud/satpyconda/lib/python3.7/site-packages/h5netcdf/core.py", line 701, in close
File "/network/aopp/apres/users/proud/satpyconda/lib/python3.7/site-packages/h5py/_hl/files.py", line 431, in close
File "h5py/_objects.pyx", line 54, in h5py._objects.with_phil.wrapper
File "h5py/_objects.pyx", line 55, in h5py._objects.with_phil.wrapper
File "h5py/h5f.pyx", line 267, in h5py.h5f.get_obj_ids
File "h5py/h5i.pyx", line 43, in h5py.h5i.wrap_identifier
ImportError: sys.meta_path is None, Python is likely shutting down
|
ImportError
|
def __init__(self, filename, filename_info, filetype_info, cal, engine=None):
"""Init the file handler."""
super(NCOLCI1B, self).__init__(filename, filename_info, filetype_info)
self.cal = cal.nc
|
def __init__(self, filename, filename_info, filetype_info, cal):
"""Init the file handler."""
super(NCOLCI1B, self).__init__(filename, filename_info, filetype_info)
self.cal = cal.nc
|
https://github.com/pytroll/satpy/issues/944
|
Traceback (most recent call last):
File "/network/aopp/apres/users/proud/satpyconda/lib/python3.7/site-packages/xarray/backends/file_manager.py", line 243, in __del__
File "/network/aopp/apres/users/proud/satpyconda/lib/python3.7/site-packages/xarray/backends/file_manager.py", line 221, in close
File "/network/aopp/apres/users/proud/satpyconda/lib/python3.7/site-packages/h5netcdf/core.py", line 701, in close
File "/network/aopp/apres/users/proud/satpyconda/lib/python3.7/site-packages/h5py/_hl/files.py", line 431, in close
File "h5py/_objects.pyx", line 54, in h5py._objects.with_phil.wrapper
File "h5py/_objects.pyx", line 55, in h5py._objects.with_phil.wrapper
File "h5py/h5f.pyx", line 267, in h5py.h5f.get_obj_ids
File "h5py/h5i.pyx", line 43, in h5py.h5i.wrap_identifier
ImportError: sys.meta_path is None, Python is likely shutting down
|
ImportError
|
def __init__(self, filename, filename_info, filetype_info, engine=None):
"""Init the file handler."""
super(NCOLCIAngles, self).__init__(filename, filename_info, filetype_info)
self.nc = None
# TODO: get metadata from the manifest file (xfdumanifest.xml)
self.platform_name = PLATFORM_NAMES[filename_info["mission_id"]]
self.sensor = "olci"
self.cache = {}
self._start_time = filename_info["start_time"]
self._end_time = filename_info["end_time"]
self.engine = engine
|
def __init__(self, filename, filename_info, filetype_info):
"""Init the file handler."""
super(NCOLCIAngles, self).__init__(filename, filename_info, filetype_info)
self.nc = None
# TODO: get metadata from the manifest file (xfdumanifest.xml)
self.platform_name = PLATFORM_NAMES[filename_info["mission_id"]]
self.sensor = "olci"
self.cache = {}
self._start_time = filename_info["start_time"]
self._end_time = filename_info["end_time"]
|
https://github.com/pytroll/satpy/issues/944
|
Traceback (most recent call last):
File "/network/aopp/apres/users/proud/satpyconda/lib/python3.7/site-packages/xarray/backends/file_manager.py", line 243, in __del__
File "/network/aopp/apres/users/proud/satpyconda/lib/python3.7/site-packages/xarray/backends/file_manager.py", line 221, in close
File "/network/aopp/apres/users/proud/satpyconda/lib/python3.7/site-packages/h5netcdf/core.py", line 701, in close
File "/network/aopp/apres/users/proud/satpyconda/lib/python3.7/site-packages/h5py/_hl/files.py", line 431, in close
File "h5py/_objects.pyx", line 54, in h5py._objects.with_phil.wrapper
File "h5py/_objects.pyx", line 55, in h5py._objects.with_phil.wrapper
File "h5py/h5f.pyx", line 267, in h5py.h5f.get_obj_ids
File "h5py/h5i.pyx", line 43, in h5py.h5i.wrap_identifier
ImportError: sys.meta_path is None, Python is likely shutting down
|
ImportError
|
def get_dataset(self, key, info):
"""Load a dataset."""
if key.name not in self.datasets:
return
if self.nc is None:
self.nc = xr.open_dataset(
self.filename,
decode_cf=True,
mask_and_scale=True,
engine=self.engine,
chunks={"tie_columns": CHUNK_SIZE, "tie_rows": CHUNK_SIZE},
)
self.nc = self.nc.rename({"tie_columns": "x", "tie_rows": "y"})
logger.debug("Reading %s.", key.name)
l_step = self.nc.attrs["al_subsampling_factor"]
c_step = self.nc.attrs["ac_subsampling_factor"]
if (c_step != 1 or l_step != 1) and self.cache.get(key.name) is None:
if key.name.startswith("satellite"):
zen = self.nc[self.datasets["satellite_zenith_angle"]]
zattrs = zen.attrs
azi = self.nc[self.datasets["satellite_azimuth_angle"]]
aattrs = azi.attrs
elif key.name.startswith("solar"):
zen = self.nc[self.datasets["solar_zenith_angle"]]
zattrs = zen.attrs
azi = self.nc[self.datasets["solar_azimuth_angle"]]
aattrs = azi.attrs
else:
raise NotImplementedError("Don't know how to read " + key.name)
x, y, z = angle2xyz(azi, zen)
shape = x.shape
from geotiepoints.interpolator import Interpolator
tie_lines = np.arange(0, (shape[0] - 1) * l_step + 1, l_step)
tie_cols = np.arange(0, (shape[1] - 1) * c_step + 1, c_step)
lines = np.arange((shape[0] - 1) * l_step + 1)
cols = np.arange((shape[1] - 1) * c_step + 1)
along_track_order = 1
cross_track_order = 3
satint = Interpolator(
[x.values, y.values, z.values],
(tie_lines, tie_cols),
(lines, cols),
along_track_order,
cross_track_order,
)
(
x,
y,
z,
) = satint.interpolate()
del satint
x = xr.DataArray(
da.from_array(x, chunks=(CHUNK_SIZE, CHUNK_SIZE)), dims=["y", "x"]
)
y = xr.DataArray(
da.from_array(y, chunks=(CHUNK_SIZE, CHUNK_SIZE)), dims=["y", "x"]
)
z = xr.DataArray(
da.from_array(z, chunks=(CHUNK_SIZE, CHUNK_SIZE)), dims=["y", "x"]
)
azi, zen = xyz2angle(x, y, z)
azi.attrs = aattrs
zen.attrs = zattrs
if "zenith" in key.name:
values = zen
elif "azimuth" in key.name:
values = azi
else:
raise NotImplementedError("Don't know how to read " + key.name)
if key.name.startswith("satellite"):
self.cache["satellite_zenith_angle"] = zen
self.cache["satellite_azimuth_angle"] = azi
elif key.name.startswith("solar"):
self.cache["solar_zenith_angle"] = zen
self.cache["solar_azimuth_angle"] = azi
elif key.name in self.cache:
values = self.cache[key.name]
else:
values = self.nc[self.datasets[key.name]]
values.attrs["platform_name"] = self.platform_name
values.attrs["sensor"] = self.sensor
values.attrs.update(key.to_dict())
return values
|
def get_dataset(self, key, info):
"""Load a dataset."""
if key.name not in self.datasets:
return
if self.nc is None:
self.nc = xr.open_dataset(
self.filename,
decode_cf=True,
mask_and_scale=True,
engine="h5netcdf",
chunks={"tie_columns": CHUNK_SIZE, "tie_rows": CHUNK_SIZE},
)
self.nc = self.nc.rename({"tie_columns": "x", "tie_rows": "y"})
logger.debug("Reading %s.", key.name)
l_step = self.nc.attrs["al_subsampling_factor"]
c_step = self.nc.attrs["ac_subsampling_factor"]
if (c_step != 1 or l_step != 1) and self.cache.get(key.name) is None:
if key.name.startswith("satellite"):
zen = self.nc[self.datasets["satellite_zenith_angle"]]
zattrs = zen.attrs
azi = self.nc[self.datasets["satellite_azimuth_angle"]]
aattrs = azi.attrs
elif key.name.startswith("solar"):
zen = self.nc[self.datasets["solar_zenith_angle"]]
zattrs = zen.attrs
azi = self.nc[self.datasets["solar_azimuth_angle"]]
aattrs = azi.attrs
else:
raise NotImplementedError("Don't know how to read " + key.name)
x, y, z = angle2xyz(azi, zen)
shape = x.shape
from geotiepoints.interpolator import Interpolator
tie_lines = np.arange(0, (shape[0] - 1) * l_step + 1, l_step)
tie_cols = np.arange(0, (shape[1] - 1) * c_step + 1, c_step)
lines = np.arange((shape[0] - 1) * l_step + 1)
cols = np.arange((shape[1] - 1) * c_step + 1)
along_track_order = 1
cross_track_order = 3
satint = Interpolator(
[x.values, y.values, z.values],
(tie_lines, tie_cols),
(lines, cols),
along_track_order,
cross_track_order,
)
(
x,
y,
z,
) = satint.interpolate()
del satint
x = xr.DataArray(
da.from_array(x, chunks=(CHUNK_SIZE, CHUNK_SIZE)), dims=["y", "x"]
)
y = xr.DataArray(
da.from_array(y, chunks=(CHUNK_SIZE, CHUNK_SIZE)), dims=["y", "x"]
)
z = xr.DataArray(
da.from_array(z, chunks=(CHUNK_SIZE, CHUNK_SIZE)), dims=["y", "x"]
)
azi, zen = xyz2angle(x, y, z)
azi.attrs = aattrs
zen.attrs = zattrs
if "zenith" in key.name:
values = zen
elif "azimuth" in key.name:
values = azi
else:
raise NotImplementedError("Don't know how to read " + key.name)
if key.name.startswith("satellite"):
self.cache["satellite_zenith_angle"] = zen
self.cache["satellite_azimuth_angle"] = azi
elif key.name.startswith("solar"):
self.cache["solar_zenith_angle"] = zen
self.cache["solar_azimuth_angle"] = azi
elif key.name in self.cache:
values = self.cache[key.name]
else:
values = self.nc[self.datasets[key.name]]
values.attrs["platform_name"] = self.platform_name
values.attrs["sensor"] = self.sensor
values.attrs.update(key.to_dict())
return values
|
https://github.com/pytroll/satpy/issues/944
|
Traceback (most recent call last):
File "/network/aopp/apres/users/proud/satpyconda/lib/python3.7/site-packages/xarray/backends/file_manager.py", line 243, in __del__
File "/network/aopp/apres/users/proud/satpyconda/lib/python3.7/site-packages/xarray/backends/file_manager.py", line 221, in close
File "/network/aopp/apres/users/proud/satpyconda/lib/python3.7/site-packages/h5netcdf/core.py", line 701, in close
File "/network/aopp/apres/users/proud/satpyconda/lib/python3.7/site-packages/h5py/_hl/files.py", line 431, in close
File "h5py/_objects.pyx", line 54, in h5py._objects.with_phil.wrapper
File "h5py/_objects.pyx", line 55, in h5py._objects.with_phil.wrapper
File "h5py/h5f.pyx", line 267, in h5py.h5f.get_obj_ids
File "h5py/h5i.pyx", line 43, in h5py.h5i.wrap_identifier
ImportError: sys.meta_path is None, Python is likely shutting down
|
ImportError
|
def precompute(
self, mask=None, radius_of_influence=None, epsilon=0, cache_dir=None, **kwargs
):
"""Create a KDTree structure and store it for later use.
Note: The `mask` keyword should be provided if geolocation may be valid
where data points are invalid.
"""
from pyresample.kd_tree import XArrayResamplerNN
del kwargs
source_geo_def = self.source_geo_def
if mask is not None and cache_dir is not None:
LOG.warning(
"Mask and cache_dir both provided to nearest "
"resampler. Cached parameters are affected by "
"masked pixels. Will not cache results."
)
cache_dir = None
# TODO: move this to pyresample
if radius_of_influence is None:
try:
radius_of_influence = source_geo_def.lons.resolution * 3
except AttributeError:
try:
radius_of_influence = (
max(
abs(source_geo_def.pixel_size_x),
abs(source_geo_def.pixel_size_y),
)
* 3
)
except AttributeError:
radius_of_influence = 1000
except TypeError:
radius_of_influence = 10000
kwargs = dict(
source_geo_def=source_geo_def,
target_geo_def=self.target_geo_def,
radius_of_influence=radius_of_influence,
neighbours=1,
epsilon=epsilon,
)
if self.resampler is None:
# FIXME: We need to move all of this caching logic to pyresample
self.resampler = XArrayResamplerNN(**kwargs)
try:
self.load_neighbour_info(cache_dir, mask=mask, **kwargs)
LOG.debug("Read pre-computed kd-tree parameters")
except IOError:
LOG.debug("Computing kd-tree parameters")
self.resampler.get_neighbour_info(mask=mask)
self.save_neighbour_info(cache_dir, mask=mask, **kwargs)
|
def precompute(
self, mask=None, radius_of_influence=None, epsilon=0, cache_dir=None, **kwargs
):
"""Create a KDTree structure and store it for later use.
Note: The `mask` keyword should be provided if geolocation may be valid
where data points are invalid.
"""
del kwargs
source_geo_def = self.source_geo_def
if mask is not None and cache_dir is not None:
LOG.warning(
"Mask and cache_dir both provided to nearest "
"resampler. Cached parameters are affected by "
"masked pixels. Will not cache results."
)
cache_dir = None
# TODO: move this to pyresample
if radius_of_influence is None:
try:
radius_of_influence = source_geo_def.lons.resolution * 3
except AttributeError:
try:
radius_of_influence = (
max(
abs(source_geo_def.pixel_size_x),
abs(source_geo_def.pixel_size_y),
)
* 3
)
except AttributeError:
radius_of_influence = 1000
except TypeError:
radius_of_influence = 10000
kwargs = dict(
source_geo_def=source_geo_def,
target_geo_def=self.target_geo_def,
radius_of_influence=radius_of_influence,
neighbours=1,
epsilon=epsilon,
)
if self.resampler is None:
# FIXME: We need to move all of this caching logic to pyresample
self.resampler = XArrayResamplerNN(**kwargs)
try:
self.load_neighbour_info(cache_dir, mask=mask, **kwargs)
LOG.debug("Read pre-computed kd-tree parameters")
except IOError:
LOG.debug("Computing kd-tree parameters")
self.resampler.get_neighbour_info(mask=mask)
self.save_neighbour_info(cache_dir, mask=mask, **kwargs)
|
https://github.com/pytroll/satpy/issues/918
|
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/media/nas/x21324/miniconda3/envs/py37_sandbox/lib/python3.7/site-packages/satpy/__init__.py", line 53, in <module>
from satpy.writers import available_writers # noqa
File "/media/nas/x21324/miniconda3/envs/py37_sandbox/lib/python3.7/site-packages/satpy/writers/__init__.py", line 41, in <module>
from satpy.resample import get_area_def
File "/media/nas/x21324/miniconda3/envs/py37_sandbox/lib/python3.7/site-packages/satpy/resample.py", line 148, in <module>
from pyresample import bucket
ImportError: cannot import name 'bucket' from 'pyresample' (/media/nas/x21324/miniconda3/envs/py37_sandbox/lib/python3.7/site-packages/pyresample/__init__.py)
|
ImportError
|
def precompute(
self,
mask=None,
radius_of_influence=50000,
epsilon=0,
reduce_data=True,
cache_dir=False,
**kwargs,
):
"""Create bilinear coefficients and store them for later use."""
from pyresample.bilinear.xarr import XArrayResamplerBilinear
del kwargs
del mask
if self.resampler is None:
kwargs = dict(
source_geo_def=self.source_geo_def,
target_geo_def=self.target_geo_def,
radius_of_influence=radius_of_influence,
neighbours=32,
epsilon=epsilon,
reduce_data=reduce_data,
)
self.resampler = XArrayResamplerBilinear(**kwargs)
try:
self.load_bil_info(cache_dir, **kwargs)
LOG.debug("Loaded bilinear parameters")
except IOError:
LOG.debug("Computing bilinear parameters")
self.resampler.get_bil_info()
LOG.debug("Saving bilinear parameters.")
self.save_bil_info(cache_dir, **kwargs)
|
def precompute(
self,
mask=None,
radius_of_influence=50000,
epsilon=0,
reduce_data=True,
cache_dir=False,
**kwargs,
):
"""Create bilinear coefficients and store them for later use."""
del kwargs
del mask
if self.resampler is None:
kwargs = dict(
source_geo_def=self.source_geo_def,
target_geo_def=self.target_geo_def,
radius_of_influence=radius_of_influence,
neighbours=32,
epsilon=epsilon,
reduce_data=reduce_data,
)
self.resampler = XArrayResamplerBilinear(**kwargs)
try:
self.load_bil_info(cache_dir, **kwargs)
LOG.debug("Loaded bilinear parameters")
except IOError:
LOG.debug("Computing bilinear parameters")
self.resampler.get_bil_info()
LOG.debug("Saving bilinear parameters.")
self.save_bil_info(cache_dir, **kwargs)
|
https://github.com/pytroll/satpy/issues/918
|
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/media/nas/x21324/miniconda3/envs/py37_sandbox/lib/python3.7/site-packages/satpy/__init__.py", line 53, in <module>
from satpy.writers import available_writers # noqa
File "/media/nas/x21324/miniconda3/envs/py37_sandbox/lib/python3.7/site-packages/satpy/writers/__init__.py", line 41, in <module>
from satpy.resample import get_area_def
File "/media/nas/x21324/miniconda3/envs/py37_sandbox/lib/python3.7/site-packages/satpy/resample.py", line 148, in <module>
from pyresample import bucket
ImportError: cannot import name 'bucket' from 'pyresample' (/media/nas/x21324/miniconda3/envs/py37_sandbox/lib/python3.7/site-packages/pyresample/__init__.py)
|
ImportError
|
def __init__(self, source_geo_def, target_geo_def):
"""Initialize bucket resampler."""
super(BucketResamplerBase, self).__init__(source_geo_def, target_geo_def)
self.resampler = None
|
def __init__(self, source_geo_def, target_geo_def):
super(BucketResamplerBase, self).__init__(source_geo_def, target_geo_def)
self.resampler = None
|
https://github.com/pytroll/satpy/issues/918
|
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/media/nas/x21324/miniconda3/envs/py37_sandbox/lib/python3.7/site-packages/satpy/__init__.py", line 53, in <module>
from satpy.writers import available_writers # noqa
File "/media/nas/x21324/miniconda3/envs/py37_sandbox/lib/python3.7/site-packages/satpy/writers/__init__.py", line 41, in <module>
from satpy.resample import get_area_def
File "/media/nas/x21324/miniconda3/envs/py37_sandbox/lib/python3.7/site-packages/satpy/resample.py", line 148, in <module>
from pyresample import bucket
ImportError: cannot import name 'bucket' from 'pyresample' (/media/nas/x21324/miniconda3/envs/py37_sandbox/lib/python3.7/site-packages/pyresample/__init__.py)
|
ImportError
|
def precompute(self, **kwargs):
"""Create X and Y indices and store them for later use."""
from pyresample import bucket
LOG.debug("Initializing bucket resampler.")
source_lons, source_lats = self.source_geo_def.get_lonlats(chunks=CHUNK_SIZE)
self.resampler = bucket.BucketResampler(
self.target_geo_def, source_lons, source_lats
)
|
def precompute(self, **kwargs):
"""Create X and Y indices and store them for later use."""
LOG.debug("Initializing bucket resampler.")
source_lons, source_lats = self.source_geo_def.get_lonlats(chunks=CHUNK_SIZE)
self.resampler = bucket.BucketResampler(
self.target_geo_def, source_lons, source_lats
)
|
https://github.com/pytroll/satpy/issues/918
|
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/media/nas/x21324/miniconda3/envs/py37_sandbox/lib/python3.7/site-packages/satpy/__init__.py", line 53, in <module>
from satpy.writers import available_writers # noqa
File "/media/nas/x21324/miniconda3/envs/py37_sandbox/lib/python3.7/site-packages/satpy/writers/__init__.py", line 41, in <module>
from satpy.resample import get_area_def
File "/media/nas/x21324/miniconda3/envs/py37_sandbox/lib/python3.7/site-packages/satpy/resample.py", line 148, in <module>
from pyresample import bucket
ImportError: cannot import name 'bucket' from 'pyresample' (/media/nas/x21324/miniconda3/envs/py37_sandbox/lib/python3.7/site-packages/pyresample/__init__.py)
|
ImportError
|
def compute(self, data, **kwargs):
"""Call the resampling."""
LOG.debug("Resampling %s", str(data.name))
results = []
if data.ndim == 3:
for _i in range(data.shape[0]):
res = self.resampler.get_count()
results.append(res)
else:
res = self.resampler.get_count()
results.append(res)
return da.stack(results)
|
def compute(self, data, **kwargs):
"""Call the resampling."""
LOG.debug("Resampling %s", str(data.name))
results = []
if data.ndim == 3:
for i in range(data.shape[0]):
res = self.resampler.get_count()
results.append(res)
else:
res = self.resampler.get_count()
results.append(res)
return da.stack(results)
|
https://github.com/pytroll/satpy/issues/918
|
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/media/nas/x21324/miniconda3/envs/py37_sandbox/lib/python3.7/site-packages/satpy/__init__.py", line 53, in <module>
from satpy.writers import available_writers # noqa
File "/media/nas/x21324/miniconda3/envs/py37_sandbox/lib/python3.7/site-packages/satpy/writers/__init__.py", line 41, in <module>
from satpy.resample import get_area_def
File "/media/nas/x21324/miniconda3/envs/py37_sandbox/lib/python3.7/site-packages/satpy/resample.py", line 148, in <module>
from pyresample import bucket
ImportError: cannot import name 'bucket' from 'pyresample' (/media/nas/x21324/miniconda3/envs/py37_sandbox/lib/python3.7/site-packages/pyresample/__init__.py)
|
ImportError
|
def __call__(self, *args, **kwargs):
"""Call the compositor."""
from satpy import Scene
# Check if filename exists, if not then try from SATPY_ANCPATH
if not os.path.isfile(self.filename):
tmp_filename = os.path.join(get_environ_ancpath(), self.filename)
if os.path.isfile(tmp_filename):
self.filename = tmp_filename
scn = Scene(reader="generic_image", filenames=[self.filename])
scn.load(["image"])
img = scn["image"]
# use compositor parameters as extra metadata
# most important: set 'name' of the image
img.attrs.update(self.attrs)
# Check for proper area definition. Non-georeferenced images
# do not have `area` in the attributes
if "area" not in img.attrs:
if self.area is None:
raise AttributeError("Area definition needs to be configured")
img.attrs["area"] = self.area
img.attrs["sensor"] = None
img.attrs["mode"] = "".join(img.bands.data)
img.attrs.pop("modifiers", None)
img.attrs.pop("calibration", None)
# Add start time if not present in the filename
if "start_time" not in img.attrs or not img.attrs["start_time"]:
import datetime as dt
img.attrs["start_time"] = dt.datetime.utcnow()
if "end_time" not in img.attrs or not img.attrs["end_time"]:
import datetime as dt
img.attrs["end_time"] = dt.datetime.utcnow()
return img
|
def __call__(self, *args, **kwargs):
"""Call the compositor."""
from satpy import Scene
# Check if filename exists, if not then try from SATPY_ANCPATH
if not os.path.isfile(self.filename):
tmp_filename = os.path.join(get_environ_ancpath(), self.filename)
if os.path.isfile(tmp_filename):
self.filename = tmp_filename
scn = Scene(reader="generic_image", filenames=[self.filename])
scn.load(["image"])
img = scn["image"]
# use compositor parameters as extra metadata
# most important: set 'name' of the image
img.attrs.update(self.attrs)
# Check for proper area definition. Non-georeferenced images
# have None as .ndim
if img.area.ndim is None:
if self.area is None:
raise AttributeError("Area definition needs to be configured")
img.attrs["area"] = self.area
img.attrs["sensor"] = None
img.attrs["mode"] = "".join(img.bands.data)
img.attrs.pop("modifiers", None)
img.attrs.pop("calibration", None)
# Add start time if not present in the filename
if "start_time" not in img.attrs or not img.attrs["start_time"]:
import datetime as dt
img.attrs["start_time"] = dt.datetime.utcnow()
if "end_time" not in img.attrs or not img.attrs["end_time"]:
import datetime as dt
img.attrs["end_time"] = dt.datetime.utcnow()
return img
|
https://github.com/pytroll/satpy/issues/830
|
[DEBUG: 2019-06-26 09:01:32 : satpy.readers.yaml_reader] No coordinates found for DatasetID(name='image', wavelength=None, resolution=None, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-06-26 09:01:32 : satpy.readers.generic_image] Reading DatasetID(name='image', wavelength=None, resolution=None, polarization=None, calibration=None, level=None, modifiers=()).
Traceback (most recent call last):
File "make_msg3_static_day.py", line 190, in <module>
global_scene.load([composite])
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy/scene.py", line 969, in load
keepables = self.generate_composites()
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy/scene.py", line 884, in generate_composites
return self._read_composites(nodes)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy/scene.py", line 858, in _read_composites
self._generate_composite(item, keepables)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy/scene.py", line 833, in _generate_composite
**self.attrs)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy/composites/__init__.py", line 1415, in __call__
if img.area.ndim is None:
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/xarray/core/common.py", line 179, in __getattr__
(type(self).__name__, name))
AttributeError: 'DataArray' object has no attribute 'area'
|
AttributeError
|
def add_overlay(
orig,
area,
coast_dir,
color=(0, 0, 0),
width=0.5,
resolution=None,
level_coast=1,
level_borders=1,
fill_value=None,
):
"""Add coastline and political borders to image.
Uses ``color`` for feature colors where ``color`` is a 3-element tuple
of integers between 0 and 255 representing (R, G, B).
.. warning::
This function currently loses the data mask (alpha band).
``resolution`` is chosen automatically if None (default), otherwise it should be one of:
+-----+-------------------------+---------+
| 'f' | Full resolution | 0.04 km |
| 'h' | High resolution | 0.2 km |
| 'i' | Intermediate resolution | 1.0 km |
| 'l' | Low resolution | 5.0 km |
| 'c' | Crude resolution | 25 km |
+-----+-------------------------+---------+
"""
if area is None:
raise ValueError("Area of image is None, can't add overlay.")
from pycoast import ContourWriterAGG
if isinstance(area, str):
area = get_area_def(area)
LOG.info("Add coastlines and political borders to image.")
if resolution is None:
x_resolution = (area.area_extent[2] - area.area_extent[0]) / area.x_size
y_resolution = (area.area_extent[3] - area.area_extent[1]) / area.y_size
res = min(x_resolution, y_resolution)
if res > 25000:
resolution = "c"
elif res > 5000:
resolution = "l"
elif res > 1000:
resolution = "i"
elif res > 200:
resolution = "h"
else:
resolution = "f"
LOG.debug("Automagically choose resolution %s", resolution)
if hasattr(orig, "convert"):
# image must be in RGB space to work with pycoast/pydecorate
orig = orig.convert("RGBA" if orig.mode.endswith("A") else "RGB")
elif not orig.mode.startswith("RGB"):
raise RuntimeError(
"'trollimage' 1.6+ required to support adding "
"overlays/decorations to non-RGB data."
)
img = orig.pil_image(fill_value=fill_value)
cw_ = ContourWriterAGG(coast_dir)
cw_.add_coastlines(
img, area, outline=color, resolution=resolution, width=width, level=level_coast
)
cw_.add_borders(
img,
area,
outline=color,
resolution=resolution,
width=width,
level=level_borders,
)
arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE)
new_data = xr.DataArray(
arr,
dims=["y", "x", "bands"],
coords={
"y": orig.data.coords["y"],
"x": orig.data.coords["x"],
"bands": list(img.mode),
},
attrs=orig.data.attrs,
)
return XRImage(new_data)
|
def add_overlay(
orig,
area,
coast_dir,
color=(0, 0, 0),
width=0.5,
resolution=None,
level_coast=1,
level_borders=1,
fill_value=None,
):
"""Add coastline and political borders to image, using *color* (tuple
of integers between 0 and 255).
Warning: Loses the masks !
*resolution* is chosen automatically if None (default), otherwise it should be one of:
+-----+-------------------------+---------+
| 'f' | Full resolution | 0.04 km |
| 'h' | High resolution | 0.2 km |
| 'i' | Intermediate resolution | 1.0 km |
| 'l' | Low resolution | 5.0 km |
| 'c' | Crude resolution | 25 km |
+-----+-------------------------+---------+
"""
if area is None:
raise ValueError("Area of image is None, can't add overlay.")
from pycoast import ContourWriterAGG
if isinstance(area, str):
area = get_area_def(area)
LOG.info("Add coastlines and political borders to image.")
if resolution is None:
x_resolution = (area.area_extent[2] - area.area_extent[0]) / area.x_size
y_resolution = (area.area_extent[3] - area.area_extent[1]) / area.y_size
res = min(x_resolution, y_resolution)
if res > 25000:
resolution = "c"
elif res > 5000:
resolution = "l"
elif res > 1000:
resolution = "i"
elif res > 200:
resolution = "h"
else:
resolution = "f"
LOG.debug("Automagically choose resolution %s", resolution)
img = orig.pil_image(fill_value=fill_value)
cw_ = ContourWriterAGG(coast_dir)
cw_.add_coastlines(
img, area, outline=color, resolution=resolution, width=width, level=level_coast
)
cw_.add_borders(
img,
area,
outline=color,
resolution=resolution,
width=width,
level=level_borders,
)
arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE)
orig.data = xr.DataArray(
arr,
dims=["y", "x", "bands"],
coords={
"y": orig.data.coords["y"],
"x": orig.data.coords["x"],
"bands": list(img.mode),
},
attrs=orig.data.attrs,
)
|
https://github.com/pytroll/satpy/issues/449
|
[INFO: 2018-10-08 23:46:08 : satpy.writers] Add coastlines and political borders to image.
[DEBUG: 2018-10-08 23:46:08 : satpy.writers] Automagically choose resolution f
[DEBUG: 2018-10-08 23:46:09 : trollimage.xrimage] Interval: left=<xarray.DataArray 'reshape-59920bd5c4344e209a876883f95f2db3' (bands: 1)>
array([0.])
Coordinates:
* bands (bands) <U1 'L'
quantile float64 0.005, right=<xarray.DataArray 'reshape-59920bd5c4344e209a876883f95f2db3' (bands: 1)>
array([67.693016])
Coordinates:
* bands (bands) <U1 'L'
quantile float64 0.995
Traceback (most recent call last):
File "msg4-ernst_single_channel.py", line 93, in <module>
global_scene.save_dataset(composite,'./MSG4.png', overlay={'coast_dir': '/home/cpeters/software/', 'width':1, 'color':(200, 200, 200)})
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy-0+unknown-py3.6.egg/satpy/scene.py", line 1054, in save_dataset
**save_kwargs)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy-0+unknown-py3.6.egg/satpy/writers/__init__.py", line 611, in save_dataset
decorate=decorate, fill_value=fill_value)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy-0+unknown-py3.6.egg/satpy/writers/__init__.py", line 354, in get_enhanced_image
add_overlay(img, dataset.attrs['area'], fill_value=fill_value, **overlay)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy-0+unknown-py3.6.egg/satpy/writers/__init__.py", line 230, in add_overlay
resolution=resolution, width=width, level=level_coast)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/pycoast/cw_agg.py", line 418, in add_coastlines
y_offset=y_offset)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/pycoast/cw_base.py", line 598, in _add_feature
x_offset=x_offset, y_offset=y_offset, **kwargs)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/pycoast/cw_base.py", line 510, in add_shapes
draw = self._get_canvas(image)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/pycoast/cw_agg.py", line 52, in _get_canvas
return aggdraw.Draw(image)
ValueError: bad mode
|
ValueError
|
def add_text(orig, dc, img, text=None):
"""Add text to an image using the pydecorate package.
All the features of pydecorate's ``add_text`` are available.
See documentation of :doc:`pydecorate:index` for more info.
"""
LOG.info("Add text to image.")
dc.add_text(**text)
arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE)
new_data = xr.DataArray(
arr,
dims=["y", "x", "bands"],
coords={
"y": orig.data.coords["y"],
"x": orig.data.coords["x"],
"bands": list(img.mode),
},
attrs=orig.data.attrs,
)
return XRImage(new_data)
|
def add_text(orig, dc, img, text=None):
"""
Add text to an image using the pydecorate function add_text
All the features in pydecorate are available
See documentation of pydecorate
"""
LOG.info("Add text to image.")
dc.add_text(**text)
arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE)
orig.data = xr.DataArray(
arr,
dims=["y", "x", "bands"],
coords={
"y": orig.data.coords["y"],
"x": orig.data.coords["x"],
"bands": list(img.mode),
},
attrs=orig.data.attrs,
)
|
https://github.com/pytroll/satpy/issues/449
|
[INFO: 2018-10-08 23:46:08 : satpy.writers] Add coastlines and political borders to image.
[DEBUG: 2018-10-08 23:46:08 : satpy.writers] Automagically choose resolution f
[DEBUG: 2018-10-08 23:46:09 : trollimage.xrimage] Interval: left=<xarray.DataArray 'reshape-59920bd5c4344e209a876883f95f2db3' (bands: 1)>
array([0.])
Coordinates:
* bands (bands) <U1 'L'
quantile float64 0.005, right=<xarray.DataArray 'reshape-59920bd5c4344e209a876883f95f2db3' (bands: 1)>
array([67.693016])
Coordinates:
* bands (bands) <U1 'L'
quantile float64 0.995
Traceback (most recent call last):
File "msg4-ernst_single_channel.py", line 93, in <module>
global_scene.save_dataset(composite,'./MSG4.png', overlay={'coast_dir': '/home/cpeters/software/', 'width':1, 'color':(200, 200, 200)})
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy-0+unknown-py3.6.egg/satpy/scene.py", line 1054, in save_dataset
**save_kwargs)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy-0+unknown-py3.6.egg/satpy/writers/__init__.py", line 611, in save_dataset
decorate=decorate, fill_value=fill_value)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy-0+unknown-py3.6.egg/satpy/writers/__init__.py", line 354, in get_enhanced_image
add_overlay(img, dataset.attrs['area'], fill_value=fill_value, **overlay)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy-0+unknown-py3.6.egg/satpy/writers/__init__.py", line 230, in add_overlay
resolution=resolution, width=width, level=level_coast)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/pycoast/cw_agg.py", line 418, in add_coastlines
y_offset=y_offset)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/pycoast/cw_base.py", line 598, in _add_feature
x_offset=x_offset, y_offset=y_offset, **kwargs)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/pycoast/cw_base.py", line 510, in add_shapes
draw = self._get_canvas(image)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/pycoast/cw_agg.py", line 52, in _get_canvas
return aggdraw.Draw(image)
ValueError: bad mode
|
ValueError
|
def add_logo(orig, dc, img, logo=None):
"""Add logos or other images to an image using the pydecorate package.
All the features of pydecorate's ``add_logo`` are available.
See documentation of :doc:`pydecorate:index` for more info.
"""
LOG.info("Add logo to image.")
dc.add_logo(**logo)
arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE)
new_data = xr.DataArray(
arr,
dims=["y", "x", "bands"],
coords={
"y": orig.data.coords["y"],
"x": orig.data.coords["x"],
"bands": list(img.mode),
},
attrs=orig.data.attrs,
)
return XRImage(new_data)
|
def add_logo(orig, dc, img, logo=None):
"""
Add logos or other images to an image using the pydecorate function add_logo
All the features in pydecorate are available
See documentation of pydecorate
"""
LOG.info("Add logo to image.")
dc.add_logo(**logo)
arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE)
orig.data = xr.DataArray(
arr,
dims=["y", "x", "bands"],
coords={
"y": orig.data.coords["y"],
"x": orig.data.coords["x"],
"bands": list(img.mode),
},
attrs=orig.data.attrs,
)
|
https://github.com/pytroll/satpy/issues/449
|
[INFO: 2018-10-08 23:46:08 : satpy.writers] Add coastlines and political borders to image.
[DEBUG: 2018-10-08 23:46:08 : satpy.writers] Automagically choose resolution f
[DEBUG: 2018-10-08 23:46:09 : trollimage.xrimage] Interval: left=<xarray.DataArray 'reshape-59920bd5c4344e209a876883f95f2db3' (bands: 1)>
array([0.])
Coordinates:
* bands (bands) <U1 'L'
quantile float64 0.005, right=<xarray.DataArray 'reshape-59920bd5c4344e209a876883f95f2db3' (bands: 1)>
array([67.693016])
Coordinates:
* bands (bands) <U1 'L'
quantile float64 0.995
Traceback (most recent call last):
File "msg4-ernst_single_channel.py", line 93, in <module>
global_scene.save_dataset(composite,'./MSG4.png', overlay={'coast_dir': '/home/cpeters/software/', 'width':1, 'color':(200, 200, 200)})
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy-0+unknown-py3.6.egg/satpy/scene.py", line 1054, in save_dataset
**save_kwargs)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy-0+unknown-py3.6.egg/satpy/writers/__init__.py", line 611, in save_dataset
decorate=decorate, fill_value=fill_value)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy-0+unknown-py3.6.egg/satpy/writers/__init__.py", line 354, in get_enhanced_image
add_overlay(img, dataset.attrs['area'], fill_value=fill_value, **overlay)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy-0+unknown-py3.6.egg/satpy/writers/__init__.py", line 230, in add_overlay
resolution=resolution, width=width, level=level_coast)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/pycoast/cw_agg.py", line 418, in add_coastlines
y_offset=y_offset)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/pycoast/cw_base.py", line 598, in _add_feature
x_offset=x_offset, y_offset=y_offset, **kwargs)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/pycoast/cw_base.py", line 510, in add_shapes
draw = self._get_canvas(image)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/pycoast/cw_agg.py", line 52, in _get_canvas
return aggdraw.Draw(image)
ValueError: bad mode
|
ValueError
|
def add_decorate(orig, fill_value=None, **decorate):
"""Decorate an image with text and/or logos/images.
This call adds text/logos in order as given in the input to keep the
alignment features available in pydecorate.
An example of the decorate config::
decorate = {
'decorate': [
{'logo': {'logo_path': <path to a logo>, 'height': 143, 'bg': 'white', 'bg_opacity': 255}},
{'text': {'txt': start_time_txt,
'align': {'top_bottom': 'bottom', 'left_right': 'right'},
'font': <path to ttf font>,
'font_size': 22,
'height': 30,
'bg': 'black',
'bg_opacity': 255,
'line': 'white'}}
]
}
Any numbers of text/logo in any order can be added to the decorate list,
but the order of the list is kept as described above.
Note that a feature given in one element, eg. bg (which is the background color)
will also apply on the next elements unless a new value is given.
align is a special keyword telling where in the image to start adding features, top_bottom is either top or bottom
and left_right is either left or right.
"""
LOG.info("Decorate image.")
# Need to create this here to possible keep the alignment
# when adding text and/or logo with pydecorate
if hasattr(orig, "convert"):
# image must be in RGB space to work with pycoast/pydecorate
orig = orig.convert("RGBA" if orig.mode.endswith("A") else "RGB")
elif not orig.mode.startswith("RGB"):
raise RuntimeError(
"'trollimage' 1.6+ required to support adding "
"overlays/decorations to non-RGB data."
)
img_orig = orig.pil_image(fill_value=fill_value)
from pydecorate import DecoratorAGG
dc = DecoratorAGG(img_orig)
# decorate need to be a list to maintain the alignment
# as ordered in the list
img = orig
if "decorate" in decorate:
for dec in decorate["decorate"]:
if "logo" in dec:
img = add_logo(img, dc, img_orig, logo=dec["logo"])
elif "text" in dec:
img = add_text(img, dc, img_orig, text=dec["text"])
return img
|
def add_decorate(orig, fill_value=None, **decorate):
"""Decorate an image with text and/or logos/images.
This call adds text/logos in order as given in the input to keep the
alignment features available in pydecorate.
An example of the decorate config::
decorate = {
'decorate': [
{'logo': {'logo_path': <path to a logo>, 'height': 143, 'bg': 'white', 'bg_opacity': 255}},
{'text': {'txt': start_time_txt,
'align': {'top_bottom': 'bottom', 'left_right': 'right'},
'font': <path to ttf font>,
'font_size': 22,
'height': 30,
'bg': 'black',
'bg_opacity': 255,
'line': 'white'}}
]
}
Any numbers of text/logo in any order can be added to the decorate list,
but the order of the list is kept as described above.
Note that a feature given in one element, eg. bg (which is the background color)
will also apply on the next elements unless a new value is given.
align is a special keyword telling where in the image to start adding features, top_bottom is either top or bottom
and left_right is either left or right.
"""
LOG.info("Decorate image.")
# Need to create this here to possible keep the alignment
# when adding text and/or logo with pydecorate
img_orig = orig.pil_image(fill_value=fill_value)
from pydecorate import DecoratorAGG
dc = DecoratorAGG(img_orig)
# decorate need to be a list to maintain the alignment
# as ordered in the list
if "decorate" in decorate:
for dec in decorate["decorate"]:
if "logo" in dec:
add_logo(orig, dc, img_orig, logo=dec["logo"])
elif "text" in dec:
add_text(orig, dc, img_orig, text=dec["text"])
|
https://github.com/pytroll/satpy/issues/449
|
[INFO: 2018-10-08 23:46:08 : satpy.writers] Add coastlines and political borders to image.
[DEBUG: 2018-10-08 23:46:08 : satpy.writers] Automagically choose resolution f
[DEBUG: 2018-10-08 23:46:09 : trollimage.xrimage] Interval: left=<xarray.DataArray 'reshape-59920bd5c4344e209a876883f95f2db3' (bands: 1)>
array([0.])
Coordinates:
* bands (bands) <U1 'L'
quantile float64 0.005, right=<xarray.DataArray 'reshape-59920bd5c4344e209a876883f95f2db3' (bands: 1)>
array([67.693016])
Coordinates:
* bands (bands) <U1 'L'
quantile float64 0.995
Traceback (most recent call last):
File "msg4-ernst_single_channel.py", line 93, in <module>
global_scene.save_dataset(composite,'./MSG4.png', overlay={'coast_dir': '/home/cpeters/software/', 'width':1, 'color':(200, 200, 200)})
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy-0+unknown-py3.6.egg/satpy/scene.py", line 1054, in save_dataset
**save_kwargs)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy-0+unknown-py3.6.egg/satpy/writers/__init__.py", line 611, in save_dataset
decorate=decorate, fill_value=fill_value)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy-0+unknown-py3.6.egg/satpy/writers/__init__.py", line 354, in get_enhanced_image
add_overlay(img, dataset.attrs['area'], fill_value=fill_value, **overlay)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy-0+unknown-py3.6.egg/satpy/writers/__init__.py", line 230, in add_overlay
resolution=resolution, width=width, level=level_coast)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/pycoast/cw_agg.py", line 418, in add_coastlines
y_offset=y_offset)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/pycoast/cw_base.py", line 598, in _add_feature
x_offset=x_offset, y_offset=y_offset, **kwargs)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/pycoast/cw_base.py", line 510, in add_shapes
draw = self._get_canvas(image)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/pycoast/cw_agg.py", line 52, in _get_canvas
return aggdraw.Draw(image)
ValueError: bad mode
|
ValueError
|
def get_enhanced_image(
dataset,
ppp_config_dir=None,
enhance=None,
enhancement_config_file=None,
overlay=None,
decorate=None,
fill_value=None,
):
"""Get an enhanced version of `dataset` as an :class:`~trollimage.xrimage.XRImage` instance.
Args:
dataset (xarray.DataArray): Data to be enhanced and converted to an image.
ppp_config_dir (str): Root configuration directory.
enhance (bool or Enhancer): Whether to automatically enhance
data to be more visually useful and to fit inside the file
format being saved to. By default this will default to using
the enhancement configuration files found using the default
:class:`~satpy.writers.Enhancer` class. This can be set to
`False` so that no enhancments are performed. This can also
be an instance of the :class:`~satpy.writers.Enhancer` class
if further custom enhancement is needed.
enhancement_config_file (str): Deprecated.
overlay (dict): Options for image overlays. See :func:`add_overlay`
for available options.
decorate (dict): Options for decorating the image. See
:func:`add_decorate` for available options.
fill_value (int or float): Value to use when pixels are masked or
invalid. Default of `None` means to create an alpha channel.
See :meth:`~trollimage.xrimage.XRImage.finalize` for more
details. Only used when adding overlays or decorations. Otherwise
it is up to the caller to "finalize" the image before using it
except if calling ``img.show()`` or providing the image to
a writer as these will finalize the image.
.. versionchanged:: 0.10
Deprecated `enhancement_config_file` and 'enhancer' in favor of
`enhance`. Pass an instance of the `Enhancer` class to `enhance`
instead.
"""
if ppp_config_dir is None:
ppp_config_dir = get_environ_config_dir()
if enhancement_config_file is not None:
warnings.warn(
"'enhancement_config_file' has been deprecated. Pass an instance of the "
"'Enhancer' class to the 'enhance' keyword argument instead.",
DeprecationWarning,
)
if enhance is False:
# no enhancement
enhancer = None
elif enhance is None or enhance is True:
# default enhancement
enhancer = Enhancer(ppp_config_dir, enhancement_config_file)
else:
# custom enhancer
enhancer = enhance
# Create an image for enhancement
img = to_image(dataset)
if enhancer is None or enhancer.enhancement_tree is None:
LOG.debug("No enhancement being applied to dataset")
else:
if dataset.attrs.get("sensor", None):
enhancer.add_sensor_enhancements(dataset.attrs["sensor"])
enhancer.apply(img, **dataset.attrs)
if overlay is not None:
img = add_overlay(img, dataset.attrs["area"], fill_value=fill_value, **overlay)
if decorate is not None:
img = add_decorate(img, fill_value=fill_value, **decorate)
return img
|
def get_enhanced_image(
dataset,
ppp_config_dir=None,
enhance=None,
enhancement_config_file=None,
overlay=None,
decorate=None,
fill_value=None,
):
"""Get an enhanced version of `dataset` as an :class:`~trollimage.xrimage.XRImage` instance.
Args:
dataset (xarray.DataArray): Data to be enhanced and converted to an image.
ppp_config_dir (str): Root configuration directory.
enhance (bool or Enhancer): Whether to automatically enhance
data to be more visually useful and to fit inside the file
format being saved to. By default this will default to using
the enhancement configuration files found using the default
:class:`~satpy.writers.Enhancer` class. This can be set to
`False` so that no enhancments are performed. This can also
be an instance of the :class:`~satpy.writers.Enhancer` class
if further custom enhancement is needed.
enhancement_config_file (str): Deprecated.
overlay (dict): Options for image overlays. See :func:`add_overlay`
for available options.
decorate (dict): Options for decorating the image. See
:func:`add_decorate` for available options.
fill_value (int or float): Value to use when pixels are masked or
invalid. Default of `None` means to create an alpha channel.
See :meth:`~trollimage.xrimage.XRImage.finalize` for more
details. Only used when adding overlays or decorations. Otherwise
it is up to the caller to "finalize" the image before using it
except if calling ``img.show()`` or providing the image to
a writer as these will finalize the image.
.. versionchanged:: 0.10
Deprecated `enhancement_config_file` and 'enhancer' in favor of
`enhance`. Pass an instance of the `Enhancer` class to `enhance`
instead.
"""
if ppp_config_dir is None:
ppp_config_dir = get_environ_config_dir()
if enhancement_config_file is not None:
warnings.warn(
"'enhancement_config_file' has been deprecated. Pass an instance of the "
"'Enhancer' class to the 'enhance' keyword argument instead.",
DeprecationWarning,
)
if enhance is False:
# no enhancement
enhancer = None
elif enhance is None or enhance is True:
# default enhancement
enhancer = Enhancer(ppp_config_dir, enhancement_config_file)
else:
# custom enhancer
enhancer = enhance
# Create an image for enhancement
img = to_image(dataset)
if enhancer is None or enhancer.enhancement_tree is None:
LOG.debug("No enhancement being applied to dataset")
else:
if dataset.attrs.get("sensor", None):
enhancer.add_sensor_enhancements(dataset.attrs["sensor"])
enhancer.apply(img, **dataset.attrs)
if overlay is not None:
add_overlay(img, dataset.attrs["area"], fill_value=fill_value, **overlay)
if decorate is not None:
add_decorate(img, fill_value=fill_value, **decorate)
return img
|
https://github.com/pytroll/satpy/issues/449
|
[INFO: 2018-10-08 23:46:08 : satpy.writers] Add coastlines and political borders to image.
[DEBUG: 2018-10-08 23:46:08 : satpy.writers] Automagically choose resolution f
[DEBUG: 2018-10-08 23:46:09 : trollimage.xrimage] Interval: left=<xarray.DataArray 'reshape-59920bd5c4344e209a876883f95f2db3' (bands: 1)>
array([0.])
Coordinates:
* bands (bands) <U1 'L'
quantile float64 0.005, right=<xarray.DataArray 'reshape-59920bd5c4344e209a876883f95f2db3' (bands: 1)>
array([67.693016])
Coordinates:
* bands (bands) <U1 'L'
quantile float64 0.995
Traceback (most recent call last):
File "msg4-ernst_single_channel.py", line 93, in <module>
global_scene.save_dataset(composite,'./MSG4.png', overlay={'coast_dir': '/home/cpeters/software/', 'width':1, 'color':(200, 200, 200)})
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy-0+unknown-py3.6.egg/satpy/scene.py", line 1054, in save_dataset
**save_kwargs)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy-0+unknown-py3.6.egg/satpy/writers/__init__.py", line 611, in save_dataset
decorate=decorate, fill_value=fill_value)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy-0+unknown-py3.6.egg/satpy/writers/__init__.py", line 354, in get_enhanced_image
add_overlay(img, dataset.attrs['area'], fill_value=fill_value, **overlay)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/satpy-0+unknown-py3.6.egg/satpy/writers/__init__.py", line 230, in add_overlay
resolution=resolution, width=width, level=level_coast)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/pycoast/cw_agg.py", line 418, in add_coastlines
y_offset=y_offset)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/pycoast/cw_base.py", line 598, in _add_feature
x_offset=x_offset, y_offset=y_offset, **kwargs)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/pycoast/cw_base.py", line 510, in add_shapes
draw = self._get_canvas(image)
File "/home/cpeters/anaconda2/envs/pytroll/lib/python3.6/site-packages/pycoast/cw_agg.py", line 52, in _get_canvas
return aggdraw.Draw(image)
ValueError: bad mode
|
ValueError
|
def concatenate_dataset(self, dataset_group, var_path):
if "I" in dataset_group:
scan_size = 32
else:
scan_size = 16
scans_path = "All_Data/{dataset_group}_All/NumberOfScans"
number_of_granules_path = "Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateNumberGranules"
nb_granules_path = number_of_granules_path.format(
dataset_group=DATASET_KEYS[dataset_group]
)
scans = []
for granule in range(self[nb_granules_path]):
scans_path = "Data_Products/{dataset_group}/{dataset_group}_Gran_{granule}/attr/N_Number_Of_Scans"
scans_path = scans_path.format(
dataset_group=DATASET_KEYS[dataset_group], granule=granule
)
scans.append(self[scans_path])
start_scan = 0
data_chunks = []
scans = xr.DataArray(scans)
variable = self[var_path]
# check if these are single per-granule value
if variable.size != scans.size:
for gscans in scans.values:
data_chunks.append(
self[var_path].isel(
y=slice(start_scan, start_scan + gscans * scan_size)
)
)
start_scan += scan_size * 48
return xr.concat(data_chunks, "y")
else:
return self.expand_single_values(variable, scans)
|
def concatenate_dataset(self, dataset_group, var_path):
if "I" in dataset_group:
scan_size = 32
else:
scan_size = 16
scans_path = "All_Data/{dataset_group}_All/NumberOfScans"
scans_path = scans_path.format(dataset_group=DATASET_KEYS[dataset_group])
start_scan = 0
data_chunks = []
scans = self[scans_path]
variable = self[var_path]
# check if these are single per-granule value
if variable.size != scans.size:
for gscans in scans.values:
data_chunks.append(
self[var_path].isel(
y=slice(start_scan, start_scan + gscans * scan_size)
)
)
start_scan += scan_size * 48
return xr.concat(data_chunks, "y")
else:
return self.expand_single_values(variable, scans)
|
https://github.com/pytroll/satpy/issues/692
|
[DEBUG: 2019-04-03 06:58:17 : satpy.readers] Reading ['/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/etc/readers/viirs_sdr.yaml']
[DEBUG: 2019-04-03 06:58:17 : satpy.scene] Setting 'PPP_CONFIG_DIR' to '/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/etc'
[DEBUG: 2019-04-03 06:58:17 : satpy.readers] Reading ['/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/etc/readers/viirs_sdr.yaml']
[DEBUG: 2019-04-03 06:58:18 : satpy.readers.yaml_reader] Assigning to viirs_sdr: ['/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI03_j01_d20190403_t0505553_e0507198_b07107_c20190403051259425344_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM11_j01_d20190403_t0510108_e0511353_b07107_c20190403051251794740_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GDNBO_j01_d20190403_t0503056_e0504283_b07107_c20190403051244716616_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI04_j01_d20190403_t0505553_e0507198_b07107_c20190403051259492004_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM15_j01_d20190403_t0501398_e0503043_b07107_c20190403051301556141_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM11_j01_d20190403_t0508468_e0510095_b07107_c20190403051300551821_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM11_j01_d20190403_t0503056_e0504283_b07107_c20190403051307595895_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM13_j01_d20190403_t0503056_e0504283_b07107_c20190403051307642640_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM07_j01_d20190403_t0510108_e0511353_b07107_c20190403051251677575_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM07_j01_d20190403_t0505553_e0507198_b07107_c20190403051300398651_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM03_j01_d20190403_t0503056_e0504283_b07107_c20190403051307370911_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM11_j01_d20190403_t0507210_e0508456_b07107_c20190403051305723932_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM05_j01_d20190403_t0505553_e0507198_b07107_c20190403051259741226_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM09_j01_d20190403_t0505553_e0507198_b07107_c20190403051259862143_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GDNBO_j01_d20190403_t0507210_e0508456_b07107_c20190403051247011494_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM15_j01_d20190403_t0503056_e0504283_b07107_c20190403051307704845_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM11_j01_d20190403_t0505553_e0507198_b07107_c20190403051259917866_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI02_j01_d20190403_t0505553_e0507198_b07107_c20190403051259360858_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM13_j01_d20190403_t0510108_e0511353_b07107_c20190403051251844644_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI05_j01_d20190403_t0504295_e0505540_b07107_c20190403051304029618_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM06_j01_d20190403_t0503056_e0504283_b07107_c20190403051307464104_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI02_j01_d20190403_t0504295_e0505540_b07107_c20190403051303787382_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVDNB_j01_d20190403_t0510108_e0511353_b07107_c20190403051251108544_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM14_j01_d20190403_t0507210_e0508456_b07107_c20190403051305812249_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM16_j01_d20190403_t0504295_e0505540_b07107_c20190403051304551878_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI04_j01_d20190403_t0503056_e0504283_b07107_c20190403051307156347_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM10_j01_d20190403_t0507210_e0508456_b07107_c20190403051305697554_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM06_j01_d20190403_t0508468_e0510095_b07107_c20190403051300412808_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM05_j01_d20190403_t0503056_e0504283_b07107_c20190403051307432660_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI03_j01_d20190403_t0501398_e0503043_b07107_c20190403051300855808_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM03_j01_d20190403_t0510108_e0511353_b07107_c20190403051251585485_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM11_j01_d20190403_t0501398_e0503043_b07107_c20190403051301432082_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI03_j01_d20190403_t0510108_e0511353_b07107_c20190403051251360743_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM06_j01_d20190403_t0505553_e0507198_b07107_c20190403051259774339_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM15_j01_d20190403_t0504295_e0505540_b07107_c20190403051304523955_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM12_j01_d20190403_t0510108_e0511353_b07107_c20190403051251821430_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM10_j01_d20190403_t0501398_e0503043_b07107_c20190403051301402718_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM12_j01_d20190403_t0503056_e0504283_b07107_c20190403051307620894_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GMTCO_j01_d20190403_t0504295_e0505540_b07107_c20190403051242352100_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI03_j01_d20190403_t0503056_e0504283_b07107_c20190403051307073635_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM07_j01_d20190403_t0503056_e0504283_b07107_c20190403051307983724_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM12_j01_d20190403_t0505553_e0507198_b07107_c20190403051259945499_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM06_j01_d20190403_t0510108_e0511353_b07107_c20190403051251656625_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVDNB_j01_d20190403_t0508468_e0510095_b07107_c20190403051259897777_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM05_j01_d20190403_t0504295_e0505540_b07107_c20190403051304231460_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM03_j01_d20190403_t0501398_e0503043_b07107_c20190403051301176731_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GITCO_j01_d20190403_t0510108_e0511353_b07107_c20190403051236377179_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM07_j01_d20190403_t0508468_e0510095_b07107_c20190403051300429213_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI02_j01_d20190403_t0501398_e0503043_b07107_c20190403051300766463_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI05_j01_d20190403_t0501398_e0503043_b07107_c20190403051301031117_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVDNB_j01_d20190403_t0504295_e0505540_b07107_c20190403051303493844_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM12_j01_d20190403_t0501398_e0503043_b07107_c20190403051301461151_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM09_j01_d20190403_t0507210_e0508456_b07107_c20190403051305683314_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI05_j01_d20190403_t0510108_e0511353_b07107_c20190403051251455196_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI04_j01_d20190403_t0501398_e0503043_b07107_c20190403051300943917_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM06_j01_d20190403_t0507210_e0508456_b07107_c20190403051305608749_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM11_j01_d20190403_t0504295_e0505540_b07107_c20190403051304407002_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM07_j01_d20190403_t0507210_e0508456_b07107_c20190403051305622890_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI05_j01_d20190403_t0503056_e0504283_b07107_c20190403051307237495_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI04_j01_d20190403_t0508468_e0510095_b07107_c20190403051300154162_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GDNBO_j01_d20190403_t0505553_e0507198_b07107_c20190403051239742066_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM04_j01_d20190403_t0504295_e0505540_b07107_c20190403051304197857_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM01_j01_d20190403_t0504295_e0505540_b07107_c20190403051304110439_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM06_j01_d20190403_t0501398_e0503043_b07107_c20190403051301280768_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVDNB_j01_d20190403_t0505553_e0507198_b07107_c20190403051259055834_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM02_j01_d20190403_t0504295_e0505540_b07107_c20190403051304137593_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM10_j01_d20190403_t0505553_e0507198_b07107_c20190403051259889682_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM08_j01_d20190403_t0503056_e0504283_b07107_c20190403051307520196_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVDNB_j01_d20190403_t0507210_e0508456_b07107_c20190403051305171262_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM04_j01_d20190403_t0510108_e0511353_b07107_c20190403051251609871_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM04_j01_d20190403_t0508468_e0510095_b07107_c20190403051300382194_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM16_j01_d20190403_t0510108_e0511353_b07107_c20190403051251945105_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM15_j01_d20190403_t0508468_e0510095_b07107_c20190403051300685046_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM07_j01_d20190403_t0501398_e0503043_b07107_c20190403051301858353_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM10_j01_d20190403_t0508468_e0510095_b07107_c20190403051300508755_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM16_j01_d20190403_t0507210_e0508456_b07107_c20190403051305866022_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM14_j01_d20190403_t0504295_e0505540_b07107_c20190403051304496111_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM08_j01_d20190403_t0501398_e0503043_b07107_c20190403051301344861_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM14_j01_d20190403_t0510108_e0511353_b07107_c20190403051251889673_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM13_j01_d20190403_t0505553_e0507198_b07107_c20190403051259968643_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM09_j01_d20190403_t0503056_e0504283_b07107_c20190403051307545201_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM04_j01_d20190403_t0505553_e0507198_b07107_c20190403051259707977_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI01_j01_d20190403_t0504295_e0505540_b07107_c20190403051303704327_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GITCO_j01_d20190403_t0505553_e0507198_b07107_c20190403051239927122_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GMTCO_j01_d20190403_t0507210_e0508456_b07107_c20190403051248377440_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI01_j01_d20190403_t0508468_e0510095_b07107_c20190403051300107659_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM16_j01_d20190403_t0503056_e0504283_b07107_c20190403051307731100_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI01_j01_d20190403_t0503056_e0504283_b07107_c20190403051306907067_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI05_j01_d20190403_t0508468_e0510095_b07107_c20190403051300225702_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM01_j01_d20190403_t0507210_e0508456_b07107_c20190403051305535545_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI04_j01_d20190403_t0504295_e0505540_b07107_c20190403051303950336_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM02_j01_d20190403_t0503056_e0504283_b07107_c20190403051307345078_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM09_j01_d20190403_t0501398_e0503043_b07107_c20190403051301373985_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM13_j01_d20190403_t0504295_e0505540_b07107_c20190403051304456636_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM04_j01_d20190403_t0503056_e0504283_b07107_c20190403051307401933_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM03_j01_d20190403_t0507210_e0508456_b07107_c20190403051305562834_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVDNB_j01_d20190403_t0501398_e0503043_b07107_c20190403051300448113_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM14_j01_d20190403_t0503056_e0504283_b07107_c20190403051307678891_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI02_j01_d20190403_t0507210_e0508456_b07107_c20190403051305372970_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM05_j01_d20190403_t0510108_e0511353_b07107_c20190403051251636668_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM08_j01_d20190403_t0510108_e0511353_b07107_c20190403051251725014_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI05_j01_d20190403_t0505553_e0507198_b07107_c20190403051259556293_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GITCO_j01_d20190403_t0504295_e0505540_b07107_c20190403051241490581_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM02_j01_d20190403_t0510108_e0511353_b07107_c20190403051251562222_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM14_j01_d20190403_t0508468_e0510095_b07107_c20190403051300655294_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GDNBO_j01_d20190403_t0510108_e0511353_b07107_c20190403051236200342_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM02_j01_d20190403_t0505553_e0507198_b07107_c20190403051259647788_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM09_j01_d20190403_t0510108_e0511353_b07107_c20190403051251752383_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI01_j01_d20190403_t0510108_e0511353_b07107_c20190403051251328506_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM06_j01_d20190403_t0504295_e0505540_b07107_c20190403051304266685_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM05_j01_d20190403_t0507210_e0508456_b07107_c20190403051305595243_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM12_j01_d20190403_t0508468_e0510095_b07107_c20190403051300592248_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM02_j01_d20190403_t0501398_e0503043_b07107_c20190403051301147551_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI04_j01_d20190403_t0510108_e0511353_b07107_c20190403051251374835_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI04_j01_d20190403_t0507210_e0508456_b07107_c20190403051305403578_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GMTCO_j01_d20190403_t0501398_e0503043_b07107_c20190403051240472387_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM03_j01_d20190403_t0504295_e0505540_b07107_c20190403051304164256_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM02_j01_d20190403_t0508468_e0510095_b07107_c20190403051300345704_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM03_j01_d20190403_t0505553_e0507198_b07107_c20190403051259675005_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM14_j01_d20190403_t0505553_e0507198_b07107_c20190403051300005807_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM15_j01_d20190403_t0510108_e0511353_b07107_c20190403051251917175_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI02_j01_d20190403_t0510108_e0511353_b07107_c20190403051251346945_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI02_j01_d20190403_t0508468_e0510095_b07107_c20190403051300122269_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GDNBO_j01_d20190403_t0508468_e0510095_b07107_c20190403051243826382_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM12_j01_d20190403_t0504295_e0505540_b07107_c20190403051304433044_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM16_j01_d20190403_t0508468_e0510095_b07107_c20190403051300713898_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GITCO_j01_d20190403_t0508468_e0510095_b07107_c20190403051244010745_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GDNBO_j01_d20190403_t0504295_e0505540_b07107_c20190403051241303817_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM03_j01_d20190403_t0508468_e0510095_b07107_c20190403051300365538_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVDNB_j01_d20190403_t0503056_e0504283_b07107_c20190403051306692860_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM01_j01_d20190403_t0501398_e0503043_b07107_c20190403051301118486_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM13_j01_d20190403_t0507210_e0508456_b07107_c20190403051305774305_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM07_j01_d20190403_t0504295_e0505540_b07107_c20190403051304814003_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM13_j01_d20190403_t0508468_e0510095_b07107_c20190403051300616341_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM04_j01_d20190403_t0501398_e0503043_b07107_c20190403051301211943_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM05_j01_d20190403_t0508468_e0510095_b07107_c20190403051300397837_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM08_j01_d20190403_t0507210_e0508456_b07107_c20190403051305656929_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM15_j01_d20190403_t0507210_e0508456_b07107_c20190403051305838778_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GITCO_j01_d20190403_t0501398_e0503043_b07107_c20190403051239585732_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM12_j01_d20190403_t0507210_e0508456_b07107_c20190403051305751006_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI01_j01_d20190403_t0501398_e0503043_b07107_c20190403051300677875_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI02_j01_d20190403_t0503056_e0504283_b07107_c20190403051306990796_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM08_j01_d20190403_t0505553_e0507198_b07107_c20190403051259834866_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM01_j01_d20190403_t0508468_e0510095_b07107_c20190403051300320835_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM16_j01_d20190403_t0505553_e0507198_b07107_c20190403051300064457_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GITCO_j01_d20190403_t0507210_e0508456_b07107_c20190403051247260191_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM13_j01_d20190403_t0501398_e0503043_b07107_c20190403051301485727_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM10_j01_d20190403_t0504295_e0505540_b07107_c20190403051304380853_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GMTCO_j01_d20190403_t0508468_e0510095_b07107_c20190403051244947996_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM10_j01_d20190403_t0510108_e0511353_b07107_c20190403051251767691_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM08_j01_d20190403_t0504295_e0505540_b07107_c20190403051304327915_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM01_j01_d20190403_t0505553_e0507198_b07107_c20190403051259620487_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM09_j01_d20190403_t0504295_e0505540_b07107_c20190403051304354764_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM09_j01_d20190403_t0508468_e0510095_b07107_c20190403051300492387_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM01_j01_d20190403_t0510108_e0511353_b07107_c20190403051251538042_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM01_j01_d20190403_t0503056_e0504283_b07107_c20190403051307319597_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI01_j01_d20190403_t0507210_e0508456_b07107_c20190403051305358367_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM14_j01_d20190403_t0501398_e0503043_b07107_c20190403051301526879_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GMTCO_j01_d20190403_t0505553_e0507198_b07107_c20190403051240796141_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM10_j01_d20190403_t0503056_e0504283_b07107_c20190403051307570259_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GITCO_j01_d20190403_t0503056_e0504283_b07107_c20190403051244978504_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM02_j01_d20190403_t0507210_e0508456_b07107_c20190403051305548999_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI05_j01_d20190403_t0507210_e0508456_b07107_c20190403051305469953_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM05_j01_d20190403_t0501398_e0503043_b07107_c20190403051301246298_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM04_j01_d20190403_t0507210_e0508456_b07107_c20190403051305581523_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GMTCO_j01_d20190403_t0503056_e0504283_b07107_c20190403051246120863_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GMTCO_j01_d20190403_t0510108_e0511353_b07107_c20190403051237331161_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GDNBO_j01_d20190403_t0501398_e0503043_b07107_c20190403051239403562_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM08_j01_d20190403_t0508468_e0510095_b07107_c20190403051300464625_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM15_j01_d20190403_t0505553_e0507198_b07107_c20190403051300034801_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI01_j01_d20190403_t0505553_e0507198_b07107_c20190403051259296242_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI03_j01_d20190403_t0504295_e0505540_b07107_c20190403051303867972_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM16_j01_d20190403_t0501398_e0503043_b07107_c20190403051301585913_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI03_j01_d20190403_t0507210_e0508456_b07107_c20190403051305388022_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI03_j01_d20190403_t0508468_e0510095_b07107_c20190403051300138739_cspp_dev.h5']
[DEBUG: 2019-04-03 06:58:21 : satpy.composites] Looking for composites config file viirs.yaml
[DEBUG: 2019-04-03 06:58:21 : satpy.composites] Looking for composites config file visir.yaml
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.yaml_reader] No coordinates found for DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.yaml_reader] No coordinates found for DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
[DEBUG: 2019-04-03 06:58:22 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
[DEBUG: 2019-04-03 06:58:22 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
[DEBUG: 2019-04-03 06:58:22 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
[DEBUG: 2019-04-03 06:58:22 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
[DEBUG: 2019-04-03 06:58:22 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
[DEBUG: 2019-04-03 06:58:22 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
[DEBUG: 2019-04-03 06:58:22 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
Traceback (most recent call last):
File "test-viirs.py", line 9, in <module>
global_scene.load(['overview'])
File "/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/scene.py", line 896, in load
self.read(**kwargs)
File "/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/scene.py", line 804, in read
return self._read_datasets(nodes, **kwargs)
File "/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/scene.py", line 671, in _read_datasets
new_datasets = reader_instance.load(ds_ids, **kwargs)
File "/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/readers/yaml_reader.py", line 818, in load
ds = self._load_dataset_with_area(dsid, coords)
File "/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/readers/yaml_reader.py", line 755, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid, **slice_kwargs)
File "/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/readers/yaml_reader.py", line 609, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/readers/yaml_reader.py", line 581, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/readers/viirs_sdr.py", line 354, in get_dataset
data = self.scale_swath_data(data, factors)
File "/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/readers/viirs_sdr.py", line 251, in scale_swath_data
factors = xr.DataArray(da.repeat(factors, gran_size, axis=0),
File "/software/pytroll/lib/python3.5/site-packages/dask/array/creation.py", line 737, in repeat
return concatenate(out, axis=axis)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 2709, in concatenate
ndim = len(seq[0].shape)
IndexError: list index out of range
|
IndexError
|
def get_dataset(self, dataset_id, ds_info):
"""Get the dataset corresponding to *dataset_id*.
The size of the return DataArray will be dependent on the number of
scans actually sensed, and not necessarily the regular 768 scanlines
that the file contains for each granule. To that end, the number of
scans for each granule is read from:
`Data_Products/...Gran_x/N_Number_Of_Scans`.
"""
dataset_group = [
ds_group for ds_group in ds_info["dataset_groups"] if ds_group in self.datasets
]
if not dataset_group:
return
else:
dataset_group = dataset_group[0]
ds_info["dataset_group"] = dataset_group
var_path = self._generate_file_key(dataset_id, ds_info)
factor_var_path = ds_info.get("factors_key", var_path + "Factors")
data = self.concatenate_dataset(dataset_group, var_path)
data = self.mask_fill_values(data, ds_info)
factors = self.get(factor_var_path)
if factors is None:
LOG.debug("No scaling factors found for %s", dataset_id)
file_units = self.get_file_units(dataset_id, ds_info)
output_units = ds_info.get("units", file_units)
factors = self.adjust_scaling_factors(factors, file_units, output_units)
if factors is not None:
data = self.scale_swath_data(data, factors)
i = getattr(data, "attrs", {})
i.update(ds_info)
i.update(
{
"units": ds_info.get("units", file_units),
"platform_name": self.platform_name,
"sensor": self.sensor_name,
"start_orbit": self.start_orbit_number,
"end_orbit": self.end_orbit_number,
}
)
i.update(dataset_id.to_dict())
data.attrs.update(i)
return data
|
def get_dataset(self, dataset_id, ds_info):
dataset_group = [
ds_group for ds_group in ds_info["dataset_groups"] if ds_group in self.datasets
]
if not dataset_group:
return
else:
dataset_group = dataset_group[0]
ds_info["dataset_group"] = dataset_group
var_path = self._generate_file_key(dataset_id, ds_info)
factor_var_path = ds_info.get("factors_key", var_path + "Factors")
data = self.concatenate_dataset(dataset_group, var_path)
data = self.mask_fill_values(data, ds_info)
factors = self.get(factor_var_path)
if factors is None:
LOG.debug("No scaling factors found for %s", dataset_id)
file_units = self.get_file_units(dataset_id, ds_info)
output_units = ds_info.get("units", file_units)
factors = self.adjust_scaling_factors(factors, file_units, output_units)
if factors is not None:
data = self.scale_swath_data(data, factors)
i = getattr(data, "attrs", {})
i.update(ds_info)
i.update(
{
"units": ds_info.get("units", file_units),
"platform_name": self.platform_name,
"sensor": self.sensor_name,
"start_orbit": self.start_orbit_number,
"end_orbit": self.end_orbit_number,
}
)
i.update(dataset_id.to_dict())
data.attrs.update(i)
return data
|
https://github.com/pytroll/satpy/issues/692
|
[DEBUG: 2019-04-03 06:58:17 : satpy.readers] Reading ['/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/etc/readers/viirs_sdr.yaml']
[DEBUG: 2019-04-03 06:58:17 : satpy.scene] Setting 'PPP_CONFIG_DIR' to '/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/etc'
[DEBUG: 2019-04-03 06:58:17 : satpy.readers] Reading ['/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/etc/readers/viirs_sdr.yaml']
[DEBUG: 2019-04-03 06:58:18 : satpy.readers.yaml_reader] Assigning to viirs_sdr: ['/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI03_j01_d20190403_t0505553_e0507198_b07107_c20190403051259425344_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM11_j01_d20190403_t0510108_e0511353_b07107_c20190403051251794740_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GDNBO_j01_d20190403_t0503056_e0504283_b07107_c20190403051244716616_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI04_j01_d20190403_t0505553_e0507198_b07107_c20190403051259492004_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM15_j01_d20190403_t0501398_e0503043_b07107_c20190403051301556141_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM11_j01_d20190403_t0508468_e0510095_b07107_c20190403051300551821_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM11_j01_d20190403_t0503056_e0504283_b07107_c20190403051307595895_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM13_j01_d20190403_t0503056_e0504283_b07107_c20190403051307642640_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM07_j01_d20190403_t0510108_e0511353_b07107_c20190403051251677575_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM07_j01_d20190403_t0505553_e0507198_b07107_c20190403051300398651_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM03_j01_d20190403_t0503056_e0504283_b07107_c20190403051307370911_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM11_j01_d20190403_t0507210_e0508456_b07107_c20190403051305723932_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM05_j01_d20190403_t0505553_e0507198_b07107_c20190403051259741226_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM09_j01_d20190403_t0505553_e0507198_b07107_c20190403051259862143_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GDNBO_j01_d20190403_t0507210_e0508456_b07107_c20190403051247011494_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM15_j01_d20190403_t0503056_e0504283_b07107_c20190403051307704845_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM11_j01_d20190403_t0505553_e0507198_b07107_c20190403051259917866_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI02_j01_d20190403_t0505553_e0507198_b07107_c20190403051259360858_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM13_j01_d20190403_t0510108_e0511353_b07107_c20190403051251844644_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI05_j01_d20190403_t0504295_e0505540_b07107_c20190403051304029618_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM06_j01_d20190403_t0503056_e0504283_b07107_c20190403051307464104_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI02_j01_d20190403_t0504295_e0505540_b07107_c20190403051303787382_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVDNB_j01_d20190403_t0510108_e0511353_b07107_c20190403051251108544_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM14_j01_d20190403_t0507210_e0508456_b07107_c20190403051305812249_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM16_j01_d20190403_t0504295_e0505540_b07107_c20190403051304551878_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI04_j01_d20190403_t0503056_e0504283_b07107_c20190403051307156347_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM10_j01_d20190403_t0507210_e0508456_b07107_c20190403051305697554_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM06_j01_d20190403_t0508468_e0510095_b07107_c20190403051300412808_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM05_j01_d20190403_t0503056_e0504283_b07107_c20190403051307432660_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI03_j01_d20190403_t0501398_e0503043_b07107_c20190403051300855808_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM03_j01_d20190403_t0510108_e0511353_b07107_c20190403051251585485_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM11_j01_d20190403_t0501398_e0503043_b07107_c20190403051301432082_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI03_j01_d20190403_t0510108_e0511353_b07107_c20190403051251360743_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM06_j01_d20190403_t0505553_e0507198_b07107_c20190403051259774339_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM15_j01_d20190403_t0504295_e0505540_b07107_c20190403051304523955_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM12_j01_d20190403_t0510108_e0511353_b07107_c20190403051251821430_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM10_j01_d20190403_t0501398_e0503043_b07107_c20190403051301402718_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM12_j01_d20190403_t0503056_e0504283_b07107_c20190403051307620894_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GMTCO_j01_d20190403_t0504295_e0505540_b07107_c20190403051242352100_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI03_j01_d20190403_t0503056_e0504283_b07107_c20190403051307073635_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM07_j01_d20190403_t0503056_e0504283_b07107_c20190403051307983724_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM12_j01_d20190403_t0505553_e0507198_b07107_c20190403051259945499_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM06_j01_d20190403_t0510108_e0511353_b07107_c20190403051251656625_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVDNB_j01_d20190403_t0508468_e0510095_b07107_c20190403051259897777_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM05_j01_d20190403_t0504295_e0505540_b07107_c20190403051304231460_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM03_j01_d20190403_t0501398_e0503043_b07107_c20190403051301176731_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GITCO_j01_d20190403_t0510108_e0511353_b07107_c20190403051236377179_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM07_j01_d20190403_t0508468_e0510095_b07107_c20190403051300429213_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI02_j01_d20190403_t0501398_e0503043_b07107_c20190403051300766463_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI05_j01_d20190403_t0501398_e0503043_b07107_c20190403051301031117_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVDNB_j01_d20190403_t0504295_e0505540_b07107_c20190403051303493844_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM12_j01_d20190403_t0501398_e0503043_b07107_c20190403051301461151_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM09_j01_d20190403_t0507210_e0508456_b07107_c20190403051305683314_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI05_j01_d20190403_t0510108_e0511353_b07107_c20190403051251455196_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI04_j01_d20190403_t0501398_e0503043_b07107_c20190403051300943917_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM06_j01_d20190403_t0507210_e0508456_b07107_c20190403051305608749_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM11_j01_d20190403_t0504295_e0505540_b07107_c20190403051304407002_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM07_j01_d20190403_t0507210_e0508456_b07107_c20190403051305622890_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI05_j01_d20190403_t0503056_e0504283_b07107_c20190403051307237495_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI04_j01_d20190403_t0508468_e0510095_b07107_c20190403051300154162_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GDNBO_j01_d20190403_t0505553_e0507198_b07107_c20190403051239742066_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM04_j01_d20190403_t0504295_e0505540_b07107_c20190403051304197857_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM01_j01_d20190403_t0504295_e0505540_b07107_c20190403051304110439_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM06_j01_d20190403_t0501398_e0503043_b07107_c20190403051301280768_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVDNB_j01_d20190403_t0505553_e0507198_b07107_c20190403051259055834_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM02_j01_d20190403_t0504295_e0505540_b07107_c20190403051304137593_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM10_j01_d20190403_t0505553_e0507198_b07107_c20190403051259889682_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM08_j01_d20190403_t0503056_e0504283_b07107_c20190403051307520196_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVDNB_j01_d20190403_t0507210_e0508456_b07107_c20190403051305171262_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM04_j01_d20190403_t0510108_e0511353_b07107_c20190403051251609871_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM04_j01_d20190403_t0508468_e0510095_b07107_c20190403051300382194_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM16_j01_d20190403_t0510108_e0511353_b07107_c20190403051251945105_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM15_j01_d20190403_t0508468_e0510095_b07107_c20190403051300685046_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM07_j01_d20190403_t0501398_e0503043_b07107_c20190403051301858353_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM10_j01_d20190403_t0508468_e0510095_b07107_c20190403051300508755_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM16_j01_d20190403_t0507210_e0508456_b07107_c20190403051305866022_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM14_j01_d20190403_t0504295_e0505540_b07107_c20190403051304496111_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM08_j01_d20190403_t0501398_e0503043_b07107_c20190403051301344861_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM14_j01_d20190403_t0510108_e0511353_b07107_c20190403051251889673_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM13_j01_d20190403_t0505553_e0507198_b07107_c20190403051259968643_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM09_j01_d20190403_t0503056_e0504283_b07107_c20190403051307545201_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM04_j01_d20190403_t0505553_e0507198_b07107_c20190403051259707977_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI01_j01_d20190403_t0504295_e0505540_b07107_c20190403051303704327_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GITCO_j01_d20190403_t0505553_e0507198_b07107_c20190403051239927122_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GMTCO_j01_d20190403_t0507210_e0508456_b07107_c20190403051248377440_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI01_j01_d20190403_t0508468_e0510095_b07107_c20190403051300107659_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM16_j01_d20190403_t0503056_e0504283_b07107_c20190403051307731100_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI01_j01_d20190403_t0503056_e0504283_b07107_c20190403051306907067_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI05_j01_d20190403_t0508468_e0510095_b07107_c20190403051300225702_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM01_j01_d20190403_t0507210_e0508456_b07107_c20190403051305535545_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI04_j01_d20190403_t0504295_e0505540_b07107_c20190403051303950336_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM02_j01_d20190403_t0503056_e0504283_b07107_c20190403051307345078_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM09_j01_d20190403_t0501398_e0503043_b07107_c20190403051301373985_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM13_j01_d20190403_t0504295_e0505540_b07107_c20190403051304456636_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM04_j01_d20190403_t0503056_e0504283_b07107_c20190403051307401933_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM03_j01_d20190403_t0507210_e0508456_b07107_c20190403051305562834_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVDNB_j01_d20190403_t0501398_e0503043_b07107_c20190403051300448113_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM14_j01_d20190403_t0503056_e0504283_b07107_c20190403051307678891_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI02_j01_d20190403_t0507210_e0508456_b07107_c20190403051305372970_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM05_j01_d20190403_t0510108_e0511353_b07107_c20190403051251636668_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM08_j01_d20190403_t0510108_e0511353_b07107_c20190403051251725014_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI05_j01_d20190403_t0505553_e0507198_b07107_c20190403051259556293_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GITCO_j01_d20190403_t0504295_e0505540_b07107_c20190403051241490581_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM02_j01_d20190403_t0510108_e0511353_b07107_c20190403051251562222_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM14_j01_d20190403_t0508468_e0510095_b07107_c20190403051300655294_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GDNBO_j01_d20190403_t0510108_e0511353_b07107_c20190403051236200342_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM02_j01_d20190403_t0505553_e0507198_b07107_c20190403051259647788_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM09_j01_d20190403_t0510108_e0511353_b07107_c20190403051251752383_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI01_j01_d20190403_t0510108_e0511353_b07107_c20190403051251328506_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM06_j01_d20190403_t0504295_e0505540_b07107_c20190403051304266685_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM05_j01_d20190403_t0507210_e0508456_b07107_c20190403051305595243_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM12_j01_d20190403_t0508468_e0510095_b07107_c20190403051300592248_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM02_j01_d20190403_t0501398_e0503043_b07107_c20190403051301147551_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI04_j01_d20190403_t0510108_e0511353_b07107_c20190403051251374835_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI04_j01_d20190403_t0507210_e0508456_b07107_c20190403051305403578_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GMTCO_j01_d20190403_t0501398_e0503043_b07107_c20190403051240472387_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM03_j01_d20190403_t0504295_e0505540_b07107_c20190403051304164256_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM02_j01_d20190403_t0508468_e0510095_b07107_c20190403051300345704_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM03_j01_d20190403_t0505553_e0507198_b07107_c20190403051259675005_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM14_j01_d20190403_t0505553_e0507198_b07107_c20190403051300005807_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM15_j01_d20190403_t0510108_e0511353_b07107_c20190403051251917175_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI02_j01_d20190403_t0510108_e0511353_b07107_c20190403051251346945_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI02_j01_d20190403_t0508468_e0510095_b07107_c20190403051300122269_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GDNBO_j01_d20190403_t0508468_e0510095_b07107_c20190403051243826382_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM12_j01_d20190403_t0504295_e0505540_b07107_c20190403051304433044_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM16_j01_d20190403_t0508468_e0510095_b07107_c20190403051300713898_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GITCO_j01_d20190403_t0508468_e0510095_b07107_c20190403051244010745_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GDNBO_j01_d20190403_t0504295_e0505540_b07107_c20190403051241303817_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM03_j01_d20190403_t0508468_e0510095_b07107_c20190403051300365538_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVDNB_j01_d20190403_t0503056_e0504283_b07107_c20190403051306692860_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM01_j01_d20190403_t0501398_e0503043_b07107_c20190403051301118486_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM13_j01_d20190403_t0507210_e0508456_b07107_c20190403051305774305_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM07_j01_d20190403_t0504295_e0505540_b07107_c20190403051304814003_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM13_j01_d20190403_t0508468_e0510095_b07107_c20190403051300616341_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM04_j01_d20190403_t0501398_e0503043_b07107_c20190403051301211943_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM05_j01_d20190403_t0508468_e0510095_b07107_c20190403051300397837_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM08_j01_d20190403_t0507210_e0508456_b07107_c20190403051305656929_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM15_j01_d20190403_t0507210_e0508456_b07107_c20190403051305838778_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GITCO_j01_d20190403_t0501398_e0503043_b07107_c20190403051239585732_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM12_j01_d20190403_t0507210_e0508456_b07107_c20190403051305751006_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI01_j01_d20190403_t0501398_e0503043_b07107_c20190403051300677875_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI02_j01_d20190403_t0503056_e0504283_b07107_c20190403051306990796_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM08_j01_d20190403_t0505553_e0507198_b07107_c20190403051259834866_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM01_j01_d20190403_t0508468_e0510095_b07107_c20190403051300320835_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM16_j01_d20190403_t0505553_e0507198_b07107_c20190403051300064457_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GITCO_j01_d20190403_t0507210_e0508456_b07107_c20190403051247260191_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM13_j01_d20190403_t0501398_e0503043_b07107_c20190403051301485727_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM10_j01_d20190403_t0504295_e0505540_b07107_c20190403051304380853_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GMTCO_j01_d20190403_t0508468_e0510095_b07107_c20190403051244947996_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM10_j01_d20190403_t0510108_e0511353_b07107_c20190403051251767691_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM08_j01_d20190403_t0504295_e0505540_b07107_c20190403051304327915_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM01_j01_d20190403_t0505553_e0507198_b07107_c20190403051259620487_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM09_j01_d20190403_t0504295_e0505540_b07107_c20190403051304354764_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM09_j01_d20190403_t0508468_e0510095_b07107_c20190403051300492387_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM01_j01_d20190403_t0510108_e0511353_b07107_c20190403051251538042_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM01_j01_d20190403_t0503056_e0504283_b07107_c20190403051307319597_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI01_j01_d20190403_t0507210_e0508456_b07107_c20190403051305358367_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM14_j01_d20190403_t0501398_e0503043_b07107_c20190403051301526879_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GMTCO_j01_d20190403_t0505553_e0507198_b07107_c20190403051240796141_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM10_j01_d20190403_t0503056_e0504283_b07107_c20190403051307570259_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GITCO_j01_d20190403_t0503056_e0504283_b07107_c20190403051244978504_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM02_j01_d20190403_t0507210_e0508456_b07107_c20190403051305548999_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI05_j01_d20190403_t0507210_e0508456_b07107_c20190403051305469953_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM05_j01_d20190403_t0501398_e0503043_b07107_c20190403051301246298_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM04_j01_d20190403_t0507210_e0508456_b07107_c20190403051305581523_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GMTCO_j01_d20190403_t0503056_e0504283_b07107_c20190403051246120863_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GMTCO_j01_d20190403_t0510108_e0511353_b07107_c20190403051237331161_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/GDNBO_j01_d20190403_t0501398_e0503043_b07107_c20190403051239403562_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM08_j01_d20190403_t0508468_e0510095_b07107_c20190403051300464625_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM15_j01_d20190403_t0505553_e0507198_b07107_c20190403051300034801_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI01_j01_d20190403_t0505553_e0507198_b07107_c20190403051259296242_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI03_j01_d20190403_t0504295_e0505540_b07107_c20190403051303867972_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVM16_j01_d20190403_t0501398_e0503043_b07107_c20190403051301585913_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI03_j01_d20190403_t0507210_e0508456_b07107_c20190403051305388022_cspp_dev.h5', '/data/CSPP/noaa20-viirs-sdr/20190403-050138/SVI03_j01_d20190403_t0508468_e0510095_b07107_c20190403051300138739_cspp_dev.h5']
[DEBUG: 2019-04-03 06:58:21 : satpy.composites] Looking for composites config file viirs.yaml
[DEBUG: 2019-04-03 06:58:21 : satpy.composites] Looking for composites config file visir.yaml
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.yaml_reader] No coordinates found for DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_longitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.yaml_reader] No coordinates found for DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] No scaling factors found for DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Unknown units for file key 'DatasetID(name='m_latitude', wavelength=None, resolution=742, polarization=None, calibration=None, level=None, modifiers=())'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
[DEBUG: 2019-04-03 06:58:21 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
[DEBUG: 2019-04-03 06:58:22 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
[DEBUG: 2019-04-03 06:58:22 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
[DEBUG: 2019-04-03 06:58:22 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
[DEBUG: 2019-04-03 06:58:22 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
[DEBUG: 2019-04-03 06:58:22 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
[DEBUG: 2019-04-03 06:58:22 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
[DEBUG: 2019-04-03 06:58:22 : satpy.readers.viirs_sdr] Adjusting scaling factors to convert '1' to '%'
Traceback (most recent call last):
File "test-viirs.py", line 9, in <module>
global_scene.load(['overview'])
File "/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/scene.py", line 896, in load
self.read(**kwargs)
File "/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/scene.py", line 804, in read
return self._read_datasets(nodes, **kwargs)
File "/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/scene.py", line 671, in _read_datasets
new_datasets = reader_instance.load(ds_ids, **kwargs)
File "/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/readers/yaml_reader.py", line 818, in load
ds = self._load_dataset_with_area(dsid, coords)
File "/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/readers/yaml_reader.py", line 755, in _load_dataset_with_area
ds = self._load_dataset_data(file_handlers, dsid, **slice_kwargs)
File "/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/readers/yaml_reader.py", line 609, in _load_dataset_data
proj = self._load_dataset(dsid, ds_info, file_handlers)
File "/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/readers/yaml_reader.py", line 581, in _load_dataset
projectable = fh.get_dataset(dsid, ds_info)
File "/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/readers/viirs_sdr.py", line 354, in get_dataset
data = self.scale_swath_data(data, factors)
File "/software/pytroll/lib-satpy-grid/lib/python3.5/site-packages/satpy-0.13.0+83.ga6479be-py3.5.egg/satpy/readers/viirs_sdr.py", line 251, in scale_swath_data
factors = xr.DataArray(da.repeat(factors, gran_size, axis=0),
File "/software/pytroll/lib/python3.5/site-packages/dask/array/creation.py", line 737, in repeat
return concatenate(out, axis=axis)
File "/software/pytroll/lib/python3.5/site-packages/dask/array/core.py", line 2709, in concatenate
ndim = len(seq[0].shape)
IndexError: list index out of range
|
IndexError
|
def read_mda(attribute):
"""Read HDFEOS metadata and return a dict with all the key/value pairs."""
lines = attribute.split("\n")
mda = {}
current_dict = mda
path = []
prev_line = None
for line in lines:
if not line:
continue
if line == "END":
break
if prev_line:
line = prev_line + line
key, val = line.split("=")
key = key.strip()
val = val.strip()
try:
val = eval(val)
except NameError:
pass
except SyntaxError:
prev_line = line
continue
prev_line = None
if key in ["GROUP", "OBJECT"]:
new_dict = {}
path.append(val)
current_dict[val] = new_dict
current_dict = new_dict
elif key in ["END_GROUP", "END_OBJECT"]:
if val != path[-1]:
raise SyntaxError
path = path[:-1]
current_dict = mda
for item in path:
current_dict = current_dict[item]
elif key in ["CLASS", "NUM_VAL"]:
pass
else:
current_dict[key] = val
return mda
|
def read_mda(self, attribute):
lines = attribute.split("\n")
mda = {}
current_dict = mda
path = []
for line in lines:
if not line:
continue
if line == "END":
break
key, val = line.split("=")
key = key.strip()
val = val.strip()
try:
val = eval(val)
except NameError:
pass
if key in ["GROUP", "OBJECT"]:
new_dict = {}
path.append(val)
current_dict[val] = new_dict
current_dict = new_dict
elif key in ["END_GROUP", "END_OBJECT"]:
if val != path[-1]:
raise SyntaxError
path = path[:-1]
current_dict = mda
for item in path:
current_dict = current_dict[item]
elif key in ["CLASS", "NUM_VAL"]:
pass
else:
current_dict[key] = val
return mda
|
https://github.com/pytroll/satpy/issues/626
|
[DEBUG: 2019-02-20 20:07:05 : satpy.readers] Reading ['/home/aprata/anaconda3/envs/satpy-env/lib/python3.6/site-packages/satpy/etc/readers/modis_l1b.yaml']
[DEBUG: 2019-02-20 20:07:05 : satpy.scene] Setting 'PPP_CONFIG_DIR' to '/home/aprata/anaconda3/envs/satpy-env/lib/python3.6/site-packages/satpy/etc'
[DEBUG: 2019-02-20 20:07:05 : satpy.readers] Reading ['/home/aprata/anaconda3/envs/satpy-env/lib/python3.6/site-packages/satpy/etc/readers/modis_l1b.yaml']
Reading files...
[DEBUG: 2019-02-20 20:07:05 : satpy.readers.yaml_reader] Assigning to modis_l1b: ['/home/aprata/satellite/etna/modis_aqua/data/20190511225/MYD02QKM.A2019051.1225.061.NRT.hdf', '/home/aprata/satellite/etna/modis_aqua/data/20190511225/MYD02HKM.A2019051.1225.061.NRT.hdf', '/home/aprata/satellite/etna/modis_aqua/data/20190511225/MYD021KM.A2019051.1225.061.NRT.hdf', '/home/aprata/satellite/etna/modis_aqua/data/20190511225/MYD03.A2019051.1225.061.NRT.hdf']
Traceback (most recent call last):
File "/home/aprata/anaconda3/envs/satpy-env/lib/python3.6/site-packages/IPython/core/interactiveshell.py", line 2683, in safe_execfile
self.compile if shell_futures else None)
File "/home/aprata/anaconda3/envs/satpy-env/lib/python3.6/site-packages/IPython/utils/py3compat.py", line 188, in execfile
exec(compiler(f.read(), fname, 'exec'), glob, loc)
File "/home/aprata/PycharmProjects/etna/src/plot_modis_btd.py", line 25, in <module>
scn = Scene(filenames=files)
File "/home/aprata/anaconda3/envs/satpy-env/lib/python3.6/site-packages/satpy/scene.py", line 154, in __init__
reader_kwargs=reader_kwargs)
File "/home/aprata/anaconda3/envs/satpy-env/lib/python3.6/site-packages/satpy/scene.py", line 199, in create_reader_instances
ppp_config_dir=self.ppp_config_dir)
File "/home/aprata/anaconda3/envs/satpy-env/lib/python3.6/site-packages/satpy/readers/__init__.py", line 657, in load_readers
reader_instance.create_filehandlers(loadables)
File "/home/aprata/anaconda3/envs/satpy-env/lib/python3.6/site-packages/satpy/readers/yaml_reader.py", line 500, in create_filehandlers
filename_set)
File "/home/aprata/anaconda3/envs/satpy-env/lib/python3.6/site-packages/satpy/readers/yaml_reader.py", line 487, in new_filehandlers_for_filetype
return list(filtered_iter)
File "/home/aprata/anaconda3/envs/satpy-env/lib/python3.6/site-packages/satpy/readers/yaml_reader.py", line 460, in filter_fh_by_metadata
for filehandler in filehandlers:
File "/home/aprata/anaconda3/envs/satpy-env/lib/python3.6/site-packages/satpy/readers/yaml_reader.py", line 403, in new_filehandler_instances
yield filetype_cls(filename, filename_info, filetype_info, *req_fh)
File "/home/aprata/anaconda3/envs/satpy-env/lib/python3.6/site-packages/satpy/readers/modis_l1b.py", line 122, in __init__
HDFEOSFileReader.__init__(self, filename, filename_info, filetype_info)
File "/home/aprata/anaconda3/envs/satpy-env/lib/python3.6/site-packages/satpy/readers/modis_l1b.py", line 64, in __init__
self.metadata = self.read_mda(self.sd.attributes()['CoreMetadata.0'])
File "/home/aprata/anaconda3/envs/satpy-env/lib/python3.6/site-packages/satpy/readers/modis_l1b.py", line 96, in read_mda
val = eval(val)
File "<string>", line 1
("MYD01.61.2019-051T12:25:00.000000Z.NA.29878844.500100_1.hdf", "MYD03LUT.coeff_V6.1.4", "PM1EPHND_NRT.A2019051.1220.061.2019051125628", "PM1EPHND_NRT.A2019051.1225.061.2019051125628", "PM1EPHND_NRT.A2019051.1230.061.2019051125628", "
^
SyntaxError: EOL while scanning string literal
|
SyntaxError
|
def __init__(
self,
name=None,
filename=None,
enhancement_config=None,
base_dir=None,
tags=None,
**kwargs,
):
ImageWriter.__init__(
self,
name,
filename,
enhancement_config,
base_dir,
default_config_filename="writers/mitiff.yaml",
**kwargs,
)
self.tags = self.info.get("tags", None) if tags is None else tags
if self.tags is None:
self.tags = {}
elif not isinstance(self.tags, dict):
# if it's coming from a config file
self.tags = dict(tuple(x.split("=")) for x in self.tags.split(","))
self.mitiff_config = {}
self.translate_channel_name = {}
self.channel_order = {}
|
def __init__(self, tags=None, **kwargs):
ImageWriter.__init__(self, default_config_filename="writers/mitiff.yaml", **kwargs)
self.tags = self.info.get("tags", None) if tags is None else tags
if self.tags is None:
self.tags = {}
elif not isinstance(self.tags, dict):
# if it's coming from a config file
self.tags = dict(tuple(x.split("=")) for x in self.tags.split(","))
self.mitiff_config = {}
self.translate_channel_name = {}
self.channel_order = {}
|
https://github.com/pytroll/satpy/issues/369
|
scn.save_dataset('natural', writer='mitiff', filename='/tmp/mitiff.tif')
[DEBUG: 2018-07-16 12:29:43 : satpy.writers] Reading ['/home/lahtinep/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/etc/writers/mitiff.yaml']
[DEBUG: 2018-07-16 12:29:43 : satpy.writers.mitiff] Starting in mitiff save_dataset ...
[WARNING: 2018-07-16 12:29:43 : satpy.writers.mitiff] Unset save_dir. Use: ./
/home/lahtinep/Software/miniconda3/lib/python3.6/site-packages/pyresample/kd_tree.py:924: RuntimeWarning: invalid value encountered in sqrt
mask=mask)
[DEBUG: 2018-07-16 12:29:45 : satpy.writers.mitiff] create_opts: ./
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-48-762780f9f91a> in <module>()
----> 1 lcl.save_dataset('natural', writer='mitiff', filename='/tmp/mitiff.tif')
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/scene.py in save_dataset(self, dataset_id, filename, writer, overlay, compute, **kwargs)
1008 return writer.save_dataset(self[dataset_id], filename=filename,
1009 overlay=overlay, compute=compute,
-> 1010 **save_kwargs)
1011
1012 def save_datasets(self, writer="geotiff", datasets=None, compute=True,
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/writers/mitiff.py in save_dataset(self, dataset, filename, fill_value, compute, base_dir, **kwargs)
116
117 if compute:
--> 118 return delayed.compute()
119 return delayed
120
~/Software/miniconda3/lib/python3.6/site-packages/dask/base.py in compute(self, **kwargs)
154 dask.base.compute
155 """
--> 156 (result,) = compute(self, traverse=False, **kwargs)
157 return result
158
~/Software/miniconda3/lib/python3.6/site-packages/dask/base.py in compute(*args, **kwargs)
400 keys = [x.__dask_keys__() for x in collections]
401 postcomputes = [x.__dask_postcompute__() for x in collections]
--> 402 results = schedule(dsk, keys, **kwargs)
403 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)])
404
~/Software/miniconda3/lib/python3.6/site-packages/dask/threaded.py in get(dsk, result, cache, num_workers, **kwargs)
73 results = get_async(pool.apply_async, len(pool._pool), dsk, result,
74 cache=cache, get_id=_thread_get_id,
---> 75 pack_exception=pack_exception, **kwargs)
76
77 # Cleanup pools associated to dead threads
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in get_async(apply_async, num_workers, dsk, result, cache, get_id, rerun_exceptions_locally, pack_exception, raise_exception, callbacks, dumps, loads, **kwargs)
519 _execute_task(task, data) # Re-execute locally
520 else:
--> 521 raise_exception(exc, tb)
522 res, worker_id = loads(res_info)
523 state['cache'][key] = res
~/Software/miniconda3/lib/python3.6/site-packages/dask/compatibility.py in reraise(exc, tb)
67 if exc.__traceback__ is not tb:
68 raise exc.with_traceback(tb)
---> 69 raise exc
70
71 else:
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in execute_task(key, task_info, dumps, loads, get_id, pack_exception)
288 try:
289 task, data = loads(task_info)
--> 290 result = _execute_task(task, data)
291 id = get_id()
292 result = dumps((result, id))
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in _execute_task(arg, cache, dsk)
269 func, args = arg[0], arg[1:]
270 args2 = [_execute_task(a, cache) for a in args]
--> 271 return func(*args2)
272 elif not ishashable(arg):
273 return arg
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/writers/mitiff.py in _delayed_create(create_opts, dataset)
76 kwargs['name'] = dataset.attrs['name']
77 if 'start_time' not in kwargs:
---> 78 kwargs['start_time'] = dataset.attrs['start_time']
79 if 'sensor' not in kwargs:
80 kwargs['sensor'] = dataset.attrs['sensor']
KeyError: 'start_time'
|
KeyError
|
def save_dataset(self, dataset, filename=None, fill_value=None, compute=True, **kwargs):
LOG.debug("Starting in mitiff save_dataset ... ")
def _delayed_create(create_opts, dataset):
try:
if "platform_name" not in kwargs:
kwargs["platform_name"] = dataset.attrs["platform_name"]
if "name" not in kwargs:
kwargs["name"] = dataset.attrs["name"]
if "start_time" not in kwargs:
kwargs["start_time"] = dataset.attrs["start_time"]
if "sensor" not in kwargs:
kwargs["sensor"] = dataset.attrs["sensor"]
try:
self.mitiff_config[kwargs["sensor"]] = dataset.attrs[
"metadata_requirements"
]["config"]
self.channel_order[kwargs["sensor"]] = dataset.attrs[
"metadata_requirements"
]["order"]
self.file_pattern = dataset.attrs["metadata_requirements"][
"file_pattern"
]
except KeyError:
# For some mitiff products this info is needed, for others not.
# If needed you should know how to fix this
pass
try:
self.translate_channel_name[kwargs["sensor"]] = dataset.attrs[
"metadata_requirements"
]["translate"]
except KeyError:
# For some mitiff products this info is needed, for others not.
# If needed you should know how to fix this
pass
image_description = self._make_image_description(dataset, **kwargs)
gen_filename = filename or self.get_filename(**dataset.attrs)
LOG.info("Saving mitiff to: %s ...", gen_filename)
self._save_datasets_as_mitiff(
dataset, image_description, gen_filename, **kwargs
)
except:
raise
create_opts = ()
delayed = dask.delayed(_delayed_create)(create_opts, dataset)
if compute:
return delayed.compute()
return delayed
|
def save_dataset(
self, dataset, filename=None, fill_value=None, compute=True, base_dir=None, **kwargs
):
LOG.debug("Starting in mitiff save_dataset ... ")
def _delayed_create(create_opts, dataset):
LOG.debug("create_opts: %s", create_opts)
try:
if "platform_name" not in kwargs:
kwargs["platform_name"] = dataset.attrs["platform_name"]
if "name" not in kwargs:
kwargs["name"] = dataset.attrs["name"]
if "start_time" not in kwargs:
kwargs["start_time"] = dataset.attrs["start_time"]
if "sensor" not in kwargs:
kwargs["sensor"] = dataset.attrs["sensor"]
try:
self.mitiff_config[kwargs["sensor"]] = dataset.attrs[
"metadata_requirements"
]["config"]
self.channel_order[kwargs["sensor"]] = dataset.attrs[
"metadata_requirements"
]["order"]
self.file_pattern = dataset.attrs["metadata_requirements"][
"file_pattern"
]
except KeyError as ke:
LOG.warning(
"Something went wrong with assigning to various dicts: %s", ke
)
try:
self.translate_channel_name[kwargs["sensor"]] = dataset.attrs[
"metadata_requirements"
]["translate"]
except KeyError as ke:
LOG.warning("Something went wrong with assigning to translate: %s", ke)
image_description = self._make_image_description(dataset, **kwargs)
LOG.debug("File pattern %s", self.file_pattern)
self.filename_parser = self.create_filename_parser(create_opts)
gen_filename = filename or self.get_filename(**kwargs)
LOG.info("Saving mitiff to: %s ...", gen_filename)
self._save_datasets_as_mitiff(
dataset, image_description, gen_filename, **kwargs
)
except:
raise
save_dir = "./"
if "mitiff_dir" in kwargs:
save_dir = kwargs["mitiff_dir"]
elif "base_dir" in kwargs:
save_dir = kwargs["base_dir"]
elif base_dir:
save_dir = base_dir
else:
LOG.warning("Unset save_dir. Use: %s", save_dir)
create_opts = save_dir
delayed = dask.delayed(_delayed_create)(create_opts, dataset)
if compute:
return delayed.compute()
return delayed
|
https://github.com/pytroll/satpy/issues/369
|
scn.save_dataset('natural', writer='mitiff', filename='/tmp/mitiff.tif')
[DEBUG: 2018-07-16 12:29:43 : satpy.writers] Reading ['/home/lahtinep/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/etc/writers/mitiff.yaml']
[DEBUG: 2018-07-16 12:29:43 : satpy.writers.mitiff] Starting in mitiff save_dataset ...
[WARNING: 2018-07-16 12:29:43 : satpy.writers.mitiff] Unset save_dir. Use: ./
/home/lahtinep/Software/miniconda3/lib/python3.6/site-packages/pyresample/kd_tree.py:924: RuntimeWarning: invalid value encountered in sqrt
mask=mask)
[DEBUG: 2018-07-16 12:29:45 : satpy.writers.mitiff] create_opts: ./
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-48-762780f9f91a> in <module>()
----> 1 lcl.save_dataset('natural', writer='mitiff', filename='/tmp/mitiff.tif')
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/scene.py in save_dataset(self, dataset_id, filename, writer, overlay, compute, **kwargs)
1008 return writer.save_dataset(self[dataset_id], filename=filename,
1009 overlay=overlay, compute=compute,
-> 1010 **save_kwargs)
1011
1012 def save_datasets(self, writer="geotiff", datasets=None, compute=True,
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/writers/mitiff.py in save_dataset(self, dataset, filename, fill_value, compute, base_dir, **kwargs)
116
117 if compute:
--> 118 return delayed.compute()
119 return delayed
120
~/Software/miniconda3/lib/python3.6/site-packages/dask/base.py in compute(self, **kwargs)
154 dask.base.compute
155 """
--> 156 (result,) = compute(self, traverse=False, **kwargs)
157 return result
158
~/Software/miniconda3/lib/python3.6/site-packages/dask/base.py in compute(*args, **kwargs)
400 keys = [x.__dask_keys__() for x in collections]
401 postcomputes = [x.__dask_postcompute__() for x in collections]
--> 402 results = schedule(dsk, keys, **kwargs)
403 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)])
404
~/Software/miniconda3/lib/python3.6/site-packages/dask/threaded.py in get(dsk, result, cache, num_workers, **kwargs)
73 results = get_async(pool.apply_async, len(pool._pool), dsk, result,
74 cache=cache, get_id=_thread_get_id,
---> 75 pack_exception=pack_exception, **kwargs)
76
77 # Cleanup pools associated to dead threads
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in get_async(apply_async, num_workers, dsk, result, cache, get_id, rerun_exceptions_locally, pack_exception, raise_exception, callbacks, dumps, loads, **kwargs)
519 _execute_task(task, data) # Re-execute locally
520 else:
--> 521 raise_exception(exc, tb)
522 res, worker_id = loads(res_info)
523 state['cache'][key] = res
~/Software/miniconda3/lib/python3.6/site-packages/dask/compatibility.py in reraise(exc, tb)
67 if exc.__traceback__ is not tb:
68 raise exc.with_traceback(tb)
---> 69 raise exc
70
71 else:
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in execute_task(key, task_info, dumps, loads, get_id, pack_exception)
288 try:
289 task, data = loads(task_info)
--> 290 result = _execute_task(task, data)
291 id = get_id()
292 result = dumps((result, id))
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in _execute_task(arg, cache, dsk)
269 func, args = arg[0], arg[1:]
270 args2 = [_execute_task(a, cache) for a in args]
--> 271 return func(*args2)
272 elif not ishashable(arg):
273 return arg
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/writers/mitiff.py in _delayed_create(create_opts, dataset)
76 kwargs['name'] = dataset.attrs['name']
77 if 'start_time' not in kwargs:
---> 78 kwargs['start_time'] = dataset.attrs['start_time']
79 if 'sensor' not in kwargs:
80 kwargs['sensor'] = dataset.attrs['sensor']
KeyError: 'start_time'
|
KeyError
|
def _delayed_create(create_opts, dataset):
try:
if "platform_name" not in kwargs:
kwargs["platform_name"] = dataset.attrs["platform_name"]
if "name" not in kwargs:
kwargs["name"] = dataset.attrs["name"]
if "start_time" not in kwargs:
kwargs["start_time"] = dataset.attrs["start_time"]
if "sensor" not in kwargs:
kwargs["sensor"] = dataset.attrs["sensor"]
try:
self.mitiff_config[kwargs["sensor"]] = dataset.attrs[
"metadata_requirements"
]["config"]
self.channel_order[kwargs["sensor"]] = dataset.attrs[
"metadata_requirements"
]["order"]
self.file_pattern = dataset.attrs["metadata_requirements"]["file_pattern"]
except KeyError:
# For some mitiff products this info is needed, for others not.
# If needed you should know how to fix this
pass
try:
self.translate_channel_name[kwargs["sensor"]] = dataset.attrs[
"metadata_requirements"
]["translate"]
except KeyError:
# For some mitiff products this info is needed, for others not.
# If needed you should know how to fix this
pass
image_description = self._make_image_description(dataset, **kwargs)
gen_filename = filename or self.get_filename(**dataset.attrs)
LOG.info("Saving mitiff to: %s ...", gen_filename)
self._save_datasets_as_mitiff(
dataset, image_description, gen_filename, **kwargs
)
except:
raise
|
def _delayed_create(create_opts, dataset):
LOG.debug("create_opts: %s", create_opts)
try:
if "platform_name" not in kwargs:
kwargs["platform_name"] = dataset.attrs["platform_name"]
if "name" not in kwargs:
kwargs["name"] = dataset.attrs["name"]
if "start_time" not in kwargs:
kwargs["start_time"] = dataset.attrs["start_time"]
if "sensor" not in kwargs:
kwargs["sensor"] = dataset.attrs["sensor"]
try:
self.mitiff_config[kwargs["sensor"]] = dataset.attrs[
"metadata_requirements"
]["config"]
self.channel_order[kwargs["sensor"]] = dataset.attrs[
"metadata_requirements"
]["order"]
self.file_pattern = dataset.attrs["metadata_requirements"]["file_pattern"]
except KeyError as ke:
LOG.warning("Something went wrong with assigning to various dicts: %s", ke)
try:
self.translate_channel_name[kwargs["sensor"]] = dataset.attrs[
"metadata_requirements"
]["translate"]
except KeyError as ke:
LOG.warning("Something went wrong with assigning to translate: %s", ke)
image_description = self._make_image_description(dataset, **kwargs)
LOG.debug("File pattern %s", self.file_pattern)
self.filename_parser = self.create_filename_parser(create_opts)
gen_filename = filename or self.get_filename(**kwargs)
LOG.info("Saving mitiff to: %s ...", gen_filename)
self._save_datasets_as_mitiff(
dataset, image_description, gen_filename, **kwargs
)
except:
raise
|
https://github.com/pytroll/satpy/issues/369
|
scn.save_dataset('natural', writer='mitiff', filename='/tmp/mitiff.tif')
[DEBUG: 2018-07-16 12:29:43 : satpy.writers] Reading ['/home/lahtinep/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/etc/writers/mitiff.yaml']
[DEBUG: 2018-07-16 12:29:43 : satpy.writers.mitiff] Starting in mitiff save_dataset ...
[WARNING: 2018-07-16 12:29:43 : satpy.writers.mitiff] Unset save_dir. Use: ./
/home/lahtinep/Software/miniconda3/lib/python3.6/site-packages/pyresample/kd_tree.py:924: RuntimeWarning: invalid value encountered in sqrt
mask=mask)
[DEBUG: 2018-07-16 12:29:45 : satpy.writers.mitiff] create_opts: ./
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-48-762780f9f91a> in <module>()
----> 1 lcl.save_dataset('natural', writer='mitiff', filename='/tmp/mitiff.tif')
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/scene.py in save_dataset(self, dataset_id, filename, writer, overlay, compute, **kwargs)
1008 return writer.save_dataset(self[dataset_id], filename=filename,
1009 overlay=overlay, compute=compute,
-> 1010 **save_kwargs)
1011
1012 def save_datasets(self, writer="geotiff", datasets=None, compute=True,
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/writers/mitiff.py in save_dataset(self, dataset, filename, fill_value, compute, base_dir, **kwargs)
116
117 if compute:
--> 118 return delayed.compute()
119 return delayed
120
~/Software/miniconda3/lib/python3.6/site-packages/dask/base.py in compute(self, **kwargs)
154 dask.base.compute
155 """
--> 156 (result,) = compute(self, traverse=False, **kwargs)
157 return result
158
~/Software/miniconda3/lib/python3.6/site-packages/dask/base.py in compute(*args, **kwargs)
400 keys = [x.__dask_keys__() for x in collections]
401 postcomputes = [x.__dask_postcompute__() for x in collections]
--> 402 results = schedule(dsk, keys, **kwargs)
403 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)])
404
~/Software/miniconda3/lib/python3.6/site-packages/dask/threaded.py in get(dsk, result, cache, num_workers, **kwargs)
73 results = get_async(pool.apply_async, len(pool._pool), dsk, result,
74 cache=cache, get_id=_thread_get_id,
---> 75 pack_exception=pack_exception, **kwargs)
76
77 # Cleanup pools associated to dead threads
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in get_async(apply_async, num_workers, dsk, result, cache, get_id, rerun_exceptions_locally, pack_exception, raise_exception, callbacks, dumps, loads, **kwargs)
519 _execute_task(task, data) # Re-execute locally
520 else:
--> 521 raise_exception(exc, tb)
522 res, worker_id = loads(res_info)
523 state['cache'][key] = res
~/Software/miniconda3/lib/python3.6/site-packages/dask/compatibility.py in reraise(exc, tb)
67 if exc.__traceback__ is not tb:
68 raise exc.with_traceback(tb)
---> 69 raise exc
70
71 else:
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in execute_task(key, task_info, dumps, loads, get_id, pack_exception)
288 try:
289 task, data = loads(task_info)
--> 290 result = _execute_task(task, data)
291 id = get_id()
292 result = dumps((result, id))
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in _execute_task(arg, cache, dsk)
269 func, args = arg[0], arg[1:]
270 args2 = [_execute_task(a, cache) for a in args]
--> 271 return func(*args2)
272 elif not ishashable(arg):
273 return arg
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/writers/mitiff.py in _delayed_create(create_opts, dataset)
76 kwargs['name'] = dataset.attrs['name']
77 if 'start_time' not in kwargs:
---> 78 kwargs['start_time'] = dataset.attrs['start_time']
79 if 'sensor' not in kwargs:
80 kwargs['sensor'] = dataset.attrs['sensor']
KeyError: 'start_time'
|
KeyError
|
def save_datasets(
self, datasets, filename=None, fill_value=None, compute=True, **kwargs
):
"""Save all datasets to one or more files."""
LOG.debug("Starting in mitiff save_datasets ... ")
def _delayed_create(create_opts, datasets):
LOG.debug("create_opts: %s", create_opts)
try:
if "platform_name" not in kwargs:
kwargs["platform_name"] = datasets.attrs["platform_name"]
if "name" not in kwargs:
kwargs["name"] = datasets[0].attrs["name"]
if "start_time" not in kwargs:
kwargs["start_time"] = datasets[0].attrs["start_time"]
if "sensor" not in kwargs:
kwargs["sensor"] = datasets[0].attrs["sensor"]
try:
self.mitiff_config[kwargs["sensor"]] = datasets[
"metadata_requirements"
]["config"]
self.translate_channel_name[kwargs["sensor"]] = datasets[
"metadata_requirements"
]["translate"]
self.channel_order[kwargs["sensor"]] = datasets[
"metadata_requirements"
]["order"]
self.file_pattern = datasets["metadata_requirements"]["file_pattern"]
except KeyError:
# For some mitiff products this info is needed, for others not.
# If needed you should know how to fix this
pass
image_description = self._make_image_description(datasets, **kwargs)
LOG.debug("File pattern %s", self.file_pattern)
if isinstance(datasets, list):
kwargs["start_time"] = datasets[0].attrs["start_time"]
else:
kwargs["start_time"] = datasets.attrs["start_time"]
gen_filename = filename or self.get_filename(**kwargs)
LOG.info("Saving mitiff to: %s ...", gen_filename)
self._save_datasets_as_mitiff(
datasets, image_description, gen_filename, **kwargs
)
except:
raise
create_opts = ()
delayed = dask.delayed(_delayed_create)(create_opts, datasets)
LOG.debug("About to call delayed compute ...")
if compute:
return delayed.compute()
return delayed
|
def save_datasets(self, datasets, compute=True, **kwargs):
"""Save all datasets to one or more files."""
LOG.debug("Starting in mitiff save_datasets ... ")
LOG.debug("kwargs: %s", kwargs)
def _delayed_create(create_opts, datasets):
LOG.debug("create_opts: %s", create_opts)
try:
if "platform_name" not in kwargs:
kwargs["platform_name"] = datasets.attrs["platform_name"]
if "name" not in kwargs:
kwargs["name"] = datasets[0].attrs["name"]
if "start_time" not in kwargs:
kwargs["start_time"] = datasets[0].attrs["start_time"]
if "sensor" not in kwargs:
kwargs["sensor"] = datasets[0].attrs["sensor"]
try:
self.mitiff_config[kwargs["sensor"]] = datasets[
"metadata_requirements"
]["config"]
self.translate_channel_name[kwargs["sensor"]] = datasets[
"metadata_requirements"
]["translate"]
self.channel_order[kwargs["sensor"]] = datasets[
"metadata_requirements"
]["order"]
self.file_pattern = datasets["metadata_requirements"]["file_pattern"]
except KeyError:
LOG.warning(
"metadata requirements not given. This is ok for predefined composites in satpy"
)
image_description = self._make_image_description(datasets, **kwargs)
LOG.debug("File pattern %s", self.file_pattern)
if isinstance(datasets, list):
kwargs["start_time"] = datasets[0].attrs["start_time"]
else:
kwargs["start_time"] = datasets.attrs["start_time"]
self.filename_parser = self.create_filename_parser(kwargs["mitiff_dir"])
LOG.info("Saving mitiff to: %s ...", self.get_filename(**kwargs))
gen_filename = self.get_filename(**kwargs)
self._save_datasets_as_mitiff(
datasets, image_description, gen_filename, **kwargs
)
except:
raise
create_opts = ()
delayed = dask.delayed(_delayed_create)(create_opts, datasets)
LOG.debug("About to call delayed compute ...")
if compute:
return delayed.compute()
return delayed
|
https://github.com/pytroll/satpy/issues/369
|
scn.save_dataset('natural', writer='mitiff', filename='/tmp/mitiff.tif')
[DEBUG: 2018-07-16 12:29:43 : satpy.writers] Reading ['/home/lahtinep/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/etc/writers/mitiff.yaml']
[DEBUG: 2018-07-16 12:29:43 : satpy.writers.mitiff] Starting in mitiff save_dataset ...
[WARNING: 2018-07-16 12:29:43 : satpy.writers.mitiff] Unset save_dir. Use: ./
/home/lahtinep/Software/miniconda3/lib/python3.6/site-packages/pyresample/kd_tree.py:924: RuntimeWarning: invalid value encountered in sqrt
mask=mask)
[DEBUG: 2018-07-16 12:29:45 : satpy.writers.mitiff] create_opts: ./
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-48-762780f9f91a> in <module>()
----> 1 lcl.save_dataset('natural', writer='mitiff', filename='/tmp/mitiff.tif')
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/scene.py in save_dataset(self, dataset_id, filename, writer, overlay, compute, **kwargs)
1008 return writer.save_dataset(self[dataset_id], filename=filename,
1009 overlay=overlay, compute=compute,
-> 1010 **save_kwargs)
1011
1012 def save_datasets(self, writer="geotiff", datasets=None, compute=True,
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/writers/mitiff.py in save_dataset(self, dataset, filename, fill_value, compute, base_dir, **kwargs)
116
117 if compute:
--> 118 return delayed.compute()
119 return delayed
120
~/Software/miniconda3/lib/python3.6/site-packages/dask/base.py in compute(self, **kwargs)
154 dask.base.compute
155 """
--> 156 (result,) = compute(self, traverse=False, **kwargs)
157 return result
158
~/Software/miniconda3/lib/python3.6/site-packages/dask/base.py in compute(*args, **kwargs)
400 keys = [x.__dask_keys__() for x in collections]
401 postcomputes = [x.__dask_postcompute__() for x in collections]
--> 402 results = schedule(dsk, keys, **kwargs)
403 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)])
404
~/Software/miniconda3/lib/python3.6/site-packages/dask/threaded.py in get(dsk, result, cache, num_workers, **kwargs)
73 results = get_async(pool.apply_async, len(pool._pool), dsk, result,
74 cache=cache, get_id=_thread_get_id,
---> 75 pack_exception=pack_exception, **kwargs)
76
77 # Cleanup pools associated to dead threads
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in get_async(apply_async, num_workers, dsk, result, cache, get_id, rerun_exceptions_locally, pack_exception, raise_exception, callbacks, dumps, loads, **kwargs)
519 _execute_task(task, data) # Re-execute locally
520 else:
--> 521 raise_exception(exc, tb)
522 res, worker_id = loads(res_info)
523 state['cache'][key] = res
~/Software/miniconda3/lib/python3.6/site-packages/dask/compatibility.py in reraise(exc, tb)
67 if exc.__traceback__ is not tb:
68 raise exc.with_traceback(tb)
---> 69 raise exc
70
71 else:
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in execute_task(key, task_info, dumps, loads, get_id, pack_exception)
288 try:
289 task, data = loads(task_info)
--> 290 result = _execute_task(task, data)
291 id = get_id()
292 result = dumps((result, id))
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in _execute_task(arg, cache, dsk)
269 func, args = arg[0], arg[1:]
270 args2 = [_execute_task(a, cache) for a in args]
--> 271 return func(*args2)
272 elif not ishashable(arg):
273 return arg
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/writers/mitiff.py in _delayed_create(create_opts, dataset)
76 kwargs['name'] = dataset.attrs['name']
77 if 'start_time' not in kwargs:
---> 78 kwargs['start_time'] = dataset.attrs['start_time']
79 if 'sensor' not in kwargs:
80 kwargs['sensor'] = dataset.attrs['sensor']
KeyError: 'start_time'
|
KeyError
|
def _delayed_create(create_opts, datasets):
LOG.debug("create_opts: %s", create_opts)
try:
if "platform_name" not in kwargs:
kwargs["platform_name"] = datasets.attrs["platform_name"]
if "name" not in kwargs:
kwargs["name"] = datasets[0].attrs["name"]
if "start_time" not in kwargs:
kwargs["start_time"] = datasets[0].attrs["start_time"]
if "sensor" not in kwargs:
kwargs["sensor"] = datasets[0].attrs["sensor"]
try:
self.mitiff_config[kwargs["sensor"]] = datasets["metadata_requirements"][
"config"
]
self.translate_channel_name[kwargs["sensor"]] = datasets[
"metadata_requirements"
]["translate"]
self.channel_order[kwargs["sensor"]] = datasets["metadata_requirements"][
"order"
]
self.file_pattern = datasets["metadata_requirements"]["file_pattern"]
except KeyError:
# For some mitiff products this info is needed, for others not.
# If needed you should know how to fix this
pass
image_description = self._make_image_description(datasets, **kwargs)
LOG.debug("File pattern %s", self.file_pattern)
if isinstance(datasets, list):
kwargs["start_time"] = datasets[0].attrs["start_time"]
else:
kwargs["start_time"] = datasets.attrs["start_time"]
gen_filename = filename or self.get_filename(**kwargs)
LOG.info("Saving mitiff to: %s ...", gen_filename)
self._save_datasets_as_mitiff(
datasets, image_description, gen_filename, **kwargs
)
except:
raise
|
def _delayed_create(create_opts, datasets):
LOG.debug("create_opts: %s", create_opts)
try:
if "platform_name" not in kwargs:
kwargs["platform_name"] = datasets.attrs["platform_name"]
if "name" not in kwargs:
kwargs["name"] = datasets[0].attrs["name"]
if "start_time" not in kwargs:
kwargs["start_time"] = datasets[0].attrs["start_time"]
if "sensor" not in kwargs:
kwargs["sensor"] = datasets[0].attrs["sensor"]
try:
self.mitiff_config[kwargs["sensor"]] = datasets["metadata_requirements"][
"config"
]
self.translate_channel_name[kwargs["sensor"]] = datasets[
"metadata_requirements"
]["translate"]
self.channel_order[kwargs["sensor"]] = datasets["metadata_requirements"][
"order"
]
self.file_pattern = datasets["metadata_requirements"]["file_pattern"]
except KeyError:
LOG.warning(
"metadata requirements not given. This is ok for predefined composites in satpy"
)
image_description = self._make_image_description(datasets, **kwargs)
LOG.debug("File pattern %s", self.file_pattern)
if isinstance(datasets, list):
kwargs["start_time"] = datasets[0].attrs["start_time"]
else:
kwargs["start_time"] = datasets.attrs["start_time"]
self.filename_parser = self.create_filename_parser(kwargs["mitiff_dir"])
LOG.info("Saving mitiff to: %s ...", self.get_filename(**kwargs))
gen_filename = self.get_filename(**kwargs)
self._save_datasets_as_mitiff(
datasets, image_description, gen_filename, **kwargs
)
except:
raise
|
https://github.com/pytroll/satpy/issues/369
|
scn.save_dataset('natural', writer='mitiff', filename='/tmp/mitiff.tif')
[DEBUG: 2018-07-16 12:29:43 : satpy.writers] Reading ['/home/lahtinep/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/etc/writers/mitiff.yaml']
[DEBUG: 2018-07-16 12:29:43 : satpy.writers.mitiff] Starting in mitiff save_dataset ...
[WARNING: 2018-07-16 12:29:43 : satpy.writers.mitiff] Unset save_dir. Use: ./
/home/lahtinep/Software/miniconda3/lib/python3.6/site-packages/pyresample/kd_tree.py:924: RuntimeWarning: invalid value encountered in sqrt
mask=mask)
[DEBUG: 2018-07-16 12:29:45 : satpy.writers.mitiff] create_opts: ./
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-48-762780f9f91a> in <module>()
----> 1 lcl.save_dataset('natural', writer='mitiff', filename='/tmp/mitiff.tif')
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/scene.py in save_dataset(self, dataset_id, filename, writer, overlay, compute, **kwargs)
1008 return writer.save_dataset(self[dataset_id], filename=filename,
1009 overlay=overlay, compute=compute,
-> 1010 **save_kwargs)
1011
1012 def save_datasets(self, writer="geotiff", datasets=None, compute=True,
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/writers/mitiff.py in save_dataset(self, dataset, filename, fill_value, compute, base_dir, **kwargs)
116
117 if compute:
--> 118 return delayed.compute()
119 return delayed
120
~/Software/miniconda3/lib/python3.6/site-packages/dask/base.py in compute(self, **kwargs)
154 dask.base.compute
155 """
--> 156 (result,) = compute(self, traverse=False, **kwargs)
157 return result
158
~/Software/miniconda3/lib/python3.6/site-packages/dask/base.py in compute(*args, **kwargs)
400 keys = [x.__dask_keys__() for x in collections]
401 postcomputes = [x.__dask_postcompute__() for x in collections]
--> 402 results = schedule(dsk, keys, **kwargs)
403 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)])
404
~/Software/miniconda3/lib/python3.6/site-packages/dask/threaded.py in get(dsk, result, cache, num_workers, **kwargs)
73 results = get_async(pool.apply_async, len(pool._pool), dsk, result,
74 cache=cache, get_id=_thread_get_id,
---> 75 pack_exception=pack_exception, **kwargs)
76
77 # Cleanup pools associated to dead threads
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in get_async(apply_async, num_workers, dsk, result, cache, get_id, rerun_exceptions_locally, pack_exception, raise_exception, callbacks, dumps, loads, **kwargs)
519 _execute_task(task, data) # Re-execute locally
520 else:
--> 521 raise_exception(exc, tb)
522 res, worker_id = loads(res_info)
523 state['cache'][key] = res
~/Software/miniconda3/lib/python3.6/site-packages/dask/compatibility.py in reraise(exc, tb)
67 if exc.__traceback__ is not tb:
68 raise exc.with_traceback(tb)
---> 69 raise exc
70
71 else:
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in execute_task(key, task_info, dumps, loads, get_id, pack_exception)
288 try:
289 task, data = loads(task_info)
--> 290 result = _execute_task(task, data)
291 id = get_id()
292 result = dumps((result, id))
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in _execute_task(arg, cache, dsk)
269 func, args = arg[0], arg[1:]
270 args2 = [_execute_task(a, cache) for a in args]
--> 271 return func(*args2)
272 elif not ishashable(arg):
273 return arg
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/writers/mitiff.py in _delayed_create(create_opts, dataset)
76 kwargs['name'] = dataset.attrs['name']
77 if 'start_time' not in kwargs:
---> 78 kwargs['start_time'] = dataset.attrs['start_time']
79 if 'sensor' not in kwargs:
80 kwargs['sensor'] = dataset.attrs['sensor']
KeyError: 'start_time'
|
KeyError
|
def _add_proj4_string(self, datasets, first_dataset):
proj4_string = " Proj string: "
if isinstance(datasets, list):
proj4_string += first_dataset.attrs["area"].proj4_string
else:
proj4_string += datasets.attrs["area"].proj4_string
x_0 = 0
y_0 = 0
if "EPSG:32631" in proj4_string:
proj4_string = proj4_string.replace(
"+init=EPSG:32631",
"+proj=etmerc +lat_0=0 +lon_0=3 +k=0.9996 +ellps=WGS84 +datum=WGS84",
)
x_0 = 500000
elif "EPSG:32632" in proj4_string:
proj4_string = proj4_string.replace(
"+init=EPSG:32632",
"+proj=etmerc +lat_0=0 +lon_0=9 +k=0.9996 +ellps=WGS84 +datum=WGS84",
)
x_0 = 500000
elif "EPSG:32633" in proj4_string:
proj4_string = proj4_string.replace(
"+init=EPSG:32633",
"+proj=etmerc +lat_0=0 +lon_0=15 +k=0.9996 +ellps=WGS84 +datum=WGS84",
)
x_0 = 500000
elif "EPSG:32634" in proj4_string:
proj4_string = proj4_string.replace(
"+init=EPSG:32634",
"+proj=etmerc +lat_0=0 +lon_0=21 +k=0.9996 +ellps=WGS84 +datum=WGS84",
)
x_0 = 500000
elif "EPSG:32635" in proj4_string:
proj4_string = proj4_string.replace(
"+init=EPSG:32635",
"+proj=etmerc +lat_0=0 +lon_0=27 +k=0.9996 +ellps=WGS84 +datum=WGS84",
)
x_0 = 500000
elif "EPSG" in proj4_string:
LOG.warning(
"EPSG used in proj string but not converted. Please add this in code"
)
if "geos" in proj4_string:
proj4_string = proj4_string.replace("+sweep=x ", "")
if "+a=6378137.0 +b=6356752.31414" in proj4_string:
proj4_string = proj4_string.replace(
"+a=6378137.0 +b=6356752.31414", "+ellps=WGS84"
)
if "+units=m" in proj4_string:
proj4_string = proj4_string.replace("+units=m", "+units=km")
if not any(datum in proj4_string for datum in ["datum", "towgs84"]):
proj4_string += " +towgs84=0,0,0"
if "units" not in proj4_string:
proj4_string += " +units=km"
if isinstance(datasets, list):
proj4_string += " +x_0=%.6f" % (
(
-first_dataset.attrs["area"].area_extent[0]
+ first_dataset.attrs["area"].pixel_size_x
)
+ x_0
)
proj4_string += " +y_0=%.6f" % (
(
-first_dataset.attrs["area"].area_extent[1]
+ first_dataset.attrs["area"].pixel_size_y
)
+ y_0
)
else:
proj4_string += " +x_0=%.6f" % (
(
-datasets.attrs["area"].area_extent[0]
+ datasets.attrs["area"].pixel_size_x
)
+ x_0
)
proj4_string += " +y_0=%.6f" % (
(
-datasets.attrs["area"].area_extent[1]
+ datasets.attrs["area"].pixel_size_y
)
+ y_0
)
LOG.debug("proj4_string: %s", proj4_string)
proj4_string += "\n"
return proj4_string
|
def _add_proj4_string(self, datasets, first_dataset):
proj4_string = " Proj string: "
if isinstance(datasets, list):
proj4_string += first_dataset.attrs["area"].proj4_string
else:
proj4_string += datasets.attrs["area"].proj4_string
x_0 = 0
y_0 = 0
if "EPSG:32631" in proj4_string:
proj4_string = proj4_string.replace(
"+init=EPSG:32631",
"+proj=etmerc +lat_0=0 +lon_0=3 +k=0.9996 +ellps=WGS84 +datum=WGS84",
)
x_0 = 500000
elif "EPSG:32632" in proj4_string:
proj4_string = proj4_string.replace(
"+init=EPSG:32632",
"+proj=etmerc +lat_0=0 +lon_0=9 +k=0.9996 +ellps=WGS84 +datum=WGS84",
)
x_0 = 500000
elif "EPSG:32633" in proj4_string:
proj4_string = proj4_string.replace(
"+init=EPSG:32633",
"+proj=etmerc +lat_0=0 +lon_0=15 +k=0.9996 +ellps=WGS84 +datum=WGS84",
)
x_0 = 500000
elif "EPSG:32634" in proj4_string:
proj4_string = proj4_string.replace(
"+init=EPSG:32634",
"+proj=etmerc +lat_0=0 +lon_0=21 +k=0.9996 +ellps=WGS84 +datum=WGS84",
)
x_0 = 500000
elif "EPSG:32635" in proj4_string:
proj4_string = proj4_string.replace(
"+init=EPSG:32635",
"+proj=etmerc +lat_0=0 +lon_0=27 +k=0.9996 +ellps=WGS84 +datum=WGS84",
)
x_0 = 500000
elif "EPSG" in proj4_string:
LOG.warning(
"EPSG used in proj string but not converted. Please add this in code"
)
if "geos" in proj4_string:
proj4_string = proj4_string.replace("+sweep=x ", "")
if "+a=6378137.0 +b=6356752.31414" in proj4_string:
proj4_string = proj4_string.replace(
"+a=6378137.0 +b=6356752.31414", "+ellps=WGS84"
)
if "+units=m" in proj4_string:
proj4_string = proj4_string.replace("+units=m", "+units=km")
if not any(datum in proj4_string for datum in ["datum", "towgs84"]):
proj4_string += " +towgs84=0,0,0"
if "units" not in proj4_string:
proj4_string += " +units=km"
if isinstance(datasets, list):
proj4_string += " +x_0=%.6f" % (
(
-first_dataset.attrs["area"].area_extent[0]
+ first_dataset.attrs["area"].pixel_size_x
)
+ x_0
)
proj4_string += " +y_0=%.6f" % (
(
-first_dataset.attrs["area"].area_extent[1]
+ first_dataset.attrs["area"].pixel_size_y
)
+ y_0
)
else:
proj4_string += " +x_0=%.6f" % (
(
-datasets.attrs["area"].area_extent[0]
+ datasets.attrs["area"].pixel_size_x
)
+ x_0
)
proj4_string += " +y_0=%.6f" % (
(
-datasets.attrs["area"].area_extent[1]
+ datasets.attrs["area"].pixel_size_y
)
+ y_0
)
proj4_string += "\n"
LOG.debug("proj4_string: %s", proj4_string)
return proj4_string
|
https://github.com/pytroll/satpy/issues/369
|
scn.save_dataset('natural', writer='mitiff', filename='/tmp/mitiff.tif')
[DEBUG: 2018-07-16 12:29:43 : satpy.writers] Reading ['/home/lahtinep/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/etc/writers/mitiff.yaml']
[DEBUG: 2018-07-16 12:29:43 : satpy.writers.mitiff] Starting in mitiff save_dataset ...
[WARNING: 2018-07-16 12:29:43 : satpy.writers.mitiff] Unset save_dir. Use: ./
/home/lahtinep/Software/miniconda3/lib/python3.6/site-packages/pyresample/kd_tree.py:924: RuntimeWarning: invalid value encountered in sqrt
mask=mask)
[DEBUG: 2018-07-16 12:29:45 : satpy.writers.mitiff] create_opts: ./
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-48-762780f9f91a> in <module>()
----> 1 lcl.save_dataset('natural', writer='mitiff', filename='/tmp/mitiff.tif')
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/scene.py in save_dataset(self, dataset_id, filename, writer, overlay, compute, **kwargs)
1008 return writer.save_dataset(self[dataset_id], filename=filename,
1009 overlay=overlay, compute=compute,
-> 1010 **save_kwargs)
1011
1012 def save_datasets(self, writer="geotiff", datasets=None, compute=True,
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/writers/mitiff.py in save_dataset(self, dataset, filename, fill_value, compute, base_dir, **kwargs)
116
117 if compute:
--> 118 return delayed.compute()
119 return delayed
120
~/Software/miniconda3/lib/python3.6/site-packages/dask/base.py in compute(self, **kwargs)
154 dask.base.compute
155 """
--> 156 (result,) = compute(self, traverse=False, **kwargs)
157 return result
158
~/Software/miniconda3/lib/python3.6/site-packages/dask/base.py in compute(*args, **kwargs)
400 keys = [x.__dask_keys__() for x in collections]
401 postcomputes = [x.__dask_postcompute__() for x in collections]
--> 402 results = schedule(dsk, keys, **kwargs)
403 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)])
404
~/Software/miniconda3/lib/python3.6/site-packages/dask/threaded.py in get(dsk, result, cache, num_workers, **kwargs)
73 results = get_async(pool.apply_async, len(pool._pool), dsk, result,
74 cache=cache, get_id=_thread_get_id,
---> 75 pack_exception=pack_exception, **kwargs)
76
77 # Cleanup pools associated to dead threads
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in get_async(apply_async, num_workers, dsk, result, cache, get_id, rerun_exceptions_locally, pack_exception, raise_exception, callbacks, dumps, loads, **kwargs)
519 _execute_task(task, data) # Re-execute locally
520 else:
--> 521 raise_exception(exc, tb)
522 res, worker_id = loads(res_info)
523 state['cache'][key] = res
~/Software/miniconda3/lib/python3.6/site-packages/dask/compatibility.py in reraise(exc, tb)
67 if exc.__traceback__ is not tb:
68 raise exc.with_traceback(tb)
---> 69 raise exc
70
71 else:
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in execute_task(key, task_info, dumps, loads, get_id, pack_exception)
288 try:
289 task, data = loads(task_info)
--> 290 result = _execute_task(task, data)
291 id = get_id()
292 result = dumps((result, id))
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in _execute_task(arg, cache, dsk)
269 func, args = arg[0], arg[1:]
270 args2 = [_execute_task(a, cache) for a in args]
--> 271 return func(*args2)
272 elif not ishashable(arg):
273 return arg
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/writers/mitiff.py in _delayed_create(create_opts, dataset)
76 kwargs['name'] = dataset.attrs['name']
77 if 'start_time' not in kwargs:
---> 78 kwargs['start_time'] = dataset.attrs['start_time']
79 if 'sensor' not in kwargs:
80 kwargs['sensor'] = dataset.attrs['sensor']
KeyError: 'start_time'
|
KeyError
|
def _add_calibration_datasets(
self, ch, datasets, reverse_offset, reverse_scale, decimals
):
_reverse_offset = reverse_offset
_reverse_scale = reverse_scale
_decimals = decimals
_table_calibration = ""
found_calibration = False
skip_calibration = False
for i, ds in enumerate(datasets):
if "prerequisites" in ds.attrs and isinstance(
ds.attrs["prerequisites"][i], DatasetID
):
if ds.attrs["prerequisites"][i][0] == ch:
if ds.attrs["prerequisites"][i][4] == "RADIANCE":
raise NotImplementedError(
"Mitiff radiance calibration not implemented."
)
# _table_calibration += ', Radiance, '
# _table_calibration += '[W/m²/µm/sr]'
# _decimals = 8
elif ds.attrs["prerequisites"][i][4] == "brightness_temperature":
found_calibration = True
_table_calibration += ", BT, "
_table_calibration += "\u00b0" # '\u2103'
_table_calibration += "[C]"
_reverse_offset = 255.0
_reverse_scale = -1.0
_decimals = 2
elif ds.attrs["prerequisites"][i][4] == "reflectance":
found_calibration = True
_table_calibration += ", Reflectance(Albedo), "
_table_calibration += "[%]"
_decimals = 2
else:
LOG.warning(
"Unknown calib type. Must be Radiance, Reflectance or BT."
)
break
else:
continue
else:
_table_calibration = ""
skip_calibration = True
break
if not found_calibration:
_table_calibration = ""
skip_calibration = True
# How to format string by passing the format
# http://stackoverflow.com/questions/1598579/rounding-decimals-with-new-python-format-function
return (
skip_calibration,
_table_calibration,
_reverse_offset,
_reverse_scale,
_decimals,
)
|
def _add_calibration_datasets(
self, ch, datasets, reverse_offset, reverse_scale, decimals
):
_reverse_offset = reverse_offset
_reverse_scale = reverse_scale
_decimals = decimals
_table_calibration = ""
found_calibration = False
skip_calibration = False
for i, ds in enumerate(datasets):
if isinstance(ds.attrs["prerequisites"][i], DatasetID):
if ds.attrs["prerequisites"][i][0] == ch:
if ds.attrs["prerequisites"][i][4] == "RADIANCE":
raise NotImplementedError(
"Mitiff radiance calibration not implemented."
)
# _table_calibration += ', Radiance, '
# _table_calibration += '[W/m²/µm/sr]'
# _decimals = 8
elif ds.attrs["prerequisites"][i][4] == "brightness_temperature":
found_calibration = True
_table_calibration += ", BT, "
_table_calibration += "\u00b0" # '\u2103'
_table_calibration += "[C]"
_reverse_offset = 255.0
_reverse_scale = -1.0
_decimals = 2
elif ds.attrs["prerequisites"][i][4] == "reflectance":
found_calibration = True
_table_calibration += ", Reflectance(Albedo), "
_table_calibration += "[%]"
_decimals = 2
else:
LOG.warning(
"Unknown calib type. Must be Radiance, Reflectance or BT."
)
break
else:
continue
else:
_table_calibration = ""
skip_calibration = True
break
if not found_calibration:
_table_calibration = ""
skip_calibration = True
# How to format string by passing the format
# http://stackoverflow.com/questions/1598579/rounding-decimals-with-new-python-format-function
return (
skip_calibration,
_table_calibration,
_reverse_offset,
_reverse_scale,
_decimals,
)
|
https://github.com/pytroll/satpy/issues/369
|
scn.save_dataset('natural', writer='mitiff', filename='/tmp/mitiff.tif')
[DEBUG: 2018-07-16 12:29:43 : satpy.writers] Reading ['/home/lahtinep/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/etc/writers/mitiff.yaml']
[DEBUG: 2018-07-16 12:29:43 : satpy.writers.mitiff] Starting in mitiff save_dataset ...
[WARNING: 2018-07-16 12:29:43 : satpy.writers.mitiff] Unset save_dir. Use: ./
/home/lahtinep/Software/miniconda3/lib/python3.6/site-packages/pyresample/kd_tree.py:924: RuntimeWarning: invalid value encountered in sqrt
mask=mask)
[DEBUG: 2018-07-16 12:29:45 : satpy.writers.mitiff] create_opts: ./
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-48-762780f9f91a> in <module>()
----> 1 lcl.save_dataset('natural', writer='mitiff', filename='/tmp/mitiff.tif')
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/scene.py in save_dataset(self, dataset_id, filename, writer, overlay, compute, **kwargs)
1008 return writer.save_dataset(self[dataset_id], filename=filename,
1009 overlay=overlay, compute=compute,
-> 1010 **save_kwargs)
1011
1012 def save_datasets(self, writer="geotiff", datasets=None, compute=True,
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/writers/mitiff.py in save_dataset(self, dataset, filename, fill_value, compute, base_dir, **kwargs)
116
117 if compute:
--> 118 return delayed.compute()
119 return delayed
120
~/Software/miniconda3/lib/python3.6/site-packages/dask/base.py in compute(self, **kwargs)
154 dask.base.compute
155 """
--> 156 (result,) = compute(self, traverse=False, **kwargs)
157 return result
158
~/Software/miniconda3/lib/python3.6/site-packages/dask/base.py in compute(*args, **kwargs)
400 keys = [x.__dask_keys__() for x in collections]
401 postcomputes = [x.__dask_postcompute__() for x in collections]
--> 402 results = schedule(dsk, keys, **kwargs)
403 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)])
404
~/Software/miniconda3/lib/python3.6/site-packages/dask/threaded.py in get(dsk, result, cache, num_workers, **kwargs)
73 results = get_async(pool.apply_async, len(pool._pool), dsk, result,
74 cache=cache, get_id=_thread_get_id,
---> 75 pack_exception=pack_exception, **kwargs)
76
77 # Cleanup pools associated to dead threads
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in get_async(apply_async, num_workers, dsk, result, cache, get_id, rerun_exceptions_locally, pack_exception, raise_exception, callbacks, dumps, loads, **kwargs)
519 _execute_task(task, data) # Re-execute locally
520 else:
--> 521 raise_exception(exc, tb)
522 res, worker_id = loads(res_info)
523 state['cache'][key] = res
~/Software/miniconda3/lib/python3.6/site-packages/dask/compatibility.py in reraise(exc, tb)
67 if exc.__traceback__ is not tb:
68 raise exc.with_traceback(tb)
---> 69 raise exc
70
71 else:
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in execute_task(key, task_info, dumps, loads, get_id, pack_exception)
288 try:
289 task, data = loads(task_info)
--> 290 result = _execute_task(task, data)
291 id = get_id()
292 result = dumps((result, id))
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in _execute_task(arg, cache, dsk)
269 func, args = arg[0], arg[1:]
270 args2 = [_execute_task(a, cache) for a in args]
--> 271 return func(*args2)
272 elif not ishashable(arg):
273 return arg
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/writers/mitiff.py in _delayed_create(create_opts, dataset)
76 kwargs['name'] = dataset.attrs['name']
77 if 'start_time' not in kwargs:
---> 78 kwargs['start_time'] = dataset.attrs['start_time']
79 if 'sensor' not in kwargs:
80 kwargs['sensor'] = dataset.attrs['sensor']
KeyError: 'start_time'
|
KeyError
|
def _add_calibration(self, channels, cns, datasets, **kwargs):
_table_calibration = ""
skip_calibration = False
for ch in channels:
palette = False
# Make calibration.
if palette:
raise NotImplementedError("Mitiff palette saving is not implemented.")
else:
_table_calibration += "Table_calibration: "
try:
_table_calibration += str(
self.mitiff_config[kwargs["sensor"]][cns.get(ch, ch)]["alias"]
)
except KeyError:
_table_calibration += str(ch)
_reverse_offset = 0.0
_reverse_scale = 1.0
_decimals = 2
(
skip_calibration,
__table_calibration,
_reverse_offset,
_reverse_scale,
_decimals,
) = self._add_calibration_datasets(
ch, datasets, _reverse_offset, _reverse_scale, _decimals
)
_table_calibration += __table_calibration
if not skip_calibration:
_table_calibration += ", 8, [ "
for val in range(0, 256):
# Comma separated list of values
_table_calibration += "{0:.{1}f} ".format(
(
float(
self.mitiff_config[kwargs["sensor"]][cns.get(ch, ch)][
"min-val"
]
)
+ (
(_reverse_offset + _reverse_scale * val)
* (
float(
self.mitiff_config[kwargs["sensor"]][
cns.get(ch, ch)
]["max-val"]
)
- float(
self.mitiff_config[kwargs["sensor"]][
cns.get(ch, ch)
]["min-val"]
)
)
)
/ 255.0
),
_decimals,
)
# _table_calibration += '0.00000000 '
_table_calibration += "]\n\n"
else:
_table_calibration = ""
return _table_calibration
|
def _add_calibration(self, channels, cns, datasets, **kwargs):
_table_calibration = ""
skip_calibration = False
for ch in channels:
palette = False
# Make calibration.
if palette:
raise NotImplementedError("Mitiff palette saving is not implemented.")
else:
_table_calibration += "Table_calibration: "
try:
_table_calibration += str(
self.mitiff_config[kwargs["sensor"]][cns.get(ch, ch)]["alias"]
)
except KeyError:
_table_calibration += str(ch)
_reverse_offset = 0.0
_reverse_scale = 1.0
_decimals = 2
try:
if ch.calibration == "RADIANCE":
raise NotImplementedError(
"Mitiff radiance calibration not implemented."
)
# _table_calibration += ', Radiance, '
# _table_calibration += '[W/m²/µm/sr]'
# _decimals = 8
elif ch.calibration == "brightness_temperature":
_table_calibration += ", BT, "
_table_calibration += "\u00b0" # '\u2103'
_table_calibration += "[C]"
_reverse_offset = 255.0
_reverse_scale = -1.0
_decimals = 2
elif ch.calibration == "reflectance":
_table_calibration += ", Reflectance(Albedo), "
_table_calibration += "[%]"
_decimals = 2
elif ch.calibration is None:
LOG.warning("ch.calibration is None")
_table_calibration = ""
break
else:
LOG.warning(
"Unknown calib type. Must be Radiance, Reflectance or BT."
)
except AttributeError:
(
skip_calibration,
__table_calibration,
_reverse_offset,
_reverse_scale,
_decimals,
) = self._add_calibration_datasets(
ch, datasets, _reverse_offset, _reverse_scale, _decimals
)
_table_calibration += __table_calibration
if not skip_calibration:
_table_calibration += ", 8, [ "
for val in range(0, 256):
# Comma separated list of values
_table_calibration += "{0:.{1}f} ".format(
(
float(
self.mitiff_config[kwargs["sensor"]][cns.get(ch, ch)][
"min-val"
]
)
+ (
(_reverse_offset + _reverse_scale * val)
* (
float(
self.mitiff_config[kwargs["sensor"]][
cns.get(ch, ch)
]["max-val"]
)
- float(
self.mitiff_config[kwargs["sensor"]][
cns.get(ch, ch)
]["min-val"]
)
)
)
/ 255.0
),
_decimals,
)
# _table_calibration += '0.00000000 '
_table_calibration += "]\n\n"
else:
_table_calibration = ""
return _table_calibration
|
https://github.com/pytroll/satpy/issues/369
|
scn.save_dataset('natural', writer='mitiff', filename='/tmp/mitiff.tif')
[DEBUG: 2018-07-16 12:29:43 : satpy.writers] Reading ['/home/lahtinep/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/etc/writers/mitiff.yaml']
[DEBUG: 2018-07-16 12:29:43 : satpy.writers.mitiff] Starting in mitiff save_dataset ...
[WARNING: 2018-07-16 12:29:43 : satpy.writers.mitiff] Unset save_dir. Use: ./
/home/lahtinep/Software/miniconda3/lib/python3.6/site-packages/pyresample/kd_tree.py:924: RuntimeWarning: invalid value encountered in sqrt
mask=mask)
[DEBUG: 2018-07-16 12:29:45 : satpy.writers.mitiff] create_opts: ./
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-48-762780f9f91a> in <module>()
----> 1 lcl.save_dataset('natural', writer='mitiff', filename='/tmp/mitiff.tif')
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/scene.py in save_dataset(self, dataset_id, filename, writer, overlay, compute, **kwargs)
1008 return writer.save_dataset(self[dataset_id], filename=filename,
1009 overlay=overlay, compute=compute,
-> 1010 **save_kwargs)
1011
1012 def save_datasets(self, writer="geotiff", datasets=None, compute=True,
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/writers/mitiff.py in save_dataset(self, dataset, filename, fill_value, compute, base_dir, **kwargs)
116
117 if compute:
--> 118 return delayed.compute()
119 return delayed
120
~/Software/miniconda3/lib/python3.6/site-packages/dask/base.py in compute(self, **kwargs)
154 dask.base.compute
155 """
--> 156 (result,) = compute(self, traverse=False, **kwargs)
157 return result
158
~/Software/miniconda3/lib/python3.6/site-packages/dask/base.py in compute(*args, **kwargs)
400 keys = [x.__dask_keys__() for x in collections]
401 postcomputes = [x.__dask_postcompute__() for x in collections]
--> 402 results = schedule(dsk, keys, **kwargs)
403 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)])
404
~/Software/miniconda3/lib/python3.6/site-packages/dask/threaded.py in get(dsk, result, cache, num_workers, **kwargs)
73 results = get_async(pool.apply_async, len(pool._pool), dsk, result,
74 cache=cache, get_id=_thread_get_id,
---> 75 pack_exception=pack_exception, **kwargs)
76
77 # Cleanup pools associated to dead threads
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in get_async(apply_async, num_workers, dsk, result, cache, get_id, rerun_exceptions_locally, pack_exception, raise_exception, callbacks, dumps, loads, **kwargs)
519 _execute_task(task, data) # Re-execute locally
520 else:
--> 521 raise_exception(exc, tb)
522 res, worker_id = loads(res_info)
523 state['cache'][key] = res
~/Software/miniconda3/lib/python3.6/site-packages/dask/compatibility.py in reraise(exc, tb)
67 if exc.__traceback__ is not tb:
68 raise exc.with_traceback(tb)
---> 69 raise exc
70
71 else:
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in execute_task(key, task_info, dumps, loads, get_id, pack_exception)
288 try:
289 task, data = loads(task_info)
--> 290 result = _execute_task(task, data)
291 id = get_id()
292 result = dumps((result, id))
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in _execute_task(arg, cache, dsk)
269 func, args = arg[0], arg[1:]
270 args2 = [_execute_task(a, cache) for a in args]
--> 271 return func(*args2)
272 elif not ishashable(arg):
273 return arg
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/writers/mitiff.py in _delayed_create(create_opts, dataset)
76 kwargs['name'] = dataset.attrs['name']
77 if 'start_time' not in kwargs:
---> 78 kwargs['start_time'] = dataset.attrs['start_time']
79 if 'sensor' not in kwargs:
80 kwargs['sensor'] = dataset.attrs['sensor']
KeyError: 'start_time'
|
KeyError
|
def _make_image_description(self, datasets, **kwargs):
"""
generate image description for mitiff.
Satellite: NOAA 18
Date and Time: 06:58 31/05-2016
SatDir: 0
Channels: 6 In this file: 1-VIS0.63 2-VIS0.86 3(3B)-IR3.7
4-IR10.8 5-IR11.5 6(3A)-VIS1.6
Xsize: 4720
Ysize: 5544
Map projection: Stereographic
Proj string: +proj=stere +lon_0=0 +lat_0=90 +lat_ts=60
+ellps=WGS84 +towgs84=0,0,0 +units=km
+x_0=2526000.000000 +y_0=5806000.000000
TrueLat: 60 N
GridRot: 0
Xunit:1000 m Yunit: 1000 m
NPX: 0.000000 NPY: 0.000000
Ax: 1.000000 Ay: 1.000000 Bx: -2526.000000 By: -262.000000
Satellite: <satellite name>
Date and Time: <HH:MM dd/mm-yyyy>
SatDir: 0
Channels: <number of chanels> In this file: <channels names in order>
Xsize: <number of pixels x>
Ysize: <number of pixels y>
Map projection: Stereographic
Proj string: <proj4 string with +x_0 and +y_0 which is the positive
distance from proj origo
to the lower left corner of the image data>
TrueLat: 60 N
GridRot: 0
Xunit:1000 m Yunit: 1000 m
NPX: 0.000000 NPY: 0.000000
Ax: <pixels size x in km> Ay: <pixel size y in km> Bx: <left corner of
upper right pixel in km>
By: <upper corner of upper right pixel in km>
if palette image write special palette
if normal channel write table calibration:
Table_calibration: <channel name>, <calibration type>, [<unit>],
<no of bits of data>,
[<calibration values space separated>]\n\n
"""
translate_platform_name = {
"metop01": "Metop-B",
"metop02": "Metop-A",
"metop03": "Metop-C",
"noaa15": "NOAA-15",
"noaa16": "NOAA-16",
"noaa17": "NOAA-17",
"noaa18": "NOAA-18",
"noaa19": "NOAA-19",
}
first_dataset = datasets
if isinstance(datasets, list):
LOG.debug("Datasets is a list of dataset")
first_dataset = datasets[0]
if "platform_name" in first_dataset.attrs:
_platform_name = translate_platform_name.get(
first_dataset.attrs["platform_name"], first_dataset.attrs["platform_name"]
)
elif "platform_name" in kwargs:
_platform_name = translate_platform_name.get(
kwargs["platform_name"], kwargs["platform_name"]
)
else:
_platform_name = None
_image_description = ""
_image_description.encode("utf-8")
_image_description += " Satellite: "
if _platform_name is not None:
_image_description += _platform_name
_image_description += "\n"
_image_description += " Date and Time: "
# Select earliest start_time
first = True
earliest = 0
for dataset in datasets:
if first:
earliest = dataset.attrs["start_time"]
else:
if dataset.attrs["start_time"] < earliest:
earliest = dataset.attrs["start_time"]
first = False
LOG.debug("earliest start_time: %s", earliest)
_image_description += earliest.strftime("%H:%M %d/%m-%Y\n")
_image_description += " SatDir: 0\n"
_image_description += " Channels: "
if isinstance(datasets, list):
LOG.debug("len datasets: %s", len(datasets))
_image_description += str(len(datasets))
elif "bands" in datasets.sizes:
LOG.debug("len datasets: %s", datasets.sizes["bands"])
_image_description += str(datasets.sizes["bands"])
elif len(datasets.sizes) == 2:
LOG.debug("len datasets: 1")
_image_description += "1"
_image_description += " In this file: "
channels = self._make_channel_list(datasets, **kwargs)
try:
cns = self.translate_channel_name.get(kwargs["sensor"], {})
except KeyError:
pass
_image_description += self._channel_names(channels, cns, **kwargs)
_image_description += self._add_sizes(datasets, first_dataset)
_image_description += " Map projection: Stereographic\n"
_image_description += self._add_proj4_string(datasets, first_dataset)
_image_description += " TrueLat: 60N\n"
_image_description += " GridRot: 0\n"
_image_description += " Xunit:1000 m Yunit: 1000 m\n"
_image_description += " NPX: %.6f" % (0)
_image_description += " NPY: %.6f" % (0) + "\n"
_image_description += self._add_pixel_sizes(datasets, first_dataset)
_image_description += self._add_corners(datasets, first_dataset)
if isinstance(datasets, list):
LOG.debug("Area extent: %s", first_dataset.attrs["area"].area_extent)
else:
LOG.debug("Area extent: %s", datasets.attrs["area"].area_extent)
_image_description += self._add_calibration(channels, cns, datasets, **kwargs)
return _image_description
|
def _make_image_description(self, datasets, **kwargs):
"""
generate image description for mitiff.
Satellite: NOAA 18
Date and Time: 06:58 31/05-2016
SatDir: 0
Channels: 6 In this file: 1-VIS0.63 2-VIS0.86 3(3B)-IR3.7
4-IR10.8 5-IR11.5 6(3A)-VIS1.6
Xsize: 4720
Ysize: 5544
Map projection: Stereographic
Proj string: +proj=stere +lon_0=0 +lat_0=90 +lat_ts=60
+ellps=WGS84 +towgs84=0,0,0 +units=km
+x_0=2526000.000000 +y_0=5806000.000000
TrueLat: 60 N
GridRot: 0
Xunit:1000 m Yunit: 1000 m
NPX: 0.000000 NPY: 0.000000
Ax: 1.000000 Ay: 1.000000 Bx: -2526.000000 By: -262.000000
Satellite: <satellite name>
Date and Time: <HH:MM dd/mm-yyyy>
SatDir: 0
Channels: <number of chanels> In this file: <channels names in order>
Xsize: <number of pixels x>
Ysize: <number of pixels y>
Map projection: Stereographic
Proj string: <proj4 string with +x_0 and +y_0 which is the positive
distance from proj origo
to the lower left corner of the image data>
TrueLat: 60 N
GridRot: 0
Xunit:1000 m Yunit: 1000 m
NPX: 0.000000 NPY: 0.000000
Ax: <pixels size x in km> Ay: <pixel size y in km> Bx: <left corner of
upper right pixel in km>
By: <upper corner of upper right pixel in km>
if palette image write special palette
if normal channel write table calibration:
Table_calibration: <channel name>, <calibration type>, [<unit>],
<no of bits of data>,
[<calibration values space separated>]\n\n
"""
translate_platform_name = {
"metop01": "Metop-B",
"metop02": "Metop-A",
"metop03": "Metop-C",
"noaa15": "NOAA-15",
"noaa16": "NOAA-16",
"noaa17": "NOAA-17",
"noaa18": "NOAA-18",
"noaa19": "NOAA-19",
}
first_dataset = datasets
if isinstance(datasets, list):
LOG.debug("Datasets is a list of dataset")
first_dataset = datasets[0]
if "platform_name" in first_dataset.attrs:
_platform_name = translate_platform_name.get(
first_dataset.attrs["platform_name"], first_dataset.attrs["platform_name"]
)
elif "platform_name" in kwargs:
_platform_name = translate_platform_name.get(
kwargs["platform_name"], kwargs["platform_name"]
)
else:
_platform_name = None
_image_description = ""
_image_description.encode("utf-8")
_image_description += " Satellite: "
if _platform_name is not None:
_image_description += _platform_name
_image_description += "\n"
_image_description += " Date and Time: "
# Select earliest start_time
first = True
earliest = 0
for dataset in datasets:
if first:
earliest = dataset.attrs["start_time"]
else:
if dataset.attrs["start_time"] < earliest:
earliest = dataset.attrs["start_time"]
first = False
LOG.debug("earliest start_time: %s", earliest)
_image_description += earliest.strftime("%H:%M %d/%m-%Y\n")
_image_description += " SatDir: 0\n"
_image_description += " Channels: "
if isinstance(datasets, list):
LOG.debug("len datasets: %s", len(datasets))
_image_description += str(len(datasets))
else:
LOG.debug("len datasets: %s", datasets.sizes["bands"])
_image_description += str(datasets.sizes["bands"])
_image_description += " In this file: "
channels = self._make_channel_list(datasets, **kwargs)
try:
cns = self.translate_channel_name.get(kwargs["sensor"], {})
except KeyError:
pass
_image_description += self._channel_names(channels, cns, **kwargs)
_image_description += self._add_sizes(datasets, first_dataset)
_image_description += " Map projection: Stereographic\n"
_image_description += self._add_proj4_string(datasets, first_dataset)
_image_description += " TrueLat: 60N\n"
_image_description += " GridRot: 0\n"
_image_description += " Xunit:1000 m Yunit: 1000 m\n"
_image_description += " NPX: %.6f" % (0)
_image_description += " NPY: %.6f" % (0) + "\n"
_image_description += self._add_pixel_sizes(datasets, first_dataset)
_image_description += self._add_corners(datasets, first_dataset)
if isinstance(datasets, list):
LOG.debug("Area extent: %s", first_dataset.attrs["area"].area_extent)
else:
LOG.debug("Area extent: %s", datasets.attrs["area"].area_extent)
_image_description += self._add_calibration(channels, cns, datasets, **kwargs)
return _image_description
|
https://github.com/pytroll/satpy/issues/369
|
scn.save_dataset('natural', writer='mitiff', filename='/tmp/mitiff.tif')
[DEBUG: 2018-07-16 12:29:43 : satpy.writers] Reading ['/home/lahtinep/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/etc/writers/mitiff.yaml']
[DEBUG: 2018-07-16 12:29:43 : satpy.writers.mitiff] Starting in mitiff save_dataset ...
[WARNING: 2018-07-16 12:29:43 : satpy.writers.mitiff] Unset save_dir. Use: ./
/home/lahtinep/Software/miniconda3/lib/python3.6/site-packages/pyresample/kd_tree.py:924: RuntimeWarning: invalid value encountered in sqrt
mask=mask)
[DEBUG: 2018-07-16 12:29:45 : satpy.writers.mitiff] create_opts: ./
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-48-762780f9f91a> in <module>()
----> 1 lcl.save_dataset('natural', writer='mitiff', filename='/tmp/mitiff.tif')
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/scene.py in save_dataset(self, dataset_id, filename, writer, overlay, compute, **kwargs)
1008 return writer.save_dataset(self[dataset_id], filename=filename,
1009 overlay=overlay, compute=compute,
-> 1010 **save_kwargs)
1011
1012 def save_datasets(self, writer="geotiff", datasets=None, compute=True,
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/writers/mitiff.py in save_dataset(self, dataset, filename, fill_value, compute, base_dir, **kwargs)
116
117 if compute:
--> 118 return delayed.compute()
119 return delayed
120
~/Software/miniconda3/lib/python3.6/site-packages/dask/base.py in compute(self, **kwargs)
154 dask.base.compute
155 """
--> 156 (result,) = compute(self, traverse=False, **kwargs)
157 return result
158
~/Software/miniconda3/lib/python3.6/site-packages/dask/base.py in compute(*args, **kwargs)
400 keys = [x.__dask_keys__() for x in collections]
401 postcomputes = [x.__dask_postcompute__() for x in collections]
--> 402 results = schedule(dsk, keys, **kwargs)
403 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)])
404
~/Software/miniconda3/lib/python3.6/site-packages/dask/threaded.py in get(dsk, result, cache, num_workers, **kwargs)
73 results = get_async(pool.apply_async, len(pool._pool), dsk, result,
74 cache=cache, get_id=_thread_get_id,
---> 75 pack_exception=pack_exception, **kwargs)
76
77 # Cleanup pools associated to dead threads
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in get_async(apply_async, num_workers, dsk, result, cache, get_id, rerun_exceptions_locally, pack_exception, raise_exception, callbacks, dumps, loads, **kwargs)
519 _execute_task(task, data) # Re-execute locally
520 else:
--> 521 raise_exception(exc, tb)
522 res, worker_id = loads(res_info)
523 state['cache'][key] = res
~/Software/miniconda3/lib/python3.6/site-packages/dask/compatibility.py in reraise(exc, tb)
67 if exc.__traceback__ is not tb:
68 raise exc.with_traceback(tb)
---> 69 raise exc
70
71 else:
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in execute_task(key, task_info, dumps, loads, get_id, pack_exception)
288 try:
289 task, data = loads(task_info)
--> 290 result = _execute_task(task, data)
291 id = get_id()
292 result = dumps((result, id))
~/Software/miniconda3/lib/python3.6/site-packages/dask/local.py in _execute_task(arg, cache, dsk)
269 func, args = arg[0], arg[1:]
270 args2 = [_execute_task(a, cache) for a in args]
--> 271 return func(*args2)
272 elif not ishashable(arg):
273 return arg
~/Software/miniconda3/lib/python3.6/site-packages/satpy-0.9.1a0.dev0-py3.6.egg/satpy/writers/mitiff.py in _delayed_create(create_opts, dataset)
76 kwargs['name'] = dataset.attrs['name']
77 if 'start_time' not in kwargs:
---> 78 kwargs['start_time'] = dataset.attrs['start_time']
79 if 'sensor' not in kwargs:
80 kwargs['sensor'] = dataset.attrs['sensor']
KeyError: 'start_time'
|
KeyError
|
def read_band(self, key, info):
"""Read the data."""
shape = int(np.ceil(self.mda["data_field_length"] / 8.0))
if self.mda["number_of_bits_per_pixel"] == 16:
dtype = ">u2"
shape //= 2
elif self.mda["number_of_bits_per_pixel"] in [8, 10]:
dtype = np.uint8
shape = (shape,)
data = np.memmap(
self.filename,
mode="r",
offset=self.mda["total_header_length"],
dtype=dtype,
shape=shape,
)
data = da.from_array(data, chunks=shape[0])
if self.mda["number_of_bits_per_pixel"] == 10:
data = dec10216(data)
data = data.reshape((self.mda["number_of_lines"], self.mda["number_of_columns"]))
return data
|
def read_band(self, key, info):
"""Read the data."""
shape = int(np.ceil(self.mda["data_field_length"] / 8.0))
if self.mda["number_of_bits_per_pixel"] == 16:
dtype = ">u2"
shape /= 2
elif self.mda["number_of_bits_per_pixel"] in [8, 10]:
dtype = np.uint8
shape = (shape,)
data = np.memmap(
self.filename,
mode="r",
offset=self.mda["total_header_length"],
dtype=dtype,
shape=shape,
)
data = da.from_array(data, chunks=shape[0])
if self.mda["number_of_bits_per_pixel"] == 10:
data = dec10216(data)
data = data.reshape((self.mda["number_of_lines"], self.mda["number_of_columns"]))
return data
|
https://github.com/pytroll/satpy/issues/328
|
filenames=find_files_and_readers(base_dir='./DK01_201101010030/', reader='hrit_jma')
scn = Scene(filenames=filenames)
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-2-5fb0dc98e3f1> in <module>()
1 filenames=find_files_and_readers(base_dir='./DK01_201101010030/', reader='hrit_jma')
----> 2 scn = Scene(filenames=filenames)
/mnt/c/Users/harry/gdrive/work/2018-02_rerun/data_sources/mtsat/ext/satpy/satpy/scene.py in __init__(self, filenames, reader, filter_parameters, reader_kwargs, ppp_config_dir, base_dir, sensor, start_time, end_time, area)
148 self.readers = self.create_reader_instances(filenames=filenames,
149 reader=reader,
--> 150 reader_kwargs=reader_kwargs)
151 self.attrs.update(self._compute_metadata_from_readers())
152 self.datasets = DatasetDict()
/mnt/c/Users/harry/gdrive/work/2018-02_rerun/data_sources/mtsat/ext/satpy/satpy/scene.py in create_reader_instances(self, filenames, reader, reader_kwargs)
193 reader=reader,
194 reader_kwargs=reader_kwargs,
--> 195 ppp_config_dir=self.ppp_config_dir)
196
197 @property
/mnt/c/Users/harry/gdrive/work/2018-02_rerun/data_sources/mtsat/ext/satpy/satpy/readers/__init__.py in load_readers(filenames, reader, reader_kwargs, ppp_config_dir)
587 loadables = reader_instance.select_files_from_pathnames(readers_files)
588 if loadables:
--> 589 reader_instance.create_filehandlers(loadables)
590 reader_instances[reader_instance.name] = reader_instance
591 remaining_filenames -= set(loadables)
/mnt/c/Users/harry/gdrive/work/2018-02_rerun/data_sources/mtsat/ext/satpy/satpy/readers/yaml_reader.py in create_filehandlers(self, filenames)
485 for filetype, filetype_info in self.sorted_filetype_items():
486 filehandlers = self.new_filehandlers_for_filetype(filetype_info,
--> 487 filename_set)
488
489 filename_set -= set([fhd.filename for fhd in filehandlers])
/mnt/c/Users/harry/gdrive/work/2018-02_rerun/data_sources/mtsat/ext/satpy/satpy/readers/yaml_reader.py in new_filehandlers_for_filetype(self, filetype_info, filenames)
472 filename_iter)
473 filtered_iter = self.filter_fh_by_metadata(filehandler_iter)
--> 474 return list(filtered_iter)
475
476 def create_filehandlers(self, filenames):
/mnt/c/Users/harry/gdrive/work/2018-02_rerun/data_sources/mtsat/ext/satpy/satpy/readers/yaml_reader.py in filter_fh_by_metadata(self, filehandlers)
445 def filter_fh_by_metadata(self, filehandlers):
446 """Filter out filehandlers using provide filter parameters."""
--> 447 for filehandler in filehandlers:
448 filehandler.metadata['start_time'] = filehandler.start_time
449 filehandler.metadata['end_time'] = filehandler.end_time
/mnt/c/Users/harry/gdrive/work/2018-02_rerun/data_sources/mtsat/ext/satpy/satpy/readers/yaml_reader.py in new_filehandler_instances(self, filetype_info, filename_items)
389 continue
390
--> 391 yield filetype_cls(filename, filename_info, filetype_info, *req_fh)
392
393 def time_matches(self, fstart, fend):
/mnt/c/Users/harry/gdrive/work/2018-02_rerun/data_sources/mtsat/ext/satpy/satpy/readers/hrit_jma.py in __init__(self, filename, filename_info, filetype_info)
116 self.mda['planned_start_segment_number'] = 1
117
--> 118 items = self.mda['image_data_function'].split('\r')
119 if items[0].startswith('$HALFTONE'):
120 self.calibration_table = []
TypeError: a bytes-like object is required, not 'str'
|
TypeError
|
def __init__(self, filename, filename_info, filetype_info):
"""Initialize the reader."""
super(HRITJMAFileHandler, self).__init__(
filename,
filename_info,
filetype_info,
(jma_hdr_map, jma_variable_length_headers, jma_text_headers),
)
self.mda["segment_sequence_number"] = self.mda["image_segm_seq_no"]
self.mda["planned_end_segment_number"] = self.mda["total_no_image_segm"]
self.mda["planned_start_segment_number"] = 1
items = self.mda["image_data_function"].decode().split("\r")
if items[0].startswith("$HALFTONE"):
self.calibration_table = []
for item in items[1:]:
if item == "":
continue
key, value = item.split(":=")
if key.startswith("_UNIT"):
self.mda["unit"] = item.split(":=")[1]
elif key.startswith("_NAME"):
pass
elif key.isdigit():
key = int(key)
value = float(value)
self.calibration_table.append((key, value))
self.calibration_table = np.array(self.calibration_table)
projection_name = self.mda["projection_name"].decode()
sublon = float(projection_name.strip().split("(")[1][:-1])
self.mda["projection_parameters"]["SSP_longitude"] = sublon
|
def __init__(self, filename, filename_info, filetype_info):
"""Initialize the reader."""
super(HRITJMAFileHandler, self).__init__(
filename,
filename_info,
filetype_info,
(jma_hdr_map, jma_variable_length_headers, jma_text_headers),
)
self.mda["segment_sequence_number"] = self.mda["image_segm_seq_no"]
self.mda["planned_end_segment_number"] = self.mda["total_no_image_segm"]
self.mda["planned_start_segment_number"] = 1
items = self.mda["image_data_function"].split("\r")
if items[0].startswith("$HALFTONE"):
self.calibration_table = []
for item in items[1:]:
if item == "":
continue
key, value = item.split(":=")
if key.startswith("_UNIT"):
self.mda["unit"] = item.split(":=")[1]
elif key.startswith("_NAME"):
pass
elif key.isdigit():
key = int(key)
value = float(value)
self.calibration_table.append((key, value))
self.calibration_table = np.array(self.calibration_table)
sublon = float(self.mda["projection_name"].strip().split("(")[1][:-1])
self.mda["projection_parameters"]["SSP_longitude"] = sublon
|
https://github.com/pytroll/satpy/issues/328
|
filenames=find_files_and_readers(base_dir='./DK01_201101010030/', reader='hrit_jma')
scn = Scene(filenames=filenames)
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-2-5fb0dc98e3f1> in <module>()
1 filenames=find_files_and_readers(base_dir='./DK01_201101010030/', reader='hrit_jma')
----> 2 scn = Scene(filenames=filenames)
/mnt/c/Users/harry/gdrive/work/2018-02_rerun/data_sources/mtsat/ext/satpy/satpy/scene.py in __init__(self, filenames, reader, filter_parameters, reader_kwargs, ppp_config_dir, base_dir, sensor, start_time, end_time, area)
148 self.readers = self.create_reader_instances(filenames=filenames,
149 reader=reader,
--> 150 reader_kwargs=reader_kwargs)
151 self.attrs.update(self._compute_metadata_from_readers())
152 self.datasets = DatasetDict()
/mnt/c/Users/harry/gdrive/work/2018-02_rerun/data_sources/mtsat/ext/satpy/satpy/scene.py in create_reader_instances(self, filenames, reader, reader_kwargs)
193 reader=reader,
194 reader_kwargs=reader_kwargs,
--> 195 ppp_config_dir=self.ppp_config_dir)
196
197 @property
/mnt/c/Users/harry/gdrive/work/2018-02_rerun/data_sources/mtsat/ext/satpy/satpy/readers/__init__.py in load_readers(filenames, reader, reader_kwargs, ppp_config_dir)
587 loadables = reader_instance.select_files_from_pathnames(readers_files)
588 if loadables:
--> 589 reader_instance.create_filehandlers(loadables)
590 reader_instances[reader_instance.name] = reader_instance
591 remaining_filenames -= set(loadables)
/mnt/c/Users/harry/gdrive/work/2018-02_rerun/data_sources/mtsat/ext/satpy/satpy/readers/yaml_reader.py in create_filehandlers(self, filenames)
485 for filetype, filetype_info in self.sorted_filetype_items():
486 filehandlers = self.new_filehandlers_for_filetype(filetype_info,
--> 487 filename_set)
488
489 filename_set -= set([fhd.filename for fhd in filehandlers])
/mnt/c/Users/harry/gdrive/work/2018-02_rerun/data_sources/mtsat/ext/satpy/satpy/readers/yaml_reader.py in new_filehandlers_for_filetype(self, filetype_info, filenames)
472 filename_iter)
473 filtered_iter = self.filter_fh_by_metadata(filehandler_iter)
--> 474 return list(filtered_iter)
475
476 def create_filehandlers(self, filenames):
/mnt/c/Users/harry/gdrive/work/2018-02_rerun/data_sources/mtsat/ext/satpy/satpy/readers/yaml_reader.py in filter_fh_by_metadata(self, filehandlers)
445 def filter_fh_by_metadata(self, filehandlers):
446 """Filter out filehandlers using provide filter parameters."""
--> 447 for filehandler in filehandlers:
448 filehandler.metadata['start_time'] = filehandler.start_time
449 filehandler.metadata['end_time'] = filehandler.end_time
/mnt/c/Users/harry/gdrive/work/2018-02_rerun/data_sources/mtsat/ext/satpy/satpy/readers/yaml_reader.py in new_filehandler_instances(self, filetype_info, filename_items)
389 continue
390
--> 391 yield filetype_cls(filename, filename_info, filetype_info, *req_fh)
392
393 def time_matches(self, fstart, fend):
/mnt/c/Users/harry/gdrive/work/2018-02_rerun/data_sources/mtsat/ext/satpy/satpy/readers/hrit_jma.py in __init__(self, filename, filename_info, filetype_info)
116 self.mda['planned_start_segment_number'] = 1
117
--> 118 items = self.mda['image_data_function'].split('\r')
119 if items[0].startswith('$HALFTONE'):
120 self.calibration_table = []
TypeError: a bytes-like object is required, not 'str'
|
TypeError
|
def omerc2cf(area):
"""Return the cf grid mapping for the omerc projection."""
proj_dict = area.proj_dict
args = dict(
azimuth_of_central_line=proj_dict.get("alpha"),
latitude_of_projection_origin=proj_dict.get("lat_0"),
longitude_of_projection_origin=proj_dict.get("lonc"),
grid_mapping_name="oblique_mercator",
reference_ellipsoid_name=proj_dict.get("ellps", "WGS84"),
false_easting=0.0,
false_northing=0.0,
)
if "no_rot" in proj_dict:
args["no_rotation"] = 1
if "gamma" in proj_dict:
args["gamma"] = proj_dict["gamma"]
return args
|
def omerc2cf(proj_dict):
"""Return the cf grid mapping for the omerc projection."""
if "no_rot" in proj_dict:
no_rotation = " "
else:
no_rotation = None
args = dict(
azimuth_of_central_line=proj_dict.get("alpha"),
latitude_of_projection_origin=proj_dict.get("lat_0"),
longitude_of_projection_origin=proj_dict.get("lonc"),
grid_mapping_name="oblique_mercator",
# longitude_of_projection_origin=0.,
no_rotation=no_rotation,
# reference_ellipsoid_name=proj_dict.get('ellps'),
semi_major_axis=6378137.0,
semi_minor_axis=6356752.3142,
false_easting=0.0,
false_northing=0.0,
)
return args
|
https://github.com/pytroll/satpy/issues/123
|
[INFO: 2017-12-12 08:14:04 : satpy.writers.cf_writer] Saving datasets to NetCDF4/CF.
[INFO: 2017-12-12 08:14:04 : satpy.writers.cf_writer] No longitude and latitude data to save.
[INFO: 2017-12-12 08:14:04 : satpy.writers.cf_writer] No grid mapping to save.
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-24-58ece08c15f3> in <module>()
----> 1 lcl.save_datasets(writer='cf', filename='/tmp/foo.nc')
/home/lahtinep/Software/pytroll/packages/satpy/satpy/scene.pyc in save_datasets(self, writer, datasets, **kwargs)
632 datasets = self.datasets.values()
633 writer = self.get_writer(writer, **kwargs)
--> 634 writer.save_datasets(datasets, **kwargs)
635
636 def get_writer(self, writer="geotiff", **kwargs):
/home/lahtinep/Software/pytroll/packages/satpy/satpy/writers/cf_writer.pyc in save_datasets(self, datasets, filename, **kwargs)
176 coords = [line_coord, pixel_coord]
177
--> 178 domain = cf.Domain(dim=coords,
179 aux=aux,
180 ref=grid_mapping)
AttributeError: 'module' object has no attribute 'Domain'
|
AttributeError
|
def geos2cf(area):
"""Return the cf grid mapping for the geos projection."""
proj_dict = area.proj_dict
args = dict(
perspective_point_height=proj_dict.get("h"),
latitude_of_projection_origin=proj_dict.get("lat_0"),
longitude_of_projection_origin=proj_dict.get("lon_0"),
grid_mapping_name="geostationary",
semi_major_axis=proj_dict.get("a"),
semi_minor_axis=proj_dict.get("b"),
sweep_axis=proj_dict.get("sweep"),
)
return args
|
def geos2cf(proj_dict):
"""Return the cf grid mapping for the geos projection."""
args = dict(
perspective_point_height=proj_dict.get("h"),
latitude_of_projection_origin=proj_dict.get("lat_0"),
longitude_of_projection_origin=proj_dict.get("lon_0"),
grid_mapping_name="geostationary",
semi_major_axis=proj_dict.get("a"),
semi_minor_axis=proj_dict.get("b"),
sweep_axis=proj_dict.get("sweep"),
)
return args
|
https://github.com/pytroll/satpy/issues/123
|
[INFO: 2017-12-12 08:14:04 : satpy.writers.cf_writer] Saving datasets to NetCDF4/CF.
[INFO: 2017-12-12 08:14:04 : satpy.writers.cf_writer] No longitude and latitude data to save.
[INFO: 2017-12-12 08:14:04 : satpy.writers.cf_writer] No grid mapping to save.
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-24-58ece08c15f3> in <module>()
----> 1 lcl.save_datasets(writer='cf', filename='/tmp/foo.nc')
/home/lahtinep/Software/pytroll/packages/satpy/satpy/scene.pyc in save_datasets(self, writer, datasets, **kwargs)
632 datasets = self.datasets.values()
633 writer = self.get_writer(writer, **kwargs)
--> 634 writer.save_datasets(datasets, **kwargs)
635
636 def get_writer(self, writer="geotiff", **kwargs):
/home/lahtinep/Software/pytroll/packages/satpy/satpy/writers/cf_writer.pyc in save_datasets(self, datasets, filename, **kwargs)
176 coords = [line_coord, pixel_coord]
177
--> 178 domain = cf.Domain(dim=coords,
179 aux=aux,
180 ref=grid_mapping)
AttributeError: 'module' object has no attribute 'Domain'
|
AttributeError
|
def laea2cf(area):
"""Return the cf grid mapping for the laea projection."""
proj_dict = area.proj_dict
args = dict(
latitude_of_projection_origin=proj_dict.get("lat_0"),
longitude_of_projection_origin=proj_dict.get("lon_0"),
grid_mapping_name="lambert_azimuthal_equal_area",
)
return args
|
def laea2cf(proj_dict):
"""Return the cf grid mapping for the laea projection."""
args = dict(
latitude_of_projection_origin=proj_dict.get("lat_0"),
longitude_of_projection_origin=proj_dict.get("lon_0"),
grid_mapping_name="lambert_azimuthal_equal_area",
)
return args
|
https://github.com/pytroll/satpy/issues/123
|
[INFO: 2017-12-12 08:14:04 : satpy.writers.cf_writer] Saving datasets to NetCDF4/CF.
[INFO: 2017-12-12 08:14:04 : satpy.writers.cf_writer] No longitude and latitude data to save.
[INFO: 2017-12-12 08:14:04 : satpy.writers.cf_writer] No grid mapping to save.
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-24-58ece08c15f3> in <module>()
----> 1 lcl.save_datasets(writer='cf', filename='/tmp/foo.nc')
/home/lahtinep/Software/pytroll/packages/satpy/satpy/scene.pyc in save_datasets(self, writer, datasets, **kwargs)
632 datasets = self.datasets.values()
633 writer = self.get_writer(writer, **kwargs)
--> 634 writer.save_datasets(datasets, **kwargs)
635
636 def get_writer(self, writer="geotiff", **kwargs):
/home/lahtinep/Software/pytroll/packages/satpy/satpy/writers/cf_writer.pyc in save_datasets(self, datasets, filename, **kwargs)
176 coords = [line_coord, pixel_coord]
177
--> 178 domain = cf.Domain(dim=coords,
179 aux=aux,
180 ref=grid_mapping)
AttributeError: 'module' object has no attribute 'Domain'
|
AttributeError
|
def create_grid_mapping(area):
"""Create the grid mapping instance for `area`."""
try:
grid_mapping = mappings[area.proj_dict["proj"]](area)
grid_mapping["name"] = area.proj_dict["proj"]
except KeyError:
raise NotImplementedError
return grid_mapping
|
def create_grid_mapping(area):
"""Create the grid mapping instance for `area`."""
try:
grid_mapping = mappings[area.proj_dict["proj"]](area.proj_dict)
grid_mapping["name"] = area.proj_dict["proj"]
except KeyError:
raise NotImplementedError
return grid_mapping
|
https://github.com/pytroll/satpy/issues/123
|
[INFO: 2017-12-12 08:14:04 : satpy.writers.cf_writer] Saving datasets to NetCDF4/CF.
[INFO: 2017-12-12 08:14:04 : satpy.writers.cf_writer] No longitude and latitude data to save.
[INFO: 2017-12-12 08:14:04 : satpy.writers.cf_writer] No grid mapping to save.
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-24-58ece08c15f3> in <module>()
----> 1 lcl.save_datasets(writer='cf', filename='/tmp/foo.nc')
/home/lahtinep/Software/pytroll/packages/satpy/satpy/scene.pyc in save_datasets(self, writer, datasets, **kwargs)
632 datasets = self.datasets.values()
633 writer = self.get_writer(writer, **kwargs)
--> 634 writer.save_datasets(datasets, **kwargs)
635
636 def get_writer(self, writer="geotiff", **kwargs):
/home/lahtinep/Software/pytroll/packages/satpy/satpy/writers/cf_writer.pyc in save_datasets(self, datasets, filename, **kwargs)
176 coords = [line_coord, pixel_coord]
177
--> 178 domain = cf.Domain(dim=coords,
179 aux=aux,
180 ref=grid_mapping)
AttributeError: 'module' object has no attribute 'Domain'
|
AttributeError
|
def da2cf(dataarray, epoch=EPOCH):
"""Convert the dataarray to something cf-compatible."""
new_data = dataarray.copy()
# Remove the area
new_data.attrs.pop("area", None)
anc = [ds.attrs["name"] for ds in new_data.attrs.get("ancillary_variables", [])]
if anc:
new_data.attrs["ancillary_variables"] = " ".join(anc)
# TODO: make this a grid mapping or lon/lats
# new_data.attrs['area'] = str(new_data.attrs.get('area'))
for key, val in new_data.attrs.copy().items():
if val is None:
new_data.attrs.pop(key)
new_data.attrs.pop("_last_resampler", None)
if "time" in new_data.coords:
new_data["time"].encoding["units"] = epoch
new_data["time"].attrs["standard_name"] = "time"
new_data["time"].attrs.pop("bounds", None)
if "x" in new_data.coords:
new_data["x"].attrs["standard_name"] = "projection_x_coordinate"
new_data["x"].attrs["units"] = "m"
if "y" in new_data.coords:
new_data["y"].attrs["standard_name"] = "projection_y_coordinate"
new_data["y"].attrs["units"] = "m"
new_data.attrs.setdefault("long_name", new_data.attrs.pop("name"))
return new_data
|
def da2cf(dataarray, epoch=EPOCH):
"""Convert the dataarray to something cf-compatible."""
new_data = dataarray.copy()
# TODO: make these boundaries of the time dimension
new_data.attrs.pop("start_time", None)
new_data.attrs.pop("end_time", None)
# Remove the area
new_data.attrs.pop("area", None)
anc = [ds.attrs["name"] for ds in new_data.attrs.get("ancillary_variables", [])]
if anc:
new_data.attrs["ancillary_variables"] = " ".join(anc)
# TODO: make this a grid mapping or lon/lats
# new_data.attrs['area'] = str(new_data.attrs.get('area'))
for key, val in new_data.attrs.copy().items():
if val is None:
new_data.attrs.pop(key)
new_data.attrs.pop("_last_resampler", None)
if "time" in new_data.coords:
new_data["time"].encoding["units"] = epoch
new_data["time"].attrs["standard_name"] = "time"
new_data["time"].attrs.pop("bounds", None)
if "x" in new_data.coords:
new_data["x"].attrs["standard_name"] = "projection_x_coordinate"
new_data["x"].attrs["units"] = "m"
if "y" in new_data.coords:
new_data["y"].attrs["standard_name"] = "projection_y_coordinate"
new_data["y"].attrs["units"] = "m"
new_data.attrs.setdefault("long_name", new_data.attrs.pop("name"))
return new_data
|
https://github.com/pytroll/satpy/issues/123
|
[INFO: 2017-12-12 08:14:04 : satpy.writers.cf_writer] Saving datasets to NetCDF4/CF.
[INFO: 2017-12-12 08:14:04 : satpy.writers.cf_writer] No longitude and latitude data to save.
[INFO: 2017-12-12 08:14:04 : satpy.writers.cf_writer] No grid mapping to save.
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-24-58ece08c15f3> in <module>()
----> 1 lcl.save_datasets(writer='cf', filename='/tmp/foo.nc')
/home/lahtinep/Software/pytroll/packages/satpy/satpy/scene.pyc in save_datasets(self, writer, datasets, **kwargs)
632 datasets = self.datasets.values()
633 writer = self.get_writer(writer, **kwargs)
--> 634 writer.save_datasets(datasets, **kwargs)
635
636 def get_writer(self, writer="geotiff", **kwargs):
/home/lahtinep/Software/pytroll/packages/satpy/satpy/writers/cf_writer.pyc in save_datasets(self, datasets, filename, **kwargs)
176 coords = [line_coord, pixel_coord]
177
--> 178 domain = cf.Domain(dim=coords,
179 aux=aux,
180 ref=grid_mapping)
AttributeError: 'module' object has no attribute 'Domain'
|
AttributeError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.